--- a/.hgignore Wed Jul 05 11:24:22 2017 -0400
+++ b/.hgignore Wed Jul 19 07:51:41 2017 -0500
@@ -55,14 +55,6 @@
locale/*/LC_MESSAGES/hg.mo
hgext/__index__.py
-# files installed with a local --pure build
-mercurial/base85.py
-mercurial/bdiff.py
-mercurial/diffhelpers.py
-mercurial/mpatch.py
-mercurial/osutil.py
-mercurial/parsers.py
-
# Generated wheels
wheelhouse/
--- a/Makefile Wed Jul 05 11:24:22 2017 -0400
+++ b/Makefile Wed Jul 19 07:51:41 2017 -0500
@@ -64,7 +64,6 @@
-$(PYTHON) setup.py clean --all # ignore errors from this command
find contrib doc hgext hgext3rd i18n mercurial tests \
\( -name '*.py[cdo]' -o -name '*.so' \) -exec rm -f '{}' ';'
- rm -f $(addprefix mercurial/,$(notdir $(wildcard mercurial/pure/[a-z]*.py)))
rm -f MANIFEST MANIFEST.in hgext/__index__.py tests/*.err
rm -f mercurial/__modulepolicy__.py
if test -d .hg; then rm -f mercurial/__version__.py; fi
@@ -177,6 +176,14 @@
# location of our own.
install -d build/mercurial/usr/local/hg/contrib/
install -m 0644 contrib/bash_completion build/mercurial/usr/local/hg/contrib/hg-completion.bash
+ make -C contrib/chg \
+ HGPATH=/usr/local/bin/hg \
+ PYTHON=/usr/bin/python2.7 \
+ HG=/usr/local/bin/hg \
+ HGEXTDIR=/Library/Python/2.7/site-packages/hgext \
+ DESTDIR=../../build/mercurial \
+ PREFIX=/usr/local \
+ clean install
mkdir -p $${OUTPUTDIR:-dist}
HGVER=$$((cat build/mercurial/Library/Python/2.7/site-packages/mercurial/__version__.py; echo 'print(version)') | python) && \
OSXVER=$$(sw_vers -productVersion | cut -d. -f1,2) && \
--- a/contrib/bdiff-torture.py Wed Jul 05 11:24:22 2017 -0400
+++ b/contrib/bdiff-torture.py Wed Jul 19 07:51:41 2017 -0500
@@ -5,8 +5,7 @@
import sys
from mercurial import (
- bdiff,
- mpatch,
+ mdiff,
)
def reducetest(a, b):
@@ -42,10 +41,10 @@
sys.exit(0)
def test1(a, b):
- d = bdiff.bdiff(a, b)
+ d = mdiff.textdiff(a, b)
if not d:
raise ValueError("empty")
- c = mpatch.patches(a, [d])
+ c = mdiff.patches(a, [d])
if c != b:
raise ValueError("bad")
--- a/contrib/check-code.py Wed Jul 05 11:24:22 2017 -0400
+++ b/contrib/check-code.py Wed Jul 19 07:51:41 2017 -0500
@@ -116,6 +116,7 @@
(r'printf.*[^\\]\\x', "don't use printf \\x, use Python"),
(r'\$\(.*\)', "don't use $(expr), use `expr`"),
(r'rm -rf \*', "don't use naked rm -rf, target a directory"),
+ (r'\[[^\]]+==', '[ foo == bar ] is a bashism, use [ foo = bar ] instead'),
(r'(^|\|\s*)grep (-\w\s+)*[^|]*[(|]\w',
"use egrep for extended grep syntax"),
(r'/bin/', "don't use explicit paths for tools"),
@@ -137,6 +138,7 @@
"put a backslash-escaped newline after sed 'i' command"),
(r'^diff *-\w*[uU].*$\n(^ \$ |^$)', "prefix diff -u/-U with cmp"),
(r'^\s+(if)? diff *-\w*[uU]', "prefix diff -u/-U with cmp"),
+ (r'[\s="`\']python\s(?!bindings)', "don't use 'python', use '$PYTHON'"),
(r'seq ', "don't use 'seq', use $TESTDIR/seq.py"),
(r'\butil\.Abort\b', "directly use error.Abort"),
(r'\|&', "don't use |&, use 2>&1"),
@@ -144,6 +146,7 @@
(r'\bsed\b.*[^\\]\\n', "don't use 'sed ... \\n', use a \\ and a newline"),
(r'env.*-u', "don't use 'env -u VAR', use 'unset VAR'"),
(r'cp.* -r ', "don't use 'cp -r', use 'cp -R'"),
+ (r'grep.* -[ABC] ', "don't use grep's context flags"),
],
# warnings
[
@@ -298,8 +301,10 @@
"comparison with singleton, use 'is' or 'is not' instead"),
(r'^\s*(while|if) [01]:',
"use True/False for constant Boolean expression"),
+ (r'^\s*if False(:| +and)', 'Remove code instead of using `if False`'),
(r'(?:(?<!def)\s+|\()hasattr\(',
- 'hasattr(foo, bar) is broken, use util.safehasattr(foo, bar) instead'),
+ 'hasattr(foo, bar) is broken on py2, use util.safehasattr(foo, bar) '
+ 'instead', r'#.*hasattr-py3-only'),
(r'opener\([^)]*\).read\(',
"use opener.read() instead"),
(r'opener\([^)]*\).write\(',
@@ -338,6 +343,8 @@
(r'^import pickle', "don't use pickle, use util.pickle"),
(r'^import httplib', "don't use httplib, use util.httplib"),
(r'^import BaseHTTPServer', "use util.httpserver instead"),
+ (r'^(from|import) mercurial\.(cext|pure|cffi)',
+ "use mercurial.policy.importmod instead"),
(r'\.next\(\)', "don't use .next(), use next(...)"),
(r'([a-z]*).revision\(\1\.node\(',
"don't convert rev to node before passing to revision(nodeorrev)"),
@@ -474,7 +481,7 @@
py3pats = [
[
- (r'os\.environ', "use encoding.environ instead (py3)"),
+ (r'os\.environ', "use encoding.environ instead (py3)", r'#.*re-exports'),
(r'os\.name', "use pycompat.osname instead (py3)"),
(r'os\.getcwd', "use pycompat.getcwd instead (py3)"),
(r'os\.sep', "use pycompat.ossep instead (py3)"),
@@ -492,8 +499,8 @@
checks = [
('python', r'.*\.(py|cgi)$', r'^#!.*python', pyfilters, pypats),
('python', r'.*hgext.*\.py$', '', [], pyextnfpats),
- ('python 3', r'.*(hgext|mercurial).*(?<!pycompat)\.py', '',
- pyfilters, py3pats),
+ ('python 3', r'.*(hgext|mercurial)/(?!demandimport|policy|pycompat).*\.py',
+ '', pyfilters, py3pats),
('test script', r'(.*/)?test-[^.~]*$', '', testfilters, testpats),
('c', r'.*\.[ch]$', '', cfilters, cpats),
('unified test', r'.*\.t$', '', utestfilters, utestpats),
--- a/contrib/check-config.py Wed Jul 05 11:24:22 2017 -0400
+++ b/contrib/check-config.py Wed Jul 19 07:51:41 2017 -0500
@@ -13,18 +13,44 @@
foundopts = {}
documented = {}
+allowinconsistent = set()
-configre = (r"""ui\.config(|int|bool|list)\(['"](\S+)['"],\s*"""
- r"""['"](\S+)['"](,\s+(?:default=)?(\S+?))?\)""")
+configre = re.compile(r'''
+ # Function call
+ ui\.config(?P<ctype>|int|bool|list)\(
+ # First argument.
+ ['"](?P<section>\S+)['"],\s*
+ # Second argument
+ ['"](?P<option>\S+)['"](,\s+
+ (?:default=)?(?P<default>\S+?))?
+ \)''', re.VERBOSE | re.MULTILINE)
+
+configwithre = re.compile(r'''
+ ui\.config(?P<ctype>with)\(
+ # First argument is callback function. This doesn't parse robustly
+ # if it is e.g. a function call.
+ [^,]+,\s*
+ ['"](?P<section>\S+)['"],\s*
+ ['"](?P<option>\S+)['"](,\s+
+ (?:default=)?(?P<default>\S+?))?
+ \)''', re.VERBOSE | re.MULTILINE)
+
configpartialre = (r"""ui\.config""")
+ignorere = re.compile(r'''
+ \#\s(?P<reason>internal|experimental|deprecated|developer|inconsistent)\s
+ config:\s(?P<config>\S+\.\S+)$
+ ''', re.VERBOSE | re.MULTILINE)
+
def main(args):
for f in args:
sect = ''
prevname = ''
confsect = ''
carryover = ''
+ linenum = 0
for l in open(f):
+ linenum += 1
# check topic-like bits
m = re.match('\s*``(\S+)``', l)
@@ -64,28 +90,32 @@
documented[m.group(1)] = 1
# look for ignore markers
- m = re.search(r'# (?:internal|experimental|deprecated|developer)'
- ' config: (\S+\.\S+)$', l)
+ m = ignorere.search(l)
if m:
- documented[m.group(1)] = 1
+ if m.group('reason') == 'inconsistent':
+ allowinconsistent.add(m.group('config'))
+ else:
+ documented[m.group('config')] = 1
# look for code-like bits
line = carryover + l
- m = re.search(configre, line, re.MULTILINE)
+ m = configre.search(line) or configwithre.search(line)
if m:
- ctype = m.group(1)
+ ctype = m.group('ctype')
if not ctype:
ctype = 'str'
- name = m.group(2) + "." + m.group(3)
- default = m.group(5)
+ name = m.group('section') + "." + m.group('option')
+ default = m.group('default')
if default in (None, 'False', 'None', '0', '[]', '""', "''"):
default = ''
if re.match('[a-z.]+$', default):
default = '<variable>'
- if name in foundopts and (ctype, default) != foundopts[name]:
- print(l)
+ if (name in foundopts and (ctype, default) != foundopts[name]
+ and name not in allowinconsistent):
+ print(l.rstrip())
print("conflict on %s: %r != %r" % (name, (ctype, default),
foundopts[name]))
+ print("at %s:%d:" % (f, linenum))
foundopts[name] = (ctype, default)
carryover = ''
else:
--- a/contrib/check-py3-compat.py Wed Jul 05 11:24:22 2017 -0400
+++ b/contrib/check-py3-compat.py Wed Jul 19 07:51:41 2017 -0500
@@ -10,6 +10,7 @@
from __future__ import absolute_import, print_function
import ast
+import importlib
import os
import sys
import traceback
@@ -40,7 +41,6 @@
def check_compat_py3(f):
"""Check Python 3 compatibility of a file with Python 3."""
- import importlib # not available on Python 2.6
with open(f, 'rb') as fh:
content = fh.read()
@@ -51,11 +51,12 @@
return
# Try to import the module.
- # For now we only support mercurial.* and hgext.* modules because figuring
- # out module paths for things not in a package can be confusing.
- if f.startswith(('hgext/', 'mercurial/')) and not f.endswith('__init__.py'):
+ # For now we only support modules in packages because figuring out module
+ # paths for things not in a package can be confusing.
+ if (f.startswith(('hgdemandimport/', 'hgext/', 'mercurial/'))
+ and not f.endswith('__init__.py')):
assert f.endswith('.py')
- name = f.replace('/', '.')[:-3].replace('.pure.', '.')
+ name = f.replace('/', '.')[:-3]
try:
importlib.import_module(name)
except Exception as e:
--- a/contrib/debian/control Wed Jul 05 11:24:22 2017 -0400
+++ b/contrib/debian/control Wed Jul 19 07:51:41 2017 -0500
@@ -13,7 +13,7 @@
unzip,
zip
Standards-Version: 3.9.4
-X-Python-Version: >= 2.6
+X-Python-Version: >= 2.7
Package: mercurial
Depends:
--- a/contrib/debugshell.py Wed Jul 05 11:24:22 2017 -0400
+++ b/contrib/debugshell.py Wed Jul 19 07:51:41 2017 -0500
@@ -6,12 +6,12 @@
import mercurial
import sys
from mercurial import (
- cmdutil,
demandimport,
+ registrar,
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
def pdb(ui, repo, msg, **opts):
objects = {
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/editmergeps.bat Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,2 @@
+@echo off
+powershell -NoProfile -ExecutionPolicy unrestricted -Command "& '%~dp0\editmergeps.ps1' %*"
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/editmergeps.ps1 Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,78 @@
+# A simple script for opening merge conflicts in editor
+# A loose translation of contrib/editmerge to powershell
+# Please make sure that both editmergeps.bat and editmergeps.ps1 are available
+# via %PATH% and use the following Mercurial settings to enable it
+#
+# [ui]
+# merge=editmergeps
+# editmergeps.args=$output
+# editmergeps.check=changed
+# editmergeps.premerge=keep
+
+$file=$args[0]
+
+function Get-Lines
+{
+ Select-String "^<<<<<<" $file | % {"$($_.LineNumber)"}
+}
+
+$ed = $Env:HGEDITOR;
+if ($ed -eq $nil)
+{
+ $ed = $Env:VISUAL;
+}
+if ($ed -eq $nil)
+{
+ $ed = $Env:EDITOR;
+}
+if ($ed -eq $nil)
+{
+ $ed = $(hg showconfig ui.editor);
+}
+if ($ed -eq $nil)
+{
+ Write-Error "merge failed - unable to find editor"
+ exit 1
+}
+
+if (($ed -eq "vim") -or ($ed -eq "emacs") -or `
+ ($ed -eq "nano") -or ($ed -eq "notepad++"))
+{
+ $lines = Get-Lines
+ $firstline = if ($lines.Length -gt 0) { $lines[0] } else { $nil }
+ $previousline = $nil;
+
+
+ # open the editor to the first conflict until there are no more
+ # or the user stops editing the file
+ while (($firstline -ne $nil) -and ($firstline -ne $previousline))
+ {
+ if ($ed -eq "notepad++")
+ {
+ $linearg = "-n$firstline"
+ }
+ else
+ {
+ $linearg = "+$firstline"
+ }
+
+ Start-Process -Wait -NoNewWindow $ed $linearg,$file
+ $previousline = $firstline
+ $lines = Get-Lines
+ $firstline = if ($lines.Length -gt 0) { $lines[0] } else { $nil }
+ }
+}
+else
+{
+ & "$ed" $file
+}
+
+$conflicts=Get-Lines
+if ($conflicts.Length -ne 0)
+{
+ Write-Output "merge failed - resolve the conflicts (line $conflicts) then use 'hg resolve --mark'"
+ exit 1
+}
+
+exit 0
+
--- a/contrib/import-checker.py Wed Jul 05 11:24:22 2017 -0400
+++ b/contrib/import-checker.py Wed Jul 19 07:51:41 2017 -0500
@@ -22,6 +22,18 @@
'mercurial.hgweb.request',
'mercurial.i18n',
'mercurial.node',
+ # for cffi modules to re-export pure functions
+ 'mercurial.pure.base85',
+ 'mercurial.pure.bdiff',
+ 'mercurial.pure.diffhelpers',
+ 'mercurial.pure.mpatch',
+ 'mercurial.pure.osutil',
+ 'mercurial.pure.parsers',
+)
+
+# Whitelist of symbols that can be directly imported.
+directsymbols = (
+ 'demandimport',
)
# Modules that must be aliased because they are commonly confused with
@@ -55,13 +67,11 @@
todo.extend(ast.iter_child_nodes(node))
yield node, newscope
-def dotted_name_of_path(path, trimpure=False):
+def dotted_name_of_path(path):
"""Given a relative path to a source file, return its dotted module name.
>>> dotted_name_of_path('mercurial/error.py')
'mercurial.error'
- >>> dotted_name_of_path('mercurial/pure/parsers.py', trimpure=True)
- 'mercurial.parsers'
>>> dotted_name_of_path('zlibmodule.so')
'zlib'
"""
@@ -69,8 +79,6 @@
parts[-1] = parts[-1].split('.', 1)[0] # remove .py and .so and .ARCH.so
if parts[-1].endswith('module'):
parts[-1] = parts[-1][:-6]
- if trimpure:
- return '.'.join(p for p in parts if p != 'pure')
return '.'.join(parts)
def fromlocalfunc(modulename, localmods):
@@ -80,9 +88,8 @@
`modulename` is an `dotted_name_of_path()`-ed source file path,
which may have `.__init__` at the end of it, of the target source.
- `localmods` is a dict (or set), of which key is an absolute
- `dotted_name_of_path()`-ed source file path of locally defined (=
- Mercurial specific) modules.
+ `localmods` is a set of absolute `dotted_name_of_path()`-ed source file
+ paths of locally defined (= Mercurial specific) modules.
This function assumes that module names not existing in
`localmods` are from the Python standard library.
@@ -106,9 +113,9 @@
convenient, even though this is also equivalent to "absname !=
dottednpath")
- >>> localmods = {'foo.__init__': True, 'foo.foo1': True,
- ... 'foo.bar.__init__': True, 'foo.bar.bar1': True,
- ... 'baz.__init__': True, 'baz.baz1': True }
+ >>> localmods = {'foo.__init__', 'foo.foo1',
+ ... 'foo.bar.__init__', 'foo.bar.bar1',
+ ... 'baz.__init__', 'baz.baz1'}
>>> fromlocal = fromlocalfunc('foo.xxx', localmods)
>>> # relative
>>> fromlocal('foo1')
@@ -163,6 +170,16 @@
return False
return fromlocal
+def populateextmods(localmods):
+ """Populate C extension modules based on pure modules"""
+ newlocalmods = set(localmods)
+ for n in localmods:
+ if n.startswith('mercurial.pure.'):
+ m = n[len('mercurial.pure.'):]
+ newlocalmods.add('mercurial.cext.' + m)
+ newlocalmods.add('mercurial.cffi._' + m)
+ return newlocalmods
+
def list_stdlib_modules():
"""List the modules present in the stdlib.
@@ -203,7 +220,7 @@
yield m
for m in ['cffi']:
yield m
- stdlib_prefixes = set([sys.prefix, sys.exec_prefix])
+ stdlib_prefixes = {sys.prefix, sys.exec_prefix}
# We need to supplement the list of prefixes for the search to work
# when run from within a virtualenv.
for mod in (BaseHTTPServer, zlib):
@@ -227,7 +244,8 @@
for top, dirs, files in os.walk(libpath):
for i, d in reversed(list(enumerate(dirs))):
if (not os.path.exists(os.path.join(top, d, '__init__.py'))
- or top == libpath and d in ('hgext', 'mercurial')):
+ or top == libpath and d in ('hgdemandimport', 'hgext',
+ 'mercurial')):
del dirs[i]
for name in files:
if not name.endswith(('.py', '.so', '.pyc', '.pyo', '.pyd')):
@@ -249,7 +267,7 @@
Args:
source: The python source to examine as a string.
modulename: of specified python source (may have `__init__`)
- localmods: dict of locally defined module names (may have `__init__`)
+ localmods: set of locally defined module names (may have `__init__`)
ignore_nested: If true, import statements that do not start in
column zero will be ignored.
@@ -468,10 +486,11 @@
found = fromlocal(node.module, node.level)
if found and found[2]: # node.module is a package
prefix = found[0] + '.'
- symbols = [n.name for n in node.names
- if not fromlocal(prefix + n.name)]
+ symbols = (n.name for n in node.names
+ if not fromlocal(prefix + n.name))
else:
- symbols = [n.name for n in node.names]
+ symbols = (n.name for n in node.names)
+ symbols = [sym for sym in symbols if sym not in directsymbols]
if node.module and node.col_offset == root_col_offset:
if symbols and fullname not in allowsymbolimports:
yield msg('direct symbol import %s from %s',
@@ -687,13 +706,14 @@
if argv[1] == '-':
argv = argv[:1]
argv.extend(l.rstrip() for l in sys.stdin.readlines())
- localmods = {}
+ localmodpaths = {}
used_imports = {}
any_errors = False
for source_path in argv[1:]:
- modname = dotted_name_of_path(source_path, trimpure=True)
- localmods[modname] = source_path
- for localmodname, source_path in sorted(localmods.items()):
+ modname = dotted_name_of_path(source_path)
+ localmodpaths[modname] = source_path
+ localmods = populateextmods(localmodpaths)
+ for localmodname, source_path in sorted(localmodpaths.items()):
for src, modname, name, line in sources(source_path, localmodname):
try:
used_imports[modname] = sorted(
--- a/contrib/mercurial.spec Wed Jul 05 11:24:22 2017 -0400
+++ b/contrib/mercurial.spec Wed Jul 19 07:51:41 2017 -0500
@@ -37,8 +37,8 @@
%if "%{?withpython}"
BuildRequires: readline-devel, openssl-devel, ncurses-devel, zlib-devel, bzip2-devel
%else
-BuildRequires: python >= 2.6, python-devel, python-docutils >= 0.5
-Requires: python >= 2.6
+BuildRequires: python >= 2.7, python-devel, python-docutils >= 0.5
+Requires: python >= 2.7
%endif
# The hgk extension uses the wish tcl interpreter, but we don't enforce it
#Requires: tk
@@ -153,10 +153,9 @@
%{_bindir}/%{pythonhg}
%{hgpyprefix}
%else
-%if "%{?pythonver}" != "2.4"
%{_libdir}/python%{pythonver}/site-packages/%{name}-*-py%{pythonver}.egg-info
-%endif
%{_libdir}/python%{pythonver}/site-packages/%{name}
%{_libdir}/python%{pythonver}/site-packages/hgext
%{_libdir}/python%{pythonver}/site-packages/hgext3rd
+%{_libdir}/python%{pythonver}/site-packages/hgdemandimport
%endif
--- a/contrib/packagelib.sh Wed Jul 05 11:24:22 2017 -0400
+++ b/contrib/packagelib.sh Wed Jul 19 07:51:41 2017 -0500
@@ -14,7 +14,7 @@
$HG version > /dev/null || { echo 'abort: hg version failed!'; exit 1 ; }
- hgversion=`$HG version | sed -ne 's/.*(version \(.*\))$/\1/p'`
+ hgversion=`LANGUAGE=C $HG version | sed -ne 's/.*(version \(.*\))$/\1/p'`
if echo $hgversion | grep + > /dev/null 2>&1 ; then
tmp=`echo $hgversion | cut -d+ -f 2`
--- a/contrib/perf.py Wed Jul 05 11:24:22 2017 -0400
+++ b/contrib/perf.py Wed Jul 19 07:51:41 2017 -0500
@@ -23,10 +23,10 @@
import gc
import os
import random
+import struct
import sys
import time
from mercurial import (
- bdiff,
changegroup,
cmdutil,
commands,
@@ -35,6 +35,7 @@
extensions,
mdiff,
merge,
+ revlog,
util,
)
@@ -50,6 +51,11 @@
except ImportError:
pass
try:
+ from mercurial import registrar # since 3.7 (or 37d50250b696)
+ dir(registrar) # forcibly load it
+except ImportError:
+ registrar = None
+try:
from mercurial import repoview # since 2.5 (or 3a6ddacb7198)
except ImportError:
pass
@@ -81,18 +87,20 @@
# available, because commands.formatteropts has been available since
# 3.2 (or 7a7eed5176a4), even though formatting itself has been
# available since 2.2 (or ae5f92e154d3)
-formatteropts = getattr(commands, "formatteropts", [])
+formatteropts = getattr(cmdutil, "formatteropts",
+ getattr(commands, "formatteropts", []))
# for "historical portability":
# use locally defined option list, if debugrevlogopts isn't available,
# because commands.debugrevlogopts has been available since 3.7 (or
# 5606f7d0d063), even though cmdutil.openrevlog() has been available
# since 1.9 (or a79fea6b3e77).
-revlogopts = getattr(commands, "debugrevlogopts", [
+revlogopts = getattr(cmdutil, "debugrevlogopts",
+ getattr(commands, "debugrevlogopts", [
('c', 'changelog', False, ('open changelog')),
('m', 'manifest', False, ('open manifest')),
('', 'dir', False, ('open directory manifest')),
- ])
+ ]))
cmdtable = {}
@@ -102,7 +110,9 @@
def parsealiases(cmd):
return cmd.lstrip("^").split("|")
-if safehasattr(cmdutil, 'command'):
+if safehasattr(registrar, 'command'):
+ command = registrar.command(cmdtable)
+elif safehasattr(cmdutil, 'command'):
import inspect
command = cmdutil.command(cmdtable)
if 'norepo' not in inspect.getargspec(command)[0]:
@@ -347,6 +357,14 @@
# - perf.py itself has been available since 1.1 (or eb240755386d)
raise error.Abort(("tags API of this hg command is unknown"))
+# utilities to clear cache
+
+def clearfilecache(repo, attrname):
+ unfi = repo.unfiltered()
+ if attrname in vars(unfi):
+ delattr(unfi, attrname)
+ unfi._filecache.pop(attrname, None)
+
# perf commands
@command('perfwalk', formatteropts)
@@ -449,6 +467,16 @@
timer(d)
fm.end()
+@command('perfbookmarks', formatteropts)
+def perfbookmarks(ui, repo, **opts):
+ """benchmark parsing bookmarks from disk to memory"""
+ timer, fm = gettimer(ui, opts)
+ def d():
+ clearfilecache(repo, '_bookmarks')
+ repo._bookmarks
+ timer(d)
+ fm.end()
+
@command('perfchangegroupchangelog', formatteropts +
[('', 'version', '02', 'changegroup version'),
('r', 'rev', '', 'revisions to add to changegroup')])
@@ -573,6 +601,24 @@
timer(d)
fm.end()
+@command('perfphases',
+ [('', 'full', False, 'include file reading time too'),
+ ], "")
+def perfphases(ui, repo, **opts):
+ """benchmark phasesets computation"""
+ timer, fm = gettimer(ui, opts)
+ _phases = repo._phasecache
+ full = opts.get('full')
+ def d():
+ phases = _phases
+ if full:
+ clearfilecache(repo, '_phasecache')
+ phases = repo._phasecache
+ phases.invalidate()
+ phases.loadphaserevs(repo)
+ timer(d)
+ fm.end()
+
@command('perfmanifest', [], 'REV')
def perfmanifest(ui, repo, rev, **opts):
timer, fm = gettimer(ui, opts)
@@ -615,7 +661,7 @@
if os.name != 'nt':
os.system("HGRCPATH= %s version -q > /dev/null" % cmd)
else:
- os.environ['HGRCPATH'] = ''
+ os.environ['HGRCPATH'] = ' '
os.system("%s version -q > NUL" % cmd)
timer(d)
fm.end()
@@ -812,7 +858,7 @@
def d():
for pair in textpairs:
- bdiff.bdiff(*pair)
+ mdiff.textdiff(*pair)
timer, fm = gettimer(ui, opts)
timer(d)
@@ -838,12 +884,129 @@
timer(d, title)
fm.end()
-@command('perfrevlog', revlogopts + formatteropts +
+@command('perfrevlogindex', revlogopts + formatteropts,
+ '-c|-m|FILE')
+def perfrevlogindex(ui, repo, file_=None, **opts):
+ """Benchmark operations against a revlog index.
+
+ This tests constructing a revlog instance, reading index data,
+ parsing index data, and performing various operations related to
+ index data.
+ """
+
+ rl = cmdutil.openrevlog(repo, 'perfrevlogindex', file_, opts)
+
+ opener = getattr(rl, 'opener') # trick linter
+ indexfile = rl.indexfile
+ data = opener.read(indexfile)
+
+ header = struct.unpack('>I', data[0:4])[0]
+ version = header & 0xFFFF
+ if version == 1:
+ revlogio = revlog.revlogio()
+ inline = header & (1 << 16)
+ else:
+ raise error.Abort(('unsupported revlog version: %d') % version)
+
+ rllen = len(rl)
+
+ node0 = rl.node(0)
+ node25 = rl.node(rllen // 4)
+ node50 = rl.node(rllen // 2)
+ node75 = rl.node(rllen // 4 * 3)
+ node100 = rl.node(rllen - 1)
+
+ allrevs = range(rllen)
+ allrevsrev = list(reversed(allrevs))
+ allnodes = [rl.node(rev) for rev in range(rllen)]
+ allnodesrev = list(reversed(allnodes))
+
+ def constructor():
+ revlog.revlog(opener, indexfile)
+
+ def read():
+ with opener(indexfile) as fh:
+ fh.read()
+
+ def parseindex():
+ revlogio.parseindex(data, inline)
+
+ def getentry(revornode):
+ index = revlogio.parseindex(data, inline)[0]
+ index[revornode]
+
+ def getentries(revs, count=1):
+ index = revlogio.parseindex(data, inline)[0]
+
+ for i in range(count):
+ for rev in revs:
+ index[rev]
+
+ def resolvenode(node):
+ nodemap = revlogio.parseindex(data, inline)[1]
+ # This only works for the C code.
+ if nodemap is None:
+ return
+
+ try:
+ nodemap[node]
+ except error.RevlogError:
+ pass
+
+ def resolvenodes(nodes, count=1):
+ nodemap = revlogio.parseindex(data, inline)[1]
+ if nodemap is None:
+ return
+
+ for i in range(count):
+ for node in nodes:
+ try:
+ nodemap[node]
+ except error.RevlogError:
+ pass
+
+ benches = [
+ (constructor, 'revlog constructor'),
+ (read, 'read'),
+ (parseindex, 'create index object'),
+ (lambda: getentry(0), 'retrieve index entry for rev 0'),
+ (lambda: resolvenode('a' * 20), 'look up missing node'),
+ (lambda: resolvenode(node0), 'look up node at rev 0'),
+ (lambda: resolvenode(node25), 'look up node at 1/4 len'),
+ (lambda: resolvenode(node50), 'look up node at 1/2 len'),
+ (lambda: resolvenode(node75), 'look up node at 3/4 len'),
+ (lambda: resolvenode(node100), 'look up node at tip'),
+ # 2x variation is to measure caching impact.
+ (lambda: resolvenodes(allnodes),
+ 'look up all nodes (forward)'),
+ (lambda: resolvenodes(allnodes, 2),
+ 'look up all nodes 2x (forward)'),
+ (lambda: resolvenodes(allnodesrev),
+ 'look up all nodes (reverse)'),
+ (lambda: resolvenodes(allnodesrev, 2),
+ 'look up all nodes 2x (reverse)'),
+ (lambda: getentries(allrevs),
+ 'retrieve all index entries (forward)'),
+ (lambda: getentries(allrevs, 2),
+ 'retrieve all index entries 2x (forward)'),
+ (lambda: getentries(allrevsrev),
+ 'retrieve all index entries (reverse)'),
+ (lambda: getentries(allrevsrev, 2),
+ 'retrieve all index entries 2x (reverse)'),
+ ]
+
+ for fn, title in benches:
+ timer, fm = gettimer(ui, opts)
+ timer(fn, title=title)
+ fm.end()
+
+@command('perfrevlogrevisions', revlogopts + formatteropts +
[('d', 'dist', 100, 'distance between the revisions'),
('s', 'startrev', 0, 'revision to start reading at'),
('', 'reverse', False, 'read in reverse')],
'-c|-m|FILE')
-def perfrevlog(ui, repo, file_=None, startrev=0, reverse=False, **opts):
+def perfrevlogrevisions(ui, repo, file_=None, startrev=0, reverse=False,
+ **opts):
"""Benchmark reading a series of revisions from a revlog.
By default, we read every ``-d/--dist`` revision from 0 to tip of
@@ -851,23 +1014,26 @@
The start revision can be defined via ``-s/--startrev``.
"""
- timer, fm = gettimer(ui, opts)
- _len = getlen(ui)
+ rl = cmdutil.openrevlog(repo, 'perfrevlogrevisions', file_, opts)
+ rllen = getlen(ui)(rl)
def d():
- r = cmdutil.openrevlog(repo, 'perfrevlog', file_, opts)
+ rl.clearcaches()
- startrev = 0
- endrev = _len(r)
+ beginrev = startrev
+ endrev = rllen
dist = opts['dist']
if reverse:
- startrev, endrev = endrev, startrev
+ beginrev, endrev = endrev, beginrev
dist = -1 * dist
- for x in xrange(startrev, endrev, dist):
- r.revision(r.node(x))
+ for x in xrange(beginrev, endrev, dist):
+ # Old revisions don't support passing int.
+ n = rl.node(x)
+ rl.revision(n)
+ timer, fm = gettimer(ui, opts)
timer(d)
fm.end()
@@ -885,10 +1051,16 @@
This command measures the time it takes to read+decompress and recompress
chunks in a revlog. It effectively isolates I/O and compression performance.
For measurements of higher-level operations like resolving revisions,
- see ``perfrevlog`` and ``perfrevlogrevision``.
+ see ``perfrevlogrevisions`` and ``perfrevlogrevision``.
"""
rl = cmdutil.openrevlog(repo, 'perfrevlogchunks', file_, opts)
+ # _chunkraw was renamed to _getsegmentforrevs.
+ try:
+ segmentforrevs = rl._getsegmentforrevs
+ except AttributeError:
+ segmentforrevs = rl._chunkraw
+
# Verify engines argument.
if engines:
engines = set(e.strip() for e in engines.split(','))
@@ -919,22 +1091,22 @@
def doread():
rl.clearcaches()
for rev in revs:
- rl._chunkraw(rev, rev)
+ segmentforrevs(rev, rev)
def doreadcachedfh():
rl.clearcaches()
fh = rlfh(rl)
for rev in revs:
- rl._chunkraw(rev, rev, df=fh)
+ segmentforrevs(rev, rev, df=fh)
def doreadbatch():
rl.clearcaches()
- rl._chunkraw(revs[0], revs[-1])
+ segmentforrevs(revs[0], revs[-1])
def doreadbatchcachedfh():
rl.clearcaches()
fh = rlfh(rl)
- rl._chunkraw(revs[0], revs[-1], df=fh)
+ segmentforrevs(revs[0], revs[-1], df=fh)
def dochunk():
rl.clearcaches()
@@ -1003,6 +1175,13 @@
raise error.CommandError('perfrevlogrevision', 'invalid arguments')
r = cmdutil.openrevlog(repo, 'perfrevlogrevision', file_, opts)
+
+ # _chunkraw was renamed to _getsegmentforrevs.
+ try:
+ segmentforrevs = r._getsegmentforrevs
+ except AttributeError:
+ segmentforrevs = r._chunkraw
+
node = r.lookup(rev)
rev = r.rev(node)
@@ -1034,7 +1213,7 @@
def doread(chain):
if not cache:
r.clearcaches()
- r._chunkraw(chain[0], chain[-1])
+ segmentforrevs(chain[0], chain[-1])
def dorawchunks(data, chain):
if not cache:
@@ -1062,7 +1241,7 @@
r.revision(node)
chain = r._deltachain(rev)[0]
- data = r._chunkraw(chain[0], chain[-1])[1]
+ data = segmentforrevs(chain[0], chain[-1])[1]
rawchunks = getrawchunks(data, chain)
bins = r._chunks(chain)
text = str(bins[0])
@@ -1105,7 +1284,9 @@
timer(d)
fm.end()
-@command('perfvolatilesets', formatteropts)
+@command('perfvolatilesets',
+ [('', 'clear-obsstore', False, 'drop obsstore between each call.'),
+ ] + formatteropts)
def perfvolatilesets(ui, repo, *names, **opts):
"""benchmark the computation of various volatile set
@@ -1116,6 +1297,8 @@
def getobs(name):
def d():
repo.invalidatevolatilesets()
+ if opts['clear_obsstore']:
+ clearfilecache(repo, 'obsstore')
obsolete.getrevs(repo, name)
return d
@@ -1129,6 +1312,8 @@
def getfiltered(name):
def d():
repo.invalidatevolatilesets()
+ if opts['clear_obsstore']:
+ clearfilecache(repo, 'obsstore')
repoview.filterrevs(repo, name)
return d
@@ -1143,8 +1328,10 @@
@command('perfbranchmap',
[('f', 'full', False,
'Includes build time of subset'),
+ ('', 'clear-revbranch', False,
+ 'purge the revbranch cache between computation'),
] + formatteropts)
-def perfbranchmap(ui, repo, full=False, **opts):
+def perfbranchmap(ui, repo, full=False, clear_revbranch=False, **opts):
"""benchmark the update of a branchmap
This benchmarks the full repo.branchmap() call with read and write disabled
@@ -1157,6 +1344,8 @@
else:
view = repo.filtered(filtername)
def d():
+ if clear_revbranch:
+ repo.revbranchcache()._clear()
if full:
view._branchcaches.clear()
else:
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/phabricator.py Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,580 @@
+# phabricator.py - simple Phabricator integration
+#
+# Copyright 2017 Facebook, Inc.
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+"""simple Phabricator integration
+
+This extension provides a ``phabsend`` command which sends a stack of
+changesets to Phabricator without amending commit messages, and a ``phabread``
+command which prints a stack of revisions in a format suitable
+for :hg:`import`.
+
+By default, Phabricator requires ``Test Plan`` which might prevent some
+changesets from being sent. The requirement could be disabled by changing
+``differential.require-test-plan-field`` config server side.
+
+Config::
+
+ [phabricator]
+ # Phabricator URL
+ url = https://phab.example.com/
+
+ # API token. Get it from https://$HOST/conduit/login/
+ token = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
+
+ # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
+ # callsign is "FOO".
+ callsign = FOO
+
+"""
+
+from __future__ import absolute_import
+
+import json
+import re
+
+from mercurial.node import bin, nullid
+from mercurial.i18n import _
+from mercurial import (
+ encoding,
+ error,
+ mdiff,
+ obsolete,
+ patch,
+ registrar,
+ scmutil,
+ tags,
+ url as urlmod,
+ util,
+)
+
+cmdtable = {}
+command = registrar.command(cmdtable)
+
+def urlencodenested(params):
+ """like urlencode, but works with nested parameters.
+
+ For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
+ flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
+ urlencode. Note: the encoding is consistent with PHP's http_build_query.
+ """
+ flatparams = util.sortdict()
+ def process(prefix, obj):
+ items = {list: enumerate, dict: lambda x: x.items()}.get(type(obj))
+ if items is None:
+ flatparams[prefix] = obj
+ else:
+ for k, v in items(obj):
+ if prefix:
+ process('%s[%s]' % (prefix, k), v)
+ else:
+ process(k, v)
+ process('', params)
+ return util.urlreq.urlencode(flatparams)
+
+def readurltoken(repo):
+ """return conduit url, token and make sure they exist
+
+ Currently read from [phabricator] config section. In the future, it might
+ make sense to read from .arcconfig and .arcrc as well.
+ """
+ values = []
+ section = 'phabricator'
+ for name in ['url', 'token']:
+ value = repo.ui.config(section, name)
+ if not value:
+ raise error.Abort(_('config %s.%s is required') % (section, name))
+ values.append(value)
+ return values
+
+def callconduit(repo, name, params):
+ """call Conduit API, params is a dict. return json.loads result, or None"""
+ host, token = readurltoken(repo)
+ url, authinfo = util.url('/'.join([host, 'api', name])).authinfo()
+ urlopener = urlmod.opener(repo.ui, authinfo)
+ repo.ui.debug('Conduit Call: %s %s\n' % (url, params))
+ params = params.copy()
+ params['api.token'] = token
+ request = util.urlreq.request(url, data=urlencodenested(params))
+ body = urlopener.open(request).read()
+ repo.ui.debug('Conduit Response: %s\n' % body)
+ parsed = json.loads(body)
+ if parsed.get(r'error_code'):
+ msg = (_('Conduit Error (%s): %s')
+ % (parsed[r'error_code'], parsed[r'error_info']))
+ raise error.Abort(msg)
+ return parsed[r'result']
+
+@command('debugcallconduit', [], _('METHOD'))
+def debugcallconduit(ui, repo, name):
+ """call Conduit API
+
+ Call parameters are read from stdin as a JSON blob. Result will be written
+ to stdout as a JSON blob.
+ """
+ params = json.loads(ui.fin.read())
+ result = callconduit(repo, name, params)
+ s = json.dumps(result, sort_keys=True, indent=2, separators=(',', ': '))
+ ui.write('%s\n' % s)
+
+def getrepophid(repo):
+ """given callsign, return repository PHID or None"""
+ # developer config: phabricator.repophid
+ repophid = repo.ui.config('phabricator', 'repophid')
+ if repophid:
+ return repophid
+ callsign = repo.ui.config('phabricator', 'callsign')
+ if not callsign:
+ return None
+ query = callconduit(repo, 'diffusion.repository.search',
+ {'constraints': {'callsigns': [callsign]}})
+ if len(query[r'data']) == 0:
+ return None
+ repophid = encoding.strtolocal(query[r'data'][0][r'phid'])
+ repo.ui.setconfig('phabricator', 'repophid', repophid)
+ return repophid
+
+_differentialrevisiontagre = re.compile('\AD([1-9][0-9]*)\Z')
+_differentialrevisiondescre = re.compile(
+ '^Differential Revision:\s*(.*)D([1-9][0-9]*)$', re.M)
+
+def getoldnodedrevmap(repo, nodelist):
+    """find previous nodes that have been sent to Phabricator
+
+ return {node: (oldnode or None, Differential Revision ID)}
+ for node in nodelist with known previous sent versions, or associated
+ Differential Revision IDs.
+
+ Examines all precursors and their tags. Tags with format like "D1234" are
+ considered a match and the node with that tag, and the number after "D"
+ (ex. 1234) will be returned.
+
+ If tags are not found, examine commit message. The "Differential Revision:"
+ line could associate this changeset to a Differential Revision.
+ """
+ url, token = readurltoken(repo)
+ unfi = repo.unfiltered()
+ nodemap = unfi.changelog.nodemap
+
+ result = {} # {node: (oldnode or None, drev)}
+ toconfirm = {} # {node: (oldnode, {precnode}, drev)}
+ for node in nodelist:
+ ctx = unfi[node]
+ # For tags like "D123", put them into "toconfirm" to verify later
+ precnodes = list(obsolete.allprecursors(unfi.obsstore, [node]))
+ for n in precnodes:
+ if n in nodemap:
+ for tag in unfi.nodetags(n):
+ m = _differentialrevisiontagre.match(tag)
+ if m:
+ toconfirm[node] = (n, set(precnodes), int(m.group(1)))
+ continue
+
+ # Check commit message (make sure URL matches)
+ m = _differentialrevisiondescre.search(ctx.description())
+ if m:
+ if m.group(1).rstrip('/') == url.rstrip('/'):
+ result[node] = (None, int(m.group(2)))
+ else:
+ unfi.ui.warn(_('%s: Differential Revision URL ignored - host '
+ 'does not match config\n') % ctx)
+
+ # Double check if tags are genuine by collecting all old nodes from
+ # Phabricator, and expect precursors overlap with it.
+ if toconfirm:
+ confirmed = {} # {drev: {oldnode}}
+ drevs = [drev for n, precs, drev in toconfirm.values()]
+ diffs = callconduit(unfi, 'differential.querydiffs',
+ {'revisionIDs': drevs})
+ for diff in diffs.values():
+ drev = int(diff[r'revisionID'])
+ oldnode = bin(encoding.unitolocal(getdiffmeta(diff).get(r'node')))
+            if oldnode:
+ confirmed.setdefault(drev, set()).add(oldnode)
+ for newnode, (oldnode, precset, drev) in toconfirm.items():
+ if bool(precset & confirmed.get(drev, set())):
+ result[newnode] = (oldnode, drev)
+ else:
+ tagname = 'D%d' % drev
+ tags.tag(repo, tagname, nullid, message=None, user=None,
+ date=None, local=True)
+ unfi.ui.warn(_('D%s: local tag removed - does not match '
+ 'Differential history\n') % drev)
+
+ return result
+
+def getdiff(ctx, diffopts):
+ """plain-text diff without header (user, commit message, etc)"""
+ output = util.stringio()
+ for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
+ None, opts=diffopts):
+ output.write(chunk)
+ return output.getvalue()
+
+def creatediff(ctx):
+ """create a Differential Diff"""
+ repo = ctx.repo()
+ repophid = getrepophid(repo)
+ # Create a "Differential Diff" via "differential.createrawdiff" API
+ params = {'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
+ if repophid:
+ params['repositoryPHID'] = repophid
+ diff = callconduit(repo, 'differential.createrawdiff', params)
+ if not diff:
+ raise error.Abort(_('cannot create diff for %s') % ctx)
+ return diff
+
+def writediffproperties(ctx, diff):
+ """write metadata to diff so patches could be applied losslessly"""
+ params = {
+ 'diff_id': diff[r'id'],
+ 'name': 'hg:meta',
+ 'data': json.dumps({
+ 'user': ctx.user(),
+ 'date': '%d %d' % ctx.date(),
+ 'node': ctx.hex(),
+ 'parent': ctx.p1().hex(),
+ }),
+ }
+ callconduit(ctx.repo(), 'differential.setdiffproperty', params)
+
+def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
+ actions=None):
+ """create or update a Differential Revision
+
+ If revid is None, create a new Differential Revision, otherwise update
+ revid. If parentrevid is not None, set it as a dependency.
+
+ If oldnode is not None, check if the patch content (without commit message
+ and metadata) has changed before creating another diff.
+
+ If actions is not None, they will be appended to the transaction.
+ """
+ repo = ctx.repo()
+ if oldnode:
+ diffopts = mdiff.diffopts(git=True, context=1)
+ oldctx = repo.unfiltered()[oldnode]
+ neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
+ else:
+ neednewdiff = True
+
+ transactions = []
+ if neednewdiff:
+ diff = creatediff(ctx)
+ writediffproperties(ctx, diff)
+ transactions.append({'type': 'update', 'value': diff[r'phid']})
+
+ # Use a temporary summary to set dependency. There might be better ways but
+ # I cannot find them for now. But do not do that if we are updating an
+ # existing revision (revid is not None) since that introduces visible
+ # churns (someone edited "Summary" twice) on the web page.
+ if parentrevid and revid is None:
+ summary = 'Depends on D%s' % parentrevid
+ transactions += [{'type': 'summary', 'value': summary},
+ {'type': 'summary', 'value': ' '}]
+
+ if actions:
+ transactions += actions
+
+ # Parse commit message and update related fields.
+ desc = ctx.description()
+ info = callconduit(repo, 'differential.parsecommitmessage',
+ {'corpus': desc})
+ for k, v in info[r'fields'].items():
+ if k in ['title', 'summary', 'testPlan']:
+ transactions.append({'type': k, 'value': v})
+
+ params = {'transactions': transactions}
+ if revid is not None:
+ # Update an existing Differential Revision
+ params['objectIdentifier'] = revid
+
+ revision = callconduit(repo, 'differential.revision.edit', params)
+ if not revision:
+ raise error.Abort(_('cannot create revision for %s') % ctx)
+
+ return revision
+
+def userphids(repo, names):
+ """convert user names to PHIDs"""
+ query = {'constraints': {'usernames': names}}
+ result = callconduit(repo, 'user.search', query)
+ # username not found is not an error of the API. So check if we have missed
+ # some names here.
+ data = result[r'data']
+ resolved = set(entry[r'fields'][r'username'] for entry in data)
+ unresolved = set(names) - resolved
+ if unresolved:
+ raise error.Abort(_('unknown username: %s')
+ % ' '.join(sorted(unresolved)))
+ return [entry[r'phid'] for entry in data]
+
+@command('phabsend',
+ [('r', 'rev', [], _('revisions to send'), _('REV')),
+ ('', 'reviewer', [], _('specify reviewers'))],
+ _('REV [OPTIONS]'))
+def phabsend(ui, repo, *revs, **opts):
+ """upload changesets to Phabricator
+
+    If there are multiple revisions specified, they will be sent as a stack
+ with a linear dependencies relationship using the order specified by the
+ revset.
+
+ For the first time uploading changesets, local tags will be created to
+ maintain the association. After the first time, phabsend will check
+ obsstore and tags information so it can figure out whether to update an
+ existing Differential Revision, or create a new one.
+ """
+ revs = list(revs) + opts.get('rev', [])
+ revs = scmutil.revrange(repo, revs)
+
+ if not revs:
+ raise error.Abort(_('phabsend requires at least one changeset'))
+
+ actions = []
+ reviewers = opts.get('reviewer', [])
+ if reviewers:
+ phids = userphids(repo, reviewers)
+ actions.append({'type': 'reviewers.add', 'value': phids})
+
+ oldnodedrev = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
+
+ # Send patches one by one so we know their Differential Revision IDs and
+ # can provide dependency relationship
+ lastrevid = None
+ for rev in revs:
+ ui.debug('sending rev %d\n' % rev)
+ ctx = repo[rev]
+
+ # Get Differential Revision ID
+ oldnode, revid = oldnodedrev.get(ctx.node(), (None, None))
+ if oldnode != ctx.node():
+ # Create or update Differential Revision
+ revision = createdifferentialrevision(ctx, revid, lastrevid,
+ oldnode, actions)
+ newrevid = int(revision[r'object'][r'id'])
+ if revid:
+ action = _('updated')
+ else:
+ action = _('created')
+
+ # Create a local tag to note the association
+ tagname = 'D%d' % newrevid
+ tags.tag(repo, tagname, ctx.node(), message=None, user=None,
+ date=None, local=True)
+ else:
+ # Nothing changed. But still set "newrevid" so the next revision
+ # could depend on this one.
+ newrevid = revid
+ action = _('skipped')
+
+ ui.write(_('D%s: %s - %s: %s\n') % (newrevid, action, ctx,
+ ctx.description().split('\n')[0]))
+ lastrevid = newrevid
+
+# Map from "hg:meta" keys to header understood by "hg import". The order is
+# consistent with "hg export" output.
+_metanamemap = util.sortdict([(r'user', 'User'), (r'date', 'Date'),
+ (r'node', 'Node ID'), (r'parent', 'Parent ')])
+
+def querydrev(repo, params, stack=False):
+ """return a list of "Differential Revision" dicts
+
+ params is the input of "differential.query" API, and is expected to match
+ just a single Differential Revision.
+
+ A "Differential Revision dict" looks like:
+
+ {
+ "id": "2",
+ "phid": "PHID-DREV-672qvysjcczopag46qty",
+ "title": "example",
+ "uri": "https://phab.example.com/D2",
+ "dateCreated": "1499181406",
+ "dateModified": "1499182103",
+ "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
+ "status": "0",
+ "statusName": "Needs Review",
+ "properties": [],
+ "branch": null,
+ "summary": "",
+ "testPlan": "",
+ "lineCount": "2",
+ "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
+ "diffs": [
+ "3",
+ "4",
+ ],
+ "commits": [],
+ "reviewers": [],
+ "ccs": [],
+ "hashes": [],
+ "auxiliary": {
+ "phabricator:projects": [],
+ "phabricator:depends-on": [
+ "PHID-DREV-gbapp366kutjebt7agcd"
+ ]
+ },
+ "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
+ "sourcePath": null
+ }
+
+ If stack is True, return a list of "Differential Revision dict"s in an
+ order that the latter ones depend on the former ones. Otherwise, return a
+ list of a unique "Differential Revision dict".
+ """
+ prefetched = {} # {id or phid: drev}
+ def fetch(params):
+ """params -> single drev or None"""
+ key = (params.get(r'ids') or params.get(r'phids') or [None])[0]
+ if key in prefetched:
+ return prefetched[key]
+ # Otherwise, send the request. If we're fetching a stack, be smarter
+ # and fetch more ids in one batch, even if it could be unnecessary.
+ batchparams = params
+ if stack and len(params.get(r'ids', [])) == 1:
+ i = int(params[r'ids'][0])
+ # developer config: phabricator.batchsize
+ batchsize = repo.ui.configint('phabricator', 'batchsize', 12)
+ batchparams = {'ids': range(max(1, i - batchsize), i + 1)}
+ drevs = callconduit(repo, 'differential.query', batchparams)
+ # Fill prefetched with the result
+ for drev in drevs:
+ prefetched[drev[r'phid']] = drev
+ prefetched[int(drev[r'id'])] = drev
+ if key not in prefetched:
+ raise error.Abort(_('cannot get Differential Revision %r') % params)
+ return prefetched[key]
+
+ visited = set()
+ result = []
+ queue = [params]
+ while queue:
+ params = queue.pop()
+ drev = fetch(params)
+ if drev[r'id'] in visited:
+ continue
+ visited.add(drev[r'id'])
+ result.append(drev)
+ if stack:
+ auxiliary = drev.get(r'auxiliary', {})
+ depends = auxiliary.get(r'phabricator:depends-on', [])
+ for phid in depends:
+ queue.append({'phids': [phid]})
+ result.reverse()
+ return result
+
+def getdescfromdrev(drev):
+ """get description (commit message) from "Differential Revision"
+
+ This is similar to differential.getcommitmessage API. But we only care
+ about limited fields: title, summary, test plan, and URL.
+ """
+ title = drev[r'title']
+ summary = drev[r'summary'].rstrip()
+ testplan = drev[r'testPlan'].rstrip()
+ if testplan:
+ testplan = 'Test Plan:\n%s' % testplan
+ uri = 'Differential Revision: %s' % drev[r'uri']
+ return '\n\n'.join(filter(None, [title, summary, testplan, uri]))
+
+def getdiffmeta(diff):
+ """get commit metadata (date, node, user, p1) from a diff object
+
+ The metadata could be "hg:meta", sent by phabsend, like:
+
+ "properties": {
+ "hg:meta": {
+ "date": "1499571514 25200",
+ "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
+ "user": "Foo Bar <foo@example.com>",
+ "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
+ }
+ }
+
+ Or converted from "local:commits", sent by "arc", like:
+
+ "properties": {
+ "local:commits": {
+ "98c08acae292b2faf60a279b4189beb6cff1414d": {
+ "author": "Foo Bar",
+ "time": 1499546314,
+ "branch": "default",
+ "tag": "",
+ "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
+ "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
+ "local": "1000",
+ "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
+ "summary": "...",
+ "message": "...",
+ "authorEmail": "foo@example.com"
+ }
+ }
+ }
+
+ Note: metadata extracted from "local:commits" will lose time zone
+ information.
+ """
+ props = diff.get(r'properties') or {}
+ meta = props.get(r'hg:meta')
+ if not meta and props.get(r'local:commits'):
+ commit = sorted(props[r'local:commits'].values())[0]
+ meta = {
+ r'date': r'%d 0' % commit[r'time'],
+ r'node': commit[r'rev'],
+ r'user': r'%s <%s>' % (commit[r'author'], commit[r'authorEmail']),
+ }
+ if len(commit.get(r'parents', ())) >= 1:
+ meta[r'parent'] = commit[r'parents'][0]
+ return meta or {}
+
+def readpatch(repo, params, write, stack=False):
+ """generate plain-text patch readable by 'hg import'
+
+ write is usually ui.write. params is passed to "differential.query". If
+ stack is True, also write dependent patches.
+ """
+ # Differential Revisions
+ drevs = querydrev(repo, params, stack)
+
+ # Prefetch hg:meta property for all diffs
+ diffids = sorted(set(max(int(v) for v in drev[r'diffs']) for drev in drevs))
+ diffs = callconduit(repo, 'differential.querydiffs', {'ids': diffids})
+
+ # Generate patch for each drev
+ for drev in drevs:
+ repo.ui.note(_('reading D%s\n') % drev[r'id'])
+
+ diffid = max(int(v) for v in drev[r'diffs'])
+ body = callconduit(repo, 'differential.getrawdiff', {'diffID': diffid})
+ desc = getdescfromdrev(drev)
+ header = '# HG changeset patch\n'
+
+ # Try to preserve metadata from hg:meta property. Write hg patch
+ # headers that can be read by the "import" command. See patchheadermap
+ # and extract in mercurial/patch.py for supported headers.
+ meta = getdiffmeta(diffs[str(diffid)])
+ for k in _metanamemap.keys():
+ if k in meta:
+ header += '# %s %s\n' % (_metanamemap[k], meta[k])
+
+ write(('%s%s\n%s') % (header, desc, body))
+
+@command('phabread',
+ [('', 'stack', False, _('read dependencies'))],
+ _('REVID [OPTIONS]'))
+def phabread(ui, repo, revid, **opts):
+ """print patches from Phabricator suitable for importing
+
+ REVID could be a Differential Revision identity, like ``D123``, or just the
+ number ``123``, or a full URL like ``https://phab.example.com/D123``.
+
+ If --stack is given, follow dependencies information and read all patches.
+ """
+ try:
+ revid = int(revid.split('/')[-1].replace('D', ''))
+ except ValueError:
+ raise error.Abort(_('invalid Revision ID: %s') % revid)
+ readpatch(repo, {'ids': [revid]}, ui.write, opts.get('stack'))
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/python3-ratchet.py Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,116 @@
+# Copyright 2012 Facebook
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+"""Find tests that newly pass under Python 3.
+
+The approach is simple: we maintain a whitelist of Python 3 passing
+tests in the repository, and periodically run all the /other/ tests
+and look for new passes. Any newly passing tests get automatically
+added to the whitelist.
+
+You probably want to run it like this:
+
+ $ cd tests
+ $ python3 ../contrib/python3-ratchet.py \
+ > --working-tests=../contrib/python3-whitelist
+"""
+from __future__ import print_function
+from __future__ import absolute_import
+
+import argparse
+import json
+import os
+import subprocess
+import sys
+
+_hgenv = dict(os.environ)
+_hgenv.update({
+ 'HGPLAIN': '1',
+ })
+
+_HG_FIRST_CHANGE = '9117c6561b0bd7792fa13b50d28239d51b78e51f'
+
+def _runhg(*args):
+ return subprocess.check_output(args, env=_hgenv)
+
+def _is_hg_repo(path):
+ return _runhg('hg', 'log', '-R', path,
+ '-r0', '--template={node}').strip() == _HG_FIRST_CHANGE
+
+def _py3default():
+ if sys.version_info[0] >= 3:
+ return sys.executable
+ return 'python3'
+
+def main(argv=()):
+ p = argparse.ArgumentParser()
+ p.add_argument('--working-tests',
+ help='List of tests that already work in Python 3.')
+ p.add_argument('--commit-to-repo',
+ help='If set, commit newly fixed tests to the given repo')
+ p.add_argument('-j', default=os.sysconf(r'SC_NPROCESSORS_ONLN'), type=int,
+ help='Number of parallel tests to run.')
+ p.add_argument('--python3', default=_py3default(),
+ help='python3 interpreter to use for test run')
+ p.add_argument('--commit-user',
+ default='python3-ratchet@mercurial-scm.org',
+ help='Username to specify when committing to a repo.')
+ opts = p.parse_args(argv)
+ if opts.commit_to_repo:
+ if not _is_hg_repo(opts.commit_to_repo):
+ print('abort: specified repository is not the hg repository')
+ sys.exit(1)
+ if not opts.working_tests or not os.path.isfile(opts.working_tests):
+ print('abort: --working-tests must exist and be a file (got %r)' %
+ opts.working_tests)
+ sys.exit(1)
+ elif opts.commit_to_repo:
+ root = _runhg('hg', 'root').strip()
+ if not opts.working_tests.startswith(root):
+ print('abort: if --commit-to-repo is given, '
+ '--working-tests must be from that repo')
+ sys.exit(1)
+ try:
+ subprocess.check_call([opts.python3, '-c',
+ 'import sys ; '
+ 'assert ((3, 5) <= sys.version_info < (3, 6) '
+ 'or sys.version_info >= (3, 6, 2))'])
+ except subprocess.CalledProcessError:
+ print('warning: Python 3.6.0 and 3.6.1 have '
+ 'a bug which breaks Mercurial')
+ print('(see https://bugs.python.org/issue29714 for details)')
+ # TODO(augie): uncomment exit when Python 3.6.2 is available
+ # sys.exit(1)
+
+ rt = subprocess.Popen([opts.python3, 'run-tests.py', '-j', str(opts.j),
+ '--blacklist', opts.working_tests, '--json'])
+ rt.wait()
+ with open('report.json') as f:
+ data = f.read()
+ report = json.loads(data.split('=', 1)[1])
+ newpass = set()
+ for test, result in report.items():
+ if result['result'] != 'success':
+ continue
+ # A new passing test! Huzzah!
+ newpass.add(test)
+ if newpass:
+ # We already validated the repo, so we can just dive right in
+ # and commit.
+ if opts.commit_to_repo:
+ print(len(newpass), 'new passing tests on Python 3!')
+ with open(opts.working_tests) as f:
+ oldpass = {l for l in f.read().splitlines() if l}
+ with open(opts.working_tests, 'w') as f:
+ for p in sorted(oldpass | newpass):
+ f.write('%s\n' % p)
+ _runhg('hg', 'commit', '-R', opts.commit_to_repo,
+ '--user', opts.commit_user,
+ '--message', 'python3: expand list of passing tests')
+ else:
+ print('Newly passing tests:', '\n'.join(sorted(newpass)))
+ sys.exit(2)
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/python3-whitelist Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,34 @@
+test-ancestor.py
+test-backwards-remove.t
+test-branch-tag-confict.t
+test-casecollision.t
+test-check-commit.t
+test-check-execute.t
+test-check-pyflakes.t
+test-check-pylint.t
+test-check-shbang.t
+test-contrib-check-code.t
+test-contrib-check-commit.t
+test-diff-issue2761.t
+test-diff-newlines.t
+test-diff-reverse.t
+test-diff-subdir.t
+test-dirstate-nonnormalset.t
+test-doctest.py
+test-empty-dir.t
+test-excessive-merge.t
+test-issue1089.t
+test-issue1993.t
+test-issue842.t
+test-locate.t
+test-lrucachedict.py
+test-manifest.py
+test-merge-default.t
+test-merge-subrepos.t
+test-merge2.t
+test-merge5.t
+test-revlog-packentry.t
+test-run-tests.py
+test-unified-test.t
+test-update-reverse.t
+test-xdg.t
--- a/contrib/synthrepo.py Wed Jul 05 11:24:22 2017 -0400
+++ b/contrib/synthrepo.py Wed Jul 19 07:51:41 2017 -0500
@@ -53,11 +53,11 @@
short,
)
from mercurial import (
- cmdutil,
context,
error,
hg,
patch,
+ registrar,
scmutil,
util,
)
@@ -69,9 +69,9 @@
testedwith = 'ships-with-hg-core'
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
-newfile = set(('new fi', 'rename', 'copy f', 'copy t'))
+newfile = {'new fi', 'rename', 'copy f', 'copy t'}
def zerodict():
return collections.defaultdict(lambda: 0)
@@ -336,7 +336,7 @@
wlock = repo.wlock()
lock = repo.lock()
- nevertouch = set(('.hgsub', '.hgignore', '.hgtags'))
+ nevertouch = {'.hgsub', '.hgignore', '.hgtags'}
progress = ui.progress
_synthesizing = _('synthesizing')
--- a/contrib/wix/dist.wxs Wed Jul 05 11:24:22 2017 -0400
+++ b/contrib/wix/dist.wxs Wed Jul 19 07:51:41 2017 -0500
@@ -12,12 +12,12 @@
<Directory Id="libdir" Name="lib" FileSource="$(var.SourceDir)/lib">
<Component Id="libOutput" Guid="$(var.lib.guid)" Win64='$(var.IsX64)'>
<File Name="library.zip" KeyPath="yes" />
- <File Name="mercurial.base85.pyd" />
- <File Name="mercurial.bdiff.pyd" />
- <File Name="mercurial.diffhelpers.pyd" />
- <File Name="mercurial.mpatch.pyd" />
- <File Name="mercurial.osutil.pyd" />
- <File Name="mercurial.parsers.pyd" />
+ <File Name="mercurial.cext.base85.pyd" />
+ <File Name="mercurial.cext.bdiff.pyd" />
+ <File Name="mercurial.cext.diffhelpers.pyd" />
+ <File Name="mercurial.cext.mpatch.pyd" />
+ <File Name="mercurial.cext.osutil.pyd" />
+ <File Name="mercurial.cext.parsers.pyd" />
<File Name="mercurial.zstd.pyd" />
<File Name="pyexpat.pyd" />
<File Name="bz2.pyd" />
--- a/contrib/zsh_completion Wed Jul 05 11:24:22 2017 -0400
+++ b/contrib/zsh_completion Wed Jul 19 07:51:41 2017 -0500
@@ -672,6 +672,7 @@
'(--force -f)'{-f,--force}'[skip check for outstanding uncommitted changes]' \
'--bypass[apply patch without touching the working directory]' \
'--no-commit[do not commit, just update the working directory]' \
+ '--partial[commit even if some hunks fail]' \
'--exact[apply patch to the nodes from which it was generated]' \
'--import-branch[use any branch information in patch (implied by --exact)]' \
'(--date -d)'{-d+,--date=}'[record datecode as commit date]:date code:' \
--- a/doc/check-seclevel.py Wed Jul 05 11:24:22 2017 -0400
+++ b/doc/check-seclevel.py Wed Jul 19 07:51:41 2017 -0500
@@ -23,11 +23,11 @@
table = commands.table
helptable = help.helptable
-level2mark = ['"', '=', '-', '.', '#']
-reservedmarks = ['"']
+level2mark = [b'"', b'=', b'-', b'.', b'#']
+reservedmarks = [b'"']
mark2level = {}
-for m, l in zip(level2mark, xrange(len(level2mark))):
+for m, l in zip(level2mark, range(len(level2mark))):
if m not in reservedmarks:
mark2level[m] = l
@@ -37,22 +37,25 @@
initlevel_ext_cmd = 3
def showavailables(ui, initlevel):
- ui.warn((' available marks and order of them in this help: %s\n') %
- (', '.join(['%r' % (m * 4) for m in level2mark[initlevel + 1:]])))
+ avail = (' available marks and order of them in this help: %s\n') % (
+ ', '.join(['%r' % (m * 4) for m in level2mark[initlevel + 1:]]))
+ ui.warn(avail.encode('utf-8'))
def checkseclevel(ui, doc, name, initlevel):
ui.note(('checking "%s"\n') % name)
+ if not isinstance(doc, bytes):
+ doc = doc.encode('utf-8')
blocks, pruned = minirst.parse(doc, 0, ['verbose'])
errorcnt = 0
curlevel = initlevel
for block in blocks:
- if block['type'] != 'section':
+ if block[b'type'] != b'section':
continue
- mark = block['underline']
- title = block['lines'][0]
+ mark = block[b'underline']
+ title = block[b'lines'][0]
if (mark not in mark2level) or (mark2level[mark] <= initlevel):
- ui.warn(('invalid section mark %r for "%s" of %s\n') %
- (mark * 4, title, name))
+ ui.warn((('invalid section mark %r for "%s" of %s\n') %
+ (mark * 4, title, name)).encode('utf-8'))
showavailables(ui, initlevel)
errorcnt += 1
continue
@@ -72,7 +75,7 @@
def checkcmdtable(ui, cmdtable, namefmt, initlevel):
errorcnt = 0
for k, entry in cmdtable.items():
- name = k.split("|")[0].lstrip("^")
+ name = k.split(b"|")[0].lstrip(b"^")
if not entry[0].__doc__:
ui.note(('skip checking %s: no help document\n') %
(namefmt % name))
@@ -93,8 +96,8 @@
errorcnt += checkcmdtable(ui, table, '%s command', initlevel_cmd)
- for name in sorted(extensions.enabled().keys() +
- extensions.disabled().keys()):
+ for name in sorted(list(extensions.enabled()) +
+ list(extensions.disabled())):
mod = extensions.load(ui, name, None)
if not mod.__doc__:
ui.note(('skip checking %s extension: no help document\n') % name)
@@ -106,7 +109,7 @@
cmdtable = getattr(mod, 'cmdtable', None)
if cmdtable:
errorcnt += checkcmdtable(ui, cmdtable,
- '%s command of ' + name + ' extension',
+ '%%s command of %s extension' % name,
initlevel_ext_cmd)
return errorcnt
--- a/doc/gendoc.py Wed Jul 05 11:24:22 2017 -0400
+++ b/doc/gendoc.py Wed Jul 19 07:51:41 2017 -0500
@@ -16,6 +16,10 @@
# import from the live mercurial repo
sys.path.insert(0, "..")
from mercurial import demandimport; demandimport.enable()
+# Load util so that the locale path is set by i18n.setdatapath() before
+# calling _().
+from mercurial import util
+util.datapath
from mercurial import (
commands,
extensions,
--- a/hg Wed Jul 05 11:24:22 2017 -0400
+++ b/hg Wed Jul 19 07:51:41 2017 -0500
@@ -28,8 +28,8 @@
# enable importing on demand to reduce startup time
try:
- if sys.version_info[0] < 3:
- from mercurial import demandimport; demandimport.enable()
+ if sys.version_info[0] < 3 or sys.version_info >= (3, 6):
+ import hgdemandimport; hgdemandimport.enable()
except ImportError:
sys.stderr.write("abort: couldn't find mercurial libraries in [%s]\n" %
' '.join(sys.path))
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgdemandimport/__init__.py Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,73 @@
+# hgdemandimport - global demand-loading of modules for Mercurial
+#
+# Copyright 2017 Facebook Inc.
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+'''demandimport - automatic demand-loading of modules'''
+
+# This is in a separate package from mercurial because in Python 3,
+# demand loading is per-package. Keeping demandimport in the mercurial package
+# would disable demand loading for any modules in mercurial.
+
+from __future__ import absolute_import
+
+import sys
+
+if sys.version_info[0] >= 3:
+ from . import demandimportpy3 as demandimport
+else:
+ from . import demandimportpy2 as demandimport
+
+# Extensions can add to this list if necessary.
+ignore = [
+ '__future__',
+ '_hashlib',
+ # ImportError during pkg_resources/__init__.py:fixup_namespace_package
+ '_imp',
+ '_xmlplus',
+ 'fcntl',
+ 'nt', # pathlib2 tests the existence of built-in 'nt' module
+ 'win32com.gen_py',
+ 'win32com.shell', # 'appdirs' tries to import win32com.shell
+ '_winreg', # 2.7 mimetypes needs immediate ImportError
+ 'pythoncom',
+ # imported by tarfile, not available under Windows
+ 'pwd',
+ 'grp',
+ # imported by profile, itself imported by hotshot.stats,
+ # not available under Windows
+ 'resource',
+ # this trips up many extension authors
+ 'gtk',
+ # setuptools' pkg_resources.py expects "from __main__ import x" to
+ # raise ImportError if x not defined
+ '__main__',
+ '_ssl', # conditional imports in the stdlib, issue1964
+ '_sre', # issue4920
+ 'rfc822',
+ 'mimetools',
+ 'sqlalchemy.events', # has import-time side effects (issue5085)
+ # setuptools 8 expects this module to explode early when not on windows
+ 'distutils.msvc9compiler',
+ '__builtin__',
+ 'builtins',
+ 'urwid.command_map', # for pudb
+ ]
+
+_pypy = '__pypy__' in sys.builtin_module_names
+
+if _pypy:
+ ignore.extend([
+ # _ctypes.pointer is shadowed by "from ... import pointer" (PyPy 5)
+ '_ctypes.pointer',
+ ])
+
+demandimport.init(ignore)
+
+# Re-export.
+isenabled = demandimport.isenabled
+enable = demandimport.enable
+disable = demandimport.disable
+deactivated = demandimport.deactivated
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgdemandimport/demandimportpy2.py Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,306 @@
+# demandimport.py - global demand-loading of modules for Mercurial
+#
+# Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+'''
+demandimport - automatic demandloading of modules
+
+To enable this module, do:
+
+ import demandimport; demandimport.enable()
+
+Imports of the following forms will be demand-loaded:
+
+ import a, b.c
+ import a.b as c
+ from a import b,c # a will be loaded immediately
+
+These imports will not be delayed:
+
+ from a import *
+ b = __import__(a)
+'''
+
+from __future__ import absolute_import
+
+import __builtin__ as builtins
+import contextlib
+import os
+import sys
+
+contextmanager = contextlib.contextmanager
+
+_origimport = __import__
+
+nothing = object()
+
+def _hgextimport(importfunc, name, globals, *args, **kwargs):
+ try:
+ return importfunc(name, globals, *args, **kwargs)
+ except ImportError:
+ if not globals:
+ raise
+ # extensions are loaded with "hgext_" prefix
+ hgextname = 'hgext_%s' % name
+ nameroot = hgextname.split('.', 1)[0]
+ contextroot = globals.get('__name__', '').split('.', 1)[0]
+ if nameroot != contextroot:
+ raise
+ # retry to import with "hgext_" prefix
+ return importfunc(hgextname, globals, *args, **kwargs)
+
+class _demandmod(object):
+ """module demand-loader and proxy
+
+ Specify 1 as 'level' argument at construction, to import module
+ relatively.
+ """
+
+ def __init__(self, name, globals, locals, level):
+ if '.' in name:
+ head, rest = name.split('.', 1)
+ after = [rest]
+ else:
+ head = name
+ after = []
+ object.__setattr__(self, r"_data",
+ (head, globals, locals, after, level, set()))
+ object.__setattr__(self, r"_module", None)
+
+ def _extend(self, name):
+ """add to the list of submodules to load"""
+ self._data[3].append(name)
+
+ def _addref(self, name):
+ """Record that the named module ``name`` imports this module.
+
+ References to this proxy class having the name of this module will be
+ replaced at module load time. We assume the symbol inside the importing
+ module is identical to the "head" name of this module. We don't
+ actually know if "as X" syntax is being used to change the symbol name
+ because this information isn't exposed to __import__.
+ """
+ self._data[5].add(name)
+
+ def _load(self):
+ if not self._module:
+ head, globals, locals, after, level, modrefs = self._data
+ mod = _hgextimport(_origimport, head, globals, locals, None, level)
+ if mod is self:
+ # In this case, _hgextimport() above should imply
+ # _demandimport(). Otherwise, _hgextimport() never
+ # returns _demandmod. This isn't intentional behavior,
+ # in fact. (see also issue5304 for detail)
+ #
+ # If self._module is already bound at this point, self
+ # should be already _load()-ed while _hgextimport().
+ # Otherwise, there is no way to import actual module
+ # as expected, because (re-)invoking _hgextimport()
+ # should cause same result.
+ # This is the reason why _load() returns without any more
+ # setup but assumes self to be already bound.
+ mod = self._module
+ assert mod and mod is not self, "%s, %s" % (self, mod)
+ return
+
+ # load submodules
+ def subload(mod, p):
+ h, t = p, None
+ if '.' in p:
+ h, t = p.split('.', 1)
+ if getattr(mod, h, nothing) is nothing:
+ setattr(mod, h, _demandmod(p, mod.__dict__, mod.__dict__,
+ level=1))
+ elif t:
+ subload(getattr(mod, h), t)
+
+ for x in after:
+ subload(mod, x)
+
+ # Replace references to this proxy instance with the actual module.
+ if locals:
+ if locals.get(head) is self:
+ locals[head] = mod
+ elif locals.get(head + r'mod') is self:
+ locals[head + r'mod'] = mod
+
+ for modname in modrefs:
+ modref = sys.modules.get(modname, None)
+ if modref and getattr(modref, head, None) is self:
+ setattr(modref, head, mod)
+
+ object.__setattr__(self, r"_module", mod)
+
+ def __repr__(self):
+ if self._module:
+ return "<proxied module '%s'>" % self._data[0]
+ return "<unloaded module '%s'>" % self._data[0]
+
+ def __call__(self, *args, **kwargs):
+ raise TypeError("%s object is not callable" % repr(self))
+
+ def __getattr__(self, attr):
+ self._load()
+ return getattr(self._module, attr)
+
+ def __setattr__(self, attr, val):
+ self._load()
+ setattr(self._module, attr, val)
+
+ @property
+ def __dict__(self):
+ self._load()
+ return self._module.__dict__
+
+ @property
+ def __doc__(self):
+ self._load()
+ return self._module.__doc__
+
+_pypy = '__pypy__' in sys.builtin_module_names
+
+def _demandimport(name, globals=None, locals=None, fromlist=None, level=-1):
+ if locals is None or name in ignore or fromlist == ('*',):
+ # these cases we can't really delay
+ return _hgextimport(_origimport, name, globals, locals, fromlist, level)
+ elif not fromlist:
+ # import a [as b]
+ if '.' in name: # a.b
+ base, rest = name.split('.', 1)
+ # email.__init__ loading email.mime
+ if globals and globals.get('__name__', None) == base:
+ return _origimport(name, globals, locals, fromlist, level)
+ # if a is already demand-loaded, add b to its submodule list
+ if base in locals:
+ if isinstance(locals[base], _demandmod):
+ locals[base]._extend(rest)
+ return locals[base]
+ return _demandmod(name, globals, locals, level)
+ else:
+ # There is a fromlist.
+ # from a import b,c,d
+ # from . import b,c,d
+ # from .a import b,c,d
+
+ # level == -1: relative and absolute attempted (Python 2 only).
+ # level >= 0: absolute only (Python 2 w/ absolute_import and Python 3).
+ # The modern Mercurial convention is to use absolute_import everywhere,
+ # so modern Mercurial code will have level >= 0.
+
+ # The name of the module the import statement is located in.
+ globalname = globals.get('__name__')
+
+ def processfromitem(mod, attr):
+ """Process an imported symbol in the import statement.
+
+ If the symbol doesn't exist in the parent module, and if the
+ parent module is a package, it must be a module. We set missing
+ modules up as _demandmod instances.
+ """
+ symbol = getattr(mod, attr, nothing)
+ nonpkg = getattr(mod, '__path__', nothing) is nothing
+ if symbol is nothing:
+ if nonpkg:
+ # do not try relative import, which would raise ValueError,
+ # and leave unknown attribute as the default __import__()
+ # would do. the missing attribute will be detected later
+ # while processing the import statement.
+ return
+ mn = '%s.%s' % (mod.__name__, attr)
+ if mn in ignore:
+ importfunc = _origimport
+ else:
+ importfunc = _demandmod
+ symbol = importfunc(attr, mod.__dict__, locals, level=1)
+ setattr(mod, attr, symbol)
+
+ # Record the importing module references this symbol so we can
+ # replace the symbol with the actual module instance at load
+ # time.
+ if globalname and isinstance(symbol, _demandmod):
+ symbol._addref(globalname)
+
+ def chainmodules(rootmod, modname):
+ # recurse down the module chain, and return the leaf module
+ mod = rootmod
+ for comp in modname.split('.')[1:]:
+ obj = getattr(mod, comp, nothing)
+ if obj is nothing:
+ obj = _demandmod(comp, mod.__dict__, mod.__dict__, level=1)
+ setattr(mod, comp, obj)
+ elif mod.__name__ + '.' + comp in sys.modules:
+ # prefer loaded module over attribute (issue5617)
+ obj = sys.modules[mod.__name__ + '.' + comp]
+ mod = obj
+ return mod
+
+ if level >= 0:
+ if name:
+ # "from a import b" or "from .a import b" style
+ rootmod = _hgextimport(_origimport, name, globals, locals,
+ level=level)
+ mod = chainmodules(rootmod, name)
+ elif _pypy:
+ # PyPy's __import__ throws an exception if invoked
+ # with an empty name and no fromlist. Recreate the
+ # desired behaviour by hand.
+ mn = globalname
+ mod = sys.modules[mn]
+ if getattr(mod, '__path__', nothing) is nothing:
+ mn = mn.rsplit('.', 1)[0]
+ mod = sys.modules[mn]
+ if level > 1:
+ mn = mn.rsplit('.', level - 1)[0]
+ mod = sys.modules[mn]
+ else:
+ mod = _hgextimport(_origimport, name, globals, locals,
+ level=level)
+
+ for x in fromlist:
+ processfromitem(mod, x)
+
+ return mod
+
+ # But, we still need to support lazy loading of standard library and 3rd
+ # party modules. So handle level == -1.
+ mod = _hgextimport(_origimport, name, globals, locals)
+ mod = chainmodules(mod, name)
+
+ for x in fromlist:
+ processfromitem(mod, x)
+
+ return mod
+
+ignore = []
+
+def init(ignorelist):
+ global ignore
+ ignore = ignorelist
+
+def isenabled():
+ return builtins.__import__ == _demandimport
+
+def enable():
+ "enable global demand-loading of modules"
+ if os.environ.get('HGDEMANDIMPORT') != 'disable':
+ builtins.__import__ = _demandimport
+
+def disable():
+ "disable global demand-loading of modules"
+ builtins.__import__ = _origimport
+
+@contextmanager
+def deactivated():
+ "context manager for disabling demandimport in 'with' blocks"
+ demandenabled = isenabled()
+ if demandenabled:
+ disable()
+
+ try:
+ yield
+ finally:
+ if demandenabled:
+ enable()
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgdemandimport/demandimportpy3.py Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,112 @@
+# demandimportpy3 - global demand-loading of modules for Mercurial
+#
+# Copyright 2017 Facebook Inc.
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+"""Lazy loading for Python 3.6 and above.
+
+This uses the new importlib finder/loader functionality available in Python 3.5
+and up. The code reuses most of the mechanics implemented inside importlib.util,
+but with a few additions:
+
+* Allow excluding certain modules from lazy imports.
+* Expose an interface that's substantially the same as demandimport for
+ Python 2.
+
+This also has some limitations compared to the Python 2 implementation:
+
+* Much of the logic is per-package, not per-module, so any packages loaded
+ before demandimport is enabled will not be lazily imported in the future. In
+ practice, we only expect builtins to be loaded before demandimport is
+ enabled.
+"""
+
+# This line is unnecessary, but it satisfies test-check-py3-compat.t.
+from __future__ import absolute_import
+
+import contextlib
+import os
+import sys
+
+import importlib.abc
+import importlib.machinery
+import importlib.util
+
+_deactivated = False
+
+class _lazyloaderex(importlib.util.LazyLoader):
+ """This is a LazyLoader except it also follows the _deactivated global and
+ the ignore list.
+ """
+ def exec_module(self, module):
+ """Make the module load lazily."""
+ if _deactivated or module.__name__ in ignore:
+ self.loader.exec_module(module)
+ else:
+ super().exec_module(module)
+
+# This is 3.6+ because with Python 3.5 it isn't possible to lazily load
+# extensions. See the discussion in https://python.org/sf/26186 for more.
+_extensions_loader = _lazyloaderex.factory(
+ importlib.machinery.ExtensionFileLoader)
+_bytecode_loader = _lazyloaderex.factory(
+ importlib.machinery.SourcelessFileLoader)
+_source_loader = _lazyloaderex.factory(importlib.machinery.SourceFileLoader)
+
+def _makefinder(path):
+ return importlib.machinery.FileFinder(
+ path,
+ # This is the order in which loaders are passed in in core Python.
+ (_extensions_loader, importlib.machinery.EXTENSION_SUFFIXES),
+ (_source_loader, importlib.machinery.SOURCE_SUFFIXES),
+ (_bytecode_loader, importlib.machinery.BYTECODE_SUFFIXES),
+ )
+
+ignore = []
+
+def init(ignorelist):
+ global ignore
+ ignore = ignorelist
+
+def isenabled():
+ return _makefinder in sys.path_hooks and not _deactivated
+
+def disable():
+ try:
+ while True:
+ sys.path_hooks.remove(_makefinder)
+ except ValueError:
+ pass
+
+def enable():
+ if os.environ.get('HGDEMANDIMPORT') != 'disable':
+ sys.path_hooks.insert(0, _makefinder)
+
+@contextlib.contextmanager
+def deactivated():
+ # This implementation is a bit different from Python 2's. Python 3
+ # maintains a per-package finder cache in sys.path_importer_cache (see
+ # PEP 302). This means that we can't just call disable + enable.
+ # If we do that, in situations like:
+ #
+ # demandimport.enable()
+ # ...
+ # from foo.bar import mod1
+ # with demandimport.deactivated():
+ # from foo.bar import mod2
+ #
+ # mod2 will be imported lazily. (The converse also holds -- whatever finder
+ # first gets cached will be used.)
+ #
+ # Instead, have a global flag the LazyLoader can use.
+ global _deactivated
+ demandenabled = isenabled()
+ if demandenabled:
+ _deactivated = True
+ try:
+ yield
+ finally:
+ if demandenabled:
+ _deactivated = False
--- a/hgext/acl.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/acl.py Wed Jul 19 07:51:41 2017 -0500
@@ -199,6 +199,7 @@
from mercurial import (
error,
match,
+ registrar,
util,
)
@@ -210,6 +211,17 @@
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'
+configtable = {}
+configitem = registrar.configitem(configtable)
+
+# deprecated config: acl.config
+configitem('acl', 'config',
+ default=None,
+)
+configitem('acl', 'sources',
+ default=lambda: ['serve'],
+)
+
def _getusers(ui, group):
# First, try to use group definition from section [acl.groups]
@@ -280,7 +292,7 @@
raise error.Abort(_('config error - hook type "%s" cannot stop '
'incoming changesets nor commits') % hooktype)
if (hooktype == 'pretxnchangegroup' and
- source not in ui.config('acl', 'sources', 'serve').split()):
+ source not in ui.configlist('acl', 'sources')):
ui.debug('acl: changes have source "%s" - skipping\n' % source)
return
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/amend.py Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,51 @@
+# amend.py - provide the amend command
+#
+# Copyright 2017 Facebook, Inc.
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+"""provide the amend command (EXPERIMENTAL)
+
+This extension provides an ``amend`` command that is similar to
+``commit --amend`` but does not prompt an editor.
+"""
+
+from __future__ import absolute_import
+
+from mercurial.i18n import _
+from mercurial import (
+ cmdutil,
+ commands,
+ registrar,
+)
+
+# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
+# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
+# be specifying the version(s) of Mercurial they are tested with, or
+# leave the attribute unspecified.
+testedwith = 'ships-with-hg-core'
+
+cmdtable = {}
+command = registrar.command(cmdtable)
+
+@command('amend',
+ [('A', 'addremove', None,
+ _('mark new/missing files as added/removed before committing')),
+ ('e', 'edit', None, _('invoke editor on commit messages')),
+ ('i', 'interactive', None, _('use interactive mode')),
+ ] + cmdutil.walkopts + cmdutil.commitopts + cmdutil.commitopts2,
+ _('[OPTION]... [FILE]...'),
+ inferrepo=True)
+def amend(ui, repo, *pats, **opts):
+ """amend the working copy parent with all or specified outstanding changes
+
+ Similar to :hg:`commit --amend`, but reuse the commit message without
+ invoking editor, unless ``--edit`` was set.
+
+ See :hg:`help commit` for more details.
+ """
+ with repo.wlock(), repo.lock():
+ if not opts.get('logfile'):
+ opts['message'] = opts.get('message') or repo['.'].description()
+ opts['amend'] = True
+ return commands._docommit(ui, repo, *pats, **opts)
--- a/hgext/automv.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/automv.py Wed Jul 19 07:51:41 2017 -0500
@@ -32,10 +32,18 @@
copies,
error,
extensions,
+ registrar,
scmutil,
similar
)
+configtable = {}
+configitem = registrar.configitem(configtable)
+
+configitem('automv', 'similarity',
+ default=95,
+)
+
def extsetup(ui):
entry = extensions.wrapcommand(
commands.table, 'commit', mvcheck)
@@ -48,7 +56,7 @@
renames = None
disabled = opts.pop('no_automv', False)
if not disabled:
- threshold = ui.configint('automv', 'similarity', 95)
+ threshold = ui.configint('automv', 'similarity')
if not 0 <= threshold <= 100:
raise error.Abort(_('automv.similarity must be between 0 and 100'))
if threshold > 0:
--- a/hgext/blackbox.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/blackbox.py Wed Jul 19 07:51:41 2017 -0500
@@ -44,18 +44,33 @@
from mercurial.node import hex
from mercurial import (
- cmdutil,
+ registrar,
ui as uimod,
util,
)
-cmdtable = {}
-command = cmdutil.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'
+
+cmdtable = {}
+command = registrar.command(cmdtable)
+
+configtable = {}
+configitem = registrar.configitem(configtable)
+
+configitem('blackbox', 'dirty',
+ default=False,
+)
+configitem('blackbox', 'maxsize',
+ default='1 MB',
+)
+configitem('blackbox', 'logsource',
+ default=False,
+)
+
lastui = None
filehandles = {}
@@ -118,7 +133,7 @@
(newpath, oldpath, err.strerror))
fp = _openlog(self._bbvfs)
- maxsize = self.configbytes('blackbox', 'maxsize', 1048576)
+ maxsize = self.configbytes('blackbox', 'maxsize')
if maxsize > 0:
st = self._bbvfs.fstat(fp)
if st.st_size >= maxsize:
@@ -171,9 +186,10 @@
return
try:
ui._bbinlog = True
- date = util.datestr(None, '%Y/%m/%d %H:%M:%S')
+ default = self.configdate('devel', 'default-date')
+ date = util.datestr(default, '%Y/%m/%d %H:%M:%S')
user = util.getuser()
- pid = str(util.getpid())
+ pid = '%d' % util.getpid()
formattedmsg = msg[0] % msg[1:]
rev = '(unknown)'
changed = ''
@@ -181,12 +197,10 @@
ctx = ui._bbrepo[None]
parents = ctx.parents()
rev = ('+'.join([hex(p.node()) for p in parents]))
- if (ui.configbool('blackbox', 'dirty', False) and (
- any(ui._bbrepo.status()) or
- any(ctx.sub(s).dirty() for s in ctx.substate)
- )):
+ if (ui.configbool('blackbox', 'dirty') and
+ ctx.dirty(missing=True, merge=False, branch=False)):
changed = '+'
- if ui.configbool('blackbox', 'logsource', False):
+ if ui.configbool('blackbox', 'logsource'):
src = ' [%s]' % event
else:
src = ''
@@ -220,6 +234,7 @@
if util.safehasattr(ui, 'setrepo'):
ui.setrepo(repo)
+ repo._wlockfreeprefix.add('blackbox.log')
@command('^blackbox',
[('l', 'limit', 10, _('the number of events to show')),
--- a/hgext/bugzilla.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/bugzilla.py Wed Jul 19 07:51:41 2017 -0500
@@ -301,8 +301,10 @@
from mercurial.node import short
from mercurial import (
cmdutil,
+ configitems,
error,
mail,
+ registrar,
url,
util,
)
@@ -315,6 +317,75 @@
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'
+configtable = {}
+configitem = registrar.configitem(configtable)
+
+configitem('bugzilla', 'apikey',
+ default='',
+)
+configitem('bugzilla', 'bzdir',
+ default='/var/www/html/bugzilla',
+)
+configitem('bugzilla', 'bzemail',
+ default=None,
+)
+configitem('bugzilla', 'bzurl',
+ default='http://localhost/bugzilla/',
+)
+configitem('bugzilla', 'bzuser',
+ default=None,
+)
+configitem('bugzilla', 'db',
+ default='bugs',
+)
+configitem('bugzilla', 'fixregexp',
+ default=(r'fix(?:es)?\s*(?:bugs?\s*)?,?\s*'
+ r'(?:nos?\.?|num(?:ber)?s?)?\s*'
+ r'(?P<ids>(?:#?\d+\s*(?:,?\s*(?:and)?)?\s*)+)'
+ r'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?')
+)
+configitem('bugzilla', 'fixresolution',
+ default='FIXED',
+)
+configitem('bugzilla', 'fixstatus',
+ default='RESOLVED',
+)
+configitem('bugzilla', 'host',
+ default='localhost',
+)
+configitem('bugzilla', 'notify',
+ default=configitems.dynamicdefault,
+)
+configitem('bugzilla', 'password',
+ default=None,
+)
+configitem('bugzilla', 'regexp',
+ default=(r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
+ r'(?P<ids>(?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)'
+ r'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?')
+)
+configitem('bugzilla', 'strip',
+ default=0,
+)
+configitem('bugzilla', 'style',
+ default=None,
+)
+configitem('bugzilla', 'template',
+ default=None,
+)
+configitem('bugzilla', 'timeout',
+ default=5,
+)
+configitem('bugzilla', 'user',
+ default='bugs',
+)
+configitem('bugzilla', 'usermap',
+ default=None,
+)
+configitem('bugzilla', 'version',
+ default=None,
+)
+
class bzaccess(object):
'''Base class for access to Bugzilla.'''
@@ -389,11 +460,11 @@
bzaccess.__init__(self, ui)
- host = self.ui.config('bugzilla', 'host', 'localhost')
- user = self.ui.config('bugzilla', 'user', 'bugs')
+ host = self.ui.config('bugzilla', 'host')
+ user = self.ui.config('bugzilla', 'user')
passwd = self.ui.config('bugzilla', 'password')
- db = self.ui.config('bugzilla', 'db', 'bugs')
- timeout = int(self.ui.config('bugzilla', 'timeout', 5))
+ db = self.ui.config('bugzilla', 'db')
+ timeout = int(self.ui.config('bugzilla', 'timeout'))
self.ui.note(_('connecting to %s:%s as %s, password %s\n') %
(host, db, user, '*' * len(passwd)))
self.conn = bzmysql._MySQLdb.connect(host=host,
@@ -449,8 +520,7 @@
for id in bugs.keys():
self.ui.status(_(' bug %s\n') % id)
cmdfmt = self.ui.config('bugzilla', 'notify', self.default_notify)
- bzdir = self.ui.config('bugzilla', 'bzdir',
- '/var/www/html/bugzilla')
+ bzdir = self.ui.config('bugzilla', 'bzdir')
try:
# Backwards-compatible with old notify string, which
# took one string. This will throw with a new format
@@ -636,16 +706,14 @@
def __init__(self, ui):
bzaccess.__init__(self, ui)
- bzweb = self.ui.config('bugzilla', 'bzurl',
- 'http://localhost/bugzilla/')
+ bzweb = self.ui.config('bugzilla', 'bzurl')
bzweb = bzweb.rstrip("/") + "/xmlrpc.cgi"
- user = self.ui.config('bugzilla', 'user', 'bugs')
+ user = self.ui.config('bugzilla', 'user')
passwd = self.ui.config('bugzilla', 'password')
- self.fixstatus = self.ui.config('bugzilla', 'fixstatus', 'RESOLVED')
- self.fixresolution = self.ui.config('bugzilla', 'fixresolution',
- 'FIXED')
+ self.fixstatus = self.ui.config('bugzilla', 'fixstatus')
+ self.fixresolution = self.ui.config('bugzilla', 'fixresolution')
self.bzproxy = xmlrpclib.ServerProxy(bzweb, self.transport(bzweb))
ver = self.bzproxy.Bugzilla.version()['version'].split('.')
@@ -758,7 +826,7 @@
matches = self.bzproxy.User.get({'match': [user],
'token': self.bztoken})
if not matches['users']:
- user = self.ui.config('bugzilla', 'user', 'bugs')
+ user = self.ui.config('bugzilla', 'user')
matches = self.bzproxy.User.get({'match': [user],
'token': self.bztoken})
if not matches['users']:
@@ -797,15 +865,13 @@
"""
def __init__(self, ui):
bzaccess.__init__(self, ui)
- bz = self.ui.config('bugzilla', 'bzurl',
- 'http://localhost/bugzilla/')
+ bz = self.ui.config('bugzilla', 'bzurl')
self.bzroot = '/'.join([bz, 'rest'])
- self.apikey = self.ui.config('bugzilla', 'apikey', '')
- self.user = self.ui.config('bugzilla', 'user', 'bugs')
+ self.apikey = self.ui.config('bugzilla', 'apikey')
+ self.user = self.ui.config('bugzilla', 'user')
self.passwd = self.ui.config('bugzilla', 'password')
- self.fixstatus = self.ui.config('bugzilla', 'fixstatus', 'RESOLVED')
- self.fixresolution = self.ui.config('bugzilla', 'fixresolution',
- 'FIXED')
+ self.fixstatus = self.ui.config('bugzilla', 'fixstatus')
+ self.fixresolution = self.ui.config('bugzilla', 'fixresolution')
def apiurl(self, targets, include_fields=None):
url = '/'.join([self.bzroot] + [str(t) for t in targets])
@@ -930,15 +996,6 @@
'restapi': bzrestapi,
}
- _default_bug_re = (r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
- r'(?P<ids>(?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)'
- r'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?')
-
- _default_fix_re = (r'fix(?:es)?\s*(?:bugs?\s*)?,?\s*'
- r'(?:nos?\.?|num(?:ber)?s?)?\s*'
- r'(?P<ids>(?:#?\d+\s*(?:,?\s*(?:and)?)?\s*)+)'
- r'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?')
-
def __init__(self, ui, repo):
self.ui = ui
self.repo = repo
@@ -952,11 +1009,9 @@
self.bzdriver = bzclass(self.ui)
self.bug_re = re.compile(
- self.ui.config('bugzilla', 'regexp',
- bugzilla._default_bug_re), re.IGNORECASE)
+ self.ui.config('bugzilla', 'regexp'), re.IGNORECASE)
self.fix_re = re.compile(
- self.ui.config('bugzilla', 'fixregexp',
- bugzilla._default_fix_re), re.IGNORECASE)
+ self.ui.config('bugzilla', 'fixregexp'), re.IGNORECASE)
self.split_re = re.compile(r'\D+')
def find_bugs(self, ctx):
@@ -1023,7 +1078,7 @@
def webroot(root):
'''strip leading prefix of repo root and turn into
url-safe path.'''
- count = int(self.ui.config('bugzilla', 'strip', 0))
+ count = int(self.ui.config('bugzilla', 'strip'))
root = util.pconvert(root)
while count > 0:
c = root.find('/')
@@ -1040,8 +1095,9 @@
if not mapfile and not tmpl:
tmpl = _('changeset {node|short} in repo {root} refers '
'to bug {bug}.\ndetails:\n\t{desc|tabindent}')
- t = cmdutil.changeset_templater(self.ui, self.repo,
- False, None, tmpl, mapfile, False)
+ spec = cmdutil.logtemplatespec(tmpl, mapfile)
+ t = cmdutil.changeset_templater(self.ui, self.repo, spec,
+ False, None, False)
self.ui.pushbuffer()
t.show(ctx, changes=ctx.changeset(),
bug=str(bugid),
--- a/hgext/censor.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/censor.py Wed Jul 19 07:51:41 2017 -0500
@@ -31,17 +31,17 @@
from mercurial.node import short
from mercurial import (
- cmdutil,
error,
filelog,
lock as lockmod,
+ registrar,
revlog,
scmutil,
util,
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -102,7 +102,7 @@
hint=_('clean/delete/update first'))
flogv = flog.version & 0xFFFF
- if flogv != revlog.REVLOGNG:
+ if flogv != revlog.REVLOGV1:
raise error.Abort(
_('censor does not support revlog version %d') % (flogv,))
@@ -117,7 +117,7 @@
# Using two files instead of one makes it easy to rewrite entry-by-entry
idxread = repo.svfs(flog.indexfile, 'r')
idxwrite = repo.svfs(flog.indexfile, 'wb', atomictemp=True)
- if flog.version & revlog.REVLOGNGINLINEDATA:
+ if flog.version & revlog.FLAG_INLINE_DATA:
dataread, datawrite = idxread, idxwrite
else:
dataread = repo.svfs(flog.datafile, 'r')
--- a/hgext/children.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/children.py Wed Jul 19 07:51:41 2017 -0500
@@ -19,13 +19,13 @@
from mercurial.i18n import _
from mercurial import (
cmdutil,
- commands,
+ registrar,
)
-templateopts = commands.templateopts
+templateopts = cmdutil.templateopts
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
--- a/hgext/churn.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/churn.py Wed Jul 19 07:51:41 2017 -0500
@@ -17,24 +17,21 @@
from mercurial.i18n import _
from mercurial import (
cmdutil,
- commands,
encoding,
patch,
+ registrar,
scmutil,
util,
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'
-def maketemplater(ui, repo, tmpl):
- return cmdutil.changeset_templater(ui, repo, False, None, tmpl, None, False)
-
def changedlines(ui, repo, ctx1, ctx2, fns):
added, removed = 0, 0
fmatch = scmutil.matchfiles(repo, fns)
@@ -55,7 +52,7 @@
return date.strftime(opts['dateformat'])
else:
tmpl = opts.get('oldtemplate') or opts.get('template')
- tmpl = maketemplater(ui, repo, tmpl)
+ tmpl = cmdutil.makelogtemplater(ui, repo, tmpl)
def getkey(ctx):
ui.pushbuffer()
tmpl.show(ctx)
@@ -114,7 +111,7 @@
('s', 'sort', False, _('sort by key (default: sort by count)')),
('', 'diffstat', False, _('display added/removed lines separately')),
('', 'aliases', '', _('file with email aliases'), _('FILE')),
- ] + commands.walkopts,
+ ] + cmdutil.walkopts,
_("hg churn [-d DATE] [-r REV] [--aliases FILE] [FILE]"),
inferrepo=True)
def churn(ui, repo, *pats, **opts):
--- a/hgext/clonebundles.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/clonebundles.py Wed Jul 19 07:51:41 2017 -0500
@@ -136,7 +136,7 @@
Manifests can contain multiple entries. Assuming metadata is defined, clients
will filter entries from the manifest that they don't support. The remaining
entries are optionally sorted by client preferences
-(``experimental.clonebundleprefers`` config option). The client then attempts
+(``ui.clonebundleprefers`` config option). The client then attempts
to fetch the bundle at the first URL in the remaining list.
**Errors when downloading a bundle will fail the entire clone operation:
--- a/hgext/color.py Wed Jul 05 11:24:22 2017 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,31 +0,0 @@
-# color.py color output for Mercurial commands
-#
-# Copyright (C) 2007 Kevin Christen <kevin.christen@gmail.com>
-#
-# This software may be used and distributed according to the terms of the
-# GNU General Public License version 2 or any later version.
-
-'''enable Mercurial color mode (DEPRECATED)
-
-This extension enables Mercurial color mode. The feature is now directly
-available in Mercurial core. You can access it using::
-
- [ui]
- color = auto
-
-See :hg:`help color` for details.
-'''
-
-from __future__ import absolute_import
-
-from mercurial import color
-
-# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
-# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
-# be specifying the version(s) of Mercurial they are tested with, or
-# leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
-
-def extsetup(ui):
- # change default color config
- color._enabledbydefault = True
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/commitextras.py Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,67 @@
+# commitextras.py
+#
+# Copyright 2013 Facebook, Inc.
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+'''adds a new flag extras to commit (ADVANCED)'''
+
+from __future__ import absolute_import
+
+from mercurial.i18n import _
+from mercurial import (
+ commands,
+ error,
+ extensions,
+ registrar,
+)
+
+cmdtable = {}
+command = registrar.command(cmdtable)
+testedwith = 'ships-with-hg-core'
+
+usedinternally = {
+ 'amend_source',
+ 'branch',
+ 'close',
+ 'histedit_source',
+ 'topic',
+ 'rebase_source',
+ 'intermediate-source',
+ '__touch-noise__',
+ 'source',
+ 'transplant_source',
+}
+
+def extsetup(ui):
+ entry = extensions.wrapcommand(commands.table, 'commit', _commit)
+ options = entry[1]
+ options.append(('', 'extra', [],
+ _('set a changeset\'s extra values'), _("KEY=VALUE")))
+
+def _commit(orig, ui, repo, *pats, **opts):
+ origcommit = repo.commit
+ try:
+ def _wrappedcommit(*innerpats, **inneropts):
+ extras = opts.get('extra')
+ if extras:
+ for raw in extras:
+ if '=' not in raw:
+ msg = _("unable to parse '%s', should follow "
+ "KEY=VALUE format")
+ raise error.Abort(msg % raw)
+ k, v = raw.split('=', 1)
+ if k in usedinternally:
+ msg = _("key '%s' is used internally, can't be set "
+ "manually")
+ raise error.Abort(msg % k)
+ inneropts['extra'][k] = v
+ return origcommit(*innerpats, **inneropts)
+
+ # This __dict__ logic is needed because the normal
+ # extension.wrapfunction doesn't seem to work.
+ repo.__dict__['commit'] = _wrappedcommit
+ return orig(ui, repo, *pats, **opts)
+ finally:
+ del repo.__dict__['commit']
--- a/hgext/convert/__init__.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/convert/__init__.py Wed Jul 19 07:51:41 2017 -0500
@@ -11,7 +11,6 @@
from mercurial.i18n import _
from mercurial import (
- cmdutil,
registrar,
)
@@ -22,7 +21,7 @@
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -182,8 +181,8 @@
where "original_branch_name" is the name of the branch in the
source repository, and "new_branch_name" is the name of the branch
- is the destination repository. No whitespace is allowed in the
- branch names. This can be used to (for instance) move code in one
+ is the destination repository. No whitespace is allowed in the new
+ branch name. This can be used to (for instance) move code in one
repository from "default" to a named branch.
Mercurial Source
@@ -229,6 +228,12 @@
part of a changeset then the default may not be long enough.
The default is 60.
+    :convert.cvsps.logencoding: Specify encoding name to be used for
+        transcoding CVS log messages. Multiple encoding names can be
+        specified as a list (see :hg:`help config.Syntax`), but only
+        the first acceptable encoding in the list is used per CVS log
+        entry. This transcoding is executed before the cvslog hook below.
+
:convert.cvsps.mergeto: Specify a regular expression to which
commit log messages are matched. If a match occurs, then the
conversion process will insert a dummy revision merging the
--- a/hgext/convert/cvsps.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/convert/cvsps.py Wed Jul 19 07:51:41 2017 -0500
@@ -12,6 +12,7 @@
from mercurial.i18n import _
from mercurial import (
encoding,
+ error,
hook,
pycompat,
util,
@@ -491,6 +492,35 @@
ui.status(_('%d log entries\n') % len(log))
+ encodings = ui.configlist('convert', 'cvsps.logencoding')
+ if encodings:
+ def revstr(r):
+            # this is needed because logentry.revision is a tuple of "int"
+ # (e.g. (1, 2) for "1.2")
+ return '.'.join(pycompat.maplist(pycompat.bytestr, r))
+
+ for entry in log:
+ comment = entry.comment
+ for e in encodings:
+ try:
+ entry.comment = comment.decode(e).encode('utf-8')
+ if ui.debugflag:
+ ui.debug("transcoding by %s: %s of %s\n" %
+ (e, revstr(entry.revision), entry.file))
+ break
+ except UnicodeDecodeError:
+ pass # try next encoding
+ except LookupError as inst: # unknown encoding, maybe
+ raise error.Abort(inst,
+ hint=_('check convert.cvsps.logencoding'
+ ' configuration'))
+ else:
+ raise error.Abort(_("no encoding can transcode"
+ " CVS log message for %s of %s")
+ % (revstr(entry.revision), entry.file),
+ hint=_('check convert.cvsps.logencoding'
+ ' configuration'))
+
hook.hook(ui, None, "cvslog", True, log=log)
return log
--- a/hgext/convert/filemap.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/convert/filemap.py Wed Jul 19 07:51:41 2017 -0500
@@ -310,7 +310,7 @@
# map to any revision in the restricted graph. Put SKIPREV
# in the set of wanted ancestors to simplify code elsewhere
self.parentmap[rev] = SKIPREV
- self.wantedancestors[rev] = set((SKIPREV,))
+ self.wantedancestors[rev] = {SKIPREV}
return
# Reuse the data from our parent.
--- a/hgext/convert/git.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/convert/git.py Wed Jul 19 07:51:41 2017 -0500
@@ -32,7 +32,7 @@
return "%s %s" % (self.node, self.path)
# Keys in extra fields that should not be copied if the user requests.
-bannedextrakeys = set([
+bannedextrakeys = {
# Git commit object built-ins.
'tree',
'parent',
@@ -41,7 +41,7 @@
# Mercurial built-ins.
'branch',
'close',
-])
+}
class convert_git(common.converter_source, common.commandline):
# Windows does not support GIT_DIR= construct while other systems
@@ -455,9 +455,9 @@
('refs/heads/', '')
]
- exclude = set([
+ exclude = {
'refs/remotes/origin/HEAD',
- ])
+ }
try:
output, status = self.gitrunlines('show-ref')
--- a/hgext/convert/hg.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/convert/hg.py Wed Jul 19 07:51:41 2017 -0500
@@ -345,8 +345,8 @@
if commit.rev != node:
ctx = self.repo[node]
if ctx.phase() < phases.draft:
- phases.retractboundary(self.repo, tr, phases.draft,
- [ctx.node()])
+ phases.registernew(self.repo, tr, phases.draft,
+ [ctx.node()])
text = "(octopus merge fixup)\n"
p2 = node
@@ -425,9 +425,9 @@
tr = self.repo.transaction('bookmark')
self.ui.status(_("updating bookmarks\n"))
destmarks = self.repo._bookmarks
- for bookmark in updatedbookmark:
- destmarks[bookmark] = nodemod.bin(updatedbookmark[bookmark])
- destmarks.recordchange(tr)
+ changes = [(bookmark, nodemod.bin(updatedbookmark[bookmark]))
+ for bookmark in updatedbookmark]
+ destmarks.applychanges(self.repo, tr, changes)
tr.close()
finally:
lockmod.release(lock, wlock, tr)
--- a/hgext/convert/transport.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/convert/transport.py Wed Jul 19 07:51:41 2017 -0500
@@ -81,11 +81,6 @@
if ra is None or not util.safehasattr(svn.ra, 'reparent'):
self.client = svn.client.create_context(self.pool)
ab = _create_auth_baton(self.pool)
- if False:
- svn.core.svn_auth_set_parameter(
- ab, svn.core.SVN_AUTH_PARAM_DEFAULT_USERNAME, self.username)
- svn.core.svn_auth_set_parameter(
- ab, svn.core.SVN_AUTH_PARAM_DEFAULT_PASSWORD, self.password)
self.client.auth_baton = ab
global svn_config
if svn_config is None:
--- a/hgext/extdiff.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/extdiff.py Wed Jul 19 07:51:41 2017 -0500
@@ -74,16 +74,16 @@
from mercurial import (
archival,
cmdutil,
- commands,
error,
filemerge,
pycompat,
+ registrar,
scmutil,
util,
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -101,7 +101,7 @@
dirname = '%s.%s' % (dirname, short(node))
base = os.path.join(tmproot, dirname)
os.mkdir(base)
- fns_and_mtime = []
+ fnsandstat = []
if node is not None:
ui.note(_('making snapshot of %d files from rev %s\n') %
@@ -124,9 +124,8 @@
if node is None:
dest = os.path.join(base, wfn)
- fns_and_mtime.append((dest, repo.wjoin(fn),
- os.lstat(dest).st_mtime))
- return dirname, fns_and_mtime
+ fnsandstat.append((dest, repo.wjoin(fn), os.lstat(dest)))
+ return dirname, fnsandstat
def dodiff(ui, repo, cmdline, pats, opts):
'''Do the actual diff:
@@ -199,7 +198,7 @@
dir1b = None
rev1b = ''
- fns_and_mtime = []
+ fnsandstat = []
# If node2 in not the wc or there is >1 change, copy it
dir2root = ''
@@ -212,8 +211,8 @@
#the working dir in this case (because the other cases
#are: diffing 2 revisions or single file -- in which case
#the file is already directly passed to the diff tool).
- dir2, fns_and_mtime = snapshot(ui, repo, modadd, None, tmproot,
- subrepos)
+ dir2, fnsandstat = snapshot(ui, repo, modadd, None, tmproot,
+ subrepos)
else:
# This lets the diff tool open the changed file directly
dir2 = ''
@@ -241,7 +240,7 @@
else:
template = 'hg-%h.patch'
cmdutil.export(repo, [repo[node1a].rev(), repo[node2].rev()],
- template=repo.vfs.reljoin(tmproot, template),
+ fntemplate=repo.vfs.reljoin(tmproot, template),
match=matcher)
label1a = cmdutil.makefilename(repo, template, node1a)
label2 = cmdutil.makefilename(repo, template, node2)
@@ -249,7 +248,7 @@
dir2 = repo.vfs.reljoin(tmproot, label2)
dir1b = None
label1b = None
- fns_and_mtime = []
+ fnsandstat = []
# Function to quote file/dir names in the argument string.
# When not operating in 3-way mode, an empty string is
@@ -275,8 +274,17 @@
ui.debug('running %r in %s\n' % (cmdline, tmproot))
ui.system(cmdline, cwd=tmproot, blockedtag='extdiff')
- for copy_fn, working_fn, mtime in fns_and_mtime:
- if os.lstat(copy_fn).st_mtime != mtime:
+ for copy_fn, working_fn, st in fnsandstat:
+ cpstat = os.lstat(copy_fn)
+ # Some tools copy the file and attributes, so mtime may not detect
+ # all changes. A size check will detect more cases, but not all.
+ # The only certain way to detect every case is to diff all files,
+ # which could be expensive.
+        # copyfile() carries over the permission, so the mode check could
+        # be in an 'elif' branch, but it is kept here to catch the case
+        # where the mode changed without affecting mtime or size.
+ if (cpstat.st_mtime != st.st_mtime or cpstat.st_size != st.st_size
+ or (cpstat.st_mode & 0o100) != (st.st_mode & 0o100)):
ui.debug('file changed while diffing. '
'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn))
util.copyfile(copy_fn, working_fn)
@@ -292,7 +300,7 @@
('r', 'rev', [], _('revision'), _('REV')),
('c', 'change', '', _('change made by revision'), _('REV')),
('', 'patch', None, _('compare patches for two revisions'))
- ] + commands.walkopts + commands.subrepoopts
+ ] + cmdutil.walkopts + cmdutil.subrepoopts
@command('extdiff',
[('p', 'program', '', _('comparison program to run'), _('CMD')),
--- a/hgext/factotum.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/factotum.py Wed Jul 19 07:51:41 2017 -0500
@@ -52,6 +52,7 @@
from mercurial import (
error,
httpconnection,
+ registrar,
url,
util,
)
@@ -63,6 +64,19 @@
_executable = _mountpoint = _service = None
+configtable = {}
+configitem = registrar.configitem(configtable)
+
+configitem('factotum', 'executable',
+ default='/bin/auth/factotum',
+)
+configitem('factotum', 'mountpoint',
+ default='/mnt/factotum',
+)
+configitem('factotum', 'service',
+ default='hg',
+)
+
def auth_getkey(self, params):
if not self.ui.interactive():
raise error.Abort(_('factotum not interactive'))
@@ -127,8 +141,8 @@
def uisetup(ui):
global _executable
- _executable = ui.config('factotum', 'executable', '/bin/auth/factotum')
+ _executable = ui.config('factotum', 'executable')
global _mountpoint
- _mountpoint = ui.config('factotum', 'mountpoint', '/mnt/factotum')
+ _mountpoint = ui.config('factotum', 'mountpoint')
global _service
- _service = ui.config('factotum', 'service', 'hg')
+ _service = ui.config('factotum', 'service')
--- a/hgext/fetch.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/fetch.py Wed Jul 19 07:51:41 2017 -0500
@@ -15,17 +15,17 @@
)
from mercurial import (
cmdutil,
- commands,
error,
exchange,
hg,
lock,
+ registrar,
util,
)
release = lock.release
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -35,10 +35,10 @@
@command('fetch',
[('r', 'rev', [],
_('a specific revision you would like to pull'), _('REV')),
- ('e', 'edit', None, _('invoke editor on commit messages')),
+ ('', 'edit', None, _('invoke editor on commit messages')),
('', 'force-editor', None, _('edit commit message (DEPRECATED)')),
('', 'switch-parent', None, _('switch parents when merging')),
- ] + commands.commitopts + commands.commitopts2 + commands.remoteopts,
+ ] + cmdutil.commitopts + cmdutil.commitopts2 + cmdutil.remoteopts,
_('hg fetch [SOURCE]'))
def fetch(ui, repo, source='default', **opts):
'''pull changes from a remote repository, merge new changes if needed.
--- a/hgext/fsmonitor/__init__.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/fsmonitor/__init__.py Wed Jul 19 07:51:41 2017 -0500
@@ -148,19 +148,7 @@
"""
sha1 = hashlib.sha1()
- if util.safehasattr(ignore, 'includepat'):
- sha1.update(ignore.includepat)
- sha1.update('\0\0')
- if util.safehasattr(ignore, 'excludepat'):
- sha1.update(ignore.excludepat)
- sha1.update('\0\0')
- if util.safehasattr(ignore, 'patternspat'):
- sha1.update(ignore.patternspat)
- sha1.update('\0\0')
- if util.safehasattr(ignore, '_files'):
- for f in ignore._files:
- sha1.update(f)
- sha1.update('\0')
+ sha1.update(repr(ignore))
return sha1.hexdigest()
_watchmanencoding = pywatchman.encoding.get_local_encoding()
@@ -253,10 +241,10 @@
fresh_instance = False
exact = skipstep3 = False
- if matchfn == match.exact: # match.exact
+ if match.isexact(): # match.exact
exact = True
dirignore = util.always # skip step 2
- elif match.files() and not match.anypats(): # match.match, no patterns
+ elif match.prefix(): # match.match, no patterns
skipstep3 = True
if not exact and self._checkcase:
@@ -497,17 +485,14 @@
else:
stateunknown = listunknown
+ if updatestate:
+ ps = poststatus(startclock)
+ self.addpostdsstatus(ps)
+
r = orig(node1, node2, match, listignored, listclean, stateunknown,
listsubrepos)
modified, added, removed, deleted, unknown, ignored, clean = r
- if updatestate:
- notefiles = modified + added + removed + deleted + unknown
- self._fsmonitorstate.set(
- self._fsmonitorstate.getlastclock() or startclock,
- _hashignore(self.dirstate._ignore),
- notefiles)
-
if not listunknown:
unknown = []
@@ -540,8 +525,19 @@
return scmutil.status(
modified, added, removed, deleted, unknown, ignored, clean)
-def makedirstate(cls):
- class fsmonitordirstate(cls):
+class poststatus(object):
+ def __init__(self, startclock):
+ self._startclock = startclock
+
+ def __call__(self, wctx, status):
+ clock = wctx.repo()._fsmonitorstate.getlastclock() or self._startclock
+ hashignore = _hashignore(wctx.repo().dirstate._ignore)
+ notefiles = (status.modified + status.added + status.removed +
+ status.deleted + status.unknown)
+ wctx.repo()._fsmonitorstate.set(clock, hashignore, notefiles)
+
+def makedirstate(repo, dirstate):
+ class fsmonitordirstate(dirstate.__class__):
def _fsmonitorinit(self, fsmonitorstate, watchmanclient):
# _fsmonitordisable is used in paranoid mode
self._fsmonitordisable = False
@@ -562,18 +558,19 @@
self._fsmonitorstate.invalidate()
return super(fsmonitordirstate, self).invalidate(*args, **kwargs)
- return fsmonitordirstate
+ dirstate.__class__ = fsmonitordirstate
+ dirstate._fsmonitorinit(repo._fsmonitorstate, repo._watchmanclient)
def wrapdirstate(orig, self):
ds = orig(self)
# only override the dirstate when Watchman is available for the repo
if util.safehasattr(self, '_fsmonitorstate'):
- ds.__class__ = makedirstate(ds.__class__)
- ds._fsmonitorinit(self._fsmonitorstate, self._watchmanclient)
+ makedirstate(self, ds)
return ds
def extsetup(ui):
- wrapfilecache(localrepo.localrepository, 'dirstate', wrapdirstate)
+ extensions.wrapfilecache(
+ localrepo.localrepository, 'dirstate', wrapdirstate)
if pycompat.sysplatform == 'darwin':
# An assist for avoiding the dangling-symlink fsevents bug
extensions.wrapfunction(os, 'symlink', wrapsymlink)
@@ -600,18 +597,31 @@
self.node = node
self.distance = distance
self.partial = partial
+ self._lock = None
+ self.need_leave = False
def __enter__(self):
- self._state('state-enter')
+ # We explicitly need to take a lock here, before we proceed to update
+ # watchman about the update operation, so that we don't race with
+ # some other actor. merge.update is going to take the wlock almost
+ # immediately anyway, so this is effectively extending the lock
+ # around a couple of short sanity checks.
+ self._lock = self.repo.wlock()
+ self.need_leave = self._state('state-enter')
return self
def __exit__(self, type_, value, tb):
- status = 'ok' if type_ is None else 'failed'
- self._state('state-leave', status=status)
+ try:
+ if self.need_leave:
+ status = 'ok' if type_ is None else 'failed'
+ self._state('state-leave', status=status)
+ finally:
+ if self._lock:
+ self._lock.release()
def _state(self, cmd, status='ok'):
if not util.safehasattr(self.repo, '_watchmanclient'):
- return
+ return False
try:
commithash = self.repo[self.node].hex()
self.repo._watchmanclient.command(cmd, {
@@ -626,10 +636,12 @@
# whether the working copy parent is changing
'partial': self.partial,
}})
+ return True
except Exception as e:
# Swallow any errors; fire and forget
self.repo.ui.log(
'watchman', 'Exception %s while running %s\n', e, cmd)
+ return False
# Bracket working copy updates with calls to the watchman state-enter
# and state-leave commands. This allows clients to perform more intelligent
@@ -654,7 +666,7 @@
with state_update(repo, node, distance, partial):
return orig(
repo, node, branchmerge, force, ancestor, mergeancestor,
- labels, matcher, *kwargs)
+ labels, matcher, **kwargs)
def reposetup(ui, repo):
# We don't work with largefiles or inotify
@@ -665,16 +677,12 @@
'extension and has been disabled.\n') % ext)
return
- if util.safehasattr(repo, 'dirstate'):
- # We don't work with subrepos either. Note that we can get passed in
- # e.g. a statichttprepo, which throws on trying to access the substate.
- # XXX This sucks.
- try:
- # if repo[None].substate can cause a dirstate parse, which is too
- # slow. Instead, look for a file called hgsubstate,
- if repo.wvfs.exists('.hgsubstate') or repo.wvfs.exists('.hgsub'):
- return
- except AttributeError:
+ if repo.local():
+ # We don't work with subrepos either.
+ #
+        # repo[None].substate can cause a dirstate parse, which is too
+        # slow. Instead, look for a file called hgsubstate,
+ if repo.wvfs.exists('.hgsubstate') or repo.wvfs.exists('.hgsub'):
return
fsmonitorstate = state.state(repo)
@@ -690,13 +698,11 @@
repo._fsmonitorstate = fsmonitorstate
repo._watchmanclient = client
- # at this point since fsmonitorstate wasn't present, repo.dirstate is
- # not a fsmonitordirstate
- repo.dirstate.__class__ = makedirstate(repo.dirstate.__class__)
- # nuke the dirstate so that _fsmonitorinit and subsequent configuration
- # changes take effect on it
- del repo._filecache['dirstate']
- delattr(repo.unfiltered(), 'dirstate')
+ dirstate, cached = localrepo.isfilecached(repo, 'dirstate')
+ if cached:
+ # at this point since fsmonitorstate wasn't present,
+ # repo.dirstate is not a fsmonitordirstate
+ makedirstate(repo, dirstate)
class fsmonitorrepo(repo.__class__):
def status(self, *args, **kwargs):
@@ -704,21 +710,3 @@
return overridestatus(orig, self, *args, **kwargs)
repo.__class__ = fsmonitorrepo
-
-def wrapfilecache(cls, propname, wrapper):
- """Wraps a filecache property. These can't be wrapped using the normal
- wrapfunction. This should eventually go into upstream Mercurial.
- """
- assert callable(wrapper)
- for currcls in cls.__mro__:
- if propname in currcls.__dict__:
- origfn = currcls.__dict__[propname].func
- assert callable(origfn)
- def wrap(*args, **kwargs):
- return wrapper(origfn, *args, **kwargs)
- currcls.__dict__[propname].func = wrap
- break
-
- if currcls is object:
- raise AttributeError(
- _("type '%s' has no property '%s'") % (cls, propname))
--- a/hgext/fsmonitor/state.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/fsmonitor/state.py Wed Jul 19 07:51:41 2017 -0500
@@ -13,7 +13,10 @@
import struct
from mercurial.i18n import _
-from mercurial import pathutil
+from mercurial import (
+ pathutil,
+ util,
+)
_version = 4
_versionformat = ">I"
@@ -24,6 +27,7 @@
self._ui = repo.ui
self._rootdir = pathutil.normasprefix(repo.root)
self._lastclock = None
+ self._identity = util.filestat(None)
self.mode = self._ui.config('fsmonitor', 'mode', default='on')
self.walk_on_invalidate = self._ui.configbool(
@@ -35,10 +39,13 @@
try:
file = self._vfs('fsmonitor.state', 'rb')
except IOError as inst:
+ self._identity = util.filestat(None)
if inst.errno != errno.ENOENT:
raise
return None, None, None
+ self._identity = util.filestat.fromfp(file)
+
versionbytes = file.read(4)
if len(versionbytes) < 4:
self._ui.log(
@@ -90,8 +97,16 @@
self.invalidate()
return
+ # Read the identity from the file on disk rather than from the open file
+ # pointer below, because the latter is actually a brand new file.
+ identity = util.filestat.frompath(self._vfs.join('fsmonitor.state'))
+ if identity != self._identity:
+ self._ui.debug('skip updating fsmonitor.state: identity mismatch\n')
+ return
+
try:
- file = self._vfs('fsmonitor.state', 'wb', atomictemp=True)
+ file = self._vfs('fsmonitor.state', 'wb', atomictemp=True,
+ checkambig=True)
except (IOError, OSError):
self._ui.warn(_("warning: unable to write out fsmonitor state\n"))
return
@@ -111,6 +126,7 @@
except OSError as inst:
if inst.errno != errno.ENOENT:
raise
+ self._identity = util.filestat(None)
def setlastclock(self, clock):
self._lastclock = clock
--- a/hgext/gpg.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/gpg.py Wed Jul 19 07:51:41 2017 -0500
@@ -14,16 +14,16 @@
from mercurial.i18n import _
from mercurial import (
cmdutil,
- commands,
error,
match,
node as hgnode,
pycompat,
+ registrar,
util,
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -221,7 +221,7 @@
('m', 'message', '',
_('use text as commit message'), _('TEXT')),
('e', 'edit', False, _('invoke editor on commit messages')),
- ] + commands.commitopts2,
+ ] + cmdutil.commitopts2,
_('hg sign [OPTION]... [REV]...'))
def sign(ui, repo, *revs, **opts):
"""add a signature for the current or given revision
--- a/hgext/graphlog.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/graphlog.py Wed Jul 19 07:51:41 2017 -0500
@@ -21,10 +21,11 @@
from mercurial import (
cmdutil,
commands,
+ registrar,
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -51,7 +52,7 @@
_('show changesets within the given named branch'), _('BRANCH')),
('P', 'prune', [],
_('do not display revision or any of its ancestors'), _('REV')),
- ] + commands.logopts + commands.walkopts,
+ ] + cmdutil.logopts + cmdutil.walkopts,
_('[OPTION]... [FILE]'),
inferrepo=True)
def glog(ui, repo, *pats, **opts):
--- a/hgext/hgk.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/hgk.py Wed Jul 19 07:51:41 2017 -0500
@@ -45,15 +45,15 @@
short,
)
from mercurial import (
- cmdutil,
commands,
obsolete,
patch,
+ registrar,
scmutil,
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
--- a/hgext/highlight/highlight.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/highlight/highlight.py Wed Jul 19 07:51:41 2017 -0500
@@ -10,11 +10,6 @@
from __future__ import absolute_import
-import pygments
-import pygments.formatters
-import pygments.lexers
-import pygments.util
-
from mercurial import demandimport
demandimport.ignore.extend(['pkgutil', 'pkg_resources', '__main__'])
@@ -23,6 +18,12 @@
util,
)
+with demandimport.deactivated():
+ import pygments
+ import pygments.formatters
+ import pygments.lexers
+ import pygments.util
+
highlight = pygments.highlight
ClassNotFound = pygments.util.ClassNotFound
guess_lexer = pygments.lexers.guess_lexer
--- a/hgext/histedit.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/histedit.py Wed Jul 19 07:51:41 2017 -0500
@@ -201,6 +201,7 @@
mergeutil,
node,
obsolete,
+ registrar,
repair,
scmutil,
util,
@@ -209,7 +210,7 @@
pickle = util.pickle
release = lock.release
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
@@ -279,7 +280,6 @@
self.lock = lock
self.wlock = wlock
self.backupfile = None
- self.tr = None
if replacements is None:
self.replacements = []
else:
@@ -1107,25 +1107,24 @@
if action.verb == 'fold' and nextact and nextact.verb == 'fold':
state.actions[idx].__class__ = _multifold
- total = len(state.actions)
- pos = 0
- state.tr = None
-
# Force an initial state file write, so the user can run --abort/continue
# even if there's an exception before the first transaction serialize.
state.write()
- try:
- # Don't use singletransaction by default since it rolls the entire
- # transaction back if an unexpected exception happens (like a
- # pretxncommit hook throws, or the user aborts the commit msg editor).
- if ui.configbool("histedit", "singletransaction", False):
- # Don't use a 'with' for the transaction, since actions may close
- # and reopen a transaction. For example, if the action executes an
- # external process it may choose to commit the transaction first.
- state.tr = repo.transaction('histedit')
+ total = len(state.actions)
+ pos = 0
+ tr = None
+ # Don't use singletransaction by default since it rolls the entire
+ # transaction back if an unexpected exception happens (like a
+ # pretxncommit hook throws, or the user aborts the commit msg editor).
+ if ui.configbool("histedit", "singletransaction", False):
+ # Don't use a 'with' for the transaction, since actions may close
+ # and reopen a transaction. For example, if the action executes an
+ # external process it may choose to commit the transaction first.
+ tr = repo.transaction('histedit')
+ with util.acceptintervention(tr):
while state.actions:
- state.write(tr=state.tr)
+ state.write(tr=tr)
actobj = state.actions[0]
pos += 1
ui.progress(_("editing"), pos, actobj.torule(),
@@ -1137,17 +1136,6 @@
state.replacements.extend(replacement_)
state.actions.pop(0)
- if state.tr is not None:
- state.tr.close()
- except error.InterventionRequired:
- if state.tr is not None:
- state.tr.close()
- raise
- except Exception:
- if state.tr is not None:
- state.tr.abort()
- raise
-
state.write()
ui.progress(_("editing"), None)
@@ -1170,13 +1158,21 @@
for n in succs[1:]:
ui.debug(m % node.short(n))
- safecleanupnode(ui, repo, 'temp', tmpnodes)
-
if not state.keep:
if mapping:
- movebookmarks(ui, repo, mapping, state.topmost, ntm)
+ movetopmostbookmarks(repo, state.topmost, ntm)
# TODO update mq state
- safecleanupnode(ui, repo, 'replaced', mapping)
+ else:
+ mapping = {}
+
+ for n in tmpnodes:
+ mapping[n] = ()
+
+ # remove entries about unknown nodes
+ nodemap = repo.unfiltered().changelog.nodemap
+ mapping = {k: v for k, v in mapping.items()
+ if k in nodemap and all(n in nodemap for n in v)}
+ scmutil.cleanupnodes(repo, mapping, 'histedit')
state.clear()
if os.path.exists(repo.sjoin('undo')):
@@ -1197,12 +1193,8 @@
f = hg.openpath(ui, backupfile)
gen = exchange.readbundle(ui, f, backupfile)
with repo.transaction('histedit.abort') as tr:
- if not isinstance(gen, bundle2.unbundle20):
- gen.apply(repo, 'histedit', 'bundle:' + backupfile)
- if isinstance(gen, bundle2.unbundle20):
- bundle2.applybundle(repo, gen, tr,
- source='histedit',
- url='bundle:' + backupfile)
+ bundle2.applybundle(repo, gen, tr, source='histedit',
+ url='bundle:' + backupfile)
os.remove(backupfile)
@@ -1210,8 +1202,8 @@
if repo.unfiltered().revs('parents() and (%n or %ln::)',
state.parentctxnode, leafs | tmpnodes):
hg.clean(repo, state.topmost, show_stats=True, quietempty=True)
- cleanupnode(ui, repo, 'created', tmpnodes)
- cleanupnode(ui, repo, 'temp', leafs)
+ cleanupnode(ui, repo, tmpnodes)
+ cleanupnode(ui, repo, leafs)
except Exception:
if state.inprogress():
ui.warn(_('warning: encountered an exception during histedit '
@@ -1544,53 +1536,27 @@
return final, tmpnodes, new, newtopmost
-def movebookmarks(ui, repo, mapping, oldtopmost, newtopmost):
- """Move bookmark from old to newly created node"""
- if not mapping:
- # if nothing got rewritten there is not purpose for this function
+def movetopmostbookmarks(repo, oldtopmost, newtopmost):
+ """Move bookmark from oldtopmost to newly created topmost
+
+ This is arguably a feature and we may only want that for the active
+ bookmark. But the behavior is kept compatible with the old version for now.
+ """
+ if not oldtopmost or not newtopmost:
return
- moves = []
- for bk, old in sorted(repo._bookmarks.iteritems()):
- if old == oldtopmost:
- # special case ensure bookmark stay on tip.
- #
- # This is arguably a feature and we may only want that for the
- # active bookmark. But the behavior is kept compatible with the old
- # version for now.
- moves.append((bk, newtopmost))
- continue
- base = old
- new = mapping.get(base, None)
- if new is None:
- continue
- while not new:
- # base is killed, trying with parent
- base = repo[base].p1().node()
- new = mapping.get(base, (base,))
- # nothing to move
- moves.append((bk, new[-1]))
- if moves:
- lock = tr = None
- try:
- lock = repo.lock()
- tr = repo.transaction('histedit')
+ oldbmarks = repo.nodebookmarks(oldtopmost)
+ if oldbmarks:
+ with repo.lock(), repo.transaction('histedit') as tr:
marks = repo._bookmarks
- for mark, new in moves:
- old = marks[mark]
- ui.note(_('histedit: moving bookmarks %s from %s to %s\n')
- % (mark, node.short(old), node.short(new)))
- marks[mark] = new
- marks.recordchange(tr)
- tr.close()
- finally:
- release(tr, lock)
+ changes = []
+ for name in oldbmarks:
+ changes.append((name, newtopmost))
+ marks.applychanges(repo, tr, changes)
-def cleanupnode(ui, repo, name, nodes):
+def cleanupnode(ui, repo, nodes):
"""strip a group of nodes from the repository
The set of node to strip may contains unknown nodes."""
- ui.debug('should strip %s nodes %s\n' %
- (name, ', '.join([node.short(n) for n in nodes])))
with repo.lock():
# do not let filtering get in the way of the cleanse
# we should probably get rid of obsolescence marker created during the
@@ -1601,39 +1567,8 @@
nm = repo.changelog.nodemap
nodes = sorted(n for n in nodes if n in nm)
roots = [c.node() for c in repo.set("roots(%ln)", nodes)]
- for c in roots:
- # We should process node in reverse order to strip tip most first.
- # but this trigger a bug in changegroup hook.
- # This would reduce bundle overhead
- repair.strip(ui, repo, c)
-
-def safecleanupnode(ui, repo, name, nodes):
- """strip or obsolete nodes
-
- nodes could be either a set or dict which maps to replacements.
- nodes could be unknown (outside the repo).
- """
- supportsmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
- if supportsmarkers:
- if util.safehasattr(nodes, 'get'):
- # nodes is a dict-like mapping
- # use unfiltered repo for successors in case they are hidden
- urepo = repo.unfiltered()
- def getmarker(prec):
- succs = tuple(urepo[n] for n in nodes.get(prec, ()))
- return (repo[prec], succs)
- else:
- # nodes is a set-like
- def getmarker(prec):
- return (repo[prec], ())
- # sort by revision number because it sound "right"
- sortednodes = sorted([n for n in nodes if n in repo],
- key=repo.changelog.rev)
- markers = [getmarker(t) for t in sortednodes]
- if markers:
- obsolete.createmarkers(repo, markers)
- else:
- return cleanupnode(ui, repo, name, nodes)
+ if roots:
+ repair.strip(ui, repo, roots)
def stripwrapper(orig, ui, repo, nodelist, *args, **kwargs):
if isinstance(nodelist, str):
@@ -1641,8 +1576,8 @@
if os.path.exists(os.path.join(repo.path, 'histedit-state')):
state = histeditstate(repo)
state.read()
- histedit_nodes = set([action.node for action
- in state.actions if action.node])
+ histedit_nodes = {action.node for action
+ in state.actions if action.node}
common_nodes = histedit_nodes & set(nodelist)
if common_nodes:
raise error.Abort(_("histedit in progress, can't strip %s")
--- a/hgext/journal.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/journal.py Wed Jul 19 07:51:41 2017 -0500
@@ -23,7 +23,6 @@
from mercurial import (
bookmarks,
cmdutil,
- commands,
dispatch,
error,
extensions,
@@ -31,13 +30,14 @@
localrepo,
lock,
node,
+ registrar,
util,
)
from . import share
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
@@ -61,27 +61,38 @@
def extsetup(ui):
extensions.wrapfunction(dispatch, 'runcommand', runcommand)
extensions.wrapfunction(bookmarks.bmstore, '_write', recordbookmarks)
- extensions.wrapfunction(
- localrepo.localrepository.dirstate, 'func', wrapdirstate)
+ extensions.wrapfilecache(
+ localrepo.localrepository, 'dirstate', wrapdirstate)
extensions.wrapfunction(hg, 'postshare', wrappostshare)
extensions.wrapfunction(hg, 'copystore', unsharejournal)
def reposetup(ui, repo):
if repo.local():
repo.journal = journalstorage(repo)
+ repo._wlockfreeprefix.add('namejournal')
+
+ dirstate, cached = localrepo.isfilecached(repo, 'dirstate')
+ if cached:
+ # already instantiated dirstate isn't yet marked as
+ # "journal"-ing, even though repo.dirstate() was already
+ # wrapped by own wrapdirstate()
+ _setupdirstate(repo, dirstate)
def runcommand(orig, lui, repo, cmd, fullargs, *args):
"""Track the command line options for recording in the journal"""
journalstorage.recordcommand(*fullargs)
return orig(lui, repo, cmd, fullargs, *args)
+def _setupdirstate(repo, dirstate):
+ dirstate.journalstorage = repo.journal
+ dirstate.addparentchangecallback('journal', recorddirstateparents)
+
# hooks to record dirstate changes
def wrapdirstate(orig, repo):
"""Make journal storage available to the dirstate object"""
dirstate = orig(repo)
if util.safehasattr(repo, 'journal'):
- dirstate.journalstorage = repo.journal
- dirstate.addparentchangecallback('journal', recorddirstateparents)
+ _setupdirstate(repo, dirstate)
return dirstate
def recorddirstateparents(dirstate, old, new):
@@ -158,7 +169,7 @@
util.safehasattr(repo, 'journal')):
sharedrepo = share._getsrcrepo(repo)
sharedfeatures = _readsharedfeatures(repo)
- if sharedrepo and sharedfeatures > set(['journal']):
+ if sharedrepo and sharedfeatures > {'journal'}:
# there is a shared repository and there are shared journal entries
# to copy. move shared date over from source to destination but
# move the local file first
@@ -292,7 +303,7 @@
# default to 600 seconds timeout
l = lock.lock(
vfs, 'namejournal.lock',
- int(self.ui.config("ui", "timeout", "600")), desc=desc)
+ int(self.ui.config("ui", "timeout")), desc=desc)
self.ui.warn(_("got lock after %s seconds\n") % l.delay)
self._lockref = weakref.ref(l)
return l
@@ -420,7 +431,7 @@
'journal', [
('', 'all', None, 'show history for all names'),
('c', 'commits', None, 'show commit metadata'),
- ] + [opt for opt in commands.logopts if opt[1] not in _ignoreopts],
+ ] + [opt for opt in cmdutil.logopts if opt[1] not in _ignoreopts],
'[OPTION]... [BOOKMARKNAME]')
def journal(ui, repo, *args, **opts):
"""show the previous position of bookmarks and the working copy
--- a/hgext/keyword.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/keyword.py Wed Jul 19 07:51:41 2017 -0500
@@ -88,13 +88,13 @@
import os
import re
import tempfile
+import weakref
from mercurial.i18n import _
from mercurial.hgweb import webcommands
from mercurial import (
cmdutil,
- commands,
context,
dispatch,
error,
@@ -111,7 +111,7 @@
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -122,6 +122,9 @@
nokwcommands = ('add addremove annotate bundle export grep incoming init log'
' outgoing push tip verify convert email glog')
+# webcommands that do not act on keywords
+nokwwebcommands = ('annotate changeset rev filediff diff comparison')
+
# hg commands that trigger expansion only when writing to working dir,
# not when reading filelog, and unexpand when reading from working dir
restricted = ('merge kwexpand kwshrink record qrecord resolve transplant'
@@ -162,7 +165,7 @@
return util.datestr((util.parsedate(text)[0], 0), '%Y-%m-%d %H:%M:%SZ')
# make keyword tools accessible
-kwtools = {'templater': None, 'hgcmd': ''}
+kwtools = {'hgcmd': ''}
def _defaultkwmaps(ui):
'''Returns default keywordmaps according to keywordset configuration.'''
@@ -210,7 +213,7 @@
def __init__(self, ui, repo, inc, exc):
self.ui = ui
- self.repo = repo
+ self._repo = weakref.ref(repo)
self.match = match.match(repo.root, '', [], inc, exc)
self.restrict = kwtools['hgcmd'] in restricted.split()
self.postcommit = False
@@ -221,6 +224,10 @@
else:
self.templates = _defaultkwmaps(self.ui)
+ @property
+ def repo(self):
+ return self._repo()
+
@util.propertycache
def escape(self):
'''Returns bar-separated and escaped keywords.'''
@@ -240,8 +247,8 @@
'''Replaces keywords in data with expanded template.'''
def kwsub(mobj):
kw = mobj.group(1)
- ct = cmdutil.changeset_templater(self.ui, self.repo, False, None,
- self.templates[kw], '', False)
+ ct = cmdutil.makelogtemplater(self.ui, self.repo,
+ self.templates[kw])
self.ui.pushbuffer()
ct.show(ctx, root=self.repo.root, file=path)
ekw = templatefilters.firstline(self.ui.popbuffer())
@@ -378,7 +385,7 @@
wctx = repo[None]
if len(wctx.parents()) > 1:
raise error.Abort(_('outstanding uncommitted merge'))
- kwt = kwtools['templater']
+ kwt = getattr(repo, '_keywordkwt', None)
with repo.wlock():
status = _status(ui, repo, wctx, kwt, *pats, **opts)
if status.modified or status.added or status.removed or status.deleted:
@@ -481,7 +488,7 @@
repo.wvfs.rmtree(repo.root)
@command('kwexpand',
- commands.walkopts,
+ cmdutil.walkopts,
_('hg kwexpand [OPTION]... [FILE]...'),
inferrepo=True)
def expand(ui, repo, *pats, **opts):
@@ -498,7 +505,7 @@
[('A', 'all', None, _('show keyword status flags of all files')),
('i', 'ignore', None, _('show files excluded from expansion')),
('u', 'unknown', None, _('only show unknown (not tracked) files')),
- ] + commands.walkopts,
+ ] + cmdutil.walkopts,
_('hg kwfiles [OPTION]... [FILE]...'),
inferrepo=True)
def files(ui, repo, *pats, **opts):
@@ -522,7 +529,7 @@
I = ignored
i = ignored (not tracked)
'''
- kwt = kwtools['templater']
+ kwt = getattr(repo, '_keywordkwt', None)
wctx = repo[None]
status = _status(ui, repo, wctx, kwt, *pats, **opts)
if pats:
@@ -557,7 +564,7 @@
fm.end()
@command('kwshrink',
- commands.walkopts,
+ cmdutil.walkopts,
_('hg kwshrink [OPTION]... [FILE]...'),
inferrepo=True)
def shrink(ui, repo, *pats, **opts):
@@ -570,9 +577,136 @@
# 3rd argument sets expansion to False
_kwfwrite(ui, repo, False, *pats, **opts)
+# monkeypatches
+
+def kwpatchfile_init(orig, self, ui, gp, backend, store, eolmode=None):
+ '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
+ rejects or conflicts due to expanded keywords in working dir.'''
+ orig(self, ui, gp, backend, store, eolmode)
+ kwt = getattr(getattr(backend, 'repo', None), '_keywordkwt', None)
+ if kwt:
+ # shrink keywords read from working dir
+ self.lines = kwt.shrinklines(self.fname, self.lines)
+
+def kwdiff(orig, repo, *args, **kwargs):
+ '''Monkeypatch patch.diff to avoid expansion.'''
+ kwt = getattr(repo, '_keywordkwt', None)
+ if kwt:
+ restrict = kwt.restrict
+ kwt.restrict = True
+ try:
+ for chunk in orig(repo, *args, **kwargs):
+ yield chunk
+ finally:
+ if kwt:
+ kwt.restrict = restrict
+
+def kwweb_skip(orig, web, req, tmpl):
+ '''Wraps webcommands.x turning off keyword expansion.'''
+ kwt = getattr(web.repo, '_keywordkwt', None)
+ if kwt:
+ origmatch = kwt.match
+ kwt.match = util.never
+ try:
+ for chunk in orig(web, req, tmpl):
+ yield chunk
+ finally:
+ if kwt:
+ kwt.match = origmatch
+
+def kw_amend(orig, ui, repo, commitfunc, old, extra, pats, opts):
+ '''Wraps cmdutil.amend expanding keywords after amend.'''
+ kwt = getattr(repo, '_keywordkwt', None)
+ if kwt is None:
+ return orig(ui, repo, commitfunc, old, extra, pats, opts)
+ with repo.wlock():
+ kwt.postcommit = True
+ newid = orig(ui, repo, commitfunc, old, extra, pats, opts)
+ if newid != old.node():
+ ctx = repo[newid]
+ kwt.restrict = True
+ kwt.overwrite(ctx, ctx.files(), False, True)
+ kwt.restrict = False
+ return newid
+
+def kw_copy(orig, ui, repo, pats, opts, rename=False):
+ '''Wraps cmdutil.copy so that copy/rename destinations do not
+ contain expanded keywords.
+ Note that the source of a regular file destination may also be a
+ symlink:
+ hg cp sym x -> x is symlink
+ cp sym x; hg cp -A sym x -> x is file (maybe expanded keywords)
+ For the latter we have to follow the symlink to find out whether its
+ target is configured for expansion and we therefore must unexpand the
+ keywords in the destination.'''
+ kwt = getattr(repo, '_keywordkwt', None)
+ if kwt is None:
+ return orig(ui, repo, pats, opts, rename)
+ with repo.wlock():
+ orig(ui, repo, pats, opts, rename)
+ if opts.get('dry_run'):
+ return
+ wctx = repo[None]
+ cwd = repo.getcwd()
+
+ def haskwsource(dest):
+ '''Returns true if dest is a regular file and configured for
+ expansion or a symlink which points to a file configured for
+ expansion. '''
+ source = repo.dirstate.copied(dest)
+ if 'l' in wctx.flags(source):
+ source = pathutil.canonpath(repo.root, cwd,
+ os.path.realpath(source))
+ return kwt.match(source)
+
+ candidates = [f for f in repo.dirstate.copies() if
+ 'l' not in wctx.flags(f) and haskwsource(f)]
+ kwt.overwrite(wctx, candidates, False, False)
+
+def kw_dorecord(orig, ui, repo, commitfunc, *pats, **opts):
+ '''Wraps record.dorecord expanding keywords after recording.'''
+ kwt = getattr(repo, '_keywordkwt', None)
+ if kwt is None:
+ return orig(ui, repo, commitfunc, *pats, **opts)
+ with repo.wlock():
+ # record returns 0 even when nothing has changed
+ # therefore compare nodes before and after
+ kwt.postcommit = True
+ ctx = repo['.']
+ wstatus = ctx.status()
+ ret = orig(ui, repo, commitfunc, *pats, **opts)
+ recctx = repo['.']
+ if ctx != recctx:
+ modified, added = _preselect(wstatus, recctx.files())
+ kwt.restrict = False
+ kwt.overwrite(recctx, modified, False, True)
+ kwt.overwrite(recctx, added, False, True, True)
+ kwt.restrict = True
+ return ret
+
+def kwfilectx_cmp(orig, self, fctx):
+ if fctx._customcmp:
+ return fctx.cmp(self)
+ kwt = getattr(self._repo, '_keywordkwt', None)
+ if kwt is None:
+ return orig(self, fctx)
+ # keyword affects data size, comparing wdir and filelog size does
+ # not make sense
+ if (fctx._filenode is None and
+ (self._repo._encodefilterpats or
+ kwt.match(fctx.path()) and 'l' not in fctx.flags() or
+ self.size() - 4 == fctx.size()) or
+ self.size() == fctx.size()):
+ return self._filelog.cmp(self._filenode, fctx.data())
+ return True
def uisetup(ui):
- ''' Monkeypatches dispatch._parse to retrieve user command.'''
+ ''' Monkeypatches dispatch._parse to retrieve user command.
+ Overrides file method to return kwfilelog instead of filelog
+ if file matches user configuration.
+ Wraps commit to overwrite configured files with updated
+ keyword substitutions.
+ Monkeypatches patch and webcommands.'''
def kwdispatch_parse(orig, ui, args):
'''Monkeypatch dispatch._parse to obtain running hg command.'''
@@ -582,13 +716,17 @@
extensions.wrapfunction(dispatch, '_parse', kwdispatch_parse)
+ extensions.wrapfunction(context.filectx, 'cmp', kwfilectx_cmp)
+ extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init)
+ extensions.wrapfunction(patch, 'diff', kwdiff)
+ extensions.wrapfunction(cmdutil, 'amend', kw_amend)
+ extensions.wrapfunction(cmdutil, 'copy', kw_copy)
+ extensions.wrapfunction(cmdutil, 'dorecord', kw_dorecord)
+ for c in nokwwebcommands.split():
+ extensions.wrapfunction(webcommands, c, kwweb_skip)
+
def reposetup(ui, repo):
- '''Sets up repo as kwrepo for keyword substitution.
- Overrides file method to return kwfilelog instead of filelog
- if file matches user configuration.
- Wraps commit to overwrite configured files with updated
- keyword substitutions.
- Monkeypatches patch and webcommands.'''
+ '''Sets up repo as kwrepo for keyword substitution.'''
try:
if (not repo.local() or kwtools['hgcmd'] in nokwcommands.split()
@@ -607,7 +745,7 @@
if not inc:
return
- kwtools['templater'] = kwt = kwtemplater(ui, repo, inc, exc)
+ kwt = kwtemplater(ui, repo, inc, exc)
class kwrepo(repo.__class__):
def file(self, f):
@@ -640,121 +778,21 @@
return n
def rollback(self, dryrun=False, force=False):
- wlock = self.wlock()
- origrestrict = kwt.restrict
- try:
- if not dryrun:
- changed = self['.'].files()
- ret = super(kwrepo, self).rollback(dryrun, force)
- if not dryrun:
- ctx = self['.']
- modified, added = _preselect(ctx.status(), changed)
- kwt.restrict = False
- kwt.overwrite(ctx, modified, True, True)
- kwt.overwrite(ctx, added, True, False)
- return ret
- finally:
- kwt.restrict = origrestrict
- wlock.release()
-
- # monkeypatches
- def kwpatchfile_init(orig, self, ui, gp, backend, store, eolmode=None):
- '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
- rejects or conflicts due to expanded keywords in working dir.'''
- orig(self, ui, gp, backend, store, eolmode)
- # shrink keywords read from working dir
- self.lines = kwt.shrinklines(self.fname, self.lines)
-
- def kwdiff(orig, *args, **kwargs):
- '''Monkeypatch patch.diff to avoid expansion.'''
- kwt.restrict = True
- return orig(*args, **kwargs)
-
- def kwweb_skip(orig, web, req, tmpl):
- '''Wraps webcommands.x turning off keyword expansion.'''
- kwt.match = util.never
- return orig(web, req, tmpl)
-
- def kw_amend(orig, ui, repo, commitfunc, old, extra, pats, opts):
- '''Wraps cmdutil.amend expanding keywords after amend.'''
- with repo.wlock():
- kwt.postcommit = True
- newid = orig(ui, repo, commitfunc, old, extra, pats, opts)
- if newid != old.node():
- ctx = repo[newid]
- kwt.restrict = True
- kwt.overwrite(ctx, ctx.files(), False, True)
- kwt.restrict = False
- return newid
+ with self.wlock():
+ origrestrict = kwt.restrict
+ try:
+ if not dryrun:
+ changed = self['.'].files()
+ ret = super(kwrepo, self).rollback(dryrun, force)
+ if not dryrun:
+ ctx = self['.']
+ modified, added = _preselect(ctx.status(), changed)
+ kwt.restrict = False
+ kwt.overwrite(ctx, modified, True, True)
+ kwt.overwrite(ctx, added, True, False)
+ return ret
+ finally:
+ kwt.restrict = origrestrict
- def kw_copy(orig, ui, repo, pats, opts, rename=False):
- '''Wraps cmdutil.copy so that copy/rename destinations do not
- contain expanded keywords.
- Note that the source of a regular file destination may also be a
- symlink:
- hg cp sym x -> x is symlink
- cp sym x; hg cp -A sym x -> x is file (maybe expanded keywords)
- For the latter we have to follow the symlink to find out whether its
- target is configured for expansion and we therefore must unexpand the
- keywords in the destination.'''
- with repo.wlock():
- orig(ui, repo, pats, opts, rename)
- if opts.get('dry_run'):
- return
- wctx = repo[None]
- cwd = repo.getcwd()
-
- def haskwsource(dest):
- '''Returns true if dest is a regular file and configured for
- expansion or a symlink which points to a file configured for
- expansion. '''
- source = repo.dirstate.copied(dest)
- if 'l' in wctx.flags(source):
- source = pathutil.canonpath(repo.root, cwd,
- os.path.realpath(source))
- return kwt.match(source)
-
- candidates = [f for f in repo.dirstate.copies() if
- 'l' not in wctx.flags(f) and haskwsource(f)]
- kwt.overwrite(wctx, candidates, False, False)
-
- def kw_dorecord(orig, ui, repo, commitfunc, *pats, **opts):
- '''Wraps record.dorecord expanding keywords after recording.'''
- with repo.wlock():
- # record returns 0 even when nothing has changed
- # therefore compare nodes before and after
- kwt.postcommit = True
- ctx = repo['.']
- wstatus = ctx.status()
- ret = orig(ui, repo, commitfunc, *pats, **opts)
- recctx = repo['.']
- if ctx != recctx:
- modified, added = _preselect(wstatus, recctx.files())
- kwt.restrict = False
- kwt.overwrite(recctx, modified, False, True)
- kwt.overwrite(recctx, added, False, True, True)
- kwt.restrict = True
- return ret
-
- def kwfilectx_cmp(orig, self, fctx):
- if fctx._customcmp:
- return fctx.cmp(self)
- # keyword affects data size, comparing wdir and filelog size does
- # not make sense
- if (fctx._filenode is None and
- (self._repo._encodefilterpats or
- kwt.match(fctx.path()) and 'l' not in fctx.flags() or
- self.size() - 4 == fctx.size()) or
- self.size() == fctx.size()):
- return self._filelog.cmp(self._filenode, fctx.data())
- return True
-
- extensions.wrapfunction(context.filectx, 'cmp', kwfilectx_cmp)
- extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init)
- extensions.wrapfunction(patch, 'diff', kwdiff)
- extensions.wrapfunction(cmdutil, 'amend', kw_amend)
- extensions.wrapfunction(cmdutil, 'copy', kw_copy)
- extensions.wrapfunction(cmdutil, 'dorecord', kw_dorecord)
- for c in 'annotate changeset rev filediff diff'.split():
- extensions.wrapfunction(webcommands, c, kwweb_skip)
repo.__class__ = kwrepo
+ repo._keywordkwt = kwt
--- a/hgext/largefiles/__init__.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/largefiles/__init__.py Wed Jul 19 07:51:41 2017 -0500
@@ -129,7 +129,7 @@
def featuresetup(ui, supported):
# don't die on seeing a repo with the largefiles requirement
- supported |= set(['largefiles'])
+ supported |= {'largefiles'}
def uisetup(ui):
localrepo.localrepository.featuresetupfuncs.add(featuresetup)
--- a/hgext/largefiles/lfcommands.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/largefiles/lfcommands.py Wed Jul 19 07:51:41 2017 -0500
@@ -18,13 +18,13 @@
from mercurial import (
cmdutil,
- commands,
context,
error,
hg,
lock,
match as matchmod,
node,
+ registrar,
scmutil,
util,
)
@@ -44,7 +44,7 @@
# -- Commands ----------------------------------------------------------
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
@command('lfconvert',
[('s', 'size', '',
@@ -541,7 +541,7 @@
@command('lfpull',
[('r', 'rev', [], _('pull largefiles for these revisions'))
- ] + commands.remoteopts,
+ ] + cmdutil.remoteopts,
_('-r REV... [-e CMD] [--remotecmd CMD] [SOURCE]'))
def lfpull(ui, repo, source="default", **opts):
"""pull largefiles for the specified revisions from the specified source
--- a/hgext/largefiles/lfutil.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/largefiles/lfutil.py Wed Jul 19 07:51:41 2017 -0500
@@ -26,6 +26,7 @@
node,
pycompat,
scmutil,
+ sparse,
util,
vfs as vfsmod,
)
@@ -57,10 +58,9 @@
util.oslink(src, dest)
except OSError:
# if hardlinks fail, fallback on atomic copy
- with open(src, 'rb') as srcf:
- with util.atomictempfile(dest) as dstf:
- for chunk in util.filechunkiter(srcf):
- dstf.write(chunk)
+ with open(src, 'rb') as srcf, util.atomictempfile(dest) as dstf:
+ for chunk in util.filechunkiter(srcf):
+ dstf.write(chunk)
os.chmod(dest, os.stat(src).st_mode)
def usercachepath(ui, hash):
@@ -147,7 +147,8 @@
lfstoredir = longname
opener = vfsmod.vfs(vfs.join(lfstoredir))
lfdirstate = largefilesdirstate(opener, ui, repo.root,
- repo.dirstate._validate)
+ repo.dirstate._validate,
+ lambda: sparse.matcher(repo))
# If the largefiles dirstate does not exist, populate and create
# it. This ensures that we create it on the first meaningful
@@ -234,10 +235,9 @@
wvfs.makedirs(wvfs.dirname(wvfs.join(filename)))
# The write may fail before the file is fully written, but we
# don't use atomic writes in the working copy.
- with open(path, 'rb') as srcfd:
- with wvfs(filename, 'wb') as destfd:
- gothash = copyandhash(
- util.filechunkiter(srcfd), destfd)
+ with open(path, 'rb') as srcfd, wvfs(filename, 'wb') as destfd:
+ gothash = copyandhash(
+ util.filechunkiter(srcfd), destfd)
if gothash != hash:
repo.ui.warn(_('%s: data corruption in %s with hash %s\n')
% (filename, path, gothash))
@@ -296,8 +296,6 @@
if not pats:
pats = [wvfs.join(standindir)]
match = scmutil.match(repo[None], pats, badfn=badfn)
- # if pats is empty, it would incorrectly always match, so clear _always
- match._always = False
else:
# no patterns: relative to repo root
match = scmutil.match(repo[None], [wvfs.join(standindir)], badfn=badfn)
--- a/hgext/largefiles/overrides.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/largefiles/overrides.py Wed Jul 19 07:51:41 2017 -0500
@@ -41,8 +41,8 @@
m = copy.copy(match)
lfile = lambda f: lfutil.standin(f) in manifest
m._files = filter(lfile, m._files)
- m._fileroots = set(m._files)
- m._always = False
+ m._fileset = set(m._files)
+ m.always = lambda: False
origmatchfn = m.matchfn
m.matchfn = lambda f: lfile(f) and origmatchfn(f)
return m
@@ -56,8 +56,8 @@
notlfile = lambda f: not (lfutil.isstandin(f) or lfutil.standin(f) in
manifest or f in excluded)
m._files = filter(notlfile, m._files)
- m._fileroots = set(m._files)
- m._always = False
+ m._fileset = set(m._files)
+ m.always = lambda: False
origmatchfn = m.matchfn
m.matchfn = lambda f: notlfile(f) and origmatchfn(f)
return m
@@ -105,9 +105,9 @@
scmutil.matchandpats)
def addlargefiles(ui, repo, isaddremove, matcher, **opts):
- large = opts.get('large')
+ large = opts.get(r'large')
lfsize = lfutil.getminsize(
- ui, lfutil.islfilesrepo(repo), opts.get('lfsize'))
+ ui, lfutil.islfilesrepo(repo), opts.get(r'lfsize'))
lfmatcher = None
if lfutil.islfilesrepo(repo):
@@ -119,7 +119,7 @@
m = matcher
wctx = repo[None]
- for f in repo.walk(matchmod.badmatch(m, lambda x, y: None)):
+ for f in wctx.walk(matchmod.badmatch(m, lambda x, y: None)):
exact = m.exact(f)
lfile = lfutil.standin(f) in wctx
nfile = f in wctx
@@ -258,7 +258,7 @@
def cmdutiladd(orig, ui, repo, matcher, prefix, explicitonly, **opts):
# The --normal flag short circuits this override
- if opts.get('normal'):
+ if opts.get(r'normal'):
return orig(ui, repo, matcher, prefix, explicitonly, **opts)
ladded, lbad = addlargefiles(ui, repo, False, matcher, **opts)
@@ -289,10 +289,10 @@
finally:
repo.lfstatus = False
-def overridedirty(orig, repo, ignoreupdate=False):
+def overridedirty(orig, repo, ignoreupdate=False, missing=False):
try:
repo._repo.lfstatus = True
- return orig(repo, ignoreupdate)
+ return orig(repo, ignoreupdate=ignoreupdate, missing=missing)
finally:
repo._repo.lfstatus = False
@@ -347,14 +347,12 @@
else:
f = m._cwd + '/' + f
return back + lfutil.standin(f)
-
- pats.update(fixpats(f, tostandin) for f in p)
else:
def tostandin(f):
if lfutil.isstandin(f):
return f
return lfutil.standin(f)
- pats.update(fixpats(f, tostandin) for f in p)
+ pats.update(fixpats(f, tostandin) for f in p)
for i in range(0, len(m._files)):
# Don't add '.hglf' to m.files, since that is already covered by '.'
@@ -370,8 +368,8 @@
elif m._files[i] not in ctx and repo.wvfs.isdir(standin):
m._files.append(standin)
- m._fileroots = set(m._files)
- m._always = False
+ m._fileset = set(m._files)
+ m.always = lambda: False
origmatchfn = m.matchfn
def lfmatchfn(f):
lf = lfutil.splitstandin(f)
@@ -381,7 +379,7 @@
return r
m.matchfn = lfmatchfn
- ui.debug('updated patterns: %s\n' % sorted(pats))
+ ui.debug('updated patterns: %s\n' % ', '.join(sorted(pats)))
return m, pats
# For hg log --patch, the match object is used in two different senses:
@@ -646,7 +644,7 @@
m = copy.copy(match)
lfile = lambda f: lfutil.standin(f) in manifest
m._files = [lfutil.standin(f) for f in m._files if lfile(f)]
- m._fileroots = set(m._files)
+ m._fileset = set(m._files)
origmatchfn = m.matchfn
def matchfn(f):
lfile = lfutil.splitstandin(f)
@@ -769,7 +767,7 @@
else:
matchfiles.append(f)
m._files = matchfiles
- m._fileroots = set(m._files)
+ m._fileset = set(m._files)
origmatchfn = m.matchfn
def matchfn(f):
lfile = lfutil.splitstandin(f)
@@ -975,7 +973,7 @@
archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])
- if repo.ui.configbool("ui", "archivemeta", True):
+ if repo.ui.configbool("ui", "archivemeta"):
write('.hg_archival.txt', 0o644, False,
lambda: archival.buildmetadata(ctx))
--- a/hgext/largefiles/uisetup.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/largefiles/uisetup.py Wed Jul 19 07:51:41 2017 -0500
@@ -21,7 +21,6 @@
cmdutil,
commands,
copies,
- debugcommands,
exchange,
extensions,
filemerge,
@@ -40,11 +39,6 @@
)
def uisetup(ui):
- # TODO: debugcommands should use a separate command table
- # Side-effect of accessing is debugcommands module is guaranteed to be
- # imported and commands.table is populated.
- debugcommands.command
-
# Disable auto-status for some commands which assume that all
# files in the result are under Mercurial's control
--- a/hgext/mq.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/mq.py Wed Jul 19 07:51:41 2017 -0500
@@ -101,7 +101,7 @@
seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -932,14 +932,13 @@
merged.append(f)
else:
removed.append(f)
- repo.dirstate.beginparentchange()
- for f in removed:
- repo.dirstate.remove(f)
- for f in merged:
- repo.dirstate.merge(f)
- p1, p2 = repo.dirstate.parents()
- repo.setparents(p1, merge)
- repo.dirstate.endparentchange()
+ with repo.dirstate.parentchange():
+ for f in removed:
+ repo.dirstate.remove(f)
+ for f in merged:
+ repo.dirstate.merge(f)
+ p1, p2 = repo.dirstate.parents()
+ repo.setparents(p1, merge)
if all_files and '.hgsubstate' in all_files:
wctx = repo[None]
@@ -1580,16 +1579,15 @@
if keepchanges and tobackup:
raise error.Abort(_("local changes found, qrefresh first"))
self.backup(repo, tobackup)
- repo.dirstate.beginparentchange()
- for f in a:
- repo.wvfs.unlinkpath(f, ignoremissing=True)
- repo.dirstate.drop(f)
- for f in m + r:
- fctx = ctx[f]
- repo.wwrite(f, fctx.data(), fctx.flags())
- repo.dirstate.normal(f)
- repo.setparents(qp, nullid)
- repo.dirstate.endparentchange()
+ with repo.dirstate.parentchange():
+ for f in a:
+ repo.wvfs.unlinkpath(f, ignoremissing=True)
+ repo.dirstate.drop(f)
+ for f in m + r:
+ fctx = ctx[f]
+ repo.wwrite(f, fctx.data(), fctx.flags())
+ repo.dirstate.normal(f)
+ repo.setparents(qp, nullid)
for patch in reversed(self.applied[start:end]):
self.ui.status(_("popping %s\n") % patch.name)
del self.applied[start:end]
@@ -1836,9 +1834,7 @@
patchf.close()
marks = repo._bookmarks
- for bm in bmlist:
- marks[bm] = n
- marks.recordchange(tr)
+ marks.applychanges(repo, tr, [(bm, n) for bm in bmlist])
tr.close()
self.applied.append(statusentry(n, patchfn))
@@ -2409,7 +2405,7 @@
_('use uncompressed transfer (fast over LAN)')),
('p', 'patches', '',
_('location of source patch repository'), _('REPO')),
- ] + commands.remoteopts,
+ ] + cmdutil.remoteopts,
_('hg qclone [OPTION]... SOURCE [DEST]'),
norepo=True)
def clone(ui, source, dest=None, **opts):
@@ -2577,7 +2573,7 @@
('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
('d', 'date', '',
_('add "Date: <DATE>" to patch'), _('DATE'))
- ] + commands.walkopts + commands.commitopts,
+ ] + cmdutil.walkopts + cmdutil.commitopts,
_('hg qnew [-e] [-m TEXT] [-l FILE] PATCH [FILE]...'),
inferrepo=True)
def new(ui, repo, patch, *args, **opts):
@@ -2626,7 +2622,7 @@
_('add/update date field in patch with current date')),
('d', 'date', '',
_('add/update date field in patch with given date'), _('DATE'))
- ] + commands.walkopts + commands.commitopts,
+ ] + cmdutil.walkopts + cmdutil.commitopts,
_('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...'),
inferrepo=True)
def refresh(ui, repo, *pats, **opts):
@@ -2659,7 +2655,7 @@
return ret
@command("^qdiff",
- commands.diffopts + commands.diffopts2 + commands.walkopts,
+ cmdutil.diffopts + cmdutil.diffopts2 + cmdutil.walkopts,
_('hg qdiff [OPTION]... [FILE]...'),
inferrepo=True)
def diff(ui, repo, *pats, **opts):
@@ -2684,7 +2680,7 @@
@command('qfold',
[('e', 'edit', None, _('invoke editor on commit messages')),
('k', 'keep', None, _('keep folded patch files')),
- ] + commands.commitopts,
+ ] + cmdutil.commitopts,
_('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...'))
def fold(ui, repo, *files, **opts):
"""fold the named patches into the current patch
@@ -3046,7 +3042,7 @@
('n', 'name', '',
_('copy directory name'), _('NAME')),
('e', 'empty', None, _('clear queue status file')),
- ('f', 'force', None, _('force copy'))] + commands.commitopts,
+ ('f', 'force', None, _('force copy'))] + cmdutil.commitopts,
_('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'))
def save(ui, repo, **opts):
"""save current queue state (DEPRECATED)
@@ -3540,7 +3536,7 @@
"""Add --mq option to operate on patch repository instead of main"""
# some commands do not like getting unknown options
- mq = kwargs.pop('mq', None)
+ mq = kwargs.pop(r'mq', None)
if not mq:
return orig(ui, repo, *args, **kwargs)
--- a/hgext/notify.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/notify.py Wed Jul 19 07:51:41 2017 -0500
@@ -203,8 +203,9 @@
mapfile = self.ui.config('notify', 'style')
if not mapfile and not template:
template = deftemplates.get(hooktype) or single_template
- self.t = cmdutil.changeset_templater(self.ui, self.repo, False, None,
- template, mapfile, False)
+ spec = cmdutil.logtemplatespec(template, mapfile)
+ self.t = cmdutil.changeset_templater(self.ui, self.repo, spec,
+ False, None, False)
def strip(self, path):
'''strip leading slashes from local path, turn into web-safe path.'''
--- a/hgext/patchbomb.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/patchbomb.py Wed Jul 19 07:51:41 2017 -0500
@@ -89,6 +89,8 @@
mail,
node as nodemod,
patch,
+ registrar,
+ repair,
scmutil,
templater,
util,
@@ -96,7 +98,7 @@
stringio = util.stringio
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -110,16 +112,20 @@
# experimental config: patchbomb.publicurl
# waiting for some logic that check that the changeset are available on the
# destination before patchbombing anything.
- pullurl = repo.ui.config('patchbomb', 'publicurl')
- if pullurl is not None:
+ publicurl = repo.ui.config('patchbomb', 'publicurl')
+ if publicurl:
return ('Available At %s\n'
- '# hg pull %s -r %s' % (pullurl, pullurl, ctx))
+ '# hg pull %s -r %s' % (publicurl, publicurl, ctx))
return None
def uisetup(ui):
cmdutil.extraexport.append('pullurl')
cmdutil.extraexportmap['pullurl'] = _addpullheader
+def reposetup(ui, repo):
+ if not repo.local():
+ return
+ repo._wlockfreeprefix.add('last-email.txt')
def prompt(ui, prompt, default=None, rest=':'):
if default:
@@ -441,6 +447,7 @@
('o', 'outgoing', None,
_('send changes not found in the target repository')),
('b', 'bundle', None, _('send changes not in target as a binary bundle')),
+ ('B', 'bookmark', '', _('send changes only reachable by given bookmark')),
('', 'bundlename', 'bundle',
_('name of the bundle attachment file'), _('NAME')),
('r', 'rev', [], _('a revision to send'), _('REV')),
@@ -449,7 +456,7 @@
('', 'base', [], _('a base changeset to specify instead of a destination '
'(with -b/--bundle)'), _('REV')),
('', 'intro', None, _('send an introduction email for a single patch')),
- ] + emailopts + commands.remoteopts,
+ ] + emailopts + cmdutil.remoteopts,
_('hg email [OPTION]... [DEST]...'))
def email(ui, repo, *revs, **opts):
'''send changesets by email
@@ -479,6 +486,9 @@
body and as a regular or an inline attachment by combining the
-a/--attach or -i/--inline with the --body option.
+ With -B/--bookmark changesets reachable by the given bookmark are
+ selected.
+
With -o/--outgoing, emails will be generated for patches not found
in the destination repository (or only those which are ancestors
of the specified revisions if any are provided)
@@ -517,6 +527,8 @@
hg email -o -r 3000 # send all ancestors of 3000 not in default
hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
+ hg email -B feature # send all ancestors of feature bookmark
+
hg email -b # send bundle of all patches not in default
hg email -b DEST # send bundle of all patches not in DEST
hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
@@ -539,17 +551,20 @@
mbox = opts.get('mbox')
outgoing = opts.get('outgoing')
rev = opts.get('rev')
+ bookmark = opts.get('bookmark')
if not (opts.get('test') or mbox):
# really sending
mail.validateconfig(ui)
- if not (revs or rev or outgoing or bundle):
- raise error.Abort(_('specify at least one changeset with -r or -o'))
+ if not (revs or rev or outgoing or bundle or bookmark):
+ raise error.Abort(_('specify at least one changeset with -B, -r or -o'))
if outgoing and bundle:
raise error.Abort(_("--outgoing mode always on with --bundle;"
" do not re-specify --outgoing"))
+ if rev and bookmark:
+ raise error.Abort(_("-r and -B are mutually exclusive"))
if outgoing or bundle:
if len(revs) > 1:
@@ -564,6 +579,10 @@
if revs:
raise error.Abort(_('use only one form to specify the revision'))
revs = rev
+ elif bookmark:
+ if bookmark not in repo._bookmarks:
+ raise error.Abort(_("bookmark '%s' not found") % bookmark)
+ revs = repair.stripbmrevset(repo, bookmark)
revs = scmutil.revrange(repo, revs)
if outgoing:
@@ -573,7 +592,7 @@
# check if revision exist on the public destination
publicurl = repo.ui.config('patchbomb', 'publicurl')
- if publicurl is not None:
+ if publicurl:
repo.ui.debug('checking that revision exist in the public repo')
try:
publicpeer = hg.peer(repo, {}, publicurl)
@@ -645,15 +664,17 @@
if addr:
showaddrs.append('%s: %s' % (header, addr))
return mail.addrlistencode(ui, [addr], _charsets, opts.get('test'))
- else:
- return default
+ elif default:
+ return mail.addrlistencode(
+ ui, [default], _charsets, opts.get('test'))
+ return []
to = getaddrs('To', ask=True)
if not to:
# we can get here in non-interactive mode
raise error.Abort(_('no recipient addresses provided'))
- cc = getaddrs('Cc', ask=True, default='') or []
- bcc = getaddrs('Bcc') or []
+ cc = getaddrs('Cc', ask=True, default='')
+ bcc = getaddrs('Bcc')
replyto = getaddrs('Reply-To')
confirm = ui.configbool('patchbomb', 'confirm')
--- a/hgext/purge.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/purge.py Wed Jul 19 07:51:41 2017 -0500
@@ -30,14 +30,14 @@
from mercurial.i18n import _
from mercurial import (
cmdutil,
- commands,
error,
+ registrar,
scmutil,
util,
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -52,7 +52,7 @@
('p', 'print', None, _('print filenames instead of deleting them')),
('0', 'print0', None, _('end filenames with NUL, for use with xargs'
' (implies -p/--print)')),
- ] + commands.walkopts,
+ ] + cmdutil.walkopts,
_('hg purge [OPTION]... [DIR]...'))
def purge(ui, repo, *dirs, **opts):
'''removes files not tracked by Mercurial
--- a/hgext/rebase.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/rebase.py Wed Jul 19 07:51:41 2017 -0500
@@ -40,6 +40,7 @@
merge as mergemod,
mergeutil,
obsolete,
+ obsutil,
patch,
phases,
registrar,
@@ -52,7 +53,7 @@
)
release = lock.release
-templateopts = commands.templateopts
+templateopts = cmdutil.templateopts
# The following constants are used throughout the rebase module. The ordering of
# their values must be maintained.
@@ -68,7 +69,7 @@
revskipped = (revignored, revprecursor, revpruned)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -138,10 +139,9 @@
# dict will be what contains most of the rebase progress state.
self.state = {}
self.activebookmark = None
- self.currentbookmarks = None
- self.target = None
+ self.dest = None
self.skipped = set()
- self.targetancestors = set()
+ self.destancestors = set()
self.collapsef = opts.get('collapse', False)
self.collapsemsg = cmdutil.logmessage(ui, opts)
@@ -171,7 +171,7 @@
def _writestatus(self, f):
repo = self.repo.unfiltered()
f.write(repo[self.originalwd].hex() + '\n')
- f.write(repo[self.target].hex() + '\n')
+ f.write(repo[self.dest].hex() + '\n')
f.write(repo[self.external].hex() + '\n')
f.write('%d\n' % int(self.collapsef))
f.write('%d\n' % int(self.keepf))
@@ -194,7 +194,7 @@
"""Restore a previously stored status"""
repo = self.repo
keepbranches = None
- target = None
+ dest = None
collapse = False
external = nullrev
activebookmark = None
@@ -206,7 +206,7 @@
if i == 0:
originalwd = repo[l].rev()
elif i == 1:
- target = repo[l].rev()
+ dest = repo[l].rev()
elif i == 2:
external = repo[l].rev()
elif i == 3:
@@ -242,7 +242,7 @@
skipped = set()
# recompute the set of skipped revs
if not collapse:
- seen = set([target])
+ seen = {dest}
for old, new in sorted(state.items()):
if new != revtodo and new in seen:
skipped.add(old)
@@ -250,10 +250,10 @@
repo.ui.debug('computed skipped revs: %s\n' %
(' '.join(str(r) for r in sorted(skipped)) or None))
repo.ui.debug('rebase status resumed\n')
- _setrebasesetvisibility(repo, set(state.keys()) | set([originalwd]))
+ _setrebasesetvisibility(repo, set(state.keys()) | {originalwd})
self.originalwd = originalwd
- self.target = target
+ self.dest = dest
self.state = state
self.skipped = skipped
self.collapsef = collapse
@@ -262,12 +262,12 @@
self.external = external
self.activebookmark = activebookmark
- def _handleskippingobsolete(self, rebaserevs, obsoleterevs, target):
+ def _handleskippingobsolete(self, rebaserevs, obsoleterevs, dest):
"""Compute structures necessary for skipping obsolete revisions
rebaserevs: iterable of all revisions that are to be rebased
obsoleterevs: iterable of all obsolete revisions in rebaseset
- target: a destination revision for the rebase operation
+ dest: a destination revision for the rebase operation
"""
self.obsoletenotrebased = {}
if not self.ui.configbool('experimental', 'rebaseskipobsolete',
@@ -276,7 +276,7 @@
rebaseset = set(rebaserevs)
obsoleteset = set(obsoleterevs)
self.obsoletenotrebased = _computeobsoletenotrebased(self.repo,
- obsoleteset, target)
+ obsoleteset, dest)
skippedset = set(self.obsoletenotrebased)
_checkobsrebase(self.repo, self.ui, obsoleteset, rebaseset, skippedset)
@@ -296,11 +296,11 @@
hint = _('use "hg rebase --abort" to clear broken state')
raise error.Abort(msg, hint=hint)
if isabort:
- return abort(self.repo, self.originalwd, self.target,
+ return abort(self.repo, self.originalwd, self.dest,
self.state, activebookmark=self.activebookmark)
obsrevs = (r for r, st in self.state.items() if st == revprecursor)
- self._handleskippingobsolete(self.state.keys(), obsrevs, self.target)
+ self._handleskippingobsolete(self.state.keys(), obsrevs, self.dest)
def _preparenewrebase(self, dest, rebaseset):
if dest is None:
@@ -316,7 +316,7 @@
hint=_('use --keep to keep original changesets'))
obsrevs = _filterobsoleterevs(self.repo, set(rebaseset))
- self._handleskippingobsolete(rebaseset, obsrevs, dest)
+ self._handleskippingobsolete(rebaseset, obsrevs, dest.rev())
result = buildstate(self.repo, dest, rebaseset, self.collapsef,
self.obsoletenotrebased)
@@ -332,18 +332,18 @@
% root,
hint=_("see 'hg help phases' for details"))
- (self.originalwd, self.target, self.state) = result
+ (self.originalwd, self.dest, self.state) = result
if self.collapsef:
- self.targetancestors = self.repo.changelog.ancestors(
- [self.target],
+ self.destancestors = self.repo.changelog.ancestors(
+ [self.dest],
inclusive=True)
self.external = externalparent(self.repo, self.state,
- self.targetancestors)
+ self.destancestors)
if dest.closesbranch() and not self.keepbranchesf:
self.ui.status(_('reopening closed branch head %s\n') % dest)
- def _performrebase(self):
+ def _performrebase(self, tr):
repo, ui, opts = self.repo, self.ui, self.opts
if self.keepbranchesf:
# insert _savebranch at the start of extrafns so if
@@ -359,12 +359,11 @@
'branches'))
# Rebase
- if not self.targetancestors:
- self.targetancestors = repo.changelog.ancestors([self.target],
- inclusive=True)
+ if not self.destancestors:
+ self.destancestors = repo.changelog.ancestors([self.dest],
+ inclusive=True)
- # Keep track of the current bookmarks in order to reset them later
- self.currentbookmarks = repo._bookmarks.copy()
+ # Keep track of the active bookmarks in order to reset them later
self.activebookmark = self.activebookmark or repo._activebookmark
if self.activebookmark:
bookmarks.deactivate(repo)
@@ -384,16 +383,18 @@
names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
if names:
desc += ' (%s)' % ' '.join(names)
- if self.state[rev] == revtodo:
+ if self.state[rev] == rev:
+ ui.status(_('already rebased %s\n') % desc)
+ elif self.state[rev] == revtodo:
pos += 1
ui.status(_('rebasing %s\n') % desc)
ui.progress(_("rebasing"), pos, ("%d:%s" % (rev, ctx)),
_('changesets'), total)
- p1, p2, base = defineparents(repo, rev, self.target,
+ p1, p2, base = defineparents(repo, rev, self.dest,
self.state,
- self.targetancestors,
+ self.destancestors,
self.obsoletenotrebased)
- self.storestatus()
+ self.storestatus(tr=tr)
storecollapsemsg(repo, self.collapsemsg)
if len(repo[None].parents()) == 2:
repo.ui.debug('resuming interrupted rebase\n')
@@ -402,7 +403,7 @@
ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
'rebase')
stats = rebasenode(repo, rev, p1, base, self.state,
- self.collapsef, self.target)
+ self.collapsef, self.dest)
if stats and stats[3] > 0:
raise error.InterventionRequired(
_('unresolved conflicts (see hg '
@@ -418,11 +419,14 @@
editor=editor,
keepbranches=self.keepbranchesf,
date=self.date)
+ if newnode is None:
+ # If it ended up being a no-op commit, then the normal
+ # merge state clean-up path doesn't happen, so do it
+ # here. Fix issue5494
+ mergemod.mergestate.clean(repo)
else:
# Skip commit if we are collapsing
- repo.dirstate.beginparentchange()
repo.setparents(repo[p1].node())
- repo.dirstate.endparentchange()
newnode = None
# Update the state
if newnode is not None:
@@ -440,11 +444,11 @@
elif self.state[rev] == revignored:
ui.status(_('not rebasing ignored %s\n') % desc)
elif self.state[rev] == revprecursor:
- targetctx = repo[self.obsoletenotrebased[rev]]
- desctarget = '%d:%s "%s"' % (targetctx.rev(), targetctx,
- targetctx.description().split('\n', 1)[0])
+ destctx = repo[self.obsoletenotrebased[rev]]
+ descdest = '%d:%s "%s"' % (destctx.rev(), destctx,
+ destctx.description().split('\n', 1)[0])
msg = _('note: not rebasing %s, already in destination as %s\n')
- ui.status(msg % (desc, desctarget))
+ ui.status(msg % (desc, descdest))
elif self.state[rev] == revpruned:
msg = _('note: not rebasing %s, it has no successor\n')
ui.status(msg % desc)
@@ -459,8 +463,8 @@
repo, ui, opts = self.repo, self.ui, self.opts
if self.collapsef and not self.keepopen:
p1, p2, _base = defineparents(repo, min(self.state),
- self.target, self.state,
- self.targetancestors,
+ self.dest, self.state,
+ self.destancestors,
self.obsoletenotrebased)
editopt = opts.get('edit')
editform = 'rebase.collapse'
@@ -482,7 +486,7 @@
keepbranches=self.keepbranchesf,
date=self.date)
if newnode is None:
- newrev = self.target
+ newrev = self.dest
else:
newrev = repo[newnode].rev()
for oldrev in self.state.iterkeys():
@@ -492,19 +496,6 @@
if 'qtip' in repo.tags():
updatemq(repo, self.state, self.skipped, **opts)
- if self.currentbookmarks:
- # Nodeids are needed to reset bookmarks
- nstate = {}
- for k, v in self.state.iteritems():
- if v > nullmerge:
- nstate[repo[k].node()] = repo[v].node()
- elif v == revprecursor:
- succ = self.obsoletenotrebased[k]
- nstate[repo[k].node()] = repo[succ].node()
- # XXX this is the same as dest.node() for the non-continue path --
- # this should probably be cleaned up
- targetnode = repo[self.target].node()
-
# restore original working directory
# (we do this before stripping)
newwd = self.state.get(self.originalwd, self.originalwd)
@@ -517,14 +508,6 @@
ui.note(_("update back to initial working directory parent\n"))
hg.updaterepo(repo, newwd, False)
- if self.currentbookmarks:
- with repo.transaction('bookmark') as tr:
- updatebookmarks(repo, targetnode, nstate,
- self.currentbookmarks, tr)
- if self.activebookmark not in repo._bookmarks:
- # active bookmark was divergent one and has been deleted
- self.activebookmark = None
-
if not self.keepf:
collapsedas = None
if self.collapsef:
@@ -540,7 +523,7 @@
skippedlen = len(self.skipped)
ui.note(_("%d revisions have been skipped\n") % skippedlen)
- if (self.activebookmark and
+ if (self.activebookmark and self.activebookmark in repo._bookmarks and
repo['.'].node() == repo._bookmarks[self.activebookmark]):
bookmarks.activate(repo, self.activebookmark)
@@ -658,6 +641,15 @@
[commands]
rebase.requiredest = True
+ By default, rebase will close the transaction after each commit. For
+ performance purposes, you can configure rebase to use a single transaction
+ across the entire rebase. WARNING: This setting introduces a significant
+ risk of losing the work you've done in a rebase if the rebase aborts
+ unexpectedly::
+
+ [rebase]
+ singletransaction = True
+
Return Values:
Returns 0 on success, 1 if nothing to rebase or there are
@@ -666,11 +658,7 @@
"""
rbsrt = rebaseruntime(repo, ui, opts)
- lock = wlock = None
- try:
- wlock = repo.wlock()
- lock = repo.lock()
-
+ with repo.wlock(), repo.lock():
# Validate input and define rebasing points
destf = opts.get('dest', None)
srcf = opts.get('source', None)
@@ -721,10 +709,13 @@
if retcode is not None:
return retcode
- rbsrt._performrebase()
+ tr = None
+ if ui.configbool('rebase', 'singletransaction'):
+ tr = repo.transaction('rebase')
+ with util.acceptintervention(tr):
+ rbsrt._performrebase(tr)
+
rbsrt._finishrebase()
- finally:
- release(lock, wlock)
def _definesets(ui, repo, destf=None, srcf=None, basef=None, revf=None,
destspace=None):
@@ -821,9 +812,9 @@
return dest, rebaseset
-def externalparent(repo, state, targetancestors):
+def externalparent(repo, state, destancestors):
"""Return the revision that should be used as the second parent
- when the revisions in state is collapsed on top of targetancestors.
+ when the revisions in state are collapsed on top of destancestors.
Abort if there is more than one parent.
"""
parents = set()
@@ -833,7 +824,7 @@
continue
for p in repo[rev].parents():
if (p.rev() not in state
- and p.rev() not in targetancestors):
+ and p.rev() not in destancestors):
parents.add(p.rev())
if not parents:
return nullrev
@@ -841,7 +832,7 @@
return parents.pop()
raise error.Abort(_('unable to collapse on top of %s, there is more '
'than one external parent: %s') %
- (max(targetancestors),
+ (max(destancestors),
', '.join(str(p) for p in sorted(parents))))
def concludenode(repo, rev, p1, p2, commitmsg=None, editor=None, extrafn=None,
@@ -860,8 +851,8 @@
if extrafn:
extrafn(ctx, extra)
- targetphase = max(ctx.phase(), phases.draft)
- overrides = {('phases', 'new-commit'): targetphase}
+ destphase = max(ctx.phase(), phases.draft)
+ overrides = {('phases', 'new-commit'): destphase}
with repo.ui.configoverride(overrides, 'rebase'):
if keepbranch:
repo.ui.setconfig('ui', 'allowemptycommit', True)
@@ -877,15 +868,15 @@
finally:
release(dsguard)
-def rebasenode(repo, rev, p1, base, state, collapse, target):
+def rebasenode(repo, rev, p1, base, state, collapse, dest):
'Rebase a single revision rev on top of p1 using base as merge ancestor'
# Merge phase
- # Update to target and merge it with local
+ # Update to destination and merge it with local
if repo['.'].rev() != p1:
repo.ui.debug(" update to %d:%s\n" % (p1, repo[p1]))
mergemod.update(repo, p1, False, True)
else:
- repo.ui.debug(" already in target\n")
+ repo.ui.debug(" already in destination\n")
repo.dirstate.write(repo.currenttransaction())
repo.ui.debug(" merge against %d:%s\n" % (rev, repo[rev]))
if base is not None:
@@ -895,7 +886,7 @@
stats = mergemod.update(repo, rev, True, True, base, collapse,
labels=['dest', 'source'])
if collapse:
- copies.duplicatecopies(repo, rev, target)
+ copies.duplicatecopies(repo, rev, dest)
else:
# If we're not using --collapse, we need to
# duplicate copies between the revision we're
@@ -903,7 +894,7 @@
# duplicate any copies that have already been
# performed in the destination.
p1rev = repo[rev].p1().rev()
- copies.duplicatecopies(repo, rev, p1rev, skiprev=target)
+ copies.duplicatecopies(repo, rev, p1rev, skiprev=dest)
return stats
def nearestrebased(repo, rev, state):
@@ -938,7 +929,7 @@
"experimental.allowdivergence=True")
raise error.Abort(msg % (",".join(divhashes),), hint=h)
-def defineparents(repo, rev, target, state, targetancestors,
+def defineparents(repo, rev, dest, state, destancestors,
obsoletenotrebased):
'Return the new parent relationship of the revision that will be rebased'
parents = repo[rev].parents()
@@ -946,26 +937,26 @@
rp1 = None
p1n = parents[0].rev()
- if p1n in targetancestors:
- p1 = target
+ if p1n in destancestors:
+ p1 = dest
elif p1n in state:
if state[p1n] == nullmerge:
- p1 = target
+ p1 = dest
elif state[p1n] in revskipped:
p1 = nearestrebased(repo, p1n, state)
if p1 is None:
- p1 = target
+ p1 = dest
else:
p1 = state[p1n]
else: # p1n external
- p1 = target
+ p1 = dest
p2 = p1n
- if len(parents) == 2 and parents[1].rev() not in targetancestors:
+ if len(parents) == 2 and parents[1].rev() not in destancestors:
p2n = parents[1].rev()
# interesting second parent
if p2n in state:
- if p1 == target: # p1n in targetancestors or external
+ if p1 == dest: # p1n in destancestors or external
p1 = state[p2n]
if p1 == revprecursor:
rp1 = obsoletenotrebased[p2n]
@@ -973,7 +964,7 @@
p2 = nearestrebased(repo, p2n, state)
if p2 is None:
# no ancestors rebased yet, detach
- p2 = target
+ p2 = dest
else:
p2 = state[p2n]
else: # p2n external
@@ -1089,16 +1080,6 @@
mq.seriesdirty = True
mq.savedirty()
-def updatebookmarks(repo, targetnode, nstate, originalbookmarks, tr):
- 'Move bookmarks to their correct changesets, and delete divergent ones'
- marks = repo._bookmarks
- for k, v in originalbookmarks.iteritems():
- if v in nstate:
- # update the bookmarks for revs that have moved
- marks[k] = nstate[v]
- bookmarks.deletedivergent(repo, [targetnode], k)
- marks.recordchange(tr)
-
def storecollapsemsg(repo, collapsemsg):
'Store the collapse message to allow recovery'
collapsemsg = collapsemsg or ''
@@ -1129,6 +1110,10 @@
def clearstatus(repo):
'Remove the status files'
_clearrebasesetvisibiliy(repo)
+ # Make sure the active transaction won't write the state file
+ tr = repo.currenttransaction()
+ if tr:
+ tr.removefilegenerator('rebasestate')
repo.vfs.unlinkpath("rebasestate", ignoremissing=True)
def needupdate(repo, state):
@@ -1148,7 +1133,7 @@
return False
-def abort(repo, originalwd, target, state, activebookmark=None):
+def abort(repo, originalwd, dest, state, activebookmark=None):
'''Restore the repository to its original state. Additional args:
activebookmark: the name of the bookmark that should be active after the
@@ -1156,9 +1141,9 @@
try:
# If the first commits in the rebased set get skipped during the rebase,
- # their values within the state mapping will be the target rev id. The
- # dstates list must must not contain the target rev (issue4896)
- dstates = [s for s in state.values() if s >= 0 and s != target]
+ # their values within the state mapping will be the dest rev id. The
+ # dstates list must not contain the dest rev (issue4896)
+ dstates = [s for s in state.values() if s >= 0 and s != dest]
immutable = [d for d in dstates if not repo[d].mutable()]
cleanup = True
if immutable:
@@ -1171,19 +1156,19 @@
if dstates:
descendants = set(repo.changelog.descendants(dstates))
if descendants - set(dstates):
- repo.ui.warn(_("warning: new changesets detected on target branch, "
- "can't strip\n"))
+ repo.ui.warn(_("warning: new changesets detected on destination "
+ "branch, can't strip\n"))
cleanup = False
if cleanup:
shouldupdate = False
- rebased = filter(lambda x: x >= 0 and x != target, state.values())
+ rebased = filter(lambda x: x >= 0 and x != dest, state.values())
if rebased:
strippoints = [
c.node() for c in repo.set('roots(%ld)', rebased)]
updateifonnodes = set(rebased)
- updateifonnodes.add(target)
+ updateifonnodes.add(dest)
updateifonnodes.add(originalwd)
shouldupdate = repo['.'].rev() in updateifonnodes
@@ -1213,7 +1198,7 @@
rebaseset: set of rev
'''
originalwd = repo['.'].rev()
- _setrebasesetvisibility(repo, set(rebaseset) | set([originalwd]))
+ _setrebasesetvisibility(repo, set(rebaseset) | {originalwd})
# This check isn't strictly necessary, since mq detects commits over an
# applied patch. But it prevents messing up the working directory when
@@ -1226,8 +1211,9 @@
if not roots:
raise error.Abort(_('no matching revisions'))
roots.sort()
- state = {}
+ state = dict.fromkeys(rebaseset, revtodo)
detachset = set()
+ emptyrebase = True
for root in roots:
commonbase = root.ancestor(dest)
if commonbase == root:
@@ -1239,12 +1225,15 @@
samebranch = root.branch() == wctx.branch()
else:
samebranch = root.branch() == dest.branch()
- if not collapse and samebranch and root in dest.children():
+ if not collapse and samebranch and dest in root.parents():
+ # mark the revision as done by setting its new revision
+ # equal to its old (current) revision
+ state[root.rev()] = root.rev()
repo.ui.debug('source is a child of destination\n')
- return None
+ continue
+ emptyrebase = False
repo.ui.debug('rebase onto %s starting from %s\n' % (dest, root))
- state.update(dict.fromkeys(rebaseset, revtodo))
# Rebase tries to turn <dest> into a parent of <root> while
# preserving the number of parents of rebased changesets:
#
@@ -1286,6 +1275,13 @@
# ancestors of <root> not ancestors of <dest>
detachset.update(repo.changelog.findmissingrevs([commonbase.rev()],
[root.rev()]))
+ if emptyrebase:
+ return None
+ for rev in sorted(state):
+ parents = [p for p in repo.changelog.parentrevs(rev) if p != nullrev]
+ # if all parents of this revision are done, then so is this revision
+ if parents and all((state.get(p) == p for p in parents)):
+ state[rev] = rev
for r in detachset:
if r not in state:
state[r] = nullmerge
@@ -1310,33 +1306,18 @@
If `collapsedas` is not None, the rebase was a collapse whose result if the
`collapsedas` node."""
- if obsolete.isenabled(repo, obsolete.createmarkersopt):
- markers = []
- for rev, newrev in sorted(state.items()):
- if newrev >= 0:
- if rev in skipped:
- succs = ()
- elif collapsedas is not None:
- succs = (repo[collapsedas],)
- else:
- succs = (repo[newrev],)
- markers.append((repo[rev], succs))
- if markers:
- obsolete.createmarkers(repo, markers)
- else:
- rebased = [rev for rev in state if state[rev] > nullmerge]
- if rebased:
- stripped = []
- for root in repo.set('roots(%ld)', rebased):
- if set(repo.changelog.descendants([root.rev()])) - set(state):
- ui.warn(_("warning: new changesets detected "
- "on source branch, not stripping\n"))
- else:
- stripped.append(root.node())
- if stripped:
- # backup the old csets by default
- repair.strip(ui, repo, stripped, "all")
-
+ tonode = repo.changelog.node
+ mapping = {}
+ for rev, newrev in sorted(state.items()):
+ if newrev >= 0 and newrev != rev:
+ if rev in skipped:
+ succs = ()
+ elif collapsedas is not None:
+ succs = (collapsedas,)
+ else:
+ succs = (tonode(newrev),)
+ mapping[tonode(rev)] = succs
+ scmutil.cleanupnodes(repo, mapping, 'rebase')
def pullrebase(orig, ui, repo, *args, **opts):
'Call rebase after pull if the latter has been invoked with --rebase'
@@ -1347,10 +1328,7 @@
hint = _('use hg pull followed by hg rebase -d DEST')
raise error.Abort(msg, hint=hint)
- wlock = lock = None
- try:
- wlock = repo.wlock()
- lock = repo.lock()
+ with repo.wlock(), repo.lock():
if opts.get('update'):
del opts['update']
ui.debug('--update and --rebase are not compatible, ignoring '
@@ -1394,8 +1372,6 @@
# not passing argument to get the bare update behavior
# with warning and trumpets
commands.update(ui, repo)
- finally:
- release(lock, wlock)
else:
if opts.get('tool'):
raise error.Abort(_('--tool can only be used with --rebase'))
@@ -1444,7 +1420,7 @@
cl = repo.changelog
for r in rebaseobsrevs:
node = cl.node(r)
- for s in obsolete.allsuccessors(repo.obsstore, [node]):
+ for s in obsutil.allsuccessors(repo.obsstore, [node]):
try:
allsuccessors[cl.rev(s)] = cl.rev(node)
except LookupError:
@@ -1453,7 +1429,7 @@
if allsuccessors:
# Look for successors of obsolete nodes to be rebased among
# the ancestors of dest
- ancs = cl.ancestors([repo[dest].rev()],
+ ancs = cl.ancestors([dest],
stoprev=min(allsuccessors),
inclusive=True)
for s in allsuccessors:
@@ -1499,4 +1475,4 @@
cmdutil.afterresolvedstates.append(
['rebasestate', _('hg rebase --continue')])
# ensure rebased rev are not hidden
- extensions.wrapfunction(repoview, '_getdynamicblockers', _rebasedvisible)
+ extensions.wrapfunction(repoview, 'pinnedrevs', _rebasedvisible)
--- a/hgext/record.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/record.py Wed Jul 19 07:51:41 2017 -0500
@@ -18,10 +18,11 @@
commands,
error,
extensions,
+ registrar,
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -32,7 +33,7 @@
@command("record",
# same options as commit + white space diff options
[c for c in commands.table['^commit|ci'][1][:]
- if c[1] != "interactive"] + commands.diffwsopts,
+ if c[1] != "interactive"] + cmdutil.diffwsopts,
_('hg record [OPTION]... [FILE]...'))
def record(ui, repo, *pats, **opts):
'''interactively select changes to commit
@@ -135,7 +136,7 @@
(qrecord,
# same options as qnew, but copy them so we don't get
# -i/--interactive for qrecord and add white space diff options
- mq.cmdtable['^qnew'][1][:] + commands.diffwsopts,
+ mq.cmdtable['^qnew'][1][:] + cmdutil.diffwsopts,
_('hg qrecord [OPTION]... PATCH [FILE]...'))
_wrapcmd('qnew', mq.cmdtable, qnew, _("interactively record a new patch"))
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/releasenotes.py Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,465 @@
+# Copyright 2017-present Gregory Szorc <gregory.szorc@gmail.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+"""generate release notes from commit messages (EXPERIMENTAL)
+
+It is common to maintain files detailing changes in a project between
+releases. Maintaining these files can be difficult and time consuming.
+The :hg:`releasenotes` command provided by this extension makes the
+process simpler by automating it.
+"""
+
+from __future__ import absolute_import
+
+import errno
+import re
+import sys
+import textwrap
+
+from mercurial.i18n import _
+from mercurial import (
+ config,
+ error,
+ minirst,
+ registrar,
+ scmutil,
+ util,
+)
+
+cmdtable = {}
+command = registrar.command(cmdtable)
+
+# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
+# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
+# be specifying the version(s) of Mercurial they are tested with, or
+# leave the attribute unspecified.
+testedwith = 'ships-with-hg-core'
+
+DEFAULT_SECTIONS = [
+ ('feature', _('New Features')),
+ ('bc', _('Backwards Compatibility Changes')),
+ ('fix', _('Bug Fixes')),
+ ('perf', _('Performance Improvements')),
+ ('api', _('API Changes')),
+]
+
+RE_DIRECTIVE = re.compile('^\.\. ([a-zA-Z0-9_]+)::\s*([^$]+)?$')
+
+BULLET_SECTION = _('Other Changes')
+
+class parsedreleasenotes(object):
+ def __init__(self):
+ self.sections = {}
+
+ def __contains__(self, section):
+ return section in self.sections
+
+ def __iter__(self):
+ return iter(sorted(self.sections))
+
+ def addtitleditem(self, section, title, paragraphs):
+ """Add a titled release note entry."""
+ self.sections.setdefault(section, ([], []))
+ self.sections[section][0].append((title, paragraphs))
+
+ def addnontitleditem(self, section, paragraphs):
+ """Add a non-titled release note entry.
+
+ Will be rendered as a bullet point.
+ """
+ self.sections.setdefault(section, ([], []))
+ self.sections[section][1].append(paragraphs)
+
+ def titledforsection(self, section):
+ """Returns titled entries in a section.
+
+ Returns a list of (title, paragraphs) tuples describing sub-sections.
+ """
+ return self.sections.get(section, ([], []))[0]
+
+ def nontitledforsection(self, section):
+ """Returns non-titled, bulleted paragraphs in a section."""
+ return self.sections.get(section, ([], []))[1]
+
+ def hastitledinsection(self, section, title):
+ return any(t[0] == title for t in self.titledforsection(section))
+
+ def merge(self, ui, other):
+ """Merge another instance into this one.
+
+ This is used to combine multiple sources of release notes together.
+ """
+ for section in other:
+ for title, paragraphs in other.titledforsection(section):
+ if self.hastitledinsection(section, title):
+ # TODO prompt for resolution if different and running in
+ # interactive mode.
+ ui.write(_('%s already exists in %s section; ignoring\n') %
+ (title, section))
+ continue
+
+ # TODO perform similarity comparison and try to match against
+ # existing.
+ self.addtitleditem(section, title, paragraphs)
+
+ for paragraphs in other.nontitledforsection(section):
+ if paragraphs in self.nontitledforsection(section):
+ continue
+
+ # TODO perform similarity comparison and try to match against
+ # existing.
+ self.addnontitleditem(section, paragraphs)
+
+class releasenotessections(object):
+ def __init__(self, ui, repo=None):
+ if repo:
+ sections = util.sortdict(DEFAULT_SECTIONS)
+ custom_sections = getcustomadmonitions(repo)
+ if custom_sections:
+ sections.update(custom_sections)
+ self._sections = list(sections.iteritems())
+ else:
+ self._sections = list(DEFAULT_SECTIONS)
+
+ def __iter__(self):
+ return iter(self._sections)
+
+ def names(self):
+ return [t[0] for t in self._sections]
+
+ def sectionfromtitle(self, title):
+ for name, value in self._sections:
+ if value == title:
+ return name
+
+ return None
+
+def getcustomadmonitions(repo):
+ ctx = repo['.']
+ p = config.config()
+
+ def read(f, sections=None, remap=None):
+ if f in ctx:
+ data = ctx[f].data()
+ p.parse(f, data, sections, remap, read)
+ else:
+ raise error.Abort(_(".hgreleasenotes file \'%s\' not found") %
+ repo.pathto(f))
+
+ if '.hgreleasenotes' in ctx:
+ read('.hgreleasenotes')
+ return p['sections']
+
+def parsenotesfromrevisions(repo, directives, revs):
+ notes = parsedreleasenotes()
+
+ for rev in revs:
+ ctx = repo[rev]
+
+ blocks, pruned = minirst.parse(ctx.description(),
+ admonitions=directives)
+
+ for i, block in enumerate(blocks):
+ if block['type'] != 'admonition':
+ continue
+
+ directive = block['admonitiontitle']
+ title = block['lines'][0].strip() if block['lines'] else None
+
+ if i + 1 == len(blocks):
+ raise error.Abort(_('release notes directive %s lacks content')
+ % directive)
+
+ # Now search ahead and find all paragraphs attached to this
+ # admonition.
+ paragraphs = []
+ for j in range(i + 1, len(blocks)):
+ pblock = blocks[j]
+
+ # Margin blocks may appear between paragraphs. Ignore them.
+ if pblock['type'] == 'margin':
+ continue
+
+ if pblock['type'] != 'paragraph':
+ raise error.Abort(_('unexpected block in release notes '
+ 'directive %s') % directive)
+
+ if pblock['indent'] > 0:
+ paragraphs.append(pblock['lines'])
+ else:
+ break
+
+ # TODO consider using title as paragraph for more concise notes.
+ if not paragraphs:
+ raise error.Abort(_('could not find content for release note '
+ '%s') % directive)
+
+ if title:
+ notes.addtitleditem(directive, title, paragraphs)
+ else:
+ notes.addnontitleditem(directive, paragraphs)
+
+ return notes
+
+def parsereleasenotesfile(sections, text):
+ """Parse text content containing generated release notes."""
+ notes = parsedreleasenotes()
+
+ blocks = minirst.parse(text)[0]
+
+ def gatherparagraphsbullets(offset, title=False):
+ notefragment = []
+
+ for i in range(offset + 1, len(blocks)):
+ block = blocks[i]
+
+ if block['type'] == 'margin':
+ continue
+ elif block['type'] == 'section':
+ break
+ elif block['type'] == 'bullet':
+ if block['indent'] != 0:
+ raise error.Abort(_('indented bullet lists not supported'))
+ if title:
+ lines = [l[1:].strip() for l in block['lines']]
+ notefragment.append(lines)
+ continue
+ else:
+ lines = [[l[1:].strip() for l in block['lines']]]
+
+ for block in blocks[i + 1:]:
+ if block['type'] in ('bullet', 'section'):
+ break
+ if block['type'] == 'paragraph':
+ lines.append(block['lines'])
+ notefragment.append(lines)
+ continue
+ elif block['type'] != 'paragraph':
+ raise error.Abort(_('unexpected block type in release notes: '
+ '%s') % block['type'])
+ if title:
+ notefragment.append(block['lines'])
+
+ return notefragment
+
+ currentsection = None
+ for i, block in enumerate(blocks):
+ if block['type'] != 'section':
+ continue
+
+ title = block['lines'][0]
+
+ # TODO the parsing around paragraphs and bullet points needs some
+ # work.
+ if block['underline'] == '=': # main section
+ name = sections.sectionfromtitle(title)
+ if not name:
+ raise error.Abort(_('unknown release notes section: %s') %
+ title)
+
+ currentsection = name
+ bullet_points = gatherparagraphsbullets(i)
+ if bullet_points:
+ for para in bullet_points:
+ notes.addnontitleditem(currentsection, para)
+
+ elif block['underline'] == '-': # sub-section
+ if title == BULLET_SECTION:
+ bullet_points = gatherparagraphsbullets(i)
+ for para in bullet_points:
+ notes.addnontitleditem(currentsection, para)
+ else:
+ paragraphs = gatherparagraphsbullets(i, True)
+ notes.addtitleditem(currentsection, title, paragraphs)
+ else:
+ raise error.Abort(_('unsupported section type for %s') % title)
+
+ return notes
+
+def serializenotes(sections, notes):
+ """Serialize release notes from parsed fragments and notes.
+
+ This function essentially takes the output of ``parsenotesfromrevisions()``
+ and ``parsereleasenotesfile()`` and produces output combining the two.
+ """
+ lines = []
+
+ for sectionname, sectiontitle in sections:
+ if sectionname not in notes:
+ continue
+
+ lines.append(sectiontitle)
+ lines.append('=' * len(sectiontitle))
+ lines.append('')
+
+ # First pass to emit sub-sections.
+ for title, paragraphs in notes.titledforsection(sectionname):
+ lines.append(title)
+ lines.append('-' * len(title))
+ lines.append('')
+
+ wrapper = textwrap.TextWrapper(width=78)
+ for i, para in enumerate(paragraphs):
+ if i:
+ lines.append('')
+ lines.extend(wrapper.wrap(' '.join(para)))
+
+ lines.append('')
+
+ # Second pass to emit bullet list items.
+
+ # If the section has titled and non-titled items, we can't
+ # simply emit the bullet list because it would appear to come
+ # from the last title/section. So, we emit a new sub-section
+ # for the non-titled items.
+ nontitled = notes.nontitledforsection(sectionname)
+ if notes.titledforsection(sectionname) and nontitled:
+ # TODO make configurable.
+ lines.append(BULLET_SECTION)
+ lines.append('-' * len(BULLET_SECTION))
+ lines.append('')
+
+ for paragraphs in nontitled:
+ wrapper = textwrap.TextWrapper(initial_indent='* ',
+ subsequent_indent=' ',
+ width=78)
+ lines.extend(wrapper.wrap(' '.join(paragraphs[0])))
+
+ wrapper = textwrap.TextWrapper(initial_indent=' ',
+ subsequent_indent=' ',
+ width=78)
+ for para in paragraphs[1:]:
+ lines.append('')
+ lines.extend(wrapper.wrap(' '.join(para)))
+
+ lines.append('')
+
+ if lines[-1]:
+ lines.append('')
+
+ return '\n'.join(lines)
+
+@command('releasenotes',
+ [('r', 'rev', '', _('revisions to process for release notes'), _('REV'))],
+ _('[-r REV] FILE'))
+def releasenotes(ui, repo, file_, rev=None):
+ """parse release notes from commit messages into an output file
+
+ Given an output file and set of revisions, this command will parse commit
+ messages for release notes then add them to the output file.
+
+ Release notes are defined in commit messages as ReStructuredText
+ directives. These have the form::
+
+ .. directive:: title
+
+ content
+
+ Each ``directive`` maps to an output section in a generated release notes
+ file, which itself is ReStructuredText. For example, the ``.. feature::``
+ directive would map to a ``New Features`` section.
+
+ Release note directives can be either short-form or long-form. In short-
+ form, ``title`` is omitted and the release note is rendered as a bullet
+ list. In long form, a sub-section with the title ``title`` is added to the
+ section.
+
+ The ``FILE`` argument controls the output file to write gathered release
+ notes to. The format of the file is::
+
+ Section 1
+ =========
+
+ ...
+
+ Section 2
+ =========
+
+ ...
+
+ Only sections with defined release notes are emitted.
+
+ If a section only has short-form notes, it will consist of a bullet list::
+
+ Section
+ =======
+
+ * Release note 1
+ * Release note 2
+
+ If a section has long-form notes, sub-sections will be emitted::
+
+ Section
+ =======
+
+ Note 1 Title
+ ------------
+
+ Description of the first long-form note.
+
+ Note 2 Title
+ ------------
+
+ Description of the second long-form note.
+
+ If the ``FILE`` argument points to an existing file, that file will be
+ parsed for release notes having the format that would be generated by this
+ command. The notes from the processed commit messages will be *merged*
+ into this parsed set.
+
+ During release notes merging:
+
+ * Duplicate items are automatically ignored
+ * Items that are different are automatically ignored if the similarity is
+ greater than a threshold.
+
+ This means that the release notes file can be updated independently from
+ this command and changes should not be lost when running this command on
+ that file. A particular use case for this is to tweak the wording of a
+ release note after it has been added to the release notes file.
+ """
+ sections = releasenotessections(ui, repo)
+
+ revs = scmutil.revrange(repo, [rev or 'not public()'])
+ incoming = parsenotesfromrevisions(repo, sections.names(), revs)
+
+ try:
+ with open(file_, 'rb') as fh:
+ notes = parsereleasenotesfile(sections, fh.read())
+ except IOError as e:
+ if e.errno != errno.ENOENT:
+ raise
+
+ notes = parsedreleasenotes()
+
+ notes.merge(ui, incoming)
+
+ with open(file_, 'wb') as fh:
+ fh.write(serializenotes(sections, notes))
+
+@command('debugparsereleasenotes', norepo=True)
+def debugparsereleasenotes(ui, path, repo=None):
+ """parse release notes and print resulting data structure"""
+ if path == '-':
+ text = sys.stdin.read()
+ else:
+ with open(path, 'rb') as fh:
+ text = fh.read()
+
+ sections = releasenotessections(ui, repo)
+
+ notes = parsereleasenotesfile(sections, text)
+
+ for section in notes:
+ ui.write(_('section: %s\n') % section)
+ for title, paragraphs in notes.titledforsection(section):
+ ui.write(_(' subsection: %s\n') % title)
+ for para in paragraphs:
+ ui.write(_(' paragraph: %s\n') % ' '.join(para))
+
+ for paragraphs in notes.nontitledforsection(section):
+ ui.write(_(' bullet point:\n'))
+ for para in paragraphs:
+ ui.write(_(' paragraph: %s\n') % ' '.join(para))
--- a/hgext/relink.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/relink.py Wed Jul 19 07:51:41 2017 -0500
@@ -13,14 +13,14 @@
from mercurial.i18n import _
from mercurial import (
- cmdutil,
error,
hg,
+ registrar,
util,
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
--- a/hgext/schemes.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/schemes.py Wed Jul 19 07:51:41 2017 -0500
@@ -46,17 +46,17 @@
from mercurial.i18n import _
from mercurial import (
- cmdutil,
error,
extensions,
hg,
pycompat,
+ registrar,
templater,
util,
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
--- a/hgext/share.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/share.py Wed Jul 19 07:51:41 2017 -0500
@@ -43,11 +43,11 @@
from mercurial.i18n import _
from mercurial import (
bookmarks,
- cmdutil,
commands,
error,
extensions,
hg,
+ registrar,
txnutil,
util,
)
@@ -56,7 +56,7 @@
parseurl = hg.parseurl
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -132,16 +132,16 @@
if pool:
pool = util.expandpath(pool)
- opts['shareopts'] = dict(
- pool=pool,
- mode=ui.config('share', 'poolnaming', 'identity'),
- )
+ opts[r'shareopts'] = {
+ 'pool': pool,
+ 'mode': ui.config('share', 'poolnaming', 'identity'),
+ }
return orig(ui, source, *args, **opts)
def extsetup(ui):
extensions.wrapfunction(bookmarks, '_getbkfile', getbkfile)
- extensions.wrapfunction(bookmarks.bmstore, 'recordchange', recordchange)
+ extensions.wrapfunction(bookmarks.bmstore, '_recordchange', recordchange)
extensions.wrapfunction(bookmarks.bmstore, '_writerepo', writerepo)
extensions.wrapcommand(commands.table, 'clone', clone)
--- a/hgext/shelve.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/shelve.py Wed Jul 19 07:51:41 2017 -0500
@@ -33,7 +33,6 @@
bundlerepo,
changegroup,
cmdutil,
- commands,
error,
exchange,
hg,
@@ -43,6 +42,7 @@
node as nodemod,
patch,
phases,
+ registrar,
repair,
scmutil,
templatefilters,
@@ -55,7 +55,7 @@
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -126,15 +126,10 @@
fp = self.opener()
try:
gen = exchange.readbundle(self.repo.ui, fp, self.fname, self.vfs)
- if not isinstance(gen, bundle2.unbundle20):
- gen.apply(self.repo, 'unshelve',
- 'bundle:' + self.vfs.join(self.fname),
- targetphase=phases.secret)
- if isinstance(gen, bundle2.unbundle20):
- bundle2.applybundle(self.repo, gen,
- self.repo.currenttransaction(),
- source='unshelve',
- url='bundle:' + self.vfs.join(self.fname))
+ bundle2.applybundle(self.repo, gen, self.repo.currenttransaction(),
+ source='unshelve',
+ url='bundle:' + self.vfs.join(self.fname),
+ targetphase=phases.secret)
finally:
fp.close()
@@ -167,7 +162,7 @@
Handles saving and restoring a shelved state. Ensures that different
versions of a shelved state are possible and handles them appropriately.
"""
- _version = 1
+ _version = 2
_filename = 'shelvedstate'
_keep = 'keep'
_nokeep = 'nokeep'
@@ -175,40 +170,75 @@
_noactivebook = ':no-active-bookmark'
@classmethod
- def load(cls, repo):
+ def _verifyandtransform(cls, d):
+ """Some basic shelvestate syntactic verification and transformation"""
+ try:
+ d['originalwctx'] = nodemod.bin(d['originalwctx'])
+ d['pendingctx'] = nodemod.bin(d['pendingctx'])
+ d['parents'] = [nodemod.bin(h)
+ for h in d['parents'].split(' ')]
+ d['nodestoremove'] = [nodemod.bin(h)
+ for h in d['nodestoremove'].split(' ')]
+ except (ValueError, TypeError, KeyError) as err:
+ raise error.CorruptedState(str(err))
+
+ @classmethod
+ def _getversion(cls, repo):
+ """Read version information from shelvestate file"""
fp = repo.vfs(cls._filename)
try:
version = int(fp.readline().strip())
-
- if version != cls._version:
- raise error.Abort(_('this version of shelve is incompatible '
- 'with the version used in this repo'))
- name = fp.readline().strip()
- wctx = nodemod.bin(fp.readline().strip())
- pendingctx = nodemod.bin(fp.readline().strip())
- parents = [nodemod.bin(h) for h in fp.readline().split()]
- nodestoremove = [nodemod.bin(h) for h in fp.readline().split()]
- branchtorestore = fp.readline().strip()
- keep = fp.readline().strip() == cls._keep
- activebook = fp.readline().strip()
- except (ValueError, TypeError) as err:
+ except ValueError as err:
raise error.CorruptedState(str(err))
finally:
fp.close()
+ return version
+ @classmethod
+ def _readold(cls, repo):
+ """Read the old position-based version of a shelvestate file"""
+ # Order is important, because old shelvestate file uses it
+ # to detemine values of fields (i.g. name is on the second line,
+ # originalwctx is on the third and so forth). Please do not change.
+ keys = ['version', 'name', 'originalwctx', 'pendingctx', 'parents',
+ 'nodestoremove', 'branchtorestore', 'keep', 'activebook']
+ # this is executed only rarely, so it is not a big deal
+ # that we open this file twice
+ fp = repo.vfs(cls._filename)
+ d = {}
+ try:
+ for key in keys:
+ d[key] = fp.readline().strip()
+ finally:
+ fp.close()
+ return d
+
+ @classmethod
+ def load(cls, repo):
+ version = cls._getversion(repo)
+ if version < cls._version:
+ d = cls._readold(repo)
+ elif version == cls._version:
+ d = scmutil.simplekeyvaluefile(repo.vfs, cls._filename)\
+ .read(firstlinenonkeyval=True)
+ else:
+ raise error.Abort(_('this version of shelve is incompatible '
+ 'with the version used in this repo'))
+
+ cls._verifyandtransform(d)
try:
obj = cls()
- obj.name = name
- obj.wctx = repo[wctx]
- obj.pendingctx = repo[pendingctx]
- obj.parents = parents
- obj.nodestoremove = nodestoremove
- obj.branchtorestore = branchtorestore
- obj.keep = keep
+ obj.name = d['name']
+ obj.wctx = repo[d['originalwctx']]
+ obj.pendingctx = repo[d['pendingctx']]
+ obj.parents = d['parents']
+ obj.nodestoremove = d['nodestoremove']
+ obj.branchtorestore = d.get('branchtorestore', '')
+ obj.keep = d.get('keep') == cls._keep
obj.activebookmark = ''
- if activebook != cls._noactivebook:
- obj.activebookmark = activebook
- except error.RepoLookupError as err:
+ if d.get('activebook', '') != cls._noactivebook:
+ obj.activebookmark = d.get('activebook', '')
+ except (error.RepoLookupError, KeyError) as err:
raise error.CorruptedState(str(err))
return obj
@@ -216,19 +246,20 @@
@classmethod
def save(cls, repo, name, originalwctx, pendingctx, nodestoremove,
branchtorestore, keep=False, activebook=''):
- fp = repo.vfs(cls._filename, 'wb')
- fp.write('%i\n' % cls._version)
- fp.write('%s\n' % name)
- fp.write('%s\n' % nodemod.hex(originalwctx.node()))
- fp.write('%s\n' % nodemod.hex(pendingctx.node()))
- fp.write('%s\n' %
- ' '.join([nodemod.hex(p) for p in repo.dirstate.parents()]))
- fp.write('%s\n' %
- ' '.join([nodemod.hex(n) for n in nodestoremove]))
- fp.write('%s\n' % branchtorestore)
- fp.write('%s\n' % (cls._keep if keep else cls._nokeep))
- fp.write('%s\n' % (activebook or cls._noactivebook))
- fp.close()
+ info = {
+ "name": name,
+ "originalwctx": nodemod.hex(originalwctx.node()),
+ "pendingctx": nodemod.hex(pendingctx.node()),
+ "parents": ' '.join([nodemod.hex(p)
+ for p in repo.dirstate.parents()]),
+ "nodestoremove": ' '.join([nodemod.hex(n)
+ for n in nodestoremove]),
+ "branchtorestore": branchtorestore,
+ "keep": cls._keep if keep else cls._nokeep,
+ "activebook": activebook or cls._noactivebook
+ }
+ scmutil.simplekeyvaluefile(repo.vfs, cls._filename)\
+ .write(info, firstline=str(cls._version))
@classmethod
def clear(cls, repo):
@@ -266,9 +297,10 @@
'''Abort current transaction for shelve/unshelve, but keep dirstate
'''
tr = repo.currenttransaction()
- repo.dirstate.savebackup(tr, suffix='.shelve')
+ backupname = 'dirstate.shelve'
+ repo.dirstate.savebackup(tr, backupname)
tr.abort()
- repo.dirstate.restorebackup(None, suffix='.shelve')
+ repo.dirstate.restorebackup(None, backupname)
def createcmd(ui, repo, pats, opts):
"""subcommand that creates a new shelve"""
@@ -280,7 +312,7 @@
"""Decide on the name this shelve is going to have"""
def gennames():
yield label
- for i in xrange(1, 100):
+ for i in itertools.count(1):
yield '%s-%02d' % (label, i)
name = opts.get('name')
label = repo._activebookmark or parent.branch() or 'default'
@@ -307,8 +339,6 @@
if not shelvedfile(repo, n, patchextension).exists():
name = n
break
- else:
- raise error.Abort(_("too many shelved changes named '%s'") % label)
return name
@@ -316,7 +346,7 @@
"""return all mutable ancestors for ctx (included)
Much faster than the revset ancestors(ctx) & draft()"""
- seen = set([nodemod.nullrev])
+ seen = {nodemod.nullrev}
visit = collections.deque()
visit.append(ctx)
while visit:
@@ -630,7 +660,7 @@
with repo.lock():
checkparents(repo, state)
ms = merge.mergestate.read(repo)
- if [f for f in ms if ms[f] == 'u']:
+ if list(ms.unresolved()):
raise error.Abort(
_("unresolved conflicts, can't continue"),
hint=_("see 'hg resolve', then 'hg unshelve --continue'"))
@@ -645,7 +675,7 @@
raise
shelvectx = repo['tip']
- if not shelvectx in state.pendingctx.children():
+ if state.pendingctx not in shelvectx.parents():
# rebase was a no-op, so it produced no child commit
shelvectx = state.pendingctx
else:
@@ -722,7 +752,7 @@
# refresh ctx after rebase completes
shelvectx = repo['tip']
- if not shelvectx in tmpwctx.children():
+ if tmpwctx not in shelvectx.parents():
# rebase was a no-op, so it produced no child commit
shelvectx = tmpwctx
return shelvectx
@@ -934,7 +964,7 @@
('i', 'interactive', None,
_('interactive mode, only works while creating a shelve')),
('', 'stat', None,
- _('output diffstat-style summary of changes'))] + commands.walkopts,
+ _('output diffstat-style summary of changes'))] + cmdutil.walkopts,
_('hg shelve [OPTION]... [FILE]...'))
def shelvecmd(ui, repo, *pats, **opts):
'''save and set aside changes from the working directory
@@ -970,17 +1000,17 @@
all shelved changes, use ``--cleanup``.
'''
allowables = [
- ('addremove', set(['create'])), # 'create' is pseudo action
- ('unknown', set(['create'])),
- ('cleanup', set(['cleanup'])),
-# ('date', set(['create'])), # ignored for passing '--date "0 0"' in tests
- ('delete', set(['delete'])),
- ('edit', set(['create'])),
- ('list', set(['list'])),
- ('message', set(['create'])),
- ('name', set(['create'])),
- ('patch', set(['patch', 'list'])),
- ('stat', set(['stat', 'list'])),
+ ('addremove', {'create'}), # 'create' is pseudo action
+ ('unknown', {'create'}),
+ ('cleanup', {'cleanup'}),
+# ('date', {'create'}), # ignored for passing '--date "0 0"' in tests
+ ('delete', {'delete'}),
+ ('edit', {'create'}),
+ ('list', {'list'}),
+ ('message', {'create'}),
+ ('name', {'create'}),
+ ('patch', {'patch', 'list'}),
+ ('stat', {'stat', 'list'}),
]
def checkopt(opt):
if opts.get(opt):
--- a/hgext/show.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/show.py Wed Jul 19 07:51:41 2017 -0500
@@ -10,6 +10,19 @@
This extension provides the :hg:`show` command, which provides a central
command for displaying commonly-accessed repository data and views of that
data.
+
+The following config options can influence operation.
+
+``commands``
+------------
+
+``show.aliasprefix``
+ List of strings that will register aliases for views. e.g. ``s`` will
+ effectively set config options ``alias.s<view> = show <view>`` for all
+ views. i.e. `hg swork` would execute `hg show work`.
+
+ Aliases that would conflict with existing registrations will not be
+ performed.
"""
from __future__ import absolute_import
@@ -18,9 +31,12 @@
from mercurial.node import nullrev
from mercurial import (
cmdutil,
+ commands,
+ destutil,
error,
formatter,
graphmod,
+ phases,
pycompat,
registrar,
revset,
@@ -34,7 +50,7 @@
testedwith = 'ships-with-hg-core'
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
revsetpredicate = registrar.revsetpredicate()
class showcmdfunc(registrar._funcregistrarbase):
@@ -43,7 +59,7 @@
# Used by _formatdoc().
_docformat = '%s -- %s'
- def _extrasetup(self, name, func, fmtopic=None):
+ def _extrasetup(self, name, func, fmtopic=None, csettopic=None):
"""Called with decorator arguments to register a show view.
``name`` is the sub-command name.
@@ -52,13 +68,21 @@
``fmtopic`` is the topic in the style that will be rendered for
this view.
+
+ ``csettopic`` is the topic in the style to be used for a changeset
+ printer.
+
+ If ``fmtopic`` is specified, the view function will receive a
+ formatter instance. If ``csettopic`` is specified, the view
+ function will receive a changeset printer.
"""
func._fmtopic = fmtopic
+ func._csettopic = csettopic
showview = showcmdfunc()
@command('show', [
- # TODO: Switch this template flag to use commands.formatteropts if
+ # TODO: Switch this template flag to use cmdutil.formatteropts if
# 'hg show' becomes stable before --template/-T is stable. For now,
# we are putting it here without the '(EXPERIMENTAL)' flag because it
# is an important part of the 'hg show' user experience and the entire
@@ -109,11 +133,21 @@
hint=_('run "hg show" to see available views'))
template = template or 'show'
- fmtopic = 'show%s' % views[view]._fmtopic
+ fn = views[view]
ui.pager('show')
- with ui.formatter(fmtopic, {'template': template}) as fm:
- return views[view](ui, repo, fm)
+
+ if fn._fmtopic:
+ fmtopic = 'show%s' % fn._fmtopic
+ with ui.formatter(fmtopic, {'template': template}) as fm:
+ return fn(ui, repo, fm)
+ elif fn._csettopic:
+ ref = 'show%s' % fn._csettopic
+ spec = formatter.lookuptemplate(ui, ref, template)
+ displayer = cmdutil.changeset_templater(ui, repo, spec, buffered=True)
+ return fn(ui, repo, displayer)
+ else:
+ return fn(ui, repo)
@showview('bookmarks', fmtopic='bookmarks')
def showbookmarks(ui, repo, fm):
@@ -139,6 +173,171 @@
fm.data(active=bm == active,
longestbookmarklen=longestname)
+@showview('stack', csettopic='stack')
+def showstack(ui, repo, displayer):
+ """current line of work"""
+ wdirctx = repo['.']
+ if wdirctx.rev() == nullrev:
+ raise error.Abort(_('stack view only available when there is a '
+ 'working directory'))
+
+ if wdirctx.phase() == phases.public:
+ ui.write(_('(empty stack; working directory parent is a published '
+ 'changeset)\n'))
+ return
+
+ # TODO extract "find stack" into a function to facilitate
+ # customization and reuse.
+
+ baserev = destutil.stackbase(ui, repo)
+ basectx = None
+
+ if baserev is None:
+ baserev = wdirctx.rev()
+ stackrevs = {wdirctx.rev()}
+ else:
+ stackrevs = set(repo.revs('%d::.', baserev))
+
+ ctx = repo[baserev]
+ if ctx.p1().rev() != nullrev:
+ basectx = ctx.p1()
+
+ # And relevant descendants.
+ branchpointattip = False
+ cl = repo.changelog
+
+ for rev in cl.descendants([wdirctx.rev()]):
+ ctx = repo[rev]
+
+ # Will only happen if . is public.
+ if ctx.phase() == phases.public:
+ break
+
+ stackrevs.add(ctx.rev())
+
+ # ctx.children() within a function iterating on descendants
+ # potentially has severe performance concerns because revlog.children()
+ # iterates over all revisions after ctx's node. However, the number of
+ # draft changesets should be a reasonably small number. So even if
+ # this is quadratic, the perf impact should be minimal.
+ if len(ctx.children()) > 1:
+ branchpointattip = True
+ break
+
+ stackrevs = list(sorted(stackrevs, reverse=True))
+
+ # Find likely target heads for the current stack. These are likely
+ # merge or rebase targets.
+ if basectx:
+ # TODO make this customizable?
+ newheads = set(repo.revs('heads(%d::) - %ld - not public()',
+ basectx.rev(), stackrevs))
+ else:
+ newheads = set()
+
+ try:
+ cmdutil.findcmd('rebase', commands.table)
+ haverebase = True
+ except (error.AmbiguousCommand, error.UnknownCommand):
+ haverebase = False
+
+ # TODO use templating.
+ # TODO consider using graphmod. But it may not be necessary given
+ # our simplicity and the customizations required.
+ # TODO use proper graph symbols from graphmod
+
+ shortesttmpl = formatter.maketemplater(ui, '{shortest(node, 5)}')
+ def shortest(ctx):
+ return shortesttmpl.render({'ctx': ctx, 'node': ctx.hex()})
+
+ # We write out new heads to aid in DAG awareness and to help with decision
+ # making on how the stack should be reconciled with commits made since the
+ # branch point.
+ if newheads:
+ # Calculate distance from base so we can render the count and so we can
+ # sort display order by commit distance.
+ revdistance = {}
+ for head in newheads:
+ # There is some redundancy in DAG traversal here and therefore
+ # room to optimize.
+ ancestors = cl.ancestors([head], stoprev=basectx.rev())
+ revdistance[head] = len(list(ancestors))
+
+ sourcectx = repo[stackrevs[-1]]
+
+ sortedheads = sorted(newheads, key=lambda x: revdistance[x],
+ reverse=True)
+
+ for i, rev in enumerate(sortedheads):
+ ctx = repo[rev]
+
+ if i:
+ ui.write(': ')
+ else:
+ ui.write(' ')
+
+ ui.write(('o '))
+ displayer.show(ctx)
+ displayer.flush(ctx)
+ ui.write('\n')
+
+ if i:
+ ui.write(':/')
+ else:
+ ui.write(' /')
+
+ ui.write(' (')
+ ui.write(_('%d commits ahead') % revdistance[rev],
+ label='stack.commitdistance')
+
+ if haverebase:
+ # TODO may be able to omit --source in some scenarios
+ ui.write('; ')
+ ui.write(('hg rebase --source %s --dest %s' % (
+ shortest(sourcectx), shortest(ctx))),
+ label='stack.rebasehint')
+
+ ui.write(')\n')
+
+ ui.write(':\n: ')
+ ui.write(_('(stack head)\n'), label='stack.label')
+
+ if branchpointattip:
+ ui.write(' \\ / ')
+ ui.write(_('(multiple children)\n'), label='stack.label')
+ ui.write(' |\n')
+
+ for rev in stackrevs:
+ ctx = repo[rev]
+ symbol = '@' if rev == wdirctx.rev() else 'o'
+
+ if newheads:
+ ui.write(': ')
+ else:
+ ui.write(' ')
+
+ ui.write(symbol, ' ')
+ displayer.show(ctx)
+ displayer.flush(ctx)
+ ui.write('\n')
+
+ # TODO display histedit hint?
+
+ if basectx:
+ # Vertically and horizontally separate stack base from parent
+ # to reinforce stack boundary.
+ if newheads:
+ ui.write(':/ ')
+ else:
+ ui.write(' / ')
+
+ ui.write(_('(stack base)'), '\n', label='stack.label')
+ ui.write(('o '))
+
+ displayer.show(basectx)
+ displayer.flush(basectx)
+ ui.write('\n')
+
@revsetpredicate('_underway([commitage[, headage]])')
def underwayrevset(repo, subset, x):
args = revset.getargsdict(x, 'underway', 'commitage headage')
@@ -185,24 +384,40 @@
# Add working directory parent.
wdirrev = repo['.'].rev()
if wdirrev != nullrev:
- relevant += revset.baseset(set([wdirrev]))
+ relevant += revset.baseset({wdirrev})
return subset & relevant
-@showview('work', fmtopic='work')
-def showwork(ui, repo, fm):
+@showview('work', csettopic='work')
+def showwork(ui, repo, displayer):
"""changesets that aren't finished"""
# TODO support date-based limiting when calling revset.
revs = repo.revs('sort(_underway(), topo)')
revdag = graphmod.dagwalker(repo, revs)
- displayer = cmdutil.changeset_templater(ui, repo, None, None,
- tmpl=fm._t.load(fm._topic),
- mapfile=None, buffered=True)
ui.setconfig('experimental', 'graphshorten', True)
cmdutil.displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges)
+def extsetup(ui):
+ # Alias `hg <prefix><view>` to `hg show <view>`.
+ for prefix in ui.configlist('commands', 'show.aliasprefix'):
+ for view in showview._table:
+ name = '%s%s' % (prefix, view)
+
+ choice, allcommands = cmdutil.findpossible(name, commands.table,
+ strict=True)
+
+ # This alias is already a command name. Don't set it.
+ if name in choice:
+ continue
+
+ # Same for aliases.
+ if ui.config('alias', name):
+ continue
+
+ ui.setconfig('alias', name, 'show %s' % view, source='show')
+
# Adjust the docstring of the show command so it shows all registered views.
# This is a bit hacky because it runs at the end of module load. When moved
# into core or when another extension wants to provide a view, we'll need
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/sparse.py Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,336 @@
+# sparse.py - allow sparse checkouts of the working directory
+#
+# Copyright 2014 Facebook, Inc.
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+"""allow sparse checkouts of the working directory (EXPERIMENTAL)
+
+(This extension is not yet protected by backwards compatibility
+guarantees. Any aspect may break in future releases until this
+notice is removed.)
+
+This extension allows the working directory to only consist of a
+subset of files for the revision. This allows specific files or
+directories to be explicitly included or excluded. Many repository
+operations have performance proportional to the number of files in
+the working directory. So only realizing a subset of files in the
+working directory can improve performance.
+
+Sparse Config Files
+-------------------
+
+The set of files that are part of a sparse checkout are defined by
+a sparse config file. The file defines 3 things: includes (files to
+include in the sparse checkout), excludes (files to exclude from the
+sparse checkout), and profiles (links to other config files).
+
+The file format is newline delimited. Empty lines and lines beginning
+with ``#`` are ignored.
+
+Lines beginning with ``%include `` denote another sparse config file
+to include. e.g. ``%include tests.sparse``. The filename is relative
+to the repository root.
+
+The special lines ``[include]`` and ``[exclude]`` denote the section
+for includes and excludes that follow, respectively. It is illegal to
+have ``[include]`` after ``[exclude]``.
+
+Non-special lines are file patterns to be added to either includes
+or excludes. The syntax of these lines is documented by :hg:`help patterns`.
+Patterns are interpreted as ``glob:`` by default and match against the
+root of the repository.
+
+Exclusion patterns take precedence over inclusion patterns. So even
+if a file is explicitly included, an ``[exclude]`` entry can remove it.
+
+For example, say you have a repository with 3 directories, ``frontend/``,
+``backend/``, and ``tools/``. ``frontend/`` and ``backend/`` correspond
+to different projects and it is uncommon for someone working on one
+to need the files for the other. But ``tools/`` contains files shared
+between both projects. Your sparse config files may resemble::
+
+ # frontend.sparse
+ frontend/**
+ tools/**
+
+ # backend.sparse
+ backend/**
+ tools/**
+
+Say the backend grows in size. Or there's a directory with thousands
+of files you wish to exclude. You can modify the profile to exclude
+certain files::
+
+ [include]
+ backend/**
+ tools/**
+
+ [exclude]
+ tools/tests/**
+"""
+
+from __future__ import absolute_import
+
+from mercurial.i18n import _
+from mercurial import (
+ cmdutil,
+ commands,
+ dirstate,
+ error,
+ extensions,
+ hg,
+ match as matchmod,
+ registrar,
+ sparse,
+ util,
+)
+
+# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
+# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
+# be specifying the version(s) of Mercurial they are tested with, or
+# leave the attribute unspecified.
+testedwith = 'ships-with-hg-core'
+
+cmdtable = {}
+command = registrar.command(cmdtable)
+
+def extsetup(ui):
+ sparse.enabled = True
+
+ _setupclone(ui)
+ _setuplog(ui)
+ _setupadd(ui)
+ _setupdirstate(ui)
+
+def replacefilecache(cls, propname, replacement):
+    """Replace a filecache property with a wrapped one. This allows changing the
+ cache invalidation condition."""
+ origcls = cls
+ assert callable(replacement)
+ while cls is not object:
+ if propname in cls.__dict__:
+ orig = cls.__dict__[propname]
+ setattr(cls, propname, replacement(orig))
+ break
+ cls = cls.__bases__[0]
+
+ if cls is object:
+ raise AttributeError(_("type '%s' has no property '%s'") % (origcls,
+ propname))
+
+def _setuplog(ui):
+ entry = commands.table['^log|history']
+ entry[1].append(('', 'sparse', None,
+ "limit to changesets affecting the sparse checkout"))
+
+ def _logrevs(orig, repo, opts):
+ revs = orig(repo, opts)
+ if opts.get('sparse'):
+ sparsematch = sparse.matcher(repo)
+ def ctxmatch(rev):
+ ctx = repo[rev]
+ return any(f for f in ctx.files() if sparsematch(f))
+ revs = revs.filter(ctxmatch)
+ return revs
+ extensions.wrapfunction(cmdutil, '_logrevs', _logrevs)
+
+def _clonesparsecmd(orig, ui, repo, *args, **opts):
+ include_pat = opts.get('include')
+ exclude_pat = opts.get('exclude')
+ enableprofile_pat = opts.get('enable_profile')
+ include = exclude = enableprofile = False
+ if include_pat:
+ pat = include_pat
+ include = True
+ if exclude_pat:
+ pat = exclude_pat
+ exclude = True
+ if enableprofile_pat:
+ pat = enableprofile_pat
+ enableprofile = True
+ if sum([include, exclude, enableprofile]) > 1:
+ raise error.Abort(_("too many flags specified."))
+ if include or exclude or enableprofile:
+ def clonesparse(orig, self, node, overwrite, *args, **kwargs):
+ sparse.updateconfig(self.unfiltered(), pat, {}, include=include,
+ exclude=exclude, enableprofile=enableprofile)
+ return orig(self, node, overwrite, *args, **kwargs)
+ extensions.wrapfunction(hg, 'updaterepo', clonesparse)
+ return orig(ui, repo, *args, **opts)
+
+def _setupclone(ui):
+ entry = commands.table['^clone']
+ entry[1].append(('', 'enable-profile', [],
+ 'enable a sparse profile'))
+ entry[1].append(('', 'include', [],
+ 'include sparse pattern'))
+ entry[1].append(('', 'exclude', [],
+ 'exclude sparse pattern'))
+ extensions.wrapcommand(commands.table, 'clone', _clonesparsecmd)
+
+def _setupadd(ui):
+ entry = commands.table['^add']
+ entry[1].append(('s', 'sparse', None,
+ 'also include directories of added files in sparse config'))
+
+ def _add(orig, ui, repo, *pats, **opts):
+ if opts.get('sparse'):
+ dirs = set()
+ for pat in pats:
+ dirname, basename = util.split(pat)
+ dirs.add(dirname)
+ sparse.updateconfig(repo, list(dirs), opts, include=True)
+ return orig(ui, repo, *pats, **opts)
+
+ extensions.wrapcommand(commands.table, 'add', _add)
+
+def _setupdirstate(ui):
+ """Modify the dirstate to prevent stat'ing excluded files,
+ and to prevent modifications to files outside the checkout.
+ """
+
+ def walk(orig, self, match, subrepos, unknown, ignored, full=True):
+ match = matchmod.intersectmatchers(match, self._sparsematcher)
+ return orig(self, match, subrepos, unknown, ignored, full)
+
+ extensions.wrapfunction(dirstate.dirstate, 'walk', walk)
+
+ # dirstate.rebuild should not add non-matching files
+ def _rebuild(orig, self, parent, allfiles, changedfiles=None):
+ matcher = self._sparsematcher
+ if not matcher.always():
+ allfiles = allfiles.matches(matcher)
+ if changedfiles:
+ changedfiles = [f for f in changedfiles if matcher(f)]
+
+ if changedfiles is not None:
+ # In _rebuild, these files will be deleted from the dirstate
+ # when they are not found to be in allfiles
+ dirstatefilestoremove = set(f for f in self if not matcher(f))
+ changedfiles = dirstatefilestoremove.union(changedfiles)
+
+ return orig(self, parent, allfiles, changedfiles)
+ extensions.wrapfunction(dirstate.dirstate, 'rebuild', _rebuild)
+
+ # Prevent adding files that are outside the sparse checkout
+ editfuncs = ['normal', 'add', 'normallookup', 'copy', 'remove', 'merge']
+ hint = _('include file with `hg debugsparse --include <pattern>` or use ' +
+ '`hg add -s <file>` to include file directory while adding')
+ for func in editfuncs:
+ def _wrapper(orig, self, *args):
+ sparsematch = self._sparsematcher
+ if not sparsematch.always():
+ for f in args:
+ if (f is not None and not sparsematch(f) and
+ f not in self):
+ raise error.Abort(_("cannot add '%s' - it is outside "
+ "the sparse checkout") % f,
+ hint=hint)
+ return orig(self, *args)
+ extensions.wrapfunction(dirstate.dirstate, func, _wrapper)
+
+@command('^debugsparse', [
+ ('I', 'include', False, _('include files in the sparse checkout')),
+ ('X', 'exclude', False, _('exclude files in the sparse checkout')),
+ ('d', 'delete', False, _('delete an include/exclude rule')),
+ ('f', 'force', False, _('allow changing rules even with pending changes')),
+ ('', 'enable-profile', False, _('enables the specified profile')),
+ ('', 'disable-profile', False, _('disables the specified profile')),
+ ('', 'import-rules', False, _('imports rules from a file')),
+ ('', 'clear-rules', False, _('clears local include/exclude rules')),
+ ('', 'refresh', False, _('updates the working after sparseness changes')),
+ ('', 'reset', False, _('makes the repo full again')),
+ ] + commands.templateopts,
+ _('[--OPTION] PATTERN...'))
+def debugsparse(ui, repo, *pats, **opts):
+ """make the current checkout sparse, or edit the existing checkout
+
+ The sparse command is used to make the current checkout sparse.
+ This means files that don't meet the sparse condition will not be
+ written to disk, or show up in any working copy operations. It does
+ not affect files in history in any way.
+
+ Passing no arguments prints the currently applied sparse rules.
+
+ --include and --exclude are used to add and remove files from the sparse
+ checkout. The effects of adding an include or exclude rule are applied
+ immediately. If applying the new rule would cause a file with pending
+ changes to be added or removed, the command will fail. Pass --force to
+ force a rule change even with pending changes (the changes on disk will
+ be preserved).
+
+ --delete removes an existing include/exclude rule. The effects are
+ immediate.
+
+ --refresh refreshes the files on disk based on the sparse rules. This is
+ only necessary if .hg/sparse was changed by hand.
+
+ --enable-profile and --disable-profile accept a path to a .hgsparse file.
+ This allows defining sparse checkouts and tracking them inside the
+ repository. This is useful for defining commonly used sparse checkouts for
+ many people to use. As the profile definition changes over time, the sparse
+ checkout will automatically be updated appropriately, depending on which
+ changeset is checked out. Changes to .hgsparse are not applied until they
+ have been committed.
+
+ --import-rules accepts a path to a file containing rules in the .hgsparse
+ format, allowing you to add --include, --exclude and --enable-profile rules
+ in bulk. Like the --include, --exclude and --enable-profile switches, the
+ changes are applied immediately.
+
+ --clear-rules removes all local include and exclude rules, while leaving
+ any enabled profiles in place.
+
+ Returns 0 if editing the sparse checkout succeeds.
+ """
+ include = opts.get('include')
+ exclude = opts.get('exclude')
+ force = opts.get('force')
+ enableprofile = opts.get('enable_profile')
+ disableprofile = opts.get('disable_profile')
+ importrules = opts.get('import_rules')
+ clearrules = opts.get('clear_rules')
+ delete = opts.get('delete')
+ refresh = opts.get('refresh')
+ reset = opts.get('reset')
+ count = sum([include, exclude, enableprofile, disableprofile, delete,
+ importrules, refresh, clearrules, reset])
+ if count > 1:
+ raise error.Abort(_("too many flags specified"))
+
+ if count == 0:
+ if repo.vfs.exists('sparse'):
+ ui.status(repo.vfs.read("sparse") + "\n")
+ temporaryincludes = sparse.readtemporaryincludes(repo)
+ if temporaryincludes:
+ ui.status(_("Temporarily Included Files (for merge/rebase):\n"))
+ ui.status(("\n".join(temporaryincludes) + "\n"))
+ else:
+ ui.status(_('repo is not sparse\n'))
+ return
+
+ if include or exclude or delete or reset or enableprofile or disableprofile:
+ sparse.updateconfig(repo, pats, opts, include=include, exclude=exclude,
+ reset=reset, delete=delete,
+ enableprofile=enableprofile,
+ disableprofile=disableprofile, force=force)
+
+ if importrules:
+ sparse.importfromfiles(repo, opts, pats, force=force)
+
+ if clearrules:
+ sparse.clearrules(repo, force=force)
+
+ if refresh:
+ try:
+ wlock = repo.wlock()
+ fcounts = map(
+ len,
+ sparse.refreshwdir(repo, repo.status(), sparse.matcher(repo),
+ force=force))
+ sparse.printchanges(ui, opts, added=fcounts[0], dropped=fcounts[1],
+ conflicting=fcounts[2])
+ finally:
+ wlock.release()
--- a/hgext/strip.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/strip.py Wed Jul 19 07:51:41 2017 -0500
@@ -14,6 +14,8 @@
lock as lockmod,
merge,
node as nodemod,
+ pycompat,
+ registrar,
repair,
scmutil,
util,
@@ -22,7 +24,7 @@
release = lockmod.release
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -57,10 +59,7 @@
return s
def strip(ui, repo, revs, update=True, backup=True, force=None, bookmarks=None):
- wlock = lock = None
- try:
- wlock = repo.wlock()
- lock = repo.lock()
+ with repo.wlock(), repo.lock():
if update:
checklocalchanges(repo, force=force)
@@ -79,14 +78,9 @@
with repo.transaction('strip') as tr:
if repo._activebookmark in bookmarks:
bookmarksmod.deactivate(repo)
- for bookmark in bookmarks:
- del repomarks[bookmark]
- repomarks.recordchange(tr)
+ repomarks.applychanges(repo, tr, [(b, None) for b in bookmarks])
for bookmark in sorted(bookmarks):
ui.write(_("bookmark '%s' deleted\n") % bookmark)
- finally:
- release(lock, wlock)
-
@command("strip",
[
@@ -132,6 +126,7 @@
Return 0 on success.
"""
+ opts = pycompat.byteskwargs(opts)
backup = True
if opts.get('no_backup') or opts.get('nobackup'):
backup = False
@@ -159,18 +154,11 @@
rsrevs = repair.stripbmrevset(repo, marks[0])
revs.update(set(rsrevs))
if not revs:
- lock = tr = None
- try:
- lock = repo.lock()
- tr = repo.transaction('bookmark')
- for bookmark in bookmarks:
- del repomarks[bookmark]
- repomarks.recordchange(tr)
- tr.close()
- for bookmark in sorted(bookmarks):
- ui.write(_("bookmark '%s' deleted\n") % bookmark)
- finally:
- release(lock, tr)
+ with repo.lock(), repo.transaction('bookmark') as tr:
+ bmchanges = [(b, None) for b in bookmarks]
+ repomarks.applychanges(repo, tr, bmchanges)
+ for bookmark in sorted(bookmarks):
+ ui.write(_("bookmark '%s' deleted\n") % bookmark)
if not revs:
raise error.Abort(_('empty revision set'))
--- a/hgext/transplant.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/transplant.py Wed Jul 19 07:51:41 2017 -0500
@@ -42,7 +42,7 @@
pass
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
--- a/hgext/win32mbcs.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/win32mbcs.py Wed Jul 19 07:51:41 2017 -0500
@@ -183,7 +183,8 @@
if pycompat.osname == 'nt':
for f in winfuncs.split():
wrapname(f, wrapper)
- wrapname("mercurial.osutil.listdir", wrapperforlistdir)
+ wrapname("mercurial.util.listdir", wrapperforlistdir)
+ wrapname("mercurial.windows.listdir", wrapperforlistdir)
# wrap functions to be called with local byte string arguments
for f in rfuncs.split():
wrapname(f, reversewrapper)
--- a/hgext/zeroconf/__init__.py Wed Jul 05 11:24:22 2017 -0400
+++ b/hgext/zeroconf/__init__.py Wed Jul 19 07:51:41 2017 -0500
@@ -167,12 +167,12 @@
value.properties.get("path", "/"))
yield "zc-" + name, url
-def config(orig, self, section, key, default=None, untrusted=False):
+def config(orig, self, section, key, *args, **kwargs):
if section == "paths" and key.startswith("zc-"):
for name, path in getzcpaths():
if name == key:
return path
- return orig(self, section, key, default, untrusted)
+ return orig(self, section, key, *args, **kwargs)
def configitems(orig, self, section, *args, **kwargs):
repos = orig(self, section, *args, **kwargs)
--- a/i18n/polib.py Wed Jul 05 11:24:22 2017 -0400
+++ b/i18n/polib.py Wed Jul 19 07:51:41 2017 -0500
@@ -804,7 +804,7 @@
real_wrapwidth = wrapwidth - flength + specialchars_count
if wrapwidth > 0 and len(field) > real_wrapwidth:
# Wrap the line but take field name into account
- lines = [''] + [unescape(item) for item in wrap(
+ lines = [''] + [unescape(item) for item in textwrap.wrap(
escaped_field,
wrapwidth - 2, # 2 for quotes ""
drop_whitespace=False,
@@ -879,7 +879,7 @@
if val:
for comment in val.split('\n'):
if wrapwidth > 0 and len(comment) + len(c[1]) > wrapwidth:
- ret += wrap(
+ ret += textwrap.wrap(
comment,
wrapwidth,
initial_indent=c[1],
@@ -903,7 +903,7 @@
# what we want for filenames, so the dirty hack is to
# temporally replace hyphens with a char that a file cannot
# contain, like "*"
- ret += [l.replace('*', '-') for l in wrap(
+ ret += [l.replace('*', '-') for l in textwrap.wrap(
filestr.replace('-', '*'),
wrapwidth,
initial_indent='#: ',
@@ -1552,97 +1552,3 @@
return tup
# }}}
-# class TextWrapper {{{
-
-class TextWrapper(textwrap.TextWrapper):
- """
- Subclass of textwrap.TextWrapper that backport the
- drop_whitespace option.
- """
- def __init__(self, *args, **kwargs):
- drop_whitespace = kwargs.pop('drop_whitespace', True)
- textwrap.TextWrapper.__init__(self, *args, **kwargs)
- self.drop_whitespace = drop_whitespace
-
- def _wrap_chunks(self, chunks):
- """_wrap_chunks(chunks : [string]) -> [string]
-
- Wrap a sequence of text chunks and return a list of lines of
- length 'self.width' or less. (If 'break_long_words' is false,
- some lines may be longer than this.) Chunks correspond roughly
- to words and the whitespace between them: each chunk is
- indivisible (modulo 'break_long_words'), but a line break can
- come between any two chunks. Chunks should not have internal
- whitespace; ie. a chunk is either all whitespace or a "word".
- Whitespace chunks will be removed from the beginning and end of
- lines, but apart from that whitespace is preserved.
- """
- lines = []
- if self.width <= 0:
- raise ValueError("invalid width %r (must be > 0)" % self.width)
-
- # Arrange in reverse order so items can be efficiently popped
- # from a stack of chucks.
- chunks.reverse()
-
- while chunks:
-
- # Start the list of chunks that will make up the current line.
- # cur_len is just the length of all the chunks in cur_line.
- cur_line = []
- cur_len = 0
-
- # Figure out which static string will prefix this line.
- if lines:
- indent = self.subsequent_indent
- else:
- indent = self.initial_indent
-
- # Maximum width for this line.
- width = self.width - len(indent)
-
- # First chunk on line is whitespace -- drop it, unless this
- # is the very beginning of the text (ie. no lines started yet).
- if self.drop_whitespace and chunks[-1].strip() == '' and lines:
- del chunks[-1]
-
- while chunks:
- l = len(chunks[-1])
-
- # Can at least squeeze this chunk onto the current line.
- if cur_len + l <= width:
- cur_line.append(chunks.pop())
- cur_len += l
-
- # Nope, this line is full.
- else:
- break
-
- # The current line is full, and the next chunk is too big to
- # fit on *any* line (not just this one).
- if chunks and len(chunks[-1]) > width:
- self._handle_long_word(chunks, cur_line, cur_len, width)
-
- # If the last chunk on this line is all whitespace, drop it.
- if self.drop_whitespace and cur_line and cur_line[-1].strip() == '':
- del cur_line[-1]
-
- # Convert current line back to a string and store it in list
- # of all lines (return value).
- if cur_line:
- lines.append(indent + ''.join(cur_line))
-
- return lines
-
-# }}}
-# function wrap() {{{
-
-def wrap(text, width=70, **kwargs):
- """
- Wrap a single paragraph of text, returning a list of wrapped lines.
- """
- if sys.version_info < (2, 6):
- return TextWrapper(width=width, **kwargs).wrap(text)
- return textwrap.wrap(text, width=width, **kwargs)
-
-#}}}
--- a/mercurial/__init__.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/__init__.py Wed Jul 19 07:51:41 2017 -0500
@@ -7,126 +7,20 @@
from __future__ import absolute_import
-import imp
-import os
import sys
-import zipimport
-from . import (
- policy
-)
+# Allow 'from mercurial import demandimport' to keep working.
+import hgdemandimport
+demandimport = hgdemandimport
__all__ = []
-modulepolicy = policy.policy
-
-# Modules that have both Python and C implementations. See also the
-# set of .py files under mercurial/pure/.
-_dualmodules = set([
- 'mercurial.base85',
- 'mercurial.bdiff',
- 'mercurial.diffhelpers',
- 'mercurial.mpatch',
- 'mercurial.osutil',
- 'mercurial.parsers',
-])
-
-class hgimporter(object):
- """Object that conforms to import hook interface defined in PEP-302."""
- def find_module(self, name, path=None):
- # We only care about modules that have both C and pure implementations.
- if name in _dualmodules:
- return self
- return None
-
- def load_module(self, name):
- mod = sys.modules.get(name, None)
- if mod:
- return mod
-
- mercurial = sys.modules['mercurial']
-
- # The zip importer behaves sufficiently differently from the default
- # importer to warrant its own code path.
- loader = getattr(mercurial, '__loader__', None)
- if isinstance(loader, zipimport.zipimporter):
- def ziploader(*paths):
- """Obtain a zipimporter for a directory under the main zip."""
- path = os.path.join(loader.archive, *paths)
- zl = sys.path_importer_cache.get(path)
- if not zl:
- zl = zipimport.zipimporter(path)
- return zl
-
- try:
- if modulepolicy in policy.policynoc:
- raise ImportError()
-
- zl = ziploader('mercurial')
- mod = zl.load_module(name)
- # Unlike imp, ziploader doesn't expose module metadata that
- # indicates the type of module. So just assume what we found
- # is OK (even though it could be a pure Python module).
- except ImportError:
- if modulepolicy == b'c':
- raise
- zl = ziploader('mercurial', 'pure')
- mod = zl.load_module(name)
-
- sys.modules[name] = mod
- return mod
-
- # Unlike the default importer which searches special locations and
- # sys.path, we only look in the directory where "mercurial" was
- # imported from.
-
- # imp.find_module doesn't support submodules (modules with ".").
- # Instead you have to pass the parent package's __path__ attribute
- # as the path argument.
- stem = name.split('.')[-1]
-
- try:
- if modulepolicy in policy.policynoc:
- raise ImportError()
-
- modinfo = imp.find_module(stem, mercurial.__path__)
-
- # The Mercurial installer used to copy files from
- # mercurial/pure/*.py to mercurial/*.py. Therefore, it's possible
- # for some installations to have .py files under mercurial/*.
- # Loading Python modules when we expected C versions could result
- # in a) poor performance b) loading a version from a previous
- # Mercurial version, potentially leading to incompatibility. Either
- # scenario is bad. So we verify that modules loaded from
- # mercurial/* are C extensions. If the current policy allows the
- # loading of .py modules, the module will be re-imported from
- # mercurial/pure/* below.
- if modinfo[2][2] != imp.C_EXTENSION:
- raise ImportError('.py version of %s found where C '
- 'version should exist' % name)
-
- except ImportError:
- if modulepolicy == b'c':
- raise
-
- # Could not load the C extension and pure Python is allowed. So
- # try to load them.
- from . import pure
- modinfo = imp.find_module(stem, pure.__path__)
- if not modinfo:
- raise ImportError('could not find mercurial module %s' %
- name)
-
- mod = imp.load_module(name, *modinfo)
- sys.modules[name] = mod
- return mod
-
# Python 3 uses a custom module loader that transforms source code between
# source file reading and compilation. This is done by registering a custom
# finder that changes the spec for Mercurial modules to use a custom loader.
if sys.version_info[0] >= 3:
- from . import pure
import importlib
+ import importlib.abc
import io
import token
import tokenize
@@ -137,17 +31,15 @@
# Only handle Mercurial-related modules.
if not fullname.startswith(('mercurial.', 'hgext.', 'hgext3rd.')):
return None
+ # selectors2 is already dual-version clean, don't try and mangle it
+ if fullname.startswith('mercurial.selectors2'):
+ return None
# zstd is already dual-version clean, don't try and mangle it
if fullname.startswith('mercurial.zstd'):
return None
-
- # This assumes Python 3 doesn't support loading C modules.
- if fullname in _dualmodules:
- stem = fullname.split('.')[-1]
- fullname = 'mercurial.pure.%s' % stem
- target = pure
- assert len(path) == 1
- path = [os.path.join(path[0], 'pure')]
+ # pywatchman is already dual-version clean, don't try and mangle it
+ if fullname.startswith('hgext.fsmonitor.pywatchman'):
+ return None
# Try to find the module using other registered finders.
spec = None
@@ -165,12 +57,16 @@
if not spec:
return None
- if fullname.startswith('mercurial.pure.'):
- spec.name = spec.name.replace('.pure.', '.')
-
# TODO need to support loaders from alternate specs, like zip
# loaders.
- spec.loader = hgloader(spec.name, spec.origin)
+ loader = hgloader(spec.name, spec.origin)
+ # Can't use util.safehasattr here because that would require
+ # importing util, and we're in import code.
+ if hasattr(spec.loader, 'loader'): # hasattr-py3-only
+ # This is a nested loader (maybe a lazy loader?)
+ spec.loader.loader = loader
+ else:
+ spec.loader = loader
return spec
def replacetokens(tokens, fullname):
@@ -391,13 +287,10 @@
# implemented them because they are very ugly.
return super(hgloader, self).source_to_code(data, path)
-# We automagically register our custom importer as a side-effect of loading.
-# This is necessary to ensure that any entry points are able to import
-# mercurial.* modules without having to perform this registration themselves.
-if sys.version_info[0] >= 3:
- _importercls = hgpathentryfinder
-else:
- _importercls = hgimporter
-if not any(isinstance(x, _importercls) for x in sys.meta_path):
- # meta_path is used before any implicit finders and before sys.path.
- sys.meta_path.insert(0, _importercls())
+ # We automagically register our custom importer as a side-effect of
+ # loading. This is necessary to ensure that any entry points are able
+ # to import mercurial.* modules without having to perform this
+ # registration themselves.
+ if not any(isinstance(x, hgpathentryfinder) for x in sys.meta_path):
+ # meta_path is used before any implicit finders and before sys.path.
+ sys.meta_path.insert(0, hgpathentryfinder())
--- a/mercurial/ancestor.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/ancestor.py Wed Jul 19 07:51:41 2017 -0500
@@ -47,7 +47,7 @@
sv |= poison
if v in nodes:
# history is linear
- return set([v])
+ return {v}
if sv < poison:
for p in pfunc(v):
sp = seen[p]
@@ -151,7 +151,7 @@
def hasbases(self):
'''whether the common set has any non-trivial bases'''
- return self.bases and self.bases != set([nullrev])
+ return self.bases and self.bases != {nullrev}
def addbases(self, newbases):
'''grow the ancestor set by adding new bases'''
--- a/mercurial/archival.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/archival.py Wed Jul 19 07:51:41 2017 -0500
@@ -18,9 +18,8 @@
from .i18n import _
from . import (
- cmdutil,
- encoding,
error,
+ formatter,
match as matchmod,
util,
vfs as vfsmod,
@@ -80,30 +79,43 @@
def buildmetadata(ctx):
'''build content of .hg_archival.txt'''
repo = ctx.repo()
- hex = ctx.hex()
- if ctx.rev() is None:
- hex = ctx.p1().hex()
- if ctx.dirty():
- hex += '+'
+
+ default = (
+ r'repo: {root}\n'
+ r'node: {ifcontains(rev, revset("wdir()"),'
+ r'"{p1node}{dirty}", "{node}")}\n'
+ r'branch: {branch|utf8}\n'
- base = 'repo: %s\nnode: %s\nbranch: %s\n' % (
- _rootctx(repo).hex(), hex, encoding.fromlocal(ctx.branch()))
+ # {tags} on ctx includes local tags and 'tip', with no current way to
+ # limit that to global tags. Therefore, use {latesttag} as a substitute
+ # when the distance is 0, since that will be the list of global tags on
+ # ctx.
+ r'{ifeq(latesttagdistance, 0, latesttag % "tag: {tag}\n",'
+ r'"{latesttag % "latesttag: {tag}\n"}'
+ r'latesttagdistance: {latesttagdistance}\n'
+ r'changessincelatesttag: {changessincelatesttag}\n")}'
+ )
- tags = ''.join('tag: %s\n' % t for t in ctx.tags()
- if repo.tagtype(t) == 'global')
- if not tags:
- repo.ui.pushbuffer()
- opts = {'template': '{latesttag}\n{latesttagdistance}\n'
- '{changessincelatesttag}',
- 'style': '', 'patch': None, 'git': None}
- cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
- ltags, dist, changessince = repo.ui.popbuffer().split('\n')
- ltags = ltags.split(':')
- tags = ''.join('latesttag: %s\n' % t for t in ltags)
- tags += 'latesttagdistance: %s\n' % dist
- tags += 'changessincelatesttag: %s\n' % changessince
+ opts = {
+ 'template': repo.ui.config('experimental', 'archivemetatemplate',
+ default)
+ }
+
+ out = util.stringio()
- return base + tags
+ fm = formatter.formatter(repo.ui, out, 'archive', opts)
+ fm.startitem()
+ fm.context(ctx=ctx)
+ fm.data(root=_rootctx(repo).hex())
+
+ if ctx.rev() is None:
+ dirty = ''
+ if ctx.dirty(missing=True):
+ dirty = '+'
+ fm.data(dirty=dirty)
+ fm.end()
+
+ return out.getvalue()
class tarit(object):
'''write archive to tar file or stream. can write uncompressed,
@@ -307,7 +319,7 @@
ctx = repo[node]
archiver = archivers[kind](dest, mtime or ctx.date()[0])
- if repo.ui.configbool("ui", "archivemeta", True):
+ if repo.ui.configbool("ui", "archivemeta"):
name = '.hg_archival.txt'
if not matchfn or matchfn(name):
write(name, 0o644, False, lambda: buildmetadata(ctx))
--- a/mercurial/base85.c Wed Jul 05 11:24:22 2017 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,182 +0,0 @@
-/*
- base85 codec
-
- Copyright 2006 Brendan Cully <brendan@kublai.com>
-
- This software may be used and distributed according to the terms of
- the GNU General Public License, incorporated herein by reference.
-
- Largely based on git's implementation
-*/
-
-#define PY_SSIZE_T_CLEAN
-#include <Python.h>
-
-#include "util.h"
-
-static const char b85chars[] = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"
- "abcdefghijklmnopqrstuvwxyz!#$%&()*+-;<=>?@^_`{|}~";
-static char b85dec[256];
-
-static void b85prep(void)
-{
- unsigned i;
-
- memset(b85dec, 0, sizeof(b85dec));
- for (i = 0; i < sizeof(b85chars); i++)
- b85dec[(int)(b85chars[i])] = i + 1;
-}
-
-static PyObject *b85encode(PyObject *self, PyObject *args)
-{
- const unsigned char *text;
- PyObject *out;
- char *dst;
- Py_ssize_t len, olen, i;
- unsigned int acc, val, ch;
- int pad = 0;
-
- if (!PyArg_ParseTuple(args, "s#|i", &text, &len, &pad))
- return NULL;
-
- if (pad)
- olen = ((len + 3) / 4 * 5) - 3;
- else {
- olen = len % 4;
- if (olen)
- olen++;
- olen += len / 4 * 5;
- }
- if (!(out = PyBytes_FromStringAndSize(NULL, olen + 3)))
- return NULL;
-
- dst = PyBytes_AsString(out);
-
- while (len) {
- acc = 0;
- for (i = 24; i >= 0; i -= 8) {
- ch = *text++;
- acc |= ch << i;
- if (--len == 0)
- break;
- }
- for (i = 4; i >= 0; i--) {
- val = acc % 85;
- acc /= 85;
- dst[i] = b85chars[val];
- }
- dst += 5;
- }
-
- if (!pad)
- _PyBytes_Resize(&out, olen);
-
- return out;
-}
-
-static PyObject *b85decode(PyObject *self, PyObject *args)
-{
- PyObject *out;
- const char *text;
- char *dst;
- Py_ssize_t len, i, j, olen, cap;
- int c;
- unsigned int acc;
-
- if (!PyArg_ParseTuple(args, "s#", &text, &len))
- return NULL;
-
- olen = len / 5 * 4;
- i = len % 5;
- if (i)
- olen += i - 1;
- if (!(out = PyBytes_FromStringAndSize(NULL, olen)))
- return NULL;
-
- dst = PyBytes_AsString(out);
-
- i = 0;
- while (i < len)
- {
- acc = 0;
- cap = len - i - 1;
- if (cap > 4)
- cap = 4;
- for (j = 0; j < cap; i++, j++)
- {
- c = b85dec[(int)*text++] - 1;
- if (c < 0)
- return PyErr_Format(
- PyExc_ValueError,
- "bad base85 character at position %d",
- (int)i);
- acc = acc * 85 + c;
- }
- if (i++ < len)
- {
- c = b85dec[(int)*text++] - 1;
- if (c < 0)
- return PyErr_Format(
- PyExc_ValueError,
- "bad base85 character at position %d",
- (int)i);
- /* overflow detection: 0xffffffff == "|NsC0",
- * "|NsC" == 0x03030303 */
- if (acc > 0x03030303 || (acc *= 85) > 0xffffffff - c)
- return PyErr_Format(
- PyExc_ValueError,
- "bad base85 sequence at position %d",
- (int)i);
- acc += c;
- }
-
- cap = olen < 4 ? olen : 4;
- olen -= cap;
- for (j = 0; j < 4 - cap; j++)
- acc *= 85;
- if (cap && cap < 4)
- acc += 0xffffff >> (cap - 1) * 8;
- for (j = 0; j < cap; j++)
- {
- acc = (acc << 8) | (acc >> 24);
- *dst++ = acc;
- }
- }
-
- return out;
-}
-
-static char base85_doc[] = "Base85 Data Encoding";
-
-static PyMethodDef methods[] = {
- {"b85encode", b85encode, METH_VARARGS,
- "Encode text in base85.\n\n"
- "If the second parameter is true, pad the result to a multiple of "
- "five characters.\n"},
- {"b85decode", b85decode, METH_VARARGS, "Decode base85 text.\n"},
- {NULL, NULL}
-};
-
-#ifdef IS_PY3K
-static struct PyModuleDef base85_module = {
- PyModuleDef_HEAD_INIT,
- "base85",
- base85_doc,
- -1,
- methods
-};
-
-PyMODINIT_FUNC PyInit_base85(void)
-{
- b85prep();
-
- return PyModule_Create(&base85_module);
-}
-#else
-PyMODINIT_FUNC initbase85(void)
-{
- Py_InitModule3("base85", methods, base85_doc);
-
- b85prep();
-}
-#endif
--- a/mercurial/bdiff_module.c Wed Jul 05 11:24:22 2017 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,213 +0,0 @@
-/*
- bdiff.c - efficient binary diff extension for Mercurial
-
- Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
-
- This software may be used and distributed according to the terms of
- the GNU General Public License, incorporated herein by reference.
-
- Based roughly on Python difflib
-*/
-
-#define PY_SSIZE_T_CLEAN
-#include <Python.h>
-#include <stdlib.h>
-#include <string.h>
-#include <limits.h>
-
-#include "bdiff.h"
-#include "bitmanipulation.h"
-#include "util.h"
-
-
-static PyObject *blocks(PyObject *self, PyObject *args)
-{
- PyObject *sa, *sb, *rl = NULL, *m;
- struct bdiff_line *a, *b;
- struct bdiff_hunk l, *h;
- int an, bn, count, pos = 0;
-
- l.next = NULL;
-
- if (!PyArg_ParseTuple(args, "SS:bdiff", &sa, &sb))
- return NULL;
-
- an = bdiff_splitlines(PyBytes_AsString(sa), PyBytes_Size(sa), &a);
- bn = bdiff_splitlines(PyBytes_AsString(sb), PyBytes_Size(sb), &b);
-
- if (!a || !b)
- goto nomem;
-
- count = bdiff_diff(a, an, b, bn, &l);
- if (count < 0)
- goto nomem;
-
- rl = PyList_New(count);
- if (!rl)
- goto nomem;
-
- for (h = l.next; h; h = h->next) {
- m = Py_BuildValue("iiii", h->a1, h->a2, h->b1, h->b2);
- PyList_SetItem(rl, pos, m);
- pos++;
- }
-
-nomem:
- free(a);
- free(b);
- bdiff_freehunks(l.next);
- return rl ? rl : PyErr_NoMemory();
-}
-
-static PyObject *bdiff(PyObject *self, PyObject *args)
-{
- char *sa, *sb, *rb, *ia, *ib;
- PyObject *result = NULL;
- struct bdiff_line *al, *bl;
- struct bdiff_hunk l, *h;
- int an, bn, count;
- Py_ssize_t len = 0, la, lb, li = 0, lcommon = 0, lmax;
- PyThreadState *_save;
-
- l.next = NULL;
-
- if (!PyArg_ParseTuple(args, "s#s#:bdiff", &sa, &la, &sb, &lb))
- return NULL;
-
- if (la > UINT_MAX || lb > UINT_MAX) {
- PyErr_SetString(PyExc_ValueError, "bdiff inputs too large");
- return NULL;
- }
-
- _save = PyEval_SaveThread();
-
- lmax = la > lb ? lb : la;
- for (ia = sa, ib = sb;
- li < lmax && *ia == *ib;
- ++li, ++ia, ++ib)
- if (*ia == '\n')
- lcommon = li + 1;
- /* we can almost add: if (li == lmax) lcommon = li; */
-
- an = bdiff_splitlines(sa + lcommon, la - lcommon, &al);
- bn = bdiff_splitlines(sb + lcommon, lb - lcommon, &bl);
- if (!al || !bl)
- goto nomem;
-
- count = bdiff_diff(al, an, bl, bn, &l);
- if (count < 0)
- goto nomem;
-
- /* calculate length of output */
- la = lb = 0;
- for (h = l.next; h; h = h->next) {
- if (h->a1 != la || h->b1 != lb)
- len += 12 + bl[h->b1].l - bl[lb].l;
- la = h->a2;
- lb = h->b2;
- }
- PyEval_RestoreThread(_save);
- _save = NULL;
-
- result = PyBytes_FromStringAndSize(NULL, len);
-
- if (!result)
- goto nomem;
-
- /* build binary patch */
- rb = PyBytes_AsString(result);
- la = lb = 0;
-
- for (h = l.next; h; h = h->next) {
- if (h->a1 != la || h->b1 != lb) {
- len = bl[h->b1].l - bl[lb].l;
- putbe32((uint32_t)(al[la].l + lcommon - al->l), rb);
- putbe32((uint32_t)(al[h->a1].l + lcommon - al->l), rb + 4);
- putbe32((uint32_t)len, rb + 8);
- memcpy(rb + 12, bl[lb].l, len);
- rb += 12 + len;
- }
- la = h->a2;
- lb = h->b2;
- }
-
-nomem:
- if (_save)
- PyEval_RestoreThread(_save);
- free(al);
- free(bl);
- bdiff_freehunks(l.next);
- return result ? result : PyErr_NoMemory();
-}
-
-/*
- * If allws != 0, remove all whitespace (' ', \t and \r). Otherwise,
- * reduce whitespace sequences to a single space and trim remaining whitespace
- * from end of lines.
- */
-static PyObject *fixws(PyObject *self, PyObject *args)
-{
- PyObject *s, *result = NULL;
- char allws, c;
- const char *r;
- Py_ssize_t i, rlen, wlen = 0;
- char *w;
-
- if (!PyArg_ParseTuple(args, "Sb:fixws", &s, &allws))
- return NULL;
- r = PyBytes_AsString(s);
- rlen = PyBytes_Size(s);
-
- w = (char *)PyMem_Malloc(rlen ? rlen : 1);
- if (!w)
- goto nomem;
-
- for (i = 0; i != rlen; i++) {
- c = r[i];
- if (c == ' ' || c == '\t' || c == '\r') {
- if (!allws && (wlen == 0 || w[wlen - 1] != ' '))
- w[wlen++] = ' ';
- } else if (c == '\n' && !allws
- && wlen > 0 && w[wlen - 1] == ' ') {
- w[wlen - 1] = '\n';
- } else {
- w[wlen++] = c;
- }
- }
-
- result = PyBytes_FromStringAndSize(w, wlen);
-
-nomem:
- PyMem_Free(w);
- return result ? result : PyErr_NoMemory();
-}
-
-
-static char mdiff_doc[] = "Efficient binary diff.";
-
-static PyMethodDef methods[] = {
- {"bdiff", bdiff, METH_VARARGS, "calculate a binary diff\n"},
- {"blocks", blocks, METH_VARARGS, "find a list of matching lines\n"},
- {"fixws", fixws, METH_VARARGS, "normalize diff whitespaces\n"},
- {NULL, NULL}
-};
-
-#ifdef IS_PY3K
-static struct PyModuleDef bdiff_module = {
- PyModuleDef_HEAD_INIT,
- "bdiff",
- mdiff_doc,
- -1,
- methods
-};
-
-PyMODINIT_FUNC PyInit_bdiff(void)
-{
- return PyModule_Create(&bdiff_module);
-}
-#else
-PyMODINIT_FUNC initbdiff(void)
-{
- Py_InitModule3("bdiff", methods, mdiff_doc);
-}
-#endif
--- a/mercurial/bitmanipulation.h Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/bitmanipulation.h Wed Jul 19 07:51:41 2017 -0500
@@ -1,6 +1,8 @@
#ifndef _HG_BITMANIPULATION_H_
#define _HG_BITMANIPULATION_H_
+#include <string.h>
+
#include "compat.h"
static inline uint32_t getbe32(const char *c)
--- a/mercurial/bookmarks.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/bookmarks.py Wed Jul 19 07:51:41 2017 -0500
@@ -13,16 +13,25 @@
from .node import (
bin,
hex,
+ short,
)
from . import (
encoding,
error,
lock as lockmod,
- obsolete,
+ obsutil,
+ pycompat,
+ scmutil,
txnutil,
util,
)
+# label constants
+# until 3.5, bookmarks.current was the advertised name, not
+# bookmarks.active, so we must use both to avoid breaking old
+# custom styles
+activebookmarklabel = 'bookmarks.active bookmarks.current'
+
def _getbkfile(repo):
"""Hook so that extensions that mess with the store can hook bm storage.
@@ -50,28 +59,35 @@
def __init__(self, repo):
dict.__init__(self)
self._repo = repo
+ self._clean = True
+ self._aclean = True
+ nm = repo.changelog.nodemap
+ tonode = bin # force local lookup
+ setitem = dict.__setitem__
try:
- bkfile = _getbkfile(repo)
- for line in bkfile:
- line = line.strip()
- if not line:
- continue
- if ' ' not in line:
- repo.ui.warn(_('malformed line in .hg/bookmarks: %r\n')
- % line)
- continue
- sha, refspec = line.split(' ', 1)
- refspec = encoding.tolocal(refspec)
- try:
- self[refspec] = repo.changelog.lookup(sha)
- except LookupError:
- pass
+ with _getbkfile(repo) as bkfile:
+ for line in bkfile:
+ line = line.strip()
+ if not line:
+ continue
+ try:
+ sha, refspec = line.split(' ', 1)
+ node = tonode(sha)
+ if node in nm:
+ refspec = encoding.tolocal(refspec)
+ setitem(self, refspec, node)
+ except (TypeError, ValueError):
+ # TypeError:
+ # - bin(...)
+ # ValueError:
+ # - node in nm, for non-20-bytes entry
+ # - split(...), for string without ' '
+ repo.ui.warn(_('malformed line in .hg/bookmarks: %r\n')
+ % line)
except IOError as inst:
if inst.errno != errno.ENOENT:
raise
- self._clean = True
self._active = _readactive(repo, self)
- self._aclean = True
@property
def active(self):
@@ -86,14 +102,50 @@
self._aclean = False
def __setitem__(self, *args, **kwargs):
+ msg = ("'bookmarks[name] = node' is deprecated, "
+ "use 'bookmarks.applychanges'")
+ self._repo.ui.deprecwarn(msg, '4.3')
+ self._set(*args, **kwargs)
+
+ def _set(self, key, value):
self._clean = False
- return dict.__setitem__(self, *args, **kwargs)
+ return dict.__setitem__(self, key, value)
def __delitem__(self, key):
+ msg = ("'del bookmarks[name]' is deprecated, "
+ "use 'bookmarks.applychanges'")
+ self._repo.ui.deprecwarn(msg, '4.3')
+ self._del(key)
+
+ def _del(self, key):
self._clean = False
return dict.__delitem__(self, key)
+ def applychanges(self, repo, tr, changes):
+ """Apply a list of changes to bookmarks
+ """
+ bmchanges = tr.changes.get('bookmarks')
+ for name, node in changes:
+ old = self.get(name)
+ if node is None:
+ self._del(name)
+ else:
+ self._set(name, node)
+ if bmchanges is not None:
+ # if a previous value exist preserve the "initial" value
+ previous = bmchanges.get(name)
+ if previous is not None:
+ old = previous[0]
+ bmchanges[name] = (old, node)
+ self._recordchange(tr)
+
def recordchange(self, tr):
+ msg = ("'bookmarks.recorchange' is deprecated, "
+ "use 'bookmarks.applychanges'")
+ self._repo.ui.deprecwarn(msg, '4.3')
+ return self._recordchange(tr)
+
+ def _recordchange(self, tr):
"""record that bookmarks have been changed in a transaction
The transaction is then responsible for updating the file content."""
@@ -148,6 +200,63 @@
raise error.Abort(_("no active bookmark"))
return bname
+ def checkconflict(self, mark, force=False, target=None):
+ """check repo for a potential clash of mark with an existing bookmark,
+ branch, or hash
+
+ If target is supplied, then check that we are moving the bookmark
+ forward.
+
+ If force is supplied, then forcibly move the bookmark to a new commit
+ regardless if it is a move forward.
+
+ If divergent bookmark are to be deleted, they will be returned as list.
+ """
+ cur = self._repo.changectx('.').node()
+ if mark in self and not force:
+ if target:
+ if self[mark] == target and target == cur:
+ # re-activating a bookmark
+ return []
+ rev = self._repo[target].rev()
+ anc = self._repo.changelog.ancestors([rev])
+ bmctx = self._repo[self[mark]]
+ divs = [self._repo[b].node() for b in self
+ if b.split('@', 1)[0] == mark.split('@', 1)[0]]
+
+ # allow resolving a single divergent bookmark even if moving
+ # the bookmark across branches when a revision is specified
+ # that contains a divergent bookmark
+ if bmctx.rev() not in anc and target in divs:
+ return divergent2delete(self._repo, [target], mark)
+
+ deletefrom = [b for b in divs
+ if self._repo[b].rev() in anc or b == target]
+ delbms = divergent2delete(self._repo, deletefrom, mark)
+ if validdest(self._repo, bmctx, self._repo[target]):
+ self._repo.ui.status(
+ _("moving bookmark '%s' forward from %s\n") %
+ (mark, short(bmctx.node())))
+ return delbms
+ raise error.Abort(_("bookmark '%s' already exists "
+ "(use -f to force)") % mark)
+ if ((mark in self._repo.branchmap() or
+ mark == self._repo.dirstate.branch()) and not force):
+ raise error.Abort(
+ _("a bookmark cannot have the name of an existing branch"))
+ if len(mark) > 3 and not force:
+ try:
+ shadowhash = (mark in self._repo)
+ except error.LookupError: # ambiguous identifier
+ shadowhash = False
+ if shadowhash:
+ self._repo.ui.warn(
+ _("bookmark %s matches a changeset hash\n"
+ "(did you leave a -r out of an 'hg bookmark' "
+ "command?)\n")
+ % mark)
+ return []
+
def _readactive(repo, marks):
"""
Get the active bookmark. We can have an active bookmark that updates
@@ -209,11 +318,11 @@
parents = [p.node() for p in repo[None].parents()]
return (mark in marks and marks[mark] in parents)
-def deletedivergent(repo, deletefrom, bm):
- '''Delete divergent versions of bm on nodes in deletefrom.
+def divergent2delete(repo, deletefrom, bm):
+ """find divergent versions of bm on nodes in deletefrom.
- Return True if at least one bookmark was deleted, False otherwise.'''
- deleted = False
+ the list of bookmark to delete."""
+ todelete = []
marks = repo._bookmarks
divergent = [b for b in marks if b.split('@', 1)[0] == bm.split('@', 1)[0]]
for mark in divergent:
@@ -222,9 +331,30 @@
continue
if mark and marks[mark] in deletefrom:
if mark != bm:
- del marks[mark]
- deleted = True
- return deleted
+ todelete.append(mark)
+ return todelete
+
+def headsforactive(repo):
+ """Given a repo with an active bookmark, return divergent bookmark nodes.
+
+ Args:
+ repo: A repository with an active bookmark.
+
+ Returns:
+ A list of binary node ids that is the full list of other
+ revisions with bookmarks divergent from the active bookmark. If
+ there were no divergent bookmarks, then this list will contain
+ only one entry.
+ """
+ if not repo._activebookmark:
+ raise ValueError(
+ 'headsforactive() only makes sense with an active bookmark')
+ name = repo._activebookmark.split('@', 1)[0]
+ heads = []
+ for mark, n in repo._bookmarks.iteritems():
+ if mark.split('@', 1)[0] == name:
+ heads.append(n)
+ return heads
def calculateupdate(ui, repo, checkout):
'''Return a tuple (targetrev, movemarkfrom) indicating the rev to
@@ -242,11 +372,11 @@
def update(repo, parents, node):
deletefrom = parents
marks = repo._bookmarks
- update = False
active = marks.active
if not active:
return False
+ bmchanges = []
if marks[active] in parents:
new = repo[node]
divs = [repo[b] for b in marks
@@ -254,22 +384,21 @@
anc = repo.changelog.ancestors([new.rev()])
deletefrom = [b.node() for b in divs if b.rev() in anc or b == new]
if validdest(repo, repo[marks[active]], new):
- marks[active] = new.node()
- update = True
+ bmchanges.append((active, new.node()))
- if deletedivergent(repo, deletefrom, active):
- update = True
+ for bm in divergent2delete(repo, deletefrom, active):
+ bmchanges.append((bm, None))
- if update:
+ if bmchanges:
lock = tr = None
try:
lock = repo.lock()
tr = repo.transaction('bookmark')
- marks.recordchange(tr)
+ marks.applychanges(repo, tr, bmchanges)
tr.close()
finally:
lockmod.release(tr, lock)
- return update
+ return bool(bmchanges)
def listbinbookmarks(repo):
# We may try to list bookmarks on a repo type that does not
@@ -299,12 +428,12 @@
if existing != old and existing != new:
return False
if new == '':
- del marks[key]
+ changes = [(key, None)]
else:
if new not in repo:
return False
- marks[key] = repo[new].node()
- marks.recordchange(tr)
+ changes = [(key, repo[new].node())]
+ marks.applychanges(repo, tr, changes)
tr.close()
return True
finally:
@@ -429,7 +558,7 @@
status = ui.status
warn = ui.warn
- if ui.configbool('ui', 'quietbookmarkmove', False):
+ if ui.configbool('ui', 'quietbookmarkmove'):
status = warn = ui.debug
explicit = set(explicit)
@@ -476,10 +605,11 @@
if changed:
tr = trfunc()
+ changes = []
for b, node, writer, msg in sorted(changed):
- localmarks[b] = node
+ changes.append((b, node))
writer(msg)
- localmarks.recordchange(tr)
+ localmarks.applychanges(repo, tr, changes)
def incoming(ui, repo, other):
'''Show bookmarks incoming from other to repo
@@ -589,7 +719,130 @@
# (new != nullrev has been excluded by the previous check)
return True
elif repo.obsstore:
- return new.node() in obsolete.foreground(repo, [old.node()])
+ return new.node() in obsutil.foreground(repo, [old.node()])
else:
# still an independent clause as it is lazier (and therefore faster)
return old.descendant(new)
+
+def checkformat(repo, mark):
+ """return a valid version of a potential bookmark name
+
+ Raises an abort error if the bookmark name is not valid.
+ """
+ mark = mark.strip()
+ if not mark:
+ raise error.Abort(_("bookmark names cannot consist entirely of "
+ "whitespace"))
+ scmutil.checknewlabel(repo, mark, 'bookmark')
+ return mark
+
+def delete(repo, tr, names):
+ """remove a mark from the bookmark store
+
+ Raises an abort error if mark does not exist.
+ """
+ marks = repo._bookmarks
+ changes = []
+ for mark in names:
+ if mark not in marks:
+ raise error.Abort(_("bookmark '%s' does not exist") % mark)
+ if mark == repo._activebookmark:
+ deactivate(repo)
+ changes.append((mark, None))
+ marks.applychanges(repo, tr, changes)
+
+def rename(repo, tr, old, new, force=False, inactive=False):
+ """rename a bookmark from old to new
+
+ If force is specified, then the new name can overwrite an existing
+ bookmark.
+
+ If inactive is specified, then do not activate the new bookmark.
+
+ Raises an abort error if old is not in the bookmark store.
+ """
+ marks = repo._bookmarks
+ mark = checkformat(repo, new)
+ if old not in marks:
+ raise error.Abort(_("bookmark '%s' does not exist") % old)
+ changes = []
+ for bm in marks.checkconflict(mark, force):
+ changes.append((bm, None))
+ changes.extend([(mark, marks[old]), (old, None)])
+ marks.applychanges(repo, tr, changes)
+ if repo._activebookmark == old and not inactive:
+ activate(repo, mark)
+
+def addbookmarks(repo, tr, names, rev=None, force=False, inactive=False):
+ """add a list of bookmarks
+
+ If force is specified, then the new name can overwrite an existing
+ bookmark.
+
+ If inactive is specified, then do not activate any bookmark. Otherwise, the
+ first bookmark is activated.
+
+ Raises an abort error if old is not in the bookmark store.
+ """
+ marks = repo._bookmarks
+ cur = repo.changectx('.').node()
+ newact = None
+ changes = []
+ for mark in names:
+ mark = checkformat(repo, mark)
+ if newact is None:
+ newact = mark
+ if inactive and mark == repo._activebookmark:
+ deactivate(repo)
+ return
+ tgt = cur
+ if rev:
+ tgt = scmutil.revsingle(repo, rev).node()
+ for bm in marks.checkconflict(mark, force, tgt):
+ changes.append((bm, None))
+ changes.append((mark, tgt))
+ marks.applychanges(repo, tr, changes)
+ if not inactive and cur == marks[newact] and not rev:
+ activate(repo, newact)
+ elif cur != tgt and newact == repo._activebookmark:
+ deactivate(repo)
+
+def _printbookmarks(ui, repo, bmarks, **opts):
+ """private method to print bookmarks
+
+ Provides a way for extensions to control how bookmarks are printed (e.g.
+ prepend or postpend names)
+ """
+ opts = pycompat.byteskwargs(opts)
+ fm = ui.formatter('bookmarks', opts)
+ hexfn = fm.hexfunc
+ if len(bmarks) == 0 and fm.isplain():
+ ui.status(_("no bookmarks set\n"))
+ for bmark, (n, prefix, label) in sorted(bmarks.iteritems()):
+ fm.startitem()
+ if not ui.quiet:
+ fm.plain(' %s ' % prefix, label=label)
+ fm.write('bookmark', '%s', bmark, label=label)
+ pad = " " * (25 - encoding.colwidth(bmark))
+ fm.condwrite(not ui.quiet, 'rev node', pad + ' %d:%s',
+ repo.changelog.rev(n), hexfn(n), label=label)
+ fm.data(active=(activebookmarklabel in label))
+ fm.plain('\n')
+ fm.end()
+
+def printbookmarks(ui, repo, **opts):
+ """print bookmarks to a formatter
+
+ Provides a way for extensions to control how bookmarks are printed.
+ """
+ marks = repo._bookmarks
+ bmarks = {}
+ for bmark, n in sorted(marks.iteritems()):
+ active = repo._activebookmark
+ if bmark == active:
+ prefix, label = '*', activebookmarklabel
+ else:
+ prefix, label = ' ', ''
+
+ bmarks[bmark] = (n, prefix, label)
+ _printbookmarks(ui, repo, bmarks, **opts)
--- a/mercurial/branchmap.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/branchmap.py Wed Jul 19 07:51:41 2017 -0500
@@ -28,14 +28,14 @@
def _filename(repo):
"""name of a branchcache file for a given repo or repoview"""
- filename = "cache/branch2"
+ filename = "branch2"
if repo.filtername:
filename = '%s-%s' % (filename, repo.filtername)
return filename
def read(repo):
try:
- f = repo.vfs(_filename(repo))
+ f = repo.cachevfs(_filename(repo))
lines = f.read().split('\n')
f.close()
except (IOError, OSError):
@@ -67,8 +67,6 @@
partial.setdefault(label, []).append(node)
if state == 'c':
partial._closednodes.add(node)
- except KeyboardInterrupt:
- raise
except Exception as inst:
if repo.ui.debugflag:
msg = 'invalid branchheads cache'
@@ -230,7 +228,7 @@
def write(self, repo):
try:
- f = repo.vfs(_filename(repo), "w", atomictemp=True)
+ f = repo.cachevfs(_filename(repo), "w", atomictemp=True)
cachekey = [hex(self.tipnode), '%d' % self.tiprev]
if self.filteredhash is not None:
cachekey.append(hex(self.filteredhash))
@@ -319,8 +317,8 @@
# Revision branch info cache
_rbcversion = '-v1'
-_rbcnames = 'cache/rbc-names' + _rbcversion
-_rbcrevs = 'cache/rbc-revs' + _rbcversion
+_rbcnames = 'rbc-names' + _rbcversion
+_rbcrevs = 'rbc-revs' + _rbcversion
# [4 byte hash prefix][4 byte branch name number with sign bit indicating open]
_rbcrecfmt = '>4sI'
_rbcrecsize = calcsize(_rbcrecfmt)
@@ -358,7 +356,7 @@
self._rbcrevs = bytearray()
self._rbcsnameslen = 0 # length of names read at _rbcsnameslen
try:
- bndata = repo.vfs.read(_rbcnames)
+ bndata = repo.cachevfs.read(_rbcnames)
self._rbcsnameslen = len(bndata) # for verification before writing
if bndata:
self._names = [encoding.tolocal(bn)
@@ -370,7 +368,7 @@
if self._names:
try:
- data = repo.vfs.read(_rbcrevs)
+ data = repo.cachevfs.read(_rbcrevs)
self._rbcrevs[:] = data
except (IOError, OSError) as inst:
repo.ui.debug("couldn't read revision branch cache: %s\n" %
@@ -408,8 +406,7 @@
# fast path: extract data from cache, use it if node is matching
reponode = changelog.node(rev)[:_rbcnodelen]
- cachenode, branchidx = unpack_from(
- _rbcrecfmt, util.buffer(self._rbcrevs), rbcrevidx)
+ cachenode, branchidx = unpack_from(_rbcrecfmt, self._rbcrevs, rbcrevidx)
close = bool(branchidx & _rbccloseflag)
if close:
branchidx &= _rbcbranchidxmask
@@ -476,7 +473,7 @@
step = ' names'
wlock = repo.wlock(wait=False)
if self._rbcnamescount != 0:
- f = repo.vfs.open(_rbcnames, 'ab')
+ f = repo.cachevfs.open(_rbcnames, 'ab')
if f.tell() == self._rbcsnameslen:
f.write('\0')
else:
@@ -486,8 +483,8 @@
self._rbcrevslen = 0
if self._rbcnamescount == 0:
# before rewriting names, make sure references are removed
- repo.vfs.unlinkpath(_rbcrevs, ignoremissing=True)
- f = repo.vfs.open(_rbcnames, 'wb')
+ repo.cachevfs.unlinkpath(_rbcrevs, ignoremissing=True)
+ f = repo.cachevfs.open(_rbcnames, 'wb')
f.write('\0'.join(encoding.fromlocal(b)
for b in self._names[self._rbcnamescount:]))
self._rbcsnameslen = f.tell()
@@ -501,9 +498,10 @@
wlock = repo.wlock(wait=False)
revs = min(len(repo.changelog),
len(self._rbcrevs) // _rbcrecsize)
- f = repo.vfs.open(_rbcrevs, 'ab')
+ f = repo.cachevfs.open(_rbcrevs, 'ab')
if f.tell() != start:
- repo.ui.debug("truncating %s to %d\n" % (_rbcrevs, start))
+ repo.ui.debug("truncating cache/%s to %d\n"
+ % (_rbcrevs, start))
f.seek(start)
if f.tell() != start:
start = 0
--- a/mercurial/bundle2.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/bundle2.py Wed Jul 19 07:51:41 2017 -0500
@@ -158,6 +158,7 @@
changegroup,
error,
obsolete,
+ phases,
pushkey,
pycompat,
tags,
@@ -178,18 +179,20 @@
_fpayloadsize = '>i'
_fpartparamcount = '>BB'
+_fphasesentry = '>i20s'
+
preferedchunksize = 4096
_parttypeforbidden = re.compile('[^a-zA-Z0-9_:-]')
def outdebug(ui, message):
"""debug regarding output stream (bundling)"""
- if ui.configbool('devel', 'bundle2.debug', False):
+ if ui.configbool('devel', 'bundle2.debug'):
ui.debug('bundle2-output: %s\n' % message)
def indebug(ui, message):
"""debug on input stream (unbundling)"""
- if ui.configbool('devel', 'bundle2.debug', False):
+ if ui.configbool('devel', 'bundle2.debug'):
ui.debug('bundle2-input: %s\n' % message)
def validateparttype(parttype):
@@ -307,14 +310,20 @@
to be created"""
raise TransactionUnavailable()
-def applybundle(repo, unbundler, tr, source=None, url=None, op=None):
+def applybundle(repo, unbundler, tr, source=None, url=None, **kwargs):
# transform me into unbundler.apply() as soon as the freeze is lifted
- tr.hookargs['bundle2'] = '1'
- if source is not None and 'source' not in tr.hookargs:
- tr.hookargs['source'] = source
- if url is not None and 'url' not in tr.hookargs:
- tr.hookargs['url'] = url
- return processbundle(repo, unbundler, lambda: tr, op=op)
+ if isinstance(unbundler, unbundle20):
+ tr.hookargs['bundle2'] = '1'
+ if source is not None and 'source' not in tr.hookargs:
+ tr.hookargs['source'] = source
+ if url is not None and 'url' not in tr.hookargs:
+ tr.hookargs['url'] = url
+ return processbundle(repo, unbundler, lambda: tr)
+ else:
+ # the transactiongetter won't be used, but we might as well set it
+ op = bundleoperation(repo, lambda: tr)
+ _processchangegroup(op, unbundler, tr, source, url, **kwargs)
+ return op
def processbundle(repo, unbundler, transactiongetter=None, op=None):
"""This function process a bundle, apply effect to/from a repo
@@ -340,8 +349,8 @@
if repo.ui.debugflag:
msg = ['bundle2-input-bundle:']
if unbundler.params:
- msg.append(' %i params')
- if op.gettransaction is None:
+ msg.append(' %i params' % len(unbundler.params))
+ if op.gettransaction is None or op.gettransaction is _notransaction:
msg.append(' no-transaction')
else:
msg.append(' with-transaction')
@@ -392,6 +401,13 @@
return op
+def _processchangegroup(op, cg, tr, source, url, **kwargs):
+ ret = cg.apply(op.repo, tr, source, url, **kwargs)
+ op.records.add('changegroup', {
+ 'return': ret,
+ })
+ return ret
+
def _processpart(op, part):
"""process a single part from a bundle
@@ -661,6 +677,9 @@
magicstring = changegroup.readexactly(fp, 4)
magic, version = magicstring[0:2], magicstring[2:4]
if magic != 'HG':
+ ui.debug(
+ "error: invalid magic: %r (version %r), should be 'HG'\n"
+ % (magic, version))
raise error.Abort(_('not a Mercurial bundle'))
unbundlerclass = formatmap.get(version)
if unbundlerclass is None:
@@ -1005,7 +1024,7 @@
# backup exception data for later
ui.debug('bundle2-input-stream-interrupt: encoding exception %s'
% exc)
- exc_info = sys.exc_info()
+ tb = sys.exc_info()[2]
msg = 'unexpected error: %s' % exc
interpart = bundlepart('error:abort', [('message', msg)],
mandatory=False)
@@ -1016,10 +1035,7 @@
outdebug(ui, 'closing payload chunk')
# abort current part payload
yield _pack(_fpayloadsize, 0)
- if pycompat.ispy3:
- raise exc_info[0](exc_info[1]).with_traceback(exc_info[2])
- else:
- exec("""raise exc_info[0], exc_info[1], exc_info[2]""")
+ pycompat.raisewithtb(exc, tb)
# end of payload
outdebug(ui, 'closing payload chunk')
yield _pack(_fpayloadsize, 0)
@@ -1326,6 +1342,9 @@
caps['obsmarkers'] = supportedformat
if allowpushback:
caps['pushback'] = ()
+ cpmode = repo.ui.config('server', 'concurrent-push-mode')
+ if cpmode == 'check-related':
+ caps['checkheads'] = ('related',)
return caps
def bundle2caps(remote):
@@ -1342,6 +1361,102 @@
obscaps = caps.get('obsmarkers', ())
return [int(c[1:]) for c in obscaps if c.startswith('V')]
+def writenewbundle(ui, repo, source, filename, bundletype, outgoing, opts,
+ vfs=None, compression=None, compopts=None):
+ if bundletype.startswith('HG10'):
+ cg = changegroup.getchangegroup(repo, source, outgoing, version='01')
+ return writebundle(ui, cg, filename, bundletype, vfs=vfs,
+ compression=compression, compopts=compopts)
+ elif not bundletype.startswith('HG20'):
+ raise error.ProgrammingError('unknown bundle type: %s' % bundletype)
+
+ caps = {}
+ if 'obsolescence' in opts:
+ caps['obsmarkers'] = ('V1',)
+ bundle = bundle20(ui, caps)
+ bundle.setcompression(compression, compopts)
+ _addpartsfromopts(ui, repo, bundle, source, outgoing, opts)
+ chunkiter = bundle.getchunks()
+
+ return changegroup.writechunks(ui, chunkiter, filename, vfs=vfs)
+
+def _addpartsfromopts(ui, repo, bundler, source, outgoing, opts):
+ # We should eventually reconcile this logic with the one behind
+ # 'exchange.getbundle2partsgenerator'.
+ #
+ # The type of input from 'getbundle' and 'writenewbundle' are a bit
+ # different right now. So we keep them separated for now for the sake of
+ # simplicity.
+
+ # we always want a changegroup in such bundle
+ cgversion = opts.get('cg.version')
+ if cgversion is None:
+ cgversion = changegroup.safeversion(repo)
+ cg = changegroup.getchangegroup(repo, source, outgoing,
+ version=cgversion)
+ part = bundler.newpart('changegroup', data=cg.getchunks())
+ part.addparam('version', cg.version)
+ if 'clcount' in cg.extras:
+ part.addparam('nbchanges', str(cg.extras['clcount']),
+ mandatory=False)
+ if opts.get('phases') and repo.revs('%ln and secret()',
+ outgoing.missingheads):
+ part.addparam('targetphase', '%d' % phases.secret, mandatory=False)
+
+ addparttagsfnodescache(repo, bundler, outgoing)
+
+ if opts.get('obsolescence', False):
+ obsmarkers = repo.obsstore.relevantmarkers(outgoing.missing)
+ buildobsmarkerspart(bundler, obsmarkers)
+
+ if opts.get('phases', False):
+ headsbyphase = phases.subsetphaseheads(repo, outgoing.missing)
+ phasedata = []
+ for phase in phases.allphases:
+ for head in headsbyphase[phase]:
+ phasedata.append(_pack(_fphasesentry, phase, head))
+ bundler.newpart('phase-heads', data=''.join(phasedata))
+
+def addparttagsfnodescache(repo, bundler, outgoing):
+ # we include the tags fnode cache for the bundle changeset
+ # (as an optional parts)
+ cache = tags.hgtagsfnodescache(repo.unfiltered())
+ chunks = []
+
+ # .hgtags fnodes are only relevant for head changesets. While we could
+ # transfer values for all known nodes, there will likely be little to
+ # no benefit.
+ #
+ # We don't bother using a generator to produce output data because
+ # a) we only have 40 bytes per head and even esoteric numbers of heads
+ # consume little memory (1M heads is 40MB) b) we don't want to send the
+ # part if we don't have entries and knowing if we have entries requires
+ # cache lookups.
+ for node in outgoing.missingheads:
+ # Don't compute missing, as this may slow down serving.
+ fnode = cache.getfnode(node, computemissing=False)
+ if fnode is not None:
+ chunks.extend([node, fnode])
+
+ if chunks:
+ bundler.newpart('hgtagsfnodes', data=''.join(chunks))
+
+def buildobsmarkerspart(bundler, markers):
+ """add an obsmarker part to the bundler with <markers>
+
+ No part is created if markers is empty.
+ Raises ValueError if the bundler doesn't support any known obsmarker format.
+ """
+ if not markers:
+ return None
+
+ remoteversions = obsmarkersversion(bundler.capabilities)
+ version = obsolete.commonversion(remoteversions)
+ if version is None:
+ raise ValueError('bundler does not support common obsmarker format')
+ stream = obsolete.encodemarkers(markers, True, version=version)
+ return bundler.newpart('obsmarkers', data=stream)
+
def writebundle(ui, cg, filename, bundletype, vfs=None, compression=None,
compopts=None):
"""Write a bundle file and return its filename.
@@ -1382,19 +1497,36 @@
# in case of sshrepo because we don't know the end of the stream
return changegroup.writechunks(ui, chunkiter, filename, vfs=vfs)
-@parthandler('changegroup', ('version', 'nbchanges', 'treemanifest'))
+def combinechangegroupresults(op):
+ """logic to combine 0 or more addchangegroup results into one"""
+ results = [r.get('return', 0)
+ for r in op.records['changegroup']]
+ changedheads = 0
+ result = 1
+ for ret in results:
+ # If any changegroup result is 0, return 0
+ if ret == 0:
+ result = 0
+ break
+ if ret < -1:
+ changedheads += ret + 1
+ elif ret > 1:
+ changedheads += ret - 1
+ if changedheads > 0:
+ result = 1 + changedheads
+ elif changedheads < 0:
+ result = -1 + changedheads
+ return result
+
+@parthandler('changegroup', ('version', 'nbchanges', 'treemanifest',
+ 'targetphase'))
def handlechangegroup(op, inpart):
"""apply a changegroup part on the repo
This is a very early implementation that will massive rework before being
inflicted to any end-user.
"""
- # Make sure we trigger a transaction creation
- #
- # The addchangegroup function will get a transaction object by itself, but
- # we need to make sure we trigger the creation of a transaction object used
- # for the whole processing scope.
- op.gettransaction()
+ tr = op.gettransaction()
unpackerversion = inpart.params.get('version', '01')
# We should raise an appropriate exception here
cg = changegroup.getunbundler(unpackerversion, inpart, None)
@@ -1412,8 +1544,12 @@
op.repo.requirements.add('treemanifest')
op.repo._applyopenerreqs()
op.repo._writerequirements()
- ret = cg.apply(op.repo, 'bundle2', 'bundle2', expectedtotal=nbchangesets)
- op.records.add('changegroup', {'return': ret})
+ extrakwargs = {}
+ targetphase = inpart.params.get('targetphase')
+ if targetphase is not None:
+ extrakwargs['targetphase'] = int(targetphase)
+ ret = _processchangegroup(op, cg, tr, 'bundle2', 'bundle2',
+ expectedtotal=nbchangesets, **extrakwargs)
if op.reply is not None:
# This is definitely not the final form of this
# return. But one need to start somewhere.
@@ -1470,19 +1606,13 @@
real_part = util.digestchecker(url.open(op.ui, raw_url), size, digests)
- # Make sure we trigger a transaction creation
- #
- # The addchangegroup function will get a transaction object by itself, but
- # we need to make sure we trigger the creation of a transaction object used
- # for the whole processing scope.
- op.gettransaction()
+ tr = op.gettransaction()
from . import exchange
cg = exchange.readbundle(op.repo.ui, real_part, raw_url)
if not isinstance(cg, changegroup.cg1unpacker):
raise error.Abort(_('%s: not a bundle version 1.0') %
util.hidepassword(raw_url))
- ret = cg.apply(op.repo, 'bundle2', 'bundle2')
- op.records.add('changegroup', {'return': ret})
+ ret = _processchangegroup(op, cg, tr, 'bundle2', 'bundle2')
if op.reply is not None:
# This is definitely not the final form of this
# return. But one need to start somewhere.
@@ -1521,6 +1651,35 @@
raise error.PushRaced('repository changed while pushing - '
'please try again')
+@parthandler('check:updated-heads')
+def handlecheckupdatedheads(op, inpart):
+ """check for race on the heads touched by a push
+
+    This is similar to 'check:heads' but focuses on the heads actually updated
+ during the push. If other activities happen on unrelated heads, it is
+ ignored.
+
+    This allows servers with high traffic to avoid push contention as long as
+ unrelated parts of the graph are involved."""
+ h = inpart.read(20)
+ heads = []
+ while len(h) == 20:
+ heads.append(h)
+ h = inpart.read(20)
+ assert not h
+ # trigger a transaction so that we are guaranteed to have the lock now.
+ if op.ui.configbool('experimental', 'bundle2lazylocking'):
+ op.gettransaction()
+
+ currentheads = set()
+ for ls in op.repo.branchmap().itervalues():
+ currentheads.update(ls)
+
+ for h in heads:
+ if h not in currentheads:
+ raise error.PushRaced('repository changed while pushing - '
+ 'please try again')
+
@parthandler('output')
def handleoutput(op, inpart):
"""forward output captured on the server to the client"""
@@ -1610,6 +1769,25 @@
kwargs[key] = inpart.params[key]
raise error.PushkeyFailed(partid=str(inpart.id), **kwargs)
+def _readphaseheads(inpart):
+ headsbyphase = [[] for i in phases.allphases]
+ entrysize = struct.calcsize(_fphasesentry)
+ while True:
+ entry = inpart.read(entrysize)
+ if len(entry) < entrysize:
+ if entry:
+ raise error.Abort(_('bad phase-heads bundle part'))
+ break
+ phase, node = struct.unpack(_fphasesentry, entry)
+ headsbyphase[phase].append(node)
+ return headsbyphase
+
+@parthandler('phase-heads')
+def handlephases(op, inpart):
+ """apply phases from bundle part to repo"""
+ headsbyphase = _readphaseheads(inpart)
+ phases.updatephases(op.repo.unfiltered(), op.gettransaction(), headsbyphase)
+
@parthandler('reply:pushkey', ('return', 'in-reply-to'))
def handlepushkeyreply(op, inpart):
"""retrieve the result of a pushkey request"""
@@ -1622,7 +1800,7 @@
"""add a stream of obsmarkers to the repo"""
tr = op.gettransaction()
markerdata = inpart.read()
- if op.ui.config('experimental', 'obsmarkers-exchange-debug', False):
+ if op.ui.config('experimental', 'obsmarkers-exchange-debug'):
op.ui.write(('obsmarker-exchange: %i bytes received\n')
% len(markerdata))
# The mergemarkers call will crash if marker creation is not enabled.
--- a/mercurial/bundlerepo.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/bundlerepo.py Wed Jul 19 07:51:41 2017 -0500
@@ -414,7 +414,7 @@
if create:
raise error.Abort(_('cannot create new bundle repository'))
# internal config: bundle.mainreporoot
- parentpath = ui.config("bundle", "mainreporoot", "")
+ parentpath = ui.config("bundle", "mainreporoot")
if not parentpath:
# try to find the correct path to the working directory repo
parentpath = cmdutil.findrepo(pycompat.getcwd())
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cext/base85.c Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,189 @@
+/*
+ base85 codec
+
+ Copyright 2006 Brendan Cully <brendan@kublai.com>
+
+ This software may be used and distributed according to the terms of
+ the GNU General Public License, incorporated herein by reference.
+
+ Largely based on git's implementation
+*/
+
+#define PY_SSIZE_T_CLEAN
+#include <Python.h>
+
+#include "util.h"
+
+static const char b85chars[] = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"
+ "abcdefghijklmnopqrstuvwxyz!#$%&()*+-;<=>?@^_`{|}~";
+static char b85dec[256];
+
+static void b85prep(void)
+{
+ unsigned i;
+
+ memset(b85dec, 0, sizeof(b85dec));
+ for (i = 0; i < sizeof(b85chars); i++)
+ b85dec[(int)(b85chars[i])] = i + 1;
+}
+
+static PyObject *b85encode(PyObject *self, PyObject *args)
+{
+ const unsigned char *text;
+ PyObject *out;
+ char *dst;
+ Py_ssize_t len, olen, i;
+ unsigned int acc, val, ch;
+ int pad = 0;
+
+ if (!PyArg_ParseTuple(args, "s#|i", &text, &len, &pad))
+ return NULL;
+
+ if (pad)
+ olen = ((len + 3) / 4 * 5) - 3;
+ else {
+ olen = len % 4;
+ if (olen)
+ olen++;
+ olen += len / 4 * 5;
+ }
+ if (!(out = PyBytes_FromStringAndSize(NULL, olen + 3)))
+ return NULL;
+
+ dst = PyBytes_AsString(out);
+
+ while (len) {
+ acc = 0;
+ for (i = 24; i >= 0; i -= 8) {
+ ch = *text++;
+ acc |= ch << i;
+ if (--len == 0)
+ break;
+ }
+ for (i = 4; i >= 0; i--) {
+ val = acc % 85;
+ acc /= 85;
+ dst[i] = b85chars[val];
+ }
+ dst += 5;
+ }
+
+ if (!pad)
+ _PyBytes_Resize(&out, olen);
+
+ return out;
+}
+
+static PyObject *b85decode(PyObject *self, PyObject *args)
+{
+ PyObject *out;
+ const char *text;
+ char *dst;
+ Py_ssize_t len, i, j, olen, cap;
+ int c;
+ unsigned int acc;
+
+ if (!PyArg_ParseTuple(args, "s#", &text, &len))
+ return NULL;
+
+ olen = len / 5 * 4;
+ i = len % 5;
+ if (i)
+ olen += i - 1;
+ if (!(out = PyBytes_FromStringAndSize(NULL, olen)))
+ return NULL;
+
+ dst = PyBytes_AsString(out);
+
+ i = 0;
+ while (i < len)
+ {
+ acc = 0;
+ cap = len - i - 1;
+ if (cap > 4)
+ cap = 4;
+ for (j = 0; j < cap; i++, j++)
+ {
+ c = b85dec[(int)*text++] - 1;
+ if (c < 0)
+ return PyErr_Format(
+ PyExc_ValueError,
+ "bad base85 character at position %d",
+ (int)i);
+ acc = acc * 85 + c;
+ }
+ if (i++ < len)
+ {
+ c = b85dec[(int)*text++] - 1;
+ if (c < 0)
+ return PyErr_Format(
+ PyExc_ValueError,
+ "bad base85 character at position %d",
+ (int)i);
+ /* overflow detection: 0xffffffff == "|NsC0",
+ * "|NsC" == 0x03030303 */
+ if (acc > 0x03030303 || (acc *= 85) > 0xffffffff - c)
+ return PyErr_Format(
+ PyExc_ValueError,
+ "bad base85 sequence at position %d",
+ (int)i);
+ acc += c;
+ }
+
+ cap = olen < 4 ? olen : 4;
+ olen -= cap;
+ for (j = 0; j < 4 - cap; j++)
+ acc *= 85;
+ if (cap && cap < 4)
+ acc += 0xffffff >> (cap - 1) * 8;
+ for (j = 0; j < cap; j++)
+ {
+ acc = (acc << 8) | (acc >> 24);
+ *dst++ = acc;
+ }
+ }
+
+ return out;
+}
+
+static char base85_doc[] = "Base85 Data Encoding";
+
+static PyMethodDef methods[] = {
+ {"b85encode", b85encode, METH_VARARGS,
+ "Encode text in base85.\n\n"
+ "If the second parameter is true, pad the result to a multiple of "
+ "five characters.\n"},
+ {"b85decode", b85decode, METH_VARARGS, "Decode base85 text.\n"},
+ {NULL, NULL}
+};
+
+static const int version = 1;
+
+#ifdef IS_PY3K
+static struct PyModuleDef base85_module = {
+ PyModuleDef_HEAD_INIT,
+ "base85",
+ base85_doc,
+ -1,
+ methods
+};
+
+PyMODINIT_FUNC PyInit_base85(void)
+{
+ PyObject *m;
+ b85prep();
+
+ m = PyModule_Create(&base85_module);
+ PyModule_AddIntConstant(m, "version", version);
+ return m;
+}
+#else
+PyMODINIT_FUNC initbase85(void)
+{
+ PyObject *m;
+ m = Py_InitModule3("base85", methods, base85_doc);
+
+ b85prep();
+ PyModule_AddIntConstant(m, "version", version);
+}
+#endif
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cext/bdiff.c Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,220 @@
+/*
+ bdiff.c - efficient binary diff extension for Mercurial
+
+ Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
+
+ This software may be used and distributed according to the terms of
+ the GNU General Public License, incorporated herein by reference.
+
+ Based roughly on Python difflib
+*/
+
+#define PY_SSIZE_T_CLEAN
+#include <Python.h>
+#include <stdlib.h>
+#include <string.h>
+#include <limits.h>
+
+#include "bdiff.h"
+#include "bitmanipulation.h"
+#include "util.h"
+
+
+static PyObject *blocks(PyObject *self, PyObject *args)
+{
+ PyObject *sa, *sb, *rl = NULL, *m;
+ struct bdiff_line *a, *b;
+ struct bdiff_hunk l, *h;
+ int an, bn, count, pos = 0;
+
+ l.next = NULL;
+
+ if (!PyArg_ParseTuple(args, "SS:bdiff", &sa, &sb))
+ return NULL;
+
+ an = bdiff_splitlines(PyBytes_AsString(sa), PyBytes_Size(sa), &a);
+ bn = bdiff_splitlines(PyBytes_AsString(sb), PyBytes_Size(sb), &b);
+
+ if (!a || !b)
+ goto nomem;
+
+ count = bdiff_diff(a, an, b, bn, &l);
+ if (count < 0)
+ goto nomem;
+
+ rl = PyList_New(count);
+ if (!rl)
+ goto nomem;
+
+ for (h = l.next; h; h = h->next) {
+ m = Py_BuildValue("iiii", h->a1, h->a2, h->b1, h->b2);
+ PyList_SetItem(rl, pos, m);
+ pos++;
+ }
+
+nomem:
+ free(a);
+ free(b);
+ bdiff_freehunks(l.next);
+ return rl ? rl : PyErr_NoMemory();
+}
+
+static PyObject *bdiff(PyObject *self, PyObject *args)
+{
+ char *sa, *sb, *rb, *ia, *ib;
+ PyObject *result = NULL;
+ struct bdiff_line *al, *bl;
+ struct bdiff_hunk l, *h;
+ int an, bn, count;
+ Py_ssize_t len = 0, la, lb, li = 0, lcommon = 0, lmax;
+ PyThreadState *_save;
+
+ l.next = NULL;
+
+ if (!PyArg_ParseTuple(args, "s#s#:bdiff", &sa, &la, &sb, &lb))
+ return NULL;
+
+ if (la > UINT_MAX || lb > UINT_MAX) {
+ PyErr_SetString(PyExc_ValueError, "bdiff inputs too large");
+ return NULL;
+ }
+
+ _save = PyEval_SaveThread();
+
+ lmax = la > lb ? lb : la;
+ for (ia = sa, ib = sb;
+ li < lmax && *ia == *ib;
+ ++li, ++ia, ++ib)
+ if (*ia == '\n')
+ lcommon = li + 1;
+ /* we can almost add: if (li == lmax) lcommon = li; */
+
+ an = bdiff_splitlines(sa + lcommon, la - lcommon, &al);
+ bn = bdiff_splitlines(sb + lcommon, lb - lcommon, &bl);
+ if (!al || !bl)
+ goto nomem;
+
+ count = bdiff_diff(al, an, bl, bn, &l);
+ if (count < 0)
+ goto nomem;
+
+ /* calculate length of output */
+ la = lb = 0;
+ for (h = l.next; h; h = h->next) {
+ if (h->a1 != la || h->b1 != lb)
+ len += 12 + bl[h->b1].l - bl[lb].l;
+ la = h->a2;
+ lb = h->b2;
+ }
+ PyEval_RestoreThread(_save);
+ _save = NULL;
+
+ result = PyBytes_FromStringAndSize(NULL, len);
+
+ if (!result)
+ goto nomem;
+
+ /* build binary patch */
+ rb = PyBytes_AsString(result);
+ la = lb = 0;
+
+ for (h = l.next; h; h = h->next) {
+ if (h->a1 != la || h->b1 != lb) {
+ len = bl[h->b1].l - bl[lb].l;
+ putbe32((uint32_t)(al[la].l + lcommon - al->l), rb);
+ putbe32((uint32_t)(al[h->a1].l + lcommon - al->l), rb + 4);
+ putbe32((uint32_t)len, rb + 8);
+ memcpy(rb + 12, bl[lb].l, len);
+ rb += 12 + len;
+ }
+ la = h->a2;
+ lb = h->b2;
+ }
+
+nomem:
+ if (_save)
+ PyEval_RestoreThread(_save);
+ free(al);
+ free(bl);
+ bdiff_freehunks(l.next);
+ return result ? result : PyErr_NoMemory();
+}
+
+/*
+ * If allws != 0, remove all whitespace (' ', \t and \r). Otherwise,
+ * reduce whitespace sequences to a single space and trim remaining whitespace
+ * from end of lines.
+ */
+static PyObject *fixws(PyObject *self, PyObject *args)
+{
+ PyObject *s, *result = NULL;
+ char allws, c;
+ const char *r;
+ Py_ssize_t i, rlen, wlen = 0;
+ char *w;
+
+ if (!PyArg_ParseTuple(args, "Sb:fixws", &s, &allws))
+ return NULL;
+ r = PyBytes_AsString(s);
+ rlen = PyBytes_Size(s);
+
+ w = (char *)PyMem_Malloc(rlen ? rlen : 1);
+ if (!w)
+ goto nomem;
+
+ for (i = 0; i != rlen; i++) {
+ c = r[i];
+ if (c == ' ' || c == '\t' || c == '\r') {
+ if (!allws && (wlen == 0 || w[wlen - 1] != ' '))
+ w[wlen++] = ' ';
+ } else if (c == '\n' && !allws
+ && wlen > 0 && w[wlen - 1] == ' ') {
+ w[wlen - 1] = '\n';
+ } else {
+ w[wlen++] = c;
+ }
+ }
+
+ result = PyBytes_FromStringAndSize(w, wlen);
+
+nomem:
+ PyMem_Free(w);
+ return result ? result : PyErr_NoMemory();
+}
+
+
+static char mdiff_doc[] = "Efficient binary diff.";
+
+static PyMethodDef methods[] = {
+ {"bdiff", bdiff, METH_VARARGS, "calculate a binary diff\n"},
+ {"blocks", blocks, METH_VARARGS, "find a list of matching lines\n"},
+ {"fixws", fixws, METH_VARARGS, "normalize diff whitespaces\n"},
+ {NULL, NULL}
+};
+
+static const int version = 1;
+
+#ifdef IS_PY3K
+static struct PyModuleDef bdiff_module = {
+ PyModuleDef_HEAD_INIT,
+ "bdiff",
+ mdiff_doc,
+ -1,
+ methods
+};
+
+PyMODINIT_FUNC PyInit_bdiff(void)
+{
+ PyObject *m;
+ m = PyModule_Create(&bdiff_module);
+ PyModule_AddIntConstant(m, "version", version);
+ return m;
+}
+#else
+PyMODINIT_FUNC initbdiff(void)
+{
+ PyObject *m;
+ m = Py_InitModule3("bdiff", methods, mdiff_doc);
+ PyModule_AddIntConstant(m, "version", version);
+}
+#endif
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cext/diffhelpers.c Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,204 @@
+/*
+ * diffhelpers.c - helper routines for mpatch
+ *
+ * Copyright 2007 Chris Mason <chris.mason@oracle.com>
+ *
+ * This software may be used and distributed according to the terms
+ * of the GNU General Public License v2, incorporated herein by reference.
+ */
+
+#include <Python.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "util.h"
+
+static char diffhelpers_doc[] = "Efficient diff parsing";
+static PyObject *diffhelpers_Error;
+
+
+/* fixup the last lines of a and b when the patch has no newline at eof */
+static void _fix_newline(PyObject *hunk, PyObject *a, PyObject *b)
+{
+ Py_ssize_t hunksz = PyList_Size(hunk);
+ PyObject *s = PyList_GET_ITEM(hunk, hunksz-1);
+ char *l = PyBytes_AsString(s);
+ Py_ssize_t alen = PyList_Size(a);
+ Py_ssize_t blen = PyList_Size(b);
+ char c = l[0];
+ PyObject *hline;
+ Py_ssize_t sz = PyBytes_GET_SIZE(s);
+
+ if (sz > 1 && l[sz-2] == '\r')
+ /* tolerate CRLF in last line */
+ sz -= 1;
+
+ hline = PyBytes_FromStringAndSize(l, sz-1);
+ if (!hline) {
+ return;
+ }
+
+ if (c == ' ' || c == '+') {
+ PyObject *rline = PyBytes_FromStringAndSize(l + 1, sz - 2);
+ PyList_SetItem(b, blen-1, rline);
+ }
+ if (c == ' ' || c == '-') {
+ Py_INCREF(hline);
+ PyList_SetItem(a, alen-1, hline);
+ }
+ PyList_SetItem(hunk, hunksz-1, hline);
+}
+
+/* python callable form of _fix_newline */
+static PyObject *
+fix_newline(PyObject *self, PyObject *args)
+{
+ PyObject *hunk, *a, *b;
+ if (!PyArg_ParseTuple(args, "OOO", &hunk, &a, &b))
+ return NULL;
+ _fix_newline(hunk, a, b);
+ return Py_BuildValue("l", 0);
+}
+
+#if (PY_VERSION_HEX < 0x02050000)
+static const char *addlines_format = "OOiiOO";
+#else
+static const char *addlines_format = "OOnnOO";
+#endif
+
+/*
+ * read lines from fp into the hunk. The hunk is parsed into two arrays
+ * a and b. a gets the old state of the text, b gets the new state
+ * The control char from the hunk is saved when inserting into a, but not b
+ * (for performance while deleting files)
+ */
+static PyObject *
+addlines(PyObject *self, PyObject *args)
+{
+
+ PyObject *fp, *hunk, *a, *b, *x;
+ Py_ssize_t i;
+ Py_ssize_t lena, lenb;
+ Py_ssize_t num;
+ Py_ssize_t todoa, todob;
+ char *s, c;
+ PyObject *l;
+ if (!PyArg_ParseTuple(args, addlines_format,
+ &fp, &hunk, &lena, &lenb, &a, &b))
+ return NULL;
+
+ while (1) {
+ todoa = lena - PyList_Size(a);
+ todob = lenb - PyList_Size(b);
+ num = todoa > todob ? todoa : todob;
+ if (num == 0)
+ break;
+ for (i = 0; i < num; i++) {
+ x = PyFile_GetLine(fp, 0);
+ s = PyBytes_AsString(x);
+ c = *s;
+ if (strcmp(s, "\\ No newline at end of file\n") == 0) {
+ _fix_newline(hunk, a, b);
+ continue;
+ }
+ if (c == '\n') {
+ /* Some patches may be missing the control char
+ * on empty lines. Supply a leading space. */
+ Py_DECREF(x);
+ x = PyBytes_FromString(" \n");
+ }
+ PyList_Append(hunk, x);
+ if (c == '+') {
+ l = PyBytes_FromString(s + 1);
+ PyList_Append(b, l);
+ Py_DECREF(l);
+ } else if (c == '-') {
+ PyList_Append(a, x);
+ } else {
+ l = PyBytes_FromString(s + 1);
+ PyList_Append(b, l);
+ Py_DECREF(l);
+ PyList_Append(a, x);
+ }
+ Py_DECREF(x);
+ }
+ }
+ return Py_BuildValue("l", 0);
+}
+
+/*
+ * compare the lines in a with the lines in b. a is assumed to have
+ * a control char at the start of each line, this char is ignored in the
+ * compare
+ */
+static PyObject *
+testhunk(PyObject *self, PyObject *args)
+{
+
+ PyObject *a, *b;
+ long bstart;
+ Py_ssize_t alen, blen;
+ Py_ssize_t i;
+ char *sa, *sb;
+
+ if (!PyArg_ParseTuple(args, "OOl", &a, &b, &bstart))
+ return NULL;
+ alen = PyList_Size(a);
+ blen = PyList_Size(b);
+ if (alen > blen - bstart || bstart < 0) {
+ return Py_BuildValue("l", -1);
+ }
+ for (i = 0; i < alen; i++) {
+ sa = PyBytes_AsString(PyList_GET_ITEM(a, i));
+ sb = PyBytes_AsString(PyList_GET_ITEM(b, i + bstart));
+ if (strcmp(sa + 1, sb) != 0)
+ return Py_BuildValue("l", -1);
+ }
+ return Py_BuildValue("l", 0);
+}
+
+static PyMethodDef methods[] = {
+ {"addlines", addlines, METH_VARARGS, "add lines to a hunk\n"},
+ {"fix_newline", fix_newline, METH_VARARGS, "fixup newline counters\n"},
+ {"testhunk", testhunk, METH_VARARGS, "test lines in a hunk\n"},
+ {NULL, NULL}
+};
+
+static const int version = 1;
+
+#ifdef IS_PY3K
+static struct PyModuleDef diffhelpers_module = {
+ PyModuleDef_HEAD_INIT,
+ "diffhelpers",
+ diffhelpers_doc,
+ -1,
+ methods
+};
+
+PyMODINIT_FUNC PyInit_diffhelpers(void)
+{
+ PyObject *m;
+
+ m = PyModule_Create(&diffhelpers_module);
+ if (m == NULL)
+ return NULL;
+
+ diffhelpers_Error = PyErr_NewException("diffhelpers.diffhelpersError",
+ NULL, NULL);
+ Py_INCREF(diffhelpers_Error);
+ PyModule_AddObject(m, "diffhelpersError", diffhelpers_Error);
+ PyModule_AddIntConstant(m, "version", version);
+
+ return m;
+}
+#else
+PyMODINIT_FUNC
+initdiffhelpers(void)
+{
+ PyObject *m;
+ m = Py_InitModule3("diffhelpers", methods, diffhelpers_doc);
+ diffhelpers_Error = PyErr_NewException("diffhelpers.diffhelpersError",
+ NULL, NULL);
+ PyModule_AddIntConstant(m, "version", version);
+}
+#endif
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cext/dirs.c Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,315 @@
+/*
+ dirs.c - dynamic directory diddling for dirstates
+
+ Copyright 2013 Facebook
+
+ This software may be used and distributed according to the terms of
+ the GNU General Public License, incorporated herein by reference.
+*/
+
+#define PY_SSIZE_T_CLEAN
+#include <Python.h>
+#include "util.h"
+
+#ifdef IS_PY3K
+#define PYLONG_VALUE(o) ((PyLongObject *)o)->ob_digit[1]
+#else
+#define PYLONG_VALUE(o) PyInt_AS_LONG(o)
+#endif
+
+/*
+ * This is a multiset of directory names, built from the files that
+ * appear in a dirstate or manifest.
+ *
+ * A few implementation notes:
+ *
+ * We modify Python integers for refcounting, but those integers are
+ * never visible to Python code.
+ *
+ * We mutate strings in-place, but leave them immutable once they can
+ * be seen by Python code.
+ */
+typedef struct {
+ PyObject_HEAD
+ PyObject *dict;
+} dirsObject;
+
+static inline Py_ssize_t _finddir(const char *path, Py_ssize_t pos)
+{
+ while (pos != -1) {
+ if (path[pos] == '/')
+ break;
+ pos -= 1;
+ }
+
+ return pos;
+}
+
+static int _addpath(PyObject *dirs, PyObject *path)
+{
+ const char *cpath = PyBytes_AS_STRING(path);
+ Py_ssize_t pos = PyBytes_GET_SIZE(path);
+ PyObject *key = NULL;
+ int ret = -1;
+
+ /* This loop is super critical for performance. That's why we inline
+ * access to Python structs instead of going through a supported API.
+ * The implementation, therefore, is heavily dependent on CPython
+ * implementation details. We also commit violations of the Python
+ * "protocol" such as mutating immutable objects. But since we only
+ * mutate objects created in this function or in other well-defined
+ * locations, the references are known so these violations should go
+ * unnoticed. The code for adjusting the length of a PyBytesObject is
+ * essentially a minimal version of _PyBytes_Resize. */
+ while ((pos = _finddir(cpath, pos - 1)) != -1) {
+ PyObject *val;
+
+ /* It's likely that every prefix already has an entry
+ in our dict. Try to avoid allocating and
+ deallocating a string for each prefix we check. */
+ if (key != NULL)
+ ((PyBytesObject *)key)->ob_shash = -1;
+ else {
+ /* Force Python to not reuse a small shared string. */
+ key = PyBytes_FromStringAndSize(cpath,
+ pos < 2 ? 2 : pos);
+ if (key == NULL)
+ goto bail;
+ }
+ /* Py_SIZE(o) refers to the ob_size member of the struct. Yes,
+ * assigning to what looks like a function seems wrong. */
+ Py_SIZE(key) = pos;
+ ((PyBytesObject *)key)->ob_sval[pos] = '\0';
+
+ val = PyDict_GetItem(dirs, key);
+ if (val != NULL) {
+ PYLONG_VALUE(val) += 1;
+ break;
+ }
+
+ /* Force Python to not reuse a small shared int. */
+#ifdef IS_PY3K
+ val = PyLong_FromLong(0x1eadbeef);
+#else
+ val = PyInt_FromLong(0x1eadbeef);
+#endif
+
+ if (val == NULL)
+ goto bail;
+
+ PYLONG_VALUE(val) = 1;
+ ret = PyDict_SetItem(dirs, key, val);
+ Py_DECREF(val);
+ if (ret == -1)
+ goto bail;
+ Py_CLEAR(key);
+ }
+ ret = 0;
+
+bail:
+ Py_XDECREF(key);
+
+ return ret;
+}
+
+static int _delpath(PyObject *dirs, PyObject *path)
+{
+ char *cpath = PyBytes_AS_STRING(path);
+ Py_ssize_t pos = PyBytes_GET_SIZE(path);
+ PyObject *key = NULL;
+ int ret = -1;
+
+ while ((pos = _finddir(cpath, pos - 1)) != -1) {
+ PyObject *val;
+
+ key = PyBytes_FromStringAndSize(cpath, pos);
+
+ if (key == NULL)
+ goto bail;
+
+ val = PyDict_GetItem(dirs, key);
+ if (val == NULL) {
+ PyErr_SetString(PyExc_ValueError,
+ "expected a value, found none");
+ goto bail;
+ }
+
+ if (--PYLONG_VALUE(val) <= 0) {
+ if (PyDict_DelItem(dirs, key) == -1)
+ goto bail;
+ } else
+ break;
+ Py_CLEAR(key);
+ }
+ ret = 0;
+
+bail:
+ Py_XDECREF(key);
+
+ return ret;
+}
+
+static int dirs_fromdict(PyObject *dirs, PyObject *source, char skipchar)
+{
+ PyObject *key, *value;
+ Py_ssize_t pos = 0;
+
+ while (PyDict_Next(source, &pos, &key, &value)) {
+ if (!PyBytes_Check(key)) {
+ PyErr_SetString(PyExc_TypeError, "expected string key");
+ return -1;
+ }
+ if (skipchar) {
+ if (!dirstate_tuple_check(value)) {
+ PyErr_SetString(PyExc_TypeError,
+ "expected a dirstate tuple");
+ return -1;
+ }
+ if (((dirstateTupleObject *)value)->state == skipchar)
+ continue;
+ }
+
+ if (_addpath(dirs, key) == -1)
+ return -1;
+ }
+
+ return 0;
+}
+
+static int dirs_fromiter(PyObject *dirs, PyObject *source)
+{
+ PyObject *iter, *item = NULL;
+ int ret;
+
+ iter = PyObject_GetIter(source);
+ if (iter == NULL)
+ return -1;
+
+ while ((item = PyIter_Next(iter)) != NULL) {
+ if (!PyBytes_Check(item)) {
+ PyErr_SetString(PyExc_TypeError, "expected string");
+ break;
+ }
+
+ if (_addpath(dirs, item) == -1)
+ break;
+ Py_CLEAR(item);
+ }
+
+ ret = PyErr_Occurred() ? -1 : 0;
+ Py_DECREF(iter);
+ Py_XDECREF(item);
+ return ret;
+}
+
+/*
+ * Calculate a refcounted set of directory names for the files in a
+ * dirstate.
+ */
+static int dirs_init(dirsObject *self, PyObject *args)
+{
+ PyObject *dirs = NULL, *source = NULL;
+ char skipchar = 0;
+ int ret = -1;
+
+ self->dict = NULL;
+
+ if (!PyArg_ParseTuple(args, "|Oc:__init__", &source, &skipchar))
+ return -1;
+
+ dirs = PyDict_New();
+
+ if (dirs == NULL)
+ return -1;
+
+ if (source == NULL)
+ ret = 0;
+ else if (PyDict_Check(source))
+ ret = dirs_fromdict(dirs, source, skipchar);
+ else if (skipchar)
+ PyErr_SetString(PyExc_ValueError,
+ "skip character is only supported "
+ "with a dict source");
+ else
+ ret = dirs_fromiter(dirs, source);
+
+ if (ret == -1)
+ Py_XDECREF(dirs);
+ else
+ self->dict = dirs;
+
+ return ret;
+}
+
+PyObject *dirs_addpath(dirsObject *self, PyObject *args)
+{
+ PyObject *path;
+
+ if (!PyArg_ParseTuple(args, "O!:addpath", &PyBytes_Type, &path))
+ return NULL;
+
+ if (_addpath(self->dict, path) == -1)
+ return NULL;
+
+ Py_RETURN_NONE;
+}
+
+static PyObject *dirs_delpath(dirsObject *self, PyObject *args)
+{
+ PyObject *path;
+
+ if (!PyArg_ParseTuple(args, "O!:delpath", &PyBytes_Type, &path))
+ return NULL;
+
+ if (_delpath(self->dict, path) == -1)
+ return NULL;
+
+ Py_RETURN_NONE;
+}
+
+static int dirs_contains(dirsObject *self, PyObject *value)
+{
+ return PyBytes_Check(value) ? PyDict_Contains(self->dict, value) : 0;
+}
+
+static void dirs_dealloc(dirsObject *self)
+{
+ Py_XDECREF(self->dict);
+ PyObject_Del(self);
+}
+
+static PyObject *dirs_iter(dirsObject *self)
+{
+ return PyObject_GetIter(self->dict);
+}
+
+static PySequenceMethods dirs_sequence_methods;
+
+static PyMethodDef dirs_methods[] = {
+ {"addpath", (PyCFunction)dirs_addpath, METH_VARARGS, "add a path"},
+ {"delpath", (PyCFunction)dirs_delpath, METH_VARARGS, "remove a path"},
+ {NULL} /* Sentinel */
+};
+
+static PyTypeObject dirsType = { PyVarObject_HEAD_INIT(NULL, 0) };
+
+void dirs_module_init(PyObject *mod)
+{
+ dirs_sequence_methods.sq_contains = (objobjproc)dirs_contains;
+ dirsType.tp_name = "parsers.dirs";
+ dirsType.tp_new = PyType_GenericNew;
+ dirsType.tp_basicsize = sizeof(dirsObject);
+ dirsType.tp_dealloc = (destructor)dirs_dealloc;
+ dirsType.tp_as_sequence = &dirs_sequence_methods;
+ dirsType.tp_flags = Py_TPFLAGS_DEFAULT;
+ dirsType.tp_doc = "dirs";
+ dirsType.tp_iter = (getiterfunc)dirs_iter;
+ dirsType.tp_methods = dirs_methods;
+ dirsType.tp_init = (initproc)dirs_init;
+
+ if (PyType_Ready(&dirsType) < 0)
+ return;
+ Py_INCREF(&dirsType);
+
+ PyModule_AddObject(mod, "dirs", (PyObject *)&dirsType);
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cext/manifest.c Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,939 @@
+/*
+ * manifest.c - manifest type that does on-demand parsing.
+ *
+ * Copyright 2015, Google Inc.
+ *
+ * This software may be used and distributed according to the terms of
+ * the GNU General Public License, incorporated herein by reference.
+ */
+#include <Python.h>
+
+#include <assert.h>
+#include <string.h>
+#include <stdlib.h>
+
+#include "util.h"
+
+#define DEFAULT_LINES 100000
+
+typedef struct {
+ char *start;
+ Py_ssize_t len; /* length of line including terminal newline */
+ char hash_suffix;
+ bool from_malloc;
+ bool deleted;
+} line;
+
+typedef struct {
+ PyObject_HEAD
+ PyObject *pydata;
+ line *lines;
+ int numlines; /* number of line entries */
+ int livelines; /* number of non-deleted lines */
+ int maxlines; /* allocated number of lines */
+ bool dirty;
+} lazymanifest;
+
+#define MANIFEST_OOM -1
+#define MANIFEST_NOT_SORTED -2
+#define MANIFEST_MALFORMED -3
+
+/* defined in parsers.c */
+PyObject *unhexlify(const char *str, int len);
+
+/* get the length of the path for a line */
+static size_t pathlen(line *l) {
+ return strlen(l->start);
+}
+
+/* get the node value of a single line */
+static PyObject *nodeof(line *l) {
+ char *s = l->start;
+ ssize_t llen = pathlen(l);
+ PyObject *hash = unhexlify(s + llen + 1, 40);
+ if (!hash) {
+ return NULL;
+ }
+ if (l->hash_suffix != '\0') {
+ char newhash[21];
+ memcpy(newhash, PyBytes_AsString(hash), 20);
+ Py_DECREF(hash);
+ newhash[20] = l->hash_suffix;
+ hash = PyBytes_FromStringAndSize(newhash, 21);
+ }
+ return hash;
+}
+
+/* get the node hash and flags of a line as a tuple */
+static PyObject *hashflags(line *l)
+{
+ char *s = l->start;
+ size_t plen = pathlen(l);
+ PyObject *hash = nodeof(l);
+
+ /* 40 for hash, 1 for null byte, 1 for newline */
+ size_t hplen = plen + 42;
+ Py_ssize_t flen = l->len - hplen;
+ PyObject *flags;
+ PyObject *tup;
+
+ if (!hash)
+ return NULL;
+ flags = PyBytes_FromStringAndSize(s + hplen - 1, flen);
+ if (!flags) {
+ Py_DECREF(hash);
+ return NULL;
+ }
+ tup = PyTuple_Pack(2, hash, flags);
+ Py_DECREF(flags);
+ Py_DECREF(hash);
+ return tup;
+}
+
+/* if we're about to run out of space in the line index, add more */
+static bool realloc_if_full(lazymanifest *self)
+{
+ if (self->numlines == self->maxlines) {
+ self->maxlines *= 2;
+ self->lines = realloc(self->lines, self->maxlines * sizeof(line));
+ }
+ return !!self->lines;
+}
+
+/*
+ * Find the line boundaries in the manifest that 'data' points to and store
+ * information about each line in 'self'.
+ */
+static int find_lines(lazymanifest *self, char *data, Py_ssize_t len)
+{
+ char *prev = NULL;
+ while (len > 0) {
+ line *l;
+ char *next = memchr(data, '\n', len);
+ if (!next) {
+ return MANIFEST_MALFORMED;
+ }
+ next++; /* advance past newline */
+ if (!realloc_if_full(self)) {
+ return MANIFEST_OOM; /* no memory */
+ }
+ if (prev && strcmp(prev, data) > -1) {
+ /* This data isn't sorted, so we have to abort. */
+ return MANIFEST_NOT_SORTED;
+ }
+ l = self->lines + ((self->numlines)++);
+ l->start = data;
+ l->len = next - data;
+ l->hash_suffix = '\0';
+ l->from_malloc = false;
+ l->deleted = false;
+ len = len - l->len;
+ prev = data;
+ data = next;
+ }
+ self->livelines = self->numlines;
+ return 0;
+}
+
+static int lazymanifest_init(lazymanifest *self, PyObject *args)
+{
+ char *data;
+ Py_ssize_t len;
+ int err, ret;
+ PyObject *pydata;
+ if (!PyArg_ParseTuple(args, "S", &pydata)) {
+ return -1;
+ }
+ err = PyBytes_AsStringAndSize(pydata, &data, &len);
+
+ self->dirty = false;
+ if (err == -1)
+ return -1;
+ self->pydata = pydata;
+ Py_INCREF(self->pydata);
+ Py_BEGIN_ALLOW_THREADS
+ self->lines = malloc(DEFAULT_LINES * sizeof(line));
+ self->maxlines = DEFAULT_LINES;
+ self->numlines = 0;
+ if (!self->lines)
+ ret = MANIFEST_OOM;
+ else
+ ret = find_lines(self, data, len);
+ Py_END_ALLOW_THREADS
+ switch (ret) {
+ case 0:
+ break;
+ case MANIFEST_OOM:
+ PyErr_NoMemory();
+ break;
+ case MANIFEST_NOT_SORTED:
+ PyErr_Format(PyExc_ValueError,
+ "Manifest lines not in sorted order.");
+ break;
+ case MANIFEST_MALFORMED:
+ PyErr_Format(PyExc_ValueError,
+ "Manifest did not end in a newline.");
+ break;
+ default:
+ PyErr_Format(PyExc_ValueError,
+ "Unknown problem parsing manifest.");
+ }
+ return ret == 0 ? 0 : -1;
+}
+
+static void lazymanifest_dealloc(lazymanifest *self)
+{
+ /* free any extra lines we had to allocate */
+ int i;
+ for (i = 0; i < self->numlines; i++) {
+ if (self->lines[i].from_malloc) {
+ free(self->lines[i].start);
+ }
+ }
+ if (self->lines) {
+ free(self->lines);
+ self->lines = NULL;
+ }
+ if (self->pydata) {
+ Py_DECREF(self->pydata);
+ self->pydata = NULL;
+ }
+ PyObject_Del(self);
+}
+
+/* iteration support */
+
+typedef struct {
+ PyObject_HEAD lazymanifest *m;
+ Py_ssize_t pos;
+} lmIter;
+
+static void lmiter_dealloc(PyObject *o)
+{
+ lmIter *self = (lmIter *)o;
+ Py_DECREF(self->m);
+ PyObject_Del(self);
+}
+
+static line *lmiter_nextline(lmIter *self)
+{
+ do {
+ self->pos++;
+ if (self->pos >= self->m->numlines) {
+ return NULL;
+ }
+ /* skip over deleted manifest entries */
+ } while (self->m->lines[self->pos].deleted);
+ return self->m->lines + self->pos;
+}
+
+static PyObject *lmiter_iterentriesnext(PyObject *o)
+{
+ size_t pl;
+ line *l;
+ Py_ssize_t consumed;
+ PyObject *ret = NULL, *path = NULL, *hash = NULL, *flags = NULL;
+ l = lmiter_nextline((lmIter *)o);
+ if (!l) {
+ goto done;
+ }
+ pl = pathlen(l);
+ path = PyBytes_FromStringAndSize(l->start, pl);
+ hash = nodeof(l);
+ consumed = pl + 41;
+ flags = PyBytes_FromStringAndSize(l->start + consumed,
+ l->len - consumed - 1);
+ if (!path || !hash || !flags) {
+ goto done;
+ }
+ ret = PyTuple_Pack(3, path, hash, flags);
+done:
+ Py_XDECREF(path);
+ Py_XDECREF(hash);
+ Py_XDECREF(flags);
+ return ret;
+}
+
+#ifdef IS_PY3K
+#define LAZYMANIFESTENTRIESITERATOR_TPFLAGS Py_TPFLAGS_DEFAULT
+#else
+#define LAZYMANIFESTENTRIESITERATOR_TPFLAGS Py_TPFLAGS_DEFAULT \
+ | Py_TPFLAGS_HAVE_ITER
+#endif
+
+static PyTypeObject lazymanifestEntriesIterator = {
+ PyVarObject_HEAD_INIT(NULL, 0)
+ "parsers.lazymanifest.entriesiterator", /*tp_name */
+ sizeof(lmIter), /*tp_basicsize */
+ 0, /*tp_itemsize */
+ lmiter_dealloc, /*tp_dealloc */
+ 0, /*tp_print */
+ 0, /*tp_getattr */
+ 0, /*tp_setattr */
+ 0, /*tp_compare */
+ 0, /*tp_repr */
+ 0, /*tp_as_number */
+ 0, /*tp_as_sequence */
+ 0, /*tp_as_mapping */
+ 0, /*tp_hash */
+ 0, /*tp_call */
+ 0, /*tp_str */
+ 0, /*tp_getattro */
+ 0, /*tp_setattro */
+ 0, /*tp_as_buffer */
+ LAZYMANIFESTENTRIESITERATOR_TPFLAGS, /* tp_flags */
+ "Iterator for 3-tuples in a lazymanifest.", /* tp_doc */
+ 0, /* tp_traverse */
+ 0, /* tp_clear */
+ 0, /* tp_richcompare */
+ 0, /* tp_weaklistoffset */
+ PyObject_SelfIter, /* tp_iter: __iter__() method */
+ lmiter_iterentriesnext, /* tp_iternext: next() method */
+};
+
+static PyObject *lmiter_iterkeysnext(PyObject *o)
+{
+ size_t pl;
+ line *l = lmiter_nextline((lmIter *)o);
+ if (!l) {
+ return NULL;
+ }
+ pl = pathlen(l);
+ return PyBytes_FromStringAndSize(l->start, pl);
+}
+
+/* Py_TPFLAGS_HAVE_ITER is Python 2 only; see the entries-iterator flags. */
+#ifdef IS_PY3K
+#define LAZYMANIFESTKEYSITERATOR_TPFLAGS Py_TPFLAGS_DEFAULT
+#else
+#define LAZYMANIFESTKEYSITERATOR_TPFLAGS Py_TPFLAGS_DEFAULT \
+	| Py_TPFLAGS_HAVE_ITER
+#endif
+
+/* Type object for the iterator returned by iterkeys() (and tp_iter):
+ * each step produces a path name via lmiter_iterkeysnext. */
+static PyTypeObject lazymanifestKeysIterator = {
+	PyVarObject_HEAD_INIT(NULL, 0)
+	"parsers.lazymanifest.keysiterator", /*tp_name */
+	sizeof(lmIter),                  /*tp_basicsize */
+	0,                               /*tp_itemsize */
+	lmiter_dealloc,                  /*tp_dealloc */
+	0,                               /*tp_print */
+	0,                               /*tp_getattr */
+	0,                               /*tp_setattr */
+	0,                               /*tp_compare */
+	0,                               /*tp_repr */
+	0,                               /*tp_as_number */
+	0,                               /*tp_as_sequence */
+	0,                               /*tp_as_mapping */
+	0,                               /*tp_hash */
+	0,                               /*tp_call */
+	0,                               /*tp_str */
+	0,                               /*tp_getattro */
+	0,                               /*tp_setattro */
+	0,                               /*tp_as_buffer */
+	LAZYMANIFESTKEYSITERATOR_TPFLAGS, /* tp_flags */
+	"Keys iterator for a lazymanifest.",  /* tp_doc */
+	0,                               /* tp_traverse */
+	0,                               /* tp_clear */
+	0,                               /* tp_richcompare */
+	0,                               /* tp_weaklistoffset */
+	PyObject_SelfIter,               /* tp_iter: __iter__() method */
+	lmiter_iterkeysnext,             /* tp_iternext: next() method */
+};
+
+static lazymanifest *lazymanifest_copy(lazymanifest *self);
+
+/* iterentries(): build an entries iterator over an immutable snapshot
+ * (a copy) of self, so later mutations don't disturb the iteration. */
+static PyObject *lazymanifest_getentriesiter(lazymanifest *self)
+{
+	lmIter *it;
+	lazymanifest *snapshot = lazymanifest_copy(self);
+	if (!snapshot) {
+		PyErr_NoMemory();
+		return NULL;
+	}
+	it = PyObject_New(lmIter, &lazymanifestEntriesIterator);
+	if (!it) {
+		Py_DECREF(snapshot);
+		PyErr_NoMemory();
+		return NULL;
+	}
+	it->m = snapshot;
+	it->pos = -1; /* pre-first position; first next() advances to 0 */
+	return (PyObject *)it;
+}
+
+/* iterkeys() / tp_iter: build a keys iterator over an immutable snapshot
+ * (a copy) of self, so later mutations don't disturb the iteration. */
+static PyObject *lazymanifest_getkeysiter(lazymanifest *self)
+{
+	lmIter *it;
+	lazymanifest *snapshot = lazymanifest_copy(self);
+	if (!snapshot) {
+		PyErr_NoMemory();
+		return NULL;
+	}
+	it = PyObject_New(lmIter, &lazymanifestKeysIterator);
+	if (!it) {
+		Py_DECREF(snapshot);
+		PyErr_NoMemory();
+		return NULL;
+	}
+	it->m = snapshot;
+	it->pos = -1; /* pre-first position; first next() advances to 0 */
+	return (PyObject *)it;
+}
+
+/* __getitem__ and __setitem__ support */
+
+/* __len__: count of entries not marked deleted. */
+static Py_ssize_t lazymanifest_size(lazymanifest *self)
+{
+	return self->livelines;
+}
+
+/* qsort/bsearch comparator. Manifest lines begin with the file path, so
+ * comparing from line->start orders (and finds) entries by path; the
+ * strcmp relies on the path being NUL-terminated within the line. */
+static int linecmp(const void *left, const void *right)
+{
+	return strcmp(((const line *)left)->start,
+		      ((const line *)right)->start);
+}
+
+/* __getitem__: binary-search for *key* and return its (node, flags)
+ * tuple; raises TypeError for non-bytes keys and KeyError on a miss. */
+static PyObject *lazymanifest_getitem(lazymanifest *self, PyObject *key)
+{
+	line query;
+	line *found;
+	if (!PyBytes_Check(key)) {
+		PyErr_Format(PyExc_TypeError,
+			     "getitem: manifest keys must be a string.");
+		return NULL;
+	}
+	query.start = PyBytes_AsString(key);
+	found = bsearch(&query, self->lines, self->numlines, sizeof(line),
+			&linecmp);
+	if (found == NULL || found->deleted) {
+		PyErr_Format(PyExc_KeyError, "No such manifest entry.");
+		return NULL;
+	}
+	return hashflags(found);
+}
+
+/* __delitem__: lazily remove *key* by flagging its line deleted; the
+ * actual text is rewritten later by compact(). Returns 0 on success,
+ * -1 with TypeError/KeyError set on failure. */
+static int lazymanifest_delitem(lazymanifest *self, PyObject *key)
+{
+	line query;
+	line *found;
+	if (!PyBytes_Check(key)) {
+		PyErr_Format(PyExc_TypeError,
+			     "delitem: manifest keys must be a string.");
+		return -1;
+	}
+	query.start = PyBytes_AsString(key);
+	found = bsearch(&query, self->lines, self->numlines, sizeof(line),
+			&linecmp);
+	if (found == NULL || found->deleted) {
+		PyErr_Format(PyExc_KeyError,
+			     "Tried to delete nonexistent manifest entry.");
+		return -1;
+	}
+	found->deleted = true;
+	self->livelines--;
+	self->dirty = true;
+	return 0;
+}
+
+/* Do a binary search for the insertion point for new, creating the
+ * new entry if needed. */
+static int internalsetitem(lazymanifest *self, line *new) {
+	int start = 0, end = self->numlines;
+	while (start < end) {
+		int pos = start + (end - start) / 2;
+		int c = linecmp(new, self->lines + pos);
+		if (c < 0)
+			end = pos;
+		else if (c > 0)
+			start = pos + 1;
+		else {
+			/* exact path match: replace the slot in place */
+			if (self->lines[pos].deleted)
+				self->livelines++; /* entry is resurrected */
+			if (self->lines[pos].from_malloc)
+				free(self->lines[pos].start);
+			start = pos;
+			goto finish;
+		}
+	}
+	/* being here means we need to do an insert */
+	if (!realloc_if_full(self)) {
+		PyErr_NoMemory();
+		return -1;
+	}
+	/* shift the tail one slot right to open a gap at 'start' */
+	memmove(self->lines + start + 1, self->lines + start,
+		(self->numlines - start) * sizeof(line));
+	self->numlines++;
+	self->livelines++;
+finish:
+	self->lines[start] = *new;
+	self->dirty = true;
+	return 0;
+}
+
+/* __setitem__/__delitem__ entry point. A NULL value means delete *key*;
+ * otherwise value must be a (node, flags) tuple, which is encoded into a
+ * freshly malloc'd manifest line ("path\0hex-node flags\n") and inserted
+ * in sorted order. Returns 0 on success, -1 with an exception set. */
+static int lazymanifest_setitem(
+	lazymanifest *self, PyObject *key, PyObject *value)
+{
+	char *path;
+	Py_ssize_t plen;
+	PyObject *pyhash;
+	Py_ssize_t hlen;
+	char *hash;
+	PyObject *pyflags;
+	char *flags;
+	Py_ssize_t flen;
+	size_t dlen;
+	char *dest;
+	int i;
+	line new;
+	if (!PyBytes_Check(key)) {
+		PyErr_Format(PyExc_TypeError,
+			     "setitem: manifest keys must be a string.");
+		return -1;
+	}
+	if (!value) {
+		return lazymanifest_delitem(self, key);
+	}
+	if (!PyTuple_Check(value) || PyTuple_Size(value) != 2) {
+		PyErr_Format(PyExc_TypeError,
+			     "Manifest values must be a tuple of (node, flags).");
+		return -1;
+	}
+	if (PyBytes_AsStringAndSize(key, &path, &plen) == -1) {
+		return -1;
+	}
+
+	pyhash = PyTuple_GetItem(value, 0);
+	if (!PyBytes_Check(pyhash)) {
+		PyErr_Format(PyExc_TypeError,
+			     "node must be a 20-byte string");
+		return -1;
+	}
+	hlen = PyBytes_Size(pyhash);
+	/* Some parts of the codebase try and set 21 or 22
+	 * byte "hash" values in order to perturb things for
+	 * status. We have to preserve at least the 21st
+	 * byte. Sigh. If there's a 22nd byte, we drop it on
+	 * the floor, which works fine.
+	 */
+	if (hlen != 20 && hlen != 21 && hlen != 22) {
+		PyErr_Format(PyExc_TypeError,
+			     "node must be a 20-byte string");
+		return -1;
+	}
+	hash = PyBytes_AsString(pyhash);
+
+	pyflags = PyTuple_GetItem(value, 1);
+	if (!PyBytes_Check(pyflags) || PyBytes_Size(pyflags) > 1) {
+		PyErr_Format(PyExc_TypeError,
+			     "flags must be a 0 or 1 byte string");
+		return -1;
+	}
+	if (PyBytes_AsStringAndSize(pyflags, &flags, &flen) == -1) {
+		return -1;
+	}
+	/* one null byte and one newline */
+	dlen = plen + 41 + flen + 1;
+	dest = malloc(dlen);
+	if (!dest) {
+		PyErr_NoMemory();
+		return -1;
+	}
+	memcpy(dest, path, plen + 1);
+	for (i = 0; i < 20; i++) {
+		/* Cast to unsigned, so it will not get sign-extended when promoted
+		 * to int (as is done when passing to a variadic function)
+		 */
+		sprintf(dest + plen + 1 + (i * 2), "%02x", (unsigned char)hash[i]);
+	}
+	memcpy(dest + plen + 41, flags, flen);
+	dest[plen + 41 + flen] = '\n';
+	new.start = dest;
+	new.len = dlen;
+	new.hash_suffix = '\0';
+	if (hlen > 20) {
+		new.hash_suffix = hash[20];
+	}
+	new.from_malloc = true; /* is `start` a pointer we allocated? */
+	new.deleted = false; /* is this entry deleted? */
+	if (internalsetitem(self, &new)) {
+		/* the line was never stored, so nothing else owns dest;
+		 * free it to avoid a leak on the out-of-memory path */
+		free(dest);
+		return -1;
+	}
+	return 0;
+}
+
+/* Mapping protocol: len(lm), lm[path], lm[path] = (node, flags) / del. */
+static PyMappingMethods lazymanifest_mapping_methods = {
+	(lenfunc)lazymanifest_size,             /* mp_length */
+	(binaryfunc)lazymanifest_getitem,       /* mp_subscript */
+	(objobjargproc)lazymanifest_setitem,    /* mp_ass_subscript */
+};
+
+/* sequence methods (important or __contains__ builds an iterator) */
+
+/* __contains__: membership test by binary search. Keys are always
+ * bytes, so any non-bytes candidate is simply reported absent. */
+static int lazymanifest_contains(lazymanifest *self, PyObject *key)
+{
+	line probe;
+	line *match;
+	if (!PyBytes_Check(key))
+		return 0;
+	probe.start = PyBytes_AsString(key);
+	match = bsearch(&probe, self->lines, self->numlines, sizeof(line),
+			&linecmp);
+	return (match != NULL && !match->deleted) ? 1 : 0;
+}
+
+/* Only sq_length and sq_contains are provided; without sq_contains,
+ * `in` would fall back to iterating the whole manifest. */
+static PySequenceMethods lazymanifest_seq_meths = {
+	(lenfunc)lazymanifest_size, /* sq_length */
+	0, /* sq_concat */
+	0, /* sq_repeat */
+	0, /* sq_item */
+	0, /* sq_slice */
+	0, /* sq_ass_item */
+	0, /* sq_ass_slice */
+	(objobjproc)lazymanifest_contains, /* sq_contains */
+	0, /* sq_inplace_concat */
+	0, /* sq_inplace_repeat */
+};
+
+
+/* Other methods (copy, diff, etc) */
+static PyTypeObject lazymanifestType;
+
+/* If the manifest has changes, build the new manifest text and reindex it.
+ * Live lines are copied into a fresh bytes object, the line table is
+ * squeezed in place (dropping deleted entries and freeing any malloc'd
+ * line buffers), and self->pydata is swapped for the new text.
+ * Returns 0 on success, -1 on allocation failure. */
+static int compact(lazymanifest *self) {
+	int i;
+	ssize_t need = 0;
+	char *data;
+	line *src, *dst;
+	PyObject *pydata;
+	if (!self->dirty)
+		return 0;
+	for (i = 0; i < self->numlines; i++) {
+		if (!self->lines[i].deleted) {
+			need += self->lines[i].len;
+		}
+	}
+	pydata = PyBytes_FromStringAndSize(NULL, need);
+	if (!pydata)
+		return -1;
+	data = PyBytes_AsString(pydata);
+	if (!data) {
+		/* release the freshly created buffer instead of leaking it */
+		Py_DECREF(pydata);
+		return -1;
+	}
+	src = self->lines;
+	dst = self->lines;
+	for (i = 0; i < self->numlines; i++, src++) {
+		char *tofree = NULL;
+		if (src->from_malloc) {
+			tofree = src->start;
+		}
+		if (!src->deleted) {
+			memcpy(data, src->start, src->len);
+			*dst = *src;
+			dst->start = data;
+			dst->from_malloc = false;
+			data += dst->len;
+			dst++;
+		}
+		free(tofree);
+	}
+	Py_DECREF(self->pydata);
+	self->pydata = pydata;
+	self->numlines = self->livelines;
+	self->dirty = false;
+	return 0;
+}
+
+/* text(): serialize to the canonical manifest text. Compacts first so
+ * deleted/edited lines are folded in, then hands out a new reference to
+ * the shared bytes object. */
+static PyObject *lazymanifest_text(lazymanifest *self)
+{
+	if (compact(self)) {
+		PyErr_NoMemory();
+		return NULL;
+	}
+	Py_INCREF(self->pydata);
+	return self->pydata;
+}
+
+/* copy(): duplicate the manifest. Compacting first guarantees the copy
+ * has no deleted entries and no from_malloc line buffers, so its lines
+ * all point into self->pydata, which the copy shares by reference. */
+static lazymanifest *lazymanifest_copy(lazymanifest *self)
+{
+	lazymanifest *copy = NULL;
+	if (compact(self) != 0) {
+		goto nomem;
+	}
+	copy = PyObject_New(lazymanifest, &lazymanifestType);
+	if (!copy) {
+		goto nomem;
+	}
+	copy->numlines = self->numlines;
+	copy->livelines = self->livelines;
+	copy->dirty = false;
+	copy->lines = malloc(self->maxlines *sizeof(line));
+	if (!copy->lines) {
+		goto nomem;
+	}
+	memcpy(copy->lines, self->lines, self->numlines * sizeof(line));
+	copy->maxlines = self->maxlines;
+	copy->pydata = self->pydata;
+	Py_INCREF(copy->pydata);
+	return copy;
+nomem:
+	PyErr_NoMemory();
+	Py_XDECREF(copy);
+	return NULL;
+}
+
+/* filtercopy(matchfn): like copy(), but keep only the entries whose path
+ * matchfn(path) accepts. The copy shares self->pydata; its line table is
+ * freshly allocated and marked dirty so a later compact() re-snapshots. */
+static lazymanifest *lazymanifest_filtercopy(
+	lazymanifest *self, PyObject *matchfn)
+{
+	lazymanifest *copy = NULL;
+	int i;
+	if (!PyCallable_Check(matchfn)) {
+		PyErr_SetString(PyExc_TypeError, "matchfn must be callable");
+		return NULL;
+	}
+	/* compact ourselves first to avoid double-frees later when we
+	 * compact tmp so that it doesn't have random pointers to our
+	 * underlying from_malloc-data (self->pydata is safe) */
+	if (compact(self) != 0) {
+		goto nomem;
+	}
+	copy = PyObject_New(lazymanifest, &lazymanifestType);
+	if (!copy) {
+		goto nomem;
+	}
+	copy->dirty = true;
+	copy->lines = malloc(self->maxlines * sizeof(line));
+	if (!copy->lines) {
+		goto nomem;
+	}
+	copy->maxlines = self->maxlines;
+	copy->numlines = 0;
+	copy->pydata = self->pydata;
+	Py_INCREF(self->pydata);
+	for (i = 0; i < self->numlines; i++) {
+		PyObject *arglist = NULL, *result = NULL;
+		arglist = Py_BuildValue("(s)", self->lines[i].start);
+		if (!arglist) {
+			/* release what the copy owns before bailing out,
+			 * mirroring the failed-callback path below */
+			free(copy->lines);
+			Py_DECREF(self->pydata);
+			return NULL;
+		}
+		result = PyObject_CallObject(matchfn, arglist);
+		Py_DECREF(arglist);
+		/* if the callback raised an exception, just let it
+		 * through and give up */
+		if (!result) {
+			free(copy->lines);
+			Py_DECREF(self->pydata);
+			return NULL;
+		}
+		if (PyObject_IsTrue(result)) {
+			assert(!(self->lines[i].from_malloc));
+			copy->lines[copy->numlines++] = self->lines[i];
+		}
+		Py_DECREF(result);
+	}
+	copy->livelines = copy->numlines;
+	return copy;
+nomem:
+	PyErr_NoMemory();
+	Py_XDECREF(copy);
+	return NULL;
+}
+
+/* diff(other[, clean]): merge-walk both sorted line tables and build a
+ * dict mapping each differing path to ((node, flags), (node, flags)),
+ * using (None, '') for the side where the path is absent. When clean is
+ * truthy, unchanged paths are also reported, mapped to None. */
+static PyObject *lazymanifest_diff(lazymanifest *self, PyObject *args)
+{
+	lazymanifest *other;
+	PyObject *pyclean = NULL;
+	bool listclean;
+	PyObject *emptyTup = NULL, *ret = NULL;
+	PyObject *es;
+	int sneedle = 0, oneedle = 0;
+	if (!PyArg_ParseTuple(args, "O!|O", &lazymanifestType, &other, &pyclean)) {
+		return NULL;
+	}
+	listclean = (!pyclean) ? false : PyObject_IsTrue(pyclean);
+	es = PyBytes_FromString("");
+	if (!es) {
+		goto nomem;
+	}
+	/* shared (None, '') placeholder for the missing side */
+	emptyTup = PyTuple_Pack(2, Py_None, es);
+	Py_DECREF(es);
+	if (!emptyTup) {
+		goto nomem;
+	}
+	ret = PyDict_New();
+	if (!ret) {
+		goto nomem;
+	}
+	while (sneedle != self->numlines || oneedle != other->numlines) {
+		line *left = self->lines + sneedle;
+		line *right = other->lines + oneedle;
+		int result;
+		PyObject *key;
+		PyObject *outer;
+		/* If we're looking at a deleted entry and it's not
+		 * the end of the manifest, just skip it. */
+		if (left->deleted && sneedle < self->numlines) {
+			sneedle++;
+			continue;
+		}
+		if (right->deleted && oneedle < other->numlines) {
+			oneedle++;
+			continue;
+		}
+		/* if we're at the end of either manifest, then we
+		 * know the remaining items are adds so we can skip
+		 * the strcmp. */
+		if (sneedle == self->numlines) {
+			result = 1;
+		} else if (oneedle == other->numlines) {
+			result = -1;
+		} else {
+			result = linecmp(left, right);
+		}
+		key = result <= 0 ?
+			PyBytes_FromString(left->start) :
+			PyBytes_FromString(right->start);
+		if (!key)
+			goto nomem;
+		if (result < 0) {
+			/* path only in self */
+			PyObject *l = hashflags(left);
+			if (!l) {
+				goto nomem;
+			}
+			outer = PyTuple_Pack(2, l, emptyTup);
+			Py_DECREF(l);
+			if (!outer) {
+				goto nomem;
+			}
+			PyDict_SetItem(ret, key, outer);
+			Py_DECREF(outer);
+			sneedle++;
+		} else if (result > 0) {
+			/* path only in other */
+			PyObject *r = hashflags(right);
+			if (!r) {
+				goto nomem;
+			}
+			outer = PyTuple_Pack(2, emptyTup, r);
+			Py_DECREF(r);
+			if (!outer) {
+				goto nomem;
+			}
+			PyDict_SetItem(ret, key, outer);
+			Py_DECREF(outer);
+			oneedle++;
+		} else {
+			/* file exists in both manifests */
+			if (left->len != right->len
+			    || memcmp(left->start, right->start, left->len)
+			    || left->hash_suffix != right->hash_suffix) {
+				PyObject *l = hashflags(left);
+				PyObject *r;
+				if (!l) {
+					goto nomem;
+				}
+				r = hashflags(right);
+				if (!r) {
+					Py_DECREF(l);
+					goto nomem;
+				}
+				outer = PyTuple_Pack(2, l, r);
+				Py_DECREF(l);
+				Py_DECREF(r);
+				if (!outer) {
+					goto nomem;
+				}
+				PyDict_SetItem(ret, key, outer);
+				Py_DECREF(outer);
+			} else if (listclean) {
+				PyDict_SetItem(ret, key, Py_None);
+			}
+			sneedle++;
+			oneedle++;
+		}
+		Py_DECREF(key);
+	}
+	Py_DECREF(emptyTup);
+	return ret;
+nomem:
+	PyErr_NoMemory();
+	Py_XDECREF(ret);
+	Py_XDECREF(emptyTup);
+	return NULL;
+}
+
+/* Method table for the lazymanifest type. */
+static PyMethodDef lazymanifest_methods[] = {
+	{"iterkeys", (PyCFunction)lazymanifest_getkeysiter, METH_NOARGS,
+	 "Iterate over file names in this lazymanifest."},
+	{"iterentries", (PyCFunction)lazymanifest_getentriesiter, METH_NOARGS,
+	 "Iterate over (path, nodeid, flags) tuples in this lazymanifest."},
+	{"copy", (PyCFunction)lazymanifest_copy, METH_NOARGS,
+	 "Make a copy of this lazymanifest."},
+	{"filtercopy", (PyCFunction)lazymanifest_filtercopy, METH_O,
+	 "Make a copy of this manifest filtered by matchfn."},
+	{"diff", (PyCFunction)lazymanifest_diff, METH_VARARGS,
+	 "Compare this lazymanifest to another one."},
+	{"text", (PyCFunction)lazymanifest_text, METH_NOARGS,
+	 "Encode this manifest to text."},
+	{NULL},
+};
+
+/* Py2 needs Py_TPFLAGS_HAVE_SEQUENCE_IN for sq_contains; implied on Py3. */
+#ifdef IS_PY3K
+#define LAZYMANIFEST_TPFLAGS Py_TPFLAGS_DEFAULT
+#else
+#define LAZYMANIFEST_TPFLAGS Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_SEQUENCE_IN
+#endif
+
+/* The lazymanifest type itself: a lazily-parsed, mapping-like view of a
+ * manifest text. Iterating it yields file names (tp_iter). */
+static PyTypeObject lazymanifestType = {
+	PyVarObject_HEAD_INIT(NULL, 0)
+	"parsers.lazymanifest",                           /* tp_name */
+	sizeof(lazymanifest),                             /* tp_basicsize */
+	0,                                                /* tp_itemsize */
+	(destructor)lazymanifest_dealloc,                 /* tp_dealloc */
+	0,                                                /* tp_print */
+	0,                                                /* tp_getattr */
+	0,                                                /* tp_setattr */
+	0,                                                /* tp_compare */
+	0,                                                /* tp_repr */
+	0,                                                /* tp_as_number */
+	&lazymanifest_seq_meths,                          /* tp_as_sequence */
+	&lazymanifest_mapping_methods,                    /* tp_as_mapping */
+	0,                                                /* tp_hash */
+	0,                                                /* tp_call */
+	0,                                                /* tp_str */
+	0,                                                /* tp_getattro */
+	0,                                                /* tp_setattro */
+	0,                                                /* tp_as_buffer */
+	LAZYMANIFEST_TPFLAGS,                             /* tp_flags */
+	"TODO(augie)",                                    /* tp_doc */
+	0,                                                /* tp_traverse */
+	0,                                                /* tp_clear */
+	0,                                                /* tp_richcompare */
+	0,                                             /* tp_weaklistoffset */
+	(getiterfunc)lazymanifest_getkeysiter,                /* tp_iter */
+	0,                                                /* tp_iternext */
+	lazymanifest_methods,                             /* tp_methods */
+	0,                                                /* tp_members */
+	0,                                                /* tp_getset */
+	0,                                                /* tp_base */
+	0,                                                /* tp_dict */
+	0,                                                /* tp_descr_get */
+	0,                                                /* tp_descr_set */
+	0,                                                /* tp_dictoffset */
+	(initproc)lazymanifest_init,                      /* tp_init */
+	0,                                                /* tp_alloc */
+};
+
+/* Register the lazymanifest type on *mod*. Returns early (without
+ * registering) if PyType_Ready fails. */
+void manifest_module_init(PyObject * mod)
+{
+	lazymanifestType.tp_new = PyType_GenericNew;
+	if (PyType_Ready(&lazymanifestType) < 0)
+		return;
+	Py_INCREF(&lazymanifestType);
+
+	PyModule_AddObject(mod, "lazymanifest",
+			   (PyObject *)&lazymanifestType);
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cext/mpatch.c Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,200 @@
+/*
+ mpatch.c - efficient binary patching for Mercurial
+
+ This implements a patch algorithm that's O(m + nlog n) where m is the
+ size of the output and n is the number of patches.
+
+ Given a list of binary patches, it unpacks each into a hunk list,
+ then combines the hunk lists with a treewise recursion to form a
+ single hunk list. This hunk list is then applied to the original
+ text.
+
+ The text (or binary) fragments are copied directly from their source
+ Python objects into a preallocated output string to avoid the
+ allocation of intermediate Python objects. Working memory is about 2x
+ the total number of hunks.
+
+ Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
+
+ This software may be used and distributed according to the terms
+ of the GNU General Public License, incorporated herein by reference.
+*/
+
+#define PY_SSIZE_T_CLEAN
+#include <Python.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "util.h"
+#include "bitmanipulation.h"
+#include "compat.h"
+#include "mpatch.h"
+
+static char mpatch_doc[] = "Efficient binary patching.";
+static PyObject *mpatch_Error; /* the module's mpatchError exception */
+
+/* Translate an MPATCH_ERR_* return code into a pending Python exception.
+ * Unknown codes leave no exception set. */
+static void setpyerr(int r)
+{
+	switch (r) {
+	case MPATCH_ERR_NO_MEM:
+		PyErr_NoMemory();
+		break;
+	case MPATCH_ERR_CANNOT_BE_DECODED:
+		PyErr_SetString(mpatch_Error, "patch cannot be decoded");
+		break;
+	case MPATCH_ERR_INVALID_PATCH:
+		PyErr_SetString(mpatch_Error, "invalid patch");
+		break;
+	}
+}
+
+/* mpatch_fold callback: fetch patch #pos from the Python list *bins and
+ * decode it into a hunk list. Returns NULL with an exception set on
+ * failure (bad index, non-buffer item, or undecodable patch). */
+struct mpatch_flist *cpygetitem(void *bins, ssize_t pos)
+{
+	const char *buffer;
+	struct mpatch_flist *res;
+	ssize_t blen;
+	int r;
+
+	PyObject *tmp = PyList_GetItem((PyObject*)bins, pos);
+	if (!tmp)
+		return NULL;
+	if (PyObject_AsCharBuffer(tmp, &buffer, (Py_ssize_t*)&blen))
+		return NULL;
+	if ((r = mpatch_decode(buffer, blen, &res)) < 0) {
+		if (!PyErr_Occurred())
+			setpyerr(r);
+		return NULL;
+	}
+	return res;
+}
+
+/* patches(text, bins): apply the list of binary patches *bins* to *text*
+ * and return the patched bytes. An empty list returns *text* unchanged
+ * (same object, new reference). */
+static PyObject *
+patches(PyObject *self, PyObject *args)
+{
+	PyObject *text, *bins, *result;
+	struct mpatch_flist *patch;
+	const char *in;
+	int r = 0;
+	char *out;
+	Py_ssize_t len, outlen, inlen;
+
+	if (!PyArg_ParseTuple(args, "OO:mpatch", &text, &bins))
+		return NULL;
+
+	len = PyList_Size(bins);
+	if (!len) {
+		/* nothing to do */
+		Py_INCREF(text);
+		return text;
+	}
+
+	if (PyObject_AsCharBuffer(text, &in, &inlen))
+		return NULL;
+
+	/* combine all patch hunk lists into a single one */
+	patch = mpatch_fold(bins, cpygetitem, 0, len);
+	if (!patch) { /* error already set or memory error */
+		if (!PyErr_Occurred())
+			PyErr_NoMemory();
+		return NULL;
+	}
+
+	outlen = mpatch_calcsize(inlen, patch);
+	if (outlen < 0) {
+		/* negative size doubles as an MPATCH_ERR_* code */
+		r = (int)outlen;
+		result = NULL;
+		goto cleanup;
+	}
+	result = PyBytes_FromStringAndSize(NULL, outlen);
+	if (!result) {
+		result = NULL;
+		goto cleanup;
+	}
+	out = PyBytes_AsString(result);
+	if ((r = mpatch_apply(out, in, inlen, patch)) < 0) {
+		Py_DECREF(result);
+		result = NULL;
+	}
+cleanup:
+	mpatch_lfree(patch);
+	if (!result && !PyErr_Occurred())
+		setpyerr(r);
+	return result;
+}
+
+/* calculate size of a patched file directly */
+static PyObject *
+patchedsize(PyObject *self, PyObject *args)
+{
+	long orig, start, end, len, outlen = 0, last = 0, pos = 0;
+	Py_ssize_t patchlen;
+	char *bin;
+
+	if (!PyArg_ParseTuple(args, "ls#", &orig, &bin, &patchlen))
+		return NULL;
+
+	/* walk the 12-byte (start, end, len) hunk headers, summing the
+	 * untouched span before each hunk plus the hunk's new length */
+	while (pos >= 0 && pos < patchlen) {
+		start = getbe32(bin + pos);
+		end = getbe32(bin + pos + 4);
+		len = getbe32(bin + pos + 8);
+		if (start > end)
+			break; /* sanity check */
+		pos += 12 + len;
+		outlen += start - last;
+		last = end;
+		outlen += len;
+	}
+
+	if (pos != patchlen) {
+		if (!PyErr_Occurred())
+			PyErr_SetString(mpatch_Error, "patch cannot be decoded");
+		return NULL;
+	}
+
+	/* remainder of the original after the last hunk */
+	outlen += orig - last;
+	return Py_BuildValue("l", outlen);
+}
+
+/* Module method table. */
+static PyMethodDef methods[] = {
+	{"patches", patches, METH_VARARGS, "apply a series of patches\n"},
+	/* docstring typo fixed: was "calculed patched size" */
+	{"patchedsize", patchedsize, METH_VARARGS, "calculate patched size\n"},
+	{NULL, NULL}
+};
+
+static const int version = 1;
+
+#ifdef IS_PY3K
+static struct PyModuleDef mpatch_module = {
+	PyModuleDef_HEAD_INIT,
+	"mpatch",
+	mpatch_doc,
+	-1,
+	methods
+};
+
+/* Python 3 module init: create the module, then expose mpatchError and
+ * the implementation version. */
+PyMODINIT_FUNC PyInit_mpatch(void)
+{
+	PyObject *m;
+
+	m = PyModule_Create(&mpatch_module);
+	if (m == NULL)
+		return NULL;
+
+	mpatch_Error = PyErr_NewException("mercurial.cext.mpatch.mpatchError",
+					  NULL, NULL);
+	Py_INCREF(mpatch_Error);
+	PyModule_AddObject(m, "mpatchError", mpatch_Error);
+	PyModule_AddIntConstant(m, "version", version);
+
+	return m;
+}
+#else
+/* Python 2 module init: kept in lockstep with the Python 3 branch above.
+ * Previously mpatchError was created but never registered on the module,
+ * and a failed Py_InitModule3 was dereferenced; both are fixed here. */
+PyMODINIT_FUNC
+initmpatch(void)
+{
+	PyObject *m;
+	m = Py_InitModule3("mpatch", methods, mpatch_doc);
+	if (m == NULL)
+		return;
+	mpatch_Error = PyErr_NewException("mercurial.cext.mpatch.mpatchError",
+					  NULL, NULL);
+	Py_INCREF(mpatch_Error);
+	PyModule_AddObject(m, "mpatchError", mpatch_Error);
+	PyModule_AddIntConstant(m, "version", version);
+}
+#endif
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cext/osutil.c Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,1335 @@
+/*
+ osutil.c - native operating system services
+
+ Copyright 2007 Matt Mackall and others
+
+ This software may be used and distributed according to the terms of
+ the GNU General Public License, incorporated herein by reference.
+*/
+
+#define _ATFILE_SOURCE
+#include <Python.h>
+#include <fcntl.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <errno.h>
+
+#ifdef _WIN32
+#include <windows.h>
+#include <io.h>
+#else
+#include <dirent.h>
+#include <sys/socket.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <unistd.h>
+#ifdef HAVE_LINUX_STATFS
+#include <linux/magic.h>
+#include <sys/vfs.h>
+#endif
+#ifdef HAVE_BSD_STATFS
+#include <sys/mount.h>
+#include <sys/param.h>
+#endif
+#endif
+
+#ifdef __APPLE__
+#include <sys/attr.h>
+#include <sys/vnode.h>
+#endif
+
+#include "util.h"
+
+/* some platforms lack the PATH_MAX definition (eg. GNU/Hurd) */
+#ifndef PATH_MAX
+#define PATH_MAX 4096
+#endif
+
+#ifdef _WIN32
+/*
+stat struct compatible with hg expectations
+Mercurial only uses st_mode, st_size and st_mtime
+the rest is kept to minimize changes between implementations
+*/
+struct hg_stat {
+	int st_dev;
+	int st_mode;
+	int st_nlink;
+	__int64 st_size;
+	int st_mtime;
+	int st_ctime;
+};
+/* Python object wrapping the platform stat record. */
+struct listdir_stat {
+	PyObject_HEAD
+	struct hg_stat st;
+};
+#else
+/* On POSIX the wrapped record is the system struct stat itself. */
+struct listdir_stat {
+	PyObject_HEAD
+	struct stat st;
+};
+#endif
+
+/* listdir_slot(name) expands to a getter returning st.name as a Python
+ * int (PyLong on Py3, PyInt on Py2). */
+#ifdef IS_PY3K
+#define listdir_slot(name) \
+	static PyObject *listdir_stat_##name(PyObject *self, void *x) \
+	{ \
+		return PyLong_FromLong(((struct listdir_stat *)self)->st.name); \
+	}
+#else
+#define listdir_slot(name) \
+	static PyObject *listdir_stat_##name(PyObject *self, void *x) \
+	{ \
+		return PyInt_FromLong(((struct listdir_stat *)self)->st.name); \
+	}
+#endif
+
+listdir_slot(st_dev)
+listdir_slot(st_mode)
+listdir_slot(st_nlink)
+#ifdef _WIN32
+/* st_size is 64-bit on Windows, so it needs a long-long getter. */
+static PyObject *listdir_stat_st_size(PyObject *self, void *x)
+{
+	return PyLong_FromLongLong(
+		(PY_LONG_LONG)((struct listdir_stat *)self)->st.st_size);
+}
+#else
+listdir_slot(st_size)
+#endif
+listdir_slot(st_mtime)
+listdir_slot(st_ctime)
+
+/* Read-only attribute table exposing the stat fields to Python. */
+static struct PyGetSetDef listdir_stat_getsets[] = {
+	{"st_dev", listdir_stat_st_dev, 0, 0, 0},
+	{"st_mode", listdir_stat_st_mode, 0, 0, 0},
+	{"st_nlink", listdir_stat_st_nlink, 0, 0, 0},
+	{"st_size", listdir_stat_st_size, 0, 0, 0},
+	{"st_mtime", listdir_stat_st_mtime, 0, 0, 0},
+	{"st_ctime", listdir_stat_st_ctime, 0, 0, 0},
+	{0, 0, 0, 0, 0}
+};
+
+/* tp_new: plain allocation; the caller fills in the st member directly. */
+static PyObject *listdir_stat_new(PyTypeObject *t, PyObject *a, PyObject *k)
+{
+	return t->tp_alloc(t, 0);
+}
+
+/* tp_dealloc: no owned resources beyond the object itself. */
+static void listdir_stat_dealloc(PyObject *o)
+{
+	o->ob_type->tp_free(o);
+}
+
+/* Type object for the stat records returned by listdir(..., stat=True). */
+static PyTypeObject listdir_stat_type = {
+	PyVarObject_HEAD_INIT(NULL, 0)
+	"osutil.stat",             /*tp_name*/
+	sizeof(struct listdir_stat), /*tp_basicsize*/
+	0,                         /*tp_itemsize*/
+	(destructor)listdir_stat_dealloc, /*tp_dealloc*/
+	0,                         /*tp_print*/
+	0,                         /*tp_getattr*/
+	0,                         /*tp_setattr*/
+	0,                         /*tp_compare*/
+	0,                         /*tp_repr*/
+	0,                         /*tp_as_number*/
+	0,                         /*tp_as_sequence*/
+	0,                         /*tp_as_mapping*/
+	0,                         /*tp_hash */
+	0,                         /*tp_call*/
+	0,                         /*tp_str*/
+	0,                         /*tp_getattro*/
+	0,                         /*tp_setattro*/
+	0,                         /*tp_as_buffer*/
+	Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /*tp_flags*/
+	"stat objects",            /* tp_doc */
+	0,                         /* tp_traverse */
+	0,                         /* tp_clear */
+	0,                         /* tp_richcompare */
+	0,                         /* tp_weaklistoffset */
+	0,                         /* tp_iter */
+	0,                         /* tp_iternext */
+	0,                         /* tp_methods */
+	0,                         /* tp_members */
+	listdir_stat_getsets,      /* tp_getset */
+	0,                         /* tp_base */
+	0,                         /* tp_dict */
+	0,                         /* tp_descr_get */
+	0,                         /* tp_descr_set */
+	0,                         /* tp_dictoffset */
+	0,                         /* tp_init */
+	0,                         /* tp_alloc */
+	listdir_stat_new,          /* tp_new */
+};
+
+#ifdef _WIN32
+
+/* Convert a Windows FILETIME (100ns ticks since 1601) to Unix seconds. */
+static int to_python_time(const FILETIME *tm)
+{
+	/* number of seconds between epoch and January 1 1601 */
+	const __int64 a0 = (__int64)134774L * (__int64)24L * (__int64)3600L;
+	/* conversion factor from 100ns to 1s */
+	const __int64 a1 = 10000000;
+	/* explicit (int) cast to suspend compiler warnings */
+	return (int)((((__int64)tm->dwHighDateTime << 32)
+			+ tm->dwLowDateTime) / a1 - a0);
+}
+
+/* Build one listdir() result entry from Win32 find data: (name, kind) or,
+ * when wantstat is set, (name, kind, stat-object). */
+static PyObject *make_item(const WIN32_FIND_DATAA *fd, int wantstat)
+{
+	PyObject *py_st;
+	struct hg_stat *stp;
+
+	int kind = (fd->dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY)
+		? _S_IFDIR : _S_IFREG;
+
+	if (!wantstat)
+		return Py_BuildValue("si", fd->cFileName, kind);
+
+	py_st = PyObject_CallObject((PyObject *)&listdir_stat_type, NULL);
+	if (!py_st)
+		return NULL;
+
+	stp = &((struct listdir_stat *)py_st)->st;
+	/*
+	use kind as st_mode
+	rwx bits on Win32 are meaningless
+	and Hg does not use them anyway
+	*/
+	stp->st_mode = kind;
+	stp->st_mtime = to_python_time(&fd->ftLastWriteTime);
+	stp->st_ctime = to_python_time(&fd->ftCreationTime);
+	if (kind == _S_IFREG)
+		stp->st_size = ((__int64)fd->nFileSizeHigh << 32)
+				+ fd->nFileSizeLow;
+	return Py_BuildValue("siN", fd->cFileName,
+		kind, py_st);
+}
+
+/* Windows implementation of listdir: enumerate *path* via FindFirstFile/
+ * FindNextFile, returning a list of make_item() entries. If *skip* names
+ * a subdirectory that is present, an empty list is returned instead
+ * (used to prune directories containing e.g. a nested .hg). Resources
+ * are released through the reverse-order goto ladder at the end. */
+static PyObject *_listdir(char *path, int plen, int wantstat, char *skip)
+{
+	PyObject *rval = NULL; /* initialize - return value */
+	PyObject *list;
+	HANDLE fh;
+	WIN32_FIND_DATAA fd;
+	char *pattern;
+
+	/* build the path + \* pattern string */
+	pattern = PyMem_Malloc(plen + 3); /* path + \* + \0 */
+	if (!pattern) {
+		PyErr_NoMemory();
+		goto error_nomem;
+	}
+	memcpy(pattern, path, plen);
+
+	if (plen > 0) {
+		char c = path[plen-1];
+		if (c != ':' && c != '/' && c != '\\')
+			pattern[plen++] = '\\';
+	}
+	pattern[plen++] = '*';
+	pattern[plen] = '\0';
+
+	fh = FindFirstFileA(pattern, &fd);
+	if (fh == INVALID_HANDLE_VALUE) {
+		PyErr_SetFromWindowsErrWithFilename(GetLastError(), path);
+		goto error_file;
+	}
+
+	list = PyList_New(0);
+	if (!list)
+		goto error_list;
+
+	do {
+		PyObject *item;
+
+		if (fd.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY) {
+			if (!strcmp(fd.cFileName, ".")
+			|| !strcmp(fd.cFileName, ".."))
+				continue;
+
+			if (skip && !strcmp(fd.cFileName, skip)) {
+				rval = PyList_New(0);
+				goto error;
+			}
+		}
+
+		item = make_item(&fd, wantstat);
+		if (!item)
+			goto error;
+
+		if (PyList_Append(list, item)) {
+			Py_XDECREF(item);
+			goto error;
+		}
+
+		Py_XDECREF(item);
+	} while (FindNextFileA(fh, &fd));
+
+	if (GetLastError() != ERROR_NO_MORE_FILES) {
+		PyErr_SetFromWindowsErrWithFilename(GetLastError(), path);
+		goto error;
+	}
+
+	rval = list;
+	Py_XINCREF(rval);
+error:
+	Py_XDECREF(list);
+error_list:
+	FindClose(fh);
+error_file:
+	PyMem_Free(pattern);
+error_nomem:
+	return rval;
+}
+
+#else
+
+/* Map a dirent's d_type to an S_IF* kind; -1 means unknown (either the
+ * platform lacks d_type or the filesystem returned DT_UNKNOWN), in which
+ * case the caller falls back to stat(). */
+int entkind(struct dirent *ent)
+{
+#ifdef DT_REG
+	switch (ent->d_type) {
+	case DT_REG: return S_IFREG;
+	case DT_DIR: return S_IFDIR;
+	case DT_LNK: return S_IFLNK;
+	case DT_BLK: return S_IFBLK;
+	case DT_CHR: return S_IFCHR;
+	case DT_FIFO: return S_IFIFO;
+	case DT_SOCK: return S_IFSOCK;
+	}
+#endif
+	return -1;
+}
+
+/* Wrap *st in a new osutil.stat object; NULL (with exception set by the
+ * constructor) on allocation failure. */
+static PyObject *makestat(const struct stat *st)
+{
+	PyObject *stat;
+
+	stat = PyObject_CallObject((PyObject *)&listdir_stat_type, NULL);
+	if (stat)
+		memcpy(&((struct listdir_stat *)stat)->st, st, sizeof(*st));
+	return stat;
+}
+
+/* Portable readdir()-based listdir. Returns a list of (name, kind) or
+ * (name, kind, stat) entries; returns an empty list instead if *skip*
+ * names a subdirectory that exists. Where fstatat is available the
+ * directory is opened by fd to avoid rebuilding full paths per entry. */
+static PyObject *_listdir_stat(char *path, int pathlen, int keepstat,
+			       char *skip)
+{
+	PyObject *list, *elem, *stat = NULL, *ret = NULL;
+	char fullpath[PATH_MAX + 10];
+	int kind, err;
+	struct stat st;
+	struct dirent *ent;
+	DIR *dir;
+#ifdef AT_SYMLINK_NOFOLLOW
+	int dfd = -1;
+#endif
+
+	if (pathlen >= PATH_MAX) {
+		errno = ENAMETOOLONG;
+		PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
+		goto error_value;
+	}
+	strncpy(fullpath, path, PATH_MAX);
+	fullpath[pathlen] = '/';
+
+#ifdef AT_SYMLINK_NOFOLLOW
+	dfd = open(path, O_RDONLY);
+	if (dfd == -1) {
+		PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
+		goto error_value;
+	}
+	dir = fdopendir(dfd);
+#else
+	dir = opendir(path);
+#endif
+	if (!dir) {
+		PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
+		goto error_dir;
+	}
+
+	list = PyList_New(0);
+	if (!list)
+		goto error_list;
+
+	while ((ent = readdir(dir))) {
+		if (!strcmp(ent->d_name, ".") || !strcmp(ent->d_name, ".."))
+			continue;
+
+		kind = entkind(ent);
+		/* stat when d_type is unavailable or the caller wants it */
+		if (kind == -1 || keepstat) {
+#ifdef AT_SYMLINK_NOFOLLOW
+			err = fstatat(dfd, ent->d_name, &st,
+				      AT_SYMLINK_NOFOLLOW);
+#else
+			strncpy(fullpath + pathlen + 1, ent->d_name,
+				PATH_MAX - pathlen);
+			fullpath[PATH_MAX] = '\0';
+			err = lstat(fullpath, &st);
+#endif
+			if (err == -1) {
+				/* race with file deletion? */
+				if (errno == ENOENT)
+					continue;
+				strncpy(fullpath + pathlen + 1, ent->d_name,
+					PATH_MAX - pathlen);
+				fullpath[PATH_MAX] = 0;
+				PyErr_SetFromErrnoWithFilename(PyExc_OSError,
+							       fullpath);
+				goto error;
+			}
+			kind = st.st_mode & S_IFMT;
+		}
+
+		/* quit early? */
+		if (skip && kind == S_IFDIR && !strcmp(ent->d_name, skip)) {
+			ret = PyList_New(0);
+			goto error;
+		}
+
+		if (keepstat) {
+			stat = makestat(&st);
+			if (!stat)
+				goto error;
+			elem = Py_BuildValue("siN", ent->d_name, kind, stat);
+		} else
+			elem = Py_BuildValue("si", ent->d_name, kind);
+		if (!elem)
+			goto error;
+		stat = NULL; /* ownership moved into elem ("N" format) */
+
+		PyList_Append(list, elem);
+		Py_DECREF(elem);
+	}
+
+	ret = list;
+	Py_INCREF(ret);
+
+error:
+	Py_DECREF(list);
+	Py_XDECREF(stat);
+error_list:
+	closedir(dir);
+	/* closedir also closes its dirfd */
+	goto error_value;
+error_dir:
+#ifdef AT_SYMLINK_NOFOLLOW
+	close(dfd);
+#endif
+error_value:
+	return ret;
+}
+
+#ifdef __APPLE__
+
+/* Record layout getdirentriesattr() writes for our requested attribute
+ * set; the field order must match requested_attr in _listdir_batch. */
+typedef struct {
+	u_int32_t length;
+	attrreference_t name;
+	fsobj_type_t obj_type;
+	struct timespec mtime;
+#if __LITTLE_ENDIAN__
+	mode_t access_mask;
+	uint16_t padding;
+#else
+	uint16_t padding;
+	mode_t access_mask;
+#endif
+	off_t size;
+} __attribute__((packed)) attrbuf_entry;
+
+/* Map a vnode object type to an S_IF* kind; -1 means unknown. */
+int attrkind(attrbuf_entry *entry)
+{
+	switch (entry->obj_type) {
+	case VREG: return S_IFREG;
+	case VDIR: return S_IFDIR;
+	case VLNK: return S_IFLNK;
+	case VBLK: return S_IFBLK;
+	case VCHR: return S_IFCHR;
+	case VFIFO: return S_IFIFO;
+	case VSOCK: return S_IFSOCK;
+	}
+	return -1;
+}
+
+/* get these many entries at a time */
+#define LISTDIR_BATCH_SIZE 50
+
+/* macOS fast path: list *path* via getdirentriesattr(), which fetches
+ * name/type/mtime/mode/size for many entries per syscall. On conditions
+ * this call cannot handle (ENOTSUP filesystem, directory mutated during
+ * iteration) *fallback is set and NULL is returned so the caller retries
+ * with the stat-based implementation. */
+static PyObject *_listdir_batch(char *path, int pathlen, int keepstat,
+				char *skip, bool *fallback)
+{
+	PyObject *list, *elem, *stat = NULL, *ret = NULL;
+	int kind, err;
+	unsigned long index;
+	unsigned int count, old_state, new_state;
+	bool state_seen = false;
+	attrbuf_entry *entry;
+	/* from the getattrlist(2) man page: a path can be no longer than
+	   (NAME_MAX * 3 + 1) bytes. Also, "The getattrlist() function will
+	   silently truncate attribute data if attrBufSize is too small." So
+	   pass in a buffer big enough for the worst case. */
+	char attrbuf[LISTDIR_BATCH_SIZE * (sizeof(attrbuf_entry) + NAME_MAX * 3 + 1)];
+	unsigned int basep_unused;
+
+	struct stat st;
+	int dfd = -1;
+
+	/* these must match the attrbuf_entry struct, otherwise you'll end up
+	   with garbage */
+	struct attrlist requested_attr = {0};
+	requested_attr.bitmapcount = ATTR_BIT_MAP_COUNT;
+	requested_attr.commonattr = (ATTR_CMN_NAME | ATTR_CMN_OBJTYPE |
+				     ATTR_CMN_MODTIME | ATTR_CMN_ACCESSMASK);
+	requested_attr.fileattr = ATTR_FILE_DATALENGTH;
+
+	*fallback = false;
+
+	if (pathlen >= PATH_MAX) {
+		errno = ENAMETOOLONG;
+		PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
+		goto error_value;
+	}
+
+	dfd = open(path, O_RDONLY);
+	if (dfd == -1) {
+		PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
+		goto error_value;
+	}
+
+	list = PyList_New(0);
+	if (!list)
+		goto error_dir;
+
+	do {
+		count = LISTDIR_BATCH_SIZE;
+		err = getdirentriesattr(dfd, &requested_attr, &attrbuf,
+					sizeof(attrbuf), &count, &basep_unused,
+					&new_state, 0);
+		if (err < 0) {
+			if (errno == ENOTSUP) {
+				/* We're on a filesystem that doesn't support
+				   getdirentriesattr. Fall back to the
+				   stat-based implementation. */
+				*fallback = true;
+			} else
+				PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
+			goto error;
+		}
+
+		if (!state_seen) {
+			old_state = new_state;
+			state_seen = true;
+		} else if (old_state != new_state) {
+			/* There's an edge case with getdirentriesattr. Consider
+			   the following initial list of files:
+
+			   a
+			   b
+			   <--
+			   c
+			   d
+
+			   If the iteration is paused at the arrow, and b is
+			   deleted before it is resumed, getdirentriesattr will
+			   not return d at all!  Ordinarily we're expected to
+			   restart the iteration from the beginning. To avoid
+			   getting stuck in a retry loop here, fall back to
+			   stat. */
+			*fallback = true;
+			goto error;
+		}
+
+		entry = (attrbuf_entry *)attrbuf;
+
+		for (index = 0; index < count; index++) {
+			char *filename = ((char *)&entry->name) +
+				entry->name.attr_dataoffset;
+
+			if (!strcmp(filename, ".") || !strcmp(filename, ".."))
+				continue;
+
+			kind = attrkind(entry);
+			if (kind == -1) {
+				PyErr_Format(PyExc_OSError,
+					     "unknown object type %u for file "
+					     "%s%s!",
+					     entry->obj_type, path, filename);
+				goto error;
+			}
+
+			/* quit early? */
+			if (skip && kind == S_IFDIR && !strcmp(filename, skip)) {
+				ret = PyList_New(0);
+				goto error;
+			}
+
+			if (keepstat) {
+				/* from the getattrlist(2) man page: "Only the
+				   permission bits ... are valid". */
+				st.st_mode = (entry->access_mask & ~S_IFMT) | kind;
+				st.st_mtime = entry->mtime.tv_sec;
+				st.st_size = entry->size;
+				stat = makestat(&st);
+				if (!stat)
+					goto error;
+				elem = Py_BuildValue("siN", filename, kind, stat);
+			} else
+				elem = Py_BuildValue("si", filename, kind);
+			if (!elem)
+				goto error;
+			stat = NULL; /* ownership moved into elem ("N") */
+
+			PyList_Append(list, elem);
+			Py_DECREF(elem);
+
+			/* records are variable length; hop to the next one */
+			entry = (attrbuf_entry *)((char *)entry + entry->length);
+		}
+	} while (err == 0);
+
+	ret = list;
+	Py_INCREF(ret);
+
+error:
+	Py_DECREF(list);
+	Py_XDECREF(stat);
+error_dir:
+	close(dfd);
+error_value:
+	return ret;
+}
+
+#endif /* __APPLE__ */
+
+static PyObject *_listdir(char *path, int pathlen, int keepstat, char *skip)
+{
+	/* Dispatch to the fastest directory lister available: on Darwin
+	 * try the batched getdirentriesattr() path first and fall back
+	 * to the portable stat-based walk only when it signals that the
+	 * filesystem cannot support it. */
+#ifdef __APPLE__
+	bool fallback = false;
+	PyObject *batched;
+
+	batched = _listdir_batch(path, pathlen, keepstat, skip, &fallback);
+	if (!(batched == NULL && fallback))
+		return batched;
+#endif
+	return _listdir_stat(path, pathlen, keepstat, skip);
+}
+
+/* statfiles(names): lstat() every path in the sequence `names` and
+   return a list of equal length holding a stat object for each regular
+   file or symlink, and None for missing entries and entries of other
+   types.  Raises TypeError for a non-sequence or non-bytes items. */
+static PyObject *statfiles(PyObject *self, PyObject *args)
+{
+	PyObject *names, *stats;
+	Py_ssize_t i, count;
+
+	if (!PyArg_ParseTuple(args, "O:statfiles", &names))
+		return NULL;
+
+	count = PySequence_Length(names);
+	if (count == -1) {
+		PyErr_SetString(PyExc_TypeError, "not a sequence");
+		return NULL;
+	}
+
+	stats = PyList_New(count);
+	if (stats == NULL)
+		return NULL;
+
+	for (i = 0; i < count; i++) {
+		PyObject *stat, *pypath;
+		struct stat st;
+		int ret, kind;
+		char *path;
+
+		/* With a large file count or on a slow filesystem,
+		   don't block signals for long (issue4878). */
+		if ((i % 1000) == 999 && PyErr_CheckSignals() == -1)
+			goto bail;
+
+		pypath = PySequence_GetItem(names, i);
+		if (!pypath)
+			goto bail;
+		path = PyBytes_AsString(pypath);
+		if (path == NULL) {
+			Py_DECREF(pypath);
+			PyErr_SetString(PyExc_TypeError, "not a string");
+			goto bail;
+		}
+		ret = lstat(path, &st);
+		Py_DECREF(pypath);
+		/* only inspect st after a successful lstat(): on failure the
+		   buffer is uninitialized and reading it is undefined
+		   behavior */
+		kind = ret == -1 ? 0 : st.st_mode & S_IFMT;
+		if (ret != -1 && (kind == S_IFREG || kind == S_IFLNK)) {
+			stat = makestat(&st);
+			if (stat == NULL)
+				goto bail;
+			PyList_SET_ITEM(stats, i, stat);
+		} else {
+			Py_INCREF(Py_None);
+			PyList_SET_ITEM(stats, i, Py_None);
+		}
+	}
+
+	return stats;
+
+bail:
+	Py_DECREF(stats);
+	return NULL;
+}
+
+/*
+ * recvfds() simply does not release GIL during blocking io operation because
+ * command server is known to be single-threaded.
+ *
+ * Old systems such as Solaris don't provide CMSG_LEN, msg_control, etc.
+ * Currently, recvfds() is not supported on these platforms.
+ */
+#ifdef CMSG_LEN
+
+/* Receive one message on `sockfd` and extract any file descriptors
+   passed with it.  On success *rfds points into `cbuf` (valid only as
+   long as cbuf is) and the number of fds is returned; 0 is returned
+   when the message carried no SCM_RIGHTS payload; -1 on recvmsg()
+   failure with errno set. */
+static ssize_t recvfdstobuf(int sockfd, int **rfds, void *cbuf, size_t cbufsize)
+{
+	/* a 1-byte data payload is required for the ancillary data to be
+	   delivered on some platforms */
+	char dummy[1];
+	struct iovec iov = {dummy, sizeof(dummy)};
+	struct msghdr msgh = {0};
+	struct cmsghdr *cmsg;
+
+	msgh.msg_iov = &iov;
+	msgh.msg_iovlen = 1;
+	msgh.msg_control = cbuf;
+	msgh.msg_controllen = (socklen_t)cbufsize;
+	if (recvmsg(sockfd, &msgh, 0) < 0)
+		return -1;
+
+	/* scan the control messages for the fd-passing payload */
+	for (cmsg = CMSG_FIRSTHDR(&msgh); cmsg;
+	     cmsg = CMSG_NXTHDR(&msgh, cmsg)) {
+		if (cmsg->cmsg_level != SOL_SOCKET ||
+		    cmsg->cmsg_type != SCM_RIGHTS)
+			continue;
+		*rfds = (int *)CMSG_DATA(cmsg);
+		return (cmsg->cmsg_len - CMSG_LEN(0)) / sizeof(int);
+	}
+
+	*rfds = cbuf;
+	return 0;
+}
+
+/* recvfds(sockfd): receive file descriptors passed over a Unix-domain
+   socket and return them as a list of ints.  Raises OSError if the
+   receive fails. */
+static PyObject *recvfds(PyObject *self, PyObject *args)
+{
+	int sockfd;
+	int *rfds = NULL;
+	ssize_t rfdscount, i;
+	/* 256 bytes of ancillary space bounds how many fds can arrive in
+	   one message; excess control data is truncated by the kernel */
+	char cbuf[256];
+	PyObject *rfdslist = NULL;
+
+	if (!PyArg_ParseTuple(args, "i", &sockfd))
+		return NULL;
+
+	rfdscount = recvfdstobuf(sockfd, &rfds, cbuf, sizeof(cbuf));
+	if (rfdscount < 0)
+		return PyErr_SetFromErrno(PyExc_OSError);
+
+	rfdslist = PyList_New(rfdscount);
+	if (!rfdslist)
+		goto bail;
+	for (i = 0; i < rfdscount; i++) {
+		PyObject *obj = PyLong_FromLong(rfds[i]);
+		if (!obj)
+			goto bail;
+		PyList_SET_ITEM(rfdslist, i, obj);
+	}
+	return rfdslist;
+
+bail:
+	Py_XDECREF(rfdslist);
+	return NULL;
+}
+
+#endif /* CMSG_LEN */
+
+#if defined(HAVE_SETPROCTITLE)
+/* setproctitle is the first choice - available in FreeBSD */
+#define SETPROCNAME_USE_SETPROCTITLE
+#elif (defined(__linux__) || defined(__APPLE__)) && PY_MAJOR_VERSION == 2
+/* rewrite the argv buffer in place - works in Linux and OS X. Py_GetArgcArgv
+ * in Python 3 returns the copied wchar_t **argv, thus unsupported. */
+#define SETPROCNAME_USE_ARGVREWRITE
+#else
+#define SETPROCNAME_USE_NONE
+#endif
+
+#ifndef SETPROCNAME_USE_NONE
+/* setprocname(name): best-effort update of the process title shown by
+   ps(1).  Uses setproctitle(3) where available; otherwise overwrites
+   the original argv buffer in place (Linux / OS X, Python 2 only --
+   see the SETPROCNAME_USE_* selection above).  Always returns None. */
+static PyObject *setprocname(PyObject *self, PyObject *args)
+{
+	const char *name = NULL;
+	if (!PyArg_ParseTuple(args, "s", &name))
+		return NULL;
+
+#if defined(SETPROCNAME_USE_SETPROCTITLE)
+	setproctitle("%s", name);
+#elif defined(SETPROCNAME_USE_ARGVREWRITE)
+	{
+		/* computed once; the argv block never moves */
+		static char *argvstart = NULL;
+		static size_t argvsize = 0;
+		if (argvstart == NULL) {
+			int argc = 0, i;
+			char **argv = NULL;
+			char *argvend;
+			extern void Py_GetArgcArgv(int *argc, char ***argv);
+			Py_GetArgcArgv(&argc, &argv);
+
+			/* Check the memory we can use. Typically, argv[i] and
+			 * argv[i + 1] are continuous. */
+			argvend = argvstart = argv[0];
+			for (i = 0; i < argc; ++i) {
+				/* declared at block start: keep the file
+				   free of declaration-after-statement */
+				size_t len;
+				if (argv[i] > argvend || argv[i] < argvstart)
+					break; /* not continuous */
+				len = strlen(argv[i]);
+				argvend = argv[i] + len + 1 /* '\0' */;
+			}
+			if (argvend > argvstart) /* sanity check */
+				argvsize = argvend - argvstart;
+		}
+
+		if (argvstart && argvsize > 1) {
+			int n = snprintf(argvstart, argvsize, "%s", name);
+			if (n >= 0 && (size_t)n < argvsize)
+				memset(argvstart + n, 0, argvsize - n);
+		}
+	}
+#endif
+
+	Py_RETURN_NONE;
+}
+#endif /* ndef SETPROCNAME_USE_NONE */
+
+#if defined(HAVE_BSD_STATFS)
+/* Map a statfs result to a filesystem type name.  BSD and OS X carry
+   the name directly in the statfs buffer. */
+static const char *describefstype(const struct statfs *pbuf)
+{
+	/* BSD or OSX provides a f_fstypename field */
+	return pbuf->f_fstypename;
+}
+#elif defined(HAVE_LINUX_STATFS)
+/* Map a statfs result to a filesystem type name.  Linux only exposes a
+   numeric f_type, so translate every magic number known at compile
+   time; returns NULL for an unrecognized magic. */
+static const char *describefstype(const struct statfs *pbuf)
+{
+	/* Begin of Linux filesystems */
+#ifdef ADFS_SUPER_MAGIC
+	if (pbuf->f_type == ADFS_SUPER_MAGIC)
+		return "adfs";
+#endif
+#ifdef AFFS_SUPER_MAGIC
+	if (pbuf->f_type == AFFS_SUPER_MAGIC)
+		return "affs";
+#endif
+#ifdef AUTOFS_SUPER_MAGIC
+	if (pbuf->f_type == AUTOFS_SUPER_MAGIC)
+		return "autofs";
+#endif
+#ifdef BDEVFS_MAGIC
+	if (pbuf->f_type == BDEVFS_MAGIC)
+		return "bdevfs";
+#endif
+#ifdef BEFS_SUPER_MAGIC
+	if (pbuf->f_type == BEFS_SUPER_MAGIC)
+		return "befs";
+#endif
+#ifdef BFS_MAGIC
+	if (pbuf->f_type == BFS_MAGIC)
+		return "bfs";
+#endif
+#ifdef BINFMTFS_MAGIC
+	if (pbuf->f_type == BINFMTFS_MAGIC)
+		return "binfmtfs";
+#endif
+#ifdef BTRFS_SUPER_MAGIC
+	if (pbuf->f_type == BTRFS_SUPER_MAGIC)
+		return "btrfs";
+#endif
+#ifdef CGROUP_SUPER_MAGIC
+	if (pbuf->f_type == CGROUP_SUPER_MAGIC)
+		return "cgroup";
+#endif
+#ifdef CIFS_MAGIC_NUMBER
+	if (pbuf->f_type == CIFS_MAGIC_NUMBER)
+		return "cifs";
+#endif
+#ifdef CODA_SUPER_MAGIC
+	if (pbuf->f_type == CODA_SUPER_MAGIC)
+		return "coda";
+#endif
+#ifdef COH_SUPER_MAGIC
+	if (pbuf->f_type == COH_SUPER_MAGIC)
+		return "coh";
+#endif
+#ifdef CRAMFS_MAGIC
+	if (pbuf->f_type == CRAMFS_MAGIC)
+		return "cramfs";
+#endif
+#ifdef DEBUGFS_MAGIC
+	if (pbuf->f_type == DEBUGFS_MAGIC)
+		return "debugfs";
+#endif
+#ifdef DEVFS_SUPER_MAGIC
+	if (pbuf->f_type == DEVFS_SUPER_MAGIC)
+		return "devfs";
+#endif
+#ifdef DEVPTS_SUPER_MAGIC
+	if (pbuf->f_type == DEVPTS_SUPER_MAGIC)
+		return "devpts";
+#endif
+#ifdef EFIVARFS_MAGIC
+	if (pbuf->f_type == EFIVARFS_MAGIC)
+		return "efivarfs";
+#endif
+#ifdef EFS_SUPER_MAGIC
+	if (pbuf->f_type == EFS_SUPER_MAGIC)
+		return "efs";
+#endif
+#ifdef EXT_SUPER_MAGIC
+	if (pbuf->f_type == EXT_SUPER_MAGIC)
+		return "ext";
+#endif
+#ifdef EXT2_OLD_SUPER_MAGIC
+	if (pbuf->f_type == EXT2_OLD_SUPER_MAGIC)
+		return "ext2";
+#endif
+#ifdef EXT2_SUPER_MAGIC
+	if (pbuf->f_type == EXT2_SUPER_MAGIC)
+		return "ext2";
+#endif
+#ifdef EXT3_SUPER_MAGIC
+	if (pbuf->f_type == EXT3_SUPER_MAGIC)
+		return "ext3";
+#endif
+#ifdef EXT4_SUPER_MAGIC
+	if (pbuf->f_type == EXT4_SUPER_MAGIC)
+		return "ext4";
+#endif
+#ifdef F2FS_SUPER_MAGIC
+	if (pbuf->f_type == F2FS_SUPER_MAGIC)
+		return "f2fs";
+#endif
+#ifdef FUSE_SUPER_MAGIC
+	if (pbuf->f_type == FUSE_SUPER_MAGIC)
+		return "fuse";
+#endif
+#ifdef FUTEXFS_SUPER_MAGIC
+	if (pbuf->f_type == FUTEXFS_SUPER_MAGIC)
+		return "futexfs";
+#endif
+#ifdef HFS_SUPER_MAGIC
+	if (pbuf->f_type == HFS_SUPER_MAGIC)
+		return "hfs";
+#endif
+#ifdef HOSTFS_SUPER_MAGIC
+	if (pbuf->f_type == HOSTFS_SUPER_MAGIC)
+		return "hostfs";
+#endif
+#ifdef HPFS_SUPER_MAGIC
+	if (pbuf->f_type == HPFS_SUPER_MAGIC)
+		return "hpfs";
+#endif
+#ifdef HUGETLBFS_MAGIC
+	if (pbuf->f_type == HUGETLBFS_MAGIC)
+		return "hugetlbfs";
+#endif
+#ifdef ISOFS_SUPER_MAGIC
+	if (pbuf->f_type == ISOFS_SUPER_MAGIC)
+		return "isofs";
+#endif
+#ifdef JFFS2_SUPER_MAGIC
+	if (pbuf->f_type == JFFS2_SUPER_MAGIC)
+		return "jffs2";
+#endif
+#ifdef JFS_SUPER_MAGIC
+	if (pbuf->f_type == JFS_SUPER_MAGIC)
+		return "jfs";
+#endif
+#ifdef MINIX_SUPER_MAGIC
+	if (pbuf->f_type == MINIX_SUPER_MAGIC)
+		return "minix";
+#endif
+#ifdef MINIX2_SUPER_MAGIC
+	if (pbuf->f_type == MINIX2_SUPER_MAGIC)
+		return "minix2";
+#endif
+#ifdef MINIX3_SUPER_MAGIC
+	if (pbuf->f_type == MINIX3_SUPER_MAGIC)
+		return "minix3";
+#endif
+#ifdef MQUEUE_MAGIC
+	if (pbuf->f_type == MQUEUE_MAGIC)
+		return "mqueue";
+#endif
+#ifdef MSDOS_SUPER_MAGIC
+	if (pbuf->f_type == MSDOS_SUPER_MAGIC)
+		return "msdos";
+#endif
+#ifdef NCP_SUPER_MAGIC
+	if (pbuf->f_type == NCP_SUPER_MAGIC)
+		return "ncp";
+#endif
+#ifdef NFS_SUPER_MAGIC
+	if (pbuf->f_type == NFS_SUPER_MAGIC)
+		return "nfs";
+#endif
+#ifdef NILFS_SUPER_MAGIC
+	if (pbuf->f_type == NILFS_SUPER_MAGIC)
+		return "nilfs";
+#endif
+#ifdef NTFS_SB_MAGIC
+	if (pbuf->f_type == NTFS_SB_MAGIC)
+		return "ntfs-sb";
+#endif
+#ifdef OCFS2_SUPER_MAGIC
+	if (pbuf->f_type == OCFS2_SUPER_MAGIC)
+		return "ocfs2";
+#endif
+#ifdef OPENPROM_SUPER_MAGIC
+	if (pbuf->f_type == OPENPROM_SUPER_MAGIC)
+		return "openprom";
+#endif
+#ifdef OVERLAYFS_SUPER_MAGIC
+	if (pbuf->f_type == OVERLAYFS_SUPER_MAGIC)
+		return "overlay";
+#endif
+#ifdef PIPEFS_MAGIC
+	if (pbuf->f_type == PIPEFS_MAGIC)
+		return "pipefs";
+#endif
+#ifdef PROC_SUPER_MAGIC
+	if (pbuf->f_type == PROC_SUPER_MAGIC)
+		return "proc";
+#endif
+#ifdef PSTOREFS_MAGIC
+	if (pbuf->f_type == PSTOREFS_MAGIC)
+		return "pstorefs";
+#endif
+#ifdef QNX4_SUPER_MAGIC
+	if (pbuf->f_type == QNX4_SUPER_MAGIC)
+		return "qnx4";
+#endif
+#ifdef QNX6_SUPER_MAGIC
+	if (pbuf->f_type == QNX6_SUPER_MAGIC)
+		return "qnx6";
+#endif
+#ifdef RAMFS_MAGIC
+	if (pbuf->f_type == RAMFS_MAGIC)
+		return "ramfs";
+#endif
+#ifdef REISERFS_SUPER_MAGIC
+	if (pbuf->f_type == REISERFS_SUPER_MAGIC)
+		return "reiserfs";
+#endif
+#ifdef ROMFS_MAGIC
+	if (pbuf->f_type == ROMFS_MAGIC)
+		return "romfs";
+#endif
+#ifdef SECURITYFS_MAGIC
+	if (pbuf->f_type == SECURITYFS_MAGIC)
+		return "securityfs";
+#endif
+#ifdef SELINUX_MAGIC
+	if (pbuf->f_type == SELINUX_MAGIC)
+		return "selinux";
+#endif
+#ifdef SMACK_MAGIC
+	if (pbuf->f_type == SMACK_MAGIC)
+		return "smack";
+#endif
+#ifdef SMB_SUPER_MAGIC
+	if (pbuf->f_type == SMB_SUPER_MAGIC)
+		return "smb";
+#endif
+#ifdef SOCKFS_MAGIC
+	if (pbuf->f_type == SOCKFS_MAGIC)
+		return "sockfs";
+#endif
+#ifdef SQUASHFS_MAGIC
+	if (pbuf->f_type == SQUASHFS_MAGIC)
+		return "squashfs";
+#endif
+#ifdef SYSFS_MAGIC
+	if (pbuf->f_type == SYSFS_MAGIC)
+		return "sysfs";
+#endif
+#ifdef SYSV2_SUPER_MAGIC
+	if (pbuf->f_type == SYSV2_SUPER_MAGIC)
+		return "sysv2";
+#endif
+#ifdef SYSV4_SUPER_MAGIC
+	if (pbuf->f_type == SYSV4_SUPER_MAGIC)
+		return "sysv4";
+#endif
+#ifdef TMPFS_MAGIC
+	if (pbuf->f_type == TMPFS_MAGIC)
+		return "tmpfs";
+#endif
+#ifdef UDF_SUPER_MAGIC
+	if (pbuf->f_type == UDF_SUPER_MAGIC)
+		return "udf";
+#endif
+#ifdef UFS_MAGIC
+	if (pbuf->f_type == UFS_MAGIC)
+		return "ufs";
+#endif
+#ifdef USBDEVICE_SUPER_MAGIC
+	if (pbuf->f_type == USBDEVICE_SUPER_MAGIC)
+		return "usbdevice";
+#endif
+#ifdef V9FS_MAGIC
+	if (pbuf->f_type == V9FS_MAGIC)
+		return "v9fs";
+#endif
+#ifdef VXFS_SUPER_MAGIC
+	if (pbuf->f_type == VXFS_SUPER_MAGIC)
+		return "vxfs";
+#endif
+#ifdef XENFS_SUPER_MAGIC
+	if (pbuf->f_type == XENFS_SUPER_MAGIC)
+		return "xenfs";
+#endif
+#ifdef XENIX_SUPER_MAGIC
+	if (pbuf->f_type == XENIX_SUPER_MAGIC)
+		return "xenix";
+#endif
+#ifdef XFS_SUPER_MAGIC
+	if (pbuf->f_type == XFS_SUPER_MAGIC)
+		return "xfs";
+#endif
+	/* End of Linux filesystems */
+	return NULL;
+}
+#endif /* def HAVE_LINUX_STATFS */
+
+#if defined(HAVE_BSD_STATFS) || defined(HAVE_LINUX_STATFS)
+/* given a directory path, return filesystem type name (best-effort) */
+static PyObject *getfstype(PyObject *self, PyObject *args)
+{
+	const char *path = NULL;
+	struct statfs buf;
+	int r;
+	if (!PyArg_ParseTuple(args, "s", &path))
+		return NULL;
+
+	memset(&buf, 0, sizeof(buf));
+	r = statfs(path, &buf);
+	if (r != 0)
+		return PyErr_SetFromErrno(PyExc_OSError);
+	/* describefstype() may return NULL for an unrecognized type;
+	   Py_BuildValue("s", NULL) turns that into None */
+	return Py_BuildValue("s", describefstype(&buf));
+}
+#endif /* defined(HAVE_LINUX_STATFS) || defined(HAVE_BSD_STATFS) */
+
+#endif /* ndef _WIN32 */
+
+/* listdir(path, stat=False, skip=None): list the entries of `path` as
+   (name, kind) tuples, or (name, kind, stat) when `stat` is true.  The
+   batched lister returns an empty list when `skip` names a
+   subdirectory of `path` (quit-early protocol).
+   NOTE(review): "s#" parses the length into an int `plen` here; on
+   Python 3 that pairing requires PY_SSIZE_T_CLEAN to be unset -- the
+   file header is outside this view, confirm. */
+static PyObject *listdir(PyObject *self, PyObject *args, PyObject *kwargs)
+{
+	PyObject *statobj = NULL; /* initialize - optional arg */
+	PyObject *skipobj = NULL; /* initialize - optional arg */
+	char *path, *skip = NULL;
+	int wantstat, plen;
+
+	static char *kwlist[] = {"path", "stat", "skip", NULL};
+
+	if (!PyArg_ParseTupleAndKeywords(args, kwargs, "s#|OO:listdir",
+					 kwlist, &path, &plen, &statobj, &skipobj))
+		return NULL;
+
+	wantstat = statobj && PyObject_IsTrue(statobj);
+
+	if (skipobj && skipobj != Py_None) {
+		skip = PyBytes_AsString(skipobj);
+		if (!skip)
+			return NULL;
+	}
+
+	return _listdir(path, plen, wantstat, skip);
+}
+
+#ifdef _WIN32
+/* posixfile(name, mode='rb', buffering=-1): open a file with POSIX-like
+   sharing semantics on Windows (FILE_SHARE_DELETE allows renaming or
+   unlinking a file that is still open).  The Win32 HANDLE is converted
+   to a CRT fd and then to a Python file object.  May raise ValueError
+   for a bad mode, or WindowsError/IOError on failure. */
+static PyObject *posixfile(PyObject *self, PyObject *args, PyObject *kwds)
+{
+	static char *kwlist[] = {"name", "mode", "buffering", NULL};
+	PyObject *file_obj = NULL;
+	char *name = NULL;
+	char *mode = "rb";
+	DWORD access = 0;
+	DWORD creation;
+	HANDLE handle;
+	int fd, flags = 0;
+	int bufsize = -1;
+	/* first three mode characters; m1/m2 are '\0' when mode is short */
+	char m0, m1, m2;
+	/* rebuilt mode string for _fdopen: at most "xb+\0" = 4 chars */
+	char fpmode[4];
+	int fppos = 0;
+	int plus;
+	FILE *fp;
+
+	if (!PyArg_ParseTupleAndKeywords(args, kwds, "et|si:posixfile", kwlist,
+					 Py_FileSystemDefaultEncoding,
+					 &name, &mode, &bufsize))
+		return NULL;
+
+	m0 = mode[0];
+	m1 = m0 ? mode[1] : '\0';
+	m2 = m1 ? mode[2] : '\0';
+	plus = m1 == '+' || m2 == '+';
+
+	fpmode[fppos++] = m0;
+	if (m1 == 'b' || m2 == 'b') {
+		flags = _O_BINARY;
+		fpmode[fppos++] = 'b';
+	}
+	else
+		flags = _O_TEXT;
+	if (m0 == 'r' && !plus) {
+		flags |= _O_RDONLY;
+		access = GENERIC_READ;
+	} else {
+		/*
+		work around http://support.microsoft.com/kb/899149 and
+		set _O_RDWR for 'w' and 'a', even if mode has no '+'
+		*/
+		flags |= _O_RDWR;
+		access = GENERIC_READ | GENERIC_WRITE;
+		fpmode[fppos++] = '+';
+	}
+	fpmode[fppos++] = '\0';
+
+	switch (m0) {
+	case 'r':
+		creation = OPEN_EXISTING;
+		break;
+	case 'w':
+		creation = CREATE_ALWAYS;
+		break;
+	case 'a':
+		creation = OPEN_ALWAYS;
+		flags |= _O_APPEND;
+		break;
+	default:
+		PyErr_Format(PyExc_ValueError,
+			     "mode string must begin with one of 'r', 'w', "
+			     "or 'a', not '%c'", m0);
+		goto bail;
+	}
+
+	handle = CreateFile(name, access,
+			    FILE_SHARE_READ | FILE_SHARE_WRITE |
+			    FILE_SHARE_DELETE,
+			    NULL,
+			    creation,
+			    FILE_ATTRIBUTE_NORMAL,
+			    0);
+
+	if (handle == INVALID_HANDLE_VALUE) {
+		PyErr_SetFromWindowsErrWithFilename(GetLastError(), name);
+		goto bail;
+	}
+
+	/* from here the fd (and later fp / file_obj) owns the handle */
+	fd = _open_osfhandle((intptr_t)handle, flags);
+
+	if (fd == -1) {
+		CloseHandle(handle);
+		PyErr_SetFromErrnoWithFilename(PyExc_IOError, name);
+		goto bail;
+	}
+#ifndef IS_PY3K
+	fp = _fdopen(fd, fpmode);
+	if (fp == NULL) {
+		_close(fd);
+		PyErr_SetFromErrnoWithFilename(PyExc_IOError, name);
+		goto bail;
+	}
+
+	file_obj = PyFile_FromFile(fp, name, mode, fclose);
+	if (file_obj == NULL) {
+		fclose(fp);
+		goto bail;
+	}
+
+	PyFile_SetBufSize(file_obj, bufsize);
+#else
+	file_obj = PyFile_FromFd(fd, name, mode, bufsize, NULL, NULL, NULL, 1);
+	if (file_obj == NULL)
+		goto bail;
+#endif
+bail:
+	/* name was allocated by the "et" converter */
+	PyMem_Free(name);
+	return file_obj;
+}
+#endif
+
+#ifdef __APPLE__
+#include <ApplicationServices/ApplicationServices.h>
+
+static PyObject *isgui(PyObject *self)
+{
+	/* A CoreGraphics session dictionary only exists inside a GUI
+	 * login session, so its presence answers the question. */
+	CFDictionaryRef session = CGSessionCopyCurrentDictionary();
+
+	if (session == NULL)
+		Py_RETURN_FALSE;
+
+	CFRelease(session);
+	Py_RETURN_TRUE;
+}
+#endif
+
+static char osutil_doc[] = "Native operating system services.";
+
+/* module method table; platform-specific entries are compiled in only
+   where their implementations exist */
+static PyMethodDef methods[] = {
+	{"listdir", (PyCFunction)listdir, METH_VARARGS | METH_KEYWORDS,
+	 "list a directory\n"},
+#ifdef _WIN32
+	{"posixfile", (PyCFunction)posixfile, METH_VARARGS | METH_KEYWORDS,
+	 "Open a file with POSIX-like semantics.\n"
+"On error, this function may raise either a WindowsError or an IOError."},
+#else
+	/* statfiles() is a plain (self, args) function, so it must not be
+	   registered with METH_KEYWORDS: with that flag CPython would call
+	   it with a third kwargs argument it does not accept, which is
+	   undefined behavior */
+	{"statfiles", (PyCFunction)statfiles, METH_VARARGS,
+	 "stat a series of files or symlinks\n"
+"Returns None for non-existent entries and entries of other types.\n"},
+#ifdef CMSG_LEN
+	{"recvfds", (PyCFunction)recvfds, METH_VARARGS,
+	 "receive list of file descriptors via socket\n"},
+#endif
+#ifndef SETPROCNAME_USE_NONE
+	{"setprocname", (PyCFunction)setprocname, METH_VARARGS,
+	 "set process title (best-effort)\n"},
+#endif
+#if defined(HAVE_BSD_STATFS) || defined(HAVE_LINUX_STATFS)
+	{"getfstype", (PyCFunction)getfstype, METH_VARARGS,
+	 "get filesystem type (best-effort)\n"},
+#endif
+#endif /* ndef _WIN32 */
+#ifdef __APPLE__
+	{
+		"isgui", (PyCFunction)isgui, METH_NOARGS,
+		"Is a CoreGraphics session available?"
+	},
+#endif
+	{NULL, NULL}
+};
+
+/* bumped when the C API of this module changes; checked by the
+   pure-Python loader */
+static const int version = 1;
+
+#ifdef IS_PY3K
+static struct PyModuleDef osutil_module = {
+	PyModuleDef_HEAD_INIT,
+	"osutil",
+	osutil_doc,
+	-1, /* no per-module state */
+	methods
+};
+
+/* Python 3 module entry point. */
+PyMODINIT_FUNC PyInit_osutil(void)
+{
+	PyObject *m;
+	if (PyType_Ready(&listdir_stat_type) < 0)
+		return NULL;
+
+	m = PyModule_Create(&osutil_module);
+	/* PyModule_Create can fail (e.g. out of memory); never pass NULL
+	   on to PyModule_AddIntConstant */
+	if (m == NULL)
+		return NULL;
+	PyModule_AddIntConstant(m, "version", version);
+	return m;
+}
+#else
+/* Python 2 module entry point. */
+PyMODINIT_FUNC initosutil(void)
+{
+	PyObject *m;
+	if (PyType_Ready(&listdir_stat_type) == -1)
+		return;
+
+	m = Py_InitModule3("osutil", methods, osutil_doc);
+	/* Py_InitModule3 can fail; dereferencing NULL below would crash
+	   the interpreter during import */
+	if (m == NULL)
+		return;
+	PyModule_AddIntConstant(m, "version", version);
+}
+#endif
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cext/parsers.c Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,1009 @@
+/*
+ parsers.c - efficient content parsing
+
+ Copyright 2008 Matt Mackall <mpm@selenic.com> and others
+
+ This software may be used and distributed according to the terms of
+ the GNU General Public License, incorporated herein by reference.
+*/
+
+#include <Python.h>
+#include <ctype.h>
+#include <stddef.h>
+#include <string.h>
+
+#include "util.h"
+#include "bitmanipulation.h"
+
+#ifdef IS_PY3K
+/* The mapping of Python types is meant to be temporary to get Python
+ * 3 to compile. We should remove this once Python 3 support is fully
+ * supported and proper types are used in the extensions themselves. */
+#define PyInt_Type PyLong_Type
+#define PyInt_Check PyLong_Check
+#define PyInt_FromLong PyLong_FromLong
+#define PyInt_FromSsize_t PyLong_FromSsize_t
+#define PyInt_AS_LONG PyLong_AS_LONG
+#define PyInt_AsLong PyLong_AsLong
+#endif
+
+static const char *const versionerrortext = "Python minor version mismatch";
+
+/* Identity mapping for bytes 0x00-0x7f except that 'A'-'Z' map to
+   'a'-'z'; drives fast ASCII-only lowercasing in _asciitransform. */
+static const char lowertable[128] = {
+	'\x00', '\x01', '\x02', '\x03', '\x04', '\x05', '\x06', '\x07',
+	'\x08', '\x09', '\x0a', '\x0b', '\x0c', '\x0d', '\x0e', '\x0f',
+	'\x10', '\x11', '\x12', '\x13', '\x14', '\x15', '\x16', '\x17',
+	'\x18', '\x19', '\x1a', '\x1b', '\x1c', '\x1d', '\x1e', '\x1f',
+	'\x20', '\x21', '\x22', '\x23', '\x24', '\x25', '\x26', '\x27',
+	'\x28', '\x29', '\x2a', '\x2b', '\x2c', '\x2d', '\x2e', '\x2f',
+	'\x30', '\x31', '\x32', '\x33', '\x34', '\x35', '\x36', '\x37',
+	'\x38', '\x39', '\x3a', '\x3b', '\x3c', '\x3d', '\x3e', '\x3f',
+	'\x40',
+	        '\x61', '\x62', '\x63', '\x64', '\x65', '\x66', '\x67', /* A-G */
+	'\x68', '\x69', '\x6a', '\x6b', '\x6c', '\x6d', '\x6e', '\x6f', /* H-O */
+	'\x70', '\x71', '\x72', '\x73', '\x74', '\x75', '\x76', '\x77', /* P-W */
+	'\x78', '\x79', '\x7a',                                         /* X-Z */
+	                        '\x5b', '\x5c', '\x5d', '\x5e', '\x5f',
+	'\x60', '\x61', '\x62', '\x63', '\x64', '\x65', '\x66', '\x67',
+	'\x68', '\x69', '\x6a', '\x6b', '\x6c', '\x6d', '\x6e', '\x6f',
+	'\x70', '\x71', '\x72', '\x73', '\x74', '\x75', '\x76', '\x77',
+	'\x78', '\x79', '\x7a', '\x7b', '\x7c', '\x7d', '\x7e', '\x7f'
+};
+
+/* Identity mapping for bytes 0x00-0x7f except that 'a'-'z' map to
+   'A'-'Z'; drives fast ASCII-only uppercasing in _asciitransform. */
+static const char uppertable[128] = {
+	'\x00', '\x01', '\x02', '\x03', '\x04', '\x05', '\x06', '\x07',
+	'\x08', '\x09', '\x0a', '\x0b', '\x0c', '\x0d', '\x0e', '\x0f',
+	'\x10', '\x11', '\x12', '\x13', '\x14', '\x15', '\x16', '\x17',
+	'\x18', '\x19', '\x1a', '\x1b', '\x1c', '\x1d', '\x1e', '\x1f',
+	'\x20', '\x21', '\x22', '\x23', '\x24', '\x25', '\x26', '\x27',
+	'\x28', '\x29', '\x2a', '\x2b', '\x2c', '\x2d', '\x2e', '\x2f',
+	'\x30', '\x31', '\x32', '\x33', '\x34', '\x35', '\x36', '\x37',
+	'\x38', '\x39', '\x3a', '\x3b', '\x3c', '\x3d', '\x3e', '\x3f',
+	'\x40', '\x41', '\x42', '\x43', '\x44', '\x45', '\x46', '\x47',
+	'\x48', '\x49', '\x4a', '\x4b', '\x4c', '\x4d', '\x4e', '\x4f',
+	'\x50', '\x51', '\x52', '\x53', '\x54', '\x55', '\x56', '\x57',
+	'\x58', '\x59', '\x5a', '\x5b', '\x5c', '\x5d', '\x5e', '\x5f',
+	'\x60',
+		'\x41', '\x42', '\x43', '\x44', '\x45', '\x46', '\x47', /* a-g */
+	'\x48', '\x49', '\x4a', '\x4b', '\x4c', '\x4d', '\x4e', '\x4f', /* h-o */
+	'\x50', '\x51', '\x52', '\x53', '\x54', '\x55', '\x56', '\x57', /* p-w */
+	'\x58', '\x59', '\x5a', 			                /* x-z */
+				'\x7b', '\x7c', '\x7d', '\x7e', '\x7f'
+};
+
+/*
+ * Turn a hex-encoded string into binary.
+ */
+PyObject *unhexlify(const char *str, int len)
+{
+	/* Decode `len` hex characters from str into len/2 raw bytes. */
+	PyObject *bytes;
+	char *out;
+	int pos;
+
+	bytes = PyBytes_FromStringAndSize(NULL, len / 2);
+	if (!bytes)
+		return NULL;
+
+	out = PyBytes_AsString(bytes);
+
+	for (pos = 0; pos < len; pos += 2) {
+		int hi = hexdigit(str, pos);
+		int lo = hexdigit(str, pos + 1);
+		*out++ = (hi << 4) | lo;
+	}
+
+	return bytes;
+}
+
+/* Map every byte of `str_obj` (a bytes object) through the 128-entry
+   `table`.  If a byte with the high bit set is found, either delegate
+   the whole string to `fallback_fn` (when non-NULL) or raise
+   UnicodeDecodeError.  Returns a new reference or NULL on error. */
+static inline PyObject *_asciitransform(PyObject *str_obj,
+                                        const char table[128],
+                                        PyObject *fallback_fn)
+{
+	char *str, *newstr;
+	Py_ssize_t i, len;
+	PyObject *newobj = NULL;
+	PyObject *ret = NULL;
+
+	str = PyBytes_AS_STRING(str_obj);
+	len = PyBytes_GET_SIZE(str_obj);
+
+	newobj = PyBytes_FromStringAndSize(NULL, len);
+	if (!newobj)
+		goto quit;
+
+	newstr = PyBytes_AS_STRING(newobj);
+
+	for (i = 0; i < len; i++) {
+		char c = str[i];
+		if (c & 0x80) {
+			/* non-ASCII byte: hand off or raise, and discard
+			   the partially-filled newobj at quit */
+			if (fallback_fn != NULL) {
+				ret = PyObject_CallFunctionObjArgs(fallback_fn,
+					str_obj, NULL);
+			} else {
+				PyObject *err = PyUnicodeDecodeError_Create(
+					"ascii", str, len, i, (i + 1),
+					"unexpected code byte");
+				PyErr_SetObject(PyExc_UnicodeDecodeError, err);
+				Py_XDECREF(err);
+			}
+			goto quit;
+		}
+		newstr[i] = table[(unsigned char)c];
+	}
+
+	ret = newobj;
+	Py_INCREF(ret);
+quit:
+	Py_XDECREF(newobj);
+	return ret;
+}
+
+static PyObject *asciilower(PyObject *self, PyObject *args)
+{
+	/* Lowercase a pure-ASCII bytes object via the lookup table;
+	 * raises UnicodeDecodeError on non-ASCII input. */
+	PyObject *bytes;
+
+	if (!PyArg_ParseTuple(args, "O!:asciilower", &PyBytes_Type, &bytes))
+		return NULL;
+
+	return _asciitransform(bytes, lowertable, NULL);
+}
+
+static PyObject *asciiupper(PyObject *self, PyObject *args)
+{
+	/* Uppercase a pure-ASCII bytes object via the lookup table;
+	 * raises UnicodeDecodeError on non-ASCII input. */
+	PyObject *bytes;
+
+	if (!PyArg_ParseTuple(args, "O!:asciiupper", &PyBytes_Type, &bytes))
+		return NULL;
+
+	return _asciitransform(bytes, uppertable, NULL);
+}
+
+/* Create a dict sized so `expected_size` insertions won't trigger an
+   intermediate resize. */
+static inline PyObject *_dict_new_presized(Py_ssize_t expected_size)
+{
+	/* _PyDict_NewPresized expects a minused parameter, but it actually
+	   creates a dictionary that's the nearest power of two bigger than the
+	   parameter. For example, with the initial minused = 1000, the
+	   dictionary created has size 1024. Of course in a lot of cases that
+	   can be greater than the maximum load factor Python's dict object
+	   expects (= 2/3), so as soon as we cross the threshold we'll resize
+	   anyway. So create a dictionary that's at least 3/2 the size. */
+	return _PyDict_NewPresized(((1 + expected_size) / 2) * 3);
+}
+
+static PyObject *dict_new_presized(PyObject *self, PyObject *args)
+{
+	/* Python-visible wrapper around _dict_new_presized(). */
+	Py_ssize_t expected;
+
+	if (!PyArg_ParseTuple(args, "n:make_presized_dict", &expected))
+		return NULL;
+
+	return _dict_new_presized(expected);
+}
+
+/* make_file_foldmap(dmap, spec, normcase_fallback): build a dict
+   mapping case-normalized filename -> original filename for every
+   non-removed ('r') entry in the dirstate map.  `spec` selects the
+   normalization: ASCII lower, ASCII upper, or the Python fallback
+   callable for everything else (and for non-ASCII names). */
+static PyObject *make_file_foldmap(PyObject *self, PyObject *args)
+{
+	PyObject *dmap, *spec_obj, *normcase_fallback;
+	PyObject *file_foldmap = NULL;
+	enum normcase_spec spec;
+	PyObject *k, *v;
+	dirstateTupleObject *tuple;
+	Py_ssize_t pos = 0;
+	const char *table;
+
+	if (!PyArg_ParseTuple(args, "O!O!O!:make_file_foldmap",
+			      &PyDict_Type, &dmap,
+			      &PyInt_Type, &spec_obj,
+			      &PyFunction_Type, &normcase_fallback))
+		goto quit;
+
+	spec = (int)PyInt_AS_LONG(spec_obj);
+	switch (spec) {
+	case NORMCASE_LOWER:
+		table = lowertable;
+		break;
+	case NORMCASE_UPPER:
+		table = uppertable;
+		break;
+	case NORMCASE_OTHER:
+		table = NULL;
+		break;
+	default:
+		PyErr_SetString(PyExc_TypeError, "invalid normcasespec");
+		goto quit;
+	}
+
+	/* Add some more entries to deal with additions outside this
+	   function. */
+	file_foldmap = _dict_new_presized((PyDict_Size(dmap) / 10) * 11);
+	if (file_foldmap == NULL)
+		goto quit;
+
+	while (PyDict_Next(dmap, &pos, &k, &v)) {
+		if (!dirstate_tuple_check(v)) {
+			PyErr_SetString(PyExc_TypeError,
+					"expected a dirstate tuple");
+			goto quit;
+		}
+
+		tuple = (dirstateTupleObject *)v;
+		if (tuple->state != 'r') {
+			PyObject *normed;
+			if (table != NULL) {
+				/* ASCII fast path; falls back to the Python
+				   callable on non-ASCII bytes */
+				normed = _asciitransform(k, table,
+					normcase_fallback);
+			} else {
+				normed = PyObject_CallFunctionObjArgs(
+					normcase_fallback, k, NULL);
+			}
+
+			if (normed == NULL)
+				goto quit;
+			if (PyDict_SetItem(file_foldmap, normed, k) == -1) {
+				Py_DECREF(normed);
+				goto quit;
+			}
+			Py_DECREF(normed);
+		}
+	}
+	return file_foldmap;
+quit:
+	Py_XDECREF(file_foldmap);
+	return NULL;
+}
+
+/*
+ * This code assumes that a manifest is stitched together with newline
+ * ('\n') characters.
+ */
+/* parse_manifest(mfdict, fdict, data): split the manifest text into
+   "filename\0hex-node[flags]\n" entries, storing filename -> binary
+   node in mfdict and filename -> flags (bytes after the 40-char node)
+   in fdict.
+   NOTE(review): entries shorter than 40 hex characters are passed to
+   unhexlify as-is; behavior for such malformed input depends on
+   hexdigit(), which is defined outside this view -- confirm. */
+static PyObject *parse_manifest(PyObject *self, PyObject *args)
+{
+	PyObject *mfdict, *fdict;
+	char *str, *start, *end;
+	int len;
+
+	if (!PyArg_ParseTuple(args, "O!O!s#:parse_manifest",
+			      &PyDict_Type, &mfdict,
+			      &PyDict_Type, &fdict,
+			      &str, &len))
+		goto quit;
+
+	start = str;
+	end = str + len;
+	while (start < end) {
+		PyObject *file = NULL, *node = NULL;
+		PyObject *flags = NULL;
+		char *zero = NULL, *newline = NULL;
+		ptrdiff_t nlen;
+
+		zero = memchr(start, '\0', end - start);
+		if (!zero) {
+			PyErr_SetString(PyExc_ValueError,
+					"manifest entry has no separator");
+			goto quit;
+		}
+
+		newline = memchr(zero + 1, '\n', end - (zero + 1));
+		if (!newline) {
+			PyErr_SetString(PyExc_ValueError,
+					"manifest contains trailing garbage");
+			goto quit;
+		}
+
+		file = PyBytes_FromStringAndSize(start, zero - start);
+
+		if (!file)
+			goto bail;
+
+		/* bytes between the NUL and the newline: 40-char hex node
+		   optionally followed by flag characters */
+		nlen = newline - zero - 1;
+
+		node = unhexlify(zero + 1, nlen > 40 ? 40 : (int)nlen);
+		if (!node)
+			goto bail;
+
+		if (nlen > 40) {
+			flags = PyBytes_FromStringAndSize(zero + 41,
+							  nlen - 40);
+			if (!flags)
+				goto bail;
+
+			if (PyDict_SetItem(fdict, file, flags) == -1)
+				goto bail;
+		}
+
+		if (PyDict_SetItem(mfdict, file, node) == -1)
+			goto bail;
+
+		start = newline + 1;
+
+		Py_XDECREF(flags);
+		Py_XDECREF(node);
+		Py_XDECREF(file);
+		continue;
+	bail:
+		Py_XDECREF(flags);
+		Py_XDECREF(node);
+		Py_XDECREF(file);
+		goto quit;
+	}
+
+	Py_INCREF(Py_None);
+	return Py_None;
+quit:
+	return NULL;
+}
+
+static inline dirstateTupleObject *make_dirstate_tuple(char state, int mode,
+						       int size, int mtime)
+{
+	/* Allocate and populate a dirstate tuple in one step; returns
+	 * NULL (with a Python exception set) on allocation failure. */
+	dirstateTupleObject *tup;
+
+	tup = PyObject_New(dirstateTupleObject, &dirstateTupleType);
+	if (!tup)
+		return NULL;
+
+	tup->state = state;
+	tup->mode = mode;
+	tup->size = size;
+	tup->mtime = mtime;
+	return tup;
+}
+
+/* tp_new for dirstate_tuple: dirstate_tuple(state, mode, size, mtime).
+   NOTE(review): tp_alloc is called with nitems=1 although the type is
+   fixed-size (tp_itemsize 0) -- presumably harmless, but confirm. */
+static PyObject *dirstate_tuple_new(PyTypeObject *subtype, PyObject *args,
+				    PyObject *kwds)
+{
+	/* We do all the initialization here and not a tp_init function because
+	 * dirstate_tuple is immutable. */
+	dirstateTupleObject *t;
+	char state;
+	int size, mode, mtime;
+	if (!PyArg_ParseTuple(args, "ciii", &state, &mode, &size, &mtime))
+		return NULL;
+
+	t = (dirstateTupleObject *)subtype->tp_alloc(subtype, 1);
+	if (!t)
+		return NULL;
+	t->state = state;
+	t->mode = mode;
+	t->size = size;
+	t->mtime = mtime;
+
+	return (PyObject *)t;
+}
+
+/* dirstate tuples hold no references, so deallocation is just freeing */
+static void dirstate_tuple_dealloc(PyObject *o)
+{
+	PyObject_Del(o);
+}
+
+static Py_ssize_t dirstate_tuple_length(PyObject *self)
+{
+	/* A dirstate tuple is always (state, mode, size, mtime). */
+	return 4;
+}
+
+static PyObject *dirstate_tuple_item(PyObject *o, Py_ssize_t i)
+{
+	/* Index into the (state, mode, size, mtime) pseudo-tuple. */
+	dirstateTupleObject *t = (dirstateTupleObject *)o;
+
+	if (i == 0)
+		return PyBytes_FromStringAndSize(&t->state, 1);
+	if (i == 1)
+		return PyInt_FromLong(t->mode);
+	if (i == 2)
+		return PyInt_FromLong(t->size);
+	if (i == 3)
+		return PyInt_FromLong(t->mtime);
+
+	PyErr_SetString(PyExc_IndexError, "index out of range");
+	return NULL;
+}
+
+/* sequence protocol: only length and item access are supported, which
+   is enough for tuple-style unpacking and indexing */
+static PySequenceMethods dirstate_tuple_sq = {
+	dirstate_tuple_length,	   /* sq_length */
+	0,			   /* sq_concat */
+	0,			   /* sq_repeat */
+	dirstate_tuple_item,	   /* sq_item */
+	0,			   /* sq_ass_item */
+	0,			   /* sq_contains */
+	0,			   /* sq_inplace_concat */
+	0			   /* sq_inplace_repeat */
+};
+
+/* Type object for the immutable dirstate_tuple: a compact, read-only
+   (state, mode, size, mtime) record used as dirstate map values. */
+PyTypeObject dirstateTupleType = {
+	PyVarObject_HEAD_INIT(NULL, 0)
+	"dirstate_tuple",	   /* tp_name */
+	sizeof(dirstateTupleObject),/* tp_basicsize */
+	0,			   /* tp_itemsize */
+	(destructor)dirstate_tuple_dealloc, /* tp_dealloc */
+	0,			   /* tp_print */
+	0,			   /* tp_getattr */
+	0,			   /* tp_setattr */
+	0,			   /* tp_compare */
+	0,			   /* tp_repr */
+	0,			   /* tp_as_number */
+	&dirstate_tuple_sq,	   /* tp_as_sequence */
+	0,			   /* tp_as_mapping */
+	0,			   /* tp_hash  */
+	0,			   /* tp_call */
+	0,			   /* tp_str */
+	0,			   /* tp_getattro */
+	0,			   /* tp_setattro */
+	0,			   /* tp_as_buffer */
+	Py_TPFLAGS_DEFAULT,	   /* tp_flags */
+	"dirstate tuple",	   /* tp_doc */
+	0,			   /* tp_traverse */
+	0,			   /* tp_clear */
+	0,			   /* tp_richcompare */
+	0,			   /* tp_weaklistoffset */
+	0,			   /* tp_iter */
+	0,			   /* tp_iternext */
+	0,			   /* tp_methods */
+	0,			   /* tp_members */
+	0,			   /* tp_getset */
+	0,			   /* tp_base */
+	0,			   /* tp_dict */
+	0,			   /* tp_descr_get */
+	0,			   /* tp_descr_set */
+	0,			   /* tp_dictoffset */
+	0,			   /* tp_init */
+	0,			   /* tp_alloc */
+	dirstate_tuple_new,	   /* tp_new */
+};
+
+/* parse_dirstate(dmap, cmap, data): decode the on-disk dirstate.  The
+   first 40 bytes are the two parent nodeids, returned as a 2-tuple.
+   Each following record is a 17-byte header (state byte plus
+   big-endian mode, size, mtime, filename length) followed by
+   "filename[\0copysource]".  Filenames map to dirstate tuples in dmap;
+   copy sources go into cmap. */
+static PyObject *parse_dirstate(PyObject *self, PyObject *args)
+{
+	PyObject *dmap, *cmap, *parents = NULL, *ret = NULL;
+	PyObject *fname = NULL, *cname = NULL, *entry = NULL;
+	char state, *cur, *str, *cpos;
+	int mode, size, mtime;
+	unsigned int flen, len, pos = 40;
+	int readlen;
+
+	if (!PyArg_ParseTuple(args, "O!O!s#:parse_dirstate",
+			      &PyDict_Type, &dmap,
+			      &PyDict_Type, &cmap,
+			      &str, &readlen))
+		goto quit;
+
+	len = readlen;
+
+	/* read parents */
+	if (len < 40) {
+		PyErr_SetString(
+			PyExc_ValueError, "too little data for parents");
+		goto quit;
+	}
+
+	parents = Py_BuildValue("s#s#", str, 20, str + 20, 20);
+	if (!parents)
+		goto quit;
+
+	/* read filenames */
+	while (pos >= 40 && pos < len) {
+		if (pos + 17 > len) {
+			PyErr_SetString(PyExc_ValueError,
+					"overflow in dirstate");
+			goto quit;
+		}
+		cur = str + pos;
+		/* unpack header */
+		state = *cur;
+		mode = getbe32(cur + 1);
+		size = getbe32(cur + 5);
+		mtime = getbe32(cur + 9);
+		flen = getbe32(cur + 13);
+		pos += 17;
+		cur += 17;
+		if (flen > len - pos) {
+			PyErr_SetString(PyExc_ValueError, "overflow in dirstate");
+			goto quit;
+		}
+
+		entry = (PyObject *)make_dirstate_tuple(state, mode, size,
+							mtime);
+		/* make_dirstate_tuple returns NULL on allocation failure;
+		   PyDict_SetItem must never receive a NULL value, so bail
+		   out here */
+		if (!entry)
+			goto quit;
+		cpos = memchr(cur, 0, flen);
+		if (cpos) {
+			/* record is "filename\0copysource" */
+			fname = PyBytes_FromStringAndSize(cur, cpos - cur);
+			cname = PyBytes_FromStringAndSize(cpos + 1,
+							   flen - (cpos - cur) - 1);
+			if (!fname || !cname ||
+			    PyDict_SetItem(cmap, fname, cname) == -1 ||
+			    PyDict_SetItem(dmap, fname, entry) == -1)
+				goto quit;
+			Py_DECREF(cname);
+		} else {
+			fname = PyBytes_FromStringAndSize(cur, flen);
+			if (!fname ||
+			    PyDict_SetItem(dmap, fname, entry) == -1)
+				goto quit;
+		}
+		Py_DECREF(fname);
+		Py_DECREF(entry);
+		fname = cname = entry = NULL;
+		pos += flen;
+	}
+
+	ret = parents;
+	Py_INCREF(ret);
+quit:
+	Py_XDECREF(fname);
+	Py_XDECREF(cname);
+	Py_XDECREF(entry);
+	Py_XDECREF(parents);
+	return ret;
+}
+
+/*
+ * Build a set of non-normal and other parent entries from the dirstate dmap
+*/
+static PyObject *nonnormalotherparententries(PyObject *self, PyObject *args) { /* return (nonnormal, otherparent) filename sets built from a dirstate map */
+ PyObject *dmap, *fname, *v;
+ PyObject *nonnset = NULL, *otherpset = NULL, *result = NULL;
+ Py_ssize_t pos;
+
+ if (!PyArg_ParseTuple(args, "O!:nonnormalentries",
+ &PyDict_Type, &dmap))
+ goto bail;
+
+ nonnset = PySet_New(NULL);
+ if (nonnset == NULL)
+ goto bail;
+
+ otherpset = PySet_New(NULL);
+ if (otherpset == NULL)
+ goto bail;
+
+ pos = 0;
+ while (PyDict_Next(dmap, &pos, &fname, &v)) { /* fname/v are borrowed references */
+ dirstateTupleObject *t;
+ if (!dirstate_tuple_check(v)) {
+ PyErr_SetString(PyExc_TypeError,
+ "expected a dirstate tuple");
+ goto bail;
+ }
+ t = (dirstateTupleObject *)v;
+
+ if (t->state == 'n' && t->size == -2) { /* size -2 marks an entry from the other merge parent */
+ if (PySet_Add(otherpset, fname) == -1) {
+ goto bail;
+ }
+ }
+
+ if (t->state == 'n' && t->mtime != -1)
+ continue; /* clean 'n' entries with a known mtime are the only normal ones */
+ if (PySet_Add(nonnset, fname) == -1)
+ goto bail;
+ }
+
+ result = Py_BuildValue("(OO)", nonnset, otherpset); /* tuple takes its own references */
+ if (result == NULL)
+ goto bail;
+ Py_DECREF(nonnset);
+ Py_DECREF(otherpset);
+ return result;
+bail:
+ Py_XDECREF(nonnset);
+ Py_XDECREF(otherpset);
+ Py_XDECREF(result);
+ return NULL;
+}
+
+/*
+ * Efficiently pack a dirstate object into its on-disk format.
+ */
+static PyObject *pack_dirstate(PyObject *self, PyObject *args) /* serialize (map, copymap, parents, now) into the on-disk dirstate byte format */
+{
+ PyObject *packobj = NULL;
+ PyObject *map, *copymap, *pl, *mtime_unset = NULL;
+ Py_ssize_t nbytes, pos, l;
+ PyObject *k, *v = NULL, *pn;
+ char *p, *s;
+ int now;
+
+ if (!PyArg_ParseTuple(args, "O!O!Oi:pack_dirstate",
+ &PyDict_Type, &map, &PyDict_Type, &copymap,
+ &pl, &now))
+ return NULL;
+
+ if (!PySequence_Check(pl) || PySequence_Size(pl) != 2) {
+ PyErr_SetString(PyExc_TypeError, "expected 2-element sequence");
+ return NULL;
+ }
+
+ /* Figure out how much we need to allocate. */
+ for (nbytes = 40, pos = 0; PyDict_Next(map, &pos, &k, &v);) { /* 40 bytes reserved for the two parent hashes */
+ PyObject *c;
+ if (!PyBytes_Check(k)) {
+ PyErr_SetString(PyExc_TypeError, "expected string key");
+ goto bail;
+ }
+ nbytes += PyBytes_GET_SIZE(k) + 17; /* 17-byte fixed header per entry */
+ c = PyDict_GetItem(copymap, k);
+ if (c) {
+ if (!PyBytes_Check(c)) {
+ PyErr_SetString(PyExc_TypeError,
+ "expected string key");
+ goto bail;
+ }
+ nbytes += PyBytes_GET_SIZE(c) + 1; /* +1 for the NUL separating name and copy source */
+ }
+ }
+
+ packobj = PyBytes_FromStringAndSize(NULL, nbytes);
+ if (packobj == NULL)
+ goto bail;
+
+ p = PyBytes_AS_STRING(packobj);
+
+ pn = PySequence_ITEM(pl, 0); /* NOTE(review): new reference, never released -- confirm leak */
+ if (PyBytes_AsStringAndSize(pn, &s, &l) == -1 || l != 20) {
+ PyErr_SetString(PyExc_TypeError, "expected a 20-byte hash");
+ goto bail;
+ }
+ memcpy(p, s, l);
+ p += 20;
+ pn = PySequence_ITEM(pl, 1); /* NOTE(review): same leak as above */
+ if (PyBytes_AsStringAndSize(pn, &s, &l) == -1 || l != 20) {
+ PyErr_SetString(PyExc_TypeError, "expected a 20-byte hash");
+ goto bail;
+ }
+ memcpy(p, s, l);
+ p += 20;
+
+ for (pos = 0; PyDict_Next(map, &pos, &k, &v); ) { /* k/v borrowed */
+ dirstateTupleObject *tuple;
+ char state;
+ int mode, size, mtime;
+ Py_ssize_t len, l;
+ PyObject *o;
+ char *t;
+
+ if (!dirstate_tuple_check(v)) {
+ PyErr_SetString(PyExc_TypeError,
+ "expected a dirstate tuple");
+ goto bail;
+ }
+ tuple = (dirstateTupleObject *)v;
+
+ state = tuple->state;
+ mode = tuple->mode;
+ size = tuple->size;
+ mtime = tuple->mtime;
+ if (state == 'n' && mtime == now) {
+ /* See pure/parsers.py:pack_dirstate for why we do
+ * this. */
+ mtime = -1;
+ mtime_unset = (PyObject *)make_dirstate_tuple(
+ state, mode, size, mtime);
+ if (!mtime_unset)
+ goto bail;
+ if (PyDict_SetItem(map, k, mtime_unset) == -1)
+ goto bail;
+ Py_DECREF(mtime_unset);
+ mtime_unset = NULL;
+ }
+ *p++ = state;
+ putbe32((uint32_t)mode, p);
+ putbe32((uint32_t)size, p + 4);
+ putbe32((uint32_t)mtime, p + 8);
+ t = p + 12; /* length slot is back-patched below once the copy source is known */
+ p += 16;
+ len = PyBytes_GET_SIZE(k);
+ memcpy(p, PyBytes_AS_STRING(k), len);
+ p += len;
+ o = PyDict_GetItem(copymap, k);
+ if (o) {
+ *p++ = '\0';
+ l = PyBytes_GET_SIZE(o);
+ memcpy(p, PyBytes_AS_STRING(o), l);
+ p += l;
+ len += l + 1;
+ }
+ putbe32((uint32_t)len, t);
+ }
+
+ pos = p - PyBytes_AS_STRING(packobj);
+ if (pos != nbytes) {
+ PyErr_Format(PyExc_SystemError, "bad dirstate size: %ld != %ld",
+ (long)pos, (long)nbytes);
+ goto bail;
+ }
+
+ return packobj;
+bail:
+ Py_XDECREF(mtime_unset);
+ Py_XDECREF(packobj);
+ Py_XDECREF(v); /* NOTE(review): v is a borrowed reference from PyDict_Next -- this decref looks wrong; confirm */
+ return NULL;
+}
+
+#define BUMPED_FIX 1
+#define USING_SHA_256 2
+#define FM1_HEADER_SIZE (4 + 8 + 2 + 2 + 1 + 1 + 1)
+
+static PyObject *readshas(
+ const char *source, unsigned char num, Py_ssize_t hashwidth) /* read num consecutive hashwidth-byte hashes into a new tuple; caller guarantees source holds num * hashwidth bytes */
+{
+ int i;
+ PyObject *list = PyTuple_New(num); /* despite the name, this is a tuple */
+ if (list == NULL) {
+ return NULL;
+ }
+ for (i = 0; i < num; i++) {
+ PyObject *hash = PyBytes_FromStringAndSize(source, hashwidth);
+ if (hash == NULL) {
+ Py_DECREF(list);
+ return NULL;
+ }
+ PyTuple_SET_ITEM(list, i, hash); /* steals the reference to hash */
+ source += hashwidth;
+ }
+ return list;
+}
+
+static PyObject *fm1readmarker(const char *databegin, const char *dataend,
+ uint32_t *msize) /* decode one v1 obsolescence marker starting at databegin; stores its total size in *msize */
+{
+ const char *data = databegin;
+ const char *meta;
+
+ double mtime;
+ int16_t tz;
+ uint16_t flags;
+ unsigned char nsuccs, nparents, nmetadata;
+ Py_ssize_t hashwidth = 20; /* SHA-1 by default; 32 when USING_SHA_256 is flagged */
+
+ PyObject *prec = NULL, *parents = NULL, *succs = NULL;
+ PyObject *metadata = NULL, *ret = NULL;
+ int i;
+
+ if (data + FM1_HEADER_SIZE > dataend) {
+ goto overflow;
+ }
+
+ *msize = getbe32(data); /* total marker size, including this header */
+ data += 4;
+ mtime = getbefloat64(data);
+ data += 8;
+ tz = getbeint16(data);
+ data += 2;
+ flags = getbeuint16(data);
+ data += 2;
+
+ if (flags & USING_SHA_256) {
+ hashwidth = 32;
+ }
+
+ nsuccs = (unsigned char)(*data++);
+ nparents = (unsigned char)(*data++);
+ nmetadata = (unsigned char)(*data++);
+
+ if (databegin + *msize > dataend) {
+ goto overflow;
+ }
+ dataend = databegin + *msize; /* narrow down to marker size */
+
+ if (data + hashwidth > dataend) {
+ goto overflow;
+ }
+ prec = PyBytes_FromStringAndSize(data, hashwidth); /* precursor node */
+ data += hashwidth;
+ if (prec == NULL) {
+ goto bail;
+ }
+
+ if (data + nsuccs * hashwidth > dataend) {
+ goto overflow;
+ }
+ succs = readshas(data, nsuccs, hashwidth);
+ if (succs == NULL) {
+ goto bail;
+ }
+ data += nsuccs * hashwidth;
+
+ if (nparents == 1 || nparents == 2) {
+ if (data + nparents * hashwidth > dataend) {
+ goto overflow;
+ }
+ parents = readshas(data, nparents, hashwidth);
+ if (parents == NULL) {
+ goto bail;
+ }
+ data += nparents * hashwidth;
+ } else { /* any other count means "parents not recorded" */
+ parents = Py_None;
+ Py_INCREF(parents);
+ }
+
+ if (data + 2 * nmetadata > dataend) { /* 2 length bytes (key, value) per metadata pair */
+ goto overflow;
+ }
+ meta = data + (2 * nmetadata); /* the key/value payloads follow the length table */
+ metadata = PyTuple_New(nmetadata);
+ if (metadata == NULL) {
+ goto bail;
+ }
+ for (i = 0; i < nmetadata; i++) {
+ PyObject *tmp, *left = NULL, *right = NULL;
+ Py_ssize_t leftsize = (unsigned char)(*data++);
+ Py_ssize_t rightsize = (unsigned char)(*data++);
+ if (meta + leftsize + rightsize > dataend) {
+ goto overflow;
+ }
+ left = PyBytes_FromStringAndSize(meta, leftsize);
+ meta += leftsize;
+ right = PyBytes_FromStringAndSize(meta, rightsize);
+ meta += rightsize;
+ tmp = PyTuple_New(2);
+ if (!left || !right || !tmp) {
+ Py_XDECREF(left);
+ Py_XDECREF(right);
+ Py_XDECREF(tmp);
+ goto bail;
+ }
+ PyTuple_SET_ITEM(tmp, 0, left); /* SET_ITEM steals the references */
+ PyTuple_SET_ITEM(tmp, 1, right);
+ PyTuple_SET_ITEM(metadata, i, tmp);
+ }
+ ret = Py_BuildValue("(OOHO(di)O)", prec, succs, flags,
+ metadata, mtime, (int)tz * 60, parents); /* tz is stored in minutes, returned in seconds */
+ goto bail; /* return successfully */
+
+overflow:
+ PyErr_SetString(PyExc_ValueError, "overflow in obsstore");
+bail:
+ Py_XDECREF(prec);
+ Py_XDECREF(succs);
+ Py_XDECREF(metadata);
+ Py_XDECREF(parents);
+ return ret;
+}
+
+
+static PyObject *fm1readmarkers(PyObject *self, PyObject *args) { /* parse v1 obsolescence markers in data[offset:stop] into a list of tuples */
+ const char *data, *dataend;
+ int datalen; /* NOTE(review): "s#" into an int -- wrong if PY_SSIZE_T_CLEAN is in effect for this file; confirm */
+ Py_ssize_t offset, stop;
+ PyObject *markers = NULL;
+
+ if (!PyArg_ParseTuple(args, "s#nn", &data, &datalen, &offset, &stop)) {
+ return NULL;
+ }
+ dataend = data + datalen;
+ data += offset;
+ markers = PyList_New(0);
+ if (!markers) {
+ return NULL;
+ }
+ while (offset < stop) {
+ uint32_t msize;
+ int error;
+ PyObject *record = fm1readmarker(data, dataend, &msize); /* also yields the marker's byte size */
+ if (!record) {
+ goto bail;
+ }
+ error = PyList_Append(markers, record);
+ Py_DECREF(record);
+ if (error) {
+ goto bail;
+ }
+ data += msize;
+ offset += msize;
+ }
+ return markers;
+bail:
+ Py_DECREF(markers);
+ return NULL;
+}
+
+static char parsers_doc[] = "Efficient content parsing.";
+
+PyObject *encodedir(PyObject *self, PyObject *args);
+PyObject *pathencode(PyObject *self, PyObject *args);
+PyObject *lowerencode(PyObject *self, PyObject *args);
+PyObject *parse_index2(PyObject *self, PyObject *args);
+
+static PyMethodDef methods[] = { /* module-level method table for the parsers extension */
+ {"pack_dirstate", pack_dirstate, METH_VARARGS, "pack a dirstate\n"},
+ {"nonnormalotherparententries", nonnormalotherparententries, METH_VARARGS,
+ "create a set containing non-normal and other parent entries of given "
+ "dirstate\n"},
+ {"parse_manifest", parse_manifest, METH_VARARGS, "parse a manifest\n"},
+ {"parse_dirstate", parse_dirstate, METH_VARARGS, "parse a dirstate\n"},
+ {"parse_index2", parse_index2, METH_VARARGS, "parse a revlog index\n"},
+ {"asciilower", asciilower, METH_VARARGS, "lowercase an ASCII string\n"},
+ {"asciiupper", asciiupper, METH_VARARGS, "uppercase an ASCII string\n"},
+ {"dict_new_presized", dict_new_presized, METH_VARARGS,
+ "construct a dict with an expected size\n"},
+ {"make_file_foldmap", make_file_foldmap, METH_VARARGS,
+ "make file foldmap\n"},
+ {"encodedir", encodedir, METH_VARARGS, "encodedir a path\n"},
+ {"pathencode", pathencode, METH_VARARGS, "fncache-encode a path\n"},
+ {"lowerencode", lowerencode, METH_VARARGS, "lower-encode a path\n"},
+ {"fm1readmarkers", fm1readmarkers, METH_VARARGS,
+ "parse v1 obsolete markers\n"},
+ {NULL, NULL} /* sentinel */
+};
+
+void dirs_module_init(PyObject *mod);
+void manifest_module_init(PyObject *mod);
+void revlog_module_init(PyObject *mod);
+
+static const int version = 1;
+
+static void module_init(PyObject *mod) /* shared py2/py3 module setup: constants, submodules, dirstatetuple type; errors are not propagated to the caller */
+{
+ PyModule_AddIntConstant(mod, "version", version);
+
+ /* This module constant has two purposes. First, it lets us unit test
+ * the ImportError raised without hard-coding any error text. This
+ * means we can change the text in the future without breaking tests,
+ * even across changesets without a recompile. Second, its presence
+ * can be used to determine whether the version-checking logic is
+ * present, which also helps in testing across changesets without a
+ * recompile. Note that this means the pure-Python version of parsers
+ * should not have this module constant. */
+ PyModule_AddStringConstant(mod, "versionerrortext", versionerrortext);
+
+ dirs_module_init(mod);
+ manifest_module_init(mod);
+ revlog_module_init(mod);
+
+ if (PyType_Ready(&dirstateTupleType) < 0)
+ return; /* leaves the Python error set for the importing caller */
+ Py_INCREF(&dirstateTupleType);
+ PyModule_AddObject(mod, "dirstatetuple",
+ (PyObject *)&dirstateTupleType);
+}
+
+static int check_python_version(void) /* refuse to load if the running interpreter's major/minor differ from the compile-time Python; 0 on match, -1 with ImportError set otherwise */
+{
+ PyObject *sys = PyImport_ImportModule("sys"), *ver;
+ long hexversion;
+ if (!sys)
+ return -1;
+ ver = PyObject_GetAttrString(sys, "hexversion");
+ Py_DECREF(sys);
+ if (!ver)
+ return -1;
+ hexversion = PyInt_AsLong(ver);
+ Py_DECREF(ver);
+ /* sys.hexversion is a 32-bit number by default, so the -1 case
+ * should only occur in unusual circumstances (e.g. if sys.hexversion
+ * is manually set to an invalid value). */
+ if ((hexversion == -1) || (hexversion >> 16 != PY_VERSION_HEX >> 16)) { /* >> 16 keeps only major.minor */
+ PyErr_Format(PyExc_ImportError, "%s: The Mercurial extension "
+ "modules were compiled with Python " PY_VERSION ", but "
+ "Mercurial is currently using Python with sys.hexversion=%ld: "
+ "Python %s\n at: %s", versionerrortext, hexversion,
+ Py_GetVersion(), Py_GetProgramFullPath());
+ return -1;
+ }
+ return 0;
+}
+
+#ifdef IS_PY3K
+static struct PyModuleDef parsers_module = { /* Python 3 module definition */
+ PyModuleDef_HEAD_INIT,
+ "parsers",
+ parsers_doc,
+ -1, /* no per-interpreter state */
+ methods
+};
+
+PyMODINIT_FUNC PyInit_parsers(void) /* Python 3 entry point */
+{
+ PyObject *mod;
+
+ if (check_python_version() == -1)
+ return NULL;
+ mod = PyModule_Create(&parsers_module); /* NOTE(review): mod is not NULL-checked before module_init -- confirm */
+ module_init(mod);
+ return mod;
+}
+#else
+PyMODINIT_FUNC initparsers(void) /* Python 2 entry point */
+{
+ PyObject *mod;
+
+ if (check_python_version() == -1)
+ return;
+ mod = Py_InitModule3("parsers", methods, parsers_doc);
+ module_init(mod);
+}
+#endif
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cext/pathencode.c Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,765 @@
+/*
+ pathencode.c - efficient path name encoding
+
+ Copyright 2012 Facebook
+
+ This software may be used and distributed according to the terms of
+ the GNU General Public License, incorporated herein by reference.
+*/
+
+/*
+ * An implementation of the name encoding scheme used by the fncache
+ * store. The common case is of a path < 120 bytes long, which is
+ * handled either in a single pass with no allocations or two passes
+ * with a single allocation. For longer paths, multiple passes are
+ * required.
+ */
+
+#define PY_SSIZE_T_CLEAN
+#include <Python.h>
+#include <assert.h>
+#include <ctype.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "util.h"
+
+/* state machine for the fast path */
+enum path_state { /* per-byte scanner states for _encode(); tracks prefixes of Windows-reserved names and risky leading/trailing characters */
+ START, /* first byte of a path component */
+ A, /* "AUX" */
+ AU, /* "au" seen */
+ THIRD, /* third of a 3-byte sequence, e.g. "AUX", "NUL" */
+ C, /* "CON" or "COMn" */
+ CO, /* "co" seen */
+ COMLPT, /* "COM" or "LPT" */
+ COMLPTn, /* "COMn"/"LPTn" with digit n seen */
+ L, /* "l" seen (towards "LPT") */
+ LP, /* "lp" seen */
+ N, /* "n" seen (towards "NUL") */
+ NU, /* "nu" seen */
+ P, /* "PRN" */
+ PR, /* "pr" seen */
+ LDOT, /* leading '.' */
+ DOT, /* '.' in a non-leading position */
+ H, /* ".h" */
+ HGDI, /* ".hg", ".d", or ".i" */
+ SPACE, /* ' ' that may end a component */
+ DEFAULT /* byte of a path component after the first */
+};
+
+/* state machine for dir-encoding */
+enum dir_state {
+ DDOT, /* '.' starting a suffix seen */
+ DH, /* ".h" seen */
+ DHGDI, /* ".hg", ".d", or ".i" seen */
+ DDEFAULT /* any other position */
+};
+
+static inline int inset(const uint32_t bitset[], char c) /* non-zero iff byte c is in the 256-bit set (8 x 32-bit words) */
+{
+ return bitset[((uint8_t)c) >> 5] & (1 << (((uint8_t)c) & 31));
+}
+
+static inline void charcopy(char *dest, Py_ssize_t *destlen, size_t destsize,
+ char c) /* append c at *destlen; with dest == NULL only counts, so one code path serves measuring and writing passes */
+{
+ if (dest) {
+ assert(*destlen < destsize);
+ dest[*destlen] = c;
+ }
+ (*destlen)++;
+}
+
+static inline void memcopy(char *dest, Py_ssize_t *destlen, size_t destsize,
+ const void *src, Py_ssize_t len) /* append len bytes of src at *destlen; dest == NULL means counting pass only */
+{
+ if (dest) {
+ assert(*destlen + len < destsize);
+ memcpy((void *)&dest[*destlen], src, len);
+ }
+ *destlen += len;
+}
+
+static inline void hexencode(char *dest, Py_ssize_t *destlen, size_t destsize,
+ uint8_t c) /* append byte c as two lowercase hex digits */
+{
+ static const char hexdigit[] = "0123456789abcdef";
+
+ charcopy(dest, destlen, destsize, hexdigit[c >> 4]);
+ charcopy(dest, destlen, destsize, hexdigit[c & 15]);
+}
+
+/* 3-byte escape: tilde followed by two hex digits */
+static inline void escape3(char *dest, Py_ssize_t *destlen, size_t destsize,
+ char c)
+{
+ charcopy(dest, destlen, destsize, '~');
+ hexencode(dest, destlen, destsize, c);
+}
+
+static Py_ssize_t _encodedir(char *dest, size_t destsize,
+ const char *src, Py_ssize_t len) /* dir-encode: insert ".hg" before the '/' after any component ending in ".hg", ".d" or ".i"; returns output length (dest == NULL counts only) */
+{
+ enum dir_state state = DDEFAULT;
+ Py_ssize_t i = 0, destlen = 0;
+
+ while (i < len) {
+ switch (state) {
+ case DDOT: /* previous byte was '.' */
+ switch (src[i]) {
+ case 'd':
+ case 'i':
+ state = DHGDI;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ break;
+ case 'h':
+ state = DH;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ break;
+ default:
+ state = DDEFAULT; /* re-examine src[i] in DDEFAULT */
+ break;
+ }
+ break;
+ case DH:
+ if (src[i] == 'g') {
+ state = DHGDI;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ }
+ else state = DDEFAULT;
+ break;
+ case DHGDI:
+ if (src[i] == '/') { /* reserved suffix really ends the component */
+ memcopy(dest, &destlen, destsize, ".hg", 3);
+ charcopy(dest, &destlen, destsize, src[i++]);
+ }
+ state = DDEFAULT;
+ break;
+ case DDEFAULT:
+ if (src[i] == '.')
+ state = DDOT;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ break;
+ }
+ }
+
+ return destlen;
+}
+
+PyObject *encodedir(PyObject *self, PyObject *args) /* Python wrapper: dir-encode a bytes path; returns the input object unchanged when no escaping is needed */
+{
+ Py_ssize_t len, newlen;
+ PyObject *pathobj, *newobj;
+ char *path;
+
+ if (!PyArg_ParseTuple(args, "O:encodedir", &pathobj))
+ return NULL;
+
+ if (PyBytes_AsStringAndSize(pathobj, &path, &len) == -1) {
+ PyErr_SetString(PyExc_TypeError, "expected a string");
+ return NULL;
+ }
+
+ newlen = len ? _encodedir(NULL, 0, path, len + 1) : 1; /* len + 1 includes the terminating NUL, which acts as end-of-path sentinel */
+
+ if (newlen == len + 1) { /* nothing was escaped: share the original object */
+ Py_INCREF(pathobj);
+ return pathobj;
+ }
+
+ newobj = PyBytes_FromStringAndSize(NULL, newlen);
+
+ if (newobj) {
+ assert(PyBytes_Check(newobj));
+ Py_SIZE(newobj)--; /* NOTE(review): drops the counted NUL by poking the object header -- relies on CPython internals; confirm portability */
+ _encodedir(PyBytes_AS_STRING(newobj), newlen, path,
+ len + 1);
+ }
+
+ return newobj;
+}
+
+static Py_ssize_t _encode(const uint32_t twobytes[8], const uint32_t onebyte[8],
+ char *dest, Py_ssize_t destlen, size_t destsize,
+ const char *src, Py_ssize_t len,
+ int encodedir) /* core fncache-encoding state machine shared by basicencode/auxencode: escapes Windows-reserved names (AUX, CON, COMn, LPTn, NUL, PRN), leading/trailing '.' and ' ', and maps other bytes via the onebyte (pass-through) and twobytes ('_'-escape) sets; dest == NULL counts only */
+{
+ enum path_state state = START;
+ Py_ssize_t i = 0;
+
+ /*
+ * Python strings end with a zero byte, which we use as a
+ * terminal token as they are not valid inside path names.
+ */
+
+ while (i < len) {
+ switch (state) {
+ case START: /* first byte of a component */
+ switch (src[i]) {
+ case '/':
+ charcopy(dest, &destlen, destsize, src[i++]);
+ break;
+ case '.': /* leading dot is always escaped */
+ state = LDOT;
+ escape3(dest, &destlen, destsize, src[i++]);
+ break;
+ case ' ': /* leading space is always escaped */
+ state = DEFAULT;
+ escape3(dest, &destlen, destsize, src[i++]);
+ break;
+ case 'a':
+ state = A;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ break;
+ case 'c':
+ state = C;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ break;
+ case 'l':
+ state = L;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ break;
+ case 'n':
+ state = N;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ break;
+ case 'p':
+ state = P;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ break;
+ default:
+ state = DEFAULT; /* re-examine src[i] in DEFAULT */
+ break;
+ }
+ break;
+ case A:
+ if (src[i] == 'u') {
+ state = AU;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ }
+ else state = DEFAULT;
+ break;
+ case AU:
+ if (src[i] == 'x') {
+ state = THIRD; /* defer emitting 'x' until we know what follows */
+ i++;
+ }
+ else state = DEFAULT;
+ break;
+ case THIRD: /* decide how to emit the deferred 3rd byte of a reserved name */
+ state = DEFAULT;
+ switch (src[i]) {
+ case '.':
+ case '/':
+ case '\0': /* name really is reserved: escape its last byte */
+ escape3(dest, &destlen, destsize, src[i - 1]);
+ break;
+ default:
+ i--; /* not reserved: back up and re-emit normally */
+ break;
+ }
+ break;
+ case C:
+ if (src[i] == 'o') {
+ state = CO;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ }
+ else state = DEFAULT;
+ break;
+ case CO:
+ if (src[i] == 'm') {
+ state = COMLPT;
+ i++;
+ }
+ else if (src[i] == 'n') {
+ state = THIRD;
+ i++;
+ }
+ else state = DEFAULT;
+ break;
+ case COMLPT: /* "com"/"lpt" seen; reserved only when a digit follows */
+ switch (src[i]) {
+ case '1': case '2': case '3': case '4': case '5':
+ case '6': case '7': case '8': case '9':
+ state = COMLPTn;
+ i++;
+ break;
+ default:
+ state = DEFAULT;
+ charcopy(dest, &destlen, destsize, src[i - 1]); /* emit the deferred 'm'/'t' */
+ break;
+ }
+ break;
+ case COMLPTn: /* "comN"/"lptN": reserved only if the name ends here */
+ state = DEFAULT;
+ switch (src[i]) {
+ case '.':
+ case '/':
+ case '\0':
+ escape3(dest, &destlen, destsize, src[i - 2]); /* escape the 3rd letter, keep the digit */
+ charcopy(dest, &destlen, destsize, src[i - 1]);
+ break;
+ default:
+ memcopy(dest, &destlen, destsize,
+ &src[i - 2], 2); /* emit the deferred two bytes verbatim */
+ break;
+ }
+ break;
+ case L:
+ if (src[i] == 'p') {
+ state = LP;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ }
+ else state = DEFAULT;
+ break;
+ case LP:
+ if (src[i] == 't') {
+ state = COMLPT;
+ i++;
+ }
+ else state = DEFAULT;
+ break;
+ case N:
+ if (src[i] == 'u') {
+ state = NU;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ }
+ else state = DEFAULT;
+ break;
+ case NU:
+ if (src[i] == 'l') {
+ state = THIRD;
+ i++;
+ }
+ else state = DEFAULT;
+ break;
+ case P:
+ if (src[i] == 'r') {
+ state = PR;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ }
+ else state = DEFAULT;
+ break;
+ case PR:
+ if (src[i] == 'n') {
+ state = THIRD;
+ i++;
+ }
+ else state = DEFAULT;
+ break;
+ case LDOT: /* component started with an (already escaped) '.' */
+ switch (src[i]) {
+ case 'd':
+ case 'i':
+ state = HGDI;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ break;
+ case 'h':
+ state = H;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ break;
+ default:
+ state = DEFAULT;
+ break;
+ }
+ break;
+ case DOT: /* non-leading '.' whose emission was deferred */
+ switch (src[i]) {
+ case '/':
+ case '\0': /* trailing dot: escape it */
+ state = START;
+ memcopy(dest, &destlen, destsize, "~2e", 3);
+ charcopy(dest, &destlen, destsize, src[i++]);
+ break;
+ case 'd':
+ case 'i':
+ state = HGDI;
+ charcopy(dest, &destlen, destsize, '.');
+ charcopy(dest, &destlen, destsize, src[i++]);
+ break;
+ case 'h':
+ state = H;
+ memcopy(dest, &destlen, destsize, ".h", 2);
+ i++;
+ break;
+ default:
+ state = DEFAULT;
+ charcopy(dest, &destlen, destsize, '.');
+ break;
+ }
+ break;
+ case H:
+ if (src[i] == 'g') {
+ state = HGDI;
+ charcopy(dest, &destlen, destsize, src[i++]);
+ }
+ else state = DEFAULT;
+ break;
+ case HGDI: /* component ends in ".hg"/".d"/".i" */
+ if (src[i] == '/') {
+ state = START;
+ if (encodedir) /* only the dir-encoding variant appends ".hg" */
+ memcopy(dest, &destlen, destsize, ".hg",
+ 3);
+ charcopy(dest, &destlen, destsize, src[i++]);
+ }
+ else state = DEFAULT;
+ break;
+ case SPACE: /* deferred ' ': escape only if it ends the component */
+ switch (src[i]) {
+ case '/':
+ case '\0':
+ state = START;
+ memcopy(dest, &destlen, destsize, "~20", 3);
+ charcopy(dest, &destlen, destsize, src[i++]);
+ break;
+ default:
+ state = DEFAULT;
+ charcopy(dest, &destlen, destsize, ' ');
+ break;
+ }
+ break;
+ case DEFAULT:
+ while (inset(onebyte, src[i])) { /* fast path: run of pass-through bytes */
+ charcopy(dest, &destlen, destsize, src[i++]);
+ if (i == len)
+ goto done;
+ }
+ switch (src[i]) {
+ case '.':
+ state = DOT;
+ i++;
+ break;
+ case ' ':
+ state = SPACE;
+ i++;
+ break;
+ case '/':
+ state = START;
+ charcopy(dest, &destlen, destsize, '/');
+ i++;
+ break;
+ default:
+ if (inset(onebyte, src[i])) {
+ do {
+ charcopy(dest, &destlen,
+ destsize, src[i++]);
+ } while (i < len &&
+ inset(onebyte, src[i]));
+ }
+ else if (inset(twobytes, src[i])) { /* uppercase-style bytes: '_' prefix + lowercase */
+ char c = src[i++];
+ charcopy(dest, &destlen, destsize, '_');
+ charcopy(dest, &destlen, destsize,
+ c == '_' ? '_' : c + 32);
+ }
+ else
+ escape3(dest, &destlen, destsize,
+ src[i++]);
+ break;
+ }
+ break;
+ }
+ }
+done:
+ return destlen;
+}
+
+static Py_ssize_t basicencode(char *dest, size_t destsize,
+ const char *src, Py_ssize_t len) /* standard fncache encoding: _encode with dir-encoding enabled and the default byte sets */
+{
+ static const uint32_t twobytes[8] = { 0, 0, 0x87fffffe }; /* bitmap: bytes escaped as '_' + lowercase (presumably A-Z and '_') -- see store.py for the authoritative sets */
+
+ static const uint32_t onebyte[8] = {
+ 1, 0x2bff3bfa, 0x68000001, 0x2fffffff, /* bitmap: bytes copied through unchanged */
+ };
+
+ Py_ssize_t destlen = 0;
+
+ return _encode(twobytes, onebyte, dest, destlen, destsize,
+ src, len, 1);
+}
+
+static const Py_ssize_t maxstorepathlen = 120;
+
+static Py_ssize_t _lowerencode(char *dest, size_t destsize,
+ const char *src, Py_ssize_t len) /* lowercase-encode: safe bytes pass through, A-Z become lowercase (+32), everything else gets a ~XX escape */
+{
+ static const uint32_t onebyte[8] = {
+ 1, 0x2bfffbfb, 0xe8000001, 0x2fffffff /* bitmap: pass-through bytes */
+ };
+
+ static const uint32_t lower[8] = { 0, 0, 0x7fffffe }; /* bitmap: 'A'-'Z' */
+
+ Py_ssize_t i, destlen = 0;
+
+ for (i = 0; i < len; i++) {
+ if (inset(onebyte, src[i]))
+ charcopy(dest, &destlen, destsize, src[i]);
+ else if (inset(lower, src[i]))
+ charcopy(dest, &destlen, destsize, src[i] + 32); /* ASCII upper -> lower */
+ else
+ escape3(dest, &destlen, destsize, src[i]);
+ }
+
+ return destlen;
+}
+
+PyObject *lowerencode(PyObject *self, PyObject *args) /* Python wrapper: two passes over _lowerencode -- measure, then fill a new bytes object */
+{
+ char *path;
+ Py_ssize_t len, newlen;
+ PyObject *ret;
+
+ if (!PyArg_ParseTuple(args, "s#:lowerencode", &path, &len))
+ return NULL;
+
+ newlen = _lowerencode(NULL, 0, path, len); /* counting pass */
+ ret = PyBytes_FromStringAndSize(NULL, newlen);
+ if (ret)
+ _lowerencode(PyBytes_AS_STRING(ret), newlen, path, len);
+
+ return ret;
+}
+
+/* See store.py:_auxencode for a description. */
+static Py_ssize_t auxencode(char *dest, size_t destsize,
+ const char *src, Py_ssize_t len)
+{
+ static const uint32_t twobytes[8]; /* all zero: no '_'-escaping in this pass */
+
+ static const uint32_t onebyte[8] = {
+ ~0U, 0xffff3ffe, ~0U, ~0U, ~0U, ~0U, ~0U, ~0U, /* nearly everything passes through; reserved-name handling in _encode does the work */
+ };
+
+ return _encode(twobytes, onebyte, dest, 0, destsize, src, len, 0); /* encodedir=0: no ".hg" suffixing here */
+}
+
+static PyObject *hashmangle(const char *src, Py_ssize_t len, const char sha[20]) /* build the "dh/<truncated dirs>/<name prefix><40-hex sha><suffix>" form used for over-long store paths */
+{
+ static const Py_ssize_t dirprefixlen = 8; /* max bytes kept per directory component */
+ static const Py_ssize_t maxshortdirslen = 68; /* budget for all truncated directories */
+ char *dest;
+ PyObject *ret;
+
+ Py_ssize_t i, d, p, lastslash = len - 1, lastdot = -1;
+ Py_ssize_t destsize, destlen = 0, slop, used;
+
+ while (lastslash >= 0 && src[lastslash] != '/') { /* locate final component and its extension */
+ if (src[lastslash] == '.' && lastdot == -1)
+ lastdot = lastslash;
+ lastslash--;
+ }
+
+#if 0
+ /* All paths should end in a suffix of ".i" or ".d".
+ Unfortunately, the file names in test-hybridencode.py
+ violate this rule. */
+ if (lastdot != len - 3) {
+ PyErr_SetString(PyExc_ValueError,
+ "suffix missing or wrong length");
+ return NULL;
+ }
+#endif
+
+ /* If src contains a suffix, we will append it to the end of
+ the new string, so make room. */
+ destsize = 120;
+ if (lastdot >= 0)
+ destsize += len - lastdot - 1;
+
+ ret = PyBytes_FromStringAndSize(NULL, destsize);
+ if (ret == NULL)
+ return NULL;
+
+ dest = PyBytes_AS_STRING(ret);
+ memcopy(dest, &destlen, destsize, "dh/", 3);
+
+ /* Copy up to dirprefixlen bytes of each path component, up to
+ a limit of maxshortdirslen bytes. */
+ for (i = d = p = 0; i < lastslash; i++, p++) {
+ if (src[i] == '/') {
+ char d = dest[destlen - 1]; /* NOTE(review): shadows the outer Py_ssize_t d (which is otherwise unused) */
+ /* After truncation, a directory name may end
+ in a space or dot, which are unportable. */
+ if (d == '.' || d == ' ')
+ dest[destlen - 1] = '_';
+ /* The + 3 is to account for "dh/" in the beginning */
+ if (destlen > maxshortdirslen + 3)
+ break;
+ charcopy(dest, &destlen, destsize, src[i]);
+ p = -1; /* restart the per-component prefix counter */
+ }
+ else if (p < dirprefixlen)
+ charcopy(dest, &destlen, destsize, src[i]);
+ }
+
+ /* Rewind to just before the last slash copied. */
+ if (destlen > maxshortdirslen + 3)
+ do {
+ destlen--;
+ } while (destlen > 0 && dest[destlen] != '/');
+
+ if (destlen > 3) {
+ if (lastslash > 0) {
+ char d = dest[destlen - 1];
+ /* The last directory component may be
+ truncated, so make it safe. */
+ if (d == '.' || d == ' ')
+ dest[destlen - 1] = '_';
+ }
+
+ charcopy(dest, &destlen, destsize, '/');
+ }
+
+ /* Add a prefix of the original file's name. Its length
+ depends on the number of bytes left after accounting for
+ hash and suffix. */
+ used = destlen + 40; /* 40 = hex digits of the sha appended below */
+ if (lastdot >= 0)
+ used += len - lastdot - 1;
+ slop = maxstorepathlen - used;
+ if (slop > 0) {
+ Py_ssize_t basenamelen =
+ lastslash >= 0 ? len - lastslash - 2 : len - 1; /* presumably len counts a trailing NUL, hence the extra -1 -- confirm against hashencode's callers */
+
+ if (basenamelen > slop)
+ basenamelen = slop;
+ if (basenamelen > 0)
+ memcopy(dest, &destlen, destsize, &src[lastslash + 1],
+ basenamelen);
+ }
+
+ /* Add hash and suffix. */
+ for (i = 0; i < 20; i++)
+ hexencode(dest, &destlen, destsize, sha[i]);
+
+ if (lastdot >= 0)
+ memcopy(dest, &destlen, destsize, &src[lastdot],
+ len - lastdot - 1);
+
+ assert(PyBytes_Check(ret));
+ Py_SIZE(ret) = destlen; /* NOTE(review): shrinks the bytes object in place via its header -- CPython-internal; confirm */
+
+ return ret;
+}
+
+/*
+ * Avoiding a trip through Python would improve performance by 50%,
+ * but we don't encounter enough long names to be worth the code.
+ */
+static int sha1hash(char hash[20], const char *str, Py_ssize_t len) /* compute hashlib.sha1(str[:len]).digest() into hash; 0 on success, -1 with a Python error set */
+{
+ static PyObject *shafunc; /* cached hashlib.sha1 constructor; initialized lazily on first call */
+ PyObject *shaobj, *hashobj;
+
+ if (shafunc == NULL) {
+ PyObject *hashlib, *name = PyBytes_FromString("hashlib"); /* NOTE(review): PyImport_Import with a bytes name -- presumably fails on Python 3 where str is expected; confirm */
+
+ if (name == NULL)
+ return -1;
+
+ hashlib = PyImport_Import(name);
+ Py_DECREF(name);
+
+ if (hashlib == NULL) {
+ PyErr_SetString(PyExc_ImportError, "hashlib");
+ return -1;
+ }
+ shafunc = PyObject_GetAttrString(hashlib, "sha1");
+ Py_DECREF(hashlib);
+
+ if (shafunc == NULL) {
+ PyErr_SetString(PyExc_AttributeError,
+ "module 'hashlib' has no "
+ "attribute 'sha1'");
+ return -1;
+ }
+ }
+
+ shaobj = PyObject_CallFunction(shafunc, "s#", str, len);
+
+ if (shaobj == NULL)
+ return -1;
+
+ hashobj = PyObject_CallMethod(shaobj, "digest", "");
+ Py_DECREF(shaobj);
+ if (hashobj == NULL)
+ return -1;
+
+ if (!PyBytes_Check(hashobj) || PyBytes_GET_SIZE(hashobj) != 20) {
+ PyErr_SetString(PyExc_TypeError,
+ "result of digest is not a 20-byte hash");
+ Py_DECREF(hashobj);
+ return -1;
+ }
+
+ memcpy(hash, PyBytes_AS_STRING(hashobj), 20);
+ Py_DECREF(hashobj);
+ return 0;
+}
+
+#define MAXENCODE 4096 * 4
+
+static PyObject *hashencode(const char *src, Py_ssize_t len) /* long-path form: dir-encode, sha1 the result, then lower+aux encode and hand off to hashmangle */
+{
+ char dired[MAXENCODE];
+ char lowered[MAXENCODE];
+ char auxed[MAXENCODE];
+ Py_ssize_t dirlen, lowerlen, auxlen, baselen;
+ char sha[20];
+
+ baselen = (len - 5) * 3; /* worst-case 3x expansion of the path without its 5-byte prefix -- presumably "data/"; confirm against callers in store.py */
+ if (baselen >= MAXENCODE) {
+ PyErr_SetString(PyExc_ValueError, "string too long");
+ return NULL;
+ }
+
+ dirlen = _encodedir(dired, baselen, src, len);
+ if (sha1hash(sha, dired, dirlen - 1) == -1) /* - 1 excludes the counted NUL terminator */
+ return NULL;
+ lowerlen = _lowerencode(lowered, baselen, dired + 5, dirlen - 5); /* + 5 skips the prefix */
+ auxlen = auxencode(auxed, baselen, lowered, lowerlen);
+ return hashmangle(auxed, auxlen, sha);
+}
+
+PyObject *pathencode(PyObject *self, PyObject *args) /* fncache path encoding entry point: basicencode for short results, hashencode once the encoded form would exceed maxstorepathlen */
+{
+ Py_ssize_t len, newlen;
+ PyObject *pathobj, *newobj;
+ char *path;
+
+ if (!PyArg_ParseTuple(args, "O:pathencode", &pathobj))
+ return NULL;
+
+ if (PyBytes_AsStringAndSize(pathobj, &path, &len) == -1) {
+ PyErr_SetString(PyExc_TypeError, "expected a string");
+ return NULL;
+ }
+
+ if (len > maxstorepathlen)
+ newlen = maxstorepathlen + 2; /* force the hashencode branch without a counting pass */
+ else
+ newlen = len ? basicencode(NULL, 0, path, len + 1) : 1; /* len + 1 includes the NUL sentinel */
+
+ if (newlen <= maxstorepathlen + 1) {
+ if (newlen == len + 1) { /* unchanged: share the original object */
+ Py_INCREF(pathobj);
+ return pathobj;
+ }
+
+ newobj = PyBytes_FromStringAndSize(NULL, newlen);
+
+ if (newobj) {
+ assert(PyBytes_Check(newobj));
+ Py_SIZE(newobj)--; /* drop the counted NUL; relies on CPython internals */
+ basicencode(PyBytes_AS_STRING(newobj), newlen, path,
+ len + 1);
+ }
+ }
+ else
+ newobj = hashencode(path, len + 1);
+
+ return newobj;
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cext/revlog.c Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,2089 @@
+/*
+ parsers.c - efficient content parsing
+
+ Copyright 2008 Matt Mackall <mpm@selenic.com> and others
+
+ This software may be used and distributed according to the terms of
+ the GNU General Public License, incorporated herein by reference.
+*/
+
+#include <Python.h>
+#include <assert.h>
+#include <ctype.h>
+#include <stddef.h>
+#include <string.h>
+
+#include "util.h"
+#include "bitmanipulation.h"
+
+#ifdef IS_PY3K
+/* The mapping of Python types is meant to be temporary to get Python
+ * 3 to compile. We should remove this once Python 3 support is fully
+ * supported and proper types are used in the extensions themselves. */
+#define PyInt_Check PyLong_Check
+#define PyInt_FromLong PyLong_FromLong
+#define PyInt_FromSsize_t PyLong_FromSsize_t
+#define PyInt_AS_LONG PyLong_AS_LONG
+#define PyInt_AsLong PyLong_AsLong
+#endif
+
+/*
+ * A base-16 trie for fast node->rev mapping.
+ *
+ * Positive value is index of the next node in the trie
+ * Negative value is a leaf: -(rev + 1)
+ * Zero is empty
+ */
+typedef struct {
+	int children[16]; /* encoded per the scheme described above */
+} nodetree;
+
+/*
+ * This class has two behaviors.
+ *
+ * When used in a list-like way (with integer keys), we decode an
+ * entry in a RevlogNG index file on demand. Our last entry is a
+ * sentinel, always a nullid. We have limited support for
+ * integer-keyed insert and delete, only at elements right before the
+ * sentinel.
+ *
+ * With string keys, we lazily perform a reverse mapping from node to
+ * rev, using a base-16 trie.
+ */
+typedef struct {
+	PyObject_HEAD
+	/* Type-specific fields go here. */
+	PyObject *data;        /* raw bytes of index */
+	Py_buffer buf;         /* buffer of data */
+	PyObject **cache;      /* cached tuples */
+	const char **offsets;  /* populated on demand */
+	Py_ssize_t raw_length; /* original number of elements */
+	Py_ssize_t length;     /* current number of elements */
+	PyObject *added;       /* populated on demand */
+	PyObject *headrevs;    /* cache, invalidated on changes */
+	PyObject *filteredrevs;/* filtered revs set */
+	nodetree *nt;          /* base-16 trie */
+	unsigned ntlength;          /* # nodes in use */
+	unsigned ntcapacity;        /* # nodes allocated */
+	int ntdepth;                /* maximum depth of tree */
+	int ntsplits;               /* # splits performed */
+	int ntrev;                  /* last rev scanned */
+	int ntlookups;              /* # lookups */
+	int ntmisses;               /* # lookups that miss the cache */
+	int inlined; /* nonzero when entries are variable-width, i.e.
+	              * revision data is inlined between index records */
+} indexObject;
+
+/* Total entry count: on-disk entries plus any appended in memory. */
+static Py_ssize_t index_length(const indexObject *self)
+{
+	Py_ssize_t n = self->length;
+	if (self->added != NULL)
+		n += PyList_GET_SIZE(self->added);
+	return n;
+}
+
+static PyObject *nullentry;
+static const char nullid[20];
+
+static Py_ssize_t inline_scan(indexObject *self, const char **offsets);
+
+#if LONG_MAX == 0x7fffffffL
+static char *tuple_format = "Kiiiiiis#";
+#else
+static char *tuple_format = "kiiiiiis#";
+#endif
+
+/* A RevlogNG v1 index entry is 64 bytes long. */
+static const long v1_hdrsize = 64;
+
+/*
+ * Return a pointer to the beginning of a RevlogNG record.
+ */
+static const char *index_deref(indexObject *self, Py_ssize_t pos)
+{
+	/* Inlined indexes have variable-width records, so a table of
+	 * record offsets is built lazily on first access. */
+	if (self->inlined && pos > 0) {
+		if (self->offsets == NULL) {
+			self->offsets = PyMem_Malloc(self->raw_length *
+					             sizeof(*self->offsets));
+			if (self->offsets == NULL)
+				/* NULL-with-exception, cast to match return type */
+				return (const char *)PyErr_NoMemory();
+			inline_scan(self, self->offsets);
+		}
+		return self->offsets[pos];
+	}
+
+	/* non-inlined: records are fixed-size, simple arithmetic */
+	return (const char *)(self->buf.buf) + pos * v1_hdrsize;
+}
+
+/*
+ * Store the two parent revs of rev into ps[0]/ps[1].  Returns 0 on
+ * success, -1 (with ValueError set) if a parent exceeds maxrev.
+ * Callers must ensure 0 <= rev < index length.
+ */
+static inline int index_get_parents(indexObject *self, Py_ssize_t rev,
+				    int *ps, int maxrev)
+{
+	if (rev >= self->length - 1) {
+		/* rev lives in the in-memory "added" list of 8-tuples */
+		PyObject *tuple = PyList_GET_ITEM(self->added,
+						  rev - self->length + 1);
+		ps[0] = (int)PyInt_AS_LONG(PyTuple_GET_ITEM(tuple, 5));
+		ps[1] = (int)PyInt_AS_LONG(PyTuple_GET_ITEM(tuple, 6));
+	} else {
+		const char *data = index_deref(self, rev);
+		ps[0] = getbe32(data + 24);
+		ps[1] = getbe32(data + 28);
+	}
+	/* If index file is corrupted, ps[] may point to invalid revisions. So
+	 * there is a risk of buffer overflow to trust them unconditionally. */
+	if (ps[0] > maxrev || ps[1] > maxrev) {
+		PyErr_SetString(PyExc_ValueError, "parent out of range");
+		return -1;
+	}
+	return 0;
+}
+
+
+/*
+ * RevlogNG format (all in big endian, data may be inlined):
+ * 6 bytes: offset
+ * 2 bytes: flags
+ * 4 bytes: compressed length
+ * 4 bytes: uncompressed length
+ * 4 bytes: base revision
+ * 4 bytes: link revision
+ * 4 bytes: parent 1 revision
+ * 4 bytes: parent 2 revision
+ * 32 bytes: nodeid (only 20 bytes used)
+ */
+/*
+ * Return the 8-tuple for the entry at pos (supports negative
+ * indexing), building and caching it on demand.  NULL + IndexError
+ * when pos is out of range.
+ */
+static PyObject *index_get(indexObject *self, Py_ssize_t pos)
+{
+	uint64_t offset_flags;
+	int comp_len, uncomp_len, base_rev, link_rev, parent_1, parent_2;
+	const char *c_node_id;
+	const char *data;
+	Py_ssize_t length = index_length(self);
+	PyObject *entry;
+
+	if (pos < 0)
+		pos += length;
+
+	if (pos < 0 || pos >= length) {
+		PyErr_SetString(PyExc_IndexError, "revlog index out of range");
+		return NULL;
+	}
+
+	/* the last entry is always the nullid sentinel */
+	if (pos == length - 1) {
+		Py_INCREF(nullentry);
+		return nullentry;
+	}
+
+	/* entries past the on-disk portion live in self->added */
+	if (pos >= self->length - 1) {
+		PyObject *obj;
+		obj = PyList_GET_ITEM(self->added, pos - self->length + 1);
+		Py_INCREF(obj);
+		return obj;
+	}
+
+	/* tuple cache, allocated lazily on first decode */
+	if (self->cache) {
+		if (self->cache[pos]) {
+			Py_INCREF(self->cache[pos]);
+			return self->cache[pos];
+		}
+	} else {
+		self->cache = calloc(self->raw_length, sizeof(PyObject *));
+		if (self->cache == NULL)
+			return PyErr_NoMemory();
+	}
+
+	data = index_deref(self, pos);
+	if (data == NULL)
+		return NULL;
+
+	offset_flags = getbe32(data + 4);
+	if (pos == 0) /* mask out version number for the first entry */
+		offset_flags &= 0xFFFF;
+	else {
+		uint32_t offset_high = getbe32(data);
+		offset_flags |= ((uint64_t)offset_high) << 32;
+	}
+
+	comp_len = getbe32(data + 8);
+	uncomp_len = getbe32(data + 12);
+	base_rev = getbe32(data + 16);
+	link_rev = getbe32(data + 20);
+	parent_1 = getbe32(data + 24);
+	parent_2 = getbe32(data + 28);
+	c_node_id = data + 32;
+
+	entry = Py_BuildValue(tuple_format, offset_flags, comp_len,
+			      uncomp_len, base_rev, link_rev,
+			      parent_1, parent_2, c_node_id, 20);
+
+	if (entry) {
+		/* untrack from GC (contents are immutable); the extra
+		 * INCREF is the reference held by self->cache */
+		PyObject_GC_UnTrack(entry);
+		Py_INCREF(entry);
+	}
+
+	self->cache[pos] = entry;
+
+	return entry;
+}
+
+/*
+ * Return the 20-byte SHA of the node corresponding to the given rev.
+ */
+static const char *index_node(indexObject *self, Py_ssize_t pos)
+{
+	Py_ssize_t length = index_length(self);
+	const char *data;
+
+	/* the sentinel entry and the INT_MAX marker both map to nullid */
+	if (pos == length - 1 || pos == INT_MAX)
+		return nullid;
+
+	if (pos >= length)
+		return NULL;
+
+	/* in-memory entry: node is item 7 of the 8-tuple */
+	if (pos >= self->length - 1) {
+		PyObject *tuple, *str;
+		tuple = PyList_GET_ITEM(self->added, pos - self->length + 1);
+		str = PyTuple_GetItem(tuple, 7);
+		return str ? PyBytes_AS_STRING(str) : NULL;
+	}
+
+	/* on-disk entry: node id starts at byte 32 of the record */
+	data = index_deref(self, pos);
+	return data ? data + 32 : NULL;
+}
+
+static int nt_insert(indexObject *self, const char *node, int rev);
+
+/*
+ * Extract a 20-byte binary node from obj into *node/*nodelen.
+ * Returns 0 on success, -1 (exception set) on wrong type or length.
+ */
+static int node_check(PyObject *obj, char **node, Py_ssize_t *nodelen)
+{
+	if (PyBytes_AsStringAndSize(obj, node, nodelen) == -1)
+		return -1;
+	if (*nodelen != 20) {
+		PyErr_SetString(PyExc_ValueError, "20-byte hash required");
+		return -1;
+	}
+	return 0;
+}
+
+/*
+ * index.insert(i, entry): append an 8-tuple entry.  Only insertion
+ * right before the nullid sentinel (index -1) is supported.
+ */
+static PyObject *index_insert(indexObject *self, PyObject *args)
+{
+	PyObject *obj;
+	char *node;
+	int index;
+	Py_ssize_t len, nodelen;
+
+	if (!PyArg_ParseTuple(args, "iO", &index, &obj))
+		return NULL;
+
+	if (!PyTuple_Check(obj) || PyTuple_GET_SIZE(obj) != 8) {
+		PyErr_SetString(PyExc_TypeError, "8-tuple required");
+		return NULL;
+	}
+
+	if (node_check(PyTuple_GET_ITEM(obj, 7), &node, &nodelen) == -1)
+		return NULL;
+
+	len = index_length(self);
+
+	if (index < 0)
+		index += len;
+
+	if (index != len - 1) {
+		PyErr_SetString(PyExc_IndexError,
+				"insert only supported at index -1");
+		return NULL;
+	}
+
+	if (self->added == NULL) {
+		self->added = PyList_New(0);
+		if (self->added == NULL)
+			return NULL;
+	}
+
+	if (PyList_Append(self->added, obj) == -1)
+		return NULL;
+
+	/* keep the node trie in sync if it has been built.
+	 * NOTE(review): nt_insert's return value is ignored here —
+	 * a trie allocation failure would go unreported. */
+	if (self->nt)
+		nt_insert(self, node, index);
+
+	/* any change invalidates the cached head list */
+	Py_CLEAR(self->headrevs);
+	Py_RETURN_NONE;
+}
+
+/* Free all derived data: tuple cache, offset table, trie, head list. */
+static void _index_clearcaches(indexObject *self)
+{
+	if (self->cache) {
+		Py_ssize_t i;
+
+		for (i = 0; i < self->raw_length; i++)
+			Py_CLEAR(self->cache[i]);
+		free(self->cache);
+		self->cache = NULL;
+	}
+	if (self->offsets) {
+		PyMem_Free(self->offsets);
+		self->offsets = NULL;
+	}
+	if (self->nt) {
+		free(self->nt);
+		self->nt = NULL;
+	}
+	Py_CLEAR(self->headrevs);
+}
+
+/* index.clearcaches(): drop caches and reset all trie statistics. */
+static PyObject *index_clearcaches(indexObject *self)
+{
+	_index_clearcaches(self);
+	self->ntlength = 0;
+	self->ntcapacity = 0;
+	self->ntdepth = 0;
+	self->ntsplits = 0;
+	self->ntrev = -1;
+	self->ntlookups = 0;
+	self->ntmisses = 0;
+	Py_RETURN_NONE;
+}
+
+/* index.stats() -> dict of internal counters, for debugging. */
+static PyObject *index_stats(indexObject *self)
+{
+	PyObject *obj = PyDict_New();
+	PyObject *t = NULL;
+
+	if (obj == NULL)
+		return NULL;
+
+/* insert self->__n into obj under key __d; jumps to bail on error,
+ * where Py_XDECREF(t) releases the pending reference */
+#define istat(__n, __d) \
+	do { \
+		t = PyInt_FromSsize_t(self->__n); \
+		if (!t) \
+			goto bail; \
+		if (PyDict_SetItemString(obj, __d, t) == -1) \
+			goto bail; \
+		Py_DECREF(t); \
+	} while (0)
+
+	if (self->added) {
+		Py_ssize_t len = PyList_GET_SIZE(self->added);
+		t = PyInt_FromSsize_t(len);
+		if (!t)
+			goto bail;
+		if (PyDict_SetItemString(obj, "index entries added", t) == -1)
+			goto bail;
+		Py_DECREF(t);
+	}
+
+	/* only report on-disk size when it differs from the live count */
+	if (self->raw_length != self->length - 1)
+		istat(raw_length, "revs on disk");
+	istat(length, "revs in memory");
+	istat(ntcapacity, "node trie capacity");
+	istat(ntdepth, "node trie depth");
+	istat(ntlength, "node trie count");
+	istat(ntlookups, "node trie lookups");
+	istat(ntmisses, "node trie misses");
+	istat(ntrev, "node trie last rev scanned");
+	istat(ntsplits, "node trie splits");
+
+#undef istat
+
+	return obj;
+
+bail:
+	Py_XDECREF(obj);
+	Py_XDECREF(t);
+	return NULL;
+}
+
+/*
+ * When we cache a list, we want to be sure the caller can't mutate
+ * the cached copy.
+ */
+static PyObject *list_copy(PyObject *list)
+{
+	Py_ssize_t n = PyList_GET_SIZE(list);
+	PyObject *copy = PyList_New(n);
+	Py_ssize_t pos;
+
+	if (copy == NULL)
+		return NULL;
+
+	for (pos = 0; pos < n; pos++) {
+		PyObject *item = PyList_GET_ITEM(list, pos);
+		/* PyList_SET_ITEM steals a reference, so take one first */
+		Py_INCREF(item);
+		PyList_SET_ITEM(copy, pos, item);
+	}
+
+	return copy;
+}
+
+static int check_filter(PyObject *filter, Py_ssize_t arg) {
+ if (filter) {
+ PyObject *arglist, *result;
+ int isfiltered;
+
+ arglist = Py_BuildValue("(n)", arg);
+ if (!arglist) {
+ return -1;
+ }
+
+ result = PyEval_CallObject(filter, arglist);
+ Py_DECREF(arglist);
+ if (!result) {
+ return -1;
+ }
+
+ /* PyObject_IsTrue returns 1 if true, 0 if false, -1 if error,
+ * same as this function, so we can just return it directly.*/
+ isfiltered = PyObject_IsTrue(result);
+ Py_DECREF(result);
+ return isfiltered;
+ } else {
+ return 0;
+ }
+}
+
+/*
+ * Mark every rev in list with the given phase marker in phases[] and
+ * return the smallest rev seen (or index length + 1 for an empty
+ * list).  Returns -2 if iteration fails.
+ *
+ * NOTE(review): list items are assumed to be ints within the bounds
+ * of phases[] — the PyInt_AS_LONG and the phases[] write are both
+ * unchecked; verify against callers.
+ */
+static Py_ssize_t add_roots_get_min(indexObject *self, PyObject *list,
+                                    Py_ssize_t marker, char *phases)
+{
+	PyObject *iter = NULL;
+	PyObject *iter_item = NULL;
+	Py_ssize_t min_idx = index_length(self) + 1;
+	long iter_item_long;
+
+	if (PyList_GET_SIZE(list) != 0) {
+		iter = PyObject_GetIter(list);
+		if (iter == NULL)
+			return -2;
+		while ((iter_item = PyIter_Next(iter)))
+		{
+			iter_item_long = PyInt_AS_LONG(iter_item);
+			Py_DECREF(iter_item);
+			if (iter_item_long < min_idx)
+				min_idx = iter_item_long;
+			phases[iter_item_long] = marker;
+		}
+		Py_DECREF(iter);
+	}
+
+	return min_idx;
+}
+
+/* Raise phases[i] to the maximum of its parents' phases (a child is
+ * never in an earlier phase than either parent). */
+static inline void set_phase_from_parents(char *phases, int parent_1,
+                                          int parent_2, Py_ssize_t i)
+{
+	int parents[2];
+	int j;
+
+	parents[0] = parent_1;
+	parents[1] = parent_2;
+	for (j = 0; j < 2; j++) {
+		int p = parents[j];
+		if (p >= 0 && phases[p] > phases[i])
+			phases[i] = phases[p];
+	}
+}
+
+/*
+ * reachableroots2(minroot, heads, roots, includepath) -> list of revs.
+ *
+ * Walk backwards from heads (never below minroot) and collect every
+ * root that is reachable.  With includepath, also collect all revs on
+ * a path between a reachable root and a head.  Rev r is tracked at
+ * revstates[r + 1] so nullrev (-1) fits at slot 0.
+ */
+static PyObject *reachableroots2(indexObject *self, PyObject *args)
+{
+
+	/* Input */
+	long minroot;
+	PyObject *includepatharg = NULL;
+	int includepath = 0;
+	/* heads and roots are lists */
+	PyObject *heads = NULL;
+	PyObject *roots = NULL;
+	PyObject *reachable = NULL;
+
+	PyObject *val;
+	Py_ssize_t len = index_length(self) - 1;
+	long revnum;
+	Py_ssize_t k;
+	Py_ssize_t i;
+	Py_ssize_t l;
+	int r;
+	int parents[2];
+
+	/* Internal data structure:
+	 * tovisit: array of length len+1 (all revs + nullrev), filled upto lentovisit
+	 * revstates: array of length len+1 (all revs + nullrev) */
+	int *tovisit = NULL;
+	long lentovisit = 0;
+	enum { RS_SEEN = 1, RS_ROOT = 2, RS_REACHABLE = 4 };
+	char *revstates = NULL;
+
+	/* Get arguments */
+	if (!PyArg_ParseTuple(args, "lO!O!O!", &minroot, &PyList_Type, &heads,
+			      &PyList_Type, &roots,
+			      &PyBool_Type, &includepatharg))
+		goto bail;
+
+	if (includepatharg == Py_True)
+		includepath = 1;
+
+	/* Initialize return set */
+	reachable = PyList_New(0);
+	if (reachable == NULL)
+		goto bail;
+
+	/* Initialize internal datastructures */
+	tovisit = (int *)malloc((len + 1) * sizeof(int));
+	if (tovisit == NULL) {
+		PyErr_NoMemory();
+		goto bail;
+	}
+
+	revstates = (char *)calloc(len + 1, 1);
+	if (revstates == NULL) {
+		PyErr_NoMemory();
+		goto bail;
+	}
+
+	/* Mark the roots */
+	l = PyList_GET_SIZE(roots);
+	for (i = 0; i < l; i++) {
+		revnum = PyInt_AsLong(PyList_GET_ITEM(roots, i));
+		if (revnum == -1 && PyErr_Occurred())
+			goto bail;
+		/* If root is out of range, e.g. wdir(), it must be unreachable
+		 * from heads. So we can just ignore it. */
+		if (revnum + 1 < 0 || revnum + 1 >= len + 1)
+			continue;
+		revstates[revnum + 1] |= RS_ROOT;
+	}
+
+	/* Populate tovisit with all the heads */
+	l = PyList_GET_SIZE(heads);
+	for (i = 0; i < l; i++) {
+		revnum = PyInt_AsLong(PyList_GET_ITEM(heads, i));
+		if (revnum == -1 && PyErr_Occurred())
+			goto bail;
+		if (revnum + 1 < 0 || revnum + 1 >= len + 1) {
+			PyErr_SetString(PyExc_IndexError, "head out of range");
+			goto bail;
+		}
+		if (!(revstates[revnum + 1] & RS_SEEN)) {
+			tovisit[lentovisit++] = (int)revnum;
+			revstates[revnum + 1] |= RS_SEEN;
+		}
+	}
+
+	/* Visit the tovisit list and find the reachable roots */
+	k = 0;
+	while (k < lentovisit) {
+		/* Add the node to reachable if it is a root*/
+		revnum = tovisit[k++];
+		if (revstates[revnum + 1] & RS_ROOT) {
+			revstates[revnum + 1] |= RS_REACHABLE;
+			val = PyInt_FromLong(revnum);
+			if (val == NULL)
+				goto bail;
+			r = PyList_Append(reachable, val);
+			Py_DECREF(val);
+			if (r < 0)
+				goto bail;
+			/* without includepath, stop descending at a root */
+			if (includepath == 0)
+				continue;
+		}
+
+		/* Add its parents to the list of nodes to visit */
+		if (revnum == -1)
+			continue;
+		r = index_get_parents(self, revnum, parents, (int)len - 1);
+		if (r < 0)
+			goto bail;
+		for (i = 0; i < 2; i++) {
+			if (!(revstates[parents[i] + 1] & RS_SEEN)
+			    && parents[i] >= minroot) {
+				tovisit[lentovisit++] = parents[i];
+				revstates[parents[i] + 1] |= RS_SEEN;
+			}
+		}
+	}
+
+	/* Find all the nodes in between the roots we found and the heads
+	 * and add them to the reachable set */
+	if (includepath == 1) {
+		long minidx = minroot;
+		if (minidx < 0)
+			minidx = 0;
+		/* forward pass: a seen rev with a reachable parent is
+		 * itself on a root-to-head path */
+		for (i = minidx; i < len; i++) {
+			if (!(revstates[i + 1] & RS_SEEN))
+				continue;
+			r = index_get_parents(self, i, parents, (int)len - 1);
+			/* Corrupted index file, error is set from
+			 * index_get_parents */
+			if (r < 0)
+				goto bail;
+			if (((revstates[parents[0] + 1] |
+			      revstates[parents[1] + 1]) & RS_REACHABLE)
+			    && !(revstates[i + 1] & RS_REACHABLE)) {
+				revstates[i + 1] |= RS_REACHABLE;
+				val = PyInt_FromLong(i);
+				if (val == NULL)
+					goto bail;
+				r = PyList_Append(reachable, val);
+				Py_DECREF(val);
+				if (r < 0)
+					goto bail;
+			}
+		}
+	}
+
+	free(revstates);
+	free(tovisit);
+	return reachable;
+bail:
+	Py_XDECREF(reachable);
+	free(revstates);
+	free(tovisit);
+	return NULL;
+}
+
+/*
+ * computephases(roots) -> (phase-per-rev list, list of per-phase sets).
+ *
+ * roots is a list of per-phase root-rev lists.  Phases are seeded at
+ * the roots and propagated to descendants via parent links.
+ *
+ * NOTE(review): if roots is not a list this returns NULL without
+ * setting an exception — callers would see a SystemError.
+ */
+static PyObject *compute_phases_map_sets(indexObject *self, PyObject *args)
+{
+	PyObject *roots = Py_None;
+	PyObject *ret = NULL;
+	PyObject *phaseslist = NULL;
+	PyObject *phaseroots = NULL;
+	PyObject *phaseset = NULL;
+	PyObject *phasessetlist = NULL;
+	PyObject *rev = NULL;
+	Py_ssize_t len = index_length(self) - 1;
+	Py_ssize_t numphase = 0;
+	Py_ssize_t minrevallphases = 0;
+	Py_ssize_t minrevphase = 0;
+	Py_ssize_t i = 0;
+	char *phases = NULL;
+	long phase;
+
+	if (!PyArg_ParseTuple(args, "O", &roots))
+		goto done;
+	if (roots == NULL || !PyList_Check(roots))
+		goto done;
+
+	phases = calloc(len, 1); /* phase per rev: {0: public, 1: draft, 2: secret} */
+	if (phases == NULL) {
+		PyErr_NoMemory();
+		goto done;
+	}
+	/* Put the phase information of all the roots in phases */
+	numphase = PyList_GET_SIZE(roots)+1;
+	minrevallphases = len + 1;
+	phasessetlist = PyList_New(numphase);
+	if (phasessetlist == NULL)
+		goto done;
+
+	/* slot 0 (public) has no set; it is computed by difference */
+	PyList_SET_ITEM(phasessetlist, 0, Py_None);
+	Py_INCREF(Py_None);
+
+	for (i = 0; i < numphase-1; i++) {
+		phaseroots = PyList_GET_ITEM(roots, i);
+		phaseset = PySet_New(NULL);
+		if (phaseset == NULL)
+			goto release;
+		/* SET_ITEM steals phaseset; the list now owns it */
+		PyList_SET_ITEM(phasessetlist, i+1, phaseset);
+		if (!PyList_Check(phaseroots))
+			goto release;
+		minrevphase = add_roots_get_min(self, phaseroots, i+1, phases);
+		if (minrevphase == -2) /* Error from add_roots_get_min */
+			goto release;
+		minrevallphases = MIN(minrevallphases, minrevphase);
+	}
+	/* Propagate the phase information from the roots to the revs */
+	if (minrevallphases != -1) {
+		int parents[2];
+		for (i = minrevallphases; i < len; i++) {
+			if (index_get_parents(self, i, parents,
+					      (int)len - 1) < 0)
+				goto release;
+			set_phase_from_parents(phases, parents[0], parents[1], i);
+		}
+	}
+	/* Transform phase list to a python list */
+	phaseslist = PyList_New(len);
+	if (phaseslist == NULL)
+		goto release;
+	for (i = 0; i < len; i++) {
+		PyObject *phaseval;
+
+		phase = phases[i];
+		/* We only store the sets of phase for non public phase, the public phase
+		 * is computed as a difference */
+		if (phase != 0) {
+			phaseset = PyList_GET_ITEM(phasessetlist, phase);
+			rev = PyInt_FromLong(i);
+			if (rev == NULL)
+				goto release;
+			/* NOTE(review): PySet_Add result is unchecked */
+			PySet_Add(phaseset, rev);
+			Py_XDECREF(rev);
+		}
+		phaseval = PyInt_FromLong(phase);
+		if (phaseval == NULL)
+			goto release;
+		PyList_SET_ITEM(phaseslist, i, phaseval);
+	}
+	/* PyTuple_Pack takes its own references, so the locals are
+	 * released unconditionally below */
+	ret = PyTuple_Pack(2, phaseslist, phasessetlist);
+
+release:
+	Py_XDECREF(phaseslist);
+	Py_XDECREF(phasessetlist);
+done:
+	free(phases);
+	return ret;
+}
+
+/*
+ * index.headrevs([filteredrevs]) -> list of head revs.
+ *
+ * A head is a rev with no unfiltered children.  The result is cached
+ * on self->headrevs and reused while filteredrevs is unchanged (by
+ * identity); callers get a defensive copy.
+ */
+static PyObject *index_headrevs(indexObject *self, PyObject *args)
+{
+	Py_ssize_t i, j, len;
+	char *nothead = NULL;
+	PyObject *heads = NULL;
+	PyObject *filter = NULL;
+	PyObject *filteredrevs = Py_None;
+
+	if (!PyArg_ParseTuple(args, "|O", &filteredrevs)) {
+		return NULL;
+	}
+
+	if (self->headrevs && filteredrevs == self->filteredrevs)
+		return list_copy(self->headrevs);
+
+	Py_DECREF(self->filteredrevs);
+	self->filteredrevs = filteredrevs;
+	Py_INCREF(filteredrevs);
+
+	if (filteredrevs != Py_None) {
+		filter = PyObject_GetAttrString(filteredrevs, "__contains__");
+		if (!filter) {
+			PyErr_SetString(PyExc_TypeError,
+				"filteredrevs has no attribute __contains__");
+			goto bail;
+		}
+	}
+
+	len = index_length(self) - 1;
+	heads = PyList_New(0);
+	if (heads == NULL)
+		goto bail;
+	/* empty repo: the only head is nullrev */
+	if (len == 0) {
+		PyObject *nullid = PyInt_FromLong(-1);
+		if (nullid == NULL || PyList_Append(heads, nullid) == -1) {
+			Py_XDECREF(nullid);
+			goto bail;
+		}
+		goto done;
+	}
+
+	/* nothead[i] set means rev i has an unfiltered child or is
+	 * itself filtered */
+	nothead = calloc(len, 1);
+	if (nothead == NULL) {
+		PyErr_NoMemory();
+		goto bail;
+	}
+
+	for (i = len - 1; i >= 0; i--) {
+		int isfiltered;
+		int parents[2];
+
+		/* If nothead[i] == 1, it means we've seen an unfiltered child of this
+		 * node already, and therefore this node is not filtered. So we can skip
+		 * the expensive check_filter step.
+		 */
+		if (nothead[i] != 1) {
+			isfiltered = check_filter(filter, i);
+			if (isfiltered == -1) {
+				PyErr_SetString(PyExc_TypeError,
+						"unable to check filter");
+				goto bail;
+			}
+
+			if (isfiltered) {
+				nothead[i] = 1;
+				continue;
+			}
+		}
+
+		/* an unfiltered rev disqualifies both parents as heads */
+		if (index_get_parents(self, i, parents, (int)len - 1) < 0)
+			goto bail;
+		for (j = 0; j < 2; j++) {
+			if (parents[j] >= 0)
+				nothead[parents[j]] = 1;
+		}
+	}
+
+	for (i = 0; i < len; i++) {
+		PyObject *head;
+
+		if (nothead[i])
+			continue;
+		head = PyInt_FromSsize_t(i);
+		if (head == NULL || PyList_Append(heads, head) == -1) {
+			Py_XDECREF(head);
+			goto bail;
+		}
+	}
+
+done:
+	self->headrevs = heads;
+	Py_XDECREF(filter);
+	free(nothead);
+	return list_copy(self->headrevs);
+bail:
+	Py_XDECREF(filter);
+	Py_XDECREF(heads);
+	free(nothead);
+	return NULL;
+}
+
+/**
+ * Obtain the base revision index entry.
+ *
+ * Callers must ensure that rev >= 0 or illegal memory access may occur.
+ */
+static inline int index_baserev(indexObject *self, int rev)
+{
+	const char *data;
+
+	if (rev >= self->length - 1) {
+		/* in-memory entry: base rev is item 3 of the 8-tuple */
+		PyObject *tuple = PyList_GET_ITEM(self->added,
+						  rev - self->length + 1);
+		return (int)PyInt_AS_LONG(PyTuple_GET_ITEM(tuple, 3));
+	}
+
+	/* on-disk entry: base rev is the 32-bit field at offset 16 */
+	data = index_deref(self, rev);
+	if (data == NULL)
+		return -2;
+	return getbe32(data + 16);
+}
+
+/*
+ * index.deltachain(rev, stoprev, generaldelta) -> (chain, stopped).
+ *
+ * Walk the delta chain of rev backwards until its base revision (or
+ * stoprev, which may be None) is reached.  Returns the chain as a
+ * list in oldest-first order, plus a bool telling whether the walk
+ * ended because stoprev was hit.
+ */
+static PyObject *index_deltachain(indexObject *self, PyObject *args)
+{
+	int rev, generaldelta;
+	PyObject *stoparg;
+	int stoprev, iterrev, baserev = -1;
+	int stopped;
+	PyObject *chain = NULL, *result = NULL;
+	const Py_ssize_t length = index_length(self);
+
+	if (!PyArg_ParseTuple(args, "iOi", &rev, &stoparg, &generaldelta)) {
+		return NULL;
+	}
+
+	/* stoprev may be an int or None (-2 can never match a rev) */
+	if (PyInt_Check(stoparg)) {
+		stoprev = (int)PyInt_AsLong(stoparg);
+		if (stoprev == -1 && PyErr_Occurred()) {
+			return NULL;
+		}
+	}
+	else if (stoparg == Py_None) {
+		stoprev = -2;
+	}
+	else {
+		PyErr_SetString(PyExc_ValueError,
+				"stoprev must be integer or None");
+		return NULL;
+	}
+
+	if (rev < 0 || rev >= length - 1) {
+		PyErr_SetString(PyExc_ValueError, "revlog index out of range");
+		return NULL;
+	}
+
+	chain = PyList_New(0);
+	if (chain == NULL) {
+		return NULL;
+	}
+
+	baserev = index_baserev(self, rev);
+
+	/* This should never happen. */
+	if (baserev <= -2) {
+		/* Error should be set by index_deref() */
+		assert(PyErr_Occurred());
+		goto bail;
+	}
+
+	iterrev = rev;
+
+	while (iterrev != baserev && iterrev != stoprev) {
+		PyObject *value = PyInt_FromLong(iterrev);
+		if (value == NULL) {
+			goto bail;
+		}
+		if (PyList_Append(chain, value)) {
+			Py_DECREF(value);
+			goto bail;
+		}
+		Py_DECREF(value);
+
+		/* generaldelta chains jump to the base; otherwise the
+		 * delta is always against the previous rev */
+		if (generaldelta) {
+			iterrev = baserev;
+		}
+		else {
+			iterrev--;
+		}
+
+		if (iterrev < 0) {
+			break;
+		}
+
+		if (iterrev >= length - 1) {
+			PyErr_SetString(PyExc_IndexError, "revision outside index");
+			/* goto bail (not a bare return) so chain is freed */
+			goto bail;
+		}
+
+		baserev = index_baserev(self, iterrev);
+
+		/* This should never happen. */
+		if (baserev <= -2) {
+			/* Error should be set by index_deref() */
+			assert(PyErr_Occurred());
+			goto bail;
+		}
+	}
+
+	if (iterrev == stoprev) {
+		stopped = 1;
+	}
+	else {
+		/* the base itself belongs to the chain */
+		PyObject *value = PyInt_FromLong(iterrev);
+		if (value == NULL) {
+			goto bail;
+		}
+		if (PyList_Append(chain, value)) {
+			Py_DECREF(value);
+			goto bail;
+		}
+		Py_DECREF(value);
+
+		stopped = 0;
+	}
+
+	if (PyList_Reverse(chain)) {
+		goto bail;
+	}
+
+	result = Py_BuildValue("OO", chain, stopped ? Py_True : Py_False);
+	Py_DECREF(chain);
+	return result;
+
+bail:
+	Py_DECREF(chain);
+	return NULL;
+}
+
+/* Extract the level'th nybble of node (high nybble first per byte). */
+static inline int nt_level(const char *node, Py_ssize_t level)
+{
+	int v = node[level >> 1];
+	return ((level & 1) ? v : v >> 4) & 0xf;
+}
+
+/*
+ * Return values:
+ *
+ * -4: match is ambiguous (multiple candidates)
+ * -2: not found
+ * rest: valid rev
+ */
+static int nt_find(indexObject *self, const char *node, Py_ssize_t nodelen,
+		   int hex)
+{
+	/* node is either binary (nybbles via nt_level) or hex digits */
+	int (*getnybble)(const char *, Py_ssize_t) = hex ? hexdigit : nt_level;
+	int level, maxlevel, off;
+
+	/* fast path for the null node (leading \0 check avoids most
+	 * memcmps) */
+	if (nodelen == 20 && node[0] == '\0' && memcmp(node, nullid, 20) == 0)
+		return -1;
+
+	if (self->nt == NULL)
+		return -2;
+
+	/* maxlevel counts nybbles: 2 per binary byte, 1 per hex digit */
+	if (hex)
+		maxlevel = nodelen > 40 ? 40 : (int)nodelen;
+	else
+		maxlevel = nodelen > 20 ? 40 : ((int)nodelen * 2);
+
+	for (level = off = 0; level < maxlevel; level++) {
+		int k = getnybble(node, level);
+		nodetree *n = &self->nt[off];
+		int v = n->children[k];
+
+		if (v < 0) {
+			/* leaf: decode the rev and verify the remaining
+			 * nybbles of the prefix actually match its node */
+			const char *n;
+			Py_ssize_t i;
+
+			v = -(v + 1);
+			n = index_node(self, v);
+			if (n == NULL)
+				return -2;
+			for (i = level; i < maxlevel; i++)
+				if (getnybble(node, i) != nt_level(n, i))
+					return -2;
+			return v;
+		}
+		if (v == 0)
+			return -2;
+		/* interior node: descend */
+		off = v;
+	}
+	/* multiple matches against an ambiguous prefix */
+	return -4;
+}
+
+/*
+ * Allocate a fresh trie node, doubling the node pool when it is full.
+ * Returns the index of the new node, or -1 with MemoryError set.
+ */
+static int nt_new(indexObject *self)
+{
+	if (self->ntlength == self->ntcapacity) {
+		nodetree *newnt;
+		if (self->ntcapacity >= INT_MAX / (sizeof(nodetree) * 2)) {
+			PyErr_SetString(PyExc_MemoryError,
+					"overflow in nt_new");
+			return -1;
+		}
+		/* realloc into a temporary: assigning straight to
+		 * self->nt would leak the old block and leave the trie
+		 * pointer NULL (with ntlength non-zero) on failure */
+		newnt = realloc(self->nt,
+				self->ntcapacity * 2 * sizeof(nodetree));
+		if (newnt == NULL) {
+			PyErr_SetString(PyExc_MemoryError, "out of memory");
+			return -1;
+		}
+		self->nt = newnt;
+		self->ntcapacity *= 2;
+		/* zero the newly added half of the pool */
+		memset(&self->nt[self->ntlength], 0,
+		       sizeof(nodetree) * (self->ntcapacity - self->ntlength));
+	}
+	return self->ntlength++;
+}
+
+/*
+ * Insert node -> rev into the trie, splitting a leaf into an interior
+ * node when two nodes share a prefix.  Returns 0 on success, -1 on
+ * allocation failure (or a full-depth collision, which cannot happen
+ * for distinct 20-byte nodes).
+ */
+static int nt_insert(indexObject *self, const char *node, int rev)
+{
+	int level = 0;
+	int off = 0;
+
+	while (level < 40) {
+		int k = nt_level(node, level);
+		nodetree *n;
+		int v;
+
+		n = &self->nt[off];
+		v = n->children[k];
+
+		/* empty slot: store the rev as a leaf */
+		if (v == 0) {
+			n->children[k] = -rev - 1;
+			return 0;
+		}
+		if (v < 0) {
+			/* occupied leaf: overwrite if it is the same
+			 * node, otherwise split */
+			const char *oldnode = index_node(self, -(v + 1));
+			int noff;
+
+			if (!oldnode || !memcmp(oldnode, node, 20)) {
+				n->children[k] = -rev - 1;
+				return 0;
+			}
+			noff = nt_new(self);
+			if (noff == -1)
+				return -1;
+			/* self->nt may have been changed by realloc */
+			self->nt[off].children[k] = noff;
+			off = noff;
+			n = &self->nt[off];
+			/* push the old leaf one level down, then loop to
+			 * re-insert the new node at the same level */
+			n->children[nt_level(oldnode, ++level)] = v;
+			if (level > self->ntdepth)
+				self->ntdepth = level;
+			self->ntsplits += 1;
+		} else {
+			/* interior node: descend */
+			level += 1;
+			off = v;
+		}
+	}
+
+	return -1;
+}
+
+/*
+ * Lazily allocate the trie and seed it with the null node (stored
+ * under the INT_MAX marker rev).  Idempotent; returns 0 or -1.
+ */
+static int nt_init(indexObject *self)
+{
+	if (self->nt == NULL) {
+		if ((size_t)self->raw_length > INT_MAX / sizeof(nodetree)) {
+			PyErr_SetString(PyExc_ValueError, "overflow in nt_init");
+			return -1;
+		}
+		/* heuristic initial capacity: half the revs, minimum 4 */
+		self->ntcapacity = self->raw_length < 4
+			? 4 : (int)self->raw_length / 2;
+
+		self->nt = calloc(self->ntcapacity, sizeof(nodetree));
+		if (self->nt == NULL) {
+			PyErr_NoMemory();
+			return -1;
+		}
+		self->ntlength = 1;
+		self->ntrev = (int)index_length(self) - 1;
+		self->ntlookups = 1;
+		self->ntmisses = 0;
+		if (nt_insert(self, nullid, INT_MAX) == -1)
+			return -1;
+	}
+	return 0;
+}
+
+/*
+ * Return values:
+ *
+ * -3: error (exception set)
+ * -2: not found (no exception set)
+ * rest: valid rev
+ */
+static int index_find_node(indexObject *self,
+			   const char *node, Py_ssize_t nodelen)
+{
+	int rev;
+
+	self->ntlookups++;
+	rev = nt_find(self, node, nodelen, 0);
+	if (rev >= -1)
+		return rev;
+
+	if (nt_init(self) == -1)
+		return -3;
+
+	/*
+	 * For the first handful of lookups, we scan the entire index,
+	 * and cache only the matching nodes. This optimizes for cases
+	 * like "hg tip", where only a few nodes are accessed.
+	 *
+	 * After that, we cache every node we visit, using a single
+	 * scan amortized over multiple lookups. This gives the best
+	 * bulk performance, e.g. for "hg log".
+	 */
+	if (self->ntmisses++ < 4) {
+		for (rev = self->ntrev - 1; rev >= 0; rev--) {
+			const char *n = index_node(self, rev);
+			if (n == NULL)
+				return -2;
+			/* compare at most 20 bytes (node may be a prefix) */
+			if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
+				if (nt_insert(self, n, rev) == -1)
+					return -3;
+				break;
+			}
+		}
+	} else {
+		for (rev = self->ntrev - 1; rev >= 0; rev--) {
+			const char *n = index_node(self, rev);
+			if (n == NULL) {
+				/* remember where the scan stopped so a
+				 * later lookup can resume from there */
+				self->ntrev = rev + 1;
+				return -2;
+			}
+			if (nt_insert(self, n, rev) == -1) {
+				self->ntrev = rev + 1;
+				return -3;
+			}
+			if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
+				break;
+			}
+		}
+		self->ntrev = rev;
+	}
+
+	if (rev >= 0)
+		return rev;
+	return -2;
+}
+
+/* Set mercurial.error.RevlogError as the pending exception (falls
+ * back to whatever error the import machinery raised on failure). */
+static void raise_revlog_error(void)
+{
+	PyObject *mod = NULL, *dict = NULL, *errclass = NULL;
+
+	mod = PyImport_ImportModule("mercurial.error");
+	if (mod == NULL) {
+		goto cleanup;
+	}
+
+	/* PyModule_GetDict returns a borrowed reference */
+	dict = PyModule_GetDict(mod);
+	if (dict == NULL) {
+		goto cleanup;
+	}
+	Py_INCREF(dict);
+
+	errclass = PyDict_GetItemString(dict, "RevlogError");
+	if (errclass == NULL) {
+		PyErr_SetString(PyExc_SystemError,
+				"could not find RevlogError");
+		goto cleanup;
+	}
+
+	/* value of exception is ignored by callers */
+	PyErr_SetString(errclass, "RevlogError");
+
+cleanup:
+	Py_XDECREF(dict);
+	Py_XDECREF(mod);
+}
+
+/*
+ * index[key]: integer key -> entry tuple; 20-byte node key -> rev.
+ * Unknown node raises RevlogError; a lookup error (-3) propagates
+ * the exception already set by index_find_node.
+ */
+static PyObject *index_getitem(indexObject *self, PyObject *value)
+{
+	char *node;
+	Py_ssize_t nodelen;
+	int rev;
+
+	if (PyInt_Check(value))
+		return index_get(self, PyInt_AS_LONG(value));
+
+	if (node_check(value, &node, &nodelen) == -1)
+		return NULL;
+	rev = index_find_node(self, node, nodelen);
+	if (rev >= -1)
+		return PyInt_FromLong(rev);
+	if (rev == -2)
+		raise_revlog_error();
+	return NULL;
+}
+
+/*
+ * Look up a hex prefix in the trie after fully populating it.
+ * Returns a rev, -1 (nullid), -2 (not found), -3 (error, exception
+ * set) or -4 (ambiguous prefix).
+ */
+static int nt_partialmatch(indexObject *self, const char *node,
+			   Py_ssize_t nodelen)
+{
+	int rev;
+
+	if (nt_init(self) == -1)
+		return -3;
+
+	if (self->ntrev > 0) {
+		/* ensure that the radix tree is fully populated */
+		for (rev = self->ntrev - 1; rev >= 0; rev--) {
+			const char *n = index_node(self, rev);
+			if (n == NULL)
+				return -2;
+			if (nt_insert(self, n, rev) == -1)
+				return -3;
+		}
+		self->ntrev = rev;
+	}
+
+	/* hex=1: node is a string of hex digits, not raw bytes */
+	return nt_find(self, node, nodelen, 1);
+}
+
+/*
+ * index.partialmatch(hexprefix) -> 20-byte node, None if not found,
+ * or raises (RevlogError on ambiguity, ValueError on bad length).
+ */
+static PyObject *index_partialmatch(indexObject *self, PyObject *args)
+{
+	const char *fullnode;
+	int nodelen;
+	char *node;
+	int rev, i;
+
+	if (!PyArg_ParseTuple(args, "s#", &node, &nodelen))
+		return NULL;
+
+	if (nodelen < 4) {
+		PyErr_SetString(PyExc_ValueError, "key too short");
+		return NULL;
+	}
+
+	if (nodelen > 40) {
+		PyErr_SetString(PyExc_ValueError, "key too long");
+		return NULL;
+	}
+
+	/* hexdigit sets an exception on a non-hex character; probe the
+	 * whole string first and treat any failure as "no match" */
+	for (i = 0; i < nodelen; i++)
+		hexdigit(node, i);
+	if (PyErr_Occurred()) {
+		/* input contains non-hex characters */
+		PyErr_Clear();
+		Py_RETURN_NONE;
+	}
+
+	rev = nt_partialmatch(self, node, nodelen);
+
+	switch (rev) {
+	case -4:
+		raise_revlog_error();
+		/* fall through: return NULL with the exception set */
+	case -3:
+		return NULL;
+	case -2:
+		Py_RETURN_NONE;
+	case -1:
+		return PyBytes_FromStringAndSize(nullid, 20);
+	}
+
+	fullnode = index_node(self, rev);
+	if (fullnode == NULL) {
+		PyErr_Format(PyExc_IndexError,
+			     "could not access rev %d", rev);
+		return NULL;
+	}
+	return PyBytes_FromStringAndSize(fullnode, 20);
+}
+
+/* index.get(node) -> rev number, or None when the node is unknown. */
+static PyObject *index_m_get(indexObject *self, PyObject *args)
+{
+	Py_ssize_t nodelen;
+	PyObject *val;
+	char *node;
+	int rev;
+
+	if (!PyArg_ParseTuple(args, "O", &val))
+		return NULL;
+	if (node_check(val, &node, &nodelen) == -1)
+		return NULL;
+
+	rev = index_find_node(self, node, nodelen);
+	switch (rev) {
+	case -3:
+		return NULL;
+	case -2:
+		Py_RETURN_NONE;
+	default:
+		return PyInt_FromLong(rev);
+	}
+}
+
+/*
+ * "rev in index" / "node in index": 1 if present, 0 if not, -1 on
+ * error.  Integer keys just get a range check; nullrev counts.
+ */
+static int index_contains(indexObject *self, PyObject *value)
+{
+	char *node;
+	Py_ssize_t nodelen;
+
+	if (PyInt_Check(value)) {
+		long rev = PyInt_AS_LONG(value);
+		return rev >= -1 && rev < index_length(self);
+	}
+
+	if (node_check(value, &node, &nodelen) == -1)
+		return -1;
+
+	switch (index_find_node(self, node, nodelen)) {
+	case -3:
+		return -1;
+	case -2:
+		return 0;
+	default:
+		return 1;
+	}
+}
+
+typedef uint64_t bitmask;
+
+/*
+ * Given a disjoint set of revs, return all candidates for the
+ * greatest common ancestor. In revset notation, this is the set
+ * "heads(::a and ::b and ...)"
+ */
+static PyObject *find_gca_candidates(indexObject *self, const int *revs,
+				     int revcount)
+{
+	/* seen[v] is a bitmask: bit i set means rev v is an ancestor of
+	 * revs[i]; allseen means ancestor of every input; the poison
+	 * bit marks revs below an already-emitted candidate */
+	const bitmask allseen = (1ull << revcount) - 1;
+	const bitmask poison = 1ull << revcount;
+	PyObject *gca = PyList_New(0);
+	int i, v, interesting;
+	int maxrev = -1;
+	bitmask sp;
+	bitmask *seen;
+
+	if (gca == NULL)
+		return PyErr_NoMemory();
+
+	for (i = 0; i < revcount; i++) {
+		if (revs[i] > maxrev)
+			maxrev = revs[i];
+	}
+
+	seen = calloc(sizeof(*seen), maxrev + 1);
+	if (seen == NULL) {
+		Py_DECREF(gca);
+		return PyErr_NoMemory();
+	}
+
+	for (i = 0; i < revcount; i++)
+		seen[revs[i]] = 1ull << i;
+
+	/* number of distinct live masks still being propagated */
+	interesting = revcount;
+
+	/* sweep from the highest rev down, merging masks into parents */
+	for (v = maxrev; v >= 0 && interesting; v--) {
+		bitmask sv = seen[v];
+		int parents[2];
+
+		if (!sv)
+			continue;
+
+		if (sv < poison) {
+			interesting -= 1;
+			if (sv == allseen) {
+				/* v is an ancestor of every input: a
+				 * GCA candidate */
+				PyObject *obj = PyInt_FromLong(v);
+				if (obj == NULL)
+					goto bail;
+				if (PyList_Append(gca, obj) == -1) {
+					Py_DECREF(obj);
+					goto bail;
+				}
+				sv |= poison;
+				for (i = 0; i < revcount; i++) {
+					/* an input rev itself is the
+					 * unique answer: stop early */
+					if (revs[i] == v)
+						goto done;
+				}
+			}
+		}
+		if (index_get_parents(self, v, parents, maxrev) < 0)
+			goto bail;
+
+		for (i = 0; i < 2; i++) {
+			int p = parents[i];
+			if (p == -1)
+				continue;
+			sp = seen[p];
+			if (sv < poison) {
+				if (sp == 0) {
+					seen[p] = sv;
+					interesting++;
+				}
+				else if (sp != sv)
+					seen[p] |= sv;
+			} else {
+				/* poison overwrites: descendants of a
+				 * candidate are no longer interesting */
+				if (sp && sp < poison)
+					interesting--;
+				seen[p] = sv;
+			}
+		}
+	}
+
+done:
+	free(seen);
+	return gca;
+bail:
+	free(seen);
+	Py_XDECREF(gca);
+	return NULL;
+}
+
+/*
+ * Given a disjoint set of revs, return the subset with the longest
+ * path to the root.
+ */
+static PyObject *find_deepest(indexObject *self, PyObject *revs)
+{
+	const Py_ssize_t revcount = PyList_GET_SIZE(revs);
+	/* capacity bounds the bitset width; interesting[] has 2^revcount
+	 * entries so this must stay small */
+	static const Py_ssize_t capacity = 24;
+	int *depth, *interesting = NULL;
+	int i, j, v, ninteresting;
+	PyObject *dict = NULL, *keys = NULL;
+	long *seen = NULL;
+	int maxrev = -1;
+	long final;
+
+	if (revcount > capacity) {
+		PyErr_Format(PyExc_OverflowError,
+			     "bitset size (%ld) > capacity (%ld)",
+			     (long)revcount, (long)capacity);
+		return NULL;
+	}
+
+	for (i = 0; i < revcount; i++) {
+		int n = (int)PyInt_AsLong(PyList_GET_ITEM(revs, i));
+		if (n > maxrev)
+			maxrev = n;
+	}
+
+	depth = calloc(sizeof(*depth), maxrev + 1);
+	if (depth == NULL)
+		return PyErr_NoMemory();
+
+	seen = calloc(sizeof(*seen), maxrev + 1);
+	if (seen == NULL) {
+		PyErr_NoMemory();
+		goto bail;
+	}
+
+	interesting = calloc(sizeof(*interesting), 1 << revcount);
+	if (interesting == NULL) {
+		PyErr_NoMemory();
+		goto bail;
+	}
+
+	if (PyList_Sort(revs) == -1)
+		goto bail;
+
+	for (i = 0; i < revcount; i++) {
+		int n = (int)PyInt_AsLong(PyList_GET_ITEM(revs, i));
+		long b = 1l << i;
+		depth[n] = 1;
+		seen[n] = b;
+		interesting[b] = 1;
+	}
+
+	/* invariant: ninteresting is the number of non-zero entries in
+	 * interesting. */
+	ninteresting = (int)revcount;
+
+	/* Walk from the highest rev down, pushing depth and reachability
+	 * bitsets to parents until only one interesting bitset remains. */
+	for (v = maxrev; v >= 0 && ninteresting > 1; v--) {
+		int dv = depth[v];
+		int parents[2];
+		long sv;
+
+		if (dv == 0)
+			continue;
+
+		sv = seen[v];
+		if (index_get_parents(self, v, parents, maxrev) < 0)
+			goto bail;
+
+		for (i = 0; i < 2; i++) {
+			int p = parents[i];
+			long sp;
+			int dp;
+
+			if (p == -1)
+				continue;
+
+			dp = depth[p];
+			sp = seen[p];
+			if (dp <= dv) {
+				/* v dominates p's previous depth: p takes
+				 * over v's bitset */
+				depth[p] = dv + 1;
+				if (sp != sv) {
+					interesting[sv] += 1;
+					seen[p] = sv;
+					if (sp) {
+						interesting[sp] -= 1;
+						if (interesting[sp] == 0)
+							ninteresting -= 1;
+					}
+				}
+			}
+			else if (dv == dp - 1) {
+				/* equal depth paths merge their bitsets */
+				long nsp = sp | sv;
+				if (nsp == sp)
+					continue;
+				seen[p] = nsp;
+				interesting[sp] -= 1;
+				if (interesting[sp] == 0)
+					ninteresting -= 1;
+				if (interesting[nsp] == 0)
+					ninteresting += 1;
+				interesting[nsp] += 1;
+			}
+		}
+		interesting[sv] -= 1;
+		if (interesting[sv] == 0)
+			ninteresting -= 1;
+	}
+
+	/* NOTE(review): the scan bound "2 << revcount" overshoots the
+	 * "1 << revcount" entries allocated for interesting[]; the j > 0
+	 * guard appears to stop the loop before any out-of-bounds read as
+	 * long as the ninteresting invariant holds — confirm. */
+	final = 0;
+	j = ninteresting;
+	for (i = 0; i < (int)(2 << revcount) && j > 0; i++) {
+		if (interesting[i] == 0)
+			continue;
+		final |= i;
+		j -= 1;
+	}
+	if (final == 0) {
+		keys = PyList_New(0);
+		goto bail;
+	}
+
+	dict = PyDict_New();
+	if (dict == NULL)
+		goto bail;
+
+	/* final's set bits select which of the (sorted) input revs win */
+	for (i = 0; i < revcount; i++) {
+		PyObject *key;
+
+		if ((final & (1 << i)) == 0)
+			continue;
+
+		key = PyList_GET_ITEM(revs, i);
+		Py_INCREF(key);
+		Py_INCREF(Py_None);
+		if (PyDict_SetItem(dict, key, Py_None) == -1) {
+			Py_DECREF(key);
+			Py_DECREF(Py_None);
+			goto bail;
+		}
+	}
+
+	keys = PyDict_Keys(dict);
+
+bail:
+	free(depth);
+	free(seen);
+	free(interesting);
+	Py_XDECREF(dict);
+
+	return keys;
+}
+
+/*
+ * Given a (possibly overlapping) set of revs, return all the
+ * common ancestors heads: heads(::args[0] and ::a[1] and ...)
+ */
+static PyObject *index_commonancestorsheads(indexObject *self, PyObject *args)
+{
+	PyObject *ret = NULL;
+	Py_ssize_t argcount, i, len;
+	bitmask repeat = 0;
+	int revcount = 0;
+	int *revs;
+
+	argcount = PySequence_Length(args);
+	revs = PyMem_Malloc(argcount * sizeof(*revs));
+	if (argcount > 0 && revs == NULL)
+		return PyErr_NoMemory();
+	len = index_length(self) - 1;
+
+	for (i = 0; i < argcount; i++) {
+		static const int capacity = 24;
+		PyObject *obj = PySequence_GetItem(args, i);
+		bitmask x;
+		long val;
+
+		if (!PyInt_Check(obj)) {
+			PyErr_SetString(PyExc_TypeError,
+					"arguments must all be ints");
+			Py_DECREF(obj);
+			goto bail;
+		}
+		val = PyInt_AsLong(obj);
+		Py_DECREF(obj);
+		/* nullrev has no ancestors, so the answer is empty;
+		 * NOTE(review): PyInt_AsLong also returns -1 on error, which
+		 * takes the same (harmless) empty-list path — confirm. */
+		if (val == -1) {
+			ret = PyList_New(0);
+			goto done;
+		}
+		if (val < 0 || val >= len) {
+			PyErr_SetString(PyExc_IndexError,
+					"index out of range");
+			goto bail;
+		}
+		/* this cheesy bloom filter lets us avoid some more
+		 * expensive duplicate checks in the common set-is-disjoint
+		 * case */
+		x = 1ull << (val & 0x3f);
+		if (repeat & x) {
+			int k;
+			for (k = 0; k < revcount; k++) {
+				if (val == revs[k])
+					goto duplicate;
+			}
+		}
+		else repeat |= x;
+		if (revcount >= capacity) {
+			PyErr_Format(PyExc_OverflowError,
+				     "bitset size (%d) > capacity (%d)",
+				     revcount, capacity);
+			goto bail;
+		}
+		revs[revcount++] = (int)val;
+	duplicate:;
+	}
+
+	if (revcount == 0) {
+		ret = PyList_New(0);
+		goto done;
+	}
+	/* a single rev is trivially its own gca head */
+	if (revcount == 1) {
+		PyObject *obj;
+		ret = PyList_New(1);
+		if (ret == NULL)
+			goto bail;
+		obj = PyInt_FromLong(revs[0]);
+		if (obj == NULL)
+			goto bail;
+		PyList_SET_ITEM(ret, 0, obj);
+		goto done;
+	}
+
+	ret = find_gca_candidates(self, revs, revcount);
+	if (ret == NULL)
+		goto bail;
+
+done:
+	PyMem_Free(revs);
+	return ret;
+
+bail:
+	PyMem_Free(revs);
+	Py_XDECREF(ret);
+	return NULL;
+}
+
+/*
+ * Given a (possibly overlapping) set of revs, return the greatest
+ * common ancestors: those with the longest path to the root.
+ */
+static PyObject *index_ancestors(indexObject *self, PyObject *args)
+{
+	PyObject *ret;
+	PyObject *gca = index_commonancestorsheads(self, args);
+	if (gca == NULL)
+		return NULL;
+
+	/* zero or one candidate: nothing to disambiguate by depth */
+	if (PyList_GET_SIZE(gca) <= 1) {
+		return gca;
+	}
+
+	ret = find_deepest(self, gca);
+	Py_DECREF(gca);
+	return ret;
+}
+
+/*
+ * Invalidate any trie entries introduced by added revs.
+ */
+static void nt_invalidate_added(indexObject *self, Py_ssize_t start)
+{
+	Py_ssize_t i, len = PyList_GET_SIZE(self->added);
+
+	for (i = start; i < len; i++) {
+		PyObject *tuple = PyList_GET_ITEM(self->added, i);
+		/* tuple slot 7 holds the entry's node id */
+		PyObject *node = PyTuple_GET_ITEM(tuple, 7);
+
+		/* rev -1 removes the node from the trie */
+		nt_insert(self, PyBytes_AS_STRING(node), -1);
+	}
+
+	/* invalidating everything added: drop the list itself too */
+	if (start == 0)
+		Py_CLEAR(self->added);
+}
+
+/*
+ * Delete a numeric range of revs, which must be at the end of the
+ * range, but exclude the sentinel nullid entry.
+ */
+static int index_slice_del(indexObject *self, PyObject *item)
+{
+	Py_ssize_t start, stop, step, slicelength;
+	Py_ssize_t length = index_length(self);
+	int ret = 0;
+
+/* Argument changed from PySliceObject* to PyObject* in Python 3. */
+#ifdef IS_PY3K
+	if (PySlice_GetIndicesEx(item, length,
+#else
+	if (PySlice_GetIndicesEx((PySliceObject*)item, length,
+#endif
+				 &start, &stop, &step, &slicelength) < 0)
+		return -1;
+
+	if (slicelength <= 0)
+		return 0;
+
+	if ((step < 0 && start < stop) || (step > 0 && start > stop))
+		stop = start;
+
+	/* normalize a reversed slice into a forward one */
+	if (step < 0) {
+		stop = start + 1;
+		start = stop + step*(slicelength - 1) - 1;
+		step = -step;
+	}
+
+	if (step != 1) {
+		PyErr_SetString(PyExc_ValueError,
+				"revlog index delete requires step size of 1");
+		return -1;
+	}
+
+	/* deletion must run to the end (excluding the nullid sentinel) */
+	if (stop != length - 1) {
+		PyErr_SetString(PyExc_IndexError,
+				"revlog index deletion indices are invalid");
+		return -1;
+	}
+
+	/* Deletion reaches into the on-disk portion: invalidate trie
+	 * entries for the truncated revs and drop their cached tuples. */
+	if (start < self->length - 1) {
+		if (self->nt) {
+			Py_ssize_t i;
+
+			for (i = start + 1; i < self->length - 1; i++) {
+				const char *node = index_node(self, i);
+
+				if (node)
+					nt_insert(self, node, -1);
+			}
+			if (self->added)
+				nt_invalidate_added(self, 0);
+			if (self->ntrev > start)
+				self->ntrev = (int)start;
+		}
+		self->length = start + 1;
+		if (start < self->raw_length) {
+			if (self->cache) {
+				Py_ssize_t i;
+				for (i = start; i < self->raw_length; i++)
+					Py_CLEAR(self->cache[i]);
+			}
+			self->raw_length = start;
+		}
+		goto done;
+	}
+
+	/* Only appended (in-memory) revs are being deleted. */
+	if (self->nt) {
+		nt_invalidate_added(self, start - self->length + 1);
+		if (self->ntrev > start)
+			self->ntrev = (int)start;
+	}
+	if (self->added)
+		ret = PyList_SetSlice(self->added, start - self->length + 1,
+				      PyList_GET_SIZE(self->added), NULL);
+done:
+	Py_CLEAR(self->headrevs);
+	return ret;
+}
+
+/*
+ * Supported ops:
+ *
+ * slice deletion
+ * string assignment (extend node->rev mapping)
+ * string deletion (shrink node->rev mapping)
+ */
+static int index_assign_subscript(indexObject *self, PyObject *item,
+				  PyObject *value)
+{
+	char *node;
+	Py_ssize_t nodelen;
+	long rev;
+
+	/* "del index[a:b]" */
+	if (PySlice_Check(item) && value == NULL)
+		return index_slice_del(self, item);
+
+	if (node_check(item, &node, &nodelen) == -1)
+		return -1;
+
+	/* "del index[node]": only meaningful once the trie exists */
+	if (value == NULL)
+		return self->nt ? nt_insert(self, node, -1) : 0;
+	rev = PyInt_AsLong(value);
+	if (rev > INT_MAX || rev < 0) {
+		if (!PyErr_Occurred())
+			PyErr_SetString(PyExc_ValueError, "rev out of range");
+		return -1;
+	}
+
+	/* "index[node] = rev": extend the node->rev mapping */
+	if (nt_init(self) == -1)
+		return -1;
+	return nt_insert(self, node, (int)rev);
+}
+
+/*
+ * Find all RevlogNG entries in an index that has inline data. Update
+ * the optional "offsets" table with those entries.
+ */
+static Py_ssize_t inline_scan(indexObject *self, const char **offsets)
+{
+	const char *data = (const char *)self->buf.buf;
+	Py_ssize_t pos = 0;
+	Py_ssize_t end = self->buf.len;
+	long incr = v1_hdrsize;
+	Py_ssize_t len = 0;
+
+	/* each record is a fixed header followed by its inline data;
+	 * pos >= 0 guards against overflow from a corrupt comp_len */
+	while (pos + v1_hdrsize <= end && pos >= 0) {
+		uint32_t comp_len;
+		/* 3rd element of header is length of compressed inline data */
+		comp_len = getbe32(data + pos + 8);
+		incr = v1_hdrsize + comp_len;
+		if (offsets)
+			offsets[len] = data + pos;
+		len++;
+		pos += incr;
+	}
+
+	/* a well-formed index ends exactly on a record boundary */
+	if (pos != end) {
+		if (!PyErr_Occurred())
+			PyErr_SetString(PyExc_ValueError, "corrupt index file");
+		return -1;
+	}
+
+	return len;
+}
+
+/*
+ * tp_init: parse (data, inlined) and size the index.  Returns 0 on
+ * success, -1 with an exception set on failure.
+ */
+static int index_init(indexObject *self, PyObject *args)
+{
+	PyObject *data_obj, *inlined_obj;
+	Py_ssize_t size;
+
+	/* Initialize before argument-checking to avoid index_dealloc() crash. */
+	self->raw_length = 0;
+	self->added = NULL;
+	self->cache = NULL;
+	self->data = NULL;
+	memset(&self->buf, 0, sizeof(self->buf));
+	self->headrevs = NULL;
+	self->filteredrevs = Py_None;
+	Py_INCREF(Py_None);
+	self->nt = NULL;
+	self->offsets = NULL;
+
+	if (!PyArg_ParseTuple(args, "OO", &data_obj, &inlined_obj))
+		return -1;
+	if (!PyObject_CheckBuffer(data_obj)) {
+		PyErr_SetString(PyExc_TypeError,
+				"data does not support buffer interface");
+		return -1;
+	}
+
+	if (PyObject_GetBuffer(data_obj, &self->buf, PyBUF_SIMPLE) == -1)
+		return -1;
+	size = self->buf.len;
+
+	self->inlined = inlined_obj && PyObject_IsTrue(inlined_obj);
+	self->data = data_obj;
+
+	self->ntlength = self->ntcapacity = 0;
+	self->ntdepth = self->ntsplits = 0;
+	self->ntlookups = self->ntmisses = 0;
+	self->ntrev = -1;
+	Py_INCREF(self->data);
+
+	if (self->inlined) {
+		/* inline data forces a scan to count the records */
+		Py_ssize_t len = inline_scan(self, NULL);
+		if (len == -1)
+			goto bail;
+		self->raw_length = len;
+		self->length = len + 1;
+	} else {
+		if (size % v1_hdrsize) {
+			PyErr_SetString(PyExc_ValueError, "corrupt index file");
+			goto bail;
+		}
+		/* length includes the trailing nullid sentinel entry */
+		self->raw_length = size / v1_hdrsize;
+		self->length = self->raw_length + 1;
+	}
+
+	return 0;
+bail:
+	return -1;
+}
+
+/* "nodemap" getter: the index object doubles as its own nodemap. */
+static PyObject *index_nodemap(indexObject *self)
+{
+	Py_INCREF(self);
+	return (PyObject *)self;
+}
+
+static void index_dealloc(indexObject *self)
+{
+	_index_clearcaches(self);
+	Py_XDECREF(self->filteredrevs);
+	/* release the borrowed buffer before dropping its owner */
+	if (self->buf.buf) {
+		PyBuffer_Release(&self->buf);
+		memset(&self->buf, 0, sizeof(self->buf));
+	}
+	Py_XDECREF(self->data);
+	Py_XDECREF(self->added);
+	PyObject_Del(self);
+}
+
+/* Python type plumbing: sequence/mapping protocols, method table, getset
+ * table, and the "parsers.index" type object itself. */
+static PySequenceMethods index_sequence_methods = {
+	(lenfunc)index_length,   /* sq_length */
+	0,                       /* sq_concat */
+	0,                       /* sq_repeat */
+	(ssizeargfunc)index_get, /* sq_item */
+	0,                       /* sq_slice */
+	0,                       /* sq_ass_item */
+	0,                       /* sq_ass_slice */
+	(objobjproc)index_contains, /* sq_contains */
+};
+
+static PyMappingMethods index_mapping_methods = {
+	(lenfunc)index_length,                 /* mp_length */
+	(binaryfunc)index_getitem,             /* mp_subscript */
+	(objobjargproc)index_assign_subscript, /* mp_ass_subscript */
+};
+
+static PyMethodDef index_methods[] = {
+	{"ancestors", (PyCFunction)index_ancestors, METH_VARARGS,
+	 "return the gca set of the given revs"},
+	{"commonancestorsheads", (PyCFunction)index_commonancestorsheads,
+	  METH_VARARGS,
+	  "return the heads of the common ancestors of the given revs"},
+	{"clearcaches", (PyCFunction)index_clearcaches, METH_NOARGS,
+	 "clear the index caches"},
+	{"get", (PyCFunction)index_m_get, METH_VARARGS,
+	 "get an index entry"},
+	{"computephasesmapsets", (PyCFunction)compute_phases_map_sets,
+			METH_VARARGS, "compute phases"},
+	{"reachableroots2", (PyCFunction)reachableroots2, METH_VARARGS,
+		"reachableroots"},
+	{"headrevs", (PyCFunction)index_headrevs, METH_VARARGS,
+	 "get head revisions"}, /* Can do filtering since 3.2 */
+	{"headrevsfiltered", (PyCFunction)index_headrevs, METH_VARARGS,
+	 "get filtered head revisions"}, /* Can always do filtering */
+	{"deltachain", (PyCFunction)index_deltachain, METH_VARARGS,
+	 "determine revisions with deltas to reconstruct fulltext"},
+	{"insert", (PyCFunction)index_insert, METH_VARARGS,
+	 "insert an index entry"},
+	{"partialmatch", (PyCFunction)index_partialmatch, METH_VARARGS,
+	 "match a potentially ambiguous node ID"},
+	{"stats", (PyCFunction)index_stats, METH_NOARGS,
+	 "stats for the index"},
+	{NULL} /* Sentinel */
+};
+
+static PyGetSetDef index_getset[] = {
+	{"nodemap", (getter)index_nodemap, NULL, "nodemap", NULL},
+	{NULL} /* Sentinel */
+};
+
+static PyTypeObject indexType = {
+	PyVarObject_HEAD_INIT(NULL, 0)
+	"parsers.index",           /* tp_name */
+	sizeof(indexObject),       /* tp_basicsize */
+	0,                         /* tp_itemsize */
+	(destructor)index_dealloc, /* tp_dealloc */
+	0,                         /* tp_print */
+	0,                         /* tp_getattr */
+	0,                         /* tp_setattr */
+	0,                         /* tp_compare */
+	0,                         /* tp_repr */
+	0,                         /* tp_as_number */
+	&index_sequence_methods,   /* tp_as_sequence */
+	&index_mapping_methods,    /* tp_as_mapping */
+	0,                         /* tp_hash */
+	0,                         /* tp_call */
+	0,                         /* tp_str */
+	0,                         /* tp_getattro */
+	0,                         /* tp_setattro */
+	0,                         /* tp_as_buffer */
+	Py_TPFLAGS_DEFAULT,        /* tp_flags */
+	"revlog index",            /* tp_doc */
+	0,                         /* tp_traverse */
+	0,                         /* tp_clear */
+	0,                         /* tp_richcompare */
+	0,                         /* tp_weaklistoffset */
+	0,                         /* tp_iter */
+	0,                         /* tp_iternext */
+	index_methods,             /* tp_methods */
+	0,                         /* tp_members */
+	index_getset,              /* tp_getset */
+	0,                         /* tp_base */
+	0,                         /* tp_dict */
+	0,                         /* tp_descr_get */
+	0,                         /* tp_descr_set */
+	0,                         /* tp_dictoffset */
+	(initproc)index_init,      /* tp_init */
+	0,                         /* tp_alloc */
+};
+
+/*
+ * returns a tuple of the form (index, index, cache) with elements as
+ * follows:
+ *
+ * index: an index object that lazily parses RevlogNG records
+ * cache: if data is inlined, a tuple (0, index_file_content), else None
+ * index_file_content could be a string, or a buffer
+ *
+ * added complications are for backwards compatibility
+ */
+PyObject *parse_index2(PyObject *self, PyObject *args)
+{
+	PyObject *tuple = NULL, *cache = NULL;
+	indexObject *idx;
+	int ret;
+
+	idx = PyObject_New(indexObject, &indexType);
+	if (idx == NULL)
+		goto bail;
+
+	ret = index_init(idx, args);
+	if (ret == -1)
+		goto bail;
+
+	if (idx->inlined) {
+		cache = Py_BuildValue("iO", 0, idx->data);
+		if (cache == NULL)
+			goto bail;
+	} else {
+		cache = Py_None;
+		Py_INCREF(cache);
+	}
+
+	/* "NN" steals both references; on success idx and cache are owned
+	 * by the tuple */
+	tuple = Py_BuildValue("NN", idx, cache);
+	if (!tuple)
+		goto bail;
+	return tuple;
+
+bail:
+	Py_XDECREF(idx);
+	Py_XDECREF(cache);
+	Py_XDECREF(tuple);
+	return NULL;
+}
+
+/* Register the index type and the shared nullid sentinel entry on the
+ * module; a PyType_Ready failure leaves an exception for the caller. */
+void revlog_module_init(PyObject *mod)
+{
+	indexType.tp_new = PyType_GenericNew;
+	if (PyType_Ready(&indexType) < 0)
+		return;
+	Py_INCREF(&indexType);
+	PyModule_AddObject(mod, "index", (PyObject *)&indexType);
+
+	nullentry = Py_BuildValue("iiiiiiis#", 0, 0, 0,
+				  -1, -1, -1, -1, nullid, 20);
+	/* immortal module-lifetime constant: keep it out of GC */
+	if (nullentry)
+		PyObject_GC_UnTrack(nullentry);
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cext/util.h Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,76 @@
+/*
+ util.h - utility functions for interfacing with the various python APIs.
+
+ This software may be used and distributed according to the terms of
+ the GNU General Public License, incorporated herein by reference.
+*/
+
+#ifndef _HG_UTIL_H_
+#define _HG_UTIL_H_
+
+#include "compat.h"
+
+#if PY_MAJOR_VERSION >= 3
+#define IS_PY3K
+#endif
+
+/* In-memory form of one dirstate entry: state byte plus mode/size/mtime. */
+typedef struct {
+	PyObject_HEAD
+	char state;
+	int mode;
+	int size;
+	int mtime;
+} dirstateTupleObject;
+
+extern PyTypeObject dirstateTupleType;
+#define dirstate_tuple_check(op) (Py_TYPE(op) == &dirstateTupleType)
+
+/* This should be kept in sync with normcasespecs in encoding.py. */
+enum normcase_spec {
+	NORMCASE_LOWER = -1,
+	NORMCASE_UPPER = 1,
+	NORMCASE_OTHER = 0
+};
+
+#define MIN(a, b) (((a)<(b))?(a):(b))
+/* VC9 doesn't include bool and lacks stdbool.h based on my searching */
+#if defined(_MSC_VER) || __STDC_VERSION__ < 199901L
+#define true 1
+#define false 0
+typedef unsigned char bool;
+#else
+#include <stdbool.h>
+#endif
+
+/* Maps each ASCII byte to its hex-digit value, or -1 for non-hex bytes. */
+static const int8_t hextable[256] = {
+	-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+	-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+	-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+	0, 1, 2, 3, 4, 5, 6, 7, 8, 9, -1, -1, -1, -1, -1, -1, /* 0-9 */
+	-1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, /* A-F */
+	-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+	-1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, /* a-f */
+	-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+	-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+	-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+	-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+	-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+	-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+	-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+	-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+	-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1
+};
+
+/* Return the value of the hex digit at p[off]; on a non-hex byte, set a
+ * ValueError and return 0 (callers must check PyErr_Occurred()). */
+static inline int hexdigit(const char *p, Py_ssize_t off)
+{
+	int8_t val = hextable[(unsigned char)p[off]];
+
+	if (val >= 0) {
+		return val;
+	}
+
+	PyErr_SetString(PyExc_ValueError, "input contains non-hex character");
+	return 0;
+}
+
+#endif /* _HG_UTIL_H_ */
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cffi/base85.py Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,10 @@
+# base85.py: cffi-slot module re-exporting the pure Python base85 codec
+#
+# Copyright (C) 2009 Brendan Cully <brendan@kublai.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from __future__ import absolute_import
+
+from ..pure.base85 import *
--- a/mercurial/cffi/bdiff.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/cffi/bdiff.py Wed Jul 19 07:51:41 2017 -0500
@@ -1,31 +1,76 @@
+# bdiff.py - CFFI implementation of bdiff.c
+#
+# Copyright 2016 Maciej Fijalkowski <fijall@gmail.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
from __future__ import absolute_import
-import cffi
-import os
+import struct
-ffi = cffi.FFI()
-ffi.set_source("_bdiff_cffi",
- open(os.path.join(os.path.join(os.path.dirname(__file__), '..'),
- 'bdiff.c')).read(), include_dirs=['mercurial'])
-ffi.cdef("""
-struct bdiff_line {
- int hash, n, e;
- ssize_t len;
- const char *l;
-};
+from ..pure.bdiff import *
+from . import _bdiff
+
+ffi = _bdiff.ffi
+lib = _bdiff.lib
-struct bdiff_hunk;
-struct bdiff_hunk {
- int a1, a2, b1, b2;
- struct bdiff_hunk *next;
-};
+def blocks(sa, sb):
+    """Return the matching blocks between sa and sb.
+
+    Thin wrapper over the C bdiff_splitlines/bdiff_diff routines; returns
+    a list of (a1, a2, b1, b2) tuples and raises MemoryError when the C
+    layer fails to allocate or to diff.
+    """
+    a = ffi.new("struct bdiff_line**")
+    b = ffi.new("struct bdiff_line**")
+    ac = ffi.new("char[]", str(sa))
+    bc = ffi.new("char[]", str(sb))
+    l = ffi.new("struct bdiff_hunk*")
+    try:
+        an = lib.bdiff_splitlines(ac, len(sa), a)
+        bn = lib.bdiff_splitlines(bc, len(sb), b)
+        if not a[0] or not b[0]:
+            raise MemoryError
+        count = lib.bdiff_diff(a[0], an, b[0], bn, l)
+        if count < 0:
+            raise MemoryError
+        rl = [None] * count
+        h = l.next
+        i = 0
+        while h:
+            rl[i] = (h.a1, h.a2, h.b1, h.b2)
+            h = h.next
+            i += 1
+    finally:
+        # free the C-side line tables and hunk list in all cases
+        lib.free(a[0])
+        lib.free(b[0])
+        lib.bdiff_freehunks(l.next)
+    return rl
-int bdiff_splitlines(const char *a, ssize_t len, struct bdiff_line **lr);
-int bdiff_diff(struct bdiff_line *a, int an, struct bdiff_line *b, int bn,
- struct bdiff_hunk *base);
-void bdiff_freehunks(struct bdiff_hunk *l);
-void free(void*);
-""")
+def bdiff(sa, sb):
+    """Return a binary patch transforming sa into sb.
+
+    Each emitted fragment is a big-endian (start, end, length) header
+    followed by replacement bytes, matching the bdiff wire format.
+    Raises MemoryError when the C layer fails to allocate or to diff.
+    """
+    a = ffi.new("struct bdiff_line**")
+    b = ffi.new("struct bdiff_line**")
+    ac = ffi.new("char[]", str(sa))
+    bc = ffi.new("char[]", str(sb))
+    l = ffi.new("struct bdiff_hunk*")
+    try:
+        an = lib.bdiff_splitlines(ac, len(sa), a)
+        bn = lib.bdiff_splitlines(bc, len(sb), b)
+        if not a[0] or not b[0]:
+            raise MemoryError
+        count = lib.bdiff_diff(a[0], an, b[0], bn, l)
+        if count < 0:
+            raise MemoryError
+        rl = []
+        h = l.next
+        la = lb = 0
+        while h:
+            # emit the non-matching gap before each matching hunk
+            if h.a1 != la or h.b1 != lb:
+                lgt = (b[0] + h.b1).l - (b[0] + lb).l
+                rl.append(struct.pack(">lll", (a[0] + la).l - a[0].l,
+                                      (a[0] + h.a1).l - a[0].l, lgt))
+                rl.append(str(ffi.buffer((b[0] + lb).l, lgt)))
+            la = h.a2
+            lb = h.b2
+            h = h.next
-if __name__ == '__main__':
-    ffi.compile()
+    finally:
+        # free the C-side line tables and hunk list in all cases
+        lib.free(a[0])
+        lib.free(b[0])
+        lib.bdiff_freehunks(l.next)
+    return "".join(rl)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cffi/bdiffbuild.py Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,31 @@
+# bdiffbuild.py - cffi build script for mercurial.cffi._bdiff
+#
+# Compiles mercurial/bdiff.c into a cffi extension module exposing the
+# splitlines/diff/freehunks entry points; run as a script to build.
+from __future__ import absolute_import
+
+import cffi
+import os
+
+ffi = cffi.FFI()
+ffi.set_source("mercurial.cffi._bdiff",
+    open(os.path.join(os.path.join(os.path.dirname(__file__), '..'),
+        'bdiff.c')).read(), include_dirs=['mercurial'])
+ffi.cdef("""
+struct bdiff_line {
+    int hash, n, e;
+    ssize_t len;
+    const char *l;
+};
+
+struct bdiff_hunk;
+struct bdiff_hunk {
+    int a1, a2, b1, b2;
+    struct bdiff_hunk *next;
+};
+
+int bdiff_splitlines(const char *a, ssize_t len, struct bdiff_line **lr);
+int bdiff_diff(struct bdiff_line *a, int an, struct bdiff_line *b, int bn,
+    struct bdiff_hunk *base);
+void bdiff_freehunks(struct bdiff_hunk *l);
+void free(void*);
+""")
+
+if __name__ == '__main__':
+    ffi.compile()
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cffi/diffhelpers.py Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,10 @@
+# diffhelpers.py - cffi-slot module re-exporting the pure Python diffhelpers
+#
+# Copyright 2009 Matt Mackall <mpm@selenic.com> and others
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from __future__ import absolute_import
+
+from ..pure.diffhelpers import *
--- a/mercurial/cffi/mpatch.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/cffi/mpatch.py Wed Jul 19 07:51:41 2017 -0500
@@ -1,35 +1,48 @@
+# mpatch.py - CFFI implementation of mpatch.c
+#
+# Copyright 2016 Maciej Fijalkowski <fijall@gmail.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
from __future__ import absolute_import
-import cffi
-import os
+from ..pure.mpatch import *
+from ..pure.mpatch import mpatchError # silence pyflakes
+from . import _mpatch
+
+ffi = _mpatch.ffi
+lib = _mpatch.lib
-ffi = cffi.FFI()
-mpatch_c = os.path.join(os.path.join(os.path.dirname(__file__), '..',
- 'mpatch.c'))
-ffi.set_source("_mpatch_cffi", open(mpatch_c).read(),
- include_dirs=["mercurial"])
-ffi.cdef("""
-
-struct mpatch_frag {
- int start, end, len;
- const char *data;
-};
+@ffi.def_extern()
+def cffi_get_next_item(arg, pos):
+    """C callback for mpatch_fold: decode bins[pos] into an mpatch_flist.
+
+    "arg" is an ffi handle wrapping (all, bins); the decoded C buffer is
+    appended to "all" to keep it alive for the duration of the fold.
+    Returns ffi.NULL when decoding fails.
+    """
+    all, bins = ffi.from_handle(arg)
+    container = ffi.new("struct mpatch_flist*[1]")
+    to_pass = ffi.new("char[]", str(bins[pos]))
+    all.append(to_pass)
+    # len(to_pass) includes the trailing NUL added by ffi.new, hence -1
+    r = lib.mpatch_decode(to_pass, len(to_pass) - 1, container)
+    if r < 0:
+        return ffi.NULL
+    return container[0]
-struct mpatch_flist {
- struct mpatch_frag *base, *head, *tail;
-};
-
-extern "Python" struct mpatch_flist* cffi_get_next_item(void*, ssize_t);
-
-int mpatch_decode(const char *bin, ssize_t len, struct mpatch_flist** res);
-ssize_t mpatch_calcsize(size_t len, struct mpatch_flist *l);
-void mpatch_lfree(struct mpatch_flist *a);
-static int mpatch_apply(char *buf, const char *orig, size_t len,
- struct mpatch_flist *l);
-struct mpatch_flist *mpatch_fold(void *bins,
- struct mpatch_flist* (*get_next_item)(void*, ssize_t),
- ssize_t start, ssize_t end);
-""")
-
-if __name__ == '__main__':
- ffi.compile()
+def patches(text, bins):
+    """Apply the list of binary patches "bins" to "text" and return the result.
+
+    Folds all patches into a single fragment list via the C mpatch_fold,
+    then applies it in one pass.  Raises mpatchError on decode,
+    consistency or apply failures.
+    """
+    lgt = len(bins)
+    all = []
+    if not lgt:
+        return text
+    arg = (all, bins)
+    patch = lib.mpatch_fold(ffi.new_handle(arg),
+                            lib.cffi_get_next_item, 0, lgt)
+    if not patch:
+        raise mpatchError("cannot decode chunk")
+    outlen = lib.mpatch_calcsize(len(text), patch)
+    if outlen < 0:
+        lib.mpatch_lfree(patch)
+        raise mpatchError("inconsistency detected")
+    buf = ffi.new("char[]", outlen)
+    if lib.mpatch_apply(buf, text, len(text), patch) < 0:
+        lib.mpatch_lfree(patch)
+        raise mpatchError("error applying patches")
+    # copy out of the C buffer before freeing the fragment list
+    res = ffi.buffer(buf, outlen)[:]
+    lib.mpatch_lfree(patch)
+    return res
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cffi/mpatchbuild.py Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,35 @@
+# mpatchbuild.py - cffi build script for mercurial.cffi._mpatch
+#
+# Compiles mercurial/mpatch.c into a cffi extension module and declares
+# the Python-side cffi_get_next_item callback; run as a script to build.
+from __future__ import absolute_import
+
+import cffi
+import os
+
+ffi = cffi.FFI()
+mpatch_c = os.path.join(os.path.join(os.path.dirname(__file__), '..',
+                                     'mpatch.c'))
+ffi.set_source("mercurial.cffi._mpatch", open(mpatch_c).read(),
+               include_dirs=["mercurial"])
+ffi.cdef("""
+
+struct mpatch_frag {
+    int start, end, len;
+    const char *data;
+};
+
+struct mpatch_flist {
+    struct mpatch_frag *base, *head, *tail;
+};
+
+extern "Python" struct mpatch_flist* cffi_get_next_item(void*, ssize_t);
+
+int mpatch_decode(const char *bin, ssize_t len, struct mpatch_flist** res);
+ssize_t mpatch_calcsize(size_t len, struct mpatch_flist *l);
+void mpatch_lfree(struct mpatch_flist *a);
+static int mpatch_apply(char *buf, const char *orig, size_t len,
+                        struct mpatch_flist *l);
+struct mpatch_flist *mpatch_fold(void *bins,
+                                 struct mpatch_flist* (*get_next_item)(void*, ssize_t),
+                                 ssize_t start, ssize_t end);
+""")
+
+if __name__ == '__main__':
+    ffi.compile()
--- a/mercurial/cffi/osutil.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/cffi/osutil.py Wed Jul 19 07:51:41 2017 -0500
@@ -1,102 +1,102 @@
+# osutil.py - CFFI version of osutil.c
+#
+# Copyright 2016 Maciej Fijalkowski <fijall@gmail.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
from __future__ import absolute_import
-import cffi
+import os
+import stat as statmod
-ffi = cffi.FFI()
-ffi.set_source("_osutil_cffi", """
-#include <sys/attr.h>
-#include <sys/vnode.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <time.h>
+from ..pure.osutil import *
+
+from .. import (
+ pycompat,
+)
-typedef struct val_attrs {
- uint32_t length;
- attribute_set_t returned;
- attrreference_t name_info;
- fsobj_type_t obj_type;
- struct timespec mtime;
- uint32_t accessmask;
- off_t datalength;
-} __attribute__((aligned(4), packed)) val_attrs_t;
-""", include_dirs=['mercurial'])
-ffi.cdef('''
+# The fast listdir implementation below is macOS-only (getattrlistbulk).
+if pycompat.sysplatform == 'darwin':
+    from . import _osutil
+
+    ffi = _osutil.ffi
+    lib = _osutil.lib
+
+    listdir_batch_size = 4096
+    # tweakable number, only affects performance, which chunks
+    # of bytes do we get back from getattrlistbulk
-typedef uint32_t attrgroup_t;
+    attrkinds = [None] * 20 # we need the max no for enum VXXX, 20 is plenty
-typedef struct attrlist {
-    uint16_t bitmapcount; /* number of attr. bit sets in list */
-    uint16_t reserved; /* (to maintain 4-byte alignment) */
-    attrgroup_t commonattr; /* common attribute group */
-    attrgroup_t volattr; /* volume attribute group */
-    attrgroup_t dirattr; /* directory attribute group */
-    attrgroup_t fileattr; /* file attribute group */
-    attrgroup_t forkattr; /* fork attribute group */
-    ...;
-};
+    # map the kernel's vnode type enum to stat() S_IF* format bits
+    attrkinds[lib.VREG] = statmod.S_IFREG
+    attrkinds[lib.VDIR] = statmod.S_IFDIR
+    attrkinds[lib.VLNK] = statmod.S_IFLNK
+    attrkinds[lib.VBLK] = statmod.S_IFBLK
+    attrkinds[lib.VCHR] = statmod.S_IFCHR
+    attrkinds[lib.VFIFO] = statmod.S_IFIFO
+    attrkinds[lib.VSOCK] = statmod.S_IFSOCK
-typedef struct attribute_set {
-    ...;
-} attribute_set_t;
+    # minimal stand-in for os.stat_result with just the fields we fill
+    class stat_res(object):
+        def __init__(self, st_mode, st_mtime, st_size):
+            self.st_mode = st_mode
+            self.st_mtime = st_mtime
+            self.st_size = st_size
-typedef struct attrreference {
-    int attr_dataoffset;
-    int attr_length;
-    ...;
-} attrreference_t;
-
-typedef int ... off_t;
+    tv_sec_ofs = ffi.offsetof("struct timespec", "tv_sec")
+    # shared scratch buffer for getattrlistbulk results
+    buf = ffi.new("char[]", listdir_batch_size)
-typedef struct val_attrs {
- uint32_t length;
- attribute_set_t returned;
- attrreference_t name_info;
- uint32_t obj_type;
- struct timespec mtime;
- uint32_t accessmask;
- off_t datalength;
- ...;
-} val_attrs_t;
-
-/* the exact layout of the above struct will be figured out during build time */
-
-typedef int ... time_t;
-
-typedef struct timespec {
- time_t tv_sec;
- ...;
-};
-
-int getattrlist(const char* path, struct attrlist * attrList, void * attrBuf,
- size_t attrBufSize, unsigned int options);
-
-int getattrlistbulk(int dirfd, struct attrlist * attrList, void * attrBuf,
- size_t attrBufSize, uint64_t options);
+ def listdirinternal(dfd, req, stat, skip):
+ ret = []
+ while True:
+ r = lib.getattrlistbulk(dfd, req, buf, listdir_batch_size, 0)
+ if r == 0:
+ break
+ if r == -1:
+ raise OSError(ffi.errno, os.strerror(ffi.errno))
+ cur = ffi.cast("val_attrs_t*", buf)
+ for i in range(r):
+ lgt = cur.length
+ assert lgt == ffi.cast('uint32_t*', cur)[0]
+ ofs = cur.name_info.attr_dataoffset
+ str_lgt = cur.name_info.attr_length
+ base_ofs = ffi.offsetof('val_attrs_t', 'name_info')
+ name = str(ffi.buffer(ffi.cast("char*", cur) + base_ofs + ofs,
+ str_lgt - 1))
+ tp = attrkinds[cur.obj_type]
+ if name == "." or name == "..":
+ continue
+ if skip == name and tp == statmod.S_ISDIR:
+ return []
+ if stat:
+ mtime = cur.mtime.tv_sec
+ mode = (cur.accessmask & ~lib.S_IFMT)| tp
+ ret.append((name, tp, stat_res(st_mode=mode, st_mtime=mtime,
+ st_size=cur.datalength)))
+ else:
+ ret.append((name, tp))
+ cur = ffi.cast("val_attrs_t*", int(ffi.cast("intptr_t", cur))
+ + lgt)
+ return ret
-#define ATTR_BIT_MAP_COUNT ...
-#define ATTR_CMN_NAME ...
-#define ATTR_CMN_OBJTYPE ...
-#define ATTR_CMN_MODTIME ...
-#define ATTR_CMN_ACCESSMASK ...
-#define ATTR_CMN_ERROR ...
-#define ATTR_CMN_RETURNED_ATTRS ...
-#define ATTR_FILE_DATALENGTH ...
+    def listdir(path, stat=False, skip=None):
+        """List the contents of "path" via getattrlistbulk.
+
+        Requests name, object type, access mask, mtime and data length for
+        each entry, and always closes the directory fd (close errors are
+        deliberately ignored).
+        """
+        req = ffi.new("struct attrlist*")
+        req.bitmapcount = lib.ATTR_BIT_MAP_COUNT
+        req.commonattr = (lib.ATTR_CMN_RETURNED_ATTRS |
+                          lib.ATTR_CMN_NAME |
+                          lib.ATTR_CMN_OBJTYPE |
+                          lib.ATTR_CMN_ACCESSMASK |
+                          lib.ATTR_CMN_MODTIME)
+        req.fileattr = lib.ATTR_FILE_DATALENGTH
+        dfd = lib.open(path, lib.O_RDONLY, 0)
+        if dfd == -1:
+            raise OSError(ffi.errno, os.strerror(ffi.errno))
-#define VREG ...
-#define VDIR ...
-#define VLNK ...
-#define VBLK ...
-#define VCHR ...
-#define VFIFO ...
-#define VSOCK ...
-
-#define S_IFMT ...
-
-int open(const char *path, int oflag, int perm);
-int close(int);
-
-#define O_RDONLY ...
-''')
-
-if __name__ == '__main__':
-    ffi.compile()
+        try:
+            ret = listdirinternal(dfd, req, stat, skip)
+        finally:
+            try:
+                lib.close(dfd)
+            except BaseException:
+                pass # we ignore all the errors from closing, not
+                     # much we can do about that
+        return ret
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cffi/osutilbuild.py Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,102 @@
+from __future__ import absolute_import
+
+import cffi
+
+ffi = cffi.FFI()
+ffi.set_source("mercurial.cffi._osutil", """
+#include <sys/attr.h>
+#include <sys/vnode.h>
+#include <unistd.h>
+#include <fcntl.h>
+#include <time.h>
+
+typedef struct val_attrs {
+ uint32_t length;
+ attribute_set_t returned;
+ attrreference_t name_info;
+ fsobj_type_t obj_type;
+ struct timespec mtime;
+ uint32_t accessmask;
+ off_t datalength;
+} __attribute__((aligned(4), packed)) val_attrs_t;
+""", include_dirs=['mercurial'])
+ffi.cdef('''
+
+typedef uint32_t attrgroup_t;
+
+typedef struct attrlist {
+ uint16_t bitmapcount; /* number of attr. bit sets in list */
+ uint16_t reserved; /* (to maintain 4-byte alignment) */
+ attrgroup_t commonattr; /* common attribute group */
+ attrgroup_t volattr; /* volume attribute group */
+ attrgroup_t dirattr; /* directory attribute group */
+ attrgroup_t fileattr; /* file attribute group */
+ attrgroup_t forkattr; /* fork attribute group */
+ ...;
+};
+
+typedef struct attribute_set {
+ ...;
+} attribute_set_t;
+
+typedef struct attrreference {
+ int attr_dataoffset;
+ int attr_length;
+ ...;
+} attrreference_t;
+
+typedef int ... off_t;
+
+typedef struct val_attrs {
+ uint32_t length;
+ attribute_set_t returned;
+ attrreference_t name_info;
+ uint32_t obj_type;
+ struct timespec mtime;
+ uint32_t accessmask;
+ off_t datalength;
+ ...;
+} val_attrs_t;
+
+/* the exact layout of the above struct will be figured out during build time */
+
+typedef int ... time_t;
+
+typedef struct timespec {
+ time_t tv_sec;
+ ...;
+};
+
+int getattrlist(const char* path, struct attrlist * attrList, void * attrBuf,
+ size_t attrBufSize, unsigned int options);
+
+int getattrlistbulk(int dirfd, struct attrlist * attrList, void * attrBuf,
+ size_t attrBufSize, uint64_t options);
+
+#define ATTR_BIT_MAP_COUNT ...
+#define ATTR_CMN_NAME ...
+#define ATTR_CMN_OBJTYPE ...
+#define ATTR_CMN_MODTIME ...
+#define ATTR_CMN_ACCESSMASK ...
+#define ATTR_CMN_ERROR ...
+#define ATTR_CMN_RETURNED_ATTRS ...
+#define ATTR_FILE_DATALENGTH ...
+
+#define VREG ...
+#define VDIR ...
+#define VLNK ...
+#define VBLK ...
+#define VCHR ...
+#define VFIFO ...
+#define VSOCK ...
+
+#define S_IFMT ...
+
+int open(const char *path, int oflag, int perm);
+int close(int);
+
+#define O_RDONLY ...
+''')
+
+if __name__ == '__main__':
+ ffi.compile()
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cffi/parsers.py Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,10 @@
+# parsers.py - Python implementation of parsers.c
+#
+# Copyright 2009 Matt Mackall <mpm@selenic.com> and others
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from __future__ import absolute_import
+
+from ..pure.parsers import *
--- a/mercurial/changegroup.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/changegroup.py Wed Jul 19 07:51:41 2017 -0500
@@ -20,7 +20,6 @@
)
from . import (
- branchmap,
dagutil,
discovery,
error,
@@ -61,25 +60,6 @@
"""return a changegroup chunk header (string) for a zero-length chunk"""
return struct.pack(">l", 0)
-def combineresults(results):
- """logic to combine 0 or more addchangegroup results into one"""
- changedheads = 0
- result = 1
- for ret in results:
- # If any changegroup result is 0, return 0
- if ret == 0:
- result = 0
- break
- if ret < -1:
- changedheads += ret + 1
- elif ret > 1:
- changedheads += ret - 1
- if changedheads > 0:
- result = 1 + changedheads
- elif changedheads < 0:
- result = -1 + changedheads
- return result
-
def writechunks(ui, chunks, filename, vfs=None):
"""Write chunks to a file and return its filename.
@@ -257,8 +237,8 @@
repo.ui.progress(_('manifests'), None)
self.callback = None
- def apply(self, repo, srctype, url, emptyok=False,
- targetphase=phases.draft, expectedtotal=None):
+ def apply(self, repo, tr, srctype, url, targetphase=phases.draft,
+ expectedtotal=None):
"""Add the changegroup returned by source.read() to this repo.
srctype is a string like 'push', 'pull', or 'unbundle'. url is
the URL of the repo where this changegroup is coming from.
@@ -280,169 +260,159 @@
changesets = files = revisions = 0
try:
- with repo.transaction("\n".join([srctype,
- util.hidepassword(url)])) as tr:
- # The transaction could have been created before and already
- # carries source information. In this case we use the top
- # level data. We overwrite the argument because we need to use
- # the top level value (if they exist) in this function.
- srctype = tr.hookargs.setdefault('source', srctype)
- url = tr.hookargs.setdefault('url', url)
- repo.hook('prechangegroup', throw=True, **tr.hookargs)
+ # The transaction may already carry source information. In this
+ # case we use the top level data. We overwrite the argument
+ # because we need to use the top level value (if they exist)
+ # in this function.
+ srctype = tr.hookargs.setdefault('source', srctype)
+ url = tr.hookargs.setdefault('url', url)
+ repo.hook('prechangegroup', throw=True, **tr.hookargs)
- # write changelog data to temp files so concurrent readers
- # will not see an inconsistent view
- cl = repo.changelog
- cl.delayupdate(tr)
- oldheads = set(cl.heads())
+ # write changelog data to temp files so concurrent readers
+ # will not see an inconsistent view
+ cl = repo.changelog
+ cl.delayupdate(tr)
+ oldheads = set(cl.heads())
- trp = weakref.proxy(tr)
- # pull off the changeset group
- repo.ui.status(_("adding changesets\n"))
- clstart = len(cl)
- class prog(object):
- def __init__(self, step, total):
- self._step = step
- self._total = total
- self._count = 1
- def __call__(self):
- repo.ui.progress(self._step, self._count,
- unit=_('chunks'), total=self._total)
- self._count += 1
- self.callback = prog(_('changesets'), expectedtotal)
+ trp = weakref.proxy(tr)
+ # pull off the changeset group
+ repo.ui.status(_("adding changesets\n"))
+ clstart = len(cl)
+ class prog(object):
+ def __init__(self, step, total):
+ self._step = step
+ self._total = total
+ self._count = 1
+ def __call__(self):
+ repo.ui.progress(self._step, self._count, unit=_('chunks'),
+ total=self._total)
+ self._count += 1
+ self.callback = prog(_('changesets'), expectedtotal)
- efiles = set()
- def onchangelog(cl, node):
- efiles.update(cl.readfiles(node))
+ efiles = set()
+ def onchangelog(cl, node):
+ efiles.update(cl.readfiles(node))
- self.changelogheader()
- srccontent = cl.addgroup(self, csmap, trp,
- addrevisioncb=onchangelog)
- efiles = len(efiles)
+ self.changelogheader()
+ cgnodes = cl.addgroup(self, csmap, trp, addrevisioncb=onchangelog)
+ efiles = len(efiles)
- if not (srccontent or emptyok):
- raise error.Abort(_("received changelog group is empty"))
- clend = len(cl)
- changesets = clend - clstart
- repo.ui.progress(_('changesets'), None)
- self.callback = None
+ if not cgnodes:
+ repo.ui.develwarn('applied empty changegroup',
+ config='empty-changegroup')
+ clend = len(cl)
+ changesets = clend - clstart
+ repo.ui.progress(_('changesets'), None)
+ self.callback = None
- # pull off the manifest group
- repo.ui.status(_("adding manifests\n"))
- self._unpackmanifests(repo, revmap, trp, prog, changesets)
+ # pull off the manifest group
+ repo.ui.status(_("adding manifests\n"))
+ self._unpackmanifests(repo, revmap, trp, prog, changesets)
- needfiles = {}
- if repo.ui.configbool('server', 'validate', default=False):
- cl = repo.changelog
- ml = repo.manifestlog
- # validate incoming csets have their manifests
- for cset in xrange(clstart, clend):
- mfnode = cl.changelogrevision(cset).manifest
- mfest = ml[mfnode].readdelta()
- # store file nodes we must see
- for f, n in mfest.iteritems():
- needfiles.setdefault(f, set()).add(n)
+ needfiles = {}
+ if repo.ui.configbool('server', 'validate'):
+ cl = repo.changelog
+ ml = repo.manifestlog
+ # validate incoming csets have their manifests
+ for cset in xrange(clstart, clend):
+ mfnode = cl.changelogrevision(cset).manifest
+ mfest = ml[mfnode].readdelta()
+                # store file nodes we must see
+ for f, n in mfest.iteritems():
+ needfiles.setdefault(f, set()).add(n)
- # process the files
- repo.ui.status(_("adding file changes\n"))
- newrevs, newfiles = _addchangegroupfiles(
- repo, self, revmap, trp, efiles, needfiles)
- revisions += newrevs
- files += newfiles
+ # process the files
+ repo.ui.status(_("adding file changes\n"))
+ newrevs, newfiles = _addchangegroupfiles(
+ repo, self, revmap, trp, efiles, needfiles)
+ revisions += newrevs
+ files += newfiles
- dh = 0
- if oldheads:
- heads = cl.heads()
- dh = len(heads) - len(oldheads)
- for h in heads:
- if h not in oldheads and repo[h].closesbranch():
- dh -= 1
- htext = ""
- if dh:
- htext = _(" (%+d heads)") % dh
+ deltaheads = 0
+ if oldheads:
+ heads = cl.heads()
+ deltaheads = len(heads) - len(oldheads)
+ for h in heads:
+ if h not in oldheads and repo[h].closesbranch():
+ deltaheads -= 1
+ htext = ""
+ if deltaheads:
+ htext = _(" (%+d heads)") % deltaheads
- repo.ui.status(_("added %d changesets"
- " with %d changes to %d files%s\n")
- % (changesets, revisions, files, htext))
- repo.invalidatevolatilesets()
+ repo.ui.status(_("added %d changesets"
+ " with %d changes to %d files%s\n")
+ % (changesets, revisions, files, htext))
+ repo.invalidatevolatilesets()
- if changesets > 0:
- if 'node' not in tr.hookargs:
- tr.hookargs['node'] = hex(cl.node(clstart))
- tr.hookargs['node_last'] = hex(cl.node(clend - 1))
- hookargs = dict(tr.hookargs)
- else:
- hookargs = dict(tr.hookargs)
- hookargs['node'] = hex(cl.node(clstart))
- hookargs['node_last'] = hex(cl.node(clend - 1))
- repo.hook('pretxnchangegroup', throw=True, **hookargs)
+ if changesets > 0:
+ if 'node' not in tr.hookargs:
+ tr.hookargs['node'] = hex(cl.node(clstart))
+ tr.hookargs['node_last'] = hex(cl.node(clend - 1))
+ hookargs = dict(tr.hookargs)
+ else:
+ hookargs = dict(tr.hookargs)
+ hookargs['node'] = hex(cl.node(clstart))
+ hookargs['node_last'] = hex(cl.node(clend - 1))
+ repo.hook('pretxnchangegroup', throw=True, **hookargs)
- added = [cl.node(r) for r in xrange(clstart, clend)]
- publishing = repo.publishing()
- if srctype in ('push', 'serve'):
- # Old servers can not push the boundary themselves.
- # New servers won't push the boundary if changeset already
- # exists locally as secret
- #
- # We should not use added here but the list of all change in
- # the bundle
- if publishing:
- phases.advanceboundary(repo, tr, phases.public,
- srccontent)
- else:
- # Those changesets have been pushed from the
- # outside, their phases are going to be pushed
- # alongside. Therefor `targetphase` is
- # ignored.
- phases.advanceboundary(repo, tr, phases.draft,
- srccontent)
- phases.retractboundary(repo, tr, phases.draft, added)
- elif srctype != 'strip':
- # publishing only alter behavior during push
- #
- # strip should not touch boundary at all
- phases.retractboundary(repo, tr, targetphase, added)
+ added = [cl.node(r) for r in xrange(clstart, clend)]
+ phaseall = None
+ if srctype in ('push', 'serve'):
+ # Old servers can not push the boundary themselves.
+ # New servers won't push the boundary if changeset already
+ # exists locally as secret
+ #
+ # We should not use added here but the list of all change in
+ # the bundle
+ if repo.publishing():
+ targetphase = phaseall = phases.public
+ else:
+ # closer target phase computation
+
+ # Those changesets have been pushed from the
+ # outside, their phases are going to be pushed
+                # alongside. Therefore `targetphase` is
+ # ignored.
+ targetphase = phaseall = phases.draft
+ if added:
+ phases.registernew(repo, tr, targetphase, added)
+ if phaseall is not None:
+ phases.advanceboundary(repo, tr, phaseall, cgnodes)
- if changesets > 0:
- if srctype != 'strip':
- # During strip, branchcache is invalid but
- # coming call to `destroyed` will repair it.
- # In other case we can safely update cache on
- # disk.
- repo.ui.debug('updating the branch cache\n')
- branchmap.updatecache(repo.filtered('served'))
+ if changesets > 0:
- def runhooks():
- # These hooks run when the lock releases, not when the
- # transaction closes. So it's possible for the changelog
- # to have changed since we last saw it.
- if clstart >= len(repo):
- return
+ def runhooks():
+ # These hooks run when the lock releases, not when the
+ # transaction closes. So it's possible for the changelog
+ # to have changed since we last saw it.
+ if clstart >= len(repo):
+ return
+
+ repo.hook("changegroup", **hookargs)
- repo.hook("changegroup", **hookargs)
-
- for n in added:
- args = hookargs.copy()
- args['node'] = hex(n)
- del args['node_last']
- repo.hook("incoming", **args)
+ for n in added:
+ args = hookargs.copy()
+ args['node'] = hex(n)
+ del args['node_last']
+ repo.hook("incoming", **args)
- newheads = [h for h in repo.heads()
- if h not in oldheads]
- repo.ui.log("incoming",
- "%s incoming changes - new heads: %s\n",
- len(added),
- ', '.join([hex(c[:6]) for c in newheads]))
+ newheads = [h for h in repo.heads()
+ if h not in oldheads]
+ repo.ui.log("incoming",
+ "%s incoming changes - new heads: %s\n",
+ len(added),
+ ', '.join([hex(c[:6]) for c in newheads]))
- tr.addpostclose('changegroup-runhooks-%020i' % clstart,
- lambda tr: repo._afterlock(runhooks))
+ tr.addpostclose('changegroup-runhooks-%020i' % clstart,
+ lambda tr: repo._afterlock(runhooks))
finally:
repo.ui.flush()
# never return 0 here:
- if dh < 0:
- return dh - 1
+ if deltaheads < 0:
+ ret = deltaheads - 1
else:
- return dh + 1
+ ret = deltaheads + 1
+ return ret
class cg2unpacker(cg1unpacker):
"""Unpacker for cg2 streams.
@@ -506,14 +476,16 @@
"""Given a source repo, construct a bundler.
bundlecaps is optional and can be used to specify the set of
- capabilities which can be used to build the bundle.
+ capabilities which can be used to build the bundle. While bundlecaps is
+ unused in core Mercurial, extensions rely on this feature to communicate
+ capabilities to customize the changegroup packer.
"""
# Set of capabilities we can use to build the bundle.
if bundlecaps is None:
bundlecaps = set()
self._bundlecaps = bundlecaps
# experimental config: bundle.reorder
- reorder = repo.ui.config('bundle', 'reorder', 'auto')
+ reorder = repo.ui.config('bundle', 'reorder')
if reorder == 'auto':
reorder = None
else:
@@ -974,8 +946,8 @@
bundler = getbundler(version, repo, bundlecaps)
return getsubsetraw(repo, outgoing, bundler, source)
-def getlocalchangegroup(repo, source, outgoing, bundlecaps=None,
- version='01'):
+def getchangegroup(repo, source, outgoing, bundlecaps=None,
+ version='01'):
"""Like getbundle, but taking a discovery.outgoing as an argument.
This is only implemented for local repos and reuses potentially
@@ -985,18 +957,10 @@
bundler = getbundler(version, repo, bundlecaps)
return getsubset(repo, outgoing, bundler, source)
-def getchangegroup(repo, source, outgoing, bundlecaps=None,
- version='01'):
- """Like changegroupsubset, but returns the set difference between the
- ancestors of heads and the ancestors common.
-
- If heads is None, use the local heads. If common is None, use [nullid].
-
- The nodes in common might not all be known locally due to the way the
- current discovery protocol works.
- """
- return getlocalchangegroup(repo, source, outgoing, bundlecaps=bundlecaps,
- version=version)
+def getlocalchangegroup(repo, *args, **kwargs):
+ repo.ui.deprecwarn('getlocalchangegroup is deprecated, use getchangegroup',
+ '4.3')
+ return getchangegroup(repo, *args, **kwargs)
def changegroup(repo, basenodes, source):
# to avoid a race we use changegroupsubset() (issue1320)
--- a/mercurial/changelog.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/changelog.py Wed Jul 19 07:51:41 2017 -0500
@@ -190,7 +190,7 @@
# The list of files may be empty. Which means nl3 is the first of the
# double newline that precedes the description.
- if text[nl3 + 1] == '\n':
+ if text[nl3 + 1:nl3 + 2] == '\n':
doublenl = nl3
else:
doublenl = text.index('\n\n', nl3 + 1)
@@ -258,12 +258,28 @@
return encoding.tolocal(self._text[self._offsets[3] + 2:])
class changelog(revlog.revlog):
- def __init__(self, opener):
- revlog.revlog.__init__(self, opener, "00changelog.i",
+ def __init__(self, opener, trypending=False):
+ """Load a changelog revlog using an opener.
+
+ If ``trypending`` is true, we attempt to load the index from a
+ ``00changelog.i.a`` file instead of the default ``00changelog.i``.
+ The ``00changelog.i.a`` file contains index (and possibly inline
+ revision) data for a transaction that hasn't been finalized yet.
+ It exists in a separate file to facilitate readers (such as
+ hooks processes) accessing data before a transaction is finalized.
+ """
+ if trypending and opener.exists('00changelog.i.a'):
+ indexfile = '00changelog.i.a'
+ else:
+ indexfile = '00changelog.i'
+
+ datafile = '00changelog.d'
+ revlog.revlog.__init__(self, opener, indexfile, datafile=datafile,
checkambig=True)
+
if self._initempty:
# changelogs don't benefit from generaldelta
- self.version &= ~revlog.REVLOGGENERALDELTA
+ self.version &= ~revlog.FLAG_GENERALDELTA
self._generaldelta = False
# Delta chains for changelogs tend to be very small because entries
@@ -401,27 +417,6 @@
# split when we're done
self.checkinlinesize(tr)
- def readpending(self, file):
- """read index data from a "pending" file
-
- During a transaction, the actual changeset data is already stored in the
- main file, but not yet finalized in the on-disk index. Instead, a
- "pending" index is written by the transaction logic. If this function
- is running, we are likely in a subprocess invoked in a hook. The
- subprocess is informed that it is within a transaction and needs to
- access its content.
-
- This function will read all the index data out of the pending file and
- overwrite the main index."""
-
- if not self.opener.exists(file):
- return # no pending data for changelog
- r = revlog.revlog(self.opener, file)
- self.index = r.index
- self.nodemap = r.nodemap
- self._nodecache = r._nodecache
- self._chunkcache = r._chunkcache
-
def _writepending(self, tr):
"create a file containing the unfinalized state for pretxnchangegroup"
if self._delaybuf:
@@ -535,3 +530,14 @@
just to access this is costly."""
extra = self.read(rev)[5]
return encoding.tolocal(extra.get("branch")), 'close' in extra
+
+ def _addrevision(self, node, rawtext, transaction, *args, **kwargs):
+ # overlay over the standard revlog._addrevision to track the new
+ # revision on the transaction.
+ rev = len(self)
+ node = super(changelog, self)._addrevision(node, rawtext, transaction,
+ *args, **kwargs)
+ revs = transaction.changes.get('revs')
+ if revs is not None:
+ revs.add(rev)
+ return node
--- a/mercurial/chgserver.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/chgserver.py Wed Jul 19 07:51:41 2017 -0500
@@ -44,6 +44,7 @@
import inspect
import os
import re
+import socket
import struct
import time
@@ -54,7 +55,6 @@
encoding,
error,
extensions,
- osutil,
pycompat,
util,
)
@@ -75,7 +75,8 @@
# sensitive environment variables affecting confighash
_envre = re.compile(r'''\A(?:
CHGHG
- |HG(?:[A-Z].*)?
+ |HG(?:DEMANDIMPORT|EMITWARNINGS|MODULEPOLICY|PROF|RCPATH)?
+ |HG(?:ENCODING|PLAIN).*
|LANG(?:UAGE)?
|LC_.*
|LD_.*
@@ -313,7 +314,7 @@
# tell client to sendmsg() with 1-byte payload, which makes it
# distinctive from "attachio\n" command consumed by client.read()
self.clientsock.sendall(struct.pack('>cI', 'I', 1))
- clientfds = osutil.recvfds(self.clientsock.fileno())
+ clientfds = util.recvfds(self.clientsock.fileno())
_log('received fds: %r\n' % clientfds)
ui = self.ui
@@ -458,12 +459,12 @@
'setenv': setenv,
'setumask': setumask})
- if util.safehasattr(osutil, 'setprocname'):
+ if util.safehasattr(util, 'setprocname'):
def setprocname(self):
"""Change process title"""
name = self._readstr()
_log('setprocname: %r\n' % name)
- osutil.setprocname(name)
+ util.setprocname(name)
capabilities['setprocname'] = setprocname
def _tempaddress(address):
@@ -484,7 +485,7 @@
def __init__(self, ui):
self.ui = ui
- self._idletimeout = ui.configint('chgserver', 'idletimeout', 3600)
+ self._idletimeout = ui.configint('chgserver', 'idletimeout')
self._lastactive = time.time()
def bindsocket(self, sock, address):
@@ -492,10 +493,11 @@
self._checkextensions()
self._bind(sock)
self._createsymlink()
+ # no "listening at" message should be printed to simulate hg behavior
def _inithashstate(self, address):
self._baseaddress = address
- if self.ui.configbool('chgserver', 'skiphash', False):
+ if self.ui.configbool('chgserver', 'skiphash'):
self._hashstate = None
self._realaddress = address
return
@@ -517,6 +519,7 @@
tempaddress = _tempaddress(self._realaddress)
util.bindunixsocket(sock, tempaddress)
self._socketstat = os.stat(tempaddress)
+ sock.listen(socket.SOMAXCONN)
# rename will replace the old socket file if exists atomically. the
# old server will detect ownership change and exit.
util.rename(tempaddress, self._realaddress)
@@ -545,10 +548,6 @@
# the client will start a new server on demand.
util.tryunlink(self._realaddress)
- def printbanner(self, address):
- # no "listening at" message should be printed to simulate hg behavior
- pass
-
def shouldexit(self):
if not self._issocketowner():
self.ui.debug('%s is not owned, exiting.\n' % self._realaddress)
--- a/mercurial/cmdutil.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/cmdutil.py Wed Jul 19 07:51:41 2017 -0500
@@ -15,7 +15,6 @@
from .i18n import _
from .node import (
- bin,
hex,
nullid,
nullrev,
@@ -31,14 +30,13 @@
error,
formatter,
graphmod,
- lock as lockmod,
match as matchmod,
obsolete,
patch,
pathutil,
phases,
pycompat,
- repair,
+ registrar,
revlog,
revset,
scmutil,
@@ -50,6 +48,113 @@
)
stringio = util.stringio
+# templates of common command options
+
+dryrunopts = [
+ ('n', 'dry-run', None,
+ _('do not perform actions, just print output')),
+]
+
+remoteopts = [
+ ('e', 'ssh', '',
+ _('specify ssh command to use'), _('CMD')),
+ ('', 'remotecmd', '',
+ _('specify hg command to run on the remote side'), _('CMD')),
+ ('', 'insecure', None,
+ _('do not verify server certificate (ignoring web.cacerts config)')),
+]
+
+walkopts = [
+ ('I', 'include', [],
+ _('include names matching the given patterns'), _('PATTERN')),
+ ('X', 'exclude', [],
+ _('exclude names matching the given patterns'), _('PATTERN')),
+]
+
+commitopts = [
+ ('m', 'message', '',
+ _('use text as commit message'), _('TEXT')),
+ ('l', 'logfile', '',
+ _('read commit message from file'), _('FILE')),
+]
+
+commitopts2 = [
+ ('d', 'date', '',
+ _('record the specified date as commit date'), _('DATE')),
+ ('u', 'user', '',
+ _('record the specified user as committer'), _('USER')),
+]
+
+# hidden for now
+formatteropts = [
+ ('T', 'template', '',
+ _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
+]
+
+templateopts = [
+ ('', 'style', '',
+ _('display using template map file (DEPRECATED)'), _('STYLE')),
+ ('T', 'template', '',
+ _('display with template'), _('TEMPLATE')),
+]
+
+logopts = [
+ ('p', 'patch', None, _('show patch')),
+ ('g', 'git', None, _('use git extended diff format')),
+ ('l', 'limit', '',
+ _('limit number of changes displayed'), _('NUM')),
+ ('M', 'no-merges', None, _('do not show merges')),
+ ('', 'stat', None, _('output diffstat-style summary of changes')),
+ ('G', 'graph', None, _("show the revision DAG")),
+] + templateopts
+
+diffopts = [
+ ('a', 'text', None, _('treat all files as text')),
+ ('g', 'git', None, _('use git extended diff format')),
+ ('', 'binary', None, _('generate binary diffs in git mode (default)')),
+ ('', 'nodates', None, _('omit dates from diff headers'))
+]
+
+diffwsopts = [
+ ('w', 'ignore-all-space', None,
+ _('ignore white space when comparing lines')),
+ ('b', 'ignore-space-change', None,
+ _('ignore changes in the amount of white space')),
+ ('B', 'ignore-blank-lines', None,
+ _('ignore changes whose lines are all blank')),
+]
+
+diffopts2 = [
+ ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
+ ('p', 'show-function', None, _('show which function each change is in')),
+ ('', 'reverse', None, _('produce a diff that undoes the changes')),
+] + diffwsopts + [
+ ('U', 'unified', '',
+ _('number of lines of context to show'), _('NUM')),
+ ('', 'stat', None, _('output diffstat-style summary of changes')),
+ ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
+]
+
+mergetoolopts = [
+ ('t', 'tool', '', _('specify merge tool')),
+]
+
+similarityopts = [
+ ('s', 'similarity', '',
+ _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
+]
+
+subrepoopts = [
+ ('S', 'subrepos', None,
+ _('recurse into subrepositories'))
+]
+
+debugrevlogopts = [
+ ('c', 'changelog', False, _('open changelog')),
+ ('m', 'manifest', False, _('open manifest')),
+ ('', 'dir', '', _('open directory manifest')),
+]
+
# special string such that everything below this line will be ingored in the
# editor text
_linebelow = "^HG: ------------------------ >8 ------------------------$"
@@ -103,7 +208,7 @@
(see patch.filterpatch).
"""
usecurses = crecordmod.checkcurses(ui)
- testfile = ui.config('experimental', 'crecordtest', None)
+ testfile = ui.config('experimental', 'crecordtest')
oldwrite = setupwrapcolorwrite(ui)
try:
newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
@@ -115,6 +220,7 @@
def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
filterfn, *pats, **opts):
from . import merge as mergemod
+ opts = pycompat.byteskwargs(opts)
if not ui.interactive():
if cmdsuggest:
msg = _('running non-interactively, use %s instead') % cmdsuggest
@@ -294,6 +400,178 @@
return commit(ui, repo, recordinwlock, pats, opts)
+def tersestatus(root, statlist, status, ignorefn, ignore):
+ """
+ Returns a list of statuses with directory collapsed if all the files in the
+    directory have the same status.
+ """
+
+ def numfiles(dirname):
+ """
+ Calculates the number of tracked files in a given directory which also
+ includes files which were removed or deleted. Considers ignored files
+ if ignore argument is True or 'i' is present in status argument.
+ """
+ if lencache.get(dirname):
+ return lencache[dirname]
+ if 'i' in status or ignore:
+ def match(localpath):
+ absolutepath = os.path.join(root, localpath)
+ if os.path.isdir(absolutepath) and isemptydir(absolutepath):
+ return True
+ return False
+ else:
+ def match(localpath):
+                # there can be a directory in which all the files are ignored and
+                # hence the directory should also be ignored while counting
+ # number of files or subdirs in it's parent directory. This
+ # checks the same.
+ # XXX: We need a better logic here.
+ if os.path.isdir(os.path.join(root, localpath)):
+ return isignoreddir(localpath)
+ else:
+ # XXX: there can be files which have the ignored pattern but
+ # are not ignored. That leads to bug in counting number of
+ # tracked files in the directory.
+ return ignorefn(localpath)
+ lendir = 0
+ abspath = os.path.join(root, dirname)
+        # There might be cases when a directory does not exist as the whole
+ # directory can be removed and/or deleted.
+ try:
+ for f in os.listdir(abspath):
+ localpath = os.path.join(dirname, f)
+ if not match(localpath):
+ lendir += 1
+ except OSError:
+ pass
+ lendir += len(absentdir.get(dirname, []))
+ lencache[dirname] = lendir
+ return lendir
+
+ def isemptydir(abspath):
+ """
+        Check whether a directory is empty or not, i.e. there are no files in the
+ directory and all its subdirectories.
+ """
+ for f in os.listdir(abspath):
+ fullpath = os.path.join(abspath, f)
+ if os.path.isdir(fullpath):
+ # recursion here
+ ret = isemptydir(fullpath)
+ if not ret:
+ return False
+ else:
+ return False
+ return True
+
+ def isignoreddir(localpath):
+ """
+ This function checks whether the directory contains only ignored files
+ and hence should the directory be considered ignored. Returns True, if
+ that should be ignored otherwise False.
+ """
+ dirpath = os.path.join(root, localpath)
+ for f in os.listdir(dirpath):
+ filepath = os.path.join(dirpath, f)
+ if os.path.isdir(filepath):
+ # recursion here
+ ret = isignoreddir(os.path.join(localpath, f))
+ if not ret:
+ return False
+ else:
+ if not ignorefn(os.path.join(localpath, f)):
+ return False
+ return True
+
+ def absentones(removedfiles, missingfiles):
+ """
+ Returns a dictionary of directories with files in it which are either
+ removed or missing (deleted) in them.
+ """
+ absentdir = {}
+ absentfiles = removedfiles + missingfiles
+ while absentfiles:
+ f = absentfiles.pop()
+ par = os.path.dirname(f)
+ if par == '':
+ continue
+ # we need to store files rather than number of files as some files
+ # or subdirectories in a directory can be counted twice. This is
+            # also why we have used sets here.
+ try:
+ absentdir[par].add(f)
+ except KeyError:
+ absentdir[par] = set([f])
+ absentfiles.append(par)
+ return absentdir
+
+ indexes = {'m': 0, 'a': 1, 'r': 2, 'd': 3, 'u': 4, 'i': 5, 'c': 6}
+    # get a dictionary of directories and files which are missing as os.listdir()
+ # won't be able to list them.
+ absentdir = absentones(statlist[2], statlist[3])
+ finalrs = [[]] * len(indexes)
+ didsomethingchanged = False
+    # dictionary to store number of files and subdirs in a directory so that we
+ # don't compute that again.
+ lencache = {}
+
+ for st in pycompat.bytestr(status):
+
+ try:
+ ind = indexes[st]
+ except KeyError:
+ # TODO: Need a better error message here
+ raise error.Abort("'%s' not recognized" % st)
+
+ sfiles = statlist[ind]
+ if not sfiles:
+ continue
+ pardict = {}
+ for a in sfiles:
+ par = os.path.dirname(a)
+ pardict.setdefault(par, []).append(a)
+
+ rs = []
+ newls = []
+ for par, files in pardict.iteritems():
+ lenpar = numfiles(par)
+ if lenpar == len(files):
+ newls.append(par)
+
+ if not newls:
+ continue
+
+ while newls:
+ newel = newls.pop()
+ if newel == '':
+ continue
+ parn = os.path.dirname(newel)
+ pardict[newel] = []
+ # Adding pycompat.ossep as newel is a directory.
+ pardict.setdefault(parn, []).append(newel + pycompat.ossep)
+ lenpar = numfiles(parn)
+ if lenpar == len(pardict[parn]):
+ newls.append(parn)
+
+ # dict.values() for Py3 compatibility
+ for files in pardict.values():
+ rs.extend(files)
+
+ rs.sort()
+ finalrs[ind] = rs
+ didsomethingchanged = True
+
+ # If nothing is changed, make sure the order of files is preserved.
+ if not didsomethingchanged:
+ return statlist
+
+ for x in xrange(len(indexes)):
+ if not finalrs[x]:
+ finalrs[x] = statlist[x]
+
+ return finalrs
+
def findpossible(cmd, table, strict=False):
"""
Return cmd -> (aliases, command table entry)
@@ -340,12 +618,11 @@
return choice[cmd]
if len(choice) > 1:
- clist = choice.keys()
- clist.sort()
+ clist = sorted(choice)
raise error.AmbiguousCommand(cmd, clist)
if choice:
- return choice.values()[0]
+ return list(choice.values())[0]
raise error.UnknownCommand(cmd, allcmds)
@@ -385,7 +662,7 @@
'exclusive'))
if not message and logfile:
try:
- if logfile == '-':
+ if isstdiofilename(logfile):
message = ui.fin.read()
else:
message = '\n'.join(util.readfile(logfile).splitlines())
@@ -490,10 +767,10 @@
patlen = len(pat)
i = 0
while i < patlen:
- c = pat[i]
+ c = pat[i:i + 1]
if c == '%':
i += 1
- c = pat[i]
+ c = pat[i:i + 1]
c = expander[c]()
newname.append(c)
i += 1
@@ -502,6 +779,10 @@
raise error.Abort(_("invalid format spec '%%%s' in output filename") %
inst.args[0])
+def isstdiofilename(pat):
+ """True if the given pat looks like a filename denoting stdin/stdout"""
+ return not pat or pat == '-'
+
class _unclosablefile(object):
def __init__(self, fp):
self._fp = fp
@@ -527,16 +808,12 @@
writable = mode not in ('r', 'rb')
- if not pat or pat == '-':
+ if isstdiofilename(pat):
if writable:
fp = repo.ui.fout
else:
fp = repo.ui.fin
return _unclosablefile(fp)
- if util.safehasattr(pat, 'write') and writable:
- return pat
- if util.safehasattr(pat, 'read') and 'r' in mode:
- return pat
fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
if modemap is not None:
mode = modemap.get(fn, mode)
@@ -606,8 +883,8 @@
badstates = '?'
else:
badstates = '?r'
- m = scmutil.match(repo[None], [pat], opts, globbed=True)
- for abs in repo.walk(m):
+ m = scmutil.match(wctx, [pat], opts, globbed=True)
+ for abs in wctx.walk(m):
state = repo.dirstate[abs]
rel = m.rel(abs)
exact = m.exact(abs)
@@ -1001,11 +1278,13 @@
editor = None
else:
editor = getcommiteditor(editform='import.bypass')
- memctx = context.makememctx(repo, (p1.node(), p2.node()),
+ memctx = context.memctx(repo, (p1.node(), p2.node()),
message,
- user,
- date,
- branch, files, store,
+ files=files,
+ filectxfn=store,
+ user=user,
+ date=date,
+ branch=branch,
editor=editor)
n = memctx.commit()
finally:
@@ -1032,70 +1311,97 @@
# it is given two arguments (sequencenumber, changectx)
extraexportmap = {}
-def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
+def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
+ node = scmutil.binnode(ctx)
+ parents = [p.node() for p in ctx.parents() if p]
+ branch = ctx.branch()
+ if switch_parent:
+ parents.reverse()
+
+ if parents:
+ prev = parents[0]
+ else:
+ prev = nullid
+
+ write("# HG changeset patch\n")
+ write("# User %s\n" % ctx.user())
+ write("# Date %d %d\n" % ctx.date())
+ write("# %s\n" % util.datestr(ctx.date()))
+ if branch and branch != 'default':
+ write("# Branch %s\n" % branch)
+ write("# Node ID %s\n" % hex(node))
+ write("# Parent %s\n" % hex(prev))
+ if len(parents) > 1:
+ write("# Parent %s\n" % hex(parents[1]))
+
+ for headerid in extraexport:
+ header = extraexportmap[headerid](seqno, ctx)
+ if header is not None:
+ write('# %s\n' % header)
+ write(ctx.description().rstrip())
+ write("\n\n")
+
+ for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
+ write(chunk, label=label)
+
+def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
opts=None, match=None):
- '''export changesets as hg patches.'''
+ '''export changesets as hg patches
+
+ Args:
+ repo: The repository from which we're exporting revisions.
+ revs: A list of revisions to export as revision numbers.
+ fntemplate: An optional string to use for generating patch file names.
+ fp: An optional file-like object to which patches should be written.
+ switch_parent: If True, show diffs against second parent when not nullid.
+ Default is false, which always shows diff against p1.
+ opts: diff options to use for generating the patch.
+ match: If specified, only export changes to files matching this matcher.
+
+ Returns:
+ Nothing.
+
+ Side Effect:
+ "HG Changeset Patch" data is emitted to one of the following
+ destinations:
+ fp is specified: All revs are written to the specified
+ file-like object.
+ fntemplate specified: Each rev is written to a unique file named using
+ the given template.
+ Neither fp nor template specified: All revs written to repo.ui.write()
+ '''
total = len(revs)
- revwidth = max([len(str(rev)) for rev in revs])
+ revwidth = max(len(str(rev)) for rev in revs)
filemode = {}
- def single(rev, seqno, fp):
+ write = None
+ dest = '<unnamed>'
+ if fp:
+ dest = getattr(fp, 'name', dest)
+ def write(s, **kw):
+ fp.write(s)
+ elif not fntemplate:
+ write = repo.ui.write
+
+ for seqno, rev in enumerate(revs, 1):
ctx = repo[rev]
- node = ctx.node()
- parents = [p.node() for p in ctx.parents() if p]
- branch = ctx.branch()
- if switch_parent:
- parents.reverse()
-
- if parents:
- prev = parents[0]
- else:
- prev = nullid
-
- shouldclose = False
- if not fp and len(template) > 0:
+ fo = None
+ if not fp and fntemplate:
desc_lines = ctx.description().rstrip().split('\n')
desc = desc_lines[0] #Commit always has a first line.
- fp = makefileobj(repo, template, node, desc=desc, total=total,
- seqno=seqno, revwidth=revwidth, mode='wb',
- modemap=filemode)
- shouldclose = True
- if fp and not getattr(fp, 'name', '<unnamed>').startswith('<'):
- repo.ui.note("%s\n" % fp.name)
-
- if not fp:
- write = repo.ui.write
- else:
+ fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
+ total=total, seqno=seqno, revwidth=revwidth,
+ mode='wb', modemap=filemode)
+ dest = fo.name
def write(s, **kw):
- fp.write(s)
-
- write("# HG changeset patch\n")
- write("# User %s\n" % ctx.user())
- write("# Date %d %d\n" % ctx.date())
- write("# %s\n" % util.datestr(ctx.date()))
- if branch and branch != 'default':
- write("# Branch %s\n" % branch)
- write("# Node ID %s\n" % hex(node))
- write("# Parent %s\n" % hex(prev))
- if len(parents) > 1:
- write("# Parent %s\n" % hex(parents[1]))
-
- for headerid in extraexport:
- header = extraexportmap[headerid](seqno, ctx)
- if header is not None:
- write('# %s\n' % header)
- write(ctx.description().rstrip())
- write("\n\n")
-
- for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
- write(chunk, label=label)
-
- if shouldclose:
- fp.close()
-
- for seqno, rev in enumerate(revs):
- single(rev, seqno + 1, fp)
+ fo.write(s)
+ if not dest.startswith('<'):
+ repo.ui.note("%s\n" % dest)
+ _exportsingle(
+ repo, ctx, match, switch_parent, rev, seqno, write, opts)
+ if fo is not None:
+ fo.close()
def diffordiffstat(ui, repo, diffopts, node1, node2, match,
changes=None, stat=False, fp=None, prefix='',
@@ -1197,6 +1503,7 @@
self.ui.write(self.footer)
def show(self, ctx, copies=None, matchfn=None, **props):
+ props = pycompat.byteskwargs(props)
if self.buffered:
self.ui.pushbuffer(labeled=True)
self._show(ctx, copies, matchfn, props)
@@ -1214,7 +1521,7 @@
hexfunc = short
# as of now, wctx.node() and wctx.rev() return None, but we want to
# show the same values as {node} and {rev} templatekw
- revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))
+ revnode = (scmutil.intrev(ctx), hexfunc(scmutil.binnode(ctx)))
if self.ui.quiet:
self.ui.write("%d:%s\n" % revnode, label='log.node')
@@ -1275,6 +1582,8 @@
self.ui.write(_("trouble: %s\n") % ', '.join(ctx.troubles()),
label='log.trouble')
+ self._exthook(ctx)
+
if self.ui.debugflag:
files = ctx.p1().status(ctx)[:3]
for key, value in zip([# i18n: column positioning for "hg log"
@@ -1321,6 +1630,11 @@
self.showpatch(ctx, matchfn)
+ def _exthook(self, ctx):
+ '''empty method used by extension as a hook point
+ '''
+ pass
+
def showpatch(self, ctx, matchfn):
if not matchfn:
matchfn = self.matchfn
@@ -1360,7 +1674,7 @@
if rev is None:
jrev = jnode = 'null'
else:
- jrev = str(rev)
+ jrev = '%d' % rev
jnode = '"%s"' % hex(ctx.node())
j = encoding.jsonescape
@@ -1442,37 +1756,41 @@
class changeset_templater(changeset_printer):
'''format changeset information.'''
- def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
+ # Arguments before "buffered" used to be positional. Consider not
+ # adding/removing arguments before "buffered" to not break callers.
+ def __init__(self, ui, repo, tmplspec, matchfn=None, diffopts=None,
+ buffered=False):
+ diffopts = diffopts or {}
+
changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
- assert not (tmpl and mapfile)
- defaulttempl = templatekw.defaulttempl
- if mapfile:
- self.t = templater.templater.frommapfile(mapfile,
- cache=defaulttempl)
- else:
- self.t = formatter.maketemplater(ui, 'changeset', tmpl,
- cache=defaulttempl)
-
+ self.t = formatter.loadtemplater(ui, tmplspec,
+ cache=templatekw.defaulttempl)
self._counter = itertools.count()
self.cache = {}
- # find correct templates for current mode
- tmplmodes = [
- (True, None),
- (self.ui.verbose, 'verbose'),
- (self.ui.quiet, 'quiet'),
- (self.ui.debugflag, 'debug'),
- ]
-
- self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
- 'docheader': '', 'docfooter': ''}
- for mode, postfix in tmplmodes:
- for t in self._parts:
- cur = t
- if postfix:
- cur += "_" + postfix
- if mode and cur in self.t:
- self._parts[t] = cur
+ self._tref = tmplspec.ref
+ self._parts = {'header': '', 'footer': '',
+ tmplspec.ref: tmplspec.ref,
+ 'docheader': '', 'docfooter': '',
+ 'separator': ''}
+ if tmplspec.mapfile:
+ # find correct templates for current mode, for backward
+ # compatibility with 'log -v/-q/--debug' using a mapfile
+ tmplmodes = [
+ (True, ''),
+ (self.ui.verbose, '_verbose'),
+ (self.ui.quiet, '_quiet'),
+ (self.ui.debugflag, '_debug'),
+ ]
+ for mode, postfix in tmplmodes:
+ for t in self._parts:
+ cur = t + postfix
+ if mode and cur in self.t:
+ self._parts[t] = cur
+ else:
+ partnames = [p for p in self._parts.keys() if p != tmplspec.ref]
+ m = formatter.templatepartsmap(tmplspec, self.t, partnames)
+ self._parts.update(m)
if self._parts['docheader']:
self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
@@ -1492,9 +1810,16 @@
props['ctx'] = ctx
props['repo'] = self.repo
props['ui'] = self.repo.ui
- props['index'] = next(self._counter)
+ props['index'] = index = next(self._counter)
props['revcache'] = {'copies': copies}
props['cache'] = self.cache
+ props = pycompat.strkwargs(props)
+
+ # write separator, which wouldn't work well with the header part below
+ # since there's inherently a conflict between header (across items) and
+ # separator (per item)
+ if self._parts['separator'] and index > 0:
+ self.ui.write(templater.stringify(self.t(self._parts['separator'])))
# write header
if self._parts['header']:
@@ -1507,7 +1832,7 @@
self.ui.write(h)
# write changeset metadata, then patch if requested
- key = self._parts['changeset']
+ key = self._parts[self._tref]
self.ui.write(templater.stringify(self.t(key, **props)))
self.showpatch(ctx, matchfn)
@@ -1516,18 +1841,25 @@
self.footer = templater.stringify(
self.t(self._parts['footer'], **props))
-def gettemplate(ui, tmpl, style):
- """
- Find the template matching the given template spec or style.
+def logtemplatespec(tmpl, mapfile):
+ if mapfile:
+ return formatter.templatespec('changeset', tmpl, mapfile)
+ else:
+ return formatter.templatespec('', tmpl, None)
+
+def _lookuplogtemplate(ui, tmpl, style):
+ """Find the template matching the given template spec or style
+
+ See formatter.lookuptemplate() for details.
"""
# ui settings
if not tmpl and not style: # template are stronger than style
tmpl = ui.config('ui', 'logtemplate')
if tmpl:
- return templater.unquotestring(tmpl), None
+ return logtemplatespec(templater.unquotestring(tmpl), None)
else:
- style = util.expandpath(ui.config('ui', 'style', ''))
+ style = util.expandpath(ui.config('ui', 'style'))
if not tmpl and style:
mapfile = style
@@ -1536,13 +1868,18 @@
or templater.templatepath(mapfile))
if mapname:
mapfile = mapname
- return None, mapfile
+ return logtemplatespec(None, mapfile)
if not tmpl:
- return None, None
+ return logtemplatespec(None, None)
return formatter.lookuptemplate(ui, 'changeset', tmpl)
+def makelogtemplater(ui, repo, tmpl, buffered=False):
+ """Create a changeset_templater from a literal template 'tmpl'"""
+ spec = logtemplatespec(tmpl, None)
+ return changeset_templater(ui, repo, spec, buffered=buffered)
+
def show_changeset(ui, repo, opts, buffered=False):
"""show one changeset using template or regular display.
@@ -1562,12 +1899,12 @@
if opts.get('template') == 'json':
return jsonchangeset(ui, repo, matchfn, opts, buffered)
- tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
-
- if not tmpl and not mapfile:
+ spec = _lookuplogtemplate(ui, opts.get('template'), opts.get('style'))
+
+ if not spec.ref and not spec.tmpl and not spec.mapfile:
return changeset_printer(ui, repo, matchfn, opts, buffered)
- return changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile, buffered)
+ return changeset_templater(ui, repo, spec, matchfn, opts, buffered)
def showmarker(fm, marker, index=None):
"""utility function to display obsolescence marker in a readable way
@@ -1607,7 +1944,7 @@
if rev in results:
ui.status(_("found revision %s from %s\n") %
(rev, util.datestr(results[rev])))
- return str(rev)
+ return '%d' % rev
raise error.Abort(_("revision matching date not found"))
@@ -1691,7 +2028,7 @@
last = filelog.rev(node)
# keep track of all ancestors of the file
- ancestors = set([filelog.linkrev(last)])
+ ancestors = {filelog.linkrev(last)}
# iterate from latest to oldest revision
for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
@@ -2155,7 +2492,7 @@
return templatekw.showgraphnode # fast path for "{graphnode}"
spec = templater.unquotestring(spec)
- templ = formatter.gettemplater(ui, 'graphnode', spec)
+ templ = formatter.maketemplater(ui, spec)
cache = {}
if isinstance(displayer, changeset_templater):
cache = displayer.cache # reuse cache of slow templates
@@ -2167,7 +2504,7 @@
props['repo'] = repo
props['ui'] = repo.ui
props['revcache'] = {}
- return templater.stringify(templ('graphnode', **props))
+ return templ.render(props)
return formatnode
def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
@@ -2278,7 +2615,7 @@
sub = wctx.sub(subpath)
try:
submatch = matchmod.subdirmatcher(subpath, match)
- if opts.get('subrepos'):
+ if opts.get(r'subrepos'):
bad.extend(sub.add(ui, submatch, prefix, False, **opts))
else:
bad.extend(sub.add(ui, submatch, prefix, True, **opts))
@@ -2286,7 +2623,7 @@
ui.status(_("skipping missing subrepository: %s\n")
% join(subpath))
- if not opts.get('dry_run'):
+ if not opts.get(r'dry_run'):
rejected = wctx.add(names, prefix)
bad.extend(f for f in rejected if f in match.files())
return bad
@@ -2308,7 +2645,7 @@
forgot = []
s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
- forget = sorted(s[0] + s[1] + s[3] + s[6])
+ forget = sorted(s.modified + s.added + s.deleted + s.clean)
if explicitonly:
forget = [f for f in forget if match.exact(f)]
@@ -2496,17 +2833,21 @@
return ret
-def cat(ui, repo, ctx, matcher, prefix, **opts):
+def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
err = 1
def write(path):
- fp = makefileobj(repo, opts.get('output'), ctx.node(),
- pathname=os.path.join(prefix, path))
- data = ctx[path].data()
- if opts.get('decode'):
- data = repo.wwritedata(path, data)
- fp.write(data)
- fp.close()
+ filename = None
+ if fntemplate:
+ filename = makefilename(repo, fntemplate, ctx.node(),
+ pathname=os.path.join(prefix, path))
+ with formatter.maybereopen(basefm, filename, opts) as fm:
+ data = ctx[path].data()
+ if opts.get('decode'):
+ data = repo.wwritedata(path, data)
+ fm.startitem()
+ fm.write('data', '%s', data)
+ fm.data(abspath=path, path=matcher.rel(path))
# Automation often uses hg cat on single files, so special case it
# for performance to avoid the cost of parsing the manifest.
@@ -2530,8 +2871,8 @@
try:
submatch = matchmod.subdirmatcher(subpath, matcher)
- if not sub.cat(submatch, os.path.join(prefix, sub._path),
- **opts):
+ if not sub.cat(submatch, basefm, fntemplate,
+ os.path.join(prefix, sub._path), **opts):
err = 0
except error.RepoLookupError:
ui.status(_("skipping missing subrepository: %s\n")
@@ -2579,176 +2920,148 @@
ui.note(_('amending changeset %s\n') % old)
base = old.p1()
- createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
-
- wlock = lock = newid = None
- try:
- wlock = repo.wlock()
- lock = repo.lock()
- with repo.transaction('amend') as tr:
- # See if we got a message from -m or -l, if not, open the editor
- # with the message of the changeset to amend
- message = logmessage(ui, opts)
- # ensure logfile does not conflict with later enforcement of the
- # message. potential logfile content has been processed by
- # `logmessage` anyway.
- opts.pop('logfile')
- # First, do a regular commit to record all changes in the working
- # directory (if there are any)
- ui.callhooks = False
- activebookmark = repo._bookmarks.active
- try:
- repo._bookmarks.active = None
- opts['message'] = 'temporary amend commit for %s' % old
- node = commit(ui, repo, commitfunc, pats, opts)
- finally:
- repo._bookmarks.active = activebookmark
- repo._bookmarks.recordchange(tr)
- ui.callhooks = True
- ctx = repo[node]
-
- # Participating changesets:
+
+ newid = None
+ with repo.wlock(), repo.lock(), repo.transaction('amend'):
+ # See if we got a message from -m or -l, if not, open the editor
+ # with the message of the changeset to amend
+ message = logmessage(ui, opts)
+ # ensure logfile does not conflict with later enforcement of the
+ # message. potential logfile content has been processed by
+ # `logmessage` anyway.
+ opts.pop('logfile')
+ # First, do a regular commit to record all changes in the working
+ # directory (if there are any)
+ ui.callhooks = False
+ activebookmark = repo._bookmarks.active
+ try:
+ repo._bookmarks.active = None
+ opts['message'] = 'temporary amend commit for %s' % old
+ node = commit(ui, repo, commitfunc, pats, opts)
+ finally:
+ repo._bookmarks.active = activebookmark
+ ui.callhooks = True
+ ctx = repo[node]
+
+ # Participating changesets:
+ #
+ # node/ctx o - new (intermediate) commit that contains changes
+ # | from working dir to go into amending commit
+ # | (or a workingctx if there were no changes)
+ # |
+ # old o - changeset to amend
+ # |
+ # base o - parent of amending changeset
+
+ # Update extra dict from amended commit (e.g. to preserve graft
+ # source)
+ extra.update(old.extra())
+
+ # Also update it from the intermediate commit or from the wctx
+ extra.update(ctx.extra())
+
+ if len(old.parents()) > 1:
+ # ctx.files() isn't reliable for merges, so fall back to the
+ # slower repo.status() method
+ files = set([fn for st in repo.status(base, old)[:3]
+ for fn in st])
+ else:
+ files = set(old.files())
+
+ # Second, we use either the commit we just did, or if there were no
+ # changes the parent of the working directory as the version of the
+ # files in the final amend commit
+ if node:
+ ui.note(_('copying changeset %s to %s\n') % (ctx, base))
+
+ user = ctx.user()
+ date = ctx.date()
+ # Recompute copies (avoid recording a -> b -> a)
+ copied = copies.pathcopies(base, ctx)
+ if old.p2:
+ copied.update(copies.pathcopies(old.p2(), ctx))
+
+ # Prune files which were reverted by the updates: if old
+ # introduced file X and our intermediate commit, node,
+ # renamed that file, then those two files are the same and
+ # we can discard X from our list of files. Likewise if X
+ # was deleted, it's no longer relevant
+ files.update(ctx.files())
+ files = [f for f in files if not samefile(f, ctx, base)]
+
+ def filectxfn(repo, ctx_, path):
+ try:
+ fctx = ctx[path]
+ flags = fctx.flags()
+ mctx = context.memfilectx(repo,
+ fctx.path(), fctx.data(),
+ islink='l' in flags,
+ isexec='x' in flags,
+ copied=copied.get(path))
+ return mctx
+ except KeyError:
+ return None
+ else:
+ ui.note(_('copying changeset %s to %s\n') % (old, base))
+
+ # Use version of files as in the old cset
+ def filectxfn(repo, ctx_, path):
+ try:
+ return old.filectx(path)
+ except KeyError:
+ return None
+
+ user = opts.get('user') or old.user()
+ date = opts.get('date') or old.date()
+ editform = mergeeditform(old, 'commit.amend')
+ editor = getcommiteditor(editform=editform,
+ **pycompat.strkwargs(opts))
+ if not message:
+ editor = getcommiteditor(edit=True, editform=editform)
+ message = old.description()
+
+ pureextra = extra.copy()
+ extra['amend_source'] = old.hex()
+
+ new = context.memctx(repo,
+ parents=[base.node(), old.p2().node()],
+ text=message,
+ files=files,
+ filectxfn=filectxfn,
+ user=user,
+ date=date,
+ extra=extra,
+ editor=editor)
+
+ newdesc = changelog.stripdesc(new.description())
+ if ((not node)
+ and newdesc == old.description()
+ and user == old.user()
+ and date == old.date()
+ and pureextra == old.extra()):
+ # nothing changed. continuing here would create a new node
+ # anyway because of the amend_source noise.
#
- # node/ctx o - new (intermediate) commit that contains changes
- # | from working dir to go into amending commit
- # | (or a workingctx if there were no changes)
- # |
- # old o - changeset to amend
- # |
- # base o - parent of amending changeset
-
- # Update extra dict from amended commit (e.g. to preserve graft
- # source)
- extra.update(old.extra())
-
- # Also update it from the intermediate commit or from the wctx
- extra.update(ctx.extra())
-
- if len(old.parents()) > 1:
- # ctx.files() isn't reliable for merges, so fall back to the
- # slower repo.status() method
- files = set([fn for st in repo.status(base, old)[:3]
- for fn in st])
+ # This not what we expect from amend.
+ return old.node()
+
+ ph = repo.ui.config('phases', 'new-commit', phases.draft)
+ try:
+ if opts.get('secret'):
+ commitphase = 'secret'
else:
- files = set(old.files())
-
- # Second, we use either the commit we just did, or if there were no
- # changes the parent of the working directory as the version of the
- # files in the final amend commit
+ commitphase = old.phase()
+ repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
+ newid = repo.commitctx(new)
+ finally:
+ repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
+ if newid != old.node():
+ # Reroute the working copy parent to the new changeset
+ repo.setparents(newid, nullid)
+ mapping = {old.node(): (newid,)}
if node:
- ui.note(_('copying changeset %s to %s\n') % (ctx, base))
-
- user = ctx.user()
- date = ctx.date()
- # Recompute copies (avoid recording a -> b -> a)
- copied = copies.pathcopies(base, ctx)
- if old.p2:
- copied.update(copies.pathcopies(old.p2(), ctx))
-
- # Prune files which were reverted by the updates: if old
- # introduced file X and our intermediate commit, node,
- # renamed that file, then those two files are the same and
- # we can discard X from our list of files. Likewise if X
- # was deleted, it's no longer relevant
- files.update(ctx.files())
- files = [f for f in files if not samefile(f, ctx, base)]
-
- def filectxfn(repo, ctx_, path):
- try:
- fctx = ctx[path]
- flags = fctx.flags()
- mctx = context.memfilectx(repo,
- fctx.path(), fctx.data(),
- islink='l' in flags,
- isexec='x' in flags,
- copied=copied.get(path))
- return mctx
- except KeyError:
- return None
- else:
- ui.note(_('copying changeset %s to %s\n') % (old, base))
-
- # Use version of files as in the old cset
- def filectxfn(repo, ctx_, path):
- try:
- return old.filectx(path)
- except KeyError:
- return None
-
- user = opts.get('user') or old.user()
- date = opts.get('date') or old.date()
- editform = mergeeditform(old, 'commit.amend')
- editor = getcommiteditor(editform=editform, **opts)
- if not message:
- editor = getcommiteditor(edit=True, editform=editform)
- message = old.description()
-
- pureextra = extra.copy()
- extra['amend_source'] = old.hex()
-
- new = context.memctx(repo,
- parents=[base.node(), old.p2().node()],
- text=message,
- files=files,
- filectxfn=filectxfn,
- user=user,
- date=date,
- extra=extra,
- editor=editor)
-
- newdesc = changelog.stripdesc(new.description())
- if ((not node)
- and newdesc == old.description()
- and user == old.user()
- and date == old.date()
- and pureextra == old.extra()):
- # nothing changed. continuing here would create a new node
- # anyway because of the amend_source noise.
- #
- # This not what we expect from amend.
- return old.node()
-
- ph = repo.ui.config('phases', 'new-commit', phases.draft)
- try:
- if opts.get('secret'):
- commitphase = 'secret'
- else:
- commitphase = old.phase()
- repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
- newid = repo.commitctx(new)
- finally:
- repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
- if newid != old.node():
- # Reroute the working copy parent to the new changeset
- repo.setparents(newid, nullid)
-
- # Move bookmarks from old parent to amend commit
- bms = repo.nodebookmarks(old.node())
- if bms:
- marks = repo._bookmarks
- for bm in bms:
- ui.debug('moving bookmarks %r from %s to %s\n' %
- (marks, old.hex(), hex(newid)))
- marks[bm] = newid
- marks.recordchange(tr)
- #commit the whole amend process
- if createmarkers:
- # mark the new changeset as successor of the rewritten one
- new = repo[newid]
- obs = [(old, (new,))]
- if node:
- obs.append((ctx, ()))
-
- obsolete.createmarkers(repo, obs)
- if not createmarkers and newid != old.node():
- # Strip the intermediate commit (if there was one) and the amended
- # commit
- if node:
- ui.note(_('stripping intermediate changeset %s\n') % ctx)
- ui.note(_('stripping amended changeset %s\n') % old)
- repair.strip(ui, repo, old.node(), topic='amend-backup')
- finally:
- lockmod.release(lock, wlock)
+ mapping[node] = ()
+ scmutil.cleanupnodes(repo, mapping, 'amend')
return newid
def commiteditor(repo, ctx, subs, editform=''):
@@ -2766,11 +3079,10 @@
forms.insert(0, 'changeset')
templatetext = None
while forms:
- tmpl = repo.ui.config('committemplate', '.'.join(forms))
- if tmpl:
- tmpl = templater.unquotestring(tmpl)
+ ref = '.'.join(forms)
+ if repo.ui.config('committemplate', ref):
templatetext = committext = buildcommittemplate(
- repo, ctx, subs, extramsg, tmpl)
+ repo, ctx, subs, extramsg, ref)
break
forms.pop()
else:
@@ -2808,15 +3120,12 @@
return text
-def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
+def buildcommittemplate(repo, ctx, subs, extramsg, ref):
ui = repo.ui
- tmpl, mapfile = gettemplate(ui, tmpl, None)
-
- t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
-
- for k, v in repo.ui.configitems('committemplate'):
- if k != 'changeset':
- t.t.cache[k] = v
+ spec = formatter.templatespec(ref, None, None)
+ t = changeset_templater(ui, repo, spec, None, {}, False)
+ t.t.cache.update((k, templater.unquotestring(v))
+ for k, v in repo.ui.configitems('committemplate'))
if not extramsg:
extramsg = '' # ensure that extramsg is string
@@ -2938,7 +3247,8 @@
targetsubs = sorted(s for s in wctx.substate if m(s))
if not m.always():
- for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
+ matcher = matchmod.badmatch(m, lambda x, y: False)
+ for abs in wctx.walk(matcher):
names[abs] = m.rel(abs), m.exact(abs)
# walk target manifest to fill `names`
@@ -3267,8 +3577,7 @@
if node != parent:
operation = 'revert'
reversehunks = repo.ui.configbool('experimental',
- 'revertalternateinteractivemode',
- True)
+ 'revertalternateinteractivemode')
if reversehunks:
diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
else:
@@ -3332,50 +3641,10 @@
if f in copied:
repo.dirstate.copy(copied[f], f)
-def command(table):
- """Returns a function object to be used as a decorator for making commands.
-
- This function receives a command table as its argument. The table should
- be a dict.
-
- The returned function can be used as a decorator for adding commands
- to that command table. This function accepts multiple arguments to define
- a command.
-
- The first argument is the command name.
-
- The options argument is an iterable of tuples defining command arguments.
- See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
-
- The synopsis argument defines a short, one line summary of how to use the
- command. This shows up in the help output.
-
- The norepo argument defines whether the command does not require a
- local repository. Most commands operate against a repository, thus the
- default is False.
-
- The optionalrepo argument defines whether the command optionally requires
- a local repository.
-
- The inferrepo argument defines whether to try to find a repository from the
- command line arguments. If True, arguments will be examined for potential
- repository locations. See ``findrepo()``. If a repository is found, it
- will be used.
- """
- def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
- inferrepo=False):
- def decorator(func):
- func.norepo = norepo
- func.optionalrepo = optionalrepo
- func.inferrepo = inferrepo
- if synopsis:
- table[name] = func, list(options), synopsis
- else:
- table[name] = func, list(options)
- return func
- return decorator
-
- return cmd
+class command(registrar.command):
+ def _doregister(self, func, name, *args, **kwargs):
+ func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
+ return super(command, self)._doregister(func, name, *args, **kwargs)
# a list of (ui, repo, otherpeer, opts, missing) functions called by
# commands.outgoing. "missing" is "missing" of the result of
@@ -3447,10 +3716,7 @@
for f, msg in afterresolvedstates:
if repo.vfs.exists(f):
return contmsg % msg, True
- workingctx = repo[None]
- dirty = any(repo.status()) or any(workingctx.sub(s).dirty()
- for s in workingctx.substate)
- if dirty:
+ if repo[None].dirty(missing=True, merge=False, branch=False):
return contmsg % _("hg commit"), False
return None, None
--- a/mercurial/color.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/color.py Wed Jul 19 07:51:41 2017 -0500
@@ -45,8 +45,6 @@
curses = None
_baseterminfoparams = {}
-_enabledbydefault = True
-
# start and stop parameters for effects
_effects = {
'none': 0,
@@ -185,10 +183,7 @@
def _modesetup(ui):
if ui.plain():
return None
- default = 'never'
- if _enabledbydefault:
- default = 'auto'
- config = ui.config('ui', 'color', default)
+ config = ui.config('ui', 'color')
if config == 'debug':
return 'debug'
@@ -208,31 +203,41 @@
formatted = (always or (encoding.environ.get('TERM') != 'dumb'
and ui.formatted()))
- mode = ui.config('color', 'mode', 'auto')
+ mode = ui.config('color', 'mode')
# If pager is active, color.pagermode overrides color.mode.
if getattr(ui, 'pageractive', False):
mode = ui.config('color', 'pagermode', mode)
realmode = mode
- if mode == 'auto':
- if pycompat.osname == 'nt':
- term = encoding.environ.get('TERM')
- # TERM won't be defined in a vanilla cmd.exe environment.
+ if pycompat.osname == 'nt':
+ from . import win32
+
+ term = encoding.environ.get('TERM')
+ # TERM won't be defined in a vanilla cmd.exe environment.
- # UNIX-like environments on Windows such as Cygwin and MSYS will
- # set TERM. They appear to make a best effort attempt at setting it
- # to something appropriate. However, not all environments with TERM
- # defined support ANSI. Since "ansi" could result in terminal
- # gibberish, we error on the side of selecting "win32". However, if
- # w32effects is not defined, we almost certainly don't support
- # "win32", so don't even try.
- if (term and 'xterm' in term) or not w32effects:
+ # UNIX-like environments on Windows such as Cygwin and MSYS will
+ # set TERM. They appear to make a best effort attempt at setting it
+ # to something appropriate. However, not all environments with TERM
+ # defined support ANSI.
+ ansienviron = term and 'xterm' in term
+
+ if mode == 'auto':
+ # Since "ansi" could result in terminal gibberish, we error on the
+ # side of selecting "win32". However, if w32effects is not defined,
+ # we almost certainly don't support "win32", so don't even try.
+ # w32ffects is not populated when stdout is redirected, so checking
+ # it first avoids win32 calls in a state known to error out.
+ if ansienviron or not w32effects or win32.enablevtmode():
realmode = 'ansi'
else:
realmode = 'win32'
- else:
- realmode = 'ansi'
+ # An empty w32effects is a clue that stdout is redirected, and thus
+ # cannot enable VT mode.
+ elif mode == 'ansi' and w32effects and not ansienviron:
+ win32.enablevtmode()
+ elif mode == 'auto':
+ realmode = 'ansi'
def modewarn():
# only warn if color.mode was explicitly set and we're in
@@ -442,10 +447,10 @@
'inverse': _COMMON_LVB_REVERSE_VIDEO, # double-byte charsets only
}
- passthrough = set([_FOREGROUND_INTENSITY,
- _BACKGROUND_INTENSITY,
- _COMMON_LVB_UNDERSCORE,
- _COMMON_LVB_REVERSE_VIDEO])
+ passthrough = {_FOREGROUND_INTENSITY,
+ _BACKGROUND_INTENSITY,
+ _COMMON_LVB_UNDERSCORE,
+ _COMMON_LVB_REVERSE_VIDEO}
stdout = _kernel32.GetStdHandle(
_STD_OUTPUT_HANDLE) # don't close the handle returned
--- a/mercurial/commands.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/commands.py Wed Jul 19 07:51:41 2017 -0500
@@ -11,6 +11,7 @@
import errno
import os
import re
+import sys
from .i18n import _
from .node import (
@@ -26,6 +27,7 @@
changegroup,
cmdutil,
copies,
+ debugcommands as debugcommandsmod,
destutil,
dirstateguard,
discovery,
@@ -33,6 +35,7 @@
error,
exchange,
extensions,
+ formatter,
graphmod,
hbisect,
help,
@@ -44,6 +47,7 @@
phases,
pycompat,
rcutil,
+ registrar,
revsetlang,
scmutil,
server,
@@ -58,14 +62,9 @@
release = lockmod.release
table = {}
-
-command = cmdutil.command(table)
-
-# label constants
-# until 3.5, bookmarks.current was the advertised name, not
-# bookmarks.active, so we must use both to avoid breaking old
-# custom styles
-activebookmarklabel = 'bookmarks.active bookmarks.current'
+table.update(debugcommandsmod.command._table)
+
+command = registrar.command(table)
# common command options
@@ -103,108 +102,21 @@
_("when to paginate (boolean, always, auto, or never)"), _('TYPE')),
]
-dryrunopts = [('n', 'dry-run', None,
- _('do not perform actions, just print output'))]
-
-remoteopts = [
- ('e', 'ssh', '',
- _('specify ssh command to use'), _('CMD')),
- ('', 'remotecmd', '',
- _('specify hg command to run on the remote side'), _('CMD')),
- ('', 'insecure', None,
- _('do not verify server certificate (ignoring web.cacerts config)')),
-]
-
-walkopts = [
- ('I', 'include', [],
- _('include names matching the given patterns'), _('PATTERN')),
- ('X', 'exclude', [],
- _('exclude names matching the given patterns'), _('PATTERN')),
-]
-
-commitopts = [
- ('m', 'message', '',
- _('use text as commit message'), _('TEXT')),
- ('l', 'logfile', '',
- _('read commit message from file'), _('FILE')),
-]
-
-commitopts2 = [
- ('d', 'date', '',
- _('record the specified date as commit date'), _('DATE')),
- ('u', 'user', '',
- _('record the specified user as committer'), _('USER')),
-]
-
-# hidden for now
-formatteropts = [
- ('T', 'template', '',
- _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
-]
-
-templateopts = [
- ('', 'style', '',
- _('display using template map file (DEPRECATED)'), _('STYLE')),
- ('T', 'template', '',
- _('display with template'), _('TEMPLATE')),
-]
-
-logopts = [
- ('p', 'patch', None, _('show patch')),
- ('g', 'git', None, _('use git extended diff format')),
- ('l', 'limit', '',
- _('limit number of changes displayed'), _('NUM')),
- ('M', 'no-merges', None, _('do not show merges')),
- ('', 'stat', None, _('output diffstat-style summary of changes')),
- ('G', 'graph', None, _("show the revision DAG")),
-] + templateopts
-
-diffopts = [
- ('a', 'text', None, _('treat all files as text')),
- ('g', 'git', None, _('use git extended diff format')),
- ('', 'binary', None, _('generate binary diffs in git mode (default)')),
- ('', 'nodates', None, _('omit dates from diff headers'))
-]
-
-diffwsopts = [
- ('w', 'ignore-all-space', None,
- _('ignore white space when comparing lines')),
- ('b', 'ignore-space-change', None,
- _('ignore changes in the amount of white space')),
- ('B', 'ignore-blank-lines', None,
- _('ignore changes whose lines are all blank')),
- ]
-
-diffopts2 = [
- ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
- ('p', 'show-function', None, _('show which function each change is in')),
- ('', 'reverse', None, _('produce a diff that undoes the changes')),
- ] + diffwsopts + [
- ('U', 'unified', '',
- _('number of lines of context to show'), _('NUM')),
- ('', 'stat', None, _('output diffstat-style summary of changes')),
- ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
-]
-
-mergetoolopts = [
- ('t', 'tool', '', _('specify merge tool')),
-]
-
-similarityopts = [
- ('s', 'similarity', '',
- _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
-]
-
-subrepoopts = [
- ('S', 'subrepos', None,
- _('recurse into subrepositories'))
-]
-
-debugrevlogopts = [
- ('c', 'changelog', False, _('open changelog')),
- ('m', 'manifest', False, _('open manifest')),
- ('', 'dir', '', _('open directory manifest')),
-]
+dryrunopts = cmdutil.dryrunopts
+remoteopts = cmdutil.remoteopts
+walkopts = cmdutil.walkopts
+commitopts = cmdutil.commitopts
+commitopts2 = cmdutil.commitopts2
+formatteropts = cmdutil.formatteropts
+templateopts = cmdutil.templateopts
+logopts = cmdutil.logopts
+diffopts = cmdutil.diffopts
+diffwsopts = cmdutil.diffwsopts
+diffopts2 = cmdutil.diffopts2
+mergetoolopts = cmdutil.mergetoolopts
+similarityopts = cmdutil.similarityopts
+subrepoopts = cmdutil.subrepoopts
+debugrevlogopts = cmdutil.debugrevlogopts
# Commands start here, listed alphabetically
@@ -255,7 +167,7 @@
Returns 0 if all files are successfully added.
"""
- m = scmutil.match(repo[None], pats, opts)
+ m = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts))
rejected = cmdutil.add(ui, repo, m, "", False, **opts)
return rejected and 1 or 0
@@ -325,6 +237,7 @@
Returns 0 if all files are successfully added.
"""
+ opts = pycompat.byteskwargs(opts)
try:
sim = float(opts.get('similarity') or 100)
except ValueError:
@@ -345,7 +258,8 @@
('d', 'date', None, _('list the date (short with -q)')),
('n', 'number', None, _('list the revision number (default)')),
('c', 'changeset', None, _('list the changeset')),
- ('l', 'line-number', None, _('show line number at the first appearance'))
+ ('l', 'line-number', None, _('show line number at the first appearance')),
+ ('', 'skip', [], _('revision to not display (EXPERIMENTAL)'), _('REV')),
] + diffwsopts + walkopts + formatteropts,
_('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
inferrepo=True)
@@ -368,6 +282,7 @@
Returns 0 on success.
"""
+ opts = pycompat.byteskwargs(opts)
if not pats:
raise error.Abort(_('at least one filename or pattern is required'))
@@ -378,7 +293,7 @@
ctx = scmutil.revsingle(repo, opts.get('rev'))
- fm = ui.formatter('annotate', opts)
+ rootfm = ui.formatter('annotate', opts)
if ui.quiet:
datefunc = util.shortdate
else:
@@ -388,7 +303,7 @@
if node is None:
return None
else:
- return fm.hexfunc(node)
+ return rootfm.hexfunc(node)
if opts.get('changeset'):
# omit "+" suffix which is appended to node hex
def formatrev(rev):
@@ -404,12 +319,12 @@
return '%d ' % rev
def formathex(hex):
if hex is None:
- return '%s+' % fm.hexfunc(ctx.p1().node())
+ return '%s+' % rootfm.hexfunc(ctx.p1().node())
else:
return '%s ' % hex
else:
- hexfn = fm.hexfunc
- formatrev = formathex = str
+ hexfn = rootfm.hexfunc
+ formatrev = formathex = pycompat.bytestr
opmap = [('user', ' ', lambda x: x[0].user(), ui.shortuser),
('number', ' ', lambda x: x[0].rev(), formatrev),
@@ -430,7 +345,7 @@
ui.pager('annotate')
- if fm.isplain():
+ if rootfm.isplain():
def makefunc(get, fmt):
return lambda x: fmt(get(x))
else:
@@ -450,15 +365,24 @@
follow = not opts.get('no_follow')
diffopts = patch.difffeatureopts(ui, opts, section='annotate',
whitespace=True)
+ skiprevs = opts.get('skip')
+ if skiprevs:
+ skiprevs = scmutil.revrange(repo, skiprevs)
+
for abs in ctx.walk(m):
fctx = ctx[abs]
- if not opts.get('text') and util.binary(fctx.data()):
- fm.plain(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
+ rootfm.startitem()
+ rootfm.data(abspath=abs, path=m.rel(abs))
+ if not opts.get('text') and fctx.isbinary():
+ rootfm.plain(_("%s: binary file\n")
+ % ((pats and m.rel(abs)) or abs))
continue
+ fm = rootfm.nested('lines')
lines = fctx.annotate(follow=follow, linenumber=linenumber,
- diffopts=diffopts)
+ skiprevs=skiprevs, diffopts=diffopts)
if not lines:
+ fm.end()
continue
formats = []
pieces = []
@@ -480,8 +404,9 @@
if not lines[-1][1].endswith('\n'):
fm.plain('\n')
-
- fm.end()
+ fm.end()
+
+ rootfm.end()
@command('archive',
[('', 'no-decode', None, _('do not pass files through decoders')),
@@ -532,6 +457,7 @@
Returns 0 on success.
'''
+ opts = pycompat.byteskwargs(opts)
ctx = scmutil.revsingle(repo, opts.get('rev'))
if not ctx:
raise error.Abort(_('no working directory: please specify a revision'))
@@ -627,6 +553,7 @@
release(lock, wlock)
def _dobackout(ui, repo, node=None, rev=None, **opts):
+ opts = pycompat.byteskwargs(opts)
if opts.get('commit') and opts.get('no_commit'):
raise error.Abort(_("cannot use --commit with --no-commit"))
if opts.get('merge') and opts.get('no_commit'):
@@ -702,7 +629,8 @@
def commitfunc(ui, repo, message, match, opts):
editform = 'backout'
- e = cmdutil.getcommiteditor(editform=editform, **opts)
+ e = cmdutil.getcommiteditor(editform=editform,
+ **pycompat.strkwargs(opts))
if not message:
# we don't translate commit messages
message = "Backed out changeset %s" % short(node)
@@ -834,10 +762,23 @@
bad = True
else:
reset = True
- elif extra or good + bad + skip + reset + extend + bool(command) > 1:
+ elif extra:
raise error.Abort(_('incompatible arguments'))
- cmdutil.checkunfinished(repo)
+ incompatibles = {
+ '--bad': bad,
+ '--command': bool(command),
+ '--extend': extend,
+ '--good': good,
+ '--reset': reset,
+ '--skip': skip,
+ }
+
+ enabled = [x for x in incompatibles if incompatibles[x]]
+
+ if len(enabled) > 1:
+ raise error.Abort(_('%s and %s are incompatible') %
+ tuple(sorted(enabled)[0:2]))
if reset:
hbisect.resetstate(repo)
@@ -865,6 +806,7 @@
"""common used update sequence"""
if noupdate:
return
+ cmdutil.checkunfinished(repo)
cmdutil.bailifchanged(repo)
return hg.clean(repo, node, show_stats=show_stats)
@@ -1002,51 +944,11 @@
hg book -f @
'''
- force = opts.get('force')
- rev = opts.get('rev')
- delete = opts.get('delete')
- rename = opts.get('rename')
- inactive = opts.get('inactive')
-
- def checkformat(mark):
- mark = mark.strip()
- if not mark:
- raise error.Abort(_("bookmark names cannot consist entirely of "
- "whitespace"))
- scmutil.checknewlabel(repo, mark, 'bookmark')
- return mark
-
- def checkconflict(repo, mark, cur, force=False, target=None):
- if mark in marks and not force:
- if target:
- if marks[mark] == target and target == cur:
- # re-activating a bookmark
- return
- anc = repo.changelog.ancestors([repo[target].rev()])
- bmctx = repo[marks[mark]]
- divs = [repo[b].node() for b in marks
- if b.split('@', 1)[0] == mark.split('@', 1)[0]]
-
- # allow resolving a single divergent bookmark even if moving
- # the bookmark across branches when a revision is specified
- # that contains a divergent bookmark
- if bmctx.rev() not in anc and target in divs:
- bookmarks.deletedivergent(repo, [target], mark)
- return
-
- deletefrom = [b for b in divs
- if repo[b].rev() in anc or b == target]
- bookmarks.deletedivergent(repo, deletefrom, mark)
- if bookmarks.validdest(repo, bmctx, repo[target]):
- ui.status(_("moving bookmark '%s' forward from %s\n") %
- (mark, short(bmctx.node())))
- return
- raise error.Abort(_("bookmark '%s' already exists "
- "(use -f to force)") % mark)
- if ((mark in repo.branchmap() or mark == repo.dirstate.branch())
- and not force):
- raise error.Abort(
- _("a bookmark cannot have the name of an existing branch"))
+ force = opts.get(r'force')
+ rev = opts.get(r'rev')
+ delete = opts.get(r'delete')
+ rename = opts.get(r'rename')
+ inactive = opts.get(r'inactive')
if delete and rename:
raise error.Abort(_("--delete and --rename are incompatible"))
@@ -1058,91 +960,26 @@
raise error.Abort(_("bookmark name required"))
if delete or rename or names or inactive:
- wlock = lock = tr = None
- try:
- wlock = repo.wlock()
- lock = repo.lock()
- cur = repo.changectx('.').node()
- marks = repo._bookmarks
+ with repo.wlock(), repo.lock(), repo.transaction('bookmark') as tr:
if delete:
- tr = repo.transaction('bookmark')
- for mark in names:
- if mark not in marks:
- raise error.Abort(_("bookmark '%s' does not exist") %
- mark)
- if mark == repo._activebookmark:
- bookmarks.deactivate(repo)
- del marks[mark]
-
+ bookmarks.delete(repo, tr, names)
elif rename:
- tr = repo.transaction('bookmark')
if not names:
raise error.Abort(_("new bookmark name required"))
elif len(names) > 1:
raise error.Abort(_("only one new bookmark name allowed"))
- mark = checkformat(names[0])
- if rename not in marks:
- raise error.Abort(_("bookmark '%s' does not exist")
- % rename)
- checkconflict(repo, mark, cur, force)
- marks[mark] = marks[rename]
- if repo._activebookmark == rename and not inactive:
- bookmarks.activate(repo, mark)
- del marks[rename]
+ bookmarks.rename(repo, tr, rename, names[0], force, inactive)
elif names:
- tr = repo.transaction('bookmark')
- newact = None
- for mark in names:
- mark = checkformat(mark)
- if newact is None:
- newact = mark
- if inactive and mark == repo._activebookmark:
- bookmarks.deactivate(repo)
- return
- tgt = cur
- if rev:
- tgt = scmutil.revsingle(repo, rev).node()
- checkconflict(repo, mark, cur, force, tgt)
- marks[mark] = tgt
- if not inactive and cur == marks[newact] and not rev:
- bookmarks.activate(repo, newact)
- elif cur != tgt and newact == repo._activebookmark:
- bookmarks.deactivate(repo)
+ bookmarks.addbookmarks(repo, tr, names, rev, force, inactive)
elif inactive:
- if len(marks) == 0:
+ if len(repo._bookmarks) == 0:
ui.status(_("no bookmarks set\n"))
elif not repo._activebookmark:
ui.status(_("no active bookmark\n"))
else:
bookmarks.deactivate(repo)
- if tr is not None:
- marks.recordchange(tr)
- tr.close()
- finally:
- lockmod.release(tr, lock, wlock)
else: # show bookmarks
- fm = ui.formatter('bookmarks', opts)
- hexfn = fm.hexfunc
- marks = repo._bookmarks
- if len(marks) == 0 and fm.isplain():
- ui.status(_("no bookmarks set\n"))
- for bmark, n in sorted(marks.iteritems()):
- active = repo._activebookmark
- if bmark == active:
- prefix, label = '*', activebookmarklabel
- else:
- prefix, label = ' ', ''
-
- fm.startitem()
- if not ui.quiet:
- fm.plain(' %s ' % prefix, label=label)
- fm.write('bookmark', '%s', bmark, label=label)
- pad = " " * (25 - encoding.colwidth(bmark))
- fm.condwrite(not ui.quiet, 'rev node', pad + ' %d:%s',
- repo.changelog.rev(n), hexfn(n), label=label)
- fm.data(active=(bmark == active))
- fm.plain('\n')
- fm.end()
+ bookmarks.printbookmarks(ui, repo, **opts)
@command('branch',
[('f', 'force', None,
@@ -1178,6 +1015,7 @@
Returns 0 on success.
"""
+ opts = pycompat.byteskwargs(opts)
if label:
label = label.strip()
@@ -1226,6 +1064,7 @@
Returns 0.
"""
+ opts = pycompat.byteskwargs(opts)
ui.pager('branches')
fm = ui.formatter('branches', opts)
hexfunc = fm.hexfunc
@@ -1309,6 +1148,7 @@
Returns 0 on success, 1 if no changes found.
"""
+ opts = pycompat.byteskwargs(opts)
revs = None
if 'rev' in opts:
revstrings = opts['rev']
@@ -1339,8 +1179,6 @@
base = ['null']
else:
base = scmutil.revrange(repo, opts.get('base'))
- # TODO: get desired bundlecaps from command line.
- bundlecaps = None
if cgversion not in changegroup.supportedoutgoingversions(repo):
raise error.Abort(_("repository does not support bundle version %s") %
cgversion)
@@ -1352,10 +1190,6 @@
common = [repo.lookup(rev) for rev in base]
heads = revs and map(repo.lookup, revs) or None
outgoing = discovery.outgoing(repo, common, heads)
- cg = changegroup.getchangegroup(repo, 'bundle', outgoing,
- bundlecaps=bundlecaps,
- version=cgversion)
- outgoing = None
else:
dest = ui.expandpath(dest or 'default-push', dest or 'default')
dest, branches = hg.parseurl(dest, opts.get('branch'))
@@ -1366,10 +1200,9 @@
onlyheads=heads,
force=opts.get('force'),
portable=True)
- cg = changegroup.getlocalchangegroup(repo, 'bundle', outgoing,
- bundlecaps, version=cgversion)
- if not cg:
- scmutil.nochangesfound(ui, repo, outgoing and outgoing.excluded)
+
+ if not outgoing.missing:
+ scmutil.nochangesfound(ui, repo, not base and outgoing.excluded)
return 1
if cgversion == '01': #bundle1
@@ -1392,15 +1225,22 @@
if complevel is not None:
compopts['level'] = complevel
- bundle2.writebundle(ui, cg, fname, bversion, compression=bcompression,
- compopts=compopts)
+
+ contentopts = {'cg.version': cgversion}
+ if repo.ui.configbool('experimental', 'evolution.bundle-obsmarker'):
+ contentopts['obsolescence'] = True
+ if repo.ui.configbool('experimental', 'bundle-phases'):
+ contentopts['phases'] = True
+ bundle2.writenewbundle(ui, repo, 'bundle', fname, bversion, outgoing,
+ contentopts, compression=bcompression,
+ compopts=compopts)
@command('cat',
[('o', 'output', '',
_('print output to file with formatted name'), _('FORMAT')),
('r', 'rev', '', _('print the given revision'), _('REV')),
('', 'decode', None, _('apply any matching decode filter')),
- ] + walkopts,
+ ] + walkopts + formatteropts,
_('[OPTION]... FILE...'),
inferrepo=True)
def cat(ui, repo, file1, *pats, **opts):
@@ -1426,9 +1266,17 @@
"""
ctx = scmutil.revsingle(repo, opts.get('rev'))
m = scmutil.match(ctx, (file1,) + pats, opts)
-
- ui.pager('cat')
- return cmdutil.cat(ui, repo, ctx, m, '', **opts)
+ fntemplate = opts.pop('output', '')
+ if cmdutil.isstdiofilename(fntemplate):
+ fntemplate = ''
+
+ if fntemplate:
+ fm = formatter.nullformatter(ui, 'cat')
+ else:
+ ui.pager('cat')
+ fm = ui.formatter('cat', opts)
+ with fm:
+ return cmdutil.cat(ui, repo, ctx, m, fm, fntemplate, '', **opts)
@command('^clone',
[('U', 'noupdate', None, _('the clone will include an empty working '
@@ -1549,6 +1397,7 @@
Returns 0 on success.
"""
+ opts = pycompat.byteskwargs(opts)
if opts.get('noupdate') and opts.get('updaterev'):
raise error.Abort(_("cannot specify both --noupdate and --updaterev"))
@@ -1639,16 +1488,16 @@
release(lock, wlock)
def _docommit(ui, repo, *pats, **opts):
- opts = pycompat.byteskwargs(opts)
- if opts.get('interactive'):
- opts.pop('interactive')
+ if opts.get(r'interactive'):
+ opts.pop(r'interactive')
ret = cmdutil.dorecord(ui, repo, commit, None, False,
cmdutil.recordfilter, *pats,
- **pycompat.strkwargs(opts))
+ **opts)
# ret can be 0 (no changes to record) or the value returned by
# commit(), 1 if nothing changed or None on success.
return 1 if ret == 0 else ret
+ opts = pycompat.byteskwargs(opts)
if opts.get('subrepos'):
if opts.get('amend'):
raise error.Abort(_('cannot amend with --subrepos'))
@@ -1769,6 +1618,7 @@
"""
+ opts = pycompat.byteskwargs(opts)
if opts.get('edit') or opts.get('local') or opts.get('global'):
if opts.get('local') and opts.get('global'):
raise error.Abort(_("can't use --local and --global together"))
@@ -1871,9 +1721,46 @@
Returns 0 on success, 1 if errors are encountered.
"""
+ opts = pycompat.byteskwargs(opts)
with repo.wlock(False):
return cmdutil.copy(ui, repo, pats, opts)
+@command('debugcommands', [], _('[COMMAND]'), norepo=True)
+def debugcommands(ui, cmd='', *args):
+ """list all available commands and options"""
+ for cmd, vals in sorted(table.iteritems()):
+ cmd = cmd.split('|')[0].strip('^')
+ opts = ', '.join([i[1] for i in vals[1]])
+ ui.write('%s: %s\n' % (cmd, opts))
+
+@command('debugcomplete',
+ [('o', 'options', None, _('show the command options'))],
+ _('[-o] CMD'),
+ norepo=True)
+def debugcomplete(ui, cmd='', **opts):
+ """returns the completion list associated with the given command"""
+
+ if opts.get('options'):
+ options = []
+ otables = [globalopts]
+ if cmd:
+ aliases, entry = cmdutil.findcmd(cmd, table, False)
+ otables.append(entry[1])
+ for t in otables:
+ for o in t:
+ if "(DEPRECATED)" in o[3]:
+ continue
+ if o[0]:
+ options.append('-%s' % o[0])
+ options.append('--%s' % o[1])
+ ui.write("%s\n" % "\n".join(options))
+ return
+
+ cmdlist, unused_allcmds = cmdutil.findpossible(cmd, table)
+ if ui.verbose:
+ cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
+ ui.write("%s\n" % "\n".join(sorted(cmdlist)))
+
@command('^diff',
[('r', 'rev', [], _('revision'), _('REV')),
('c', 'change', '', _('change made by revision'), _('REV'))
@@ -1938,6 +1825,7 @@
Returns 0 on success.
"""
+ opts = pycompat.byteskwargs(opts)
revs = opts.get('rev')
change = opts.get('change')
stat = opts.get('stat')
@@ -2041,7 +1929,7 @@
else:
ui.note(_('exporting patch:\n'))
ui.pager('export')
- cmdutil.export(repo, revs, template=opts.get('output'),
+ cmdutil.export(repo, revs, fntemplate=opts.get('output'),
switch_parent=opts.get('switch_parent'),
opts=patch.diffallopts(ui, opts))
@@ -2094,7 +1982,9 @@
Returns 0 if a match is found, 1 otherwise.
"""
- ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
+
+ opts = pycompat.byteskwargs(opts)
+ ctx = scmutil.revsingle(repo, opts.get('rev'), None)
end = '\n'
if opts.get('print0'):
@@ -2136,6 +2026,7 @@
Returns 0 on success.
"""
+ opts = pycompat.byteskwargs(opts)
if not pats:
raise error.Abort(_('no files specified'))
@@ -2220,6 +2111,7 @@
return _dograft(ui, repo, *revs, **opts)
def _dograft(ui, repo, *revs, **opts):
+ opts = pycompat.byteskwargs(opts)
if revs and opts.get('rev'):
ui.warn(_('warning: inconsistent use of --rev might give unexpected '
'revision ordering!\n'))
@@ -2232,7 +2124,8 @@
if not opts.get('date') and opts.get('currentdate'):
opts['date'] = "%d %d" % util.makedate()
- editor = cmdutil.getcommiteditor(editform='graft', **opts)
+ editor = cmdutil.getcommiteditor(editform='graft',
+ **pycompat.strkwargs(opts))
cont = False
if opts.get('continue'):
@@ -2439,6 +2332,7 @@
Returns 0 if a match is found, 1 otherwise.
"""
+ opts = pycompat.byteskwargs(opts)
reflags = re.M
if opts.get('ignore_case'):
reflags |= re.I
@@ -2685,6 +2579,7 @@
Returns 0 if matching heads are found, 1 if not.
"""
+ opts = pycompat.byteskwargs(opts)
start = None
if 'rev' in opts:
start = scmutil.revsingle(repo, opts['rev'], None).node()
@@ -2743,7 +2638,7 @@
Returns 0 if successful.
"""
- keep = opts.get('system') or []
+ keep = opts.get(r'system') or []
if len(keep) == 0:
if pycompat.sysplatform.startswith('win'):
keep.append('windows')
@@ -2757,7 +2652,8 @@
if ui.verbose:
keep.append('verbose')
- formatted = help.formattedhelp(ui, name, keep=keep, **opts)
+ commands = sys.modules[__name__]
+ formatted = help.formattedhelp(ui, commands, name, keep=keep, **opts)
ui.pager('help')
ui.write(formatted)
@@ -2770,7 +2666,7 @@
('b', 'branch', None, _('show branch')),
('t', 'tags', None, _('show tags')),
('B', 'bookmarks', None, _('show bookmarks')),
- ] + remoteopts,
+ ] + remoteopts + formatteropts,
_('[-nibtB] [-r REV] [SOURCE]'),
optionalrepo=True)
def identify(ui, repo, source=None, rev=None,
@@ -2810,6 +2706,7 @@
Returns 0 if successful.
"""
+ opts = pycompat.byteskwargs(opts)
if not repo and not source:
raise error.Abort(_("there is no Mercurial repository here "
"(.hg not found)"))
@@ -2828,6 +2725,9 @@
repo = peer.local()
revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
+ fm = ui.formatter('identify', opts)
+ fm.startitem()
+
if not repo:
if num or branch or tags:
raise error.Abort(
@@ -2838,8 +2738,10 @@
rev = "tip"
remoterev = peer.lookup(rev)
+ hexrev = hexfunc(remoterev)
if default or id:
- output = [hexfunc(remoterev)]
+ output = [hexrev]
+ fm.data(id=hexrev)
def getbms():
bms = []
@@ -2851,13 +2753,17 @@
return sorted(bms)
+ bms = getbms()
if bookmarks:
- output.extend(getbms())
+ output.extend(bms)
elif default and not ui.quiet:
# multiple bookmarks for a single parent separated by '/'
- bm = '/'.join(getbms())
+ bm = '/'.join(bms)
if bm:
output.append(bm)
+
+ fm.data(node=hex(remoterev))
+ fm.data(bookmarks=fm.formatlist(bms, name='bookmark'))
else:
ctx = scmutil.revsingle(repo, rev, None)
@@ -2868,22 +2774,35 @@
for p in parents:
taglist.extend(p.tags())
- changed = ""
- if default or id or num:
- if (any(repo.status())
- or any(ctx.sub(s).dirty() for s in ctx.substate)):
- changed = '+'
+ dirty = ""
+ if ctx.dirty(missing=True, merge=False, branch=False):
+ dirty = '+'
+ fm.data(dirty=dirty)
+
+ hexoutput = [hexfunc(p.node()) for p in parents]
if default or id:
- output = ["%s%s" %
- ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
+ output = ["%s%s" % ('+'.join(hexoutput), dirty)]
+ fm.data(id="%s%s" % ('+'.join(hexoutput), dirty))
+
if num:
- output.append("%s%s" %
- ('+'.join([str(p.rev()) for p in parents]), changed))
+ numoutput = ["%d" % p.rev() for p in parents]
+ output.append("%s%s" % ('+'.join(numoutput), dirty))
+
+ fn = fm.nested('parents')
+ for p in parents:
+ fn.startitem()
+ fn.data(rev=p.rev())
+ fn.data(node=p.hex())
+ fn.context(ctx=p)
+ fn.end()
else:
+ hexoutput = hexfunc(ctx.node())
if default or id:
- output = [hexfunc(ctx.node())]
+ output = [hexoutput]
+ fm.data(id=hexoutput)
+
if num:
- output.append(str(ctx.rev()))
+ output.append(pycompat.bytestr(ctx.rev()))
taglist = ctx.tags()
if default and not ui.quiet:
@@ -2910,7 +2829,14 @@
if bookmarks:
output.extend(ctx.bookmarks())
- ui.write("%s\n" % ' '.join(output))
+ fm.data(node=ctx.hex())
+ fm.data(branch=ctx.branch())
+ fm.data(tags=fm.formatlist(taglist, name='tag', sep=':'))
+ fm.data(bookmarks=fm.formatlist(ctx.bookmarks(), name='bookmark'))
+ fm.context(ctx=ctx)
+
+ fm.plain("%s\n" % ' '.join(output))
+ fm.end()
@command('import|patch',
[('p', 'strip', 1,
@@ -3033,6 +2959,7 @@
Returns 0 on success, 1 on partial success (see --partial).
"""
+ opts = pycompat.byteskwargs(opts)
if not patch1:
raise error.Abort(_('need at least one patch to import'))
@@ -3237,6 +3164,7 @@
Returns 0 on success.
"""
+ opts = pycompat.byteskwargs(opts)
hg.peer(ui, opts, ui.expandpath(dest), create=True)
@command('locate',
@@ -3267,6 +3195,7 @@
Returns 0 if a match is found, 1 otherwise.
"""
+ opts = pycompat.byteskwargs(opts)
if opts.get('print0'):
end = '\0'
else:
@@ -3473,6 +3402,7 @@
Returns 0 on success.
"""
+ opts = pycompat.byteskwargs(opts)
fm = ui.formatter('manifest', opts)
if opts.get('all'):
@@ -3551,6 +3481,7 @@
Returns 0 on success, 1 if there are unresolved files.
"""
+ opts = pycompat.byteskwargs(opts)
if opts.get('rev') and node:
raise error.Abort(_("please specify just one revision"))
if not node:
@@ -3630,6 +3561,7 @@
Returns 0 if there are outgoing changes, 1 otherwise.
"""
+ opts = pycompat.byteskwargs(opts)
if opts.get('graph'):
cmdutil.checkunsupportedgraphflags([], opts)
o, other = hg._outgoing(ui, repo, dest, opts)
@@ -3687,6 +3619,7 @@
Returns 0 on success.
"""
+ opts = pycompat.byteskwargs(opts)
ctx = scmutil.revsingle(repo, opts.get('rev'), None)
if file_:
@@ -3749,6 +3682,8 @@
Returns 0 on success.
"""
+
+ opts = pycompat.byteskwargs(opts)
ui.pager('paths')
if search:
pathitems = [(name, path) for name, path in ui.paths.iteritems()
@@ -3811,6 +3746,7 @@
(For more information about the phases concept, see :hg:`help phases`.)
"""
+ opts = pycompat.byteskwargs(opts)
# search for a unique phase argument
targetphase = None
for idx, name in enumerate(phases.phasenames):
@@ -3943,6 +3879,7 @@
Returns 0 on success, 1 if an update had unresolved files.
"""
+ opts = pycompat.byteskwargs(opts)
if ui.configbool('commands', 'update.requiredest') and opts.get('update'):
msg = _('update destination required by configuration')
hint = _('use hg pull followed by hg update DEST')
@@ -4073,6 +4010,7 @@
Returns 0 if push was successful, 1 if nothing to push.
"""
+ opts = pycompat.byteskwargs(opts)
if opts.get('bookmark'):
ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
for b in opts['bookmark']:
@@ -4198,6 +4136,7 @@
Returns 0 on success, 1 if any warnings encountered.
"""
+ opts = pycompat.byteskwargs(opts)
after, force = opts.get('after'), opts.get('force')
if not pats and not after:
raise error.Abort(_('no files specified'))
@@ -4227,6 +4166,7 @@
Returns 0 on success, 1 if errors are encountered.
"""
+ opts = pycompat.byteskwargs(opts)
with repo.wlock(False):
return cmdutil.copy(ui, repo, pats, opts, rename=True)
@@ -4281,6 +4221,7 @@
Returns 0 on success, 1 if any files fail a resolve attempt.
"""
+ opts = pycompat.byteskwargs(opts)
flaglist = 'all mark unmark list no_status'.split()
all, mark, unmark, show, nostatus = \
[opts.get(o) for o in flaglist]
@@ -4583,11 +4524,11 @@
Returns 0 on success, 1 if no rollback data is available.
"""
- if not ui.configbool('ui', 'rollback', True):
+ if not ui.configbool('ui', 'rollback'):
raise error.Abort(_('rollback is disabled because it is unsafe'),
hint=('see `hg help -v rollback` for information'))
- return repo.rollback(dryrun=opts.get('dry_run'),
- force=opts.get('force'))
+ return repo.rollback(dryrun=opts.get(r'dry_run'),
+ force=opts.get(r'force'))
@command('root', [])
def root(ui, repo):
@@ -4652,6 +4593,7 @@
Returns 0 on success.
"""
+ opts = pycompat.byteskwargs(opts)
if opts["stdio"] and opts["cmdserver"]:
raise error.Abort(_("cannot use --stdio with --cmdserver"))
@@ -4675,6 +4617,7 @@
('u', 'unknown', None, _('show only unknown (not tracked) files')),
('i', 'ignored', None, _('show only ignored files')),
('n', 'no-status', None, _('hide status prefix')),
+ ('t', 'terse', '', _('show the terse output (EXPERIMENTAL)')),
('C', 'copies', None, _('show source of copied files')),
('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
('', 'rev', [], _('show difference from revision'), _('REV')),
@@ -4720,6 +4663,16 @@
.. container:: verbose
+ The -t/--terse option abbreviates the output by showing directory name
+ if all the files in it share the same status. The option expects a value
+ which can be a string formed by using 'm', 'a', 'r', 'd', 'u', 'i', 'c'
+ where 'm' stands for 'modified', 'a' for 'added', 'r' for 'removed',
+ 'd' for 'deleted', 'u' for 'unknown', 'i' for 'ignored' and 'c' for 'clean'.
+
+ It terses the output of only those statuses which are passed. The ignored
+ files are not considered while tersing unless 'i' is present in the --terse
+ value or the --ignored option is used.
+
Examples:
- show changes in the working directory relative to a
@@ -4746,10 +4699,14 @@
opts = pycompat.byteskwargs(opts)
revs = opts.get('rev')
change = opts.get('change')
+ terse = opts.get('terse')
if revs and change:
msg = _('cannot specify --rev and --change at the same time')
raise error.Abort(msg)
+ elif revs and terse:
+ msg = _('cannot use --terse with --rev')
+ raise error.Abort(msg)
elif change:
node2 = scmutil.revsingle(repo, change, None).node()
node1 = repo[node2].p1().node()
@@ -4770,6 +4727,7 @@
show = [k for k in states if opts.get(k)]
if opts.get('all'):
show += ui.quiet and (states[:4] + ['clean']) or states
+
if not show:
if ui.quiet:
show = states[:4]
@@ -4780,6 +4738,9 @@
stat = repo.status(node1, node2, m,
'ignored' in show, 'clean' in show, 'unknown' in show,
opts.get('subrepos'))
+ if terse:
+ stat = cmdutil.tersestatus(repo.root, stat, terse,
+ repo.dirstate._ignore, opts.get('ignored'))
changestates = zip(states, pycompat.iterbytestr('MAR!?IC'), stat)
if (opts.get('all') or opts.get('copies')
@@ -4817,6 +4778,7 @@
Returns 0 on success.
"""
+ opts = pycompat.byteskwargs(opts)
ui.pager('summary')
ctx = repo[None]
parents = ctx.parents()
@@ -4830,9 +4792,9 @@
s = ' '.join(e.recordtypes)
ui.warn(
_('warning: merge state has unsupported record types: %s\n') % s)
- unresolved = 0
+ unresolved = []
else:
- unresolved = [f for f in ms if ms[f] == 'u']
+ unresolved = list(ms.unresolved())
for p in parents:
# label with log.changeset (instead of log.parent) since this
@@ -4875,10 +4837,10 @@
ui.write(_('bookmarks:'), label='log.bookmark')
if active is not None:
if active in marks:
- ui.write(' *' + active, label=activebookmarklabel)
+ ui.write(' *' + active, label=bookmarks.activebookmarklabel)
marks.remove(active)
else:
- ui.write(' [%s]' % active, label=activebookmarklabel)
+ ui.write(' [%s]' % active, label=bookmarks.activebookmarklabel)
for m in marks:
ui.write(' ' + m, label='log.bookmark')
ui.write('\n', label='log.bookmark')
@@ -5125,6 +5087,7 @@
Returns 0 on success.
"""
+ opts = pycompat.byteskwargs(opts)
wlock = lock = None
try:
wlock = repo.wlock()
@@ -5189,7 +5152,8 @@
editform = 'tag.remove'
else:
editform = 'tag.add'
- editor = cmdutil.getcommiteditor(editform=editform, **opts)
+ editor = cmdutil.getcommiteditor(editform=editform,
+ **pycompat.strkwargs(opts))
# don't allow tagging the null rev
if (not opts.get('remove') and
@@ -5212,6 +5176,7 @@
Returns 0 on success.
"""
+ opts = pycompat.byteskwargs(opts)
ui.pager('tags')
fm = ui.formatter('tags', opts)
hexfunc = fm.hexfunc
@@ -5256,6 +5221,7 @@
Returns 0 on success.
"""
+ opts = pycompat.byteskwargs(opts)
displayer = cmdutil.show_changeset(ui, repo, opts)
displayer.show(repo['tip'])
displayer.close()
@@ -5277,33 +5243,28 @@
for fname in fnames:
f = hg.openpath(ui, fname)
gen = exchange.readbundle(ui, f, fname)
- if isinstance(gen, bundle2.unbundle20):
- tr = repo.transaction('unbundle')
- try:
- op = bundle2.applybundle(repo, gen, tr, source='unbundle',
- url='bundle:' + fname)
- tr.close()
- except error.BundleUnknownFeatureError as exc:
- raise error.Abort(_('%s: unknown bundle feature, %s')
- % (fname, exc),
- hint=_("see https://mercurial-scm.org/"
- "wiki/BundleFeature for more "
- "information"))
- finally:
- if tr:
- tr.release()
- changes = [r.get('return', 0)
- for r in op.records['changegroup']]
- modheads = changegroup.combineresults(changes)
- elif isinstance(gen, streamclone.streamcloneapplier):
+ if isinstance(gen, streamclone.streamcloneapplier):
raise error.Abort(
_('packed bundles cannot be applied with '
'"hg unbundle"'),
hint=_('use "hg debugapplystreamclonebundle"'))
- else:
- modheads = gen.apply(repo, 'unbundle', 'bundle:' + fname)
-
- return postincoming(ui, repo, modheads, opts.get('update'), None, None)
+ url = 'bundle:' + fname
+ try:
+ txnname = 'unbundle'
+ if not isinstance(gen, bundle2.unbundle20):
+ txnname = 'unbundle\n%s' % util.hidepassword(url)
+ with repo.transaction(txnname) as tr:
+ op = bundle2.applybundle(repo, gen, tr, source='unbundle',
+ url=url)
+ except error.BundleUnknownFeatureError as exc:
+ raise error.Abort(
+ _('%s: unknown bundle feature, %s') % (fname, exc),
+ hint=_("see https://mercurial-scm.org/"
+ "wiki/BundleFeature for more "
+ "information"))
+ modheads = bundle2.combinechangegroupresults(op)
+
+ return postincoming(ui, repo, modheads, opts.get(r'update'), None, None)
@command('^update|up|checkout|co',
[('C', 'clean', None, _('discard uncommitted changes (no backup)')),
@@ -5430,6 +5391,7 @@
@command('version', [] + formatteropts, norepo=True)
def version_(ui, **opts):
"""output version and copyright information"""
+ opts = pycompat.byteskwargs(opts)
if ui.verbose:
ui.pager('version')
fm = ui.formatter("version", opts)
--- a/mercurial/commandserver.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/commandserver.py Wed Jul 19 07:51:41 2017 -0500
@@ -11,7 +11,6 @@
import gc
import os
import random
-import select
import signal
import socket
import struct
@@ -22,6 +21,7 @@
encoding,
error,
pycompat,
+ selectors2,
util,
)
@@ -156,7 +156,7 @@
self.cwd = pycompat.getcwd()
# developer config: cmdserver.log
- logpath = ui.config("cmdserver", "log", None)
+ logpath = ui.config("cmdserver", "log")
if logpath:
global logfile
if logpath == '-':
@@ -409,14 +409,13 @@
def bindsocket(self, sock, address):
util.bindunixsocket(sock, address)
+ sock.listen(socket.SOMAXCONN)
+ self.ui.status(_('listening at %s\n') % address)
+ self.ui.flush() # avoid buffering of status message
def unlinksocket(self, address):
os.unlink(address)
- def printbanner(self, address):
- self.ui.status(_('listening at %s\n') % address)
- self.ui.flush() # avoid buffering of status message
-
def shouldexit(self):
"""True if server should shut down; checked per pollinterval"""
return False
@@ -452,10 +451,8 @@
def init(self):
self._sock = socket.socket(socket.AF_UNIX)
self._servicehandler.bindsocket(self._sock, self.address)
- self._sock.listen(socket.SOMAXCONN)
o = signal.signal(signal.SIGCHLD, self._sigchldhandler)
self._oldsigchldhandler = o
- self._servicehandler.printbanner(self.address)
self._socketunlinked = False
def _unlinksocket(self):
@@ -479,6 +476,8 @@
def _mainloop(self):
exiting = False
h = self._servicehandler
+ selector = selectors2.DefaultSelector()
+ selector.register(self._sock, selectors2.EVENT_READ)
while True:
if not exiting and h.shouldexit():
# clients can no longer connect() to the domain socket, so
@@ -488,15 +487,15 @@
# waiting for recv() will receive ECONNRESET.
self._unlinksocket()
exiting = True
+ ready = selector.select(timeout=h.pollinterval)
+ if not ready:
+ # only exit if we completed all queued requests
+ if exiting:
+ break
+ continue
try:
- ready = select.select([self._sock], [], [], h.pollinterval)[0]
- if not ready:
- # only exit if we completed all queued requests
- if exiting:
- break
- continue
conn, _addr = self._sock.accept()
- except (select.error, socket.error) as inst:
+ except socket.error as inst:
if inst.args[0] == errno.EINTR:
continue
raise
@@ -519,6 +518,7 @@
self.ui.traceback(force=True)
finally:
os._exit(255)
+ selector.close()
def _sigchldhandler(self, signal, frame):
self._reapworkers(os.WNOHANG)
--- a/mercurial/config.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/config.py Wed Jul 19 07:51:41 2017 -0500
@@ -68,7 +68,7 @@
def sections(self):
return sorted(self._data.keys())
def items(self, section):
- return self._data.get(section, {}).items()
+ return list(self._data.get(section, {}).iteritems())
def set(self, section, item, value, source=""):
if pycompat.ispy3:
assert not isinstance(value, str), (
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/configitems.py Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,586 @@
+# configitems.py - centralized declaration of configuration options
+#
+# Copyright 2017 Pierre-Yves David <pierre-yves.david@octobus.net>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from __future__ import absolute_import
+
+import functools
+
+from . import (
+ error,
+)
+
+def loadconfigtable(ui, extname, configtable):
+    """update config items known to the ui with the extension ones"""
+ for section, items in configtable.items():
+ knownitems = ui._knownconfig.setdefault(section, {})
+ knownkeys = set(knownitems)
+ newkeys = set(items)
+ for key in sorted(knownkeys & newkeys):
+ msg = "extension '%s' overwrite config item '%s.%s'"
+ msg %= (extname, section, key)
+ ui.develwarn(msg, config='warn-config')
+
+ knownitems.update(items)
+
+class configitem(object):
+ """represent a known config item
+
+ :section: the official config section where to find this item,
+ :name: the official name within the section,
+ :default: default value for this item,
+ :alias: optional list of tuples as alternatives.
+ """
+
+ def __init__(self, section, name, default=None, alias=()):
+ self.section = section
+ self.name = name
+ self.default = default
+ self.alias = list(alias)
+
+coreitems = {}
+
+def _register(configtable, *args, **kwargs):
+ item = configitem(*args, **kwargs)
+ section = configtable.setdefault(item.section, {})
+ if item.name in section:
+ msg = "duplicated config item registration for '%s.%s'"
+ raise error.ProgrammingError(msg % (item.section, item.name))
+ section[item.name] = item
+
+# special value for case where the default is derived from other values
+dynamicdefault = object()
+
+# Registering actual config items
+
+def getitemregister(configtable):
+ return functools.partial(_register, configtable)
+
+coreconfigitem = getitemregister(coreitems)
+
+coreconfigitem('auth', 'cookiefile',
+ default=None,
+)
+# bookmarks.pushing: internal hack for discovery
+coreconfigitem('bookmarks', 'pushing',
+ default=list,
+)
+# bundle.mainreporoot: internal hack for bundlerepo
+coreconfigitem('bundle', 'mainreporoot',
+ default='',
+)
+# bundle.reorder: experimental config
+coreconfigitem('bundle', 'reorder',
+ default='auto',
+)
+coreconfigitem('censor', 'policy',
+ default='abort',
+)
+coreconfigitem('chgserver', 'idletimeout',
+ default=3600,
+)
+coreconfigitem('chgserver', 'skiphash',
+ default=False,
+)
+coreconfigitem('cmdserver', 'log',
+ default=None,
+)
+coreconfigitem('color', 'mode',
+ default='auto',
+)
+coreconfigitem('color', 'pagermode',
+ default=dynamicdefault,
+)
+coreconfigitem('commands', 'status.relative',
+ default=False,
+)
+coreconfigitem('commands', 'update.requiredest',
+ default=False,
+)
+coreconfigitem('devel', 'all-warnings',
+ default=False,
+)
+coreconfigitem('devel', 'bundle2.debug',
+ default=False,
+)
+coreconfigitem('devel', 'check-locks',
+ default=False,
+)
+coreconfigitem('devel', 'check-relroot',
+ default=False,
+)
+coreconfigitem('devel', 'default-date',
+ default=None,
+)
+coreconfigitem('devel', 'deprec-warn',
+ default=False,
+)
+coreconfigitem('devel', 'disableloaddefaultcerts',
+ default=False,
+)
+coreconfigitem('devel', 'legacy.exchange',
+ default=list,
+)
+coreconfigitem('devel', 'servercafile',
+ default='',
+)
+coreconfigitem('devel', 'serverexactprotocol',
+ default='',
+)
+coreconfigitem('devel', 'serverrequirecert',
+ default=False,
+)
+coreconfigitem('devel', 'strip-obsmarkers',
+ default=True,
+)
+coreconfigitem('email', 'charsets',
+ default=list,
+)
+coreconfigitem('email', 'method',
+ default='smtp',
+)
+coreconfigitem('experimental', 'bundle-phases',
+ default=False,
+)
+coreconfigitem('experimental', 'bundle2-advertise',
+ default=True,
+)
+coreconfigitem('experimental', 'bundle2-output-capture',
+ default=False,
+)
+coreconfigitem('experimental', 'bundle2.pushback',
+ default=False,
+)
+coreconfigitem('experimental', 'bundle2lazylocking',
+ default=False,
+)
+coreconfigitem('experimental', 'bundlecomplevel',
+ default=None,
+)
+coreconfigitem('experimental', 'changegroup3',
+ default=False,
+)
+coreconfigitem('experimental', 'clientcompressionengines',
+ default=list,
+)
+coreconfigitem('experimental', 'crecordtest',
+ default=None,
+)
+coreconfigitem('experimental', 'disablecopytrace',
+ default=False,
+)
+coreconfigitem('experimental', 'editortmpinhg',
+ default=False,
+)
+coreconfigitem('experimental', 'evolution',
+ default=list,
+)
+coreconfigitem('experimental', 'evolution.bundle-obsmarker',
+ default=False,
+)
+coreconfigitem('experimental', 'evolution.track-operation',
+ default=False,
+)
+coreconfigitem('experimental', 'exportableenviron',
+ default=list,
+)
+coreconfigitem('experimental', 'extendedheader.index',
+ default=None,
+)
+coreconfigitem('experimental', 'extendedheader.similarity',
+ default=False,
+)
+coreconfigitem('experimental', 'format.compression',
+ default='zlib',
+)
+coreconfigitem('experimental', 'graphshorten',
+ default=False,
+)
+coreconfigitem('experimental', 'hook-track-tags',
+ default=False,
+)
+coreconfigitem('experimental', 'httppostargs',
+ default=False,
+)
+coreconfigitem('experimental', 'manifestv2',
+ default=False,
+)
+coreconfigitem('experimental', 'mergedriver',
+ default=None,
+)
+coreconfigitem('experimental', 'obsmarkers-exchange-debug',
+ default=False,
+)
+coreconfigitem('experimental', 'revertalternateinteractivemode',
+ default=True,
+)
+coreconfigitem('experimental', 'revlogv2',
+ default=None,
+)
+coreconfigitem('experimental', 'spacemovesdown',
+ default=False,
+)
+coreconfigitem('experimental', 'treemanifest',
+ default=False,
+)
+coreconfigitem('experimental', 'updatecheck',
+ default=None,
+)
+coreconfigitem('format', 'aggressivemergedeltas',
+ default=False,
+)
+coreconfigitem('format', 'chunkcachesize',
+ default=None,
+)
+coreconfigitem('format', 'dotencode',
+ default=True,
+)
+coreconfigitem('format', 'generaldelta',
+ default=False,
+)
+coreconfigitem('format', 'manifestcachesize',
+ default=None,
+)
+coreconfigitem('format', 'maxchainlen',
+ default=None,
+)
+coreconfigitem('format', 'obsstore-version',
+ default=None,
+)
+coreconfigitem('format', 'usefncache',
+ default=True,
+)
+coreconfigitem('format', 'usegeneraldelta',
+ default=True,
+)
+coreconfigitem('format', 'usestore',
+ default=True,
+)
+coreconfigitem('hostsecurity', 'ciphers',
+ default=None,
+)
+coreconfigitem('hostsecurity', 'disabletls10warning',
+ default=False,
+)
+coreconfigitem('http_proxy', 'always',
+ default=False,
+)
+coreconfigitem('http_proxy', 'host',
+ default=None,
+)
+coreconfigitem('http_proxy', 'no',
+ default=list,
+)
+coreconfigitem('http_proxy', 'passwd',
+ default=None,
+)
+coreconfigitem('http_proxy', 'user',
+ default=None,
+)
+coreconfigitem('merge', 'followcopies',
+ default=True,
+)
+coreconfigitem('pager', 'ignore',
+ default=list,
+)
+coreconfigitem('patch', 'eol',
+ default='strict',
+)
+coreconfigitem('patch', 'fuzz',
+ default=2,
+)
+coreconfigitem('paths', 'default',
+ default=None,
+)
+coreconfigitem('paths', 'default-push',
+ default=None,
+)
+coreconfigitem('phases', 'checksubrepos',
+ default='follow',
+)
+coreconfigitem('phases', 'publish',
+ default=True,
+)
+coreconfigitem('profiling', 'enabled',
+ default=False,
+)
+coreconfigitem('profiling', 'format',
+ default='text',
+)
+coreconfigitem('profiling', 'freq',
+ default=1000,
+)
+coreconfigitem('profiling', 'limit',
+ default=30,
+)
+coreconfigitem('profiling', 'nested',
+ default=0,
+)
+coreconfigitem('profiling', 'sort',
+ default='inlinetime',
+)
+coreconfigitem('profiling', 'statformat',
+ default='hotpath',
+)
+coreconfigitem('progress', 'assume-tty',
+ default=False,
+)
+coreconfigitem('progress', 'changedelay',
+ default=1,
+)
+coreconfigitem('progress', 'clear-complete',
+ default=True,
+)
+coreconfigitem('progress', 'debug',
+ default=False,
+)
+coreconfigitem('progress', 'delay',
+ default=3,
+)
+coreconfigitem('progress', 'disable',
+ default=False,
+)
+coreconfigitem('progress', 'estimate',
+ default=2,
+)
+coreconfigitem('progress', 'refresh',
+ default=0.1,
+)
+coreconfigitem('progress', 'width',
+ default=dynamicdefault,
+)
+coreconfigitem('server', 'bundle1',
+ default=True,
+)
+coreconfigitem('server', 'bundle1gd',
+ default=None,
+)
+coreconfigitem('server', 'compressionengines',
+ default=list,
+)
+coreconfigitem('server', 'concurrent-push-mode',
+ default='strict',
+)
+coreconfigitem('server', 'disablefullbundle',
+ default=False,
+)
+coreconfigitem('server', 'maxhttpheaderlen',
+ default=1024,
+)
+coreconfigitem('server', 'preferuncompressed',
+ default=False,
+)
+coreconfigitem('server', 'uncompressed',
+ default=True,
+)
+coreconfigitem('server', 'uncompressedallowsecret',
+ default=False,
+)
+coreconfigitem('server', 'validate',
+ default=False,
+)
+coreconfigitem('server', 'zliblevel',
+ default=-1,
+)
+coreconfigitem('smtp', 'host',
+ default=None,
+)
+coreconfigitem('smtp', 'local_hostname',
+ default=None,
+)
+coreconfigitem('smtp', 'password',
+ default=None,
+)
+coreconfigitem('smtp', 'tls',
+ default='none',
+)
+coreconfigitem('smtp', 'username',
+ default=None,
+)
+coreconfigitem('sparse', 'missingwarning',
+ default=True,
+)
+coreconfigitem('trusted', 'groups',
+ default=list,
+)
+coreconfigitem('trusted', 'users',
+ default=list,
+)
+coreconfigitem('ui', '_usedassubrepo',
+ default=False,
+)
+coreconfigitem('ui', 'allowemptycommit',
+ default=False,
+)
+coreconfigitem('ui', 'archivemeta',
+ default=True,
+)
+coreconfigitem('ui', 'askusername',
+ default=False,
+)
+coreconfigitem('ui', 'clonebundlefallback',
+ default=False,
+)
+coreconfigitem('ui', 'clonebundleprefers',
+ default=list,
+)
+coreconfigitem('ui', 'clonebundles',
+ default=True,
+)
+coreconfigitem('ui', 'color',
+ default='auto',
+)
+coreconfigitem('ui', 'commitsubrepos',
+ default=False,
+)
+coreconfigitem('ui', 'debug',
+ default=False,
+)
+coreconfigitem('ui', 'debugger',
+ default=None,
+)
+coreconfigitem('ui', 'fallbackencoding',
+ default=None,
+)
+coreconfigitem('ui', 'forcecwd',
+ default=None,
+)
+coreconfigitem('ui', 'forcemerge',
+ default=None,
+)
+coreconfigitem('ui', 'formatdebug',
+ default=False,
+)
+coreconfigitem('ui', 'formatjson',
+ default=False,
+)
+coreconfigitem('ui', 'formatted',
+ default=None,
+)
+coreconfigitem('ui', 'graphnodetemplate',
+ default=None,
+)
+coreconfigitem('ui', 'http2debuglevel',
+ default=None,
+)
+coreconfigitem('ui', 'interactive',
+ default=None,
+)
+coreconfigitem('ui', 'interface',
+ default=None,
+)
+coreconfigitem('ui', 'logblockedtimes',
+ default=False,
+)
+coreconfigitem('ui', 'logtemplate',
+ default=None,
+)
+coreconfigitem('ui', 'merge',
+ default=None,
+)
+coreconfigitem('ui', 'mergemarkers',
+ default='basic',
+)
+coreconfigitem('ui', 'mergemarkertemplate',
+ default=('{node|short} '
+ '{ifeq(tags, "tip", "", '
+ 'ifeq(tags, "", "", "{tags} "))}'
+ '{if(bookmarks, "{bookmarks} ")}'
+ '{ifeq(branch, "default", "", "{branch} ")}'
+ '- {author|user}: {desc|firstline}')
+)
+coreconfigitem('ui', 'nontty',
+ default=False,
+)
+coreconfigitem('ui', 'origbackuppath',
+ default=None,
+)
+coreconfigitem('ui', 'paginate',
+ default=True,
+)
+coreconfigitem('ui', 'patch',
+ default=None,
+)
+coreconfigitem('ui', 'portablefilenames',
+ default='warn',
+)
+coreconfigitem('ui', 'promptecho',
+ default=False,
+)
+coreconfigitem('ui', 'quiet',
+ default=False,
+)
+coreconfigitem('ui', 'quietbookmarkmove',
+ default=False,
+)
+coreconfigitem('ui', 'remotecmd',
+ default='hg',
+)
+coreconfigitem('ui', 'report_untrusted',
+ default=True,
+)
+coreconfigitem('ui', 'rollback',
+ default=True,
+)
+coreconfigitem('ui', 'slash',
+ default=False,
+)
+coreconfigitem('ui', 'ssh',
+ default='ssh',
+)
+coreconfigitem('ui', 'statuscopies',
+ default=False,
+)
+coreconfigitem('ui', 'strict',
+ default=False,
+)
+coreconfigitem('ui', 'style',
+ default='',
+)
+coreconfigitem('ui', 'supportcontact',
+ default=None,
+)
+coreconfigitem('ui', 'textwidth',
+ default=78,
+)
+coreconfigitem('ui', 'timeout',
+ default='600',
+)
+coreconfigitem('ui', 'traceback',
+ default=False,
+)
+coreconfigitem('ui', 'tweakdefaults',
+ default=False,
+)
+coreconfigitem('ui', 'usehttp2',
+ default=False,
+)
+coreconfigitem('ui', 'username',
+ alias=[('ui', 'user')]
+)
+coreconfigitem('ui', 'verbose',
+ default=False,
+)
+coreconfigitem('verify', 'skipflags',
+ default=None,
+)
+coreconfigitem('worker', 'backgroundclose',
+ default=dynamicdefault,
+)
+# Windows defaults to a limit of 512 open files. A buffer of 128
+# should give us enough headway.
+coreconfigitem('worker', 'backgroundclosemaxqueue',
+ default=384,
+)
+coreconfigitem('worker', 'backgroundcloseminfilecount',
+ default=2048,
+)
+coreconfigitem('worker', 'backgroundclosethreadcount',
+ default=4,
+)
+coreconfigitem('worker', 'numcpus',
+ default=None,
+)
--- a/mercurial/context.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/context.py Wed Jul 19 07:51:41 2017 -0500
@@ -23,6 +23,7 @@
short,
wdirid,
wdirnodes,
+ wdirrev,
)
from . import (
encoding,
@@ -32,11 +33,13 @@
mdiff,
obsolete as obsmod,
patch,
+ pathutil,
phases,
pycompat,
repoview,
revlog,
scmutil,
+ sparse,
subrepo,
util,
)
@@ -64,20 +67,16 @@
return o
- def __str__(self):
- r = short(self.node())
- if pycompat.ispy3:
- return r.decode('ascii')
- return r
-
def __bytes__(self):
return short(self.node())
+ __str__ = encoding.strmethod(__bytes__)
+
def __int__(self):
return self.rev()
def __repr__(self):
- return "<%s %s>" % (type(self).__name__, str(self))
+ return r"<%s %s>" % (type(self).__name__, str(self))
def __eq__(self, other):
try:
@@ -257,13 +256,13 @@
return changectx(self._repo, nullrev)
def _fileinfo(self, path):
- if '_manifest' in self.__dict__:
+ if r'_manifest' in self.__dict__:
try:
return self._manifest[path], self._manifest.flags(path)
except KeyError:
raise error.ManifestLookupError(self._node, path,
_('not found in manifest'))
- if '_manifestdelta' in self.__dict__ or path in self.files():
+ if r'_manifestdelta' in self.__dict__ or path in self.files():
if path in self._manifestdelta:
return (self._manifestdelta[path],
self._manifestdelta.flags(path))
@@ -300,8 +299,6 @@
def match(self, pats=None, include=None, exclude=None, default='glob',
listsubrepos=False, badfn=None):
- if pats is None:
- pats = []
r = self._repo
return matchmod.match(r.root, r.getcwd(), pats,
include, exclude, default,
@@ -323,9 +320,6 @@
def hasdir(self, dir):
return self._manifest.hasdir(dir)
- def dirty(self, missing=False, merge=True, branch=True):
- return False
-
def status(self, other=None, match=None, listignored=False,
listclean=False, listunknown=False, listsubrepos=False):
"""return status of files between two nodes or node and working
@@ -389,24 +383,6 @@
return r
-
-def makememctx(repo, parents, text, user, date, branch, files, store,
- editor=None, extra=None):
- def getfilectx(repo, memctx, path):
- data, mode, copied = store.getfile(path)
- if data is None:
- return None
- islink, isexec = mode
- return memfilectx(repo, path, data, islink=islink, isexec=isexec,
- copied=copied, memctx=memctx)
- if extra is None:
- extra = {}
- if branch:
- extra['branch'] = encoding.fromlocal(branch)
- ctx = memctx(repo, parents, text, files, getfilectx, user,
- date, extra, editor)
- return ctx
-
def _filterederror(repo, changeid):
"""build an exception to be raised about a filtered changeid
@@ -474,7 +450,7 @@
l = len(repo.changelog)
if r < 0:
r += l
- if r < 0 or r >= l:
+ if r < 0 or r >= l and r != wdirrev:
raise ValueError
self._rev = r
self._node = repo.changelog.node(r)
@@ -551,7 +527,7 @@
def _manifest(self):
return self._manifestctx.read()
- @propertycache
+ @property
def _manifestctx(self):
return self._repo.manifestlog[self._changeset.manifest]
@@ -687,21 +663,20 @@
in the repo,
workingfilectx: a filecontext that represents files from the working
directory,
- memfilectx: a filecontext that represents files in-memory."""
- def __new__(cls, repo, path, *args, **kwargs):
- return super(basefilectx, cls).__new__(cls)
-
+ memfilectx: a filecontext that represents files in-memory,
+ overlayfilectx: duplicate another filecontext with some fields overridden.
+ """
@propertycache
def _filelog(self):
return self._repo.file(self._path)
@propertycache
def _changeid(self):
- if '_changeid' in self.__dict__:
+ if r'_changeid' in self.__dict__:
return self._changeid
- elif '_changectx' in self.__dict__:
+ elif r'_changectx' in self.__dict__:
return self._changectx.rev()
- elif '_descendantrev' in self.__dict__:
+ elif r'_descendantrev' in self.__dict__:
# this file context was created from a revision with a known
# descendant, we can (lazily) correct for linkrev aliases
return self._adjustlinkrev(self._descendantrev)
@@ -710,7 +685,7 @@
@propertycache
def _filenode(self):
- if '_fileid' in self.__dict__:
+ if r'_fileid' in self.__dict__:
return self._filelog.lookup(self._fileid)
else:
return self._changectx.filenode(self._path)
@@ -733,12 +708,14 @@
__bool__ = __nonzero__
- def __str__(self):
+ def __bytes__(self):
try:
return "%s@%s" % (self.path(), self._changectx)
except error.LookupError:
return "%s@???" % self.path()
+ __str__ = encoding.strmethod(__bytes__)
+
def __repr__(self):
return "<%s %s>" % (type(self).__name__, str(self))
@@ -762,8 +739,11 @@
return self._filerev
def filenode(self):
return self._filenode
+ @propertycache
+ def _flags(self):
+ return self._changectx.flags(self._path)
def flags(self):
- return self._changectx.flags(self._path)
+ return self._flags
def filelog(self):
return self._filelog
def rev(self):
@@ -794,8 +774,12 @@
return self._changectx.manifest()
def changectx(self):
return self._changectx
+ def renamed(self):
+ return self._copied
def repo(self):
return self._repo
+ def size(self):
+ return len(self.data())
def path(self):
return self._path
@@ -943,7 +927,8 @@
return p[1]
return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
- def annotate(self, follow=False, linenumber=False, diffopts=None):
+ def annotate(self, follow=False, linenumber=False, skiprevs=None,
+ diffopts=None):
'''returns a list of tuples of ((ctx, number), line) for each line
in the file, where ctx is the filectx of the node where
that line was last changed; if linenumber parameter is true, number is
@@ -963,15 +948,6 @@
def decorate(text, rev):
return ([(rev, False)] * lines(text), text)
- def pair(parent, child):
- blocks = mdiff.allblocks(parent[1], child[1], opts=diffopts)
- for (a1, a2, b1, b2), t in blocks:
- # Changed blocks ('!') or blocks made only of blank lines ('~')
- # belong to the child.
- if t == '=':
- child[0][b1:b2] = parent[0][a1:a2]
- return child
-
getlog = util.lrucachefunc(lambda x: self._repo.file(x))
def parents(f):
@@ -1047,8 +1023,12 @@
if ready:
visit.pop()
curr = decorate(f.data(), f)
+ skipchild = False
+ if skiprevs is not None:
+ skipchild = f._changeid in skiprevs
+ curr = _annotatepair([hist[p] for p in pl], f, curr, skipchild,
+ diffopts)
for p in pl:
- curr = pair(hist[p], curr)
if needed[p] == 1:
del hist[p]
del needed[p]
@@ -1076,6 +1056,116 @@
c = visit.pop(max(visit))
yield c
+def _annotatepair(parents, childfctx, child, skipchild, diffopts):
+ r'''
+ Given parent and child fctxes and annotate data for parents, for all lines
+ in either parent that match the child, annotate the child with the parent's
+ data.
+
+ Additionally, if `skipchild` is True, replace all other lines with parent
+ annotate data as well such that child is never blamed for any lines.
+
+ >>> oldfctx = 'old'
+ >>> p1fctx, p2fctx, childfctx = 'p1', 'p2', 'c'
+ >>> olddata = 'a\nb\n'
+ >>> p1data = 'a\nb\nc\n'
+ >>> p2data = 'a\nc\nd\n'
+ >>> childdata = 'a\nb2\nc\nc2\nd\n'
+ >>> diffopts = mdiff.diffopts()
+
+ >>> def decorate(text, rev):
+ ... return ([(rev, i) for i in xrange(1, text.count('\n') + 1)], text)
+
+ Basic usage:
+
+ >>> oldann = decorate(olddata, oldfctx)
+ >>> p1ann = decorate(p1data, p1fctx)
+ >>> p1ann = _annotatepair([oldann], p1fctx, p1ann, False, diffopts)
+ >>> p1ann[0]
+ [('old', 1), ('old', 2), ('p1', 3)]
+ >>> p2ann = decorate(p2data, p2fctx)
+ >>> p2ann = _annotatepair([oldann], p2fctx, p2ann, False, diffopts)
+ >>> p2ann[0]
+ [('old', 1), ('p2', 2), ('p2', 3)]
+
+ Test with multiple parents (note the difference caused by ordering):
+
+ >>> childann = decorate(childdata, childfctx)
+ >>> childann = _annotatepair([p1ann, p2ann], childfctx, childann, False,
+ ... diffopts)
+ >>> childann[0]
+ [('old', 1), ('c', 2), ('p2', 2), ('c', 4), ('p2', 3)]
+
+ >>> childann = decorate(childdata, childfctx)
+ >>> childann = _annotatepair([p2ann, p1ann], childfctx, childann, False,
+ ... diffopts)
+ >>> childann[0]
+ [('old', 1), ('c', 2), ('p1', 3), ('c', 4), ('p2', 3)]
+
+ Test with skipchild (note the difference caused by ordering):
+
+ >>> childann = decorate(childdata, childfctx)
+ >>> childann = _annotatepair([p1ann, p2ann], childfctx, childann, True,
+ ... diffopts)
+ >>> childann[0]
+ [('old', 1), ('old', 2), ('p2', 2), ('p2', 2), ('p2', 3)]
+
+ >>> childann = decorate(childdata, childfctx)
+ >>> childann = _annotatepair([p2ann, p1ann], childfctx, childann, True,
+ ... diffopts)
+ >>> childann[0]
+ [('old', 1), ('old', 2), ('p1', 3), ('p1', 3), ('p2', 3)]
+ '''
+ pblocks = [(parent, mdiff.allblocks(parent[1], child[1], opts=diffopts))
+ for parent in parents]
+
+ if skipchild:
+ # Need to iterate over the blocks twice -- make it a list
+ pblocks = [(p, list(blocks)) for (p, blocks) in pblocks]
+ # Mercurial currently prefers p2 over p1 for annotate.
+ # TODO: change this?
+ for parent, blocks in pblocks:
+ for (a1, a2, b1, b2), t in blocks:
+ # Changed blocks ('!') or blocks made only of blank lines ('~')
+ # belong to the child.
+ if t == '=':
+ child[0][b1:b2] = parent[0][a1:a2]
+
+ if skipchild:
+ # Now try and match up anything that couldn't be matched,
+ # Reversing pblocks maintains bias towards p2, matching above
+ # behavior.
+ pblocks.reverse()
+
+ # The heuristics are:
+ # * Work on blocks of changed lines (effectively diff hunks with -U0).
+ # This could potentially be smarter but works well enough.
+ # * For a non-matching section, do a best-effort fit. Match lines in
+ # diff hunks 1:1, dropping lines as necessary.
+ # * Repeat the last line as a last resort.
+
+ # First, replace as much as possible without repeating the last line.
+ remaining = [(parent, []) for parent, _blocks in pblocks]
+ for idx, (parent, blocks) in enumerate(pblocks):
+ for (a1, a2, b1, b2), _t in blocks:
+ if a2 - a1 >= b2 - b1:
+ for bk in xrange(b1, b2):
+ if child[0][bk][0] == childfctx:
+ ak = min(a1 + (bk - b1), a2 - 1)
+ child[0][bk] = parent[0][ak]
+ else:
+ remaining[idx][1].append((a1, a2, b1, b2))
+
+ # Then, look at anything left, which might involve repeating the last
+ # line.
+ for parent, blocks in remaining:
+ for a1, a2, b1, b2 in blocks:
+ for bk in xrange(b1, b2):
+ if child[0][bk][0] == childfctx:
+ ak = min(a1 + (bk - b1), a2 - 1)
+ child[0][bk] = parent[0][ak]
+ return child
+
class filectx(basefilectx):
"""A filecontext object makes access to data related to a particular
filerevision convenient."""
@@ -1134,11 +1224,15 @@
def rawdata(self):
return self._filelog.revision(self._filenode, raw=True)
+ def rawflags(self):
+ """low-level revlog flags"""
+ return self._filelog.flags(self._filerev)
+
def data(self):
try:
return self._filelog.read(self._filenode)
except error.CensoredNodeError:
- if self._repo.ui.config("censor", "policy", "abort") == "ignore":
+ if self._repo.ui.config("censor", "policy") == "ignore":
return ""
raise error.Abort(_("censored node: %s") % short(self._filenode),
hint=_("set censor.policy to ignore errors"))
@@ -1146,7 +1240,8 @@
def size(self):
return self._filelog.size(self._filerev)
- def renamed(self):
+ @propertycache
+ def _copied(self):
"""check if file was actually renamed in this changeset revision
If rename logged in file revision, we report copy for changeset only
@@ -1177,89 +1272,6 @@
return [filectx(self._repo, self._path, fileid=x,
filelog=self._filelog) for x in c]
-def _changesrange(fctx1, fctx2, linerange2, diffopts):
- """Return `(diffinrange, linerange1)` where `diffinrange` is True
- if diff from fctx2 to fctx1 has changes in linerange2 and
- `linerange1` is the new line range for fctx1.
- """
- blocks = mdiff.allblocks(fctx1.data(), fctx2.data(), diffopts)
- filteredblocks, linerange1 = mdiff.blocksinrange(blocks, linerange2)
- diffinrange = any(stype == '!' for _, stype in filteredblocks)
- return diffinrange, linerange1
-
-def blockancestors(fctx, fromline, toline, followfirst=False):
- """Yield ancestors of `fctx` with respect to the block of lines within
- `fromline`-`toline` range.
- """
- diffopts = patch.diffopts(fctx._repo.ui)
- introrev = fctx.introrev()
- if fctx.rev() != introrev:
- fctx = fctx.filectx(fctx.filenode(), changeid=introrev)
- visit = {(fctx.linkrev(), fctx.filenode()): (fctx, (fromline, toline))}
- while visit:
- c, linerange2 = visit.pop(max(visit))
- pl = c.parents()
- if followfirst:
- pl = pl[:1]
- if not pl:
- # The block originates from the initial revision.
- yield c, linerange2
- continue
- inrange = False
- for p in pl:
- inrangep, linerange1 = _changesrange(p, c, linerange2, diffopts)
- inrange = inrange or inrangep
- if linerange1[0] == linerange1[1]:
- # Parent's linerange is empty, meaning that the block got
- # introduced in this revision; no need to go futher in this
- # branch.
- continue
- # Set _descendantrev with 'c' (a known descendant) so that, when
- # _adjustlinkrev is called for 'p', it receives this descendant
- # (as srcrev) instead possibly topmost introrev.
- p._descendantrev = c.rev()
- visit[p.linkrev(), p.filenode()] = p, linerange1
- if inrange:
- yield c, linerange2
-
-def blockdescendants(fctx, fromline, toline):
- """Yield descendants of `fctx` with respect to the block of lines within
- `fromline`-`toline` range.
- """
- # First possibly yield 'fctx' if it has changes in range with respect to
- # its parents.
- try:
- c, linerange1 = next(blockancestors(fctx, fromline, toline))
- except StopIteration:
- pass
- else:
- if c == fctx:
- yield c, linerange1
-
- diffopts = patch.diffopts(fctx._repo.ui)
- fl = fctx.filelog()
- seen = {fctx.filerev(): (fctx, (fromline, toline))}
- for i in fl.descendants([fctx.filerev()]):
- c = fctx.filectx(i)
- inrange = False
- for x in fl.parentrevs(i):
- try:
- p, linerange2 = seen[x]
- except KeyError:
- # nullrev or other branch
- continue
- inrangep, linerange1 = _changesrange(c, p, linerange2, diffopts)
- inrange = inrange or inrangep
- # If revision 'i' has been seen (it's a merge), we assume that its
- # line range is the same independently of which parents was used
- # to compute it.
- assert i not in seen or seen[i][1] == linerange1, (
- 'computed line range for %s is not consistent between '
- 'ancestor branches' % c)
- seen[i] = c, linerange1
- if inrange:
- yield c, linerange1
-
class committablectx(basectx):
"""A committablectx object provides common functionality for a context that
wants the ability to commit, e.g. workingctx or memctx."""
@@ -1288,8 +1300,10 @@
if self._extra['branch'] == '':
self._extra['branch'] = 'default'
- def __str__(self):
- return str(self._parents[0]) + "+"
+ def __bytes__(self):
+ return bytes(self._parents[0]) + "+"
+
+ __str__ = encoding.strmethod(__bytes__)
def __nonzero__(self):
return True
@@ -1342,7 +1356,11 @@
@propertycache
def _date(self):
- return util.makedate()
+ ui = self._repo.ui
+ date = ui.configdate('devel', 'default-date')
+ if date is None:
+ date = util.makedate()
+ return date
def subrev(self, subpath):
return None
@@ -1396,7 +1414,7 @@
return []
def flags(self, path):
- if '_manifest' in self.__dict__:
+ if r'_manifest' in self.__dict__:
try:
return self._manifest.flags(path)
except KeyError:
@@ -1436,19 +1454,21 @@
"""
- self._repo.dirstate.beginparentchange()
- for f in self.modified() + self.added():
- self._repo.dirstate.normal(f)
- for f in self.removed():
- self._repo.dirstate.drop(f)
- self._repo.dirstate.setparents(node)
- self._repo.dirstate.endparentchange()
+ with self._repo.dirstate.parentchange():
+ for f in self.modified() + self.added():
+ self._repo.dirstate.normal(f)
+ for f in self.removed():
+ self._repo.dirstate.drop(f)
+ self._repo.dirstate.setparents(node)
# write changes out explicitly, because nesting wlock at
# runtime may prevent 'wlock.release()' in 'repo.commit()'
# from immediately doing so for subsequent changing files
self._repo.dirstate.write(self._repo.currenttransaction())
+ def dirty(self, missing=False, merge=True, branch=True):
+ return False
+
class workingctx(committablectx):
"""A workingctx object makes access to data related to
the current working directory convenient.
@@ -1490,7 +1510,7 @@
"check whether a working directory is modified"
# check subrepos first
for s in sorted(self.substate):
- if self.sub(s).dirty():
+ if self.sub(s).dirty(missing=missing):
return True
# check current working dir
return ((merge and self.p2()) or
@@ -1499,17 +1519,20 @@
(missing and self.deleted()))
def add(self, list, prefix=""):
- join = lambda f: os.path.join(prefix, f)
with self._repo.wlock():
ui, ds = self._repo.ui, self._repo.dirstate
+ uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
rejected = []
lstat = self._repo.wvfs.lstat
for f in list:
- scmutil.checkportable(ui, join(f))
+ # ds.pathto() returns an absolute file when this is invoked from
+ # the keyword extension. That gets flagged as non-portable on
+ # Windows, since it contains the drive letter and colon.
+ scmutil.checkportable(ui, os.path.join(prefix, f))
try:
st = lstat(f)
except OSError:
- ui.warn(_("%s does not exist!\n") % join(f))
+ ui.warn(_("%s does not exist!\n") % uipath(f))
rejected.append(f)
continue
if st.st_size > 10000000:
@@ -1517,13 +1540,13 @@
"to manage this file\n"
"(use 'hg revert %s' to cancel the "
"pending addition)\n")
- % (f, 3 * st.st_size // 1000000, join(f)))
+ % (f, 3 * st.st_size // 1000000, uipath(f)))
if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
ui.warn(_("%s not added: only files and symlinks "
- "supported currently\n") % join(f))
+ "supported currently\n") % uipath(f))
rejected.append(f)
elif ds[f] in 'amn':
- ui.warn(_("%s already tracked!\n") % join(f))
+ ui.warn(_("%s already tracked!\n") % uipath(f))
elif ds[f] == 'r':
ds.normallookup(f)
else:
@@ -1531,12 +1554,13 @@
return rejected
def forget(self, files, prefix=""):
- join = lambda f: os.path.join(prefix, f)
with self._repo.wlock():
+ ds = self._repo.dirstate
+ uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
rejected = []
for f in files:
if f not in self._repo.dirstate:
- self._repo.ui.warn(_("%s not tracked!\n") % join(f))
+ self._repo.ui.warn(_("%s not tracked!\n") % uipath(f))
rejected.append(f)
elif self._repo.dirstate[f] != 'a':
self._repo.dirstate.remove(f)
@@ -1547,9 +1571,10 @@
def undelete(self, list):
pctxs = self.parents()
with self._repo.wlock():
+ ds = self._repo.dirstate
for f in list:
if self._repo.dirstate[f] != 'r':
- self._repo.ui.warn(_("%s not removed!\n") % f)
+ self._repo.ui.warn(_("%s not removed!\n") % ds.pathto(f))
else:
fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
t = fctx.data()
@@ -1562,11 +1587,13 @@
except OSError as err:
if err.errno != errno.ENOENT:
raise
- self._repo.ui.warn(_("%s does not exist!\n") % dest)
+ self._repo.ui.warn(_("%s does not exist!\n")
+ % self._repo.dirstate.pathto(dest))
return
if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
self._repo.ui.warn(_("copy failed: %s is not a file or a "
- "symbolic link\n") % dest)
+ "symbolic link\n")
+ % self._repo.dirstate.pathto(dest))
else:
with self._repo.wlock():
if self._repo.dirstate[dest] in '?':
@@ -1577,19 +1604,15 @@
def match(self, pats=None, include=None, exclude=None, default='glob',
listsubrepos=False, badfn=None):
- if pats is None:
- pats = []
r = self._repo
# Only a case insensitive filesystem needs magic to translate user input
# to actual case in the filesystem.
- matcherfunc = matchmod.match
- if not util.fscasesensitive(r.root):
- matcherfunc = matchmod.icasefsmatcher
- return matcherfunc(r.root, r.getcwd(), pats,
- include, exclude, default,
- auditor=r.auditor, ctx=self,
- listsubrepos=listsubrepos, badfn=badfn)
+ icasefs = not util.fscasesensitive(r.root)
+ return matchmod.match(r.root, r.getcwd(), pats, include, exclude,
+ default, auditor=r.auditor, ctx=self,
+ listsubrepos=listsubrepos, badfn=badfn,
+ icasefs=icasefs)
def _filtersuspectsymlink(self, files):
if not files or self._repo.dirstate._checklink:
@@ -1638,25 +1661,47 @@
# it's in the dirstate.
deleted.append(f)
- # update dirstate for files that are actually clean
- if fixup:
+ return modified, deleted, fixup
+
+ def _poststatusfixup(self, status, fixup):
+ """update dirstate for files that are actually clean"""
+ poststatus = self._repo.postdsstatus()
+ if fixup or poststatus:
try:
+ oldid = self._repo.dirstate.identity()
+
# updating the dirstate is optional
# so we don't wait on the lock
# wlock can invalidate the dirstate, so cache normal _after_
# taking the lock
with self._repo.wlock(False):
- normal = self._repo.dirstate.normal
- for f in fixup:
- normal(f)
- # write changes out explicitly, because nesting
- # wlock at runtime may prevent 'wlock.release()'
- # after this block from doing so for subsequent
- # changing files
- self._repo.dirstate.write(self._repo.currenttransaction())
+ if self._repo.dirstate.identity() == oldid:
+ if fixup:
+ normal = self._repo.dirstate.normal
+ for f in fixup:
+ normal(f)
+ # write changes out explicitly, because nesting
+ # wlock at runtime may prevent 'wlock.release()'
+ # after this block from doing so for subsequent
+ # changing files
+ tr = self._repo.currenttransaction()
+ self._repo.dirstate.write(tr)
+
+ if poststatus:
+ for ps in poststatus:
+ ps(self, status)
+ else:
+ # in this case, writing changes out breaks
+ # consistency, because .hg/dirstate was
+ # already changed simultaneously after last
+ # caching (see also issue5584 for detail)
+ self._repo.ui.debug('skip updating dirstate: '
+ 'identity mismatch\n')
except error.LockError:
pass
- return modified, deleted, fixup
+ finally:
+ # Even if the wlock couldn't be grabbed, clear out the list.
+ self._repo.clearpostdsstatus()
def _dirstatestatus(self, match=None, ignored=False, clean=False,
unknown=False):
@@ -1670,15 +1715,17 @@
listclean, listunknown)
# check for any possibly clean files
+ fixup = []
if cmp:
modified2, deleted2, fixup = self._checklookup(cmp)
s.modified.extend(modified2)
s.deleted.extend(deleted2)
- # update dirstate for files that are actually clean
if fixup and listclean:
s.clean.extend(fixup)
+ self._poststatusfixup(s, fixup)
+
if match.always():
# cache for performance
if s.unknown or s.ignored or s.clean:
@@ -1765,6 +1812,11 @@
match.bad = bad
return match
+ def markcommitted(self, node):
+ super(workingctx, self).markcommitted(node)
+
+ sparse.aftercommit(self._repo, node)
+
class committablefilectx(basefilectx):
"""A committablefilectx provides common functionality for a file context
that wants the ability to commit, e.g. workingfilectx or memfilectx."""
@@ -1841,6 +1893,15 @@
raise
return (t, tz)
+ def exists(self):
+ return self._repo.wvfs.exists(self._path)
+
+ def lexists(self):
+ return self._repo.wvfs.lexists(self._path)
+
+ def audit(self):
+ return self._repo.wvfs.audit(self._path)
+
def cmp(self, fctx):
"""compare with other file context
@@ -1854,9 +1915,13 @@
"""wraps unlink for a repo's working directory"""
self._repo.wvfs.unlinkpath(self._path, ignoremissing=ignoremissing)
- def write(self, data, flags):
+ def write(self, data, flags, backgroundclose=False):
"""wraps repo.wwrite"""
- self._repo.wwrite(self._path, data, flags)
+ self._repo.wwrite(self._path, data, flags,
+ backgroundclose=backgroundclose)
+
+ def setflags(self, l, x):
+ self._repo.wvfs.setflags(self._path, l, x)
class workingcommitctx(workingctx):
"""A workingcommitctx object makes access to data related to
@@ -1912,6 +1977,41 @@
return getfilectx
+def memfilefromctx(ctx):
+ """Given a context return a memfilectx for ctx[path]
+
+ This is a convenience method for building a memctx based on another
+ context.
+ """
+ def getfilectx(repo, memctx, path):
+ fctx = ctx[path]
+ # this is weird but apparently we only keep track of one parent
+ # (why not only store that instead of a tuple?)
+ copied = fctx.renamed()
+ if copied:
+ copied = copied[0]
+ return memfilectx(repo, path, fctx.data(),
+ islink=fctx.islink(), isexec=fctx.isexec(),
+ copied=copied, memctx=memctx)
+
+ return getfilectx
+
+def memfilefrompatch(patchstore):
+ """Given a patch (e.g. patchstore object) return a memfilectx
+
+ This is a convenience method for building a memctx based on a patchstore.
+ """
+ def getfilectx(repo, memctx, path):
+ data, mode, copied = patchstore.getfile(path)
+ if data is None:
+ return None
+ islink, isexec = mode
+ return memfilectx(repo, path, data, islink=islink,
+ isexec=isexec, copied=copied,
+ memctx=memctx)
+
+ return getfilectx
+
class memctx(committablectx):
"""Use memctx to perform in-memory commits via localrepo.commitctx().
@@ -1946,7 +2046,7 @@
_returnnoneformissingfiles = True
def __init__(self, repo, parents, text, files, filectxfn, user=None,
- date=None, extra=None, editor=False):
+ date=None, extra=None, branch=None, editor=False):
super(memctx, self).__init__(repo, text, user, date, extra)
self._rev = None
self._node = None
@@ -1955,32 +2055,18 @@
self._parents = [changectx(self._repo, p) for p in (p1, p2)]
files = sorted(set(files))
self._files = files
+ if branch is not None:
+ self._extra['branch'] = encoding.fromlocal(branch)
self.substate = {}
- # if store is not callable, wrap it in a function
- if not callable(filectxfn):
- def getfilectx(repo, memctx, path):
- fctx = filectxfn[path]
- # this is weird but apparently we only keep track of one parent
- # (why not only store that instead of a tuple?)
- copied = fctx.renamed()
- if copied:
- copied = copied[0]
- return memfilectx(repo, path, fctx.data(),
- islink=fctx.islink(), isexec=fctx.isexec(),
- copied=copied, memctx=memctx)
- self._filectxfn = getfilectx
- else:
- # memoizing increases performance for e.g. vcs convert scenarios.
- self._filectxfn = makecachingfilectxfn(filectxfn)
+ if isinstance(filectxfn, patch.filestore):
+ filectxfn = memfilefrompatch(filectxfn)
+ elif not callable(filectxfn):
+ # if store is not callable, wrap it in a function
+ filectxfn = memfilefromctx(filectxfn)
- if extra:
- self._extra = extra.copy()
- else:
- self._extra = {}
-
- if self._extra.get('branch', '') == '':
- self._extra['branch'] = 'default'
+ # memoizing increases performance for e.g. vcs convert scenarios.
+ self._filectxfn = makecachingfilectxfn(filectxfn)
if editor:
self._text = editor(self._repo, self, [])
@@ -2072,12 +2158,6 @@
def data(self):
return self._data
- def size(self):
- return len(self.data())
- def flags(self):
- return self._flags
- def renamed(self):
- return self._copied
def remove(self, ignoremissing=False):
"""wraps unlink for a repo's working directory"""
@@ -2088,6 +2168,77 @@
"""wraps repo.wwrite"""
self._data = data
+class overlayfilectx(committablefilectx):
+ """Like memfilectx but take an original filectx and optional parameters to
+ override parts of it. This is useful when fctx.data() is expensive (i.e.
+ flag processor is expensive) and raw data, flags, and filenode could be
+ reused (ex. rebase or mode-only amend a REVIDX_EXTSTORED file).
+ """
+
+ def __init__(self, originalfctx, datafunc=None, path=None, flags=None,
+ copied=None, ctx=None):
+ """originalfctx: filecontext to duplicate
+
+ datafunc: None or a function to override data (file content). It is a
+ function to be lazy. path, flags, copied, ctx: None or overridden value
+
+ copied could be (path, rev), or False. copied could also be just path,
+ and will be converted to (path, nullid). This simplifies some callers.
+ """
+
+ if path is None:
+ path = originalfctx.path()
+ if ctx is None:
+ ctx = originalfctx.changectx()
+ ctxmatch = lambda: True
+ else:
+ ctxmatch = lambda: ctx == originalfctx.changectx()
+
+ repo = originalfctx.repo()
+ flog = originalfctx.filelog()
+ super(overlayfilectx, self).__init__(repo, path, flog, ctx)
+
+ if copied is None:
+ copied = originalfctx.renamed()
+ copiedmatch = lambda: True
+ else:
+ if copied and not isinstance(copied, tuple):
+ # repo._filecommit will recalculate copyrev so nullid is okay
+ copied = (copied, nullid)
+ copiedmatch = lambda: copied == originalfctx.renamed()
+
+ # When data, copied (could affect data), ctx (could affect filelog
+ # parents) are not overridden, rawdata, rawflags, and filenode may be
+ # reused (repo._filecommit should double check filelog parents).
+ #
+ # path, flags are not hashed in filelog (but in manifestlog) so they do
+ # not affect reusable here.
+ #
+ # If ctx or copied is overridden to a same value with originalfctx,
+ # still consider it's reusable. originalfctx.renamed() may be a bit
+ # expensive so it's not called unless necessary. Assuming datafunc is
+ # always expensive, do not call it for this "reusable" test.
+ reusable = datafunc is None and ctxmatch() and copiedmatch()
+
+ if datafunc is None:
+ datafunc = originalfctx.data
+ if flags is None:
+ flags = originalfctx.flags()
+
+ self._datafunc = datafunc
+ self._flags = flags
+ self._copied = copied
+
+ if reusable:
+ # copy extra fields from originalfctx
+ attrs = ['rawdata', 'rawflags', '_filenode', '_filerev']
+ for attr in attrs:
+ if util.safehasattr(originalfctx, attr):
+ setattr(self, attr, getattr(originalfctx, attr))
+
+ def data(self):
+ return self._datafunc()
+
class metadataonlyctx(committablectx):
"""Like memctx but it's reusing the manifest of different commit.
Intended to be used by lightweight operations that are creating
@@ -2129,14 +2280,6 @@
self._files = originalctx.files()
self.substate = {}
- if extra:
- self._extra = extra.copy()
- else:
- self._extra = {}
-
- if self._extra.get('branch', '') == '':
- self._extra['branch'] = 'default'
-
if editor:
self._text = editor(self._repo, self, [])
self._repo.savecommitmessage(self._text)
@@ -2144,7 +2287,7 @@
def manifestnode(self):
return self._manifestnode
- @propertycache
+ @property
def _manifestctx(self):
return self._repo.manifestlog[self._manifestnode]
--- a/mercurial/copies.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/copies.py Wed Jul 19 07:51:41 2017 -0500
@@ -414,13 +414,15 @@
baselabel='topological common ancestor')
for f in u1u:
- _checkcopies(c1, f, m1, m2, base, tca, dirtyc1, limit, data1)
+ _checkcopies(c1, c2, f, base, tca, dirtyc1, limit, data1)
for f in u2u:
- _checkcopies(c2, f, m2, m1, base, tca, dirtyc2, limit, data2)
+ _checkcopies(c2, c1, f, base, tca, dirtyc2, limit, data2)
- copy = dict(data1['copy'].items() + data2['copy'].items())
- fullcopy = dict(data1['fullcopy'].items() + data2['fullcopy'].items())
+ copy = dict(data1['copy'])
+ copy.update(data2['copy'])
+ fullcopy = dict(data1['fullcopy'])
+ fullcopy.update(data2['fullcopy'])
if dirtyc1:
_combinecopies(data2['incomplete'], data1['incomplete'], copy, diverge,
@@ -462,8 +464,8 @@
'incompletediverge': bothincompletediverge
}
for f in bothnew:
- _checkcopies(c1, f, m1, m2, base, tca, dirtyc1, limit, both1)
- _checkcopies(c2, f, m2, m1, base, tca, dirtyc2, limit, both2)
+ _checkcopies(c1, c2, f, base, tca, dirtyc1, limit, both1)
+ _checkcopies(c2, c1, f, base, tca, dirtyc2, limit, both2)
if dirtyc1:
# incomplete copies may only be found on the "dirty" side for bothnew
assert not both2['incomplete']
@@ -598,17 +600,16 @@
except StopIteration:
return False
-def _checkcopies(ctx, f, m1, m2, base, tca, remotebase, limit, data):
+def _checkcopies(srcctx, dstctx, f, base, tca, remotebase, limit, data):
"""
- check possible copies of f from m1 to m2
+ check possible copies of f from msrc to mdst
- ctx = starting context for f in m1
- f = the filename to check (as in m1)
- m1 = the source manifest
- m2 = the destination manifest
+ srcctx = starting context for f in msrc
+ dstctx = destination context for f in mdst
+ f = the filename to check (as in msrc)
base = the changectx used as a merge base
tca = topological common ancestor for graft-like scenarios
- remotebase = True if base is outside tca::ctx, False otherwise
+ remotebase = True if base is outside tca::srcctx, False otherwise
limit = the rev number to not search beyond
data = dictionary of dictionary to store copy data. (see mergecopies)
@@ -618,6 +619,8 @@
once it "goes behind a certain revision".
"""
+ msrc = srcctx.manifest()
+ mdst = dstctx.manifest()
mb = base.manifest()
mta = tca.manifest()
# Might be true if this call is about finding backward renames,
@@ -630,15 +633,16 @@
# the base) this is more complicated as we must detect a divergence.
# We use 'backwards = False' in that case.
backwards = not remotebase and base != tca and f in mb
- getfctx = _makegetfctx(ctx)
+ getsrcfctx = _makegetfctx(srcctx)
+ getdstfctx = _makegetfctx(dstctx)
- if m1[f] == mb.get(f) and not remotebase:
+ if msrc[f] == mb.get(f) and not remotebase:
# Nothing to merge
return
of = None
- seen = set([f])
- for oc in getfctx(f, m1[f]).ancestors():
+ seen = {f}
+ for oc in getsrcfctx(f, msrc[f]).ancestors():
ocr = oc.linkrev()
of = oc.path()
if of in seen:
@@ -653,11 +657,11 @@
data['fullcopy'][of] = f # grafting backwards through renames
else:
data['fullcopy'][f] = of
- if of not in m2:
+ if of not in mdst:
continue # no match, keep looking
- if m2[of] == mb.get(of):
+ if mdst[of] == mb.get(of):
return # no merge needed, quit early
- c2 = getfctx(of, m2[of])
+ c2 = getdstfctx(of, mdst[of])
# c2 might be a plain new file on added on destination side that is
# unrelated to the droids we are looking for.
cr = _related(oc, c2, tca.rev())
--- a/mercurial/crecord.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/crecord.py Wed Jul 19 07:51:41 2017 -0500
@@ -427,6 +427,54 @@
self.pretty(x)
return x.getvalue()
+ def reversehunk(self):
+ """return a recordhunk which is the reverse of the hunk
+
+ Assuming the displayed patch is diff(A, B) result. The returned hunk is
+ intended to be applied to B, instead of A.
+
+ For example, when A is "0\n1\n2\n6\n" and B is "0\n3\n4\n5\n6\n", and
+ the user made the following selection:
+
+ 0
+ [x] -1 [x]: selected
+ [ ] -2 [ ]: not selected
+ [x] +3
+ [ ] +4
+ [x] +5
+ 6
+
+ This function returns a hunk like:
+
+ 0
+ -3
+ -4
+ -5
+ +1
+ +4
+ 6
+
+ Note "4" was first deleted then added. That's because "4" exists in B
+ side and "-4" must exist between "-3" and "-5" to make the patch
+ applicable to B.
+ """
+ dels = []
+ adds = []
+ for line in self.changedlines:
+ text = line.linetext
+ if line.applied:
+ if text[0] == '+':
+ dels.append(text[1:])
+ elif text[0] == '-':
+ adds.append(text[1:])
+ elif text[0] == '+':
+ dels.append(text[1:])
+ adds.append(text[1:])
+ hunk = ['-%s' % l for l in dels] + ['+%s' % l for l in adds]
+ h = self._hunk
+ return patchmod.recordhunk(h.header, h.toline, h.fromline, h.proc,
+ h.before, hunk, h.after)
+
def __getattr__(self, name):
return getattr(self._hunk, name)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/dagop.py Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,497 @@
+# dagop.py - graph ancestry and topology algorithm for revset
+#
+# Copyright 2010 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from __future__ import absolute_import
+
+import heapq
+
+from . import (
+ error,
+ mdiff,
+ node,
+ patch,
+ smartset,
+)
+
+baseset = smartset.baseset
+generatorset = smartset.generatorset
+
+# possible maximum depth between null and wdir()
+_maxlogdepth = 0x80000000
+
+def _walkrevtree(pfunc, revs, startdepth, stopdepth, reverse):
+ """Walk DAG using 'pfunc' from the given 'revs' nodes
+
+ 'pfunc(rev)' should return the parent/child revisions of the given 'rev'
+ if 'reverse' is True/False respectively.
+
+    Scan ends at the stopdepth (exclusive) if specified. Revisions found
+ earlier than the startdepth are omitted.
+ """
+ if startdepth is None:
+ startdepth = 0
+ if stopdepth is None:
+ stopdepth = _maxlogdepth
+ if stopdepth == 0:
+ return
+ if stopdepth < 0:
+ raise error.ProgrammingError('negative stopdepth')
+ if reverse:
+ heapsign = -1 # max heap
+ else:
+ heapsign = +1 # min heap
+
+ # load input revs lazily to heap so earlier revisions can be yielded
+ # without fully computing the input revs
+ revs.sort(reverse)
+ irevs = iter(revs)
+ pendingheap = [] # [(heapsign * rev, depth), ...] (i.e. lower depth first)
+
+ inputrev = next(irevs, None)
+ if inputrev is not None:
+ heapq.heappush(pendingheap, (heapsign * inputrev, 0))
+
+ lastrev = None
+ while pendingheap:
+ currev, curdepth = heapq.heappop(pendingheap)
+ currev = heapsign * currev
+ if currev == inputrev:
+ inputrev = next(irevs, None)
+ if inputrev is not None:
+ heapq.heappush(pendingheap, (heapsign * inputrev, 0))
+ # rescan parents until curdepth >= startdepth because queued entries
+ # of the same revision are iterated from the lowest depth
+ foundnew = (currev != lastrev)
+ if foundnew and curdepth >= startdepth:
+ lastrev = currev
+ yield currev
+ pdepth = curdepth + 1
+ if foundnew and pdepth < stopdepth:
+ for prev in pfunc(currev):
+ if prev != node.nullrev:
+ heapq.heappush(pendingheap, (heapsign * prev, pdepth))
+
+def _genrevancestors(repo, revs, followfirst, startdepth, stopdepth):
+ if followfirst:
+ cut = 1
+ else:
+ cut = None
+ cl = repo.changelog
+ def pfunc(rev):
+ try:
+ return cl.parentrevs(rev)[:cut]
+ except error.WdirUnsupported:
+ return (pctx.rev() for pctx in repo[rev].parents()[:cut])
+ return _walkrevtree(pfunc, revs, startdepth, stopdepth, reverse=True)
+
+def revancestors(repo, revs, followfirst, startdepth=None, stopdepth=None):
+ """Like revlog.ancestors(), but supports additional options, includes
+ the given revs themselves, and returns a smartset
+
+    Scan ends at the stopdepth (exclusive) if specified. Revisions found
+ earlier than the startdepth are omitted.
+ """
+ gen = _genrevancestors(repo, revs, followfirst, startdepth, stopdepth)
+ return generatorset(gen, iterasc=False)
+
+def _genrevdescendants(repo, revs, followfirst):
+ if followfirst:
+ cut = 1
+ else:
+ cut = None
+
+ cl = repo.changelog
+ first = revs.min()
+ nullrev = node.nullrev
+ if first == nullrev:
+ # Are there nodes with a null first parent and a non-null
+ # second one? Maybe. Do we care? Probably not.
+ yield first
+ for i in cl:
+ yield i
+ else:
+ seen = set(revs)
+ for i in cl.revs(first):
+ if i in seen:
+ yield i
+ continue
+ for x in cl.parentrevs(i)[:cut]:
+ if x != nullrev and x in seen:
+ seen.add(i)
+ yield i
+ break
+
+def _builddescendantsmap(repo, startrev, followfirst):
+ """Build map of 'rev -> child revs', offset from startrev"""
+ cl = repo.changelog
+ nullrev = node.nullrev
+ descmap = [[] for _rev in xrange(startrev, len(cl))]
+ for currev in cl.revs(startrev + 1):
+ p1rev, p2rev = cl.parentrevs(currev)
+ if p1rev >= startrev:
+ descmap[p1rev - startrev].append(currev)
+ if not followfirst and p2rev != nullrev and p2rev >= startrev:
+ descmap[p2rev - startrev].append(currev)
+ return descmap
+
+def _genrevdescendantsofdepth(repo, revs, followfirst, startdepth, stopdepth):
+ startrev = revs.min()
+ descmap = _builddescendantsmap(repo, startrev, followfirst)
+ def pfunc(rev):
+ return descmap[rev - startrev]
+ return _walkrevtree(pfunc, revs, startdepth, stopdepth, reverse=False)
+
+def revdescendants(repo, revs, followfirst, startdepth=None, stopdepth=None):
+ """Like revlog.descendants() but supports additional options, includes
+ the given revs themselves, and returns a smartset
+
+    Scan ends at the stopdepth (exclusive) if specified. Revisions found
+ earlier than the startdepth are omitted.
+ """
+ if startdepth is None and stopdepth is None:
+ gen = _genrevdescendants(repo, revs, followfirst)
+ else:
+ gen = _genrevdescendantsofdepth(repo, revs, followfirst,
+ startdepth, stopdepth)
+ return generatorset(gen, iterasc=True)
+
+def _reachablerootspure(repo, minroot, roots, heads, includepath):
+ """return (heads(::<roots> and ::<heads>))
+
+ If includepath is True, return (<roots>::<heads>)."""
+ if not roots:
+ return []
+ parentrevs = repo.changelog.parentrevs
+ roots = set(roots)
+ visit = list(heads)
+ reachable = set()
+ seen = {}
+ # prefetch all the things! (because python is slow)
+ reached = reachable.add
+ dovisit = visit.append
+ nextvisit = visit.pop
+ # open-code the post-order traversal due to the tiny size of
+ # sys.getrecursionlimit()
+ while visit:
+ rev = nextvisit()
+ if rev in roots:
+ reached(rev)
+ if not includepath:
+ continue
+ parents = parentrevs(rev)
+ seen[rev] = parents
+ for parent in parents:
+ if parent >= minroot and parent not in seen:
+ dovisit(parent)
+ if not reachable:
+ return baseset()
+ if not includepath:
+ return reachable
+ for rev in sorted(seen):
+ for parent in seen[rev]:
+ if parent in reachable:
+ reached(rev)
+ return reachable
+
+def reachableroots(repo, roots, heads, includepath=False):
+ """return (heads(::<roots> and ::<heads>))
+
+ If includepath is True, return (<roots>::<heads>)."""
+ if not roots:
+ return baseset()
+ minroot = roots.min()
+ roots = list(roots)
+ heads = list(heads)
+ try:
+ revs = repo.changelog.reachableroots(minroot, heads, roots, includepath)
+ except AttributeError:
+ revs = _reachablerootspure(repo, minroot, roots, heads, includepath)
+ revs = baseset(revs)
+ revs.sort()
+ return revs
+
+def _changesrange(fctx1, fctx2, linerange2, diffopts):
+ """Return `(diffinrange, linerange1)` where `diffinrange` is True
+ if diff from fctx2 to fctx1 has changes in linerange2 and
+ `linerange1` is the new line range for fctx1.
+ """
+ blocks = mdiff.allblocks(fctx1.data(), fctx2.data(), diffopts)
+ filteredblocks, linerange1 = mdiff.blocksinrange(blocks, linerange2)
+ diffinrange = any(stype == '!' for _, stype in filteredblocks)
+ return diffinrange, linerange1
+
+def blockancestors(fctx, fromline, toline, followfirst=False):
+ """Yield ancestors of `fctx` with respect to the block of lines within
+ `fromline`-`toline` range.
+ """
+ diffopts = patch.diffopts(fctx._repo.ui)
+ introrev = fctx.introrev()
+ if fctx.rev() != introrev:
+ fctx = fctx.filectx(fctx.filenode(), changeid=introrev)
+ visit = {(fctx.linkrev(), fctx.filenode()): (fctx, (fromline, toline))}
+ while visit:
+ c, linerange2 = visit.pop(max(visit))
+ pl = c.parents()
+ if followfirst:
+ pl = pl[:1]
+ if not pl:
+ # The block originates from the initial revision.
+ yield c, linerange2
+ continue
+ inrange = False
+ for p in pl:
+ inrangep, linerange1 = _changesrange(p, c, linerange2, diffopts)
+ inrange = inrange or inrangep
+ if linerange1[0] == linerange1[1]:
+ # Parent's linerange is empty, meaning that the block got
+                # introduced in this revision; no need to go further in this
+ # branch.
+ continue
+ # Set _descendantrev with 'c' (a known descendant) so that, when
+ # _adjustlinkrev is called for 'p', it receives this descendant
+            # (as srcrev) instead of the possibly topmost introrev.
+ p._descendantrev = c.rev()
+ visit[p.linkrev(), p.filenode()] = p, linerange1
+ if inrange:
+ yield c, linerange2
+
+def blockdescendants(fctx, fromline, toline):
+ """Yield descendants of `fctx` with respect to the block of lines within
+ `fromline`-`toline` range.
+ """
+ # First possibly yield 'fctx' if it has changes in range with respect to
+ # its parents.
+ try:
+ c, linerange1 = next(blockancestors(fctx, fromline, toline))
+ except StopIteration:
+ pass
+ else:
+ if c == fctx:
+ yield c, linerange1
+
+ diffopts = patch.diffopts(fctx._repo.ui)
+ fl = fctx.filelog()
+ seen = {fctx.filerev(): (fctx, (fromline, toline))}
+ for i in fl.descendants([fctx.filerev()]):
+ c = fctx.filectx(i)
+ inrange = False
+ for x in fl.parentrevs(i):
+ try:
+ p, linerange2 = seen[x]
+ except KeyError:
+ # nullrev or other branch
+ continue
+ inrangep, linerange1 = _changesrange(c, p, linerange2, diffopts)
+ inrange = inrange or inrangep
+ # If revision 'i' has been seen (it's a merge) and the line range
+ # previously computed differs from the one we just got, we take the
+        # surrounding interval. This is conservative but avoids losing
+ # information.
+ if i in seen and seen[i][1] != linerange1:
+ lbs, ubs = zip(linerange1, seen[i][1])
+ linerange1 = min(lbs), max(ubs)
+ seen[i] = c, linerange1
+ if inrange:
+ yield c, linerange1
+
+def toposort(revs, parentsfunc, firstbranch=()):
+ """Yield revisions from heads to roots one (topo) branch at a time.
+
+ This function aims to be used by a graph generator that wishes to minimize
+ the number of parallel branches and their interleaving.
+
+ Example iteration order (numbers show the "true" order in a changelog):
+
+ o 4
+ |
+ o 1
+ |
+ | o 3
+ | |
+ | o 2
+ |/
+ o 0
+
+ Note that the ancestors of merges are understood by the current
+ algorithm to be on the same branch. This means no reordering will
+ occur behind a merge.
+ """
+
+ ### Quick summary of the algorithm
+ #
+ # This function is based around a "retention" principle. We keep revisions
+ # in memory until we are ready to emit a whole branch that immediately
+ # "merges" into an existing one. This reduces the number of parallel
+ # branches with interleaved revisions.
+ #
+ # During iteration revs are split into two groups:
+ # A) revision already emitted
+ # B) revision in "retention". They are stored as different subgroups.
+ #
+ # for each REV, we do the following logic:
+ #
+ # 1) if REV is a parent of (A), we will emit it. If there is a
+ # retention group ((B) above) that is blocked on REV being
+ # available, we emit all the revisions out of that retention
+ # group first.
+ #
+ # 2) else, we'll search for a subgroup in (B) awaiting for REV to be
+ # available, if such subgroup exist, we add REV to it and the subgroup is
+ # now awaiting for REV.parents() to be available.
+ #
+ # 3) finally if no such group existed in (B), we create a new subgroup.
+ #
+ #
+ # To bootstrap the algorithm, we emit the tipmost revision (which
+ # puts it in group (A) from above).
+
+ revs.sort(reverse=True)
+
+ # Set of parents of revision that have been emitted. They can be considered
+ # unblocked as the graph generator is already aware of them so there is no
+ # need to delay the revisions that reference them.
+ #
+ # If someone wants to prioritize a branch over the others, pre-filling this
+ # set will force all other branches to wait until this branch is ready to be
+ # emitted.
+ unblocked = set(firstbranch)
+
+ # list of groups waiting to be displayed, each group is defined by:
+ #
+ # (revs: lists of revs waiting to be displayed,
+    #   blocked: set of revs that cannot be displayed before those in 'revs')
+ #
+    # The second value ('blocked') corresponds to parents of any revision in the
+ # group ('revs') that is not itself contained in the group. The main idea
+ # of this algorithm is to delay as much as possible the emission of any
+ # revision. This means waiting for the moment we are about to display
+ # these parents to display the revs in a group.
+ #
+ # This first implementation is smart until it encounters a merge: it will
+ # emit revs as soon as any parent is about to be emitted and can grow an
+    # arbitrary number of revs in 'blocked'. In practice this means we properly
+    # retain new branches but give up on any special ordering for ancestors
+ # of merges. The implementation can be improved to handle this better.
+ #
+    # The first subgroup is special. It corresponds to all the revisions that
+    # were already emitted. The 'revs' list is expected to be empty and the
+ # 'blocked' set contains the parents revisions of already emitted revision.
+ #
+    # You could pre-seed the <parents> set of groups[0] with specific
+    # changesets to select what the first emitted branch should be.
+ groups = [([], unblocked)]
+ pendingheap = []
+ pendingset = set()
+
+ heapq.heapify(pendingheap)
+ heappop = heapq.heappop
+ heappush = heapq.heappush
+ for currentrev in revs:
+ # Heap works with smallest element, we want highest so we invert
+ if currentrev not in pendingset:
+ heappush(pendingheap, -currentrev)
+ pendingset.add(currentrev)
+        # iterate on pending revs until after the current rev has been
+        # processed.
+ rev = None
+ while rev != currentrev:
+ rev = -heappop(pendingheap)
+ pendingset.remove(rev)
+
+ # Seek for a subgroup blocked, waiting for the current revision.
+ matching = [i for i, g in enumerate(groups) if rev in g[1]]
+
+ if matching:
+ # The main idea is to gather together all sets that are blocked
+ # on the same revision.
+ #
+ # Groups are merged when a common blocking ancestor is
+ # observed. For example, given two groups:
+ #
+ # revs [5, 4] waiting for 1
+ # revs [3, 2] waiting for 1
+ #
+ # These two groups will be merged when we process
+ # 1. In theory, we could have merged the groups when
+ # we added 2 to the group it is now in (we could have
+ # noticed the groups were both blocked on 1 then), but
+ # the way it works now makes the algorithm simpler.
+ #
+ # We also always keep the oldest subgroup first. We can
+ # probably improve the behavior by having the longest set
+ # first. That way, graph algorithms could minimise the length
+                # of parallel lines in their drawing. This is currently not done.
+ targetidx = matching.pop(0)
+ trevs, tparents = groups[targetidx]
+ for i in matching:
+ gr = groups[i]
+ trevs.extend(gr[0])
+ tparents |= gr[1]
+ # delete all merged subgroups (except the one we kept)
+ # (starting from the last subgroup for performance and
+ # sanity reasons)
+ for i in reversed(matching):
+ del groups[i]
+ else:
+ # This is a new head. We create a new subgroup for it.
+ targetidx = len(groups)
+ groups.append(([], {rev}))
+
+ gr = groups[targetidx]
+
+            # We now add the current node to this subgroup. This is done
+ # after the subgroup merging because all elements from a subgroup
+ # that relied on this rev must precede it.
+ #
+ # we also update the <parents> set to include the parents of the
+ # new nodes.
+ if rev == currentrev: # only display stuff in rev
+ gr[0].append(rev)
+ gr[1].remove(rev)
+ parents = [p for p in parentsfunc(rev) if p > node.nullrev]
+ gr[1].update(parents)
+ for p in parents:
+ if p not in pendingset:
+ pendingset.add(p)
+ heappush(pendingheap, -p)
+
+ # Look for a subgroup to display
+ #
+ # When unblocked is empty (if clause), we were not waiting for any
+ # revisions during the first iteration (if no priority was given) or
+ # if we emitted a whole disconnected set of the graph (reached a
+ # root). In that case we arbitrarily take the oldest known
+ # subgroup. The heuristic could probably be better.
+ #
+ # Otherwise (elif clause) if the subgroup is blocked on
+ # a revision we just emitted, we can safely emit it as
+ # well.
+ if not unblocked:
+ if len(groups) > 1: # display other subset
+ targetidx = 1
+ gr = groups[1]
+ elif not gr[1] & unblocked:
+ gr = None
+
+ if gr is not None:
+ # update the set of awaited revisions with the one from the
+ # subgroup
+ unblocked |= gr[1]
+ # output all revisions in the subgroup
+ for r in gr[0]:
+ yield r
+ # delete the subgroup that you just output
+ # unless it is groups[0] in which case you just empty it.
+ if targetidx:
+ del groups[targetidx]
+ else:
+ gr[0][:] = []
+ # Check if we have some subgroup waiting for revisions we are not going to
+ # iterate over
+ for g in groups:
+ for r in g[0]:
+ yield r
--- a/mercurial/debugcommands.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/debugcommands.py Wed Jul 19 07:51:41 2017 -0500
@@ -13,6 +13,7 @@
import os
import random
import socket
+import ssl
import string
import sys
import tempfile
@@ -32,7 +33,6 @@
changegroup,
cmdutil,
color,
- commands,
context,
dagparser,
dagutil,
@@ -40,6 +40,7 @@
error,
exchange,
extensions,
+ filemerge,
fileset,
formatter,
hg,
@@ -47,9 +48,12 @@
lock as lockmod,
merge as mergemod,
obsolete,
+ obsutil,
+ phases,
policy,
pvec,
pycompat,
+ registrar,
repair,
revlog,
revset,
@@ -69,9 +73,7 @@
release = lockmod.release
-# We reuse the command table from commands because it is easier than
-# teaching dispatch about multiple tables.
-command = cmdutil.command(commands.table)
+command = registrar.command()
@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
@@ -289,20 +291,60 @@
ui.write("%s%s\n" % (indent_string, hex(node)))
chain = node
+def _debugobsmarkers(ui, part, indent=0, **opts):
+ """display version and markers contained in 'data'"""
+ opts = pycompat.byteskwargs(opts)
+ data = part.read()
+ indent_string = ' ' * indent
+ try:
+ version, markers = obsolete._readmarkers(data)
+ except error.UnknownVersion as exc:
+ msg = "%sunsupported version: %s (%d bytes)\n"
+ msg %= indent_string, exc.version, len(data)
+ ui.write(msg)
+ else:
+ msg = "%sversion: %s (%d bytes)\n"
+ msg %= indent_string, version, len(data)
+ ui.write(msg)
+ fm = ui.formatter('debugobsolete', opts)
+ for rawmarker in sorted(markers):
+ m = obsutil.marker(None, rawmarker)
+ fm.startitem()
+ fm.plain(indent_string)
+ cmdutil.showmarker(fm, m)
+ fm.end()
+
+def _debugphaseheads(ui, data, indent=0):
+ """display version and markers contained in 'data'"""
+ indent_string = ' ' * indent
+ headsbyphase = bundle2._readphaseheads(data)
+ for phase in phases.allphases:
+ for head in headsbyphase[phase]:
+ ui.write(indent_string)
+ ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
+
def _debugbundle2(ui, gen, all=None, **opts):
"""lists the contents of a bundle2"""
if not isinstance(gen, bundle2.unbundle20):
raise error.Abort(_('not a bundle2 file'))
ui.write(('Stream params: %s\n' % repr(gen.params)))
+ parttypes = opts.get(r'part_type', [])
for part in gen.iterparts():
+ if parttypes and part.type not in parttypes:
+ continue
ui.write('%s -- %r\n' % (part.type, repr(part.params)))
if part.type == 'changegroup':
version = part.params.get('version', '01')
cg = changegroup.getunbundler(version, part, 'UN')
_debugchangegroup(ui, cg, all=all, indent=4, **opts)
+ if part.type == 'obsmarkers':
+ _debugobsmarkers(ui, part, indent=4, **opts)
+ if part.type == 'phase-heads':
+ _debugphaseheads(ui, part, indent=4)
@command('debugbundle',
[('a', 'all', None, _('show all details')),
+ ('', 'part-type', [], _('show only the named part type')),
('', 'spec', None, _('print the bundlespec of the bundle'))],
_('FILE'),
norepo=True)
@@ -353,7 +395,7 @@
def debugcolor(ui, repo, **opts):
"""show available color, effects or style"""
ui.write(('color mode: %s\n') % ui._colormode)
- if opts.get('style'):
+ if opts.get(r'style'):
return _debugdisplaystyle(ui)
else:
return _debugdisplaycolor(ui)
@@ -388,42 +430,6 @@
ui.write(', '.join(ui.label(e, e) for e in effects.split()))
ui.write('\n')
-@command('debugcommands', [], _('[COMMAND]'), norepo=True)
-def debugcommands(ui, cmd='', *args):
- """list all available commands and options"""
- for cmd, vals in sorted(commands.table.iteritems()):
- cmd = cmd.split('|')[0].strip('^')
- opts = ', '.join([i[1] for i in vals[1]])
- ui.write('%s: %s\n' % (cmd, opts))
-
-@command('debugcomplete',
- [('o', 'options', None, _('show the command options'))],
- _('[-o] CMD'),
- norepo=True)
-def debugcomplete(ui, cmd='', **opts):
- """returns the completion list associated with the given command"""
-
- if opts.get('options'):
- options = []
- otables = [commands.globalopts]
- if cmd:
- aliases, entry = cmdutil.findcmd(cmd, commands.table, False)
- otables.append(entry[1])
- for t in otables:
- for o in t:
- if "(DEPRECATED)" in o[3]:
- continue
- if o[0]:
- options.append('-%s' % o[0])
- options.append('--%s' % o[1])
- ui.write("%s\n" % "\n".join(options))
- return
-
- cmdlist, unused_allcmds = cmdutil.findpossible(cmd, commands.table)
- if ui.verbose:
- cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
- ui.write("%s\n" % "\n".join(sorted(cmdlist)))
-
@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
"""create a stream clone bundle file
@@ -431,6 +437,12 @@
Stream bundles are special bundles that are essentially archives of
revlog files. They are commonly used for cloning very quickly.
"""
+ # TODO we may want to turn this into an abort when this functionality
+ # is moved into `hg bundle`.
+ if phases.hassecret(repo):
+ ui.warn(_('(warning: stream clone bundle will contain secret '
+ 'revisions)\n'))
+
requirements, gen = streamclone.generatebundlev1(repo)
changegroup.writechunks(ui, gen, fname)
@@ -451,8 +463,8 @@
Otherwise, the changelog DAG of the current repo is emitted.
"""
- spaces = opts.get('spaces')
- dots = opts.get('dots')
+ spaces = opts.get(r'spaces')
+ dots = opts.get(r'dots')
if file_:
rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
file_)
@@ -465,8 +477,8 @@
yield 'l', (r, "r%i" % r)
elif repo:
cl = repo.changelog
- tags = opts.get('tags')
- branches = opts.get('branches')
+ tags = opts.get(r'tags')
+ branches = opts.get(r'branches')
if tags:
labels = {}
for l, n in repo.tags().items():
@@ -499,9 +511,10 @@
ui.write(line)
ui.write("\n")
-@command('debugdata', commands.debugrevlogopts, _('-c|-m|FILE REV'))
+@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
"""dump the contents of a data file revision"""
+ opts = pycompat.byteskwargs(opts)
if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
if rev is not None:
raise error.CommandError('debugdata', _('invalid arguments'))
@@ -520,7 +533,7 @@
norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
"""parse and display a date"""
- if opts["extended"]:
+ if opts[r"extended"]:
d = util.parsedate(date, util.extendeddateformats)
else:
d = util.parsedate(date)
@@ -531,7 +544,7 @@
ui.write(("match: %s\n") % m(d[0]))
@command('debugdeltachain',
- commands.debugrevlogopts + commands.formatteropts,
+ cmdutil.debugrevlogopts + cmdutil.formatteropts,
_('-c|-m|FILE'),
optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
@@ -558,9 +571,10 @@
:``extraratio``: extradist divided by chainsize; another representation of
how much unrelated data is needed to load this delta chain
"""
+ opts = pycompat.byteskwargs(opts)
r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
index = r.index
- generaldelta = r.version & revlog.REVLOGGENERALDELTA
+ generaldelta = r.version & revlog.FLAG_GENERALDELTA
def revinfo(rev):
e = index[rev]
@@ -638,8 +652,8 @@
def debugstate(ui, repo, **opts):
"""show the contents of the current dirstate"""
- nodates = opts.get('nodates')
- datesort = opts.get('datesort')
+ nodates = opts.get(r'nodates')
+ datesort = opts.get(r'datesort')
timestr = ""
if datesort:
@@ -666,10 +680,11 @@
[('', 'old', None, _('use old-style discovery')),
('', 'nonheads', None,
_('use old-style discovery with non-heads included')),
- ] + commands.remoteopts,
+ ] + cmdutil.remoteopts,
_('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
"""runs the changeset discovery protocol in isolation"""
+ opts = pycompat.byteskwargs(opts)
remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
opts.get('branch'))
remote = hg.peer(repo, opts, remoteurl)
@@ -728,9 +743,10 @@
localrevs = opts.get('local_head')
doit(localrevs, remoterevs)
-@command('debugextensions', commands.formatteropts, [], norepo=True)
+@command('debugextensions', cmdutil.formatteropts, [], norepo=True)
def debugextensions(ui, **opts):
'''show information about active extensions'''
+ opts = pycompat.byteskwargs(opts)
exts = extensions.extensions(ui)
hgver = util.version()
fm = ui.formatter('debugextensions', opts)
@@ -778,7 +794,7 @@
_('[-r REV] FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
'''parse and apply a fileset specification'''
- ctx = scmutil.revsingle(repo, opts.get('rev'), None)
+ ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
if ui.verbose:
tree = fileset.parse(expr)
ui.note(fileset.prettyformat(tree), "\n")
@@ -813,16 +829,17 @@
Every ID must be a full-length hex node id string. Saves the bundle to the
given file.
"""
+ opts = pycompat.byteskwargs(opts)
repo = hg.peer(ui, opts, repopath)
if not repo.capable('getbundle'):
raise error.Abort("getbundle() not supported by target repository")
args = {}
if common:
- args['common'] = [bin(s) for s in common]
+ args[r'common'] = [bin(s) for s in common]
if head:
- args['heads'] = [bin(s) for s in head]
+ args[r'heads'] = [bin(s) for s in head]
# TODO: get desired bundlecaps from command line.
- args['bundlecaps'] = None
+ args[r'bundlecaps'] = None
bundle = repo.getbundle('debug', **args)
bundletype = opts.get('type', 'bzip2').lower()
@@ -847,13 +864,10 @@
ignore = repo.dirstate._ignore
if not files:
# Show all the patterns
- includepat = getattr(ignore, 'includepat', None)
- if includepat is not None:
- ui.write("%s\n" % includepat)
- else:
- raise error.Abort(_("no ignore patterns found"))
+ ui.write("%s\n" % repr(ignore))
else:
- for f in files:
+ m = scmutil.match(repo[None], pats=files)
+ for f in m.files():
nf = util.normpath(f)
ignored = None
ignoredata = None
@@ -869,29 +883,30 @@
break
if ignored:
if ignored == nf:
- ui.write(_("%s is ignored\n") % f)
+ ui.write(_("%s is ignored\n") % m.uipath(f))
else:
ui.write(_("%s is ignored because of "
"containing folder %s\n")
- % (f, ignored))
+ % (m.uipath(f), ignored))
ignorefile, lineno, line = ignoredata
ui.write(_("(ignore rule in %s, line %d: '%s')\n")
% (ignorefile, lineno, line))
else:
- ui.write(_("%s is not ignored\n") % f)
+ ui.write(_("%s is not ignored\n") % m.uipath(f))
-@command('debugindex', commands.debugrevlogopts +
+@command('debugindex', cmdutil.debugrevlogopts +
[('f', 'format', 0, _('revlog format'), _('FORMAT'))],
_('[-f FORMAT] -c|-m|FILE'),
optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
"""dump the contents of an index file"""
+ opts = pycompat.byteskwargs(opts)
r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
format = opts.get('format', 0)
if format not in (0, 1):
raise error.Abort(_("unknown format %d") % format)
- generaldelta = r.version & revlog.REVLOGGENERALDELTA
+ generaldelta = r.version & revlog.FLAG_GENERALDELTA
if generaldelta:
basehdr = ' delta'
else:
@@ -936,10 +951,11 @@
i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
-@command('debugindexdot', commands.debugrevlogopts,
+@command('debugindexdot', cmdutil.debugrevlogopts,
_('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
"""dump an index DAG as a graphviz dot file"""
+ opts = pycompat.byteskwargs(opts)
r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
ui.write(("digraph G {\n"))
for i in r:
@@ -950,12 +966,13 @@
ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
ui.write("}\n")
-@command('debuginstall', [] + commands.formatteropts, '', norepo=True)
+@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
'''test Mercurial installation
Returns 0 on success.
'''
+ opts = pycompat.byteskwargs(opts)
def writetemp(contents):
(fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
@@ -1020,19 +1037,20 @@
fm.write('hgmodules', _("checking installed modules (%s)...\n"),
os.path.dirname(pycompat.fsencode(__file__)))
- err = None
- try:
- from . import (
- base85,
- bdiff,
- mpatch,
- osutil,
- )
- dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
- except Exception as inst:
- err = inst
- problems += 1
- fm.condwrite(err, 'extensionserror', " %s\n", err)
+ if policy.policy in ('c', 'allow'):
+ err = None
+ try:
+ from .cext import (
+ base85,
+ bdiff,
+ mpatch,
+ osutil,
+ )
+ dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
+ except Exception as inst:
+ err = inst
+ problems += 1
+ fm.condwrite(err, 'extensionserror', " %s\n", err)
compengines = util.compengines._engines.values()
fm.write('compengines', _('checking registered compression engines (%s)\n'),
@@ -1123,6 +1141,7 @@
Every ID must be a full-length hex node id string. Returns a list of 0s
and 1s indicating unknown/known.
"""
+ opts = pycompat.byteskwargs(opts)
repo = hg.peer(ui, opts, repopath)
if not repo.capable('known'):
raise error.Abort("known() not supported by target repository")
@@ -1160,11 +1179,11 @@
"""
- if opts.get('force_lock'):
+ if opts.get(r'force_lock'):
repo.svfs.unlink('lock')
- if opts.get('force_wlock'):
+ if opts.get(r'force_wlock'):
repo.vfs.unlink('wlock')
- if opts.get('force_lock') or opts.get('force_lock'):
+ if opts.get(r'force_lock') or opts.get(r'force_lock'):
return 0
now = time.time()
@@ -1330,15 +1349,19 @@
('', 'record-parents', False,
_('record parent information for the precursor')),
('r', 'rev', [], _('display markers relevant to REV')),
+ ('', 'exclusive', False, _('restrict display to markers only '
+ 'relevant to REV')),
('', 'index', False, _('display index of the marker')),
('', 'delete', [], _('delete markers specified by indices')),
- ] + commands.commitopts2 + commands.formatteropts,
+ ] + cmdutil.commitopts2 + cmdutil.formatteropts,
_('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
"""create arbitrary obsolete marker
With no arguments, displays the list of obsolescence markers."""
+ opts = pycompat.byteskwargs(opts)
+
def parsenodeid(s):
try:
# We do not use revsingle/revrange functions here to accept
@@ -1396,7 +1419,7 @@
parents = tuple(p.node() for p in parents)
repo.obsstore.create(tr, prec, succs, opts['flags'],
parents=parents, date=date,
- metadata=metadata)
+ metadata=metadata, ui=ui)
tr.close()
except ValueError as exc:
raise error.Abort(_('bad obsmarker input: %s') % exc)
@@ -1408,15 +1431,16 @@
if opts['rev']:
revs = scmutil.revrange(repo, opts['rev'])
nodes = [repo[r].node() for r in revs]
- markers = list(obsolete.getmarkers(repo, nodes=nodes))
+ markers = list(obsutil.getmarkers(repo, nodes=nodes,
+ exclusive=opts['exclusive']))
markers.sort(key=lambda x: x._data)
else:
- markers = obsolete.getmarkers(repo)
+ markers = obsutil.getmarkers(repo)
markerstoiter = markers
isrelevant = lambda m: True
if opts.get('rev') and opts.get('index'):
- markerstoiter = obsolete.getmarkers(repo)
+ markerstoiter = obsutil.getmarkers(repo)
markerset = set(markers)
isrelevant = lambda m: m in markerset
@@ -1464,7 +1488,7 @@
if fixpaths:
spec = spec.replace(pycompat.ossep, '/')
speclen = len(spec)
- fullpaths = opts['full']
+ fullpaths = opts[r'full']
files, dirs = set(), set()
adddir, addfile = dirs.add, files.add
for f, st in dirstate.iteritems():
@@ -1482,11 +1506,11 @@
return files, dirs
acceptable = ''
- if opts['normal']:
+ if opts[r'normal']:
acceptable += 'nm'
- if opts['added']:
+ if opts[r'added']:
acceptable += 'a'
- if opts['removed']:
+ if opts[r'removed']:
acceptable += 'r'
cwd = repo.getcwd()
if not specs:
@@ -1501,6 +1525,85 @@
ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
ui.write('\n')
+@command('debugpickmergetool',
+ [('r', 'rev', '', _('check for files in this revision'), _('REV')),
+ ('', 'changedelete', None, _('emulate merging change and delete')),
+ ] + cmdutil.walkopts + cmdutil.mergetoolopts,
+ _('[PATTERN]...'),
+ inferrepo=True)
+def debugpickmergetool(ui, repo, *pats, **opts):
+ """examine which merge tool is chosen for specified file
+
+ As described in :hg:`help merge-tools`, Mercurial examines
+ configurations below in this order to decide which merge tool is
+ chosen for specified file.
+
+ 1. ``--tool`` option
+ 2. ``HGMERGE`` environment variable
+ 3. configurations in ``merge-patterns`` section
+ 4. configuration of ``ui.merge``
+ 5. configurations in ``merge-tools`` section
+ 6. ``hgmerge`` tool (for historical reason only)
+ 7. default tool for fallback (``:merge`` or ``:prompt``)
+
+ This command writes out examination result in the style below::
+
+ FILE = MERGETOOL
+
+ By default, all files known in the first parent context of the
+ working directory are examined. Use file patterns and/or -I/-X
+ options to limit target files. -r/--rev is also useful to examine
+ files in another context without actual updating to it.
+
+ With --debug, this command shows warning messages while matching
+ against ``merge-patterns`` and so on, too. It is recommended to
+ use this option with explicit file patterns and/or -I/-X options,
+ because this option increases amount of output per file according
+ to configurations in hgrc.
+
+ With -v/--verbose, this command shows configurations below at
+ first (only if specified).
+
+ - ``--tool`` option
+ - ``HGMERGE`` environment variable
+ - configuration of ``ui.merge``
+
+ If merge tool is chosen before matching against
+ ``merge-patterns``, this command can't show any helpful
+ information, even with --debug. In such case, information above is
+ useful to know why a merge tool is chosen.
+ """
+ opts = pycompat.byteskwargs(opts)
+ overrides = {}
+ if opts['tool']:
+ overrides[('ui', 'forcemerge')] = opts['tool']
+ ui.note(('with --tool %r\n') % (opts['tool']))
+
+ with ui.configoverride(overrides, 'debugmergepatterns'):
+ hgmerge = encoding.environ.get("HGMERGE")
+ if hgmerge is not None:
+ ui.note(('with HGMERGE=%r\n') % (hgmerge))
+ uimerge = ui.config("ui", "merge")
+ if uimerge:
+ ui.note(('with ui.merge=%r\n') % (uimerge))
+
+ ctx = scmutil.revsingle(repo, opts.get('rev'))
+ m = scmutil.match(ctx, pats, opts)
+ changedelete = opts['changedelete']
+ for path in ctx.walk(m):
+ fctx = ctx[path]
+ try:
+ if not ui.debugflag:
+ ui.pushbuffer(error=True)
+ tool, toolpath = filemerge._picktool(repo, ui, path,
+ fctx.isbinary(),
+ 'l' in fctx.flags(),
+ changedelete)
+ finally:
+ if not ui.debugflag:
+ ui.popbuffer()
+ ui.write(('%s = %s\n') % (path, tool))
+
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
'''access the pushkey key/value protocol
@@ -1571,7 +1674,7 @@
dirstate = repo.dirstate
changedfiles = None
# See command doc for what minimal does.
- if opts.get('minimal'):
+ if opts.get(r'minimal'):
manifestfiles = set(ctx.manifest().keys())
dirstatefiles = set(dirstate)
manifestonly = manifestfiles - dirstatefiles
@@ -1592,6 +1695,7 @@
def debugrename(ui, repo, file1, *pats, **opts):
"""dump rename information"""
+ opts = pycompat.byteskwargs(opts)
ctx = scmutil.revsingle(repo, opts.get('rev'))
m = scmutil.match(ctx, (file1,) + pats, opts)
for abs in ctx.walk(m):
@@ -1603,12 +1707,13 @@
else:
ui.write(_("%s not renamed\n") % rel)
-@command('debugrevlog', commands.debugrevlogopts +
+@command('debugrevlog', cmdutil.debugrevlogopts +
[('d', 'dump', False, _('dump index data'))],
_('-c|-m|FILE'),
optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
"""show data and statistics about a revlog"""
+ opts = pycompat.byteskwargs(opts)
r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
if opts.get("dump"):
@@ -1645,9 +1750,9 @@
format = v & 0xFFFF
flags = []
gdelta = False
- if v & revlog.REVLOGNGINLINEDATA:
+ if v & revlog.FLAG_INLINE_DATA:
flags.append('inline')
- if v & revlog.REVLOGGENERALDELTA:
+ if v & revlog.FLAG_GENERALDELTA:
gdelta = True
flags.append('generaldelta')
if not flags:
@@ -1662,6 +1767,8 @@
nump1prev = 0
nump2prev = 0
chainlengths = []
+ chainbases = []
+ chainspans = []
datasize = [None, 0, 0]
fullsize = [None, 0, 0]
@@ -1687,10 +1794,16 @@
size = r.length(rev)
if delta == nullrev:
chainlengths.append(0)
+ chainbases.append(r.start(rev))
+ chainspans.append(size)
numfull += 1
addsize(size, fullsize)
else:
chainlengths.append(chainlengths[delta] + 1)
+ baseaddr = chainbases[delta]
+ revaddr = r.start(rev)
+ chainbases.append(baseaddr)
+ chainspans.append((revaddr - baseaddr) + size)
addsize(size, deltasize)
if delta == rev - 1:
numprev += 1
@@ -1706,9 +1819,9 @@
numother += 1
# Obtain data on the raw chunks in the revlog.
- chunk = r._chunkraw(rev, rev)[1]
- if chunk:
- chunktype = chunk[0]
+ segment = r._getsegmentforrevs(rev, rev)[1]
+ if segment:
+ chunktype = bytes(segment[0:1])
else:
chunktype = 'empty'
@@ -1736,6 +1849,7 @@
totalsize = fulltotal + deltatotal
avgchainlen = sum(chainlengths) / numrevs
maxchainlen = max(chainlengths)
+ maxchainspan = max(chainspans)
compratio = 1
if totalsize:
compratio = totalrawsize / totalsize
@@ -1773,7 +1887,7 @@
def fmtchunktype(chunktype):
if chunktype == 'empty':
return ' %s : ' % chunktype
- elif chunktype in string.ascii_letters:
+ elif chunktype in pycompat.bytestr(string.ascii_letters):
return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
else:
return ' 0x%s : ' % hex(chunktype)
@@ -1789,9 +1903,10 @@
ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
ui.write('\n')
- fmt = dfmtstr(max(avgchainlen, compratio))
+ fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
ui.write(('avg chain length : ') + fmt % avgchainlen)
ui.write(('max chain length : ') + fmt % maxchainlen)
+ ui.write(('max chain reach : ') + fmt % maxchainspan)
ui.write(('compression ratio : ') + fmt % compratio)
if format > 0:
@@ -1826,6 +1941,8 @@
@command('debugrevspec',
[('', 'optimize', None,
_('print parsed tree after optimizing (DEPRECATED)')),
+ ('', 'show-revs', True, _('print list of result revisions (default)')),
+ ('s', 'show-set', None, _('print internal representation of result set')),
('p', 'show-stage', [],
_('print parsed tree at the given stage'), _('NAME')),
('', 'no-optimized', False, _('evaluate tree without optimization')),
@@ -1838,12 +1955,18 @@
Use -p/--show-stage option to print the parsed tree at the given stages.
Use -p all to print tree at every stage.
+ Use --no-show-revs option with -s or -p to print only the set
+ representation or the parsed tree respectively.
+
Use --verify-optimized to compare the optimized result with the unoptimized
one. Returns 1 if the optimized result differs.
"""
+ opts = pycompat.byteskwargs(opts)
+ aliases = ui.configitems('revsetalias')
stages = [
('parsed', lambda tree: tree),
- ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
+ ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
+ ui.warn)),
('concatenated', revsetlang.foldconcat),
('analyzed', revsetlang.analyze),
('optimized', revsetlang.optimize),
@@ -1887,9 +2010,9 @@
if opts['verify_optimized']:
arevs = revset.makematcher(treebystage['analyzed'])(repo)
brevs = revset.makematcher(treebystage['optimized'])(repo)
- if ui.verbose:
- ui.note(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
- ui.note(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
+ if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
+ ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
+ ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
arevs = list(arevs)
brevs = list(brevs)
if arevs == brevs:
@@ -1911,8 +2034,10 @@
func = revset.makematcher(tree)
revs = func(repo)
- if ui.verbose:
- ui.note(("* set:\n"), smartset.prettyformat(revs), "\n")
+ if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
+ ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
+ if not opts['show_revs']:
+ return
for c in revs:
ui.write("%s\n" % c)
@@ -1934,6 +2059,66 @@
with repo.wlock():
repo.setparents(r1, r2)
+@command('debugssl', [], '[SOURCE]', optionalrepo=True)
+def debugssl(ui, repo, source=None, **opts):
+ '''test a secure connection to a server
+
+ This builds the certificate chain for the server on Windows, installing the
+ missing intermediates and trusted root via Windows Update if necessary. It
+ does nothing on other platforms.
+
+ If SOURCE is omitted, the 'default' path will be used. If a URL is given,
+ that server is used. See :hg:`help urls` for more information.
+
+ If the update succeeds, retry the original operation. Otherwise, the cause
+ of the SSL error is likely another issue.
+ '''
+ if pycompat.osname != 'nt':
+ raise error.Abort(_('certificate chain building is only possible on '
+ 'Windows'))
+
+ if not source:
+ source = "default"
+ elif not repo:
+ raise error.Abort(_("there is no Mercurial repository here, and no "
+ "server specified"))
+
+ source, branches = hg.parseurl(ui.expandpath(source))
+ url = util.url(source)
+ addr = None
+
+ if url.scheme == 'https':
+ addr = (url.host, url.port or 443)
+ elif url.scheme == 'ssh':
+ addr = (url.host, url.port or 22)
+ else:
+ raise error.Abort(_("only https and ssh connections are supported"))
+
+ from . import win32
+
+ s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
+ cert_reqs=ssl.CERT_NONE, ca_certs=None)
+
+ try:
+ s.connect(addr)
+ cert = s.getpeercert(True)
+
+ ui.status(_('checking the certificate chain for %s\n') % url.host)
+
+ complete = win32.checkcertificatechain(cert, build=False)
+
+ if not complete:
+ ui.status(_('certificate chain is incomplete, updating... '))
+
+ if not win32.checkcertificatechain(cert):
+ ui.status(_('failed.\n'))
+ else:
+ ui.status(_('done.\n'))
+ else:
+ ui.status(_('full certificate chain is available\n'))
+ finally:
+ s.close()
+
@command('debugsub',
[('r', 'rev', '',
_('revision to check'), _('REV'))],
@@ -1946,13 +2131,14 @@
ui.write((' revision %s\n') % v[1])
@command('debugsuccessorssets',
- [],
+ [('', 'closest', False, _('return closest successors sets only'))],
_('[REV]'))
-def debugsuccessorssets(ui, repo, *revs):
+def debugsuccessorssets(ui, repo, *revs, **opts):
"""show set of successors for revision
A successors set of changeset A is a consistent group of revisions that
- succeed A. It contains non-obsolete changesets only.
+ succeed A. It contains non-obsolete changesets only unless closests
+ successors set is set.
In most cases a changeset A has a single successors set containing a single
successor (changeset A replaced by A').
@@ -1990,7 +2176,9 @@
for rev in scmutil.revrange(repo, revs):
ctx = repo[rev]
ui.write('%s\n'% ctx2str(ctx))
- for succsset in obsolete.successorssets(repo, ctx.node(), cache):
+ for succsset in obsutil.successorssets(repo, ctx.node(),
+ closest=opts['closest'],
+ cache=cache):
if succsset:
ui.write(' ')
ui.write(node2str(succsset[0]))
@@ -2014,14 +2202,14 @@
Use --verbose to print the parsed tree.
"""
revs = None
- if opts['rev']:
+ if opts[r'rev']:
if repo is None:
raise error.RepoError(_('there is no Mercurial repository here '
'(.hg not found)'))
- revs = scmutil.revrange(repo, opts['rev'])
+ revs = scmutil.revrange(repo, opts[r'rev'])
props = {}
- for d in opts['define']:
+ for d in opts[r'define']:
try:
k, v = (e.strip() for e in d.split('=', 1))
if not k or k == 'ui':
@@ -2038,18 +2226,22 @@
if newtree != tree:
ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
- mapfile = None
if revs is None:
- k = 'debugtemplate'
- t = formatter.maketemplater(ui, k, tmpl)
- ui.write(templater.stringify(t(k, ui=ui, **props)))
+ t = formatter.maketemplater(ui, tmpl)
+ props['ui'] = ui
+ ui.write(t.render(props))
else:
- displayer = cmdutil.changeset_templater(ui, repo, None, opts, tmpl,
- mapfile, buffered=False)
+ displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
for r in revs:
- displayer.show(repo[r], **props)
+ displayer.show(repo[r], **pycompat.strkwargs(props))
displayer.close()
+@command('debugupdatecaches', [])
+def debugupdatecaches(ui, repo, *pats, **opts):
+ """warm all known caches in the repository"""
+ with repo.wlock(), repo.lock():
+ repo.updatecaches()
+
@command('debugupgraderepo', [
('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
('', 'run', False, _('performs an upgrade')),
@@ -2075,12 +2267,14 @@
"""
return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
-@command('debugwalk', commands.walkopts, _('[OPTION]... [FILE]...'),
+@command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
"""show how files match on given patterns"""
+ opts = pycompat.byteskwargs(opts)
m = scmutil.match(repo[None], pats, opts)
- items = list(repo.walk(m))
+ ui.write(('matcher: %r\n' % m))
+ items = list(repo[None].walk(m))
if not items:
return
f = lambda fn: fn
@@ -2097,12 +2291,13 @@
[('', 'three', '', 'three'),
('', 'four', '', 'four'),
('', 'five', '', 'five'),
- ] + commands.remoteopts,
+ ] + cmdutil.remoteopts,
_('REPO [OPTIONS]... [ONE [TWO]]'),
norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
+ opts = pycompat.byteskwargs(opts)
repo = hg.peer(ui, opts, repopath)
- for opt in commands.remoteopts:
+ for opt in cmdutil.remoteopts:
del opts[opt[1]]
args = {}
for k, v in opts.iteritems():
--- a/mercurial/demandimport.py Wed Jul 05 11:24:22 2017 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,331 +0,0 @@
-# demandimport.py - global demand-loading of modules for Mercurial
-#
-# Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
-#
-# This software may be used and distributed according to the terms of the
-# GNU General Public License version 2 or any later version.
-
-'''
-demandimport - automatic demandloading of modules
-
-To enable this module, do:
-
- import demandimport; demandimport.enable()
-
-Imports of the following forms will be demand-loaded:
-
- import a, b.c
- import a.b as c
- from a import b,c # a will be loaded immediately
-
-These imports will not be delayed:
-
- from a import *
- b = __import__(a)
-'''
-
-from __future__ import absolute_import
-
-import contextlib
-import os
-import sys
-
-# __builtin__ in Python 2, builtins in Python 3.
-try:
- import __builtin__ as builtins
-except ImportError:
- import builtins
-
-contextmanager = contextlib.contextmanager
-
-_origimport = __import__
-
-nothing = object()
-
-# Python 3 doesn't have relative imports nor level -1.
-level = -1
-if sys.version_info[0] >= 3:
- level = 0
-_import = _origimport
-
-def _hgextimport(importfunc, name, globals, *args, **kwargs):
- try:
- return importfunc(name, globals, *args, **kwargs)
- except ImportError:
- if not globals:
- raise
- # extensions are loaded with "hgext_" prefix
- hgextname = 'hgext_%s' % name
- nameroot = hgextname.split('.', 1)[0]
- contextroot = globals.get('__name__', '').split('.', 1)[0]
- if nameroot != contextroot:
- raise
- # retry to import with "hgext_" prefix
- return importfunc(hgextname, globals, *args, **kwargs)
-
-class _demandmod(object):
- """module demand-loader and proxy
-
- Specify 1 as 'level' argument at construction, to import module
- relatively.
- """
- def __init__(self, name, globals, locals, level):
- if '.' in name:
- head, rest = name.split('.', 1)
- after = [rest]
- else:
- head = name
- after = []
- object.__setattr__(self, r"_data",
- (head, globals, locals, after, level, set()))
- object.__setattr__(self, r"_module", None)
- def _extend(self, name):
- """add to the list of submodules to load"""
- self._data[3].append(name)
-
- def _addref(self, name):
- """Record that the named module ``name`` imports this module.
-
- References to this proxy class having the name of this module will be
- replaced at module load time. We assume the symbol inside the importing
- module is identical to the "head" name of this module. We don't
- actually know if "as X" syntax is being used to change the symbol name
- because this information isn't exposed to __import__.
- """
- self._data[5].add(name)
-
- def _load(self):
- if not self._module:
- head, globals, locals, after, level, modrefs = self._data
- mod = _hgextimport(_import, head, globals, locals, None, level)
- if mod is self:
- # In this case, _hgextimport() above should imply
- # _demandimport(). Otherwise, _hgextimport() never
- # returns _demandmod. This isn't intentional behavior,
- # in fact. (see also issue5304 for detail)
- #
- # If self._module is already bound at this point, self
- # should be already _load()-ed while _hgextimport().
- # Otherwise, there is no way to import actual module
- # as expected, because (re-)invoking _hgextimport()
- # should cause same result.
- # This is reason why _load() returns without any more
- # setup but assumes self to be already bound.
- mod = self._module
- assert mod and mod is not self, "%s, %s" % (self, mod)
- return
-
- # load submodules
- def subload(mod, p):
- h, t = p, None
- if '.' in p:
- h, t = p.split('.', 1)
- if getattr(mod, h, nothing) is nothing:
- setattr(mod, h, _demandmod(p, mod.__dict__, mod.__dict__,
- level=1))
- elif t:
- subload(getattr(mod, h), t)
-
- for x in after:
- subload(mod, x)
-
- # Replace references to this proxy instance with the actual module.
- if locals and locals.get(head) == self:
- locals[head] = mod
-
- for modname in modrefs:
- modref = sys.modules.get(modname, None)
- if modref and getattr(modref, head, None) == self:
- setattr(modref, head, mod)
-
- object.__setattr__(self, r"_module", mod)
-
- def __repr__(self):
- if self._module:
- return "<proxied module '%s'>" % self._data[0]
- return "<unloaded module '%s'>" % self._data[0]
- def __call__(self, *args, **kwargs):
- raise TypeError("%s object is not callable" % repr(self))
- def __getattribute__(self, attr):
- if attr in ('_data', '_extend', '_load', '_module', '_addref'):
- return object.__getattribute__(self, attr)
- self._load()
- return getattr(self._module, attr)
- def __setattr__(self, attr, val):
- self._load()
- setattr(self._module, attr, val)
-
-_pypy = '__pypy__' in sys.builtin_module_names
-
-def _demandimport(name, globals=None, locals=None, fromlist=None, level=level):
- if not locals or name in ignore or fromlist == ('*',):
- # these cases we can't really delay
- return _hgextimport(_import, name, globals, locals, fromlist, level)
- elif not fromlist:
- # import a [as b]
- if '.' in name: # a.b
- base, rest = name.split('.', 1)
- # email.__init__ loading email.mime
- if globals and globals.get('__name__', None) == base:
- return _import(name, globals, locals, fromlist, level)
- # if a is already demand-loaded, add b to its submodule list
- if base in locals:
- if isinstance(locals[base], _demandmod):
- locals[base]._extend(rest)
- return locals[base]
- return _demandmod(name, globals, locals, level)
- else:
- # There is a fromlist.
- # from a import b,c,d
- # from . import b,c,d
- # from .a import b,c,d
-
- # level == -1: relative and absolute attempted (Python 2 only).
- # level >= 0: absolute only (Python 2 w/ absolute_import and Python 3).
- # The modern Mercurial convention is to use absolute_import everywhere,
- # so modern Mercurial code will have level >= 0.
-
- # The name of the module the import statement is located in.
- globalname = globals.get('__name__')
-
- def processfromitem(mod, attr):
- """Process an imported symbol in the import statement.
-
- If the symbol doesn't exist in the parent module, and if the
- parent module is a package, it must be a module. We set missing
- modules up as _demandmod instances.
- """
- symbol = getattr(mod, attr, nothing)
- nonpkg = getattr(mod, '__path__', nothing) is nothing
- if symbol is nothing:
- if nonpkg:
- # do not try relative import, which would raise ValueError,
- # and leave unknown attribute as the default __import__()
- # would do. the missing attribute will be detected later
- # while processing the import statement.
- return
- mn = '%s.%s' % (mod.__name__, attr)
- if mn in ignore:
- importfunc = _origimport
- else:
- importfunc = _demandmod
- symbol = importfunc(attr, mod.__dict__, locals, level=1)
- setattr(mod, attr, symbol)
-
- # Record the importing module references this symbol so we can
- # replace the symbol with the actual module instance at load
- # time.
- if globalname and isinstance(symbol, _demandmod):
- symbol._addref(globalname)
-
- def chainmodules(rootmod, modname):
- # recurse down the module chain, and return the leaf module
- mod = rootmod
- for comp in modname.split('.')[1:]:
- if getattr(mod, comp, nothing) is nothing:
- setattr(mod, comp, _demandmod(comp, mod.__dict__,
- mod.__dict__, level=1))
- mod = getattr(mod, comp)
- return mod
-
- if level >= 0:
- if name:
- # "from a import b" or "from .a import b" style
- rootmod = _hgextimport(_origimport, name, globals, locals,
- level=level)
- mod = chainmodules(rootmod, name)
- elif _pypy:
- # PyPy's __import__ throws an exception if invoked
- # with an empty name and no fromlist. Recreate the
- # desired behaviour by hand.
- mn = globalname
- mod = sys.modules[mn]
- if getattr(mod, '__path__', nothing) is nothing:
- mn = mn.rsplit('.', 1)[0]
- mod = sys.modules[mn]
- if level > 1:
- mn = mn.rsplit('.', level - 1)[0]
- mod = sys.modules[mn]
- else:
- mod = _hgextimport(_origimport, name, globals, locals,
- level=level)
-
- for x in fromlist:
- processfromitem(mod, x)
-
- return mod
-
- # But, we still need to support lazy loading of standard library and 3rd
- # party modules. So handle level == -1.
- mod = _hgextimport(_origimport, name, globals, locals)
- mod = chainmodules(mod, name)
-
- for x in fromlist:
- processfromitem(mod, x)
-
- return mod
-
-ignore = [
- '__future__',
- '_hashlib',
- # ImportError during pkg_resources/__init__.py:fixup_namespace_package
- '_imp',
- '_xmlplus',
- 'fcntl',
- 'nt', # pathlib2 tests the existence of built-in 'nt' module
- 'win32com.gen_py',
- 'win32com.shell', # 'appdirs' tries to import win32com.shell
- '_winreg', # 2.7 mimetypes needs immediate ImportError
- 'pythoncom',
- # imported by tarfile, not available under Windows
- 'pwd',
- 'grp',
- # imported by profile, itself imported by hotshot.stats,
- # not available under Windows
- 'resource',
- # this trips up many extension authors
- 'gtk',
- # setuptools' pkg_resources.py expects "from __main__ import x" to
- # raise ImportError if x not defined
- '__main__',
- '_ssl', # conditional imports in the stdlib, issue1964
- '_sre', # issue4920
- 'rfc822',
- 'mimetools',
- 'sqlalchemy.events', # has import-time side effects (issue5085)
- # setuptools 8 expects this module to explode early when not on windows
- 'distutils.msvc9compiler',
- '__builtin__',
- 'builtins',
- ]
-
-if _pypy:
- ignore.extend([
- # _ctypes.pointer is shadowed by "from ... import pointer" (PyPy 5)
- '_ctypes.pointer',
- ])
-
-def isenabled():
- return builtins.__import__ == _demandimport
-
-def enable():
- "enable global demand-loading of modules"
- if os.environ.get('HGDEMANDIMPORT') != 'disable':
- builtins.__import__ = _demandimport
-
-def disable():
- "disable global demand-loading of modules"
- builtins.__import__ = _origimport
-
-@contextmanager
-def deactivated():
- "context manager for disabling demandimport in 'with' blocks"
- demandenabled = isenabled()
- if demandenabled:
- disable()
-
- try:
- yield
- finally:
- if demandenabled:
- enable()
--- a/mercurial/destutil.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/destutil.py Wed Jul 19 07:51:41 2017 -0500
@@ -11,7 +11,7 @@
from . import (
bookmarks,
error,
- obsolete,
+ obsutil,
scmutil,
)
@@ -24,7 +24,7 @@
if p1.obsolete() and not p1.children():
# allow updating to successors
- successors = obsolete.successorssets(repo, p1.node())
+ successors = obsutil.successorssets(repo, p1.node())
# behavior of certain cases is as follows,
#
@@ -234,7 +234,7 @@
def _destmergebook(repo, action='merge', sourceset=None, destspace=None):
"""find merge destination in the active bookmark case"""
node = None
- bmheads = repo.bookmarkheads(repo._activebookmark)
+ bmheads = bookmarks.headsforactive(repo)
curhead = repo[repo._activebookmark].node()
if len(bmheads) == 2:
if curhead == bmheads[0]:
@@ -354,8 +354,14 @@
return None
+def stackbase(ui, repo):
+ # The histedit default base stops at public changesets, branchpoints,
+ # and merges, which is exactly what we want for a stack.
+ revs = scmutil.revrange(repo, [histeditdefaultrevset])
+ return revs.last() if revs else None
+
def _statusotherbook(ui, repo):
- bmheads = repo.bookmarkheads(repo._activebookmark)
+ bmheads = bookmarks.headsforactive(repo)
curhead = repo[repo._activebookmark].node()
if repo.revs('%n and parents()', curhead):
# we are on the active bookmark
@@ -391,6 +397,9 @@
ui.warn(_('(committing will reopen branch "%s")\n') %
(currentbranch))
elif otherheads:
+ curhead = repo['.']
+ ui.status(_('updated to "%s: %s"\n') % (curhead,
+ curhead.description().split('\n')[0]))
ui.status(_('%i other heads for branch "%s"\n') %
(len(otherheads), currentbranch))
--- a/mercurial/diffhelpers.c Wed Jul 05 11:24:22 2017 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,199 +0,0 @@
-/*
- * diffhelpers.c - helper routines for mpatch
- *
- * Copyright 2007 Chris Mason <chris.mason@oracle.com>
- *
- * This software may be used and distributed according to the terms
- * of the GNU General Public License v2, incorporated herein by reference.
- */
-
-#include <Python.h>
-#include <stdlib.h>
-#include <string.h>
-
-#include "util.h"
-
-static char diffhelpers_doc[] = "Efficient diff parsing";
-static PyObject *diffhelpers_Error;
-
-
-/* fixup the last lines of a and b when the patch has no newline at eof */
-static void _fix_newline(PyObject *hunk, PyObject *a, PyObject *b)
-{
- Py_ssize_t hunksz = PyList_Size(hunk);
- PyObject *s = PyList_GET_ITEM(hunk, hunksz-1);
- char *l = PyBytes_AsString(s);
- Py_ssize_t alen = PyList_Size(a);
- Py_ssize_t blen = PyList_Size(b);
- char c = l[0];
- PyObject *hline;
- Py_ssize_t sz = PyBytes_GET_SIZE(s);
-
- if (sz > 1 && l[sz-2] == '\r')
- /* tolerate CRLF in last line */
- sz -= 1;
-
- hline = PyBytes_FromStringAndSize(l, sz-1);
- if (!hline) {
- return;
- }
-
- if (c == ' ' || c == '+') {
- PyObject *rline = PyBytes_FromStringAndSize(l + 1, sz - 2);
- PyList_SetItem(b, blen-1, rline);
- }
- if (c == ' ' || c == '-') {
- Py_INCREF(hline);
- PyList_SetItem(a, alen-1, hline);
- }
- PyList_SetItem(hunk, hunksz-1, hline);
-}
-
-/* python callable form of _fix_newline */
-static PyObject *
-fix_newline(PyObject *self, PyObject *args)
-{
- PyObject *hunk, *a, *b;
- if (!PyArg_ParseTuple(args, "OOO", &hunk, &a, &b))
- return NULL;
- _fix_newline(hunk, a, b);
- return Py_BuildValue("l", 0);
-}
-
-#if (PY_VERSION_HEX < 0x02050000)
-static const char *addlines_format = "OOiiOO";
-#else
-static const char *addlines_format = "OOnnOO";
-#endif
-
-/*
- * read lines from fp into the hunk. The hunk is parsed into two arrays
- * a and b. a gets the old state of the text, b gets the new state
- * The control char from the hunk is saved when inserting into a, but not b
- * (for performance while deleting files)
- */
-static PyObject *
-addlines(PyObject *self, PyObject *args)
-{
-
- PyObject *fp, *hunk, *a, *b, *x;
- Py_ssize_t i;
- Py_ssize_t lena, lenb;
- Py_ssize_t num;
- Py_ssize_t todoa, todob;
- char *s, c;
- PyObject *l;
- if (!PyArg_ParseTuple(args, addlines_format,
- &fp, &hunk, &lena, &lenb, &a, &b))
- return NULL;
-
- while (1) {
- todoa = lena - PyList_Size(a);
- todob = lenb - PyList_Size(b);
- num = todoa > todob ? todoa : todob;
- if (num == 0)
- break;
- for (i = 0; i < num; i++) {
- x = PyFile_GetLine(fp, 0);
- s = PyBytes_AsString(x);
- c = *s;
- if (strcmp(s, "\\ No newline at end of file\n") == 0) {
- _fix_newline(hunk, a, b);
- continue;
- }
- if (c == '\n') {
- /* Some patches may be missing the control char
- * on empty lines. Supply a leading space. */
- Py_DECREF(x);
- x = PyBytes_FromString(" \n");
- }
- PyList_Append(hunk, x);
- if (c == '+') {
- l = PyBytes_FromString(s + 1);
- PyList_Append(b, l);
- Py_DECREF(l);
- } else if (c == '-') {
- PyList_Append(a, x);
- } else {
- l = PyBytes_FromString(s + 1);
- PyList_Append(b, l);
- Py_DECREF(l);
- PyList_Append(a, x);
- }
- Py_DECREF(x);
- }
- }
- return Py_BuildValue("l", 0);
-}
-
-/*
- * compare the lines in a with the lines in b. a is assumed to have
- * a control char at the start of each line, this char is ignored in the
- * compare
- */
-static PyObject *
-testhunk(PyObject *self, PyObject *args)
-{
-
- PyObject *a, *b;
- long bstart;
- Py_ssize_t alen, blen;
- Py_ssize_t i;
- char *sa, *sb;
-
- if (!PyArg_ParseTuple(args, "OOl", &a, &b, &bstart))
- return NULL;
- alen = PyList_Size(a);
- blen = PyList_Size(b);
- if (alen > blen - bstart || bstart < 0) {
- return Py_BuildValue("l", -1);
- }
- for (i = 0; i < alen; i++) {
- sa = PyBytes_AsString(PyList_GET_ITEM(a, i));
- sb = PyBytes_AsString(PyList_GET_ITEM(b, i + bstart));
- if (strcmp(sa + 1, sb) != 0)
- return Py_BuildValue("l", -1);
- }
- return Py_BuildValue("l", 0);
-}
-
-static PyMethodDef methods[] = {
- {"addlines", addlines, METH_VARARGS, "add lines to a hunk\n"},
- {"fix_newline", fix_newline, METH_VARARGS, "fixup newline counters\n"},
- {"testhunk", testhunk, METH_VARARGS, "test lines in a hunk\n"},
- {NULL, NULL}
-};
-
-#ifdef IS_PY3K
-static struct PyModuleDef diffhelpers_module = {
- PyModuleDef_HEAD_INIT,
- "diffhelpers",
- diffhelpers_doc,
- -1,
- methods
-};
-
-PyMODINIT_FUNC PyInit_diffhelpers(void)
-{
- PyObject *m;
-
- m = PyModule_Create(&diffhelpers_module);
- if (m == NULL)
- return NULL;
-
- diffhelpers_Error = PyErr_NewException("diffhelpers.diffhelpersError",
- NULL, NULL);
- Py_INCREF(diffhelpers_Error);
- PyModule_AddObject(m, "diffhelpersError", diffhelpers_Error);
-
- return m;
-}
-#else
-PyMODINIT_FUNC
-initdiffhelpers(void)
-{
- Py_InitModule3("diffhelpers", methods, diffhelpers_doc);
- diffhelpers_Error = PyErr_NewException("diffhelpers.diffhelpersError",
- NULL, NULL);
-}
-#endif
--- a/mercurial/dirs.c Wed Jul 05 11:24:22 2017 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,315 +0,0 @@
-/*
- dirs.c - dynamic directory diddling for dirstates
-
- Copyright 2013 Facebook
-
- This software may be used and distributed according to the terms of
- the GNU General Public License, incorporated herein by reference.
-*/
-
-#define PY_SSIZE_T_CLEAN
-#include <Python.h>
-#include "util.h"
-
-#ifdef IS_PY3K
-#define PYLONG_VALUE(o) ((PyLongObject *)o)->ob_digit[1]
-#else
-#define PYLONG_VALUE(o) PyInt_AS_LONG(o)
-#endif
-
-/*
- * This is a multiset of directory names, built from the files that
- * appear in a dirstate or manifest.
- *
- * A few implementation notes:
- *
- * We modify Python integers for refcounting, but those integers are
- * never visible to Python code.
- *
- * We mutate strings in-place, but leave them immutable once they can
- * be seen by Python code.
- */
-typedef struct {
- PyObject_HEAD
- PyObject *dict;
-} dirsObject;
-
-static inline Py_ssize_t _finddir(const char *path, Py_ssize_t pos)
-{
- while (pos != -1) {
- if (path[pos] == '/')
- break;
- pos -= 1;
- }
-
- return pos;
-}
-
-static int _addpath(PyObject *dirs, PyObject *path)
-{
- const char *cpath = PyBytes_AS_STRING(path);
- Py_ssize_t pos = PyBytes_GET_SIZE(path);
- PyObject *key = NULL;
- int ret = -1;
-
- /* This loop is super critical for performance. That's why we inline
- * access to Python structs instead of going through a supported API.
- * The implementation, therefore, is heavily dependent on CPython
- * implementation details. We also commit violations of the Python
- * "protocol" such as mutating immutable objects. But since we only
- * mutate objects created in this function or in other well-defined
- * locations, the references are known so these violations should go
- * unnoticed. The code for adjusting the length of a PyBytesObject is
- * essentially a minimal version of _PyBytes_Resize. */
- while ((pos = _finddir(cpath, pos - 1)) != -1) {
- PyObject *val;
-
- /* It's likely that every prefix already has an entry
- in our dict. Try to avoid allocating and
- deallocating a string for each prefix we check. */
- if (key != NULL)
- ((PyBytesObject *)key)->ob_shash = -1;
- else {
- /* Force Python to not reuse a small shared string. */
- key = PyBytes_FromStringAndSize(cpath,
- pos < 2 ? 2 : pos);
- if (key == NULL)
- goto bail;
- }
- /* Py_SIZE(o) refers to the ob_size member of the struct. Yes,
- * assigning to what looks like a function seems wrong. */
- Py_SIZE(key) = pos;
- ((PyBytesObject *)key)->ob_sval[pos] = '\0';
-
- val = PyDict_GetItem(dirs, key);
- if (val != NULL) {
- PYLONG_VALUE(val) += 1;
- break;
- }
-
- /* Force Python to not reuse a small shared int. */
-#ifdef IS_PY3K
- val = PyLong_FromLong(0x1eadbeef);
-#else
- val = PyInt_FromLong(0x1eadbeef);
-#endif
-
- if (val == NULL)
- goto bail;
-
- PYLONG_VALUE(val) = 1;
- ret = PyDict_SetItem(dirs, key, val);
- Py_DECREF(val);
- if (ret == -1)
- goto bail;
- Py_CLEAR(key);
- }
- ret = 0;
-
-bail:
- Py_XDECREF(key);
-
- return ret;
-}
-
-static int _delpath(PyObject *dirs, PyObject *path)
-{
- char *cpath = PyBytes_AS_STRING(path);
- Py_ssize_t pos = PyBytes_GET_SIZE(path);
- PyObject *key = NULL;
- int ret = -1;
-
- while ((pos = _finddir(cpath, pos - 1)) != -1) {
- PyObject *val;
-
- key = PyBytes_FromStringAndSize(cpath, pos);
-
- if (key == NULL)
- goto bail;
-
- val = PyDict_GetItem(dirs, key);
- if (val == NULL) {
- PyErr_SetString(PyExc_ValueError,
- "expected a value, found none");
- goto bail;
- }
-
- if (--PYLONG_VALUE(val) <= 0) {
- if (PyDict_DelItem(dirs, key) == -1)
- goto bail;
- } else
- break;
- Py_CLEAR(key);
- }
- ret = 0;
-
-bail:
- Py_XDECREF(key);
-
- return ret;
-}
-
-static int dirs_fromdict(PyObject *dirs, PyObject *source, char skipchar)
-{
- PyObject *key, *value;
- Py_ssize_t pos = 0;
-
- while (PyDict_Next(source, &pos, &key, &value)) {
- if (!PyBytes_Check(key)) {
- PyErr_SetString(PyExc_TypeError, "expected string key");
- return -1;
- }
- if (skipchar) {
- if (!dirstate_tuple_check(value)) {
- PyErr_SetString(PyExc_TypeError,
- "expected a dirstate tuple");
- return -1;
- }
- if (((dirstateTupleObject *)value)->state == skipchar)
- continue;
- }
-
- if (_addpath(dirs, key) == -1)
- return -1;
- }
-
- return 0;
-}
-
-static int dirs_fromiter(PyObject *dirs, PyObject *source)
-{
- PyObject *iter, *item = NULL;
- int ret;
-
- iter = PyObject_GetIter(source);
- if (iter == NULL)
- return -1;
-
- while ((item = PyIter_Next(iter)) != NULL) {
- if (!PyBytes_Check(item)) {
- PyErr_SetString(PyExc_TypeError, "expected string");
- break;
- }
-
- if (_addpath(dirs, item) == -1)
- break;
- Py_CLEAR(item);
- }
-
- ret = PyErr_Occurred() ? -1 : 0;
- Py_DECREF(iter);
- Py_XDECREF(item);
- return ret;
-}
-
-/*
- * Calculate a refcounted set of directory names for the files in a
- * dirstate.
- */
-static int dirs_init(dirsObject *self, PyObject *args)
-{
- PyObject *dirs = NULL, *source = NULL;
- char skipchar = 0;
- int ret = -1;
-
- self->dict = NULL;
-
- if (!PyArg_ParseTuple(args, "|Oc:__init__", &source, &skipchar))
- return -1;
-
- dirs = PyDict_New();
-
- if (dirs == NULL)
- return -1;
-
- if (source == NULL)
- ret = 0;
- else if (PyDict_Check(source))
- ret = dirs_fromdict(dirs, source, skipchar);
- else if (skipchar)
- PyErr_SetString(PyExc_ValueError,
- "skip character is only supported "
- "with a dict source");
- else
- ret = dirs_fromiter(dirs, source);
-
- if (ret == -1)
- Py_XDECREF(dirs);
- else
- self->dict = dirs;
-
- return ret;
-}
-
-PyObject *dirs_addpath(dirsObject *self, PyObject *args)
-{
- PyObject *path;
-
- if (!PyArg_ParseTuple(args, "O!:addpath", &PyBytes_Type, &path))
- return NULL;
-
- if (_addpath(self->dict, path) == -1)
- return NULL;
-
- Py_RETURN_NONE;
-}
-
-static PyObject *dirs_delpath(dirsObject *self, PyObject *args)
-{
- PyObject *path;
-
- if (!PyArg_ParseTuple(args, "O!:delpath", &PyBytes_Type, &path))
- return NULL;
-
- if (_delpath(self->dict, path) == -1)
- return NULL;
-
- Py_RETURN_NONE;
-}
-
-static int dirs_contains(dirsObject *self, PyObject *value)
-{
- return PyBytes_Check(value) ? PyDict_Contains(self->dict, value) : 0;
-}
-
-static void dirs_dealloc(dirsObject *self)
-{
- Py_XDECREF(self->dict);
- PyObject_Del(self);
-}
-
-static PyObject *dirs_iter(dirsObject *self)
-{
- return PyObject_GetIter(self->dict);
-}
-
-static PySequenceMethods dirs_sequence_methods;
-
-static PyMethodDef dirs_methods[] = {
- {"addpath", (PyCFunction)dirs_addpath, METH_VARARGS, "add a path"},
- {"delpath", (PyCFunction)dirs_delpath, METH_VARARGS, "remove a path"},
- {NULL} /* Sentinel */
-};
-
-static PyTypeObject dirsType = { PyVarObject_HEAD_INIT(NULL, 0) };
-
-void dirs_module_init(PyObject *mod)
-{
- dirs_sequence_methods.sq_contains = (objobjproc)dirs_contains;
- dirsType.tp_name = "parsers.dirs";
- dirsType.tp_new = PyType_GenericNew;
- dirsType.tp_basicsize = sizeof(dirsObject);
- dirsType.tp_dealloc = (destructor)dirs_dealloc;
- dirsType.tp_as_sequence = &dirs_sequence_methods;
- dirsType.tp_flags = Py_TPFLAGS_DEFAULT;
- dirsType.tp_doc = "dirs";
- dirsType.tp_iter = (getiterfunc)dirs_iter;
- dirsType.tp_methods = dirs_methods;
- dirsType.tp_init = (initproc)dirs_init;
-
- if (PyType_Ready(&dirsType) < 0)
- return;
- Py_INCREF(&dirsType);
-
- PyModule_AddObject(mod, "dirs", (PyObject *)&dirsType);
-}
--- a/mercurial/dirstate.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/dirstate.py Wed Jul 19 07:51:41 2017 -0500
@@ -8,6 +8,7 @@
from __future__ import absolute_import
import collections
+import contextlib
import errno
import os
import stat
@@ -18,15 +19,16 @@
encoding,
error,
match as matchmod,
- osutil,
- parsers,
pathutil,
+ policy,
pycompat,
scmutil,
txnutil,
util,
)
+parsers = policy.importmod(r'parsers')
+
propertycache = util.propertycache
filecache = scmutil.filecache
_rangemask = 0x7fffffff
@@ -68,7 +70,7 @@
class dirstate(object):
- def __init__(self, opener, ui, root, validate):
+ def __init__(self, opener, ui, root, validate, sparsematchfn):
'''Create a new dirstate object.
opener is an open()-like callable that can be used to open the
@@ -78,13 +80,10 @@
self._opener = opener
self._validate = validate
self._root = root
+ self._sparsematchfn = sparsematchfn
# ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
# UNC path pointing to root share (issue4557)
self._rootdir = pathutil.normasprefix(root)
- # internal config: ui.forcecwd
- forcecwd = ui.config('ui', 'forcecwd')
- if forcecwd:
- self._cwd = forcecwd
self._dirty = False
self._dirtypl = False
self._lastnormaltime = 0
@@ -100,6 +99,23 @@
# for consistent view between _pl() and _read() invocations
self._pendingmode = None
+ @contextlib.contextmanager
+ def parentchange(self):
+ '''Context manager for handling dirstate parents.
+
+ If an exception occurs in the scope of the context manager,
+ the incoherent dirstate won't be written when wlock is
+ released.
+ '''
+ self._parentwriters += 1
+ yield
+ # Typically we want the "undo" step of a context manager in a
+ # finally block so it happens even when an exception
+ # occurs. In this case, however, we only want to decrement
+ # parentwriters if the code in the with statement exits
+ # normally, so we don't have a try/finally here on purpose.
+ self._parentwriters -= 1
+
def beginparentchange(self):
'''Marks the beginning of a set of changes that involve changing
the dirstate parents. If there is an exception during this time,
@@ -107,6 +123,8 @@
prevents writing an incoherent dirstate where the parent doesn't
match the contents.
'''
+ self._ui.deprecwarn('beginparentchange is obsoleted by the '
+ 'parentchange context manager.', '4.3')
self._parentwriters += 1
def endparentchange(self):
@@ -114,6 +132,8 @@
dirstate parents. Once all parent changes have been marked done,
the wlock will be free to write the dirstate on release.
'''
+ self._ui.deprecwarn('endparentchange is obsoleted by the '
+ 'parentchange context manager.', '4.3')
if self._parentwriters > 0:
self._parentwriters -= 1
@@ -136,6 +156,11 @@
return self._copymap
@propertycache
+ def _identity(self):
+ self._read()
+ return self._identity
+
+ @propertycache
def _nonnormalset(self):
nonnorm, otherparents = nonnormalentries(self._map)
self._otherparentset = otherparents
@@ -173,6 +198,19 @@
f[normcase(name)] = name
return f
+ @property
+ def _sparsematcher(self):
+ """The matcher for the sparse checkout.
+
+ The working directory may not include every file from a manifest. The
+ matcher obtained by this property will match a path if it is to be
+ included in the working directory.
+ """
+ # TODO there is potential to cache this property. For now, the matcher
+ # is resolved on every access. (But the called function does use a
+ # cache to keep the lookup fast.)
+ return self._sparsematchfn()
+
@repocache('branch')
def _branch(self):
try:
@@ -209,7 +247,7 @@
def _ignore(self):
files = self._ignorefiles()
if not files:
- return util.never
+ return matchmod.never(self._root, '')
pats = ['include:%s' % f for f in files]
return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
@@ -271,6 +309,10 @@
@propertycache
def _cwd(self):
+ # internal config: ui.forcecwd
+ forcecwd = self._ui.config('ui', 'forcecwd')
+ if forcecwd:
+ return forcecwd
return pycompat.getcwd()
def getcwd(self):
@@ -320,9 +362,11 @@
for x in sorted(self._map):
yield x
- def iteritems(self):
+ def items(self):
return self._map.iteritems()
+ iteritems = items
+
def parents(self):
return [self._validate(p) for p in self._pl]
@@ -401,6 +445,9 @@
def _read(self):
self._map = {}
self._copymap = {}
+ # ignore HG_PENDING because identity is used only for writing
+ self._identity = util.filestat.frompath(
+ self._opener.join(self._filename))
try:
fp = self._opendirstatefile()
try:
@@ -445,7 +492,14 @@
self._pl = p
def invalidate(self):
- for a in ("_map", "_copymap", "_filefoldmap", "_dirfoldmap", "_branch",
+ '''Causes the next access to reread the dirstate.
+
+ This is different from localrepo.invalidatedirstate() because it always
+ rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
+ check whether the dirstate has changed before rereading it.'''
+
+ for a in ("_map", "_copymap", "_identity",
+ "_filefoldmap", "_dirfoldmap", "_branch",
"_pl", "_dirs", "_ignore", "_nonnormalset",
"_otherparentset"):
if a in self.__dict__:
@@ -710,6 +764,14 @@
self._dirty = True
+ def identity(self):
+ '''Return identity of dirstate itself to detect changing in storage
+
+ If identity of previous dirstate is equal to this, writing
+ changes based on the former dirstate out can keep consistency.
+ '''
+ return self._identity
+
def write(self, tr):
if not self._dirty:
return
@@ -988,7 +1050,7 @@
matchalways = match.always()
matchtdir = match.traversedir
dmap = self._map
- listdir = osutil.listdir
+ listdir = util.listdir
lstat = os.lstat
dirkind = stat.S_IFDIR
regkind = stat.S_IFREG
@@ -1021,6 +1083,8 @@
wadd = work.append
while work:
nd = work.pop()
+ if not match.visitdir(nd):
+ continue
skip = None
if nd == '.':
nd = ''
@@ -1236,10 +1300,10 @@
else:
return self._filename
- def savebackup(self, tr, suffix='', prefix=''):
- '''Save current dirstate into backup file with suffix'''
- assert len(suffix) > 0 or len(prefix) > 0
+ def savebackup(self, tr, backupname):
+ '''Save current dirstate into backup file'''
filename = self._actualfilename(tr)
+ assert backupname != filename
# use '_writedirstate' instead of 'write' to write changes certainly,
# because the latter omits writing out if transaction is running.
@@ -1260,27 +1324,20 @@
# end of this transaction
tr.registertmp(filename, location='plain')
- backupname = prefix + self._filename + suffix
- assert backupname != filename
self._opener.tryunlink(backupname)
# hardlink backup is okay because _writedirstate is always called
# with an "atomictemp=True" file.
util.copyfile(self._opener.join(filename),
self._opener.join(backupname), hardlink=True)
- def restorebackup(self, tr, suffix='', prefix=''):
- '''Restore dirstate by backup file with suffix'''
- assert len(suffix) > 0 or len(prefix) > 0
+ def restorebackup(self, tr, backupname):
+ '''Restore dirstate by backup file'''
# this "invalidate()" prevents "wlock.release()" from writing
# changes of dirstate out after restoring from backup file
self.invalidate()
filename = self._actualfilename(tr)
- # using self._filename to avoid having "pending" in the backup filename
- self._opener.rename(prefix + self._filename + suffix, filename,
- checkambig=True)
+ self._opener.rename(backupname, filename, checkambig=True)
- def clearbackup(self, tr, suffix='', prefix=''):
- '''Clear backup file with suffix'''
- assert len(suffix) > 0 or len(prefix) > 0
- # using self._filename to avoid having "pending" in the backup filename
- self._opener.unlink(prefix + self._filename + suffix)
+ def clearbackup(self, tr, backupname):
+ '''Clear backup file'''
+ self._opener.unlink(backupname)
--- a/mercurial/dirstateguard.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/dirstateguard.py Wed Jul 19 07:51:41 2017 -0500
@@ -31,8 +31,8 @@
self._repo = repo
self._active = False
self._closed = False
- self._suffix = '.backup.%s.%d' % (name, id(self))
- repo.dirstate.savebackup(repo.currenttransaction(), self._suffix)
+ self._backupname = 'dirstate.backup.%s.%d' % (name, id(self))
+ repo.dirstate.savebackup(repo.currenttransaction(), self._backupname)
self._active = True
def __del__(self):
@@ -45,25 +45,24 @@
def close(self):
if not self._active: # already inactivated
- msg = (_("can't close already inactivated backup: dirstate%s")
- % self._suffix)
+ msg = (_("can't close already inactivated backup: %s")
+ % self._backupname)
raise error.Abort(msg)
self._repo.dirstate.clearbackup(self._repo.currenttransaction(),
- self._suffix)
+ self._backupname)
self._active = False
self._closed = True
def _abort(self):
self._repo.dirstate.restorebackup(self._repo.currenttransaction(),
- self._suffix)
+ self._backupname)
self._active = False
def release(self):
if not self._closed:
if not self._active: # already inactivated
- msg = (_("can't release already inactivated backup:"
- " dirstate%s")
- % self._suffix)
+ msg = (_("can't release already inactivated backup: %s")
+ % self._backupname)
raise error.Abort(msg)
self._abort()
--- a/mercurial/discovery.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/discovery.py Wed Jul 19 07:51:41 2017 -0500
@@ -182,17 +182,22 @@
return og
-def _headssummary(repo, remote, outgoing):
+def _headssummary(pushop):
"""compute a summary of branch and heads status before and after push
- return {'branch': ([remoteheads], [newheads], [unsyncedheads])} mapping
+ return {'branch': ([remoteheads], [newheads],
+ [unsyncedheads], [discardedheads])} mapping
- - branch: the branch name
+ - branch: the branch name,
- remoteheads: the list of remote heads known locally
- None if the branch is new
- - newheads: the new remote heads (known locally) with outgoing pushed
- - unsyncedheads: the list of remote heads unknown locally.
+ None if the branch is new,
+ - newheads: the new remote heads (known locally) with outgoing pushed,
+ - unsyncedheads: the list of remote heads unknown locally,
+ - discardedheads: the list of heads made obsolete by the push.
"""
+ repo = pushop.repo.unfiltered()
+ remote = pushop.remote
+ outgoing = pushop.outgoing
cl = repo.changelog
headssum = {}
# A. Create set of branches involved in the push.
@@ -235,6 +240,23 @@
newmap.update(repo, (ctx.rev() for ctx in missingctx))
for branch, newheads in newmap.iteritems():
headssum[branch][1][:] = newheads
+ for branch, items in headssum.iteritems():
+ for l in items:
+ if l is not None:
+ l.sort()
+ headssum[branch] = items + ([],)
+
+ # If there is no obsstore, no post processing is needed.
+ if repo.obsstore:
+ torev = repo.changelog.rev
+ futureheads = set(torev(h) for h in outgoing.missingheads)
+ futureheads |= set(torev(h) for h in outgoing.commonheads)
+ allfuturecommon = repo.changelog.ancestors(futureheads, inclusive=True)
+ for branch, heads in sorted(headssum.iteritems()):
+ remoteheads, newheads, unsyncedheads, placeholder = heads
+ result = _postprocessobsolete(pushop, allfuturecommon, newheads)
+ headssum[branch] = (remoteheads, sorted(result[0]), unsyncedheads,
+ sorted(result[1]))
return headssum
def _oldheadssummary(repo, remoteheads, outgoing, inc=False):
@@ -244,20 +266,20 @@
# Construct {old,new}map with branch = None (topological branch).
# (code based on update)
knownnode = repo.changelog.hasnode # no nodemap until it is filtered
- oldheads = set(h for h in remoteheads if knownnode(h))
+ oldheads = sorted(h for h in remoteheads if knownnode(h))
# all nodes in outgoing.missing are children of either:
# - an element of oldheads
# - another element of outgoing.missing
# - nullrev
# This explains why the new head are very simple to compute.
r = repo.set('heads(%ln + %ln)', oldheads, outgoing.missing)
- newheads = list(c.node() for c in r)
+ newheads = sorted(c.node() for c in r)
# set some unsynced head to issue the "unsynced changes" warning
if inc:
- unsynced = set([None])
+ unsynced = [None]
else:
- unsynced = set()
- return {None: (oldheads, newheads, unsynced)}
+ unsynced = []
+ return {None: (oldheads, newheads, unsynced, [])}
def _nowarnheads(pushop):
# Compute newly pushed bookmarks. We don't warn about bookmarked heads.
@@ -307,9 +329,10 @@
return
if remote.capable('branchmap'):
- headssum = _headssummary(repo, remote, outgoing)
+ headssum = _headssummary(pushop)
else:
headssum = _oldheadssummary(repo, remoteheads, outgoing, inc)
+ pushop.pushbranchmap = headssum
newbranches = [branch for branch, heads in headssum.iteritems()
if heads[0] is None]
# 1. Check for new branches on the remote.
@@ -327,41 +350,26 @@
# If there are more heads after the push than before, a suitable
# error message, depending on unsynced status, is displayed.
errormsg = None
- # If there is no obsstore, allfuturecommon won't be used, so no
- # need to compute it.
- if repo.obsstore:
- allmissing = set(outgoing.missing)
- cctx = repo.set('%ld', outgoing.common)
- allfuturecommon = set(c.node() for c in cctx)
- allfuturecommon.update(allmissing)
for branch, heads in sorted(headssum.iteritems()):
- remoteheads, newheads, unsyncedheads = heads
- candidate_newhs = set(newheads)
+ remoteheads, newheads, unsyncedheads, discardedheads = heads
# add unsynced data
if remoteheads is None:
oldhs = set()
else:
oldhs = set(remoteheads)
oldhs.update(unsyncedheads)
- candidate_newhs.update(unsyncedheads)
dhs = None # delta heads, the new heads on branch
- if not repo.obsstore:
- discardedheads = set()
- newhs = candidate_newhs
- else:
- newhs, discardedheads = _postprocessobsolete(pushop,
- allfuturecommon,
- candidate_newhs)
- unsynced = sorted(h for h in unsyncedheads if h not in discardedheads)
- if unsynced:
- if None in unsynced:
+ newhs = set(newheads)
+ newhs.update(unsyncedheads)
+ if unsyncedheads:
+ if None in unsyncedheads:
# old remote, no heads data
heads = None
- elif len(unsynced) <= 4 or repo.ui.verbose:
- heads = ' '.join(short(h) for h in unsynced)
+ elif len(unsyncedheads) <= 4 or repo.ui.verbose:
+ heads = ' '.join(short(h) for h in unsyncedheads)
else:
- heads = (' '.join(short(h) for h in unsynced[:4]) +
- ' ' + _("and %s others") % (len(unsynced) - 4))
+ heads = (' '.join(short(h) for h in unsyncedheads[:4]) +
+ ' ' + _("and %s others") % (len(unsyncedheads) - 4))
if heads is None:
repo.ui.status(_("remote has heads that are "
"not known locally\n"))
@@ -431,11 +439,12 @@
repo = pushop.repo
unfi = repo.unfiltered()
tonode = unfi.changelog.node
+ torev = unfi.changelog.nodemap.get
public = phases.public
getphase = unfi._phasecache.phase
ispublic = (lambda r: getphase(unfi, r) == public)
- hasoutmarker = functools.partial(pushingmarkerfor, unfi.obsstore,
- futurecommon)
+ ispushed = (lambda n: torev(n) in futurecommon)
+ hasoutmarker = functools.partial(pushingmarkerfor, unfi.obsstore, ispushed)
successorsmarkers = unfi.obsstore.successors
newhs = set() # final set of new heads
discarded = set() # new head of fully replaced branch
@@ -460,8 +469,7 @@
while localcandidate:
nh = localcandidate.pop()
# run this check early to skip the evaluation of the whole branch
- if (nh in futurecommon
- or unfi[nh].phase() <= public):
+ if (torev(nh) in futurecommon or ispublic(torev(nh))):
newhs.add(nh)
continue
@@ -476,7 +484,7 @@
# * any part of it is considered part of the result by previous logic,
# * if we have no markers to push to obsolete it.
if (any(ispublic(r) for r in branchrevs)
- or any(n in futurecommon for n in branchnodes)
+ or any(torev(n) in futurecommon for n in branchnodes)
or any(not hasoutmarker(n) for n in branchnodes)):
newhs.add(nh)
else:
@@ -488,7 +496,7 @@
newhs |= unknownheads
return newhs, discarded
-def pushingmarkerfor(obsstore, pushset, node):
+def pushingmarkerfor(obsstore, ispushed, node):
"""true if some markers are to be pushed for node
We cannot just look in to the pushed obsmarkers from the pushop because
@@ -504,7 +512,7 @@
seen = set(stack)
while stack:
current = stack.pop()
- if current in pushset:
+ if ispushed(current):
return True
markers = successorsmarkers.get(current, ())
# markers fields = ('prec', 'succs', 'flag', 'meta', 'date', 'parents')
--- a/mercurial/dispatch.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/dispatch.py Wed Jul 19 07:51:41 2017 -0500
@@ -25,30 +25,24 @@
cmdutil,
color,
commands,
- debugcommands,
demandimport,
encoding,
error,
extensions,
fancyopts,
- fileset,
help,
hg,
hook,
profiling,
pycompat,
- revset,
scmutil,
- templatefilters,
- templatekw,
- templater,
ui as uimod,
util,
)
class request(object):
def __init__(self, args, ui=None, repo=None, fin=None, fout=None,
- ferr=None):
+ ferr=None, prereposetups=None):
self.args = args
self.ui = ui
self.repo = repo
@@ -58,6 +52,10 @@
self.fout = fout
self.ferr = ferr
+ # reposetups which run before extensions, useful for chg to pre-fill
+ # low-level repo state (for example, changelog) before extensions.
+ self.prereposetups = prereposetups or []
+
def _runexithandlers(self):
exc = None
handlers = self.ui._exithandlers
@@ -85,13 +83,13 @@
status = -1
if util.safehasattr(req.ui, 'fout'):
try:
- req.ui.fout.close()
+ req.ui.fout.flush()
except IOError as err:
status = -1
if util.safehasattr(req.ui, 'ferr'):
if err is not None and err.errno != errno.EPIPE:
req.ui.ferr.write('abort: %s\n' % err.strerror)
- req.ui.ferr.close()
+ req.ui.ferr.flush()
sys.exit(status & 255)
def _getsimilar(symbols, value):
@@ -162,9 +160,18 @@
ret = None
try:
ret = _runcatch(req)
- except KeyboardInterrupt:
+ except error.ProgrammingError as inst:
+ req.ui.warn(_('** ProgrammingError: %s\n') % inst)
+ if inst.hint:
+ req.ui.warn(_('** (%s)\n') % inst.hint)
+ raise
+ except KeyboardInterrupt as inst:
try:
- req.ui.warn(_("interrupted!\n"))
+ if isinstance(inst, error.SignalInterrupt):
+ msg = _("killed!\n")
+ else:
+ msg = _("interrupted!\n")
+ req.ui.warn(msg)
except error.SignalInterrupt:
# maybe pager would quit without consuming all the output, and
# SIGPIPE was raised. we cannot print anything in this case.
@@ -179,7 +186,7 @@
if req.ui.logblockedtimes:
req.ui._blockedtimes['command_duration'] = duration * 1000
req.ui.log('uiblocked', 'ui blocked ms', **req.ui._blockedtimes)
- req.ui.log("commandfinish", "%s exited %s after %0.2f seconds\n",
+ req.ui.log("commandfinish", "%s exited %d after %0.2f seconds\n",
msg, ret or 0, duration)
try:
req._runexithandlers()
@@ -307,7 +314,8 @@
except error.CommandError as inst:
if inst.args[0]:
ui.pager('help')
- ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
+ msgbytes = pycompat.bytestr(inst.args[1])
+ ui.warn(_("hg %s: %s\n") % (inst.args[0], msgbytes))
commands.help_(ui, inst.args[0], full=False, command=True)
else:
ui.pager('help')
@@ -321,7 +329,8 @@
try:
# check if the command is in a disabled extension
# (but don't check for extensions themselves)
- formatted = help.formattedhelp(ui, inst.args[0], unknowncmd=True)
+ formatted = help.formattedhelp(ui, commands, inst.args[0],
+ unknowncmd=True)
ui.warn(nocmdmsg)
ui.write(formatted)
except (error.UnknownCommand, error.Abort):
@@ -475,7 +484,8 @@
return aliasargs(self.fn, args)
def __getattr__(self, name):
- adefaults = {'norepo': True, 'optionalrepo': False, 'inferrepo': False}
+ adefaults = {r'norepo': True,
+ r'optionalrepo': False, r'inferrepo': False}
if name not in adefaults:
raise AttributeError(name)
if self.badalias or util.safehasattr(self, 'shell'):
@@ -710,24 +720,6 @@
return lambda: runcommand(lui, None, cmd, args[:1], ui, options, d,
[], {})
-_loaded = set()
-
-# list of (objname, loadermod, loadername) tuple:
-# - objname is the name of an object in extension module, from which
-# extra information is loaded
-# - loadermod is the module where loader is placed
-# - loadername is the name of the function, which takes (ui, extensionname,
-# extraobj) arguments
-extraloaders = [
- ('cmdtable', commands, 'loadcmdtable'),
- ('colortable', color, 'loadcolortable'),
- ('filesetpredicate', fileset, 'loadpredicate'),
- ('revsetpredicate', revset, 'loadpredicate'),
- ('templatefilter', templatefilters, 'loadfilter'),
- ('templatefunc', templater, 'loadfunction'),
- ('templatekeyword', templatekw, 'loadkeyword'),
-]
-
def _dispatch(req):
args = req.args
ui = req.ui
@@ -740,11 +732,7 @@
rpath = _earlygetopt(["-R", "--repository", "--repo"], args)
path, lui = _getlocal(ui, rpath)
- # Side-effect of accessing is debugcommands module is guaranteed to be
- # imported and commands.table is populated.
- debugcommands.command
-
- uis = set([ui, lui])
+ uis = {ui, lui}
if req.repo:
uis.add(req.repo.ui)
@@ -753,24 +741,16 @@
for ui_ in uis:
ui_.setconfig('profiling', 'enabled', 'true', '--profile')
- with profiling.maybeprofile(lui):
+ profile = lui.configbool('profiling', 'enabled')
+ with profiling.profile(lui, enabled=profile) as profiler:
# Configure extensions in phases: uisetup, extsetup, cmdtable, and
- # reposetup. Programs like TortoiseHg will call _dispatch several
- # times so we keep track of configured extensions in _loaded.
+ # reposetup
extensions.loadall(lui)
- exts = [ext for ext in extensions.extensions() if ext[0] not in _loaded]
# Propagate any changes to lui.__class__ by extensions
ui.__class__ = lui.__class__
# (uisetup and extsetup are handled in extensions.loadall)
- for name, module in exts:
- for objname, loadermod, loadername in extraloaders:
- extraobj = getattr(module, objname, None)
- if extraobj is not None:
- getattr(loadermod, loadername)(ui, name, extraobj)
- _loaded.add(name)
-
# (reposetup is handled in hg.repository)
addaliases(lui, commands.table)
@@ -816,6 +796,8 @@
_("time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
(t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
ui.atexit(print_time)
+ if options["profile"]:
+ profiler.start()
if options['verbose'] or options['debug'] or options['quiet']:
for opt in ('verbose', 'debug', 'quiet'):
@@ -871,7 +853,8 @@
repo.ui.ferr = ui.ferr
else:
try:
- repo = hg.repository(ui, path=path)
+ repo = hg.repository(ui, path=path,
+ presetupfuncs=req.prereposetups)
if not repo.local():
raise error.Abort(_("repository '%s' is not local")
% path)
@@ -933,7 +916,7 @@
# version number and try updating.
ct = util.versiontuple(n=2)
worst = None, ct, ''
- if ui.config('ui', 'supportcontact', None) is None:
+ if ui.config('ui', 'supportcontact') is None:
for name, mod in extensions.extensions():
testedwith = getattr(mod, 'testedwith', '')
if pycompat.ispy3 and isinstance(testedwith, str):
@@ -967,7 +950,7 @@
'** If that fixes the bug please report it to %s\n')
% (name, testedwith, name, report))
else:
- bugtracker = ui.config('ui', 'supportcontact', None)
+ bugtracker = ui.config('ui', 'supportcontact')
if bugtracker is None:
bugtracker = _("https://mercurial-scm.org/wiki/BugTracker")
warning = (_("** unknown exception encountered, "
--- a/mercurial/encoding.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/encoding.py Wed Jul 19 07:51:41 2017 -0500
@@ -14,6 +14,7 @@
from . import (
error,
+ policy,
pycompat,
)
@@ -29,10 +30,7 @@
"200c 200d 200e 200f 202a 202b 202c 202d 202e "
"206a 206b 206c 206d 206e 206f feff".split()]
# verify the next function will work
-if pycompat.ispy3:
- assert set(i[0] for i in _ignore) == set([ord(b'\xe2'), ord(b'\xef')])
-else:
- assert set(i[0] for i in _ignore) == set(["\xe2", "\xef"])
+assert all(i.startswith(("\xe2", "\xef")) for i in _ignore)
def hfsignoreclean(s):
"""Remove codepoints ignored by HFS+ from s.
@@ -51,43 +49,18 @@
# the process environment
_nativeenviron = (not pycompat.ispy3 or os.supports_bytes_environ)
if not pycompat.ispy3:
- environ = os.environ
+ environ = os.environ # re-exports
elif _nativeenviron:
- environ = os.environb
+ environ = os.environb # re-exports
else:
# preferred encoding isn't known yet; use utf-8 to avoid unicode error
# and recreate it once encoding is settled
environ = dict((k.encode(u'utf-8'), v.encode(u'utf-8'))
- for k, v in os.environ.items())
-
-def _getpreferredencoding():
- '''
- On darwin, getpreferredencoding ignores the locale environment and
- always returns mac-roman. http://bugs.python.org/issue6202 fixes this
- for Python 2.7 and up. This is the same corrected code for earlier
- Python versions.
-
- However, we can't use a version check for this method, as some distributions
- patch Python to fix this. Instead, we use it as a 'fixer' for the mac-roman
- encoding, as it is unlikely that this encoding is the actually expected.
- '''
- try:
- locale.CODESET
- except AttributeError:
- # Fall back to parsing environment variables :-(
- return locale.getdefaultlocale()[1]
-
- oldloc = locale.setlocale(locale.LC_CTYPE)
- locale.setlocale(locale.LC_CTYPE, "")
- result = locale.nl_langinfo(locale.CODESET)
- locale.setlocale(locale.LC_CTYPE, oldloc)
-
- return result
+ for k, v in os.environ.items()) # re-exports
_encodingfixers = {
'646': lambda: 'ascii',
'ANSI_X3.4-1968': lambda: 'ascii',
- 'mac-roman': _getpreferredencoding
}
try:
@@ -204,25 +177,34 @@
"""Convert a byte string of local encoding to a unicode string"""
return fromlocal(s).decode('utf-8')
+def unimethod(bytesfunc):
+ """Create a proxy method that forwards __unicode__() and __str__() of
+ Python 3 to __bytes__()"""
+ def unifunc(obj):
+ return unifromlocal(bytesfunc(obj))
+ return unifunc
+
# converter functions between native str and byte string. use these if the
# character encoding is not aware (e.g. exception message) or is known to
# be locale dependent (e.g. date formatting.)
if pycompat.ispy3:
strtolocal = unitolocal
strfromlocal = unifromlocal
+ strmethod = unimethod
else:
strtolocal = pycompat.identity
strfromlocal = pycompat.identity
+ strmethod = pycompat.identity
if not _nativeenviron:
# now encoding and helper functions are available, recreate the environ
# dict to be exported to other modules
environ = dict((tolocal(k.encode(u'utf-8')), tolocal(v.encode(u'utf-8')))
- for k, v in os.environ.items())
+ for k, v in os.environ.items()) # re-exports
# How to treat ambiguous-width characters. Set to 'wide' to treat as wide.
-wide = (environ.get("HGENCODINGAMBIGUOUS", "narrow") == "wide"
- and "WFA" or "WF")
+_wide = _sysstr(environ.get("HGENCODINGAMBIGUOUS", "narrow") == "wide"
+ and "WFA" or "WF")
def colwidth(s):
"Find the column width of a string for display in the local encoding"
@@ -232,7 +214,7 @@
"Find the column width of a Unicode string for display"
eaw = getattr(unicodedata, 'east_asian_width', None)
if eaw is not None:
- return sum([eaw(c) in wide and 2 or 1 for c in d])
+ return sum([eaw(c) in _wide and 2 or 1 for c in d])
return len(d)
def getcols(s, start, c):
@@ -346,7 +328,7 @@
def asciilower(s):
# delay importing avoids cyclic dependency around "parsers" in
# pure Python build (util => i18n => encoding => parsers => util)
- from . import parsers
+ parsers = policy.importmod(r'parsers')
impl = getattr(parsers, 'asciilower', _asciilower)
global asciilower
asciilower = impl
@@ -362,7 +344,7 @@
def asciiupper(s):
# delay importing avoids cyclic dependency around "parsers" in
# pure Python build (util => i18n => encoding => parsers => util)
- from . import parsers
+ parsers = policy.importmod(r'parsers')
impl = getattr(parsers, 'asciiupper', _asciiupper)
global asciiupper
asciiupper = impl
@@ -429,7 +411,7 @@
_jsonmap = []
_jsonmap.extend("\\u%04x" % x for x in range(32))
-_jsonmap.extend(chr(x) for x in range(32, 127))
+_jsonmap.extend(pycompat.bytechr(x) for x in range(32, 127))
_jsonmap.append('\\u007f')
_jsonmap[0x09] = '\\t'
_jsonmap[0x0a] = '\\n'
@@ -441,7 +423,7 @@
_paranoidjsonmap = _jsonmap[:]
_paranoidjsonmap[0x3c] = '\\u003c' # '<' (e.g. escape "</script>")
_paranoidjsonmap[0x3e] = '\\u003e' # '>'
-_jsonmap.extend(chr(x) for x in range(128, 256))
+_jsonmap.extend(pycompat.bytechr(x) for x in range(128, 256))
def jsonescape(s, paranoid=False):
'''returns a string suitable for JSON
--- a/mercurial/error.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/error.py Wed Jul 19 07:51:41 2017 -0500
@@ -13,7 +13,16 @@
from __future__ import absolute_import
-# Do not import anything here, please
+# Do not import anything but pycompat here, please
+from . import pycompat
+
+def _tobytes(exc):
+ """Byte-stringify exception in the same way as BaseException_str()"""
+ if not exc.args:
+ return b''
+ if len(exc.args) == 1:
+ return pycompat.bytestr(exc.args[0])
+ return b'(%s)' % b', '.join(b"'%s'" % pycompat.bytestr(a) for a in exc.args)
class Hint(object):
"""Mix-in to provide a hint of an error
@@ -26,10 +35,10 @@
super(Hint, self).__init__(*args, **kw)
class RevlogError(Hint, Exception):
- pass
+ __bytes__ = _tobytes
class FilteredIndexError(IndexError):
- pass
+ __bytes__ = _tobytes
class LookupError(RevlogError, KeyError):
def __init__(self, name, index, message):
@@ -43,6 +52,9 @@
name = short(name)
RevlogError.__init__(self, '%s@%s: %s' % (index, name, message))
+ def __bytes__(self):
+ return RevlogError.__bytes__(self)
+
def __str__(self):
return RevlogError.__str__(self)
@@ -54,12 +66,15 @@
class CommandError(Exception):
"""Exception raised on errors in parsing the command line."""
+ __bytes__ = _tobytes
class InterventionRequired(Hint, Exception):
"""Exception raised when a command requires human intervention."""
+ __bytes__ = _tobytes
class Abort(Hint, Exception):
"""Raised if a command needs to print an error and exit."""
+ __bytes__ = _tobytes
class HookLoadError(Abort):
"""raised when loading a hook fails, aborting an operation
@@ -94,9 +109,11 @@
class OutOfBandError(Hint, Exception):
"""Exception raised when a remote repo reports failure"""
+ __bytes__ = _tobytes
class ParseError(Hint, Exception):
"""Raised when parsing config files and {rev,file}sets (msg[, pos])"""
+ __bytes__ = _tobytes
class UnknownIdentifier(ParseError):
"""Exception raised when a {rev,file}set references an unknown identifier"""
@@ -108,7 +125,7 @@
self.symbols = symbols
class RepoError(Hint, Exception):
- pass
+ __bytes__ = _tobytes
class RepoLookupError(RepoError):
pass
@@ -128,6 +145,8 @@
def __init__(self, err):
IOError.__init__(self, err.errno, err.strerror)
+ # no __bytes__() because error message is derived from the standard IOError
+
class UnsupportedMergeRecords(Abort):
def __init__(self, recordtypes):
from .i18n import _
@@ -138,11 +157,21 @@
hint=_('see https://mercurial-scm.org/wiki/MergeStateRecords for '
'more information'))
+class UnknownVersion(Abort):
+ """generic exception for aborting from an encounter with an unknown version
+ """
+
+ def __init__(self, msg, hint=None, version=None):
+ self.version = version
+ super(UnknownVersion, self).__init__(msg, hint=hint)
+
class LockError(IOError):
def __init__(self, errno, strerror, filename, desc):
IOError.__init__(self, errno, strerror, filename)
self.desc = desc
+ # no __bytes__() because error message is derived from the standard IOError
+
class LockHeld(LockError):
def __init__(self, errno, filename, desc, locker):
LockError.__init__(self, errno, 'Lock held', filename, desc)
@@ -153,33 +182,43 @@
# LockError is for errors while acquiring the lock -- this is unrelated
class LockInheritanceContractViolation(RuntimeError):
- pass
+ __bytes__ = _tobytes
class ResponseError(Exception):
"""Raised to print an error with part of output and exit."""
+ __bytes__ = _tobytes
class UnknownCommand(Exception):
"""Exception raised if command is not in the command table."""
+ __bytes__ = _tobytes
class AmbiguousCommand(Exception):
"""Exception raised if command shortcut matches more than one command."""
+ __bytes__ = _tobytes
# derived from KeyboardInterrupt to simplify some breakout code
class SignalInterrupt(KeyboardInterrupt):
"""Exception raised on SIGTERM and SIGHUP."""
class SignatureError(Exception):
- pass
+ __bytes__ = _tobytes
class PushRaced(RuntimeError):
"""An exception raised during unbundling that indicate a push race"""
+ __bytes__ = _tobytes
-class ProgrammingError(RuntimeError):
+class ProgrammingError(Hint, RuntimeError):
"""Raised if a mercurial (core or extension) developer made a mistake"""
+ __bytes__ = _tobytes
+
+class WdirUnsupported(Exception):
+ """An exception which is raised when 'wdir()' is not supported"""
+ __bytes__ = _tobytes
# bundle2 related errors
class BundleValueError(ValueError):
"""error raised when bundle2 cannot be processed"""
+ __bytes__ = _tobytes
class BundleUnknownFeatureError(BundleValueError):
def __init__(self, parttype=None, params=(), values=()):
@@ -206,6 +245,7 @@
class ReadOnlyPartError(RuntimeError):
"""error raised when code tries to alter a part being generated"""
+ __bytes__ = _tobytes
class PushkeyFailed(Abort):
"""error raised when a pushkey part failed to update a value"""
@@ -246,12 +286,15 @@
This is used for syntax errors as opposed to support errors.
"""
+ __bytes__ = _tobytes
class UnsupportedBundleSpecification(Exception):
"""error raised when a bundle specification is not supported."""
+ __bytes__ = _tobytes
class CorruptedState(Exception):
"""error raised when a command is not able to read its state from file"""
+ __bytes__ = _tobytes
class PeerTransportError(Abort):
"""Transport-level I/O error when communicating with a peer repo."""
--- a/mercurial/exchange.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/exchange.py Wed Jul 19 07:51:41 2017 -0500
@@ -16,7 +16,6 @@
nullid,
)
from . import (
- base85,
bookmarks as bookmod,
bundle2,
changegroup,
@@ -26,10 +25,10 @@
obsolete,
phases,
pushkey,
+ pycompat,
scmutil,
sslutil,
streamclone,
- tags,
url as urlmod,
util,
)
@@ -45,7 +44,7 @@
}
# Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
-_bundlespecv1compengines = set(['gzip', 'bzip2', 'none'])
+_bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
def parsebundlespec(repo, spec, strict=True, externalnames=False):
"""Parse a bundle string specification into parts.
@@ -250,21 +249,6 @@
else:
raise error.Abort(_('unknown bundle type: %s') % b)
-def buildobsmarkerspart(bundler, markers):
- """add an obsmarker part to the bundler with <markers>
-
- No part is created if markers is empty.
- Raises ValueError if the bundler doesn't support any known obsmarker format.
- """
- if markers:
- remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
- version = obsolete.commonversion(remoteversions)
- if version is None:
- raise ValueError('bundler does not support common obsmarker format')
- stream = obsolete.encodemarkers(markers, True, version=version)
- return bundler.newpart('obsmarkers', data=stream)
- return None
-
def _computeoutgoing(repo, heads, common):
"""Computes which revs are outgoing given a set of common
and a set of heads.
@@ -340,8 +324,21 @@
self.bkresult = None
# discover.outgoing object (contains common and outgoing data)
self.outgoing = None
- # all remote heads before the push
+ # all remote topological heads before the push
self.remoteheads = None
+ # Details of the remote branch pre and post push
+ #
+ # mapping: {'branch': ([remoteheads],
+ # [newheads],
+ # [unsyncedheads],
+ # [discardedheads])}
+ # - branch: the branch name
+ # - remoteheads: the list of remote heads known locally
+ # None if the branch is new
+ # - newheads: the new remote heads (known locally) with outgoing pushed
+ # - unsyncedheads: the list of remote heads unknown locally.
+ # - discardedheads: the list of remote heads made obsolete by the push
+ self.pushbranchmap = None
# testable as a boolean indicating if any nodes are missing locally.
self.incoming = None
# phases changes that must be pushed along side the changesets
@@ -550,7 +547,7 @@
unfi = pushop.repo.unfiltered()
remotephases = pushop.remote.listkeys('phases')
publishing = remotephases.get('publishing', False)
- if (pushop.ui.configbool('ui', '_usedassubrepo', False)
+ if (pushop.ui.configbool('ui', '_usedassubrepo')
and remotephases # server supports phases
and not pushop.outgoing.missing # no changesets to be pushed
and publishing):
@@ -729,8 +726,24 @@
Exists as an independent function to aid extensions
"""
- if not pushop.force:
- bundler.newpart('check:heads', data=iter(pushop.remoteheads))
+ # * 'force' do not check for push race,
+ # * if we don't push anything, there are nothing to check.
+ if not pushop.force and pushop.outgoing.missingheads:
+ allowunrelated = 'related' in bundler.capabilities.get('checkheads', ())
+ emptyremote = pushop.pushbranchmap is None
+ if not allowunrelated or emptyremote:
+ bundler.newpart('check:heads', data=iter(pushop.remoteheads))
+ else:
+ affected = set()
+ for branch, heads in pushop.pushbranchmap.iteritems():
+ remoteheads, newheads, unsyncedheads, discardedheads = heads
+ if remoteheads is not None:
+ remote = set(remoteheads)
+ affected |= set(discardedheads) & remote
+ affected |= remote - set(newheads)
+ if affected:
+ data = iter(sorted(affected))
+ bundler.newpart('check:updated-heads', data=data)
@b2partsgenerator('changeset')
def _pushb2ctx(pushop, bundler):
@@ -824,7 +837,7 @@
pushop.stepsdone.add('obsmarkers')
if pushop.outobsmarkers:
markers = sorted(pushop.outobsmarkers)
- buildobsmarkerspart(bundler, markers)
+ bundle2.buildobsmarkerspart(bundler, markers)
@b2partsgenerator('bookmarks')
def _pushb2bookmarks(pushop, bundler):
@@ -952,8 +965,8 @@
'push',
fastpath=True)
else:
- cg = changegroup.getlocalchangegroup(pushop.repo, 'push', outgoing,
- bundlecaps)
+ cg = changegroup.getchangegroup(pushop.repo, 'push', outgoing,
+ bundlecaps=bundlecaps)
# apply changegroup to remote
if unbundle:
@@ -980,7 +993,7 @@
cheads = pushop.commonheads
# even when we don't push, exchanging phase data is useful
remotephases = pushop.remote.listkeys('phases')
- if (pushop.ui.configbool('ui', '_usedassubrepo', False)
+ if (pushop.ui.configbool('ui', '_usedassubrepo')
and remotephases # server supports phases
and pushop.cgresult is None # nothing was pushed
and remotephases.get('publishing', False)):
@@ -1335,7 +1348,9 @@
For now, the only supported data are changegroup."""
kwargs = {'bundlecaps': caps20to10(pullop.repo)}
- streaming, streamreqs = streamclone.canperformstreamclone(pullop)
+ # At the moment we don't do stream clones over bundle2. If that is
+ # implemented then here's where the check for that will go.
+ streaming = False
# pulling changegroup
pullop.stepsdone.add('changegroup')
@@ -1373,7 +1388,7 @@
kwargs['obsmarkers'] = True
pullop.stepsdone.add('obsmarkers')
_pullbundle2extraprepare(pullop, kwargs)
- bundle = pullop.remote.getbundle('pull', **kwargs)
+ bundle = pullop.remote.getbundle('pull', **pycompat.strkwargs(kwargs))
try:
op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
except bundle2.AbortFromPart as exc:
@@ -1383,8 +1398,7 @@
raise error.Abort(_('missing support for %s') % exc)
if pullop.fetch:
- results = [cg['return'] for cg in op.records['changegroup']]
- pullop.cgresult = changegroup.combineresults(results)
+ pullop.cgresult = bundle2.combinechangegroupresults(op)
# processing phases change
for namespace, value in op.records['listkeys']:
@@ -1416,7 +1430,7 @@
pullop.repo.ui.status(_("no changes found\n"))
pullop.cgresult = 0
return
- pullop.gettransaction()
+ tr = pullop.gettransaction()
if pullop.heads is None and list(pullop.common) == [nullid]:
pullop.repo.ui.status(_("requesting all changes\n"))
elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
@@ -1435,7 +1449,9 @@
"changegroupsubset."))
else:
cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
- pullop.cgresult = cg.apply(pullop.repo, 'pull', pullop.remote.url())
+ bundleop = bundle2.applybundle(pullop.repo, cg, tr, 'pull',
+ pullop.remote.url())
+ pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
def _pullphase(pullop):
# Get remote phases data from remote
@@ -1512,7 +1528,7 @@
markers = []
for key in sorted(remoteobs, reverse=True):
if key.startswith('dump'):
- data = base85.b85decode(remoteobs[key])
+ data = util.b85decode(remoteobs[key])
version, newmarks = obsolete._readmarkers(data)
markers += newmarks
if markers:
@@ -1522,7 +1538,7 @@
def caps20to10(repo):
"""return a set with appropriate options to use bundle20 during getbundle"""
- caps = set(['HG20'])
+ caps = {'HG20'}
capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
caps.add('bundle2=' + urlreq.quote(capsblob))
return caps
@@ -1568,6 +1584,7 @@
Returns an iterator over raw chunks (of varying sizes).
"""
+ kwargs = pycompat.byteskwargs(kwargs)
usebundle2 = bundle2requested(bundlecaps)
# bundle10 case
if not usebundle2:
@@ -1595,7 +1612,7 @@
for name in getbundle2partsorder:
func = getbundle2partsmapping[name]
func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
- **kwargs)
+ **pycompat.strkwargs(kwargs))
return bundler.getchunks()
@@ -1648,7 +1665,7 @@
subset = [c.node() for c in repo.set('::%ln', heads)]
markers = repo.obsstore.relevantmarkers(subset)
markers = sorted(markers)
- buildobsmarkerspart(bundler, markers)
+ bundle2.buildobsmarkerspart(bundler, markers)
@getbundle2partsgenerator('hgtagsfnodes')
def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
@@ -1668,30 +1685,7 @@
return
outgoing = _computeoutgoing(repo, heads, common)
-
- if not outgoing.missingheads:
- return
-
- cache = tags.hgtagsfnodescache(repo.unfiltered())
- chunks = []
-
- # .hgtags fnodes are only relevant for head changesets. While we could
- # transfer values for all known nodes, there will likely be little to
- # no benefit.
- #
- # We don't bother using a generator to produce output data because
- # a) we only have 40 bytes per head and even esoteric numbers of heads
- # consume little memory (1M heads is 40MB) b) we don't want to send the
- # part if we don't have entries and knowing if we have entries requires
- # cache lookups.
- for node in outgoing.missingheads:
- # Don't compute missing, as this may slow down serving.
- fnode = cache.getfnode(node, computemissing=False)
- if fnode is not None:
- chunks.extend([node, fnode])
-
- if chunks:
- bundler.newpart('hgtagsfnodes', data=''.join(chunks))
+ bundle2.addparttagsfnodescache(repo, bundler, outgoing)
def _getbookmarks(repo, **kwargs):
"""Returns bookmark to node mapping.
@@ -1732,8 +1726,7 @@
lockandtr = [None, None, None]
recordout = None
# quick fix for output mismatch with bundle2 in 3.4
- captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture',
- False)
+ captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture')
if url.startswith('remote:http:') or url.startswith('remote:https:'):
captureoutput = True
try:
@@ -1741,10 +1734,12 @@
# 'check_heads' call wil be a no-op
check_heads(repo, heads, 'uploading changes')
# push can proceed
- if not util.safehasattr(cg, 'params'):
+ if not isinstance(cg, bundle2.unbundle20):
# legacy case: bundle1 (changegroup 01)
- lockandtr[1] = repo.lock()
- r = cg.apply(repo, source, url)
+ txnname = "\n".join([source, util.hidepassword(url)])
+ with repo.lock(), repo.transaction(txnname) as tr:
+ op = bundle2.applybundle(repo, cg, tr, source, url)
+ r = bundle2.combinechangegroupresults(op)
else:
r = None
try:
@@ -1796,7 +1791,7 @@
repo = pullop.repo
remote = pullop.remote
- if not repo.ui.configbool('ui', 'clonebundles', True):
+ if not repo.ui.configbool('ui', 'clonebundles'):
return
# Only run if local repo is empty.
@@ -1845,7 +1840,7 @@
# We abort by default to avoid the thundering herd of
# clients flooding a server that was expecting expensive
# clone load to be offloaded.
- elif repo.ui.configbool('ui', 'clonebundlefallback', False):
+ elif repo.ui.configbool('ui', 'clonebundlefallback'):
repo.ui.warn(_('falling back to normal clone\n'))
else:
raise error.Abort(_('error applying bundle'),
@@ -1988,7 +1983,7 @@
return self._cmp(other) != 0
def sortclonebundleentries(ui, entries):
- prefers = ui.configlist('ui', 'clonebundleprefers', default=[])
+ prefers = ui.configlist('ui', 'clonebundleprefers')
if not prefers:
return list(entries)
@@ -1999,29 +1994,19 @@
def trypullbundlefromurl(ui, repo, url):
"""Attempt to apply a bundle from a URL."""
- lock = repo.lock()
- try:
- tr = repo.transaction('bundleurl')
+ with repo.lock(), repo.transaction('bundleurl') as tr:
try:
- try:
- fh = urlmod.open(ui, url)
- cg = readbundle(ui, fh, 'stream')
+ fh = urlmod.open(ui, url)
+ cg = readbundle(ui, fh, 'stream')
- if isinstance(cg, bundle2.unbundle20):
- bundle2.processbundle(repo, cg, lambda: tr)
- elif isinstance(cg, streamclone.streamcloneapplier):
- cg.apply(repo)
- else:
- cg.apply(repo, 'clonebundles', url)
- tr.close()
- return True
- except urlerr.httperror as e:
- ui.warn(_('HTTP error fetching bundle: %s\n') % str(e))
- except urlerr.urlerror as e:
- ui.warn(_('error fetching bundle: %s\n') % e.reason)
+ if isinstance(cg, streamclone.streamcloneapplier):
+ cg.apply(repo)
+ else:
+ bundle2.applybundle(repo, cg, tr, 'clonebundles', url)
+ return True
+ except urlerr.httperror as e:
+ ui.warn(_('HTTP error fetching bundle: %s\n') % str(e))
+ except urlerr.urlerror as e:
+ ui.warn(_('error fetching bundle: %s\n') % e.reason)
- return False
- finally:
- tr.release()
- finally:
- lock.release()
+ return False
--- a/mercurial/extensions.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/extensions.py Wed Jul 19 07:51:41 2017 -0500
@@ -18,6 +18,7 @@
from . import (
cmdutil,
+ configitems,
encoding,
error,
pycompat,
@@ -28,8 +29,16 @@
_disabledextensions = {}
_aftercallbacks = {}
_order = []
-_builtin = set(['hbisect', 'bookmarks', 'parentrevspec', 'progress', 'interhg',
- 'inotify', 'hgcia'])
+_builtin = {
+ 'hbisect',
+ 'bookmarks',
+ 'color',
+ 'parentrevspec',
+ 'progress',
+ 'interhg',
+ 'inotify',
+ 'hgcia'
+}
def extensions(ui=None):
if ui:
@@ -118,6 +127,23 @@
if ui.debugflag:
ui.traceback()
+# attributes set by registrar.command
+_cmdfuncattrs = ('norepo', 'optionalrepo', 'inferrepo')
+
+def _validatecmdtable(ui, cmdtable):
+ """Check if extension commands have required attributes"""
+ for c, e in cmdtable.iteritems():
+ f = e[0]
+ if getattr(f, '_deprecatedregistrar', False):
+ ui.deprecwarn("cmdutil.command is deprecated, use "
+ "registrar.command to register '%s'" % c, '4.6')
+ missing = [a for a in _cmdfuncattrs if not util.safehasattr(f, a)]
+ if not missing:
+ continue
+ raise error.ProgrammingError(
+ 'missing attributes: %s' % ', '.join(missing),
+ hint="use @command decorator to register '%s'" % c)
+
def load(ui, name, path):
if name.startswith('hgext.') or name.startswith('hgext/'):
shortname = name[6:]
@@ -139,6 +165,7 @@
ui.warn(_('(third party extension %s requires version %s or newer '
'of Mercurial; disabling)\n') % (shortname, minver))
return
+ _validatecmdtable(ui, getattr(mod, 'cmdtable', {}))
_extensions[shortname] = mod
_order.append(shortname)
@@ -149,20 +176,36 @@
def _runuisetup(name, ui):
uisetup = getattr(_extensions[name], 'uisetup', None)
if uisetup:
- uisetup(ui)
+ try:
+ uisetup(ui)
+ except Exception as inst:
+ ui.traceback()
+ msg = _forbytes(inst)
+ ui.warn(_("*** failed to set up extension %s: %s\n") % (name, msg))
+ return False
+ return True
def _runextsetup(name, ui):
extsetup = getattr(_extensions[name], 'extsetup', None)
if extsetup:
try:
- extsetup(ui)
- except TypeError:
- if inspect.getargspec(extsetup).args:
- raise
- extsetup() # old extsetup with no ui argument
+ try:
+ extsetup(ui)
+ except TypeError:
+ if inspect.getargspec(extsetup).args:
+ raise
+ extsetup() # old extsetup with no ui argument
+ except Exception as inst:
+ ui.traceback()
+ msg = _forbytes(inst)
+ ui.warn(_("*** failed to set up extension %s: %s\n") % (name, msg))
+ return False
+ return True
-def loadall(ui):
+def loadall(ui, whitelist=None):
result = ui.configitems("extensions")
+ if whitelist is not None:
+ result = [(k, v) for (k, v) in result if k in whitelist]
newindex = len(_order)
for (name, path) in result:
if path:
@@ -171,23 +214,31 @@
continue
try:
load(ui, name, path)
- except KeyboardInterrupt:
- raise
except Exception as inst:
- inst = _forbytes(inst)
+ msg = _forbytes(inst)
if path:
ui.warn(_("*** failed to import extension %s from %s: %s\n")
- % (name, path, inst))
+ % (name, path, msg))
else:
ui.warn(_("*** failed to import extension %s: %s\n")
- % (name, inst))
+ % (name, msg))
+ if isinstance(inst, error.Hint) and inst.hint:
+ ui.warn(_("*** (%s)\n") % inst.hint)
ui.traceback()
+ broken = set()
+ for name in _order[newindex:]:
+ if not _runuisetup(name, ui):
+ broken.add(name)
+
for name in _order[newindex:]:
- _runuisetup(name, ui)
+ if name in broken:
+ continue
+ if not _runextsetup(name, ui):
+ broken.add(name)
- for name in _order[newindex:]:
- _runextsetup(name, ui)
+ for name in broken:
+ _extensions[name] = None
# Call aftercallbacks that were never met.
for shortname in _aftercallbacks:
@@ -201,6 +252,44 @@
# entries could result in double execution. See issue4646.
_aftercallbacks.clear()
+ # delay importing avoids cyclic dependency (especially commands)
+ from . import (
+ color,
+ commands,
+ fileset,
+ revset,
+ templatefilters,
+ templatekw,
+ templater,
+ )
+
+ # list of (objname, loadermod, loadername) tuple:
+ # - objname is the name of an object in extension module,
+ # from which extra information is loaded
+ # - loadermod is the module where loader is placed
+ # - loadername is the name of the function,
+ # which takes (ui, extensionname, extraobj) arguments
+ extraloaders = [
+ ('cmdtable', commands, 'loadcmdtable'),
+ ('colortable', color, 'loadcolortable'),
+ ('configtable', configitems, 'loadconfigtable'),
+ ('filesetpredicate', fileset, 'loadpredicate'),
+ ('revsetpredicate', revset, 'loadpredicate'),
+ ('templatefilter', templatefilters, 'loadfilter'),
+ ('templatefunc', templater, 'loadfunction'),
+ ('templatekeyword', templatekw, 'loadkeyword'),
+ ]
+
+ for name in _order[newindex:]:
+ module = _extensions[name]
+ if not module:
+ continue # loading this module failed
+
+ for objname, loadermod, loadername in extraloaders:
+ extraobj = getattr(module, objname, None)
+ if extraobj is not None:
+ getattr(loadermod, loadername)(ui, name, extraobj)
+
def afterloaded(extension, callback):
'''Run the specified function after a named extension is loaded.
@@ -215,7 +304,9 @@
'''
if extension in _extensions:
- callback(loaded=True)
+ # Report loaded as False if the extension is disabled
+ loaded = (_extensions[extension] is not None)
+ callback(loaded=loaded)
else:
_aftercallbacks.setdefault(extension, []).append(callback)
@@ -288,6 +379,25 @@
table[key] = tuple(newentry)
return entry
+def wrapfilecache(cls, propname, wrapper):
+ """Wraps a filecache property.
+
+ These can't be wrapped using the normal wrapfunction.
+ """
+ assert callable(wrapper)
+ for currcls in cls.__mro__:
+ if propname in currcls.__dict__:
+ origfn = currcls.__dict__[propname].func
+ assert callable(origfn)
+ def wrap(*args, **kwargs):
+ return wrapper(origfn, *args, **kwargs)
+ currcls.__dict__[propname].func = wrap
+ break
+
+ if currcls is object:
+ raise AttributeError(
+ _("type '%s' has no property '%s'") % (cls, propname))
+
def wrapfunction(container, funcname, wrapper):
'''Wrap the function named funcname in container
@@ -392,7 +502,11 @@
if name in exts or name in _order or name == '__init__':
continue
exts[name] = path
- exts.update(_disabledextensions)
+ for name, path in _disabledextensions.iteritems():
+ # If no path was provided for a disabled extension (e.g. "color=!"),
+ # don't replace the path we already found by the scan above.
+ if path:
+ exts[name] = path
return exts
def _moduledoc(file):
--- a/mercurial/fancyopts.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/fancyopts.py Wed Jul 19 07:51:41 2017 -0500
@@ -14,7 +14,7 @@
)
# Set of flags to not apply boolean negation logic on
-nevernegate = set([
+nevernegate = {
# avoid --no-noninteractive
'noninteractive',
# These two flags are special because they cause hg to do one
@@ -22,7 +22,7 @@
# like aliases anyway.
'help',
'version',
- ])
+}
def gnugetopt(args, options, longoptions):
"""Parse options mostly like getopt.gnu_getopt.
@@ -39,7 +39,7 @@
args = []
while parseargs:
arg = parseargs.pop(0)
- if arg and arg[0] == '-' and len(arg) > 1:
+ if arg and arg[0:1] == '-' and len(arg) > 1:
parseargs.insert(0, arg)
topts, newparseargs = pycompat.getoptb(parseargs,\
options, longoptions)
--- a/mercurial/filelog.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/filelog.py Wed Jul 19 07:51:41 2017 -0500
@@ -18,7 +18,7 @@
_mdre = re.compile('\1\n')
def parsemeta(text):
- """return (metadatadict, keylist, metadatasize)"""
+ """return (metadatadict, metadatasize)"""
# text can be buffer, so we can't use .startswith or .index
if text[:2] != '\1\n':
return None, None
--- a/mercurial/filemerge.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/filemerge.py Wed Jul 19 07:51:41 2017 -0500
@@ -49,6 +49,17 @@
mergeonly = 'mergeonly' # just the full merge, no premerge
fullmerge = 'fullmerge' # both premerge and merge
+_localchangedotherdeletedmsg = _(
+ "local%(l)s changed %(fd)s which other%(o)s deleted\n"
+ "use (c)hanged version, (d)elete, or leave (u)nresolved?"
+ "$$ &Changed $$ &Delete $$ &Unresolved")
+
+_otherchangedlocaldeletedmsg = _(
+ "other%(o)s changed %(fd)s which local%(l)s deleted\n"
+ "use (c)hanged version, leave (d)eleted, or "
+ "leave (u)nresolved?"
+ "$$ &Changed $$ &Deleted $$ &Unresolved")
+
class absentfilectx(object):
"""Represents a file that's ostensibly in a context but is actually not
present in it.
@@ -133,7 +144,7 @@
def check(tool, pat, symlink, binary, changedelete):
tmsg = tool
if pat:
- tmsg += " specified for " + pat
+ tmsg = _("%s (for pattern %s)") % (tool, pat)
if not _findtool(ui, tool):
if pat: # explicitly requested tool deserves a warning
ui.warn(_("couldn't find merge tool %s\n") % tmsg)
@@ -209,6 +220,9 @@
# internal merge or prompt as last resort
if symlink or binary or changedelete:
+ if not changedelete and len(tools):
+ # any tool is rejected by capability for symlink or binary
+ ui.warn(_("no tool found to merge %s\n") % path)
return ":prompt", None
return ":merge", None
@@ -247,21 +261,16 @@
try:
if fco.isabsent():
index = ui.promptchoice(
- _("local%(l)s changed %(fd)s which other%(o)s deleted\n"
- "use (c)hanged version, (d)elete, or leave (u)nresolved?"
- "$$ &Changed $$ &Delete $$ &Unresolved") % prompts, 2)
+ _localchangedotherdeletedmsg % prompts, 2)
choice = ['local', 'other', 'unresolved'][index]
elif fcd.isabsent():
index = ui.promptchoice(
- _("other%(o)s changed %(fd)s which local%(l)s deleted\n"
- "use (c)hanged version, leave (d)eleted, or "
- "leave (u)nresolved?"
- "$$ &Changed $$ &Deleted $$ &Unresolved") % prompts, 2)
+ _otherchangedlocaldeletedmsg % prompts, 2)
choice = ['other', 'local', 'unresolved'][index]
else:
index = ui.promptchoice(
- _("no tool found to merge %(fd)s\n"
- "keep (l)ocal%(l)s, take (o)ther%(o)s, or leave (u)nresolved?"
+ _("keep (l)ocal%(l)s, take (o)ther%(o)s, or leave (u)nresolved"
+ " for %(fd)s?"
"$$ &Local $$ &Other $$ &Unresolved") % prompts, 2)
choice = ['local', 'other', 'unresolved'][index]
@@ -289,10 +298,10 @@
"""Uses the other `p2()` version of files as the merged version."""
if fco.isabsent():
# local changed, remote deleted -- 'deleted' picked
- repo.wvfs.unlinkpath(fcd.path())
+ _underlyingfctxifabsent(fcd).remove()
deleted = True
else:
- repo.wwrite(fcd.path(), fco.data(), fco.flags())
+ _underlyingfctxifabsent(fcd).write(fco.data(), fco.flags())
deleted = False
return 0, deleted
@@ -304,9 +313,19 @@
used to resolve these conflicts."""
# for change/delete conflicts write out the changed version, then fail
if fcd.isabsent():
- repo.wwrite(fcd.path(), fco.data(), fco.flags())
+ _underlyingfctxifabsent(fcd).write(fco.data(), fco.flags())
return 1, False
+def _underlyingfctxifabsent(filectx):
+ """Sometimes when resolving, our fcd is actually an absentfilectx, but
+ we want to write to it (to do the resolve). This helper returns the
+ underyling workingfilectx in that case.
+ """
+ if filectx.isabsent():
+ return filectx.changectx()[filectx.path()]
+ else:
+ return filectx
+
def _premerge(repo, fcd, fco, fca, toolconf, files, labels=None):
tool, toolpath, binary, symlink = toolconf
if symlink or fcd.isabsent() or fco.isabsent():
@@ -455,7 +474,11 @@
perform a merge manually. If the file to be merged is named
``a.txt``, these files will accordingly be named ``a.txt.local``,
``a.txt.other`` and ``a.txt.base`` and they will be placed in the
- same directory as ``a.txt``."""
+ same directory as ``a.txt``.
+
+ This implies permerge. Therefore, files aren't dumped, if premerge
+ runs successfully. Use :forcedump to forcibly write files out.
+ """
a, b, c, back = files
fd = fcd.path()
@@ -465,6 +488,15 @@
repo.wwrite(fd + ".base", fca.data(), fca.flags())
return False, 1, False
+@internaltool('forcedump', mergeonly)
+def _forcedump(repo, mynode, orig, fcd, fco, fca, toolconf, files,
+ labels=None):
+ """
+ Creates three versions of the files as same as :dump, but omits premerge.
+ """
+ return _idump(repo, mynode, orig, fcd, fco, fca, toolconf, files,
+ labels=labels)
+
def _xmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
tool, toolpath, binary, symlink = toolconf
if fcd.isabsent() or fco.isabsent():
@@ -512,10 +544,10 @@
props['templ'] = template
props['ctx'] = ctx
props['repo'] = repo
- templateresult = template('conflictmarker', **props)
+ templateresult = template.render(props)
label = ('%s:' % label).ljust(pad + 1)
- mark = '%s %s' % (label, templater.stringify(templateresult))
+ mark = '%s %s' % (label, templateresult)
if mark:
mark = mark.splitlines()[0] # split for safety
@@ -523,13 +555,6 @@
# 8 for the prefix of conflict marker lines (e.g. '<<<<<<< ')
return util.ellipsis(mark, 80 - 8)
-_defaultconflictmarker = ('{node|short} '
- '{ifeq(tags, "tip", "", '
- 'ifeq(tags, "", "", "{tags} "))}'
- '{if(bookmarks, "{bookmarks} ")}'
- '{ifeq(branch, "default", "", "{branch} ")}'
- '- {author|user}: {desc|firstline}')
-
_defaultconflictlabels = ['local', 'other']
def _formatlabels(repo, fcd, fco, fca, labels):
@@ -542,9 +567,9 @@
ca = fca.changectx()
ui = repo.ui
- template = ui.config('ui', 'mergemarkertemplate', _defaultconflictmarker)
+ template = ui.config('ui', 'mergemarkertemplate')
template = templater.unquotestring(template)
- tmpl = formatter.maketemplater(ui, 'conflictmarker', template)
+ tmpl = formatter.maketemplater(ui, template)
pad = max(len(l) for l in labels)
@@ -606,7 +631,8 @@
# normalize to new-style names (':merge' etc)
tool = tool[len('internal'):]
ui.debug("picked tool '%s' for %s (binary %s symlink %s changedelete %s)\n"
- % (tool, fd, binary, symlink, changedelete))
+ % (tool, fd, pycompat.bytestr(binary), pycompat.bytestr(symlink),
+ pycompat.bytestr(changedelete)))
if tool in internals:
func = internals[tool]
@@ -652,7 +678,7 @@
r = 1
try:
- markerstyle = ui.config('ui', 'mergemarkers', 'basic')
+ markerstyle = ui.config('ui', 'mergemarkers')
if not labels:
labels = _defaultconflictlabels
if markerstyle != 'basic':
--- a/mercurial/fileset.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/fileset.py Wed Jul 19 07:51:41 2017 -0500
@@ -14,6 +14,7 @@
error,
merge,
parser,
+ pycompat,
registrar,
scmutil,
util,
@@ -37,12 +38,13 @@
"end": (0, None, None, None, None),
}
-keywords = set(['and', 'or', 'not'])
+keywords = {'and', 'or', 'not'}
globchars = ".*{}[]?/\\_"
def tokenize(program):
pos, l = 0, len(program)
+ program = pycompat.bytestr(program)
while pos < l:
c = program[pos]
if c.isspace(): # skip inter-token whitespace
@@ -256,7 +258,7 @@
"""
# i18n: "binary" is a keyword
getargs(x, 0, 0, _("binary takes no arguments"))
- return [f for f in mctx.existing() if util.binary(mctx.ctx[f].data())]
+ return [f for f in mctx.existing() if mctx.ctx[f].isbinary()]
@predicate('exec()', callexisting=True)
def exec_(mctx, x):
--- a/mercurial/formatter.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/formatter.py Wed Jul 19 07:51:41 2017 -0500
@@ -103,6 +103,8 @@
from __future__ import absolute_import
+import collections
+import contextlib
import itertools
import os
@@ -114,6 +116,7 @@
from . import (
error,
+ pycompat,
templatefilters,
templatekw,
templater,
@@ -124,6 +127,10 @@
class _nullconverter(object):
'''convert non-primitive data types to be processed by formatter'''
+
+ # set to True if context object should be stored as item
+ storecontext = False
+
@staticmethod
def formatdate(date, fmt):
'''convert date tuple to appropriate format'''
@@ -175,9 +182,13 @@
return self._converter.formatlist(data, name, fmt, sep)
def context(self, **ctxs):
'''insert context objects to be used to render template keywords'''
- pass
+ ctxs = pycompat.byteskwargs(ctxs)
+ assert all(k == 'ctx' for k in ctxs)
+ if self._converter.storecontext:
+ self._item.update(ctxs)
def data(self, **data):
'''insert data into item that's not shown in default output'''
+ data = pycompat.byteskwargs(data)
self._item.update(data)
def write(self, fields, deftext, *fielddata, **opts):
'''do default text output while assigning data to item'''
@@ -204,6 +215,10 @@
if self._item is not None:
self._showitem()
+def nullformatter(ui, topic):
+ '''formatter that prints nothing'''
+ return baseformatter(ui, topic, opts={}, converter=_nullconverter)
+
class _nestedformatter(baseformatter):
'''build sub items and store them in the parent formatter'''
def __init__(self, ui, converter, data):
@@ -220,6 +235,9 @@
class _plainconverter(object):
'''convert non-primitive data types to text'''
+
+ storecontext = False
+
@staticmethod
def formatdate(date, fmt):
'''stringify date tuple in the given format'''
@@ -235,24 +253,28 @@
class plainformatter(baseformatter):
'''the default text output scheme'''
- def __init__(self, ui, topic, opts):
+ def __init__(self, ui, out, topic, opts):
baseformatter.__init__(self, ui, topic, opts, _plainconverter)
if ui.debugflag:
self.hexfunc = hex
else:
self.hexfunc = short
+ if ui is out:
+ self._write = ui.write
+ else:
+ self._write = lambda s, **opts: out.write(s)
def startitem(self):
pass
def data(self, **data):
pass
def write(self, fields, deftext, *fielddata, **opts):
- self._ui.write(deftext % fielddata, **opts)
+ self._write(deftext % fielddata, **opts)
def condwrite(self, cond, fields, deftext, *fielddata, **opts):
'''do conditional write'''
if cond:
- self._ui.write(deftext % fielddata, **opts)
+ self._write(deftext % fielddata, **opts)
def plain(self, text, **opts):
- self._ui.write(text, **opts)
+ self._write(text, **opts)
def isplain(self):
return True
def nested(self, field):
@@ -311,6 +333,9 @@
class _templateconverter(object):
'''convert non-primitive data types to be processed by templater'''
+
+ storecontext = True
+
@staticmethod
def formatdate(date, fmt):
'''return date tuple'''
@@ -335,49 +360,84 @@
def __init__(self, ui, out, topic, opts):
baseformatter.__init__(self, ui, topic, opts, _templateconverter)
self._out = out
- self._topic = topic
- self._t = gettemplater(ui, topic, opts.get('template', ''),
- cache=templatekw.defaulttempl)
+ spec = lookuptemplate(ui, topic, opts.get('template', ''))
+ self._tref = spec.ref
+ self._t = loadtemplater(ui, spec, cache=templatekw.defaulttempl)
+ self._parts = templatepartsmap(spec, self._t,
+ ['docheader', 'docfooter', 'separator'])
self._counter = itertools.count()
self._cache = {} # for templatekw/funcs to store reusable data
- def context(self, **ctxs):
- '''insert context objects to be used to render template keywords'''
- assert all(k == 'ctx' for k in ctxs)
- self._item.update(ctxs)
+ self._renderitem('docheader', {})
+
def _showitem(self):
+ item = self._item.copy()
+ item['index'] = index = next(self._counter)
+ if index > 0:
+ self._renderitem('separator', {})
+ self._renderitem(self._tref, item)
+
+ def _renderitem(self, part, item):
+ if part not in self._parts:
+ return
+ ref = self._parts[part]
+
# TODO: add support for filectx. probably each template keyword or
# function will have to declare dependent resources. e.g.
# @templatekeyword(..., requires=('ctx',))
props = {}
- if 'ctx' in self._item:
+ if 'ctx' in item:
props.update(templatekw.keywords)
- props['index'] = next(self._counter)
# explicitly-defined fields precede templatekw
- props.update(self._item)
- if 'ctx' in self._item:
+ props.update(item)
+ if 'ctx' in item:
# but template resources must be always available
props['templ'] = self._t
props['repo'] = props['ctx'].repo()
props['revcache'] = {}
- g = self._t(self._topic, ui=self._ui, cache=self._cache, **props)
+ props = pycompat.strkwargs(props)
+ g = self._t(ref, ui=self._ui, cache=self._cache, **props)
self._out.write(templater.stringify(g))
+ def end(self):
+ baseformatter.end(self)
+ self._renderitem('docfooter', {})
+
+templatespec = collections.namedtuple(r'templatespec',
+ r'ref tmpl mapfile')
+
def lookuptemplate(ui, topic, tmpl):
+ """Find the template matching the given -T/--template spec 'tmpl'
+
+ 'tmpl' can be any of the following:
+
+ - a literal template (e.g. '{rev}')
+ - a map-file name or path (e.g. 'changelog')
+ - a reference to [templates] in config file
+ - a path to raw template file
+
+ A map file defines a stand-alone template environment. If a map file
+ selected, all templates defined in the file will be loaded, and the
+ template matching the given topic will be rendered. No aliases will be
+ loaded from user config.
+
+ If no map file selected, all templates in [templates] section will be
+ available as well as aliases in [templatealias].
+ """
+
# looks like a literal template?
if '{' in tmpl:
- return tmpl, None
+ return templatespec('', tmpl, None)
# perhaps a stock style?
if not os.path.split(tmpl)[0]:
mapname = (templater.templatepath('map-cmdline.' + tmpl)
or templater.templatepath(tmpl))
if mapname and os.path.isfile(mapname):
- return None, mapname
+ return templatespec(topic, None, mapname)
# perhaps it's a reference to [templates]
- t = ui.config('templates', tmpl)
- if t:
- return templater.unquotestring(t), None
+ if ui.config('templates', tmpl):
+ return templatespec(tmpl, None, None)
if tmpl == 'list':
ui.write(_("available styles: %s\n") % templater.stylelist())
@@ -387,42 +447,84 @@
if ('/' in tmpl or '\\' in tmpl) and os.path.isfile(tmpl):
# is it a mapfile for a style?
if os.path.basename(tmpl).startswith("map-"):
- return None, os.path.realpath(tmpl)
- tmpl = open(tmpl).read()
- return tmpl, None
+ return templatespec(topic, None, os.path.realpath(tmpl))
+ with util.posixfile(tmpl, 'rb') as f:
+ tmpl = f.read()
+ return templatespec('', tmpl, None)
# constant string?
- return tmpl, None
+ return templatespec('', tmpl, None)
-def gettemplater(ui, topic, spec, cache=None):
- tmpl, mapfile = lookuptemplate(ui, topic, spec)
- assert not (tmpl and mapfile)
- if mapfile:
- return templater.templater.frommapfile(mapfile, cache=cache)
- return maketemplater(ui, topic, tmpl, cache=cache)
+def templatepartsmap(spec, t, partnames):
+ """Create a mapping of {part: ref}"""
+ partsmap = {spec.ref: spec.ref} # initial ref must exist in t
+ if spec.mapfile:
+ partsmap.update((p, p) for p in partnames if p in t)
+ elif spec.ref:
+ for part in partnames:
+ ref = '%s:%s' % (spec.ref, part) # select config sub-section
+ if ref in t:
+ partsmap[part] = ref
+ return partsmap
-def maketemplater(ui, topic, tmpl, cache=None):
+def loadtemplater(ui, spec, cache=None):
+ """Create a templater from either a literal template or loading from
+ a map file"""
+ assert not (spec.tmpl and spec.mapfile)
+ if spec.mapfile:
+ return templater.templater.frommapfile(spec.mapfile, cache=cache)
+ return maketemplater(ui, spec.tmpl, cache=cache)
+
+def maketemplater(ui, tmpl, cache=None):
"""Create a templater from a string template 'tmpl'"""
aliases = ui.configitems('templatealias')
t = templater.templater(cache=cache, aliases=aliases)
+ t.cache.update((k, templater.unquotestring(v))
+ for k, v in ui.configitems('templates'))
if tmpl:
- t.cache[topic] = tmpl
+ t.cache[''] = tmpl
return t
-def formatter(ui, topic, opts):
+def formatter(ui, out, topic, opts):
template = opts.get("template", "")
if template == "json":
- return jsonformatter(ui, ui, topic, opts)
+ return jsonformatter(ui, out, topic, opts)
elif template == "pickle":
- return pickleformatter(ui, ui, topic, opts)
+ return pickleformatter(ui, out, topic, opts)
elif template == "debug":
- return debugformatter(ui, ui, topic, opts)
+ return debugformatter(ui, out, topic, opts)
elif template != "":
- return templateformatter(ui, ui, topic, opts)
+ return templateformatter(ui, out, topic, opts)
# developer config: ui.formatdebug
elif ui.configbool('ui', 'formatdebug'):
- return debugformatter(ui, ui, topic, opts)
+ return debugformatter(ui, out, topic, opts)
# deprecated config: ui.formatjson
elif ui.configbool('ui', 'formatjson'):
- return jsonformatter(ui, ui, topic, opts)
- return plainformatter(ui, topic, opts)
+ return jsonformatter(ui, out, topic, opts)
+ return plainformatter(ui, out, topic, opts)
+
+@contextlib.contextmanager
+def openformatter(ui, filename, topic, opts):
+ """Create a formatter that writes outputs to the specified file
+
+ Must be invoked using the 'with' statement.
+ """
+ with util.posixfile(filename, 'wb') as out:
+ with formatter(ui, out, topic, opts) as fm:
+ yield fm
+
+@contextlib.contextmanager
+def _neverending(fm):
+ yield fm
+
+def maybereopen(fm, filename, opts):
+ """Create a formatter backed by file if filename specified, else return
+ the given formatter
+
+ Must be invoked using the 'with' statement. This will never call fm.end()
+ of the given formatter.
+ """
+ if filename:
+ return openformatter(fm._ui, filename, fm._topic, opts)
+ else:
+ return _neverending(fm)
--- a/mercurial/graphmod.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/graphmod.py Wed Jul 19 07:51:41 2017 -0500
@@ -21,7 +21,7 @@
from .node import nullrev
from . import (
- revset,
+ dagop,
smartset,
util,
)
@@ -70,7 +70,7 @@
# through all revs (issue4782)
if not isinstance(revs, smartset.baseset):
revs = smartset.baseset(revs)
- gp = gpcache[mpar] = sorted(set(revset.reachableroots(
+ gp = gpcache[mpar] = sorted(set(dagop.reachableroots(
repo, revs, [mpar])))
if not gp:
parents.append((MISSINGPARENT, mpar))
@@ -273,7 +273,7 @@
# | | | | | |
line.extend(echars[idx * 2:(idx + 1) * 2])
else:
- line.extend(' ')
+ line.extend([' ', ' '])
# all edges to the right of the current node
remainder = ncols - idx - 1
if remainder > 0:
@@ -410,14 +410,17 @@
# shift_interline is the line containing the non-vertical
# edges between this entry and the next
shift_interline = echars[:idx * 2]
- shift_interline.extend(' ' * (2 + coldiff))
+ for i in xrange(2 + coldiff):
+ shift_interline.append(' ')
count = ncols - idx - 1
if coldiff == -1:
- shift_interline.extend('/ ' * count)
+ for i in xrange(count):
+ shift_interline.extend(['/', ' '])
elif coldiff == 0:
shift_interline.extend(echars[(idx + 1) * 2:ncols * 2])
else:
- shift_interline.extend(r'\ ' * count)
+ for i in xrange(count):
+ shift_interline.extend(['\\', ' '])
# draw edges from the current node to its parents
_drawedges(echars, edges, nodeline, shift_interline)
--- a/mercurial/help.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/help.py Wed Jul 19 07:51:41 2017 -0500
@@ -23,6 +23,7 @@
filemerge,
fileset,
minirst,
+ pycompat,
revset,
templatefilters,
templatekw,
@@ -33,7 +34,7 @@
webcommands,
)
-_exclkeywords = set([
+_exclkeywords = {
"(ADVANCED)",
"(DEPRECATED)",
"(EXPERIMENTAL)",
@@ -43,7 +44,7 @@
_("(DEPRECATED)"),
# i18n: "(EXPERIMENTAL)" is a keyword, must be translated consistently
_("(EXPERIMENTAL)"),
- ])
+}
def listexts(header, exts, indent=1, showdeprecated=False):
'''return a text listing of the given extensions'''
@@ -83,7 +84,11 @@
so = '-' + shortopt
lo = '--' + longopt
if default:
- desc += _(" (default: %s)") % default
+ # default is of unknown type, and in Python 2 we abused
+ # the %s-shows-repr property to handle integers etc. To
+ # match that behavior on Python 3, we do str(default) and
+ # then convert it to bytes.
+ desc += _(" (default: %s)") % pycompat.bytestr(default)
if isinstance(default, list):
lo += " %s [+]" % optlabel
@@ -113,7 +118,7 @@
return True
return False
-def topicmatch(ui, kw):
+def topicmatch(ui, commands, kw):
"""Return help topics matching kw.
Returns {'section': [(name, summary), ...], ...} where section is
@@ -133,14 +138,13 @@
or lowercontains(header)
or (callable(doc) and lowercontains(doc(ui)))):
results['topics'].append((names[0], header))
- from . import commands # avoid cycle
for cmd, entry in commands.table.iteritems():
if len(entry) == 3:
summary = entry[2]
else:
summary = ''
# translate docs *before* searching there
- docs = _(getattr(entry[0], '__doc__', None)) or ''
+ docs = _(pycompat.getdoc(entry[0])) or ''
if kw in cmd or lowercontains(summary) or lowercontains(docs):
doclines = docs.splitlines()
if doclines:
@@ -162,8 +166,9 @@
for cmd, entry in getattr(mod, 'cmdtable', {}).iteritems():
if kw in cmd or (len(entry) > 2 and lowercontains(entry[2])):
cmdname = cmd.partition('|')[0].lstrip('^')
- if entry[0].__doc__:
- cmddoc = gettext(entry[0].__doc__).splitlines()[0]
+ cmddoc = pycompat.getdoc(entry[0])
+ if cmddoc:
+ cmddoc = gettext(cmddoc).splitlines()[0]
else:
cmddoc = _('(no help text available)')
if filtercmd(ui, cmdname, kw, cmddoc):
@@ -259,13 +264,14 @@
"""
entries = []
for name in sorted(items):
- text = (items[name].__doc__ or '').rstrip()
+ text = (pycompat.getdoc(items[name]) or '').rstrip()
if (not text
or not ui.verbose and any(w in text for w in _exclkeywords)):
continue
text = gettext(text)
if dedent:
- text = textwrap.dedent(text)
+ # Abuse latin1 to use textwrap.dedent() on bytes.
+ text = textwrap.dedent(text.decode('latin1')).encode('latin1')
lines = text.splitlines()
doclines = [(lines[0])]
for l in lines[1:]:
@@ -297,13 +303,14 @@
addtopicsymbols('hgweb', '.. webcommandsmarker', webcommands.commands,
dedent=True)
-def help_(ui, name, unknowncmd=False, full=True, subtopic=None, **opts):
+def help_(ui, commands, name, unknowncmd=False, full=True, subtopic=None,
+ **opts):
'''
Generate the help for 'name' as unformatted restructured text. If
'name' is None, describe the commands available.
'''
- from . import commands # avoid cycle
+ opts = pycompat.byteskwargs(opts)
def helpcmd(name, subtopic=None):
try:
@@ -343,7 +350,7 @@
rst.append('\n')
# description
- doc = gettext(entry[0].__doc__)
+ doc = gettext(pycompat.getdoc(entry[0]))
if not doc:
doc = _("(no help text available)")
if util.safehasattr(entry[0], 'definition'): # aliased command
@@ -365,7 +372,7 @@
# extension help text
try:
mod = extensions.find(name)
- doc = gettext(mod.__doc__) or ''
+ doc = gettext(pycompat.getdoc(mod)) or ''
if '\n' in doc.strip():
msg = _("(use 'hg help -e %s' to show help for "
"the %s extension)") % (name, name)
@@ -413,7 +420,7 @@
if name == "shortlist" and not f.startswith("^"):
continue
f = f.lstrip("^")
- doc = e[0].__doc__
+ doc = pycompat.getdoc(e[0])
if filtercmd(ui, f, name, doc):
continue
doc = gettext(doc)
@@ -516,7 +523,7 @@
def helpext(name, subtopic=None):
try:
mod = extensions.find(name)
- doc = gettext(mod.__doc__) or _('no help text available')
+ doc = gettext(pycompat.getdoc(mod)) or _('no help text available')
except KeyError:
mod = None
doc = extensions.disabledext(name)
@@ -552,7 +559,7 @@
def helpextcmd(name, subtopic=None):
cmd, ext, mod = extensions.disabledcmd(ui, name,
ui.configbool('ui', 'strict'))
- doc = gettext(mod.__doc__).splitlines()[0]
+ doc = gettext(pycompat.getdoc(mod)).splitlines()[0]
rst = listexts(_("'%s' is provided by the following "
"extension:") % cmd, {ext: doc}, indent=4,
@@ -566,7 +573,7 @@
rst = []
kw = opts.get('keyword')
if kw or name is None and any(opts[o] for o in opts):
- matches = topicmatch(ui, name or '')
+ matches = topicmatch(ui, commands, name or '')
helpareas = []
if opts.get('extension'):
helpareas += [('extensions', _('Extensions'))]
@@ -613,11 +620,12 @@
# program name
if not ui.quiet:
rst = [_("Mercurial Distributed SCM\n"), '\n']
- rst.extend(helplist(None, **opts))
+ rst.extend(helplist(None, **pycompat.strkwargs(opts)))
return ''.join(rst)
-def formattedhelp(ui, name, keep=None, unknowncmd=False, full=True, **opts):
+def formattedhelp(ui, commands, name, keep=None, unknowncmd=False, full=True,
+ **opts):
"""get help for a given topic (as a dotted name) as rendered rst
Either returns the rendered help text or raises an exception.
@@ -639,11 +647,11 @@
subtopic = remaining
else:
section = remaining
- textwidth = ui.configint('ui', 'textwidth', 78)
+ textwidth = ui.configint('ui', 'textwidth')
termwidth = ui.termwidth() - 2
if textwidth <= 0 or termwidth < textwidth:
textwidth = termwidth
- text = help_(ui, name,
+ text = help_(ui, commands, name,
subtopic=subtopic, unknowncmd=unknowncmd, full=full, **opts)
formatted, pruned = minirst.format(text, textwidth, keep=keep,
--- a/mercurial/help/color.txt Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/help/color.txt Wed Jul 19 07:51:41 2017 -0500
@@ -22,7 +22,8 @@
The default pager on Windows does not support color, so enabling the pager
will effectively disable color. See :hg:`help config.ui.paginate` to disable
the pager. Alternately, MSYS and Cygwin shells provide `less` as a pager,
- which can be configured to support ANSI color mode.
+ which can be configured to support ANSI color mode. Windows 10 natively
+ supports ANSI color mode.
Mode
====
--- a/mercurial/help/config.txt Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/help/config.txt Wed Jul 19 07:51:41 2017 -0500
@@ -415,7 +415,7 @@
``mode``
String: control the method used to output color. One of ``auto``, ``ansi``,
``win32``, ``terminfo`` or ``debug``. In auto mode, Mercurial will
- use ANSI mode by default (or win32 mode on Windows) if it detects a
+ use ANSI mode by default (or win32 mode prior to Windows 10) if it detects a
terminal. Any invalid value will disable color.
``pagermode``
@@ -1133,6 +1133,7 @@
A list of hashes of the DER encoded peer/remote certificate. Values have
the form ``algorithm``:``fingerprint``. e.g.
``sha256:c3ab8ff13720e8ad9047dd39466b3c8974e592c2fa383d4a3960714caef0c4f2``.
+ In addition, colons (``:``) can appear in the fingerprint part.
The following algorithms/prefixes are supported: ``sha1``, ``sha256``,
``sha512``.
@@ -1182,6 +1183,7 @@
[hostsecurity]
hg.example.com:fingerprints = sha256:c3ab8ff13720e8ad9047dd39466b3c8974e592c2fa383d4a3960714caef0c4f2
hg2.example.com:fingerprints = sha1:914f1aff87249c09b6859b88b1906d30756491ca, sha1:fc:e2:8d:d9:51:cd:cb:c1:4d:18:6b:b7:44:8d:49:72:57:e6:cd:33
+ hg3.example.com:fingerprints = sha256:9a:b0:dc:e2:75:ad:8a:b7:84:58:e5:1f:07:32:f1:87:e6:bd:24:22:af:b7:ce:8e:9c:b4:10:cf:b9:f4:0e:d2
foo.example.com:verifycertsfile = /etc/ssl/trusted-ca-certs.pem
To change the default minimum protocol version to TLS 1.2 but to allow TLS 1.1
@@ -1572,6 +1574,28 @@
Specific to the ``ls`` instrumenting profiler.
(default: 5)
+``showmin``
+ Minimum fraction of samples an entry must have for it to be displayed.
+ Can be specified as a float between ``0.0`` and ``1.0`` or can have a
+ ``%`` afterwards to allow values up to ``100``. e.g. ``5%``.
+
+ Only used by the ``stat`` profiler.
+
+ For the ``hotpath`` format, default is ``0.05``.
+ For the ``chrome`` format, default is ``0.005``.
+
+ The option is unused on other formats.
+
+``showmax``
+ Maximum fraction of samples an entry can have before it is ignored in
+ display. The value format is the same as ``showmin``.
+
+ Only used by the ``stat`` profiler.
+
+ For the ``chrome`` format, default is ``0.999``.
+
+ The option is unused on other formats.
+
``progress``
------------
@@ -1657,10 +1681,31 @@
the write lock while determining what data to transfer.
(default: True)
+``uncompressedallowsecret``
+ Whether to allow stream clones when the repository contains secret
+ changesets. (default: False)
+
``preferuncompressed``
When set, clients will try to use the uncompressed streaming
protocol. (default: False)
+``disablefullbundle``
+ When set, servers will refuse attempts to do pull-based clones.
+ If this option is set, ``preferuncompressed`` and/or clone bundles
+ are highly recommended. Partial clones will still be allowed.
+ (default: False)
+
+``concurrent-push-mode``
+ Level of allowed race condition between two pushing clients.
+
+ - 'strict': push is aborted if another client touched the repository
+ while the push was preparing. (default)
+ - 'check-related': push is only aborted if it affects a head that was
+ also affected while the push was preparing.
+
+ This requires a compatible client (version 4.3 and later). Old clients
+ will use 'strict'.
+
``validate``
Whether to validate the completeness of pushed changesets by
checking that all new file revisions specified in manifests are
@@ -2060,6 +2105,15 @@
on all exceptions, even those recognized by Mercurial (such as
IOError or MemoryError). (default: False)
+``tweakdefaults``
+
+ By default Mercurial's behavior changes very little from release
+ to release, but over time the recommended config settings
+ shift. Enable this config to opt in to get automatic tweaks to
+ Mercurial's behavior over time. This config setting will have no
+ effect if ``HGPLAIN`` is set or ``HGPLAINEXCEPT`` is set and does
+ not include ``tweakdefaults``. (default: False)
+
``username``
The committer of a changeset created when running "commit".
Typically a person's name and email address, e.g. ``Fred Widget
--- a/mercurial/help/internals/requirements.txt Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/help/internals/requirements.txt Wed Jul 19 07:51:41 2017 -0500
@@ -117,3 +117,14 @@
Support for this requirement was added in Mercurial 3.4 (released
August 2015). The requirement is currently experimental and is
disabled by default.
+
+exp-sparse
+==========
+
+The working directory is sparse (only contains a subset of files).
+
+Support for this requirement was added in Mercurial 4.3 (released
+August 2017). This requirement and feature are experimental and may
+disappear in a future Mercurial release. The requirement will only
+be present on repositories that have opted in to a sparse working
+directory.
--- a/mercurial/help/internals/revlogs.txt Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/help/internals/revlogs.txt Wed Jul 19 07:51:41 2017 -0500
@@ -45,6 +45,12 @@
1
RevlogNG (*next generation*). It replaced version 0 when it was
implemented in 2006.
+2
+ In-development version incorporating accumulated knowledge and
+ missing features from 10+ years of revlog version 1.
+57005 (0xdead)
+ Reserved for internal testing of new versions. No defined format
+ beyond 32-bit header.
The feature flags short consists of bit flags. Where 0 is the least
significant bit, the following bit offsets define flags:
@@ -60,23 +66,23 @@
The following header values are common:
00 00 00 01
- RevlogNG
+ v1
00 01 00 01
- RevlogNG + inline
+ v1 + inline
00 02 00 01
- RevlogNG + generaldelta
+ v1 + generaldelta
00 03 00 01
- RevlogNG + inline + generaldelta
+ v1 + inline + generaldelta
Following the 32-bit header is the remainder of the first index entry.
Following that are remaining *index* data. Inlined revision data is
possibly located between index entries. More on this layout is described
below.
-RevlogNG Format
-===============
+Version 1 Format
+================
-RevlogNG (version 1) begins with an index describing the revisions in
+Version 1 (RevlogNG) begins with an index describing the revisions in
the revlog. If the ``inline`` flag is set, revision data is stored inline,
or between index entries (as opposed to in a separate container).
@@ -142,6 +148,14 @@
The first 4 bytes of the revlog are shared between the revlog header
and the 6 byte absolute offset field from the first revlog entry.
+Version 2 Format
+================
+
+(In development. Format not finalized or stable.)
+
+Version 2 is currently identical to version 1. This will obviously
+change.
+
Delta Chains
============
--- a/mercurial/help/internals/wireprotocol.txt Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/help/internals/wireprotocol.txt Wed Jul 19 07:51:41 2017 -0500
@@ -632,6 +632,9 @@
branches
--------
+(Legacy command used for discovery in old clients. Clients with ``getbundle``
+use the ``known`` and ``heads`` commands instead.)
+
Obtain ancestor changesets of specific nodes back to a branch point.
Despite the name, this command has nothing to do with Mercurial named branches.
--- a/mercurial/help/revisions.txt Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/help/revisions.txt Wed Jul 19 07:51:41 2017 -0500
@@ -97,6 +97,7 @@
``x~n``
The nth first ancestor of x; ``x~0`` is x; ``x~3`` is ``x^^^``.
+ For n < 0, the nth unambiguous descendant of x.
``x ## y``
Concatenate strings and identifiers into one string.
--- a/mercurial/help/templates.txt Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/help/templates.txt Wed Jul 19 07:51:41 2017 -0500
@@ -109,6 +109,23 @@
$ hg log -r . -Tnodedate
+A template defined in ``templates`` section can also be referenced from
+another template::
+
+ $ hg log -r . -T "{rev} {nodedate}"
+
+but be aware that the keywords cannot be overridden by templates. For example,
+a template defined as ``templates.rev`` cannot be referenced as ``{rev}``.
+
+A template defined in ``templates`` section may have sub templates which
+are inserted before/after/between items::
+
+ [templates]
+ myjson = ' {dict(rev, node|short)|json}'
+ myjson:docheader = '\{\n'
+ myjson:docfooter = '\n}\n'
+ myjson:separator = ',\n'
+
Examples
========
--- a/mercurial/hg.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/hg.py Wed Jul 19 07:51:41 2017 -0500
@@ -130,7 +130,7 @@
def islocal(repo):
'''return true if repo (or path pointing to repo) is local'''
- if isinstance(repo, str):
+ if isinstance(repo, bytes):
try:
return _peerlookup(repo).islocal(repo)
except AttributeError:
@@ -148,10 +148,12 @@
# a list of (ui, repo) functions called for wire peer initialization
wirepeersetupfuncs = []
-def _peerorrepo(ui, path, create=False):
+def _peerorrepo(ui, path, create=False, presetupfuncs=None):
"""return a repository object for the specified path"""
obj = _peerlookup(path).instance(ui, path, create)
ui = getattr(obj, "ui", ui)
+ for f in presetupfuncs or []:
+ f(ui, obj)
for name, module in extensions.extensions(ui):
hook = getattr(module, 'reposetup', None)
if hook:
@@ -161,9 +163,9 @@
f(ui, obj)
return obj
-def repository(ui, path='', create=False):
+def repository(ui, path='', create=False, presetupfuncs=None):
"""return a repository object for the specified path"""
- peer = _peerorrepo(ui, path, create)
+ peer = _peerorrepo(ui, path, create, presetupfuncs=presetupfuncs)
repo = peer.local()
if not repo:
raise error.Abort(_("repository '%s' is not local") %
@@ -407,6 +409,29 @@
return srcpeer, peer(ui, peeropts, dest)
+# Recomputing branch cache might be slow on big repos,
+# so just copy it
+def _copycache(srcrepo, dstcachedir, fname):
+ """copy a cache from srcrepo to destcachedir (if it exists)"""
+ srcbranchcache = srcrepo.vfs.join('cache/%s' % fname)
+ dstbranchcache = os.path.join(dstcachedir, fname)
+ if os.path.exists(srcbranchcache):
+ if not os.path.exists(dstcachedir):
+ os.mkdir(dstcachedir)
+ util.copyfile(srcbranchcache, dstbranchcache)
+
+def _cachetocopy(srcrepo):
+ """return the list of cache file valuable to copy during a clone"""
+ # In local clones we're copying all nodes, not just served
+ # ones. Therefore copy all branch caches over.
+ cachefiles = ['branch2']
+ cachefiles += ['branch2-%s' % f for f in repoview.filtertable]
+ cachefiles += ['rbc-names-v1', 'rbc-revs-v1']
+ cachefiles += ['tags2']
+ cachefiles += ['tags2-%s' % f for f in repoview.filtertable]
+ cachefiles += ['hgtagsfnodes1']
+ return cachefiles
+
def clone(ui, peeropts, source, dest=None, pull=False, rev=None,
update=True, stream=False, branch=None, shareopts=None):
"""Make a copy of an existing repository.
@@ -452,7 +477,7 @@
remote's path/URL. Defaults to "identity."
"""
- if isinstance(source, str):
+ if isinstance(source, bytes):
origsource = ui.expandpath(source)
source, branch = parseurl(origsource, branch)
srcpeer = peer(ui, peeropts, source)
@@ -564,22 +589,9 @@
if os.path.exists(srcbookmarks):
util.copyfile(srcbookmarks, dstbookmarks)
- # Recomputing branch cache might be slow on big repos,
- # so just copy it
- def copybranchcache(fname):
- srcbranchcache = srcrepo.vfs.join('cache/%s' % fname)
- dstbranchcache = os.path.join(dstcachedir, fname)
- if os.path.exists(srcbranchcache):
- if not os.path.exists(dstcachedir):
- os.mkdir(dstcachedir)
- util.copyfile(srcbranchcache, dstbranchcache)
-
dstcachedir = os.path.join(destpath, 'cache')
- # In local clones we're copying all nodes, not just served
- # ones. Therefore copy all branch caches over.
- copybranchcache('branch2')
- for cachename in repoview.filtertable:
- copybranchcache('branch2-%s' % cachename)
+ for cache in _cachetocopy(srcrepo):
+ _copycache(srcrepo, dstcachedir, cache)
# we need to re-init the repo after manually copying the data
# into it
@@ -869,7 +881,7 @@
revs = [repo.lookup(rev) for rev in scmutil.revrange(repo, revs)]
other = peer(repo, opts, dest)
- outgoing = discovery.findcommonoutgoing(repo.unfiltered(), other, revs,
+ outgoing = discovery.findcommonoutgoing(repo, other, revs,
force=opts.get('force'))
o = outgoing.missing
if not o:
--- a/mercurial/hgweb/hgweb_mod.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/hgweb/hgweb_mod.py Wed Jul 19 07:51:41 2017 -0500
@@ -119,19 +119,19 @@
self.csp, self.nonce = cspvalues(self.repo.ui)
# Trust the settings from the .hg/hgrc files by default.
- def config(self, section, name, default=None, untrusted=True):
+ def config(self, section, name, default=uimod._unset, untrusted=True):
return self.repo.ui.config(section, name, default,
untrusted=untrusted)
- def configbool(self, section, name, default=False, untrusted=True):
+ def configbool(self, section, name, default=uimod._unset, untrusted=True):
return self.repo.ui.configbool(section, name, default,
untrusted=untrusted)
- def configint(self, section, name, default=None, untrusted=True):
+ def configint(self, section, name, default=uimod._unset, untrusted=True):
return self.repo.ui.configint(section, name, default,
untrusted=untrusted)
- def configlist(self, section, name, default=None, untrusted=True):
+ def configlist(self, section, name, default=uimod._unset, untrusted=True):
return self.repo.ui.configlist(section, name, default,
untrusted=untrusted)
@@ -311,7 +311,8 @@
should be using instances of this class as the WSGI application.
"""
with self._obtainrepo() as repo:
- with profiling.maybeprofile(repo.ui):
+ profile = repo.ui.configbool('profiling', 'enabled')
+ with profiling.profile(repo.ui, enabled=profile):
for r in self._runwsgi(req, repo):
yield r
--- a/mercurial/hgweb/hgwebdir_mod.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/hgweb/hgwebdir_mod.py Wed Jul 19 07:51:41 2017 -0500
@@ -220,7 +220,8 @@
return False
def run_wsgi(self, req):
- with profiling.maybeprofile(self.ui):
+ profile = self.ui.configbool('profiling', 'enabled')
+ with profiling.profile(self.ui, enabled=profile):
for r in self._runwsgi(req):
yield r
@@ -403,7 +404,7 @@
except Exception as e:
u.warn(_('error reading %s/.hg/hgrc: %s\n') % (path, e))
continue
- def get(section, name, default=None):
+ def get(section, name, default=uimod._unset):
return u.config(section, name, default, untrusted=True)
if u.configbool("web", "hidden", untrusted=True):
--- a/mercurial/hgweb/protocol.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/hgweb/protocol.py Wed Jul 19 07:51:41 2017 -0500
@@ -135,7 +135,7 @@
# Don't allow untrusted settings because disabling compression or
# setting a very high compression level could lead to flooding
# the server's network or CPU.
- opts = {'level': self.ui.configint('server', 'zliblevel', -1)}
+ opts = {'level': self.ui.configint('server', 'zliblevel')}
return HGTYPE, util.compengines['zlib'], opts
def iscmd(cmd):
--- a/mercurial/hgweb/webcommands.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/hgweb/webcommands.py Wed Jul 19 07:51:41 2017 -0500
@@ -28,7 +28,7 @@
from .. import (
archival,
- context,
+ dagop,
encoding,
error,
graphmod,
@@ -260,7 +260,7 @@
if not funcsused.issubset(revset.safesymbols):
return MODE_KEYWORD, query
- mfunc = revset.match(web.repo.ui, revdef)
+ mfunc = revset.match(web.repo.ui, revdef, repo=web.repo)
try:
revs = mfunc(web.repo)
return MODE_REVSET, revs
@@ -808,7 +808,7 @@
context = parsecontext(web.config('web', 'comparisoncontext', '5'))
def filelines(f):
- if util.binary(f.data()):
+ if f.isbinary():
mt = mimetypes.guess_type(f.path())[0]
if not mt:
mt = 'application/octet-stream'
@@ -865,6 +865,7 @@
fctx = webutil.filectx(web.repo, req)
f = fctx.path()
parity = paritygen(web.stripecount)
+ ishead = fctx.filerev() in fctx.filelog().headrevs()
# parents() is called once per line and several lines likely belong to
# same revision. So it is worth caching.
@@ -886,7 +887,7 @@
yield p
def annotate(**map):
- if util.binary(fctx.data()):
+ if fctx.isbinary():
mt = (mimetypes.guess_type(fctx.path())[0]
or 'application/octet-stream')
lines = [((fctx.filectx(fctx.filerev()), 1), '(binary:%s)' % mt)]
@@ -927,6 +928,7 @@
symrev=webutil.symrevorshortnode(req, fctx),
rename=webutil.renamelink(fctx),
permissions=fctx.manifest().flags(f),
+ ishead=int(ishead),
**webutil.commonentry(web.repo, fctx))
@webcommand('filelog')
@@ -1013,9 +1015,9 @@
# would required a dedicated "revnav" class
nav = None
if descend:
- it = context.blockdescendants(fctx, *lrange)
+ it = dagop.blockdescendants(fctx, *lrange)
else:
- it = context.blockancestors(fctx, *lrange)
+ it = dagop.blockancestors(fctx, *lrange)
for i, (c, lr) in enumerate(it, 1):
diffs = None
if patch:
@@ -1374,7 +1376,7 @@
subtopic = None
try:
- doc = helpmod.help_(u, topic, subtopic=subtopic)
+ doc = helpmod.help_(u, commands, topic, subtopic=subtopic)
except error.UnknownCommand:
raise ErrorResponse(HTTP_NOT_FOUND)
return tmpl('help', topic=topicname, doc=doc)
--- a/mercurial/hook.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/hook.py Wed Jul 19 07:51:41 2017 -0500
@@ -13,6 +13,7 @@
from .i18n import _
from . import (
demandimport,
+ encoding,
error,
extensions,
pycompat,
@@ -31,7 +32,7 @@
if callable(funcname):
obj = funcname
- funcname = obj.__module__ + "." + obj.__name__
+ funcname = pycompat.sysbytes(obj.__module__ + r"." + obj.__name__)
else:
d = funcname.rfind('.')
if d == -1:
@@ -97,7 +98,7 @@
(hname, exc.args[0]))
else:
ui.warn(_('error: %s hook raised an exception: '
- '%s\n') % (hname, exc))
+ '%s\n') % (hname, encoding.strtolocal(str(exc))))
if throw:
raise
if not ui.tracebackflag:
@@ -204,6 +205,7 @@
return r
def runhooks(ui, repo, htype, hooks, throw=False, **args):
+ args = pycompat.byteskwargs(args)
res = {}
oldstdout = -1
--- a/mercurial/httpconnection.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/httpconnection.py Wed Jul 19 07:51:41 2017 -0500
@@ -141,7 +141,7 @@
self.pwmgr = pwmgr
self._connections = {}
# developer config: ui.http2debuglevel
- loglevel = ui.config('ui', 'http2debuglevel', default=None)
+ loglevel = ui.config('ui', 'http2debuglevel')
if loglevel and not _configuredlogging:
_configuredlogging = True
logger = logging.getLogger('mercurial.httpclient')
--- a/mercurial/httppeer.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/httppeer.py Wed Jul 19 07:51:41 2017 -0500
@@ -181,7 +181,7 @@
size = data.length
elif data is not None:
size = len(data)
- if size and self.ui.configbool('ui', 'usehttp2', False):
+ if size and self.ui.configbool('ui', 'usehttp2'):
headers['Expect'] = '100-Continue'
headers['X-HgHttp2'] = '1'
if data is not None and 'Content-Type' not in headers:
--- a/mercurial/keepalive.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/keepalive.py Wed Jul 19 07:51:41 2017 -0500
@@ -353,7 +353,9 @@
def __init__(self, sock, debuglevel=0, strict=0, method=None):
- httplib.HTTPResponse.__init__(self, sock, debuglevel, method)
+ httplib.HTTPResponse.__init__(self, sock, debuglevel=debuglevel,
+ strict=True, method=method,
+ buffering=True)
self.fileno = sock.fileno
self.code = None
self._rbuf = ''
--- a/mercurial/localrepo.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/localrepo.py Wed Jul 19 07:51:41 2017 -0500
@@ -20,7 +20,6 @@
hex,
nullid,
short,
- wdirrev,
)
from . import (
bookmarks,
@@ -54,6 +53,7 @@
revset,
revsetlang,
scmutil,
+ sparse,
store,
subrepo,
tags as tagsmod,
@@ -67,26 +67,53 @@
urlerr = util.urlerr
urlreq = util.urlreq
-class repofilecache(scmutil.filecache):
+# set of (path, vfs-location) tuples. vfs-location is:
+# - 'plain' for vfs relative paths
+# - '' for svfs relative paths
+_cachedfiles = set()
+
+class _basefilecache(scmutil.filecache):
"""All filecache usage on repo are done for logic that should be unfiltered
"""
-
- def join(self, obj, fname):
- return obj.vfs.join(fname)
def __get__(self, repo, type=None):
if repo is None:
return self
- return super(repofilecache, self).__get__(repo.unfiltered(), type)
+ return super(_basefilecache, self).__get__(repo.unfiltered(), type)
def __set__(self, repo, value):
- return super(repofilecache, self).__set__(repo.unfiltered(), value)
+ return super(_basefilecache, self).__set__(repo.unfiltered(), value)
def __delete__(self, repo):
- return super(repofilecache, self).__delete__(repo.unfiltered())
+ return super(_basefilecache, self).__delete__(repo.unfiltered())
+
+class repofilecache(_basefilecache):
+ """filecache for files in .hg but outside of .hg/store"""
+ def __init__(self, *paths):
+ super(repofilecache, self).__init__(*paths)
+ for path in paths:
+ _cachedfiles.add((path, 'plain'))
-class storecache(repofilecache):
+ def join(self, obj, fname):
+ return obj.vfs.join(fname)
+
+class storecache(_basefilecache):
"""filecache for files in the store"""
+ def __init__(self, *paths):
+ super(storecache, self).__init__(*paths)
+ for path in paths:
+ _cachedfiles.add((path, ''))
+
def join(self, obj, fname):
return obj.sjoin(fname)
+def isfilecached(repo, name):
+ """check if a repo has already cached "name" filecache-ed property
+
+ This returns (cachedobj-or-None, iscached) tuple.
+ """
+ cacheentry = repo.unfiltered()._filecache.get(name, None)
+ if not cacheentry:
+ return None, False
+ return cacheentry.obj, True
+
class unfilteredpropertycache(util.propertycache):
"""propertycache that apply to unfiltered repo only"""
@@ -113,9 +140,9 @@
return orig(repo.unfiltered(), *args, **kwargs)
return wrapper
-moderncaps = set(('lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
- 'unbundle'))
-legacycaps = moderncaps.union(set(['changegroupsubset']))
+moderncaps = {'lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
+ 'unbundle'}
+legacycaps = moderncaps.union({'changegroupsubset'})
class localpeer(peer.peerrepository):
'''peer for a local repo; reflects only the most recent API'''
@@ -164,7 +191,7 @@
**kwargs)
cb = util.chunkbuffer(chunks)
- if bundlecaps is not None and 'HG20' in bundlecaps:
+ if exchange.bundle2requested(bundlecaps):
# When requesting a bundle2, getbundle returns a stream to make the
# wire level function happier. We need to build a proper object
# from it in local peer.
@@ -213,9 +240,6 @@
def lock(self):
return self._repo.lock()
- def addchangegroup(self, cg, source, url):
- return cg.apply(self._repo, source, url)
-
def pushkey(self, namespace, key, old, new):
return self._repo.pushkey(namespace, key, old, new)
@@ -245,21 +269,61 @@
def changegroupsubset(self, bases, heads, source):
return changegroup.changegroupsubset(self._repo, bases, heads, source)
+# Increment the sub-version when the revlog v2 format changes to lock out old
+# clients.
+REVLOGV2_REQUIREMENT = 'exp-revlogv2.0'
+
class localrepository(object):
- supportedformats = set(('revlogv1', 'generaldelta', 'treemanifest',
- 'manifestv2'))
- _basesupported = supportedformats | set(('store', 'fncache', 'shared',
- 'relshared', 'dotencode'))
- openerreqs = set(('revlogv1', 'generaldelta', 'treemanifest', 'manifestv2'))
- filtername = None
+ supportedformats = {
+ 'revlogv1',
+ 'generaldelta',
+ 'treemanifest',
+ 'manifestv2',
+ REVLOGV2_REQUIREMENT,
+ }
+ _basesupported = supportedformats | {
+ 'store',
+ 'fncache',
+ 'shared',
+ 'relshared',
+ 'dotencode',
+ 'exp-sparse',
+ }
+ openerreqs = {
+ 'revlogv1',
+ 'generaldelta',
+ 'treemanifest',
+ 'manifestv2',
+ }
# a list of (ui, featureset) functions.
# only functions defined in module of enabled extensions are invoked
featuresetupfuncs = set()
+ # list of prefix for file which can be written without 'wlock'
+ # Extensions should extend this list when needed
+ _wlockfreeprefix = {
+ # We might consider requiring 'wlock' for the next
+ # two, but pretty much all the existing code assume
+ # wlock is not needed so we keep them excluded for
+ # now.
+ 'hgrc',
+ 'requires',
+ # XXX cache is a complicated business; someone
+ # should investigate this in depth at some point
+ 'cache/',
+ # XXX shouldn't be dirstate covered by the wlock?
+ 'dirstate',
+ # XXX bisect was still a bit too messy at the time
+ # this changeset was introduced. Someone should fix
+ # the remaining bit and drop this line
+ 'bisect.state',
+ }
+
def __init__(self, baseui, path, create=False):
self.requirements = set()
+ self.filtername = None
# wvfs: rooted at the repository root, used to access the working copy
self.wvfs = vfsmod.vfs(path, expandpath=True, realpath=True)
# vfs: rooted at .hg, used to access repo files outside of .hg/store
@@ -271,13 +335,18 @@
self.root = self.wvfs.base
self.path = self.wvfs.join(".hg")
self.origroot = path
+ # These auditors are not used by the vfs,
+ # only used when writing this comment: basectx.match
self.auditor = pathutil.pathauditor(self.root, self._checknested)
self.nofsauditor = pathutil.pathauditor(self.root, self._checknested,
realfs=False)
- self.vfs = vfsmod.vfs(self.path)
self.baseui = baseui
self.ui = baseui.copy()
self.ui.copy = baseui.copy # prevent copying repo configuration
+ self.vfs = vfsmod.vfs(self.path)
+ if (self.ui.configbool('devel', 'all-warnings') or
+ self.ui.configbool('devel', 'check-locks')):
+ self.vfs.audit = self._getvfsward(self.vfs.audit)
# A list of callback to shape the phase if no data were found.
# Callback are in the form: func(repo, roots) --> processed root.
# This list it to be filled by extension during repo setup
@@ -334,12 +403,14 @@
if inst.errno != errno.ENOENT:
raise
+ cachepath = self.vfs.join('cache')
self.sharedpath = self.path
try:
sharedpath = self.vfs.read("sharedpath").rstrip('\n')
if 'relshared' in self.requirements:
sharedpath = self.vfs.join(sharedpath)
vfs = vfsmod.vfs(sharedpath, realpath=True)
+ cachepath = vfs.join('cache')
s = vfs.base
if not vfs.exists():
raise error.RepoError(
@@ -349,12 +420,25 @@
if inst.errno != errno.ENOENT:
raise
+ if 'exp-sparse' in self.requirements and not sparse.enabled:
+ raise error.RepoError(_('repository is using sparse feature but '
+ 'sparse is not enabled; enable the '
+ '"sparse" extensions to access'))
+
self.store = store.store(
self.requirements, self.sharedpath, vfsmod.vfs)
self.spath = self.store.path
self.svfs = self.store.vfs
self.sjoin = self.store.join
self.vfs.createmode = self.store.createmode
+ self.cachevfs = vfsmod.vfs(cachepath)
+ self.cachevfs.createmode = self.store.createmode
+ if (self.ui.configbool('devel', 'all-warnings') or
+ self.ui.configbool('devel', 'check-locks')):
+ if util.safehasattr(self.svfs, 'vfs'): # this is filtervfs
+ self.svfs.vfs.audit = self._getsvfsward(self.svfs.vfs.audit)
+ else: # standard vfs
+ self.svfs.audit = self._getsvfsward(self.svfs.audit)
self._applyopenerreqs()
if create:
self._writerequirements()
@@ -382,18 +466,73 @@
# - bookmark changes
self.filteredrevcache = {}
+ # post-dirstate-status hooks
+ self._postdsstatus = []
+
+ # Cache of types representing filtered repos.
+ self._filteredrepotypes = weakref.WeakKeyDictionary()
+
# generic mapping between names and nodes
self.names = namespaces.namespaces()
- @property
- def wopener(self):
- self.ui.deprecwarn("use 'repo.wvfs' instead of 'repo.wopener'", '4.2')
- return self.wvfs
+ # Key to signature value.
+ self._sparsesignaturecache = {}
+ # Signature to cached matcher instance.
+ self._sparsematchercache = {}
- @property
- def opener(self):
- self.ui.deprecwarn("use 'repo.vfs' instead of 'repo.opener'", '4.2')
- return self.vfs
+ def _getvfsward(self, origfunc):
+ """build a ward for self.vfs"""
+ rref = weakref.ref(self)
+ def checkvfs(path, mode=None):
+ ret = origfunc(path, mode=mode)
+ repo = rref()
+ if (repo is None
+ or not util.safehasattr(repo, '_wlockref')
+ or not util.safehasattr(repo, '_lockref')):
+ return
+ if mode in (None, 'r', 'rb'):
+ return
+ if path.startswith(repo.path):
+ # truncate name relative to the repository (.hg)
+ path = path[len(repo.path) + 1:]
+ if path.startswith('cache/'):
+ msg = 'accessing cache with vfs instead of cachevfs: "%s"'
+ repo.ui.develwarn(msg % path, stacklevel=2, config="cache-vfs")
+ if path.startswith('journal.'):
+ # journal is covered by 'lock'
+ if repo._currentlock(repo._lockref) is None:
+ repo.ui.develwarn('write with no lock: "%s"' % path,
+ stacklevel=2, config='check-locks')
+ elif repo._currentlock(repo._wlockref) is None:
+ # rest of vfs files are covered by 'wlock'
+ #
+ # exclude special files
+ for prefix in self._wlockfreeprefix:
+ if path.startswith(prefix):
+ return
+ repo.ui.develwarn('write with no wlock: "%s"' % path,
+ stacklevel=2, config='check-locks')
+ return ret
+ return checkvfs
+
+ def _getsvfsward(self, origfunc):
+ """build a ward for self.svfs"""
+ rref = weakref.ref(self)
+ def checksvfs(path, mode=None):
+ ret = origfunc(path, mode=mode)
+ repo = rref()
+ if repo is None or not util.safehasattr(repo, '_lockref'):
+ return
+ if mode in (None, 'r', 'rb'):
+ return
+ if path.startswith(repo.sharedpath):
+ # truncate name relative to the repository (.hg)
+ path = path[len(repo.sharedpath) + 1:]
+ if repo._currentlock(repo._lockref) is None:
+ repo.ui.develwarn('write with no lock: "%s"' % path,
+ stacklevel=3)
+ return ret
+ return checksvfs
def close(self):
self._writecaches()
@@ -406,7 +545,7 @@
self._revbranchcache.write()
def _restrictcapabilities(self, caps):
- if self.ui.configbool('experimental', 'bundle2-advertise', True):
+ if self.ui.configbool('experimental', 'bundle2-advertise'):
caps = set(caps)
capsblob = bundle2.encodecaps(bundle2.getrepocaps(self))
caps.add('bundle2=' + urlreq.quote(capsblob))
@@ -429,14 +568,21 @@
self.svfs.options['manifestcachesize'] = manifestcachesize
# experimental config: format.aggressivemergedeltas
aggressivemergedeltas = self.ui.configbool('format',
- 'aggressivemergedeltas', False)
+ 'aggressivemergedeltas')
self.svfs.options['aggressivemergedeltas'] = aggressivemergedeltas
self.svfs.options['lazydeltabase'] = not scmutil.gddeltaconfig(self.ui)
+ chainspan = self.ui.configbytes('experimental', 'maxdeltachainspan', -1)
+ if 0 <= chainspan:
+ self.svfs.options['maxdeltachainspan'] = chainspan
for r in self.requirements:
if r.startswith('exp-compression-'):
self.svfs.options['compengine'] = r[len('exp-compression-'):]
+ # TODO move "revlogv2" to openerreqs once finalized.
+ if REVLOGV2_REQUIREMENT in self.requirements:
+ self.svfs.options['revlogv2'] = True
+
def _writerequirements(self):
scmutil.writerequires(self.vfs, self.requirements)
@@ -489,11 +635,21 @@
def filtered(self, name):
"""Return a filtered version of a repository"""
- # build a new class with the mixin and the current class
- # (possibly subclass of the repo)
- class filteredrepo(repoview.repoview, self.unfiltered().__class__):
- pass
- return filteredrepo(self, name)
+ # Python <3.4 easily leaks types via __mro__. See
+ # https://bugs.python.org/issue17950. We cache dynamically
+ # created types so this method doesn't leak on every
+ # invocation.
+
+ key = self.unfiltered().__class__
+ if key not in self._filteredrepotypes:
+ # Build a new type with the repoview mixin and the base
+ # class of this repo. Give it a name containing the
+ # filter name to aid debugging.
+ bases = (repoview.repoview, key)
+ cls = type(r'%sfilteredrepo' % name, bases, {})
+ self._filteredrepotypes[key] = cls
+
+ return self._filteredrepotypes[key](self, name)
@repofilecache('bookmarks', 'bookmarks.current')
def _bookmarks(self):
@@ -503,14 +659,6 @@
def _activebookmark(self):
return self._bookmarks.active
- def bookmarkheads(self, bookmark):
- name = bookmark.split('@', 1)[0]
- heads = []
- for mark, n in self._bookmarks.iteritems():
- if mark.split('@', 1)[0] == name:
- heads.append(n)
- return heads
-
# _phaserevs and _phasesets depend on changelog. what we need is to
# call _phasecache.invalidate() if '00changelog.i' was changed, but it
# can't be easily expressed in filecache mechanism.
@@ -520,28 +668,12 @@
@storecache('obsstore')
def obsstore(self):
- # read default format for new obsstore.
- # developer config: format.obsstore-version
- defaultformat = self.ui.configint('format', 'obsstore-version', None)
- # rely on obsstore class default when possible.
- kwargs = {}
- if defaultformat is not None:
- kwargs['defaultformat'] = defaultformat
- readonly = not obsolete.isenabled(self, obsolete.createmarkersopt)
- store = obsolete.obsstore(self.svfs, readonly=readonly,
- **kwargs)
- if store and readonly:
- self.ui.warn(
- _('obsolete feature not enabled but %i markers found!\n')
- % len(list(store)))
- return store
+ return obsolete.makestore(self.ui, self)
@storecache('00changelog.i')
def changelog(self):
- c = changelog.changelog(self.svfs)
- if txnutil.mayhavepending(self.root):
- c.readpending('00changelog.i.a')
- return c
+ return changelog.changelog(self.svfs,
+ trypending=txnutil.mayhavepending(self.root))
def _constructmanifest(self):
# This is a temporary function while we migrate from manifest to
@@ -555,8 +687,10 @@
@repofilecache('dirstate')
def dirstate(self):
+ sparsematchfn = lambda: sparse.matcher(self)
+
return dirstate.dirstate(self.vfs, self.ui, self.root,
- self._dirstatevalidate)
+ self._dirstatevalidate, sparsematchfn)
def _dirstatevalidate(self, node):
try:
@@ -570,15 +704,23 @@
return nullid
def __getitem__(self, changeid):
- if changeid is None or changeid == wdirrev:
+ if changeid is None:
return context.workingctx(self)
if isinstance(changeid, slice):
+ # wdirrev isn't contiguous so the slice shouldn't include it
return [context.changectx(self, i)
for i in xrange(*changeid.indices(len(self)))
if i not in self.changelog.filteredrevs]
- return context.changectx(self, changeid)
+ try:
+ return context.changectx(self, changeid)
+ except error.WdirUnsupported:
+ return context.workingctx(self)
def __contains__(self, changeid):
+ """True if the given changeid exists
+
+ error.LookupError is raised if an ambiguous node specified.
+ """
try:
self[changeid]
return True
@@ -625,16 +767,19 @@
for r in self.revs(expr, *args):
yield self[r]
- def anyrevs(self, specs, user=False):
+ def anyrevs(self, specs, user=False, localalias=None):
'''Find revisions matching one of the given revsets.
Revset aliases from the configuration are not expanded by default. To
- expand user aliases, specify ``user=True``.
+ expand user aliases, specify ``user=True``. To provide some local
+ definitions overriding user aliases, set ``localalias`` to
+ ``{name: definitionstring}``.
'''
if user:
- m = revset.matchany(self.ui, specs, repo=self)
+ m = revset.matchany(self.ui, specs, repo=self,
+ localalias=localalias)
else:
- m = revset.matchany(None, specs)
+ m = revset.matchany(None, specs, localalias=localalias)
return m(self)
def url(self):
@@ -649,11 +794,6 @@
"""
return hook.hook(self.ui, self, name, throw, **args)
- def tag(self, names, node, message, local, user, date, editor=False):
- self.ui.deprecwarn("use 'tagsmod.tag' instead of 'repo.tag'", '4.2')
- tagsmod.tag(self, names, node, message, local, user, date,
- editor=editor)
-
@filteredpropertycache
def _tagscache(self):
'''Returns a tagscache object that contains various tags related
@@ -824,7 +964,7 @@
def publishing(self):
# it's safe (and desirable) to trust the publish flag unconditionally
# so that we don't finalize changes shared between users via ssh or nfs
- return self.ui.configbool('phases', 'publish', True, untrusted=True)
+ return self.ui.configbool('phases', 'publish', untrusted=True)
def cancopy(self):
# so statichttprepo's override of local() works
@@ -841,10 +981,6 @@
return 'store'
return None
- def join(self, f, *insidef):
- self.ui.deprecwarn("use 'repo.vfs.join' instead of 'repo.join'", '4.2')
- return self.vfs.join(os.path.join(f, *insidef))
-
def wjoin(self, f, *insidef):
return self.vfs.reljoin(self.root, f, *insidef)
@@ -857,21 +993,20 @@
return self[changeid]
def setparents(self, p1, p2=nullid):
- self.dirstate.beginparentchange()
- copies = self.dirstate.setparents(p1, p2)
- pctx = self[p1]
- if copies:
- # Adjust copy records, the dirstate cannot do it, it
- # requires access to parents manifests. Preserve them
- # only for entries added to first parent.
- for f in copies:
- if f not in pctx and copies[f] in pctx:
- self.dirstate.copy(copies[f], f)
- if p2 == nullid:
- for f, s in sorted(self.dirstate.copies().items()):
- if f not in pctx and s not in pctx:
- self.dirstate.copy(None, f)
- self.dirstate.endparentchange()
+ with self.dirstate.parentchange():
+ copies = self.dirstate.setparents(p1, p2)
+ pctx = self[p1]
+ if copies:
+ # Adjust copy records, the dirstate cannot do it, it
+ # requires access to parents manifests. Preserve them
+ # only for entries added to first parent.
+ for f in copies:
+ if f not in pctx and copies[f] in pctx:
+ self.dirstate.copy(copies[f], f)
+ if p2 == nullid:
+ for f, s in sorted(self.dirstate.copies().items()):
+ if f not in pctx and s not in pctx:
+ self.dirstate.copy(None, f)
def filectx(self, path, changeid=None, fileid=None):
"""changeid can be a changeset revision, node, or tag.
@@ -884,15 +1019,6 @@
def pathto(self, f, cwd=None):
return self.dirstate.pathto(f, cwd)
- def wfile(self, f, mode='r'):
- self.ui.deprecwarn("use 'repo.wvfs' instead of 'repo.wfile'", '4.2')
- return self.wvfs(f, mode)
-
- def _link(self, f):
- self.ui.deprecwarn("use 'repo.wvfs.islink' instead of 'repo._link'",
- '4.2')
- return self.wvfs.islink(f)
-
def _loadfilter(self, filter):
if filter not in self.filterpats:
l = []
@@ -979,6 +1105,7 @@
raise error.ProgrammingError('transaction requires locking')
tr = self.currenttransaction()
if tr is not None:
+ scmutil.registersummarycallback(self, tr, desc)
return tr.nest()
# abort here if the journal already exists
@@ -1036,8 +1163,7 @@
# "+M": tag is moved (new value),
tracktags = lambda x: None
# experimental config: experimental.hook-track-tags
- shouldtracktags = self.ui.configbool('experimental', 'hook-track-tags',
- False)
+ shouldtracktags = self.ui.configbool('experimental', 'hook-track-tags')
if desc != 'strip' and shouldtracktags:
oldheads = self.changelog.headrevs()
def tracktags(tr2):
@@ -1089,7 +1215,7 @@
else:
# discard all changes (including ones already written
# out) in this transaction
- repo.dirstate.restorebackup(None, prefix='journal.')
+ repo.dirstate.restorebackup(None, 'journal.dirstate')
repo.invalidate(clearfilecache=True)
@@ -1099,7 +1225,12 @@
aftertrans(renames),
self.store.createmode,
validator=validate,
- releasefn=releasefn)
+ releasefn=releasefn,
+ checkambigfiles=_cachedfiles)
+ tr.changes['revs'] = set()
+ tr.changes['obsmarkers'] = set()
+ tr.changes['phases'] = {}
+ tr.changes['bookmarks'] = {}
tr.hookargs['txnid'] = txnid
# note: writing the fncache only during finalize mean that the file is
@@ -1120,6 +1251,7 @@
**pycompat.strkwargs(hookargs))
reporef()._afterlock(hook)
tr.addfinalize('txnclose-hook', txnclosehook)
+ tr.addpostclose('warms-cache', self._buildcacheupdater(tr))
def txnaborthook(tr2):
"""To be run if transaction is aborted
"""
@@ -1130,6 +1262,7 @@
# to stored data if transaction has no error.
tr.addpostclose('refresh-filecachestats', self._refreshfilecachestats)
self._transref = weakref.ref(tr)
+ scmutil.registersummarycallback(self, tr, desc)
return tr
def _journalfiles(self):
@@ -1143,8 +1276,9 @@
def undofiles(self):
return [(vfs, undoname(x)) for vfs, x in self._journalfiles()]
+ @unfilteredmethod
def _writejournal(self, desc):
- self.dirstate.savebackup(None, prefix='journal.')
+ self.dirstate.savebackup(None, 'journal.dirstate')
self.vfs.write("journal.branch",
encoding.fromlocal(self.dirstate.branch()))
self.vfs.write("journal.desc",
@@ -1161,7 +1295,8 @@
vfsmap = {'': self.svfs,
'plain': self.vfs,}
transaction.rollback(self.svfs, vfsmap, "journal",
- self.ui.warn)
+ self.ui.warn,
+ checkambigfiles=_cachedfiles)
self.invalidate()
return True
else:
@@ -1194,11 +1329,11 @@
oldtip = oldlen - 1
if detail and ui.verbose:
- msg = (_('repository tip rolled back to revision %s'
+ msg = (_('repository tip rolled back to revision %d'
' (undo %s: %s)\n')
% (oldtip, desc, detail))
else:
- msg = (_('repository tip rolled back to revision %s'
+ msg = (_('repository tip rolled back to revision %d'
' (undo %s)\n')
% (oldtip, desc))
except IOError:
@@ -1217,7 +1352,8 @@
parents = self.dirstate.parents()
self.destroying()
vfsmap = {'plain': self.vfs, '': self.svfs}
- transaction.rollback(self.svfs, vfsmap, 'undo', ui.warn)
+ transaction.rollback(self.svfs, vfsmap, 'undo', ui.warn,
+ checkambigfiles=_cachedfiles)
if self.vfs.exists('undo.bookmarks'):
self.vfs.rename('undo.bookmarks', 'bookmarks', checkambig=True)
if self.svfs.exists('undo.phaseroots'):
@@ -1230,7 +1366,7 @@
# prevent dirstateguard from overwriting already restored one
dsguard.close()
- self.dirstate.restorebackup(None, prefix='undo.')
+ self.dirstate.restorebackup(None, 'undo.dirstate')
try:
branch = self.vfs.read('undo.branch')
self.dirstate.setbranch(encoding.tolocal(branch))
@@ -1254,6 +1390,38 @@
self.destroyed()
return 0
+ def _buildcacheupdater(self, newtransaction):
+ """called during transaction to build the callback updating cache
+
+ Lives on the repository to help extension who might want to augment
+ this logic. For this purpose, the created transaction is passed to the
+ method.
+ """
+ # we must avoid cyclic reference between repo and transaction.
+ reporef = weakref.ref(self)
+ def updater(tr):
+ repo = reporef()
+ repo.updatecaches(tr)
+ return updater
+
+ @unfilteredmethod
+ def updatecaches(self, tr=None):
+ """warm appropriate caches
+
+ If this function is called after a transaction closed. The transaction
+ will be available in the 'tr' argument. This can be used to selectively
+ update caches relevant to the changes in that transaction.
+ """
+ if tr is not None and tr.hookargs.get('source') == 'strip':
+ # During strip, many caches are invalid but
+ # later call to `destroyed` will refresh them.
+ return
+
+ if tr is None or tr.changes['revs']:
+ # updating the unfiltered branchmap should refresh all the others,
+ self.ui.debug('updating the branch cache\n')
+ branchmap.updatecache(self.filtered('served'))
+
def invalidatecaches(self):
if '_tagscache' in vars(self):
@@ -1262,6 +1430,7 @@
self.unfiltered()._branchcaches.clear()
self.invalidatevolatilesets()
+ self._sparsesignaturecache.clear()
def invalidatevolatilesets(self):
self.filteredrevcache.clear()
@@ -1352,7 +1521,7 @@
(desc, inst.locker))
# default to 600 seconds timeout
l = lockmod.lock(vfs, lockname,
- int(self.ui.config("ui", "timeout", "600")),
+ int(self.ui.config("ui", "timeout")),
releasefn=releasefn, acquirefn=acquirefn,
desc=desc)
self.ui.warn(_("got lock after %s seconds\n") % l.delay)
@@ -1582,7 +1751,7 @@
wctx = self[None]
merge = len(wctx.parents()) > 1
- if not force and merge and match.ispartial():
+ if not force and merge and not match.always():
raise error.Abort(_('cannot partially commit a merge '
'(do not specify files or patterns)'))
@@ -1798,9 +1967,8 @@
# be compliant anyway
#
# if minimal phase was 0 we don't need to retract anything
- phases.retractboundary(self, tr, targetphase, [n])
+ phases.registernew(self, tr, targetphase, [n])
tr.close()
- branchmap.updatecache(self.filtered('served'))
return n
finally:
if tr:
@@ -1842,10 +2010,8 @@
self._phasecache.filterunknown(self)
self._phasecache.write()
- # update the 'served' branch cache to help read only server process
- # Thanks to branchcache collaboration this is done from the nearest
- # filtered subset and it is expected to be fast.
- branchmap.updatecache(self.filtered('served'))
+ # refresh all repository caches
+ self.updatecaches()
# Ensure the persistent tag cache is updated. Doing it now
# means that the tag cache only has to worry about destroyed
@@ -1865,6 +2031,7 @@
changeset, finding all files matched by the match
function
'''
+ self.ui.deprecwarn('use repo[node].walk instead of repo.walk', '4.3')
return self[node].walk(match)
def status(self, node1='.', node2=None, match=None,
@@ -1874,6 +2041,36 @@
return self[node1].status(node2, match, ignored, clean, unknown,
listsubrepos)
+ def addpostdsstatus(self, ps):
+ """Add a callback to run within the wlock, at the point at which status
+ fixups happen.
+
+ On status completion, callback(wctx, status) will be called with the
+ wlock held, unless the dirstate has changed from underneath or the wlock
+ couldn't be grabbed.
+
+ Callbacks should not capture and use a cached copy of the dirstate --
+ it might change in the meanwhile. Instead, they should access the
+ dirstate via wctx.repo().dirstate.
+
+ This list is emptied out after each status run -- extensions should
+ make sure it adds to this list each time dirstate.status is called.
+ Extensions should also make sure they don't call this for statuses
+ that don't involve the dirstate.
+ """
+
+ # The list is located here for uniqueness reasons -- it is actually
+ # managed by the workingctx, but that isn't unique per-repo.
+ self._postdsstatus.append(ps)
+
+ def postdsstatus(self):
+ """Used by workingctx to get the list of post-dirstate-status hooks."""
+ return self._postdsstatus
+
+ def clearpostdsstatus(self):
+ """Used by workingctx to clear post-dirstate-status hooks."""
+ del self._postdsstatus[:]
+
def heads(self, start=None):
if start is None:
cl = self.changelog
@@ -2028,15 +2225,15 @@
new repositories.
"""
ui = repo.ui
- requirements = set(['revlogv1'])
- if ui.configbool('format', 'usestore', True):
+ requirements = {'revlogv1'}
+ if ui.configbool('format', 'usestore'):
requirements.add('store')
- if ui.configbool('format', 'usefncache', True):
+ if ui.configbool('format', 'usefncache'):
requirements.add('fncache')
- if ui.configbool('format', 'dotencode', True):
+ if ui.configbool('format', 'dotencode'):
requirements.add('dotencode')
- compengine = ui.config('experimental', 'format.compression', 'zlib')
+ compengine = ui.config('experimental', 'format.compression')
if compengine not in util.compengines:
raise error.Abort(_('compression engine %s defined by '
'experimental.format.compression not available') %
@@ -2050,9 +2247,16 @@
if scmutil.gdinitconfig(ui):
requirements.add('generaldelta')
- if ui.configbool('experimental', 'treemanifest', False):
+ if ui.configbool('experimental', 'treemanifest'):
requirements.add('treemanifest')
- if ui.configbool('experimental', 'manifestv2', False):
+ if ui.configbool('experimental', 'manifestv2'):
requirements.add('manifestv2')
+ revlogv2 = ui.config('experimental', 'revlogv2')
+ if revlogv2 == 'enable-unstable-format-and-corrupt-my-data':
+ requirements.remove('revlogv1')
+ # generaldelta is implied by revlogv2.
+ requirements.discard('generaldelta')
+ requirements.add(REVLOGV2_REQUIREMENT)
+
return requirements
--- a/mercurial/mail.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/mail.py Wed Jul 19 07:51:41 2017 -0500
@@ -24,26 +24,6 @@
util,
)
-_oldheaderinit = email.header.Header.__init__
-def _unifiedheaderinit(self, *args, **kw):
- """
- Python 2.7 introduces a backwards incompatible change
- (Python issue1974, r70772) in email.Generator.Generator code:
- pre-2.7 code passed "continuation_ws='\t'" to the Header
- constructor, and 2.7 removed this parameter.
-
- Default argument is continuation_ws=' ', which means that the
- behavior is different in <2.7 and 2.7
-
- We consider the 2.7 behavior to be preferable, but need
- to have an unified behavior for versions 2.4 to 2.7
- """
- # override continuation_ws
- kw['continuation_ws'] = ' '
- _oldheaderinit(self, *args, **kw)
-
-setattr(email.header.Header, '__init__', _unifiedheaderinit)
-
class STARTTLS(smtplib.SMTP):
'''Derived class to verify the peer certificate for STARTTLS.
@@ -98,7 +78,7 @@
def _smtp(ui):
'''build an smtp connection and return a function to send mail'''
local_hostname = ui.config('smtp', 'local_hostname')
- tls = ui.config('smtp', 'tls', 'none')
+ tls = ui.config('smtp', 'tls')
# backward compatible: when tls = true, we use starttls.
starttls = tls == 'starttls' or util.parsebool(tls)
smtps = tls == 'smtps'
@@ -155,7 +135,7 @@
def _sendmail(ui, sender, recipients, msg):
'''send mail using sendmail.'''
- program = ui.config('email', 'method', 'smtp')
+ program = ui.config('email', 'method')
cmdline = '%s -f %s %s' % (program, util.email(sender),
' '.join(map(util.email, recipients)))
ui.note(_('sending mail: %s\n') % cmdline)
@@ -184,7 +164,7 @@
if mbox:
open(mbox, 'wb').close()
return lambda s, r, m: _mbox(mbox, s, r, m)
- if ui.config('email', 'method', 'smtp') == 'smtp':
+ if ui.config('email', 'method') == 'smtp':
return _smtp(ui)
return lambda s, r, m: _sendmail(ui, s, r, m)
@@ -194,7 +174,7 @@
def validateconfig(ui):
'''determine if we have enough config data to try sending email.'''
- method = ui.config('email', 'method', 'smtp')
+ method = ui.config('email', 'method')
if method == 'smtp':
if not ui.config('smtp', 'host'):
raise error.Abort(_('smtp specified as email transport, '
--- a/mercurial/manifest.c Wed Jul 05 11:24:22 2017 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,939 +0,0 @@
-/*
- * manifest.c - manifest type that does on-demand parsing.
- *
- * Copyright 2015, Google Inc.
- *
- * This software may be used and distributed according to the terms of
- * the GNU General Public License, incorporated herein by reference.
- */
-#include <Python.h>
-
-#include <assert.h>
-#include <string.h>
-#include <stdlib.h>
-
-#include "util.h"
-
-#define DEFAULT_LINES 100000
-
-typedef struct {
- char *start;
- Py_ssize_t len; /* length of line including terminal newline */
- char hash_suffix;
- bool from_malloc;
- bool deleted;
-} line;
-
-typedef struct {
- PyObject_HEAD
- PyObject *pydata;
- line *lines;
- int numlines; /* number of line entries */
- int livelines; /* number of non-deleted lines */
- int maxlines; /* allocated number of lines */
- bool dirty;
-} lazymanifest;
-
-#define MANIFEST_OOM -1
-#define MANIFEST_NOT_SORTED -2
-#define MANIFEST_MALFORMED -3
-
-/* defined in parsers.c */
-PyObject *unhexlify(const char *str, int len);
-
-/* get the length of the path for a line */
-static size_t pathlen(line *l) {
- return strlen(l->start);
-}
-
-/* get the node value of a single line */
-static PyObject *nodeof(line *l) {
- char *s = l->start;
- ssize_t llen = pathlen(l);
- PyObject *hash = unhexlify(s + llen + 1, 40);
- if (!hash) {
- return NULL;
- }
- if (l->hash_suffix != '\0') {
- char newhash[21];
- memcpy(newhash, PyBytes_AsString(hash), 20);
- Py_DECREF(hash);
- newhash[20] = l->hash_suffix;
- hash = PyBytes_FromStringAndSize(newhash, 21);
- }
- return hash;
-}
-
-/* get the node hash and flags of a line as a tuple */
-static PyObject *hashflags(line *l)
-{
- char *s = l->start;
- size_t plen = pathlen(l);
- PyObject *hash = nodeof(l);
-
- /* 40 for hash, 1 for null byte, 1 for newline */
- size_t hplen = plen + 42;
- Py_ssize_t flen = l->len - hplen;
- PyObject *flags;
- PyObject *tup;
-
- if (!hash)
- return NULL;
- flags = PyBytes_FromStringAndSize(s + hplen - 1, flen);
- if (!flags) {
- Py_DECREF(hash);
- return NULL;
- }
- tup = PyTuple_Pack(2, hash, flags);
- Py_DECREF(flags);
- Py_DECREF(hash);
- return tup;
-}
-
-/* if we're about to run out of space in the line index, add more */
-static bool realloc_if_full(lazymanifest *self)
-{
- if (self->numlines == self->maxlines) {
- self->maxlines *= 2;
- self->lines = realloc(self->lines, self->maxlines * sizeof(line));
- }
- return !!self->lines;
-}
-
-/*
- * Find the line boundaries in the manifest that 'data' points to and store
- * information about each line in 'self'.
- */
-static int find_lines(lazymanifest *self, char *data, Py_ssize_t len)
-{
- char *prev = NULL;
- while (len > 0) {
- line *l;
- char *next = memchr(data, '\n', len);
- if (!next) {
- return MANIFEST_MALFORMED;
- }
- next++; /* advance past newline */
- if (!realloc_if_full(self)) {
- return MANIFEST_OOM; /* no memory */
- }
- if (prev && strcmp(prev, data) > -1) {
- /* This data isn't sorted, so we have to abort. */
- return MANIFEST_NOT_SORTED;
- }
- l = self->lines + ((self->numlines)++);
- l->start = data;
- l->len = next - data;
- l->hash_suffix = '\0';
- l->from_malloc = false;
- l->deleted = false;
- len = len - l->len;
- prev = data;
- data = next;
- }
- self->livelines = self->numlines;
- return 0;
-}
-
-static int lazymanifest_init(lazymanifest *self, PyObject *args)
-{
- char *data;
- Py_ssize_t len;
- int err, ret;
- PyObject *pydata;
- if (!PyArg_ParseTuple(args, "S", &pydata)) {
- return -1;
- }
- err = PyBytes_AsStringAndSize(pydata, &data, &len);
-
- self->dirty = false;
- if (err == -1)
- return -1;
- self->pydata = pydata;
- Py_INCREF(self->pydata);
- Py_BEGIN_ALLOW_THREADS
- self->lines = malloc(DEFAULT_LINES * sizeof(line));
- self->maxlines = DEFAULT_LINES;
- self->numlines = 0;
- if (!self->lines)
- ret = MANIFEST_OOM;
- else
- ret = find_lines(self, data, len);
- Py_END_ALLOW_THREADS
- switch (ret) {
- case 0:
- break;
- case MANIFEST_OOM:
- PyErr_NoMemory();
- break;
- case MANIFEST_NOT_SORTED:
- PyErr_Format(PyExc_ValueError,
- "Manifest lines not in sorted order.");
- break;
- case MANIFEST_MALFORMED:
- PyErr_Format(PyExc_ValueError,
- "Manifest did not end in a newline.");
- break;
- default:
- PyErr_Format(PyExc_ValueError,
- "Unknown problem parsing manifest.");
- }
- return ret == 0 ? 0 : -1;
-}
-
-static void lazymanifest_dealloc(lazymanifest *self)
-{
- /* free any extra lines we had to allocate */
- int i;
- for (i = 0; i < self->numlines; i++) {
- if (self->lines[i].from_malloc) {
- free(self->lines[i].start);
- }
- }
- if (self->lines) {
- free(self->lines);
- self->lines = NULL;
- }
- if (self->pydata) {
- Py_DECREF(self->pydata);
- self->pydata = NULL;
- }
- PyObject_Del(self);
-}
-
-/* iteration support */
-
-typedef struct {
- PyObject_HEAD lazymanifest *m;
- Py_ssize_t pos;
-} lmIter;
-
-static void lmiter_dealloc(PyObject *o)
-{
- lmIter *self = (lmIter *)o;
- Py_DECREF(self->m);
- PyObject_Del(self);
-}
-
-static line *lmiter_nextline(lmIter *self)
-{
- do {
- self->pos++;
- if (self->pos >= self->m->numlines) {
- return NULL;
- }
- /* skip over deleted manifest entries */
- } while (self->m->lines[self->pos].deleted);
- return self->m->lines + self->pos;
-}
-
-static PyObject *lmiter_iterentriesnext(PyObject *o)
-{
- size_t pl;
- line *l;
- Py_ssize_t consumed;
- PyObject *ret = NULL, *path = NULL, *hash = NULL, *flags = NULL;
- l = lmiter_nextline((lmIter *)o);
- if (!l) {
- goto done;
- }
- pl = pathlen(l);
- path = PyBytes_FromStringAndSize(l->start, pl);
- hash = nodeof(l);
- consumed = pl + 41;
- flags = PyBytes_FromStringAndSize(l->start + consumed,
- l->len - consumed - 1);
- if (!path || !hash || !flags) {
- goto done;
- }
- ret = PyTuple_Pack(3, path, hash, flags);
-done:
- Py_XDECREF(path);
- Py_XDECREF(hash);
- Py_XDECREF(flags);
- return ret;
-}
-
-#ifdef IS_PY3K
-#define LAZYMANIFESTENTRIESITERATOR_TPFLAGS Py_TPFLAGS_DEFAULT
-#else
-#define LAZYMANIFESTENTRIESITERATOR_TPFLAGS Py_TPFLAGS_DEFAULT \
- | Py_TPFLAGS_HAVE_ITER
-#endif
-
-static PyTypeObject lazymanifestEntriesIterator = {
- PyVarObject_HEAD_INIT(NULL, 0)
- "parsers.lazymanifest.entriesiterator", /*tp_name */
- sizeof(lmIter), /*tp_basicsize */
- 0, /*tp_itemsize */
- lmiter_dealloc, /*tp_dealloc */
- 0, /*tp_print */
- 0, /*tp_getattr */
- 0, /*tp_setattr */
- 0, /*tp_compare */
- 0, /*tp_repr */
- 0, /*tp_as_number */
- 0, /*tp_as_sequence */
- 0, /*tp_as_mapping */
- 0, /*tp_hash */
- 0, /*tp_call */
- 0, /*tp_str */
- 0, /*tp_getattro */
- 0, /*tp_setattro */
- 0, /*tp_as_buffer */
- LAZYMANIFESTENTRIESITERATOR_TPFLAGS, /* tp_flags */
- "Iterator for 3-tuples in a lazymanifest.", /* tp_doc */
- 0, /* tp_traverse */
- 0, /* tp_clear */
- 0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- PyObject_SelfIter, /* tp_iter: __iter__() method */
- lmiter_iterentriesnext, /* tp_iternext: next() method */
-};
-
-static PyObject *lmiter_iterkeysnext(PyObject *o)
-{
- size_t pl;
- line *l = lmiter_nextline((lmIter *)o);
- if (!l) {
- return NULL;
- }
- pl = pathlen(l);
- return PyBytes_FromStringAndSize(l->start, pl);
-}
-
-#ifdef IS_PY3K
-#define LAZYMANIFESTKEYSITERATOR_TPFLAGS Py_TPFLAGS_DEFAULT
-#else
-#define LAZYMANIFESTKEYSITERATOR_TPFLAGS Py_TPFLAGS_DEFAULT \
- | Py_TPFLAGS_HAVE_ITER
-#endif
-
-static PyTypeObject lazymanifestKeysIterator = {
- PyVarObject_HEAD_INIT(NULL, 0)
- "parsers.lazymanifest.keysiterator", /*tp_name */
- sizeof(lmIter), /*tp_basicsize */
- 0, /*tp_itemsize */
- lmiter_dealloc, /*tp_dealloc */
- 0, /*tp_print */
- 0, /*tp_getattr */
- 0, /*tp_setattr */
- 0, /*tp_compare */
- 0, /*tp_repr */
- 0, /*tp_as_number */
- 0, /*tp_as_sequence */
- 0, /*tp_as_mapping */
- 0, /*tp_hash */
- 0, /*tp_call */
- 0, /*tp_str */
- 0, /*tp_getattro */
- 0, /*tp_setattro */
- 0, /*tp_as_buffer */
- LAZYMANIFESTKEYSITERATOR_TPFLAGS, /* tp_flags */
- "Keys iterator for a lazymanifest.", /* tp_doc */
- 0, /* tp_traverse */
- 0, /* tp_clear */
- 0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- PyObject_SelfIter, /* tp_iter: __iter__() method */
- lmiter_iterkeysnext, /* tp_iternext: next() method */
-};
-
-static lazymanifest *lazymanifest_copy(lazymanifest *self);
-
-static PyObject *lazymanifest_getentriesiter(lazymanifest *self)
-{
- lmIter *i = NULL;
- lazymanifest *t = lazymanifest_copy(self);
- if (!t) {
- PyErr_NoMemory();
- return NULL;
- }
- i = PyObject_New(lmIter, &lazymanifestEntriesIterator);
- if (i) {
- i->m = t;
- i->pos = -1;
- } else {
- Py_DECREF(t);
- PyErr_NoMemory();
- }
- return (PyObject *)i;
-}
-
-static PyObject *lazymanifest_getkeysiter(lazymanifest *self)
-{
- lmIter *i = NULL;
- lazymanifest *t = lazymanifest_copy(self);
- if (!t) {
- PyErr_NoMemory();
- return NULL;
- }
- i = PyObject_New(lmIter, &lazymanifestKeysIterator);
- if (i) {
- i->m = t;
- i->pos = -1;
- } else {
- Py_DECREF(t);
- PyErr_NoMemory();
- }
- return (PyObject *)i;
-}
-
-/* __getitem__ and __setitem__ support */
-
-static Py_ssize_t lazymanifest_size(lazymanifest *self)
-{
- return self->livelines;
-}
-
-static int linecmp(const void *left, const void *right)
-{
- return strcmp(((const line *)left)->start,
- ((const line *)right)->start);
-}
-
-static PyObject *lazymanifest_getitem(lazymanifest *self, PyObject *key)
-{
- line needle;
- line *hit;
- if (!PyBytes_Check(key)) {
- PyErr_Format(PyExc_TypeError,
- "getitem: manifest keys must be a string.");
- return NULL;
- }
- needle.start = PyBytes_AsString(key);
- hit = bsearch(&needle, self->lines, self->numlines, sizeof(line),
- &linecmp);
- if (!hit || hit->deleted) {
- PyErr_Format(PyExc_KeyError, "No such manifest entry.");
- return NULL;
- }
- return hashflags(hit);
-}
-
-static int lazymanifest_delitem(lazymanifest *self, PyObject *key)
-{
- line needle;
- line *hit;
- if (!PyBytes_Check(key)) {
- PyErr_Format(PyExc_TypeError,
- "delitem: manifest keys must be a string.");
- return -1;
- }
- needle.start = PyBytes_AsString(key);
- hit = bsearch(&needle, self->lines, self->numlines, sizeof(line),
- &linecmp);
- if (!hit || hit->deleted) {
- PyErr_Format(PyExc_KeyError,
- "Tried to delete nonexistent manifest entry.");
- return -1;
- }
- self->dirty = true;
- hit->deleted = true;
- self->livelines--;
- return 0;
-}
-
-/* Do a binary search for the insertion point for new, creating the
- * new entry if needed. */
-static int internalsetitem(lazymanifest *self, line *new) {
- int start = 0, end = self->numlines;
- while (start < end) {
- int pos = start + (end - start) / 2;
- int c = linecmp(new, self->lines + pos);
- if (c < 0)
- end = pos;
- else if (c > 0)
- start = pos + 1;
- else {
- if (self->lines[pos].deleted)
- self->livelines++;
- if (self->lines[pos].from_malloc)
- free(self->lines[pos].start);
- start = pos;
- goto finish;
- }
- }
- /* being here means we need to do an insert */
- if (!realloc_if_full(self)) {
- PyErr_NoMemory();
- return -1;
- }
- memmove(self->lines + start + 1, self->lines + start,
- (self->numlines - start) * sizeof(line));
- self->numlines++;
- self->livelines++;
-finish:
- self->lines[start] = *new;
- self->dirty = true;
- return 0;
-}
-
-static int lazymanifest_setitem(
- lazymanifest *self, PyObject *key, PyObject *value)
-{
- char *path;
- Py_ssize_t plen;
- PyObject *pyhash;
- Py_ssize_t hlen;
- char *hash;
- PyObject *pyflags;
- char *flags;
- Py_ssize_t flen;
- size_t dlen;
- char *dest;
- int i;
- line new;
- if (!PyBytes_Check(key)) {
- PyErr_Format(PyExc_TypeError,
- "setitem: manifest keys must be a string.");
- return -1;
- }
- if (!value) {
- return lazymanifest_delitem(self, key);
- }
- if (!PyTuple_Check(value) || PyTuple_Size(value) != 2) {
- PyErr_Format(PyExc_TypeError,
- "Manifest values must be a tuple of (node, flags).");
- return -1;
- }
- if (PyBytes_AsStringAndSize(key, &path, &plen) == -1) {
- return -1;
- }
-
- pyhash = PyTuple_GetItem(value, 0);
- if (!PyBytes_Check(pyhash)) {
- PyErr_Format(PyExc_TypeError,
- "node must be a 20-byte string");
- return -1;
- }
- hlen = PyBytes_Size(pyhash);
- /* Some parts of the codebase try and set 21 or 22
- * byte "hash" values in order to perturb things for
- * status. We have to preserve at least the 21st
- * byte. Sigh. If there's a 22nd byte, we drop it on
- * the floor, which works fine.
- */
- if (hlen != 20 && hlen != 21 && hlen != 22) {
- PyErr_Format(PyExc_TypeError,
- "node must be a 20-byte string");
- return -1;
- }
- hash = PyBytes_AsString(pyhash);
-
- pyflags = PyTuple_GetItem(value, 1);
- if (!PyBytes_Check(pyflags) || PyBytes_Size(pyflags) > 1) {
- PyErr_Format(PyExc_TypeError,
- "flags must a 0 or 1 byte string");
- return -1;
- }
- if (PyBytes_AsStringAndSize(pyflags, &flags, &flen) == -1) {
- return -1;
- }
- /* one null byte and one newline */
- dlen = plen + 41 + flen + 1;
- dest = malloc(dlen);
- if (!dest) {
- PyErr_NoMemory();
- return -1;
- }
- memcpy(dest, path, plen + 1);
- for (i = 0; i < 20; i++) {
- /* Cast to unsigned, so it will not get sign-extended when promoted
- * to int (as is done when passing to a variadic function)
- */
- sprintf(dest + plen + 1 + (i * 2), "%02x", (unsigned char)hash[i]);
- }
- memcpy(dest + plen + 41, flags, flen);
- dest[plen + 41 + flen] = '\n';
- new.start = dest;
- new.len = dlen;
- new.hash_suffix = '\0';
- if (hlen > 20) {
- new.hash_suffix = hash[20];
- }
- new.from_malloc = true; /* is `start` a pointer we allocated? */
- new.deleted = false; /* is this entry deleted? */
- if (internalsetitem(self, &new)) {
- return -1;
- }
- return 0;
-}
-
-static PyMappingMethods lazymanifest_mapping_methods = {
- (lenfunc)lazymanifest_size, /* mp_length */
- (binaryfunc)lazymanifest_getitem, /* mp_subscript */
- (objobjargproc)lazymanifest_setitem, /* mp_ass_subscript */
-};
-
-/* sequence methods (important or __contains__ builds an iterator) */
-
-static int lazymanifest_contains(lazymanifest *self, PyObject *key)
-{
- line needle;
- line *hit;
- if (!PyBytes_Check(key)) {
- /* Our keys are always strings, so if the contains
- * check is for a non-string, just return false. */
- return 0;
- }
- needle.start = PyBytes_AsString(key);
- hit = bsearch(&needle, self->lines, self->numlines, sizeof(line),
- &linecmp);
- if (!hit || hit->deleted) {
- return 0;
- }
- return 1;
-}
-
-static PySequenceMethods lazymanifest_seq_meths = {
- (lenfunc)lazymanifest_size, /* sq_length */
- 0, /* sq_concat */
- 0, /* sq_repeat */
- 0, /* sq_item */
- 0, /* sq_slice */
- 0, /* sq_ass_item */
- 0, /* sq_ass_slice */
- (objobjproc)lazymanifest_contains, /* sq_contains */
- 0, /* sq_inplace_concat */
- 0, /* sq_inplace_repeat */
-};
-
-
-/* Other methods (copy, diff, etc) */
-static PyTypeObject lazymanifestType;
-
-/* If the manifest has changes, build the new manifest text and reindex it. */
-static int compact(lazymanifest *self) {
- int i;
- ssize_t need = 0;
- char *data;
- line *src, *dst;
- PyObject *pydata;
- if (!self->dirty)
- return 0;
- for (i = 0; i < self->numlines; i++) {
- if (!self->lines[i].deleted) {
- need += self->lines[i].len;
- }
- }
- pydata = PyBytes_FromStringAndSize(NULL, need);
- if (!pydata)
- return -1;
- data = PyBytes_AsString(pydata);
- if (!data) {
- return -1;
- }
- src = self->lines;
- dst = self->lines;
- for (i = 0; i < self->numlines; i++, src++) {
- char *tofree = NULL;
- if (src->from_malloc) {
- tofree = src->start;
- }
- if (!src->deleted) {
- memcpy(data, src->start, src->len);
- *dst = *src;
- dst->start = data;
- dst->from_malloc = false;
- data += dst->len;
- dst++;
- }
- free(tofree);
- }
- Py_DECREF(self->pydata);
- self->pydata = pydata;
- self->numlines = self->livelines;
- self->dirty = false;
- return 0;
-}
-
-static PyObject *lazymanifest_text(lazymanifest *self)
-{
- if (compact(self) != 0) {
- PyErr_NoMemory();
- return NULL;
- }
- Py_INCREF(self->pydata);
- return self->pydata;
-}
-
-static lazymanifest *lazymanifest_copy(lazymanifest *self)
-{
- lazymanifest *copy = NULL;
- if (compact(self) != 0) {
- goto nomem;
- }
- copy = PyObject_New(lazymanifest, &lazymanifestType);
- if (!copy) {
- goto nomem;
- }
- copy->numlines = self->numlines;
- copy->livelines = self->livelines;
- copy->dirty = false;
- copy->lines = malloc(self->maxlines *sizeof(line));
- if (!copy->lines) {
- goto nomem;
- }
- memcpy(copy->lines, self->lines, self->numlines * sizeof(line));
- copy->maxlines = self->maxlines;
- copy->pydata = self->pydata;
- Py_INCREF(copy->pydata);
- return copy;
-nomem:
- PyErr_NoMemory();
- Py_XDECREF(copy);
- return NULL;
-}
-
-static lazymanifest *lazymanifest_filtercopy(
- lazymanifest *self, PyObject *matchfn)
-{
- lazymanifest *copy = NULL;
- int i;
- if (!PyCallable_Check(matchfn)) {
- PyErr_SetString(PyExc_TypeError, "matchfn must be callable");
- return NULL;
- }
- /* compact ourselves first to avoid double-frees later when we
- * compact tmp so that it doesn't have random pointers to our
- * underlying from_malloc-data (self->pydata is safe) */
- if (compact(self) != 0) {
- goto nomem;
- }
- copy = PyObject_New(lazymanifest, &lazymanifestType);
- if (!copy) {
- goto nomem;
- }
- copy->dirty = true;
- copy->lines = malloc(self->maxlines * sizeof(line));
- if (!copy->lines) {
- goto nomem;
- }
- copy->maxlines = self->maxlines;
- copy->numlines = 0;
- copy->pydata = self->pydata;
- Py_INCREF(self->pydata);
- for (i = 0; i < self->numlines; i++) {
- PyObject *arglist = NULL, *result = NULL;
- arglist = Py_BuildValue("(s)", self->lines[i].start);
- if (!arglist) {
- return NULL;
- }
- result = PyObject_CallObject(matchfn, arglist);
- Py_DECREF(arglist);
- /* if the callback raised an exception, just let it
- * through and give up */
- if (!result) {
- free(copy->lines);
- Py_DECREF(self->pydata);
- return NULL;
- }
- if (PyObject_IsTrue(result)) {
- assert(!(self->lines[i].from_malloc));
- copy->lines[copy->numlines++] = self->lines[i];
- }
- Py_DECREF(result);
- }
- copy->livelines = copy->numlines;
- return copy;
-nomem:
- PyErr_NoMemory();
- Py_XDECREF(copy);
- return NULL;
-}
-
-static PyObject *lazymanifest_diff(lazymanifest *self, PyObject *args)
-{
- lazymanifest *other;
- PyObject *pyclean = NULL;
- bool listclean;
- PyObject *emptyTup = NULL, *ret = NULL;
- PyObject *es;
- int sneedle = 0, oneedle = 0;
- if (!PyArg_ParseTuple(args, "O!|O", &lazymanifestType, &other, &pyclean)) {
- return NULL;
- }
- listclean = (!pyclean) ? false : PyObject_IsTrue(pyclean);
- es = PyBytes_FromString("");
- if (!es) {
- goto nomem;
- }
- emptyTup = PyTuple_Pack(2, Py_None, es);
- Py_DECREF(es);
- if (!emptyTup) {
- goto nomem;
- }
- ret = PyDict_New();
- if (!ret) {
- goto nomem;
- }
- while (sneedle != self->numlines || oneedle != other->numlines) {
- line *left = self->lines + sneedle;
- line *right = other->lines + oneedle;
- int result;
- PyObject *key;
- PyObject *outer;
- /* If we're looking at a deleted entry and it's not
- * the end of the manifest, just skip it. */
- if (left->deleted && sneedle < self->numlines) {
- sneedle++;
- continue;
- }
- if (right->deleted && oneedle < other->numlines) {
- oneedle++;
- continue;
- }
- /* if we're at the end of either manifest, then we
- * know the remaining items are adds so we can skip
- * the strcmp. */
- if (sneedle == self->numlines) {
- result = 1;
- } else if (oneedle == other->numlines) {
- result = -1;
- } else {
- result = linecmp(left, right);
- }
- key = result <= 0 ?
- PyBytes_FromString(left->start) :
- PyBytes_FromString(right->start);
- if (!key)
- goto nomem;
- if (result < 0) {
- PyObject *l = hashflags(left);
- if (!l) {
- goto nomem;
- }
- outer = PyTuple_Pack(2, l, emptyTup);
- Py_DECREF(l);
- if (!outer) {
- goto nomem;
- }
- PyDict_SetItem(ret, key, outer);
- Py_DECREF(outer);
- sneedle++;
- } else if (result > 0) {
- PyObject *r = hashflags(right);
- if (!r) {
- goto nomem;
- }
- outer = PyTuple_Pack(2, emptyTup, r);
- Py_DECREF(r);
- if (!outer) {
- goto nomem;
- }
- PyDict_SetItem(ret, key, outer);
- Py_DECREF(outer);
- oneedle++;
- } else {
- /* file exists in both manifests */
- if (left->len != right->len
- || memcmp(left->start, right->start, left->len)
- || left->hash_suffix != right->hash_suffix) {
- PyObject *l = hashflags(left);
- PyObject *r;
- if (!l) {
- goto nomem;
- }
- r = hashflags(right);
- if (!r) {
- Py_DECREF(l);
- goto nomem;
- }
- outer = PyTuple_Pack(2, l, r);
- Py_DECREF(l);
- Py_DECREF(r);
- if (!outer) {
- goto nomem;
- }
- PyDict_SetItem(ret, key, outer);
- Py_DECREF(outer);
- } else if (listclean) {
- PyDict_SetItem(ret, key, Py_None);
- }
- sneedle++;
- oneedle++;
- }
- Py_DECREF(key);
- }
- Py_DECREF(emptyTup);
- return ret;
-nomem:
- PyErr_NoMemory();
- Py_XDECREF(ret);
- Py_XDECREF(emptyTup);
- return NULL;
-}
-
-static PyMethodDef lazymanifest_methods[] = {
- {"iterkeys", (PyCFunction)lazymanifest_getkeysiter, METH_NOARGS,
- "Iterate over file names in this lazymanifest."},
- {"iterentries", (PyCFunction)lazymanifest_getentriesiter, METH_NOARGS,
- "Iterate over (path, nodeid, flags) tuples in this lazymanifest."},
- {"copy", (PyCFunction)lazymanifest_copy, METH_NOARGS,
- "Make a copy of this lazymanifest."},
- {"filtercopy", (PyCFunction)lazymanifest_filtercopy, METH_O,
- "Make a copy of this manifest filtered by matchfn."},
- {"diff", (PyCFunction)lazymanifest_diff, METH_VARARGS,
- "Compare this lazymanifest to another one."},
- {"text", (PyCFunction)lazymanifest_text, METH_NOARGS,
- "Encode this manifest to text."},
- {NULL},
-};
-
-#ifdef IS_PY3K
-#define LAZYMANIFEST_TPFLAGS Py_TPFLAGS_DEFAULT
-#else
-#define LAZYMANIFEST_TPFLAGS Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_SEQUENCE_IN
-#endif
-
-static PyTypeObject lazymanifestType = {
- PyVarObject_HEAD_INIT(NULL, 0)
- "parsers.lazymanifest", /* tp_name */
- sizeof(lazymanifest), /* tp_basicsize */
- 0, /* tp_itemsize */
- (destructor)lazymanifest_dealloc, /* tp_dealloc */
- 0, /* tp_print */
- 0, /* tp_getattr */
- 0, /* tp_setattr */
- 0, /* tp_compare */
- 0, /* tp_repr */
- 0, /* tp_as_number */
- &lazymanifest_seq_meths, /* tp_as_sequence */
- &lazymanifest_mapping_methods, /* tp_as_mapping */
- 0, /* tp_hash */
- 0, /* tp_call */
- 0, /* tp_str */
- 0, /* tp_getattro */
- 0, /* tp_setattro */
- 0, /* tp_as_buffer */
- LAZYMANIFEST_TPFLAGS, /* tp_flags */
- "TODO(augie)", /* tp_doc */
- 0, /* tp_traverse */
- 0, /* tp_clear */
- 0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- (getiterfunc)lazymanifest_getkeysiter, /* tp_iter */
- 0, /* tp_iternext */
- lazymanifest_methods, /* tp_methods */
- 0, /* tp_members */
- 0, /* tp_getset */
- 0, /* tp_base */
- 0, /* tp_dict */
- 0, /* tp_descr_get */
- 0, /* tp_descr_set */
- 0, /* tp_dictoffset */
- (initproc)lazymanifest_init, /* tp_init */
- 0, /* tp_alloc */
-};
-
-void manifest_module_init(PyObject * mod)
-{
- lazymanifestType.tp_new = PyType_GenericNew;
- if (PyType_Ready(&lazymanifestType) < 0)
- return;
- Py_INCREF(&lazymanifestType);
-
- PyModule_AddObject(mod, "lazymanifest",
- (PyObject *)&lazymanifestType);
-}
--- a/mercurial/manifest.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/manifest.py Wed Jul 19 07:51:41 2017 -0500
@@ -8,6 +8,7 @@
from __future__ import absolute_import
import heapq
+import itertools
import os
import struct
@@ -19,11 +20,12 @@
from . import (
error,
mdiff,
- parsers,
+ policy,
revlog,
util,
)
+parsers = policy.importmod(r'parsers')
propertycache = util.propertycache
def _parsev1(data):
@@ -32,7 +34,7 @@
# class exactly matches its C counterpart to try and help
# prevent surprise breakage for anyone that develops against
# the pure version.
- if data and data[-1] != '\n':
+ if data and data[-1:] != '\n':
raise ValueError('Manifest did not end in a newline.')
prev = None
for l in data.splitlines():
@@ -54,7 +56,7 @@
end = data.find('\n', pos + 1) # +1 to skip stem length byte
if end == -1:
raise ValueError('Manifest ended with incomplete file entry.')
- stemlen = ord(data[pos])
+ stemlen = ord(data[pos:pos + 1])
items = data[pos + 1:end].split('\0')
f = prevf[:stemlen] + items[0]
if prevf > f:
@@ -577,9 +579,11 @@
c._lm = self._lm.copy()
return c
- def iteritems(self):
+ def items(self):
return (x[:2] for x in self._lm.iterentries())
+ iteritems = items
+
def iterentries(self):
return self._lm.iterentries()
@@ -778,25 +782,29 @@
def iterentries(self):
self._load()
- for p, n in sorted(self._dirs.items() + self._files.items()):
+ for p, n in sorted(itertools.chain(self._dirs.items(),
+ self._files.items())):
if p in self._files:
yield self._subpath(p), n, self._flags.get(p, '')
else:
for x in n.iterentries():
yield x
- def iteritems(self):
+ def items(self):
self._load()
- for p, n in sorted(self._dirs.items() + self._files.items()):
+ for p, n in sorted(itertools.chain(self._dirs.items(),
+ self._files.items())):
if p in self._files:
yield self._subpath(p), n
else:
for f, sn in n.iteritems():
yield f, sn
+ iteritems = items
+
def iterkeys(self):
self._load()
- for p in sorted(self._dirs.keys() + self._files.keys()):
+ for p in sorted(itertools.chain(self._dirs, self._files)):
if p in self._files:
yield self._subpath(p)
else:
@@ -1175,25 +1183,31 @@
'''A revlog that stores manifest texts. This is responsible for caching the
full-text manifest contents.
'''
- def __init__(self, opener, dir='', dirlogcache=None, indexfile=None):
+ def __init__(self, opener, dir='', dirlogcache=None, indexfile=None,
+ treemanifest=False):
"""Constructs a new manifest revlog
`indexfile` - used by extensions to have two manifests at once, like
when transitioning between flatmanifeset and treemanifests.
+
+ `treemanifest` - used to indicate this is a tree manifest revlog. Opener
+ options can also be used to make this a tree manifest revlog. The opener
+ option takes precedence, so if it is set to True, we ignore whatever
+ value is passed in to the constructor.
"""
# During normal operations, we expect to deal with not more than four
# revs at a time (such as during commit --amend). When rebasing large
# stacks of commits, the number can go up, hence the config knob below.
cachesize = 4
- usetreemanifest = False
+ optiontreemanifest = False
usemanifestv2 = False
opts = getattr(opener, 'options', None)
if opts is not None:
cachesize = opts.get('manifestcachesize', cachesize)
- usetreemanifest = opts.get('treemanifest', usetreemanifest)
+ optiontreemanifest = opts.get('treemanifest', False)
usemanifestv2 = opts.get('manifestv2', usemanifestv2)
- self._treeondisk = usetreemanifest
+ self._treeondisk = optiontreemanifest or treemanifest
self._usemanifestv2 = usemanifestv2
self._fulltextcache = util.lrucachedict(cachesize)
@@ -1216,7 +1230,8 @@
self._dirlogcache = {'': self}
super(manifestrevlog, self).__init__(opener, indexfile,
- checkambig=bool(dir))
+ # only root indexfile is cached
+ checkambig=not bool(dir))
@property
def fulltextcache(self):
@@ -1231,8 +1246,10 @@
if dir:
assert self._treeondisk
if dir not in self._dirlogcache:
- self._dirlogcache[dir] = manifestrevlog(self.opener, dir,
- self._dirlogcache)
+ mfrevlog = manifestrevlog(self.opener, dir,
+ self._dirlogcache,
+ treemanifest=self._treeondisk)
+ self._dirlogcache[dir] = mfrevlog
return self._dirlogcache[dir]
def add(self, m, transaction, link, p1, p2, added, removed, readtree=None):
@@ -1317,8 +1334,7 @@
cachesize = opts.get('manifestcachesize', cachesize)
self._treeinmem = usetreemanifest
- self._oldmanifest = repo._constructmanifest()
- self._revlog = self._oldmanifest
+ self._revlog = repo._constructmanifest()
# A cache of the manifestctx or treemanifestctx for each directory
self._dirmancache = {}
@@ -1340,12 +1356,7 @@
the revlog
"""
if node in self._dirmancache.get(dir, ()):
- cachemf = self._dirmancache[dir][node]
- # The old manifest may put non-ctx manifests in the cache, so
- # skip those since they don't implement the full api.
- if (isinstance(cachemf, manifestctx) or
- isinstance(cachemf, treemanifestctx)):
- return cachemf
+ return self._dirmancache[dir][node]
if dir:
if self._revlog._treeondisk:
--- a/mercurial/match.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/match.py Wed Jul 19 07:51:41 2017 -0500
@@ -38,7 +38,8 @@
for kind, pat, source in kindpats:
if kind == 'set':
if not ctx:
- raise error.Abort(_("fileset expression with no context"))
+ raise error.ProgrammingError("fileset expression with no "
+ "context")
s = ctx.getfileset(pat)
fset.update(s)
@@ -52,7 +53,7 @@
return fset, other
def _expandsubinclude(kindpats, root):
- '''Returns the list of subinclude matchers and the kindpats without the
+ '''Returns the list of subinclude matcher args and the kindpats without the
subincludes in it.'''
relmatchers = []
other = []
@@ -64,12 +65,12 @@
path = pathutil.join(sourceroot, pat)
newroot = pathutil.dirname(path)
- relmatcher = match(newroot, '', [], ['include:%s' % path])
+ matcherargs = (newroot, '', [], ['include:%s' % path])
prefix = pathutil.canonpath(root, root, newroot)
if prefix:
prefix += '/'
- relmatchers.append((prefix, relmatcher))
+ relmatchers.append((prefix, matcherargs))
else:
other.append((kind, pat, source))
@@ -84,118 +85,161 @@
return False
return True
-class match(object):
- def __init__(self, root, cwd, patterns, include=None, exclude=None,
- default='glob', exact=False, auditor=None, ctx=None,
- listsubrepos=False, warn=None, badfn=None):
- """build an object to match a set of file patterns
+def match(root, cwd, patterns=None, include=None, exclude=None, default='glob',
+ exact=False, auditor=None, ctx=None, listsubrepos=False, warn=None,
+ badfn=None, icasefs=False):
+ """build an object to match a set of file patterns
+
+ arguments:
+ root - the canonical root of the tree you're matching against
+ cwd - the current working directory, if relevant
+ patterns - patterns to find
+ include - patterns to include (unless they are excluded)
+ exclude - patterns to exclude (even if they are included)
+ default - if a pattern in patterns has no explicit type, assume this one
+ exact - patterns are actually filenames (include/exclude still apply)
+ warn - optional function used for printing warnings
+ badfn - optional bad() callback for this matcher instead of the default
+ icasefs - make a matcher for wdir on case insensitive filesystems, which
+ normalizes the given patterns to the case in the filesystem
- arguments:
- root - the canonical root of the tree you're matching against
- cwd - the current working directory, if relevant
- patterns - patterns to find
- include - patterns to include (unless they are excluded)
- exclude - patterns to exclude (even if they are included)
- default - if a pattern in patterns has no explicit type, assume this one
- exact - patterns are actually filenames (include/exclude still apply)
- warn - optional function used for printing warnings
- badfn - optional bad() callback for this matcher instead of the default
+ a pattern is one of:
+ 'glob:<glob>' - a glob relative to cwd
+ 're:<regexp>' - a regular expression
+ 'path:<path>' - a path relative to repository root, which is matched
+ recursively
+ 'rootfilesin:<path>' - a path relative to repository root, which is
+ matched non-recursively (will not match subdirectories)
+ 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
+ 'relpath:<path>' - a path relative to cwd
+ 'relre:<regexp>' - a regexp that needn't match the start of a name
+ 'set:<fileset>' - a fileset expression
+ 'include:<path>' - a file of patterns to read and include
+ 'subinclude:<path>' - a file of patterns to match against files under
+ the same directory
+ '<something>' - a pattern of the specified default type
+ """
+ normalize = _donormalize
+ if icasefs:
+ if exact:
+ raise error.ProgrammingError("a case-insensitive exact matcher "
+ "doesn't make sense")
+ dirstate = ctx.repo().dirstate
+ dsnormalize = dirstate.normalize
+
+ def normalize(patterns, default, root, cwd, auditor, warn):
+ kp = _donormalize(patterns, default, root, cwd, auditor, warn)
+ kindpats = []
+ for kind, pats, source in kp:
+ if kind not in ('re', 'relre'): # regex can't be normalized
+ p = pats
+ pats = dsnormalize(pats)
+
+ # Preserve the original to handle a case only rename.
+ if p != pats and p in dirstate:
+ kindpats.append((kind, p, source))
+
+ kindpats.append((kind, pats, source))
+ return kindpats
+
+ if exact:
+ m = exactmatcher(root, cwd, patterns, badfn)
+ elif patterns:
+ kindpats = normalize(patterns, default, root, cwd, auditor, warn)
+ if _kindpatsalwaysmatch(kindpats):
+ m = alwaysmatcher(root, cwd, badfn, relativeuipath=True)
+ else:
+ m = patternmatcher(root, cwd, kindpats, ctx=ctx,
+ listsubrepos=listsubrepos, badfn=badfn)
+ else:
+ # It's a little strange that no patterns means to match everything.
+ # Consider changing this to match nothing (probably using nevermatcher).
+ m = alwaysmatcher(root, cwd, badfn)
- a pattern is one of:
- 'glob:<glob>' - a glob relative to cwd
- 're:<regexp>' - a regular expression
- 'path:<path>' - a path relative to repository root, which is matched
- recursively
- 'rootfilesin:<path>' - a path relative to repository root, which is
- matched non-recursively (will not match subdirectories)
- 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
- 'relpath:<path>' - a path relative to cwd
- 'relre:<regexp>' - a regexp that needn't match the start of a name
- 'set:<fileset>' - a fileset expression
- 'include:<path>' - a file of patterns to read and include
- 'subinclude:<path>' - a file of patterns to match against files under
- the same directory
- '<something>' - a pattern of the specified default type
- """
- if include is None:
- include = []
- if exclude is None:
- exclude = []
+ if include:
+ kindpats = normalize(include, 'glob', root, cwd, auditor, warn)
+ im = includematcher(root, cwd, kindpats, ctx=ctx,
+ listsubrepos=listsubrepos, badfn=None)
+ m = intersectmatchers(m, im)
+ if exclude:
+ kindpats = normalize(exclude, 'glob', root, cwd, auditor, warn)
+ em = includematcher(root, cwd, kindpats, ctx=ctx,
+ listsubrepos=listsubrepos, badfn=None)
+ m = differencematcher(m, em)
+ return m
+
+def exact(root, cwd, files, badfn=None):
+ return exactmatcher(root, cwd, files, badfn=badfn)
+
+def always(root, cwd):
+ return alwaysmatcher(root, cwd)
+
+def never(root, cwd):
+ return nevermatcher(root, cwd)
+
+def badmatch(match, badfn):
+ """Make a copy of the given matcher, replacing its bad method with the given
+ one.
+ """
+ m = copy.copy(match)
+ m.bad = badfn
+ return m
+def _donormalize(patterns, default, root, cwd, auditor, warn):
+ '''Convert 'kind:pat' from the patterns list to tuples with kind and
+ normalized and rooted patterns and with listfiles expanded.'''
+ kindpats = []
+ for kind, pat in [_patsplit(p, default) for p in patterns]:
+ if kind in ('glob', 'relpath'):
+ pat = pathutil.canonpath(root, cwd, pat, auditor)
+ elif kind in ('relglob', 'path', 'rootfilesin'):
+ pat = util.normpath(pat)
+ elif kind in ('listfile', 'listfile0'):
+ try:
+ files = util.readfile(pat)
+ if kind == 'listfile0':
+ files = files.split('\0')
+ else:
+ files = files.splitlines()
+ files = [f for f in files if f]
+ except EnvironmentError:
+ raise error.Abort(_("unable to read file list (%s)") % pat)
+ for k, p, source in _donormalize(files, default, root, cwd,
+ auditor, warn):
+ kindpats.append((k, p, pat))
+ continue
+ elif kind == 'include':
+ try:
+ fullpath = os.path.join(root, util.localpath(pat))
+ includepats = readpatternfile(fullpath, warn)
+ for k, p, source in _donormalize(includepats, default,
+ root, cwd, auditor, warn):
+ kindpats.append((k, p, source or pat))
+ except error.Abort as inst:
+ raise error.Abort('%s: %s' % (pat, inst[0]))
+ except IOError as inst:
+ if warn:
+ warn(_("skipping unreadable pattern file '%s': %s\n") %
+ (pat, inst.strerror))
+ continue
+ # else: re or relre - which cannot be normalized
+ kindpats.append((kind, pat, ''))
+ return kindpats
+
+class basematcher(object):
+
+ def __init__(self, root, cwd, badfn=None, relativeuipath=True):
self._root = root
self._cwd = cwd
- self._files = [] # exact files and roots of patterns
- self._anypats = bool(include or exclude)
- self._always = False
- self._pathrestricted = bool(include or exclude or patterns)
- self._warn = warn
-
- # roots are directories which are recursively included/excluded.
- self._includeroots = set()
- self._excluderoots = set()
- # dirs are directories which are non-recursively included.
- self._includedirs = set(['.'])
-
if badfn is not None:
self.bad = badfn
-
- matchfns = []
- if include:
- kindpats = self._normalize(include, 'glob', root, cwd, auditor)
- self.includepat, im = _buildmatch(ctx, kindpats, '(?:/|$)',
- listsubrepos, root)
- roots, dirs = _rootsanddirs(kindpats)
- self._includeroots.update(roots)
- self._includedirs.update(dirs)
- matchfns.append(im)
- if exclude:
- kindpats = self._normalize(exclude, 'glob', root, cwd, auditor)
- self.excludepat, em = _buildmatch(ctx, kindpats, '(?:/|$)',
- listsubrepos, root)
- if not _anypats(kindpats):
- # Only consider recursive excludes as such - if a non-recursive
- # exclude is used, we must still recurse into the excluded
- # directory, at least to find subdirectories. In such a case,
- # the regex still won't match the non-recursively-excluded
- # files.
- self._excluderoots.update(_roots(kindpats))
- matchfns.append(lambda f: not em(f))
- if exact:
- if isinstance(patterns, list):
- self._files = patterns
- else:
- self._files = list(patterns)
- matchfns.append(self.exact)
- elif patterns:
- kindpats = self._normalize(patterns, default, root, cwd, auditor)
- if not _kindpatsalwaysmatch(kindpats):
- self._files = _explicitfiles(kindpats)
- self._anypats = self._anypats or _anypats(kindpats)
- self.patternspat, pm = _buildmatch(ctx, kindpats, '$',
- listsubrepos, root)
- matchfns.append(pm)
-
- if not matchfns:
- m = util.always
- self._always = True
- elif len(matchfns) == 1:
- m = matchfns[0]
- else:
- def m(f):
- for matchfn in matchfns:
- if not matchfn(f):
- return False
- return True
-
- self.matchfn = m
- self._fileroots = set(self._files)
+ self._relativeuipath = relativeuipath
def __call__(self, fn):
return self.matchfn(fn)
def __iter__(self):
for f in self._files:
yield f
-
# Callbacks related to how the matcher is used by dirstate.walk.
# Subscribers to these events must monkeypatch the matcher object.
def bad(self, f, msg):
@@ -224,7 +268,11 @@
'''Convert repo path to a display path. If patterns or -I/-X were used
to create this matcher, the display path will be relative to cwd.
Otherwise it is relative to the root of the repo.'''
- return (self._pathrestricted and self.rel(f)) or self.abs(f)
+ return (self._relativeuipath and self.rel(f)) or self.abs(f)
+
+ @propertycache
+ def _files(self):
+ return []
def files(self):
'''Explicitly listed files or patterns or roots:
@@ -235,8 +283,15 @@
return self._files
@propertycache
- def _dirs(self):
- return set(util.dirs(self._fileroots)) | set(['.'])
+ def _fileset(self):
+ return set(self._files)
+
+ def exact(self, f):
+ '''Returns True if f is in .files().'''
+ return f in self._fileset
+
+ def matchfn(self, f):
+ return False
def visitdir(self, dir):
'''Decides whether a directory should be visited based on whether it
@@ -250,107 +305,269 @@
This function's behavior is undefined if it has returned False for
one of the dir's parent directories.
'''
- if self.prefix() and dir in self._fileroots:
- return 'all'
- if dir in self._excluderoots:
- return False
- if ((self._includeroots or self._includedirs != set(['.'])) and
- '.' not in self._includeroots and
- dir not in self._includeroots and
- dir not in self._includedirs and
- not any(parent in self._includeroots
- for parent in util.finddirs(dir))):
- return False
- return (not self._fileroots or
- '.' in self._fileroots or
- dir in self._fileroots or
- dir in self._dirs or
- any(parentdir in self._fileroots
- for parentdir in util.finddirs(dir)))
+ return True
+
+ def always(self):
+ '''Matcher will match everything and .files() will be empty --
+ optimization might be possible.'''
+ return False
- def exact(self, f):
- '''Returns True if f is in .files().'''
- return f in self._fileroots
+ def isexact(self):
+ '''Matcher will match exactly the list of files in .files() --
+ optimization might be possible.'''
+ return False
+
+ def prefix(self):
+ '''Matcher will match the paths in .files() recursively --
+ optimization might be possible.'''
+ return False
def anypats(self):
- '''Matcher uses patterns or include/exclude.'''
- return self._anypats
+ '''None of .always(), .isexact(), and .prefix() is true --
+ optimizations will be difficult.'''
+ return not self.always() and not self.isexact() and not self.prefix()
+
+class alwaysmatcher(basematcher):
+ '''Matches everything.'''
+
+ def __init__(self, root, cwd, badfn=None, relativeuipath=False):
+ super(alwaysmatcher, self).__init__(root, cwd, badfn,
+ relativeuipath=relativeuipath)
def always(self):
- '''Matcher will match everything and .files() will be empty
- - optimization might be possible and necessary.'''
- return self._always
+ return True
+
+ def matchfn(self, f):
+ return True
+
+ def visitdir(self, dir):
+ return 'all'
+
+ def __repr__(self):
+ return '<alwaysmatcher>'
- def ispartial(self):
- '''True if the matcher won't always match.
+class nevermatcher(basematcher):
+ '''Matches nothing.'''
+
+ def __init__(self, root, cwd, badfn=None):
+ super(nevermatcher, self).__init__(root, cwd, badfn)
+
+ # It's a little weird to say that the nevermatcher is an exact matcher
+ # or a prefix matcher, but it seems to make sense to let callers take
+ # fast paths based on either. There will be no exact matches, nor any
+ # prefixes (files() returns []), so fast paths iterating over them should
+ # be efficient (and correct).
+ def isexact(self):
+ return True
- Although it's just the inverse of _always in this implementation,
- an extension such as narrowhg might make it return something
- slightly different.'''
- return not self._always
+ def prefix(self):
+ return True
+
+ def __repr__(self):
+ return '<nevermatcher>'
+
+class patternmatcher(basematcher):
+
+ def __init__(self, root, cwd, kindpats, ctx=None, listsubrepos=False,
+ badfn=None):
+ super(patternmatcher, self).__init__(root, cwd, badfn)
- def isexact(self):
- return self.matchfn == self.exact
+ self._files = _explicitfiles(kindpats)
+ self._prefix = _prefix(kindpats)
+ self._pats, self.matchfn = _buildmatch(ctx, kindpats, '$', listsubrepos,
+ root)
+
+ @propertycache
+ def _dirs(self):
+ return set(util.dirs(self._fileset)) | {'.'}
+
+ def visitdir(self, dir):
+ if self._prefix and dir in self._fileset:
+ return 'all'
+ return ('.' in self._fileset or
+ dir in self._fileset or
+ dir in self._dirs or
+ any(parentdir in self._fileset
+ for parentdir in util.finddirs(dir)))
def prefix(self):
- return not self.always() and not self.isexact() and not self.anypats()
+ return self._prefix
+
+ def __repr__(self):
+ return ('<patternmatcher patterns=%r>' % self._pats)
+
+class includematcher(basematcher):
+
+ def __init__(self, root, cwd, kindpats, ctx=None, listsubrepos=False,
+ badfn=None):
+ super(includematcher, self).__init__(root, cwd, badfn)
+
+ self._pats, self.matchfn = _buildmatch(ctx, kindpats, '(?:/|$)',
+ listsubrepos, root)
+ self._prefix = _prefix(kindpats)
+ roots, dirs = _rootsanddirs(kindpats)
+ # roots are directories which are recursively included.
+ self._roots = set(roots)
+ # dirs are directories which are non-recursively included.
+ self._dirs = set(dirs)
+
+ def visitdir(self, dir):
+ if self._prefix and dir in self._roots:
+ return 'all'
+ return ('.' in self._roots or
+ dir in self._roots or
+ dir in self._dirs or
+ any(parentdir in self._roots
+ for parentdir in util.finddirs(dir)))
+
+ def __repr__(self):
+ return ('<includematcher includes=%r>' % self._pats)
+
+class exactmatcher(basematcher):
+ '''Matches the input files exactly. They are interpreted as paths, not
+ patterns (so no kind-prefixes).
+ '''
+
+ def __init__(self, root, cwd, files, badfn=None):
+ super(exactmatcher, self).__init__(root, cwd, badfn)
+
+ if isinstance(files, list):
+ self._files = files
+ else:
+ self._files = list(files)
+
+ matchfn = basematcher.exact
+
+ @propertycache
+ def _dirs(self):
+ return set(util.dirs(self._fileset)) | {'.'}
+
+ def visitdir(self, dir):
+ return dir in self._dirs
+
+ def isexact(self):
+ return True
+
+ def __repr__(self):
+ return ('<exactmatcher files=%r>' % self._files)
+
+class differencematcher(basematcher):
+ '''Composes two matchers by matching if the first matches and the second
+ does not. Well, almost... If the user provides a pattern like "-X foo foo",
+ Mercurial actually does match "foo" against that. That's because exact
+ matches are treated specially. So, since this differencematcher is used for
+ excludes, it needs to special-case exact matching.
+
+ The second matcher's non-matching-attributes (root, cwd, bad, explicitdir,
+ traversedir) are ignored.
+
+ TODO: If we want to keep the behavior described above for exact matches, we
+ should consider instead treating the above case something like this:
+ union(exact(foo), difference(pattern(foo), include(foo)))
+ '''
+ def __init__(self, m1, m2):
+ super(differencematcher, self).__init__(m1._root, m1._cwd)
+ self._m1 = m1
+ self._m2 = m2
+ self.bad = m1.bad
+ self.explicitdir = m1.explicitdir
+ self.traversedir = m1.traversedir
+
+ def matchfn(self, f):
+ return self._m1(f) and (not self._m2(f) or self._m1.exact(f))
- def _normalize(self, patterns, default, root, cwd, auditor):
- '''Convert 'kind:pat' from the patterns list to tuples with kind and
- normalized and rooted patterns and with listfiles expanded.'''
- kindpats = []
- for kind, pat in [_patsplit(p, default) for p in patterns]:
- if kind in ('glob', 'relpath'):
- pat = pathutil.canonpath(root, cwd, pat, auditor)
- elif kind in ('relglob', 'path', 'rootfilesin'):
- pat = util.normpath(pat)
- elif kind in ('listfile', 'listfile0'):
- try:
- files = util.readfile(pat)
- if kind == 'listfile0':
- files = files.split('\0')
- else:
- files = files.splitlines()
- files = [f for f in files if f]
- except EnvironmentError:
- raise error.Abort(_("unable to read file list (%s)") % pat)
- for k, p, source in self._normalize(files, default, root, cwd,
- auditor):
- kindpats.append((k, p, pat))
- continue
- elif kind == 'include':
- try:
- fullpath = os.path.join(root, util.localpath(pat))
- includepats = readpatternfile(fullpath, self._warn)
- for k, p, source in self._normalize(includepats, default,
- root, cwd, auditor):
- kindpats.append((k, p, source or pat))
- except error.Abort as inst:
- raise error.Abort('%s: %s' % (pat, inst[0]))
- except IOError as inst:
- if self._warn:
- self._warn(_("skipping unreadable pattern file "
- "'%s': %s\n") % (pat, inst.strerror))
- continue
- # else: re or relre - which cannot be normalized
- kindpats.append((kind, pat, ''))
- return kindpats
+ @propertycache
+ def _files(self):
+ if self.isexact():
+ return [f for f in self._m1.files() if self(f)]
+ # If m1 is not an exact matcher, we can't easily figure out the set of
+ # files, because its files() are not always files. For example, if
+        # m1 is "path:dir" and m2 is "rootfilesin:.", we don't
+ # want to remove "dir" from the set even though it would match m2,
+ # because the "dir" in m1 may not be a file.
+ return self._m1.files()
+
+ def visitdir(self, dir):
+ if self._m2.visitdir(dir) == 'all':
+ # There's a bug here: If m1 matches file 'dir/file' and m2 excludes
+ # 'dir' (recursively), we should still visit 'dir' due to the
+ # exception we have for exact matches.
+ return False
+ return bool(self._m1.visitdir(dir))
+
+ def isexact(self):
+ return self._m1.isexact()
+
+ def __repr__(self):
+ return ('<differencematcher m1=%r, m2=%r>' % (self._m1, self._m2))
+
+def intersectmatchers(m1, m2):
+ '''Composes two matchers by matching if both of them match.
-def exact(root, cwd, files, badfn=None):
- return match(root, cwd, files, exact=True, badfn=badfn)
+ The second matcher's non-matching-attributes (root, cwd, bad, explicitdir,
+ traversedir) are ignored.
+ '''
+ if m1 is None or m2 is None:
+ return m1 or m2
+ if m1.always():
+ m = copy.copy(m2)
+ # TODO: Consider encapsulating these things in a class so there's only
+ # one thing to copy from m1.
+ m.bad = m1.bad
+ m.explicitdir = m1.explicitdir
+ m.traversedir = m1.traversedir
+ m.abs = m1.abs
+ m.rel = m1.rel
+ m._relativeuipath |= m1._relativeuipath
+ return m
+ if m2.always():
+ m = copy.copy(m1)
+ m._relativeuipath |= m2._relativeuipath
+ return m
+ return intersectionmatcher(m1, m2)
-def always(root, cwd):
- return match(root, cwd, [])
+class intersectionmatcher(basematcher):
+ def __init__(self, m1, m2):
+ super(intersectionmatcher, self).__init__(m1._root, m1._cwd)
+ self._m1 = m1
+ self._m2 = m2
+ self.bad = m1.bad
+ self.explicitdir = m1.explicitdir
+ self.traversedir = m1.traversedir
-def badmatch(match, badfn):
- """Make a copy of the given matcher, replacing its bad method with the given
- one.
- """
- m = copy.copy(match)
- m.bad = badfn
- return m
+ @propertycache
+ def _files(self):
+ if self.isexact():
+ m1, m2 = self._m1, self._m2
+ if not m1.isexact():
+ m1, m2 = m2, m1
+ return [f for f in m1.files() if m2(f)]
+        # If neither m1 nor m2 is an exact matcher, we can't easily intersect
+ # the set of files, because their files() are not always files. For
+ # example, if intersecting a matcher "-I glob:foo.txt" with matcher of
+ # "path:dir2", we don't want to remove "dir2" from the set.
+ return self._m1.files() + self._m2.files()
+
+ def matchfn(self, f):
+ return self._m1(f) and self._m2(f)
-class subdirmatcher(match):
+ def visitdir(self, dir):
+ visit1 = self._m1.visitdir(dir)
+ if visit1 == 'all':
+ return self._m2.visitdir(dir)
+ # bool() because visit1=True + visit2='all' should not be 'all'
+ return bool(visit1 and self._m2.visitdir(dir))
+
+ def always(self):
+ return self._m1.always() and self._m2.always()
+
+ def isexact(self):
+ return self._m1.isexact() or self._m2.isexact()
+
+ def __repr__(self):
+ return ('<intersectionmatcher m1=%r, m2=%r>' % (self._m1, self._m2))
+
+class subdirmatcher(basematcher):
"""Adapt a matcher to work on a subdirectory only.
The paths are remapped to remove/insert the path as needed:
@@ -381,79 +598,86 @@
"""
def __init__(self, path, matcher):
- self._root = matcher._root
- self._cwd = matcher._cwd
+ super(subdirmatcher, self).__init__(matcher._root, matcher._cwd)
self._path = path
self._matcher = matcher
- self._always = matcher._always
- self._pathrestricted = matcher._pathrestricted
+ self._always = matcher.always()
self._files = [f[len(path) + 1:] for f in matcher._files
if f.startswith(path + "/")]
- # If the parent repo had a path to this subrepo and no patterns are
- # specified, this submatcher always matches.
- if not self._always and not matcher._anypats:
+ # If the parent repo had a path to this subrepo and the matcher is
+ # a prefix matcher, this submatcher always matches.
+ if matcher.prefix():
self._always = any(f == path for f in matcher._files)
- self._anypats = matcher._anypats
- # Some information is lost in the superclass's constructor, so we
- # can not accurately create the matching function for the subdirectory
- # from the inputs. Instead, we override matchfn() and visitdir() to
- # call the original matcher with the subdirectory path prepended.
- self.matchfn = lambda fn: matcher.matchfn(self._path + "/" + fn)
- def visitdir(dir):
- if dir == '.':
- return matcher.visitdir(self._path)
- return matcher.visitdir(self._path + "/" + dir)
- self.visitdir = visitdir
- self._fileroots = set(self._files)
+ def bad(self, f, msg):
+ self._matcher.bad(self._path + "/" + f, msg)
def abs(self, f):
return self._matcher.abs(self._path + "/" + f)
- def bad(self, f, msg):
- self._matcher.bad(self._path + "/" + f, msg)
-
def rel(self, f):
return self._matcher.rel(self._path + "/" + f)
-class icasefsmatcher(match):
- """A matcher for wdir on case insensitive filesystems, which normalizes the
- given patterns to the case in the filesystem.
+ def uipath(self, f):
+ return self._matcher.uipath(self._path + "/" + f)
+
+ def matchfn(self, f):
+ # Some information is lost in the superclass's constructor, so we
+ # can not accurately create the matching function for the subdirectory
+ # from the inputs. Instead, we override matchfn() and visitdir() to
+ # call the original matcher with the subdirectory path prepended.
+ return self._matcher.matchfn(self._path + "/" + f)
+
+ def visitdir(self, dir):
+ if dir == '.':
+ dir = self._path
+ else:
+ dir = self._path + "/" + dir
+ return self._matcher.visitdir(dir)
+
+ def always(self):
+ return self._always
+
+ def prefix(self):
+ return self._matcher.prefix() and not self._always
+
+ def __repr__(self):
+ return ('<subdirmatcher path=%r, matcher=%r>' %
+ (self._path, self._matcher))
+
+class unionmatcher(basematcher):
+ """A matcher that is the union of several matchers.
+
+ The non-matching-attributes (root, cwd, bad, explicitdir, traversedir) are
+ taken from the first matcher.
"""
- def __init__(self, root, cwd, patterns, include, exclude, default, auditor,
- ctx, listsubrepos=False, badfn=None):
- init = super(icasefsmatcher, self).__init__
- self._dirstate = ctx.repo().dirstate
- self._dsnormalize = self._dirstate.normalize
-
- init(root, cwd, patterns, include, exclude, default, auditor=auditor,
- ctx=ctx, listsubrepos=listsubrepos, badfn=badfn)
-
- # m.exact(file) must be based off of the actual user input, otherwise
- # inexact case matches are treated as exact, and not noted without -v.
- if self._files:
- roots, dirs = _rootsanddirs(self._kp)
- self._fileroots = set(roots)
- self._fileroots.update(dirs)
+ def __init__(self, matchers):
+ m1 = matchers[0]
+ super(unionmatcher, self).__init__(m1._root, m1._cwd)
+ self.explicitdir = m1.explicitdir
+ self.traversedir = m1.traversedir
+ self._matchers = matchers
- def _normalize(self, patterns, default, root, cwd, auditor):
- self._kp = super(icasefsmatcher, self)._normalize(patterns, default,
- root, cwd, auditor)
- kindpats = []
- for kind, pats, source in self._kp:
- if kind not in ('re', 'relre'): # regex can't be normalized
- p = pats
- pats = self._dsnormalize(pats)
+ def matchfn(self, f):
+ for match in self._matchers:
+ if match(f):
+ return True
+ return False
- # Preserve the original to handle a case only rename.
- if p != pats and p in self._dirstate:
- kindpats.append((kind, p, source))
+ def visitdir(self, dir):
+ r = False
+ for m in self._matchers:
+ v = m.visitdir(dir)
+ if v == 'all':
+ return v
+ r |= v
+ return r
- kindpats.append((kind, pats, source))
- return kindpats
+ def __repr__(self):
+ return ('<unionmatcher matchers=%r>' % self._matchers)
def patkind(pattern, default=None):
'''If pattern is 'kind:pat' with a known kind, return kind.'''
@@ -555,10 +779,10 @@
return ''
if kind == 're':
return pat
- if kind == 'path':
+ if kind in ('path', 'relpath'):
if pat == '.':
return ''
- return '^' + util.re.escape(pat) + '(?:/|$)'
+ return util.re.escape(pat) + '(?:/|$)'
if kind == 'rootfilesin':
if pat == '.':
escaped = ''
@@ -566,11 +790,9 @@
# Pattern is a directory name.
escaped = util.re.escape(pat) + '/'
# Anything after the pattern must be a non-directory.
- return '^' + escaped + '[^/]+$'
+ return escaped + '[^/]+$'
if kind == 'relglob':
return '(?:|.*/)' + _globre(pat) + globsuffix
- if kind == 'relpath':
- return util.re.escape(pat) + '(?:/|$)'
if kind == 'relre':
if pat.startswith('^'):
return pat
@@ -584,10 +806,17 @@
subincludes, kindpats = _expandsubinclude(kindpats, root)
if subincludes:
+ submatchers = {}
def matchsubinclude(f):
- for prefix, mf in subincludes:
- if f.startswith(prefix) and mf(f[len(prefix):]):
- return True
+ for prefix, matcherargs in subincludes:
+ if f.startswith(prefix):
+ mf = submatchers.get(prefix)
+ if mf is None:
+ mf = match(*matcherargs)
+ submatchers[prefix] = mf
+
+ if mf(f[len(prefix):]):
+ return True
return False
matchfuncs.append(matchsubinclude)
@@ -677,16 +906,16 @@
>>> _rootsanddirs(\
[('glob', 'g/h/*', ''), ('glob', 'g/h', ''), ('glob', 'g*', '')])
- (['g/h', 'g/h', '.'], ['g'])
+ (['g/h', 'g/h', '.'], ['g', '.'])
>>> _rootsanddirs(\
[('rootfilesin', 'g/h', ''), ('rootfilesin', '', '')])
- ([], ['g/h', '.', 'g'])
+ ([], ['g/h', '.', 'g', '.'])
>>> _rootsanddirs(\
[('relpath', 'r', ''), ('path', 'p/p', ''), ('path', '', '')])
- (['r', 'p/p', '.'], ['p'])
+ (['r', 'p/p', '.'], ['p', '.'])
>>> _rootsanddirs(\
[('relglob', 'rg*', ''), ('re', 're/', ''), ('relre', 'rr', '')])
- (['.', '.', '.'], [])
+ (['.', '.', '.'], ['.'])
'''
r, d = _patternrootsanddirs(kindpats)
@@ -694,6 +923,8 @@
# scanned to get to either the roots or the other exact directories.
d.extend(util.dirs(d))
d.extend(util.dirs(r))
+ # util.dirs() does not include the root directory, so add it manually
+ d.append('.')
return r, d
@@ -710,10 +941,12 @@
filable = [kp for kp in kindpats if kp[0] not in ('rootfilesin',)]
return _roots(filable)
-def _anypats(kindpats):
+def _prefix(kindpats):
+ '''Whether all the patterns match a prefix (i.e. recursively)'''
for kind, pat, source in kindpats:
- if kind in ('glob', 're', 'relglob', 'relre', 'set', 'rootfilesin'):
- return True
+ if kind not in ('path', 'relpath'):
+ return False
+ return True
_commentre = None
--- a/mercurial/mdiff.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/mdiff.py Wed Jul 19 07:51:41 2017 -0500
@@ -13,14 +13,21 @@
from .i18n import _
from . import (
- base85,
- bdiff,
error,
- mpatch,
+ policy,
pycompat,
util,
)
+bdiff = policy.importmod(r'bdiff')
+mpatch = policy.importmod(r'mpatch')
+
+blocks = bdiff.blocks
+fixws = bdiff.fixws
+patches = mpatch.patches
+patchedsize = mpatch.patchedsize
+textdiff = bdiff.bdiff
+
def splitnewlines(text):
'''like str.splitlines, but only split on newlines.'''
lines = [l + '\n' for l in text.split('\n')]
@@ -77,6 +84,7 @@
def copy(self, **kwargs):
opts = dict((k, getattr(self, k)) for k in self.defaults)
+ opts = pycompat.strkwargs(opts)
opts.update(kwargs)
return diffopts(**opts)
@@ -426,7 +434,7 @@
l = chr(ord('A') + l - 1)
else:
l = chr(l - 26 + ord('a') - 1)
- return '%c%s\n' % (l, base85.b85encode(line, True))
+ return '%c%s\n' % (l, util.b85encode(line, True))
def chunk(text, csize=52):
l = len(text)
@@ -478,7 +486,3 @@
def replacediffheader(oldlen, newlen):
return struct.pack(">lll", 0, oldlen, newlen)
-
-patches = mpatch.patches
-patchedsize = mpatch.patchedsize
-textdiff = bdiff.bdiff
--- a/mercurial/merge.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/merge.py Wed Jul 19 07:51:41 2017 -0500
@@ -9,7 +9,6 @@
import errno
import hashlib
-import os
import shutil
import struct
@@ -28,7 +27,7 @@
error,
filemerge,
match as matchmod,
- obsolete,
+ obsutil,
pycompat,
scmutil,
subrepo,
@@ -414,7 +413,7 @@
if fcl.isabsent():
hash = nullhex
else:
- hash = hashlib.sha1(fcl.path()).hexdigest()
+ hash = hex(hashlib.sha1(fcl.path()).digest())
self._repo.vfs.write('merge/' + hash, fcl.data())
self._state[fd] = ['u', hash, fcl.path(),
fca.path(), hex(fca.filenode()),
@@ -445,7 +444,7 @@
def unresolved(self):
"""Obtain the paths of unresolved files."""
- for f, entry in self._state.items():
+ for f, entry in self._state.iteritems():
if entry[0] == 'u':
yield f
@@ -492,10 +491,10 @@
# restore local
if hash != nullhex:
f = self._repo.vfs('merge/' + hash)
- self._repo.wwrite(dfile, f.read(), flags)
+ wctx[dfile].write(f.read(), flags)
f.close()
else:
- self._repo.wvfs.unlinkpath(dfile, ignoremissing=True)
+ wctx[dfile].remove(ignoremissing=True)
complete, r, deleted = filemerge.premerge(self._repo, self._local,
lfile, fcd, fco, fca,
labels=self._labels)
@@ -568,8 +567,7 @@
def unresolvedcount(self):
"""get unresolved count for this merge (persistent)"""
- return len([True for f, entry in self._state.iteritems()
- if entry[0] == 'u'])
+ return len(list(self.unresolved()))
def actions(self):
"""return lists of actions to perform on the dirstate"""
@@ -786,7 +784,7 @@
return True
def manifestmerge(repo, wctx, p2, pa, branchmerge, force, matcher,
- acceptremote, followcopies):
+ acceptremote, followcopies, forcefulldiff=False):
"""
Merge wctx and p2 with ancestor pa and generate merge action list
@@ -801,15 +799,18 @@
# manifests fetched in order are going to be faster, so prime the caches
[x.manifest() for x in
- sorted(wctx.parents() + [p2, pa], key=lambda x: x.rev())]
+ sorted(wctx.parents() + [p2, pa], key=scmutil.intrev)]
if followcopies:
ret = copies.mergecopies(repo, wctx, p2, pa)
copy, movewithdir, diverge, renamedelete, dirmove = ret
+ boolbm = pycompat.bytestr(bool(branchmerge))
+ boolf = pycompat.bytestr(bool(force))
+ boolm = pycompat.bytestr(bool(matcher))
repo.ui.note(_("resolving manifests\n"))
repo.ui.debug(" branchmerge: %s, force: %s, partial: %s\n"
- % (bool(branchmerge), bool(force), bool(matcher)))
+ % (boolbm, boolf, boolm))
repo.ui.debug(" ancestor: %s, local: %s, remote: %s\n" % (pa, wctx, p2))
m1, m2, ma = wctx.manifest(), p2.manifest(), pa.manifest()
@@ -821,6 +822,25 @@
if any(wctx.sub(s).dirty() for s in wctx.substate):
m1['.hgsubstate'] = modifiednodeid
+ # Don't use m2-vs-ma optimization if:
+ # - ma is the same as m1 or m2, which we're just going to diff again later
+ # - The caller specifically asks for a full diff, which is useful during bid
+ # merge.
+ if (pa not in ([wctx, p2] + wctx.parents()) and not forcefulldiff):
+ # Identify which files are relevant to the merge, so we can limit the
+ # total m1-vs-m2 diff to just those files. This has significant
+ # performance benefits in large repositories.
+ relevantfiles = set(ma.diff(m2).keys())
+
+ # For copied and moved files, we need to add the source file too.
+ for copykey, copyvalue in copy.iteritems():
+ if copyvalue in relevantfiles:
+ relevantfiles.add(copykey)
+ for movedirkey in movewithdir:
+ relevantfiles.add(movedirkey)
+ filesmatcher = scmutil.matchfiles(repo, relevantfiles)
+ matcher = matchmod.intersectmatchers(matcher, filesmatcher)
+
diff = m1.diff(m2, match=matcher)
if matcher is None:
@@ -955,7 +975,10 @@
def calculateupdates(repo, wctx, mctx, ancestors, branchmerge, force,
acceptremote, followcopies, matcher=None,
mergeforce=False):
- "Calculate the actions needed to merge mctx into wctx using ancestors"
+ """Calculate the actions needed to merge mctx into wctx using ancestors"""
+ # Avoid cycle.
+ from . import sparse
+
if len(ancestors) == 1: # default
actions, diverge, renamedelete = manifestmerge(
repo, wctx, mctx, ancestors[0], branchmerge, force, matcher,
@@ -974,7 +997,7 @@
repo.ui.note(_('\ncalculating bids for ancestor %s\n') % ancestor)
actions, diverge1, renamedelete1 = manifestmerge(
repo, wctx, mctx, ancestor, branchmerge, force, matcher,
- acceptremote, followcopies)
+ acceptremote, followcopies, forcefulldiff=True)
_checkunknownfiles(repo, wctx, mctx, force, actions, mergeforce)
# Track the shortest set of warning on the theory that bid
@@ -1054,16 +1077,17 @@
fractions = _forgetremoved(wctx, mctx, branchmerge)
actions.update(fractions)
- return actions, diverge, renamedelete
+ prunedactions = sparse.filterupdatesactions(repo, wctx, mctx, branchmerge,
+ actions)
-def batchremove(repo, actions):
+ return prunedactions, diverge, renamedelete
+
+def batchremove(repo, wctx, actions):
"""apply removes to the working directory
yields tuples for progress updates
"""
verbose = repo.ui.verbose
- unlinkpath = repo.wvfs.unlinkpath
- audit = repo.wvfs.audit
try:
cwd = pycompat.getcwd()
except OSError as err:
@@ -1075,9 +1099,9 @@
repo.ui.debug(" %s: %s -> r\n" % (f, msg))
if verbose:
repo.ui.note(_("removing %s\n") % f)
- audit(f)
+ wctx[f].audit()
try:
- unlinkpath(f, ignoremissing=True)
+ wctx[f].remove(ignoremissing=True)
except OSError as inst:
repo.ui.warn(_("update failed to remove %s: %s!\n") %
(f, inst.strerror))
@@ -1100,7 +1124,7 @@
"(consider changing to repo root: %s)\n") %
repo.root)
-def batchget(repo, mctx, actions):
+def batchget(repo, mctx, wctx, actions):
"""apply gets to the working directory
mctx is the context to get from
@@ -1109,7 +1133,6 @@
"""
verbose = repo.ui.verbose
fctx = mctx.filectx
- wwrite = repo.wwrite
ui = repo.ui
i = 0
with repo.wvfs.backgroundclosing(ui, expectedcount=len(actions)):
@@ -1130,7 +1153,7 @@
if repo.wvfs.isdir(f) and not repo.wvfs.islink(f):
repo.wvfs.removedirs(f)
- wwrite(f, fctx(f).data(), flags, backgroundclose=True)
+ wctx[f].write(fctx(f).data(), flags, backgroundclose=True)
if i == 100:
yield i, f
i = 0
@@ -1181,17 +1204,16 @@
if f1 != f and move:
moves.append(f1)
- audit = repo.wvfs.audit
_updating = _('updating')
_files = _('files')
progress = repo.ui.progress
# remove renamed files after safely stored
for f in moves:
- if os.path.lexists(repo.wjoin(f)):
+ if wctx[f].lexists():
repo.ui.debug("removing %s\n" % f)
- audit(f)
- repo.wvfs.unlinkpath(f)
+ wctx[f].audit()
+ wctx[f].remove()
numupdates = sum(len(l) for m, l in actions.items() if m != 'k')
@@ -1200,14 +1222,16 @@
# remove in parallel (must come first)
z = 0
- prog = worker.worker(repo.ui, 0.001, batchremove, (repo,), actions['r'])
+ prog = worker.worker(repo.ui, 0.001, batchremove, (repo, wctx),
+ actions['r'])
for i, item in prog:
z += i
progress(_updating, z, item=item, total=numupdates, unit=_files)
removed = len(actions['r'])
# get in parallel
- prog = worker.worker(repo.ui, 0.001, batchget, (repo, mctx), actions['g'])
+ prog = worker.worker(repo.ui, 0.001, batchget, (repo, mctx, wctx),
+ actions['g'])
for i, item in prog:
z += i
progress(_updating, z, item=item, total=numupdates, unit=_files)
@@ -1246,9 +1270,9 @@
progress(_updating, z, item=f, total=numupdates, unit=_files)
f0, flags = args
repo.ui.note(_("moving %s to %s\n") % (f0, f))
- audit(f)
- repo.wwrite(f, wctx.filectx(f0).data(), flags)
- repo.wvfs.unlinkpath(f0)
+ wctx[f].audit()
+ wctx[f].write(wctx.filectx(f0).data(), flags)
+ wctx[f0].remove()
updated += 1
# local directory rename, get
@@ -1258,7 +1282,7 @@
progress(_updating, z, item=f, total=numupdates, unit=_files)
f0, flags = args
repo.ui.note(_("getting %s to %s\n") % (f0, f))
- repo.wwrite(f, mctx.filectx(f0).data(), flags)
+ wctx[f].write(mctx.filectx(f0).data(), flags)
updated += 1
# exec
@@ -1267,8 +1291,8 @@
z += 1
progress(_updating, z, item=f, total=numupdates, unit=_files)
flags, = args
- audit(f)
- util.setflags(repo.wjoin(f), 'l' in flags, 'x' in flags)
+ wctx[f].audit()
+ wctx[f].setflags('l' in flags, 'x' in flags)
updated += 1
# the ordering is important here -- ms.mergedriver will raise if the merge
@@ -1301,7 +1325,7 @@
subrepo.submerge(repo, wctx, mctx, wctx.ancestor(mctx),
overwrite, labels)
continue
- audit(f)
+ wctx[f].audit()
complete, r = ms.preresolve(f, wctx)
if not complete:
numupdates += 1
@@ -1496,6 +1520,8 @@
Return the same tuple as applyupdates().
"""
+ # Avoid cycle.
+ from . import sparse
# This function used to find the default destination if node was None, but
# that's now in destutil.py.
@@ -1568,8 +1594,8 @@
dirty = wc.dirty(missing=True)
if dirty:
# Branching is a bit strange to ensure we do the minimal
- # amount of call to obsolete.foreground.
- foreground = obsolete.foreground(repo, [p1.node()])
+ # amount of call to obsutil.foreground.
+ foreground = obsutil.foreground(repo, [p1.node()])
# note: the <node> variable contains a random identifier
if repo[node].node() in foreground:
pass # allow updating to successors
@@ -1587,7 +1613,7 @@
pas = [p1]
# deprecated config: merge.followcopies
- followcopies = repo.ui.configbool('merge', 'followcopies', True)
+ followcopies = repo.ui.configbool('merge', 'followcopies')
if overwrite:
followcopies = False
elif not pas[0]:
@@ -1676,15 +1702,19 @@
stats = applyupdates(repo, actions, wc, p2, overwrite, labels=labels)
if not partial:
- repo.dirstate.beginparentchange()
- repo.setparents(fp1, fp2)
- recordupdates(repo, actions, branchmerge)
- # update completed, clear state
- util.unlink(repo.vfs.join('updatestate'))
+ with repo.dirstate.parentchange():
+ repo.setparents(fp1, fp2)
+ recordupdates(repo, actions, branchmerge)
+ # update completed, clear state
+ util.unlink(repo.vfs.join('updatestate'))
- if not branchmerge:
- repo.dirstate.setbranch(p2.branch())
- repo.dirstate.endparentchange()
+ if not branchmerge:
+ repo.dirstate.setbranch(p2.branch())
+
+ # If we're updating to a location, clean up any stale temporary includes
+ # (ex: this happens during hg rebase --abort).
+ if not branchmerge:
+ sparse.prunetemporaryincludes(repo)
if not partial:
repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
@@ -1722,10 +1752,9 @@
parents.remove(pctx)
pother = parents[0].node()
- repo.dirstate.beginparentchange()
- repo.setparents(repo['.'].node(), pother)
- repo.dirstate.write(repo.currenttransaction())
- # fix up dirstate for copies and renames
- copies.duplicatecopies(repo, ctx.rev(), pctx.rev())
- repo.dirstate.endparentchange()
+ with repo.dirstate.parentchange():
+ repo.setparents(repo['.'].node(), pother)
+ repo.dirstate.write(repo.currenttransaction())
+ # fix up dirstate for copies and renames
+ copies.duplicatecopies(repo, ctx.rev(), pctx.rev())
return stats
--- a/mercurial/minirst.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/minirst.py Wed Jul 19 07:51:41 2017 -0500
@@ -315,7 +315,8 @@
# column markers are ASCII so we can calculate column
# position in bytes
columns = [x for x in xrange(len(div))
- if div[x] == '=' and (x == 0 or div[x - 1] == ' ')]
+ if div[x:x + 1] == '=' and (x == 0 or
+ div[x - 1:x] == ' ')]
rows = []
for l in block['lines'][1:-1]:
if l == div:
@@ -356,7 +357,7 @@
len(block['lines']) == 2 and
encoding.colwidth(block['lines'][0]) == len(block['lines'][1]) and
_sectionre.match(block['lines'][1])):
- block['underline'] = block['lines'][1][0]
+ block['underline'] = block['lines'][1][0:1]
block['type'] = 'section'
del block['lines'][1]
return blocks
@@ -452,7 +453,7 @@
}
def formatoption(block, width):
- desc = ' '.join(map(str.strip, block['lines']))
+ desc = ' '.join(map(bytes.strip, block['lines']))
colwidth = encoding.colwidth(block['optstr'])
usablewidth = width - 1
hanging = block['optstrwidth']
@@ -474,7 +475,7 @@
hang = len(block['lines'][-1]) - len(block['lines'][-1].lstrip())
defindent = indent + hang * ' '
- text = ' '.join(map(str.strip, block['lines']))
+ text = ' '.join(map(bytes.strip, block['lines']))
return '%s\n%s\n' % (indent + admonition,
util.wrap(text, width=width,
initindent=defindent,
@@ -512,7 +513,7 @@
term = indent + block['lines'][0]
hang = len(block['lines'][-1]) - len(block['lines'][-1].lstrip())
defindent = indent + hang * ' '
- text = ' '.join(map(str.strip, block['lines'][1:]))
+ text = ' '.join(map(bytes.strip, block['lines'][1:]))
return '%s\n%s\n' % (term, util.wrap(text, width=width,
initindent=defindent,
hangindent=defindent))
@@ -567,7 +568,7 @@
if btype == 'admonition':
admonition = escape(_admonitiontitles[b['admonitiontitle']])
- text = escape(' '.join(map(str.strip, lines)))
+ text = escape(' '.join(map(bytes.strip, lines)))
out.append('<p>\n<b>%s</b> %s\n</p>\n' % (admonition, text))
elif btype == 'paragraph':
out.append('<p>\n%s\n</p>\n' % escape('\n'.join(lines)))
@@ -597,7 +598,7 @@
elif btype == 'definition':
openlist('dl', level)
term = escape(lines[0])
- text = escape(' '.join(map(str.strip, lines[1:])))
+ text = escape(' '.join(map(bytes.strip, lines[1:])))
out.append(' <dt>%s\n <dd>%s\n' % (term, text))
elif btype == 'bullet':
bullet, head = lines[0].split(' ', 1)
@@ -609,12 +610,12 @@
elif btype == 'field':
openlist('dl', level)
key = escape(b['key'])
- text = escape(' '.join(map(str.strip, lines)))
+ text = escape(' '.join(map(bytes.strip, lines)))
out.append(' <dt>%s\n <dd>%s\n' % (key, text))
elif btype == 'option':
openlist('dl', level)
opt = escape(b['optstr'])
- desc = escape(' '.join(map(str.strip, lines)))
+ desc = escape(' '.join(map(bytes.strip, lines)))
out.append(' <dt>%s\n <dd>%s\n' % (opt, desc))
# close lists if indent level of next block is lower
--- a/mercurial/mpatch_module.c Wed Jul 05 11:24:22 2017 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,195 +0,0 @@
-/*
- mpatch.c - efficient binary patching for Mercurial
-
- This implements a patch algorithm that's O(m + nlog n) where m is the
- size of the output and n is the number of patches.
-
- Given a list of binary patches, it unpacks each into a hunk list,
- then combines the hunk lists with a treewise recursion to form a
- single hunk list. This hunk list is then applied to the original
- text.
-
- The text (or binary) fragments are copied directly from their source
- Python objects into a preallocated output string to avoid the
- allocation of intermediate Python objects. Working memory is about 2x
- the total number of hunks.
-
- Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
-
- This software may be used and distributed according to the terms
- of the GNU General Public License, incorporated herein by reference.
-*/
-
-#define PY_SSIZE_T_CLEAN
-#include <Python.h>
-#include <stdlib.h>
-#include <string.h>
-
-#include "util.h"
-#include "bitmanipulation.h"
-#include "compat.h"
-#include "mpatch.h"
-
-static char mpatch_doc[] = "Efficient binary patching.";
-static PyObject *mpatch_Error;
-
-static void setpyerr(int r)
-{
- switch (r) {
- case MPATCH_ERR_NO_MEM:
- PyErr_NoMemory();
- break;
- case MPATCH_ERR_CANNOT_BE_DECODED:
- PyErr_SetString(mpatch_Error, "patch cannot be decoded");
- break;
- case MPATCH_ERR_INVALID_PATCH:
- PyErr_SetString(mpatch_Error, "invalid patch");
- break;
- }
-}
-
-struct mpatch_flist *cpygetitem(void *bins, ssize_t pos)
-{
- const char *buffer;
- struct mpatch_flist *res;
- ssize_t blen;
- int r;
-
- PyObject *tmp = PyList_GetItem((PyObject*)bins, pos);
- if (!tmp)
- return NULL;
- if (PyObject_AsCharBuffer(tmp, &buffer, (Py_ssize_t*)&blen))
- return NULL;
- if ((r = mpatch_decode(buffer, blen, &res)) < 0) {
- if (!PyErr_Occurred())
- setpyerr(r);
- return NULL;
- }
- return res;
-}
-
-static PyObject *
-patches(PyObject *self, PyObject *args)
-{
- PyObject *text, *bins, *result;
- struct mpatch_flist *patch;
- const char *in;
- int r = 0;
- char *out;
- Py_ssize_t len, outlen, inlen;
-
- if (!PyArg_ParseTuple(args, "OO:mpatch", &text, &bins))
- return NULL;
-
- len = PyList_Size(bins);
- if (!len) {
- /* nothing to do */
- Py_INCREF(text);
- return text;
- }
-
- if (PyObject_AsCharBuffer(text, &in, &inlen))
- return NULL;
-
- patch = mpatch_fold(bins, cpygetitem, 0, len);
- if (!patch) { /* error already set or memory error */
- if (!PyErr_Occurred())
- PyErr_NoMemory();
- return NULL;
- }
-
- outlen = mpatch_calcsize(inlen, patch);
- if (outlen < 0) {
- r = (int)outlen;
- result = NULL;
- goto cleanup;
- }
- result = PyBytes_FromStringAndSize(NULL, outlen);
- if (!result) {
- result = NULL;
- goto cleanup;
- }
- out = PyBytes_AsString(result);
- if ((r = mpatch_apply(out, in, inlen, patch)) < 0) {
- Py_DECREF(result);
- result = NULL;
- }
-cleanup:
- mpatch_lfree(patch);
- if (!result && !PyErr_Occurred())
- setpyerr(r);
- return result;
-}
-
-/* calculate size of a patched file directly */
-static PyObject *
-patchedsize(PyObject *self, PyObject *args)
-{
- long orig, start, end, len, outlen = 0, last = 0, pos = 0;
- Py_ssize_t patchlen;
- char *bin;
-
- if (!PyArg_ParseTuple(args, "ls#", &orig, &bin, &patchlen))
- return NULL;
-
- while (pos >= 0 && pos < patchlen) {
- start = getbe32(bin + pos);
- end = getbe32(bin + pos + 4);
- len = getbe32(bin + pos + 8);
- if (start > end)
- break; /* sanity check */
- pos += 12 + len;
- outlen += start - last;
- last = end;
- outlen += len;
- }
-
- if (pos != patchlen) {
- if (!PyErr_Occurred())
- PyErr_SetString(mpatch_Error, "patch cannot be decoded");
- return NULL;
- }
-
- outlen += orig - last;
- return Py_BuildValue("l", outlen);
-}
-
-static PyMethodDef methods[] = {
- {"patches", patches, METH_VARARGS, "apply a series of patches\n"},
- {"patchedsize", patchedsize, METH_VARARGS, "calculed patched size\n"},
- {NULL, NULL}
-};
-
-#ifdef IS_PY3K
-static struct PyModuleDef mpatch_module = {
- PyModuleDef_HEAD_INIT,
- "mpatch",
- mpatch_doc,
- -1,
- methods
-};
-
-PyMODINIT_FUNC PyInit_mpatch(void)
-{
- PyObject *m;
-
- m = PyModule_Create(&mpatch_module);
- if (m == NULL)
- return NULL;
-
- mpatch_Error = PyErr_NewException("mercurial.mpatch.mpatchError",
- NULL, NULL);
- Py_INCREF(mpatch_Error);
- PyModule_AddObject(m, "mpatchError", mpatch_Error);
-
- return m;
-}
-#else
-PyMODINIT_FUNC
-initmpatch(void)
-{
- Py_InitModule3("mpatch", methods, mpatch_doc);
- mpatch_Error = PyErr_NewException("mercurial.mpatch.mpatchError",
- NULL, NULL);
-}
-#endif
--- a/mercurial/namespaces.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/namespaces.py Wed Jul 19 07:51:41 2017 -0500
@@ -35,7 +35,8 @@
# i18n: column positioning for "hg log"
logfmt=_("bookmark: %s\n"),
listnames=bmknames,
- namemap=bmknamemap, nodemap=bmknodemap)
+ namemap=bmknamemap, nodemap=bmknodemap,
+ builtin=True)
self.addnamespace(n)
tagnames = lambda repo: [t for t, n in repo.tagslist()]
@@ -46,7 +47,8 @@
logfmt=_("tag: %s\n"),
listnames=tagnames,
namemap=tagnamemap, nodemap=tagnodemap,
- deprecated=set(['tip']))
+ deprecated={'tip'},
+ builtin=True)
self.addnamespace(n)
bnames = lambda repo: repo.branchmap().keys()
@@ -56,7 +58,8 @@
# i18n: column positioning for "hg log"
logfmt=_("branch: %s\n"),
listnames=bnames,
- namemap=bnamemap, nodemap=bnodemap)
+ namemap=bnamemap, nodemap=bnodemap,
+ builtin=True)
self.addnamespace(n)
def __getitem__(self, namespace):
@@ -66,9 +69,11 @@
def __iter__(self):
return self._names.__iter__()
- def iteritems(self):
+ def items(self):
return self._names.iteritems()
+ iteritems = items
+
def addnamespace(self, namespace, order=None):
"""register a namespace
@@ -132,12 +137,13 @@
'namemap': function that takes a name and returns a list of nodes
'nodemap': function that takes a node and returns a list of names
'deprecated': set of names to be masked for ordinary use
-
+ 'builtin': bool indicating if this namespace is supported by core
+ Mercurial.
"""
def __init__(self, name, templatename=None, logname=None, colorname=None,
logfmt=None, listnames=None, namemap=None, nodemap=None,
- deprecated=None):
+ deprecated=None, builtin=False):
"""create a namespace
name: the namespace to be registered (in plural form)
@@ -152,7 +158,7 @@
namemap: function that inputs a name, output node(s)
nodemap: function that inputs a node, output name(s)
deprecated: set of names to be masked for ordinary use
-
+ builtin: whether namespace is implemented by core Mercurial
"""
self.name = name
self.templatename = templatename
@@ -181,6 +187,8 @@
else:
self.deprecated = deprecated
+ self.builtin = builtin
+
def names(self, repo, node):
"""method that returns a (sorted) list of names in a namespace that
match a given node"""
--- a/mercurial/node.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/node.py Wed Jul 19 07:51:41 2017 -0500
@@ -23,12 +23,13 @@
addednodeid = ('0' * 15) + 'added'
modifiednodeid = ('0' * 12) + 'modified'
-wdirnodes = set((newnodeid, addednodeid, modifiednodeid))
+wdirnodes = {newnodeid, addednodeid, modifiednodeid}
# pseudo identifiers for working directory
# (they are experimental, so don't add too many dependencies on them)
wdirrev = 0x7fffffff
wdirid = b"\xff" * 20
+wdirhex = hex(wdirid)
def short(node):
return hex(node[:6])
--- a/mercurial/obsolete.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/obsolete.py Wed Jul 19 07:51:41 2017 -0500
@@ -74,14 +74,16 @@
from .i18n import _
from . import (
- base85,
error,
node,
- parsers,
+ obsutil,
phases,
+ policy,
util,
)
+parsers = policy.importmod(r'parsers')
+
_pack = struct.pack
_unpack = struct.unpack
_calcsize = struct.calcsize
@@ -96,6 +98,27 @@
allowunstableopt = 'allowunstable'
exchangeopt = 'exchange'
+def isenabled(repo, option):
+ """Returns True if the given repository has the given obsolete option
+ enabled.
+ """
+ result = set(repo.ui.configlist('experimental', 'evolution'))
+ if 'all' in result:
+ return True
+
+ # For migration purposes, temporarily return true if the config hasn't been
+ # set but _enabled is true.
+ if len(result) == 0 and _enabled:
+ return True
+
+ # createmarkers must be enabled if other options are enabled
+ if ((allowunstableopt in result or exchangeopt in result) and
+ not createmarkersopt in result):
+ raise error.Abort(_("'createmarkers' obsolete option must be enabled "
+ "if other obsolete options are enabled"))
+
+ return option in result
+
### obsolescence marker flag
## bumpedfix flag
@@ -155,10 +178,9 @@
_fm0fsize = _calcsize(_fm0fixed)
_fm0fnodesize = _calcsize(_fm0node)
-def _fm0readmarkers(data, off):
+def _fm0readmarkers(data, off, stop):
# Loop on markers
- l = len(data)
- while off + _fm0fsize <= l:
+ while off < stop:
# read fixed part
cur = data[off:off + _fm0fsize]
off += _fm0fsize
@@ -218,8 +240,8 @@
if not parents:
# mark that we explicitly recorded no parents
metadata['p0'] = ''
- for i, p in enumerate(parents):
- metadata['p%i' % (i + 1)] = node.hex(p)
+ for i, p in enumerate(parents, 1):
+ metadata['p%i' % i] = node.hex(p)
metadata = _fm0encodemeta(metadata)
numsuc = len(sucs)
format = _fm0fixed + (_fm0node * numsuc)
@@ -294,7 +316,7 @@
_fm1metapair = 'BB'
_fm1metapairsize = _calcsize('BB')
-def _fm1purereadmarkers(data, off):
+def _fm1purereadmarkers(data, off, stop):
# make some global constants local for performance
noneflag = _fm1parentnone
sha2flag = usingsha256
@@ -308,10 +330,9 @@
unpack = _unpack
# Loop on markers
- stop = len(data) - _fm1fsize
ufixed = struct.Struct(_fm1fixed).unpack
- while off <= stop:
+ while off < stop:
# read fixed part
o1 = off + fsize
t, secs, tz, flags, numsuc, numpar, nummeta, prec = ufixed(data[off:o1])
@@ -405,11 +426,10 @@
data.append(value)
return ''.join(data)
-def _fm1readmarkers(data, off):
+def _fm1readmarkers(data, off, stop):
native = getattr(parsers, 'fm1readmarkers', None)
if not native:
- return _fm1purereadmarkers(data, off)
- stop = len(data) - _fm1fsize
+ return _fm1purereadmarkers(data, off, stop)
return native(data, off, stop)
# mapping to read/write various marker formats
@@ -417,68 +437,34 @@
formats = {_fm0version: (_fm0readmarkers, _fm0encodeonemarker),
_fm1version: (_fm1readmarkers, _fm1encodeonemarker)}
+def _readmarkerversion(data):
+ return _unpack('>B', data[0:1])[0]
+
@util.nogc
-def _readmarkers(data):
+def _readmarkers(data, off=None, stop=None):
"""Read and enumerate markers from raw data"""
- off = 0
- diskversion = _unpack('>B', data[off:off + 1])[0]
- off += 1
+ diskversion = _readmarkerversion(data)
+ if not off:
+ off = 1 # skip 1 byte version number
+ if stop is None:
+ stop = len(data)
if diskversion not in formats:
- raise error.Abort(_('parsing obsolete marker: unknown version %r')
- % diskversion)
- return diskversion, formats[diskversion][0](data, off)
+ msg = _('parsing obsolete marker: unknown version %r') % diskversion
+ raise error.UnknownVersion(msg, version=diskversion)
+ return diskversion, formats[diskversion][0](data, off, stop)
+
+def encodeheader(version=_fm0version):
+ return _pack('>B', version)
def encodemarkers(markers, addheader=False, version=_fm0version):
# Kept separate from flushmarkers(), it will be reused for
# markers exchange.
encodeone = formats[version][1]
if addheader:
- yield _pack('>B', version)
+ yield encodeheader(version)
for marker in markers:
yield encodeone(marker)
-
-class marker(object):
- """Wrap obsolete marker raw data"""
-
- def __init__(self, repo, data):
- # the repo argument will be used to create changectx in later version
- self._repo = repo
- self._data = data
- self._decodedmeta = None
-
- def __hash__(self):
- return hash(self._data)
-
- def __eq__(self, other):
- if type(other) != type(self):
- return False
- return self._data == other._data
-
- def precnode(self):
- """Precursor changeset node identifier"""
- return self._data[0]
-
- def succnodes(self):
- """List of successor changesets node identifiers"""
- return self._data[1]
-
- def parentnodes(self):
- """Parents of the precursors (None if not recorded)"""
- return self._data[5]
-
- def metadata(self):
- """Decoded metadata dictionary"""
- return dict(self._data[3])
-
- def date(self):
- """Creation date as (unixtime, offset)"""
- return self._data[4]
-
- def flags(self):
- """The flags field of the marker"""
- return self._data[2]
-
@util.nogc
def _addsuccessors(successors, markers):
for mark in markers:
@@ -531,7 +517,7 @@
# caches for various obsolescence related cache
self.caches = {}
self.svfs = svfs
- self._version = defaultformat
+ self._defaultformat = defaultformat
self._readonly = readonly
def __iter__(self):
@@ -562,7 +548,7 @@
return self._readonly
def create(self, transaction, prec, succs=(), flag=0, parents=None,
- date=None, metadata=None):
+ date=None, metadata=None, ui=None):
"""obsolete: add a new obsolete marker
* ensuring it is hashable
@@ -581,6 +567,10 @@
if 'date' in metadata:
# as a courtesy for out-of-tree extensions
date = util.parsedate(metadata.pop('date'))
+ elif ui is not None:
+ date = ui.configdate('devel', 'default-date')
+ if date is None:
+ date = util.makedate()
else:
date = util.makedate()
if len(prec) != 20:
@@ -604,10 +594,11 @@
if self._readonly:
raise error.Abort(_('creating obsolete markers is not enabled on '
'this repo'))
- known = set(self._all)
+ known = set()
+ getsuccessors = self.successors.get
new = []
for m in markers:
- if m not in known:
+ if m not in getsuccessors(m[0], ()) and m not in known:
known.add(m)
new.append(m)
if new:
@@ -616,13 +607,16 @@
offset = f.tell()
transaction.add('obsstore', offset)
# offset == 0: new file - add the version header
- for bytes in encodemarkers(new, offset == 0, self._version):
- f.write(bytes)
+ data = b''.join(encodemarkers(new, offset == 0, self._version))
+ f.write(data)
finally:
# XXX: f.close() == filecache invalidation == obsstore rebuilt.
# call 'filecacheentry.refresh()' here
f.close()
- self._addmarkers(new)
+ addedmarkers = transaction.changes.get('obsmarkers')
+ if addedmarkers is not None:
+ addedmarkers.update(new)
+ self._addmarkers(new, data)
# new marker *may* have changed several set. invalidate the cache.
self.caches.clear()
# records the number of new markers for the transaction hooks
@@ -638,8 +632,19 @@
return self.add(transaction, markers)
@propertycache
+ def _data(self):
+ return self.svfs.tryread('obsstore')
+
+ @propertycache
+ def _version(self):
+ if len(self._data) >= 1:
+ return _readmarkerversion(self._data)
+ else:
+ return self._defaultformat
+
+ @propertycache
def _all(self):
- data = self.svfs.tryread('obsstore')
+ data = self._data
if not data:
return []
self._version, markers = _readmarkers(data)
@@ -668,8 +673,9 @@
def _cached(self, attr):
return attr in self.__dict__
- def _addmarkers(self, markers):
+ def _addmarkers(self, markers, rawdata):
markers = list(markers) # to allow repeated iteration
+ self._data = self._data + rawdata
self._all.extend(markers)
if self._cached('successors'):
_addsuccessors(self.successors, markers)
@@ -694,6 +700,7 @@
seenmarkers = set()
seennodes = set(pendingnodes)
precursorsmarkers = self.precursors
+ succsmarkers = self.successors
children = self.children
while pendingnodes:
direct = set()
@@ -701,6 +708,8 @@
direct.update(precursorsmarkers.get(current, ()))
pruned = [m for m in children.get(current, ()) if not m[1]]
direct.update(pruned)
+ pruned = [m for m in succsmarkers.get(current, ()) if not m[1]]
+ direct.update(pruned)
direct -= seenmarkers
pendingnodes = set([m[0] for m in direct])
seenmarkers |= direct
@@ -708,6 +717,22 @@
seennodes |= pendingnodes
return seenmarkers
+def makestore(ui, repo):
+ """Create an obsstore instance from a repo."""
+ # read default format for new obsstore.
+ # developer config: format.obsstore-version
+ defaultformat = ui.configint('format', 'obsstore-version')
+ # rely on obsstore class default when possible.
+ kwargs = {}
+ if defaultformat is not None:
+ kwargs['defaultformat'] = defaultformat
+ readonly = not isenabled(repo, createmarkersopt)
+ store = obsstore(repo.svfs, readonly=readonly, **kwargs)
+ if store and readonly:
+ ui.warn(_('obsolete feature not enabled but %i markers found!\n')
+ % len(list(store)))
+ return store
+
def commonversion(versions):
"""Return the newest version listed in both versions and our local formats.
@@ -744,7 +769,7 @@
currentlen += len(nextdata)
for idx, part in enumerate(reversed(parts)):
data = ''.join([_pack('>B', _fm0version)] + part)
- keys['dump%i' % idx] = base85.b85encode(data)
+ keys['dump%i' % idx] = util.b85encode(data)
return keys
def listmarkers(repo):
@@ -757,11 +782,11 @@
"""Push markers over pushkey"""
if not key.startswith('dump'):
repo.ui.warn(_('unknown key: %r') % key)
- return 0
+ return False
if old:
repo.ui.warn(_('unexpected old value for %r') % key)
- return 0
- data = base85.b85decode(new)
+ return False
+ data = util.b85decode(new)
lock = repo.lock()
try:
tr = repo.transaction('pushkey: obsolete markers')
@@ -769,322 +794,56 @@
repo.obsstore.mergemarkers(tr, data)
repo.invalidatevolatilesets()
tr.close()
- return 1
+ return True
finally:
tr.release()
finally:
lock.release()
-def getmarkers(repo, nodes=None):
- """returns markers known in a repository
-
- If <nodes> is specified, only markers "relevant" to those nodes are are
- returned"""
- if nodes is None:
- rawmarkers = repo.obsstore
- else:
- rawmarkers = repo.obsstore.relevantmarkers(nodes)
-
- for markerdata in rawmarkers:
- yield marker(repo, markerdata)
-
-def relevantmarkers(repo, node):
- """all obsolete markers relevant to some revision"""
- for markerdata in repo.obsstore.relevantmarkers(node):
- yield marker(repo, markerdata)
-
-
-def precursormarkers(ctx):
- """obsolete marker marking this changeset as a successors"""
- for data in ctx.repo().obsstore.precursors.get(ctx.node(), ()):
- yield marker(ctx.repo(), data)
-
-def successormarkers(ctx):
- """obsolete marker making this changeset obsolete"""
- for data in ctx.repo().obsstore.successors.get(ctx.node(), ()):
- yield marker(ctx.repo(), data)
+# keep compatibility for the 4.3 cycle
+def allprecursors(obsstore, nodes, ignoreflags=0):
+ movemsg = 'obsolete.allprecursors moved to obsutil.allprecursors'
+ util.nouideprecwarn(movemsg, '4.3')
+ return obsutil.allprecursors(obsstore, nodes, ignoreflags)
def allsuccessors(obsstore, nodes, ignoreflags=0):
- """Yield node for every successor of <nodes>.
-
- Some successors may be unknown locally.
+ movemsg = 'obsolete.allsuccessors moved to obsutil.allsuccessors'
+ util.nouideprecwarn(movemsg, '4.3')
+ return obsutil.allsuccessors(obsstore, nodes, ignoreflags)
- This is a linear yield unsuited to detecting split changesets. It includes
- initial nodes too."""
- remaining = set(nodes)
- seen = set(remaining)
- while remaining:
- current = remaining.pop()
- yield current
- for mark in obsstore.successors.get(current, ()):
- # ignore marker flagged with specified flag
- if mark[2] & ignoreflags:
- continue
- for suc in mark[1]:
- if suc not in seen:
- seen.add(suc)
- remaining.add(suc)
+def marker(repo, data):
+ movemsg = 'obsolete.marker moved to obsutil.marker'
+ repo.ui.deprecwarn(movemsg, '4.3')
+ return obsutil.marker(repo, data)
-def allprecursors(obsstore, nodes, ignoreflags=0):
- """Yield node for every precursors of <nodes>.
-
- Some precursors may be unknown locally.
-
- This is a linear yield unsuited to detecting folded changesets. It includes
- initial nodes too."""
+def getmarkers(repo, nodes=None, exclusive=False):
+ movemsg = 'obsolete.getmarkers moved to obsutil.getmarkers'
+ repo.ui.deprecwarn(movemsg, '4.3')
+ return obsutil.getmarkers(repo, nodes=nodes, exclusive=exclusive)
- remaining = set(nodes)
- seen = set(remaining)
- while remaining:
- current = remaining.pop()
- yield current
- for mark in obsstore.precursors.get(current, ()):
- # ignore marker flagged with specified flag
- if mark[2] & ignoreflags:
- continue
- suc = mark[0]
- if suc not in seen:
- seen.add(suc)
- remaining.add(suc)
+def exclusivemarkers(repo, nodes):
+ movemsg = 'obsolete.exclusivemarkers moved to obsutil.exclusivemarkers'
+ repo.ui.deprecwarn(movemsg, '4.3')
+ return obsutil.exclusivemarkers(repo, nodes)
def foreground(repo, nodes):
- """return all nodes in the "foreground" of other node
-
- The foreground of a revision is anything reachable using parent -> children
- or precursor -> successor relation. It is very similar to "descendant" but
- augmented with obsolescence information.
-
- Beware that possible obsolescence cycle may result if complex situation.
- """
- repo = repo.unfiltered()
- foreground = set(repo.set('%ln::', nodes))
- if repo.obsstore:
- # We only need this complicated logic if there is obsolescence
- # XXX will probably deserve an optimised revset.
- nm = repo.changelog.nodemap
- plen = -1
- # compute the whole set of successors or descendants
- while len(foreground) != plen:
- plen = len(foreground)
- succs = set(c.node() for c in foreground)
- mutable = [c.node() for c in foreground if c.mutable()]
- succs.update(allsuccessors(repo.obsstore, mutable))
- known = (n for n in succs if n in nm)
- foreground = set(repo.set('%ln::', known))
- return set(c.node() for c in foreground)
-
+ movemsg = 'obsolete.foreground moved to obsutil.foreground'
+ repo.ui.deprecwarn(movemsg, '4.3')
+ return obsutil.foreground(repo, nodes)
def successorssets(repo, initialnode, cache=None):
- """Return set of all latest successors of initial nodes
-
- The successors set of a changeset A are the group of revisions that succeed
- A. It succeeds A as a consistent whole, each revision being only a partial
- replacement. The successors set contains non-obsolete changesets only.
-
- This function returns the full list of successor sets which is why it
- returns a list of tuples and not just a single tuple. Each tuple is a valid
- successors set. Note that (A,) may be a valid successors set for changeset A
- (see below).
-
- In most cases, a changeset A will have a single element (e.g. the changeset
- A is replaced by A') in its successors set. Though, it is also common for a
- changeset A to have no elements in its successor set (e.g. the changeset
- has been pruned). Therefore, the returned list of successors sets will be
- [(A',)] or [], respectively.
-
- When a changeset A is split into A' and B', however, it will result in a
- successors set containing more than a single element, i.e. [(A',B')].
- Divergent changesets will result in multiple successors sets, i.e. [(A',),
- (A'')].
-
- If a changeset A is not obsolete, then it will conceptually have no
- successors set. To distinguish this from a pruned changeset, the successor
- set will contain itself only, i.e. [(A,)].
-
- Finally, successors unknown locally are considered to be pruned (obsoleted
- without any successors).
-
- The optional `cache` parameter is a dictionary that may contain precomputed
- successors sets. It is meant to reuse the computation of a previous call to
- `successorssets` when multiple calls are made at the same time. The cache
- dictionary is updated in place. The caller is responsible for its life
- span. Code that makes multiple calls to `successorssets` *must* use this
- cache mechanism or suffer terrible performance.
- """
-
- succmarkers = repo.obsstore.successors
-
- # Stack of nodes we search successors sets for
- toproceed = [initialnode]
- # set version of above list for fast loop detection
- # element added to "toproceed" must be added here
- stackedset = set(toproceed)
- if cache is None:
- cache = {}
-
- # This while loop is the flattened version of a recursive search for
- # successors sets
- #
- # def successorssets(x):
- # successors = directsuccessors(x)
- # ss = [[]]
- # for succ in directsuccessors(x):
- # # product as in itertools cartesian product
- # ss = product(ss, successorssets(succ))
- # return ss
- #
- # But we can not use plain recursive calls here:
- # - that would blow the python call stack
- # - obsolescence markers may have cycles, we need to handle them.
- #
- # The `toproceed` list act as our call stack. Every node we search
- # successors set for are stacked there.
- #
- # The `stackedset` is set version of this stack used to check if a node is
- # already stacked. This check is used to detect cycles and prevent infinite
- # loop.
- #
- # successors set of all nodes are stored in the `cache` dictionary.
- #
- # After this while loop ends we use the cache to return the successors sets
- # for the node requested by the caller.
- while toproceed:
- # Every iteration tries to compute the successors sets of the topmost
- # node of the stack: CURRENT.
- #
- # There are four possible outcomes:
- #
- # 1) We already know the successors sets of CURRENT:
- # -> mission accomplished, pop it from the stack.
- # 2) Node is not obsolete:
- # -> the node is its own successors sets. Add it to the cache.
- # 3) We do not know successors set of direct successors of CURRENT:
- # -> We add those successors to the stack.
- # 4) We know successors sets of all direct successors of CURRENT:
- # -> We can compute CURRENT successors set and add it to the
- # cache.
- #
- current = toproceed[-1]
- if current in cache:
- # case (1): We already know the successors sets
- stackedset.remove(toproceed.pop())
- elif current not in succmarkers:
- # case (2): The node is not obsolete.
- if current in repo:
- # We have a valid last successors.
- cache[current] = [(current,)]
- else:
- # Final obsolete version is unknown locally.
- # Do not count that as a valid successors
- cache[current] = []
- else:
- # cases (3) and (4)
- #
- # We proceed in two phases. Phase 1 aims to distinguish case (3)
- # from case (4):
- #
- # For each direct successors of CURRENT, we check whether its
- # successors sets are known. If they are not, we stack the
- # unknown node and proceed to the next iteration of the while
- # loop. (case 3)
- #
- # During this step, we may detect obsolescence cycles: a node
- # with unknown successors sets but already in the call stack.
- # In such a situation, we arbitrary set the successors sets of
- # the node to nothing (node pruned) to break the cycle.
- #
- # If no break was encountered we proceed to phase 2.
- #
- # Phase 2 computes successors sets of CURRENT (case 4); see details
- # in phase 2 itself.
- #
- # Note the two levels of iteration in each phase.
- # - The first one handles obsolescence markers using CURRENT as
- # precursor (successors markers of CURRENT).
- #
- # Having multiple entry here means divergence.
- #
- # - The second one handles successors defined in each marker.
- #
- # Having none means pruned node, multiple successors means split,
- # single successors are standard replacement.
- #
- for mark in sorted(succmarkers[current]):
- for suc in mark[1]:
- if suc not in cache:
- if suc in stackedset:
- # cycle breaking
- cache[suc] = []
- else:
- # case (3) If we have not computed successors sets
- # of one of those successors we add it to the
- # `toproceed` stack and stop all work for this
- # iteration.
- toproceed.append(suc)
- stackedset.add(suc)
- break
- else:
- continue
- break
- else:
- # case (4): we know all successors sets of all direct
- # successors
- #
- # Successors set contributed by each marker depends on the
- # successors sets of all its "successors" node.
- #
- # Each different marker is a divergence in the obsolescence
- # history. It contributes successors sets distinct from other
- # markers.
- #
- # Within a marker, a successor may have divergent successors
- # sets. In such a case, the marker will contribute multiple
- # divergent successors sets. If multiple successors have
- # divergent successors sets, a Cartesian product is used.
- #
- # At the end we post-process successors sets to remove
- # duplicated entry and successors set that are strict subset of
- # another one.
- succssets = []
- for mark in sorted(succmarkers[current]):
- # successors sets contributed by this marker
- markss = [[]]
- for suc in mark[1]:
- # cardinal product with previous successors
- productresult = []
- for prefix in markss:
- for suffix in cache[suc]:
- newss = list(prefix)
- for part in suffix:
- # do not duplicated entry in successors set
- # first entry wins.
- if part not in newss:
- newss.append(part)
- productresult.append(newss)
- markss = productresult
- succssets.extend(markss)
- # remove duplicated and subset
- seen = []
- final = []
- candidate = sorted(((set(s), s) for s in succssets if s),
- key=lambda x: len(x[1]), reverse=True)
- for setversion, listversion in candidate:
- for seenset in seen:
- if setversion.issubset(seenset):
- break
- else:
- final.append(listversion)
- seen.append(setversion)
- final.reverse() # put small successors set first
- cache[current] = final
- return cache[initialnode]
+ movemsg = 'obsolete.successorssets moved to obsutil.successorssets'
+ repo.ui.deprecwarn(movemsg, '4.3')
+ return obsutil.successorssets(repo, initialnode, cache=cache)
# mapping of 'set-name' -> <function to compute this set>
cachefuncs = {}
def cachefor(name):
"""Decorator to register a function as computing the cache for a set"""
def decorator(func):
- assert name not in cachefuncs
+ if name in cachefuncs:
+ msg = "duplicated registration for volatileset '%s' (existing: %r)"
+ raise error.ProgrammingError(msg % (name, cachefuncs[name]))
cachefuncs[name] = func
return func
return decorator
@@ -1118,30 +877,34 @@
if 'obsstore' in repo._filecache:
repo.obsstore.caches.clear()
+def _mutablerevs(repo):
+ """the set of mutable revision in the repository"""
+ return repo._phasecache.getrevset(repo, (phases.draft, phases.secret))
+
@cachefor('obsolete')
def _computeobsoleteset(repo):
"""the set of obsolete revisions"""
- obs = set()
getnode = repo.changelog.node
- notpublic = repo._phasecache.getrevset(repo, (phases.draft, phases.secret))
- for r in notpublic:
- if getnode(r) in repo.obsstore.successors:
- obs.add(r)
+ notpublic = _mutablerevs(repo)
+ isobs = repo.obsstore.successors.__contains__
+ obs = set(r for r in notpublic if isobs(getnode(r)))
return obs
@cachefor('unstable')
def _computeunstableset(repo):
"""the set of non obsolete revisions with obsolete parents"""
- revs = [(ctx.rev(), ctx) for ctx in
- repo.set('(not public()) and (not obsolete())')]
- revs.sort(key=lambda x:x[0])
+ pfunc = repo.changelog.parentrevs
+ mutable = _mutablerevs(repo)
+ obsolete = getrevs(repo, 'obsolete')
+ others = mutable - obsolete
unstable = set()
- for rev, ctx in revs:
+ for r in sorted(others):
# A rev is unstable if one of its parent is obsolete or unstable
# this works since we traverse following growing rev order
- if any((x.obsolete() or (x.rev() in unstable))
- for x in ctx.parents()):
- unstable.add(rev)
+ for p in pfunc(r):
+ if p in obsolete or p in unstable:
+ unstable.add(r)
+ break
return unstable
@cachefor('suspended')
@@ -1170,7 +933,7 @@
# We only evaluate mutable, non-obsolete revision
node = ctx.node()
# (future) A cache of precursors may worth if split is very common
- for pnode in allprecursors(repo.obsstore, [node],
+ for pnode in obsutil.allprecursors(repo.obsstore, [node],
ignoreflags=bumpedfix):
prev = torev(pnode) # unfiltered! but so is phasecache
if (prev is not None) and (phase(repo, prev) <= public):
@@ -1196,7 +959,7 @@
continue # emergency cycle hanging prevention
seen.add(prec)
if prec not in newermap:
- successorssets(repo, prec, newermap)
+ obsutil.successorssets(repo, prec, cache=newermap)
newer = [n for n in newermap[prec] if n]
if len(newer) > 1:
divergent.add(ctx.rev())
@@ -1205,7 +968,8 @@
return divergent
-def createmarkers(repo, relations, flag=0, date=None, metadata=None):
+def createmarkers(repo, relations, flag=0, date=None, metadata=None,
+ operation=None):
"""Add obsolete markers between changesets in a repo
<relations> must be an iterable of (<old>, (<new>, ...)[,{metadata}])
@@ -1226,6 +990,10 @@
metadata = {}
if 'user' not in metadata:
metadata['user'] = repo.ui.username()
+ useoperation = repo.ui.configbool('experimental',
+ 'evolution.track-operation')
+ if useoperation and operation:
+ metadata['operation'] = operation
tr = repo.transaction('add-obsolescence-marker')
try:
markerargs = []
@@ -1258,29 +1026,9 @@
for args in markerargs:
nprec, nsucs, npare, localmetadata = args
repo.obsstore.create(tr, nprec, nsucs, flag, parents=npare,
- date=date, metadata=localmetadata)
+ date=date, metadata=localmetadata,
+ ui=repo.ui)
repo.filteredrevcache.clear()
tr.close()
finally:
tr.release()
-
-def isenabled(repo, option):
- """Returns True if the given repository has the given obsolete option
- enabled.
- """
- result = set(repo.ui.configlist('experimental', 'evolution'))
- if 'all' in result:
- return True
-
- # For migration purposes, temporarily return true if the config hasn't been
- # set but _enabled is true.
- if len(result) == 0 and _enabled:
- return True
-
- # createmarkers must be enabled if other options are enabled
- if ((allowunstableopt in result or exchangeopt in result) and
- not createmarkersopt in result):
- raise error.Abort(_("'createmarkers' obsolete option must be enabled "
- "if other obsolete options are enabled"))
-
- return option in result
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/obsutil.py Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,537 @@
+# obsutil.py - utility functions for obsolescence
+#
+# Copyright 2017 Boris Feld <boris.feld@octobus.net>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from __future__ import absolute_import
+
+from . import (
+ phases,
+)
+
+class marker(object):
+ """Wrap obsolete marker raw data"""
+
+ def __init__(self, repo, data):
+ # the repo argument will be used to create changectx in later version
+ self._repo = repo
+ self._data = data
+ self._decodedmeta = None
+
+ def __hash__(self):
+ return hash(self._data)
+
+ def __eq__(self, other):
+ if type(other) != type(self):
+ return False
+ return self._data == other._data
+
+ def precnode(self):
+ """Precursor changeset node identifier"""
+ return self._data[0]
+
+ def succnodes(self):
+ """List of successor changesets node identifiers"""
+ return self._data[1]
+
+ def parentnodes(self):
+ """Parents of the precursors (None if not recorded)"""
+ return self._data[5]
+
+ def metadata(self):
+ """Decoded metadata dictionary"""
+ return dict(self._data[3])
+
+ def date(self):
+ """Creation date as (unixtime, offset)"""
+ return self._data[4]
+
+ def flags(self):
+ """The flags field of the marker"""
+ return self._data[2]
+
+def getmarkers(repo, nodes=None, exclusive=False):
+ """returns markers known in a repository
+
+    If <nodes> is specified, only markers "relevant" to those nodes are
+ returned"""
+ if nodes is None:
+ rawmarkers = repo.obsstore
+ elif exclusive:
+ rawmarkers = exclusivemarkers(repo, nodes)
+ else:
+ rawmarkers = repo.obsstore.relevantmarkers(nodes)
+
+ for markerdata in rawmarkers:
+ yield marker(repo, markerdata)
+
+def closestpredecessors(repo, nodeid):
+ """yield the list of next predecessors pointing on visible changectx nodes
+
+    This function respects the repoview filtering; filtered revisions will be
+ considered missing.
+ """
+
+ precursors = repo.obsstore.precursors
+ stack = [nodeid]
+ seen = set(stack)
+
+ while stack:
+ current = stack.pop()
+ currentpreccs = precursors.get(current, ())
+
+ for prec in currentpreccs:
+ precnodeid = prec[0]
+
+ # Basic cycle protection
+ if precnodeid in seen:
+ continue
+ seen.add(precnodeid)
+
+ if precnodeid in repo:
+ yield precnodeid
+ else:
+ stack.append(precnodeid)
+
+def allprecursors(obsstore, nodes, ignoreflags=0):
+ """Yield node for every precursors of <nodes>.
+
+ Some precursors may be unknown locally.
+
+ This is a linear yield unsuited to detecting folded changesets. It includes
+ initial nodes too."""
+
+ remaining = set(nodes)
+ seen = set(remaining)
+ while remaining:
+ current = remaining.pop()
+ yield current
+ for mark in obsstore.precursors.get(current, ()):
+ # ignore marker flagged with specified flag
+ if mark[2] & ignoreflags:
+ continue
+ suc = mark[0]
+ if suc not in seen:
+ seen.add(suc)
+ remaining.add(suc)
+
+def allsuccessors(obsstore, nodes, ignoreflags=0):
+ """Yield node for every successor of <nodes>.
+
+ Some successors may be unknown locally.
+
+ This is a linear yield unsuited to detecting split changesets. It includes
+ initial nodes too."""
+ remaining = set(nodes)
+ seen = set(remaining)
+ while remaining:
+ current = remaining.pop()
+ yield current
+ for mark in obsstore.successors.get(current, ()):
+ # ignore marker flagged with specified flag
+ if mark[2] & ignoreflags:
+ continue
+ for suc in mark[1]:
+ if suc not in seen:
+ seen.add(suc)
+ remaining.add(suc)
+
+def _filterprunes(markers):
+ """return a set with no prune markers"""
+ return set(m for m in markers if m[1])
+
+def exclusivemarkers(repo, nodes):
+ """set of markers relevant to "nodes" but no other locally-known nodes
+
+    This function computes the set of markers "exclusive" to a locally-known
+    node. This means we walk the markers starting from <nodes> until we reach a
+    locally-known precursor outside of <nodes>. Elements of <nodes> with
+ locally-known successors outside of <nodes> are ignored (since their
+ precursors markers are also relevant to these successors).
+
+ For example:
+
+ # (A0 rewritten as A1)
+ #
+ # A0 <-1- A1 # Marker "1" is exclusive to A1
+
+ or
+
+    # (A0 rewritten as AX; AX rewritten as A1; AX is unknown locally)
+ #
+ # <-1- A0 <-2- AX <-3- A1 # Marker "2,3" are exclusive to A1
+
+ or
+
+ # (A0 has unknown precursors, A0 rewritten as A1 and A2 (divergence))
+ #
+ # <-2- A1 # Marker "2" is exclusive to A0,A1
+ # /
+ # <-1- A0
+ # \
+ # <-3- A2 # Marker "3" is exclusive to A0,A2
+ #
+ # in addition:
+ #
+ # Markers "2,3" are exclusive to A1,A2
+ # Markers "1,2,3" are exclusive to A0,A1,A2
+
+ See test/test-obsolete-bundle-strip.t for more examples.
+
+ An example usage is strip. When stripping a changeset, we also want to
+ strip the markers exclusive to this changeset. Otherwise we would have
+    "dangling" obsolescence markers from its precursors: Obsolescence markers
+ marking a node as obsolete without any successors available locally.
+
+ As for relevant markers, the prune markers for children will be followed.
+ Of course, they will only be followed if the pruned children is
+ locally-known. Since the prune markers are relevant to the pruned node.
+ However, while prune markers are considered relevant to the parent of the
+ pruned changesets, prune markers for locally-known changeset (with no
+ successors) are considered exclusive to the pruned nodes. This allows
+ to strip the prune markers (with the rest of the exclusive chain) alongside
+ the pruned changesets.
+ """
+ # running on a filtered repository would be dangerous as markers could be
+ # reported as exclusive when they are relevant for other filtered nodes.
+ unfi = repo.unfiltered()
+
+ # shortcut to various useful item
+ nm = unfi.changelog.nodemap
+ precursorsmarkers = unfi.obsstore.precursors
+ successormarkers = unfi.obsstore.successors
+ childrenmarkers = unfi.obsstore.children
+
+ # exclusive markers (return of the function)
+ exclmarkers = set()
+ # we need fast membership testing
+ nodes = set(nodes)
+ # looking for head in the obshistory
+ #
+ # XXX we are ignoring all issues in regard with cycle for now.
+ stack = [n for n in nodes if not _filterprunes(successormarkers.get(n, ()))]
+ stack.sort()
+ # nodes already stacked
+ seennodes = set(stack)
+ while stack:
+ current = stack.pop()
+ # fetch precursors markers
+ markers = list(precursorsmarkers.get(current, ()))
+ # extend the list with prune markers
+ for mark in successormarkers.get(current, ()):
+ if not mark[1]:
+ markers.append(mark)
+ # and markers from children (looking for prune)
+ for mark in childrenmarkers.get(current, ()):
+ if not mark[1]:
+ markers.append(mark)
+ # traverse the markers
+ for mark in markers:
+ if mark in exclmarkers:
+ # markers already selected
+ continue
+
+ # If the markers is about the current node, select it
+ #
+ # (this delay the addition of markers from children)
+ if mark[1] or mark[0] == current:
+ exclmarkers.add(mark)
+
+ # should we keep traversing through the precursors?
+ prec = mark[0]
+
+ # nodes in the stack or already processed
+ if prec in seennodes:
+ continue
+
+ # is this a locally known node ?
+ known = prec in nm
+ # if locally-known and not in the <nodes> set the traversal
+ # stop here.
+ if known and prec not in nodes:
+ continue
+
+ # do not keep going if there are unselected markers pointing to this
+ # nodes. If we end up traversing these unselected markers later the
+ # node will be taken care of at that point.
+ precmarkers = _filterprunes(successormarkers.get(prec))
+ if precmarkers.issubset(exclmarkers):
+ seennodes.add(prec)
+ stack.append(prec)
+
+ return exclmarkers
+
+def foreground(repo, nodes):
+ """return all nodes in the "foreground" of other node
+
+ The foreground of a revision is anything reachable using parent -> children
+ or precursor -> successor relation. It is very similar to "descendant" but
+ augmented with obsolescence information.
+
+    Beware that possible obsolescence cycles may result in complex situations.
+ """
+ repo = repo.unfiltered()
+ foreground = set(repo.set('%ln::', nodes))
+ if repo.obsstore:
+ # We only need this complicated logic if there is obsolescence
+ # XXX will probably deserve an optimised revset.
+ nm = repo.changelog.nodemap
+ plen = -1
+ # compute the whole set of successors or descendants
+ while len(foreground) != plen:
+ plen = len(foreground)
+ succs = set(c.node() for c in foreground)
+ mutable = [c.node() for c in foreground if c.mutable()]
+ succs.update(allsuccessors(repo.obsstore, mutable))
+ known = (n for n in succs if n in nm)
+ foreground = set(repo.set('%ln::', known))
+ return set(c.node() for c in foreground)
+
+def getobsoleted(repo, tr):
+ """return the set of pre-existing revisions obsoleted by a transaction"""
+ torev = repo.unfiltered().changelog.nodemap.get
+ phase = repo._phasecache.phase
+ succsmarkers = repo.obsstore.successors.get
+ public = phases.public
+ addedmarkers = tr.changes.get('obsmarkers')
+ addedrevs = tr.changes.get('revs')
+ seenrevs = set(addedrevs)
+ obsoleted = set()
+ for mark in addedmarkers:
+ node = mark[0]
+ rev = torev(node)
+ if rev is None or rev in seenrevs:
+ continue
+ seenrevs.add(rev)
+ if phase(repo, rev) == public:
+ continue
+ if set(succsmarkers(node)).issubset(addedmarkers):
+ obsoleted.add(rev)
+ return obsoleted
+
+def successorssets(repo, initialnode, closest=False, cache=None):
+ """Return set of all latest successors of initial nodes
+
+ The successors set of a changeset A are the group of revisions that succeed
+ A. It succeeds A as a consistent whole, each revision being only a partial
+ replacement. By default, the successors set contains non-obsolete
+ changesets only, walking the obsolescence graph until reaching a leaf. If
+    'closest' is set to True, closest successors-sets are returned (the
+ obsolescence walk stops on known changesets).
+
+ This function returns the full list of successor sets which is why it
+ returns a list of tuples and not just a single tuple. Each tuple is a valid
+ successors set. Note that (A,) may be a valid successors set for changeset A
+ (see below).
+
+ In most cases, a changeset A will have a single element (e.g. the changeset
+ A is replaced by A') in its successors set. Though, it is also common for a
+ changeset A to have no elements in its successor set (e.g. the changeset
+ has been pruned). Therefore, the returned list of successors sets will be
+ [(A',)] or [], respectively.
+
+ When a changeset A is split into A' and B', however, it will result in a
+ successors set containing more than a single element, i.e. [(A',B')].
+ Divergent changesets will result in multiple successors sets, i.e. [(A',),
+ (A'')].
+
+ If a changeset A is not obsolete, then it will conceptually have no
+ successors set. To distinguish this from a pruned changeset, the successor
+ set will contain itself only, i.e. [(A,)].
+
+ Finally, final successors unknown locally are considered to be pruned
+ (pruned: obsoleted without any successors). (Final: successors not affected
+ by markers).
+
+    The 'closest' mode respects the repoview filtering. For example, without
+    filter it will stop at the first locally known changeset, with 'visible'
+    filter it will stop on visible changesets.
+
+    The optional `cache` parameter is a dictionary that may contain
+ precomputed successors sets. It is meant to reuse the computation of a
+ previous call to `successorssets` when multiple calls are made at the same
+ time. The cache dictionary is updated in place. The caller is responsible
+ for its life span. Code that makes multiple calls to `successorssets`
+ *should* use this cache mechanism or risk a performance hit.
+
+    Since results are different depending on the 'closest' mode, the same cache
+    cannot be reused for both modes.
+ """
+
+ succmarkers = repo.obsstore.successors
+
+ # Stack of nodes we search successors sets for
+ toproceed = [initialnode]
+ # set version of above list for fast loop detection
+ # element added to "toproceed" must be added here
+ stackedset = set(toproceed)
+ if cache is None:
+ cache = {}
+
+ # This while loop is the flattened version of a recursive search for
+ # successors sets
+ #
+ # def successorssets(x):
+ # successors = directsuccessors(x)
+ # ss = [[]]
+ # for succ in directsuccessors(x):
+ # # product as in itertools cartesian product
+ # ss = product(ss, successorssets(succ))
+ # return ss
+ #
+ # But we can not use plain recursive calls here:
+ # - that would blow the python call stack
+ # - obsolescence markers may have cycles, we need to handle them.
+ #
+    # The `toproceed` list acts as our call stack. Every node we search
+    # successors sets for is stacked there.
+ #
+    # The `stackedset` is a set version of this stack used to check if a node is
+    # already stacked. This check is used to detect cycles and prevent infinite
+    # loops.
+ #
+ # successors set of all nodes are stored in the `cache` dictionary.
+ #
+ # After this while loop ends we use the cache to return the successors sets
+ # for the node requested by the caller.
+ while toproceed:
+ # Every iteration tries to compute the successors sets of the topmost
+ # node of the stack: CURRENT.
+ #
+ # There are four possible outcomes:
+ #
+ # 1) We already know the successors sets of CURRENT:
+ # -> mission accomplished, pop it from the stack.
+ # 2) Stop the walk:
+ # default case: Node is not obsolete
+ # closest case: Node is known at this repo filter level
+ # -> the node is its own successors sets. Add it to the cache.
+ # 3) We do not know successors set of direct successors of CURRENT:
+ # -> We add those successors to the stack.
+ # 4) We know successors sets of all direct successors of CURRENT:
+ # -> We can compute CURRENT successors set and add it to the
+ # cache.
+ #
+ current = toproceed[-1]
+
+ # case 2 condition is a bit hairy because of closest,
+ # we compute it on its own
+ case2condition = ((current not in succmarkers)
+ or (closest and current != initialnode
+ and current in repo))
+
+ if current in cache:
+ # case (1): We already know the successors sets
+ stackedset.remove(toproceed.pop())
+ elif case2condition:
+ # case (2): end of walk.
+ if current in repo:
+ # We have a valid successors.
+ cache[current] = [(current,)]
+ else:
+ # Final obsolete version is unknown locally.
+ # Do not count that as a valid successors
+ cache[current] = []
+ else:
+ # cases (3) and (4)
+ #
+ # We proceed in two phases. Phase 1 aims to distinguish case (3)
+ # from case (4):
+ #
+ # For each direct successors of CURRENT, we check whether its
+ # successors sets are known. If they are not, we stack the
+ # unknown node and proceed to the next iteration of the while
+ # loop. (case 3)
+ #
+ # During this step, we may detect obsolescence cycles: a node
+ # with unknown successors sets but already in the call stack.
+ # In such a situation, we arbitrary set the successors sets of
+ # the node to nothing (node pruned) to break the cycle.
+ #
+ # If no break was encountered we proceed to phase 2.
+ #
+ # Phase 2 computes successors sets of CURRENT (case 4); see details
+ # in phase 2 itself.
+ #
+ # Note the two levels of iteration in each phase.
+ # - The first one handles obsolescence markers using CURRENT as
+ # precursor (successors markers of CURRENT).
+ #
+ # Having multiple entry here means divergence.
+ #
+ # - The second one handles successors defined in each marker.
+ #
+ # Having none means pruned node, multiple successors means split,
+ # single successors are standard replacement.
+ #
+ for mark in sorted(succmarkers[current]):
+ for suc in mark[1]:
+ if suc not in cache:
+ if suc in stackedset:
+ # cycle breaking
+ cache[suc] = []
+ else:
+ # case (3) If we have not computed successors sets
+ # of one of those successors we add it to the
+ # `toproceed` stack and stop all work for this
+ # iteration.
+ toproceed.append(suc)
+ stackedset.add(suc)
+ break
+ else:
+ continue
+ break
+ else:
+ # case (4): we know all successors sets of all direct
+ # successors
+ #
+ # Successors set contributed by each marker depends on the
+ # successors sets of all its "successors" node.
+ #
+ # Each different marker is a divergence in the obsolescence
+ # history. It contributes successors sets distinct from other
+ # markers.
+ #
+ # Within a marker, a successor may have divergent successors
+ # sets. In such a case, the marker will contribute multiple
+ # divergent successors sets. If multiple successors have
+ # divergent successors sets, a Cartesian product is used.
+ #
+ # At the end we post-process successors sets to remove
+ # duplicated entry and successors set that are strict subset of
+ # another one.
+ succssets = []
+ for mark in sorted(succmarkers[current]):
+ # successors sets contributed by this marker
+ markss = [[]]
+ for suc in mark[1]:
+ # cardinal product with previous successors
+ productresult = []
+ for prefix in markss:
+ for suffix in cache[suc]:
+ newss = list(prefix)
+ for part in suffix:
+                                # do not duplicate entries in successors set
+ # first entry wins.
+ if part not in newss:
+ newss.append(part)
+ productresult.append(newss)
+ markss = productresult
+ succssets.extend(markss)
+ # remove duplicated and subset
+ seen = []
+ final = []
+ candidate = sorted(((set(s), s) for s in succssets if s),
+ key=lambda x: len(x[1]), reverse=True)
+ for setversion, listversion in candidate:
+ for seenset in seen:
+ if setversion.issubset(seenset):
+ break
+ else:
+ final.append(listversion)
+ seen.append(setversion)
+ final.reverse() # put small successors set first
+ cache[current] = final
+ return cache[initialnode]
--- a/mercurial/osutil.c Wed Jul 05 11:24:22 2017 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,1328 +0,0 @@
-/*
- osutil.c - native operating system services
-
- Copyright 2007 Matt Mackall and others
-
- This software may be used and distributed according to the terms of
- the GNU General Public License, incorporated herein by reference.
-*/
-
-#define _ATFILE_SOURCE
-#include <Python.h>
-#include <fcntl.h>
-#include <stdio.h>
-#include <stdlib.h>
-#include <string.h>
-#include <errno.h>
-
-#ifdef _WIN32
-#include <windows.h>
-#include <io.h>
-#else
-#include <dirent.h>
-#include <sys/socket.h>
-#include <sys/stat.h>
-#include <sys/types.h>
-#include <unistd.h>
-#ifdef HAVE_LINUX_STATFS
-#include <linux/magic.h>
-#include <sys/vfs.h>
-#endif
-#ifdef HAVE_BSD_STATFS
-#include <sys/mount.h>
-#include <sys/param.h>
-#endif
-#endif
-
-#ifdef __APPLE__
-#include <sys/attr.h>
-#include <sys/vnode.h>
-#endif
-
-#include "util.h"
-
-/* some platforms lack the PATH_MAX definition (eg. GNU/Hurd) */
-#ifndef PATH_MAX
-#define PATH_MAX 4096
-#endif
-
-#ifdef _WIN32
-/*
-stat struct compatible with hg expectations
-Mercurial only uses st_mode, st_size and st_mtime
-the rest is kept to minimize changes between implementations
-*/
-struct hg_stat {
- int st_dev;
- int st_mode;
- int st_nlink;
- __int64 st_size;
- int st_mtime;
- int st_ctime;
-};
-struct listdir_stat {
- PyObject_HEAD
- struct hg_stat st;
-};
-#else
-struct listdir_stat {
- PyObject_HEAD
- struct stat st;
-};
-#endif
-
-#ifdef IS_PY3K
-#define listdir_slot(name) \
- static PyObject *listdir_stat_##name(PyObject *self, void *x) \
- { \
- return PyLong_FromLong(((struct listdir_stat *)self)->st.name); \
- }
-#else
-#define listdir_slot(name) \
- static PyObject *listdir_stat_##name(PyObject *self, void *x) \
- { \
- return PyInt_FromLong(((struct listdir_stat *)self)->st.name); \
- }
-#endif
-
-listdir_slot(st_dev)
-listdir_slot(st_mode)
-listdir_slot(st_nlink)
-#ifdef _WIN32
-static PyObject *listdir_stat_st_size(PyObject *self, void *x)
-{
- return PyLong_FromLongLong(
- (PY_LONG_LONG)((struct listdir_stat *)self)->st.st_size);
-}
-#else
-listdir_slot(st_size)
-#endif
-listdir_slot(st_mtime)
-listdir_slot(st_ctime)
-
-static struct PyGetSetDef listdir_stat_getsets[] = {
- {"st_dev", listdir_stat_st_dev, 0, 0, 0},
- {"st_mode", listdir_stat_st_mode, 0, 0, 0},
- {"st_nlink", listdir_stat_st_nlink, 0, 0, 0},
- {"st_size", listdir_stat_st_size, 0, 0, 0},
- {"st_mtime", listdir_stat_st_mtime, 0, 0, 0},
- {"st_ctime", listdir_stat_st_ctime, 0, 0, 0},
- {0, 0, 0, 0, 0}
-};
-
-static PyObject *listdir_stat_new(PyTypeObject *t, PyObject *a, PyObject *k)
-{
- return t->tp_alloc(t, 0);
-}
-
-static void listdir_stat_dealloc(PyObject *o)
-{
- o->ob_type->tp_free(o);
-}
-
-static PyTypeObject listdir_stat_type = {
- PyVarObject_HEAD_INIT(NULL, 0)
- "osutil.stat", /*tp_name*/
- sizeof(struct listdir_stat), /*tp_basicsize*/
- 0, /*tp_itemsize*/
- (destructor)listdir_stat_dealloc, /*tp_dealloc*/
- 0, /*tp_print*/
- 0, /*tp_getattr*/
- 0, /*tp_setattr*/
- 0, /*tp_compare*/
- 0, /*tp_repr*/
- 0, /*tp_as_number*/
- 0, /*tp_as_sequence*/
- 0, /*tp_as_mapping*/
- 0, /*tp_hash */
- 0, /*tp_call*/
- 0, /*tp_str*/
- 0, /*tp_getattro*/
- 0, /*tp_setattro*/
- 0, /*tp_as_buffer*/
- Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /*tp_flags*/
- "stat objects", /* tp_doc */
- 0, /* tp_traverse */
- 0, /* tp_clear */
- 0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- 0, /* tp_iter */
- 0, /* tp_iternext */
- 0, /* tp_methods */
- 0, /* tp_members */
- listdir_stat_getsets, /* tp_getset */
- 0, /* tp_base */
- 0, /* tp_dict */
- 0, /* tp_descr_get */
- 0, /* tp_descr_set */
- 0, /* tp_dictoffset */
- 0, /* tp_init */
- 0, /* tp_alloc */
- listdir_stat_new, /* tp_new */
-};
-
-#ifdef _WIN32
-
-static int to_python_time(const FILETIME *tm)
-{
- /* number of seconds between epoch and January 1 1601 */
- const __int64 a0 = (__int64)134774L * (__int64)24L * (__int64)3600L;
- /* conversion factor from 100ns to 1s */
- const __int64 a1 = 10000000;
- /* explicit (int) cast to suspend compiler warnings */
- return (int)((((__int64)tm->dwHighDateTime << 32)
- + tm->dwLowDateTime) / a1 - a0);
-}
-
-static PyObject *make_item(const WIN32_FIND_DATAA *fd, int wantstat)
-{
- PyObject *py_st;
- struct hg_stat *stp;
-
- int kind = (fd->dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY)
- ? _S_IFDIR : _S_IFREG;
-
- if (!wantstat)
- return Py_BuildValue("si", fd->cFileName, kind);
-
- py_st = PyObject_CallObject((PyObject *)&listdir_stat_type, NULL);
- if (!py_st)
- return NULL;
-
- stp = &((struct listdir_stat *)py_st)->st;
- /*
- use kind as st_mode
- rwx bits on Win32 are meaningless
- and Hg does not use them anyway
- */
- stp->st_mode = kind;
- stp->st_mtime = to_python_time(&fd->ftLastWriteTime);
- stp->st_ctime = to_python_time(&fd->ftCreationTime);
- if (kind == _S_IFREG)
- stp->st_size = ((__int64)fd->nFileSizeHigh << 32)
- + fd->nFileSizeLow;
- return Py_BuildValue("siN", fd->cFileName,
- kind, py_st);
-}
-
-static PyObject *_listdir(char *path, int plen, int wantstat, char *skip)
-{
- PyObject *rval = NULL; /* initialize - return value */
- PyObject *list;
- HANDLE fh;
- WIN32_FIND_DATAA fd;
- char *pattern;
-
- /* build the path + \* pattern string */
- pattern = PyMem_Malloc(plen + 3); /* path + \* + \0 */
- if (!pattern) {
- PyErr_NoMemory();
- goto error_nomem;
- }
- memcpy(pattern, path, plen);
-
- if (plen > 0) {
- char c = path[plen-1];
- if (c != ':' && c != '/' && c != '\\')
- pattern[plen++] = '\\';
- }
- pattern[plen++] = '*';
- pattern[plen] = '\0';
-
- fh = FindFirstFileA(pattern, &fd);
- if (fh == INVALID_HANDLE_VALUE) {
- PyErr_SetFromWindowsErrWithFilename(GetLastError(), path);
- goto error_file;
- }
-
- list = PyList_New(0);
- if (!list)
- goto error_list;
-
- do {
- PyObject *item;
-
- if (fd.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY) {
- if (!strcmp(fd.cFileName, ".")
- || !strcmp(fd.cFileName, ".."))
- continue;
-
- if (skip && !strcmp(fd.cFileName, skip)) {
- rval = PyList_New(0);
- goto error;
- }
- }
-
- item = make_item(&fd, wantstat);
- if (!item)
- goto error;
-
- if (PyList_Append(list, item)) {
- Py_XDECREF(item);
- goto error;
- }
-
- Py_XDECREF(item);
- } while (FindNextFileA(fh, &fd));
-
- if (GetLastError() != ERROR_NO_MORE_FILES) {
- PyErr_SetFromWindowsErrWithFilename(GetLastError(), path);
- goto error;
- }
-
- rval = list;
- Py_XINCREF(rval);
-error:
- Py_XDECREF(list);
-error_list:
- FindClose(fh);
-error_file:
- PyMem_Free(pattern);
-error_nomem:
- return rval;
-}
-
-#else
-
-int entkind(struct dirent *ent)
-{
-#ifdef DT_REG
- switch (ent->d_type) {
- case DT_REG: return S_IFREG;
- case DT_DIR: return S_IFDIR;
- case DT_LNK: return S_IFLNK;
- case DT_BLK: return S_IFBLK;
- case DT_CHR: return S_IFCHR;
- case DT_FIFO: return S_IFIFO;
- case DT_SOCK: return S_IFSOCK;
- }
-#endif
- return -1;
-}
-
-static PyObject *makestat(const struct stat *st)
-{
- PyObject *stat;
-
- stat = PyObject_CallObject((PyObject *)&listdir_stat_type, NULL);
- if (stat)
- memcpy(&((struct listdir_stat *)stat)->st, st, sizeof(*st));
- return stat;
-}
-
-static PyObject *_listdir_stat(char *path, int pathlen, int keepstat,
- char *skip)
-{
- PyObject *list, *elem, *stat = NULL, *ret = NULL;
- char fullpath[PATH_MAX + 10];
- int kind, err;
- struct stat st;
- struct dirent *ent;
- DIR *dir;
-#ifdef AT_SYMLINK_NOFOLLOW
- int dfd = -1;
-#endif
-
- if (pathlen >= PATH_MAX) {
- errno = ENAMETOOLONG;
- PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
- goto error_value;
- }
- strncpy(fullpath, path, PATH_MAX);
- fullpath[pathlen] = '/';
-
-#ifdef AT_SYMLINK_NOFOLLOW
- dfd = open(path, O_RDONLY);
- if (dfd == -1) {
- PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
- goto error_value;
- }
- dir = fdopendir(dfd);
-#else
- dir = opendir(path);
-#endif
- if (!dir) {
- PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
- goto error_dir;
- }
-
- list = PyList_New(0);
- if (!list)
- goto error_list;
-
- while ((ent = readdir(dir))) {
- if (!strcmp(ent->d_name, ".") || !strcmp(ent->d_name, ".."))
- continue;
-
- kind = entkind(ent);
- if (kind == -1 || keepstat) {
-#ifdef AT_SYMLINK_NOFOLLOW
- err = fstatat(dfd, ent->d_name, &st,
- AT_SYMLINK_NOFOLLOW);
-#else
- strncpy(fullpath + pathlen + 1, ent->d_name,
- PATH_MAX - pathlen);
- fullpath[PATH_MAX] = '\0';
- err = lstat(fullpath, &st);
-#endif
- if (err == -1) {
- /* race with file deletion? */
- if (errno == ENOENT)
- continue;
- strncpy(fullpath + pathlen + 1, ent->d_name,
- PATH_MAX - pathlen);
- fullpath[PATH_MAX] = 0;
- PyErr_SetFromErrnoWithFilename(PyExc_OSError,
- fullpath);
- goto error;
- }
- kind = st.st_mode & S_IFMT;
- }
-
- /* quit early? */
- if (skip && kind == S_IFDIR && !strcmp(ent->d_name, skip)) {
- ret = PyList_New(0);
- goto error;
- }
-
- if (keepstat) {
- stat = makestat(&st);
- if (!stat)
- goto error;
- elem = Py_BuildValue("siN", ent->d_name, kind, stat);
- } else
- elem = Py_BuildValue("si", ent->d_name, kind);
- if (!elem)
- goto error;
- stat = NULL;
-
- PyList_Append(list, elem);
- Py_DECREF(elem);
- }
-
- ret = list;
- Py_INCREF(ret);
-
-error:
- Py_DECREF(list);
- Py_XDECREF(stat);
-error_list:
- closedir(dir);
- /* closedir also closes its dirfd */
- goto error_value;
-error_dir:
-#ifdef AT_SYMLINK_NOFOLLOW
- close(dfd);
-#endif
-error_value:
- return ret;
-}
-
-#ifdef __APPLE__
-
-typedef struct {
- u_int32_t length;
- attrreference_t name;
- fsobj_type_t obj_type;
- struct timespec mtime;
-#if __LITTLE_ENDIAN__
- mode_t access_mask;
- uint16_t padding;
-#else
- uint16_t padding;
- mode_t access_mask;
-#endif
- off_t size;
-} __attribute__((packed)) attrbuf_entry;
-
-int attrkind(attrbuf_entry *entry)
-{
- switch (entry->obj_type) {
- case VREG: return S_IFREG;
- case VDIR: return S_IFDIR;
- case VLNK: return S_IFLNK;
- case VBLK: return S_IFBLK;
- case VCHR: return S_IFCHR;
- case VFIFO: return S_IFIFO;
- case VSOCK: return S_IFSOCK;
- }
- return -1;
-}
-
-/* get these many entries at a time */
-#define LISTDIR_BATCH_SIZE 50
-
-static PyObject *_listdir_batch(char *path, int pathlen, int keepstat,
- char *skip, bool *fallback)
-{
- PyObject *list, *elem, *stat = NULL, *ret = NULL;
- int kind, err;
- unsigned long index;
- unsigned int count, old_state, new_state;
- bool state_seen = false;
- attrbuf_entry *entry;
- /* from the getattrlist(2) man page: a path can be no longer than
- (NAME_MAX * 3 + 1) bytes. Also, "The getattrlist() function will
- silently truncate attribute data if attrBufSize is too small." So
- pass in a buffer big enough for the worst case. */
- char attrbuf[LISTDIR_BATCH_SIZE * (sizeof(attrbuf_entry) + NAME_MAX * 3 + 1)];
- unsigned int basep_unused;
-
- struct stat st;
- int dfd = -1;
-
- /* these must match the attrbuf_entry struct, otherwise you'll end up
- with garbage */
- struct attrlist requested_attr = {0};
- requested_attr.bitmapcount = ATTR_BIT_MAP_COUNT;
- requested_attr.commonattr = (ATTR_CMN_NAME | ATTR_CMN_OBJTYPE |
- ATTR_CMN_MODTIME | ATTR_CMN_ACCESSMASK);
- requested_attr.fileattr = ATTR_FILE_DATALENGTH;
-
- *fallback = false;
-
- if (pathlen >= PATH_MAX) {
- errno = ENAMETOOLONG;
- PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
- goto error_value;
- }
-
- dfd = open(path, O_RDONLY);
- if (dfd == -1) {
- PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
- goto error_value;
- }
-
- list = PyList_New(0);
- if (!list)
- goto error_dir;
-
- do {
- count = LISTDIR_BATCH_SIZE;
- err = getdirentriesattr(dfd, &requested_attr, &attrbuf,
- sizeof(attrbuf), &count, &basep_unused,
- &new_state, 0);
- if (err < 0) {
- if (errno == ENOTSUP) {
- /* We're on a filesystem that doesn't support
- getdirentriesattr. Fall back to the
- stat-based implementation. */
- *fallback = true;
- } else
- PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
- goto error;
- }
-
- if (!state_seen) {
- old_state = new_state;
- state_seen = true;
- } else if (old_state != new_state) {
- /* There's an edge case with getdirentriesattr. Consider
- the following initial list of files:
-
- a
- b
- <--
- c
- d
-
- If the iteration is paused at the arrow, and b is
- deleted before it is resumed, getdirentriesattr will
- not return d at all! Ordinarily we're expected to
- restart the iteration from the beginning. To avoid
- getting stuck in a retry loop here, fall back to
- stat. */
- *fallback = true;
- goto error;
- }
-
- entry = (attrbuf_entry *)attrbuf;
-
- for (index = 0; index < count; index++) {
- char *filename = ((char *)&entry->name) +
- entry->name.attr_dataoffset;
-
- if (!strcmp(filename, ".") || !strcmp(filename, ".."))
- continue;
-
- kind = attrkind(entry);
- if (kind == -1) {
- PyErr_Format(PyExc_OSError,
- "unknown object type %u for file "
- "%s%s!",
- entry->obj_type, path, filename);
- goto error;
- }
-
- /* quit early? */
- if (skip && kind == S_IFDIR && !strcmp(filename, skip)) {
- ret = PyList_New(0);
- goto error;
- }
-
- if (keepstat) {
- /* from the getattrlist(2) man page: "Only the
- permission bits ... are valid". */
- st.st_mode = (entry->access_mask & ~S_IFMT) | kind;
- st.st_mtime = entry->mtime.tv_sec;
- st.st_size = entry->size;
- stat = makestat(&st);
- if (!stat)
- goto error;
- elem = Py_BuildValue("siN", filename, kind, stat);
- } else
- elem = Py_BuildValue("si", filename, kind);
- if (!elem)
- goto error;
- stat = NULL;
-
- PyList_Append(list, elem);
- Py_DECREF(elem);
-
- entry = (attrbuf_entry *)((char *)entry + entry->length);
- }
- } while (err == 0);
-
- ret = list;
- Py_INCREF(ret);
-
-error:
- Py_DECREF(list);
- Py_XDECREF(stat);
-error_dir:
- close(dfd);
-error_value:
- return ret;
-}
-
-#endif /* __APPLE__ */
-
-static PyObject *_listdir(char *path, int pathlen, int keepstat, char *skip)
-{
-#ifdef __APPLE__
- PyObject *ret;
- bool fallback = false;
-
- ret = _listdir_batch(path, pathlen, keepstat, skip, &fallback);
- if (ret != NULL || !fallback)
- return ret;
-#endif
- return _listdir_stat(path, pathlen, keepstat, skip);
-}
-
-static PyObject *statfiles(PyObject *self, PyObject *args)
-{
- PyObject *names, *stats;
- Py_ssize_t i, count;
-
- if (!PyArg_ParseTuple(args, "O:statfiles", &names))
- return NULL;
-
- count = PySequence_Length(names);
- if (count == -1) {
- PyErr_SetString(PyExc_TypeError, "not a sequence");
- return NULL;
- }
-
- stats = PyList_New(count);
- if (stats == NULL)
- return NULL;
-
- for (i = 0; i < count; i++) {
- PyObject *stat, *pypath;
- struct stat st;
- int ret, kind;
- char *path;
-
- /* With a large file count or on a slow filesystem,
- don't block signals for long (issue4878). */
- if ((i % 1000) == 999 && PyErr_CheckSignals() == -1)
- goto bail;
-
- pypath = PySequence_GetItem(names, i);
- if (!pypath)
- goto bail;
- path = PyBytes_AsString(pypath);
- if (path == NULL) {
- Py_DECREF(pypath);
- PyErr_SetString(PyExc_TypeError, "not a string");
- goto bail;
- }
- ret = lstat(path, &st);
- Py_DECREF(pypath);
- kind = st.st_mode & S_IFMT;
- if (ret != -1 && (kind == S_IFREG || kind == S_IFLNK)) {
- stat = makestat(&st);
- if (stat == NULL)
- goto bail;
- PyList_SET_ITEM(stats, i, stat);
- } else {
- Py_INCREF(Py_None);
- PyList_SET_ITEM(stats, i, Py_None);
- }
- }
-
- return stats;
-
-bail:
- Py_DECREF(stats);
- return NULL;
-}
-
-/*
- * recvfds() simply does not release GIL during blocking io operation because
- * command server is known to be single-threaded.
- *
- * Old systems such as Solaris don't provide CMSG_LEN, msg_control, etc.
- * Currently, recvfds() is not supported on these platforms.
- */
-#ifdef CMSG_LEN
-
-static ssize_t recvfdstobuf(int sockfd, int **rfds, void *cbuf, size_t cbufsize)
-{
- char dummy[1];
- struct iovec iov = {dummy, sizeof(dummy)};
- struct msghdr msgh = {0};
- struct cmsghdr *cmsg;
-
- msgh.msg_iov = &iov;
- msgh.msg_iovlen = 1;
- msgh.msg_control = cbuf;
- msgh.msg_controllen = (socklen_t)cbufsize;
- if (recvmsg(sockfd, &msgh, 0) < 0)
- return -1;
-
- for (cmsg = CMSG_FIRSTHDR(&msgh); cmsg;
- cmsg = CMSG_NXTHDR(&msgh, cmsg)) {
- if (cmsg->cmsg_level != SOL_SOCKET ||
- cmsg->cmsg_type != SCM_RIGHTS)
- continue;
- *rfds = (int *)CMSG_DATA(cmsg);
- return (cmsg->cmsg_len - CMSG_LEN(0)) / sizeof(int);
- }
-
- *rfds = cbuf;
- return 0;
-}
-
-static PyObject *recvfds(PyObject *self, PyObject *args)
-{
- int sockfd;
- int *rfds = NULL;
- ssize_t rfdscount, i;
- char cbuf[256];
- PyObject *rfdslist = NULL;
-
- if (!PyArg_ParseTuple(args, "i", &sockfd))
- return NULL;
-
- rfdscount = recvfdstobuf(sockfd, &rfds, cbuf, sizeof(cbuf));
- if (rfdscount < 0)
- return PyErr_SetFromErrno(PyExc_OSError);
-
- rfdslist = PyList_New(rfdscount);
- if (!rfdslist)
- goto bail;
- for (i = 0; i < rfdscount; i++) {
- PyObject *obj = PyLong_FromLong(rfds[i]);
- if (!obj)
- goto bail;
- PyList_SET_ITEM(rfdslist, i, obj);
- }
- return rfdslist;
-
-bail:
- Py_XDECREF(rfdslist);
- return NULL;
-}
-
-#endif /* CMSG_LEN */
-
-#if defined(HAVE_SETPROCTITLE)
-/* setproctitle is the first choice - available in FreeBSD */
-#define SETPROCNAME_USE_SETPROCTITLE
-#elif (defined(__linux__) || defined(__APPLE__)) && PY_MAJOR_VERSION == 2
-/* rewrite the argv buffer in place - works in Linux and OS X. Py_GetArgcArgv
- * in Python 3 returns the copied wchar_t **argv, thus unsupported. */
-#define SETPROCNAME_USE_ARGVREWRITE
-#else
-#define SETPROCNAME_USE_NONE
-#endif
-
-#ifndef SETPROCNAME_USE_NONE
-static PyObject *setprocname(PyObject *self, PyObject *args)
-{
- const char *name = NULL;
- if (!PyArg_ParseTuple(args, "s", &name))
- return NULL;
-
-#if defined(SETPROCNAME_USE_SETPROCTITLE)
- setproctitle("%s", name);
-#elif defined(SETPROCNAME_USE_ARGVREWRITE)
- {
- static char *argvstart = NULL;
- static size_t argvsize = 0;
- if (argvstart == NULL) {
- int argc = 0, i;
- char **argv = NULL;
- char *argvend;
- extern void Py_GetArgcArgv(int *argc, char ***argv);
- Py_GetArgcArgv(&argc, &argv);
-
- /* Check the memory we can use. Typically, argv[i] and
- * argv[i + 1] are continuous. */
- argvend = argvstart = argv[0];
- for (i = 0; i < argc; ++i) {
- if (argv[i] > argvend || argv[i] < argvstart)
- break; /* not continuous */
- size_t len = strlen(argv[i]);
- argvend = argv[i] + len + 1 /* '\0' */;
- }
- if (argvend > argvstart) /* sanity check */
- argvsize = argvend - argvstart;
- }
-
- if (argvstart && argvsize > 1) {
- int n = snprintf(argvstart, argvsize, "%s", name);
- if (n >= 0 && (size_t)n < argvsize)
- memset(argvstart + n, 0, argvsize - n);
- }
- }
-#endif
-
- Py_RETURN_NONE;
-}
-#endif /* ndef SETPROCNAME_USE_NONE */
-
-#if defined(HAVE_BSD_STATFS)
-static const char *describefstype(const struct statfs *pbuf)
-{
- /* BSD or OSX provides a f_fstypename field */
- return pbuf->f_fstypename;
-}
-#elif defined(HAVE_LINUX_STATFS)
-static const char *describefstype(const struct statfs *pbuf)
-{
- /* Begin of Linux filesystems */
-#ifdef ADFS_SUPER_MAGIC
- if (pbuf->f_type == ADFS_SUPER_MAGIC)
- return "adfs";
-#endif
-#ifdef AFFS_SUPER_MAGIC
- if (pbuf->f_type == AFFS_SUPER_MAGIC)
- return "affs";
-#endif
-#ifdef AUTOFS_SUPER_MAGIC
- if (pbuf->f_type == AUTOFS_SUPER_MAGIC)
- return "autofs";
-#endif
-#ifdef BDEVFS_MAGIC
- if (pbuf->f_type == BDEVFS_MAGIC)
- return "bdevfs";
-#endif
-#ifdef BEFS_SUPER_MAGIC
- if (pbuf->f_type == BEFS_SUPER_MAGIC)
- return "befs";
-#endif
-#ifdef BFS_MAGIC
- if (pbuf->f_type == BFS_MAGIC)
- return "bfs";
-#endif
-#ifdef BINFMTFS_MAGIC
- if (pbuf->f_type == BINFMTFS_MAGIC)
- return "binfmtfs";
-#endif
-#ifdef BTRFS_SUPER_MAGIC
- if (pbuf->f_type == BTRFS_SUPER_MAGIC)
- return "btrfs";
-#endif
-#ifdef CGROUP_SUPER_MAGIC
- if (pbuf->f_type == CGROUP_SUPER_MAGIC)
- return "cgroup";
-#endif
-#ifdef CIFS_MAGIC_NUMBER
- if (pbuf->f_type == CIFS_MAGIC_NUMBER)
- return "cifs";
-#endif
-#ifdef CODA_SUPER_MAGIC
- if (pbuf->f_type == CODA_SUPER_MAGIC)
- return "coda";
-#endif
-#ifdef COH_SUPER_MAGIC
- if (pbuf->f_type == COH_SUPER_MAGIC)
- return "coh";
-#endif
-#ifdef CRAMFS_MAGIC
- if (pbuf->f_type == CRAMFS_MAGIC)
- return "cramfs";
-#endif
-#ifdef DEBUGFS_MAGIC
- if (pbuf->f_type == DEBUGFS_MAGIC)
- return "debugfs";
-#endif
-#ifdef DEVFS_SUPER_MAGIC
- if (pbuf->f_type == DEVFS_SUPER_MAGIC)
- return "devfs";
-#endif
-#ifdef DEVPTS_SUPER_MAGIC
- if (pbuf->f_type == DEVPTS_SUPER_MAGIC)
- return "devpts";
-#endif
-#ifdef EFIVARFS_MAGIC
- if (pbuf->f_type == EFIVARFS_MAGIC)
- return "efivarfs";
-#endif
-#ifdef EFS_SUPER_MAGIC
- if (pbuf->f_type == EFS_SUPER_MAGIC)
- return "efs";
-#endif
-#ifdef EXT_SUPER_MAGIC
- if (pbuf->f_type == EXT_SUPER_MAGIC)
- return "ext";
-#endif
-#ifdef EXT2_OLD_SUPER_MAGIC
- if (pbuf->f_type == EXT2_OLD_SUPER_MAGIC)
- return "ext2";
-#endif
-#ifdef EXT2_SUPER_MAGIC
- if (pbuf->f_type == EXT2_SUPER_MAGIC)
- return "ext2";
-#endif
-#ifdef EXT3_SUPER_MAGIC
- if (pbuf->f_type == EXT3_SUPER_MAGIC)
- return "ext3";
-#endif
-#ifdef EXT4_SUPER_MAGIC
- if (pbuf->f_type == EXT4_SUPER_MAGIC)
- return "ext4";
-#endif
-#ifdef F2FS_SUPER_MAGIC
- if (pbuf->f_type == F2FS_SUPER_MAGIC)
- return "f2fs";
-#endif
-#ifdef FUSE_SUPER_MAGIC
- if (pbuf->f_type == FUSE_SUPER_MAGIC)
- return "fuse";
-#endif
-#ifdef FUTEXFS_SUPER_MAGIC
- if (pbuf->f_type == FUTEXFS_SUPER_MAGIC)
- return "futexfs";
-#endif
-#ifdef HFS_SUPER_MAGIC
- if (pbuf->f_type == HFS_SUPER_MAGIC)
- return "hfs";
-#endif
-#ifdef HOSTFS_SUPER_MAGIC
- if (pbuf->f_type == HOSTFS_SUPER_MAGIC)
- return "hostfs";
-#endif
-#ifdef HPFS_SUPER_MAGIC
- if (pbuf->f_type == HPFS_SUPER_MAGIC)
- return "hpfs";
-#endif
-#ifdef HUGETLBFS_MAGIC
- if (pbuf->f_type == HUGETLBFS_MAGIC)
- return "hugetlbfs";
-#endif
-#ifdef ISOFS_SUPER_MAGIC
- if (pbuf->f_type == ISOFS_SUPER_MAGIC)
- return "isofs";
-#endif
-#ifdef JFFS2_SUPER_MAGIC
- if (pbuf->f_type == JFFS2_SUPER_MAGIC)
- return "jffs2";
-#endif
-#ifdef JFS_SUPER_MAGIC
- if (pbuf->f_type == JFS_SUPER_MAGIC)
- return "jfs";
-#endif
-#ifdef MINIX_SUPER_MAGIC
- if (pbuf->f_type == MINIX_SUPER_MAGIC)
- return "minix";
-#endif
-#ifdef MINIX2_SUPER_MAGIC
- if (pbuf->f_type == MINIX2_SUPER_MAGIC)
- return "minix2";
-#endif
-#ifdef MINIX3_SUPER_MAGIC
- if (pbuf->f_type == MINIX3_SUPER_MAGIC)
- return "minix3";
-#endif
-#ifdef MQUEUE_MAGIC
- if (pbuf->f_type == MQUEUE_MAGIC)
- return "mqueue";
-#endif
-#ifdef MSDOS_SUPER_MAGIC
- if (pbuf->f_type == MSDOS_SUPER_MAGIC)
- return "msdos";
-#endif
-#ifdef NCP_SUPER_MAGIC
- if (pbuf->f_type == NCP_SUPER_MAGIC)
- return "ncp";
-#endif
-#ifdef NFS_SUPER_MAGIC
- if (pbuf->f_type == NFS_SUPER_MAGIC)
- return "nfs";
-#endif
-#ifdef NILFS_SUPER_MAGIC
- if (pbuf->f_type == NILFS_SUPER_MAGIC)
- return "nilfs";
-#endif
-#ifdef NTFS_SB_MAGIC
- if (pbuf->f_type == NTFS_SB_MAGIC)
- return "ntfs-sb";
-#endif
-#ifdef OCFS2_SUPER_MAGIC
- if (pbuf->f_type == OCFS2_SUPER_MAGIC)
- return "ocfs2";
-#endif
-#ifdef OPENPROM_SUPER_MAGIC
- if (pbuf->f_type == OPENPROM_SUPER_MAGIC)
- return "openprom";
-#endif
-#ifdef OVERLAYFS_SUPER_MAGIC
- if (pbuf->f_type == OVERLAYFS_SUPER_MAGIC)
- return "overlay";
-#endif
-#ifdef PIPEFS_MAGIC
- if (pbuf->f_type == PIPEFS_MAGIC)
- return "pipefs";
-#endif
-#ifdef PROC_SUPER_MAGIC
- if (pbuf->f_type == PROC_SUPER_MAGIC)
- return "proc";
-#endif
-#ifdef PSTOREFS_MAGIC
- if (pbuf->f_type == PSTOREFS_MAGIC)
- return "pstorefs";
-#endif
-#ifdef QNX4_SUPER_MAGIC
- if (pbuf->f_type == QNX4_SUPER_MAGIC)
- return "qnx4";
-#endif
-#ifdef QNX6_SUPER_MAGIC
- if (pbuf->f_type == QNX6_SUPER_MAGIC)
- return "qnx6";
-#endif
-#ifdef RAMFS_MAGIC
- if (pbuf->f_type == RAMFS_MAGIC)
- return "ramfs";
-#endif
-#ifdef REISERFS_SUPER_MAGIC
- if (pbuf->f_type == REISERFS_SUPER_MAGIC)
- return "reiserfs";
-#endif
-#ifdef ROMFS_MAGIC
- if (pbuf->f_type == ROMFS_MAGIC)
- return "romfs";
-#endif
-#ifdef SECURITYFS_MAGIC
- if (pbuf->f_type == SECURITYFS_MAGIC)
- return "securityfs";
-#endif
-#ifdef SELINUX_MAGIC
- if (pbuf->f_type == SELINUX_MAGIC)
- return "selinux";
-#endif
-#ifdef SMACK_MAGIC
- if (pbuf->f_type == SMACK_MAGIC)
- return "smack";
-#endif
-#ifdef SMB_SUPER_MAGIC
- if (pbuf->f_type == SMB_SUPER_MAGIC)
- return "smb";
-#endif
-#ifdef SOCKFS_MAGIC
- if (pbuf->f_type == SOCKFS_MAGIC)
- return "sockfs";
-#endif
-#ifdef SQUASHFS_MAGIC
- if (pbuf->f_type == SQUASHFS_MAGIC)
- return "squashfs";
-#endif
-#ifdef SYSFS_MAGIC
- if (pbuf->f_type == SYSFS_MAGIC)
- return "sysfs";
-#endif
-#ifdef SYSV2_SUPER_MAGIC
- if (pbuf->f_type == SYSV2_SUPER_MAGIC)
- return "sysv2";
-#endif
-#ifdef SYSV4_SUPER_MAGIC
- if (pbuf->f_type == SYSV4_SUPER_MAGIC)
- return "sysv4";
-#endif
-#ifdef TMPFS_MAGIC
- if (pbuf->f_type == TMPFS_MAGIC)
- return "tmpfs";
-#endif
-#ifdef UDF_SUPER_MAGIC
- if (pbuf->f_type == UDF_SUPER_MAGIC)
- return "udf";
-#endif
-#ifdef UFS_MAGIC
- if (pbuf->f_type == UFS_MAGIC)
- return "ufs";
-#endif
-#ifdef USBDEVICE_SUPER_MAGIC
- if (pbuf->f_type == USBDEVICE_SUPER_MAGIC)
- return "usbdevice";
-#endif
-#ifdef V9FS_MAGIC
- if (pbuf->f_type == V9FS_MAGIC)
- return "v9fs";
-#endif
-#ifdef VXFS_SUPER_MAGIC
- if (pbuf->f_type == VXFS_SUPER_MAGIC)
- return "vxfs";
-#endif
-#ifdef XENFS_SUPER_MAGIC
- if (pbuf->f_type == XENFS_SUPER_MAGIC)
- return "xenfs";
-#endif
-#ifdef XENIX_SUPER_MAGIC
- if (pbuf->f_type == XENIX_SUPER_MAGIC)
- return "xenix";
-#endif
-#ifdef XFS_SUPER_MAGIC
- if (pbuf->f_type == XFS_SUPER_MAGIC)
- return "xfs";
-#endif
- /* End of Linux filesystems */
- return NULL;
-}
-#endif /* def HAVE_LINUX_STATFS */
-
-#if defined(HAVE_BSD_STATFS) || defined(HAVE_LINUX_STATFS)
-/* given a directory path, return filesystem type name (best-effort) */
-static PyObject *getfstype(PyObject *self, PyObject *args)
-{
- const char *path = NULL;
- struct statfs buf;
- int r;
- if (!PyArg_ParseTuple(args, "s", &path))
- return NULL;
-
- memset(&buf, 0, sizeof(buf));
- r = statfs(path, &buf);
- if (r != 0)
- return PyErr_SetFromErrno(PyExc_OSError);
- return Py_BuildValue("s", describefstype(&buf));
-}
-#endif /* defined(HAVE_LINUX_STATFS) || defined(HAVE_BSD_STATFS) */
-
-#endif /* ndef _WIN32 */
-
-static PyObject *listdir(PyObject *self, PyObject *args, PyObject *kwargs)
-{
- PyObject *statobj = NULL; /* initialize - optional arg */
- PyObject *skipobj = NULL; /* initialize - optional arg */
- char *path, *skip = NULL;
- int wantstat, plen;
-
- static char *kwlist[] = {"path", "stat", "skip", NULL};
-
- if (!PyArg_ParseTupleAndKeywords(args, kwargs, "s#|OO:listdir",
- kwlist, &path, &plen, &statobj, &skipobj))
- return NULL;
-
- wantstat = statobj && PyObject_IsTrue(statobj);
-
- if (skipobj && skipobj != Py_None) {
- skip = PyBytes_AsString(skipobj);
- if (!skip)
- return NULL;
- }
-
- return _listdir(path, plen, wantstat, skip);
-}
-
-#ifdef _WIN32
-static PyObject *posixfile(PyObject *self, PyObject *args, PyObject *kwds)
-{
- static char *kwlist[] = {"name", "mode", "buffering", NULL};
- PyObject *file_obj = NULL;
- char *name = NULL;
- char *mode = "rb";
- DWORD access = 0;
- DWORD creation;
- HANDLE handle;
- int fd, flags = 0;
- int bufsize = -1;
- char m0, m1, m2;
- char fpmode[4];
- int fppos = 0;
- int plus;
- FILE *fp;
-
- if (!PyArg_ParseTupleAndKeywords(args, kwds, "et|si:posixfile", kwlist,
- Py_FileSystemDefaultEncoding,
- &name, &mode, &bufsize))
- return NULL;
-
- m0 = mode[0];
- m1 = m0 ? mode[1] : '\0';
- m2 = m1 ? mode[2] : '\0';
- plus = m1 == '+' || m2 == '+';
-
- fpmode[fppos++] = m0;
- if (m1 == 'b' || m2 == 'b') {
- flags = _O_BINARY;
- fpmode[fppos++] = 'b';
- }
- else
- flags = _O_TEXT;
- if (m0 == 'r' && !plus) {
- flags |= _O_RDONLY;
- access = GENERIC_READ;
- } else {
- /*
- work around http://support.microsoft.com/kb/899149 and
- set _O_RDWR for 'w' and 'a', even if mode has no '+'
- */
- flags |= _O_RDWR;
- access = GENERIC_READ | GENERIC_WRITE;
- fpmode[fppos++] = '+';
- }
- fpmode[fppos++] = '\0';
-
- switch (m0) {
- case 'r':
- creation = OPEN_EXISTING;
- break;
- case 'w':
- creation = CREATE_ALWAYS;
- break;
- case 'a':
- creation = OPEN_ALWAYS;
- flags |= _O_APPEND;
- break;
- default:
- PyErr_Format(PyExc_ValueError,
- "mode string must begin with one of 'r', 'w', "
- "or 'a', not '%c'", m0);
- goto bail;
- }
-
- handle = CreateFile(name, access,
- FILE_SHARE_READ | FILE_SHARE_WRITE |
- FILE_SHARE_DELETE,
- NULL,
- creation,
- FILE_ATTRIBUTE_NORMAL,
- 0);
-
- if (handle == INVALID_HANDLE_VALUE) {
- PyErr_SetFromWindowsErrWithFilename(GetLastError(), name);
- goto bail;
- }
-
- fd = _open_osfhandle((intptr_t)handle, flags);
-
- if (fd == -1) {
- CloseHandle(handle);
- PyErr_SetFromErrnoWithFilename(PyExc_IOError, name);
- goto bail;
- }
-#ifndef IS_PY3K
- fp = _fdopen(fd, fpmode);
- if (fp == NULL) {
- _close(fd);
- PyErr_SetFromErrnoWithFilename(PyExc_IOError, name);
- goto bail;
- }
-
- file_obj = PyFile_FromFile(fp, name, mode, fclose);
- if (file_obj == NULL) {
- fclose(fp);
- goto bail;
- }
-
- PyFile_SetBufSize(file_obj, bufsize);
-#else
- file_obj = PyFile_FromFd(fd, name, mode, bufsize, NULL, NULL, NULL, 1);
- if (file_obj == NULL)
- goto bail;
-#endif
-bail:
- PyMem_Free(name);
- return file_obj;
-}
-#endif
-
-#ifdef __APPLE__
-#include <ApplicationServices/ApplicationServices.h>
-
-static PyObject *isgui(PyObject *self)
-{
- CFDictionaryRef dict = CGSessionCopyCurrentDictionary();
-
- if (dict != NULL) {
- CFRelease(dict);
- Py_RETURN_TRUE;
- } else {
- Py_RETURN_FALSE;
- }
-}
-#endif
-
-static char osutil_doc[] = "Native operating system services.";
-
-static PyMethodDef methods[] = {
- {"listdir", (PyCFunction)listdir, METH_VARARGS | METH_KEYWORDS,
- "list a directory\n"},
-#ifdef _WIN32
- {"posixfile", (PyCFunction)posixfile, METH_VARARGS | METH_KEYWORDS,
- "Open a file with POSIX-like semantics.\n"
-"On error, this function may raise either a WindowsError or an IOError."},
-#else
- {"statfiles", (PyCFunction)statfiles, METH_VARARGS | METH_KEYWORDS,
- "stat a series of files or symlinks\n"
-"Returns None for non-existent entries and entries of other types.\n"},
-#ifdef CMSG_LEN
- {"recvfds", (PyCFunction)recvfds, METH_VARARGS,
- "receive list of file descriptors via socket\n"},
-#endif
-#ifndef SETPROCNAME_USE_NONE
- {"setprocname", (PyCFunction)setprocname, METH_VARARGS,
- "set process title (best-effort)\n"},
-#endif
-#if defined(HAVE_BSD_STATFS) || defined(HAVE_LINUX_STATFS)
- {"getfstype", (PyCFunction)getfstype, METH_VARARGS,
- "get filesystem type (best-effort)\n"},
-#endif
-#endif /* ndef _WIN32 */
-#ifdef __APPLE__
- {
- "isgui", (PyCFunction)isgui, METH_NOARGS,
- "Is a CoreGraphics session available?"
- },
-#endif
- {NULL, NULL}
-};
-
-#ifdef IS_PY3K
-static struct PyModuleDef osutil_module = {
- PyModuleDef_HEAD_INIT,
- "osutil",
- osutil_doc,
- -1,
- methods
-};
-
-PyMODINIT_FUNC PyInit_osutil(void)
-{
- if (PyType_Ready(&listdir_stat_type) < 0)
- return NULL;
-
- return PyModule_Create(&osutil_module);
-}
-#else
-PyMODINIT_FUNC initosutil(void)
-{
- if (PyType_Ready(&listdir_stat_type) == -1)
- return;
-
- Py_InitModule3("osutil", methods, osutil_doc);
-}
-#endif
--- a/mercurial/parsers.c Wed Jul 05 11:24:22 2017 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,2943 +0,0 @@
-/*
- parsers.c - efficient content parsing
-
- Copyright 2008 Matt Mackall <mpm@selenic.com> and others
-
- This software may be used and distributed according to the terms of
- the GNU General Public License, incorporated herein by reference.
-*/
-
-#include <Python.h>
-#include <ctype.h>
-#include <stddef.h>
-#include <string.h>
-
-#include "util.h"
-#include "bitmanipulation.h"
-
-#ifdef IS_PY3K
-/* The mapping of Python types is meant to be temporary to get Python
- * 3 to compile. We should remove this once Python 3 support is fully
- * supported and proper types are used in the extensions themselves. */
-#define PyInt_Type PyLong_Type
-#define PyInt_Check PyLong_Check
-#define PyInt_FromLong PyLong_FromLong
-#define PyInt_FromSsize_t PyLong_FromSsize_t
-#define PyInt_AS_LONG PyLong_AS_LONG
-#define PyInt_AsLong PyLong_AsLong
-#endif
-
-static char *versionerrortext = "Python minor version mismatch";
-
-static int8_t hextable[256] = {
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, -1, -1, -1, -1, -1, -1, /* 0-9 */
- -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, /* A-F */
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, /* a-f */
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1
-};
-
-static char lowertable[128] = {
- '\x00', '\x01', '\x02', '\x03', '\x04', '\x05', '\x06', '\x07',
- '\x08', '\x09', '\x0a', '\x0b', '\x0c', '\x0d', '\x0e', '\x0f',
- '\x10', '\x11', '\x12', '\x13', '\x14', '\x15', '\x16', '\x17',
- '\x18', '\x19', '\x1a', '\x1b', '\x1c', '\x1d', '\x1e', '\x1f',
- '\x20', '\x21', '\x22', '\x23', '\x24', '\x25', '\x26', '\x27',
- '\x28', '\x29', '\x2a', '\x2b', '\x2c', '\x2d', '\x2e', '\x2f',
- '\x30', '\x31', '\x32', '\x33', '\x34', '\x35', '\x36', '\x37',
- '\x38', '\x39', '\x3a', '\x3b', '\x3c', '\x3d', '\x3e', '\x3f',
- '\x40',
- '\x61', '\x62', '\x63', '\x64', '\x65', '\x66', '\x67', /* A-G */
- '\x68', '\x69', '\x6a', '\x6b', '\x6c', '\x6d', '\x6e', '\x6f', /* H-O */
- '\x70', '\x71', '\x72', '\x73', '\x74', '\x75', '\x76', '\x77', /* P-W */
- '\x78', '\x79', '\x7a', /* X-Z */
- '\x5b', '\x5c', '\x5d', '\x5e', '\x5f',
- '\x60', '\x61', '\x62', '\x63', '\x64', '\x65', '\x66', '\x67',
- '\x68', '\x69', '\x6a', '\x6b', '\x6c', '\x6d', '\x6e', '\x6f',
- '\x70', '\x71', '\x72', '\x73', '\x74', '\x75', '\x76', '\x77',
- '\x78', '\x79', '\x7a', '\x7b', '\x7c', '\x7d', '\x7e', '\x7f'
-};
-
-static char uppertable[128] = {
- '\x00', '\x01', '\x02', '\x03', '\x04', '\x05', '\x06', '\x07',
- '\x08', '\x09', '\x0a', '\x0b', '\x0c', '\x0d', '\x0e', '\x0f',
- '\x10', '\x11', '\x12', '\x13', '\x14', '\x15', '\x16', '\x17',
- '\x18', '\x19', '\x1a', '\x1b', '\x1c', '\x1d', '\x1e', '\x1f',
- '\x20', '\x21', '\x22', '\x23', '\x24', '\x25', '\x26', '\x27',
- '\x28', '\x29', '\x2a', '\x2b', '\x2c', '\x2d', '\x2e', '\x2f',
- '\x30', '\x31', '\x32', '\x33', '\x34', '\x35', '\x36', '\x37',
- '\x38', '\x39', '\x3a', '\x3b', '\x3c', '\x3d', '\x3e', '\x3f',
- '\x40', '\x41', '\x42', '\x43', '\x44', '\x45', '\x46', '\x47',
- '\x48', '\x49', '\x4a', '\x4b', '\x4c', '\x4d', '\x4e', '\x4f',
- '\x50', '\x51', '\x52', '\x53', '\x54', '\x55', '\x56', '\x57',
- '\x58', '\x59', '\x5a', '\x5b', '\x5c', '\x5d', '\x5e', '\x5f',
- '\x60',
- '\x41', '\x42', '\x43', '\x44', '\x45', '\x46', '\x47', /* a-g */
- '\x48', '\x49', '\x4a', '\x4b', '\x4c', '\x4d', '\x4e', '\x4f', /* h-o */
- '\x50', '\x51', '\x52', '\x53', '\x54', '\x55', '\x56', '\x57', /* p-w */
- '\x58', '\x59', '\x5a', /* x-z */
- '\x7b', '\x7c', '\x7d', '\x7e', '\x7f'
-};
-
-static inline int hexdigit(const char *p, Py_ssize_t off)
-{
- int8_t val = hextable[(unsigned char)p[off]];
-
- if (val >= 0) {
- return val;
- }
-
- PyErr_SetString(PyExc_ValueError, "input contains non-hex character");
- return 0;
-}
-
-/*
- * Turn a hex-encoded string into binary.
- */
-PyObject *unhexlify(const char *str, int len)
-{
- PyObject *ret;
- char *d;
- int i;
-
- ret = PyBytes_FromStringAndSize(NULL, len / 2);
-
- if (!ret)
- return NULL;
-
- d = PyBytes_AsString(ret);
-
- for (i = 0; i < len;) {
- int hi = hexdigit(str, i++);
- int lo = hexdigit(str, i++);
- *d++ = (hi << 4) | lo;
- }
-
- return ret;
-}
-
-static inline PyObject *_asciitransform(PyObject *str_obj,
- const char table[128],
- PyObject *fallback_fn)
-{
- char *str, *newstr;
- Py_ssize_t i, len;
- PyObject *newobj = NULL;
- PyObject *ret = NULL;
-
- str = PyBytes_AS_STRING(str_obj);
- len = PyBytes_GET_SIZE(str_obj);
-
- newobj = PyBytes_FromStringAndSize(NULL, len);
- if (!newobj)
- goto quit;
-
- newstr = PyBytes_AS_STRING(newobj);
-
- for (i = 0; i < len; i++) {
- char c = str[i];
- if (c & 0x80) {
- if (fallback_fn != NULL) {
- ret = PyObject_CallFunctionObjArgs(fallback_fn,
- str_obj, NULL);
- } else {
- PyObject *err = PyUnicodeDecodeError_Create(
- "ascii", str, len, i, (i + 1),
- "unexpected code byte");
- PyErr_SetObject(PyExc_UnicodeDecodeError, err);
- Py_XDECREF(err);
- }
- goto quit;
- }
- newstr[i] = table[(unsigned char)c];
- }
-
- ret = newobj;
- Py_INCREF(ret);
-quit:
- Py_XDECREF(newobj);
- return ret;
-}
-
-static PyObject *asciilower(PyObject *self, PyObject *args)
-{
- PyObject *str_obj;
- if (!PyArg_ParseTuple(args, "O!:asciilower", &PyBytes_Type, &str_obj))
- return NULL;
- return _asciitransform(str_obj, lowertable, NULL);
-}
-
-static PyObject *asciiupper(PyObject *self, PyObject *args)
-{
- PyObject *str_obj;
- if (!PyArg_ParseTuple(args, "O!:asciiupper", &PyBytes_Type, &str_obj))
- return NULL;
- return _asciitransform(str_obj, uppertable, NULL);
-}
-
-static inline PyObject *_dict_new_presized(Py_ssize_t expected_size)
-{
- /* _PyDict_NewPresized expects a minused parameter, but it actually
- creates a dictionary that's the nearest power of two bigger than the
- parameter. For example, with the initial minused = 1000, the
- dictionary created has size 1024. Of course in a lot of cases that
- can be greater than the maximum load factor Python's dict object
- expects (= 2/3), so as soon as we cross the threshold we'll resize
- anyway. So create a dictionary that's at least 3/2 the size. */
- return _PyDict_NewPresized(((1 + expected_size) / 2) * 3);
-}
-
-static PyObject *dict_new_presized(PyObject *self, PyObject *args)
-{
- Py_ssize_t expected_size;
-
- if (!PyArg_ParseTuple(args, "n:make_presized_dict", &expected_size))
- return NULL;
-
- return _dict_new_presized(expected_size);
-}
-
-static PyObject *make_file_foldmap(PyObject *self, PyObject *args)
-{
- PyObject *dmap, *spec_obj, *normcase_fallback;
- PyObject *file_foldmap = NULL;
- enum normcase_spec spec;
- PyObject *k, *v;
- dirstateTupleObject *tuple;
- Py_ssize_t pos = 0;
- const char *table;
-
- if (!PyArg_ParseTuple(args, "O!O!O!:make_file_foldmap",
- &PyDict_Type, &dmap,
- &PyInt_Type, &spec_obj,
- &PyFunction_Type, &normcase_fallback))
- goto quit;
-
- spec = (int)PyInt_AS_LONG(spec_obj);
- switch (spec) {
- case NORMCASE_LOWER:
- table = lowertable;
- break;
- case NORMCASE_UPPER:
- table = uppertable;
- break;
- case NORMCASE_OTHER:
- table = NULL;
- break;
- default:
- PyErr_SetString(PyExc_TypeError, "invalid normcasespec");
- goto quit;
- }
-
- /* Add some more entries to deal with additions outside this
- function. */
- file_foldmap = _dict_new_presized((PyDict_Size(dmap) / 10) * 11);
- if (file_foldmap == NULL)
- goto quit;
-
- while (PyDict_Next(dmap, &pos, &k, &v)) {
- if (!dirstate_tuple_check(v)) {
- PyErr_SetString(PyExc_TypeError,
- "expected a dirstate tuple");
- goto quit;
- }
-
- tuple = (dirstateTupleObject *)v;
- if (tuple->state != 'r') {
- PyObject *normed;
- if (table != NULL) {
- normed = _asciitransform(k, table,
- normcase_fallback);
- } else {
- normed = PyObject_CallFunctionObjArgs(
- normcase_fallback, k, NULL);
- }
-
- if (normed == NULL)
- goto quit;
- if (PyDict_SetItem(file_foldmap, normed, k) == -1) {
- Py_DECREF(normed);
- goto quit;
- }
- Py_DECREF(normed);
- }
- }
- return file_foldmap;
-quit:
- Py_XDECREF(file_foldmap);
- return NULL;
-}
-
-/*
- * This code assumes that a manifest is stitched together with newline
- * ('\n') characters.
- */
-static PyObject *parse_manifest(PyObject *self, PyObject *args)
-{
- PyObject *mfdict, *fdict;
- char *str, *start, *end;
- int len;
-
- if (!PyArg_ParseTuple(args, "O!O!s#:parse_manifest",
- &PyDict_Type, &mfdict,
- &PyDict_Type, &fdict,
- &str, &len))
- goto quit;
-
- start = str;
- end = str + len;
- while (start < end) {
- PyObject *file = NULL, *node = NULL;
- PyObject *flags = NULL;
- char *zero = NULL, *newline = NULL;
- ptrdiff_t nlen;
-
- zero = memchr(start, '\0', end - start);
- if (!zero) {
- PyErr_SetString(PyExc_ValueError,
- "manifest entry has no separator");
- goto quit;
- }
-
- newline = memchr(zero + 1, '\n', end - (zero + 1));
- if (!newline) {
- PyErr_SetString(PyExc_ValueError,
- "manifest contains trailing garbage");
- goto quit;
- }
-
- file = PyBytes_FromStringAndSize(start, zero - start);
-
- if (!file)
- goto bail;
-
- nlen = newline - zero - 1;
-
- node = unhexlify(zero + 1, nlen > 40 ? 40 : (int)nlen);
- if (!node)
- goto bail;
-
- if (nlen > 40) {
- flags = PyBytes_FromStringAndSize(zero + 41,
- nlen - 40);
- if (!flags)
- goto bail;
-
- if (PyDict_SetItem(fdict, file, flags) == -1)
- goto bail;
- }
-
- if (PyDict_SetItem(mfdict, file, node) == -1)
- goto bail;
-
- start = newline + 1;
-
- Py_XDECREF(flags);
- Py_XDECREF(node);
- Py_XDECREF(file);
- continue;
- bail:
- Py_XDECREF(flags);
- Py_XDECREF(node);
- Py_XDECREF(file);
- goto quit;
- }
-
- Py_INCREF(Py_None);
- return Py_None;
-quit:
- return NULL;
-}
-
-static inline dirstateTupleObject *make_dirstate_tuple(char state, int mode,
- int size, int mtime)
-{
- dirstateTupleObject *t = PyObject_New(dirstateTupleObject,
- &dirstateTupleType);
- if (!t)
- return NULL;
- t->state = state;
- t->mode = mode;
- t->size = size;
- t->mtime = mtime;
- return t;
-}
-
-static PyObject *dirstate_tuple_new(PyTypeObject *subtype, PyObject *args,
- PyObject *kwds)
-{
- /* We do all the initialization here and not a tp_init function because
- * dirstate_tuple is immutable. */
- dirstateTupleObject *t;
- char state;
- int size, mode, mtime;
- if (!PyArg_ParseTuple(args, "ciii", &state, &mode, &size, &mtime))
- return NULL;
-
- t = (dirstateTupleObject *)subtype->tp_alloc(subtype, 1);
- if (!t)
- return NULL;
- t->state = state;
- t->mode = mode;
- t->size = size;
- t->mtime = mtime;
-
- return (PyObject *)t;
-}
-
-static void dirstate_tuple_dealloc(PyObject *o)
-{
- PyObject_Del(o);
-}
-
-static Py_ssize_t dirstate_tuple_length(PyObject *o)
-{
- return 4;
-}
-
-static PyObject *dirstate_tuple_item(PyObject *o, Py_ssize_t i)
-{
- dirstateTupleObject *t = (dirstateTupleObject *)o;
- switch (i) {
- case 0:
- return PyBytes_FromStringAndSize(&t->state, 1);
- case 1:
- return PyInt_FromLong(t->mode);
- case 2:
- return PyInt_FromLong(t->size);
- case 3:
- return PyInt_FromLong(t->mtime);
- default:
- PyErr_SetString(PyExc_IndexError, "index out of range");
- return NULL;
- }
-}
-
-static PySequenceMethods dirstate_tuple_sq = {
- dirstate_tuple_length, /* sq_length */
- 0, /* sq_concat */
- 0, /* sq_repeat */
- dirstate_tuple_item, /* sq_item */
- 0, /* sq_ass_item */
- 0, /* sq_contains */
- 0, /* sq_inplace_concat */
- 0 /* sq_inplace_repeat */
-};
-
-PyTypeObject dirstateTupleType = {
- PyVarObject_HEAD_INIT(NULL, 0)
- "dirstate_tuple", /* tp_name */
- sizeof(dirstateTupleObject),/* tp_basicsize */
- 0, /* tp_itemsize */
- (destructor)dirstate_tuple_dealloc, /* tp_dealloc */
- 0, /* tp_print */
- 0, /* tp_getattr */
- 0, /* tp_setattr */
- 0, /* tp_compare */
- 0, /* tp_repr */
- 0, /* tp_as_number */
- &dirstate_tuple_sq, /* tp_as_sequence */
- 0, /* tp_as_mapping */
- 0, /* tp_hash */
- 0, /* tp_call */
- 0, /* tp_str */
- 0, /* tp_getattro */
- 0, /* tp_setattro */
- 0, /* tp_as_buffer */
- Py_TPFLAGS_DEFAULT, /* tp_flags */
- "dirstate tuple", /* tp_doc */
- 0, /* tp_traverse */
- 0, /* tp_clear */
- 0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- 0, /* tp_iter */
- 0, /* tp_iternext */
- 0, /* tp_methods */
- 0, /* tp_members */
- 0, /* tp_getset */
- 0, /* tp_base */
- 0, /* tp_dict */
- 0, /* tp_descr_get */
- 0, /* tp_descr_set */
- 0, /* tp_dictoffset */
- 0, /* tp_init */
- 0, /* tp_alloc */
- dirstate_tuple_new, /* tp_new */
-};
-
-static PyObject *parse_dirstate(PyObject *self, PyObject *args)
-{
- PyObject *dmap, *cmap, *parents = NULL, *ret = NULL;
- PyObject *fname = NULL, *cname = NULL, *entry = NULL;
- char state, *cur, *str, *cpos;
- int mode, size, mtime;
- unsigned int flen, len, pos = 40;
- int readlen;
-
- if (!PyArg_ParseTuple(args, "O!O!s#:parse_dirstate",
- &PyDict_Type, &dmap,
- &PyDict_Type, &cmap,
- &str, &readlen))
- goto quit;
-
- len = readlen;
-
- /* read parents */
- if (len < 40) {
- PyErr_SetString(
- PyExc_ValueError, "too little data for parents");
- goto quit;
- }
-
- parents = Py_BuildValue("s#s#", str, 20, str + 20, 20);
- if (!parents)
- goto quit;
-
- /* read filenames */
- while (pos >= 40 && pos < len) {
- if (pos + 17 > len) {
- PyErr_SetString(PyExc_ValueError,
- "overflow in dirstate");
- goto quit;
- }
- cur = str + pos;
- /* unpack header */
- state = *cur;
- mode = getbe32(cur + 1);
- size = getbe32(cur + 5);
- mtime = getbe32(cur + 9);
- flen = getbe32(cur + 13);
- pos += 17;
- cur += 17;
- if (flen > len - pos) {
- PyErr_SetString(PyExc_ValueError, "overflow in dirstate");
- goto quit;
- }
-
- entry = (PyObject *)make_dirstate_tuple(state, mode, size,
- mtime);
- cpos = memchr(cur, 0, flen);
- if (cpos) {
- fname = PyBytes_FromStringAndSize(cur, cpos - cur);
- cname = PyBytes_FromStringAndSize(cpos + 1,
- flen - (cpos - cur) - 1);
- if (!fname || !cname ||
- PyDict_SetItem(cmap, fname, cname) == -1 ||
- PyDict_SetItem(dmap, fname, entry) == -1)
- goto quit;
- Py_DECREF(cname);
- } else {
- fname = PyBytes_FromStringAndSize(cur, flen);
- if (!fname ||
- PyDict_SetItem(dmap, fname, entry) == -1)
- goto quit;
- }
- Py_DECREF(fname);
- Py_DECREF(entry);
- fname = cname = entry = NULL;
- pos += flen;
- }
-
- ret = parents;
- Py_INCREF(ret);
-quit:
- Py_XDECREF(fname);
- Py_XDECREF(cname);
- Py_XDECREF(entry);
- Py_XDECREF(parents);
- return ret;
-}
-
-/*
- * Build a set of non-normal and other parent entries from the dirstate dmap
-*/
-static PyObject *nonnormalotherparententries(PyObject *self, PyObject *args) {
- PyObject *dmap, *fname, *v;
- PyObject *nonnset = NULL, *otherpset = NULL, *result = NULL;
- Py_ssize_t pos;
-
- if (!PyArg_ParseTuple(args, "O!:nonnormalentries",
- &PyDict_Type, &dmap))
- goto bail;
-
- nonnset = PySet_New(NULL);
- if (nonnset == NULL)
- goto bail;
-
- otherpset = PySet_New(NULL);
- if (otherpset == NULL)
- goto bail;
-
- pos = 0;
- while (PyDict_Next(dmap, &pos, &fname, &v)) {
- dirstateTupleObject *t;
- if (!dirstate_tuple_check(v)) {
- PyErr_SetString(PyExc_TypeError,
- "expected a dirstate tuple");
- goto bail;
- }
- t = (dirstateTupleObject *)v;
-
- if (t->state == 'n' && t->size == -2) {
- if (PySet_Add(otherpset, fname) == -1) {
- goto bail;
- }
- }
-
- if (t->state == 'n' && t->mtime != -1)
- continue;
- if (PySet_Add(nonnset, fname) == -1)
- goto bail;
- }
-
- result = Py_BuildValue("(OO)", nonnset, otherpset);
- if (result == NULL)
- goto bail;
- Py_DECREF(nonnset);
- Py_DECREF(otherpset);
- return result;
-bail:
- Py_XDECREF(nonnset);
- Py_XDECREF(otherpset);
- Py_XDECREF(result);
- return NULL;
-}
-
-/*
- * Efficiently pack a dirstate object into its on-disk format.
- */
-static PyObject *pack_dirstate(PyObject *self, PyObject *args)
-{
- PyObject *packobj = NULL;
- PyObject *map, *copymap, *pl, *mtime_unset = NULL;
- Py_ssize_t nbytes, pos, l;
- PyObject *k, *v = NULL, *pn;
- char *p, *s;
- int now;
-
- if (!PyArg_ParseTuple(args, "O!O!Oi:pack_dirstate",
- 	    &PyDict_Type, &map, &PyDict_Type, &copymap,
- &pl, &now))
- return NULL;
-
- if (!PySequence_Check(pl) || PySequence_Size(pl) != 2) {
- PyErr_SetString(PyExc_TypeError, "expected 2-element sequence");
- return NULL;
- }
-
- /* Figure out how much we need to allocate. */
- for (nbytes = 40, pos = 0; PyDict_Next(map, &pos, &k, &v);) {
- PyObject *c;
- if (!PyBytes_Check(k)) {
- PyErr_SetString(PyExc_TypeError, "expected string key");
- goto bail;
- }
- nbytes += PyBytes_GET_SIZE(k) + 17;
- c = PyDict_GetItem(copymap, k);
- if (c) {
- if (!PyBytes_Check(c)) {
- PyErr_SetString(PyExc_TypeError,
- "expected string key");
- goto bail;
- }
- nbytes += PyBytes_GET_SIZE(c) + 1;
- }
- }
-
- packobj = PyBytes_FromStringAndSize(NULL, nbytes);
- if (packobj == NULL)
- goto bail;
-
- p = PyBytes_AS_STRING(packobj);
-
- pn = PySequence_ITEM(pl, 0);
- if (PyBytes_AsStringAndSize(pn, &s, &l) == -1 || l != 20) {
- PyErr_SetString(PyExc_TypeError, "expected a 20-byte hash");
- goto bail;
- }
- memcpy(p, s, l);
- p += 20;
- pn = PySequence_ITEM(pl, 1);
- if (PyBytes_AsStringAndSize(pn, &s, &l) == -1 || l != 20) {
- PyErr_SetString(PyExc_TypeError, "expected a 20-byte hash");
- goto bail;
- }
- memcpy(p, s, l);
- p += 20;
-
- for (pos = 0; PyDict_Next(map, &pos, &k, &v); ) {
- dirstateTupleObject *tuple;
- char state;
- int mode, size, mtime;
- Py_ssize_t len, l;
- PyObject *o;
- char *t;
-
- if (!dirstate_tuple_check(v)) {
- PyErr_SetString(PyExc_TypeError,
- "expected a dirstate tuple");
- goto bail;
- }
- tuple = (dirstateTupleObject *)v;
-
- state = tuple->state;
- mode = tuple->mode;
- size = tuple->size;
- mtime = tuple->mtime;
- if (state == 'n' && mtime == now) {
- /* See pure/parsers.py:pack_dirstate for why we do
- * this. */
- mtime = -1;
- mtime_unset = (PyObject *)make_dirstate_tuple(
- state, mode, size, mtime);
- if (!mtime_unset)
- goto bail;
- if (PyDict_SetItem(map, k, mtime_unset) == -1)
- goto bail;
- Py_DECREF(mtime_unset);
- mtime_unset = NULL;
- }
- *p++ = state;
- putbe32((uint32_t)mode, p);
- putbe32((uint32_t)size, p + 4);
- putbe32((uint32_t)mtime, p + 8);
- t = p + 12;
- p += 16;
- len = PyBytes_GET_SIZE(k);
- memcpy(p, PyBytes_AS_STRING(k), len);
- p += len;
- o = PyDict_GetItem(copymap, k);
- if (o) {
- *p++ = '\0';
- l = PyBytes_GET_SIZE(o);
- memcpy(p, PyBytes_AS_STRING(o), l);
- p += l;
- len += l + 1;
- }
- putbe32((uint32_t)len, t);
- }
-
- pos = p - PyBytes_AS_STRING(packobj);
- if (pos != nbytes) {
- PyErr_Format(PyExc_SystemError, "bad dirstate size: %ld != %ld",
- (long)pos, (long)nbytes);
- goto bail;
- }
-
- return packobj;
-bail:
- Py_XDECREF(mtime_unset);
- Py_XDECREF(packobj);
- Py_XDECREF(v);
- return NULL;
-}
-
-/*
- * A base-16 trie for fast node->rev mapping.
- *
- * Positive value is index of the next node in the trie
- * Negative value is a leaf: -(rev + 1)
- * Zero is empty
- */
-typedef struct {
- int children[16];
-} nodetree;
-
-/*
- * This class has two behaviors.
- *
- * When used in a list-like way (with integer keys), we decode an
- * entry in a RevlogNG index file on demand. Our last entry is a
- * sentinel, always a nullid. We have limited support for
- * integer-keyed insert and delete, only at elements right before the
- * sentinel.
- *
- * With string keys, we lazily perform a reverse mapping from node to
- * rev, using a base-16 trie.
- */
-typedef struct {
- PyObject_HEAD
- /* Type-specific fields go here. */
- PyObject *data; /* raw bytes of index */
- Py_buffer buf; /* buffer of data */
- PyObject **cache; /* cached tuples */
- const char **offsets; /* populated on demand */
- Py_ssize_t raw_length; /* original number of elements */
- Py_ssize_t length; /* current number of elements */
- PyObject *added; /* populated on demand */
- PyObject *headrevs; /* cache, invalidated on changes */
- PyObject *filteredrevs;/* filtered revs set */
- nodetree *nt; /* base-16 trie */
- unsigned ntlength; /* # nodes in use */
- unsigned ntcapacity; /* # nodes allocated */
- int ntdepth; /* maximum depth of tree */
- int ntsplits; /* # splits performed */
- int ntrev; /* last rev scanned */
- int ntlookups; /* # lookups */
- int ntmisses; /* # lookups that miss the cache */
- int inlined;
-} indexObject;
-
-static Py_ssize_t index_length(const indexObject *self)
-{
- if (self->added == NULL)
- return self->length;
- return self->length + PyList_GET_SIZE(self->added);
-}
-
-static PyObject *nullentry;
-static const char nullid[20];
-
-static Py_ssize_t inline_scan(indexObject *self, const char **offsets);
-
-#if LONG_MAX == 0x7fffffffL
-static char *tuple_format = "Kiiiiiis#";
-#else
-static char *tuple_format = "kiiiiiis#";
-#endif
-
-/* A RevlogNG v1 index entry is 64 bytes long. */
-static const long v1_hdrsize = 64;
-
-/*
- * Return a pointer to the beginning of a RevlogNG record.
- */
-static const char *index_deref(indexObject *self, Py_ssize_t pos)
-{
- if (self->inlined && pos > 0) {
- if (self->offsets == NULL) {
- self->offsets = PyMem_Malloc(self->raw_length *
- sizeof(*self->offsets));
- if (self->offsets == NULL)
- return (const char *)PyErr_NoMemory();
- inline_scan(self, self->offsets);
- }
- return self->offsets[pos];
- }
-
- return (const char *)(self->buf.buf) + pos * v1_hdrsize;
-}
-
-static inline int index_get_parents(indexObject *self, Py_ssize_t rev,
- int *ps, int maxrev)
-{
- if (rev >= self->length - 1) {
- PyObject *tuple = PyList_GET_ITEM(self->added,
- rev - self->length + 1);
- ps[0] = (int)PyInt_AS_LONG(PyTuple_GET_ITEM(tuple, 5));
- ps[1] = (int)PyInt_AS_LONG(PyTuple_GET_ITEM(tuple, 6));
- } else {
- const char *data = index_deref(self, rev);
- ps[0] = getbe32(data + 24);
- ps[1] = getbe32(data + 28);
- }
- /* If index file is corrupted, ps[] may point to invalid revisions. So
- * there is a risk of buffer overflow to trust them unconditionally. */
- if (ps[0] > maxrev || ps[1] > maxrev) {
- PyErr_SetString(PyExc_ValueError, "parent out of range");
- return -1;
- }
- return 0;
-}
-
-
-/*
- * RevlogNG format (all in big endian, data may be inlined):
- * 6 bytes: offset
- * 2 bytes: flags
- * 4 bytes: compressed length
- * 4 bytes: uncompressed length
- * 4 bytes: base revision
- * 4 bytes: link revision
- * 4 bytes: parent 1 revision
- * 4 bytes: parent 2 revision
- * 32 bytes: nodeid (only 20 bytes used)
- */
-static PyObject *index_get(indexObject *self, Py_ssize_t pos)
-{
- uint64_t offset_flags;
- int comp_len, uncomp_len, base_rev, link_rev, parent_1, parent_2;
- const char *c_node_id;
- const char *data;
- Py_ssize_t length = index_length(self);
- PyObject *entry;
-
- if (pos < 0)
- pos += length;
-
- if (pos < 0 || pos >= length) {
- PyErr_SetString(PyExc_IndexError, "revlog index out of range");
- return NULL;
- }
-
- if (pos == length - 1) {
- Py_INCREF(nullentry);
- return nullentry;
- }
-
- if (pos >= self->length - 1) {
- PyObject *obj;
- obj = PyList_GET_ITEM(self->added, pos - self->length + 1);
- Py_INCREF(obj);
- return obj;
- }
-
- if (self->cache) {
- if (self->cache[pos]) {
- Py_INCREF(self->cache[pos]);
- return self->cache[pos];
- }
- } else {
- self->cache = calloc(self->raw_length, sizeof(PyObject *));
- if (self->cache == NULL)
- return PyErr_NoMemory();
- }
-
- data = index_deref(self, pos);
- if (data == NULL)
- return NULL;
-
- offset_flags = getbe32(data + 4);
- if (pos == 0) /* mask out version number for the first entry */
- offset_flags &= 0xFFFF;
- else {
- uint32_t offset_high = getbe32(data);
- offset_flags |= ((uint64_t)offset_high) << 32;
- }
-
- comp_len = getbe32(data + 8);
- uncomp_len = getbe32(data + 12);
- base_rev = getbe32(data + 16);
- link_rev = getbe32(data + 20);
- parent_1 = getbe32(data + 24);
- parent_2 = getbe32(data + 28);
- c_node_id = data + 32;
-
- entry = Py_BuildValue(tuple_format, offset_flags, comp_len,
- uncomp_len, base_rev, link_rev,
- parent_1, parent_2, c_node_id, 20);
-
- if (entry) {
- PyObject_GC_UnTrack(entry);
- Py_INCREF(entry);
- }
-
- self->cache[pos] = entry;
-
- return entry;
-}
-
-/*
- * Return the 20-byte SHA of the node corresponding to the given rev.
- */
-static const char *index_node(indexObject *self, Py_ssize_t pos)
-{
- Py_ssize_t length = index_length(self);
- const char *data;
-
- if (pos == length - 1 || pos == INT_MAX)
- return nullid;
-
- if (pos >= length)
- return NULL;
-
- if (pos >= self->length - 1) {
- PyObject *tuple, *str;
- tuple = PyList_GET_ITEM(self->added, pos - self->length + 1);
- str = PyTuple_GetItem(tuple, 7);
- return str ? PyBytes_AS_STRING(str) : NULL;
- }
-
- data = index_deref(self, pos);
- return data ? data + 32 : NULL;
-}
-
-static int nt_insert(indexObject *self, const char *node, int rev);
-
-static int node_check(PyObject *obj, char **node, Py_ssize_t *nodelen)
-{
- if (PyBytes_AsStringAndSize(obj, node, nodelen) == -1)
- return -1;
- if (*nodelen == 20)
- return 0;
- PyErr_SetString(PyExc_ValueError, "20-byte hash required");
- return -1;
-}
-
-static PyObject *index_insert(indexObject *self, PyObject *args)
-{
- PyObject *obj;
- char *node;
- int index;
- Py_ssize_t len, nodelen;
-
- if (!PyArg_ParseTuple(args, "iO", &index, &obj))
- return NULL;
-
- if (!PyTuple_Check(obj) || PyTuple_GET_SIZE(obj) != 8) {
- PyErr_SetString(PyExc_TypeError, "8-tuple required");
- return NULL;
- }
-
- if (node_check(PyTuple_GET_ITEM(obj, 7), &node, &nodelen) == -1)
- return NULL;
-
- len = index_length(self);
-
- if (index < 0)
- index += len;
-
- if (index != len - 1) {
- PyErr_SetString(PyExc_IndexError,
- "insert only supported at index -1");
- return NULL;
- }
-
- if (self->added == NULL) {
- self->added = PyList_New(0);
- if (self->added == NULL)
- return NULL;
- }
-
- if (PyList_Append(self->added, obj) == -1)
- return NULL;
-
- if (self->nt)
- nt_insert(self, node, index);
-
- Py_CLEAR(self->headrevs);
- Py_RETURN_NONE;
-}
-
-static void _index_clearcaches(indexObject *self)
-{
- if (self->cache) {
- Py_ssize_t i;
-
- for (i = 0; i < self->raw_length; i++)
- Py_CLEAR(self->cache[i]);
- free(self->cache);
- self->cache = NULL;
- }
- if (self->offsets) {
- PyMem_Free(self->offsets);
- self->offsets = NULL;
- }
- if (self->nt) {
- free(self->nt);
- self->nt = NULL;
- }
- Py_CLEAR(self->headrevs);
-}
-
-static PyObject *index_clearcaches(indexObject *self)
-{
- _index_clearcaches(self);
- self->ntlength = self->ntcapacity = 0;
- self->ntdepth = self->ntsplits = 0;
- self->ntrev = -1;
- self->ntlookups = self->ntmisses = 0;
- Py_RETURN_NONE;
-}
-
-static PyObject *index_stats(indexObject *self)
-{
- PyObject *obj = PyDict_New();
- PyObject *t = NULL;
-
- if (obj == NULL)
- return NULL;
-
-#define istat(__n, __d) \
- do { \
- t = PyInt_FromSsize_t(self->__n); \
- if (!t) \
- goto bail; \
- if (PyDict_SetItemString(obj, __d, t) == -1) \
- goto bail; \
- Py_DECREF(t); \
- } while (0)
-
- if (self->added) {
- Py_ssize_t len = PyList_GET_SIZE(self->added);
- t = PyInt_FromSsize_t(len);
- if (!t)
- goto bail;
- if (PyDict_SetItemString(obj, "index entries added", t) == -1)
- goto bail;
- Py_DECREF(t);
- }
-
- if (self->raw_length != self->length - 1)
- istat(raw_length, "revs on disk");
- istat(length, "revs in memory");
- istat(ntcapacity, "node trie capacity");
- istat(ntdepth, "node trie depth");
- istat(ntlength, "node trie count");
- istat(ntlookups, "node trie lookups");
- istat(ntmisses, "node trie misses");
- istat(ntrev, "node trie last rev scanned");
- istat(ntsplits, "node trie splits");
-
-#undef istat
-
- return obj;
-
-bail:
- Py_XDECREF(obj);
- Py_XDECREF(t);
- return NULL;
-}
-
-/*
- * When we cache a list, we want to be sure the caller can't mutate
- * the cached copy.
- */
-static PyObject *list_copy(PyObject *list)
-{
- Py_ssize_t len = PyList_GET_SIZE(list);
- PyObject *newlist = PyList_New(len);
- Py_ssize_t i;
-
- if (newlist == NULL)
- return NULL;
-
- for (i = 0; i < len; i++) {
- PyObject *obj = PyList_GET_ITEM(list, i);
- Py_INCREF(obj);
- PyList_SET_ITEM(newlist, i, obj);
- }
-
- return newlist;
-}
-
-static int check_filter(PyObject *filter, Py_ssize_t arg) {
- if (filter) {
- PyObject *arglist, *result;
- int isfiltered;
-
- arglist = Py_BuildValue("(n)", arg);
- if (!arglist) {
- return -1;
- }
-
- result = PyEval_CallObject(filter, arglist);
- Py_DECREF(arglist);
- if (!result) {
- return -1;
- }
-
- /* PyObject_IsTrue returns 1 if true, 0 if false, -1 if error,
- * same as this function, so we can just return it directly.*/
- isfiltered = PyObject_IsTrue(result);
- Py_DECREF(result);
- return isfiltered;
- } else {
- return 0;
- }
-}
-
-static Py_ssize_t add_roots_get_min(indexObject *self, PyObject *list,
- Py_ssize_t marker, char *phases)
-{
- PyObject *iter = NULL;
- PyObject *iter_item = NULL;
- Py_ssize_t min_idx = index_length(self) + 1;
- long iter_item_long;
-
- if (PyList_GET_SIZE(list) != 0) {
- iter = PyObject_GetIter(list);
- if (iter == NULL)
- return -2;
- while ((iter_item = PyIter_Next(iter)))
- {
- iter_item_long = PyInt_AS_LONG(iter_item);
- Py_DECREF(iter_item);
- if (iter_item_long < min_idx)
- min_idx = iter_item_long;
- phases[iter_item_long] = marker;
- }
- Py_DECREF(iter);
- }
-
- return min_idx;
-}
-
-static inline void set_phase_from_parents(char *phases, int parent_1,
- int parent_2, Py_ssize_t i)
-{
- if (parent_1 >= 0 && phases[parent_1] > phases[i])
- phases[i] = phases[parent_1];
- if (parent_2 >= 0 && phases[parent_2] > phases[i])
- phases[i] = phases[parent_2];
-}
-
-static PyObject *reachableroots2(indexObject *self, PyObject *args)
-{
-
- /* Input */
- long minroot;
- PyObject *includepatharg = NULL;
- int includepath = 0;
- /* heads and roots are lists */
- PyObject *heads = NULL;
- PyObject *roots = NULL;
- PyObject *reachable = NULL;
-
- PyObject *val;
- Py_ssize_t len = index_length(self) - 1;
- long revnum;
- Py_ssize_t k;
- Py_ssize_t i;
- Py_ssize_t l;
- int r;
- int parents[2];
-
- /* Internal data structure:
- * tovisit: array of length len+1 (all revs + nullrev), filled upto lentovisit
- * revstates: array of length len+1 (all revs + nullrev) */
- int *tovisit = NULL;
- long lentovisit = 0;
- enum { RS_SEEN = 1, RS_ROOT = 2, RS_REACHABLE = 4 };
- char *revstates = NULL;
-
- /* Get arguments */
- if (!PyArg_ParseTuple(args, "lO!O!O!", &minroot, &PyList_Type, &heads,
- &PyList_Type, &roots,
- &PyBool_Type, &includepatharg))
- goto bail;
-
- if (includepatharg == Py_True)
- includepath = 1;
-
- /* Initialize return set */
- reachable = PyList_New(0);
- if (reachable == NULL)
- goto bail;
-
- /* Initialize internal datastructures */
- tovisit = (int *)malloc((len + 1) * sizeof(int));
- if (tovisit == NULL) {
- PyErr_NoMemory();
- goto bail;
- }
-
- revstates = (char *)calloc(len + 1, 1);
- if (revstates == NULL) {
- PyErr_NoMemory();
- goto bail;
- }
-
- l = PyList_GET_SIZE(roots);
- for (i = 0; i < l; i++) {
- revnum = PyInt_AsLong(PyList_GET_ITEM(roots, i));
- if (revnum == -1 && PyErr_Occurred())
- goto bail;
- /* If root is out of range, e.g. wdir(), it must be unreachable
- * from heads. So we can just ignore it. */
- if (revnum + 1 < 0 || revnum + 1 >= len + 1)
- continue;
- revstates[revnum + 1] |= RS_ROOT;
- }
-
- /* Populate tovisit with all the heads */
- l = PyList_GET_SIZE(heads);
- for (i = 0; i < l; i++) {
- revnum = PyInt_AsLong(PyList_GET_ITEM(heads, i));
- if (revnum == -1 && PyErr_Occurred())
- goto bail;
- if (revnum + 1 < 0 || revnum + 1 >= len + 1) {
- PyErr_SetString(PyExc_IndexError, "head out of range");
- goto bail;
- }
- if (!(revstates[revnum + 1] & RS_SEEN)) {
- tovisit[lentovisit++] = (int)revnum;
- revstates[revnum + 1] |= RS_SEEN;
- }
- }
-
- /* Visit the tovisit list and find the reachable roots */
- k = 0;
- while (k < lentovisit) {
- /* Add the node to reachable if it is a root*/
- revnum = tovisit[k++];
- if (revstates[revnum + 1] & RS_ROOT) {
- revstates[revnum + 1] |= RS_REACHABLE;
- val = PyInt_FromLong(revnum);
- if (val == NULL)
- goto bail;
- r = PyList_Append(reachable, val);
- Py_DECREF(val);
- if (r < 0)
- goto bail;
- if (includepath == 0)
- continue;
- }
-
- /* Add its parents to the list of nodes to visit */
- if (revnum == -1)
- continue;
- r = index_get_parents(self, revnum, parents, (int)len - 1);
- if (r < 0)
- goto bail;
- for (i = 0; i < 2; i++) {
- if (!(revstates[parents[i] + 1] & RS_SEEN)
- && parents[i] >= minroot) {
- tovisit[lentovisit++] = parents[i];
- revstates[parents[i] + 1] |= RS_SEEN;
- }
- }
- }
-
- /* Find all the nodes in between the roots we found and the heads
- * and add them to the reachable set */
- if (includepath == 1) {
- long minidx = minroot;
- if (minidx < 0)
- minidx = 0;
- for (i = minidx; i < len; i++) {
- if (!(revstates[i + 1] & RS_SEEN))
- continue;
- r = index_get_parents(self, i, parents, (int)len - 1);
- /* Corrupted index file, error is set from
- * index_get_parents */
- if (r < 0)
- goto bail;
- if (((revstates[parents[0] + 1] |
- revstates[parents[1] + 1]) & RS_REACHABLE)
- && !(revstates[i + 1] & RS_REACHABLE)) {
- revstates[i + 1] |= RS_REACHABLE;
- val = PyInt_FromLong(i);
- if (val == NULL)
- goto bail;
- r = PyList_Append(reachable, val);
- Py_DECREF(val);
- if (r < 0)
- goto bail;
- }
- }
- }
-
- free(revstates);
- free(tovisit);
- return reachable;
-bail:
- Py_XDECREF(reachable);
- free(revstates);
- free(tovisit);
- return NULL;
-}
-
-static PyObject *compute_phases_map_sets(indexObject *self, PyObject *args)
-{
- PyObject *roots = Py_None;
- PyObject *ret = NULL;
- PyObject *phaseslist = NULL;
- PyObject *phaseroots = NULL;
- PyObject *phaseset = NULL;
- PyObject *phasessetlist = NULL;
- PyObject *rev = NULL;
- Py_ssize_t len = index_length(self) - 1;
- Py_ssize_t numphase = 0;
- Py_ssize_t minrevallphases = 0;
- Py_ssize_t minrevphase = 0;
- Py_ssize_t i = 0;
- char *phases = NULL;
- long phase;
-
- if (!PyArg_ParseTuple(args, "O", &roots))
- goto done;
- if (roots == NULL || !PyList_Check(roots))
- goto done;
-
- phases = calloc(len, 1); /* phase per rev: {0: public, 1: draft, 2: secret} */
- if (phases == NULL) {
- PyErr_NoMemory();
- goto done;
- }
- /* Put the phase information of all the roots in phases */
- numphase = PyList_GET_SIZE(roots)+1;
- minrevallphases = len + 1;
- phasessetlist = PyList_New(numphase);
- if (phasessetlist == NULL)
- goto done;
-
- PyList_SET_ITEM(phasessetlist, 0, Py_None);
- Py_INCREF(Py_None);
-
- for (i = 0; i < numphase-1; i++) {
- phaseroots = PyList_GET_ITEM(roots, i);
- phaseset = PySet_New(NULL);
- if (phaseset == NULL)
- goto release;
- PyList_SET_ITEM(phasessetlist, i+1, phaseset);
- if (!PyList_Check(phaseroots))
- goto release;
- minrevphase = add_roots_get_min(self, phaseroots, i+1, phases);
- if (minrevphase == -2) /* Error from add_roots_get_min */
- goto release;
- minrevallphases = MIN(minrevallphases, minrevphase);
- }
- /* Propagate the phase information from the roots to the revs */
- if (minrevallphases != -1) {
- int parents[2];
- for (i = minrevallphases; i < len; i++) {
- if (index_get_parents(self, i, parents,
- (int)len - 1) < 0)
- goto release;
- set_phase_from_parents(phases, parents[0], parents[1], i);
- }
- }
- /* Transform phase list to a python list */
- phaseslist = PyList_New(len);
- if (phaseslist == NULL)
- goto release;
- for (i = 0; i < len; i++) {
- PyObject *phaseval;
-
- phase = phases[i];
- /* We only store the sets of phase for non public phase, the public phase
- * is computed as a difference */
- if (phase != 0) {
- phaseset = PyList_GET_ITEM(phasessetlist, phase);
- rev = PyInt_FromLong(i);
- if (rev == NULL)
- goto release;
- PySet_Add(phaseset, rev);
- Py_XDECREF(rev);
- }
- phaseval = PyInt_FromLong(phase);
- if (phaseval == NULL)
- goto release;
- PyList_SET_ITEM(phaseslist, i, phaseval);
- }
- ret = PyTuple_Pack(2, phaseslist, phasessetlist);
-
-release:
- Py_XDECREF(phaseslist);
- Py_XDECREF(phasessetlist);
-done:
- free(phases);
- return ret;
-}
-
-static PyObject *index_headrevs(indexObject *self, PyObject *args)
-{
- Py_ssize_t i, j, len;
- char *nothead = NULL;
- PyObject *heads = NULL;
- PyObject *filter = NULL;
- PyObject *filteredrevs = Py_None;
-
- if (!PyArg_ParseTuple(args, "|O", &filteredrevs)) {
- return NULL;
- }
-
- if (self->headrevs && filteredrevs == self->filteredrevs)
- return list_copy(self->headrevs);
-
- Py_DECREF(self->filteredrevs);
- self->filteredrevs = filteredrevs;
- Py_INCREF(filteredrevs);
-
- if (filteredrevs != Py_None) {
- filter = PyObject_GetAttrString(filteredrevs, "__contains__");
- if (!filter) {
- PyErr_SetString(PyExc_TypeError,
- "filteredrevs has no attribute __contains__");
- goto bail;
- }
- }
-
- len = index_length(self) - 1;
- heads = PyList_New(0);
- if (heads == NULL)
- goto bail;
- if (len == 0) {
- PyObject *nullid = PyInt_FromLong(-1);
- if (nullid == NULL || PyList_Append(heads, nullid) == -1) {
- Py_XDECREF(nullid);
- goto bail;
- }
- goto done;
- }
-
- nothead = calloc(len, 1);
- if (nothead == NULL) {
- PyErr_NoMemory();
- goto bail;
- }
-
- for (i = len - 1; i >= 0; i--) {
- int isfiltered;
- int parents[2];
-
- /* If nothead[i] == 1, it means we've seen an unfiltered child of this
- * node already, and therefore this node is not filtered. So we can skip
- * the expensive check_filter step.
- */
- if (nothead[i] != 1) {
- isfiltered = check_filter(filter, i);
- if (isfiltered == -1) {
- PyErr_SetString(PyExc_TypeError,
- "unable to check filter");
- goto bail;
- }
-
- if (isfiltered) {
- nothead[i] = 1;
- continue;
- }
- }
-
- if (index_get_parents(self, i, parents, (int)len - 1) < 0)
- goto bail;
- for (j = 0; j < 2; j++) {
- if (parents[j] >= 0)
- nothead[parents[j]] = 1;
- }
- }
-
- for (i = 0; i < len; i++) {
- PyObject *head;
-
- if (nothead[i])
- continue;
- head = PyInt_FromSsize_t(i);
- if (head == NULL || PyList_Append(heads, head) == -1) {
- Py_XDECREF(head);
- goto bail;
- }
- }
-
-done:
- self->headrevs = heads;
- Py_XDECREF(filter);
- free(nothead);
- return list_copy(self->headrevs);
-bail:
- Py_XDECREF(filter);
- Py_XDECREF(heads);
- free(nothead);
- return NULL;
-}
-
-static inline int nt_level(const char *node, Py_ssize_t level)
-{
- int v = node[level>>1];
- if (!(level & 1))
- v >>= 4;
- return v & 0xf;
-}
-
-/*
- * Return values:
- *
- * -4: match is ambiguous (multiple candidates)
- * -2: not found
- * rest: valid rev
- */
-static int nt_find(indexObject *self, const char *node, Py_ssize_t nodelen,
- int hex)
-{
- int (*getnybble)(const char *, Py_ssize_t) = hex ? hexdigit : nt_level;
- int level, maxlevel, off;
-
- if (nodelen == 20 && node[0] == '\0' && memcmp(node, nullid, 20) == 0)
- return -1;
-
- if (self->nt == NULL)
- return -2;
-
- if (hex)
- maxlevel = nodelen > 40 ? 40 : (int)nodelen;
- else
- maxlevel = nodelen > 20 ? 40 : ((int)nodelen * 2);
-
- for (level = off = 0; level < maxlevel; level++) {
- int k = getnybble(node, level);
- nodetree *n = &self->nt[off];
- int v = n->children[k];
-
- if (v < 0) {
- const char *n;
- Py_ssize_t i;
-
- v = -(v + 1);
- n = index_node(self, v);
- if (n == NULL)
- return -2;
- for (i = level; i < maxlevel; i++)
- if (getnybble(node, i) != nt_level(n, i))
- return -2;
- return v;
- }
- if (v == 0)
- return -2;
- off = v;
- }
- /* multiple matches against an ambiguous prefix */
- return -4;
-}
-
-static int nt_new(indexObject *self)
-{
- if (self->ntlength == self->ntcapacity) {
- if (self->ntcapacity >= INT_MAX / (sizeof(nodetree) * 2)) {
- PyErr_SetString(PyExc_MemoryError,
- "overflow in nt_new");
- return -1;
- }
- self->ntcapacity *= 2;
- self->nt = realloc(self->nt,
- self->ntcapacity * sizeof(nodetree));
- if (self->nt == NULL) {
- PyErr_SetString(PyExc_MemoryError, "out of memory");
- return -1;
- }
- memset(&self->nt[self->ntlength], 0,
- sizeof(nodetree) * (self->ntcapacity - self->ntlength));
- }
- return self->ntlength++;
-}
-
-static int nt_insert(indexObject *self, const char *node, int rev)
-{
- int level = 0;
- int off = 0;
-
- while (level < 40) {
- int k = nt_level(node, level);
- nodetree *n;
- int v;
-
- n = &self->nt[off];
- v = n->children[k];
-
- if (v == 0) {
- n->children[k] = -rev - 1;
- return 0;
- }
- if (v < 0) {
- const char *oldnode = index_node(self, -(v + 1));
- int noff;
-
- if (!oldnode || !memcmp(oldnode, node, 20)) {
- n->children[k] = -rev - 1;
- return 0;
- }
- noff = nt_new(self);
- if (noff == -1)
- return -1;
- /* self->nt may have been changed by realloc */
- self->nt[off].children[k] = noff;
- off = noff;
- n = &self->nt[off];
- n->children[nt_level(oldnode, ++level)] = v;
- if (level > self->ntdepth)
- self->ntdepth = level;
- self->ntsplits += 1;
- } else {
- level += 1;
- off = v;
- }
- }
-
- return -1;
-}
-
-static int nt_init(indexObject *self)
-{
- if (self->nt == NULL) {
- if ((size_t)self->raw_length > INT_MAX / sizeof(nodetree)) {
- PyErr_SetString(PyExc_ValueError, "overflow in nt_init");
- return -1;
- }
- self->ntcapacity = self->raw_length < 4
- ? 4 : (int)self->raw_length / 2;
-
- self->nt = calloc(self->ntcapacity, sizeof(nodetree));
- if (self->nt == NULL) {
- PyErr_NoMemory();
- return -1;
- }
- self->ntlength = 1;
- self->ntrev = (int)index_length(self) - 1;
- self->ntlookups = 1;
- self->ntmisses = 0;
- if (nt_insert(self, nullid, INT_MAX) == -1)
- return -1;
- }
- return 0;
-}
-
-/*
- * Return values:
- *
- * -3: error (exception set)
- * -2: not found (no exception set)
- * rest: valid rev
- */
-static int index_find_node(indexObject *self,
- const char *node, Py_ssize_t nodelen)
-{
- int rev;
-
- self->ntlookups++;
- rev = nt_find(self, node, nodelen, 0);
- if (rev >= -1)
- return rev;
-
- if (nt_init(self) == -1)
- return -3;
-
- /*
- * For the first handful of lookups, we scan the entire index,
- * and cache only the matching nodes. This optimizes for cases
- * like "hg tip", where only a few nodes are accessed.
- *
- * After that, we cache every node we visit, using a single
- * scan amortized over multiple lookups. This gives the best
- * bulk performance, e.g. for "hg log".
- */
- if (self->ntmisses++ < 4) {
- for (rev = self->ntrev - 1; rev >= 0; rev--) {
- const char *n = index_node(self, rev);
- if (n == NULL)
- return -2;
- if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
- if (nt_insert(self, n, rev) == -1)
- return -3;
- break;
- }
- }
- } else {
- for (rev = self->ntrev - 1; rev >= 0; rev--) {
- const char *n = index_node(self, rev);
- if (n == NULL) {
- self->ntrev = rev + 1;
- return -2;
- }
- if (nt_insert(self, n, rev) == -1) {
- self->ntrev = rev + 1;
- return -3;
- }
- if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
- break;
- }
- }
- self->ntrev = rev;
- }
-
- if (rev >= 0)
- return rev;
- return -2;
-}
-
-static void raise_revlog_error(void)
-{
- PyObject *mod = NULL, *dict = NULL, *errclass = NULL;
-
- mod = PyImport_ImportModule("mercurial.error");
- if (mod == NULL) {
- goto cleanup;
- }
-
- dict = PyModule_GetDict(mod);
- if (dict == NULL) {
- goto cleanup;
- }
- Py_INCREF(dict);
-
- errclass = PyDict_GetItemString(dict, "RevlogError");
- if (errclass == NULL) {
- PyErr_SetString(PyExc_SystemError,
- "could not find RevlogError");
- goto cleanup;
- }
-
- /* value of exception is ignored by callers */
- PyErr_SetString(errclass, "RevlogError");
-
-cleanup:
- Py_XDECREF(dict);
- Py_XDECREF(mod);
-}
-
-static PyObject *index_getitem(indexObject *self, PyObject *value)
-{
- char *node;
- Py_ssize_t nodelen;
- int rev;
-
- if (PyInt_Check(value))
- return index_get(self, PyInt_AS_LONG(value));
-
- if (node_check(value, &node, &nodelen) == -1)
- return NULL;
- rev = index_find_node(self, node, nodelen);
- if (rev >= -1)
- return PyInt_FromLong(rev);
- if (rev == -2)
- raise_revlog_error();
- return NULL;
-}
-
-static int nt_partialmatch(indexObject *self, const char *node,
- Py_ssize_t nodelen)
-{
- int rev;
-
- if (nt_init(self) == -1)
- return -3;
-
- if (self->ntrev > 0) {
- /* ensure that the radix tree is fully populated */
- for (rev = self->ntrev - 1; rev >= 0; rev--) {
- const char *n = index_node(self, rev);
- if (n == NULL)
- return -2;
- if (nt_insert(self, n, rev) == -1)
- return -3;
- }
- self->ntrev = rev;
- }
-
- return nt_find(self, node, nodelen, 1);
-}
-
-static PyObject *index_partialmatch(indexObject *self, PyObject *args)
-{
- const char *fullnode;
- int nodelen;
- char *node;
- int rev, i;
-
- if (!PyArg_ParseTuple(args, "s#", &node, &nodelen))
- return NULL;
-
- if (nodelen < 4) {
- PyErr_SetString(PyExc_ValueError, "key too short");
- return NULL;
- }
-
- if (nodelen > 40) {
- PyErr_SetString(PyExc_ValueError, "key too long");
- return NULL;
- }
-
- for (i = 0; i < nodelen; i++)
- hexdigit(node, i);
- if (PyErr_Occurred()) {
- /* input contains non-hex characters */
- PyErr_Clear();
- Py_RETURN_NONE;
- }
-
- rev = nt_partialmatch(self, node, nodelen);
-
- switch (rev) {
- case -4:
- raise_revlog_error();
- case -3:
- return NULL;
- case -2:
- Py_RETURN_NONE;
- case -1:
- return PyBytes_FromStringAndSize(nullid, 20);
- }
-
- fullnode = index_node(self, rev);
- if (fullnode == NULL) {
- PyErr_Format(PyExc_IndexError,
- "could not access rev %d", rev);
- return NULL;
- }
- return PyBytes_FromStringAndSize(fullnode, 20);
-}
-
-static PyObject *index_m_get(indexObject *self, PyObject *args)
-{
- Py_ssize_t nodelen;
- PyObject *val;
- char *node;
- int rev;
-
- if (!PyArg_ParseTuple(args, "O", &val))
- return NULL;
- if (node_check(val, &node, &nodelen) == -1)
- return NULL;
- rev = index_find_node(self, node, nodelen);
- if (rev == -3)
- return NULL;
- if (rev == -2)
- Py_RETURN_NONE;
- return PyInt_FromLong(rev);
-}
-
-static int index_contains(indexObject *self, PyObject *value)
-{
- char *node;
- Py_ssize_t nodelen;
-
- if (PyInt_Check(value)) {
- long rev = PyInt_AS_LONG(value);
- return rev >= -1 && rev < index_length(self);
- }
-
- if (node_check(value, &node, &nodelen) == -1)
- return -1;
-
- switch (index_find_node(self, node, nodelen)) {
- case -3:
- return -1;
- case -2:
- return 0;
- default:
- return 1;
- }
-}
-
-typedef uint64_t bitmask;
-
-/*
- * Given a disjoint set of revs, return all candidates for the
- * greatest common ancestor. In revset notation, this is the set
- * "heads(::a and ::b and ...)"
- */
-static PyObject *find_gca_candidates(indexObject *self, const int *revs,
- int revcount)
-{
- const bitmask allseen = (1ull << revcount) - 1;
- const bitmask poison = 1ull << revcount;
- PyObject *gca = PyList_New(0);
- int i, v, interesting;
- int maxrev = -1;
- bitmask sp;
- bitmask *seen;
-
- if (gca == NULL)
- return PyErr_NoMemory();
-
- for (i = 0; i < revcount; i++) {
- if (revs[i] > maxrev)
- maxrev = revs[i];
- }
-
- seen = calloc(sizeof(*seen), maxrev + 1);
- if (seen == NULL) {
- Py_DECREF(gca);
- return PyErr_NoMemory();
- }
-
- for (i = 0; i < revcount; i++)
- seen[revs[i]] = 1ull << i;
-
- interesting = revcount;
-
- for (v = maxrev; v >= 0 && interesting; v--) {
- bitmask sv = seen[v];
- int parents[2];
-
- if (!sv)
- continue;
-
- if (sv < poison) {
- interesting -= 1;
- if (sv == allseen) {
- PyObject *obj = PyInt_FromLong(v);
- if (obj == NULL)
- goto bail;
- if (PyList_Append(gca, obj) == -1) {
- Py_DECREF(obj);
- goto bail;
- }
- sv |= poison;
- for (i = 0; i < revcount; i++) {
- if (revs[i] == v)
- goto done;
- }
- }
- }
- if (index_get_parents(self, v, parents, maxrev) < 0)
- goto bail;
-
- for (i = 0; i < 2; i++) {
- int p = parents[i];
- if (p == -1)
- continue;
- sp = seen[p];
- if (sv < poison) {
- if (sp == 0) {
- seen[p] = sv;
- interesting++;
- }
- else if (sp != sv)
- seen[p] |= sv;
- } else {
- if (sp && sp < poison)
- interesting--;
- seen[p] = sv;
- }
- }
- }
-
-done:
- free(seen);
- return gca;
-bail:
- free(seen);
- Py_XDECREF(gca);
- return NULL;
-}
-
-/*
- * Given a disjoint set of revs, return the subset with the longest
- * path to the root.
- */
-static PyObject *find_deepest(indexObject *self, PyObject *revs)
-{
- const Py_ssize_t revcount = PyList_GET_SIZE(revs);
- static const Py_ssize_t capacity = 24;
- int *depth, *interesting = NULL;
- int i, j, v, ninteresting;
- PyObject *dict = NULL, *keys = NULL;
- long *seen = NULL;
- int maxrev = -1;
- long final;
-
- if (revcount > capacity) {
- PyErr_Format(PyExc_OverflowError,
- "bitset size (%ld) > capacity (%ld)",
- (long)revcount, (long)capacity);
- return NULL;
- }
-
- for (i = 0; i < revcount; i++) {
- int n = (int)PyInt_AsLong(PyList_GET_ITEM(revs, i));
- if (n > maxrev)
- maxrev = n;
- }
-
- depth = calloc(sizeof(*depth), maxrev + 1);
- if (depth == NULL)
- return PyErr_NoMemory();
-
- seen = calloc(sizeof(*seen), maxrev + 1);
- if (seen == NULL) {
- PyErr_NoMemory();
- goto bail;
- }
-
- interesting = calloc(sizeof(*interesting), 2 << revcount);
- if (interesting == NULL) {
- PyErr_NoMemory();
- goto bail;
- }
-
- if (PyList_Sort(revs) == -1)
- goto bail;
-
- for (i = 0; i < revcount; i++) {
- int n = (int)PyInt_AsLong(PyList_GET_ITEM(revs, i));
- long b = 1l << i;
- depth[n] = 1;
- seen[n] = b;
- interesting[b] = 1;
- }
-
- ninteresting = (int)revcount;
-
- for (v = maxrev; v >= 0 && ninteresting > 1; v--) {
- int dv = depth[v];
- int parents[2];
- long sv;
-
- if (dv == 0)
- continue;
-
- sv = seen[v];
- if (index_get_parents(self, v, parents, maxrev) < 0)
- goto bail;
-
- for (i = 0; i < 2; i++) {
- int p = parents[i];
- long sp;
- int dp;
-
- if (p == -1)
- continue;
-
- dp = depth[p];
- sp = seen[p];
- if (dp <= dv) {
- depth[p] = dv + 1;
- if (sp != sv) {
- interesting[sv] += 1;
- seen[p] = sv;
- if (sp) {
- interesting[sp] -= 1;
- if (interesting[sp] == 0)
- ninteresting -= 1;
- }
- }
- }
- else if (dv == dp - 1) {
- long nsp = sp | sv;
- if (nsp == sp)
- continue;
- seen[p] = nsp;
- interesting[sp] -= 1;
- if (interesting[sp] == 0 && interesting[nsp] > 0)
- ninteresting -= 1;
- interesting[nsp] += 1;
- }
- }
- interesting[sv] -= 1;
- if (interesting[sv] == 0)
- ninteresting -= 1;
- }
-
- final = 0;
- j = ninteresting;
- for (i = 0; i < (int)(2 << revcount) && j > 0; i++) {
- if (interesting[i] == 0)
- continue;
- final |= i;
- j -= 1;
- }
- if (final == 0) {
- keys = PyList_New(0);
- goto bail;
- }
-
- dict = PyDict_New();
- if (dict == NULL)
- goto bail;
-
- for (i = 0; i < revcount; i++) {
- PyObject *key;
-
- if ((final & (1 << i)) == 0)
- continue;
-
- key = PyList_GET_ITEM(revs, i);
- Py_INCREF(key);
- Py_INCREF(Py_None);
- if (PyDict_SetItem(dict, key, Py_None) == -1) {
- Py_DECREF(key);
- Py_DECREF(Py_None);
- goto bail;
- }
- }
-
- keys = PyDict_Keys(dict);
-
-bail:
- free(depth);
- free(seen);
- free(interesting);
- Py_XDECREF(dict);
-
- return keys;
-}
-
-/*
- * Given a (possibly overlapping) set of revs, return all the
- * common ancestors heads: heads(::args[0] and ::a[1] and ...)
- */
-static PyObject *index_commonancestorsheads(indexObject *self, PyObject *args)
-{
- PyObject *ret = NULL;
- Py_ssize_t argcount, i, len;
- bitmask repeat = 0;
- int revcount = 0;
- int *revs;
-
- argcount = PySequence_Length(args);
- revs = PyMem_Malloc(argcount * sizeof(*revs));
- if (argcount > 0 && revs == NULL)
- return PyErr_NoMemory();
- len = index_length(self) - 1;
-
- for (i = 0; i < argcount; i++) {
- static const int capacity = 24;
- PyObject *obj = PySequence_GetItem(args, i);
- bitmask x;
- long val;
-
- if (!PyInt_Check(obj)) {
- PyErr_SetString(PyExc_TypeError,
- "arguments must all be ints");
- Py_DECREF(obj);
- goto bail;
- }
- val = PyInt_AsLong(obj);
- Py_DECREF(obj);
- if (val == -1) {
- ret = PyList_New(0);
- goto done;
- }
- if (val < 0 || val >= len) {
- PyErr_SetString(PyExc_IndexError,
- "index out of range");
- goto bail;
- }
- /* this cheesy bloom filter lets us avoid some more
- * expensive duplicate checks in the common set-is-disjoint
- * case */
- x = 1ull << (val & 0x3f);
- if (repeat & x) {
- int k;
- for (k = 0; k < revcount; k++) {
- if (val == revs[k])
- goto duplicate;
- }
- }
- else repeat |= x;
- if (revcount >= capacity) {
- PyErr_Format(PyExc_OverflowError,
- "bitset size (%d) > capacity (%d)",
- revcount, capacity);
- goto bail;
- }
- revs[revcount++] = (int)val;
- duplicate:;
- }
-
- if (revcount == 0) {
- ret = PyList_New(0);
- goto done;
- }
- if (revcount == 1) {
- PyObject *obj;
- ret = PyList_New(1);
- if (ret == NULL)
- goto bail;
- obj = PyInt_FromLong(revs[0]);
- if (obj == NULL)
- goto bail;
- PyList_SET_ITEM(ret, 0, obj);
- goto done;
- }
-
- ret = find_gca_candidates(self, revs, revcount);
- if (ret == NULL)
- goto bail;
-
-done:
- PyMem_Free(revs);
- return ret;
-
-bail:
- PyMem_Free(revs);
- Py_XDECREF(ret);
- return NULL;
-}
-
-/*
- * Given a (possibly overlapping) set of revs, return the greatest
- * common ancestors: those with the longest path to the root.
- */
-static PyObject *index_ancestors(indexObject *self, PyObject *args)
-{
- PyObject *ret;
- PyObject *gca = index_commonancestorsheads(self, args);
- if (gca == NULL)
- return NULL;
-
- if (PyList_GET_SIZE(gca) <= 1) {
- return gca;
- }
-
- ret = find_deepest(self, gca);
- Py_DECREF(gca);
- return ret;
-}
-
-/*
- * Invalidate any trie entries introduced by added revs.
- */
-static void nt_invalidate_added(indexObject *self, Py_ssize_t start)
-{
- Py_ssize_t i, len = PyList_GET_SIZE(self->added);
-
- for (i = start; i < len; i++) {
- PyObject *tuple = PyList_GET_ITEM(self->added, i);
- PyObject *node = PyTuple_GET_ITEM(tuple, 7);
-
- nt_insert(self, PyBytes_AS_STRING(node), -1);
- }
-
- if (start == 0)
- Py_CLEAR(self->added);
-}
-
-/*
- * Delete a numeric range of revs, which must be at the end of the
- * range, but exclude the sentinel nullid entry.
- */
-static int index_slice_del(indexObject *self, PyObject *item)
-{
- Py_ssize_t start, stop, step, slicelength;
- Py_ssize_t length = index_length(self);
- int ret = 0;
-
-/* Argument changed from PySliceObject* to PyObject* in Python 3. */
-#ifdef IS_PY3K
- if (PySlice_GetIndicesEx(item, length,
-#else
- if (PySlice_GetIndicesEx((PySliceObject*)item, length,
-#endif
- &start, &stop, &step, &slicelength) < 0)
- return -1;
-
- if (slicelength <= 0)
- return 0;
-
- if ((step < 0 && start < stop) || (step > 0 && start > stop))
- stop = start;
-
- if (step < 0) {
- stop = start + 1;
- start = stop + step*(slicelength - 1) - 1;
- step = -step;
- }
-
- if (step != 1) {
- PyErr_SetString(PyExc_ValueError,
- "revlog index delete requires step size of 1");
- return -1;
- }
-
- if (stop != length - 1) {
- PyErr_SetString(PyExc_IndexError,
- "revlog index deletion indices are invalid");
- return -1;
- }
-
- if (start < self->length - 1) {
- if (self->nt) {
- Py_ssize_t i;
-
- for (i = start + 1; i < self->length - 1; i++) {
- const char *node = index_node(self, i);
-
- if (node)
- nt_insert(self, node, -1);
- }
- if (self->added)
- nt_invalidate_added(self, 0);
- if (self->ntrev > start)
- self->ntrev = (int)start;
- }
- self->length = start + 1;
- if (start < self->raw_length) {
- if (self->cache) {
- Py_ssize_t i;
- for (i = start; i < self->raw_length; i++)
- Py_CLEAR(self->cache[i]);
- }
- self->raw_length = start;
- }
- goto done;
- }
-
- if (self->nt) {
- nt_invalidate_added(self, start - self->length + 1);
- if (self->ntrev > start)
- self->ntrev = (int)start;
- }
- if (self->added)
- ret = PyList_SetSlice(self->added, start - self->length + 1,
- PyList_GET_SIZE(self->added), NULL);
-done:
- Py_CLEAR(self->headrevs);
- return ret;
-}
-
-/*
- * Supported ops:
- *
- * slice deletion
- * string assignment (extend node->rev mapping)
- * string deletion (shrink node->rev mapping)
- */
-static int index_assign_subscript(indexObject *self, PyObject *item,
- PyObject *value)
-{
- char *node;
- Py_ssize_t nodelen;
- long rev;
-
- if (PySlice_Check(item) && value == NULL)
- return index_slice_del(self, item);
-
- if (node_check(item, &node, &nodelen) == -1)
- return -1;
-
- if (value == NULL)
- return self->nt ? nt_insert(self, node, -1) : 0;
- rev = PyInt_AsLong(value);
- if (rev > INT_MAX || rev < 0) {
- if (!PyErr_Occurred())
- PyErr_SetString(PyExc_ValueError, "rev out of range");
- return -1;
- }
-
- if (nt_init(self) == -1)
- return -1;
- return nt_insert(self, node, (int)rev);
-}
-
-/*
- * Find all RevlogNG entries in an index that has inline data. Update
- * the optional "offsets" table with those entries.
- */
-static Py_ssize_t inline_scan(indexObject *self, const char **offsets)
-{
- const char *data = (const char *)self->buf.buf;
- Py_ssize_t pos = 0;
- Py_ssize_t end = self->buf.len;
- long incr = v1_hdrsize;
- Py_ssize_t len = 0;
-
- while (pos + v1_hdrsize <= end && pos >= 0) {
- uint32_t comp_len;
- /* 3rd element of header is length of compressed inline data */
- comp_len = getbe32(data + pos + 8);
- incr = v1_hdrsize + comp_len;
- if (offsets)
- offsets[len] = data + pos;
- len++;
- pos += incr;
- }
-
- if (pos != end) {
- if (!PyErr_Occurred())
- PyErr_SetString(PyExc_ValueError, "corrupt index file");
- return -1;
- }
-
- return len;
-}
-
-static int index_init(indexObject *self, PyObject *args)
-{
- PyObject *data_obj, *inlined_obj;
- Py_ssize_t size;
-
- /* Initialize before argument-checking to avoid index_dealloc() crash. */
- self->raw_length = 0;
- self->added = NULL;
- self->cache = NULL;
- self->data = NULL;
- memset(&self->buf, 0, sizeof(self->buf));
- self->headrevs = NULL;
- self->filteredrevs = Py_None;
- Py_INCREF(Py_None);
- self->nt = NULL;
- self->offsets = NULL;
-
- if (!PyArg_ParseTuple(args, "OO", &data_obj, &inlined_obj))
- return -1;
- if (!PyObject_CheckBuffer(data_obj)) {
- PyErr_SetString(PyExc_TypeError,
- "data does not support buffer interface");
- return -1;
- }
-
- if (PyObject_GetBuffer(data_obj, &self->buf, PyBUF_SIMPLE) == -1)
- return -1;
- size = self->buf.len;
-
- self->inlined = inlined_obj && PyObject_IsTrue(inlined_obj);
- self->data = data_obj;
-
- self->ntlength = self->ntcapacity = 0;
- self->ntdepth = self->ntsplits = 0;
- self->ntlookups = self->ntmisses = 0;
- self->ntrev = -1;
- Py_INCREF(self->data);
-
- if (self->inlined) {
- Py_ssize_t len = inline_scan(self, NULL);
- if (len == -1)
- goto bail;
- self->raw_length = len;
- self->length = len + 1;
- } else {
- if (size % v1_hdrsize) {
- PyErr_SetString(PyExc_ValueError, "corrupt index file");
- goto bail;
- }
- self->raw_length = size / v1_hdrsize;
- self->length = self->raw_length + 1;
- }
-
- return 0;
-bail:
- return -1;
-}
-
-static PyObject *index_nodemap(indexObject *self)
-{
- Py_INCREF(self);
- return (PyObject *)self;
-}
-
-static void index_dealloc(indexObject *self)
-{
- _index_clearcaches(self);
- Py_XDECREF(self->filteredrevs);
- if (self->buf.buf) {
- PyBuffer_Release(&self->buf);
- memset(&self->buf, 0, sizeof(self->buf));
- }
- Py_XDECREF(self->data);
- Py_XDECREF(self->added);
- PyObject_Del(self);
-}
-
-static PySequenceMethods index_sequence_methods = {
- (lenfunc)index_length, /* sq_length */
- 0, /* sq_concat */
- 0, /* sq_repeat */
- (ssizeargfunc)index_get, /* sq_item */
- 0, /* sq_slice */
- 0, /* sq_ass_item */
- 0, /* sq_ass_slice */
- (objobjproc)index_contains, /* sq_contains */
-};
-
-static PyMappingMethods index_mapping_methods = {
- (lenfunc)index_length, /* mp_length */
- (binaryfunc)index_getitem, /* mp_subscript */
- (objobjargproc)index_assign_subscript, /* mp_ass_subscript */
-};
-
-static PyMethodDef index_methods[] = {
- {"ancestors", (PyCFunction)index_ancestors, METH_VARARGS,
- "return the gca set of the given revs"},
- {"commonancestorsheads", (PyCFunction)index_commonancestorsheads,
- METH_VARARGS,
- "return the heads of the common ancestors of the given revs"},
- {"clearcaches", (PyCFunction)index_clearcaches, METH_NOARGS,
- "clear the index caches"},
- {"get", (PyCFunction)index_m_get, METH_VARARGS,
- "get an index entry"},
- {"computephasesmapsets", (PyCFunction)compute_phases_map_sets,
- METH_VARARGS, "compute phases"},
- {"reachableroots2", (PyCFunction)reachableroots2, METH_VARARGS,
- "reachableroots"},
- {"headrevs", (PyCFunction)index_headrevs, METH_VARARGS,
- "get head revisions"}, /* Can do filtering since 3.2 */
- {"headrevsfiltered", (PyCFunction)index_headrevs, METH_VARARGS,
- "get filtered head revisions"}, /* Can always do filtering */
- {"insert", (PyCFunction)index_insert, METH_VARARGS,
- "insert an index entry"},
- {"partialmatch", (PyCFunction)index_partialmatch, METH_VARARGS,
- "match a potentially ambiguous node ID"},
- {"stats", (PyCFunction)index_stats, METH_NOARGS,
- "stats for the index"},
- {NULL} /* Sentinel */
-};
-
-static PyGetSetDef index_getset[] = {
- {"nodemap", (getter)index_nodemap, NULL, "nodemap", NULL},
- {NULL} /* Sentinel */
-};
-
-static PyTypeObject indexType = {
- PyVarObject_HEAD_INIT(NULL, 0)
- "parsers.index", /* tp_name */
- sizeof(indexObject), /* tp_basicsize */
- 0, /* tp_itemsize */
- (destructor)index_dealloc, /* tp_dealloc */
- 0, /* tp_print */
- 0, /* tp_getattr */
- 0, /* tp_setattr */
- 0, /* tp_compare */
- 0, /* tp_repr */
- 0, /* tp_as_number */
- &index_sequence_methods, /* tp_as_sequence */
- &index_mapping_methods, /* tp_as_mapping */
- 0, /* tp_hash */
- 0, /* tp_call */
- 0, /* tp_str */
- 0, /* tp_getattro */
- 0, /* tp_setattro */
- 0, /* tp_as_buffer */
- Py_TPFLAGS_DEFAULT, /* tp_flags */
- "revlog index", /* tp_doc */
- 0, /* tp_traverse */
- 0, /* tp_clear */
- 0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- 0, /* tp_iter */
- 0, /* tp_iternext */
- index_methods, /* tp_methods */
- 0, /* tp_members */
- index_getset, /* tp_getset */
- 0, /* tp_base */
- 0, /* tp_dict */
- 0, /* tp_descr_get */
- 0, /* tp_descr_set */
- 0, /* tp_dictoffset */
- (initproc)index_init, /* tp_init */
- 0, /* tp_alloc */
-};
-
-/*
- * returns a tuple of the form (index, index, cache) with elements as
- * follows:
- *
- * index: an index object that lazily parses RevlogNG records
- * cache: if data is inlined, a tuple (0, index_file_content), else None
- * index_file_content could be a string, or a buffer
- *
- * added complications are for backwards compatibility
- */
-static PyObject *parse_index2(PyObject *self, PyObject *args)
-{
- PyObject *tuple = NULL, *cache = NULL;
- indexObject *idx;
- int ret;
-
- idx = PyObject_New(indexObject, &indexType);
- if (idx == NULL)
- goto bail;
-
- ret = index_init(idx, args);
- if (ret == -1)
- goto bail;
-
- if (idx->inlined) {
- cache = Py_BuildValue("iO", 0, idx->data);
- if (cache == NULL)
- goto bail;
- } else {
- cache = Py_None;
- Py_INCREF(cache);
- }
-
- tuple = Py_BuildValue("NN", idx, cache);
- if (!tuple)
- goto bail;
- return tuple;
-
-bail:
- Py_XDECREF(idx);
- Py_XDECREF(cache);
- Py_XDECREF(tuple);
- return NULL;
-}
-
-#define BUMPED_FIX 1
-#define USING_SHA_256 2
-#define FM1_HEADER_SIZE (4 + 8 + 2 + 2 + 1 + 1 + 1)
-
-static PyObject *readshas(
- const char *source, unsigned char num, Py_ssize_t hashwidth)
-{
- int i;
- PyObject *list = PyTuple_New(num);
- if (list == NULL) {
- return NULL;
- }
- for (i = 0; i < num; i++) {
- PyObject *hash = PyBytes_FromStringAndSize(source, hashwidth);
- if (hash == NULL) {
- Py_DECREF(list);
- return NULL;
- }
- PyTuple_SET_ITEM(list, i, hash);
- source += hashwidth;
- }
- return list;
-}
-
-static PyObject *fm1readmarker(const char *databegin, const char *dataend,
- uint32_t *msize)
-{
- const char *data = databegin;
- const char *meta;
-
- double mtime;
- int16_t tz;
- uint16_t flags;
- unsigned char nsuccs, nparents, nmetadata;
- Py_ssize_t hashwidth = 20;
-
- PyObject *prec = NULL, *parents = NULL, *succs = NULL;
- PyObject *metadata = NULL, *ret = NULL;
- int i;
-
- if (data + FM1_HEADER_SIZE > dataend) {
- goto overflow;
- }
-
- *msize = getbe32(data);
- data += 4;
- mtime = getbefloat64(data);
- data += 8;
- tz = getbeint16(data);
- data += 2;
- flags = getbeuint16(data);
- data += 2;
-
- if (flags & USING_SHA_256) {
- hashwidth = 32;
- }
-
- nsuccs = (unsigned char)(*data++);
- nparents = (unsigned char)(*data++);
- nmetadata = (unsigned char)(*data++);
-
- if (databegin + *msize > dataend) {
- goto overflow;
- }
- dataend = databegin + *msize; /* narrow down to marker size */
-
- if (data + hashwidth > dataend) {
- goto overflow;
- }
- prec = PyBytes_FromStringAndSize(data, hashwidth);
- data += hashwidth;
- if (prec == NULL) {
- goto bail;
- }
-
- if (data + nsuccs * hashwidth > dataend) {
- goto overflow;
- }
- succs = readshas(data, nsuccs, hashwidth);
- if (succs == NULL) {
- goto bail;
- }
- data += nsuccs * hashwidth;
-
- if (nparents == 1 || nparents == 2) {
- if (data + nparents * hashwidth > dataend) {
- goto overflow;
- }
- parents = readshas(data, nparents, hashwidth);
- if (parents == NULL) {
- goto bail;
- }
- data += nparents * hashwidth;
- } else {
- parents = Py_None;
- Py_INCREF(parents);
- }
-
- if (data + 2 * nmetadata > dataend) {
- goto overflow;
- }
- meta = data + (2 * nmetadata);
- metadata = PyTuple_New(nmetadata);
- if (metadata == NULL) {
- goto bail;
- }
- for (i = 0; i < nmetadata; i++) {
- PyObject *tmp, *left = NULL, *right = NULL;
- Py_ssize_t leftsize = (unsigned char)(*data++);
- Py_ssize_t rightsize = (unsigned char)(*data++);
- if (meta + leftsize + rightsize > dataend) {
- goto overflow;
- }
- left = PyBytes_FromStringAndSize(meta, leftsize);
- meta += leftsize;
- right = PyBytes_FromStringAndSize(meta, rightsize);
- meta += rightsize;
- tmp = PyTuple_New(2);
- if (!left || !right || !tmp) {
- Py_XDECREF(left);
- Py_XDECREF(right);
- Py_XDECREF(tmp);
- goto bail;
- }
- PyTuple_SET_ITEM(tmp, 0, left);
- PyTuple_SET_ITEM(tmp, 1, right);
- PyTuple_SET_ITEM(metadata, i, tmp);
- }
- ret = Py_BuildValue("(OOHO(di)O)", prec, succs, flags,
- metadata, mtime, (int)tz * 60, parents);
- goto bail; /* return successfully */
-
-overflow:
- PyErr_SetString(PyExc_ValueError, "overflow in obsstore");
-bail:
- Py_XDECREF(prec);
- Py_XDECREF(succs);
- Py_XDECREF(metadata);
- Py_XDECREF(parents);
- return ret;
-}
-
-
-static PyObject *fm1readmarkers(PyObject *self, PyObject *args) {
- const char *data, *dataend;
- int datalen;
- Py_ssize_t offset, stop;
- PyObject *markers = NULL;
-
- if (!PyArg_ParseTuple(args, "s#nn", &data, &datalen, &offset, &stop)) {
- return NULL;
- }
- dataend = data + datalen;
- data += offset;
- markers = PyList_New(0);
- if (!markers) {
- return NULL;
- }
- while (offset < stop) {
- uint32_t msize;
- int error;
- PyObject *record = fm1readmarker(data, dataend, &msize);
- if (!record) {
- goto bail;
- }
- error = PyList_Append(markers, record);
- Py_DECREF(record);
- if (error) {
- goto bail;
- }
- data += msize;
- offset += msize;
- }
- return markers;
-bail:
- Py_DECREF(markers);
- return NULL;
-}
-
-static char parsers_doc[] = "Efficient content parsing.";
-
-PyObject *encodedir(PyObject *self, PyObject *args);
-PyObject *pathencode(PyObject *self, PyObject *args);
-PyObject *lowerencode(PyObject *self, PyObject *args);
-
-static PyMethodDef methods[] = {
- {"pack_dirstate", pack_dirstate, METH_VARARGS, "pack a dirstate\n"},
- {"nonnormalotherparententries", nonnormalotherparententries, METH_VARARGS,
- "create a set containing non-normal and other parent entries of given "
- "dirstate\n"},
- {"parse_manifest", parse_manifest, METH_VARARGS, "parse a manifest\n"},
- {"parse_dirstate", parse_dirstate, METH_VARARGS, "parse a dirstate\n"},
- {"parse_index2", parse_index2, METH_VARARGS, "parse a revlog index\n"},
- {"asciilower", asciilower, METH_VARARGS, "lowercase an ASCII string\n"},
- {"asciiupper", asciiupper, METH_VARARGS, "uppercase an ASCII string\n"},
- {"dict_new_presized", dict_new_presized, METH_VARARGS,
- "construct a dict with an expected size\n"},
- {"make_file_foldmap", make_file_foldmap, METH_VARARGS,
- "make file foldmap\n"},
- {"encodedir", encodedir, METH_VARARGS, "encodedir a path\n"},
- {"pathencode", pathencode, METH_VARARGS, "fncache-encode a path\n"},
- {"lowerencode", lowerencode, METH_VARARGS, "lower-encode a path\n"},
- {"fm1readmarkers", fm1readmarkers, METH_VARARGS,
- "parse v1 obsolete markers\n"},
- {NULL, NULL}
-};
-
-void dirs_module_init(PyObject *mod);
-void manifest_module_init(PyObject *mod);
-
-static void module_init(PyObject *mod)
-{
- /* This module constant has two purposes. First, it lets us unit test
- * the ImportError raised without hard-coding any error text. This
- * means we can change the text in the future without breaking tests,
- * even across changesets without a recompile. Second, its presence
- * can be used to determine whether the version-checking logic is
- * present, which also helps in testing across changesets without a
- * recompile. Note that this means the pure-Python version of parsers
- * should not have this module constant. */
- PyModule_AddStringConstant(mod, "versionerrortext", versionerrortext);
-
- dirs_module_init(mod);
- manifest_module_init(mod);
-
- indexType.tp_new = PyType_GenericNew;
- if (PyType_Ready(&indexType) < 0 ||
- PyType_Ready(&dirstateTupleType) < 0)
- return;
- Py_INCREF(&indexType);
- PyModule_AddObject(mod, "index", (PyObject *)&indexType);
- Py_INCREF(&dirstateTupleType);
- PyModule_AddObject(mod, "dirstatetuple",
- (PyObject *)&dirstateTupleType);
-
- nullentry = Py_BuildValue("iiiiiiis#", 0, 0, 0,
- -1, -1, -1, -1, nullid, 20);
- if (nullentry)
- PyObject_GC_UnTrack(nullentry);
-}
-
-static int check_python_version(void)
-{
- PyObject *sys = PyImport_ImportModule("sys"), *ver;
- long hexversion;
- if (!sys)
- return -1;
- ver = PyObject_GetAttrString(sys, "hexversion");
- Py_DECREF(sys);
- if (!ver)
- return -1;
- hexversion = PyInt_AsLong(ver);
- Py_DECREF(ver);
- /* sys.hexversion is a 32-bit number by default, so the -1 case
- * should only occur in unusual circumstances (e.g. if sys.hexversion
- * is manually set to an invalid value). */
- if ((hexversion == -1) || (hexversion >> 16 != PY_VERSION_HEX >> 16)) {
- PyErr_Format(PyExc_ImportError, "%s: The Mercurial extension "
- "modules were compiled with Python " PY_VERSION ", but "
- "Mercurial is currently using Python with sys.hexversion=%ld: "
- "Python %s\n at: %s", versionerrortext, hexversion,
- Py_GetVersion(), Py_GetProgramFullPath());
- return -1;
- }
- return 0;
-}
-
-#ifdef IS_PY3K
-static struct PyModuleDef parsers_module = {
- PyModuleDef_HEAD_INIT,
- "parsers",
- parsers_doc,
- -1,
- methods
-};
-
-PyMODINIT_FUNC PyInit_parsers(void)
-{
- PyObject *mod;
-
- if (check_python_version() == -1)
- return NULL;
- mod = PyModule_Create(&parsers_module);
- module_init(mod);
- return mod;
-}
-#else
-PyMODINIT_FUNC initparsers(void)
-{
- PyObject *mod;
-
- if (check_python_version() == -1)
- return;
- mod = Py_InitModule3("parsers", methods, parsers_doc);
- module_init(mod);
-}
-#endif
--- a/mercurial/patch.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/patch.py Wed Jul 19 07:51:41 2017 -0500
@@ -26,20 +26,21 @@
short,
)
from . import (
- base85,
copies,
- diffhelpers,
encoding,
error,
mail,
mdiff,
pathutil,
+ policy,
pycompat,
scmutil,
similar,
util,
vfs as vfsmod,
)
+
+diffhelpers = policy.importmod(r'diffhelpers')
stringio = util.stringio
gitre = re.compile(br'diff --git a/(.*) b/(.*)')
@@ -447,14 +448,14 @@
def exists(self, fname):
raise NotImplementedError
+ def close(self):
+ raise NotImplementedError
+
class fsbackend(abstractbackend):
def __init__(self, ui, basedir):
super(fsbackend, self).__init__(ui)
self.opener = vfsmod.vfs(basedir)
- def _join(self, f):
- return os.path.join(self.opener.base, f)
-
def getfile(self, fname):
if self.opener.islink(fname):
return (self.opener.readlink(fname), (True, False))
@@ -802,7 +803,7 @@
for x, s in enumerate(self.lines):
self.hash.setdefault(s, []).append(x)
- for fuzzlen in xrange(self.ui.configint("patch", "fuzz", 2) + 1):
+ for fuzzlen in xrange(self.ui.configint("patch", "fuzz") + 1):
for toponly in [True, False]:
old, oldstart, new, newstart = h.fuzzit(fuzzlen, toponly)
oldstart = oldstart + self.offset + self.skew
@@ -921,18 +922,24 @@
XXX shouldn't we merge this with the other hunk class?
"""
- maxcontext = 3
- def __init__(self, header, fromline, toline, proc, before, hunk, after):
- def trimcontext(number, lines):
- delta = len(lines) - self.maxcontext
- if False and delta > 0:
- return number + delta, lines[:self.maxcontext]
- return number, lines
+ def __init__(self, header, fromline, toline, proc, before, hunk, after,
+ maxcontext=None):
+ def trimcontext(lines, reverse=False):
+ if maxcontext is not None:
+ delta = len(lines) - maxcontext
+ if delta > 0:
+ if reverse:
+ return delta, lines[delta:]
+ else:
+ return delta, lines[:maxcontext]
+ return 0, lines
self.header = header
- self.fromline, self.before = trimcontext(fromline, before)
- self.toline, self.after = trimcontext(toline, after)
+ trimedbefore, self.before = trimcontext(before, True)
+ self.fromline = fromline + trimedbefore
+ self.toline = toline + trimedbefore
+ _trimedafter, self.after = trimcontext(after, False)
self.proc = proc
self.hunk = hunk
self.added, self.removed = self.countchanges(self.hunk)
@@ -958,6 +965,18 @@
rem = len([h for h in hunk if h[0] == '-'])
return add, rem
+ def reversehunk(self):
+ """return another recordhunk which is the reverse of the hunk
+
+ If this hunk is diff(A, B), the returned hunk is diff(B, A). To do
+ that, swap fromline/toline and +/- signs while keep other things
+ unchanged.
+ """
+ m = {'+': '-', '-': '+'}
+ hunk = ['%s%s' % (m[l[0]], l[1:]) for l in self.hunk]
+ return recordhunk(self.header, self.toline, self.fromline, self.proc,
+ self.before, hunk, self.after)
+
def write(self, fp):
delta = len(self.before) + len(self.after)
if self.after and self.after[-1] == '\\ No newline at end of file\n':
@@ -1430,7 +1449,7 @@
else:
l = ord(l) - ord('a') + 27
try:
- dec.append(base85.b85decode(line[1:])[:l])
+ dec.append(util.b85decode(line[1:])[:l])
except ValueError as e:
raise PatchError(_('could not decode "%s" binary patch: %s')
% (self._fname, str(e)))
@@ -1492,7 +1511,7 @@
c
1
2
- @@ -1,6 +2,6 @@
+ @@ -2,6 +1,6 @@
c
1
2
@@ -1500,31 +1519,63 @@
+4
5
d
- @@ -5,3 +6,2 @@
+ @@ -6,3 +5,2 @@
5
d
-lastline
'''
- from . import crecord as crecordmod
newhunks = []
for c in hunks:
- if isinstance(c, crecordmod.uihunk):
- # curses hunks encapsulate the record hunk in _hunk
- c = c._hunk
- if isinstance(c, recordhunk):
- for j, line in enumerate(c.hunk):
- if line.startswith("-"):
- c.hunk[j] = "+" + c.hunk[j][1:]
- elif line.startswith("+"):
- c.hunk[j] = "-" + c.hunk[j][1:]
- c.added, c.removed = c.removed, c.added
+ if util.safehasattr(c, 'reversehunk'):
+ c = c.reversehunk()
newhunks.append(c)
return newhunks
-def parsepatch(originalchunks):
- """patch -> [] of headers -> [] of hunks """
+def parsepatch(originalchunks, maxcontext=None):
+ """patch -> [] of headers -> [] of hunks
+
+ If maxcontext is not None, trim context lines if necessary.
+
+ >>> rawpatch = '''diff --git a/folder1/g b/folder1/g
+ ... --- a/folder1/g
+ ... +++ b/folder1/g
+ ... @@ -1,8 +1,10 @@
+ ... 1
+ ... 2
+ ... -3
+ ... 4
+ ... 5
+ ... 6
+ ... +6.1
+ ... +6.2
+ ... 7
+ ... 8
+ ... +9'''
+ >>> out = util.stringio()
+ >>> headers = parsepatch([rawpatch], maxcontext=1)
+ >>> for header in headers:
+ ... header.write(out)
+ ... for hunk in header.hunks:
+ ... hunk.write(out)
+ >>> print(out.getvalue())
+ diff --git a/folder1/g b/folder1/g
+ --- a/folder1/g
+ +++ b/folder1/g
+ @@ -2,3 +2,2 @@
+ 2
+ -3
+ 4
+ @@ -6,2 +5,4 @@
+ 6
+ +6.1
+ +6.2
+ 7
+ @@ -8,1 +9,2 @@
+ 8
+ +9
+ """
class parser(object):
"""patch parsing state machine"""
def __init__(self):
@@ -1546,7 +1597,7 @@
def addcontext(self, context):
if self.hunk:
h = recordhunk(self.header, self.fromline, self.toline,
- self.proc, self.before, self.hunk, context)
+ self.proc, self.before, self.hunk, context, maxcontext)
self.header.hunks.append(h)
self.fromline += len(self.before) + h.removed
self.toline += len(self.before) + h.added
@@ -2073,7 +2124,7 @@
if files is None:
files = set()
if eolmode is None:
- eolmode = ui.config('patch', 'eol', 'strict')
+ eolmode = ui.config('patch', 'eol')
if eolmode.lower() not in eolmodes:
raise error.Abort(_('unsupported line endings type: %s') % eolmode)
eolmode = eolmode.lower()
@@ -2508,12 +2559,15 @@
revinfo = ' '.join(["-r %s" % rev for rev in revs])
return 'diff %s %s' % (revinfo, f)
+ def isempty(fctx):
+ return fctx is None or fctx.size() == 0
+
date1 = util.datestr(ctx1.date())
date2 = util.datestr(ctx2.date())
gitmode = {'l': '120000', 'x': '100755', '': '100644'}
- if relroot != '' and (repo.ui.configbool('devel', 'all')
+ if relroot != '' and (repo.ui.configbool('devel', 'all-warnings')
or repo.ui.configbool('devel', 'check-relroot')):
for f in modified + added + removed + copy.keys() + copy.values():
if f is not None and not f.startswith(relroot):
@@ -2523,28 +2577,30 @@
for f1, f2, copyop in _filepairs(modified, added, removed, copy, opts):
content1 = None
content2 = None
+ fctx1 = None
+ fctx2 = None
flag1 = None
flag2 = None
if f1:
- content1 = getfilectx(f1, ctx1).data()
+ fctx1 = getfilectx(f1, ctx1)
if opts.git or losedatafn:
flag1 = ctx1.flags(f1)
if f2:
- content2 = getfilectx(f2, ctx2).data()
+ fctx2 = getfilectx(f2, ctx2)
if opts.git or losedatafn:
flag2 = ctx2.flags(f2)
- binary = False
- if opts.git or losedatafn:
- binary = util.binary(content1) or util.binary(content2)
+ # if binary is True, output "summary" or "base85", but not "text diff"
+ binary = not opts.text and any(f.isbinary()
+ for f in [fctx1, fctx2] if f is not None)
if losedatafn and not opts.git:
if (binary or
# copy/rename
f2 in copy or
# empty file creation
- (not f1 and not content2) or
+ (not f1 and isempty(fctx2)) or
# empty file deletion
- (not content1 and not f2) or
+ (isempty(fctx1) and not f2) or
# create with flags
(not f1 and flag2) or
# change flags
@@ -2577,7 +2633,37 @@
elif revs and not repo.ui.quiet:
header.append(diffline(path1, revs))
- if binary and opts.git and not opts.nobinary and not opts.text:
+ # fctx.is | diffopts | what to | is fctx.data()
+ # binary() | text nobinary git index | output? | outputted?
+ # ------------------------------------|----------------------------
+ # yes | no no no * | summary | no
+ # yes | no no yes * | base85 | yes
+ # yes | no yes no * | summary | no
+ # yes | no yes yes 0 | summary | no
+ # yes | no yes yes >0 | summary | semi [1]
+ # yes | yes * * * | text diff | yes
+ # no | * * * * | text diff | yes
+ # [1]: hash(fctx.data()) is outputted. so fctx.data() cannot be faked
+ if binary and (not opts.git or (opts.git and opts.nobinary and not
+ opts.index)):
+ # fast path: no binary content will be displayed, content1 and
+ # content2 are only used for equivalent test. cmp() could have a
+ # fast path.
+ if fctx1 is not None:
+ content1 = b'\0'
+ if fctx2 is not None:
+ if fctx1 is not None and not fctx1.cmp(fctx2):
+ content2 = b'\0' # not different
+ else:
+ content2 = b'\0\0'
+ else:
+ # normal path: load contents
+ if fctx1 is not None:
+ content1 = fctx1.data()
+ if fctx2 is not None:
+ content2 = fctx2.data()
+
+ if binary and opts.git and not opts.nobinary:
text = mdiff.b85diff(content1, content2)
if text:
header.append('index %s..%s' %
@@ -2620,19 +2706,28 @@
if filename:
results.append((filename, adds, removes, isbinary))
+ # inheader is used to track if a line is in the
+ # header portion of the diff. This helps properly account
+ # for lines that start with '--' or '++'
+ inheader = False
+
for line in lines:
if line.startswith('diff'):
addresult()
- # set numbers to 0 anyway when starting new file
+ # starting a new file diff
+ # set numbers to 0 and reset inheader
+ inheader = True
adds, removes, isbinary = 0, 0, False
if line.startswith('diff --git a/'):
filename = gitre.search(line).group(2)
elif line.startswith('diff -r'):
# format: "diff -r ... -r ... filename"
filename = diffre.search(line).group(1)
- elif line.startswith('+') and not line.startswith('+++ '):
+ elif line.startswith('@@'):
+ inheader = False
+ elif line.startswith('+') and not inheader:
adds += 1
- elif line.startswith('-') and not line.startswith('--- '):
+ elif line.startswith('-') and not inheader:
removes += 1
elif (line.startswith('GIT binary patch') or
line.startswith('Binary file')):
@@ -2664,7 +2759,7 @@
if isbinary:
count = 'Bin'
else:
- count = adds + removes
+ count = '%d' % (adds + removes)
pluses = '+' * scale(adds)
minuses = '-' * scale(removes)
output.append(' %s%s | %*s %s%s\n' %
@@ -2687,10 +2782,10 @@
if line and line[-1] in '+-':
name, graph = line.rsplit(' ', 1)
yield (name + ' ', '')
- m = re.search(r'\++', graph)
+ m = re.search(br'\++', graph)
if m:
yield (m.group(0), 'diffstat.inserted')
- m = re.search(r'-+', graph)
+ m = re.search(br'-+', graph)
if m:
yield (m.group(0), 'diffstat.deleted')
else:
--- a/mercurial/pathencode.c Wed Jul 05 11:24:22 2017 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,765 +0,0 @@
-/*
- pathencode.c - efficient path name encoding
-
- Copyright 2012 Facebook
-
- This software may be used and distributed according to the terms of
- the GNU General Public License, incorporated herein by reference.
-*/
-
-/*
- * An implementation of the name encoding scheme used by the fncache
- * store. The common case is of a path < 120 bytes long, which is
- * handled either in a single pass with no allocations or two passes
- * with a single allocation. For longer paths, multiple passes are
- * required.
- */
-
-#define PY_SSIZE_T_CLEAN
-#include <Python.h>
-#include <assert.h>
-#include <ctype.h>
-#include <stdlib.h>
-#include <string.h>
-
-#include "util.h"
-
-/* state machine for the fast path */
-enum path_state {
- START, /* first byte of a path component */
- A, /* "AUX" */
- AU,
- THIRD, /* third of a 3-byte sequence, e.g. "AUX", "NUL" */
- C, /* "CON" or "COMn" */
- CO,
- COMLPT, /* "COM" or "LPT" */
- COMLPTn,
- L,
- LP,
- N,
- NU,
- P, /* "PRN" */
- PR,
- LDOT, /* leading '.' */
- DOT, /* '.' in a non-leading position */
- H, /* ".h" */
- HGDI, /* ".hg", ".d", or ".i" */
- SPACE,
- DEFAULT /* byte of a path component after the first */
-};
-
-/* state machine for dir-encoding */
-enum dir_state {
- DDOT,
- DH,
- DHGDI,
- DDEFAULT
-};
-
-static inline int inset(const uint32_t bitset[], char c)
-{
- return bitset[((uint8_t)c) >> 5] & (1 << (((uint8_t)c) & 31));
-}
-
-static inline void charcopy(char *dest, Py_ssize_t *destlen, size_t destsize,
- char c)
-{
- if (dest) {
- assert(*destlen < destsize);
- dest[*destlen] = c;
- }
- (*destlen)++;
-}
-
-static inline void memcopy(char *dest, Py_ssize_t *destlen, size_t destsize,
- const void *src, Py_ssize_t len)
-{
- if (dest) {
- assert(*destlen + len < destsize);
- memcpy((void *)&dest[*destlen], src, len);
- }
- *destlen += len;
-}
-
-static inline void hexencode(char *dest, Py_ssize_t *destlen, size_t destsize,
- uint8_t c)
-{
- static const char hexdigit[] = "0123456789abcdef";
-
- charcopy(dest, destlen, destsize, hexdigit[c >> 4]);
- charcopy(dest, destlen, destsize, hexdigit[c & 15]);
-}
-
-/* 3-byte escape: tilde followed by two hex digits */
-static inline void escape3(char *dest, Py_ssize_t *destlen, size_t destsize,
- char c)
-{
- charcopy(dest, destlen, destsize, '~');
- hexencode(dest, destlen, destsize, c);
-}
-
-static Py_ssize_t _encodedir(char *dest, size_t destsize,
- const char *src, Py_ssize_t len)
-{
- enum dir_state state = DDEFAULT;
- Py_ssize_t i = 0, destlen = 0;
-
- while (i < len) {
- switch (state) {
- case DDOT:
- switch (src[i]) {
- case 'd':
- case 'i':
- state = DHGDI;
- charcopy(dest, &destlen, destsize, src[i++]);
- break;
- case 'h':
- state = DH;
- charcopy(dest, &destlen, destsize, src[i++]);
- break;
- default:
- state = DDEFAULT;
- break;
- }
- break;
- case DH:
- if (src[i] == 'g') {
- state = DHGDI;
- charcopy(dest, &destlen, destsize, src[i++]);
- }
- else state = DDEFAULT;
- break;
- case DHGDI:
- if (src[i] == '/') {
- memcopy(dest, &destlen, destsize, ".hg", 3);
- charcopy(dest, &destlen, destsize, src[i++]);
- }
- state = DDEFAULT;
- break;
- case DDEFAULT:
- if (src[i] == '.')
- state = DDOT;
- charcopy(dest, &destlen, destsize, src[i++]);
- break;
- }
- }
-
- return destlen;
-}
-
-PyObject *encodedir(PyObject *self, PyObject *args)
-{
- Py_ssize_t len, newlen;
- PyObject *pathobj, *newobj;
- char *path;
-
- if (!PyArg_ParseTuple(args, "O:encodedir", &pathobj))
- return NULL;
-
- if (PyBytes_AsStringAndSize(pathobj, &path, &len) == -1) {
- PyErr_SetString(PyExc_TypeError, "expected a string");
- return NULL;
- }
-
- newlen = len ? _encodedir(NULL, 0, path, len + 1) : 1;
-
- if (newlen == len + 1) {
- Py_INCREF(pathobj);
- return pathobj;
- }
-
- newobj = PyBytes_FromStringAndSize(NULL, newlen);
-
- if (newobj) {
- assert(PyBytes_Check(newobj));
- Py_SIZE(newobj)--;
- _encodedir(PyBytes_AS_STRING(newobj), newlen, path,
- len + 1);
- }
-
- return newobj;
-}
-
-static Py_ssize_t _encode(const uint32_t twobytes[8], const uint32_t onebyte[8],
- char *dest, Py_ssize_t destlen, size_t destsize,
- const char *src, Py_ssize_t len,
- int encodedir)
-{
- enum path_state state = START;
- Py_ssize_t i = 0;
-
- /*
- * Python strings end with a zero byte, which we use as a
- * terminal token as they are not valid inside path names.
- */
-
- while (i < len) {
- switch (state) {
- case START:
- switch (src[i]) {
- case '/':
- charcopy(dest, &destlen, destsize, src[i++]);
- break;
- case '.':
- state = LDOT;
- escape3(dest, &destlen, destsize, src[i++]);
- break;
- case ' ':
- state = DEFAULT;
- escape3(dest, &destlen, destsize, src[i++]);
- break;
- case 'a':
- state = A;
- charcopy(dest, &destlen, destsize, src[i++]);
- break;
- case 'c':
- state = C;
- charcopy(dest, &destlen, destsize, src[i++]);
- break;
- case 'l':
- state = L;
- charcopy(dest, &destlen, destsize, src[i++]);
- break;
- case 'n':
- state = N;
- charcopy(dest, &destlen, destsize, src[i++]);
- break;
- case 'p':
- state = P;
- charcopy(dest, &destlen, destsize, src[i++]);
- break;
- default:
- state = DEFAULT;
- break;
- }
- break;
- case A:
- if (src[i] == 'u') {
- state = AU;
- charcopy(dest, &destlen, destsize, src[i++]);
- }
- else state = DEFAULT;
- break;
- case AU:
- if (src[i] == 'x') {
- state = THIRD;
- i++;
- }
- else state = DEFAULT;
- break;
- case THIRD:
- state = DEFAULT;
- switch (src[i]) {
- case '.':
- case '/':
- case '\0':
- escape3(dest, &destlen, destsize, src[i - 1]);
- break;
- default:
- i--;
- break;
- }
- break;
- case C:
- if (src[i] == 'o') {
- state = CO;
- charcopy(dest, &destlen, destsize, src[i++]);
- }
- else state = DEFAULT;
- break;
- case CO:
- if (src[i] == 'm') {
- state = COMLPT;
- i++;
- }
- else if (src[i] == 'n') {
- state = THIRD;
- i++;
- }
- else state = DEFAULT;
- break;
- case COMLPT:
- switch (src[i]) {
- case '1': case '2': case '3': case '4': case '5':
- case '6': case '7': case '8': case '9':
- state = COMLPTn;
- i++;
- break;
- default:
- state = DEFAULT;
- charcopy(dest, &destlen, destsize, src[i - 1]);
- break;
- }
- break;
- case COMLPTn:
- state = DEFAULT;
- switch (src[i]) {
- case '.':
- case '/':
- case '\0':
- escape3(dest, &destlen, destsize, src[i - 2]);
- charcopy(dest, &destlen, destsize, src[i - 1]);
- break;
- default:
- memcopy(dest, &destlen, destsize,
- &src[i - 2], 2);
- break;
- }
- break;
- case L:
- if (src[i] == 'p') {
- state = LP;
- charcopy(dest, &destlen, destsize, src[i++]);
- }
- else state = DEFAULT;
- break;
- case LP:
- if (src[i] == 't') {
- state = COMLPT;
- i++;
- }
- else state = DEFAULT;
- break;
- case N:
- if (src[i] == 'u') {
- state = NU;
- charcopy(dest, &destlen, destsize, src[i++]);
- }
- else state = DEFAULT;
- break;
- case NU:
- if (src[i] == 'l') {
- state = THIRD;
- i++;
- }
- else state = DEFAULT;
- break;
- case P:
- if (src[i] == 'r') {
- state = PR;
- charcopy(dest, &destlen, destsize, src[i++]);
- }
- else state = DEFAULT;
- break;
- case PR:
- if (src[i] == 'n') {
- state = THIRD;
- i++;
- }
- else state = DEFAULT;
- break;
- case LDOT:
- switch (src[i]) {
- case 'd':
- case 'i':
- state = HGDI;
- charcopy(dest, &destlen, destsize, src[i++]);
- break;
- case 'h':
- state = H;
- charcopy(dest, &destlen, destsize, src[i++]);
- break;
- default:
- state = DEFAULT;
- break;
- }
- break;
- case DOT:
- switch (src[i]) {
- case '/':
- case '\0':
- state = START;
- memcopy(dest, &destlen, destsize, "~2e", 3);
- charcopy(dest, &destlen, destsize, src[i++]);
- break;
- case 'd':
- case 'i':
- state = HGDI;
- charcopy(dest, &destlen, destsize, '.');
- charcopy(dest, &destlen, destsize, src[i++]);
- break;
- case 'h':
- state = H;
- memcopy(dest, &destlen, destsize, ".h", 2);
- i++;
- break;
- default:
- state = DEFAULT;
- charcopy(dest, &destlen, destsize, '.');
- break;
- }
- break;
- case H:
- if (src[i] == 'g') {
- state = HGDI;
- charcopy(dest, &destlen, destsize, src[i++]);
- }
- else state = DEFAULT;
- break;
- case HGDI:
- if (src[i] == '/') {
- state = START;
- if (encodedir)
- memcopy(dest, &destlen, destsize, ".hg",
- 3);
- charcopy(dest, &destlen, destsize, src[i++]);
- }
- else state = DEFAULT;
- break;
- case SPACE:
- switch (src[i]) {
- case '/':
- case '\0':
- state = START;
- memcopy(dest, &destlen, destsize, "~20", 3);
- charcopy(dest, &destlen, destsize, src[i++]);
- break;
- default:
- state = DEFAULT;
- charcopy(dest, &destlen, destsize, ' ');
- break;
- }
- break;
- case DEFAULT:
- while (inset(onebyte, src[i])) {
- charcopy(dest, &destlen, destsize, src[i++]);
- if (i == len)
- goto done;
- }
- switch (src[i]) {
- case '.':
- state = DOT;
- i++;
- break;
- case ' ':
- state = SPACE;
- i++;
- break;
- case '/':
- state = START;
- charcopy(dest, &destlen, destsize, '/');
- i++;
- break;
- default:
- if (inset(onebyte, src[i])) {
- do {
- charcopy(dest, &destlen,
- destsize, src[i++]);
- } while (i < len &&
- inset(onebyte, src[i]));
- }
- else if (inset(twobytes, src[i])) {
- char c = src[i++];
- charcopy(dest, &destlen, destsize, '_');
- charcopy(dest, &destlen, destsize,
- c == '_' ? '_' : c + 32);
- }
- else
- escape3(dest, &destlen, destsize,
- src[i++]);
- break;
- }
- break;
- }
- }
-done:
- return destlen;
-}
-
-static Py_ssize_t basicencode(char *dest, size_t destsize,
- const char *src, Py_ssize_t len)
-{
- static const uint32_t twobytes[8] = { 0, 0, 0x87fffffe };
-
- static const uint32_t onebyte[8] = {
- 1, 0x2bff3bfa, 0x68000001, 0x2fffffff,
- };
-
- Py_ssize_t destlen = 0;
-
- return _encode(twobytes, onebyte, dest, destlen, destsize,
- src, len, 1);
-}
-
-static const Py_ssize_t maxstorepathlen = 120;
-
-static Py_ssize_t _lowerencode(char *dest, size_t destsize,
- const char *src, Py_ssize_t len)
-{
- static const uint32_t onebyte[8] = {
- 1, 0x2bfffbfb, 0xe8000001, 0x2fffffff
- };
-
- static const uint32_t lower[8] = { 0, 0, 0x7fffffe };
-
- Py_ssize_t i, destlen = 0;
-
- for (i = 0; i < len; i++) {
- if (inset(onebyte, src[i]))
- charcopy(dest, &destlen, destsize, src[i]);
- else if (inset(lower, src[i]))
- charcopy(dest, &destlen, destsize, src[i] + 32);
- else
- escape3(dest, &destlen, destsize, src[i]);
- }
-
- return destlen;
-}
-
-PyObject *lowerencode(PyObject *self, PyObject *args)
-{
- char *path;
- Py_ssize_t len, newlen;
- PyObject *ret;
-
- if (!PyArg_ParseTuple(args, "s#:lowerencode", &path, &len))
- return NULL;
-
- newlen = _lowerencode(NULL, 0, path, len);
- ret = PyBytes_FromStringAndSize(NULL, newlen);
- if (ret)
- _lowerencode(PyBytes_AS_STRING(ret), newlen, path, len);
-
- return ret;
-}
-
-/* See store.py:_auxencode for a description. */
-static Py_ssize_t auxencode(char *dest, size_t destsize,
- const char *src, Py_ssize_t len)
-{
- static const uint32_t twobytes[8];
-
- static const uint32_t onebyte[8] = {
- ~0U, 0xffff3ffe, ~0U, ~0U, ~0U, ~0U, ~0U, ~0U,
- };
-
- return _encode(twobytes, onebyte, dest, 0, destsize, src, len, 0);
-}
-
-static PyObject *hashmangle(const char *src, Py_ssize_t len, const char sha[20])
-{
- static const Py_ssize_t dirprefixlen = 8;
- static const Py_ssize_t maxshortdirslen = 68;
- char *dest;
- PyObject *ret;
-
- Py_ssize_t i, d, p, lastslash = len - 1, lastdot = -1;
- Py_ssize_t destsize, destlen = 0, slop, used;
-
- while (lastslash >= 0 && src[lastslash] != '/') {
- if (src[lastslash] == '.' && lastdot == -1)
- lastdot = lastslash;
- lastslash--;
- }
-
-#if 0
- /* All paths should end in a suffix of ".i" or ".d".
- Unfortunately, the file names in test-hybridencode.py
- violate this rule. */
- if (lastdot != len - 3) {
- PyErr_SetString(PyExc_ValueError,
- "suffix missing or wrong length");
- return NULL;
- }
-#endif
-
- /* If src contains a suffix, we will append it to the end of
- the new string, so make room. */
- destsize = 120;
- if (lastdot >= 0)
- destsize += len - lastdot - 1;
-
- ret = PyBytes_FromStringAndSize(NULL, destsize);
- if (ret == NULL)
- return NULL;
-
- dest = PyBytes_AS_STRING(ret);
- memcopy(dest, &destlen, destsize, "dh/", 3);
-
- /* Copy up to dirprefixlen bytes of each path component, up to
- a limit of maxshortdirslen bytes. */
- for (i = d = p = 0; i < lastslash; i++, p++) {
- if (src[i] == '/') {
- char d = dest[destlen - 1];
- /* After truncation, a directory name may end
- in a space or dot, which are unportable. */
- if (d == '.' || d == ' ')
- dest[destlen - 1] = '_';
- /* The + 3 is to account for "dh/" in the beginning */
- if (destlen > maxshortdirslen + 3)
- break;
- charcopy(dest, &destlen, destsize, src[i]);
- p = -1;
- }
- else if (p < dirprefixlen)
- charcopy(dest, &destlen, destsize, src[i]);
- }
-
- /* Rewind to just before the last slash copied. */
- if (destlen > maxshortdirslen + 3)
- do {
- destlen--;
- } while (destlen > 0 && dest[destlen] != '/');
-
- if (destlen > 3) {
- if (lastslash > 0) {
- char d = dest[destlen - 1];
- /* The last directory component may be
- truncated, so make it safe. */
- if (d == '.' || d == ' ')
- dest[destlen - 1] = '_';
- }
-
- charcopy(dest, &destlen, destsize, '/');
- }
-
- /* Add a prefix of the original file's name. Its length
- depends on the number of bytes left after accounting for
- hash and suffix. */
- used = destlen + 40;
- if (lastdot >= 0)
- used += len - lastdot - 1;
- slop = maxstorepathlen - used;
- if (slop > 0) {
- Py_ssize_t basenamelen =
- lastslash >= 0 ? len - lastslash - 2 : len - 1;
-
- if (basenamelen > slop)
- basenamelen = slop;
- if (basenamelen > 0)
- memcopy(dest, &destlen, destsize, &src[lastslash + 1],
- basenamelen);
- }
-
- /* Add hash and suffix. */
- for (i = 0; i < 20; i++)
- hexencode(dest, &destlen, destsize, sha[i]);
-
- if (lastdot >= 0)
- memcopy(dest, &destlen, destsize, &src[lastdot],
- len - lastdot - 1);
-
- assert(PyBytes_Check(ret));
- Py_SIZE(ret) = destlen;
-
- return ret;
-}
-
-/*
- * Avoiding a trip through Python would improve performance by 50%,
- * but we don't encounter enough long names to be worth the code.
- */
-static int sha1hash(char hash[20], const char *str, Py_ssize_t len)
-{
- static PyObject *shafunc;
- PyObject *shaobj, *hashobj;
-
- if (shafunc == NULL) {
- PyObject *hashlib, *name = PyBytes_FromString("hashlib");
-
- if (name == NULL)
- return -1;
-
- hashlib = PyImport_Import(name);
- Py_DECREF(name);
-
- if (hashlib == NULL) {
- PyErr_SetString(PyExc_ImportError, "hashlib");
- return -1;
- }
- shafunc = PyObject_GetAttrString(hashlib, "sha1");
- Py_DECREF(hashlib);
-
- if (shafunc == NULL) {
- PyErr_SetString(PyExc_AttributeError,
- "module 'hashlib' has no "
- "attribute 'sha1'");
- return -1;
- }
- }
-
- shaobj = PyObject_CallFunction(shafunc, "s#", str, len);
-
- if (shaobj == NULL)
- return -1;
-
- hashobj = PyObject_CallMethod(shaobj, "digest", "");
- Py_DECREF(shaobj);
- if (hashobj == NULL)
- return -1;
-
- if (!PyBytes_Check(hashobj) || PyBytes_GET_SIZE(hashobj) != 20) {
- PyErr_SetString(PyExc_TypeError,
- "result of digest is not a 20-byte hash");
- Py_DECREF(hashobj);
- return -1;
- }
-
- memcpy(hash, PyBytes_AS_STRING(hashobj), 20);
- Py_DECREF(hashobj);
- return 0;
-}
-
-#define MAXENCODE 4096 * 4
-
-static PyObject *hashencode(const char *src, Py_ssize_t len)
-{
- char dired[MAXENCODE];
- char lowered[MAXENCODE];
- char auxed[MAXENCODE];
- Py_ssize_t dirlen, lowerlen, auxlen, baselen;
- char sha[20];
-
- baselen = (len - 5) * 3;
- if (baselen >= MAXENCODE) {
- PyErr_SetString(PyExc_ValueError, "string too long");
- return NULL;
- }
-
- dirlen = _encodedir(dired, baselen, src, len);
- if (sha1hash(sha, dired, dirlen - 1) == -1)
- return NULL;
- lowerlen = _lowerencode(lowered, baselen, dired + 5, dirlen - 5);
- auxlen = auxencode(auxed, baselen, lowered, lowerlen);
- return hashmangle(auxed, auxlen, sha);
-}
-
-PyObject *pathencode(PyObject *self, PyObject *args)
-{
- Py_ssize_t len, newlen;
- PyObject *pathobj, *newobj;
- char *path;
-
- if (!PyArg_ParseTuple(args, "O:pathencode", &pathobj))
- return NULL;
-
- if (PyBytes_AsStringAndSize(pathobj, &path, &len) == -1) {
- PyErr_SetString(PyExc_TypeError, "expected a string");
- return NULL;
- }
-
- if (len > maxstorepathlen)
- newlen = maxstorepathlen + 2;
- else
- newlen = len ? basicencode(NULL, 0, path, len + 1) : 1;
-
- if (newlen <= maxstorepathlen + 1) {
- if (newlen == len + 1) {
- Py_INCREF(pathobj);
- return pathobj;
- }
-
- newobj = PyBytes_FromStringAndSize(NULL, newlen);
-
- if (newobj) {
- assert(PyBytes_Check(newobj));
- Py_SIZE(newobj)--;
- basicencode(PyBytes_AS_STRING(newobj), newlen, path,
- len + 1);
- }
- }
- else
- newobj = hashencode(path, len + 1);
-
- return newobj;
-}
--- a/mercurial/pathutil.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/pathutil.py Wed Jul 19 07:51:41 2017 -0500
@@ -46,7 +46,7 @@
else:
self.normcase = lambda x: x
- def __call__(self, path):
+ def __call__(self, path, mode=None):
'''Check the relative path.
path may contain a pattern (e.g. foodir/**.txt)'''
--- a/mercurial/phases.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/phases.py Wed Jul 19 07:51:41 2017 -0500
@@ -154,6 +154,18 @@
dirty = True
return roots, dirty
+def _trackphasechange(data, rev, old, new):
+ """add a phase move the <data> dictionnary
+
+ If data is None, nothing happens.
+ """
+ if data is None:
+ return
+ existing = data.get(rev)
+ if existing is not None:
+ old = existing[0]
+ data[rev] = (old, new)
+
class phasecache(object):
def __init__(self, repo, phasedefaults, _load=True):
if _load:
@@ -282,11 +294,33 @@
tr.addfilegenerator('phase', ('phaseroots',), self._write)
tr.hookargs['phases_moved'] = '1'
+ def registernew(self, repo, tr, targetphase, nodes):
+ repo = repo.unfiltered()
+ self._retractboundary(repo, tr, targetphase, nodes)
+ if tr is not None and 'phases' in tr.changes:
+ phasetracking = tr.changes['phases']
+ torev = repo.changelog.rev
+ phase = self.phase
+ for n in nodes:
+ rev = torev(n)
+ revphase = phase(repo, rev)
+ _trackphasechange(phasetracking, rev, None, revphase)
+ repo.invalidatevolatilesets()
+
def advanceboundary(self, repo, tr, targetphase, nodes):
+ """Set all 'nodes' to phase 'targetphase'
+
+ Nodes with a phase lower than 'targetphase' are not affected.
+ """
# Be careful to preserve shallow-copied values: do not update
# phaseroots values, replace them.
+ if tr is None:
+ phasetracking = None
+ else:
+ phasetracking = tr.changes.get('phases')
repo = repo.unfiltered()
+
delroots = [] # set of root deleted by this path
for phase in xrange(targetphase + 1, len(allphases)):
# filter nodes that are not in a compatible phase already
@@ -294,27 +328,63 @@
if self.phase(repo, repo[n].rev()) >= phase]
if not nodes:
break # no roots to move anymore
+
olds = self.phaseroots[phase]
+
+ affected = repo.revs('%ln::%ln', olds, nodes)
+ for r in affected:
+ _trackphasechange(phasetracking, r, self.phase(repo, r),
+ targetphase)
+
roots = set(ctx.node() for ctx in repo.set(
- 'roots((%ln::) - (%ln::%ln))', olds, olds, nodes))
+ 'roots((%ln::) - %ld)', olds, affected))
if olds != roots:
self._updateroots(phase, roots, tr)
# some roots may need to be declared for lower phases
delroots.extend(olds - roots)
- # declare deleted root in the target phase
- if targetphase != 0:
- self.retractboundary(repo, tr, targetphase, delroots)
+ # declare deleted root in the target phase
+ if targetphase != 0:
+ self._retractboundary(repo, tr, targetphase, delroots)
repo.invalidatevolatilesets()
def retractboundary(self, repo, tr, targetphase, nodes):
+ oldroots = self.phaseroots[:targetphase + 1]
+ if tr is None:
+ phasetracking = None
+ else:
+ phasetracking = tr.changes.get('phases')
+ repo = repo.unfiltered()
+ if (self._retractboundary(repo, tr, targetphase, nodes)
+ and phasetracking is not None):
+
+ # find the affected revisions
+ new = self.phaseroots[targetphase]
+ old = oldroots[targetphase]
+ affected = set(repo.revs('(%ln::) - (%ln::)', new, old))
+
+ # find the phase of the affected revision
+ for phase in xrange(targetphase, -1, -1):
+ if phase:
+ roots = oldroots[phase]
+ revs = set(repo.revs('%ln::%ld', roots, affected))
+ affected -= revs
+ else: # public phase
+ revs = affected
+ for r in revs:
+ _trackphasechange(phasetracking, r, phase, targetphase)
+ repo.invalidatevolatilesets()
+
+ def _retractboundary(self, repo, tr, targetphase, nodes):
# Be careful to preserve shallow-copied values: do not update
# phaseroots values, replace them.
repo = repo.unfiltered()
currentroots = self.phaseroots[targetphase]
+ finalroots = oldroots = set(currentroots)
newroots = [n for n in nodes
if self.phase(repo, repo[n].rev()) < targetphase]
if newroots:
+
if nullid in newroots:
raise error.Abort(_('cannot change null revision phase'))
currentroots = currentroots.copy()
@@ -330,9 +400,10 @@
finalroots = set(n for n in currentroots if repo[n].rev() <
minnewroot)
finalroots.update(ctx.node() for ctx in updatedroots)
-
+ if finalroots != oldroots:
self._updateroots(targetphase, finalroots, tr)
- repo.invalidatevolatilesets()
+ return True
+ return False
def filterunknown(self, repo):
"""remove unknown nodes from the phase boundary
@@ -385,6 +456,16 @@
phcache.retractboundary(repo, tr, targetphase, nodes)
repo._phasecache.replace(phcache)
+def registernew(repo, tr, targetphase, nodes):
+ """register a new revision and its phase
+
+ Code adding revisions to the repository should use this function to
+    set new changesets in their target phase (or higher).
+ """
+ phcache = repo._phasecache.copy()
+ phcache.registernew(repo, tr, targetphase, nodes)
+ repo._phasecache.replace(phcache)
+
def listphases(repo):
"""List phases root for serialization over pushkey"""
# Use ordered dictionary so behavior is deterministic.
@@ -423,12 +504,34 @@
if currentphase == oldphase and newphase < oldphase:
with repo.transaction('pushkey-phase') as tr:
advanceboundary(repo, tr, newphase, [bin(nhex)])
- return 1
+ return True
elif currentphase == newphase:
# raced, but got correct result
- return 1
+ return True
else:
- return 0
+ return False
+
+def subsetphaseheads(repo, subset):
+ """Finds the phase heads for a subset of a history
+
+ Returns a list indexed by phase number where each item is a list of phase
+ head nodes.
+ """
+ cl = repo.changelog
+
+ headsbyphase = [[] for i in allphases]
+ # No need to keep track of secret phase; any heads in the subset that
+ # are not mentioned are implicitly secret.
+ for phase in allphases[:-1]:
+ revset = "heads(%%ln & %s())" % phasenames[phase]
+ headsbyphase[phase] = [cl.node(r) for r in repo.revs(revset, subset)]
+ return headsbyphase
+
+def updatephases(repo, tr, headsbyphase):
+ """Updates the repo with the given phase heads"""
+ # Now advance phase boundaries of all but secret phase
+ for phase in allphases[:-1]:
+ advanceboundary(repo, tr, phase, headsbyphase[phase])
def analyzeremotephases(repo, subset, roots):
"""Compute phases heads and root in a subset of node from root dict
--- a/mercurial/policy.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/policy.py Wed Jul 19 07:51:41 2017 -0500
@@ -18,10 +18,18 @@
# cffi-allow - allow pure Python implementation if cffi version is missing
# py - only load pure Python modules
#
-# By default, require the C extensions for performance reasons.
-policy = b'c'
-policynoc = (b'cffi', b'cffi-allow', b'py')
-policynocffi = (b'c', b'py')
+# By default, fall back to the pure modules so the in-place build can
+# run without recompiling the C extensions. This will be overridden by
+# __modulepolicy__ generated by setup.py.
+policy = b'allow'
+_packageprefs = {
+ # policy: (versioned package, pure package)
+ b'c': (r'cext', None),
+ b'allow': (r'cext', r'pure'),
+ b'cffi': (r'cffi', None),
+ b'cffi-allow': (r'cffi', r'pure'),
+ b'py': (None, r'pure'),
+}
try:
from . import __modulepolicy__
@@ -33,8 +41,8 @@
#
# The canonical way to do this is to test platform.python_implementation().
# But we don't import platform and don't bloat for it here.
-if '__pypy__' in sys.builtin_module_names:
- policy = 'cffi'
+if r'__pypy__' in sys.builtin_module_names:
+ policy = b'cffi'
# Our C extensions aren't yet compatible with Python 3. So use pure Python
# on Python 3 for now.
@@ -43,7 +51,54 @@
# Environment variable can always force settings.
if sys.version_info[0] >= 3:
- if 'HGMODULEPOLICY' in os.environ:
- policy = os.environ['HGMODULEPOLICY'].encode('utf-8')
+ if r'HGMODULEPOLICY' in os.environ:
+ policy = os.environ[r'HGMODULEPOLICY'].encode(r'utf-8')
else:
- policy = os.environ.get('HGMODULEPOLICY', policy)
+ policy = os.environ.get(r'HGMODULEPOLICY', policy)
+
+def _importfrom(pkgname, modname):
+ # from .<pkgname> import <modname> (where . is looked through this module)
+ fakelocals = {}
+ pkg = __import__(pkgname, globals(), fakelocals, [modname], level=1)
+ try:
+ fakelocals[modname] = mod = getattr(pkg, modname)
+ except AttributeError:
+ raise ImportError(r'cannot import name %s' % modname)
+ # force import; fakelocals[modname] may be replaced with the real module
+ getattr(mod, r'__doc__', None)
+ return fakelocals[modname]
+
+# keep in sync with "version" in C modules
+_cextversions = {
+ (r'cext', r'base85'): 1,
+ (r'cext', r'bdiff'): 1,
+ (r'cext', r'diffhelpers'): 1,
+ (r'cext', r'mpatch'): 1,
+ (r'cext', r'osutil'): 1,
+ (r'cext', r'parsers'): 1,
+}
+
+def _checkmod(pkgname, modname, mod):
+ expected = _cextversions.get((pkgname, modname))
+ actual = getattr(mod, r'version', None)
+ if actual != expected:
+ raise ImportError(r'cannot import module %s.%s '
+ r'(expected version: %d, actual: %r)'
+ % (pkgname, modname, expected, actual))
+
+def importmod(modname):
+ """Import module according to policy and check API version"""
+ try:
+ verpkg, purepkg = _packageprefs[policy]
+ except KeyError:
+ raise ImportError(r'invalid HGMODULEPOLICY %r' % policy)
+ assert verpkg or purepkg
+ if verpkg:
+ try:
+ mod = _importfrom(verpkg, modname)
+ _checkmod(verpkg, modname, mod)
+ return mod
+ except ImportError:
+ if not purepkg:
+ raise
+ return _importfrom(purepkg, modname)
--- a/mercurial/posix.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/posix.py Wed Jul 19 07:51:41 2017 -0500
@@ -98,7 +98,8 @@
return (os.lstat(f).st_mode & 0o100 != 0)
def setflags(f, l, x):
- s = os.lstat(f).st_mode
+ st = os.lstat(f)
+ s = st.st_mode
if l:
if not stat.S_ISLNK(s):
# switch file to link
@@ -125,6 +126,14 @@
s = 0o666 & ~umask # avoid restatting for chmod
sx = s & 0o100
+ if st.st_nlink > 1 and bool(x) != bool(sx):
+ # the file is a hardlink, break it
+ with open(f, "rb") as fp:
+ data = fp.read()
+ unlink(f)
+ with open(f, "wb") as fp:
+ fp.write(data)
+
if x and not sx:
# Turn on +x for every +r bit when making a file executable
# and obey umask.
@@ -244,7 +253,17 @@
# create a fixed file to link to; doesn't matter if it
# already exists.
target = 'checklink-target'
- open(os.path.join(cachedir, target), 'w').close()
+ try:
+ open(os.path.join(cachedir, target), 'w').close()
+ except IOError as inst:
+ if inst[0] == errno.EACCES:
+ # If we can't write to cachedir, just pretend
+ # that the fs is readonly and by association
+ # that the fs won't support symlinks. This
+ # seems like the least dangerous way to avoid
+ # data loss.
+ return False
+ raise
try:
os.symlink(target, name)
if cachedir is None:
@@ -474,7 +493,7 @@
def setsignalhandler():
pass
-_wantedkinds = set([stat.S_IFREG, stat.S_IFLNK])
+_wantedkinds = {stat.S_IFREG, stat.S_IFLNK}
def statfiles(files):
'''Stat each file in files. Yield each stat, or None if a file does not
@@ -494,7 +513,7 @@
def getuser():
'''return name of current user'''
- return getpass.getuser()
+ return pycompat.fsencode(getpass.getuser())
def username(uid=None):
"""Return the name of the user with the given uid.
--- a/mercurial/profiling.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/profiling.py Wed Jul 19 07:51:41 2017 -0500
@@ -13,15 +13,27 @@
from . import (
encoding,
error,
+ extensions,
util,
)
+def _loadprofiler(ui, profiler):
+ """load profiler extension. return profile method, or None on failure"""
+ extname = profiler
+ extensions.loadall(ui, whitelist=[extname])
+ try:
+ mod = extensions.find(extname)
+ except KeyError:
+ return None
+ else:
+ return getattr(mod, 'profile', None)
+
@contextlib.contextmanager
def lsprofile(ui, fp):
- format = ui.config('profiling', 'format', default='text')
- field = ui.config('profiling', 'sort', default='inlinetime')
- limit = ui.configint('profiling', 'limit', default=30)
- climit = ui.configint('profiling', 'nested', default=0)
+ format = ui.config('profiling', 'format')
+ field = ui.config('profiling', 'sort')
+ limit = ui.configint('profiling', 'limit')
+ climit = ui.configint('profiling', 'nested')
if format not in ['text', 'kcachegrind']:
ui.warn(_("unrecognized profiling format '%s'"
@@ -60,7 +72,7 @@
'flamegraph not available - install from '
'https://github.com/evanhempel/python-flamegraph'))
# developer config: profiling.freq
- freq = ui.configint('profiling', 'freq', default=1000)
+ freq = ui.configint('profiling', 'freq')
filter_ = None
collapse_recursion = True
thread = flamegraph.ProfileThread(fp, 1.0 / freq,
@@ -80,7 +92,7 @@
def statprofile(ui, fp):
from . import statprof
- freq = ui.configint('profiling', 'freq', default=1000)
+ freq = ui.configint('profiling', 'freq')
if freq > 0:
# Cannot reset when profiler is already active. So silently no-op.
if statprof.state.profile_level == 0:
@@ -95,7 +107,7 @@
finally:
data = statprof.stop()
- profformat = ui.config('profiling', 'statformat', 'hotpath')
+ profformat = ui.config('profiling', 'statformat')
formats = {
'byline': statprof.DisplayFormats.ByLine,
@@ -114,6 +126,8 @@
kwargs = {}
def fraction(s):
+ if isinstance(s, (float, int)):
+ return float(s)
if s.endswith('%'):
v = float(s[:-1]) / 100
else:
@@ -126,67 +140,99 @@
showmin = ui.configwith(fraction, 'profiling', 'showmin', 0.005)
showmax = ui.configwith(fraction, 'profiling', 'showmax', 0.999)
kwargs.update(minthreshold=showmin, maxthreshold=showmax)
+ elif profformat == 'hotpath':
+ # inconsistent config: profiling.showmin
+ limit = ui.configwith(fraction, 'profiling', 'showmin', 0.05)
+ kwargs['limit'] = limit
statprof.display(fp, data=data, format=displayformat, **kwargs)
-@contextlib.contextmanager
-def profile(ui):
+class profile(object):
"""Start profiling.
Profiling is active when the context manager is active. When the context
manager exits, profiling results will be written to the configured output.
"""
- profiler = encoding.environ.get('HGPROF')
- if profiler is None:
- profiler = ui.config('profiling', 'type', default='stat')
- if profiler not in ('ls', 'stat', 'flame'):
- ui.warn(_("unrecognized profiler '%s' - ignored\n") % profiler)
- profiler = 'stat'
+ def __init__(self, ui, enabled=True):
+ self._ui = ui
+ self._output = None
+ self._fp = None
+ self._fpdoclose = True
+ self._profiler = None
+ self._enabled = enabled
+ self._entered = False
+ self._started = False
- output = ui.config('profiling', 'output')
+ def __enter__(self):
+ self._entered = True
+ if self._enabled:
+ self.start()
+ return self
+
+ def start(self):
+ """Start profiling.
+
+ The profiling will stop at the context exit.
- if output == 'blackbox':
- fp = util.stringio()
- elif output:
- path = ui.expandpath(output)
- fp = open(path, 'wb')
- else:
- fp = ui.ferr
+ If the profiler was already started, this has no effect."""
+ if not self._entered:
+ raise error.ProgrammingError()
+ if self._started:
+ return
+ self._started = True
+ profiler = encoding.environ.get('HGPROF')
+ proffn = None
+ if profiler is None:
+ profiler = self._ui.config('profiling', 'type', default='stat')
+ if profiler not in ('ls', 'stat', 'flame'):
+ # try load profiler from extension with the same name
+ proffn = _loadprofiler(self._ui, profiler)
+ if proffn is None:
+ self._ui.warn(_("unrecognized profiler '%s' - ignored\n")
+ % profiler)
+ profiler = 'stat'
+
+ self._output = self._ui.config('profiling', 'output')
- try:
- if profiler == 'ls':
- proffn = lsprofile
- elif profiler == 'flame':
- proffn = flameprofile
- else:
- proffn = statprofile
+ try:
+ if self._output == 'blackbox':
+ self._fp = util.stringio()
+ elif self._output:
+ path = self._ui.expandpath(self._output)
+ self._fp = open(path, 'wb')
+ else:
+ self._fpdoclose = False
+ self._fp = self._ui.ferr
- with proffn(ui, fp):
- yield
+ if proffn is not None:
+ pass
+ elif profiler == 'ls':
+ proffn = lsprofile
+ elif profiler == 'flame':
+ proffn = flameprofile
+ else:
+ proffn = statprofile
- finally:
- if output:
- if output == 'blackbox':
- val = 'Profile:\n%s' % fp.getvalue()
+ self._profiler = proffn(self._ui, self._fp)
+ self._profiler.__enter__()
+ except: # re-raises
+ self._closefp()
+ raise
+
+ def __exit__(self, exception_type, exception_value, traceback):
+ propagate = None
+ if self._profiler is not None:
+ propagate = self._profiler.__exit__(exception_type, exception_value,
+ traceback)
+ if self._output == 'blackbox':
+ val = 'Profile:\n%s' % self._fp.getvalue()
# ui.log treats the input as a format string,
# so we need to escape any % signs.
val = val.replace('%', '%%')
- ui.log('profile', val)
- fp.close()
-
-@contextlib.contextmanager
-def maybeprofile(ui):
- """Profile if enabled, else do nothing.
-
- This context manager can be used to optionally profile if profiling
- is enabled. Otherwise, it does nothing.
+ self._ui.log('profile', val)
+ self._closefp()
+ return propagate
- The purpose of this context manager is to make calling code simpler:
- just use a single code path for calling into code you may want to profile
- and this function determines whether to start profiling.
- """
- if ui.configbool('profiling', 'enabled'):
- with profile(ui):
- yield
- else:
- yield
+ def _closefp(self):
+ if self._fpdoclose and self._fp is not None:
+ self._fp.close()
--- a/mercurial/progress.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/progress.py Wed Jul 19 07:51:41 2017 -0500
@@ -92,15 +92,15 @@
self.startvals = {}
self.printed = False
self.lastprint = time.time() + float(self.ui.config(
- 'progress', 'delay', default=3))
+ 'progress', 'delay'))
self.curtopic = None
self.lasttopic = None
self.indetcount = 0
self.refresh = float(self.ui.config(
- 'progress', 'refresh', default=0.1))
+ 'progress', 'refresh'))
self.changedelay = max(3 * self.refresh,
float(self.ui.config(
- 'progress', 'changedelay', default=1)))
+ 'progress', 'changedelay')))
self.order = self.ui.configlist(
'progress', 'format',
default=['topic', 'bar', 'number', 'estimate'])
@@ -191,7 +191,7 @@
def complete(self):
if not shouldprint(self.ui):
return
- if self.ui.configbool('progress', 'clear-complete', default=True):
+ if self.ui.configbool('progress', 'clear-complete'):
self.clear()
else:
self._writeerr('\n')
@@ -217,7 +217,7 @@
elapsed = now - self.starttimes[topic]
# experimental config: progress.estimate
if elapsed > float(
- self.ui.config('progress', 'estimate', default=2)):
+ self.ui.config('progress', 'estimate')):
seconds = (elapsed * (target - delta)) // delta + 1
return fmtremaining(seconds)
return ''
@@ -227,7 +227,7 @@
delta = pos - initialpos
elapsed = now - self.starttimes[topic]
if elapsed > float(
- self.ui.config('progress', 'estimate', default=2)):
+ self.ui.config('progress', 'estimate')):
return _('%d %s/sec') % (delta / elapsed, unit)
return ''
--- a/mercurial/pure/bdiff.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/pure/bdiff.py Wed Jul 19 07:51:41 2017 -0500
@@ -11,10 +11,6 @@
import re
import struct
-from . import policy
-policynocffi = policy.policynocffi
-modulepolicy = policy.policy
-
def splitnewlines(text):
'''like str.splitlines, but only split on newlines.'''
lines = [l + '\n' for l in text.split('\n')]
@@ -93,70 +89,3 @@
text = re.sub('[ \t\r]+', ' ', text)
text = text.replace(' \n', '\n')
return text
-
-if modulepolicy not in policynocffi:
- try:
- from _bdiff_cffi import ffi, lib
- except ImportError:
- if modulepolicy == 'cffi': # strict cffi import
- raise
- else:
- def blocks(sa, sb):
- a = ffi.new("struct bdiff_line**")
- b = ffi.new("struct bdiff_line**")
- ac = ffi.new("char[]", str(sa))
- bc = ffi.new("char[]", str(sb))
- l = ffi.new("struct bdiff_hunk*")
- try:
- an = lib.bdiff_splitlines(ac, len(sa), a)
- bn = lib.bdiff_splitlines(bc, len(sb), b)
- if not a[0] or not b[0]:
- raise MemoryError
- count = lib.bdiff_diff(a[0], an, b[0], bn, l)
- if count < 0:
- raise MemoryError
- rl = [None] * count
- h = l.next
- i = 0
- while h:
- rl[i] = (h.a1, h.a2, h.b1, h.b2)
- h = h.next
- i += 1
- finally:
- lib.free(a[0])
- lib.free(b[0])
- lib.bdiff_freehunks(l.next)
- return rl
-
- def bdiff(sa, sb):
- a = ffi.new("struct bdiff_line**")
- b = ffi.new("struct bdiff_line**")
- ac = ffi.new("char[]", str(sa))
- bc = ffi.new("char[]", str(sb))
- l = ffi.new("struct bdiff_hunk*")
- try:
- an = lib.bdiff_splitlines(ac, len(sa), a)
- bn = lib.bdiff_splitlines(bc, len(sb), b)
- if not a[0] or not b[0]:
- raise MemoryError
- count = lib.bdiff_diff(a[0], an, b[0], bn, l)
- if count < 0:
- raise MemoryError
- rl = []
- h = l.next
- la = lb = 0
- while h:
- if h.a1 != la or h.b1 != lb:
- lgt = (b[0] + h.b1).l - (b[0] + lb).l
- rl.append(struct.pack(">lll", (a[0] + la).l - a[0].l,
- (a[0] + h.a1).l - a[0].l, lgt))
- rl.append(str(ffi.buffer((b[0] + lb).l, lgt)))
- la = h.a2
- lb = h.b2
- h = h.next
-
- finally:
- lib.free(a[0])
- lib.free(b[0])
- lib.bdiff_freehunks(l.next)
- return "".join(rl)
--- a/mercurial/pure/mpatch.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/pure/mpatch.py Wed Jul 19 07:51:41 2017 -0500
@@ -9,10 +9,8 @@
import struct
-from . import policy, pycompat
+from .. import pycompat
stringio = pycompat.stringio
-modulepolicy = policy.policy
-policynocffi = policy.policynocffi
class mpatchError(Exception):
"""error raised when a delta cannot be decoded
@@ -127,44 +125,3 @@
outlen += orig - last
return outlen
-
-if modulepolicy not in policynocffi:
- try:
- from _mpatch_cffi import ffi, lib
- except ImportError:
- if modulepolicy == 'cffi': # strict cffi import
- raise
- else:
- @ffi.def_extern()
- def cffi_get_next_item(arg, pos):
- all, bins = ffi.from_handle(arg)
- container = ffi.new("struct mpatch_flist*[1]")
- to_pass = ffi.new("char[]", str(bins[pos]))
- all.append(to_pass)
- r = lib.mpatch_decode(to_pass, len(to_pass) - 1, container)
- if r < 0:
- return ffi.NULL
- return container[0]
-
- def patches(text, bins):
- lgt = len(bins)
- all = []
- if not lgt:
- return text
- arg = (all, bins)
- patch = lib.mpatch_fold(ffi.new_handle(arg),
- lib.cffi_get_next_item, 0, lgt)
- if not patch:
- raise mpatchError("cannot decode chunk")
- outlen = lib.mpatch_calcsize(len(text), patch)
- if outlen < 0:
- lib.mpatch_lfree(patch)
- raise mpatchError("inconsistency detected")
- buf = ffi.new("char[]", outlen)
- if lib.mpatch_apply(buf, text, len(text), patch) < 0:
- lib.mpatch_lfree(patch)
- raise mpatchError("error applying patches")
- res = ffi.buffer(buf, outlen)[:]
- lib.mpatch_lfree(patch)
- return res
-
--- a/mercurial/pure/osutil.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/pure/osutil.py Wed Jul 19 07:51:41 2017 -0500
@@ -13,14 +13,10 @@
import socket
import stat as statmod
-from . import (
- policy,
+from .. import (
pycompat,
)
-modulepolicy = policy.policy
-policynocffi = policy.policynocffi
-
def _mode_to_kind(mode):
if statmod.S_ISREG(mode):
return statmod.S_IFREG
@@ -38,7 +34,7 @@
return statmod.S_IFSOCK
return mode
-def listdirpure(path, stat=False, skip=None):
+def listdir(path, stat=False, skip=None):
'''listdir(path, stat=False) -> list_of_tuples
Return a sorted list containing information about the entries
@@ -68,96 +64,6 @@
result.append((fn, _mode_to_kind(st.st_mode)))
return result
-ffi = None
-if modulepolicy not in policynocffi and pycompat.sysplatform == 'darwin':
- try:
- from _osutil_cffi import ffi, lib
- except ImportError:
- if modulepolicy == 'cffi': # strict cffi import
- raise
-
-if pycompat.sysplatform == 'darwin' and ffi is not None:
- listdir_batch_size = 4096
- # tweakable number, only affects performance, which chunks
- # of bytes do we get back from getattrlistbulk
-
- attrkinds = [None] * 20 # we need the max no for enum VXXX, 20 is plenty
-
- attrkinds[lib.VREG] = statmod.S_IFREG
- attrkinds[lib.VDIR] = statmod.S_IFDIR
- attrkinds[lib.VLNK] = statmod.S_IFLNK
- attrkinds[lib.VBLK] = statmod.S_IFBLK
- attrkinds[lib.VCHR] = statmod.S_IFCHR
- attrkinds[lib.VFIFO] = statmod.S_IFIFO
- attrkinds[lib.VSOCK] = statmod.S_IFSOCK
-
- class stat_res(object):
- def __init__(self, st_mode, st_mtime, st_size):
- self.st_mode = st_mode
- self.st_mtime = st_mtime
- self.st_size = st_size
-
- tv_sec_ofs = ffi.offsetof("struct timespec", "tv_sec")
- buf = ffi.new("char[]", listdir_batch_size)
-
- def listdirinternal(dfd, req, stat, skip):
- ret = []
- while True:
- r = lib.getattrlistbulk(dfd, req, buf, listdir_batch_size, 0)
- if r == 0:
- break
- if r == -1:
- raise OSError(ffi.errno, os.strerror(ffi.errno))
- cur = ffi.cast("val_attrs_t*", buf)
- for i in range(r):
- lgt = cur.length
- assert lgt == ffi.cast('uint32_t*', cur)[0]
- ofs = cur.name_info.attr_dataoffset
- str_lgt = cur.name_info.attr_length
- base_ofs = ffi.offsetof('val_attrs_t', 'name_info')
- name = str(ffi.buffer(ffi.cast("char*", cur) + base_ofs + ofs,
- str_lgt - 1))
- tp = attrkinds[cur.obj_type]
- if name == "." or name == "..":
- continue
- if skip == name and tp == statmod.S_ISDIR:
- return []
- if stat:
- mtime = cur.mtime.tv_sec
- mode = (cur.accessmask & ~lib.S_IFMT)| tp
- ret.append((name, tp, stat_res(st_mode=mode, st_mtime=mtime,
- st_size=cur.datalength)))
- else:
- ret.append((name, tp))
- cur = ffi.cast("val_attrs_t*", int(ffi.cast("intptr_t", cur))
- + lgt)
- return ret
-
- def listdir(path, stat=False, skip=None):
- req = ffi.new("struct attrlist*")
- req.bitmapcount = lib.ATTR_BIT_MAP_COUNT
- req.commonattr = (lib.ATTR_CMN_RETURNED_ATTRS |
- lib.ATTR_CMN_NAME |
- lib.ATTR_CMN_OBJTYPE |
- lib.ATTR_CMN_ACCESSMASK |
- lib.ATTR_CMN_MODTIME)
- req.fileattr = lib.ATTR_FILE_DATALENGTH
- dfd = lib.open(path, lib.O_RDONLY, 0)
- if dfd == -1:
- raise OSError(ffi.errno, os.strerror(ffi.errno))
-
- try:
- ret = listdirinternal(dfd, req, stat, skip)
- finally:
- try:
- lib.close(dfd)
- except BaseException:
- pass # we ignore all the errors from closing, not
- # much we can do about that
- return ret
-else:
- listdir = listdirpure
-
if pycompat.osname != 'nt':
posixfile = open
--- a/mercurial/pure/parsers.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/pure/parsers.py Wed Jul 19 07:51:41 2017 -0500
@@ -10,8 +10,8 @@
import struct
import zlib
-from .node import nullid
-from . import pycompat
+from ..node import nullid
+from .. import pycompat
stringio = pycompat.stringio
--- a/mercurial/pvec.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/pvec.py Wed Jul 19 07:51:41 2017 -0500
@@ -52,7 +52,6 @@
from .node import nullrev
from . import (
- base85,
util,
)
@@ -166,13 +165,13 @@
else:
pvc[n] = _mergevec(pvc[p1], pvc[p2], node)
bs = _join(*pvc[ctx.rev()])
- return pvec(base85.b85encode(bs))
+ return pvec(util.b85encode(bs))
class pvec(object):
def __init__(self, hashorctx):
if isinstance(hashorctx, str):
self._bs = hashorctx
- self._depth, self._vec = _split(base85.b85decode(hashorctx))
+ self._depth, self._vec = _split(util.b85decode(hashorctx))
else:
self._vec = ctxpvec(hashorctx)
--- a/mercurial/pycompat.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/pycompat.py Wed Jul 19 07:51:41 2017 -0500
@@ -32,6 +32,9 @@
import socketserver
import xmlrpc.client as xmlrpclib
+empty = _queue.Empty
+queue = _queue.Queue
+
def identity(a):
return a
@@ -43,7 +46,6 @@
fsencode = os.fsencode
fsdecode = os.fsdecode
- # A bytes version of os.name.
oslinesep = os.linesep.encode('ascii')
osname = os.name.encode('ascii')
ospathsep = os.pathsep.encode('ascii')
@@ -87,6 +89,14 @@
>>> s = bytestr(b'foo')
>>> assert s is bytestr(s)
+ __bytes__() should be called if provided:
+
+ >>> class bytesable(object):
+ ... def __bytes__(self):
+ ... return b'bytes'
+ >>> bytestr(bytesable())
+ b'bytes'
+
There's no implicit conversion from non-ascii str as its encoding is
unknown:
@@ -127,7 +137,8 @@
def __new__(cls, s=b''):
if isinstance(s, bytestr):
return s
- if not isinstance(s, (bytes, bytearray)):
+ if (not isinstance(s, (bytes, bytearray))
+ and not hasattr(s, u'__bytes__')): # hasattr-py3-only
s = str(s).encode(u'ascii')
return bytes.__new__(cls, s)
@@ -164,6 +175,26 @@
return s
return s.decode(u'latin-1')
+ def strurl(url):
+ """Converts a bytes url back to str"""
+ return url.decode(u'ascii')
+
+ def bytesurl(url):
+ """Converts a str url to bytes by encoding in ascii"""
+ return url.encode(u'ascii')
+
+ def raisewithtb(exc, tb):
+ """Raise exception with the given traceback"""
+ raise exc.with_traceback(tb)
+
+ def getdoc(obj):
+ """Get docstring as bytes; may be None so gettext() won't confuse it
+ with _('')"""
+ doc = getattr(obj, u'__doc__', None)
+ if doc is None:
+ return doc
+ return sysbytes(doc)
+
def _wrapattrfunc(f):
@functools.wraps(f)
def w(object, name, *args):
@@ -181,10 +212,13 @@
def open(name, mode='r', buffering=-1):
return builtins.open(name, sysstr(mode), buffering)
- # getopt.getopt() on Python 3 deals with unicodes internally so we cannot
- # pass bytes there. Passing unicodes will result in unicodes as return
- # values which we need to convert again to bytes.
def getoptb(args, shortlist, namelist):
+ """
+ Takes bytes arguments, converts them to unicode, passes them to
+ getopt.getopt(), converts the returned values back to bytes and then
+ returns them for Python 3 compatibility, as getopt.getopt() doesn't
+ accept bytes on Python 3.
+ """
args = [a.decode('latin-1') for a in args]
shortlist = shortlist.decode('latin-1')
namelist = [a.decode('latin-1') for a in namelist]
@@ -194,24 +228,30 @@
args = [a.encode('latin-1') for a in args]
return opts, args
- # keys of keyword arguments in Python need to be strings which are unicodes
- # Python 3. This function takes keyword arguments, convert the keys to str.
def strkwargs(dic):
+ """
+ Converts the keys of a python dictionary to str i.e. unicodes so that
+ they can be passed as keyword arguments, as dictionaries with bytes keys
+ can't be passed as keyword arguments to functions on Python 3.
+ """
dic = dict((k.decode('latin-1'), v) for k, v in dic.iteritems())
return dic
- # keys of keyword arguments need to be unicode while passing into
- # a function. This function helps us to convert those keys back to bytes
- # again as we need to deal with bytes.
def byteskwargs(dic):
+ """
+ Converts keys of python dictionaries to bytes as they were converted to
+ str to pass that dictionary as a keyword argument on Python 3.
+ """
dic = dict((k.encode('latin-1'), v) for k, v in dic.iteritems())
return dic
- # shlex.split() accepts unicodes on Python 3. This function takes bytes
- # argument, convert it into unicodes, pass into shlex.split(), convert the
- # returned value to bytes and return that.
# TODO: handle shlex.shlex().
def shlexsplit(s):
+ """
+ Takes bytes argument, converts it to str i.e. unicodes, passes that into
+ shlex.split(), converts the returned value to bytes and returns that for
+ Python 3 compatibility, as shlex.split() doesn't accept bytes on Python 3.
+ """
ret = shlex.split(s.decode('latin-1'))
return [a.encode('latin-1') for a in ret]
@@ -223,11 +263,19 @@
iterbytestr = iter
sysbytes = identity
sysstr = identity
+ strurl = identity
+ bytesurl = identity
- # Partial backport from os.py in Python 3, which only accepts bytes.
- # In Python 2, our paths should only ever be bytes, a unicode path
- # indicates a bug.
+ # this can't be parsed on Python 3
+ exec('def raisewithtb(exc, tb):\n'
+ ' raise exc, None, tb\n')
+
def fsencode(filename):
+ """
+ Partial backport from os.py in Python 3, which only accepts bytes.
+ In Python 2, our paths should only ever be bytes, a unicode path
+ indicates a bug.
+ """
if isinstance(filename, str):
return filename
else:
@@ -238,6 +286,9 @@
# better not to touch Python 2 part as it's already working fine.
fsdecode = identity
+ def getdoc(obj):
+ return getattr(obj, '__doc__', None)
+
def getoptb(args, shortlist, namelist):
return getopt.getopt(args, shortlist, namelist)
@@ -261,9 +312,6 @@
stringio = cStringIO.StringIO
maplist = map
-empty = _queue.Empty
-queue = _queue.Queue
-
class _pycompatstub(object):
def __init__(self):
self._aliases = {}
--- a/mercurial/rcutil.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/rcutil.py Wed Jul 19 07:51:41 2017 -0500
@@ -11,7 +11,6 @@
from . import (
encoding,
- osutil,
pycompat,
util,
)
@@ -30,7 +29,7 @@
p = util.expandpath(path)
if os.path.isdir(p):
join = os.path.join
- return [join(p, f) for f, k in osutil.listdir(p) if f.endswith('.rc')]
+ return [join(p, f) for f, k in util.listdir(p) if f.endswith('.rc')]
return [p]
def envrcitems(env=None):
--- a/mercurial/registrar.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/registrar.py Wed Jul 19 07:51:41 2017 -0500
@@ -8,11 +8,19 @@
from __future__ import absolute_import
from . import (
+ configitems,
error,
pycompat,
util,
)
+# Unlike the other registered items, config options are neither functions nor
+# classes. Registering the option is just a small function call.
+#
+# We still add the official API to the registrar module for consistency with
+# the other items extensions might want to register.
+configitem = configitems.getitemregister
+
class _funcregistrarbase(object):
"""Base of decorator to register a function for specific purpose
@@ -96,6 +104,47 @@
"""
pass
+class command(_funcregistrarbase):
+ """Decorator to register a command function to table
+
+ This class receives a command table as its argument. The table should
+ be a dict.
+
+ The created object can be used as a decorator for adding commands to
+ that command table. This accepts multiple arguments to define a command.
+
+ The first argument is the command name.
+
+ The options argument is an iterable of tuples defining command arguments.
+ See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
+
+ The synopsis argument defines a short, one line summary of how to use the
+ command. This shows up in the help output.
+
+ The norepo argument defines whether the command does not require a
+ local repository. Most commands operate against a repository, thus the
+ default is False.
+
+ The optionalrepo argument defines whether the command optionally requires
+ a local repository.
+
+ The inferrepo argument defines whether to try to find a repository from the
+ command line arguments. If True, arguments will be examined for potential
+ repository locations. See ``findrepo()``. If a repository is found, it
+ will be used.
+ """
+
+ def _doregister(self, func, name, options=(), synopsis=None,
+ norepo=False, optionalrepo=False, inferrepo=False):
+ func.norepo = norepo
+ func.optionalrepo = optionalrepo
+ func.inferrepo = inferrepo
+ if synopsis:
+ self._table[name] = func, list(options), synopsis
+ else:
+ self._table[name] = func, list(options)
+ return func
+
class revsetpredicate(_funcregistrarbase):
"""Decorator to register revset predicate
--- a/mercurial/repair.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/repair.py Wed Jul 19 07:51:41 2017 -0500
@@ -16,18 +16,17 @@
from . import (
bundle2,
changegroup,
+ discovery,
error,
exchange,
obsolete,
+ obsutil,
util,
)
-def _bundle(repo, bases, heads, node, suffix, compress=True):
+def _bundle(repo, bases, heads, node, suffix, compress=True, obsolescence=True):
"""create a bundle with the specified revisions as a backup"""
- cgversion = changegroup.safeversion(repo)
- cg = changegroup.changegroupsubset(repo, bases, heads, 'strip',
- version=cgversion)
backupdir = "strip-backup"
vfs = repo.vfs
if not vfs.isdir(backupdir):
@@ -39,6 +38,7 @@
totalhash = hashlib.sha1(''.join(allhashes)).hexdigest()
name = "%s/%s-%s-%s.hg" % (backupdir, short(node), totalhash[:8], suffix)
+ cgversion = changegroup.safeversion(repo)
comp = None
if cgversion != '01':
bundletype = "HG20"
@@ -48,8 +48,15 @@
bundletype = "HG10BZ"
else:
bundletype = "HG10UN"
- return bundle2.writebundle(repo.ui, cg, name, bundletype, vfs,
- compression=comp)
+
+ outgoing = discovery.outgoing(repo, missingroots=bases, missingheads=heads)
+ contentopts = {
+ 'cg.version': cgversion,
+ 'obsolescence': obsolescence,
+ 'phases': True,
+ }
+ return bundle2.writenewbundle(repo.ui, repo, 'strip', name, bundletype,
+ outgoing, contentopts, vfs, compression=comp)
def _collectfiles(repo, striprev):
"""find out the filelogs affected by the strip"""
@@ -74,8 +81,12 @@
return s
def strip(ui, repo, nodelist, backup=True, topic='backup'):
- # This function operates within a transaction of its own, but does
- # not take any lock on the repo.
+ # This function requires the caller to lock the repo, but it operates
+ # within a transaction of its own, and thus requires there to be no current
+ # transaction when it is called.
+ if repo.currenttransaction() is not None:
+ raise error.ProgrammingError('cannot strip from inside a transaction')
+
# Simple way to maintain backwards compatibility for this
# argument.
if backup in ['none', 'strip']:
@@ -120,6 +131,13 @@
savebases = [cl.node(r) for r in saverevs]
stripbases = [cl.node(r) for r in tostrip]
+ stripobsidx = obsmarkers = ()
+ if repo.ui.configbool('devel', 'strip-obsmarkers'):
+ obsmarkers = obsutil.exclusivemarkers(repo, stripbases)
+ if obsmarkers:
+ stripobsidx = [i for i, m in enumerate(repo.obsstore)
+ if m in obsmarkers]
+
# For a set s, max(parents(s) - s) is the same as max(heads(::s - s)), but
# is much faster
newbmtarget = repo.revs('max(parents(%ld) - (%ld))', tostrip, tostrip)
@@ -148,16 +166,16 @@
tmpbundlefile = None
if saveheads:
# do not compress temporary bundle if we remove it from disk later
+ #
+ # We do not include obsolescence, it might re-introduce prune markers
+ # we are trying to strip. This is harmless since the stripped markers
+ # are already backed up and we did not touch the markers for the
+ # saved changesets.
tmpbundlefile = _bundle(repo, savebases, saveheads, node, 'temp',
- compress=False)
+ compress=False, obsolescence=False)
mfst = repo.manifestlog._revlog
- curtr = repo.currenttransaction()
- if curtr is not None:
- del curtr # avoid carrying reference to transaction for nothing
- raise error.ProgrammingError('cannot strip from inside a transaction')
-
try:
with repo.transaction("strip") as tr:
offset = len(tr.entries)
@@ -165,13 +183,8 @@
tr.startgroup()
cl.strip(striprev, tr)
mfst.strip(striprev, tr)
- if 'treemanifest' in repo.requirements: # safe but unnecessary
- # otherwise
- for unencoded, encoded, size in repo.store.datafiles():
- if (unencoded.startswith('meta/') and
- unencoded.endswith('00manifest.i')):
- dir = unencoded[5:-12]
- repo.manifestlog._revlog.dirlog(dir).strip(striprev, tr)
+ striptrees(repo, tr, striprev, files)
+
for fn in files:
repo.file(fn).strip(striprev, tr)
tr.endgroup()
@@ -183,6 +196,10 @@
if troffset == 0:
repo.store.markremoved(file)
+ deleteobsmarkers(repo.obsstore, stripobsidx)
+ del repo.obsstore
+
+ repo._phasecache.filterunknown(repo)
if tmpbundlefile:
ui.note(_("adding branch\n"))
f = vfs.open(tmpbundlefile, "rb")
@@ -190,26 +207,22 @@
if not repo.ui.verbose:
# silence internal shuffling chatter
repo.ui.pushbuffer()
- if isinstance(gen, bundle2.unbundle20):
- with repo.transaction('strip') as tr:
- tr.hookargs = {'source': 'strip',
- 'url': 'bundle:' + vfs.join(tmpbundlefile)}
- bundle2.applybundle(repo, gen, tr, source='strip',
- url='bundle:' + vfs.join(tmpbundlefile))
- else:
- gen.apply(repo, 'strip', 'bundle:' + vfs.join(tmpbundlefile),
- True)
+ tmpbundleurl = 'bundle:' + vfs.join(tmpbundlefile)
+ txnname = 'strip'
+ if not isinstance(gen, bundle2.unbundle20):
+ txnname = "strip\n%s" % util.hidepassword(tmpbundleurl)
+ with repo.transaction(txnname) as tr:
+ bundle2.applybundle(repo, gen, tr, source='strip',
+ url=tmpbundleurl)
if not repo.ui.verbose:
repo.ui.popbuffer()
f.close()
repo._phasecache.invalidate()
- for m in updatebm:
- bm[m] = repo[newbmtarget].node()
- with repo.lock():
- with repo.transaction('repair') as tr:
- bm.recordchange(tr)
+ with repo.transaction('repair') as tr:
+ bmchanges = [(m, repo[newbmtarget].node()) for m in updatebm]
+ bm.applychanges(repo, tr, bmchanges)
# remove undo files
for undovfs, undofile in repo.undofiles():
@@ -240,6 +253,72 @@
# extensions can use it
return backupfile
+def safestriproots(ui, repo, nodes):
+ """return list of roots of nodes where descendants are covered by nodes"""
+ torev = repo.unfiltered().changelog.rev
+ revs = set(torev(n) for n in nodes)
+ # tostrip = wanted - unsafe = wanted - ancestors(orphaned)
+ # orphaned = affected - wanted
+ # affected = descendants(roots(wanted))
+ # wanted = revs
+ tostrip = set(repo.revs('%ld-(::((roots(%ld)::)-%ld))', revs, revs, revs))
+ notstrip = revs - tostrip
+ if notstrip:
+ nodestr = ', '.join(sorted(short(repo[n].node()) for n in notstrip))
+ ui.warn(_('warning: orphaned descendants detected, '
+ 'not stripping %s\n') % nodestr)
+ return [c.node() for c in repo.set('roots(%ld)', tostrip)]
+
+class stripcallback(object):
+ """used as a transaction postclose callback"""
+
+ def __init__(self, ui, repo, backup, topic):
+ self.ui = ui
+ self.repo = repo
+ self.backup = backup
+ self.topic = topic or 'backup'
+ self.nodelist = []
+
+ def addnodes(self, nodes):
+ self.nodelist.extend(nodes)
+
+ def __call__(self, tr):
+ roots = safestriproots(self.ui, self.repo, self.nodelist)
+ if roots:
+ strip(self.ui, self.repo, roots, self.backup, self.topic)
+
+def delayedstrip(ui, repo, nodelist, topic=None):
+ """like strip, but works inside transaction and won't strip irreverent revs
+
+ nodelist must explicitly contain all descendants. Otherwise a warning will
+ be printed that some nodes are not stripped.
+
+ Always do a backup. The last non-None "topic" will be used as the backup
+ topic name. The default backup topic name is "backup".
+ """
+ tr = repo.currenttransaction()
+ if not tr:
+ nodes = safestriproots(ui, repo, nodelist)
+ return strip(ui, repo, nodes, True, topic)
+ # transaction postclose callbacks are called in alphabetical order.
+ # use '\xff' as prefix so we are likely to be called last.
+ callback = tr.getpostclose('\xffstrip')
+ if callback is None:
+ callback = stripcallback(ui, repo, True, topic)
+ tr.addpostclose('\xffstrip', callback)
+ if topic:
+ callback.topic = topic
+ callback.addnodes(nodelist)
+
+def striptrees(repo, tr, striprev, files):
+ if 'treemanifest' in repo.requirements: # safe but unnecessary
+ # otherwise
+ for unencoded, encoded, size in repo.store.datafiles():
+ if (unencoded.startswith('meta/') and
+ unencoded.endswith('00manifest.i')):
+ dir = unencoded[5:-12]
+ repo.manifestlog._revlog.dirlog(dir).strip(striprev, tr)
+
def rebuildfncache(ui, repo):
"""Rebuilds the fncache file from repo history.
--- a/mercurial/repoview.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/repoview.py Wed Jul 19 07:51:41 2017 -0500
@@ -9,13 +9,9 @@
from __future__ import absolute_import
import copy
-import hashlib
-import heapq
-import struct
from .node import nullrev
from . import (
- error,
obsolete,
phases,
tags as tagsmod,
@@ -32,141 +28,40 @@
lead to crashes."""
return obsolete.getrevs(repo, 'obsolete')
-def _getstatichidden(repo):
- """Revision to be hidden (disregarding dynamic blocker)
-
- To keep a consistent graph, we cannot hide any revisions with
- non-hidden descendants. This function computes the set of
- revisions that could be hidden while keeping the graph consistent.
-
- A second pass will be done to apply "dynamic blocker" like bookmarks or
- working directory parents.
-
+def pinnedrevs(repo):
+ """revisions blocking hidden changesets from being filtered
"""
- assert not repo.changelog.filteredrevs
- hidden = set(hideablerevs(repo))
- if hidden:
- getphase = repo._phasecache.phase
- getparentrevs = repo.changelog.parentrevs
- # Skip heads which are public (guaranteed to not be hidden)
- heap = [-r for r in repo.changelog.headrevs() if getphase(repo, r)]
- heapq.heapify(heap)
- heappop = heapq.heappop
- heappush = heapq.heappush
- seen = set() # no need to init it with heads, they have no children
- while heap:
- rev = -heappop(heap)
- # All children have been processed so at that point, if no children
- # removed 'rev' from the 'hidden' set, 'rev' is going to be hidden.
- blocker = rev not in hidden
- for parent in getparentrevs(rev):
- if parent == nullrev:
- continue
- if blocker:
- # If visible, ensure parent will be visible too
- hidden.discard(parent)
- # - Avoid adding the same revision twice
- # - Skip nodes which are public (guaranteed to not be hidden)
- pre = len(seen)
- seen.add(parent)
- if pre < len(seen) and getphase(repo, rev):
- heappush(heap, -parent)
- return hidden
-
-def _getdynamicblockers(repo):
- """Non-cacheable revisions blocking hidden changesets from being filtered.
-
- Get revisions that will block hidden changesets and are likely to change,
- but unlikely to create hidden blockers. They won't be cached, so be careful
- with adding additional computation."""
cl = repo.changelog
- blockers = set()
- blockers.update([par.rev() for par in repo[None].parents()])
- blockers.update([cl.rev(bm) for bm in repo._bookmarks.values()])
+ pinned = set()
+ pinned.update([par.rev() for par in repo[None].parents()])
+ pinned.update([cl.rev(bm) for bm in repo._bookmarks.values()])
tags = {}
tagsmod.readlocaltags(repo.ui, repo, tags, {})
if tags:
rev, nodemap = cl.rev, cl.nodemap
- blockers.update(rev(t[0]) for t in tags.values() if t[0] in nodemap)
- return blockers
+ pinned.update(rev(t[0]) for t in tags.values() if t[0] in nodemap)
+ return pinned
-cacheversion = 1
-cachefile = 'cache/hidden'
-
-def cachehash(repo, hideable):
- """return sha1 hash of repository data to identify a valid cache.
- We calculate a sha1 of repo heads and the content of the obsstore and write
- it to the cache. Upon reading we can easily validate by checking the hash
- against the stored one and discard the cache in case the hashes don't match.
- """
- h = hashlib.sha1()
- h.update(''.join(repo.heads()))
- h.update('%d' % hash(frozenset(hideable)))
- return h.digest()
+def _revealancestors(pfunc, hidden, revs):
+ """reveals contiguous chains of hidden ancestors of 'revs' by removing them
+ from 'hidden'
-def _writehiddencache(cachefile, cachehash, hidden):
- """write hidden data to a cache file"""
- data = struct.pack('>%ii' % len(hidden), *sorted(hidden))
- cachefile.write(struct.pack(">H", cacheversion))
- cachefile.write(cachehash)
- cachefile.write(data)
+ - pfunc(r): a function returning parent of 'r',
+ - hidden: the (preliminary) hidden revisions, to be updated
+ - revs: iterable of revnum,
-def trywritehiddencache(repo, hideable, hidden):
- """write cache of hidden changesets to disk
-
- Will not write the cache if a wlock cannot be obtained lazily.
- The cache consists of a head of 22byte:
- 2 byte version number of the cache
- 20 byte sha1 to validate the cache
- n*4 byte hidden revs
+ (Ancestors are revealed exclusively, i.e. the elements in 'revs' are
+ *not* revealed)
"""
- wlock = fh = None
- try:
- wlock = repo.wlock(wait=False)
- # write cache to file
- newhash = cachehash(repo, hideable)
- fh = repo.vfs.open(cachefile, 'w+b', atomictemp=True)
- _writehiddencache(fh, newhash, hidden)
- fh.close()
- except (IOError, OSError):
- repo.ui.debug('error writing hidden changesets cache\n')
- except error.LockHeld:
- repo.ui.debug('cannot obtain lock to write hidden changesets cache\n')
- finally:
- if wlock:
- wlock.release()
-
-def _readhiddencache(repo, cachefilename, newhash):
- hidden = fh = None
- try:
- if repo.vfs.exists(cachefile):
- fh = repo.vfs.open(cachefile, 'rb')
- version, = struct.unpack(">H", fh.read(2))
- oldhash = fh.read(20)
- if (cacheversion, oldhash) == (version, newhash):
- # cache is valid, so we can start reading the hidden revs
- data = fh.read()
- count = len(data) / 4
- hidden = frozenset(struct.unpack('>%ii' % count, data))
- return hidden
- except struct.error:
- repo.ui.debug('corrupted hidden cache\n')
- # No need to fix the content as it will get rewritten
- return None
- except (IOError, OSError):
- repo.ui.debug('cannot read hidden cache\n')
- return None
- finally:
- if fh:
- fh.close()
-
-def tryreadcache(repo, hideable):
- """read a cache if the cache exists and is valid, otherwise returns None."""
- newhash = cachehash(repo, hideable)
- return _readhiddencache(repo, cachefile, newhash)
+ stack = list(revs)
+ while stack:
+ for p in pfunc(stack.pop()):
+ if p != nullrev and p in hidden:
+ hidden.remove(p)
+ stack.append(p)
def computehidden(repo):
"""compute the set of hidden revision to filter
@@ -174,22 +69,16 @@
During most operation hidden should be filtered."""
assert not repo.changelog.filteredrevs
- hidden = frozenset()
- hideable = hideablerevs(repo)
- if hideable:
- cl = repo.changelog
- hidden = tryreadcache(repo, hideable)
- if hidden is None:
- hidden = frozenset(_getstatichidden(repo))
- trywritehiddencache(repo, hideable, hidden)
+ hidden = hideablerevs(repo)
+ if hidden:
+ hidden = set(hidden - pinnedrevs(repo))
+ pfunc = repo.changelog.parentrevs
+ mutablephases = (phases.draft, phases.secret)
+ mutable = repo._phasecache.getrevset(repo, mutablephases)
- # check if we have wd parents, bookmarks or tags pointing to hidden
- # changesets and remove those.
- dynamic = hidden & _getdynamicblockers(repo)
- if dynamic:
- blocked = cl.ancestors(dynamic, inclusive=True)
- hidden = frozenset(r for r in hidden if r not in blocked)
- return hidden
+ visible = mutable - hidden
+ _revealancestors(pfunc, hidden, visible)
+ return frozenset(hidden)
def computeunserved(repo):
"""compute the set of revision that should be filtered when used a server
@@ -354,10 +243,3 @@
def __delattr__(self, attr):
return delattr(self._unfilteredrepo, attr)
-
- # The `requirements` attribute is initialized during __init__. But
- # __getattr__ won't be called as it also exists on the class. We need
- # explicit forwarding to main repo here
- @property
- def requirements(self):
- return self._unfilteredrepo.requirements
--- a/mercurial/revlog.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/revlog.py Wed Jul 19 07:51:41 2017 -0500
@@ -13,6 +13,7 @@
from __future__ import absolute_import
+import binascii
import collections
import errno
import hashlib
@@ -26,32 +27,39 @@
hex,
nullid,
nullrev,
+ wdirhex,
+ wdirid,
+ wdirrev,
)
from .i18n import _
from . import (
ancestor,
error,
mdiff,
- parsers,
+ policy,
pycompat,
templatefilters,
util,
)
-_pack = struct.pack
-_unpack = struct.unpack
+parsers = policy.importmod(r'parsers')
+
# Aliased for performance.
_zlibdecompress = zlib.decompress
# revlog header flags
REVLOGV0 = 0
-REVLOGNG = 1
-REVLOGNGINLINEDATA = (1 << 16)
-REVLOGGENERALDELTA = (1 << 17)
-REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA
-REVLOG_DEFAULT_FORMAT = REVLOGNG
+REVLOGV1 = 1
+# Dummy value until file format is finalized.
+# Reminder: change the bounds check in revlog.__init__ when this is changed.
+REVLOGV2 = 0xDEAD
+FLAG_INLINE_DATA = (1 << 16)
+FLAG_GENERALDELTA = (1 << 17)
+REVLOG_DEFAULT_FLAGS = FLAG_INLINE_DATA
+REVLOG_DEFAULT_FORMAT = REVLOGV1
REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
-REVLOGNG_FLAGS = REVLOGNGINLINEDATA | REVLOGGENERALDELTA
+REVLOGV1_FLAGS = FLAG_INLINE_DATA | FLAG_GENERALDELTA
+REVLOGV2_FLAGS = REVLOGV1_FLAGS
# revlog index flags
REVIDX_ISCENSORED = (1 << 15) # revision has censor metadata, must be verified
@@ -142,10 +150,14 @@
s.update(p1)
else:
# none of the parent nodes are nullid
- l = [p1, p2]
- l.sort()
- s = hashlib.sha1(l[0])
- s.update(l[1])
+ if p1 < p2:
+ a = p1
+ b = p2
+ else:
+ a = p2
+ b = p1
+ s = hashlib.sha1(a)
+ s.update(b)
s.update(text)
return s.digest()
@@ -157,11 +169,13 @@
# 20 bytes: parent 1 nodeid
# 20 bytes: parent 2 nodeid
# 20 bytes: nodeid
-indexformatv0 = ">4l20s20s20s"
+indexformatv0 = struct.Struct(">4l20s20s20s")
+indexformatv0_pack = indexformatv0.pack
+indexformatv0_unpack = indexformatv0.unpack
class revlogoldio(object):
def __init__(self):
- self.size = struct.calcsize(indexformatv0)
+ self.size = indexformatv0.size
def parseindex(self, data, inline):
s = self.size
@@ -172,7 +186,7 @@
while off + s <= l:
cur = data[off:off + s]
off += s
- e = _unpack(indexformatv0, cur)
+ e = indexformatv0_unpack(cur)
# transform to revlogv1 format
e2 = (offset_type(e[0], 0), e[1], -1, e[2], e[3],
nodemap.get(e[4], nullrev), nodemap.get(e[5], nullrev), e[6])
@@ -187,10 +201,10 @@
def packentry(self, entry, node, version, rev):
if gettype(entry[0]):
- raise RevlogError(_("index entry flags need RevlogNG"))
+ raise RevlogError(_('index entry flags need revlog version 1'))
e2 = (getoffset(entry[0]), entry[1], entry[3], entry[4],
node(entry[5]), node(entry[6]), entry[7])
- return _pack(indexformatv0, *e2)
+ return indexformatv0_pack(*e2)
# index ng:
# 6 bytes: offset
@@ -202,8 +216,11 @@
# 4 bytes: parent 1 rev
# 4 bytes: parent 2 rev
# 32 bytes: nodeid
-indexformatng = ">Qiiiiii20s12x"
-versionformat = ">I"
+indexformatng = struct.Struct(">Qiiiiii20s12x")
+indexformatng_pack = indexformatng.pack
+versionformat = struct.Struct(">I")
+versionformat_pack = versionformat.pack
+versionformat_unpack = versionformat.unpack
# corresponds to uncompressed length of indexformatng (2 gigs, 4-byte
# signed integer)
@@ -211,7 +228,7 @@
class revlogio(object):
def __init__(self):
- self.size = struct.calcsize(indexformatng)
+ self.size = indexformatng.size
def parseindex(self, data, inline):
# call the C implementation to parse the index data
@@ -219,9 +236,9 @@
return index, getattr(index, 'nodemap', None), cache
def packentry(self, entry, node, version, rev):
- p = _pack(indexformatng, *entry)
+ p = indexformatng_pack(*entry)
if rev == 0:
- p = _pack(versionformat, version) + p[4:]
+ p = versionformat_pack(version) + p[4:]
return p
class revlog(object):
@@ -252,7 +269,7 @@
If checkambig, indexfile is opened with checkambig=True at
writing, to avoid file stat ambiguity.
"""
- def __init__(self, opener, indexfile, checkambig=False):
+ def __init__(self, opener, indexfile, datafile=None, checkambig=False):
"""
create a revlog object
@@ -260,7 +277,7 @@
and can be used to implement COW semantics or the like.
"""
self.indexfile = indexfile
- self.datafile = indexfile[:-2] + ".d"
+ self.datafile = datafile or (indexfile[:-2] + ".d")
self.opener = opener
# When True, indexfile is opened with checkambig=True at writing, to
# avoid file stat ambiguity.
@@ -282,13 +299,17 @@
self._nodecache = {nullid: nullrev}
self._nodepos = None
self._compengine = 'zlib'
+ self._maxdeltachainspan = -1
v = REVLOG_DEFAULT_VERSION
opts = getattr(opener, 'options', None)
if opts is not None:
- if 'revlogv1' in opts:
+ if 'revlogv2' in opts:
+ # version 2 revlogs always use generaldelta.
+ v = REVLOGV2 | FLAG_GENERALDELTA | FLAG_INLINE_DATA
+ elif 'revlogv1' in opts:
if 'generaldelta' in opts:
- v |= REVLOGGENERALDELTA
+ v |= FLAG_GENERALDELTA
else:
v = 0
if 'chunkcachesize' in opts:
@@ -300,6 +321,8 @@
self._lazydeltabase = bool(opts.get('lazydeltabase', False))
if 'compengine' in opts:
self._compengine = opts['compengine']
+ if 'maxdeltachainspan' in opts:
+ self._maxdeltachainspan = opts['maxdeltachainspan']
if self._chunkcachesize <= 0:
raise RevlogError(_('revlog chunk cache size %r is not greater '
@@ -315,26 +338,35 @@
indexdata = f.read()
f.close()
if len(indexdata) > 0:
- v = struct.unpack(versionformat, indexdata[:4])[0]
+ v = versionformat_unpack(indexdata[:4])[0]
self._initempty = False
except IOError as inst:
if inst.errno != errno.ENOENT:
raise
self.version = v
- self._inline = v & REVLOGNGINLINEDATA
- self._generaldelta = v & REVLOGGENERALDELTA
+ self._inline = v & FLAG_INLINE_DATA
+ self._generaldelta = v & FLAG_GENERALDELTA
flags = v & ~0xFFFF
fmt = v & 0xFFFF
- if fmt == REVLOGV0 and flags:
- raise RevlogError(_("index %s unknown flags %#04x for format v0")
- % (self.indexfile, flags >> 16))
- elif fmt == REVLOGNG and flags & ~REVLOGNG_FLAGS:
- raise RevlogError(_("index %s unknown flags %#04x for revlogng")
- % (self.indexfile, flags >> 16))
- elif fmt > REVLOGNG:
- raise RevlogError(_("index %s unknown format %d")
- % (self.indexfile, fmt))
+ if fmt == REVLOGV0:
+ if flags:
+ raise RevlogError(_('unknown flags (%#04x) in version %d '
+ 'revlog %s') %
+ (flags >> 16, fmt, self.indexfile))
+ elif fmt == REVLOGV1:
+ if flags & ~REVLOGV1_FLAGS:
+ raise RevlogError(_('unknown flags (%#04x) in version %d '
+ 'revlog %s') %
+ (flags >> 16, fmt, self.indexfile))
+ elif fmt == REVLOGV2:
+ if flags & ~REVLOGV2_FLAGS:
+ raise RevlogError(_('unknown flags (%#04x) in version %d '
+ 'revlog %s') %
+ (flags >> 16, fmt, self.indexfile))
+ else:
+ raise RevlogError(_('unknown version (%d) in revlog %s') %
+ (fmt, self.indexfile))
self.storedeltachains = True
@@ -409,6 +441,8 @@
raise
except RevlogError:
# parsers.c radix tree lookup failed
+ if node == wdirid:
+ raise error.WdirUnsupported
raise LookupError(node, self.indexfile, _('no node'))
except KeyError:
# pure python cache lookup failed
@@ -423,6 +457,8 @@
if v == node:
self._nodepos = r - 1
return r
+ if node == wdirid:
+ raise error.WdirUnsupported
raise LookupError(node, self.indexfile, _('no node'))
# Accessors for index entries.
@@ -475,10 +511,20 @@
return self.index[rev][4]
def parentrevs(self, rev):
- return self.index[rev][5:7]
+ try:
+ return self.index[rev][5:7]
+ except IndexError:
+ if rev == wdirrev:
+ raise error.WdirUnsupported
+ raise
def node(self, rev):
- return self.index[rev][7]
+ try:
+ return self.index[rev][7]
+ except IndexError:
+ if rev == wdirrev:
+ raise error.WdirUnsupported
+ raise
# Derived from index values.
@@ -534,6 +580,12 @@
revs in ascending order and ``stopped`` is a bool indicating whether
``stoprev`` was hit.
"""
+ # Try C implementation.
+ try:
+ return self.index.deltachain(rev, stoprev, self._generaldelta)
+ except AttributeError:
+ pass
+
chain = []
# Alias to prevent attribute lookup in tight loop.
@@ -913,8 +965,8 @@
stop = []
stoprevs = set([self.rev(n) for n in stop])
startrev = self.rev(start)
- reachable = set((startrev,))
- heads = set((startrev,))
+ reachable = {startrev}
+ heads = {startrev}
parentrevs = self.parentrevs
for r in self.revs(start=startrev + 1):
@@ -1016,10 +1068,17 @@
pass
def _partialmatch(self, id):
+ maybewdir = wdirhex.startswith(id)
try:
partial = self.index.partialmatch(id)
if partial and self.hasnode(partial):
+ if maybewdir:
+ # single 'ff...' match in radix tree, ambiguous with wdir
+ raise RevlogError
return partial
+ if maybewdir:
+ # no 'ff...' match in radix tree, wdir identified
+ raise error.WdirUnsupported
return None
except RevlogError:
# parsers.c radix tree lookup gave multiple matches
@@ -1044,13 +1103,15 @@
nl = [n for n in nl if hex(n).startswith(id) and
self.hasnode(n)]
if len(nl) > 0:
- if len(nl) == 1:
+ if len(nl) == 1 and not maybewdir:
self._pcache[id] = nl[0]
return nl[0]
raise LookupError(id, self.indexfile,
_('ambiguous identifier'))
+ if maybewdir:
+ raise error.WdirUnsupported
return None
- except TypeError:
+ except (TypeError, binascii.Error):
pass
def lookup(self, id):
@@ -1075,7 +1136,7 @@
p1, p2 = self.parents(node)
return hash(text, p1, p2) != node
- def _addchunk(self, offset, data):
+ def _cachesegment(self, offset, data):
"""Add a segment to the revlog cache.
Accepts an absolute offset and the data that is at that location.
@@ -1087,7 +1148,7 @@
else:
self._chunkcache = offset, data
- def _loadchunk(self, offset, length, df=None):
+ def _readsegment(self, offset, length, df=None):
"""Load a segment of raw data from the revlog.
Accepts an absolute offset, length to read, and an optional existing
@@ -1118,12 +1179,12 @@
d = df.read(reallength)
if closehandle:
df.close()
- self._addchunk(realoffset, d)
+ self._cachesegment(realoffset, d)
if offset != realoffset or reallength != length:
return util.buffer(d, offset - realoffset, length)
return d
- def _getchunk(self, offset, length, df=None):
+ def _getsegment(self, offset, length, df=None):
"""Obtain a segment of raw data from the revlog.
Accepts an absolute offset, length of bytes to obtain, and an
@@ -1145,9 +1206,9 @@
return d # avoid a copy
return util.buffer(d, cachestart, cacheend - cachestart)
- return self._loadchunk(offset, length, df=df)
+ return self._readsegment(offset, length, df=df)
- def _chunkraw(self, startrev, endrev, df=None):
+ def _getsegmentforrevs(self, startrev, endrev, df=None):
"""Obtain a segment of raw data corresponding to a range of revisions.
Accepts the start and end revisions and an optional already-open
@@ -1179,7 +1240,7 @@
end += (endrev + 1) * self._io.size
length = end - start
- return start, self._getchunk(start, length, df=df)
+ return start, self._getsegment(start, length, df=df)
def _chunk(self, rev, df=None):
"""Obtain a single decompressed chunk for a revision.
@@ -1190,7 +1251,7 @@
Returns a str holding uncompressed data for the requested revision.
"""
- return self.decompress(self._chunkraw(rev, rev, df=df)[1])
+ return self.decompress(self._getsegmentforrevs(rev, rev, df=df)[1])
def _chunks(self, revs, df=None):
"""Obtain decompressed chunks for the specified revisions.
@@ -1217,7 +1278,7 @@
ladd = l.append
try:
- offset, data = self._chunkraw(revs[0], revs[-1], df=df)
+ offset, data = self._getsegmentforrevs(revs[0], revs[-1], df=df)
except OverflowError:
# issue4215 - we can't cache a run of chunks greater than
# 2G on Windows
@@ -1359,6 +1420,9 @@
Note: If the ``raw`` argument is set, it has precedence over the
operation and will only update the value of ``validatehash``.
"""
+ # fast path: no flag processors will run
+ if flags == 0:
+ return text, True
if not operation in ('read', 'write'):
raise ProgrammingError(_("invalid '%s' operation ") % (operation))
# Check all flags are known.
@@ -1443,13 +1507,13 @@
df = self.opener(self.datafile, 'w')
try:
for r in self:
- df.write(self._chunkraw(r, r)[1])
+ df.write(self._getsegmentforrevs(r, r)[1])
finally:
df.close()
fp = self.opener(self.indexfile, 'w', atomictemp=True,
checkambig=self._checkambig)
- self.version &= ~(REVLOGNGINLINEDATA)
+ self.version &= ~FLAG_INLINE_DATA
self._inline = False
for i in self:
e = self._io.packentry(self.index[i], self.node, self.version, i)
@@ -1502,6 +1566,15 @@
if validatehash:
self.checkhash(rawtext, node, p1=p1, p2=p2)
+ return self.addrawrevision(rawtext, transaction, link, p1, p2, node,
+ flags, cachedelta=cachedelta)
+
+ def addrawrevision(self, rawtext, transaction, link, p1, p2, node, flags,
+ cachedelta=None):
+ """add a raw revision with known flags, node and parents
+ useful when reusing a revision not stored in this revlog (ex: received
+ over wire, or read from an external bundle).
+ """
dfh = None
if not self._inline:
dfh = self.opener(self.datafile, "a+")
@@ -1596,7 +1669,13 @@
# - 'compresseddeltalen' is the sum of the total size of deltas we need
# to apply -- bounding it limits the amount of CPU we consume.
dist, l, data, base, chainbase, chainlen, compresseddeltalen = d
- if (dist > textlen * 4 or l > textlen or
+
+ defaultmax = textlen * 4
+ maxdist = self._maxdeltachainspan
+ if not maxdist:
+ maxdist = dist # ensure the conditional passes
+ maxdist = max(maxdist, defaultmax)
+ if (dist > maxdist or l > textlen or
compresseddeltalen > textlen * 2 or
(self._maxchainlen and chainlen > self._maxchainlen)):
return False
@@ -1798,9 +1877,7 @@
this revlog and the node that was added.
"""
- # track the base of the current delta log
- content = []
- node = None
+ nodes = []
r = len(self)
end = 0
@@ -1831,7 +1908,7 @@
delta = chunkdata['delta']
flags = chunkdata['flags'] or REVIDX_DEFAULT_FLAGS
- content.append(node)
+ nodes.append(node)
link = linkmapper(cs)
if node in self.nodemap:
@@ -1890,7 +1967,7 @@
dfh.close()
ifh.close()
- return content
+ return nodes
def iscensored(self, rev):
"""Check if a file revision is censored."""
@@ -2027,7 +2104,7 @@
DELTAREUSESAMEREVS = 'samerevs'
DELTAREUSENEVER = 'never'
- DELTAREUSEALL = set(['always', 'samerevs', 'never'])
+ DELTAREUSEALL = {'always', 'samerevs', 'never'}
def clone(self, tr, destrevlog, addrevisioncb=None,
deltareuse=DELTAREUSESAMEREVS, aggressivemergedeltas=None):
--- a/mercurial/revset.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/revset.py Wed Jul 19 07:51:41 2017 -0500
@@ -7,11 +7,11 @@
from __future__ import absolute_import
-import heapq
import re
from .i18n import _
from . import (
+ dagop,
destutil,
encoding,
error,
@@ -19,11 +19,13 @@
match as matchmod,
node,
obsolete as obsmod,
+ obsutil,
pathutil,
phases,
registrar,
repoview,
revsetlang,
+ scmutil,
smartset,
util,
)
@@ -48,123 +50,6 @@
spanset = smartset.spanset
fullreposet = smartset.fullreposet
-def _revancestors(repo, revs, followfirst):
- """Like revlog.ancestors(), but supports followfirst."""
- if followfirst:
- cut = 1
- else:
- cut = None
- cl = repo.changelog
-
- def iterate():
- revs.sort(reverse=True)
- irevs = iter(revs)
- h = []
-
- inputrev = next(irevs, None)
- if inputrev is not None:
- heapq.heappush(h, -inputrev)
-
- seen = set()
- while h:
- current = -heapq.heappop(h)
- if current == inputrev:
- inputrev = next(irevs, None)
- if inputrev is not None:
- heapq.heappush(h, -inputrev)
- if current not in seen:
- seen.add(current)
- yield current
- for parent in cl.parentrevs(current)[:cut]:
- if parent != node.nullrev:
- heapq.heappush(h, -parent)
-
- return generatorset(iterate(), iterasc=False)
-
-def _revdescendants(repo, revs, followfirst):
- """Like revlog.descendants() but supports followfirst."""
- if followfirst:
- cut = 1
- else:
- cut = None
-
- def iterate():
- cl = repo.changelog
- # XXX this should be 'parentset.min()' assuming 'parentset' is a
- # smartset (and if it is not, it should.)
- first = min(revs)
- nullrev = node.nullrev
- if first == nullrev:
- # Are there nodes with a null first parent and a non-null
- # second one? Maybe. Do we care? Probably not.
- for i in cl:
- yield i
- else:
- seen = set(revs)
- for i in cl.revs(first + 1):
- for x in cl.parentrevs(i)[:cut]:
- if x != nullrev and x in seen:
- seen.add(i)
- yield i
- break
-
- return generatorset(iterate(), iterasc=True)
-
-def _reachablerootspure(repo, minroot, roots, heads, includepath):
- """return (heads(::<roots> and ::<heads>))
-
- If includepath is True, return (<roots>::<heads>)."""
- if not roots:
- return []
- parentrevs = repo.changelog.parentrevs
- roots = set(roots)
- visit = list(heads)
- reachable = set()
- seen = {}
- # prefetch all the things! (because python is slow)
- reached = reachable.add
- dovisit = visit.append
- nextvisit = visit.pop
- # open-code the post-order traversal due to the tiny size of
- # sys.getrecursionlimit()
- while visit:
- rev = nextvisit()
- if rev in roots:
- reached(rev)
- if not includepath:
- continue
- parents = parentrevs(rev)
- seen[rev] = parents
- for parent in parents:
- if parent >= minroot and parent not in seen:
- dovisit(parent)
- if not reachable:
- return baseset()
- if not includepath:
- return reachable
- for rev in sorted(seen):
- for parent in seen[rev]:
- if parent in reachable:
- reached(rev)
- return reachable
-
-def reachableroots(repo, roots, heads, includepath=False):
- """return (heads(::<roots> and ::<heads>))
-
- If includepath is True, return (<roots>::<heads>)."""
- if not roots:
- return baseset()
- minroot = roots.min()
- roots = list(roots)
- heads = list(heads)
- try:
- revs = repo.changelog.reachableroots(minroot, heads, roots, includepath)
- except AttributeError:
- revs = _reachablerootspure(repo, minroot, roots, heads, includepath)
- revs = baseset(revs)
- revs.sort()
- return revs
-
# helpers
def getset(repo, subset, x):
@@ -185,7 +70,7 @@
# operator methods
def stringset(repo, subset, x):
- x = repo[x].rev()
+ x = scmutil.intrev(repo[x])
if (x in subset
or x == node.nullrev and isinstance(subset, fullreposet)):
return baseset([x])
@@ -236,8 +121,8 @@
def dagrange(repo, subset, x, y, order):
r = fullreposet(repo)
- xs = reachableroots(repo, getset(repo, r, x), getset(repo, r, y),
- includepath=True)
+ xs = dagop.reachableroots(repo, getset(repo, r, x), getset(repo, r, y),
+ includepath=True)
return subset & xs
def andset(repo, subset, x, y, order):
@@ -266,6 +151,31 @@
def notset(repo, subset, x, order):
return subset - getset(repo, subset, x)
+def relationset(repo, subset, x, y, order):
+ raise error.ParseError(_("can't use a relation in this context"))
+
+def relsubscriptset(repo, subset, x, y, z, order):
+ # this is pretty basic implementation of 'x#y[z]' operator, still
+ # experimental so undocumented. see the wiki for further ideas.
+ # https://www.mercurial-scm.org/wiki/RevsetOperatorPlan
+ rel = getsymbol(y)
+ n = getinteger(z, _("relation subscript must be an integer"))
+
+ # TODO: perhaps this should be a table of relation functions
+ if rel in ('g', 'generations'):
+ # TODO: support range, rewrite tests, and drop startdepth argument
+ # from ancestors() and descendants() predicates
+ if n <= 0:
+ n = -n
+ return _ancestors(repo, subset, x, startdepth=n, stopdepth=n + 1)
+ else:
+ return _descendants(repo, subset, x, startdepth=n, stopdepth=n + 1)
+
+ raise error.UnknownIdentifier(rel, ['generations'])
+
+def subscriptset(repo, subset, x, y, order):
+ raise error.ParseError(_("can't use a subscript in this context"))
+
def listset(repo, subset, *xs):
raise error.ParseError(_("can't use a list in this context"),
hint=_('see hg help "revsets.x or y"'))
@@ -354,18 +264,42 @@
return baseset([anc.rev()])
return baseset()
-def _ancestors(repo, subset, x, followfirst=False):
+def _ancestors(repo, subset, x, followfirst=False, startdepth=None,
+ stopdepth=None):
heads = getset(repo, fullreposet(repo), x)
if not heads:
return baseset()
- s = _revancestors(repo, heads, followfirst)
+ s = dagop.revancestors(repo, heads, followfirst, startdepth, stopdepth)
return subset & s
-@predicate('ancestors(set)', safe=True)
+@predicate('ancestors(set[, depth])', safe=True)
def ancestors(repo, subset, x):
- """Changesets that are ancestors of a changeset in set.
+ """Changesets that are ancestors of changesets in set, including the
+ given changesets themselves.
+
+ If depth is specified, the result only includes changesets up to
+ the specified generation.
"""
- return _ancestors(repo, subset, x)
+ # startdepth is for internal use only until we can decide the UI
+ args = getargsdict(x, 'ancestors', 'set depth startdepth')
+ if 'set' not in args:
+ # i18n: "ancestors" is a keyword
+ raise error.ParseError(_('ancestors takes at least 1 argument'))
+ startdepth = stopdepth = None
+ if 'startdepth' in args:
+ n = getinteger(args['startdepth'],
+ "ancestors expects an integer startdepth")
+ if n < 0:
+ raise error.ParseError("negative startdepth")
+ startdepth = n
+ if 'depth' in args:
+ # i18n: "ancestors" is a keyword
+ n = getinteger(args['depth'], _("ancestors expects an integer depth"))
+ if n < 0:
+ raise error.ParseError(_("negative depth"))
+ stopdepth = n + 1
+ return _ancestors(repo, subset, args['set'],
+ startdepth=startdepth, stopdepth=stopdepth)
@predicate('_firstancestors', safe=True)
def _firstancestors(repo, subset, x):
@@ -373,17 +307,41 @@
# Like ``ancestors(set)`` but follows only the first parents.
return _ancestors(repo, subset, x, followfirst=True)
+def _childrenspec(repo, subset, x, n, order):
+ """Changesets that are the Nth child of a changeset
+ in set.
+ """
+ cs = set()
+ for r in getset(repo, fullreposet(repo), x):
+ for i in range(n):
+ c = repo[r].children()
+ if len(c) == 0:
+ break
+ if len(c) > 1:
+ raise error.RepoLookupError(
+ _("revision in set has more than one child"))
+ r = c[0].rev()
+ else:
+ cs.add(r)
+ return subset & cs
+
def ancestorspec(repo, subset, x, n, order):
"""``set~n``
Changesets that are the Nth ancestor (first parents only) of a changeset
in set.
"""
n = getinteger(n, _("~ expects a number"))
+ if n < 0:
+ # children lookup
+ return _childrenspec(repo, subset, x, -n, order)
ps = set()
cl = repo.changelog
for r in getset(repo, fullreposet(repo), x):
for i in range(n):
- r = cl.parentrevs(r)[0]
+ try:
+ r = cl.parentrevs(r)[0]
+ except error.WdirUnsupported:
+ r = repo[r].parents()[0].rev()
ps.add(r)
return subset & ps
@@ -451,9 +409,8 @@
for bmrev in matchrevs:
bms.add(repo[bmrev].rev())
else:
- bms = set([repo[r].rev()
- for r in repo._bookmarks.values()])
- bms -= set([node.nullrev])
+ bms = {repo[r].rev() for r in repo._bookmarks.values()}
+ bms -= {node.nullrev}
return subset & bms
@predicate('branch(string or set)', safe=True)
@@ -466,6 +423,11 @@
:hg:`help revisions.patterns`.
"""
getbi = repo.revbranchcache().branchinfo
+ def getbranch(r):
+ try:
+ return getbi(r)[0]
+ except error.WdirUnsupported:
+ return repo[r].branch()
try:
b = getstring(x, '')
@@ -478,21 +440,21 @@
# note: falls through to the revspec case if no branch with
# this name exists and pattern kind is not specified explicitly
if pattern in repo.branchmap():
- return subset.filter(lambda r: matcher(getbi(r)[0]),
+ return subset.filter(lambda r: matcher(getbranch(r)),
condrepr=('<branch %r>', b))
if b.startswith('literal:'):
raise error.RepoLookupError(_("branch '%s' does not exist")
% pattern)
else:
- return subset.filter(lambda r: matcher(getbi(r)[0]),
+ return subset.filter(lambda r: matcher(getbranch(r)),
condrepr=('<branch %r>', b))
s = getset(repo, fullreposet(repo), x)
b = set()
for r in s:
- b.add(getbi(r)[0])
+ b.add(getbranch(r))
c = s.__contains__
- return subset.filter(lambda r: c(r) or getbi(r)[0] in b,
+ return subset.filter(lambda r: c(r) or getbranch(r) in b,
condrepr=lambda: '<branch %r>' % sorted(b))
@predicate('bumped()', safe=True)
@@ -659,30 +621,42 @@
return subset.filter(lambda r: matcher(repo[r].description()),
condrepr=('<desc %r>', ds))
-def _descendants(repo, subset, x, followfirst=False):
+def _descendants(repo, subset, x, followfirst=False, startdepth=None,
+ stopdepth=None):
roots = getset(repo, fullreposet(repo), x)
if not roots:
return baseset()
- s = _revdescendants(repo, roots, followfirst)
+ s = dagop.revdescendants(repo, roots, followfirst, startdepth, stopdepth)
+ return subset & s
+
+@predicate('descendants(set[, depth])', safe=True)
+def descendants(repo, subset, x):
+ """Changesets which are descendants of changesets in set, including the
+ given changesets themselves.
- # Both sets need to be ascending in order to lazily return the union
- # in the correct order.
- base = subset & roots
- desc = subset & s
- result = base + desc
- if subset.isascending():
- result.sort()
- elif subset.isdescending():
- result.sort(reverse=True)
- else:
- result = subset & result
- return result
-
-@predicate('descendants(set)', safe=True)
-def descendants(repo, subset, x):
- """Changesets which are descendants of changesets in set.
+ If depth is specified, the result only includes changesets up to
+ the specified generation.
"""
- return _descendants(repo, subset, x)
+ # startdepth is for internal use only until we can decide the UI
+ args = getargsdict(x, 'descendants', 'set depth startdepth')
+ if 'set' not in args:
+ # i18n: "descendants" is a keyword
+ raise error.ParseError(_('descendants takes at least 1 argument'))
+ startdepth = stopdepth = None
+ if 'startdepth' in args:
+ n = getinteger(args['startdepth'],
+ "descendants expects an integer startdepth")
+ if n < 0:
+ raise error.ParseError("negative startdepth")
+ startdepth = n
+ if 'depth' in args:
+ # i18n: "descendants" is a keyword
+ n = getinteger(args['depth'], _("descendants expects an integer depth"))
+ if n < 0:
+ raise error.ParseError(_("negative depth"))
+ stopdepth = n + 1
+ return _descendants(repo, subset, args['set'],
+ startdepth=startdepth, stopdepth=stopdepth)
@predicate('_firstdescendants', safe=True)
def _firstdescendants(repo, subset, x):
@@ -850,11 +824,11 @@
return subset & s
-@predicate('first(set, [n])', safe=True)
-def first(repo, subset, x):
+@predicate('first(set, [n])', safe=True, takeorder=True)
+def first(repo, subset, x, order):
"""An alias for limit().
"""
- return limit(repo, subset, x)
+ return limit(repo, subset, x, order)
def _follow(repo, subset, x, name, followfirst=False):
l = getargs(x, 0, 2, _("%s takes no arguments or a pattern "
@@ -882,7 +856,7 @@
# include the revision responsible for the most recent version
s.add(fctx.introrev())
else:
- s = _revancestors(repo, baseset([c.rev()]), followfirst)
+ s = dagop.revancestors(repo, baseset([c.rev()]), followfirst)
return subset & s
@@ -915,8 +889,6 @@
descendants of 'startrev' are returned though renames are (currently) not
followed in this direction.
"""
- from . import context # avoid circular import issues
-
args = getargsdict(x, 'followlines', 'file *lines startrev descend')
if len(args['lines']) != 1:
raise error.ParseError(_("followlines requires a line range"))
@@ -956,12 +928,12 @@
if descend:
rs = generatorset(
(c.rev() for c, _linerange
- in context.blockdescendants(fctx, fromline, toline)),
+ in dagop.blockdescendants(fctx, fromline, toline)),
iterasc=True)
else:
rs = generatorset(
(c.rev() for c, _linerange
- in context.blockancestors(fctx, fromline, toline)),
+ in dagop.blockancestors(fctx, fromline, toline)),
iterasc=False)
return subset & rs
@@ -1118,8 +1090,8 @@
return subset.filter(matches, condrepr=('<keyword %r>', kw))
-@predicate('limit(set[, n[, offset]])', safe=True)
-def limit(repo, subset, x):
+@predicate('limit(set[, n[, offset]])', safe=True, takeorder=True)
+def limit(repo, subset, x, order):
"""First n members of set, defaulting to 1, starting from offset.
"""
args = getargsdict(x, 'limit', 'set n offset')
@@ -1128,28 +1100,20 @@
raise error.ParseError(_("limit requires one to three arguments"))
# i18n: "limit" is a keyword
lim = getinteger(args.get('n'), _("limit expects a number"), default=1)
+ if lim < 0:
+ raise error.ParseError(_("negative number to select"))
# i18n: "limit" is a keyword
ofs = getinteger(args.get('offset'), _("limit expects a number"), default=0)
if ofs < 0:
raise error.ParseError(_("negative offset"))
os = getset(repo, fullreposet(repo), args['set'])
- result = []
- it = iter(os)
- for x in xrange(ofs):
- y = next(it, None)
- if y is None:
- break
- for x in xrange(lim):
- y = next(it, None)
- if y is None:
- break
- elif y in subset:
- result.append(y)
- return baseset(result, datarepr=('<limit n=%d, offset=%d, %r, %r>',
- lim, ofs, subset, os))
+ ls = os.slice(ofs, ofs + lim)
+ if order == followorder and lim > 1:
+ return subset & ls
+ return ls & subset
-@predicate('last(set, [n])', safe=True)
-def last(repo, subset, x):
+@predicate('last(set, [n])', safe=True, takeorder=True)
+def last(repo, subset, x, order):
"""Last n members of set, defaulting to 1.
"""
# i18n: "last" is a keyword
@@ -1158,17 +1122,15 @@
if len(l) == 2:
# i18n: "last" is a keyword
lim = getinteger(l[1], _("last expects a number"))
+ if lim < 0:
+ raise error.ParseError(_("negative number to select"))
os = getset(repo, fullreposet(repo), l[0])
os.reverse()
- result = []
- it = iter(os)
- for x in xrange(lim):
- y = next(it, None)
- if y is None:
- break
- elif y in subset:
- result.append(y)
- return baseset(result, datarepr=('<last n=%d, %r, %r>', lim, subset, os))
+ ls = os.slice(0, lim)
+ if order == followorder and lim > 1:
+ return subset & ls
+ ls.reverse()
+ return ls & subset
@predicate('max(set)', safe=True)
def maxrev(repo, subset, x):
@@ -1276,7 +1238,7 @@
if name not in ns.deprecated:
names.update(repo[n].rev() for n in ns.nodes(repo, name))
- names -= set([node.nullrev])
+ names -= {node.nullrev}
return subset & names
@predicate('id(string)', safe=True)
@@ -1290,13 +1252,18 @@
if len(n) == 40:
try:
rn = repo.changelog.rev(node.bin(n))
+ except error.WdirUnsupported:
+ rn = node.wdirrev
except (LookupError, TypeError):
rn = None
else:
rn = None
- pm = repo.changelog._partialmatch(n)
- if pm is not None:
- rn = repo.changelog.rev(pm)
+ try:
+ pm = repo.changelog._partialmatch(n)
+ if pm is not None:
+ rn = repo.changelog.rev(pm)
+ except error.WdirUnsupported:
+ rn = node.wdirrev
if rn is None:
return baseset()
@@ -1326,7 +1293,7 @@
if not include:
return baseset()
- descendants = set(_revdescendants(repo, include, False))
+ descendants = set(dagop.revdescendants(repo, include, False))
exclude = [rev for rev in cl.headrevs()
if not rev in descendants and not rev in include]
else:
@@ -1363,8 +1330,8 @@
return src
src = prev
- o = set([_firstsrc(r) for r in dests])
- o -= set([None])
+ o = {_firstsrc(r) for r in dests}
+ o -= {None}
# XXX we should turn this into a baseset instead of a set, smartset may do
# some optimizations from the fact this is a baseset.
return subset & o
@@ -1393,7 +1360,7 @@
outgoing = discovery.findcommonoutgoing(repo, other, onlyheads=revs)
repo.ui.popbuffer()
cl = repo.changelog
- o = set([cl.rev(r) for r in outgoing.missing])
+ o = {cl.rev(r) for r in outgoing.missing}
return subset & o
@predicate('p1([set])', safe=True)
@@ -1409,8 +1376,11 @@
ps = set()
cl = repo.changelog
for r in getset(repo, fullreposet(repo), x):
- ps.add(cl.parentrevs(r)[0])
- ps -= set([node.nullrev])
+ try:
+ ps.add(cl.parentrevs(r)[0])
+ except error.WdirUnsupported:
+ ps.add(repo[r].parents()[0].rev())
+ ps -= {node.nullrev}
# XXX we should turn this into a baseset instead of a set, smartset may do
# some optimizations from the fact this is a baseset.
return subset & ps
@@ -1432,8 +1402,13 @@
ps = set()
cl = repo.changelog
for r in getset(repo, fullreposet(repo), x):
- ps.add(cl.parentrevs(r)[1])
- ps -= set([node.nullrev])
+ try:
+ ps.add(cl.parentrevs(r)[1])
+ except error.WdirUnsupported:
+ parents = repo[r].parents()
+ if len(parents) == 2:
+ ps.add(parents[1])
+ ps -= {node.nullrev}
# XXX we should turn this into a baseset instead of a set, smartset may do
# some optimizations from the fact this is a baseset.
return subset & ps
@@ -1454,11 +1429,11 @@
up = ps.update
parentrevs = cl.parentrevs
for r in getset(repo, fullreposet(repo), x):
- if r == node.wdirrev:
+ try:
+ up(parentrevs(r))
+ except error.WdirUnsupported:
up(p.rev() for p in repo[r].parents())
- else:
- up(parentrevs(r))
- ps -= set([node.nullrev])
+ ps -= {node.nullrev}
return subset & ps
def _phase(repo, subset, *targets):
@@ -1500,11 +1475,19 @@
if n == 0:
ps.add(r)
elif n == 1:
- ps.add(cl.parentrevs(r)[0])
- elif n == 2:
- parents = cl.parentrevs(r)
- if parents[1] != node.nullrev:
- ps.add(parents[1])
+ try:
+ ps.add(cl.parentrevs(r)[0])
+ except error.WdirUnsupported:
+ ps.add(repo[r].parents()[0].rev())
+ else:
+ try:
+ parents = cl.parentrevs(r)
+ if parents[1] != node.nullrev:
+ ps.add(parents[1])
+ except error.WdirUnsupported:
+ parents = repo[r].parents()
+ if len(parents) == 2:
+ ps.add(parents[1].rev())
return subset & ps
@predicate('present(set)', safe=True)
@@ -1597,7 +1580,7 @@
except (TypeError, ValueError):
# i18n: "rev" is a keyword
raise error.ParseError(_("rev expects a number"))
- if l not in repo.changelog and l != node.nullrev:
+ if l not in repo.changelog and l not in (node.nullrev, node.wdirrev):
return baseset()
return subset & baseset([l])
@@ -1812,7 +1795,8 @@
firstbranch = ()
if 'topo.firstbranch' in opts:
firstbranch = getset(repo, subset, opts['topo.firstbranch'])
- revs = baseset(_toposort(revs, repo.changelog.parentrevs, firstbranch),
+ revs = baseset(dagop.toposort(revs, repo.changelog.parentrevs,
+ firstbranch),
istopo=True)
if keyflags[0][1]:
revs.reverse()
@@ -1824,204 +1808,6 @@
ctxs.sort(key=_sortkeyfuncs[k], reverse=reverse)
return baseset([c.rev() for c in ctxs])
-def _toposort(revs, parentsfunc, firstbranch=()):
- """Yield revisions from heads to roots one (topo) branch at a time.
-
- This function aims to be used by a graph generator that wishes to minimize
- the number of parallel branches and their interleaving.
-
- Example iteration order (numbers show the "true" order in a changelog):
-
- o 4
- |
- o 1
- |
- | o 3
- | |
- | o 2
- |/
- o 0
-
- Note that the ancestors of merges are understood by the current
- algorithm to be on the same branch. This means no reordering will
- occur behind a merge.
- """
-
- ### Quick summary of the algorithm
- #
- # This function is based around a "retention" principle. We keep revisions
- # in memory until we are ready to emit a whole branch that immediately
- # "merges" into an existing one. This reduces the number of parallel
- # branches with interleaved revisions.
- #
- # During iteration revs are split into two groups:
- # A) revision already emitted
- # B) revision in "retention". They are stored as different subgroups.
- #
- # for each REV, we do the following logic:
- #
- # 1) if REV is a parent of (A), we will emit it. If there is a
- # retention group ((B) above) that is blocked on REV being
- # available, we emit all the revisions out of that retention
- # group first.
- #
- # 2) else, we'll search for a subgroup in (B) awaiting for REV to be
- # available, if such subgroup exist, we add REV to it and the subgroup is
- # now awaiting for REV.parents() to be available.
- #
- # 3) finally if no such group existed in (B), we create a new subgroup.
- #
- #
- # To bootstrap the algorithm, we emit the tipmost revision (which
- # puts it in group (A) from above).
-
- revs.sort(reverse=True)
-
- # Set of parents of revision that have been emitted. They can be considered
- # unblocked as the graph generator is already aware of them so there is no
- # need to delay the revisions that reference them.
- #
- # If someone wants to prioritize a branch over the others, pre-filling this
- # set will force all other branches to wait until this branch is ready to be
- # emitted.
- unblocked = set(firstbranch)
-
- # list of groups waiting to be displayed, each group is defined by:
- #
- # (revs: lists of revs waiting to be displayed,
- # blocked: set of that cannot be displayed before those in 'revs')
- #
- # The second value ('blocked') correspond to parents of any revision in the
- # group ('revs') that is not itself contained in the group. The main idea
- # of this algorithm is to delay as much as possible the emission of any
- # revision. This means waiting for the moment we are about to display
- # these parents to display the revs in a group.
- #
- # This first implementation is smart until it encounters a merge: it will
- # emit revs as soon as any parent is about to be emitted and can grow an
- # arbitrary number of revs in 'blocked'. In practice this mean we properly
- # retains new branches but gives up on any special ordering for ancestors
- # of merges. The implementation can be improved to handle this better.
- #
- # The first subgroup is special. It corresponds to all the revision that
- # were already emitted. The 'revs' lists is expected to be empty and the
- # 'blocked' set contains the parents revisions of already emitted revision.
- #
- # You could pre-seed the <parents> set of groups[0] to a specific
- # changesets to select what the first emitted branch should be.
- groups = [([], unblocked)]
- pendingheap = []
- pendingset = set()
-
- heapq.heapify(pendingheap)
- heappop = heapq.heappop
- heappush = heapq.heappush
- for currentrev in revs:
- # Heap works with smallest element, we want highest so we invert
- if currentrev not in pendingset:
- heappush(pendingheap, -currentrev)
- pendingset.add(currentrev)
- # iterates on pending rev until after the current rev have been
- # processed.
- rev = None
- while rev != currentrev:
- rev = -heappop(pendingheap)
- pendingset.remove(rev)
-
- # Seek for a subgroup blocked, waiting for the current revision.
- matching = [i for i, g in enumerate(groups) if rev in g[1]]
-
- if matching:
- # The main idea is to gather together all sets that are blocked
- # on the same revision.
- #
- # Groups are merged when a common blocking ancestor is
- # observed. For example, given two groups:
- #
- # revs [5, 4] waiting for 1
- # revs [3, 2] waiting for 1
- #
- # These two groups will be merged when we process
- # 1. In theory, we could have merged the groups when
- # we added 2 to the group it is now in (we could have
- # noticed the groups were both blocked on 1 then), but
- # the way it works now makes the algorithm simpler.
- #
- # We also always keep the oldest subgroup first. We can
- # probably improve the behavior by having the longest set
- # first. That way, graph algorithms could minimise the length
- # of parallel lines their drawing. This is currently not done.
- targetidx = matching.pop(0)
- trevs, tparents = groups[targetidx]
- for i in matching:
- gr = groups[i]
- trevs.extend(gr[0])
- tparents |= gr[1]
- # delete all merged subgroups (except the one we kept)
- # (starting from the last subgroup for performance and
- # sanity reasons)
- for i in reversed(matching):
- del groups[i]
- else:
- # This is a new head. We create a new subgroup for it.
- targetidx = len(groups)
- groups.append(([], set([rev])))
-
- gr = groups[targetidx]
-
- # We now add the current nodes to this subgroups. This is done
- # after the subgroup merging because all elements from a subgroup
- # that relied on this rev must precede it.
- #
- # we also update the <parents> set to include the parents of the
- # new nodes.
- if rev == currentrev: # only display stuff in rev
- gr[0].append(rev)
- gr[1].remove(rev)
- parents = [p for p in parentsfunc(rev) if p > node.nullrev]
- gr[1].update(parents)
- for p in parents:
- if p not in pendingset:
- pendingset.add(p)
- heappush(pendingheap, -p)
-
- # Look for a subgroup to display
- #
- # When unblocked is empty (if clause), we were not waiting for any
- # revisions during the first iteration (if no priority was given) or
- # if we emitted a whole disconnected set of the graph (reached a
- # root). In that case we arbitrarily take the oldest known
- # subgroup. The heuristic could probably be better.
- #
- # Otherwise (elif clause) if the subgroup is blocked on
- # a revision we just emitted, we can safely emit it as
- # well.
- if not unblocked:
- if len(groups) > 1: # display other subset
- targetidx = 1
- gr = groups[1]
- elif not gr[1] & unblocked:
- gr = None
-
- if gr is not None:
- # update the set of awaited revisions with the one from the
- # subgroup
- unblocked |= gr[1]
- # output all revisions in the subgroup
- for r in gr[0]:
- yield r
- # delete the subgroup that you just output
- # unless it is groups[0] in which case you just empty it.
- if targetidx:
- del groups[targetidx]
- else:
- gr[0][:] = []
- # Check if we have some subgroup waiting for revisions we are not going to
- # iterate over
- for g in groups:
- for r in g[0]:
- yield r
-
@predicate('subrepo([pattern])')
def subrepo(repo, subset, x):
"""Changesets that add, modify or remove the given subrepo. If no subrepo
@@ -2066,6 +1852,28 @@
return subset.filter(matches, condrepr=('<subrepo %r>', pat))
+def _mapbynodefunc(repo, s, f):
+ """(repo, smartset, [node] -> [node]) -> smartset
+
+ Helper method to map a smartset to another smartset given a function only
+ talking about nodes. Handles converting between rev numbers and nodes, and
+ filtering.
+ """
+ cl = repo.unfiltered().changelog
+ torev = cl.rev
+ tonode = cl.node
+ nodemap = cl.nodemap
+ result = set(torev(n) for n in f(tonode(r) for r in s) if n in nodemap)
+ return smartset.baseset(result - repo.changelog.filteredrevs)
+
+@predicate('successors(set)', safe=True)
+def successors(repo, subset, x):
+ """All successors for set, including the given set themselves"""
+ s = getset(repo, fullreposet(repo), x)
+ f = lambda nodes: obsutil.allsuccessors(repo.obsstore, nodes)
+ d = _mapbynodefunc(repo, s, f)
+ return subset & d
+
def _substringmatcher(pattern, casesensitive=True):
kind, pattern, matcher = util.stringmatcher(pattern,
casesensitive=casesensitive)
@@ -2098,11 +1906,11 @@
if tn is None:
raise error.RepoLookupError(_("tag '%s' does not exist")
% pattern)
- s = set([repo[tn].rev()])
+ s = {repo[tn].rev()}
else:
- s = set([cl.rev(n) for t, n in repo.tagslist() if matcher(t)])
+ s = {cl.rev(n) for t, n in repo.tagslist() if matcher(t)}
else:
- s = set([cl.rev(n) for t, n in repo.tagslist() if t != 'tip'])
+ s = {cl.rev(n) for t, n in repo.tagslist() if t != 'tip'}
return subset & s
@predicate('tagged', safe=True)
@@ -2128,7 +1936,7 @@
"""
return author(repo, subset, x)
-@predicate('wdir', safe=True)
+@predicate('wdir()', safe=True)
def wdir(repo, subset, x):
"""Working directory. (EXPERIMENTAL)"""
# i18n: "wdir" is a keyword
@@ -2221,6 +2029,9 @@
"or": orset,
"not": notset,
"difference": differenceset,
+ "relation": relationset,
+ "relsubscript": relsubscriptset,
+ "subscript": subscriptset,
"list": listset,
"keyvalue": keyvaluepair,
"func": func,
@@ -2241,12 +2052,15 @@
"""
return matchany(ui, [spec], repo=repo, order=order)
-def matchany(ui, specs, repo=None, order=defineorder):
+def matchany(ui, specs, repo=None, order=defineorder, localalias=None):
"""Create a matcher that will include any revisions matching one of the
given specs
If order=followorder, a matcher takes the ordering specified by the input
set.
+
+ If localalias is not None, it is a dict {name: definitionstring}. It takes
+ precedence over [revsetalias] config section.
"""
if not specs:
def mfunc(repo, subset=None):
@@ -2263,8 +2077,15 @@
tree = ('or',
('list',) + tuple(revsetlang.parse(s, lookup) for s in specs))
+ aliases = []
+ warn = None
if ui:
- tree = revsetlang.expandaliases(ui, tree)
+ aliases.extend(ui.configitems('revsetalias'))
+ warn = ui.warn
+ if localalias:
+ aliases.extend(localalias.items())
+ if aliases:
+ tree = revsetlang.expandaliases(tree, aliases, warn=warn)
tree = revsetlang.foldconcat(tree)
tree = revsetlang.analyze(tree, order)
tree = revsetlang.optimize(tree)
--- a/mercurial/revsetlang.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/revsetlang.py Wed Jul 19 07:51:41 2017 -0500
@@ -21,6 +21,8 @@
elements = {
# token-type: binding-strength, primary, prefix, infix, suffix
"(": (21, None, ("group", 1, ")"), ("func", 1, ")"), None),
+ "[": (21, None, None, ("subscript", 1, "]"), None),
+ "#": (21, None, None, ("relation", 21), None),
"##": (20, None, None, ("_concat", 20), None),
"~": (18, None, None, ("ancestor", 18), None),
"^": (18, None, None, ("parent", 18), "parentpost"),
@@ -39,15 +41,16 @@
"=": (3, None, None, ("keyvalue", 3), None),
",": (2, None, None, ("list", 2), None),
")": (0, None, None, None, None),
+ "]": (0, None, None, None, None),
"symbol": (0, "symbol", None, None, None),
"string": (0, "string", None, None, None),
"end": (0, None, None, None, None),
}
-keywords = set(['and', 'or', 'not'])
+keywords = {'and', 'or', 'not'}
-_quoteletters = set(['"', "'"])
-_simpleopletters = set(pycompat.iterbytestr("():=,-|&+!~^%"))
+_quoteletters = {'"', "'"}
+_simpleopletters = set(pycompat.iterbytestr("()[]#:=,-|&+!~^%"))
# default set of valid characters for the initial letter of symbols
_syminitletters = set(pycompat.iterbytestr(
@@ -236,6 +239,25 @@
return parser.buildargsdict(getlist(x), funcname, parser.splitargspec(keys),
keyvaluenode='keyvalue', keynode='symbol')
+def _isnamedfunc(x, funcname):
+ """Check if given tree matches named function"""
+ return x and x[0] == 'func' and getsymbol(x[1]) == funcname
+
+def _isposargs(x, n):
+ """Check if given tree is n-length list of positional arguments"""
+ l = getlist(x)
+ return len(l) == n and all(y and y[0] != 'keyvalue' for y in l)
+
+def _matchnamedfunc(x, funcname):
+ """Return args tree if given tree matches named function; otherwise None
+
+ This can't be used for testing a nullary function since its args tree
+ is also None. Use _isnamedfunc() instead.
+ """
+ if not _isnamedfunc(x, funcname):
+ return
+ return x[2]
+
# Constants for ordering requirement, used in _analyze():
#
# If 'define', any nested functions and operations can change the ordering of
@@ -286,14 +308,10 @@
>>> f('ancestors(A)', 'not ancestors(B)')
('list', ('symbol', 'A'), ('symbol', 'B'))
"""
- if (revs is not None
- and revs[0] == 'func'
- and getsymbol(revs[1]) == 'ancestors'
- and bases is not None
- and bases[0] == 'not'
- and bases[1][0] == 'func'
- and getsymbol(bases[1][1]) == 'ancestors'):
- return ('list', revs[2], bases[1][2])
+ ta = _matchnamedfunc(revs, 'ancestors')
+ tb = bases and bases[0] == 'not' and _matchnamedfunc(bases[1], 'ancestors')
+ if _isposargs(ta, 1) and _isposargs(tb, 1):
+ return ('list', ta, tb)
def _fixops(x):
"""Rewrite raw parsed tree to resolve ambiguous syntax which cannot be
@@ -316,6 +334,9 @@
# make number of arguments deterministic:
# x + y + z -> (or x y z) -> (or (list x y z))
return (op, _fixops(('list',) + x[1:]))
+ elif op == 'subscript' and x[1][0] == 'relation':
+ # x#y[z] ternary
+ return _fixops(('relsubscript', x[1][1], x[1][2], x[2]))
return (op,) + tuple(_fixops(y) for y in x[1:])
@@ -354,10 +375,16 @@
return (op, _analyze(x[1], defineorder), order)
elif op == 'group':
return _analyze(x[1], order)
- elif op in ('dagrange', 'range', 'parent', 'ancestor'):
+ elif op in ('dagrange', 'range', 'parent', 'ancestor', 'relation',
+ 'subscript'):
ta = _analyze(x[1], defineorder)
tb = _analyze(x[2], defineorder)
return (op, ta, tb, order)
+ elif op == 'relsubscript':
+ ta = _analyze(x[1], defineorder)
+ tb = _analyze(x[2], defineorder)
+ tc = _analyze(x[3], defineorder)
+ return (op, ta, tb, tc, order)
elif op == 'list':
return (op,) + tuple(_analyze(y, order) for y in x[1:])
elif op == 'keyvalue':
@@ -461,11 +488,19 @@
o = _optimize(x[1], small)
order = x[2]
return o[0], (op, o[1], order)
- elif op in ('dagrange', 'range', 'parent', 'ancestor'):
+ elif op in ('dagrange', 'range'):
wa, ta = _optimize(x[1], small)
wb, tb = _optimize(x[2], small)
order = x[3]
return wa + wb, (op, ta, tb, order)
+ elif op in ('parent', 'ancestor', 'relation', 'subscript'):
+ w, t = _optimize(x[1], small)
+ order = x[3]
+ return w, (op, t, x[2], order)
+ elif op == 'relsubscript':
+ w, t = _optimize(x[1], small)
+ order = x[4]
+ return w, (op, t, x[2], x[3], order)
elif op == 'list':
ws, ts = zip(*(_optimize(y, small) for y in x[1:]))
return sum(ws), (op,) + ts
@@ -546,14 +581,16 @@
if tree[0] == 'func' and tree[1][0] == 'symbol':
return tree[1][1], getlist(tree[2])
-def expandaliases(ui, tree):
- aliases = _aliasrules.buildmap(ui.configitems('revsetalias'))
+def expandaliases(tree, aliases, warn=None):
+ """Expand aliases in a tree, aliases is a list of (name, value) tuples"""
+ aliases = _aliasrules.buildmap(aliases)
tree = _aliasrules.expand(aliases, tree)
# warn about problematic (but not referred) aliases
- for name, alias in sorted(aliases.iteritems()):
- if alias.error and not alias.warned:
- ui.warn(_('warning: %s\n') % (alias.error))
- alias.warned = True
+ if warn is not None:
+ for name, alias in sorted(aliases.iteritems()):
+ if alias.error and not alias.warned:
+ warn(_('warning: %s\n') % (alias.error))
+ alias.warned = True
return tree
def foldconcat(tree):
--- a/mercurial/scmposix.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/scmposix.py Wed Jul 19 07:51:41 2017 -0500
@@ -8,8 +8,8 @@
from . import (
encoding,
- osutil,
pycompat,
+ util,
)
# BSD 'more' escapes ANSI color sequences by default. This can be disabled by
@@ -23,7 +23,7 @@
rcdir = os.path.join(path, 'hgrc.d')
try:
rcs.extend([os.path.join(rcdir, f)
- for f, kind in osutil.listdir(rcdir)
+ for f, kind in util.listdir(rcdir)
if f.endswith(".rc")])
except OSError:
pass
--- a/mercurial/scmutil.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/scmutil.py Wed Jul 19 07:51:41 2017 -0500
@@ -13,20 +13,28 @@
import os
import re
import socket
+import weakref
from .i18n import _
-from .node import wdirrev
+from .node import (
+ hex,
+ nullid,
+ wdirid,
+ wdirrev,
+)
+
from . import (
encoding,
error,
match as matchmod,
+ obsolete,
+ obsutil,
pathutil,
phases,
pycompat,
revsetlang,
similar,
util,
- vfs as vfsmod,
)
if pycompat.osname == 'nt':
@@ -121,10 +129,6 @@
secretlist = []
if excluded:
for n in excluded:
- if n not in repo:
- # discovery should not have included the filtered revision,
- # we have to explicitly exclude it until discovery is cleanup.
- continue
ctx = repo[n]
if ctx.phase() >= phases.secret and not ctx.extinct():
secretlist.append(n)
@@ -186,13 +190,13 @@
ui.warn(_("abort: file censored %s!\n") % inst)
except error.RevlogError as inst:
ui.warn(_("abort: %s!\n") % inst)
- except error.SignalInterrupt:
- ui.warn(_("killed!\n"))
except error.InterventionRequired as inst:
ui.warn("%s\n" % inst)
if inst.hint:
ui.warn(_("(%s)\n") % inst.hint)
return 1
+ except error.WdirUnsupported:
+ ui.warn(_("abort: working directory revision cannot be specified\n"))
except error.Abort as inst:
ui.warn(_("abort: %s\n") % inst)
if inst.hint:
@@ -215,7 +219,7 @@
reason = inst.reason
if isinstance(reason, unicode):
# SSLError of Python 2.7.9 contains a unicode
- reason = reason.encode(encoding.encoding, 'replace')
+ reason = encoding.unitolocal(reason)
ui.warn(_("abort: error: %s\n") % reason)
elif (util.safehasattr(inst, "args")
and inst.args and inst.args[0] == errno.EPIPE):
@@ -277,7 +281,7 @@
def checkportabilityalert(ui):
'''check if the user's config requests nothing, a warning, or abort for
non-portable filenames'''
- val = ui.config('ui', 'portablefilenames', 'warn')
+ val = ui.config('ui', 'portablefilenames')
lval = val.lower()
bval = util.parsebool(val)
abort = pycompat.osname == 'nt' or lval == 'abort'
@@ -335,27 +339,6 @@
key = s.digest()
return key
-def _deprecated(old, new, func):
- msg = ('class at mercurial.scmutil.%s moved to mercurial.vfs.%s'
- % (old, new))
- def wrapper(*args, **kwargs):
- util.nouideprecwarn(msg, '4.2')
- return func(*args, **kwargs)
- return wrapper
-
-# compatibility layer since all 'vfs' code moved to 'mercurial.vfs'
-#
-# This is hard to instal deprecation warning to this since we do not have
-# access to a 'ui' object.
-opener = _deprecated('opener', 'vfs', vfsmod.vfs)
-vfs = _deprecated('vfs', 'vfs', vfsmod.vfs)
-filteropener = _deprecated('filteropener', 'filtervfs', vfsmod.filtervfs)
-filtervfs = _deprecated('filtervfs', 'filtervfs', vfsmod.filtervfs)
-abstractvfs = _deprecated('abstractvfs', 'abstractvfs', vfsmod.abstractvfs)
-readonlyvfs = _deprecated('readonlyvfs', 'readonlyvfs', vfsmod.readonlyvfs)
-auditvfs = _deprecated('auditvfs', 'auditvfs', vfsmod.auditvfs)
-checkambigatclosing = vfsmod.checkambigatclosing
-
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
'''yield every hg repository under path, always recursively.
The recurse flag will only control recursion into repo working dirs'''
@@ -404,9 +387,17 @@
newdirs.append(d)
dirs[:] = newdirs
-def intrev(rev):
- """Return integer for a given revision that can be used in comparison or
+def binnode(ctx):
+ """Return binary node id for a given basectx"""
+ node = ctx.node()
+ if node is None:
+ return wdirid
+ return node
+
+def intrev(ctx):
+ """Return integer for a given basectx that can be used in comparison or
arithmetic operation"""
+ rev = ctx.rev()
if rev is None:
return wdirrev
return rev
@@ -494,7 +485,7 @@
return parents
if repo.ui.debugflag:
return [parents[0], repo['null']]
- if parents[0].rev() >= intrev(ctx.rev()) - 1:
+ if parents[0].rev() >= intrev(ctx) - 1:
return []
return parents
@@ -561,7 +552,7 @@
Fetch user defined path from config file: [ui] origbackuppath = <path>
Fall back to default (filepath) if not specified
'''
- origbackuppath = ui.config('ui', 'origbackuppath', None)
+ origbackuppath = ui.config('ui', 'origbackuppath')
if origbackuppath is None:
return filepath + ".orig"
@@ -575,6 +566,86 @@
return fullorigpath + ".orig"
+class _containsnode(object):
+ """proxy __contains__(node) to container.__contains__ which accepts revs"""
+
+ def __init__(self, repo, revcontainer):
+ self._torev = repo.changelog.rev
+ self._revcontains = revcontainer.__contains__
+
+ def __contains__(self, node):
+ return self._revcontains(self._torev(node))
+
+def cleanupnodes(repo, mapping, operation):
+ """do common cleanups when old nodes are replaced by new nodes
+
+ That includes writing obsmarkers or stripping nodes, and moving bookmarks.
+ (we might also want to move working directory parent in the future)
+
+ mapping is {oldnode: [newnode]} or an iterable of nodes if they do not have
+ replacements. operation is a string, like "rebase".
+ """
+ if not util.safehasattr(mapping, 'items'):
+ mapping = {n: () for n in mapping}
+
+ with repo.transaction('cleanup') as tr:
+ # Move bookmarks
+ bmarks = repo._bookmarks
+ bmarkchanges = []
+ allnewnodes = [n for ns in mapping.values() for n in ns]
+ for oldnode, newnodes in mapping.items():
+ oldbmarks = repo.nodebookmarks(oldnode)
+ if not oldbmarks:
+ continue
+ from . import bookmarks # avoid import cycle
+ if len(newnodes) > 1:
+ # usually a split, take the one with biggest rev number
+ newnode = next(repo.set('max(%ln)', newnodes)).node()
+ elif len(newnodes) == 0:
+ # move bookmark backwards
+ roots = list(repo.set('max((::%n) - %ln)', oldnode,
+ list(mapping)))
+ if roots:
+ newnode = roots[0].node()
+ else:
+ newnode = nullid
+ else:
+ newnode = newnodes[0]
+ repo.ui.debug('moving bookmarks %r from %s to %s\n' %
+ (oldbmarks, hex(oldnode), hex(newnode)))
+ # Delete divergent bookmarks being parents of related newnodes
+ deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
+ allnewnodes, newnode, oldnode)
+ deletenodes = _containsnode(repo, deleterevs)
+ for name in oldbmarks:
+ bmarkchanges.append((name, newnode))
+ for b in bookmarks.divergent2delete(repo, deletenodes, name):
+ bmarkchanges.append((b, None))
+
+ if bmarkchanges:
+ bmarks.applychanges(repo, tr, bmarkchanges)
+
+ # Obsolete or strip nodes
+ if obsolete.isenabled(repo, obsolete.createmarkersopt):
+ # If a node is already obsoleted, and we want to obsolete it
+ # without a successor, skip that obssolete request since it's
+ # unnecessary. That's the "if s or not isobs(n)" check below.
+ # Also sort the node in topology order, that might be useful for
+ # some obsstore logic.
+ # NOTE: the filtering and sorting might belong to createmarkers.
+ # Unfiltered repo is needed since nodes in mapping might be hidden.
+ unfi = repo.unfiltered()
+ isobs = unfi.obsstore.successors.__contains__
+ torev = unfi.changelog.rev
+ sortfunc = lambda ns: torev(ns[0])
+ rels = [(unfi[n], tuple(unfi[m] for m in s))
+ for n, s in sorted(mapping.items(), key=sortfunc)
+ if s or not isobs(n)]
+ obsolete.createmarkers(repo, rels, operation=operation)
+ else:
+ from . import repair # avoid import cycle
+ repair.delayedstrip(repo.ui, repo, list(mapping), operation)
+
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
if opts is None:
opts = {}
@@ -931,39 +1002,71 @@
"""helper function to know if a repo should be created as general delta
"""
# experimental config: format.generaldelta
- return (ui.configbool('format', 'generaldelta', False)
- or ui.configbool('format', 'usegeneraldelta', True))
+ return (ui.configbool('format', 'generaldelta')
+ or ui.configbool('format', 'usegeneraldelta'))
def gddeltaconfig(ui):
"""helper function to know if incoming delta should be optimised
"""
# experimental config: format.generaldelta
- return ui.configbool('format', 'generaldelta', False)
+ return ui.configbool('format', 'generaldelta')
class simplekeyvaluefile(object):
"""A simple file with key=value lines
Keys must be alphanumerics and start with a letter, values must not
contain '\n' characters"""
+ firstlinekey = '__firstline'
def __init__(self, vfs, path, keys=None):
self.vfs = vfs
self.path = path
- def read(self):
+ def read(self, firstlinenonkeyval=False):
+ """Read the contents of a simple key-value file
+
+ 'firstlinenonkeyval' indicates whether the first line of file should
+        be treated as a key-value pair or returned fully under the
+ __firstline key."""
lines = self.vfs.readlines(self.path)
+ d = {}
+ if firstlinenonkeyval:
+ if not lines:
+ e = _("empty simplekeyvalue file")
+ raise error.CorruptedState(e)
+ # we don't want to include '\n' in the __firstline
+ d[self.firstlinekey] = lines[0][:-1]
+ del lines[0]
+
try:
- d = dict(line[:-1].split('=', 1) for line in lines if line)
+ # the 'if line.strip()' part prevents us from failing on empty
+ # lines which only contain '\n' therefore are not skipped
+ # by 'if line'
+ updatedict = dict(line[:-1].split('=', 1) for line in lines
+ if line.strip())
+ if self.firstlinekey in updatedict:
+ e = _("%r can't be used as a key")
+ raise error.CorruptedState(e % self.firstlinekey)
+ d.update(updatedict)
except ValueError as e:
raise error.CorruptedState(str(e))
return d
- def write(self, data):
+ def write(self, data, firstline=None):
"""Write key=>value mapping to a file
data is a dict. Keys must be alphanumerical and start with a letter.
- Values must not contain newline characters."""
+ Values must not contain newline characters.
+
+ If 'firstline' is not None, it is written to file before
+ everything else, as it is, not in a key=value form"""
lines = []
+ if firstline is not None:
+ lines.append('%s\n' % firstline)
+
for k, v in data.items():
+ if k == self.firstlinekey:
+ e = "key name '%s' is reserved" % self.firstlinekey
+ raise error.ProgrammingError(e)
if not k[0].isalpha():
e = "keys must start with a letter in a key-value file"
raise error.ProgrammingError(e)
@@ -976,3 +1079,27 @@
lines.append("%s=%s\n" % (k, v))
with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
fp.write(''.join(lines))
+
+_reportobsoletedsource = [
+ 'debugobsolete',
+ 'pull',
+ 'push',
+ 'serve',
+ 'unbundle',
+]
+
+def registersummarycallback(repo, otr, txnname=''):
+ """register a callback to issue a summary after the transaction is closed
+ """
+ for source in _reportobsoletedsource:
+ if txnname.startswith(source):
+ reporef = weakref.ref(repo)
+ def reportsummary(tr):
+ """the actual callback reporting the summary"""
+ repo = reporef()
+ obsoleted = obsutil.getobsoleted(repo, tr)
+ if obsoleted:
+ repo.ui.status(_('obsoleted %i changesets\n')
+ % len(obsoleted))
+ otr.addpostclose('00-txnreport', reportsummary)
+ break
--- a/mercurial/scmwindows.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/scmwindows.py Wed Jul 19 07:51:41 2017 -0500
@@ -4,7 +4,6 @@
from . import (
encoding,
- osutil,
pycompat,
util,
win32,
@@ -29,7 +28,7 @@
# Use hgrc.d found in directory with hg.exe
progrcd = os.path.join(os.path.dirname(filename), 'hgrc.d')
if os.path.isdir(progrcd):
- for f, kind in osutil.listdir(progrcd):
+ for f, kind in util.listdir(progrcd):
if f.endswith('.rc'):
rcpath.append(os.path.join(progrcd, f))
# else look for a system rcpath in the registry
@@ -42,7 +41,7 @@
if p.lower().endswith('mercurial.ini'):
rcpath.append(p)
elif os.path.isdir(p):
- for f, kind in osutil.listdir(p):
+ for f, kind in util.listdir(p):
if f.endswith('.rc'):
rcpath.append(os.path.join(p, f))
return rcpath
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/selectors2.py Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,744 @@
+""" Back-ported, durable, and portable selectors """
+
+# MIT License
+#
+# Copyright (c) 2017 Seth Michael Larson
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+# no-check-code
+
+from __future__ import absolute_import
+
+import collections
+import errno
+import math
+import platform
+import select
+import socket
+import sys
+import time
+
+namedtuple = collections.namedtuple
+Mapping = collections.Mapping
+
+try:
+ monotonic = time.monotonic
+except AttributeError:
+ monotonic = time.time
+
+__author__ = 'Seth Michael Larson'
+__email__ = 'sethmichaellarson@protonmail.com'
+__version__ = '2.0.0'
+__license__ = 'MIT'
+__url__ = 'https://www.github.com/SethMichaelLarson/selectors2'
+
+__all__ = ['EVENT_READ',
+ 'EVENT_WRITE',
+ 'SelectorKey',
+ 'DefaultSelector',
+ 'BaseSelector']
+
+EVENT_READ = (1 << 0)
+EVENT_WRITE = (1 << 1)
+_DEFAULT_SELECTOR = None
+_SYSCALL_SENTINEL = object() # Sentinel in case a system call returns None.
+_ERROR_TYPES = (OSError, IOError, socket.error)
+
+
+SelectorKey = namedtuple('SelectorKey', ['fileobj', 'fd', 'events', 'data'])
+
+
+class _SelectorMapping(Mapping):
+ """ Mapping of file objects to selector keys """
+
+ def __init__(self, selector):
+ self._selector = selector
+
+ def __len__(self):
+ return len(self._selector._fd_to_key)
+
+ def __getitem__(self, fileobj):
+ try:
+ fd = self._selector._fileobj_lookup(fileobj)
+ return self._selector._fd_to_key[fd]
+ except KeyError:
+ raise KeyError("{0!r} is not registered.".format(fileobj))
+
+ def __iter__(self):
+ return iter(self._selector._fd_to_key)
+
+
+def _fileobj_to_fd(fileobj):
+ """ Return a file descriptor from a file object. If
+ given an integer will simply return that integer back. """
+ if isinstance(fileobj, int):
+ fd = fileobj
+ else:
+ try:
+ fd = int(fileobj.fileno())
+ except (AttributeError, TypeError, ValueError):
+ raise ValueError("Invalid file object: {0!r}".format(fileobj))
+ if fd < 0:
+ raise ValueError("Invalid file descriptor: {0}".format(fd))
+ return fd
+
+
+class BaseSelector(object):
+ """ Abstract Selector class
+
+ A selector supports registering file objects to be monitored
+ for specific I/O events.
+
+ A file object is a file descriptor or any object with a
+ `fileno()` method. An arbitrary object can be attached to the
+ file object which can be used for example to store context info,
+ a callback, etc.
+
+ A selector can use various implementations (select(), poll(), epoll(),
+ and kqueue()) depending on the platform. The 'DefaultSelector' class uses
+ the most efficient implementation for the current platform.
+ """
+ def __init__(self):
+ # Maps file descriptors to keys.
+ self._fd_to_key = {}
+
+ # Read-only mapping returned by get_map()
+ self._map = _SelectorMapping(self)
+
+ def _fileobj_lookup(self, fileobj):
+ """ Return a file descriptor from a file object.
+ This wraps _fileobj_to_fd() to do an exhaustive
+ search in case the object is invalid but we still
+ have it in our map. Used by unregister() so we can
+ unregister an object that was previously registered
+ even if it is closed. It is also used by _SelectorMapping
+ """
+ try:
+ return _fileobj_to_fd(fileobj)
+ except ValueError:
+
+ # Search through all our mapped keys.
+ for key in self._fd_to_key.values():
+ if key.fileobj is fileobj:
+ return key.fd
+
+ # Raise ValueError after all.
+ raise
+
+ def register(self, fileobj, events, data=None):
+ """ Register a file object for a set of events to monitor. """
+ if (not events) or (events & ~(EVENT_READ | EVENT_WRITE)):
+ raise ValueError("Invalid events: {0!r}".format(events))
+
+ key = SelectorKey(fileobj, self._fileobj_lookup(fileobj), events, data)
+
+ if key.fd in self._fd_to_key:
+ raise KeyError("{0!r} (FD {1}) is already registered"
+ .format(fileobj, key.fd))
+
+ self._fd_to_key[key.fd] = key
+ return key
+
+ def unregister(self, fileobj):
+ """ Unregister a file object from being monitored. """
+ try:
+ key = self._fd_to_key.pop(self._fileobj_lookup(fileobj))
+ except KeyError:
+ raise KeyError("{0!r} is not registered".format(fileobj))
+
+ # Getting the fileno of a closed socket on Windows errors with EBADF.
+ except socket.error as err:
+ if err.errno != errno.EBADF:
+ raise
+ else:
+ for key in self._fd_to_key.values():
+ if key.fileobj is fileobj:
+ self._fd_to_key.pop(key.fd)
+ break
+ else:
+ raise KeyError("{0!r} is not registered".format(fileobj))
+ return key
+
+ def modify(self, fileobj, events, data=None):
+ """ Change a registered file object monitored events and data. """
+ # NOTE: Some subclasses optimize this operation even further.
+ try:
+ key = self._fd_to_key[self._fileobj_lookup(fileobj)]
+ except KeyError:
+ raise KeyError("{0!r} is not registered".format(fileobj))
+
+ if events != key.events:
+ self.unregister(fileobj)
+ key = self.register(fileobj, events, data)
+
+ elif data != key.data:
+ # Use a shortcut to update the data.
+ key = key._replace(data=data)
+ self._fd_to_key[key.fd] = key
+
+ return key
+
+ def select(self, timeout=None):
+ """ Perform the actual selection until some monitored file objects
+ are ready or the timeout expires. """
+ raise NotImplementedError()
+
+ def close(self):
+ """ Close the selector. This must be called to ensure that all
+ underlying resources are freed. """
+ self._fd_to_key.clear()
+ self._map = None
+
+ def get_key(self, fileobj):
+ """ Return the key associated with a registered file object. """
+ mapping = self.get_map()
+ if mapping is None:
+ raise RuntimeError("Selector is closed")
+ try:
+ return mapping[fileobj]
+ except KeyError:
+ raise KeyError("{0!r} is not registered".format(fileobj))
+
+ def get_map(self):
+ """ Return a mapping of file objects to selector keys """
+ return self._map
+
+ def _key_from_fd(self, fd):
+ """ Return the key associated to a given file descriptor
+ Return None if it is not found. """
+ try:
+ return self._fd_to_key[fd]
+ except KeyError:
+ return None
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *_):
+ self.close()
+
+
+# Almost all platforms have select.select()
+if hasattr(select, "select"):
+ class SelectSelector(BaseSelector):
+ """ Select-based selector. """
+ def __init__(self):
+ super(SelectSelector, self).__init__()
+ self._readers = set()
+ self._writers = set()
+
+ def register(self, fileobj, events, data=None):
+ key = super(SelectSelector, self).register(fileobj, events, data)
+ if events & EVENT_READ:
+ self._readers.add(key.fd)
+ if events & EVENT_WRITE:
+ self._writers.add(key.fd)
+ return key
+
+ def unregister(self, fileobj):
+ key = super(SelectSelector, self).unregister(fileobj)
+ self._readers.discard(key.fd)
+ self._writers.discard(key.fd)
+ return key
+
+ def select(self, timeout=None):
+ # Selecting on empty lists on Windows errors out.
+ if not len(self._readers) and not len(self._writers):
+ return []
+
+ timeout = None if timeout is None else max(timeout, 0.0)
+ ready = []
+ r, w, _ = _syscall_wrapper(self._wrap_select, True, self._readers,
+ self._writers, timeout)
+ r = set(r)
+ w = set(w)
+ for fd in r | w:
+ events = 0
+ if fd in r:
+ events |= EVENT_READ
+ if fd in w:
+ events |= EVENT_WRITE
+
+ key = self._key_from_fd(fd)
+ if key:
+ ready.append((key, events & key.events))
+ return ready
+
+ def _wrap_select(self, r, w, timeout=None):
+ """ Wrapper for select.select because timeout is a positional arg """
+ return select.select(r, w, [], timeout)
+
+ __all__.append('SelectSelector')
+
+ # Jython has a different implementation of .fileno() for socket objects.
+ if platform.system() == 'Java':
+ class _JythonSelectorMapping(object):
+ """ This is an implementation of _SelectorMapping that is built
+ for use specifically with Jython, which does not provide a hashable
+ value from socket.socket.fileno(). """
+
+ def __init__(self, selector):
+ assert isinstance(selector, JythonSelectSelector)
+ self._selector = selector
+
+ def __len__(self):
+ return len(self._selector._sockets)
+
+ def __getitem__(self, fileobj):
+ for sock, key in self._selector._sockets:
+ if sock is fileobj:
+ return key
+ else:
+ raise KeyError("{0!r} is not registered.".format(fileobj))
+
+ class JythonSelectSelector(SelectSelector):
+ """ This is an implementation of SelectSelector that is for Jython
+ which works around that Jython's socket.socket.fileno() does not
+ return an integer fd value. All SelectorKey.fd will be equal to -1
+ and should not be used. This instead uses object id to compare fileobj
+ and will only use select.select as it's the only selector that allows
+ directly passing in socket objects rather than registering fds.
+ See: http://bugs.jython.org/issue1678
+ https://wiki.python.org/jython/NewSocketModule#socket.fileno.28.29_does_not_return_an_integer
+ """
+
+ def __init__(self):
+ super(JythonSelectSelector, self).__init__()
+
+ self._sockets = [] # Uses a list of tuples instead of dictionary.
+ self._map = _JythonSelectorMapping(self)
+ self._readers = []
+ self._writers = []
+
+ # Jython has a select.cpython_compatible_select function in older versions.
+ self._select_func = getattr(select, 'cpython_compatible_select', select.select)
+
+ def register(self, fileobj, events, data=None):
+ for sock, _ in self._sockets:
+ if sock is fileobj:
+ raise KeyError("{0!r} is already registered"
+ .format(fileobj, sock))
+
+ key = SelectorKey(fileobj, -1, events, data)
+ self._sockets.append((fileobj, key))
+
+ if events & EVENT_READ:
+ self._readers.append(fileobj)
+ if events & EVENT_WRITE:
+ self._writers.append(fileobj)
+ return key
+
+ def unregister(self, fileobj):
+ for i, (sock, key) in enumerate(self._sockets):
+ if sock is fileobj:
+ break
+ else:
+ raise KeyError("{0!r} is not registered.".format(fileobj))
+
+ if key.events & EVENT_READ:
+ self._readers.remove(fileobj)
+ if key.events & EVENT_WRITE:
+ self._writers.remove(fileobj)
+
+ del self._sockets[i]
+ return key
+
+ def _wrap_select(self, r, w, timeout=None):
+ """ Wrapper for select.select because timeout is a positional arg """
+ return self._select_func(r, w, [], timeout)
+
+ __all__.append('JythonSelectSelector')
+ SelectSelector = JythonSelectSelector # Override so the wrong selector isn't used.
+
+
+if hasattr(select, "poll"):
+ class PollSelector(BaseSelector):
+ """ Poll-based selector """
+ def __init__(self):
+ super(PollSelector, self).__init__()
+ self._poll = select.poll()
+
+ def register(self, fileobj, events, data=None):
+ key = super(PollSelector, self).register(fileobj, events, data)
+ event_mask = 0
+ if events & EVENT_READ:
+ event_mask |= select.POLLIN
+ if events & EVENT_WRITE:
+ event_mask |= select.POLLOUT
+ self._poll.register(key.fd, event_mask)
+ return key
+
+ def unregister(self, fileobj):
+ key = super(PollSelector, self).unregister(fileobj)
+ self._poll.unregister(key.fd)
+ return key
+
+ def _wrap_poll(self, timeout=None):
+ """ Wrapper function for select.poll.poll() so that
+ _syscall_wrapper can work with only seconds. """
+ if timeout is not None:
+ if timeout <= 0:
+ timeout = 0
+ else:
+ # select.poll.poll() has a resolution of 1 millisecond,
+ # round away from zero to wait *at least* timeout seconds.
+ timeout = math.ceil(timeout * 1000)
+
+ result = self._poll.poll(timeout)
+ return result
+
+ def select(self, timeout=None):
+ ready = []
+ fd_events = _syscall_wrapper(self._wrap_poll, True, timeout=timeout)
+ for fd, event_mask in fd_events:
+ events = 0
+ if event_mask & ~select.POLLIN:
+ events |= EVENT_WRITE
+ if event_mask & ~select.POLLOUT:
+ events |= EVENT_READ
+
+ key = self._key_from_fd(fd)
+ if key:
+ ready.append((key, events & key.events))
+
+ return ready
+
+ __all__.append('PollSelector')
+
+if hasattr(select, "epoll"):
+ class EpollSelector(BaseSelector):
+ """ Epoll-based selector """
+ def __init__(self):
+ super(EpollSelector, self).__init__()
+ self._epoll = select.epoll()
+
+ def fileno(self):
+ return self._epoll.fileno()
+
+ def register(self, fileobj, events, data=None):
+ key = super(EpollSelector, self).register(fileobj, events, data)
+ events_mask = 0
+ if events & EVENT_READ:
+ events_mask |= select.EPOLLIN
+ if events & EVENT_WRITE:
+ events_mask |= select.EPOLLOUT
+ _syscall_wrapper(self._epoll.register, False, key.fd, events_mask)
+ return key
+
+ def unregister(self, fileobj):
+ key = super(EpollSelector, self).unregister(fileobj)
+ try:
+ _syscall_wrapper(self._epoll.unregister, False, key.fd)
+ except _ERROR_TYPES:
+ # This can occur when the fd was closed since registry.
+ pass
+ return key
+
+ def select(self, timeout=None):
+ if timeout is not None:
+ if timeout <= 0:
+ timeout = 0.0
+ else:
+ # select.epoll.poll() has a resolution of 1 millisecond
+ # but luckily takes seconds so we don't need a wrapper
+ # like PollSelector. Just for better rounding.
+ timeout = math.ceil(timeout * 1000) * 0.001
+ timeout = float(timeout)
+ else:
+ timeout = -1.0 # epoll.poll() must have a float.
+
+ # We always want at least 1 to ensure that select can be called
+ # with no file descriptors registered. Otherwise will fail.
+ max_events = max(len(self._fd_to_key), 1)
+
+ ready = []
+ fd_events = _syscall_wrapper(self._epoll.poll, True,
+ timeout=timeout,
+ maxevents=max_events)
+ for fd, event_mask in fd_events:
+ events = 0
+ if event_mask & ~select.EPOLLIN:
+ events |= EVENT_WRITE
+ if event_mask & ~select.EPOLLOUT:
+ events |= EVENT_READ
+
+ key = self._key_from_fd(fd)
+ if key:
+ ready.append((key, events & key.events))
+ return ready
+
+ def close(self):
+ self._epoll.close()
+ super(EpollSelector, self).close()
+
+ __all__.append('EpollSelector')
+
+
+if hasattr(select, "devpoll"):
+ class DevpollSelector(BaseSelector):
+ """Solaris /dev/poll selector."""
+
+ def __init__(self):
+ super(DevpollSelector, self).__init__()
+ self._devpoll = select.devpoll()
+
+ def fileno(self):
+ return self._devpoll.fileno()
+
+ def register(self, fileobj, events, data=None):
+ key = super(DevpollSelector, self).register(fileobj, events, data)
+ poll_events = 0
+ if events & EVENT_READ:
+ poll_events |= select.POLLIN
+ if events & EVENT_WRITE:
+ poll_events |= select.POLLOUT
+ self._devpoll.register(key.fd, poll_events)
+ return key
+
+ def unregister(self, fileobj):
+ key = super(DevpollSelector, self).unregister(fileobj)
+ self._devpoll.unregister(key.fd)
+ return key
+
+ def _wrap_poll(self, timeout=None):
+ """ Wrapper function for select.poll.poll() so that
+ _syscall_wrapper can work with only seconds. """
+ if timeout is not None:
+ if timeout <= 0:
+ timeout = 0
+ else:
+ # select.devpoll.poll() has a resolution of 1 millisecond,
+ # round away from zero to wait *at least* timeout seconds.
+ timeout = math.ceil(timeout * 1000)
+
+ result = self._devpoll.poll(timeout)
+ return result
+
+ def select(self, timeout=None):
+ ready = []
+ fd_events = _syscall_wrapper(self._wrap_poll, True, timeout=timeout)
+ for fd, event_mask in fd_events:
+ events = 0
+ if event_mask & ~select.POLLIN:
+ events |= EVENT_WRITE
+ if event_mask & ~select.POLLOUT:
+ events |= EVENT_READ
+
+ key = self._key_from_fd(fd)
+ if key:
+ ready.append((key, events & key.events))
+
+ return ready
+
+ def close(self):
+ self._devpoll.close()
+ super(DevpollSelector, self).close()
+
+ __all__.append('DevpollSelector')
+
+
+if hasattr(select, "kqueue"):
+ class KqueueSelector(BaseSelector):
+ """ Kqueue / Kevent-based selector """
+ def __init__(self):
+ super(KqueueSelector, self).__init__()
+ self._kqueue = select.kqueue()
+
+ def fileno(self):
+ return self._kqueue.fileno()
+
+ def register(self, fileobj, events, data=None):
+ key = super(KqueueSelector, self).register(fileobj, events, data)
+ if events & EVENT_READ:
+ kevent = select.kevent(key.fd,
+ select.KQ_FILTER_READ,
+ select.KQ_EV_ADD)
+
+ _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0)
+
+ if events & EVENT_WRITE:
+ kevent = select.kevent(key.fd,
+ select.KQ_FILTER_WRITE,
+ select.KQ_EV_ADD)
+
+ _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0)
+
+ return key
+
+ def unregister(self, fileobj):
+ key = super(KqueueSelector, self).unregister(fileobj)
+ if key.events & EVENT_READ:
+ kevent = select.kevent(key.fd,
+ select.KQ_FILTER_READ,
+ select.KQ_EV_DELETE)
+ try:
+ _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0)
+ except _ERROR_TYPES:
+ pass
+ if key.events & EVENT_WRITE:
+ kevent = select.kevent(key.fd,
+ select.KQ_FILTER_WRITE,
+ select.KQ_EV_DELETE)
+ try:
+ _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0)
+ except _ERROR_TYPES:
+ pass
+
+ return key
+
+ def select(self, timeout=None):
+ if timeout is not None:
+ timeout = max(timeout, 0)
+
+ max_events = len(self._fd_to_key) * 2
+ ready_fds = {}
+
+ kevent_list = _syscall_wrapper(self._kqueue.control, True,
+ None, max_events, timeout)
+
+ for kevent in kevent_list:
+ fd = kevent.ident
+ event_mask = kevent.filter
+ events = 0
+ if event_mask == select.KQ_FILTER_READ:
+ events |= EVENT_READ
+ if event_mask == select.KQ_FILTER_WRITE:
+ events |= EVENT_WRITE
+
+ key = self._key_from_fd(fd)
+ if key:
+ if key.fd not in ready_fds:
+ ready_fds[key.fd] = (key, events & key.events)
+ else:
+ old_events = ready_fds[key.fd][1]
+ ready_fds[key.fd] = (key, (events | old_events) & key.events)
+
+ return list(ready_fds.values())
+
+ def close(self):
+ self._kqueue.close()
+ super(KqueueSelector, self).close()
+
+ __all__.append('KqueueSelector')
+
+
+def _can_allocate(struct):
+ """ Checks that select structs can be allocated by the underlying
+ operating system, not just advertised by the select module. We don't
+ check select() because we'll be hopeful that most platforms that
+ don't have it available will not advertise it. (ie: GAE) """
+ try:
+ # select.poll() objects won't fail until used.
+ if struct == 'poll':
+ p = select.poll()
+ p.poll(0)
+
+ # All others will fail on allocation.
+ else:
+ getattr(select, struct)().close()
+ return True
+ except (OSError, AttributeError):
+ return False
+
+
+# Python 3.5 uses a more direct route to wrap system calls to increase speed.
+if sys.version_info >= (3, 5):
+ def _syscall_wrapper(func, _, *args, **kwargs):
+ """ This is the short-circuit version of the below logic
+ because in Python 3.5+ all selectors restart system calls. """
+ return func(*args, **kwargs)
+else:
+    def _syscall_wrapper(func, recalc_timeout, *args, **kwargs):
+        """ Wrapper function for syscalls that could fail due to EINTR.
+        All functions should be retried if there is time left in the timeout
+        in accordance with PEP 475. """
+        timeout = kwargs.get("timeout", None)
+        if timeout is None:
+            expires = None
+            recalc_timeout = False
+        else:
+            timeout = float(timeout)
+            if timeout < 0.0:  # Timeout less than 0 treated as no timeout.
+                expires = None
+            else:
+                expires = monotonic() + timeout
+
+        args = list(args)
+        if recalc_timeout and "timeout" not in kwargs:
+            raise ValueError(
+                "Timeout must be in args or kwargs to be recalculated")
+
+        result = _SYSCALL_SENTINEL
+        while result is _SYSCALL_SENTINEL:
+            try:
+                result = func(*args, **kwargs)
+            # OSError is thrown by select.select
+            # IOError is thrown by select.epoll.poll
+            # select.error is thrown by select.poll.poll
+            # Aren't we thankful for Python 3.x rework for exceptions?
+            except (OSError, IOError, select.error) as e:
+                # select.error wasn't a subclass of OSError in the past.
+                errcode = None
+                if hasattr(e, "errno"):
+                    errcode = e.errno
+                elif hasattr(e, "args"):
+                    errcode = e.args[0]
+
+                # Also test for the Windows equivalent of EINTR.
+                is_interrupt = (errcode == errno.EINTR or (hasattr(errno, "WSAEINTR") and
+                                                           errcode == errno.WSAEINTR))
+
+                if is_interrupt:
+                    if expires is not None:
+                        current_time = monotonic()
+                        if current_time > expires:
+                            # OSError takes errno positionally; the keyword
+                            # form raises TypeError instead of timing out.
+                            raise OSError(errno.ETIMEDOUT)
+                        if recalc_timeout:
+                            if "timeout" in kwargs:
+                                kwargs["timeout"] = expires - current_time
+                    continue
+                raise
+        return result
+
+
+# Choose the best implementation, roughly:
+# kqueue == devpoll == epoll > poll > select
+# select() also can't accept a FD > FD_SETSIZE (usually around 1024)
+def DefaultSelector():
+ """ This function serves as a first call for DefaultSelector to
+ detect if the select module is being monkey-patched incorrectly
+ by eventlet, greenlet, and preserve proper behavior. """
+ global _DEFAULT_SELECTOR
+ if _DEFAULT_SELECTOR is None:
+ if platform.system() == 'Java': # Platform-specific: Jython
+ _DEFAULT_SELECTOR = JythonSelectSelector
+ elif _can_allocate('kqueue'):
+ _DEFAULT_SELECTOR = KqueueSelector
+ elif _can_allocate('devpoll'):
+ _DEFAULT_SELECTOR = DevpollSelector
+ elif _can_allocate('epoll'):
+ _DEFAULT_SELECTOR = EpollSelector
+ elif _can_allocate('poll'):
+ _DEFAULT_SELECTOR = PollSelector
+ elif hasattr(select, 'select'):
+ _DEFAULT_SELECTOR = SelectSelector
+ else: # Platform-specific: AppEngine
+ raise RuntimeError('Platform does not have a selector.')
+ return _DEFAULT_SELECTOR()
--- a/mercurial/server.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/server.py Wed Jul 19 07:51:41 2017 -0500
@@ -8,7 +8,6 @@
from __future__ import absolute_import
import os
-import sys
import tempfile
from .i18n import _
@@ -19,6 +18,7 @@
commandserver,
error,
hgweb,
+ pycompat,
util,
)
@@ -29,11 +29,11 @@
def writepid(pid):
if opts['pid_file']:
if appendpid:
- mode = 'a'
+ mode = 'ab'
else:
- mode = 'w'
+ mode = 'wb'
fp = open(opts['pid_file'], mode)
- fp.write(str(pid) + '\n')
+ fp.write('%d\n' % pid)
fp.close()
if opts['daemon'] and not opts['daemon_postexec']:
@@ -42,7 +42,7 @@
os.close(lockfd)
try:
if not runargs:
- runargs = util.hgcmd() + sys.argv[1:]
+ runargs = util.hgcmd() + pycompat.sysargv[1:]
runargs.append('--daemon-postexec=unlink:%s' % lockpath)
# Don't pass --cwd to the child process, because we've already
# changed directory.
@@ -123,7 +123,7 @@
if opts.get('port'):
opts['port'] = util.getport(opts.get('port'))
- alluis = set([ui])
+ alluis = {ui}
if repo:
baseui = repo.baseui
alluis.update([repo.baseui, repo.ui])
--- a/mercurial/setdiscovery.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/setdiscovery.py Wed Jul 19 07:51:41 2017 -0500
@@ -53,6 +53,7 @@
from . import (
dagutil,
error,
+ util,
)
def _updatesample(dag, nodes, sample, quicksamplesize=0):
@@ -136,6 +137,8 @@
'''Return a tuple (common, anyincoming, remoteheads) used to identify
missing nodes from or in remote.
'''
+ start = util.timer()
+
roundtrips = 0
cl = local.changelog
dag = dagutil.revlogdag(cl)
@@ -235,15 +238,21 @@
# common.bases can include nullrev, but our contract requires us to not
# return any heads in that case, so discard that
result.discard(nullrev)
+ elapsed = util.timer() - start
ui.progress(_('searching'), None)
- ui.debug("%d total queries\n" % roundtrips)
+ ui.debug("%d total queries in %.4fs\n" % (roundtrips, elapsed))
+ msg = ('found %d common and %d unknown server heads,'
+ ' %d roundtrips in %.4fs\n')
+ missing = set(result) - set(srvheads)
+ ui.log('discovery', msg, len(result), len(missing), roundtrips,
+ elapsed)
if not result and srvheadhashes != [nullid]:
if abortwhenunrelated:
raise error.Abort(_("repository is unrelated"))
else:
ui.warn(_("warning: repository is unrelated\n"))
- return (set([nullid]), True, srvheadhashes,)
+ return ({nullid}, True, srvheadhashes,)
anyincoming = (srvheadhashes != [nullid])
return dag.externalizeall(result), anyincoming, srvheadhashes
--- a/mercurial/similar.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/similar.py Wed Jul 19 07:51:41 2017 -0500
@@ -9,7 +9,6 @@
from .i18n import _
from . import (
- bdiff,
mdiff,
)
@@ -56,10 +55,10 @@
def _score(fctx, otherdata):
orig, lines = otherdata
text = fctx.data()
- # bdiff.blocks() returns blocks of matching lines
+ # mdiff.blocks() returns blocks of matching lines
# count the number of bytes in each
equal = 0
- matches = bdiff.blocks(text, orig)
+ matches = mdiff.blocks(text, orig)
for x1, x2, y1, y2 in matches:
for line in lines[y1:y2]:
equal += len(line)
--- a/mercurial/simplemerge.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/simplemerge.py Wed Jul 19 07:51:41 2017 -0500
@@ -24,6 +24,7 @@
from . import (
error,
mdiff,
+ pycompat,
util,
vfs as vfsmod,
)
@@ -455,7 +456,8 @@
extrakwargs['base_marker'] = '|||||||'
extrakwargs['name_base'] = name_base
extrakwargs['minimize'] = False
- for line in m3.merge_lines(name_a=name_a, name_b=name_b, **extrakwargs):
+ for line in m3.merge_lines(name_a=name_a, name_b=name_b,
+ **pycompat.strkwargs(extrakwargs)):
out.write(line)
if not opts.get('print'):
--- a/mercurial/smartset.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/smartset.py Wed Jul 19 07:51:41 2017 -0500
@@ -8,6 +8,7 @@
from __future__ import absolute_import
from . import (
+ error,
util,
)
@@ -116,7 +117,7 @@
"""reverse the expected iteration order"""
raise NotImplementedError()
- def sort(self, reverse=True):
+ def sort(self, reverse=False):
"""get the set to iterate in an ascending or descending order"""
raise NotImplementedError()
@@ -155,6 +156,28 @@
condition = util.cachefunc(condition)
return filteredset(self, condition, condrepr)
+ def slice(self, start, stop):
+ """Return new smartset that contains selected elements from this set"""
+ if start < 0 or stop < 0:
+ raise error.ProgrammingError('negative index not allowed')
+ return self._slice(start, stop)
+
+ def _slice(self, start, stop):
+ # sub classes may override this. start and stop must not be negative,
+ # but start > stop is allowed, which should be an empty set.
+ ys = []
+ it = iter(self)
+ for x in xrange(start):
+ y = next(it, None)
+ if y is None:
+ break
+ for x in xrange(stop - start):
+ y = next(it, None)
+ if y is None:
+ break
+ ys.append(y)
+ return baseset(ys, datarepr=('slice=%d:%d %r', start, stop, self))
+
class baseset(abstractsmartset):
"""Basic data structure that represents a revset and contains the basic
operation that it should be able to perform.
@@ -245,7 +268,7 @@
@util.propertycache
def _list(self):
# _list is only lazily constructed if we have _set
- assert '_set' in self.__dict__
+ assert r'_set' in self.__dict__
return list(self._set)
def __iter__(self):
@@ -349,6 +372,18 @@
def __sub__(self, other):
return self._fastsetop(other, '__sub__')
+ def _slice(self, start, stop):
+ # creating new list should be generally cheaper than iterating items
+ if self._ascending is None:
+ return baseset(self._list[start:stop], istopo=self._istopo)
+
+ data = self._asclist
+ if not self._ascending:
+ start, stop = max(len(data) - stop, 0), max(len(data) - start, 0)
+ s = baseset(data[start:stop], istopo=self._istopo)
+ s._ascending = self._ascending
+ return s
+
def __repr__(self):
d = {None: '', False: '-', True: '+'}[self._ascending]
s = _formatsetrepr(self._datarepr)
@@ -731,6 +766,11 @@
be iterated more than once.
When asked for membership it generates values until either it finds the
requested one or has gone through all the elements in the generator
+
+ >>> xs = generatorset([0, 1, 4], iterasc=True)
+ >>> assert xs.last() == xs.last()
+ >>> xs.last() # cached
+ 4
"""
def __init__(self, gen, iterasc=None):
"""
@@ -836,7 +876,10 @@
if i < _len(genlist):
yield genlist[i]
else:
- yield _next(nextgen)
+ try:
+ yield _next(nextgen)
+ except StopIteration:
+ return
i += 1
return gen()
@@ -899,14 +942,29 @@
# we need to consume all and try again
for x in self._consumegen():
pass
- return self.first()
+ return self.last()
return next(it(), None)
def __repr__(self):
d = {False: '-', True: '+'}[self._ascending]
return '<%s%s>' % (type(self).__name__, d)
-class spanset(abstractsmartset):
def spanset(repo, start=0, end=None):
    """Create a spanset that represents a range of repository revisions

    start: first revision included the set (default to 0)
    end: first revision excluded (last+1) (default to len(repo))

    Spanset will be descending if `end` < `start`.
    """
    if end is None:
        end = len(repo)
    ascending = start <= end
    if not ascending:
        # Normalize to a half-open ascending [start, end) range; the
        # requested direction is remembered via ``ascending``.
        start, end = end + 1, start + 1
    return _spanset(start, end, ascending, repo.changelog.filteredrevs)
+
+class _spanset(abstractsmartset):
"""Duck type for baseset class which represents a range of revisions and
can work lazily and without having all the range in memory
@@ -916,23 +974,11 @@
- revision filtered with this repoview will be skipped.
"""
- def __init__(self, repo, start=0, end=None):
- """
- start: first revision included the set
- (default to 0)
- end: first revision excluded (last+1)
- (default to len(repo)
-
- Spanset will be descending if `end` < `start`.
- """
- if end is None:
- end = len(repo)
- self._ascending = start <= end
- if not self._ascending:
- start, end = end + 1, start +1
    def __init__(self, start, end, ascending, hiddenrevs):
        # ``start``/``end`` delimit a half-open [start, end) revision range.
        # NOTE(review): the ``spanset`` factory pre-normalizes so that
        # start <= end; direct constructors are assumed to do the same.
        self._start = start
        self._end = end
        self._ascending = ascending
        self._hiddenrevs = hiddenrevs
def sort(self, reverse=False):
self._ascending = not reverse
@@ -1018,12 +1064,24 @@
return x
return None
    def _slice(self, start, stop):
        """Slice positions [start:stop) of the span, honoring sort order."""
        if self._hiddenrevs:
            # unoptimized since all hidden revisions in range have to be
            # scanned
            return super(_spanset, self)._slice(start, stop)
        if self._ascending:
            # Clamp to the underlying range so over-long slices stay valid.
            x = min(self._start + start, self._end)
            y = min(self._start + stop, self._end)
        else:
            # Descending: slice positions count down from the top of range.
            x = max(self._end - stop, self._start)
            y = max(self._end - start, self._start)
        return _spanset(x, y, self._ascending, self._hiddenrevs)
+
def __repr__(self):
d = {False: '-', True: '+'}[self._ascending]
- return '<%s%s %d:%d>' % (type(self).__name__, d,
- self._start, self._end - 1)
+ return '<%s%s %d:%d>' % (type(self).__name__.lstrip('_'), d,
+ self._start, self._end)
-class fullreposet(spanset):
+class fullreposet(_spanset):
"""a set containing all revisions in the repo
This class exists to host special optimization and magic to handle virtual
@@ -1031,7 +1089,8 @@
"""
def __init__(self, repo):
- super(fullreposet, self).__init__(repo)
+ super(fullreposet, self).__init__(0, len(repo), True,
+ repo.changelog.filteredrevs)
def __and__(self, other):
"""As self contains the whole repo, all of the other set should also be
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/sparse.py Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,689 @@
+# sparse.py - functionality for sparse checkouts
+#
+# Copyright 2014 Facebook, Inc.
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from __future__ import absolute_import
+
+import collections
+import hashlib
+import os
+
+from .i18n import _
+from .node import nullid
+from . import (
+ error,
+ match as matchmod,
+ merge as mergemod,
+ pycompat,
+ scmutil,
+ util,
+)
+
+# Whether sparse features are enabled. This variable is intended to be
+# temporary to facilitate porting sparse to core. It should eventually be
+# a per-repo option, possibly a repo requirement.
+enabled = False
+
def parseconfig(ui, raw):
    """Parse sparse config file content.

    ``raw`` is the text of a sparse config file. Returns a tuple of
    (includes, excludes, profiles) sets. Include and exclude patterns are
    collected under ``[include]``/``[exclude]`` section headers; lines of
    the form ``%include <path>`` name profile files to pull in.
    """
    includes, excludes, profiles = set(), set(), set()
    # ``section`` points at whichever set is currently being filled;
    # ``seensection`` records that a section header has been seen at all.
    section = None
    seensection = False

    for line in raw.split('\n'):
        line = line.strip()
        if not line or line.startswith('#'):
            # empty or comment line, skip
            continue
        if line.startswith('%include '):
            profile = line[9:].strip()
            if profile:
                profiles.add(profile)
        elif line == '[include]':
            # The format requires all includes before any excludes.
            if seensection and section != includes:
                # TODO pass filename into this API so we can report it.
                raise error.Abort(_('sparse config cannot have includes ' +
                                    'after excludes'))
            seensection = True
            section = includes
        elif line == '[exclude]':
            seensection = True
            section = excludes
        else:
            # A pattern line; it must appear under a section header.
            if section is None:
                raise error.Abort(_('sparse config entry outside of '
                                    'section: %s') % line,
                                  hint=_('add an [include] or [exclude] line '
                                         'to declare the entry type'))

            if line.strip().startswith('/'):
                ui.warn(_('warning: sparse profile cannot use' +
                          ' paths starting with /, ignoring %s\n') % line)
                continue
            section.add(line)

    return includes, excludes, profiles
+
# Exists as separate function to facilitate monkeypatching.
def readprofile(repo, profile, changeid):
    """Resolve the raw content of a sparse profile file."""
    # TODO add some kind of cache here because this incurs a manifest
    # resolve and can be slow.
    fctx = repo.filectx(profile, changeid=changeid)
    return fctx.data()
+
def patternsforrev(repo, rev):
    """Obtain sparse checkout patterns for the given rev.

    Returns a tuple of iterables representing includes, excludes, and
    patterns.

    Profiles referenced via ``%include`` are resolved transitively; the
    returned profiles value is the set of all profiles visited.
    """
    # Feature isn't enabled. No-op.
    if not enabled:
        return set(), set(), set()

    raw = repo.vfs.tryread('sparse')
    if not raw:
        # No sparse config: everything is empty.
        return set(), set(), set()

    if rev is None:
        raise error.Abort(_('cannot parse sparse patterns from working '
                            'directory'))

    includes, excludes, profiles = parseconfig(repo.ui, raw)
    ctx = repo[rev]

    if profiles:
        # Worklist loop: profiles may reference sub-profiles; ``visited``
        # prevents re-reading a profile (and guards against cycles).
        visited = set()
        while profiles:
            profile = profiles.pop()
            if profile in visited:
                continue

            visited.add(profile)

            try:
                raw = readprofile(repo, profile, rev)
            except error.ManifestLookupError:
                # Profile file doesn't exist in this rev; warn or debug
                # depending on config, then carry on without it.
                msg = (
                    "warning: sparse profile '%s' not found "
                    "in rev %s - ignoring it\n" % (profile, ctx))
                # experimental config: sparse.missingwarning
                if repo.ui.configbool(
                        'sparse', 'missingwarning'):
                    repo.ui.warn(msg)
                else:
                    repo.ui.debug(msg)
                continue

            pincludes, pexcludes, subprofs = parseconfig(repo.ui, raw)
            includes.update(pincludes)
            excludes.update(pexcludes)
            profiles.update(subprofs)

        profiles = visited

    if includes:
        # Whenever includes narrow the checkout, keep .hg* files (e.g.
        # .hgignore) matching so repo metadata stays in the checkout.
        includes.add('.hg*')

    return includes, excludes, profiles
+
def activeconfig(repo):
    """Determine the active sparse config rules.

    Rules are constructed by reading the current sparse config and bringing in
    referenced profiles from parents of the working directory.
    """
    # A null parent (fresh repo, or after update to null) contributes
    # nothing and is skipped.
    revs = [repo.changelog.rev(node) for node in
            repo.dirstate.parents() if node != nullid]

    allincludes = set()
    allexcludes = set()
    allprofiles = set()

    # Union the per-parent configurations together.
    for config in (patternsforrev(repo, rev) for rev in revs):
        includes, excludes, profiles = config
        allincludes.update(includes)
        allexcludes.update(excludes)
        allprofiles.update(profiles)

    return allincludes, allexcludes, allprofiles
+
def configsignature(repo, includetemp=True):
    """Obtain the signature string for the current sparse configuration.

    This is used to construct a cache key for matchers. The signature is
    '<sha1 of sparse config> <sha1 of temp config or "0">'; both hashes are
    memoized in ``repo._sparsesignaturecache``.
    """
    cache = repo._sparsesignaturecache

    signature = cache.get('signature')
    tempsignature = cache.get('tempsignature') if includetemp else '0'

    # Recompute when the main signature is missing, or when the temp
    # signature is wanted but not cached.
    stale = signature is None or (includetemp and tempsignature is None)
    if stale:
        raw = repo.vfs.tryread('sparse')
        signature = hashlib.sha1(raw).hexdigest()
        cache['signature'] = signature

        if includetemp:
            tempraw = repo.vfs.tryread('tempsparse')
            tempsignature = hashlib.sha1(tempraw).hexdigest()
            cache['tempsignature'] = tempsignature

    return '%s %s' % (signature, tempsignature)
+
def writeconfig(repo, includes, excludes, profiles):
    """Write the sparse config file given a sparse configuration.

    Profiles are emitted first as ``%include`` lines, then includes under
    ``[include]``, then excludes under ``[exclude]``, each sorted.
    """
    with repo.vfs('sparse', 'wb') as fh:
        for profile in sorted(profiles):
            fh.write('%%include %s\n' % profile)

        for header, entries in (('[include]\n', includes),
                                ('[exclude]\n', excludes)):
            if entries:
                fh.write(header)
                for entry in sorted(entries):
                    fh.write(entry)
                    fh.write('\n')

    # The on-disk config changed, so cached signatures are now invalid.
    repo._sparsesignaturecache.clear()
+
def readtemporaryincludes(repo):
    """Return the set of temporarily included paths (empty if none)."""
    raw = repo.vfs.tryread('tempsparse')
    # 'tempsparse' holds one path per line; absent/empty means no temp
    # includes.
    return set(raw.split('\n')) if raw else set()
+
def writetemporaryincludes(repo, includes):
    """Persist ``includes`` as the temporary sparse include set."""
    content = '\n'.join(sorted(includes))
    repo.vfs.write('tempsparse', content)
    # Invalidate cached signatures; the temp config just changed.
    repo._sparsesignaturecache.clear()
+
def addtemporaryincludes(repo, additional):
    """Add ``additional`` paths to the temporary sparse include set."""
    includes = readtemporaryincludes(repo)
    includes.update(additional)
    writetemporaryincludes(repo, includes)
+
def prunetemporaryincludes(repo):
    """Remove temporary includes no longer selected by the sparse config.

    Files pulled in via the temporary include mechanism are removed from
    disk and the dirstate once they fall outside the persistent sparse
    matcher, and the 'tempsparse' file is deleted.
    """
    if not enabled or not repo.vfs.exists('tempsparse'):
        return

    s = repo.status()
    if s.modified or s.added or s.removed or s.deleted:
        # Still have pending changes. Don't bother trying to prune.
        return

    # Match against the persistent config only, ignoring temp includes.
    sparsematch = matcher(repo, includetemp=False)
    dirstate = repo.dirstate
    actions = []
    dropped = []
    tempincludes = readtemporaryincludes(repo)
    for file in tempincludes:
        # Only drop files the dirstate tracks and the config excludes.
        if file in dirstate and not sparsematch(file):
            message = _('dropping temporarily included sparse files')
            actions.append((file, None, message))
            dropped.append(file)

    typeactions = collections.defaultdict(list)
    # 'r' actions remove the files from the working directory.
    typeactions['r'] = actions
    mergemod.applyupdates(repo, typeactions, repo[None], repo['.'], False)

    # Fix dirstate
    for file in dropped:
        dirstate.drop(file)

    repo.vfs.unlink('tempsparse')
    repo._sparsesignaturecache.clear()
    msg = _('cleaned up %d temporarily added file(s) from the '
            'sparse checkout\n')
    repo.ui.status(msg % len(tempincludes))
+
def forceincludematcher(matcher, includes):
    """Return a matcher that matches any forced include unconditionally,
    falling back to ``matcher`` for everything else."""
    # Each forced include becomes an exact 'path' pattern with no source.
    forced = matchmod.includematcher(
        '', '', [('path', include, '') for include in includes])
    return matchmod.unionmatcher([forced, matcher])
+
def matcher(repo, revs=None, includetemp=True):
    """Obtain a matcher for sparse working directories for the given revs.

    If multiple revisions are specified, the matcher is the union of all
    revs.

    ``includetemp`` indicates whether to use the temporary sparse profile.
    """
    # If sparse isn't enabled, sparse matcher matches everything.
    if not enabled:
        return matchmod.always(repo.root, '')

    # Default to the working directory parents, skipping a null parent.
    if not revs or revs == [None]:
        revs = [repo.changelog.rev(node)
                for node in repo.dirstate.parents() if node != nullid]

    signature = configsignature(repo, includetemp=includetemp)

    # Cache key combines the config signature with the revs being matched.
    key = '%s %s' % (signature, ' '.join(map(pycompat.bytestr, revs)))

    # NOTE(review): truthiness test -- assumes cached matchers are always
    # truthy objects.
    result = repo._sparsematchercache.get(key)
    if result:
        return result

    matchers = []
    for rev in revs:
        try:
            includes, excludes, profiles = patternsforrev(repo, rev)

            if includes or excludes:
                # Explicitly include subdirectories of includes so
                # status will walk them down to the actual include.
                subdirs = set()
                for include in includes:
                    # TODO consider using posix path functions here so Windows
                    # \ directory separators don't come into play.
                    dirname = os.path.dirname(include)
                    # basename is used to avoid issues with absolute
                    # paths (which on Windows can include the drive).
                    while os.path.basename(dirname):
                        subdirs.add(dirname)
                        dirname = os.path.dirname(dirname)

                matcher = matchmod.match(repo.root, '', [],
                                         include=includes, exclude=excludes,
                                         default='relpath')
                if subdirs:
                    matcher = forceincludematcher(matcher, subdirs)
                matchers.append(matcher)
        except IOError:
            # A rev whose config can't be read contributes no matcher.
            pass

    # No rev produced patterns: the sparse checkout is unrestricted.
    if not matchers:
        result = matchmod.always(repo.root, '')
    elif len(matchers) == 1:
        result = matchers[0]
    else:
        result = matchmod.unionmatcher(matchers)

    if includetemp:
        tempincludes = readtemporaryincludes(repo)
        result = forceincludematcher(result, tempincludes)

    repo._sparsematchercache[key] = result

    return result
+
def filterupdatesactions(repo, wctx, mctx, branchmerge, actions):
    """Filter updates to only lay out files that match the sparse rules.

    ``actions`` maps file -> (type, args, msg) merge actions; a filtered
    mapping of the same shape is returned.
    """
    if not enabled:
        return actions

    oldrevs = [pctx.rev() for pctx in wctx.parents()]
    oldsparsematch = matcher(repo, oldrevs)

    if oldsparsematch.always():
        # Old config matched everything; nothing to filter.
        return actions

    files = set()
    prunedactions = {}

    if branchmerge:
        # If we're merging, use the wctx filter, since we're merging into
        # the wctx.
        sparsematch = matcher(repo, [wctx.parents()[0].rev()])
    else:
        # If we're updating, use the target context's filter, since we're
        # moving to the target context.
        sparsematch = matcher(repo, [mctx.rev()])

    temporaryfiles = []
    for file, action in actions.iteritems():
        type, args, msg = action
        files.add(file)
        if sparsematch(file):
            # In sparse scope: keep the action as-is.
            prunedactions[file] = action
        elif type == 'm':
            # Merge action on an out-of-scope file: pull it in temporarily.
            temporaryfiles.append(file)
            prunedactions[file] = action
        elif branchmerge:
            if type != 'k':
                temporaryfiles.append(file)
                prunedactions[file] = action
        elif type == 'f':
            prunedactions[file] = action
        elif file in wctx:
            # Out of scope and present in wctx: schedule a removal.
            prunedactions[file] = ('r', args, msg)

    if len(temporaryfiles) > 0:
        repo.ui.status(_('temporarily included %d file(s) in the sparse '
                         'checkout for merging\n') % len(temporaryfiles))
        addtemporaryincludes(repo, temporaryfiles)

        # Add the new files to the working copy so they can be merged, etc
        actions = []
        message = 'temporarily adding to sparse checkout'
        wctxmanifest = repo[None].manifest()
        for file in temporaryfiles:
            if file in wctxmanifest:
                fctx = repo[None][file]
                actions.append((file, (fctx.flags(), False), message))

        typeactions = collections.defaultdict(list)
        # 'g' actions fetch the files into the working directory.
        typeactions['g'] = actions
        mergemod.applyupdates(repo, typeactions, repo[None], repo['.'],
                              False)

        dirstate = repo.dirstate
        for file, flags, msg in actions:
            dirstate.normal(file)

    profiles = activeconfig(repo)[2]
    changedprofiles = profiles & files
    # If an active profile changed during the update, refresh the checkout.
    # Don't do this during a branch merge, since all incoming changes should
    # have been handled by the temporary includes above.
    if changedprofiles and not branchmerge:
        mf = mctx.manifest()
        for file in mf:
            old = oldsparsematch(file)
            new = sparsematch(file)
            if not old and new:
                # Newly in scope: fetch it.
                flags = mf.flags(file)
                prunedactions[file] = ('g', (flags, False), '')
            elif old and not new:
                # Newly out of scope: remove it.
                prunedactions[file] = ('r', [], '')

    return prunedactions
+
def refreshwdir(repo, origstatus, origsparsematch, force=False):
    """Refreshes working directory by taking sparse config into account.

    The old status and sparse matcher is compared against the current sparse
    matcher.

    Will abort if a file with pending changes is being excluded or included
    unless ``force`` is True.

    Returns a tuple of (added, dropped, lookup) file lists.
    """
    # Verify there are no pending changes
    pending = set()
    pending.update(origstatus.modified)
    pending.update(origstatus.added)
    pending.update(origstatus.removed)
    sparsematch = matcher(repo)
    abort = False

    for f in pending:
        if not sparsematch(f):
            repo.ui.warn(_("pending changes to '%s'\n") % f)
            # With force, keep going but still warn about each file.
            abort = not force

    if abort:
        raise error.Abort(_('could not update sparseness due to pending '
                            'changes'))

    # Calculate actions
    dirstate = repo.dirstate
    ctx = repo['.']
    added = []
    lookup = []
    dropped = []
    mf = ctx.manifest()
    files = set(mf)

    actions = {}

    for file in files:
        old = origsparsematch(file)
        new = sparsematch(file)
        # Add files that are newly included, or that don't exist in
        # the dirstate yet.
        if (new and not old) or (old and new and not file in dirstate):
            fl = mf.flags(file)
            if repo.wvfs.exists(file):
                # 'e': file already on disk; its content state is unknown,
                # so it goes to the lookup list.
                actions[file] = ('e', (fl,), '')
                lookup.append(file)
            else:
                # 'g': fetch the file from the manifest.
                actions[file] = ('g', (fl, False), '')
                added.append(file)
        # Drop files that are newly excluded, or that still exist in
        # the dirstate.
        elif (old and not new) or (not old and not new and file in dirstate):
            dropped.append(file)
            if file not in pending:
                # 'r': remove from the working directory.
                actions[file] = ('r', [], '')

    # Verify there are no pending changes in newly included files
    abort = False
    for file in lookup:
        repo.ui.warn(_("pending changes to '%s'\n") % file)
        abort = not force
    if abort:
        raise error.Abort(_('cannot change sparseness due to pending '
                            'changes (delete the files or use '
                            '--force to bring them back dirty)'))

    # Check for files that were only in the dirstate.
    for file, state in dirstate.iteritems():
        if not file in files:
            old = origsparsematch(file)
            new = sparsematch(file)
            if old and not new:
                dropped.append(file)

    # Apply changes to disk
    typeactions = dict((m, []) for m in 'a f g am cd dc r dm dg m e k'.split())
    for f, (m, args, msg) in actions.iteritems():
        # Defensive: tolerate action types beyond the pre-seeded ones.
        if m not in typeactions:
            typeactions[m] = []
        typeactions[m].append((f, args, msg))

    mergemod.applyupdates(repo, typeactions, repo[None], repo['.'], False)

    # Fix dirstate
    for file in added:
        dirstate.normal(file)

    for file in dropped:
        dirstate.drop(file)

    for file in lookup:
        # File exists on disk, and we're bringing it back in an unknown state.
        dirstate.normallookup(file)

    return added, dropped, lookup
+
def aftercommit(repo, node):
    """Perform actions after a working directory commit."""
    # This function is called unconditionally, even if sparse isn't
    # enabled.
    ctx = repo[node]

    # profiles will only have data if sparse is enabled.
    profiles = patternsforrev(repo, ctx.rev())[2]

    changedfiles = set(ctx.files())
    if profiles & changedfiles:
        # The commit touched an active profile, so the sparse matcher may
        # now select different files: refresh the working directory.
        status = repo.status()
        sparsematch = matcher(repo)
        refreshwdir(repo, status, sparsematch, force=True)

    prunetemporaryincludes(repo)
+
def _updateconfigandrefreshwdir(repo, includes, excludes, profiles,
                                force=False, removing=False):
    """Update the sparse config and working directory state.

    ``includes``/``excludes``/``profiles`` form the new sparse config.
    ``removing`` indicates sparseness is being disabled entirely, which
    drops the 'exp-sparse' requirement. On failure, requirements and the
    on-disk config are rolled back. Returns refreshwdir()'s result.
    """
    raw = repo.vfs.tryread('sparse')
    oldincludes, oldexcludes, oldprofiles = parseconfig(repo.ui, raw)

    oldstatus = repo.status()
    oldmatch = matcher(repo)
    oldrequires = set(repo.requirements)

    # TODO remove this try..except once the matcher integrates better
    # with dirstate. We currently have to write the updated config
    # because that will invalidate the matcher cache and force a
    # re-read. We ideally want to update the cached matcher on the
    # repo instance then flush the new config to disk once wdir is
    # updated. But this requires massive rework to matcher() and its
    # consumers.

    if 'exp-sparse' in oldrequires and removing:
        repo.requirements.discard('exp-sparse')
        scmutil.writerequires(repo.vfs, repo.requirements)
    elif 'exp-sparse' not in oldrequires:
        repo.requirements.add('exp-sparse')
        scmutil.writerequires(repo.vfs, repo.requirements)

    try:
        writeconfig(repo, includes, excludes, profiles)
        return refreshwdir(repo, oldstatus, oldmatch, force=force)
    except Exception:
        # Restore the previous requirements and config so the repo is left
        # in its pre-call state, then re-raise.
        if repo.requirements != oldrequires:
            repo.requirements.clear()
            repo.requirements |= oldrequires
            scmutil.writerequires(repo.vfs, repo.requirements)
        writeconfig(repo, oldincludes, oldexcludes, oldprofiles)
        raise
+
def clearrules(repo, force=False):
    """Clears include/exclude rules from the sparse config.

    The remaining sparse config only has profiles, if defined. The working
    directory is refreshed, as needed.
    """
    with repo.wlock():
        config = parseconfig(repo.ui, repo.vfs.tryread('sparse'))
        includes, excludes, profiles = config

        # No rules to clear means no config write and no wdir refresh.
        if includes or excludes:
            _updateconfigandrefreshwdir(repo, set(), set(), profiles,
                                        force=force)
+
def importfromfiles(repo, opts, paths, force=False):
    """Import sparse config rules from files.

    ``paths`` name sparse config files on disk whose rules are merged into
    the current config. The updated sparse config is written out and the
    working directory is refreshed, as needed.
    """
    with repo.wlock():
        # read current configuration
        raw = repo.vfs.tryread('sparse')
        includes, excludes, profiles = parseconfig(repo.ui, raw)
        aincludes, aexcludes, aprofiles = activeconfig(repo)

        # Import rules on top; only take in rules that are not yet
        # part of the active rules.
        changed = False
        for p in paths:
            with util.posixfile(util.expandpath(p)) as fh:
                raw = fh.read()

            iincludes, iexcludes, iprofiles = parseconfig(repo.ui, raw)
            oldsize = len(includes) + len(excludes) + len(profiles)
            includes.update(iincludes - aincludes)
            excludes.update(iexcludes - aexcludes)
            profiles.update(iprofiles - aprofiles)
            # Growth of the combined sets means at least one new rule.
            if len(includes) + len(excludes) + len(profiles) > oldsize:
                changed = True

        profilecount = includecount = excludecount = 0
        fcounts = (0, 0, 0)

        if changed:
            profilecount = len(profiles - aprofiles)
            includecount = len(includes - aincludes)
            excludecount = len(excludes - aexcludes)

            fcounts = map(len, _updateconfigandrefreshwdir(
                repo, includes, excludes, profiles, force=force))

        printchanges(repo.ui, opts, profilecount, includecount, excludecount,
                     *fcounts)
+
def updateconfig(repo, pats, opts, include=False, exclude=False, reset=False,
                 delete=False, enableprofile=False, disableprofile=False,
                 force=False):
    """Perform a sparse config update.

    Only one of the actions may be performed.

    The new config is written out and a working directory refresh is performed.
    """
    with repo.wlock():
        raw = repo.vfs.tryread('sparse')
        oldinclude, oldexclude, oldprofiles = parseconfig(repo.ui, raw)

        if reset:
            newinclude = set()
            newexclude = set()
            newprofiles = set()
        else:
            newinclude = set(oldinclude)
            newexclude = set(oldexclude)
            newprofiles = set(oldprofiles)

        # NOTE(review): if any pattern starts with '/', the entire request
        # is skipped with only a warning -- including any valid patterns
        # passed alongside it. Confirm this all-or-nothing behavior is
        # intended.
        if any(pat.startswith('/') for pat in pats):
            repo.ui.warn(_('warning: paths cannot start with /, ignoring: %s\n')
                         % ([pat for pat in pats if pat.startswith('/')]))
        elif include:
            newinclude.update(pats)
        elif exclude:
            newexclude.update(pats)
        elif enableprofile:
            newprofiles.update(pats)
        elif disableprofile:
            newprofiles.difference_update(pats)
        elif delete:
            newinclude.difference_update(pats)
            newexclude.difference_update(pats)

        # Net rule-count deltas (additions minus removals) for reporting.
        profilecount = (len(newprofiles - oldprofiles) -
                        len(oldprofiles - newprofiles))
        includecount = (len(newinclude - oldinclude) -
                        len(oldinclude - newinclude))
        excludecount = (len(newexclude - oldexclude) -
                        len(oldexclude - newexclude))

        fcounts = map(len, _updateconfigandrefreshwdir(
            repo, newinclude, newexclude, newprofiles, force=force,
            removing=reset))

        printchanges(repo.ui, opts, profilecount, includecount,
                     excludecount, *fcounts)
+
def printchanges(ui, opts, profilecount=0, includecount=0, excludecount=0,
                 added=0, dropped=0, conflicting=0):
    """Print output summarizing sparse config changes."""
    with ui.formatter('sparse', opts) as fm:
        fm.startitem()
        for field, msg, value in (
                ('profiles_added', _('Profiles changed: %d\n'),
                 profilecount),
                ('include_rules_added', _('Include rules changed: %d\n'),
                 includecount),
                ('exclude_rules_added', _('Exclude rules changed: %d\n'),
                 excludecount)):
            fm.condwrite(ui.verbose, field, msg, value)

        # In 'plain' verbose mode, mergemod.applyupdates already outputs what
        # files are added or removed outside of the templating formatter
        # framework. No point in repeating ourselves in that case.
        if not fm.isplain():
            for field, msg, value in (
                    ('files_added', _('Files added: %d\n'), added),
                    ('files_dropped', _('Files dropped: %d\n'), dropped),
                    ('files_conflicting', _('Files conflicting: %d\n'),
                     conflicting)):
                fm.condwrite(ui.verbose, field, msg, value)
--- a/mercurial/sshpeer.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/sshpeer.py Wed Jul 19 07:51:41 2017 -0500
@@ -12,6 +12,7 @@
from .i18n import _
from . import (
error,
+ pycompat,
util,
wireproto,
)
@@ -145,8 +146,8 @@
self.port = u.port
self.path = u.path or "."
- sshcmd = self.ui.config("ui", "ssh", "ssh")
- remotecmd = self.ui.config("ui", "remotecmd", "hg")
+ sshcmd = self.ui.config("ui", "ssh")
+ remotecmd = self.ui.config("ui", "remotecmd")
args = util.sshargs(sshcmd,
_serverquote(self.host),
@@ -259,6 +260,7 @@
yield wireproto.unescapearg(work)
def _callstream(self, cmd, **args):
+ args = pycompat.byteskwargs(args)
self.ui.debug("sending %s command\n" % cmd)
self.pipeo.write("%s\n" % cmd)
_func, names = wireproto.commands[cmd]
--- a/mercurial/sslutil.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/sslutil.py Wed Jul 19 07:51:41 2017 -0500
@@ -13,7 +13,6 @@
import os
import re
import ssl
-import sys
from .i18n import _
from . import (
@@ -30,17 +29,17 @@
# modern/secure or legacy/insecure. Many operations in this module have
# separate code paths depending on support in Python.
-configprotocols = set([
+configprotocols = {
'tls1.0',
'tls1.1',
'tls1.2',
-])
+}
hassni = getattr(ssl, 'HAS_SNI', False)
# TLS 1.1 and 1.2 may not be supported if the OpenSSL Python is compiled
# against doesn't support them.
-supportedprotocols = set(['tls1.0'])
+supportedprotocols = {'tls1.0'}
if util.safehasattr(ssl, 'PROTOCOL_TLSv1_1'):
supportedprotocols.add('tls1.1')
if util.safehasattr(ssl, 'PROTOCOL_TLSv1_2'):
@@ -58,9 +57,6 @@
# We implement SSLContext using the interface from the standard library.
class SSLContext(object):
- # ssl.wrap_socket gained the "ciphers" named argument in 2.7.
- _supportsciphers = sys.version_info >= (2, 7)
-
def __init__(self, protocol):
# From the public interface of SSLContext
self.protocol = protocol
@@ -92,13 +88,6 @@
self._cacerts = cafile
def set_ciphers(self, ciphers):
- if not self._supportsciphers:
- raise error.Abort(_('setting ciphers in [hostsecurity] is not '
- 'supported by this version of Python'),
- hint=_('remove the config option or run '
- 'Mercurial with a modern Python '
- 'version (preferred)'))
-
self._ciphers = ciphers
def wrap_socket(self, socket, server_hostname=None, server_side=False):
@@ -113,11 +102,9 @@
'cert_reqs': self.verify_mode,
'ssl_version': self.protocol,
'ca_certs': self._cacerts,
+ 'ciphers': self._ciphers,
}
- if self._supportsciphers:
- args['ciphers'] = self._ciphers
-
return ssl.wrap_socket(socket, **args)
def _hostsettings(ui, hostname):
@@ -309,7 +296,7 @@
# disable protocols via SSLContext.options and OP_NO_* constants.
# However, SSLContext.options doesn't work unless we have the
# full/real SSLContext available to us.
- if supportedprotocols == set(['tls1.0']):
+ if supportedprotocols == {'tls1.0'}:
if protocol != 'tls1.0':
raise error.Abort(_('current Python does not support protocol '
'setting %s') % protocol,
@@ -356,6 +343,13 @@
if not serverhostname:
raise error.Abort(_('serverhostname argument is required'))
+ for f in (keyfile, certfile):
+ if f and not os.path.exists(f):
+ raise error.Abort(_('certificate file (%s) does not exist; '
+ 'cannot connect to %s') % (f, serverhostname),
+ hint=_('restore missing file or fix references '
+ 'in Mercurial config'))
+
settings = _hostsettings(ui, serverhostname)
# We can't use ssl.create_default_context() because it calls
@@ -443,7 +437,7 @@
# is really old. (e.g. server doesn't support TLS 1.0+ or
# client doesn't support modern TLS versions introduced
# several years from when this comment was written).
- if supportedprotocols != set(['tls1.0']):
+ if supportedprotocols != {'tls1.0'}:
ui.warn(_(
'(could not communicate with %s using security '
'protocols %s; if you are using a modern Mercurial '
@@ -481,6 +475,12 @@
ui.warn(_(
'(see https://mercurial-scm.org/wiki/SecureConnections '
'for more info)\n'))
+
+ elif (e.reason == 'CERTIFICATE_VERIFY_FAILED' and
+ pycompat.osname == 'nt'):
+
+ ui.warn(_('(the full certificate chain may not be available '
+ 'locally; see "hg help debugssl")\n'))
raise
# check if wrap_socket failed silently because socket had been
@@ -512,6 +512,13 @@
Typically ``cafile`` is only defined if ``requireclientcert`` is true.
"""
+ # This function is not used much by core Mercurial, so the error messaging
+ # doesn't have to be as detailed as for wrapsocket().
+ for f in (certfile, keyfile, cafile):
+ if f and not os.path.exists(f):
+ raise error.Abort(_('referenced certificate file (%s) does not '
+ 'exist') % f)
+
protocol, options, _protocolui = protocolsettings('tls1.0')
# This config option is intended for use in tests only. It is a giant
@@ -820,13 +827,11 @@
if settings['legacyfingerprint']:
ui.warn(_('(SHA-1 fingerprint for %s found in legacy '
'[hostfingerprints] section; '
- 'if you trust this fingerprint, set the '
- 'following config value in [hostsecurity] and '
- 'remove the old one from [hostfingerprints] '
- 'to upgrade to a more secure SHA-256 '
- 'fingerprint: '
- '%s:fingerprints=%s)\n') % (
- host, host, nicefingerprint))
+ 'if you trust this fingerprint, remove the old '
+ 'SHA-1 fingerprint from [hostfingerprints] and '
+ 'add the following entry to the new '
+ '[hostsecurity] section: %s:fingerprints=%s)\n') %
+ (host, host, nicefingerprint))
return
# Pinned fingerprint didn't match. This is a fatal error.
--- a/mercurial/statichttprepo.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/statichttprepo.py Wed Jul 19 07:51:41 2017 -0500
@@ -124,9 +124,11 @@
vfsclass = build_opener(ui, authinfo)
self.vfs = vfsclass(self.path)
+ self.cachevfs = vfsclass(self.vfs.join('cache'))
self._phasedefaults = []
self.names = namespaces.namespaces()
+ self.filtername = None
try:
requirements = scmutil.readrequires(self.vfs, self.supported)
@@ -164,6 +166,8 @@
self.encodepats = None
self.decodepats = None
self._transref = None
+ # Cache of types representing filtered repos.
+ self._filteredrepotypes = {}
def _restrictcapabilities(self, caps):
caps = super(statichttprepository, self)._restrictcapabilities(caps)
--- a/mercurial/statprof.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/statprof.py Wed Jul 19 07:51:41 2017 -0500
@@ -126,14 +126,14 @@
__all__ = ['start', 'stop', 'reset', 'display', 'profile']
-skips = set(["util.py:check", "extensions.py:closure",
- "color.py:colorcmd", "dispatch.py:checkargs",
- "dispatch.py:<lambda>", "dispatch.py:_runcatch",
- "dispatch.py:_dispatch", "dispatch.py:_runcommand",
- "pager.py:pagecmd", "dispatch.py:run",
- "dispatch.py:dispatch", "dispatch.py:runcommand",
- "hg.py:<module>", "evolve.py:warnobserrors",
- ])
+skips = {"util.py:check", "extensions.py:closure",
+ "color.py:colorcmd", "dispatch.py:checkargs",
+ "dispatch.py:<lambda>", "dispatch.py:_runcatch",
+ "dispatch.py:_dispatch", "dispatch.py:_runcommand",
+ "pager.py:pagecmd", "dispatch.py:run",
+ "dispatch.py:dispatch", "dispatch.py:runcommand",
+ "hg.py:<module>", "evolve.py:warnobserrors",
+}
###########################################################################
## Utils
--- a/mercurial/store.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/store.py Wed Jul 19 07:51:41 2017 -0500
@@ -15,12 +15,14 @@
from .i18n import _
from . import (
error,
- parsers,
+ policy,
pycompat,
util,
vfs as vfsmod,
)
+parsers = policy.importmod(r'parsers')
+
# This avoids a collision between a file named foo and a dir named
# foo.i or foo.d
def _encodedir(path):
@@ -473,9 +475,9 @@
self._load()
return iter(self.entries)
-class _fncachevfs(vfsmod.abstractvfs, vfsmod.auditvfs):
+class _fncachevfs(vfsmod.abstractvfs, vfsmod.proxyvfs):
def __init__(self, vfs, fnc, encode):
- vfsmod.auditvfs.__init__(self, vfs)
+ vfsmod.proxyvfs.__init__(self, vfs)
self.fncache = fnc
self.encode = encode
--- a/mercurial/streamclone.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/streamclone.py Wed Jul 19 07:51:41 2017 -0500
@@ -13,6 +13,7 @@
from . import (
branchmap,
error,
+ phases,
store,
util,
)
@@ -80,11 +81,21 @@
streamreqs = remote.capable('streamreqs')
# This is weird and shouldn't happen with modern servers.
if not streamreqs:
+ pullop.repo.ui.warn(_(
+ 'warning: stream clone requested but server has them '
+ 'disabled\n'))
return False, None
streamreqs = set(streamreqs.split(','))
# Server requires something we don't support. Bail.
- if streamreqs - repo.supportedformats:
+ missingreqs = streamreqs - repo.supportedformats
+ if missingreqs:
+ pullop.repo.ui.warn(_(
+ 'warning: stream clone requested but client is missing '
+ 'requirements: %s\n') % ', '.join(sorted(missingreqs)))
+ pullop.repo.ui.warn(
+ _('(see https://www.mercurial-scm.org/wiki/MissingRequirement '
+ 'for more information)\n'))
return False, None
requirements = streamreqs
@@ -152,9 +163,18 @@
repo.invalidate()
-def allowservergeneration(ui):
+def allowservergeneration(repo):
"""Whether streaming clones are allowed from the server."""
- return ui.configbool('server', 'uncompressed', True, untrusted=True)
+ if not repo.ui.configbool('server', 'uncompressed', untrusted=True):
+ return False
+
+ # The way stream clone works makes it impossible to hide secret changesets.
+ # So don't allow this by default.
+ secret = phases.hassecret(repo)
+ if secret:
+ return repo.ui.configbool('server', 'uncompressedallowsecret')
+
+ return True
# This is it's own function so extensions can override it.
def _walkstreamfiles(repo):
@@ -193,25 +213,22 @@
(len(entries), total_bytes))
svfs = repo.svfs
- oldaudit = svfs.mustaudit
debugflag = repo.ui.debugflag
- svfs.mustaudit = False
def emitrevlogdata():
- try:
- for name, size in entries:
- if debugflag:
- repo.ui.debug('sending %s (%d bytes)\n' % (name, size))
- # partially encode name over the wire for backwards compat
- yield '%s\0%d\n' % (store.encodedir(name), size)
+ for name, size in entries:
+ if debugflag:
+ repo.ui.debug('sending %s (%d bytes)\n' % (name, size))
+ # partially encode name over the wire for backwards compat
+ yield '%s\0%d\n' % (store.encodedir(name), size)
+ # auditing at this stage is both pointless (paths are already
+ # trusted by the local repo) and expensive
+ with svfs(name, 'rb', auditpath=False) as fp:
if size <= 65536:
- with svfs(name, 'rb') as fp:
- yield fp.read(size)
+ yield fp.read(size)
else:
- for chunk in util.filechunkiter(svfs(name), limit=size):
+ for chunk in util.filechunkiter(fp, limit=size):
yield chunk
- finally:
- svfs.mustaudit = oldaudit
return len(entries), total_bytes, emitrevlogdata()
--- a/mercurial/subrepo.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/subrepo.py Wed Jul 19 07:51:41 2017 -0500
@@ -73,7 +73,7 @@
raise ex
except error.Abort as ex:
subrepo = subrelpath(self)
- errormsg = str(ex) + ' ' + _('(in subrepo %s)') % subrepo
+ errormsg = str(ex) + ' ' + _('(in subrepository "%s")') % subrepo
# avoid handling this exception by raising a SubrepoAbort exception
raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
cause=sys.exc_info())
@@ -147,7 +147,7 @@
kind = 'hg'
if src.startswith('['):
if ']' not in src:
- raise error.Abort(_('missing ] in subrepo source'))
+ raise error.Abort(_('missing ] in subrepository source'))
kind, src = src.split(']', 1)
kind = kind[1:]
src = src.lstrip() # strip any extra whitespace after ']'
@@ -401,7 +401,7 @@
substate = getattr(ctx, "substate", None)
if not substate:
return commitphase
- check = ui.config('phases', 'checksubrepos', 'follow')
+ check = ui.config('phases', 'checksubrepos')
if check not in ('ignore', 'follow', 'abort'):
raise error.Abort(_('invalid phases.checksubrepos configuration: %s')
% (check))
@@ -460,14 +460,15 @@
"""
return False
- def dirty(self, ignoreupdate=False):
+ def dirty(self, ignoreupdate=False, missing=False):
"""returns true if the dirstate of the subrepo is dirty or does not
match current stored state. If ignoreupdate is true, only check
- whether the subrepo has uncommitted changes in its dirstate.
+ whether the subrepo has uncommitted changes in its dirstate. If missing
+ is true, check for deleted files.
"""
raise NotImplementedError
- def dirtyreason(self, ignoreupdate=False):
+ def dirtyreason(self, ignoreupdate=False, missing=False):
"""return reason string if it is ``dirty()``
Returned string should have enough information for the message
@@ -475,14 +476,15 @@
This returns None, otherwise.
"""
- if self.dirty(ignoreupdate=ignoreupdate):
- return _("uncommitted changes in subrepository '%s'"
+ if self.dirty(ignoreupdate=ignoreupdate, missing=missing):
+ return _('uncommitted changes in subrepository "%s"'
) % subrelpath(self)
def bailifchanged(self, ignoreupdate=False, hint=None):
"""raise Abort if subrepository is ``dirty()``
"""
- dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate)
+ dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate,
+ missing=True)
if dirtyreason:
raise error.Abort(dirtyreason, hint=hint)
@@ -538,7 +540,7 @@
self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
return 1
- def cat(self, match, prefix, **opts):
+ def cat(self, match, fm, fntemplate, prefix, **opts):
return 1
def status(self, rev2, **opts):
@@ -767,10 +769,11 @@
dry_run, similarity)
@annotatesubrepoerror
- def cat(self, match, prefix, **opts):
+ def cat(self, match, fm, fntemplate, prefix, **opts):
rev = self._state[1]
ctx = self._repo[rev]
- return cmdutil.cat(self.ui, self._repo, ctx, match, prefix, **opts)
+ return cmdutil.cat(self.ui, self._repo, ctx, match, fm, fntemplate,
+ prefix, **opts)
@annotatesubrepoerror
def status(self, rev2, **opts):
@@ -814,7 +817,7 @@
return total
@annotatesubrepoerror
- def dirty(self, ignoreupdate=False):
+ def dirty(self, ignoreupdate=False, missing=False):
r = self._state[1]
if r == '' and not ignoreupdate: # no state recorded
return True
@@ -822,7 +825,7 @@
if r != w.p1().hex() and not ignoreupdate:
# different version checked out
return True
- return w.dirty() # working directory changed
+ return w.dirty(missing=missing) # working directory changed
def basestate(self):
return self._repo['.'].hex()
@@ -893,7 +896,7 @@
ctx = urepo[revision]
if ctx.hidden():
urepo.ui.warn(
- _('revision %s in subrepo %s is hidden\n') \
+ _('revision %s in subrepository "%s" is hidden\n') \
% (revision[0:12], self._path))
repo = urepo
hg.updaterepo(repo, revision, overwrite)
@@ -907,12 +910,14 @@
def mergefunc():
if anc == cur and dst.branch() == cur.branch():
- self.ui.debug("updating subrepo %s\n" % subrelpath(self))
+ self.ui.debug('updating subrepository "%s"\n'
+ % subrelpath(self))
hg.update(self._repo, state[1])
elif anc == dst:
- self.ui.debug("skipping subrepo %s\n" % subrelpath(self))
+ self.ui.debug('skipping subrepository "%s"\n'
+ % subrelpath(self))
else:
- self.ui.debug("merging subrepo %s\n" % subrelpath(self))
+ self.ui.debug('merging subrepository "%s"\n' % subrelpath(self))
hg.merge(self._repo, state[1], remind=False)
wctx = self._repo[None]
@@ -1201,8 +1206,10 @@
return True, True, bool(missing)
return bool(changes), False, bool(missing)
- def dirty(self, ignoreupdate=False):
- if not self._wcchanged()[0]:
+ def dirty(self, ignoreupdate=False, missing=False):
+ wcchanged = self._wcchanged()
+ changed = wcchanged[0] or (missing and wcchanged[2])
+ if not changed:
if self._state[1] in self._wcrevs() or ignoreupdate:
return False
return True
@@ -1550,11 +1557,11 @@
# try only origin: the originally cloned repo
self._gitcommand(['fetch'])
if not self._githavelocally(revision):
- raise error.Abort(_("revision %s does not exist in subrepo %s\n") %
- (revision, self._relpath))
+ raise error.Abort(_('revision %s does not exist in subrepository '
+ '"%s"\n') % (revision, self._relpath))
@annotatesubrepoerror
- def dirty(self, ignoreupdate=False):
+ def dirty(self, ignoreupdate=False, missing=False):
if self._gitmissing():
return self._state[1] != ''
if self._gitisbare():
@@ -1606,8 +1613,8 @@
def rawcheckout():
# no branch to checkout, check it out with no branch
- self.ui.warn(_('checking out detached HEAD in subrepo %s\n') %
- self._relpath)
+ self.ui.warn(_('checking out detached HEAD in '
+ 'subrepository "%s"\n') % self._relpath)
self.ui.warn(_('check out a git branch if you intend '
'to make changes\n'))
checkout(['-q', revision])
@@ -1726,14 +1733,14 @@
# determine if the current branch is even useful
if not self._gitisancestor(self._state[1], current):
self.ui.warn(_('unrelated git branch checked out '
- 'in subrepo %s\n') % self._relpath)
+ 'in subrepository "%s"\n') % self._relpath)
return False
- self.ui.status(_('pushing branch %s of subrepo %s\n') %
+ self.ui.status(_('pushing branch %s of subrepository "%s"\n') %
(current.split('/', 2)[2], self._relpath))
ret = self._gitdir(cmd + ['origin', current])
return ret[1] == 0
else:
- self.ui.warn(_('no branch checked out in subrepo %s\n'
+ self.ui.warn(_('no branch checked out in subrepository "%s"\n'
'cannot push revision %s\n') %
(self._relpath, self._state[1]))
return False
@@ -1771,7 +1778,7 @@
if exact:
rejected.append(f)
continue
- if not opts.get('dry_run'):
+ if not opts.get(r'dry_run'):
self._gitcommand(command + [f])
for f in rejected:
@@ -1832,7 +1839,7 @@
@annotatesubrepoerror
- def cat(self, match, prefix, **opts):
+ def cat(self, match, fm, fntemplate, prefix, **opts):
rev = self._state[1]
if match.anypats():
return 1 #No support for include/exclude yet
@@ -1840,9 +1847,10 @@
if not match.files():
return 1
+ # TODO: add support for non-plain formatter (see cmdutil.cat())
for f in match.files():
output = self._gitcommand(["show", "%s:%s" % (rev, f)])
- fp = cmdutil.makefileobj(self._subparent, opts.get('output'),
+ fp = cmdutil.makefileobj(self._subparent, fntemplate,
self._ctx.node(),
pathname=self.wvfs.reljoin(prefix, f))
fp.write(output)
@@ -1878,9 +1886,9 @@
deleted, unknown, ignored, clean = [], [], [], []
command = ['status', '--porcelain', '-z']
- if opts.get('unknown'):
+ if opts.get(r'unknown'):
command += ['--untracked-files=all']
- if opts.get('ignored'):
+ if opts.get(r'ignored'):
command += ['--ignored']
out = self._gitcommand(command)
@@ -1908,7 +1916,7 @@
elif st == '!!':
ignored.append(filename1)
- if opts.get('clean'):
+ if opts.get(r'clean'):
out = self._gitcommand(['ls-files'])
for f in out.split('\n'):
if not f in changedfiles:
@@ -1921,7 +1929,7 @@
def diff(self, ui, diffopts, node2, match, prefix, **opts):
node1 = self._state[1]
cmd = ['diff', '--no-renames']
- if opts['stat']:
+ if opts[r'stat']:
cmd.append('--stat')
else:
# for Git, this also implies '-p'
@@ -1964,7 +1972,7 @@
@annotatesubrepoerror
def revert(self, substate, *pats, **opts):
self.ui.status(_('reverting subrepo %s\n') % substate[0])
- if not opts.get('no_backup'):
+ if not opts.get(r'no_backup'):
status = self.status(None)
names = status.modified
for name in names:
@@ -1973,7 +1981,7 @@
(name, bakname))
self.wvfs.rename(name, bakname)
- if not opts.get('dry_run'):
+ if not opts.get(r'dry_run'):
self.get(substate, overwrite=True)
return []
--- a/mercurial/tagmerge.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/tagmerge.py Wed Jul 19 07:51:41 2017 -0500
@@ -136,7 +136,7 @@
prevlinenum = linenum
return groupednodes
-def writemergedtags(repo, mergedtags):
+def writemergedtags(fcd, mergedtags):
'''
write the merged tags while trying to minimize the diff to the first parent
@@ -169,9 +169,7 @@
# finally we can join the sorted groups to get the final contents of the
# merged .hgtags file, and then write it to disk
mergedtagstring = '\n'.join([tags for rank, tags in finaltags if tags])
- fp = repo.wvfs('.hgtags', 'wb')
- fp.write(mergedtagstring + '\n')
- fp.close()
+ fcd.write(mergedtagstring + '\n', fcd.flags())
def singletagmerge(p1nodes, p2nodes):
'''
@@ -268,7 +266,7 @@
% (numconflicts, ', '.join(sorted(conflictedtags))))
return True, 1
- writemergedtags(repo, mergedtags)
+ writemergedtags(fcd, mergedtags)
ui.note(_('.hgtags merged successfully\n'))
return False, 0
--- a/mercurial/tags.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/tags.py Wed Jul 19 07:51:41 2017 -0500
@@ -362,7 +362,7 @@
def _filename(repo):
"""name of a tagcache file for a given repo or repoview"""
- filename = 'cache/tags2'
+ filename = 'tags2'
if repo.filtername:
filename = '%s-%s' % (filename, repo.filtername)
return filename
@@ -386,7 +386,7 @@
info from each returned head. (See findglobaltags().)
'''
try:
- cachefile = repo.vfs(_filename(repo), 'r')
+ cachefile = repo.cachevfs(_filename(repo), 'r')
# force reading the file for static-http
cachelines = iter(cachefile)
except IOError:
@@ -486,11 +486,11 @@
def _writetagcache(ui, repo, valid, cachetags):
filename = _filename(repo)
try:
- cachefile = repo.vfs(filename, 'w', atomictemp=True)
+ cachefile = repo.cachevfs(filename, 'w', atomictemp=True)
except (OSError, IOError):
return
- ui.log('tagscache', 'writing .hg/%s with %d tags\n',
+ ui.log('tagscache', 'writing .hg/cache/%s with %d tags\n',
filename, len(cachetags))
if valid[2]:
@@ -539,9 +539,10 @@
raise error.Abort(_('working copy of .hgtags is changed'),
hint=_('please commit .hgtags manually'))
- repo.tags() # instantiate the cache
- _tag(repo.unfiltered(), names, node, message, local, user, date,
- editor=editor)
+ with repo.wlock():
+ repo.tags() # instantiate the cache
+ _tag(repo.unfiltered(), names, node, message, local, user, date,
+ editor=editor)
def _tag(repo, names, node, message, local, user, date, extra=None,
editor=False):
@@ -616,7 +617,7 @@
return tagnode
-_fnodescachefile = 'cache/hgtagsfnodes1'
+_fnodescachefile = 'hgtagsfnodes1'
_fnodesrecsize = 4 + 20 # changeset fragment + filenode
_fnodesmissingrec = '\xff' * 24
@@ -650,7 +651,7 @@
try:
- data = repo.vfs.read(_fnodescachefile)
+ data = repo.cachevfs.read(_fnodescachefile)
except (OSError, IOError):
data = ""
self._raw = bytearray(data)
@@ -758,13 +759,12 @@
try:
lock = repo.wlock(wait=False)
except error.LockError:
- repo.ui.log('tagscache',
- 'not writing .hg/%s because lock cannot be acquired\n' %
- (_fnodescachefile))
+ repo.ui.log('tagscache', 'not writing .hg/cache/%s because '
+ 'lock cannot be acquired\n' % (_fnodescachefile))
return
try:
- f = repo.vfs.open(_fnodescachefile, 'ab')
+ f = repo.cachevfs.open(_fnodescachefile, 'ab')
try:
# if the file has been truncated
actualoffset = f.tell()
@@ -774,7 +774,7 @@
f.seek(self._dirtyoffset)
f.truncate()
repo.ui.log('tagscache',
- 'writing %d bytes to %s\n' % (
+ 'writing %d bytes to cache/%s\n' % (
len(data), _fnodescachefile))
f.write(data)
self._dirtyoffset = None
@@ -782,7 +782,7 @@
f.close()
except (IOError, OSError) as inst:
repo.ui.log('tagscache',
- "couldn't write %s: %s\n" % (
+ "couldn't write cache/%s: %s\n" % (
_fnodescachefile, inst))
finally:
lock.release()
--- a/mercurial/templatefilters.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templatefilters.py Wed Jul 19 07:51:41 2017 -0500
@@ -1,4 +1,4 @@
-# template-filters.py - common template expansion filters
+# templatefilters.py - common template expansion filters
#
# Copyright 2005-2008 Matt Mackall <mpm@selenic.com>
#
@@ -16,6 +16,7 @@
encoding,
hbisect,
node,
+ pycompat,
registrar,
templatekw,
util,
@@ -24,6 +25,9 @@
urlerr = util.urlerr
urlreq = util.urlreq
+if pycompat.ispy3:
+ long = int
+
# filters are callables like:
# fn(obj)
# with:
@@ -226,15 +230,16 @@
elif obj is True:
return 'true'
elif isinstance(obj, (int, long, float)):
- return str(obj)
- elif isinstance(obj, str):
+ return pycompat.bytestr(obj)
+ elif isinstance(obj, bytes):
return '"%s"' % encoding.jsonescape(obj, paranoid=paranoid)
elif util.safehasattr(obj, 'keys'):
- out = ['%s: %s' % (json(k), json(v))
+ out = ['"%s": %s' % (encoding.jsonescape(k, paranoid=paranoid),
+ json(v, paranoid))
for k, v in sorted(obj.iteritems())]
return '{' + ', '.join(out) + '}'
elif util.safehasattr(obj, '__iter__'):
- out = [json(i) for i in obj]
+ out = [json(i, paranoid) for i in obj]
return '[' + ', '.join(out) + ']'
else:
raise TypeError('cannot encode type %s' % obj.__class__.__name__)
@@ -351,11 +356,11 @@
text and concatenating them.
"""
thing = templatekw.unwraphybrid(thing)
- if util.safehasattr(thing, '__iter__') and not isinstance(thing, str):
+ if util.safehasattr(thing, '__iter__') and not isinstance(thing, bytes):
return "".join([stringify(t) for t in thing if t is not None])
if thing is None:
return ""
- return str(thing)
+ return pycompat.bytestr(thing)
@templatefilter('stripdir')
def stripdir(text):
--- a/mercurial/templatekw.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templatekw.py Wed Jul 19 07:51:41 2017 -0500
@@ -8,12 +8,19 @@
from __future__ import absolute_import
from .i18n import _
-from .node import hex, nullid
+from .node import (
+ hex,
+ nullid,
+ short,
+)
+
from . import (
encoding,
error,
hbisect,
+ obsutil,
patch,
+ pycompat,
registrar,
scmutil,
util,
@@ -52,6 +59,8 @@
yield makemap(x)
def __contains__(self, x):
return x in self._values
+ def __getitem__(self, key):
+ return self._values[key]
def __len__(self):
return len(self._values)
def __iter__(self):
@@ -112,23 +121,24 @@
expand 'end_foos'.
'''
templ = mapping['templ']
+ strmapping = pycompat.strkwargs(mapping)
if not plural:
plural = name + 's'
if not values:
noname = 'no_' + plural
if noname in templ:
- yield templ(noname, **mapping)
+ yield templ(noname, **strmapping)
return
if name not in templ:
- if isinstance(values[0], str):
+ if isinstance(values[0], bytes):
yield separator.join(values)
else:
for v in values:
- yield dict(v, **mapping)
+ yield dict(v, **strmapping)
return
startname = 'start_' + plural
if startname in templ:
- yield templ(startname, **mapping)
+ yield templ(startname, **strmapping)
vmapping = mapping.copy()
def one(v, tag=name):
try:
@@ -139,7 +149,7 @@
vmapping[a] = b
except ValueError:
vmapping[name] = v
- return templ(tag, **vmapping)
+ return templ(tag, **pycompat.strkwargs(vmapping))
lastname = 'last_' + name
if lastname in templ:
last = values.pop()
@@ -151,17 +161,17 @@
yield one(last, tag=lastname)
endname = 'end_' + plural
if endname in templ:
- yield templ(endname, **mapping)
+ yield templ(endname, **strmapping)
def _formatrevnode(ctx):
"""Format changeset as '{rev}:{node|formatnode}', which is the default
template provided by cmdutil.changeset_templater"""
repo = ctx.repo()
if repo.ui.debugflag:
- hexnode = ctx.hex()
+ hexfunc = hex
else:
- hexnode = ctx.hex()[:12]
- return '%d:%s' % (scmutil.intrev(ctx.rev()), hexnode)
+ hexfunc = short
+ return '%d:%s' % (scmutil.intrev(ctx), hexfunc(scmutil.binnode(ctx)))
def getfiles(repo, ctx, revcache):
if 'files' not in revcache:
@@ -279,7 +289,7 @@
"""String. The name of the branch on which the changeset was
committed.
"""
- return args['ctx'].branch()
+ return args[r'ctx'].branch()
@templatekeyword('branches')
def showbranches(**args):
@@ -287,6 +297,7 @@
changeset was committed. Will be empty if the branch name was
default. (DEPRECATED)
"""
+ args = pycompat.byteskwargs(args)
branch = args['ctx'].branch()
if branch != 'default':
return showlist('branch', [branch], args, plural='branches')
@@ -297,6 +308,7 @@
"""List of strings. Any bookmarks associated with the
changeset. Also sets 'active', the name of the active bookmark.
"""
+ args = pycompat.byteskwargs(args)
repo = args['ctx']._repo
bookmarks = args['ctx'].bookmarks()
active = repo._activebookmark
@@ -307,6 +319,7 @@
@templatekeyword('children')
def showchildren(**args):
"""List of strings. The children of the changeset."""
+ args = pycompat.byteskwargs(args)
ctx = args['ctx']
childrevs = ['%d:%s' % (cctx, cctx) for cctx in ctx.children()]
return showlist('children', childrevs, args, element='child')
@@ -322,8 +335,8 @@
def showactivebookmark(**args):
"""String. The active bookmark, if it is
associated with the changeset"""
- active = args['repo']._activebookmark
- if active and active in args['ctx'].bookmarks():
+ active = args[r'repo']._activebookmark
+ if active and active in args[r'ctx'].bookmarks():
return active
return ''
@@ -354,6 +367,7 @@
@templatekeyword('envvars')
def showenvvars(repo, **args):
"""A dictionary of environment variables. (EXPERIMENTAL)"""
+ args = pycompat.byteskwargs(args)
env = repo.ui.exportableenviron()
env = util.sortdict((k, env[k]) for k in sorted(env))
return showdict('envvar', env, args, plural='envvars')
@@ -362,6 +376,7 @@
def showextras(**args):
"""List of dicts with key, value entries of the 'extras'
field of this changeset."""
+ args = pycompat.byteskwargs(args)
extras = args['ctx'].extra()
extras = util.sortdict((k, extras[k]) for k in sorted(extras))
makemap = lambda k: {'key': k, 'value': extras[k]}
@@ -373,6 +388,7 @@
@templatekeyword('file_adds')
def showfileadds(**args):
"""List of strings. Files added by this changeset."""
+ args = pycompat.byteskwargs(args)
repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
return showlist('file_add', getfiles(repo, ctx, revcache)[1], args,
element='file')
@@ -382,6 +398,7 @@
"""List of strings. Files copied in this changeset with
their sources.
"""
+ args = pycompat.byteskwargs(args)
cache, ctx = args['cache'], args['ctx']
copies = args['revcache'].get('copies')
if copies is None:
@@ -406,6 +423,7 @@
"""List of strings. Like "file_copies" but displayed
only if the --copied switch is set.
"""
+ args = pycompat.byteskwargs(args)
copies = args['revcache'].get('copies') or []
copies = util.sortdict(copies)
return showdict('file_copy', copies, args, plural='file_copies',
@@ -414,6 +432,7 @@
@templatekeyword('file_dels')
def showfiledels(**args):
"""List of strings. Files removed by this changeset."""
+ args = pycompat.byteskwargs(args)
repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
return showlist('file_del', getfiles(repo, ctx, revcache)[2], args,
element='file')
@@ -421,6 +440,7 @@
@templatekeyword('file_mods')
def showfilemods(**args):
"""List of strings. Files modified by this changeset."""
+ args = pycompat.byteskwargs(args)
repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
return showlist('file_mod', getfiles(repo, ctx, revcache)[0], args,
element='file')
@@ -430,6 +450,7 @@
"""List of strings. All files modified, added, or removed by this
changeset.
"""
+ args = pycompat.byteskwargs(args)
return showlist('file', args['ctx'].files(), args)
@templatekeyword('graphnode')
@@ -464,6 +485,7 @@
def showlatesttags(pattern, **args):
"""helper method for the latesttag keyword and function"""
+ args = pycompat.byteskwargs(args)
repo, ctx = args['repo'], args['ctx']
cache = args['cache']
latesttags = getlatesttags(repo, ctx, cache, pattern)
@@ -497,7 +519,7 @@
def _showchangessincetag(repo, ctx, **args):
offset = 0
revs = [ctx.rev()]
- tag = args['tag']
+ tag = args[r'tag']
# The only() revset doesn't currently support wdir()
if ctx.rev() is None:
@@ -508,18 +530,19 @@
@templatekeyword('manifest')
def showmanifest(**args):
- repo, ctx, templ = args['repo'], args['ctx'], args['templ']
+ repo, ctx, templ = args[r'repo'], args[r'ctx'], args[r'templ']
mnode = ctx.manifestnode()
if mnode is None:
# just avoid crash, we might want to use the 'ff...' hash in future
return
args = args.copy()
- args.update({'rev': repo.manifestlog._revlog.rev(mnode),
- 'node': hex(mnode)})
+ args.update({r'rev': repo.manifestlog._revlog.rev(mnode),
+ r'node': hex(mnode)})
return templ('manifest', **args)
def shownames(namespace, **args):
"""helper method to generate a template keyword for a namespace"""
+ args = pycompat.byteskwargs(args)
ctx = args['ctx']
repo = ctx.repo()
ns = repo.names[namespace]
@@ -530,15 +553,30 @@
def shownamespaces(**args):
"""Dict of lists. Names attached to this changeset per
namespace."""
+ args = pycompat.byteskwargs(args)
ctx = args['ctx']
repo = ctx.repo()
- namespaces = util.sortdict((k, showlist('name', ns.names(repo, ctx.node()),
- args))
- for k, ns in repo.names.iteritems())
+
+ namespaces = util.sortdict()
+ colornames = {}
+ builtins = {}
+
+ for k, ns in repo.names.iteritems():
+ namespaces[k] = showlist('name', ns.names(repo, ctx.node()), args)
+ colornames[k] = ns.colorname
+ builtins[k] = ns.builtin
+
f = _showlist('namespace', list(namespaces), args)
- return _hybrid(f, namespaces,
- lambda k: {'namespace': k, 'names': namespaces[k]},
- lambda x: x['namespace'])
+
+ def makemap(ns):
+ return {
+ 'namespace': ns,
+ 'names': namespaces[ns],
+ 'builtin': builtins[ns],
+ 'colorname': colornames[ns],
+ }
+
+ return _hybrid(f, namespaces, makemap, lambda x: x['namespace'])
@templatekeyword('node')
def shownode(repo, ctx, templ, **args):
@@ -555,6 +593,68 @@
return 'obsolete'
return ''
+@templatekeyword('peerpaths')
+def showpeerpaths(repo, **args):
+ """A dictionary of repository locations defined in the [paths] section
+ of your configuration file. (EXPERIMENTAL)"""
+ # see commands.paths() for naming of dictionary keys
+ paths = util.sortdict()
+ for k, p in sorted(repo.ui.paths.iteritems()):
+ d = util.sortdict()
+ d['url'] = p.rawloc
+ d.update((o, v) for o, v in sorted(p.suboptions.iteritems()))
+ def f():
+ yield d['url']
+ paths[k] = hybriddict(d, gen=f())
+
+ # no hybriddict() since d['path'] can't be formatted as a string. perhaps
+ # hybriddict() should call templatefilters.stringify(d[value]).
+ return _hybrid(None, paths, lambda k: {'name': k, 'path': paths[k]},
+ lambda d: '%s=%s' % (d['name'], d['path']['url']))
+
+@templatekeyword("predecessors")
+def showpredecessors(repo, ctx, **args):
+ """Returns the list if the closest visible successors
+ """
+ predecessors = sorted(obsutil.closestpredecessors(repo, ctx.node()))
+ predecessors = map(hex, predecessors)
+
+ return _hybrid(None, predecessors,
+ lambda x: {'ctx': repo[x], 'revcache': {}},
+ lambda d: _formatrevnode(d['ctx']))
+
+@templatekeyword("successorssets")
+def showsuccessorssets(repo, ctx, **args):
+ """Returns a string of sets of successors for a changectx
+
+ Format used is: [ctx1, ctx2], [ctx3] if ctx has been splitted into ctx1 and
+ ctx2 while also diverged into ctx3"""
+ if not ctx.obsolete():
+ return ''
+ args = pycompat.byteskwargs(args)
+
+ ssets = obsutil.successorssets(repo, ctx.node(), closest=True)
+ ssets = [[hex(n) for n in ss] for ss in ssets]
+
+ data = []
+ for ss in ssets:
+ h = _hybrid(None, ss, lambda x: {'ctx': repo[x], 'revcache': {}},
+ lambda d: _formatrevnode(d['ctx']))
+ data.append(h)
+
+ # Format the successorssets
+ def render(d):
+ t = []
+ for i in d.gen:
+ t.append(i)
+ return "".join(t)
+
+ def gen(data):
+ yield "; ".join(render(d) for d in data)
+
+ return _hybrid(gen(data), data, lambda x: {'successorset': x},
+ lambda d: d["successorset"])
+
@templatekeyword('p1rev')
def showp1rev(repo, ctx, templ, **args):
"""Integer. The repository-local revision number of the changeset's
@@ -586,10 +686,12 @@
"""List of strings. The parents of the changeset in "rev:node"
format. If the changeset has only one "natural" parent (the predecessor
revision) nothing is shown."""
+ args = pycompat.byteskwargs(args)
repo = args['repo']
ctx = args['ctx']
pctxs = scmutil.meaningfulparents(repo, ctx)
- prevs = [str(p.rev()) for p in pctxs] # ifcontains() needs a list of str
+ # ifcontains() needs a list of str
+ prevs = ["%d" % p.rev() for p in pctxs]
parents = [[('rev', p.rev()),
('node', p.hex()),
('phase', p.phasestr())]
@@ -611,13 +713,15 @@
@templatekeyword('rev')
def showrev(repo, ctx, templ, **args):
"""Integer. The repository-local changeset revision number."""
- return scmutil.intrev(ctx.rev())
+ return scmutil.intrev(ctx)
def showrevslist(name, revs, **args):
"""helper to generate a list of revisions in which a mapped template will
be evaluated"""
+ args = pycompat.byteskwargs(args)
repo = args['ctx'].repo()
- revs = [str(r) for r in revs] # ifcontains() needs a list of str
+ # ifcontains() needs a list of str
+ revs = ["%d" % r for r in revs]
f = _showlist(name, revs, args)
return _hybrid(f, revs,
lambda x: {name: x, 'ctx': repo[int(x)], 'revcache': {}},
@@ -626,6 +730,7 @@
@templatekeyword('subrepos')
def showsubrepos(**args):
"""List of strings. Updated subrepositories in the changeset."""
+ args = pycompat.byteskwargs(args)
ctx = args['ctx']
substate = ctx.substate
if not substate:
@@ -665,6 +770,7 @@
(EXPERIMENTAL)
"""
+ args = pycompat.byteskwargs(args)
return showlist('trouble', args['ctx'].troubles(), args)
# tell hggettext to extract docstrings from these functions:
--- a/mercurial/templater.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templater.py Wed Jul 19 07:51:41 2017 -0500
@@ -53,6 +53,7 @@
"""Parse a template expression into a stream of tokens, which must end
with term if specified"""
pos = start
+ program = pycompat.bytestr(program)
while pos < end:
c = program[pos]
if c.isspace(): # skip inter-token whitespace
@@ -370,7 +371,7 @@
except TemplateNotFound:
v = default
if callable(v):
- return v(**mapping)
+ return v(**pycompat.strkwargs(mapping))
return v
def buildtemplate(exp, context):
@@ -873,7 +874,7 @@
repo = ctx.repo()
def query(expr):
- m = revsetmod.match(repo.ui, expr)
+ m = revsetmod.match(repo.ui, expr, repo=repo)
return m(repo)
if len(args) > 1:
@@ -959,6 +960,9 @@
return True
except error.RevlogError:
return False
+ except error.WdirUnsupported:
+ # single 'ff...' match
+ return True
shortest = node
startlength = max(6, minlength)
@@ -1097,21 +1101,21 @@
def _flatten(thing):
'''yield a single stream from a possibly nested set of iterators'''
thing = templatekw.unwraphybrid(thing)
- if isinstance(thing, str):
+ if isinstance(thing, bytes):
yield thing
elif thing is None:
pass
elif not util.safehasattr(thing, '__iter__'):
- yield str(thing)
+ yield pycompat.bytestr(thing)
else:
for i in thing:
i = templatekw.unwraphybrid(i)
- if isinstance(i, str):
+ if isinstance(i, bytes):
yield i
elif i is None:
pass
elif not util.safehasattr(i, '__iter__'):
- yield str(i)
+ yield pycompat.bytestr(i)
else:
for j in _flatten(i):
yield j
@@ -1294,7 +1298,12 @@
(self.map[t][1], inst.args[1]))
return self.cache[t]
+ def render(self, mapping):
+ """Render the default unnamed template and return result as string"""
+ return stringify(self('', **mapping))
+
def __call__(self, t, **mapping):
+ mapping = pycompat.byteskwargs(mapping)
ttype = t in self.map and self.map[t][0] or 'default'
if ttype not in self.ecache:
try:
--- a/mercurial/templates/gitweb/bookmarks.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/gitweb/bookmarks.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -13,6 +13,7 @@
</div>
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
<a href="{url|urlescape}shortlog{sessionvars%urlparameter}">shortlog</a> |
<a href="{url|urlescape}log{sessionvars%urlparameter}">changelog</a> |
@@ -22,7 +23,8 @@
<a href="{url|urlescape}branches{sessionvars%urlparameter}">branches</a> |
<a href="{url|urlescape}file{sessionvars%urlparameter}">files</a> |
<a href="{url|urlescape}help{sessionvars%urlparameter}">help</a>
-<br/>
+</div>
+{searchform}
</div>
<div class="title"> </div>
--- a/mercurial/templates/gitweb/branches.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/gitweb/branches.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -13,6 +13,7 @@
</div>
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
<a href="{url|urlescape}shortlog{sessionvars%urlparameter}">shortlog</a> |
<a href="{url|urlescape}log{sessionvars%urlparameter}">changelog</a> |
@@ -22,7 +23,8 @@
branches |
<a href="{url|urlescape}file{sessionvars%urlparameter}">files</a> |
<a href="{url|urlescape}help{sessionvars%urlparameter}">help</a>
-<br/>
+</div>
+{searchform}
</div>
<div class="title"> </div>
--- a/mercurial/templates/gitweb/changelog.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/gitweb/changelog.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -12,14 +12,8 @@
<a href="/">Mercurial</a> {pathdef%breadcrumb} / changelog
</div>
-<form action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<div class="search">
-<input type="text" name="rev" />
-</div>
-</form>
-
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
<a href="{url|urlescape}shortlog/{symrev}{sessionvars%urlparameter}">shortlog</a> |
changelog |
@@ -32,10 +26,12 @@
<br/>
{changenav%nav}<br/>
</div>
+{searchform}
+</div>
{entries%changelogentry}
-<div class="page_nav">
+<div class="extra_nav">
{changenav%nav}<br/>
</div>
--- a/mercurial/templates/gitweb/changelogentry.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/gitweb/changelogentry.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -7,8 +7,6 @@
</div>
<i>{author|obfuscate} [{date|rfc822date}] rev {rev}</i><br/>
</div>
-<div class="log_body description">
-{desc|strip|escape|websub|addbreaks|nonempty}
-<br/>
-<br/>
+<div class="log_body description">{desc|strip|escape|websub|nonempty}
+
</div>
--- a/mercurial/templates/gitweb/changeset.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/gitweb/changeset.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -13,6 +13,7 @@
</div>
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
<a href="{url|urlescape}shortlog/{symrev}{sessionvars%urlparameter}">shortlog</a> |
<a href="{url|urlescape}log/{symrev}{sessionvars%urlparameter}">changelog</a> |
@@ -24,7 +25,8 @@
changeset |
<a href="{url|urlescape}raw-rev/{symrev}">raw</a> {archives%archiveentry} |
<a href="{url|urlescape}help{sessionvars%urlparameter}">help</a>
-<br/>
+</div>
+{searchform}
</div>
<div>
@@ -43,9 +45,7 @@
{child%changesetchild}
</table></div>
-<div class="page_body description">
-{desc|strip|escape|websub|addbreaks|nonempty}
-</div>
+<div class="page_body description">{desc|strip|escape|websub|nonempty}</div>
<div class="list_head"></div>
<div class="title_text">
<table cellspacing="0">
--- a/mercurial/templates/gitweb/error.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/gitweb/error.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -13,6 +13,7 @@
</div>
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
<a href="{url|urlescape}shortlog{sessionvars%urlparameter}">shortlog</a> |
<a href="{url|urlescape}log{sessionvars%urlparameter}">changelog</a> |
@@ -21,7 +22,8 @@
<a href="{url|urlescape}branches{sessionvars%urlparameter}">branches</a> |
<a href="{url|urlescape}file{sessionvars%urlparameter}">files</a> |
<a href="{url|urlescape}help{sessionvars%urlparameter}">help</a>
-<br/>
+</div>
+{searchform}
</div>
<div class="page_body">
--- a/mercurial/templates/gitweb/fileannotate.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/gitweb/fileannotate.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -13,6 +13,7 @@
</div>
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
<a href="{url|urlescape}shortlog{sessionvars%urlparameter}">shortlog</a> |
<a href="{url|urlescape}log{sessionvars%urlparameter}">changelog</a> |
@@ -30,7 +31,8 @@
<a href="{url|urlescape}comparison/{symrev}/{file|urlescape}{sessionvars%urlparameter}">comparison</a> |
<a href="{url|urlescape}raw-file/{symrev}/{file|urlescape}">raw</a> |
<a href="{url|urlescape}help{sessionvars%urlparameter}">help</a>
-<br/>
+</div>
+{searchform}
</div>
<div class="title">{file|escape}</div>
@@ -59,13 +61,18 @@
</table>
</div>
-<div class="page_path description">
-{desc|strip|escape|websub|addbreaks|nonempty}
-</div>
+<div class="page_path description">{desc|strip|escape|websub|nonempty}</div>
<div class="page_body">
<table>
+<tbody class="sourcelines"
+ data-logurl="{url|urlescape}log/{symrev}/{file|urlescape}"
+ data-selectabletag="TR"
+ data-ishead="{ishead}">
{annotate%annotateline}
+</tbody>
</table>
</div>
+<script type="text/javascript" src="{staticurl|urlescape}followlines.js"></script>
+
{footer}
--- a/mercurial/templates/gitweb/filecomparison.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/gitweb/filecomparison.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -13,6 +13,7 @@
</div>
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
<a href="{url|urlescape}shortlog{sessionvars%urlparameter}">shortlog</a> |
<a href="{url|urlescape}log{sessionvars%urlparameter}">changelog</a> |
@@ -30,7 +31,8 @@
comparison |
<a href="{url|urlescape}raw-diff/{symrev}/{file|urlescape}">raw</a> |
<a href="{url|urlescape}help{sessionvars%urlparameter}">help</a>
-<br/>
+</div>
+{searchform}
</div>
<div class="title">{file|escape}</div>
--- a/mercurial/templates/gitweb/filediff.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/gitweb/filediff.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -13,6 +13,7 @@
</div>
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
<a href="{url|urlescape}shortlog{sessionvars%urlparameter}">shortlog</a> |
<a href="{url|urlescape}log{sessionvars%urlparameter}">changelog</a> |
@@ -30,7 +31,8 @@
<a href="{url|urlescape}comparison/{symrev}/{file|urlescape}{sessionvars%urlparameter}">comparison</a> |
<a href="{url|urlescape}raw-diff/{symrev}/{file|urlescape}">raw</a> |
<a href="{url|urlescape}help{sessionvars%urlparameter}">help</a>
-<br/>
+</div>
+{searchform}
</div>
<div class="title">{file|escape}</div>
--- a/mercurial/templates/gitweb/filelog.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/gitweb/filelog.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -13,6 +13,7 @@
</div>
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
<a href="{url|urlescape}shortlog{sessionvars%urlparameter}">shortlog</a> |
<a href="{url|urlescape}log{sessionvars%urlparameter}">changelog</a> |
@@ -30,6 +31,8 @@
<br/>
{nav%filenav}
</div>
+{searchform}
+</div>
<div class="title" >
{file|urlescape}{if(linerange,
@@ -40,7 +43,7 @@
{entries%filelogentry}
</table>
-<div class="page_nav">
+<div class="extra_nav">
<a href="{url|urlescape}log/{symrev}/{file|urlescape}{lessvars%urlparameter}">less</a>
<a href="{url|urlescape}log/{symrev}/{file|urlescape}{morevars%urlparameter}">more</a>
{nav%filenav}
--- a/mercurial/templates/gitweb/filerevision.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/gitweb/filerevision.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -13,6 +13,7 @@
</div>
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
<a href="{url|urlescape}shortlog{sessionvars%urlparameter}">shortlog</a> |
<a href="{url|urlescape}log{sessionvars%urlparameter}">changelog</a> |
@@ -30,7 +31,8 @@
<a href="{url|urlescape}comparison/{symrev}/{file|urlescape}{sessionvars%urlparameter}">comparison</a> |
<a href="{url|urlescape}raw-file/{symrev}/{file|urlescape}">raw</a> |
<a href="{url|urlescape}help{sessionvars%urlparameter}">help</a>
-<br/>
+</div>
+{searchform}
</div>
<div class="title">{file|escape}</div>
@@ -59,12 +61,15 @@
</table>
</div>
-<div class="page_path description">
-{desc|strip|escape|websub|addbreaks|nonempty}
-</div>
+<div class="page_path description">{desc|strip|escape|websub|nonempty}</div>
<div class="page_body">
-<pre class="sourcelines stripes" data-logurl="{url|urlescape}log/{symrev}/{file|urlescape}" data-ishead="{ishead}">{text%fileline}</pre>
+<pre class="sourcelines stripes"
+ data-logurl="{url|urlescape}log/{symrev}/{file|urlescape}"
+ data-selectabletag="SPAN"
+ data-ishead="{ishead}">
+{text%fileline}
+</pre>
</div>
<script type="text/javascript" src="{staticurl|urlescape}followlines.js"></script>
--- a/mercurial/templates/gitweb/graph.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/gitweb/graph.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -13,13 +13,8 @@
<a href="/">Mercurial</a> {pathdef%breadcrumb} / graph
</div>
-<form action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<div class="search">
-<input type="text" name="rev" />
-</div>
-</form>
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
<a href="{url|urlescape}shortlog/{symrev}{sessionvars%urlparameter}">shortlog</a> |
<a href="{url|urlescape}log/{symrev}{sessionvars%urlparameter}">changelog</a> |
@@ -32,7 +27,9 @@
<br/>
<a href="{url|urlescape}graph/{symrev}{lessvars%urlparameter}">less</a>
<a href="{url|urlescape}graph/{symrev}{morevars%urlparameter}">more</a>
-| {changenav%navgraph}<br/>
+| {changenav%navgraph}
+</div>
+{searchform}
</div>
<div class="title"> </div>
@@ -102,7 +99,7 @@
// stop hiding script -->
</script>
-<div class="page_nav">
+<div class="extra_nav">
<a href="{url|urlescape}graph/{symrev}{lessvars%urlparameter}">less</a>
<a href="{url|urlescape}graph/{symrev}{morevars%urlparameter}">more</a>
| {changenav%navgraph}
--- a/mercurial/templates/gitweb/help.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/gitweb/help.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -13,6 +13,7 @@
</div>
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
<a href="{url|urlescape}shortlog{sessionvars%urlparameter}">shortlog</a> |
<a href="{url|urlescape}log{sessionvars%urlparameter}">changelog</a> |
@@ -22,7 +23,8 @@
<a href="{url|urlescape}branches{sessionvars%urlparameter}">branches</a> |
<a href="{url|urlescape}file{sessionvars%urlparameter}">files</a> |
help
-<br/>
+</div>
+{searchform}
</div>
<div class="title"> </div>
--- a/mercurial/templates/gitweb/helptopics.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/gitweb/helptopics.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -13,6 +13,7 @@
</div>
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
<a href="{url|urlescape}shortlog{sessionvars%urlparameter}">shortlog</a> |
<a href="{url|urlescape}log{sessionvars%urlparameter}">changelog</a> |
@@ -25,7 +26,8 @@
'<a href="{url|urlescape}help{sessionvars%urlparameter}">help</a>',
'help'
)}
-<br/>
+</div>
+{searchform}
</div>
<div class="title"> </div>
--- a/mercurial/templates/gitweb/manifest.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/gitweb/manifest.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -13,6 +13,7 @@
</div>
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
<a href="{url|urlescape}shortlog{sessionvars%urlparameter}">shortlog</a> |
<a href="{url|urlescape}log{sessionvars%urlparameter}">changelog</a> |
@@ -23,7 +24,8 @@
files |
<a href="{url|urlescape}rev/{symrev}{sessionvars%urlparameter}">changeset</a> {archives%archiveentry} |
<a href="{url|urlescape}help{sessionvars%urlparameter}">help</a>
-<br/>
+</div>
+{searchform}
</div>
<div class="title">{path|escape} <span class="logtags">{inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}{bookmarks%bookmarktag}</span></div>
--- a/mercurial/templates/gitweb/map Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/gitweb/map Wed Jul 19 07:51:41 2017 -0500
@@ -113,7 +113,7 @@
<a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">changeset</a>
</div>
</td>
- <td><pre><a class="linenr" href="#{lineid}">{linenumber}</a></pre></td>
+ <td class="followlines-btn-parent"><pre><a class="linenr" href="#{lineid}">{linenumber}</a></pre></td>
<td><pre>{line|escape}</pre></td>
</tr>'
annotateparent = '
@@ -323,3 +323,14 @@
urlparameter = '{separator}{name}={value|urlescape}'
hiddenformentry = '<input type="hidden" name="{name}" value="{value|escape}" />'
breadcrumb = '> <a href="{url|urlescape}">{name|escape}</a> '
+
+searchform = '
+ <div class="search">
+ <form id="searchform" action="{url|urlescape}log">
+ {sessionvars%hiddenformentry}
+ <input name="rev" type="text" value="{query|escape}" size="40" />
+ <div id="hint">{searchhint}</div>
+ </form>
+ </div>'
+searchhint = 'Find changesets by keywords (author, files, the commit message), revision
+ number or hash, or <a href="{url|urlescape}help/revsets">revset expression</a>.'
--- a/mercurial/templates/gitweb/search.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/gitweb/search.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -10,16 +10,10 @@
<div class="page_header">
<a href="{logourl}" title="Mercurial" style="float: right;">Mercurial</a>
<a href="/">Mercurial</a> {pathdef%breadcrumb} / search
-
-<form action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<div class="search">
-<input type="text" name="rev" value="{query|escape}" />
-</div>
-</form>
</div>
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
<a href="{url|urlescape}shortlog{sessionvars%urlparameter}">shortlog</a> |
<a href="{url|urlescape}log{sessionvars%urlparameter}">changelog</a> |
@@ -30,7 +24,8 @@
<a href="{url|urlescape}file{sessionvars%urlparameter}">files</a>{archives%archiveentry}
|
<a href="{url|urlescape}help{sessionvars%urlparameter}">help</a>
-<br/>
+</div>
+{searchform}
</div>
<div class="title">searching for {query|escape}</div>
--- a/mercurial/templates/gitweb/shortlog.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/gitweb/shortlog.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -12,13 +12,8 @@
<a href="/">Mercurial</a> {pathdef%breadcrumb} / shortlog
</div>
-<form action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<div class="search">
-<input type="text" name="rev" />
-</div>
-</form>
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
shortlog |
<a href="{url|urlescape}log/{symrev}{sessionvars%urlparameter}">changelog</a> |
@@ -30,13 +25,15 @@
<a href="{url|urlescape}help{sessionvars%urlparameter}">help</a>
<br/>{changenav%navshort}<br/>
</div>
+{searchform}
+</div>
<div class="title"> </div>
<table class="shortlogtable" cellspacing="0">
{entries%shortlogentry}
</table>
-<div class="page_nav">
+<div class="extra_nav">
{changenav%navshort}
</div>
--- a/mercurial/templates/gitweb/summary.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/gitweb/summary.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -10,15 +10,10 @@
<div class="page_header">
<a href="{logourl}" title="Mercurial" style="float: right;">Mercurial</a>
<a href="/">Mercurial</a> {pathdef%breadcrumb} / summary
-<form action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<div class="search">
-<input type="text" name="rev" />
-</div>
-</form>
</div>
<div class="page_nav">
+<div>
summary |
<a href="{url|urlescape}shortlog{sessionvars%urlparameter}">shortlog</a> |
<a href="{url|urlescape}log{sessionvars%urlparameter}">changelog</a> |
@@ -28,7 +23,8 @@
<a href="{url|urlescape}branches{sessionvars%urlparameter}">branches</a> |
<a href="{url|urlescape}file{sessionvars%urlparameter}">files</a>{archives%archiveentry} |
<a href="{url|urlescape}help{sessionvars%urlparameter}">help</a>
-<br/>
+</div>
+{searchform}
</div>
<div class="title"> </div>
--- a/mercurial/templates/gitweb/tags.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/gitweb/tags.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -13,6 +13,7 @@
</div>
<div class="page_nav">
+<div>
<a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a> |
<a href="{url|urlescape}shortlog{sessionvars%urlparameter}">shortlog</a> |
<a href="{url|urlescape}log{sessionvars%urlparameter}">changelog</a> |
@@ -22,7 +23,8 @@
<a href="{url|urlescape}branches{sessionvars%urlparameter}">branches</a> |
<a href="{url|urlescape}file{sessionvars%urlparameter}">files</a> |
<a href="{url|urlescape}help{sessionvars%urlparameter}">help</a>
-<br/>
+</div>
+{searchform}
</div>
<div class="title"> </div>
--- a/mercurial/templates/map-cmdline.show Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/map-cmdline.show Wed Jul 19 07:51:41 2017 -0500
@@ -1,9 +1,19 @@
# TODO there are a few deficiencies in this file:
-# * Due to the way the file is loaded, references to other entities in the
-# template doesn't work. That requires us to inline.
# * The "namespace" of the labels needs to be worked out. We currently
# piggyback on existing values so color works.
# * Obsolescence isn't considered for node labels. See _cset_labels in
# map-cmdline.default.
showbookmarks = '{if(active, "*", " ")} {pad(bookmark, longestbookmarklen + 4)}{shortest(node, 5)}\n'
-showwork = '{label("log.changeset changeset.{phase}", shortest(node, 5))}{if(branches, " ({label("log.branch", branch)})")}{if(bookmarks, " ({label("log.bookmarks", bookmarks)})")} {label("log.description", desc|firstline)}'
+
+showwork = '{cset_shortnode}{namespaces % cset_namespace} {cset_shortdesc}'
+showstack = '{showwork}'
+
+cset_shortnode = '{label("log.changeset changeset.{phase}", shortest(node, 5))}'
+
+# Treat branch and tags specially so we don't display "default" or "tip"
+cset_namespace = '{ifeq(namespace, "branches", names_branches, ifeq(namespace, "tags", names_tags, names_others))}'
+names_branches = '{ifeq(branch, "default", "", " ({label('log.{colorname}', branch)})")}'
+names_tags = '{if(names % "{ifeq(name, 'tip', '', name)}", " ({label('log.{colorname}', join(names % "{ifeq(name, 'tip', '', name)}", ' '))})")}'
+names_others = '{if(names, " ({label('log.{colorname}', join(names, ' '))})")}'
+
+cset_shortdesc = '{label("log.description", desc|firstline)}'
--- a/mercurial/templates/monoblue/bookmarks.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/monoblue/bookmarks.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / bookmarks</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/branches.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/monoblue/branches.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / branches</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/changelog.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/monoblue/changelog.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / changelog</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/changeset.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/monoblue/changeset.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / changeset</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/error.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/monoblue/error.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / not found: {repo|escape}</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/fileannotate.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/monoblue/fileannotate.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / annotate</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/filecomparison.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/monoblue/filecomparison.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / file comparison</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/filediff.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/monoblue/filediff.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / file diff</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/filelog.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/monoblue/filelog.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / file revisions</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/filerevision.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/monoblue/filerevision.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / file revision</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/graph.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/monoblue/graph.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -10,13 +10,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / graph</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/help.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/monoblue/help.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / help</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/helptopics.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/monoblue/helptopics.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / help</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/manifest.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/monoblue/manifest.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / files</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/map Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/monoblue/map Wed Jul 19 07:51:41 2017 -0500
@@ -279,3 +279,12 @@
hiddenformentry = '<input type="hidden" name="{name}" value="{value|escape}" />'
graph = graph.tmpl
breadcrumb = '> <a href="{url|urlescape}">{name|escape}</a> '
+
+searchform = '
+ <form action="{url|urlescape}log">
+ {sessionvars%hiddenformentry}
+ <dl class="search">
+ <dt><label>Search: </label></dt>
+ <dd><input type="text" name="rev" value="{query|escape}" /></dd>
+ </dl>
+ </form>'
--- a/mercurial/templates/monoblue/notfound.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/monoblue/notfound.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / not found: {repo|escape}</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/search.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/monoblue/search.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / search</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" value="{query|escape}" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/shortlog.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/monoblue/shortlog.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / shortlog</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/monoblue/summary.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/monoblue/summary.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / summary</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li class="current">summary</li>
--- a/mercurial/templates/monoblue/tags.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/monoblue/tags.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -9,13 +9,7 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb} / tags</h1>
- <form action="{url|urlescape}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+ {searchform}
<ul class="page-nav">
<li><a href="{url|urlescape}summary{sessionvars%urlparameter}">summary</a></li>
--- a/mercurial/templates/paper/bookmarks.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/paper/bookmarks.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -34,11 +34,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb}</h2>
<h3>bookmarks</h3>
-<form class="search" action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<p><input name="rev" id="search1" type="text" size="30" /></p>
-<div id="hint">{searchhint}</div>
-</form>
+{searchform}
<table class="bigtable">
<thead>
--- a/mercurial/templates/paper/branches.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/paper/branches.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -34,11 +34,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb}</h2>
<h3>branches</h3>
-<form class="search" action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<p><input name="rev" id="search1" type="text" size="30" /></p>
-<div id="hint">{searchhint}</div>
-</form>
+{searchform}
<table class="bigtable">
<thead>
--- a/mercurial/templates/paper/changeset.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/paper/changeset.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -36,11 +36,7 @@
{changesetbranch%changelogbranchname}{changesettag}{changesetbookmark}
</h3>
-<form class="search" action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<p><input name="rev" id="search1" type="text" size="30" /></p>
-<div id="hint">{searchhint}</div>
-</form>
+{searchform}
<div class="description">{desc|strip|escape|websub|nonempty}</div>
--- a/mercurial/templates/paper/error.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/paper/error.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -26,11 +26,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb}</h2>
<h3>error</h3>
-<form class="search" action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<p><input name="rev" id="search1" type="text" size="30"></p>
-<div id="hint">{searchhint}</div>
-</form>
+{searchform}
<div class="description">
<p>
--- a/mercurial/templates/paper/fileannotate.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/paper/fileannotate.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -42,11 +42,7 @@
{branch%changelogbranchname}{tags%changelogtag}{bookmarks%changelogtag}
</h3>
-<form class="search" action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<p><input name="rev" id="search1" type="text" size="30" /></p>
-<div id="hint">{searchhint}</div>
-</form>
+{searchform}
<div class="description">{desc|strip|escape|websub|nonempty}</div>
@@ -77,7 +73,10 @@
<th class="line"> line source</th>
</tr>
</thead>
-<tbody class="stripes2">
+<tbody class="stripes2 sourcelines"
+ data-logurl="{url|urlescape}log/{symrev}/{file|urlescape}"
+ data-selectabletag="TR"
+ data-ishead="{ishead}">
{annotate%annotateline}
</tbody>
</table>
@@ -85,4 +84,6 @@
</div>
</div>
+<script type="text/javascript" src="{staticurl|urlescape}followlines.js"></script>
+
{footer}
--- a/mercurial/templates/paper/filecomparison.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/paper/filecomparison.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -41,11 +41,7 @@
{branch%changelogbranchname}{tags%changelogtag}{bookmarks%changelogtag}
</h3>
-<form class="search" action="{url|urlescape}log">
-<p>{sessionvars%hiddenformentry}</p>
-<p><input name="rev" id="search1" type="text" size="30" /></p>
-<div id="hint">{searchhint}</div>
-</form>
+{searchform}
<div class="description">{desc|strip|escape|websub|nonempty}</div>
--- a/mercurial/templates/paper/filediff.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/paper/filediff.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -41,11 +41,7 @@
{branch%changelogbranchname}{tags%changelogtag}{bookmarks%changelogtag}
</h3>
-<form class="search" action="{url|urlescape}log">
-<p>{sessionvars%hiddenformentry}</p>
-<p><input name="rev" id="search1" type="text" size="30" /></p>
-<div id="hint">{searchhint}</div>
-</form>
+{searchform}
<div class="description">{desc|strip|escape|websub|nonempty}</div>
--- a/mercurial/templates/paper/filelog.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/paper/filelog.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -51,11 +51,7 @@
' (following lines {linerange}{if(descend, ', descending')} <a href="{url|urlescape}log/{symrev}/{file|urlescape}{sessionvars%urlparameter}">back to filelog</a>)')}
</h3>
-<form class="search" action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<p><input name="rev" id="search1" type="text" size="30" /></p>
-<div id="hint">{searchhint}</div>
-</form>
+{searchform}
<div class="navigate">
<a href="{url|urlescape}log/{symrev}/{file|urlescape}{lessvars%urlparameter}">less</a>
--- a/mercurial/templates/paper/filerevision.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/paper/filerevision.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -41,11 +41,7 @@
{branch%changelogbranchname}{tags%changelogtag}{bookmarks%changelogtag}
</h3>
-<form class="search" action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<p><input name="rev" id="search1" type="text" size="30" /></p>
-<div id="hint">{searchhint}</div>
-</form>
+{searchform}
<div class="description">{desc|strip|escape|websub|nonempty}</div>
@@ -71,7 +67,12 @@
<div class="overflow">
<div class="sourcefirst linewraptoggle">line wrap: <a class="linewraplink" href="javascript:toggleLinewrap()">on</a></div>
<div class="sourcefirst"> line source</div>
-<pre class="sourcelines stripes4 wrap bottomline" data-logurl="{url|urlescape}log/{symrev}/{file|urlescape}" data-ishead="{ishead}">{text%fileline}</pre>
+<pre class="sourcelines stripes4 wrap bottomline"
+ data-logurl="{url|urlescape}log/{symrev}/{file|urlescape}"
+ data-selectabletag="SPAN"
+ data-ishead="{ishead}">
+{text%fileline}
+</pre>
</div>
<script type="text/javascript" src="{staticurl|urlescape}followlines.js"></script>
--- a/mercurial/templates/paper/graph.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/paper/graph.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -39,11 +39,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb}</h2>
<h3>graph</h3>
-<form class="search" action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<p><input name="rev" id="search1" type="text" size="30" /></p>
-<div id="hint">{searchhint}</div>
-</form>
+{searchform}
<div class="navigate">
<a href="{url|urlescape}graph/{symrev}{lessvars%urlparameter}">less</a>
--- a/mercurial/templates/paper/help.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/paper/help.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -24,12 +24,7 @@
<div class="main">
<h2 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb}</h2>
<h3>Help: {topic}</h3>
-
-<form class="search" action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<p><input name="rev" id="search1" type="text" size="30" /></p>
-<div id="hint">{searchhint}</div>
-</form>
+{searchform}
<div id="doc">
{rstdoc(doc, "html")}
</div>
--- a/mercurial/templates/paper/helptopics.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/paper/helptopics.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -26,11 +26,7 @@
<div class="main">
<h2 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb}</h2>
-<form class="search" action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<p><input name="rev" id="search1" type="text" size="30" /></p>
-<div id="hint">{searchhint}</div>
-</form>
+{searchform}
<table class="bigtable">
<tr><td colspan="2"><h2><a name="topics" href="#topics">Topics</a></h2></td></tr>
{topics % helpentry}
--- a/mercurial/templates/paper/manifest.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/paper/manifest.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -35,11 +35,7 @@
{branch%changelogbranchname}{tags%changelogtag}{bookmarks%changelogtag}
</h3>
-<form class="search" action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<p><input name="rev" id="search1" type="text" size="30" /></p>
-<div id="hint">{searchhint}</div>
-</form>
+{searchform}
<table class="bigtable">
<thead>
--- a/mercurial/templates/paper/map Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/paper/map Wed Jul 19 07:51:41 2017 -0500
@@ -94,7 +94,7 @@
<a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">changeset</a>
</div>
</td>
- <td class="source"><a href="#{lineid}">{linenumber}</a> {line|escape}</td>
+ <td class="source followlines-btn-parent"><a href="#{lineid}">{linenumber}</a> {line|escape}</td>
</tr>'
annotateparent = '
<a href="{url|urlescape}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{rev}</a>'
@@ -243,5 +243,11 @@
hiddenformentry = '<input type="hidden" name="{name}" value="{value|escape}" />'
breadcrumb = '> <a href="{url|urlescape}">{name|escape}</a> '
+searchform = '
+ <form class="search" action="{url|urlescape}log">
+ {sessionvars%hiddenformentry}
+ <p><input name="rev" id="search1" type="text" size="30" value="{query|escape}" /></p>
+ <div id="hint">{searchhint}</div>
+ </form>'
searchhint = 'Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="{url|urlescape}help/revsets">revset expression</a>.'
--- a/mercurial/templates/paper/search.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/paper/search.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -33,11 +33,7 @@
Use {showunforcekw}</a> instead.')}
</p>
-<form class="search" action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<p><input name="rev" id="search1" type="text" size="30" value="{query|escape}"></p>
-<div id="hint">{searchhint}</div>
-</form>
+{searchform}
<div class="navigate">
<a href="{url|urlescape}log{lessvars%urlparameter}">less</a>
--- a/mercurial/templates/paper/shortlog.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/paper/shortlog.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -41,11 +41,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb}</h2>
<h3>log</h3>
-<form class="search" action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<p><input name="rev" id="search1" type="text" size="30" value="{query|escape}" /></p>
-<div id="hint">{searchhint}</div>
-</form>
+{searchform}
<div class="navigate">
<a href="{url|urlescape}shortlog/{symrev}{lessvars%urlparameter}">less</a>
--- a/mercurial/templates/paper/tags.tmpl Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/paper/tags.tmpl Wed Jul 19 07:51:41 2017 -0500
@@ -34,11 +34,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb}</h2>
<h3>tags</h3>
-<form class="search" action="{url|urlescape}log">
-{sessionvars%hiddenformentry}
-<p><input name="rev" id="search1" type="text" size="30" /></p>
-<div id="hint">{searchhint}</div>
-</form>
+{searchform}
<table class="bigtable">
<thead>
--- a/mercurial/templates/static/followlines.js Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/static/followlines.js Wed Jul 19 07:51:41 2017 -0500
@@ -17,15 +17,14 @@
return;
}
- var isHead = parseInt(sourcelines.dataset.ishead || "0");
+ // Tag of children of "sourcelines" element on which to add "line
+ // selection" style.
+ var selectableTag = sourcelines.dataset.selectabletag;
+ if (typeof selectableTag === 'undefined') {
+ return;
+ }
- // tooltip to invite on lines selection
- var tooltip = document.createElement('div');
- tooltip.id = 'followlines-tooltip';
- tooltip.classList.add('hidden');
- var initTooltipText = 'click to start following lines history from here';
- tooltip.textContent = initTooltipText;
- sourcelines.appendChild(tooltip);
+ var isHead = parseInt(sourcelines.dataset.ishead || "0");
//* position "element" on top-right of cursor */
function positionTopRight(element, event) {
@@ -35,39 +34,68 @@
element.style.left = x;
}
- var tooltipTimeoutID;
- //* move the "tooltip" with cursor (top-right) and show it after 1s */
- function moveAndShowTooltip(e) {
- if (typeof tooltipTimeoutID !== 'undefined') {
- // avoid accumulation of timeout callbacks (blinking)
- window.clearTimeout(tooltipTimeoutID);
- }
- tooltip.classList.add('hidden');
- positionTopRight(tooltip, e);
- tooltipTimeoutID = window.setTimeout(function() {
- tooltip.classList.remove('hidden');
- }, 1000);
+ // retrieve all direct *selectable* children of class="sourcelines"
+ // element
+ var selectableElements = Array.prototype.filter.call(
+ sourcelines.children,
+ function(x) { return x.tagName === selectableTag });
+
+ var btnTitleStart = 'start following lines history from here';
+ var btnTitleEnd = 'terminate line block selection here';
+
+ //** return a <button> element with +/- spans */
+ function createButton() {
+ var btn = document.createElement('button');
+ btn.title = btnTitleStart;
+ btn.classList.add('btn-followlines');
+ var plusSpan = document.createElement('span');
+ plusSpan.classList.add('followlines-plus');
+ plusSpan.textContent = '+';
+ btn.appendChild(plusSpan);
+ var br = document.createElement('br');
+ btn.appendChild(br);
+ var minusSpan = document.createElement('span');
+ minusSpan.classList.add('followlines-minus');
+ minusSpan.textContent = '−';
+ btn.appendChild(minusSpan);
+ return btn;
}
- // on mousemove, show tooltip close to cursor position
- sourcelines.addEventListener('mousemove', moveAndShowTooltip);
+ // extend DOM with CSS class for selection highlight and action buttons
+ var followlinesButtons = []
+ for (var i = 0; i < selectableElements.length; i++) {
+ selectableElements[i].classList.add('followlines-select');
+ var btn = createButton();
+ followlinesButtons.push(btn);
+ // insert the <button> as child of `selectableElements[i]` unless the
+ // latter has itself a child with a "followlines-btn-parent" class
+ // (annotate view)
+ var btnSupportElm = selectableElements[i];
+ var childSupportElms = btnSupportElm.getElementsByClassName(
+ 'followlines-btn-parent');
+ if ( childSupportElms.length > 0 ) {
+ btnSupportElm = childSupportElms[0];
+ }
+ var refNode = btnSupportElm.children[0]; // node to insert <button> before
+ btnSupportElm.insertBefore(btn, refNode);
+ }
- // retrieve all direct <span> children of <pre class="sourcelines">
- var spans = Array.prototype.filter.call(
- sourcelines.children,
- function(x) { return x.tagName === 'SPAN' });
-
- // add a "followlines-select" class to change cursor type in CSS
- for (var i = 0; i < spans.length; i++) {
- spans[i].classList.add('followlines-select');
+ // ** re-initialize followlines buttons */
+ function resetButtons() {
+ for (var i = 0; i < followlinesButtons.length; i++) {
+ var btn = followlinesButtons[i];
+ btn.title = btnTitleStart;
+ btn.classList.remove('btn-followlines-end');
+ btn.classList.remove('btn-followlines-hidden');
+ }
}
var lineSelectedCSSClass = 'followlines-selected';
- //** add CSS class on <span> element in `from`-`to` line range */
+ //** add CSS class on selectable elements in `from`-`to` line range */
function addSelectedCSSClass(from, to) {
for (var i = from; i <= to; i++) {
- spans[i].classList.add(lineSelectedCSSClass);
+ selectableElements[i].classList.add(lineSelectedCSSClass);
}
}
@@ -80,39 +108,66 @@
}
}
- // ** return the <span> element parent of `element` */
- function findParentSpan(element) {
+ // ** return the element of type "selectableTag" parent of `element` */
+ function selectableParent(element) {
var parent = element.parentElement;
if (parent === null) {
return null;
}
- if (element.tagName == 'SPAN' && parent.isSameNode(sourcelines)) {
+ if (element.tagName == selectableTag && parent.isSameNode(sourcelines)) {
return element;
}
- return findParentSpan(parent);
+ return selectableParent(parent);
+ }
+
+ // ** update buttons title and style upon first click */
+ function updateButtons(selectable) {
+ for (var i = 0; i < followlinesButtons.length; i++) {
+ var btn = followlinesButtons[i];
+ btn.title = btnTitleEnd;
+ btn.classList.add('btn-followlines-end');
+ }
+ // on clicked button, change title to "cancel"
+ var clicked = selectable.getElementsByClassName('btn-followlines')[0];
+ clicked.title = 'cancel';
+ clicked.classList.remove('btn-followlines-end');
+ }
+
+ //** add `listener` on "click" event for all `followlinesButtons` */
+ function buttonsAddEventListener(listener) {
+ for (var i = 0; i < followlinesButtons.length; i++) {
+ followlinesButtons[i].addEventListener('click', listener);
+ }
+ }
+
+ //** remove `listener` on "click" event for all `followlinesButtons` */
+ function buttonsRemoveEventListener(listener) {
+ for (var i = 0; i < followlinesButtons.length; i++) {
+ followlinesButtons[i].removeEventListener('click', listener);
+ }
}
//** event handler for "click" on the first line of a block */
function lineSelectStart(e) {
- var startElement = findParentSpan(e.target);
+ var startElement = selectableParent(e.target.parentElement);
if (startElement === null) {
- // not a <span> (maybe <a>): abort, keeping event listener
- // registered for other click with <span> target
+ // not a "selectable" element (maybe <a>): abort, keeping event
+ // listener registered for other click with a "selectable" target
return;
}
- // update tooltip text
- tooltip.textContent = 'click again to terminate line block selection here';
+ // update button tooltip text and CSS
+ updateButtons(startElement);
var startId = parseInt(startElement.id.slice(1));
startElement.classList.add(lineSelectedCSSClass); // CSS
// remove this event listener
- sourcelines.removeEventListener('click', lineSelectStart);
+ buttonsRemoveEventListener(lineSelectStart);
//** event handler for "click" on the last line of the block */
function lineSelectEnd(e) {
- var endElement = findParentSpan(e.target);
+ var endElement = selectableParent(e.target.parentElement);
if (endElement === null) {
// not a <span> (maybe <a>): abort, keeping event listener
// registered for other click with <span> target
@@ -120,27 +175,18 @@
}
// remove this event listener
- sourcelines.removeEventListener('click', lineSelectEnd);
+ buttonsRemoveEventListener(lineSelectEnd);
- // hide tooltip and disable motion tracking
- tooltip.classList.add('hidden');
- sourcelines.removeEventListener('mousemove', moveAndShowTooltip);
- window.clearTimeout(tooltipTimeoutID);
-
- //* restore initial "tooltip" state */
- function restoreTooltip() {
- tooltip.textContent = initTooltipText;
- sourcelines.addEventListener('mousemove', moveAndShowTooltip);
- }
+ // reset button tooltip text
+ resetButtons();
// compute line range (startId, endId)
var endId = parseInt(endElement.id.slice(1));
if (endId == startId) {
// clicked twice the same line, cancel and reset initial state
- // (CSS, event listener for selection start, tooltip)
+ // (CSS, event listener for selection start)
removeSelectedCSSClass();
- sourcelines.addEventListener('click', lineSelectStart);
- restoreTooltip();
+ buttonsAddEventListener(lineSelectStart);
return;
}
var inviteElement = endElement;
@@ -161,31 +207,37 @@
inviteElement.appendChild(div);
// set position close to cursor (top-right)
positionTopRight(div, e);
+ // hide all buttons
+ for (var i = 0; i < followlinesButtons.length; i++) {
+ followlinesButtons[i].classList.add('btn-followlines-hidden');
+ }
//** event handler for cancelling selection */
function cancel() {
// remove invite box
div.parentNode.removeChild(div);
// restore initial event listeners
- sourcelines.addEventListener('click', lineSelectStart);
- sourcelines.removeEventListener('click', cancel);
+ buttonsAddEventListener(lineSelectStart);
+ buttonsRemoveEventListener(cancel);
+ for (var i = 0; i < followlinesButtons.length; i++) {
+ followlinesButtons[i].classList.remove('btn-followlines-hidden');
+ }
// remove styles on selected lines
removeSelectedCSSClass();
- // restore tooltip element
- restoreTooltip();
+ resetButtons();
}
// bind cancel event to click on <button>
button.addEventListener('click', cancel);
// as well as on an click on any source line
- sourcelines.addEventListener('click', cancel);
+ buttonsAddEventListener(cancel);
}
- sourcelines.addEventListener('click', lineSelectEnd);
+ buttonsAddEventListener(lineSelectEnd);
}
- sourcelines.addEventListener('click', lineSelectStart);
+ buttonsAddEventListener(lineSelectStart);
//** return a <div id="followlines"> and inner cancel <button> elements */
function followlinesBox(targetUri, fromline, toline, isHead) {
--- a/mercurial/templates/static/style-gitweb.css Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/static/style-gitweb.css Wed Jul 19 07:51:41 2017 -0500
@@ -4,8 +4,19 @@
div.page_header { height:25px; padding:8px; font-size:18px; font-weight:bold; background-color:#d9d8d1; }
div.page_header a:visited { color:#0000cc; }
div.page_header a:hover { color:#880000; }
-div.page_nav { padding:8px; }
+div.page_nav {
+ padding:8px;
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+}
div.page_nav a:visited { color:#0000cc; }
+div.extra_nav {
+ padding: 8px;
+}
+div.extra_nav a:visited {
+ color: #0000cc;
+}
div.page_path { padding:8px; border:solid #d9d8d1; border-width:0px 0px 1px}
div.page_footer { padding:4px 8px; background-color: #d9d8d1; }
div.page_footer_text { float:left; color:#555555; font-style:italic; }
@@ -52,13 +63,30 @@
div.pre { font-family:monospace; font-size:12px; white-space:pre; }
div.diff_info { font-family:monospace; color:#000099; background-color:#edece6; font-style:italic; }
div.index_include { border:solid #d9d8d1; border-width:0px 0px 1px; padding:12px 8px; }
-div.search { margin:4px 8px; position:absolute; top:56px; right:12px }
+
+.search {
+ margin-right: 8px;
+}
+
+div#hint {
+ position: absolute;
+ display: none;
+ width: 250px;
+ padding: 5px;
+ background: #ffc;
+ border: 1px solid yellow;
+ border-radius: 5px;
+}
+
+#searchform:hover div#hint { display: block; }
+
tr.thisrev a { color:#999999; text-decoration: none; }
tr.thisrev pre { color:#009900; }
td.annotate {
white-space: nowrap;
}
div.annotate-info {
+ z-index: 5;
display: none;
position: absolute;
background-color: #FFFFFF;
@@ -125,7 +153,7 @@
-ms-user-select: none;
user-select: none;
display: inline-block;
- margin-left: -5em;
+ margin-left: -6em;
width: 4em;
color: #999;
text-align: right;
@@ -147,13 +175,11 @@
.description {
font-family: monospace;
+ white-space: pre;
}
/* Followlines */
-div.page_body pre.sourcelines > span.followlines-select:hover {
- cursor: cell;
-}
-
+tbody.sourcelines > tr.followlines-selected,
pre.sourcelines > span.followlines-selected {
background-color: #99C7E9 !important;
}
@@ -190,21 +216,62 @@
font-family: sans-serif;
}
-div#followlines-tooltip {
+.btn-followlines {
display: none;
- position: fixed;
- background-color: #ffc;
- border: 1px solid #999;
- padding: 2px;
+ cursor: pointer;
+ box-sizing: content-box;
+ font-size: 11px;
+ width: 13px;
+ height: 13px;
+ border-radius: 3px;
+ margin: 0px;
+ margin-top: -2px;
+ padding: 0px;
+ background-color: #E5FDE5;
+ border: 1px solid #9BC19B;
+ font-family: monospace;
+ text-align: center;
+ line-height: 5px;
+}
+
+tr .btn-followlines {
+ position: absolute;
}
-.sourcelines:hover > div#followlines-tooltip {
+span .btn-followlines {
+ float: left;
+}
+
+span.followlines-select .btn-followlines {
+ margin-left: -1.6em;
+}
+
+.btn-followlines:hover {
+ transform: scale(1.1, 1.1);
+}
+
+.btn-followlines .followlines-plus {
+ color: green;
+}
+
+.btn-followlines .followlines-minus {
+ color: red;
+}
+
+.btn-followlines-end {
+ background-color: #ffdcdc;
+}
+
+.sourcelines tr:hover .btn-followlines,
+.sourcelines span.followlines-select:hover > .btn-followlines {
display: inline;
}
-.sourcelines:hover > div#followlines-tooltip.hidden {
+.btn-followlines-hidden,
+.sourcelines tr:hover .btn-followlines-hidden {
display: none;
}
+
/* Graph */
div#wrapper {
position: relative;
--- a/mercurial/templates/static/style-paper.css Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/templates/static/style-paper.css Wed Jul 19 07:51:41 2017 -0500
@@ -214,6 +214,7 @@
white-space: nowrap;
}
div.annotate-info {
+ z-index: 5;
display: none;
position: absolute;
background-color: #FFFFFF;
@@ -267,7 +268,7 @@
-ms-user-select: none;
user-select: none;
display: inline-block;
- margin-left: -5em;
+ margin-left: -6em;
width: 4em;
font-size: smaller;
color: #999;
@@ -280,10 +281,8 @@
background-color: #bfdfff;
}
-div.overflow pre.sourcelines > span.followlines-select:hover {
- cursor: cell;
-}
-
+/* Followlines */
+tbody.sourcelines > tr.followlines-selected,
pre.sourcelines > span.followlines-selected {
background-color: #99C7E9;
}
@@ -320,19 +319,59 @@
font-family: sans-serif;
}
-div#followlines-tooltip {
+.btn-followlines {
display: none;
- position: fixed;
- background-color: #ffc;
- border: 1px solid #999;
- padding: 2px;
+ cursor: pointer;
+ box-sizing: content-box;
+ font-size: 12px;
+ width: 13px;
+ height: 13px;
+ border-radius: 3px;
+ margin: 0px;
+ margin-top: -2px;
+ padding: 0px;
+ background-color: #E5FDE5;
+ border: 1px solid #9BC19B;
+ font-family: monospace;
+ text-align: center;
+ line-height: 5px;
+}
+
+tr .btn-followlines {
+ position: absolute;
}
-.sourcelines:hover > div#followlines-tooltip {
+span .btn-followlines {
+ float: left;
+}
+
+span.followlines-select .btn-followlines {
+ margin-left: -1.5em;
+}
+
+.btn-followlines:hover {
+ transform: scale(1.2, 1.2);
+}
+
+.btn-followlines .followlines-plus {
+ color: green;
+}
+
+.btn-followlines .followlines-minus {
+ color: red;
+}
+
+.btn-followlines-end {
+ background-color: #ffdcdc;
+}
+
+.sourcelines tr:hover .btn-followlines,
+.sourcelines span.followlines-select:hover > .btn-followlines {
display: inline;
}
-.sourcelines:hover > div#followlines-tooltip.hidden {
+.btn-followlines-hidden,
+.sourcelines tr:hover .btn-followlines-hidden {
display: none;
}
--- a/mercurial/transaction.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/transaction.py Wed Jul 19 07:51:41 2017 -0500
@@ -26,10 +26,10 @@
# These are the file generators that should only be executed after the
# finalizers are done, since they rely on the output of the finalizers (like
# the changelog having been written).
-postfinalizegenerators = set([
+postfinalizegenerators = {
'bookmarks',
'dirstate'
-])
+}
gengroupall='all'
gengroupprefinalize='prefinalize'
@@ -44,11 +44,12 @@
return _active
def _playback(journal, report, opener, vfsmap, entries, backupentries,
- unlink=True):
+ unlink=True, checkambigfiles=None):
for f, o, _ignore in entries:
if o or not unlink:
+ checkambig = checkambigfiles and (f, '') in checkambigfiles
try:
- fp = opener(f, 'a', checkambig=True)
+ fp = opener(f, 'a', checkambig=checkambig)
fp.truncate(o)
fp.close()
except IOError:
@@ -71,8 +72,9 @@
if f and b:
filepath = vfs.join(f)
backuppath = vfs.join(b)
+ checkambig = checkambigfiles and (f, l) in checkambigfiles
try:
- util.copyfile(backuppath, filepath, checkambig=True)
+ util.copyfile(backuppath, filepath, checkambig=checkambig)
backupfiles.append(b)
except IOError:
report(_("failed to recover %s\n") % f)
@@ -101,7 +103,8 @@
class transaction(object):
def __init__(self, report, opener, vfsmap, journalname, undoname=None,
- after=None, createmode=None, validator=None, releasefn=None):
+ after=None, createmode=None, validator=None, releasefn=None,
+ checkambigfiles=None):
"""Begin a new transaction
Begins a new transaction that allows rolling back writes in the event of
@@ -110,6 +113,10 @@
* `after`: called after the transaction has been committed
* `createmode`: the mode of the journal file that will be created
* `releasefn`: called after releasing (with transaction and result)
+
+ `checkambigfiles` is a set of (path, vfs-location) tuples,
+ which determine whether file stat ambiguity should be avoided
+ for corresponded files.
"""
self.count = 1
self.usages = 1
@@ -137,6 +144,14 @@
releasefn = lambda tr, success: None
self.releasefn = releasefn
+ self.checkambigfiles = set()
+ if checkambigfiles:
+ self.checkambigfiles.update(checkambigfiles)
+
+ # A dict dedicated to precisely tracking the changes introduced in the
+ # transaction.
+ self.changes = {}
+
# a dict of arguments to be passed to hooks
self.hookargs = {}
self.file = opener.open(self.journal, "w")
@@ -288,6 +303,12 @@
# but for bookmarks that are handled outside this mechanism.
self._filegenerators[genid] = (order, filenames, genfunc, location)
+ @active
+ def removefilegenerator(self, genid):
+ """reverse of addfilegenerator, remove a file generator function"""
+ if genid in self._filegenerators:
+ del self._filegenerators[genid]
+
def _generatefiles(self, suffix='', group=gengroupall):
# write files registered for generation
any = False
@@ -308,10 +329,12 @@
name += suffix
if suffix:
self.registertmp(name, location=location)
+ checkambig = False
else:
self.addbackup(name, location=location)
+ checkambig = (name, location) in self.checkambigfiles
files.append(vfs(name, 'w', atomictemp=True,
- checkambig=not suffix))
+ checkambig=checkambig))
genfunc(*files)
finally:
for f in files:
@@ -402,7 +425,7 @@
@active
def addpostclose(self, category, callback):
- """add a callback to be called after the transaction is closed
+ """add or replace a callback to be called after the transaction closed
The transaction will be given as callback's first argument.
@@ -412,6 +435,11 @@
self._postclosecallback[category] = callback
@active
+ def getpostclose(self, category):
+ """return a postclose callback added before, or None"""
+ return self._postclosecallback.get(category, None)
+
+ @active
def addabort(self, category, callback):
"""add a callback to be called when the transaction is aborted.
@@ -427,6 +455,7 @@
'''commit the transaction'''
if self.count == 1:
self.validator(self) # will raise exception if needed
+ self.validator = None # Help prevent cycles.
self._generatefiles(group=gengroupprefinalize)
categories = sorted(self._finalizecallback)
for cat in categories:
@@ -460,6 +489,7 @@
self._writeundo()
if self.after:
self.after()
+ self.after = None # Help prevent cycles.
if self.opener.isfile(self._backupjournal):
self.opener.unlink(self._backupjournal)
if self.opener.isfile(self.journal):
@@ -483,6 +513,7 @@
self.journal = None
self.releasefn(self, True) # notify success of closing transaction
+ self.releasefn = None # Help prevent cycles.
# run post close action
categories = sorted(self._postclosecallback)
@@ -546,15 +577,17 @@
# Prevent double usage and help clear cycles.
self._abortcallback = None
_playback(self.journal, self.report, self.opener, self._vfsmap,
- self.entries, self._backupentries, False)
+ self.entries, self._backupentries, False,
+ checkambigfiles=self.checkambigfiles)
self.report(_("rollback completed\n"))
except BaseException:
self.report(_("rollback failed - please run hg recover\n"))
finally:
self.journal = None
self.releasefn(self, False) # notify failure of transaction
+ self.releasefn = None # Help prevent cycles.
-def rollback(opener, vfsmap, file, report):
+def rollback(opener, vfsmap, file, report, checkambigfiles=None):
"""Rolls back the transaction contained in the given file
Reads the entries in the specified file, and the corresponding
@@ -565,6 +598,10 @@
file\0offset pairs, delimited by newlines. The corresponding
'*.backupfiles' file should contain a list of file\0backupfile
pairs, delimited by \0.
+
+ `checkambigfiles` is a set of (path, vfs-location) tuples,
+ which determine whether file stat ambiguity should be avoided at
+ restoring corresponded files.
"""
entries = []
backupentries = []
@@ -596,4 +633,5 @@
report(_("journal was created by a different version of "
"Mercurial\n"))
- _playback(file, report, opener, vfsmap, entries, backupentries)
+ _playback(file, report, opener, vfsmap, entries, backupentries,
+ checkambigfiles=checkambigfiles)
--- a/mercurial/ui.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/ui.py Wed Jul 19 07:51:41 2017 -0500
@@ -27,6 +27,7 @@
from . import (
color,
config,
+ configitems,
encoding,
error,
formatter,
@@ -43,6 +44,20 @@
_keepalnum = ''.join(c for c in map(pycompat.bytechr, range(256))
if not c.isalnum())
+# The config knobs that will be altered (if unset) by ui.tweakdefaults.
+tweakrc = """
+[ui]
+# The rollback command is dangerous. As a rule, don't use it.
+rollback = False
+
+[commands]
+# Make `hg status` emit cwd-relative paths by default.
+status.relative = yes
+
+[diff]
+git = 1
+"""
+
samplehgrcs = {
'user':
"""# example user config (see 'hg help config' for more info)
@@ -140,6 +155,10 @@
def _catchterm(*args):
raise error.SignalInterrupt
+# unique object used to detect no default value has been provided when
+# retrieving configuration value.
+_unset = object()
+
class ui(object):
def __init__(self, src=None):
"""Create a fresh new ui object if no src given
@@ -160,6 +179,7 @@
self._bufferapplylabels = None
self.quiet = self.verbose = self.debugflag = self.tracebackflag = False
self._reportuntrusted = True
+ self._knownconfig = configitems.coreitems
self._ocfg = config.config() # overlay
self._tcfg = config.config() # trusted
self._ucfg = config.config() # untrusted
@@ -182,6 +202,7 @@
self.fin = src.fin
self.pageractive = src.pageractive
self._disablepager = src._disablepager
+ self._tweaked = src._tweaked
self._tcfg = src._tcfg.copy()
self._ucfg = src._ucfg.copy()
@@ -205,6 +226,7 @@
self.fin = util.stdin
self.pageractive = False
self._disablepager = False
+ self._tweaked = False
# shared read-only environment
self.environ = encoding.environ
@@ -241,8 +263,29 @@
u.fixconfig(section=section)
else:
raise error.ProgrammingError('unknown rctype: %s' % t)
+ u._maybetweakdefaults()
return u
+ def _maybetweakdefaults(self):
+ if not self.configbool('ui', 'tweakdefaults'):
+ return
+ if self._tweaked or self.plain('tweakdefaults'):
+ return
+
+ # Note: it is SUPER IMPORTANT that you set self._tweaked to
+ # True *before* any calls to setconfig(), otherwise you'll get
+ # infinite recursion between setconfig and this method.
+ #
+ # TODO: We should extract an inner method in setconfig() to
+ # avoid this weirdness.
+ self._tweaked = True
+ tmpcfg = config.config()
+ tmpcfg.parse('<tweakdefaults>', tweakrc)
+ for section in tmpcfg:
+ for name, value in tmpcfg.items(section):
+ if not self.hasconfig(section, name):
+ self.setconfig(section, name, value, "<tweakdefaults>")
+
def copy(self):
return self.__class__(self)
@@ -263,7 +306,7 @@
(util.timer() - starttime) * 1000
def formatter(self, topic, opts):
- return formatter.formatter(self, topic, opts)
+ return formatter.formatter(self, self, topic, opts)
def _trusted(self, fp, f):
st = util.fstat(fp)
@@ -365,8 +408,8 @@
if self.verbose and self.quiet:
self.quiet = self.verbose = False
self._reportuntrusted = self.debugflag or self.configbool("ui",
- "report_untrusted", True)
- self.tracebackflag = self.configbool('ui', 'traceback', False)
+ "report_untrusted")
+ self.tracebackflag = self.configbool('ui', 'traceback')
self.logblockedtimes = self.configbool('ui', 'logblockedtimes')
if section in (None, 'trusted'):
@@ -387,6 +430,7 @@
for cfg in (self._ocfg, self._tcfg, self._ucfg):
cfg.set(section, name, value, source)
self.fixconfig(section=section)
+ self._maybetweakdefaults()
def _data(self, untrusted):
return untrusted and self._ucfg or self._tcfg
@@ -394,29 +438,58 @@
def configsource(self, section, name, untrusted=False):
return self._data(untrusted).source(section, name)
- def config(self, section, name, default=None, untrusted=False):
- if isinstance(name, list):
- alternates = name
- else:
- alternates = [name]
+ def config(self, section, name, default=_unset, untrusted=False):
+ """return the plain string version of a config"""
+ value = self._config(section, name, default=default,
+ untrusted=untrusted)
+ if value is _unset:
+ return None
+ return value
+
+ def _config(self, section, name, default=_unset, untrusted=False):
+ value = default
+ item = self._knownconfig.get(section, {}).get(name)
+ alternates = [(section, name)]
+
+ if item is not None:
+ alternates.extend(item.alias)
- for n in alternates:
- value = self._data(untrusted).get(section, n, None)
- if value is not None:
+ if default is _unset:
+ if item is None:
+ value = default
+ elif item.default is configitems.dynamicdefault:
+ value = None
+ msg = "config item requires an explicit default value: '%s.%s'"
+ msg %= (section, name)
+ self.develwarn(msg, 2, 'warn-config-default')
+ elif callable(item.default):
+ value = item.default()
+ else:
+ value = item.default
+ elif (item is not None
+ and item.default is not configitems.dynamicdefault):
+ msg = ("specifying a default value for a registered "
+ "config item: '%s.%s' '%s'")
+ msg %= (section, name, default)
+ self.develwarn(msg, 2, 'warn-config-default')
+
+ for s, n in alternates:
+ candidate = self._data(untrusted).get(s, n, None)
+ if candidate is not None:
+ value = candidate
+ section = s
name = n
break
- else:
- value = default
if self.debugflag and not untrusted and self._reportuntrusted:
- for n in alternates:
- uvalue = self._ucfg.get(section, n)
+ for s, n in alternates:
+ uvalue = self._ucfg.get(s, n)
if uvalue is not None and uvalue != value:
self.debug("ignoring untrusted configuration option "
- "%s.%s = %s\n" % (section, n, uvalue))
+ "%s.%s = %s\n" % (s, n, uvalue))
return value
- def configsuboptions(self, section, name, default=None, untrusted=False):
+ def configsuboptions(self, section, name, default=_unset, untrusted=False):
"""Get a config option and all sub-options.
Some config options have sub-options that are declared with the
@@ -426,14 +499,8 @@
Returns a 2-tuple of ``(option, sub-options)``, where `sub-options``
is a dict of defined sub-options where keys and values are strings.
"""
+ main = self.config(section, name, default, untrusted=untrusted)
data = self._data(untrusted)
- main = data.get(section, name, default)
- if self.debugflag and not untrusted and self._reportuntrusted:
- uvalue = self._ucfg.get(section, name)
- if uvalue is not None and uvalue != main:
- self.debug('ignoring untrusted configuration option '
- '%s.%s = %s\n' % (section, name, uvalue))
-
sub = {}
prefix = '%s:' % name
for k, v in data.items(section):
@@ -449,7 +516,7 @@
return main, sub
- def configpath(self, section, name, default=None, untrusted=False):
+ def configpath(self, section, name, default=_unset, untrusted=False):
'get a path config item, expanded relative to repo root or config file'
v = self.config(section, name, default, untrusted)
if v is None:
@@ -461,7 +528,7 @@
v = os.path.join(base, os.path.expanduser(v))
return v
- def configbool(self, section, name, default=False, untrusted=False):
+ def configbool(self, section, name, default=_unset, untrusted=False):
"""parse a configuration element as a boolean
>>> u = ui(); s = 'foo'
@@ -482,8 +549,12 @@
ConfigError: foo.invalid is not a boolean ('somevalue')
"""
- v = self.config(section, name, None, untrusted)
+ v = self._config(section, name, default, untrusted=untrusted)
if v is None:
+ return v
+ if v is _unset:
+ if default is _unset:
+ return False
return default
if isinstance(v, bool):
return v
@@ -493,7 +564,7 @@
% (section, name, v))
return b
- def configwith(self, convert, section, name, default=None,
+ def configwith(self, convert, section, name, default=_unset,
desc=None, untrusted=False):
"""parse a configuration element with a conversion function
@@ -505,7 +576,7 @@
>>> u.configwith(float, s, 'float2')
-4.25
>>> u.configwith(float, s, 'unknown', 7)
- 7
+ 7.0
>>> u.setconfig(s, 'invalid', 'somevalue')
>>> u.configwith(float, s, 'invalid')
Traceback (most recent call last):
@@ -517,18 +588,18 @@
ConfigError: foo.invalid is not a valid womble ('somevalue')
"""
- v = self.config(section, name, None, untrusted)
+ v = self.config(section, name, default, untrusted)
if v is None:
- return default
+ return v # do not attempt to convert None
try:
return convert(v)
- except ValueError:
+ except (ValueError, error.ParseError):
if desc is None:
desc = convert.__name__
raise error.ConfigError(_("%s.%s is not a valid %s ('%s')")
% (section, name, desc, v))
- def configint(self, section, name, default=None, untrusted=False):
+ def configint(self, section, name, default=_unset, untrusted=False):
"""parse a configuration element as an integer
>>> u = ui(); s = 'foo'
@@ -550,7 +621,7 @@
return self.configwith(int, section, name, default, 'integer',
untrusted)
- def configbytes(self, section, name, default=0, untrusted=False):
+ def configbytes(self, section, name, default=_unset, untrusted=False):
"""parse a configuration element as a quantity in bytes
Units can be specified as b (bytes), k or kb (kilobytes), m or
@@ -572,18 +643,20 @@
ConfigError: foo.invalid is not a byte quantity ('somevalue')
"""
- value = self.config(section, name, None, untrusted)
- if value is None:
- if not isinstance(default, str):
- return default
+ value = self._config(section, name, default, untrusted)
+ if value is _unset:
+ if default is _unset:
+ default = 0
value = default
+ if not isinstance(value, str):
+ return value
try:
return util.sizetoint(value)
except error.ParseError:
raise error.ConfigError(_("%s.%s is not a byte quantity ('%s')")
% (section, name, value))
- def configlist(self, section, name, default=None, untrusted=False):
+ def configlist(self, section, name, default=_unset, untrusted=False):
"""parse a configuration element as a list of comma/space separated
strings
@@ -593,10 +666,28 @@
['this', 'is', 'a small', 'test']
"""
# default is not always a list
- if isinstance(default, bytes):
- default = config.parselist(default)
- return self.configwith(config.parselist, section, name, default or [],
+ v = self.configwith(config.parselist, section, name, default,
'list', untrusted)
+ if isinstance(v, bytes):
+ return config.parselist(v)
+ elif v is None:
+ return []
+ return v
+
+ def configdate(self, section, name, default=_unset, untrusted=False):
+ """parse a configuration element as a tuple of ints
+
+ >>> u = ui(); s = 'foo'
+ >>> u.setconfig(s, 'date', '0 0')
+ >>> u.configdate(s, 'date')
+ (0, 0)
+ """
+ if self.config(section, name, default, untrusted):
+ return self.configwith(util.parsedate, section, name, default,
+ 'date', untrusted)
+ if default is _unset:
+ return None
+ return default
def hasconfig(self, section, name, untrusted=False):
return self._data(untrusted).hasitem(section, name)
@@ -660,7 +751,7 @@
"""
user = encoding.environ.get("HGUSER")
if user is None:
- user = self.config("ui", ["username", "user"])
+ user = self.config("ui", "username")
if user is not None:
user = os.path.expandvars(user)
if user is None:
@@ -833,7 +924,7 @@
(util.timer() - starttime) * 1000
def _isatty(self, fh):
- if self.configbool('ui', 'nontty', False):
+ if self.configbool('ui', 'nontty'):
return False
return util.isatty(fh)
@@ -856,7 +947,7 @@
if (self._disablepager
or self.pageractive
or command in self.configlist('pager', 'ignore')
- or not self.configbool('ui', 'paginate', True)
+ or not self.configbool('ui', 'paginate')
or not self.configbool('pager', 'attend-' + command, True)
# TODO: if we want to allow HGPLAINEXCEPT=pager,
# formatted() will need some adjustment.
@@ -1021,7 +1112,7 @@
# Default interface for all the features
defaultinterface = "text"
- i = self.config("ui", "interface", None)
+ i = self.config("ui", "interface")
if i in alldefaults:
defaultinterface = i
@@ -1057,7 +1148,7 @@
This function refers to input only; for output, see `ui.formatted()'.
'''
- i = self.configbool("ui", "interactive", None)
+ i = self.configbool("ui", "interactive")
if i is None:
# some environments replace stdin without implementing isatty
# usually those are non-interactive
@@ -1095,7 +1186,7 @@
if self.plain():
return False
- i = self.configbool("ui", "formatted", None)
+ i = self.configbool("ui", "formatted")
if i is None:
# some environments replace stdout without implementing isatty
# usually those are non-interactive
@@ -1175,7 +1266,7 @@
# prompt to start parsing. Sadly, we also can't rely on
# choices containing spaces, ASCII, or basically anything
# except an ampersand followed by a character.
- m = re.match(r'(?s)(.+?)\$\$([^\$]*&[^ \$].*)', prompt)
+ m = re.match(br'(?s)(.+?)\$\$([^\$]*&[^ \$].*)', prompt)
msg = m.group(1)
choices = [p.strip(' ') for p in m.group(2).split('$$')]
return (msg,
@@ -1373,7 +1464,7 @@
def _progbar(self):
"""setup the progbar singleton to the ui object"""
if (self.quiet or self.debugflag
- or self.configbool('progress', 'disable', False)
+ or self.configbool('progress', 'disable')
or not progress.shouldprint(self)):
return None
return getprogbar(self)
--- a/mercurial/unionrepo.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/unionrepo.py Wed Jul 19 07:51:41 2017 -0500
@@ -232,7 +232,7 @@
def instance(ui, path, create):
if create:
raise error.Abort(_('cannot create new union repository'))
- parentpath = ui.config("bundle", "mainreporoot", "")
+ parentpath = ui.config("bundle", "mainreporoot")
if not parentpath:
# try to find the correct path to the working directory repo
parentpath = cmdutil.findrepo(pycompat.getcwd())
--- a/mercurial/upgrade.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/upgrade.py Wed Jul 19 07:51:41 2017 -0500
@@ -28,12 +28,12 @@
An upgrade will not be allowed if the repository doesn't have the
requirements returned by this function.
"""
- return set([
+ return {
# Introduced in Mercurial 0.9.2.
'revlogv1',
# Introduced in Mercurial 0.9.2.
'store',
- ])
+ }
def blocksourcerequirements(repo):
"""Obtain requirements that will prevent an upgrade from occurring.
@@ -41,7 +41,7 @@
An upgrade cannot be performed if the source repository contains a
requirements in the returned set.
"""
- return set([
+ return {
# The upgrade code does not yet support these experimental features.
# This is an artificial limitation.
'manifestv2',
@@ -51,7 +51,7 @@
'parentdelta',
# Upgrade should operate on the actual store, not the shared link.
'shared',
- ])
+ }
def supportremovedrequirements(repo):
"""Obtain requirements that can be removed during an upgrade.
@@ -70,13 +70,13 @@
Extensions should monkeypatch this to add their custom requirements.
"""
- return set([
+ return {
'dotencode',
'fncache',
'generaldelta',
'revlogv1',
'store',
- ])
+ }
def allowednewrequirements(repo):
"""Obtain requirements that can be added to a repository during upgrade.
@@ -88,11 +88,11 @@
bad additions because the whitelist approach is safer and will prevent
future, unknown requirements from accidentally being added.
"""
- return set([
+ return {
'dotencode',
'fncache',
'generaldelta',
- ])
+ }
deficiency = 'deficiency'
optimisation = 'optimization'
@@ -628,7 +628,7 @@
ui.write(_('marking source repository as being upgraded; clients will be '
'unable to read from repository\n'))
scmutil.writerequires(srcrepo.vfs,
- srcrepo.requirements | set(['upgradeinprogress']))
+ srcrepo.requirements | {'upgradeinprogress'})
ui.write(_('starting in-place swap of repository data\n'))
ui.write(_('replaced files will be backed up at %s\n') %
@@ -792,35 +792,33 @@
upgradeactions = [a.name for a in actions]
ui.write(_('beginning upgrade...\n'))
- with repo.wlock():
- with repo.lock():
- ui.write(_('repository locked and read-only\n'))
- # Our strategy for upgrading the repository is to create a new,
- # temporary repository, write data to it, then do a swap of the
- # data. There are less heavyweight ways to do this, but it is easier
- # to create a new repo object than to instantiate all the components
- # (like the store) separately.
- tmppath = tempfile.mkdtemp(prefix='upgrade.', dir=repo.path)
- backuppath = None
- try:
- ui.write(_('creating temporary repository to stage migrated '
- 'data: %s\n') % tmppath)
- dstrepo = localrepo.localrepository(repo.baseui,
- path=tmppath,
- create=True)
+ with repo.wlock(), repo.lock():
+ ui.write(_('repository locked and read-only\n'))
+ # Our strategy for upgrading the repository is to create a new,
+ # temporary repository, write data to it, then do a swap of the
+ # data. There are less heavyweight ways to do this, but it is easier
+ # to create a new repo object than to instantiate all the components
+ # (like the store) separately.
+ tmppath = tempfile.mkdtemp(prefix='upgrade.', dir=repo.path)
+ backuppath = None
+ try:
+ ui.write(_('creating temporary repository to stage migrated '
+ 'data: %s\n') % tmppath)
+ dstrepo = localrepo.localrepository(repo.baseui,
+ path=tmppath,
+ create=True)
- with dstrepo.wlock():
- with dstrepo.lock():
- backuppath = _upgraderepo(ui, repo, dstrepo, newreqs,
- upgradeactions)
+ with dstrepo.wlock(), dstrepo.lock():
+ backuppath = _upgraderepo(ui, repo, dstrepo, newreqs,
+ upgradeactions)
- finally:
- ui.write(_('removing temporary repository %s\n') % tmppath)
- repo.vfs.rmtree(tmppath, forcibly=True)
+ finally:
+ ui.write(_('removing temporary repository %s\n') % tmppath)
+ repo.vfs.rmtree(tmppath, forcibly=True)
- if backuppath:
- ui.warn(_('copy of old repository backed up at %s\n') %
- backuppath)
- ui.warn(_('the old repository will not be deleted; remove '
- 'it to free up disk space once the upgraded '
- 'repository is verified\n'))
+ if backuppath:
+ ui.warn(_('copy of old repository backed up at %s\n') %
+ backuppath)
+ ui.warn(_('the old repository will not be deleted; remove '
+ 'it to free up disk space once the upgraded '
+ 'repository is verified\n'))
--- a/mercurial/url.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/url.py Wed Jul 19 07:51:41 2017 -0500
@@ -454,7 +454,7 @@
authinfo will be added to the password manager
'''
# experimental config: ui.usehttp2
- if ui.configbool('ui', 'usehttp2', False):
+ if ui.configbool('ui', 'usehttp2'):
handlers = [
httpconnectionmod.http2handler(
ui,
--- a/mercurial/util.h Wed Jul 05 11:24:22 2017 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,45 +0,0 @@
-/*
- util.h - utility functions for interfacing with the various python APIs.
-
- This software may be used and distributed according to the terms of
- the GNU General Public License, incorporated herein by reference.
-*/
-
-#ifndef _HG_UTIL_H_
-#define _HG_UTIL_H_
-
-#include "compat.h"
-
-#if PY_MAJOR_VERSION >= 3
-#define IS_PY3K
-#endif
-
-typedef struct {
- PyObject_HEAD
- char state;
- int mode;
- int size;
- int mtime;
-} dirstateTupleObject;
-
-extern PyTypeObject dirstateTupleType;
-#define dirstate_tuple_check(op) (Py_TYPE(op) == &dirstateTupleType)
-
-/* This should be kept in sync with normcasespecs in encoding.py. */
-enum normcase_spec {
- NORMCASE_LOWER = -1,
- NORMCASE_UPPER = 1,
- NORMCASE_OTHER = 0
-};
-
-#define MIN(a, b) (((a)<(b))?(a):(b))
-/* VC9 doesn't include bool and lacks stdbool.h based on my searching */
-#if defined(_MSC_VER) || __STDC_VERSION__ < 199901L
-#define true 1
-#define false 0
-typedef unsigned char bool;
-#else
-#include <stdbool.h>
-#endif
-
-#endif /* _HG_UTIL_H_ */
--- a/mercurial/util.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/util.py Wed Jul 19 07:51:41 2017 -0500
@@ -19,6 +19,7 @@
import calendar
import codecs
import collections
+import contextlib
import datetime
import errno
import gc
@@ -45,11 +46,17 @@
encoding,
error,
i18n,
- osutil,
- parsers,
+ policy,
pycompat,
)
+base85 = policy.importmod(r'base85')
+osutil = policy.importmod(r'osutil')
+parsers = policy.importmod(r'parsers')
+
+b85decode = base85.b85decode
+b85encode = base85.b85encode
+
cookielib = pycompat.cookielib
empty = pycompat.empty
httplib = pycompat.httplib
@@ -105,6 +112,7 @@
hidewindow = platform.hidewindow
isexec = platform.isexec
isowner = platform.isowner
+listdir = osutil.listdir
localpath = platform.localpath
lookupreg = platform.lookupreg
makedir = platform.makedir
@@ -142,6 +150,15 @@
unlink = platform.unlink
username = platform.username
+try:
+ recvfds = osutil.recvfds
+except AttributeError:
+ pass
+try:
+ setprocname = osutil.setprocname
+except AttributeError:
+ pass
+
# Python compatibility
_notset = object()
@@ -278,16 +295,10 @@
try:
buffer = buffer
except NameError:
- if not pycompat.ispy3:
- def buffer(sliceable, offset=0, length=None):
- if length is not None:
- return sliceable[offset:offset + length]
- return sliceable[offset:]
- else:
- def buffer(sliceable, offset=0, length=None):
- if length is not None:
- return memoryview(sliceable)[offset:offset + length]
- return memoryview(sliceable)[offset:]
+ def buffer(sliceable, offset=0, length=None):
+ if length is not None:
+ return memoryview(sliceable)[offset:offset + length]
+ return memoryview(sliceable)[offset:]
closefds = pycompat.osname == 'posix'
@@ -556,54 +567,40 @@
return f
-class sortdict(dict):
- '''a simple sorted dictionary'''
- def __init__(self, data=None):
- self._list = []
- if data:
- self.update(data)
- def copy(self):
- return sortdict(self)
- def __setitem__(self, key, val):
+class sortdict(collections.OrderedDict):
+ '''a simple sorted dictionary
+
+ >>> d1 = sortdict([('a', 0), ('b', 1)])
+ >>> d2 = d1.copy()
+ >>> d2
+ sortdict([('a', 0), ('b', 1)])
+ >>> d2.update([('a', 2)])
+ >>> d2.keys() # should still be in last-set order
+ ['b', 'a']
+ '''
+
+ def __setitem__(self, key, value):
if key in self:
- self._list.remove(key)
- self._list.append(key)
- dict.__setitem__(self, key, val)
- def __iter__(self):
- return self._list.__iter__()
- def update(self, src):
- if isinstance(src, dict):
- src = src.iteritems()
- for k, v in src:
- self[k] = v
- def clear(self):
- dict.clear(self)
- self._list = []
- def items(self):
- return [(k, self[k]) for k in self._list]
- def __delitem__(self, key):
- dict.__delitem__(self, key)
- self._list.remove(key)
- def pop(self, key, *args, **kwargs):
- try:
- self._list.remove(key)
- except ValueError:
- pass
- return dict.pop(self, key, *args, **kwargs)
- def keys(self):
- return self._list[:]
- def iterkeys(self):
- return self._list.__iter__()
- def iteritems(self):
- for k in self._list:
- yield k, self[k]
- def insert(self, index, key, val):
- self._list.insert(index, key)
- dict.__setitem__(self, key, val)
- def __repr__(self):
- if not self:
- return '%s()' % self.__class__.__name__
- return '%s(%r)' % (self.__class__.__name__, self.items())
+ del self[key]
+ super(sortdict, self).__setitem__(key, value)
+
+@contextlib.contextmanager
+def acceptintervention(tr=None):
+ """A context manager that closes the transaction on InterventionRequired
+
+ If no transaction was provided, this simply runs the body and returns
+ """
+ if not tr:
+ yield
+ return
+ try:
+ yield
+ tr.close()
+ except error.InterventionRequired:
+ tr.close()
+ raise
+ finally:
+ tr.release()
class _lrucachenode(object):
"""A node in a doubly linked list.
@@ -1049,28 +1046,20 @@
except Exception:
pass
cmd = quotecommand(cmd)
- if pycompat.sysplatform == 'plan9' and (sys.version_info[0] == 2
- and sys.version_info[1] < 7):
- # subprocess kludge to work around issues in half-baked Python
- # ports, notably bichued/python:
- if not cwd is None:
- os.chdir(cwd)
- rc = os.system(cmd)
+ env = shellenviron(environ)
+ if out is None or _isstdout(out):
+ rc = subprocess.call(cmd, shell=True, close_fds=closefds,
+ env=env, cwd=cwd)
else:
- env = shellenviron(environ)
- if out is None or _isstdout(out):
- rc = subprocess.call(cmd, shell=True, close_fds=closefds,
- env=env, cwd=cwd)
- else:
- proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
- env=env, cwd=cwd, stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
- for line in iter(proc.stdout.readline, ''):
- out.write(line)
- proc.wait()
- rc = proc.returncode
- if pycompat.sysplatform == 'OpenVMS' and rc & 1:
- rc = 0
+ proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
+ env=env, cwd=cwd, stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
+ for line in iter(proc.stdout.readline, ''):
+ out.write(line)
+ proc.wait()
+ rc = proc.returncode
+ if pycompat.sysplatform == 'OpenVMS' and rc & 1:
+ rc = 0
return rc
def checksignature(func):
@@ -1086,7 +1075,7 @@
return check
# a whilelist of known filesystems where hardlink works reliably
-_hardlinkfswhitelist = set([
+_hardlinkfswhitelist = {
'btrfs',
'ext2',
'ext3',
@@ -1098,7 +1087,7 @@
'ufs',
'xfs',
'zfs',
-])
+}
def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
'''copy a file, preserving mode and optionally other stat info like
@@ -1114,7 +1103,7 @@
oldstat = None
if os.path.lexists(dest):
if checkambig:
- oldstat = checkambig and filestat(dest)
+ oldstat = checkambig and filestat.frompath(dest)
unlink(dest)
if hardlink:
# Hardlinks are problematic on CIFS (issue4546), do not allow hardlinks
@@ -1144,7 +1133,7 @@
else:
shutil.copymode(src, dest)
if oldstat and oldstat.stat:
- newstat = filestat(dest)
+ newstat = filestat.frompath(dest)
if newstat.isambig(oldstat):
# stat of copied file is ambiguous to original one
advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
@@ -1164,7 +1153,7 @@
os.stat(os.path.dirname(dst)).st_dev)
topic = gettopic()
os.mkdir(dst)
- for name, kind in osutil.listdir(src):
+ for name, kind in listdir(src):
srcname = os.path.join(src, name)
dstname = os.path.join(dst, name)
def nprog(t, pos):
@@ -1192,7 +1181,7 @@
return hardlink, num
-_winreservednames = '''con prn aux nul
+_winreservednames = b'''con prn aux nul
com1 com2 com3 com4 com5 com6 com7 com8 com9
lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
_winreservedchars = ':*?"<>|'
@@ -1522,13 +1511,23 @@
exists. Otherwise, it is None. This can avoid preparative
'exists()' examination on client side of this class.
"""
- def __init__(self, path):
+ def __init__(self, stat):
+ self.stat = stat
+
+ @classmethod
+ def frompath(cls, path):
try:
- self.stat = os.stat(path)
+ stat = os.stat(path)
except OSError as err:
if err.errno != errno.ENOENT:
raise
- self.stat = None
+ stat = None
+ return cls(stat)
+
+ @classmethod
+ def fromfp(cls, fp):
+ stat = os.fstat(fp.fileno())
+ return cls(stat)
__hash__ = object.__hash__
@@ -1541,6 +1540,10 @@
self.stat.st_ctime == old.stat.st_ctime and
self.stat.st_mtime == old.stat.st_mtime)
except AttributeError:
+ pass
+ try:
+ return self.stat is None and old.stat is None
+ except AttributeError:
return False
def isambig(self, old):
@@ -1584,7 +1587,10 @@
'old' should be previous filestat of 'path'.
This skips avoiding ambiguity, if a process doesn't have
- appropriate privileges for 'path'.
+ appropriate privileges for 'path'. This returns False in this
+ case.
+
+ Otherwise, this returns True, as "ambiguity is avoided".
"""
advanced = (old.stat.st_mtime + 1) & 0x7fffffff
try:
@@ -1593,8 +1599,9 @@
if inst.errno == errno.EPERM:
# utime() on the file created by another user causes EPERM,
# if a process doesn't have appropriate privileges
- return
+ return False
raise
+ return True
def __ne__(self, other):
return not self == other
@@ -1630,10 +1637,10 @@
if not self._fp.closed:
self._fp.close()
filename = localpath(self.__name)
- oldstat = self._checkambig and filestat(filename)
+ oldstat = self._checkambig and filestat.frompath(filename)
if oldstat and oldstat.stat:
rename(self._tempname, filename)
- newstat = filestat(filename)
+ newstat = filestat.frompath(filename)
if newstat.isambig(oldstat):
# stat of changed file is ambiguous to original one
advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
@@ -1727,8 +1734,7 @@
iterator over chunks of arbitrary size."""
def __init__(self, in_iter):
- """in_iter is the iterator that's iterating over the input chunks.
- targetsize is how big a buffer to try to maintain."""
+ """in_iter is the iterator that's iterating over the input chunks."""
def splitbig(chunks):
for chunk in chunks:
if len(chunk) > 2**20:
@@ -1917,6 +1923,7 @@
# add missing elements from defaults
usenow = False # default to using biased defaults
for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
+ part = pycompat.bytestr(part)
found = [True for p in part if ("%"+p) in format]
if not found:
date += "@" + defaults[part][usenow]
@@ -1926,7 +1933,8 @@
# elements are relative to today
usenow = True
- timetuple = time.strptime(date, format)
+ timetuple = time.strptime(encoding.strfromlocal(date),
+ encoding.strfromlocal(format))
localunixtime = int(calendar.timegm(timetuple))
if offset is None:
# local timezone
@@ -1984,13 +1992,13 @@
# this piece is for rounding the specific end of unknowns
b = bias.get(part)
if b is None:
- if part[0] in "HMS":
+ if part[0:1] in "HMS":
b = "00"
else:
b = "0"
# this piece is for matching the generic end to today's date
- n = datestr(now, "%" + part[0])
+ n = datestr(now, "%" + part[0:1])
defaults[part] = (b, n)
@@ -2002,15 +2010,15 @@
else:
break
else:
- raise Abort(_('invalid date: %r') % date)
+ raise error.ParseError(_('invalid date: %r') % date)
# validate explicit (probably user-specified) date and
# time zone offset. values must fit in signed 32 bits for
# current 32-bit linux runtimes. timezones go from UTC-12
# to UTC+14
if when < -0x80000000 or when > 0x7fffffff:
- raise Abort(_('date exceeds 32 bits: %d') % when)
+ raise error.ParseError(_('date exceeds 32 bits: %d') % when)
if offset < -50400 or offset > 43200:
- raise Abort(_('impossible time zone offset: %d') % offset)
+ raise error.ParseError(_('impossible time zone offset: %d') % offset)
return when, offset
def matchdate(date):
@@ -2328,7 +2336,7 @@
# First chunk on line is whitespace -- drop it, unless this
# is the very beginning of the text (i.e. no lines started yet).
- if self.drop_whitespace and chunks[-1].strip() == '' and lines:
+ if self.drop_whitespace and chunks[-1].strip() == r'' and lines:
del chunks[-1]
while chunks:
@@ -2350,13 +2358,13 @@
# If the last chunk on this line is all whitespace, drop it.
if (self.drop_whitespace and
- cur_line and cur_line[-1].strip() == ''):
+ cur_line and cur_line[-1].strip() == r''):
del cur_line[-1]
# Convert current line back to a string and store it in list
# of all lines (return value).
if cur_line:
- lines.append(indent + ''.join(cur_line))
+ lines.append(indent + r''.join(cur_line))
return lines
@@ -2745,7 +2753,7 @@
attrs.append('%s: %r' % (a, v))
return '<url %s>' % ', '.join(attrs)
- def __str__(self):
+ def __bytes__(self):
r"""Join the URL's components back into a URL string.
Examples:
@@ -2779,9 +2787,6 @@
>>> print url(r'file:///D:\data\hg')
file:///D:\data\hg
"""
- return encoding.strfromlocal(self.__bytes__())
-
- def __bytes__(self):
if self._localpath:
s = self.path
if self.scheme == 'bundle':
@@ -2825,6 +2830,8 @@
s += '#' + urlreq.quote(self.fragment, safe=self._safepchars)
return s
+ __str__ = encoding.strmethod(__bytes__)
+
def authinfo(self):
user, passwd = self.user, self.passwd
try:
@@ -2846,7 +2853,7 @@
return True # remote URL
if hasdriveletter(self.path):
return True # absolute for our purposes - can't be joined()
- if self.path.startswith(r'\\'):
+ if self.path.startswith(br'\\'):
return True # Windows UNC path
if self.path.startswith('/'):
return True # POSIX-style
@@ -3058,66 +3065,6 @@
yield path[:pos]
pos = path.rfind('/', 0, pos)
-class ctxmanager(object):
- '''A context manager for use in 'with' blocks to allow multiple
- contexts to be entered at once. This is both safer and more
- flexible than contextlib.nested.
-
- Once Mercurial supports Python 2.7+, this will become mostly
- unnecessary.
- '''
-
- def __init__(self, *args):
- '''Accepts a list of no-argument functions that return context
- managers. These will be invoked at __call__ time.'''
- self._pending = args
- self._atexit = []
-
- def __enter__(self):
- return self
-
- def enter(self):
- '''Create and enter context managers in the order in which they were
- passed to the constructor.'''
- values = []
- for func in self._pending:
- obj = func()
- values.append(obj.__enter__())
- self._atexit.append(obj.__exit__)
- del self._pending
- return values
-
- def atexit(self, func, *args, **kwargs):
- '''Add a function to call when this context manager exits. The
- ordering of multiple atexit calls is unspecified, save that
- they will happen before any __exit__ functions.'''
- def wrapper(exc_type, exc_val, exc_tb):
- func(*args, **kwargs)
- self._atexit.append(wrapper)
- return func
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- '''Context managers are exited in the reverse order from which
- they were created.'''
- received = exc_type is not None
- suppressed = False
- pending = None
- self._atexit.reverse()
- for exitfunc in self._atexit:
- try:
- if exitfunc(exc_type, exc_val, exc_tb):
- suppressed = True
- exc_type = None
- exc_val = None
- exc_tb = None
- except BaseException:
- pending = sys.exc_info()
- exc_type, exc_val, exc_tb = pending = sys.exc_info()
- del self._atexit
- if pending:
- raise exc_val
- return received and suppressed
-
# compression code
SERVERROLE = 'server'
--- a/mercurial/verify.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/verify.py Wed Jul 19 07:51:41 2017 -0500
@@ -49,6 +49,8 @@
self.lrugetctx = util.lrucachefunc(repo.changectx)
self.refersmf = False
self.fncachewarned = False
+ # developer config: verify.skipflags
+ self.skipflags = repo.ui.configint('verify', 'skipflags')
def warn(self, msg):
self.ui.warn(msg + "\n")
@@ -427,16 +429,21 @@
# 2. hash check: depending on flag processor, we may need to
# use either "text" (external), or "rawtext" (in revlog).
try:
- l = len(fl.read(n))
- rp = fl.renamed(n)
- if l != fl.size(i):
- # the "L1 == L2" check
- if len(fl.revision(n, raw=True)) != fl.rawsize(i):
- self.err(lr, _("unpacked size is %s, %s expected") %
- (l, fl.size(i)), f)
+ skipflags = self.skipflags
+ if skipflags:
+ skipflags &= fl.flags(i)
+ if not skipflags:
+ fl.read(n) # side effect: read content and do checkhash
+ rp = fl.renamed(n)
+ # the "L1 == L2" check
+ l1 = fl.rawsize(i)
+ l2 = len(fl.revision(n, raw=True))
+ if l1 != l2:
+ self.err(lr, _("unpacked size is %s, %s expected") %
+ (l2, l1), f)
except error.CensoredNodeError:
# experimental config: censor.policy
- if ui.config("censor", "policy", "abort") == "abort":
+ if ui.config("censor", "policy") == "abort":
self.err(lr, _("censored file data"), f)
except Exception as inst:
self.exc(lr, _("unpacking %s") % short(n), inst, f)
--- a/mercurial/vfs.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/vfs.py Wed Jul 19 07:51:41 2017 -0500
@@ -17,12 +17,28 @@
from .i18n import _
from . import (
error,
- osutil,
pathutil,
pycompat,
util,
)
+def _avoidambig(path, oldstat):
+ """Avoid file stat ambiguity forcibly
+
+ This function causes copying ``path`` file, if it is owned by
+ another (see issue5418 and issue5584 for detail).
+ """
+ def checkandavoid():
+ newstat = util.filestat.frompath(path)
+ # return whether file stat ambiguity is (already) avoided
+ return (not newstat.isambig(oldstat) or
+ newstat.avoidambig(path, oldstat))
+ if not checkandavoid():
+ # simply copy to change owner of path to get privilege to
+ # advance mtime (see issue5418)
+ util.rename(util.mktempcopy(path), path)
+ checkandavoid()
+
class abstractvfs(object):
"""Abstract base class; cannot be instantiated"""
@@ -163,7 +179,7 @@
return fd, fname
def readdir(self, path=None, stat=None, skip=None):
- return osutil.listdir(self.join(path), stat, skip)
+ return util.listdir(self.join(path), stat, skip)
def readlock(self, path):
return util.readlock(self.join(path))
@@ -174,17 +190,20 @@
checkambig argument is used with util.filestat, and is useful
only if destination file is guarded by any lock
(e.g. repo.lock or repo.wlock).
+
+ To avoid file stat ambiguity forcibly, checkambig=True involves
+ copying ``src`` file, if it is owned by another. Therefore, use
+ checkambig=True only in limited cases (see also issue5418 and
+ issue5584 for detail).
"""
+ srcpath = self.join(src)
dstpath = self.join(dst)
- oldstat = checkambig and util.filestat(dstpath)
+ oldstat = checkambig and util.filestat.frompath(dstpath)
if oldstat and oldstat.stat:
- ret = util.rename(self.join(src), dstpath)
- newstat = util.filestat(dstpath)
- if newstat.isambig(oldstat):
- # stat of renamed file is ambiguous to original one
- newstat.avoidambig(dstpath, oldstat)
+ ret = util.rename(srcpath, dstpath)
+ _avoidambig(dstpath, oldstat)
return ret
- return util.rename(self.join(src), dstpath)
+ return util.rename(srcpath, dstpath)
def readlink(self, path):
return os.readlink(self.join(path))
@@ -283,22 +302,14 @@
if realpath:
base = os.path.realpath(base)
self.base = base
- self.mustaudit = audit
+ self._audit = audit
+ if audit:
+ self.audit = pathutil.pathauditor(self.base)
+ else:
+ self.audit = (lambda path, mode=None: True)
self.createmode = None
self._trustnlink = None
- @property
- def mustaudit(self):
- return self._audit
-
- @mustaudit.setter
- def mustaudit(self, onoff):
- self._audit = onoff
- if onoff:
- self.audit = pathutil.pathauditor(self.base)
- else:
- self.audit = util.always
-
@util.propertycache
def _cansymlink(self):
return util.checklink(self.base)
@@ -313,7 +324,8 @@
os.chmod(name, self.createmode & 0o666)
def __call__(self, path, mode="r", text=False, atomictemp=False,
- notindexed=False, backgroundclose=False, checkambig=False):
+ notindexed=False, backgroundclose=False, checkambig=False,
+ auditpath=True):
'''Open ``path`` file, which is relative to vfs root.
Newly created directories are marked as "not to be indexed by
@@ -336,12 +348,19 @@
``checkambig`` argument is passed to atomictemplfile (valid
only for writing), and is useful only if target file is
guarded by any lock (e.g. repo.lock or repo.wlock).
+
+ To avoid file stat ambiguity forcibly, checkambig=True involves
+ copying ``path`` file opened in "append" mode (e.g. for
+ truncation), if it is owned by another. Therefore, use
+ combination of append mode and checkambig=True only in limited
+ cases (see also issue5418 and issue5584 for detail).
'''
- if self._audit:
- r = util.checkosfilename(path)
- if r:
- raise error.Abort("%s: %r" % (r, path))
- self.audit(path)
+ if auditpath:
+ if self._audit:
+ r = util.checkosfilename(path)
+ if r:
+ raise error.Abort("%s: %r" % (r, path))
+ self.audit(path, mode=mode)
f = self.join(path)
if not text and "b" not in mode:
@@ -422,19 +441,11 @@
opener = vfs
-class auditvfs(object):
+class proxyvfs(object):
def __init__(self, vfs):
self.vfs = vfs
@property
- def mustaudit(self):
- return self.vfs.mustaudit
-
- @mustaudit.setter
- def mustaudit(self, onoff):
- self.vfs.mustaudit = onoff
-
- @property
def options(self):
return self.vfs.options
@@ -442,11 +453,11 @@
def options(self, value):
self.vfs.options = value
-class filtervfs(abstractvfs, auditvfs):
+class filtervfs(abstractvfs, proxyvfs):
'''Wrapper vfs for filtering filenames with a function.'''
def __init__(self, vfs, filter):
- auditvfs.__init__(self, vfs)
+ proxyvfs.__init__(self, vfs)
self._filter = filter
def __call__(self, path, *args, **kwargs):
@@ -460,11 +471,11 @@
filteropener = filtervfs
-class readonlyvfs(abstractvfs, auditvfs):
+class readonlyvfs(abstractvfs, proxyvfs):
'''Wrapper vfs preventing any writing.'''
def __init__(self, vfs):
- auditvfs.__init__(self, vfs)
+ proxyvfs.__init__(self, vfs)
def __call__(self, path, mode='r', *args, **kw):
if mode not in ('r', 'rb'):
@@ -534,17 +545,14 @@
# There is overhead to starting and stopping the background threads.
# Don't do background processing unless the file count is large enough
# to justify it.
- minfilecount = ui.configint('worker', 'backgroundcloseminfilecount',
- 2048)
+ minfilecount = ui.configint('worker', 'backgroundcloseminfilecount')
# FUTURE dynamically start background threads after minfilecount closes.
# (We don't currently have any callers that don't know their file count)
if expectedcount > 0 and expectedcount < minfilecount:
return
- # Windows defaults to a limit of 512 open files. A buffer of 128
- # should give us enough headway.
- maxqueue = ui.configint('worker', 'backgroundclosemaxqueue', 384)
- threadcount = ui.configint('worker', 'backgroundclosethreadcount', 4)
+ maxqueue = ui.configint('worker', 'backgroundclosemaxqueue')
+ threadcount = ui.configint('worker', 'backgroundclosethreadcount')
ui.debug('starting %d threads for background file closing\n' %
threadcount)
@@ -618,15 +626,12 @@
"""
def __init__(self, fh):
super(checkambigatclosing, self).__init__(fh)
- object.__setattr__(self, r'_oldstat', util.filestat(fh.name))
+ object.__setattr__(self, r'_oldstat', util.filestat.frompath(fh.name))
def _checkambig(self):
oldstat = self._oldstat
if oldstat.stat:
- newstat = util.filestat(self._origfh.name)
- if newstat.isambig(oldstat):
- # stat of changed file is ambiguous to original one
- newstat.avoidambig(self._origfh.name, oldstat)
+ _avoidambig(self._origfh.name, oldstat)
def __exit__(self, exc_type, exc_value, exc_tb):
self._origfh.__exit__(exc_type, exc_value, exc_tb)
--- a/mercurial/win32.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/win32.py Wed Jul 19 07:51:41 2017 -0500
@@ -22,6 +22,7 @@
_kernel32 = ctypes.windll.kernel32
_advapi32 = ctypes.windll.advapi32
_user32 = ctypes.windll.user32
+_crypt32 = ctypes.windll.crypt32
_BOOL = ctypes.c_long
_WORD = ctypes.c_ushort
@@ -31,6 +32,7 @@
_LPCSTR = _LPSTR = ctypes.c_char_p
_HANDLE = ctypes.c_void_p
_HWND = _HANDLE
+_PCCERT_CONTEXT = ctypes.c_void_p
_INVALID_HANDLE_VALUE = _HANDLE(-1).value
@@ -131,31 +133,77 @@
('srWindow', _SMALL_RECT),
('dwMaximumWindowSize', _COORD)]
+_STD_OUTPUT_HANDLE = _DWORD(-11).value
_STD_ERROR_HANDLE = _DWORD(-12).value
-# CreateToolhelp32Snapshot, Process32First, Process32Next
-_TH32CS_SNAPPROCESS = 0x00000002
-_MAX_PATH = 260
+# CERT_TRUST_STATUS dwErrorStatus
+CERT_TRUST_IS_PARTIAL_CHAIN = 0x10000
+
+# CertCreateCertificateContext encodings
+X509_ASN_ENCODING = 0x00000001
+PKCS_7_ASN_ENCODING = 0x00010000
+
+# These structs are only complete enough to achieve what we need.
+class CERT_CHAIN_CONTEXT(ctypes.Structure):
+ _fields_ = (
+ ("cbSize", _DWORD),
+
+ # CERT_TRUST_STATUS struct
+ ("dwErrorStatus", _DWORD),
+ ("dwInfoStatus", _DWORD),
-class _tagPROCESSENTRY32(ctypes.Structure):
- _fields_ = [('dwsize', _DWORD),
- ('cntUsage', _DWORD),
- ('th32ProcessID', _DWORD),
- ('th32DefaultHeapID', ctypes.c_void_p),
- ('th32ModuleID', _DWORD),
- ('cntThreads', _DWORD),
- ('th32ParentProcessID', _DWORD),
- ('pcPriClassBase', _LONG),
- ('dwFlags', _DWORD),
- ('szExeFile', ctypes.c_char * _MAX_PATH)]
+ ("cChain", _DWORD),
+ ("rgpChain", ctypes.c_void_p),
+ ("cLowerQualityChainContext", _DWORD),
+ ("rgpLowerQualityChainContext", ctypes.c_void_p),
+ ("fHasRevocationFreshnessTime", _BOOL),
+ ("dwRevocationFreshnessTime", _DWORD),
+ )
+
+class CERT_USAGE_MATCH(ctypes.Structure):
+ _fields_ = (
+ ("dwType", _DWORD),
- def __init__(self):
- super(_tagPROCESSENTRY32, self).__init__()
- self.dwsize = ctypes.sizeof(self)
+ # CERT_ENHKEY_USAGE struct
+ ("cUsageIdentifier", _DWORD),
+ ("rgpszUsageIdentifier", ctypes.c_void_p), # LPSTR *
+ )
+class CERT_CHAIN_PARA(ctypes.Structure):
+ _fields_ = (
+ ("cbSize", _DWORD),
+ ("RequestedUsage", CERT_USAGE_MATCH),
+ ("RequestedIssuancePolicy", CERT_USAGE_MATCH),
+ ("dwUrlRetrievalTimeout", _DWORD),
+ ("fCheckRevocationFreshnessTime", _BOOL),
+ ("dwRevocationFreshnessTime", _DWORD),
+ ("pftCacheResync", ctypes.c_void_p), # LPFILETIME
+ ("pStrongSignPara", ctypes.c_void_p), # PCCERT_STRONG_SIGN_PARA
+ ("dwStrongSignFlags", _DWORD),
+ )
# types of parameters of C functions used (required by pypy)
+_crypt32.CertCreateCertificateContext.argtypes = [_DWORD, # cert encoding
+ ctypes.c_char_p, # cert
+ _DWORD] # cert size
+_crypt32.CertCreateCertificateContext.restype = _PCCERT_CONTEXT
+
+_crypt32.CertGetCertificateChain.argtypes = [
+ ctypes.c_void_p, # HCERTCHAINENGINE
+ _PCCERT_CONTEXT,
+ ctypes.c_void_p, # LPFILETIME
+ ctypes.c_void_p, # HCERTSTORE
+ ctypes.c_void_p, # PCERT_CHAIN_PARA
+ _DWORD,
+ ctypes.c_void_p, # LPVOID
+ ctypes.c_void_p # PCCERT_CHAIN_CONTEXT *
+ ]
+_crypt32.CertGetCertificateChain.restype = _BOOL
+
+_crypt32.CertFreeCertificateContext.argtypes = [_PCCERT_CONTEXT]
+_crypt32.CertFreeCertificateContext.restype = _BOOL
+
_kernel32.CreateFileA.argtypes = [_LPCSTR, _DWORD, _DWORD, ctypes.c_void_p,
_DWORD, _DWORD, _HANDLE]
_kernel32.CreateFileA.restype = _HANDLE
@@ -202,6 +250,12 @@
_kernel32.SetConsoleCtrlHandler.argtypes = [_SIGNAL_HANDLER, _BOOL]
_kernel32.SetConsoleCtrlHandler.restype = _BOOL
+_kernel32.SetConsoleMode.argtypes = [_HANDLE, _DWORD]
+_kernel32.SetConsoleMode.restype = _BOOL
+
+_kernel32.GetConsoleMode.argtypes = [_HANDLE, ctypes.c_void_p]
+_kernel32.GetConsoleMode.restype = _BOOL
+
_kernel32.GetStdHandle.argtypes = [_DWORD]
_kernel32.GetStdHandle.restype = _HANDLE
@@ -221,21 +275,17 @@
_user32.EnumWindows.argtypes = [_WNDENUMPROC, _LPARAM]
_user32.EnumWindows.restype = _BOOL
-_kernel32.CreateToolhelp32Snapshot.argtypes = [_DWORD, _DWORD]
-_kernel32.CreateToolhelp32Snapshot.restype = _BOOL
-
_kernel32.PeekNamedPipe.argtypes = [_HANDLE, ctypes.c_void_p, _DWORD,
ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p]
_kernel32.PeekNamedPipe.restype = _BOOL
-_kernel32.Process32First.argtypes = [_HANDLE, ctypes.c_void_p]
-_kernel32.Process32First.restype = _BOOL
-
-_kernel32.Process32Next.argtypes = [_HANDLE, ctypes.c_void_p]
-_kernel32.Process32Next.restype = _BOOL
-
def _raiseoserror(name):
- err = ctypes.WinError()
+ # Force the code to a signed int to avoid an 'int too large' error.
+ # See https://bugs.python.org/issue28474
+ code = _kernel32.GetLastError()
+ if code > 0x7fffffff:
+ code -= 2**32
+ err = ctypes.WinError(code=code)
raise OSError(err.errno, '%s: %s' % (name, err.strerror))
def _getfileinfo(name):
@@ -252,6 +302,51 @@
finally:
_kernel32.CloseHandle(fh)
+def checkcertificatechain(cert, build=True):
+ '''Tests the given certificate to see if there is a complete chain to a
+ trusted root certificate. As a side effect, missing certificates are
+ downloaded and installed unless ``build=False``. True is returned if a
+ chain to a trusted root exists (even if built on the fly), otherwise
+ False. NB: A chain to a trusted root does NOT imply that the certificate
+ is valid.
+ '''
+
+ chainctxptr = ctypes.POINTER(CERT_CHAIN_CONTEXT)
+
+ pchainctx = chainctxptr()
+ chainpara = CERT_CHAIN_PARA(cbSize=ctypes.sizeof(CERT_CHAIN_PARA),
+ RequestedUsage=CERT_USAGE_MATCH())
+
+ certctx = _crypt32.CertCreateCertificateContext(X509_ASN_ENCODING, cert,
+ len(cert))
+ if certctx is None:
+ _raiseoserror('CertCreateCertificateContext')
+
+ flags = 0
+
+ if not build:
+ flags |= 0x100 # CERT_CHAIN_DISABLE_AUTH_ROOT_AUTO_UPDATE
+
+ try:
+ # Building the certificate chain will update root certs as necessary.
+ if not _crypt32.CertGetCertificateChain(None, # hChainEngine
+ certctx, # pCertContext
+ None, # pTime
+ None, # hAdditionalStore
+ ctypes.byref(chainpara),
+ flags,
+ None, # pvReserved
+ ctypes.byref(pchainctx)):
+ _raiseoserror('CertGetCertificateChain')
+
+ chainctx = pchainctx.contents
+
+ return chainctx.dwErrorStatus & CERT_TRUST_IS_PARTIAL_CHAIN == 0
+ finally:
+ if pchainctx:
+ _crypt32.CertFreeCertificateChain(pchainctx)
+ _crypt32.CertFreeCertificateContext(certctx)
+
def oslink(src, dst):
try:
if not _kernel32.CreateHardLinkA(dst, src, None):
@@ -372,50 +467,28 @@
height = csbi.srWindow.Bottom - csbi.srWindow.Top + 1
return width, height
-def _1stchild(pid):
- '''return the 1st found child of the given pid
+def enablevtmode():
+ '''Enable virtual terminal mode for the associated console. Return True if
+ enabled, else False.'''
- None is returned when no child is found'''
- pe = _tagPROCESSENTRY32()
+ ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x4
+
+ handle = _kernel32.GetStdHandle(_STD_OUTPUT_HANDLE) # don't close the handle
+ if handle == _INVALID_HANDLE_VALUE:
+ return False
- # create handle to list all processes
- ph = _kernel32.CreateToolhelp32Snapshot(_TH32CS_SNAPPROCESS, 0)
- if ph == _INVALID_HANDLE_VALUE:
- raise ctypes.WinError()
- try:
- r = _kernel32.Process32First(ph, ctypes.byref(pe))
- # loop over all processes
- while r:
- if pe.th32ParentProcessID == pid:
- # return first child found
- return pe.th32ProcessID
- r = _kernel32.Process32Next(ph, ctypes.byref(pe))
- finally:
- _kernel32.CloseHandle(ph)
- if _kernel32.GetLastError() != _ERROR_NO_MORE_FILES:
- raise ctypes.WinError()
- return None # no child found
+ mode = _DWORD(0)
+
+ if not _kernel32.GetConsoleMode(handle, ctypes.byref(mode)):
+ return False
-class _tochildpid(int): # pid is _DWORD, which always matches in an int
- '''helper for spawndetached, returns the child pid on conversion to string
+ if (mode.value & ENABLE_VIRTUAL_TERMINAL_PROCESSING) == 0:
+ mode.value |= ENABLE_VIRTUAL_TERMINAL_PROCESSING
- Does not resolve the child pid immediately because the child may not yet be
- started.
- '''
- def childpid(self):
- '''returns the child pid of the first found child of the process
- with this pid'''
- return _1stchild(self)
- def __str__(self):
- # run when the pid is written to the file
- ppid = self.childpid()
- if ppid is None:
- # race, child has exited since check
- # fall back to this pid. Its process will also have disappeared,
- # raising the same error type later as when the child pid would
- # be returned.
- return " %d" % self
- return str(ppid)
+ if not _kernel32.SetConsoleMode(handle, mode):
+ return False
+
+ return True
def spawndetached(args):
# No standard library function really spawns a fully detached
@@ -436,10 +509,6 @@
env += '\0'
args = subprocess.list2cmdline(args)
- # Not running the command in shell mode makes Python 2.6 hang when
- # writing to hgweb output socket.
- comspec = encoding.environ.get("COMSPEC", "cmd.exe")
- args = comspec + " /c " + args
res = _kernel32.CreateProcessA(
None, args, None, None, False, _CREATE_NO_WINDOW,
@@ -447,8 +516,7 @@
if not res:
raise ctypes.WinError()
- # _tochildpid because the process is the child of COMSPEC
- return _tochildpid(pi.dwProcessId)
+ return pi.dwProcessId
def unlink(f):
'''try to implement POSIX' unlink semantics on Windows'''
--- a/mercurial/windows.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/windows.py Wed Jul 19 07:51:41 2017 -0500
@@ -17,7 +17,7 @@
from .i18n import _
from . import (
encoding,
- osutil,
+ policy,
pycompat,
win32,
)
@@ -28,6 +28,8 @@
except ImportError:
import winreg
+osutil = policy.importmod(r'osutil')
+
executablepath = win32.executablepath
getuser = win32.getuser
hidewindow = win32.hidewindow
@@ -136,6 +138,9 @@
# convert to a friendlier exception
raise IOError(err.errno, '%s: %s' % (name, err.strerror))
+# may be wrapped by win32mbcs extension
+listdir = osutil.listdir
+
class winstdout(object):
'''stdout on windows misbehaves if sent through a pipe'''
@@ -175,7 +180,6 @@
except IOError as inst:
if inst.errno != errno.EINVAL:
raise
- self.close()
raise IOError(errno.EPIPE, 'Broken pipe')
def _is_win_9x():
@@ -331,7 +335,7 @@
return executable
return findexisting(os.path.expanduser(os.path.expandvars(command)))
-_wantedkinds = set([stat.S_IFREG, stat.S_IFLNK])
+_wantedkinds = {stat.S_IFREG, stat.S_IFLNK}
def statfiles(files):
'''Stat each file in files. Yield each stat, or None if a file
@@ -349,7 +353,7 @@
if cache is None:
try:
dmap = dict([(normcase(n), s)
- for n, k, s in osutil.listdir(dir, True)
+ for n, k, s in listdir(dir, True)
if getkind(s.st_mode) in _wantedkinds])
except OSError as err:
# Python >= 2.5 returns ENOENT and adds winerror field
@@ -376,7 +380,7 @@
def removedirs(name):
"""special version of os.removedirs that does not remove symlinked
directories or junction points if they actually contain files"""
- if osutil.listdir(name):
+ if listdir(name):
return
os.rmdir(name)
head, tail = os.path.split(name)
@@ -384,7 +388,7 @@
head, tail = os.path.split(head)
while head and tail:
try:
- if osutil.listdir(head):
+ if listdir(head):
return
os.rmdir(head)
except (ValueError, OSError):
--- a/mercurial/wireproto.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/wireproto.py Wed Jul 19 07:51:41 2017 -0500
@@ -16,6 +16,7 @@
from .node import (
bin,
hex,
+ nullid,
)
from . import (
@@ -410,7 +411,7 @@
remote server as a bundle.
When pushing a bundle10 stream, return an integer indicating the
- result of the push (see localrepository.addchangegroup()).
+ result of the push (see changegroup.apply()).
When pushing a bundle20 stream, return a bundle20 stream.
@@ -603,11 +604,11 @@
return v
if gd:
- v = ui.configbool('server', 'bundle1gd', None)
+ v = ui.configbool('server', 'bundle1gd')
if v is not None:
return v
- return ui.configbool('server', 'bundle1', True)
+ return ui.configbool('server', 'bundle1')
def supportedcompengines(ui, proto, role):
"""Obtain the list of supported compression engines for a request."""
@@ -753,25 +754,25 @@
"""
# copy to prevent modification of the global list
caps = list(wireprotocaps)
- if streamclone.allowservergeneration(repo.ui):
- if repo.ui.configbool('server', 'preferuncompressed', False):
+ if streamclone.allowservergeneration(repo):
+ if repo.ui.configbool('server', 'preferuncompressed'):
caps.append('stream-preferred')
requiredformats = repo.requirements & repo.supportedformats
# if our local revlogs are just revlogv1, add 'stream' cap
- if not requiredformats - set(('revlogv1',)):
+ if not requiredformats - {'revlogv1'}:
caps.append('stream')
# otherwise, add 'streamreqs' detailing our local revlog format
else:
caps.append('streamreqs=%s' % ','.join(sorted(requiredformats)))
- if repo.ui.configbool('experimental', 'bundle2-advertise', True):
+ if repo.ui.configbool('experimental', 'bundle2-advertise'):
capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
caps.append('bundle2=' + urlreq.quote(capsblob))
caps.append('unbundle=%s' % ','.join(bundle2.bundlepriority))
if proto.name == 'http':
caps.append('httpheader=%d' %
- repo.ui.configint('server', 'maxhttpheaderlen', 1024))
- if repo.ui.configbool('experimental', 'httppostargs', False):
+ repo.ui.configint('server', 'maxhttpheaderlen'))
+ if repo.ui.configbool('experimental', 'httppostargs'):
caps.append('httppostargs')
# FUTURE advertise 0.2rx once support is implemented
@@ -841,6 +842,17 @@
hint=bundle2requiredhint)
try:
+ if repo.ui.configbool('server', 'disablefullbundle'):
+ # Check to see if this is a full clone.
+ clheads = set(repo.changelog.heads())
+ heads = set(opts.get('heads', set()))
+ common = set(opts.get('common', set()))
+ common.discard(nullid)
+ if not common and clheads == heads:
+ raise error.Abort(
+ _('server has pull-based clones disabled'),
+ hint=_('remove --pull if specified or upgrade Mercurial'))
+
chunks = exchange.getbundlechunks(repo, 'serve', **opts)
except error.Abort as exc:
# cleanly forward Abort error to the client
@@ -934,7 +946,7 @@
capability with a value representing the version and flags of the repo
it is serving. Client checks to see if it understands the format.
'''
- if not streamclone.allowservergeneration(repo.ui):
+ if not streamclone.allowservergeneration(repo):
return '1\n'
def getstream(it):
--- a/mercurial/worker.py Wed Jul 05 11:24:22 2017 -0400
+++ b/mercurial/worker.py Wed Jul 19 07:51:41 2017 -0500
@@ -26,7 +26,7 @@
# posix
try:
- n = int(os.sysconf('SC_NPROCESSORS_ONLN'))
+ n = int(os.sysconf(r'SC_NPROCESSORS_ONLN'))
if n > 0:
return n
except (AttributeError, ValueError):
@@ -134,37 +134,43 @@
killworkers()
oldchldhandler = signal.signal(signal.SIGCHLD, sigchldhandler)
ui.flush()
+ parentpid = os.getpid()
for pargs in partition(args, workers):
- pid = os.fork()
- if pid == 0:
- signal.signal(signal.SIGINT, oldhandler)
- signal.signal(signal.SIGCHLD, oldchldhandler)
-
- def workerfunc():
- os.close(rfd)
- for i, item in func(*(staticargs + (pargs,))):
- os.write(wfd, '%d %s\n' % (i, item))
- return 0
+ # make sure we use os._exit in all worker code paths. otherwise the
+ # worker may do some clean-ups which could cause surprises like
+ # deadlock. see sshpeer.cleanup for example.
+ # override error handling *before* fork. this is necessary because
+ # exception (signal) may arrive after fork, before "pid =" assignment
+ # completes, and other exception handler (dispatch.py) can lead to
+ # unexpected code path without os._exit.
+ ret = -1
+ try:
+ pid = os.fork()
+ if pid == 0:
+ signal.signal(signal.SIGINT, oldhandler)
+ signal.signal(signal.SIGCHLD, oldchldhandler)
- # make sure we use os._exit in all code paths. otherwise the worker
- # may do some clean-ups which could cause surprises like deadlock.
- # see sshpeer.cleanup for example.
- ret = 0
- try:
+ def workerfunc():
+ os.close(rfd)
+ for i, item in func(*(staticargs + (pargs,))):
+ os.write(wfd, '%d %s\n' % (i, item))
+ return 0
+
+ ret = scmutil.callcatch(ui, workerfunc)
+ except: # parent re-raises, child never returns
+ if os.getpid() == parentpid:
+ raise
+ exctype = sys.exc_info()[0]
+ force = not issubclass(exctype, KeyboardInterrupt)
+ ui.traceback(force=force)
+ finally:
+ if os.getpid() != parentpid:
try:
- ret = scmutil.callcatch(ui, workerfunc)
- finally:
ui.flush()
- except KeyboardInterrupt:
- os._exit(255)
- except: # never return, therefore no re-raises
- try:
- ui.traceback(force=True)
- ui.flush()
+ except: # never returns, no re-raises
+ pass
finally:
- os._exit(255)
- else:
- os._exit(ret & 255)
+ os._exit(ret & 255)
pids.add(pid)
os.close(wfd)
fp = os.fdopen(rfd, pycompat.sysstr('rb'), 0)
--- a/setup.py Wed Jul 05 11:24:22 2017 -0400
+++ b/setup.py Wed Jul 19 07:51:41 2017 -0500
@@ -5,8 +5,8 @@
# 'python setup.py --help' for more options
import sys, platform
-if getattr(sys, 'version_info', (0, 0, 0)) < (2, 6, 0, 'final'):
- raise SystemExit("Mercurial requires Python 2.6 or later.")
+if sys.version_info < (2, 7, 0, 'final'):
+ raise SystemExit('Mercurial requires Python 2.7 or later.')
if sys.version_info[0] >= 3:
printf = eval('print')
@@ -77,6 +77,7 @@
from distutils.command.build_ext import build_ext
from distutils.command.build_py import build_py
from distutils.command.build_scripts import build_scripts
+from distutils.command.install import install
from distutils.command.install_lib import install_lib
from distutils.command.install_scripts import install_scripts
from distutils.spawn import spawn, find_executable
@@ -142,66 +143,114 @@
py2exeloaded = False
def runcmd(cmd, env):
- if (sys.platform == 'plan9'
- and (sys.version_info[0] == 2 and sys.version_info[1] < 7)):
- # subprocess kludge to work around issues in half-baked Python
- # ports, notably bichued/python:
- _, out, err = os.popen3(cmd)
- return str(out), str(err)
- else:
- p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
- stderr=subprocess.PIPE, env=env)
- out, err = p.communicate()
- return out, err
+ p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE, env=env)
+ out, err = p.communicate()
+ return p.returncode, out, err
+
+class hgcommand(object):
+ def __init__(self, cmd, env):
+ self.cmd = cmd
+ self.env = env
+
+ def run(self, args):
+ cmd = self.cmd + args
+ returncode, out, err = runcmd(cmd, self.env)
+ # If root is executing setup.py, but the repository is owned by
+ # another user (as in "sudo python setup.py install") we will get
+ # trust warnings since the .hg/hgrc file is untrusted. That is
+ # fine, we don't want to load it anyway. Python may warn about
+ # a missing __init__.py in mercurial/locale, we also ignore that.
+ err = [e for e in err.splitlines()
+ if not e.startswith(b'not trusting file') \
+ and not e.startswith(b'warning: Not importing') \
+ and not e.startswith(b'obsolete feature not enabled')]
+ if err or returncode != 0:
+ printf("stderr from '%s':" % (' '.join(cmd)), file=sys.stderr)
+ printf(b'\n'.join([b' ' + e for e in err]), file=sys.stderr)
+ return ''
+ return out
+
+def findhg():
+ """Try to figure out how we should invoke hg for examining the local
+ repository contents.
-def runhg(cmd, env):
- out, err = runcmd(cmd, env)
- # If root is executing setup.py, but the repository is owned by
- # another user (as in "sudo python setup.py install") we will get
- # trust warnings since the .hg/hgrc file is untrusted. That is
- # fine, we don't want to load it anyway. Python may warn about
- # a missing __init__.py in mercurial/locale, we also ignore that.
- err = [e for e in err.splitlines()
- if not e.startswith(b'not trusting file') \
- and not e.startswith(b'warning: Not importing') \
- and not e.startswith(b'obsolete feature not enabled')]
- if err:
- printf("stderr from '%s':" % (' '.join(cmd)), file=sys.stderr)
- printf(b'\n'.join([b' ' + e for e in err]), file=sys.stderr)
- return ''
- return out
+ Returns an hgcommand object."""
+ # By default, prefer the "hg" command in the user's path. This was
+ # presumably the hg command that the user used to create this repository.
+ #
+ # This repository may require extensions or other settings that would not
+ # be enabled by running the hg script directly from this local repository.
+ hgenv = os.environ.copy()
+ # Use HGPLAIN to disable hgrc settings that would change output formatting,
+ # and disable localization for the same reasons.
+ hgenv['HGPLAIN'] = '1'
+ hgenv['LANGUAGE'] = 'C'
+ hgcmd = ['hg']
+ # Run a simple "hg log" command just to see if using hg from the user's
+ # path works and can successfully interact with this repository.
+ check_cmd = ['log', '-r.', '-Ttest']
+ try:
+ retcode, out, err = runcmd(hgcmd + check_cmd, hgenv)
+ except EnvironmentError:
+ retcode = -1
+ if retcode == 0:
+ return hgcommand(hgcmd, hgenv)
+
+ # Fall back to trying the local hg installation.
+ hgenv = localhgenv()
+ # Don't source any system hgrc files when using the local hg.
+ hgenv['HGRCPATH'] = ''
+ hgcmd = [sys.executable, 'hg']
+ try:
+ retcode, out, err = runcmd(hgcmd + check_cmd, hgenv)
+ except EnvironmentError:
+ retcode = -1
+ if retcode == 0:
+ return hgcommand(hgcmd, hgenv)
+
+ raise SystemExit('Unable to find a working hg binary to extract the '
+ 'version from the repository tags')
+
+def localhgenv():
+ """Get an environment dictionary to use for invoking or importing
+ mercurial from the local repository."""
+ # Execute hg out of this directory with a custom environment which takes
+ # care to not use any hgrc files and do no localization.
+ env = {'HGMODULEPOLICY': 'py',
+ 'HGRCPATH': '',
+ 'LANGUAGE': 'C',
+ 'PATH': ''} # make pypi modules that use os.environ['PATH'] happy
+ if 'LD_LIBRARY_PATH' in os.environ:
+ env['LD_LIBRARY_PATH'] = os.environ['LD_LIBRARY_PATH']
+ if 'SystemRoot' in os.environ:
+ # SystemRoot is required by Windows to load various DLLs. See:
+ # https://bugs.python.org/issue13524#msg148850
+ env['SystemRoot'] = os.environ['SystemRoot']
+ return env
version = ''
-# Execute hg out of this directory with a custom environment which takes care
-# to not use any hgrc files and do no localization.
-env = {'HGMODULEPOLICY': 'py',
- 'HGRCPATH': '',
- 'LANGUAGE': 'C',
- 'PATH': ''} # make pypi modules that use os.environ['PATH'] happy
-if 'LD_LIBRARY_PATH' in os.environ:
- env['LD_LIBRARY_PATH'] = os.environ['LD_LIBRARY_PATH']
-if 'SystemRoot' in os.environ:
- # Copy SystemRoot into the custom environment for Python 2.6
- # under Windows. Otherwise, the subprocess will fail with
- # error 0xc0150004. See: http://bugs.python.org/issue3440
- env['SystemRoot'] = os.environ['SystemRoot']
-
if os.path.isdir('.hg'):
- cmd = [sys.executable, 'hg', 'log', '-r', '.', '--template', '{tags}\n']
- numerictags = [t for t in runhg(cmd, env).split() if t[0].isdigit()]
- hgid = runhg([sys.executable, 'hg', 'id', '-i'], env).strip()
+ hg = findhg()
+ cmd = ['log', '-r', '.', '--template', '{tags}\n']
+ numerictags = [t for t in hg.run(cmd).split() if t[0:1].isdigit()]
+ hgid = hg.run(['id', '-i']).strip()
+ if not hgid:
+ # Bail out if hg is having problems interacting with this repository,
+ # rather than falling through and producing a bogus version number.
+ # Continuing with an invalid version number will break extensions
+ # that define minimumhgversion.
+ raise SystemExit('Unable to determine hg version from local repository')
if numerictags: # tag(s) found
version = numerictags[-1]
if hgid.endswith('+'): # propagate the dirty status to the tag
version += '+'
else: # no tag found
- ltagcmd = [sys.executable, 'hg', 'parents', '--template',
- '{latesttag}']
- ltag = runhg(ltagcmd, env)
- changessincecmd = [sys.executable, 'hg', 'log', '-T', 'x\n', '-r',
- "only(.,'%s')" % ltag]
- changessince = len(runhg(changessincecmd, env).splitlines())
+ ltagcmd = ['parents', '--template', '{latesttag}']
+ ltag = hg.run(ltagcmd)
+ changessincecmd = ['log', '-T', 'x\n', '-r', "only(.,'%s')" % ltag]
+ changessince = len(hg.run(changessincecmd).splitlines())
version = '%s+%s-%s' % (ltag, changessince, hgid)
if version.endswith('+'):
version += time.strftime('%Y%m%d')
@@ -353,15 +402,15 @@
self.distribution.ext_modules = []
elif self.distribution.cffi:
from mercurial.cffi import (
- bdiff,
- mpatch,
+ bdiffbuild,
+ mpatchbuild,
)
- exts = [mpatch.ffi.distutils_extension(),
- bdiff.ffi.distutils_extension()]
+ exts = [mpatchbuild.ffi.distutils_extension(),
+ bdiffbuild.ffi.distutils_extension()]
# cffi modules go here
if sys.platform == 'darwin':
- from mercurial.cffi import osutil
- exts.append(osutil.ffi.distutils_extension())
+ from mercurial.cffi import osutilbuild
+ exts.append(osutilbuild.ffi.distutils_extension())
self.distribution.ext_modules = exts
else:
h = os.path.join(get_python_inc(), 'Python.h')
@@ -370,11 +419,17 @@
'Mercurial but weren\'t found in %s' % h)
def run(self):
+ basepath = os.path.join(self.build_lib, 'mercurial')
+ self.mkpath(basepath)
+
if self.distribution.pure:
modulepolicy = 'py'
+ elif self.build_lib == '.':
+ # in-place build should run without rebuilding C extensions
+ modulepolicy = 'allow'
else:
modulepolicy = 'c'
- with open("mercurial/__modulepolicy__.py", "w") as f:
+ with open(os.path.join(basepath, '__modulepolicy__.py'), "w") as f:
f.write('# this file is autogenerated by setup.py\n')
f.write('modulepolicy = b"%s"\n' % modulepolicy)
@@ -399,8 +454,9 @@
# here no extension enabled, disabled() lists up everything
code = ('import pprint; from mercurial import extensions; '
'pprint.pprint(extensions.disabled())')
- out, err = runcmd([sys.executable, '-c', code], env)
- if err:
+ returncode, out, err = runcmd([sys.executable, '-c', code],
+ localhgenv())
+ if err or returncode != 0:
raise DistutilsExecError(err)
with open(self._indexfilename, 'w') as f:
@@ -461,6 +517,25 @@
dir = os.path.dirname(self.get_ext_fullpath('dummy'))
return os.path.join(self.build_temp, dir, 'hg.exe')
+class hginstall(install):
+
+ user_options = install.user_options + [
+ ('old-and-unmanageable', None,
+ 'noop, present for eggless setuptools compat'),
+ ('single-version-externally-managed', None,
+ 'noop, present for eggless setuptools compat'),
+ ]
+
+ # Also helps setuptools not be sad while we refuse to create eggs.
+ single_version_externally_managed = True
+
+ def get_sub_commands(self):
+ # Screen out egg related commands to prevent egg generation. But allow
+ # mercurial.egg-info generation, since that is part of modern
+ # packaging.
+ excl = {'bdist_egg'}
+ return filter(lambda x: x not in excl, install.get_sub_commands(self))
+
class hginstalllib(install_lib):
'''
This is a specialization of install_lib that replaces the copy_file used
@@ -572,20 +647,27 @@
'build_py': hgbuildpy,
'build_scripts': hgbuildscripts,
'build_hgextindex': buildhgextindex,
+ 'install': hginstall,
'install_lib': hginstalllib,
'install_scripts': hginstallscripts,
'build_hgexe': buildhgexe,
}
-packages = ['mercurial', 'mercurial.hgweb', 'mercurial.httpclient',
+packages = ['mercurial',
+ 'mercurial.cext',
+ 'mercurial.cffi',
+ 'mercurial.hgweb',
+ 'mercurial.httpclient',
'mercurial.pure',
'hgext', 'hgext.convert', 'hgext.fsmonitor',
'hgext.fsmonitor.pywatchman', 'hgext.highlight',
- 'hgext.largefiles', 'hgext.zeroconf', 'hgext3rd']
+ 'hgext.largefiles', 'hgext.zeroconf', 'hgext3rd',
+ 'hgdemandimport']
common_depends = ['mercurial/bitmanipulation.h',
'mercurial/compat.h',
- 'mercurial/util.h']
+ 'mercurial/cext/util.h']
+common_include_dirs = ['mercurial']
osutil_cflags = []
osutil_ldflags = []
@@ -614,22 +696,29 @@
osutil_ldflags += ['-framework', 'ApplicationServices']
extmodules = [
- Extension('mercurial.base85', ['mercurial/base85.c'],
+ Extension('mercurial.cext.base85', ['mercurial/cext/base85.c'],
+ include_dirs=common_include_dirs,
depends=common_depends),
- Extension('mercurial.bdiff', ['mercurial/bdiff.c',
- 'mercurial/bdiff_module.c'],
+ Extension('mercurial.cext.bdiff', ['mercurial/bdiff.c',
+ 'mercurial/cext/bdiff.c'],
+ include_dirs=common_include_dirs,
depends=common_depends + ['mercurial/bdiff.h']),
- Extension('mercurial.diffhelpers', ['mercurial/diffhelpers.c'],
+ Extension('mercurial.cext.diffhelpers', ['mercurial/cext/diffhelpers.c'],
+ include_dirs=common_include_dirs,
depends=common_depends),
- Extension('mercurial.mpatch', ['mercurial/mpatch.c',
- 'mercurial/mpatch_module.c'],
+ Extension('mercurial.cext.mpatch', ['mercurial/mpatch.c',
+ 'mercurial/cext/mpatch.c'],
+ include_dirs=common_include_dirs,
depends=common_depends),
- Extension('mercurial.parsers', ['mercurial/dirs.c',
- 'mercurial/manifest.c',
- 'mercurial/parsers.c',
- 'mercurial/pathencode.c'],
+ Extension('mercurial.cext.parsers', ['mercurial/cext/dirs.c',
+ 'mercurial/cext/manifest.c',
+ 'mercurial/cext/parsers.c',
+ 'mercurial/cext/pathencode.c',
+ 'mercurial/cext/revlog.c'],
+ include_dirs=common_include_dirs,
depends=common_depends),
- Extension('mercurial.osutil', ['mercurial/osutil.c'],
+ Extension('mercurial.cext.osutil', ['mercurial/cext/osutil.c'],
+ include_dirs=common_include_dirs,
extra_compile_args=osutil_cflags,
extra_link_args=osutil_ldflags,
depends=common_depends),
@@ -664,6 +753,23 @@
class HackedMingw32CCompiler(object):
pass
+if os.name == 'nt':
+ # Allow compiler/linker flags to be added to Visual Studio builds. Passing
+ # extra_link_args to distutils.extensions.Extension() doesn't have any
+ # effect.
+ from distutils import msvccompiler
+
+ compiler = msvccompiler.MSVCCompiler
+
+ class HackedMSVCCompiler(msvccompiler.MSVCCompiler):
+ def initialize(self):
+ compiler.initialize(self)
+ # "warning LNK4197: export 'func' specified multiple times"
+ self.ldflags_shared.append('/ignore:4197')
+ self.ldflags_shared_debug.append('/ignore:4197')
+
+ msvccompiler.MSVCCompiler = HackedMSVCCompiler
+
packagedata = {'mercurial': ['locale/*/LC_MESSAGES/hg.mo',
'help/*.txt',
'help/internals/*.txt',
@@ -708,7 +814,7 @@
setupversion = version.split('+', 1)[0]
if sys.platform == 'darwin' and os.path.exists('/usr/bin/xcodebuild'):
- version = runcmd(['/usr/bin/xcodebuild', '-version'], {})[0].splitlines()
+ version = runcmd(['/usr/bin/xcodebuild', '-version'], {})[1].splitlines()
if version:
version = version[0]
if sys.version_info[0] == 3:
@@ -779,7 +885,7 @@
package_data=packagedata,
cmdclass=cmdclass,
distclass=hgdist,
- options={'py2exe': {'packages': ['hgext', 'email']},
+ options={'py2exe': {'packages': ['hgdemandimport', 'hgext', 'email']},
'bdist_mpkg': {'zipdist': False,
'license': 'COPYING',
'readme': 'contrib/macosx/Readme.html',
--- a/tests/autodiff.py Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/autodiff.py Wed Jul 19 07:51:41 2017 -0500
@@ -3,14 +3,14 @@
from __future__ import absolute_import
from mercurial import (
- cmdutil,
error,
patch,
+ registrar,
scmutil,
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
@command('autodiff',
[('', 'git', '', 'git upgrade mode (yes/no/auto/warn/abort)')],
--- a/tests/badserverext.py Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/badserverext.py Wed Jul 19 07:51:41 2017 -0500
@@ -33,10 +33,30 @@
import socket
+from mercurial import(
+ registrar,
+)
+
from mercurial.hgweb import (
server,
)
+configtable = {}
+configitem = registrar.configitem(configtable)
+
+configitem('badserver', 'closeafteraccept',
+ default=False,
+)
+configitem('badserver', 'closeafterrecvbytes',
+ default=0,
+)
+configitem('badserver', 'closeaftersendbytes',
+ default=0,
+)
+configitem('badserver', 'closebeforeaccept',
+ default=False,
+)
+
# We can't adjust __class__ on a socket instance. So we define a proxy type.
class socketproxy(object):
__slots__ = (
@@ -256,9 +276,9 @@
def process_request(self, socket, address):
# Wrap socket in a proxy if we need to count bytes.
closeafterrecvbytes = self._ui.configint('badserver',
- 'closeafterrecvbytes', 0)
+ 'closeafterrecvbytes')
closeaftersendbytes = self._ui.configint('badserver',
- 'closeaftersendbytes', 0)
+ 'closeaftersendbytes')
if closeafterrecvbytes or closeaftersendbytes:
socket = socketproxy(socket, self.errorlog,
--- a/tests/blacklists/fsmonitor Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/blacklists/fsmonitor Wed Jul 19 07:51:41 2017 -0500
@@ -1,16 +1,10 @@
# Blacklist for a full testsuite run with fsmonitor enabled.
-# Use with
-# run-tests --blacklist=blacklists/fsmonitor \
-# --extra-config="extensions.fsmonitor="
+# Used by fsmonitor-run-tests.
# The following tests all fail because they either use extensions that conflict
# with fsmonitor, use subrepositories, or don't anticipate the extra file in
# the .hg directory that fsmonitor adds.
-test-basic.t
-test-blackbox.t
-test-check-commit.t
-test-commandserver.t
-test-copy.t
-test-debugextensions.t
+
+#### mainly testing eol extension
test-eol-add.t
test-eol-clone.t
test-eol-hook.t
@@ -19,11 +13,8 @@
test-eol-update.t
test-eol.t
test-eolfilename.t
-test-extension.t
-test-fncache.t
-test-hardlinks.t
-test-help.t
-test-inherit-mode.t
+
+#### mainly testing largefiles extension
test-issue3084.t
test-largefiles-cache.t
test-largefiles-misc.t
@@ -32,21 +23,16 @@
test-largefiles-wireproto.t
test-largefiles.t
test-lfconvert.t
-test-merge-tools.t
+
+#### mainly testing nested repositories
test-nested-repo.t
-test-permissions.t
test-push-warn.t
test-subrepo-deep-nested-change.t
test-subrepo-recursion.t
test-subrepo.t
-test-tags.t
-# The following tests remain enabled; they fail *too*, but only because they
-# occasionally use blacklisted extensions and don't anticipate the warning
-# generated.
-#test-log.t
-#test-hook.t
-#test-rename.t
-#test-histedit-fold.t
-#test-fileset-generated.t
-#test-init.t
+#### fixing these seems redundant, because these don't focus on
+#### operations in the working directory or .hg
+test-debugextensions.t
+test-extension.t
+test-help.t
--- a/tests/check-perf-code.py Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/check-perf-code.py Wed Jul 19 07:51:41 2017 -0500
@@ -26,7 +26,7 @@
replacement = [('.py', ''), ('.c', ''), # trim suffix
('mercurial%s' % (os.sep), ''), # trim "mercurial/" path
]
- ignored = set(['__init__'])
+ ignored = {'__init__'}
modules = {}
# convert from file name to module name, and count # of appearances
--- a/tests/drawdag.py Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/drawdag.py Wed Jul 19 07:51:41 2017 -0500
@@ -72,6 +72,13 @@
This is because 'o' is specially handled in the input: instead of using 'o' as
the node name, the word to the right will be used.
+
+Some special comments could have side effects:
+
+ - Create obsmarkers
+ # replace: A -> B -> C -> D # chained 1 to 1 replacements
+ # split: A -> B, C # 1 to many
+ # prune: A, B, C # many to nothing
"""
from __future__ import absolute_import, print_function
@@ -80,16 +87,17 @@
from mercurial.i18n import _
from mercurial import (
- cmdutil,
context,
error,
node,
+ obsolete,
+ registrar,
scmutil,
tags as tagsmod,
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
_pipechars = '\\/+-|'
_nonpipechars = ''.join(chr(i) for i in xrange(33, 127)
@@ -145,7 +153,7 @@
def parents(y, x):
"""(int, int) -> [str]. follow the ASCII edges at given position,
return a list of parents"""
- visited = set([(y, x)])
+ visited = {(y, x)}
visit = []
result = []
@@ -214,6 +222,9 @@
def data(self):
return self._data
+ def filenode(self):
+ return None
+
def path(self):
return self._path
@@ -224,22 +235,21 @@
return ''
class simplecommitctx(context.committablectx):
- def __init__(self, repo, name, parentctxs, added=None):
+ def __init__(self, repo, name, parentctxs, added):
opts = {
- 'changes': scmutil.status([], added or [], [], [], [], [], []),
+ 'changes': scmutil.status([], list(added), [], [], [], [], []),
'date': '0 0',
'extra': {'branch': 'default'},
}
super(simplecommitctx, self).__init__(self, name, **opts)
self._repo = repo
- self._name = name
+ self._added = added
self._parents = parentctxs
- self._parents.sort(key=lambda c: c.node())
while len(self._parents) < 2:
self._parents.append(repo[node.nullid])
def filectx(self, key):
- return simplefilectx(key, self._name)
+ return simplefilectx(key, self._added[key])
def commit(self):
return self._repo.commitctx(self)
@@ -306,8 +316,48 @@
if name in committed:
continue
pctxs = [repo[committed[n]] for n in parents]
- ctx = simplecommitctx(repo, name, pctxs, [name])
+ pctxs.sort(key=lambda c: c.node())
+ added = {}
+ if len(parents) > 1:
+ # If it's a merge, take the files and contents from the parents
+ for f in pctxs[1].manifest():
+ if f not in pctxs[0].manifest():
+ added[f] = pctxs[1][f].data()
+ else:
+ # If it's not a merge, add a single file
+ added[name] = name
+ ctx = simplecommitctx(repo, name, pctxs, added)
n = ctx.commit()
committed[name] = n
tagsmod.tag(repo, name, n, message=None, user=None, date=None,
local=True)
+
+ # handle special comments
+ with repo.wlock(), repo.lock(), repo.transaction('drawdag'):
+ getctx = lambda x: repo.unfiltered()[committed[x.strip()]]
+ for line in text.splitlines():
+ if ' # ' not in line:
+ continue
+
+ rels = [] # obsolete relationships
+ comment = line.split(' # ', 1)[1].split(' # ')[0].strip()
+ args = comment.split(':', 1)
+ if len(args) <= 1:
+ continue
+
+ cmd = args[0].strip()
+ arg = args[1].strip()
+
+ if cmd in ('replace', 'rebase', 'amend'):
+ nodes = [getctx(m) for m in arg.split('->')]
+ for i in range(len(nodes) - 1):
+ rels.append((nodes[i], (nodes[i + 1],)))
+ elif cmd in ('split',):
+ pre, succs = arg.split('->')
+ succs = succs.split(',')
+ rels.append((getctx(pre), (getctx(s) for s in succs)))
+ elif cmd in ('prune',):
+ for n in arg.split(','):
+ rels.append((getctx(n), ()))
+ if rels:
+ obsolete.createmarkers(repo, rels, date=(0, 0), operation=cmd)
--- a/tests/f Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/f Wed Jul 19 07:51:41 2017 -0500
@@ -51,7 +51,7 @@
if opts.type:
facts.append('file')
if opts.hexdump or opts.dump or opts.md5:
- content = file(f, 'rb').read()
+ content = open(f, 'rb').read()
elif islink:
if opts.type:
facts.append('link')
--- a/tests/fakedirstatewritetime.py Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/fakedirstatewritetime.py Wed Jul 19 07:51:41 2017 -0500
@@ -2,7 +2,7 @@
# specified by '[fakedirstatewritetime] fakenow', only when
# 'dirstate.write()' is invoked via functions below:
#
-# - 'workingctx._checklookup()' (= 'repo.status()')
+# - 'workingctx._poststatusfixup()' (= 'repo.status()')
# - 'committablectx.markcommitted()'
from __future__ import absolute_import
@@ -11,10 +11,12 @@
context,
dirstate,
extensions,
- parsers,
+ policy,
util,
)
+parsers = policy.importmod(r'parsers')
+
def pack_dirstate(fakenow, orig, dmap, copymap, pl, now):
# execute what original parsers.pack_dirstate should do actually
# for consistency
@@ -53,16 +55,16 @@
parsers.pack_dirstate = orig_pack_dirstate
dirstate._getfsnow = orig_dirstate_getfsnow
-def _checklookup(orig, workingctx, files):
+def _poststatusfixup(orig, workingctx, status, fixup):
ui = workingctx.repo().ui
- return fakewrite(ui, lambda : orig(workingctx, files))
+ return fakewrite(ui, lambda : orig(workingctx, status, fixup))
def markcommitted(orig, committablectx, node):
ui = committablectx.repo().ui
return fakewrite(ui, lambda : orig(committablectx, node))
def extsetup(ui):
- extensions.wrapfunction(context.workingctx, '_checklookup',
- _checklookup)
+ extensions.wrapfunction(context.workingctx, '_poststatusfixup',
+ _poststatusfixup)
extensions.wrapfunction(context.committablectx, 'markcommitted',
markcommitted)
--- a/tests/fakemergerecord.py Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/fakemergerecord.py Wed Jul 19 07:51:41 2017 -0500
@@ -5,12 +5,12 @@
from __future__ import absolute_import
from mercurial import (
- cmdutil,
merge,
+ registrar,
)
cmdtable = {}
-command = cmdutil.command(cmdtable)
+command = registrar.command(cmdtable)
@command('fakemergerecord',
[('X', 'mandatory', None, 'add a fake mandatory record'),
--- a/tests/filterpyflakes.py Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/filterpyflakes.py Wed Jul 19 07:51:41 2017 -0500
@@ -11,8 +11,11 @@
for line in sys.stdin:
# We blacklist tests that are too noisy for us
pats = [
- r"undefined name '(WindowsError|memoryview)'",
+ r"undefined name 'WindowsError'",
r"redefinition of unused '[^']+' from line",
+ # for cffi, allow re-exports from pure.*
+ r"cffi/[^:]*:.*\bimport \*' used",
+ r"cffi/[^:]*:.*\*' imported but unused",
]
keep = True
@@ -32,8 +35,3 @@
for line in lines:
sys.stdout.write(line)
print()
-
-# self test of "undefined name" detection for other than 'memoryview'
-if False:
- print(memoryview)
- print(undefinedname)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/fsmonitor-run-tests.py Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,133 @@
+#!/usr/bin/env python
+
+# fsmonitor-run-tests.py - Run Mercurial tests with fsmonitor enabled
+#
+# Copyright 2017 Facebook, Inc.
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+#
+# This is a wrapper around run-tests.py that spins up an isolated instance of
+# Watchman and runs the Mercurial tests against it. This ensures that the global
+# version of Watchman isn't affected by anything this test does.
+
+from __future__ import absolute_import
+from __future__ import print_function
+
+import argparse
+import contextlib
+import json
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+import uuid
+
+osenvironb = getattr(os, 'environb', os.environ)
+
+if sys.version_info > (3, 5, 0):
+ PYTHON3 = True
+ xrange = range # we use xrange in one place, and we'd rather not use range
+ def _bytespath(p):
+ return p.encode('utf-8')
+
+elif sys.version_info >= (3, 0, 0):
+ print('%s is only supported on Python 3.5+ and 2.7, not %s' %
+ (sys.argv[0], '.'.join(str(v) for v in sys.version_info[:3])))
+ sys.exit(70) # EX_SOFTWARE from `man 3 sysexits`
+else:
+ PYTHON3 = False
+
+ # In python 2.x, path operations are generally done using
+ # bytestrings by default, so we don't have to do any extra
+ # fiddling there. We define the wrapper functions anyway just to
+ # help keep code consistent between platforms.
+ def _bytespath(p):
+ return p
+
+def getparser():
+ """Obtain the argument parser used by the CLI."""
+ parser = argparse.ArgumentParser(
+ description='Run tests with fsmonitor enabled.',
+ epilog='Unrecognized options are passed to run-tests.py.')
+ # - keep these sorted
+ # - none of these options should conflict with any in run-tests.py
+ parser.add_argument('--keep-fsmonitor-tmpdir', action='store_true',
+ help='keep temporary directory with fsmonitor state')
+ parser.add_argument('--watchman',
+ help='location of watchman binary (default: watchman in PATH)',
+ default='watchman')
+
+ return parser
+
+@contextlib.contextmanager
+def watchman(args):
+ basedir = tempfile.mkdtemp(prefix='hg-fsmonitor')
+ try:
+ # Much of this configuration is borrowed from Watchman's test harness.
+ cfgfile = os.path.join(basedir, 'config.json')
+ # TODO: allow setting a config
+ with open(cfgfile, 'w') as f:
+ f.write(json.dumps({}))
+
+ logfile = os.path.join(basedir, 'log')
+ clilogfile = os.path.join(basedir, 'cli-log')
+ if os.name == 'nt':
+ sockfile = '\\\\.\\pipe\\watchman-test-%s' % uuid.uuid4().hex
+ else:
+ sockfile = os.path.join(basedir, 'sock')
+ pidfile = os.path.join(basedir, 'pid')
+ statefile = os.path.join(basedir, 'state')
+
+ argv = [
+ args.watchman,
+ '--sockname', sockfile,
+ '--logfile', logfile,
+ '--pidfile', pidfile,
+ '--statefile', statefile,
+ '--foreground',
+ '--log-level=2', # debug logging for watchman
+ ]
+
+ envb = osenvironb.copy()
+ envb[b'WATCHMAN_CONFIG_FILE'] = _bytespath(cfgfile)
+ with open(clilogfile, 'wb') as f:
+ proc = subprocess.Popen(
+ argv, env=envb, stdin=None, stdout=f, stderr=f)
+ try:
+ yield sockfile
+ finally:
+ proc.terminate()
+ proc.kill()
+ finally:
+ if args.keep_fsmonitor_tmpdir:
+ print('fsmonitor dir available at %s' % basedir)
+ else:
+ shutil.rmtree(basedir, ignore_errors=True)
+
+def run():
+ parser = getparser()
+ args, runtestsargv = parser.parse_known_args()
+
+ with watchman(args) as sockfile:
+ osenvironb[b'WATCHMAN_SOCK'] = _bytespath(sockfile)
+ # Indicate to hghave that we're running with fsmonitor enabled.
+ osenvironb[b'HGFSMONITOR_TESTS'] = b'1'
+
+ runtestdir = os.path.dirname(__file__)
+ runtests = os.path.join(runtestdir, 'run-tests.py')
+ blacklist = os.path.join(runtestdir, 'blacklists', 'fsmonitor')
+
+ runtestsargv.insert(0, runtests)
+ runtestsargv.extend([
+ '--extra-config',
+ 'extensions.fsmonitor=',
+ '--blacklist',
+ blacklist,
+ ])
+
+ return subprocess.call(runtestsargv)
+
+if __name__ == '__main__':
+ sys.exit(run())
--- a/tests/generate-working-copy-states.py Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/generate-working-copy-states.py Wed Jul 19 07:51:41 2017 -0500
@@ -47,7 +47,7 @@
content in parentcontents]) + "-" + tracked
yield (filename, parentcontents)
else:
- for content in (set([None, 'content' + str(depth + 1)]) |
+ for content in ({None, 'content' + str(depth + 1)} |
set(parentcontents)):
for combination in generatestates(maxchangesets,
parentcontents + [content]):
--- a/tests/helpers-testrepo.sh Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/helpers-testrepo.sh Wed Jul 19 07:51:41 2017 -0500
@@ -1,19 +1,53 @@
-# The test-repo is a live hg repository which may have evolution
-# markers created, e.g. when a ~/.hgrc enabled evolution.
+# In most cases, the mercurial repository can be read by the bundled hg, but
+# that isn't always true because third-party extensions may change the store
+# format, for example. In which case, the system hg installation is used.
#
-# Tests are run using a custom HGRCPATH, which do not
-# enable evolution markers by default.
+# We want to use the hg version being tested when interacting with the test
+# repository, and the system hg when interacting with the mercurial source code
+# repository.
#
-# If test-repo includes evolution markers, and we do not
-# enable evolution markers, hg will occasionally complain
-# when it notices them, which disrupts tests resulting in
-# sporadic failures.
+# The mercurial source repository was typically originally cloned with the
+# system mercurial installation, and may require extensions or settings from
+# the system installation.
+syshg () {
+ (
+ syshgenv
+ exec hg "$@"
+ )
+}
+
+# Revert the environment so that running "hg" runs the system hg
+# rather than the test hg installation.
+syshgenv () {
+ . "$HGTEST_RESTOREENV"
+ HGPLAIN=1
+ export HGPLAIN
+}
+
+# The test-repo is a live hg repository which may have evolution markers
+# created, e.g. when a ~/.hgrc enabled evolution.
#
-# Since we aren't performing any write operations on the
-# test-repo, there's no harm in telling hg that we support
-# evolution markers, which is what the following lines
-# for the hgrc file do:
-cat >> $HGRCPATH << EOF
+# Tests may be run using a custom HGRCPATH, which does not enable evolution
+# markers by default.
+#
+# If test-repo includes evolution markers, and we do not enable evolution
+# markers, hg will occasionally complain when it notices them, which disrupts
+# tests resulting in sporadic failures.
+#
+# Since we aren't performing any write operations on the test-repo, there's
+# no harm in telling hg that we support evolution markers, which is what the
+# following lines for the hgrc file do:
+cat >> "$HGRCPATH" << EOF
[experimental]
-evolution=createmarkers
+evolution = createmarkers
EOF
+
+# Use the system hg command if the bundled hg can't read the repository with
+# no warning nor error.
+if [ -n "`hg id -R "$TESTDIR/.." 2>&1 >/dev/null`" ]; then
+ alias testrepohg=syshg
+ alias testrepohgenv=syshgenv
+else
+ alias testrepohg=hg
+ alias testrepohgenv=:
+fi
--- a/tests/hghave.py Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/hghave.py Wed Jul 19 07:51:41 2017 -0500
@@ -278,6 +278,17 @@
def has_git():
return matchoutput('git --version 2>&1', br'^git version')
+def getgitversion():
+ m = matchoutput('git --version 2>&1', br'git version (\d+)\.(\d+)')
+ if not m:
+ return (0, 0)
+ return (int(m.group(1)), int(m.group(2)))
+
+@checkvers("git", "git client (with ext::sh support) version >= %s", (1.9,))
+def has_git_range(v):
+ major, minor = v.split('.')[0:2]
+ return getgitversion() >= (int(major), int(minor))
+
@check("docutils", "Docutils text processing library")
def has_docutils():
try:
@@ -502,7 +513,7 @@
@check("serve", "platform and python can manage 'hg serve -d'")
def has_serve():
- return os.name != 'nt' # gross approximation
+ return True
@check("test-repo", "running tests from repository")
def has_test_repo():
@@ -574,16 +585,6 @@
def has_demandimport():
return os.environ.get('HGDEMANDIMPORT') != 'disable'
-@check("absimport", "absolute_import in __future__")
-def has_absimport():
- import __future__
- from mercurial import util
- return util.safehasattr(__future__, "absolute_import")
-
-@check("py27+", "running with Python 2.7+")
-def has_python27ornewer():
- return sys.version_info[0:2] >= (2, 7)
-
@check("py3k", "running with Python 3.x")
def has_py3k():
return 3 == sys.version_info[0]
@@ -609,7 +610,7 @@
os.environ.get("HGTEST_RUN_TESTS_PURE") == "--pure",
])
-@check("slow", "allow slow tests")
+@check("slow", "allow slow tests (use --allow-slow-tests)")
def has_slow():
return os.environ.get('HGTEST_SLOW') == 'slow'
@@ -638,3 +639,16 @@
@check("devfull", "/dev/full special file")
def has_dev_full():
return os.path.exists('/dev/full')
+
+@check("virtualenv", "Python virtualenv support")
+def has_virtualenv():
+ try:
+ import virtualenv
+ virtualenv.ACTIVATE_SH
+ return True
+ except ImportError:
+ return False
+
+@check("fsmonitor", "running tests with fsmonitor")
+def has_fsmonitor():
+ return 'HGFSMONITOR_TESTS' in os.environ
--- a/tests/killdaemons.py Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/killdaemons.py Wed Jul 19 07:51:41 2017 -0500
@@ -10,6 +10,26 @@
if os.name =='nt':
import ctypes
+ _BOOL = ctypes.c_long
+ _DWORD = ctypes.c_ulong
+ _UINT = ctypes.c_uint
+ _HANDLE = ctypes.c_void_p
+
+ ctypes.windll.kernel32.CloseHandle.argtypes = [_HANDLE]
+ ctypes.windll.kernel32.CloseHandle.restype = _BOOL
+
+ ctypes.windll.kernel32.GetLastError.argtypes = []
+ ctypes.windll.kernel32.GetLastError.restype = _DWORD
+
+ ctypes.windll.kernel32.OpenProcess.argtypes = [_DWORD, _BOOL, _DWORD]
+ ctypes.windll.kernel32.OpenProcess.restype = _HANDLE
+
+ ctypes.windll.kernel32.TerminateProcess.argtypes = [_HANDLE, _UINT]
+ ctypes.windll.kernel32.TerminateProcess.restype = _BOOL
+
+ ctypes.windll.kernel32.WaitForSingleObject.argtypes = [_HANDLE, _DWORD]
+ ctypes.windll.kernel32.WaitForSingleObject.restype = _DWORD
+
def _check(ret, expectederr=None):
if ret == 0:
winerrno = ctypes.GetLastError()
@@ -24,10 +44,11 @@
SYNCHRONIZE = 0x00100000
WAIT_OBJECT_0 = 0
WAIT_TIMEOUT = 258
+ WAIT_FAILED = _DWORD(0xFFFFFFFF).value
handle = ctypes.windll.kernel32.OpenProcess(
PROCESS_TERMINATE|SYNCHRONIZE|PROCESS_QUERY_INFORMATION,
False, pid)
- if handle == 0:
+ if handle is None:
_check(0, 87) # err 87 when process not found
return # process not found, already finished
try:
@@ -36,8 +57,8 @@
pass # terminated, but process handle still available
elif r == WAIT_TIMEOUT:
_check(ctypes.windll.kernel32.TerminateProcess(handle, -1))
- else:
- _check(r)
+ elif r == WAIT_FAILED:
+ _check(0) # err stored in GetLastError()
# TODO?: forcefully kill when timeout
# and ?shorter waiting time? when tryhard==True
@@ -47,8 +68,8 @@
pass # process is terminated
elif r == WAIT_TIMEOUT:
logfn('# Daemon process %d is stuck')
- else:
- _check(r) # any error
+ elif r == WAIT_FAILED:
+ _check(0) # err stored in GetLastError()
except: #re-raises
ctypes.windll.kernel32.CloseHandle(handle) # no _check, keep error
raise
@@ -78,18 +99,20 @@
logfn = lambda s: s
# Kill off any leftover daemon processes
try:
- fp = open(pidfile)
- for line in fp:
- try:
- pid = int(line)
- if pid <= 0:
- raise ValueError
- except ValueError:
- logfn('# Not killing daemon process %s - invalid pid'
- % line.rstrip())
- continue
+ pids = []
+ with open(pidfile) as fp:
+ for line in fp:
+ try:
+ pid = int(line)
+ if pid <= 0:
+ raise ValueError
+ except ValueError:
+ logfn('# Not killing daemon process %s - invalid pid'
+ % line.rstrip())
+ continue
+ pids.append(pid)
+ for pid in pids:
kill(pid, logfn, tryhard)
- fp.close()
if remove:
os.unlink(pidfile)
except IOError:
--- a/tests/md5sum.py Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/md5sum.py Wed Jul 19 07:51:41 2017 -0500
@@ -34,7 +34,7 @@
m = md5()
try:
- for data in iter(lambda: fp.read(8192), ''):
+ for data in iter(lambda: fp.read(8192), b''):
m.update(data)
except IOError as msg:
sys.stderr.write('%s: I/O error: %s\n' % (filename, msg))
--- a/tests/mockblackbox.py Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/mockblackbox.py Wed Jul 19 07:51:41 2017 -0500
@@ -3,8 +3,7 @@
util,
)
-def makedate():
- return 0, 0
+# XXX: we should probably offer a devel option to do this in blackbox directly
def getuser():
return 'bob'
def getpid():
@@ -12,6 +11,5 @@
# mock the date and user apis so the output is always the same
def uisetup(ui):
- util.makedate = makedate
util.getuser = getuser
util.getpid = getpid
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/revnamesext.py Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,18 @@
+# Dummy extension to define a namespace containing revision names
+
+from __future__ import absolute_import
+
+from mercurial import (
+ namespaces,
+)
+
+def reposetup(ui, repo):
+ names = {'r%d' % rev: repo[rev].node() for rev in repo}
+ namemap = lambda r, name: names.get(name)
+ nodemap = lambda r, node: ['r%d' % repo[node].rev()]
+
+ ns = namespaces.namespace('revnames', templatename='revname',
+ logname='revname',
+ listnames=lambda r: names.keys(),
+ namemap=namemap, nodemap=nodemap)
+ repo.names.addnamespace(ns)
--- a/tests/run-tests.py Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/run-tests.py Wed Jul 19 07:51:41 2017 -0500
@@ -58,11 +58,7 @@
import socket
import subprocess
import sys
-try:
- import sysconfig
-except ImportError:
- # sysconfig doesn't exist in Python 2.6
- sysconfig = None
+import sysconfig
import tempfile
import threading
import time
@@ -74,6 +70,13 @@
except ImportError:
import queue
+try:
+ import shlex
+ shellquote = shlex.quote
+except (ImportError, AttributeError):
+ import pipes
+ shellquote = pipes.quote
+
if os.environ.get('RTUNICODEPEDANTRY', False):
try:
reload(sys)
@@ -81,9 +84,21 @@
except NameError:
pass
+origenviron = os.environ.copy()
osenvironb = getattr(os, 'environb', os.environ)
processlock = threading.Lock()
+pygmentspresent = False
+# ANSI color is unsupported prior to Windows 10
+if os.name != 'nt':
+ try: # is pygments installed
+ import pygments
+ import pygments.lexers as lexers
+ import pygments.formatters as formatters
+ pygmentspresent = True
+ except ImportError:
+ pass
+
if sys.version_info > (3, 5, 0):
PYTHON3 = True
xrange = range # we use xrange in one place, and we'd rather not use range
@@ -94,7 +109,7 @@
return p.decode('utf-8')
elif sys.version_info >= (3, 0, 0):
- print('%s is only supported on Python 3.5+ and 2.6-2.7, not %s' %
+ print('%s is only supported on Python 3.5+ and 2.7, not %s' %
(sys.argv[0], '.'.join(str(v) for v in sys.version_info[:3])))
sys.exit(70) # EX_SOFTWARE from `man 3 sysexit`
else:
@@ -220,6 +235,22 @@
f.close()
return entries
+def parsettestcases(path):
+ """read a .t test file, return a set of test case names
+
+ If path does not exist, return an empty set.
+ """
+ cases = set()
+ try:
+ with open(path, 'rb') as f:
+ for l in f:
+ if l.startswith(b'#testcases '):
+ cases.update(l[11:].split())
+ except IOError as ex:
+ if ex.errno != errno.ENOENT:
+ raise
+ return cases
+
def getparser():
"""Obtain the OptionParser used by the CLI."""
parser = optparse.OptionParser("%prog [options] [tests]")
@@ -235,6 +266,9 @@
help="output files annotated with coverage")
parser.add_option("-c", "--cover", action="store_true",
help="print a test coverage report")
+ parser.add_option("--color", choices=["always", "auto", "never"],
+ default="auto",
+ help="colorisation: always|auto|never (default: auto)")
parser.add_option("-d", "--debug", action="store_true",
help="debug mode: write output of test scripts to console"
" rather than capturing and diffing it (disables timeout)")
@@ -251,6 +285,8 @@
help="keep temporary directory after running tests")
parser.add_option("-k", "--keywords",
help="run tests matching keywords")
+ parser.add_option("--list-tests", action="store_true",
+ help="list tests instead of running them")
parser.add_option("-l", "--local", action="store_true",
help="shortcut for --with-hg=<testdir>/../hg, "
"and --with-chg=<testdir>/../contrib/chg/chg if --chg is set")
@@ -260,6 +296,8 @@
help="run each test N times (default=1)", default=1)
parser.add_option("-n", "--nodiff", action="store_true",
help="skip showing test changes")
+ parser.add_option("--outputdir", type="string",
+ help="directory to write error logs to (default=test directory)")
parser.add_option("-p", "--port", type="int",
help="port on which servers should listen"
" (default: $%s or %d)" % defaults['port'])
@@ -305,7 +343,7 @@
parser.add_option("--ipv6", action="store_true",
help="prefer IPv6 to IPv4 for network related tests")
parser.add_option("-3", "--py3k-warnings", action="store_true",
- help="enable Py3k warnings on Python 2.6+")
+ help="enable Py3k warnings on Python 2.7+")
# This option should be deleted once test-check-py3-compat.t and other
# Python 3 tests run with Python 3.
parser.add_option("--with-python3", metavar="PYTHON3",
@@ -345,7 +383,7 @@
if not (os.path.isfile(options.with_hg) and
os.access(options.with_hg, os.X_OK)):
parser.error('--with-hg must specify an executable hg script')
- if not os.path.basename(options.with_hg) == b'hg':
+ if os.path.basename(options.with_hg) not in [b'hg', b'hg.exe']:
sys.stderr.write('warning: --with-hg should specify an hg script\n')
if options.local:
testdir = os.path.dirname(_bytespath(canonpath(sys.argv[0])))
@@ -373,6 +411,10 @@
parser.error('--chg does not work when --with-hg is specified '
'(use --with-chg instead)')
+ if options.color == 'always' and not pygmentspresent:
+ sys.stderr.write('warning: --color=always ignored because '
+ 'pygments is not installed\n')
+
global useipv6
if options.ipv6:
useipv6 = checksocketfamily('AF_INET6')
@@ -423,7 +465,7 @@
if options.py3k_warnings:
if PYTHON3:
parser.error(
- '--py3k-warnings can only be used on Python 2.6 and 2.7')
+ '--py3k-warnings can only be used on Python 2.7')
if options.with_python3:
if PYTHON3:
parser.error('--with-python3 cannot be used when executing with '
@@ -527,10 +569,10 @@
sys.stdout.flush()
def terminate(proc):
- """Terminate subprocess (with fallback for Python versions < 2.6)"""
+ """Terminate subprocess"""
vlog('# Terminating process %d' % proc.pid)
try:
- getattr(proc, 'terminate', lambda : os.kill(proc.pid, signal.SIGTERM))()
+ proc.terminate()
except OSError:
pass
@@ -550,7 +592,7 @@
# Status code reserved for skipped tests (used by hghave).
SKIPPED_STATUS = 80
- def __init__(self, path, tmpdir, keeptmpdir=False,
+ def __init__(self, path, outputdir, tmpdir, keeptmpdir=False,
debug=False,
timeout=defaults['timeout'],
startport=defaults['port'], extraconfigopts=None,
@@ -591,7 +633,9 @@
self.bname = os.path.basename(path)
self.name = _strpath(self.bname)
self._testdir = os.path.dirname(path)
- self.errpath = os.path.join(self._testdir, b'%s.err' % self.bname)
+ self._outputdir = outputdir
+ self._tmpname = os.path.basename(path)
+ self.errpath = os.path.join(self._outputdir, b'%s.err' % self.bname)
self._threadtmp = tmpdir
self._keeptmpdir = keeptmpdir
@@ -615,16 +659,19 @@
self._testtmp = None
self._chgsockdir = None
+ self._refout = self.readrefout()
+
+ def readrefout(self):
+ """read reference output"""
# If we're not in --debug mode and reference output file exists,
# check test output against it.
- if debug:
- self._refout = None # to match "out is None"
+ if self._debug:
+ return None # to match "out is None"
elif os.path.exists(self.refpath):
- f = open(self.refpath, 'rb')
- self._refout = f.read().splitlines(True)
- f.close()
+ with open(self.refpath, 'rb') as f:
+ return f.read().splitlines(True)
else:
- self._refout = []
+ return []
# needed to get base class __repr__ running
@property
@@ -650,7 +697,7 @@
if e.errno != errno.EEXIST:
raise
- name = os.path.basename(self.path)
+ name = self._tmpname
self._testtmp = os.path.join(self._threadtmp, name)
os.mkdir(self._testtmp)
@@ -693,21 +740,12 @@
except KeyboardInterrupt:
self._aborted = True
raise
- except SkipTest as e:
+ except unittest.SkipTest as e:
result.addSkip(self, str(e))
# The base class will have already counted this as a
# test we "ran", but we want to exclude skipped tests
# from those we count towards those run.
result.testsRun -= 1
- except IgnoreTest as e:
- result.addIgnore(self, str(e))
- # As with skips, ignores also should be excluded from
- # the number of tests executed.
- result.testsRun -= 1
- except WarnTest as e:
- result.addWarn(self, str(e))
- except ReportedTest as e:
- pass
except self.failureException as e:
# This differs from unittest in that we don't capture
# the stack trace. This is for historical reasons and
@@ -740,6 +778,7 @@
This will return a tuple describing the result of the test.
"""
env = self._getenv()
+ self._genrestoreenv(env)
self._daemonpids.append(env['DAEMON_PIDS'])
self._createhgrc(env['HGRCPATH'])
@@ -771,11 +810,11 @@
self.fail('hg have failed checking for %s' % failed[-1])
else:
self._skipped = True
- raise SkipTest(missing[-1])
+ raise unittest.SkipTest(missing[-1])
elif ret == 'timeout':
self.fail('timed out')
elif ret is False:
- raise WarnTest('no result code from test')
+ self.fail('no result code from test')
elif out != self._refout:
# Diff generation may rely on written .err file.
if (ret != 0 or out != self._refout) and not self._skipped \
@@ -829,7 +868,7 @@
def _run(self, env):
# This should be implemented in child classes to run tests.
- raise SkipTest('unknown test type')
+ raise unittest.SkipTest('unknown test type')
def abort(self):
"""Terminate execution of this test."""
@@ -876,14 +915,42 @@
else:
return b'127.0.0.1'
+ def _genrestoreenv(self, testenv):
+ """Generate a script that can be used by tests to restore the original
+ environment."""
+ # Put the restoreenv script inside self._threadtmp
+ scriptpath = os.path.join(self._threadtmp, b'restoreenv.sh')
+ testenv['HGTEST_RESTOREENV'] = scriptpath
+
+ # Only restore environment variable names that the shell allows
+ # us to export.
+ name_regex = re.compile('^[a-zA-Z][a-zA-Z0-9_]*$')
+
+ # Do not restore these variables; otherwise tests would fail.
+ reqnames = {'PYTHON', 'TESTDIR', 'TESTTMP'}
+
+ with open(scriptpath, 'w') as envf:
+ for name, value in origenviron.items():
+ if not name_regex.match(name):
+ # Skip environment variables with unusual names not
+ # allowed by most shells.
+ continue
+ if name in reqnames:
+ continue
+ envf.write('%s=%s\n' % (name, shellquote(value)))
+
+ for name in testenv:
+ if name in origenviron or name in reqnames:
+ continue
+ envf.write('unset %s\n' % (name,))
+
def _getenv(self):
"""Obtain environment variables to use during test execution."""
def defineport(i):
offset = '' if i == 0 else '%s' % i
env["HGPORT%s" % offset] = '%s' % (self._startport + i)
env = os.environ.copy()
- if sysconfig is not None:
- env['PYTHONUSERBASE'] = sysconfig.get_config_var('userbase')
+ env['PYTHONUSERBASE'] = sysconfig.get_config_var('userbase')
env['HGEMITWARNINGS'] = '1'
env['TESTTMP'] = self._testtmp
env['HOME'] = self._testtmp
@@ -938,12 +1005,9 @@
hgrc.write(b'mergemarkers = detailed\n')
hgrc.write(b'promptecho = True\n')
hgrc.write(b'[defaults]\n')
- hgrc.write(b'backout = -d "0 0"\n')
- hgrc.write(b'commit = -d "0 0"\n')
- hgrc.write(b'shelve = --date "0 0"\n')
- hgrc.write(b'tag = -d "0 0"\n')
hgrc.write(b'[devel]\n')
hgrc.write(b'all-warnings = true\n')
+ hgrc.write(b'default-date = 0 0\n')
hgrc.write(b'[largefiles]\n')
hgrc.write(b'usercache = %s\n' %
(os.path.join(self._testtmp, b'.cache/largefiles')))
@@ -1060,6 +1124,19 @@
ESCAPEMAP = dict((bchr(i), br'\x%02x' % i) for i in range(256))
ESCAPEMAP.update({b'\\': b'\\\\', b'\r': br'\r'})
+ def __init__(self, path, *args, **kwds):
+ # accept an extra "case" parameter
+ case = None
+ if 'case' in kwds:
+ case = kwds.pop('case')
+ self._case = case
+ self._allcases = parsettestcases(path)
+ super(TTest, self).__init__(path, *args, **kwds)
+ if case:
+ self.name = '%s (case %s)' % (self.name, _strpath(case))
+ self.errpath = b'%s.%s.err' % (self.errpath[:-4], case)
+ self._tmpname += b'-%s' % case
+
@property
def refpath(self):
return os.path.join(self._testdir, self.bname)
@@ -1069,6 +1146,12 @@
lines = f.readlines()
f.close()
+ # .t file is both reference output and the test input, keep reference
+ # output updated with the the test input. This avoids some race
+ # conditions where the reference output does not match the actual test.
+ if self._refout is not None:
+ self._refout = lines
+
salt, script, after, expected = self._parsetest(lines)
# Write out the generated script.
@@ -1115,6 +1198,20 @@
self._timeout = self._slowtimeout
return True, None
+ def _iftest(self, args):
+ # implements "#if"
+ reqs = []
+ for arg in args:
+ if arg.startswith(b'no-') and arg[3:] in self._allcases:
+ if arg[3:] == self._case:
+ return False
+ elif arg in self._allcases:
+ if arg != self._case:
+ return False
+ else:
+ reqs.append(arg)
+ return self._hghave(reqs)[0]
+
def _parsetest(self, lines):
# We generate a shell script which outputs unique markers to line
# up script results with our source. These markers include input
@@ -1172,7 +1269,7 @@
after.setdefault(pos, []).append(' !!! invalid #if\n')
if skipping is not None:
after.setdefault(pos, []).append(' !!! nested #if\n')
- skipping = not self._hghave(lsplit[1:])[0]
+ skipping = not self._iftest(lsplit[1:])
after.setdefault(pos, []).append(l)
elif l.startswith(b'#else'):
if skipping is None:
@@ -1445,18 +1542,6 @@
iolock = threading.RLock()
-class SkipTest(Exception):
- """Raised to indicate that a test is to be skipped."""
-
-class IgnoreTest(Exception):
- """Raised to indicate that a test is to be ignored."""
-
-class WarnTest(Exception):
- """Raised to indicate that a test warned."""
-
-class ReportedTest(Exception):
- """Raised to indicate that a test already reported."""
-
class TestResult(unittest._TextTestResult):
"""Holds results when executing via unittest."""
# Don't worry too much about accessing the non-public _TextTestResult.
@@ -1475,17 +1560,19 @@
# sense to map it into skip some day.
self.ignored = []
- # We have a custom "warned" result that isn't present in any Python
- # unittest implementation. It is very similar to failed. It may make
- # sense to map it into fail some day.
- self.warned = []
-
self.times = []
self._firststarttime = None
# Data stored for the benefit of generating xunit reports.
self.successes = []
self.faildata = {}
+ if options.color == 'auto':
+ self.color = pygmentspresent and self.stream.isatty()
+ elif options.color == 'never':
+ self.color = False
+ else: # 'always', for testing purposes
+ self.color = pygmentspresent
+
def addFailure(self, test, reason):
self.failures.append((test, reason))
@@ -1534,19 +1621,6 @@
self.testsRun += 1
self.stream.flush()
- def addWarn(self, test, reason):
- self.warned.append((test, reason))
-
- if self._options.first:
- self.stop()
-
- with iolock:
- if self.showAll:
- self.stream.writeln('warned %s' % reason)
- else:
- self.stream.write('~')
- self.stream.flush()
-
def addOutputMismatch(self, test, ret, got, expected):
"""Record a mismatch in test output for a particular test."""
if self.shouldStop:
@@ -1571,13 +1645,16 @@
servefail, lines = getdiff(expected, got,
test.refpath, test.errpath)
if servefail:
- self.addFailure(
- test,
+ raise test.failureException(
'server failed to start (HGPORT=%s)' % test._startport)
- raise ReportedTest('server failed to start')
else:
self.stream.write('\n')
for line in lines:
+ if self.color:
+ line = pygments.highlight(
+ line,
+ lexers.DiffLexer(),
+ formatters.Terminal256Formatter())
if PYTHON3:
self.stream.flush()
self.stream.buffer.write(line)
@@ -1588,14 +1665,19 @@
# handle interactive prompt without releasing iolock
if self._options.interactive:
- self.stream.write('Accept this change? [n] ')
- answer = sys.stdin.readline().strip()
- if answer.lower() in ('y', 'yes'):
- if test.name.endswith('.t'):
- rename(test.errpath, test.path)
- else:
- rename(test.errpath, '%s.out' % test.path)
- accepted = True
+ if test.readrefout() != expected:
+ self.stream.write(
+ 'Reference output has changed (run again to prompt '
+ 'changes)')
+ else:
+ self.stream.write('Accept this change? [n] ')
+ answer = sys.stdin.readline().strip()
+ if answer.lower() in ('y', 'yes'):
+ if test.path.endswith(b'.t'):
+ rename(test.errpath, test.path)
+ else:
+ rename(test.errpath, '%s.out' % test.path)
+ accepted = True
if not accepted:
self.faildata[test.name] = b''.join(lines)
@@ -1689,7 +1771,7 @@
def get():
num_tests[0] += 1
if getattr(test, 'should_reload', False):
- return self._loadtest(test.path, num_tests[0])
+ return self._loadtest(test, num_tests[0])
return test
if not os.path.exists(test.path):
result.addSkip(test, "Doesn't exist")
@@ -1731,6 +1813,8 @@
if not v:
channel = n
break
+ else:
+ raise ValueError('Could not find output channel')
channels[channel] = "=" + test.name[5:].split(".")[0]
try:
test(result)
@@ -1740,10 +1824,11 @@
except: # re-raises
done.put(('!', test, 'run-test raised an error, see traceback'))
raise
- try:
- channels[channel] = ''
- except IndexError:
- pass
+ finally:
+ try:
+ channels[channel] = ''
+ except IndexError:
+ pass
def stat():
count = 0
@@ -1787,7 +1872,7 @@
if getattr(test, 'should_reload', False):
num_tests[0] += 1
tests.append(
- self._loadtest(test.name, num_tests[0]))
+ self._loadtest(test, num_tests[0]))
else:
tests.append(test)
if self._jobs == 1:
@@ -1822,10 +1907,10 @@
# alphabetically, while times for each test are listed from oldest to
# newest.
-def loadtimes(testdir):
+def loadtimes(outputdir):
times = []
try:
- with open(os.path.join(testdir, b'.testtimes-')) as fp:
+ with open(os.path.join(outputdir, b'.testtimes-')) as fp:
for line in fp:
ts = line.split()
times.append((ts[0], [float(t) for t in ts[1:]]))
@@ -1834,8 +1919,8 @@
raise
return times
-def savetimes(testdir, result):
- saved = dict(loadtimes(testdir))
+def savetimes(outputdir, result):
+ saved = dict(loadtimes(outputdir))
maxruns = 5
skipped = set([str(t[0]) for t in result.skipped])
for tdata in result.times:
@@ -1846,11 +1931,11 @@
ts[:] = ts[-maxruns:]
fd, tmpname = tempfile.mkstemp(prefix=b'.testtimes',
- dir=testdir, text=True)
+ dir=outputdir, text=True)
with os.fdopen(fd, 'w') as fp:
for name, ts in sorted(saved.items()):
fp.write('%s %s\n' % (name, ' '.join(['%.3f' % (t,) for t in ts])))
- timepath = os.path.join(testdir, b'.testtimes')
+ timepath = os.path.join(outputdir, b'.testtimes')
try:
os.unlink(timepath)
except OSError:
@@ -1868,6 +1953,25 @@
self._runner = runner
+ def listtests(self, test):
+ result = TestResult(self._runner.options, self.stream,
+ self.descriptions, 0)
+ test = sorted(test, key=lambda t: t.name)
+ for t in test:
+ print(t.name)
+ result.addSuccess(t)
+
+ if self._runner.options.xunit:
+ with open(self._runner.options.xunit, "wb") as xuf:
+ self._writexunit(result, xuf)
+
+ if self._runner.options.json:
+ jsonpath = os.path.join(self._runner._outputdir, b'report.json')
+ with open(jsonpath, 'w') as fp:
+ self._writejson(result, fp)
+
+ return result
+
def run(self, test):
result = TestResult(self._runner.options, self.stream,
self.descriptions, self.verbosity)
@@ -1875,7 +1979,6 @@
test(result)
failed = len(result.failures)
- warned = len(result.warned)
skipped = len(result.skipped)
ignored = len(result.ignored)
@@ -1885,79 +1988,23 @@
if not self._runner.options.noskips:
for test, msg in result.skipped:
self.stream.writeln('Skipped %s: %s' % (test.name, msg))
- for test, msg in result.warned:
- self.stream.writeln('Warned %s: %s' % (test.name, msg))
for test, msg in result.failures:
self.stream.writeln('Failed %s: %s' % (test.name, msg))
for test, msg in result.errors:
self.stream.writeln('Errored %s: %s' % (test.name, msg))
if self._runner.options.xunit:
- with open(self._runner.options.xunit, 'wb') as xuf:
- timesd = dict((t[0], t[3]) for t in result.times)
- doc = minidom.Document()
- s = doc.createElement('testsuite')
- s.setAttribute('name', 'run-tests')
- s.setAttribute('tests', str(result.testsRun))
- s.setAttribute('errors', "0") # TODO
- s.setAttribute('failures', str(failed))
- s.setAttribute('skipped', str(skipped + ignored))
- doc.appendChild(s)
- for tc in result.successes:
- t = doc.createElement('testcase')
- t.setAttribute('name', tc.name)
- t.setAttribute('time', '%.3f' % timesd[tc.name])
- s.appendChild(t)
- for tc, err in sorted(result.faildata.items()):
- t = doc.createElement('testcase')
- t.setAttribute('name', tc)
- t.setAttribute('time', '%.3f' % timesd[tc])
- # createCDATASection expects a unicode or it will
- # convert using default conversion rules, which will
- # fail if string isn't ASCII.
- err = cdatasafe(err).decode('utf-8', 'replace')
- cd = doc.createCDATASection(err)
- t.appendChild(cd)
- s.appendChild(t)
- xuf.write(doc.toprettyxml(indent=' ', encoding='utf-8'))
+ with open(self._runner.options.xunit, "wb") as xuf:
+ self._writexunit(result, xuf)
if self._runner.options.json:
- jsonpath = os.path.join(self._runner._testdir, b'report.json')
+ jsonpath = os.path.join(self._runner._outputdir, b'report.json')
with open(jsonpath, 'w') as fp:
- timesd = {}
- for tdata in result.times:
- test = tdata[0]
- timesd[test] = tdata[1:]
-
- outcome = {}
- groups = [('success', ((tc, None)
- for tc in result.successes)),
- ('failure', result.failures),
- ('skip', result.skipped)]
- for res, testcases in groups:
- for tc, __ in testcases:
- if tc.name in timesd:
- diff = result.faildata.get(tc.name, b'')
- tres = {'result': res,
- 'time': ('%0.3f' % timesd[tc.name][2]),
- 'cuser': ('%0.3f' % timesd[tc.name][0]),
- 'csys': ('%0.3f' % timesd[tc.name][1]),
- 'start': ('%0.3f' % timesd[tc.name][3]),
- 'end': ('%0.3f' % timesd[tc.name][4]),
- 'diff': diff.decode('unicode_escape'),
- }
- else:
- # blacklisted test
- tres = {'result': res}
-
- outcome[tc.name] = tres
- jsonout = json.dumps(outcome, sort_keys=True, indent=4,
- separators=(',', ': '))
- fp.writelines(("testreport =", jsonout))
+ self._writejson(result, fp)
self._runner._checkhglib('Tested')
- savetimes(self._runner._testdir, result)
+ savetimes(self._runner._outputdir, result)
if failed and self._runner.options.known_good_rev:
def nooutput(args):
@@ -1993,14 +2040,14 @@
'%s %s by %s (%s)' % (
test, verb, dat['node'], dat['summary']))
self.stream.writeln(
- '# Ran %d tests, %d skipped, %d warned, %d failed.'
- % (result.testsRun,
- skipped + ignored, warned, failed))
+ '# Ran %d tests, %d skipped, %d failed.'
+ % (result.testsRun, skipped + ignored, failed))
if failed:
self.stream.writeln('python hash seed: %s' %
os.environ['PYTHONHASHSEED'])
if self._runner.options.time:
self.printtimes(result.times)
+ self.stream.flush()
return result
@@ -2016,6 +2063,95 @@
cuser, csys, real, start, end = tdata[1:6]
self.stream.writeln(cols % (start, end, cuser, csys, real, test))
+ @staticmethod
+ def _writexunit(result, outf):
+ # See http://llg.cubic.org/docs/junit/ for a reference.
+ timesd = dict((t[0], t[3]) for t in result.times)
+ doc = minidom.Document()
+ s = doc.createElement('testsuite')
+ s.setAttribute('name', 'run-tests')
+ s.setAttribute('tests', str(result.testsRun))
+ s.setAttribute('errors', "0") # TODO
+ s.setAttribute('failures', str(len(result.failures)))
+ s.setAttribute('skipped', str(len(result.skipped) +
+ len(result.ignored)))
+ doc.appendChild(s)
+ for tc in result.successes:
+ t = doc.createElement('testcase')
+ t.setAttribute('name', tc.name)
+ tctime = timesd.get(tc.name)
+ if tctime is not None:
+ t.setAttribute('time', '%.3f' % tctime)
+ s.appendChild(t)
+ for tc, err in sorted(result.faildata.items()):
+ t = doc.createElement('testcase')
+ t.setAttribute('name', tc)
+ tctime = timesd.get(tc)
+ if tctime is not None:
+ t.setAttribute('time', '%.3f' % tctime)
+ # createCDATASection expects a unicode or it will
+ # convert using default conversion rules, which will
+ # fail if string isn't ASCII.
+ err = cdatasafe(err).decode('utf-8', 'replace')
+ cd = doc.createCDATASection(err)
+ # Use 'failure' here instead of 'error' to match errors = 0,
+ # failures = len(result.failures) in the testsuite element.
+ failelem = doc.createElement('failure')
+ failelem.setAttribute('message', 'output changed')
+ failelem.setAttribute('type', 'output-mismatch')
+ failelem.appendChild(cd)
+ t.appendChild(failelem)
+ s.appendChild(t)
+ for tc, message in result.skipped:
+ # According to the schema, 'skipped' has no attributes. So store
+ # the skip message as a text node instead.
+ t = doc.createElement('testcase')
+ t.setAttribute('name', tc.name)
+ message = cdatasafe(message).decode('utf-8', 'replace')
+ cd = doc.createCDATASection(message)
+ skipelem = doc.createElement('skipped')
+ skipelem.appendChild(cd)
+ t.appendChild(skipelem)
+ s.appendChild(t)
+ outf.write(doc.toprettyxml(indent=' ', encoding='utf-8'))
+
+ @staticmethod
+ def _writejson(result, outf):
+ timesd = {}
+ for tdata in result.times:
+ test = tdata[0]
+ timesd[test] = tdata[1:]
+
+ outcome = {}
+ groups = [('success', ((tc, None)
+ for tc in result.successes)),
+ ('failure', result.failures),
+ ('skip', result.skipped)]
+ for res, testcases in groups:
+ for tc, __ in testcases:
+ if tc.name in timesd:
+ diff = result.faildata.get(tc.name, b'')
+ try:
+ diff = diff.decode('unicode_escape')
+ except UnicodeDecodeError as e:
+ diff = '%r decoding diff, sorry' % e
+ tres = {'result': res,
+ 'time': ('%0.3f' % timesd[tc.name][2]),
+ 'cuser': ('%0.3f' % timesd[tc.name][0]),
+ 'csys': ('%0.3f' % timesd[tc.name][1]),
+ 'start': ('%0.3f' % timesd[tc.name][3]),
+ 'end': ('%0.3f' % timesd[tc.name][4]),
+ 'diff': diff,
+ }
+ else:
+ # blacklisted test
+ tres = {'result': res}
+
+ outcome[tc.name] = tres
+ jsonout = json.dumps(outcome, sort_keys=True, indent=4,
+ separators=(',', ': '))
+ outf.writelines(("testreport =", jsonout))
+
class TestRunner(object):
"""Holds context for executing tests.
@@ -2024,7 +2160,6 @@
# Programs required to run tests.
REQUIREDTOOLS = [
- os.path.basename(_bytespath(sys.executable)),
b'diff',
b'grep',
b'unzip',
@@ -2043,6 +2178,7 @@
self.options = None
self._hgroot = None
self._testdir = None
+ self._outputdir = None
self._hgtmp = None
self._installdir = None
self._bindir = None
@@ -2067,11 +2203,11 @@
self.options = options
self._checktools()
- tests = self.findtests(args)
+ testdescs = self.findtests(args)
if options.profile_runner:
import statprof
statprof.start()
- result = self._run(tests)
+ result = self._run(testdescs)
if options.profile_runner:
statprof.stop()
statprof.display()
@@ -2080,9 +2216,9 @@
finally:
os.umask(oldmask)
- def _run(self, tests):
+ def _run(self, testdescs):
if self.options.random:
- random.shuffle(tests)
+ random.shuffle(testdescs)
else:
# keywords for slow tests
slow = {b'svn': 10,
@@ -2100,6 +2236,7 @@
perf = {}
def sortkey(f):
# run largest tests first, as they tend to take the longest
+ f = f['path']
try:
return perf[f]
except KeyError:
@@ -2117,10 +2254,14 @@
val /= 10.0
perf[f] = val / 1000.0
return perf[f]
- tests.sort(key=sortkey)
+ testdescs.sort(key=sortkey)
self._testdir = osenvironb[b'TESTDIR'] = getattr(
os, 'getcwdb', os.getcwd)()
+ if self.options.outputdir:
+ self._outputdir = canonpath(_bytespath(self.options.outputdir))
+ else:
+ self._outputdir = self._testdir
if 'PYTHONHASHSEED' not in os.environ:
# use a random python hash seed all the time
@@ -2245,9 +2386,10 @@
vlog("# Using HGTMP", self._hgtmp)
vlog("# Using PATH", os.environ["PATH"])
vlog("# Using", IMPL_PATH, osenvironb[IMPL_PATH])
+ vlog("# Writing to directory", self._outputdir)
try:
- return self._runtests(tests) or 0
+ return self._runtests(testdescs) or 0
finally:
time.sleep(.1)
self._cleanup()
@@ -2267,35 +2409,52 @@
else:
args = os.listdir(b'.')
- return [t for t in args
- if os.path.basename(t).startswith(b'test-')
- and (t.endswith(b'.py') or t.endswith(b'.t'))]
-
- def _runtests(self, tests):
- try:
- if self._installdir:
- self._installhg()
- self._checkhglib("Testing")
+ tests = []
+ for t in args:
+ if not (os.path.basename(t).startswith(b'test-')
+ and (t.endswith(b'.py') or t.endswith(b'.t'))):
+ continue
+ if t.endswith(b'.t'):
+ # .t file may contain multiple test cases
+ cases = sorted(parsettestcases(t))
+ if cases:
+ tests += [{'path': t, 'case': c} for c in sorted(cases)]
+ else:
+ tests.append({'path': t})
else:
- self._usecorrectpython()
- if self.options.chg:
- assert self._installdir
- self._installchg()
-
+ tests.append({'path': t})
+ return tests
+
+ def _runtests(self, testdescs):
+ def _reloadtest(test, i):
+ # convert a test back to its description dict
+ desc = {'path': test.path}
+ case = getattr(test, '_case', None)
+ if case:
+ desc['case'] = case
+ return self._gettest(desc, i)
+
+ try:
if self.options.restart:
- orig = list(tests)
- while tests:
- if os.path.exists(tests[0] + ".err"):
+ orig = list(testdescs)
+ while testdescs:
+ desc = testdescs[0]
+ # desc['path'] is a relative path
+ if 'case' in desc:
+ errpath = b'%s.%s.err' % (desc['path'], desc['case'])
+ else:
+ errpath = b'%s.err' % desc['path']
+ errpath = os.path.join(self._outputdir, errpath)
+ if os.path.exists(errpath):
break
- tests.pop(0)
- if not tests:
+ testdescs.pop(0)
+ if not testdescs:
print("running all tests")
- tests = orig
-
- tests = [self._gettest(t, i) for i, t in enumerate(tests)]
+ testdescs = orig
+
+ tests = [self._gettest(d, i) for i, d in enumerate(testdescs)]
failed = False
- warned = False
kws = self.options.keywords
if kws is not None and PYTHON3:
kws = kws.encode('utf-8')
@@ -2309,17 +2468,28 @@
loop=self.options.loop,
runs_per_test=self.options.runs_per_test,
showchannels=self.options.showchannels,
- tests=tests, loadtest=self._gettest)
+ tests=tests, loadtest=_reloadtest)
verbosity = 1
if self.options.verbose:
verbosity = 2
runner = TextTestRunner(self, verbosity=verbosity)
- result = runner.run(suite)
+
+ if self.options.list_tests:
+ result = runner.listtests(suite)
+ else:
+ if self._installdir:
+ self._installhg()
+ self._checkhglib("Testing")
+ else:
+ self._usecorrectpython()
+ if self.options.chg:
+ assert self._installdir
+ self._installchg()
+
+ result = runner.run(suite)
if result.failures:
failed = True
- if result.warned:
- warned = True
if self.options.anycoverage:
self._outputcoverage()
@@ -2329,8 +2499,6 @@
if failed:
return 1
- if warned:
- return 80
def _getport(self, count):
port = self._ports.get(count) # do we have a cached entry?
@@ -2350,13 +2518,14 @@
self._ports[count] = port
return port
- def _gettest(self, test, count):
+ def _gettest(self, testdesc, count):
"""Obtain a Test by looking at its filename.
Returns a Test instance. The Test may not be runnable if it doesn't
map to a known type.
"""
- lctest = test.lower()
+ path = testdesc['path']
+ lctest = path.lower()
testcls = Test
for ext, cls in self.TESTTYPES:
@@ -2364,10 +2533,13 @@
testcls = cls
break
- refpath = os.path.join(self._testdir, test)
+ refpath = os.path.join(self._testdir, path)
tmpdir = os.path.join(self._hgtmp, b'child%d' % count)
- t = testcls(refpath, tmpdir,
+ # extra keyword parameters. 'case' is used by .t tests
+ kwds = dict((k, testdesc[k]) for k in ['case'] if k in testdesc)
+
+ t = testcls(refpath, self._outputdir, tmpdir,
keeptmpdir=self.options.keep_tmpdir,
debug=self.options.debug,
timeout=self.options.timeout,
@@ -2377,7 +2549,7 @@
shell=self.options.shell,
hgcommand=self._hgcommand,
usechg=bool(self.options.with_chg or self.options.chg),
- useipv6=useipv6)
+ useipv6=useipv6, **kwds)
t.should_reload = True
return t
@@ -2621,10 +2793,10 @@
cov.report(ignore_errors=True, omit=omit)
if self.options.htmlcov:
- htmldir = os.path.join(self._testdir, 'htmlcov')
+ htmldir = os.path.join(self._outputdir, 'htmlcov')
cov.html_report(directory=htmldir, omit=omit)
if self.options.annotate:
- adir = os.path.join(self._testdir, 'annotated')
+ adir = os.path.join(self._outputdir, 'annotated')
if not os.path.isdir(adir):
os.mkdir(adir)
cov.annotate(directory=adir, omit=omit)
--- a/tests/test-acl.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-acl.t Wed Jul 19 07:51:41 2017 -0500
@@ -112,15 +112,15 @@
adding foo/file.txt revisions
adding quux/file.py revisions
added 3 changesets with 3 changes to 3 files
- updating the branch cache
bundle2-input-part: total payload size 1553
bundle2-input-part: "pushkey" (params: 4 mandatory) supported
pushing key for "phases:911600dab2ae7a9baff75958b84fe606851ce955"
bundle2-input-bundle: 3 parts total
+ updating the branch cache
bundle2-output-bundle: "HG20", 2 parts total
bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload
bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload
- bundle2-input-bundle: with-transaction
+ bundle2-input-bundle: no-transaction
bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported
bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported
bundle2-input-bundle: 1 parts total
@@ -147,7 +147,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -177,15 +176,15 @@
added 3 changesets with 3 changes to 3 files
calling hook pretxnchangegroup.acl: hgext.acl.hook
acl: changes have source "push" - skipping
- updating the branch cache
bundle2-input-part: total payload size 1553
bundle2-input-part: "pushkey" (params: 4 mandatory) supported
pushing key for "phases:911600dab2ae7a9baff75958b84fe606851ce955"
bundle2-input-bundle: 3 parts total
+ updating the branch cache
bundle2-output-bundle: "HG20", 2 parts total
bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload
bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload
- bundle2-input-bundle: with-transaction
+ bundle2-input-bundle: no-transaction
bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported
bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported
bundle2-input-bundle: 1 parts total
@@ -213,7 +212,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -253,15 +251,15 @@
acl: path access granted: "f9cafe1212c8"
acl: branch access granted: "911600dab2ae" on branch "default"
acl: path access granted: "911600dab2ae"
- updating the branch cache
bundle2-input-part: total payload size 1553
bundle2-input-part: "pushkey" (params: 4 mandatory) supported
pushing key for "phases:911600dab2ae7a9baff75958b84fe606851ce955"
bundle2-input-bundle: 3 parts total
+ updating the branch cache
bundle2-output-bundle: "HG20", 2 parts total
bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload
bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload
- bundle2-input-bundle: with-transaction
+ bundle2-input-bundle: no-transaction
bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported
bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported
bundle2-input-bundle: 1 parts total
@@ -289,7 +287,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -354,7 +351,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -424,7 +420,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -491,7 +486,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -563,7 +557,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -632,7 +625,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -703,7 +695,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -743,15 +734,15 @@
acl: path access granted: "f9cafe1212c8"
acl: branch access granted: "911600dab2ae" on branch "default"
acl: path access granted: "911600dab2ae"
- updating the branch cache
bundle2-input-part: total payload size 1553
bundle2-input-part: "pushkey" (params: 4 mandatory) supported
pushing key for "phases:911600dab2ae7a9baff75958b84fe606851ce955"
bundle2-input-bundle: 3 parts total
+ updating the branch cache
bundle2-output-bundle: "HG20", 2 parts total
bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload
bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload
- bundle2-input-bundle: with-transaction
+ bundle2-input-bundle: no-transaction
bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported
bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported
bundle2-input-bundle: 1 parts total
@@ -786,7 +777,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -864,7 +854,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -937,7 +926,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -1021,7 +1009,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -1061,15 +1048,15 @@
acl: path access granted: "f9cafe1212c8"
acl: branch access granted: "911600dab2ae" on branch "default"
acl: path access granted: "911600dab2ae"
- updating the branch cache
bundle2-input-part: total payload size 1553
bundle2-input-part: "pushkey" (params: 4 mandatory) supported
pushing key for "phases:911600dab2ae7a9baff75958b84fe606851ce955"
bundle2-input-bundle: 3 parts total
+ updating the branch cache
bundle2-output-bundle: "HG20", 2 parts total
bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload
bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload
- bundle2-input-bundle: with-transaction
+ bundle2-input-bundle: no-transaction
bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported
bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported
bundle2-input-bundle: 1 parts total
@@ -1107,7 +1094,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -1147,15 +1133,15 @@
acl: path access granted: "f9cafe1212c8"
acl: branch access granted: "911600dab2ae" on branch "default"
acl: path access granted: "911600dab2ae"
- updating the branch cache
bundle2-input-part: total payload size 1553
bundle2-input-part: "pushkey" (params: 4 mandatory) supported
pushing key for "phases:911600dab2ae7a9baff75958b84fe606851ce955"
bundle2-input-bundle: 3 parts total
+ updating the branch cache
bundle2-output-bundle: "HG20", 2 parts total
bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload
bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload
- bundle2-input-bundle: with-transaction
+ bundle2-input-bundle: no-transaction
bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported
bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported
bundle2-input-bundle: 1 parts total
@@ -1189,7 +1175,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -1265,7 +1250,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -1306,15 +1290,15 @@
acl: path access granted: "f9cafe1212c8"
acl: branch access granted: "911600dab2ae" on branch "default"
acl: path access granted: "911600dab2ae"
- updating the branch cache
bundle2-input-part: total payload size 1553
bundle2-input-part: "pushkey" (params: 4 mandatory) supported
pushing key for "phases:911600dab2ae7a9baff75958b84fe606851ce955"
bundle2-input-bundle: 3 parts total
+ updating the branch cache
bundle2-output-bundle: "HG20", 2 parts total
bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload
bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload
- bundle2-input-bundle: with-transaction
+ bundle2-input-bundle: no-transaction
bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported
bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported
bundle2-input-bundle: 1 parts total
@@ -1348,7 +1332,6 @@
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
- invalid branchheads cache (served): tip differs
listing keys for "bookmarks"
3 changesets found
list of changesets:
@@ -1508,18 +1491,18 @@
acl: path access granted: "911600dab2ae"
acl: branch access granted: "e8fc755d4d82" on branch "foobar"
acl: path access granted: "e8fc755d4d82"
- updating the branch cache
bundle2-input-part: total payload size 2068
bundle2-input-part: "pushkey" (params: 4 mandatory) supported
pushing key for "phases:911600dab2ae7a9baff75958b84fe606851ce955"
bundle2-input-part: "pushkey" (params: 4 mandatory) supported
pushing key for "phases:e8fc755d4d8217ee5b0c2bb41558c40d43b92c01"
bundle2-input-bundle: 4 parts total
+ updating the branch cache
bundle2-output-bundle: "HG20", 3 parts total
bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload
bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload
bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload
- bundle2-input-bundle: with-transaction
+ bundle2-input-bundle: no-transaction
bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported
bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported
bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported
@@ -1804,18 +1787,18 @@
acl: path access granted: "911600dab2ae"
acl: branch access granted: "e8fc755d4d82" on branch "foobar"
acl: path access granted: "e8fc755d4d82"
- updating the branch cache
bundle2-input-part: total payload size 2068
bundle2-input-part: "pushkey" (params: 4 mandatory) supported
pushing key for "phases:911600dab2ae7a9baff75958b84fe606851ce955"
bundle2-input-part: "pushkey" (params: 4 mandatory) supported
pushing key for "phases:e8fc755d4d8217ee5b0c2bb41558c40d43b92c01"
bundle2-input-bundle: 4 parts total
+ updating the branch cache
bundle2-output-bundle: "HG20", 3 parts total
bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload
bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload
bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload
- bundle2-input-bundle: with-transaction
+ bundle2-input-bundle: no-transaction
bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported
bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported
bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported
@@ -1897,18 +1880,18 @@
acl: path access granted: "911600dab2ae"
acl: branch access granted: "e8fc755d4d82" on branch "foobar"
acl: path access granted: "e8fc755d4d82"
- updating the branch cache
bundle2-input-part: total payload size 2068
bundle2-input-part: "pushkey" (params: 4 mandatory) supported
pushing key for "phases:911600dab2ae7a9baff75958b84fe606851ce955"
bundle2-input-part: "pushkey" (params: 4 mandatory) supported
pushing key for "phases:e8fc755d4d8217ee5b0c2bb41558c40d43b92c01"
bundle2-input-bundle: 4 parts total
+ updating the branch cache
bundle2-output-bundle: "HG20", 3 parts total
bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload
bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload
bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload
- bundle2-input-bundle: with-transaction
+ bundle2-input-bundle: no-transaction
bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported
bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported
bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported
@@ -2058,18 +2041,18 @@
acl: path access granted: "911600dab2ae"
acl: branch access granted: "e8fc755d4d82" on branch "foobar"
acl: path access granted: "e8fc755d4d82"
- updating the branch cache
bundle2-input-part: total payload size 2068
bundle2-input-part: "pushkey" (params: 4 mandatory) supported
pushing key for "phases:911600dab2ae7a9baff75958b84fe606851ce955"
bundle2-input-part: "pushkey" (params: 4 mandatory) supported
pushing key for "phases:e8fc755d4d8217ee5b0c2bb41558c40d43b92c01"
bundle2-input-bundle: 4 parts total
+ updating the branch cache
bundle2-output-bundle: "HG20", 3 parts total
bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload
bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload
bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload
- bundle2-input-bundle: with-transaction
+ bundle2-input-bundle: no-transaction
bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported
bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported
bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported
--- a/tests/test-add.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-add.t Wed Jul 19 07:51:41 2017 -0500
@@ -14,6 +14,11 @@
adding a
$ hg st
A a
+ $ mkdir dir
+ $ cd dir
+ $ hg add ../a
+ ../a already tracked!
+ $ cd ..
$ echo b > b
$ hg add -n b
@@ -196,7 +201,6 @@
adding CapsDir1/CapsDir/SubDir/Def.txt (glob)
$ hg forget capsdir1/capsdir/abc.txt
- removing CapsDir1/CapsDir/AbC.txt (glob)
$ hg forget capsdir1/capsdir
removing CapsDir1/CapsDir/SubDir/Def.txt (glob)
@@ -232,7 +236,6 @@
+def
$ hg mv CapsDir1/CapsDir/abc.txt CapsDir1/CapsDir/ABC.txt
- moving CapsDir1/CapsDir/AbC.txt to CapsDir1/CapsDir/ABC.txt (glob)
$ hg ci -m "case changing rename" CapsDir1/CapsDir/AbC.txt CapsDir1/CapsDir/ABC.txt
$ hg status -A capsdir1/capsdir
--- a/tests/test-addremove-similar.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-addremove-similar.t Wed Jul 19 07:51:41 2017 -0500
@@ -57,7 +57,7 @@
should be sorted by path for stable result
- $ for i in `python $TESTDIR/seq.py 0 9`; do
+ $ for i in `$PYTHON $TESTDIR/seq.py 0 9`; do
> cp small-file $i
> done
$ rm small-file
@@ -88,7 +88,7 @@
pick one from many identical files
$ cp 0 a
- $ rm `python $TESTDIR/seq.py 0 9`
+ $ rm `$PYTHON $TESTDIR/seq.py 0 9`
$ hg addremove
removing 0
removing 1
@@ -107,11 +107,11 @@
pick one from many similar files
$ cp 0 a
- $ for i in `python $TESTDIR/seq.py 0 9`; do
+ $ for i in `$PYTHON $TESTDIR/seq.py 0 9`; do
> echo $i >> $i
> done
$ hg commit -m 'make them slightly different'
- $ rm `python $TESTDIR/seq.py 0 9`
+ $ rm `$PYTHON $TESTDIR/seq.py 0 9`
$ hg addremove -s50
removing 0
removing 1
--- a/tests/test-addremove.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-addremove.t Wed Jul 19 07:51:41 2017 -0500
@@ -29,15 +29,12 @@
$ hg -v addremove
adding foo
$ hg forget foo
-#if windows
+
$ hg -v addremove nonexistent
- nonexistent: The system cannot find the file specified
+ nonexistent: The system cannot find the file specified (windows !)
+ nonexistent: No such file or directory (no-windows !)
[1]
-#else
- $ hg -v addremove nonexistent
- nonexistent: No such file or directory
- [1]
-#endif
+
$ cd ..
$ hg init subdir
@@ -87,17 +84,13 @@
adding c
$ rm c
-#if windows
+
$ hg ci -A -m "c" nonexistent
- nonexistent: The system cannot find the file specified
+ nonexistent: The system cannot find the file specified (windows !)
+ nonexistent: No such file or directory (no-windows !)
abort: failed to mark all new/missing files as added/removed
[255]
-#else
- $ hg ci -A -m "c" nonexistent
- nonexistent: No such file or directory
- abort: failed to mark all new/missing files as added/removed
- [255]
-#endif
+
$ hg st
! c
$ cd ..
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-amend.t Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,220 @@
+#testcases obsstore-off obsstore-on
+
+ $ cat << EOF >> $HGRCPATH
+ > [extensions]
+ > amend=
+ > debugdrawdag=$TESTDIR/drawdag.py
+ > [diff]
+ > git=1
+ > EOF
+
+#if obsstore-on
+ $ cat << EOF >> $HGRCPATH
+ > [experimental]
+ > evolution=createmarkers
+ > EOF
+#endif
+
+Basic amend
+
+ $ hg init repo1
+ $ cd repo1
+ $ hg debugdrawdag <<'EOS'
+ > B
+ > |
+ > A
+ > EOS
+
+ $ hg update B -q
+ $ echo 2 >> B
+
+#if obsstore-off
+ $ hg amend
+ saved backup bundle to $TESTTMP/repo1/.hg/strip-backup/112478962961-af2c0941-amend.hg (glob)
+ $ hg log -p -G --hidden -T '{rev} {node|short} {desc}\n'
+ @ 1 be169c7e8dbe B
+ | diff --git a/B b/B
+ | new file mode 100644
+ | --- /dev/null
+ | +++ b/B
+ | @@ -0,0 +1,1 @@
+ | +B2
+ |
+ o 0 426bada5c675 A
+ diff --git a/A b/A
+ new file mode 100644
+ --- /dev/null
+ +++ b/A
+ @@ -0,0 +1,1 @@
+ +A
+ \ No newline at end of file
+
+#else
+ $ hg amend
+ $ hg log -p -G --hidden -T '{rev} {node|short} {desc}\n'
+ @ 3 be169c7e8dbe B
+ | diff --git a/B b/B
+ | new file mode 100644
+ | --- /dev/null
+ | +++ b/B
+ | @@ -0,0 +1,1 @@
+ | +B2
+ |
+ | x 2 edf08988b141 temporary amend commit for 112478962961
+ | | diff --git a/B b/B
+ | | --- a/B
+ | | +++ b/B
+ | | @@ -1,1 +1,1 @@
+ | | -B
+ | | \ No newline at end of file
+ | | +B2
+ | |
+ | x 1 112478962961 B
+ |/ diff --git a/B b/B
+ | new file mode 100644
+ | --- /dev/null
+ | +++ b/B
+ | @@ -0,0 +1,1 @@
+ | +B
+ | \ No newline at end of file
+ |
+ o 0 426bada5c675 A
+ diff --git a/A b/A
+ new file mode 100644
+ --- /dev/null
+ +++ b/A
+ @@ -0,0 +1,1 @@
+ +A
+ \ No newline at end of file
+
+#endif
+
+Nothing changed
+
+ $ hg amend
+ nothing changed
+ [1]
+
+Matcher and metadata options
+
+ $ echo 3 > C
+ $ echo 4 > D
+ $ hg add C D
+ $ hg amend -m NEWMESSAGE -I C -q
+ $ hg log -r . -T '{node|short} {desc} {files}\n'
+ c7ba14d9075b NEWMESSAGE B C
+ $ echo 5 > E
+ $ rm C
+ $ hg amend -d '2000 1000' -u 'Foo <foo@example.com>' -A C D -q
+ $ hg log -r . -T '{node|short} {desc} {files} {author} {date}\n'
+ 14f6c4bcc865 NEWMESSAGE B D Foo <foo@example.com> 2000.01000
+
+Amend with editor
+
+ $ cat > $TESTTMP/prefix.sh <<'EOF'
+ > printf 'EDITED: ' > $TESTTMP/msg
+ > cat "$1" >> $TESTTMP/msg
+ > mv $TESTTMP/msg "$1"
+ > EOF
+ $ chmod +x $TESTTMP/prefix.sh
+
+ $ HGEDITOR="sh $TESTTMP/prefix.sh" hg amend --edit -q
+ $ hg log -r . -T '{node|short} {desc}\n'
+ 298f085230c3 EDITED: NEWMESSAGE
+ $ HGEDITOR="sh $TESTTMP/prefix.sh" hg amend -e -m MSG -q
+ $ hg log -r . -T '{node|short} {desc}\n'
+ 974f07f28537 EDITED: MSG
+
+ $ echo FOO > $TESTTMP/msg
+ $ hg amend -l $TESTTMP/msg -m BAR
+ abort: options --message and --logfile are mutually exclusive
+ [255]
+ $ hg amend -l $TESTTMP/msg -q
+ $ hg log -r . -T '{node|short} {desc}\n'
+ 507be9bdac71 FOO
+
+Interactive mode
+
+ $ touch F G
+ $ hg add F G
+ $ cat <<EOS | hg amend -i --config ui.interactive=1 -q
+ > y
+ > n
+ > EOS
+ diff --git a/F b/F
+ new file mode 100644
+ examine changes to 'F'? [Ynesfdaq?] y
+
+ diff --git a/G b/G
+ new file mode 100644
+ examine changes to 'G'? [Ynesfdaq?] n
+
+ $ hg log -r . -T '{files}\n'
+ B D F
+
+Amend in the middle of a stack
+
+ $ hg init $TESTTMP/repo2
+ $ cd $TESTTMP/repo2
+ $ hg debugdrawdag <<'EOS'
+ > C
+ > |
+ > B
+ > |
+ > A
+ > EOS
+
+ $ hg update -q B
+ $ echo 2 >> B
+ $ hg amend
+ abort: cannot amend changeset with children
+ [255]
+
+#if obsstore-on
+
+With allowunstable, amend could work in the middle of a stack
+
+ $ cat >> $HGRCPATH <<EOF
+ > [experimental]
+ > evolution=createmarkers, allowunstable
+ > EOF
+
+ $ hg amend
+ $ hg log -T '{rev} {node|short} {desc}\n' -G
+ @ 4 be169c7e8dbe B
+ |
+ | o 2 26805aba1e60 C
+ | |
+ | x 1 112478962961 B
+ |/
+ o 0 426bada5c675 A
+
+#endif
+
+Cannot amend public changeset
+
+ $ hg phase -r A --public
+ $ hg update -C -q A
+ $ hg amend -m AMEND -q
+ abort: cannot amend public changesets
+ [255]
+
+Amend a merge changeset
+
+ $ hg init $TESTTMP/repo3
+ $ cd $TESTTMP/repo3
+ $ hg debugdrawdag <<'EOS'
+ > C
+ > /|
+ > A B
+ > EOS
+ $ hg update -q C
+ $ hg amend -m FOO -q
+ $ rm .hg/localtags
+ $ hg log -G -T '{desc}\n'
+ @ FOO
+ |\
+ | o B
+ |
+ o A
+
--- a/tests/test-ancestor.py Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-ancestor.py Wed Jul 19 07:51:41 2017 -0500
@@ -13,10 +13,15 @@
ancestor,
debugcommands,
hg,
+ pycompat,
ui as uimod,
util,
)
+if pycompat.ispy3:
+ long = int
+ xrange = range
+
def buildgraph(rng, nodes=100, rootprob=0.05, mergeprob=0.2, prevprob=0.7):
'''nodes: total number of nodes in the graph
rootprob: probability that a new node (not 0) will be a root
@@ -37,7 +42,7 @@
p1 = i - 1
else:
p1 = rng.randrange(i - 1)
- p2 = rng.choice(range(0, p1) + range(p1 + 1, i))
+ p2 = rng.choice(list(range(0, p1)) + list(range(p1 + 1, i)))
graph[i] = [p1, p2]
elif rng.random() < prevprob:
graph[i] = [i - 1]
@@ -49,7 +54,7 @@
def buildancestorsets(graph):
ancs = [None] * len(graph)
for i in xrange(len(graph)):
- ancs[i] = set([i])
+ ancs[i] = {i}
if graph[i] == [nullrev]:
continue
for p in graph[i]:
@@ -212,15 +217,17 @@
# The C gca algorithm requires a real repo. These are textual descriptions of
-# DAGs that have been known to be problematic.
+# DAGs that have been known to be problematic, and, optionally, known pairs
+# of revisions and their expected ancestor list.
dagtests = [
- '+2*2*2/*3/2',
- '+3*3/*2*2/*4*4/*4/2*4/2*2',
+ ('+2*2*2/*3/2', {}),
+ ('+3*3/*2*2/*4*4/*4/2*4/2*2', {}),
+ ('+2*2*/2*4*/4*/3*2/4', {(6, 7): [3, 5]}),
]
def test_gca():
u = uimod.ui.load()
- for i, dag in enumerate(dagtests):
- repo = hg.repository(u, 'gca%d' % i, create=1)
+ for i, (dag, tests) in enumerate(dagtests):
+ repo = hg.repository(u, b'gca%d' % i, create=1)
cl = repo.changelog
if not util.safehasattr(cl.index, 'ancestors'):
# C version not available
@@ -230,15 +237,21 @@
# Compare the results of the Python and C versions. This does not
# include choosing a winner when more than one gca exists -- we make
# sure both return exactly the same set of gcas.
+ # Also compare against expected results, if available.
for a in cl:
for b in cl:
cgcas = sorted(cl.index.ancestors(a, b))
pygcas = sorted(ancestor.ancestors(cl.parentrevs, a, b))
- if cgcas != pygcas:
+ expected = None
+ if (a, b) in tests:
+ expected = tests[(a, b)]
+ if cgcas != pygcas or (expected and cgcas != expected):
print("test_gca: for dag %s, gcas for %d, %d:"
% (dag, a, b))
print(" C returned: %s" % cgcas)
print(" Python returned: %s" % pygcas)
+ if expected:
+ print(" expected: %s" % expected)
def main():
seed = None
--- a/tests/test-annotate.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-annotate.t Wed Jul 19 07:51:41 2017 -0500
@@ -56,21 +56,18 @@
$ hg annotate -Tjson a
[
{
- "line": "a\n",
- "rev": 0
+ "abspath": "a",
+ "lines": [{"line": "a\n", "rev": 0}],
+ "path": "a"
}
]
$ hg annotate -Tjson -cdfnul a
[
{
- "date": [1.0, 0],
- "file": "a",
- "line": "a\n",
- "line_number": 1,
- "node": "8435f90966e442695d2ded29fdade2bac5ad8065",
- "rev": 0,
- "user": "nobody"
+ "abspath": "a",
+ "lines": [{"date": [1.0, 0], "file": "a", "line": "a\n", "line_number": 1, "node": "8435f90966e442695d2ded29fdade2bac5ad8065", "rev": 0, "user": "nobody"}],
+ "path": "a"
}
]
@@ -88,6 +85,37 @@
> EOF
$ hg ci -mb2 -d '2 0'
+annotate multiple files (JSON)
+
+ $ hg annotate -Tjson a b
+ [
+ {
+ "abspath": "a",
+ "lines": [{"line": "a\n", "rev": 0}, {"line": "a\n", "rev": 1}, {"line": "a\n", "rev": 1}],
+ "path": "a"
+ },
+ {
+ "abspath": "b",
+ "lines": [{"line": "a\n", "rev": 0}, {"line": "a\n", "rev": 1}, {"line": "a\n", "rev": 1}, {"line": "b4\n", "rev": 3}, {"line": "b5\n", "rev": 3}, {"line": "b6\n", "rev": 3}],
+ "path": "b"
+ }
+ ]
+
+annotate multiple files (template)
+
+ $ hg annotate -T'== {abspath} ==\n{lines % "{rev}: {line}"}' a b
+ == a ==
+ 0: a
+ 1: a
+ 1: a
+ == b ==
+ 0: a
+ 1: a
+ 1: a
+ 3: b4
+ 3: b5
+ 3: b6
+
annotate -n b
$ hg annotate -n b
@@ -217,6 +245,79 @@
3 b:5: b5
7 b:7: d
+--skip nothing (should be the same as no --skip at all)
+
+ $ hg annotate -nlf b --skip '1::0'
+ 0 a:1: a
+ 6 b:2: z
+ 1 a:3: a
+ 3 b:4: b4
+ 4 b:5: c
+ 3 b:5: b5
+ 7 b:7: d
+
+--skip a modified line. Note a slight behavior difference in pure - this is
+because the pure code comes up with slightly different deltas internally.
+
+ $ hg annotate -nlf b --skip 6
+ 0 a:1: a
+ 1 a:2: z (no-pure !)
+ 0 a:1: z (pure !)
+ 1 a:3: a
+ 3 b:4: b4
+ 4 b:5: c
+ 3 b:5: b5
+ 7 b:7: d
+
+--skip added lines (and test multiple skip)
+
+ $ hg annotate -nlf b --skip 3
+ 0 a:1: a
+ 6 b:2: z
+ 1 a:3: a
+ 1 a:3: b4
+ 4 b:5: c
+ 1 a:3: b5
+ 7 b:7: d
+
+ $ hg annotate -nlf b --skip 4
+ 0 a:1: a
+ 6 b:2: z
+ 1 a:3: a
+ 3 b:4: b4
+ 1 a:3: c
+ 3 b:5: b5
+ 7 b:7: d
+
+ $ hg annotate -nlf b --skip 3 --skip 4
+ 0 a:1: a
+ 6 b:2: z
+ 1 a:3: a
+ 1 a:3: b4
+ 1 a:3: c
+ 1 a:3: b5
+ 7 b:7: d
+
+ $ hg annotate -nlf b --skip 'merge()'
+ 0 a:1: a
+ 6 b:2: z
+ 1 a:3: a
+ 3 b:4: b4
+ 4 b:5: c
+ 3 b:5: b5
+ 3 b:5: d
+
+--skip everything -- use the revision the file was introduced in
+
+ $ hg annotate -nlf b --skip 'all()'
+ 0 a:1: a
+ 0 a:1: z
+ 0 a:1: a
+ 0 a:1: b4
+ 0 a:1: c
+ 0 a:1: b5
+ 0 a:1: d
+
Issue2807: alignment of line numbers with -l
$ echo more >> b
@@ -429,14 +530,9 @@
$ hg annotate -ncr "wdir()" -Tjson foo
[
{
- "line": "foo\n",
- "node": "472b18db256d1e8282064eab4bfdaf48cbfe83cd",
- "rev": 11
- },
- {
- "line": "foofoo\n",
- "node": null,
- "rev": null
+ "abspath": "foo",
+ "lines": [{"line": "foo\n", "node": "472b18db256d1e8282064eab4bfdaf48cbfe83cd", "rev": 11}, {"line": "foofoo\n", "node": null, "rev": null}],
+ "path": "foo"
}
]
@@ -457,28 +553,20 @@
annotate missing file
$ rm baz
-#if windows
+
$ hg annotate -ncr "wdir()" baz
- abort: $TESTTMP\repo\baz: The system cannot find the file specified
+ abort: $TESTTMP\repo\baz: The system cannot find the file specified (windows !)
+ abort: No such file or directory: $TESTTMP/repo/baz (no-windows !)
[255]
-#else
- $ hg annotate -ncr "wdir()" baz
- abort: No such file or directory: $TESTTMP/repo/baz
- [255]
-#endif
annotate removed file
$ hg rm baz
-#if windows
+
$ hg annotate -ncr "wdir()" baz
- abort: $TESTTMP\repo\baz: The system cannot find the file specified
+ abort: $TESTTMP\repo\baz: The system cannot find the file specified (windows !)
+ abort: No such file or directory: $TESTTMP/repo/baz (no-windows !)
[255]
-#else
- $ hg annotate -ncr "wdir()" baz
- abort: No such file or directory: $TESTTMP/repo/baz
- [255]
-#endif
$ hg revert --all --no-backup --quiet
$ hg id -n
@@ -629,6 +717,65 @@
|
~
+Issue5595: on a merge changeset with different line ranges depending on
+parent, be conservative and use the surrounding interval to avoid loosing
+track of possible further descendants in specified range.
+
+ $ hg up 23 --quiet
+ $ hg cat baz -r 24
+ 0
+ 0
+ 1 baz:1
+ 2 baz:2
+ 3+ baz:3
+ 4 baz:4
+ 5
+ 6
+ $ cat > baz << EOF
+ > 0
+ > 0
+ > a
+ > b
+ > 3+ baz:3
+ > 4 baz:4
+ > y
+ > z
+ > EOF
+ $ hg ci -m 'baz: mostly rewrite with some content from 24'
+ created new head
+ $ hg merge --tool :merge-other 24
+ merging baz
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ hg ci -m 'merge forgetting about baz rewrite'
+ $ cat > baz << EOF
+ > 0
+ > 0
+ > 1 baz:1
+ > 2+ baz:2
+ > 3+ baz:3
+ > 4 baz:4
+ > 5
+ > 6
+ > EOF
+ $ hg ci -m 'baz: narrow change (2->2+)'
+ $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 3:4, startrev=20, descend=True)' --graph
+ @ 33: baz: narrow change (2->2+)
+ |
+ o 32: merge forgetting about baz rewrite
+ |\
+ | o 31: baz: mostly rewrite with some content from 24
+ | :
+ | : o 30: baz:3->+3
+ | :/
+ +---o 27: baz:3+->3-
+ | :
+ o : 24: baz:3->3+
+ :/
+ o 20: baz:4
+ |\
+ ~ ~
+
check error cases
$ hg up 24 --quiet
$ hg log -r 'followlines()'
@@ -665,12 +812,34 @@
abort: line range exceeds file size
[255]
$ hg log -r 'followlines(baz, 2:4, startrev=20, descend=[1])'
- hg: parse error at 43: syntax error in revset 'followlines(baz, 2:4, startrev=20, descend=[1])'
+ hg: parse error at 43: not a prefix: [
[255]
$ hg log -r 'followlines(baz, 2:4, startrev=20, descend=a)'
hg: parse error: descend argument must be a boolean
[255]
+Test empty annotate output
+
+ $ printf '\0' > binary
+ $ touch empty
+ $ hg ci -qAm 'add binary and empty files'
+
+ $ hg annotate binary empty
+ binary: binary file
+
+ $ hg annotate -Tjson binary empty
+ [
+ {
+ "abspath": "binary",
+ "path": "binary"
+ },
+ {
+ "abspath": "empty",
+ "lines": [],
+ "path": "empty"
+ }
+ ]
+
Test annotate with whitespace options
$ cd ..
--- a/tests/test-archive.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-archive.t Wed Jul 19 07:51:41 2017 -0500
@@ -105,7 +105,7 @@
> except util.urlerr.httperror as e:
> sys.stderr.write(str(e) + '\n')
> EOF
- $ python getarchive.py "$TIP" gz | gunzip | tar tf - 2>/dev/null
+ $ $PYTHON getarchive.py "$TIP" gz | gunzip | tar tf - 2>/dev/null
test-archive-1701ef1f1510/.hg_archival.txt
test-archive-1701ef1f1510/.hgsub
test-archive-1701ef1f1510/.hgsubstate
@@ -113,7 +113,7 @@
test-archive-1701ef1f1510/baz/bletch
test-archive-1701ef1f1510/foo
test-archive-1701ef1f1510/subrepo/sub
- $ python getarchive.py "$TIP" bz2 | bunzip2 | tar tf - 2>/dev/null
+ $ $PYTHON getarchive.py "$TIP" bz2 | bunzip2 | tar tf - 2>/dev/null
test-archive-1701ef1f1510/.hg_archival.txt
test-archive-1701ef1f1510/.hgsub
test-archive-1701ef1f1510/.hgsubstate
@@ -121,7 +121,7 @@
test-archive-1701ef1f1510/baz/bletch
test-archive-1701ef1f1510/foo
test-archive-1701ef1f1510/subrepo/sub
- $ python getarchive.py "$TIP" zip > archive.zip
+ $ $PYTHON getarchive.py "$TIP" zip > archive.zip
$ unzip -t archive.zip
Archive: archive.zip
testing: test-archive-1701ef1f1510/.hg_archival.txt*OK (glob)
@@ -135,19 +135,19 @@
test that we can download single directories and files
- $ python getarchive.py "$TIP" gz baz | gunzip | tar tf - 2>/dev/null
+ $ $PYTHON getarchive.py "$TIP" gz baz | gunzip | tar tf - 2>/dev/null
test-archive-1701ef1f1510/baz/bletch
- $ python getarchive.py "$TIP" gz foo | gunzip | tar tf - 2>/dev/null
+ $ $PYTHON getarchive.py "$TIP" gz foo | gunzip | tar tf - 2>/dev/null
test-archive-1701ef1f1510/foo
test that we detect file patterns that match no files
- $ python getarchive.py "$TIP" gz foobar
+ $ $PYTHON getarchive.py "$TIP" gz foobar
HTTP Error 404: file(s) not found: foobar
test that we reject unsafe patterns
- $ python getarchive.py "$TIP" gz relre:baz
+ $ $PYTHON getarchive.py "$TIP" gz relre:baz
HTTP Error 404: file(s) not found: relre:baz
$ killdaemons.py
@@ -231,7 +231,7 @@
$ sleep 1
$ hg archive -t tgz tip.tar.gz
$ mv tip.tar.gz tip2.tar.gz
- $ python md5comp.py tip1.tar.gz tip2.tar.gz
+ $ $PYTHON md5comp.py tip1.tar.gz tip2.tar.gz
True
$ hg archive -t zip -p /illegal test.zip
@@ -364,12 +364,12 @@
$ hg -R repo archive --prefix tar-extracted archive.tar
$ (TZ=UTC-3; export TZ; tar xf archive.tar)
- $ python show_mtime.py tar-extracted/a
+ $ $PYTHON show_mtime.py tar-extracted/a
456789012
$ hg -R repo archive --prefix zip-extracted archive.zip
$ (TZ=UTC-3; export TZ; unzip -q archive.zip)
- $ python show_mtime.py zip-extracted/a
+ $ $PYTHON show_mtime.py zip-extracted/a
456789012
$ cd ..
--- a/tests/test-atomictempfile.py Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-atomictempfile.py Wed Jul 19 07:51:41 2017 -0500
@@ -47,7 +47,8 @@
# if a programmer screws up and passes bad args to atomictempfile, they
# get a plain ordinary TypeError, not infinite recursion
def testoops(self):
- self.assertRaises(TypeError, atomictempfile)
+ with self.assertRaises(TypeError):
+ atomictempfile()
# checkambig=True avoids ambiguity of timestamp
def testcheckambig(self):
--- a/tests/test-automv.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-automv.t Wed Jul 19 07:51:41 2017 -0500
@@ -162,7 +162,7 @@
R a.txt
$ hg commit --amend -m 'amended'
detected move of 1 files
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/*-amend-backup.hg (glob)
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/*-amend.hg (glob)
$ hg status --change . -C
A b.txt
a.txt
@@ -185,7 +185,7 @@
R a.txt
$ hg commit --amend -m 'amended'
detected move of 1 files
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/*-amend-backup.hg (glob)
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/*-amend.hg (glob)
$ hg status --change . -C
A b.txt
a.txt
@@ -207,7 +207,7 @@
A b.txt
R a.txt
$ hg commit --amend -m 'amended'
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/*-amend-backup.hg (glob)
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/*-amend.hg (glob)
$ hg status --change . -C
A b.txt
A c.txt
@@ -229,7 +229,7 @@
R a.txt
$ hg commit --amend --config automv.similarity='60' -m 'amended'
detected move of 1 files
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/*-amend-backup.hg (glob)
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/*-amend.hg (glob)
$ hg status --change . -C
A b.txt
a.txt
@@ -248,7 +248,7 @@
! a.txt
? b.txt
$ hg commit --amend -m 'amended'
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/*-amend-backup.hg (glob)
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/*-amend.hg (glob)
$ hg status -C
! a.txt
? b.txt
@@ -270,7 +270,7 @@
A d.txt
R a.txt
$ hg commit --amend -m 'amended' d.txt
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/*-amend-backup.hg (glob)
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/*-amend.hg (glob)
$ hg status --change . -C
A c.txt
A d.txt
@@ -279,7 +279,7 @@
R a.txt
$ hg commit --amend -m 'amended'
detected move of 1 files
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/*-amend-backup.hg (glob)
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/*-amend.hg (glob)
$ hg status --change . -C
A b.txt
a.txt
@@ -301,7 +301,7 @@
A b.txt
R a.txt
$ hg commit --amend -m 'amended' --no-automv
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/*-amend-backup.hg (glob)
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/*-amend.hg (glob)
$ hg status --change . -C
A b.txt
A c.txt
@@ -322,7 +322,7 @@
$ hg commit -m "removed a"
$ hg add b.txt
$ hg commit --amend -m 'amended'
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/*-amend-backup.hg (glob)
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/*-amend.hg (glob)
$ hg status --change . -C
A b.txt
R a.txt
--- a/tests/test-bad-extension.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-bad-extension.t Wed Jul 19 07:51:41 2017 -0500
@@ -62,10 +62,10 @@
names of extensions failed to load can be accessed via extensions.notloaded()
$ cat <<EOF > showbadexts.py
- > from mercurial import cmdutil, commands, extensions
+ > from mercurial import commands, extensions, registrar
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
- > @command('showbadexts', norepo=True)
+ > command = registrar.command(cmdtable)
+ > @command(b'showbadexts', norepo=True)
> def showbadexts(ui, *pats, **opts):
> ui.write('BADEXTS: %s\n' % ' '.join(sorted(extensions.notloaded())))
> EOF
--- a/tests/test-bad-pull.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-bad-pull.t Wed Jul 19 07:51:41 2017 -0500
@@ -7,7 +7,7 @@
$ test -d copy
[1]
- $ python "$TESTDIR/dumbhttp.py" -p $HGPORT --pid dumb.pid
+ $ $PYTHON "$TESTDIR/dumbhttp.py" -p $HGPORT --pid dumb.pid
$ cat dumb.pid >> $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT/foo copy2
abort: HTTP Error 404: * (glob)
--- a/tests/test-basic.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-basic.t Wed Jul 19 07:51:41 2017 -0500
@@ -1,11 +1,9 @@
Create a repository:
$ hg config
- defaults.backout=-d "0 0"
- defaults.commit=-d "0 0"
- defaults.shelve=--date "0 0"
- defaults.tag=-d "0 0"
devel.all-warnings=true
+ devel.default-date=0 0
+ extensions.fsmonitor= (fsmonitor !)
largefiles.usercache=$TESTTMP/.cache/largefiles (glob)
ui.slash=True
ui.interactive=False
@@ -63,7 +61,7 @@
> EOF
$ hg up null
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
- $ python ./update_to_rev0.py
+ $ $PYTHON ./update_to_rev0.py
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg identify -n
0
--- a/tests/test-bdiff.py Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-bdiff.py Wed Jul 19 07:51:41 2017 -0500
@@ -4,8 +4,7 @@
import unittest
from mercurial import (
- bdiff,
- mpatch,
+ mdiff,
)
class diffreplace(
@@ -16,10 +15,10 @@
class BdiffTests(unittest.TestCase):
def assert_bdiff_applies(self, a, b):
- d = bdiff.bdiff(a, b)
+ d = mdiff.textdiff(a, b)
c = a
if d:
- c = mpatch.patches(a, [d])
+ c = mdiff.patches(a, [d])
self.assertEqual(
c, b, ("bad diff+patch result from\n %r to\n "
"%r: \nbdiff: %r\npatched: %r" % (a, b, d, c[:200])))
@@ -54,7 +53,7 @@
self.assert_bdiff(a, b)
def showdiff(self, a, b):
- bin = bdiff.bdiff(a, b)
+ bin = mdiff.textdiff(a, b)
pos = 0
q = 0
actions = []
@@ -110,7 +109,7 @@
("", "", 0),
]
for a, b, allws in cases:
- c = bdiff.fixws(a, allws)
+ c = mdiff.fixws(a, allws)
self.assertEqual(
c, b, 'fixws(%r) want %r got %r (allws=%r)' % (a, b, c, allws))
--- a/tests/test-bisect.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-bisect.t Wed Jul 19 07:51:41 2017 -0500
@@ -453,7 +453,7 @@
test bisecting command
$ cat > script.py <<EOF
- > #!/usr/bin/env python
+ > #!$PYTHON
> import sys
> from mercurial import ui, hg
> repo = hg.repository(ui.ui.load(), '.')
@@ -463,12 +463,12 @@
$ chmod +x script.py
$ hg bisect -r
$ hg up -qr tip
- $ hg bisect --command "python \"$TESTTMP/script.py\" and some parameters"
+ $ hg bisect --command "\"$PYTHON\" \"$TESTTMP/script.py\" and some parameters"
changeset 31:58c80a7c8a40: good
abort: cannot bisect (no known bad revisions)
[255]
$ hg up -qr 0
- $ hg bisect --command "python \"$TESTTMP/script.py\" and some parameters"
+ $ hg bisect --command "\"$PYTHON\" \"$TESTTMP/script.py\" and some parameters"
changeset 0:b99c7b9c8e11: bad
changeset 15:e7fa0811edb0: good
changeset 7:03750880c6b5: good
@@ -551,7 +551,14 @@
date: Thu Jan 01 00:00:06 1970 +0000
summary: msg 6
-
+ $ hg graft -q 15
+ warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
+ abort: unresolved conflicts, can't continue
+ (use 'hg resolve' and 'hg graft --continue')
+ [255]
+ $ hg bisect --reset
+ $ hg up -C .
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
Check that bisect does not break on obsolete changesets
=========================================================
@@ -565,6 +572,7 @@
---------------------
$ hg debugobsolete `hg id --debug -i -r tip`
+ obsoleted 1 changesets
$ hg bisect --reset
$ hg bisect --good 15
$ hg bisect --bad 30
@@ -604,3 +612,51 @@
date: Thu Jan 01 00:00:26 1970 +0000
summary: msg 26
+Test the validation message when exclusive options are used:
+
+ $ hg bisect -r
+ $ hg bisect -b -c false
+ abort: --bad and --command are incompatible
+ [255]
+ $ hg bisect -b -e
+ abort: --bad and --extend are incompatible
+ [255]
+ $ hg bisect -b -g
+ abort: --bad and --good are incompatible
+ [255]
+ $ hg bisect -b -r
+ abort: --bad and --reset are incompatible
+ [255]
+ $ hg bisect -b -s
+ abort: --bad and --skip are incompatible
+ [255]
+ $ hg bisect -c false -e
+ abort: --command and --extend are incompatible
+ [255]
+ $ hg bisect -c false -g
+ abort: --command and --good are incompatible
+ [255]
+ $ hg bisect -c false -r
+ abort: --command and --reset are incompatible
+ [255]
+ $ hg bisect -c false -s
+ abort: --command and --skip are incompatible
+ [255]
+ $ hg bisect -e -g
+ abort: --extend and --good are incompatible
+ [255]
+ $ hg bisect -e -r
+ abort: --extend and --reset are incompatible
+ [255]
+ $ hg bisect -e -s
+ abort: --extend and --skip are incompatible
+ [255]
+ $ hg bisect -g -r
+ abort: --good and --reset are incompatible
+ [255]
+ $ hg bisect -g -s
+ abort: --good and --skip are incompatible
+ [255]
+ $ hg bisect -r -s
+ abort: --reset and --skip are incompatible
+ [255]
--- a/tests/test-bisect2.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-bisect2.t Wed Jul 19 07:51:41 2017 -0500
@@ -244,6 +244,7 @@
$ hg up -C
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "d42e18c7bc9b: 18"
3 other heads for branch "default"
complex bisect test 1 # first bad rev is 9
--- a/tests/test-blackbox.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-blackbox.t Wed Jul 19 07:51:41 2017 -0500
@@ -135,9 +135,17 @@
$ echo '[hooks]' >> .hg/hgrc
$ echo 'update = echo hooked' >> .hg/hgrc
$ hg update
+ The fsmonitor extension is incompatible with the eol extension and has been disabled. (fsmonitor !)
hooked
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "d02f48003e62: c"
1 other heads for branch "default"
+ $ cat >> .hg/hgrc <<EOF
+ > [extensions]
+ > # disable eol, because it is not needed for subsequent tests
+ > # (in addition, keeping it requires extra care for fsmonitor)
+ > eol=!
+ > EOF
$ hg blackbox -l 6
1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> update
1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> writing .hg/cache/tags2-visible with 0 tags
@@ -171,7 +179,7 @@
$ sed -e 's/\(.*test1.*\)/#\1/; s#\(.*commit2.*\)#os.rmdir(".hg/blackbox.log")\
> os.rename(".hg/blackbox.log-", ".hg/blackbox.log")\
> \1#' $TESTDIR/test-dispatch.py > ../test-dispatch.py
- $ python $TESTDIR/blackbox-readonly-dispatch.py
+ $ $PYTHON $TESTDIR/blackbox-readonly-dispatch.py
running: add foo
result: 0
running: commit -m commit1 -d 2000-01-01 foo
@@ -195,8 +203,8 @@
result: None
$ hg blackbox
1970/01/01 00:00:00 bob @0e46349438790c460c5c9f7546bfcd39b267bbd2 (5000)> commit -m commit2 -d 2000-01-02 foo
- 1970/01/01 00:00:00 bob @0e46349438790c460c5c9f7546bfcd39b267bbd2 (5000)> updated served branch cache in * seconds (glob)
- 1970/01/01 00:00:00 bob @0e46349438790c460c5c9f7546bfcd39b267bbd2 (5000)> wrote served branch cache with 1 labels and 1 nodes
+ 1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> updated served branch cache in * seconds (glob)
+ 1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> wrote served branch cache with 1 labels and 1 nodes
1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> commit -m commit2 -d 2000-01-02 foo exited 0 after * seconds (glob)
1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> log -r 0
1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> writing .hg/cache/tags2-visible with 0 tags
--- a/tests/test-bookmarks-pushpull.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-bookmarks-pushpull.t Wed Jul 19 07:51:41 2017 -0500
@@ -203,7 +203,7 @@
(test that too many divergence of bookmark)
- $ python $TESTDIR/seq.py 1 100 | while read i; do hg bookmarks -r 000000000000 "X@${i}"; done
+ $ $PYTHON $TESTDIR/seq.py 1 100 | while read i; do hg bookmarks -r 000000000000 "X@${i}"; done
$ hg pull ../a
pulling from ../a
searching for changes
@@ -231,7 +231,7 @@
@1 2:0d2164f0ce0d
@foo 2:0d2164f0ce0d
- $ python $TESTDIR/seq.py 1 100 | while read i; do hg bookmarks -d "X@${i}"; done
+ $ $PYTHON $TESTDIR/seq.py 1 100 | while read i; do hg bookmarks -d "X@${i}"; done
$ hg bookmarks -d "@1"
$ hg push -f ../a
@@ -475,6 +475,7 @@
$ hg id --debug -r 5
c922c0139ca03858f655e4a2af4dd02796a63969 tip Y
$ hg debugobsolete f6fc62dde3c0771e29704af56ba4d8af77abcc2f cccccccccccccccccccccccccccccccccccccccc
+ obsoleted 1 changesets
$ hg debugobsolete cccccccccccccccccccccccccccccccccccccccc 4efff6d98829d9c824c621afd6e3f01865f5439f
$ hg push http://localhost:$HGPORT2/
pushing to http://localhost:$HGPORT2/
@@ -484,6 +485,7 @@
remote: adding file changes
remote: added 2 changesets with 2 changes to 1 files (+1 heads)
remote: 2 new obsolescence markers
+ remote: obsoleted 1 changesets
updating bookmark Y
$ hg -R ../a book
@ 1:0d2164f0ce0d
@@ -808,7 +810,7 @@
> ssh=ssh://user@dummy/issue4455-dest
> http=http://localhost:$HGPORT/
> [ui]
- > ssh=python "$TESTDIR/dummyssh"
+ > ssh=$PYTHON "$TESTDIR/dummyssh"
> EOF
$ cat >> ../issue4455-dest/.hg/hgrc << EOF
> [hooks]
--- a/tests/test-bookmarks-rebase.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-bookmarks-rebase.t Wed Jul 19 07:51:41 2017 -0500
@@ -38,7 +38,7 @@
$ hg rebase -s two -d one
rebasing 3:2ae46b1d99a7 "3" (tip two)
- saved backup bundle to $TESTTMP/.hg/strip-backup/2ae46b1d99a7-e6b057bc-backup.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/2ae46b1d99a7-e6b057bc-rebase.hg (glob)
$ hg log
changeset: 3:42e5ed2cdcf4
--- a/tests/test-bookmarks.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-bookmarks.t Wed Jul 19 07:51:41 2017 -0500
@@ -311,8 +311,45 @@
abort: cannot use an integer as a name
[255]
+bookmark with a name that matches a node id
+ $ hg bookmark 925d80f479bb db815d6d32e6
+ bookmark 925d80f479bb matches a changeset hash
+ (did you leave a -r out of an 'hg bookmark' command?)
+ bookmark db815d6d32e6 matches a changeset hash
+ (did you leave a -r out of an 'hg bookmark' command?)
+ $ hg bookmark -d 925d80f479bb
+ $ hg bookmark -d db815d6d32e6
+
+ $ cd ..
+
+bookmark with a name that matches an ambiguous node id
+
+ $ hg init ambiguous
+ $ cd ambiguous
+ $ echo 0 > a
+ $ hg ci -qAm 0
+ $ for i in 1057 2857 4025; do
+ > hg up -q 0
+ > echo $i > a
+ > hg ci -qm $i
+ > done
+ $ hg up -q null
+ $ hg log -r0: -T '{rev}:{node}\n'
+ 0:b4e73ffab476aa0ee32ed81ca51e07169844bc6a
+ 1:c56256a09cd28e5764f32e8e2810d0f01e2e357a
+ 2:c5623987d205cd6d9d8389bfc40fff9dbb670b48
+ 3:c562ddd9c94164376c20b86b0b4991636a3bf84f
+
+ $ hg bookmark -r0 c562
+ $ hg bookmarks
+ c562 0:b4e73ffab476
+
+ $ cd ..
+
incompatible options
+ $ cd repo
+
$ hg bookmark -m Y -d Z
abort: --delete and --rename are incompatible
[255]
@@ -552,12 +589,17 @@
update to active bookmark if it's not the parent
+(it is known issue that fsmonitor can't handle nested repositories. In
+this test scenario, cloned-bookmark-default and tobundle exist in the
+working directory of current repository)
+
$ hg summary
parent: 2:db815d6d32e6
2
branch: default
bookmarks: *Z Y x y
- commit: 1 added, 1 unknown (new branch head)
+ commit: 1 added, 1 unknown (new branch head) (no-fsmonitor !)
+ commit: 1 added, * unknown (new branch head) (glob) (fsmonitor !)
update: 2 new changesets (update)
phases: 5 draft
$ hg update
@@ -663,7 +705,7 @@
test missing revisions
- $ echo "925d80f479bc z" > .hg/bookmarks
+ $ echo "925d80f479b925d80f479bc925d80f479bccabab z" > .hg/bookmarks
$ hg book
no bookmarks set
@@ -737,6 +779,10 @@
no-op update doesn't deactivate bookmarks
+(it is known issue that fsmonitor can't handle nested repositories. In
+this test scenario, cloned-bookmark-default and tobundle exist in the
+working directory of current repository)
+
$ hg bookmarks
* four 3:9ba5f110a0b3
should-end-on-two 2:db815d6d32e6
@@ -749,7 +795,8 @@
y
branch: test
bookmarks: *four
- commit: 2 unknown (clean)
+ commit: 2 unknown (clean) (no-fsmonitor !)
+ commit: * unknown (clean) (glob) (fsmonitor !)
update: (current)
phases: 4 draft
--- a/tests/test-branches.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-branches.t Wed Jul 19 07:51:41 2017 -0500
@@ -530,6 +530,45 @@
a: Adding b branch head 2
default: Adding root node
+ $ cat <<'EOF' > "$TESTTMP/map-myjson"
+ > docheader = '\{\n'
+ > docfooter = '\n}\n'
+ > separator = ',\n'
+ > branches = ' {dict(branch, node|short)|json}'
+ > EOF
+ $ hg branches -T "$TESTTMP/map-myjson"
+ {
+ {"branch": "b", "node": "e23b5505d1ad"},
+ {"branch": "a branch *", "node": "10ff5895aa57"}, (glob)
+ {"branch": "a", "node": "d8cbc61dbaa6"},
+ {"branch": "default", "node": "19709c5a4e75"}
+ }
+
+ $ cat <<'EOF' >> .hg/hgrc
+ > [templates]
+ > myjson = ' {dict(branch, node|short)|json}'
+ > myjson:docheader = '\{\n'
+ > myjson:docfooter = '\n}\n'
+ > myjson:separator = ',\n'
+ > EOF
+ $ hg branches -T myjson
+ {
+ {"branch": "b", "node": "e23b5505d1ad"},
+ {"branch": "a branch *", "node": "10ff5895aa57"}, (glob)
+ {"branch": "a", "node": "d8cbc61dbaa6"},
+ {"branch": "default", "node": "19709c5a4e75"}
+ }
+
+ $ cat <<'EOF' >> .hg/hgrc
+ > [templates]
+ > :docheader = 'should not be selected as a docheader for literal templates\n'
+ > EOF
+ $ hg branches -T '{branch}\n'
+ b
+ a branch name much longer than the default justification used by branches
+ a
+ default
+
Tests of revision branch name caching
We rev branch cache is updated automatically. In these tests we use a trick to
--- a/tests/test-bugzilla.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-bugzilla.t Wed Jul 19 07:51:41 2017 -0500
@@ -3,7 +3,14 @@
$ cat <<EOF > bzmock.py
> from __future__ import absolute_import
> from mercurial import extensions
+ > from mercurial import registrar
>
+ > configtable = {}
+ > configitem = registrar.configitem(configtable)
+ >
+ > configitem('bugzilla', 'mocklog',
+ > default=None,
+ > )
> def extsetup(ui):
> bugzilla = extensions.find('bugzilla')
> class bzmock(bugzilla.bzaccess):
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-bundle-phases.t Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,285 @@
+ $ cat >> $HGRCPATH <<EOF
+ > [experimental]
+ > bundle-phases=yes
+ > [extensions]
+ > strip=
+ > drawdag=$TESTDIR/drawdag.py
+ > EOF
+
+Set up repo with linear history
+ $ hg init linear
+ $ cd linear
+ $ hg debugdrawdag <<'EOF'
+ > E
+ > |
+ > D
+ > |
+ > C
+ > |
+ > B
+ > |
+ > A
+ > EOF
+ $ hg phase --public A
+ $ hg phase --force --secret D
+ $ hg log -G -T '{desc} {phase}\n'
+ o E secret
+ |
+ o D secret
+ |
+ o C draft
+ |
+ o B draft
+ |
+ o A public
+
+Phases are restored when unbundling
+ $ hg bundle --base B -r E bundle
+ 3 changesets found
+ $ hg debugbundle bundle
+ Stream params: sortdict([('Compression', 'BZ')])
+ changegroup -- "sortdict([('version', '02'), ('nbchanges', '3'), ('targetphase', '2')])"
+ 26805aba1e600a82e93661149f2313866a221a7b
+ f585351a92f85104bff7c284233c338b10eb1df7
+ 9bc730a19041f9ec7cb33c626e811aa233efb18c
+ phase-heads -- 'sortdict()'
+ 26805aba1e600a82e93661149f2313866a221a7b draft
+ $ hg strip --no-backup C
+ $ hg unbundle -q bundle
+ $ rm bundle
+ $ hg log -G -T '{desc} {phase}\n'
+ o E secret
+ |
+ o D secret
+ |
+ o C draft
+ |
+ o B draft
+ |
+ o A public
+
+Root revision's phase is preserved
+ $ hg bundle -a bundle
+ 5 changesets found
+ $ hg strip --no-backup A
+ $ hg unbundle -q bundle
+ $ rm bundle
+ $ hg log -G -T '{desc} {phase}\n'
+ o E secret
+ |
+ o D secret
+ |
+ o C draft
+ |
+ o B draft
+ |
+ o A public
+
+Completely public history can be restored
+ $ hg phase --public E
+ $ hg bundle -a bundle
+ 5 changesets found
+ $ hg strip --no-backup A
+ $ hg unbundle -q bundle
+ $ rm bundle
+ $ hg log -G -T '{desc} {phase}\n'
+ o E public
+ |
+ o D public
+ |
+ o C public
+ |
+ o B public
+ |
+ o A public
+
+Direct transition from public to secret can be restored
+ $ hg phase --secret --force D
+ $ hg bundle -a bundle
+ 5 changesets found
+ $ hg strip --no-backup A
+ $ hg unbundle -q bundle
+ $ rm bundle
+ $ hg log -G -T '{desc} {phase}\n'
+ o E secret
+ |
+ o D secret
+ |
+ o C public
+ |
+ o B public
+ |
+ o A public
+
+Revisions within bundle preserve their phase even if parent changes its phase
+ $ hg phase --draft --force B
+ $ hg bundle --base B -r E bundle
+ 3 changesets found
+ $ hg strip --no-backup C
+ $ hg phase --public B
+ $ hg unbundle -q bundle
+ $ rm bundle
+ $ hg log -G -T '{desc} {phase}\n'
+ o E secret
+ |
+ o D secret
+ |
+ o C draft
+ |
+ o B public
+ |
+ o A public
+
+Phase of ancestors of stripped node get advanced to accommodate child
+ $ hg bundle --base B -r E bundle
+ 3 changesets found
+ $ hg strip --no-backup C
+ $ hg phase --force --secret B
+ $ hg unbundle -q bundle
+ $ rm bundle
+ $ hg log -G -T '{desc} {phase}\n'
+ o E secret
+ |
+ o D secret
+ |
+ o C draft
+ |
+ o B draft
+ |
+ o A public
+
+Unbundling advances phases of changesets even if they were already in the repo.
+To test that, create a bundle of everything in draft phase and then unbundle
+to see that secret becomes draft, but public remains public.
+ $ hg phase --draft --force A
+ $ hg phase --draft E
+ $ hg bundle -a bundle
+ 5 changesets found
+ $ hg phase --public A
+ $ hg phase --secret --force E
+ $ hg unbundle -q bundle
+ $ rm bundle
+ $ hg log -G -T '{desc} {phase}\n'
+ o E draft
+ |
+ o D draft
+ |
+ o C draft
+ |
+ o B draft
+ |
+ o A public
+
+Unbundling change in the middle of a stack does not affect later changes
+ $ hg strip --no-backup E
+ $ hg phase --secret --force D
+ $ hg log -G -T '{desc} {phase}\n'
+ o D secret
+ |
+ o C draft
+ |
+ o B draft
+ |
+ o A public
+
+ $ hg bundle --base A -r B bundle
+ 1 changesets found
+ $ hg unbundle -q bundle
+ $ rm bundle
+ $ hg log -G -T '{desc} {phase}\n'
+ o D secret
+ |
+ o C draft
+ |
+ o B draft
+ |
+ o A public
+
+
+ $ cd ..
+
+Set up repo with non-linear history
+ $ hg init non-linear
+ $ cd non-linear
+ $ hg debugdrawdag <<'EOF'
+ > D E
+ > |\|
+ > B C
+ > |/
+ > A
+ > EOF
+ $ hg phase --public C
+ $ hg phase --force --secret B
+ $ hg log -G -T '{node|short} {desc} {phase}\n'
+ o 03ca77807e91 E draft
+ |
+ | o 4e4f9194f9f1 D secret
+ |/|
+ o | dc0947a82db8 C public
+ | |
+ | o 112478962961 B secret
+ |/
+ o 426bada5c675 A public
+
+
+Restore bundle of entire repo
+ $ hg bundle -a bundle
+ 5 changesets found
+ $ hg debugbundle bundle
+ Stream params: sortdict([('Compression', 'BZ')])
+ changegroup -- "sortdict([('version', '02'), ('nbchanges', '5'), ('targetphase', '2')])"
+ 426bada5c67598ca65036d57d9e4b64b0c1ce7a0
+ 112478962961147124edd43549aedd1a335e44bf
+ dc0947a82db884575bb76ea10ac97b08536bfa03
+ 4e4f9194f9f181c57f62e823e8bdfa46ab9e4ff4
+ 03ca77807e919db8807c3749086dc36fb478cac0
+ phase-heads -- 'sortdict()'
+ dc0947a82db884575bb76ea10ac97b08536bfa03 public
+ 03ca77807e919db8807c3749086dc36fb478cac0 draft
+ $ hg strip --no-backup A
+ $ hg unbundle -q bundle
+ $ rm bundle
+ $ hg log -G -T '{node|short} {desc} {phase}\n'
+ o 03ca77807e91 E draft
+ |
+ | o 4e4f9194f9f1 D secret
+ |/|
+ o | dc0947a82db8 C public
+ | |
+ | o 112478962961 B secret
+ |/
+ o 426bada5c675 A public
+
+
+ $ hg bundle --base 'A + C' -r D bundle
+ 2 changesets found
+ $ hg debugbundle bundle
+ Stream params: sortdict([('Compression', 'BZ')])
+ changegroup -- "sortdict([('version', '02'), ('nbchanges', '2'), ('targetphase', '2')])"
+ 112478962961147124edd43549aedd1a335e44bf
+ 4e4f9194f9f181c57f62e823e8bdfa46ab9e4ff4
+ phase-heads -- 'sortdict()'
+ $ rm bundle
+
+ $ hg bundle --base A -r D bundle
+ 3 changesets found
+ $ hg debugbundle bundle
+ Stream params: sortdict([('Compression', 'BZ')])
+ changegroup -- "sortdict([('version', '02'), ('nbchanges', '3'), ('targetphase', '2')])"
+ 112478962961147124edd43549aedd1a335e44bf
+ dc0947a82db884575bb76ea10ac97b08536bfa03
+ 4e4f9194f9f181c57f62e823e8bdfa46ab9e4ff4
+ phase-heads -- 'sortdict()'
+ dc0947a82db884575bb76ea10ac97b08536bfa03 public
+ $ rm bundle
+
+ $ hg bundle --base 'B + C' -r 'D + E' bundle
+ 2 changesets found
+ $ hg debugbundle bundle
+ Stream params: sortdict([('Compression', 'BZ')])
+ changegroup -- "sortdict([('version', '02'), ('nbchanges', '2'), ('targetphase', '2')])"
+ 4e4f9194f9f181c57f62e823e8bdfa46ab9e4ff4
+ 03ca77807e919db8807c3749086dc36fb478cac0
+ phase-heads -- 'sortdict()'
+ 03ca77807e919db8807c3749086dc36fb478cac0 draft
+ $ rm bundle
--- a/tests/test-bundle.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-bundle.t Wed Jul 19 07:51:41 2017 -0500
@@ -302,6 +302,20 @@
$ hg debugbundle --spec packednongd.hg
none-packed1;requirements%3Drevlogv1
+Warning emitted when packed bundles contain secret changesets
+
+ $ hg init testsecret
+ $ cd testsecret
+ $ touch foo
+ $ hg -q commit -A -m initial
+ $ hg phase --force --secret -r .
+ $ cd ..
+
+ $ hg -R testsecret debugcreatestreamclonebundle packedsecret.hg
+ (warning: stream clone bundle will contain secret revisions)
+ writing 301 bytes for 3 files
+ bundle requirements: generaldelta, revlogv1
+
Unpacking packed1 bundles with "hg unbundle" isn't allowed
$ hg init packed
--- a/tests/test-bundle2-exchange.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-bundle2-exchange.t Wed Jul 19 07:51:41 2017 -0500
@@ -18,7 +18,7 @@
> evolution=createmarkers,exchange
> bundle2-output-capture=True
> [ui]
- > ssh=python "$TESTDIR/dummyssh"
+ > ssh="$PYTHON" "$TESTDIR/dummyssh"
> logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
> [web]
> push_ssl = false
@@ -909,7 +909,7 @@
$ cp $TESTTMP/hgrc.orig $HGRCPATH
$ cat >> $HGRCPATH <<EOF
> [ui]
- > ssh=python "$TESTDIR/dummyssh"
+ > ssh="$PYTHON" "$TESTDIR/dummyssh"
> EOF
$ cat >> $TESTTMP/locktester.py <<EOF
--- a/tests/test-bundle2-format.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-bundle2-format.t Wed Jul 19 07:51:41 2017 -0500
@@ -14,7 +14,6 @@
> """
>
> import sys, os, gc
- > from mercurial import cmdutil
> from mercurial import util
> from mercurial import bundle2
> from mercurial import scmutil
@@ -22,6 +21,7 @@
> from mercurial import changegroup
> from mercurial import error
> from mercurial import obsolete
+ > from mercurial import registrar
>
>
> try:
@@ -33,7 +33,7 @@
> pass
>
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
>
> ELEPHANTSSONG = """Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
> Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
@@ -70,7 +70,7 @@
> for val in op.reply.capabilities[cap]:
> op.ui.write('debugreply: %r\n' % val)
>
- > @command('bundle2',
+ > @command(b'bundle2',
> [('', 'param', [], 'stream level parameter'),
> ('', 'unknown', False, 'include an unknown mandatory part in the bundle'),
> ('', 'unknownparams', False, 'include an unknown part parameters in the bundle'),
@@ -113,7 +113,7 @@
> headmissing = [c.node() for c in repo.set('heads(%ld)', revs)]
> headcommon = [c.node() for c in repo.set('parents(%ld) - %ld', revs, revs)]
> outgoing = discovery.outgoing(repo, headcommon, headmissing)
- > cg = changegroup.getlocalchangegroup(repo, 'test:bundle2', outgoing, None)
+ > cg = changegroup.getchangegroup(repo, 'test:bundle2', outgoing, None)
> bundler.newpart('changegroup', data=cg.getchunks(),
> mandatory=False)
>
@@ -169,7 +169,7 @@
> finally:
> file.flush()
>
- > @command('unbundle2', [], '')
+ > @command(b'unbundle2', [], '')
> def cmdunbundle2(ui, repo, replypath=None):
> """process a bundle2 stream from stdin on the current repo"""
> try:
@@ -200,7 +200,7 @@
> for chunk in op.reply.getchunks():
> file.write(chunk)
>
- > @command('statbundle2', [], '')
+ > @command(b'statbundle2', [], '')
> def cmdstatbundle2(ui, repo):
> """print statistic on the bundle2 container read from stdin"""
> unbundler = bundle2.getunbundler(ui, sys.stdin)
@@ -229,7 +229,7 @@
> [experimental]
> evolution=createmarkers
> [ui]
- > ssh=python "$TESTDIR/dummyssh"
+ > ssh=$PYTHON "$TESTDIR/dummyssh"
> logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
> [web]
> push_ssl = false
--- a/tests/test-bundle2-pushback.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-bundle2-pushback.t Wed Jul 19 07:51:41 2017 -0500
@@ -25,7 +25,7 @@
$ cat >> $HGRCPATH <<EOF
> [ui]
- > ssh = python "$TESTDIR/dummyssh"
+ > ssh = $PYTHON "$TESTDIR/dummyssh"
> username = nobody <no.reply@example.com>
>
> [alias]
--- a/tests/test-bundle2-remote-changegroup.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-bundle2-remote-changegroup.t Wed Jul 19 07:51:41 2017 -0500
@@ -29,7 +29,7 @@
> from the given file.
> - raw-remote-changegroup <python expression>
> Creates a remote-changegroup part with the data given in the
- > python expression as parameters. The python expression is
+ > Python expression as parameters. The Python expression is
> evaluated with eval, and is expected to be a dict.
> """
> def newpart(name, data=''):
@@ -74,12 +74,12 @@
Start a simple HTTP server to serve bundles
- $ python "$TESTDIR/dumbhttp.py" -p $HGPORT --pid dumb.pid
+ $ $PYTHON "$TESTDIR/dumbhttp.py" -p $HGPORT --pid dumb.pid
$ cat dumb.pid >> $DAEMON_PIDS
$ cat >> $HGRCPATH << EOF
> [ui]
- > ssh=python "$TESTDIR/dummyssh"
+ > ssh=$PYTHON "$TESTDIR/dummyssh"
> logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
> EOF
--- a/tests/test-cache-abuse.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-cache-abuse.t Wed Jul 19 07:51:41 2017 -0500
@@ -24,6 +24,7 @@
$ echo dumb > dumb
$ hg ci -qAmdumb
$ hg debugobsolete b1174d11b69e63cb0c5726621a43c859f0858d7f
+ obsoleted 1 changesets
$ hg phase -pr t1
$ hg phase -fsr t2
@@ -70,10 +71,6 @@
$ damage tags tags2-visible
$ damage "tag -f t3" hgtagsfnodes1
-Beat up hidden cache:
-
- $ damage log hidden
-
Beat up branch caches:
$ damage branches branch2-base "rm .hg/cache/branch2-[vs]*"
--- a/tests/test-casefolding.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-casefolding.t Wed Jul 19 07:51:41 2017 -0500
@@ -9,7 +9,6 @@
$ cd repo1
$ echo a > a
$ hg add A
- adding a
$ hg st
A a
$ hg ci -m adda
@@ -28,6 +27,7 @@
a
committing manifest
committing changelog
+ updating the branch cache
committed changeset 0:07f4944404050f47db2e5c5071e0e84e7a27bba9
Case-changing renames should work:
@@ -70,14 +70,12 @@
A D/c
$ hg ci -m addc D/c
$ hg mv d/b d/e
- moving D/b to D/e (glob)
$ hg st
A D/e
R D/b
$ hg revert -aq
$ rm d/e
$ hg mv d/b D/B
- moving D/b to D/B (glob)
$ hg st
A D/B
R D/b
--- a/tests/test-cat.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-cat.t Wed Jul 19 07:51:41 2017 -0500
@@ -63,6 +63,46 @@
tmp/h_45116003780e
tmp/r_2
+Test template output
+
+ $ hg --cwd tmp cat ../b ../c -T '== {path} ({abspath}) ==\n{data}'
+ == ../b (b) == (glob)
+ 1
+ == ../c (c) == (glob)
+ 3
+
+ $ hg cat b c -Tjson --output -
+ [
+ {
+ "abspath": "b",
+ "data": "1\n",
+ "path": "b"
+ },
+ {
+ "abspath": "c",
+ "data": "3\n",
+ "path": "c"
+ }
+ ]
+
+ $ hg cat b c -Tjson --output 'tmp/%p.json'
+ $ cat tmp/b.json
+ [
+ {
+ "abspath": "b",
+ "data": "1\n",
+ "path": "b"
+ }
+ ]
+ $ cat tmp/c.json
+ [
+ {
+ "abspath": "c",
+ "data": "3\n",
+ "path": "c"
+ }
+ ]
+
Test working directory
$ echo b-wdir > b
--- a/tests/test-censor.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-censor.t Wed Jul 19 07:51:41 2017 -0500
@@ -304,7 +304,7 @@
Can censor after revlog has expanded to no longer permit inline storage
- $ for x in `python $TESTDIR/seq.py 0 50000`
+ $ for x in `$PYTHON $TESTDIR/seq.py 0 50000`
> do
> echo "Password: hunter$x" >> target
> done
--- a/tests/test-check-code.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-check-code.t Wed Jul 19 07:51:41 2017 -0500
@@ -7,41 +7,15 @@
New errors are not allowed. Warnings are strongly discouraged.
(The writing "no-che?k-code" is for not skipping this file when checking.)
- $ hg locate -X contrib/python-zstandard -X hgext/fsmonitor/pywatchman |
+ $ testrepohg locate -X contrib/python-zstandard \
+ > -X hgext/fsmonitor/pywatchman |
> sed 's-\\-/-g' | "$check_code" --warnings --per-file=0 - || false
- contrib/perf.py:869:
- > r.revision(r.node(x))
- don't convert rev to node before passing to revision(nodeorrev)
Skipping i18n/polib.py it has no-che?k-code (glob)
- mercurial/demandimport.py:313:
- > if os.environ.get('HGDEMANDIMPORT') != 'disable':
- use encoding.environ instead (py3)
- mercurial/encoding.py:54:
- > environ = os.environ
- use encoding.environ instead (py3)
- mercurial/encoding.py:56:
- > environ = os.environb
- use encoding.environ instead (py3)
- mercurial/encoding.py:61:
- > for k, v in os.environ.items())
- use encoding.environ instead (py3)
- mercurial/encoding.py:221:
- > for k, v in os.environ.items())
- use encoding.environ instead (py3)
Skipping mercurial/httpclient/__init__.py it has no-che?k-code (glob)
Skipping mercurial/httpclient/_readers.py it has no-che?k-code (glob)
- mercurial/policy.py:46:
- > if 'HGMODULEPOLICY' in os.environ:
- use encoding.environ instead (py3)
- mercurial/policy.py:47:
- > policy = os.environ['HGMODULEPOLICY'].encode('utf-8')
- use encoding.environ instead (py3)
- mercurial/policy.py:49:
- > policy = os.environ.get('HGMODULEPOLICY', policy)
- use encoding.environ instead (py3)
+ Skipping mercurial/selectors2.py it has no-che?k-code (glob)
Skipping mercurial/statprof.py it has no-che?k-code (glob)
Skipping tests/badserverext.py it has no-che?k-code (glob)
- [1]
@commands in debugcommands.py should be in alphabetical order.
@@ -61,7 +35,7 @@
Prevent adding new files in the root directory accidentally.
- $ hg files 'glob:*'
+ $ testrepohg files 'glob:*'
.editorconfig
.hgignore
.hgsigs
--- a/tests/test-check-commit.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-check-commit.t Wed Jul 19 07:51:41 2017 -0500
@@ -8,8 +8,9 @@
$ cd $TESTDIR/..
- $ for node in `hg log --rev 'not public() and ::. and not desc("# no-check-commit")' --template '{node|short}\n'`; do
- > hg export $node | contrib/check-commit > ${TESTTMP}/check-commit.out
+ $ for node in `testrepohg log --rev 'not public() and ::. and not desc("# no-check-commit")' --template '{node|short}\n'`; do
+ > testrepohg export --git $node \
+ > | contrib/check-commit > ${TESTTMP}/check-commit.out
> if [ $? -ne 0 ]; then
> echo "Revision $node does not comply with rules"
> echo '------------------------------------------------------'
--- a/tests/test-check-config.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-check-config.t Wed Jul 19 07:51:41 2017 -0500
@@ -1,9 +1,47 @@
#require test-repo
$ . "$TESTDIR/helpers-testrepo.sh"
+
+Sanity check check-config.py
+
+ $ cat > testfile.py << EOF
+ > # Good
+ > foo = ui.config('ui', 'username')
+ > # Missing
+ > foo = ui.config('ui', 'doesnotexist')
+ > # Missing different type
+ > foo = ui.configint('ui', 'missingint')
+ > # Missing with default value
+ > foo = ui.configbool('ui', 'missingbool1', default=True)
+ > foo = ui.configbool('ui', 'missingbool2', False)
+ > # Inconsistent values for defaults.
+ > foo = ui.configint('ui', 'intdefault', default=1)
+ > foo = ui.configint('ui', 'intdefault', default=42)
+ > # Can suppress inconsistent value error
+ > foo = ui.configint('ui', 'intdefault2', default=1)
+ > # inconsistent config: ui.intdefault2
+ > foo = ui.configint('ui', 'intdefault2', default=42)
+ > EOF
+
+ $ cat > files << EOF
+ > mercurial/help/config.txt
+ > $TESTTMP/testfile.py
+ > EOF
+
$ cd "$TESTDIR"/..
+ $ $PYTHON contrib/check-config.py < $TESTTMP/files
+ foo = ui.configint('ui', 'intdefault', default=42)
+ conflict on ui.intdefault: ('int', '42') != ('int', '1')
+ at $TESTTMP/testfile.py:12: (glob)
+ undocumented: ui.doesnotexist (str)
+ undocumented: ui.intdefault (int) [42]
+ undocumented: ui.intdefault2 (int) [42]
+ undocumented: ui.missingbool1 (bool) [True]
+ undocumented: ui.missingbool2 (bool)
+ undocumented: ui.missingint (int)
+
New errors are not allowed. Warnings are strongly discouraged.
- $ hg files "set:(**.py or **.txt) - tests/**" | sed 's|\\|/|g' |
- > python contrib/check-config.py
+ $ testrepohg files "set:(**.py or **.txt) - tests/**" | sed 's|\\|/|g' |
+ > $PYTHON contrib/check-config.py
--- a/tests/test-check-execute.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-check-execute.t Wed Jul 19 07:51:41 2017 -0500
@@ -5,20 +5,20 @@
look for python scripts without the execute bit
- $ hg files 'set:**.py and not exec() and grep(r"^#!.*?python")'
+ $ testrepohg files 'set:**.py and not exec() and grep(r"^#!.*?python")'
[1]
look for python scripts with execute bit but not shebang
- $ hg files 'set:**.py and exec() and not grep(r"^#!.*?python")'
+ $ testrepohg files 'set:**.py and exec() and not grep(r"^#!.*?python")'
[1]
look for shell scripts with execute bit but not shebang
- $ hg files 'set:**.sh and exec() and not grep(r"^#!.*(ba)?sh")'
+ $ testrepohg files 'set:**.sh and exec() and not grep(r"^#!.*(ba)?sh")'
[1]
look for non scripts with no shebang
- $ hg files 'set:exec() and not **.sh and not **.py and not grep(r"^#!")'
+ $ testrepohg files 'set:exec() and not **.sh and not **.py and not grep(r"^#!")'
[1]
--- a/tests/test-check-help.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-check-help.t Wed Jul 19 07:51:41 2017 -0500
@@ -23,6 +23,7 @@
Check if ":hg:`help TOPIC`" is valid:
(use "xargs -n1 -t" to see which help commands are executed)
- $ hg files 'glob:{hgext,mercurial}/**/*.py' | sed 's|\\|/|g' \
- > | xargs python "$TESTTMP/scanhelptopics.py" \
+ $ testrepohg files 'glob:{hgdemandimport,hgext,mercurial}/**/*.py' \
+ > | sed 's|\\|/|g' \
+ > | xargs $PYTHON "$TESTTMP/scanhelptopics.py" \
> | xargs -n1 hg help > /dev/null
--- a/tests/test-check-module-imports.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-check-module-imports.t Wed Jul 19 07:51:41 2017 -0500
@@ -14,7 +14,7 @@
Known-bad files are excluded by -X as some of them would produce unstable
outputs, which should be fixed later.
- $ hg locate 'set:**.py or grep(r"^#!.*?python")' \
+ $ testrepohg locate 'set:**.py or grep(r"^#!.*?python")' \
> 'tests/**.t' \
> -X contrib/debugshell.py \
> -X contrib/python-zstandard/ \
@@ -24,7 +24,6 @@
> -X i18n/posplit \
> -X tests/test-hgweb-auth.py \
> -X tests/hypothesishelpers.py \
- > -X tests/test-ctxmanager.py \
> -X tests/test-lock.py \
> -X tests/test-verify-repo-operations.py \
> -X tests/test-hook.t \
@@ -37,4 +36,4 @@
> -X tests/test-hgweb-no-path-info.t \
> -X tests/test-hgweb-no-request-uri.t \
> -X tests/test-hgweb-non-interactive.t \
- > | sed 's-\\-/-g' | python "$import_checker" -
+ > | sed 's-\\-/-g' | $PYTHON "$import_checker" -
--- a/tests/test-check-py3-commands.t Wed Jul 05 11:24:22 2017 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,161 +0,0 @@
-#require py3exe
-
-This test helps in keeping a track on which commands we can run on
-Python 3 and see what kind of errors are coming up.
-The full traceback is hidden to have a stable output.
- $ HGBIN=`which hg`
-
- $ for cmd in version debuginstall ; do
- > echo $cmd
- > $PYTHON3 $HGBIN $cmd 2>&1 2>&1 | tail -1
- > done
- version
- warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
- debuginstall
- no problems detected
-
-#if test-repo
-Make a clone so that any features in the developer's .hg/hgrc that
-might confuse Python 3 don't break this test. When we can do commit in
-Python 3, we'll stop doing this. We use e76ed1e480ef for the clone
-because it has different files than 273ce12ad8f1, so we can test both
-`files` from dirstate and `files` loaded from a specific revision.
-
- $ hg clone -r e76ed1e480ef "`dirname "$TESTDIR"`" testrepo 2>&1 | tail -1
- 15 files updated, 0 files merged, 0 files removed, 0 files unresolved
-
-Test using -R, which exercises some URL code:
- $ $PYTHON3 $HGBIN -R testrepo files -r 273ce12ad8f1 | tail -1
- testrepo/tkmerge
-
-Now prove `hg files` is reading the whole manifest. We have to grep
-out some potential warnings that come from hgrc as yet.
- $ cd testrepo
- $ $PYTHON3 $HGBIN files -r 273ce12ad8f1
- .hgignore
- PKG-INFO
- README
- hg
- mercurial/__init__.py
- mercurial/byterange.py
- mercurial/fancyopts.py
- mercurial/hg.py
- mercurial/mdiff.py
- mercurial/revlog.py
- mercurial/transaction.py
- notes.txt
- setup.py
- tkmerge
-
- $ $PYTHON3 $HGBIN files -r 273ce12ad8f1 | wc -l
- \s*14 (re)
- $ $PYTHON3 $HGBIN files | wc -l
- \s*15 (re)
-
-Test if log-like commands work:
-
- $ $PYTHON3 $HGBIN tip
- changeset: 10:e76ed1e480ef
- tag: tip
- user: oxymoron@cinder.waste.org
- date: Tue May 03 23:37:43 2005 -0800
- summary: Fix linking of changeset revs when merging
-
-
- $ $PYTHON3 $HGBIN log -r0
- changeset: 0:9117c6561b0b
- user: mpm@selenic.com
- date: Tue May 03 13:16:10 2005 -0800
- summary: Add back links from file revisions to changeset revisions
-
-
- $ cd ..
-#endif
-
-Test if `hg config` works:
-
- $ $PYTHON3 $HGBIN config
- defaults.backout=-d "0 0"
- defaults.commit=-d "0 0"
- defaults.shelve=--date "0 0"
- defaults.tag=-d "0 0"
- devel.all-warnings=true
- largefiles.usercache=$TESTTMP/.cache/largefiles
- ui.slash=True
- ui.interactive=False
- ui.mergemarkers=detailed
- ui.promptecho=True
- web.address=localhost
- web.ipv6=False
-
- $ cat > included-hgrc <<EOF
- > [extensions]
- > babar = imaginary_elephant
- > EOF
- $ cat >> $HGRCPATH <<EOF
- > %include $TESTTMP/included-hgrc
- > EOF
- $ $PYTHON3 $HGBIN version | tail -1
- *** failed to import extension babar from imaginary_elephant: *: 'imaginary_elephant' (glob)
- warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-
- $ rm included-hgrc
- $ touch included-hgrc
-
-Test bytes-ness of policy.policy with HGMODULEPOLICY
-
- $ HGMODULEPOLICY=py
- $ export HGMODULEPOLICY
- $ $PYTHON3 `which hg` debuginstall 2>&1 2>&1 | tail -1
- no problems detected
-
-`hg init` can create empty repos
-`hg status works fine`
-`hg summary` also works!
-
- $ $PYTHON3 `which hg` init py3repo
- $ cd py3repo
- $ echo "This is the file 'iota'." > iota
- $ $PYTHON3 $HGBIN status
- ? iota
- $ $PYTHON3 $HGBIN add iota
- $ $PYTHON3 $HGBIN status
- A iota
- $ $PYTHON3 $HGBIN commit --message 'commit performed in Python 3'
- $ $PYTHON3 $HGBIN status
-
- $ mkdir A
- $ echo "This is the file 'mu'." > A/mu
- $ $PYTHON3 $HGBIN addremove
- adding A/mu
- $ $PYTHON3 $HGBIN status
- A A/mu
- $ HGEDITOR='echo message > ' $PYTHON3 $HGBIN commit
- $ $PYTHON3 $HGBIN status
- $ $PYHON3 $HGBIN summary
- parent: 1:e1e9167203d4 tip
- message
- branch: default
- commit: (clean)
- update: (current)
- phases: 2 draft
-
-Prove the repo is valid using the Python 2 `hg`:
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- 2 files, 2 changesets, 2 total revisions
- $ hg log
- changeset: 1:e1e9167203d4
- tag: tip
- user: test
- date: Thu Jan 01 00:00:00 1970 +0000
- summary: message
-
- changeset: 0:71c96e924262
- user: test
- date: Thu Jan 01 00:00:00 1970 +0000
- summary: commit performed in Python 3
-
--- a/tests/test-check-py3-compat.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-check-py3-compat.t Wed Jul 19 07:51:41 2017 -0500
@@ -3,7 +3,9 @@
$ . "$TESTDIR/helpers-testrepo.sh"
$ cd "$TESTDIR"/..
- $ hg files 'set:(**.py)' | sed 's|\\|/|g' | xargs python contrib/check-py3-compat.py
+ $ testrepohg files 'set:(**.py)' \
+ > -X hgdemandimport/demandimportpy2.py \
+ > | sed 's|\\|/|g' | xargs $PYTHON contrib/check-py3-compat.py
contrib/python-zstandard/setup.py not using absolute_import
contrib/python-zstandard/setup_zstd.py not using absolute_import
contrib/python-zstandard/tests/common.py not using absolute_import
@@ -22,15 +24,17 @@
tests/test-demandimport.py not using absolute_import
#if py3exe
- $ hg files 'set:(**.py) - grep(pygments)' -X hgext/fsmonitor/pywatchman \
+ $ testrepohg files 'set:(**.py) - grep(pygments)' \
+ > -X hgdemandimport/demandimportpy2.py \
+ > -X hgext/fsmonitor/pywatchman \
> | sed 's|\\|/|g' | xargs $PYTHON3 contrib/check-py3-compat.py \
> | sed 's/[0-9][0-9]*)$/*)/'
hgext/convert/transport.py: error importing: <*Error> No module named 'svn.client' (error at transport.py:*) (glob)
- hgext/fsmonitor/state.py: error importing: <SyntaxError> from __future__ imports must occur at the beginning of the file (__init__.py, line 30) (error at __init__.py:*)
- hgext/fsmonitor/watchmanclient.py: error importing: <SyntaxError> from __future__ imports must occur at the beginning of the file (__init__.py, line 30) (error at __init__.py:*)
- mercurial/cffi/bdiff.py: error importing: <*Error> No module named 'mercurial.cffi' (error at check-py3-compat.py:*) (glob)
- mercurial/cffi/mpatch.py: error importing: <*Error> No module named 'mercurial.cffi' (error at check-py3-compat.py:*) (glob)
- mercurial/cffi/osutil.py: error importing: <*Error> No module named 'mercurial.cffi' (error at check-py3-compat.py:*) (glob)
+ mercurial/cffi/bdiff.py: error importing: <ImportError> cannot import name '_bdiff' (error at bdiff.py:*)
+ mercurial/cffi/bdiffbuild.py: error importing: <ImportError> No module named 'cffi' (error at bdiffbuild.py:*)
+ mercurial/cffi/mpatch.py: error importing: <ImportError> cannot import name '_mpatch' (error at mpatch.py:*)
+ mercurial/cffi/mpatchbuild.py: error importing: <ImportError> No module named 'cffi' (error at mpatchbuild.py:*)
+ mercurial/cffi/osutilbuild.py: error importing: <ImportError> No module named 'cffi' (error at osutilbuild.py:*)
mercurial/scmwindows.py: error importing: <*Error> No module named 'msvcrt' (error at win32.py:*) (glob)
mercurial/win32.py: error importing: <*Error> No module named 'msvcrt' (error at win32.py:*) (glob)
mercurial/windows.py: error importing: <*Error> No module named 'msvcrt' (error at windows.py:*) (glob)
@@ -38,7 +42,7 @@
#endif
#if py3exe py3pygments
- $ hg files 'set:(**.py) and grep(pygments)' | sed 's|\\|/|g' \
+ $ testrepohg files 'set:(**.py) and grep(pygments)' | sed 's|\\|/|g' \
> | xargs $PYTHON3 contrib/check-py3-compat.py \
> | sed 's/[0-9][0-9]*)$/*)/'
#endif
--- a/tests/test-check-pyflakes.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-check-pyflakes.t Wed Jul 19 07:51:41 2017 -0500
@@ -1,14 +1,21 @@
#require test-repo pyflakes hg10
$ . "$TESTDIR/helpers-testrepo.sh"
- $ cd "`dirname "$TESTDIR"`"
run pyflakes on all tracked files ending in .py or without a file ending
(skipping binary file random-seed)
- $ hg locate 'set:**.py or grep("^#!.*python")' -X hgext/fsmonitor/pywatchman \
+ $ cat > test.py <<EOF
+ > print(undefinedname)
+ > EOF
+ $ pyflakes test.py 2>/dev/null | "$TESTDIR/filterpyflakes.py"
+ test.py:1: undefined name 'undefinedname'
+
+ $ cd "`dirname "$TESTDIR"`"
+
+ $ testrepohg locate 'set:**.py or grep("^#!.*python")' \
+ > -X hgext/fsmonitor/pywatchman \
> -X mercurial/pycompat.py -X contrib/python-zstandard \
> 2>/dev/null \
> | xargs pyflakes 2>/dev/null | "$TESTDIR/filterpyflakes.py"
- tests/filterpyflakes.py:39: undefined name 'undefinedname'
--- a/tests/test-check-pylint.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-check-pylint.t Wed Jul 19 07:51:41 2017 -0500
@@ -12,7 +12,7 @@
$ touch $TESTTMP/fakerc
$ pylint --rcfile=$TESTTMP/fakerc --disable=all \
> --enable=W0102 --reports=no \
- > mercurial hgext hgext3rd
+ > mercurial hgdemandimport hgext hgext3rd
(?)
------------------------------------ (?)
Your code has been rated at 10.00/10 (?)
--- a/tests/test-check-shbang.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-check-shbang.t Wed Jul 19 07:51:41 2017 -0500
@@ -5,10 +5,21 @@
look for python scripts that do not use /usr/bin/env
- $ hg files 'set:grep(r"^#!.*?python") and not grep(r"^#!/usr/bi{1}n/env python")'
+ $ testrepohg files 'set:grep(r"^#!.*?python") and not grep(r"^#!/usr/bi{1}n/env python") - **/*.t'
[1]
+In tests, enforce $PYTHON and *not* /usr/bin/env python or similar:
+ $ testrepohg files 'set:grep(r"#!.*?python") and **/*.t' \
+ > -X tests/test-check-execute.t \
+ > -X tests/test-check-module-imports.t \
+ > -X tests/test-check-pyflakes.t \
+ > -X tests/test-check-shbang.t
+ [1]
+
+The above exclusions are because they're looking for files that
+contain Python but don't end in .py - please avoid adding more.
+
look for shell scripts that do not use /bin/sh
- $ hg files 'set:grep(r"^#!.*/bi{1}n/sh") and not grep(r"^#!/bi{1}n/sh")'
+ $ testrepohg files 'set:grep(r"^#!.*/bi{1}n/sh") and not grep(r"^#!/bi{1}n/sh")'
[1]
--- a/tests/test-chg.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-chg.t Wed Jul 19 07:51:41 2017 -0500
@@ -89,7 +89,7 @@
> [extensions]
> pager =
> [pager]
- > pager = python $TESTTMP/fakepager.py
+ > pager = $PYTHON $TESTTMP/fakepager.py
> EOF
$ chg version > /dev/null
$ touch foo
@@ -105,10 +105,10 @@
chg waits for pager if runcommand raises
$ cat > $TESTTMP/crash.py <<EOF
- > from mercurial import cmdutil
+ > from mercurial import registrar
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
- > @command('crash')
+ > command = registrar.command(cmdtable)
+ > @command(b'crash')
> def pagercrash(ui, repo, *pats, **opts):
> ui.write('going to crash\n')
> raise Exception('.')
--- a/tests/test-clone-cgi.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-clone-cgi.t Wed Jul 19 07:51:41 2017 -0500
@@ -26,13 +26,13 @@
$ . "$TESTDIR/cgienv"
$ QUERY_STRING="cmd=changegroup&roots=0000000000000000000000000000000000000000"; export QUERY_STRING
- $ python hgweb.cgi >page1 2>&1
- $ python "$TESTDIR/md5sum.py" page1
+ $ $PYTHON hgweb.cgi >page1 2>&1
+ $ $PYTHON "$TESTDIR/md5sum.py" page1
1f424bb22ec05c3c6bc866b6e67efe43 page1
make sure headers are sent even when there is no body
- $ QUERY_STRING="cmd=listkeys&namespace=nosuchnamespace" python hgweb.cgi
+ $ QUERY_STRING="cmd=listkeys&namespace=nosuchnamespace" $PYTHON hgweb.cgi
Status: 200 Script output follows\r (esc)
Content-Type: application/mercurial-0.1\r (esc)
Content-Length: 0\r (esc)
--- a/tests/test-clone-uncompressed.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-clone-uncompressed.t Wed Jul 19 07:51:41 2017 -0500
@@ -49,6 +49,77 @@
bundle2-input-bundle: 1 parts total
checking for updated bookmarks
+Cannot stream clone when there are secret changesets
+
+ $ hg -R server phase --force --secret -r tip
+ $ hg clone --uncompressed -U http://localhost:$HGPORT secret-denied
+ warning: stream clone requested but server has them disabled
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+
+ $ killdaemons.py
+
+Streaming of secrets can be overridden by server config
+
+ $ cd server
+ $ hg --config server.uncompressedallowsecret=true serve -p $HGPORT -d --pid-file=hg.pid
+ $ cat hg.pid > $DAEMON_PIDS
+ $ cd ..
+
+ $ hg clone --uncompressed -U http://localhost:$HGPORT secret-allowed
+ streaming all changes
+ 1027 files to transfer, 96.3 KB of data
+ transferred 96.3 KB in * seconds (*/sec) (glob)
+ searching for changes
+ no changes found
+
+ $ killdaemons.py
+
+Verify interaction between preferuncompressed and secret presence
+
+ $ cd server
+ $ hg --config server.preferuncompressed=true serve -p $HGPORT -d --pid-file=hg.pid
+ $ cat hg.pid > $DAEMON_PIDS
+ $ cd ..
+
+ $ hg clone -U http://localhost:$HGPORT preferuncompressed-secret
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+
+ $ killdaemons.py
+
+Clone not allowed when full bundles disabled and can't serve secrets
+
+ $ cd server
+ $ hg --config server.disablefullbundle=true serve -p $HGPORT -d --pid-file=hg.pid
+ $ cat hg.pid > $DAEMON_PIDS
+ $ cd ..
+
+ $ hg clone --uncompressed http://localhost:$HGPORT secret-full-disabled
+ warning: stream clone requested but server has them disabled
+ requesting all changes
+ remote: abort: server has pull-based clones disabled
+ abort: pull failed on remote
+ (remove --pull if specified or upgrade Mercurial)
+ [255]
+
+Local stream clone with secrets involved
+(This is just a test over behavior: if you have access to the repo's files,
+there is no security so it isn't important to prevent a clone here.)
+
+ $ hg clone -U --uncompressed server local-secret
+ warning: stream clone requested but server has them disabled
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
Stream clone while repo is changing:
--- a/tests/test-clone.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-clone.t Wed Jul 19 07:51:41 2017 -0500
@@ -52,6 +52,8 @@
checkisexec (execbit !)
checklink (symlink !)
checklink-target (symlink !)
+ rbc-names-v1
+ rbc-revs-v1
$ cat a
a
@@ -99,6 +101,8 @@
$ ls .hg/cache
branch2-served
+ rbc-names-v1
+ rbc-revs-v1
$ cat a 2>/dev/null || echo "a not present"
a not present
@@ -520,7 +524,7 @@
> hg.clone(myui, {}, repo, dest="ua")
> EOF
- $ python simpleclone.py
+ $ $PYTHON simpleclone.py
updating to branch default
3 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -534,7 +538,7 @@
> hg.clone(myui, {}, repo, dest="ua", branch=["stable",])
> EOF
- $ python branchclone.py
+ $ $PYTHON branchclone.py
adding changesets
adding manifests
adding file changes
@@ -710,6 +714,7 @@
$ echo initial2 > foo
$ hg -q commit -A -m initial1
$ hg debugobsolete c05d5c47a5cf81401869999f3d05f7d699d2b29a e082c1832e09a7d1e78b7fd49a592d372de854c8
+ obsoleted 1 changesets
$ cd ..
$ hg -q clone --pull source1a source1b
@@ -1040,7 +1045,7 @@
$ hg id -R remote -r 0
abort: repository remote not found!
[255]
- $ hg --config share.pool=share -q clone -e "python \"$TESTDIR/dummyssh\"" a ssh://user@dummy/remote
+ $ hg --config share.pool=share -q clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" a ssh://user@dummy/remote
$ hg -R remote id -r 0
acb14030fe0a
--- a/tests/test-clonebundles.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-clonebundles.t Wed Jul 19 07:51:41 2017 -0500
@@ -51,7 +51,8 @@
$ echo 'http://does.not.exist/bundle.hg' > server/.hg/clonebundles.manifest
$ hg clone http://localhost:$HGPORT 404-url
applying clone bundle from http://does.not.exist/bundle.hg
- error fetching bundle: (.* not known|getaddrinfo failed|No address associated with hostname) (re)
+ error fetching bundle: (.* not known|No address associated with hostname) (re) (no-windows !)
+ error fetching bundle: [Errno 11004] getaddrinfo failed (windows !)
abort: error applying bundle
(if this error persists, consider contacting the server operator or disable clone bundles via "--config ui.clonebundles=false")
[255]
@@ -61,14 +62,14 @@
$ echo "http://localhost:$HGPORT1/bundle.hg" > server/.hg/clonebundles.manifest
$ hg clone http://localhost:$HGPORT server-not-runner
applying clone bundle from http://localhost:$HGPORT1/bundle.hg
- error fetching bundle: (.* refused.*|Protocol not supported|Cannot assign requested address) (re)
+ error fetching bundle: (.* refused.*|Protocol not supported|(.* )?Cannot assign requested address) (re)
abort: error applying bundle
(if this error persists, consider contacting the server operator or disable clone bundles via "--config ui.clonebundles=false")
[255]
Server returns 404
- $ python $TESTDIR/dumbhttp.py -p $HGPORT1 --pid http.pid
+ $ "$PYTHON" $TESTDIR/dumbhttp.py -p $HGPORT1 --pid http.pid
$ cat http.pid >> $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT running-404
applying clone bundle from http://localhost:$HGPORT1/bundle.hg
@@ -196,7 +197,7 @@
Feature works over SSH
- $ hg clone -U -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/server ssh-full-clone
+ $ hg clone -U -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/server ssh-full-clone
applying clone bundle from http://localhost:$HGPORT1/full.hg
adding changesets
adding manifests
--- a/tests/test-command-template.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-command-template.t Wed Jul 19 07:51:41 2017 -0500
@@ -209,14 +209,29 @@
Add some simple styles to settings
- $ echo '[templates]' >> .hg/hgrc
- $ printf 'simple = "{rev}\\n"\n' >> .hg/hgrc
- $ printf 'simple2 = {rev}\\n\n' >> .hg/hgrc
+ $ cat <<'EOF' >> .hg/hgrc
+ > [templates]
+ > simple = "{rev}\n"
+ > simple2 = {rev}\n
+ > rev = "should not precede {rev} keyword\n"
+ > EOF
$ hg log -l1 -Tsimple
8
$ hg log -l1 -Tsimple2
8
+ $ hg log -l1 -Trev
+ should not precede 8 keyword
+ $ hg log -l1 -T '{simple}'
+ 8
+
+Map file shouldn't see user templates:
+
+ $ cat <<EOF > tmpl
+ > changeset = 'nothing expanded:{simple}\n'
+ > EOF
+ $ hg log -l1 --style ./tmpl
+ nothing expanded:
Test templates and style maps in files:
@@ -242,6 +257,38 @@
summary: third
+Test docheader, docfooter and separator in template map
+
+ $ cat <<'EOF' > map-myjson
+ > docheader = '\{\n'
+ > docfooter = '\n}\n'
+ > separator = ',\n'
+ > changeset = ' {dict(rev, node|short)|json}'
+ > EOF
+ $ hg log -l2 -T./map-myjson
+ {
+ {"node": "95c24699272e", "rev": 8},
+ {"node": "29114dbae42b", "rev": 7}
+ }
+
+Test docheader, docfooter and separator in [templates] section
+
+ $ cat <<'EOF' >> .hg/hgrc
+ > [templates]
+ > myjson = ' {dict(rev, node|short)|json}'
+ > myjson:docheader = '\{\n'
+ > myjson:docfooter = '\n}\n'
+ > myjson:separator = ',\n'
+ > :docheader = 'should not be selected as a docheader for literal templates\n'
+ > EOF
+ $ hg log -l2 -Tmyjson
+ {
+ {"node": "95c24699272e", "rev": 8},
+ {"node": "29114dbae42b", "rev": 7}
+ }
+ $ hg log -l1 -T'{rev}\n'
+ 8
+
Template should precede style option
$ hg log -l1 --style default -T '{rev}\n'
@@ -1180,7 +1227,10 @@
common mistake:
- $ hg log -T '{changeset}\n'
+ $ cat << EOF > issue4758
+ > changeset = '{changeset}\n'
+ > EOF
+ $ hg log --style ./issue4758
abort: recursive reference 'changeset' in template
[255]
@@ -1196,7 +1246,10 @@
buildmap() -> gettemplate(), where no thunk was made:
- $ hg log -T '{files % changeset}\n'
+ $ cat << EOF > issue4758
+ > changeset = '{files % changeset}\n'
+ > EOF
+ $ hg log --style ./issue4758
abort: recursive reference 'changeset' in template
[255]
@@ -3503,6 +3556,9 @@
hg: parse error: shortest() expects an integer minlength
[255]
+ $ hg log -r 'wdir()' -T '{node|shortest}\n'
+ ffff
+
$ cd ..
Test shortest(node) with the repo having short hash collision:
@@ -3534,8 +3590,11 @@
9:c5623987d205cd6d9d8389bfc40fff9dbb670b48
10:c562ddd9c94164376c20b86b0b4991636a3bf84f
$ hg debugobsolete a00be79088084cb3aff086ab799f8790e01a976b
+ obsoleted 1 changesets
$ hg debugobsolete c5623987d205cd6d9d8389bfc40fff9dbb670b48
+ obsoleted 1 changesets
$ hg debugobsolete c562ddd9c94164376c20b86b0b4991636a3bf84f
+ obsoleted 1 changesets
nodes starting with '11' (we don't have the revision number '11' though)
@@ -3838,10 +3897,37 @@
Test namespaces dict
- $ hg log -T '{rev}{namespaces % " {namespace}={join(names, ",")}"}\n'
- 2 bookmarks=bar,foo tags=tip branches=text.{rev}
- 1 bookmarks=baz tags= branches=text.{rev}
- 0 bookmarks= tags= branches=default
+ $ hg --config extensions.revnamesext=$TESTDIR/revnamesext.py log -T '{rev}\n{namespaces % " {namespace} color={colorname} builtin={builtin}\n {join(names, ",")}\n"}\n'
+ 2
+ bookmarks color=bookmark builtin=True
+ bar,foo
+ tags color=tag builtin=True
+ tip
+ branches color=branch builtin=True
+ text.{rev}
+ revnames color=revname builtin=False
+ r2
+
+ 1
+ bookmarks color=bookmark builtin=True
+ baz
+ tags color=tag builtin=True
+
+ branches color=branch builtin=True
+ text.{rev}
+ revnames color=revname builtin=False
+ r1
+
+ 0
+ bookmarks color=bookmark builtin=True
+
+ tags color=tag builtin=True
+
+ branches color=branch builtin=True
+ default
+ revnames color=revname builtin=False
+ r0
+
$ hg log -r2 -T '{namespaces % "{namespace}: {names}\n"}'
bookmarks: bar foo
tags: tip
@@ -4170,7 +4256,7 @@
$ hg init nonascii
$ cd nonascii
- $ python <<EOF
+ $ $PYTHON <<EOF
> open('latin1', 'w').write('\xe9')
> open('utf-8', 'w').write('\xc3\xa9')
> EOF
--- a/tests/test-commandserver.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-commandserver.t Wed Jul 19 07:51:41 2017 -0500
@@ -187,11 +187,9 @@
... runcommand(server, ['-R', 'foo', 'showconfig', 'ui', 'defaults'])
*** runcommand showconfig
bundle.mainreporoot=$TESTTMP/repo
- defaults.backout=-d "0 0"
- defaults.commit=-d "0 0"
- defaults.shelve=--date "0 0"
- defaults.tag=-d "0 0"
devel.all-warnings=true
+ devel.default-date=0 0
+ extensions.fsmonitor= (fsmonitor !)
largefiles.usercache=$TESTTMP/.cache/largefiles
ui.slash=True
ui.interactive=False
@@ -203,10 +201,6 @@
web\.ipv6=(?:True|False) (re)
*** runcommand init foo
*** runcommand -R foo showconfig ui defaults
- defaults.backout=-d "0 0"
- defaults.commit=-d "0 0"
- defaults.shelve=--date "0 0"
- defaults.tag=-d "0 0"
ui.slash=True
ui.interactive=False
ui.mergemarkers=detailed
@@ -521,6 +515,7 @@
*** runcommand up null
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
*** runcommand phase -df tip
+ obsoleted 1 changesets
*** runcommand log --hidden
changeset: 1:731265503d86
tag: tip
@@ -579,19 +574,19 @@
$ cat <<EOF > dbgui.py
> import os, sys
- > from mercurial import cmdutil, commands
+ > from mercurial import commands, registrar
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
- > @command("debuggetpass", norepo=True)
+ > command = registrar.command(cmdtable)
+ > @command(b"debuggetpass", norepo=True)
> def debuggetpass(ui):
> ui.write("%s\\n" % ui.getpass())
- > @command("debugprompt", norepo=True)
+ > @command(b"debugprompt", norepo=True)
> def debugprompt(ui):
> ui.write("%s\\n" % ui.prompt("prompt:"))
- > @command("debugreadstdin", norepo=True)
+ > @command(b"debugreadstdin", norepo=True)
> def debugreadstdin(ui):
> ui.write("read: %r\n" % sys.stdin.read(1))
- > @command("debugwritestdout", norepo=True)
+ > @command(b"debugwritestdout", norepo=True)
> def debugwritestdout(ui):
> os.write(1, "low-level stdout fd and\n")
> sys.stdout.write("stdout should be redirected to /dev/null\n")
--- a/tests/test-commit-amend.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-commit-amend.t Wed Jul 19 07:51:41 2017 -0500
@@ -40,7 +40,7 @@
$ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit --amend -m 'amend base1'
pretxncommit 43f1ba15f28a50abf0aae529cf8a16bfced7b149
43f1ba15f28a tip
- saved backup bundle to $TESTTMP/.hg/strip-backup/489edb5b847d-f1bf3ab8-amend-backup.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/489edb5b847d-f1bf3ab8-amend.hg (glob)
$ echo 'pretxncommit.foo = ' >> $HGRCPATH
$ hg diff -c .
diff -r ad120869acf0 -r 43f1ba15f28a a
@@ -93,7 +93,7 @@
Add new file:
$ hg ci --amend -m 'amend base1 new file'
- saved backup bundle to $TESTTMP/.hg/strip-backup/43f1ba15f28a-7a3b3496-amend-backup.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/43f1ba15f28a-7a3b3496-amend.hg (glob)
Remove file that was added in amended commit:
(and test logfile option)
@@ -102,7 +102,7 @@
$ hg rm b
$ echo 'amend base1 remove new file' > ../logfile
$ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg ci --amend --logfile ../logfile
- saved backup bundle to $TESTTMP/.hg/strip-backup/b8e3cb2b3882-0b55739a-amend-backup.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/b8e3cb2b3882-0b55739a-amend.hg (glob)
$ hg cat b
b: no such file in rev 74609c7f506e
@@ -117,13 +117,12 @@
a
committing manifest
committing changelog
- stripping amended changeset 74609c7f506e
1 changesets found
uncompressed size of bundle content:
254 (changelog)
163 (manifests)
129 a
- saved backup bundle to $TESTTMP/.hg/strip-backup/74609c7f506e-1bfde511-amend-backup.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/74609c7f506e-1bfde511-amend.hg (glob)
1 changesets found
uncompressed size of bundle content:
250 (changelog)
@@ -169,10 +168,10 @@
> EOF
$ HGEDITOR="sh .hg/checkeditform.sh" hg ci --amend -u foo -d '1 0'
HGEDITFORM=commit.amend.normal
- saved backup bundle to $TESTTMP/.hg/strip-backup/1cd866679df8-5f5bcb85-amend-backup.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/1cd866679df8-5f5bcb85-amend.hg (glob)
$ echo a >> a
$ hg ci --amend -u foo -d '1 0'
- saved backup bundle to $TESTTMP/.hg/strip-backup/780e6f23e03d-83b10a27-amend-backup.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/780e6f23e03d-83b10a27-amend.hg (glob)
$ hg log -r .
changeset: 1:5f357c7560ab
tag: tip
@@ -261,13 +260,12 @@
a
committing manifest
committing changelog
- stripping amended changeset 5f357c7560ab
1 changesets found
uncompressed size of bundle content:
249 (changelog)
163 (manifests)
131 a
- saved backup bundle to $TESTTMP/.hg/strip-backup/5f357c7560ab-e7c84ade-amend-backup.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/5f357c7560ab-e7c84ade-amend.hg (glob)
1 changesets found
uncompressed size of bundle content:
257 (changelog)
@@ -303,14 +301,12 @@
a
committing manifest
committing changelog
- stripping intermediate changeset a0ea9b1a4c8c
- stripping amended changeset 7ab3bf440b54
2 changesets found
uncompressed size of bundle content:
464 (changelog)
322 (manifests)
249 a
- saved backup bundle to $TESTTMP/.hg/strip-backup/7ab3bf440b54-8e3b5088-amend-backup.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/7ab3bf440b54-8e3b5088-amend.hg (glob)
1 changesets found
uncompressed size of bundle content:
257 (changelog)
@@ -337,13 +333,13 @@
$ hg book book1
$ hg book book2
$ hg ci --amend -m 'move bookmarks'
- saved backup bundle to $TESTTMP/.hg/strip-backup/ea22a388757c-e51094db-amend-backup.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/ea22a388757c-e51094db-amend.hg (glob)
$ hg book
book1 1:6cec5aa930e2
* book2 1:6cec5aa930e2
$ echo a >> a
$ hg ci --amend -m 'move bookmarks'
- saved backup bundle to $TESTTMP/.hg/strip-backup/6cec5aa930e2-e9b06de4-amend-backup.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/6cec5aa930e2-e9b06de4-amend.hg (glob)
$ hg book
book1 1:48bb6e53a15f
* book2 1:48bb6e53a15f
@@ -379,7 +375,7 @@
$ hg branch default -f
marked working directory as branch default
$ hg ci --amend -m 'back to default'
- saved backup bundle to $TESTTMP/.hg/strip-backup/8ac881fbf49d-fd962fef-amend-backup.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/8ac881fbf49d-fd962fef-amend.hg (glob)
$ hg branches
default 2:ce12b0b57d46
@@ -395,7 +391,7 @@
$ echo b >> b
$ hg ci -mb
$ hg ci --amend --close-branch -m 'closing branch foo'
- saved backup bundle to $TESTTMP/.hg/strip-backup/c962248fa264-6701c392-amend-backup.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/c962248fa264-6701c392-amend.hg (glob)
Same thing, different code path:
@@ -404,7 +400,7 @@
reopening closed branch head 4
$ echo b >> b
$ hg ci --amend --close-branch
- saved backup bundle to $TESTTMP/.hg/strip-backup/027371728205-49c0c55d-amend-backup.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/027371728205-49c0c55d-amend.hg (glob)
$ hg branches
default 2:ce12b0b57d46
@@ -425,7 +421,7 @@
$ hg ci -m 'b -> c'
$ hg mv c d
$ hg ci --amend -m 'b -> d'
- saved backup bundle to $TESTTMP/.hg/strip-backup/b8c6eac7f12e-adaaa8b1-amend-backup.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/b8c6eac7f12e-adaaa8b1-amend.hg (glob)
$ hg st --rev '.^' --copies d
A d
b
@@ -433,7 +429,7 @@
$ hg ci -m 'e = d'
$ hg cp e f
$ hg ci --amend -m 'f = d'
- saved backup bundle to $TESTTMP/.hg/strip-backup/7f9761d65613-d37aa788-amend-backup.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/7f9761d65613-d37aa788-amend.hg (glob)
$ hg st --rev '.^' --copies f
A f
d
@@ -444,7 +440,7 @@
$ hg cp a f
$ mv f.orig f
$ hg ci --amend -m replacef
- saved backup bundle to $TESTTMP/.hg/strip-backup/9e8c5f7e3d95-90259f67-amend-backup.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/9e8c5f7e3d95-90259f67-amend.hg (glob)
$ hg st --change . --copies
$ hg log -r . --template "{file_copies}\n"
@@ -456,7 +452,7 @@
adding g
$ hg mv g h
$ hg ci --amend
- saved backup bundle to $TESTTMP/.hg/strip-backup/24aa8eacce2b-7059e0f1-amend-backup.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/24aa8eacce2b-7059e0f1-amend.hg (glob)
$ hg st --change . --copies h
A h
$ hg log -r . --template "{file_copies}\n"
@@ -476,11 +472,11 @@
$ echo a >> a
$ hg ci -ma
$ hg ci --amend -m "a'"
- saved backup bundle to $TESTTMP/.hg/strip-backup/3837aa2a2fdb-2be01fd1-amend-backup.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/3837aa2a2fdb-2be01fd1-amend.hg (glob)
$ hg log -r . --template "{branch}\n"
a
$ hg ci --amend -m "a''"
- saved backup bundle to $TESTTMP/.hg/strip-backup/c05c06be7514-ed28c4cd-amend-backup.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/c05c06be7514-ed28c4cd-amend.hg (glob)
$ hg log -r . --template "{branch}\n"
a
@@ -497,7 +493,7 @@
$ hg graft 12
grafting 12:2647734878ef "fork" (tip)
$ hg ci --amend -m 'graft amend'
- saved backup bundle to $TESTTMP/.hg/strip-backup/bd010aea3f39-eedb103b-amend-backup.hg (glob)
+ saved backup bundle to $TESTTMP/.hg/strip-backup/bd010aea3f39-eedb103b-amend.hg (glob)
$ hg log -r . --debug | grep extra
extra: amend_source=bd010aea3f39f3fb2a2f884b9ccb0471cd77398e
extra: branch=a
--- a/tests/test-commit-interactive-curses.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-commit-interactive-curses.t Wed Jul 19 07:51:41 2017 -0500
@@ -206,7 +206,7 @@
> X
> EOF
$ hg commit -i -m "newly added file" -d "0 0"
- saved backup bundle to $TESTTMP/a/.hg/strip-backup/2b0e9be4d336-28bbe4e2-amend-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a/.hg/strip-backup/2b0e9be4d336-28bbe4e2-amend.hg (glob)
$ hg diff -c .
diff -r a6735021574d -r c1d239d165ae x
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -342,8 +342,8 @@
The default interface is text
$ cp $HGRCPATH.pretest $HGRCPATH
$ chunkselectorinterface() {
- > python <<EOF
- > from mercurial import hg, ui, parsers;\
+ > $PYTHON <<EOF
+ > from mercurial import hg, ui;\
> repo = hg.repository(ui.ui.load(), ".");\
> print repo.ui.interface("chunkselector")
> EOF
--- a/tests/test-commit-interactive.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-commit-interactive.t Wed Jul 19 07:51:41 2017 -0500
@@ -909,7 +909,7 @@
> sys.stdout.write(''.join(escape(c) for c in l))
> EOF
- $ hg commit -i --encoding cp932 2>&1 <<EOF | python $TESTTMP/escape.py | grep '^y - '
+ $ hg commit -i --encoding cp932 2>&1 <<EOF | $PYTHON $TESTTMP/escape.py | grep '^y - '
> ?
> q
> EOF
--- a/tests/test-commit.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-commit.t Wed Jul 19 07:51:41 2017 -0500
@@ -15,20 +15,20 @@
$ hg commit -d '0 0' -m commit-1
$ echo foo >> foo
$ hg commit -d '1 4444444' -m commit-3
- abort: impossible time zone offset: 4444444
+ hg: parse error: impossible time zone offset: 4444444
[255]
$ hg commit -d '1 15.1' -m commit-4
- abort: invalid date: '1\t15.1'
+ hg: parse error: invalid date: '1\t15.1'
[255]
$ hg commit -d 'foo bar' -m commit-5
- abort: invalid date: 'foo bar'
+ hg: parse error: invalid date: 'foo bar'
[255]
$ hg commit -d ' 1 4444' -m commit-6
$ hg commit -d '111111111111 0' -m commit-7
- abort: date exceeds 32 bits: 111111111111
+ hg: parse error: date exceeds 32 bits: 111111111111
[255]
$ hg commit -d '-111111111111 0' -m commit-7
- abort: date exceeds 32 bits: -111111111111
+ hg: parse error: date exceeds 32 bits: -111111111111
[255]
$ echo foo >> foo
$ hg commit -d '1901-12-13 20:45:52 +0000' -m commit-7-2
@@ -38,10 +38,10 @@
3 1901-12-13 20:45:52 +0000
2 1901-12-13 20:45:52 +0000
$ hg commit -d '1901-12-13 20:45:51 +0000' -m commit-7
- abort: date exceeds 32 bits: -2147483649
+ hg: parse error: date exceeds 32 bits: -2147483649
[255]
$ hg commit -d '-2147483649 0' -m commit-7
- abort: date exceeds 32 bits: -2147483649
+ hg: parse error: date exceeds 32 bits: -2147483649
[255]
commit added file that has been deleted
@@ -120,10 +120,28 @@
An empty date was interpreted as epoch origin
$ echo foo >> foo
- $ hg commit -d '' -m commit-no-date
+ $ hg commit -d '' -m commit-no-date --config devel.default-date=
$ hg tip --template '{date|isodate}\n' | grep '1970'
[1]
+Using the advanced --extra flag
+
+ $ echo "[extensions]" >> $HGRCPATH
+ $ echo "commitextras=" >> $HGRCPATH
+ $ hg status
+ ? baz
+ ? quux
+ $ hg add baz
+ $ hg commit -m "adding extras" --extra sourcehash=foo --extra oldhash=bar
+ $ hg log -r . -T '{extras % "{extra}\n"}'
+ branch=default
+ oldhash=bar
+ sourcehash=foo
+ $ hg add quux
+ $ hg commit -m "adding internal used extras" --extra amend_source=hash
+ abort: key 'amend_source' is used internally, can't be set manually
+ [255]
+
Make sure we do not obscure unknown requires file entries (issue2649)
$ echo foo >> foo
@@ -375,7 +393,7 @@
HG: changed changed
HG: removed removed
====
- abort: precommit.test-saving-last-message hook exited with status 1 (in subrepo sub)
+ abort: precommit.test-saving-last-message hook exited with status 1 (in subrepository "sub")
[255]
$ cat .hg/last-message.txt
--- a/tests/test-completion.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-completion.t Wed Jul 19 07:51:41 2017 -0500
@@ -99,6 +99,7 @@
debugnamecomplete
debugobsolete
debugpathcomplete
+ debugpickmergetool
debugpushkey
debugpvec
debugrebuilddirstate
@@ -107,9 +108,11 @@
debugrevlog
debugrevspec
debugsetparents
+ debugssl
debugsub
debugsuccessorssets
debugtemplate
+ debugupdatecaches
debugupgraderepo
debugwalk
debugwireargs
@@ -215,7 +218,7 @@
Show all commands + options
$ hg debugcommands
add: include, exclude, subrepos, dry-run
- annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, ignore-all-space, ignore-space-change, ignore-blank-lines, include, exclude, template
+ annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, skip, ignore-all-space, ignore-space-change, ignore-blank-lines, include, exclude, template
clone: noupdate, updaterev, rev, branch, pull, uncompressed, ssh, remotecmd, insecure
commit: addremove, close-branch, amend, secret, edit, interactive, include, exclude, message, logfile, date, user, subrepos
diff: rev, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, unified, stat, root, include, exclude, subrepos
@@ -228,7 +231,7 @@
push: force, rev, bookmark, branch, new-branch, ssh, remotecmd, insecure
remove: after, force, subrepos, include, exclude
serve: accesslog, daemon, daemon-postexec, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate, subrepos
- status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, copies, print0, rev, change, include, exclude, subrepos, template
+ status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, terse, copies, print0, rev, change, include, exclude, subrepos, template
summary: remote
update: clean, check, merge, date, rev, tool
addremove: similarity, subrepos, include, exclude, dry-run
@@ -239,13 +242,13 @@
branch: force, clean
branches: active, closed, template
bundle: force, rev, branch, base, all, type, ssh, remotecmd, insecure
- cat: output, rev, decode, include, exclude
+ cat: output, rev, decode, include, exclude, template
config: untrusted, edit, local, global, template
copy: after, force, include, exclude, dry-run
debugancestor:
debugapplystreamclonebundle:
debugbuilddag: mergeable-file, overwritten-file, new-file
- debugbundle: all, spec
+ debugbundle: all, part-type, spec
debugcheckstate:
debugcolor: style
debugcommands:
@@ -270,19 +273,22 @@
debuglocks: force-lock, force-wlock
debugmergestate:
debugnamecomplete:
- debugobsolete: flags, record-parents, rev, index, delete, date, user, template
+ debugobsolete: flags, record-parents, rev, exclusive, index, delete, date, user, template
debugpathcomplete: full, normal, added, removed
+ debugpickmergetool: rev, changedelete, include, exclude, tool
debugpushkey:
debugpvec:
debugrebuilddirstate: rev, minimal
debugrebuildfncache:
debugrename: rev
debugrevlog: changelog, manifest, dir, dump
- debugrevspec: optimize, show-stage, no-optimized, verify-optimized
+ debugrevspec: optimize, show-revs, show-set, show-stage, no-optimized, verify-optimized
debugsetparents:
+ debugssl:
debugsub: rev
- debugsuccessorssets:
+ debugsuccessorssets: closest
debugtemplate: rev, define
+ debugupdatecaches:
debugupgraderepo: optimize, run
debugwalk: include, exclude
debugwireargs: three, four, five, ssh, remotecmd, insecure
@@ -291,7 +297,7 @@
grep: print0, all, text, follow, ignore-case, files-with-matches, line-number, rev, user, date, template, include, exclude
heads: rev, topo, active, closed, style, template
help: extension, command, keyword, system
- identify: rev, num, id, branch, tags, bookmarks, ssh, remotecmd, insecure
+ identify: rev, num, id, branch, tags, bookmarks, ssh, remotecmd, insecure, template
import: strip, base, edit, force, no-commit, bypass, partial, exact, prefix, import-branch, message, logfile, date, user, similarity
incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
locate: rev, print0, fullpath, include, exclude
--- a/tests/test-config.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-config.t Wed Jul 19 07:51:41 2017 -0500
@@ -178,3 +178,36 @@
$ PAGER=p1 hg config --debug --config pager.pager=p2 | grep 'pager\.pager'
--config: pager.pager=p2
+
+verify that aliases are evaluated as well
+
+ $ hg init aliastest
+ $ cd aliastest
+ $ cat > .hg/hgrc << EOF
+ > [ui]
+ > user = repo user
+ > EOF
+ $ touch index
+ $ unset HGUSER
+ $ hg ci -Am test
+ adding index
+ $ hg log --template '{author}\n'
+ repo user
+ $ cd ..
+
+alias has lower priority
+
+ $ hg init aliaspriority
+ $ cd aliaspriority
+ $ cat > .hg/hgrc << EOF
+ > [ui]
+ > user = alias user
+ > username = repo user
+ > EOF
+ $ touch index
+ $ unset HGUSER
+ $ hg ci -Am test
+ adding index
+ $ hg log --template '{author}\n'
+ repo user
+ $ cd ..
--- a/tests/test-conflict.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-conflict.t Wed Jul 19 07:51:41 2017 -0500
@@ -120,7 +120,7 @@
Verify line trimming of custom conflict marker using multi-byte characters
$ hg up -q --clean .
- $ python <<EOF
+ $ $PYTHON <<EOF
> fp = open('logfile', 'w')
> fp.write('12345678901234567890123456789012345678901234567890' +
> '1234567890') # there are 5 more columns for 80 columns
@@ -220,6 +220,7 @@
$ hg up -C
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "e0693e20f496: 123456789012345678901234567890123456789012345678901234567890????"
1 other heads for branch "default"
$ printf "\n\nEnd of file\n" >> a
$ hg ci -m "Add some stuff at the end"
@@ -258,6 +259,7 @@
$ hg up -C
1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ updated to "18b51d585961: Add some stuff at the beginning"
1 other heads for branch "default"
$ hg merge --tool :merge-local
merging a
--- a/tests/test-context.py Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-context.py Wed Jul 19 07:51:41 2017 -0500
@@ -1,9 +1,11 @@
from __future__ import absolute_import, print_function
import os
+from mercurial.node import hex
from mercurial import (
context,
encoding,
hg,
+ scmutil,
ui as uimod,
)
@@ -146,3 +148,34 @@
match=scmutil.matchfiles(repo, ['bar-r', 'foo']),
listclean=True))
print('wcctx._status=%s' % (str(wcctx._status)))
+
+os.chdir('..')
+
+# test manifestlog being changed
+print('== commit with manifestlog invalidated')
+
+repo = hg.repository(u, 'test2', create=1)
+os.chdir('test2')
+
+# make some commits
+for i in [b'1', b'2', b'3']:
+ with open(i, 'wb') as f:
+ f.write(i)
+ status = scmutil.status([], [i], [], [], [], [], [])
+ ctx = context.workingcommitctx(repo, status, text=i, user=b'test@test.com',
+ date=(0, 0))
+ ctx.p1().manifest() # side effect: cache manifestctx
+ n = repo.commitctx(ctx)
+ print('commit %s: %s' % (i, hex(n)))
+
+ # touch 00manifest.i mtime so storecache could expire.
+ # repo.__dict__['manifestlog'] is deleted by transaction releasefn.
+ st = repo.svfs.stat('00manifest.i')
+ repo.svfs.utime('00manifest.i', (st.st_mtime + 1, st.st_mtime + 1))
+
+ # read the file just committed
+ try:
+ if repo[n][i].data() != i:
+ print('data mismatch')
+ except Exception as ex:
+ print('cannot read data: %r' % ex)
--- a/tests/test-context.py.out Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-context.py.out Wed Jul 19 07:51:41 2017 -0500
@@ -44,3 +44,7 @@
wcctx._status=<status modified=['bar-m'], added=['bar-a'], removed=[], deleted=[], unknown=[], ignored=[], clean=[]>
<status modified=[], added=['bar-r'], removed=[], deleted=[], unknown=[], ignored=[], clean=['foo']>
wcctx._status=<status modified=['bar-m'], added=['bar-a'], removed=[], deleted=[], unknown=[], ignored=[], clean=[]>
+== commit with manifestlog invalidated
+commit 1: 2efe531a913fa648867ab8824360371679d05a65
+commit 2: 2caca91f6362020334384ebe27bae67315298abf
+commit 3: abd6b0f49f338be22b094ef2b7425e8048f8337b
--- a/tests/test-contrib-check-code.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-contrib-check-code.t Wed Jul 19 07:51:41 2017 -0500
@@ -152,6 +152,15 @@
> $ function onwarn {}
warning: don't use 'function', use old style
[1]
+ $ cat > error.t <<EOF
+ > $ [ foo == bar ]
+ > EOF
+ $ "$check_code" error.t
+ error.t:1:
+ > $ [ foo == bar ]
+ [ foo == bar ] is a bashism, use [ foo = bar ] instead
+ [1]
+ $ rm error.t
$ cat > raise-format.py <<EOF
> raise SomeException, message
> # this next line is okay
--- a/tests/test-contrib-perf.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-contrib-perf.t Wed Jul 19 07:51:41 2017 -0500
@@ -51,6 +51,8 @@
(no help text available)
perfannotate (no help text available)
perfbdiff benchmark a bdiff between revisions
+ perfbookmarks
+ benchmark parsing bookmarks from disk to memory
perfbranchmap
benchmark the update of a branchmap
perfcca (no help text available)
@@ -93,12 +95,16 @@
perfparents (no help text available)
perfpathcopies
(no help text available)
+ perfphases benchmark phasesets computation
perfrawfiles (no help text available)
- perfrevlog Benchmark reading a series of revisions from a revlog.
perfrevlogchunks
Benchmark operations on revlog chunks.
+ perfrevlogindex
+ Benchmark operations against a revlog index.
perfrevlogrevision
Benchmark obtaining a revlog revision.
+ perfrevlogrevisions
+ Benchmark reading a series of revisions from a revlog.
perfrevrange (no help text available)
perfrevset benchmark the execution time of a revset
perfstartup (no help text available)
@@ -118,6 +124,7 @@
$ hg perfannotate a
$ hg perfbdiff -c 1
$ hg perfbdiff --alldata 1
+ $ hg perfbookmarks
$ hg perfbranchmap
$ hg perfcca
$ hg perfchangegroupchangelog
@@ -145,7 +152,8 @@
$ hg perfnodelookup 2
$ hg perfpathcopies 1 2
$ hg perfrawfiles 2
- $ hg perfrevlog .hg/store/data/a.i
+ $ hg perfrevlogindex -c
+ $ hg perfrevlogrevisions .hg/store/data/a.i
$ hg perfrevlogrevision -m 0
$ hg perfrevlogchunks -c
$ hg perfrevrange
@@ -162,10 +170,6 @@
$ cd "$TESTDIR/.."
- $ (hg files -r 1.2 glob:mercurial/*.c glob:mercurial/*.py;
- > hg files -r tip glob:mercurial/*.c glob:mercurial/*.py) |
+ $ (testrepohg files -r 1.2 glob:mercurial/*.c glob:mercurial/*.py;
+ > testrepohg files -r tip glob:mercurial/*.c glob:mercurial/*.py) |
> "$TESTDIR"/check-perf-code.py contrib/perf.py
- contrib/perf.py:869:
- > r.revision(r.node(x))
- don't convert rev to node before passing to revision(nodeorrev)
- [1]
--- a/tests/test-contrib.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-contrib.t Wed Jul 19 07:51:41 2017 -0500
@@ -26,7 +26,7 @@
Dumping revlog of file a to stdout:
- $ python "$CONTRIBDIR/dumprevlog" .hg/store/data/a.i
+ $ $PYTHON "$CONTRIBDIR/dumprevlog" .hg/store/data/a.i
file: .hg/store/data/a.i
node: 183d2312b35066fb6b3b449b84efc370d50993d0
linkrev: 0
@@ -58,14 +58,14 @@
Dump all revlogs to file repo.dump:
- $ find .hg/store -name "*.i" | sort | xargs python "$CONTRIBDIR/dumprevlog" > ../repo.dump
+ $ find .hg/store -name "*.i" | sort | xargs $PYTHON "$CONTRIBDIR/dumprevlog" > ../repo.dump
$ cd ..
Undumping into repo-b:
$ hg init repo-b
$ cd repo-b
- $ python "$CONTRIBDIR/undumprevlog" < ../repo.dump
+ $ $PYTHON "$CONTRIBDIR/undumprevlog" < ../repo.dump
.hg/store/00changelog.i
.hg/store/00manifest.i
.hg/store/data/a.i
@@ -115,7 +115,7 @@
changing local directly
- $ python simplemerge local base other && echo "merge succeeded"
+ $ $PYTHON simplemerge local base other && echo "merge succeeded"
merge succeeded
$ cat local
local
@@ -125,7 +125,7 @@
printing to stdout
- $ python simplemerge -p local base other
+ $ $PYTHON simplemerge -p local base other
local
base
other
@@ -144,7 +144,7 @@
$ echo end >> conflict-local
$ echo end >> conflict-other
- $ python simplemerge -p conflict-local base conflict-other
+ $ $PYTHON simplemerge -p conflict-local base conflict-other
base
<<<<<<< conflict-local
not other
@@ -156,7 +156,7 @@
1 label
- $ python simplemerge -p -L foo conflict-local base conflict-other
+ $ $PYTHON simplemerge -p -L foo conflict-local base conflict-other
base
<<<<<<< foo
not other
@@ -168,7 +168,7 @@
2 labels
- $ python simplemerge -p -L foo -L bar conflict-local base conflict-other
+ $ $PYTHON simplemerge -p -L foo -L bar conflict-local base conflict-other
base
<<<<<<< foo
not other
@@ -180,7 +180,7 @@
3 labels
- $ python simplemerge -p -L foo -L bar -L base conflict-local base conflict-other
+ $ $PYTHON simplemerge -p -L foo -L bar -L base conflict-local base conflict-other
base
<<<<<<< foo
not other
@@ -194,7 +194,7 @@
too many labels
- $ python simplemerge -p -L foo -L bar -L baz -L buz conflict-local base conflict-other
+ $ $PYTHON simplemerge -p -L foo -L bar -L baz -L buz conflict-local base conflict-other
abort: can only specify three labels.
[255]
@@ -202,13 +202,13 @@
$ $PYTHON -c "f = file('binary-local', 'w'); f.write('\x00'); f.close()"
$ cat orig >> binary-local
- $ python simplemerge -p binary-local base other
+ $ $PYTHON simplemerge -p binary-local base other
warning: binary-local looks like a binary file.
[1]
binary file --text
- $ python simplemerge -a -p binary-local base other 2>&1
+ $ $PYTHON simplemerge -a -p binary-local base other 2>&1
warning: binary-local looks like a binary file.
\x00local (esc)
base
@@ -216,7 +216,7 @@
help
- $ python simplemerge --help
+ $ $PYTHON simplemerge --help
simplemerge [OPTS] LOCAL BASE OTHER
Simple three-way file merge utility with a minimal feature set.
@@ -235,7 +235,7 @@
wrong number of arguments
- $ python simplemerge
+ $ $PYTHON simplemerge
simplemerge: wrong number of arguments
simplemerge [OPTS] LOCAL BASE OTHER
@@ -256,7 +256,7 @@
bad option
- $ python simplemerge --foo -p local base other
+ $ $PYTHON simplemerge --foo -p local base other
simplemerge: option --foo not recognized
simplemerge [OPTS] LOCAL BASE OTHER
--- a/tests/test-convert-bzr-ghosts.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-convert-bzr-ghosts.t Wed Jul 19 07:51:41 2017 -0500
@@ -21,7 +21,7 @@
$ bzr add -q somefile
$ bzr commit -q -m 'Initial layout setup'
$ echo morecontent >> somefile
- $ python ../../ghostcreator.py 'Commit with ghost revision' ghostrev
+ $ $PYTHON ../../ghostcreator.py 'Commit with ghost revision' ghostrev
$ cd ..
$ hg convert source source-hg
initializing destination source-hg repository
--- a/tests/test-convert-bzr-treeroot.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-convert-bzr-treeroot.t Wed Jul 19 07:51:41 2017 -0500
@@ -20,7 +20,7 @@
$ echo content > file
$ bzr add -q file
$ bzr commit -q -m 'Initial add'
- $ python ../../treeset.py 'Changed root' new
+ $ $PYTHON ../../treeset.py 'Changed root' new
$ cd ..
$ hg convert source source-hg
initializing destination source-hg repository
--- a/tests/test-convert-bzr.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-convert-bzr.t Wed Jul 19 07:51:41 2017 -0500
@@ -129,10 +129,10 @@
$ bzr branch -q source source-improve
$ cd source
$ echo more >> a
- $ python ../helper.py 'Editing a' 100
+ $ $PYTHON ../helper.py 'Editing a' 100
$ cd ../source-improve
$ echo content3 >> b
- $ python ../helper.py 'Editing b' 200
+ $ $PYTHON ../helper.py 'Editing b' 200
$ cd ../source
$ bzr merge -q ../source-improve
$ bzr commit -q -m 'Merged improve branch'
--- a/tests/test-convert-clonebranches.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-convert-clonebranches.t Wed Jul 19 07:51:41 2017 -0500
@@ -40,7 +40,7 @@
convert
$ hg convert -v --config convert.hg.clonebranches=1 source dest |
- > python filter.py
+ > $PYTHON filter.py
3 adda
2 changea
1 addb
@@ -73,7 +73,7 @@
incremental conversion
$ hg convert -v --config convert.hg.clonebranches=1 source dest |
- > python filter.py
+ > $PYTHON filter.py
2 c1
pulling from branch0 into branch1
4 changesets found
--- a/tests/test-convert-cvs.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-convert-cvs.t Wed Jul 19 07:51:41 2017 -0500
@@ -498,3 +498,157 @@
$ cd ..
+
+Test transcoding CVS log messages (issue5597)
+=============================================
+
+To emulate commit messages in (non-ascii) multiple encodings portably,
+this test scenario writes CVS history file (*,v file) directly via
+python code.
+
+Commit messages of version 1.2 - 1.4 use u3042 in 3 encodings below.
+
+|encoding |byte sequence | decodable as: |
+| | | utf-8 euc-jp cp932 |
++----------+--------------+--------------------+
+|utf-8 |\xe3\x81\x82 | o x x |
+|euc-jp |\xa4\xa2 | x o o |
+|cp932 |\x82\xa0 | x x o |
+
+ $ mkdir -p cvsrepo/transcoding
+ $ python <<EOF
+ > fp = open('cvsrepo/transcoding/file,v', 'w')
+ > fp.write(('''
+ > head 1.4;
+ > access;
+ > symbols
+ > start:1.1.1.1 INITIAL:1.1.1;
+ > locks; strict;
+ > comment @# @;
+ >
+ >
+ > 1.4
+ > date 2017.07.10.00.00.04; author nobody; state Exp;
+ > branches;
+ > next 1.3;
+ > commitid 10059635D016A510FFA;
+ >
+ > 1.3
+ > date 2017.07.10.00.00.03; author nobody; state Exp;
+ > branches;
+ > next 1.2;
+ > commitid 10059635CFF6A4FF34E;
+ >
+ > 1.2
+ > date 2017.07.10.00.00.02; author nobody; state Exp;
+ > branches;
+ > next 1.1;
+ > commitid 10059635CFD6A4D5095;
+ >
+ > 1.1
+ > date 2017.07.10.00.00.01; author nobody; state Exp;
+ > branches
+ > 1.1.1.1;
+ > next ;
+ > commitid 10059635CFB6A4A3C33;
+ >
+ > 1.1.1.1
+ > date 2017.07.10.00.00.01; author nobody; state Exp;
+ > branches;
+ > next ;
+ > commitid 10059635CFB6A4A3C33;
+ >
+ >
+ > desc
+ > @@
+ >
+ >
+ > 1.4
+ > log
+ > @''' + u'\u3042'.encode('cp932') + ''' (cp932)
+ > @
+ > text
+ > @1
+ > 2
+ > 3
+ > 4
+ > @
+ >
+ >
+ > 1.3
+ > log
+ > @''' + u'\u3042'.encode('euc-jp') + ''' (euc-jp)
+ > @
+ > text
+ > @d4 1
+ > @
+ >
+ >
+ > 1.2
+ > log
+ > @''' + u'\u3042'.encode('utf-8') + ''' (utf-8)
+ > @
+ > text
+ > @d3 1
+ > @
+ >
+ >
+ > 1.1
+ > log
+ > @Initial revision
+ > @
+ > text
+ > @d2 1
+ > @
+ >
+ >
+ > 1.1.1.1
+ > log
+ > @import
+ > @
+ > text
+ > @@
+ > ''').lstrip())
+ > EOF
+
+ $ cvscall -q checkout transcoding
+ U transcoding/file
+
+Test converting in normal case
+------------------------------
+
+(filtering by grep in order to check only form of debug messages)
+
+ $ hg convert --config convert.cvsps.logencoding=utf-8,euc-jp,cp932 -q --debug transcoding transcoding-hg | grep 'transcoding by'
+ transcoding by utf-8: 1.1 of file
+ transcoding by utf-8: 1.1.1.1 of file
+ transcoding by utf-8: 1.2 of file
+ transcoding by euc-jp: 1.3 of file
+ transcoding by cp932: 1.4 of file
+ $ hg -R transcoding-hg --encoding utf-8 log -T "{rev}: {desc}\n"
+ 5: update tags
+ 4: import
+ 3: \xe3\x81\x82 (cp932) (esc)
+ 2: \xe3\x81\x82 (euc-jp) (esc)
+ 1: \xe3\x81\x82 (utf-8) (esc)
+ 0: Initial revision
+ $ rm -rf transcoding-hg
+
+Test converting in error cases
+------------------------------
+
+unknown encoding in convert.cvsps.logencoding
+
+ $ hg convert --config convert.cvsps.logencoding=foobar -q transcoding transcoding-hg
+ abort: unknown encoding: foobar
+ (check convert.cvsps.logencoding configuration)
+ [255]
+ $ rm -rf transcoding-hg
+
+no acceptable encoding in convert.cvsps.logencoding
+
+ $ hg convert --config convert.cvsps.logencoding=utf-8,euc-jp -q transcoding transcoding-hg
+ abort: no encoding can transcode CVS log message for 1.4 of file
+ (check convert.cvsps.logencoding configuration)
+ [255]
+ $ rm -rf transcoding-hg
--- a/tests/test-convert-git.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-convert-git.t Wed Jul 19 07:51:41 2017 -0500
@@ -948,7 +948,7 @@
$ hg convert git-repo4 git-repo4-broken-hg 2>&1 | grep 'abort:'
abort: cannot read changes in 1c0ce3c5886f83a1d78a7b517cdff5cf9ca17bdd
-#if no-windows
+#if no-windows git19
test for escaping the repo name (CVE-2016-3069)
--- a/tests/test-convert-hg-source.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-convert-hg-source.t Wed Jul 19 07:51:41 2017 -0500
@@ -130,7 +130,7 @@
> for i, l in enumerate(file(sys.argv[1]))]
> file(sys.argv[1], 'wb').write(''.join(lines))
> EOF
- $ python rewrite.py new/.hg/shamap
+ $ $PYTHON rewrite.py new/.hg/shamap
$ cd orig
$ hg up -qC 1
$ echo foo >> foo
--- a/tests/test-convert-svn-encoding.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-convert-svn-encoding.t Wed Jul 19 07:51:41 2017 -0500
@@ -53,6 +53,7 @@
source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/trunk@1
converting: 0/6 revisions (0.00%)
committing changelog
+ updating the branch cache
4 hello
source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/trunk@2
converting: 1/6 revisions (16.67%)
@@ -67,6 +68,7 @@
getting files: \xc3\xa9 2/2 files (100.00%) (esc)
committing manifest
committing changelog
+ updating the branch cache
3 copy files
source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/trunk@3
converting: 2/6 revisions (33.33%)
@@ -93,6 +95,7 @@
\xc3\xb9/e\xcc\x81: copy \xc3\xa0/e\xcc\x81:a9092a3d84a37b9993b5c73576f6de29b7ea50f6 (esc)
committing manifest
committing changelog
+ updating the branch cache
2 remove files
source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/trunk@4
converting: 3/6 revisions (50.00%)
@@ -109,18 +112,21 @@
committing files:
committing manifest
committing changelog
+ updating the branch cache
1 branch to branch?
source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/branches/branch?@5
converting: 4/6 revisions (66.67%)
reparent to file://*/svn-repo/branches/branch%C3%A9 (glob)
scanning paths: /branches/branch\xc3\xa9 0/1 paths (0.00%) (esc)
committing changelog
+ updating the branch cache
0 branch to branch?e
source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/branches/branch?e@6
converting: 5/6 revisions (83.33%)
reparent to file://*/svn-repo/branches/branch%C3%A9e (glob)
scanning paths: /branches/branch\xc3\xa9e 0/1 paths (0.00%) (esc)
committing changelog
+ updating the branch cache
reparent to file://*/svn-repo (glob)
reparent to file://*/svn-repo/branches/branch%C3%A9e (glob)
reparent to file://*/svn-repo (glob)
@@ -130,6 +136,7 @@
.hgtags
committing manifest
committing changelog
+ updating the branch cache
run hg sink post-conversion action
$ cd A-hg
$ hg up
--- a/tests/test-convert-svn-sink.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-convert-svn-sink.t Wed Jul 19 07:51:41 2017 -0500
@@ -10,7 +10,7 @@
> if [ $2 -gt 0 ]; then
> limit="--limit=$2"
> fi
- > svn log --xml -v $limit | python "$TESTDIR/svnxml.py"
+ > svn log --xml -v $limit | $PYTHON "$TESTDIR/svnxml.py"
> )
> }
--- a/tests/test-convert.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-convert.t Wed Jul 19 07:51:41 2017 -0500
@@ -125,9 +125,9 @@
where "original_branch_name" is the name of the branch in the source
repository, and "new_branch_name" is the name of the branch is the
- destination repository. No whitespace is allowed in the branch names. This
- can be used to (for instance) move code in one repository from "default"
- to a named branch.
+ destination repository. No whitespace is allowed in the new branch name.
+ This can be used to (for instance) move code in one repository from
+ "default" to a named branch.
Mercurial Source
################
@@ -171,6 +171,12 @@
single changeset. When very large files were checked in as
part of a changeset then the default may not be long enough.
The default is 60.
+ convert.cvsps.logencoding
+ Specify encoding name to be used for transcoding CVS log
+ messages. Multiple encoding names can be specified as a list
+ (see 'hg help config.Syntax'), but only the first acceptable
+ encoding in the list is used per CVS log entries. This
+ transcoding is executed before cvslog hook below.
convert.cvsps.mergeto
Specify a regular expression to which commit log messages
are matched. If a match occurs, then the conversion process
@@ -581,3 +587,30 @@
branch=default
convert_revision=a3bc6100aa8ec03e00aaf271f1f50046fb432072
convert_source=mysource
+
+ $ cat > branchmap.txt << EOF
+ > old branch new_branch
+ > EOF
+
+ $ hg -R a branch -q 'old branch'
+ $ echo gg > a/g
+ $ hg -R a ci -m 'branch name with spaces'
+ $ hg convert --branchmap branchmap.txt a d
+ initializing destination d repository
+ scanning source...
+ sorting...
+ converting...
+ 6 a
+ 5 b
+ 4 c
+ 3 d
+ 2 e
+ 1 g
+ 0 branch name with spaces
+
+ $ hg -R a branches
+ old branch 6:a24a66ade009
+ default 5:a3bc6100aa8e (inactive)
+ $ hg -R d branches
+ new_branch 6:64ed208b732b
+ default 5:a3bc6100aa8e (inactive)
--- a/tests/test-copy-move-merge.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-copy-move-merge.t Wed Jul 19 07:51:41 2017 -0500
@@ -119,7 +119,7 @@
$ hg rebase -d . -b 2 --config extensions.rebase= --config experimental.disablecopytrace=True
rebasing 2:6adcf8c12e7d "copy b->x"
- saved backup bundle to $TESTTMP/copydisable/.hg/strip-backup/6adcf8c12e7d-ce4b3e75-backup.hg (glob)
+ saved backup bundle to $TESTTMP/copydisable/.hg/strip-backup/6adcf8c12e7d-ce4b3e75-rebase.hg (glob)
$ hg up -q 3
$ hg log -f x -T '{rev} {desc}\n'
3 copy b->x
@@ -152,7 +152,7 @@
$ hg rebase -d 2 -s 3 --config extensions.rebase= --config experimental.disablecopytrace=True
rebasing 3:47e1a9e6273b "copy a->b (2)" (tip)
- saved backup bundle to $TESTTMP/copydisable3/.hg/strip-backup/47e1a9e6273b-2d099c59-backup.hg (glob)
+ saved backup bundle to $TESTTMP/copydisable3/.hg/strip-backup/47e1a9e6273b-2d099c59-rebase.hg (glob)
$ hg log -G -f b
@ changeset: 3:76024fb4b05b
--- a/tests/test-copy.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-copy.t Wed Jul 19 07:51:41 2017 -0500
@@ -32,6 +32,7 @@
b: copy a:b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3
committing manifest
committing changelog
+ updating the branch cache
committed changeset 1:93580a2c28a50a56f63526fb305067e6fbf739c4
we should see two history entries
@@ -191,6 +192,7 @@
XXX: filtering lfilesrepo.status() in 3.3-rc causes the copy source to not be
displayed.
$ hg st -C --config extensions.largefiles=
+ The fsmonitor extension is incompatible with the largefiles extension and has been disabled. (fsmonitor !)
M bar
foo
--- a/tests/test-ctxmanager.py Wed Jul 05 11:24:22 2017 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,79 +0,0 @@
-from __future__ import absolute_import
-
-import silenttestrunner
-import unittest
-
-from mercurial import util
-
-class contextmanager(object):
- def __init__(self, name, trace):
- self.name = name
- self.entered = False
- self.exited = False
- self.trace = trace
-
- def __enter__(self):
- self.entered = True
- self.trace(('enter', self.name))
- return self
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- self.exited = exc_type, exc_val, exc_tb
- self.trace(('exit', self.name))
-
- def __repr__(self):
- return '<ctx %r>' % self.name
-
-class ctxerror(Exception):
- pass
-
-class raise_on_enter(contextmanager):
- def __enter__(self):
- self.trace(('raise', self.name))
- raise ctxerror(self.name)
-
-class raise_on_exit(contextmanager):
- def __exit__(self, exc_type, exc_val, exc_tb):
- self.trace(('raise', self.name))
- raise ctxerror(self.name)
-
-def ctxmgr(name, trace):
- return lambda: contextmanager(name, trace)
-
-class test_ctxmanager(unittest.TestCase):
- def test_basics(self):
- trace = []
- addtrace = trace.append
- with util.ctxmanager(ctxmgr('a', addtrace), ctxmgr('b', addtrace)) as c:
- a, b = c.enter()
- c.atexit(addtrace, ('atexit', 'x'))
- c.atexit(addtrace, ('atexit', 'y'))
- self.assertEqual(trace, [('enter', 'a'), ('enter', 'b'),
- ('atexit', 'y'), ('atexit', 'x'),
- ('exit', 'b'), ('exit', 'a')])
-
- def test_raise_on_enter(self):
- trace = []
- addtrace = trace.append
- def go():
- with util.ctxmanager(ctxmgr('a', addtrace),
- lambda: raise_on_enter('b', addtrace)) as c:
- c.enter()
- addtrace('unreachable')
- self.assertRaises(ctxerror, go)
- self.assertEqual(trace, [('enter', 'a'), ('raise', 'b'), ('exit', 'a')])
-
- def test_raise_on_exit(self):
- trace = []
- addtrace = trace.append
- def go():
- with util.ctxmanager(ctxmgr('a', addtrace),
- lambda: raise_on_exit('b', addtrace)) as c:
- c.enter()
- addtrace('running')
- self.assertRaises(ctxerror, go)
- self.assertEqual(trace, [('enter', 'a'), ('enter', 'b'), 'running',
- ('raise', 'b'), ('exit', 'a')])
-
-if __name__ == '__main__':
- silenttestrunner.main(__name__)
--- a/tests/test-debian-packages.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-debian-packages.t Wed Jul 19 07:51:41 2017 -0500
@@ -1,6 +1,7 @@
#require test-repo slow debhelper
$ . "$TESTDIR/helpers-testrepo.sh"
+ $ testrepohgenv
Ensure debuild doesn't run the testsuite, as that could get silly.
$ DEB_BUILD_OPTIONS=nocheck
--- a/tests/test-debugcommands.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-debugcommands.t Wed Jul 19 07:51:41 2017 -0500
@@ -27,9 +27,10 @@
chunks size : 44
0x75 (u) : 44 (100.00%)
- avg chain length : 0
- max chain length : 0
- compression ratio : 0
+ avg chain length : 0
+ max chain length : 0
+ max chain reach : 44
+ compression ratio : 0
uncompressed data size (min/max/avg) : 43 / 43 / 43
full revision size (min/max/avg) : 44 / 44 / 44
@@ -109,6 +110,23 @@
6 5 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 1 (glob)
7 6 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 2 (glob)
8 7 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 3 (glob)
+
+Test WdirUnsupported exception
+
+ $ hg debugdata -c ffffffffffffffffffffffffffffffffffffffff
+ abort: working directory revision cannot be specified
+ [255]
+
+Test cache warming command
+
+ $ rm -rf .hg/cache/
+ $ hg debugupdatecaches --debug
+ updating the branch cache
+ $ ls -r .hg/cache/*
+ .hg/cache/rbc-revs-v1
+ .hg/cache/rbc-names-v1
+ .hg/cache/branch2-served
+
$ cd ..
Test internal debugstacktrace command
@@ -125,7 +143,7 @@
> dst('hi ...\\nfrom h hidden in g', 1, depth=2)
> f()
> EOF
- $ python debugstacktrace.py
+ $ $PYTHON debugstacktrace.py
stacktrace at:
debugstacktrace.py:10 in * (glob)
debugstacktrace.py:3 in f
--- a/tests/test-debugextensions.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-debugextensions.t Wed Jul 19 07:51:41 2017 -0500
@@ -11,7 +11,6 @@
$ cat >> $HGRCPATH <<EOF
> [extensions]
- > color=
> histedit=
> patchbomb=
> rebase=
@@ -21,7 +20,6 @@
> EOF
$ hg debugextensions
- color
ext1 (untested!)
ext2 (3.2.1!)
histedit
@@ -30,9 +28,6 @@
rebase
$ hg debugextensions -v
- color
- location: */hgext/color.py* (glob)
- bundled: yes
ext1
location: */extwithoutinfos.py* (glob)
bundled: no
@@ -58,13 +53,6 @@
[
{
"buglink": "",
- "bundled": true,
- "name": "color",
- "source": "*/hgext/color.py*", (glob)
- "testedwith": []
- },
- {
- "buglink": "",
"bundled": false,
"name": "ext1",
"source": "*/extwithoutinfos.py*", (glob)
--- a/tests/test-demandimport.py Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-demandimport.py Wed Jul 19 07:51:41 2017 -0500
@@ -52,6 +52,9 @@
import re as fred
print("fred =", f(fred))
+import re as remod
+print("remod =", f(remod))
+
import sys as re
print("re =", f(re))
@@ -59,10 +62,24 @@
print("fred.sub =", f(fred.sub))
print("fred =", f(fred))
+remod.escape # use remod
+print("remod =", f(remod))
+
print("re =", f(re))
print("re.stderr =", f(re.stderr))
print("re =", f(re))
+# Test access to special attributes through demandmod proxy
+from mercurial import pvec as pvecproxy
+print("pvecproxy =", f(pvecproxy))
+print("pvecproxy.__doc__ = %r"
+ % (' '.join(pvecproxy.__doc__.split()[:3]) + ' ...'))
+print("pvecproxy.__name__ = %r" % pvecproxy.__name__)
+# __name__ must be accessible via __dict__ so the relative imports can be
+# resolved
+print("pvecproxy.__dict__['__name__'] = %r" % pvecproxy.__dict__['__name__'])
+print("pvecproxy =", f(pvecproxy))
+
import contextlib
print("contextlib =", f(contextlib))
try:
--- a/tests/test-demandimport.py.out Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-demandimport.py.out Wed Jul 19 07:51:41 2017 -0500
@@ -9,13 +9,20 @@
hgweb_mod = <unloaded module 'hgweb_mod'>
hgweb = <module 'mercurial.hgweb' from '?'>
fred = <unloaded module 're'>
+remod = <unloaded module 're'>
re = <unloaded module 'sys'>
fred = <unloaded module 're'>
fred.sub = <function sub at 0x?>
fred = <proxied module 're'>
+remod = <module 're' from '?'>
re = <unloaded module 'sys'>
re.stderr = <open file '<whatever>', mode 'w' at 0x?>
re = <proxied module 'sys'>
+pvecproxy = <unloaded module 'pvec'>
+pvecproxy.__doc__ = 'A "pvec" is ...'
+pvecproxy.__name__ = 'mercurial.pvec'
+pvecproxy.__dict__['__name__'] = 'mercurial.pvec'
+pvecproxy = <proxied module 'pvec'>
contextlib = <unloaded module 'contextlib'>
contextlib.unknownattr = ImportError: cannot import name unknownattr
__import__('contextlib', ..., ['unknownattr']) = <module 'contextlib' from '?'>
--- a/tests/test-devel-warnings.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-devel-warnings.t Wed Jul 19 07:51:41 2017 -0500
@@ -3,25 +3,25 @@
> """A small extension that tests our developer warnings
> """
>
- > from mercurial import cmdutil, repair, util
+ > from mercurial import error, registrar, repair, util
>
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
>
- > @command('buggylocking', [], '')
+ > @command(b'buggylocking', [], '')
> def buggylocking(ui, repo):
> lo = repo.lock()
> wl = repo.wlock()
> wl.release()
> lo.release()
>
- > @command('buggytransaction', [], '')
+ > @command(b'buggytransaction', [], '')
> def buggylocking(ui, repo):
> tr = repo.transaction('buggy')
> # make sure we rollback the transaction as we don't want to rely on the__del__
> tr.release()
>
- > @command('properlocking', [], '')
+ > @command(b'properlocking', [], '')
> def properlocking(ui, repo):
> """check that reentrance is fine"""
> wl = repo.wlock()
@@ -37,14 +37,24 @@
> lo.release()
> wl.release()
>
- > @command('nowaitlocking', [], '')
+ > @command(b'nowaitlocking', [], '')
> def nowaitlocking(ui, repo):
> lo = repo.lock()
> wl = repo.wlock(wait=False)
> wl.release()
> lo.release()
>
- > @command('stripintr', [], '')
+ > @command(b'no-wlock-write', [], '')
+ > def nowlockwrite(ui, repo):
+ > with repo.vfs(b'branch', 'a'):
+ > pass
+ >
+ > @command(b'no-lock-write', [], '')
+ > def nolockwrite(ui, repo):
+ > with repo.svfs(b'fncache', 'a'):
+ > pass
+ >
+ > @command(b'stripintr', [], '')
> def stripintr(ui, repo):
> lo = repo.lock()
> tr = repo.transaction('foobar')
@@ -52,15 +62,18 @@
> repair.strip(repo.ui, repo, [repo['.'].node()])
> finally:
> lo.release()
- > @command('oldanddeprecated', [], '')
+ > @command(b'oldanddeprecated', [], '')
> def oldanddeprecated(ui, repo):
> """test deprecation warning API"""
> def foobar(ui):
> ui.deprecwarn('foorbar is deprecated, go shopping', '42.1337')
> foobar(ui)
- > @command('nouiwarning', [], '')
+ > @command(b'nouiwarning', [], '')
> def nouiwarning(ui, repo):
> util.nouideprecwarn('this is a test', '13.37')
+ > @command(b'programmingerror', [], '')
+ > def programmingerror(ui, repo):
+ > raise error.ProgrammingError('something went wrong', hint='try again')
> EOF
$ cat << EOF >> $HGRCPATH
@@ -101,11 +114,20 @@
$ hg properlocking
$ hg nowaitlocking
+Writing without lock
+
+ $ hg no-wlock-write
+ devel-warn: write with no wlock: "branch" at: $TESTTMP/buggylocking.py:* (nowlockwrite) (glob)
+
+ $ hg no-lock-write
+ devel-warn: write with no lock: "fncache" at: $TESTTMP/buggylocking.py:* (nolockwrite) (glob)
+
+Stripping from a transaction
+
$ echo a > a
$ hg add a
$ hg commit -m a
$ hg stripintr 2>&1 | egrep -v '^(\*\*| )'
- saved backup bundle to $TESTTMP/lock-checker/.hg/strip-backup/*-backup.hg (glob)
Traceback (most recent call last):
mercurial.error.ProgrammingError: cannot strip from inside a transaction
@@ -163,13 +185,27 @@
** Python * (glob)
** Mercurial Distributed SCM (*) (glob)
** Extensions loaded: * (glob)
+ ** ProgrammingError: transaction requires locking
Traceback (most recent call last):
mercurial.error.ProgrammingError: transaction requires locking
+ $ hg programmingerror 2>&1 | egrep -v '^ '
+ ** Unknown exception encountered with possibly-broken third-party extension buggylocking
+ ** which supports versions unknown of Mercurial.
+ ** Please disable buggylocking and try your action again.
+ ** If that fixes the bug please report it to the extension author.
+ ** Python * (glob)
+ ** Mercurial Distributed SCM (*) (glob)
+ ** Extensions loaded: * (glob)
+ ** ProgrammingError: something went wrong
+ ** (try again)
+ Traceback (most recent call last):
+ mercurial.error.ProgrammingError: something went wrong
+
Old style deprecation warning
$ hg nouiwarning
- $TESTTMP/buggylocking.py:61: DeprecationWarning: this is a test
+ $TESTTMP/buggylocking.py:*: DeprecationWarning: this is a test (glob)
(compatibility will be dropped after Mercurial-13.37, update your code.)
util.nouideprecwarn('this is a test', '13.37')
@@ -177,4 +213,40 @@
$ HGEMITWARNINGS= hg nouiwarning
+Test warning on config option access and registration
+
+ $ cat << EOF > ${TESTTMP}/buggyconfig.py
+ > """A small extension that tests our developer warnings for config"""
+ >
+ > from mercurial import registrar, configitems
+ >
+ > cmdtable = {}
+ > command = registrar.command(cmdtable)
+ >
+ > configtable = {}
+ > configitem = registrar.configitem(configtable)
+ >
+ > configitem('test', 'some', default='foo')
+ > configitem('test', 'dynamic', default=configitems.dynamicdefault)
+ > # overwrite a core config
+ > configitem('ui', 'quiet', default=False)
+ > configitem('ui', 'interactive', default=None)
+ >
+ > @command(b'buggyconfig')
+ > def cmdbuggyconfig(ui, repo):
+ > repo.ui.config('ui', 'quiet', False)
+ > repo.ui.config('ui', 'interactive', None)
+ > repo.ui.config('test', 'some', 'foo')
+ > repo.ui.config('test', 'dynamic', 'some-required-default')
+ > repo.ui.config('test', 'dynamic')
+ > EOF
+
+ $ hg --config "extensions.buggyconfig=${TESTTMP}/buggyconfig.py" buggyconfig
+ devel-warn: extension 'buggyconfig' overwrite config item 'ui.interactive' at: */mercurial/extensions.py:* (loadall) (glob)
+ devel-warn: extension 'buggyconfig' overwrite config item 'ui.quiet' at: */mercurial/extensions.py:* (loadall) (glob)
+ devel-warn: specifying a default value for a registered config item: 'ui.quiet' 'False' at: $TESTTMP/buggyconfig.py:* (cmdbuggyconfig) (glob)
+ devel-warn: specifying a default value for a registered config item: 'ui.interactive' 'None' at: $TESTTMP/buggyconfig.py:* (cmdbuggyconfig) (glob)
+ devel-warn: specifying a default value for a registered config item: 'test.some' 'foo' at: $TESTTMP/buggyconfig.py:* (cmdbuggyconfig) (glob)
+ devel-warn: config item requires an explicit default value: 'test.dynamic' at: $TESTTMP/buggyconfig.py:* (cmdbuggyconfig) (glob)
+
$ cd ..
--- a/tests/test-diff-binary-file.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-diff-binary-file.t Wed Jul 19 07:51:41 2017 -0500
@@ -83,7 +83,7 @@
> path = sys.argv[1]
> open(path, 'wb').write('\x00\x01\x02\x03')
> EOF
- $ python writebin.py binfile.bin
+ $ $PYTHON writebin.py binfile.bin
$ hg add binfile.bin
$ hg ci -m 'add binfile.bin'
--- a/tests/test-diff-newlines.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-diff-newlines.t Wed Jul 19 07:51:41 2017 -0500
@@ -1,6 +1,6 @@
$ hg init
- $ $PYTHON -c 'file("a", "wb").write("confuse str.splitlines\nembedded\rnewline\n")'
+ $ $PYTHON -c 'open("a", "wb").write(b"confuse str.splitlines\nembedded\rnewline\n")'
$ hg ci -Ama -d '1 0'
adding a
--- a/tests/test-diffstat.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-diffstat.t Wed Jul 19 07:51:41 2017 -0500
@@ -105,3 +105,83 @@
$ hg diff --stat --root . -I old
$ cd ..
+
+Files with lines beginning with '--' or '++' should be properly counted in diffstat
+
+ $ hg up -Cr tip
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ rm dir1/new
+ $ rm dir2/new
+ $ rm "file with spaces"
+ $ cat > file << EOF
+ > line 1
+ > line 2
+ > line 3
+ > EOF
+ $ hg commit -Am file
+ adding file
+
+Lines added starting with '--' should count as additions
+ $ cat > file << EOF
+ > line 1
+ > -- line 2, with dashes
+ > line 3
+ > EOF
+
+ $ hg diff --root .
+ diff -r be1569354b24 file
+ --- a/file Thu Jan 01 00:00:00 1970 +0000
+ +++ b/file * (glob)
+ @@ -1,3 +1,3 @@
+ line 1
+ -line 2
+ +-- line 2, with dashes
+ line 3
+
+ $ hg diff --root . --stat
+ file | 2 +-
+ 1 files changed, 1 insertions(+), 1 deletions(-)
+
+Lines changed starting with '--' should count as deletions
+ $ hg commit -m filev2
+ $ cat > file << EOF
+ > line 1
+ > -- line 2, with dashes, changed again
+ > line 3
+ > EOF
+
+ $ hg diff --root .
+ diff -r 160f7c034df6 file
+ --- a/file Thu Jan 01 00:00:00 1970 +0000
+ +++ b/file * (glob)
+ @@ -1,3 +1,3 @@
+ line 1
+ --- line 2, with dashes
+ +-- line 2, with dashes, changed again
+ line 3
+
+ $ hg diff --root . --stat
+ file | 2 +-
+ 1 files changed, 1 insertions(+), 1 deletions(-)
+
+Lines changed starting with '--' should count as deletions
+and starting with '++' should count as additions
+ $ cat > file << EOF
+ > line 1
+ > ++ line 2, switched dashes to plusses
+ > line 3
+ > EOF
+
+ $ hg diff --root .
+ diff -r 160f7c034df6 file
+ --- a/file Thu Jan 01 00:00:00 1970 +0000
+ +++ b/file * (glob)
+ @@ -1,3 +1,3 @@
+ line 1
+ --- line 2, with dashes
+ +++ line 2, switched dashes to plusses
+ line 3
+
+ $ hg diff --root . --stat
+ file | 2 +-
+ 1 files changed, 1 insertions(+), 1 deletions(-)
--- a/tests/test-dirstate-race.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-dirstate-race.t Wed Jul 19 07:51:41 2017 -0500
@@ -45,7 +45,7 @@
#endif
$ hg add b dir1 d e
- adding dir1/c
+ adding dir1/c (glob)
$ hg commit -m test2
$ cat >> $TESTTMP/dirstaterace.py << EOF
@@ -57,7 +57,8 @@
> extensions.wrapfunction(context.workingctx, '_checklookup', overridechecklookup)
> def overridechecklookup(orig, self, files):
> # make an update that changes the dirstate from underneath
- > self._repo.ui.system(self._repo.ui.config('dirstaterace', 'command'), cwd=self._repo.root)
+ > self._repo.ui.system(r"sh '$TESTTMP/dirstaterace.sh'",
+ > cwd=self._repo.root)
> return orig(self, files)
> EOF
@@ -73,8 +74,11 @@
definitely a bug, but the fix for that is hard and the next status run is fine
anyway.
- $ hg status --config extensions.dirstaterace=$TESTTMP/dirstaterace.py \
- > --config dirstaterace.command='rm b && rm -r dir1 && rm d && mkdir d && rm e && mkdir e'
+ $ cat > $TESTTMP/dirstaterace.sh <<EOF
+ > rm b && rm -r dir1 && rm d && mkdir d && rm e && mkdir e
+ > EOF
+
+ $ hg status --config extensions.dirstaterace=$TESTTMP/dirstaterace.py
M d
M e
! b
@@ -91,3 +95,142 @@
! d
! dir1/c
! e
+
+ $ rmdir d e
+ $ hg update -C -q .
+
+Test that dirstate changes aren't written out at the end of "hg
+status", if .hg/dirstate is already changed simultaneously before
+acquisition of wlock in workingctx._poststatusfixup().
+
+This avoidance is important to keep consistency of dirstate in race
+condition (see issue5584 for detail).
+
+ $ hg parents -q
+ 1:* (glob)
+
+ $ hg debugrebuilddirstate
+ $ hg debugdirstate
+ n 0 -1 unset a
+ n 0 -1 unset b
+ n 0 -1 unset d
+ n 0 -1 unset dir1/c
+ n 0 -1 unset e
+
+ $ cat > $TESTTMP/dirstaterace.sh <<EOF
+ > # This script assumes timetable of typical issue5584 case below:
+ > #
+ > # 1. "hg status" loads .hg/dirstate
+ > # 2. "hg status" confirms clean-ness of FILE
+ > # 3. "hg update -C 0" updates the working directory simultaneously
+ > # (FILE is removed, and FILE is dropped from .hg/dirstate)
+ > # 4. "hg status" acquires wlock
+ > # (.hg/dirstate is re-loaded = no FILE entry in dirstate)
+ > # 5. "hg status" marks FILE in dirstate as clean
+ > # (FILE entry is added to in-memory dirstate)
+ > # 6. "hg status" writes dirstate changes into .hg/dirstate
+ > # (FILE entry is written into .hg/dirstate)
+ > #
+ > # To reproduce similar situation easily and certainly, #2 and #3
+ > # are swapped. "hg cat" below ensures #2 on "hg status" side.
+ >
+ > hg update -q -C 0
+ > hg cat -r 1 b > b
+ > EOF
+
+"hg status" below should excludes "e", of which exec flag is set, for
+portability of test scenario, because unsure but missing "e" is
+treated differently in _checklookup() according to runtime platform.
+
+- "missing(!)" on POSIX, "pctx[f].cmp(self[f])" raises ENOENT
+- "modified(M)" on Windows, "self.flags(f) != pctx.flags(f)" is True
+
+ $ hg status --config extensions.dirstaterace=$TESTTMP/dirstaterace.py --debug -X path:e
+ skip updating dirstate: identity mismatch
+ M a
+ ! d
+ ! dir1/c
+
+ $ hg parents -q
+ 0:* (glob)
+ $ hg files
+ a
+ $ hg debugdirstate
+ n * * * a (glob)
+
+ $ rm b
+
+#if fsmonitor
+
+Create fsmonitor state.
+
+ $ hg status
+ $ f --type .hg/fsmonitor.state
+ .hg/fsmonitor.state: file
+
+Test that invalidating fsmonitor state in the middle (which doesn't require the
+wlock) causes the fsmonitor update to be skipped.
+hg debugrebuilddirstate ensures that the dirstaterace hook will be called, but
+it also invalidates the fsmonitor state. So back it up and restore it.
+
+ $ mv .hg/fsmonitor.state .hg/fsmonitor.state.tmp
+ $ hg debugrebuilddirstate
+ $ mv .hg/fsmonitor.state.tmp .hg/fsmonitor.state
+
+ $ cat > $TESTTMP/dirstaterace.sh <<EOF
+ > rm .hg/fsmonitor.state
+ > EOF
+
+ $ hg status --config extensions.dirstaterace=$TESTTMP/dirstaterace.py --debug
+ skip updating fsmonitor.state: identity mismatch
+ $ f .hg/fsmonitor.state
+ .hg/fsmonitor.state: file not found
+
+#endif
+
+Set up a rebase situation for issue5581.
+
+ $ echo c2 > a
+ $ echo c2 > b
+ $ hg add b
+ $ hg commit -m c2
+ created new head
+ $ echo c3 >> a
+ $ hg commit -m c3
+ $ hg update 2
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo c4 >> a
+ $ echo c4 >> b
+ $ hg commit -m c4
+ created new head
+
+Configure a merge tool that runs status in the middle of the rebase.
+
+ $ cat >> $TESTTMP/mergetool-race.sh << EOF
+ > echo "custom merge tool"
+ > printf "c2\nc3\nc4\n" > \$1
+ > hg --cwd "$TESTTMP/repo" status
+ > echo "custom merge tool end"
+ > EOF
+ $ cat >> $HGRCPATH << EOF
+ > [extensions]
+ > rebase =
+ > [merge-tools]
+ > test.executable=sh
+ > test.args=$TESTTMP/mergetool-race.sh \$output
+ > EOF
+
+BROKEN: the "M b" line should not be there
+ $ hg rebase -s . -d 3 --tool test
+ rebasing 4:b08445fd6b2a "c4" (tip)
+ merging a
+ custom merge tool
+ M a
+ M b
+ ? a.orig
+ custom merge tool end
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/* (glob)
+
+This hg status should be empty, whether or not fsmonitor is enabled (issue5581).
+
+ $ hg status
--- a/tests/test-docker-packaging.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-docker-packaging.t Wed Jul 19 07:51:41 2017 -0500
@@ -1,6 +1,7 @@
#require test-repo slow docker
$ . "$TESTDIR/helpers-testrepo.sh"
+ $ testrepohgenv
Ensure debuild doesn't run the testsuite, as that could get silly.
$ DEB_BUILD_OPTIONS=nocheck
--- a/tests/test-doctest.py Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-doctest.py Wed Jul 19 07:51:41 2017 -0500
@@ -25,6 +25,7 @@
testmod('mercurial.changelog')
testmod('mercurial.color')
testmod('mercurial.config')
+testmod('mercurial.context')
testmod('mercurial.dagparser', optionflags=doctest.NORMALIZE_WHITESPACE)
testmod('mercurial.dispatch')
testmod('mercurial.encoding')
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-drawdag.t Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,234 @@
+ $ cat >> $HGRCPATH<<EOF
+ > [extensions]
+ > drawdag=$TESTDIR/drawdag.py
+ > [experimental]
+ > evolution=all
+ > EOF
+
+ $ reinit () {
+ > rm -rf .hg && hg init
+ > }
+
+ $ hg init
+
+Test what said in drawdag.py docstring
+
+ $ hg debugdrawdag <<'EOS'
+ > c d
+ > |/
+ > b
+ > |
+ > a
+ > EOS
+
+ $ hg log -G -T '{rev} {desc} ({tags})'
+ o 3 d (d tip)
+ |
+ | o 2 c (c)
+ |/
+ o 1 b (b)
+ |
+ o 0 a (a)
+
+ $ hg debugdrawdag <<'EOS'
+ > foo bar bar foo
+ > | / | |
+ > ancestor(c,d) a baz
+ > EOS
+
+ $ hg log -G -T '{desc}'
+ o foo
+ |\
+ +---o bar
+ | | |
+ | o | baz
+ | /
+ +---o d
+ | |
+ +---o c
+ | |
+ o | b
+ |/
+ o a
+
+ $ reinit
+
+ $ hg debugdrawdag <<'EOS'
+ > o foo
+ > |\
+ > +---o bar
+ > | | |
+ > | o | baz
+ > | /
+ > +---o d
+ > | |
+ > +---o c
+ > | |
+ > o | b
+ > |/
+ > o a
+ > EOS
+
+ $ hg log -G -T '{desc}'
+ o foo
+ |\
+ | | o d
+ | |/
+ | | o c
+ | |/
+ | | o bar
+ | |/|
+ | o | b
+ | |/
+ o / baz
+ /
+ o a
+
+ $ reinit
+
+ $ hg debugdrawdag <<'EOS'
+ > o foo
+ > |\
+ > | | o d
+ > | |/
+ > | | o c
+ > | |/
+ > | | o bar
+ > | |/|
+ > | o | b
+ > | |/
+ > o / baz
+ > /
+ > o a
+ > EOS
+
+ $ hg log -G -T '{desc}'
+ o foo
+ |\
+ | | o d
+ | |/
+ | | o c
+ | |/
+ | | o bar
+ | |/|
+ | o | b
+ | |/
+ o / baz
+ /
+ o a
+
+ $ hg manifest -r a
+ a
+ $ hg manifest -r b
+ a
+ b
+ $ hg manifest -r bar
+ a
+ b
+ $ hg manifest -r foo
+ a
+ b
+ baz
+
+Edges existed in repo are no-ops
+
+ $ reinit
+ $ hg debugdrawdag <<'EOS'
+ > B C C
+ > | | |
+ > A A B
+ > EOS
+
+ $ hg log -G -T '{desc}'
+ o C
+ |\
+ | o B
+ |/
+ o A
+
+
+ $ hg debugdrawdag <<'EOS'
+ > C D C
+ > | | |
+ > B B A
+ > EOS
+
+ $ hg log -G -T '{desc}'
+ o D
+ |
+ | o C
+ |/|
+ o | B
+ |/
+ o A
+
+
+Node with more than 2 parents are disallowed
+
+ $ hg debugdrawdag <<'EOS'
+ > A
+ > /|\
+ > D B C
+ > EOS
+ abort: A: too many parents: C D B
+ [255]
+
+Cycles are disallowed
+
+ $ hg debugdrawdag <<'EOS'
+ > A
+ > |
+ > A
+ > EOS
+ abort: the graph has cycles
+ [255]
+
+ $ hg debugdrawdag <<'EOS'
+ > A
+ > |
+ > B
+ > |
+ > A
+ > EOS
+ abort: the graph has cycles
+ [255]
+
+Create obsmarkers via comments
+
+ $ reinit
+
+ $ hg debugdrawdag <<'EOS'
+ > G
+ > |
+ > I D C F # split: B -> E, F, G
+ > \ \| | # replace: C -> D -> H
+ > H B E # prune: F, I
+ > \|/
+ > A
+ > EOS
+
+ $ hg log -r 'sort(all(), topo)' -G --hidden -T '{desc} {node}'
+ o G 711f53bbef0bebd12eb6f0511d5e2e998b984846
+ |
+ x F 64a8289d249234b9886244d379f15e6b650b28e3
+ |
+ o E 7fb047a69f220c21711122dfd94305a9efb60cba
+ |
+ | x D be0ef73c17ade3fc89dc41701eb9fc3a91b58282
+ | |
+ | | x C 26805aba1e600a82e93661149f2313866a221a7b
+ | |/
+ | x B 112478962961147124edd43549aedd1a335e44bf
+ |/
+ | x I 58e6b987bf7045fcd9c54f496396ca1d1fc81047
+ | |
+ | o H 575c4b5ec114d64b681d33f8792853568bfb2b2c
+ |/
+ o A 426bada5c67598ca65036d57d9e4b64b0c1ce7a0
+
+ $ hg debugobsolete
+ 112478962961147124edd43549aedd1a335e44bf 7fb047a69f220c21711122dfd94305a9efb60cba 64a8289d249234b9886244d379f15e6b650b28e3 711f53bbef0bebd12eb6f0511d5e2e998b984846 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 26805aba1e600a82e93661149f2313866a221a7b be0ef73c17ade3fc89dc41701eb9fc3a91b58282 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ be0ef73c17ade3fc89dc41701eb9fc3a91b58282 575c4b5ec114d64b681d33f8792853568bfb2b2c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 64a8289d249234b9886244d379f15e6b650b28e3 0 {7fb047a69f220c21711122dfd94305a9efb60cba} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 58e6b987bf7045fcd9c54f496396ca1d1fc81047 0 {575c4b5ec114d64b681d33f8792853568bfb2b2c} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
--- a/tests/test-duplicateoptions.py Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-duplicateoptions.py Wed Jul 19 07:51:41 2017 -0500
@@ -6,7 +6,7 @@
ui as uimod,
)
-ignore = set(['highlight', 'win32text', 'factotum'])
+ignore = {'highlight', 'win32text', 'factotum'}
if os.name != 'nt':
ignore.add('win32mbcs')
--- a/tests/test-encoding-align.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-encoding-align.t Wed Jul 19 07:51:41 2017 -0500
@@ -4,7 +4,7 @@
$ export HGENCODING
$ hg init t
$ cd t
- $ python << EOF
+ $ $PYTHON << EOF
> # (byte, width) = (6, 4)
> s = "\xe7\x9f\xad\xe5\x90\x8d"
> # (byte, width) = (7, 7): odd width is good for alignment test
@@ -16,11 +16,11 @@
> f = file('l', 'w'); f.write(l); f.close()
> # instant extension to show list of options
> f = file('showoptlist.py', 'w'); f.write("""# encoding: utf-8
- > from mercurial import cmdutil
+ > from mercurial import registrar
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
>
- > @command('showoptlist',
+ > @command(b'showoptlist',
> [('s', 'opt1', '', 'short width' + ' %(s)s' * 8, '%(s)s'),
> ('m', 'opt2', '', 'middle width' + ' %(m)s' * 8, '%(m)s'),
> ('l', 'opt3', '', 'long width' + ' %(l)s' * 8, '%(l)s')],
--- a/tests/test-encoding-textwrap.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-encoding-textwrap.t Wed Jul 19 07:51:41 2017 -0500
@@ -6,13 +6,13 @@
define commands to display help text
$ cat << EOF > show.py
- > from mercurial import cmdutil
+ > from mercurial import registrar
>
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
>
> # Japanese full-width characters:
- > @command('show_full_ja', [], '')
+ > @command(b'show_full_ja', [], '')
> def show_full_ja(ui, **opts):
> u'''\u3042\u3044\u3046\u3048\u304a\u304b\u304d\u304f\u3051 \u3042\u3044\u3046\u3048\u304a\u304b\u304d\u304f\u3051 \u3042\u3044\u3046\u3048\u304a\u304b\u304d\u304f\u3051
>
@@ -22,7 +22,7 @@
> '''
>
> # Japanese half-width characters:
- > @command('show_half_ja', [], '')
+ > @command(b'show_half_ja', [], '')
> def show_half_ja(ui, *opts):
> u'''\uff71\uff72\uff73\uff74\uff75\uff76\uff77\uff78\uff79 \uff71\uff72\uff73\uff74\uff75\uff76\uff77\uff78\uff79 \uff71\uff72\uff73\uff74\uff75\uff76\uff77\uff78\uff79 \uff71\uff72\uff73\uff74\uff75\uff76\uff77\uff78\uff79
>
@@ -32,7 +32,7 @@
> '''
>
> # Japanese ambiguous-width characters:
- > @command('show_ambig_ja', [], '')
+ > @command(b'show_ambig_ja', [], '')
> def show_ambig_ja(ui, **opts):
> u'''\u03b1\u03b2\u03b3\u03b4\u03c5\u03b6\u03b7\u03b8\u25cb \u03b1\u03b2\u03b3\u03b4\u03c5\u03b6\u03b7\u03b8\u25cb \u03b1\u03b2\u03b3\u03b4\u03c5\u03b6\u03b7\u03b8\u25cb
>
@@ -42,7 +42,7 @@
> '''
>
> # Russian ambiguous-width characters:
- > @command('show_ambig_ru', [], '')
+ > @command(b'show_ambig_ru', [], '')
> def show_ambig_ru(ui, **opts):
> u'''\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0438 \u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0438 \u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0438 \u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0438 \u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0438
>
--- a/tests/test-encoding.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-encoding.t Wed Jul 19 07:51:41 2017 -0500
@@ -13,7 +13,7 @@
(run 'hg update' to get a working copy)
$ hg co
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ python << EOF
+ $ $PYTHON << EOF
> f = file('latin-1', 'w'); f.write("latin-1 e' encoded: \xe9"); f.close()
> f = file('utf-8', 'w'); f.write("utf-8 e' encoded: \xc3\xa9"); f.close()
> f = file('latin-1-tag', 'w'); f.write("\xe9"); f.close()
--- a/tests/test-eol.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-eol.t Wed Jul 19 07:51:41 2017 -0500
@@ -69,7 +69,7 @@
> echo '% a.txt'
> cat a.txt
> hg diff
- > python ../switch-eol.py $1 a.txt
+ > $PYTHON ../switch-eol.py $1 a.txt
> echo '% hg diff only reports a single changed line:'
> hg diff
> echo "% reverting back to $1 format"
--- a/tests/test-eolfilename.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-eolfilename.t Wed Jul 19 07:51:41 2017 -0500
@@ -33,6 +33,7 @@
[255]
$ echo foo > "$A"
$ hg debugwalk
+ matcher: <alwaysmatcher>
f he\r (no-eol) (esc)
llo he\r (no-eol) (esc)
llo
--- a/tests/test-exchange-obsmarkers-case-A3.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-exchange-obsmarkers-case-A3.t Wed Jul 19 07:51:41 2017 -0500
@@ -73,7 +73,9 @@
$ mkcommit B1
created new head
$ hg debugobsolete `getid 'desc(A0)'` `getid 'desc(A1)'`
+ obsoleted 1 changesets
$ hg debugobsolete `getid 'desc(B0)'` `getid 'desc(B1)'`
+ obsoleted 1 changesets
$ hg log -G --hidden
@ f6298a8ac3a4 (draft): B1
|
@@ -159,7 +161,9 @@
$ mkcommit B1
created new head
$ hg debugobsolete `getid 'desc(A0)'` `getid 'desc(A1)'`
+ obsoleted 1 changesets
$ hg debugobsolete `getid 'desc(B0)'` `getid 'desc(B1)'`
+ obsoleted 1 changesets
$ hg log -G --hidden
@ f6298a8ac3a4 (draft): B1
|
@@ -212,6 +216,7 @@
remote: adding file changes
remote: added 1 changesets with 1 changes to 1 files (+1 heads)
remote: 1 new obsolescence markers
+ remote: obsoleted 1 changesets
## post push state
# obstore: main
28b51eb45704506b5c603decd6bf7ac5e0f6a52f e5ea8f9c73143125d36658e90ef70c6d2027a5b7 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
@@ -227,6 +232,7 @@
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
1 new obsolescence markers
+ obsoleted 1 changesets
(run 'hg heads' to see heads, 'hg merge' to merge)
## post pull state
# obstore: main
--- a/tests/test-exchange-obsmarkers-case-A4.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-exchange-obsmarkers-case-A4.t Wed Jul 19 07:51:41 2017 -0500
@@ -64,6 +64,7 @@
created new head
$ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa `getid 'desc(A0)'`
$ hg debugobsolete `getid 'desc(A0)'` `getid 'desc(A1)'`
+ obsoleted 1 changesets
$ hg log -G --hidden
@ e5ea8f9c7314 (draft): A1
|
--- a/tests/test-exchange-obsmarkers-case-A5.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-exchange-obsmarkers-case-A5.t Wed Jul 19 07:51:41 2017 -0500
@@ -66,7 +66,9 @@
$ mkcommit A1
$ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa `getid 'desc(A0)'`
$ hg debugobsolete `getid 'desc(B0)'` `getid 'desc(B1)'`
+ obsoleted 1 changesets
$ hg debugobsolete `getid 'desc(A0)'` `getid 'desc(A1)'`
+ obsoleted 1 changesets
$ hg log -G --hidden
@ 8c0a98c83722 (draft): A1
|
--- a/tests/test-exchange-obsmarkers-case-A6.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-exchange-obsmarkers-case-A6.t Wed Jul 19 07:51:41 2017 -0500
@@ -64,6 +64,7 @@
create a marker after this
$ hg debugobsolete `getid 'desc(A0)'` `getid 'desc(A1)'`
+ obsoleted 1 changesets
$ hg log -G --hidden
@ e5ea8f9c7314 (draft): A1
|
@@ -97,6 +98,7 @@
searching for changes
no changes found
remote: 1 new obsolescence markers
+ remote: obsoleted 1 changesets
## post push state
# obstore: main
28b51eb45704506b5c603decd6bf7ac5e0f6a52f e5ea8f9c73143125d36658e90ef70c6d2027a5b7 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
@@ -107,6 +109,7 @@
pulling from main
no changes found
1 new obsolescence markers
+ obsoleted 1 changesets
## post pull state
# obstore: main
28b51eb45704506b5c603decd6bf7ac5e0f6a52f e5ea8f9c73143125d36658e90ef70c6d2027a5b7 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
@@ -130,6 +133,7 @@
searching for changes
no changes found
remote: 1 new obsolescence markers
+ remote: obsoleted 1 changesets
## post push state
# obstore: main
28b51eb45704506b5c603decd6bf7ac5e0f6a52f e5ea8f9c73143125d36658e90ef70c6d2027a5b7 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
@@ -141,6 +145,7 @@
searching for changes
no changes found
1 new obsolescence markers
+ obsoleted 1 changesets
## post pull state
# obstore: main
28b51eb45704506b5c603decd6bf7ac5e0f6a52f e5ea8f9c73143125d36658e90ef70c6d2027a5b7 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
--- a/tests/test-exchange-obsmarkers-case-B5.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-exchange-obsmarkers-case-B5.t Wed Jul 19 07:51:41 2017 -0500
@@ -70,8 +70,10 @@
created new head
$ mkcommit B1
$ hg debugobsolete --hidden `getid 'desc(A0)'` `getid 'desc(A1)'`
+ obsoleted 1 changesets
$ hg debugobsolete --hidden aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa `getid 'desc(B0)'`
$ hg debugobsolete --hidden `getid 'desc(B0)'` `getid 'desc(B1)'`
+ obsoleted 1 changesets
$ hg prune -qd '0 0' 'desc(B1)'
$ hg log -G --hidden
x 069b05c3876d (draft): B1
--- a/tests/test-exchange-obsmarkers-case-B6.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-exchange-obsmarkers-case-B6.t Wed Jul 19 07:51:41 2017 -0500
@@ -57,6 +57,7 @@
$ mkcommit B1
created new head
$ hg debugobsolete `getid 'desc(B0)'` `getid 'desc(B1)'`
+ obsoleted 1 changesets
$ hg prune -qd '0 0' .
$ hg log -G --hidden
x f6298a8ac3a4 (draft): B1
--- a/tests/test-exchange-obsmarkers-case-C2.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-exchange-obsmarkers-case-C2.t Wed Jul 19 07:51:41 2017 -0500
@@ -62,6 +62,7 @@
$ mkcommit A1
created new head
$ hg debugobsolete `getid 'desc(A0)'` `getid 'desc(A1)'`
+ obsoleted 1 changesets
$ hg log -G --hidden
@ e5ea8f9c7314 (draft): A1
|
--- a/tests/test-exchange-obsmarkers-case-C3.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-exchange-obsmarkers-case-C3.t Wed Jul 19 07:51:41 2017 -0500
@@ -64,6 +64,7 @@
$ mkcommit A1
created new head
$ hg debugobsolete `getid 'desc(A0)'` `getid 'desc(A1)'`
+ obsoleted 1 changesets
$ hg prune -qd '0 0' .
$ hg log -G --hidden
x e5ea8f9c7314 (draft): A1
--- a/tests/test-exchange-obsmarkers-case-C4.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-exchange-obsmarkers-case-C4.t Wed Jul 19 07:51:41 2017 -0500
@@ -65,6 +65,7 @@
$ mkcommit C
created new head
$ hg debugobsolete --hidden `getid 'desc(A)'` `getid 'desc(B)'`
+ obsoleted 1 changesets
$ hg debugobsolete --hidden `getid 'desc(A)'` `getid 'desc(C)'`
$ hg prune -qd '0 0' .
$ hg log -G --hidden
--- a/tests/test-exchange-obsmarkers-case-D1.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-exchange-obsmarkers-case-D1.t Wed Jul 19 07:51:41 2017 -0500
@@ -61,7 +61,9 @@
$ mkcommit A1
created new head
$ hg debugobsolete `getid 'desc(A0)'` `getid 'desc(A1)'`
+ obsoleted 1 changesets
$ hg prune -d '0 0' 'desc(B)'
+ obsoleted 1 changesets
$ hg strip --hidden -q 'desc(A0)'
$ hg log -G --hidden
@ e5ea8f9c7314 (draft): A1
--- a/tests/test-exchange-obsmarkers-case-D2.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-exchange-obsmarkers-case-D2.t Wed Jul 19 07:51:41 2017 -0500
@@ -54,7 +54,9 @@
$ mkcommit A1
created new head
$ hg debugobsolete `getid 'desc(A0)'` `getid 'desc(A1)'`
+ obsoleted 1 changesets
$ hg prune --date '0 0' .
+ obsoleted 1 changesets
$ hg strip --hidden -q 'desc(A1)'
$ hg log -G --hidden
x 28b51eb45704 (draft): A0
--- a/tests/test-exchange-obsmarkers-case-D3.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-exchange-obsmarkers-case-D3.t Wed Jul 19 07:51:41 2017 -0500
@@ -57,7 +57,9 @@
created new head
$ mkcommit A1
$ hg debugobsolete `getid 'desc(A0)'` `getid 'desc(A1)'`
+ obsoleted 1 changesets
$ hg prune -d '0 0' .
+ obsoleted 1 changesets
$ hg strip --hidden -q 'desc(A1)'
$ hg log -G --hidden
@ 35b183996678 (draft): B
--- a/tests/test-exchange-obsmarkers-case-D4.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-exchange-obsmarkers-case-D4.t Wed Jul 19 07:51:41 2017 -0500
@@ -59,8 +59,10 @@
created new head
$ mkcommit B1
$ hg debugobsolete `getid 'desc(A0)'` aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+ obsoleted 1 changesets
$ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa `getid 'desc(A1)'`
$ hg debugobsolete `getid 'desc(B0)'` bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
+ obsoleted 1 changesets
$ hg debugobsolete bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb `getid 'desc(B1)'`
$ hg log -G --hidden
@ 069b05c3876d (draft): B1
--- a/tests/test-export.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-export.t Wed Jul 19 07:51:41 2017 -0500
@@ -137,6 +137,28 @@
foo-9
+foo-10
+Exporting wdir revision:
+
+ $ echo "foo-wdir" >> foo
+ $ hg export 'wdir()'
+ # HG changeset patch
+ # User test
+ # Date 0 0
+ # Thu Jan 01 00:00:00 1970 +0000
+ # Node ID ffffffffffffffffffffffffffffffffffffffff
+ # Parent f3acbafac161ec68f1598af38f794f28847ca5d3
+
+
+ diff -r f3acbafac161 foo
+ --- a/foo Thu Jan 01 00:00:00 1970 +0000
+ +++ b/foo Thu Jan 01 00:00:00 1970 +0000
+ @@ -10,3 +10,4 @@
+ foo-9
+ foo-10
+ foo-11
+ +foo-wdir
+ $ hg revert -q foo
+
No filename should be printed if stdout is specified explicitly:
$ hg export -v 1 -o -
--- a/tests/test-extdiff.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-extdiff.t Wed Jul 19 07:51:41 2017 -0500
@@ -62,15 +62,10 @@
Should diff cloned files directly:
-#if windows
$ hg falabala -r 0:1
- diffing "*\\extdiff.*\\a.8a5febb7f867\\a" "a.34eed99112ab\\a" (glob)
+ diffing "*\\extdiff.*\\a.8a5febb7f867\\a" "a.34eed99112ab\\a" (glob) (windows !)
+ diffing */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob) (no-windows !)
[1]
-#else
- $ hg falabala -r 0:1
- diffing */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob)
- [1]
-#endif
Specifying an empty revision should abort.
@@ -92,41 +87,27 @@
Should diff cloned file against wc file:
-#if windows
$ hg falabala
- diffing "*\\extdiff.*\\a.2a13a4d2da36\\a" "*\\a\\a" (glob)
+ diffing "*\\extdiff.*\\a.2a13a4d2da36\\a" "*\\a\\a" (glob) (windows !)
+ diffing */extdiff.*/a.2a13a4d2da36/a */a/a (glob) (no-windows !)
[1]
-#else
- $ hg falabala
- diffing */extdiff.*/a.2a13a4d2da36/a */a/a (glob)
- [1]
-#endif
Test --change option:
$ hg ci -d '2 0' -mtest3
-#if windows
+
$ hg falabala -c 1
- diffing "*\\extdiff.*\\a.8a5febb7f867\\a" "a.34eed99112ab\\a" (glob)
+ diffing "*\\extdiff.*\\a.8a5febb7f867\\a" "a.34eed99112ab\\a" (glob) (windows !)
+ diffing */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob) (no-windows !)
[1]
-#else
- $ hg falabala -c 1
- diffing */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob)
- [1]
-#endif
Check diff are made from the first parent:
-#if windows
$ hg falabala -c 3 || echo "diff-like tools yield a non-zero exit code"
- diffing "*\\extdiff.*\\a.2a13a4d2da36\\a" "a.46c0e4daeb72\\a" (glob)
+ diffing "*\\extdiff.*\\a.2a13a4d2da36\\a" "a.46c0e4daeb72\\a" (glob) (windows !)
+ diffing */extdiff.*/a.2a13a4d2da36/a a.46c0e4daeb72/a (glob) (no-windows !)
diff-like tools yield a non-zero exit code
-#else
- $ hg falabala -c 3 || echo "diff-like tools yield a non-zero exit code"
- diffing */extdiff.*/a.2a13a4d2da36/a a.46c0e4daeb72/a (glob)
- diff-like tools yield a non-zero exit code
-#endif
issue3153: ensure using extdiff with removed subrepos doesn't crash:
@@ -158,21 +139,16 @@
> EOF
$ hg update -q -C 0
$ echo a >> a
-#if windows
+
$ hg --debug 4463a | grep '^running'
- running 'echo a-naked \'single quoted\' "double quoted" "*\\a" "*\\a"' in */extdiff.* (glob)
+ running 'echo a-naked \'single quoted\' "double quoted" "*\\a" "*\\a"' in */extdiff.* (glob) (windows !)
+ running 'echo a-naked \'single quoted\' "double quoted" */a $TESTTMP/a/a' in */extdiff.* (glob) (no-windows !)
$ hg --debug 4463b | grep '^running'
- running 'echo b-naked \'single quoted\' "double quoted" "*\\a" "*\\a"' in */extdiff.* (glob)
+ running 'echo b-naked \'single quoted\' "double quoted" "*\\a" "*\\a"' in */extdiff.* (glob) (windows !)
+ running 'echo b-naked \'single quoted\' "double quoted" */a $TESTTMP/a/a' in */extdiff.* (glob) (no-windows !)
$ hg --debug echo | grep '^running'
- running '*echo* "*\\a" "*\\a"' in */extdiff.* (glob)
-#else
- $ hg --debug 4463a | grep '^running'
- running 'echo a-naked \'single quoted\' "double quoted" */a $TESTTMP/a/a' in */extdiff.* (glob)
- $ hg --debug 4463b | grep '^running'
- running 'echo b-naked \'single quoted\' "double quoted" */a $TESTTMP/a/a' in */extdiff.* (glob)
- $ hg --debug echo | grep '^running'
- running '*echo */a $TESTTMP/a/a' in */extdiff.* (glob)
-#endif
+ running '*echo* "*\\a" "*\\a"' in */extdiff.* (glob) (windows !)
+ running '*echo */a $TESTTMP/a/a' in */extdiff.* (glob) (no-windows !)
(getting options from other than extdiff section)
@@ -189,29 +165,22 @@
> [merge-tools]
> 4463b3.diffargs = b3-naked 'single quoted' "double quoted"
> EOF
-#if windows
+
$ hg --debug 4463b2 | grep '^running'
- running 'echo b2-naked \'single quoted\' "double quoted" "*\\a" "*\\a"' in */extdiff.* (glob)
+ running 'echo b2-naked \'single quoted\' "double quoted" "*\\a" "*\\a"' in */extdiff.* (glob) (windows !)
+ running 'echo b2-naked \'single quoted\' "double quoted" */a $TESTTMP/a/a' in */extdiff.* (glob) (no-windows !)
$ hg --debug 4463b3 | grep '^running'
- running 'echo b3-naked \'single quoted\' "double quoted" "*\\a" "*\\a"' in */extdiff.* (glob)
+ running 'echo b3-naked \'single quoted\' "double quoted" "*\\a" "*\\a"' in */extdiff.* (glob) (windows !)
+ running 'echo b3-naked \'single quoted\' "double quoted" */a $TESTTMP/a/a' in */extdiff.* (glob) (no-windows !)
$ hg --debug 4463b4 | grep '^running'
- running 'echo "*\\a" "*\\a"' in */extdiff.* (glob)
+ running 'echo "*\\a" "*\\a"' in */extdiff.* (glob) (windows !)
+ running 'echo */a $TESTTMP/a/a' in */extdiff.* (glob) (no-windows !)
$ hg --debug 4463b4 --option b4-naked --option 'being quoted' | grep '^running'
- running 'echo b4-naked "being quoted" "*\\a" "*\\a"' in */extdiff.* (glob)
+ running 'echo b4-naked "being quoted" "*\\a" "*\\a"' in */extdiff.* (glob) (windows !)
+ running "echo b4-naked 'being quoted' */a $TESTTMP/a/a" in */extdiff.* (glob) (no-windows !)
$ hg --debug extdiff -p echo --option echo-naked --option 'being quoted' | grep '^running'
- running 'echo echo-naked "being quoted" "*\\a" "*\\a"' in */extdiff.* (glob)
-#else
- $ hg --debug 4463b2 | grep '^running'
- running 'echo b2-naked \'single quoted\' "double quoted" */a $TESTTMP/a/a' in */extdiff.* (glob)
- $ hg --debug 4463b3 | grep '^running'
- running 'echo b3-naked \'single quoted\' "double quoted" */a $TESTTMP/a/a' in */extdiff.* (glob)
- $ hg --debug 4463b4 | grep '^running'
- running 'echo */a $TESTTMP/a/a' in */extdiff.* (glob)
- $ hg --debug 4463b4 --option b4-naked --option 'being quoted' | grep '^running'
- running "echo b4-naked 'being quoted' */a $TESTTMP/a/a" in */extdiff.* (glob)
- $ hg --debug extdiff -p echo --option echo-naked --option 'being quoted' | grep '^running'
- running "echo echo-naked 'being quoted' */a $TESTTMP/a/a" in */extdiff.* (glob)
-#endif
+ running 'echo echo-naked "being quoted" "*\\a" "*\\a"' in */extdiff.* (glob) (windows !)
+ running "echo echo-naked 'being quoted' */a $TESTTMP/a/a" in */extdiff.* (glob) (no-windows !)
$ touch 'sp ace'
$ hg add 'sp ace'
@@ -228,12 +197,10 @@
> odd.diffargs = --foo='\$clabel' '\$clabel' "--bar=\$clabel" "\$clabel"
> odd.executable = echo
> EOF
-#if windows
-TODO
-#else
+
$ hg --debug odd | grep '^running'
- running "*/echo --foo='sp ace' 'sp ace' --bar='sp ace' 'sp ace'" in * (glob)
-#endif
+ running '"*\\echo.exe" --foo="sp ace" "sp ace" --bar="sp ace" "sp ace"' in * (glob) (windows !)
+ running "*/echo --foo='sp ace' 'sp ace' --bar='sp ace' 'sp ace'" in * (glob) (no-windows !)
Empty argument must be quoted
@@ -243,22 +210,20 @@
> [merge-tools]
> kdiff3.diffargs=--L1 \$plabel1 --L2 \$clabel \$parent \$child
> EOF
-#if windows
- $ hg --debug kdiff3 -r0 | grep '^running'
- running 'echo --L1 "@0" --L2 "" a.8a5febb7f867 a' in * (glob)
-#else
+
$ hg --debug kdiff3 -r0 | grep '^running'
- running "echo --L1 '@0' --L2 '' a.8a5febb7f867 a" in * (glob)
-#endif
+ running 'echo --L1 "@0" --L2 "" a.8a5febb7f867 a' in * (glob) (windows !)
+ running "echo --L1 '@0' --L2 '' a.8a5febb7f867 a" in * (glob) (no-windows !)
-#if execbit
Test extdiff of multiple files in tmp dir:
$ hg update -C 0 > /dev/null
$ echo changed > a
$ echo changed > b
+#if execbit
$ chmod +x b
+#endif
Diff in working directory, before:
@@ -270,8 +235,8 @@
-a
+changed
diff --git a/b b/b
- old mode 100644
- new mode 100755
+ old mode 100644 (execbit !)
+ new mode 100755 (execbit !)
--- a/b
+++ b/b
@@ -1,1 +1,1 @@
@@ -284,14 +249,16 @@
Prepare custom diff/edit tool:
$ cat > 'diff tool.py' << EOT
- > #!/usr/bin/env python
+ > #!$PYTHON
> import time
> time.sleep(1) # avoid unchanged-timestamp problems
> file('a/a', 'ab').write('edited\n')
> file('a/b', 'ab').write('edited\n')
> EOT
+#if execbit
$ chmod +x 'diff tool.py'
+#endif
will change to /tmp/extdiff.TMP and populate directories a.TMP and a
and start tool
@@ -310,8 +277,8 @@
+changed
+edited
diff --git a/b b/b
- old mode 100644
- new mode 100755
+ old mode 100644 (execbit !)
+ new mode 100755 (execbit !)
--- a/b
+++ b/b
@@ -1,1 +1,2 @@
@@ -322,41 +289,93 @@
Test extdiff with --option:
$ hg extdiff -p echo -o this -c 1
- this */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob)
+ this "*\\a.8a5febb7f867\\a" "a.34eed99112ab\\a" (glob) (windows !)
+ this */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob) (no-windows !)
[1]
$ hg falabala -o this -c 1
- diffing this */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob)
+ diffing this "*\\a.8a5febb7f867\\a" "a.34eed99112ab\\a" (glob) (windows !)
+ diffing this */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob) (no-windows !)
[1]
Test extdiff's handling of options with spaces in them:
$ hg edspace -c 1
- name <user@example.com> */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob)
+ "name <user@example.com>" "*\\a.8a5febb7f867\\a" "a.34eed99112ab\\a" (glob) (windows !)
+ name <user@example.com> */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob) (no-windows !)
[1]
$ hg extdiff -p echo -o "name <user@example.com>" -c 1
- name <user@example.com> */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob)
+ "name <user@example.com>" "*\\a.8a5febb7f867\\a" "a.34eed99112ab\\a" (glob) (windows !)
+ name <user@example.com> */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob) (no-windows !)
[1]
Test with revsets:
$ hg extdif -p echo -c "rev(1)"
- */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob)
+ "*\\a.8a5febb7f867\\a" "a.34eed99112ab\\a" (glob) (windows !)
+ */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob) (no-windows !)
[1]
$ hg extdif -p echo -r "0::1"
- */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob)
+ "*\\a.8a5febb7f867\\a" "a.34eed99112ab\\a" (glob) (windows !)
+ */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob) (no-windows !)
[1]
Fallback to merge-tools.tool.executable|regkey
$ mkdir dir
- $ cat > 'dir/tool.sh' << EOF
+ $ cat > 'dir/tool.sh' << 'EOF'
> #!/bin/sh
+ > # Mimic a tool that syncs all attrs, including mtime
+ > cp $1/a $2/a
+ > touch -r $1/a $2/a
+ > chmod +x $2/a
> echo "** custom diff **"
> EOF
+#if execbit
$ chmod +x dir/tool.sh
+#endif
+
+Windows can't run *.sh directly, so create a shim executable that can be.
+Without something executable, the next hg command will try to run `tl` instead
+of $tool (and fail).
+#if windows
+ $ cat > dir/tool.bat <<EOF
+ > @sh -c "`pwd`/dir/tool.sh %1 %2"
+ > EOF
+ $ tool=`pwd`/dir/tool.bat
+#else
$ tool=`pwd`/dir/tool.sh
+#endif
+
+ $ cat a
+ changed
+ edited
+ $ hg --debug tl --config extdiff.tl= --config merge-tools.tl.executable=$tool
+ making snapshot of 2 files from rev * (glob)
+ a
+ b
+ making snapshot of 2 files from working directory
+ a
+ b
+ running '$TESTTMP/a/dir/tool.bat a.* a' in */extdiff.* (glob) (windows !)
+ running '$TESTTMP/a/dir/tool.sh a.* a' in */extdiff.* (glob) (no-windows !)
+ ** custom diff **
+ file changed while diffing. Overwriting: $TESTTMP/a/a (src: */extdiff.*/a/a) (glob)
+ cleaning up temp directory
+ [1]
+ $ cat a
+ a
+
+#if execbit
+ $ [ -x a ]
+
+ $ cat > 'dir/tool.sh' << 'EOF'
+ > #!/bin/sh
+ > chmod -x $2/a
+ > echo "** custom diff **"
+ > EOF
+
$ hg --debug tl --config extdiff.tl= --config merge-tools.tl.executable=$tool
making snapshot of 2 files from rev * (glob)
a
@@ -366,12 +385,15 @@
b
running '$TESTTMP/a/dir/tool.sh a.* a' in */extdiff.* (glob)
** custom diff **
+ file changed while diffing. Overwriting: $TESTTMP/a/a (src: */extdiff.*/a/a) (glob)
cleaning up temp directory
[1]
- $ cd ..
+ $ [ -x a ]
+ [1]
+#endif
-#endif
+ $ cd ..
#if symlink
--- a/tests/test-extension.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-extension.t Wed Jul 19 07:51:41 2017 -0500
@@ -2,9 +2,12 @@
$ cat > foobar.py <<EOF
> import os
- > from mercurial import cmdutil, commands
+ > from mercurial import commands, registrar
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
+ > configtable = {}
+ > configitem = registrar.configitem(configtable)
+ > configitem('tests', 'foo', default="Foo")
> def uisetup(ui):
> ui.write("uisetup called\\n")
> ui.flush()
@@ -12,10 +15,12 @@
> ui.write("reposetup called for %s\\n" % os.path.basename(repo.root))
> ui.write("ui %s= repo.ui\\n" % (ui == repo.ui and "=" or "!"))
> ui.flush()
- > @command('foo', [], 'hg foo')
+ > @command(b'foo', [], 'hg foo')
> def foo(ui, *args, **kwargs):
- > ui.write("Foo\\n")
- > @command('bar', [], 'hg bar', norepo=True)
+ > foo = ui.config('tests', 'foo')
+ > ui.write(foo)
+ > ui.write("\\n")
+ > @command(b'bar', [], 'hg bar', norepo=True)
> def bar(ui, *args, **kwargs):
> ui.write("Bar\\n")
> EOF
@@ -77,15 +82,49 @@
> print "3) %s extsetup" % name
> def reposetup(ui, repo):
> print "4) %s reposetup" % name
+ >
+ > # custom predicate to check registration of functions at loading
+ > from mercurial import (
+ > registrar,
+ > smartset,
+ > )
+ > revsetpredicate = registrar.revsetpredicate()
+ > @revsetpredicate(name, safe=True) # safe=True for query via hgweb
+ > def custompredicate(repo, subset, x):
+ > return smartset.baseset([r for r in subset if r in {0}])
> EOF
$ cp foo.py bar.py
$ echo 'foo = foo.py' >> $HGRCPATH
$ echo 'bar = bar.py' >> $HGRCPATH
-Command with no output, we just want to see the extensions loaded:
+Check normal command's load order of extensions and registration of functions
+
+ $ hg log -r "foo() and bar()" -q
+ 1) foo imported
+ 1) bar imported
+ 2) foo uisetup
+ 2) bar uisetup
+ 3) foo extsetup
+ 3) bar extsetup
+ 4) foo reposetup
+ 4) bar reposetup
+ 0:c24b9ac61126
- $ hg paths
+Check hgweb's load order of extensions and registration of functions
+
+ $ cat > hgweb.cgi <<EOF
+ > #!$PYTHON
+ > from mercurial import demandimport; demandimport.enable()
+ > from mercurial.hgweb import hgweb
+ > from mercurial.hgweb import wsgicgi
+ > application = hgweb('.', 'test repo')
+ > wsgicgi.launch(application)
+ > EOF
+ $ . "$TESTDIR/cgienv"
+
+ $ PATH_INFO='/' SCRIPT_NAME='' $PYTHON hgweb.cgi \
+ > | grep '^[0-9]) ' # ignores HTML output
1) foo imported
1) bar imported
2) foo uisetup
@@ -95,28 +134,17 @@
4) foo reposetup
4) bar reposetup
-Check hgweb's load order:
-
- $ cat > hgweb.cgi <<EOF
- > #!/usr/bin/env python
- > from mercurial import demandimport; demandimport.enable()
- > from mercurial.hgweb import hgweb
- > from mercurial.hgweb import wsgicgi
- > application = hgweb('.', 'test repo')
- > wsgicgi.launch(application)
- > EOF
+(check that revset predicate foo() and bar() are available)
- $ REQUEST_METHOD='GET' PATH_INFO='/' SCRIPT_NAME='' QUERY_STRING='' \
- > SERVER_PORT='80' SERVER_NAME='localhost' python hgweb.cgi \
- > | grep '^[0-9]) ' # ignores HTML output
- 1) foo imported
- 1) bar imported
- 2) foo uisetup
- 2) bar uisetup
- 3) foo extsetup
- 3) bar extsetup
- 4) foo reposetup
- 4) bar reposetup
+#if msys
+ $ PATH_INFO='//shortlog'
+#else
+ $ PATH_INFO='/shortlog'
+#endif
+ $ export PATH_INFO
+ $ SCRIPT_NAME='' QUERY_STRING='rev=foo() and bar()' $PYTHON hgweb.cgi \
+ > | grep '<a href="/rev/[0-9a-z]*">'
+ <a href="/rev/c24b9ac61126">add file</a>
$ echo 'foo = !' >> $HGRCPATH
$ echo 'bar = !' >> $HGRCPATH
@@ -136,7 +164,6 @@
$ touch $TESTTMP/libroot/mod/__init__.py
$ echo "s = 'libroot/mod/ambig.py'" > $TESTTMP/libroot/mod/ambig.py
-#if absimport
$ cat > $TESTTMP/libroot/mod/ambigabs.py <<EOF
> from __future__ import absolute_import
> import ambig # should load "libroot/ambig.py"
@@ -150,7 +177,6 @@
$ (PYTHONPATH=${PYTHONPATH}${PATHSEP}${TESTTMP}/libroot; hg --config extensions.loadabs=loadabs.py root)
ambigabs.s=libroot/ambig.py
$TESTTMP/a (glob)
-#endif
#if no-py3k
$ cat > $TESTTMP/libroot/mod/ambigrel.py <<EOF
@@ -249,7 +275,7 @@
$TESTTMP/a (glob)
#endif
-#if demandimport absimport
+#if demandimport
Examine whether module loading is delayed until actual referring, even
though module is imported with "absolute_import" feature.
@@ -311,6 +337,23 @@
> from .legacy import detail as legacydetail
> EOF
+Setup package that re-exports an attribute of its submodule as the same
+name. This leaves 'shadowing.used' pointing to 'used.detail', but still
+the submodule 'used' should be somehow accessible. (issue5617)
+
+ $ mkdir -p $TESTTMP/extlibroot/shadowing
+ $ cat > $TESTTMP/extlibroot/shadowing/used.py <<EOF
+ > detail = "this is extlibroot.shadowing.used"
+ > EOF
+ $ cat > $TESTTMP/extlibroot/shadowing/proxied.py <<EOF
+ > from __future__ import absolute_import
+ > from extlibroot.shadowing.used import detail
+ > EOF
+ $ cat > $TESTTMP/extlibroot/shadowing/__init__.py <<EOF
+ > from __future__ import absolute_import
+ > from .used import detail as used
+ > EOF
+
Setup extension local modules to be imported with "absolute_import"
feature.
@@ -380,21 +423,21 @@
$ cat > $TESTTMP/absextroot/__init__.py <<EOF
> from __future__ import absolute_import
- > from mercurial import cmdutil
+ > from mercurial import registrar
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
>
> # "absolute" and "relative" shouldn't be imported before actual
> # command execution, because (1) they import same modules, and (2)
> # preceding import (= instantiate "demandmod" object instead of
> # real "module" object) might hide problem of succeeding import.
>
- > @command('showabsolute', [], norepo=True)
+ > @command(b'showabsolute', [], norepo=True)
> def showabsolute(ui, *args, **opts):
> from absextroot import absolute
> ui.write('ABS: %s\n' % '\nABS: '.join(absolute.getresult()))
>
- > @command('showrelative', [], norepo=True)
+ > @command(b'showrelative', [], norepo=True)
> def showrelative(ui, *args, **opts):
> from . import relative
> ui.write('REL: %s\n' % '\nREL: '.join(relative.getresult()))
@@ -403,6 +446,7 @@
> from extlibroot.lsub1.lsub2 import used as lused, unused as lunused
> from extlibroot.lsub1.lsub2.called import func as lfunc
> from extlibroot.recursedown import absdetail, legacydetail
+ > from extlibroot.shadowing import proxied
>
> def uisetup(ui):
> result = []
@@ -410,6 +454,7 @@
> result.append(lfunc())
> result.append(absdetail)
> result.append(legacydetail)
+ > result.append(proxied.detail)
> ui.write('LIB: %s\n' % '\nLIB: '.join(result))
> EOF
@@ -420,6 +465,7 @@
LIB: this is extlibroot.lsub1.lsub2.called.func()
LIB: this is extlibroot.recursedown.abs.used
LIB: this is extlibroot.recursedown.legacy.used
+ LIB: this is extlibroot.shadowing.used
ABS: this is absextroot.xsub1.xsub2.used
ABS: this is absextroot.xsub1.xsub2.called.func()
@@ -428,6 +474,7 @@
LIB: this is extlibroot.lsub1.lsub2.called.func()
LIB: this is extlibroot.recursedown.abs.used
LIB: this is extlibroot.recursedown.legacy.used
+ LIB: this is extlibroot.shadowing.used
REL: this is absextroot.xsub1.xsub2.used
REL: this is absextroot.xsub1.xsub2.called.func()
REL: this relimporter imports 'this is absextroot.relimportee'
@@ -444,14 +491,14 @@
> EOF
$ cat > $TESTTMP/checkrelativity.py <<EOF
- > from mercurial import cmdutil
+ > from mercurial import registrar
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
>
> # demand import avoids failure of importing notexist here
> import extlibroot.lsub1.lsub2.notexist
>
- > @command('checkrelativity', [], norepo=True)
+ > @command(b'checkrelativity', [], norepo=True)
> def checkrelativity(ui, *args, **opts):
> try:
> ui.write(extlibroot.lsub1.lsub2.notexist.text)
@@ -487,14 +534,14 @@
$ cat > debugextension.py <<EOF
> '''only debugcommands
> '''
- > from mercurial import cmdutil
+ > from mercurial import registrar
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
- > @command('debugfoobar', [], 'hg debugfoobar')
+ > command = registrar.command(cmdtable)
+ > @command(b'debugfoobar', [], 'hg debugfoobar')
> def debugfoobar(ui, repo, *args, **opts):
> "yet another debug command"
> pass
- > @command('foo', [], 'hg foo')
+ > @command(b'foo', [], 'hg foo')
> def foo(ui, repo, *args, **opts):
> """yet another foo command
> This command has been DEPRECATED since forever.
@@ -726,12 +773,12 @@
Test help topic with same name as extension
$ cat > multirevs.py <<EOF
- > from mercurial import cmdutil, commands
+ > from mercurial import commands, registrar
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
> """multirevs extension
> Big multi-line module docstring."""
- > @command('multirevs', [], 'ARG', norepo=True)
+ > @command(b'multirevs', [], 'ARG', norepo=True)
> def multirevs(ui, repo, arg, *args, **opts):
> """multirevs command"""
> pass
@@ -803,14 +850,14 @@
> This is an awesome 'dodo' extension. It does nothing and
> writes 'Foo foo'
> """
- > from mercurial import cmdutil, commands
+ > from mercurial import commands, registrar
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
- > @command('dodo', [], 'hg dodo')
+ > command = registrar.command(cmdtable)
+ > @command(b'dodo', [], 'hg dodo')
> def dodo(ui, *args, **kwargs):
> """Does nothing"""
> ui.write("I do nothing. Yay\\n")
- > @command('foofoo', [], 'hg foofoo')
+ > @command(b'foofoo', [], 'hg foofoo')
> def foofoo(ui, *args, **kwargs):
> """Writes 'Foo foo'"""
> ui.write("Foo foo\\n")
@@ -914,14 +961,14 @@
> This is an awesome 'dudu' extension. It does something and
> also writes 'Beep beep'
> """
- > from mercurial import cmdutil, commands
+ > from mercurial import commands, registrar
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
- > @command('something', [], 'hg something')
+ > command = registrar.command(cmdtable)
+ > @command(b'something', [], 'hg something')
> def something(ui, *args, **kwargs):
> """Does something"""
> ui.write("I do something. Yaaay\\n")
- > @command('beep', [], 'hg beep')
+ > @command(b'beep', [], 'hg beep')
> def beep(ui, *args, **kwargs):
> """Writes 'Beep beep'"""
> ui.write("Beep beep\\n")
@@ -1157,11 +1204,11 @@
[255]
$ cat > throw.py <<EOF
- > from mercurial import cmdutil, commands, util
+ > from mercurial import commands, registrar, util
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
> class Bogon(Exception): pass
- > @command('throw', [], 'hg throw', norepo=True)
+ > @command(b'throw', [], 'hg throw', norepo=True)
> def throw(ui, **opts):
> """throws an exception"""
> raise Bogon()
@@ -1534,6 +1581,40 @@
$ cd ..
+Prohibit registration of commands that don't use @command (issue5137)
+
+ $ hg init deprecated
+ $ cd deprecated
+
+ $ cat <<EOF > deprecatedcmd.py
+ > def deprecatedcmd(repo, ui):
+ > pass
+ > cmdtable = {
+ > 'deprecatedcmd': (deprecatedcmd, [], ''),
+ > }
+ > EOF
+ $ cat <<EOF > .hg/hgrc
+ > [extensions]
+ > deprecatedcmd = `pwd`/deprecatedcmd.py
+ > mq = !
+ > hgext.mq = !
+ > hgext/mq = !
+ > EOF
+
+ $ hg deprecatedcmd > /dev/null
+ *** failed to import extension deprecatedcmd from $TESTTMP/deprecated/deprecatedcmd.py: missing attributes: norepo, optionalrepo, inferrepo
+ *** (use @command decorator to register 'deprecatedcmd')
+ hg: unknown command 'deprecatedcmd'
+ [255]
+
+ the extension shouldn't be loaded at all so the mq works:
+
+ $ hg qseries --config extensions.mq= > /dev/null
+ *** failed to import extension deprecatedcmd from $TESTTMP/deprecated/deprecatedcmd.py: missing attributes: norepo, optionalrepo, inferrepo
+ *** (use @command decorator to register 'deprecatedcmd')
+
+ $ cd ..
+
Test synopsis and docstring extending
$ hg init exthelp
@@ -1556,4 +1637,70 @@
$ hg help bookmarks | grep GREPME
hg bookmarks [OPTIONS]... [NAME]... GREPME [--foo] [-x]
GREPME make sure that this is in the help!
+ $ cd ..
+Show deprecation warning for the use of cmdutil.command
+
+ $ cat > nonregistrar.py <<EOF
+ > from mercurial import cmdutil
+ > cmdtable = {}
+ > command = cmdutil.command(cmdtable)
+ > @command(b'foo', [], norepo=True)
+ > def foo(ui):
+ > pass
+ > EOF
+
+ $ hg --config extensions.nonregistrar=`pwd`/nonregistrar.py version > /dev/null
+ devel-warn: cmdutil.command is deprecated, use registrar.command to register 'foo'
+ (compatibility will be dropped after Mercurial-4.6, update your code.) * (glob)
+
+Make sure a broken uisetup doesn't globally break hg:
+ $ cat > $TESTTMP/baduisetup.py <<EOF
+ > from mercurial import (
+ > bdiff,
+ > extensions,
+ > )
+ >
+ > def blockswrapper(orig, *args, **kwargs):
+ > return orig(*args, **kwargs)
+ >
+ > def uisetup(ui):
+ > extensions.wrapfunction(bdiff, 'blocks', blockswrapper)
+ > EOF
+ $ cat >> $HGRCPATH <<EOF
+ > [extensions]
+ > baduisetup = $TESTTMP/baduisetup.py
+ > EOF
+
+Even though the extension fails during uisetup, hg is still basically usable:
+ $ hg version
+ *** failed to set up extension baduisetup: No module named bdiff
+ Mercurial Distributed SCM (version *) (glob)
+ (see https://mercurial-scm.org for more information)
+
+ Copyright (C) 2005-2017 Matt Mackall and others
+ This is free software; see the source for copying conditions. There is NO
+ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+ $ hg version --traceback
+ Traceback (most recent call last):
+ File "*/mercurial/extensions.py", line *, in _runuisetup (glob)
+ uisetup(ui)
+ File "$TESTTMP/baduisetup.py", line 10, in uisetup
+ extensions.wrapfunction(bdiff, 'blocks', blockswrapper)
+ File "*/mercurial/extensions.py", line *, in wrapfunction (glob)
+ origfn = getattr(container, funcname)
+ File "*/hgdemandimport/demandimportpy2.py", line *, in __getattr__ (glob)
+ self._load()
+ File "*/hgdemandimport/demandimportpy2.py", line *, in _load (glob)
+ mod = _hgextimport(_origimport, head, globals, locals, None, level)
+ File "*/hgdemandimport/demandimportpy2.py", line *, in _hgextimport (glob)
+ return importfunc(name, globals, *args, **kwargs)
+ ImportError: No module named bdiff
+ *** failed to set up extension baduisetup: No module named bdiff
+ Mercurial Distributed SCM (version *) (glob)
+ (see https://mercurial-scm.org for more information)
+
+ Copyright (C) 2005-2017 Matt Mackall and others
+ This is free software; see the source for copying conditions. There is NO
+ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-extensions-afterloaded.t Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,113 @@
+Test the extensions.afterloaded() function
+
+ $ cat > foo.py <<EOF
+ > from mercurial import extensions
+ > def uisetup(ui):
+ > ui.write("foo.uisetup\\n")
+ > ui.flush()
+ > def bar_loaded(loaded):
+ > ui.write("foo: bar loaded: %r\\n" % (loaded,))
+ > ui.flush()
+ > extensions.afterloaded('bar', bar_loaded)
+ > EOF
+ $ cat > bar.py <<EOF
+ > def uisetup(ui):
+ > ui.write("bar.uisetup\\n")
+ > ui.flush()
+ > EOF
+ $ basepath=`pwd`
+
+ $ hg init basic
+ $ cd basic
+ $ echo foo > file
+ $ hg add file
+ $ hg commit -m 'add file'
+
+ $ echo '[extensions]' >> .hg/hgrc
+ $ echo "foo = $basepath/foo.py" >> .hg/hgrc
+ $ echo "bar = $basepath/bar.py" >> .hg/hgrc
+ $ hg log -r. -T'{rev}\n'
+ foo.uisetup
+ foo: bar loaded: True
+ bar.uisetup
+ 0
+
+Test afterloaded with the opposite extension load order
+
+ $ cd ..
+ $ hg init basic_reverse
+ $ cd basic_reverse
+ $ echo foo > file
+ $ hg add file
+ $ hg commit -m 'add file'
+
+ $ echo '[extensions]' >> .hg/hgrc
+ $ echo "bar = $basepath/bar.py" >> .hg/hgrc
+ $ echo "foo = $basepath/foo.py" >> .hg/hgrc
+ $ hg log -r. -T'{rev}\n'
+ bar.uisetup
+ foo.uisetup
+ foo: bar loaded: True
+ 0
+
+Test the extensions.afterloaded() function when the requested extension is not
+loaded
+
+ $ cd ..
+ $ hg init notloaded
+ $ cd notloaded
+ $ echo foo > file
+ $ hg add file
+ $ hg commit -m 'add file'
+
+ $ echo '[extensions]' >> .hg/hgrc
+ $ echo "foo = $basepath/foo.py" >> .hg/hgrc
+ $ hg log -r. -T'{rev}\n'
+ foo.uisetup
+ foo: bar loaded: False
+ 0
+
+Test the extensions.afterloaded() function when the requested extension is not
+configured but fails the minimum version check
+
+ $ cd ..
+ $ cat > minvers.py <<EOF
+ > minimumhgversion = '9999.9999'
+ > def uisetup(ui):
+ > ui.write("minvers.uisetup\\n")
+ > ui.flush()
+ > EOF
+ $ hg init minversion
+ $ cd minversion
+ $ echo foo > file
+ $ hg add file
+ $ hg commit -m 'add file'
+
+ $ echo '[extensions]' >> .hg/hgrc
+ $ echo "foo = $basepath/foo.py" >> .hg/hgrc
+ $ echo "bar = $basepath/minvers.py" >> .hg/hgrc
+ $ hg log -r. -T'{rev}\n'
+ (third party extension bar requires version 9999.9999 or newer of Mercurial; disabling)
+ foo.uisetup
+ foo: bar loaded: False
+ 0
+
+Test the extensions.afterloaded() function when the requested extension is not
+configured but fails the minimum version check, using the opposite load order
+for the two extensions.
+
+ $ cd ..
+ $ hg init minversion_reverse
+ $ cd minversion_reverse
+ $ echo foo > file
+ $ hg add file
+ $ hg commit -m 'add file'
+
+ $ echo '[extensions]' >> .hg/hgrc
+ $ echo "bar = $basepath/minvers.py" >> .hg/hgrc
+ $ echo "foo = $basepath/foo.py" >> .hg/hgrc
+ $ hg log -r. -T'{rev}\n'
+ (third party extension bar requires version 9999.9999 or newer of Mercurial; disabling)
+ foo.uisetup
+ foo: bar loaded: False
+ 0
--- a/tests/test-filebranch.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-filebranch.t Wed Jul 19 07:51:41 2017 -0500
@@ -5,7 +5,7 @@
> import sys, os
> print "merging for", os.path.basename(sys.argv[1])
> EOF
- $ HGMERGE="python ../merge"; export HGMERGE
+ $ HGMERGE="$PYTHON ../merge"; export HGMERGE
Creating base:
--- a/tests/test-fileset-generated.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-fileset-generated.t Wed Jul 19 07:51:41 2017 -0500
@@ -2,15 +2,15 @@
Set up history and working copy
- $ python $TESTDIR/generate-working-copy-states.py state 2 1
+ $ $PYTHON $TESTDIR/generate-working-copy-states.py state 2 1
$ hg addremove -q --similarity 0
$ hg commit -m first
- $ python $TESTDIR/generate-working-copy-states.py state 2 2
+ $ $PYTHON $TESTDIR/generate-working-copy-states.py state 2 2
$ hg addremove -q --similarity 0
$ hg commit -m second
- $ python $TESTDIR/generate-working-copy-states.py state 2 wc
+ $ $PYTHON $TESTDIR/generate-working-copy-states.py state 2 wc
$ hg addremove -q --similarity 0
$ hg forget *_*_*-untracked
$ rm *_*_missing-*
@@ -81,6 +81,7 @@
Largefiles doesn't crash
$ hg log -T '{rev}\n' --stat 'set:modified()' --config extensions.largefiles=
+ The fsmonitor extension is incompatible with the largefiles extension and has been disabled. (fsmonitor !)
1
content1_content2_content1-tracked | 2 +-
content1_content2_content3-tracked | 2 +-
--- a/tests/test-flagprocessor.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-flagprocessor.t Wed Jul 19 07:51:41 2017 -0500
@@ -161,7 +161,8 @@
> EOF
$ echo 'this should fail' > file
$ hg commit -Aqm 'add file'
- abort: cannot register multiple processors on flag '0x8'.
+ *** failed to set up extension duplicate: cannot register multiple processors on flag '0x8'.
+ abort: missing processor for flag '0x1'!
[255]
$ cd ..
--- a/tests/test-fncache.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-fncache.t Wed Jul 19 07:51:41 2017 -0500
@@ -92,6 +92,7 @@
.hg/data/tst.d.hg
.hg/data/tst.d.hg/foo.i
.hg/dirstate
+ .hg/fsmonitor.state (fsmonitor !)
.hg/last-message.txt
.hg/phaseroots
.hg/requires
@@ -121,6 +122,7 @@
.hg/cache/rbc-names-v1
.hg/cache/rbc-revs-v1
.hg/dirstate
+ .hg/fsmonitor.state (fsmonitor !)
.hg/last-message.txt
.hg/requires
.hg/store
--- a/tests/test-gendoc.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-gendoc.t Wed Jul 19 07:51:41 2017 -0500
@@ -8,7 +8,7 @@
$ { echo C; ls "$TESTDIR/../i18n"/*.po | sort; } | while read PO; do
> LOCALE=`basename "$PO" .po`
> echo "% extracting documentation from $LOCALE"
- > LANGUAGE=$LOCALE python "$TESTDIR/../doc/gendoc.py" >> gendoc-$LOCALE.txt 2> /dev/null || exit
+ > LANGUAGE=$LOCALE $PYTHON "$TESTDIR/../doc/gendoc.py" >> gendoc-$LOCALE.txt 2> /dev/null || exit
>
> if [ $LOCALE != C ]; then
> if [ ! -f $TESTDIR/test-gendoc-$LOCALE.t ]; then
--- a/tests/test-generaldelta.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-generaldelta.t Wed Jul 19 07:51:41 2017 -0500
@@ -157,5 +157,195 @@
Stream params: sortdict([('Compression', 'BZ')])
changegroup -- "sortdict([('version', '02'), ('nbchanges', '1')])"
1c5d4dc9a8b8d6e1750966d343e94db665e7a1e9
+ phase-heads -- 'sortdict()'
+ 1c5d4dc9a8b8d6e1750966d343e94db665e7a1e9 draft
$ cd ..
+
+test maxdeltachainspan
+
+ $ hg init source-repo
+ $ cd source-repo
+ $ hg debugbuilddag --new-file '.+5:brancha$.+11:branchb$.+30:branchc<brancha+2<branchb+2'
+ $ cd ..
+ $ hg -R source-repo debugindex -m
+ rev offset length delta linkrev nodeid p1 p2
+ 0 0 46 -1 0 19deeef41503 000000000000 000000000000
+ 1 46 57 0 1 fffc37b38c40 19deeef41503 000000000000
+ 2 103 57 1 2 5822d75c83d9 fffc37b38c40 000000000000
+ 3 160 57 2 3 19cf2273e601 5822d75c83d9 000000000000
+ 4 217 57 3 4 d45ead487afe 19cf2273e601 000000000000
+ 5 274 57 4 5 96e0c2ce55ed d45ead487afe 000000000000
+ 6 331 46 -1 6 0c2ea5222c74 000000000000 000000000000
+ 7 377 57 6 7 4ca08a89134d 0c2ea5222c74 000000000000
+ 8 434 57 7 8 c973dbfd30ac 4ca08a89134d 000000000000
+ 9 491 57 8 9 d81d878ff2cd c973dbfd30ac 000000000000
+ 10 548 58 9 10 dbee7f0dd760 d81d878ff2cd 000000000000
+ 11 606 58 10 11 474be9f1fd4e dbee7f0dd760 000000000000
+ 12 664 58 11 12 594a27502c85 474be9f1fd4e 000000000000
+ 13 722 58 12 13 a7d25307d6a9 594a27502c85 000000000000
+ 14 780 58 13 14 3eb53082272e a7d25307d6a9 000000000000
+ 15 838 58 14 15 d1e94c85caf6 3eb53082272e 000000000000
+ 16 896 58 15 16 8933d9629788 d1e94c85caf6 000000000000
+ 17 954 58 16 17 a33416e52d91 8933d9629788 000000000000
+ 18 1012 47 -1 18 4ccbf31021ed 000000000000 000000000000
+ 19 1059 58 18 19 dcad7a25656c 4ccbf31021ed 000000000000
+ 20 1117 58 19 20 617c4f8be75f dcad7a25656c 000000000000
+ 21 1175 58 20 21 975b9c1d75bb 617c4f8be75f 000000000000
+ 22 1233 58 21 22 74f09cd33b70 975b9c1d75bb 000000000000
+ 23 1291 58 22 23 54e79bfa7ef1 74f09cd33b70 000000000000
+ 24 1349 58 23 24 c556e7ff90af 54e79bfa7ef1 000000000000
+ 25 1407 58 24 25 42daedfe9c6b c556e7ff90af 000000000000
+ 26 1465 58 25 26 f302566947c7 42daedfe9c6b 000000000000
+ 27 1523 58 26 27 2346959851cb f302566947c7 000000000000
+ 28 1581 58 27 28 ca8d867106b4 2346959851cb 000000000000
+ 29 1639 58 28 29 fd9152decab2 ca8d867106b4 000000000000
+ 30 1697 58 29 30 3fe34080a79b fd9152decab2 000000000000
+ 31 1755 58 30 31 bce61a95078e 3fe34080a79b 000000000000
+ 32 1813 58 31 32 1dd9ba54ba15 bce61a95078e 000000000000
+ 33 1871 58 32 33 3cd9b90a9972 1dd9ba54ba15 000000000000
+ 34 1929 58 33 34 5db8c9754ef5 3cd9b90a9972 000000000000
+ 35 1987 58 34 35 ee4a240cc16c 5db8c9754ef5 000000000000
+ 36 2045 58 35 36 9e1d38725343 ee4a240cc16c 000000000000
+ 37 2103 58 36 37 3463f73086a8 9e1d38725343 000000000000
+ 38 2161 58 37 38 88af72fab449 3463f73086a8 000000000000
+ 39 2219 58 38 39 472f5ce73785 88af72fab449 000000000000
+ 40 2277 58 39 40 c91b8351e5b8 472f5ce73785 000000000000
+ 41 2335 58 40 41 9c8289c5c5c0 c91b8351e5b8 000000000000
+ 42 2393 58 41 42 a13fd4a09d76 9c8289c5c5c0 000000000000
+ 43 2451 58 42 43 2ec2c81cafe0 a13fd4a09d76 000000000000
+ 44 2509 58 43 44 f27fdd174392 2ec2c81cafe0 000000000000
+ 45 2567 58 44 45 a539ec59fe41 f27fdd174392 000000000000
+ 46 2625 58 45 46 5e98b9ecb738 a539ec59fe41 000000000000
+ 47 2683 58 46 47 31e6b47899d0 5e98b9ecb738 000000000000
+ 48 2741 58 47 48 2cf25d6636bd 31e6b47899d0 000000000000
+ 49 2799 197 -1 49 9fff62ea0624 96e0c2ce55ed 000000000000
+ 50 2996 58 49 50 467f8e30a066 9fff62ea0624 000000000000
+ 51 3054 356 50 51 346db97283df a33416e52d91 000000000000
+ 52 3410 58 51 52 4e003fd4d5cd 346db97283df 000000000000
+ $ hg clone --pull source-repo --config experimental.maxdeltachainspan=2800 relax-chain --config format.generaldelta=yes
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 53 changesets with 53 changes to 53 files (+2 heads)
+ updating to branch default
+ 14 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg -R relax-chain debugindex -m
+ rev offset length delta linkrev nodeid p1 p2
+ 0 0 46 -1 0 19deeef41503 000000000000 000000000000
+ 1 46 57 0 1 fffc37b38c40 19deeef41503 000000000000
+ 2 103 57 1 2 5822d75c83d9 fffc37b38c40 000000000000
+ 3 160 57 2 3 19cf2273e601 5822d75c83d9 000000000000
+ 4 217 57 3 4 d45ead487afe 19cf2273e601 000000000000
+ 5 274 57 4 5 96e0c2ce55ed d45ead487afe 000000000000
+ 6 331 46 -1 6 0c2ea5222c74 000000000000 000000000000
+ 7 377 57 6 7 4ca08a89134d 0c2ea5222c74 000000000000
+ 8 434 57 7 8 c973dbfd30ac 4ca08a89134d 000000000000
+ 9 491 57 8 9 d81d878ff2cd c973dbfd30ac 000000000000
+ 10 548 58 9 10 dbee7f0dd760 d81d878ff2cd 000000000000
+ 11 606 58 10 11 474be9f1fd4e dbee7f0dd760 000000000000
+ 12 664 58 11 12 594a27502c85 474be9f1fd4e 000000000000
+ 13 722 58 12 13 a7d25307d6a9 594a27502c85 000000000000
+ 14 780 58 13 14 3eb53082272e a7d25307d6a9 000000000000
+ 15 838 58 14 15 d1e94c85caf6 3eb53082272e 000000000000
+ 16 896 58 15 16 8933d9629788 d1e94c85caf6 000000000000
+ 17 954 58 16 17 a33416e52d91 8933d9629788 000000000000
+ 18 1012 47 -1 18 4ccbf31021ed 000000000000 000000000000
+ 19 1059 58 18 19 dcad7a25656c 4ccbf31021ed 000000000000
+ 20 1117 58 19 20 617c4f8be75f dcad7a25656c 000000000000
+ 21 1175 58 20 21 975b9c1d75bb 617c4f8be75f 000000000000
+ 22 1233 58 21 22 74f09cd33b70 975b9c1d75bb 000000000000
+ 23 1291 58 22 23 54e79bfa7ef1 74f09cd33b70 000000000000
+ 24 1349 58 23 24 c556e7ff90af 54e79bfa7ef1 000000000000
+ 25 1407 58 24 25 42daedfe9c6b c556e7ff90af 000000000000
+ 26 1465 58 25 26 f302566947c7 42daedfe9c6b 000000000000
+ 27 1523 58 26 27 2346959851cb f302566947c7 000000000000
+ 28 1581 58 27 28 ca8d867106b4 2346959851cb 000000000000
+ 29 1639 58 28 29 fd9152decab2 ca8d867106b4 000000000000
+ 30 1697 58 29 30 3fe34080a79b fd9152decab2 000000000000
+ 31 1755 58 30 31 bce61a95078e 3fe34080a79b 000000000000
+ 32 1813 58 31 32 1dd9ba54ba15 bce61a95078e 000000000000
+ 33 1871 58 32 33 3cd9b90a9972 1dd9ba54ba15 000000000000
+ 34 1929 58 33 34 5db8c9754ef5 3cd9b90a9972 000000000000
+ 35 1987 58 34 35 ee4a240cc16c 5db8c9754ef5 000000000000
+ 36 2045 58 35 36 9e1d38725343 ee4a240cc16c 000000000000
+ 37 2103 58 36 37 3463f73086a8 9e1d38725343 000000000000
+ 38 2161 58 37 38 88af72fab449 3463f73086a8 000000000000
+ 39 2219 58 38 39 472f5ce73785 88af72fab449 000000000000
+ 40 2277 58 39 40 c91b8351e5b8 472f5ce73785 000000000000
+ 41 2335 58 40 41 9c8289c5c5c0 c91b8351e5b8 000000000000
+ 42 2393 58 41 42 a13fd4a09d76 9c8289c5c5c0 000000000000
+ 43 2451 58 42 43 2ec2c81cafe0 a13fd4a09d76 000000000000
+ 44 2509 58 43 44 f27fdd174392 2ec2c81cafe0 000000000000
+ 45 2567 58 44 45 a539ec59fe41 f27fdd174392 000000000000
+ 46 2625 58 45 46 5e98b9ecb738 a539ec59fe41 000000000000
+ 47 2683 58 46 47 31e6b47899d0 5e98b9ecb738 000000000000
+ 48 2741 58 47 48 2cf25d6636bd 31e6b47899d0 000000000000
+ 49 2799 197 -1 49 9fff62ea0624 96e0c2ce55ed 000000000000
+ 50 2996 58 49 50 467f8e30a066 9fff62ea0624 000000000000
+ 51 3054 58 17 51 346db97283df a33416e52d91 000000000000
+ 52 3112 369 -1 52 4e003fd4d5cd 346db97283df 000000000000
+ $ hg clone --pull source-repo --config experimental.maxdeltachainspan=0 noconst-chain --config format.generaldelta=yes
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 53 changesets with 53 changes to 53 files (+2 heads)
+ updating to branch default
+ 14 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg -R noconst-chain debugindex -m
+ rev offset length delta linkrev nodeid p1 p2
+ 0 0 46 -1 0 19deeef41503 000000000000 000000000000
+ 1 46 57 0 1 fffc37b38c40 19deeef41503 000000000000
+ 2 103 57 1 2 5822d75c83d9 fffc37b38c40 000000000000
+ 3 160 57 2 3 19cf2273e601 5822d75c83d9 000000000000
+ 4 217 57 3 4 d45ead487afe 19cf2273e601 000000000000
+ 5 274 57 4 5 96e0c2ce55ed d45ead487afe 000000000000
+ 6 331 46 -1 6 0c2ea5222c74 000000000000 000000000000
+ 7 377 57 6 7 4ca08a89134d 0c2ea5222c74 000000000000
+ 8 434 57 7 8 c973dbfd30ac 4ca08a89134d 000000000000
+ 9 491 57 8 9 d81d878ff2cd c973dbfd30ac 000000000000
+ 10 548 58 9 10 dbee7f0dd760 d81d878ff2cd 000000000000
+ 11 606 58 10 11 474be9f1fd4e dbee7f0dd760 000000000000
+ 12 664 58 11 12 594a27502c85 474be9f1fd4e 000000000000
+ 13 722 58 12 13 a7d25307d6a9 594a27502c85 000000000000
+ 14 780 58 13 14 3eb53082272e a7d25307d6a9 000000000000
+ 15 838 58 14 15 d1e94c85caf6 3eb53082272e 000000000000
+ 16 896 58 15 16 8933d9629788 d1e94c85caf6 000000000000
+ 17 954 58 16 17 a33416e52d91 8933d9629788 000000000000
+ 18 1012 47 -1 18 4ccbf31021ed 000000000000 000000000000
+ 19 1059 58 18 19 dcad7a25656c 4ccbf31021ed 000000000000
+ 20 1117 58 19 20 617c4f8be75f dcad7a25656c 000000000000
+ 21 1175 58 20 21 975b9c1d75bb 617c4f8be75f 000000000000
+ 22 1233 58 21 22 74f09cd33b70 975b9c1d75bb 000000000000
+ 23 1291 58 22 23 54e79bfa7ef1 74f09cd33b70 000000000000
+ 24 1349 58 23 24 c556e7ff90af 54e79bfa7ef1 000000000000
+ 25 1407 58 24 25 42daedfe9c6b c556e7ff90af 000000000000
+ 26 1465 58 25 26 f302566947c7 42daedfe9c6b 000000000000
+ 27 1523 58 26 27 2346959851cb f302566947c7 000000000000
+ 28 1581 58 27 28 ca8d867106b4 2346959851cb 000000000000
+ 29 1639 58 28 29 fd9152decab2 ca8d867106b4 000000000000
+ 30 1697 58 29 30 3fe34080a79b fd9152decab2 000000000000
+ 31 1755 58 30 31 bce61a95078e 3fe34080a79b 000000000000
+ 32 1813 58 31 32 1dd9ba54ba15 bce61a95078e 000000000000
+ 33 1871 58 32 33 3cd9b90a9972 1dd9ba54ba15 000000000000
+ 34 1929 58 33 34 5db8c9754ef5 3cd9b90a9972 000000000000
+ 35 1987 58 34 35 ee4a240cc16c 5db8c9754ef5 000000000000
+ 36 2045 58 35 36 9e1d38725343 ee4a240cc16c 000000000000
+ 37 2103 58 36 37 3463f73086a8 9e1d38725343 000000000000
+ 38 2161 58 37 38 88af72fab449 3463f73086a8 000000000000
+ 39 2219 58 38 39 472f5ce73785 88af72fab449 000000000000
+ 40 2277 58 39 40 c91b8351e5b8 472f5ce73785 000000000000
+ 41 2335 58 40 41 9c8289c5c5c0 c91b8351e5b8 000000000000
+ 42 2393 58 41 42 a13fd4a09d76 9c8289c5c5c0 000000000000
+ 43 2451 58 42 43 2ec2c81cafe0 a13fd4a09d76 000000000000
+ 44 2509 58 43 44 f27fdd174392 2ec2c81cafe0 000000000000
+ 45 2567 58 44 45 a539ec59fe41 f27fdd174392 000000000000
+ 46 2625 58 45 46 5e98b9ecb738 a539ec59fe41 000000000000
+ 47 2683 58 46 47 31e6b47899d0 5e98b9ecb738 000000000000
+ 48 2741 58 47 48 2cf25d6636bd 31e6b47899d0 000000000000
+ 49 2799 58 5 49 9fff62ea0624 96e0c2ce55ed 000000000000
+ 50 2857 58 49 50 467f8e30a066 9fff62ea0624 000000000000
+ 51 2915 58 17 51 346db97283df a33416e52d91 000000000000
+ 52 2973 58 51 52 4e003fd4d5cd 346db97283df 000000000000
--- a/tests/test-glog.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-glog.t Wed Jul 19 07:51:41 2017 -0500
@@ -1513,7 +1513,7 @@
('symbol', 'date')
('string', '2 0 to 4 0')))
$ hg log -G -d 'brace ) in a date'
- abort: invalid date: 'brace ) in a date'
+ hg: parse error: invalid date: 'brace ) in a date'
[255]
$ testlog --prune 31 --prune 32
[]
@@ -2294,6 +2294,7 @@
> EOF
$ hg debugobsolete `hg id --debug -i -r 8`
+ obsoleted 1 changesets
$ testlog
[]
[]
--- a/tests/test-graft.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-graft.t Wed Jul 19 07:51:41 2017 -0500
@@ -177,6 +177,7 @@
b
committing manifest
committing changelog
+ updating the branch cache
grafting 5:97f8bfe72746 "5"
searching for copies back to rev 1
unmatched files in other (from topological common ancestor):
@@ -186,11 +187,11 @@
ancestor: 4c60f11aa304, local: 6b9e5368ca4e+, remote: 97f8bfe72746
e: remote is newer -> g
getting e
- b: remote unchanged -> k
committing files:
e
committing manifest
committing changelog
+ updating the branch cache
$ HGEDITOR=cat hg graft 4 3 --log --debug
scanning for duplicate grafts
grafting 4:9c233e8e184d "4"
@@ -203,7 +204,6 @@
preserving e for resolve of e
d: remote is newer -> g
getting d
- b: remote unchanged -> k
e: versions differ -> m (premerge)
picked tool ':merge' for e (binary False symlink False changedelete False)
merging e
--- a/tests/test-hardlinks-whitelisted.t Wed Jul 05 11:24:22 2017 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,389 +0,0 @@
-#require hardlink
-#require hardlink-whitelisted
-
-This test is similar to test-hardlinks.t, but will only run on some filesystems
-that we are sure to have known good hardlink supports (see issue4546 for an
-example where the filesystem claims hardlink support but is actually
-problematic).
-
- $ cat > nlinks.py <<EOF
- > import sys
- > from mercurial import util
- > for f in sorted(sys.stdin.readlines()):
- > f = f[:-1]
- > print util.nlinks(f), f
- > EOF
-
- $ nlinksdir()
- > {
- > find $1 -type f | python $TESTTMP/nlinks.py
- > }
-
-Some implementations of cp can't create hardlinks (replaces 'cp -al' on Linux):
-
- $ cat > linkcp.py <<EOF
- > from mercurial import util
- > import sys
- > util.copyfiles(sys.argv[1], sys.argv[2], hardlink=True)
- > EOF
-
- $ linkcp()
- > {
- > python $TESTTMP/linkcp.py $1 $2
- > }
-
-Prepare repo r1:
-
- $ hg init r1
- $ cd r1
-
- $ echo c1 > f1
- $ hg add f1
- $ hg ci -m0
-
- $ mkdir d1
- $ cd d1
- $ echo c2 > f2
- $ hg add f2
- $ hg ci -m1
- $ cd ../..
-
- $ nlinksdir r1/.hg/store
- 1 r1/.hg/store/00changelog.i
- 1 r1/.hg/store/00manifest.i
- 1 r1/.hg/store/data/d1/f2.i
- 1 r1/.hg/store/data/f1.i
- 1 r1/.hg/store/fncache
- 1 r1/.hg/store/phaseroots
- 1 r1/.hg/store/undo
- 1 r1/.hg/store/undo.backup.fncache
- 1 r1/.hg/store/undo.backupfiles
- 1 r1/.hg/store/undo.phaseroots
-
-
-Create hardlinked clone r2:
-
- $ hg clone -U --debug r1 r2 --config progress.debug=true
- linking: 1
- linking: 2
- linking: 3
- linking: 4
- linking: 5
- linking: 6
- linking: 7
- linked 7 files
-
-Create non-hardlinked clone r3:
-
- $ hg clone --pull r1 r3
- requesting all changes
- adding changesets
- adding manifests
- adding file changes
- added 2 changesets with 2 changes to 2 files
- updating to branch default
- 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
-
-
-Repos r1 and r2 should now contain hardlinked files:
-
- $ nlinksdir r1/.hg/store
- 2 r1/.hg/store/00changelog.i
- 2 r1/.hg/store/00manifest.i
- 2 r1/.hg/store/data/d1/f2.i
- 2 r1/.hg/store/data/f1.i
- 2 r1/.hg/store/fncache
- 1 r1/.hg/store/phaseroots
- 1 r1/.hg/store/undo
- 1 r1/.hg/store/undo.backup.fncache
- 1 r1/.hg/store/undo.backupfiles
- 1 r1/.hg/store/undo.phaseroots
-
- $ nlinksdir r2/.hg/store
- 2 r2/.hg/store/00changelog.i
- 2 r2/.hg/store/00manifest.i
- 2 r2/.hg/store/data/d1/f2.i
- 2 r2/.hg/store/data/f1.i
- 2 r2/.hg/store/fncache
-
-Repo r3 should not be hardlinked:
-
- $ nlinksdir r3/.hg/store
- 1 r3/.hg/store/00changelog.i
- 1 r3/.hg/store/00manifest.i
- 1 r3/.hg/store/data/d1/f2.i
- 1 r3/.hg/store/data/f1.i
- 1 r3/.hg/store/fncache
- 1 r3/.hg/store/phaseroots
- 1 r3/.hg/store/undo
- 1 r3/.hg/store/undo.backupfiles
- 1 r3/.hg/store/undo.phaseroots
-
-
-Create a non-inlined filelog in r3:
-
- $ cd r3/d1
- >>> f = open('data1', 'wb')
- >>> for x in range(10000):
- ... f.write("%s\n" % str(x))
- >>> f.close()
- $ for j in 0 1 2 3 4 5 6 7 8 9; do
- > cat data1 >> f2
- > hg commit -m$j
- > done
- $ cd ../..
-
- $ nlinksdir r3/.hg/store
- 1 r3/.hg/store/00changelog.i
- 1 r3/.hg/store/00manifest.i
- 1 r3/.hg/store/data/d1/f2.d
- 1 r3/.hg/store/data/d1/f2.i
- 1 r3/.hg/store/data/f1.i
- 1 r3/.hg/store/fncache
- 1 r3/.hg/store/phaseroots
- 1 r3/.hg/store/undo
- 1 r3/.hg/store/undo.backup.fncache
- 1 r3/.hg/store/undo.backup.phaseroots
- 1 r3/.hg/store/undo.backupfiles
- 1 r3/.hg/store/undo.phaseroots
-
-Push to repo r1 should break up most hardlinks in r2:
-
- $ hg -R r2 verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- 2 files, 2 changesets, 2 total revisions
-
- $ cd r3
- $ hg push
- pushing to $TESTTMP/r1 (glob)
- searching for changes
- adding changesets
- adding manifests
- adding file changes
- added 10 changesets with 10 changes to 1 files
-
- $ cd ..
-
- $ nlinksdir r2/.hg/store
- 1 r2/.hg/store/00changelog.i
- 1 r2/.hg/store/00manifest.i
- 1 r2/.hg/store/data/d1/f2.i
- 2 r2/.hg/store/data/f1.i
- 2 r2/.hg/store/fncache
-
- $ hg -R r2 verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- 2 files, 2 changesets, 2 total revisions
-
-
- $ cd r1
- $ hg up
- 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
-
-Committing a change to f1 in r1 must break up hardlink f1.i in r2:
-
- $ echo c1c1 >> f1
- $ hg ci -m00
- $ cd ..
-
- $ nlinksdir r2/.hg/store
- 1 r2/.hg/store/00changelog.i
- 1 r2/.hg/store/00manifest.i
- 1 r2/.hg/store/data/d1/f2.i
- 1 r2/.hg/store/data/f1.i
- 2 r2/.hg/store/fncache
-
-
- $ cd r3
- $ hg tip --template '{rev}:{node|short}\n'
- 11:a6451b6bc41f
- $ echo bla > f1
- $ hg ci -m1
- $ cd ..
-
-Create hardlinked copy r4 of r3 (on Linux, we would call 'cp -al'):
-
- $ linkcp r3 r4
-
-r4 has hardlinks in the working dir (not just inside .hg):
-
- $ nlinksdir r4
- 2 r4/.hg/00changelog.i
- 2 r4/.hg/branch
- 2 r4/.hg/cache/branch2-served
- 2 r4/.hg/cache/checkisexec
- 3 r4/.hg/cache/checklink (?)
- ? r4/.hg/cache/checklink-target (glob)
- 2 r4/.hg/cache/checknoexec
- 2 r4/.hg/cache/rbc-names-v1
- 2 r4/.hg/cache/rbc-revs-v1
- 2 r4/.hg/dirstate
- 2 r4/.hg/hgrc
- 2 r4/.hg/last-message.txt
- 2 r4/.hg/requires
- 2 r4/.hg/store/00changelog.i
- 2 r4/.hg/store/00manifest.i
- 2 r4/.hg/store/data/d1/f2.d
- 2 r4/.hg/store/data/d1/f2.i
- 2 r4/.hg/store/data/f1.i
- 2 r4/.hg/store/fncache
- 2 r4/.hg/store/phaseroots
- 2 r4/.hg/store/undo
- 2 r4/.hg/store/undo.backup.fncache
- 2 r4/.hg/store/undo.backup.phaseroots
- 2 r4/.hg/store/undo.backupfiles
- 2 r4/.hg/store/undo.phaseroots
- 4 r4/.hg/undo.backup.dirstate
- 2 r4/.hg/undo.bookmarks
- 2 r4/.hg/undo.branch
- 2 r4/.hg/undo.desc
- 4 r4/.hg/undo.dirstate
- 2 r4/d1/data1
- 2 r4/d1/f2
- 2 r4/f1
-
-Update back to revision 11 in r4 should break hardlink of file f1:
-
- $ hg -R r4 up 11
- 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
-
- $ nlinksdir r4
- 2 r4/.hg/00changelog.i
- 1 r4/.hg/branch
- 2 r4/.hg/cache/branch2-served
- 2 r4/.hg/cache/checkisexec
- 2 r4/.hg/cache/checklink-target
- 2 r4/.hg/cache/checknoexec
- 2 r4/.hg/cache/rbc-names-v1
- 2 r4/.hg/cache/rbc-revs-v1
- 1 r4/.hg/dirstate
- 2 r4/.hg/hgrc
- 2 r4/.hg/last-message.txt
- 2 r4/.hg/requires
- 2 r4/.hg/store/00changelog.i
- 2 r4/.hg/store/00manifest.i
- 2 r4/.hg/store/data/d1/f2.d
- 2 r4/.hg/store/data/d1/f2.i
- 2 r4/.hg/store/data/f1.i
- 2 r4/.hg/store/fncache
- 2 r4/.hg/store/phaseroots
- 2 r4/.hg/store/undo
- 2 r4/.hg/store/undo.backup.fncache
- 2 r4/.hg/store/undo.backup.phaseroots
- 2 r4/.hg/store/undo.backupfiles
- 2 r4/.hg/store/undo.phaseroots
- 4 r4/.hg/undo.backup.dirstate
- 2 r4/.hg/undo.bookmarks
- 2 r4/.hg/undo.branch
- 2 r4/.hg/undo.desc
- 4 r4/.hg/undo.dirstate
- 2 r4/d1/data1
- 2 r4/d1/f2
- 1 r4/f1
-
-
-Test hardlinking outside hg:
-
- $ mkdir x
- $ echo foo > x/a
-
- $ linkcp x y
- $ echo bar >> y/a
-
-No diff if hardlink:
-
- $ diff x/a y/a
-
-Test mq hardlinking:
-
- $ echo "[extensions]" >> $HGRCPATH
- $ echo "mq=" >> $HGRCPATH
-
- $ hg init a
- $ cd a
-
- $ hg qimport -n foo - << EOF
- > # HG changeset patch
- > # Date 1 0
- > diff -r 2588a8b53d66 a
- > --- /dev/null Thu Jan 01 00:00:00 1970 +0000
- > +++ b/a Wed Jul 23 15:54:29 2008 +0200
- > @@ -0,0 +1,1 @@
- > +a
- > EOF
- adding foo to series file
-
- $ hg qpush
- applying foo
- now at: foo
-
- $ cd ..
- $ linkcp a b
- $ cd b
-
- $ hg qimport -n bar - << EOF
- > # HG changeset patch
- > # Date 2 0
- > diff -r 2588a8b53d66 a
- > --- /dev/null Thu Jan 01 00:00:00 1970 +0000
- > +++ b/b Wed Jul 23 15:54:29 2008 +0200
- > @@ -0,0 +1,1 @@
- > +b
- > EOF
- adding bar to series file
-
- $ hg qpush
- applying bar
- now at: bar
-
- $ cat .hg/patches/status
- 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo
- 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c:bar
-
- $ cat .hg/patches/series
- foo
- bar
-
- $ cat ../a/.hg/patches/status
- 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo
-
- $ cat ../a/.hg/patches/series
- foo
-
-Test tags hardlinking:
-
- $ hg qdel -r qbase:qtip
- patch foo finalized without changeset message
- patch bar finalized without changeset message
-
- $ hg tag -l lfoo
- $ hg tag foo
-
- $ cd ..
- $ linkcp b c
- $ cd c
-
- $ hg tag -l -r 0 lbar
- $ hg tag -r 0 bar
-
- $ cat .hgtags
- 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo
- 430ed4828a74fa4047bc816a25500f7472ab4bfe bar
-
- $ cat .hg/localtags
- 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo
- 430ed4828a74fa4047bc816a25500f7472ab4bfe lbar
-
- $ cat ../b/.hgtags
- 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo
-
- $ cat ../b/.hg/localtags
- 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo
-
- $ cd ..
--- a/tests/test-hardlinks.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-hardlinks.t Wed Jul 19 07:51:41 2017 -0500
@@ -10,7 +10,7 @@
$ nlinksdir()
> {
- > find $1 -type f | python $TESTTMP/nlinks.py
+ > find "$@" -type f | $PYTHON $TESTTMP/nlinks.py
> }
Some implementations of cp can't create hardlinks (replaces 'cp -al' on Linux):
@@ -23,7 +23,7 @@
$ linkcp()
> {
- > python $TESTTMP/linkcp.py $1 $2
+ > $PYTHON $TESTTMP/linkcp.py $1 $2
> }
Prepare repo r1:
@@ -168,6 +168,11 @@
2 r2/.hg/store/data/f1.i
[12] r2/\.hg/store/fncache (re)
+#if hardlink-whitelisted
+ $ nlinksdir r2/.hg/store/fncache
+ 2 r2/.hg/store/fncache
+#endif
+
$ hg -R r2 verify
checking changesets
checking manifests
@@ -193,11 +198,23 @@
1 r2/.hg/store/data/f1.i
[12] r2/\.hg/store/fncache (re)
+#if hardlink-whitelisted
+ $ nlinksdir r2/.hg/store/fncache
+ 2 r2/.hg/store/fncache
+#endif
+
+Create a file which exec permissions we will change
+ $ cd r3
+ $ echo "echo hello world" > f3
+ $ hg add f3
+ $ hg ci -mf3
+ $ cd ..
$ cd r3
$ hg tip --template '{rev}:{node|short}\n'
- 11:a6451b6bc41f
+ 12:d3b77733a28a
$ echo bla > f1
+ $ chmod +x f3
$ hg ci -m1
$ cd ..
@@ -205,19 +222,26 @@
$ linkcp r3 r4
+'checklink' is produced by hardlinking a symlink, which is undefined whether
+the symlink should be followed or not. It does behave differently on Linux and
+BSD. Just remove it so the test pass on both platforms.
+
+ $ rm -f r4/.hg/cache/checklink
+
r4 has hardlinks in the working dir (not just inside .hg):
$ nlinksdir r4
2 r4/.hg/00changelog.i
2 r4/.hg/branch
+ 2 r4/.hg/cache/branch2-base
2 r4/.hg/cache/branch2-served
2 r4/.hg/cache/checkisexec (execbit !)
- 3 r4/.hg/cache/checklink (?)
? r4/.hg/cache/checklink-target (glob) (symlink !)
2 r4/.hg/cache/checknoexec (execbit !)
2 r4/.hg/cache/rbc-names-v1
2 r4/.hg/cache/rbc-revs-v1
2 r4/.hg/dirstate
+ 2 r4/.hg/fsmonitor.state (fsmonitor !)
2 r4/.hg/hgrc
2 r4/.hg/last-message.txt
2 r4/.hg/requires
@@ -226,6 +250,7 @@
2 r4/.hg/store/data/d1/f2.d
2 r4/.hg/store/data/d1/f2.i
2 r4/.hg/store/data/f1.i
+ 2 r4/.hg/store/data/f3.i
2 r4/.hg/store/fncache
2 r4/.hg/store/phaseroots
2 r4/.hg/store/undo
@@ -241,15 +266,24 @@
2 r4/d1/data1
2 r4/d1/f2
2 r4/f1
-
-Update back to revision 11 in r4 should break hardlink of file f1:
+ 2 r4/f3
- $ hg -R r4 up 11
- 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+Update back to revision 12 in r4 should break hardlink of file f1 and f3:
+#if hardlink-whitelisted
+ $ nlinksdir r4/.hg/undo.backup.dirstate r4/.hg/undo.dirstate
+ 4 r4/.hg/undo.backup.dirstate
+ 4 r4/.hg/undo.dirstate
+#endif
+
+
+ $ hg -R r4 up 12
+ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (execbit !)
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (no-execbit !)
$ nlinksdir r4
2 r4/.hg/00changelog.i
1 r4/.hg/branch
+ 2 r4/.hg/cache/branch2-base
2 r4/.hg/cache/branch2-served
2 r4/.hg/cache/checkisexec (execbit !)
2 r4/.hg/cache/checklink-target (symlink !)
@@ -257,6 +291,7 @@
2 r4/.hg/cache/rbc-names-v1
2 r4/.hg/cache/rbc-revs-v1
1 r4/.hg/dirstate
+ 1 r4/.hg/fsmonitor.state (fsmonitor !)
2 r4/.hg/hgrc
2 r4/.hg/last-message.txt
2 r4/.hg/requires
@@ -265,6 +300,7 @@
2 r4/.hg/store/data/d1/f2.d
2 r4/.hg/store/data/d1/f2.i
2 r4/.hg/store/data/f1.i
+ 2 r4/.hg/store/data/f3.i
2 r4/.hg/store/fncache
2 r4/.hg/store/phaseroots
2 r4/.hg/store/undo
@@ -280,7 +316,14 @@
2 r4/d1/data1
2 r4/d1/f2
1 r4/f1
+ 1 r4/f3 (execbit !)
+ 2 r4/f3 (no-execbit !)
+#if hardlink-whitelisted
+ $ nlinksdir r4/.hg/undo.backup.dirstate r4/.hg/undo.dirstate
+ 4 r4/.hg/undo.backup.dirstate
+ 4 r4/.hg/undo.dirstate
+#endif
Test hardlinking outside hg:
--- a/tests/test-help.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-help.t Wed Jul 19 07:51:41 2017 -0500
@@ -660,6 +660,32 @@
(use 'hg help' for the full list of commands or 'hg -v' for details)
[255]
+Typoed command gives suggestion
+ $ hg puls
+ hg: unknown command 'puls'
+ (did you mean one of pull, push?)
+ [255]
+
+Not enabled extension gets suggested
+
+ $ hg rebase
+ hg: unknown command 'rebase'
+ 'rebase' is provided by the following extension:
+
+ rebase command to move sets of revisions to a different ancestor
+
+ (use 'hg help extensions' for information on enabling extensions)
+ [255]
+
+Disabled extension gets suggested
+ $ hg --config extensions.rebase=! rebase
+ hg: unknown command 'rebase'
+ 'rebase' is provided by the following extension:
+
+ rebase command to move sets of revisions to a different ancestor
+
+ (use 'hg help extensions' for information on enabling extensions)
+ [255]
Make sure that we don't run afoul of the help system thinking that
this is a section and erroring out weirdly.
@@ -680,26 +706,26 @@
$ cat > helpext.py <<EOF
> import os
- > from mercurial import cmdutil, commands
+ > from mercurial import commands, registrar
>
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
>
- > @command('nohelp',
- > [('', 'longdesc', 3, 'x'*90),
- > ('n', '', None, 'normal desc'),
- > ('', 'newline', '', 'line1\nline2')],
- > 'hg nohelp',
+ > @command(b'nohelp',
+ > [(b'', b'longdesc', 3, b'x'*90),
+ > (b'n', b'', None, b'normal desc'),
+ > (b'', b'newline', b'', b'line1\nline2')],
+ > b'hg nohelp',
> norepo=True)
- > @command('debugoptADV', [('', 'aopt', None, 'option is (ADVANCED)')])
- > @command('debugoptDEP', [('', 'dopt', None, 'option is (DEPRECATED)')])
- > @command('debugoptEXP', [('', 'eopt', None, 'option is (EXPERIMENTAL)')])
+ > @command(b'debugoptADV', [(b'', b'aopt', None, b'option is (ADVANCED)')])
+ > @command(b'debugoptDEP', [(b'', b'dopt', None, b'option is (DEPRECATED)')])
+ > @command(b'debugoptEXP', [(b'', b'eopt', None, b'option is (EXPERIMENTAL)')])
> def nohelp(ui, *args, **kwargs):
> pass
>
> def uisetup(ui):
- > ui.setconfig('alias', 'shellalias', '!echo hi', 'helpext')
- > ui.setconfig('alias', 'hgalias', 'summary', 'helpext')
+ > ui.setconfig(b'alias', b'shellalias', b'!echo hi', b'helpext')
+ > ui.setconfig(b'alias', b'hgalias', b'summary', b'helpext')
>
> EOF
$ echo '[extensions]' >> $HGRCPATH
@@ -912,6 +938,8 @@
debugoptEXP (no help text available)
debugpathcomplete
complete part or all of a tracked path
+ debugpickmergetool
+ examine which merge tool is chosen for specified file
debugpushkey access the pushkey key/value protocol
debugpvec (no help text available)
debugrebuilddirstate
@@ -924,11 +952,14 @@
debugrevspec parse and apply a revision specification
debugsetparents
manually set the parents of the current working directory
+ debugssl test a secure connection to a server
debugsub (no help text available)
debugsuccessorssets
show set of successors for revision
debugtemplate
parse and apply a template
+ debugupdatecaches
+ warm all known caches in the repository
debugupgraderepo
upgrade a repository to use different features
debugwalk show how files match on given patterns
@@ -1642,7 +1673,7 @@
This tests that section lookup by translated string isn't broken by
such str.lower().
- $ python <<EOF
+ $ $PYTHON <<EOF
> def escape(s):
> return ''.join('\u%x' % ord(uc) for uc in s.decode('cp932'))
> # translation of "record" in ja_JP.cp932
@@ -1676,7 +1707,7 @@
> ambiguous = ./ambiguous.py
> EOF
- $ python <<EOF | sh
+ $ $PYTHON <<EOF | sh
> upper = "\x8bL\x98^"
> print "hg --encoding cp932 help -e ambiguous.%s" % upper
> EOF
@@ -1686,7 +1717,7 @@
Upper name should show only this message
- $ python <<EOF | sh
+ $ $PYTHON <<EOF | sh
> lower = "\x8bl\x98^"
> print "hg --encoding cp932 help -e ambiguous.%s" % lower
> EOF
@@ -1760,11 +1791,18 @@
accordingly be named "a.txt.local", "a.txt.other" and "a.txt.base" and
they will be placed in the same directory as "a.txt".
+ This implies permerge. Therefore, files aren't dumped, if premerge runs
+ successfully. Use :forcedump to forcibly write files out.
+
":fail"
Rather than attempting to merge files that were modified on both
branches, it marks them as unresolved. The resolve command must be used
to resolve these conflicts.
+ ":forcedump"
+ Creates three versions of the files as same as :dump, but omits
+ premerge.
+
":local"
Uses the local 'p1()' version of files as the merged version.
@@ -1856,7 +1894,7 @@
Test usage of section marks in help documents
$ cd "$TESTDIR"/../doc
- $ python check-seclevel.py
+ $ $PYTHON check-seclevel.py
$ cd $TESTTMP
#if serve
@@ -1904,9 +1942,10 @@
<div class="main">
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -2498,7 +2537,7 @@
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -2678,7 +2717,7 @@
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -2879,7 +2918,7 @@
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -2982,9 +3021,10 @@
<div class="main">
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -3089,7 +3129,7 @@
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
--- a/tests/test-hghave.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-hghave.t Wed Jul 19 07:51:41 2017 -0500
@@ -1,3 +1,5 @@
+ $ . "$TESTDIR/helpers-testrepo.sh"
+
Testing that hghave does not crash when checking features
$ hghave --test-features 2>/dev/null
@@ -18,9 +20,12 @@
> $ echo foo
> foo
> EOF
- $ run-tests.py $HGTEST_RUN_TESTS_PURE test-hghaveaddon.t
+ $ ( \
+ > testrepohgenv; \
+ > $TESTDIR/run-tests.py $HGTEST_RUN_TESTS_PURE test-hghaveaddon.t \
+ > )
.
- # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 1 tests, 0 skipped, 0 failed.
(invocation via command line)
--- a/tests/test-hgignore.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-hgignore.t Wed Jul 19 07:51:41 2017 -0500
@@ -1,6 +1,10 @@
$ hg init ignorerepo
$ cd ignorerepo
+debugignore with no hgignore should be deterministic:
+ $ hg debugignore
+ <nevermatcher>
+
Issue562: .hgignore requires newline at end:
$ touch foo
@@ -15,7 +19,7 @@
> f.close()
> EOF
- $ python makeignore.py
+ $ $PYTHON makeignore.py
Should display baz only:
@@ -48,6 +52,35 @@
abort: $TESTTMP/ignorerepo/.hgignore: invalid pattern (relre): *.o (glob)
[255]
+Ensure given files are relative to cwd
+
+ $ echo "dir/.*\.o" > .hgignore
+ $ hg status -i
+ I dir/c.o
+
+ $ hg debugignore dir/c.o dir/missing.o
+ dir/c.o is ignored (glob)
+ (ignore rule in $TESTTMP/ignorerepo/.hgignore, line 1: 'dir/.*\.o') (glob)
+ dir/missing.o is ignored (glob)
+ (ignore rule in $TESTTMP/ignorerepo/.hgignore, line 1: 'dir/.*\.o') (glob)
+ $ cd dir
+ $ hg debugignore c.o missing.o
+ c.o is ignored
+ (ignore rule in $TESTTMP/ignorerepo/.hgignore, line 1: 'dir/.*\.o') (glob)
+ missing.o is ignored
+ (ignore rule in $TESTTMP/ignorerepo/.hgignore, line 1: 'dir/.*\.o') (glob)
+
+For icasefs, inexact matches also work, except for missing files
+
+#if icasefs
+ $ hg debugignore c.O missing.O
+ c.o is ignored
+ (ignore rule in $TESTTMP/ignorerepo/.hgignore, line 1: 'dir/.*\.o') (glob)
+ missing.O is not ignored
+#endif
+
+ $ cd ..
+
$ echo ".*\.o" > .hgignore
$ hg status
A dir/b.o
@@ -78,7 +111,7 @@
$ rm 'baz\#wat'
#endif
-Check it does not ignore the current directory '.':
+Check that '^\.' does not ignore the root directory:
$ echo "^\." > .hgignore
$ hg status
@@ -164,7 +197,7 @@
A b.o
$ hg debugignore
- (?:(?:|.*/)[^/]*(?:/|$))
+ <includematcher includes='(?:(?:|.*/)[^/]*(?:/|$))'>
$ hg debugignore b.o
b.o is ignored
@@ -174,12 +207,17 @@
Check patterns that match only the directory
+"(fsmonitor !)" below assumes that fsmonitor is enabled with
+"walk_on_invalidate = false" (default), which doesn't involve
+re-walking whole repository at detection of .hgignore change.
+
$ echo "^dir\$" > .hgignore
$ hg status
A dir/b.o
? .hgignore
? a.c
? a.o
+ ? dir/c.o (fsmonitor !)
? syntax
Check recursive glob pattern matches no directories (dir/**/c.o matches dir/c.o)
@@ -198,7 +236,7 @@
$ hg debugignore a.c
a.c is not ignored
$ hg debugignore dir/c.o
- dir/c.o is ignored
+ dir/c.o is ignored (glob)
(ignore rule in $TESTTMP/ignorerepo/.hgignore, line 2: 'dir/**/c.o') (glob)
Check using 'include:' in ignore file
@@ -284,7 +322,7 @@
$ hg status | grep file2
[1]
$ hg debugignore dir1/file2
- dir1/file2 is ignored
+ dir1/file2 is ignored (glob)
(ignore rule in dir2/.hgignore, line 1: 'file*2')
#if windows
--- a/tests/test-hgweb-commands.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-hgweb-commands.t Wed Jul 19 07:51:41 2017 -0500
@@ -747,6 +747,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>log</h3>
+
<form class="search" action="/log">
<p><input name="rev" id="search1" type="text" size="30" value="" /></p>
@@ -882,9 +883,10 @@
<span class="tag">1.0</span> <span class="tag">anotherthing</span>
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -1025,9 +1027,10 @@
</p>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" value="base"></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="base" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -1312,9 +1315,10 @@
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -1343,8 +1347,13 @@
<div class="overflow">
<div class="sourcefirst linewraptoggle">line wrap: <a class="linewraplink" href="javascript:toggleLinewrap()">on</a></div>
<div class="sourcefirst"> line source</div>
- <pre class="sourcelines stripes4 wrap bottomline" data-logurl="/log/1/foo" data-ishead="0">
- <span id="l1">foo</span><a href="#l1"></a></pre>
+ <pre class="sourcelines stripes4 wrap bottomline"
+ data-logurl="/log/1/foo"
+ data-selectabletag="SPAN"
+ data-ishead="0">
+
+ <span id="l1">foo</span><a href="#l1"></a>
+ </pre>
</div>
<script type="text/javascript" src="/static/followlines.js"></script>
@@ -1440,9 +1449,10 @@
<span class="branchname">stable</span>
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -1471,8 +1481,13 @@
<div class="overflow">
<div class="sourcefirst linewraptoggle">line wrap: <a class="linewraplink" href="javascript:toggleLinewrap()">on</a></div>
<div class="sourcefirst"> line source</div>
- <pre class="sourcelines stripes4 wrap bottomline" data-logurl="/log/2/foo" data-ishead="1">
- <span id="l1">another</span><a href="#l1"></a></pre>
+ <pre class="sourcelines stripes4 wrap bottomline"
+ data-logurl="/log/2/foo"
+ data-selectabletag="SPAN"
+ data-ishead="1">
+
+ <span id="l1">another</span><a href="#l1"></a>
+ </pre>
</div>
<script type="text/javascript" src="/static/followlines.js"></script>
@@ -1528,15 +1543,10 @@
<div class="page_header">
<a href="https://mercurial-scm.org/" title="Mercurial" style="float: right;">Mercurial</a>
<a href="/">Mercurial</a> / summary
- <form action="/log">
- <input type="hidden" name="style" value="gitweb" />
- <div class="search">
- <input type="text" name="rev" />
- </div>
- </form>
</div>
<div class="page_nav">
+ <div>
summary |
<a href="/shortlog?style=gitweb">shortlog</a> |
<a href="/log?style=gitweb">changelog</a> |
@@ -1546,7 +1556,16 @@
<a href="/branches?style=gitweb">branches</a> |
<a href="/file?style=gitweb">files</a> |
<a href="/help?style=gitweb">help</a>
- <br/>
+ </div>
+
+ <div class="search">
+ <form id="searchform" action="/log">
+ <input type="hidden" name="style" value="gitweb" />
+ <input name="rev" type="text" value="" size="40" />
+ <div id="hint">Find changesets by keywords (author, files, the commit message), revision
+ number or hash, or <a href="/help/revsets">revset expression</a>.</div>
+ </form>
+ </div>
</div>
<div class="title"> </div>
@@ -1729,13 +1748,8 @@
<a href="/">Mercurial</a> / graph
</div>
- <form action="/log">
- <input type="hidden" name="style" value="gitweb" />
- <div class="search">
- <input type="text" name="rev" />
- </div>
- </form>
<div class="page_nav">
+ <div>
<a href="/summary?style=gitweb">summary</a> |
<a href="/shortlog/tip?style=gitweb">shortlog</a> |
<a href="/log/tip?style=gitweb">changelog</a> |
@@ -1748,7 +1762,17 @@
<br/>
<a href="/graph/tip?revcount=30&style=gitweb">less</a>
<a href="/graph/tip?revcount=120&style=gitweb">more</a>
- | <a href="/graph/2ef0ac749a14?style=gitweb">(0)</a> <a href="/graph/tip?style=gitweb">tip</a> <br/>
+ | <a href="/graph/2ef0ac749a14?style=gitweb">(0)</a> <a href="/graph/tip?style=gitweb">tip</a>
+ </div>
+
+ <div class="search">
+ <form id="searchform" action="/log">
+ <input type="hidden" name="style" value="gitweb" />
+ <input name="rev" type="text" value="" size="40" />
+ <div id="hint">Find changesets by keywords (author, files, the commit message), revision
+ number or hash, or <a href="/help/revsets">revset expression</a>.</div>
+ </form>
+ </div>
</div>
<div class="title"> </div>
@@ -1818,7 +1842,7 @@
// stop hiding script -->
</script>
- <div class="page_nav">
+ <div class="extra_nav">
<a href="/graph/tip?revcount=30&style=gitweb">less</a>
<a href="/graph/tip?revcount=120&style=gitweb">more</a>
| <a href="/graph/2ef0ac749a14?style=gitweb">(0)</a> <a href="/graph/tip?style=gitweb">tip</a>
@@ -2241,7 +2265,7 @@
$ . "$TESTDIR/cgienv"
$ PATH_INFO=/bookmarks; export PATH_INFO
$ QUERY_STRING='style=raw'
- $ python hgweb.cgi | grep -v ETag:
+ $ $PYTHON hgweb.cgi | grep -v ETag:
Status: 200 Script output follows\r (esc)
Content-Type: text/plain; charset=ascii\r (esc)
\r (esc)
@@ -2250,7 +2274,7 @@
$ PATH_INFO=/; export PATH_INFO
$ QUERY_STRING='cmd=listkeys&namespace=bookmarks'
- $ python hgweb.cgi
+ $ $PYTHON hgweb.cgi
Status: 200 Script output follows\r (esc)
Content-Type: application/mercurial-0.1\r (esc)
Content-Length: 0\r (esc)
@@ -2260,7 +2284,7 @@
$ PATH_INFO=/log; export PATH_INFO
$ QUERY_STRING='rev=babar'
- $ python hgweb.cgi > search
+ $ $PYTHON hgweb.cgi > search
$ grep Status search
Status: 200 Script output follows\r (esc)
@@ -2268,7 +2292,7 @@
$ PATH_INFO=/summary; export PATH_INFO
$ QUERY_STRING='style=monoblue'; export QUERY_STRING
- $ python hgweb.cgi > summary.out
+ $ $PYTHON hgweb.cgi > summary.out
$ grep "^Status" summary.out
Status: 200 Script output follows\r (esc)
@@ -2279,7 +2303,7 @@
$ PATH_INFO=/rev/5; export PATH_INFO
$ QUERY_STRING='style=raw'
- $ python hgweb.cgi #> search
+ $ $PYTHON hgweb.cgi #> search
Status: 404 Not Found\r (esc)
ETag: W/"*"\r (glob) (esc)
Content-Type: text/plain; charset=ascii\r (esc)
@@ -2293,7 +2317,7 @@
$ PATH_INFO=/rev/4; export PATH_INFO
$ QUERY_STRING='style=raw'
- $ python hgweb.cgi #> search
+ $ $PYTHON hgweb.cgi #> search
Status: 404 Not Found\r (esc)
ETag: W/"*"\r (glob) (esc)
Content-Type: text/plain; charset=ascii\r (esc)
@@ -2321,11 +2345,11 @@
$ hg phase --force --secret 0
$ PATH_INFO=/graph/; export PATH_INFO
$ QUERY_STRING=''
- $ python hgweb.cgi | grep Status
+ $ $PYTHON hgweb.cgi | grep Status
Status: 200 Script output follows\r (esc)
(check rendered revision)
$ QUERY_STRING='style=raw'
- $ python hgweb.cgi | grep -v ETag
+ $ $PYTHON hgweb.cgi | grep -v ETag
Status: 200 Script output follows\r (esc)
Content-Type: text/plain; charset=ascii\r (esc)
\r (esc)
--- a/tests/test-hgweb-descend-empties.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-hgweb-descend-empties.t Wed Jul 19 07:51:41 2017 -0500
@@ -76,9 +76,10 @@
<span class="tag">tip</span>
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -195,9 +196,10 @@
<span class="tag">tip</span>
</h3>
+
<form class="search" action="/log">
<input type="hidden" name="style" value="coal" />
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -287,13 +289,14 @@
<div class="page-header">
<h1 class="breadcrumb"><a href="/">Mercurial</a> / files</h1>
- <form action="/log">
- <input type="hidden" name="style" value="monoblue" />
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
+
+ <form action="/log">
+ <input type="hidden" name="style" value="monoblue" />
+ <dl class="search">
+ <dt><label>Search: </label></dt>
+ <dd><input type="text" name="rev" value="" /></dd>
+ </dl>
+ </form>
<ul class="page-nav">
<li><a href="/summary?style=monoblue">summary</a></li>
@@ -405,6 +408,7 @@
</div>
<div class="page_nav">
+ <div>
<a href="/summary?style=gitweb">summary</a> |
<a href="/shortlog?style=gitweb">shortlog</a> |
<a href="/log?style=gitweb">changelog</a> |
@@ -415,7 +419,16 @@
files |
<a href="/rev/tip?style=gitweb">changeset</a> |
<a href="/help?style=gitweb">help</a>
- <br/>
+ </div>
+
+ <div class="search">
+ <form id="searchform" action="/log">
+ <input type="hidden" name="style" value="gitweb" />
+ <input name="rev" type="text" value="" size="40" />
+ <div id="hint">Find changesets by keywords (author, files, the commit message), revision
+ number or hash, or <a href="/help/revsets">revset expression</a>.</div>
+ </form>
+ </div>
</div>
<div class="title">/ <span class="logtags"><span class="branchtag" title="default">default</span> <span class="tagtag" title="tip">tip</span> </span></div>
--- a/tests/test-hgweb-diffs.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-hgweb-diffs.t Wed Jul 19 07:51:41 2017 -0500
@@ -84,9 +84,10 @@
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -256,9 +257,10 @@
<span class="tag">tip</span>
</h3>
+
<form class="search" action="/log">
- <p></p>
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -360,9 +362,10 @@
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -536,9 +539,10 @@
<span class="tag">tip</span>
</h3>
+
<form class="search" action="/log">
- <p></p>
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -641,9 +645,10 @@
</h3>
+
<form class="search" action="/log">
- <p></p>
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -770,9 +775,10 @@
<span class="tag">tip</span>
</h3>
+
<form class="search" action="/log">
- <p></p>
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -901,9 +907,10 @@
<span class="tag">tip</span>
</h3>
+
<form class="search" action="/log">
- <p></p>
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -1038,9 +1045,10 @@
<span class="tag">tip</span>
</h3>
+
<form class="search" action="/log">
- <p></p>
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
--- a/tests/test-hgweb-empty.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-hgweb-empty.t Wed Jul 19 07:51:41 2017 -0500
@@ -59,6 +59,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>log</h3>
+
<form class="search" action="/log">
<p><input name="rev" id="search1" type="text" size="30" value="" /></p>
@@ -169,6 +170,7 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>log</h3>
+
<form class="search" action="/log">
<p><input name="rev" id="search1" type="text" size="30" value="" /></p>
@@ -275,9 +277,10 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>graph</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -424,9 +427,10 @@
<span class="tag">tip</span>
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
--- a/tests/test-hgweb-filelog.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-hgweb-filelog.t Wed Jul 19 07:51:41 2017 -0500
@@ -193,9 +193,10 @@
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -315,9 +316,10 @@
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -437,9 +439,10 @@
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -550,9 +553,10 @@
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -639,9 +643,10 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>error</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30"></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -739,9 +744,10 @@
(following lines 1:2 <a href="/log/tip/c">back to filelog</a>)
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -858,9 +864,10 @@
(following lines 1:2 <a href="/log/tip/c?revcount=1">back to filelog</a>)
</h3>
+
<form class="search" action="/log">
<input type="hidden" name="revcount" value="1" />
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -1094,9 +1101,10 @@
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -1375,9 +1383,10 @@
(following lines 3:4 <a href="/log/tip/c">back to filelog</a>)
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -1631,9 +1640,10 @@
(following lines 3:4, descending <a href="/log/8/c">back to filelog</a>)
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
--- a/tests/test-hgweb-no-path-info.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-hgweb-no-path-info.t Wed Jul 19 07:51:41 2017 -0500
@@ -70,7 +70,7 @@
> env['QUERY_STRING'] = 'style=raw'
> process(hgwebdir({'repo': '.'}))
> EOF
- $ python request.py
+ $ $PYTHON request.py
---- STATUS
200 Script output follows
---- HEADERS
--- a/tests/test-hgweb-no-request-uri.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-hgweb-no-request-uri.t Wed Jul 19 07:51:41 2017 -0500
@@ -81,7 +81,7 @@
> env['QUERY_STRING'] = 'style=raw'
> process(hgwebdir({'repo': '.'}))
> EOF
- $ python request.py
+ $ $PYTHON request.py
---- STATUS
200 Script output follows
---- HEADERS
--- a/tests/test-hgweb-non-interactive.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-hgweb-non-interactive.t Wed Jul 19 07:51:41 2017 -0500
@@ -76,7 +76,7 @@
> with i._obtainrepo() as repo:
> print sorted([x for x in repo.ui.environ if x.startswith('wsgi')])
> EOF
- $ python request.py
+ $ $PYTHON request.py
---- STATUS
200 Script output follows
---- HEADERS
--- a/tests/test-hgweb-removed.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-hgweb-removed.t Wed Jul 19 07:51:41 2017 -0500
@@ -65,9 +65,10 @@
<span class="tag">tip</span>
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -192,9 +193,10 @@
<span class="tag">tip</span>
</h3>
+
<form class="search" action="/log">
- <p></p>
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
--- a/tests/test-hgweb-symrev.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-hgweb-symrev.t Wed Jul 19 07:51:41 2017 -0500
@@ -482,7 +482,7 @@
<a href="/file/tip?style=gitweb">files</a> |
<a href="/graph/tip?revcount=30&style=gitweb">less</a>
<a href="/graph/tip?revcount=120&style=gitweb">more</a>
- | <a href="/graph/43c799df6e75?style=gitweb">(0)</a> <a href="/graph/tip?style=gitweb">tip</a> <br/>
+ | <a href="/graph/43c799df6e75?style=gitweb">(0)</a> <a href="/graph/tip?style=gitweb">tip</a>
<a href="/graph/tip?revcount=30&style=gitweb">less</a>
<a href="/graph/tip?revcount=120&style=gitweb">more</a>
| <a href="/graph/43c799df6e75?style=gitweb">(0)</a> <a href="/graph/tip?style=gitweb">tip</a>
@@ -572,7 +572,7 @@
<a href="/file/xyzzy?style=gitweb">files</a> |
<a href="/graph/xyzzy?revcount=30&style=gitweb">less</a>
<a href="/graph/xyzzy?revcount=120&style=gitweb">more</a>
- | <a href="/graph/43c799df6e75?style=gitweb">(0)</a> <a href="/graph/tip?style=gitweb">tip</a> <br/>
+ | <a href="/graph/43c799df6e75?style=gitweb">(0)</a> <a href="/graph/tip?style=gitweb">tip</a>
<a href="/graph/xyzzy?revcount=30&style=gitweb">less</a>
<a href="/graph/xyzzy?revcount=120&style=gitweb">more</a>
| <a href="/graph/43c799df6e75?style=gitweb">(0)</a> <a href="/graph/tip?style=gitweb">tip</a>
--- a/tests/test-hgweb.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-hgweb.t Wed Jul 19 07:51:41 2017 -0500
@@ -81,9 +81,10 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>error</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30"></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -188,9 +189,10 @@
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>error</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30"></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -268,9 +270,10 @@
<span class="tag">tip</span> <span class="tag">@</span> <span class="tag">a b c</span> <span class="tag">d/e/f</span>
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -337,7 +340,7 @@
$ get-with-headers.py --twice localhost:$HGPORT 'static/style-gitweb.css' - date etag server
200 Script output follows
- content-length: 8012
+ content-length: 9007
content-type: text/css
body { font-family: sans-serif; font-size: 12px; border:solid #d9d8d1; border-width:1px; margin:10px; background: white; color: black; }
@@ -346,8 +349,19 @@
div.page_header { height:25px; padding:8px; font-size:18px; font-weight:bold; background-color:#d9d8d1; }
div.page_header a:visited { color:#0000cc; }
div.page_header a:hover { color:#880000; }
- div.page_nav { padding:8px; }
+ div.page_nav {
+ padding:8px;
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+ }
div.page_nav a:visited { color:#0000cc; }
+ div.extra_nav {
+ padding: 8px;
+ }
+ div.extra_nav a:visited {
+ color: #0000cc;
+ }
div.page_path { padding:8px; border:solid #d9d8d1; border-width:0px 0px 1px}
div.page_footer { padding:4px 8px; background-color: #d9d8d1; }
div.page_footer_text { float:left; color:#555555; font-style:italic; }
@@ -394,13 +408,30 @@
div.pre { font-family:monospace; font-size:12px; white-space:pre; }
div.diff_info { font-family:monospace; color:#000099; background-color:#edece6; font-style:italic; }
div.index_include { border:solid #d9d8d1; border-width:0px 0px 1px; padding:12px 8px; }
- div.search { margin:4px 8px; position:absolute; top:56px; right:12px }
+
+ .search {
+ margin-right: 8px;
+ }
+
+ div#hint {
+ position: absolute;
+ display: none;
+ width: 250px;
+ padding: 5px;
+ background: #ffc;
+ border: 1px solid yellow;
+ border-radius: 5px;
+ }
+
+ #searchform:hover div#hint { display: block; }
+
tr.thisrev a { color:#999999; text-decoration: none; }
tr.thisrev pre { color:#009900; }
td.annotate {
white-space: nowrap;
}
div.annotate-info {
+ z-index: 5;
display: none;
position: absolute;
background-color: #FFFFFF;
@@ -467,7 +498,7 @@
-ms-user-select: none;
user-select: none;
display: inline-block;
- margin-left: -5em;
+ margin-left: -6em;
width: 4em;
color: #999;
text-align: right;
@@ -489,13 +520,11 @@
.description {
font-family: monospace;
+ white-space: pre;
}
/* Followlines */
- div.page_body pre.sourcelines > span.followlines-select:hover {
- cursor: cell;
- }
-
+ tbody.sourcelines > tr.followlines-selected,
pre.sourcelines > span.followlines-selected {
background-color: #99C7E9 !important;
}
@@ -532,21 +561,62 @@
font-family: sans-serif;
}
- div#followlines-tooltip {
+ .btn-followlines {
display: none;
- position: fixed;
- background-color: #ffc;
- border: 1px solid #999;
- padding: 2px;
+ cursor: pointer;
+ box-sizing: content-box;
+ font-size: 11px;
+ width: 13px;
+ height: 13px;
+ border-radius: 3px;
+ margin: 0px;
+ margin-top: -2px;
+ padding: 0px;
+ background-color: #E5FDE5;
+ border: 1px solid #9BC19B;
+ font-family: monospace;
+ text-align: center;
+ line-height: 5px;
+ }
+
+ tr .btn-followlines {
+ position: absolute;
}
- .sourcelines:hover > div#followlines-tooltip {
+ span .btn-followlines {
+ float: left;
+ }
+
+ span.followlines-select .btn-followlines {
+ margin-left: -1.6em;
+ }
+
+ .btn-followlines:hover {
+ transform: scale(1.1, 1.1);
+ }
+
+ .btn-followlines .followlines-plus {
+ color: green;
+ }
+
+ .btn-followlines .followlines-minus {
+ color: red;
+ }
+
+ .btn-followlines-end {
+ background-color: #ffdcdc;
+ }
+
+ .sourcelines tr:hover .btn-followlines,
+ .sourcelines span.followlines-select:hover > .btn-followlines {
display: inline;
}
- .sourcelines:hover > div#followlines-tooltip.hidden {
+ .btn-followlines-hidden,
+ .sourcelines tr:hover .btn-followlines-hidden {
display: none;
}
+
/* Graph */
div#wrapper {
position: relative;
--- a/tests/test-highlight.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-highlight.t Wed Jul 19 07:51:41 2017 -0500
@@ -20,8 +20,6 @@
create random Python file to exercise Pygments
$ cat <<EOF > primes.py
- > #!/usr/bin/env python
- >
> """Fun with generators. Corresponding Haskell implementation:
>
> primes = 2 : sieve [3, 5..]
@@ -76,7 +74,7 @@
<script type="text/javascript" src="/static/mercurial.js"></script>
<link rel="stylesheet" href="/highlightcss" type="text/css" />
- <title>test: 06824edf55d0 primes.py</title>
+ <title>test: 1af356141006 primes.py</title>
</head>
<body>
@@ -114,13 +112,14 @@
<div class="main">
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>
- view primes.py @ 0:<a href="/rev/06824edf55d0">06824edf55d0</a>
+ view primes.py @ 0:<a href="/rev/1af356141006">1af356141006</a>
<span class="tag">tip</span>
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -149,40 +148,43 @@
<div class="overflow">
<div class="sourcefirst linewraptoggle">line wrap: <a class="linewraplink" href="javascript:toggleLinewrap()">on</a></div>
<div class="sourcefirst"> line source</div>
- <pre class="sourcelines stripes4 wrap bottomline" data-logurl="/log/tip/primes.py" data-ishead="1">
- <span id="l1"><span class="c">#!/usr/bin/env python</span></span><a href="#l1"></a>
+ <pre class="sourcelines stripes4 wrap bottomline"
+ data-logurl="/log/tip/primes.py"
+ data-selectabletag="SPAN"
+ data-ishead="1">
+
+ <span id="l1"><span class="sd">"""Fun with generators. Corresponding Haskell implementation:</span></span><a href="#l1"></a>
<span id="l2"></span><a href="#l2"></a>
- <span id="l3"><span class="sd">"""Fun with generators. Corresponding Haskell implementation:</span></span><a href="#l3"></a>
- <span id="l4"></span><a href="#l4"></a>
- <span id="l5"><span class="sd">primes = 2 : sieve [3, 5..]</span></span><a href="#l5"></a>
- <span id="l6"><span class="sd"> where sieve (p:ns) = p : sieve [n | n <- ns, mod n p /= 0]</span></span><a href="#l6"></a>
- <span id="l7"><span class="sd">"""</span></span><a href="#l7"></a>
+ <span id="l3"><span class="sd">primes = 2 : sieve [3, 5..]</span></span><a href="#l3"></a>
+ <span id="l4"><span class="sd"> where sieve (p:ns) = p : sieve [n | n <- ns, mod n p /= 0]</span></span><a href="#l4"></a>
+ <span id="l5"><span class="sd">"""</span></span><a href="#l5"></a>
+ <span id="l6"></span><a href="#l6"></a>
+ <span id="l7"><span class="kn">from</span> <span class="nn">itertools</span> <span class="kn">import</span> <span class="n">dropwhile</span><span class="p">,</span> <span class="n">ifilter</span><span class="p">,</span> <span class="n">islice</span><span class="p">,</span> <span class="n">count</span><span class="p">,</span> <span class="n">chain</span></span><a href="#l7"></a>
<span id="l8"></span><a href="#l8"></a>
- <span id="l9"><span class="kn">from</span> <span class="nn">itertools</span> <span class="kn">import</span> <span class="n">dropwhile</span><span class="p">,</span> <span class="n">ifilter</span><span class="p">,</span> <span class="n">islice</span><span class="p">,</span> <span class="n">count</span><span class="p">,</span> <span class="n">chain</span></span><a href="#l9"></a>
- <span id="l10"></span><a href="#l10"></a>
- <span id="l11"><span class="kn">def</span> <span class="nf">primes</span><span class="p">():</span></span><a href="#l11"></a>
- <span id="l12"> <span class="sd">"""Generate all primes."""</span></span><a href="#l12"></a>
- <span id="l13"> <span class="kn">def</span> <span class="nf">sieve</span><span class="p">(</span><span class="n">ns</span><span class="p">):</span></span><a href="#l13"></a>
- <span id="l14"> <span class="n">p</span> <span class="o">=</span> <span class="n">ns</span><span class="o">.</span><span class="n">next</span><span class="p">()</span></span><a href="#l14"></a>
- <span id="l15"> <span class="c"># It is important to yield *here* in order to stop the</span></span><a href="#l15"></a>
- <span id="l16"> <span class="c"># infinite recursion.</span></span><a href="#l16"></a>
- <span id="l17"> <span class="kn">yield</span> <span class="n">p</span></span><a href="#l17"></a>
- <span id="l18"> <span class="n">ns</span> <span class="o">=</span> <span class="n">ifilter</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">n</span><span class="p">:</span> <span class="n">n</span> <span class="o">%</span> <span class="n">p</span> <span class="o">!=</span> <span class="mi">0</span><span class="p">,</span> <span class="n">ns</span><span class="p">)</span></span><a href="#l18"></a>
- <span id="l19"> <span class="kn">for</span> <span class="n">n</span> <span class="ow">in</span> <span class="n">sieve</span><span class="p">(</span><span class="n">ns</span><span class="p">):</span></span><a href="#l19"></a>
- <span id="l20"> <span class="kn">yield</span> <span class="n">n</span></span><a href="#l20"></a>
- <span id="l21"></span><a href="#l21"></a>
- <span id="l22"> <span class="n">odds</span> <span class="o">=</span> <span class="n">ifilter</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">i</span><span class="p">:</span> <span class="n">i</span> <span class="o">%</span> <span class="mi">2</span> <span class="o">==</span> <span class="mi">1</span><span class="p">,</span> <span class="n">count</span><span class="p">())</span></span><a href="#l22"></a>
- <span id="l23"> <span class="kn">return</span> <span class="n">chain</span><span class="p">([</span><span class="mi">2</span><span class="p">],</span> <span class="n">sieve</span><span class="p">(</span><span class="n">dropwhile</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">n</span><span class="p">:</span> <span class="n">n</span> <span class="o"><</span> <span class="mi">3</span><span class="p">,</span> <span class="n">odds</span><span class="p">)))</span></span><a href="#l23"></a>
- <span id="l24"></span><a href="#l24"></a>
- <span id="l25"><span class="kn">if</span> <span class="n">__name__</span> <span class="o">==</span> <span class="s">"__main__"</span><span class="p">:</span></span><a href="#l25"></a>
- <span id="l26"> <span class="kn">import</span> <span class="nn">sys</span></span><a href="#l26"></a>
- <span id="l27"> <span class="kn">try</span><span class="p">:</span></span><a href="#l27"></a>
- <span id="l28"> <span class="n">n</span> <span class="o">=</span> <span class="nb">int</span><span class="p">(</span><span class="n">sys</span><span class="o">.</span><span class="n">argv</span><span class="p">[</span><span class="mi">1</span><span class="p">])</span></span><a href="#l28"></a>
- <span id="l29"> <span class="kn">except</span> <span class="p">(</span><span class="ne">ValueError</span><span class="p">,</span> <span class="ne">IndexError</span><span class="p">):</span></span><a href="#l29"></a>
- <span id="l30"> <span class="n">n</span> <span class="o">=</span> <span class="mi">10</span></span><a href="#l30"></a>
- <span id="l31"> <span class="n">p</span> <span class="o">=</span> <span class="n">primes</span><span class="p">()</span></span><a href="#l31"></a>
- <span id="l32"> <span class="kn">print</span> <span class="s">"The first </span><span class="si">%d</span><span class="s"> primes: </span><span class="si">%s</span><span class="s">"</span> <span class="o">%</span> <span class="p">(</span><span class="n">n</span><span class="p">,</span> <span class="nb">list</span><span class="p">(</span><span class="n">islice</span><span class="p">(</span><span class="n">p</span><span class="p">,</span> <span class="n">n</span><span class="p">)))</span></span><a href="#l32"></a>
- <span id="l33"></span><a href="#l33"></a></pre>
+ <span id="l9"><span class="kn">def</span> <span class="nf">primes</span><span class="p">():</span></span><a href="#l9"></a>
+ <span id="l10"> <span class="sd">"""Generate all primes."""</span></span><a href="#l10"></a>
+ <span id="l11"> <span class="kn">def</span> <span class="nf">sieve</span><span class="p">(</span><span class="n">ns</span><span class="p">):</span></span><a href="#l11"></a>
+ <span id="l12"> <span class="n">p</span> <span class="o">=</span> <span class="n">ns</span><span class="o">.</span><span class="n">next</span><span class="p">()</span></span><a href="#l12"></a>
+ <span id="l13"> <span class="c"># It is important to yield *here* in order to stop the</span></span><a href="#l13"></a>
+ <span id="l14"> <span class="c"># infinite recursion.</span></span><a href="#l14"></a>
+ <span id="l15"> <span class="kn">yield</span> <span class="n">p</span></span><a href="#l15"></a>
+ <span id="l16"> <span class="n">ns</span> <span class="o">=</span> <span class="n">ifilter</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">n</span><span class="p">:</span> <span class="n">n</span> <span class="o">%</span> <span class="n">p</span> <span class="o">!=</span> <span class="mi">0</span><span class="p">,</span> <span class="n">ns</span><span class="p">)</span></span><a href="#l16"></a>
+ <span id="l17"> <span class="kn">for</span> <span class="n">n</span> <span class="ow">in</span> <span class="n">sieve</span><span class="p">(</span><span class="n">ns</span><span class="p">):</span></span><a href="#l17"></a>
+ <span id="l18"> <span class="kn">yield</span> <span class="n">n</span></span><a href="#l18"></a>
+ <span id="l19"></span><a href="#l19"></a>
+ <span id="l20"> <span class="n">odds</span> <span class="o">=</span> <span class="n">ifilter</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">i</span><span class="p">:</span> <span class="n">i</span> <span class="o">%</span> <span class="mi">2</span> <span class="o">==</span> <span class="mi">1</span><span class="p">,</span> <span class="n">count</span><span class="p">())</span></span><a href="#l20"></a>
+ <span id="l21"> <span class="kn">return</span> <span class="n">chain</span><span class="p">([</span><span class="mi">2</span><span class="p">],</span> <span class="n">sieve</span><span class="p">(</span><span class="n">dropwhile</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">n</span><span class="p">:</span> <span class="n">n</span> <span class="o"><</span> <span class="mi">3</span><span class="p">,</span> <span class="n">odds</span><span class="p">)))</span></span><a href="#l21"></a>
+ <span id="l22"></span><a href="#l22"></a>
+ <span id="l23"><span class="kn">if</span> <span class="n">__name__</span> <span class="o">==</span> <span class="s">"__main__"</span><span class="p">:</span></span><a href="#l23"></a>
+ <span id="l24"> <span class="kn">import</span> <span class="nn">sys</span></span><a href="#l24"></a>
+ <span id="l25"> <span class="kn">try</span><span class="p">:</span></span><a href="#l25"></a>
+ <span id="l26"> <span class="n">n</span> <span class="o">=</span> <span class="nb">int</span><span class="p">(</span><span class="n">sys</span><span class="o">.</span><span class="n">argv</span><span class="p">[</span><span class="mi">1</span><span class="p">])</span></span><a href="#l26"></a>
+ <span id="l27"> <span class="kn">except</span> <span class="p">(</span><span class="ne">ValueError</span><span class="p">,</span> <span class="ne">IndexError</span><span class="p">):</span></span><a href="#l27"></a>
+ <span id="l28"> <span class="n">n</span> <span class="o">=</span> <span class="mi">10</span></span><a href="#l28"></a>
+ <span id="l29"> <span class="n">p</span> <span class="o">=</span> <span class="n">primes</span><span class="p">()</span></span><a href="#l29"></a>
+ <span id="l30"> <span class="kn">print</span> <span class="s">"The first </span><span class="si">%d</span><span class="s"> primes: </span><span class="si">%s</span><span class="s">"</span> <span class="o">%</span> <span class="p">(</span><span class="n">n</span><span class="p">,</span> <span class="nb">list</span><span class="p">(</span><span class="n">islice</span><span class="p">(</span><span class="n">p</span><span class="p">,</span> <span class="n">n</span><span class="p">)))</span></span><a href="#l30"></a>
+ <span id="l31"></span><a href="#l31"></a>
+ </pre>
</div>
<script type="text/javascript" src="/static/followlines.js"></script>
@@ -249,13 +251,14 @@
<div class="main">
<h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
<h3>
- annotate primes.py @ 0:<a href="/rev/06824edf55d0">06824edf55d0</a>
+ annotate primes.py @ 0:<a href="/rev/1af356141006">1af356141006</a>
<span class="tag">tip</span>
</h3>
+
<form class="search" action="/log">
- <p><input name="rev" id="search1" type="text" size="30" /></p>
+ <p><input name="rev" id="search1" type="text" size="30" value="" /></p>
<div id="hint">Find changesets by keywords (author, files, the commit message), revision
number or hash, or <a href="/help/revsets">revset expression</a>.</div>
</form>
@@ -289,570 +292,539 @@
<th class="line"> line source</th>
</tr>
</thead>
- <tbody class="stripes2">
+ <tbody class="stripes2 sourcelines"
+ data-logurl="/log/tip/primes.py"
+ data-selectabletag="TR"
+ data-ishead="1">
<tr id="l1" class="thisrev">
<td class="annotate parity0">
- <a href="/annotate/06824edf55d0/primes.py#l1">
+ <a href="/annotate/1af356141006/primes.py#l1">
0
</a>
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l1">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l1">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l1"> 1</a> <span class="c">#!/usr/bin/env python</span></td>
+ <td class="source followlines-btn-parent"><a href="#l1"> 1</a> <span class="sd">"""Fun with generators. Corresponding Haskell implementation:</span></td>
</tr>
<tr id="l2" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l2">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l2">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l2"> 2</a> </td>
+ <td class="source followlines-btn-parent"><a href="#l2"> 2</a> </td>
</tr>
<tr id="l3" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l3">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l3">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l3"> 3</a> <span class="sd">"""Fun with generators. Corresponding Haskell implementation:</span></td>
+ <td class="source followlines-btn-parent"><a href="#l3"> 3</a> <span class="sd">primes = 2 : sieve [3, 5..]</span></td>
</tr>
<tr id="l4" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l4">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l4">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l4"> 4</a> </td>
+ <td class="source followlines-btn-parent"><a href="#l4"> 4</a> <span class="sd"> where sieve (p:ns) = p : sieve [n | n <- ns, mod n p /= 0]</span></td>
</tr>
<tr id="l5" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l5">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l5">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l5"> 5</a> <span class="sd">primes = 2 : sieve [3, 5..]</span></td>
+ <td class="source followlines-btn-parent"><a href="#l5"> 5</a> <span class="sd">"""</span></td>
</tr>
<tr id="l6" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l6">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l6">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l6"> 6</a> <span class="sd"> where sieve (p:ns) = p : sieve [n | n <- ns, mod n p /= 0]</span></td>
+ <td class="source followlines-btn-parent"><a href="#l6"> 6</a> </td>
</tr>
<tr id="l7" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l7">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l7">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l7"> 7</a> <span class="sd">"""</span></td>
+ <td class="source followlines-btn-parent"><a href="#l7"> 7</a> <span class="kn">from</span> <span class="nn">itertools</span> <span class="kn">import</span> <span class="n">dropwhile</span><span class="p">,</span> <span class="n">ifilter</span><span class="p">,</span> <span class="n">islice</span><span class="p">,</span> <span class="n">count</span><span class="p">,</span> <span class="n">chain</span></td>
</tr>
<tr id="l8" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l8">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l8">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l8"> 8</a> </td>
+ <td class="source followlines-btn-parent"><a href="#l8"> 8</a> </td>
</tr>
<tr id="l9" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l9">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l9">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l9"> 9</a> <span class="kn">from</span> <span class="nn">itertools</span> <span class="kn">import</span> <span class="n">dropwhile</span><span class="p">,</span> <span class="n">ifilter</span><span class="p">,</span> <span class="n">islice</span><span class="p">,</span> <span class="n">count</span><span class="p">,</span> <span class="n">chain</span></td>
+ <td class="source followlines-btn-parent"><a href="#l9"> 9</a> <span class="kn">def</span> <span class="nf">primes</span><span class="p">():</span></td>
</tr>
<tr id="l10" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l10">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l10">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l10"> 10</a> </td>
+ <td class="source followlines-btn-parent"><a href="#l10"> 10</a> <span class="sd">"""Generate all primes."""</span></td>
</tr>
<tr id="l11" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l11">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l11">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l11"> 11</a> <span class="kn">def</span> <span class="nf">primes</span><span class="p">():</span></td>
+ <td class="source followlines-btn-parent"><a href="#l11"> 11</a> <span class="kn">def</span> <span class="nf">sieve</span><span class="p">(</span><span class="n">ns</span><span class="p">):</span></td>
</tr>
<tr id="l12" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l12">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l12">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l12"> 12</a> <span class="sd">"""Generate all primes."""</span></td>
+ <td class="source followlines-btn-parent"><a href="#l12"> 12</a> <span class="n">p</span> <span class="o">=</span> <span class="n">ns</span><span class="o">.</span><span class="n">next</span><span class="p">()</span></td>
</tr>
<tr id="l13" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l13">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l13">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l13"> 13</a> <span class="kn">def</span> <span class="nf">sieve</span><span class="p">(</span><span class="n">ns</span><span class="p">):</span></td>
+ <td class="source followlines-btn-parent"><a href="#l13"> 13</a> <span class="c"># It is important to yield *here* in order to stop the</span></td>
</tr>
<tr id="l14" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l14">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l14">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l14"> 14</a> <span class="n">p</span> <span class="o">=</span> <span class="n">ns</span><span class="o">.</span><span class="n">next</span><span class="p">()</span></td>
+ <td class="source followlines-btn-parent"><a href="#l14"> 14</a> <span class="c"># infinite recursion.</span></td>
</tr>
<tr id="l15" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l15">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l15">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l15"> 15</a> <span class="c"># It is important to yield *here* in order to stop the</span></td>
+ <td class="source followlines-btn-parent"><a href="#l15"> 15</a> <span class="kn">yield</span> <span class="n">p</span></td>
</tr>
<tr id="l16" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l16">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l16">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l16"> 16</a> <span class="c"># infinite recursion.</span></td>
+ <td class="source followlines-btn-parent"><a href="#l16"> 16</a> <span class="n">ns</span> <span class="o">=</span> <span class="n">ifilter</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">n</span><span class="p">:</span> <span class="n">n</span> <span class="o">%</span> <span class="n">p</span> <span class="o">!=</span> <span class="mi">0</span><span class="p">,</span> <span class="n">ns</span><span class="p">)</span></td>
</tr>
<tr id="l17" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l17">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l17">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l17"> 17</a> <span class="kn">yield</span> <span class="n">p</span></td>
+ <td class="source followlines-btn-parent"><a href="#l17"> 17</a> <span class="kn">for</span> <span class="n">n</span> <span class="ow">in</span> <span class="n">sieve</span><span class="p">(</span><span class="n">ns</span><span class="p">):</span></td>
</tr>
<tr id="l18" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l18">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l18">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l18"> 18</a> <span class="n">ns</span> <span class="o">=</span> <span class="n">ifilter</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">n</span><span class="p">:</span> <span class="n">n</span> <span class="o">%</span> <span class="n">p</span> <span class="o">!=</span> <span class="mi">0</span><span class="p">,</span> <span class="n">ns</span><span class="p">)</span></td>
+ <td class="source followlines-btn-parent"><a href="#l18"> 18</a> <span class="kn">yield</span> <span class="n">n</span></td>
</tr>
<tr id="l19" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l19">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l19">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l19"> 19</a> <span class="kn">for</span> <span class="n">n</span> <span class="ow">in</span> <span class="n">sieve</span><span class="p">(</span><span class="n">ns</span><span class="p">):</span></td>
+ <td class="source followlines-btn-parent"><a href="#l19"> 19</a> </td>
</tr>
<tr id="l20" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l20">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l20">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l20"> 20</a> <span class="kn">yield</span> <span class="n">n</span></td>
+ <td class="source followlines-btn-parent"><a href="#l20"> 20</a> <span class="n">odds</span> <span class="o">=</span> <span class="n">ifilter</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">i</span><span class="p">:</span> <span class="n">i</span> <span class="o">%</span> <span class="mi">2</span> <span class="o">==</span> <span class="mi">1</span><span class="p">,</span> <span class="n">count</span><span class="p">())</span></td>
</tr>
<tr id="l21" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l21">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l21">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l21"> 21</a> </td>
+ <td class="source followlines-btn-parent"><a href="#l21"> 21</a> <span class="kn">return</span> <span class="n">chain</span><span class="p">([</span><span class="mi">2</span><span class="p">],</span> <span class="n">sieve</span><span class="p">(</span><span class="n">dropwhile</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">n</span><span class="p">:</span> <span class="n">n</span> <span class="o"><</span> <span class="mi">3</span><span class="p">,</span> <span class="n">odds</span><span class="p">)))</span></td>
</tr>
<tr id="l22" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l22">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l22">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l22"> 22</a> <span class="n">odds</span> <span class="o">=</span> <span class="n">ifilter</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">i</span><span class="p">:</span> <span class="n">i</span> <span class="o">%</span> <span class="mi">2</span> <span class="o">==</span> <span class="mi">1</span><span class="p">,</span> <span class="n">count</span><span class="p">())</span></td>
+ <td class="source followlines-btn-parent"><a href="#l22"> 22</a> </td>
</tr>
<tr id="l23" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l23">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l23">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l23"> 23</a> <span class="kn">return</span> <span class="n">chain</span><span class="p">([</span><span class="mi">2</span><span class="p">],</span> <span class="n">sieve</span><span class="p">(</span><span class="n">dropwhile</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">n</span><span class="p">:</span> <span class="n">n</span> <span class="o"><</span> <span class="mi">3</span><span class="p">,</span> <span class="n">odds</span><span class="p">)))</span></td>
+ <td class="source followlines-btn-parent"><a href="#l23"> 23</a> <span class="kn">if</span> <span class="n">__name__</span> <span class="o">==</span> <span class="s">"__main__"</span><span class="p">:</span></td>
</tr>
<tr id="l24" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l24">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l24">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l24"> 24</a> </td>
+ <td class="source followlines-btn-parent"><a href="#l24"> 24</a> <span class="kn">import</span> <span class="nn">sys</span></td>
</tr>
<tr id="l25" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l25">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l25">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l25"> 25</a> <span class="kn">if</span> <span class="n">__name__</span> <span class="o">==</span> <span class="s">"__main__"</span><span class="p">:</span></td>
+ <td class="source followlines-btn-parent"><a href="#l25"> 25</a> <span class="kn">try</span><span class="p">:</span></td>
</tr>
<tr id="l26" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l26">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l26">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l26"> 26</a> <span class="kn">import</span> <span class="nn">sys</span></td>
+ <td class="source followlines-btn-parent"><a href="#l26"> 26</a> <span class="n">n</span> <span class="o">=</span> <span class="nb">int</span><span class="p">(</span><span class="n">sys</span><span class="o">.</span><span class="n">argv</span><span class="p">[</span><span class="mi">1</span><span class="p">])</span></td>
</tr>
<tr id="l27" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l27">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l27">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l27"> 27</a> <span class="kn">try</span><span class="p">:</span></td>
+ <td class="source followlines-btn-parent"><a href="#l27"> 27</a> <span class="kn">except</span> <span class="p">(</span><span class="ne">ValueError</span><span class="p">,</span> <span class="ne">IndexError</span><span class="p">):</span></td>
</tr>
<tr id="l28" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l28">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l28">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l28"> 28</a> <span class="n">n</span> <span class="o">=</span> <span class="nb">int</span><span class="p">(</span><span class="n">sys</span><span class="o">.</span><span class="n">argv</span><span class="p">[</span><span class="mi">1</span><span class="p">])</span></td>
+ <td class="source followlines-btn-parent"><a href="#l28"> 28</a> <span class="n">n</span> <span class="o">=</span> <span class="mi">10</span></td>
</tr>
<tr id="l29" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l29">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l29">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l29"> 29</a> <span class="kn">except</span> <span class="p">(</span><span class="ne">ValueError</span><span class="p">,</span> <span class="ne">IndexError</span><span class="p">):</span></td>
+ <td class="source followlines-btn-parent"><a href="#l29"> 29</a> <span class="n">p</span> <span class="o">=</span> <span class="n">primes</span><span class="p">()</span></td>
</tr>
<tr id="l30" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l30">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l30">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l30"> 30</a> <span class="n">n</span> <span class="o">=</span> <span class="mi">10</span></td>
+ <td class="source followlines-btn-parent"><a href="#l30"> 30</a> <span class="kn">print</span> <span class="s">"The first </span><span class="si">%d</span><span class="s"> primes: </span><span class="si">%s</span><span class="s">"</span> <span class="o">%</span> <span class="p">(</span><span class="n">n</span><span class="p">,</span> <span class="nb">list</span><span class="p">(</span><span class="n">islice</span><span class="p">(</span><span class="n">p</span><span class="p">,</span> <span class="n">n</span><span class="p">)))</span></td>
</tr>
<tr id="l31" class="thisrev">
<td class="annotate parity0">
<div class="annotate-info">
<div>
- <a href="/annotate/06824edf55d0/primes.py#l31">
- 06824edf55d0</a>
- a
- </div>
- <div><em>test</em></div>
- <div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
- </div>
- </td>
- <td class="source"><a href="#l31"> 31</a> <span class="n">p</span> <span class="o">=</span> <span class="n">primes</span><span class="p">()</span></td>
- </tr>
- <tr id="l32" class="thisrev">
- <td class="annotate parity0">
-
- <div class="annotate-info">
- <div>
- <a href="/annotate/06824edf55d0/primes.py#l32">
- 06824edf55d0</a>
+ <a href="/annotate/1af356141006/primes.py#l31">
+ 1af356141006</a>
a
</div>
<div><em>test</em></div>
<div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
+ <a href="/diff/1af356141006/primes.py">diff</a>
+ <a href="/rev/1af356141006">changeset</a>
</div>
</td>
- <td class="source"><a href="#l32"> 32</a> <span class="kn">print</span> <span class="s">"The first </span><span class="si">%d</span><span class="s"> primes: </span><span class="si">%s</span><span class="s">"</span> <span class="o">%</span> <span class="p">(</span><span class="n">n</span><span class="p">,</span> <span class="nb">list</span><span class="p">(</span><span class="n">islice</span><span class="p">(</span><span class="n">p</span><span class="p">,</span> <span class="n">n</span><span class="p">)))</span></td>
- </tr>
- <tr id="l33" class="thisrev">
- <td class="annotate parity0">
-
- <div class="annotate-info">
- <div>
- <a href="/annotate/06824edf55d0/primes.py#l33">
- 06824edf55d0</a>
- a
- </div>
- <div><em>test</em></div>
- <div>parents: </div>
- <a href="/diff/06824edf55d0/primes.py">diff</a>
- <a href="/rev/06824edf55d0">changeset</a>
- </div>
- </td>
- <td class="source"><a href="#l33"> 33</a> </td>
+ <td class="source followlines-btn-parent"><a href="#l31"> 31</a> </td>
</tr>
</tbody>
</table>
@@ -860,6 +832,8 @@
</div>
</div>
+ <script type="text/javascript" src="/static/followlines.js"></script>
+
</body>
@@ -946,7 +920,7 @@
test that fileset in highlightfiles works and primes.py is not highlighted
$ get-with-headers.py localhost:$HGPORT 'file/tip/primes.py' | grep 'id="l11"'
- <span id="l11">def primes():</span><a href="#l11"></a>
+ <span id="l11"> def sieve(ns):</span><a href="#l11"></a>
errors encountered
@@ -992,7 +966,7 @@
> EOF
$ cat > unknownfile << EOF
- > #!/usr/bin/python
+ > #!$PYTHON
> def foo():
> pass
> EOF
--- a/tests/test-histedit-arguments.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-histedit-arguments.t Wed Jul 19 07:51:41 2017 -0500
@@ -147,7 +147,7 @@
$ mv .hg/histedit-state.back .hg/histedit-state
$ hg histedit --continue
- saved backup bundle to $TESTTMP/foo/.hg/strip-backup/08d98a8350f3-02594089-backup.hg (glob)
+ saved backup bundle to $TESTTMP/foo/.hg/strip-backup/08d98a8350f3-02594089-histedit.hg (glob)
$ hg log -G -T '{rev} {shortest(node)} {desc}\n' -r 2::
@ 4 f5ed five
|
@@ -157,7 +157,7 @@
|
~
- $ hg unbundle -q $TESTTMP/foo/.hg/strip-backup/08d98a8350f3-02594089-backup.hg
+ $ hg unbundle -q $TESTTMP/foo/.hg/strip-backup/08d98a8350f3-02594089-histedit.hg
$ hg strip -q -r f5ed --config extensions.strip=
$ hg up -q 08d98a8350f3
@@ -264,8 +264,7 @@
HG: user: test
HG: branch 'default'
HG: changed alpha
- saved backup bundle to $TESTTMP/foo/.hg/strip-backup/*-backup.hg (glob)
- saved backup bundle to $TESTTMP/foo/.hg/strip-backup/*-backup.hg (glob)
+ saved backup bundle to $TESTTMP/foo/.hg/strip-backup/c8e68270e35a-63d8b8d8-histedit.hg (glob)
$ hg update -q 2
$ echo x > x
@@ -279,7 +278,7 @@
Test that trimming description using multi-byte characters
--------------------------------------------------------------------
- $ python <<EOF
+ $ $PYTHON <<EOF
> fp = open('logfile', 'w')
> fp.write('12345678901234567890123456789012345678901234567890' +
> '12345') # there are 5 more columns for 80 columns
--- a/tests/test-histedit-bookmark-motion.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-histedit-bookmark-motion.t Wed Jul 19 07:51:41 2017 -0500
@@ -87,14 +87,7 @@
> fold e860deea161a 4 e
> pick 652413bf663e 5 f
> EOF
- saved backup bundle to $TESTTMP/r/.hg/strip-backup/96e494a2d553-3c6c5d92-backup.hg (glob)
- histedit: moving bookmarks also-two from 177f92b77385 to b346ab9a313d
- histedit: moving bookmarks five from 652413bf663e to cacdfd884a93
- histedit: moving bookmarks four from e860deea161a to 59d9f330561f
- histedit: moving bookmarks three from 055a42cdd887 to 59d9f330561f
- histedit: moving bookmarks two from 177f92b77385 to b346ab9a313d
- histedit: moving bookmarks will-move-backwards from d2ae7f538514 to cb9a9f314b8b
- saved backup bundle to $TESTTMP/r/.hg/strip-backup/d2ae7f538514-48787b8d-backup.hg (glob)
+ saved backup bundle to $TESTTMP/r/.hg/strip-backup/96e494a2d553-45c027ab-histedit.hg (glob)
$ hg log --graph
@ changeset: 3:cacdfd884a93
| bookmark: five
@@ -148,10 +141,7 @@
> pick cacdfd884a93 3 f
> pick 59d9f330561f 2 d
> EOF
- histedit: moving bookmarks five from cacdfd884a93 to c04e50810e4b
- histedit: moving bookmarks four from 59d9f330561f to c04e50810e4b
- histedit: moving bookmarks three from 59d9f330561f to c04e50810e4b
- saved backup bundle to $TESTTMP/r/.hg/strip-backup/59d9f330561f-073008af-backup.hg (glob)
+ saved backup bundle to $TESTTMP/r/.hg/strip-backup/59d9f330561f-073008af-histedit.hg (glob)
We expect 'five' to stay at tip, since the tipmost bookmark is most
likely the useful signal.
--- a/tests/test-histedit-commute.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-histedit-commute.t Wed Jul 19 07:51:41 2017 -0500
@@ -417,8 +417,7 @@
> EOF
$ HGEDITOR="sh ./editor.sh" hg histedit 0
- saved backup bundle to $TESTTMP/issue4251/.hg/strip-backup/*-backup.hg (glob)
- saved backup bundle to $TESTTMP/issue4251/.hg/strip-backup/*-backup.hg (glob)
+ saved backup bundle to $TESTTMP/issue4251/.hg/strip-backup/b0f4233702ca-4cf5af69-histedit.hg (glob)
$ hg --config diff.git=yes export 0
# HG changeset patch
--- a/tests/test-histedit-edit.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-histedit-edit.t Wed Jul 19 07:51:41 2017 -0500
@@ -273,7 +273,7 @@
HG: user: test
HG: branch 'default'
HG: added f
- saved backup bundle to $TESTTMP/r/.hg/strip-backup/b5f70786f9b0-c28d9c86-backup.hg (glob)
+ saved backup bundle to $TESTTMP/r/.hg/strip-backup/b5f70786f9b0-c28d9c86-histedit.hg (glob)
$ hg status
@@ -437,7 +437,7 @@
(hg histedit --continue to resume)
[1]
$ HGEDITOR=true hg histedit --continue
- saved backup bundle to $TESTTMP/r0/.hg/strip-backup/cb9a9f314b8b-cc5ccb0b-backup.hg (glob)
+ saved backup bundle to $TESTTMP/r0/.hg/strip-backup/cb9a9f314b8b-cc5ccb0b-histedit.hg (glob)
$ hg log -G
@ changeset: 0:0efcea34f18a
--- a/tests/test-histedit-fold.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-histedit-fold.t Wed Jul 19 07:51:41 2017 -0500
@@ -154,7 +154,7 @@
> from mercurial import util
> def abortfolding(ui, repo, hooktype, **kwargs):
> ctx = repo[kwargs.get('node')]
- > if set(ctx.files()) == set(['c', 'd', 'f']):
+ > if set(ctx.files()) == {'c', 'd', 'f'}:
> return True # abort folding commit only
> ui.warn('allow non-folding commit\\n')
> EOF
@@ -305,7 +305,7 @@
continue: hg histedit --continue
$ hg histedit --continue
251d831eeec5: empty changeset
- saved backup bundle to $TESTTMP/*-backup.hg (glob)
+ saved backup bundle to $TESTTMP/fold-to-empty-test/.hg/strip-backup/888f9082bf99-daa0b8b3-histedit.hg (glob)
$ hg logt --graph
@ 1:617f94f13c0f +4
|
@@ -382,8 +382,7 @@
HG: user: test
HG: branch 'default'
HG: changed file
- saved backup bundle to $TESTTMP/fold-with-dropped/.hg/strip-backup/55c8d8dc79ce-4066cd98-backup.hg (glob)
- saved backup bundle to $TESTTMP/fold-with-dropped/.hg/strip-backup/617f94f13c0f-a35700fc-backup.hg (glob)
+ saved backup bundle to $TESTTMP/fold-with-dropped/.hg/strip-backup/617f94f13c0f-3d69522c-histedit.hg (glob)
$ hg logt -G
@ 1:10c647b2cdd5 +4
|
@@ -488,6 +487,7 @@
$ echo "foo" > amended.txt
$ hg add amended.txt
$ hg ci -q --config extensions.largefiles= --amend -I amended.txt
+ The fsmonitor extension is incompatible with the largefiles extension and has been disabled. (fsmonitor !)
Test that folding multiple changes in a row doesn't show multiple
editors.
--- a/tests/test-histedit-obsolete.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-histedit-obsolete.t Wed Jul 19 07:51:41 2017 -0500
@@ -170,13 +170,13 @@
o 0:cb9a9f314b8b a
$ hg debugobsolete
- 96e494a2d553dd05902ba1cee1d94d4cb7b8faed 0 {b346ab9a313db8537ecf96fca3ca3ca984ef3bd7} (*) {'user': 'test'} (glob)
- b558abc46d09c30f57ac31e85a8a3d64d2e906e4 0 {96e494a2d553dd05902ba1cee1d94d4cb7b8faed} (*) {'user': 'test'} (glob)
d2ae7f538514cd87c17547b0de4cea71fe1af9fb 0 {cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b} (*) {'user': 'test'} (glob)
177f92b773850b59254aa5e923436f921b55483b b346ab9a313db8537ecf96fca3ca3ca984ef3bd7 0 (*) {'user': 'test'} (glob)
055a42cdd88768532f9cf79daa407fc8d138de9b 59d9f330561fd6c88b1a6b32f0e45034d88db784 0 (*) {'user': 'test'} (glob)
e860deea161a2f77de56603b340ebbb4536308ae 59d9f330561fd6c88b1a6b32f0e45034d88db784 0 (*) {'user': 'test'} (glob)
652413bf663ef2a641cab26574e46d5f5a64a55a cacdfd884a9321ec4e1de275ef3949fa953a1f83 0 (*) {'user': 'test'} (glob)
+ 96e494a2d553dd05902ba1cee1d94d4cb7b8faed 0 {b346ab9a313db8537ecf96fca3ca3ca984ef3bd7} (*) {'user': 'test'} (glob)
+ b558abc46d09c30f57ac31e85a8a3d64d2e906e4 0 {96e494a2d553dd05902ba1cee1d94d4cb7b8faed} (*) {'user': 'test'} (glob)
Ensure hidden revision does not prevent histedit
@@ -545,7 +545,7 @@
|
o 0:cb9a9f314b8b (public) a
- $ hg histedit -r 'b449568bf7fc' --commands - << EOF
+ $ hg histedit -r 'b449568bf7fc' --commands - << EOF --config experimental.evolution.track-operation=1
> pick b449568bf7fc 13 f
> pick 7395e1ff83bd 15 h
> pick 6b70183d2492 14 g
@@ -556,7 +556,7 @@
Editing (ee118ab9fa44), you may commit or record as needed now.
(hg histedit --continue to resume)
[1]
- $ hg histedit --continue
+ $ hg histedit --continue --config experimental.evolution.track-operation=1
$ hg log -G
@ 23:175d6b286a22 (secret) k
|
@@ -574,3 +574,5 @@
|
o 0:cb9a9f314b8b (public) a
+ $ hg debugobsolete --rev .
+ ee118ab9fa44ebb86be85996548b5517a39e5093 175d6b286a224c23f192e79a581ce83131a53fa2 0 (*) {'operation': 'histedit', 'user': 'test'} (glob)
--- a/tests/test-hook.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-hook.t Wed Jul 19 07:51:41 2017 -0500
@@ -4,7 +4,7 @@
$ cat > $TESTTMP/txnabort.checkargs.py <<EOF
> def showargs(ui, repo, hooktype, **kwargs):
- > ui.write('%s python hook: %s\n' % (hooktype, ','.join(sorted(kwargs))))
+ > ui.write('%s Python hook: %s\n' % (hooktype, ','.join(sorted(kwargs))))
> EOF
$ hg init a
@@ -95,13 +95,13 @@
test generic hooks
$ hg id
- pre-identify hook: HG_ARGS=id HG_HOOKNAME=pre-identify HG_HOOKTYPE=pre-identify HG_OPTS={'bookmarks': None, 'branch': None, 'id': None, 'insecure': None, 'num': None, 'remotecmd': '', 'rev': '', 'ssh': '', 'tags': None} HG_PATS=[]
+ pre-identify hook: HG_ARGS=id HG_HOOKNAME=pre-identify HG_HOOKTYPE=pre-identify HG_OPTS={'bookmarks': None, 'branch': None, 'id': None, 'insecure': None, 'num': None, 'remotecmd': '', 'rev': '', 'ssh': '', 'tags': None, 'template': ''} HG_PATS=[]
abort: pre-identify hook exited with status 1
[255]
$ hg cat b
- pre-cat hook: HG_ARGS=cat b HG_HOOKNAME=pre-cat HG_HOOKTYPE=pre-cat HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b']
+ pre-cat hook: HG_ARGS=cat b HG_HOOKNAME=pre-cat HG_HOOKTYPE=pre-cat HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': '', 'template': ''} HG_PATS=['b']
b
- post-cat hook: HG_ARGS=cat b HG_HOOKNAME=post-cat HG_HOOKTYPE=post-cat HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b'] HG_RESULT=0
+ post-cat hook: HG_ARGS=cat b HG_HOOKNAME=post-cat HG_HOOKTYPE=post-cat HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': '', 'template': ''} HG_PATS=['b'] HG_RESULT=0
$ cd ../b
$ hg pull ../a
@@ -175,7 +175,7 @@
5:6f611f8018c1
pretxncommit.forbid hook: HG_HOOKNAME=pretxncommit.forbid1 HG_HOOKTYPE=pretxncommit HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
transaction abort!
- txnabort python hook: txnid,txnname
+ txnabort Python hook: txnid,txnname
txnabort hook: HG_HOOKNAME=txnabort.1 HG_HOOKTYPE=txnabort HG_TXNID=TXN:$ID$ HG_TXNNAME=commit
rollback completed
abort: pretxncommit.forbid1 hook exited with status 1
@@ -648,6 +648,7 @@
foo
committing manifest
committing changelog
+ updating the branch cache
committed changeset 1:52998019f6252a2b893452765fcb0a47351a5708
calling hook commit.auto: hgext_hookext.autohook
Automatically installed hook
@@ -729,6 +730,7 @@
$ hg ci -ma
223eafe2750c tip
$ hg up 0 --config extensions.largefiles=
+ The fsmonitor extension is incompatible with the largefiles extension and has been disabled. (fsmonitor !)
cb9a9f314b8b
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-http-bad-server.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-http-bad-server.t Wed Jul 19 07:51:41 2017 -0500
@@ -71,13 +71,13 @@
TODO this error message is not very good
$ hg clone http://localhost:$HGPORT/ clone
- abort: error: (''|) (re)
+ abort: error: ''
[255]
$ killdaemons.py $DAEMON_PIDS
$ cat error.log
- readline\(1 from (-1|65537)\) -> \(1\) G (re)
+ readline(1 from 65537) -> (1) G
read limit reached; closing socket
$ rm -f error.log
@@ -87,13 +87,13 @@
$ hg --config badserver.closeafterrecvbytes=40 serve -p $HGPORT -d --pid-file=hg.pid -E error.log
$ cat hg.pid > $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT/ clone
- abort: error: (''|) (re)
+ abort: error: ''
[255]
$ killdaemons.py $DAEMON_PIDS
$ cat error.log
- readline\(40 from (-1|65537)\) -> \(33\) GET /\?cmd=capabilities HTTP/1.1\\r\\n (re)
+ readline(40 from 65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
readline(7 from -1) -> (7) Accept-
read limit reached; closing socket
@@ -104,13 +104,13 @@
$ hg --config badserver.closeafterrecvbytes=210 serve -p $HGPORT -d --pid-file=hg.pid -E error.log
$ cat hg.pid > $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT/ clone
- abort: error: (''|) (re)
+ abort: error: ''
[255]
$ killdaemons.py $DAEMON_PIDS
$ cat error.log
- readline\(210 from (-1|65537)\) -> \(33\) GET /\?cmd=capabilities HTTP/1.1\\r\\n (re)
+ readline(210 from 65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
readline(177 from -1) -> (27) Accept-Encoding: identity\r\n
readline(150 from -1) -> (35) accept: application/mercurial-0.1\r\n
readline(115 from -1) -> (2?) host: localhost:$HGPORT\r\n (glob)
@@ -123,10 +123,10 @@
write(21) -> Content-Length: 405\r\n
write(2) -> \r\n
write(405) -> lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=none
- readline\(4[12] from (-1|65537)\) -> \(26\) GET /\?cmd=batch HTTP/1.1\\r\\n (re)
+ readline(4? from 65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n (glob)
readline(1? from -1) -> (1?) Accept-Encoding* (glob)
read limit reached; closing socket
- readline\(210 from (-1|65537)\) -> \(26\) GET /\?cmd=batch HTTP/1.1\\r\\n (re)
+ readline(210 from 65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n
readline(184 from -1) -> (27) Accept-Encoding: identity\r\n
readline(157 from -1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
readline(128 from -1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n
@@ -143,13 +143,13 @@
$ cat hg.pid > $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT/ clone
requesting all changes
- abort: error: (''|) (re)
+ abort: error: ''
[255]
$ killdaemons.py $DAEMON_PIDS
$ cat error.log
- readline\(292 from (-1|65537)\) -> \(33\) GET /\?cmd=capabilities HTTP/1.1\\r\\n (re)
+ readline(292 from 65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
readline(259 from -1) -> (27) Accept-Encoding: identity\r\n
readline(232 from -1) -> (35) accept: application/mercurial-0.1\r\n
readline(197 from -1) -> (2?) host: localhost:$HGPORT\r\n (glob)
@@ -162,13 +162,13 @@
write(21) -> Content-Length: 405\r\n
write(2) -> \r\n
write(405) -> lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=none
- readline\(12[34] from (-1|65537)\) -> \(2[67]\) GET /\?cmd=batch HTTP/1.1\\r\\n (re)
+ readline\(12[34] from 65537\) -> \(2[67]\) GET /\?cmd=batch HTTP/1.1\\r\\n (re)
readline(9? from -1) -> (27) Accept-Encoding: identity\r\n (glob)
readline(7? from -1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
readline(4? from -1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n (glob)
readline(1 from -1) -> (1) x (?)
read limit reached; closing socket
- readline\(292 from (-1|65537)\) -> \(26\) GET /\?cmd=batch HTTP/1.1\\r\\n (re)
+ readline(292 from 65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n
readline(266 from -1) -> (27) Accept-Encoding: identity\r\n
readline(239 from -1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
readline(210 from -1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n
@@ -184,9 +184,9 @@
write(20) -> Content-Length: 42\r\n
write(2) -> \r\n
write(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n;
- readline\(1[23] from (-1|65537)\) -> \(1[23]\) GET /\?cmd=ge.? (re)
+ readline\(1[23] from 65537\) -> \(1[23]\) GET /\?cmd=ge.? (re)
read limit reached; closing socket
- readline\(292 from (-1|65537)\) -> \(30\) GET /\?cmd=getbundle HTTP/1.1\\r\\n (re)
+ readline(292 from 65537) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n
readline(262 from -1) -> (27) Accept-Encoding: identity\r\n
readline(235 from -1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
readline(206 from -1) -> (206) x-hgarg-1: bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Ali
@@ -200,13 +200,13 @@
$ cat hg.pid > $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT/ clone
- abort: error: (''|) (re)
+ abort: error: ''
[255]
$ killdaemons.py $DAEMON_PIDS
$ cat error.log
- readline\(315 from (-1|65537)\) -> \(33\) GET /\?cmd=capabilities HTTP/1.1\\r\\n (re)
+ readline(315 from 65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
readline(282 from -1) -> (27) Accept-Encoding: identity\r\n
readline(255 from -1) -> (35) accept: application/mercurial-0.1\r\n
readline(220 from -1) -> (2?) host: localhost:$HGPORT\r\n (glob)
@@ -219,14 +219,14 @@
write(21) -> Content-Length: 418\r\n
write(2) -> \r\n
write(418) -> lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httppostargs httpmediatype=0.1rx,0.1tx,0.2tx compression=none
- readline\(14[67] from (-1|65537)\) -> \(2[67]\) POST /\?cmd=batch HTTP/1.1\\r\\n (re)
+ readline\(14[67] from 65537\) -> \(2[67]\) POST /\?cmd=batch HTTP/1.1\\r\\n (re)
readline\(1(19|20) from -1\) -> \(27\) Accept-Encoding: identity\\r\\n (re)
readline(9? from -1) -> (41) content-type: application/mercurial-0.1\r\n (glob)
readline(5? from -1) -> (19) vary: X-HgProto-1\r\n (glob)
readline(3? from -1) -> (19) x-hgargs-post: 28\r\n (glob)
readline(1? from -1) -> (1?) x-hgproto-1: * (glob)
read limit reached; closing socket
- readline\(315 from (-1|65537)\) -> \(27\) POST /\?cmd=batch HTTP/1.1\\r\\n (re)
+ readline(315 from 65537) -> (27) POST /?cmd=batch HTTP/1.1\r\n
readline(288 from -1) -> (27) Accept-Encoding: identity\r\n
readline(261 from -1) -> (41) content-type: application/mercurial-0.1\r\n
readline(220 from -1) -> (19) vary: X-HgProto-1\r\n
@@ -257,7 +257,7 @@
$ killdaemons.py $DAEMON_PIDS
$ cat error.log
- readline\((-1|65537)\) -> \(33\) GET /\?cmd=capabilities HTTP/1.1\\r\\n (re)
+ readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (35) accept: application/mercurial-0.1\r\n
readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
@@ -282,7 +282,7 @@
$ killdaemons.py $DAEMON_PIDS
$ cat error.log
- readline\((-1|65537)\) -> \(33\) GET /\?cmd=capabilities HTTP/1.1\\r\\n (re)
+ readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (35) accept: application/mercurial-0.1\r\n
readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
@@ -317,7 +317,7 @@
$ killdaemons.py $DAEMON_PIDS
$ cat error.log
- readline\((-1|65537)\) -> \(33\) GET /\?cmd=capabilities HTTP/1.1\\r\\n (re)
+ readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (35) accept: application/mercurial-0.1\r\n
readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
@@ -330,7 +330,7 @@
write(21 from 21) -> (537) Content-Length: 405\r\n
write(2 from 2) -> (535) \r\n
write(405 from 405) -> (130) lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=none
- readline\((-1|65537)\) -> \(26\) GET /\?cmd=batch HTTP/1.1\\r\\n (re)
+ readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
readline(-1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n
@@ -360,7 +360,7 @@
$ killdaemons.py $DAEMON_PIDS
$ cat error.log
- readline\((-1|65537)\) -> \(33\) GET /\?cmd=capabilities HTTP/1.1\\r\\n (re)
+ readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (35) accept: application/mercurial-0.1\r\n
readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
@@ -373,7 +373,7 @@
write(21 from 21) -> (602) Content-Length: 405\r\n
write(2 from 2) -> (600) \r\n
write(405 from 405) -> (195) lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=none
- readline\((-1|65537)\) -> \(26\) GET /\?cmd=batch HTTP/1.1\\r\\n (re)
+ readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
readline(-1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n
@@ -412,7 +412,7 @@
$ killdaemons.py $DAEMON_PIDS
$ cat error.log
- readline\((-1|65537)\) -> \(33\) GET /\?cmd=capabilities HTTP/1.1\\r\\n (re)
+ readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (35) accept: application/mercurial-0.1\r\n
readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
@@ -425,7 +425,7 @@
write(21 from 21) -> (737) Content-Length: 405\r\n
write(2 from 2) -> (735) \r\n
write(405 from 405) -> (330) lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=none
- readline\((-1|65537)\) -> \(26\) GET /\?cmd=batch HTTP/1.1\\r\\n (re)
+ readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
readline(-1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n
@@ -441,7 +441,7 @@
write(20 from 20) -> (173) Content-Length: 42\r\n
write(2 from 2) -> (171) \r\n
write(42 from 42) -> (129) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n;
- readline\((-1|65537)\) -> \(30\) GET /\?cmd=getbundle HTTP/1.1\\r\\n (re)
+ readline(65537) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
readline(-1) -> (396) x-hgarg-1: bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n
@@ -473,7 +473,7 @@
$ killdaemons.py $DAEMON_PIDS
$ cat error.log
- readline\((-1|65537)\) -> \(33\) GET /\?cmd=capabilities HTTP/1.1\\r\\n (re)
+ readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (35) accept: application/mercurial-0.1\r\n
readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
@@ -486,7 +486,7 @@
write(21 from 21) -> (775) Content-Length: 405\r\n
write(2 from 2) -> (773) \r\n
write(405 from 405) -> (368) lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=none
- readline\((-1|65537)\) -> \(26\) GET /\?cmd=batch HTTP/1.1\\r\\n (re)
+ readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
readline(-1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n
@@ -502,7 +502,7 @@
write(20 from 20) -> (211) Content-Length: 42\r\n
write(2 from 2) -> (209) \r\n
write(42 from 42) -> (167) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n;
- readline\((-1|65537)\) -> \(30\) GET /\?cmd=getbundle HTTP/1.1\\r\\n (re)
+ readline(65537) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
readline(-1) -> (396) x-hgarg-1: bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n
@@ -536,7 +536,7 @@
$ killdaemons.py $DAEMON_PIDS
$ cat error.log
- readline\((-1|65537)\) -> \(33\) GET /\?cmd=capabilities HTTP/1.1\\r\\n (re)
+ readline(65537) -> (33) GET /?cmd=capabilities HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (35) accept: application/mercurial-0.1\r\n
readline(-1) -> (2?) host: localhost:$HGPORT\r\n (glob)
@@ -549,7 +549,7 @@
write(21 from 21) -> (787) Content-Length: 405\r\n
write(2 from 2) -> (785) \r\n
write(405 from 405) -> (380) lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=none
- readline\((-1|65537)\) -> \(26\) GET /\?cmd=batch HTTP/1.1\\r\\n (re)
+ readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
readline(-1) -> (41) x-hgarg-1: cmds=heads+%3Bknown+nodes%3D\r\n
@@ -565,7 +565,7 @@
write(20 from 20) -> (223) Content-Length: 42\r\n
write(2 from 2) -> (221) \r\n
write(42 from 42) -> (179) 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n;
- readline\((-1|65537)\) -> \(30\) GET /\?cmd=getbundle HTTP/1.1\\r\\n (re)
+ readline(65537) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n
readline(-1) -> (27) Accept-Encoding: identity\r\n
readline(-1) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n
readline(-1) -> (396) x-hgarg-1: bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n
--- a/tests/test-http-branchmap.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-http-branchmap.t Wed Jul 19 07:51:41 2017 -0500
@@ -87,7 +87,7 @@
> EOF
$ echo baz >> b/foo
$ hg -R b ci -m baz
- $ hg push -R b -e 'python oldhg' ssh://dummy/ --encoding latin1
+ $ hg push -R b -e "\"$PYTHON\" oldhg" ssh://dummy/ --encoding latin1
pushing to ssh://dummy/
searching for changes
remote: adding changesets
--- a/tests/test-http-bundle1.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-http-bundle1.t Wed Jul 19 07:51:41 2017 -0500
@@ -58,6 +58,26 @@
try to clone via stream, should use pull instead
$ hg clone --uncompressed http://localhost:$HGPORT1/ copy2
+ warning: stream clone requested but server has them disabled
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 4 changes to 4 files
+ updating to branch default
+ 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+try to clone via stream but missing requirements, so should use pull instead
+
+ $ cat > $TESTTMP/removesupportedformat.py << EOF
+ > from mercurial import localrepo
+ > def extsetup(ui):
+ > localrepo.localrepository.supportedformats.remove('generaldelta')
+ > EOF
+
+ $ hg clone --config extensions.rsf=$TESTTMP/removesupportedformat.py --uncompressed http://localhost:$HGPORT/ copy3
+ warning: stream clone requested but client is missing requirements: generaldelta
+ (see https://www.mercurial-scm.org/wiki/MissingRequirement for more information)
requesting all changes
adding changesets
adding manifests
@@ -345,3 +365,41 @@
this is an exercise
[255]
$ cat error.log
+
+disable pull-based clones
+
+ $ hg -R test serve -p $HGPORT1 -d --pid-file=hg4.pid -E error.log --config server.disablefullbundle=True
+ $ cat hg4.pid >> $DAEMON_PIDS
+ $ hg clone http://localhost:$HGPORT1/ disable-pull-clone
+ requesting all changes
+ abort: remote error:
+ server has pull-based clones disabled
+ [255]
+
+... but keep stream clones working
+
+ $ hg clone --uncompressed --noupdate http://localhost:$HGPORT1/ test-stream-clone
+ streaming all changes
+ * files to transfer, * of data (glob)
+ transferred * in * seconds (* KB/sec) (glob)
+ searching for changes
+ no changes found
+
+... and also keep partial clones and pulls working
+ $ hg clone http://localhost:$HGPORT1 --rev 0 test-partial-clone
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 4 changes to 4 files
+ updating to branch default
+ 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg pull -R test-partial-clone
+ pulling from http://localhost:$HGPORT1/
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 2 changesets with 3 changes to 3 files
+ (run 'hg update' to get a working copy)
+
+ $ cat error.log
--- a/tests/test-http-proxy.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-http-proxy.t Wed Jul 19 07:51:41 2017 -0500
@@ -8,7 +8,7 @@
$ hg serve --config server.uncompressed=True -p $HGPORT -d --pid-file=hg.pid
$ cat hg.pid >> $DAEMON_PIDS
$ cd ..
- $ tinyproxy.py $HGPORT1 localhost >proxy.log 2>&1 </dev/null &
+ $ tinyproxy.py $HGPORT1 localhost 2>proxy.log >/dev/null </dev/null &
$ while [ ! -f proxy.pid ]; do sleep 0; done
$ cat proxy.pid >> $DAEMON_PIDS
--- a/tests/test-http.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-http.t Wed Jul 19 07:51:41 2017 -0500
@@ -49,6 +49,26 @@
try to clone via stream, should use pull instead
$ hg clone --uncompressed http://localhost:$HGPORT1/ copy2
+ warning: stream clone requested but server has them disabled
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 4 changes to 4 files
+ updating to branch default
+ 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+try to clone via stream but missing requirements, so should use pull instead
+
+ $ cat > $TESTTMP/removesupportedformat.py << EOF
+ > from mercurial import localrepo
+ > def extsetup(ui):
+ > localrepo.localrepository.supportedformats.remove('generaldelta')
+ > EOF
+
+ $ hg clone --config extensions.rsf=$TESTTMP/removesupportedformat.py --uncompressed http://localhost:$HGPORT/ copy3
+ warning: stream clone requested but client is missing requirements: generaldelta
+ (see https://www.mercurial-scm.org/wiki/MissingRequirement for more information)
requesting all changes
adding changesets
adding manifests
@@ -334,6 +354,44 @@
[255]
$ cat error.log
+disable pull-based clones
+
+ $ hg -R test serve -p $HGPORT1 -d --pid-file=hg4.pid -E error.log --config server.disablefullbundle=True
+ $ cat hg4.pid >> $DAEMON_PIDS
+ $ hg clone http://localhost:$HGPORT1/ disable-pull-clone
+ requesting all changes
+ remote: abort: server has pull-based clones disabled
+ abort: pull failed on remote
+ (remove --pull if specified or upgrade Mercurial)
+ [255]
+
+... but keep stream clones working
+
+ $ hg clone --uncompressed --noupdate http://localhost:$HGPORT1/ test-stream-clone
+ streaming all changes
+ * files to transfer, * of data (glob)
+ transferred * in * seconds (*/sec) (glob)
+ searching for changes
+ no changes found
+ $ cat error.log
+
+... and also keep partial clones and pulls working
+ $ hg clone http://localhost:$HGPORT1 --rev 0 test-partial-clone
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 4 changes to 4 files
+ updating to branch default
+ 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg pull -R test-partial-clone
+ pulling from http://localhost:$HGPORT1/
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 2 changesets with 3 changes to 3 files
+ (run 'hg update' to get a working copy)
+
corrupt cookies file should yield a warning
$ cat > $TESTTMP/cookies.txt << EOF
--- a/tests/test-https.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-https.t Wed Jul 19 07:51:41 2017 -0500
@@ -84,6 +84,7 @@
$ hg clone https://localhost:$HGPORT/ copy-pull
warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
(using CA certificates from *; if you see this message, your Mercurial install is not properly configured; see https://mercurial-scm.org/wiki/SecureConnections for how to configure Mercurial to avoid this message) (glob) (?)
+ (the full certificate chain may not be available locally; see "hg help debugssl") (windows !)
abort: error: *certificate verify failed* (glob)
[255]
#endif
@@ -128,6 +129,7 @@
$ hg --config hostsecurity.localhost:verifycertsfile="$CERTSDIR/client-cert.pem" clone https://localhost:$HGPORT/
warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
(an attempt was made to load CA certificates but none were loaded; see https://mercurial-scm.org/wiki/SecureConnections for how to configure Mercurial to avoid this error)
+ (the full certificate chain may not be available locally; see "hg help debugssl") (windows !)
abort: error: *certificate verify failed* (glob)
[255]
#else
@@ -295,6 +297,7 @@
$ hg -R copy-pull pull --config web.cacerts="$CERTSDIR/pub-other.pem"
pulling from https://localhost:$HGPORT/
warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+ (the full certificate chain may not be available locally; see "hg help debugssl") (windows !)
abort: error: *certificate verify failed* (glob)
[255]
$ hg -R copy-pull pull --config web.cacerts="$CERTSDIR/pub-other.pem" \
@@ -313,6 +316,7 @@
> https://localhost:$HGPORT1/
pulling from https://localhost:$HGPORT1/
warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+ (the full certificate chain may not be available locally; see "hg help debugssl") (windows !)
abort: error: *certificate verify failed* (glob)
[255]
@@ -324,6 +328,7 @@
> https://localhost:$HGPORT2/
pulling from https://localhost:$HGPORT2/
warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+ (the full certificate chain may not be available locally; see "hg help debugssl") (windows !)
abort: error: *certificate verify failed* (glob)
[255]
@@ -333,20 +338,9 @@
> --config hostsecurity.disabletls10warning=true
5fed3813f7f5
-#if no-sslcontext no-py27+
-Setting ciphers doesn't work in Python 2.6
- $ P="$CERTSDIR" hg --config hostsecurity.ciphers=HIGH -R copy-pull id https://localhost:$HGPORT/
- warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info
- abort: setting ciphers in [hostsecurity] is not supported by this version of Python
- (remove the config option or run Mercurial with a modern Python version (preferred))
- [255]
-#endif
+Error message for setting ciphers is different depending on SSLContext support
-Setting ciphers works in Python 2.7+ but the error message is different on
-legacy ssl. We test legacy once and do more feature checking on modern
-configs.
-
-#if py27+ no-sslcontext
+#if no-sslcontext
$ P="$CERTSDIR" hg --config hostsecurity.ciphers=invalid -R copy-pull id https://localhost:$HGPORT/
warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info
abort: *No cipher can be selected. (glob)
@@ -383,7 +377,7 @@
- works without cacerts (hostfingerprints)
$ hg -R copy-pull id https://localhost:$HGPORT/ --insecure --config hostfingerprints.localhost=ec:d8:7c:d6:b3:86:d0:4f:c1:b8:b4:1c:9d:8f:5e:16:8e:ef:1c:03
warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
- (SHA-1 fingerprint for localhost found in legacy [hostfingerprints] section; if you trust this fingerprint, set the following config value in [hostsecurity] and remove the old one from [hostfingerprints] to upgrade to a more secure SHA-256 fingerprint: localhost:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e)
+ (SHA-1 fingerprint for localhost found in legacy [hostfingerprints] section; if you trust this fingerprint, remove the old SHA-1 fingerprint from [hostfingerprints] and add the following entry to the new [hostsecurity] section: localhost:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e)
5fed3813f7f5
- works without cacerts (hostsecurity)
@@ -398,7 +392,7 @@
- multiple fingerprints specified and first matches
$ hg --config 'hostfingerprints.localhost=ecd87cd6b386d04fc1b8b41c9d8f5e168eef1c03, deadbeefdeadbeefdeadbeefdeadbeefdeadbeef' -R copy-pull id https://localhost:$HGPORT/ --insecure
warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
- (SHA-1 fingerprint for localhost found in legacy [hostfingerprints] section; if you trust this fingerprint, set the following config value in [hostsecurity] and remove the old one from [hostfingerprints] to upgrade to a more secure SHA-256 fingerprint: localhost:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e)
+ (SHA-1 fingerprint for localhost found in legacy [hostfingerprints] section; if you trust this fingerprint, remove the old SHA-1 fingerprint from [hostfingerprints] and add the following entry to the new [hostsecurity] section: localhost:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e)
5fed3813f7f5
$ hg --config 'hostsecurity.localhost:fingerprints=sha1:ecd87cd6b386d04fc1b8b41c9d8f5e168eef1c03, sha1:deadbeefdeadbeefdeadbeefdeadbeefdeadbeef' -R copy-pull id https://localhost:$HGPORT/
@@ -408,7 +402,7 @@
- multiple fingerprints specified and last matches
$ hg --config 'hostfingerprints.localhost=deadbeefdeadbeefdeadbeefdeadbeefdeadbeef, ecd87cd6b386d04fc1b8b41c9d8f5e168eef1c03' -R copy-pull id https://localhost:$HGPORT/ --insecure
warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
- (SHA-1 fingerprint for localhost found in legacy [hostfingerprints] section; if you trust this fingerprint, set the following config value in [hostsecurity] and remove the old one from [hostfingerprints] to upgrade to a more secure SHA-256 fingerprint: localhost:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e)
+ (SHA-1 fingerprint for localhost found in legacy [hostfingerprints] section; if you trust this fingerprint, remove the old SHA-1 fingerprint from [hostfingerprints] and add the following entry to the new [hostsecurity] section: localhost:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e)
5fed3813f7f5
$ hg --config 'hostsecurity.localhost:fingerprints=sha1:deadbeefdeadbeefdeadbeefdeadbeefdeadbeef, sha1:ecd87cd6b386d04fc1b8b41c9d8f5e168eef1c03' -R copy-pull id https://localhost:$HGPORT/
@@ -440,7 +434,7 @@
- ignores that certificate doesn't match hostname
$ hg -R copy-pull id https://$LOCALIP:$HGPORT/ --config hostfingerprints.$LOCALIP=ecd87cd6b386d04fc1b8b41c9d8f5e168eef1c03
warning: connecting to $LOCALIP using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
- (SHA-1 fingerprint for $LOCALIP found in legacy [hostfingerprints] section; if you trust this fingerprint, set the following config value in [hostsecurity] and remove the old one from [hostfingerprints] to upgrade to a more secure SHA-256 fingerprint: $LOCALIP:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e)
+ (SHA-1 fingerprint for $LOCALIP found in legacy [hostfingerprints] section; if you trust this fingerprint, remove the old SHA-1 fingerprint from [hostfingerprints] and add the following entry to the new [hostsecurity] section: $LOCALIP:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e)
5fed3813f7f5
Ports used by next test. Kill servers.
@@ -579,7 +573,7 @@
$ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull https://localhost:$HGPORT/ --config hostfingerprints.localhost=ecd87cd6b386d04fc1b8b41c9d8f5e168eef1c03 --trace
pulling from https://*:$HGPORT/ (glob)
warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
- (SHA-1 fingerprint for localhost found in legacy [hostfingerprints] section; if you trust this fingerprint, set the following config value in [hostsecurity] and remove the old one from [hostfingerprints] to upgrade to a more secure SHA-256 fingerprint: localhost:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e)
+ (SHA-1 fingerprint for localhost found in legacy [hostfingerprints] section; if you trust this fingerprint, remove the old SHA-1 fingerprint from [hostfingerprints] and add the following entry to the new [hostsecurity] section: localhost:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e)
searching for changes
no changes found
@@ -589,12 +583,14 @@
> --config web.cacerts="$CERTSDIR/pub-other.pem"
pulling from https://localhost:$HGPORT/
warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+ (the full certificate chain may not be available locally; see "hg help debugssl") (windows !)
abort: error: *certificate verify failed* (glob)
[255]
$ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull \
> --config web.cacerts="$CERTSDIR/pub-expired.pem" https://localhost:$HGPORT2/
pulling from https://localhost:$HGPORT2/
warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+ (the full certificate chain may not be available locally; see "hg help debugssl") (windows !)
abort: error: *certificate verify failed* (glob)
[255]
@@ -603,9 +599,24 @@
#if sslcontext
+ $ cd test
+
+Missing certificate file(s) are detected
+
+ $ hg serve -p $HGPORT --certificate=/missing/certificate \
+ > --config devel.servercafile=$PRIV --config devel.serverrequirecert=true
+ abort: referenced certificate file (*/missing/certificate) does not exist (glob) (windows !)
+ abort: referenced certificate file (/missing/certificate) does not exist (no-windows !)
+ [255]
+
+ $ hg serve -p $HGPORT --certificate=$PRIV \
+ > --config devel.servercafile=/missing/cafile --config devel.serverrequirecert=true
+ abort: referenced certificate file (*/missing/cafile) does not exist (glob) (windows !)
+ abort: referenced certificate file (/missing/cafile) does not exist (no-windows !)
+ [255]
+
Start hgweb that requires client certificates:
- $ cd test
$ hg serve -p $HGPORT -d --pid-file=../hg0.pid --certificate=$PRIV \
> --config devel.servercafile=$PRIV --config devel.serverrequirecert=true
$ cat ../hg0.pid >> $DAEMON_PIDS
@@ -615,6 +626,7 @@
$ P="$CERTSDIR" hg id https://localhost:$HGPORT/
warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+ (the full certificate chain may not be available locally; see "hg help debugssl") (windows !)
abort: error: *handshake failure* (glob)
[255]
@@ -642,4 +654,18 @@
abort: error: * (glob)
[255]
+Missing certificate and key files result in error
+
+ $ hg id https://localhost:$HGPORT/ --config auth.l.cert=/missing/cert
+ abort: certificate file (*/missing/cert) does not exist; cannot connect to localhost (glob) (windows !)
+ abort: certificate file (/missing/cert) does not exist; cannot connect to localhost (no-windows !)
+ (restore missing file or fix references in Mercurial config)
+ [255]
+
+ $ hg id https://localhost:$HGPORT/ --config auth.l.key=/missing/key
+ abort: certificate file (*/missing/key) does not exist; cannot connect to localhost (glob) (windows !)
+ abort: certificate file (/missing/key) does not exist; cannot connect to localhost (no-windows !)
+ (restore missing file or fix references in Mercurial config)
+ [255]
+
#endif
--- a/tests/test-i18n.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-i18n.t Wed Jul 19 07:51:41 2017 -0500
@@ -45,6 +45,6 @@
tool itself by doctest
$ cd "$TESTDIR"/../i18n
- $ python check-translation.py *.po
- $ python check-translation.py --doctest
+ $ $PYTHON check-translation.py *.po
+ $ $PYTHON check-translation.py --doctest
$ cd $TESTTMP
--- a/tests/test-identify.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-identify.t Wed Jul 19 07:51:41 2017 -0500
@@ -43,12 +43,43 @@
cb9a9f314b8b
$ hg id -n -t -b -i
cb9a9f314b8b 0 default tip
+ $ hg id -Tjson
+ [
+ {
+ "bookmarks": [],
+ "branch": "default",
+ "dirty": "",
+ "id": "cb9a9f314b8b",
+ "node": "ffffffffffffffffffffffffffffffffffffffff",
+ "parents": [{"node": "cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b", "rev": 0}],
+ "tags": ["tip"]
+ }
+ ]
+
+test template keywords and functions which require changectx:
+
+ $ hg id -T '{rev} {node|shortest}\n'
+ 2147483647 ffff
+ $ hg id -T '{parents % "{rev} {node|shortest} {desc}\n"}'
+ 0 cb9a a
with modifications
$ echo b > a
$ hg id -n -t -b -i
cb9a9f314b8b+ 0+ default tip
+ $ hg id -Tjson
+ [
+ {
+ "bookmarks": [],
+ "branch": "default",
+ "dirty": "+",
+ "id": "cb9a9f314b8b+",
+ "node": "ffffffffffffffffffffffffffffffffffffffff",
+ "parents": [{"node": "cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b", "rev": 0}],
+ "tags": ["tip"]
+ }
+ ]
other local repo
--- a/tests/test-impexp-branch.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-impexp-branch.t Wed Jul 19 07:51:41 2017 -0500
@@ -30,12 +30,12 @@
$ hg export 1 > ../r1.patch
$ cd ..
- $ if python findbranch.py < r0.patch; then
+ $ if $PYTHON findbranch.py < r0.patch; then
> echo "Export of default branch revision has Branch header" 1>&2
> exit 1
> fi
- $ if python findbranch.py < r1.patch; then
+ $ if $PYTHON findbranch.py < r1.patch; then
> : # Do nothing
> else
> echo "Export of branch revision is missing Branch header" 1>&2
--- a/tests/test-import-context.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-import-context.t Wed Jul 19 07:51:41 2017 -0500
@@ -26,10 +26,10 @@
$ hg init repo
$ cd repo
- $ python ../writepatterns.py a 0 5A 1B 5C 1D
- $ python ../writepatterns.py b 1 1A 1B
- $ python ../writepatterns.py c 1 5A
- $ python ../writepatterns.py d 1 5A 1B
+ $ $PYTHON ../writepatterns.py a 0 5A 1B 5C 1D
+ $ $PYTHON ../writepatterns.py b 1 1A 1B
+ $ $PYTHON ../writepatterns.py c 1 5A
+ $ $PYTHON ../writepatterns.py d 1 5A 1B
$ hg add
adding a
adding b
@@ -114,13 +114,13 @@
What's in a
- $ python ../cat.py a
+ $ $PYTHON ../cat.py a
'A\nA\nA\nA\nA\nE\nC\nC\nC\nC\nC\nF\nF\n'
- $ python ../cat.py newnoeol
+ $ $PYTHON ../cat.py newnoeol
'a\nb'
- $ python ../cat.py c
+ $ $PYTHON ../cat.py c
'A\nA\nA\nA\nA\nB\nB\n'
- $ python ../cat.py d
+ $ $PYTHON ../cat.py d
'A\nA\nA\nA\n'
$ cd ..
--- a/tests/test-import-eol.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-import-eol.t Wed Jul 19 07:51:41 2017 -0500
@@ -29,7 +29,7 @@
$ hg ci -Am adda
adding .hgignore
adding a
- $ python ../makepatch.py
+ $ $PYTHON ../makepatch.py
invalid eol
--- a/tests/test-import.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-import.t Wed Jul 19 07:51:41 2017 -0500
@@ -64,7 +64,7 @@
added 1 changesets with 2 changes to 2 files
updating to branch default
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ HGEDITOR=cat hg --config ui.patch='python ../dummypatch.py' --cwd b import --edit ../exported-tip.patch
+ $ HGEDITOR=cat hg --config ui.patch="$PYTHON ../dummypatch.py" --cwd b import --edit ../exported-tip.patch
applying ../exported-tip.patch
second change
@@ -294,7 +294,7 @@
added 1 changesets with 2 changes to 2 files
updating to branch default
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ python mkmsg.py diffed-tip.patch msg.patch
+ $ $PYTHON mkmsg.py diffed-tip.patch msg.patch
$ hg --cwd b import ../msg.patch
applying ../msg.patch
$ hg --cwd b tip | grep email
@@ -356,7 +356,7 @@
added 1 changesets with 2 changes to 2 files
updating to branch default
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ python mkmsg.py exported-tip.patch msg.patch
+ $ $PYTHON mkmsg.py exported-tip.patch msg.patch
$ cat msg.patch | hg --cwd b import -
applying patch from stdin
$ hg --cwd b tip | grep second
@@ -387,7 +387,7 @@
added 1 changesets with 2 changes to 2 files
updating to branch default
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ python mkmsg2.py diffed-tip.patch msg.patch
+ $ $PYTHON mkmsg2.py diffed-tip.patch msg.patch
$ cat msg.patch | hg --cwd b import -
applying patch from stdin
$ hg --cwd b tip --template '{desc}\n'
--- a/tests/test-imports-checker.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-imports-checker.t Wed Jul 19 07:51:41 2017 -0500
@@ -1,13 +1,14 @@
#require test-repo
$ . "$TESTDIR/helpers-testrepo.sh"
+ $ testrepohgenv
$ import_checker="$TESTDIR"/../contrib/import-checker.py
Run the doctests from the import checker, and make sure
it's working correctly.
$ TERM=dumb
$ export TERM
- $ python -m doctest $import_checker
+ $ $PYTHON -m doctest $import_checker
Run additional tests for the import checker
@@ -124,7 +125,7 @@
> from mercurial.node import hex
> EOF
- $ python "$import_checker" testpackage*/*.py testpackage/subpackage/*.py
+ $ $PYTHON "$import_checker" testpackage*/*.py testpackage/subpackage/*.py
testpackage/importalias.py:2: ui module must be "as" aliased to uimod
testpackage/importfromalias.py:2: ui from testpackage must be "as" aliased to uimod
testpackage/importfromrelative.py:2: import should be relative: testpackage.unsorted
--- a/tests/test-inherit-mode.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-inherit-mode.t Wed Jul 19 07:51:41 2017 -0500
@@ -45,7 +45,7 @@
store can be written by the group, other files cannot
store is setgid
- $ python ../printmodes.py .
+ $ $PYTHON ../printmodes.py .
00700 ./.hg/
00600 ./.hg/00changelog.i
00600 ./.hg/requires
@@ -61,7 +61,7 @@
(in particular, store/**, dirstate, branch cache file, undo files)
new directories are setgid
- $ python ../printmodes.py .
+ $ $PYTHON ../printmodes.py .
00700 ./.hg/
00600 ./.hg/00changelog.i
00770 ./.hg/cache/
@@ -69,6 +69,7 @@
00660 ./.hg/cache/rbc-names-v1
00660 ./.hg/cache/rbc-revs-v1
00660 ./.hg/dirstate
+ 00660 ./.hg/fsmonitor.state (fsmonitor !)
00660 ./.hg/last-message.txt
00600 ./.hg/requires
00770 ./.hg/store/
@@ -98,7 +99,7 @@
before push
group can write everything
- $ python ../printmodes.py ../push
+ $ $PYTHON ../printmodes.py ../push
00770 ../push/.hg/
00660 ../push/.hg/00changelog.i
00660 ../push/.hg/requires
@@ -110,13 +111,11 @@
after push
group can still write everything
- $ python ../printmodes.py ../push
+ $ $PYTHON ../printmodes.py ../push
00770 ../push/.hg/
00660 ../push/.hg/00changelog.i
00770 ../push/.hg/cache/
00660 ../push/.hg/cache/branch2-base
- 00660 ../push/.hg/cache/rbc-names-v1
- 00660 ../push/.hg/cache/rbc-revs-v1
00660 ../push/.hg/dirstate
00660 ../push/.hg/requires
00770 ../push/.hg/store/
@@ -148,8 +147,8 @@
$ mkdir dir
$ touch dir/file
$ hg ci -qAm 'add dir/file'
- $ storemode=`python ../mode.py .hg/store`
- $ dirmode=`python ../mode.py .hg/store/data/dir`
+ $ storemode=`$PYTHON ../mode.py .hg/store`
+ $ dirmode=`$PYTHON ../mode.py .hg/store/data/dir`
$ if [ "$storemode" != "$dirmode" ]; then
> echo "$storemode != $dirmode"
> fi
--- a/tests/test-init.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-init.t Wed Jul 19 07:51:41 2017 -0500
@@ -106,7 +106,7 @@
init+push to remote2
- $ hg init -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote2
+ $ hg init -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote2
$ hg incoming -R remote2 local
comparing with local
changeset: 0:08b9e9f63b32
@@ -116,7 +116,7 @@
summary: init
- $ hg push -R local -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote2
+ $ hg push -R local -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote2
pushing to ssh://user@dummy/remote2
searching for changes
remote: adding changesets
@@ -126,7 +126,7 @@
clone to remote1
- $ hg clone -e "python \"$TESTDIR/dummyssh\"" local ssh://user@dummy/remote1
+ $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" local ssh://user@dummy/remote1
searching for changes
remote: adding changesets
remote: adding manifests
@@ -134,7 +134,9 @@
remote: added 1 changesets with 1 changes to 1 files
The largefiles extension doesn't crash
- $ hg clone -e "python \"$TESTDIR/dummyssh\"" local ssh://user@dummy/remotelf --config extensions.largefiles=
+ $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" local ssh://user@dummy/remotelf --config extensions.largefiles=
+ The fsmonitor extension is incompatible with the largefiles extension and has been disabled. (fsmonitor !)
+ The fsmonitor extension is incompatible with the largefiles extension and has been disabled. (fsmonitor !)
searching for changes
remote: adding changesets
remote: adding manifests
@@ -143,14 +145,14 @@
init to existing repo
- $ hg init -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote1
+ $ hg init -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote1
abort: repository remote1 already exists!
abort: could not create remote repo!
[255]
clone to existing repo
- $ hg clone -e "python \"$TESTDIR/dummyssh\"" local ssh://user@dummy/remote1
+ $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" local ssh://user@dummy/remote1
abort: repository remote1 already exists!
abort: could not create remote repo!
[255]
@@ -249,7 +251,7 @@
$ hg -R local bookmark test
$ hg -R local bookmarks
* test 0:08b9e9f63b32
- $ hg clone -e "python \"$TESTDIR/dummyssh\"" local ssh://user@dummy/remote-bookmarks
+ $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" local ssh://user@dummy/remote-bookmarks
searching for changes
remote: adding changesets
remote: adding manifests
--- a/tests/test-install.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-install.t Wed Jul 19 07:51:41 2017 -0500
@@ -34,7 +34,7 @@
"editornotfound": false,
"encoding": "ascii",
"encodingerror": null,
- "extensionserror": null,
+ "extensionserror": null, (no-pure !)
"hgmodulepolicy": "*", (glob)
"hgmodules": "*mercurial", (glob)
"hgver": "*", (glob)
@@ -159,7 +159,7 @@
> print(' %s' % f)
> EOF
- $ python wixxml.py help
+ $ ( testrepohgenv; $PYTHON wixxml.py help )
Not installed:
help/common.txt
help/hg-ssh.8.txt
@@ -168,8 +168,45 @@
help/hgrc.5.txt
Not tracked:
- $ python wixxml.py templates
+ $ ( testrepohgenv; $PYTHON wixxml.py templates )
Not installed:
Not tracked:
#endif
+
+#if virtualenv
+
+Verify that Mercurial is installable with pip. Note that this MUST be
+the last test in this file, because we do some nasty things to the
+shell environment in order to make the virtualenv work reliably.
+
+ $ cd $TESTTMP
+Note: --no-site-packages is deprecated, but some places have an
+ancient virtualenv from their linux distro or similar and it's not yet
+the default for them.
+ $ unset PYTHONPATH
+ $ $PYTHON -m virtualenv --no-site-packages --never-download installenv >> pip.log
+Note: we use this weird path to run pip and hg to avoid platform differences,
+since it's bin on most platforms but Scripts on Windows.
+ $ ./installenv/*/pip install --no-index $TESTDIR/.. >> pip.log
+ $ ./installenv/*/hg debuginstall || cat pip.log
+ checking encoding (ascii)...
+ checking Python executable (*) (glob)
+ checking Python version (2.*) (glob)
+ checking Python lib (*)... (glob)
+ checking Python security support (*) (glob)
+ TLS 1.2 not supported by Python install; network connections lack modern security (?)
+ SNI not supported by Python install; may have connectivity issues with some servers (?)
+ checking Mercurial version (*) (glob)
+ checking Mercurial custom build (*) (glob)
+ checking module policy (*) (glob)
+ checking installed modules (*/mercurial)... (glob)
+ checking registered compression engines (*) (glob)
+ checking available compression engines (*) (glob)
+ checking available compression engines for wire protocol (*) (glob)
+ checking templates ($TESTTMP/installenv/*/site-packages/mercurial/templates)... (glob)
+ checking default template ($TESTTMP/installenv/*/site-packages/mercurial/templates/map-cmdline.default) (glob)
+ checking commit editor... (*) (glob)
+ checking username (test)
+ no problems detected
+#endif
--- a/tests/test-issue1175.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-issue1175.t Wed Jul 19 07:51:41 2017 -0500
@@ -33,6 +33,7 @@
warning: can't find ancestor for 'b' copied from 'a'!
committing manifest
committing changelog
+ updating the branch cache
committed changeset 5:83a687e8a97c80992ba385bbfd766be181bfb1d1
$ hg verify
--- a/tests/test-issue4074.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-issue4074.t Wed Jul 19 07:51:41 2017 -0500
@@ -16,12 +16,12 @@
Check in a big file:
- $ python ../s.py > a
+ $ $PYTHON ../s.py > a
$ hg ci -qAm0
Modify it:
- $ python ../s.py > a
+ $ $PYTHON ../s.py > a
Time a check-in, should never take more than 10 seconds user time:
--- a/tests/test-issue672.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-issue672.t Wed Jul 19 07:51:41 2017 -0500
@@ -38,7 +38,6 @@
removing 1
1a: remote created -> g
getting 1a
- 2: remote unchanged -> k
1 files updated, 0 files merged, 1 files removed, 0 files unresolved
(branch merge, don't forget to commit)
--- a/tests/test-keyword.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-keyword.t Wed Jul 19 07:51:41 2017 -0500
@@ -178,6 +178,7 @@
committing manifest
committing changelog
overwriting a expanding keywords
+ updating the branch cache
committed changeset 1:ef63ca68695bc9495032c6fda1350c71e6d256e9
running hook commit.test: cp a hooktest
$ hg status
@@ -650,6 +651,7 @@
committing manifest
committing changelog
overwriting c expanding keywords
+ updating the branch cache
committed changeset 2:25736cf2f5cbe41f6be4e6784ef6ecf9f3bbcc7d
$ cat a c
expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $
@@ -823,6 +825,7 @@
committing manifest
committing changelog
overwriting a expanding keywords
+ updating the branch cache
committed changeset 2:bb948857c743469b22bbf51f7ec8112279ca5d83
$ rm log
@@ -866,6 +869,7 @@
committing files:
committing manifest
committing changelog
+ updating the branch cache
committed changeset 3:d14c712653769de926994cf7fbb06c8fbd68f012
$ hg status
? c
@@ -941,6 +945,7 @@
committing manifest
committing changelog
overwriting a expanding keywords
+ updating the branch cache
committed changeset 2:85e279d709ffc28c9fdd1b868570985fc3d87082
$ hg export -o ../rejecttest.diff tip
$ cd ../Test
@@ -985,6 +990,7 @@
committing manifest
committing changelog
overwriting x/a expanding keywords
+ updating the branch cache
committed changeset 3:b4560182a3f9a358179fd2d835c15e9da379c1e4
$ cat a
expand $Id: x/a b4560182a3f9 Thu, 01 Jan 1970 00:00:03 +0000 user $
@@ -1012,7 +1018,8 @@
#if serve
hg serve
- expand with hgweb file
- - no expansion with hgweb annotate/changeset/filediff
+ - no expansion with hgweb annotate/changeset/filediff/comparison
+ - expand with hgweb file, again
- check errors
$ hg serve -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
@@ -1072,6 +1079,25 @@
+ $ get-with-headers.py localhost:$HGPORT 'comparison/bb948857c743/a' | grep '\$[a-zA-Z]'
+ <td class="source equal"><a href="#l1r1"> 1</a> expand $Id$</td>
+ <td class="source equal"><a href="#l1r1"> 1</a> expand $Id$</td>
+ <td class="source equal"><a href="#l2r2"> 2</a> do not process $Id:</td>
+ <td class="source equal"><a href="#l2r2"> 2</a> do not process $Id:</td>
+ <td class="source insert"><a href="#r4"> 4</a> $Xinfo$</td>
+
+(check "kwweb_skip"-ed webcommand doesn't suppress expanding keywords
+at subsequent webcommands)
+
+ $ get-with-headers.py localhost:$HGPORT 'file/tip/a/?style=raw'
+ 200 Script output follows
+
+ expand $Id: a bb948857c743 Thu, 01 Jan 1970 00:00:02 +0000 user $
+ do not process $Id:
+ xxx $
+ $Xinfo: User Name <user@example.com>: firstline $
+
+ $ killdaemons.py
$ cat errors.log
#endif
@@ -1372,4 +1398,78 @@
$Xinfo$
+xxxx
+Test that patch.diff(), which is implied by "hg diff" or so, doesn't
+suppress expanding keywords at subsequent commands
+
+#if windows
+ $ PYTHONPATH="$TESTDIR/../contrib;$PYTHONPATH"
+#else
+ $ PYTHONPATH="$TESTDIR/../contrib:$PYTHONPATH"
+#endif
+ $ export PYTHONPATH
+
+ $ grep -v '^promptecho ' < $HGRCPATH >> $HGRCPATH.new
+ $ mv $HGRCPATH.new $HGRCPATH
+
+ >>> from __future__ import print_function
+ >>> from hgclient import readchannel, runcommand, check
+ >>> @check
+ ... def check(server):
+ ... # hello block
+ ... readchannel(server)
+ ...
+ ... runcommand(server, ['cat', 'm'])
+ ... runcommand(server, ['diff', '-c', '.', 'm'])
+ ... runcommand(server, ['cat', 'm'])
+ *** runcommand cat m
+ $Id: m 800511b3a22d Thu, 01 Jan 1970 00:00:00 +0000 test $
+ bar
+ *** runcommand diff -c . m
+ *** runcommand cat m
+ $Id: m 800511b3a22d Thu, 01 Jan 1970 00:00:00 +0000 test $
+ bar
+
$ cd ..
+
+#if serve
+
+Test that keywords are expanded only in repositories, which enable
+keyword extension, even if multiple repositories are served in a
+process
+
+ $ cat >> fetch-merge/.hg/hgrc <<EOF
+ > [extensions]
+ > keyword = !
+ > EOF
+
+ $ cat > paths.conf <<EOF
+ > [paths]
+ > enabled=Test
+ > disabled=fetch-merge
+ > EOF
+
+ $ hg serve -p $HGPORT -d --pid-file=hg.pid -A access.log -E error.log --webdir-conf paths.conf
+ $ cat hg.pid >> $DAEMON_PIDS
+
+ $ get-with-headers.py localhost:$HGPORT 'enabled/file/tip/m/?style=raw'
+ 200 Script output follows
+
+ $Id: m 800511b3a22d Thu, 01 Jan 1970 00:00:00 +0000 test $
+ bar
+
+ $ get-with-headers.py localhost:$HGPORT 'disabled/file/tip/m/?style=raw'
+ 200 Script output follows
+
+ $Id$
+ bar
+
+(check expansion again, for safety)
+
+ $ get-with-headers.py localhost:$HGPORT 'enabled/file/tip/m/?style=raw'
+ 200 Script output follows
+
+ $Id: m 800511b3a22d Thu, 01 Jan 1970 00:00:00 +0000 test $
+ bar
+
+ $ killdaemons.py
+#endif
--- a/tests/test-largefiles-cache.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-largefiles-cache.t Wed Jul 19 07:51:41 2017 -0500
@@ -93,7 +93,7 @@
Portable way to print file permissions:
$ cat > ls-l.py <<EOF
- > #!/usr/bin/env python
+ > #!$PYTHON
> import sys, os
> path = sys.argv[1]
> print '%03o' % (os.lstat(path).st_mode & 0777)
@@ -206,6 +206,7 @@
large: data corruption in $TESTTMP/src/.hg/largefiles/e2fb5f2139d086ded2cb600d5a91a196e76bf020 with hash 6a7bb2556144babe3899b25e5428123735bb1e27 (glob)
0 largefiles updated, 0 removed
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "cd24c147f45c: modified"
[12] other heads for branch "default" (re)
$ hg st
! large
--- a/tests/test-largefiles-misc.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-largefiles-misc.t Wed Jul 19 07:51:41 2017 -0500
@@ -270,7 +270,7 @@
update: (current)
phases: 3 draft
$ hg ci -m "this commit should fail without -S"
- abort: uncommitted changes in subrepository 'subrepo'
+ abort: uncommitted changes in subrepository "subrepo"
(use --subrepos for recursive commit)
[255]
@@ -479,7 +479,7 @@
summary: anotherlarge
$ hg --debug log -T '{rev}: {desc}\n' ../sub/anotherlarge
- updated patterns: ['../.hglf/sub/../sub/anotherlarge', '../sub/anotherlarge']
+ updated patterns: ../.hglf/sub/../sub/anotherlarge, ../sub/anotherlarge
1: anotherlarge
$ hg log -G anotherlarge
@@ -498,18 +498,18 @@
summary: anotherlarge
$ hg --debug log -T '{rev}: {desc}\n' -G glob:another*
- updated patterns: ['glob:../.hglf/sub/another*', 'glob:another*']
+ updated patterns: glob:../.hglf/sub/another*, glob:another*
@ 1: anotherlarge
|
~
#if no-msys
$ hg --debug log -T '{rev}: {desc}\n' 'glob:../.hglf/sub/another*' # no-msys
- updated patterns: ['glob:../.hglf/sub/another*']
+ updated patterns: glob:../.hglf/sub/another*
1: anotherlarge
$ hg --debug log -G -T '{rev}: {desc}\n' 'glob:../.hglf/sub/another*' # no-msys
- updated patterns: ['glob:../.hglf/sub/another*']
+ updated patterns: glob:../.hglf/sub/another*
@ 1: anotherlarge
|
~
@@ -557,10 +557,10 @@
Log from outer space
$ hg --debug log -R addrm2 -T '{rev}: {desc}\n' 'addrm2/sub/anotherlarge'
- updated patterns: ['addrm2/.hglf/sub/anotherlarge', 'addrm2/sub/anotherlarge']
+ updated patterns: addrm2/.hglf/sub/anotherlarge, addrm2/sub/anotherlarge
1: anotherlarge
$ hg --debug log -R addrm2 -T '{rev}: {desc}\n' 'addrm2/.hglf/sub/anotherlarge'
- updated patterns: ['addrm2/.hglf/sub/anotherlarge']
+ updated patterns: addrm2/.hglf/sub/anotherlarge
1: anotherlarge
--- a/tests/test-largefiles-update.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-largefiles-update.t Wed Jul 19 07:51:41 2017 -0500
@@ -71,6 +71,7 @@
$ hg up
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "f74e50bd9e55: #2"
1 other heads for branch "default"
$ hg debugdirstate --large --nodate
n 644 7 set large1
@@ -86,6 +87,7 @@
n 644 13 set large2
$ hg up
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "f74e50bd9e55: #2"
1 other heads for branch "default"
$ hg debugdirstate --large --nodate
n 644 7 set large1
@@ -212,13 +214,11 @@
$ hg commit -m 'will be rollback-ed soon'
$ echo largeY > largeY
$ hg add --large largeY
-#if windows
+
$ hg status -A large1
- large1: * (glob)
-#else
- $ hg status -A large1
- large1: No such file or directory
-#endif
+ large1: The system cannot find the file specified (windows !)
+ large1: No such file or directory (no-windows !)
+
$ hg status -A large2
? large2
$ hg status -A largeX
@@ -307,39 +307,33 @@
$ hg rebase -s 1 -d 2 --keep
rebasing 1:72518492caa6 "#1"
rebasing 4:07d6153b5c04 "#4" (tip)
-#if windows
+
$ hg status -A large1
- large1: * (glob)
-#else
- $ hg status -A large1
- large1: No such file or directory
-#endif
+ large1: The system cannot find the file specified (windows !)
+ large1: No such file or directory (no-windows !)
+
$ hg status -A largeX
C largeX
$ hg strip -q 5
$ hg update -q -C 2
$ hg transplant -q 1 4
-#if windows
+
$ hg status -A large1
- large1: * (glob)
-#else
- $ hg status -A large1
- large1: No such file or directory
-#endif
+ large1: The system cannot find the file specified (windows !)
+ large1: No such file or directory (no-windows !)
+
$ hg status -A largeX
C largeX
$ hg strip -q 5
$ hg update -q -C 2
$ hg transplant -q --merge 1 --merge 4
-#if windows
+
$ hg status -A large1
- large1: * (glob)
-#else
- $ hg status -A large1
- large1: No such file or directory
-#endif
+ large1: The system cannot find the file specified (windows !)
+ large1: No such file or directory (no-windows !)
+
$ hg status -A largeX
C largeX
$ hg strip -q 5
@@ -471,6 +465,7 @@
keep (l)ocal ba94c2efe5b7c5e0af8d189295ce00553b0612b7 or
take (o)ther e5bb990443d6a92aaf7223813720f7566c9dd05b? l
2 files updated, 1 files merged, 0 files removed, 0 files unresolved
+ updated to "d65e59e952a9: #5"
1 other heads for branch "default"
$ hg status -A large1
@@ -505,6 +500,7 @@
keep (l)ocal ba94c2efe5b7c5e0af8d189295ce00553b0612b7 or
take (o)ther e5bb990443d6a92aaf7223813720f7566c9dd05b? l
2 files updated, 1 files merged, 0 files removed, 0 files unresolved
+ updated to "d65e59e952a9: #5"
1 other heads for branch "default"
$ hg status -A large1
--- a/tests/test-largefiles-wireproto.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-largefiles-wireproto.t Wed Jul 19 07:51:41 2017 -0500
@@ -112,7 +112,7 @@
#endif
vanilla clients locked out from largefiles ssh repos
- $ hg --config extensions.largefiles=! clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/r4 r5
+ $ hg --config extensions.largefiles=! clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/r4 r5
remote:
remote: This repository uses the largefiles extension.
remote:
--- a/tests/test-largefiles.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-largefiles.t Wed Jul 19 07:51:41 2017 -0500
@@ -215,21 +215,17 @@
Test largefiles can be loaded in hgweb (wrapcommand() shouldn't fail)
$ cat <<EOF > "$TESTTMP/hgweb.cgi"
- > #!/usr/bin/env python
+ > #!$PYTHON
> from mercurial import demandimport; demandimport.enable()
> from mercurial.hgweb import hgweb
> from mercurial.hgweb import wsgicgi
> application = hgweb('.', 'test repo')
> wsgicgi.launch(application)
> EOF
+ $ . "$TESTDIR/cgienv"
- $ PATH_INFO='/' \
- > QUERY_STRING='' \
- > REQUEST_METHOD='GET' \
- > SCRIPT_NAME='' \
- > SERVER_NAME='localhost' \
- > SERVER_PORT='80' \
- > python "$TESTTMP/hgweb.cgi" > /dev/null
+ $ SCRIPT_NAME='' \
+ > $PYTHON "$TESTTMP/hgweb.cgi" > /dev/null
Test archiving the various revisions. These hit corner cases known with
archiving.
@@ -1209,7 +1205,7 @@
Invoking status precommit hook
M sub/normal4
M sub2/large6
- saved backup bundle to $TESTTMP/d/.hg/strip-backup/f574fb32bb45-dd1d9f80-backup.hg (glob)
+ saved backup bundle to $TESTTMP/d/.hg/strip-backup/f574fb32bb45-dd1d9f80-rebase.hg (glob)
0 largefiles cached
$ [ -f .hg/largefiles/e166e74c7303192238d60af5a9c4ce9bef0b7928 ]
$ hg log --template '{rev}:{node|short} {desc|firstline}\n'
@@ -1268,7 +1264,7 @@
Invoking status precommit hook
M sub/normal4
M sub2/large6
- saved backup bundle to $TESTTMP/e/.hg/strip-backup/f574fb32bb45-dd1d9f80-backup.hg (glob)
+ saved backup bundle to $TESTTMP/e/.hg/strip-backup/f574fb32bb45-dd1d9f80-rebase.hg (glob)
$ hg log --template '{rev}:{node|short} {desc|firstline}\n'
9:598410d3eb9a modify normal file largefile in repo d
8:a381d2c8c80e modify normal file and largefile in repo b
--- a/tests/test-lfconvert.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-lfconvert.t Wed Jul 19 07:51:41 2017 -0500
@@ -329,6 +329,7 @@
> evolution=createmarkers
> EOF
$ hg debugobsolete `hg log -r tip -T "{node}"`
+ obsoleted 1 changesets
$ cd ..
$ hg -R largefiles-repo-hg verify --large --lfa
--- a/tests/test-lock-badness.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-lock-badness.t Wed Jul 19 07:51:41 2017 -0500
@@ -14,10 +14,10 @@
Test that raising an exception in the release function doesn't cause the lock to choke
$ cat > testlock.py << EOF
- > from mercurial import cmdutil, error, error
+ > from mercurial import error, registrar
>
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
>
> def acquiretestlock(repo, releaseexc):
> def unlock():
@@ -26,7 +26,7 @@
> l = repo._lock(repo.vfs, 'testlock', False, unlock, None, 'test lock')
> return l
>
- > @command('testlockexc')
+ > @command(b'testlockexc')
> def testlockexc(ui, repo):
> testlock = acquiretestlock(repo, True)
> try:
--- a/tests/test-lock.py Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-lock.py Wed Jul 19 07:51:41 2017 -0500
@@ -260,12 +260,10 @@
lock = state.makelock(inheritchecker=check)
state.assertacquirecalled(True)
- def tryinherit():
+ with self.assertRaises(error.LockInheritanceContractViolation):
with lock.inherit():
pass
- self.assertRaises(error.LockInheritanceContractViolation, tryinherit)
-
lock.release()
def testfrequentlockunlock(self):
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-log-exthook.t Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,56 @@
+Test hg log changeset printer external hook
+-------------------------------------------
+
+ $ cat > $TESTTMP/logexthook.py <<EOF
+ > from mercurial import repair, commands
+ > from mercurial import cmdutil
+ > def rot13description(self, ctx):
+ > summary = "summary".encode('rot13')
+ > description = ctx.description().strip().splitlines()[0].encode('rot13')
+ > self.ui.write("%s: %s\n" % (summary, description))
+ > def reposetup(ui, repo):
+ > cmdutil.changeset_printer._exthook = rot13description
+ > EOF
+
+Prepare the repository
+
+ $ hg init empty
+ $ cd empty
+ $ touch ROOT
+ $ hg commit -A -m "Root" ROOT
+
+ $ touch a b c
+ $ hg commit -A -m "Add A, B, C" a b c
+
+Check the log
+
+ $ hg log --config extensions.t=$TESTTMP/logexthook.py
+ changeset: 1:70fc82b23320
+ tag: tip
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ fhzznel: Nqq N, O, P
+ summary: Add A, B, C
+
+ changeset: 0:b00443a54871
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ fhzznel: Ebbg
+ summary: Root
+
+Check that exthook is working with graph log too
+
+ $ hg log -G --config extensions.t=$TESTTMP/logexthook.py
+ @ changeset: 1:70fc82b23320
+ | tag: tip
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | fhzznel: Nqq N, O, P
+ | summary: Add A, B, C
+ |
+ o changeset: 0:b00443a54871
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ fhzznel: Ebbg
+ summary: Root
+
--- a/tests/test-log.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-log.t Wed Jul 19 07:51:41 2017 -0500
@@ -47,7 +47,8 @@
Make sure largefiles doesn't interfere with logging a regular file
$ hg --debug log a -T '{rev}: {desc}\n' --config extensions.largefiles=
- updated patterns: ['.hglf/a', 'a']
+ The fsmonitor extension is incompatible with the largefiles extension and has been disabled. (fsmonitor !)
+ updated patterns: .hglf/a, a
0: a
$ hg log a
changeset: 0:9161b9aeaf16
@@ -67,7 +68,8 @@
summary: a
$ hg --debug log glob:a* -T '{rev}: {desc}\n' --config extensions.largefiles=
- updated patterns: ['glob:.hglf/a*', 'glob:a*']
+ The fsmonitor extension is incompatible with the largefiles extension and has been disabled. (fsmonitor !)
+ updated patterns: glob:.hglf/a*, glob:a*
3: d
0: a
@@ -1705,6 +1707,7 @@
1:a765632148dc55d38c35c4f247c618701886cb2f
0:9f758d63dcde62d547ebfb08e1e7ee96535f2b05
$ hg debugobsolete a765632148dc55d38c35c4f247c618701886cb2f
+ obsoleted 1 changesets
$ hg up null -q
$ hg log --template='{rev}:{node}\n'
0:9f758d63dcde62d547ebfb08e1e7ee96535f2b05
@@ -1752,6 +1755,7 @@
$ hg bookmark -d X@foo
$ hg up null -q
$ hg debugobsolete 9f758d63dcde62d547ebfb08e1e7ee96535f2b05
+ obsoleted 1 changesets
$ echo f > b
$ hg ci -Am'b' -d '2 0'
adding b
@@ -1788,7 +1792,7 @@
$ hg init problematicencoding
$ cd problematicencoding
- $ python > setup.sh <<EOF
+ $ $PYTHON > setup.sh <<EOF
> print u'''
> echo a > text
> hg add text
@@ -1804,7 +1808,7 @@
$ sh < setup.sh
test in problematic encoding
- $ python > test.sh <<EOF
+ $ $PYTHON > test.sh <<EOF
> print u'''
> hg --encoding cp932 log --template '{rev}\\n' -u '\u30A2'
> echo ====
@@ -2077,6 +2081,7 @@
Ensure that largefiles doesn't interfere with following a normal file
$ hg --config extensions.largefiles= log -f d -T '{desc}' -G
+ The fsmonitor extension is incompatible with the largefiles extension and has been disabled. (fsmonitor !)
@ c
|
o a
@@ -2204,6 +2209,7 @@
$ hg log -T '{node}\n' -r 1
2294ae80ad8447bc78383182eeac50cb049df623
$ hg debugobsolete 2294ae80ad8447bc78383182eeac50cb049df623
+ obsoleted 1 changesets
$ hg log -G
o changeset: 4:50b9b36e9c5d
| tag: tip
@@ -2253,6 +2259,7 @@
$ hg log -T '{node}\n' -r 4
50b9b36e9c5df2c6fc6dcefa8ad0da929e84aed2
$ hg debugobsolete 50b9b36e9c5df2c6fc6dcefa8ad0da929e84aed2
+ obsoleted 1 changesets
$ hg log -G a
@ changeset: 3:15b2327059e5
: tag: tip
--- a/tests/test-logtoprocess.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-logtoprocess.t Wed Jul 19 07:51:41 2017 -0500
@@ -1,3 +1,5 @@
+#require no-windows
+
ATTENTION: logtoprocess runs commands asynchronously. Be sure to append "| cat"
to hg commands, to wait for the output, if you want to test its output.
Otherwise the test will be flaky.
@@ -6,11 +8,11 @@
$ hg init
$ cat > $TESTTMP/foocommand.py << EOF
- > from mercurial import cmdutil
+ > from mercurial import registrar
> from time import sleep
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
- > @command('foo', [])
+ > command = registrar.command(cmdtable)
+ > @command(b'foo', [])
> def foo(ui, repo):
> ui.log('foo', 'a message: %(bar)s\n', bar='spam')
> EOF
--- a/tests/test-mac-packages.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-mac-packages.t Wed Jul 19 07:51:41 2017 -0500
@@ -1,6 +1,7 @@
#require test-repo slow osx osxpackaging
$ . "$TESTDIR/helpers-testrepo.sh"
+ $ testrepohgenv
$ OUTPUTDIR="`pwd`"
$ export OUTPUTDIR
@@ -25,7 +26,7 @@
Spot-check some randomly selected files:
$ grep bdiff boms.txt | cut -d ' ' -f 1,2,3
- ./Library/Python/2.7/site-packages/mercurial/bdiff.so 100755 0/0
+ ./Library/Python/2.7/site-packages/mercurial/cext/bdiff.so 100755 0/0
./Library/Python/2.7/site-packages/mercurial/pure/bdiff.py 100644 0/0
./Library/Python/2.7/site-packages/mercurial/pure/bdiff.pyc 100644 0/0
./Library/Python/2.7/site-packages/mercurial/pure/bdiff.pyo 100644 0/0
@@ -35,6 +36,7 @@
./usr/local/hg/contrib/hg-completion.bash 100644 0/0
$ egrep 'man[15]' boms.txt | cut -d ' ' -f 1,2,3
./usr/local/share/man/man1 40755 0/0
+ ./usr/local/share/man/man1/chg.1 100644 0/0
./usr/local/share/man/man1/hg.1 100644 0/0
./usr/local/share/man/man5 40755 0/0
./usr/local/share/man/man5/hgignore.5 100644 0/0
@@ -48,7 +50,8 @@
./Library/Python/2.7/site-packages/mercurial/localrepo.py 100644 0/0
./Library/Python/2.7/site-packages/mercurial/localrepo.pyc 100644 0/0
./Library/Python/2.7/site-packages/mercurial/localrepo.pyo 100644 0/0
- $ grep 'bin/hg ' boms.txt | cut -d ' ' -f 1,2,3
+ $ egrep 'bin/' boms.txt | cut -d ' ' -f 1,2,3
+ ./usr/local/bin/chg 100755 0/0
./usr/local/bin/hg 100755 0/0
Make sure the built binary uses the system Python interpreter
--- a/tests/test-mactext.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-mactext.t Wed Jul 19 07:51:41 2017 -0500
@@ -24,7 +24,7 @@
$ hg add f
$ hg ci -m 1
- $ python unix2mac.py f
+ $ $PYTHON unix2mac.py f
$ hg ci -m 2
attempt to commit or push text file(s) using CR line endings
in dea860dc51ec: f
@@ -32,7 +32,7 @@
rollback completed
abort: pretxncommit.cr hook failed
[255]
- $ hg cat f | python print.py
+ $ hg cat f | $PYTHON print.py
hello<LF>
- $ cat f | python print.py
+ $ cat f | $PYTHON print.py
hello<CR>
--- a/tests/test-manifest.py Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-manifest.py Wed Jul 19 07:51:41 2017 -0500
@@ -10,107 +10,102 @@
match as matchmod,
)
-EMTPY_MANIFEST = ''
-EMTPY_MANIFEST_V2 = '\0\n'
+EMTPY_MANIFEST = b''
+EMTPY_MANIFEST_V2 = b'\0\n'
-HASH_1 = '1' * 40
+HASH_1 = b'1' * 40
BIN_HASH_1 = binascii.unhexlify(HASH_1)
-HASH_2 = 'f' * 40
+HASH_2 = b'f' * 40
BIN_HASH_2 = binascii.unhexlify(HASH_2)
-HASH_3 = '1234567890abcdef0987654321deadbeef0fcafe'
+HASH_3 = b'1234567890abcdef0987654321deadbeef0fcafe'
BIN_HASH_3 = binascii.unhexlify(HASH_3)
A_SHORT_MANIFEST = (
- 'bar/baz/qux.py\0%(hash2)s%(flag2)s\n'
- 'foo\0%(hash1)s%(flag1)s\n'
- ) % {'hash1': HASH_1,
- 'flag1': '',
- 'hash2': HASH_2,
- 'flag2': 'l',
+ b'bar/baz/qux.py\0%(hash2)s%(flag2)s\n'
+ b'foo\0%(hash1)s%(flag1)s\n'
+ ) % {b'hash1': HASH_1,
+ b'flag1': b'',
+ b'hash2': HASH_2,
+ b'flag2': b'l',
}
# Same data as A_SHORT_MANIFEST
A_SHORT_MANIFEST_V2 = (
- '\0\n'
- '\x00bar/baz/qux.py\0%(flag2)s\n%(hash2)s\n'
- '\x00foo\0%(flag1)s\n%(hash1)s\n'
- ) % {'hash1': BIN_HASH_1,
- 'flag1': '',
- 'hash2': BIN_HASH_2,
- 'flag2': 'l',
+ b'\0\n'
+ b'\x00bar/baz/qux.py\0%(flag2)s\n%(hash2)s\n'
+ b'\x00foo\0%(flag1)s\n%(hash1)s\n'
+ ) % {b'hash1': BIN_HASH_1,
+ b'flag1': b'',
+ b'hash2': BIN_HASH_2,
+ b'flag2': b'l',
}
# Same data as A_SHORT_MANIFEST
A_METADATA_MANIFEST = (
- '\0foo\0bar\n'
- '\x00bar/baz/qux.py\0%(flag2)s\0foo\0bar\n%(hash2)s\n' # flag and metadata
- '\x00foo\0%(flag1)s\0foo\n%(hash1)s\n' # no flag, but metadata
- ) % {'hash1': BIN_HASH_1,
- 'flag1': '',
- 'hash2': BIN_HASH_2,
- 'flag2': 'l',
+ b'\0foo\0bar\n'
+ b'\x00bar/baz/qux.py\0%(flag2)s\0foo\0bar\n%(hash2)s\n' # flag and metadata
+ b'\x00foo\0%(flag1)s\0foo\n%(hash1)s\n' # no flag, but metadata
+ ) % {b'hash1': BIN_HASH_1,
+ b'flag1': b'',
+ b'hash2': BIN_HASH_2,
+ b'flag2': b'l',
}
A_STEM_COMPRESSED_MANIFEST = (
- '\0\n'
- '\x00bar/baz/qux.py\0%(flag2)s\n%(hash2)s\n'
- '\x04qux/foo.py\0%(flag1)s\n%(hash1)s\n' # simple case of 4 stem chars
- '\x0az.py\0%(flag1)s\n%(hash1)s\n' # tricky newline = 10 stem characters
- '\x00%(verylongdir)sx/x\0\n%(hash1)s\n'
- '\xffx/y\0\n%(hash2)s\n' # more than 255 stem chars
- ) % {'hash1': BIN_HASH_1,
- 'flag1': '',
- 'hash2': BIN_HASH_2,
- 'flag2': 'l',
- 'verylongdir': 255 * 'x',
+ b'\0\n'
+ b'\x00bar/baz/qux.py\0%(flag2)s\n%(hash2)s\n'
+ b'\x04qux/foo.py\0%(flag1)s\n%(hash1)s\n' # simple case of 4 stem chars
+ b'\x0az.py\0%(flag1)s\n%(hash1)s\n' # tricky newline = 10 stem characters
+ b'\x00%(verylongdir)sx/x\0\n%(hash1)s\n'
+ b'\xffx/y\0\n%(hash2)s\n' # more than 255 stem chars
+ ) % {b'hash1': BIN_HASH_1,
+ b'flag1': b'',
+ b'hash2': BIN_HASH_2,
+ b'flag2': b'l',
+ b'verylongdir': 255 * b'x',
}
A_DEEPER_MANIFEST = (
- 'a/b/c/bar.py\0%(hash3)s%(flag1)s\n'
- 'a/b/c/bar.txt\0%(hash1)s%(flag1)s\n'
- 'a/b/c/foo.py\0%(hash3)s%(flag1)s\n'
- 'a/b/c/foo.txt\0%(hash2)s%(flag2)s\n'
- 'a/b/d/baz.py\0%(hash3)s%(flag1)s\n'
- 'a/b/d/qux.py\0%(hash1)s%(flag2)s\n'
- 'a/b/d/ten.txt\0%(hash3)s%(flag2)s\n'
- 'a/b/dog.py\0%(hash3)s%(flag1)s\n'
- 'a/b/fish.py\0%(hash2)s%(flag1)s\n'
- 'a/c/london.py\0%(hash3)s%(flag2)s\n'
- 'a/c/paper.txt\0%(hash2)s%(flag2)s\n'
- 'a/c/paris.py\0%(hash2)s%(flag1)s\n'
- 'a/d/apple.py\0%(hash3)s%(flag1)s\n'
- 'a/d/pizza.py\0%(hash3)s%(flag2)s\n'
- 'a/green.py\0%(hash1)s%(flag2)s\n'
- 'a/purple.py\0%(hash2)s%(flag1)s\n'
- 'app.py\0%(hash3)s%(flag1)s\n'
- 'readme.txt\0%(hash2)s%(flag1)s\n'
- ) % {'hash1': HASH_1,
- 'flag1': '',
- 'hash2': HASH_2,
- 'flag2': 'l',
- 'hash3': HASH_3,
+ b'a/b/c/bar.py\0%(hash3)s%(flag1)s\n'
+ b'a/b/c/bar.txt\0%(hash1)s%(flag1)s\n'
+ b'a/b/c/foo.py\0%(hash3)s%(flag1)s\n'
+ b'a/b/c/foo.txt\0%(hash2)s%(flag2)s\n'
+ b'a/b/d/baz.py\0%(hash3)s%(flag1)s\n'
+ b'a/b/d/qux.py\0%(hash1)s%(flag2)s\n'
+ b'a/b/d/ten.txt\0%(hash3)s%(flag2)s\n'
+ b'a/b/dog.py\0%(hash3)s%(flag1)s\n'
+ b'a/b/fish.py\0%(hash2)s%(flag1)s\n'
+ b'a/c/london.py\0%(hash3)s%(flag2)s\n'
+ b'a/c/paper.txt\0%(hash2)s%(flag2)s\n'
+ b'a/c/paris.py\0%(hash2)s%(flag1)s\n'
+ b'a/d/apple.py\0%(hash3)s%(flag1)s\n'
+ b'a/d/pizza.py\0%(hash3)s%(flag2)s\n'
+ b'a/green.py\0%(hash1)s%(flag2)s\n'
+ b'a/purple.py\0%(hash2)s%(flag1)s\n'
+ b'app.py\0%(hash3)s%(flag1)s\n'
+ b'readme.txt\0%(hash2)s%(flag1)s\n'
+ ) % {b'hash1': HASH_1,
+ b'flag1': b'',
+ b'hash2': HASH_2,
+ b'flag2': b'l',
+ b'hash3': HASH_3,
}
HUGE_MANIFEST_ENTRIES = 200001
-A_HUGE_MANIFEST = ''.join(sorted(
- 'file%d\0%s%s\n' % (i, h, f) for i, h, f in
- itertools.izip(xrange(200001),
- itertools.cycle((HASH_1, HASH_2)),
- itertools.cycle(('', 'x', 'l')))))
+izip = getattr(itertools, 'izip', zip)
+if 'xrange' not in globals():
+ xrange = range
+
+A_HUGE_MANIFEST = b''.join(sorted(
+ b'file%d\0%s%s\n' % (i, h, f) for i, h, f in
+ izip(xrange(200001),
+ itertools.cycle((HASH_1, HASH_2)),
+ itertools.cycle((b'', b'x', b'l')))))
class basemanifesttests(object):
def parsemanifest(self, text):
raise NotImplementedError('parsemanifest not implemented by test case')
- def assertIn(self, thing, container, msg=None):
- # assertIn new in 2.7, use it if available, otherwise polyfill
- sup = getattr(unittest.TestCase, 'assertIn', False)
- if sup:
- return sup(self, thing, container, msg=msg)
- if not msg:
- msg = 'Expected %r in %r' % (thing, container)
- self.assert_(thing in container, msg)
-
def testEmptyManifest(self):
m = self.parsemanifest(EMTPY_MANIFEST)
self.assertEqual(0, len(m))
@@ -123,12 +118,13 @@
def testManifest(self):
m = self.parsemanifest(A_SHORT_MANIFEST)
- self.assertEqual(['bar/baz/qux.py', 'foo'], list(m))
- self.assertEqual(BIN_HASH_2, m['bar/baz/qux.py'])
- self.assertEqual('l', m.flags('bar/baz/qux.py'))
- self.assertEqual(BIN_HASH_1, m['foo'])
- self.assertEqual('', m.flags('foo'))
- self.assertRaises(KeyError, lambda : m['wat'])
+ self.assertEqual([b'bar/baz/qux.py', b'foo'], list(m))
+ self.assertEqual(BIN_HASH_2, m[b'bar/baz/qux.py'])
+ self.assertEqual(b'l', m.flags(b'bar/baz/qux.py'))
+ self.assertEqual(BIN_HASH_1, m[b'foo'])
+ self.assertEqual(b'', m.flags(b'foo'))
+ with self.assertRaises(KeyError):
+ m[b'wat']
def testParseManifestV2(self):
m1 = self.parsemanifest(A_SHORT_MANIFEST)
@@ -143,11 +139,11 @@
def testParseManifestStemCompression(self):
m = self.parsemanifest(A_STEM_COMPRESSED_MANIFEST)
- self.assertIn('bar/baz/qux.py', m)
- self.assertIn('bar/qux/foo.py', m)
- self.assertIn('bar/qux/foz.py', m)
- self.assertIn(256 * 'x' + '/x', m)
- self.assertIn(256 * 'x' + '/y', m)
+ self.assertIn(b'bar/baz/qux.py', m)
+ self.assertIn(b'bar/qux/foo.py', m)
+ self.assertIn(b'bar/qux/foz.py', m)
+ self.assertIn(256 * b'x' + b'/x', m)
+ self.assertIn(256 * b'x' + b'/y', m)
self.assertEqual(A_STEM_COMPRESSED_MANIFEST, m.text(usemanifestv2=True))
def testTextV2(self):
@@ -159,38 +155,38 @@
want = BIN_HASH_1
m = self.parsemanifest(EMTPY_MANIFEST)
- m['a'] = want
- self.assertIn('a', m)
- self.assertEqual(want, m['a'])
- self.assertEqual('a\0' + HASH_1 + '\n', m.text())
+ m[b'a'] = want
+ self.assertIn(b'a', m)
+ self.assertEqual(want, m[b'a'])
+ self.assertEqual(b'a\0' + HASH_1 + b'\n', m.text())
m = self.parsemanifest(A_SHORT_MANIFEST)
- m['a'] = want
- self.assertEqual(want, m['a'])
- self.assertEqual('a\0' + HASH_1 + '\n' + A_SHORT_MANIFEST,
+ m[b'a'] = want
+ self.assertEqual(want, m[b'a'])
+ self.assertEqual(b'a\0' + HASH_1 + b'\n' + A_SHORT_MANIFEST,
m.text())
def testSetFlag(self):
- want = 'x'
+ want = b'x'
m = self.parsemanifest(EMTPY_MANIFEST)
# first add a file; a file-less flag makes no sense
- m['a'] = BIN_HASH_1
- m.setflag('a', want)
- self.assertEqual(want, m.flags('a'))
- self.assertEqual('a\0' + HASH_1 + want + '\n', m.text())
+ m[b'a'] = BIN_HASH_1
+ m.setflag(b'a', want)
+ self.assertEqual(want, m.flags(b'a'))
+ self.assertEqual(b'a\0' + HASH_1 + want + b'\n', m.text())
m = self.parsemanifest(A_SHORT_MANIFEST)
# first add a file; a file-less flag makes no sense
- m['a'] = BIN_HASH_1
- m.setflag('a', want)
- self.assertEqual(want, m.flags('a'))
- self.assertEqual('a\0' + HASH_1 + want + '\n' + A_SHORT_MANIFEST,
+ m[b'a'] = BIN_HASH_1
+ m.setflag(b'a', want)
+ self.assertEqual(want, m.flags(b'a'))
+ self.assertEqual(b'a\0' + HASH_1 + want + b'\n' + A_SHORT_MANIFEST,
m.text())
def testCopy(self):
m = self.parsemanifest(A_SHORT_MANIFEST)
- m['a'] = BIN_HASH_1
+ m[b'a'] = BIN_HASH_1
m2 = m.copy()
del m
del m2 # make sure we don't double free() anything
@@ -199,132 +195,135 @@
unhex = binascii.unhexlify
h1, h2 = unhex(HASH_1), unhex(HASH_2)
m = self.parsemanifest(A_SHORT_MANIFEST)
- m['alpha'] = h1
- m['beta'] = h2
- del m['foo']
- want = 'alpha\0%s\nbar/baz/qux.py\0%sl\nbeta\0%s\n' % (
+ m[b'alpha'] = h1
+ m[b'beta'] = h2
+ del m[b'foo']
+ want = b'alpha\0%s\nbar/baz/qux.py\0%sl\nbeta\0%s\n' % (
HASH_1, HASH_2, HASH_2)
self.assertEqual(want, m.text())
self.assertEqual(3, len(m))
- self.assertEqual(['alpha', 'bar/baz/qux.py', 'beta'], list(m))
- self.assertEqual(h1, m['alpha'])
- self.assertEqual(h2, m['bar/baz/qux.py'])
- self.assertEqual(h2, m['beta'])
- self.assertEqual('', m.flags('alpha'))
- self.assertEqual('l', m.flags('bar/baz/qux.py'))
- self.assertEqual('', m.flags('beta'))
- self.assertRaises(KeyError, lambda : m['foo'])
+ self.assertEqual([b'alpha', b'bar/baz/qux.py', b'beta'], list(m))
+ self.assertEqual(h1, m[b'alpha'])
+ self.assertEqual(h2, m[b'bar/baz/qux.py'])
+ self.assertEqual(h2, m[b'beta'])
+ self.assertEqual(b'', m.flags(b'alpha'))
+ self.assertEqual(b'l', m.flags(b'bar/baz/qux.py'))
+ self.assertEqual(b'', m.flags(b'beta'))
+ with self.assertRaises(KeyError):
+ m[b'foo']
def testSetGetNodeSuffix(self):
clean = self.parsemanifest(A_SHORT_MANIFEST)
m = self.parsemanifest(A_SHORT_MANIFEST)
- h = m['foo']
- f = m.flags('foo')
- want = h + 'a'
+ h = m[b'foo']
+ f = m.flags(b'foo')
+ want = h + b'a'
# Merge code wants to set 21-byte fake hashes at times
- m['foo'] = want
- self.assertEqual(want, m['foo'])
- self.assertEqual([('bar/baz/qux.py', BIN_HASH_2),
- ('foo', BIN_HASH_1 + 'a')],
+ m[b'foo'] = want
+ self.assertEqual(want, m[b'foo'])
+ self.assertEqual([(b'bar/baz/qux.py', BIN_HASH_2),
+ (b'foo', BIN_HASH_1 + b'a')],
list(m.iteritems()))
# Sometimes it even tries a 22-byte fake hash, but we can
# return 21 and it'll work out
- m['foo'] = want + '+'
- self.assertEqual(want, m['foo'])
+ m[b'foo'] = want + b'+'
+ self.assertEqual(want, m[b'foo'])
# make sure the suffix survives a copy
- match = matchmod.match('', '', ['re:foo'])
+ match = matchmod.match(b'', b'', [b're:foo'])
m2 = m.matches(match)
- self.assertEqual(want, m2['foo'])
+ self.assertEqual(want, m2[b'foo'])
self.assertEqual(1, len(m2))
m2 = m.copy()
- self.assertEqual(want, m2['foo'])
+ self.assertEqual(want, m2[b'foo'])
# suffix with iteration
- self.assertEqual([('bar/baz/qux.py', BIN_HASH_2),
- ('foo', want)],
+ self.assertEqual([(b'bar/baz/qux.py', BIN_HASH_2),
+ (b'foo', want)],
list(m.iteritems()))
# shows up in diff
- self.assertEqual({'foo': ((want, f), (h, ''))}, m.diff(clean))
- self.assertEqual({'foo': ((h, ''), (want, f))}, clean.diff(m))
+ self.assertEqual({b'foo': ((want, f), (h, b''))}, m.diff(clean))
+ self.assertEqual({b'foo': ((h, b''), (want, f))}, clean.diff(m))
def testMatchException(self):
m = self.parsemanifest(A_SHORT_MANIFEST)
- match = matchmod.match('', '', ['re:.*'])
+ match = matchmod.match(b'', b'', [b're:.*'])
def filt(path):
- if path == 'foo':
+ if path == b'foo':
assert False
return True
match.matchfn = filt
- self.assertRaises(AssertionError, m.matches, match)
+ with self.assertRaises(AssertionError):
+ m.matches(match)
def testRemoveItem(self):
m = self.parsemanifest(A_SHORT_MANIFEST)
- del m['foo']
- self.assertRaises(KeyError, lambda : m['foo'])
+ del m[b'foo']
+ with self.assertRaises(KeyError):
+ m[b'foo']
self.assertEqual(1, len(m))
self.assertEqual(1, len(list(m)))
# now restore and make sure everything works right
- m['foo'] = 'a' * 20
+ m[b'foo'] = b'a' * 20
self.assertEqual(2, len(m))
self.assertEqual(2, len(list(m)))
def testManifestDiff(self):
- MISSING = (None, '')
- addl = 'z-only-in-left\0' + HASH_1 + '\n'
- addr = 'z-only-in-right\0' + HASH_2 + 'x\n'
+ MISSING = (None, b'')
+ addl = b'z-only-in-left\0' + HASH_1 + b'\n'
+ addr = b'z-only-in-right\0' + HASH_2 + b'x\n'
left = self.parsemanifest(
- A_SHORT_MANIFEST.replace(HASH_1, HASH_3 + 'x') + addl)
+ A_SHORT_MANIFEST.replace(HASH_1, HASH_3 + b'x') + addl)
right = self.parsemanifest(A_SHORT_MANIFEST + addr)
want = {
- 'foo': ((BIN_HASH_3, 'x'),
- (BIN_HASH_1, '')),
- 'z-only-in-left': ((BIN_HASH_1, ''), MISSING),
- 'z-only-in-right': (MISSING, (BIN_HASH_2, 'x')),
+ b'foo': ((BIN_HASH_3, b'x'),
+ (BIN_HASH_1, b'')),
+ b'z-only-in-left': ((BIN_HASH_1, b''), MISSING),
+ b'z-only-in-right': (MISSING, (BIN_HASH_2, b'x')),
}
self.assertEqual(want, left.diff(right))
want = {
- 'bar/baz/qux.py': (MISSING, (BIN_HASH_2, 'l')),
- 'foo': (MISSING, (BIN_HASH_3, 'x')),
- 'z-only-in-left': (MISSING, (BIN_HASH_1, '')),
+ b'bar/baz/qux.py': (MISSING, (BIN_HASH_2, b'l')),
+ b'foo': (MISSING, (BIN_HASH_3, b'x')),
+ b'z-only-in-left': (MISSING, (BIN_HASH_1, b'')),
}
self.assertEqual(want, self.parsemanifest(EMTPY_MANIFEST).diff(left))
want = {
- 'bar/baz/qux.py': ((BIN_HASH_2, 'l'), MISSING),
- 'foo': ((BIN_HASH_3, 'x'), MISSING),
- 'z-only-in-left': ((BIN_HASH_1, ''), MISSING),
+ b'bar/baz/qux.py': ((BIN_HASH_2, b'l'), MISSING),
+ b'foo': ((BIN_HASH_3, b'x'), MISSING),
+ b'z-only-in-left': ((BIN_HASH_1, b''), MISSING),
}
self.assertEqual(want, left.diff(self.parsemanifest(EMTPY_MANIFEST)))
copy = right.copy()
- del copy['z-only-in-right']
- del right['foo']
+ del copy[b'z-only-in-right']
+ del right[b'foo']
want = {
- 'foo': (MISSING, (BIN_HASH_1, '')),
- 'z-only-in-right': ((BIN_HASH_2, 'x'), MISSING),
+ b'foo': (MISSING, (BIN_HASH_1, b'')),
+ b'z-only-in-right': ((BIN_HASH_2, b'x'), MISSING),
}
self.assertEqual(want, right.diff(copy))
short = self.parsemanifest(A_SHORT_MANIFEST)
pruned = short.copy()
- del pruned['foo']
+ del pruned[b'foo']
want = {
- 'foo': ((BIN_HASH_1, ''), MISSING),
+ b'foo': ((BIN_HASH_1, b''), MISSING),
}
self.assertEqual(want, short.diff(pruned))
want = {
- 'foo': (MISSING, (BIN_HASH_1, '')),
+ b'foo': (MISSING, (BIN_HASH_1, b'')),
}
self.assertEqual(want, pruned.diff(short))
want = {
- 'bar/baz/qux.py': None,
- 'foo': (MISSING, (BIN_HASH_1, '')),
+ b'bar/baz/qux.py': None,
+ b'foo': (MISSING, (BIN_HASH_1, b'')),
}
self.assertEqual(want, pruned.diff(short, clean=True))
def testReversedLines(self):
- backwards = ''.join(
- l + '\n' for l in reversed(A_SHORT_MANIFEST.split('\n')) if l)
+ backwards = b''.join(
+ l + b'\n' for l in reversed(A_SHORT_MANIFEST.split(b'\n')) if l)
try:
self.parsemanifest(backwards)
self.fail('Should have raised ValueError')
@@ -333,14 +332,14 @@
def testNoTerminalNewline(self):
try:
- self.parsemanifest(A_SHORT_MANIFEST + 'wat')
+ self.parsemanifest(A_SHORT_MANIFEST + b'wat')
self.fail('Should have raised ValueError')
except ValueError as v:
self.assertIn('Manifest did not end in a newline.', str(v))
def testNoNewLineAtAll(self):
try:
- self.parsemanifest('wat')
+ self.parsemanifest(b'wat')
self.fail('Should have raised ValueError')
except ValueError as v:
self.assertIn('Manifest did not end in a newline.', str(v))
@@ -356,13 +355,13 @@
the resulting manifest.'''
m = self.parsemanifest(A_HUGE_MANIFEST)
- match = matchmod.match('/', '',
- ['file1', 'file200', 'file300'], exact=True)
+ match = matchmod.match(b'/', b'',
+ [b'file1', b'file200', b'file300'], exact=True)
m2 = m.matches(match)
- w = ('file1\0%sx\n'
- 'file200\0%sl\n'
- 'file300\0%s\n') % (HASH_2, HASH_1, HASH_1)
+ w = (b'file1\0%sx\n'
+ b'file200\0%sl\n'
+ b'file300\0%s\n') % (HASH_2, HASH_1, HASH_1)
self.assertEqual(w, m2.text())
def testMatchesNonexistentFile(self):
@@ -371,13 +370,14 @@
'''
m = self.parsemanifest(A_DEEPER_MANIFEST)
- match = matchmod.match('/', '',
- ['a/b/c/bar.txt', 'a/b/d/qux.py', 'readme.txt', 'nonexistent'],
+ match = matchmod.match(b'/', b'',
+ [b'a/b/c/bar.txt', b'a/b/d/qux.py',
+ b'readme.txt', b'nonexistent'],
exact=True)
m2 = m.matches(match)
self.assertEqual(
- ['a/b/c/bar.txt', 'a/b/d/qux.py', 'readme.txt'],
+ [b'a/b/c/bar.txt', b'a/b/d/qux.py', b'readme.txt'],
m2.keys())
def testMatchesNonexistentDirectory(self):
@@ -385,7 +385,7 @@
actually exist.'''
m = self.parsemanifest(A_DEEPER_MANIFEST)
- match = matchmod.match('/', '', ['a/f'], default='relpath')
+ match = matchmod.match(b'/', b'', [b'a/f'], default=b'relpath')
m2 = m.matches(match)
self.assertEqual([], m2.keys())
@@ -396,7 +396,7 @@
m = self.parsemanifest(A_HUGE_MANIFEST)
flist = m.keys()[80:300]
- match = matchmod.match('/', '', flist, exact=True)
+ match = matchmod.match(b'/', b'', flist, exact=True)
m2 = m.matches(match)
self.assertEqual(flist, m2.keys())
@@ -405,7 +405,7 @@
'''Tests matches() for what should be a full match.'''
m = self.parsemanifest(A_DEEPER_MANIFEST)
- match = matchmod.match('/', '', [''])
+ match = matchmod.match(b'/', b'', [b''])
m2 = m.matches(match)
self.assertEqual(m.keys(), m2.keys())
@@ -415,13 +415,14 @@
match against all files within said directory.'''
m = self.parsemanifest(A_DEEPER_MANIFEST)
- match = matchmod.match('/', '', ['a/b'], default='relpath')
+ match = matchmod.match(b'/', b'', [b'a/b'], default=b'relpath')
m2 = m.matches(match)
self.assertEqual([
- 'a/b/c/bar.py', 'a/b/c/bar.txt', 'a/b/c/foo.py', 'a/b/c/foo.txt',
- 'a/b/d/baz.py', 'a/b/d/qux.py', 'a/b/d/ten.txt', 'a/b/dog.py',
- 'a/b/fish.py'], m2.keys())
+ b'a/b/c/bar.py', b'a/b/c/bar.txt', b'a/b/c/foo.py',
+ b'a/b/c/foo.txt',
+ b'a/b/d/baz.py', b'a/b/d/qux.py', b'a/b/d/ten.txt', b'a/b/dog.py',
+ b'a/b/fish.py'], m2.keys())
def testMatchesExactPath(self):
'''Tests matches() on an exact match on a directory, which should
@@ -429,7 +430,7 @@
against a directory.'''
m = self.parsemanifest(A_DEEPER_MANIFEST)
- match = matchmod.match('/', '', ['a/b'], exact=True)
+ match = matchmod.match(b'/', b'', [b'a/b'], exact=True)
m2 = m.matches(match)
self.assertEqual([], m2.keys())
@@ -439,24 +440,24 @@
when not in the root directory.'''
m = self.parsemanifest(A_DEEPER_MANIFEST)
- match = matchmod.match('/', 'a/b', ['.'], default='relpath')
+ match = matchmod.match(b'/', b'a/b', [b'.'], default=b'relpath')
m2 = m.matches(match)
self.assertEqual([
- 'a/b/c/bar.py', 'a/b/c/bar.txt', 'a/b/c/foo.py', 'a/b/c/foo.txt',
- 'a/b/d/baz.py', 'a/b/d/qux.py', 'a/b/d/ten.txt', 'a/b/dog.py',
- 'a/b/fish.py'], m2.keys())
+ b'a/b/c/bar.py', b'a/b/c/bar.txt', b'a/b/c/foo.py',
+ b'a/b/c/foo.txt', b'a/b/d/baz.py', b'a/b/d/qux.py',
+ b'a/b/d/ten.txt', b'a/b/dog.py', b'a/b/fish.py'], m2.keys())
def testMatchesWithPattern(self):
'''Tests matches() for files matching a pattern that reside
deeper than the specified directory.'''
m = self.parsemanifest(A_DEEPER_MANIFEST)
- match = matchmod.match('/', '', ['a/b/*/*.txt'])
+ match = matchmod.match(b'/', b'', [b'a/b/*/*.txt'])
m2 = m.matches(match)
self.assertEqual(
- ['a/b/c/bar.txt', 'a/b/c/foo.txt', 'a/b/d/ten.txt'],
+ [b'a/b/c/bar.txt', b'a/b/c/foo.txt', b'a/b/d/ten.txt'],
m2.keys())
class testmanifestdict(unittest.TestCase, basemanifesttests):
@@ -465,21 +466,22 @@
class testtreemanifest(unittest.TestCase, basemanifesttests):
def parsemanifest(self, text):
- return manifestmod.treemanifest('', text)
+ return manifestmod.treemanifest(b'', text)
def testWalkSubtrees(self):
m = self.parsemanifest(A_DEEPER_MANIFEST)
dirs = [s._dir for s in m.walksubtrees()]
self.assertEqual(
- sorted(['', 'a/', 'a/c/', 'a/d/', 'a/b/', 'a/b/c/', 'a/b/d/']),
+ sorted([
+ b'', b'a/', b'a/c/', b'a/d/', b'a/b/', b'a/b/c/', b'a/b/d/']),
sorted(dirs)
)
- match = matchmod.match('/', '', ['path:a/b/'])
+ match = matchmod.match(b'/', b'', [b'path:a/b/'])
dirs = [s._dir for s in m.walksubtrees(matcher=match)]
self.assertEqual(
- sorted(['a/b/', 'a/b/c/', 'a/b/d/']),
+ sorted([b'a/b/', b'a/b/c/', b'a/b/d/']),
sorted(dirs)
)
--- a/tests/test-merge-changedelete.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-merge-changedelete.t Wed Jul 19 07:51:41 2017 -0500
@@ -114,6 +114,7 @@
$ hg co -C
1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ updated to "13910f48cf7b: changed file1, removed file2, changed file3"
1 other heads for branch "default"
$ hg merge --config ui.interactive=true <<EOF
@@ -178,6 +179,7 @@
$ hg co -C
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "13910f48cf7b: changed file1, removed file2, changed file3"
1 other heads for branch "default"
$ hg merge --config ui.interactive=true <<EOF
@@ -254,6 +256,7 @@
$ hg co -C
2 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ updated to "13910f48cf7b: changed file1, removed file2, changed file3"
1 other heads for branch "default"
$ hg merge --config ui.interactive=true <<EOF
@@ -316,6 +319,7 @@
$ hg co -C
2 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ updated to "13910f48cf7b: changed file1, removed file2, changed file3"
1 other heads for branch "default"
$ hg merge --tool :local
@@ -364,6 +368,7 @@
$ hg co -C
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "13910f48cf7b: changed file1, removed file2, changed file3"
1 other heads for branch "default"
$ hg merge --tool :other
@@ -412,6 +417,7 @@
$ hg co -C
2 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ updated to "13910f48cf7b: changed file1, removed file2, changed file3"
1 other heads for branch "default"
$ hg merge --tool :fail
@@ -463,6 +469,7 @@
$ hg co -C
1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ updated to "13910f48cf7b: changed file1, removed file2, changed file3"
1 other heads for branch "default"
$ hg merge --config ui.interactive=True --tool :prompt
@@ -470,8 +477,7 @@
use (c)hanged version, (d)elete, or leave (u)nresolved?
other [merge rev] changed file2 which local [working copy] deleted
use (c)hanged version, leave (d)eleted, or leave (u)nresolved?
- no tool found to merge file3
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved?
+ keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for file3?
0 files updated, 0 files merged, 0 files removed, 3 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
[1]
@@ -522,6 +528,7 @@
$ hg co -C
1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ updated to "13910f48cf7b: changed file1, removed file2, changed file3"
1 other heads for branch "default"
$ hg merge --tool :prompt
@@ -529,8 +536,7 @@
use (c)hanged version, (d)elete, or leave (u)nresolved? u
other [merge rev] changed file2 which local [working copy] deleted
use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
- no tool found to merge file3
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved? u
+ keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for file3? u
0 files updated, 0 files merged, 0 files removed, 3 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
[1]
@@ -579,6 +585,7 @@
$ hg co -C
1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ updated to "13910f48cf7b: changed file1, removed file2, changed file3"
1 other heads for branch "default"
$ hg merge --tool :merge3
@@ -676,8 +683,7 @@
use (c)hanged version, (d)elete, or leave (u)nresolved?
other [merge rev] changed file2 which local [working copy] deleted
use (c)hanged version, leave (d)eleted, or leave (u)nresolved?
- no tool found to merge file3
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved?
+ keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for file3?
--- diff of status ---
(status identical)
@@ -705,8 +711,7 @@
use (c)hanged version, (d)elete, or leave (u)nresolved?
other [merge rev] changed file2 which local [working copy] deleted
use (c)hanged version, leave (d)eleted, or leave (u)nresolved?
- no tool found to merge file3
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved?
+ keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for file3?
--- diff of status ---
(status identical)
@@ -724,8 +729,7 @@
use (c)hanged version, (d)elete, or leave (u)nresolved?
other [merge rev] changed file2 which local [working copy] deleted
use (c)hanged version, leave (d)eleted, or leave (u)nresolved?
- no tool found to merge file3
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved?
+ keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for file3?
--- diff of status ---
(status identical)
--- a/tests/test-merge-default.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-merge-default.t Wed Jul 19 07:51:41 2017 -0500
@@ -33,6 +33,7 @@
$ hg up
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "f25cbe84d8b3: e"
2 other heads for branch "default"
Should fail because > 2 heads:
@@ -48,6 +49,18 @@
$ hg merge 2
0 files updated, 1 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
+ $ hg id -Tjson
+ [
+ {
+ "bookmarks": [],
+ "branch": "default",
+ "dirty": "+",
+ "id": "f25cbe84d8b3+2d95304fed5d+",
+ "node": "ffffffffffffffffffffffffffffffffffffffff",
+ "parents": [{"node": "f25cbe84d8b320e298e7703f18a25a3959518c23", "rev": 4}, {"node": "2d95304fed5d89bc9d70b2a0d02f0d567469c3ab", "rev": 2}],
+ "tags": ["tip"]
+ }
+ ]
$ hg commit -mm1
Should succeed - 2 heads:
@@ -64,6 +77,17 @@
(branch merge, don't forget to commit)
$ hg commit -mm2
+ $ hg id -r 1 -Tjson
+ [
+ {
+ "bookmarks": [],
+ "branch": "default",
+ "id": "1846eede8b68",
+ "node": "1846eede8b6886d8cc8a88c96a687b7fe8f3b9d1",
+ "tags": []
+ }
+ ]
+
Should fail because at tip:
$ hg merge
--- a/tests/test-merge-force.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-merge-force.t Wed Jul 19 07:51:41 2017 -0500
@@ -10,26 +10,26 @@
Create base changeset
- $ python $TESTDIR/generate-working-copy-states.py state 3 1
+ $ $PYTHON $TESTDIR/generate-working-copy-states.py state 3 1
$ hg addremove -q --similarity 0
$ hg commit -qm 'base'
Create remote changeset
- $ python $TESTDIR/generate-working-copy-states.py state 3 2
+ $ $PYTHON $TESTDIR/generate-working-copy-states.py state 3 2
$ hg addremove -q --similarity 0
$ hg commit -qm 'remote'
Create local changeset
$ hg update -q 0
- $ python $TESTDIR/generate-working-copy-states.py state 3 3
+ $ $PYTHON $TESTDIR/generate-working-copy-states.py state 3 3
$ hg addremove -q --similarity 0
$ hg commit -qm 'local'
Set up working directory
- $ python $TESTDIR/generate-working-copy-states.py state 3 wc
+ $ $PYTHON $TESTDIR/generate-working-copy-states.py state 3 wc
$ hg addremove -q --similarity 0
$ hg forget *_*_*_*-untracked
$ rm *_*_*_missing-*
@@ -286,7 +286,7 @@
the remote side did not touch the file
$ checkstatus() {
- > for f in `python $TESTDIR/generate-working-copy-states.py filelist 3`
+ > for f in `$PYTHON $TESTDIR/generate-working-copy-states.py filelist 3`
> do
> echo
> hg status -A $f
@@ -667,7 +667,7 @@
missing_missing_missing_missing-untracked: * (glob)
<missing>
- $ for f in `python $TESTDIR/generate-working-copy-states.py filelist 3`
+ $ for f in `$PYTHON $TESTDIR/generate-working-copy-states.py filelist 3`
> do
> if test -f ${f}.orig
> then
@@ -784,7 +784,7 @@
$ hg -q update --clean 2
$ hg --config extensions.purge= purge
- $ python $TESTDIR/generate-working-copy-states.py state 3 wc
+ $ $PYTHON $TESTDIR/generate-working-copy-states.py state 3 wc
$ hg addremove -q --similarity 0
$ hg forget *_*_*_*-untracked
$ rm *_*_*_missing-*
--- a/tests/test-merge-subrepos.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-merge-subrepos.t Wed Jul 19 07:51:41 2017 -0500
@@ -21,5 +21,118 @@
Should fail, since there are added files to subrepo:
$ hg merge
- abort: uncommitted changes in subrepository 'subrepo'
+ abort: uncommitted changes in subrepository "subrepo"
+ [255]
+
+Deleted files trigger a '+' marker in top level repos. Deleted files are also
+noticed by `update --check` in the top level repo.
+
+ $ hg ci -Sqm 'add b'
+ $ echo change > subrepo/b
+
+ $ hg ci -Sm 'change b'
+ committing subrepository subrepo
+
+ $ rm a
+ $ hg id
+ 9bfe45a197d7+ tip
+ $ hg sum
+ parent: 4:9bfe45a197d7 tip
+ change b
+ branch: default
+ commit: 1 deleted (clean)
+ update: 1 new changesets, 2 branch heads (merge)
+ phases: 5 draft
+
+ $ hg up --check -r '.^'
+ abort: uncommitted changes
[255]
+ $ hg st -S
+ ! a
+ $ hg up -Cq .
+
+Test that dirty is consistent through subrepos
+
+ $ rm subrepo/b
+
+A deleted subrepo file is flagged as dirty, like the top level repo
+
+ $ hg id --config extensions.blackbox= --config blackbox.dirty=True
+ 9bfe45a197d7+ tip
+ $ cat .hg/blackbox.log
+ * @9bfe45a197d7b0ab09bf287729dd57e9619c9da5+ (*)> id (glob)
+ * @9bfe45a197d7b0ab09bf287729dd57e9619c9da5+ (*)> id --config *extensions.blackbox=* --config *blackbox.dirty=True* exited 0 * (glob)
+
+TODO: a deleted file should be listed as such, like the top level repo
+
+ $ hg sum
+ parent: 4:9bfe45a197d7 tip
+ change b
+ branch: default
+ commit: (clean)
+ update: 1 new changesets, 2 branch heads (merge)
+ phases: 5 draft
+
+Modified subrepo files are noticed by `update --check` and `summary`
+
+ $ echo mod > subrepo/b
+ $ hg st -S
+ M subrepo/b
+
+ $ hg up -r '.^' --check
+ abort: uncommitted changes in subrepository "subrepo"
+ [255]
+
+ $ hg sum
+ parent: 4:9bfe45a197d7 tip
+ change b
+ branch: default
+ commit: 1 subrepos
+ update: 1 new changesets, 2 branch heads (merge)
+ phases: 5 draft
+
+TODO: why is -R needed here? If it's because the subrepo is treated as a
+discrete unit, then this should probably warn or something.
+ $ hg revert -R subrepo --no-backup subrepo/b -r .
+
+ $ rm subrepo/b
+ $ hg st -S
+ ! subrepo/b
+
+`hg update --check` notices a subrepo with a missing file, like it notices a
+missing file in the top level repo.
+
+ $ hg up -r '.^' --check
+ abort: uncommitted changes in subrepository "subrepo"
+ [255]
+
+ $ hg up -r '.^' --config ui.interactive=True << EOF
+ > d
+ > EOF
+ other [destination] changed b which local [working copy] deleted
+ use (c)hanged version, leave (d)eleted, or leave (u)nresolved? d
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+XXX: There's a difference between wdir() and '.', so there should be a status.
+`hg files -S` from the top is also missing 'subrepo/b'.
+
+ $ hg st -S
+ $ hg st -R subrepo
+ $ hg files -R subrepo
+ [1]
+ $ hg files -R subrepo -r '.'
+ subrepo/b (glob)
+
+ $ hg bookmark -r tip @other
+ $ echo xyz > subrepo/c
+ $ hg ci -SAm 'add c'
+ adding subrepo/c
+ committing subrepository subrepo
+ created new head
+ $ rm subrepo/c
+
+Merge sees deleted subrepo files as an uncommitted change
+
+ $ hg merge @other
+ abort: uncommitted changes in subrepository "subrepo"
+ [255]
--- a/tests/test-merge-symlinks.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-merge-symlinks.t Wed Jul 19 07:51:41 2017 -0500
@@ -1,5 +1,5 @@
$ cat > echo.py <<EOF
- > #!/usr/bin/env python
+ > #!$PYTHON
> import os, sys
> try:
> import msvcrt
@@ -36,7 +36,7 @@
Merge them and display *_ISLINK vars
merge heads
- $ hg merge --tool="python ../echo.py"
+ $ hg merge --tool="$PYTHON ../echo.py"
merging l
HG_FILE l
HG_MY_ISLINK 1
@@ -52,7 +52,7 @@
$ hg up -C 2
1 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg copy l l2
- $ HGMERGE="python ../echo.py" hg up 3
+ $ HGMERGE="$PYTHON ../echo.py" hg up 3
merging l2
HG_FILE l2
HG_MY_ISLINK 1
--- a/tests/test-merge-tools.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-merge-tools.t Wed Jul 19 07:51:41 2017 -0500
@@ -392,9 +392,9 @@
true.executable=cat
# hg update -C 1
$ hg merge -r 2 --config merge-patterns.f=true --config merge-tools.true.executable=nonexistentmergetool
- couldn't find merge tool true specified for f
+ couldn't find merge tool true (for pattern f)
merging f
- couldn't find merge tool true specified for f
+ couldn't find merge tool true (for pattern f)
merging f failed!
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
@@ -418,9 +418,9 @@
true.executable=cat
# hg update -C 1
$ hg merge -r 2 --config merge-patterns.f=true --config merge-tools.true.executable=/nonexistent/mergetool
- couldn't find merge tool true specified for f
+ couldn't find merge tool true (for pattern f)
merging f
- couldn't find merge tool true specified for f
+ couldn't find merge tool true (for pattern f)
merging f failed!
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
@@ -531,8 +531,7 @@
true.executable=cat
# hg update -C 1
$ hg merge -r 2 --config ui.merge=internal:prompt
- no tool found to merge f
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved? u
+ keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for f? u
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
[1]
@@ -556,8 +555,7 @@
$ hg merge -r 2 --config ui.merge=:prompt --config ui.interactive=True << EOF
> u
> EOF
- no tool found to merge f
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved? u
+ keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for f? u
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
[1]
@@ -579,8 +577,7 @@
true.executable=cat
# hg update -C 1
$ hg merge -r 2 --config ui.merge=internal:prompt --config ui.interactive=true
- no tool found to merge f
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved?
+ keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for f?
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
[1]
@@ -593,8 +590,7 @@
# hg resolve --list
U f
$ hg resolve --all --config ui.merge=internal:prompt --config ui.interactive=true
- no tool found to merge f
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved?
+ keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for f?
[1]
$ aftermerge
# cat f
@@ -607,8 +603,7 @@
U f
$ rm f
$ hg resolve --all --config ui.merge=internal:prompt --config ui.interactive=true
- no tool found to merge f
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved?
+ keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for f?
[1]
$ aftermerge
# cat f
@@ -619,8 +614,7 @@
# hg resolve --list
U f
$ hg resolve --all --config ui.merge=internal:prompt
- no tool found to merge f
- keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved? u
+ keep (l)ocal [working copy], take (o)ther [merge rev], or leave (u)nresolved for f? u
[1]
$ aftermerge
# cat f
@@ -677,6 +671,72 @@
space
$ rm f.base f.local f.other
+check that internal:dump doesn't dump files if premerge runs
+successfully
+
+ $ beforemerge
+ [merge-tools]
+ false.whatever=
+ true.priority=1
+ true.executable=cat
+ # hg update -C 1
+ $ hg merge -r 3 --config ui.merge=internal:dump
+ merging f
+ 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+
+ $ aftermerge
+ # cat f
+ revision 1
+ space
+ revision 3
+ # hg stat
+ M f
+ # hg resolve --list
+ R f
+
+check that internal:forcedump dumps files, even if local and other can
+be merged easily
+
+ $ beforemerge
+ [merge-tools]
+ false.whatever=
+ true.priority=1
+ true.executable=cat
+ # hg update -C 1
+ $ hg merge -r 3 --config ui.merge=internal:forcedump
+ merging f
+ 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+ use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ [1]
+ $ aftermerge
+ # cat f
+ revision 1
+ space
+ # hg stat
+ M f
+ ? f.base
+ ? f.local
+ ? f.orig
+ ? f.other
+ # hg resolve --list
+ U f
+
+ $ cat f.base
+ revision 0
+ space
+
+ $ cat f.local
+ revision 1
+ space
+
+ $ cat f.other
+ revision 0
+ space
+ revision 3
+
+ $ rm -f f.base f.local f.other
+
ui.merge specifies internal:other but is overruled by pattern for false:
$ beforemerge
@@ -1221,3 +1281,68 @@
*/f~base.?????? $TESTTMP/f.txt.orig */f~other.??????.txt $TESTTMP/f.txt (glob)
0 files updated, 1 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
+
+Check that debugpicktool examines which merge tool is chosen for
+specified file as expected
+
+ $ beforemerge
+ [merge-tools]
+ false.whatever=
+ true.priority=1
+ true.executable=cat
+ # hg update -C 1
+
+(default behavior: checking files in the working parent context)
+
+ $ hg manifest
+ f
+ $ hg debugpickmergetool
+ f = true
+
+(-X/-I and file patterns limmit examination targets)
+
+ $ hg debugpickmergetool -X f
+ $ hg debugpickmergetool unknown
+ unknown: no such file in rev ef83787e2614
+
+(--changedelete emulates merging change and delete)
+
+ $ hg debugpickmergetool --changedelete
+ f = :prompt
+
+(-r REV causes checking files in specified revision)
+
+ $ hg manifest -r tip
+ f.txt
+ $ hg debugpickmergetool -r tip
+ f.txt = true
+
+#if symlink
+
+(symlink causes chosing :prompt)
+
+ $ hg debugpickmergetool -r 6d00b3726f6e
+ f = :prompt
+
+#endif
+
+(--verbose shows some configurations)
+
+ $ hg debugpickmergetool --tool foobar -v
+ with --tool 'foobar'
+ f = foobar
+
+ $ HGMERGE=false hg debugpickmergetool -v
+ with HGMERGE='false'
+ f = false
+
+ $ hg debugpickmergetool --config ui.merge=false -v
+ with ui.merge='false'
+ f = false
+
+(--debug shows errors detected intermediately)
+
+ $ hg debugpickmergetool --config merge-patterns.f=true --config merge-tools.true.executable=nonexistentmergetool --debug f
+ couldn't find merge tool true (for pattern f)
+ couldn't find merge tool true
+ f = false
--- a/tests/test-merge-types.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-merge-types.t Wed Jul 19 07:51:41 2017 -0500
@@ -155,6 +155,7 @@
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg up
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "521a1e40188f: symlink"
1 other heads for branch "default"
$ hg st
? a.orig
@@ -171,11 +172,12 @@
preserving a for resolve of a
a: versions differ -> m (premerge)
(couldn't find merge tool hgmerge|tool hgmerge can't handle symlinks) (re)
+ no tool found to merge a
picked tool ':prompt' for a (binary False symlink True changedelete False)
- no tool found to merge a
- keep (l)ocal [working copy], take (o)ther [destination], or leave (u)nresolved? u
+ keep (l)ocal [working copy], take (o)ther [destination], or leave (u)nresolved for a? u
0 files updated, 0 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges
+ updated to "521a1e40188f: symlink"
1 other heads for branch "default"
[1]
$ hg diff --git
--- a/tests/test-merge1.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-merge1.t Wed Jul 19 07:51:41 2017 -0500
@@ -10,7 +10,7 @@
>
> print "merging for", os.path.basename(sys.argv[1])
> EOF
- $ HGMERGE="python ../merge"; export HGMERGE
+ $ HGMERGE="$PYTHON ../merge"; export HGMERGE
$ hg init t
$ cd t
--- a/tests/test-merge6.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-merge6.t Wed Jul 19 07:51:41 2017 -0500
@@ -2,7 +2,7 @@
> import sys, os
> print "merging for", os.path.basename(sys.argv[1])
> EOF
- $ HGMERGE="python ../merge"; export HGMERGE
+ $ HGMERGE="$PYTHON ../merge"; export HGMERGE
$ hg init A1
$ cd A1
--- a/tests/test-mq-eol.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-mq-eol.t Wed Jul 19 07:51:41 2017 -0500
@@ -48,7 +48,7 @@
$ hg ci -Am adda
adding .hgignore
adding a
- $ python ../makepatch.py
+ $ $PYTHON ../makepatch.py
$ hg qimport eol.diff
adding eol.diff to series file
@@ -85,7 +85,7 @@
applying eol.diff
now at: eol.diff
$ hg qrefresh
- $ python ../cateol.py .hg/patches/eol.diff
+ $ $PYTHON ../cateol.py .hg/patches/eol.diff
# HG changeset patch<LF>
# Parent 0d0bf99a8b7a3842c6f8ef09e34f69156c4bd9d0<LF>
test message<LF>
@@ -106,7 +106,7 @@
+d<CR><LF>
+z<LF>
\ No newline at end of file<LF>
- $ python ../cateol.py a
+ $ $PYTHON ../cateol.py a
a<CR><LF>
y<CR><LF>
c<CR><LF>
@@ -121,7 +121,7 @@
$ hg --config patch.eol='CRLF' qpush
applying eol.diff
now at: eol.diff
- $ python ../cateol.py a
+ $ $PYTHON ../cateol.py a
a<CR><LF>
y<CR><LF>
c<CR><LF>
@@ -136,7 +136,7 @@
$ hg qpush
applying eol.diff
now at: eol.diff
- $ python ../cateol.py a
+ $ $PYTHON ../cateol.py a
a<CR><LF>
y<CR><LF>
c<CR><LF>
--- a/tests/test-mq-missingfiles.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-mq-missingfiles.t Wed Jul 19 07:51:41 2017 -0500
@@ -23,11 +23,11 @@
$ hg init normal
$ cd normal
- $ python ../writelines.py b 10 'a\n'
+ $ $PYTHON ../writelines.py b 10 'a\n'
$ hg ci -Am addb
adding b
$ echo a > a
- $ python ../writelines.py b 2 'b\n' 10 'a\n' 2 'c\n'
+ $ $PYTHON ../writelines.py b 2 'b\n' 10 'a\n' 2 'c\n'
$ echo c > c
$ hg add a c
$ hg qnew -f changeb
@@ -82,7 +82,7 @@
$ hg up -qC 0
$ echo a > a
$ hg mv b bb
- $ python ../writelines.py bb 2 'b\n' 10 'a\n' 2 'c\n'
+ $ $PYTHON ../writelines.py bb 2 'b\n' 10 'a\n' 2 'c\n'
$ echo c > c
$ hg add a c
$ hg qnew changebb
@@ -129,11 +129,11 @@
$ hg init git
$ cd git
- $ python ../writelines.py b 1 '\x00'
+ $ $PYTHON ../writelines.py b 1 '\x00'
$ hg ci -Am addb
adding b
$ echo a > a
- $ python ../writelines.py b 1 '\x01' 1 '\x00'
+ $ $PYTHON ../writelines.py b 1 '\x01' 1 '\x00'
$ echo c > c
$ hg add a c
$ hg qnew -f changeb
--- a/tests/test-mq-qimport.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-mq-qimport.t Wed Jul 19 07:51:41 2017 -0500
@@ -151,10 +151,10 @@
build diff with CRLF
- $ python ../writelines.py b 5 'a\n' 5 'a\r\n'
+ $ $PYTHON ../writelines.py b 5 'a\n' 5 'a\r\n'
$ hg ci -Am addb
adding b
- $ python ../writelines.py b 2 'a\n' 10 'b\n' 2 'a\r\n'
+ $ $PYTHON ../writelines.py b 2 'a\n' 10 'b\n' 2 'a\r\n'
$ hg diff > b.diff
$ hg up -C
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-mq-qpush-fail.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-mq-qpush-fail.t Wed Jul 19 07:51:41 2017 -0500
@@ -39,8 +39,9 @@
> from mercurial import extensions, transaction
> def wrapplayback(orig,
> journal, report, opener, vfsmap, entries, backupentries,
- > unlink=True):
- > orig(journal, report, opener, vfsmap, entries, backupentries, unlink)
+ > unlink=True, checkambigfiles=None):
+ > orig(journal, report, opener, vfsmap, entries, backupentries, unlink,
+ > checkambigfiles)
> # Touching files truncated at "transaction.abort" causes
> # forcible re-loading invalidated filecache properties
> # (including repo.changelog)
--- a/tests/test-mq-subrepo-svn.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-mq-subrepo-svn.t Wed Jul 19 07:51:41 2017 -0500
@@ -47,7 +47,7 @@
$ cd ..
$ hg status -S # doesn't show status for svn subrepos (yet)
$ hg qnew -m1 1.diff
- abort: uncommitted changes in subrepository 'sub'
+ abort: uncommitted changes in subrepository "sub"
[255]
$ cd ..
--- a/tests/test-mq-subrepo.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-mq-subrepo.t Wed Jul 19 07:51:41 2017 -0500
@@ -102,7 +102,7 @@
A .hgsub
A sub/a
% qnew -X path:no-effect -m0 0.diff
- abort: uncommitted changes in subrepository 'sub'
+ abort: uncommitted changes in subrepository "sub"
[255]
% update substate when adding .hgsub w/clean updated subrepo
A .hgsub
@@ -118,7 +118,7 @@
M .hgsub
A sub2/a
% qnew --cwd .. -R repo-2499-qnew -X path:no-effect -m1 1.diff
- abort: uncommitted changes in subrepository 'sub2'
+ abort: uncommitted changes in subrepository "sub2"
[255]
% update substate when modifying .hgsub w/clean updated subrepo
M .hgsub
@@ -163,7 +163,7 @@
A .hgsub
A sub/a
% qrefresh
- abort: uncommitted changes in subrepository 'sub'
+ abort: uncommitted changes in subrepository "sub"
[255]
% update substate when adding .hgsub w/clean updated subrepo
A .hgsub
@@ -180,7 +180,7 @@
M .hgsub
A sub2/a
% qrefresh
- abort: uncommitted changes in subrepository 'sub2'
+ abort: uncommitted changes in subrepository "sub2"
[255]
% update substate when modifying .hgsub w/clean updated subrepo
M .hgsub
@@ -305,7 +305,7 @@
warning: subrepo spec file '.hgsub' not found
warning: subrepo spec file '.hgsub' not found
- abort: uncommitted changes in subrepository 'sub'
+ abort: uncommitted changes in subrepository "sub"
[255]
% update substate when adding .hgsub w/clean updated subrepo
A .hgsub
@@ -342,7 +342,7 @@
+sub2 = sub2
record this change to '.hgsub'? [Ynesfdaq?] y
- abort: uncommitted changes in subrepository 'sub2'
+ abort: uncommitted changes in subrepository "sub2"
[255]
% update substate when modifying .hgsub w/clean updated subrepo
M .hgsub
--- a/tests/test-mq-symlinks.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-mq-symlinks.t Wed Jul 19 07:51:41 2017 -0500
@@ -51,6 +51,7 @@
a
committing manifest
committing changelog
+ updating the branch cache
now at: updatelink
$ readlink.py a
a -> c
--- a/tests/test-mq.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-mq.t Wed Jul 19 07:51:41 2017 -0500
@@ -932,27 +932,17 @@
#endif
$ hg add new
$ hg qrefresh
-#if execbit
+
$ cat .hg/patches/new
new file
diff --git a/new b/new
- new file mode 100755
+ new file mode 100755 (execbit !)
+ new file mode 100644 (no-execbit !)
--- /dev/null
+++ b/new
@@ -0,0 +1,1 @@
+foo
-#else
- $ cat .hg/patches/new
- new file
-
- diff --git a/new b/new
- new file mode 100644
- --- /dev/null
- +++ b/new
- @@ -0,0 +1,1 @@
- +foo
-#endif
$ hg qnew -m'copy file' copy
$ hg cp new copy
@@ -1137,9 +1127,9 @@
> path = sys.argv[1]
> open(path, 'wb').write('BIN\x00ARY')
> EOF
- $ python writebin.py bucephalus
+ $ $PYTHON writebin.py bucephalus
- $ python "$TESTDIR/md5sum.py" bucephalus
+ $ $PYTHON "$TESTDIR/md5sum.py" bucephalus
8ba2a2f3e77b55d03051ff9c24ad65e7 bucephalus
$ hg add bucephalus
$ hg qnew -f --git addbucephalus
@@ -1158,7 +1148,7 @@
applying addbucephalus
now at: addbucephalus
$ test -f bucephalus
- $ python "$TESTDIR/md5sum.py" bucephalus
+ $ $PYTHON "$TESTDIR/md5sum.py" bucephalus
8ba2a2f3e77b55d03051ff9c24ad65e7 bucephalus
@@ -1584,7 +1574,7 @@
$ PATH_INFO=/tags; export PATH_INFO
#endif
$ QUERY_STRING='style=raw'
- $ python hgweb.cgi | grep '^tip'
+ $ $PYTHON hgweb.cgi | grep '^tip'
tip [0-9a-f]{40} (re)
$ cd ..
--- a/tests/test-newcgi.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-newcgi.t Wed Jul 19 07:51:41 2017 -0500
@@ -5,7 +5,7 @@
$ hg init test
$ cat >hgweb.cgi <<HGWEB
- > #!/usr/bin/env python
+ > #!$PYTHON
> #
> # An example CGI script to use hgweb, edit as necessary
>
@@ -31,7 +31,7 @@
> HGWEBDIRCONF
$ cat >hgwebdir.cgi <<HGWEBDIR
- > #!/usr/bin/env python
+ > #!$PYTHON
> #
> # An example CGI script to export multiple hgweb repos, edit as necessary
>
@@ -52,15 +52,15 @@
$ chmod 755 hgwebdir.cgi
$ . "$TESTDIR/cgienv"
- $ python hgweb.cgi > page1
- $ python hgwebdir.cgi > page2
+ $ $PYTHON hgweb.cgi > page1
+ $ $PYTHON hgwebdir.cgi > page2
$ PATH_INFO="/test/"
$ PATH_TRANSLATED="/var/something/test.cgi"
$ REQUEST_URI="/test/test/"
$ SCRIPT_URI="http://hg.omnifarious.org/test/test/"
$ SCRIPT_URL="/test/test/"
- $ python hgwebdir.cgi > page3
+ $ $PYTHON hgwebdir.cgi > page3
$ grep -i error page1 page2 page3
[1]
--- a/tests/test-newercgi.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-newercgi.t Wed Jul 19 07:51:41 2017 -0500
@@ -5,7 +5,7 @@
$ hg init test
$ cat >hgweb.cgi <<HGWEB
- > #!/usr/bin/env python
+ > #!$PYTHON
> #
> # An example CGI script to use hgweb, edit as necessary
>
@@ -28,7 +28,7 @@
> HGWEBDIRCONF
$ cat >hgwebdir.cgi <<HGWEBDIR
- > #!/usr/bin/env python
+ > #!$PYTHON
> #
> # An example CGI script to export multiple hgweb repos, edit as necessary
>
@@ -46,15 +46,15 @@
$ chmod 755 hgwebdir.cgi
$ . "$TESTDIR/cgienv"
- $ python hgweb.cgi > page1
- $ python hgwebdir.cgi > page2
+ $ $PYTHON hgweb.cgi > page1
+ $ $PYTHON hgwebdir.cgi > page2
$ PATH_INFO="/test/"
$ PATH_TRANSLATED="/var/something/test.cgi"
$ REQUEST_URI="/test/test/"
$ SCRIPT_URI="http://hg.omnifarious.org/test/test/"
$ SCRIPT_URL="/test/test/"
- $ python hgwebdir.cgi > page3
+ $ $PYTHON hgwebdir.cgi > page3
$ grep -i error page1 page2 page3
[1]
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-obsmarker-template.t Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,1270 @@
+This test file tests the various templates related to obsmarkers.
+
+Global setup
+============
+
+ $ . $TESTDIR/testlib/obsmarker-common.sh
+ $ cat >> $HGRCPATH <<EOF
+ > [ui]
+ > interactive = true
+ > [phases]
+ > publish=False
+ > [experimental]
+ > evolution=all
+ > [alias]
+ > tlog = log -G -T '{node|short}\
+ > {if(predecessors, "\n Predecessors: {predecessors}")}\
+ > {if(predecessors, "\n semi-colon: {join(predecessors, "; ")}")}\
+ > {if(predecessors, "\n json: {predecessors|json}")}\
+ > {if(predecessors, "\n map: {join(predecessors % "{rev}:{node}", " ")}")}\
+ > {if(successorssets, "\n Successors: {successorssets}")}\
+ > {if(successorssets, "\n multi-line: {join(successorssets, "\n multi-line: ")}")}\
+ > {if(successorssets, "\n json: {successorssets|json}")}\n'
+ > EOF
+
+Test templates on amended commit
+================================
+
+Test setup
+----------
+
+ $ hg init $TESTTMP/templates-local-amend
+ $ cd $TESTTMP/templates-local-amend
+ $ mkcommit ROOT
+ $ mkcommit A0
+ $ echo 42 >> A0
+ $ hg commit --amend -m "A1"
+ $ hg commit --amend -m "A2"
+
+ $ hg log --hidden -G
+ @ changeset: 4:d004c8f274b9
+ | tag: tip
+ | parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A2
+ |
+ | x changeset: 3:a468dc9b3633
+ |/ parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A1
+ |
+ | x changeset: 2:f137d23bb3e1
+ | | user: test
+ | | date: Thu Jan 01 00:00:00 1970 +0000
+ | | summary: temporary amend commit for 471f378eab4c
+ | |
+ | x changeset: 1:471f378eab4c
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+Check templates
+---------------
+ $ hg up 'desc(A0)' --hidden
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Predecessors template should show current revision as it is the working copy
+ $ hg tlog
+ o d004c8f274b9
+ | Predecessors: 1:471f378eab4c
+ | semi-colon: 1:471f378eab4c
+ | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | @ 471f378eab4c
+ |/ Successors: 4:d004c8f274b9
+ | multi-line: 4:d004c8f274b9
+ | json: [["d004c8f274b9ec480a47a93c10dac5eee63adb78"]]
+ o ea207398892e
+
+ $ hg up 'desc(A1)' --hidden
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Predecessors template should show current revision as it is the working copy
+ $ hg tlog
+ o d004c8f274b9
+ | Predecessors: 3:a468dc9b3633
+ | semi-colon: 3:a468dc9b3633
+ | json: ["a468dc9b36338b14fdb7825f55ce3df4e71517ad"]
+ | map: 3:a468dc9b36338b14fdb7825f55ce3df4e71517ad
+ | @ a468dc9b3633
+ |/ Successors: 4:d004c8f274b9
+ | multi-line: 4:d004c8f274b9
+ | json: [["d004c8f274b9ec480a47a93c10dac5eee63adb78"]]
+ o ea207398892e
+
+Predecessors template should show all the predecessors as we force their display
+with --hidden
+ $ hg tlog --hidden
+ o d004c8f274b9
+ | Predecessors: 3:a468dc9b3633
+ | semi-colon: 3:a468dc9b3633
+ | json: ["a468dc9b36338b14fdb7825f55ce3df4e71517ad"]
+ | map: 3:a468dc9b36338b14fdb7825f55ce3df4e71517ad
+ | @ a468dc9b3633
+ |/ Predecessors: 1:471f378eab4c
+ | semi-colon: 1:471f378eab4c
+ | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | Successors: 4:d004c8f274b9
+ | multi-line: 4:d004c8f274b9
+ | json: [["d004c8f274b9ec480a47a93c10dac5eee63adb78"]]
+ | x f137d23bb3e1
+ | |
+ | x 471f378eab4c
+ |/ Successors: 3:a468dc9b3633
+ | multi-line: 3:a468dc9b3633
+ | json: [["a468dc9b36338b14fdb7825f55ce3df4e71517ad"]]
+ o ea207398892e
+
+
+Predecessors template shouldn't show anything as all obsolete commits are not
+visible.
+ $ hg up 'desc(A2)'
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg tlog
+ @ d004c8f274b9
+ |
+ o ea207398892e
+
+ $ hg tlog --hidden
+ @ d004c8f274b9
+ | Predecessors: 3:a468dc9b3633
+ | semi-colon: 3:a468dc9b3633
+ | json: ["a468dc9b36338b14fdb7825f55ce3df4e71517ad"]
+ | map: 3:a468dc9b36338b14fdb7825f55ce3df4e71517ad
+ | x a468dc9b3633
+ |/ Predecessors: 1:471f378eab4c
+ | semi-colon: 1:471f378eab4c
+ | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | Successors: 4:d004c8f274b9
+ | multi-line: 4:d004c8f274b9
+ | json: [["d004c8f274b9ec480a47a93c10dac5eee63adb78"]]
+ | x f137d23bb3e1
+ | |
+ | x 471f378eab4c
+ |/ Successors: 3:a468dc9b3633
+ | multi-line: 3:a468dc9b3633
+ | json: [["a468dc9b36338b14fdb7825f55ce3df4e71517ad"]]
+ o ea207398892e
+
+
+Test templates with split commit
+===================================
+
+ $ hg init $TESTTMP/templates-local-split
+ $ cd $TESTTMP/templates-local-split
+ $ mkcommit ROOT
+ $ echo 42 >> a
+ $ echo 43 >> b
+ $ hg commit -A -m "A0"
+ adding a
+ adding b
+ $ hg log --hidden -G
+ @ changeset: 1:471597cad322
+ | tag: tip
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+# Simulate split
+ $ hg up -r "desc(ROOT)"
+ 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ $ echo 42 >> a
+ $ hg commit -A -m "A0"
+ adding a
+ created new head
+ $ echo 43 >> b
+ $ hg commit -A -m "A0"
+ adding b
+ $ hg debugobsolete `getid "1"` `getid "2"` `getid "3"`
+ obsoleted 1 changesets
+
+ $ hg log --hidden -G
+ @ changeset: 3:f257fde29c7a
+ | tag: tip
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 2:337fec4d2edc
+ | parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ | x changeset: 1:471597cad322
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+Check templates
+---------------
+
+ $ hg up 'obsolete()' --hidden
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Predecessors template should show current revision as it is the working copy
+ $ hg tlog
+ o f257fde29c7a
+ | Predecessors: 1:471597cad322
+ | semi-colon: 1:471597cad322
+ | json: ["471597cad322d1f659bb169751be9133dad92ef3"]
+ | map: 1:471597cad322d1f659bb169751be9133dad92ef3
+ o 337fec4d2edc
+ | Predecessors: 1:471597cad322
+ | semi-colon: 1:471597cad322
+ | json: ["471597cad322d1f659bb169751be9133dad92ef3"]
+ | map: 1:471597cad322d1f659bb169751be9133dad92ef3
+ | @ 471597cad322
+ |/ Successors: 2:337fec4d2edc 3:f257fde29c7a
+ | multi-line: 2:337fec4d2edc 3:f257fde29c7a
+ | json: [["337fec4d2edcf0e7a467e35f818234bc620068b5", "f257fde29c7a847c9b607f6e958656d0df0fb15c"]]
+ o ea207398892e
+
+ $ hg up f257fde29c7a
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Predecessors template should not show a predecessor as it's not displayed in
+the log
+ $ hg tlog
+ @ f257fde29c7a
+ |
+ o 337fec4d2edc
+ |
+ o ea207398892e
+
+Predecessors template should show both predecessors as we force their display
+with --hidden
+ $ hg tlog --hidden
+ @ f257fde29c7a
+ | Predecessors: 1:471597cad322
+ | semi-colon: 1:471597cad322
+ | json: ["471597cad322d1f659bb169751be9133dad92ef3"]
+ | map: 1:471597cad322d1f659bb169751be9133dad92ef3
+ o 337fec4d2edc
+ | Predecessors: 1:471597cad322
+ | semi-colon: 1:471597cad322
+ | json: ["471597cad322d1f659bb169751be9133dad92ef3"]
+ | map: 1:471597cad322d1f659bb169751be9133dad92ef3
+ | x 471597cad322
+ |/ Successors: 2:337fec4d2edc 3:f257fde29c7a
+ | multi-line: 2:337fec4d2edc 3:f257fde29c7a
+ | json: [["337fec4d2edcf0e7a467e35f818234bc620068b5", "f257fde29c7a847c9b607f6e958656d0df0fb15c"]]
+ o ea207398892e
+
+Test templates with folded commit
+=================================
+
+Test setup
+----------
+
+ $ hg init $TESTTMP/templates-local-fold
+ $ cd $TESTTMP/templates-local-fold
+ $ mkcommit ROOT
+ $ mkcommit A0
+ $ mkcommit B0
+ $ hg log --hidden -G
+ @ changeset: 2:0dec01379d3b
+ | tag: tip
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: B0
+ |
+ o changeset: 1:471f378eab4c
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+Simulate a fold
+ $ hg up -r "desc(ROOT)"
+ 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ $ echo "A0" > A0
+ $ echo "B0" > B0
+ $ hg commit -A -m "C0"
+ adding A0
+ adding B0
+ created new head
+ $ hg debugobsolete `getid "desc(A0)"` `getid "desc(C0)"`
+ obsoleted 1 changesets
+ $ hg debugobsolete `getid "desc(B0)"` `getid "desc(C0)"`
+ obsoleted 1 changesets
+
+ $ hg log --hidden -G
+ @ changeset: 3:eb5a0daa2192
+ | tag: tip
+ | parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: C0
+ |
+ | x changeset: 2:0dec01379d3b
+ | | user: test
+ | | date: Thu Jan 01 00:00:00 1970 +0000
+ | | summary: B0
+ | |
+ | x changeset: 1:471f378eab4c
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+Check templates
+---------------
+
+ $ hg up 'desc(A0)' --hidden
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+
+Predecessors template should show current revision as it is the working copy
+ $ hg tlog
+ o eb5a0daa2192
+ | Predecessors: 1:471f378eab4c
+ | semi-colon: 1:471f378eab4c
+ | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | @ 471f378eab4c
+ |/ Successors: 3:eb5a0daa2192
+ | multi-line: 3:eb5a0daa2192
+ | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
+ o ea207398892e
+
+ $ hg up 'desc(B0)' --hidden
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Predecessors template should show both predecessors as they should be both
+displayed
+ $ hg tlog
+ o eb5a0daa2192
+ | Predecessors: 2:0dec01379d3b 1:471f378eab4c
+ | semi-colon: 2:0dec01379d3b; 1:471f378eab4c
+ | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5", "471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | @ 0dec01379d3b
+ | | Successors: 3:eb5a0daa2192
+ | | multi-line: 3:eb5a0daa2192
+ | | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
+ | x 471f378eab4c
+ |/ Successors: 3:eb5a0daa2192
+ | multi-line: 3:eb5a0daa2192
+ | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
+ o ea207398892e
+
+ $ hg up 'desc(C0)'
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Predecessors template should not show predecessors as they are not displayed in
+the log
+ $ hg tlog
+ @ eb5a0daa2192
+ |
+ o ea207398892e
+
+Predecessors template should show both predecessors as we force their display
+with --hidden
+ $ hg tlog --hidden
+ @ eb5a0daa2192
+ | Predecessors: 2:0dec01379d3b 1:471f378eab4c
+ | semi-colon: 2:0dec01379d3b; 1:471f378eab4c
+ | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5", "471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | x 0dec01379d3b
+ | | Successors: 3:eb5a0daa2192
+ | | multi-line: 3:eb5a0daa2192
+ | | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
+ | x 471f378eab4c
+ |/ Successors: 3:eb5a0daa2192
+ | multi-line: 3:eb5a0daa2192
+ | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
+ o ea207398892e
+
+
+Test templates with divergence
+==============================
+
+Test setup
+----------
+
+ $ hg init $TESTTMP/templates-local-divergence
+ $ cd $TESTTMP/templates-local-divergence
+ $ mkcommit ROOT
+ $ mkcommit A0
+ $ hg commit --amend -m "A1"
+ $ hg log --hidden -G
+ @ changeset: 2:fdf9bde5129a
+ | tag: tip
+ | parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A1
+ |
+ | x changeset: 1:471f378eab4c
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+ $ hg update --hidden 'desc(A0)'
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg commit --amend -m "A2"
+ $ hg log --hidden -G
+ @ changeset: 3:65b757b745b9
+ | tag: tip
+ | parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | trouble: divergent
+ | summary: A2
+ |
+ | o changeset: 2:fdf9bde5129a
+ |/ parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | trouble: divergent
+ | summary: A1
+ |
+ | x changeset: 1:471f378eab4c
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+ $ hg commit --amend -m 'A3'
+ $ hg log --hidden -G
+ @ changeset: 4:019fadeab383
+ | tag: tip
+ | parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | trouble: divergent
+ | summary: A3
+ |
+ | x changeset: 3:65b757b745b9
+ |/ parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A2
+ |
+ | o changeset: 2:fdf9bde5129a
+ |/ parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | trouble: divergent
+ | summary: A1
+ |
+ | x changeset: 1:471f378eab4c
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+
+Check templates
+---------------
+
+ $ hg up 'desc(A0)' --hidden
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Predecessors template should show current revision as it is the working copy
+ $ hg tlog
+ o 019fadeab383
+ | Predecessors: 1:471f378eab4c
+ | semi-colon: 1:471f378eab4c
+ | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | o fdf9bde5129a
+ |/ Predecessors: 1:471f378eab4c
+ | semi-colon: 1:471f378eab4c
+ | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | @ 471f378eab4c
+ |/ Successors: 2:fdf9bde5129a; 4:019fadeab383
+ | multi-line: 2:fdf9bde5129a
+ | multi-line: 4:019fadeab383
+ | json: [["fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e"], ["019fadeab383f6699fa83ad7bdb4d82ed2c0e5ab"]]
+ o ea207398892e
+
+ $ hg up 'desc(A1)'
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Predecessors template should not show predecessors as they are not displayed in
+the log
+ $ hg tlog
+ o 019fadeab383
+ |
+ | @ fdf9bde5129a
+ |/
+ o ea207398892e
+
+Predecessors template should show the predecessors as we force their display
+with --hidden
+ $ hg tlog --hidden
+ o 019fadeab383
+ | Predecessors: 3:65b757b745b9
+ | semi-colon: 3:65b757b745b9
+ | json: ["65b757b745b935093c87a2bccd877521cccffcbd"]
+ | map: 3:65b757b745b935093c87a2bccd877521cccffcbd
+ | x 65b757b745b9
+ |/ Predecessors: 1:471f378eab4c
+ | semi-colon: 1:471f378eab4c
+ | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | Successors: 4:019fadeab383
+ | multi-line: 4:019fadeab383
+ | json: [["019fadeab383f6699fa83ad7bdb4d82ed2c0e5ab"]]
+ | @ fdf9bde5129a
+ |/ Predecessors: 1:471f378eab4c
+ | semi-colon: 1:471f378eab4c
+ | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | x 471f378eab4c
+ |/ Successors: 2:fdf9bde5129a; 3:65b757b745b9
+ | multi-line: 2:fdf9bde5129a
+ | multi-line: 3:65b757b745b9
+ | json: [["fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e"], ["65b757b745b935093c87a2bccd877521cccffcbd"]]
+ o ea207398892e
+
+
+Test templates with amended + folded commit
+===========================================
+
+Test setup
+----------
+
+ $ hg init $TESTTMP/templates-local-amend-fold
+ $ cd $TESTTMP/templates-local-amend-fold
+ $ mkcommit ROOT
+ $ mkcommit A0
+ $ mkcommit B0
+ $ hg commit --amend -m "B1"
+ $ hg log --hidden -G
+ @ changeset: 3:b7ea6d14e664
+ | tag: tip
+ | parent: 1:471f378eab4c
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: B1
+ |
+ | x changeset: 2:0dec01379d3b
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: B0
+ |
+ o changeset: 1:471f378eab4c
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+# Simulate a fold
+ $ hg up -r "desc(ROOT)"
+ 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ $ echo "A0" > A0
+ $ echo "B0" > B0
+ $ hg commit -A -m "C0"
+ adding A0
+ adding B0
+ created new head
+ $ hg debugobsolete `getid "desc(A0)"` `getid "desc(C0)"`
+ obsoleted 1 changesets
+ $ hg debugobsolete `getid "desc(B1)"` `getid "desc(C0)"`
+ obsoleted 1 changesets
+
+ $ hg log --hidden -G
+ @ changeset: 4:eb5a0daa2192
+ | tag: tip
+ | parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: C0
+ |
+ | x changeset: 3:b7ea6d14e664
+ | | parent: 1:471f378eab4c
+ | | user: test
+ | | date: Thu Jan 01 00:00:00 1970 +0000
+ | | summary: B1
+ | |
+ | | x changeset: 2:0dec01379d3b
+ | |/ user: test
+ | | date: Thu Jan 01 00:00:00 1970 +0000
+ | | summary: B0
+ | |
+ | x changeset: 1:471f378eab4c
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+Check templates
+---------------
+
+ $ hg up 'desc(A0)' --hidden
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+
+Predecessors template should show current revision as it is the working copy
+ $ hg tlog
+ o eb5a0daa2192
+ | Predecessors: 1:471f378eab4c
+ | semi-colon: 1:471f378eab4c
+ | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | @ 471f378eab4c
+ |/ Successors: 4:eb5a0daa2192
+ | multi-line: 4:eb5a0daa2192
+ | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
+ o ea207398892e
+
+ $ hg up 'desc(B0)' --hidden
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Predecessors template should show both predecessors as they are visible
+ $ hg tlog
+ o eb5a0daa2192
+ | Predecessors: 2:0dec01379d3b 1:471f378eab4c
+ | semi-colon: 2:0dec01379d3b; 1:471f378eab4c
+ | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5", "471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | @ 0dec01379d3b
+ | | Successors: 4:eb5a0daa2192
+ | | multi-line: 4:eb5a0daa2192
+ | | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
+ | x 471f378eab4c
+ |/ Successors: 4:eb5a0daa2192
+ | multi-line: 4:eb5a0daa2192
+ | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
+ o ea207398892e
+
+ $ hg up 'desc(B1)' --hidden
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Predecessors template should show both predecessors as they are visible
+ $ hg tlog
+ o eb5a0daa2192
+ | Predecessors: 1:471f378eab4c 3:b7ea6d14e664
+ | semi-colon: 1:471f378eab4c; 3:b7ea6d14e664
+ | json: ["471f378eab4c5e25f6c77f785b27c936efb22874", "b7ea6d14e664bdc8922221f7992631b50da3fb07"]
+ | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874 3:b7ea6d14e664bdc8922221f7992631b50da3fb07
+ | @ b7ea6d14e664
+ | | Successors: 4:eb5a0daa2192
+ | | multi-line: 4:eb5a0daa2192
+ | | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
+ | x 471f378eab4c
+ |/ Successors: 4:eb5a0daa2192
+ | multi-line: 4:eb5a0daa2192
+ | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
+ o ea207398892e
+
+ $ hg up 'desc(C0)'
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Predecessors template should show no predecessors as they are both non visible
+ $ hg tlog
+ @ eb5a0daa2192
+ |
+ o ea207398892e
+
+Predecessors template should show all predecessors as we force their display
+with --hidden
+ $ hg tlog --hidden
+ @ eb5a0daa2192
+ | Predecessors: 1:471f378eab4c 3:b7ea6d14e664
+ | semi-colon: 1:471f378eab4c; 3:b7ea6d14e664
+ | json: ["471f378eab4c5e25f6c77f785b27c936efb22874", "b7ea6d14e664bdc8922221f7992631b50da3fb07"]
+ | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874 3:b7ea6d14e664bdc8922221f7992631b50da3fb07
+ | x b7ea6d14e664
+ | | Predecessors: 2:0dec01379d3b
+ | | semi-colon: 2:0dec01379d3b
+ | | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]
+ | | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5
+ | | Successors: 4:eb5a0daa2192
+ | | multi-line: 4:eb5a0daa2192
+ | | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
+ | | x 0dec01379d3b
+ | |/ Successors: 3:b7ea6d14e664
+ | | multi-line: 3:b7ea6d14e664
+ | | json: [["b7ea6d14e664bdc8922221f7992631b50da3fb07"]]
+ | x 471f378eab4c
+ |/ Successors: 4:eb5a0daa2192
+ | multi-line: 4:eb5a0daa2192
+ | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
+ o ea207398892e
+
+
+Test template with pushed and pulled obs markers
+================================================
+
+Test setup
+----------
+
+ $ hg init $TESTTMP/templates-local-remote-markers-1
+ $ cd $TESTTMP/templates-local-remote-markers-1
+ $ mkcommit ROOT
+ $ mkcommit A0
+ $ hg clone $TESTTMP/templates-local-remote-markers-1 $TESTTMP/templates-local-remote-markers-2
+ updating to branch default
+ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ cd $TESTTMP/templates-local-remote-markers-2
+ $ hg log --hidden -G
+ @ changeset: 1:471f378eab4c
+ | tag: tip
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+ $ cd $TESTTMP/templates-local-remote-markers-1
+ $ hg commit --amend -m "A1"
+ $ hg commit --amend -m "A2"
+ $ hg log --hidden -G
+ @ changeset: 3:7a230b46bf61
+ | tag: tip
+ | parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A2
+ |
+ | x changeset: 2:fdf9bde5129a
+ |/ parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A1
+ |
+ | x changeset: 1:471f378eab4c
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+ $ cd $TESTTMP/templates-local-remote-markers-2
+ $ hg pull
+ pulling from $TESTTMP/templates-local-remote-markers-1 (glob)
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 0 changes to 1 files (+1 heads)
+ 2 new obsolescence markers
+ obsoleted 1 changesets
+ (run 'hg heads' to see heads, 'hg merge' to merge)
+ $ hg log --hidden -G
+ o changeset: 2:7a230b46bf61
+ | tag: tip
+ | parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A2
+ |
+ | @ changeset: 1:471f378eab4c
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: A0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+
+ $ hg debugobsolete
+ 471f378eab4c5e25f6c77f785b27c936efb22874 fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e 7a230b46bf61e50b30308c6cfd7bd1269ef54702 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+
+Check templates
+---------------
+
+Predecessors template should show current revision as it is the working copy
+ $ hg tlog
+ o 7a230b46bf61
+ | Predecessors: 1:471f378eab4c
+ | semi-colon: 1:471f378eab4c
+ | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | @ 471f378eab4c
+ |/ Successors: 2:7a230b46bf61
+ | multi-line: 2:7a230b46bf61
+ | json: [["7a230b46bf61e50b30308c6cfd7bd1269ef54702"]]
+ o ea207398892e
+
+ $ hg up 'desc(A2)'
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Predecessors template should show no predecessors as they are not visible
+ $ hg tlog
+ @ 7a230b46bf61
+ |
+ o ea207398892e
+
+Predecessors template should show all predecessors as we force their display
+with --hidden
+ $ hg tlog --hidden
+ @ 7a230b46bf61
+ | Predecessors: 1:471f378eab4c
+ | semi-colon: 1:471f378eab4c
+ | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | x 471f378eab4c
+ |/ Successors: 2:7a230b46bf61
+ | multi-line: 2:7a230b46bf61
+ | json: [["7a230b46bf61e50b30308c6cfd7bd1269ef54702"]]
+ o ea207398892e
+
+
+Test template with obsmarkers cycle
+===================================
+
+Test setup
+----------
+
+ $ hg init $TESTTMP/templates-local-cycle
+ $ cd $TESTTMP/templates-local-cycle
+ $ mkcommit ROOT
+ $ mkcommit A0
+ $ mkcommit B0
+ $ hg up -r 0
+ 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ $ mkcommit C0
+ created new head
+
+Create the cycle
+
+ $ hg debugobsolete `getid "desc(A0)"` `getid "desc(B0)"`
+ obsoleted 1 changesets
+ $ hg debugobsolete `getid "desc(B0)"` `getid "desc(C0)"`
+ obsoleted 1 changesets
+ $ hg debugobsolete `getid "desc(B0)"` `getid "desc(A0)"`
+
+Check templates
+---------------
+
+ $ hg tlog
+ @ f897c6137566
+ |
+ o ea207398892e
+
+
+ $ hg up -r "desc(B0)" --hidden
+ 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg tlog
+ o f897c6137566
+ | Predecessors: 2:0dec01379d3b
+ | semi-colon: 2:0dec01379d3b
+ | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]
+ | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5
+ | @ 0dec01379d3b
+ | | Predecessors: 1:471f378eab4c
+ | | semi-colon: 1:471f378eab4c
+ | | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | | Successors: 3:f897c6137566; 1:471f378eab4c
+ | | multi-line: 3:f897c6137566
+ | | multi-line: 1:471f378eab4c
+ | | json: [["f897c6137566320b081514b4c7227ecc3d384b39"], ["471f378eab4c5e25f6c77f785b27c936efb22874"]]
+ | x 471f378eab4c
+ |/ Predecessors: 2:0dec01379d3b
+ | semi-colon: 2:0dec01379d3b
+ | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]
+ | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5
+ | Successors: 2:0dec01379d3b
+ | multi-line: 2:0dec01379d3b
+ | json: [["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]]
+ o ea207398892e
+
+
+ $ hg up -r "desc(A0)" --hidden
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg tlog
+ o f897c6137566
+ | Predecessors: 1:471f378eab4c
+ | semi-colon: 1:471f378eab4c
+ | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | @ 471f378eab4c
+ |/
+ o ea207398892e
+
+
+ $ hg up -r "desc(ROOT)" --hidden
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg tlog
+ o f897c6137566
+ |
+ @ ea207398892e
+
+
+ $ hg tlog --hidden
+ o f897c6137566
+ | Predecessors: 2:0dec01379d3b
+ | semi-colon: 2:0dec01379d3b
+ | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]
+ | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5
+ | x 0dec01379d3b
+ | | Predecessors: 1:471f378eab4c
+ | | semi-colon: 1:471f378eab4c
+ | | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | | Successors: 3:f897c6137566; 1:471f378eab4c
+ | | multi-line: 3:f897c6137566
+ | | multi-line: 1:471f378eab4c
+ | | json: [["f897c6137566320b081514b4c7227ecc3d384b39"], ["471f378eab4c5e25f6c77f785b27c936efb22874"]]
+ | x 471f378eab4c
+ |/ Predecessors: 2:0dec01379d3b
+ | semi-colon: 2:0dec01379d3b
+ | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]
+ | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5
+ | Successors: 2:0dec01379d3b
+ | multi-line: 2:0dec01379d3b
+ | json: [["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]]
+ @ ea207398892e
+
+Test template with split + divergence with cycles
+=================================================
+
+ $ hg log -G
+ o changeset: 3:f897c6137566
+ | tag: tip
+ | parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: C0
+ |
+ @ changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+ $ hg up
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Create a commit with three files
+ $ touch A B C
+ $ hg commit -A -m "Add A,B,C" A B C
+
+Split it
+ $ hg up 3
+ 0 files updated, 0 files merged, 3 files removed, 0 files unresolved
+ $ touch A
+ $ hg commit -A -m "Add A,B,C" A
+ created new head
+
+ $ touch B
+ $ hg commit -A -m "Add A,B,C" B
+
+ $ touch C
+ $ hg commit -A -m "Add A,B,C" C
+
+ $ hg log -G
+ @ changeset: 7:ba2ed02b0c9a
+ | tag: tip
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: Add A,B,C
+ |
+ o changeset: 6:4a004186e638
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: Add A,B,C
+ |
+ o changeset: 5:dd800401bd8c
+ | parent: 3:f897c6137566
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: Add A,B,C
+ |
+ | o changeset: 4:9bd10a0775e4
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: Add A,B,C
+ |
+ o changeset: 3:f897c6137566
+ | parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: C0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+ $ hg debugobsolete `getid "4"` `getid "5"` `getid "6"` `getid "7"`
+ obsoleted 1 changesets
+ $ hg log -G
+ @ changeset: 7:ba2ed02b0c9a
+ | tag: tip
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: Add A,B,C
+ |
+ o changeset: 6:4a004186e638
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: Add A,B,C
+ |
+ o changeset: 5:dd800401bd8c
+ | parent: 3:f897c6137566
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: Add A,B,C
+ |
+ o changeset: 3:f897c6137566
+ | parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: C0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+Diverge one of the split commits
+
+ $ hg up 6
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg commit --amend -m "Add only B"
+
+ $ hg up 6 --hidden
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg commit --amend -m "Add B only"
+
+ $ hg log -G
+ @ changeset: 9:0b997eb7ceee
+ | tag: tip
+ | parent: 5:dd800401bd8c
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | trouble: divergent
+ | summary: Add B only
+ |
+ | o changeset: 8:b18bc8331526
+ |/ parent: 5:dd800401bd8c
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | trouble: divergent
+ | summary: Add only B
+ |
+ | o changeset: 7:ba2ed02b0c9a
+ | | user: test
+ | | date: Thu Jan 01 00:00:00 1970 +0000
+ | | trouble: unstable, divergent
+ | | summary: Add A,B,C
+ | |
+ | x changeset: 6:4a004186e638
+ |/ user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: Add A,B,C
+ |
+ o changeset: 5:dd800401bd8c
+ | parent: 3:f897c6137566
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | trouble: divergent
+ | summary: Add A,B,C
+ |
+ o changeset: 3:f897c6137566
+ | parent: 0:ea207398892e
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: C0
+ |
+ o changeset: 0:ea207398892e
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: ROOT
+
+
+Check templates
+---------------
+
+ $ hg tlog
+ @ 0b997eb7ceee
+ | Predecessors: 6:4a004186e638
+ | semi-colon: 6:4a004186e638
+ | json: ["4a004186e63889f20cb16434fcbd72220bd1eace"]
+ | map: 6:4a004186e63889f20cb16434fcbd72220bd1eace
+ | o b18bc8331526
+ |/ Predecessors: 6:4a004186e638
+ | semi-colon: 6:4a004186e638
+ | json: ["4a004186e63889f20cb16434fcbd72220bd1eace"]
+ | map: 6:4a004186e63889f20cb16434fcbd72220bd1eace
+ | o ba2ed02b0c9a
+ | |
+ | x 4a004186e638
+ |/ Successors: 8:b18bc8331526; 9:0b997eb7ceee
+ | multi-line: 8:b18bc8331526
+ | multi-line: 9:0b997eb7ceee
+ | json: [["b18bc8331526a22cbb1801022bd1555bf291c48b"], ["0b997eb7ceeee06200a02f8aab185979092d514e"]]
+ o dd800401bd8c
+ |
+ o f897c6137566
+ |
+ o ea207398892e
+
+ $ hg tlog --hidden
+ @ 0b997eb7ceee
+ | Predecessors: 6:4a004186e638
+ | semi-colon: 6:4a004186e638
+ | json: ["4a004186e63889f20cb16434fcbd72220bd1eace"]
+ | map: 6:4a004186e63889f20cb16434fcbd72220bd1eace
+ | o b18bc8331526
+ |/ Predecessors: 6:4a004186e638
+ | semi-colon: 6:4a004186e638
+ | json: ["4a004186e63889f20cb16434fcbd72220bd1eace"]
+ | map: 6:4a004186e63889f20cb16434fcbd72220bd1eace
+ | o ba2ed02b0c9a
+ | | Predecessors: 4:9bd10a0775e4
+ | | semi-colon: 4:9bd10a0775e4
+ | | json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
+ | | map: 4:9bd10a0775e478708cada5f176ec6de654359ce7
+ | x 4a004186e638
+ |/ Predecessors: 4:9bd10a0775e4
+ | semi-colon: 4:9bd10a0775e4
+ | json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
+ | map: 4:9bd10a0775e478708cada5f176ec6de654359ce7
+ | Successors: 8:b18bc8331526; 9:0b997eb7ceee
+ | multi-line: 8:b18bc8331526
+ | multi-line: 9:0b997eb7ceee
+ | json: [["b18bc8331526a22cbb1801022bd1555bf291c48b"], ["0b997eb7ceeee06200a02f8aab185979092d514e"]]
+ o dd800401bd8c
+ | Predecessors: 4:9bd10a0775e4
+ | semi-colon: 4:9bd10a0775e4
+ | json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
+ | map: 4:9bd10a0775e478708cada5f176ec6de654359ce7
+ | x 9bd10a0775e4
+ |/ Successors: 5:dd800401bd8c 6:4a004186e638 7:ba2ed02b0c9a
+ | multi-line: 5:dd800401bd8c 6:4a004186e638 7:ba2ed02b0c9a
+ | json: [["dd800401bd8c79d815329277739e433e883f784e", "4a004186e63889f20cb16434fcbd72220bd1eace", "ba2ed02b0c9a56b9fdbc4e79c7e57866984d8a1f"]]
+ o f897c6137566
+ | Predecessors: 2:0dec01379d3b
+ | semi-colon: 2:0dec01379d3b
+ | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]
+ | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5
+ | x 0dec01379d3b
+ | | Predecessors: 1:471f378eab4c
+ | | semi-colon: 1:471f378eab4c
+ | | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
+ | | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
+ | | Successors: 3:f897c6137566; 1:471f378eab4c
+ | | multi-line: 3:f897c6137566
+ | | multi-line: 1:471f378eab4c
+ | | json: [["f897c6137566320b081514b4c7227ecc3d384b39"], ["471f378eab4c5e25f6c77f785b27c936efb22874"]]
+ | x 471f378eab4c
+ |/ Predecessors: 2:0dec01379d3b
+ | semi-colon: 2:0dec01379d3b
+ | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]
+ | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5
+ | Successors: 2:0dec01379d3b
+ | multi-line: 2:0dec01379d3b
+ | json: [["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]]
+ o ea207398892e
+
+ $ hg up --hidden 4
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg rebase -r 7 -d 8 --config extensions.rebase=
+ rebasing 7:ba2ed02b0c9a "Add A,B,C"
+ $ hg tlog
+ o eceed8f98ffc
+ | Predecessors: 4:9bd10a0775e4
+ | semi-colon: 4:9bd10a0775e4
+ | json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
+ | map: 4:9bd10a0775e478708cada5f176ec6de654359ce7
+ | o 0b997eb7ceee
+ | | Predecessors: 4:9bd10a0775e4
+ | | semi-colon: 4:9bd10a0775e4
+ | | json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
+ | | map: 4:9bd10a0775e478708cada5f176ec6de654359ce7
+ o | b18bc8331526
+ |/ Predecessors: 4:9bd10a0775e4
+ | semi-colon: 4:9bd10a0775e4
+ | json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
+ | map: 4:9bd10a0775e478708cada5f176ec6de654359ce7
+ o dd800401bd8c
+ | Predecessors: 4:9bd10a0775e4
+ | semi-colon: 4:9bd10a0775e4
+ | json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
+ | map: 4:9bd10a0775e478708cada5f176ec6de654359ce7
+ | @ 9bd10a0775e4
+ |/ Successors: 5:dd800401bd8c 9:0b997eb7ceee 10:eceed8f98ffc; 5:dd800401bd8c 8:b18bc8331526 10:eceed8f98ffc
+ | multi-line: 5:dd800401bd8c 9:0b997eb7ceee 10:eceed8f98ffc
+ | multi-line: 5:dd800401bd8c 8:b18bc8331526 10:eceed8f98ffc
+ | json: [["dd800401bd8c79d815329277739e433e883f784e", "0b997eb7ceeee06200a02f8aab185979092d514e", "eceed8f98ffc4186032e29a6542ab98888ebf68d"], ["dd800401bd8c79d815329277739e433e883f784e", "b18bc8331526a22cbb1801022bd1555bf291c48b", "eceed8f98ffc4186032e29a6542ab98888ebf68d"]]
+ o f897c6137566
+ |
+ o ea207398892e
+
+Test templates with pruned commits
+==================================
+
+Test setup
+----------
+
+ $ hg init $TESTTMP/templates-local-prune
+ $ cd $TESTTMP/templates-local-prune
+ $ mkcommit ROOT
+ $ mkcommit A0
+ $ hg debugobsolete --record-parent `getid "."`
+ obsoleted 1 changesets
+
+Check output
+------------
+
+ $ hg up "desc(A0)" --hidden
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg tlog
+ @ 471f378eab4c
+ |
+ o ea207398892e
+
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-obsolete-bundle-strip.t Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,1384 @@
+==================================================
+Test obsmarkers interaction with bundle and strip
+==================================================
+
+Setup a repository with various case
+====================================
+
+Config setup
+------------
+
+ $ cat >> $HGRCPATH <<EOF
+ > [ui]
+ > # simpler log output
+ > logtemplate = "{node|short}: {desc}\n"
+ >
+ > [experimental]
+ > # enable evolution
+ > evolution = all
+ >
+ > # include obsmarkers in bundle
+ > evolution.bundle-obsmarker = yes
+ >
+ > [extensions]
+ > # needed for some tests
+ > strip =
+ > [defaults]
+ > # we'll query many hidden changeset
+ > debugobsolete = --hidden
+ > EOF
+
+ $ mkcommit() {
+ > echo "$1" > "$1"
+ > hg add "$1"
+ > hg ci -m "$1"
+ > }
+
+ $ getid() {
+ > hg log --hidden --template '{node}\n' --rev "$1"
+ > }
+
+ $ mktestrepo () {
+ > [ -n "$1" ] || exit 1
+ > cd $TESTTMP
+ > hg init $1
+ > cd $1
+ > mkcommit ROOT
+ > }
+
+Function to compare the expected bundled obsmarkers with the actually bundled
+obsmarkers. It also checks the obsmarkers backed up during strip.
+
+ $ testrevs () {
+ > revs="$1"
+ > testname=`basename \`pwd\``
+ > revsname=`hg --hidden log -T '-{desc}' --rev "${revs}"`
+ > prefix="${TESTTMP}/${testname}${revsname}"
+ > markersfile="${prefix}-relevant-markers.txt"
+ > exclufile="${prefix}-exclusive-markers.txt"
+ > bundlefile="${prefix}-bundle.hg"
+ > contentfile="${prefix}-bundle-markers.hg"
+ > stripcontentfile="${prefix}-bundle-markers.hg"
+ > hg debugobsolete --hidden --rev "${revs}" | sed 's/^/ /' > "${markersfile}"
+ > hg debugobsolete --hidden --rev "${revs}" --exclusive | sed 's/^/ /' > "${exclufile}"
+ > echo '### Matched revisions###'
+ > hg log --hidden --rev "${revs}" | sort
+ > echo '### Relevant markers ###'
+ > cat "${markersfile}"
+ > printf "# bundling: "
+ > hg bundle --hidden --base "parents(roots(${revs}))" --rev "${revs}" "${bundlefile}"
+ > hg debugbundle --part-type obsmarkers "${bundlefile}" | sed 1,3d > "${contentfile}"
+ > echo '### Bundled markers ###'
+ > cat "${contentfile}"
+ > echo '### diff <relevant> <bundled> ###'
+ > cmp "${markersfile}" "${contentfile}" || diff -u "${markersfile}" "${contentfile}"
+ > echo '#################################'
+ > echo '### Exclusive markers ###'
+ > cat "${exclufile}"
+ > # if the matched revs do not have children, we also check the result of strip
+ > children=`hg log --hidden --rev "((${revs})::) - (${revs})"`
+ > if [ -z "$children" ];
+ > then
+ > printf "# stripping: "
+ > prestripfile="${prefix}-pre-strip.txt"
+ > poststripfile="${prefix}-post-strip.txt"
+ > strippedfile="${prefix}-stripped-markers.txt"
+ > hg debugobsolete --hidden | sort | sed 's/^/ /' > "${prestripfile}"
+ > hg strip --hidden --rev "${revs}"
+ > hg debugobsolete --hidden | sort | sed 's/^/ /' > "${poststripfile}"
+ > hg debugbundle --part-type obsmarkers .hg/strip-backup/* | sed 1,3d > "${stripcontentfile}"
+ > echo '### Backup markers ###'
+ > cat "${stripcontentfile}"
+ > echo '### diff <relevant> <backed-up> ###'
+ > cmp "${markersfile}" "${stripcontentfile}" || diff -u "${markersfile}" "${stripcontentfile}"
+ > echo '#################################'
+ > cat "${prestripfile}" "${poststripfile}" | sort | uniq -u > "${strippedfile}"
+ > echo '### Stripped markers ###'
+ > cat "${strippedfile}"
+ > echo '### diff <exclusive> <stripped> ###'
+ > cmp "${exclufile}" "${strippedfile}" || diff -u "${exclufile}" "${strippedfile}"
+ > echo '#################################'
+ > # restore and clean up repo for the next test
+ > hg unbundle .hg/strip-backup/* | sed 's/^/# unbundling: /'
+ > # clean up directory for the next test
+ > rm .hg/strip-backup/*
+ > fi
+ > }
+
+root setup
+-------------
+
+simple chain
+============
+
+. A0
+. ⇠ø⇠◔ A1
+. |/
+. ●
+
+setup
+-----
+
+ $ mktestrepo simple-chain
+ $ mkcommit 'C-A0'
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ mkcommit 'C-A1'
+ created new head
+ $ hg debugobsolete a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 `getid 'desc("C-A0")'`
+ $ hg debugobsolete `getid 'desc("C-A0")'` a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1
+ obsoleted 1 changesets
+ $ hg debugobsolete a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 `getid 'desc("C-A1")'`
+
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg log --hidden -G
+ o cf2c22470d67: C-A1
+ |
+ | x 84fcb0dfe17b: C-A0
+ |/
+ @ ea207398892e: ROOT
+
+ $ hg debugobsolete
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+
+Actual testing
+--------------
+
+ $ testrevs 'desc("C-A0")'
+ ### Matched revisions###
+ 84fcb0dfe17b: C-A0
+ ### Relevant markers ###
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 1 changesets found
+ ### Bundled markers ###
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ # stripping: saved backup bundle to $TESTTMP/simple-chain/.hg/strip-backup/84fcb0dfe17b-6454bbdc-backup.hg (glob)
+ ### Backup markers ###
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 1 changesets with 1 changes to 1 files (+1 heads)
+ # unbundling: (run 'hg heads' to see heads)
+
+ $ testrevs 'desc("C-A1")'
+ ### Matched revisions###
+ cf2c22470d67: C-A1
+ ### Relevant markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 1 changesets found
+ ### Bundled markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # stripping: saved backup bundle to $TESTTMP/simple-chain/.hg/strip-backup/cf2c22470d67-fa0f07b0-backup.hg (glob)
+ ### Backup markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 1 changesets with 1 changes to 1 files (+1 heads)
+ # unbundling: 2 new obsolescence markers
+ # unbundling: obsoleted 1 changesets
+ # unbundling: (run 'hg heads' to see heads)
+
+ $ testrevs 'desc("C-A")'
+ ### Matched revisions###
+ 84fcb0dfe17b: C-A0
+ cf2c22470d67: C-A1
+ ### Relevant markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 2 changesets found
+ ### Bundled markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # stripping: saved backup bundle to $TESTTMP/simple-chain/.hg/strip-backup/cf2c22470d67-fce4fc64-backup.hg (glob)
+ ### Backup markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1 cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 2 changesets with 2 changes to 2 files (+1 heads)
+ # unbundling: 3 new obsolescence markers
+ # unbundling: (run 'hg heads' to see heads)
+
+chain with prune children
+=========================
+
+. ⇠⊗ B0
+. |
+. ⇠ø⇠◔ A1
+. |
+. ●
+
+setup
+-----
+
+ $ mktestrepo prune
+ $ mkcommit 'C-A0'
+ $ mkcommit 'C-B0'
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ $ mkcommit 'C-A1'
+ created new head
+ $ hg debugobsolete a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 `getid 'desc("C-A0")'`
+ $ hg debugobsolete `getid 'desc("C-A0")'` `getid 'desc("C-A1")'`
+ obsoleted 1 changesets
+ $ hg debugobsolete --record-parents `getid 'desc("C-B0")'`
+ obsoleted 1 changesets
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg log --hidden -G
+ o cf2c22470d67: C-A1
+ |
+ | x 29f93b1df87b: C-B0
+ | |
+ | x 84fcb0dfe17b: C-A0
+ |/
+ @ ea207398892e: ROOT
+
+ $ hg debugobsolete
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+
+Actual testing
+--------------
+
+ $ testrevs 'desc("C-A0")'
+ ### Matched revisions###
+ 84fcb0dfe17b: C-A0
+ ### Relevant markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 1 changesets found
+ ### Bundled markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+
+(The prune marker is considered exclusive to the pruned changeset even if it
+is also considered "relevant" to its parent. This allows stripping prune
+markers, and avoids leaving behind prune markers from dead ends that could be
+problematic.)
+
+ $ testrevs 'desc("C-B0")'
+ ### Matched revisions###
+ 29f93b1df87b: C-B0
+ ### Relevant markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 1 changesets found
+ ### Bundled markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # stripping: saved backup bundle to $TESTTMP/prune/.hg/strip-backup/29f93b1df87b-7fb32101-backup.hg (glob)
+ ### Backup markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 1 changesets with 1 changes to 1 files
+ # unbundling: 1 new obsolescence markers
+ # unbundling: (run 'hg update' to get a working copy)
+
+ $ testrevs 'desc("C-A1")'
+ ### Matched revisions###
+ cf2c22470d67: C-A1
+ ### Relevant markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 1 changesets found
+ ### Bundled markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # stripping: saved backup bundle to $TESTTMP/prune/.hg/strip-backup/cf2c22470d67-fa0f07b0-backup.hg (glob)
+ ### Backup markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 1 changesets with 1 changes to 1 files (+1 heads)
+ # unbundling: 1 new obsolescence markers
+ # unbundling: obsoleted 1 changesets
+ # unbundling: (run 'hg heads' to see heads)
+
+bundling multiple revisions
+
+ $ testrevs 'desc("C-A")'
+ ### Matched revisions###
+ 84fcb0dfe17b: C-A0
+ cf2c22470d67: C-A1
+ ### Relevant markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 2 changesets found
+ ### Bundled markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+
+ $ testrevs 'desc("C-")'
+ ### Matched revisions###
+ 29f93b1df87b: C-B0
+ 84fcb0dfe17b: C-A0
+ cf2c22470d67: C-A1
+ ### Relevant markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 3 changesets found
+ ### Bundled markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # stripping: saved backup bundle to $TESTTMP/prune/.hg/strip-backup/cf2c22470d67-884c33b0-backup.hg (glob)
+ ### Backup markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 3 changesets with 3 changes to 3 files (+1 heads)
+ # unbundling: 3 new obsolescence markers
+ # unbundling: (run 'hg heads' to see heads)
+
+chain with precursors also pruned
+=================================
+
+. A0 (also pruned)
+. ⇠ø⇠◔ A1
+. |
+. ●
+
+setup
+-----
+
+ $ mktestrepo prune-inline
+ $ mkcommit 'C-A0'
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ mkcommit 'C-A1'
+ created new head
+ $ hg debugobsolete a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 `getid 'desc("C-A0")'`
+ $ hg debugobsolete --record-parents `getid 'desc("C-A0")'`
+ obsoleted 1 changesets
+ $ hg debugobsolete `getid 'desc("C-A0")'` `getid 'desc("C-A1")'`
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg log --hidden -G
+ o cf2c22470d67: C-A1
+ |
+ | x 84fcb0dfe17b: C-A0
+ |/
+ @ ea207398892e: ROOT
+
+ $ hg debugobsolete
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+
+Actual testing
+--------------
+
+ $ testrevs 'desc("C-A0")'
+ ### Matched revisions###
+ 84fcb0dfe17b: C-A0
+ ### Relevant markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 1 changesets found
+ ### Bundled markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ # stripping: saved backup bundle to $TESTTMP/prune-inline/.hg/strip-backup/84fcb0dfe17b-6454bbdc-backup.hg (glob)
+ ### Backup markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 1 changesets with 1 changes to 1 files (+1 heads)
+ # unbundling: (run 'hg heads' to see heads)
+
+ $ testrevs 'desc("C-A1")'
+ ### Matched revisions###
+ cf2c22470d67: C-A1
+ ### Relevant markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 1 changesets found
+ ### Bundled markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # stripping: saved backup bundle to $TESTTMP/prune-inline/.hg/strip-backup/cf2c22470d67-fa0f07b0-backup.hg (glob)
+ ### Backup markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 1 changesets with 1 changes to 1 files (+1 heads)
+ # unbundling: 1 new obsolescence markers
+ # unbundling: (run 'hg heads' to see heads)
+
+ $ testrevs 'desc("C-A")'
+ ### Matched revisions###
+ 84fcb0dfe17b: C-A0
+ cf2c22470d67: C-A1
+ ### Relevant markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 2 changesets found
+ ### Bundled markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # stripping: saved backup bundle to $TESTTMP/prune-inline/.hg/strip-backup/cf2c22470d67-fce4fc64-backup.hg (glob)
+ ### Backup markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 2 changesets with 2 changes to 2 files (+1 heads)
+ # unbundling: 3 new obsolescence markers
+ # unbundling: (run 'hg heads' to see heads)
+
+chain with missing prune
+========================
+
+. ⊗ B
+. |
+. ⇠◌⇠◔ A1
+. |
+. ●
+
+setup
+-----
+
+ $ mktestrepo missing-prune
+ $ mkcommit 'C-A0'
+ $ mkcommit 'C-B0'
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ $ mkcommit 'C-A1'
+ created new head
+ $ hg debugobsolete a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 `getid 'desc("C-A0")'`
+ $ hg debugobsolete `getid 'desc("C-A0")'` `getid 'desc("C-A1")'`
+ obsoleted 1 changesets
+ $ hg debugobsolete --record-parents `getid 'desc("C-B0")'`
+ obsoleted 1 changesets
+
+(it is annoying to create a prune marker with parent data but without the pruned changeset, so we strip the changeset after the fact)
+
+ $ hg strip --hidden --rev 'desc("C-A0")::' --no-backup --config devel.strip-obsmarkers=no
+
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg log --hidden -G
+ o cf2c22470d67: C-A1
+ |
+ @ ea207398892e: ROOT
+
+ $ hg debugobsolete
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+
+Actual testing
+--------------
+
+ $ testrevs 'desc("C-A1")'
+ ### Matched revisions###
+ cf2c22470d67: C-A1
+ ### Relevant markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 1 changesets found
+ ### Bundled markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # stripping: saved backup bundle to $TESTTMP/missing-prune/.hg/strip-backup/cf2c22470d67-fa0f07b0-backup.hg (glob)
+ ### Backup markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ 29f93b1df87baee1824e014080d8adf145f81783 0 {84fcb0dfe17b256ebae52e05572993b9194c018a} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 1 changesets with 1 changes to 1 files
+ # unbundling: 3 new obsolescence markers
+ # unbundling: (run 'hg update' to get a working copy)
+
+chain with precursors also pruned (precursor changeset missing)
+===============================================================
+
+. A0 (also pruned)
+. ⇠◌⇠◔ A1
+. |
+. ●
+
+setup
+-----
+
+ $ mktestrepo prune-inline-missing
+ $ mkcommit 'C-A0'
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ mkcommit 'C-A1'
+ created new head
+ $ hg debugobsolete a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 `getid 'desc("C-A0")'`
+ $ hg debugobsolete --record-parents `getid 'desc("C-A0")'`
+ obsoleted 1 changesets
+ $ hg debugobsolete `getid 'desc("C-A0")'` `getid 'desc("C-A1")'`
+
+(it is annoying to create a prune marker with parent data but without the pruned changeset, so we strip the changeset after the fact)
+
+ $ hg strip --hidden --rev 'desc("C-A0")::' --no-backup --config devel.strip-obsmarkers=no
+
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg log --hidden -G
+ o cf2c22470d67: C-A1
+ |
+ @ ea207398892e: ROOT
+
+ $ hg debugobsolete
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+
+Actual testing
+--------------
+
+ $ testrevs 'desc("C-A1")'
+ ### Matched revisions###
+ cf2c22470d67: C-A1
+ ### Relevant markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 1 changesets found
+ ### Bundled markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # stripping: saved backup bundle to $TESTTMP/prune-inline-missing/.hg/strip-backup/cf2c22470d67-fa0f07b0-backup.hg (glob)
+ ### Backup markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ 84fcb0dfe17b256ebae52e05572993b9194c018a 0 {ea207398892eb49e06441f10dda2a731f0450f20} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 84fcb0dfe17b256ebae52e05572993b9194c018a cf2c22470d67233004e934a31184ac2b35389914 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 84fcb0dfe17b256ebae52e05572993b9194c018a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 1 changesets with 1 changes to 1 files
+ # unbundling: 3 new obsolescence markers
+ # unbundling: (run 'hg update' to get a working copy)
+
+Chain with fold and split
+=========================
+
+setup
+-----
+
+ $ mktestrepo split-fold
+ $ mkcommit 'C-A'
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ mkcommit 'C-B'
+ created new head
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ mkcommit 'C-C'
+ created new head
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ mkcommit 'C-D'
+ created new head
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ mkcommit 'C-E'
+ created new head
+ $ hg debugobsolete a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 `getid 'desc("C-A")'`
+ $ hg debugobsolete `getid 'desc("C-A")'` `getid 'desc("C-B")'` `getid 'desc("C-C")'` # record split
+ obsoleted 1 changesets
+ $ hg debugobsolete `getid 'desc("C-A")'` `getid 'desc("C-D")'` # other divergent
+ $ hg debugobsolete `getid 'desc("C-A")'` b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0
+ $ hg debugobsolete b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 `getid 'desc("C-E")'`
+ $ hg debugobsolete `getid 'desc("C-B")'` `getid 'desc("C-E")'`
+ obsoleted 1 changesets
+ $ hg debugobsolete `getid 'desc("C-C")'` `getid 'desc("C-E")'`
+ obsoleted 1 changesets
+ $ hg debugobsolete `getid 'desc("C-D")'` `getid 'desc("C-E")'`
+ obsoleted 1 changesets
+ $ hg debugobsolete c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 `getid 'desc("C-E")'`
+
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg log --hidden -G
+ o 2f20ff6509f0: C-E
+ |
+ | x 06dc9da25ef0: C-D
+ |/
+ | x 27ec657ca21d: C-C
+ |/
+ | x a9b9da38ed96: C-B
+ |/
+ | x 9ac430e15fca: C-A
+ |/
+ @ ea207398892e: ROOT
+
+ $ hg debugobsolete
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+
+Actual testing
+--------------
+
+ $ testrevs 'desc("C-A")'
+ ### Matched revisions###
+ 9ac430e15fca: C-A
+ ### Relevant markers ###
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 1 changesets found
+ ### Bundled markers ###
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/9ac430e15fca-81204eba-backup.hg (glob)
+ ### Backup markers ###
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 1 changesets with 1 changes to 1 files (+1 heads)
+ # unbundling: (run 'hg heads' to see heads)
+
+ $ testrevs 'desc("C-B")'
+ ### Matched revisions###
+ a9b9da38ed96: C-B
+ ### Relevant markers ###
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 1 changesets found
+ ### Bundled markers ###
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/a9b9da38ed96-7465d6e9-backup.hg (glob)
+ ### Backup markers ###
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 1 changesets with 1 changes to 1 files (+1 heads)
+ # unbundling: (run 'hg heads' to see heads)
+
+ $ testrevs 'desc("C-C")'
+ ### Matched revisions###
+ 27ec657ca21d: C-C
+ ### Relevant markers ###
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 1 changesets found
+ ### Bundled markers ###
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/27ec657ca21d-d5dd1c7c-backup.hg (glob)
+ ### Backup markers ###
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 1 changesets with 1 changes to 1 files (+1 heads)
+ # unbundling: (run 'hg heads' to see heads)
+
+ $ testrevs 'desc("C-D")'
+ ### Matched revisions###
+ 06dc9da25ef0: C-D
+ ### Relevant markers ###
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 1 changesets found
+ ### Bundled markers ###
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/06dc9da25ef0-9b1c0a91-backup.hg (glob)
+ ### Backup markers ###
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 1 changesets with 1 changes to 1 files (+1 heads)
+ # unbundling: (run 'hg heads' to see heads)
+
+ $ testrevs 'desc("C-E")'
+ ### Matched revisions###
+ 2f20ff6509f0: C-E
+ ### Relevant markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 1 changesets found
+ ### Bundled markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/2f20ff6509f0-8adeb22d-backup.hg (glob)
+ ### Backup markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 1 changesets with 1 changes to 1 files (+1 heads)
+ # unbundling: 6 new obsolescence markers
+ # unbundling: obsoleted 3 changesets
+ # unbundling: (run 'hg heads' to see heads)
+
+Bundle multiple revisions
+
+* each part of the split
+
+ $ testrevs 'desc("C-B") + desc("C-C")'
+ ### Matched revisions###
+ 27ec657ca21d: C-C
+ a9b9da38ed96: C-B
+ ### Relevant markers ###
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 2 changesets found
+ ### Bundled markers ###
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/a9b9da38ed96-0daf625a-backup.hg (glob)
+ ### Backup markers ###
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 2 changesets with 2 changes to 2 files (+2 heads)
+ # unbundling: (run 'hg heads' to see heads)
+
+* top one and other divergent
+
+ $ testrevs 'desc("C-E") + desc("C-D")'
+ ### Matched revisions###
+ 06dc9da25ef0: C-D
+ 2f20ff6509f0: C-E
+ ### Relevant markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 2 changesets found
+ ### Bundled markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/2f20ff6509f0-bf1b80f4-backup.hg (glob)
+ ### Backup markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 2 changesets with 2 changes to 2 files (+2 heads)
+ # unbundling: 7 new obsolescence markers
+ # unbundling: obsoleted 2 changesets
+ # unbundling: (run 'hg heads' to see heads)
+
+* top one and initial precursors
+
+ $ testrevs 'desc("C-E") + desc("C-A")'
+ ### Matched revisions###
+ 2f20ff6509f0: C-E
+ 9ac430e15fca: C-A
+ ### Relevant markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 2 changesets found
+ ### Bundled markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/9ac430e15fca-36b6476a-backup.hg (glob)
+ ### Backup markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 2 changesets with 2 changes to 2 files (+2 heads)
+ # unbundling: 6 new obsolescence markers
+ # unbundling: obsoleted 3 changesets
+ # unbundling: (run 'hg heads' to see heads)
+
+* top one and one of the split
+
+ $ testrevs 'desc("C-E") + desc("C-C")'
+ ### Matched revisions###
+ 27ec657ca21d: C-C
+ 2f20ff6509f0: C-E
+ ### Relevant markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 2 changesets found
+ ### Bundled markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/2f20ff6509f0-5fdfcd7d-backup.hg (glob)
+ ### Backup markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 2 changesets with 2 changes to 2 files (+2 heads)
+ # unbundling: 7 new obsolescence markers
+ # unbundling: obsoleted 2 changesets
+ # unbundling: (run 'hg heads' to see heads)
+
+* all
+
+ $ testrevs 'desc("C-")'
+ ### Matched revisions###
+ 06dc9da25ef0: C-D
+ 27ec657ca21d: C-C
+ 2f20ff6509f0: C-E
+ 9ac430e15fca: C-A
+ a9b9da38ed96: C-B
+ ### Relevant markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 5 changesets found
+ ### Bundled markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # stripping: saved backup bundle to $TESTTMP/split-fold/.hg/strip-backup/a9b9da38ed96-eeb4258f-backup.hg (glob)
+ ### Backup markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 27ec657ca21dd27c36c99fa75586f72ff0d442f1 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 06dc9da25ef03e1ff7864dded5fcba42eff2a3f0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c a9b9da38ed96f8c6c14f429441f625a344eb4696 27ec657ca21dd27c36c99fa75586f72ff0d442f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0 9ac430e15fca923b0ba027ca85d4d75c5c9cb73c 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a9b9da38ed96f8c6c14f429441f625a344eb4696 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0 2f20ff6509f0e013e90c5c8efd996131c918b0ca 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 5 changesets with 5 changes to 5 files (+4 heads)
+ # unbundling: 9 new obsolescence markers
+ # unbundling: (run 'hg heads' to see heads)
+
+changeset pruned on its own
+===========================
+
+. ⊗ B
+. |
+. ◕ A
+. |
+. ●
+
+setup
+-----
+
+ $ mktestrepo lonely-prune
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ mkcommit 'C-A'
+ $ mkcommit 'C-B'
+ $ hg debugobsolete --record-parent `getid 'desc("C-B")'`
+ obsoleted 1 changesets
+
+ $ hg up 'desc("ROOT")'
+ 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ $ hg log --hidden -G
+ x cefb651fc2fd: C-B
+ |
+ o 9ac430e15fca: C-A
+ |
+ @ ea207398892e: ROOT
+
+ $ hg debugobsolete
+ cefb651fc2fdc7bb75e588781de5e432c134e8a5 0 {9ac430e15fca923b0ba027ca85d4d75c5c9cb73c} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+
+Actual testing
+--------------
+ $ testrevs 'desc("C-A")'
+ ### Matched revisions###
+ 9ac430e15fca: C-A
+ ### Relevant markers ###
+ cefb651fc2fdc7bb75e588781de5e432c134e8a5 0 {9ac430e15fca923b0ba027ca85d4d75c5c9cb73c} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 1 changesets found
+ ### Bundled markers ###
+ cefb651fc2fdc7bb75e588781de5e432c134e8a5 0 {9ac430e15fca923b0ba027ca85d4d75c5c9cb73c} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ $ testrevs 'desc("C-B")'
+ ### Matched revisions###
+ cefb651fc2fd: C-B
+ ### Relevant markers ###
+ cefb651fc2fdc7bb75e588781de5e432c134e8a5 0 {9ac430e15fca923b0ba027ca85d4d75c5c9cb73c} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 1 changesets found
+ ### Bundled markers ###
+ cefb651fc2fdc7bb75e588781de5e432c134e8a5 0 {9ac430e15fca923b0ba027ca85d4d75c5c9cb73c} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ cefb651fc2fdc7bb75e588781de5e432c134e8a5 0 {9ac430e15fca923b0ba027ca85d4d75c5c9cb73c} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # stripping: saved backup bundle to $TESTTMP/lonely-prune/.hg/strip-backup/cefb651fc2fd-345c8dfa-backup.hg (glob)
+ ### Backup markers ###
+ cefb651fc2fdc7bb75e588781de5e432c134e8a5 0 {9ac430e15fca923b0ba027ca85d4d75c5c9cb73c} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ cefb651fc2fdc7bb75e588781de5e432c134e8a5 0 {9ac430e15fca923b0ba027ca85d4d75c5c9cb73c} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 1 changesets with 1 changes to 1 files
+ # unbundling: 1 new obsolescence markers
+ # unbundling: (run 'hg update' to get a working copy)
+ $ testrevs 'desc("C-")'
+ ### Matched revisions###
+ 9ac430e15fca: C-A
+ cefb651fc2fd: C-B
+ ### Relevant markers ###
+ cefb651fc2fdc7bb75e588781de5e432c134e8a5 0 {9ac430e15fca923b0ba027ca85d4d75c5c9cb73c} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # bundling: 2 changesets found
+ ### Bundled markers ###
+ cefb651fc2fdc7bb75e588781de5e432c134e8a5 0 {9ac430e15fca923b0ba027ca85d4d75c5c9cb73c} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <bundled> ###
+ #################################
+ ### Exclusive markers ###
+ cefb651fc2fdc7bb75e588781de5e432c134e8a5 0 {9ac430e15fca923b0ba027ca85d4d75c5c9cb73c} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ # stripping: saved backup bundle to $TESTTMP/lonely-prune/.hg/strip-backup/9ac430e15fca-b9855b02-backup.hg (glob)
+ ### Backup markers ###
+ cefb651fc2fdc7bb75e588781de5e432c134e8a5 0 {9ac430e15fca923b0ba027ca85d4d75c5c9cb73c} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <relevant> <backed-up> ###
+ #################################
+ ### Stripped markers ###
+ cefb651fc2fdc7bb75e588781de5e432c134e8a5 0 {9ac430e15fca923b0ba027ca85d4d75c5c9cb73c} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ### diff <exclusive> <stripped> ###
+ #################################
+ # unbundling: adding changesets
+ # unbundling: adding manifests
+ # unbundling: adding file changes
+ # unbundling: added 2 changesets with 2 changes to 2 files
+ # unbundling: 1 new obsolescence markers
+ # unbundling: (run 'hg update' to get a working copy)
--- a/tests/test-obsolete-changeset-exchange.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-obsolete-changeset-exchange.t Wed Jul 19 07:51:41 2017 -0500
@@ -34,6 +34,7 @@
o base d20a80d4def38df63a4b330b7fb688f3d4cae1e3
$ hg debugobsolete 9d73aac1b2ed7d53835eaeec212ed41ea47da53a f89bcc95eba5174b1ccc3e33a82e84c96e8338ee
+ obsoleted 1 changesets
Push it. The bundle should not refer to the extinct changeset.
@@ -83,6 +84,23 @@
adding file changes
added 1 changesets with 0 changes to 1 files (+1 heads)
(run 'hg heads' to see heads)
+
+check-that bundle can contain markers:
+
+ $ hg bundle --hidden --rev f89bcc95eba5 --base "f89bcc95eba5^" ../f89bcc95eba5-obs.hg --config experimental.evolution.bundle-obsmarker=1
+ 1 changesets found
+ $ hg debugbundle ../f89bcc95eba5.hg
+ Stream params: sortdict([('Compression', 'BZ')])
+ changegroup -- "sortdict([('version', '02'), ('nbchanges', '1')])"
+ f89bcc95eba5174b1ccc3e33a82e84c96e8338ee
+ $ hg debugbundle ../f89bcc95eba5-obs.hg
+ Stream params: sortdict([('Compression', 'BZ')])
+ changegroup -- "sortdict([('version', '02'), ('nbchanges', '1')])"
+ f89bcc95eba5174b1ccc3e33a82e84c96e8338ee
+ obsmarkers -- 'sortdict()'
+ version: 1 (70 bytes)
+ 9d73aac1b2ed7d53835eaeec212ed41ea47da53a f89bcc95eba5174b1ccc3e33a82e84c96e8338ee 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+
$ cd ..
pull does not fetch excessive changesets when common node is hidden (issue4982)
@@ -115,6 +133,7 @@
$ echo 2b > foo
$ hg -q commit -m 2b
$ hg debugobsolete 6a29ed9c68defff1a139e5c6fa9696fb1a75783d bec0734cd68e84477ba7fc1d13e6cff53ab70129
+ obsoleted 1 changesets
$ cd ..
client only pulls down 1 changeset
@@ -126,7 +145,7 @@
searching for changes
taking quick initial sample
query 2; still undecided: 2, sample size is: 2
- 2 total queries
+ 2 total queries in *.????s (glob)
1 changesets found
list of changesets:
bec0734cd68e84477ba7fc1d13e6cff53ab70129
@@ -144,11 +163,11 @@
adding file changes
adding foo revisions
added 1 changesets with 1 changes to 1 files (+1 heads)
- updating the branch cache
bundle2-input-part: total payload size 476
bundle2-input-part: "listkeys" (params: 1 mandatory) supported
bundle2-input-part: total payload size 58
bundle2-input-part: "listkeys" (params: 1 mandatory) supported
bundle2-input-bundle: 2 parts total
checking for updated bookmarks
+ updating the branch cache
(run 'hg heads' to see heads, 'hg merge' to merge)
--- a/tests/test-obsolete-checkheads.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-obsolete-checkheads.t Wed Jul 19 07:51:41 2017 -0500
@@ -47,6 +47,7 @@
$ mkcommit new
created new head
$ hg debugobsolete --flags 1 `getid old` `getid new`
+ obsoleted 1 changesets
$ hg log -G --hidden
@ 71e3228bffe1 (draft) add new
|
@@ -188,6 +189,7 @@
$ mkcommit desc2
created new head
$ hg debugobsolete `getid old` `getid new`
+ obsoleted 1 changesets
$ hg log -G --hidden
@ 5fe37041cc2b (draft) add desc2
|
@@ -296,6 +298,7 @@
$ mkcommit new-unrelated
created new head
$ hg debugobsolete `getid old`
+ obsoleted 1 changesets
$ hg log -G --hidden
@ 350a93b716be (draft) add new-unrelated
|
--- a/tests/test-obsolete-divergent.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-obsolete-divergent.t Wed Jul 19 07:51:41 2017 -0500
@@ -10,6 +10,8 @@
> logtemplate = {rev}:{node|short} {desc}\n
> [experimental]
> evolution=createmarkers
+ > [extensions]
+ > drawdag=$TESTDIR/drawdag.py
> [alias]
> debugobsolete = debugobsolete -d '0 0'
> [phases]
@@ -57,6 +59,7 @@
$ newcase direct
$ hg debugobsolete `getid A_0` `getid A_1`
+ obsoleted 1 changesets
$ hg debugobsolete `getid A_0` `getid A_2`
$ hg log -G --hidden
o 3:392fd25390da A_2
@@ -80,6 +83,23 @@
$ hg log -r 'divergent()'
2:82623d38b9ba A_1
3:392fd25390da A_2
+ $ hg debugsuccessorssets 'all()' --closest
+ d20a80d4def3
+ d20a80d4def3
+ 82623d38b9ba
+ 82623d38b9ba
+ 392fd25390da
+ 392fd25390da
+ $ hg debugsuccessorssets 'all()' --closest --hidden
+ d20a80d4def3
+ d20a80d4def3
+ 007dc284c1f8
+ 82623d38b9ba
+ 392fd25390da
+ 82623d38b9ba
+ 82623d38b9ba
+ 392fd25390da
+ 392fd25390da
check that mercurial refuse to push
@@ -98,10 +118,12 @@
$ newcase indirect_known
$ hg debugobsolete `getid A_0` `getid A_1`
+ obsoleted 1 changesets
$ hg debugobsolete `getid A_0` `getid A_2`
$ mkcommit A_3
created new head
$ hg debugobsolete `getid A_2` `getid A_3`
+ obsoleted 1 changesets
$ hg log -G --hidden
@ 4:01f36c5a8fda A_3
|
@@ -128,6 +150,25 @@
$ hg log -r 'divergent()'
2:82623d38b9ba A_1
4:01f36c5a8fda A_3
+ $ hg debugsuccessorssets 'all()' --closest
+ d20a80d4def3
+ d20a80d4def3
+ 82623d38b9ba
+ 82623d38b9ba
+ 01f36c5a8fda
+ 01f36c5a8fda
+ $ hg debugsuccessorssets 'all()' --closest --hidden
+ d20a80d4def3
+ d20a80d4def3
+ 007dc284c1f8
+ 82623d38b9ba
+ 392fd25390da
+ 82623d38b9ba
+ 82623d38b9ba
+ 392fd25390da
+ 392fd25390da
+ 01f36c5a8fda
+ 01f36c5a8fda
$ cd ..
@@ -136,6 +177,7 @@
$ newcase indirect_unknown
$ hg debugobsolete `getid A_0` aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+ obsoleted 1 changesets
$ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa `getid A_1`
$ hg debugobsolete `getid A_0` `getid A_2`
$ hg log -G --hidden
@@ -160,6 +202,23 @@
$ hg log -r 'divergent()'
2:82623d38b9ba A_1
3:392fd25390da A_2
+ $ hg debugsuccessorssets 'all()' --closest
+ d20a80d4def3
+ d20a80d4def3
+ 82623d38b9ba
+ 82623d38b9ba
+ 392fd25390da
+ 392fd25390da
+ $ hg debugsuccessorssets 'all()' --closest --hidden
+ d20a80d4def3
+ d20a80d4def3
+ 007dc284c1f8
+ 82623d38b9ba
+ 392fd25390da
+ 82623d38b9ba
+ 82623d38b9ba
+ 392fd25390da
+ 392fd25390da
$ cd ..
do not take unknown node in account if they are final
@@ -167,7 +226,9 @@
$ newcase final-unknown
$ hg debugobsolete `getid A_0` `getid A_1`
+ obsoleted 1 changesets
$ hg debugobsolete `getid A_1` `getid A_2`
+ obsoleted 1 changesets
$ hg debugobsolete `getid A_0` bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
$ hg debugobsolete bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb cccccccccccccccccccccccccccccccccccccccc
$ hg debugobsolete `getid A_1` dddddddddddddddddddddddddddddddddddddddd
@@ -175,6 +236,10 @@
$ hg debugsuccessorssets --hidden 'desc('A_0')'
007dc284c1f8
392fd25390da
+ $ hg debugsuccessorssets 'desc('A_0')' --closest
+ $ hg debugsuccessorssets 'desc('A_0')' --closest --hidden
+ 007dc284c1f8
+ 82623d38b9ba
$ cd ..
@@ -183,11 +248,14 @@
$ newcase converged_divergence
$ hg debugobsolete `getid A_0` `getid A_1`
+ obsoleted 1 changesets
$ hg debugobsolete `getid A_0` `getid A_2`
$ mkcommit A_3
created new head
$ hg debugobsolete `getid A_1` `getid A_3`
+ obsoleted 1 changesets
$ hg debugobsolete `getid A_2` `getid A_3`
+ obsoleted 1 changesets
$ hg log -G --hidden
@ 4:01f36c5a8fda A_3
|
@@ -211,6 +279,23 @@
01f36c5a8fda
01f36c5a8fda
$ hg log -r 'divergent()'
+ $ hg debugsuccessorssets 'all()' --closest
+ d20a80d4def3
+ d20a80d4def3
+ 01f36c5a8fda
+ 01f36c5a8fda
+ $ hg debugsuccessorssets 'all()' --closest --hidden
+ d20a80d4def3
+ d20a80d4def3
+ 007dc284c1f8
+ 82623d38b9ba
+ 392fd25390da
+ 82623d38b9ba
+ 82623d38b9ba
+ 392fd25390da
+ 392fd25390da
+ 01f36c5a8fda
+ 01f36c5a8fda
$ cd ..
split is not divergences
@@ -218,6 +303,7 @@
$ newcase split
$ hg debugobsolete `getid A_0` `getid A_1` `getid A_2`
+ obsoleted 1 changesets
$ hg log -G --hidden
o 3:392fd25390da A_2
|
@@ -237,22 +323,41 @@
392fd25390da
392fd25390da
$ hg log -r 'divergent()'
+ $ hg debugsuccessorssets 'all()' --closest
+ d20a80d4def3
+ d20a80d4def3
+ 82623d38b9ba
+ 82623d38b9ba
+ 392fd25390da
+ 392fd25390da
+ $ hg debugsuccessorssets 'all()' --closest --hidden
+ d20a80d4def3
+ d20a80d4def3
+ 007dc284c1f8
+ 82623d38b9ba 392fd25390da
+ 82623d38b9ba
+ 82623d38b9ba
+ 392fd25390da
+ 392fd25390da
Even when subsequent rewriting happen
$ mkcommit A_3
created new head
$ hg debugobsolete `getid A_1` `getid A_3`
+ obsoleted 1 changesets
$ hg up 0
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ mkcommit A_4
created new head
$ hg debugobsolete `getid A_2` `getid A_4`
+ obsoleted 1 changesets
$ hg up 0
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ mkcommit A_5
created new head
$ hg debugobsolete `getid A_4` `getid A_5`
+ obsoleted 1 changesets
$ hg log -G --hidden
@ 6:e442cfc57690 A_5
|
@@ -283,6 +388,28 @@
e442cfc57690
e442cfc57690
e442cfc57690
+ $ hg debugsuccessorssets 'all()' --closest
+ d20a80d4def3
+ d20a80d4def3
+ 01f36c5a8fda
+ 01f36c5a8fda
+ e442cfc57690
+ e442cfc57690
+ $ hg debugsuccessorssets 'all()' --closest --hidden
+ d20a80d4def3
+ d20a80d4def3
+ 007dc284c1f8
+ 82623d38b9ba 392fd25390da
+ 82623d38b9ba
+ 82623d38b9ba
+ 392fd25390da
+ 392fd25390da
+ 01f36c5a8fda
+ 01f36c5a8fda
+ 6a411f0d7a0a
+ e442cfc57690
+ e442cfc57690
+ e442cfc57690
$ hg log -r 'divergent()'
Check more complex obsolescence graft (with divergence)
@@ -290,6 +417,7 @@
$ mkcommit B_0; hg up 0
0 files updated, 0 files merged, 2 files removed, 0 files unresolved
$ hg debugobsolete `getid B_0` `getid A_2`
+ obsoleted 1 changesets
$ mkcommit A_7; hg up 0
created new head
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
@@ -297,6 +425,7 @@
created new head
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg debugobsolete `getid A_5` `getid A_7` `getid A_8`
+ obsoleted 1 changesets
$ mkcommit A_9; hg up 0
created new head
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
@@ -352,6 +481,40 @@
14608b260df8
bed64f5d2f5a
bed64f5d2f5a
+ $ hg debugsuccessorssets 'all()' --closest
+ d20a80d4def3
+ d20a80d4def3
+ 01f36c5a8fda
+ 01f36c5a8fda
+ 7ae126973a96
+ 7ae126973a96
+ 14608b260df8
+ 14608b260df8
+ bed64f5d2f5a
+ bed64f5d2f5a
+ $ hg debugsuccessorssets 'all()' --closest --hidden
+ d20a80d4def3
+ d20a80d4def3
+ 007dc284c1f8
+ 82623d38b9ba 392fd25390da
+ 82623d38b9ba
+ 82623d38b9ba
+ 392fd25390da
+ 392fd25390da
+ 01f36c5a8fda
+ 01f36c5a8fda
+ 6a411f0d7a0a
+ e442cfc57690
+ e442cfc57690
+ e442cfc57690
+ 3750ebee865d
+ 392fd25390da
+ 7ae126973a96
+ 7ae126973a96
+ 14608b260df8
+ 14608b260df8
+ bed64f5d2f5a
+ bed64f5d2f5a
$ hg log -r 'divergent()'
4:01f36c5a8fda A_3
8:7ae126973a96 A_7
@@ -364,8 +527,11 @@
created new head
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg debugobsolete `getid A_9` `getid A_A`
+ obsoleted 1 changesets
$ hg debugobsolete `getid A_7` `getid A_A`
+ obsoleted 1 changesets
$ hg debugobsolete `getid A_8` `getid A_A`
+ obsoleted 1 changesets
$ hg log -G --hidden
o 11:a139f71be9da A_A
|
@@ -416,6 +582,38 @@
a139f71be9da
a139f71be9da
a139f71be9da
+ $ hg debugsuccessorssets 'all()' --closest
+ d20a80d4def3
+ d20a80d4def3
+ 01f36c5a8fda
+ 01f36c5a8fda
+ a139f71be9da
+ a139f71be9da
+ $ hg debugsuccessorssets 'all()' --closest --hidden
+ d20a80d4def3
+ d20a80d4def3
+ 007dc284c1f8
+ 82623d38b9ba 392fd25390da
+ 82623d38b9ba
+ 82623d38b9ba
+ 392fd25390da
+ 392fd25390da
+ 01f36c5a8fda
+ 01f36c5a8fda
+ 6a411f0d7a0a
+ e442cfc57690
+ e442cfc57690
+ e442cfc57690
+ 3750ebee865d
+ 392fd25390da
+ 7ae126973a96
+ a139f71be9da
+ 14608b260df8
+ a139f71be9da
+ bed64f5d2f5a
+ a139f71be9da
+ a139f71be9da
+ a139f71be9da
$ hg log -r 'divergent()'
$ cd ..
@@ -429,9 +627,59 @@
$ newcase subset
$ hg debugobsolete `getid A_0` `getid A_2`
+ obsoleted 1 changesets
$ hg debugobsolete `getid A_0` `getid A_1` `getid A_2`
$ hg debugsuccessorssets --hidden 'desc('A_0')'
007dc284c1f8
82623d38b9ba 392fd25390da
+ $ hg debugsuccessorssets 'desc('A_0')' --closest
+ $ hg debugsuccessorssets 'desc('A_0')' --closest --hidden
+ 007dc284c1f8
+ 82623d38b9ba 392fd25390da
$ cd ..
+
+Use scmutil.cleanupnodes API to create divergence
+
+ $ hg init cleanupnodes
+ $ cd cleanupnodes
+ $ hg debugdrawdag <<'EOS'
+ > B1 B3 B4
+ > | \|
+ > A Z
+ > EOS
+
+ $ hg update -q B1
+ $ echo 3 >> B
+ $ hg commit --amend -m B2
+ $ cat > $TESTTMP/scmutilcleanup.py <<EOF
+ > from mercurial import registrar, scmutil
+ > cmdtable = {}
+ > command = registrar.command(cmdtable)
+ > @command('cleanup')
+ > def cleanup(ui, repo):
+ > def node(expr):
+ > unfi = repo.unfiltered()
+ > rev = unfi.revs(expr).first()
+ > return unfi.changelog.node(rev)
+ > with repo.wlock(), repo.lock(), repo.transaction('delayedstrip'):
+ > mapping = {node('desc(B1)'): [node('desc(B3)')],
+ > node('desc(B3)'): [node('desc(B4)')]}
+ > scmutil.cleanupnodes(repo, mapping, 'test')
+ > EOF
+
+ $ rm .hg/localtags
+ $ hg cleanup --config extensions.t=$TESTTMP/scmutilcleanup.py
+ $ hg log -G -T '{rev}:{node|short} {desc} {troubles}' -r 'sort(all(), topo)'
+ @ 5:1a2a9b5b0030 B2 divergent
+ |
+ | o 4:70d5a63ca112 B4 divergent
+ | |
+ | o 1:48b9aae0607f Z
+ |
+ o 0:426bada5c675 A
+
+ $ hg debugobsolete
+ a178212c3433c4e77b573f6011e29affb8aefa33 1a2a9b5b0030632400aa78e00388c20f99d3ec44 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ a178212c3433c4e77b573f6011e29affb8aefa33 ad6478fb94ecec98b86daae98722865d494ac561 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ ad6478fb94ecec98b86daae98722865d494ac561 70d5a63ca112acb3764bc1d7320ca90ea688d671 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
--- a/tests/test-obsolete-tag-cache.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-obsolete-tag-cache.t Wed Jul 19 07:51:41 2017 -0500
@@ -56,6 +56,7 @@
Hiding a non-tip changeset should change filtered hash and cause tags recompute
$ hg debugobsolete -d '0 0' c3cb30f2d2cd0aae008cc91a07876e3c5131fd22 -u dummyuser
+ obsoleted 1 changesets
$ hg tags
tip 5:2942a772f72a
@@ -77,7 +78,9 @@
Hiding another changeset should cause the filtered hash to change
$ hg debugobsolete -d '0 0' d75775ffbc6bca1794d300f5571272879bd280da -u dummyuser
+ obsoleted 1 changesets
$ hg debugobsolete -d '0 0' 5f97d42da03fd56f3b228b03dfe48af5c0adf75b -u dummyuser
+ obsoleted 1 changesets
$ hg tags
tip 5:2942a772f72a
--- a/tests/test-obsolete.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-obsolete.t Wed Jul 19 07:51:41 2017 -0500
@@ -51,6 +51,7 @@
abort: changeset references must be full hexadecimal node identifiers
[255]
$ hg debugobsolete -d '0 0' `getid kill_me` -u babar
+ obsoleted 1 changesets
$ hg debugobsolete
97b7c2d76b1845ed3eb988cd612611e72406cef0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'babar'}
@@ -84,6 +85,7 @@
created new head
$ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
$ hg debugobsolete --config format.obsstore-version=0 --flag 12 `getid original_c` `getid new_c` -d '121 120'
+ obsoleted 1 changesets
$ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
2:245bde4270cd add original_c
$ hg debugrevlog -cd
@@ -107,6 +109,7 @@
$ mkcommit new_2_c
created new head
$ hg debugobsolete -d '1337 0' `getid new_c` `getid new_2_c`
+ obsoleted 1 changesets
$ hg debugobsolete
245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
@@ -118,6 +121,7 @@
$ mkcommit new_3_c
created new head
$ hg debugobsolete -d '1338 0' `getid new_2_c` 1337133713371337133713371337133713371337
+ obsoleted 1 changesets
$ hg debugobsolete -d '1339 0' 1337133713371337133713371337133713371337 `getid new_3_c`
$ hg debugobsolete
245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
@@ -256,6 +260,7 @@
$ hg ci -m 'add n3w_3_c'
created new head
$ hg debugobsolete -d '1338 0' --flags 1 `getid new_3_c` `getid n3w_3_c`
+ obsoleted 1 changesets
$ hg log -r 'bumped()'
$ hg log -G
@ 6:6f9641995072 (draft) [tip ] add n3w_3_c
@@ -267,6 +272,42 @@
o 0:1f0dee641bb7 (public) [ ] add a
+Basic exclusive testing
+
+ $ hg log -G --hidden
+ @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
+ |
+ | x 5:5601fb93a350 (draft *obsolete*) [ ] add new_3_c
+ |/
+ | x 4:ca819180edb9 (draft *obsolete*) [ ] add new_2_c
+ |/
+ | x 3:cdbce2fbb163 (draft *obsolete*) [ ] add new_c
+ |/
+ | o 2:245bde4270cd (public) [ ] add original_c
+ |/
+ o 1:7c3bad9141dc (public) [ ] add b
+ |
+ o 0:1f0dee641bb7 (public) [ ] add a
+
+ $ hg debugobsolete --rev 6f9641995072
+ 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
+ 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
+ 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
+ ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
+ cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
+ $ hg debugobsolete --rev 6f9641995072 --exclusive
+ 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
+ $ hg debugobsolete --rev 5601fb93a350 --hidden
+ 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
+ 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
+ ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
+ cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
+ $ hg debugobsolete --rev 5601fb93a350 --hidden --exclusive
+ $ hg debugobsolete --rev 5601fb93a350+6f9641995072 --hidden --exclusive
+ 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
+ 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
+ ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
+
$ cd ..
Revision 0 is hidden
@@ -278,6 +319,7 @@
$ mkcommit kill0
$ hg up -q null
$ hg debugobsolete `getid kill0`
+ obsoleted 1 changesets
$ mkcommit a
$ mkcommit b
@@ -472,6 +514,7 @@
$ mkcommit original_d
$ mkcommit original_e
$ hg debugobsolete --record-parents `getid original_d` -d '0 0'
+ obsoleted 1 changesets
$ hg debugobsolete | grep `getid original_d`
94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
$ hg log -r 'obsolete()'
@@ -563,7 +606,9 @@
$ hg up -q 'desc(n3w_3_c)'
$ mkcommit obsolete_e
created new head
- $ hg debugobsolete `getid 'original_e'` `getid 'obsolete_e'`
+ $ hg debugobsolete `getid 'original_e'` `getid 'obsolete_e'` \
+ > -u 'test <test@example.net>'
+ obsoleted 1 changesets
$ hg outgoing ../tmpf # parasite hg outgoing testin
comparing with ../tmpf
searching for changes
@@ -576,6 +621,7 @@
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
1 new obsolescence markers
+ obsoleted 1 changesets
test relevance computation
---------------------------------------
@@ -611,7 +657,7 @@
ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
- cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob)
+ cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test <test@example.net>'} (glob)
List of changesets with no chain
@@ -620,7 +666,7 @@
List of changesets that are included on marker chain
$ hg debugobsolete --hidden --rev 6
- cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob)
+ cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test <test@example.net>'} (glob)
List of changesets with a longer chain, (including a pruned children)
@@ -642,7 +688,7 @@
5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
- cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob)
+ cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test <test@example.net>'} (glob)
cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
List of all markers in JSON
@@ -702,7 +748,7 @@
{
"date": *, (glob)
"flag": 0,
- "metadata": {"user": "test"},
+ "metadata": {"user": "test <test@example.net>"},
"precnode": "cda648ca50f50482b7055c0b0c4c117bba6733d9",
"succnodes": ["3de5eca88c00aa039da7399a220f4a5221faa585"]
}
@@ -713,11 +759,11 @@
$ hg debugobsolete -r6 -T '{succnodes % "{node|short}"} {date|shortdate}\n'
3de5eca88c00 ????-??-?? (glob)
$ hg debugobsolete -r6 -T '{join(metadata % "{key}={value}", " ")}\n'
- user=test
+ user=test <test@example.net>
$ hg debugobsolete -r6 -T '{metadata}\n'
- 'user': 'test'
+ 'user': 'test <test@example.net>'
$ hg debugobsolete -r6 -T '{flag} {get(metadata, "user")}\n'
- 0 test
+ 0 test <test@example.net>
Test the debug output for exchange
----------------------------------
@@ -741,6 +787,66 @@
> do
> hg debugobsolete $node
> done
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
+ obsoleted 1 changesets
$ hg up tip
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -802,6 +908,7 @@
Several troubles on the same changeset (create an unstable and bumped changeset)
$ hg debugobsolete `getid obsolete_e`
+ obsoleted 1 changesets
$ hg debugobsolete `getid original_c` `getid babar`
$ hg log --config ui.logtemplate= -r 'bumped() and unstable()'
changeset: 7:50c51b361e60
@@ -1046,6 +1153,7 @@
grafting 1:1c9eddb02162 "content-1" (tip)
$ hg debugobsolete `hg log -r1 -T'{node}'` `hg log -r2 -T'{node}'`
+ obsoleted 1 changesets
$ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
$ cat hg.pid >> $DAEMON_PIDS
@@ -1065,12 +1173,12 @@
Test heads computation on pending index changes with obsolescence markers
$ cd ..
$ cat >$TESTTMP/test_extension.py << EOF
- > from mercurial import cmdutil
+ > from mercurial import cmdutil, registrar
> from mercurial.i18n import _
>
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
- > @command("amendtransient",[], _('hg amendtransient [rev]'))
+ > command = registrar.command(cmdtable)
+ > @command(b"amendtransient",[], _('hg amendtransient [rev]'))
> def amend(ui, repo, *pats, **opts):
> def commitfunc(ui, repo, message, match, opts):
> return repo.commit(message, repo['.'].user(), repo['.'].date(), match)
@@ -1093,25 +1201,6 @@
$ hg amendtransient
[1, 3]
-Check that corrupted hidden cache does not crash
-
- $ printf "" > .hg/cache/hidden
- $ hg log -r . -T '{node}' --debug
- corrupted hidden cache
- 8fd96dfc63e51ed5a8af1bec18eb4b19dbf83812 (no-eol)
- $ hg log -r . -T '{node}' --debug
- 8fd96dfc63e51ed5a8af1bec18eb4b19dbf83812 (no-eol)
-
-#if unix-permissions
-Check that wrong hidden cache permission does not crash
-
- $ chmod 000 .hg/cache/hidden
- $ hg log -r . -T '{node}' --debug
- cannot read hidden cache
- error writing hidden changesets cache
- 8fd96dfc63e51ed5a8af1bec18eb4b19dbf83812 (no-eol)
-#endif
-
Test cache consistency for the visible filter
1) We want to make sure that the cached filtered revs are invalidated when
bookmarks change
@@ -1130,7 +1219,7 @@
> bkmstoreinst._repo.currenttransaction().addpostclose('test_extension', trhook)
> orig(bkmstoreinst, *args, **kwargs)
> def extsetup(ui):
- > extensions.wrapfunction(bookmarks.bmstore, 'recordchange',
+ > extensions.wrapfunction(bookmarks.bmstore, '_recordchange',
> _bookmarkchanged)
> EOF
@@ -1194,24 +1283,92 @@
o 0:a78f55e5508c (draft) [ ] 0
+ $ hg strip --hidden -r 2 --config extensions.strip= --config devel.strip-obsmarkers=no
+ saved backup bundle to $TESTTMP/tmpe/issue4845/.hg/strip-backup/e008cf283490-39c978dc-backup.hg (glob)
+ $ hg debugobsolete
+ e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (*) {'user': 'test'} (glob)
+ f27abbcc1f77fb409cf9160482fe619541e2d605 0 {e008cf2834908e5d6b0f792a9d4b0e2272260fb8} (*) {'user': 'test'} (glob)
+ $ hg log -G
+ @ 2:b0551702f918 (draft) [tip ] 2
+ |
+ o 1:e016b03fd86f (draft) [ ] 1
+ |
+ o 0:a78f55e5508c (draft) [ ] 0
+
+ $ hg log -G --hidden
+ @ 2:b0551702f918 (draft) [tip ] 2
+ |
+ o 1:e016b03fd86f (draft) [ ] 1
+ |
+ o 0:a78f55e5508c (draft) [ ] 0
+
+ $ hg debugbundle .hg/strip-backup/e008cf283490-*-backup.hg
+ Stream params: sortdict([('Compression', 'BZ')])
+ changegroup -- "sortdict([('version', '02'), ('nbchanges', '2')])"
+ e008cf2834908e5d6b0f792a9d4b0e2272260fb8
+ f27abbcc1f77fb409cf9160482fe619541e2d605
+ obsmarkers -- 'sortdict()'
+ version: 1 (70 bytes)
+ f27abbcc1f77fb409cf9160482fe619541e2d605 0 {e008cf2834908e5d6b0f792a9d4b0e2272260fb8} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ phase-heads -- 'sortdict()'
+ f27abbcc1f77fb409cf9160482fe619541e2d605 draft
+
+ $ hg pull .hg/strip-backup/e008cf283490-*-backup.hg
+ pulling from .hg/strip-backup/e008cf283490-39c978dc-backup.hg
+ searching for changes
+ no changes found
+ $ hg debugobsolete
+ e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (*) {'user': 'test'} (glob)
+ f27abbcc1f77fb409cf9160482fe619541e2d605 0 {e008cf2834908e5d6b0f792a9d4b0e2272260fb8} (*) {'user': 'test'} (glob)
+ $ hg log -G
+ @ 2:b0551702f918 (draft) [tip ] 2
+ |
+ o 1:e016b03fd86f (draft) [ ] 1
+ |
+ o 0:a78f55e5508c (draft) [ ] 0
+
+ $ hg log -G --hidden
+ @ 2:b0551702f918 (draft) [tip ] 2
+ |
+ o 1:e016b03fd86f (draft) [ ] 1
+ |
+ o 0:a78f55e5508c (draft) [ ] 0
+
+
+Testing that strip remove markers:
+
$ hg strip -r 1 --config extensions.strip=
0 files updated, 0 files merged, 2 files removed, 0 files unresolved
- saved backup bundle to $TESTTMP/tmpe/issue4845/.hg/strip-backup/e016b03fd86f-c41c6bcc-backup.hg (glob)
+ saved backup bundle to $TESTTMP/tmpe/issue4845/.hg/strip-backup/e016b03fd86f-65ede734-backup.hg (glob)
+ $ hg debugobsolete
$ hg log -G
@ 0:a78f55e5508c (draft) [tip ] 0
$ hg log -G --hidden
@ 0:a78f55e5508c (draft) [tip ] 0
+ $ hg debugbundle .hg/strip-backup/e016b03fd86f-*-backup.hg
+ Stream params: sortdict([('Compression', 'BZ')])
+ changegroup -- "sortdict([('version', '02'), ('nbchanges', '2')])"
+ e016b03fd86fcccc54817d120b90b751aaf367d6
+ b0551702f918510f01ae838ab03a463054c67b46
+ obsmarkers -- 'sortdict()'
+ version: 1 (139 bytes)
+ e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ f27abbcc1f77fb409cf9160482fe619541e2d605 0 {e008cf2834908e5d6b0f792a9d4b0e2272260fb8} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ phase-heads -- 'sortdict()'
+ b0551702f918510f01ae838ab03a463054c67b46 draft
- $ hg pull .hg/strip-backup/*
- pulling from .hg/strip-backup/e016b03fd86f-c41c6bcc-backup.hg
- searching for changes
+ $ hg unbundle .hg/strip-backup/e016b03fd86f-*-backup.hg
adding changesets
adding manifests
adding file changes
added 2 changesets with 2 changes to 2 files
+ 2 new obsolescence markers
(run 'hg update' to get a working copy)
+ $ hg debugobsolete | sort
+ e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (*) {'user': 'test'} (glob)
+ f27abbcc1f77fb409cf9160482fe619541e2d605 0 {e008cf2834908e5d6b0f792a9d4b0e2272260fb8} (*) {'user': 'test'} (glob)
$ hg log -G
o 2:b0551702f918 (draft) [tip ] 2
|
@@ -1245,14 +1402,14 @@
$ echo d > d
$ hg ci -Am d
adding d
- $ hg ci --amend -m dd
+ $ hg ci --amend -m dd --config experimental.evolution.track-operation=1
$ hg debugobsolete --index --rev "3+7"
1 6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1 d27fb9b066076fd921277a4b9e8b9cb48c95bc6a 0 \(.*\) {'user': 'test'} (re)
- 3 4715cf767440ed891755448016c2b8cf70760c30 7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d 0 \(.*\) {'user': 'test'} (re)
+ 3 4715cf767440ed891755448016c2b8cf70760c30 7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d 0 \(.*\) {'operation': 'amend', 'user': 'test'} (re)
$ hg debugobsolete --index --rev "3+7" -Tjson
[
{
- "date": *, (glob)
+ "date": [0.0, 0],
"flag": 0,
"index": 1,
"metadata": {"user": "test"},
@@ -1260,10 +1417,10 @@
"succnodes": ["d27fb9b066076fd921277a4b9e8b9cb48c95bc6a"]
},
{
- "date": *, (glob)
+ "date": [0.0, 0],
"flag": 0,
"index": 3,
- "metadata": {"user": "test"},
+ "metadata": {"operation": "amend", "user": "test"},
"precnode": "4715cf767440ed891755448016c2b8cf70760c30",
"succnodes": ["7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d"]
}
@@ -1271,14 +1428,45 @@
Test the --delete option of debugobsolete command
$ hg debugobsolete --index
- 0 cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 \(.*\) {'user': 'test'} (re)
- 1 6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1 d27fb9b066076fd921277a4b9e8b9cb48c95bc6a 0 \(.*\) {'user': 'test'} (re)
- 2 1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 \(.*\) {'user': 'test'} (re)
- 3 4715cf767440ed891755448016c2b8cf70760c30 7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d 0 \(.*\) {'user': 'test'} (re)
+ 0 cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 1 6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1 d27fb9b066076fd921277a4b9e8b9cb48c95bc6a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 2 1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 3 4715cf767440ed891755448016c2b8cf70760c30 7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
$ hg debugobsolete --delete 1 --delete 3
deleted 2 obsolescence markers
$ hg debugobsolete
- cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 \(.*\) {'user': 'test'} (re)
- 1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 \(.*\) {'user': 'test'} (re)
+ cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+
+Test adding changeset after obsmarkers affecting it
+(eg: during pull, or unbundle)
+
+ $ mkcommit e
+ $ hg bundle -r . --base .~1 ../bundle-2.hg
+ 1 changesets found
+ $ getid .
+ $ hg --config extensions.strip= strip -r .
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ saved backup bundle to $TESTTMP/tmpe/issue4845/doindexrev/.hg/strip-backup/9bc153528424-ee80edd4-backup.hg (glob)
+ $ hg debugobsolete 9bc153528424ea266d13e57f9ff0d799dfe61e4b
+ $ hg unbundle ../bundle-2.hg
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+ (run 'hg update' to get a working copy)
+ $ hg log -G
+ @ 7:7ae79c5d60f0 (draft) [tip ] dd
+ |
+ | o 6:4715cf767440 (draft) [ ] d
+ |/
+ o 5:29346082e4a9 (draft) [ ] cc
+ |
+ o 3:d27fb9b06607 (draft) [ ] bb
+ |
+ | o 2:6fdef60fcbab (draft) [ ] b
+ |/
+ o 1:f9bd49731b0b (draft) [ ] aa
+
+
$ cd ..
-
--- a/tests/test-oldcgi.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-oldcgi.t Wed Jul 19 07:51:41 2017 -0500
@@ -4,7 +4,7 @@
$ hg init test
$ cat >hgweb.cgi <<HGWEB
- > #!/usr/bin/env python
+ > #!$PYTHON
> #
> # An example CGI script to use hgweb, edit as necessary
>
@@ -26,7 +26,7 @@
> HGWEBDIRCONF
$ cat >hgwebdir.cgi <<HGWEBDIR
- > #!/usr/bin/env python
+ > #!$PYTHON
> #
> # An example CGI script to export multiple hgweb repos, edit as necessary
>
@@ -62,15 +62,15 @@
$ chmod 755 hgwebdir.cgi
$ . "$TESTDIR/cgienv"
- $ python hgweb.cgi > page1
- $ python hgwebdir.cgi > page2
+ $ $PYTHON hgweb.cgi > page1
+ $ $PYTHON hgwebdir.cgi > page2
$ PATH_INFO="/test/"
$ PATH_TRANSLATED="/var/something/test.cgi"
$ REQUEST_URI="/test/test/"
$ SCRIPT_URI="http://hg.omnifarious.org/test/test/"
$ SCRIPT_URL="/test/test/"
- $ python hgwebdir.cgi > page3
+ $ $PYTHON hgwebdir.cgi > page3
$ grep -i error page1 page2 page3
[1]
--- a/tests/test-pager-legacy.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-pager-legacy.t Wed Jul 19 07:51:41 2017 -0500
@@ -14,7 +14,7 @@
> [extensions]
> pager=
> [pager]
- > pager = python $TESTTMP/fakepager.py
+ > pager = $PYTHON $TESTTMP/fakepager.py
> EOF
$ hg init repo
@@ -22,7 +22,7 @@
$ echo a >> a
$ hg add a
$ hg ci -m 'add a'
- $ for x in `python $TESTDIR/seq.py 1 10`; do
+ $ for x in `$PYTHON $TESTDIR/seq.py 1 10`; do
> echo a $x >> a
> hg ci -m "modify a $x"
> done
@@ -214,10 +214,10 @@
Pager should not override the exit code of other commands
$ cat >> $TESTTMP/fortytwo.py <<'EOF'
- > from mercurial import cmdutil, commands
+ > from mercurial import registrar, commands
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
- > @command('fortytwo', [], 'fortytwo', norepo=True)
+ > command = registrar.command(cmdtable)
+ > @command(b'fortytwo', [], 'fortytwo', norepo=True)
> def fortytwo(ui, *opts):
> ui.write('42\n')
> return 42
--- a/tests/test-pager.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-pager.t Wed Jul 19 07:51:41 2017 -0500
@@ -1,7 +1,11 @@
$ cat >> fakepager.py <<EOF
> import sys
+ > printed = False
> for line in sys.stdin:
> sys.stdout.write('paged! %r\n' % line)
+ > printed = True
+ > if not printed:
+ > sys.stdout.write('paged empty output!\n')
> EOF
Enable ui.formatted because pager won't fire without it, and set up
@@ -12,7 +16,7 @@
> formatted = yes
> color = no
> [pager]
- > pager = python $TESTTMP/fakepager.py
+ > pager = $PYTHON $TESTTMP/fakepager.py
> EOF
$ hg init repo
@@ -20,7 +24,7 @@
$ echo a >> a
$ hg add a
$ hg ci -m 'add a'
- $ for x in `python $TESTDIR/seq.py 1 10`; do
+ $ for x in `$PYTHON $TESTDIR/seq.py 1 10`; do
> echo a $x >> a
> hg ci -m "modify a $x"
> done
@@ -223,10 +227,10 @@
Pager should not override the exit code of other commands
$ cat >> $TESTTMP/fortytwo.py <<'EOF'
- > from mercurial import cmdutil, commands
+ > from mercurial import commands, registrar
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
- > @command('fortytwo', [], 'fortytwo', norepo=True)
+ > command = registrar.command(cmdtable)
+ > @command(b'fortytwo', [], 'fortytwo', norepo=True)
> def fortytwo(ui, *opts):
> ui.write('42\n')
> return 42
@@ -281,6 +285,15 @@
9: a 9
10: a 10
+A command with --output option:
+
+ $ hg cat -r0 a
+ paged! 'a\n'
+ $ hg cat -r0 a --output=-
+ paged! 'a\n'
+ $ hg cat -r0 a --output=out
+ $ rm out
+
Put annotate in the ignore list for pager:
$ cat >> $HGRCPATH <<EOF
> [pager]
--- a/tests/test-parse-date.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-parse-date.t Wed Jul 19 07:51:41 2017 -0500
@@ -17,13 +17,13 @@
$ hg ci -d "1150000000 14400" -m "rev 4 (merge)"
$ echo "fail" >> a
$ hg ci -d "should fail" -m "fail"
- abort: invalid date: 'should fail'
+ hg: parse error: invalid date: 'should fail'
[255]
$ hg ci -d "100000000000000000 1400" -m "fail"
- abort: date exceeds 32 bits: 100000000000000000
+ hg: parse error: date exceeds 32 bits: 100000000000000000
[255]
$ hg ci -d "100000 1400000" -m "fail"
- abort: impossible time zone offset: 1400000
+ hg: parse error: impossible time zone offset: 1400000
[255]
Check with local timezone other than GMT and with DST
--- a/tests/test-parseindex.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-parseindex.t Wed Jul 19 07:51:41 2017 -0500
@@ -53,7 +53,7 @@
> for r in cl:
> print short(cl.node(r))
> EOF
- $ python test.py
+ $ $PYTHON test.py
2 revisions:
7c31755bf9b5
26333235a41c
@@ -66,7 +66,7 @@
$ cd a
- $ python <<EOF
+ $ $PYTHON <<EOF
> from mercurial import changelog, vfs
> cl = changelog.changelog(vfs.vfs('.hg/store'))
> print 'good heads:'
@@ -128,7 +128,7 @@
$ hg clone --pull -q --config phases.publish=False ../a segv
$ rm -R limit/.hg/cache segv/.hg/cache
- $ python <<EOF
+ $ $PYTHON <<EOF
> data = open("limit/.hg/store/00changelog.i", "rb").read()
> for n, p in [('limit', '\0\0\0\x02'), ('segv', '\0\x01\0\0')]:
> # corrupt p1 at rev0 and p2 at rev1
@@ -167,13 +167,13 @@
> print inst
> EOF
- $ python test.py limit/.hg/store
+ $ $PYTHON test.py limit/.hg/store
reachableroots: parent out of range
compute_phases_map_sets: parent out of range
index_headrevs: parent out of range
find_gca_candidates: parent out of range
find_deepest: parent out of range
- $ python test.py segv/.hg/store
+ $ $PYTHON test.py segv/.hg/store
reachableroots: parent out of range
compute_phases_map_sets: parent out of range
index_headrevs: parent out of range
--- a/tests/test-parseindex2.py Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-parseindex2.py Wed Jul 19 07:51:41 2017 -0500
@@ -14,9 +14,11 @@
nullrev,
)
from mercurial import (
- parsers,
+ policy,
)
+parsers = policy.importmod(r'parsers')
+
# original python implementation
def gettype(q):
return int(q & 0xFFFF)
@@ -114,7 +116,7 @@
# of the currently-running Python interpreter, so we monkey-patch
# sys.hexversion to simulate using different versions.
code = ("import sys; sys.hexversion=%s; "
- "import mercurial.parsers" % hexversion)
+ "import mercurial.cext.parsers" % hexversion)
cmd = "python -c \"%s\"" % code
# We need to do these tests inside a subprocess because parser.c's
# version-checking code happens inside the module init function, and
--- a/tests/test-patch-offset.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-patch-offset.t Wed Jul 19 07:51:41 2017 -0500
@@ -23,7 +23,7 @@
within this file. If the offset isn't tracked then the hunks can be
applied to the wrong lines of this file.
- $ python ../writepatterns.py a 34X 10A 1B 10A 1C 10A 1B 10A 1D 10A 1B 10A 1E 10A 1B 10A
+ $ $PYTHON ../writepatterns.py a 34X 10A 1B 10A 1C 10A 1B 10A 1D 10A 1B 10A 1E 10A 1B 10A
$ hg commit -Am adda
adding a
@@ -76,7 +76,7 @@
compare imported changes against reference file
- $ python ../writepatterns.py aref 34X 10A 1B 1a 9A 1C 10A 1B 10A 1D 10A 1B 1a 9A 1E 10A 1B 1a 9A
+ $ $PYTHON ../writepatterns.py aref 34X 10A 1B 1a 9A 1C 10A 1B 10A 1D 10A 1B 1a 9A 1E 10A 1B 1a 9A
$ diff aref a
$ cd ..
--- a/tests/test-patch.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-patch.t Wed Jul 19 07:51:41 2017 -0500
@@ -6,7 +6,7 @@
> EOF
$ echo "[ui]" >> $HGRCPATH
- $ echo "patch=python ../patchtool.py" >> $HGRCPATH
+ $ echo "patch=$PYTHON ../patchtool.py" >> $HGRCPATH
$ hg init a
$ cd a
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-patchbomb-bookmark.t Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,168 @@
+Create @ bookmark as main reference
+
+ $ hg init repo
+ $ cd repo
+ $ echo "[extensions]" >> $HGRCPATH
+ $ echo "patchbomb=" >> $HGRCPATH
+ $ hg book @
+
+Create a dummy revision that must never be exported
+
+ $ echo no > no
+ $ hg ci -Amno -d '6 0'
+ adding no
+
+Create a feature and use -B
+
+ $ hg book booktest
+ $ echo first > a
+ $ hg ci -Amfirst -d '7 0'
+ adding a
+ $ echo second > b
+ $ hg ci -Amsecond -d '8 0'
+ adding b
+ $ hg email --date '1981-1-1 0:1' -n -t foo -s bookmark -B booktest
+ From [test]: test
+ this patch series consists of 2 patches.
+
+
+ Write the introductory message for the patch series.
+
+ Cc:
+
+ displaying [PATCH 0 of 2] bookmark ...
+ Content-Type: text/plain; charset="us-ascii"
+ MIME-Version: 1.0
+ Content-Transfer-Encoding: 7bit
+ Subject: [PATCH 0 of 2] bookmark
+ Message-Id: <patchbomb.347155260@*> (glob)
+ User-Agent: Mercurial-patchbomb/* (glob)
+ Date: Thu, 01 Jan 1981 00:01:00 +0000
+ From: test
+ To: foo
+
+
+ displaying [PATCH 1 of 2] first ...
+ Content-Type: text/plain; charset="us-ascii"
+ MIME-Version: 1.0
+ Content-Transfer-Encoding: 7bit
+ Subject: [PATCH 1 of 2] first
+ X-Mercurial-Node: accde9b8b6dce861c185d0825c1affc09a79cb26
+ X-Mercurial-Series-Index: 1
+ X-Mercurial-Series-Total: 2
+ Message-Id: <accde9b8b6dce861c185.347155261@*> (glob)
+ X-Mercurial-Series-Id: <accde9b8b6dce861c185.347155261@*> (glob)
+ In-Reply-To: <patchbomb.347155260@*> (glob)
+ References: <patchbomb.347155260@*> (glob)
+ User-Agent: Mercurial-patchbomb/* (glob)
+ Date: Thu, 01 Jan 1981 00:01:01 +0000
+ From: test
+ To: foo
+
+ # HG changeset patch
+ # User test
+ # Date 7 0
+ # Thu Jan 01 00:00:07 1970 +0000
+ # Node ID accde9b8b6dce861c185d0825c1affc09a79cb26
+ # Parent 043bd3889e5aaf7d88fe3713cf425f782ad2fb71
+ first
+
+ diff -r 043bd3889e5a -r accde9b8b6dc a
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/a Thu Jan 01 00:00:07 1970 +0000
+ @@ -0,0 +1,1 @@
+ +first
+
+ displaying [PATCH 2 of 2] second ...
+ Content-Type: text/plain; charset="us-ascii"
+ MIME-Version: 1.0
+ Content-Transfer-Encoding: 7bit
+ Subject: [PATCH 2 of 2] second
+ X-Mercurial-Node: 417defd1559c396ba06a44dce8dc1c2d2d653f3f
+ X-Mercurial-Series-Index: 2
+ X-Mercurial-Series-Total: 2
+ Message-Id: <417defd1559c396ba06a.347155262@*> (glob)
+ X-Mercurial-Series-Id: <accde9b8b6dce861c185.347155261@*> (glob)
+ In-Reply-To: <patchbomb.347155260@*> (glob)
+ References: <patchbomb.347155260@*> (glob)
+ User-Agent: Mercurial-patchbomb/* (glob)
+ Date: Thu, 01 Jan 1981 00:01:02 +0000
+ From: test
+ To: foo
+
+ # HG changeset patch
+ # User test
+ # Date 8 0
+ # Thu Jan 01 00:00:08 1970 +0000
+ # Node ID 417defd1559c396ba06a44dce8dc1c2d2d653f3f
+ # Parent accde9b8b6dce861c185d0825c1affc09a79cb26
+ second
+
+ diff -r accde9b8b6dc -r 417defd1559c b
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/b Thu Jan 01 00:00:08 1970 +0000
+ @@ -0,0 +1,1 @@
+ +second
+
+Do the same and combine with -o only one must be exported
+
+ $ cd ..
+ $ hg clone repo repo2
+ updating to bookmark @
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ cd repo
+ $ hg up @
+ 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ (activating bookmark @)
+ $ hg book outgoing
+ $ echo 1 > x
+ $ hg ci -Am1 -d '8 0'
+ adding x
+ created new head
+ $ hg push ../repo2 -B outgoing
+ pushing to ../repo2
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ exporting bookmark outgoing
+ $ echo 2 > y
+ $ hg ci -Am2 -d '9 0'
+ adding y
+ $ hg email --date '1982-1-1 0:1' -n -t foo -s bookmark -B outgoing -o ../repo2
+ comparing with ../repo2
+ From [test]: test
+ this patch series consists of 1 patches.
+
+ Cc:
+
+ displaying [PATCH] bookmark ...
+ Content-Type: text/plain; charset="us-ascii"
+ MIME-Version: 1.0
+ Content-Transfer-Encoding: 7bit
+ Subject: [PATCH] bookmark
+ X-Mercurial-Node: 8dab2639fd35f1e337ad866c372a5c44f1064e3c
+ X-Mercurial-Series-Index: 1
+ X-Mercurial-Series-Total: 1
+ Message-Id: <8dab2639fd35f1e337ad.378691260@*> (glob)
+ X-Mercurial-Series-Id: <8dab2639fd35f1e337ad.378691260@*> (glob)
+ User-Agent: Mercurial-patchbomb/* (glob)
+ Date: Fri, 01 Jan 1982 00:01:00 +0000
+ From: test
+ To: foo
+
+ # HG changeset patch
+ # User test
+ # Date 9 0
+ # Thu Jan 01 00:00:09 1970 +0000
+ # Node ID 8dab2639fd35f1e337ad866c372a5c44f1064e3c
+ # Parent 0b24b8316483bf30bfc3e4d4168e922b169dbe66
+ 2
+
+ diff -r 0b24b8316483 -r 8dab2639fd35 y
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/y Thu Jan 01 00:00:09 1970 +0000
+ @@ -0,0 +1,1 @@
+ +2
+
--- a/tests/test-patchbomb-tls.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-patchbomb-tls.t Wed Jul 19 07:51:41 2017 -0500
@@ -5,7 +5,7 @@
$ CERTSDIR="$TESTDIR/sslcerts"
$ cat "$CERTSDIR/priv.pem" "$CERTSDIR/pub.pem" >> server.pem
- $ python "$TESTDIR/dummysmtpd.py" -p $HGPORT --pid-file a.pid -d \
+ $ $PYTHON "$TESTDIR/dummysmtpd.py" -p $HGPORT --pid-file a.pid -d \
> --tls smtps --certificate `pwd`/server.pem
listening at localhost:$HGPORT (?)
$ cat a.pid >> $DAEMON_PIDS
@@ -67,6 +67,7 @@
warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
(using CA certificates from *; if you see this message, your Mercurial install is not properly configured; see https://mercurial-scm.org/wiki/SecureConnections for how to configure Mercurial to avoid this message) (glob) (?)
+ (the full certificate chain may not be available locally; see "hg help debugssl") (windows !)
(?i)abort: .*?certificate.verify.failed.* (re)
[255]
@@ -118,6 +119,7 @@
warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+ (the full certificate chain may not be available locally; see "hg help debugssl") (windows !)
(?i)abort: .*?certificate.verify.failed.* (re)
[255]
--- a/tests/test-patchbomb.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-patchbomb.t Wed Jul 19 07:51:41 2017 -0500
@@ -22,7 +22,7 @@
> skipblank = False
> print l,
> EOF
- $ FILTERBOUNDARY="python `pwd`/prune-blank-after-boundary.py"
+ $ FILTERBOUNDARY="$PYTHON `pwd`/prune-blank-after-boundary.py"
$ echo "[format]" >> $HGRCPATH
$ echo "usegeneraldelta=yes" >> $HGRCPATH
$ echo "[extensions]" >> $HGRCPATH
--- a/tests/test-paths.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-paths.t Wed Jul 19 07:51:41 2017 -0500
@@ -84,7 +84,46 @@
]
[1]
-password should be masked in plain output, but not in machine-readable output:
+log template:
+
+ (behaves as a {name: path-string} dict by default)
+
+ $ hg log -rnull -T '{peerpaths}\n'
+ dupe=$TESTTMP/b#tip expand=$TESTTMP/a/$SOMETHING/bar (glob)
+ $ hg log -rnull -T '{join(peerpaths, "\n")}\n'
+ dupe=$TESTTMP/b#tip (glob)
+ expand=$TESTTMP/a/$SOMETHING/bar (glob)
+ $ hg log -rnull -T '{peerpaths % "{name}: {path}\n"}'
+ dupe: $TESTTMP/a/$SOMETHING/bar (glob)
+ expand: $TESTTMP/a/$SOMETHING/bar (glob)
+ $ hg log -rnull -T '{get(peerpaths, "dupe")}\n'
+ $TESTTMP/a/$SOMETHING/bar (glob)
+
+ (but a path is actually a dict of url and sub-options)
+
+ $ hg log -rnull -T '{join(get(peerpaths, "dupe"), "\n")}\n'
+ url=$TESTTMP/b#tip (glob)
+ pushurl=https://example.com/dupe
+ $ hg log -rnull -T '{get(peerpaths, "dupe") % "{key}: {value}\n"}'
+ url: $TESTTMP/b#tip (glob)
+ pushurl: https://example.com/dupe
+ $ hg log -rnull -T '{get(get(peerpaths, "dupe"), "pushurl")}\n'
+ https://example.com/dupe
+
+ (so there's weird behavior)
+
+ $ hg log -rnull -T '{get(peerpaths, "dupe")|count}\n'
+ 2
+ $ hg log -rnull -T '{get(peerpaths, "dupe")|stringify|count}\n'
+ [0-9]{2,} (re)
+
+ (in JSON, it's a dict of dicts)
+
+ $ hg log -rnull -T '{peerpaths|json}\n' | sed 's|\\\\|/|g'
+ {"dupe": {"pushurl": "https://example.com/dupe", "url": "$TESTTMP/b#tip"}, "expand": {"url": "$TESTTMP/a/$SOMETHING/bar"}}
+
+password should be masked in plain output, but not in machine-readable/template
+output:
$ echo 'insecure = http://foo:insecure@example.com/' >> .hg/hgrc
$ hg paths insecure
@@ -96,6 +135,8 @@
"url": "http://foo:insecure@example.com/"
}
]
+ $ hg log -rnull -T '{get(peerpaths, "insecure")}\n'
+ http://foo:insecure@example.com/
zeroconf wraps ui.configitems(), which shouldn't crash at least:
--- a/tests/test-permissions.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-permissions.t Wed Jul 19 07:51:41 2017 -0500
@@ -61,10 +61,17 @@
M a
? dir/a
$ chmod -rx dir
+
+#if no-fsmonitor
+
+(fsmonitor makes "hg status" avoid accessing to "dir")
+
$ hg status
dir: Permission denied
M a
+#endif
+
Reenable perm to allow deletion:
$ chmod +rx dir
--- a/tests/test-phases-exchange.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-phases-exchange.t Wed Jul 19 07:51:41 2017 -0500
@@ -1,5 +1,10 @@
#require killdaemons
+ $ cat >> $HGRCPATH << EOF
+ > [extensions]
+ > phasereport=$TESTDIR/testlib/ext-phase-report.py
+ > EOF
+
$ hgph() { hg log -G --template "{rev} {phase} {desc} - {node|short}\n" $*; }
$ mkcommit() {
@@ -13,9 +18,13 @@
$ hg init alpha
$ cd alpha
$ mkcommit a-A
+ test-debug-phase: new rev 0: x -> 1
$ mkcommit a-B
+ test-debug-phase: new rev 1: x -> 1
$ mkcommit a-C
+ test-debug-phase: new rev 2: x -> 1
$ mkcommit a-D
+ test-debug-phase: new rev 3: x -> 1
$ hgph
@ 3 draft a-D - b555f63b6063
|
@@ -34,6 +43,10 @@
adding manifests
adding file changes
added 2 changesets with 2 changes to 2 files
+ test-debug-phase: new rev 0: x -> 0
+ test-debug-phase: new rev 1: x -> 0
+ test-debug-phase: move rev 0: 1 -> 0
+ test-debug-phase: move rev 1: 1 -> 0
$ hgph
@ 3 draft a-D - b555f63b6063
|
@@ -52,6 +65,7 @@
$ hg up -q
$ mkcommit b-A
+ test-debug-phase: new rev 2: x -> 1
$ hgph
@ 2 draft b-A - f54f1bb90ff3
|
@@ -66,6 +80,8 @@
adding manifests
adding file changes
added 2 changesets with 2 changes to 2 files (+1 heads)
+ test-debug-phase: new rev 3: x -> 0
+ test-debug-phase: new rev 4: x -> 0
(run 'hg heads' to see heads, 'hg merge' to merge)
$ hgph
o 4 public a-D - b555f63b6063
@@ -96,6 +112,7 @@
pushing to ../beta
searching for changes
no changes found
+ test-debug-phase: move rev 2: 1 -> 0
[1]
$ hgph
@ 3 draft a-D - b555f63b6063
@@ -110,6 +127,7 @@
pushing to ../beta
searching for changes
no changes found
+ test-debug-phase: move rev 3: 1 -> 0
[1]
$ hgph
@ 3 public a-D - b555f63b6063
@@ -130,6 +148,7 @@
adding manifests
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
+ test-debug-phase: new rev 4: x -> 0
(run 'hg heads' to see heads, 'hg merge' to merge)
$ cd ../beta
@@ -148,6 +167,7 @@
pulling from ../alpha
searching for changes
no changes found
+ test-debug-phase: move rev 2: 1 -> 0
$ hgph
o 4 public a-D - b555f63b6063
|
@@ -182,6 +202,11 @@
adding manifests
adding file changes
added 5 changesets with 5 changes to 5 files (+1 heads)
+ test-debug-phase: new rev 0: x -> 1
+ test-debug-phase: new rev 1: x -> 1
+ test-debug-phase: new rev 2: x -> 1
+ test-debug-phase: new rev 3: x -> 1
+ test-debug-phase: new rev 4: x -> 1
(run 'hg heads' to see heads, 'hg merge' to merge)
$ hgph
o 4 draft a-D - b555f63b6063
@@ -210,6 +235,9 @@
adding manifests
adding file changes
added 3 changesets with 3 changes to 3 files
+ test-debug-phase: new rev 0: x -> 1
+ test-debug-phase: new rev 1: x -> 1
+ test-debug-phase: new rev 2: x -> 1
(run 'hg update' to get a working copy)
$ hgph
o 2 draft a-C - 54acac6f23ab
@@ -228,6 +256,7 @@
adding manifests
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
+ test-debug-phase: new rev 3: x -> 1
(run 'hg heads' to see heads, 'hg merge' to merge)
$ hgph
o 3 draft b-A - f54f1bb90ff3
@@ -250,6 +279,10 @@
adding manifests
adding file changes
added 1 changesets with 1 changes to 1 files
+ test-debug-phase: move rev 0: 1 -> 0
+ test-debug-phase: move rev 1: 1 -> 0
+ test-debug-phase: move rev 2: 1 -> 0
+ test-debug-phase: new rev 4: x -> 0
(run 'hg update' to get a working copy)
$ hgph # f54f1bb90ff3 stay draft, not ancestor of -r
o 4 public a-D - b555f63b6063
@@ -267,7 +300,9 @@
$ hg up -q f54f1bb90ff3
$ mkcommit n-A
+ test-debug-phase: new rev 5: x -> 1
$ mkcommit n-B
+ test-debug-phase: new rev 6: x -> 1
$ hgph
@ 6 draft n-B - 145e75495359
|
@@ -291,6 +326,12 @@
adding manifests
adding file changes
added 2 changesets with 2 changes to 2 files
+ test-debug-phase: move rev 0: 1 -> 0
+ test-debug-phase: move rev 1: 1 -> 0
+ test-debug-phase: move rev 3: 1 -> 0
+ test-debug-phase: move rev 4: 1 -> 0
+ test-debug-phase: new rev 5: x -> 1
+ test-debug-phase: new rev 6: x -> 1
(run 'hg update' to get a working copy)
$ hgph
o 6 draft n-B - 145e75495359
@@ -330,6 +371,8 @@
adding manifests
adding file changes
added 2 changesets with 2 changes to 2 files
+ test-debug-phase: new rev 5: x -> 1
+ test-debug-phase: new rev 6: x -> 1
(run 'hg update' to get a working copy)
$ hgph
o 6 draft n-B - 145e75495359
@@ -355,6 +398,9 @@
pulling from ../alpha
searching for changes
no changes found
+ test-debug-phase: move rev 3: 1 -> 0
+ test-debug-phase: move rev 5: 1 -> 0
+ test-debug-phase: move rev 6: 1 -> 0
$ hgph
@ 6 public n-B - 145e75495359
|
@@ -385,6 +431,8 @@
pushing to ../alpha
searching for changes
no changes found
+ test-debug-phase: move rev 5: 1 -> 0
+ test-debug-phase: move rev 6: 1 -> 0
[1]
$ cd ..
$ cd alpha
@@ -448,10 +496,14 @@
summary: a-A
$ mkcommit a-E
+ test-debug-phase: new rev 7: x -> 1
$ mkcommit a-F
+ test-debug-phase: new rev 8: x -> 1
$ mkcommit a-G
+ test-debug-phase: new rev 9: x -> 1
$ hg up d6bcb4f74035 -q
$ mkcommit a-H
+ test-debug-phase: new rev 10: x -> 1
created new head
$ hgph
@ 10 draft a-H - 967b449fbc94
@@ -518,6 +570,8 @@
adding manifests
adding file changes
added 2 changesets with 2 changes to 2 files
+ test-debug-phase: new rev 7: x -> 1
+ test-debug-phase: new rev 8: x -> 1
$ hgph
@ 10 draft a-H - 967b449fbc94
|
@@ -573,6 +627,10 @@
adding manifests
adding file changes
added 2 changesets with 2 changes to 2 files
+ test-debug-phase: new rev 5: x -> 0
+ test-debug-phase: new rev 6: x -> 0
+ test-debug-phase: move rev 7: 1 -> 0
+ test-debug-phase: move rev 8: 1 -> 0
$ hgph # again f54f1bb90ff3, d6bcb4f74035 and 145e75495359 stay draft,
> # not ancestor of -r
o 8 public a-F - b740e3e5c05d
@@ -601,6 +659,8 @@
pushing to ../alpha
searching for changes
no changes found
+ test-debug-phase: move rev 7: 1 -> 0
+ test-debug-phase: move rev 8: 1 -> 0
[1]
$ hgph
o 6 public a-F - b740e3e5c05d
@@ -651,6 +711,9 @@
adding manifests
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
+ test-debug-phase: move rev 2: 1 -> 0
+ test-debug-phase: move rev 5: 1 -> 0
+ test-debug-phase: new rev 9: x -> 1
$ hgph
@ 10 draft a-H - 967b449fbc94
|
@@ -706,6 +769,9 @@
pushing to ../alpha
searching for changes
no changes found
+ test-debug-phase: move rev 10: 1 -> 0
+ test-debug-phase: move rev 6: 1 -> 0
+ test-debug-phase: move rev 9: 1 -> 0
[1]
$ hgph
o 9 public a-H - 967b449fbc94
@@ -760,6 +826,8 @@
$ hg -R ../alpha --config extensions.strip= strip --no-backup 967b449fbc94
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg phase --force --draft b740e3e5c05d 967b449fbc94
+ test-debug-phase: move rev 8: 0 -> 1
+ test-debug-phase: move rev 9: 0 -> 1
$ hg push -fv ../alpha
pushing to ../alpha
searching for changes
@@ -772,6 +840,9 @@
adding manifests
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
+ test-debug-phase: new rev 10: x -> 0
+ test-debug-phase: move rev 8: 1 -> 0
+ test-debug-phase: move rev 9: 1 -> 0
$ hgph
o 9 public a-H - 967b449fbc94
|
@@ -826,6 +897,7 @@
$ cd ../alpha
$ mkcommit A-secret --config phases.new-commit=2
+ test-debug-phase: new rev 11: x -> 2
$ hgph
@ 11 secret A-secret - 435b5d83910c
|
@@ -858,6 +930,7 @@
adding manifests
adding file changes
added 1 changesets with 1 changes to 1 files
+ test-debug-phase: new rev 10: x -> 1
(run 'hg update' to get a working copy)
$ hgph -R ../mu
o 10 draft A-secret - 435b5d83910c
@@ -886,6 +959,7 @@
pulling from ../mu
searching for changes
no changes found
+ test-debug-phase: move rev 11: 2 -> 1
$ hgph
@ 11 draft A-secret - 435b5d83910c
|
@@ -916,6 +990,8 @@
appear on the remote side.
$ hg -R ../mu phase --secret --force 967b449fbc94
+ test-debug-phase: move rev 9: 0 -> 2
+ test-debug-phase: move rev 10: 1 -> 2
$ hg push -r 435b5d83910c ../mu
pushing to ../mu
searching for changes
@@ -929,6 +1005,8 @@
adding manifests
adding file changes
added 0 changesets with 0 changes to 2 files
+ test-debug-phase: move rev 9: 2 -> 0
+ test-debug-phase: move rev 10: 2 -> 1
$ hgph -R ../mu
o 10 draft A-secret - 435b5d83910c
|
@@ -957,6 +1035,7 @@
$ hg up -q 967b449fbc94 # create a new root for draft
$ mkcommit 'alpha-more'
+ test-debug-phase: new rev 12: x -> 1
created new head
$ hg push -fr . ../mu
pushing to ../mu
@@ -965,10 +1044,13 @@
adding manifests
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
+ test-debug-phase: new rev 11: x -> 1
$ cd ../mu
$ hg phase --secret --force 1c5cfd894796
+ test-debug-phase: move rev 11: 1 -> 2
$ hg up -q 435b5d83910c
$ mkcommit 'mu-more'
+ test-debug-phase: new rev 12: x -> 1
$ cd ../alpha
$ hg pull ../mu
pulling from ../mu
@@ -977,6 +1059,7 @@
adding manifests
adding file changes
added 1 changesets with 1 changes to 1 files
+ test-debug-phase: new rev 13: x -> 1
(run 'hg update' to get a working copy)
$ hgph
o 13 draft mu-more - 5237fb433fc8
@@ -1012,6 +1095,11 @@
$ cd ..
$ hg clone -qU -r b555f63b6063 -r f54f1bb90ff3 beta gamma
+ test-debug-phase: new rev 0: x -> 0
+ test-debug-phase: new rev 1: x -> 0
+ test-debug-phase: new rev 2: x -> 0
+ test-debug-phase: new rev 3: x -> 0
+ test-debug-phase: new rev 4: x -> 0
# pathological case are
#
@@ -1020,7 +1108,9 @@
# * repo have uncommon changeset
$ hg -R beta phase --secret --force f54f1bb90ff3
+ test-debug-phase: move rev 2: 0 -> 2
$ hg -R gamma phase --draft --force f54f1bb90ff3
+ test-debug-phase: move rev 2: 0 -> 1
$ cd gamma
$ hg pull ../beta
@@ -1030,6 +1120,8 @@
adding manifests
adding file changes
added 2 changesets with 2 changes to 2 files
+ test-debug-phase: new rev 5: x -> 0
+ test-debug-phase: new rev 6: x -> 0
(run 'hg update' to get a working copy)
$ hg phase f54f1bb90ff3
2: draft
@@ -1083,6 +1175,7 @@
# make sure there is no secret so we can use a copy clone
$ hg -R mu phase --draft 'secret()'
+ test-debug-phase: move rev 11: 2 -> 1
$ hg clone -U mu Tau
$ hgph -R Tau
@@ -1169,11 +1262,29 @@
adding manifests
adding file changes
added 14 changesets with 14 changes to 14 files (+3 heads)
+ test-debug-phase: new rev 0: x -> 0
+ test-debug-phase: new rev 1: x -> 0
+ test-debug-phase: new rev 2: x -> 0
+ test-debug-phase: new rev 3: x -> 0
+ test-debug-phase: new rev 4: x -> 0
+ test-debug-phase: new rev 5: x -> 0
+ test-debug-phase: new rev 6: x -> 0
+ test-debug-phase: new rev 7: x -> 0
+ test-debug-phase: new rev 8: x -> 0
+ test-debug-phase: new rev 9: x -> 0
+ test-debug-phase: new rev 10: x -> 0
+ test-debug-phase: new rev 11: x -> 0
+ test-debug-phase: new rev 12: x -> 0
+ test-debug-phase: new rev 13: x -> 0
$ chmod -R +w .hg
2. Test that failed phases movement are reported
$ hg phase --force --draft 3
+ test-debug-phase: move rev 3: 0 -> 1
+ test-debug-phase: move rev 7: 0 -> 1
+ test-debug-phase: move rev 8: 0 -> 1
+ test-debug-phase: move rev 9: 0 -> 1
$ chmod -R -w .hg
$ hg push ../Phi
pushing to ../Phi
@@ -1187,11 +1298,28 @@
#endif
-Test that clone behaves like pull and doesn't
-publish changesets as plain push does
+Test that clone behaves like pull and doesn't publish changesets as plain push
+does. The conditional output accounts for changes in the conditional block
+above.
+#if unix-permissions no-root
$ hg -R Upsilon phase -q --force --draft 2
+ test-debug-phase: move rev 2: 0 -> 1
+#else
+ $ hg -R Upsilon phase -q --force --draft 2
+ test-debug-phase: move rev 2: 0 -> 1
+ test-debug-phase: move rev 3: 0 -> 1
+ test-debug-phase: move rev 7: 0 -> 1
+ test-debug-phase: move rev 8: 0 -> 1
+ test-debug-phase: move rev 9: 0 -> 1
+#endif
+
$ hg clone -q Upsilon Pi -r 7
+ test-debug-phase: new rev 0: x -> 0
+ test-debug-phase: new rev 1: x -> 0
+ test-debug-phase: new rev 2: x -> 0
+ test-debug-phase: new rev 3: x -> 0
+ test-debug-phase: new rev 4: x -> 0
$ hgph Upsilon -r 'min(draft())'
o 2 draft a-C - 54acac6f23ab
|
@@ -1201,6 +1329,9 @@
pushing to Pi
searching for changes
no changes found
+ test-debug-phase: move rev 2: 1 -> 0
+ test-debug-phase: move rev 3: 1 -> 0
+ test-debug-phase: move rev 7: 1 -> 0
[1]
$ hgph Upsilon -r 'min(draft())'
o 8 draft a-F - b740e3e5c05d
@@ -1214,6 +1345,8 @@
adding manifests
adding file changes
added 1 changesets with 1 changes to 1 files
+ test-debug-phase: new rev 5: x -> 0
+ test-debug-phase: move rev 8: 1 -> 0
$ hgph Upsilon -r 'min(draft())'
o 9 draft a-G - 3e27b6f1eee1
--- a/tests/test-phases.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-phases.t Wed Jul 19 07:51:41 2017 -0500
@@ -1,3 +1,9 @@
+
+ $ cat >> $HGRCPATH << EOF
+ > [extensions]
+ > phasereport=$TESTDIR/testlib/ext-phase-report.py
+ > EOF
+
$ hglog() { hg log --template "{rev} {phaseidx} {desc}\n" $*; }
$ mkcommit() {
> echo "$1" > "$1"
@@ -19,6 +25,7 @@
-1: public
$ mkcommit A
+ test-debug-phase: new rev 0: x -> 1
New commit are draft by default
@@ -28,6 +35,7 @@
Following commit are draft too
$ mkcommit B
+ test-debug-phase: new rev 1: x -> 1
$ hglog
1 1 B
@@ -36,6 +44,8 @@
Draft commit are properly created over public one:
$ hg phase --public .
+ test-debug-phase: move rev 0: 1 -> 0
+ test-debug-phase: move rev 1: 1 -> 0
$ hg phase
1: public
$ hglog
@@ -43,7 +53,9 @@
0 0 A
$ mkcommit C
+ test-debug-phase: new rev 2: x -> 1
$ mkcommit D
+ test-debug-phase: new rev 3: x -> 1
$ hglog
3 1 D
@@ -54,6 +66,7 @@
Test creating changeset as secret
$ mkcommit E --config phases.new-commit='secret'
+ test-debug-phase: new rev 4: x -> 2
$ hglog
4 2 E
3 1 D
@@ -64,6 +77,7 @@
Test the secret property is inherited
$ mkcommit H
+ test-debug-phase: new rev 5: x -> 2
$ hglog
5 2 H
4 2 E
@@ -76,6 +90,7 @@
$ hg up -q 1
$ mkcommit "B'"
+ test-debug-phase: new rev 6: x -> 1
created new head
$ hglog
6 1 B'
@@ -92,6 +107,8 @@
6: draft
4: secret
$ hg ci -m "merge B' and E"
+ test-debug-phase: new rev 7: x -> 2
+
$ hglog
7 2 merge B' and E
6 1 B'
@@ -133,6 +150,11 @@
adding manifests
adding file changes
added 5 changesets with 5 changes to 5 files (+1 heads)
+ test-debug-phase: new rev 0: x -> 0
+ test-debug-phase: new rev 1: x -> 0
+ test-debug-phase: new rev 2: x -> 1
+ test-debug-phase: new rev 3: x -> 1
+ test-debug-phase: new rev 4: x -> 1
$ hglog
7 2 merge B' and E
6 1 B'
@@ -158,6 +180,7 @@
$ hg up -q 4 # B'
$ mkcommit Z --config phases.new-commit=secret
+ test-debug-phase: new rev 5: x -> 2
$ hg phase .
5: secret
@@ -167,6 +190,7 @@
$ cd ../initialrepo
$ hg up -q 6 #B'
$ mkcommit I
+ test-debug-phase: new rev 8: x -> 1
created new head
$ hg push ../push-dest
pushing to ../push-dest
@@ -175,12 +199,14 @@
adding manifests
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
+ test-debug-phase: new rev 6: x -> 1
:note: The "(+1 heads)" is wrong as we do not had any visible head
check that branch cache with "served" filter are properly computed and stored
$ ls ../push-dest/.hg/cache/branch2*
+ ../push-dest/.hg/cache/branch2-base
../push-dest/.hg/cache/branch2-served
$ cat ../push-dest/.hg/cache/branch2-served
6d6770faffce199f1fddd1cf87f6f026138cf061 6 465891ffab3c47a3c23792f7dc84156e19a90722
@@ -191,6 +217,7 @@
5:2713879da13d6eea1ff22b442a5a87cb31a7ce6a secret
3:b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e draft
$ ls ../push-dest/.hg/cache/branch2*
+ ../push-dest/.hg/cache/branch2-base
../push-dest/.hg/cache/branch2-served
../push-dest/.hg/cache/branch2-visible
$ cat ../push-dest/.hg/cache/branch2-served
@@ -220,6 +247,11 @@
adding manifests
adding file changes
added 5 changesets with 5 changes to 5 files (+1 heads)
+ test-debug-phase: new rev 0: x -> 0
+ test-debug-phase: new rev 1: x -> 0
+ test-debug-phase: new rev 2: x -> 0
+ test-debug-phase: new rev 3: x -> 0
+ test-debug-phase: new rev 4: x -> 0
(run 'hg heads' to see heads, 'hg merge' to merge)
$ hglog
4 0 B'
@@ -240,6 +272,11 @@
(during local clone)
$ hg clone -qU initialrepo clone-dest
+ test-debug-phase: new rev 0: x -> 0
+ test-debug-phase: new rev 1: x -> 0
+ test-debug-phase: new rev 2: x -> 0
+ test-debug-phase: new rev 3: x -> 0
+ test-debug-phase: new rev 4: x -> 0
$ hglog -R clone-dest
4 0 B'
3 0 D
@@ -438,6 +475,7 @@
(with -r option)
$ hg phase --public -r 2
+ test-debug-phase: move rev 2: 1 -> 0
$ hg log -G --template "{rev} {phase} {desc}\n"
@ 7 secret merge B' and E
|\
@@ -461,6 +499,7 @@
(without -r option)
$ hg phase --draft --force 2
+ test-debug-phase: move rev 2: 0 -> 1
$ hg log -G --template "{rev} {phase} {desc}\n"
@ 7 secret merge B' and E
|\
@@ -482,6 +521,8 @@
move changeset forward and backward
$ hg phase --draft --force 1::4
+ test-debug-phase: move rev 1: 0 -> 1
+ test-debug-phase: move rev 4: 2 -> 1
$ hg log -G --template "{rev} {phase} {desc}\n"
@ 7 secret merge B' and E
|\
@@ -502,7 +543,14 @@
test partial failure
$ hg phase --public 7
+ test-debug-phase: move rev 1: 1 -> 0
+ test-debug-phase: move rev 2: 1 -> 0
+ test-debug-phase: move rev 3: 1 -> 0
+ test-debug-phase: move rev 4: 1 -> 0
+ test-debug-phase: move rev 6: 1 -> 0
+ test-debug-phase: move rev 7: 2 -> 0
$ hg phase --draft '5 or 7'
+ test-debug-phase: move rev 5: 2 -> 1
cannot move 1 changesets to a higher phase, use --force
phase changed for 1 changesets
[1]
@@ -545,6 +593,7 @@
(making a changeset hidden; H in that case)
$ hg debugobsolete `hg id --debug -r 5`
+ obsoleted 1 changesets
$ cd ..
$ hg clone initialrepo clonewithobs
@@ -553,6 +602,13 @@
adding manifests
adding file changes
added 7 changesets with 6 changes to 6 files
+ test-debug-phase: new rev 0: x -> 0
+ test-debug-phase: new rev 1: x -> 0
+ test-debug-phase: new rev 2: x -> 0
+ test-debug-phase: new rev 3: x -> 0
+ test-debug-phase: new rev 4: x -> 0
+ test-debug-phase: new rev 5: x -> 0
+ test-debug-phase: new rev 6: x -> 0
updating to branch default
6 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cd clonewithobs
--- a/tests/test-profile.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-profile.t Wed Jul 19 07:51:41 2017 -0500
@@ -4,10 +4,24 @@
$ hg init a
$ cd a
-#if lsprof
test --profile
+ $ hg st --profile 2>&1 | grep Sample
+ Sample count: \d+ (re)
+
+Abreviated version (should be "Abbreviated" — kept as-is? no:) Abbreviated version
+
+ $ hg st --prof 2>&1 | grep Sample
+ Sample count: \d+ (re)
+
+In alias
+
+ $ hg --config "alias.profst=status --profile" profst 2>&1 | grep Sample
+ Sample count: \d+ (re)
+
+#if lsprof
+
$ prof='hg --config profiling.type=ls --profile'
$ $prof st 2>../out
@@ -51,10 +65,10 @@
$ cat >> sleepext.py << EOF
> import time
- > from mercurial import cmdutil, commands
+ > from mercurial import registrar, commands
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
- > @command('sleep', [], 'hg sleep')
+ > command = registrar.command(cmdtable)
+ > @command(b'sleep', [], 'hg sleep')
> def sleep(ui, *args, **kwargs):
> time.sleep(0.1)
> EOF
@@ -99,3 +113,51 @@
[1]
$ cd ..
+
+profiler extension could be loaded before other extensions
+
+ $ cat > fooprof.py <<EOF
+ > from __future__ import absolute_import
+ > import contextlib
+ > @contextlib.contextmanager
+ > def profile(ui, fp):
+ > print('fooprof: start profile')
+ > yield
+ > print('fooprof: end profile')
+ > def extsetup(ui):
+ > ui.write('fooprof: loaded\n')
+ > EOF
+
+ $ cat > otherextension.py <<EOF
+ > from __future__ import absolute_import
+ > def extsetup(ui):
+ > ui.write('otherextension: loaded\n')
+ > EOF
+
+ $ hg init b
+ $ cd b
+ $ cat >> .hg/hgrc <<EOF
+ > [extensions]
+ > other = $TESTTMP/otherextension.py
+ > fooprof = $TESTTMP/fooprof.py
+ > EOF
+
+ $ hg root
+ otherextension: loaded
+ fooprof: loaded
+ $TESTTMP/b (glob)
+ $ HGPROF=fooprof hg root --profile
+ fooprof: loaded
+ fooprof: start profile
+ otherextension: loaded
+ $TESTTMP/b (glob)
+ fooprof: end profile
+
+ $ HGPROF=other hg root --profile 2>&1 | head -n 2
+ otherextension: loaded
+ unrecognized profiler 'other' - ignored
+
+ $ HGPROF=unknown hg root --profile 2>&1 | head -n 1
+ unrecognized profiler 'unknown' - ignored
+
+ $ cd ..
--- a/tests/test-progress.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-progress.t Wed Jul 19 07:51:41 2017 -0500
@@ -1,10 +1,10 @@
$ cat > loop.py <<EOF
- > from mercurial import cmdutil, commands
+ > from mercurial import commands, registrar
> import time
>
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
+ > command = registrar.command(cmdtable)
>
> class incrementingtime(object):
> def __init__(self):
@@ -14,7 +14,7 @@
> return self._time
> time.time = incrementingtime()
>
- > @command('loop',
+ > @command(b'loop',
> [('', 'total', '', 'override for total'),
> ('', 'nested', False, 'show nested results'),
> ('', 'parallel', False, 'show parallel sets of results')],
--- a/tests/test-pull-branch.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-pull-branch.t Wed Jul 19 07:51:41 2017 -0500
@@ -133,6 +133,7 @@
adding file changes
added 4 changesets with 4 changes to 1 files (+1 heads)
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "d740e1a584e7: a5.2"
1 other heads for branch "branchA"
Make changes on new branch on tt
--- a/tests/test-pull-update.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-pull-update.t Wed Jul 19 07:51:41 2017 -0500
@@ -41,6 +41,7 @@
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "800c91d5bfc1: m"
1 other heads for branch "default"
$ cd ../tt
@@ -55,6 +56,7 @@
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "107cefe13e42: m"
1 other heads for branch "default"
$ HGMERGE=true hg merge
--- a/tests/test-purge.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-purge.t Wed Jul 19 07:51:41 2017 -0500
@@ -49,7 +49,7 @@
$ touch untracked_file
$ touch untracked_file_readonly
- $ python <<EOF
+ $ $PYTHON <<EOF
> import os, stat
> f= 'untracked_file_readonly'
> os.chmod(f, stat.S_IMODE(os.stat(f).st_mode) & ~stat.S_IWRITE)
--- a/tests/test-push-cgi.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-push-cgi.t Wed Jul 19 07:51:41 2017 -0500
@@ -38,7 +38,7 @@
expect failure because heads doesn't match (formerly known as 'unsynced changes')
$ QUERY_STRING="cmd=unbundle&heads=0000000000000000000000000000000000000000"; export QUERY_STRING
- $ python hgweb.cgi <bundle.hg >page1 2>&1
+ $ $PYTHON hgweb.cgi <bundle.hg >page1 2>&1
$ cat page1
Status: 200 Script output follows\r (esc)
Content-Type: application/mercurial-0.1\r (esc)
@@ -50,7 +50,7 @@
successful force push
$ QUERY_STRING="cmd=unbundle&heads=666f726365"; export QUERY_STRING
- $ python hgweb.cgi <bundle.hg >page2 2>&1
+ $ $PYTHON hgweb.cgi <bundle.hg >page2 2>&1
$ cat page2
Status: 200 Script output follows\r (esc)
Content-Type: application/mercurial-0.1\r (esc)
@@ -65,7 +65,7 @@
successful push, list of heads
$ QUERY_STRING="cmd=unbundle&heads=f7b1eb17ad24730a1651fccd46c43826d1bbc2ac"; export QUERY_STRING
- $ python hgweb.cgi <bundle.hg >page3 2>&1
+ $ $PYTHON hgweb.cgi <bundle.hg >page3 2>&1
$ cat page3
Status: 200 Script output follows\r (esc)
Content-Type: application/mercurial-0.1\r (esc)
@@ -80,7 +80,7 @@
successful push, SHA1 hash of heads (unbundlehash capability)
$ QUERY_STRING="cmd=unbundle&heads=686173686564 5a785a5f9e0d433b88ed862b206b011b0c3a9d13"; export QUERY_STRING
- $ python hgweb.cgi <bundle.hg >page4 2>&1
+ $ $PYTHON hgweb.cgi <bundle.hg >page4 2>&1
$ cat page4
Status: 200 Script output follows\r (esc)
Content-Type: application/mercurial-0.1\r (esc)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-push-checkheads-partial-C1.t Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,83 @@
+====================================
+Testing head checking code: Case C-1
+====================================
+
+Mercurial checks for the introduction of new heads on push. Evolution comes
+into play to detect if existing branches on the server are being replaced by
+some of the new one we push.
+
+This case is part of a series of tests checking this behavior.
+
+Category C: checking cases where the branch is only partially obsoleted.
+TestCase 1: 2 changeset branch, only the head is rewritten
+
+.. old-state:
+..
+.. * 2 changeset branch
+..
+.. new-state:
+..
+.. * 1 new changeset branch superseding only the head of the old one
+.. * base of the old branch is still alive
+..
+.. expected-result:
+..
+.. * push denied
+..
+.. graph-summary:
+..
+.. B ø⇠◔ B'
+.. | |
+.. A ○ |
+.. |/
+.. ○
+
+ $ . $TESTDIR/testlib/push-checkheads-util.sh
+
+Test setup
+----------
+
+ $ mkdir C1
+ $ cd C1
+ $ setuprepos
+ creating basic server and client repo
+ updating to branch default
+ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ cd server
+ $ mkcommit B0
+ $ cd ../client
+ $ hg pull
+ pulling from $TESTTMP/C1/server (glob)
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+ (run 'hg update' to get a working copy)
+ $ hg up 0
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ mkcommit B1
+ created new head
+ $ hg debugobsolete `getid "desc(B0)" ` `getid "desc(B1)"`
+ obsoleted 1 changesets
+ $ hg log -G --hidden
+ @ 25c56d33e4c4 (draft): B1
+ |
+ | x d73caddc5533 (draft): B0
+ | |
+ | o 8aaa48160adc (draft): A0
+ |/
+ o 1e4be0697311 (public): root
+
+
+Actual testing
+--------------
+
+ $ hg push
+ pushing to $TESTTMP/C1/server (glob)
+ searching for changes
+ abort: push creates new remote head 25c56d33e4c4!
+ (merge or see 'hg help push' for details about pushing new heads)
+ [255]
+
+ $ cd ../..
--- a/tests/test-push-checkheads-partial-C2.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-push-checkheads-partial-C2.t Wed Jul 19 07:51:41 2017 -0500
@@ -59,6 +59,7 @@
$ mkcommit A1
created new head
$ hg debugobsolete `getid "desc(A0)" ` `getid "desc(A1)"`
+ obsoleted 1 changesets
$ hg log -G --hidden
@ f6082bc4ffef (draft): A1
|
--- a/tests/test-push-checkheads-partial-C3.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-push-checkheads-partial-C3.t Wed Jul 19 07:51:41 2017 -0500
@@ -59,6 +59,7 @@
$ mkcommit C0
created new head
$ hg debugobsolete --record-parents `getid "desc(B0)"`
+ obsoleted 1 changesets
$ hg log -G --hidden
@ 0f88766e02d6 (draft): C0
|
--- a/tests/test-push-checkheads-partial-C4.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-push-checkheads-partial-C4.t Wed Jul 19 07:51:41 2017 -0500
@@ -59,6 +59,7 @@
$ mkcommit C0
created new head
$ hg debugobsolete --record-parents `getid "desc(A0)"`
+ obsoleted 1 changesets
$ hg log -G --hidden
@ 0f88766e02d6 (draft): C0
|
--- a/tests/test-push-checkheads-pruned-B1.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-push-checkheads-pruned-B1.t Wed Jul 19 07:51:41 2017 -0500
@@ -49,6 +49,7 @@
$ mkcommit B0
created new head
$ hg debugobsolete --record-parents `getid "desc(A0)"`
+ obsoleted 1 changesets
$ hg log -G --hidden
@ 74ff5441d343 (draft): B0
|
@@ -68,5 +69,6 @@
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
1 new obsolescence markers
+ obsoleted 1 changesets
$ cd ../..
--- a/tests/test-push-checkheads-pruned-B2.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-push-checkheads-pruned-B2.t Wed Jul 19 07:51:41 2017 -0500
@@ -59,7 +59,9 @@
$ mkcommit A1
created new head
$ hg debugobsolete `getid "desc(A0)" ` `getid "desc(A1)"`
+ obsoleted 1 changesets
$ hg debugobsolete --record-parents `getid "desc(B0)"`
+ obsoleted 1 changesets
$ hg log -G --hidden
@ f6082bc4ffef (draft): A1
|
@@ -81,5 +83,6 @@
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
2 new obsolescence markers
+ obsoleted 2 changesets
$ cd ../..
--- a/tests/test-push-checkheads-pruned-B3.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-push-checkheads-pruned-B3.t Wed Jul 19 07:51:41 2017 -0500
@@ -59,7 +59,9 @@
$ mkcommit B1
created new head
$ hg debugobsolete --record-parents `getid "desc(A0)"`
+ obsoleted 1 changesets
$ hg debugobsolete `getid "desc(B0)" ` `getid "desc(B1)"`
+ obsoleted 1 changesets
$ hg log -G --hidden
@ 25c56d33e4c4 (draft): B1
|
@@ -81,6 +83,7 @@
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
2 new obsolescence markers
+ obsoleted 2 changesets
$ cd ../..
--- a/tests/test-push-checkheads-pruned-B4.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-push-checkheads-pruned-B4.t Wed Jul 19 07:51:41 2017 -0500
@@ -60,7 +60,9 @@
$ mkcommit C0
created new head
$ hg debugobsolete --record-parents `getid "desc(A0)"`
+ obsoleted 1 changesets
$ hg debugobsolete --record-parents `getid "desc(B0)"`
+ obsoleted 1 changesets
$ hg log -G --hidden
@ 0f88766e02d6 (draft): C0
|
@@ -82,5 +84,6 @@
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
2 new obsolescence markers
+ obsoleted 2 changesets
$ cd ../..
--- a/tests/test-push-checkheads-pruned-B5.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-push-checkheads-pruned-B5.t Wed Jul 19 07:51:41 2017 -0500
@@ -63,8 +63,11 @@
$ mkcommit B1
created new head
$ hg debugobsolete --record-parents `getid "desc(A0)"`
+ obsoleted 1 changesets
$ hg debugobsolete `getid "desc(B0)"` `getid "desc(B1)"`
+ obsoleted 1 changesets
$ hg debugobsolete --record-parents `getid "desc(C0)"`
+ obsoleted 1 changesets
$ hg log -G --hidden
@ 25c56d33e4c4 (draft): B1
|
@@ -88,5 +91,6 @@
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
3 new obsolescence markers
+ obsoleted 3 changesets
$ cd ../..
--- a/tests/test-push-checkheads-pruned-B6.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-push-checkheads-pruned-B6.t Wed Jul 19 07:51:41 2017 -0500
@@ -52,7 +52,9 @@
$ hg up 'desc(B0)'
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg debugobsolete `getid "desc(A0)"` `getid "desc(A1)"`
+ obsoleted 1 changesets
$ hg debugobsolete --record-parents `getid "desc(A1)"`
+ obsoleted 1 changesets
$ hg log -G --hidden
x ba93660aff8d (draft): A1
|
@@ -74,5 +76,6 @@
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
2 new obsolescence markers
+ obsoleted 1 changesets
$ cd ../..
--- a/tests/test-push-checkheads-pruned-B7.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-push-checkheads-pruned-B7.t Wed Jul 19 07:51:41 2017 -0500
@@ -51,7 +51,9 @@
$ hg up 'desc(B0)'
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg debugobsolete `getid "desc(A0)"` `getid "desc(A1)"`
+ obsoleted 1 changesets
$ hg debugobsolete --record-parents `getid "desc(A1)"`
+ obsoleted 1 changesets
$ hg log -G --hidden
x ba93660aff8d (draft): A1
|
@@ -73,5 +75,6 @@
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
2 new obsolescence markers
+ obsoleted 1 changesets
$ cd ../..
--- a/tests/test-push-checkheads-pruned-B8.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-push-checkheads-pruned-B8.t Wed Jul 19 07:51:41 2017 -0500
@@ -66,9 +66,13 @@
$ mkcommit A2
created new head
$ hg debugobsolete `getid "desc(A0)" ` `getid "desc(A1)"`
+ obsoleted 1 changesets
$ hg debugobsolete `getid "desc(B0)" ` `getid "desc(B1)"`
+ obsoleted 1 changesets
$ hg debugobsolete --record-parents `getid "desc(B1)"`
+ obsoleted 1 changesets
$ hg debugobsolete `getid "desc(A1)" ` `getid "desc(A2)"`
+ obsoleted 1 changesets
$ hg log -G --hidden
@ c1f8d089020f (draft): A2
|
@@ -94,5 +98,6 @@
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
4 new obsolescence markers
+ obsoleted 2 changesets
$ cd ../..
--- a/tests/test-push-checkheads-superceed-A1.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-push-checkheads-superceed-A1.t Wed Jul 19 07:51:41 2017 -0500
@@ -46,6 +46,7 @@
$ mkcommit A1
created new head
$ hg debugobsolete `getid "desc(A0)" ` `getid "desc(A1)"`
+ obsoleted 1 changesets
$ hg log -G --hidden
@ f6082bc4ffef (draft): A1
|
@@ -65,5 +66,6 @@
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
1 new obsolescence markers
+ obsoleted 1 changesets
$ cd ../..
--- a/tests/test-push-checkheads-superceed-A2.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-push-checkheads-superceed-A2.t Wed Jul 19 07:51:41 2017 -0500
@@ -59,7 +59,9 @@
created new head
$ mkcommit B1
$ hg debugobsolete `getid "desc(A0)" ` `getid "desc(A1)"`
+ obsoleted 1 changesets
$ hg debugobsolete `getid "desc(B0)" ` `getid "desc(B1)"`
+ obsoleted 1 changesets
$ hg log -G --hidden
@ 262c8c798096 (draft): B1
|
@@ -83,5 +85,6 @@
adding file changes
added 2 changesets with 2 changes to 2 files (+1 heads)
2 new obsolescence markers
+ obsoleted 2 changesets
$ cd ../..
--- a/tests/test-push-checkheads-superceed-A3.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-push-checkheads-superceed-A3.t Wed Jul 19 07:51:41 2017 -0500
@@ -62,7 +62,9 @@
created new head
$ mkcommit A1
$ hg debugobsolete `getid "desc(A0)" ` `getid "desc(A1)"`
+ obsoleted 1 changesets
$ hg debugobsolete `getid "desc(B0)" ` `getid "desc(B1)"`
+ obsoleted 1 changesets
$ hg log -G --hidden
@ c1c7524e9488 (draft): A1
|
@@ -86,5 +88,6 @@
adding file changes
added 2 changesets with 2 changes to 2 files (+1 heads)
2 new obsolescence markers
+ obsoleted 2 changesets
$ cd ../..
--- a/tests/test-push-checkheads-superceed-A4.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-push-checkheads-superceed-A4.t Wed Jul 19 07:51:41 2017 -0500
@@ -48,6 +48,7 @@
$ mkcommit A1
created new head
$ hg debugobsolete `getid "desc(A0)" ` `getid "desc(A1)"`
+ obsoleted 1 changesets
$ mkcommit B0
$ hg log -G --hidden
@ f40ded968333 (draft): B0
@@ -70,5 +71,6 @@
adding file changes
added 2 changesets with 2 changes to 2 files (+1 heads)
1 new obsolescence markers
+ obsoleted 1 changesets
$ cd ../../
--- a/tests/test-push-checkheads-superceed-A5.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-push-checkheads-superceed-A5.t Wed Jul 19 07:51:41 2017 -0500
@@ -49,6 +49,7 @@
created new head
$ mkcommit A1
$ hg debugobsolete `getid "desc(A0)" ` `getid "desc(A1)"`
+ obsoleted 1 changesets
$ hg log -G --hidden
@ ba93660aff8d (draft): A1
|
@@ -70,6 +71,7 @@
adding file changes
added 2 changesets with 2 changes to 2 files (+1 heads)
1 new obsolescence markers
+ obsoleted 1 changesets
$ cd ../..
--- a/tests/test-push-checkheads-superceed-A6.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-push-checkheads-superceed-A6.t Wed Jul 19 07:51:41 2017 -0500
@@ -68,7 +68,9 @@
1 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ mkcommit B1
$ hg debugobsolete `getid "desc(A0)" ` `getid "desc(A1)"`
+ obsoleted 1 changesets
$ hg debugobsolete `getid "desc(B0)" ` `getid "desc(B1)"`
+ obsoleted 1 changesets
$ hg log -G --hidden
@ d70a1f75a020 (draft): B1
|
@@ -94,5 +96,6 @@
adding file changes
added 2 changesets with 2 changes to 2 files (+1 heads)
2 new obsolescence markers
+ obsoleted 2 changesets
$ cd ../..
--- a/tests/test-push-checkheads-superceed-A7.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-push-checkheads-superceed-A7.t Wed Jul 19 07:51:41 2017 -0500
@@ -68,7 +68,9 @@
$ mkcommit B1
created new head
$ hg debugobsolete `getid "desc(A0)" ` `getid "desc(A1)"`
+ obsoleted 1 changesets
$ hg debugobsolete `getid "desc(B0)" ` `getid "desc(B1)"`
+ obsoleted 1 changesets
$ hg log -G --hidden
@ 25c56d33e4c4 (draft): B1
|
@@ -94,5 +96,6 @@
adding file changes
added 2 changesets with 2 changes to 2 files (+1 heads)
2 new obsolescence markers
+ obsoleted 2 changesets
$ cd ../..
--- a/tests/test-push-checkheads-superceed-A8.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-push-checkheads-superceed-A8.t Wed Jul 19 07:51:41 2017 -0500
@@ -53,7 +53,9 @@
$ mkcommit A2
created new head
$ hg debugobsolete `getid "desc(A0)" ` `getid "desc(A1)"`
+ obsoleted 1 changesets
$ hg debugobsolete `getid "desc(A1)" ` `getid "desc(A2)"`
+ obsoleted 1 changesets
$ hg log -G --hidden
@ c1f8d089020f (draft): A2
|
@@ -75,5 +77,6 @@
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
2 new obsolescence markers
+ obsoleted 1 changesets
$ cd ../..
--- a/tests/test-push-checkheads-unpushed-D1.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-push-checkheads-unpushed-D1.t Wed Jul 19 07:51:41 2017 -0500
@@ -49,6 +49,7 @@
$ mkcommit A1
created new head
$ hg debugobsolete `getid "desc(A0)" ` `getid "desc(A1)"`
+ obsoleted 1 changesets
$ hg up 0
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ mkcommit B0
--- a/tests/test-push-checkheads-unpushed-D2.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-push-checkheads-unpushed-D2.t Wed Jul 19 07:51:41 2017 -0500
@@ -63,7 +63,9 @@
$ mkcommit A1
created new head
$ hg debugobsolete `getid "desc(A0)" ` `getid "desc(A1)"`
+ obsoleted 1 changesets
$ hg debugobsolete --record-parents `getid "desc(B0)"`
+ obsoleted 1 changesets
$ hg up 0
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ mkcommit C0
--- a/tests/test-push-checkheads-unpushed-D3.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-push-checkheads-unpushed-D3.t Wed Jul 19 07:51:41 2017 -0500
@@ -66,7 +66,9 @@
$ mkcommit B1
created new head
$ hg debugobsolete `getid "desc(A0)" ` `getid "desc(A1)"`
+ obsoleted 1 changesets
$ hg debugobsolete `getid "desc(B0)" ` `getid "desc(B1)"`
+ obsoleted 1 changesets
$ hg log -G --hidden
@ 25c56d33e4c4 (draft): B1
|
--- a/tests/test-push-checkheads-unpushed-D4.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-push-checkheads-unpushed-D4.t Wed Jul 19 07:51:41 2017 -0500
@@ -82,7 +82,9 @@
1 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ mkcommit B1
$ hg debugobsolete `getid "desc(A0)" ` `getid "desc(A1)"`
+ obsoleted 1 changesets
$ hg debugobsolete `getid "desc(B0)" ` `getid "desc(B1)"`
+ obsoleted 1 changesets
$ hg log -G --hidden
@ d70a1f75a020 (draft): B1
|
@@ -118,5 +120,6 @@
adding file changes
added 1 changesets with 1 changes to 1 files
1 new obsolescence markers
+ obsoleted 1 changesets
$ cd ../..
--- a/tests/test-push-checkheads-unpushed-D5.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-push-checkheads-unpushed-D5.t Wed Jul 19 07:51:41 2017 -0500
@@ -71,7 +71,9 @@
$ mkcommit B1
created new head
$ hg debugobsolete `getid "desc(A0)" ` `getid "desc(A1)"`
+ obsoleted 1 changesets
$ hg debugobsolete `getid "desc(B0)" ` `getid "desc(B1)"`
+ obsoleted 1 changesets
$ hg log -G --hidden
@ 25c56d33e4c4 (draft): B1
|
@@ -103,5 +105,6 @@
adding file changes
added 1 changesets with 1 changes to 1 files
1 new obsolescence markers
+ obsoleted 1 changesets
$ cd ../..
--- a/tests/test-push-checkheads-unpushed-D6.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-push-checkheads-unpushed-D6.t Wed Jul 19 07:51:41 2017 -0500
@@ -56,7 +56,9 @@
$ mkcommit C0
created new head
$ hg debugobsolete `getid "desc(A0)"` `getid "desc(A1)"`
+ obsoleted 1 changesets
$ hg debugobsolete --record-parents `getid "desc(A1)"`
+ obsoleted 1 changesets
$ hg log -G --hidden
@ 0f88766e02d6 (draft): C0
|
--- a/tests/test-push-checkheads-unpushed-D7.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-push-checkheads-unpushed-D7.t Wed Jul 19 07:51:41 2017 -0500
@@ -65,8 +65,11 @@
$ mkcommit C0
created new head
$ hg debugobsolete `getid "desc(A0)"` `getid "desc(A1)"`
+ obsoleted 1 changesets
$ hg debugobsolete `getid "desc(A1)"` `getid "desc(A2)"`
+ obsoleted 1 changesets
$ hg debugobsolete --record-parents `getid "desc(A2)"`
+ obsoleted 1 changesets
$ hg log -G --hidden
@ 0f88766e02d6 (draft): C0
|
@@ -92,5 +95,6 @@
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
3 new obsolescence markers
+ obsoleted 1 changesets
$ cd ../..
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-push-race.t Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,1845 @@
+============================================================================================
+Test cases where there are race condition between two clients pushing to the same repository
+============================================================================================
+
+This file tests cases where two clients push to a server at the same time. The
+"raced" client is done preparing its push bundle when the "racing" client
+performs its push. The "raced" client starts its actual push after the "racing"
+client's push is fully complete.
+
+A set of extensions and shell functions ensures this scheduling.
+
+ $ cat >> delaypush.py << EOF
+ > """small extension orchestrate push race
+ >
+ > Client with the extensions will create a file when ready and get stuck until
+ > a file is created."""
+ >
+ > import atexit
+ > import errno
+ > import os
+ > import time
+ >
+ > from mercurial import (
+ > exchange,
+ > extensions,
+ > )
+ >
+ > def delaypush(orig, pushop):
+ > # notify we are done preparing
+ > ui = pushop.repo.ui
+ > readypath = ui.config('delaypush', 'ready-path', None)
+ > if readypath is not None:
+ > with open(readypath, 'w') as r:
+ > r.write('foo')
+ > ui.status('wrote ready: %s\n' % readypath)
+ > # now wait for the other process to be done
+ > watchpath = ui.config('delaypush', 'release-path', None)
+ > if watchpath is not None:
+ > ui.status('waiting on: %s\n' % watchpath)
+ > limit = 100
+ > while 0 < limit and not os.path.exists(watchpath):
+ > limit -= 1
+ > time.sleep(0.1)
+ > if limit <= 0:
+ > ui.warn('exiting without watchfile: %s' % watchpath)
+ > else:
+ > # delete the file at the end of the push
+ > def delete():
+ > try:
+ > os.unlink(watchpath)
+ > except OSError as exc:
+ > if exc.errno != errno.ENOENT:
+ > raise
+ > atexit.register(delete)
+ > return orig(pushop)
+ >
+ > def uisetup(ui):
+ > extensions.wrapfunction(exchange, '_pushbundle2', delaypush)
+ > EOF
+
+ $ waiton () {
+ > # wait for a file to be created (then delete it)
+ > count=100
+ > while [ ! -f $1 ] ;
+ > do
+ > sleep 0.1;
+ > count=`expr $count - 1`;
+ > if [ $count -lt 0 ];
+ > then
+ > break
+ > fi;
+ > done
+ > [ -f $1 ] || echo "ready file still missing: $1"
+ > rm -f $1
+ > }
+
+ $ release () {
+ > # create a file and wait for it be deleted
+ > count=100
+ > touch $1
+ > while [ -f $1 ] ;
+ > do
+ > sleep 0.1;
+ > count=`expr $count - 1`;
+ > if [ $count -lt 0 ];
+ > then
+ > break
+ > fi;
+ > done
+ > [ ! -f $1 ] || echo "delay file still exist: $1"
+ > }
+
+ $ cat >> $HGRCPATH << EOF
+ > [ui]
+ > ssh = $PYTHON "$TESTDIR/dummyssh"
+ > # simplify output
+ > logtemplate = {node|short} {desc} ({branch})
+ > [phases]
+ > publish = no
+ > [experimental]
+ > evolution = all
+ > [alias]
+ > graph = log -G --rev 'sort(all(), "topo")'
+ > EOF
+
+We test multiple cases:
+* strict: no race detected,
+* unrelated: race on unrelated heads are allowed.
+
+#testcases strict unrelated
+
+#if unrelated
+
+ $ cat >> $HGRCPATH << EOF
+ > [server]
+ > concurrent-push-mode = check-related
+ > EOF
+
+#endif
+
+Setup
+-----
+
+create a repo with one root
+
+ $ hg init server
+ $ cd server
+ $ echo root > root
+ $ hg ci -Am "C-ROOT"
+ adding root
+ $ cd ..
+
+clone it in two clients
+
+ $ hg clone ssh://user@dummy/server client-racy
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+ updating to branch default
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg clone ssh://user@dummy/server client-other
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+ updating to branch default
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+setup one to allow race on push
+
+ $ cat >> client-racy/.hg/hgrc << EOF
+ > [extensions]
+ > delaypush = $TESTTMP/delaypush.py
+ > [delaypush]
+ > ready-path = $TESTTMP/readyfile
+ > release-path = $TESTTMP/watchfile
+ > EOF
+
+Simple race, both try to push to the server at the same time
+------------------------------------------------------------
+
+Both try to replace the same head
+
+# a
+# | b
+# |/
+# *
+
+Creating changesets
+
+ $ echo b > client-other/a
+ $ hg -R client-other/ add client-other/a
+ $ hg -R client-other/ commit -m "C-A"
+ $ echo b > client-racy/b
+ $ hg -R client-racy/ add client-racy/b
+ $ hg -R client-racy/ commit -m "C-B"
+
+Pushing
+
+ $ hg -R client-racy push -r 'tip' > ./push-log 2>&1 &
+
+ $ waiton $TESTTMP/readyfile
+
+ $ hg -R client-other push -r 'tip'
+ pushing to ssh://user@dummy/server
+ searching for changes
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 1 changesets with 1 changes to 1 files
+
+ $ release $TESTTMP/watchfile
+
+Check the result of the push
+
+ $ cat ./push-log
+ pushing to ssh://user@dummy/server
+ searching for changes
+ wrote ready: $TESTTMP/readyfile
+ waiting on: $TESTTMP/watchfile
+ abort: push failed:
+ 'repository changed while pushing - please try again'
+
+ $ hg -R server graph
+ o 98217d5a1659 C-A (default)
+ |
+ @ 842e2fac6304 C-ROOT (default)
+
+
+Pushing on two different heads
+------------------------------
+
+Both try to replace a different head
+
+# a b
+# | |
+# * *
+# |/
+# *
+
+(resync-all)
+
+ $ hg -R ./server pull ./client-racy
+ pulling from ./client-racy
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ (run 'hg heads' to see heads, 'hg merge' to merge)
+ $ hg -R ./client-other pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ (run 'hg heads' to see heads, 'hg merge' to merge)
+ $ hg -R ./client-racy pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ (run 'hg heads' to see heads, 'hg merge' to merge)
+
+ $ hg -R server graph
+ o a9149a1428e2 C-B (default)
+ |
+ | o 98217d5a1659 C-A (default)
+ |/
+ @ 842e2fac6304 C-ROOT (default)
+
+
+Creating changesets
+
+ $ echo aa >> client-other/a
+ $ hg -R client-other/ commit -m "C-C"
+ $ echo bb >> client-racy/b
+ $ hg -R client-racy/ commit -m "C-D"
+
+Pushing
+
+ $ hg -R client-racy push -r 'tip' > ./push-log 2>&1 &
+
+ $ waiton $TESTTMP/readyfile
+
+ $ hg -R client-other push -r 'tip'
+ pushing to ssh://user@dummy/server
+ searching for changes
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 1 changesets with 1 changes to 1 files
+
+ $ release $TESTTMP/watchfile
+
+Check the result of the push
+
+#if strict
+ $ cat ./push-log
+ pushing to ssh://user@dummy/server
+ searching for changes
+ wrote ready: $TESTTMP/readyfile
+ waiting on: $TESTTMP/watchfile
+ abort: push failed:
+ 'repository changed while pushing - please try again'
+
+ $ hg -R server graph
+ o 51c544a58128 C-C (default)
+ |
+ o 98217d5a1659 C-A (default)
+ |
+ | o a9149a1428e2 C-B (default)
+ |/
+ @ 842e2fac6304 C-ROOT (default)
+
+#endif
+#if unrelated
+
+(The two heads are unrelated, push should be allowed)
+
+ $ cat ./push-log
+ pushing to ssh://user@dummy/server
+ searching for changes
+ wrote ready: $TESTTMP/readyfile
+ waiting on: $TESTTMP/watchfile
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 1 changesets with 1 changes to 1 files
+
+ $ hg -R server graph
+ o 59e76faf78bd C-D (default)
+ |
+ o a9149a1428e2 C-B (default)
+ |
+ | o 51c544a58128 C-C (default)
+ | |
+ | o 98217d5a1659 C-A (default)
+ |/
+ @ 842e2fac6304 C-ROOT (default)
+
+#endif
+
+Pushing while someone creates a new head
+-----------------------------------------
+
+Pushing a new changeset while someone creates a new branch.
+
+# a (raced)
+# |
+# * b
+# |/
+# *
+
+(resync-all)
+
+#if strict
+
+ $ hg -R ./server pull ./client-racy
+ pulling from ./client-racy
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+ (run 'hg update' to get a working copy)
+
+#endif
+#if unrelated
+
+ $ hg -R ./server pull ./client-racy
+ pulling from ./client-racy
+ searching for changes
+ no changes found
+
+#endif
+
+ $ hg -R ./client-other pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+ (run 'hg update' to get a working copy)
+ $ hg -R ./client-racy pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+ (run 'hg update' to get a working copy)
+
+ $ hg -R server graph
+ o 59e76faf78bd C-D (default)
+ |
+ o a9149a1428e2 C-B (default)
+ |
+ | o 51c544a58128 C-C (default)
+ | |
+ | o 98217d5a1659 C-A (default)
+ |/
+ @ 842e2fac6304 C-ROOT (default)
+
+
+Creating changesets
+
+(new head)
+
+ $ hg -R client-other/ up 'desc("C-A")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo aaa >> client-other/a
+ $ hg -R client-other/ commit -m "C-E"
+ created new head
+
+(children of existing head)
+
+ $ hg -R client-racy/ up 'desc("C-C")'
+ 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ echo bbb >> client-racy/a
+ $ hg -R client-racy/ commit -m "C-F"
+
+Pushing
+
+ $ hg -R client-racy push -r 'tip' > ./push-log 2>&1 &
+
+ $ waiton $TESTTMP/readyfile
+
+ $ hg -R client-other push -fr 'tip'
+ pushing to ssh://user@dummy/server
+ searching for changes
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 1 changesets with 1 changes to 1 files (+1 heads)
+
+ $ release $TESTTMP/watchfile
+
+Check the result of the push
+
+#if strict
+
+ $ cat ./push-log
+ pushing to ssh://user@dummy/server
+ searching for changes
+ wrote ready: $TESTTMP/readyfile
+ waiting on: $TESTTMP/watchfile
+ abort: push failed:
+ 'repository changed while pushing - please try again'
+
+ $ hg -R server graph
+ o d603e2c0cdd7 C-E (default)
+ |
+ | o 51c544a58128 C-C (default)
+ |/
+ o 98217d5a1659 C-A (default)
+ |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ |/
+ @ 842e2fac6304 C-ROOT (default)
+
+
+#endif
+
+#if unrelated
+
+(The racing new head does not affect existing heads, push should go through)
+
+ $ cat ./push-log
+ pushing to ssh://user@dummy/server
+ searching for changes
+ wrote ready: $TESTTMP/readyfile
+ waiting on: $TESTTMP/watchfile
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 1 changesets with 1 changes to 1 files
+
+ $ hg -R server graph
+ o d9e379a8c432 C-F (default)
+ |
+ o 51c544a58128 C-C (default)
+ |
+ | o d603e2c0cdd7 C-E (default)
+ |/
+ o 98217d5a1659 C-A (default)
+ |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ |/
+ @ 842e2fac6304 C-ROOT (default)
+
+#endif
+
+Pushing touching different named branch (same topo): new branch raced
+---------------------------------------------------------------------
+
+Pushing two children on the same head, one is a different named branch
+
+# a (raced, branch-a)
+# |
+# | b (default branch)
+# |/
+# *
+
+(resync-all)
+
+#if strict
+
+ $ hg -R ./server pull ./client-racy
+ pulling from ./client-racy
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+ (run 'hg update' to get a working copy)
+
+#endif
+#if unrelated
+
+ $ hg -R ./server pull ./client-racy
+ pulling from ./client-racy
+ searching for changes
+ no changes found
+
+#endif
+
+ $ hg -R ./client-other pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+ (run 'hg update' to get a working copy)
+ $ hg -R ./client-racy pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ (run 'hg heads .' to see heads, 'hg merge' to merge)
+
+ $ hg -R server graph
+ o d9e379a8c432 C-F (default)
+ |
+ o 51c544a58128 C-C (default)
+ |
+ | o d603e2c0cdd7 C-E (default)
+ |/
+ o 98217d5a1659 C-A (default)
+ |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ |/
+ @ 842e2fac6304 C-ROOT (default)
+
+
+Creating changesets
+
+(update existing head)
+
+ $ hg -R client-other/ up 'desc("C-F")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo aaa >> client-other/a
+ $ hg -R client-other/ commit -m "C-G"
+
+(new named branch from that existing head)
+
+ $ hg -R client-racy/ up 'desc("C-F")'
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo bbb >> client-racy/a
+ $ hg -R client-racy/ branch my-first-test-branch
+ marked working directory as branch my-first-test-branch
+ (branches are permanent and global, did you want a bookmark?)
+ $ hg -R client-racy/ commit -m "C-H"
+
+Pushing
+
+ $ hg -R client-racy push -r 'tip' --new-branch > ./push-log 2>&1 &
+
+ $ waiton $TESTTMP/readyfile
+
+ $ hg -R client-other push -fr 'tip'
+ pushing to ssh://user@dummy/server
+ searching for changes
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 1 changesets with 1 changes to 1 files
+
+ $ release $TESTTMP/watchfile
+
+Check the result of the push
+
+#if strict
+ $ cat ./push-log
+ pushing to ssh://user@dummy/server
+ searching for changes
+ wrote ready: $TESTTMP/readyfile
+ waiting on: $TESTTMP/watchfile
+ abort: push failed:
+ 'repository changed while pushing - please try again'
+
+ $ hg -R server graph
+ o 75d69cba5402 C-G (default)
+ |
+ o d9e379a8c432 C-F (default)
+ |
+ o 51c544a58128 C-C (default)
+ |
+ | o d603e2c0cdd7 C-E (default)
+ |/
+ o 98217d5a1659 C-A (default)
+ |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ |/
+ @ 842e2fac6304 C-ROOT (default)
+
+#endif
+#if unrelated
+
+(unrelated named branches are unrelated)
+
+ $ cat ./push-log
+ pushing to ssh://user@dummy/server
+ searching for changes
+ wrote ready: $TESTTMP/readyfile
+ waiting on: $TESTTMP/watchfile
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 1 changesets with 1 changes to 1 files (+1 heads)
+
+ $ hg -R server graph
+ o 833be552cfe6 C-H (my-first-test-branch)
+ |
+ | o 75d69cba5402 C-G (default)
+ |/
+ o d9e379a8c432 C-F (default)
+ |
+ o 51c544a58128 C-C (default)
+ |
+ | o d603e2c0cdd7 C-E (default)
+ |/
+ o 98217d5a1659 C-A (default)
+ |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ |/
+ @ 842e2fac6304 C-ROOT (default)
+
+#endif
+
+The racing new head does not affect existing heads, push should go through
+
+pushing touching different named branch (same topo): old branch raced
+---------------------------------------------------------------------
+
+Pushing two children on the same head, one is a different named branch
+
+# a (raced, default-branch)
+# |
+# | b (new branch)
+# |/
+# * (default-branch)
+
+(resync-all)
+
+#if strict
+
+ $ hg -R ./server pull ./client-racy
+ pulling from ./client-racy
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ (run 'hg heads .' to see heads, 'hg merge' to merge)
+
+#endif
+#if unrelated
+
+ $ hg -R ./server pull ./client-racy
+ pulling from ./client-racy
+ searching for changes
+ no changes found
+
+#endif
+
+ $ hg -R ./client-other pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ (run 'hg heads .' to see heads, 'hg merge' to merge)
+ $ hg -R ./client-racy pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ (run 'hg heads' to see heads)
+
+ $ hg -R server graph
+ o 833be552cfe6 C-H (my-first-test-branch)
+ |
+ | o 75d69cba5402 C-G (default)
+ |/
+ o d9e379a8c432 C-F (default)
+ |
+ o 51c544a58128 C-C (default)
+ |
+ | o d603e2c0cdd7 C-E (default)
+ |/
+ o 98217d5a1659 C-A (default)
+ |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ |/
+ @ 842e2fac6304 C-ROOT (default)
+
+
+Creating changesets
+
+(new named branch from one head)
+
+ $ hg -R client-other/ up 'desc("C-G")'
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo aaa >> client-other/a
+ $ hg -R client-other/ branch my-second-test-branch
+ marked working directory as branch my-second-test-branch
+ $ hg -R client-other/ commit -m "C-I"
+
+(children "updating" that same head)
+
+ $ hg -R client-racy/ up 'desc("C-G")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo bbb >> client-racy/a
+ $ hg -R client-racy/ commit -m "C-J"
+
+Pushing
+
+ $ hg -R client-racy push -r 'tip' > ./push-log 2>&1 &
+
+ $ waiton $TESTTMP/readyfile
+
+ $ hg -R client-other push -fr 'tip' --new-branch
+ pushing to ssh://user@dummy/server
+ searching for changes
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 1 changesets with 1 changes to 1 files
+
+ $ release $TESTTMP/watchfile
+
+Check the result of the push
+
+#if strict
+
+ $ cat ./push-log
+ pushing to ssh://user@dummy/server
+ searching for changes
+ wrote ready: $TESTTMP/readyfile
+ waiting on: $TESTTMP/watchfile
+ abort: push failed:
+ 'repository changed while pushing - please try again'
+
+ $ hg -R server graph
+ o b35ed749f288 C-I (my-second-test-branch)
+ |
+ o 75d69cba5402 C-G (default)
+ |
+ | o 833be552cfe6 C-H (my-first-test-branch)
+ |/
+ o d9e379a8c432 C-F (default)
+ |
+ o 51c544a58128 C-C (default)
+ |
+ | o d603e2c0cdd7 C-E (default)
+ |/
+ o 98217d5a1659 C-A (default)
+ |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ |/
+ @ 842e2fac6304 C-ROOT (default)
+
+
+#endif
+
+#if unrelated
+
+(unrelated named branches are unrelated)
+
+ $ cat ./push-log
+ pushing to ssh://user@dummy/server
+ searching for changes
+ wrote ready: $TESTTMP/readyfile
+ waiting on: $TESTTMP/watchfile
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 1 changesets with 1 changes to 1 files (+1 heads)
+
+ $ hg -R server graph
+ o 89420bf00fae C-J (default)
+ |
+ | o b35ed749f288 C-I (my-second-test-branch)
+ |/
+ o 75d69cba5402 C-G (default)
+ |
+ | o 833be552cfe6 C-H (my-first-test-branch)
+ |/
+ o d9e379a8c432 C-F (default)
+ |
+ o 51c544a58128 C-C (default)
+ |
+ | o d603e2c0cdd7 C-E (default)
+ |/
+ o 98217d5a1659 C-A (default)
+ |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ |/
+ @ 842e2fac6304 C-ROOT (default)
+
+
+#endif
+
+pushing racing push touching multiple heads
+-------------------------------------------
+
+There are multiple heads, but the racing push touches all of them
+
+# a (raced)
+# | b
+# |/|
+# * *
+# |/
+# *
+
+(resync-all)
+
+#if strict
+
+ $ hg -R ./server pull ./client-racy
+ pulling from ./client-racy
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ (run 'hg heads .' to see heads, 'hg merge' to merge)
+
+#endif
+
+#if unrelated
+
+ $ hg -R ./server pull ./client-racy
+ pulling from ./client-racy
+ searching for changes
+ no changes found
+
+#endif
+
+ $ hg -R ./client-other pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ (run 'hg heads' to see heads)
+ $ hg -R ./client-racy pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ (run 'hg heads .' to see heads, 'hg merge' to merge)
+
+ $ hg -R server graph
+ o 89420bf00fae C-J (default)
+ |
+ | o b35ed749f288 C-I (my-second-test-branch)
+ |/
+ o 75d69cba5402 C-G (default)
+ |
+ | o 833be552cfe6 C-H (my-first-test-branch)
+ |/
+ o d9e379a8c432 C-F (default)
+ |
+ o 51c544a58128 C-C (default)
+ |
+ | o d603e2c0cdd7 C-E (default)
+ |/
+ o 98217d5a1659 C-A (default)
+ |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ |/
+ @ 842e2fac6304 C-ROOT (default)
+
+
+Creating changesets
+
+(merges heads)
+
+ $ hg -R client-other/ up 'desc("C-E")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg -R client-other/ merge 'desc("C-D")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ hg -R client-other/ commit -m "C-K"
+
+(update one head)
+
+ $ hg -R client-racy/ up 'desc("C-D")'
+ 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ echo bbb >> client-racy/b
+ $ hg -R client-racy/ commit -m "C-L"
+
+Pushing
+
+ $ hg -R client-racy push -r 'tip' > ./push-log 2>&1 &
+
+ $ waiton $TESTTMP/readyfile
+
+ $ hg -R client-other push -fr 'tip' --new-branch
+ pushing to ssh://user@dummy/server
+ searching for changes
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 1 changesets with 0 changes to 0 files (-1 heads)
+
+ $ release $TESTTMP/watchfile
+
+Check the result of the push
+
+ $ cat ./push-log
+ pushing to ssh://user@dummy/server
+ searching for changes
+ wrote ready: $TESTTMP/readyfile
+ waiting on: $TESTTMP/watchfile
+ abort: push failed:
+ 'repository changed while pushing - please try again'
+
+ $ hg -R server graph
+ o be705100c623 C-K (default)
+ |\
+ | o d603e2c0cdd7 C-E (default)
+ | |
+ o | 59e76faf78bd C-D (default)
+ | |
+ | | o 89420bf00fae C-J (default)
+ | | |
+ | | | o b35ed749f288 C-I (my-second-test-branch)
+ | | |/
+ | | o 75d69cba5402 C-G (default)
+ | | |
+ | | | o 833be552cfe6 C-H (my-first-test-branch)
+ | | |/
+ | | o d9e379a8c432 C-F (default)
+ | | |
+ | | o 51c544a58128 C-C (default)
+ | |/
+ o | a9149a1428e2 C-B (default)
+ | |
+ | o 98217d5a1659 C-A (default)
+ |/
+ @ 842e2fac6304 C-ROOT (default)
+
+
+pushing raced push touching multiple heads
+------------------------------------------
+
+There are multiple heads, the raced push touches all of them
+
+# b
+# | a (raced)
+# |/|
+# * *
+# |/
+# *
+
+(resync-all)
+
+ $ hg -R ./server pull ./client-racy
+ pulling from ./client-racy
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ (run 'hg heads .' to see heads, 'hg merge' to merge)
+ $ hg -R ./client-other pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ (run 'hg heads .' to see heads, 'hg merge' to merge)
+ $ hg -R ./client-racy pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 0 changes to 0 files
+ (run 'hg update' to get a working copy)
+
+ $ hg -R server graph
+ o cac2cead0ff0 C-L (default)
+ |
+ | o be705100c623 C-K (default)
+ |/|
+ | o d603e2c0cdd7 C-E (default)
+ | |
+ o | 59e76faf78bd C-D (default)
+ | |
+ | | o 89420bf00fae C-J (default)
+ | | |
+ | | | o b35ed749f288 C-I (my-second-test-branch)
+ | | |/
+ | | o 75d69cba5402 C-G (default)
+ | | |
+ | | | o 833be552cfe6 C-H (my-first-test-branch)
+ | | |/
+ | | o d9e379a8c432 C-F (default)
+ | | |
+ | | o 51c544a58128 C-C (default)
+ | |/
+ o | a9149a1428e2 C-B (default)
+ | |
+ | o 98217d5a1659 C-A (default)
+ |/
+ @ 842e2fac6304 C-ROOT (default)
+
+
+Creating changesets
+
+(update existing head)
+
+ $ echo aaa >> client-other/a
+ $ hg -R client-other/ commit -m "C-M"
+
+(merge heads)
+
+ $ hg -R client-racy/ merge 'desc("C-K")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ hg -R client-racy/ commit -m "C-N"
+
+Pushing
+
+ $ hg -R client-racy push -r 'tip' > ./push-log 2>&1 &
+
+ $ waiton $TESTTMP/readyfile
+
+ $ hg -R client-other push -fr 'tip' --new-branch
+ pushing to ssh://user@dummy/server
+ searching for changes
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 1 changesets with 1 changes to 1 files
+
+ $ release $TESTTMP/watchfile
+
+Check the result of the push
+
+ $ cat ./push-log
+ pushing to ssh://user@dummy/server
+ searching for changes
+ wrote ready: $TESTTMP/readyfile
+ waiting on: $TESTTMP/watchfile
+ abort: push failed:
+ 'repository changed while pushing - please try again'
+
+ $ hg -R server graph
+ o 6fd3090135df C-M (default)
+ |
+ o be705100c623 C-K (default)
+ |\
+ | o d603e2c0cdd7 C-E (default)
+ | |
+ +---o cac2cead0ff0 C-L (default)
+ | |
+ o | 59e76faf78bd C-D (default)
+ | |
+ | | o 89420bf00fae C-J (default)
+ | | |
+ | | | o b35ed749f288 C-I (my-second-test-branch)
+ | | |/
+ | | o 75d69cba5402 C-G (default)
+ | | |
+ | | | o 833be552cfe6 C-H (my-first-test-branch)
+ | | |/
+ | | o d9e379a8c432 C-F (default)
+ | | |
+ | | o 51c544a58128 C-C (default)
+ | |/
+ o | a9149a1428e2 C-B (default)
+ | |
+ | o 98217d5a1659 C-A (default)
+ |/
+ @ 842e2fac6304 C-ROOT (default)
+
+
+racing commit push a new head behind another named branch
+---------------------------------------------------------
+
+non-contiguous branches are a valid case, we test for them.
+
+# b (branch default)
+# |
+# o (branch foo)
+# |
+# | a (raced, branch default)
+# |/
+# * (branch foo)
+# |
+# * (branch default)
+
+(resync-all + other branch)
+
+ $ hg -R ./server pull ./client-racy
+ pulling from ./client-racy
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 0 changes to 0 files
+ (run 'hg update' to get a working copy)
+
+(creates named branch on head)
+
+ $ hg -R ./server/ up 'desc("C-N")'
+ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg -R ./server/ branch other
+ marked working directory as branch other
+ $ hg -R ./server/ ci -m "C-Z"
+ $ hg -R ./server/ up null
+ 0 files updated, 0 files merged, 3 files removed, 0 files unresolved
+
+(sync client)
+
+ $ hg -R ./client-other pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 2 changesets with 0 changes to 0 files
+ (run 'hg update' to get a working copy)
+ $ hg -R ./client-racy pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 2 changesets with 1 changes to 1 files (+1 heads)
+ (run 'hg heads .' to see heads, 'hg merge' to merge)
+
+ $ hg -R server graph
+ o 55a6f1c01b48 C-Z (other)
+ |
+ o 866a66e18630 C-N (default)
+ |\
+ +---o 6fd3090135df C-M (default)
+ | |
+ | o cac2cead0ff0 C-L (default)
+ | |
+ o | be705100c623 C-K (default)
+ |\|
+ o | d603e2c0cdd7 C-E (default)
+ | |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | | o 89420bf00fae C-J (default)
+ | | |
+ | | | o b35ed749f288 C-I (my-second-test-branch)
+ | | |/
+ | | o 75d69cba5402 C-G (default)
+ | | |
+ | | | o 833be552cfe6 C-H (my-first-test-branch)
+ | | |/
+ | | o d9e379a8c432 C-F (default)
+ | | |
+ +---o 51c544a58128 C-C (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ | |
+ o | 98217d5a1659 C-A (default)
+ |/
+ o 842e2fac6304 C-ROOT (default)
+
+
+Creating changesets
+
+(update default head through another named branch one)
+
+ $ hg -R client-other/ up 'desc("C-Z")'
+ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo aaa >> client-other/a
+ $ hg -R client-other/ commit -m "C-O"
+ $ echo aaa >> client-other/a
+ $ hg -R client-other/ branch --force default
+ marked working directory as branch default
+ $ hg -R client-other/ commit -m "C-P"
+ created new head
+
+(update default head)
+
+ $ hg -R client-racy/ up 'desc("C-Z")'
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo bbb >> client-other/a
+ $ hg -R client-racy/ branch --force default
+ marked working directory as branch default
+ $ hg -R client-racy/ commit -m "C-Q"
+ created new head
+
+Pushing
+
+ $ hg -R client-racy push -r 'tip' > ./push-log 2>&1 &
+
+ $ waiton $TESTTMP/readyfile
+
+ $ hg -R client-other push -fr 'tip' --new-branch
+ pushing to ssh://user@dummy/server
+ searching for changes
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 2 changesets with 1 changes to 1 files
+
+ $ release $TESTTMP/watchfile
+
+Check the result of the push
+
+ $ cat ./push-log
+ pushing to ssh://user@dummy/server
+ searching for changes
+ wrote ready: $TESTTMP/readyfile
+ waiting on: $TESTTMP/watchfile
+ abort: push failed:
+ 'repository changed while pushing - please try again'
+
+ $ hg -R server graph
+ o 1b58ee3f79e5 C-P (default)
+ |
+ o d0a85b2252a9 C-O (other)
+ |
+ o 55a6f1c01b48 C-Z (other)
+ |
+ o 866a66e18630 C-N (default)
+ |\
+ +---o 6fd3090135df C-M (default)
+ | |
+ | o cac2cead0ff0 C-L (default)
+ | |
+ o | be705100c623 C-K (default)
+ |\|
+ o | d603e2c0cdd7 C-E (default)
+ | |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | | o 89420bf00fae C-J (default)
+ | | |
+ | | | o b35ed749f288 C-I (my-second-test-branch)
+ | | |/
+ | | o 75d69cba5402 C-G (default)
+ | | |
+ | | | o 833be552cfe6 C-H (my-first-test-branch)
+ | | |/
+ | | o d9e379a8c432 C-F (default)
+ | | |
+ +---o 51c544a58128 C-C (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ | |
+ o | 98217d5a1659 C-A (default)
+ |/
+ o 842e2fac6304 C-ROOT (default)
+
+
+raced commit push a new head behind another named branch
+---------------------------------------------------------
+
+non-contiguous branches are a valid case, we test for them.
+
+# b (raced branch default)
+# |
+# o (branch foo)
+# |
+# | a (branch default)
+# |/
+# * (branch foo)
+# |
+# * (branch default)
+
+(resync-all)
+
+ $ hg -R ./server pull ./client-racy
+ pulling from ./client-racy
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 0 changes to 0 files (+1 heads)
+ (run 'hg heads .' to see heads, 'hg merge' to merge)
+ $ hg -R ./client-other pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 0 changes to 0 files (+1 heads)
+ (run 'hg heads .' to see heads, 'hg merge' to merge)
+ $ hg -R ./client-racy pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 2 changesets with 1 changes to 1 files (+1 heads)
+ (run 'hg heads .' to see heads, 'hg merge' to merge)
+
+ $ hg -R server graph
+ o b0ee3d6f51bc C-Q (default)
+ |
+ | o 1b58ee3f79e5 C-P (default)
+ | |
+ | o d0a85b2252a9 C-O (other)
+ |/
+ o 55a6f1c01b48 C-Z (other)
+ |
+ o 866a66e18630 C-N (default)
+ |\
+ +---o 6fd3090135df C-M (default)
+ | |
+ | o cac2cead0ff0 C-L (default)
+ | |
+ o | be705100c623 C-K (default)
+ |\|
+ o | d603e2c0cdd7 C-E (default)
+ | |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | | o 89420bf00fae C-J (default)
+ | | |
+ | | | o b35ed749f288 C-I (my-second-test-branch)
+ | | |/
+ | | o 75d69cba5402 C-G (default)
+ | | |
+ | | | o 833be552cfe6 C-H (my-first-test-branch)
+ | | |/
+ | | o d9e379a8c432 C-F (default)
+ | | |
+ +---o 51c544a58128 C-C (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ | |
+ o | 98217d5a1659 C-A (default)
+ |/
+ o 842e2fac6304 C-ROOT (default)
+
+
+Creating changesets
+
+(update 'other' named branch head)
+
+ $ hg -R client-other/ up 'desc("C-P")'
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo aaa >> client-other/a
+ $ hg -R client-other/ branch --force other
+ marked working directory as branch other
+ $ hg -R client-other/ commit -m "C-R"
+ created new head
+
+(update 'other' named branch through a 'default' changeset)
+
+ $ hg -R client-racy/ up 'desc("C-P")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo bbb >> client-racy/a
+ $ hg -R client-racy/ commit -m "C-S"
+ $ echo bbb >> client-racy/a
+ $ hg -R client-racy/ branch --force other
+ marked working directory as branch other
+ $ hg -R client-racy/ commit -m "C-T"
+ created new head
+
+Pushing
+
+ $ hg -R client-racy push -r 'tip' > ./push-log 2>&1 &
+
+ $ waiton $TESTTMP/readyfile
+
+ $ hg -R client-other push -fr 'tip' --new-branch
+ pushing to ssh://user@dummy/server
+ searching for changes
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 1 changesets with 1 changes to 1 files
+
+ $ release $TESTTMP/watchfile
+
+Check the result of the push
+
+ $ cat ./push-log
+ pushing to ssh://user@dummy/server
+ searching for changes
+ wrote ready: $TESTTMP/readyfile
+ waiting on: $TESTTMP/watchfile
+ abort: push failed:
+ 'repository changed while pushing - please try again'
+
+ $ hg -R server graph
+ o de7b9e2ba3f6 C-R (other)
+ |
+ o 1b58ee3f79e5 C-P (default)
+ |
+ o d0a85b2252a9 C-O (other)
+ |
+ | o b0ee3d6f51bc C-Q (default)
+ |/
+ o 55a6f1c01b48 C-Z (other)
+ |
+ o 866a66e18630 C-N (default)
+ |\
+ +---o 6fd3090135df C-M (default)
+ | |
+ | o cac2cead0ff0 C-L (default)
+ | |
+ o | be705100c623 C-K (default)
+ |\|
+ o | d603e2c0cdd7 C-E (default)
+ | |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | | o 89420bf00fae C-J (default)
+ | | |
+ | | | o b35ed749f288 C-I (my-second-test-branch)
+ | | |/
+ | | o 75d69cba5402 C-G (default)
+ | | |
+ | | | o 833be552cfe6 C-H (my-first-test-branch)
+ | | |/
+ | | o d9e379a8c432 C-F (default)
+ | | |
+ +---o 51c544a58128 C-C (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ | |
+ o | 98217d5a1659 C-A (default)
+ |/
+ o 842e2fac6304 C-ROOT (default)
+
+
+raced commit push a new head obsoleting the one touched by the racing push
+--------------------------------------------------------------------------
+
+# b (racing)
+# |
+# ø⇠◔ a (raced)
+# |/
+# *
+
+(resync-all)
+
+ $ hg -R ./server pull ./client-racy
+ pulling from ./client-racy
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 2 changesets with 2 changes to 1 files (+1 heads)
+ (run 'hg heads .' to see heads, 'hg merge' to merge)
+ $ hg -R ./client-other pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 2 changesets with 2 changes to 1 files (+1 heads)
+ (run 'hg heads' to see heads, 'hg merge' to merge)
+ $ hg -R ./client-racy pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ (run 'hg heads' to see heads, 'hg merge' to merge)
+
+ $ hg -R server graph
+ o 3d57ed3c1091 C-T (other)
+ |
+ o 2efd43f7b5ba C-S (default)
+ |
+ | o de7b9e2ba3f6 C-R (other)
+ |/
+ o 1b58ee3f79e5 C-P (default)
+ |
+ o d0a85b2252a9 C-O (other)
+ |
+ | o b0ee3d6f51bc C-Q (default)
+ |/
+ o 55a6f1c01b48 C-Z (other)
+ |
+ o 866a66e18630 C-N (default)
+ |\
+ +---o 6fd3090135df C-M (default)
+ | |
+ | o cac2cead0ff0 C-L (default)
+ | |
+ o | be705100c623 C-K (default)
+ |\|
+ o | d603e2c0cdd7 C-E (default)
+ | |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | | o 89420bf00fae C-J (default)
+ | | |
+ | | | o b35ed749f288 C-I (my-second-test-branch)
+ | | |/
+ | | o 75d69cba5402 C-G (default)
+ | | |
+ | | | o 833be552cfe6 C-H (my-first-test-branch)
+ | | |/
+ | | o d9e379a8c432 C-F (default)
+ | | |
+ +---o 51c544a58128 C-C (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ | |
+ o | 98217d5a1659 C-A (default)
+ |/
+ o 842e2fac6304 C-ROOT (default)
+
+
+Creating changesets and markers
+
+(continue existing head)
+
+ $ hg -R client-other/ up 'desc("C-Q")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo aaa >> client-other/a
+ $ hg -R client-other/ commit -m "C-U"
+
+(new topo branch obsoleting that same head)
+
+ $ hg -R client-racy/ up 'desc("C-Z")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo bbb >> client-racy/a
+ $ hg -R client-racy/ branch --force default
+ marked working directory as branch default
+ $ hg -R client-racy/ commit -m "C-V"
+ created new head
+ $ ID_Q=`hg -R client-racy log -T '{node}\n' -r 'desc("C-Q")'`
+ $ ID_V=`hg -R client-racy log -T '{node}\n' -r 'desc("C-V")'`
+ $ hg -R client-racy debugobsolete $ID_Q $ID_V
+ obsoleted 1 changesets
+
+Pushing
+
+ $ hg -R client-racy push -r 'tip' > ./push-log 2>&1 &
+
+ $ waiton $TESTTMP/readyfile
+
+ $ hg -R client-other push -fr 'tip' --new-branch
+ pushing to ssh://user@dummy/server
+ searching for changes
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 1 changesets with 0 changes to 0 files
+
+ $ release $TESTTMP/watchfile
+
+Check the result of the push
+
+ $ cat ./push-log
+ pushing to ssh://user@dummy/server
+ searching for changes
+ wrote ready: $TESTTMP/readyfile
+ waiting on: $TESTTMP/watchfile
+ abort: push failed:
+ 'repository changed while pushing - please try again'
+
+ $ hg -R server debugobsolete
+ $ hg -R server graph
+ o a98a47d8b85b C-U (default)
+ |
+ o b0ee3d6f51bc C-Q (default)
+ |
+ | o 3d57ed3c1091 C-T (other)
+ | |
+ | o 2efd43f7b5ba C-S (default)
+ | |
+ | | o de7b9e2ba3f6 C-R (other)
+ | |/
+ | o 1b58ee3f79e5 C-P (default)
+ | |
+ | o d0a85b2252a9 C-O (other)
+ |/
+ o 55a6f1c01b48 C-Z (other)
+ |
+ o 866a66e18630 C-N (default)
+ |\
+ +---o 6fd3090135df C-M (default)
+ | |
+ | o cac2cead0ff0 C-L (default)
+ | |
+ o | be705100c623 C-K (default)
+ |\|
+ o | d603e2c0cdd7 C-E (default)
+ | |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | | o 89420bf00fae C-J (default)
+ | | |
+ | | | o b35ed749f288 C-I (my-second-test-branch)
+ | | |/
+ | | o 75d69cba5402 C-G (default)
+ | | |
+ | | | o 833be552cfe6 C-H (my-first-test-branch)
+ | | |/
+ | | o d9e379a8c432 C-F (default)
+ | | |
+ +---o 51c544a58128 C-C (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ | |
+ o | 98217d5a1659 C-A (default)
+ |/
+ o 842e2fac6304 C-ROOT (default)
+
+
+racing commit push a new head obsoleting the one touched by the raced push
+--------------------------------------------------------------------------
+
+(mirror test case of the previous one)
+
+# a (raced branch default)
+# |
+# ø⇠◔ b (racing)
+# |/
+# *
+
+(resync-all)
+
+ $ hg -R ./server pull ./client-racy
+ pulling from ./client-racy
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ 1 new obsolescence markers
+ obsoleted 1 changesets
+ (run 'hg heads .' to see heads, 'hg merge' to merge)
+ $ hg -R ./client-other pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ 1 new obsolescence markers
+ obsoleted 1 changesets
+ (run 'hg heads .' to see heads, 'hg merge' to merge)
+ $ hg -R ./client-racy pull
+ pulling from ssh://user@dummy/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 0 changes to 0 files
+ (run 'hg update' to get a working copy)
+
+ $ hg -R server debugobsolete
+ b0ee3d6f51bc4c0ca6d4f2907708027a6c376233 720c5163ecf64dcc6216bee2d62bf3edb1882499 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ $ hg -R server graph
+ o 720c5163ecf6 C-V (default)
+ |
+ | o a98a47d8b85b C-U (default)
+ | |
+ | x b0ee3d6f51bc C-Q (default)
+ |/
+ | o 3d57ed3c1091 C-T (other)
+ | |
+ | o 2efd43f7b5ba C-S (default)
+ | |
+ | | o de7b9e2ba3f6 C-R (other)
+ | |/
+ | o 1b58ee3f79e5 C-P (default)
+ | |
+ | o d0a85b2252a9 C-O (other)
+ |/
+ o 55a6f1c01b48 C-Z (other)
+ |
+ o 866a66e18630 C-N (default)
+ |\
+ +---o 6fd3090135df C-M (default)
+ | |
+ | o cac2cead0ff0 C-L (default)
+ | |
+ o | be705100c623 C-K (default)
+ |\|
+ o | d603e2c0cdd7 C-E (default)
+ | |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | | o 89420bf00fae C-J (default)
+ | | |
+ | | | o b35ed749f288 C-I (my-second-test-branch)
+ | | |/
+ | | o 75d69cba5402 C-G (default)
+ | | |
+ | | | o 833be552cfe6 C-H (my-first-test-branch)
+ | | |/
+ | | o d9e379a8c432 C-F (default)
+ | | |
+ +---o 51c544a58128 C-C (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ | |
+ o | 98217d5a1659 C-A (default)
+ |/
+ o 842e2fac6304 C-ROOT (default)
+
+
+Creating changesets and markers
+
+(new topo branch obsoleting that same head)
+
+ $ hg -R client-other/ up 'desc("C-Q")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo bbb >> client-other/a
+ $ hg -R client-other/ branch --force default
+ marked working directory as branch default
+ $ hg -R client-other/ commit -m "C-W"
+ created new head
+ $ ID_V=`hg -R client-other log -T '{node}\n' -r 'desc("C-V")'`
+ $ ID_W=`hg -R client-other log -T '{node}\n' -r 'desc("C-W")'`
+ $ hg -R client-other debugobsolete $ID_V $ID_W
+ obsoleted 1 changesets
+
+(continue the same head)
+
+ $ echo aaa >> client-racy/a
+ $ hg -R client-racy/ commit -m "C-X"
+
+Pushing
+
+ $ hg -R client-racy push -r 'tip' > ./push-log 2>&1 &
+
+ $ waiton $TESTTMP/readyfile
+
+ $ hg -R client-other push -fr 'tip' --new-branch
+ pushing to ssh://user@dummy/server
+ searching for changes
+ remote: adding changesets
+ remote: adding manifests
+ remote: adding file changes
+ remote: added 1 changesets with 0 changes to 1 files (+1 heads)
+ remote: 1 new obsolescence markers
+ remote: obsoleted 1 changesets
+
+ $ release $TESTTMP/watchfile
+
+Check the result of the push
+
+ $ cat ./push-log
+ pushing to ssh://user@dummy/server
+ searching for changes
+ wrote ready: $TESTTMP/readyfile
+ waiting on: $TESTTMP/watchfile
+ abort: push failed:
+ 'repository changed while pushing - please try again'
+
+ $ hg -R server debugobsolete
+ b0ee3d6f51bc4c0ca6d4f2907708027a6c376233 720c5163ecf64dcc6216bee2d62bf3edb1882499 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ 720c5163ecf64dcc6216bee2d62bf3edb1882499 39bc0598afe90ab18da460bafecc0fa953b77596 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
+ $ hg -R server graph --hidden
+ o 39bc0598afe9 C-W (default)
+ |
+ | o a98a47d8b85b C-U (default)
+ |/
+ x b0ee3d6f51bc C-Q (default)
+ |
+ | o 3d57ed3c1091 C-T (other)
+ | |
+ | o 2efd43f7b5ba C-S (default)
+ | |
+ | | o de7b9e2ba3f6 C-R (other)
+ | |/
+ | o 1b58ee3f79e5 C-P (default)
+ | |
+ | o d0a85b2252a9 C-O (other)
+ |/
+ | x 720c5163ecf6 C-V (default)
+ |/
+ o 55a6f1c01b48 C-Z (other)
+ |
+ o 866a66e18630 C-N (default)
+ |\
+ +---o 6fd3090135df C-M (default)
+ | |
+ | o cac2cead0ff0 C-L (default)
+ | |
+ o | be705100c623 C-K (default)
+ |\|
+ o | d603e2c0cdd7 C-E (default)
+ | |
+ | o 59e76faf78bd C-D (default)
+ | |
+ | | o 89420bf00fae C-J (default)
+ | | |
+ | | | o b35ed749f288 C-I (my-second-test-branch)
+ | | |/
+ | | o 75d69cba5402 C-G (default)
+ | | |
+ | | | o 833be552cfe6 C-H (my-first-test-branch)
+ | | |/
+ | | o d9e379a8c432 C-F (default)
+ | | |
+ +---o 51c544a58128 C-C (default)
+ | |
+ | o a9149a1428e2 C-B (default)
+ | |
+ o | 98217d5a1659 C-A (default)
+ |/
+ o 842e2fac6304 C-ROOT (default)
+
--- a/tests/test-push-warn.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-push-warn.t Wed Jul 19 07:51:41 2017 -0500
@@ -41,7 +41,7 @@
searching for changes
taking quick initial sample
query 2; still undecided: 1, sample size is: 1
- 2 total queries
+ 2 total queries in *.????s (glob)
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
@@ -418,7 +418,7 @@
adding c
created new head
- $ for i in `python $TESTDIR/seq.py 3`; do hg -R h up -q 0; echo $i > h/b; hg -R h ci -qAm$i; done
+ $ for i in `$PYTHON $TESTDIR/seq.py 3`; do hg -R h up -q 0; echo $i > h/b; hg -R h ci -qAm$i; done
$ hg -R i push h
pushing to h
--- a/tests/test-push.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-push.t Wed Jul 19 07:51:41 2017 -0500
@@ -297,3 +297,22 @@
lock: user *, process * (*s) (glob)
wlock: user *, process * (*s) (glob)
+Test bare push with multiple race checking options
+--------------------------------------------------
+
+ $ hg init test-bare-push-no-concurrency
+ $ hg init test-bare-push-unrelated-concurrency
+ $ hg -R test-revflag push -r 0 test-bare-push-no-concurrency --config server.concurrent-push-mode=strict
+ pushing to test-bare-push-no-concurrency
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+ $ hg -R test-revflag push -r 0 test-bare-push-unrelated-concurrency --config server.concurrent-push-mode=check-related
+ pushing to test-bare-push-unrelated-concurrency
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-py3-commands.t Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,239 @@
+#require py3exe
+
+This test helps in keeping a track on which commands we can run on
+Python 3 and see what kind of errors are coming up.
+The full traceback is hidden to have a stable output.
+ $ HGBIN=`which hg`
+
+ $ for cmd in version debuginstall ; do
+ > echo $cmd
+ > $PYTHON3 $HGBIN $cmd 2>&1 2>&1 | tail -1
+ > done
+ version
+ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+ debuginstall
+ no problems detected
+
+#if test-repo
+Make a clone so that any features in the developer's .hg/hgrc that
+might confuse Python 3 don't break this test. When we can do commit in
+Python 3, we'll stop doing this. We use e76ed1e480ef for the clone
+because it has different files than 273ce12ad8f1, so we can test both
+`files` from dirstate and `files` loaded from a specific revision.
+
+ $ hg clone -r e76ed1e480ef "`dirname "$TESTDIR"`" testrepo 2>&1 | tail -1
+ 15 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Test using -R, which exercises some URL code:
+ $ $PYTHON3 $HGBIN -R testrepo files -r 273ce12ad8f1 | tail -1
+ testrepo/tkmerge
+
+Now prove `hg files` is reading the whole manifest. We have to grep
+out some potential warnings that come from hgrc as yet.
+ $ cd testrepo
+ $ $PYTHON3 $HGBIN files -r 273ce12ad8f1
+ .hgignore
+ PKG-INFO
+ README
+ hg
+ mercurial/__init__.py
+ mercurial/byterange.py
+ mercurial/fancyopts.py
+ mercurial/hg.py
+ mercurial/mdiff.py
+ mercurial/revlog.py
+ mercurial/transaction.py
+ notes.txt
+ setup.py
+ tkmerge
+
+ $ $PYTHON3 $HGBIN files -r 273ce12ad8f1 | wc -l
+ \s*14 (re)
+ $ $PYTHON3 $HGBIN files | wc -l
+ \s*15 (re)
+
+Test if log-like commands work:
+
+ $ $PYTHON3 $HGBIN tip
+ changeset: 10:e76ed1e480ef
+ tag: tip
+ user: oxymoron@cinder.waste.org
+ date: Tue May 03 23:37:43 2005 -0800
+ summary: Fix linking of changeset revs when merging
+
+
+ $ $PYTHON3 $HGBIN log -r0
+ changeset: 0:9117c6561b0b
+ user: mpm@selenic.com
+ date: Tue May 03 13:16:10 2005 -0800
+ summary: Add back links from file revisions to changeset revisions
+
+
+ $ cd ..
+#endif
+
+Test if `hg config` works:
+
+ $ $PYTHON3 $HGBIN config
+ devel.all-warnings=true
+ devel.default-date=0 0
+ largefiles.usercache=$TESTTMP/.cache/largefiles
+ ui.slash=True
+ ui.interactive=False
+ ui.mergemarkers=detailed
+ ui.promptecho=True
+ web.address=localhost
+ web.ipv6=False
+
+ $ cat > included-hgrc <<EOF
+ > [extensions]
+ > babar = imaginary_elephant
+ > EOF
+ $ cat >> $HGRCPATH <<EOF
+ > %include $TESTTMP/included-hgrc
+ > EOF
+ $ $PYTHON3 $HGBIN version | tail -1
+ *** failed to import extension babar from imaginary_elephant: *: 'imaginary_elephant' (glob)
+ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+ $ rm included-hgrc
+ $ touch included-hgrc
+
+Test bytes-ness of policy.policy with HGMODULEPOLICY
+
+ $ HGMODULEPOLICY=py
+ $ export HGMODULEPOLICY
+ $ $PYTHON3 `which hg` debuginstall 2>&1 2>&1 | tail -1
+ no problems detected
+
+`hg init` can create empty repos
+`hg status` works fine
+`hg summary` also works!
+
+ $ $PYTHON3 `which hg` init py3repo
+ $ cd py3repo
+ $ echo "This is the file 'iota'." > iota
+ $ $PYTHON3 $HGBIN status
+ ? iota
+ $ $PYTHON3 $HGBIN add iota
+ $ $PYTHON3 $HGBIN status
+ A iota
+ $ hg diff --nodates --git
+ diff --git a/iota b/iota
+ new file mode 100644
+ --- /dev/null
+ +++ b/iota
+ @@ -0,0 +1,1 @@
+ +This is the file 'iota'.
+ $ $PYTHON3 $HGBIN commit --message 'commit performed in Python 3'
+ $ $PYTHON3 $HGBIN status
+
+ $ mkdir A
+ $ echo "This is the file 'mu'." > A/mu
+ $ $PYTHON3 $HGBIN addremove
+ adding A/mu
+ $ $PYTHON3 $HGBIN status
+ A A/mu
+ $ HGEDITOR='echo message > ' $PYTHON3 $HGBIN commit
+ $ $PYTHON3 $HGBIN status
+ $ $PYTHON3 $HGBIN summary
+ parent: 1:e1e9167203d4 tip
+ message
+ branch: default
+ commit: (clean)
+ update: (current)
+ phases: 2 draft
+
+Test weird unicode-vs-bytes stuff
+
+ $ $PYTHON3 $HGBIN help | egrep -v '^ |^$'
+ Mercurial Distributed SCM
+ list of commands:
+ additional help topics:
+ (use 'hg help -v' to show built-in aliases and global options)
+
+ $ $PYTHON3 $HGBIN help help | egrep -v '^ |^$'
+ hg help [-ecks] [TOPIC]
+ show help for a given topic or a help overview
+ options ([+] can be repeated):
+ (some details hidden, use --verbose to show complete help)
+
+ $ $PYTHON3 $HGBIN help -k notopic
+ abort: no matches
+ (try 'hg help' for a list of topics)
+ [255]
+
+Prove the repo is valid using the Python 2 `hg`:
+ $ hg verify
+ checking changesets
+ checking manifests
+ crosschecking files in changesets and manifests
+ checking files
+ 2 files, 2 changesets, 2 total revisions
+ $ hg log
+ changeset: 1:e1e9167203d4
+ tag: tip
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: message
+
+ changeset: 0:71c96e924262
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: commit performed in Python 3
+
+
+ $ $PYTHON3 $HGBIN log -G
+ @ changeset: 1:e1e9167203d4
+ | tag: tip
+ | user: test
+ | date: Thu Jan 01 00:00:00 1970 +0000
+ | summary: message
+ |
+ o changeset: 0:71c96e924262
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: commit performed in Python 3
+
+ $ $PYTHON3 $HGBIN log -Tjson
+ [
+ {
+ "rev": 1,
+ "node": "e1e9167203d450ca2f558af628955b5f5afd4489",
+ "branch": "default",
+ "phase": "draft",
+ "user": "test",
+ "date": [0, 0],
+ "desc": "message",
+ "bookmarks": [],
+ "tags": ["tip"],
+ "parents": ["71c96e924262969ff0d8d3d695b0f75412ccc3d8"]
+ },
+ {
+ "rev": 0,
+ "node": "71c96e924262969ff0d8d3d695b0f75412ccc3d8",
+ "branch": "default",
+ "phase": "draft",
+ "user": "test",
+ "date": [0, 0],
+ "desc": "commit performed in Python 3",
+ "bookmarks": [],
+ "tags": [],
+ "parents": ["0000000000000000000000000000000000000000"]
+ }
+ ]
+
+Show that update works now!
+
+ $ $PYTHON3 $HGBIN up 0
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ $PYTHON3 $HGBIN identify
+ 71c96e924262
+
+branches and bookmarks also works!
+
+ $ $PYTHON3 $HGBIN branches
+ default 1:e1e9167203d4
+ $ $PYTHON3 $HGBIN bookmark book
+ $ $PYTHON3 $HGBIN bookmarks
+ * book 0:71c96e924262
--- a/tests/test-rebase-base.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-rebase-base.t Wed Jul 19 07:51:41 2017 -0500
@@ -65,7 +65,7 @@
> EOS
rebasing 2:c1e6b162678d "B" (B)
rebasing 3:d6003a550c2c "C" (C)
- rebasing 6:5251e0cb7302 "E" (E tip)
+ rebasing 6:54c8f00cb91c "E" (E tip)
o 6: E
|\
| o 5: C
@@ -92,7 +92,7 @@
> R
> EOS
rebasing 2:c1e6b162678d "B" (B)
- rebasing 5:5251e0cb7302 "E" (E tip)
+ rebasing 5:54c8f00cb91c "E" (E tip)
o 5: E
|\
| o 4: B
@@ -118,7 +118,7 @@
> EOS
rebasing 2:c1e6b162678d "B" (B)
rebasing 3:d6003a550c2c "C" (C)
- rebasing 5:5251e0cb7302 "E" (E tip)
+ rebasing 5:54c8f00cb91c "E" (E tip)
o 5: E
|\
| o 4: C
@@ -208,18 +208,18 @@
> A A A
> EOS
rebasing 2:dc0947a82db8 "C" (C)
- rebasing 8:215e7b0814e1 "D" (D)
+ rebasing 8:4e4f9194f9f1 "D" (D)
rebasing 9:03ca77807e91 "E" (E)
rebasing 10:afc707c82df0 "F" (F)
- rebasing 13:018caa673317 "G" (G)
- rebasing 14:4f710fbd68cb "H" (H)
+ rebasing 13:690dfff91e9e "G" (G)
+ rebasing 14:2893b886bb10 "H" (H)
rebasing 3:08ebfeb61bac "I" (I)
rebasing 4:a0a5005cec67 "J" (J)
rebasing 5:83780307a7e8 "K" (K)
rebasing 6:e131637a1cb6 "L" (L)
- rebasing 11:d6fe3d11d95d "M" (M)
- rebasing 12:fa1e02269063 "N" (N)
- rebasing 15:448b1a498430 "P" (P tip)
+ rebasing 11:d1f6d0c3c7e4 "M" (M)
+ rebasing 12:7aaec6f81888 "N" (N)
+ rebasing 15:325bc8f1760d "P" (P tip)
o 15: P
|\
| o 14: N
@@ -269,9 +269,9 @@
rebasing 6:06ca5dfe3b5b "B2" (B2)
rebasing 7:73508237b032 "C1" (C1)
rebasing 9:fdb955e2faed "A2" (A2)
- rebasing 11:1b2f368c3cb5 "A3" (A3)
+ rebasing 11:4e449bd1a643 "A3" (A3)
rebasing 10:0a33b0519128 "B1" (B1)
- rebasing 12:bd6a37b5b67a "B3" (B3 tip)
+ rebasing 12:209327807c3a "B3" (B3 tip)
o 12: B3
|\
| o 11: B1
@@ -298,18 +298,6 @@
|
o 0: M0
-Mixed rebasable and non-rebasable bases (unresolved, issue5422):
-
- $ rebasewithdag -b C+D -d B <<'EOS'
- > D
- > /
- > B C
- > |/
- > A
- > EOS
- nothing to rebase
- [1]
-
Disconnected graph:
$ rebasewithdag -b B -d Z <<'EOS'
@@ -346,7 +334,7 @@
> \|\|
> A C
> EOF
- nothing to rebase from 86d01f49c0d9+b70f76719894 to 262e37e34f63
+ nothing to rebase from f675d5a1c6a4+b70f76719894 to 262e37e34f63
[1]
Multiple roots. One root is not an ancestor of dest. Select using a merge:
@@ -358,8 +346,8 @@
> \|\|
> A C
> EOF
- rebasing 2:86d01f49c0d9 "B" (B)
- rebasing 5:539a0ff83ea9 "E" (E tip)
+ rebasing 2:f675d5a1c6a4 "B" (B)
+ rebasing 5:f68696fe6af8 "E" (E tip)
o 5: E
|\
| o 4: B
@@ -379,8 +367,8 @@
> \|\|\
> A C A
> EOF
- rebasing 2:86d01f49c0d9 "B" (B)
- rebasing 3:b7df2ca01aa8 "D" (D)
+ rebasing 2:f675d5a1c6a4 "B" (B)
+ rebasing 3:c2a779e13b56 "D" (D)
o 4: D
|\
+---o 3: B
--- a/tests/test-rebase-bookmarks.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-rebase-bookmarks.t Wed Jul 19 07:51:41 2017 -0500
@@ -75,7 +75,7 @@
$ hg rebase -s Y -d 3
rebasing 2:49cb3485fa0c "C" (Y Z)
- saved backup bundle to $TESTTMP/a1/.hg/strip-backup/49cb3485fa0c-126f3e97-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a1/.hg/strip-backup/49cb3485fa0c-126f3e97-rebase.hg (glob)
$ hg tglog
@ 3: 'C' bookmarks: Y Z
@@ -97,7 +97,7 @@
$ hg rebase -s W -d .
rebasing 3:41acb9dca9eb "D" (tip W)
- saved backup bundle to $TESTTMP/a4/.hg/strip-backup/41acb9dca9eb-b35a6a63-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a4/.hg/strip-backup/41acb9dca9eb-b35a6a63-rebase.hg (glob)
$ hg bookmarks
W 3:0d3554f74897
@@ -116,7 +116,7 @@
$ hg rebase -s 1 -d 3
rebasing 1:6c81ed0049f8 "B" (X)
rebasing 2:49cb3485fa0c "C" (Y Z)
- saved backup bundle to $TESTTMP/a2/.hg/strip-backup/6c81ed0049f8-a687065f-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a2/.hg/strip-backup/6c81ed0049f8-a687065f-rebase.hg (glob)
$ hg tglog
@ 3: 'C' bookmarks: Y Z
@@ -139,7 +139,7 @@
$ hg rebase -d W
rebasing 1:6c81ed0049f8 "B" (X)
rebasing 2:49cb3485fa0c "C" (Y Z)
- saved backup bundle to $TESTTMP/a3/.hg/strip-backup/6c81ed0049f8-a687065f-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a3/.hg/strip-backup/6c81ed0049f8-a687065f-rebase.hg (glob)
$ hg tglog
o 3: 'C' bookmarks: Y Z
@@ -179,7 +179,7 @@
continue: hg rebase --continue
$ hg rebase --continue
rebasing 3:3d5fa227f4b5 "C" (Y Z)
- saved backup bundle to $TESTTMP/a3/.hg/strip-backup/3d5fa227f4b5-c6ea2371-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a3/.hg/strip-backup/3d5fa227f4b5-c6ea2371-rebase.hg (glob)
$ hg tglog
@ 4: 'C' bookmarks: Y Z
|
@@ -209,4 +209,4 @@
rebasing 5:345c90f326a4 "bisect"
rebasing 6:f677a2907404 "bisect2"
rebasing 7:325c16001345 "bisect3" (tip bisect)
- saved backup bundle to $TESTTMP/a3/.hg/strip-backup/345c90f326a4-b4840586-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a3/.hg/strip-backup/345c90f326a4-b4840586-rebase.hg (glob)
--- a/tests/test-rebase-cache.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-rebase-cache.t Wed Jul 19 07:51:41 2017 -0500
@@ -104,7 +104,7 @@
$ hg rebase -s 5 -d 8
rebasing 5:635859577d0b "D"
rebasing 6:5097051d331d "E"
- saved backup bundle to $TESTTMP/a1/.hg/strip-backup/635859577d0b-89160bff-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a1/.hg/strip-backup/635859577d0b-89160bff-rebase.hg (glob)
$ hg branches
branch3 8:466cdfb14b62
@@ -166,7 +166,7 @@
$ hg rebase -s 8 -d 6
rebasing 8:4666b71e8e32 "F" (tip)
- saved backup bundle to $TESTTMP/a2/.hg/strip-backup/4666b71e8e32-fc1c4e96-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a2/.hg/strip-backup/4666b71e8e32-fc1c4e96-rebase.hg (glob)
$ hg branches
branch2 8:6b4bdc1b5ac0
@@ -233,7 +233,7 @@
rebasing 7:653b9feb4616 "branch3"
note: rebase of 7:653b9feb4616 created no changes to commit
rebasing 8:4666b71e8e32 "F" (tip)
- saved backup bundle to $TESTTMP/a3/.hg/strip-backup/653b9feb4616-3c88de16-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a3/.hg/strip-backup/653b9feb4616-3c88de16-rebase.hg (glob)
$ hg branches
branch2 7:6b4bdc1b5ac0
@@ -480,4 +480,4 @@
HGEDITFORM=rebase.merge
rebasing 8:326cfedc031c "I" (tip)
HGEDITFORM=rebase.normal
- saved backup bundle to $TESTTMP/a3/c4/.hg/strip-backup/361a99976cc9-35e980d0-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a3/c4/.hg/strip-backup/361a99976cc9-35e980d0-rebase.hg (glob)
--- a/tests/test-rebase-check-restore.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-rebase-check-restore.t Wed Jul 19 07:51:41 2017 -0500
@@ -138,7 +138,7 @@
continue: hg rebase --continue
$ hg rebase --continue
rebasing 5:01e6ebbd8272 "F" (tip)
- saved backup bundle to $TESTTMP/a2/.hg/strip-backup/01e6ebbd8272-6fd3a015-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a2/.hg/strip-backup/01e6ebbd8272-6fd3a015-rebase.hg (glob)
$ hg tglog
@ 5:draft 'F' notdefault
--- a/tests/test-rebase-collapse.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-rebase-collapse.t Wed Jul 19 07:51:41 2017 -0500
@@ -78,7 +78,7 @@
HG: added C
HG: added D
====
- saved backup bundle to $TESTTMP/a1/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a1/.hg/strip-backup/42ccdea3bb16-3cb021d3-rebase.hg (glob)
$ hg tglogp
@ 5:secret 'Collapsed revision
@@ -118,7 +118,7 @@
$ hg rebase --source 4 --collapse --dest 7
rebasing 4:9520eea781bc "E"
rebasing 6:eea13746799a "G"
- saved backup bundle to $TESTTMP/a2/.hg/strip-backup/9520eea781bc-fcd8edd4-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a2/.hg/strip-backup/9520eea781bc-fcd8edd4-rebase.hg (glob)
$ hg tglog
o 6: 'Collapsed revision
@@ -161,7 +161,7 @@
rebasing 4:9520eea781bc "E"
rebasing 6:eea13746799a "G"
HGEDITFORM=rebase.collapse
- saved backup bundle to $TESTTMP/a3/.hg/strip-backup/9520eea781bc-fcd8edd4-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a3/.hg/strip-backup/9520eea781bc-fcd8edd4-rebase.hg (glob)
$ hg tglog
o 6: 'custom message'
@@ -271,7 +271,7 @@
rebasing 4:8a5212ebc852 "E"
rebasing 5:7f219660301f "F"
rebasing 6:c772a8b2dc17 "G"
- saved backup bundle to $TESTTMP/b1/.hg/strip-backup/8a5212ebc852-75046b61-backup.hg (glob)
+ saved backup bundle to $TESTTMP/b1/.hg/strip-backup/8a5212ebc852-75046b61-rebase.hg (glob)
$ hg tglog
o 5: 'Collapsed revision
@@ -424,7 +424,7 @@
merging E
rebasing 6:55a44ad28289 "G"
rebasing 7:417d3b648079 "H"
- saved backup bundle to $TESTTMP/c1/.hg/strip-backup/8a5212ebc852-f95d0879-backup.hg (glob)
+ saved backup bundle to $TESTTMP/c1/.hg/strip-backup/8a5212ebc852-f95d0879-rebase.hg (glob)
$ hg tglog
o 5: 'Collapsed revision
@@ -517,7 +517,7 @@
rebasing 2:f838bfaca5c7 "C"
rebasing 3:7bbcd6078bcc "D"
rebasing 4:0a42590ed746 "E"
- saved backup bundle to $TESTTMP/d1/.hg/strip-backup/27547f69f254-9a3f7d92-backup.hg (glob)
+ saved backup bundle to $TESTTMP/d1/.hg/strip-backup/27547f69f254-9a3f7d92-rebase.hg (glob)
$ hg tglog
o 2: 'Collapsed revision
@@ -602,7 +602,7 @@
$ hg rebase -s 5 -d 4
rebasing 5:fbfb97b1089a "E" (tip)
note: rebase of 5:fbfb97b1089a created no changes to commit
- saved backup bundle to $TESTTMP/e/.hg/strip-backup/fbfb97b1089a-553e1d85-backup.hg (glob)
+ saved backup bundle to $TESTTMP/e/.hg/strip-backup/fbfb97b1089a-553e1d85-rebase.hg (glob)
$ hg tglog
@ 4: 'E'
|
@@ -660,7 +660,7 @@
rebasing 3:338e84e2e558 "move2" (tip)
merging f and c to c
merging e and g to g
- saved backup bundle to $TESTTMP/copies/.hg/strip-backup/6e7340ee38c0-ef8ef003-backup.hg (glob)
+ saved backup bundle to $TESTTMP/copies/.hg/strip-backup/6e7340ee38c0-ef8ef003-rebase.hg (glob)
$ hg st
$ hg st --copies --change tip
A d
@@ -701,7 +701,7 @@
$ hg rebase --collapse -b . -d 0
rebasing 1:1352765a01d4 "change"
rebasing 2:64b456429f67 "Collapsed revision" (tip)
- saved backup bundle to $TESTTMP/copies/.hg/strip-backup/1352765a01d4-45a352ea-backup.hg (glob)
+ saved backup bundle to $TESTTMP/copies/.hg/strip-backup/1352765a01d4-45a352ea-rebase.hg (glob)
$ hg st --change tip --copies
M a
M c
@@ -793,7 +793,7 @@
$ hg rebase -d 0 -r "1::2" --collapse -m collapsed
rebasing 1:6d8d9f24eec3 "a"
rebasing 2:1cc73eca5ecc "b" (tip foo)
- saved backup bundle to $TESTTMP/collapseaddremove/.hg/strip-backup/6d8d9f24eec3-77d3b6e2-backup.hg (glob)
+ saved backup bundle to $TESTTMP/collapseaddremove/.hg/strip-backup/6d8d9f24eec3-77d3b6e2-rebase.hg (glob)
$ hg log -G --template "{rev}: '{desc}' {bookmarks}"
@ 1: 'collapsed' foo
|
@@ -834,7 +834,7 @@
continue: hg rebase --continue
$ hg rebase --continue
rebasing 2:b8d8db2b242d "a-dev" (tip)
- saved backup bundle to $TESTTMP/collapse_remember_message/.hg/strip-backup/b8d8db2b242d-f474c19a-backup.hg (glob)
+ saved backup bundle to $TESTTMP/collapse_remember_message/.hg/strip-backup/b8d8db2b242d-f474c19a-rebase.hg (glob)
$ hg log
changeset: 2:45ba1d1a8665
tag: tip
--- a/tests/test-rebase-conflicts.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-rebase-conflicts.t Wed Jul 19 07:51:41 2017 -0500
@@ -3,6 +3,7 @@
> usegeneraldelta=yes
> [extensions]
> rebase=
+ > drawdag=$TESTDIR/drawdag.py
>
> [phases]
> publish=False
@@ -86,7 +87,7 @@
already rebased 3:3163e20567cc "L1" as 3e046f2ecedb
rebasing 4:46f0b057b5c0 "L2"
rebasing 5:8029388f38dc "L3" (mybook)
- saved backup bundle to $TESTTMP/a/.hg/strip-backup/3163e20567cc-5ca4656e-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a/.hg/strip-backup/3163e20567cc-5ca4656e-rebase.hg (glob)
$ hg tglog
@ 5:secret 'L3' mybook
@@ -248,11 +249,12 @@
f1.txt
committing manifest
committing changelog
+ updating the branch cache
rebased as 19c888675e13
rebasing 10:2f2496ddf49d "merge" (tip)
future parents are 11 and 7
rebase status stored
- already in target
+ already in destination
merge against 10:2f2496ddf49d
detach base 9:e31216eec445
searching for copies back to rev 3
@@ -267,6 +269,7 @@
f1.txt
committing manifest
committing changelog
+ updating the branch cache
rebased as 2a7f09cac94c
rebase merging completed
update back to initial working directory parent
@@ -281,16 +284,18 @@
list of changesets:
e31216eec445e44352c5f01588856059466a24c9
2f2496ddf49d69b5ef23ad8cf9fb2e0e4faf0ac2
- bundle2-output-bundle: "HG20", (1 params) 1 parts total
+ bundle2-output-bundle: "HG20", (1 params) 2 parts total
bundle2-output-part: "changegroup" (params: 1 mandatory 1 advisory) streamed payload
- saved backup bundle to $TESTTMP/issue4041/.hg/strip-backup/e31216eec445-15f7a814-backup.hg (glob)
+ bundle2-output-part: "phase-heads" 24 bytes payload
+ saved backup bundle to $TESTTMP/issue4041/.hg/strip-backup/e31216eec445-15f7a814-rebase.hg (glob)
3 changesets found
list of changesets:
4c9fbe56a16f30c0d5dcc40ec1a97bbe3325209c
19c888675e133ab5dff84516926a65672eaf04d9
2a7f09cac94c7f4b73ebd5cd1a62d3b2e8e336bf
- bundle2-output-bundle: "HG20", 1 parts total
+ bundle2-output-bundle: "HG20", 2 parts total
bundle2-output-part: "changegroup" (params: 1 mandatory 1 advisory) streamed payload
+ bundle2-output-part: "phase-heads" 24 bytes payload
adding branch
bundle2-input-bundle: with-transaction
bundle2-input-part: "changegroup" (params: 1 mandatory 1 advisory) supported
@@ -303,11 +308,12 @@
adding f1.txt revisions
added 2 changesets with 2 changes to 1 files
bundle2-input-part: total payload size 1686
- bundle2-input-bundle: 0 parts total
+ bundle2-input-part: "phase-heads" supported
+ bundle2-input-part: total payload size 24
+ bundle2-input-bundle: 1 parts total
+ updating the branch cache
invalid branchheads cache (served): tip differs
- history modification detected - truncating revision branch cache to revision 9
rebase completed
- truncating cache/rbc-revs-v1 to 72
Test minimization of merge conflicts
$ hg up -q null
@@ -360,3 +366,54 @@
+b
+c
+>>>>>>> source: 7bc217434fc1 - test: abc
+
+Test rebase with obsstore turned on and off (issue5606)
+
+ $ cd $TESTTMP
+ $ hg init b
+ $ cd b
+ $ hg debugdrawdag <<'EOS'
+ > D
+ > |
+ > C
+ > |
+ > B E
+ > |/
+ > A
+ > EOS
+
+ $ hg update E -q
+ $ echo 3 > B
+ $ hg commit --amend -m E -A B -q
+ $ hg rebase -r B+D -d . --config experimental.evolution=all
+ rebasing 1:112478962961 "B" (B)
+ merging B
+ warning: conflicts while merging B! (edit, then use 'hg resolve --mark')
+ unresolved conflicts (see hg resolve, then hg rebase --continue)
+ [1]
+
+ $ echo 4 > B
+ $ hg resolve -m
+ (no more unresolved files)
+ continue: hg rebase --continue
+ $ hg rebase --continue --config experimental.evolution=none
+ rebasing 1:112478962961 "B" (B)
+ not rebasing ignored 2:26805aba1e60 "C" (C)
+ rebasing 3:f585351a92f8 "D" (D)
+ warning: orphaned descendants detected, not stripping 112478962961
+ saved backup bundle to $TESTTMP/b/.hg/strip-backup/f585351a92f8-e536a9e4-rebase.hg (glob)
+
+ $ rm .hg/localtags
+ $ hg tglog
+ o 5:draft 'D'
+ |
+ o 4:draft 'B'
+ |
+ @ 3:draft 'E'
+ |
+ | o 2:draft 'C'
+ | |
+ | o 1:draft 'B'
+ |/
+ o 0:draft 'A'
+
--- a/tests/test-rebase-dest.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-rebase-dest.t Wed Jul 19 07:51:41 2017 -0500
@@ -21,15 +21,15 @@
[255]
$ hg rebase -d 1
rebasing 2:5db65b93a12b "cc" (tip)
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/5db65b93a12b-4fb789ec-backup.hg (glob)
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/5db65b93a12b-4fb789ec-rebase.hg (glob)
$ hg rebase -d 0 -r . -q
$ HGPLAIN=1 hg rebase
rebasing 2:889b0bc6a730 "cc" (tip)
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/889b0bc6a730-41ec4f81-backup.hg (glob)
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/889b0bc6a730-41ec4f81-rebase.hg (glob)
$ hg rebase -d 0 -r . -q
$ hg --config commands.rebase.requiredest=False rebase
rebasing 2:279de9495438 "cc" (tip)
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/279de9495438-ab0a5128-backup.hg (glob)
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/279de9495438-ab0a5128-rebase.hg (glob)
Requiring dest should not break continue or other rebase options
$ hg up 1 -q
@@ -56,7 +56,7 @@
continue: hg rebase --continue
$ hg rebase --continue
rebasing 3:0537f6b50def "dc" (tip)
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/0537f6b50def-be4c7386-backup.hg (glob)
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/0537f6b50def-be4c7386-rebase.hg (glob)
$ cd ..
--- a/tests/test-rebase-detach.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-rebase-detach.t Wed Jul 19 07:51:41 2017 -0500
@@ -2,172 +2,243 @@
> [extensions]
> rebase=
>
- > [phases]
- > publish=False
+ > [alias]
+ > tglog = log -G --template "{rev}: '{desc}'\n"
>
- > [alias]
- > tglog = log -G --template "{rev}: '{desc}' {branches}\n"
+ > [extensions]
+ > drawdag=$TESTDIR/drawdag.py
> EOF
+Rebasing D onto B detaching from C (one commit):
- $ hg init a
- $ cd a
- $ hg unbundle "$TESTDIR/bundles/rebase.hg"
- adding changesets
- adding manifests
- adding file changes
- added 8 changesets with 7 changes to 7 files (+2 heads)
- (run 'hg heads' to see heads, 'hg merge' to merge)
- $ hg up tip
- 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
-
- $ cd ..
-
-
-Rebasing D onto H detaching from C:
-
- $ hg clone -q -u . a a1
+ $ hg init a1
$ cd a1
- $ hg tglog
- @ 7: 'H'
- |
- | o 6: 'G'
- |/|
- o | 5: 'F'
- | |
- | o 4: 'E'
- |/
- | o 3: 'D'
- | |
- | o 2: 'C'
- | |
- | o 1: 'B'
- |/
- o 0: 'A'
-
- $ hg phase --force --secret 3
- $ hg rebase -s 3 -d 7
- rebasing 3:32af7686d403 "D"
- saved backup bundle to $TESTTMP/a1/.hg/strip-backup/32af7686d403-6f7dface-backup.hg (glob)
+ $ hg debugdrawdag <<EOF
+ > D
+ > |
+ > C B
+ > |/
+ > A
+ > EOF
+ $ hg phase --force --secret D
+
+ $ hg rebase -s D -d B
+ rebasing 3:e7b3f00ed42e "D" (D tip)
+ saved backup bundle to $TESTTMP/a1/.hg/strip-backup/e7b3f00ed42e-6f368371-rebase.hg (glob)
$ hg log -G --template "{rev}:{phase} '{desc}' {branches}\n"
- o 7:secret 'D'
- |
- @ 6:draft 'H'
+ o 3:secret 'D'
|
- | o 5:draft 'G'
- |/|
- o | 4:draft 'F'
- | |
- | o 3:draft 'E'
- |/
| o 2:draft 'C'
| |
- | o 1:draft 'B'
+ o | 1:draft 'B'
|/
o 0:draft 'A'
$ hg manifest --rev tip
A
+ B
D
- F
- H
$ cd ..
-Rebasing C onto H detaching from B:
+Rebasing D onto B detaching from C (two commits):
+
+ $ hg init a2
+ $ cd a2
- $ hg clone -q -u . a a2
- $ cd a2
+ $ hg debugdrawdag <<EOF
+ > E
+ > |
+ > D
+ > |
+ > C B
+ > |/
+ > A
+ > EOF
+
+ $ hg rebase -s D -d B
+ rebasing 3:e7b3f00ed42e "D" (D)
+ rebasing 4:69a34c08022a "E" (E tip)
+ saved backup bundle to $TESTTMP/a2/.hg/strip-backup/e7b3f00ed42e-a2ec7cea-rebase.hg (glob)
$ hg tglog
- @ 7: 'H'
+ o 4: 'E'
|
- | o 6: 'G'
- |/|
- o | 5: 'F'
- | |
- | o 4: 'E'
- |/
- | o 3: 'D'
- | |
+ o 3: 'D'
+ |
| o 2: 'C'
| |
- | o 1: 'B'
- |/
- o 0: 'A'
-
- $ hg rebase -s 2 -d 7
- rebasing 2:5fddd98957c8 "C"
- rebasing 3:32af7686d403 "D"
- saved backup bundle to $TESTTMP/a2/.hg/strip-backup/5fddd98957c8-f9244fa1-backup.hg (glob)
-
- $ hg tglog
- o 7: 'D'
- |
- o 6: 'C'
- |
- @ 5: 'H'
- |
- | o 4: 'G'
- |/|
- o | 3: 'F'
- | |
- | o 2: 'E'
- |/
- | o 1: 'B'
+ o | 1: 'B'
|/
o 0: 'A'
$ hg manifest --rev tip
A
+ B
+ D
+ E
+
+ $ cd ..
+
+Rebasing C onto B using detach (same as not using it):
+
+ $ hg init a3
+ $ cd a3
+
+ $ hg debugdrawdag <<EOF
+ > D
+ > |
+ > C B
+ > |/
+ > A
+ > EOF
+
+ $ hg rebase -s C -d B
+ rebasing 2:dc0947a82db8 "C" (C)
+ rebasing 3:e7b3f00ed42e "D" (D tip)
+ saved backup bundle to $TESTTMP/a3/.hg/strip-backup/dc0947a82db8-b8481714-rebase.hg (glob)
+
+ $ hg tglog
+ o 3: 'D'
+ |
+ o 2: 'C'
+ |
+ o 1: 'B'
+ |
+ o 0: 'A'
+
+ $ hg manifest --rev tip
+ A
+ B
C
D
- F
- H
$ cd ..
-Rebasing B onto H using detach (same as not using it):
+Rebasing D onto B detaching from C and collapsing:
+
+ $ hg init a4
+ $ cd a4
+
+ $ hg debugdrawdag <<EOF
+ > E
+ > |
+ > D
+ > |
+ > C B
+ > |/
+ > A
+ > EOF
+ $ hg phase --force --secret E
+
+ $ hg rebase --collapse -s D -d B
+ rebasing 3:e7b3f00ed42e "D" (D)
+ rebasing 4:69a34c08022a "E" (E tip)
+ saved backup bundle to $TESTTMP/a4/.hg/strip-backup/e7b3f00ed42e-a2ec7cea-rebase.hg (glob)
- $ hg clone -q -u . a a3
- $ cd a3
+ $ hg log -G --template "{rev}:{phase} '{desc}' {branches}\n"
+ o 3:secret 'Collapsed revision
+ | * D
+ | * E'
+ | o 2:draft 'C'
+ | |
+ o | 1:draft 'B'
+ |/
+ o 0:draft 'A'
+
+ $ hg manifest --rev tip
+ A
+ B
+ D
+ E
+
+ $ cd ..
+
+Rebasing across null as ancestor
+ $ hg init a5
+ $ cd a5
+
+ $ hg debugdrawdag <<EOF
+ > E
+ > |
+ > D
+ > |
+ > C
+ > |
+ > A B
+ > EOF
+
+ $ hg rebase -s C -d B
+ rebasing 2:dc0947a82db8 "C" (C)
+ rebasing 3:e7b3f00ed42e "D" (D)
+ rebasing 4:69a34c08022a "E" (E tip)
+ saved backup bundle to $TESTTMP/a5/.hg/strip-backup/dc0947a82db8-3eefec98-rebase.hg (glob)
$ hg tglog
- @ 7: 'H'
+ o 4: 'E'
+ |
+ o 3: 'D'
+ |
+ o 2: 'C'
|
- | o 6: 'G'
- |/|
- o | 5: 'F'
- | |
- | o 4: 'E'
+ o 1: 'B'
+
+ o 0: 'A'
+
+ $ hg rebase -d 1 -s 3
+ rebasing 3:e9153d36a1af "D"
+ rebasing 4:e3d0c70d606d "E" (tip)
+ saved backup bundle to $TESTTMP/a5/.hg/strip-backup/e9153d36a1af-db7388ed-rebase.hg (glob)
+ $ hg tglog
+ o 4: 'E'
+ |
+ o 3: 'D'
+ |
+ | o 2: 'C'
|/
- | o 3: 'D'
- | |
- | o 2: 'C'
- | |
- | o 1: 'B'
- |/
+ o 1: 'B'
+
o 0: 'A'
- $ hg rebase -s 1 -d 7
- rebasing 1:42ccdea3bb16 "B"
- rebasing 2:5fddd98957c8 "C"
- rebasing 3:32af7686d403 "D"
- saved backup bundle to $TESTTMP/a3/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob)
+ $ cd ..
+
+Verify that target is not selected as external rev (issue3085)
+
+ $ hg init a6
+ $ cd a6
+ $ hg debugdrawdag <<EOF
+ > H
+ > | G
+ > |/|
+ > F E
+ > |/
+ > A
+ > EOF
+ $ hg up -q G
+
+ $ echo "I" >> E
+ $ hg ci -m "I"
+ $ hg tag --local I
+ $ hg merge H
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ hg ci -m "Merge"
+ $ echo "J" >> F
+ $ hg ci -m "J"
$ hg tglog
- o 7: 'D'
- |
- o 6: 'C'
+ @ 7: 'J'
|
- o 5: 'B'
- |
- @ 4: 'H'
- |
+ o 6: 'Merge'
+ |\
+ | o 5: 'I'
+ | |
+ o | 4: 'H'
+ | |
| o 3: 'G'
|/|
o | 2: 'F'
@@ -176,116 +247,17 @@
|/
o 0: 'A'
- $ hg manifest --rev tip
- A
- B
- C
- D
- F
- H
-
- $ cd ..
-
-
-Rebasing C onto H detaching from B and collapsing:
-
- $ hg clone -q -u . a a4
- $ cd a4
- $ hg phase --force --secret 3
+ $ hg rebase -s I -d H --collapse --config ui.merge=internal:other
+ rebasing 5:b92d164ad3cb "I" (I)
+ rebasing 6:0cfbc7e8faaf "Merge"
+ rebasing 7:c6aaf0d259c0 "J" (tip)
+ saved backup bundle to $TESTTMP/a6/.hg/strip-backup/b92d164ad3cb-88fd7ab7-rebase.hg (glob)
$ hg tglog
- @ 7: 'H'
- |
- | o 6: 'G'
- |/|
- o | 5: 'F'
- | |
- | o 4: 'E'
- |/
- | o 3: 'D'
- | |
- | o 2: 'C'
- | |
- | o 1: 'B'
- |/
- o 0: 'A'
-
- $ hg rebase --collapse -s 2 -d 7
- rebasing 2:5fddd98957c8 "C"
- rebasing 3:32af7686d403 "D"
- saved backup bundle to $TESTTMP/a4/.hg/strip-backup/5fddd98957c8-f9244fa1-backup.hg (glob)
-
- $ hg log -G --template "{rev}:{phase} '{desc}' {branches}\n"
- o 6:secret 'Collapsed revision
- | * C
- | * D'
- @ 5:draft 'H'
- |
- | o 4:draft 'G'
- |/|
- o | 3:draft 'F'
- | |
- | o 2:draft 'E'
- |/
- | o 1:draft 'B'
- |/
- o 0:draft 'A'
-
- $ hg manifest --rev tip
- A
- C
- D
- F
- H
-
- $ cd ..
-
-Rebasing across null as ancestor
- $ hg clone -q -U a a5
-
- $ cd a5
-
- $ echo x > x
-
- $ hg add x
-
- $ hg ci -m "extra branch"
- created new head
-
- $ hg tglog
- @ 8: 'extra branch'
-
- o 7: 'H'
- |
- | o 6: 'G'
- |/|
- o | 5: 'F'
- | |
- | o 4: 'E'
- |/
- | o 3: 'D'
- | |
- | o 2: 'C'
- | |
- | o 1: 'B'
- |/
- o 0: 'A'
-
- $ hg rebase -s 1 -d tip
- rebasing 1:42ccdea3bb16 "B"
- rebasing 2:5fddd98957c8 "C"
- rebasing 3:32af7686d403 "D"
- saved backup bundle to $TESTTMP/a5/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob)
-
- $ hg tglog
- o 8: 'D'
- |
- o 7: 'C'
- |
- o 6: 'B'
- |
- @ 5: 'extra branch'
-
+ @ 5: 'Collapsed revision
+ | * I
+ | * Merge
+ | * J'
o 4: 'H'
|
| o 3: 'G'
@@ -297,76 +269,8 @@
o 0: 'A'
- $ hg rebase -d 5 -s 7
- rebasing 7:13547172c9c0 "C"
- rebasing 8:4e27a76c371a "D" (tip)
- saved backup bundle to $TESTTMP/a5/.hg/strip-backup/13547172c9c0-35685ded-backup.hg (glob)
- $ hg tglog
- o 8: 'D'
- |
- o 7: 'C'
- |
- | o 6: 'B'
- |/
- @ 5: 'extra branch'
-
- o 4: 'H'
- |
- | o 3: 'G'
- |/|
- o | 2: 'F'
- | |
- | o 1: 'E'
- |/
- o 0: 'A'
-
- $ cd ..
-
-Verify that target is not selected as external rev (issue3085)
-
- $ hg clone -q -U a a6
- $ cd a6
- $ hg up -q 6
-
- $ echo "I" >> E
- $ hg ci -m "I"
- $ hg merge 7
- 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
- (branch merge, don't forget to commit)
- $ hg ci -m "Merge"
- $ echo "J" >> F
- $ hg ci -m "J"
-
- $ hg rebase -s 8 -d 7 --collapse --config ui.merge=internal:other
- rebasing 8:9790e768172d "I"
- rebasing 9:5d7b11f5fb97 "Merge"
- rebasing 10:9427d4d5af81 "J" (tip)
- saved backup bundle to $TESTTMP/a6/.hg/strip-backup/9790e768172d-c2111e9d-backup.hg (glob)
-
- $ hg tglog
- @ 8: 'Collapsed revision
- | * I
- | * Merge
- | * J'
- o 7: 'H'
- |
- | o 6: 'G'
- |/|
- o | 5: 'F'
- | |
- | o 4: 'E'
- |/
- | o 3: 'D'
- | |
- | o 2: 'C'
- | |
- | o 1: 'B'
- |/
- o 0: 'A'
-
-
$ hg log --rev tip
- changeset: 8:9472f4b1d736
+ changeset: 5:65079693dac4
tag: tip
user: test
date: Thu Jan 01 00:00:00 1970 +0000
@@ -376,40 +280,36 @@
$ cd ..
Ensure --continue restores a correct state (issue3046) and phase:
- $ hg clone -q a a7
+ $ hg init a7
$ cd a7
- $ hg up -q 3
- $ echo 'H2' > H
- $ hg ci -A -m 'H2'
- adding H
- $ hg phase --force --secret 8
- $ hg rebase -s 8 -d 7 --config ui.merge=internal:fail
- rebasing 8:6215fafa5447 "H2" (tip)
- merging H
- warning: conflicts while merging H! (edit, then use 'hg resolve --mark')
+
+ $ hg debugdrawdag <<EOF
+ > C B
+ > |/
+ > A
+ > EOF
+ $ hg up -q C
+ $ echo 'B2' > B
+ $ hg ci -A -m 'B2'
+ adding B
+ $ hg phase --force --secret .
+ $ hg rebase -s . -d B --config ui.merge=internal:fail
+ rebasing 3:17b4880d2402 "B2" (tip)
+ merging B
+ warning: conflicts while merging B! (edit, then use 'hg resolve --mark')
unresolved conflicts (see hg resolve, then hg rebase --continue)
[1]
$ hg resolve --all -t internal:local
(no more unresolved files)
continue: hg rebase --continue
$ hg rebase -c
- rebasing 8:6215fafa5447 "H2" (tip)
- note: rebase of 8:6215fafa5447 created no changes to commit
- saved backup bundle to $TESTTMP/a7/.hg/strip-backup/6215fafa5447-5804ebd5-backup.hg (glob)
+ rebasing 3:17b4880d2402 "B2" (tip)
+ note: rebase of 3:17b4880d2402 created no changes to commit
+ saved backup bundle to $TESTTMP/a7/.hg/strip-backup/17b4880d2402-1ae1f6cc-rebase.hg (glob)
$ hg log -G --template "{rev}:{phase} '{desc}' {branches}\n"
- @ 7:draft 'H'
+ o 2:draft 'C'
|
- | o 6:draft 'G'
- |/|
- o | 5:draft 'F'
- | |
- | o 4:draft 'E'
- |/
- | o 3:draft 'D'
- | |
- | o 2:draft 'C'
- | |
- | o 1:draft 'B'
+ | @ 1:draft 'B'
|/
o 0:draft 'A'
--- a/tests/test-rebase-interruptions.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-rebase-interruptions.t Wed Jul 19 07:51:41 2017 -0500
@@ -112,7 +112,7 @@
$ hg rebase --continue
already rebased 1:27547f69f254 "B" as 45396c49d53b
rebasing 2:965c486023db "C"
- warning: new changesets detected on source branch, not stripping
+ warning: orphaned descendants detected, not stripping 27547f69f254, 965c486023db
$ hg tglogp
o 7:draft 'C'
@@ -186,7 +186,7 @@
Abort the rebasing:
$ hg rebase --abort
- warning: new changesets detected on target branch, can't strip
+ warning: new changesets detected on destination branch, can't strip
rebase aborted
$ hg tglog
@@ -271,7 +271,6 @@
|/
o 0:public 'A'
-
Test rebase interrupted by hooks
$ hg up 2
@@ -312,7 +311,7 @@
$ hg rebase --continue
already rebased 2:965c486023db "C" as 401ccec5e39f
rebasing 6:a0b2430ebfb8 "F"
- saved backup bundle to $TESTTMP/hook-precommit/.hg/strip-backup/965c486023db-aa6250e7-backup.hg (glob)
+ saved backup bundle to $TESTTMP/hook-precommit/.hg/strip-backup/965c486023db-aa6250e7-rebase.hg (glob)
$ hg tglogp
@ 6:secret 'F'
|
@@ -334,7 +333,12 @@
$ cp -R a3 hook-pretxncommit
$ cd hook-pretxncommit
- $ hg rebase --source 2 --dest 5 --tool internal:other --config 'hooks.pretxncommit=hg log -r $HG_NODE | grep "summary: C"'
+#if windows
+ $ NODE="%HG_NODE%"
+#else
+ $ NODE="\$HG_NODE"
+#endif
+ $ hg rebase --source 2 --dest 5 --tool internal:other --config "hooks.pretxncommit=hg log -r $NODE | grep \"summary: C\""
rebasing 2:965c486023db "C"
summary: C
rebasing 6:a0b2430ebfb8 "F" (tip)
@@ -362,7 +366,7 @@
$ hg rebase --continue
already rebased 2:965c486023db "C" as 401ccec5e39f
rebasing 6:a0b2430ebfb8 "F"
- saved backup bundle to $TESTTMP/hook-pretxncommit/.hg/strip-backup/965c486023db-aa6250e7-backup.hg (glob)
+ saved backup bundle to $TESTTMP/hook-pretxncommit/.hg/strip-backup/965c486023db-aa6250e7-rebase.hg (glob)
$ hg tglogp
@ 6:secret 'F'
|
@@ -412,7 +416,7 @@
$ hg rebase --continue
already rebased 2:965c486023db "C" as 401ccec5e39f
rebasing 6:a0b2430ebfb8 "F"
- saved backup bundle to $TESTTMP/hook-pretxnclose/.hg/strip-backup/965c486023db-aa6250e7-backup.hg (glob)
+ saved backup bundle to $TESTTMP/hook-pretxnclose/.hg/strip-backup/965c486023db-aa6250e7-rebase.hg (glob)
$ hg tglogp
@ 6:secret 'F'
|
@@ -429,3 +433,33 @@
o 0:public 'A'
$ cd ..
+
+Make sure merge state is cleaned up after a no-op rebase merge (issue5494)
+ $ hg init repo
+ $ cd repo
+ $ echo a > a
+ $ hg commit -qAm base
+ $ echo b >> a
+ $ hg commit -qm b
+ $ hg up '.^'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo c >> a
+ $ hg commit -qm c
+ $ hg rebase -s 1 -d 2 --noninteractive
+ rebasing 1:fdaca8533b86 "b"
+ merging a
+ warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
+ unresolved conflicts (see hg resolve, then hg rebase --continue)
+ [1]
+ $ echo a > a
+ $ echo c >> a
+ $ hg resolve --mark a
+ (no more unresolved files)
+ continue: hg rebase --continue
+ $ hg rebase --continue
+ rebasing 1:fdaca8533b86 "b"
+ note: rebase of 1:fdaca8533b86 created no changes to commit
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/fdaca8533b86-7fd70513-rebase.hg (glob)
+ $ hg resolve --list
+ $ test -f .hg/merge
+ [1]
--- a/tests/test-rebase-issue-noparam-single-rev.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-rebase-issue-noparam-single-rev.t Wed Jul 19 07:51:41 2017 -0500
@@ -53,7 +53,7 @@
$ hg rebase
rebasing 2:87c180a611f2 "l1"
- saved backup bundle to $TESTTMP/a/.hg/strip-backup/87c180a611f2-a5be192d-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a/.hg/strip-backup/87c180a611f2-a5be192d-rebase.hg (glob)
$ hg tglog
@ 4: 'l1'
@@ -113,7 +113,7 @@
$ hg rebase
rebasing 2:87c180a611f2 "l1"
rebasing 3:1ac923b736ef "l2"
- saved backup bundle to $TESTTMP/b/.hg/strip-backup/87c180a611f2-b980535c-backup.hg (glob)
+ saved backup bundle to $TESTTMP/b/.hg/strip-backup/87c180a611f2-b980535c-rebase.hg (glob)
$ hg tglog
@ 4: 'l2'
--- a/tests/test-rebase-mq-skip.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-rebase-mq-skip.t Wed Jul 19 07:51:41 2017 -0500
@@ -74,7 +74,7 @@
324 (manifests)
129 p0
129 p1
- saved backup bundle to $TESTTMP/a/.hg/strip-backup/13a46ce44f60-5da6ecfb-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a/.hg/strip-backup/13a46ce44f60-5da6ecfb-rebase.hg (glob)
2 changesets found
uncompressed size of bundle content:
403 (changelog)
@@ -166,7 +166,7 @@
rebasing 5:681a378595ba "r5" (r5)
rebasing 6:512a1f24768b "r6" (qtip r6)
note: rebase of 6:512a1f24768b created no changes to commit
- saved backup bundle to $TESTTMP/b/.hg/strip-backup/b4bffa6e4776-b9bfb84d-backup.hg (glob)
+ saved backup bundle to $TESTTMP/b/.hg/strip-backup/b4bffa6e4776-b9bfb84d-rebase.hg (glob)
$ hg tglog
@ 8: 'r5' tags: qtip r5 tip
--- a/tests/test-rebase-mq.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-rebase-mq.t Wed Jul 19 07:51:41 2017 -0500
@@ -88,7 +88,7 @@
$ hg rebase -c
already rebased 2:3504f44bffc0 "P0" (f.patch qbase) as ebe9914c0d1c
rebasing 3:929394423cd3 "P1" (f2.patch qtip)
- saved backup bundle to $TESTTMP/a/.hg/strip-backup/3504f44bffc0-30595b40-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a/.hg/strip-backup/3504f44bffc0-30595b40-rebase.hg (glob)
$ hg tglog
@ 3: 'P1' tags: f2.patch qtip tip
@@ -205,7 +205,7 @@
$ hg rebase -s 2 -d 1
rebasing 2:0c587ffcb480 "P0 (git)" (f_git.patch qbase)
rebasing 3:c7f18665e4bc "P1" (f.patch qtip tip)
- saved backup bundle to $TESTTMP/a/.hg/strip-backup/0c587ffcb480-0ea5695f-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a/.hg/strip-backup/0c587ffcb480-0ea5695f-rebase.hg (glob)
$ hg qci -m 'save patch state'
--- a/tests/test-rebase-named-branches.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-rebase-named-branches.t Wed Jul 19 07:51:41 2017 -0500
@@ -72,7 +72,7 @@
rebasing 6:eea13746799a "G"
rebasing 7:02de42196ebe "H"
rebasing 9:cb039b7cae8e "dev-two named branch" (tip)
- saved backup bundle to $TESTTMP/a1/.hg/strip-backup/24b6387c8c8c-24cb8001-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a1/.hg/strip-backup/24b6387c8c8c-24cb8001-rebase.hg (glob)
$ hg tglog
@ 9: 'dev-two named branch' dev-two
@@ -101,7 +101,7 @@
rebasing 7:4b988a958030 "G"
rebasing 8:31d0e4ba75e6 "H"
rebasing 9:9e70cd31750f "dev-two named branch" (tip)
- saved backup bundle to $TESTTMP/a1/.hg/strip-backup/643fc9128048-c4ee9ef5-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a1/.hg/strip-backup/643fc9128048-c4ee9ef5-rebase.hg (glob)
$ hg tglog
@ 9: 'dev-two named branch' dev-two
@@ -161,7 +161,7 @@
rebasing 7:1a1e6f72ec38 "G"
rebasing 8:904590360559 "H"
rebasing 9:59c2e59309fe "dev-two named branch"
- saved backup bundle to $TESTTMP/a1/.hg/strip-backup/bc8139ee757c-f11c1080-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a1/.hg/strip-backup/bc8139ee757c-f11c1080-rebase.hg (glob)
$ hg tglog
o 9: 'dev-two named branch' dev-two
@@ -190,7 +190,7 @@
rebasing 7:549f007a9f5f "G"
rebasing 8:12b2bc666e20 "H"
rebasing 9:71325f8bc082 "dev-two named branch" (tip)
- saved backup bundle to $TESTTMP/a1/.hg/strip-backup/643fc9128048-6cdd1a52-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a1/.hg/strip-backup/643fc9128048-6cdd1a52-rebase.hg (glob)
$ hg tglog
o 9: 'dev-two named branch' dev-two
@@ -221,7 +221,7 @@
rebasing 1:42ccdea3bb16 "B"
rebasing 2:5fddd98957c8 "C"
rebasing 3:32af7686d403 "D"
- saved backup bundle to $TESTTMP/a1/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a1/.hg/strip-backup/42ccdea3bb16-3cb021d3-rebase.hg (glob)
$ hg tglog
o 9: 'D'
@@ -253,7 +253,7 @@
rebasing 7:3bdb949809d9 "B"
rebasing 8:a0d543090fa4 "C"
rebasing 9:e9f862ce8bad "D" (tip)
- saved backup bundle to $TESTTMP/a1/.hg/strip-backup/3944801ae4ea-fb46ed74-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a1/.hg/strip-backup/3944801ae4ea-fb46ed74-rebase.hg (glob)
$ hg tglog
o 9: 'D'
@@ -290,7 +290,7 @@
rebasing 7:160b0930ccc6 "B"
rebasing 8:810110211f50 "C"
rebasing 9:e522577ccdbd "D"
- saved backup bundle to $TESTTMP/a1/.hg/strip-backup/8e279d293175-b023e27c-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a1/.hg/strip-backup/8e279d293175-b023e27c-rebase.hg (glob)
$ cd ..
@@ -329,7 +329,7 @@
$ hg rebase
rebasing 2:792845bb77ee "b2"
note: rebase of 2:792845bb77ee created no changes to commit
- saved backup bundle to $TESTTMP/case1/.hg/strip-backup/792845bb77ee-627120ee-backup.hg (glob)
+ saved backup bundle to $TESTTMP/case1/.hg/strip-backup/792845bb77ee-627120ee-rebase.hg (glob)
$ hg tglog
o 2: 'c1' c
|
@@ -344,7 +344,7 @@
$ hg up -qr 1
$ hg rebase
rebasing 1:40039acb7ca5 "b1"
- saved backup bundle to $TESTTMP/case2/.hg/strip-backup/40039acb7ca5-342b72d1-backup.hg (glob)
+ saved backup bundle to $TESTTMP/case2/.hg/strip-backup/40039acb7ca5-342b72d1-rebase.hg (glob)
$ hg tglog
@ 3: 'b1' b
|
@@ -395,7 +395,7 @@
rebasing 3:76abc1c6f8c7 "b1"
rebasing 4:8427af5d86f2 "c2 closed" (tip)
note: rebase of 4:8427af5d86f2 created no changes to commit
- saved backup bundle to $TESTTMP/case2/.hg/strip-backup/76abc1c6f8c7-cd698d13-backup.hg (glob)
+ saved backup bundle to $TESTTMP/case2/.hg/strip-backup/76abc1c6f8c7-cd698d13-rebase.hg (glob)
$ hg tglog
o 3: 'b1' x
|
--- a/tests/test-rebase-newancestor.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-rebase-newancestor.t Wed Jul 19 07:51:41 2017 -0500
@@ -44,7 +44,7 @@
merging a
rebasing 2:30ae917c0e4f "C"
merging a
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/0f4f7cb4f549-82b3b163-backup.hg (glob)
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/0f4f7cb4f549-82b3b163-rebase.hg (glob)
$ hg tglog
o 3: 'C'
@@ -138,7 +138,7 @@
other [source] changed f-default which local [dest] deleted
use (c)hanged version, leave (d)eleted, or leave (u)nresolved? c
rebasing 6:9455ee510502 "dev: merge default"
- saved backup bundle to $TESTTMP/ancestor-merge/.hg/strip-backup/1d1a643d390e-43e9e04b-backup.hg (glob)
+ saved backup bundle to $TESTTMP/ancestor-merge/.hg/strip-backup/1d1a643d390e-43e9e04b-rebase.hg (glob)
$ hg tglog
o 6: 'dev: merge default'
|
@@ -167,7 +167,7 @@
other [source] changed f-default which local [dest] deleted
use (c)hanged version, leave (d)eleted, or leave (u)nresolved? c
rebasing 6:9455ee510502 "dev: merge default"
- saved backup bundle to $TESTTMP/ancestor-merge-2/.hg/strip-backup/ec2c14fb2984-62d0b222-backup.hg (glob)
+ saved backup bundle to $TESTTMP/ancestor-merge-2/.hg/strip-backup/ec2c14fb2984-62d0b222-rebase.hg (glob)
$ hg tglog
o 7: 'dev: merge default'
|
@@ -239,7 +239,7 @@
$ hg rebase -r 4 -d 2
rebasing 4:6990226659be "merge p1 3=outside p2 1=ancestor"
- saved backup bundle to $TESTTMP/parentorder/.hg/strip-backup/6990226659be-4d67a0d3-backup.hg (glob)
+ saved backup bundle to $TESTTMP/parentorder/.hg/strip-backup/6990226659be-4d67a0d3-rebase.hg (glob)
$ hg tip
changeset: 5:cca50676b1c5
tag: tip
@@ -251,7 +251,7 @@
$ hg rebase -r 4 -d 2
rebasing 4:a57575f79074 "merge p1 1=ancestor p2 3=outside"
- saved backup bundle to $TESTTMP/parentorder/.hg/strip-backup/a57575f79074-385426e5-backup.hg (glob)
+ saved backup bundle to $TESTTMP/parentorder/.hg/strip-backup/a57575f79074-385426e5-rebase.hg (glob)
$ hg tip
changeset: 5:f9daf77ffe76
tag: tip
@@ -307,7 +307,7 @@
199 (changelog)
216 (manifests)
182 other
- saved backup bundle to $TESTTMP/parentorder/.hg/strip-backup/4c5f12f25ebe-f46990e5-backup.hg (glob)
+ saved backup bundle to $TESTTMP/parentorder/.hg/strip-backup/4c5f12f25ebe-f46990e5-rebase.hg (glob)
1 changesets found
uncompressed size of bundle content:
254 (changelog)
--- a/tests/test-rebase-obsolete.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-rebase-obsolete.t Wed Jul 19 07:51:41 2017 -0500
@@ -555,6 +555,7 @@
$ hg add J
$ hg commit -m J
$ hg debugobsolete `hg log --rev . -T '{node}'`
+ obsoleted 1 changesets
$ hg rebase --rev .~1::. --dest 'max(desc(D))' --traceback --config experimental.rebaseskipobsolete=off
rebasing 9:4bde274eefcf "I"
@@ -710,6 +711,7 @@
o 0:4a2df7238c3b A
$ hg debugobsolete `hg log -r 7 -T '{node}\n'` --config experimental.evolution=all
+ obsoleted 1 changesets
$ hg rebase -d 6 -r "4::"
rebasing 4:ff2c4d47b71d "C"
note: not rebasing 7:360bbaa7d3ce "O", it has no successor
@@ -737,6 +739,7 @@
$ hg commit -m nonrelevant
created new head
$ hg debugobsolete `hg log -r 11 -T '{node}\n'` --config experimental.evolution=all
+ obsoleted 1 changesets
$ hg rebase -r . -d 10
note: not rebasing 11:f44da1f4954c "nonrelevant" (tip), it has no successor
@@ -861,6 +864,7 @@
$ hg add L
$ hg commit -m "dummy change"
$ hg debugobsolete `hg log -r ".^" -T '{node}'` `hg log -r 19 -T '{node}'` --config experimental.evolution=all
+ obsoleted 1 changesets
$ hg log -G -r 17::
@ 22:7bdc8a87673d dummy change
@@ -902,7 +906,7 @@
$ hg up 9520eea781bc
1 files updated, 0 files merged, 2 files removed, 0 files unresolved
$ echo 1 >> E
- $ hg commit --amend -m "E'"
+ $ hg commit --amend -m "E'" -d "0 0"
$ hg log -G
@ 9:69abe8906104 E'
|
@@ -967,14 +971,19 @@
$ hg up 2 && hg log -r . # working dir is at rev 2 again
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
2:1e9a3c00cbe9 b (no-eol)
- $ hg rebase -r 2 -d 3
+ $ hg rebase -r 2 -d 3 --config experimental.evolution.track-operation=1
note: not rebasing 2:1e9a3c00cbe9 "b" (mybook), already in destination as 3:be1832deae9a "b"
Check that working directory was updated to rev 3 although rev 2 was skipped
during the rebase operation
$ hg log -r .
3:be1832deae9a b (no-eol)
-Check that bookmark was moved to rev 3 although rev 2 was skipped
-during the rebase operation
+Check that bookmark was not moved to rev 3 if rev 2 was skipped during the
+rebase operation. This makes sense because if rev 2 has a successor, the
+operation generating that successor (ex. rebase) should be responsible for
+moving bookmarks. If the bookmark is on a precursor, like rev 2, that means the
+user manually moved it back. In that case we should not move it again.
$ hg bookmarks
- mybook 3:be1832deae9a
+ mybook 2:1e9a3c00cbe9
+ $ hg debugobsolete --rev tip
+ 1e9a3c00cbe90d236ac05ef61efcc5e40b7412bc be1832deae9ac531caa7438b8dcf6055a122cd8e 0 (*) {'user': 'test'} (glob)
--- a/tests/test-rebase-parameters.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-rebase-parameters.t Wed Jul 19 07:51:41 2017 -0500
@@ -134,7 +134,7 @@
rebasing 1:42ccdea3bb16 "B"
rebasing 2:5fddd98957c8 "C"
rebasing 3:32af7686d403 "D"
- saved backup bundle to $TESTTMP/a1/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a1/.hg/strip-backup/42ccdea3bb16-3cb021d3-rebase.hg (glob)
$ hg tglog
@ 6: 'D'
@@ -168,7 +168,7 @@
rebasing 1:42ccdea3bb16 "B"
rebasing 2:5fddd98957c8 "C"
rebasing 3:32af7686d403 "D"
- saved backup bundle to $TESTTMP/a2/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a2/.hg/strip-backup/42ccdea3bb16-3cb021d3-rebase.hg (glob)
$ hg tglog
@ 6: 'D'
@@ -197,7 +197,7 @@
rebasing 1:42ccdea3bb16 "B"
rebasing 2:5fddd98957c8 "C"
rebasing 3:32af7686d403 "D"
- saved backup bundle to $TESTTMP/a3/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a3/.hg/strip-backup/42ccdea3bb16-3cb021d3-rebase.hg (glob)
$ hg tglog
@ 8: 'D'
@@ -229,7 +229,7 @@
$ hg rebase --source 'desc("C")'
rebasing 2:5fddd98957c8 "C"
rebasing 3:32af7686d403 "D"
- saved backup bundle to $TESTTMP/a4/.hg/strip-backup/5fddd98957c8-f9244fa1-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a4/.hg/strip-backup/5fddd98957c8-f9244fa1-rebase.hg (glob)
$ hg tglog
o 6: 'D'
@@ -258,7 +258,7 @@
rebasing 1:42ccdea3bb16 "B"
rebasing 2:5fddd98957c8 "C"
rebasing 3:32af7686d403 "D"
- saved backup bundle to $TESTTMP/a5/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a5/.hg/strip-backup/42ccdea3bb16-3cb021d3-rebase.hg (glob)
$ hg tglog
@ 8: 'D'
@@ -291,7 +291,7 @@
rebasing 1:42ccdea3bb16 "B"
rebasing 2:5fddd98957c8 "C"
rebasing 3:32af7686d403 "D"
- saved backup bundle to $TESTTMP/a6/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a6/.hg/strip-backup/42ccdea3bb16-3cb021d3-rebase.hg (glob)
$ hg tglog
o 6: 'D'
@@ -319,7 +319,7 @@
$ hg rebase --source 2 --dest 7
rebasing 2:5fddd98957c8 "C"
rebasing 3:32af7686d403 "D"
- saved backup bundle to $TESTTMP/a7/.hg/strip-backup/5fddd98957c8-f9244fa1-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a7/.hg/strip-backup/5fddd98957c8-f9244fa1-rebase.hg (glob)
$ hg tglog
o 8: 'D'
@@ -352,7 +352,7 @@
rebasing 1:42ccdea3bb16 "B"
rebasing 2:5fddd98957c8 "C"
rebasing 3:32af7686d403 "D"
- saved backup bundle to $TESTTMP/a8/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a8/.hg/strip-backup/42ccdea3bb16-3cb021d3-rebase.hg (glob)
$ hg tglog
o 8: 'D'
@@ -384,7 +384,7 @@
$ hg rebase --rev 'desc("C")::'
rebasing 2:5fddd98957c8 "C"
rebasing 3:32af7686d403 "D"
- saved backup bundle to $TESTTMP/a9/.hg/strip-backup/5fddd98957c8-f9244fa1-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a9/.hg/strip-backup/5fddd98957c8-f9244fa1-rebase.hg (glob)
$ hg tglog
o 6: 'D'
@@ -410,7 +410,7 @@
$ hg rebase -r 3 -r 6 --dest 8
rebasing 3:32af7686d403 "D"
rebasing 6:eea13746799a "G"
- saved backup bundle to $TESTTMP/aX/.hg/strip-backup/eea13746799a-ad273fd6-backup.hg (glob)
+ saved backup bundle to $TESTTMP/aX/.hg/strip-backup/eea13746799a-ad273fd6-rebase.hg (glob)
$ cd ..
Test --tool parameter:
@@ -440,7 +440,7 @@
$ hg rebase -s 2 -d 1 --tool internal:local
rebasing 2:e4e3f3546619 "c2b" (tip)
note: rebase of 2:e4e3f3546619 created no changes to commit
- saved backup bundle to $TESTTMP/b1/.hg/strip-backup/e4e3f3546619-b0841178-backup.hg (glob)
+ saved backup bundle to $TESTTMP/b1/.hg/strip-backup/e4e3f3546619-b0841178-rebase.hg (glob)
$ hg cat c2
c2
@@ -453,7 +453,7 @@
$ hg rebase -s 2 -d 1 --tool internal:other
rebasing 2:e4e3f3546619 "c2b" (tip)
- saved backup bundle to $TESTTMP/b2/.hg/strip-backup/e4e3f3546619-b0841178-backup.hg (glob)
+ saved backup bundle to $TESTTMP/b2/.hg/strip-backup/e4e3f3546619-b0841178-rebase.hg (glob)
$ hg cat c2
c2b
@@ -493,7 +493,7 @@
$ hg rebase -c --tool internal:fail
rebasing 2:e4e3f3546619 "c2b" (tip)
note: rebase of 2:e4e3f3546619 created no changes to commit
- saved backup bundle to $TESTTMP/b3/.hg/strip-backup/e4e3f3546619-b0841178-backup.hg (glob)
+ saved backup bundle to $TESTTMP/b3/.hg/strip-backup/e4e3f3546619-b0841178-rebase.hg (glob)
$ hg rebase -i
abort: interactive history editing is supported by the 'histedit' extension (see "hg --config extensions.histedit= help -e histedit")
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rebase-partial.t Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,95 @@
+Tests rebasing with part of the rebase set already in the
+destination (issue5422)
+
+ $ cat >> $HGRCPATH <<EOF
+ > [extensions]
+ > rebase=
+ > drawdag=$TESTDIR/drawdag.py
+ >
+ > [experimental]
+ > evolution=createmarkers,allowunstable
+ >
+ > [alias]
+ > tglog = log -G --template "{rev}: {desc}"
+ > EOF
+
+ $ rebasewithdag() {
+ > N=`$PYTHON -c "print($N+1)"`
+ > hg init repo$N && cd repo$N
+ > hg debugdrawdag
+ > hg rebase "$@" > _rebasetmp
+ > r=$?
+ > grep -v 'saved backup bundle' _rebasetmp
+ > [ $r -eq 0 ] && hg tglog
+ > cd ..
+ > return $r
+ > }
+
+Rebase two commits, of which one is already in the right place
+
+ $ rebasewithdag -r C+D -d B <<EOF
+ > C
+ > |
+ > B D
+ > |/
+ > A
+ > EOF
+ rebasing 2:b18e25de2cf5 "D" (D)
+ already rebased 3:26805aba1e60 "C" (C tip)
+ o 4: D
+ |
+ | o 3: C
+ |/
+ | x 2: D
+ | |
+ o | 1: B
+ |/
+ o 0: A
+
+Can collapse commits even if one is already in the right place
+
+ $ rebasewithdag --collapse -r C+D -d B <<EOF
+ > C
+ > |
+ > B D
+ > |/
+ > A
+ > EOF
+ rebasing 2:b18e25de2cf5 "D" (D)
+ rebasing 3:26805aba1e60 "C" (C tip)
+ o 4: Collapsed revision
+ | * D
+ | * C
+ | x 3: C
+ |/
+ | x 2: D
+ | |
+ o | 1: B
+ |/
+ o 0: A
+
+Rebase with "holes". The commits after the hole should end up on the parent of
+the hole (B below), not on top of the destination (A).
+
+ $ rebasewithdag -r B+D -d A <<EOF
+ > D
+ > |
+ > C
+ > |
+ > B
+ > |
+ > A
+ > EOF
+ already rebased 1:112478962961 "B" (B)
+ not rebasing ignored 2:26805aba1e60 "C" (C)
+ rebasing 3:f585351a92f8 "D" (D tip)
+ o 4: D
+ |
+ | x 3: D
+ | |
+ | o 2: C
+ |/
+ o 1: B
+ |
+ o 0: A
+
--- a/tests/test-rebase-pull.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-rebase-pull.t Wed Jul 19 07:51:41 2017 -0500
@@ -55,7 +55,7 @@
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
rebasing 2:ff8d69a621f9 "L1"
- saved backup bundle to $TESTTMP/b/.hg/strip-backup/ff8d69a621f9-160fa373-backup.hg (glob)
+ saved backup bundle to $TESTTMP/b/.hg/strip-backup/ff8d69a621f9-160fa373-rebase.hg (glob)
$ hg tglog
@ 3: 'L1'
@@ -127,7 +127,7 @@
$ echo a > s/a
$ hg -R s add s/a
$ hg pull --rebase
- abort: uncommitted changes in subrepository 's'
+ abort: uncommitted changes in subrepository "s"
(cannot pull with rebase: please commit or shelve your changes first)
[255]
@@ -211,7 +211,7 @@
adding file changes
added 2 changesets with 2 changes to 2 files
rebasing 3:ff8d69a621f9 "L1"
- saved backup bundle to $TESTTMP/c/.hg/strip-backup/ff8d69a621f9-160fa373-backup.hg (glob)
+ saved backup bundle to $TESTTMP/c/.hg/strip-backup/ff8d69a621f9-160fa373-rebase.hg (glob)
$ hg tglog
@ 5: 'L1'
|
@@ -253,7 +253,7 @@
adding file changes
added 1 changesets with 1 changes to 1 files (+1 heads)
rebasing 5:518d153c0ba3 "L1"
- saved backup bundle to $TESTTMP/c/.hg/strip-backup/518d153c0ba3-73407f14-backup.hg (glob)
+ saved backup bundle to $TESTTMP/c/.hg/strip-backup/518d153c0ba3-73407f14-rebase.hg (glob)
$ hg tglog
@ 6: 'L1'
|
@@ -306,7 +306,7 @@
added 1 changesets with 1 changes to 1 files (+1 heads)
rebasing 6:0d0727eb7ce0 "L1"
rebasing 7:c1f58876e3bf "L2"
- saved backup bundle to $TESTTMP/c/.hg/strip-backup/0d0727eb7ce0-ef61ccb2-backup.hg (glob)
+ saved backup bundle to $TESTTMP/c/.hg/strip-backup/0d0727eb7ce0-ef61ccb2-rebase.hg (glob)
$ hg tglog
o 8: 'L2'
|
@@ -347,6 +347,7 @@
added 1 changesets with 1 changes to 1 files (+1 heads)
nothing to rebase - updating instead
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "65bc164c1d9b: R6"
1 other heads for branch "default"
$ hg tglog
@ 9: 'R6'
@@ -421,7 +422,7 @@
added 1 changesets with 1 changes to 1 files (+1 heads)
rebasing 7:864e0a2d2614 "L1"
rebasing 8:6dc0ea5dcf55 "L2"
- saved backup bundle to $TESTTMP/c/.hg/strip-backup/864e0a2d2614-2f72c89c-backup.hg (glob)
+ saved backup bundle to $TESTTMP/c/.hg/strip-backup/864e0a2d2614-2f72c89c-rebase.hg (glob)
$ hg tglog
@ 12: 'L2'
|
--- a/tests/test-rebase-rename.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-rebase-rename.t Wed Jul 19 07:51:41 2017 -0500
@@ -61,7 +61,7 @@
$ hg rebase -s 3 -d 2
rebasing 3:73a3ee40125d "rename A" (tip)
- saved backup bundle to $TESTTMP/a/.hg/strip-backup/73a3ee40125d-1d78ebcf-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a/.hg/strip-backup/73a3ee40125d-1d78ebcf-rebase.hg (glob)
$ hg tglog
@ 3: 'rename A'
@@ -152,7 +152,7 @@
$ hg rebase -s 3 -d 2
rebasing 3:0a8162ff18a8 "copy A" (tip)
- saved backup bundle to $TESTTMP/b/.hg/strip-backup/0a8162ff18a8-dd06302a-backup.hg (glob)
+ saved backup bundle to $TESTTMP/b/.hg/strip-backup/0a8162ff18a8-dd06302a-rebase.hg (glob)
$ hg tglog
@ 3: 'copy A'
@@ -236,7 +236,7 @@
$ hg rebase -s 4 -d 3
rebasing 4:b918d683b091 "Another unrelated change" (tip)
- saved backup bundle to $TESTTMP/repo/.hg/strip-backup/b918d683b091-3024bc57-backup.hg (glob)
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/b918d683b091-3024bc57-rebase.hg (glob)
$ hg diff --stat -c .
unrelated.txt | 1 +
@@ -287,7 +287,7 @@
rebasing 1:79d255d24ad2 "File b created as copy of a and modified"
rebasing 2:327f772bc074 "File c created as copy of b and modified"
rebasing 3:421b7e82bb85 "File d created as copy of c and modified"
- saved backup bundle to $TESTTMP/copy-gets-preserved/.hg/strip-backup/79d255d24ad2-a2265555-backup.hg (glob)
+ saved backup bundle to $TESTTMP/copy-gets-preserved/.hg/strip-backup/79d255d24ad2-a2265555-rebase.hg (glob)
$ hg update 4
3 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -314,7 +314,7 @@
merging b and c to c
rebasing 4:dbb9ba033561 "File d created as copy of c and modified"
merging c and d to d
- saved backup bundle to $TESTTMP/copy-gets-preserved/.hg/strip-backup/68bf06433839-dde37595-backup.hg (glob)
+ saved backup bundle to $TESTTMP/copy-gets-preserved/.hg/strip-backup/68bf06433839-dde37595-rebase.hg (glob)
$ hg co tip
3 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-rebase-scenario-global.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-rebase-scenario-global.t Wed Jul 19 07:51:41 2017 -0500
@@ -1,6 +1,7 @@
$ cat >> $HGRCPATH <<EOF
> [extensions]
> rebase=
+ > drawdag=$TESTDIR/drawdag.py
>
> [phases]
> publish=False
@@ -64,7 +65,7 @@
HG: user: Nicolas Dumazet <nicdumz.commits@gmail.com>
HG: branch 'default'
HG: added D
- saved backup bundle to $TESTTMP/a1/.hg/strip-backup/32af7686d403-6f7dface-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a1/.hg/strip-backup/32af7686d403-6f7dface-rebase.hg (glob)
$ cat D.orig
collide
$ rm D.orig
@@ -99,7 +100,7 @@
$ HGEDITOR=cat hg rebase -s 3 -d 5 --config merge.checkunknown=ignore
rebasing 3:32af7686d403 "D"
- saved backup bundle to $TESTTMP/a2/.hg/strip-backup/32af7686d403-6f7dface-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a2/.hg/strip-backup/32af7686d403-6f7dface-rebase.hg (glob)
$ cat D.orig
collide
$ rm D.orig
@@ -137,7 +138,7 @@
rebasing 4:9520eea781bc "E"
rebasing 6:eea13746799a "G"
note: rebase of 6:eea13746799a created no changes to commit
- saved backup bundle to $TESTTMP/a3/.hg/strip-backup/9520eea781bc-fcd8edd4-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a3/.hg/strip-backup/9520eea781bc-fcd8edd4-rebase.hg (glob)
$ f E.orig
E.orig: file not found
@@ -169,7 +170,7 @@
rebasing 6:eea13746799a "G"
note: rebase of 6:eea13746799a created no changes to commit
rebasing 7:02de42196ebe "H" (tip)
- saved backup bundle to $TESTTMP/a4/.hg/strip-backup/24b6387c8c8c-c3fe765d-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a4/.hg/strip-backup/24b6387c8c8c-c3fe765d-rebase.hg (glob)
$ hg tglog
@ 6: 'H'
@@ -196,7 +197,7 @@
$ hg rebase -s 6 -d 7
rebasing 6:eea13746799a "G"
- saved backup bundle to $TESTTMP/a5/.hg/strip-backup/eea13746799a-883828ed-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a5/.hg/strip-backup/eea13746799a-883828ed-rebase.hg (glob)
$ hg tglog
o 7: 'G'
@@ -227,7 +228,7 @@
rebasing 5:24b6387c8c8c "F"
rebasing 6:eea13746799a "G"
rebasing 7:02de42196ebe "H" (tip)
- saved backup bundle to $TESTTMP/a6/.hg/strip-backup/24b6387c8c8c-c3fe765d-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a6/.hg/strip-backup/24b6387c8c8c-c3fe765d-rebase.hg (glob)
$ hg tglog
@ 7: 'H'
@@ -300,7 +301,7 @@
$ hg rebase -d 0 -s 2
rebasing 2:5fddd98957c8 "C"
rebasing 3:32af7686d403 "D"
- saved backup bundle to $TESTTMP/a7/.hg/strip-backup/5fddd98957c8-f9244fa1-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a7/.hg/strip-backup/5fddd98957c8-f9244fa1-rebase.hg (glob)
$ hg tglog
o 7: 'D'
|
@@ -344,31 +345,31 @@
5
$ hg rebase -s9 -d0
rebasing 9:2b23e52411f4 "D" (tip)
- saved backup bundle to $TESTTMP/a7/.hg/strip-backup/2b23e52411f4-f942decf-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a7/.hg/strip-backup/2b23e52411f4-f942decf-rebase.hg (glob)
$ hg id -n # check we updated back to parent
5
$ hg log --template "{phase}\n" -r 9
draft
$ hg rebase -s9 -d1
rebasing 9:2cb10d0cfc6c "D" (tip)
- saved backup bundle to $TESTTMP/a7/.hg/strip-backup/2cb10d0cfc6c-ddb0f256-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a7/.hg/strip-backup/2cb10d0cfc6c-ddb0f256-rebase.hg (glob)
$ hg log --template "{phase}\n" -r 9
draft
$ hg phase --force --secret 9
$ hg rebase -s9 -d0
rebasing 9:c5b12b67163a "D" (tip)
- saved backup bundle to $TESTTMP/a7/.hg/strip-backup/c5b12b67163a-4e372053-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a7/.hg/strip-backup/c5b12b67163a-4e372053-rebase.hg (glob)
$ hg log --template "{phase}\n" -r 9
secret
$ hg rebase -s9 -d1
rebasing 9:2a0524f868ac "D" (tip)
- saved backup bundle to $TESTTMP/a7/.hg/strip-backup/2a0524f868ac-cefd8574-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a7/.hg/strip-backup/2a0524f868ac-cefd8574-rebase.hg (glob)
$ hg log --template "{phase}\n" -r 9
secret
Source phase lower than destination phase: new changeset get the phase of destination:
$ hg rebase -s8 -d9
rebasing 8:6d4f22462821 "C"
- saved backup bundle to $TESTTMP/a7/.hg/strip-backup/6d4f22462821-3441f70b-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a7/.hg/strip-backup/6d4f22462821-3441f70b-rebase.hg (glob)
$ hg log --template "{phase}\n" -r 'rev(9)'
secret
@@ -596,7 +597,7 @@
rebasing 6:3d8a618087a7 "G"
rebasing 7:72434a4e60b0 "H"
rebasing 8:479ddb54a924 "I" (tip)
- saved backup bundle to $TESTTMP/ah5/.hg/strip-backup/3d8a618087a7-b4f73f31-backup.hg (glob)
+ saved backup bundle to $TESTTMP/ah5/.hg/strip-backup/3d8a618087a7-b4f73f31-rebase.hg (glob)
$ hg tglog
o 8: 'I'
|
@@ -631,7 +632,7 @@
rebasing 6:3d8a618087a7 "G"
rebasing 7:72434a4e60b0 "H"
rebasing 8:479ddb54a924 "I" (tip)
- saved backup bundle to $TESTTMP/ah6/.hg/strip-backup/3d8a618087a7-aae93a24-backup.hg (glob)
+ saved backup bundle to $TESTTMP/ah6/.hg/strip-backup/3d8a618087a7-aae93a24-rebase.hg (glob)
$ hg tglog
o 8: 'I'
|
@@ -700,7 +701,7 @@
$ hg rebase --dest 'desc(G)' --rev 'desc(K) + desc(I)'
rebasing 8:e7ec4e813ba6 "I"
rebasing 10:23a4ace37988 "K" (tip)
- saved backup bundle to $TESTTMP/a8/.hg/strip-backup/23a4ace37988-b06984b3-backup.hg (glob)
+ saved backup bundle to $TESTTMP/a8/.hg/strip-backup/23a4ace37988-b06984b3-rebase.hg (glob)
$ hg log --rev 'children(desc(G))'
changeset: 9:adb617877056
parent: 6:eea13746799a
@@ -761,19 +762,13 @@
$ touch subfile
$ hg add subfile
$ hg commit -m 'second source with subdir'
-#if rmcwd
+
$ hg rebase -b . -d 1 --traceback
rebasing 2:779a07b1b7a0 "first source commit"
- current directory was removed
- (consider changing to repo root: $TESTTMP/cwd-vanish)
+ current directory was removed (rmcwd !)
+ (consider changing to repo root: $TESTTMP/cwd-vanish) (rmcwd !)
rebasing 3:a7d6f3a00bf3 "second source with subdir" (tip)
- saved backup bundle to $TESTTMP/cwd-vanish/.hg/strip-backup/779a07b1b7a0-853e0073-backup.hg (glob)
-#else
- $ hg rebase -b . -d 1 --traceback
- rebasing 2:779a07b1b7a0 "first source commit"
- rebasing 3:a7d6f3a00bf3 "second source with subdir" (tip)
- saved backup bundle to $TESTTMP/cwd-vanish/.hg/strip-backup/779a07b1b7a0-853e0073-backup.hg (glob)
-#endif
+ saved backup bundle to $TESTTMP/cwd-vanish/.hg/strip-backup/779a07b1b7a0-853e0073-rebase.hg (glob)
Get back to the root of cwd-vanish. Note that even though `cd ..`
works on most systems, it does not work on FreeBSD 10, so we use an
@@ -824,7 +819,7 @@
rebasing 4:82ae8dc7a9b7 "E"
rebasing 3:ab709c9f7171 "D"
rebasing 5:412b391de760 "F"
- saved backup bundle to $TESTTMP/order/.hg/strip-backup/76035bbd54bd-e341bc99-backup.hg (glob)
+ saved backup bundle to $TESTTMP/order/.hg/strip-backup/76035bbd54bd-e341bc99-rebase.hg (glob)
$ hg tglog
o 6: 'F'
@@ -913,3 +908,42 @@
date: Thu Jan 01 00:00:00 1970 +0000
summary: second source with subdir
+Testing rebase being called inside another transaction
+
+ $ cd $TESTTMP
+ $ hg init tr-state
+ $ cd tr-state
+ $ cat > $TESTTMP/wraprebase.py <<EOF
+ > from __future__ import absolute_import
+ > from mercurial import extensions
+ > def _rebase(orig, ui, repo, *args, **kwargs):
+ > with repo.wlock():
+ > with repo.lock():
+ > with repo.transaction('wrappedrebase'):
+ > return orig(ui, repo, *args, **kwargs)
+ > def wraprebase(loaded):
+ > assert loaded
+ > rebasemod = extensions.find('rebase')
+ > extensions.wrapcommand(rebasemod.cmdtable, 'rebase', _rebase)
+ > def extsetup(ui):
+ > extensions.afterloaded('rebase', wraprebase)
+ > EOF
+
+ $ cat >> .hg/hgrc <<EOF
+ > [extensions]
+ > wraprebase=$TESTTMP/wraprebase.py
+ > [experimental]
+ > evolution=all
+ > EOF
+
+ $ hg debugdrawdag <<'EOS'
+ > B C
+ > |/
+ > A
+ > EOS
+
+ $ hg rebase -s C -d B
+ rebasing 2:dc0947a82db8 "C" (C tip)
+
+ $ [ -f .hg/rebasestate ] && echo 'WRONG: rebasestate should not exist'
+ [1]
--- a/tests/test-rebuildstate.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-rebuildstate.t Wed Jul 19 07:51:41 2017 -0500
@@ -1,9 +1,9 @@
$ cat > adddrop.py <<EOF
- > from mercurial import cmdutil
+ > from mercurial import registrar
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
- > @command('debugadddrop',
+ > command = registrar.command(cmdtable)
+ > @command(b'debugadddrop',
> [('', 'drop', False, 'drop file from dirstate', 'FILE'),
> ('', 'normal-lookup', False, 'add file to dirstate', 'FILE')],
> 'hg debugadddrop')
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-releasenotes-formatting.t Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,378 @@
+ $ cat >> $HGRCPATH << EOF
+ > [extensions]
+ > releasenotes=
+ > EOF
+
+ $ hg init simple-repo
+ $ cd simple-repo
+
+A fix with a single line results in a bullet point in the appropriate section
+
+ $ touch fix1
+ $ hg -q commit -A -l - << EOF
+ > single line fix
+ >
+ > .. fix::
+ >
+ > Simple fix with a single line content entry.
+ > EOF
+
+ $ hg releasenotes -r . $TESTTMP/relnotes-single-line
+
+ $ cat $TESTTMP/relnotes-single-line
+ Bug Fixes
+ =========
+
+ * Simple fix with a single line content entry.
+
+A fix with multiple lines is handled correctly
+
+ $ touch fix2
+ $ hg -q commit -A -l - << EOF
+ > multi line fix
+ >
+ > .. fix::
+ >
+ > First line of fix entry.
+ > A line after it without a space.
+ >
+ > A new paragraph in the fix entry. And this is a really long line. It goes on for a while.
+ > And it wraps around to a new paragraph.
+ > EOF
+
+ $ hg releasenotes -r . $TESTTMP/relnotes-multi-line
+ $ cat $TESTTMP/relnotes-multi-line
+ Bug Fixes
+ =========
+
+ * First line of fix entry. A line after it without a space.
+
+ A new paragraph in the fix entry. And this is a really long line. It goes on
+ for a while. And it wraps around to a new paragraph.
+
+A release note with a title results in a sub-section being written
+
+ $ touch fix3
+ $ hg -q commit -A -l - << EOF
+ > fix with title
+ >
+ > .. fix:: Fix Title
+ >
+ > First line of fix with title.
+ >
+ > Another paragraph of fix with title. But this is a paragraph
+ > with multiple lines.
+ > EOF
+
+ $ hg releasenotes -r . $TESTTMP/relnotes-fix-with-title
+ $ cat $TESTTMP/relnotes-fix-with-title
+ Bug Fixes
+ =========
+
+ Fix Title
+ ---------
+
+ First line of fix with title.
+
+ Another paragraph of fix with title. But this is a paragraph with multiple
+ lines.
+
+ $ cd ..
+
+Formatting of multiple bullet points works
+
+ $ hg init multiple-bullets
+ $ cd multiple-bullets
+ $ touch fix1
+ $ hg -q commit -A -l - << EOF
+ > commit 1
+ >
+ > .. fix::
+ >
+ > first fix
+ > EOF
+
+ $ touch fix2
+ $ hg -q commit -A -l - << EOF
+ > commit 2
+ >
+ > .. fix::
+ >
+ > second fix
+ >
+ > Second paragraph of second fix.
+ > EOF
+
+ $ touch fix3
+ $ hg -q commit -A -l - << EOF
+ > commit 3
+ >
+ > .. fix::
+ >
+ > third fix
+ > EOF
+
+ $ hg releasenotes -r 'all()' $TESTTMP/relnotes-multiple-bullets
+ $ cat $TESTTMP/relnotes-multiple-bullets
+ Bug Fixes
+ =========
+
+ * first fix
+
+ * second fix
+
+ Second paragraph of second fix.
+
+ * third fix
+
+ $ cd ..
+
+Formatting of multiple sections works
+
+ $ hg init multiple-sections
+ $ cd multiple-sections
+ $ touch fix1
+ $ hg -q commit -A -l - << EOF
+ > commit 1
+ >
+ > .. fix::
+ >
+ > first fix
+ > EOF
+
+ $ touch feature1
+ $ hg -q commit -A -l - << EOF
+ > commit 2
+ >
+ > .. feature::
+ >
+ > description of the new feature
+ > EOF
+
+ $ touch fix2
+ $ hg -q commit -A -l - << EOF
+ > commit 3
+ >
+ > .. fix::
+ >
+ > second fix
+ > EOF
+
+ $ hg releasenotes -r 'all()' $TESTTMP/relnotes-multiple-sections
+ $ cat $TESTTMP/relnotes-multiple-sections
+ New Features
+ ============
+
+ * description of the new feature
+
+ Bug Fixes
+ =========
+
+ * first fix
+
+ * second fix
+
+ $ cd ..
+
+Section with subsections and bullets
+
+ $ hg init multiple-subsections
+ $ cd multiple-subsections
+
+ $ touch fix1
+ $ hg -q commit -A -l - << EOF
+ > commit 1
+ >
+ > .. fix:: Title of First Fix
+ >
+ > First paragraph of first fix.
+ >
+ > Second paragraph of first fix.
+ > EOF
+
+ $ touch fix2
+ $ hg -q commit -A -l - << EOF
+ > commit 2
+ >
+ > .. fix:: Title of Second Fix
+ >
+ > First paragraph of second fix.
+ >
+ > Second paragraph of second fix.
+ > EOF
+
+ $ hg releasenotes -r 'all()' $TESTTMP/relnotes-multiple-subsections
+ $ cat $TESTTMP/relnotes-multiple-subsections
+ Bug Fixes
+ =========
+
+ Title of First Fix
+ ------------------
+
+ First paragraph of first fix.
+
+ Second paragraph of first fix.
+
+ Title of Second Fix
+ -------------------
+
+ First paragraph of second fix.
+
+ Second paragraph of second fix.
+
+Now add bullet points to sections having sub-sections
+
+ $ touch fix3
+ $ hg -q commit -A -l - << EOF
+ > commit 3
+ >
+ > .. fix::
+ >
+ > Short summary of fix 3
+ > EOF
+
+ $ hg releasenotes -r 'all()' $TESTTMP/relnotes-multiple-subsections-with-bullets
+ $ cat $TESTTMP/relnotes-multiple-subsections-with-bullets
+ Bug Fixes
+ =========
+
+ Title of First Fix
+ ------------------
+
+ First paragraph of first fix.
+
+ Second paragraph of first fix.
+
+ Title of Second Fix
+ -------------------
+
+ First paragraph of second fix.
+
+ Second paragraph of second fix.
+
+ Other Changes
+ -------------
+
+ * Short summary of fix 3
+
+ $ cd ..
+
+Multiple 'Other Changes' sub-sections for every section
+
+ $ hg init multiple-otherchanges
+ $ cd multiple-otherchanges
+
+ $ touch fix1
+ $ hg -q commit -A -l - << EOF
+ > commit 1
+ >
+ > .. fix:: Title of First Fix
+ >
+ > First paragraph of fix 1.
+ > EOF
+
+ $ touch feature1
+ $ hg -q commit -A -l - << EOF
+ > commit 2
+ >
+ > .. feature:: Title of First Feature
+ >
+ > First paragraph of feature 1.
+ > EOF
+
+ $ touch feature2
+ $ hg -q commit -A -l - << EOF
+ > commit 3
+ >
+ > .. feature::
+ >
+ > Short summary of feature 2.
+ > EOF
+
+ $ touch fix2
+ $ hg -q commit -A -l - << EOF
+ > commit 4
+ >
+ > .. fix::
+ >
+ > Short summary of fix 2
+ > EOF
+
+ $ hg releasenotes -r 'all()' $TESTTMP/relnotes-multiple-otherchanges
+ $ cat $TESTTMP/relnotes-multiple-otherchanges
+ New Features
+ ============
+
+ Title of First Feature
+ ----------------------
+
+ First paragraph of feature 1.
+
+ Other Changes
+ -------------
+
+ * Short summary of feature 2.
+
+ Bug Fixes
+ =========
+
+ Title of First Fix
+ ------------------
+
+ First paragraph of fix 1.
+
+ Other Changes
+ -------------
+
+ * Short summary of fix 2
+
+ $ cd ..
+
+Using custom sections in notes
+
+ $ hg init custom-section
+ $ cd custom-section
+ $ cat >> .hgreleasenotes << EOF
+ > [sections]
+ > testsection=Name of Section
+ > EOF
+
+ $ touch a
+ $ hg -q commit -A -l - << EOF
+ > commit 1
+ >
+ > .. testsection::
+ >
+ > First paragraph under this admonition.
+ > EOF
+
+ $ hg releasenotes -r . $TESTTMP/relnotes-custom-section
+ $ cat $TESTTMP/relnotes-custom-section
+ Name of Section
+ ===============
+
+ * First paragraph under this admonition.
+
+Overriding default sections (For eg. by default feature = New Features)
+
+ $ cat >> .hgreleasenotes << EOF
+ > [sections]
+ > feature=Feature Additions
+ > EOF
+
+ $ touch b
+ $ hg -q commit -A -l - << EOF
+ > commit 2
+ >
+ > .. feature::
+ >
+ > Adds a new feature.
+ > EOF
+
+ $ hg releasenotes -r . $TESTTMP/relnotes-override-section
+ $ cat $TESTTMP/relnotes-override-section
+ Feature Additions
+ =================
+
+ * Adds a new feature.
+
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-releasenotes-merging.t Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,160 @@
+ $ cat >> $HGRCPATH << EOF
+ > [extensions]
+ > releasenotes=
+ > EOF
+
+ $ hg init simple-repo
+ $ cd simple-repo
+
+A fix directive from commit message is added to release notes
+
+ $ touch fix1
+ $ hg -q commit -A -l - << EOF
+ > commit 1
+ >
+ > .. fix::
+ >
+ > Fix from commit message.
+ > EOF
+
+ $ cat >> $TESTTMP/single-fix-bullet << EOF
+ > Bug Fixes
+ > =========
+ >
+ > * Fix from release notes.
+ > EOF
+
+ $ hg releasenotes -r . $TESTTMP/single-fix-bullet
+
+ $ cat $TESTTMP/single-fix-bullet
+ Bug Fixes
+ =========
+
+ * Fix from release notes.
+
+ * Fix from commit message.
+
+Processing again ignores the already added bullet.
+
+ $ hg releasenotes -r . $TESTTMP/single-fix-bullet
+
+ $ cat $TESTTMP/single-fix-bullet
+ Bug Fixes
+ =========
+
+ * Fix from release notes.
+
+ * Fix from commit message.
+
+ $ cd ..
+
+Sections are unioned
+
+ $ hg init subsections
+ $ cd subsections
+ $ touch fix1
+ $ hg -q commit -A -l - << EOF
+ > Commit 1
+ >
+ > .. feature:: Commit Message Feature
+ >
+ > This describes a feature from a commit message.
+ > EOF
+
+ $ cat >> $TESTTMP/single-feature-section << EOF
+ > New Features
+ > ============
+ >
+ > Notes Feature
+ > -------------
+ >
+ > This describes a feature from a release notes file.
+ > EOF
+
+ $ hg releasenotes -r . $TESTTMP/single-feature-section
+
+ $ cat $TESTTMP/single-feature-section
+ New Features
+ ============
+
+ Notes Feature
+ -------------
+
+ This describes a feature from a release notes file.
+
+ Commit Message Feature
+ ----------------------
+
+ This describes a feature from a commit message.
+
+Doing it again won't add another section
+
+ $ hg releasenotes -r . $TESTTMP/single-feature-section
+ Commit Message Feature already exists in feature section; ignoring
+
+ $ cat $TESTTMP/single-feature-section
+ New Features
+ ============
+
+ Notes Feature
+ -------------
+
+ This describes a feature from a release notes file.
+
+ Commit Message Feature
+ ----------------------
+
+ This describes a feature from a commit message.
+
+ $ cd ..
+
+Bullets from rev merge with those from notes file.
+
+ $ hg init bullets
+ $ cd bullets
+ $ touch fix1
+ $ hg -q commit -A -l - << EOF
+ > commit 1
+ >
+ > .. fix::
+ >
+ > this is fix1.
+ > EOF
+
+ $ touch fix2
+ $ hg -q commit -A -l - << EOF
+ > commit 2
+ >
+ > .. fix::
+ >
+ > this is fix2.
+ > EOF
+
+ $ hg releasenotes -r 'all()' $TESTTMP/relnotes-bullet-problem
+ $ cat $TESTTMP/relnotes-bullet-problem
+ Bug Fixes
+ =========
+
+ * this is fix1.
+
+ * this is fix2.
+ $ touch fix3
+ $ hg -q commit -A -l - << EOF
+ > commit 3
+ >
+ > .. fix::
+ >
+ > this is fix3.
+ > EOF
+
+ $ hg releasenotes -r . $TESTTMP/relnotes-bullet-problem
+ $ cat $TESTTMP/relnotes-bullet-problem
+ Bug Fixes
+ =========
+
+ * this is fix1.
+
+ * this is fix2.
+
+ * this is fix3.
+
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-releasenotes-parsing.t Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,177 @@
+ $ cat >> $HGRCPATH << EOF
+ > [extensions]
+ > releasenotes=
+ > EOF
+
+Bullet point with a single item spanning a single line
+
+ $ hg debugparsereleasenotes - << EOF
+ > New Features
+ > ============
+ >
+ > * Bullet point item with a single line
+ > EOF
+ section: feature
+ bullet point:
+ paragraph: Bullet point item with a single line
+
+Bullet point that spans multiple lines.
+
+ $ hg debugparsereleasenotes - << EOF
+ > New Features
+ > ============
+ >
+ > * Bullet point with a paragraph
+ > that spans multiple lines.
+ > EOF
+ section: feature
+ bullet point:
+ paragraph: Bullet point with a paragraph that spans multiple lines.
+
+ $ hg debugparsereleasenotes - << EOF
+ > New Features
+ > ============
+ >
+ > * Bullet point with a paragraph
+ > that spans multiple lines.
+ >
+ > And has an empty line between lines too.
+ > With a line cuddling that.
+ > EOF
+ section: feature
+ bullet point:
+ paragraph: Bullet point with a paragraph that spans multiple lines.
+ paragraph: And has an empty line between lines too. With a line cuddling that.
+
+Multiple bullet points. With some entries being multiple lines.
+
+ $ hg debugparsereleasenotes - << EOF
+ > New Features
+ > ============
+ >
+ > * First bullet point. It has a single line.
+ >
+ > * Second bullet point.
+ > It consists of multiple lines.
+ >
+ > * Third bullet point. It has a single line.
+ > EOF
+ section: feature
+ bullet point:
+ paragraph: First bullet point. It has a single line.
+ bullet point:
+ paragraph: Second bullet point. It consists of multiple lines.
+ bullet point:
+ paragraph: Third bullet point. It has a single line.
+
+Bullet point without newline between items
+
+ $ hg debugparsereleasenotes - << EOF
+ > New Features
+ > ============
+ >
+ > * First bullet point
+ > * Second bullet point
+ > And it has multiple lines
+ > * Third bullet point
+ > * Fourth bullet point
+ > EOF
+ section: feature
+ bullet point:
+ paragraph: First bullet point
+ bullet point:
+ paragraph: Second bullet point And it has multiple lines
+ bullet point:
+ paragraph: Third bullet point
+ bullet point:
+ paragraph: Fourth bullet point
+
+Sub-section contents are read
+
+ $ hg debugparsereleasenotes - << EOF
+ > New Features
+ > ============
+ >
+ > First Feature
+ > -------------
+ >
+ > This is the first new feature that was implemented.
+ >
+ > And a second paragraph about it.
+ >
+ > Second Feature
+ > --------------
+ >
+ > This is the second new feature that was implemented.
+ >
+ > Paragraph two.
+ >
+ > Paragraph three.
+ > EOF
+ section: feature
+ subsection: First Feature
+ paragraph: This is the first new feature that was implemented.
+ paragraph: And a second paragraph about it.
+ subsection: Second Feature
+ paragraph: This is the second new feature that was implemented.
+ paragraph: Paragraph two.
+ paragraph: Paragraph three.
+
+Multiple sections are read
+
+ $ hg debugparsereleasenotes - << EOF
+ > New Features
+ > ============
+ >
+ > * Feature 1
+ > * Feature 2
+ >
+ > Bug Fixes
+ > =========
+ >
+ > * Fix 1
+ > * Fix 2
+ > EOF
+ section: feature
+ bullet point:
+ paragraph: Feature 1
+ bullet point:
+ paragraph: Feature 2
+ section: fix
+ bullet point:
+ paragraph: Fix 1
+ bullet point:
+ paragraph: Fix 2
+
+Mixed sub-sections and bullet list
+
+ $ hg debugparsereleasenotes - << EOF
+ > New Features
+ > ============
+ >
+ > Feature 1
+ > ---------
+ >
+ > Some words about the first feature.
+ >
+ > Feature 2
+ > ---------
+ >
+ > Some words about the second feature.
+ > That span multiple lines.
+ >
+ > Other Changes
+ > -------------
+ >
+ > * Bullet item 1
+ > * Bullet item 2
+ > EOF
+ section: feature
+ subsection: Feature 1
+ paragraph: Some words about the first feature.
+ subsection: Feature 2
+ paragraph: Some words about the second feature. That span multiple lines.
+ bullet point:
+ paragraph: Bullet item 1
+ bullet point:
+ paragraph: Bullet item 2
--- a/tests/test-relink.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-relink.t Wed Jul 19 07:51:41 2017 -0500
@@ -93,8 +93,8 @@
check hardlinks
- $ python arelinked.py repo/.hg/store/data/a.i clone/.hg/store/data/a.i
+ $ $PYTHON arelinked.py repo/.hg/store/data/a.i clone/.hg/store/data/a.i
repo/.hg/store/data/a.i == clone/.hg/store/data/a.i
- $ python arelinked.py repo/.hg/store/data/b.i clone/.hg/store/data/b.i
+ $ $PYTHON arelinked.py repo/.hg/store/data/b.i clone/.hg/store/data/b.i
repo/.hg/store/data/b.i != clone/.hg/store/data/b.i
--- a/tests/test-rename-merge2.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-rename-merge2.t Wed Jul 19 07:51:41 2017 -0500
@@ -47,7 +47,7 @@
> echo "--------------"
> echo "test L:$1 R:$2 W:$3 - $4"
> echo "--------------"
- > hg merge -y --debug --traceback --tool="python ../merge"
+ > hg merge -y --debug --traceback --tool="$PYTHON ../merge"
>
> echo "--------------"
> hg status -camC -X rev
@@ -89,20 +89,19 @@
preserving a for resolve of b
preserving rev for resolve of rev
starting 4 threads for background file closing (?)
- a: remote unchanged -> k
b: remote copied from a -> m (premerge)
- picked tool 'python ../merge' for b (binary False symlink False changedelete False)
+ picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
merging a and b to b
my b@e300d1c794ec+ other b@4ce40f5aca24 ancestor a@924404dff337
premerge successful
rev: versions differ -> m (premerge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
merging rev
my rev@e300d1c794ec+ other rev@4ce40f5aca24 ancestor rev@924404dff337
rev: versions differ -> m (merge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
my rev@e300d1c794ec+ other rev@4ce40f5aca24 ancestor rev@924404dff337
- launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
+ launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
merge tool returned: 0
0 files updated, 2 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
@@ -131,18 +130,18 @@
a: remote is newer -> g
getting a
b: local copied/moved from a -> m (premerge)
- picked tool 'python ../merge' for b (binary False symlink False changedelete False)
+ picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
merging b and a to b
my b@86a2aa42fc76+ other a@f4db7e329e71 ancestor a@924404dff337
premerge successful
rev: versions differ -> m (premerge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
merging rev
my rev@86a2aa42fc76+ other rev@f4db7e329e71 ancestor rev@924404dff337
rev: versions differ -> m (merge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
my rev@86a2aa42fc76+ other rev@f4db7e329e71 ancestor rev@924404dff337
- launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
+ launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
merge tool returned: 0
1 files updated, 2 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
@@ -171,18 +170,18 @@
removing a
starting 4 threads for background file closing (?)
b: remote moved from a -> m (premerge)
- picked tool 'python ../merge' for b (binary False symlink False changedelete False)
+ picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
merging a and b to b
my b@e300d1c794ec+ other b@bdb19105162a ancestor a@924404dff337
premerge successful
rev: versions differ -> m (premerge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
merging rev
my rev@e300d1c794ec+ other rev@bdb19105162a ancestor rev@924404dff337
rev: versions differ -> m (merge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
my rev@e300d1c794ec+ other rev@bdb19105162a ancestor rev@924404dff337
- launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
+ launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
merge tool returned: 0
0 files updated, 2 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
@@ -209,18 +208,18 @@
preserving rev for resolve of rev
starting 4 threads for background file closing (?)
b: local copied/moved from a -> m (premerge)
- picked tool 'python ../merge' for b (binary False symlink False changedelete False)
+ picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
merging b and a to b
my b@02963e448370+ other a@f4db7e329e71 ancestor a@924404dff337
premerge successful
rev: versions differ -> m (premerge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
merging rev
my rev@02963e448370+ other rev@f4db7e329e71 ancestor rev@924404dff337
rev: versions differ -> m (merge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
my rev@02963e448370+ other rev@f4db7e329e71 ancestor rev@924404dff337
- launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
+ launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
merge tool returned: 0
0 files updated, 2 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
@@ -247,13 +246,13 @@
b: remote created -> g
getting b
rev: versions differ -> m (premerge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
merging rev
my rev@94b33a1b7f2d+ other rev@4ce40f5aca24 ancestor rev@924404dff337
rev: versions differ -> m (merge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
my rev@94b33a1b7f2d+ other rev@4ce40f5aca24 ancestor rev@924404dff337
- launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
+ launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
merge tool returned: 0
1 files updated, 1 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
@@ -279,13 +278,13 @@
preserving rev for resolve of rev
starting 4 threads for background file closing (?)
rev: versions differ -> m (premerge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
merging rev
my rev@86a2aa42fc76+ other rev@97c705ade336 ancestor rev@924404dff337
rev: versions differ -> m (merge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
my rev@86a2aa42fc76+ other rev@97c705ade336 ancestor rev@924404dff337
- launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
+ launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
merge tool returned: 0
0 files updated, 1 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
@@ -314,13 +313,13 @@
b: remote created -> g
getting b
rev: versions differ -> m (premerge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
merging rev
my rev@94b33a1b7f2d+ other rev@bdb19105162a ancestor rev@924404dff337
rev: versions differ -> m (merge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
my rev@94b33a1b7f2d+ other rev@bdb19105162a ancestor rev@924404dff337
- launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
+ launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
merge tool returned: 0
1 files updated, 1 files merged, 1 files removed, 0 files unresolved
(branch merge, don't forget to commit)
@@ -345,13 +344,13 @@
preserving rev for resolve of rev
starting 4 threads for background file closing (?)
rev: versions differ -> m (premerge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
merging rev
my rev@02963e448370+ other rev@97c705ade336 ancestor rev@924404dff337
rev: versions differ -> m (merge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
my rev@02963e448370+ other rev@97c705ade336 ancestor rev@924404dff337
- launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
+ launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
merge tool returned: 0
0 files updated, 1 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
@@ -374,22 +373,22 @@
preserving rev for resolve of rev
starting 4 threads for background file closing (?)
b: both renamed from a -> m (premerge)
- picked tool 'python ../merge' for b (binary False symlink False changedelete False)
+ picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
merging b
my b@62e7bf090eba+ other b@49b6d8032493 ancestor a@924404dff337
rev: versions differ -> m (premerge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
merging rev
my rev@62e7bf090eba+ other rev@49b6d8032493 ancestor rev@924404dff337
b: both renamed from a -> m (merge)
- picked tool 'python ../merge' for b (binary False symlink False changedelete False)
+ picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
my b@62e7bf090eba+ other b@49b6d8032493 ancestor a@924404dff337
- launching merge tool: python ../merge *$TESTTMP/t/t/b* * * (glob)
+ launching merge tool: * ../merge *$TESTTMP/t/t/b* * * (glob)
merge tool returned: 0
rev: versions differ -> m (merge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
my rev@62e7bf090eba+ other rev@49b6d8032493 ancestor rev@924404dff337
- launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
+ launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
merge tool returned: 0
0 files updated, 2 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
@@ -424,13 +423,13 @@
c: remote created -> g
getting c
rev: versions differ -> m (premerge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
merging rev
my rev@02963e448370+ other rev@fe905ef2c33e ancestor rev@924404dff337
rev: versions differ -> m (merge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
my rev@02963e448370+ other rev@fe905ef2c33e ancestor rev@924404dff337
- launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
+ launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
merge tool returned: 0
1 files updated, 1 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
@@ -454,22 +453,22 @@
preserving rev for resolve of rev
starting 4 threads for background file closing (?)
b: both created -> m (premerge)
- picked tool 'python ../merge' for b (binary False symlink False changedelete False)
+ picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
merging b
my b@86a2aa42fc76+ other b@af30c7647fc7 ancestor b@000000000000
rev: versions differ -> m (premerge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
merging rev
my rev@86a2aa42fc76+ other rev@af30c7647fc7 ancestor rev@924404dff337
b: both created -> m (merge)
- picked tool 'python ../merge' for b (binary False symlink False changedelete False)
+ picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
my b@86a2aa42fc76+ other b@af30c7647fc7 ancestor b@000000000000
- launching merge tool: python ../merge *$TESTTMP/t/t/b* * * (glob)
+ launching merge tool: * ../merge *$TESTTMP/t/t/b* * * (glob)
merge tool returned: 0
rev: versions differ -> m (merge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
my rev@86a2aa42fc76+ other rev@af30c7647fc7 ancestor rev@924404dff337
- launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
+ launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
merge tool returned: 0
0 files updated, 2 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
@@ -495,22 +494,22 @@
removing a
starting 4 threads for background file closing (?)
b: both created -> m (premerge)
- picked tool 'python ../merge' for b (binary False symlink False changedelete False)
+ picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
merging b
my b@59318016310c+ other b@bdb19105162a ancestor b@000000000000
rev: versions differ -> m (premerge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
merging rev
my rev@59318016310c+ other rev@bdb19105162a ancestor rev@924404dff337
b: both created -> m (merge)
- picked tool 'python ../merge' for b (binary False symlink False changedelete False)
+ picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
my b@59318016310c+ other b@bdb19105162a ancestor b@000000000000
- launching merge tool: python ../merge *$TESTTMP/t/t/b* * * (glob)
+ launching merge tool: * ../merge *$TESTTMP/t/t/b* * * (glob)
merge tool returned: 0
rev: versions differ -> m (merge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
my rev@59318016310c+ other rev@bdb19105162a ancestor rev@924404dff337
- launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
+ launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
merge tool returned: 0
0 files updated, 2 files merged, 1 files removed, 0 files unresolved
(branch merge, don't forget to commit)
@@ -534,22 +533,22 @@
a: remote is newer -> g
getting a
b: both created -> m (premerge)
- picked tool 'python ../merge' for b (binary False symlink False changedelete False)
+ picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
merging b
my b@86a2aa42fc76+ other b@8dbce441892a ancestor b@000000000000
rev: versions differ -> m (premerge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
merging rev
my rev@86a2aa42fc76+ other rev@8dbce441892a ancestor rev@924404dff337
b: both created -> m (merge)
- picked tool 'python ../merge' for b (binary False symlink False changedelete False)
+ picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
my b@86a2aa42fc76+ other b@8dbce441892a ancestor b@000000000000
- launching merge tool: python ../merge *$TESTTMP/t/t/b* * * (glob)
+ launching merge tool: * ../merge *$TESTTMP/t/t/b* * * (glob)
merge tool returned: 0
rev: versions differ -> m (merge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
my rev@86a2aa42fc76+ other rev@8dbce441892a ancestor rev@924404dff337
- launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
+ launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
merge tool returned: 0
1 files updated, 2 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
@@ -575,22 +574,22 @@
removing a
starting 4 threads for background file closing (?)
b: both created -> m (premerge)
- picked tool 'python ../merge' for b (binary False symlink False changedelete False)
+ picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
merging b
my b@59318016310c+ other b@bdb19105162a ancestor b@000000000000
rev: versions differ -> m (premerge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
merging rev
my rev@59318016310c+ other rev@bdb19105162a ancestor rev@924404dff337
b: both created -> m (merge)
- picked tool 'python ../merge' for b (binary False symlink False changedelete False)
+ picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
my b@59318016310c+ other b@bdb19105162a ancestor b@000000000000
- launching merge tool: python ../merge *$TESTTMP/t/t/b* * * (glob)
+ launching merge tool: * ../merge *$TESTTMP/t/t/b* * * (glob)
merge tool returned: 0
rev: versions differ -> m (merge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
my rev@59318016310c+ other rev@bdb19105162a ancestor rev@924404dff337
- launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
+ launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
merge tool returned: 0
0 files updated, 2 files merged, 1 files removed, 0 files unresolved
(branch merge, don't forget to commit)
@@ -614,22 +613,22 @@
a: remote is newer -> g
getting a
b: both created -> m (premerge)
- picked tool 'python ../merge' for b (binary False symlink False changedelete False)
+ picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
merging b
my b@86a2aa42fc76+ other b@8dbce441892a ancestor b@000000000000
rev: versions differ -> m (premerge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
merging rev
my rev@86a2aa42fc76+ other rev@8dbce441892a ancestor rev@924404dff337
b: both created -> m (merge)
- picked tool 'python ../merge' for b (binary False symlink False changedelete False)
+ picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
my b@86a2aa42fc76+ other b@8dbce441892a ancestor b@000000000000
- launching merge tool: python ../merge *$TESTTMP/t/t/b* * * (glob)
+ launching merge tool: * ../merge *$TESTTMP/t/t/b* * * (glob)
merge tool returned: 0
rev: versions differ -> m (merge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
my rev@86a2aa42fc76+ other rev@8dbce441892a ancestor rev@924404dff337
- launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
+ launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
merge tool returned: 0
1 files updated, 2 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
@@ -652,24 +651,23 @@
preserving b for resolve of b
preserving rev for resolve of rev
starting 4 threads for background file closing (?)
- a: remote unchanged -> k
b: both created -> m (premerge)
- picked tool 'python ../merge' for b (binary False symlink False changedelete False)
+ picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
merging b
my b@0b76e65c8289+ other b@4ce40f5aca24 ancestor b@000000000000
rev: versions differ -> m (premerge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
merging rev
my rev@0b76e65c8289+ other rev@4ce40f5aca24 ancestor rev@924404dff337
b: both created -> m (merge)
- picked tool 'python ../merge' for b (binary False symlink False changedelete False)
+ picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
my b@0b76e65c8289+ other b@4ce40f5aca24 ancestor b@000000000000
- launching merge tool: python ../merge *$TESTTMP/t/t/b* * * (glob)
+ launching merge tool: * ../merge *$TESTTMP/t/t/b* * * (glob)
merge tool returned: 0
rev: versions differ -> m (merge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
my rev@0b76e65c8289+ other rev@4ce40f5aca24 ancestor rev@924404dff337
- launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
+ launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
merge tool returned: 0
0 files updated, 2 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
@@ -697,22 +695,22 @@
other [merge rev] changed a which local [working copy] deleted
use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u
b: both created -> m (premerge)
- picked tool 'python ../merge' for b (binary False symlink False changedelete False)
+ picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
merging b
my b@02963e448370+ other b@8dbce441892a ancestor b@000000000000
rev: versions differ -> m (premerge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
merging rev
my rev@02963e448370+ other rev@8dbce441892a ancestor rev@924404dff337
b: both created -> m (merge)
- picked tool 'python ../merge' for b (binary False symlink False changedelete False)
+ picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
my b@02963e448370+ other b@8dbce441892a ancestor b@000000000000
- launching merge tool: python ../merge *$TESTTMP/t/t/b* * * (glob)
+ launching merge tool: * ../merge *$TESTTMP/t/t/b* * * (glob)
merge tool returned: 0
rev: versions differ -> m (merge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
my rev@02963e448370+ other rev@8dbce441892a ancestor rev@924404dff337
- launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
+ launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
merge tool returned: 0
0 files updated, 2 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
@@ -742,22 +740,22 @@
local [working copy] changed a which other [merge rev] deleted
use (c)hanged version, (d)elete, or leave (u)nresolved? u
b: both created -> m (premerge)
- picked tool 'python ../merge' for b (binary False symlink False changedelete False)
+ picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
merging b
my b@0b76e65c8289+ other b@bdb19105162a ancestor b@000000000000
rev: versions differ -> m (premerge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
merging rev
my rev@0b76e65c8289+ other rev@bdb19105162a ancestor rev@924404dff337
b: both created -> m (merge)
- picked tool 'python ../merge' for b (binary False symlink False changedelete False)
+ picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
my b@0b76e65c8289+ other b@bdb19105162a ancestor b@000000000000
- launching merge tool: python ../merge *$TESTTMP/t/t/b* * * (glob)
+ launching merge tool: * ../merge *$TESTTMP/t/t/b* * * (glob)
merge tool returned: 0
rev: versions differ -> m (merge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
my rev@0b76e65c8289+ other rev@bdb19105162a ancestor rev@924404dff337
- launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
+ launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
merge tool returned: 0
0 files updated, 2 files merged, 0 files removed, 1 files unresolved
use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
@@ -786,22 +784,22 @@
removing a
starting 4 threads for background file closing (?)
b: remote moved from a -> m (premerge)
- picked tool 'python ../merge' for b (binary False symlink False changedelete False)
+ picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
merging a and b to b
my b@e300d1c794ec+ other b@49b6d8032493 ancestor a@924404dff337
rev: versions differ -> m (premerge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
merging rev
my rev@e300d1c794ec+ other rev@49b6d8032493 ancestor rev@924404dff337
b: remote moved from a -> m (merge)
- picked tool 'python ../merge' for b (binary False symlink False changedelete False)
+ picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
my b@e300d1c794ec+ other b@49b6d8032493 ancestor a@924404dff337
- launching merge tool: python ../merge *$TESTTMP/t/t/b* * * (glob)
+ launching merge tool: * ../merge *$TESTTMP/t/t/b* * * (glob)
merge tool returned: 0
rev: versions differ -> m (merge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
my rev@e300d1c794ec+ other rev@49b6d8032493 ancestor rev@924404dff337
- launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
+ launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
merge tool returned: 0
0 files updated, 2 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
@@ -828,22 +826,22 @@
preserving rev for resolve of rev
starting 4 threads for background file closing (?)
b: local copied/moved from a -> m (premerge)
- picked tool 'python ../merge' for b (binary False symlink False changedelete False)
+ picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
merging b and a to b
my b@62e7bf090eba+ other a@f4db7e329e71 ancestor a@924404dff337
rev: versions differ -> m (premerge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
merging rev
my rev@62e7bf090eba+ other rev@f4db7e329e71 ancestor rev@924404dff337
b: local copied/moved from a -> m (merge)
- picked tool 'python ../merge' for b (binary False symlink False changedelete False)
+ picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
my b@62e7bf090eba+ other a@f4db7e329e71 ancestor a@924404dff337
- launching merge tool: python ../merge *$TESTTMP/t/t/b* * * (glob)
+ launching merge tool: * ../merge *$TESTTMP/t/t/b* * * (glob)
merge tool returned: 0
rev: versions differ -> m (merge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
my rev@62e7bf090eba+ other rev@f4db7e329e71 ancestor rev@924404dff337
- launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
+ launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
merge tool returned: 0
0 files updated, 2 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
@@ -876,18 +874,18 @@
c: remote created -> g
getting c
b: local copied/moved from a -> m (premerge)
- picked tool 'python ../merge' for b (binary False symlink False changedelete False)
+ picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
merging b and a to b
my b@02963e448370+ other a@2b958612230f ancestor a@924404dff337
premerge successful
rev: versions differ -> m (premerge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
merging rev
my rev@02963e448370+ other rev@2b958612230f ancestor rev@924404dff337
rev: versions differ -> m (merge)
- picked tool 'python ../merge' for rev (binary False symlink False changedelete False)
+ picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
my rev@02963e448370+ other rev@2b958612230f ancestor rev@924404dff337
- launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob)
+ launching merge tool: * ../merge *$TESTTMP/t/t/rev* * * (glob)
merge tool returned: 0
1 files updated, 2 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
--- a/tests/test-rename.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-rename.t Wed Jul 19 07:51:41 2017 -0500
@@ -625,6 +625,7 @@
abort: path contains illegal component: .hg/a1 (glob)
[255]
$ hg --config extensions.largefiles= rename d1/d11/a1 .hg
+ The fsmonitor extension is incompatible with the largefiles extension and has been disabled. (fsmonitor !)
abort: path contains illegal component: .hg/a1 (glob)
[255]
$ hg status -C
@@ -632,6 +633,7 @@
abort: ../a1 not under root '$TESTTMP' (glob)
[255]
$ hg --config extensions.largefiles= rename d1/d11/a1 ..
+ The fsmonitor extension is incompatible with the largefiles extension and has been disabled. (fsmonitor !)
abort: ../a1 not under root '$TESTTMP' (glob)
[255]
$ hg status -C
--- a/tests/test-repair-strip.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-repair-strip.t Wed Jul 19 07:51:41 2017 -0500
@@ -21,7 +21,7 @@
> hg verify
> echo % journal contents
> if [ -f .hg/store/journal ]; then
- > cat .hg/store/journal | python $TESTTMP/dumpjournal.py
+ > cat .hg/store/journal | $PYTHON $TESTTMP/dumpjournal.py
> else
> echo "(no journal)"
> fi
--- a/tests/test-requires.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-requires.t Wed Jul 19 07:51:41 2017 -0500
@@ -5,7 +5,7 @@
$ hg commit -m test
$ rm .hg/requires
$ hg tip
- abort: index 00changelog.i unknown format 2!
+ abort: unknown version (2) in revlog 00changelog.i!
[255]
$ echo indoor-pool > .hg/requires
$ hg tip
@@ -37,7 +37,7 @@
> for name, module in extensions.extensions(ui):
> if __name__ == module.__name__:
> # support specific feature locally
- > supported |= set(['featuresetup-test'])
+ > supported |= {'featuresetup-test'}
> return
> def uisetup(ui):
> localrepo.localrepository.featuresetupfuncs.add(featuresetup)
--- a/tests/test-resolve.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-resolve.t Wed Jul 19 07:51:41 2017 -0500
@@ -85,10 +85,10 @@
$ cat > $TESTTMP/markdriver.py << EOF
> '''mark and unmark files as driver-resolved'''
- > from mercurial import cmdutil, merge, scmutil
+ > from mercurial import merge, registrar, scmutil
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
- > @command('markdriver',
+ > command = registrar.command(cmdtable)
+ > @command(b'markdriver',
> [('u', 'unmark', None, '')],
> 'FILE...')
> def markdriver(ui, repo, *pats, **opts):
--- a/tests/test-revert.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-revert.t Wed Jul 19 07:51:41 2017 -0500
@@ -495,7 +495,7 @@
check list of planned files
- $ python $TESTDIR/generate-working-copy-states.py filelist 2
+ $ $PYTHON $TESTDIR/generate-working-copy-states.py filelist 2
content1_content1_content1-tracked
content1_content1_content1-untracked
content1_content1_content3-tracked
@@ -550,7 +550,7 @@
Generate base changeset
- $ python $TESTDIR/generate-working-copy-states.py state 2 1
+ $ $PYTHON $TESTDIR/generate-working-copy-states.py state 2 1
$ hg addremove --similarity 0
adding content1_content1_content1-tracked
adding content1_content1_content1-untracked
@@ -597,7 +597,7 @@
(create a simple text version of the content)
- $ python ../dircontent.py > ../content-base.txt
+ $ $PYTHON ../dircontent.py > ../content-base.txt
$ cat ../content-base.txt
content1 content1_content1_content1-tracked
content1 content1_content1_content1-untracked
@@ -622,7 +622,7 @@
Create parent changeset
- $ python $TESTDIR/generate-working-copy-states.py state 2 2
+ $ $PYTHON $TESTDIR/generate-working-copy-states.py state 2 2
$ hg addremove --similarity 0
removing content1_missing_content1-tracked
removing content1_missing_content1-untracked
@@ -661,7 +661,7 @@
(create a simple text version of the content)
- $ python ../dircontent.py > ../content-parent.txt
+ $ $PYTHON ../dircontent.py > ../content-parent.txt
$ cat ../content-parent.txt
content1 content1_content1_content1-tracked
content1 content1_content1_content1-untracked
@@ -686,7 +686,7 @@
Setup working directory
- $ python $TESTDIR/generate-working-copy-states.py state 2 wc
+ $ $PYTHON $TESTDIR/generate-working-copy-states.py state 2 wc
$ hg addremove --similarity 0
adding content1_missing_content1-tracked
adding content1_missing_content1-untracked
@@ -754,7 +754,7 @@
(create a simple text version of the content)
- $ python ../dircontent.py > ../content-wc.txt
+ $ $PYTHON ../dircontent.py > ../content-wc.txt
$ cat ../content-wc.txt
content1 content1_content1_content1-tracked
content1 content1_content1_content1-untracked
@@ -818,7 +818,7 @@
The diff is filtered to include change only. The only difference should be
additional `.orig` backup file when applicable.
- $ python ../dircontent.py > ../content-parent-all.txt
+ $ $PYTHON ../dircontent.py > ../content-parent-all.txt
$ cd ..
$ diff -U 0 -- content-parent.txt content-parent-all.txt | grep _
+content3 content1_content1_content3-tracked.orig
@@ -875,7 +875,7 @@
The diff is filtered to include change only. The only difference should be
additional `.orig` backup file when applicable.
- $ python ../dircontent.py > ../content-base-all.txt
+ $ $PYTHON ../dircontent.py > ../content-base-all.txt
$ cd ..
$ diff -U 0 -- content-base.txt content-base-all.txt | grep _
+content3 content1_content1_content3-tracked.orig
@@ -902,7 +902,7 @@
revert all files individually and check the output
(output is expected to be different than in the --all case)
- $ for file in `python $TESTDIR/generate-working-copy-states.py filelist 2`; do
+ $ for file in `$PYTHON $TESTDIR/generate-working-copy-states.py filelist 2`; do
> echo '### revert for:' $file;
> hg revert $file;
> echo
@@ -979,7 +979,7 @@
check resulting directory against the --all run
(There should be no difference)
- $ python ../dircontent.py > ../content-parent-explicit.txt
+ $ $PYTHON ../dircontent.py > ../content-parent-explicit.txt
$ cd ..
$ diff -U 0 -- content-parent-all.txt content-parent-explicit.txt | grep _
[1]
@@ -995,7 +995,7 @@
revert all files individually and check the output
(output is expected to be different than in the --all case)
- $ for file in `python $TESTDIR/generate-working-copy-states.py filelist 2`; do
+ $ for file in `$PYTHON $TESTDIR/generate-working-copy-states.py filelist 2`; do
> echo '### revert for:' $file;
> hg revert $file --rev 'desc(base)';
> echo
@@ -1072,7 +1072,7 @@
check resulting directory against the --all run
(There should be no difference)
- $ python ../dircontent.py > ../content-base-explicit.txt
+ $ $PYTHON ../dircontent.py > ../content-base-explicit.txt
$ cd ..
$ diff -U 0 -- content-base-all.txt content-base-explicit.txt | grep _
[1]
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-revlog-v2.t Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,62 @@
+A repo with unknown revlogv2 requirement string cannot be opened
+
+ $ hg init invalidreq
+ $ cd invalidreq
+ $ echo exp-revlogv2.unknown >> .hg/requires
+ $ hg log
+ abort: repository requires features unknown to this Mercurial: exp-revlogv2.unknown!
+ (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
+ [255]
+ $ cd ..
+
+Can create and open repo with revlog v2 requirement
+
+ $ cat >> $HGRCPATH << EOF
+ > [experimental]
+ > revlogv2 = enable-unstable-format-and-corrupt-my-data
+ > EOF
+
+ $ hg init empty-repo
+ $ cd empty-repo
+ $ cat .hg/requires
+ dotencode
+ exp-revlogv2.0
+ fncache
+ store
+
+ $ hg log
+
+Unknown flags to revlog are rejected
+
+ >>> with open('.hg/store/00changelog.i', 'wb') as fh:
+ ... fh.write('\x00\x04\xde\xad')
+
+ $ hg log
+ abort: unknown flags (0x04) in version 57005 revlog 00changelog.i!
+ [255]
+
+ $ cd ..
+
+Writing a simple revlog v2 works
+
+ $ hg init simple
+ $ cd simple
+ $ touch foo
+ $ hg -q commit -A -m initial
+
+ $ hg log
+ changeset: 0:96ee1d7354c4
+ tag: tip
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: initial
+
+Header written as expected (changelog always disables generaldelta)
+
+ $ f --hexdump --bytes 4 .hg/store/00changelog.i
+ .hg/store/00changelog.i:
+ 0000: 00 01 de ad |....|
+
+ $ f --hexdump --bytes 4 .hg/store/data/foo.i
+ .hg/store/data/foo.i:
+ 0000: 00 03 de ad |....|
--- a/tests/test-revlog.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-revlog.t Wed Jul 19 07:51:41 2017 -0500
@@ -1,3 +1,35 @@
+ $ hg init empty-repo
+ $ cd empty-repo
+
+Flags on revlog version 0 are rejected
+
+ >>> with open('.hg/store/00changelog.i', 'wb') as fh:
+ ... fh.write('\x00\x01\x00\x00')
+
+ $ hg log
+ abort: unknown flags (0x01) in version 0 revlog 00changelog.i!
+ [255]
+
+Unknown flags on revlog version 1 are rejected
+
+ >>> with open('.hg/store/00changelog.i', 'wb') as fh:
+ ... fh.write('\x00\x04\x00\x01')
+
+ $ hg log
+ abort: unknown flags (0x04) in version 1 revlog 00changelog.i!
+ [255]
+
+Unknown version is rejected
+
+ >>> with open('.hg/store/00changelog.i', 'wb') as fh:
+ ... fh.write('\x00\x00\x00\x02')
+
+ $ hg log
+ abort: unknown version (2) in revlog 00changelog.i!
+ [255]
+
+ $ cd ..
+
Test for CVE-2016-3630
$ hg init
@@ -12,4 +44,4 @@
0 0 19 -1 2 99e0332bd498 000000000000 000000000000
1 19 12 0 3 6674f57a23d8 99e0332bd498 000000000000
$ hg debugdata a.i 1 2>&1 | egrep 'Error:.*decoded'
- (mercurial.mpatch.)?mpatchError: patch cannot be decoded (re)
+ (mercurial\.\w+\.mpatch\.)?mpatchError: patch cannot be decoded (re)
--- a/tests/test-revset.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-revset.t Wed Jul 19 07:51:41 2017 -0500
@@ -20,6 +20,7 @@
> EOF
$ cat >> $HGRCPATH << EOF
> [extensions]
+ > drawdag=$TESTDIR/drawdag.py
> testrevset=$TESTTMP/testrevset.py
> EOF
@@ -37,15 +38,15 @@
$ cat <<EOF > debugrevlistspec.py
> from __future__ import absolute_import
> from mercurial import (
- > cmdutil,
> node as nodemod,
+ > registrar,
> revset,
> revsetlang,
> smartset,
> )
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
- > @command('debugrevlistspec',
+ > command = registrar.command(cmdtable)
+ > @command(b'debugrevlistspec',
> [('', 'optimize', None, 'print parsed tree after optimizing'),
> ('', 'bin', None, 'unhexlify arguments')])
> def debugrevlistspec(ui, repo, fmt, *args, **opts):
@@ -157,7 +158,7 @@
('symbol', '0')
('symbol', '1'))
* set:
- <spanset+ 0:1>
+ <spanset+ 0:2>
0
1
$ try --optimize :
@@ -168,7 +169,7 @@
None
define)
* set:
- <spanset+ 0:9>
+ <spanset+ 0:10>
0
1
2
@@ -266,7 +267,7 @@
(rangepost
('symbol', '+a+b+c+'))
* set:
- <spanset+ 3:9>
+ <spanset+ 3:10>
3
4
5
@@ -278,7 +279,7 @@
(rangepre
('symbol', '+a+b+c+'))
* set:
- <spanset+ 0:3>
+ <spanset+ 0:4>
0
1
2
@@ -288,7 +289,7 @@
('symbol', '-a-b-c-')
('symbol', '+a+b+c+'))
* set:
- <spanset- 3:4>
+ <spanset- 3:5>
4
3
$ log '-a-b-c-:+a+b+c+'
@@ -413,7 +414,7 @@
hg: parse error: invalid \x escape
[255]
$ log 'date(tip)'
- abort: invalid date: 'tip'
+ hg: parse error: invalid date: 'tip'
[255]
$ log '0:date'
abort: unknown revision 'date'!
@@ -499,6 +500,158 @@
hg: parse error: can't use a key-value pair in this context
[255]
+relation-subscript operator has the highest binding strength (as function call):
+
+ $ hg debugrevspec -p parsed 'tip:tip^#generations[-1]'
+ * parsed:
+ (range
+ ('symbol', 'tip')
+ (relsubscript
+ (parentpost
+ ('symbol', 'tip'))
+ ('symbol', 'generations')
+ (negate
+ ('symbol', '1'))))
+ 9
+ 8
+ 7
+ 6
+ 5
+ 4
+
+ $ hg debugrevspec -p parsed --no-show-revs 'not public()#generations[0]'
+ * parsed:
+ (not
+ (relsubscript
+ (func
+ ('symbol', 'public')
+ None)
+ ('symbol', 'generations')
+ ('symbol', '0')))
+
+left-hand side of relation-subscript operator should be optimized recursively:
+
+ $ hg debugrevspec -p analyzed -p optimized --no-show-revs \
+ > '(not public())#generations[0]'
+ * analyzed:
+ (relsubscript
+ (not
+ (func
+ ('symbol', 'public')
+ None
+ any)
+ define)
+ ('symbol', 'generations')
+ ('symbol', '0')
+ define)
+ * optimized:
+ (relsubscript
+ (func
+ ('symbol', '_notpublic')
+ None
+ any)
+ ('symbol', 'generations')
+ ('symbol', '0')
+ define)
+
+resolution of subscript and relation-subscript ternary operators:
+
+ $ hg debugrevspec -p analyzed 'tip[0]'
+ * analyzed:
+ (subscript
+ ('symbol', 'tip')
+ ('symbol', '0')
+ define)
+ hg: parse error: can't use a subscript in this context
+ [255]
+
+ $ hg debugrevspec -p analyzed 'tip#rel[0]'
+ * analyzed:
+ (relsubscript
+ ('symbol', 'tip')
+ ('symbol', 'rel')
+ ('symbol', '0')
+ define)
+ hg: parse error: unknown identifier: rel
+ [255]
+
+ $ hg debugrevspec -p analyzed '(tip#rel)[0]'
+ * analyzed:
+ (subscript
+ (relation
+ ('symbol', 'tip')
+ ('symbol', 'rel')
+ define)
+ ('symbol', '0')
+ define)
+ hg: parse error: can't use a subscript in this context
+ [255]
+
+ $ hg debugrevspec -p analyzed 'tip#rel[0][1]'
+ * analyzed:
+ (subscript
+ (relsubscript
+ ('symbol', 'tip')
+ ('symbol', 'rel')
+ ('symbol', '0')
+ define)
+ ('symbol', '1')
+ define)
+ hg: parse error: can't use a subscript in this context
+ [255]
+
+ $ hg debugrevspec -p analyzed 'tip#rel0#rel1[1]'
+ * analyzed:
+ (relsubscript
+ (relation
+ ('symbol', 'tip')
+ ('symbol', 'rel0')
+ define)
+ ('symbol', 'rel1')
+ ('symbol', '1')
+ define)
+ hg: parse error: unknown identifier: rel1
+ [255]
+
+ $ hg debugrevspec -p analyzed 'tip#rel0[0]#rel1[1]'
+ * analyzed:
+ (relsubscript
+ (relsubscript
+ ('symbol', 'tip')
+ ('symbol', 'rel0')
+ ('symbol', '0')
+ define)
+ ('symbol', 'rel1')
+ ('symbol', '1')
+ define)
+ hg: parse error: unknown identifier: rel1
+ [255]
+
+parse errors of relation, subscript and relation-subscript operators:
+
+ $ hg debugrevspec '[0]'
+ hg: parse error at 0: not a prefix: [
+ [255]
+ $ hg debugrevspec '.#'
+ hg: parse error at 2: not a prefix: end
+ [255]
+ $ hg debugrevspec '#rel'
+ hg: parse error at 0: not a prefix: #
+ [255]
+ $ hg debugrevspec '.#rel[0'
+ hg: parse error at 7: unexpected token: end
+ [255]
+ $ hg debugrevspec '.]'
+ hg: parse error at 1: invalid token
+ [255]
+
+ $ hg debugrevspec '.#generations[a]'
+ hg: parse error: relation subscript must be an integer
+ [255]
+ $ hg debugrevspec '.#generations[1-2]'
+ hg: parse error: relation subscript must be an integer
+ [255]
+
parsed tree at stages:
$ hg debugrevspec -p all '()'
@@ -626,7 +779,7 @@
None
define)
* set:
- <spanset+ 0:9>
+ <spanset+ 0:10>
0
1
2
@@ -643,7 +796,7 @@
('symbol', '1')
define)
* set:
- <spanset+ 0:1>
+ <spanset+ 0:2>
0
1
$ try -p analyzed ':(1|2)'
@@ -656,7 +809,7 @@
define)
define)
* set:
- <spanset+ 0:2>
+ <spanset+ 0:3>
0
1
2
@@ -681,7 +834,7 @@
('symbol', '1'))
('symbol', '2'))
* set:
- <spanset+ 0:2>
+ <spanset+ 0:3>
0
1
2
@@ -702,7 +855,7 @@
(parentpost
('symbol', '9')))
* set:
- <spanset+ 8:9>
+ <spanset+ 8:10>
8
9
@@ -727,7 +880,7 @@
('symbol', '1'))
('symbol', '2')))
* set:
- <spanset+ 0:2>
+ <spanset+ 0:3>
0
1
2
@@ -742,7 +895,7 @@
('symbol', '4'))))
('symbol', '2'))
* set:
- <spanset+ 0:2>
+ <spanset+ 0:3>
0
1
2
@@ -770,7 +923,7 @@
(parentpost
('symbol', '9'))))))
* set:
- <spanset+ 4:9>
+ <spanset+ 4:10>
4
5
6
@@ -788,7 +941,7 @@
('symbol', '1'))
('symbol', '2'))
* set:
- <spanset+ 0:2>
+ <spanset+ 0:3>
0
1
2
@@ -803,7 +956,7 @@
('symbol', '1'))
('symbol', '2'))
* set:
- <spanset+ 0:2>
+ <spanset+ 0:3>
0
1
2
@@ -842,6 +995,20 @@
test ancestors
+ $ hg log -G -T '{rev}\n' --config experimental.graphshorten=True
+ @ 9
+ o 8
+ | o 7
+ | o 6
+ |/|
+ | o 5
+ o | 4
+ | o 3
+ o | 2
+ |/
+ o 1
+ o 0
+
$ log 'ancestors(5)'
0
1
@@ -855,6 +1022,194 @@
2
3
+test ancestors with depth limit
+
+ (depth=0 selects the node itself)
+
+ $ log 'reverse(ancestors(9, depth=0))'
+ 9
+
+ (interleaved: '4' would be missing if heap queue were higher depth first)
+
+ $ log 'reverse(ancestors(8:9, depth=1))'
+ 9
+ 8
+ 4
+
+ (interleaved: '2' would be missing if heap queue were higher depth first)
+
+ $ log 'reverse(ancestors(7+8, depth=2))'
+ 8
+ 7
+ 6
+ 5
+ 4
+ 2
+
+ (walk example above by separate queries)
+
+ $ log 'reverse(ancestors(8, depth=2)) + reverse(ancestors(7, depth=2))'
+ 8
+ 4
+ 2
+ 7
+ 6
+ 5
+
+ (walk 2nd and 3rd ancestors)
+
+ $ log 'reverse(ancestors(7, depth=3, startdepth=2))'
+ 5
+ 4
+ 3
+ 2
+
+ (interleaved: '4' would be missing if higher-depth ancestors weren't scanned)
+
+ $ log 'reverse(ancestors(7+8, depth=2, startdepth=2))'
+ 5
+ 4
+ 2
+
+ (note that 'ancestors(x, depth=y, startdepth=z)' does not identical to
+ 'ancestors(x, depth=y) - ancestors(x, depth=z-1)' because a node may have
+ multiple depths)
+
+ $ log 'reverse(ancestors(7+8, depth=2) - ancestors(7+8, depth=1))'
+ 5
+ 2
+
+test bad arguments passed to ancestors()
+
+ $ log 'ancestors(., depth=-1)'
+ hg: parse error: negative depth
+ [255]
+ $ log 'ancestors(., depth=foo)'
+ hg: parse error: ancestors expects an integer depth
+ [255]
+
+test descendants
+
+ $ hg log -G -T '{rev}\n' --config experimental.graphshorten=True
+ @ 9
+ o 8
+ | o 7
+ | o 6
+ |/|
+ | o 5
+ o | 4
+ | o 3
+ o | 2
+ |/
+ o 1
+ o 0
+
+ (null is ultimate root and has optimized path)
+
+ $ log 'null:4 & descendants(null)'
+ -1
+ 0
+ 1
+ 2
+ 3
+ 4
+
+ (including merge)
+
+ $ log ':8 & descendants(2)'
+ 2
+ 4
+ 6
+ 7
+ 8
+
+ (multiple roots)
+
+ $ log ':8 & descendants(2+5)'
+ 2
+ 4
+ 5
+ 6
+ 7
+ 8
+
+test descendants with depth limit
+
+ (depth=0 selects the node itself)
+
+ $ log 'descendants(0, depth=0)'
+ 0
+ $ log 'null: & descendants(null, depth=0)'
+ -1
+
+ (p2 = null should be ignored)
+
+ $ log 'null: & descendants(null, depth=2)'
+ -1
+ 0
+ 1
+
+ (multiple paths: depth(6) = (2, 3))
+
+ $ log 'descendants(1+3, depth=2)'
+ 1
+ 2
+ 3
+ 4
+ 5
+ 6
+
+ (multiple paths: depth(5) = (1, 2), depth(6) = (2, 3))
+
+ $ log 'descendants(3+1, depth=2, startdepth=2)'
+ 4
+ 5
+ 6
+
+ (multiple depths: depth(6) = (0, 2, 4), search for depth=2)
+
+ $ log 'descendants(0+3+6, depth=3, startdepth=1)'
+ 1
+ 2
+ 3
+ 4
+ 5
+ 6
+ 7
+
+ (multiple depths: depth(6) = (0, 4), no match)
+
+ $ log 'descendants(0+6, depth=3, startdepth=1)'
+ 1
+ 2
+ 3
+ 4
+ 5
+ 7
+
+test ancestors/descendants relation subscript:
+
+ $ log 'tip#generations[0]'
+ 9
+ $ log '.#generations[-1]'
+ 8
+ $ log '.#g[(-1)]'
+ 8
+
+ $ hg debugrevspec -p parsed 'roots(:)#g[2]'
+ * parsed:
+ (relsubscript
+ (func
+ ('symbol', 'roots')
+ (rangeall
+ None))
+ ('symbol', 'g')
+ ('symbol', '2'))
+ 2
+ 3
+
+test author
+
$ log 'author(bob)'
2
$ log 'author("re:bob|test")'
@@ -957,7 +1312,7 @@
('string', '\x08issue\\d+'))
* set:
<filteredset
- <fullreposet+ 0:9>,
+ <fullreposet+ 0:10>,
<grep '\x08issue\\d+'>>
$ try 'grep(r"\bissue\d+")'
(func
@@ -965,7 +1320,7 @@
('string', '\\bissue\\d+'))
* set:
<filteredset
- <fullreposet+ 0:9>,
+ <fullreposet+ 0:10>,
<grep '\\bissue\\d+'>>
6
$ try 'grep(r"\")'
@@ -986,6 +1341,9 @@
$ log 'keyword(issue)'
6
$ log 'keyword("test a")'
+
+Test first (=limit) and last
+
$ log 'limit(head(), 1)'
0
$ log 'limit(author("re:bob|test"), 3, 5)'
@@ -998,6 +1356,173 @@
$ log 'limit(all(), 1, -1)'
hg: parse error: negative offset
[255]
+ $ log 'limit(all(), -1)'
+ hg: parse error: negative number to select
+ [255]
+ $ log 'limit(all(), 0)'
+
+ $ log 'last(all(), -1)'
+ hg: parse error: negative number to select
+ [255]
+ $ log 'last(all(), 0)'
+ $ log 'last(all(), 1)'
+ 9
+ $ log 'last(all(), 2)'
+ 8
+ 9
+
+Test smartset.slice() by first/last()
+
+ (using unoptimized set, filteredset as example)
+
+ $ hg debugrevspec --no-show-revs -s '0:7 & branch("re:")'
+ * set:
+ <filteredset
+ <spanset+ 0:8>,
+ <branch 're:'>>
+ $ log 'limit(0:7 & branch("re:"), 3, 4)'
+ 4
+ 5
+ 6
+ $ log 'limit(7:0 & branch("re:"), 3, 4)'
+ 3
+ 2
+ 1
+ $ log 'last(0:7 & branch("re:"), 2)'
+ 6
+ 7
+
+ (using baseset)
+
+ $ hg debugrevspec --no-show-revs -s 0+1+2+3+4+5+6+7
+ * set:
+ <baseset [0, 1, 2, 3, 4, 5, 6, 7]>
+ $ hg debugrevspec --no-show-revs -s 0::7
+ * set:
+ <baseset+ [0, 1, 2, 3, 4, 5, 6, 7]>
+ $ log 'limit(0+1+2+3+4+5+6+7, 3, 4)'
+ 4
+ 5
+ 6
+ $ log 'limit(sort(0::7, rev), 3, 4)'
+ 4
+ 5
+ 6
+ $ log 'limit(sort(0::7, -rev), 3, 4)'
+ 3
+ 2
+ 1
+ $ log 'last(sort(0::7, rev), 2)'
+ 6
+ 7
+ $ hg debugrevspec -s 'limit(sort(0::7, rev), 3, 6)'
+ * set:
+ <baseset+ [6, 7]>
+ 6
+ 7
+ $ hg debugrevspec -s 'limit(sort(0::7, rev), 3, 9)'
+ * set:
+ <baseset+ []>
+ $ hg debugrevspec -s 'limit(sort(0::7, -rev), 3, 6)'
+ * set:
+ <baseset- [0, 1]>
+ 1
+ 0
+ $ hg debugrevspec -s 'limit(sort(0::7, -rev), 3, 9)'
+ * set:
+ <baseset- []>
+ $ hg debugrevspec -s 'limit(0::7, 0)'
+ * set:
+ <baseset+ []>
+
+ (using spanset)
+
+ $ hg debugrevspec --no-show-revs -s 0:7
+ * set:
+ <spanset+ 0:8>
+ $ log 'limit(0:7, 3, 4)'
+ 4
+ 5
+ 6
+ $ log 'limit(7:0, 3, 4)'
+ 3
+ 2
+ 1
+ $ log 'limit(0:7, 3, 6)'
+ 6
+ 7
+ $ log 'limit(7:0, 3, 6)'
+ 1
+ 0
+ $ log 'last(0:7, 2)'
+ 6
+ 7
+ $ hg debugrevspec -s 'limit(0:7, 3, 6)'
+ * set:
+ <spanset+ 6:8>
+ 6
+ 7
+ $ hg debugrevspec -s 'limit(0:7, 3, 9)'
+ * set:
+ <spanset+ 8:8>
+ $ hg debugrevspec -s 'limit(7:0, 3, 6)'
+ * set:
+ <spanset- 0:2>
+ 1
+ 0
+ $ hg debugrevspec -s 'limit(7:0, 3, 9)'
+ * set:
+ <spanset- 0:0>
+ $ hg debugrevspec -s 'limit(0:7, 0)'
+ * set:
+ <spanset+ 0:0>
+
+Test order of first/last revisions
+
+ $ hg debugrevspec -s 'first(4:0, 3) & 3:'
+ * set:
+ <filteredset
+ <spanset- 2:5>,
+ <spanset+ 3:10>>
+ 4
+ 3
+
+ $ hg debugrevspec -s '3: & first(4:0, 3)'
+ * set:
+ <filteredset
+ <spanset+ 3:10>,
+ <spanset- 2:5>>
+ 3
+ 4
+
+ $ hg debugrevspec -s 'last(4:0, 3) & :1'
+ * set:
+ <filteredset
+ <spanset- 0:3>,
+ <spanset+ 0:2>>
+ 1
+ 0
+
+ $ hg debugrevspec -s ':1 & last(4:0, 3)'
+ * set:
+ <filteredset
+ <spanset+ 0:2>,
+ <spanset+ 0:3>>
+ 0
+ 1
+
+Test scmutil.revsingle() should return the last revision
+
+ $ hg debugrevspec -s 'last(0::)'
+ * set:
+ <baseset slice=0:1
+ <generatorset->>
+ 9
+ $ hg identify -r '0::' --num
+ 9
+
+Test matching
+
$ log 'matching(6)'
6
$ log 'matching(6:7, "phase parents user date branch summary files description substate")'
@@ -1210,10 +1735,10 @@
$ log 'reverse(null:)' | tail -2
0
-1
+ $ log 'first(null:)'
+ -1
+ $ log 'min(null:)'
BROKEN: should be '-1'
- $ log 'first(null:)'
-BROKEN: should be '-1'
- $ log 'min(null:)'
$ log 'tip:null and all()' | tail -2
1
0
@@ -1221,6 +1746,42 @@
Test working-directory revision
$ hg debugrevspec 'wdir()'
2147483647
+ $ hg debugrevspec 'wdir()^'
+ 9
+ $ hg up 7
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg debugrevspec 'wdir()^'
+ 7
+ $ hg debugrevspec 'wdir()^0'
+ 2147483647
+ $ hg debugrevspec 'wdir()~3'
+ 5
+ $ hg debugrevspec 'ancestors(wdir())'
+ 0
+ 1
+ 2
+ 3
+ 4
+ 5
+ 6
+ 7
+ 2147483647
+ $ hg debugrevspec 'wdir()~0'
+ 2147483647
+ $ hg debugrevspec 'p1(wdir())'
+ 7
+ $ hg debugrevspec 'p2(wdir())'
+ $ hg debugrevspec 'parents(wdir())'
+ 7
+ $ hg debugrevspec 'wdir()^1'
+ 7
+ $ hg debugrevspec 'wdir()^2'
+ $ hg debugrevspec 'wdir()^3'
+ hg: parse error: ^ expects a number 0, 1, or 2
+ [255]
+For tests consistency
+ $ hg up 9
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg debugrevspec 'tip or wdir()'
9
2147483647
@@ -1239,9 +1800,104 @@
9
$ log '(all() + wdir()) & max(. + wdir())'
2147483647
- $ log '(all() + wdir()) & first(wdir() + .)'
+ $ log 'first(wdir() + .)'
+ 2147483647
+ $ log 'last(. + wdir())'
+ 2147483647
+
+Test working-directory integer revision and node id
+(BUG: '0:wdir()' is still needed to populate wdir revision)
+
+ $ hg debugrevspec '0:wdir() & 2147483647'
+ 2147483647
+ $ hg debugrevspec '0:wdir() & rev(2147483647)'
+ 2147483647
+ $ hg debugrevspec '0:wdir() & ffffffffffffffffffffffffffffffffffffffff'
+ 2147483647
+ $ hg debugrevspec '0:wdir() & ffffffffffff'
+ 2147483647
+ $ hg debugrevspec '0:wdir() & id(ffffffffffffffffffffffffffffffffffffffff)'
+ 2147483647
+ $ hg debugrevspec '0:wdir() & id(ffffffffffff)'
2147483647
- $ log '(all() + wdir()) & last(. + wdir())'
+
+ $ cd ..
+
+Test short 'ff...' hash collision
+(BUG: '0:wdir()' is still needed to populate wdir revision)
+
+ $ hg init wdir-hashcollision
+ $ cd wdir-hashcollision
+ $ cat <<EOF >> .hg/hgrc
+ > [experimental]
+ > evolution = createmarkers
+ > EOF
+ $ echo 0 > a
+ $ hg ci -qAm 0
+ $ for i in 2463 2961 6726 78127; do
+ > hg up -q 0
+ > echo $i > a
+ > hg ci -qm $i
+ > done
+ $ hg up -q null
+ $ hg log -r '0:wdir()' -T '{rev}:{node} {shortest(node, 3)}\n'
+ 0:b4e73ffab476aa0ee32ed81ca51e07169844bc6a b4e
+ 1:fffbae3886c8fbb2114296380d276fd37715d571 fffba
+ 2:fffb6093b00943f91034b9bdad069402c834e572 fffb6
+ 3:fff48a9b9de34a4d64120c29548214c67980ade3 fff4
+ 4:ffff85cff0ff78504fcdc3c0bc10de0c65379249 ffff8
+ 2147483647:ffffffffffffffffffffffffffffffffffffffff fffff
+ $ hg debugobsolete fffbae3886c8fbb2114296380d276fd37715d571
+ obsoleted 1 changesets
+
+ $ hg debugrevspec '0:wdir() & fff'
+ abort: 00changelog.i@fff: ambiguous identifier!
+ [255]
+ $ hg debugrevspec '0:wdir() & ffff'
+ abort: 00changelog.i@ffff: ambiguous identifier!
+ [255]
+ $ hg debugrevspec '0:wdir() & fffb'
+ abort: 00changelog.i@fffb: ambiguous identifier!
+ [255]
+BROKEN should be '2' (node lookup uses unfiltered repo since dc25ed84bee8)
+ $ hg debugrevspec '0:wdir() & id(fffb)'
+ 2
+ $ hg debugrevspec '0:wdir() & ffff8'
+ 4
+ $ hg debugrevspec '0:wdir() & fffff'
+ 2147483647
+
+ $ cd ..
+
+Test branch() with wdir()
+
+ $ cd repo
+
+ $ log '0:wdir() & branch("literal:é")'
+ 8
+ 9
+ 2147483647
+ $ log '0:wdir() & branch("re:é")'
+ 8
+ 9
+ 2147483647
+ $ log '0:wdir() & branch("re:^a")'
+ 0
+ 2
+ $ log '0:wdir() & branch(8)'
+ 8
+ 9
+ 2147483647
+
+branch(wdir()) returns all revisions belonging to the working branch. The wdir
+itself isn't returned unless it is explicitly populated.
+
+ $ log 'branch(wdir())'
+ 8
+ 9
+ $ log '0:wdir() & branch(wdir())'
+ 8
+ 9
2147483647
$ log 'outgoing()'
@@ -1358,10 +2014,10 @@
* set:
<filteredset
<filteredset
- <spanset- 0:3>,
- <spanset+ 0:3>>,
+ <spanset- 0:4>,
+ <spanset+ 0:4>>,
<not
- <spanset+ 1:2>>>
+ <spanset+ 1:3>>>
3
0
@@ -1392,7 +2048,7 @@
define)
* set:
<filteredset
- <spanset- 0:2>,
+ <spanset- 0:3>,
<baseset [0, 1, 2]>>
2
1
@@ -1429,10 +2085,10 @@
define)
* set:
<filteredset
- <spanset- 0:2>,
+ <spanset- 0:3>,
<addset
<baseset [2]>,
- <spanset+ 0:1>>>
+ <spanset+ 0:2>>>
2
1
0
@@ -1460,7 +2116,7 @@
define)
* set:
<filteredset
- <spanset- 0:2>,
+ <spanset- 0:3>,
<baseset+ [0, 1, 2]>>
2
1
@@ -1488,7 +2144,7 @@
* set:
<filteredset
<baseset [0, 2, 1]>,
- <spanset- 0:2>>
+ <spanset- 0:3>>
0
2
1
@@ -1516,7 +2172,7 @@
define)
* set:
<filteredset
- <spanset- 0:2>,
+ <spanset- 0:3>,
<baseset [0, 1, 2]>>
2
1
@@ -1564,7 +2220,7 @@
define)
* set:
<filteredset
- <spanset- 0:2>,
+ <spanset- 0:3>,
<not
<baseset [0, 1]>>>
2
@@ -1589,7 +2245,7 @@
define)
* set:
<filteredset
- <spanset- 0:2>,
+ <spanset- 0:3>,
<not
<baseset [0, 1]>>>
2
@@ -1640,7 +2296,7 @@
define)
* set:
<filteredset
- <spanset- 0:2>,
+ <spanset- 0:3>,
<baseset [0, 1, 2]>>
2
1
@@ -1674,8 +2330,8 @@
define)
* set:
<filteredset
- <spanset+ 0:2>,
- <spanset+ 0:9>>
+ <spanset+ 0:3>,
+ <spanset+ 0:10>>
0
1
2
@@ -1713,8 +2369,8 @@
define)
* set:
<filteredset
- <spanset+ 0:2>,
- <spanset+ 0:9>>
+ <spanset+ 0:3>,
+ <spanset+ 0:10>>
0
1
2
@@ -1757,10 +2413,9 @@
follow)
define)
* set:
- <baseset
- <limit n=1, offset=0,
- <spanset- 0:2>,
- <baseset [1, 0, 2]>>>
+ <filteredset
+ <baseset [1]>,
+ <spanset- 0:3>>
1
$ try --optimize '2:0 & not last(0 + 2 + 1)'
@@ -1792,12 +2447,9 @@
define)
* set:
<filteredset
- <spanset- 0:2>,
+ <spanset- 0:3>,
<not
- <baseset
- <last n=1,
- <fullreposet+ 0:9>,
- <baseset [1, 2, 0]>>>>>
+ <baseset [1]>>>
2
0
@@ -1840,7 +2492,7 @@
define)
* set:
<filteredset
- <spanset- 0:2>,
+ <spanset- 0:3>,
<baseset [1]>>
1
@@ -1941,11 +2593,11 @@
define)
* set:
<filteredset
- <spanset+ 0:2>,
+ <spanset+ 0:3>,
<addset
<baseset [2]>,
<filteredset
- <fullreposet+ 0:9>,
+ <fullreposet+ 0:10>,
<contains 'a'>>>>
0
1
@@ -1973,7 +2625,7 @@
* set:
<addset
<filteredset
- <spanset- 0:2>,
+ <spanset- 0:3>,
<contains 'a'>>,
<baseset [2]>>
1
@@ -2398,7 +3050,7 @@
* set:
<addset
<baseset [0, 1]>,
- <spanset+ 2:3>>
+ <spanset+ 2:4>>
0
1
2
@@ -2436,10 +3088,10 @@
* set:
<addset
<addset
- <spanset+ 0:1>,
+ <spanset+ 0:2>,
<baseset [2]>>,
<addset
- <spanset+ 3:4>,
+ <spanset+ 3:5>,
<baseset [5, 6]>>>
0
1
@@ -2554,13 +3206,13 @@
* set:
<addset
<addset
- <spanset+ 0:1>,
- <spanset+ 1:2>>,
+ <spanset+ 0:2>,
+ <spanset+ 1:3>>,
<addset
- <spanset+ 2:3>,
+ <spanset+ 2:4>,
<addset
- <spanset+ 3:4>,
- <spanset+ 4:5>>>>
+ <spanset+ 3:5>,
+ <spanset+ 4:6>>>>
0
1
2
@@ -2588,14 +3240,14 @@
test that chained `or` operations never eat up stack (issue4624)
(uses `0:1` instead of `0` to avoid future optimization of trivial revisions)
- $ hg log -T '{rev}\n' -r `python -c "print '+'.join(['0:1'] * 500)"`
+ $ hg log -T '{rev}\n' -r `$PYTHON -c "print '+'.join(['0:1'] * 500)"`
0
1
test that repeated `-r` options never eat up stack (issue4565)
(uses `-r 0::1` to avoid possible optimization at old-style parser)
- $ hg log -T '{rev}\n' `python -c "for i in xrange(500): print '-r 0::1 ',"`
+ $ hg log -T '{rev}\n' `$PYTHON -c "for i in xrange(500): print '-r 0::1 ',"`
0
1
@@ -2694,6 +3346,67 @@
hg: parse error: missing argument
[255]
+optimization to only() works only if ancestors() takes only one argument
+
+ $ hg debugrevspec -p optimized 'ancestors(6) - ancestors(4, 1)'
+ * optimized:
+ (difference
+ (func
+ ('symbol', 'ancestors')
+ ('symbol', '6')
+ define)
+ (func
+ ('symbol', 'ancestors')
+ (list
+ ('symbol', '4')
+ ('symbol', '1'))
+ any)
+ define)
+ 0
+ 1
+ 3
+ 5
+ 6
+ $ hg debugrevspec -p optimized 'ancestors(6, 1) - ancestors(4)'
+ * optimized:
+ (difference
+ (func
+ ('symbol', 'ancestors')
+ (list
+ ('symbol', '6')
+ ('symbol', '1'))
+ define)
+ (func
+ ('symbol', 'ancestors')
+ ('symbol', '4')
+ any)
+ define)
+ 5
+ 6
+
+optimization disabled if keyword arguments passed (because we're too lazy
+to support it)
+
+ $ hg debugrevspec -p optimized 'ancestors(set=6) - ancestors(set=4)'
+ * optimized:
+ (difference
+ (func
+ ('symbol', 'ancestors')
+ (keyvalue
+ ('symbol', 'set')
+ ('symbol', '6'))
+ define)
+ (func
+ ('symbol', 'ancestors')
+ (keyvalue
+ ('symbol', 'set')
+ ('symbol', '4'))
+ any)
+ define)
+ 3
+ 5
+ 6
+
invalid function call should not be optimized to only()
$ log '"ancestors"(6) and not ancestors(4)'
@@ -2845,6 +3558,16 @@
$ log 'merge()^^^'
1
+ $ hg debugrevspec -s '(merge() | 0)~-1'
+ * set:
+ <baseset+ [1, 7]>
+ 1
+ 7
+ $ log 'merge()~-1'
+ 7
+ $ log 'tip~-1'
+ $ log '(tip | merge())~-1'
+ 7
$ log 'merge()~0'
6
$ log 'merge()~1'
@@ -2865,6 +3588,10 @@
hg: parse error: ^ expects a number 0, 1, or 2
[255]
+ $ log 'branchpoint()~-1'
+ abort: revision in set has more than one child!
+ [255]
+
Bogus function gets suggestions
$ log 'add()'
hg: parse error: unknown identifier: add
@@ -2965,7 +3692,7 @@
None)
* set:
<filteredset
- <fullreposet+ 0:9>,
+ <fullreposet+ 0:10>,
<merge>>
6
@@ -2986,7 +3713,7 @@
None)
* set:
<filteredset
- <fullreposet+ 0:9>,
+ <fullreposet+ 0:10>,
<merge>>
6
@@ -3042,11 +3769,7 @@
('symbol', 'merge')
None))
* set:
- <addset+
- <filteredset
- <fullreposet+ 0:9>,
- <merge>>,
- <generatorset+>>
+ <generatorset+>
6
7
@@ -3107,8 +3830,8 @@
* set:
<baseset
<max
- <fullreposet+ 0:9>,
- <spanset+ 2:5>>>
+ <fullreposet+ 0:10>,
+ <spanset+ 2:6>>>
5
test chained `or` operations are flattened at parsing phase
@@ -3141,10 +3864,10 @@
('symbol', '3'))))
* set:
<addset
- <spanset+ 0:1>,
+ <spanset+ 0:2>,
<addset
- <spanset+ 1:2>,
- <spanset+ 2:3>>>
+ <spanset+ 1:3>,
+ <spanset+ 2:4>>>
0
1
2
@@ -3189,7 +3912,7 @@
* set:
<filteredset
<baseset [0]>,
- <spanset+ 0:9>>
+ <spanset+ 0:10>>
0
test unknown reference:
@@ -3238,7 +3961,7 @@
<addset
<baseset [9]>,
<filteredset
- <fullreposet+ 0:9>,
+ <fullreposet+ 0:10>,
<desc '$1'>>>
9
@@ -3409,10 +4132,7 @@
('symbol', '2')))
* set:
<filteredset
- <baseset
- <limit n=2, offset=0,
- <fullreposet+ 0:9>,
- <baseset [1, 2, 3]>>>,
+ <baseset [1, 2]>,
<not
<baseset [2]>>>
1
@@ -3430,7 +4150,7 @@
<filteredset
<baseset
<max
- <fullreposet+ 0:9>,
+ <fullreposet+ 0:10>,
<baseset [1, 2]>>>,
<not
<baseset [2]>>>
@@ -3448,7 +4168,7 @@
<filteredset
<baseset
<min
- <fullreposet+ 0:9>,
+ <fullreposet+ 0:10>,
<baseset [1, 2]>>>,
<not
<baseset [1]>>>
@@ -3466,10 +4186,7 @@
('symbol', '2')))
* set:
<filteredset
- <baseset
- <last n=1,
- <fullreposet+ 0:9>,
- <baseset [2, 1]>>>,
+ <baseset [2]>,
<not
<baseset [2]>>>
@@ -3638,7 +4355,7 @@
$ hg init problematicencoding
$ cd problematicencoding
- $ python > setup.sh <<EOF
+ $ $PYTHON > setup.sh <<EOF
> print u'''
> echo a > text
> hg add text
@@ -3654,7 +4371,7 @@
$ sh < setup.sh
test in problematic encoding
- $ python > test.sh <<EOF
+ $ $PYTHON > test.sh <<EOF
> print u'''
> hg --encoding cp932 log --template '{rev}\\n' -r 'author(\u30A2)'
> echo ====
@@ -3717,4 +4434,80 @@
hg: parse error: unknown identifier: custom1
[255]
+Test repo.anyrevs with customized revset overrides
+
+ $ cat > $TESTTMP/printprevset.py <<EOF
+ > from mercurial import encoding
+ > def reposetup(ui, repo):
+ > alias = {}
+ > p = encoding.environ.get('P')
+ > if p:
+ > alias['P'] = p
+ > revs = repo.anyrevs(['P'], user=True, localalias=alias)
+ > ui.write('P=%r' % list(revs))
+ > EOF
+
+ $ cat >> .hg/hgrc <<EOF
+ > custompredicate = !
+ > printprevset = $TESTTMP/printprevset.py
+ > EOF
+
+ $ hg --config revsetalias.P=1 log -r . -T '\n'
+ P=[1]
+ $ P=3 hg --config revsetalias.P=2 log -r . -T '\n'
+ P=[3]
+
$ cd ..
+
+Test obsstore related revsets
+
+ $ hg init repo1
+ $ cd repo1
+ $ cat <<EOF >> .hg/hgrc
+ > [experimental]
+ > evolution = createmarkers
+ > EOF
+
+ $ hg debugdrawdag <<'EOS'
+ > F G
+ > |/ # split: B -> E, F
+ > B C D E # amend: B -> C -> D
+ > \|/ | # amend: F -> G
+ > A A Z # amend: A -> Z
+ > EOS
+
+ $ hg log -r 'successors(Z)' -T '{desc}\n'
+ Z
+
+ $ hg log -r 'successors(F)' -T '{desc}\n'
+ F
+ G
+
+ $ hg tag --remove --local C D E F G
+
+ $ hg log -r 'successors(B)' -T '{desc}\n'
+ B
+ D
+ E
+ G
+
+ $ hg log -r 'successors(B)' -T '{desc}\n' --hidden
+ B
+ C
+ D
+ E
+ F
+ G
+
+ $ hg log -r 'successors(B)-obsolete()' -T '{desc}\n' --hidden
+ D
+ E
+ G
+
+ $ hg log -r 'successors(B+A)-divergent()' -T '{desc}\n'
+ A
+ Z
+ B
+
+ $ hg log -r 'successors(B+A)-divergent()-obsolete()' -T '{desc}\n'
+ Z
--- a/tests/test-run-tests.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-run-tests.t Wed Jul 19 07:51:41 2017 -0500
@@ -9,7 +9,7 @@
$ run-tests.py $HGTEST_RUN_TESTS_PURE -l
- # Ran 0 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 0 tests, 0 skipped, 0 failed.
Define a helper to avoid the install step
=============
@@ -25,7 +25,7 @@
$ run-tests.py --with-hg=./hg
warning: --with-hg should specify an hg script
- # Ran 0 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 0 tests, 0 skipped, 0 failed.
$ rm hg
#endif
@@ -58,7 +58,7 @@
$ touch test-empty.t
$ rt
.
- # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 1 tests, 0 skipped, 0 failed.
$ rm test-empty.t
a succesful test
@@ -91,7 +91,7 @@
$ rt
.
- # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 1 tests, 0 skipped, 0 failed.
failing test
==================
@@ -115,10 +115,49 @@
ERROR: test-failure.t output changed
!
Failed test-failure.t: output changed
- # Ran 1 tests, 0 skipped, 0 warned, 1 failed.
+ # Ran 1 tests, 0 skipped, 1 failed.
python hash seed: * (glob)
[1]
+test diff colorisation
+
+#if no-windows pygments
+ $ rt test-failure.t --color always
+
+ \x1b[38;5;124m--- $TESTTMP/test-failure.t\x1b[39m (esc)
+ \x1b[38;5;34m+++ $TESTTMP/test-failure.t.err\x1b[39m (esc)
+ \x1b[38;5;90;01m@@ -1,3 +1,3 @@\x1b[39;00m (esc)
+ $ echo "bar-baz"; echo "bar-bad"
+ \x1b[38;5;34m+ bar*baz (glob)\x1b[39m (esc)
+ bar*bad (glob)
+ \x1b[38;5;124m- bar*baz (glob)\x1b[39m (esc)
+
+ ERROR: test-failure.t output changed
+ !
+ Failed test-failure.t: output changed
+ # Ran 1 tests, 0 skipped, 1 failed.
+ python hash seed: * (glob)
+ [1]
+
+ $ rt test-failure.t 2> tmp.log
+ [1]
+ $ cat tmp.log
+
+ --- $TESTTMP/test-failure.t
+ +++ $TESTTMP/test-failure.t.err
+ @@ -1,3 +1,3 @@
+ $ echo "bar-baz"; echo "bar-bad"
+ + bar*baz (glob)
+ bar*bad (glob)
+ - bar*baz (glob)
+
+ ERROR: test-failure.t output changed
+ !
+ Failed test-failure.t: output changed
+ # Ran 1 tests, 0 skipped, 1 failed.
+ python hash seed: * (glob)
+#endif
+
basic failing test
$ cat > test-failure.t << EOF
> $ echo babar
@@ -162,10 +201,47 @@
!
Failed test-failure.t: output changed
Failed test-failure-unicode.t: output changed
- # Ran 3 tests, 0 skipped, 0 warned, 2 failed.
+ # Ran 3 tests, 0 skipped, 2 failed.
python hash seed: * (glob)
[1]
+test --outputdir
+ $ mkdir output
+ $ rt --outputdir output
+
+ --- $TESTTMP/test-failure.t
+ +++ $TESTTMP/output/test-failure.t.err
+ @@ -1,5 +1,5 @@
+ $ echo babar
+ - rataxes
+ + babar
+ This is a noop statement so that
+ this test is still more bytes than success.
+ pad pad pad pad............................................................
+
+ ERROR: test-failure.t output changed
+ !.
+ --- $TESTTMP/test-failure-unicode.t
+ +++ $TESTTMP/output/test-failure-unicode.t.err
+ @@ -1,2 +1,2 @@
+ $ echo babar\xce\xb1 (esc)
+ - l\xce\xb5\xce\xb5t (esc)
+ + babar\xce\xb1 (esc)
+
+ ERROR: test-failure-unicode.t output changed
+ !
+ Failed test-failure.t: output changed
+ Failed test-failure-unicode.t: output changed
+ # Ran 3 tests, 0 skipped, 2 failed.
+ python hash seed: * (glob)
+ [1]
+ $ ls -a output
+ .
+ ..
+ .testtimes
+ test-failure-unicode.t.err
+ test-failure.t.err
+
test --xunit support
$ rt --xunit=xunit.xml
@@ -192,7 +268,7 @@
!
Failed test-failure.t: output changed
Failed test-failure-unicode.t: output changed
- # Ran 3 tests, 0 skipped, 0 warned, 2 failed.
+ # Ran 3 tests, 0 skipped, 2 failed.
python hash seed: * (glob)
[1]
$ cat xunit.xml
@@ -200,14 +276,17 @@
<testsuite errors="0" failures="2" name="run-tests" skipped="0" tests="3">
<testcase name="test-success.t" time="*"/> (glob)
<testcase name="test-failure-unicode.t" time="*"> (glob)
+ <failure message="output changed" type="output-mismatch">
<![CDATA[--- $TESTTMP/test-failure-unicode.t
+++ $TESTTMP/test-failure-unicode.t.err
@@ -1,2 +1,2 @@
$ echo babar\xce\xb1 (esc)
- l\xce\xb5\xce\xb5t (esc)
+ babar\xce\xb1 (esc)
- ]]> </testcase>
+ ]]> </failure>
+ </testcase>
<testcase name="test-failure.t" time="*"> (glob)
+ <failure message="output changed" type="output-mismatch">
<![CDATA[--- $TESTTMP/test-failure.t
+++ $TESTTMP/test-failure.t.err
@@ -1,5 +1,5 @@
@@ -217,13 +296,68 @@
This is a noop statement so that
this test is still more bytes than success.
pad pad pad pad............................................................
- ]]> </testcase>
+ ]]> </failure>
+ </testcase>
</testsuite>
$ cat .testtimes
test-failure-unicode.t * (glob)
test-failure.t * (glob)
test-success.t * (glob)
+
+ $ rt --list-tests
+ test-failure-unicode.t
+ test-failure.t
+ test-success.t
+
+ $ rt --list-tests --json
+ test-failure-unicode.t
+ test-failure.t
+ test-success.t
+ $ cat report.json
+ testreport ={
+ "test-failure-unicode.t": {
+ "result": "success"
+ },
+ "test-failure.t": {
+ "result": "success"
+ },
+ "test-success.t": {
+ "result": "success"
+ }
+ } (no-eol)
+
+ $ rt --list-tests --xunit=xunit.xml
+ test-failure-unicode.t
+ test-failure.t
+ test-success.t
+ $ cat xunit.xml
+ <?xml version="1.0" encoding="utf-8"?>
+ <testsuite errors="0" failures="0" name="run-tests" skipped="0" tests="0">
+ <testcase name="test-failure-unicode.t"/>
+ <testcase name="test-failure.t"/>
+ <testcase name="test-success.t"/>
+ </testsuite>
+
+ $ rt --list-tests test-failure* --json --xunit=xunit.xml --outputdir output
+ test-failure-unicode.t
+ test-failure.t
+ $ cat output/report.json
+ testreport ={
+ "test-failure-unicode.t": {
+ "result": "success"
+ },
+ "test-failure.t": {
+ "result": "success"
+ }
+ } (no-eol)
+ $ cat xunit.xml
+ <?xml version="1.0" encoding="utf-8"?>
+ <testsuite errors="0" failures="0" name="run-tests" skipped="0" tests="0">
+ <testcase name="test-failure-unicode.t"/>
+ <testcase name="test-failure.t"/>
+ </testsuite>
+
$ rm test-failure-unicode.t
test for --retest
@@ -244,7 +378,30 @@
ERROR: test-failure.t output changed
!
Failed test-failure.t: output changed
- # Ran 2 tests, 1 skipped, 0 warned, 1 failed.
+ # Ran 2 tests, 1 skipped, 1 failed.
+ python hash seed: * (glob)
+ [1]
+
+--retest works with --outputdir
+ $ rm -r output
+ $ mkdir output
+ $ mv test-failure.t.err output
+ $ rt --retest --outputdir output
+
+ --- $TESTTMP/test-failure.t
+ +++ $TESTTMP/output/test-failure.t.err
+ @@ -1,5 +1,5 @@
+ $ echo babar
+ - rataxes
+ + babar
+ This is a noop statement so that
+ this test is still more bytes than success.
+ pad pad pad pad............................................................
+
+ ERROR: test-failure.t output changed
+ !
+ Failed test-failure.t: output changed
+ # Ran 2 tests, 1 skipped, 1 failed.
python hash seed: * (glob)
[1]
@@ -255,12 +412,12 @@
$ rt test-success.t
.
- # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 1 tests, 0 skipped, 0 failed.
success w/ keyword
$ rt -k xyzzy
.
- # Ran 2 tests, 1 skipped, 0 warned, 0 failed.
+ # Ran 2 tests, 1 skipped, 0 failed.
failed
@@ -279,7 +436,7 @@
ERROR: test-failure.t output changed
!
Failed test-failure.t: output changed
- # Ran 1 tests, 0 skipped, 0 warned, 1 failed.
+ # Ran 1 tests, 0 skipped, 1 failed.
python hash seed: * (glob)
[1]
@@ -299,7 +456,7 @@
ERROR: test-failure.t output changed
!
Failed test-failure.t: output changed
- # Ran 2 tests, 1 skipped, 0 warned, 1 failed.
+ # Ran 2 tests, 1 skipped, 1 failed.
python hash seed: * (glob)
[1]
@@ -314,7 +471,7 @@
ERROR: test-serve-fail.t output changed
!
Failed test-serve-fail.t: server failed to start (HGPORT=*) (glob)
- # Ran 1 tests, 0 skipped, 0 warned, 1 failed.
+ # Ran 1 tests, 0 skipped, 1 failed.
python hash seed: * (glob)
[1]
$ rm test-serve-fail.t
@@ -330,7 +487,7 @@
> EOF
$ rt test-serve-inuse.t
.
- # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 1 tests, 0 skipped, 0 failed.
$ rm test-serve-inuse.t
$ killdaemons.py $DAEMON_PIDS
$ rm $DAEMON_PIDS
@@ -368,7 +525,7 @@
+ echo *SALT* 22 0 (glob)
*SALT* 22 0 (glob)
.
- # Ran 2 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 2 tests, 0 skipped, 0 failed.
Parallel runs
==============
@@ -380,7 +537,7 @@
!!
Failed test-failure*.t: output changed (glob)
Failed test-failure*.t: output changed (glob)
- # Ran 2 tests, 0 skipped, 0 warned, 2 failed.
+ # Ran 2 tests, 0 skipped, 2 failed.
python hash seed: * (glob)
[1]
@@ -402,7 +559,7 @@
Failed test-failure*.t: output changed (glob)
Failed test-nothing.t: output changed
- # Ran 2 tests, 0 skipped, 0 warned, 2 failed.
+ # Ran 2 tests, 0 skipped, 2 failed.
python hash seed: * (glob)
[1]
@@ -434,7 +591,7 @@
ERROR: test-failure.t output changed
!.
Failed test-failure.t: output changed
- # Ran 2 tests, 0 skipped, 0 warned, 1 failed.
+ # Ran 2 tests, 0 skipped, 1 failed.
python hash seed: * (glob)
[1]
@@ -458,7 +615,7 @@
ERROR: test-failure.t output changed
!.
Failed test-failure.t: output changed
- # Ran 2 tests, 0 skipped, 0 warned, 1 failed.
+ # Ran 2 tests, 0 skipped, 1 failed.
python hash seed: * (glob)
[1]
@@ -470,7 +627,7 @@
ERROR: test-failure.t output changed
!.
Failed test-failure.t: output changed
- # Ran 2 tests, 0 skipped, 0 warned, 1 failed.
+ # Ran 2 tests, 0 skipped, 1 failed.
python hash seed: * (glob)
[1]
@@ -503,7 +660,7 @@
saved backup bundle to $TESTTMP/foo.hg* (glob)
$ echo 'saved backup bundle to $TESTTMP/foo.hg'
Accept this change? [n] ..
- # Ran 2 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 2 tests, 0 skipped, 0 failed.
$ sed -e 's,(glob)$,&<,g' test-failure.t
$ echo babar
@@ -523,6 +680,85 @@
$ echo 'saved backup bundle to $TESTTMP/foo.hg'
saved backup bundle to $TESTTMP/*.hg (glob)<
+Race condition - test file was modified when test is running
+
+ $ TESTRACEDIR=`pwd`
+ $ export TESTRACEDIR
+ $ cat > test-race.t <<EOF
+ > $ echo 1
+ > $ echo "# a new line" >> $TESTRACEDIR/test-race.t
+ > EOF
+
+ $ rt -i test-race.t
+
+ --- $TESTTMP/test-race.t
+ +++ $TESTTMP/test-race.t.err
+ @@ -1,2 +1,3 @@
+ $ echo 1
+ + 1
+ $ echo "# a new line" >> $TESTTMP/test-race.t
+ Reference output has changed (run again to prompt changes)
+ ERROR: test-race.t output changed
+ !
+ Failed test-race.t: output changed
+ # Ran 1 tests, 0 skipped, 1 failed.
+ python hash seed: * (glob)
+ [1]
+
+ $ rm test-race.t
+
+When "#testcases" is used in .t files
+
+ $ cat >> test-cases.t <<EOF
+ > #testcases a b
+ > #if a
+ > $ echo 1
+ > #endif
+ > #if b
+ > $ echo 2
+ > #endif
+ > EOF
+
+ $ cat <<EOF | rt -i test-cases.t 2>&1
+ > y
+ > y
+ > EOF
+
+ --- $TESTTMP/test-cases.t
+ +++ $TESTTMP/test-cases.t.a.err
+ @@ -1,6 +1,7 @@
+ #testcases a b
+ #if a
+ $ echo 1
+ + 1
+ #endif
+ #if b
+ $ echo 2
+ Accept this change? [n] .
+ --- $TESTTMP/test-cases.t
+ +++ $TESTTMP/test-cases.t.b.err
+ @@ -5,4 +5,5 @@
+ #endif
+ #if b
+ $ echo 2
+ + 2
+ #endif
+ Accept this change? [n] .
+ # Ran 2 tests, 0 skipped, 0 failed.
+
+ $ cat test-cases.t
+ #testcases a b
+ #if a
+ $ echo 1
+ 1
+ #endif
+ #if b
+ $ echo 2
+ 2
+ #endif
+
+ $ rm test-cases.t
+
(reinstall)
$ mv backup test-failure.t
@@ -532,7 +768,7 @@
$ rt --nodiff
!.
Failed test-failure.t: output changed
- # Ran 2 tests, 0 skipped, 0 warned, 1 failed.
+ # Ran 2 tests, 0 skipped, 1 failed.
python hash seed: * (glob)
[1]
@@ -542,7 +778,7 @@
Keeping testtmp dir: $TESTTMP/keep/child1/test-success.t (glob)
Keeping threadtmp dir: $TESTTMP/keep/child1 (glob)
.
- # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 1 tests, 0 skipped, 0 failed.
timeouts
========
@@ -555,16 +791,16 @@
> cat test-timeout.t >> test-slow-timeout.t
$ rt --timeout=1 --slowtimeout=3 test-timeout.t test-slow-timeout.t
st
- Skipped test-slow-timeout.t: missing feature: allow slow tests
+ Skipped test-slow-timeout.t: missing feature: allow slow tests (use --allow-slow-tests)
Failed test-timeout.t: timed out
- # Ran 1 tests, 1 skipped, 0 warned, 1 failed.
+ # Ran 1 tests, 1 skipped, 1 failed.
python hash seed: * (glob)
[1]
$ rt --timeout=1 --slowtimeout=3 \
> test-timeout.t test-slow-timeout.t --allow-slow-tests
.t
Failed test-timeout.t: timed out
- # Ran 2 tests, 0 skipped, 0 warned, 1 failed.
+ # Ran 2 tests, 0 skipped, 1 failed.
python hash seed: * (glob)
[1]
$ rm test-timeout.t test-slow-timeout.t
@@ -574,7 +810,7 @@
$ rt test-success.t --time
.
- # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 1 tests, 0 skipped, 0 failed.
# Producing time report
start end cuser csys real Test
\s*[\d\.]{5} \s*[\d\.]{5} \s*[\d\.]{5} \s*[\d\.]{5} \s*[\d\.]{5} test-success.t (re)
@@ -584,7 +820,7 @@
$ rt test-success.t --time --jobs 2
.
- # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 1 tests, 0 skipped, 0 failed.
# Producing time report
start end cuser csys real Test
\s*[\d\.]{5} \s*[\d\.]{5} \s*[\d\.]{5} \s*[\d\.]{5} \s*[\d\.]{5} test-success.t (re)
@@ -599,25 +835,29 @@
!.s
Skipped test-skip.t: missing feature: nail clipper
Failed test-failure.t: output changed
- # Ran 2 tests, 1 skipped, 0 warned, 1 failed.
+ # Ran 2 tests, 1 skipped, 1 failed.
python hash seed: * (glob)
[1]
$ rt --keyword xyzzy
.s
Skipped test-skip.t: missing feature: nail clipper
- # Ran 2 tests, 2 skipped, 0 warned, 0 failed.
+ # Ran 2 tests, 2 skipped, 0 failed.
Skips with xml
$ rt --keyword xyzzy \
> --xunit=xunit.xml
.s
Skipped test-skip.t: missing feature: nail clipper
- # Ran 2 tests, 2 skipped, 0 warned, 0 failed.
+ # Ran 2 tests, 2 skipped, 0 failed.
$ cat xunit.xml
<?xml version="1.0" encoding="utf-8"?>
<testsuite errors="0" failures="0" name="run-tests" skipped="2" tests="2">
<testcase name="test-success.t" time="*"/> (glob)
+ <testcase name="test-skip.t">
+ <skipped>
+ <![CDATA[missing feature: nail clipper]]> </skipped>
+ </testcase>
</testsuite>
Missing skips or blacklisted skips don't count as executed:
@@ -627,7 +867,7 @@
ss
Skipped test-bogus.t: Doesn't exist
Skipped test-failure.t: blacklisted
- # Ran 0 tests, 2 skipped, 0 warned, 0 failed.
+ # Ran 0 tests, 2 skipped, 0 failed.
$ cat report.json
testreport ={
"test-bogus.t": {
@@ -657,7 +897,7 @@
!
Skipped test-bogus.t: Doesn't exist
Failed test-failure.t: output changed
- # Ran 1 tests, 1 skipped, 0 warned, 1 failed.
+ # Ran 1 tests, 1 skipped, 1 failed.
python hash seed: * (glob)
[1]
@@ -680,7 +920,7 @@
!.s
Skipped test-skip.t: missing feature: nail clipper
Failed test-failure.t: output changed
- # Ran 2 tests, 1 skipped, 0 warned, 1 failed.
+ # Ran 2 tests, 1 skipped, 1 failed.
python hash seed: * (glob)
[1]
@@ -714,6 +954,68 @@
"time": "\s*[\d\.]{4,5}" (re)
}
} (no-eol)
+--json with --outputdir
+
+ $ rm report.json
+ $ rm -r output
+ $ mkdir output
+ $ rt --json --outputdir output
+
+ --- $TESTTMP/test-failure.t
+ +++ $TESTTMP/output/test-failure.t.err
+ @@ -1,5 +1,5 @@
+ $ echo babar
+ - rataxes
+ + babar
+ This is a noop statement so that
+ this test is still more bytes than success.
+ pad pad pad pad............................................................
+
+ ERROR: test-failure.t output changed
+ !.s
+ Skipped test-skip.t: missing feature: nail clipper
+ Failed test-failure.t: output changed
+ # Ran 2 tests, 1 skipped, 1 failed.
+ python hash seed: * (glob)
+ [1]
+ $ f report.json
+ report.json: file not found
+ $ cat output/report.json
+ testreport ={
+ "test-failure.t": [\{] (re)
+ "csys": "\s*[\d\.]{4,5}", ? (re)
+ "cuser": "\s*[\d\.]{4,5}", ? (re)
+ "diff": "---.+\+\+\+.+", ? (re)
+ "end": "\s*[\d\.]{4,5}", ? (re)
+ "result": "failure", ? (re)
+ "start": "\s*[\d\.]{4,5}", ? (re)
+ "time": "\s*[\d\.]{4,5}" (re)
+ }, ? (re)
+ "test-skip.t": {
+ "csys": "\s*[\d\.]{4,5}", ? (re)
+ "cuser": "\s*[\d\.]{4,5}", ? (re)
+ "diff": "", ? (re)
+ "end": "\s*[\d\.]{4,5}", ? (re)
+ "result": "skip", ? (re)
+ "start": "\s*[\d\.]{4,5}", ? (re)
+ "time": "\s*[\d\.]{4,5}" (re)
+ }, ? (re)
+ "test-success.t": [\{] (re)
+ "csys": "\s*[\d\.]{4,5}", ? (re)
+ "cuser": "\s*[\d\.]{4,5}", ? (re)
+ "diff": "", ? (re)
+ "end": "\s*[\d\.]{4,5}", ? (re)
+ "result": "success", ? (re)
+ "start": "\s*[\d\.]{4,5}", ? (re)
+ "time": "\s*[\d\.]{4,5}" (re)
+ }
+ } (no-eol)
+ $ ls -a output
+ .
+ ..
+ .testtimes
+ report.json
+ test-failure.t.err
Test that failed test accepted through interactive are properly reported:
@@ -731,7 +1033,7 @@
pad pad pad pad............................................................
Accept this change? [n] ..s
Skipped test-skip.t: missing feature: nail clipper
- # Ran 2 tests, 1 skipped, 0 warned, 0 failed.
+ # Ran 2 tests, 1 skipped, 0 failed.
$ cat report.json
testreport ={
@@ -774,7 +1076,7 @@
$ rt test-glob-backslash.t
.
- # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 1 tests, 0 skipped, 0 failed.
$ rm -f test-glob-backslash.t
@@ -800,7 +1102,7 @@
> EOF
$ rt test-hghave.t
.
- # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 1 tests, 0 skipped, 0 failed.
test that RUNTESTDIR refers the directory, in which `run-tests.py` now
running is placed.
@@ -816,14 +1118,14 @@
> $ test "\$TESTDIR" = "$TESTTMP"/anothertests
> #endif
> $ test "\$RUNTESTDIR" = "$TESTDIR"
- > $ head -n 3 "\$RUNTESTDIR"/../contrib/check-code.py
- > #!/usr/bin/env python
+ > $ head -n 3 "\$RUNTESTDIR"/../contrib/check-code.py | sed 's@.!.*python@#!USRBINENVPY@'
+ > #!USRBINENVPY
> #
> # check-code - a style and portability checker for Mercurial
> EOF
$ rt test-runtestdir.t
.
- # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 1 tests, 0 skipped, 0 failed.
#if execbit
@@ -840,7 +1142,7 @@
> EOF
$ rt test-testdir-path.t
.
- # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 1 tests, 0 skipped, 0 failed.
#endif
@@ -852,11 +1154,11 @@
> EOF
$ rt test-very-slow-test.t
s
- Skipped test-very-slow-test.t: missing feature: allow slow tests
- # Ran 0 tests, 1 skipped, 0 warned, 0 failed.
+ Skipped test-very-slow-test.t: missing feature: allow slow tests (use --allow-slow-tests)
+ # Ran 0 tests, 1 skipped, 0 failed.
$ rt $HGTEST_RUN_TESTS_PURE --allow-slow-tests test-very-slow-test.t
.
- # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 1 tests, 0 skipped, 0 failed.
support for running a test outside the current directory
$ mkdir nonlocal
@@ -866,7 +1168,7 @@
> EOF
$ rt nonlocal/test-is-not-here.t
.
- # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
+ # Ran 1 tests, 0 skipped, 0 failed.
support for bisecting failed tests automatically
$ hg init bisect
@@ -897,6 +1199,130 @@
!
Failed test-bisect.t: output changed
test-bisect.t broken by 72cbf122d116 (bad)
- # Ran 1 tests, 0 skipped, 0 warned, 1 failed.
+ # Ran 1 tests, 0 skipped, 1 failed.
python hash seed: * (glob)
[1]
+
+ $ cd ..
+
+Test a broken #if statement doesn't break run-tests threading.
+==============================================================
+ $ mkdir broken
+ $ cd broken
+ $ cat > test-broken.t <<EOF
+ > true
+ > #if notarealhghavefeature
+ > $ false
+ > #endif
+ > EOF
+ $ for f in 1 2 3 4 ; do
+ > cat > test-works-$f.t <<EOF
+ > This is test case $f
+ > $ sleep 1
+ > EOF
+ > done
+ $ rt -j 2
+ ....
+ # Ran 5 tests, 0 skipped, 0 failed.
+ skipped: unknown feature: notarealhghavefeature
+
+ $ cd ..
+ $ rm -rf broken
+
+Test cases in .t files
+======================
+ $ mkdir cases
+ $ cd cases
+ $ cat > test-cases-abc.t <<'EOF'
+ > #testcases A B C
+ > $ V=B
+ > #if A
+ > $ V=A
+ > #endif
+ > #if C
+ > $ V=C
+ > #endif
+ > $ echo $V | sed 's/A/C/'
+ > C
+ > #if C
+ > $ [ $V = C ]
+ > #endif
+ > #if A
+ > $ [ $V = C ]
+ > [1]
+ > #endif
+ > #if no-C
+ > $ [ $V = C ]
+ > [1]
+ > #endif
+ > $ [ $V = D ]
+ > [1]
+ > EOF
+ $ rt
+ .
+ --- $TESTTMP/anothertests/cases/test-cases-abc.t
+ +++ $TESTTMP/anothertests/cases/test-cases-abc.t.B.err
+ @@ -7,7 +7,7 @@
+ $ V=C
+ #endif
+ $ echo $V | sed 's/A/C/'
+ - C
+ + B
+ #if C
+ $ [ $V = C ]
+ #endif
+
+ ERROR: test-cases-abc.t (case B) output changed
+ !.
+ Failed test-cases-abc.t (case B): output changed
+ # Ran 3 tests, 0 skipped, 1 failed.
+ python hash seed: * (glob)
+ [1]
+
+--restart works
+
+ $ rt --restart
+
+ --- $TESTTMP/anothertests/cases/test-cases-abc.t
+ +++ $TESTTMP/anothertests/cases/test-cases-abc.t.B.err
+ @@ -7,7 +7,7 @@
+ $ V=C
+ #endif
+ $ echo $V | sed 's/A/C/'
+ - C
+ + B
+ #if C
+ $ [ $V = C ]
+ #endif
+
+ ERROR: test-cases-abc.t (case B) output changed
+ !.
+ Failed test-cases-abc.t (case B): output changed
+ # Ran 2 tests, 0 skipped, 1 failed.
+ python hash seed: * (glob)
+ [1]
+
+--restart works with outputdir
+
+ $ mkdir output
+ $ mv test-cases-abc.t.B.err output
+ $ rt --restart --outputdir output
+
+ --- $TESTTMP/anothertests/cases/test-cases-abc.t
+ +++ $TESTTMP/anothertests/cases/output/test-cases-abc.t.B.err
+ @@ -7,7 +7,7 @@
+ $ V=C
+ #endif
+ $ echo $V | sed 's/A/C/'
+ - C
+ + B
+ #if C
+ $ [ $V = C ]
+ #endif
+
+ ERROR: test-cases-abc.t (case B) output changed
+ !.
+ Failed test-cases-abc.t (case B): output changed
+ # Ran 2 tests, 0 skipped, 1 failed.
+ python hash seed: * (glob)
+ [1]
--- a/tests/test-setdiscovery.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-setdiscovery.t Wed Jul 19 07:51:41 2017 -0500
@@ -83,7 +83,7 @@
taking initial sample
searching: 2 queries
query 2; still undecided: 29, sample size is: 29
- 2 total queries
+ 2 total queries in *.????s (glob)
common heads: bebd167eb94d
% -- b -> a tree
@@ -99,10 +99,9 @@
taking initial sample
searching: 2 queries
query 2; still undecided: 2, sample size is: 2
- 2 total queries
+ 2 total queries in *.????s (glob)
common heads: bebd167eb94d
-
Both sides many new with stub:
$ testdesc '-ra1 -ra2' '-rb' '
@@ -122,7 +121,7 @@
taking initial sample
searching: 2 queries
query 2; still undecided: 29, sample size is: 29
- 2 total queries
+ 2 total queries in *.????s (glob)
common heads: 2dc09a01254d
% -- b -> a tree
@@ -138,7 +137,7 @@
taking initial sample
searching: 2 queries
query 2; still undecided: 29, sample size is: 29
- 2 total queries
+ 2 total queries in *.????s (glob)
common heads: 2dc09a01254d
@@ -161,7 +160,7 @@
taking quick initial sample
searching: 2 queries
query 2; still undecided: 31, sample size is: 31
- 2 total queries
+ 2 total queries in *.????s (glob)
common heads: 66f7d451a68b
% -- b -> a tree
@@ -177,7 +176,7 @@
taking quick initial sample
searching: 2 queries
query 2; still undecided: 31, sample size is: 31
- 2 total queries
+ 2 total queries in *.????s (glob)
common heads: 66f7d451a68b
@@ -200,7 +199,7 @@
taking quick initial sample
searching: 2 queries
query 2; still undecided: 51, sample size is: 51
- 2 total queries
+ 2 total queries in *.????s (glob)
common heads: 66f7d451a68b
% -- b -> a tree
@@ -216,7 +215,7 @@
taking quick initial sample
searching: 2 queries
query 2; still undecided: 31, sample size is: 31
- 2 total queries
+ 2 total queries in *.????s (glob)
common heads: 66f7d451a68b
@@ -242,7 +241,7 @@
sampling from both directions
searching: 3 queries
query 3; still undecided: 31, sample size is: 31
- 3 total queries
+ 3 total queries in *.????s (glob)
common heads: 7ead0cba2838
% -- b -> a tree
@@ -261,7 +260,7 @@
sampling from both directions
searching: 3 queries
query 3; still undecided: 15, sample size is: 15
- 3 total queries
+ 3 total queries in *.????s (glob)
common heads: 7ead0cba2838
@@ -324,7 +323,7 @@
sampling from both directions
searching: 6 queries
query 6; still undecided: \d+, sample size is: \d+ (re)
- 6 total queries
+ 6 total queries in *.????s (glob)
common heads: 3ee37d65064a
Test actual protocol when pulling one new head in addition to common heads
@@ -364,9 +363,9 @@
#if false
generate new bundles:
$ hg init r1
- $ for i in `python $TESTDIR/seq.py 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
+ $ for i in `$PYTHON $TESTDIR/seq.py 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
$ hg clone -q r1 r2
- $ for i in `python $TESTDIR/seq.py 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
+ $ for i in `$PYTHON $TESTDIR/seq.py 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
$ hg -R r2 branch -q r2change && hg -R r2 ci -qmr2change
$ hg -R r1 bundle -qa $TESTDIR/bundles/issue4438-r1.hg
$ hg -R r2 bundle -qa $TESTDIR/bundles/issue4438-r2.hg
@@ -399,8 +398,13 @@
> unrandomsample = $TESTTMP/unrandomsample.py
> EOF
- $ hg -R r1 outgoing r2 -T'{rev} '
+ $ hg -R r1 outgoing r2 -T'{rev} ' --config extensions.blackbox=
comparing with r2
searching for changes
101 102 103 104 105 106 107 108 109 110 (no-eol)
+ $ hg -R r1 --config extensions.blackbox= blackbox
+ * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> outgoing r2 *-T{rev} * (glob)
+ * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> found 101 common and 1 unknown server heads, 2 roundtrips in *.????s (glob)
+ * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* exited 0 after *.?? seconds (glob)
+ * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> blackbox (glob)
$ cd ..
--- a/tests/test-share.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-share.t Wed Jul 19 07:51:41 2017 -0500
@@ -24,6 +24,20 @@
$ test -d .hg/store
[1]
+share shouldn't have a cache dir, original repo should
+
+ $ hg branches
+ default 0:d3873e73d99e
+ $ hg tags
+ tip 0:d3873e73d99e
+ $ test -d .hg/cache
+ [1]
+ $ ls -1 ../repo1/.hg/cache
+ branch2-served
+ rbc-names-v1
+ rbc-revs-v1
+ tags2-visible
+
Some sed versions appends newline, some don't, and some just fails
$ cat .hg/sharedpath; echo
@@ -243,6 +257,8 @@
non largefiles repos won't enable largefiles
$ hg share --config extensions.largefiles= repo3 sharedrepo
+ The fsmonitor extension is incompatible with the largefiles extension and has been disabled. (fsmonitor !)
+ The fsmonitor extension is incompatible with the largefiles extension and has been disabled. (fsmonitor !)
updating working directory
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ [ -f sharedrepo/.hg/hgrc ]
--- a/tests/test-shelve.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-shelve.t Wed Jul 19 07:51:41 2017 -0500
@@ -626,7 +626,7 @@
$ hg rebase -d 1 --config extensions.rebase=
rebasing 2:323bfa07f744 "xyz" (tip)
merging x
- saved backup bundle to $TESTTMP/shelverebase/.hg/strip-backup/323bfa07f744-78114325-backup.hg (glob)
+ saved backup bundle to $TESTTMP/shelverebase/.hg/strip-backup/323bfa07f744-78114325-rebase.hg (glob)
$ hg unshelve
unshelving change 'default'
rebasing shelved changes
@@ -685,6 +685,7 @@
shelved as default
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg debugobsolete `hg --debug id -i -r 1`
+ obsoleted 1 changesets
$ hg unshelve
unshelving change 'default'
@@ -1591,9 +1592,8 @@
Removing restore branch information from shelvedstate file(making it looks like
in previous versions) and running unshelve --continue
- $ head -n 6 < .hg/shelvedstate > .hg/shelvedstate_oldformat
- $ rm .hg/shelvedstate
- $ mv .hg/shelvedstate_oldformat .hg/shelvedstate
+ $ cp .hg/shelvedstate .hg/shelvedstate_old
+ $ cat .hg/shelvedstate_old | grep -v 'branchtorestore' > .hg/shelvedstate
$ echo "aaabbbccc" > a
$ rm a.orig
@@ -1737,3 +1737,48 @@
[255]
$ hg st
! a
+ $ cd ..
+
+New versions of Mercurial know how to read old shelvedstate files
+ $ hg init oldshelvedstate
+ $ cd oldshelvedstate
+ $ echo root > root && hg ci -Am root
+ adding root
+ $ echo 1 > a
+ $ hg add a
+ $ hg shelve --name ashelve
+ shelved as ashelve
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ echo 2 > a
+ $ hg ci -Am a
+ adding a
+ $ hg unshelve
+ unshelving change 'ashelve'
+ rebasing shelved changes
+ rebasing 2:003d2d94241c "changes to: root" (tip)
+ merging a
+ warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
+ unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
+ [1]
+putting v1 shelvedstate file in place of a created v2
+ $ cat << EOF > .hg/shelvedstate
+ > 1
+ > ashelve
+ > 8b058dae057a5a78f393f4535d9e363dd5efac9d
+ > 8b058dae057a5a78f393f4535d9e363dd5efac9d
+ > 8b058dae057a5a78f393f4535d9e363dd5efac9d 003d2d94241cc7aff0c3a148e966d6a4a377f3a7
+ > 003d2d94241cc7aff0c3a148e966d6a4a377f3a7
+ >
+ > nokeep
+ > :no-active-bookmark
+ > EOF
+ $ echo 1 > a
+ $ hg resolve --mark a
+ (no more unresolved files)
+ continue: hg unshelve --continue
+mercurial does not crash
+ $ hg unshelve --continue
+ rebasing 2:003d2d94241c "changes to: root" (tip)
+ unshelve of 'ashelve' complete
+ $ cd ..
+
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-show-stack.t Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,220 @@
+ $ cat >> $HGRCPATH << EOF
+ > [extensions]
+ > show =
+ > EOF
+
+ $ hg init repo0
+ $ cd repo0
+
+Empty repo / no checkout results in error
+
+ $ hg show stack
+ abort: stack view only available when there is a working directory
+ [255]
+
+Stack displays single draft changeset as root revision
+
+ $ echo 0 > foo
+ $ hg -q commit -A -m 'commit 0'
+ $ hg show stack
+ @ 9f171 commit 0
+
+Stack displays multiple draft changesets
+
+ $ echo 1 > foo
+ $ hg commit -m 'commit 1'
+ $ echo 2 > foo
+ $ hg commit -m 'commit 2'
+ $ echo 3 > foo
+ $ hg commit -m 'commit 3'
+ $ echo 4 > foo
+ $ hg commit -m 'commit 4'
+ $ hg show stack
+ @ 2737b commit 4
+ o d1a69 commit 3
+ o 128c8 commit 2
+ o 181cc commit 1
+ o 9f171 commit 0
+
+Public parent of draft base is displayed, separated from stack
+
+ $ hg phase --public -r 0
+ $ hg show stack
+ @ 2737b commit 4
+ o d1a69 commit 3
+ o 128c8 commit 2
+ o 181cc commit 1
+ / (stack base)
+ o 9f171 commit 0
+
+ $ hg phase --public -r 1
+ $ hg show stack
+ @ 2737b commit 4
+ o d1a69 commit 3
+ o 128c8 commit 2
+ / (stack base)
+ o 181cc commit 1
+
+Draft descendants are shown
+
+ $ hg -q up 2
+ $ hg show stack
+ o 2737b commit 4
+ o d1a69 commit 3
+ @ 128c8 commit 2
+ / (stack base)
+ o 181cc commit 1
+
+ $ hg -q up 3
+ $ hg show stack
+ o 2737b commit 4
+ @ d1a69 commit 3
+ o 128c8 commit 2
+ / (stack base)
+ o 181cc commit 1
+
+working dir on public changeset should display special message
+
+ $ hg -q up 1
+ $ hg show stack
+ (empty stack; working directory parent is a published changeset)
+
+Branch point in descendants displayed at top of graph
+
+ $ hg -q up 3
+ $ echo b > foo
+ $ hg commit -m 'commit 5 (new dag branch)'
+ created new head
+ $ hg -q up 2
+ $ hg show stack
+ \ / (multiple children)
+ |
+ o d1a69 commit 3
+ @ 128c8 commit 2
+ / (stack base)
+ o 181cc commit 1
+
+ $ cd ..
+
+Base is stopped at merges
+
+ $ hg init merge-base
+ $ cd merge-base
+ $ echo 0 > foo
+ $ hg -q commit -A -m initial
+ $ echo h1 > foo
+ $ hg commit -m 'head 1'
+ $ hg -q up 0
+ $ echo h2 > foo
+ $ hg -q commit -m 'head 2'
+ $ hg phase --public -r 0:tip
+ $ hg -q up 1
+ $ hg merge -t :local 2
+ 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ hg commit -m 'merge heads'
+
+TODO doesn't yet handle case where wdir is a draft merge
+
+ $ hg show stack
+ @ 8ee90 merge heads
+ / (stack base)
+ o 59478 head 1
+
+ $ echo d1 > foo
+ $ hg commit -m 'draft 1'
+ $ echo d2 > foo
+ $ hg commit -m 'draft 2'
+
+ $ hg show stack
+ @ 430d5 draft 2
+ o 787b1 draft 1
+ / (stack base)
+ o 8ee90 merge heads
+
+ $ cd ..
+
+Now move on to stacks when there are more commits after the base branchpoint
+
+ $ hg init public-rebase
+ $ cd public-rebase
+ $ echo 0 > foo
+ $ hg -q commit -A -m 'base'
+ $ hg phase --public -r .
+ $ echo d1 > foo
+ $ hg commit -m 'draft 1'
+ $ echo d2 > foo
+ $ hg commit -m 'draft 2'
+ $ hg -q up 0
+ $ echo 1 > foo
+ $ hg commit -m 'new 1'
+ created new head
+ $ echo 2 > foo
+ $ hg commit -m 'new 2'
+ $ hg -q up 2
+
+Newer draft heads don't impact output
+
+ $ hg show stack
+ @ eaffc draft 2
+ o 2b218 draft 1
+ / (stack base)
+ o b66bb base
+
+Newer public heads are rendered
+
+ $ hg phase --public -r '::tip'
+
+ $ hg show stack
+ o baa4b new 2
+ / (2 commits ahead)
+ :
+ : (stack head)
+ : @ eaffc draft 2
+ : o 2b218 draft 1
+ :/ (stack base)
+ o b66bb base
+
+If rebase is available, we show a hint how to rebase to that head
+
+ $ hg --config extensions.rebase= show stack
+ o baa4b new 2
+ / (2 commits ahead; hg rebase --source 2b218 --dest baa4b)
+ :
+ : (stack head)
+ : @ eaffc draft 2
+ : o 2b218 draft 1
+ :/ (stack base)
+ o b66bb base
+
+Similar tests but for multiple heads
+
+ $ hg -q up 0
+ $ echo h2 > foo
+ $ hg -q commit -m 'new head 2'
+ $ hg phase --public -r .
+ $ hg -q up 2
+
+ $ hg show stack
+ o baa4b new 2
+ / (2 commits ahead)
+ : o 9a848 new head 2
+ :/ (1 commits ahead)
+ :
+ : (stack head)
+ : @ eaffc draft 2
+ : o 2b218 draft 1
+ :/ (stack base)
+ o b66bb base
+
+ $ hg --config extensions.rebase= show stack
+ o baa4b new 2
+ / (2 commits ahead; hg rebase --source 2b218 --dest baa4b)
+ : o 9a848 new head 2
+ :/ (1 commits ahead; hg rebase --source 2b218 --dest 9a848)
+ :
+ : (stack head)
+ : @ eaffc draft 2
+ : o 2b218 draft 1
+ :/ (stack base)
+ o b66bb base
--- a/tests/test-show-work.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-show-work.t Wed Jul 19 07:51:41 2017 -0500
@@ -166,3 +166,72 @@
~
$ cd ..
+
+Tags are rendered
+
+ $ hg init tags
+ $ cd tags
+ $ echo 0 > foo
+ $ hg -q commit -A -m 'commit 1'
+ $ echo 1 > foo
+ $ hg commit -m 'commit 2'
+ $ hg tag 0.1
+ $ hg phase --public -r .
+ $ echo 2 > foo
+ $ hg commit -m 'commit 3'
+ $ hg tag 0.2
+
+ $ hg show work
+ @ 37582 Added tag 0.2 for changeset 6379c25b76f1
+ o 6379c (0.2) commit 3
+ o a2ad9 Added tag 0.1 for changeset 6a75536ea0b1
+ |
+ ~
+
+ $ cd ..
+
+Multiple names on same changeset render properly
+
+ $ hg init multiplenames
+ $ cd multiplenames
+ $ echo 0 > foo
+ $ hg -q commit -A -m 'commit 1'
+ $ hg phase --public -r .
+ $ hg branch mybranch
+ marked working directory as branch mybranch
+ (branches are permanent and global, did you want a bookmark?)
+ $ hg bookmark mybook
+ $ echo 1 > foo
+ $ hg commit -m 'commit 2'
+
+ $ hg show work
+ @ 34834 (mybook) (mybranch) commit 2
+ o 97fcc commit 1
+
+Multiple bookmarks on same changeset render properly
+
+ $ hg book mybook2
+ $ hg show work
+ @ 34834 (mybook mybook2) (mybranch) commit 2
+ o 97fcc commit 1
+
+ $ cd ..
+
+Extra namespaces are rendered
+
+ $ hg init extranamespaces
+ $ cd extranamespaces
+ $ echo 0 > foo
+ $ hg -q commit -A -m 'commit 1'
+ $ hg phase --public -r .
+ $ echo 1 > foo
+ $ hg commit -m 'commit 2'
+ $ echo 2 > foo
+ $ hg commit -m 'commit 3'
+
+ $ hg --config extensions.revnames=$TESTDIR/revnamesext.py show work
+ @ 32f3e (r2) commit 3
+ o 6a755 (r1) commit 2
+ o 97fcc (r0) commit 1
+
+ $ cd ..
--- a/tests/test-show.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-show.t Wed Jul 19 07:51:41 2017 -0500
@@ -11,6 +11,7 @@
available views:
bookmarks -- bookmarks and their associated changeset
+ stack -- current line of work
work -- changesets that aren't finished
abort: no view requested
@@ -40,6 +41,8 @@
bookmarks bookmarks and their associated changeset
+ stack current line of work
+
work changesets that aren't finished
(use 'hg help -e show' to show help for the show extension)
@@ -127,4 +130,42 @@
[
]
+commands.show.aliasprefix aliases values to `show <view>`
+
+ $ hg --config commands.show.aliasprefix=s sbookmarks
+ (no bookmarks set)
+
+ $ hg --config commands.show.aliasprefix=sh shwork
+ @ 7b570 commit for book2
+ o b757f commit for book1
+ o ba592 initial
+
+ $ hg --config commands.show.aliasprefix='s sh' swork
+ @ 7b570 commit for book2
+ o b757f commit for book1
+ o ba592 initial
+
+ $ hg --config commands.show.aliasprefix='s sh' shwork
+ @ 7b570 commit for book2
+ o b757f commit for book1
+ o ba592 initial
+
+The aliases don't appear in `hg config`
+
+ $ hg --config commands.show.aliasprefix=s config alias
+ [1]
+
+Doesn't overwrite existing alias
+
+ $ hg --config alias.swork='log -r .' --config commands.show.aliasprefix=s swork
+ changeset: 2:7b5709ab64cb
+ tag: tip
+ user: test
+ date: Thu Jan 01 00:00:00 1970 +0000
+ summary: commit for book2
+
+
+ $ hg --config alias.swork='log -r .' --config commands.show.aliasprefix=s config alias
+ alias.swork=log -r .
+
$ cd ..
--- a/tests/test-simple-update.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-simple-update.t Wed Jul 19 07:51:41 2017 -0500
@@ -77,7 +77,7 @@
> [worker]
> numcpus = 4
> EOF
- $ for i in `python $TESTDIR/seq.py 1 100`; do
+ $ for i in `$PYTHON $TESTDIR/seq.py 1 100`; do
> echo $i > $i
> done
$ hg ci -qAm 'add 100 files'
--- a/tests/test-simplekeyvaluefile.py Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-simplekeyvaluefile.py Wed Jul 19 07:51:41 2017 -0500
@@ -33,7 +33,8 @@
return mockfile(path, self).read()
def readlines(self, path):
- return mockfile(path, self).read().split('\n')
+ # lines need to contain the trailing '\n' to mock the real readlines
+ return [l for l in mockfile(path, self).read().splitlines(True)]
def __call__(self, path, mode, atomictemp):
return mockfile(path, self)
@@ -42,32 +43,42 @@
def setUp(self):
self.vfs = mockvfs()
- def testbasicwriting(self):
- d = {'key1': 'value1', 'Key2': 'value2'}
- scmutil.simplekeyvaluefile(self.vfs, 'kvfile').write(d)
+ def testbasicwritingiandreading(self):
+ dw = {'key1': 'value1', 'Key2': 'value2'}
+ scmutil.simplekeyvaluefile(self.vfs, 'kvfile').write(dw)
self.assertEqual(sorted(self.vfs.read('kvfile').split('\n')),
['', 'Key2=value2', 'key1=value1'])
+ dr = scmutil.simplekeyvaluefile(self.vfs, 'kvfile').read()
+ self.assertEqual(dr, dw)
def testinvalidkeys(self):
d = {'0key1': 'value1', 'Key2': 'value2'}
- self.assertRaises(error.ProgrammingError,
- scmutil.simplekeyvaluefile(self.vfs, 'kvfile').write,
- d)
+ with self.assertRaisesRegexp(error.ProgrammingError,
+ 'keys must start with a letter.*'):
+ scmutil.simplekeyvaluefile(self.vfs, 'kvfile').write(d)
+
d = {'key1@': 'value1', 'Key2': 'value2'}
- self.assertRaises(error.ProgrammingError,
- scmutil.simplekeyvaluefile(self.vfs, 'kvfile').write,
- d)
+ with self.assertRaisesRegexp(error.ProgrammingError, 'invalid key.*'):
+ scmutil.simplekeyvaluefile(self.vfs, 'kvfile').write(d)
def testinvalidvalues(self):
d = {'key1': 'value1', 'Key2': 'value2\n'}
- self.assertRaises(error.ProgrammingError,
- scmutil.simplekeyvaluefile(self.vfs, 'kvfile').write,
- d)
+ with self.assertRaisesRegexp(error.ProgrammingError, 'invalid val.*'):
+ scmutil.simplekeyvaluefile(self.vfs, 'kvfile').write(d)
def testcorruptedfile(self):
self.vfs.contents['badfile'] = 'ababagalamaga\n'
- self.assertRaises(error.CorruptedState,
- scmutil.simplekeyvaluefile(self.vfs, 'badfile').read)
+ with self.assertRaisesRegexp(error.CorruptedState,
+ 'dictionary.*element.*'):
+ scmutil.simplekeyvaluefile(self.vfs, 'badfile').read()
+
+ def testfirstline(self):
+ dw = {'key1': 'value1'}
+ scmutil.simplekeyvaluefile(self.vfs, 'fl').write(dw, firstline='1.0')
+ self.assertEqual(self.vfs.read('fl'), '1.0\nkey1=value1\n')
+ dr = scmutil.simplekeyvaluefile(self.vfs, 'fl')\
+ .read(firstlinenonkeyval=True)
+ self.assertEqual(dr, {'__firstline': '1.0', 'key1': 'value1'})
if __name__ == "__main__":
silenttestrunner.main(__name__)
--- a/tests/test-simplemerge.py Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-simplemerge.py Wed Jul 19 07:51:41 2017 -0500
@@ -326,7 +326,8 @@
self.assertEquals(ml, MERGED_RESULT)
def test_binary(self):
- self.assertRaises(error.Abort, Merge3, ['\x00'], ['a'], ['b'])
+ with self.assertRaises(error.Abort):
+ Merge3(['\x00'], ['a'], ['b'])
def test_dos_text(self):
base_text = 'a\r\n'
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-sparse-clear.t Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,73 @@
+test sparse
+
+ $ hg init myrepo
+ $ cd myrepo
+ $ cat >> $HGRCPATH <<EOF
+ > [extensions]
+ > sparse=
+ > purge=
+ > strip=
+ > rebase=
+ > EOF
+
+ $ echo a > index.html
+ $ echo x > data.py
+ $ echo z > readme.txt
+ $ cat > base.sparse <<EOF
+ > [include]
+ > *.sparse
+ > EOF
+ $ hg ci -Aqm 'initial'
+ $ cat > webpage.sparse <<EOF
+ > %include base.sparse
+ > [include]
+ > *.html
+ > EOF
+ $ hg ci -Aqm 'initial'
+
+Clear rules when there are includes
+
+ $ hg debugsparse --include *.py
+ $ ls
+ data.py
+ $ hg debugsparse --clear-rules
+ $ ls
+ base.sparse
+ data.py
+ index.html
+ readme.txt
+ webpage.sparse
+
+Clear rules when there are excludes
+
+ $ hg debugsparse --exclude *.sparse
+ $ ls
+ data.py
+ index.html
+ readme.txt
+ $ hg debugsparse --clear-rules
+ $ ls
+ base.sparse
+ data.py
+ index.html
+ readme.txt
+ webpage.sparse
+
+Clearing rules should not alter profiles
+
+ $ hg debugsparse --enable-profile webpage.sparse
+ $ ls
+ base.sparse
+ index.html
+ webpage.sparse
+ $ hg debugsparse --include *.py
+ $ ls
+ base.sparse
+ data.py
+ index.html
+ webpage.sparse
+ $ hg debugsparse --clear-rules
+ $ ls
+ base.sparse
+ index.html
+ webpage.sparse
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-sparse-clone.t Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,72 @@
+test sparse
+
+ $ cat >> $HGRCPATH << EOF
+ > [ui]
+ > ssh = python "$RUNTESTDIR/dummyssh"
+ > username = nobody <no.reply@fb.com>
+ > [extensions]
+ > sparse=
+ > purge=
+ > strip=
+ > rebase=
+ > EOF
+
+ $ hg init myrepo
+ $ cd myrepo
+ $ echo a > index.html
+ $ echo x > data.py
+ $ echo z > readme.txt
+ $ cat > webpage.sparse <<EOF
+ > [include]
+ > *.html
+ > EOF
+ $ cat > backend.sparse <<EOF
+ > [include]
+ > *.py
+ > EOF
+ $ hg ci -Aqm 'initial'
+ $ cd ..
+
+Verify local clone with a sparse profile works
+
+ $ hg clone --enable-profile webpage.sparse myrepo clone1
+ updating to branch default
+ warning: sparse profile 'webpage.sparse' not found in rev 000000000000 - ignoring it
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ cd clone1
+ $ ls
+ index.html
+ $ cd ..
+
+Verify local clone with include works
+
+ $ hg clone --include *.sparse myrepo clone2
+ updating to branch default
+ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ cd clone2
+ $ ls
+ backend.sparse
+ webpage.sparse
+ $ cd ..
+
+Verify local clone with exclude works
+
+ $ hg clone --exclude data.py myrepo clone3
+ updating to branch default
+ 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ cd clone3
+ $ ls
+ backend.sparse
+ index.html
+ readme.txt
+ webpage.sparse
+ $ cd ..
+
+Verify sparse clone profile over ssh works
+
+ $ hg clone -q --enable-profile webpage.sparse ssh://user@dummy/myrepo clone4
+ warning: sparse profile 'webpage.sparse' not found in rev 000000000000 - ignoring it
+ $ cd clone4
+ $ ls
+ index.html
+ $ cd ..
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-sparse-fsmonitor.t Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,44 @@
+This test doesn't yet work due to the way fsmonitor is integrated with the test runner
+
+ $ exit 80
+
+test sparse interaction with other extensions
+
+ $ hg init myrepo
+ $ cd myrepo
+ $ cat > .hg/hgrc <<EOF
+ > [extensions]
+ > sparse=
+ > strip=
+ > EOF
+
+Test fsmonitor integration (if available)
+TODO: make fully isolated integration test a la https://github.com/facebook/watchman/blob/master/tests/integration/WatchmanInstance.py
+(this one is using the systemwide watchman instance)
+
+ $ touch .watchmanconfig
+ $ echo "ignoredir1/" >> .hgignore
+ $ hg commit -Am ignoredir1
+ adding .hgignore
+ $ echo "ignoredir2/" >> .hgignore
+ $ hg commit -m ignoredir2
+
+ $ hg sparse --reset
+ $ hg sparse -I ignoredir1 -I ignoredir2 -I dir1
+
+ $ mkdir ignoredir1 ignoredir2 dir1
+ $ touch ignoredir1/file ignoredir2/file dir1/file
+
+Run status twice to compensate for a condition in fsmonitor where it will check
+ignored files the second time it runs, regardless of previous state (ask @sid0)
+ $ hg status --config extensions.fsmonitor=
+ ? dir1/file
+ $ hg status --config extensions.fsmonitor=
+ ? dir1/file
+
+Test that fsmonitor ignore hash check updates when .hgignore changes
+
+ $ hg up -q ".^"
+ $ hg status --config extensions.fsmonitor=
+ ? dir1/file
+ ? ignoredir2/file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-sparse-import.t Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,181 @@
+test sparse
+
+ $ hg init myrepo
+ $ cd myrepo
+ $ cat >> $HGRCPATH <<EOF
+ > [extensions]
+ > sparse=
+ > purge=
+ > strip=
+ > rebase=
+ > EOF
+
+ $ echo a > index.html
+ $ echo x > data.py
+ $ echo z > readme.txt
+ $ cat > base.sparse <<EOF
+ > [include]
+ > *.sparse
+ > EOF
+ $ hg ci -Aqm 'initial'
+ $ cat > webpage.sparse <<EOF
+ > %include base.sparse
+ > [include]
+ > *.html
+ > EOF
+ $ hg ci -Aqm 'initial'
+
+Import a rules file against a 'blank' sparse profile
+
+ $ cat > $TESTTMP/rules_to_import <<EOF
+ > [include]
+ > *.py
+ > EOF
+ $ hg debugsparse --import-rules $TESTTMP/rules_to_import
+ $ ls
+ data.py
+
+ $ hg debugsparse --reset
+ $ rm .hg/sparse
+
+ $ cat > $TESTTMP/rules_to_import <<EOF
+ > %include base.sparse
+ > [include]
+ > *.py
+ > EOF
+ $ hg debugsparse --import-rules $TESTTMP/rules_to_import
+ $ ls
+ base.sparse
+ data.py
+ webpage.sparse
+
+ $ hg debugsparse --reset
+ $ rm .hg/sparse
+
+Start against an existing profile; rules *already active* should be ignored
+
+ $ hg debugsparse --enable-profile webpage.sparse
+ $ hg debugsparse --include *.py
+ $ cat > $TESTTMP/rules_to_import <<EOF
+ > %include base.sparse
+ > [include]
+ > *.html
+ > *.txt
+ > [exclude]
+ > *.py
+ > EOF
+ $ hg debugsparse --import-rules $TESTTMP/rules_to_import
+ $ ls
+ base.sparse
+ index.html
+ readme.txt
+ webpage.sparse
+ $ cat .hg/sparse
+ %include webpage.sparse
+ [include]
+ *.py
+ *.txt
+ [exclude]
+ *.py
+
+ $ hg debugsparse --reset
+ $ rm .hg/sparse
+
+Same tests, with -Tjson enabled to output summaries
+
+ $ cat > $TESTTMP/rules_to_import <<EOF
+ > [include]
+ > *.py
+ > EOF
+ $ hg debugsparse --import-rules $TESTTMP/rules_to_import -Tjson
+ [
+ {
+ "exclude_rules_added": 0,
+ "files_added": 0,
+ "files_conflicting": 0,
+ "files_dropped": 4,
+ "include_rules_added": 1,
+ "profiles_added": 0
+ }
+ ]
+
+ $ hg debugsparse --reset
+ $ rm .hg/sparse
+
+ $ cat > $TESTTMP/rules_to_import <<EOF
+ > %include base.sparse
+ > [include]
+ > *.py
+ > EOF
+ $ hg debugsparse --import-rules $TESTTMP/rules_to_import -Tjson
+ [
+ {
+ "exclude_rules_added": 0,
+ "files_added": 0,
+ "files_conflicting": 0,
+ "files_dropped": 2,
+ "include_rules_added": 1,
+ "profiles_added": 1
+ }
+ ]
+
+ $ hg debugsparse --reset
+ $ rm .hg/sparse
+
+ $ hg debugsparse --enable-profile webpage.sparse
+ $ hg debugsparse --include *.py
+ $ cat > $TESTTMP/rules_to_import <<EOF
+ > %include base.sparse
+ > [include]
+ > *.html
+ > *.txt
+ > [exclude]
+ > *.py
+ > EOF
+ $ hg debugsparse --import-rules $TESTTMP/rules_to_import -Tjson
+ [
+ {
+ "exclude_rules_added": 1,
+ "files_added": 1,
+ "files_conflicting": 0,
+ "files_dropped": 1,
+ "include_rules_added": 1,
+ "profiles_added": 0
+ }
+ ]
+
+If importing results in no new rules being added, no refresh should take place!
+
+ $ cat > $TESTTMP/trap_sparse_refresh.py <<EOF
+ > from mercurial import error, sparse
+ > def extsetup(ui):
+ > def abort_refresh(*args, **kwargs):
+ > raise error.Abort('sparse._refresh called!')
+ > sparse.refreshwdir = abort_refresh
+ > EOF
+ $ cat >> $HGRCPATH <<EOF
+ > [extensions]
+ > trap_sparse_refresh=$TESTTMP/trap_sparse_refresh.py
+ > EOF
+ $ cat > $TESTTMP/rules_to_import <<EOF
+ > [include]
+ > *.py
+ > EOF
+ $ hg debugsparse --import-rules $TESTTMP/rules_to_import
+
+If an exception is raised during refresh, restore the existing rules again.
+
+ $ cat > $TESTTMP/rules_to_import <<EOF
+ > [exclude]
+ > *.html
+ > EOF
+ $ hg debugsparse --import-rules $TESTTMP/rules_to_import
+ abort: sparse._refresh called!
+ [255]
+ $ cat .hg/sparse
+ %include webpage.sparse
+ [include]
+ *.py
+ *.txt
+ [exclude]
+ *.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-sparse-merges.t Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,62 @@
+test merging things outside of the sparse checkout
+
+ $ hg init myrepo
+ $ cd myrepo
+ $ cat > .hg/hgrc <<EOF
+ > [extensions]
+ > sparse=
+ > EOF
+
+ $ echo foo > foo
+ $ echo bar > bar
+ $ hg add foo bar
+ $ hg commit -m initial
+
+ $ hg branch feature
+ marked working directory as branch feature
+ (branches are permanent and global, did you want a bookmark?)
+ $ echo bar2 >> bar
+ $ hg commit -m 'feature - bar2'
+
+ $ hg update -q default
+ $ hg debugsparse --exclude 'bar**'
+
+ $ hg merge feature
+ temporarily included 1 file(s) in the sparse checkout for merging
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+
+Verify bar was merged temporarily
+
+ $ ls
+ bar
+ foo
+ $ hg status
+ M bar
+
+Verify bar disappears automatically when the working copy becomes clean
+
+ $ hg commit -m "merged"
+ cleaned up 1 temporarily added file(s) from the sparse checkout
+ $ hg status
+ $ ls
+ foo
+
+ $ hg cat -r . bar
+ bar
+ bar2
+
+Test merging things outside of the sparse checkout that are not in the working
+copy
+
+ $ hg strip -q -r . --config extensions.strip=
+ $ hg up -q feature
+ $ touch branchonly
+ $ hg ci -Aqm 'add branchonly'
+
+ $ hg up -q default
+ $ hg debugsparse -X branchonly
+ $ hg merge feature
+ temporarily included 2 file(s) in the sparse checkout for merging
+ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-sparse-profiles.t Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,288 @@
+test sparse
+
+ $ hg init myrepo
+ $ cd myrepo
+ $ cat > .hg/hgrc <<EOF
+ > [extensions]
+ > sparse=
+ > purge=
+ > strip=
+ > rebase=
+ > EOF
+
+Config file without [section] is rejected
+
+ $ cat > bad.sparse <<EOF
+ > *.html
+ > EOF
+
+ $ hg debugsparse --import-rules bad.sparse
+ abort: sparse config entry outside of section: *.html
+ (add an [include] or [exclude] line to declare the entry type)
+ [255]
+ $ rm bad.sparse
+
+ $ echo a > index.html
+ $ echo x > data.py
+ $ echo z > readme.txt
+ $ cat > webpage.sparse <<EOF
+ > # frontend sparse profile
+ > [include]
+ > *.html
+ > EOF
+ $ cat > backend.sparse <<EOF
+ > # backend sparse profile
+ > [include]
+ > *.py
+ > EOF
+ $ hg ci -Aqm 'initial'
+
+ $ hg debugsparse --include '*.sparse'
+
+Verify enabling a single profile works
+
+ $ hg debugsparse --enable-profile webpage.sparse
+ $ ls
+ backend.sparse
+ index.html
+ webpage.sparse
+
+Verify enabling two profiles works
+
+ $ hg debugsparse --enable-profile backend.sparse
+ $ ls
+ backend.sparse
+ data.py
+ index.html
+ webpage.sparse
+
+Verify disabling a profile works
+
+ $ hg debugsparse --disable-profile webpage.sparse
+ $ ls
+ backend.sparse
+ data.py
+ webpage.sparse
+
+Verify that a profile is updated across multiple commits
+
+ $ cat > webpage.sparse <<EOF
+ > # frontend sparse profile
+ > [include]
+ > *.html
+ > EOF
+ $ cat > backend.sparse <<EOF
+ > # backend sparse profile
+ > [include]
+ > *.py
+ > *.txt
+ > EOF
+
+ $ echo foo >> data.py
+
+ $ hg ci -m 'edit profile'
+ $ ls
+ backend.sparse
+ data.py
+ readme.txt
+ webpage.sparse
+
+ $ hg up -q 0
+ $ ls
+ backend.sparse
+ data.py
+ webpage.sparse
+
+ $ hg up -q 1
+ $ ls
+ backend.sparse
+ data.py
+ readme.txt
+ webpage.sparse
+
+Introduce a conflicting .hgsparse change
+
+ $ hg up -q 0
+ $ cat > backend.sparse <<EOF
+ > # Different backend sparse profile
+ > [include]
+ > *.html
+ > EOF
+ $ echo bar >> data.py
+
+ $ hg ci -qAm "edit profile other"
+ $ ls
+ backend.sparse
+ index.html
+ webpage.sparse
+
+Verify conflicting merge pulls in the conflicting changes
+
+ $ hg merge 1
+ temporarily included 1 file(s) in the sparse checkout for merging
+ merging backend.sparse
+ merging data.py
+ warning: conflicts while merging backend.sparse! (edit, then use 'hg resolve --mark')
+ warning: conflicts while merging data.py! (edit, then use 'hg resolve --mark')
+ 0 files updated, 0 files merged, 0 files removed, 2 files unresolved
+ use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ [1]
+
+ $ rm *.orig
+ $ ls
+ backend.sparse
+ data.py
+ index.html
+ webpage.sparse
+
+Verify resolving the merge removes the temporarily unioned files
+
+ $ cat > backend.sparse <<EOF
+ > # backend sparse profile
+ > [include]
+ > *.html
+ > *.txt
+ > EOF
+ $ hg resolve -m backend.sparse
+
+ $ cat > data.py <<EOF
+ > x
+ > foo
+ > bar
+ > EOF
+ $ hg resolve -m data.py
+ (no more unresolved files)
+
+ $ hg ci -qAm "merge profiles"
+ $ ls
+ backend.sparse
+ index.html
+ readme.txt
+ webpage.sparse
+
+ $ hg cat -r . data.py
+ x
+ foo
+ bar
+
+Verify stripping refreshes dirstate
+
+ $ hg strip -q -r .
+ $ ls
+ backend.sparse
+ index.html
+ webpage.sparse
+
+Verify rebase conflicts pulls in the conflicting changes
+
+ $ hg up -q 1
+ $ ls
+ backend.sparse
+ data.py
+ readme.txt
+ webpage.sparse
+
+ $ hg rebase -d 2
+ rebasing 1:a2b1de640a62 "edit profile"
+ temporarily included 1 file(s) in the sparse checkout for merging
+ merging backend.sparse
+ merging data.py
+ warning: conflicts while merging backend.sparse! (edit, then use 'hg resolve --mark')
+ warning: conflicts while merging data.py! (edit, then use 'hg resolve --mark')
+ unresolved conflicts (see hg resolve, then hg rebase --continue)
+ [1]
+ $ rm *.orig
+ $ ls
+ backend.sparse
+ data.py
+ index.html
+ webpage.sparse
+
+Verify resolving conflict removes the temporary files
+
+ $ cat > backend.sparse <<EOF
+ > [include]
+ > *.html
+ > *.txt
+ > EOF
+ $ hg resolve -m backend.sparse
+
+ $ cat > data.py <<EOF
+ > x
+ > foo
+ > bar
+ > EOF
+ $ hg resolve -m data.py
+ (no more unresolved files)
+ continue: hg rebase --continue
+
+ $ hg rebase -q --continue
+ $ ls
+ backend.sparse
+ index.html
+ readme.txt
+ webpage.sparse
+
+ $ hg cat -r . data.py
+ x
+ foo
+ bar
+
+Test checking out a commit that does not contain the sparse profile. The
+warning message can be suppressed by setting missingwarning = false in
+[sparse] section of your config:
+
+ $ hg debugsparse --reset
+ $ hg rm *.sparse
+ $ hg commit -m "delete profiles"
+ $ hg up -q ".^"
+ $ hg debugsparse --enable-profile backend.sparse
+ $ ls
+ index.html
+ readme.txt
+ $ hg up tip | grep warning
+ warning: sparse profile 'backend.sparse' not found in rev bfcb76de99cc - ignoring it
+ [1]
+ $ ls
+ data.py
+ index.html
+ readme.txt
+ $ hg debugsparse --disable-profile backend.sparse | grep warning
+ warning: sparse profile 'backend.sparse' not found in rev bfcb76de99cc - ignoring it
+ [1]
+ $ cat >> .hg/hgrc <<EOF
+ > [sparse]
+ > missingwarning = false
+ > EOF
+ $ hg debugsparse --enable-profile backend.sparse
+
+ $ cd ..
+
+#if unix-permissions
+
+Test file permissions changing across a sparse profile change
+ $ hg init sparseperm
+ $ cd sparseperm
+ $ cat > .hg/hgrc <<EOF
+ > [extensions]
+ > sparse=
+ > EOF
+ $ touch a b
+ $ cat > .hgsparse <<EOF
+ > [include]
+ > a
+ > EOF
+ $ hg commit -Aqm 'initial'
+ $ chmod a+x b
+ $ hg commit -qm 'make executable'
+ $ cat >> .hgsparse <<EOF
+ > b
+ > EOF
+ $ hg commit -qm 'update profile'
+ $ hg up -q 0
+ $ hg debugsparse --enable-profile .hgsparse
+ $ hg up -q 2
+ $ ls -l b
+ -rwxr-xr-x* b (glob)
+
+#endif
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-sparse-requirement.t Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,65 @@
+ $ hg init repo
+ $ cd repo
+
+ $ touch a.html b.html c.py d.py
+
+ $ cat > frontend.sparse << EOF
+ > [include]
+ > *.html
+ > EOF
+
+ $ hg -q commit -A -m initial
+
+ $ echo 1 > a.html
+ $ echo 1 > c.py
+ $ hg commit -m 'commit 1'
+
+Enable sparse profile
+
+ $ cat .hg/requires
+ dotencode
+ fncache
+ generaldelta
+ revlogv1
+ store
+
+ $ hg debugsparse --config extensions.sparse= --enable-profile frontend.sparse
+ $ ls
+ a.html
+ b.html
+
+Requirement for sparse added when sparse is enabled
+
+ $ cat .hg/requires
+ dotencode
+ exp-sparse
+ fncache
+ generaldelta
+ revlogv1
+ store
+
+Client without sparse enabled reacts properly
+
+ $ hg files
+ abort: repository is using sparse feature but sparse is not enabled; enable the "sparse" extensions to access!
+ [255]
+
+Requirement for sparse is removed when sparse is disabled
+
+ $ hg debugsparse --reset --config extensions.sparse=
+
+ $ cat .hg/requires
+ dotencode
+ fncache
+ generaldelta
+ revlogv1
+ store
+
+And client without sparse can access
+
+ $ hg files
+ a.html
+ b.html
+ c.py
+ d.py
+ frontend.sparse
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-sparse-verbose-json.t Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,82 @@
+test sparse with --verbose and -T json
+
+ $ hg init myrepo
+ $ cd myrepo
+ $ cat > .hg/hgrc <<EOF
+ > [extensions]
+ > sparse=
+ > strip=
+ > EOF
+
+ $ echo a > show
+ $ echo x > hide
+ $ hg ci -Aqm 'initial'
+
+ $ echo b > show
+ $ echo y > hide
+ $ echo aa > show2
+ $ echo xx > hide2
+ $ hg ci -Aqm 'two'
+
+Verify basic --include and --reset
+
+ $ hg up -q 0
+ $ hg debugsparse --include 'hide' -Tjson
+ [
+ {
+ "exclude_rules_added": 0,
+ "files_added": 0,
+ "files_conflicting": 0,
+ "files_dropped": 1,
+ "include_rules_added": 1,
+ "profiles_added": 0
+ }
+ ]
+ $ hg debugsparse --clear-rules
+ $ hg debugsparse --include 'hide' --verbose
+ removing show
+ Profiles changed: 0
+ Include rules changed: 1
+ Exclude rules changed: 0
+
+ $ hg debugsparse --reset -Tjson
+ [
+ {
+ "exclude_rules_added": 0,
+ "files_added": 1,
+ "files_conflicting": 0,
+ "files_dropped": 0,
+ "include_rules_added": -1,
+ "profiles_added": 0
+ }
+ ]
+ $ hg debugsparse --include 'hide'
+ $ hg debugsparse --reset --verbose
+ getting show
+ Profiles changed: 0
+ Include rules changed: -1
+ Exclude rules changed: 0
+
+Verifying that problematic files still allow us to see the deltas when forcing:
+
+ $ hg debugsparse --include 'show*'
+ $ touch hide
+ $ hg debugsparse --delete 'show*' --force -Tjson
+ pending changes to 'hide'
+ [
+ {
+ "exclude_rules_added": 0,
+ "files_added": 0,
+ "files_conflicting": 1,
+ "files_dropped": 0,
+ "include_rules_added": -1,
+ "profiles_added": 0
+ }
+ ]
+ $ hg debugsparse --include 'show*' --force
+ pending changes to 'hide'
+ $ hg debugsparse --delete 'show*' --force --verbose
+ pending changes to 'hide'
+ Profiles changed: 0
+ Include rules changed: -1
+ Exclude rules changed: 0
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-sparse.t Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,370 @@
+test sparse
+
+ $ hg init myrepo
+ $ cd myrepo
+ $ cat > .hg/hgrc <<EOF
+ > [extensions]
+ > sparse=
+ > strip=
+ > EOF
+
+ $ echo a > show
+ $ echo x > hide
+ $ hg ci -Aqm 'initial'
+
+ $ echo b > show
+ $ echo y > hide
+ $ echo aa > show2
+ $ echo xx > hide2
+ $ hg ci -Aqm 'two'
+
+Verify basic --include
+
+ $ hg up -q 0
+ $ hg debugsparse --include 'hide'
+ $ ls
+ hide
+
+Absolute paths outside the repo should just be rejected
+
+#if no-windows
+ $ hg debugsparse --include /foo/bar
+ warning: paths cannot start with /, ignoring: ['/foo/bar']
+ $ hg debugsparse --include '$TESTTMP/myrepo/hide'
+
+ $ hg debugsparse --include '/root'
+ warning: paths cannot start with /, ignoring: ['/root']
+#else
+TODO: See if this can be made to fail the same way as on Unix
+ $ hg debugsparse --include /c/foo/bar
+ abort: c:/foo/bar not under root '$TESTTMP/myrepo' (glob)
+ [255]
+ $ hg debugsparse --include '$TESTTMP/myrepo/hide'
+
+ $ hg debugsparse --include '/c/root'
+ abort: c:/root not under root '$TESTTMP/myrepo' (glob)
+ [255]
+#endif
+
+Verify committing while sparse includes other files
+
+ $ echo z > hide
+ $ hg ci -Aqm 'edit hide'
+ $ ls
+ hide
+ $ hg manifest
+ hide
+ show
+
+Verify --reset brings files back
+
+ $ hg debugsparse --reset
+ $ ls
+ hide
+ show
+ $ cat hide
+ z
+ $ cat show
+ a
+
+Verify 'hg debugsparse' default output
+
+ $ hg up -q null
+ $ hg debugsparse --include 'show*'
+
+ $ hg debugsparse
+ [include]
+ show*
+
+Verify update only writes included files
+
+ $ hg up -q 0
+ $ ls
+ show
+
+ $ hg up -q 1
+ $ ls
+ show
+ show2
+
+Verify status only shows included files
+
+ $ touch hide
+ $ touch hide3
+ $ echo c > show
+ $ hg status
+ M show
+
+Adding an excluded file should fail
+
+ $ hg add hide3
+ abort: cannot add 'hide3' - it is outside the sparse checkout
+ (include file with `hg debugsparse --include <pattern>` or use `hg add -s <file>` to include file directory while adding)
+ [255]
+
+Verify deleting sparseness while a file has changes fails
+
+ $ hg debugsparse --delete 'show*'
+ pending changes to 'hide'
+ abort: cannot change sparseness due to pending changes (delete the files or use --force to bring them back dirty)
+ [255]
+
+Verify deleting sparseness with --force brings back files
+
+ $ hg debugsparse --delete -f 'show*'
+ pending changes to 'hide'
+ $ ls
+ hide
+ hide2
+ hide3
+ show
+ show2
+ $ hg st
+ M hide
+ M show
+ ? hide3
+
+Verify editing sparseness fails if pending changes
+
+ $ hg debugsparse --include 'show*'
+ pending changes to 'hide'
+ abort: could not update sparseness due to pending changes
+ [255]
+
+Verify adding sparseness hides files
+
+ $ hg debugsparse --exclude -f 'hide*'
+ pending changes to 'hide'
+ $ ls
+ hide
+ hide3
+ show
+ show2
+ $ hg st
+ M show
+
+ $ hg up -qC .
+TODO: add an option to purge to also purge files outside the sparse config?
+ $ hg purge --all --config extensions.purge=
+ $ ls
+ hide
+ hide3
+ show
+ show2
+For now, manually remove the files
+ $ rm hide hide3
+
+Verify rebase temporarily includes excluded files
+
+ $ hg rebase -d 1 -r 2 --config extensions.rebase=
+ rebasing 2:b91df4f39e75 "edit hide" (tip)
+ temporarily included 1 file(s) in the sparse checkout for merging
+ merging hide
+ warning: conflicts while merging hide! (edit, then use 'hg resolve --mark')
+ unresolved conflicts (see hg resolve, then hg rebase --continue)
+ [1]
+
+ $ hg debugsparse
+ [exclude]
+ hide*
+
+ Temporarily Included Files (for merge/rebase):
+ hide
+
+ $ cat hide
+ <<<<<<< dest: 39278f7c08a9 - test: two
+ y
+ =======
+ z
+ >>>>>>> source: b91df4f39e75 - test: edit hide
+
+Verify aborting a rebase cleans up temporary files
+
+ $ hg rebase --abort --config extensions.rebase=
+ cleaned up 1 temporarily added file(s) from the sparse checkout
+ rebase aborted
+ $ rm hide.orig
+
+ $ ls
+ show
+ show2
+
+Verify merge fails if merging excluded files
+
+ $ hg up -q 1
+ $ hg merge -r 2
+ temporarily included 1 file(s) in the sparse checkout for merging
+ merging hide
+ warning: conflicts while merging hide! (edit, then use 'hg resolve --mark')
+ 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+ use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+ [1]
+ $ hg debugsparse
+ [exclude]
+ hide*
+
+ Temporarily Included Files (for merge/rebase):
+ hide
+
+ $ hg up -C .
+ cleaned up 1 temporarily added file(s) from the sparse checkout
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg debugsparse
+ [exclude]
+ hide*
+
+
+Verify strip -k resets dirstate correctly
+
+ $ hg status
+ $ hg debugsparse
+ [exclude]
+ hide*
+
+ $ hg log -r . -T '{rev}\n' --stat
+ 1
+ hide | 2 +-
+ hide2 | 1 +
+ show | 2 +-
+ show2 | 1 +
+ 4 files changed, 4 insertions(+), 2 deletions(-)
+
+ $ hg strip -r . -k
+ saved backup bundle to $TESTTMP/myrepo/.hg/strip-backup/39278f7c08a9-ce59e002-backup.hg (glob)
+ $ hg status
+ M show
+ ? show2
+
+Verify rebase succeeds if all changed files are in sparse checkout
+
+ $ hg commit -Aqm "add show2"
+ $ hg rebase -d 1 --config extensions.rebase=
+ rebasing 2:bdde55290160 "add show2" (tip)
+ saved backup bundle to $TESTTMP/myrepo/.hg/strip-backup/bdde55290160-216ed9c6-rebase.hg (glob)
+
+Verify log --sparse only shows commits that affect the sparse checkout
+
+ $ hg log -T '{rev} '
+ 2 1 0 (no-eol)
+ $ hg log --sparse -T '{rev} '
+ 2 0 (no-eol)
+
+Test status on a file in a subdir
+
+ $ mkdir -p dir1/dir2
+ $ touch dir1/dir2/file
+ $ hg debugsparse -I dir1/dir2
+ $ hg status
+ ? dir1/dir2/file
+
+Test that add -s adds dirs to sparse profile
+
+ $ hg debugsparse --reset
+ $ hg debugsparse --include empty
+ $ hg debugsparse
+ [include]
+ empty
+
+
+ $ mkdir add
+ $ touch add/foo
+ $ touch add/bar
+ $ hg add add/foo
+ abort: cannot add 'add/foo' - it is outside the sparse checkout
+ (include file with `hg debugsparse --include <pattern>` or use `hg add -s <file>` to include file directory while adding)
+ [255]
+ $ hg add -s add/foo
+ $ hg st
+ A add/foo
+ ? add/bar
+ $ hg debugsparse
+ [include]
+ add
+ empty
+
+ $ hg add -s add/*
+ add/foo already tracked!
+ $ hg st
+ A add/bar
+ A add/foo
+ $ hg debugsparse
+ [include]
+ add
+ empty
+
+
+ $ cd ..
+
+Test non-sparse repos work while sparse is loaded
+ $ hg init sparserepo
+ $ hg init nonsparserepo
+ $ cd sparserepo
+ $ cat > .hg/hgrc <<EOF
+ > [extensions]
+ > sparse=
+ > EOF
+ $ cd ../nonsparserepo
+ $ echo x > x && hg add x && hg commit -qAm x
+ $ cd ../sparserepo
+ $ hg clone ../nonsparserepo ../nonsparserepo2
+ updating to branch default
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Test debugrebuilddirstate
+ $ cd ../sparserepo
+ $ touch included
+ $ touch excluded
+ $ hg add included excluded
+ $ hg commit -m 'a commit' -q
+ $ cp .hg/dirstate ../dirstateboth
+ $ hg debugsparse -X excluded
+ $ cp ../dirstateboth .hg/dirstate
+ $ hg debugrebuilddirstate
+ $ hg debugdirstate
+ n 0 -1 unset included
+
+Test debugdirstate --minimal where file is in the parent manifest but not the
+dirstate
+ $ hg debugsparse -X included
+ $ hg debugdirstate
+ $ cp .hg/dirstate ../dirstateallexcluded
+ $ hg debugsparse --reset
+ $ hg debugsparse -X excluded
+ $ cp ../dirstateallexcluded .hg/dirstate
+ $ touch includedadded
+ $ hg add includedadded
+ $ hg debugdirstate --nodates
+ a 0 -1 unset includedadded
+ $ hg debugrebuilddirstate --minimal
+ $ hg debugdirstate --nodates
+ n 0 -1 unset included
+ a 0 -1 * includedadded (glob)
+
+Test debugdirstate --minimal where a file is not in parent manifest
+but in the dirstate. This should take into account excluded files in the
+manifest
+ $ cp ../dirstateboth .hg/dirstate
+ $ touch includedadded
+ $ hg add includedadded
+ $ touch excludednomanifest
+ $ hg add excludednomanifest
+ $ cp .hg/dirstate ../moreexcluded
+ $ hg forget excludednomanifest
+ $ rm excludednomanifest
+ $ hg debugsparse -X excludednomanifest
+ $ cp ../moreexcluded .hg/dirstate
+ $ hg manifest
+ excluded
+ included
+We have files in the dirstate that are included and excluded. Some are in the
+manifest and some are not.
+ $ hg debugdirstate --nodates
+ n 644 0 * excluded (glob)
+ a 0 -1 * excludednomanifest (glob)
+ n 644 0 * included (glob)
+ a 0 -1 * includedadded (glob)
+ $ hg debugrebuilddirstate --minimal
+ $ hg debugdirstate --nodates
+ n 644 0 * included (glob)
+ a 0 -1 * includedadded (glob)
+
--- a/tests/test-ssh-bundle1.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-ssh-bundle1.t Wed Jul 19 07:51:41 2017 -0500
@@ -44,21 +44,21 @@
repo not found error
- $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/nonexistent local
+ $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/nonexistent local
remote: abort: repository nonexistent not found!
abort: no suitable response from remote hg!
[255]
non-existent absolute path
- $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy//`pwd`/nonexistent local
+ $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy//`pwd`/nonexistent local
remote: abort: repository /$TESTTMP/nonexistent not found!
abort: no suitable response from remote hg!
[255]
clone remote via stream
- $ hg clone -e "python \"$TESTDIR/dummyssh\"" --uncompressed ssh://user@dummy/remote local-stream
+ $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" --uncompressed ssh://user@dummy/remote local-stream
streaming all changes
4 files to transfer, 602 bytes of data
transferred 602 bytes in * seconds (*) (glob)
@@ -80,7 +80,7 @@
clone bookmarks via stream
$ hg -R local-stream book mybook
- $ hg clone -e "python \"$TESTDIR/dummyssh\"" --uncompressed ssh://user@dummy/local-stream stream2
+ $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" --uncompressed ssh://user@dummy/local-stream stream2
streaming all changes
4 files to transfer, 602 bytes of data
transferred 602 bytes in * seconds (*) (glob)
@@ -96,7 +96,7 @@
clone remote via pull
- $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local
+ $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local
requesting all changes
adding changesets
adding manifests
@@ -123,14 +123,14 @@
$ hg paths
default = ssh://user@dummy/remote
- $ hg pull -e "python \"$TESTDIR/dummyssh\""
+ $ hg pull -e "\"$PYTHON\" \"$TESTDIR/dummyssh\""
pulling from ssh://user@dummy/remote
searching for changes
no changes found
pull from wrong ssh URL
- $ hg pull -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/doesnotexist
+ $ hg pull -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/doesnotexist
pulling from ssh://user@dummy/doesnotexist
remote: abort: repository doesnotexist not found!
abort: no suitable response from remote hg!
@@ -145,7 +145,7 @@
$ echo "default-push = ssh://user@dummy/remote" >> .hg/hgrc
$ echo "[ui]" >> .hg/hgrc
- $ echo "ssh = python \"$TESTDIR/dummyssh\"" >> .hg/hgrc
+ $ echo "ssh = \"$PYTHON\" \"$TESTDIR/dummyssh\"" >> .hg/hgrc
find outgoing
@@ -162,7 +162,7 @@
find incoming on the remote side
- $ hg incoming -R ../remote -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/local
+ $ hg incoming -R ../remote -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/local
comparing with ssh://user@dummy/local
searching for changes
changeset: 3:a28a9d1a809c
@@ -175,7 +175,7 @@
find incoming on the remote side (using absolute path)
- $ hg incoming -R ../remote -e "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/`pwd`"
+ $ hg incoming -R ../remote -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/`pwd`"
comparing with ssh://user@dummy/$TESTTMP/local
searching for changes
changeset: 3:a28a9d1a809c
@@ -222,7 +222,7 @@
test pushkeys and bookmarks
$ cd ../local
- $ hg debugpushkey --config ui.ssh="python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote namespaces
+ $ hg debugpushkey --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote namespaces
bookmarks
namespaces
phases
@@ -237,7 +237,7 @@
no changes found
exporting bookmark foo
[1]
- $ hg debugpushkey --config ui.ssh="python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote bookmarks
+ $ hg debugpushkey --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote bookmarks
foo 1160648e36cec0054048a7edc4110c6f84fde594
$ hg book -f foo
$ hg push --traceback
@@ -272,7 +272,7 @@
> EOF
$ echo '[hooks]' >> ../remote/.hg/hgrc
- $ echo "changegroup.stdout = python $TESTTMP/badhook" >> ../remote/.hg/hgrc
+ $ echo "changegroup.stdout = \"$PYTHON\" $TESTTMP/badhook" >> ../remote/.hg/hgrc
$ echo r > r
$ hg ci -A -m z r
@@ -307,7 +307,7 @@
$ hg -R ../remote bookmark test
$ hg -R ../remote bookmarks
* test 4:6c0482d977a3
- $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local-bookmarks
+ $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local-bookmarks
requesting all changes
adding changesets
adding manifests
@@ -334,21 +334,21 @@
Test remote paths with spaces (issue2983):
- $ hg init --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
+ $ hg init --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
$ touch "$TESTTMP/a repo/test"
$ hg -R 'a repo' commit -A -m "test"
adding test
$ hg -R 'a repo' tag tag
- $ hg id --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
+ $ hg id --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
73649e48688a
- $ hg id --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo#noNoNO"
+ $ hg id --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo#noNoNO"
abort: unknown revision 'noNoNO'!
[255]
Test (non-)escaping of remote paths with spaces when cloning (issue3145):
- $ hg clone --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
+ $ hg clone --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
destination directory: a repo
abort: destination 'a repo' is not empty
[255]
@@ -363,7 +363,7 @@
> export SSH_ORIGINAL_COMMAND
> PYTHONPATH="$PYTHONPATH"
> export PYTHONPATH
- > python "$TESTDIR/../contrib/hg-ssh" "$TESTTMP/a repo"
+ > "$PYTHON" "$TESTDIR/../contrib/hg-ssh" "$TESTTMP/a repo"
> EOF
$ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a repo"
@@ -379,7 +379,7 @@
abort: no suitable response from remote hg!
[255]
- $ SSH_ORIGINAL_COMMAND="'hg' serve -R 'a'repo' --stdio" python "$TESTDIR/../contrib/hg-ssh"
+ $ SSH_ORIGINAL_COMMAND="'hg' serve -R 'a'repo' --stdio" $PYTHON "$TESTDIR/../contrib/hg-ssh"
Illegal command "'hg' serve -R 'a'repo' --stdio": No closing quotation
[255]
@@ -391,7 +391,7 @@
> export SSH_ORIGINAL_COMMAND
> PYTHONPATH="$PYTHONPATH"
> export PYTHONPATH
- > python "$TESTDIR/../contrib/hg-ssh" --read-only "$TESTTMP/remote"
+ > "$PYTHON" "$TESTDIR/../contrib/hg-ssh" --read-only "$TESTTMP/remote"
> EOF
$ hg clone --ssh "sh ssh.sh" "ssh://user@dummy/$TESTTMP/remote" read-only-local
@@ -440,7 +440,7 @@
> [paths]
> default-push = ssh://user@dummy/remote
> [ui]
- > ssh = python "$TESTDIR/dummyssh"
+ > ssh = "$PYTHON" "$TESTDIR/dummyssh"
> [extensions]
> localwrite = localwrite.py
> EOF
@@ -461,7 +461,7 @@
$ hg pull --debug ssh://user@dummy/remote
pulling from ssh://user@dummy/remote
- running python ".*/dummyssh" user@dummy ('|")hg -R remote serve --stdio('|") (re)
+ running .* ".*/dummyssh" user@dummy ('|")hg -R remote serve --stdio('|") (re)
sending hello command
sending between command
remote: 355
@@ -526,11 +526,11 @@
$ echo "pretxnchangegroup.fail = python:$TESTTMP/failhook:hook" >> remote/.hg/hgrc
- $ hg -q --config ui.ssh="python $TESTDIR/dummyssh" clone ssh://user@dummy/remote hookout
+ $ hg -q --config ui.ssh="\"$PYTHON\" $TESTDIR/dummyssh" clone ssh://user@dummy/remote hookout
$ cd hookout
$ touch hookfailure
$ hg -q commit -A -m 'remote hook failure'
- $ hg --config ui.ssh="python $TESTDIR/dummyssh" push
+ $ hg --config ui.ssh="\"$PYTHON\" $TESTDIR/dummyssh" push
pushing to ssh://user@dummy/remote
searching for changes
remote: adding changesets
@@ -551,7 +551,7 @@
> [extensions]
> crash = ${TESTDIR}/crashgetbundler.py
> EOF
- $ hg --config ui.ssh="python $TESTDIR/dummyssh" pull
+ $ hg --config ui.ssh="\"$PYTHON\" $TESTDIR/dummyssh" pull
pulling from ssh://user@dummy/remote
searching for changes
adding changesets
--- a/tests/test-ssh-clone-r.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-ssh-clone-r.t Wed Jul 19 07:51:41 2017 -0500
@@ -17,7 +17,7 @@
clone remote via stream
$ for i in 0 1 2 3 4 5 6 7 8; do
- > hg clone -e "python \"$TESTDIR/dummyssh\"" --uncompressed -r "$i" ssh://user@dummy/remote test-"$i"
+ > hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" --uncompressed -r "$i" ssh://user@dummy/remote test-"$i"
> if cd test-"$i"; then
> hg verify
> cd ..
@@ -139,7 +139,7 @@
4 files, 9 changesets, 7 total revisions
$ cd ..
$ cd test-1
- $ hg pull -e "python \"$TESTDIR/dummyssh\"" -r 4 ssh://user@dummy/remote
+ $ hg pull -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" -r 4 ssh://user@dummy/remote
pulling from ssh://user@dummy/remote
searching for changes
adding changesets
@@ -153,7 +153,7 @@
crosschecking files in changesets and manifests
checking files
1 files, 3 changesets, 2 total revisions
- $ hg pull -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote
+ $ hg pull -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote
pulling from ssh://user@dummy/remote
searching for changes
adding changesets
@@ -163,7 +163,7 @@
(run 'hg update' to get a working copy)
$ cd ..
$ cd test-2
- $ hg pull -e "python \"$TESTDIR/dummyssh\"" -r 5 ssh://user@dummy/remote
+ $ hg pull -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" -r 5 ssh://user@dummy/remote
pulling from ssh://user@dummy/remote
searching for changes
adding changesets
@@ -177,7 +177,7 @@
crosschecking files in changesets and manifests
checking files
1 files, 5 changesets, 3 total revisions
- $ hg pull -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote
+ $ hg pull -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote
pulling from ssh://user@dummy/remote
searching for changes
adding changesets
--- a/tests/test-ssh.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-ssh.t Wed Jul 19 07:51:41 2017 -0500
@@ -38,21 +38,21 @@
repo not found error
- $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/nonexistent local
+ $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/nonexistent local
remote: abort: repository nonexistent not found!
abort: no suitable response from remote hg!
[255]
non-existent absolute path
- $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/`pwd`/nonexistent local
+ $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/`pwd`/nonexistent local
remote: abort: repository $TESTTMP/nonexistent not found!
abort: no suitable response from remote hg!
[255]
clone remote via stream
- $ hg clone -e "python \"$TESTDIR/dummyssh\"" --uncompressed ssh://user@dummy/remote local-stream
+ $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" --uncompressed ssh://user@dummy/remote local-stream
streaming all changes
4 files to transfer, 602 bytes of data
transferred 602 bytes in * seconds (*) (glob)
@@ -74,7 +74,7 @@
clone bookmarks via stream
$ hg -R local-stream book mybook
- $ hg clone -e "python \"$TESTDIR/dummyssh\"" --uncompressed ssh://user@dummy/local-stream stream2
+ $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" --uncompressed ssh://user@dummy/local-stream stream2
streaming all changes
4 files to transfer, 602 bytes of data
transferred 602 bytes in * seconds (*) (glob)
@@ -90,7 +90,7 @@
clone remote via pull
- $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local
+ $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local
requesting all changes
adding changesets
adding manifests
@@ -117,14 +117,14 @@
$ hg paths
default = ssh://user@dummy/remote
- $ hg pull -e "python \"$TESTDIR/dummyssh\""
+ $ hg pull -e "\"$PYTHON\" \"$TESTDIR/dummyssh\""
pulling from ssh://user@dummy/remote
searching for changes
no changes found
pull from wrong ssh URL
- $ hg pull -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/doesnotexist
+ $ hg pull -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/doesnotexist
pulling from ssh://user@dummy/doesnotexist
remote: abort: repository doesnotexist not found!
abort: no suitable response from remote hg!
@@ -139,7 +139,7 @@
$ echo "default-push = ssh://user@dummy/remote" >> .hg/hgrc
$ echo "[ui]" >> .hg/hgrc
- $ echo "ssh = python \"$TESTDIR/dummyssh\"" >> .hg/hgrc
+ $ echo "ssh = \"$PYTHON\" \"$TESTDIR/dummyssh\"" >> .hg/hgrc
find outgoing
@@ -156,7 +156,7 @@
find incoming on the remote side
- $ hg incoming -R ../remote -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/local
+ $ hg incoming -R ../remote -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/local
comparing with ssh://user@dummy/local
searching for changes
changeset: 3:a28a9d1a809c
@@ -169,7 +169,7 @@
find incoming on the remote side (using absolute path)
- $ hg incoming -R ../remote -e "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/`pwd`"
+ $ hg incoming -R ../remote -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/`pwd`"
comparing with ssh://user@dummy/$TESTTMP/local
searching for changes
changeset: 3:a28a9d1a809c
@@ -216,7 +216,7 @@
test pushkeys and bookmarks
$ cd ../local
- $ hg debugpushkey --config ui.ssh="python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote namespaces
+ $ hg debugpushkey --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote namespaces
bookmarks
namespaces
phases
@@ -231,7 +231,7 @@
no changes found
exporting bookmark foo
[1]
- $ hg debugpushkey --config ui.ssh="python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote bookmarks
+ $ hg debugpushkey --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote bookmarks
foo 1160648e36cec0054048a7edc4110c6f84fde594
$ hg book -f foo
$ hg push --traceback
@@ -273,7 +273,7 @@
$ cat <<EOF >> ../remote/.hg/hgrc
> [hooks]
- > changegroup.stdout = python $TESTTMP/badhook
+ > changegroup.stdout = $PYTHON $TESTTMP/badhook
> changegroup.pystdout = python:$TESTTMP/badpyhook.py:hook
> EOF
$ echo r > r
@@ -311,7 +311,7 @@
$ hg -R ../remote bookmark test
$ hg -R ../remote bookmarks
* test 4:6c0482d977a3
- $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local-bookmarks
+ $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local-bookmarks
requesting all changes
adding changesets
adding manifests
@@ -338,21 +338,21 @@
Test remote paths with spaces (issue2983):
- $ hg init --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
+ $ hg init --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
$ touch "$TESTTMP/a repo/test"
$ hg -R 'a repo' commit -A -m "test"
adding test
$ hg -R 'a repo' tag tag
- $ hg id --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
+ $ hg id --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
73649e48688a
- $ hg id --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo#noNoNO"
+ $ hg id --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo#noNoNO"
abort: unknown revision 'noNoNO'!
[255]
Test (non-)escaping of remote paths with spaces when cloning (issue3145):
- $ hg clone --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
+ $ hg clone --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
destination directory: a repo
abort: destination 'a repo' is not empty
[255]
@@ -380,7 +380,7 @@
> export SSH_ORIGINAL_COMMAND
> PYTHONPATH="$PYTHONPATH"
> export PYTHONPATH
- > python "$TESTDIR/../contrib/hg-ssh" "$TESTTMP/a repo"
+ > "$PYTHON" "$TESTDIR/../contrib/hg-ssh" "$TESTTMP/a repo"
> EOF
$ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a repo"
@@ -396,7 +396,7 @@
abort: no suitable response from remote hg!
[255]
- $ SSH_ORIGINAL_COMMAND="'hg' -R 'a'repo' serve --stdio" python "$TESTDIR/../contrib/hg-ssh"
+ $ SSH_ORIGINAL_COMMAND="'hg' -R 'a'repo' serve --stdio" $PYTHON "$TESTDIR/../contrib/hg-ssh"
Illegal command "'hg' -R 'a'repo' serve --stdio": No closing quotation
[255]
@@ -408,7 +408,7 @@
> export SSH_ORIGINAL_COMMAND
> PYTHONPATH="$PYTHONPATH"
> export PYTHONPATH
- > python "$TESTDIR/../contrib/hg-ssh" --read-only "$TESTTMP/remote"
+ > "$PYTHON" "$TESTDIR/../contrib/hg-ssh" --read-only "$TESTTMP/remote"
> EOF
$ hg clone --ssh "sh ssh.sh" "ssh://user@dummy/$TESTTMP/remote" read-only-local
@@ -455,7 +455,7 @@
> [paths]
> default-push = ssh://user@dummy/remote
> [ui]
- > ssh = python "$TESTDIR/dummyssh"
+ > ssh = "$PYTHON" "$TESTDIR/dummyssh"
> [extensions]
> localwrite = localwrite.py
> EOF
@@ -477,7 +477,7 @@
$ hg pull --debug ssh://user@dummy/remote
pulling from ssh://user@dummy/remote
- running python ".*/dummyssh" user@dummy ('|")hg -R remote serve --stdio('|") (re)
+ running .* ".*/dummyssh" user@dummy ('|")hg -R remote serve --stdio('|") (re)
sending hello command
sending between command
remote: 355
@@ -543,11 +543,11 @@
$ echo "pretxnchangegroup.fail = python:$TESTTMP/failhook:hook" >> remote/.hg/hgrc
- $ hg -q --config ui.ssh="python $TESTDIR/dummyssh" clone ssh://user@dummy/remote hookout
+ $ hg -q --config ui.ssh="\"$PYTHON\" $TESTDIR/dummyssh" clone ssh://user@dummy/remote hookout
$ cd hookout
$ touch hookfailure
$ hg -q commit -A -m 'remote hook failure'
- $ hg --config ui.ssh="python $TESTDIR/dummyssh" push
+ $ hg --config ui.ssh="\"$PYTHON\" $TESTDIR/dummyssh" push
pushing to ssh://user@dummy/remote
searching for changes
remote: adding changesets
@@ -569,7 +569,7 @@
> [extensions]
> crash = ${TESTDIR}/crashgetbundler.py
> EOF
- $ hg --config ui.ssh="python $TESTDIR/dummyssh" pull
+ $ hg --config ui.ssh="\"$PYTHON\" $TESTDIR/dummyssh" pull
pulling from ssh://user@dummy/remote
searching for changes
remote: abort: this is an exercise
--- a/tests/test-static-http.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-static-http.t Wed Jul 19 07:51:41 2017 -0500
@@ -9,7 +9,7 @@
This server doesn't do range requests so it's basically only good for
one pull
- $ python "$TESTDIR/dumbhttp.py" -p $HGPORT --pid dumb.pid
+ $ $PYTHON "$TESTDIR/dumbhttp.py" -p $HGPORT --pid dumb.pid
$ cat dumb.pid >> $DAEMON_PIDS
$ hg init remote
$ cd remote
--- a/tests/test-status-rev.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-status-rev.t Wed Jul 19 07:51:41 2017 -0500
@@ -5,7 +5,7 @@
First commit
- $ python $TESTDIR/generate-working-copy-states.py state 2 1
+ $ $PYTHON $TESTDIR/generate-working-copy-states.py state 2 1
$ hg addremove --similarity 0
adding content1_content1_content1-tracked
adding content1_content1_content1-untracked
@@ -31,7 +31,7 @@
Second commit
- $ python $TESTDIR/generate-working-copy-states.py state 2 2
+ $ $PYTHON $TESTDIR/generate-working-copy-states.py state 2 2
$ hg addremove --similarity 0
removing content1_missing_content1-tracked
removing content1_missing_content1-untracked
@@ -49,7 +49,7 @@
Working copy
- $ python $TESTDIR/generate-working-copy-states.py state 2 wc
+ $ $PYTHON $TESTDIR/generate-working-copy-states.py state 2 wc
$ hg addremove --similarity 0
adding content1_missing_content1-tracked
adding content1_missing_content1-untracked
@@ -81,13 +81,10 @@
$ hg status -A --rev 1 'glob:missing_*_missing-tracked'
! missing_content2_missing-tracked
! missing_missing_missing-tracked
-#if windows
+
$ hg status -A --rev 1 'glob:missing_missing_missing-untracked'
- missing_missing_missing-untracked: The system cannot find the file specified
-#else
- $ hg status -A --rev 1 'glob:missing_missing_missing-untracked'
- missing_missing_missing-untracked: No such file or directory
-#endif
+ missing_missing_missing-untracked: The system cannot find the file specified (windows !)
+ missing_missing_missing-untracked: No such file or directory (no-windows !)
Status between first and second commit. Should ignore dirstate status.
--- a/tests/test-status.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-status.t Wed Jul 19 07:51:41 2017 -0500
@@ -107,6 +107,29 @@
? a/in_a
? b/in_b
+tweaking defaults works
+ $ hg status --cwd a --config ui.tweakdefaults=yes
+ ? 1/in_a_1
+ ? in_a
+ ? ../b/1/in_b_1
+ ? ../b/2/in_b_2
+ ? ../b/in_b
+ ? ../in_root
+ $ HGPLAIN=1 hg status --cwd a --config ui.tweakdefaults=yes
+ ? a/1/in_a_1 (glob)
+ ? a/in_a (glob)
+ ? b/1/in_b_1 (glob)
+ ? b/2/in_b_2 (glob)
+ ? b/in_b (glob)
+ ? in_root
+ $ HGPLAINEXCEPT=tweakdefaults hg status --cwd a --config ui.tweakdefaults=yes
+ ? 1/in_a_1 (glob)
+ ? in_a
+ ? ../b/1/in_b_1 (glob)
+ ? ../b/2/in_b_2 (glob)
+ ? ../b/in_b (glob)
+ ? ../in_root (glob)
+
relative paths can be requested
$ cat >> $HGRCPATH <<EOF
@@ -128,6 +151,19 @@
? b/in_b (glob)
? in_root
+if relative paths are explicitly off, tweakdefaults doesn't change it
+ $ cat >> $HGRCPATH <<EOF
+ > [commands]
+ > status.relative = False
+ > EOF
+ $ hg status --cwd a --config ui.tweakdefaults=yes
+ ? a/1/in_a_1
+ ? a/in_a
+ ? b/1/in_b_1
+ ? b/2/in_b_2
+ ? b/in_b
+ ? in_root
+
$ cd ..
$ hg init repo2
--- a/tests/test-strip.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-strip.t Wed Jul 19 07:51:41 2017 -0500
@@ -2,6 +2,7 @@
$ echo "usegeneraldelta=yes" >> $HGRCPATH
$ echo "[extensions]" >> $HGRCPATH
$ echo "strip=" >> $HGRCPATH
+ $ echo "drawdag=$TESTDIR/drawdag.py" >> $HGRCPATH
$ restore() {
> hg unbundle -q .hg/strip-backup/*
@@ -213,6 +214,8 @@
Stream params: sortdict([('Compression', 'BZ')])
changegroup -- "sortdict([('version', '02'), ('nbchanges', '1')])"
264128213d290d868c54642d13aeaa3675551a78
+ phase-heads -- 'sortdict()'
+ 264128213d290d868c54642d13aeaa3675551a78 draft
$ hg pull .hg/strip-backup/*
pulling from .hg/strip-backup/264128213d29-0b39d6bf-backup.hg
searching for changes
@@ -287,6 +290,7 @@
$ hg up
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "264128213d29: c"
1 other heads for branch "default"
$ hg log -G
@ changeset: 4:264128213d29
@@ -838,9 +842,11 @@
list of changesets:
6625a516847449b6f0fa3737b9ba56e9f0f3032c
d8db9d1372214336d2b5570f20ee468d2c72fa8b
- bundle2-output-bundle: "HG20", (1 params) 1 parts total
+ bundle2-output-bundle: "HG20", (1 params) 2 parts total
bundle2-output-part: "changegroup" (params: 1 mandatory 1 advisory) streamed payload
+ bundle2-output-part: "phase-heads" 24 bytes payload
saved backup bundle to $TESTTMP/issue4736/.hg/strip-backup/6625a5168474-345bb43d-backup.hg (glob)
+ updating the branch cache
invalid branchheads cache (served): tip differs
truncating cache/rbc-revs-v1 to 24
$ hg log -G
@@ -922,7 +928,7 @@
> def reposetup(ui, repo):
> class crashstriprepo(repo.__class__):
> def transaction(self, desc, *args, **kwargs):
- > tr = super(crashstriprepo, self).transaction(self, desc, *args, **kwargs)
+ > tr = super(crashstriprepo, self).transaction(desc, *args, **kwargs)
> if desc == 'strip':
> def crash(tra): raise error.Abort('boom')
> tr.addpostclose('crash', crash)
@@ -935,4 +941,159 @@
abort: boom
[255]
+Use delayedstrip to strip inside a transaction
+ $ cd $TESTTMP
+ $ hg init delayedstrip
+ $ cd delayedstrip
+ $ hg debugdrawdag <<'EOS'
+ > D
+ > |
+ > C F H # Commit on top of "I",
+ > | |/| # Strip B+D+I+E+G+H+Z
+ > I B E G
+ > \|/
+ > A Z
+ > EOS
+ $ cp -R . ../scmutilcleanup
+
+ $ hg up -C I
+ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo 3 >> I
+ $ cat > $TESTTMP/delayedstrip.py <<EOF
+ > from mercurial import repair, commands
+ > def reposetup(ui, repo):
+ > def getnodes(expr):
+ > return [repo.changelog.node(r) for r in repo.revs(expr)]
+ > with repo.wlock():
+ > with repo.lock():
+ > with repo.transaction('delayedstrip'):
+ > repair.delayedstrip(ui, repo, getnodes('B+I+Z+D+E'), 'J')
+ > repair.delayedstrip(ui, repo, getnodes('G+H+Z'), 'I')
+ > commands.commit(ui, repo, message='J', date='0 0')
+ > EOF
+ $ hg log -r . -T '\n' --config extensions.t=$TESTTMP/delayedstrip.py
+ warning: orphaned descendants detected, not stripping 08ebfeb61bac, 112478962961, 7fb047a69f22
+ saved backup bundle to $TESTTMP/delayedstrip/.hg/strip-backup/f585351a92f8-17475721-I.hg (glob)
+
+ $ hg log -G -T '{rev}:{node|short} {desc}' -r 'sort(all(), topo)'
+ @ 6:2f2d51af6205 J
+ |
+ o 3:08ebfeb61bac I
+ |
+ | o 5:64a8289d2492 F
+ | |
+ | o 2:7fb047a69f22 E
+ |/
+ | o 4:26805aba1e60 C
+ | |
+ | o 1:112478962961 B
+ |/
+ o 0:426bada5c675 A
+
+Test high-level scmutil.cleanupnodes API
+
+ $ cd $TESTTMP/scmutilcleanup
+ $ hg debugdrawdag <<'EOS'
+ > D2 F2 G2 # D2, F2, G2 are replacements for D, F, G
+ > | | |
+ > C H G
+ > EOS
+ $ for i in B C D F G I Z; do
+ > hg bookmark -i -r $i b-$i
+ > done
+ $ hg bookmark -i -r E 'b-F@divergent1'
+ $ hg bookmark -i -r H 'b-F@divergent2'
+ $ hg bookmark -i -r G 'b-F@divergent3'
+ $ cp -R . ../scmutilcleanup.obsstore
+
+ $ cat > $TESTTMP/scmutilcleanup.py <<EOF
+ > from mercurial import scmutil
+ > def reposetup(ui, repo):
+ > def nodes(expr):
+ > return [repo.changelog.node(r) for r in repo.revs(expr)]
+ > def node(expr):
+ > return nodes(expr)[0]
+ > with repo.wlock():
+ > with repo.lock():
+ > with repo.transaction('delayedstrip'):
+ > mapping = {node('F'): [node('F2')],
+ > node('D'): [node('D2')],
+ > node('G'): [node('G2')]}
+ > scmutil.cleanupnodes(repo, mapping, 'replace')
+ > scmutil.cleanupnodes(repo, nodes('((B::)+I+Z)-D2'), 'replace')
+ > EOF
+ $ hg log -r . -T '\n' --config extensions.t=$TESTTMP/scmutilcleanup.py
+ warning: orphaned descendants detected, not stripping 112478962961, 1fc8102cda62, 26805aba1e60
+ saved backup bundle to $TESTTMP/scmutilcleanup/.hg/strip-backup/f585351a92f8-73fb7c03-replace.hg (glob)
+
+ $ hg log -G -T '{rev}:{node|short} {desc} {bookmarks}' -r 'sort(all(), topo)'
+ o 8:1473d4b996d1 G2 b-F@divergent3 b-G
+ |
+ | o 7:d11b3456a873 F2 b-F
+ | |
+ | o 5:5cb05ba470a7 H
+ |/|
+ | o 3:7fb047a69f22 E b-F@divergent1
+ | |
+ | | o 6:7c78f703e465 D2 b-D
+ | | |
+ | | o 4:26805aba1e60 C
+ | | |
+ | | o 2:112478962961 B
+ | |/
+ o | 1:1fc8102cda62 G
+ /
+ o 0:426bada5c675 A b-B b-C b-I
+
+ $ hg bookmark
+ b-B 0:426bada5c675
+ b-C 0:426bada5c675
+ b-D 6:7c78f703e465
+ b-F 7:d11b3456a873
+ b-F@divergent1 3:7fb047a69f22
+ b-F@divergent3 8:1473d4b996d1
+ b-G 8:1473d4b996d1
+ b-I 0:426bada5c675
+ b-Z -1:000000000000
+
+Test the above using obsstore "by the way". Not directly related to strip, but
+we have reusable code here
+
+ $ cd $TESTTMP/scmutilcleanup.obsstore
+ $ cat >> .hg/hgrc <<EOF
+ > [experimental]
+ > evolution=all
+ > evolution.track-operation=1
+ > EOF
+
+ $ hg log -r . -T '\n' --config extensions.t=$TESTTMP/scmutilcleanup.py
+
+ $ rm .hg/localtags
+ $ hg log -G -T '{rev}:{node|short} {desc} {bookmarks}' -r 'sort(all(), topo)'
+ o 12:1473d4b996d1 G2 b-F@divergent3 b-G
+ |
+ | o 11:d11b3456a873 F2 b-F
+ | |
+ | o 8:5cb05ba470a7 H
+ |/|
+ | o 4:7fb047a69f22 E b-F@divergent1
+ | |
+ | | o 10:7c78f703e465 D2 b-D
+ | | |
+ | | x 6:26805aba1e60 C
+ | | |
+ | | x 3:112478962961 B
+ | |/
+ x | 1:1fc8102cda62 G
+ /
+ o 0:426bada5c675 A b-B b-C b-I
+
+ $ hg debugobsolete
+ 1fc8102cda6204549f031015641606ccf5513ec3 1473d4b996d1d1b121de6b39fab6a04fbf9d873e 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'replace', 'user': 'test'}
+ 64a8289d249234b9886244d379f15e6b650b28e3 d11b3456a873daec7c7bc53e5622e8df6d741bd2 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'replace', 'user': 'test'}
+ f585351a92f85104bff7c284233c338b10eb1df7 7c78f703e465d73102cc8780667ce269c5208a40 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'replace', 'user': 'test'}
+ 48b9aae0607f43ff110d84e6883c151942add5ab 0 {0000000000000000000000000000000000000000} (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'replace', 'user': 'test'}
+ 112478962961147124edd43549aedd1a335e44bf 0 {426bada5c67598ca65036d57d9e4b64b0c1ce7a0} (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'replace', 'user': 'test'}
+ 08ebfeb61bac6e3f12079de774d285a0d6689eba 0 {426bada5c67598ca65036d57d9e4b64b0c1ce7a0} (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'replace', 'user': 'test'}
+ 26805aba1e600a82e93661149f2313866a221a7b 0 {112478962961147124edd43549aedd1a335e44bf} (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'replace', 'user': 'test'}
--- a/tests/test-subrepo-deep-nested-change.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-subrepo-deep-nested-change.t Wed Jul 19 07:51:41 2017 -0500
@@ -238,6 +238,37 @@
committing subrepository sub1
committing subrepository sub1/sub2 (glob)
+ $ rm -r main
+ $ hg archive -S -qr 'wdir()' ../wdir
+ $ cat ../wdir/.hg_archival.txt
+ repo: 7f491f53a367861f47ee64a80eb997d1f341b77a
+ node: 9bb10eebee29dc0f1201dcf5977b811a540255fd+
+ branch: default
+ latesttag: null
+ latesttagdistance: 4
+ changessincelatesttag: 4
+ $ hg update -Cq .
+
+A deleted subrepo file is flagged as dirty, like the top level repo
+
+ $ rm -r ../wdir sub1/sub2/folder/test.txt
+ $ hg archive -S -qr 'wdir()' ../wdir
+ $ cat ../wdir/.hg_archival.txt
+ repo: 7f491f53a367861f47ee64a80eb997d1f341b77a
+ node: 9bb10eebee29dc0f1201dcf5977b811a540255fd+
+ branch: default
+ latesttag: null
+ latesttagdistance: 4
+ changessincelatesttag: 4
+ $ hg update -Cq .
+ $ rm -r ../wdir
+
+ $ hg archive -S -qr 'wdir()' ../wdir \
+ > --config 'experimental.archivemetatemplate=archived {node|short}\n'
+ $ cat ../wdir/.hg_archival.txt
+ archived ffffffffffff
+ $ rm -r ../wdir
+
.. but first take a detour through some deep removal testing
$ hg remove -S -I 're:.*.txt' .
--- a/tests/test-subrepo-git.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-subrepo-git.t Wed Jul 19 07:51:41 2017 -0500
@@ -2,6 +2,11 @@
make git commits repeatable
+ $ cat >> $HGRCPATH <<EOF
+ > [defaults]
+ > commit = -d "0 0"
+ > EOF
+
$ echo "[core]" >> $HOME/.gitconfig
$ echo "autocrlf = false" >> $HOME/.gitconfig
$ GIT_AUTHOR_NAME='test'; export GIT_AUTHOR_NAME
@@ -151,7 +156,7 @@
$ hg push 2>/dev/null
pushing to $TESTTMP/t (glob)
- pushing branch testing of subrepo s
+ pushing branch testing of subrepository "s"
searching for changes
adding changesets
adding manifests
@@ -195,7 +200,7 @@
revision f47b465e1bce645dbf37232a00574aa1546ca8d3
$ hg push 2>/dev/null
pushing to $TESTTMP/t (glob)
- pushing branch testing of subrepo s
+ pushing branch testing of subrepository "s"
searching for changes
adding changesets
adding manifests
@@ -219,7 +224,7 @@
$ hg clone . ../td 2>&1 | egrep -v '^Cloning into|^done\.'
updating to branch default
cloning subrepo s from $TESTTMP/gitroot
- checking out detached HEAD in subrepo s
+ checking out detached HEAD in subrepository "s"
check out a git branch if you intend to make changes
3 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cd ../td
@@ -384,10 +389,10 @@
$ hg sum | grep commit
commit: 1 subrepos
$ hg push -q
- abort: subrepo s is missing (in subrepo s)
+ abort: subrepo s is missing (in subrepository "s")
[255]
$ hg commit --subrepos -qm missing
- abort: subrepo s is missing (in subrepo s)
+ abort: subrepo s is missing (in subrepository "s")
[255]
#if symlink
@@ -395,10 +400,10 @@
$ ln -s broken s
$ hg status -S
$ hg push -q
- abort: subrepo s is missing (in subrepo s)
+ abort: subrepo s is missing (in subrepository "s")
[255]
$ hg commit --subrepos -qm missing
- abort: subrepo s is missing (in subrepo s)
+ abort: subrepo s is missing (in subrepository "s")
[255]
$ rm s
#endif
@@ -697,7 +702,7 @@
$ mkdir s/.git/.hg
$ echo '.hg/hgrc in git metadata area' > s/.git/.hg/hgrc
$ hg update -q -C af6d2edbb0d3
- checking out detached HEAD in subrepo s
+ checking out detached HEAD in subrepository "s"
check out a git branch if you intend to make changes
check differences made by most recent change
@@ -1132,6 +1137,8 @@
? s/foobar.orig
? s/snake.python.orig
+#if git19
+
test for Git CVE-2016-3068
$ hg init malicious-subrepository
$ cd malicious-subrepository
@@ -1153,7 +1160,7 @@
fatal: transport 'ext' not allowed
updating to branch default
cloning subrepo s from ext::sh -c echo% pwned:% $PWNED_MSG% >pwned.txt
- abort: git clone error 128 in s (in subrepo s)
+ abort: git clone error 128 in s (in subrepository "s")
[255]
$ f -Dq pwned.txt
pwned.txt: file not found
@@ -1169,7 +1176,9 @@
and the repository exists.
updating to branch default
cloning subrepo s from ext::sh -c echo% pwned:% $PWNED_MSG% >pwned.txt
- abort: git clone error 128 in s (in subrepo s)
+ abort: git clone error 128 in s (in subrepository "s")
[255]
$ f -Dq pwned.txt
pwned: you asked for it
+
+#endif
--- a/tests/test-subrepo-missing.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-subrepo-missing.t Wed Jul 19 07:51:41 2017 -0500
@@ -34,7 +34,6 @@
$ hg revert .hgsub
warning: subrepo spec file '.hgsub' not found
warning: subrepo spec file '.hgsub' not found
- warning: subrepo spec file '.hgsub' not found
delete .hgsubstate and revert it
@@ -93,7 +92,7 @@
$ hg commit --amend -m "addb (amended)"
$ cd ..
$ hg update --clean .
- revision 102a90ea7b4a in subrepo subrepo is hidden
+ revision 102a90ea7b4a in subrepository "subrepo" is hidden
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
check that --hidden is propagated to the subrepo
@@ -109,7 +108,7 @@
verify will warn if locked-in subrepo revisions are hidden or missing
$ hg ci -m "amended subrepo (again)"
- $ hg --config extensions.strip= --hidden strip -R subrepo -qr 'tip'
+ $ hg --config extensions.strip= --hidden strip -R subrepo -qr 'tip' --config devel.strip-obsmarkers=no
$ hg verify
checking changesets
checking manifests
--- a/tests/test-subrepo-recursion.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-subrepo-recursion.t Wed Jul 19 07:51:41 2017 -0500
@@ -59,7 +59,7 @@
Commits:
$ hg commit -m fails
- abort: uncommitted changes in subrepository 'foo'
+ abort: uncommitted changes in subrepository "foo"
(use --subrepos for recursive commit)
[255]
@@ -495,7 +495,7 @@
$ echo f > foo/f
$ hg archive --subrepos -r tip archive
cloning subrepo foo from $TESTTMP/empty/foo
- abort: destination '$TESTTMP/almost-empty/foo' is not empty (in subrepo foo) (glob)
+ abort: destination '$TESTTMP/almost-empty/foo' is not empty (in subrepository "foo") (glob)
[255]
Clone and test outgoing:
--- a/tests/test-subrepo-relative-path.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-subrepo-relative-path.t Wed Jul 19 07:51:41 2017 -0500
@@ -74,7 +74,7 @@
subrepo paths with ssh urls
- $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/cloned sshclone
+ $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/cloned sshclone
requesting all changes
adding changesets
adding manifests
@@ -89,7 +89,7 @@
added 1 changesets with 1 changes to 1 files
3 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ hg -R sshclone push -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/`pwd`/cloned
+ $ hg -R sshclone push -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/`pwd`/cloned
pushing to ssh://user@dummy/$TESTTMP/cloned
pushing subrepo sub to ssh://user@dummy/$TESTTMP/sub
searching for changes
--- a/tests/test-subrepo-svn.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-subrepo-svn.t Wed Jul 19 07:51:41 2017 -0500
@@ -113,7 +113,7 @@
$ rm s/alpha
$ hg commit --subrepos -m 'abort on missing file'
committing subrepository s
- abort: cannot commit missing svn entries (in subrepo s)
+ abort: cannot commit missing svn entries (in subrepository "s")
[255]
$ svn revert s/alpha > /dev/null
@@ -170,7 +170,7 @@
$ echo zzz > s/externals/other
$ hg ci --subrepos -m 'amend externals from hg'
committing subrepository s
- abort: cannot commit svn externals (in subrepo s)
+ abort: cannot commit svn externals (in subrepository "s")
[255]
$ hg diff --subrepos -r 1:2 | grep -v diff
--- a/.hgsubstate Thu Jan 01 00:00:00 1970 +0000
@@ -192,7 +192,7 @@
property 'svn:mime-type' set on 's/externals/other' (glob)
$ hg ci --subrepos -m 'amend externals from hg'
committing subrepository s
- abort: cannot commit svn externals (in subrepo s)
+ abort: cannot commit svn externals (in subrepository "s")
[255]
$ svn revert -q s/externals/other
--- a/tests/test-subrepo.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-subrepo.t Wed Jul 19 07:51:41 2017 -0500
@@ -111,7 +111,7 @@
$ echo b >> s/a
$ hg backout tip
- abort: uncommitted changes in subrepository 's'
+ abort: uncommitted changes in subrepository "s"
[255]
$ hg revert -C -R s s/a
@@ -163,7 +163,7 @@
$ echo c > s/a
$ hg --config ui.commitsubrepos=no ci -m4
- abort: uncommitted changes in subrepository 's'
+ abort: uncommitted changes in subrepository "s"
(use --subrepos for recursive commit)
[255]
$ hg id
@@ -305,7 +305,7 @@
subrepository t diverged (local revision: 20a0db6fbf6c, remote revision: 7af322bc1198)
starting 4 threads for background file closing (?)
(M)erge, keep (l)ocal [working copy] or keep (r)emote [merge rev]? m
- merging subrepo t
+ merging subrepository "t"
searching for copies back to rev 2
resolving manifests
branchmerge: True, force: False, partial: False
@@ -516,7 +516,7 @@
no changes made to subrepo s/ss since last push to $TESTTMP/t/s/ss (glob)
pushing subrepo s to $TESTTMP/t/s
searching for changes
- abort: push creates new remote head 12a213df6fa9! (in subrepo s)
+ abort: push creates new remote head 12a213df6fa9! (in subrepository "s")
(merge or see 'hg help push' for details about pushing new heads)
[255]
$ hg push -f
@@ -680,6 +680,7 @@
$ cd ../t
$ hg up -C # discard our earlier merge
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "c373c8102e68: 12"
2 other heads for branch "default"
$ echo blah > t/t
$ hg ci -m13
@@ -694,6 +695,7 @@
$ hg up -C # discard changes
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "925c17564ef8: 13"
2 other heads for branch "default"
pull
@@ -736,6 +738,7 @@
adding file changes
added 1 changesets with 1 changes to 1 files
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "925c17564ef8: 13"
2 other heads for branch "default"
$ cat t/t
blah
@@ -744,7 +747,7 @@
$ echo 'bogus=[boguspath' >> .hgsub
$ hg ci -m 'bogus subrepo path'
- abort: missing ] in subrepo source
+ abort: missing ] in subrepository source
[255]
Issue1986: merge aborts when trying to merge a subrepo that
@@ -952,7 +955,7 @@
created new head
$ hg -R repo2 ci -m3
$ hg -q -R repo2 push
- abort: push creates new remote head cc505f09a8b2! (in subrepo s)
+ abort: push creates new remote head cc505f09a8b2! (in subrepository "s")
(merge or see 'hg help push' for details about pushing new heads)
[255]
$ hg -R repo update
@@ -980,7 +983,7 @@
> EOF
$ hg -R repo update
b: untracked file differs
- abort: untracked files in working directory differ from files in requested revision (in subrepo s)
+ abort: untracked files in working directory differ from files in requested revision (in subrepository "s")
[255]
$ cat >> repo/.hg/hgrc <<EOF
> [extensions]
@@ -1020,6 +1023,14 @@
$ hg cat sub/repo/foo
test
test
+ $ hg cat sub/repo/foo -Tjson | sed 's|\\\\|/|g'
+ [
+ {
+ "abspath": "foo",
+ "data": "test\ntest\n",
+ "path": "sub/repo/foo"
+ }
+ ]
$ mkdir -p tmp/sub/repo
$ hg cat -r 0 --output tmp/%p_p sub/repo/foo
$ cat tmp/sub/repo/foo_p
@@ -1044,7 +1055,7 @@
added 2 changesets with 3 changes to 2 files
(run 'hg update' to get a working copy)
$ hg -R issue1852b update
- abort: default path for subrepository not found (in subrepo sub/repo) (glob)
+ abort: default path for subrepository not found (in subrepository "sub/repo") (glob)
[255]
Ensure a full traceback, not just the SubrepoAbort part
@@ -1204,6 +1215,7 @@
? s/c
$ hg update -C
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "925c17564ef8: 13"
2 other heads for branch "default"
$ hg status -S
? s/b
--- a/tests/test-tag.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-tag.t Wed Jul 19 07:51:41 2017 -0500
@@ -230,7 +230,7 @@
Issue601: hg tag doesn't do the right thing if .hgtags or localtags
doesn't end with EOL
- $ python << EOF
+ $ $PYTHON << EOF
> f = file('.hg/localtags'); last = f.readlines()[-1][:-1]; f.close()
> f = file('.hg/localtags', 'w'); f.write(last); f.close()
> EOF
@@ -242,7 +242,7 @@
c2899151f4e76890c602a2597a650a72666681bf localnewline
- $ python << EOF
+ $ $PYTHON << EOF
> f = file('.hgtags'); last = f.readlines()[-1][:-1]; f.close()
> f = file('.hgtags', 'w'); f.write(last); f.close()
> EOF
--- a/tests/test-tags.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-tags.t Wed Jul 19 07:51:41 2017 -0500
@@ -118,13 +118,17 @@
And again, but now unable to write tag cache or lock file:
-#if unix-permissions
+#if unix-permissions no-fsmonitor
+
$ rm -f .hg/cache/tags2-visible .hg/cache/hgtagsfnodes1
$ chmod 555 .hg/cache
$ hg identify
b9154636be93 tip
$ chmod 755 .hg/cache
+(this block should be protected by no-fsmonitor, because "chmod 555 .hg"
+makes watchman fail at accessing to files under .hg)
+
$ chmod 555 .hg
$ hg identify
b9154636be93 tip
@@ -671,13 +675,11 @@
Missing tags2* files means the cache wasn't written through the normal mechanism.
$ ls tagsclient/.hg/cache
- branch2-served
+ branch2-base
checkisexec (execbit !)
checklink (symlink !)
checklink-target (symlink !)
hgtagsfnodes1
- rbc-names-v1
- rbc-revs-v1
Cache should contain the head only, even though other nodes have tags data
@@ -698,13 +700,11 @@
0.1 0:96ee1d7354c4
$ ls tagsclient/.hg/cache
- branch2-served
+ branch2-base
checkisexec (execbit !)
checklink (symlink !)
checklink-target (symlink !)
hgtagsfnodes1
- rbc-names-v1
- rbc-revs-v1
tags2-visible
$ f --size --hexdump tagsclient/.hg/cache/hgtagsfnodes1
@@ -716,3 +716,23 @@
0040: ff ff ff ff ff ff ff ff 40 f0 35 8c 19 e0 a7 d3 |........@.5.....|
0050: 8a 5c 6a 82 4d cf fb a5 87 d0 2f a3 1e 4f 2f 8a |.\j.M...../..O/.|
+Check that the bundle includes cache data
+
+ $ hg -R tagsclient bundle --all ./test-cache-in-bundle-all-rev.hg
+ 4 changesets found
+ $ hg debugbundle ./test-cache-in-bundle-all-rev.hg
+ Stream params: sortdict([('Compression', 'BZ')])
+ changegroup -- "sortdict([('version', '02'), ('nbchanges', '4')])"
+ 96ee1d7354c4ad7372047672c36a1f561e3a6a4c
+ c4dab0c2fd337eb9191f80c3024830a4889a8f34
+ f63cc8fe54e4d326f8d692805d70e092f851ddb1
+ 40f0358cb314c824a5929ee527308d90e023bc10
+ hgtagsfnodes -- 'sortdict()'
+
+Check that local clone includes cache data
+
+ $ hg clone tagsclient tags-local-clone
+ updating to branch default
+ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ (cd tags-local-clone/.hg/cache/; ls -1 tag*)
+ tags2-visible
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-terse-status.t Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,185 @@
+ $ mkdir folder
+ $ cd folder
+ $ hg init
+ $ mkdir x x/l x/m x/n x/l/u x/l/u/a
+ $ touch a b x/aa.o x/bb.o
+ $ hg status
+ ? a
+ ? b
+ ? x/aa.o
+ ? x/bb.o
+
+ $ hg status --terse u
+ ? a
+ ? b
+ ? x/
+ $ hg status --terse maudric
+ ? a
+ ? b
+ ? x/
+ $ hg status --terse madric
+ ? a
+ ? b
+ ? x/aa.o
+ ? x/bb.o
+ $ hg status --terse f
+ abort: 'f' not recognized
+ [255]
+
+Add a .hgignore so that we can also have ignored files
+
+ $ echo ".*\.o" > .hgignore
+ $ hg status
+ ? .hgignore
+ ? a
+ ? b
+ $ hg status -i
+ I x/aa.o
+ I x/bb.o
+
+Tersing ignored files
+ $ hg status -t i --ignored
+ I x/
+
+Adding more files
+ $ mkdir y
+ $ touch x/aa x/bb y/l y/m y/l.o y/m.o
+ $ touch x/l/aa x/m/aa x/n/aa x/l/u/bb x/l/u/a/bb
+
+ $ hg status
+ ? .hgignore
+ ? a
+ ? b
+ ? x/aa
+ ? x/bb
+ ? x/l/aa
+ ? x/l/u/a/bb
+ ? x/l/u/bb
+ ? x/m/aa
+ ? x/n/aa
+ ? y/l
+ ? y/m
+
+ $ hg status --terse u
+ ? .hgignore
+ ? a
+ ? b
+ ? x/
+ ? y/
+
+ $ hg add x/aa x/bb .hgignore
+ $ hg status --terse au
+ A .hgignore
+ A x/aa
+ A x/bb
+ ? a
+ ? b
+ ? x/l/
+ ? x/m/
+ ? x/n/
+ ? y/
+
+Including ignored files
+
+ $ hg status --terse aui
+ A .hgignore
+ A x/aa
+ A x/bb
+ ? a
+ ? b
+ ? x/l/
+ ? x/m/
+ ? x/n/
+ ? y/l
+ ? y/m
+ $ hg status --terse au -i
+ I x/aa.o
+ I x/bb.o
+ I y/l.o
+ I y/m.o
+
+Committing some of the files
+
+ $ hg commit x/aa x/bb .hgignore -m "First commit"
+ $ hg status
+ ? a
+ ? b
+ ? x/l/aa
+ ? x/l/u/a/bb
+ ? x/l/u/bb
+ ? x/m/aa
+ ? x/n/aa
+ ? y/l
+ ? y/m
+ $ hg status --terse mardu
+ ? a
+ ? b
+ ? x/l/
+ ? x/m/
+ ? x/n/
+ ? y/
+
+Modifying already committed files
+
+ $ echo "Hello" >> x/aa
+ $ echo "World" >> x/bb
+ $ hg status --terse maurdc
+ M x/aa
+ M x/bb
+ ? a
+ ? b
+ ? x/l/
+ ? x/m/
+ ? x/n/
+ ? y/
+
+Respecting other flags
+
+ $ hg status --terse marduic --all
+ M x/aa
+ M x/bb
+ ? a
+ ? b
+ ? x/l/
+ ? x/m/
+ ? x/n/
+ ? y/l
+ ? y/m
+ I x/aa.o
+ I x/bb.o
+ I y/l.o
+ I y/m.o
+ C .hgignore
+ $ hg status --terse marduic -a
+ $ hg status --terse marduic -c
+ C .hgignore
+ $ hg status --terse marduic -m
+ M x/aa
+ M x/bb
+
+Passing 'i' in terse value will consider the ignored files while tersing
+
+ $ hg status --terse marduic -u
+ ? a
+ ? b
+ ? x/l/
+ ? x/m/
+ ? x/n/
+ ? y/l
+ ? y/m
+
+Omitting 'i' in terse value does not consider ignored files while tersing
+
+ $ hg status --terse marduc -u
+ ? a
+ ? b
+ ? x/l/
+ ? x/m/
+ ? x/n/
+ ? y/
+
+Trying with --rev
+
+ $ hg status --terse marduic --rev 0 --rev 1
+ abort: cannot use --terse with --rev
+ [255]
--- a/tests/test-tools.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-tools.t Wed Jul 19 07:51:41 2017 -0500
@@ -44,10 +44,10 @@
#endif
#if no-windows
- $ python $TESTDIR/seq.py 10 > bar
+ $ $PYTHON $TESTDIR/seq.py 10 > bar
#else
Convert CRLF -> LF for consistency
- $ python $TESTDIR/seq.py 10 | sed "s/$//" > bar
+ $ $PYTHON $TESTDIR/seq.py 10 | sed "s/$//" > bar
#endif
#if unix-permissions symlink
@@ -88,23 +88,14 @@
$ cd ..
Yadda is a symlink
-#if symlink
$ f -qr dir -HB 17
- dir: directory with 3 files
- dir/bar:
- 0000: 31 0a 32 0a 33 0a 34 0a 35 0a 36 0a 37 0a 38 0a |1.2.3.4.5.6.7.8.|
- 0010: 39 |9|
- dir/foo:
- 0000: 66 6f 6f 0a |foo.|
- dir/l:
- 0000: 79 61 64 64 61 |yadda|
-#else
- $ f -qr dir -HB 17
- dir: directory with 2 files (esc)
+ dir: directory with 3 files (symlink !)
+ dir: directory with 2 files (no-symlink !)
dir/bar: (glob)
0000: 31 0a 32 0a 33 0a 34 0a 35 0a 36 0a 37 0a 38 0a |1.2.3.4.5.6.7.8.|
0010: 39 |9|
dir/foo: (glob)
0000: 66 6f 6f 0a |foo.|
-#endif
+ dir/l: (symlink !)
+ 0000: 79 61 64 64 61 |yadda| (symlink !)
--- a/tests/test-transplant.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-transplant.t Wed Jul 19 07:51:41 2017 -0500
@@ -419,6 +419,7 @@
$ hg up -C
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "e8643552fde5: foobar"
1 other heads for branch "default"
$ rm added
$ hg transplant --continue
--- a/tests/test-treemanifest.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-treemanifest.t Wed Jul 19 07:51:41 2017 -0500
@@ -4,7 +4,7 @@
> [format]
> usegeneraldelta=yes
> [ui]
- > ssh=python "$TESTDIR/dummyssh"
+ > ssh=$PYTHON "$TESTDIR/dummyssh"
> EOF
Set up repo
@@ -862,7 +862,7 @@
$ hg commit -Aqm 'pre-empty commit'
$ hg rm z
$ hg commit --amend -m 'empty commit'
- saved backup bundle to $TESTTMP/grafted-dir-repo-clone/.hg/strip-backup/cb99d5717cea-de37743b-amend-backup.hg (glob)
+ saved backup bundle to $TESTTMP/grafted-dir-repo-clone/.hg/strip-backup/cb99d5717cea-de37743b-amend.hg (glob)
$ hg log -r 'tip + tip^' -T '{manifest}\n'
1:678d3574b88c
1:678d3574b88c
--- a/tests/test-ui-config.py Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-ui-config.py Wed Jul 19 07:51:41 2017 -0500
@@ -32,6 +32,9 @@
'lists.list16="longer quotation" with "no ending quotation',
'lists.list17=this is \\" "not a quotation mark"',
'lists.list18=\n \n\nding\ndong',
+ 'date.epoch=0 0',
+ 'date.birth=2005-04-19T00:00:00',
+ 'date.invalid=0'
])
print(repr(testui.configitems('values')))
@@ -82,6 +85,9 @@
print(repr(testui.configlist('lists', 'unknown', 'foo, bar')))
print(repr(testui.configlist('lists', 'unknown', ['foo bar'])))
print(repr(testui.configlist('lists', 'unknown', ['foo', 'bar'])))
+print("---")
+print(repr(testui.configdate('date', 'epoch')))
+print(repr(testui.configdate('date', 'birth')))
print(repr(testui.config('values', 'String')))
@@ -101,3 +107,7 @@
testui.configint('values', 'intinvalid')
except error.ConfigError:
print('intinvalid')
+try:
+ testui.configdate('date', 'invalid')
+except error.ConfigError:
+ print('dateinvalid')
--- a/tests/test-ui-config.py.out Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-ui-config.py.out Wed Jul 19 07:51:41 2017 -0500
@@ -43,7 +43,11 @@
['foo', 'bar']
['foo bar']
['foo', 'bar']
+---
+(0, 0)
+(1113868800, 0)
None
True
boolinvalid
intinvalid
+dateinvalid
--- a/tests/test-up-local-change.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-up-local-change.t Wed Jul 19 07:51:41 2017 -0500
@@ -172,6 +172,7 @@
$ hg --debug up
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "1e71731e6fbb: 2"
1 other heads for branch "default"
test conflicting untracked files
--- a/tests/test-update-branches.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-update-branches.t Wed Jul 19 07:51:41 2017 -0500
@@ -94,6 +94,7 @@
$ norevtest 'none clean same' clean 2
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "bd10386d478c: 2"
1 other heads for branch "default"
parent=2
@@ -141,6 +142,7 @@
$ norevtest 'none dirty cross' dirty 2
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "bd10386d478c: 2"
1 other heads for branch "default"
parent=2
M foo
@@ -171,12 +173,13 @@
M foo
$ revtest '-c dirtysub linear' dirtysub 1 2 -c
- abort: uncommitted changes in subrepository 'sub'
+ abort: uncommitted changes in subrepository "sub"
parent=1
M sub/suba
$ norevtest '-c clean same' clean 2 -c
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ updated to "bd10386d478c: 2"
1 other heads for branch "default"
parent=2
@@ -499,6 +502,7 @@
$ hg id --debug -i -r 4
d047485b3896813b2a624e86201983520f003206
$ hg debugobsolete 6efa171f091b00a3c35edc15d48c52a498929953 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+ obsoleted 1 changesets
$ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa d047485b3896813b2a624e86201983520f003206
Test that 5 is detected as a valid destination from 3 and also accepts moving
@@ -545,6 +549,7 @@
$ hg up --quiet 0
$ hg up --quiet 2
$ hg debugobsolete bd10386d478cd5a9faf2e604114c8e6da62d3889
+ obsoleted 1 changesets
$ hg up
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-verify.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-verify.t Wed Jul 19 07:51:41 2017 -0500
@@ -317,3 +317,47 @@
checking files
1 files, 1 changesets, 1 total revisions
$ cd ..
+
+test flag processor and skipflags
+
+ $ hg init skipflags
+ $ cd skipflags
+ $ cat >> .hg/hgrc <<EOF
+ > [extensions]
+ > flagprocessor=$RUNTESTDIR/flagprocessorext.py
+ > EOF
+ $ echo '[BASE64]content' > base64
+ $ hg commit -Aqm 'flag processor content' base64
+ $ hg verify
+ checking changesets
+ checking manifests
+ crosschecking files in changesets and manifests
+ checking files
+ 1 files, 1 changesets, 1 total revisions
+
+ $ cat >> $TESTTMP/break-base64.py <<EOF
+ > from __future__ import absolute_import
+ > import base64
+ > base64.b64decode=lambda x: x
+ > EOF
+ $ cat >> .hg/hgrc <<EOF
+ > breakbase64=$TESTTMP/break-base64.py
+ > EOF
+
+ $ hg verify
+ checking changesets
+ checking manifests
+ crosschecking files in changesets and manifests
+ checking files
+ base64@0: unpacking 794cee7777cb: integrity check failed on data/base64.i:0
+ 1 files, 1 changesets, 1 total revisions
+ 1 integrity errors encountered!
+ (first damaged changeset appears to be 0)
+ [1]
+ $ hg verify --config verify.skipflags=2147483647
+ checking changesets
+ checking manifests
+ crosschecking files in changesets and manifests
+ checking files
+ 1 files, 1 changesets, 1 total revisions
+
--- a/tests/test-walk.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-walk.t Wed Jul 19 07:51:41 2017 -0500
@@ -29,6 +29,7 @@
$ hg commit -m "commit #0"
$ hg debugwalk
+ matcher: <alwaysmatcher>
f beans/black beans/black
f beans/borlotti beans/borlotti
f beans/kidney beans/kidney
@@ -43,6 +44,7 @@
f mammals/Procyonidae/raccoon mammals/Procyonidae/raccoon
f mammals/skunk mammals/skunk
$ hg debugwalk -I.
+ matcher: <includematcher includes='(?:)'>
f beans/black beans/black
f beans/borlotti beans/borlotti
f beans/kidney beans/kidney
@@ -59,6 +61,7 @@
$ cd mammals
$ hg debugwalk
+ matcher: <alwaysmatcher>
f beans/black ../beans/black
f beans/borlotti ../beans/borlotti
f beans/kidney ../beans/kidney
@@ -73,6 +76,7 @@
f mammals/Procyonidae/raccoon Procyonidae/raccoon
f mammals/skunk skunk
$ hg debugwalk -X ../beans
+ matcher: <differencematcher m1=<alwaysmatcher>, m2=<includematcher includes='(?:beans(?:/|$))'>>
f fennel ../fennel
f fenugreek ../fenugreek
f fiddlehead ../fiddlehead
@@ -81,24 +85,31 @@
f mammals/Procyonidae/raccoon Procyonidae/raccoon
f mammals/skunk skunk
$ hg debugwalk -I '*k'
+ matcher: <includematcher includes='(?:mammals\\/[^/]*k(?:/|$))'>
f mammals/skunk skunk
$ hg debugwalk -I 'glob:*k'
+ matcher: <includematcher includes='(?:mammals\\/[^/]*k(?:/|$))'>
f mammals/skunk skunk
$ hg debugwalk -I 'relglob:*k'
+ matcher: <includematcher includes='(?:(?:|.*/)[^/]*k(?:/|$))'>
f beans/black ../beans/black
f fenugreek ../fenugreek
f mammals/skunk skunk
$ hg debugwalk -I 'relglob:*k' .
+ matcher: <intersectionmatcher m1=<patternmatcher patterns='(?:mammals(?:/|$))'>, m2=<includematcher includes='(?:(?:|.*/)[^/]*k(?:/|$))'>>
f mammals/skunk skunk
$ hg debugwalk -I 're:.*k$'
+ matcher: <includematcher includes='(?:.*k$)'>
f beans/black ../beans/black
f fenugreek ../fenugreek
f mammals/skunk skunk
$ hg debugwalk -I 'relre:.*k$'
+ matcher: <includematcher includes='(?:.*.*k$)'>
f beans/black ../beans/black
f fenugreek ../fenugreek
f mammals/skunk skunk
$ hg debugwalk -I 'path:beans'
+ matcher: <includematcher includes='(?:beans(?:/|$))'>
f beans/black ../beans/black
f beans/borlotti ../beans/borlotti
f beans/kidney ../beans/kidney
@@ -106,6 +117,7 @@
f beans/pinto ../beans/pinto
f beans/turtle ../beans/turtle
$ hg debugwalk -I 'relpath:detour/../../beans'
+ matcher: <includematcher includes='(?:beans(?:/|$))'>
f beans/black ../beans/black
f beans/borlotti ../beans/borlotti
f beans/kidney ../beans/kidney
@@ -114,22 +126,27 @@
f beans/turtle ../beans/turtle
$ hg debugwalk 'rootfilesin:'
+ matcher: <patternmatcher patterns='(?:[^/]+$)'>
f fennel ../fennel
f fenugreek ../fenugreek
f fiddlehead ../fiddlehead
$ hg debugwalk -I 'rootfilesin:'
+ matcher: <includematcher includes='(?:[^/]+$)'>
f fennel ../fennel
f fenugreek ../fenugreek
f fiddlehead ../fiddlehead
$ hg debugwalk 'rootfilesin:.'
+ matcher: <patternmatcher patterns='(?:[^/]+$)'>
f fennel ../fennel
f fenugreek ../fenugreek
f fiddlehead ../fiddlehead
$ hg debugwalk -I 'rootfilesin:.'
+ matcher: <includematcher includes='(?:[^/]+$)'>
f fennel ../fennel
f fenugreek ../fenugreek
f fiddlehead ../fiddlehead
$ hg debugwalk -X 'rootfilesin:'
+ matcher: <differencematcher m1=<alwaysmatcher>, m2=<includematcher includes='(?:[^/]+$)'>>
f beans/black ../beans/black
f beans/borlotti ../beans/borlotti
f beans/kidney ../beans/kidney
@@ -141,10 +158,15 @@
f mammals/Procyonidae/raccoon Procyonidae/raccoon
f mammals/skunk skunk
$ hg debugwalk 'rootfilesin:fennel'
+ matcher: <patternmatcher patterns='(?:fennel/[^/]+$)'>
$ hg debugwalk -I 'rootfilesin:fennel'
+ matcher: <includematcher includes='(?:fennel/[^/]+$)'>
$ hg debugwalk 'rootfilesin:skunk'
+ matcher: <patternmatcher patterns='(?:skunk/[^/]+$)'>
$ hg debugwalk -I 'rootfilesin:skunk'
+ matcher: <includematcher includes='(?:skunk/[^/]+$)'>
$ hg debugwalk 'rootfilesin:beans'
+ matcher: <patternmatcher patterns='(?:beans/[^/]+$)'>
f beans/black ../beans/black
f beans/borlotti ../beans/borlotti
f beans/kidney ../beans/kidney
@@ -152,6 +174,7 @@
f beans/pinto ../beans/pinto
f beans/turtle ../beans/turtle
$ hg debugwalk -I 'rootfilesin:beans'
+ matcher: <includematcher includes='(?:beans/[^/]+$)'>
f beans/black ../beans/black
f beans/borlotti ../beans/borlotti
f beans/kidney ../beans/kidney
@@ -159,14 +182,19 @@
f beans/pinto ../beans/pinto
f beans/turtle ../beans/turtle
$ hg debugwalk 'rootfilesin:mammals'
+ matcher: <patternmatcher patterns='(?:mammals/[^/]+$)'>
f mammals/skunk skunk
$ hg debugwalk -I 'rootfilesin:mammals'
+ matcher: <includematcher includes='(?:mammals/[^/]+$)'>
f mammals/skunk skunk
$ hg debugwalk 'rootfilesin:mammals/'
+ matcher: <patternmatcher patterns='(?:mammals/[^/]+$)'>
f mammals/skunk skunk
$ hg debugwalk -I 'rootfilesin:mammals/'
+ matcher: <includematcher includes='(?:mammals/[^/]+$)'>
f mammals/skunk skunk
$ hg debugwalk -X 'rootfilesin:mammals'
+ matcher: <differencematcher m1=<alwaysmatcher>, m2=<includematcher includes='(?:mammals/[^/]+$)'>>
f beans/black ../beans/black
f beans/borlotti ../beans/borlotti
f beans/kidney ../beans/kidney
@@ -181,26 +209,31 @@
f mammals/Procyonidae/raccoon Procyonidae/raccoon
$ hg debugwalk .
+ matcher: <patternmatcher patterns='(?:mammals(?:/|$))'>
f mammals/Procyonidae/cacomistle Procyonidae/cacomistle
f mammals/Procyonidae/coatimundi Procyonidae/coatimundi
f mammals/Procyonidae/raccoon Procyonidae/raccoon
f mammals/skunk skunk
$ hg debugwalk -I.
+ matcher: <includematcher includes='(?:mammals(?:/|$))'>
f mammals/Procyonidae/cacomistle Procyonidae/cacomistle
f mammals/Procyonidae/coatimundi Procyonidae/coatimundi
f mammals/Procyonidae/raccoon Procyonidae/raccoon
f mammals/skunk skunk
$ hg debugwalk Procyonidae
+ matcher: <patternmatcher patterns='(?:mammals\\/Procyonidae(?:/|$))'>
f mammals/Procyonidae/cacomistle Procyonidae/cacomistle
f mammals/Procyonidae/coatimundi Procyonidae/coatimundi
f mammals/Procyonidae/raccoon Procyonidae/raccoon
$ cd Procyonidae
$ hg debugwalk .
+ matcher: <patternmatcher patterns='(?:mammals\\/Procyonidae(?:/|$))'>
f mammals/Procyonidae/cacomistle cacomistle
f mammals/Procyonidae/coatimundi coatimundi
f mammals/Procyonidae/raccoon raccoon
$ hg debugwalk ..
+ matcher: <patternmatcher patterns='(?:mammals(?:/|$))'>
f mammals/Procyonidae/cacomistle cacomistle
f mammals/Procyonidae/coatimundi coatimundi
f mammals/Procyonidae/raccoon raccoon
@@ -208,6 +241,7 @@
$ cd ..
$ hg debugwalk ../beans
+ matcher: <patternmatcher patterns='(?:beans(?:/|$))'>
f beans/black ../beans/black
f beans/borlotti ../beans/borlotti
f beans/kidney ../beans/kidney
@@ -215,6 +249,7 @@
f beans/pinto ../beans/pinto
f beans/turtle ../beans/turtle
$ hg debugwalk .
+ matcher: <patternmatcher patterns='(?:mammals(?:/|$))'>
f mammals/Procyonidae/cacomistle Procyonidae/cacomistle
f mammals/Procyonidae/coatimundi Procyonidae/coatimundi
f mammals/Procyonidae/raccoon Procyonidae/raccoon
@@ -228,6 +263,7 @@
$ cd ..
$ hg debugwalk -Ibeans
+ matcher: <includematcher includes='(?:beans(?:/|$))'>
f beans/black beans/black
f beans/borlotti beans/borlotti
f beans/kidney beans/kidney
@@ -235,41 +271,56 @@
f beans/pinto beans/pinto
f beans/turtle beans/turtle
$ hg debugwalk -I '{*,{b,m}*/*}k'
+ matcher: <includematcher includes='(?:(?:[^/]*|(?:b|m)[^/]*\\/[^/]*)k(?:/|$))'>
f beans/black beans/black
f fenugreek fenugreek
f mammals/skunk mammals/skunk
$ hg debugwalk -Ibeans mammals
+ matcher: <intersectionmatcher m1=<patternmatcher patterns='(?:mammals(?:/|$))'>, m2=<includematcher includes='(?:beans(?:/|$))'>>
$ hg debugwalk -Inon-existent
+ matcher: <includematcher includes='(?:non\\-existent(?:/|$))'>
$ hg debugwalk -Inon-existent -Ibeans/black
+ matcher: <includematcher includes='(?:non\\-existent(?:/|$)|beans\\/black(?:/|$))'>
f beans/black beans/black
$ hg debugwalk -Ibeans beans/black
+ matcher: <intersectionmatcher m1=<patternmatcher patterns='(?:beans\\/black(?:/|$))'>, m2=<includematcher includes='(?:beans(?:/|$))'>>
f beans/black beans/black exact
$ hg debugwalk -Ibeans/black beans
+ matcher: <intersectionmatcher m1=<patternmatcher patterns='(?:beans(?:/|$))'>, m2=<includematcher includes='(?:beans\\/black(?:/|$))'>>
f beans/black beans/black
$ hg debugwalk -Xbeans/black beans
+ matcher: <differencematcher m1=<patternmatcher patterns='(?:beans(?:/|$))'>, m2=<includematcher includes='(?:beans\\/black(?:/|$))'>>
f beans/borlotti beans/borlotti
f beans/kidney beans/kidney
f beans/navy beans/navy
f beans/pinto beans/pinto
f beans/turtle beans/turtle
$ hg debugwalk -Xbeans/black -Ibeans
+ matcher: <differencematcher m1=<includematcher includes='(?:beans(?:/|$))'>, m2=<includematcher includes='(?:beans\\/black(?:/|$))'>>
f beans/borlotti beans/borlotti
f beans/kidney beans/kidney
f beans/navy beans/navy
f beans/pinto beans/pinto
f beans/turtle beans/turtle
$ hg debugwalk -Xbeans/black beans/black
+ matcher: <differencematcher m1=<patternmatcher patterns='(?:beans\\/black(?:/|$))'>, m2=<includematcher includes='(?:beans\\/black(?:/|$))'>>
f beans/black beans/black exact
$ hg debugwalk -Xbeans/black -Ibeans/black
+ matcher: <differencematcher m1=<includematcher includes='(?:beans\\/black(?:/|$))'>, m2=<includematcher includes='(?:beans\\/black(?:/|$))'>>
$ hg debugwalk -Xbeans beans/black
+ matcher: <differencematcher m1=<patternmatcher patterns='(?:beans\\/black(?:/|$))'>, m2=<includematcher includes='(?:beans(?:/|$))'>>
f beans/black beans/black exact
$ hg debugwalk -Xbeans -Ibeans/black
+ matcher: <differencematcher m1=<includematcher includes='(?:beans\\/black(?:/|$))'>, m2=<includematcher includes='(?:beans(?:/|$))'>>
$ hg debugwalk 'glob:mammals/../beans/b*'
+ matcher: <patternmatcher patterns='(?:beans\\/b[^/]*$)'>
f beans/black beans/black
f beans/borlotti beans/borlotti
$ hg debugwalk '-X*/Procyonidae' mammals
+ matcher: <differencematcher m1=<patternmatcher patterns='(?:mammals(?:/|$))'>, m2=<includematcher includes='(?:[^/]*\\/Procyonidae(?:/|$))'>>
f mammals/skunk mammals/skunk
$ hg debugwalk path:mammals
+ matcher: <patternmatcher patterns='(?:mammals(?:/|$))'>
f mammals/Procyonidae/cacomistle mammals/Procyonidae/cacomistle
f mammals/Procyonidae/coatimundi mammals/Procyonidae/coatimundi
f mammals/Procyonidae/raccoon mammals/Procyonidae/raccoon
@@ -296,6 +347,7 @@
Test absolute paths:
$ hg debugwalk `pwd`/beans
+ matcher: <patternmatcher patterns='(?:beans(?:/|$))'>
f beans/black beans/black
f beans/borlotti beans/borlotti
f beans/kidney beans/kidney
@@ -309,6 +361,7 @@
Test patterns:
$ hg debugwalk glob:\*
+ matcher: <patternmatcher patterns='(?:[^/]*$)'>
f fennel fennel
f fenugreek fenugreek
f fiddlehead fiddlehead
@@ -318,15 +371,19 @@
adding glob:glob
warning: filename contains ':', which is reserved on Windows: 'glob:glob'
$ hg debugwalk glob:\*
+ matcher: <patternmatcher patterns='(?:[^/]*$)'>
f fennel fennel
f fenugreek fenugreek
f fiddlehead fiddlehead
f glob:glob glob:glob
$ hg debugwalk glob:glob
+ matcher: <patternmatcher patterns='(?:glob$)'>
glob: No such file or directory
$ hg debugwalk glob:glob:glob
+ matcher: <patternmatcher patterns='(?:glob\\:glob$)'>
f glob:glob glob:glob exact
$ hg debugwalk path:glob:glob
+ matcher: <patternmatcher patterns='(?:glob\\:glob(?:/|$))'>
f glob:glob glob:glob exact
$ rm glob:glob
$ hg addremove
@@ -334,30 +391,38 @@
#endif
$ hg debugwalk 'glob:**e'
+ matcher: <patternmatcher patterns='(?:.*e$)'>
f beans/turtle beans/turtle
f mammals/Procyonidae/cacomistle mammals/Procyonidae/cacomistle
$ hg debugwalk 're:.*[kb]$'
+ matcher: <patternmatcher patterns='(?:.*[kb]$)'>
f beans/black beans/black
f fenugreek fenugreek
f mammals/skunk mammals/skunk
$ hg debugwalk path:beans/black
+ matcher: <patternmatcher patterns='(?:beans\\/black(?:/|$))'>
f beans/black beans/black exact
$ hg debugwalk path:beans//black
+ matcher: <patternmatcher patterns='(?:beans\\/black(?:/|$))'>
f beans/black beans/black exact
$ hg debugwalk relglob:Procyonidae
+ matcher: <patternmatcher patterns='(?:(?:|.*/)Procyonidae$)'>
$ hg debugwalk 'relglob:Procyonidae/**'
+ matcher: <patternmatcher patterns='(?:(?:|.*/)Procyonidae\\/.*$)'>
f mammals/Procyonidae/cacomistle mammals/Procyonidae/cacomistle
f mammals/Procyonidae/coatimundi mammals/Procyonidae/coatimundi
f mammals/Procyonidae/raccoon mammals/Procyonidae/raccoon
$ hg debugwalk 'relglob:Procyonidae/**' fennel
+ matcher: <patternmatcher patterns='(?:(?:|.*/)Procyonidae\\/.*$|fennel(?:/|$))'>
f fennel fennel exact
f mammals/Procyonidae/cacomistle mammals/Procyonidae/cacomistle
f mammals/Procyonidae/coatimundi mammals/Procyonidae/coatimundi
f mammals/Procyonidae/raccoon mammals/Procyonidae/raccoon
$ hg debugwalk beans 'glob:beans/*'
+ matcher: <patternmatcher patterns='(?:beans(?:/|$)|beans\\/[^/]*$)'>
f beans/black beans/black
f beans/borlotti beans/borlotti
f beans/kidney beans/kidney
@@ -365,63 +430,78 @@
f beans/pinto beans/pinto
f beans/turtle beans/turtle
$ hg debugwalk 'glob:mamm**'
+ matcher: <patternmatcher patterns='(?:mamm.*$)'>
f mammals/Procyonidae/cacomistle mammals/Procyonidae/cacomistle
f mammals/Procyonidae/coatimundi mammals/Procyonidae/coatimundi
f mammals/Procyonidae/raccoon mammals/Procyonidae/raccoon
f mammals/skunk mammals/skunk
$ hg debugwalk 'glob:mamm**' fennel
+ matcher: <patternmatcher patterns='(?:mamm.*$|fennel(?:/|$))'>
f fennel fennel exact
f mammals/Procyonidae/cacomistle mammals/Procyonidae/cacomistle
f mammals/Procyonidae/coatimundi mammals/Procyonidae/coatimundi
f mammals/Procyonidae/raccoon mammals/Procyonidae/raccoon
f mammals/skunk mammals/skunk
$ hg debugwalk 'glob:j*'
+ matcher: <patternmatcher patterns='(?:j[^/]*$)'>
$ hg debugwalk NOEXIST
+ matcher: <patternmatcher patterns='(?:NOEXIST(?:/|$))'>
NOEXIST: * (glob)
#if fifo
$ mkfifo fifo
$ hg debugwalk fifo
+ matcher: <patternmatcher patterns='(?:fifo(?:/|$))'>
fifo: unsupported file type (type is fifo)
#endif
$ rm fenugreek
$ hg debugwalk fenugreek
+ matcher: <patternmatcher patterns='(?:fenugreek(?:/|$))'>
f fenugreek fenugreek exact
$ hg rm fenugreek
$ hg debugwalk fenugreek
+ matcher: <patternmatcher patterns='(?:fenugreek(?:/|$))'>
f fenugreek fenugreek exact
$ touch new
$ hg debugwalk new
+ matcher: <patternmatcher patterns='(?:new(?:/|$))'>
f new new exact
$ mkdir ignored
$ touch ignored/file
$ echo '^ignored$' > .hgignore
$ hg debugwalk ignored
+ matcher: <patternmatcher patterns='(?:ignored(?:/|$))'>
$ hg debugwalk ignored/file
+ matcher: <patternmatcher patterns='(?:ignored\\/file(?:/|$))'>
f ignored/file ignored/file exact
Test listfile and listfile0
$ $PYTHON -c "file('listfile0', 'wb').write('fenugreek\0new\0')"
$ hg debugwalk -I 'listfile0:listfile0'
+ matcher: <includematcher includes='(?:fenugreek(?:/|$)|new(?:/|$))'>
f fenugreek fenugreek
f new new
$ $PYTHON -c "file('listfile', 'wb').write('fenugreek\nnew\r\nmammals/skunk\n')"
$ hg debugwalk -I 'listfile:listfile'
+ matcher: <includematcher includes='(?:fenugreek(?:/|$)|new(?:/|$)|mammals\\/skunk(?:/|$))'>
f fenugreek fenugreek
f mammals/skunk mammals/skunk
f new new
$ cd ..
$ hg debugwalk -R t t/mammals/skunk
+ matcher: <patternmatcher patterns='(?:mammals\\/skunk(?:/|$))'>
f mammals/skunk t/mammals/skunk exact
$ mkdir t2
$ cd t2
$ hg debugwalk -R ../t ../t/mammals/skunk
+ matcher: <patternmatcher patterns='(?:mammals\\/skunk(?:/|$))'>
f mammals/skunk ../t/mammals/skunk exact
$ hg debugwalk --cwd ../t mammals/skunk
+ matcher: <patternmatcher patterns='(?:mammals\\/skunk(?:/|$))'>
f mammals/skunk mammals/skunk exact
$ cd ..
@@ -432,7 +512,7 @@
$ echo fennel > overflow.list
$ $PYTHON -c "for i in xrange(20000 / 100): print 'x' * 100" >> overflow.list
$ echo fenugreek >> overflow.list
- $ hg debugwalk 'listfile:overflow.list' 2>&1 | grep -v '^xxx'
+ $ hg debugwalk 'listfile:overflow.list' 2>&1 | egrep -v '(^matcher: |^xxx)'
f fennel fennel exact
f fenugreek fenugreek exact
$ cd ..
--- a/tests/test-win32text.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-win32text.t Wed Jul 19 07:51:41 2017 -0500
@@ -28,7 +28,7 @@
updating to branch default
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cp .hg/hgrc ../zoz/.hg
- $ python unix2dos.py f
+ $ $PYTHON unix2dos.py f
commit should fail
@@ -102,7 +102,7 @@
$ mkdir d
$ echo hello > d/f2
- $ python unix2dos.py d/f2
+ $ $PYTHON unix2dos.py d/f2
$ hg add d/f2
$ hg ci -m 3
attempt to commit or push text file(s) using CRLF line endings
@@ -181,7 +181,7 @@
adding dupe/b (glob)
adding dupe/c (glob)
adding dupe/d (glob)
- $ python unix2dos.py dupe/b dupe/c dupe/d
+ $ $PYTHON unix2dos.py dupe/b dupe/c dupe/d
$ hg -R dupe ci -m a dupe/a
$ hg -R dupe ci -m b/c dupe/[bc]
$ hg -R dupe ci -m d dupe/d
--- a/tests/test-wireproto.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-wireproto.t Wed Jul 19 07:51:41 2017 -0500
@@ -147,13 +147,13 @@
SSH (try to exercise the ssh functionality with a dummy script):
- $ hg debugwireargs --ssh "python $TESTDIR/dummyssh" ssh://user@dummy/repo uno due tre quattro
+ $ hg debugwireargs --ssh "\"$PYTHON\" $TESTDIR/dummyssh" ssh://user@dummy/repo uno due tre quattro
uno due tre quattro None
- $ hg debugwireargs --ssh "python $TESTDIR/dummyssh" ssh://user@dummy/repo eins zwei --four vier
+ $ hg debugwireargs --ssh "\"$PYTHON\" $TESTDIR/dummyssh" ssh://user@dummy/repo eins zwei --four vier
eins zwei None vier None
- $ hg debugwireargs --ssh "python $TESTDIR/dummyssh" ssh://user@dummy/repo eins zwei
+ $ hg debugwireargs --ssh "\"$PYTHON\" $TESTDIR/dummyssh" ssh://user@dummy/repo eins zwei
eins zwei None None None
- $ hg debugwireargs --ssh "python $TESTDIR/dummyssh" ssh://user@dummy/repo eins zwei --five fuenf
+ $ hg debugwireargs --ssh "\"$PYTHON\" $TESTDIR/dummyssh" ssh://user@dummy/repo eins zwei --five fuenf
eins zwei None None None
Explicitly kill daemons to let the test exit on Windows
--- a/tests/test-worker.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-worker.t Wed Jul 19 07:51:41 2017 -0500
@@ -2,9 +2,10 @@
$ cat > t.py <<EOF
> from __future__ import absolute_import, print_function
+ > import time
> from mercurial import (
- > cmdutil,
> error,
+ > registrar,
> ui as uimod,
> worker,
> )
@@ -22,14 +23,15 @@
> for arg in args:
> ui.status('run\n')
> yield 1, arg
+ > time.sleep(0.1) # easier to trigger killworkers code path
> functable = {
> 'abort': abort,
> 'exc': exc,
> 'runme': runme,
> }
> cmdtable = {}
- > command = cmdutil.command(cmdtable)
- > @command('test', [], 'hg test [COST] [FUNC]')
+ > command = registrar.command(cmdtable)
+ > @command(b'test', [], 'hg test [COST] [FUNC]')
> def t(ui, repo, cost=1.0, func='runme'):
> cost = float(cost)
> func = functable[func]
@@ -74,21 +76,53 @@
Known exception should be caught, but printed if --traceback is enabled
- $ hg --config "extensions.t=$abspath" --config 'worker.numcpus=2' \
- > test 100000.0 abort
+ $ hg --config "extensions.t=$abspath" --config 'worker.numcpus=8' \
+ > test 100000.0 abort 2>&1
start
abort: known exception
[255]
- $ hg --config "extensions.t=$abspath" --config 'worker.numcpus=2' \
- > test 100000.0 abort --traceback 2>&1 | grep '^Traceback'
- Traceback (most recent call last):
- Traceback (most recent call last):
+ $ hg --config "extensions.t=$abspath" --config 'worker.numcpus=8' \
+ > test 100000.0 abort --traceback 2>&1 | egrep '^(SystemExit|Abort)'
+ Abort: known exception
+ SystemExit: 255
Traceback must be printed for unknown exceptions
- $ hg --config "extensions.t=$abspath" --config 'worker.numcpus=2' \
- > test 100000.0 exc 2>&1 | grep '^Traceback'
- Traceback (most recent call last):
+ $ hg --config "extensions.t=$abspath" --config 'worker.numcpus=8' \
+ > test 100000.0 exc 2>&1 | grep '^Exception'
+ Exception: unknown exception
+
+Workers should not do cleanups in all cases
+
+ $ cat > $TESTTMP/detectcleanup.py <<EOF
+ > from __future__ import absolute_import
+ > import atexit
+ > import os
+ > import time
+ > oldfork = os.fork
+ > count = 0
+ > parentpid = os.getpid()
+ > def delayedfork():
+ > global count
+ > count += 1
+ > pid = oldfork()
+ > # make it easier to test SIGTERM hitting other workers when they have
+ > # not set up error handling yet.
+ > if count > 1 and pid == 0:
+ > time.sleep(0.1)
+ > return pid
+ > os.fork = delayedfork
+ > def cleanup():
+ > if os.getpid() != parentpid:
+ > os.write(1, 'should never happen\n')
+ > atexit.register(cleanup)
+ > EOF
+
+ $ hg --config "extensions.t=$abspath" --config worker.numcpus=8 --config \
+ > "extensions.d=$TESTTMP/detectcleanup.py" test 100000 abort
+ start
+ abort: known exception
+ [255]
#endif
--- a/tests/test-xdg.t Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/test-xdg.t Wed Jul 19 07:51:41 2017 -0500
@@ -5,7 +5,7 @@
$ echo 'username = foobar' >> xdgconf/hg/hgrc
$ XDG_CONFIG_HOME="`pwd`/xdgconf" ; export XDG_CONFIG_HOME
$ unset HGRCPATH
- $ hg config ui.username
+ $ hg config ui.username 2>/dev/null
foobar
#endif
--- a/tests/testlib/exchange-obsmarker-util.sh Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/testlib/exchange-obsmarker-util.sh Wed Jul 19 07:51:41 2017 -0500
@@ -32,6 +32,9 @@
# we need to strip some changeset for some test cases
hgext.strip=
+[devel]
+strip-obsmarkers = no
+
[alias]
# fix date used to create obsolete markers.
debugobsolete=debugobsolete -d '0 0'
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/testlib/ext-phase-report.py Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,22 @@
+# tiny extension to report phase changes during transaction
+
+from __future__ import absolute_import
+
+def reposetup(ui, repo):
+
+ def reportphasemove(tr):
+ for rev, move in sorted(tr.changes['phases'].iteritems()):
+ if move[0] is None:
+ ui.write(('test-debug-phase: new rev %d: x -> %d\n'
+ % (rev, move[1])))
+ else:
+ ui.write(('test-debug-phase: move rev %d: %s -> %d\n'
+ % (rev, move[0], move[1])))
+
+ class reportphaserepo(repo.__class__):
+ def transaction(self, *args, **kwargs):
+ tr = super(reportphaserepo, self).transaction(*args, **kwargs)
+ tr.addpostclose('report-phase', reportphasemove)
+ return tr
+
+ repo.__class__ = reportphaserepo
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/testlib/obsmarker-common.sh Wed Jul 19 07:51:41 2017 -0500
@@ -0,0 +1,14 @@
+mkcommit() {
+ echo "$1" > "$1"
+ hg add "$1"
+ hg ci -m "$1"
+}
+
+getid() {
+ hg log --hidden --template '{node}\n' --rev "$1"
+}
+
+cat >> $HGRCPATH <<EOF
+[alias]
+debugobsolete=debugobsolete -d '0 0'
+EOF
--- a/tests/tinyproxy.py Wed Jul 05 11:24:22 2017 -0400
+++ b/tests/tinyproxy.py Wed Jul 19 07:51:41 2017 -0500
@@ -53,6 +53,9 @@
self.log_message('"%s" %s %s%s',
self.requestline, str(code), str(size),
''.join([' %s:%s' % h for h in sorted(xheaders)]))
+ # Flush for Windows, so output isn't lost on TerminateProcess()
+ sys.stdout.flush()
+ sys.stderr.flush()
def _connect_to(self, netloc, soc):
i = netloc.find(':')