merge with stable
author      Matt Mackall <mpm@selenic.com>
date        Tue, 14 Jun 2016 14:52:58 -0500
changeset 29349 8bf84295e59b
parent 29348 2188f170f5b6 (diff)
parent 29329 f359cdc91e21 (current diff)
child 29350 35861cc1076e
merge with stable
hgext/largefiles/lfutil.py
--- a/contrib/bdiff-torture.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/contrib/bdiff-torture.py	Tue Jun 14 14:52:58 2016 -0500
@@ -1,7 +1,9 @@
 # Randomized torture test generation for bdiff
 
 from __future__ import absolute_import, print_function
-import random, sys
+import random
+import sys
+
 from mercurial import (
     bdiff,
     mpatch,
--- a/contrib/check-code.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/contrib/check-code.py	Tue Jun 14 14:52:58 2016 -0500
@@ -26,6 +26,15 @@
 import os
 import re
 import sys
+if sys.version_info[0] < 3:
+    opentext = open
+else:
+    def opentext(f):
+        return open(f, encoding='ascii')
+try:
+    xrange
+except NameError:
+    xrange = range
 try:
     import re2
 except ImportError:
@@ -42,22 +51,21 @@
     return re.compile(pat)
 
 def repquote(m):
-    fromc = '.:'
-    tochr = 'pq'
+    # check "rules depending on implementation of repquote()" in each
+    # patterns (especially pypats), before changing this function
+    fixedmap = {' ': ' ', '\n': '\n', '.': 'p', ':': 'q',
+                '%': '%', '\\': 'b', '*': 'A', '+': 'P', '-': 'M'}
     def encodechr(i):
         if i > 255:
             return 'u'
         c = chr(i)
-        if c in ' \n':
-            return c
+        if c in fixedmap:
+            return fixedmap[c]
         if c.isalpha():
             return 'x'
         if c.isdigit():
             return 'n'
-        try:
-            return tochr[fromc.find(c)]
-        except (ValueError, IndexError):
-            return 'o'
+        return 'o'
     t = m.group('text')
     tt = ''.join(encodechr(i) for i in xrange(256))
     t = t.translate(tt)
@@ -103,7 +111,7 @@
     (r'tail -n', "don't use the '-n' option to tail, just use '-<num>'"),
     (r'sha1sum', "don't use sha1sum, use $TESTDIR/md5sum.py"),
     (r'ls.*-\w*R', "don't use 'ls -R', use 'find'"),
-    (r'printf.*[^\\]\\([1-9]|0\d)', "don't use 'printf \NNN', use Python"),
+    (r'printf.*[^\\]\\([1-9]|0\d)', r"don't use 'printf \NNN', use Python"),
     (r'printf.*[^\\]\\x', "don't use printf \\x, use Python"),
     (r'\$\(.*\)', "don't use $(expr), use `expr`"),
     (r'rm -rf \*', "don't use naked rm -rf, target a directory"),
@@ -114,7 +122,7 @@
     (r'export .*=', "don't export and assign at once"),
     (r'^source\b', "don't use 'source', use '.'"),
     (r'touch -d', "don't use 'touch -d', use 'touch -t' instead"),
-    (r'ls +[^|\n-]+ +-', "options to 'ls' must come before filenames"),
+    (r'\bls +[^|\n-]+ +-', "options to 'ls' must come before filenames"),
     (r'[^>\n]>\s*\$HGRCPATH', "don't overwrite $HGRCPATH, append to it"),
     (r'^stop\(\)', "don't use 'stop' as a shell function name"),
     (r'(\[|\btest\b).*-e ', "don't use 'test -e', use 'test -f'"),
@@ -133,6 +141,7 @@
     (r'\|&', "don't use |&, use 2>&1"),
     (r'\w =  +\w', "only one space after = allowed"),
     (r'\bsed\b.*[^\\]\\n', "don't use 'sed ... \\n', use a \\ and a newline"),
+    (r'env.*-u', "don't use 'env -u VAR', use 'unset VAR'")
   ],
   # warnings
   [
@@ -238,7 +247,6 @@
     (r'^\s+(\w|\.)+=\w[^,()\n]*$', "missing whitespace in assignment"),
     (r'\w\s=\s\s+\w', "gratuitous whitespace after ="),
     (r'.{81}', "line too long"),
-    (r' x+[xo][\'"]\n\s+[\'"]x', 'string join across lines with no space'),
     (r'[^\n]\Z', "no trailing newline"),
     (r'(\S[ \t]+|^[ \t]+)\n', "trailing whitespace"),
 #    (r'^\s+[^_ \n][^_. \n]+_[^_\n]+\s*=',
@@ -305,8 +313,6 @@
     (r'^\s*except\s([^\(,]+|\([^\)]+\))\s*,',
      'legacy exception syntax; use "as" instead of ","'),
     (r':\n(    )*( ){1,3}[^ ]', "must indent 4 spaces"),
-    (r'ui\.(status|progress|write|note|warn)\([\'\"]x',
-     "missing _() in ui message (use () to hide false-positives)"),
     (r'release\(.*wlock, .*lock\)', "wrong lock release order"),
     (r'\b__bool__\b', "__bool__ should be __nonzero__ in Python 2"),
     (r'os\.path\.join\(.*, *(""|\'\')\)',
@@ -318,9 +324,17 @@
     (r'^import Queue', "don't use Queue, use util.queue + util.empty"),
     (r'^import cStringIO', "don't use cStringIO.StringIO, use util.stringio"),
     (r'^import urllib', "don't use urllib, use util.urlreq/util.urlerr"),
+    (r'\.next\(\)', "don't use .next(), use next(...)"),
+
+    # rules depending on implementation of repquote()
+    (r' x+[xpqo%APM][\'"]\n\s+[\'"]x',
+     'string join across lines with no space'),
+    (r'ui\.(status|progress|write|note|warn)\([\'\"]x',
+     "missing _() in ui message (use () to hide false-positives)"),
   ],
   # warnings
   [
+    # rules depending on implementation of repquote()
     (r'(^| )pp +xxxxqq[ \n][^\n]', "add two newlines after '.. note::'"),
   ]
 ]
@@ -365,9 +379,13 @@
     (r'^\s*#import\b', "use only #include in standard C code"),
     (r'strcpy\(', "don't use strcpy, use strlcpy or memcpy"),
     (r'strcat\(', "don't use strcat"),
+
+    # rules depending on implementation of repquote()
   ],
   # warnings
-  []
+  [
+    # rules depending on implementation of repquote()
+  ]
 ]
 
 cfilters = [
@@ -486,12 +504,15 @@
     result = True
 
     try:
-        fp = open(f)
+        with opentext(f) as fp:
+            try:
+                pre = post = fp.read()
+            except UnicodeDecodeError as e:
+                print("%s while reading %s" % (e, f))
+                return result
     except IOError as e:
         print("Skipping %s, %s" % (f, str(e).split(':', 1)[0]))
         return result
-    pre = post = fp.read()
-    fp.close()
 
     for name, match, magic, filters, pats in checks:
         if debug:
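
The check-code.py hunks above group several rule patterns under a "rules depending on implementation of repquote()" comment. As a reading aid, here is a minimal standalone Python sketch of the encoding those rules match against; it mirrors the fixedmap/encodechr logic shown in the hunk but is not check-code.py itself:

    # sketch only: mirrors the character classes repquote() produces after
    # this change, simplified to a plain function instead of a 256-entry
    # translation table
    fixedmap = {' ': ' ', '\n': '\n', '.': 'p', ':': 'q',
                '%': '%', '\\': 'b', '*': 'A', '+': 'P', '-': 'M'}

    def encodechar(c):
        if ord(c) > 255:
            return 'u'
        if c in fixedmap:
            return fixedmap[c]
        if c.isalpha():
            return 'x'
        if c.isdigit():
            return 'n'
        return 'o'

    # a quoted string such as "foo.bar 123" is seen by the rule patterns as
    # "xxxpxxx nnn", which is why they match letters like x, n, p, q, A, P, M
    print(''.join(encodechar(c) for c in 'foo.bar 123'))
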
--- a/contrib/check-commit	Tue Jun 07 08:32:33 2016 +0200
+++ b/contrib/check-commit	Tue Jun 14 14:52:58 2016 -0500
@@ -15,7 +15,11 @@
 #
 # See also: https://mercurial-scm.org/wiki/ContributingChanges
 
-import re, sys, os
+from __future__ import absolute_import, print_function
+
+import os
+import re
+import sys
 
 commitheader = r"^(?:# [^\n]*\n)*"
 afterheader = commitheader + r"(?!#)"
@@ -69,9 +73,9 @@
                     break
                 if not printed:
                     printed = True
-                    print "node: %s" % node
-                print "%d: %s" % (n, msg)
-                print " %s" % nonempty(l, last)[:-1]
+                    print("node: %s" % node)
+                print("%d: %s" % (n, msg))
+                print(" %s" % nonempty(l, last)[:-1])
                 if "BYPASS" not in os.environ:
                     exitcode = 1
                 del hits[0]
--- a/contrib/chg/README	Tue Jun 07 08:32:33 2016 +0200
+++ b/contrib/chg/README	Tue Jun 14 14:52:58 2016 -0500
@@ -28,3 +28,5 @@
 
  * CHGDEBUG enables debug messages.
  * CHGSOCKNAME specifies the socket path of the background cmdserver.
+ * CHGTIMEOUT specifies how many seconds chg will wait before giving up
+   connecting to a cmdserver. If it is 0, chg will wait forever. Default: 10
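
For example, with the chg.c change below, setting CHGTIMEOUT=30 in the environment makes chg retry the cmdserver connection for up to 30 seconds, while CHGTIMEOUT=0 makes it retry indefinitely (illustrative values; the semantics are those stated in the README lines above).
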
--- a/contrib/chg/chg.c	Tue Jun 07 08:32:33 2016 +0200
+++ b/contrib/chg/chg.c	Tue Jun 14 14:52:58 2016 -0500
@@ -249,7 +249,13 @@
 	int pst = 0;
 
 	debugmsg("try connect to %s repeatedly", opts->sockname);
-	for (unsigned int i = 0; i < 10 * 100; i++) {
+
+	unsigned int timeoutsec = 10;  /* default: 10 seconds */
+	const char *timeoutenv = getenv("CHGTIMEOUT");
+	if (timeoutenv)
+		sscanf(timeoutenv, "%u", &timeoutsec);
+
+	for (unsigned int i = 0; !timeoutsec || i < timeoutsec * 100; i++) {
 		hgclient_t *hgc = hgc_open(opts->sockname);
 		if (hgc)
 			return hgc;
@@ -417,21 +423,22 @@
 	abortmsgerrno("failed to set up signal handlers");
 }
 
-/* This implementation is based on hgext/pager.py (pre 369741ef7253) */
-static void setuppager(hgclient_t *hgc, const char *const args[],
+/* This implementation is based on hgext/pager.py (post 369741ef7253)
+ * Return 0 if pager is not started, or pid of the pager */
+static pid_t setuppager(hgclient_t *hgc, const char *const args[],
 		       size_t argsize)
 {
 	const char *pagercmd = hgc_getpager(hgc, args, argsize);
 	if (!pagercmd)
-		return;
+		return 0;
 
 	int pipefds[2];
 	if (pipe(pipefds) < 0)
-		return;
+		return 0;
 	pid_t pid = fork();
 	if (pid < 0)
 		goto error;
-	if (pid == 0) {
+	if (pid > 0) {
 		close(pipefds[0]);
 		if (dup2(pipefds[1], fileno(stdout)) < 0)
 			goto error;
@@ -441,7 +448,7 @@
 		}
 		close(pipefds[1]);
 		hgc_attachio(hgc);  /* reattach to pager */
-		return;
+		return pid;
 	} else {
 		dup2(pipefds[0], fileno(stdin));
 		close(pipefds[0]);
@@ -451,13 +458,27 @@
 		if (r < 0) {
 			abortmsgerrno("cannot start pager '%s'", pagercmd);
 		}
-		return;
+		return 0;
 	}
 
 error:
 	close(pipefds[0]);
 	close(pipefds[1]);
 	abortmsgerrno("failed to prepare pager");
+	return 0;
+}
+
+static void waitpager(pid_t pid)
+{
+	/* close output streams to notify the pager its input ends */
+	fclose(stdout);
+	fclose(stderr);
+	while (1) {
+		pid_t ret = waitpid(pid, NULL, 0);
+		if (ret == -1 && errno == EINTR)
+			continue;
+		break;
+	}
 }
 
 /* Run instructions sent from the server like unlink and set redirect path
@@ -585,9 +606,12 @@
 	}
 
 	setupsignalhandler(hgc_peerpid(hgc));
-	setuppager(hgc, argv + 1, argc - 1);
+	pid_t pagerpid = setuppager(hgc, argv + 1, argc - 1);
 	int exitcode = hgc_runcommand(hgc, argv + 1, argc - 1);
 	hgc_close(hgc);
 	freecmdserveropts(&opts);
+	if (pagerpid)
+		waitpager(pagerpid);
+
 	return exitcode;
 }
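
The chg.c hunks above swap which side of the fork becomes the pager: the parent now keeps running the hg command and setuppager() returns the pager's pid, while the new waitpager() closes the output streams and reaps the pager before chg exits. A rough Python sketch of that Unix pattern, as an illustration only (not chg's C implementation; the pager command is an arbitrary example):

    import os
    import sys

    def setuppager(pagercmd):
        r, w = os.pipe()
        pid = os.fork()
        if pid > 0:                          # parent: keep doing the real work
            os.close(r)
            os.dup2(w, sys.stdout.fileno())  # stdout now feeds the pager
            os.close(w)
            return pid
        # child: become the pager, reading from the pipe
        os.dup2(r, sys.stdin.fileno())
        os.close(r)
        os.close(w)
        os.execvp('/bin/sh', ['/bin/sh', '-c', pagercmd])

    def waitpager(pid):
        sys.stdout.close()                   # EOF tells the pager its input ended
        while True:
            try:
                os.waitpid(pid, 0)
                break
            except InterruptedError:         # retry on EINTR, as waitpager() does
                continue

    pagerpid = setuppager('less -FRX')
    print("output shown through the pager")
    waitpager(pagerpid)
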
--- a/contrib/dirstatenonnormalcheck.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/contrib/dirstatenonnormalcheck.py	Tue Jun 14 14:52:58 2016 -0500
@@ -25,10 +25,10 @@
     """Compute nonnormalset from dmap, check that it matches _nonnormalset"""
     nonnormalcomputedmap = nonnormalentries(dmap)
     if _nonnormalset != nonnormalcomputedmap:
-        ui.develwarn("%s call to %s\n" % (label, orig))
-        ui.develwarn("inconsistency in nonnormalset\n")
-        ui.develwarn("[nonnormalset] %s\n" % _nonnormalset)
-        ui.develwarn("[map] %s\n" % nonnormalcomputedmap)
+        ui.develwarn("%s call to %s\n" % (label, orig), config='dirstate')
+        ui.develwarn("inconsistency in nonnormalset\n", config='dirstate')
+        ui.develwarn("[nonnormalset] %s\n" % _nonnormalset, config='dirstate')
+        ui.develwarn("[map] %s\n" % nonnormalcomputedmap, config='dirstate')
 
 def _checkdirstate(orig, self, arg):
     """Check nonnormal set consistency before and after the call to orig"""
--- a/contrib/dumprevlog	Tue Jun 07 08:32:33 2016 +0200
+++ b/contrib/dumprevlog	Tue Jun 14 14:52:58 2016 -0500
@@ -2,8 +2,14 @@
 # Dump revlogs as raw data stream
 # $ find .hg/store/ -name "*.i" | xargs dumprevlog > repo.dump
 
+from __future__ import absolute_import, print_function
+
 import sys
-from mercurial import revlog, node, util
+from mercurial import (
+    node,
+    revlog,
+    util,
+)
 
 for fp in (sys.stdin, sys.stdout, sys.stderr):
     util.setbinary(fp)
@@ -11,15 +17,15 @@
 for f in sys.argv[1:]:
     binopen = lambda fn: open(fn, 'rb')
     r = revlog.revlog(binopen, f)
-    print "file:", f
+    print("file:", f)
     for i in r:
         n = r.node(i)
         p = r.parents(n)
         d = r.revision(n)
-        print "node:", node.hex(n)
-        print "linkrev:", r.linkrev(i)
-        print "parents:", node.hex(p[0]), node.hex(p[1])
-        print "length:", len(d)
-        print "-start-"
-        print d
-        print "-end-"
+        print("node:", node.hex(n))
+        print("linkrev:", r.linkrev(i))
+        print("parents:", node.hex(p[0]), node.hex(p[1]))
+        print("length:", len(d))
+        print("-start-")
+        print(d)
+        print("-end-")
--- a/contrib/import-checker.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/contrib/import-checker.py	Tue Jun 14 14:52:58 2016 -0500
@@ -11,8 +11,9 @@
 # Import a minimal set of stdlib modules needed for list_stdlib_modules()
 # to work when run from a virtualenv.  The modules were chosen empirically
 # so that the return value matches the return value without virtualenv.
-import BaseHTTPServer
-import zlib
+if True: # disable lexical sorting checks
+    import BaseHTTPServer
+    import zlib
 
 # Whitelist of modules that symbols can be directly imported from.
 allowsymbolimports = (
@@ -126,9 +127,15 @@
     False
     >>> fromlocal(None, 1)
     ('foo', 'foo.__init__', True)
+    >>> fromlocal('foo1', 1)
+    ('foo.foo1', 'foo.foo1', False)
     >>> fromlocal2 = fromlocalfunc('foo.xxx.yyy', localmods)
     >>> fromlocal2(None, 2)
     ('foo', 'foo.__init__', True)
+    >>> fromlocal2('bar2', 1)
+    False
+    >>> fromlocal2('bar', 2)
+    ('foo.bar', 'foo.bar.__init__', True)
     """
     prefix = '.'.join(modulename.split('.')[:-1])
     if prefix:
@@ -140,8 +147,12 @@
             assert level > 0
             candidates = ['.'.join(modulename.split('.')[:-level])]
         else:
-            # Check relative name first.
-            candidates = [prefix + name, name]
+            if not level:
+                # Check relative name first.
+                candidates = [prefix + name, name]
+            else:
+                candidates = ['.'.join(modulename.split('.')[:-level]) +
+                              '.' + name]
 
         for n in candidates:
             if n in localmods:
@@ -360,7 +371,7 @@
     * Symbols can only be imported from specific modules (see
       `allowsymbolimports`). For other modules, first import the module then
       assign the symbol to a module-level variable. In addition, these imports
-      must be performed before other relative imports. This rule only
+      must be performed before other local imports. This rule only
       applies to import statements outside of any blocks.
     * Relative imports from the standard library are not allowed.
     * Certain modules must be aliased to alternate names to avoid aliasing
@@ -371,8 +382,8 @@
 
     # Whether a local/non-stdlib import has been performed.
     seenlocal = None
-    # Whether a relative, non-symbol import has been seen.
-    seennonsymbolrelative = False
+    # Whether a local/non-stdlib, non-symbol import has been seen.
+    seennonsymbollocal = False
     # The last name to be imported (for sorting).
     lastname = None
     # Relative import levels encountered so far.
@@ -446,26 +457,26 @@
 
             # Direct symbol import is only allowed from certain modules and
             # must occur before non-symbol imports.
+            found = fromlocal(node.module, node.level)
+            if found and found[2]:  # node.module is a package
+                prefix = found[0] + '.'
+                symbols = [n.name for n in node.names
+                           if not fromlocal(prefix + n.name)]
+            else:
+                symbols = [n.name for n in node.names]
             if node.module and node.col_offset == root_col_offset:
-                found = fromlocal(node.module, node.level)
-                if found and found[2]:  # node.module is a package
-                    prefix = found[0] + '.'
-                    symbols = [n.name for n in node.names
-                               if not fromlocal(prefix + n.name)]
-                else:
-                    symbols = [n.name for n in node.names]
-
                 if symbols and fullname not in allowsymbolimports:
                     yield msg('direct symbol import %s from %s',
                               ', '.join(symbols), fullname)
 
-                if symbols and seennonsymbolrelative:
+                if symbols and seennonsymbollocal:
                     yield msg('symbol import follows non-symbol import: %s',
                               fullname)
+            if not symbols and fullname not in stdlib_modules:
+                seennonsymbollocal = True
 
             if not node.module:
                 assert node.level
-                seennonsymbolrelative = True
 
                 # Only allow 1 group per level.
                 if (node.level in seenlevels
@@ -652,7 +663,7 @@
     the input file.
     """
     py = False
-    if f.endswith('.py'):
+    if not f.endswith('.t'):
         with open(f) as src:
             yield src.read(), modname, f, 0
             py = True
--- a/contrib/revsetbenchmarks.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/contrib/revsetbenchmarks.py	Tue Jun 14 14:52:58 2016 -0500
@@ -10,41 +10,32 @@
 
 from __future__ import absolute_import, print_function
 import math
+import optparse  # cannot use argparse, python 2.7 only
 import os
 import re
+import subprocess
 import sys
-from subprocess import (
-    CalledProcessError,
-    check_call,
-    PIPE,
-    Popen,
-    STDOUT,
-)
-# cannot use argparse, python 2.7 only
-from optparse import (
-    OptionParser,
-)
 
 DEFAULTVARIANTS = ['plain', 'min', 'max', 'first', 'last',
                    'reverse', 'reverse+first', 'reverse+last',
                    'sort', 'sort+first', 'sort+last']
 
 def check_output(*args, **kwargs):
-    kwargs.setdefault('stderr', PIPE)
-    kwargs.setdefault('stdout', PIPE)
-    proc = Popen(*args, **kwargs)
+    kwargs.setdefault('stderr', subprocess.PIPE)
+    kwargs.setdefault('stdout', subprocess.PIPE)
+    proc = subprocess.Popen(*args, **kwargs)
     output, error = proc.communicate()
     if proc.returncode != 0:
-        raise CalledProcessError(proc.returncode, ' '.join(args[0]))
+        raise subprocess.CalledProcessError(proc.returncode, ' '.join(args[0]))
     return output
 
 def update(rev):
     """update the repo to a revision"""
     try:
-        check_call(['hg', 'update', '--quiet', '--check', str(rev)])
+        subprocess.check_call(['hg', 'update', '--quiet', '--check', str(rev)])
         check_output(['make', 'local'],
                      stderr=None)  # suppress output except for error/warning
-    except CalledProcessError as exc:
+    except subprocess.CalledProcessError as exc:
         print('update to revision %s failed, aborting'%rev, file=sys.stderr)
         sys.exit(exc.returncode)
 
@@ -60,7 +51,7 @@
     fullcmd += ['--config',
                 'extensions.perf=' + os.path.join(contribdir, 'perf.py')]
     fullcmd += cmd
-    return check_output(fullcmd, stderr=STDOUT)
+    return check_output(fullcmd, stderr=subprocess.STDOUT)
 
 def perf(revset, target=None, contexts=False):
     """run benchmark for this very revset"""
@@ -70,7 +61,7 @@
             args.append('--contexts')
         output = hg(args, repo=target)
         return parseoutput(output)
-    except CalledProcessError as exc:
+    except subprocess.CalledProcessError as exc:
         print('abort: cannot run revset benchmark: %s'%exc.cmd, file=sys.stderr)
         if getattr(exc, 'output', None) is None: # no output before 2.7
             print('(no output)', file=sys.stderr)
@@ -103,9 +94,9 @@
     """print data about a revision"""
     sys.stdout.write("Revision ")
     sys.stdout.flush()
-    check_call(['hg', 'log', '--rev', str(rev), '--template',
-                '{if(tags, " ({tags})")} '
-                '{rev}:{node|short}: {desc|firstline}\n'])
+    subprocess.check_call(['hg', 'log', '--rev', str(rev), '--template',
+                           '{if(tags, " ({tags})")} '
+                           '{rev}:{node|short}: {desc|firstline}\n'])
 
 def idxwidth(nbidx):
     """return the max width of number used for index
@@ -215,7 +206,7 @@
     """get the list of rev matched by a revset"""
     try:
         out = check_output(['hg', 'log', '--template={rev}\n', '--rev', spec])
-    except CalledProcessError as exc:
+    except subprocess.CalledProcessError as exc:
         print("abort, can't get revision from %s"%spec, file=sys.stderr)
         sys.exit(exc.returncode)
     return [r for r in out.split() if r]
@@ -234,8 +225,8 @@
 point regressions. Revsets to run are specified in a file (or from stdin), one
 revsets per line. Line starting with '#' will be ignored, allowing insertion of
 comments."""
-parser = OptionParser(usage="usage: %prog [options] <revs>",
-                      description=helptext)
+parser = optparse.OptionParser(usage="usage: %prog [options] <revs>",
+                               description=helptext)
 parser.add_option("-f", "--file",
                   help="read revset from FILE (stdin if omitted)",
                   metavar="FILE")
--- a/contrib/synthrepo.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/contrib/synthrepo.py	Tue Jun 14 14:52:58 2016 -0500
@@ -45,6 +45,13 @@
 import random
 import sys
 import time
+
+from mercurial.i18n import _
+from mercurial.node import (
+    nullid,
+    nullrev,
+    short,
+)
 from mercurial import (
     cmdutil,
     context,
@@ -54,12 +61,6 @@
     scmutil,
     util,
 )
-from mercurial.i18n import _
-from mercurial.node import (
-    nullid,
-    nullrev,
-    short,
-)
 
 # Note for extension authors: ONLY specify testedwith = 'internal' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
@@ -506,7 +507,7 @@
             head = rename(head)
         else:
             head = ''
-        renamed = os.path.join(head, wordgen.next())
+        renamed = os.path.join(head, next(wordgen))
         replacements[dirpath] = renamed
         return renamed
     result = []
--- a/contrib/undumprevlog	Tue Jun 07 08:32:33 2016 +0200
+++ b/contrib/undumprevlog	Tue Jun 14 14:52:58 2016 -0500
@@ -3,8 +3,16 @@
 # $ hg init
 # $ undumprevlog < repo.dump
 
+from __future__ import absolute_import
+
 import sys
-from mercurial import revlog, node, scmutil, util, transaction
+from mercurial import (
+    node,
+    revlog,
+    scmutil,
+    transaction,
+    util,
+)
 
 for fp in (sys.stdin, sys.stdout, sys.stderr):
     util.setbinary(fp)
--- a/contrib/win32/mercurial.ini	Tue Jun 07 08:32:33 2016 +0200
+++ b/contrib/win32/mercurial.ini	Tue Jun 14 14:52:58 2016 -0500
@@ -46,7 +46,6 @@
 ;extdiff =
 ;fetch =
 ;gpg =
-;hgcia =
 ;hgk =
 ;highlight = 
 ;histedit =
--- a/doc/docchecker	Tue Jun 07 08:32:33 2016 +0200
+++ b/doc/docchecker	Tue Jun 14 14:52:58 2016 -0500
@@ -6,8 +6,11 @@
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
+
+from __future__ import absolute_import, print_function
+
+import re
 import sys
-import re
 
 leadingline = re.compile(r'(^\s*)(\S.*)$')
 
--- a/doc/hgmanpage.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/doc/hgmanpage.py	Tue Jun 14 14:52:58 2016 -0500
@@ -415,7 +415,7 @@
         else:
             self._docinfo[name] = node.astext()
         self._docinfo_keys.append(name)
-        raise nodes.SkipNode
+        raise nodes.SkipNode()
 
     def depart_docinfo_item(self, node):
         pass
@@ -469,7 +469,7 @@
 
     def visit_citation_reference(self, node):
         self.body.append('['+node.astext()+']')
-        raise nodes.SkipNode
+        raise nodes.SkipNode()
 
     def visit_classifier(self, node):
         pass
@@ -489,7 +489,7 @@
     def visit_comment(self, node,
                       sub=re.compile('-(?=-)').sub):
         self.body.append(self.comment(node.astext()))
-        raise nodes.SkipNode
+        raise nodes.SkipNode()
 
     def visit_contact(self, node):
         self.visit_docinfo_item(node, 'contact')
@@ -643,7 +643,7 @@
             name_normalized = self._field_name.lower().replace(" ","_")
             self._docinfo_names[name_normalized] = self._field_name
             self.visit_docinfo_item(node, name_normalized)
-            raise nodes.SkipNode
+            raise nodes.SkipNode()
 
     def depart_field_body(self, node):
         pass
@@ -657,7 +657,7 @@
     def visit_field_name(self, node):
         if self._in_docinfo:
             self._field_name = node.astext()
-            raise nodes.SkipNode
+            raise nodes.SkipNode()
         else:
             self.body.append(self.defs['field_name'][0])
 
@@ -693,7 +693,7 @@
 
     def visit_footnote_reference(self, node):
         self.body.append('['+self.deunicode(node.astext())+']')
-        raise nodes.SkipNode
+        raise nodes.SkipNode()
 
     def depart_footnote_reference(self, node):
         pass
@@ -705,7 +705,7 @@
         pass
 
     def visit_header(self, node):
-        raise NotImplementedError, node.astext()
+        raise NotImplementedError(node.astext())
 
     def depart_header(self, node):
         pass
@@ -742,7 +742,7 @@
         if 'uri' in node.attributes:
             text.append(node.attributes['uri'])
         self.body.append('[image: %s]\n' % ('/'.join(text)))
-        raise nodes.SkipNode
+        raise nodes.SkipNode()
 
     def visit_important(self, node):
         self.visit_admonition(node, 'important')
@@ -753,7 +753,7 @@
         # footnote and citation
         if (isinstance(node.parent, nodes.footnote)
             or isinstance(node.parent, nodes.citation)):
-            raise nodes.SkipNode
+            raise nodes.SkipNode()
         self.document.reporter.warning('"unsupported "label"',
                 base_node=node)
         self.body.append('[')
@@ -793,7 +793,7 @@
     def visit_list_item(self, node):
         # man 7 man argues to use ".IP" instead of ".TP"
         self.body.append('.IP %s %d\n' % (
-                self._list_char[-1].next(),
+                next(self._list_char[-1]),
                 self._list_char[-1].get_width(),))
 
     def depart_list_item(self, node):
@@ -814,7 +814,7 @@
         self.body.append(self.defs['literal_block'][1])
 
     def visit_meta(self, node):
-        raise NotImplementedError, node.astext()
+        raise NotImplementedError(node.astext())
 
     def depart_meta(self, node):
         pass
@@ -924,7 +924,7 @@
         if node.get('format') == 'manpage':
             self.body.append(node.astext() + "\n")
         # Keep non-manpage raw text out of output:
-        raise nodes.SkipNode
+        raise nodes.SkipNode()
 
     def visit_reference(self, node):
         """E.g. link or email address."""
@@ -963,7 +963,7 @@
 
     def visit_substitution_definition(self, node):
         """Internal only."""
-        raise nodes.SkipNode
+        raise nodes.SkipNode()
 
     def visit_substitution_reference(self, node):
         self.document.reporter.warning('"substitution_reference" not supported',
@@ -1009,7 +1009,7 @@
 
     def visit_target(self, node):
         # targets are in-document hyper targets, without any use for man-pages.
-        raise nodes.SkipNode
+        raise nodes.SkipNode()
 
     def visit_tbody(self, node):
         pass
@@ -1053,7 +1053,7 @@
             self._docinfo['title'] = node.astext()
             # document title for .TH
             self._docinfo['title_upper'] = node.astext().upper()
-            raise nodes.SkipNode
+            raise nodes.SkipNode()
         elif self.section_level == 1:
             self.body.append('.SH ')
             for n in node.traverse(nodes.Text):
--- a/hg	Tue Jun 07 08:32:33 2016 +0200
+++ b/hg	Tue Jun 14 14:52:58 2016 -0500
@@ -11,9 +11,11 @@
 import sys
 
 if os.environ.get('HGUNICODEPEDANTRY', False):
-    reload(sys)
-    sys.setdefaultencoding("undefined")
-
+    try:
+        reload(sys)
+        sys.setdefaultencoding("undefined")
+    except NameError:
+        pass
 
 libdir = '@LIBDIR@'
 
@@ -26,9 +28,9 @@
 
 # enable importing on demand to reduce startup time
 try:
-    from mercurial import demandimport; demandimport.enable()
+    if sys.version_info[0] < 3:
+        from mercurial import demandimport; demandimport.enable()
 except ImportError:
-    import sys
     sys.stderr.write("abort: couldn't find mercurial libraries in [%s]\n" %
                      ' '.join(sys.path))
     sys.stderr.write("(check your install and PYTHONPATH)\n")
--- a/hgext/automv.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/automv.py	Tue Jun 14 14:52:58 2016 -0500
@@ -26,6 +26,7 @@
 
 from __future__ import absolute_import
 
+from mercurial.i18n import _
 from mercurial import (
     commands,
     copies,
@@ -34,7 +35,6 @@
     scmutil,
     similar
 )
-from mercurial.i18n import _
 
 def extsetup(ui):
     entry = extensions.wrapcommand(
--- a/hgext/chgserver.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/chgserver.py	Tue Jun 14 14:52:58 2016 -0500
@@ -43,6 +43,7 @@
 import SocketServer
 import errno
 import gc
+import hashlib
 import inspect
 import os
 import random
@@ -76,10 +77,11 @@
 
 def _hashlist(items):
     """return sha1 hexdigest for a list"""
-    return util.sha1(str(items)).hexdigest()
+    return hashlib.sha1(str(items)).hexdigest()
 
 # sensitive config sections affecting confighash
 _configsections = [
+    'alias',  # affects global state commands.table
     'extdiff',  # uisetup will register new commands
     'extensions',
 ]
@@ -213,18 +215,6 @@
         ui.setconfig('ui', 'interactive', False, 'pager')
         return p
 
-_envvarre = re.compile(r'\$[a-zA-Z_]+')
-
-def _clearenvaliases(cmdtable):
-    """Remove stale command aliases referencing env vars; variable expansion
-    is done at dispatch.addaliases()"""
-    for name, tab in cmdtable.items():
-        cmddef = tab[0]
-        if (isinstance(cmddef, dispatch.cmdalias) and
-            not cmddef.definition.startswith('!') and  # shell alias
-            _envvarre.search(cmddef.definition)):
-            del cmdtable[name]
-
 def _newchgui(srcui, csystem):
     class chgui(srcui.__class__):
         def __init__(self, src=None):
@@ -525,7 +515,6 @@
         _log('setenv: %r\n' % sorted(newenv.keys()))
         os.environ.clear()
         os.environ.update(newenv)
-        _clearenvaliases(commands.table)
 
     capabilities = commandserver.server.capabilities.copy()
     capabilities.update({'attachio': attachio,
--- a/hgext/color.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/color.py	Tue Jun 14 14:52:58 2016 -0500
@@ -156,6 +156,8 @@
 from __future__ import absolute_import
 
 import os
+
+from mercurial.i18n import _
 from mercurial import (
     cmdutil,
     commands,
@@ -165,7 +167,6 @@
     ui as uimod,
     util,
 )
-from mercurial.i18n import _
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
--- a/hgext/convert/__init__.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/convert/__init__.py	Tue Jun 14 14:52:58 2016 -0500
@@ -9,11 +9,11 @@
 
 from __future__ import absolute_import
 
+from mercurial.i18n import _
 from mercurial import (
     cmdutil,
     registrar,
 )
-from mercurial.i18n import _
 
 from . import (
     convcmd,
--- a/hgext/convert/bzr.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/convert/bzr.py	Tue Jun 14 14:52:58 2016 -0500
@@ -10,11 +10,12 @@
 from __future__ import absolute_import
 
 import os
+
+from mercurial.i18n import _
 from mercurial import (
     demandimport,
     error
 )
-from mercurial.i18n import _
 from . import common
 
 # these do not work with demandimport, blacklist
--- a/hgext/convert/common.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/convert/common.py	Tue Jun 14 14:52:58 2016 -0500
@@ -7,20 +7,20 @@
 from __future__ import absolute_import
 
 import base64
-import cPickle as pickle
 import datetime
 import errno
 import os
 import re
 import subprocess
 
+from mercurial.i18n import _
 from mercurial import (
     error,
     phases,
     util,
 )
-from mercurial.i18n import _
 
+pickle = util.pickle
 propertycache = util.propertycache
 
 def encodeargs(args):
--- a/hgext/convert/convcmd.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/convert/convcmd.py	Tue Jun 14 14:52:58 2016 -0500
@@ -10,13 +10,13 @@
 import shlex
 import shutil
 
+from mercurial.i18n import _
 from mercurial import (
     encoding,
     error,
     hg,
     util,
 )
-from mercurial.i18n import _
 
 from . import (
     bzr,
--- a/hgext/convert/cvs.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/convert/cvs.py	Tue Jun 14 14:52:58 2016 -0500
@@ -11,12 +11,12 @@
 import re
 import socket
 
+from mercurial.i18n import _
 from mercurial import (
     encoding,
     error,
     util,
 )
-from mercurial.i18n import _
 
 from . import (
     common,
--- a/hgext/convert/cvsps.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/convert/cvsps.py	Tue Jun 14 14:52:58 2016 -0500
@@ -6,15 +6,16 @@
 # GNU General Public License version 2 or any later version.
 from __future__ import absolute_import
 
-import cPickle as pickle
 import os
 import re
 
+from mercurial.i18n import _
 from mercurial import (
     hook,
     util,
 )
-from mercurial.i18n import _
+
+pickle = util.pickle
 
 class logentry(object):
     '''Class logentry has the following attributes:
--- a/hgext/convert/filemap.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/convert/filemap.py	Tue Jun 14 14:52:58 2016 -0500
@@ -7,10 +7,11 @@
 
 import posixpath
 import shlex
+
+from mercurial.i18n import _
 from mercurial import (
     error,
 )
-from mercurial.i18n import _
 from . import common
 SKIPREV = common.SKIPREV
 
--- a/hgext/convert/git.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/convert/git.py	Tue Jun 14 14:52:58 2016 -0500
@@ -7,12 +7,13 @@
 from __future__ import absolute_import
 
 import os
+
+from mercurial.i18n import _
 from mercurial import (
     config,
     error,
     node as nodemod,
 )
-from mercurial.i18n import _
 
 from . import (
     common,
--- a/hgext/convert/gnuarch.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/convert/gnuarch.py	Tue Jun 14 14:52:58 2016 -0500
@@ -12,12 +12,13 @@
 import shutil
 import stat
 import tempfile
+
+from mercurial.i18n import _
 from mercurial import (
     encoding,
     error,
     util,
 )
-from mercurial.i18n import _
 from . import common
 
 class gnuarch_source(common.converter_source, common.commandline):
--- a/hgext/convert/hg.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/convert/hg.py	Tue Jun 14 14:52:58 2016 -0500
@@ -22,6 +22,7 @@
 import re
 import time
 
+from mercurial.i18n import _
 from mercurial import (
     bookmarks,
     context,
@@ -37,7 +38,6 @@
 )
 stringio = util.stringio
 
-from mercurial.i18n import _
 from . import common
 mapfile = common.mapfile
 NoRepo = common.NoRepo
--- a/hgext/convert/monotone.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/convert/monotone.py	Tue Jun 14 14:52:58 2016 -0500
@@ -10,11 +10,11 @@
 import os
 import re
 
+from mercurial.i18n import _
 from mercurial import (
     error,
     util,
 )
-from mercurial.i18n import _
 
 from . import common
 
--- a/hgext/convert/p4.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/convert/p4.py	Tue Jun 14 14:52:58 2016 -0500
@@ -9,11 +9,11 @@
 import marshal
 import re
 
+from mercurial.i18n import _
 from mercurial import (
     error,
     util,
 )
-from mercurial.i18n import _
 
 from . import common
 
--- a/hgext/convert/subversion.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/convert/subversion.py	Tue Jun 14 14:52:58 2016 -0500
@@ -3,13 +3,13 @@
 # Copyright(C) 2007 Daniel Holth et al
 from __future__ import absolute_import
 
-import cPickle as pickle
 import os
 import re
 import sys
 import tempfile
 import xml.dom.minidom
 
+from mercurial.i18n import _
 from mercurial import (
     encoding,
     error,
@@ -17,10 +17,10 @@
     strutil,
     util,
 )
-from mercurial.i18n import _
 
 from . import common
 
+pickle = util.pickle
 stringio = util.stringio
 propertycache = util.propertycache
 urlerr = util.urlerr
--- a/hgext/fetch.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/fetch.py	Tue Jun 14 14:52:58 2016 -0500
@@ -7,12 +7,23 @@
 
 '''pull, update and merge in one command (DEPRECATED)'''
 
+from __future__ import absolute_import
+
 from mercurial.i18n import _
-from mercurial.node import short
-from mercurial import commands, cmdutil, hg, util, error
-from mercurial.lock import release
-from mercurial import exchange
+from mercurial.node import (
+    short,
+)
+from mercurial import (
+    cmdutil,
+    commands,
+    error,
+    exchange,
+    hg,
+    lock,
+    util,
+)
 
+release = lock.release
 cmdtable = {}
 command = cmdutil.command(cmdtable)
 # Note for extension authors: ONLY specify testedwith = 'internal' for
--- a/hgext/fsmonitor/__init__.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/fsmonitor/__init__.py	Tue Jun 14 14:52:58 2016 -0500
@@ -91,10 +91,12 @@
 
 from __future__ import absolute_import
 
+import hashlib
 import os
 import stat
 import sys
 
+from mercurial.i18n import _
 from mercurial import (
     context,
     extensions,
@@ -105,7 +107,6 @@
     util,
 )
 from mercurial import match as matchmod
-from mercurial.i18n import _
 
 from . import (
     state,
@@ -141,7 +142,7 @@
     copy.
 
     """
-    sha1 = util.sha1()
+    sha1 = hashlib.sha1()
     if util.safehasattr(ignore, 'includepat'):
         sha1.update(ignore.includepat)
     sha1.update('\0\0')
--- a/hgext/fsmonitor/state.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/fsmonitor/state.py	Tue Jun 14 14:52:58 2016 -0500
@@ -12,8 +12,8 @@
 import socket
 import struct
 
+from mercurial.i18n import _
 from mercurial import pathutil
-from mercurial.i18n import _
 
 _version = 4
 _versionformat = ">I"
--- a/hgext/gpg.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/gpg.py	Tue Jun 14 14:52:58 2016 -0500
@@ -5,10 +5,21 @@
 
 '''commands to sign and verify changesets'''
 
-import os, tempfile, binascii
-from mercurial import util, commands, match, cmdutil, error
-from mercurial import node as hgnode
+from __future__ import absolute_import
+
+import binascii
+import os
+import tempfile
+
 from mercurial.i18n import _
+from mercurial import (
+    cmdutil,
+    commands,
+    error,
+    match,
+    node as hgnode,
+    util,
+)
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
@@ -187,7 +198,7 @@
         return
 
     # print summary
-    ui.write("%s is signed by:\n" % hgnode.short(rev))
+    ui.write(_("%s is signed by:\n") % hgnode.short(rev))
     for key in keys:
         ui.write(" %s\n" % keystr(ui, key))
 
--- a/hgext/graphlog.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/graphlog.py	Tue Jun 14 14:52:58 2016 -0500
@@ -15,8 +15,13 @@
 revision graph is also shown.
 '''
 
+from __future__ import absolute_import
+
 from mercurial.i18n import _
-from mercurial import cmdutil, commands
+from mercurial import (
+    cmdutil,
+    commands,
+)
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
--- a/hgext/hgcia.py	Tue Jun 07 08:32:33 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,285 +0,0 @@
-# Copyright (C) 2007-8 Brendan Cully <brendan@kublai.com>
-#
-# This software may be used and distributed according to the terms of the
-# GNU General Public License version 2 or any later version.
-
-"""hooks for integrating with the CIA.vc notification service
-
-This is meant to be run as a changegroup or incoming hook. To
-configure it, set the following options in your hgrc::
-
-  [cia]
-  # your registered CIA user name
-  user = foo
-  # the name of the project in CIA
-  project = foo
-  # the module (subproject) (optional)
-  #module = foo
-  # Append a diffstat to the log message (optional)
-  #diffstat = False
-  # Template to use for log messages (optional)
-  #template = {desc}\\n{baseurl}{webroot}/rev/{node}-- {diffstat}
-  # Style to use (optional)
-  #style = foo
-  # The URL of the CIA notification service (optional)
-  # You can use mailto: URLs to send by email, e.g.
-  # mailto:cia@cia.vc
-  # Make sure to set email.from if you do this.
-  #url = http://cia.vc/
-  # print message instead of sending it (optional)
-  #test = False
-  # number of slashes to strip for url paths
-  #strip = 0
-
-  [hooks]
-  # one of these:
-  changegroup.cia = python:hgcia.hook
-  #incoming.cia = python:hgcia.hook
-
-  [web]
-  # If you want hyperlinks (optional)
-  baseurl = http://server/path/to/repo
-"""
-
-from mercurial.i18n import _
-from mercurial.node import bin, short
-from mercurial import cmdutil, patch, util, mail, error
-import email.Parser
-
-import socket, xmlrpclib
-from xml.sax import saxutils
-# Note for extension authors: ONLY specify testedwith = 'internal' for
-# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
-# be specifying the version(s) of Mercurial they are tested with, or
-# leave the attribute unspecified.
-testedwith = 'internal'
-
-socket_timeout = 30 # seconds
-if util.safehasattr(socket, 'setdefaulttimeout'):
-    # set a timeout for the socket so you don't have to wait so looooong
-    # when cia.vc is having problems. requires python >= 2.3:
-    socket.setdefaulttimeout(socket_timeout)
-
-HGCIA_VERSION = '0.1'
-HGCIA_URL = 'http://hg.kublai.com/mercurial/hgcia'
-
-
-class ciamsg(object):
-    """ A CIA message """
-    def __init__(self, cia, ctx):
-        self.cia = cia
-        self.ctx = ctx
-        self.url = self.cia.url
-        if self.url:
-            self.url += self.cia.root
-
-    def fileelem(self, path, uri, action):
-        if uri:
-            uri = ' uri=%s' % saxutils.quoteattr(uri)
-        return '<file%s action=%s>%s</file>' % (
-            uri, saxutils.quoteattr(action), saxutils.escape(path))
-
-    def fileelems(self):
-        n = self.ctx.node()
-        f = self.cia.repo.status(self.ctx.p1().node(), n)
-        url = self.url or ''
-        if url and url[-1] == '/':
-            url = url[:-1]
-        elems = []
-        for path in f.modified:
-            uri = '%s/diff/%s/%s' % (url, short(n), path)
-            elems.append(self.fileelem(path, url and uri, 'modify'))
-        for path in f.added:
-            # TODO: copy/rename ?
-            uri = '%s/file/%s/%s' % (url, short(n), path)
-            elems.append(self.fileelem(path, url and uri, 'add'))
-        for path in f.removed:
-            elems.append(self.fileelem(path, '', 'remove'))
-
-        return '\n'.join(elems)
-
-    def sourceelem(self, project, module=None, branch=None):
-        msg = ['<source>', '<project>%s</project>' % saxutils.escape(project)]
-        if module:
-            msg.append('<module>%s</module>' % saxutils.escape(module))
-        if branch:
-            msg.append('<branch>%s</branch>' % saxutils.escape(branch))
-        msg.append('</source>')
-
-        return '\n'.join(msg)
-
-    def diffstat(self):
-        class patchbuf(object):
-            def __init__(self):
-                self.lines = []
-                # diffstat is stupid
-                self.name = 'cia'
-            def write(self, data):
-                self.lines += data.splitlines(True)
-            def close(self):
-                pass
-
-        n = self.ctx.node()
-        pbuf = patchbuf()
-        cmdutil.export(self.cia.repo, [n], fp=pbuf)
-        return patch.diffstat(pbuf.lines) or ''
-
-    def logmsg(self):
-        if self.cia.diffstat:
-            diffstat = self.diffstat()
-        else:
-            diffstat = ''
-        self.cia.ui.pushbuffer()
-        self.cia.templater.show(self.ctx, changes=self.ctx.changeset(),
-                                baseurl=self.cia.ui.config('web', 'baseurl'),
-                                url=self.url, diffstat=diffstat,
-                                webroot=self.cia.root)
-        return self.cia.ui.popbuffer()
-
-    def xml(self):
-        n = short(self.ctx.node())
-        src = self.sourceelem(self.cia.project, module=self.cia.module,
-                              branch=self.ctx.branch())
-        # unix timestamp
-        dt = self.ctx.date()
-        timestamp = dt[0]
-
-        author = saxutils.escape(self.ctx.user())
-        rev = '%d:%s' % (self.ctx.rev(), n)
-        log = saxutils.escape(self.logmsg())
-
-        url = self.url
-        if url and url[-1] == '/':
-            url = url[:-1]
-        url = url and '<url>%s/rev/%s</url>' % (saxutils.escape(url), n) or ''
-
-        msg = """
-<message>
-  <generator>
-    <name>Mercurial (hgcia)</name>
-    <version>%s</version>
-    <url>%s</url>
-    <user>%s</user>
-  </generator>
-  %s
-  <body>
-    <commit>
-      <author>%s</author>
-      <version>%s</version>
-      <log>%s</log>
-      %s
-      <files>%s</files>
-    </commit>
-  </body>
-  <timestamp>%d</timestamp>
-</message>
-""" % \
-            (HGCIA_VERSION, saxutils.escape(HGCIA_URL),
-            saxutils.escape(self.cia.user), src, author, rev, log, url,
-            self.fileelems(), timestamp)
-
-        return msg
-
-
-class hgcia(object):
-    """ CIA notification class """
-
-    deftemplate = '{desc}'
-    dstemplate = '{desc}\n-- \n{diffstat}'
-
-    def __init__(self, ui, repo):
-        self.ui = ui
-        self.repo = repo
-
-        self.ciaurl = self.ui.config('cia', 'url', 'http://cia.vc')
-        self.user = self.ui.config('cia', 'user')
-        self.project = self.ui.config('cia', 'project')
-        self.module = self.ui.config('cia', 'module')
-        self.diffstat = self.ui.configbool('cia', 'diffstat')
-        self.emailfrom = self.ui.config('email', 'from')
-        self.dryrun = self.ui.configbool('cia', 'test')
-        self.url = self.ui.config('web', 'baseurl')
-        # Default to -1 for backward compatibility
-        self.stripcount = int(self.ui.config('cia', 'strip', -1))
-        self.root = self.strip(self.repo.root)
-
-        style = self.ui.config('cia', 'style')
-        template = self.ui.config('cia', 'template')
-        if not template:
-            if self.diffstat:
-                template = self.dstemplate
-            else:
-                template = self.deftemplate
-        t = cmdutil.changeset_templater(self.ui, self.repo, False, None,
-                                        template, style, False)
-        self.templater = t
-
-    def strip(self, path):
-        '''strip leading slashes from local path, turn into web-safe path.'''
-
-        path = util.pconvert(path)
-        count = self.stripcount
-        if count < 0:
-            return ''
-        while count > 0:
-            c = path.find('/')
-            if c == -1:
-                break
-            path = path[c + 1:]
-            count -= 1
-        return path
-
-    def sendrpc(self, msg):
-        srv = xmlrpclib.Server(self.ciaurl)
-        res = srv.hub.deliver(msg)
-        if res is not True and res != 'queued.':
-            raise error.Abort(_('%s returned an error: %s') %
-                             (self.ciaurl, res))
-
-    def sendemail(self, address, data):
-        p = email.Parser.Parser()
-        msg = p.parsestr(data)
-        msg['Date'] = util.datestr(format="%a, %d %b %Y %H:%M:%S %1%2")
-        msg['To'] = address
-        msg['From'] = self.emailfrom
-        msg['Subject'] = 'DeliverXML'
-        msg['Content-type'] = 'text/xml'
-        msgtext = msg.as_string()
-
-        self.ui.status(_('hgcia: sending update to %s\n') % address)
-        mail.sendmail(self.ui, util.email(self.emailfrom),
-                      [address], msgtext)
-
-
-def hook(ui, repo, hooktype, node=None, url=None, **kwargs):
-    """ send CIA notification """
-    def sendmsg(cia, ctx):
-        msg = ciamsg(cia, ctx).xml()
-        if cia.dryrun:
-            ui.write(msg)
-        elif cia.ciaurl.startswith('mailto:'):
-            if not cia.emailfrom:
-                raise error.Abort(_('email.from must be defined when '
-                                   'sending by email'))
-            cia.sendemail(cia.ciaurl[7:], msg)
-        else:
-            cia.sendrpc(msg)
-
-    n = bin(node)
-    cia = hgcia(ui, repo)
-    if not cia.user:
-        ui.debug('cia: no user specified')
-        return
-    if not cia.project:
-        ui.debug('cia: no project specified')
-        return
-    if hooktype == 'changegroup':
-        start = repo.changelog.rev(n)
-        end = len(repo.changelog)
-        for rev in xrange(start, end):
-            n = repo.changelog.node(rev)
-            ctx = repo.changectx(n)
-            sendmsg(cia, ctx)
-    else:
-        ctx = repo.changectx(n)
-        sendmsg(cia, ctx)
--- a/hgext/hgk.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/hgk.py	Tue Jun 14 14:52:58 2016 -0500
@@ -34,10 +34,23 @@
 vdiff on hovered and selected revisions.
 '''
 
+from __future__ import absolute_import
+
 import os
-from mercurial import cmdutil, commands, patch, scmutil, obsolete
-from mercurial.node import nullid, nullrev, short
+
 from mercurial.i18n import _
+from mercurial.node import (
+    nullid,
+    nullrev,
+    short,
+)
+from mercurial import (
+    cmdutil,
+    commands,
+    obsolete,
+    patch,
+    scmutil,
+)
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
--- a/hgext/highlight/highlight.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/highlight/highlight.py	Tue Jun 14 14:52:58 2016 -0500
@@ -68,7 +68,7 @@
     coloriter = (s.encode(encoding.encoding, 'replace')
                  for s in colorized.splitlines())
 
-    tmpl.filters['colorize'] = lambda x: coloriter.next()
+    tmpl.filters['colorize'] = lambda x: next(coloriter)
 
     oldl = tmpl.cache[field]
     newl = oldl.replace('line|escape', 'line|colorize')
--- a/hgext/histedit.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/histedit.py	Tue Jun 14 14:52:58 2016 -0500
@@ -169,30 +169,35 @@
 
 """
 
-import pickle
+from __future__ import absolute_import
+
 import errno
 import os
 import sys
 
-from mercurial import bundle2
-from mercurial import cmdutil
-from mercurial import discovery
-from mercurial import error
-from mercurial import copies
-from mercurial import context
-from mercurial import destutil
-from mercurial import exchange
-from mercurial import extensions
-from mercurial import hg
-from mercurial import node
-from mercurial import repair
-from mercurial import scmutil
-from mercurial import util
-from mercurial import obsolete
-from mercurial import merge as mergemod
-from mercurial.lock import release
 from mercurial.i18n import _
+from mercurial import (
+    bundle2,
+    cmdutil,
+    context,
+    copies,
+    destutil,
+    discovery,
+    error,
+    exchange,
+    extensions,
+    hg,
+    lock,
+    merge as mergemod,
+    node,
+    obsolete,
+    repair,
+    scmutil,
+    util,
+)
 
+pickle = util.pickle
+release = lock.release
 cmdtable = {}
 command = cmdutil.command(cmdtable)
 
@@ -408,7 +413,7 @@
             raise error.ParseError(_('unknown changeset %s listed')
                               % ha[:12])
 
-    def torule(self):
+    def torule(self, initial=False):
         """build a histedit rule line for an action
 
         by default lines are in the form:
@@ -418,6 +423,14 @@
         summary = ''
         if ctx.description():
             summary = ctx.description().splitlines()[0]
+
+        fword = summary.split(' ', 1)[0].lower()
+        # if it doesn't end with the special character '!' just skip this
+        if (self.repo.ui.configbool("experimental", "histedit.autoverb") and
+            initial and fword.endswith('!')):
+            fword = fword[:-1]
+            if fword in primaryactions | secondaryactions | tertiaryactions:
+                self.verb = fword
         line = '%s %s %d %s' % (self.verb, ctx, ctx.rev(), summary)
         # trim to 75 columns by default so it's not stupidly wide in my editor
         # (the 5 more are left for verb)
@@ -1304,7 +1317,7 @@
 
     rules are in the format [ [act, ctx], ...] like in state.rules
     """
-    rules = '\n'.join([act.torule() for act in actions])
+    rules = '\n'.join([act.torule(initial=True) for act in actions])
     rules += '\n\n'
     rules += editcomment
     rules = ui.edit(rules, ui.username(), {'prefix': 'histedit'})
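
The torule(initial=True) hunk above adds the experimental histedit.autoverb behaviour: when the initial rules are drafted, a changeset whose summary starts with a known action word followed by '!' pre-selects that action instead of the default 'pick'. A small illustrative sketch of the word handling (the action names and messages are examples; the real code checks the primary/secondary/tertiary action sets):

    # illustrative only -- mirrors the fword handling added in the hunk above
    actions = {'pick', 'edit', 'fold', 'roll', 'drop', 'mess'}

    def autoverb(summary, default='pick'):
        fword = summary.split(' ', 1)[0].lower()
        if fword.endswith('!') and fword[:-1] in actions:
            return fword[:-1]
        return default

    print(autoverb('fold! fix typo in docstring'))  # -> fold
    print(autoverb('add a new feature'))            # -> pick
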
--- a/hgext/keyword.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/keyword.py	Tue Jun 14 14:52:58 2016 -0500
@@ -89,8 +89,8 @@
 import re
 import tempfile
 
+from mercurial.i18n import _
 from mercurial.hgweb import webcommands
-from mercurial.i18n import _
 
 from mercurial import (
     cmdutil,
@@ -735,7 +735,7 @@
     def kwfilectx_cmp(orig, self, fctx):
         # keyword affects data size, comparing wdir and filelog size does
         # not make sense
-        if (fctx._filerev is None and
+        if (fctx._filenode is None and
             (self._repo._encodefilterpats or
              kwt.match(fctx.path()) and 'l' not in fctx.flags() or
              self.size() - 4 == fctx.size()) or
--- a/hgext/largefiles/__init__.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/largefiles/__init__.py	Tue Jun 14 14:52:58 2016 -0500
@@ -104,14 +104,20 @@
 explicitly do so with the --large flag passed to the :hg:`add`
 command.
 '''
+from __future__ import absolute_import
 
-from mercurial import hg, localrepo
+from mercurial import (
+    hg,
+    localrepo,
+)
 
-import lfcommands
-import proto
-import reposetup
-import uisetup as uisetupmod
-import overrides
+from . import (
+    lfcommands,
+    overrides,
+    proto,
+    reposetup,
+    uisetup as uisetupmod,
+)
 
 # Note for extension authors: ONLY specify testedwith = 'internal' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
--- a/hgext/largefiles/basestore.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/largefiles/basestore.py	Tue Jun 14 14:52:58 2016 -0500
@@ -7,13 +7,13 @@
 # GNU General Public License version 2 or any later version.
 
 '''base class for store implementations and store-related utility code'''
+from __future__ import absolute_import
 
-import re
-
-from mercurial import util, node, hg, error
 from mercurial.i18n import _
 
-import lfutil
+from mercurial import node, util
+
+from . import lfutil
 
 class StoreError(Exception):
     '''Raised when there is a problem getting files from or putting
@@ -116,19 +116,26 @@
         '''Verify the existence (and, optionally, contents) of every big
         file revision referenced by every changeset in revs.
         Return 0 if all is well, non-zero on any errors.'''
-        failed = False
 
         self.ui.status(_('searching %d changesets for largefiles\n') %
                        len(revs))
         verified = set()                # set of (filename, filenode) tuples
-
+        filestocheck = []               # list of (cset, filename, expectedhash)
         for rev in revs:
             cctx = self.repo[rev]
             cset = "%d:%s" % (cctx.rev(), node.short(cctx.node()))
 
             for standin in cctx:
-                if self._verifyfile(cctx, cset, contents, standin, verified):
-                    failed = True
+                filename = lfutil.splitstandin(standin)
+                if filename:
+                    fctx = cctx[standin]
+                    key = (filename, fctx.filenode())
+                    if key not in verified:
+                        verified.add(key)
+                        expectedhash = fctx.data()[0:40]
+                        filestocheck.append((cset, filename, expectedhash))
+
+        failed = self._verifyfiles(contents, filestocheck)
 
         numrevs = len(verified)
         numlfiles = len(set([fname for (fname, fnode) in verified]))
@@ -150,72 +157,10 @@
         exist in the store).'''
         raise NotImplementedError('abstract method')
 
-    def _verifyfile(self, cctx, cset, contents, standin, verified):
-        '''Perform the actual verification of a file in the store.
-        'cset' is only used in warnings.
+    def _verifyfiles(self, contents, filestocheck):
+        '''Perform the actual verification of files in the store.
         'contents' controls verification of content hash.
-        'standin' is the standin path of the largefile to verify.
-        'verified' is maintained as a set of already verified files.
-        Returns _true_ if it is a standin and any problems are found!
+        'filestocheck' is list of files to check.
+        Returns _true_ if any problems are found!
         '''
         raise NotImplementedError('abstract method')
-
-import localstore, wirestore
-
-_storeprovider = {
-    'file':  [localstore.localstore],
-    'http':  [wirestore.wirestore],
-    'https': [wirestore.wirestore],
-    'ssh': [wirestore.wirestore],
-    }
-
-_scheme_re = re.compile(r'^([a-zA-Z0-9+-.]+)://')
-
-# During clone this function is passed the src's ui object
-# but it needs the dest's ui object so it can read out of
-# the config file. Use repo.ui instead.
-def _openstore(repo, remote=None, put=False):
-    ui = repo.ui
-
-    if not remote:
-        lfpullsource = getattr(repo, 'lfpullsource', None)
-        if lfpullsource:
-            path = ui.expandpath(lfpullsource)
-        elif put:
-            path = ui.expandpath('default-push', 'default')
-        else:
-            path = ui.expandpath('default')
-
-        # ui.expandpath() leaves 'default-push' and 'default' alone if
-        # they cannot be expanded: fallback to the empty string,
-        # meaning the current directory.
-        if path == 'default-push' or path == 'default':
-            path = ''
-            remote = repo
-        else:
-            path, _branches = hg.parseurl(path)
-            remote = hg.peer(repo, {}, path)
-
-    # The path could be a scheme so use Mercurial's normal functionality
-    # to resolve the scheme to a repository and use its path
-    path = util.safehasattr(remote, 'url') and remote.url() or remote.path
-
-    match = _scheme_re.match(path)
-    if not match:                       # regular filesystem path
-        scheme = 'file'
-    else:
-        scheme = match.group(1)
-
-    try:
-        storeproviders = _storeprovider[scheme]
-    except KeyError:
-        raise error.Abort(_('unsupported URL scheme %r') % scheme)
-
-    for classobj in storeproviders:
-        try:
-            return classobj(ui, repo, remote)
-        except lfutil.storeprotonotcapable:
-            pass
-
-    raise error.Abort(_('%s does not appear to be a largefile store') %
-                     util.hidepassword(path))
--- a/hgext/largefiles/lfcommands.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/largefiles/lfcommands.py	Tue Jun 14 14:52:58 2016 -0500
@@ -7,20 +7,39 @@
 # GNU General Public License version 2 or any later version.
 
 '''High-level command function for lfconvert, plus the cmdtable.'''
+from __future__ import absolute_import
 
-import os, errno
+import errno
+import hashlib
+import os
 import shutil
 
-from mercurial import util, match as match_, hg, node, context, error, \
-    cmdutil, scmutil, commands
 from mercurial.i18n import _
-from mercurial.lock import release
 
-from hgext.convert import convcmd
-from hgext.convert import filemap
+from mercurial import (
+    cmdutil,
+    commands,
+    context,
+    error,
+    hg,
+    lock,
+    match as matchmod,
+    node,
+    scmutil,
+    util,
+)
 
-import lfutil
-import basestore
+from ..convert import (
+    convcmd,
+    filemap,
+)
+
+from . import (
+    lfutil,
+    storefactory
+)
+
+release = lock.release
 
 # -- Commands ----------------------------------------------------------
 
@@ -92,7 +111,7 @@
             if not pats:
                 pats = ui.configlist(lfutil.longname, 'patterns', default=[])
             if pats:
-                matcher = match_.match(rsrc.root, '', list(pats))
+                matcher = matchmod.match(rsrc.root, '', list(pats))
             else:
                 matcher = None
 
@@ -211,7 +230,7 @@
                         raise error.Abort(_('largefile %s becomes symlink') % f)
 
                 # largefile was modified, update standins
-                m = util.sha1('')
+                m = hashlib.sha1('')
                 m.update(ctx[f].data())
                 hash = m.hexdigest()
                 if f not in lfiletohash or lfiletohash[f] != hash:
@@ -337,7 +356,7 @@
     if not files:
         return
 
-    store = basestore._openstore(rsrc, rdst, put=True)
+    store = storefactory._openstore(rsrc, rdst, put=True)
 
     at = 0
     ui.debug("sending statlfile command for %d largefiles\n" % len(files))
@@ -368,7 +387,7 @@
     else:
         revs = ['.']
 
-    store = basestore._openstore(repo)
+    store = storefactory._openstore(repo)
     return store.verify(revs, contents=contents)
 
 def cachelfiles(ui, repo, node, filelist=None):
@@ -394,7 +413,7 @@
             toget.append((lfile, expectedhash))
 
     if toget:
-        store = basestore._openstore(repo)
+        store = storefactory._openstore(repo)
         ret = store.get(toget)
         return ret
 
--- a/hgext/largefiles/lfutil.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/largefiles/lfutil.py	Tue Jun 14 14:52:58 2016 -0500
@@ -7,15 +7,25 @@
 # GNU General Public License version 2 or any later version.
 
 '''largefiles utility code: must not import other modules in this package.'''
+from __future__ import absolute_import
 
+import copy
+import hashlib
 import os
 import platform
 import stat
-import copy
+
+from mercurial.i18n import _
 
-from mercurial import dirstate, httpconnection, match as match_, util, scmutil
-from mercurial.i18n import _
-from mercurial import node, error
+from mercurial import (
+    dirstate,
+    error,
+    httpconnection,
+    match as matchmod,
+    node,
+    scmutil,
+    util,
+)
 
 shortname = '.hglf'
 shortnameslash = shortname + '/'
@@ -152,7 +162,7 @@
 
 def lfdirstatestatus(lfdirstate, repo):
     wctx = repo['.']
-    match = match_.always(repo.root, repo.getcwd())
+    match = matchmod.always(repo.root, repo.getcwd())
     unsure, s = lfdirstate.status(match, [], False, False, False)
     modified, clean = s.modified, s.clean
     for lfile in unsure:
@@ -350,7 +360,7 @@
 def copyandhash(instream, outfile):
     '''Read bytes from instream (iterable) and write them to outfile,
     computing the SHA-1 hash of the data along the way. Return the hash.'''
-    hasher = util.sha1('')
+    hasher = hashlib.sha1('')
     for data in instream:
         hasher.update(data)
         outfile.write(data)
@@ -362,7 +372,7 @@
 def hashfile(file):
     if not os.path.exists(file):
         return ''
-    hasher = util.sha1('')
+    hasher = hashlib.sha1('')
     fd = open(file, 'rb')
     for data in util.filechunkiter(fd, 128 * 1024):
         hasher.update(data)
@@ -391,7 +401,7 @@
 def hexsha1(data):
     """hexsha1 returns the hex-encoded sha1 sum of the data in the file-like
     object data"""
-    h = util.sha1()
+    h = hashlib.sha1()
     for chunk in util.filechunkiter(data):
         h.update(chunk)
     return h.hexdigest()
@@ -533,7 +543,7 @@
         # otherwise to update all standins if the largefiles are
         # large.
         lfdirstate = openlfdirstate(ui, repo)
-        dirtymatch = match_.always(repo.root, repo.getcwd())
+        dirtymatch = matchmod.always(repo.root, repo.getcwd())
         unsure, s = lfdirstate.status(dirtymatch, [], False, False,
                                       False)
         modifiedfiles = unsure + s.modified + s.added + s.removed
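
The lfutil hunks above replace util.sha1 with hashlib.sha1 while keeping the
chunked reading via util.filechunkiter(fd, 128 * 1024). A minimal standalone
sketch of the same hashing approach; plain open()/read() stands in for
util.filechunkiter only to keep the sketch self-contained:

    import hashlib
    import os

    def hashfile(path, chunksize=128 * 1024):
        # Return the hex SHA-1 of a file read in 128k chunks, or '' if the
        # file does not exist, mirroring lfutil.hashfile above.
        if not os.path.exists(path):
            return ''
        hasher = hashlib.sha1()
        with open(path, 'rb') as fd:
            for chunk in iter(lambda: fd.read(chunksize), b''):
                hasher.update(chunk)
        return hasher.hexdigest()
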
--- a/hgext/largefiles/localstore.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/largefiles/localstore.py	Tue Jun 14 14:52:58 2016 -0500
@@ -7,11 +7,14 @@
 # GNU General Public License version 2 or any later version.
 
 '''store class for local filesystem'''
+from __future__ import absolute_import
 
 from mercurial.i18n import _
 
-import lfutil
-import basestore
+from . import (
+    basestore,
+    lfutil,
+)
 
 class localstore(basestore.basestore):
     '''localstore first attempts to grab files out of the store in the remote
@@ -42,29 +45,20 @@
         with open(path, 'rb') as fd:
             return lfutil.copyandhash(fd, tmpfile)
 
-    def _verifyfile(self, cctx, cset, contents, standin, verified):
-        filename = lfutil.splitstandin(standin)
-        if not filename:
-            return False
-        fctx = cctx[standin]
-        key = (filename, fctx.filenode())
-        if key in verified:
-            return False
-
-        expecthash = fctx.data()[0:40]
-        storepath, exists = lfutil.findstorepath(self.remote, expecthash)
-        verified.add(key)
-        if not exists:
-            self.ui.warn(
-                _('changeset %s: %s references missing %s\n')
-                % (cset, filename, storepath))
-            return True                 # failed
-
-        if contents:
-            actualhash = lfutil.hashfile(storepath)
-            if actualhash != expecthash:
+    def _verifyfiles(self, contents, filestocheck):
+        failed = False
+        for cset, filename, expectedhash in filestocheck:
+            storepath, exists = lfutil.findstorepath(self.remote, expectedhash)
+            if not exists:
                 self.ui.warn(
-                    _('changeset %s: %s references corrupted %s\n')
+                    _('changeset %s: %s references missing %s\n')
                     % (cset, filename, storepath))
-                return True             # failed
-        return False
+                failed = True
+            elif contents:
+                actualhash = lfutil.hashfile(storepath)
+                if actualhash != expectedhash:
+                    self.ui.warn(
+                        _('changeset %s: %s references corrupted %s\n')
+                        % (cset, filename, storepath))
+                    failed = True
+        return failed
--- a/hgext/largefiles/overrides.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/largefiles/overrides.py	Tue Jun 14 14:52:58 2016 -0500
@@ -7,17 +7,31 @@
 # GNU General Public License version 2 or any later version.
 
 '''Overridden Mercurial commands and functions for the largefiles extension'''
+from __future__ import absolute_import
 
-import os
 import copy
+import os
 
-from mercurial import hg, util, cmdutil, scmutil, match as match_, \
-        archival, pathutil, registrar, revset, error
 from mercurial.i18n import _
 
-import lfutil
-import lfcommands
-import basestore
+from mercurial import (
+    archival,
+    cmdutil,
+    error,
+    hg,
+    match as matchmod,
+    pathutil,
+    registrar,
+    revset,
+    scmutil,
+    util,
+)
+
+from . import (
+    lfcommands,
+    lfutil,
+    storefactory,
+)
 
 # -- Utility functions: commonly/repeatedly needed functionality ---------------
 
@@ -99,13 +113,13 @@
     if lfutil.islfilesrepo(repo):
         lfpats = ui.configlist(lfutil.longname, 'patterns', default=[])
         if lfpats:
-            lfmatcher = match_.match(repo.root, '', list(lfpats))
+            lfmatcher = matchmod.match(repo.root, '', list(lfpats))
 
     lfnames = []
     m = matcher
 
     wctx = repo[None]
-    for f in repo.walk(match_.badmatch(m, lambda x, y: None)):
+    for f in repo.walk(matchmod.badmatch(m, lambda x, y: None)):
         exact = m.exact(f)
         lfile = lfutil.standin(f) in wctx
         nfile = f in wctx
@@ -307,7 +321,7 @@
             if pat.startswith('set:'):
                 return pat
 
-            kindpat = match_._patsplit(pat, None)
+            kindpat = matchmod._patsplit(pat, None)
 
             if kindpat[0] is not None:
                 return kindpat[0] + ':' + tostandin(kindpat[1])
@@ -626,7 +640,7 @@
             # The patterns were previously mangled to add the standin
             # directory; we need to remove that now
             for pat in pats:
-                if match_.patkind(pat) is None and lfutil.shortname in pat:
+                if matchmod.patkind(pat) is None and lfutil.shortname in pat:
                     newpats.append(pat.replace(lfutil.shortname, ''))
                 else:
                     newpats.append(pat)
@@ -644,7 +658,7 @@
         oldmatch = installmatchfn(overridematch)
         listpats = []
         for pat in pats:
-            if match_.patkind(pat) is not None:
+            if matchmod.patkind(pat) is not None:
                 listpats.append(pat)
             else:
                 listpats.append(makestandin(pat))
@@ -977,7 +991,7 @@
     if subrepos:
         for subpath in sorted(ctx.substate):
             sub = ctx.workingsub(subpath)
-            submatch = match_.subdirmatcher(subpath, matchfn)
+            submatch = matchmod.subdirmatcher(subpath, matchfn)
             sub._repo.lfstatus = True
             sub.archive(archiver, prefix, submatch)
 
@@ -1025,7 +1039,7 @@
 
     for subpath in sorted(ctx.substate):
         sub = ctx.workingsub(subpath)
-        submatch = match_.subdirmatcher(subpath, match)
+        submatch = matchmod.subdirmatcher(subpath, match)
         sub._repo.lfstatus = True
         sub.archive(archiver, prefix + repo._path + '/', submatch)
 
@@ -1109,7 +1123,7 @@
             lfhashes.add(lfhash)
     lfutil.getlfilestoupload(repo, missing, dedup)
     if lfhashes:
-        lfexists = basestore._openstore(repo, other).exists(lfhashes)
+        lfexists = storefactory._openstore(repo, other).exists(lfhashes)
         for fn, lfhash in knowns:
             if not lfexists[lfhash]: # lfhash doesn't exist on "other"
                 addfunc(fn, lfhash)
@@ -1190,7 +1204,7 @@
         return orig(repo, matcher, prefix, opts, dry_run, similarity)
     # Get the list of missing largefiles so we can remove them
     lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
-    unsure, s = lfdirstate.status(match_.always(repo.root, repo.getcwd()), [],
+    unsure, s = lfdirstate.status(matchmod.always(repo.root, repo.getcwd()), [],
                                   False, False, False)
 
     # Call into the normal remove code, but the removing of the standin, we want
@@ -1338,7 +1352,7 @@
         else:
             hash = lfutil.readstandin(repo, lf, ctx.rev())
             if not lfutil.inusercache(repo.ui, hash):
-                store = basestore._openstore(repo)
+                store = storefactory._openstore(repo)
                 success, missing = store.get([(lf, hash)])
                 if len(success) != 1:
                     raise error.Abort(
@@ -1375,7 +1389,7 @@
         # (*1) deprecated, but used internally (e.g: "rebase --collapse")
 
         lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
-        unsure, s = lfdirstate.status(match_.always(repo.root,
+        unsure, s = lfdirstate.status(matchmod.always(repo.root,
                                                     repo.getcwd()),
                                       [], False, False, False)
         pctx = repo['.']
--- a/hgext/largefiles/proto.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/largefiles/proto.py	Tue Jun 14 14:52:58 2016 -0500
@@ -2,18 +2,27 @@
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
+from __future__ import absolute_import
 
 import os
 import re
 
-from mercurial import error, httppeer, util, wireproto
 from mercurial.i18n import _
 
+from mercurial import (
+    error,
+    httppeer,
+    util,
+    wireproto,
+)
+
+from . import (
+    lfutil,
+)
+
 urlerr = util.urlerr
 urlreq = util.urlreq
 
-import lfutil
-
 LARGEFILES_REQUIRED_MSG = ('\nThis repository uses the largefiles extension.'
                            '\n\nPlease enable it in your Mercurial config '
                            'file.\n')
--- a/hgext/largefiles/remotestore.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/largefiles/remotestore.py	Tue Jun 14 14:52:58 2016 -0500
@@ -5,20 +5,30 @@
 # GNU General Public License version 2 or any later version.
 
 '''remote largefile store; the base class for wirestore'''
+from __future__ import absolute_import
 
-from mercurial import util, wireproto, error
 from mercurial.i18n import _
 
+from mercurial import (
+    error,
+    util,
+    wireproto,
+)
+
+from . import (
+    basestore,
+    lfutil,
+    localstore,
+)
+
 urlerr = util.urlerr
 urlreq = util.urlreq
 
-import lfutil
-import basestore
-
 class remotestore(basestore.basestore):
     '''a largefile store accessed over a network'''
     def __init__(self, ui, repo, url):
         super(remotestore, self).__init__(ui, repo, url)
+        self._lstore = localstore.localstore(self.ui, self.repo, self.repo)
 
     def put(self, source, hash):
         if self.sendfile(source, hash):
@@ -65,34 +75,43 @@
 
         return lfutil.copyandhash(chunks, tmpfile)
 
-    def _verifyfile(self, cctx, cset, contents, standin, verified):
-        filename = lfutil.splitstandin(standin)
-        if not filename:
-            return False
-        fctx = cctx[standin]
-        key = (filename, fctx.filenode())
-        if key in verified:
-            return False
+    def _hashesavailablelocally(self, hashes):
+        existslocallymap = self._lstore.exists(hashes)
+        localhashes = [hash for hash in hashes if existslocallymap[hash]]
+        return localhashes
 
-        verified.add(key)
+    def _verifyfiles(self, contents, filestocheck):
+        failed = False
+        expectedhashes = [expectedhash
+                          for cset, filename, expectedhash in filestocheck]
+        localhashes = self._hashesavailablelocally(expectedhashes)
+        stats = self._stat([expectedhash for expectedhash in expectedhashes
+                            if expectedhash not in localhashes])
 
-        expecthash = fctx.data()[0:40]
-        stat = self._stat([expecthash])[expecthash]
-        if not stat:
-            return False
-        elif stat == 1:
-            self.ui.warn(
-                _('changeset %s: %s: contents differ\n')
-                % (cset, filename))
-            return True # failed
-        elif stat == 2:
-            self.ui.warn(
-                _('changeset %s: %s missing\n')
-                % (cset, filename))
-            return True # failed
-        else:
-            raise RuntimeError('verify failed: unexpected response from '
-                               'statlfile (%r)' % stat)
+        for cset, filename, expectedhash in filestocheck:
+            if expectedhash in localhashes:
+                filetocheck = (cset, filename, expectedhash)
+                verifyresult = self._lstore._verifyfiles(contents,
+                                                         [filetocheck])
+                if verifyresult:
+                    failed = True
+            else:
+                stat = stats[expectedhash]
+                if stat:
+                    if stat == 1:
+                        self.ui.warn(
+                            _('changeset %s: %s: contents differ\n')
+                            % (cset, filename))
+                        failed = True
+                    elif stat == 2:
+                        self.ui.warn(
+                            _('changeset %s: %s missing\n')
+                            % (cset, filename))
+                        failed = True
+                    else:
+                        raise RuntimeError('verify failed: unexpected response '
+                                           'from statlfile (%r)' % stat)
+        return failed
 
     def batch(self):
         '''Support for remote batching.'''
--- a/hgext/largefiles/reposetup.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/largefiles/reposetup.py	Tue Jun 14 14:52:58 2016 -0500
@@ -7,14 +7,23 @@
 # GNU General Public License version 2 or any later version.
 
 '''setup for largefiles repositories: reposetup'''
+from __future__ import absolute_import
+
 import copy
 
-from mercurial import error, match as match_, error
 from mercurial.i18n import _
-from mercurial import scmutil, localrepo
 
-import lfcommands
-import lfutil
+from mercurial import (
+    error,
+    localrepo,
+    match as matchmod,
+    scmutil,
+)
+
+from . import (
+    lfcommands,
+    lfutil,
+)
 
 def reposetup(ui, repo):
     # wire repositories should be given new wireproto functions
@@ -94,7 +103,7 @@
             parentworking = working and ctx1 == self['.']
 
             if match is None:
-                match = match_.always(self.root, self.getcwd())
+                match = matchmod.always(self.root, self.getcwd())
 
             wlock = None
             try:
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/largefiles/storefactory.py	Tue Jun 14 14:52:58 2016 -0500
@@ -0,0 +1,78 @@
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from __future__ import absolute_import
+
+import re
+
+from mercurial.i18n import _
+
+from mercurial import (
+    error,
+    hg,
+    util,
+)
+
+from . import (
+    lfutil,
+    localstore,
+    wirestore,
+)
+
+# During clone this function is passed the src's ui object
+# but it needs the dest's ui object so it can read out of
+# the config file. Use repo.ui instead.
+def _openstore(repo, remote=None, put=False):
+    ui = repo.ui
+
+    if not remote:
+        lfpullsource = getattr(repo, 'lfpullsource', None)
+        if lfpullsource:
+            path = ui.expandpath(lfpullsource)
+        elif put:
+            path = ui.expandpath('default-push', 'default')
+        else:
+            path = ui.expandpath('default')
+
+        # ui.expandpath() leaves 'default-push' and 'default' alone if
+        # they cannot be expanded: fallback to the empty string,
+        # meaning the current directory.
+        if path == 'default-push' or path == 'default':
+            path = ''
+            remote = repo
+        else:
+            path, _branches = hg.parseurl(path)
+            remote = hg.peer(repo, {}, path)
+
+    # The path could be a scheme so use Mercurial's normal functionality
+    # to resolve the scheme to a repository and use its path
+    path = util.safehasattr(remote, 'url') and remote.url() or remote.path
+
+    match = _scheme_re.match(path)
+    if not match:                       # regular filesystem path
+        scheme = 'file'
+    else:
+        scheme = match.group(1)
+
+    try:
+        storeproviders = _storeprovider[scheme]
+    except KeyError:
+        raise error.Abort(_('unsupported URL scheme %r') % scheme)
+
+    for classobj in storeproviders:
+        try:
+            return classobj(ui, repo, remote)
+        except lfutil.storeprotonotcapable:
+            pass
+
+    raise error.Abort(_('%s does not appear to be a largefile store') %
+                     util.hidepassword(path))
+
+_storeprovider = {
+    'file':  [localstore.localstore],
+    'http':  [wirestore.wirestore],
+    'https': [wirestore.wirestore],
+    'ssh': [wirestore.wirestore],
+    }
+
+_scheme_re = re.compile(r'^([a-zA-Z0-9+-.]+)://')
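
The new storefactory module resolves a store class from the remote path's URL
scheme via _scheme_re and the _storeprovider table. A small sketch of just the
scheme-matching step; the sample paths and host name below are made up:

    import re

    _scheme_re = re.compile(r'^([a-zA-Z0-9+-.]+)://')

    def schemeof(path):
        # Paths without an explicit scheme are treated as local filesystem
        # paths, exactly as _openstore does above.
        m = _scheme_re.match(path)
        return m.group(1) if m else 'file'

    print(schemeof('/srv/largefiles'))            # file
    print(schemeof('https://hg.example.com/r'))   # https
    print(schemeof('ssh://user@host//repo'))      # ssh
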
--- a/hgext/largefiles/uisetup.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/largefiles/uisetup.py	Tue Jun 14 14:52:58 2016 -0500
@@ -7,14 +7,36 @@
 # GNU General Public License version 2 or any later version.
 
 '''setup for largefiles extension: uisetup'''
+from __future__ import absolute_import
 
-from mercurial import archival, cmdutil, commands, extensions, filemerge, hg, \
-    httppeer, merge, scmutil, sshpeer, wireproto, subrepo, copies, exchange
 from mercurial.i18n import _
-from mercurial.hgweb import hgweb_mod, webcommands
+
+from mercurial.hgweb import (
+    hgweb_mod,
+    webcommands,
+)
 
-import overrides
-import proto
+from mercurial import (
+    archival,
+    cmdutil,
+    commands,
+    copies,
+    exchange,
+    extensions,
+    filemerge,
+    hg,
+    httppeer,
+    merge,
+    scmutil,
+    sshpeer,
+    subrepo,
+    wireproto,
+)
+
+from . import (
+    overrides,
+    proto,
+)
 
 def uisetup(ui):
     # Disable auto-status for some commands which assume that all
--- a/hgext/largefiles/wirestore.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/largefiles/wirestore.py	Tue Jun 14 14:52:58 2016 -0500
@@ -4,9 +4,12 @@
 # GNU General Public License version 2 or any later version.
 
 '''largefile store working over Mercurial's wire protocol'''
+from __future__ import absolute_import
 
-import lfutil
-import remotestore
+from . import (
+    lfutil,
+    remotestore,
+)
 
 class wirestore(remotestore.remotestore):
     def __init__(self, ui, repo, remote):
--- a/hgext/mq.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/mq.py	Tue Jun 14 14:52:58 2016 -0500
@@ -62,19 +62,39 @@
 in the strip extension.
 '''
 
+from __future__ import absolute_import
+
+import errno
+import os
+import re
+import shutil
 from mercurial.i18n import _
-from mercurial.node import bin, hex, short, nullid, nullrev
-from mercurial.lock import release
-from mercurial import commands, cmdutil, hg, scmutil, util, revset
-from mercurial import dispatch
-from mercurial import extensions, error, phases
-from mercurial import patch as patchmod
-from mercurial import lock as lockmod
-from mercurial import localrepo
-from mercurial import registrar
-from mercurial import subrepo
-import os, re, errno, shutil
-
+from mercurial.node import (
+    bin,
+    hex,
+    nullid,
+    nullrev,
+    short,
+)
+from mercurial import (
+    cmdutil,
+    commands,
+    dispatch,
+    error,
+    extensions,
+    hg,
+    localrepo,
+    lock as lockmod,
+    patch as patchmod,
+    phases,
+    registrar,
+    revset,
+    scmutil,
+    subrepo,
+    util,
+)
+
+release = lockmod.release
 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
 
 cmdtable = {}
--- a/hgext/notify.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/notify.py	Tue Jun 14 14:52:58 2016 -0500
@@ -139,6 +139,7 @@
 import socket
 import time
 
+from mercurial.i18n import _
 from mercurial import (
     cmdutil,
     error,
@@ -146,7 +147,6 @@
     patch,
     util,
 )
-from mercurial.i18n import _
 
 # Note for extension authors: ONLY specify testedwith = 'internal' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
@@ -363,7 +363,7 @@
             s = patch.diffstat(difflines)
             # s may be nil, don't include the header if it is
             if s:
-                self.ui.write('\ndiffstat:\n\n%s' % s)
+                self.ui.write(_('\ndiffstat:\n\n%s') % s)
 
         if maxdiff == 0:
             return
--- a/hgext/pager.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/pager.py	Tue Jun 14 14:52:58 2016 -0500
@@ -66,6 +66,7 @@
 import subprocess
 import sys
 
+from mercurial.i18n import _
 from mercurial import (
     cmdutil,
     commands,
@@ -73,7 +74,6 @@
     extensions,
     util,
     )
-from mercurial.i18n import _
 
 # Note for extension authors: ONLY specify testedwith = 'internal' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
--- a/hgext/patchbomb.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/patchbomb.py	Tue Jun 14 14:52:58 2016 -0500
@@ -71,6 +71,7 @@
 import socket
 import tempfile
 
+from mercurial.i18n import _
 from mercurial import (
     cmdutil,
     commands,
@@ -83,7 +84,6 @@
     util,
 )
 stringio = util.stringio
-from mercurial.i18n import _
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
@@ -708,13 +708,7 @@
                 fp.close()
         else:
             if not sendmail:
-                verifycert = ui.config('smtp', 'verifycert', 'strict')
-                if opts.get('insecure'):
-                    ui.setconfig('smtp', 'verifycert', 'loose', 'patchbomb')
-                try:
-                    sendmail = mail.connect(ui, mbox=mbox)
-                finally:
-                    ui.setconfig('smtp', 'verifycert', verifycert, 'patchbomb')
+                sendmail = mail.connect(ui, mbox=mbox)
             ui.status(_('sending '), subj, ' ...\n')
             ui.progress(_('sending'), i, item=subj, total=len(msgs),
                         unit=_('emails'))
--- a/hgext/purge.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/purge.py	Tue Jun 14 14:52:58 2016 -0500
@@ -27,6 +27,7 @@
 
 import os
 
+from mercurial.i18n import _
 from mercurial import (
     cmdutil,
     commands,
@@ -34,7 +35,6 @@
     scmutil,
     util,
 )
-from mercurial.i18n import _
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
@@ -84,13 +84,13 @@
     list of files that this program would delete, use the --print
     option.
     '''
-    act = not opts['print']
+    act = not opts.get('print')
     eol = '\n'
-    if opts['print0']:
+    if opts.get('print0'):
         eol = '\0'
         act = False # --print0 implies --print
-    removefiles = opts['files']
-    removedirs = opts['dirs']
+    removefiles = opts.get('files')
+    removedirs = opts.get('dirs')
     if not removefiles and not removedirs:
         removefiles = True
         removedirs = True
@@ -101,7 +101,7 @@
                 remove_func(repo.wjoin(name))
             except OSError:
                 m = _('%s cannot be removed') % name
-                if opts['abort_on_err']:
+                if opts.get('abort_on_err'):
                     raise error.Abort(m)
                 ui.warn(_('warning: %s\n') % m)
         else:
@@ -111,7 +111,7 @@
     if removedirs:
         directories = []
         match.explicitdir = match.traversedir = directories.append
-    status = repo.status(match=match, ignored=opts['all'], unknown=True)
+    status = repo.status(match=match, ignored=opts.get('all'), unknown=True)
 
     if removefiles:
         for f in sorted(status.unknown + status.ignored):
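
The purge hunks above switch from opts['key'] indexing to opts.get('key') so
that an option missing from the dict no longer raises. A two-line illustration
of the difference (plain Python dict behavior, not Mercurial-specific):

    opts = {'print0': True}        # '--print' was never passed
    act = not opts.get('print')    # True: a missing key defaults to None
    # opts['print'] would raise KeyError here
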
--- a/hgext/rebase.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/rebase.py	Tue Jun 14 14:52:58 2016 -0500
@@ -14,14 +14,42 @@
 https://mercurial-scm.org/wiki/RebaseExtension
 '''
 
-from mercurial import hg, util, repair, merge, cmdutil, commands, bookmarks
-from mercurial import extensions, patch, scmutil, phases, obsolete, error
-from mercurial import copies, destutil, repoview, registrar, revset
-from mercurial.commands import templateopts
-from mercurial.node import nullrev, nullid, hex, short
-from mercurial.lock import release
+from __future__ import absolute_import
+
+import errno
+import os
+
 from mercurial.i18n import _
-import os, errno
+from mercurial.node import (
+    hex,
+    nullid,
+    nullrev,
+    short,
+)
+from mercurial import (
+    bookmarks,
+    cmdutil,
+    commands,
+    copies,
+    destutil,
+    error,
+    extensions,
+    hg,
+    lock,
+    merge,
+    obsolete,
+    patch,
+    phases,
+    registrar,
+    repair,
+    repoview,
+    revset,
+    scmutil,
+    util,
+)
+
+release = lock.release
+templateopts = commands.templateopts
 
 # The following constants are used throughout the rebase module. The ordering of
 # their values must be maintained.
@@ -390,7 +418,8 @@
                 ui.progress(_("rebasing"), pos, ("%d:%s" % (rev, ctx)),
                             _('changesets'), total)
                 p1, p2, base = defineparents(repo, rev, target, state,
-                                             targetancestors)
+                                             targetancestors,
+                                             obsoletenotrebased)
                 storestatus(repo, originalwd, target, state, collapsef, keepf,
                             keepbranchesf, external, activebookmark)
                 storecollapsemsg(repo, collapsemsg)
@@ -455,7 +484,8 @@
 
         if collapsef and not keepopen:
             p1, p2, _base = defineparents(repo, min(state), target,
-                                          state, targetancestors)
+                                          state, targetancestors,
+                                          obsoletenotrebased)
             editopt = opts.get('edit')
             editform = 'rebase.collapse'
             if collapsemsg:
@@ -744,10 +774,12 @@
                  'experimental.rebaseskipobsolete to False')
         raise error.Abort(msg, hint=hint)
 
-def defineparents(repo, rev, target, state, targetancestors):
+def defineparents(repo, rev, target, state, targetancestors,
+                  obsoletenotrebased):
     'Return the new parent relationship of the revision that will be rebased'
     parents = repo[rev].parents()
     p1 = p2 = nullrev
+    rp1 = None
 
     p1n = parents[0].rev()
     if p1n in targetancestors:
@@ -771,6 +803,8 @@
         if p2n in state:
             if p1 == target: # p1n in targetancestors or external
                 p1 = state[p2n]
+                if p1 == revprecursor:
+                    rp1 = obsoletenotrebased[p2n]
             elif state[p2n] in revskipped:
                 p2 = nearestrebased(repo, p2n, state)
                 if p2 is None:
@@ -784,7 +818,7 @@
                         'would have 3 parents') % rev)
             p2 = p2n
     repo.ui.debug(" future parents are %d and %d\n" %
-                            (repo[p1].rev(), repo[p2].rev()))
+                            (repo[rp1 or p1].rev(), repo[p2].rev()))
 
     if not any(p.rev() in state for p in parents):
         # Case (1) root changeset of a non-detaching rebase set.
@@ -828,6 +862,8 @@
         # make it feasible to consider different cases separately. In these
         # other cases we currently just leave it to the user to correctly
         # resolve an impossible merge using a wrong ancestor.
+        #
+        # xx, p1 could be -4, and both parents could probably be -4...
         for p in repo[rev].parents():
             if state.get(p.rev()) == p1:
                 base = p.rev()
@@ -838,7 +874,7 @@
             # Raise because this function is called wrong (see issue 4106)
             raise AssertionError('no base found to rebase on '
                                  '(defineparents called wrong)')
-    return p1, p2, base
+    return rp1 or p1, p2, base
 
 def isagitpatch(repo, patchname):
     'Return true if the given patch is in git format'
--- a/hgext/record.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/record.py	Tue Jun 14 14:52:58 2016 -0500
@@ -12,13 +12,13 @@
 
 from __future__ import absolute_import
 
+from mercurial.i18n import _
 from mercurial import (
     cmdutil,
     commands,
     error,
     extensions,
 )
-from mercurial.i18n import _
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
--- a/hgext/relink.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/relink.py	Tue Jun 14 14:52:58 2016 -0500
@@ -11,13 +11,13 @@
 import os
 import stat
 
+from mercurial.i18n import _
 from mercurial import (
     cmdutil,
     error,
     hg,
     util,
 )
-from mercurial.i18n import _
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
--- a/hgext/schemes.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/schemes.py	Tue Jun 14 14:52:58 2016 -0500
@@ -43,6 +43,8 @@
 
 import os
 import re
+
+from mercurial.i18n import _
 from mercurial import (
     cmdutil,
     error,
@@ -51,7 +53,6 @@
     templater,
     util,
 )
-from mercurial.i18n import _
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
--- a/hgext/shelve.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/shelve.py	Tue Jun 14 14:52:58 2016 -0500
@@ -25,6 +25,8 @@
 import collections
 import errno
 import itertools
+
+from mercurial.i18n import _
 from mercurial import (
     bundle2,
     bundlerepo,
@@ -45,7 +47,6 @@
     templatefilters,
     util,
 )
-from mercurial.i18n import _
 
 from . import (
     rebase,
@@ -225,28 +226,10 @@
 def _aborttransaction(repo):
     '''Abort current transaction for shelve/unshelve, but keep dirstate
     '''
-    backupname = 'dirstate.shelve'
-    dirstatebackup = None
-    try:
-        # create backup of (un)shelved dirstate, because aborting transaction
-        # should restore dirstate to one at the beginning of the
-        # transaction, which doesn't include the result of (un)shelving
-        fp = repo.vfs.open(backupname, "w")
-        dirstatebackup = backupname
-        # clearing _dirty/_dirtypl of dirstate by _writedirstate below
-        # is unintentional. but it doesn't cause problem in this case,
-        # because no code path refers them until transaction is aborted.
-        repo.dirstate._writedirstate(fp) # write in-memory changes forcibly
-
-        tr = repo.currenttransaction()
-        tr.abort()
-
-        # restore to backuped dirstate
-        repo.vfs.rename(dirstatebackup, 'dirstate')
-        dirstatebackup = None
-    finally:
-        if dirstatebackup:
-            repo.vfs.unlink(dirstatebackup)
+    tr = repo.currenttransaction()
+    repo.dirstate.savebackup(tr, suffix='.shelve')
+    tr.abort()
+    repo.dirstate.restorebackup(None, suffix='.shelve')
 
 def createcmd(ui, repo, pats, opts):
     """subcommand that creates a new shelve"""
--- a/hgext/strip.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/strip.py	Tue Jun 14 14:52:58 2016 -0500
@@ -5,6 +5,7 @@
 """
 from __future__ import absolute_import
 
+from mercurial.i18n import _
 from mercurial import (
     bookmarks as bookmarksmod,
     cmdutil,
@@ -17,7 +18,6 @@
     scmutil,
     util,
 )
-from mercurial.i18n import _
 nullid = nodemod.nullid
 release = lockmod.release
 
--- a/hgext/win32mbcs.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/hgext/win32mbcs.py	Tue Jun 14 14:52:58 2016 -0500
@@ -49,11 +49,11 @@
 import os
 import sys
 
+from mercurial.i18n import _
 from mercurial import (
     encoding,
     error,
 )
-from mercurial.i18n import _
 
 # Note for extension authors: ONLY specify testedwith = 'internal' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
--- a/i18n/hggettext	Tue Jun 07 08:32:33 2016 +0200
+++ b/i18n/hggettext	Tue Jun 14 14:52:58 2016 -0500
@@ -20,7 +20,11 @@
 join the message catalogs to get the final catalog.
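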
 """
 
-import os, sys, inspect
+from __future__ import absolute_import, print_function
+
+import inspect
+import os
+import sys
 
 
 def escape(s):
@@ -95,7 +99,7 @@
     if mod.__doc__:
         src = open(path).read()
         lineno = 1 + offset(src, mod.__doc__, path, 7)
-        print poentry(path, lineno, mod.__doc__)
+        print(poentry(path, lineno, mod.__doc__))
 
     functions = list(getattr(mod, 'i18nfunctions', []))
     functions = [(f, True) for f in functions]
@@ -115,12 +119,12 @@
             if rstrip:
                 doc = doc.rstrip()
             lineno += offset(src, doc, name, 1)
-            print poentry(path, lineno, doc)
+            print(poentry(path, lineno, doc))
 
 
 def rawtext(path):
     src = open(path).read()
-    print poentry(path, 1, src)
+    print(poentry(path, 1, src))
 
 
 if __name__ == "__main__":
--- a/i18n/posplit	Tue Jun 07 08:32:33 2016 +0200
+++ b/i18n/posplit	Tue Jun 14 14:52:58 2016 -0500
@@ -5,9 +5,11 @@
 # license: MIT/X11/Expat
 #
 
+from __future__ import absolute_import, print_function
+
+import polib
 import re
 import sys
-import polib
 
 def addentry(po, entry, cache):
     e = cache.get(entry.msgid)
@@ -67,8 +69,8 @@
                             continue
                         else:
                             # lines following directly, unexpected
-                            print 'Warning: text follows line with directive' \
-                                  ' %s' % directive
+                            print('Warning: text follows line with directive' \
+                                  ' %s' % directive)
                     comment = 'do not translate: .. %s::' % directive
                     if not newentry.comment:
                         newentry.comment = comment
--- a/mercurial/__init__.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/__init__.py	Tue Jun 14 14:52:58 2016 -0500
@@ -12,36 +12,13 @@
 import sys
 import zipimport
 
+from . import (
+    policy
+)
+
 __all__ = []
 
-# Rules for how modules can be loaded. Values are:
-#
-#    c - require C extensions
-#    allow - allow pure Python implementation when C loading fails
-#    py - only load pure Python modules
-#
-# By default, require the C extensions for performance reasons.
-modulepolicy = 'c'
-try:
-    from . import __modulepolicy__
-    modulepolicy = __modulepolicy__.modulepolicy
-except ImportError:
-    pass
-
-# PyPy doesn't load C extensions.
-#
-# The canonical way to do this is to test platform.python_implementation().
-# But we don't import platform and don't bloat for it here.
-if '__pypy__' in sys.builtin_module_names:
-    modulepolicy = 'py'
-
-# Our C extensions aren't yet compatible with Python 3. So use pure Python
-# on Python 3 for now.
-if sys.version_info[0] >= 3:
-    modulepolicy = 'py'
-
-# Environment variable can always force settings.
-modulepolicy = os.environ.get('HGMODULEPOLICY', modulepolicy)
+modulepolicy = policy.policy
 
 # Modules that have both Python and C implementations. See also the
 # set of .py files under mercurial/pure/.
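
The module-policy selection removed from mercurial/__init__.py now lives in
the new policy module. The policy module itself is not part of this diff, so
the following is only a reconstruction of the removed logic, based on the
comments deleted above:

    import os
    import sys

    # 'c'     - require C extensions
    # 'allow' - allow pure Python implementation when C loading fails
    # 'py'    - only load pure Python modules
    policy = 'c'
    try:
        from . import __modulepolicy__
        policy = __modulepolicy__.modulepolicy
    except ImportError:
        pass

    # PyPy cannot load the C extensions, and they are not yet Python 3 ready.
    if '__pypy__' in sys.builtin_module_names or sys.version_info[0] >= 3:
        policy = 'py'

    # The environment variable can always force a setting.
    policy = os.environ.get('HGMODULEPOLICY', policy)
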
--- a/mercurial/ancestor.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/ancestor.py	Tue Jun 14 14:52:58 2016 -0500
@@ -291,7 +291,7 @@
     def __nonzero__(self):
         """False if the set is empty, True otherwise."""
         try:
-            iter(self).next()
+            next(iter(self))
             return True
         except StopIteration:
             return False
--- a/mercurial/bookmarks.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/bookmarks.py	Tue Jun 14 14:52:58 2016 -0500
@@ -109,39 +109,6 @@
                             location='plain')
         tr.hookargs['bookmark_moved'] = '1'
 
-    def write(self):
-        '''Write bookmarks
-
-        Write the given bookmark => hash dictionary to the .hg/bookmarks file
-        in a format equal to those of localtags.
-
-        We also store a backup of the previous state in undo.bookmarks that
-        can be copied back on rollback.
-        '''
-        msg = 'bm.write() is deprecated, use bm.recordchange(transaction)'
-        self._repo.ui.deprecwarn(msg, '3.7')
-        # TODO: writing the active bookmark should probably also use a
-        # transaction.
-        self._writeactive()
-        if self._clean:
-            return
-        repo = self._repo
-        if (repo.ui.configbool('devel', 'all-warnings')
-                or repo.ui.configbool('devel', 'check-locks')):
-            l = repo._wlockref and repo._wlockref()
-            if l is None or not l.held:
-                repo.ui.develwarn('bookmarks write with no wlock')
-
-        tr = repo.currenttransaction()
-        if tr:
-            self.recordchange(tr)
-            # invalidatevolatilesets() is omitted because this doesn't
-            # write changes out actually
-            return
-
-        self._writerepo(repo)
-        repo.invalidatevolatilesets()
-
     def _writerepo(self, repo):
         """Factored out for extensibility"""
         rbm = repo._bookmarks
@@ -150,7 +117,8 @@
             rbm._writeactive()
 
         with repo.wlock():
-            file_ = repo.vfs('bookmarks', 'w', atomictemp=True)
+            file_ = repo.vfs('bookmarks', 'w', atomictemp=True,
+                             checkambig=True)
             try:
                 self._write(file_)
             except: # re-raises
@@ -164,7 +132,8 @@
             return
         with self._repo.wlock():
             if self._active is not None:
-                f = self._repo.vfs('bookmarks.current', 'w', atomictemp=True)
+                f = self._repo.vfs('bookmarks.current', 'w', atomictemp=True,
+                                   checkambig=True)
                 try:
                     f.write(encoding.fromlocal(self._active))
                 finally:
--- a/mercurial/changegroup.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/changegroup.py	Tue Jun 14 14:52:58 2016 -0500
@@ -530,6 +530,17 @@
     def fileheader(self, fname):
         return chunkheader(len(fname)) + fname
 
+    # Extracted both for clarity and for overriding in extensions.
+    def _sortgroup(self, revlog, nodelist, lookup):
+        """Sort nodes for change group and turn them into revnums."""
+        # for generaldelta revlogs, we linearize the revs; this will both be
+        # much quicker and generate a much smaller bundle
+        if (revlog._generaldelta and self._reorder is None) or self._reorder:
+            dag = dagutil.revlogdag(revlog)
+            return dag.linearize(set(revlog.rev(n) for n in nodelist))
+        else:
+            return sorted([revlog.rev(n) for n in nodelist])
+
     def group(self, nodelist, revlog, lookup, units=None):
         """Calculate a delta group, yielding a sequence of changegroup chunks
         (strings).
@@ -549,14 +560,7 @@
             yield self.close()
             return
 
-        # for generaldelta revlogs, we linearize the revs; this will both be
-        # much quicker and generate a much smaller bundle
-        if (revlog._generaldelta and self._reorder is None) or self._reorder:
-            dag = dagutil.revlogdag(revlog)
-            revs = set(revlog.rev(n) for n in nodelist)
-            revs = dag.linearize(revs)
-        else:
-            revs = sorted([revlog.rev(n) for n in nodelist])
+        revs = self._sortgroup(revlog, nodelist, lookup)
 
         # add the parent of the first rev
         p = revlog.parentrevs(revs[0])[0]
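
The _sortgroup hook above was extracted so extensions can substitute their own
node ordering before delta generation. A rough sketch of such an override; the
class names here are stand-ins, not the actual packer classes:

    class basepacker(object):
        # Default behavior: plain revision order (the generaldelta
        # linearization branch is omitted from this sketch).
        def _sortgroup(self, revlog, nodelist, lookup):
            return sorted(revlog.rev(n) for n in nodelist)

    class reversedpacker(basepacker):
        # An extension could emit the group in reverse revision order by
        # overriding only the hook.
        def _sortgroup(self, revlog, nodelist, lookup):
            return sorted((revlog.rev(n) for n in nodelist), reverse=True)
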
--- a/mercurial/cmdutil.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/cmdutil.py	Tue Jun 14 14:52:58 2016 -0500
@@ -83,7 +83,7 @@
         else:
             recordfn = crecordmod.chunkselector
 
-        return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
+        return crecordmod.filterpatch(ui, originalhunks, recordfn)
 
     else:
         return patch.filterpatch(ui, originalhunks, operation)
@@ -91,9 +91,9 @@
 def recordfilter(ui, originalhunks, operation=None):
     """ Prompts the user to filter the originalhunks and return a list of
     selected hunks.
-    *operation* is used for ui purposes to indicate the user
-    what kind of filtering they are doing: reverting, committing, shelving, etc.
-    *operation* has to be a translated string.
+    *operation* is used to build ui messages that indicate to the user what
+    kind of filtering they are doing: reverting, committing, shelving, etc.
+    (see patch.filterpatch).
     """
     usecurses = crecordmod.checkcurses(ui)
     testfile = ui.config('experimental', 'crecordtest', None)
@@ -1998,7 +1998,7 @@
         followfirst = 0
     # --follow with FILE behavior depends on revs...
     it = iter(revs)
-    startrev = it.next()
+    startrev = next(it)
     followdescendants = startrev < next(it, startrev)
 
     # branch and only_branch are really aliases and must be handled at
@@ -2147,7 +2147,8 @@
     if opts.get('rev'):
         # User-specified revs might be unsorted, but don't sort before
         # _makelogrevset because it might depend on the order of revs
-        revs.sort(reverse=True)
+        if not (revs.isdescending() or revs.istopo()):
+            revs.sort(reverse=True)
     if expr:
         # Revset matchers often operate faster on revisions in changelog
         # order, because most filters deal with the changelog.
@@ -3071,7 +3072,7 @@
 
             # tell newly modified apart.
             dsmodified &= modified
-            dsmodified |= modified & dsadded # dirstate added may needs backup
+            dsmodified |= modified & dsadded # dirstate added may need backup
             modified -= dsmodified
 
             # We need to wait for some post-processing to update this set
@@ -3301,10 +3302,12 @@
         else:
             diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
         originalchunks = patch.parsepatch(diff)
+        operation = 'discard' if node == parent else 'revert'
 
         try:
 
-            chunks, opts = recordfilter(repo.ui, originalchunks)
+            chunks, opts = recordfilter(repo.ui, originalchunks,
+                                        operation=operation)
             if reversehunks:
                 chunks = patch.reversehunks(chunks)
 
@@ -3518,7 +3521,7 @@
     def __init__(self, repo, name):
         self._repo = repo
         self._suffix = '.backup.%s.%d' % (name, id(self))
-        repo.dirstate._savebackup(repo.currenttransaction(), self._suffix)
+        repo.dirstate.savebackup(repo.currenttransaction(), self._suffix)
         self._active = True
         self._closed = False
 
@@ -3536,13 +3539,13 @@
                    % self._suffix)
             raise error.Abort(msg)
 
-        self._repo.dirstate._clearbackup(self._repo.currenttransaction(),
+        self._repo.dirstate.clearbackup(self._repo.currenttransaction(),
                                          self._suffix)
         self._active = False
         self._closed = True
 
     def _abort(self):
-        self._repo.dirstate._restorebackup(self._repo.currenttransaction(),
+        self._repo.dirstate.restorebackup(self._repo.currenttransaction(),
                                            self._suffix)
         self._active = False
 
--- a/mercurial/commands.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/commands.py	Tue Jun 14 14:52:58 2016 -0500
@@ -59,6 +59,7 @@
     obsolete,
     patch,
     phases,
+    policy,
     pvec,
     repair,
     revlog,
@@ -2089,51 +2090,56 @@
         gen = exchange.readbundle(ui, f, bundlepath)
         if isinstance(gen, bundle2.unbundle20):
             return _debugbundle2(ui, gen, all=all, **opts)
-        if all:
-            ui.write(("format: id, p1, p2, cset, delta base, len(delta)\n"))
-
-            def showchunks(named):
-                ui.write("\n%s\n" % named)
-                chain = None
-                while True:
-                    chunkdata = gen.deltachunk(chain)
-                    if not chunkdata:
-                        break
-                    node = chunkdata['node']
-                    p1 = chunkdata['p1']
-                    p2 = chunkdata['p2']
-                    cs = chunkdata['cs']
-                    deltabase = chunkdata['deltabase']
-                    delta = chunkdata['delta']
-                    ui.write("%s %s %s %s %s %s\n" %
-                             (hex(node), hex(p1), hex(p2),
-                              hex(cs), hex(deltabase), len(delta)))
-                    chain = node
-
-            chunkdata = gen.changelogheader()
-            showchunks("changelog")
-            chunkdata = gen.manifestheader()
-            showchunks("manifest")
-            while True:
-                chunkdata = gen.filelogheader()
-                if not chunkdata:
-                    break
-                fname = chunkdata['filename']
-                showchunks(fname)
-        else:
-            if isinstance(gen, bundle2.unbundle20):
-                raise error.Abort(_('use debugbundle2 for this file'))
-            chunkdata = gen.changelogheader()
+        _debugchangegroup(ui, gen, all=all, **opts)
+
+def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
+    indent_string = ' ' * indent
+    if all:
+        ui.write("%sformat: id, p1, p2, cset, delta base, len(delta)\n"
+                 % indent_string)
+
+        def showchunks(named):
+            ui.write("\n%s%s\n" % (indent_string, named))
             chain = None
             while True:
                 chunkdata = gen.deltachunk(chain)
                 if not chunkdata:
                     break
                 node = chunkdata['node']
-                ui.write("%s\n" % hex(node))
+                p1 = chunkdata['p1']
+                p2 = chunkdata['p2']
+                cs = chunkdata['cs']
+                deltabase = chunkdata['deltabase']
+                delta = chunkdata['delta']
+                ui.write("%s%s %s %s %s %s %s\n" %
+                         (indent_string, hex(node), hex(p1), hex(p2),
+                          hex(cs), hex(deltabase), len(delta)))
                 chain = node
 
-def _debugbundle2(ui, gen, **opts):
+        chunkdata = gen.changelogheader()
+        showchunks("changelog")
+        chunkdata = gen.manifestheader()
+        showchunks("manifest")
+        while True:
+            chunkdata = gen.filelogheader()
+            if not chunkdata:
+                break
+            fname = chunkdata['filename']
+            showchunks(fname)
+    else:
+        if isinstance(gen, bundle2.unbundle20):
+            raise error.Abort(_('use debugbundle2 for this file'))
+        chunkdata = gen.changelogheader()
+        chain = None
+        while True:
+            chunkdata = gen.deltachunk(chain)
+            if not chunkdata:
+                break
+            node = chunkdata['node']
+            ui.write("%s%s\n" % (indent_string, hex(node)))
+            chain = node
+
+def _debugbundle2(ui, gen, all=None, **opts):
     """lists the contents of a bundle2"""
     if not isinstance(gen, bundle2.unbundle20):
         raise error.Abort(_('not a bundle2 file'))
@@ -2143,15 +2149,7 @@
         if part.type == 'changegroup':
             version = part.params.get('version', '01')
             cg = changegroup.getunbundler(version, part, 'UN')
-            chunkdata = cg.changelogheader()
-            chain = None
-            while True:
-                chunkdata = cg.deltachunk(chain)
-                if not chunkdata:
-                    break
-                node = chunkdata['node']
-                ui.write("    %s\n" % hex(node))
-                chain = node
+            _debugchangegroup(ui, cg, all=all, indent=4, **opts)
 
 @command('debugcreatestreamclonebundle', [], 'FILE')
 def debugcreatestreamclonebundle(ui, repo, fname):
@@ -2524,15 +2522,16 @@
                             break
             if ignored:
                 if ignored == nf:
-                    ui.write("%s is ignored\n" % f)
+                    ui.write(_("%s is ignored\n") % f)
                 else:
-                    ui.write("%s is ignored because of containing folder %s\n"
+                    ui.write(_("%s is ignored because of "
+                               "containing folder %s\n")
                              % (f, ignored))
                 ignorefile, lineno, line = ignoredata
-                ui.write("(ignore rule in %s, line %d: '%s')\n"
+                ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                          % (ignorefile, lineno, line))
             else:
-                ui.write("%s is not ignored\n" % f)
+                ui.write(_("%s is not ignored\n") % f)
 
 @command('debugindex', debugrevlogopts +
     [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
@@ -2743,7 +2742,16 @@
     fm.write('pythonlib', _("checking Python lib (%s)...\n"),
              os.path.dirname(os.__file__))
 
+    # hg version
+    hgver = util.version()
+    fm.write('hgver', _("checking Mercurial version (%s)\n"),
+             hgver.split('+')[0])
+    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
+             '+'.join(hgver.split('+')[1:]))
+
     # compiled modules
+    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
+             policy.policy)
     fm.write('hgmodules', _("checking installed modules (%s)...\n"),
              os.path.dirname(__file__))
 
@@ -3517,7 +3525,7 @@
         if newtree != tree:
             ui.note("* concatenated:\n", revset.prettyformat(newtree), "\n")
         if opts["optimize"]:
-            weight, optimizedtree = revset.optimize(newtree, True)
+            optimizedtree = revset.optimize(newtree)
             ui.note("* optimized:\n", revset.prettyformat(optimizedtree), "\n")
     func = revset.match(ui, expr, repo)
     revs = func(repo)
@@ -4406,7 +4414,7 @@
             if not opts.get('files_with_matches'):
                 ui.write(sep, label='grep.sep')
                 if not opts.get('text') and binary():
-                    ui.write(" Binary file matches")
+                    ui.write(_(" Binary file matches"))
                 else:
                     for s, label in l:
                         ui.write(s, label=label)
@@ -4570,7 +4578,10 @@
     Returns 0 if successful.
     """
 
-    textwidth = min(ui.termwidth(), 80) - 2
+    textwidth = ui.configint('ui', 'textwidth', 78)
+    termwidth = ui.termwidth() - 2
+    if textwidth <= 0 or termwidth < textwidth:
+        textwidth = termwidth
 
     keep = opts.get('system') or []
     if len(keep) == 0:
@@ -6300,7 +6311,10 @@
     related method.
 
     Modified files are saved with a .orig suffix before reverting.
-    To disable these backups, use --no-backup.
+    To disable these backups, use --no-backup. It is possible to store
+    the backup files in a custom directory relative to the root of the
+    repository by setting the ``ui.origbackuppath`` configuration
+    option.
 
     See :hg:`help dates` for a list of formats valid for -d/--date.
 
@@ -6380,6 +6394,11 @@
       commit transaction if it isn't checked out. Use --force to
       override this protection.
 
+      The rollback command can be entirely disabled by setting the
+      ``ui.rollback`` configuration setting to false. If you're here
+      because you want to use rollback and it's disabled, you can
+      re-enable the command by setting ``ui.rollback`` to true.
+
     This command is not intended for use on public repositories. Once
     changes are visible for pull by other users, rolling a transaction
     back locally is ineffective (someone else may already have pulled
@@ -6389,6 +6408,9 @@
 
     Returns 0 on success, 1 if no rollback data is available.
     """
+    if not ui.configbool('ui', 'rollback', True):
+        raise error.Abort(_('rollback is disabled because it is unsafe'),
+                          hint=('see `hg help -v rollback` for information'))
     return repo.rollback(dryrun=opts.get('dry_run'),
                          force=opts.get('force'))
 
--- a/mercurial/context.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/context.py	Tue Jun 14 14:52:58 2016 -0500
@@ -930,16 +930,20 @@
         this returns fixed value(False is used) as linenumber,
         if "linenumber" parameter is "False".'''
 
+        def lines(text):
+            if text.endswith("\n"):
+                return text.count("\n")
+            return text.count("\n") + 1
+
         if linenumber is None:
             def decorate(text, rev):
-                return ([rev] * len(text.splitlines()), text)
+                return ([rev] * lines(text), text)
         elif linenumber:
             def decorate(text, rev):
-                size = len(text.splitlines())
-                return ([(rev, i) for i in xrange(1, size + 1)], text)
+                return ([(rev, i) for i in xrange(1, lines(text) + 1)], text)
         else:
             def decorate(text, rev):
-                return ([(rev, False)] * len(text.splitlines()), text)
+                return ([(rev, False)] * lines(text), text)
 
         def pair(parent, child):
             blocks = mdiff.allblocks(parent[1], child[1], opts=diffopts,
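The point of the new lines() helper is that it counts only "\n"-terminated lines, whereas splitlines() also breaks on characters such as "\r"; a quick standalone check (asserts added here for illustration):

def lines(text):
    if text.endswith("\n"):
        return text.count("\n")
    return text.count("\n") + 1

assert lines("a\nb\n") == 2
assert lines("a\nb") == 2                # a missing trailing newline still counts
assert len("a\rb".splitlines()) == 2     # splitlines() also breaks on '\r'
assert lines("a\rb") == 1                # annotate only cares about '\n'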
--- a/mercurial/copies.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/copies.py	Tue Jun 14 14:52:58 2016 -0500
@@ -484,16 +484,16 @@
             f1r, f2r = f1.linkrev(), f2.linkrev()
 
             if f1r is None:
-                f1 = g1.next()
+                f1 = next(g1)
             if f2r is None:
-                f2 = g2.next()
+                f2 = next(g2)
 
             while True:
                 f1r, f2r = f1.linkrev(), f2.linkrev()
                 if f1r > f2r:
-                    f1 = g1.next()
+                    f1 = next(g1)
                 elif f2r > f1r:
-                    f2 = g2.next()
+                    f2 = next(g2)
                 elif f1 == f2:
                     return f1 # a match
                 elif f1r == f2r or f1r < limit or f2r < limit:
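The g.next() to next(g) conversions here and in the hgweb changes below are the usual Python 3 preparation: the bound next method is gone in Python 3, while the next() builtin works on both versions. For example:

g = iter([1, 2, 3])
assert next(g) == 1   # works on Python 2.6+ and Python 3
assert next(g) == 2   # g.next() would raise AttributeError on Python 3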
--- a/mercurial/crecord.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/crecord.py	Tue Jun 14 14:52:58 2016 -0500
@@ -91,6 +91,7 @@
     def allchildren(self):
         "Return a list of all of the direct children of this node"
         raise NotImplementedError("method must be implemented by subclass")
+
     def nextsibling(self):
         """
         Return the closest next item of the same type where there are no items
@@ -110,18 +111,12 @@
     def parentitem(self):
         raise NotImplementedError("method must be implemented by subclass")
 
-
-    def nextitem(self, constrainlevel=True, skipfolded=True):
+    def nextitem(self, skipfolded=True):
         """
-        If constrainLevel == True, return the closest next item
-        of the same type where there are no items of different types between
-        the current item and this closest item.
+        Try to return the next item closest to this item, regardless of item's
+        type (header, hunk, or hunkline).
 
-        If constrainLevel == False, then try to return the next item
-        closest to this item, regardless of item's type (header, hunk, or
-        HunkLine).
-
-        If skipFolded == True, and the current item is folded, then the child
+        If skipfolded == True, and the current item is folded, then the child
         items that are hidden due to folding will be skipped when determining
         the next item.
 
@@ -131,9 +126,7 @@
             itemfolded = self.folded
         except AttributeError:
             itemfolded = False
-        if constrainlevel:
-            return self.nextsibling()
-        elif skipfolded and itemfolded:
+        if skipfolded and itemfolded:
             nextitem = self.nextsibling()
             if nextitem is None:
                 try:
@@ -164,43 +157,31 @@
             except AttributeError: # parent and/or grandparent was None
                 return None
 
-    def previtem(self, constrainlevel=True, skipfolded=True):
+    def previtem(self):
         """
-        If constrainLevel == True, return the closest previous item
-        of the same type where there are no items of different types between
-        the current item and this closest item.
-
-        If constrainLevel == False, then try to return the previous item
-        closest to this item, regardless of item's type (header, hunk, or
-        HunkLine).
-
-        If skipFolded == True, and the current item is folded, then the items
-        that are hidden due to folding will be skipped when determining the
-        next item.
+        Try to return the previous item closest to this item, regardless of
+        item's type (header, hunk, or hunkline).
 
         If it is not possible to get the previous item, return None.
         """
-        if constrainlevel:
-            return self.prevsibling()
-        else:
-            # try previous sibling's last child's last child,
-            # else try previous sibling's last child, else try previous sibling
-            prevsibling = self.prevsibling()
-            if prevsibling is not None:
-                prevsiblinglastchild = prevsibling.lastchild()
-                if ((prevsiblinglastchild is not None) and
-                    not prevsibling.folded):
-                    prevsiblinglclc = prevsiblinglastchild.lastchild()
-                    if ((prevsiblinglclc is not None) and
-                        not prevsiblinglastchild.folded):
-                        return prevsiblinglclc
-                    else:
-                        return prevsiblinglastchild
+        # try previous sibling's last child's last child,
+        # else try previous sibling's last child, else try previous sibling
+        prevsibling = self.prevsibling()
+        if prevsibling is not None:
+            prevsiblinglastchild = prevsibling.lastchild()
+            if ((prevsiblinglastchild is not None) and
+                not prevsibling.folded):
+                prevsiblinglclc = prevsiblinglastchild.lastchild()
+                if ((prevsiblinglclc is not None) and
+                    not prevsiblinglastchild.folded):
+                    return prevsiblinglclc
                 else:
-                    return prevsibling
+                    return prevsiblinglastchild
+            else:
+                return prevsibling
 
-            # try parent (or None)
-            return self.parentitem()
+        # try parent (or None)
+        return self.parentitem()
 
 class patch(patchnode, list): # todo: rename patchroot
     """
@@ -236,7 +217,6 @@
         self.neverunfolded = True
         self.hunks = [uihunk(h, self) for h in self.hunks]
 
-
     def prettystr(self):
         x = stringio()
         self.pretty(x)
@@ -392,6 +372,7 @@
     def allchildren(self):
         "return a list of all of the direct children of this node"
         return self.changedlines
+
     def countchanges(self):
         """changedlines -> (n+,n-)"""
         add = len([l for l in self.changedlines if l.applied
@@ -455,14 +436,12 @@
 
     def __getattr__(self, name):
         return getattr(self._hunk, name)
+
     def __repr__(self):
         return '<hunk %r@%d>' % (self.filename(), self.fromline)
 
-def filterpatch(ui, chunks, chunkselector, operation=None):
+def filterpatch(ui, chunks, chunkselector):
     """interactively filter patch chunks into applied-only chunks"""
-
-    if operation is None:
-        operation = _('confirm')
     chunks = list(chunks)
     # convert chunks list into structure suitable for displaying/modifying
     # with curses.  create a list of headers only.
@@ -603,13 +582,10 @@
         the last hunkline of the hunk prior to the selected hunk.  or, if
         the first hunkline of a hunk is currently selected, then select the
         hunk itself.
-
-        if the currently selected item is already at the top of the screen,
-        scroll the screen down to show the new-selected item.
         """
         currentitem = self.currentselecteditem
 
-        nextitem = currentitem.previtem(constrainlevel=False)
+        nextitem = currentitem.previtem()
 
         if nextitem is None:
             # if no parent item (i.e. currentitem is the first header), then
@@ -623,13 +599,10 @@
         select (if possible) the previous item on the same level as the
         currently selected item.  otherwise, select (if possible) the
         parent-item of the currently selected item.
-
-        if the currently selected item is already at the top of the screen,
-        scroll the screen down to show the new-selected item.
         """
         currentitem = self.currentselecteditem
-        nextitem = currentitem.previtem()
-        # if there's no previous item on this level, try choosing the parent
+        nextitem = currentitem.prevsibling()
+        # if there's no previous sibling, try choosing the parent
         if nextitem is None:
             nextitem = currentitem.parentitem()
         if nextitem is None:
@@ -646,14 +619,11 @@
         the first hunkline of the selected hunk.  or, if the last hunkline of
         a hunk is currently selected, then select the next hunk, if one exists,
         or if not, the next header if one exists.
-
-        if the currently selected item is already at the bottom of the screen,
-        scroll the screen up to show the new-selected item.
         """
         #self.startprintline += 1 #debug
         currentitem = self.currentselecteditem
 
-        nextitem = currentitem.nextitem(constrainlevel=False)
+        nextitem = currentitem.nextitem()
         # if there's no next item, keep the selection as-is
         if nextitem is None:
             nextitem = currentitem
@@ -662,24 +632,21 @@
 
     def downarrowshiftevent(self):
         """
-        if the cursor is already at the bottom chunk, scroll the screen up and
-        move the cursor-position to the subsequent chunk.  otherwise, only move
-        the cursor position down one chunk.
+        select (if possible) the next item on the same level as the currently
+        selected item.  otherwise, select (if possible) the next item on the
+        same level as the parent item of the currently selected item.
         """
-        # todo: update docstring
-
         currentitem = self.currentselecteditem
-        nextitem = currentitem.nextitem()
-        # if there's no previous item on this level, try choosing the parent's
-        # nextitem.
+        nextitem = currentitem.nextsibling()
+        # if there's no next sibling, try choosing the parent's nextsibling
         if nextitem is None:
             try:
-                nextitem = currentitem.parentitem().nextitem()
+                nextitem = currentitem.parentitem().nextsibling()
             except AttributeError:
-                # parentitem returned None, so nextitem() can't be called
+                # parentitem returned None, so nextsibling() can't be called
                 nextitem = None
         if nextitem is None:
-            # if no next item on parent-level, then no change...
+            # if parent has no next sibling, then no change...
             nextitem = currentitem
 
         self.currentselecteditem = nextitem
@@ -766,7 +733,6 @@
             # negative values scroll in pgup direction
             self.scrolllines(selstart - padstartbuffered)
 
-
     def scrolllines(self, numlines):
         "scroll the screen up (down) by numlines when numlines >0 (<0)."
         self.firstlineofpadtoprint += numlines
@@ -894,7 +860,6 @@
         if isinstance(item, (uiheader, uihunk)):
             item.folded = not item.folded
 
-
     def alignstring(self, instr, window):
         """
         add whitespace to the end of a string in order to make it fill
@@ -1133,7 +1098,6 @@
         lineprefix = " "*self.hunkindentnumchars + checkbox
         frtoline = "   " + hunk.getfromtoline().strip("\n")
 
-
         outstr += self.printstring(self.chunkpad, lineprefix, towin=towin,
                                    align=False) # add uncolored checkbox/indent
         outstr += self.printstring(self.chunkpad, frtoline, pair=colorpair,
@@ -1377,7 +1341,7 @@
                       F : fold / unfold parent item and all of its ancestors
                       m : edit / resume editing the commit message
                       e : edit the currently selected hunk
-                      a : toggle amend mode (hg rev >= 2.2), only with commit -i
+                      a : toggle amend mode, only with commit -i
                       c : confirm selected changes
                       r : review/edit and confirm selected changes
                       q : quit without confirming (no changes will be made)
--- a/mercurial/destutil.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/destutil.py	Tue Jun 14 14:52:58 2016 -0500
@@ -95,6 +95,10 @@
     wc = repo[None]
     movemark = node = None
     currentbranch = wc.branch()
+
+    if clean:
+        currentbranch = repo['.'].branch()
+
     if currentbranch in repo.branchmap():
         heads = repo.branchheads(currentbranch)
         if heads:
--- a/mercurial/dirstate.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/dirstate.py	Tue Jun 14 14:52:58 2016 -0500
@@ -74,6 +74,8 @@
                 raise
     return (vfs(filename), False)
 
+_token = object()
+
 class dirstate(object):
 
     def __init__(self, opener, ui, root, validate):
@@ -365,7 +367,7 @@
 
     def setbranch(self, branch):
         self._branch = encoding.fromlocal(branch)
-        f = self._opener('branch', 'w', atomictemp=True)
+        f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
         try:
             f.write(self._branch + '\n')
             f.close()
@@ -580,6 +582,8 @@
             del self._map[f]
             if f in self._nonnormalset:
                 self._nonnormalset.remove(f)
+            if f in self._copymap:
+                del self._copymap[f]
 
     def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
         if exists is None:
@@ -688,16 +692,15 @@
         self._pl = (parent, nullid)
         self._dirty = True
 
-    def write(self, tr=False):
+    def write(self, tr=_token):
         if not self._dirty:
             return
 
         filename = self._filename
-        if tr is False: # not explicitly specified
-            if (self._ui.configbool('devel', 'all-warnings')
-                or self._ui.configbool('devel', 'check-dirstate-write')):
-                self._ui.develwarn('use dirstate.write with '
-                                   'repo.currenttransaction()')
+        if tr is _token: # not explicitly specified
+            self._ui.deprecwarn('use dirstate.write with '
+                               'repo.currenttransaction()',
+                               '3.9')
 
             if self._opener.lexists(self._pendingfilename):
                 # if pending file already exists, in-memory changes
@@ -727,7 +730,7 @@
                                 self._writedirstate, location='plain')
             return
 
-        st = self._opener(filename, "w", atomictemp=True)
+        st = self._opener(filename, "w", atomictemp=True, checkambig=True)
         self._writedirstate(st)
 
     def _writedirstate(self, st):
@@ -1206,14 +1209,16 @@
         else:
             return self._filename
 
-    def _savebackup(self, tr, suffix):
+    def savebackup(self, tr, suffix='', prefix=''):
         '''Save current dirstate into backup file with suffix'''
+        assert len(suffix) > 0 or len(prefix) > 0
         filename = self._actualfilename(tr)
 
         # use '_writedirstate' instead of 'write' to write changes certainly,
         # because the latter omits writing out if transaction is running.
         # output file will be used to create backup of dirstate at this point.
-        self._writedirstate(self._opener(filename, "w", atomictemp=True))
+        self._writedirstate(self._opener(filename, "w", atomictemp=True,
+                                         checkambig=True))
 
         if tr:
             # ensure that subsequent tr.writepending returns True for
@@ -1227,17 +1232,21 @@
             # end of this transaction
             tr.registertmp(filename, location='plain')
 
-        self._opener.write(filename + suffix, self._opener.tryread(filename))
+        self._opener.write(prefix + self._filename + suffix,
+                           self._opener.tryread(filename))
 
-    def _restorebackup(self, tr, suffix):
+    def restorebackup(self, tr, suffix='', prefix=''):
         '''Restore dirstate by backup file with suffix'''
+        assert len(suffix) > 0 or len(prefix) > 0
         # this "invalidate()" prevents "wlock.release()" from writing
         # changes of dirstate out after restoring from backup file
         self.invalidate()
         filename = self._actualfilename(tr)
-        self._opener.rename(filename + suffix, filename)
+        # using self._filename to avoid having "pending" in the backup filename
+        self._opener.rename(prefix + self._filename + suffix, filename)
 
-    def _clearbackup(self, tr, suffix):
+    def clearbackup(self, tr, suffix='', prefix=''):
         '''Clear backup file with suffix'''
-        filename = self._actualfilename(tr)
-        self._opener.unlink(filename + suffix)
+        assert len(suffix) > 0 or len(prefix) > 0
+        # using self._filename to avoid having "pending" in the backup filename
+        self._opener.unlink(prefix + self._filename + suffix)
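The renamed savebackup/restorebackup/clearbackup methods only ever combine a caller-supplied prefix and/or suffix with the plain dirstate name, so a 'journal.' prefix yields 'journal.dirstate' even while a pending file exists. A tiny sketch of the naming (hypothetical helper):

def backupname(prefix='', suffix='', filename='dirstate'):
    assert len(suffix) > 0 or len(prefix) > 0   # same guard as above
    return prefix + filename + suffix

assert backupname(prefix='journal.') == 'journal.dirstate'
assert backupname(suffix='.saved') == 'dirstate.saved'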
--- a/mercurial/dispatch.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/dispatch.py	Tue Jun 14 14:52:58 2016 -0500
@@ -384,7 +384,7 @@
         self.cmdname = ''
         self.definition = definition
         self.fn = None
-        self.args = []
+        self.givenargs = []
         self.opts = []
         self.help = ''
         self.badalias = None
@@ -432,7 +432,7 @@
                              % (self.name, inst))
             return
         self.cmdname = cmd = args.pop(0)
-        args = map(util.expandpath, args)
+        self.givenargs = args
 
         for invalidarg in ("--cwd", "-R", "--repository", "--repo", "--config"):
             if _earlygetopt([invalidarg], args):
@@ -448,7 +448,6 @@
             else:
                 self.fn, self.opts = tableentry
 
-            self.args = aliasargs(self.fn, args)
             if self.help.startswith("hg " + cmd):
                 # drop prefix in old-style help lines so hg shows the alias
                 self.help = self.help[4 + len(cmd):]
@@ -462,6 +461,11 @@
             self.badalias = (_("alias '%s' resolves to ambiguous command '%s'")
                              % (self.name, cmd))
 
+    @property
+    def args(self):
+        args = map(util.expandpath, self.givenargs)
+        return aliasargs(self.fn, args)
+
     def __getattr__(self, name):
         adefaults = {'norepo': True, 'optionalrepo': False, 'inferrepo': False}
         if name not in adefaults:
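Turning args into a property makes the expansion lazy: util.expandpath now runs when the alias is actually invoked rather than when aliases are defined. A hedged stand-in, using os.path.expanduser in place of Mercurial's helper:

import os

class alias(object):
    def __init__(self, givenargs):
        self.givenargs = givenargs
    @property
    def args(self):
        # expanded on every access, i.e. only when the alias actually runs
        return [os.path.expanduser(a) for a in self.givenargs]

a = alias(['~/src', 'README'])
assert a.args[1] == 'README'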
@@ -629,10 +633,16 @@
     # run pre-hook, and abort if it fails
     hook.hook(lui, repo, "pre-%s" % cmd, True, args=" ".join(fullargs),
               pats=cmdpats, opts=cmdoptions)
-    ret = _runcommand(ui, options, cmd, d)
-    # run post-hook, passing command result
-    hook.hook(lui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
-              result=ret, pats=cmdpats, opts=cmdoptions)
+    try:
+        ret = _runcommand(ui, options, cmd, d)
+        # run post-hook, passing command result
+        hook.hook(lui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
+                  result=ret, pats=cmdpats, opts=cmdoptions)
+    except Exception:
+        # run failure hook and re-raise
+        hook.hook(lui, repo, "fail-%s" % cmd, False, args=" ".join(fullargs),
+                  pats=cmdpats, opts=cmdoptions)
+        raise
     return ret
 
 def _getlocal(ui, rpath, wd=None):
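The new fail-<command> hook fires only when the command body raises; success still goes through the post-<command> hook. A schematic of the control flow (hypothetical helper, not dispatch's real signature):

def runwithhooks(cmd, body, hook):
    hook('pre-%s' % cmd)
    try:
        ret = body()
        hook('post-%s' % cmd, result=ret)   # only reached on success
    except Exception:
        hook('fail-%s' % cmd)               # failure hook, then re-raise
        raise
    return ret

calls = []
def hook(name, **kwargs):
    calls.append(name)

runwithhooks('status', lambda: 0, hook)
try:
    runwithhooks('commit', lambda: 1 / 0, hook)
except ZeroDivisionError:
    pass
assert calls == ['pre-status', 'post-status', 'pre-commit', 'fail-commit']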
@@ -660,12 +670,8 @@
 
     return path, lui
 
-def _checkshellalias(lui, ui, args, precheck=True):
-    """Return the function to run the shell alias, if it is required
-
-    'precheck' is whether this function is invoked before adding
-    aliases or not.
-    """
+def _checkshellalias(lui, ui, args):
+    """Return the function to run the shell alias, if it is required"""
     options = {}
 
     try:
@@ -676,16 +682,11 @@
     if not args:
         return
 
-    if precheck:
-        strict = True
-        cmdtable = commands.table.copy()
-        addaliases(lui, cmdtable)
-    else:
-        strict = False
-        cmdtable = commands.table
+    cmdtable = commands.table
 
     cmd = args[0]
     try:
+        strict = ui.configbool("ui", "strict")
         aliases, entry = cmdutil.findcmd(cmd, cmdtable, strict)
     except (error.AmbiguousCommand, error.UnknownCommand):
         return
@@ -735,12 +736,6 @@
     rpath = _earlygetopt(["-R", "--repository", "--repo"], args)
     path, lui = _getlocal(ui, rpath)
 
-    # Now that we're operating in the right directory/repository with
-    # the right config settings, check for shell aliases
-    shellaliasfn = _checkshellalias(lui, ui, args)
-    if shellaliasfn:
-        return shellaliasfn()
-
     # Configure extensions in phases: uisetup, extsetup, cmdtable, and
     # reposetup. Programs like TortoiseHg will call _dispatch several
     # times so we keep track of configured extensions in _loaded.
@@ -762,13 +757,11 @@
 
     addaliases(lui, commands.table)
 
-    if not lui.configbool("ui", "strict"):
-        # All aliases and commands are completely defined, now.
-        # Check abbreviation/ambiguity of shell alias again, because shell
-        # alias may cause failure of "_parse" (see issue4355)
-        shellaliasfn = _checkshellalias(lui, ui, args, precheck=False)
-        if shellaliasfn:
-            return shellaliasfn()
+    # All aliases and commands are completely defined, now.
+    # Check abbreviation/ambiguity of shell alias.
+    shellaliasfn = _checkshellalias(lui, ui, args)
+    if shellaliasfn:
+        return shellaliasfn()
 
     # check for fallback encoding
     fallback = lui.config('ui', 'fallbackencoding')
@@ -825,7 +818,7 @@
 
     if cmdoptions.get('insecure', False):
         for ui_ in uis:
-            ui_.setconfig('web', 'cacerts', '!', '--insecure')
+            ui_.insecureconnections = True
 
     if options['version']:
         return commands.version_(ui)
--- a/mercurial/exchange.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/exchange.py	Tue Jun 14 14:52:58 2016 -0500
@@ -8,6 +8,7 @@
 from __future__ import absolute_import
 
 import errno
+import hashlib
 
 from .i18n import _
 from .node import (
@@ -1646,7 +1647,7 @@
     Used by peer for unbundling.
     """
     heads = repo.heads()
-    heads_hash = util.sha1(''.join(sorted(heads))).digest()
+    heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
     if not (their_heads == ['force'] or their_heads == heads or
             their_heads == ['hashed', heads_hash]):
         # someone else committed/pushed/unbundled while we
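Only the import changes here: util.sha1 was essentially a thin wrapper around hashlib.sha1, and the heads hash stays stable because the binary node ids are sorted before hashing. Roughly:

import hashlib

def headshash(heads):
    # heads are 20-byte binary node ids; sorting makes the digest order-independent
    return hashlib.sha1(b''.join(sorted(heads))).digest()

one = [b'\x11' * 20, b'\x22' * 20]
two = [b'\x22' * 20, b'\x11' * 20]   # same set, different order
assert headshash(one) == headshash(two)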
--- a/mercurial/extensions.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/extensions.py	Tue Jun 14 14:52:58 2016 -0500
@@ -25,7 +25,7 @@
 _aftercallbacks = {}
 _order = []
 _builtin = set(['hbisect', 'bookmarks', 'parentrevspec', 'progress', 'interhg',
-                'inotify'])
+                'inotify', 'hgcia'])
 
 def extensions(ui=None):
     if ui:
--- a/mercurial/formatter.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/formatter.py	Tue Jun 14 14:52:58 2016 -0500
@@ -7,7 +7,6 @@
 
 from __future__ import absolute_import
 
-import cPickle
 import os
 
 from .i18n import _
@@ -20,8 +19,11 @@
     encoding,
     error,
     templater,
+    util,
 )
 
+pickle = util.pickle
+
 class baseformatter(object):
     def __init__(self, ui, topic, opts):
         self._ui = ui
@@ -107,7 +109,7 @@
         self._data.append(self._item)
     def end(self):
         baseformatter.end(self)
-        self._ui.write(cPickle.dumps(self._data))
+        self._ui.write(pickle.dumps(self._data))
 
 def _jsonifyobj(v):
     if isinstance(v, tuple):
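The pickle formatter now goes through util.pickle, which is the usual two-version shim; a rough equivalent (assumed here, not copied from util.py):

try:
    import cPickle as pickle   # Python 2: the C implementation
except ImportError:
    import pickle              # Python 3: cPickle no longer exists

data = pickle.dumps([{'rev': 0, 'node': '000000000000'}])
assert pickle.loads(data) == [{'rev': 0, 'node': '000000000000'}]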
--- a/mercurial/graphmod.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/graphmod.py	Tue Jun 14 14:52:58 2016 -0500
@@ -19,8 +19,6 @@
 
 from __future__ import absolute_import
 
-import heapq
-
 from .node import nullrev
 from . import (
     revset,
@@ -32,207 +30,11 @@
 GRANDPARENT = 'G'
 MISSINGPARENT = 'M'
 # Style of line to draw. None signals a line that ends and is removed at this
-# point.
+# point. A number prefix means only the last N lines of the current block
+# will use that style, the rest will use the PARENT style. Add a - sign
+# (so making N negative) and all but the first N lines use that style.
 EDGES = {PARENT: '|', GRANDPARENT: ':', MISSINGPARENT: None}
 
-def groupbranchiter(revs, parentsfunc, firstbranch=()):
-    """Yield revisions from heads to roots one (topo) branch at a time.
-
-    This function aims to be used by a graph generator that wishes to minimize
-    the number of parallel branches and their interleaving.
-
-    Example iteration order (numbers show the "true" order in a changelog):
-
-      o  4
-      |
-      o  1
-      |
-      | o  3
-      | |
-      | o  2
-      |/
-      o  0
-
-    Note that the ancestors of merges are understood by the current
-    algorithm to be on the same branch. This means no reordering will
-    occur behind a merge.
-    """
-
-    ### Quick summary of the algorithm
-    #
-    # This function is based around a "retention" principle. We keep revisions
-    # in memory until we are ready to emit a whole branch that immediately
-    # "merges" into an existing one. This reduces the number of parallel
-    # branches with interleaved revisions.
-    #
-    # During iteration revs are split into two groups:
-    # A) revision already emitted
-    # B) revision in "retention". They are stored as different subgroups.
-    #
-    # for each REV, we do the following logic:
-    #
-    #   1) if REV is a parent of (A), we will emit it. If there is a
-    #   retention group ((B) above) that is blocked on REV being
-    #   available, we emit all the revisions out of that retention
-    #   group first.
-    #
-    #   2) else, we'll search for a subgroup in (B) awaiting for REV to be
-    #   available, if such subgroup exist, we add REV to it and the subgroup is
-    #   now awaiting for REV.parents() to be available.
-    #
-    #   3) finally if no such group existed in (B), we create a new subgroup.
-    #
-    #
-    # To bootstrap the algorithm, we emit the tipmost revision (which
-    # puts it in group (A) from above).
-
-    revs.sort(reverse=True)
-
-    # Set of parents of revision that have been emitted. They can be considered
-    # unblocked as the graph generator is already aware of them so there is no
-    # need to delay the revisions that reference them.
-    #
-    # If someone wants to prioritize a branch over the others, pre-filling this
-    # set will force all other branches to wait until this branch is ready to be
-    # emitted.
-    unblocked = set(firstbranch)
-
-    # list of groups waiting to be displayed, each group is defined by:
-    #
-    #   (revs:    lists of revs waiting to be displayed,
-    #    blocked: set of that cannot be displayed before those in 'revs')
-    #
-    # The second value ('blocked') correspond to parents of any revision in the
-    # group ('revs') that is not itself contained in the group. The main idea
-    # of this algorithm is to delay as much as possible the emission of any
-    # revision.  This means waiting for the moment we are about to display
-    # these parents to display the revs in a group.
-    #
-    # This first implementation is smart until it encounters a merge: it will
-    # emit revs as soon as any parent is about to be emitted and can grow an
-    # arbitrary number of revs in 'blocked'. In practice this mean we properly
-    # retains new branches but gives up on any special ordering for ancestors
-    # of merges. The implementation can be improved to handle this better.
-    #
-    # The first subgroup is special. It corresponds to all the revision that
-    # were already emitted. The 'revs' lists is expected to be empty and the
-    # 'blocked' set contains the parents revisions of already emitted revision.
-    #
-    # You could pre-seed the <parents> set of groups[0] to a specific
-    # changesets to select what the first emitted branch should be.
-    groups = [([], unblocked)]
-    pendingheap = []
-    pendingset = set()
-
-    heapq.heapify(pendingheap)
-    heappop = heapq.heappop
-    heappush = heapq.heappush
-    for currentrev in revs:
-        # Heap works with smallest element, we want highest so we invert
-        if currentrev not in pendingset:
-            heappush(pendingheap, -currentrev)
-            pendingset.add(currentrev)
-        # iterates on pending rev until after the current rev have been
-        # processed.
-        rev = None
-        while rev != currentrev:
-            rev = -heappop(pendingheap)
-            pendingset.remove(rev)
-
-            # Seek for a subgroup blocked, waiting for the current revision.
-            matching = [i for i, g in enumerate(groups) if rev in g[1]]
-
-            if matching:
-                # The main idea is to gather together all sets that are blocked
-                # on the same revision.
-                #
-                # Groups are merged when a common blocking ancestor is
-                # observed. For example, given two groups:
-                #
-                # revs [5, 4] waiting for 1
-                # revs [3, 2] waiting for 1
-                #
-                # These two groups will be merged when we process
-                # 1. In theory, we could have merged the groups when
-                # we added 2 to the group it is now in (we could have
-                # noticed the groups were both blocked on 1 then), but
-                # the way it works now makes the algorithm simpler.
-                #
-                # We also always keep the oldest subgroup first. We can
-                # probably improve the behavior by having the longest set
-                # first. That way, graph algorithms could minimise the length
-                # of parallel lines their drawing. This is currently not done.
-                targetidx = matching.pop(0)
-                trevs, tparents = groups[targetidx]
-                for i in matching:
-                    gr = groups[i]
-                    trevs.extend(gr[0])
-                    tparents |= gr[1]
-                # delete all merged subgroups (except the one we kept)
-                # (starting from the last subgroup for performance and
-                # sanity reasons)
-                for i in reversed(matching):
-                    del groups[i]
-            else:
-                # This is a new head. We create a new subgroup for it.
-                targetidx = len(groups)
-                groups.append(([], set([rev])))
-
-            gr = groups[targetidx]
-
-            # We now add the current nodes to this subgroups. This is done
-            # after the subgroup merging because all elements from a subgroup
-            # that relied on this rev must precede it.
-            #
-            # we also update the <parents> set to include the parents of the
-            # new nodes.
-            if rev == currentrev: # only display stuff in rev
-                gr[0].append(rev)
-            gr[1].remove(rev)
-            parents = [p for p in parentsfunc(rev) if p > nullrev]
-            gr[1].update(parents)
-            for p in parents:
-                if p not in pendingset:
-                    pendingset.add(p)
-                    heappush(pendingheap, -p)
-
-            # Look for a subgroup to display
-            #
-            # When unblocked is empty (if clause), we were not waiting for any
-            # revisions during the first iteration (if no priority was given) or
-            # if we emitted a whole disconnected set of the graph (reached a
-            # root).  In that case we arbitrarily take the oldest known
-            # subgroup. The heuristic could probably be better.
-            #
-            # Otherwise (elif clause) if the subgroup is blocked on
-            # a revision we just emitted, we can safely emit it as
-            # well.
-            if not unblocked:
-                if len(groups) > 1:  # display other subset
-                    targetidx = 1
-                    gr = groups[1]
-            elif not gr[1] & unblocked:
-                gr = None
-
-            if gr is not None:
-                # update the set of awaited revisions with the one from the
-                # subgroup
-                unblocked |= gr[1]
-                # output all revisions in the subgroup
-                for r in gr[0]:
-                    yield r
-                # delete the subgroup that you just output
-                # unless it is groups[0] in which case you just empty it.
-                if targetidx:
-                    del groups[targetidx]
-                else:
-                    gr[0][:] = []
-    # Check if we have some subgroup waiting for revisions we are not going to
-    # iterate over
-    for g in groups:
-        for r in g[0]:
-            yield r
-
 def dagwalker(repo, revs):
     """cset DAG generator yielding (id, CHANGESET, ctx, [parentinfo]) tuples
 
@@ -250,16 +52,6 @@
 
     gpcache = {}
 
-    if repo.ui.configbool('experimental', 'graph-group-branches', False):
-        firstbranch = ()
-        firstbranchrevset = repo.ui.config(
-            'experimental', 'graph-group-branches.firstbranch', '')
-        if firstbranchrevset:
-            firstbranch = repo.revs(firstbranchrevset)
-        parentrevs = repo.changelog.parentrevs
-        revs = groupbranchiter(revs, parentrevs, firstbranch)
-        revs = revset.baseset(revs)
-
     for rev in revs:
         ctx = repo[rev]
         # partition into parents in the rev set and missing parents, then
@@ -653,6 +445,22 @@
     while len(text) < len(lines):
         text.append("")
 
+    if any(len(char) > 1 for char in edgemap.values()):
+        # limit drawing an edge to the first or last N lines of the current
+        # section; the rest of the edge is drawn like a parent line.
+        parent = state['styles'][PARENT][-1]
+        def _drawgp(char, i):
+            # should a grandparent character be drawn for this line?
+            if len(char) < 2:
+                return True
+            num = int(char[:-1])
+            # either skip first num lines or take last num lines, based on sign
+            return -num <= i if num < 0 else (len(lines) - i) <= num
+        for i, line in enumerate(lines):
+            line[:] = [c[-1] if _drawgp(c, i) else parent for c in line]
+        edgemap.update(
+            (e, (c if len(c) < 2 else parent)) for e, c in edgemap.items())
+
     # print lines
     indentation_level = max(ncols, ncols + coldiff)
     for (line, logstr) in zip(lines, text):
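The numeric prefix convention for edge styles: '1:' means only the last line of the block is drawn with ':', '-1:' means everything but the first line is, and a bare ':' applies to every line. A standalone copy of the predicate makes the sign rule concrete:

def drawgp(char, i, nlines):
    # True if line i (0-based) of an nlines-long block should use char's style
    if len(char) < 2:
        return True
    num = int(char[:-1])
    return -num <= i if num < 0 else (nlines - i) <= num

n = 4
assert [drawgp('1:', i, n) for i in range(n)] == [False, False, False, True]
assert [drawgp('-1:', i, n) for i in range(n)] == [False, True, True, True]
assert all(drawgp(':', i, n) for i in range(n))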
--- a/mercurial/help/config.txt	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/help/config.txt	Tue Jun 14 14:52:58 2016 -0500
@@ -811,6 +811,15 @@
   dictionary of options (with unspecified options set to their defaults).
   ``$HG_PATS`` is a list of arguments. Hook failure is ignored.
 
+``fail-<command>``
+  Run after a failed invocation of an associated command. The contents
+  of the command line are passed as ``$HG_ARGS``. Parsed command line
+  arguments are passed as ``$HG_PATS`` and ``$HG_OPTS``. These contain
+  string representations of the python data internally passed to
+  <command>. ``$HG_OPTS`` is a dictionary of options (with unspecified
+  options set to their defaults). ``$HG_PATS`` is a list of arguments.
+  Hook failure is ignored.
+
 ``pre-<command>``
   Run before executing the associated command. The contents of the
   command line are passed as ``$HG_ARGS``. Parsed command line arguments
@@ -967,6 +976,8 @@
 ``hostfingerprints``
 --------------------
 
+(Deprecated. Use ``[hostsecurity]``'s ``fingerprints`` options instead.)
+
 Fingerprints of the certificates of known HTTPS servers.
 
 A HTTPS connection to a server with a fingerprint configured here will
@@ -986,6 +997,67 @@
     hg.intevation.de = fc:e2:8d:d9:51:cd:cb:c1:4d:18:6b:b7:44:8d:49:72:57:e6:cd:33
     hg.intevation.org = fc:e2:8d:d9:51:cd:cb:c1:4d:18:6b:b7:44:8d:49:72:57:e6:cd:33
 
+``hostsecurity``
+----------------
+
+Used to specify per-host security settings.
+
+Options in this section have the form ``hostname``:``setting``. This allows
+multiple settings to be defined on a per-host basis.
+
+The following per-host settings can be defined.
+
+``fingerprints``
+    A list of hashes of the DER encoded peer/remote certificate. Values have
+    the form ``algorithm``:``fingerprint``. e.g.
+    ``sha256:c3ab8ff13720e8ad9047dd39466b3c8974e592c2fa383d4a3960714caef0c4f2``.
+
+    The following algorithms/prefixes are supported: ``sha1``, ``sha256``,
+    ``sha512``.
+
+    Use of ``sha256`` or ``sha512`` is preferred.
+
+    If a fingerprint is specified, the CA chain is not validated for this
+    host and Mercurial will require the remote certificate to match one
+    of the fingerprints specified. This means if the server updates its
+    certificate, Mercurial will abort until a new fingerprint is defined.
+    This can provide stronger security than traditional CA-based validation
+    at the expense of convenience.
+
+    This option takes precedence over ``verifycertsfile``.
+
+``verifycertsfile``
+    Path to a file containing a list of PEM encoded certificates used to
+    verify the server certificate. Environment variables and ``~user``
+    constructs are expanded in the filename.
+
+    The server certificate or the certificate's certificate authority (CA)
+    must match a certificate from this file or certificate verification
+    will fail and connections to the server will be refused.
+
+    If defined, only certificates provided by this file will be used:
+    ``web.cacerts`` and any system/default certificates will not be
+    used.
+
+    This option has no effect if the per-host ``fingerprints`` option
+    is set.
+
+    The format of the file is as follows::
+
+        -----BEGIN CERTIFICATE-----
+        ... (certificate in base64 PEM encoding) ...
+        -----END CERTIFICATE-----
+        -----BEGIN CERTIFICATE-----
+        ... (certificate in base64 PEM encoding) ...
+        -----END CERTIFICATE-----
+
+For example::
+
+    [hostsecurity]
+    hg.example.com:fingerprints = sha256:c3ab8ff13720e8ad9047dd39466b3c8974e592c2fa383d4a3960714caef0c4f2
+    hg2.example.com:fingerprints = sha1:914f1aff87249c09b6859b88b1906d30756491ca, sha1:fc:e2:8d:d9:51:cd:cb:c1:4d:18:6b:b7:44:8d:49:72:57:e6:cd:33
+    foo.example.com:verifycertsfile = /etc/ssl/trusted-ca-certs.pem
+
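One practical way to obtain a value for a ``fingerprints`` line is to hash the DER form of the server certificate yourself; a hedged helper (not shipped with Mercurial) using only the standard library:

import hashlib
import ssl

def fingerprint(host, port=443):
    pem = ssl.get_server_certificate((host, port))
    der = ssl.PEM_cert_to_DER_cert(pem)
    return 'sha256:' + hashlib.sha256(der).hexdigest()

# print(fingerprint('hg.example.com'))   # paste the output into [hostsecurity]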
 ``http_proxy``
 --------------
 
@@ -1020,8 +1092,8 @@
    file in the changeset being merged or updated to, and has different
    contents. Options are ``abort``, ``warn`` and ``ignore``. With ``abort``,
    abort on such files. With ``warn``, warn on such files and back them up as
-   .orig. With ``ignore``, don't print a warning and back them up as
-   .orig. (default: ``abort``)
+   ``.orig``. With ``ignore``, don't print a warning and back them up as
+   ``.orig``. (default: ``abort``)
 
 ``checkunknown``
    Controls behavior when an unknown file that isn't ignored has the same name
@@ -1442,16 +1514,6 @@
     Optional. Method to enable TLS when connecting to mail server: starttls,
     smtps or none. (default: none)
 
-``verifycert``
-    Optional. Verification for the certificate of mail server, when
-    ``tls`` is starttls or smtps. "strict", "loose" or False. For
-    "strict" or "loose", the certificate is verified as same as the
-    verification for HTTPS connections (see ``[hostfingerprints]`` and
-    ``[web] cacerts`` also). For "strict", sending email is also
-    aborted, if there is no configuration for mail server in
-    ``[hostfingerprints]`` and ``[web] cacerts``.  --insecure for
-    :hg:`email` overwrites this as "loose". (default: strict)
-
 ``username``
     Optional. User name for authenticating with the SMTP server.
     (default: None)
@@ -1737,6 +1799,13 @@
     large organisation with its own Mercurial deployment process and crash
     reports should be addressed to your internal support.
 
+``textwidth``
+    Maximum width of help text. A longer line generated by ``hg help`` or
+    ``hg subcommand --help`` will be broken after white space to get this
+    width or the terminal width, whichever comes first.
+    A non-positive value will disable this and the terminal width will be
+    used. (default: 78)
+
 ``timeout``
     The timeout used when a lock is held (in seconds), a negative value
     means no timeout. (default: 600)
--- a/mercurial/help/templates.txt	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/help/templates.txt	Tue Jun 14 14:52:58 2016 -0500
@@ -81,6 +81,10 @@
 
    $ hg log -r 0 --template "files: {join(files, ', ')}\n"
 
+- Separate non-empty arguments by a single space::
+
+   $ hg log -r 0 --template "{separate(' ', node, bookmarks, tags)}\n"
+
 - Modify each line of a commit description::
 
    $ hg log --template "{splitlines(desc) % '**** {line}\n'}"
--- a/mercurial/hg.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/hg.py	Tue Jun 14 14:52:58 2016 -0500
@@ -9,6 +9,7 @@
 from __future__ import absolute_import
 
 import errno
+import hashlib
 import os
 import shutil
 
@@ -480,7 +481,8 @@
                 ui.status(_('(not using pooled storage: '
                             'unable to resolve identity of remote)\n'))
         elif sharenamemode == 'remote':
-            sharepath = os.path.join(sharepool, util.sha1(source).hexdigest())
+            sharepath = os.path.join(
+                sharepool, hashlib.sha1(source).hexdigest())
         else:
             raise error.Abort('unknown share naming mode: %s' % sharenamemode)
 
--- a/mercurial/hgweb/webcommands.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/hgweb/webcommands.py	Tue Jun 14 14:52:58 2016 -0500
@@ -139,7 +139,7 @@
             yield {"line": t,
                    "lineid": "l%d" % (lineno + 1),
                    "linenumber": "% 6d" % (lineno + 1),
-                   "parity": parity.next()}
+                   "parity": next(parity)}
 
     return tmpl("filerevision",
                 file=f,
@@ -278,7 +278,7 @@
             files = webutil.listfilediffs(tmpl, ctx.files(), n, web.maxfiles)
 
             yield tmpl('searchentry',
-                       parity=parity.next(),
+                       parity=next(parity),
                        changelogtag=showtags,
                        files=files,
                        **webutil.commonentry(web.repo, ctx))
@@ -375,7 +375,7 @@
                 break
 
             entry = webutil.changelistentry(web, web.repo[rev], tmpl)
-            entry['parity'] = parity.next()
+            entry['parity'] = next(parity)
             yield entry
 
     if shortlog:
@@ -527,7 +527,7 @@
 
             fctx = ctx.filectx(full)
             yield {"file": full,
-                   "parity": parity.next(),
+                   "parity": next(parity),
                    "basename": f,
                    "date": fctx.date(),
                    "size": fctx.size(),
@@ -545,7 +545,7 @@
                 h = v
 
             path = "%s%s" % (abspath, d)
-            yield {"parity": parity.next(),
+            yield {"parity": next(parity),
                    "path": path,
                    "emptydirs": "/".join(emptydirs),
                    "basename": d}
@@ -554,7 +554,7 @@
                 symrev=symrev,
                 path=abspath,
                 up=webutil.up(abspath),
-                upparity=parity.next(),
+                upparity=next(parity),
                 fentries=filelist,
                 dentries=dirlist,
                 archives=web.archivelist(hex(node)),
@@ -582,7 +582,7 @@
         if latestonly:
             t = t[:1]
         for k, n in t:
-            yield {"parity": parity.next(),
+            yield {"parity": next(parity),
                    "tag": k,
                    "date": web.repo[n].date(),
                    "node": hex(n)}
@@ -615,7 +615,7 @@
         if latestonly:
             t = i[:1]
         for k, n in t:
-            yield {"parity": parity.next(),
+            yield {"parity": next(parity),
                    "bookmark": k,
                    "date": web.repo[n].date(),
                    "node": hex(n)}
@@ -677,7 +677,7 @@
                 break
 
             yield tmpl("tagentry",
-                       parity=parity.next(),
+                       parity=next(parity),
                        tag=k,
                        node=hex(n),
                        date=web.repo[n].date())
@@ -688,7 +688,7 @@
         sortkey = lambda b: (web.repo[b[1]].rev(), b[0])
         marks = sorted(marks, key=sortkey, reverse=True)
         for k, n in marks[:10]:  # limit to 10 bookmarks
-            yield {'parity': parity.next(),
+            yield {'parity': next(parity),
                    'bookmark': k,
                    'date': web.repo[n].date(),
                    'node': hex(n)}
@@ -704,7 +704,7 @@
 
             l.append(tmpl(
                 'shortlogentry',
-                parity=parity.next(),
+                parity=next(parity),
                 **webutil.commonentry(web.repo, ctx)))
 
         l.reverse()
@@ -864,7 +864,6 @@
                                      section='annotate', whitespace=True)
 
     def annotate(**map):
-        last = None
         if util.binary(fctx.data()):
             mt = (mimetypes.guess_type(fctx.path())[0]
                   or 'application/octet-stream')
@@ -874,12 +873,7 @@
             lines = enumerate(fctx.annotate(follow=True, linenumber=True,
                                             diffopts=diffopts))
         for lineno, ((f, targetline), l) in lines:
-            fnode = f.filenode()
-
-            if last != fnode:
-                last = fnode
-
-            yield {"parity": parity.next(),
+            yield {"parity": next(parity),
                    "node": f.hex(),
                    "rev": f.rev(),
                    "author": f.user(),
@@ -963,7 +957,7 @@
             iterfctx = fctx.filectx(i)
 
             l.append(dict(
-                parity=parity.next(),
+                parity=next(parity),
                 filerev=i,
                 file=f,
                 rename=webutil.renamelink(iterfctx),
--- a/mercurial/hgweb/webutil.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/hgweb/webutil.py	Tue Jun 14 14:52:58 2016 -0500
@@ -75,7 +75,7 @@
     def _first(self):
         """return the minimum non-filtered changeset or None"""
         try:
-            return iter(self._revlog).next()
+            return next(iter(self._revlog))
         except StopIteration:
             return None
 
@@ -247,7 +247,7 @@
             else:
                 status = 'open'
             yield {
-                'parity': parity.next(),
+                'parity': next(parity),
                 'branch': ctx.branch(),
                 'status': status,
                 'node': ctx.hex(),
@@ -369,7 +369,7 @@
         template = f in ctx and 'filenodelink' or 'filenolink'
         files.append(tmpl(template,
                           node=ctx.hex(), file=f, blockno=blockno + 1,
-                          parity=parity.next()))
+                          parity=next(parity)))
 
     basectx = basechangectx(web.repo, req)
     if basectx is None:
@@ -450,15 +450,15 @@
     block = []
     for chunk in patch.diff(repo, node1, node2, m, opts=diffopts):
         if chunk.startswith('diff') and block:
-            blockno = blockcount.next()
-            yield tmpl('diffblock', parity=parity.next(), blockno=blockno,
+            blockno = next(blockcount)
+            yield tmpl('diffblock', parity=next(parity), blockno=blockno,
                        lines=prettyprintlines(''.join(block), blockno))
             block = []
         if chunk.startswith('diff') and style != 'raw':
             chunk = ''.join(chunk.splitlines(True)[1:])
         block.append(chunk)
-    blockno = blockcount.next()
-    yield tmpl('diffblock', parity=parity.next(), blockno=blockno,
+    blockno = next(blockcount)
+    yield tmpl('diffblock', parity=next(parity), blockno=blockno,
                lines=prettyprintlines(''.join(block), blockno))
 
 def compare(tmpl, context, leftlines, rightlines):
@@ -521,14 +521,14 @@
 def diffsummary(statgen):
     '''Return a short summary of the diff.'''
 
-    stats, maxname, maxtotal, addtotal, removetotal, binary = statgen.next()
+    stats, maxname, maxtotal, addtotal, removetotal, binary = next(statgen)
     return _(' %d files changed, %d insertions(+), %d deletions(-)\n') % (
              len(stats), addtotal, removetotal)
 
 def diffstat(tmpl, ctx, statgen, parity):
     '''Return a diffstat template for each file in the diff.'''
 
-    stats, maxname, maxtotal, addtotal, removetotal, binary = statgen.next()
+    stats, maxname, maxtotal, addtotal, removetotal, binary = next(statgen)
     files = ctx.files()
 
     def pct(i):
@@ -543,7 +543,7 @@
         fileno += 1
         yield tmpl(template, node=ctx.hex(), file=filename, fileno=fileno,
                    total=total, addpct=pct(adds), removepct=pct(removes),
-                   parity=parity.next())
+                   parity=next(parity))
 
 class sessionvars(object):
     def __init__(self, vars, start='?'):
--- a/mercurial/httpclient/__init__.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/httpclient/__init__.py	Tue Jun 14 14:52:58 2016 -0500
@@ -41,18 +41,29 @@
 # Many functions in this file have too many arguments.
 # pylint: disable=R0913
 
-import cStringIO
 import errno
-import httplib
+import inspect
 import logging
 import rfc822
 import select
 import socket
 
+try:
+    import cStringIO as io
+    io.StringIO
+except ImportError:
+    import io
+
+try:
+    import httplib
+    httplib.HTTPException
+except ImportError:
+    import http.client as httplib
+
 from . import (
     _readers,
     socketutil,
-    )
+)
 
 logger = logging.getLogger(__name__)
 
@@ -242,7 +253,7 @@
         self.status = int(self.status)
         if self._eol != EOL:
             hdrs = hdrs.replace(self._eol, '\r\n')
-        headers = rfc822.Message(cStringIO.StringIO(hdrs))
+        headers = rfc822.Message(io.StringIO(hdrs))
         content_len = None
         if HDR_CONTENT_LENGTH in headers:
             content_len = int(headers[HDR_CONTENT_LENGTH])
@@ -296,6 +307,46 @@
     """
     return dict((k.lower(), (k, v)) for k, v in headers.iteritems())
 
+try:
+    inspect.signature
+    def _handlesarg(func, arg):
+        """ Try to determine if func accepts arg
+
+        If it takes arg, return True
+        If it happens to take **args, then it could do anything:
+            * It could throw a different TypeError, just for fun
+            * It could throw an ArgumentError or anything else
+            * It could choose not to throw an Exception at all
+        ... return 'unknown'
+
+        Otherwise, return False
+        """
+        params = inspect.signature(func).parameters
+        if arg in params:
+            return True
+        for p in params:
+            if params[p].kind == inspect._ParameterKind.VAR_KEYWORD:
+                return 'unknown'
+        return False
+except AttributeError:
+    def _handlesarg(func, arg):
+        """ Try to determine if func accepts arg
+
+        If it takes arg, return True
+        If it happens to take **args, then it could do anything:
+            * It could throw a different TypeError, just for fun
+            * It could throw an ArgumentError or anything else
+            * It could choose not to throw an Exception at all
+        ... return 'unknown'
+
+        Otherwise, return False
+        """
+        spec = inspect.getargspec(func)
+        if arg in spec.args:
+            return True
+        if spec.keywords:
+            return 'unknown'
+        return False
 
 class HTTPConnection(object):
     """Connection to a single http server.
@@ -346,9 +397,31 @@
             if '[' in host:
                 host = host[1:-1]
         if ssl_wrap_socket is not None:
-            self._ssl_wrap_socket = ssl_wrap_socket
+            _wrap_socket = ssl_wrap_socket
         else:
-            self._ssl_wrap_socket = socketutil.wrap_socket
+            _wrap_socket = socketutil.wrap_socket
+        call_wrap_socket = None
+        handlesubar = _handlesarg(_wrap_socket, 'server_hostname')
+        if handlesubar is True:
+            # supports server_hostname
+            call_wrap_socket = _wrap_socket
+        handlesnobar = _handlesarg(_wrap_socket, 'serverhostname')
+        if handlesnobar is True and handlesubar is not True:
+            # supports serverhostname
+            def call_wrap_socket(sock, server_hostname=None, **ssl_opts):
+                return _wrap_socket(sock, serverhostname=server_hostname,
+                                    **ssl_opts)
+        if handlesubar is False and handlesnobar is False:
+            # does not support either
+            def call_wrap_socket(sock, server_hostname=None, **ssl_opts):
+                return _wrap_socket(sock, **ssl_opts)
+        if call_wrap_socket is None:
+            # we assume it takes **args
+            def call_wrap_socket(sock, **ssl_opts):
+                if 'server_hostname' in ssl_opts:
+                    ssl_opts['serverhostname'] = ssl_opts['server_hostname']
+                return _wrap_socket(sock, **ssl_opts)
+        self._ssl_wrap_socket = call_wrap_socket
         if use_ssl is None and port is None:
             use_ssl = False
             port = 80
@@ -429,7 +502,8 @@
             sock.setblocking(1)
             logger.debug('wrapping socket for ssl with options %r',
                          self.ssl_opts)
-            sock = self._ssl_wrap_socket(sock, **self.ssl_opts)
+            sock = self._ssl_wrap_socket(sock, server_hostname=self.host,
+                                         **self.ssl_opts)
             if self._ssl_validator:
                 self._ssl_validator(sock)
         sock.setblocking(0)
--- a/mercurial/httpclient/_readers.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/httpclient/_readers.py	Tue Jun 14 14:52:58 2016 -0500
@@ -33,7 +33,12 @@
 """
 from __future__ import absolute_import
 
-import httplib
+try:
+    import httplib
+    httplib.HTTPException
+except ImportError:
+    import http.client as httplib
+
 import logging
 
 logger = logging.getLogger(__name__)
--- a/mercurial/httpclient/socketutil.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/httpclient/socketutil.py	Tue Jun 14 14:52:58 2016 -0500
@@ -122,7 +122,8 @@
                 server_side=False, cert_reqs=CERT_NONE,
                 ssl_version=_PROTOCOL_SSLv23, ca_certs=None,
                 do_handshake_on_connect=True,
-                suppress_ragged_eofs=True):
+                suppress_ragged_eofs=True,
+                server_hostname=None):
         """Backport of ssl.wrap_socket from Python 2.6."""
         if cert_reqs != CERT_NONE and ca_certs:
             raise CertificateValidationUnsupported(
--- a/mercurial/httpconnection.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/httpconnection.py	Tue Jun 14 14:52:58 2016 -0500
@@ -280,10 +280,9 @@
         kwargs['keyfile'] = keyfile
         kwargs['certfile'] = certfile
 
-        kwargs.update(sslutil.sslkwargs(self.ui, host))
-
         con = HTTPConnection(host, port, use_ssl=True,
                              ssl_wrap_socket=sslutil.wrapsocket,
-                             ssl_validator=sslutil.validator(self.ui, host),
+                             ssl_validator=sslutil.validatesocket,
+                             ui=self.ui,
                              **kwargs)
         return con
--- a/mercurial/httppeer.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/httppeer.py	Tue Jun 14 14:52:58 2016 -0500
@@ -302,7 +302,7 @@
     except error.RepoError as httpexception:
         try:
             r = statichttprepo.instance(ui, "static-" + path, create)
-            ui.note('(falling back to static-http)\n')
+            ui.note(_('(falling back to static-http)\n'))
             return r
         except error.RepoError:
             raise httpexception # use the original http RepoError instead
--- a/mercurial/keepalive.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/keepalive.py	Tue Jun 14 14:52:58 2016 -0500
@@ -110,6 +110,7 @@
 from __future__ import absolute_import, print_function
 
 import errno
+import hashlib
 import httplib
 import socket
 import sys
@@ -624,8 +625,7 @@
     keepalive_handler.close_all()
 
 def continuity(url):
-    from . import util
-    md5 = util.md5
+    md5 = hashlib.md5
     format = '%25s: %s'
 
     # first fetch the file with the normal http handler
--- a/mercurial/localrepo.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/localrepo.py	Tue Jun 14 14:52:58 2016 -0500
@@ -8,6 +8,7 @@
 from __future__ import absolute_import
 
 import errno
+import hashlib
 import inspect
 import os
 import random
@@ -57,12 +58,10 @@
 )
 
 release = lockmod.release
-propertycache = util.propertycache
 urlerr = util.urlerr
 urlreq = util.urlreq
-filecache = scmutil.filecache
 
-class repofilecache(filecache):
+class repofilecache(scmutil.filecache):
     """All filecache usage on repo are done for logic that should be unfiltered
     """
 
@@ -78,7 +77,7 @@
     def join(self, obj, fname):
         return obj.sjoin(fname)
 
-class unfilteredpropertycache(propertycache):
+class unfilteredpropertycache(util.propertycache):
     """propertycache that apply to unfiltered repo only"""
 
     def __get__(self, repo, type=None):
@@ -87,7 +86,7 @@
             return super(unfilteredpropertycache, self).__get__(unfi)
         return getattr(unfi, self.name)
 
-class filteredpropertycache(propertycache):
+class filteredpropertycache(util.propertycache):
     """propertycache that must take filtering in account"""
 
     def cachevalue(self, obj, value):
@@ -881,12 +880,6 @@
             f = f[1:]
         return filelog.filelog(self.svfs, f)
 
-    def parents(self, changeid=None):
-        '''get list of changectxs for parents of changeid'''
-        msg = 'repo.parents() is deprecated, use repo[%r].parents()' % changeid
-        self.ui.deprecwarn(msg, '3.7')
-        return self[changeid].parents()
-
     def changectx(self, changeid):
         return self[changeid]
 
@@ -1008,7 +1001,8 @@
                 or self.ui.configbool('devel', 'check-locks')):
             l = self._lockref and self._lockref()
             if l is None or not l.held:
-                self.ui.develwarn('transaction with no lock')
+                raise RuntimeError('programming error: transaction requires '
+                                   'locking')
         tr = self.currenttransaction()
         if tr is not None:
             return tr.nest()
@@ -1019,11 +1013,8 @@
                 _("abandoned transaction found"),
                 hint=_("run 'hg recover' to clean up transaction"))
 
-        # make journal.dirstate contain in-memory changes at this point
-        self.dirstate.write(None)
-
         idbase = "%.40f#%f" % (random.random(), time.time())
-        txnid = 'TXN:' + util.sha1(idbase).hexdigest()
+        txnid = 'TXN:' + hashlib.sha1(idbase).hexdigest()
         self.hook('pretxnopen', throw=True, txnname=desc, txnid=txnid)
 
         self._writejournal(desc)
@@ -1049,13 +1040,9 @@
                 # transaction running
                 repo.dirstate.write(None)
             else:
-                # prevent in-memory changes from being written out at
-                # the end of outer wlock scope or so
-                repo.dirstate.invalidate()
-
                 # discard all changes (including ones already written
                 # out) in this transaction
-                repo.vfs.rename('journal.dirstate', 'dirstate')
+                repo.dirstate.restorebackup(None, prefix='journal.')
 
                 repo.invalidate(clearfilecache=True)
 
@@ -1110,8 +1097,7 @@
         return [(vfs, undoname(x)) for vfs, x in self._journalfiles()]
 
     def _writejournal(self, desc):
-        self.vfs.write("journal.dirstate",
-                          self.vfs.tryread("dirstate"))
+        self.dirstate.savebackup(None, prefix='journal.')
         self.vfs.write("journal.branch",
                           encoding.fromlocal(self.dirstate.branch()))
         self.vfs.write("journal.desc",
@@ -1197,7 +1183,7 @@
             # prevent dirstateguard from overwriting already restored one
             dsguard.close()
 
-            self.vfs.rename('undo.dirstate', 'dirstate')
+            self.dirstate.restorebackup(None, prefix='undo.')
             try:
                 branch = self.vfs.read('undo.branch')
                 self.dirstate.setbranch(encoding.tolocal(branch))
@@ -1206,7 +1192,6 @@
                           'current branch is still \'%s\'\n')
                         % self.dirstate.branch())
 
-            self.dirstate.invalidate()
             parents = tuple([p.rev() for p in self[None].parents()])
             if len(parents) > 1:
                 ui.status(_('working directory now based on '
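The transaction and rollback hunks above stop copying the dirstate file by hand and instead call dirstate.savebackup()/restorebackup() with a prefix ('journal.' while a transaction is open, 'undo.' once it closes). A rough, self-contained sketch of that prefix convention; the DirstateBackups class below is hypothetical, and the real code goes through the dirstate object so in-memory state stays consistent:

import os
import shutil
import tempfile

class DirstateBackups(object):
    """Sketch only: back the dirstate file up under a prefixed name and
    copy it back on rollback."""
    def __init__(self, root):
        self.root = root

    def _path(self, name):
        return os.path.join(self.root, name)

    def savebackup(self, prefix):
        shutil.copyfile(self._path('dirstate'), self._path(prefix + 'dirstate'))

    def restorebackup(self, prefix):
        shutil.copyfile(self._path(prefix + 'dirstate'), self._path('dirstate'))

root = tempfile.mkdtemp()
with open(os.path.join(root, 'dirstate'), 'wb') as f:
    f.write(b'original')
backups = DirstateBackups(root)
backups.savebackup('journal.')      # like _writejournal()
with open(os.path.join(root, 'dirstate'), 'wb') as f:
    f.write(b'dirty')
backups.restorebackup('journal.')   # like aborting the transaction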
--- a/mercurial/mail.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/mail.py	Tue Jun 14 14:52:58 2016 -0500
@@ -41,16 +41,16 @@
     kw['continuation_ws'] = ' '
     _oldheaderinit(self, *args, **kw)
 
-email.Header.Header.__dict__['__init__'] = _unifiedheaderinit
+setattr(email.header.Header, '__init__', _unifiedheaderinit)
 
 class STARTTLS(smtplib.SMTP):
     '''Derived class to verify the peer certificate for STARTTLS.
 
     This class allows to pass any keyword arguments to SSL socket creation.
     '''
-    def __init__(self, sslkwargs, host=None, **kwargs):
+    def __init__(self, ui, host=None, **kwargs):
         smtplib.SMTP.__init__(self, **kwargs)
-        self._sslkwargs = sslkwargs
+        self._ui = ui
         self._host = host
 
     def starttls(self, keyfile=None, certfile=None):
@@ -60,8 +60,8 @@
         (resp, reply) = self.docmd("STARTTLS")
         if resp == 220:
             self.sock = sslutil.wrapsocket(self.sock, keyfile, certfile,
-                                           serverhostname=self._host,
-                                           **self._sslkwargs)
+                                           ui=self._ui,
+                                           serverhostname=self._host)
             self.file = smtplib.SSLFakeFile(self.sock)
             self.helo_resp = None
             self.ehlo_resp = None
@@ -74,14 +74,14 @@
 
     This class allows to pass any keyword arguments to SSL socket creation.
     '''
-    def __init__(self, sslkwargs, keyfile=None, certfile=None, host=None,
+    def __init__(self, ui, keyfile=None, certfile=None, host=None,
                  **kwargs):
         self.keyfile = keyfile
         self.certfile = certfile
         smtplib.SMTP.__init__(self, **kwargs)
         self._host = host
         self.default_port = smtplib.SMTP_SSL_PORT
-        self._sslkwargs = sslkwargs
+        self._ui = ui
 
     def _get_socket(self, host, port, timeout):
         if self.debuglevel > 0:
@@ -89,8 +89,8 @@
         new_socket = socket.create_connection((host, port), timeout)
         new_socket = sslutil.wrapsocket(new_socket,
                                         self.keyfile, self.certfile,
-                                        serverhostname=self._host,
-                                        **self._sslkwargs)
+                                        ui=self._ui,
+                                        serverhostname=self._host)
         self.file = smtplib.SSLFakeFile(new_socket)
         return new_socket
 
@@ -106,22 +106,11 @@
     mailhost = ui.config('smtp', 'host')
     if not mailhost:
         raise error.Abort(_('smtp.host not configured - cannot send mail'))
-    verifycert = ui.config('smtp', 'verifycert', 'strict')
-    if verifycert not in ['strict', 'loose']:
-        if util.parsebool(verifycert) is not False:
-            raise error.Abort(_('invalid smtp.verifycert configuration: %s')
-                             % (verifycert))
-        verifycert = False
-    if (starttls or smtps) and verifycert:
-        sslkwargs = sslutil.sslkwargs(ui, mailhost)
-    else:
-        # 'ui' is required by sslutil.wrapsocket() and set by sslkwargs()
-        sslkwargs = {'ui': ui}
     if smtps:
         ui.note(_('(using smtps)\n'))
-        s = SMTPS(sslkwargs, local_hostname=local_hostname, host=mailhost)
+        s = SMTPS(ui, local_hostname=local_hostname, host=mailhost)
     elif starttls:
-        s = STARTTLS(sslkwargs, local_hostname=local_hostname, host=mailhost)
+        s = STARTTLS(ui, local_hostname=local_hostname, host=mailhost)
     else:
         s = smtplib.SMTP(local_hostname=local_hostname)
     if smtps:
@@ -137,9 +126,9 @@
         s.ehlo()
         s.starttls()
         s.ehlo()
-    if (starttls or smtps) and verifycert:
+    if starttls or smtps:
         ui.note(_('(verifying remote certificate)\n'))
-        sslutil.validator(ui, mailhost)(s.sock, verifycert == 'strict')
+        sslutil.validatesocket(s.sock)
     username = ui.config('smtp', 'username')
     password = ui.config('smtp', 'password')
     if username and not password:
--- a/mercurial/manifest.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/manifest.py	Tue Jun 14 14:52:58 2016 -0500
@@ -211,8 +211,10 @@
 
     def filesnotin(self, m2):
         '''Set of files in this manifest that are not in the other'''
-        files = set(self)
-        files.difference_update(m2)
+        diff = self.diff(m2)
+        files = set(filepath
+                    for filepath, hashflags in diff.iteritems()
+                    if hashflags[1][0] is None)
         return files
 
     @propertycache
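For reference, the new filesnotin() derives its answer from diff() output: a path that is missing on the other side shows up with a None node in the second half of the pair. A standalone sketch with plain {path: node} dicts standing in for manifests (this filesnotin is a hypothetical mirror, not the real method):

def filesnotin(m1, m2):
    # m1/m2: {path: node} dicts standing in for manifests
    diff = {}
    for path in set(m1) | set(m2):
        if m1.get(path) != m2.get(path):
            diff[path] = ((m1.get(path), ''), (m2.get(path), ''))
    return set(path for path, hashflags in diff.items()
               if hashflags[1][0] is None)

assert filesnotin({'a': '1', 'b': '2'}, {'a': '1'}) == set(['b'])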
--- a/mercurial/merge.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/merge.py	Tue Jun 14 14:52:58 2016 -0500
@@ -8,6 +8,7 @@
 from __future__ import absolute_import
 
 import errno
+import hashlib
 import os
 import shutil
 import struct
@@ -373,7 +374,7 @@
         """Write current state on disk in a version 1 file"""
         f = self._repo.vfs(self.statepathv1, 'w')
         irecords = iter(records)
-        lrecords = irecords.next()
+        lrecords = next(irecords)
         assert lrecords[0] == 'L'
         f.write(hex(self._local) + '\n')
         for rtype, data in irecords:
@@ -408,7 +409,7 @@
         if fcl.isabsent():
             hash = nullhex
         else:
-            hash = util.sha1(fcl.path()).hexdigest()
+            hash = hashlib.sha1(fcl.path()).hexdigest()
             self._repo.vfs.write('merge/' + hash, fcl.data())
         self._state[fd] = ['u', hash, fcl.path(),
                            fca.path(), hex(fca.filenode()),
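The sha1 here only names the backup: the local version of a conflicting file is stashed under 'merge/<sha1 of its path>', which flattens any path into a fixed-length, filesystem-safe key. A hypothetical helper showing just the naming scheme:

import hashlib

def mergebackupname(path):
    # name under which the local copy of a conflicting file is stored
    return 'merge/' + hashlib.sha1(path.encode('utf-8')).hexdigest()

name = mergebackupname('deeply/nested/conflicted file.txt')
assert name.startswith('merge/') and len(name) == len('merge/') + 40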
@@ -989,19 +990,19 @@
             if len(bids) == 1: # all bids are the same kind of method
                 m, l = bids.items()[0]
                 if all(a == l[0] for a in l[1:]): # len(bids) is > 1
-                    repo.ui.note(" %s: consensus for %s\n" % (f, m))
+                    repo.ui.note(_(" %s: consensus for %s\n") % (f, m))
                     actions[f] = l[0]
                     continue
             # If keep is an option, just do it.
             if 'k' in bids:
-                repo.ui.note(" %s: picking 'keep' action\n" % f)
+                repo.ui.note(_(" %s: picking 'keep' action\n") % f)
                 actions[f] = bids['k'][0]
                 continue
             # If there are gets and they all agree [how could they not?], do it.
             if 'g' in bids:
                 ga0 = bids['g'][0]
                 if all(a == ga0 for a in bids['g'][1:]):
-                    repo.ui.note(" %s: picking 'get' action\n" % f)
+                    repo.ui.note(_(" %s: picking 'get' action\n") % f)
                     actions[f] = ga0
                     continue
             # TODO: Consider other simple actions such as mode changes
@@ -1442,9 +1443,7 @@
             pas = [repo[ancestor]]
 
         if node is None:
-            if (repo.ui.configbool('devel', 'all-warnings')
-                    or repo.ui.configbool('devel', 'oldapi')):
-                repo.ui.develwarn('update with no target')
+            repo.ui.deprecwarn('update with no target', '3.9')
             rev, _mark, _act = destutil.destupdate(repo)
             node = repo[rev].node()
 
--- a/mercurial/obsolete.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/obsolete.py	Tue Jun 14 14:52:58 2016 -0500
@@ -1171,7 +1171,7 @@
                                    ignoreflags=bumpedfix):
             prev = torev(pnode) # unfiltered! but so is phasecache
             if (prev is not None) and (phase(repo, prev) <= public):
-                # we have a public precursors
+                # we have a public precursor
                 bumped.add(rev)
                 break # Next draft!
     return bumped
--- a/mercurial/parser.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/parser.py	Tue Jun 14 14:52:58 2016 -0500
@@ -325,13 +325,13 @@
         >>> builddecl('foo')
         ('foo', None, None)
         >>> builddecl('$foo')
-        ('$foo', None, "'$' not for alias arguments")
+        ('$foo', None, "invalid symbol '$foo'")
         >>> builddecl('foo::bar')
         ('foo::bar', None, 'invalid format')
         >>> builddecl('foo()')
         ('foo', [], None)
         >>> builddecl('$foo()')
-        ('$foo()', None, "'$' not for alias arguments")
+        ('$foo()', None, "invalid function '$foo'")
         >>> builddecl('foo($1, $2)')
         ('foo', ['$1', '$2'], None)
         >>> builddecl('foo(bar_bar, baz.baz)')
@@ -358,7 +358,7 @@
             # "name = ...." style
             name = tree[1]
             if name.startswith('$'):
-                return (decl, None, _("'$' not for alias arguments"))
+                return (decl, None, _("invalid symbol '%s'") % name)
             return (name, None, None)
 
         func = cls._trygetfunc(tree)
@@ -366,7 +366,7 @@
             # "name(arg, ....) = ...." style
             name, args = func
             if name.startswith('$'):
-                return (decl, None, _("'$' not for alias arguments"))
+                return (decl, None, _("invalid function '%s'") % name)
             if any(t[0] != cls._symbolnode for t in args):
                 return (decl, None, _("invalid argument list"))
             if len(args) != len(set(args)):
@@ -389,7 +389,7 @@
         if sym in args:
             op = '_aliasarg'
         elif sym.startswith('$'):
-            raise error.ParseError(_("'$' not for alias arguments"))
+            raise error.ParseError(_("invalid symbol '%s'") % sym)
         return (op, sym)
 
     @classmethod
@@ -423,7 +423,7 @@
         ...     builddefn('$1 or $bar', args)
         ... except error.ParseError as inst:
         ...     print parseerrordetail(inst)
-        '$' not for alias arguments
+        invalid symbol '$bar'
         >>> args = ['$1', '$10', 'foo']
         >>> pprint(builddefn('$10 or baz', args))
         (or
@@ -447,15 +447,13 @@
         repl = efmt = None
         name, args, err = cls._builddecl(decl)
         if err:
-            efmt = _('failed to parse the declaration of %(section)s '
-                     '"%(name)s": %(error)s')
+            efmt = _('bad declaration of %(section)s "%(name)s": %(error)s')
         else:
             try:
                 repl = cls._builddefn(defn, args)
             except error.ParseError as inst:
                 err = parseerrordetail(inst)
-                efmt = _('failed to parse the definition of %(section)s '
-                         '"%(name)s": %(error)s')
+                efmt = _('bad definition of %(section)s "%(name)s": %(error)s')
         if err:
             err = efmt % {'section': cls._section, 'name': name, 'error': err}
         return alias(name, args, err, repl)
--- a/mercurial/patch.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/patch.py	Tue Jun 14 14:52:58 2016 -0500
@@ -12,6 +12,7 @@
 import copy
 import email
 import errno
+import hashlib
 import os
 import posixpath
 import re
@@ -978,7 +979,19 @@
 def filterpatch(ui, headers, operation=None):
     """Interactively filter patch chunks into applied-only chunks"""
     if operation is None:
-        operation = _('record')
+        operation = 'record'
+    messages = {
+        'multiple': {
+            'discard': _("discard change %d/%d to '%s'?"),
+            'record': _("record change %d/%d to '%s'?"),
+            'revert': _("revert change %d/%d to '%s'?"),
+        }[operation],
+        'single': {
+            'discard': _("discard this change to '%s'?"),
+            'record': _("record this change to '%s'?"),
+            'revert': _("revert this change to '%s'?"),
+        }[operation],
+    }
 
     def prompt(skipfile, skipall, query, chunk):
         """prompt query, and process base inputs
@@ -1109,11 +1122,10 @@
             if skipfile is None and skipall is None:
                 chunk.pretty(ui)
             if total == 1:
-                msg = _("record this change to '%s'?") % chunk.filename()
+                msg = messages['single'] % chunk.filename()
             else:
                 idx = pos - len(h.hunks) + i
-                msg = _("record change %d/%d to '%s'?") % (idx, total,
-                                                           chunk.filename())
+                msg = messages['multiple'] % (idx, total, chunk.filename())
             r, skipfile, skipall, newpatches = prompt(skipfile,
                     skipall, msg, chunk)
             if r:
@@ -2401,7 +2413,7 @@
         if not text:
             text = ""
         l = len(text)
-        s = util.sha1('blob %d\0' % l)
+        s = hashlib.sha1('blob %d\0' % l)
         s.update(text)
         return s.hexdigest()
 
--- a/mercurial/pathencode.c	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/pathencode.c	Tue Jun 14 14:52:58 2016 -0500
@@ -653,24 +653,24 @@
 	PyObject *shaobj, *hashobj;
 
 	if (shafunc == NULL) {
-		PyObject *util, *name = PyString_FromString("mercurial.util");
+		PyObject *hashlib, *name = PyString_FromString("hashlib");
 
 		if (name == NULL)
 			return -1;
 
-		util = PyImport_Import(name);
+		hashlib = PyImport_Import(name);
 		Py_DECREF(name);
 
-		if (util == NULL) {
-			PyErr_SetString(PyExc_ImportError, "mercurial.util");
+		if (hashlib == NULL) {
+			PyErr_SetString(PyExc_ImportError, "hashlib");
 			return -1;
 		}
-		shafunc = PyObject_GetAttrString(util, "sha1");
-		Py_DECREF(util);
+		shafunc = PyObject_GetAttrString(hashlib, "sha1");
+		Py_DECREF(hashlib);
 
 		if (shafunc == NULL) {
 			PyErr_SetString(PyExc_AttributeError,
-					"module 'mercurial.util' has no "
+					"module 'hashlib' has no "
 					"attribute 'sha1'");
 			return -1;
 		}
--- a/mercurial/peer.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/peer.py	Tue Jun 14 14:52:58 2016 -0500
@@ -98,12 +98,12 @@
     '''
     def plain(*args, **opts):
         batchable = f(*args, **opts)
-        encargsorres, encresref = batchable.next()
+        encargsorres, encresref = next(batchable)
         if not encresref:
             return encargsorres # a local result in this case
         self = args[0]
         encresref.set(self._submitone(f.func_name, encargsorres))
-        return batchable.next()
+        return next(batchable)
     setattr(plain, 'batchable', f)
     return plain
 
--- a/mercurial/phases.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/phases.py	Tue Jun 14 14:52:58 2016 -0500
@@ -251,7 +251,7 @@
     def write(self):
         if not self.dirty:
             return
-        f = self.opener('phaseroots', 'w', atomictemp=True)
+        f = self.opener('phaseroots', 'w', atomictemp=True, checkambig=True)
         try:
             self._write(f)
         finally:
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/policy.py	Tue Jun 14 14:52:58 2016 -0500
@@ -0,0 +1,40 @@
+# policy.py - module policy logic for Mercurial.
+#
+# Copyright 2015 Gregory Szorc <gregory.szorc@gmail.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from __future__ import absolute_import
+
+import os
+import sys
+
+# Rules for how modules can be loaded. Values are:
+#
+#    c - require C extensions
+#    allow - allow pure Python implementation when C loading fails
+#    py - only load pure Python modules
+#
+# By default, require the C extensions for performance reasons.
+policy = 'c'
+try:
+    from . import __modulepolicy__
+    policy = __modulepolicy__.modulepolicy
+except ImportError:
+    pass
+
+# PyPy doesn't load C extensions.
+#
+# The canonical way to do this is to test platform.python_implementation().
+# But we avoid importing platform to keep module loading cheap.

+if '__pypy__' in sys.builtin_module_names:
+    policy = 'py'
+
+# Our C extensions aren't yet compatible with Python 3. So use pure Python
+# on Python 3 for now.
+if sys.version_info[0] >= 3:
+    policy = 'py'
+
+# Environment variable can always force settings.
+policy = os.environ.get('HGMODULEPOLICY', policy)
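The resolution order in this new module is: built-in default, then the generated __modulepolicy__, then interpreter-specific overrides, and finally the HGMODULEPOLICY environment variable, which always wins. A condensed sketch of that order (resolvepolicy is a hypothetical helper, not part of the module):

import os
import sys

def resolvepolicy(filepolicy=None):
    policy = 'c'                        # built-in default
    if filepolicy is not None:          # stands in for __modulepolicy__
        policy = filepolicy
    if '__pypy__' in sys.builtin_module_names or sys.version_info[0] >= 3:
        policy = 'py'                   # no C extensions on PyPy / Python 3
    return os.environ.get('HGMODULEPOLICY', policy)

os.environ['HGMODULEPOLICY'] = 'allow'
assert resolvepolicy('c') == 'allow'    # the environment always wins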
--- a/mercurial/pure/parsers.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/pure/parsers.py	Tue Jun 14 14:52:58 2016 -0500
@@ -25,49 +25,111 @@
     # x is a tuple
     return x
 
-def parse_index2(data, inline):
-    def gettype(q):
-        return int(q & 0xFFFF)
+indexformatng = ">Qiiiiii20s12x"
+indexfirst = struct.calcsize('Q')
+sizeint = struct.calcsize('i')
+indexsize = struct.calcsize(indexformatng)
+
+def gettype(q):
+    return int(q & 0xFFFF)
 
-    def offset_type(offset, type):
-        return long(long(offset) << 16 | type)
+def offset_type(offset, type):
+    return long(long(offset) << 16 | type)
+
+class BaseIndexObject(object):
+    def __len__(self):
+        return self._lgt + len(self._extra) + 1
+
+    def insert(self, i, tup):
+        assert i == -1
+        self._extra.append(tup)
 
-    indexformatng = ">Qiiiiii20s12x"
+    def _fix_index(self, i):
+        if not isinstance(i, int):
+            raise TypeError("expecting int indexes")
+        if i < 0:
+            i = len(self) + i
+        if i < 0 or i >= len(self):
+            raise IndexError
+        return i
 
-    s = struct.calcsize(indexformatng)
-    index = []
-    cache = None
-    off = 0
+    def __getitem__(self, i):
+        i = self._fix_index(i)
+        if i == len(self) - 1:
+            return (0, 0, 0, -1, -1, -1, -1, nullid)
+        if i >= self._lgt:
+            return self._extra[i - self._lgt]
+        index = self._calculate_index(i)
+        r = struct.unpack(indexformatng, self._data[index:index + indexsize])
+        if i == 0:
+            e = list(r)
+            type = gettype(e[0])
+            e[0] = offset_type(0, type)
+            return tuple(e)
+        return r
+
+class IndexObject(BaseIndexObject):
+    def __init__(self, data):
+        assert len(data) % indexsize == 0
+        self._data = data
+        self._lgt = len(data) // indexsize
+        self._extra = []
+
+    def _calculate_index(self, i):
+        return i * indexsize
 
-    l = len(data) - s
-    append = index.append
-    if inline:
-        cache = (0, data)
-        while off <= l:
-            e = _unpack(indexformatng, data[off:off + s])
-            append(e)
-            if e[1] < 0:
-                break
-            off += e[1] + s
-    else:
-        while off <= l:
-            e = _unpack(indexformatng, data[off:off + s])
-            append(e)
-            off += s
+    def __delitem__(self, i):
+        if not isinstance(i, slice) or not i.stop == -1 or not i.step is None:
+            raise ValueError("deleting slices only supports a:-1 with step 1")
+        i = self._fix_index(i.start)
+        if i < self._lgt:
+            self._data = self._data[:i * indexsize]
+            self._lgt = i
+            self._extra = []
+        else:
+            self._extra = self._extra[:i - self._lgt]
+
+class InlinedIndexObject(BaseIndexObject):
+    def __init__(self, data, inline=0):
+        self._data = data
+        self._lgt = self._inline_scan(None)
+        self._inline_scan(self._lgt)
+        self._extra = []
 
-    if off != len(data):
-        raise ValueError('corrupt index file')
+    def _inline_scan(self, lgt):
+        off = 0
+        if lgt is not None:
+            self._offsets = [0] * lgt
+        count = 0
+        while off <= len(self._data) - indexsize:
+            s, = struct.unpack('>i',
+                self._data[off + indexfirst:off + sizeint + indexfirst])
+            if lgt is not None:
+                self._offsets[count] = off
+            count += 1
+            off += indexsize + s
+        if off != len(self._data):
+            raise ValueError("corrupted data")
+        return count
 
-    if index:
-        e = list(index[0])
-        type = gettype(e[0])
-        e[0] = offset_type(0, type)
-        index[0] = tuple(e)
+    def __delitem__(self, i):
+        if not isinstance(i, slice) or not i.stop == -1 or not i.step is None:
+            raise ValueError("deleting slices only supports a:-1 with step 1")
+        i = self._fix_index(i.start)
+        if i < self._lgt:
+            self._offsets = self._offsets[:i]
+            self._lgt = i
+            self._extra = []
+        else:
+            self._extra = self._extra[:i - self._lgt]
 
-    # add the magic null revision at -1
-    index.append((0, 0, 0, -1, -1, -1, -1, nullid))
+    def _calculate_index(self, i):
+        return self._offsets[i]
 
-    return index, cache
+def parse_index2(data, inline):
+    if not inline:
+        return IndexObject(data), None
+    return InlinedIndexObject(data, inline), (0, data)
 
 def parse_dirstate(dmap, copymap, st):
     parents = [st[:20], st[20: 40]]
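The parsers.py rewrite replaces an eagerly built list with index objects that keep the raw data and unpack one fixed-size record per __getitem__ call. A simplified, self-contained sketch of the same idea, using a made-up two-field record format rather than revlog's real layout:

import struct

RECFMT = '>Qi'                      # hypothetical: 8-byte offset, 4-byte length
RECSIZE = struct.calcsize(RECFMT)

class FixedIndex(object):
    """Sketch: keep the raw buffer and unpack records lazily instead of
    materializing a list up front."""
    def __init__(self, data):
        assert len(data) % RECSIZE == 0
        self._data = data

    def __len__(self):
        return len(self._data) // RECSIZE

    def __getitem__(self, i):
        if i < 0:
            i += len(self)
        if not 0 <= i < len(self):
            raise IndexError(i)
        start = i * RECSIZE
        return struct.unpack(RECFMT, self._data[start:start + RECSIZE])

raw = struct.pack(RECFMT, 0, 10) + struct.pack(RECFMT, 10, 20)
idx = FixedIndex(raw)
assert idx[1] == (10, 20)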
--- a/mercurial/pycompat.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/pycompat.py	Tue Jun 14 14:52:58 2016 -0500
@@ -11,6 +11,12 @@
 from __future__ import absolute_import
 
 try:
+    import cPickle as pickle
+    pickle.dumps
+except ImportError:
+    import pickle
+
+try:
     import cStringIO as io
     stringio = io.StringIO
 except ImportError:
--- a/mercurial/repair.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/repair.py	Tue Jun 14 14:52:58 2016 -0500
@@ -9,6 +9,7 @@
 from __future__ import absolute_import
 
 import errno
+import hashlib
 
 from .i18n import _
 from .node import short
@@ -35,7 +36,7 @@
     # Include a hash of all the nodes in the filename for uniqueness
     allcommits = repo.set('%ln::%ln', bases, heads)
     allhashes = sorted(c.hex() for c in allcommits)
-    totalhash = util.sha1(''.join(allhashes)).hexdigest()
+    totalhash = hashlib.sha1(''.join(allhashes)).hexdigest()
     name = "%s/%s-%s-%s.hg" % (backupdir, short(node), totalhash[:8], suffix)
 
     comp = None
--- a/mercurial/repoview.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/repoview.py	Tue Jun 14 14:52:58 2016 -0500
@@ -9,6 +9,7 @@
 from __future__ import absolute_import
 
 import copy
+import hashlib
 import heapq
 import struct
 
@@ -18,7 +19,6 @@
     obsolete,
     phases,
     tags as tagsmod,
-    util,
 )
 
 def hideablerevs(repo):
@@ -102,7 +102,7 @@
     it to the cache. Upon reading we can easily validate by checking the hash
     against the stored one and discard the cache in case the hashes don't match.
     """
-    h = util.sha1()
+    h = hashlib.sha1()
     h.update(''.join(repo.heads()))
     h.update(str(hash(frozenset(hideable))))
     return h.digest()
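The cache key computed here is just a sha1 over the repository heads plus a hash of the hidden-revision set, so a change to either invalidates the cache. A standalone sketch with hex strings standing in for binary node ids:

import hashlib

def cachehash(heads, hidden):
    h = hashlib.sha1()
    h.update(''.join(heads).encode('ascii'))    # real code feeds binary nodes
    h.update(str(hash(frozenset(hidden))).encode('ascii'))
    return h.digest()

key = cachehash(['aa' * 20, 'bb' * 20], {1, 2, 3})
assert len(key) == 20
assert key != cachehash(['aa' * 20, 'bb' * 20], {1, 2})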
--- a/mercurial/revlog.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/revlog.py	Tue Jun 14 14:52:58 2016 -0500
@@ -15,6 +15,7 @@
 
 import collections
 import errno
+import hashlib
 import os
 import struct
 import zlib
@@ -40,7 +41,6 @@
 _unpack = struct.unpack
 _compress = zlib.compress
 _decompress = zlib.decompress
-_sha = util.sha1
 
 # revlog header flags
 REVLOGV0 = 0
@@ -74,7 +74,7 @@
 def offset_type(offset, type):
     return long(long(offset) << 16 | type)
 
-_nullhash = _sha(nullid)
+_nullhash = hashlib.sha1(nullid)
 
 def hash(text, p1, p2):
     """generate a hash from the given text and its parent hashes
@@ -92,7 +92,7 @@
         # none of the parent nodes are nullid
         l = [p1, p2]
         l.sort()
-        s = _sha(l[0])
+        s = hashlib.sha1(l[0])
         s.update(l[1])
     s.update(text)
     return s.digest()
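For context, hash() computes a Mercurial node id: sha1 over the two parent nodes in sorted order followed by the revision text; the hunk above only switches it to hashlib and keeps the null-parent fast path. A sketch without that fast path:

import hashlib

nullid = b'\0' * 20

def nodehash(text, p1=nullid, p2=nullid):
    # sha1(min(p1, p2) + max(p1, p2) + text), as in revlog.hash()
    a, b = sorted([p1, p2])
    s = hashlib.sha1(a)
    s.update(b)
    s.update(text)
    return s.digest()

root = nodehash(b'file contents\n')
child = nodehash(b'file contents\nmore\n', p1=root)
assert root != child and len(child) == 20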
--- a/mercurial/revset.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/revset.py	Tue Jun 14 14:52:58 2016 -0500
@@ -330,13 +330,12 @@
     s = methods[x[0]](repo, subset, *x[1:])
     if util.safehasattr(s, 'isascending'):
         return s
-    if (repo.ui.configbool('devel', 'all-warnings')
-            or repo.ui.configbool('devel', 'old-revset')):
-        # else case should not happen, because all non-func are internal,
-        # ignoring for now.
-        if x[0] == 'func' and x[1][0] == 'symbol' and x[1][1] in symbols:
-            repo.ui.develwarn('revset "%s" use list instead of smartset, '
-                              '(upgrade your code)' % x[1][1])
+    # else case should not happen, because all non-func are internal,
+    # ignoring for now.
+    if x[0] == 'func' and x[1][0] == 'symbol' and x[1][1] in symbols:
+        repo.ui.deprecwarn('revset "%s" uses list instead of smartset'
+                           % x[1][1],
+                           '3.9')
     return baseset(s)
 
 def _getrevsource(repo, r):
@@ -387,9 +386,7 @@
     r = fullreposet(repo)
     xs = reachableroots(repo, getset(repo, r, x), getset(repo, r, y),
                          includepath=True)
-    # XXX We should combine with subset first: 'subset & baseset(...)'. This is
-    # necessary to ensure we preserve the order in subset.
-    return xs & subset
+    return subset & xs
 
 def andset(repo, subset, x, y):
     return getset(repo, getset(repo, subset, x), y)
@@ -1837,7 +1834,16 @@
         return True
     return subset & s.filter(filter, condrepr='<roots>')
 
-@predicate('sort(set[, [-]key...])', safe=True)
+_sortkeyfuncs = {
+    'rev': lambda c: c.rev(),
+    'branch': lambda c: c.branch(),
+    'desc': lambda c: c.description(),
+    'user': lambda c: c.user(),
+    'author': lambda c: c.user(),
+    'date': lambda c: c.date()[0],
+}
+
+@predicate('sort(set[, [-]key... [, ...]])', safe=True)
 def sort(repo, subset, x):
     """Sort set by keys. The default sort order is ascending, specify a key
     as ``-key`` to sort in descending order.
@@ -1849,50 +1855,265 @@
     - ``desc`` for the commit message (description),
     - ``user`` for user name (``author`` can be used as an alias),
     - ``date`` for the commit date
+    - ``topo`` for a reverse topological sort
+
+    The ``topo`` sort order cannot be combined with other sort keys. This sort
+    takes one optional argument, ``topo.firstbranch``, which takes a revset that
+    specifies what topological branches to prioritize in the sort.
+
     """
-    # i18n: "sort" is a keyword
-    l = getargs(x, 1, 2, _("sort requires one or two arguments"))
+    args = getargsdict(x, 'sort', 'set keys topo.firstbranch')
+    if 'set' not in args:
+        # i18n: "sort" is a keyword
+        raise error.ParseError(_('sort requires one or two arguments'))
     keys = "rev"
-    if len(l) == 2:
+    if 'keys' in args:
         # i18n: "sort" is a keyword
-        keys = getstring(l[1], _("sort spec must be a string"))
-
-    s = l[0]
+        keys = getstring(args['keys'], _("sort spec must be a string"))
+
+    s = args['set']
     keys = keys.split()
     revs = getset(repo, subset, s)
+
+    if len(keys) > 1 and any(k.lstrip('-') == 'topo' for k in keys):
+        # i18n: "topo" is a keyword
+        raise error.ParseError(_(
+            'topo sort order cannot be combined with other sort keys'))
+
+    firstbranch = ()
+    if 'topo.firstbranch' in args:
+        if any(k.lstrip('-') == 'topo' for k in keys):
+            firstbranch = getset(repo, subset, args['topo.firstbranch'])
+        else:
+            # i18n: "topo" and "topo.firstbranch" are keywords
+            raise error.ParseError(_(
+                'topo.firstbranch can only be used when using the topo sort '
+                'key'))
+
     if keys == ["rev"]:
         revs.sort()
         return revs
     elif keys == ["-rev"]:
         revs.sort(reverse=True)
         return revs
+    elif keys[0] in ("topo", "-topo"):
+        revs = baseset(_toposort(revs, repo.changelog.parentrevs, firstbranch),
+                       istopo=True)
+        if keys[0][0] == '-':
+            revs.reverse()
+        return revs
+
     # sort() is guaranteed to be stable
     ctxs = [repo[r] for r in revs]
     for k in reversed(keys):
-        if k == 'rev':
-            ctxs.sort(key=lambda c: c.rev())
-        elif k == '-rev':
-            ctxs.sort(key=lambda c: c.rev(), reverse=True)
-        elif k == 'branch':
-            ctxs.sort(key=lambda c: c.branch())
-        elif k == '-branch':
-            ctxs.sort(key=lambda c: c.branch(), reverse=True)
-        elif k == 'desc':
-            ctxs.sort(key=lambda c: c.description())
-        elif k == '-desc':
-            ctxs.sort(key=lambda c: c.description(), reverse=True)
-        elif k in 'user author':
-            ctxs.sort(key=lambda c: c.user())
-        elif k in '-user -author':
-            ctxs.sort(key=lambda c: c.user(), reverse=True)
-        elif k == 'date':
-            ctxs.sort(key=lambda c: c.date()[0])
-        elif k == '-date':
-            ctxs.sort(key=lambda c: c.date()[0], reverse=True)
-        else:
-            raise error.ParseError(_("unknown sort key %r") % k)
+        fk = k
+        reverse = (k[0] == '-')
+        if reverse:
+            k = k[1:]
+        try:
+            ctxs.sort(key=_sortkeyfuncs[k], reverse=reverse)
+        except KeyError:
+            raise error.ParseError(_("unknown sort key %r") % fk)
     return baseset([c.rev() for c in ctxs])
 
+def _toposort(revs, parentsfunc, firstbranch=()):
+    """Yield revisions from heads to roots one (topo) branch at a time.
+
+    This function aims to be used by a graph generator that wishes to minimize
+    the number of parallel branches and their interleaving.
+
+    Example iteration order (numbers show the "true" order in a changelog):
+
+      o  4
+      |
+      o  1
+      |
+      | o  3
+      | |
+      | o  2
+      |/
+      o  0
+
+    Note that the ancestors of merges are understood by the current
+    algorithm to be on the same branch. This means no reordering will
+    occur behind a merge.
+    """
+
+    ### Quick summary of the algorithm
+    #
+    # This function is based around a "retention" principle. We keep revisions
+    # in memory until we are ready to emit a whole branch that immediately
+    # "merges" into an existing one. This reduces the number of parallel
+    # branches with interleaved revisions.
+    #
+    # During iteration revs are split into two groups:
+    # A) revisions already emitted
+    # B) revisions in "retention": they are stored as different subgroups.
+    #
+    # for each REV, we do the following logic:
+    #
+    #   1) if REV is a parent of (A), we will emit it. If there is a
+    #   retention group ((B) above) that is blocked on REV being
+    #   available, we emit all the revisions out of that retention
+    #   group first.
+    #
+    #   2) else, we search for a subgroup in (B) awaiting REV to become
+    #   available. If such a subgroup exists, we add REV to it and the subgroup
+    #   is now awaiting REV.parents() to become available.
+    #
+    #   3) finally if no such group existed in (B), we create a new subgroup.
+    #
+    #
+    # To bootstrap the algorithm, we emit the tipmost revision (which
+    # puts it in group (A) from above).
+
+    revs.sort(reverse=True)
+
+    # Set of parents of revisions that have been emitted. They can be
+    # considered unblocked as the graph generator is already aware of them so
+    # there is no need to delay the revisions that reference them.
+    #
+    # If someone wants to prioritize a branch over the others, pre-filling this
+    # set will force all other branches to wait until this branch is ready to be
+    # emitted.
+    unblocked = set(firstbranch)
+
+    # list of groups waiting to be displayed, each group is defined by:
+    #
+    #   (revs:    lists of revs waiting to be displayed,
+    #    blocked: set of revs that cannot be displayed before those in 'revs')
+    #
+    # The second value ('blocked') corresponds to parents of any revision in the
+    # group ('revs') that is not itself contained in the group. The main idea
+    # of this algorithm is to delay as much as possible the emission of any
+    # revision.  This means waiting for the moment we are about to display
+    # these parents to display the revs in a group.
+    #
+    # This first implementation is smart until it encounters a merge: it will
+    # emit revs as soon as any parent is about to be emitted and can grow an
+    # arbitrary number of revs in 'blocked'. In practice this means we properly
+    # retain new branches but give up on any special ordering for ancestors
+    # of merges. The implementation can be improved to handle this better.
+    #
+    # The first subgroup is special. It corresponds to all the revisions that
+    # were already emitted. Its 'revs' list is expected to be empty and its
+    # 'blocked' set contains the parent revisions of already emitted revisions.
+    #
+    # You could pre-seed the <parents> set of groups[0] with specific
+    # changesets to select what the first emitted branch should be.
+    groups = [([], unblocked)]
+    pendingheap = []
+    pendingset = set()
+
+    heapq.heapify(pendingheap)
+    heappop = heapq.heappop
+    heappush = heapq.heappush
+    for currentrev in revs:
+        # Heap works with smallest element, we want highest so we invert
+        if currentrev not in pendingset:
+            heappush(pendingheap, -currentrev)
+            pendingset.add(currentrev)
+        # iterate on pending revs until the current rev has been
+        # processed.
+        rev = None
+        while rev != currentrev:
+            rev = -heappop(pendingheap)
+            pendingset.remove(rev)
+
+            # Look for a subgroup that is blocked, waiting for the current revision.
+            matching = [i for i, g in enumerate(groups) if rev in g[1]]
+
+            if matching:
+                # The main idea is to gather together all sets that are blocked
+                # on the same revision.
+                #
+                # Groups are merged when a common blocking ancestor is
+                # observed. For example, given two groups:
+                #
+                # revs [5, 4] waiting for 1
+                # revs [3, 2] waiting for 1
+                #
+                # These two groups will be merged when we process
+                # 1. In theory, we could have merged the groups when
+                # we added 2 to the group it is now in (we could have
+                # noticed the groups were both blocked on 1 then), but
+                # the way it works now makes the algorithm simpler.
+                #
+                # We also always keep the oldest subgroup first. We can
+                # probably improve the behavior by having the longest set
+                # first. That way, graph algorithms could minimise the length
+                # of parallel lines in their drawing. This is currently not done.
+                targetidx = matching.pop(0)
+                trevs, tparents = groups[targetidx]
+                for i in matching:
+                    gr = groups[i]
+                    trevs.extend(gr[0])
+                    tparents |= gr[1]
+                # delete all merged subgroups (except the one we kept)
+                # (starting from the last subgroup for performance and
+                # sanity reasons)
+                for i in reversed(matching):
+                    del groups[i]
+            else:
+                # This is a new head. We create a new subgroup for it.
+                targetidx = len(groups)
+                groups.append(([], set([rev])))
+
+            gr = groups[targetidx]
+
+            # We now add the current nodes to this subgroup. This is done
+            # after the subgroup merging because all elements from a subgroup
+            # that relied on this rev must precede it.
+            #
+            # we also update the <parents> set to include the parents of the
+            # new nodes.
+            if rev == currentrev: # only display stuff in rev
+                gr[0].append(rev)
+            gr[1].remove(rev)
+            parents = [p for p in parentsfunc(rev) if p > node.nullrev]
+            gr[1].update(parents)
+            for p in parents:
+                if p not in pendingset:
+                    pendingset.add(p)
+                    heappush(pendingheap, -p)
+
+            # Look for a subgroup to display
+            #
+            # When unblocked is empty (if clause), we were not waiting for any
+            # revisions during the first iteration (if no priority was given) or
+            # if we emitted a whole disconnected set of the graph (reached a
+            # root).  In that case we arbitrarily take the oldest known
+            # subgroup. The heuristic could probably be better.
+            #
+            # Otherwise (elif clause) if the subgroup is blocked on
+            # a revision we just emitted, we can safely emit it as
+            # well.
+            if not unblocked:
+                if len(groups) > 1:  # display other subset
+                    targetidx = 1
+                    gr = groups[1]
+            elif not gr[1] & unblocked:
+                gr = None
+
+            if gr is not None:
+                # update the set of awaited revisions with the one from the
+                # subgroup
+                unblocked |= gr[1]
+                # output all revisions in the subgroup
+                for r in gr[0]:
+                    yield r
+                # delete the subgroup that you just output
+                # unless it is groups[0] in which case you just empty it.
+                if targetidx:
+                    del groups[targetidx]
+                else:
+                    gr[0][:] = []
+    # Check if we have some subgroup waiting for revisions we are not going to
+    # iterate over
+    for g in groups:
+        for r in g[0]:
+            yield r
+
 @predicate('subrepo([pattern])')
 def subrepo(repo, subset, x):
     """Changesets that add, modify or remove the given subrepo.  If no subrepo
@@ -2073,7 +2294,22 @@
     "parentpost": p1,
 }
 
-def optimize(x, small):
+def _matchonly(revs, bases):
+    """
+    >>> f = lambda *args: _matchonly(*map(parse, args))
+    >>> f('ancestors(A)', 'not ancestors(B)')
+    ('list', ('symbol', 'A'), ('symbol', 'B'))
+    """
+    if (revs is not None
+        and revs[0] == 'func'
+        and getstring(revs[1], _('not a symbol')) == 'ancestors'
+        and bases is not None
+        and bases[0] == 'not'
+        and bases[1][0] == 'func'
+        and getstring(bases[1][1], _('not a symbol')) == 'ancestors'):
+        return ('list', revs[2], bases[1][2])
+
+def _optimize(x, small):
     if x is None:
         return 0, x
 
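The doctest above drives _matchonly through parse(); the same structural check can be written directly against the tuple trees the parser produces. A standalone mirror (matchonly below is hypothetical and skips the getstring() error handling of the real helper):

def matchonly(revs, bases):
    # detect "ancestors(X) and not ancestors(Y)" and return the argument
    # list for a cheaper only(X, Y) call
    if (revs is not None and revs[0] == 'func'
            and revs[1] == ('symbol', 'ancestors')
            and bases is not None and bases[0] == 'not'
            and bases[1][0] == 'func'
            and bases[1][1] == ('symbol', 'ancestors')):
        return ('list', revs[2], bases[1][2])
    return None

revs = ('func', ('symbol', 'ancestors'), ('symbol', 'A'))
bases = ('not', ('func', ('symbol', 'ancestors'), ('symbol', 'B')))
assert matchonly(revs, bases) == ('list', ('symbol', 'A'), ('symbol', 'B'))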
@@ -2083,47 +2319,36 @@
 
     op = x[0]
     if op == 'minus':
-        return optimize(('and', x[1], ('not', x[2])), small)
+        return _optimize(('and', x[1], ('not', x[2])), small)
     elif op == 'only':
-        return optimize(('func', ('symbol', 'only'),
-                         ('list', x[1], x[2])), small)
+        t = ('func', ('symbol', 'only'), ('list', x[1], x[2]))
+        return _optimize(t, small)
     elif op == 'onlypost':
-        return optimize(('func', ('symbol', 'only'), x[1]), small)
+        return _optimize(('func', ('symbol', 'only'), x[1]), small)
     elif op == 'dagrangepre':
-        return optimize(('func', ('symbol', 'ancestors'), x[1]), small)
+        return _optimize(('func', ('symbol', 'ancestors'), x[1]), small)
     elif op == 'dagrangepost':
-        return optimize(('func', ('symbol', 'descendants'), x[1]), small)
+        return _optimize(('func', ('symbol', 'descendants'), x[1]), small)
     elif op == 'rangeall':
-        return optimize(('range', ('string', '0'), ('string', 'tip')), small)
+        return _optimize(('range', ('string', '0'), ('string', 'tip')), small)
     elif op == 'rangepre':
-        return optimize(('range', ('string', '0'), x[1]), small)
+        return _optimize(('range', ('string', '0'), x[1]), small)
     elif op == 'rangepost':
-        return optimize(('range', x[1], ('string', 'tip')), small)
+        return _optimize(('range', x[1], ('string', 'tip')), small)
     elif op == 'negate':
-        return optimize(('string',
-                         '-' + getstring(x[1], _("can't negate that"))), small)
+        s = getstring(x[1], _("can't negate that"))
+        return _optimize(('string', '-' + s), small)
     elif op in 'string symbol negate':
         return smallbonus, x # single revisions are small
     elif op == 'and':
-        wa, ta = optimize(x[1], True)
-        wb, tb = optimize(x[2], True)
+        wa, ta = _optimize(x[1], True)
+        wb, tb = _optimize(x[2], True)
+        w = min(wa, wb)
 
         # (::x and not ::y)/(not ::y and ::x) have a fast path
-        def isonly(revs, bases):
-            return (
-                revs is not None
-                and revs[0] == 'func'
-                and getstring(revs[1], _('not a symbol')) == 'ancestors'
-                and bases is not None
-                and bases[0] == 'not'
-                and bases[1][0] == 'func'
-                and getstring(bases[1][1], _('not a symbol')) == 'ancestors')
-
-        w = min(wa, wb)
-        if isonly(ta, tb):
-            return w, ('func', ('symbol', 'only'), ('list', ta[2], tb[1][2]))
-        if isonly(tb, ta):
-            return w, ('func', ('symbol', 'only'), ('list', tb[2], ta[1][2]))
+        tm = _matchonly(ta, tb) or _matchonly(tb, ta)
+        if tm:
+            return w, ('func', ('symbol', 'only'), tm)
 
         if tb is not None and tb[0] == 'not':
             return wa, ('difference', ta, tb[1])
@@ -2143,12 +2368,12 @@
             else:
                 s = '\0'.join(t[1] for w, t in ss)
                 y = ('func', ('symbol', '_list'), ('string', s))
-                w, t = optimize(y, False)
+                w, t = _optimize(y, False)
             ws.append(w)
             ts.append(t)
             del ss[:]
         for y in x[1:]:
-            w, t = optimize(y, False)
+            w, t = _optimize(y, False)
             if t is not None and (t[0] == 'string' or t[0] == 'symbol'):
                 ss.append((w, t))
                 continue
@@ -2166,34 +2391,34 @@
         # Optimize not public() to _notpublic() because we have a fast version
         if x[1] == ('func', ('symbol', 'public'), None):
             newsym = ('func', ('symbol', '_notpublic'), None)
-            o = optimize(newsym, not small)
+            o = _optimize(newsym, not small)
             return o[0], o[1]
         else:
-            o = optimize(x[1], not small)
+            o = _optimize(x[1], not small)
             return o[0], (op, o[1])
     elif op == 'parentpost':
-        o = optimize(x[1], small)
+        o = _optimize(x[1], small)
         return o[0], (op, o[1])
     elif op == 'group':
-        return optimize(x[1], small)
+        return _optimize(x[1], small)
     elif op in 'dagrange range parent ancestorspec':
         if op == 'parent':
             # x^:y means (x^) : y, not x ^ (:y)
             post = ('parentpost', x[1])
             if x[2][0] == 'dagrangepre':
-                return optimize(('dagrange', post, x[2][1]), small)
+                return _optimize(('dagrange', post, x[2][1]), small)
             elif x[2][0] == 'rangepre':
-                return optimize(('range', post, x[2][1]), small)
-
-        wa, ta = optimize(x[1], small)
-        wb, tb = optimize(x[2], small)
+                return _optimize(('range', post, x[2][1]), small)
+
+        wa, ta = _optimize(x[1], small)
+        wb, tb = _optimize(x[2], small)
         return wa + wb, (op, ta, tb)
     elif op == 'list':
-        ws, ts = zip(*(optimize(y, small) for y in x[1:]))
+        ws, ts = zip(*(_optimize(y, small) for y in x[1:]))
         return sum(ws), (op,) + ts
     elif op == 'func':
         f = getstring(x[1], _("not a symbol"))
-        wa, ta = optimize(x[2], small)
+        wa, ta = _optimize(x[2], small)
         if f in ("author branch closed date desc file grep keyword "
                  "outgoing user"):
             w = 10 # slow
@@ -2212,33 +2437,32 @@
         return w + wa, (op, x[1], ta)
     return 1, x
 
+def optimize(tree):
+    _weight, newtree = _optimize(tree, small=True)
+    return newtree
+
 # the set of valid characters for the initial letter of symbols in
 # alias declarations and definitions
 _aliassyminitletters = set(c for c in [chr(i) for i in xrange(256)]
                            if c.isalnum() or c in '._@$' or ord(c) > 127)
 
-def _tokenizealias(program, lookup=None):
-    """Parse alias declaration/definition into a stream of tokens
-
-    This allows symbol names to use also ``$`` as an initial letter
-    (for backward compatibility), and callers of this function should
-    examine whether ``$`` is used also for unexpected symbols or not.
-    """
-    return tokenize(program, lookup=lookup,
-                    syminitletters=_aliassyminitletters)
-
-def _parsealias(spec):
-    """Parse alias declaration/definition ``spec``
-
-    >>> _parsealias('foo($1)')
+def _parsewith(spec, lookup=None, syminitletters=None):
+    """Generate a parse tree of given spec with given tokenizing options
+
+    >>> _parsewith('foo($1)', syminitletters=_aliassyminitletters)
     ('func', ('symbol', 'foo'), ('symbol', '$1'))
-    >>> _parsealias('foo bar')
+    >>> _parsewith('$1')
+    Traceback (most recent call last):
+      ...
+    ParseError: ("syntax error in revset '$1'", 0)
+    >>> _parsewith('foo bar')
     Traceback (most recent call last):
       ...
     ParseError: ('invalid token', 4)
     """
     p = parser.parser(elements)
-    tree, pos = p.parse(_tokenizealias(spec))
+    tree, pos = p.parse(tokenize(spec, lookup=lookup,
+                                 syminitletters=syminitletters))
     if pos != len(spec):
         raise error.ParseError(_('invalid token'), pos)
     return parser.simplifyinfixops(tree, ('list', 'or'))
@@ -2246,7 +2470,16 @@
 class _aliasrules(parser.basealiasrules):
     """Parsing and expansion rule set of revset aliases"""
     _section = _('revset alias')
-    _parse = staticmethod(_parsealias)
+
+    @staticmethod
+    def _parse(spec):
+        """Parse alias declaration/definition ``spec``
+
+        This allows symbol names to use also ``$`` as an initial letter
+        (for backward compatibility), and callers of this function should
+        examine whether ``$`` is used also for unexpected symbols or not.
+        """
+        return _parsewith(spec, syminitletters=_aliassyminitletters)
 
     @staticmethod
     def _trygetfunc(tree):
@@ -2286,11 +2519,7 @@
         return tuple(foldconcat(t) for t in tree)
 
 def parse(spec, lookup=None):
-    p = parser.parser(elements)
-    tree, pos = p.parse(tokenize(spec, lookup=lookup))
-    if pos != len(spec):
-        raise error.ParseError(_("invalid token"), pos)
-    return parser.simplifyinfixops(tree, ('list', 'or'))
+    return _parsewith(spec, lookup=lookup)
 
 def posttreebuilthook(tree, repo):
     # hook for extensions to execute code on the optimized tree
@@ -2327,7 +2556,7 @@
     if ui:
         tree = expandaliases(ui, tree, showwarning=ui.warn)
     tree = foldconcat(tree)
-    weight, tree = optimize(tree, True)
+    tree = optimize(tree)
     posttreebuilthook(tree, repo)
     def mfunc(repo, subset=None):
         if subset is None:
@@ -2506,6 +2735,10 @@
         """True if the set will iterate in descending order"""
         raise NotImplementedError()
 
+    def istopo(self):
+        """True if the set will iterate in topological order"""
+        raise NotImplementedError()
+
     @util.cachefunc
     def min(self):
         """return the minimum element in the set"""
@@ -2591,12 +2824,13 @@
 
     Every method in this class should be implemented by any smartset class.
     """
-    def __init__(self, data=(), datarepr=None):
+    def __init__(self, data=(), datarepr=None, istopo=False):
         """
         datarepr: a tuple of (format, obj, ...), a function or an object that
                   provides a printable representation of the given data.
         """
         self._ascending = None
+        self._istopo = istopo
         if not isinstance(data, list):
             if isinstance(data, set):
                 self._set = data
@@ -2639,12 +2873,14 @@
 
     def sort(self, reverse=False):
         self._ascending = not bool(reverse)
+        self._istopo = False
 
     def reverse(self):
         if self._ascending is None:
             self._list.reverse()
         else:
             self._ascending = not self._ascending
+        self._istopo = False
 
     def __len__(self):
         return len(self._list)
@@ -2665,6 +2901,14 @@
             return True
         return self._ascending is not None and not self._ascending
 
+    def istopo(self):
+        """Is the collection in topological order or not.
+
+        This is part of the mandatory API for smartset."""
+        if len(self) <= 1:
+            return True
+        return self._istopo
+
     def first(self):
         if self:
             if self._ascending is None:
@@ -2741,9 +2985,16 @@
         return lambda: self._iterfilter(it())
 
     def __nonzero__(self):
-        fast = self.fastasc
-        if fast is None:
-            fast = self.fastdesc
+        fast = None
+        candidates = [self.fastasc if self.isascending() else None,
+                      self.fastdesc if self.isdescending() else None,
+                      self.fastasc,
+                      self.fastdesc]
+        for candidate in candidates:
+            if candidate is not None:
+                fast = candidate
+                break
+
         if fast is not None:
             it = fast()
         else:
@@ -2773,6 +3024,9 @@
     def isdescending(self):
         return self._subset.isdescending()
 
+    def istopo(self):
+        return self._subset.istopo()
+
     def first(self):
         for x in self:
             return x
@@ -2816,14 +3070,14 @@
         # Consume both iterators in an ordered way until one is empty
         while True:
             if val1 is None:
-                val1 = iter1.next()
+                val1 = next(iter1)
             if val2 is None:
-                val2 = iter2.next()
-            next = choice(val1, val2)
-            yield next
-            if val1 == next:
+                val2 = next(iter2)
+            n = choice(val1, val2)
+            yield n
+            if val1 == n:
                 val1 = None
-            if val2 == next:
+            if val2 == n:
                 val2 = None
     except StopIteration:
         # Flush any remaining values and consume the other one
@@ -3019,6 +3273,12 @@
     def isdescending(self):
         return self._ascending is not None and not self._ascending
 
+    def istopo(self):
+        # not worth the trouble asserting if the two sets combined are still
+        # in topological order. Use the sort() predicate to explicitly sort
+        # again instead.
+        return False
+
     def reverse(self):
         if self._ascending is None:
             self._list.reverse()
@@ -3186,6 +3446,12 @@
     def isdescending(self):
         return not self._ascending
 
+    def istopo(self):
+        # not worth the trouble asserting if the two sets combined are still
+        # in topological order. Use the sort() predicate to explicitly sort
+        # again instead.
+        return False
+
     def first(self):
         if self._ascending:
             it = self.fastasc
@@ -3248,6 +3514,12 @@
     def reverse(self):
         self._ascending = not self._ascending
 
+    def istopo(self):
+        # not worth the trouble asserting if the two sets combined are still
+        # in topological order. Use the sort() predicate to explicitly sort
+        # again instead.
+        return False
+
     def _iterfilter(self, iterrange):
         s = self._hiddenrevs
         for r in iterrange:
--- a/mercurial/scmutil.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/scmutil.py	Tue Jun 14 14:52:58 2016 -0500
@@ -10,6 +10,7 @@
 import contextlib
 import errno
 import glob
+import hashlib
 import os
 import re
 import shutil
@@ -224,7 +225,7 @@
     key = None
     revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
     if revs:
-        s = util.sha1()
+        s = hashlib.sha1()
         for rev in revs:
             s.update('%s;' % rev)
         key = s.digest()
@@ -377,8 +378,18 @@
     def readlock(self, path):
         return util.readlock(self.join(path))
 
-    def rename(self, src, dst):
-        return util.rename(self.join(src), self.join(dst))
+    def rename(self, src, dst, checkambig=False):
+        dstpath = self.join(dst)
+        oldstat = checkambig and util.filestat(dstpath)
+        if oldstat and oldstat.stat:
+            ret = util.rename(self.join(src), dstpath)
+            newstat = util.filestat(dstpath)
+            if newstat.isambig(oldstat):
+                # stat of renamed file is ambiguous to original one
+                advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
+                os.utime(dstpath, (advanced, advanced))
+            return ret
+        return util.rename(self.join(src), dstpath)
 
     def readlink(self, path):
         return os.readlink(self.join(path))
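The checkambig handling added to rename() exists because a file replaced within the same second as its predecessor can keep an identical size and mtime, so mtime-based caches would miss the change; the fix nudges the mtime forward by one second, masked to 31 bits the way Mercurial stores it. A sketch of just that adjustment (advancemtime is a hypothetical helper):

import os

def advancemtime(path, oldmtime):
    st = os.stat(path)
    if int(st.st_mtime) == int(oldmtime):
        # same apparent mtime as the file we replaced: advance it so
        # mtime-based caches notice the new content
        advanced = (int(oldmtime) + 1) & 0x7fffffff
        os.utime(path, (advanced, advanced))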
@@ -502,7 +513,7 @@
         os.chmod(name, self.createmode & 0o666)
 
     def __call__(self, path, mode="r", text=False, atomictemp=False,
-                 notindexed=False, backgroundclose=False):
+                 notindexed=False, backgroundclose=False, checkambig=False):
         '''Open ``path`` file, which is relative to vfs root.
 
         Newly created directories are marked as "not to be indexed by
@@ -521,6 +532,8 @@
            closing a file on a background thread and reopening it. (If the
            file were opened multiple times, there could be unflushed data
            because the original file handle hasn't been flushed/closed yet.)
+
+        ``checkambig`` is passed to atomictempfile (valid only for writing).
         '''
         if self._audit:
             r = util.checkosfilename(path)
@@ -540,7 +553,8 @@
             if basename:
                 if atomictemp:
                     util.makedirs(dirname, self.createmode, notindexed)
-                    return util.atomictempfile(f, mode, self.createmode)
+                    return util.atomictempfile(f, mode, self.createmode,
+                                               checkambig=checkambig)
                 try:
                     if 'w' in mode:
                         util.unlink(f)
@@ -751,7 +765,7 @@
 
 def _pairspec(revspec):
     tree = revset.parse(revspec)
-    tree = revset.optimize(tree, True)[1]  # fix up "x^:y" -> "(x^):y"
+    tree = revset.optimize(tree)  # fix up "x^:y" -> "(x^):y"
     return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
 
 def revpair(repo, revs):
@@ -1375,4 +1389,3 @@
             return
 
         self._queue.put(fh, block=True, timeout=None)
-
--- a/mercurial/similar.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/similar.py	Tue Jun 14 14:52:58 2016 -0500
@@ -7,6 +7,8 @@
 
 from __future__ import absolute_import
 
+import hashlib
+
 from .i18n import _
 from . import (
     bdiff,
@@ -27,14 +29,14 @@
     for i, fctx in enumerate(removed):
         repo.ui.progress(_('searching for exact renames'), i, total=numfiles,
                          unit=_('files'))
-        h = util.sha1(fctx.data()).digest()
+        h = hashlib.sha1(fctx.data()).digest()
         hashes[h] = fctx
 
     # For each added file, see if it corresponds to a removed file.
     for i, fctx in enumerate(added):
         repo.ui.progress(_('searching for exact renames'), i + len(removed),
                 total=numfiles, unit=_('files'))
-        h = util.sha1(fctx.data()).digest()
+        h = hashlib.sha1(fctx.data()).digest()
         if h in hashes:
             yield (hashes[h], fctx)
 
@@ -106,4 +108,3 @@
         for (a, b, score) in _findsimilarmatches(repo,
                 sorted(addedfiles), sorted(removedfiles), threshold):
             yield (a.path(), b.path(), score)
-
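
The exact-rename pass above hashes each removed file's content with SHA-1 and then looks for an added file whose content produces the same digest. A minimal illustrative sketch of that idea, using plain (name, content) pairs instead of Mercurial file contexts:

    import hashlib

    def exactmatches(removed, added):
        # hash every removed file's content ...
        hashes = {}
        for name, data in removed:
            hashes[hashlib.sha1(data).digest()] = name
        # ... then report added files whose content matches a stored digest
        for name, data in added:
            h = hashlib.sha1(data).digest()
            if h in hashes:
                yield hashes[h], name   # (old name, new name)
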
--- a/mercurial/sslutil.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/sslutil.py	Tue Jun 14 14:52:58 2016 -0500
@@ -9,6 +9,7 @@
 
 from __future__ import absolute_import
 
+import hashlib
 import os
 import ssl
 import sys
@@ -106,8 +107,115 @@
 
             return ssl.wrap_socket(socket, **args)
 
-def wrapsocket(sock, keyfile, certfile, ui, cert_reqs=ssl.CERT_NONE,
-               ca_certs=None, serverhostname=None):
+def _hostsettings(ui, hostname):
+    """Obtain security settings for a hostname.
+
+    Returns a dict of settings relevant to that hostname.
+    """
+    s = {
+        # Whether we should attempt to load default/available CA certs
+        # if an explicit ``cafile`` is not defined.
+        'allowloaddefaultcerts': True,
+        # List of 2-tuple of (hash algorithm, hash).
+        'certfingerprints': [],
+        # Path to file containing concatenated CA certs. Used by
+        # SSLContext.load_verify_locations().
+        'cafile': None,
+        # Whether certificate verification should be disabled.
+        'disablecertverification': False,
+        # Whether the legacy [hostfingerprints] section has data for this host.
+        'legacyfingerprint': False,
+        # ssl.CERT_* constant used by SSLContext.verify_mode.
+        'verifymode': None,
+    }
+
+    # Look for fingerprints in [hostsecurity] section. Value is a list
+    # of <alg>:<fingerprint> strings.
+    fingerprints = ui.configlist('hostsecurity', '%s:fingerprints' % hostname,
+                                 [])
+    for fingerprint in fingerprints:
+        if not (fingerprint.startswith(('sha1:', 'sha256:', 'sha512:'))):
+            raise error.Abort(_('invalid fingerprint for %s: %s') % (
+                                hostname, fingerprint),
+                              hint=_('must begin with "sha1:", "sha256:", '
+                                     'or "sha512:"'))
+
+        alg, fingerprint = fingerprint.split(':', 1)
+        fingerprint = fingerprint.replace(':', '').lower()
+        s['certfingerprints'].append((alg, fingerprint))
+
+    # Fingerprints from [hostfingerprints] are always SHA-1.
+    for fingerprint in ui.configlist('hostfingerprints', hostname, []):
+        fingerprint = fingerprint.replace(':', '').lower()
+        s['certfingerprints'].append(('sha1', fingerprint))
+        s['legacyfingerprint'] = True
+
+    # If a host cert fingerprint is defined, it is the only thing that
+    # matters. No need to validate CA certs.
+    if s['certfingerprints']:
+        s['verifymode'] = ssl.CERT_NONE
+
+    # If --insecure is used, don't take CAs into consideration.
+    elif ui.insecureconnections:
+        s['disablecertverification'] = True
+        s['verifymode'] = ssl.CERT_NONE
+
+    if ui.configbool('devel', 'disableloaddefaultcerts'):
+        s['allowloaddefaultcerts'] = False
+
+    # If both fingerprints and a per-host ca file are specified, issue a warning
+    # because users should not be surprised about what security is or isn't
+    # being performed.
+    cafile = ui.config('hostsecurity', '%s:verifycertsfile' % hostname)
+    if s['certfingerprints'] and cafile:
+        ui.warn(_('(hostsecurity.%s:verifycertsfile ignored when host '
+                  'fingerprints defined; using host fingerprints for '
+                  'verification)\n') % hostname)
+
+    # Try to hook up CA certificate validation unless something above
+    # makes it not necessary.
+    if s['verifymode'] is None:
+        # Look at per-host ca file first.
+        if cafile:
+            cafile = util.expandpath(cafile)
+            if not os.path.exists(cafile):
+                raise error.Abort(_('path specified by %s does not exist: %s') %
+                                  ('hostsecurity.%s:verifycertsfile' % hostname,
+                                   cafile))
+            s['cafile'] = cafile
+        else:
+            # Find global certificates file in config.
+            cafile = ui.config('web', 'cacerts')
+
+            if cafile:
+                cafile = util.expandpath(cafile)
+                if not os.path.exists(cafile):
+                    raise error.Abort(_('could not find web.cacerts: %s') %
+                                      cafile)
+            else:
+                # No global CA certs. See if we can load defaults.
+                cafile = _defaultcacerts()
+                if cafile:
+                    ui.debug('using %s to enable OS X system CA\n' % cafile)
+
+            s['cafile'] = cafile
+
+        # Require certificate validation if CA certs are being loaded and
+        # verification hasn't been disabled above.
+        if cafile or (_canloaddefaultcerts and s['allowloaddefaultcerts']):
+            s['verifymode'] = ssl.CERT_REQUIRED
+        else:
+            # At this point we don't have a fingerprint, aren't being
+            # explicitly insecure, and can't load CA certs. Connecting
+            # at this point is insecure. But we do it for BC reasons.
+            # TODO abort here to make secure by default.
+            s['verifymode'] = ssl.CERT_NONE
+
+    assert s['verifymode'] is not None
+
+    return s
+
+def wrapsocket(sock, keyfile, certfile, ui, serverhostname=None):
     """Add SSL/TLS to a socket.
 
     This is a glorified wrapper for ``ssl.wrap_socket()``. It makes sane
@@ -120,6 +228,11 @@
       server (and client) support SNI, this tells the server which certificate
       to use.
     """
+    if not serverhostname:
+        raise error.Abort('serverhostname argument is required')
+
+    settings = _hostsettings(ui, serverhostname)
+
     # Despite its name, PROTOCOL_SSLv23 selects the highest protocol
     # that both ends support, including TLS protocols. On legacy stacks,
     # the highest it likely goes in TLS 1.0. On modern stacks, it can
@@ -145,7 +258,7 @@
     sslcontext.options |= OP_NO_SSLv2 | OP_NO_SSLv3
 
     # This still works on our fake SSLContext.
-    sslcontext.verify_mode = cert_reqs
+    sslcontext.verify_mode = settings['verifymode']
 
     if certfile is not None:
         def password():
@@ -153,11 +266,15 @@
             return ui.getpass(_('passphrase for %s: ') % f, '')
         sslcontext.load_cert_chain(certfile, keyfile, password)
 
-    if ca_certs is not None:
-        sslcontext.load_verify_locations(cafile=ca_certs)
-    else:
+    if settings['cafile'] is not None:
+        sslcontext.load_verify_locations(cafile=settings['cafile'])
+        caloaded = True
+    elif settings['allowloaddefaultcerts']:
         # This is a no-op on old Python.
         sslcontext.load_default_certs()
+        caloaded = True
+    else:
+        caloaded = False
 
     sslsocket = sslcontext.wrap_socket(sock, server_hostname=serverhostname)
     # check if wrap_socket failed silently because socket had been
@@ -165,6 +282,14 @@
     # - see http://bugs.python.org/issue13721
     if not sslsocket.cipher():
         raise error.Abort(_('ssl connection failed'))
+
+    sslsocket._hgstate = {
+        'caloaded': caloaded,
+        'hostname': serverhostname,
+        'settings': settings,
+        'ui': ui,
+    }
+
     return sslsocket
 
 def _verifycert(cert, hostname):
@@ -222,96 +347,86 @@
             exe.startswith('/system/library/frameworks/python.framework/'))
 
 def _defaultcacerts():
-    """return path to CA certificates; None for system's store; ! to disable"""
+    """return path to default CA certificates or None."""
     if _plainapplepython():
         dummycert = os.path.join(os.path.dirname(__file__), 'dummycert.pem')
         if os.path.exists(dummycert):
             return dummycert
-    if _canloaddefaultcerts:
-        return None
-    return '!'
+
+    return None
+
+def validatesocket(sock):
+    """Validate a socket meets security requiremnets.
+
+    The passed socket must have been created with ``wrapsocket()``.
+    """
+    host = sock._hgstate['hostname']
+    ui = sock._hgstate['ui']
+    settings = sock._hgstate['settings']
 
-def sslkwargs(ui, host):
-    kws = {'ui': ui}
-    hostfingerprint = ui.config('hostfingerprints', host)
-    if hostfingerprint:
-        return kws
-    cacerts = ui.config('web', 'cacerts')
-    if cacerts == '!':
-        pass
-    elif cacerts:
-        cacerts = util.expandpath(cacerts)
-        if not os.path.exists(cacerts):
-            raise error.Abort(_('could not find web.cacerts: %s') % cacerts)
-    else:
-        cacerts = _defaultcacerts()
-        if cacerts and cacerts != '!':
-            ui.debug('using %s to enable OS X system CA\n' % cacerts)
-        ui.setconfig('web', 'cacerts', cacerts, 'defaultcacerts')
-    if cacerts != '!':
-        kws.update({'ca_certs': cacerts,
-                    'cert_reqs': ssl.CERT_REQUIRED,
-                    })
-    return kws
+    try:
+        peercert = sock.getpeercert(True)
+        peercert2 = sock.getpeercert()
+    except AttributeError:
+        raise error.Abort(_('%s ssl connection error') % host)
+
+    if not peercert:
+        raise error.Abort(_('%s certificate error: '
+                           'no certificate received') % host)
 
-class validator(object):
-    def __init__(self, ui, host):
-        self.ui = ui
-        self.host = host
-
-    def __call__(self, sock, strict=False):
-        host = self.host
-
-        if not sock.cipher(): # work around http://bugs.python.org/issue13721
-            raise error.Abort(_('%s ssl connection error') % host)
-        try:
-            peercert = sock.getpeercert(True)
-            peercert2 = sock.getpeercert()
-        except AttributeError:
-            raise error.Abort(_('%s ssl connection error') % host)
+    if settings['disablecertverification']:
+        # We don't print the certificate fingerprint because it shouldn't
+        # be necessary: if the user requested certificate verification be
+        # disabled, they presumably already saw a message about the inability
+        # to verify the certificate and this message would have printed the
+        # fingerprint. So printing the fingerprint here adds little to no
+        # value.
+        ui.warn(_('warning: connection security to %s is disabled per current '
+                  'settings; communication is susceptible to eavesdropping '
+                  'and tampering\n') % host)
+        return
 
-        if not peercert:
-            raise error.Abort(_('%s certificate error: '
-                               'no certificate received') % host)
+    # If a certificate fingerprint is pinned, use it and only it to
+    # validate the remote cert.
+    peerfingerprints = {
+        'sha1': hashlib.sha1(peercert).hexdigest(),
+        'sha256': hashlib.sha256(peercert).hexdigest(),
+        'sha512': hashlib.sha512(peercert).hexdigest(),
+    }
+
+    def fmtfingerprint(s):
+        return ':'.join([s[x:x + 2] for x in range(0, len(s), 2)])
+
+    nicefingerprint = 'sha256:%s' % fmtfingerprint(peerfingerprints['sha256'])
+
+    if settings['certfingerprints']:
+        for hash, fingerprint in settings['certfingerprints']:
+            if peerfingerprints[hash].lower() == fingerprint:
+                ui.debug('%s certificate matched fingerprint %s:%s\n' %
+                         (host, hash, fmtfingerprint(fingerprint)))
+                return
 
-        # If a certificate fingerprint is pinned, use it and only it to
-        # validate the remote cert.
-        hostfingerprints = self.ui.configlist('hostfingerprints', host)
-        peerfingerprint = util.sha1(peercert).hexdigest()
-        nicefingerprint = ":".join([peerfingerprint[x:x + 2]
-            for x in xrange(0, len(peerfingerprint), 2)])
-        if hostfingerprints:
-            fingerprintmatch = False
-            for hostfingerprint in hostfingerprints:
-                if peerfingerprint.lower() == \
-                        hostfingerprint.replace(':', '').lower():
-                    fingerprintmatch = True
-                    break
-            if not fingerprintmatch:
-                raise error.Abort(_('certificate for %s has unexpected '
-                                   'fingerprint %s') % (host, nicefingerprint),
-                                 hint=_('check hostfingerprint configuration'))
-            self.ui.debug('%s certificate matched fingerprint %s\n' %
-                          (host, nicefingerprint))
-            return
+        # Pinned fingerprint didn't match. This is a fatal error.
+        if settings['legacyfingerprint']:
+            section = 'hostfingerprint'
+            nice = fmtfingerprint(peerfingerprints['sha1'])
+        else:
+            section = 'hostsecurity'
+            nice = '%s:%s' % (hash, fmtfingerprint(peerfingerprints[hash]))
+        raise error.Abort(_('certificate for %s has unexpected '
+                            'fingerprint %s') % (host, nice),
+                          hint=_('check %s configuration') % section)
 
-        # No pinned fingerprint. Establish trust by looking at the CAs.
-        cacerts = self.ui.config('web', 'cacerts')
-        if cacerts != '!':
-            msg = _verifycert(peercert2, host)
-            if msg:
-                raise error.Abort(_('%s certificate error: %s') % (host, msg),
-                                 hint=_('configure hostfingerprint %s or use '
-                                        '--insecure to connect insecurely') %
-                                      nicefingerprint)
-            self.ui.debug('%s certificate successfully verified\n' % host)
-        elif strict:
-            raise error.Abort(_('%s certificate with fingerprint %s not '
-                               'verified') % (host, nicefingerprint),
-                             hint=_('check hostfingerprints or web.cacerts '
-                                     'config setting'))
-        else:
-            self.ui.warn(_('warning: %s certificate with fingerprint %s not '
-                           'verified (check hostfingerprints or web.cacerts '
-                           'config setting)\n') %
-                         (host, nicefingerprint))
+    if not sock._hgstate['caloaded']:
+        ui.warn(_('warning: certificate for %s not verified '
+                  '(set hostsecurity.%s:fingerprints=%s or web.cacerts '
+                  'config settings)\n') % (host, host, nicefingerprint))
+        return
+
+    msg = _verifycert(peercert2, host)
+    if msg:
+        raise error.Abort(_('%s certificate error: %s') % (host, msg),
+                         hint=_('set hostsecurity.%s:fingerprints=%s '
+                                'config setting or use --insecure to connect '
+                                'insecurely') %
+                              (host, nicefingerprint))
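
validatesocket() checks a pinned fingerprint by hashing the peer's DER certificate with SHA-1, SHA-256 and SHA-512, comparing the lowercase hex digests, and formatting them as colon-separated pairs for display. A small self-contained sketch of that check, separate from Mercurial's ui/config plumbing; the pin list mirrors the 'certfingerprints' entries built by _hostsettings():

    import hashlib

    def fmtfingerprint(hexdigest):
        # 'ab12cd...' -> 'ab:12:cd:...'
        return ':'.join(hexdigest[i:i + 2] for i in range(0, len(hexdigest), 2))

    def matchespin(dercert, pins):
        # pins: list of (algorithm, lowercase hex digest without colons),
        # e.g. [('sha256', 'deadbeef...')]
        digests = {
            'sha1': hashlib.sha1(dercert).hexdigest(),
            'sha256': hashlib.sha256(dercert).hexdigest(),
            'sha512': hashlib.sha512(dercert).hexdigest(),
        }
        return any(digests[alg] == fp for alg, fp in pins)
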
--- a/mercurial/store.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/store.py	Tue Jun 14 14:52:58 2016 -0500
@@ -8,6 +8,7 @@
 from __future__ import absolute_import
 
 import errno
+import hashlib
 import os
 import stat
 
@@ -19,8 +20,6 @@
     util,
 )
 
-_sha = util.sha1
-
 # This avoids a collision between a file named foo and a dir named
 # foo.i or foo.d
 def _encodedir(path):
@@ -57,6 +56,23 @@
             .replace(".i.hg/", ".i/")
             .replace(".hg.hg/", ".hg/"))
 
+def _reserved():
+    ''' characters that are problematic for filesystems
+
+    * ascii control characters (0..31)
+    * ascii hi (126..255)
+    * windows specials
+
+    these characters will be escaped by the encode functions
+    '''
+    winreserved = [ord(x) for x in '\\:*?"<>|']
+    for x in range(32):
+        yield x
+    for x in range(126, 256):
+        yield x
+    for x in winreserved:
+        yield x
+
 def _buildencodefun():
     '''
     >>> enc, dec = _buildencodefun()
@@ -82,11 +98,10 @@
     'the\\x07quick\\xadshot'
     '''
     e = '_'
-    winreserved = [ord(x) for x in '\\:*?"<>|']
     cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
-    for x in (range(32) + range(126, 256) + winreserved):
+    for x in _reserved():
         cmap[chr(x)] = "~%02x" % x
-    for x in range(ord("A"), ord("Z") + 1) + [ord(e)]:
+    for x in list(range(ord("A"), ord("Z") + 1)) + [ord(e)]:
         cmap[chr(x)] = e + chr(x).lower()
     dmap = {}
     for k, v in cmap.iteritems():
@@ -134,9 +149,8 @@
     >>> f('the\x07quick\xADshot')
     'the~07quick~adshot'
     '''
-    winreserved = [ord(x) for x in '\\:*?"<>|']
     cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
-    for x in (range(32) + range(126, 256) + winreserved):
+    for x in _reserved():
         cmap[chr(x)] = "~%02x" % x
     for x in range(ord("A"), ord("Z") + 1):
         cmap[chr(x)] = chr(x).lower()
@@ -196,7 +210,7 @@
 _maxshortdirslen = 8 * (_dirprefixlen + 1) - 4
 
 def _hashencode(path, dotencode):
-    digest = _sha(path).hexdigest()
+    digest = hashlib.sha1(path).hexdigest()
     le = lowerencode(path[5:]).split('/') # skips prefix 'data/' or 'meta/'
     parts = _auxencode(le, dotencode)
     basename = parts[-1]
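
_reserved() above enumerates the byte values that the store encoding must escape; _buildencodefun() and lowerencode map each of them to a '~%02x' sequence. A simplified sketch of that mapping (it leaves out the uppercase and '_' handling the real encoder also performs):

    def _reserved():
        # control characters, high bytes, and Windows-special characters
        for x in range(32):
            yield x
        for x in range(126, 256):
            yield x
        for x in (ord(c) for c in '\\:*?"<>|'):
            yield x

    cmap = dict((chr(x), chr(x)) for x in range(127))
    for x in _reserved():
        cmap[chr(x)] = '~%02x' % x

    def encode(path):
        return ''.join(cmap.get(c, c) for c in path)

    # encode('foo:bar?') -> 'foo~3abar~3f'
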
--- a/mercurial/subrepo.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/subrepo.py	Tue Jun 14 14:52:58 2016 -0500
@@ -9,6 +9,7 @@
 
 import copy
 import errno
+import hashlib
 import os
 import posixpath
 import re
@@ -50,7 +51,7 @@
 
 def _getstorehashcachename(remotepath):
     '''get a unique filename for the store hash cache of a remote repository'''
-    return util.sha1(_expandedabspath(remotepath)).hexdigest()[0:12]
+    return hashlib.sha1(_expandedabspath(remotepath)).hexdigest()[0:12]
 
 class SubrepoAbort(error.Abort):
     """Exception class used to avoid handling a subrepo error more than once"""
@@ -585,7 +586,7 @@
         return 1
 
     def revert(self, substate, *pats, **opts):
-        self.ui.warn('%s: reverting %s subrepos is unsupported\n' \
+        self.ui.warn(_('%s: reverting %s subrepos is unsupported\n') \
             % (substate[0], substate[2]))
         return []
 
@@ -659,7 +660,7 @@
         yield '# %s\n' % _expandedabspath(remotepath)
         vfs = self._repo.vfs
         for relname in filelist:
-            filehash = util.sha1(vfs.tryread(relname)).hexdigest()
+            filehash = hashlib.sha1(vfs.tryread(relname)).hexdigest()
             yield '%s = %s\n' % (relname, filehash)
 
     @propertycache
--- a/mercurial/tags.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/tags.py	Tue Jun 14 14:52:58 2016 -0500
@@ -292,7 +292,7 @@
     cachehash = None
     if cachefile:
         try:
-            validline = cachelines.next()
+            validline = next(cachelines)
             validline = validline.split()
             cacherev = int(validline[0])
             cachenode = bin(validline[1])
--- a/mercurial/templater.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/templater.py	Tue Jun 14 14:52:58 2016 -0500
@@ -724,6 +724,25 @@
 
     return minirst.format(text, style=style, keep=['verbose'])
 
+@templatefunc('separate(sep, args)')
+def separate(context, mapping, args):
+    """Add a separator between non-empty arguments."""
+    if not args:
+        # i18n: "separate" is a keyword
+        raise error.ParseError(_("separate expects at least one argument"))
+
+    sep = evalstring(context, mapping, args[0])
+    first = True
+    for arg in args[1:]:
+        argstr = evalstring(context, mapping, arg)
+        if not argstr:
+            continue
+        if first:
+            first = False
+        else:
+            yield sep
+        yield argstr
+
 @templatefunc('shortest(node, minlength=4)')
 def shortest(context, mapping, args):
     """Obtain the shortest representation of
--- a/mercurial/transaction.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/transaction.py	Tue Jun 14 14:52:58 2016 -0500
@@ -31,10 +31,9 @@
     'dirstate'
 ])
 
-class GenerationGroup(object):
-    ALL='all'
-    PREFINALIZE='prefinalize'
-    POSTFINALIZE='postfinalize'
+gengroupall='all'
+gengroupprefinalize='prefinalize'
+gengrouppostfinalize='postfinalize'
 
 def active(func):
     def _active(self, *args, **kwds):
@@ -289,7 +288,7 @@
         # but for bookmarks that are handled outside this mechanism.
         self._filegenerators[genid] = (order, filenames, genfunc, location)
 
-    def _generatefiles(self, suffix='', group=GenerationGroup.ALL):
+    def _generatefiles(self, suffix='', group=gengroupall):
         # write files registered for generation
         any = False
         for id, entry in sorted(self._filegenerators.iteritems()):
@@ -297,8 +296,8 @@
             order, filenames, genfunc, location = entry
 
             # for generation at closing, check if it's before or after finalize
-            postfinalize = group == GenerationGroup.POSTFINALIZE
-            if (group != GenerationGroup.ALL and
+            postfinalize = group == gengrouppostfinalize
+            if (group != gengroupall and
                 (id in postfinalizegenerators) != (postfinalize)):
                 continue
 
@@ -311,7 +310,8 @@
                         self.registertmp(name, location=location)
                     else:
                         self.addbackup(name, location=location)
-                    files.append(vfs(name, 'w', atomictemp=True))
+                    files.append(vfs(name, 'w', atomictemp=True,
+                                     checkambig=not suffix))
                 genfunc(*files)
             finally:
                 for f in files:
@@ -427,13 +427,13 @@
         '''commit the transaction'''
         if self.count == 1:
             self.validator(self)  # will raise exception if needed
-            self._generatefiles(group=GenerationGroup.PREFINALIZE)
+            self._generatefiles(group=gengroupprefinalize)
             categories = sorted(self._finalizecallback)
             for cat in categories:
                 self._finalizecallback[cat](self)
             # Prevent double usage and help clear cycles.
             self._finalizecallback = None
-            self._generatefiles(group=GenerationGroup.POSTFINALIZE)
+            self._generatefiles(group=gengrouppostfinalize)
 
         self.count -= 1
         if self.count != 0:
--- a/mercurial/ui.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/ui.py	Tue Jun 14 14:52:58 2016 -0500
@@ -107,6 +107,8 @@
         self._trustusers = set()
         self._trustgroups = set()
         self.callhooks = True
+        # Insecure server connections requested.
+        self.insecureconnections = False
 
         if src:
             self.fout = src.fout
@@ -120,6 +122,7 @@
             self._trustgroups = src._trustgroups.copy()
             self.environ = src.environ
             self.callhooks = src.callhooks
+            self.insecureconnections = src.insecureconnections
             self.fixconfig()
         else:
             self.fout = sys.stdout
@@ -1135,12 +1138,15 @@
         '''
         return msg
 
-    def develwarn(self, msg, stacklevel=1):
+    def develwarn(self, msg, stacklevel=1, config=None):
         """issue a developer warning message
 
         Use 'stacklevel' to report the offender some layers further up in the
         stack.
         """
+        if not self.configbool('devel', 'all-warnings'):
+            if config is not None and not self.configbool('devel', config):
+                return
         msg = 'devel-warn: ' + msg
         stacklevel += 1 # get in develwarn
         if self.tracebackflag:
@@ -1166,7 +1172,7 @@
             return
         msg += ("\n(compatibility will be dropped after Mercurial-%s,"
                 " update your code.)") % version
-        self.develwarn(msg, stacklevel=2)
+        self.develwarn(msg, stacklevel=2, config='deprec-warn')
 
 class paths(dict):
     """Represents a collection of paths and their configs.
--- a/mercurial/url.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/url.py	Tue Jun 14 14:52:58 2016 -0500
@@ -354,9 +354,9 @@
                 _generic_proxytunnel(self)
                 host = self.realhostport.rsplit(':', 1)[0]
             self.sock = sslutil.wrapsocket(
-                self.sock, self.key_file, self.cert_file, serverhostname=host,
-                **sslutil.sslkwargs(self.ui, host))
-            sslutil.validator(self.ui, host)(self.sock)
+                self.sock, self.key_file, self.cert_file, ui=self.ui,
+                serverhostname=host)
+            sslutil.validatesocket(self.sock)
 
     class httpshandler(keepalive.KeepAliveHandler, urlreq.httpshandler):
         def __init__(self, ui):
--- a/mercurial/util.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/util.py	Tue Jun 14 14:52:58 2016 -0500
@@ -47,6 +47,7 @@
 
 for attr in (
     'empty',
+    'pickle',
     'queue',
     'urlerr',
     # we do import urlreq, but we do it outside the loop
@@ -63,9 +64,6 @@
 else:
     from . import posix as platform
 
-md5 = hashlib.md5
-sha1 = hashlib.sha1
-sha512 = hashlib.sha512
 _ = i18n._
 
 cachestat = platform.cachestat
@@ -136,9 +134,9 @@
     return getattr(thing, attr, _notset) is not _notset
 
 DIGESTS = {
-    'md5': md5,
-    'sha1': sha1,
-    'sha512': sha512,
+    'md5': hashlib.md5,
+    'sha1': hashlib.sha1,
+    'sha512': hashlib.sha512,
 }
 # List of digest types from strongest to weakest
 DIGESTS_BY_STRENGTH = ['sha512', 'sha1', 'md5']
@@ -1010,10 +1008,14 @@
 
     return check
 
-def copyfile(src, dest, hardlink=False, copystat=False):
+def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
     '''copy a file, preserving mode and optionally other stat info like
     atime/mtime'''
+    assert not (copystat and checkambig)
+    oldstat = None
     if os.path.lexists(dest):
+        if checkambig:
+            oldstat = checkambig and filestat(dest)
         unlink(dest)
     # hardlinks are problematic on CIFS, quietly ignore this flag
     # until we find a way to work around it cleanly (issue4546)
@@ -1035,6 +1037,12 @@
                 shutil.copystat(src, dest)
             else:
                 shutil.copymode(src, dest)
+                if oldstat and oldstat.stat:
+                    newstat = filestat(dest)
+                    if newstat.isambig(oldstat):
+                        # stat of copied file is ambiguous to original one
+                        advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
+                        os.utime(dest, (advanced, advanced))
         except shutil.Error as inst:
             raise Abort(str(inst))
 
@@ -1381,6 +1389,72 @@
         raise
     return temp
 
+class filestat(object):
+    """help to exactly detect change of a file
+
+    The 'stat' attribute is the result of 'os.stat()' if the specified
+    'path' exists. Otherwise, it is None. This spares callers of this
+    class a preparatory 'exists()' check.
+    """
+    def __init__(self, path):
+        try:
+            self.stat = os.stat(path)
+        except OSError as err:
+            if err.errno != errno.ENOENT:
+                raise
+            self.stat = None
+
+    __hash__ = object.__hash__
+
+    def __eq__(self, old):
+        try:
+            # if ambiguity between stat of new and old file is
+            # avoided, comparison of size, ctime and mtime is enough
+            # to exactly detect change of a file regardless of platform
+            return (self.stat.st_size == old.stat.st_size and
+                    self.stat.st_ctime == old.stat.st_ctime and
+                    self.stat.st_mtime == old.stat.st_mtime)
+        except AttributeError:
+            return False
+
+    def isambig(self, old):
+        """Examine whether new (= self) stat is ambiguous against old one
+
+        "S[N]" below means stat of a file at N-th change:
+
+        - S[n-1].ctime  < S[n].ctime: can detect change of a file
+        - S[n-1].ctime == S[n].ctime
+          - S[n-1].ctime  < S[n].mtime: means natural advancing (*1)
+          - S[n-1].ctime == S[n].mtime: is ambiguous (*2)
+          - S[n-1].ctime  > S[n].mtime: never occurs naturally (don't care)
+        - S[n-1].ctime  > S[n].ctime: never occurs naturally (don't care)
+
+        Case (*2) above means that a file was changed twice or more
+        within the same second (= S[n-1].ctime), so comparing the
+        timestamps is ambiguous.
+
+        The basic idea for avoiding such ambiguity is "advance mtime by
+        1 sec, if the timestamp is ambiguous".
+
+        But advancing mtime only in case (*2) doesn't work as
+        expected, because a naturally advanced S[n].mtime in case (*1)
+        might be equal to a manually advanced S[n-1 or earlier].mtime.
+
+        Therefore, all "S[n-1].ctime == S[n].ctime" cases should be
+        treated as ambiguous regardless of mtime, to avoid overlooking
+        a change because of a collision between such mtimes.
+
+        Advancing mtime "if isambig(oldstat)" ensures "S[n-1].mtime !=
+        S[n].mtime", even if size of a file isn't changed.
+        """
+        try:
+            return (self.stat.st_ctime == old.stat.st_ctime)
+        except AttributeError:
+            return False
+
+    def __ne__(self, other):
+        return not self == other
+
 class atomictempfile(object):
     '''writable file object that atomically updates a file
 
@@ -1390,11 +1464,12 @@
     visible. If the object is destroyed without being closed, all your
     writes are discarded.
     '''
-    def __init__(self, name, mode='w+b', createmode=None):
+    def __init__(self, name, mode='w+b', createmode=None, checkambig=False):
         self.__name = name      # permanent name
         self._tempname = mktempcopy(name, emptyok=('w' in mode),
                                     createmode=createmode)
         self._fp = posixfile(self._tempname, mode)
+        self._checkambig = checkambig
 
         # delegated methods
         self.write = self._fp.write
@@ -1405,7 +1480,17 @@
     def close(self):
         if not self._fp.closed:
             self._fp.close()
-            rename(self._tempname, localpath(self.__name))
+            filename = localpath(self.__name)
+            oldstat = self._checkambig and filestat(filename)
+            if oldstat and oldstat.stat:
+                rename(self._tempname, filename)
+                newstat = filestat(filename)
+                if newstat.isambig(oldstat):
+                    # stat of changed file is ambiguous to original one
+                    advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
+                    os.utime(filename, (advanced, advanced))
+            else:
+                rename(self._tempname, filename)
 
     def discard(self):
         if not self._fp.closed:
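
The checkambig machinery above works around replacements that leave size, ctime and mtime all unchanged: whenever the new stat's ctime equals the old one, the mtime is pushed one second forward so later change detection cannot be fooled. A compact sketch of the pattern used by atomictempfile.close() and vfs.rename(), with a hypothetical write callback standing in for the rename step:

    import os

    def replacewithcheck(write, target):
        # remember the old stat (if the target exists), replace the file,
        # then nudge mtime forward if the new stat is indistinguishable
        try:
            oldstat = os.stat(target)
        except OSError:
            oldstat = None
        write(target)                      # caller-supplied replacement step
        if oldstat is not None:
            newstat = os.stat(target)
            if newstat.st_ctime == oldstat.st_ctime:
                # ambiguous against the previous file; advance mtime by one
                # second, kept within 31 bits as in the code above
                advanced = (int(oldstat.st_mtime) + 1) & 0x7fffffff
                os.utime(target, (advanced, advanced))
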
--- a/mercurial/wireproto.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/mercurial/wireproto.py	Tue Jun 14 14:52:58 2016 -0500
@@ -7,6 +7,7 @@
 
 from __future__ import absolute_import
 
+import hashlib
 import itertools
 import os
 import sys
@@ -97,7 +98,7 @@
             batchablefn = getattr(mtd, 'batchable', None)
             if batchablefn is not None:
                 batchable = batchablefn(mtd.im_self, *args, **opts)
-                encargsorres, encresref = batchable.next()
+                encargsorres, encresref = next(batchable)
                 if encresref:
                     req.append((name, encargsorres,))
                     rsp.append((batchable, encresref, resref,))
@@ -115,7 +116,7 @@
         for encres, r in zip(encresults, rsp):
             batchable, encresref, resref = r
             encresref.set(encres)
-            resref.set(batchable.next())
+            resref.set(next(batchable))
 
 class remoteiterbatcher(peer.iterbatcher):
     def __init__(self, remote):
@@ -138,7 +139,7 @@
         for name, args, opts, resref in self.calls:
             mtd = getattr(self._remote, name)
             batchable = mtd.batchable(mtd.im_self, *args, **opts)
-            encargsorres, encresref = batchable.next()
+            encargsorres, encresref = next(batchable)
             assert encresref
             req.append((name, encargsorres))
             rsp.append((batchable, encresref))
@@ -150,7 +151,7 @@
         for (batchable, encresref), encres in itertools.izip(
                 self._rsp, self._resultiter):
             encresref.set(encres)
-            yield batchable.next()
+            yield next(batchable)
 
 # Forward a couple of names from peer to make wireproto interactions
 # slightly more sensible.
@@ -231,17 +232,19 @@
                             for k, v in argsdict.iteritems())
             cmds.append('%s %s' % (op, args))
         rsp = self._callstream("batch", cmds=';'.join(cmds))
-        # TODO this response parsing is probably suboptimal for large
-        # batches with large responses.
-        work = rsp.read(1024)
-        chunk = work
+        chunk = rsp.read(1024)
+        work = [chunk]
         while chunk:
-            while ';' in work:
-                one, work = work.split(';', 1)
+            while ';' not in chunk and chunk:
+                chunk = rsp.read(1024)
+                work.append(chunk)
+            merged = ''.join(work)
+            while ';' in merged:
+                one, merged = merged.split(';', 1)
                 yield unescapearg(one)
             chunk = rsp.read(1024)
-            work += chunk
-        yield unescapearg(work)
+            work = [merged, chunk]
+        yield unescapearg(''.join(work))
 
     def _submitone(self, op, args):
         return self._call(op, **args)
@@ -408,7 +411,7 @@
 
         if heads != ['force'] and self.capable('unbundlehash'):
             heads = encodelist(['hashed',
-                                util.sha1(''.join(sorted(heads))).digest()])
+                                hashlib.sha1(''.join(sorted(heads))).digest()])
         else:
             heads = encodelist(heads)
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/dummysmtpd.py	Tue Jun 14 14:52:58 2016 -0500
@@ -0,0 +1,81 @@
+#!/usr/bin/env python
+
+"""dummy SMTP server for use in tests"""
+
+from __future__ import absolute_import
+
+import asyncore
+import optparse
+import smtpd
+import ssl
+import sys
+
+from mercurial import (
+    cmdutil,
+)
+
+def log(msg):
+    sys.stdout.write(msg)
+    sys.stdout.flush()
+
+class dummysmtpserver(smtpd.SMTPServer):
+    def __init__(self, localaddr):
+        smtpd.SMTPServer.__init__(self, localaddr, remoteaddr=None)
+
+    def process_message(self, peer, mailfrom, rcpttos, data):
+        log('%s from=%s to=%s\n' % (peer[0], mailfrom, ', '.join(rcpttos)))
+
+class dummysmtpsecureserver(dummysmtpserver):
+    def __init__(self, localaddr, certfile):
+        dummysmtpserver.__init__(self, localaddr)
+        self._certfile = certfile
+
+    def handle_accept(self):
+        pair = self.accept()
+        if not pair:
+            return
+        conn, addr = pair
+        try:
+            # wrap_socket() would block, but we don't care
+            conn = ssl.wrap_socket(conn, server_side=True,
+                                   certfile=self._certfile,
+                                   ssl_version=ssl.PROTOCOL_TLSv1)
+        except ssl.SSLError:
+            log('%s ssl error\n' % addr[0])
+            conn.close()
+            return
+        smtpd.SMTPChannel(self, conn, addr)
+
+def run():
+    try:
+        asyncore.loop()
+    except KeyboardInterrupt:
+        pass
+
+def main():
+    op = optparse.OptionParser()
+    op.add_option('-d', '--daemon', action='store_true')
+    op.add_option('--daemon-postexec', action='append')
+    op.add_option('-p', '--port', type=int, default=8025)
+    op.add_option('-a', '--address', default='localhost')
+    op.add_option('--pid-file', metavar='FILE')
+    op.add_option('--tls', choices=['none', 'smtps'], default='none')
+    op.add_option('--certificate', metavar='FILE')
+
+    opts, args = op.parse_args()
+    if opts.tls == 'smtps' and not opts.certificate:
+        op.error('--certificate must be specified')
+
+    addr = (opts.address, opts.port)
+    def init():
+        if opts.tls == 'none':
+            dummysmtpserver(addr)
+        else:
+            dummysmtpsecureserver(addr, opts.certificate)
+        log('listening at %s:%d\n' % addr)
+
+    cmdutil.service(vars(opts), initfn=init, runfn=run,
+                    runargs=[sys.executable, __file__] + sys.argv[1:])
+
+if __name__ == '__main__':
+    main()
--- a/tests/dummyssh	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/dummyssh	Tue Jun 14 14:52:58 2016 -0500
@@ -1,7 +1,9 @@
 #!/usr/bin/env python
 
+from __future__ import absolute_import
+
+import os
 import sys
-import os
 
 os.chdir(os.getenv('TESTTMP'))
 
--- a/tests/f	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/f	Tue Jun 14 14:52:58 2016 -0500
@@ -23,7 +23,14 @@
   md5sum.py
 """
 
-import sys, os, errno, re, glob, optparse
+from __future__ import absolute_import
+
+import glob
+import hashlib
+import optparse
+import os
+import re
+import sys
 
 def visit(opts, filenames, outfile):
     """Process filenames in the way specified in opts, writing output to
@@ -74,17 +81,11 @@
                 else:
                     facts.append('older than %s' % opts.newer)
         if opts.md5 and content is not None:
-            try:
-                from hashlib import md5
-            except ImportError:
-                from md5 import md5
-            facts.append('md5=%s' % md5(content).hexdigest()[:opts.bytes])
+            h = hashlib.md5(content)
+            facts.append('md5=%s' % h.hexdigest()[:opts.bytes])
         if opts.sha1 and content is not None:
-            try:
-                from hashlib import sha1
-            except ImportError:
-                from sha import sha as sha1
-            facts.append('sha1=%s' % sha1(content).hexdigest()[:opts.bytes])
+            h = hashlib.sha1(content)
+            facts.append('sha1=%s' % h.hexdigest()[:opts.bytes])
         if isstdin:
             outfile.write(', '.join(facts) + '\n')
         elif facts:
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/helper-runtests.sh	Tue Jun 14 14:52:58 2016 -0500
@@ -0,0 +1,7 @@
+#
+# Avoid interference from actual test env:
+
+unset HGTEST_JOBS
+unset HGTEST_TIMEOUT
+unset HGTEST_PORT
+unset HGTEST_SHELL
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/helpers-testrepo.sh	Tue Jun 14 14:52:58 2016 -0500
@@ -0,0 +1,19 @@
+# The test-repo is a live hg repository which may have evolution
+# markers created, e.g. when a ~/.hgrc enabled evolution.
+#
+# Tests are run using a custom HGRCPATH, which does not
+# enable evolution markers by default.
+#
+# If test-repo includes evolution markers, and we do not
+# enable evolution markers, hg will occasionally complain
+# when it notices them, which disrupts tests resulting in
+# sporadic failures.
+#
+# Since we aren't performing any write operations on the
+# test-repo, there's no harm in telling hg that we support
+# evolution markers, which is what the following lines
+# for the hgrc file do:
+cat >> $HGRCPATH << EOF
+[experimental]
+evolution=createmarkers
+EOF
--- a/tests/hghave	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/hghave	Tue Jun 14 14:52:58 2016 -0500
@@ -4,22 +4,23 @@
 prefixed with "no-", the absence of feature is tested.
 """
 
-from __future__ import print_function
+from __future__ import absolute_import, print_function
 
+import hghave
 import optparse
-import os, sys
-import hghave
+import os
+import sys
 
 checks = hghave.checks
 
 def list_features():
-    for name, feature in sorted(checks.iteritems()):
+    for name, feature in sorted(checks.items()):
         desc = feature[1]
         print(name + ':', desc)
 
 def test_features():
     failed = 0
-    for name, feature in checks.iteritems():
+    for name, feature in checks.items():
         check, _ = feature
         try:
             check()
@@ -48,6 +49,7 @@
     sys.path.insert(0, path)
     try:
         import hghaveaddon
+        assert hghaveaddon  # silence pyflakes
     except BaseException as inst:
         sys.stderr.write('failed to import hghaveaddon.py from %r: %s\n'
                          % (path, inst))
--- a/tests/hghave.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/hghave.py	Tue Jun 14 14:52:58 2016 -0500
@@ -104,7 +104,7 @@
 
 @check("baz", "GNU Arch baz client")
 def has_baz():
-    return matchoutput('baz --version 2>&1', r'baz Bazaar version')
+    return matchoutput('baz --version 2>&1', br'baz Bazaar version')
 
 @check("bzr", "Canonical's Bazaar client")
 def has_bzr():
@@ -130,27 +130,27 @@
 
 @check("cvs", "cvs client/server")
 def has_cvs():
-    re = r'Concurrent Versions System.*?server'
+    re = br'Concurrent Versions System.*?server'
     return matchoutput('cvs --version 2>&1', re) and not has_msys()
 
 @check("cvs112", "cvs client/server 1.12.* (not cvsnt)")
 def has_cvs112():
-    re = r'Concurrent Versions System \(CVS\) 1.12.*?server'
+    re = br'Concurrent Versions System \(CVS\) 1.12.*?server'
     return matchoutput('cvs --version 2>&1', re) and not has_msys()
 
 @check("cvsnt", "cvsnt client/server")
 def has_cvsnt():
-    re = r'Concurrent Versions System \(CVSNT\) (\d+).(\d+).*\(client/server\)'
+    re = br'Concurrent Versions System \(CVSNT\) (\d+).(\d+).*\(client/server\)'
     return matchoutput('cvsnt --version 2>&1', re)
 
 @check("darcs", "darcs client")
 def has_darcs():
-    return matchoutput('darcs --version', r'2\.[2-9]', True)
+    return matchoutput('darcs --version', br'2\.[2-9]', True)
 
 @check("mtn", "monotone client (>= 1.0)")
 def has_mtn():
-    return matchoutput('mtn --version', r'monotone', True) and not matchoutput(
-        'mtn --version', r'monotone 0\.', True)
+    return matchoutput('mtn --version', br'monotone', True) and not matchoutput(
+        'mtn --version', br'monotone 0\.', True)
 
 @check("eol-in-paths", "end-of-lines in paths")
 def has_eol_in_paths():
@@ -236,7 +236,7 @@
         return False
 
 def gethgversion():
-    m = matchoutput('hg --version --quiet 2>&1', r'(\d+)\.(\d+)')
+    m = matchoutput('hg --version --quiet 2>&1', br'(\d+)\.(\d+)')
     if not m:
         return (0, 0)
     return (int(m.group(1)), int(m.group(2)))
@@ -267,11 +267,11 @@
 
 @check("gettext", "GNU Gettext (msgfmt)")
 def has_gettext():
-    return matchoutput('msgfmt --version', 'GNU gettext-tools')
+    return matchoutput('msgfmt --version', br'GNU gettext-tools')
 
 @check("git", "git command line client")
 def has_git():
-    return matchoutput('git --version 2>&1', r'^git version')
+    return matchoutput('git --version 2>&1', br'^git version')
 
 @check("docutils", "Docutils text processing library")
 def has_docutils():
@@ -283,7 +283,7 @@
         return False
 
 def getsvnversion():
-    m = matchoutput('svn --version --quiet 2>&1', r'^(\d+)\.(\d+)')
+    m = matchoutput('svn --version --quiet 2>&1', br'^(\d+)\.(\d+)')
     if not m:
         return (0, 0)
     return (int(m.group(1)), int(m.group(2)))
@@ -295,8 +295,8 @@
 
 @check("svn", "subversion client and admin tools")
 def has_svn():
-    return matchoutput('svn --version 2>&1', r'^svn, version') and \
-        matchoutput('svnadmin --version 2>&1', r'^svnadmin, version')
+    return matchoutput('svn --version 2>&1', br'^svn, version') and \
+        matchoutput('svnadmin --version 2>&1', br'^svnadmin, version')
 
 @check("svn-bindings", "subversion python bindings")
 def has_svn_bindings():
@@ -311,8 +311,8 @@
 
 @check("p4", "Perforce server and client")
 def has_p4():
-    return (matchoutput('p4 -V', r'Rev\. P4/') and
-            matchoutput('p4d -V', r'Rev\. P4D/'))
+    return (matchoutput('p4 -V', br'Rev\. P4/') and
+            matchoutput('p4d -V', br'Rev\. P4D/'))
 
 @check("symlink", "symbolic links")
 def has_symlink():
@@ -343,11 +343,11 @@
 
 @check("tla", "GNU Arch tla client")
 def has_tla():
-    return matchoutput('tla --version 2>&1', r'The GNU Arch Revision')
+    return matchoutput('tla --version 2>&1', br'The GNU Arch Revision')
 
 @check("gpg", "gpg client")
 def has_gpg():
-    return matchoutput('gpg --version 2>&1', r'GnuPG')
+    return matchoutput('gpg --version 2>&1', br'GnuPG')
 
 @check("unix-permissions", "unix-style permissions")
 def has_unix_permissions():
@@ -377,7 +377,7 @@
 @check("pyflakes", "Pyflakes python linter")
 def has_pyflakes():
     return matchoutput("sh -c \"echo 'import re' 2>&1 | pyflakes\"",
-                       r"<stdin>:1: 're' imported but unused",
+                       br"<stdin>:1: 're' imported but unused",
                        True)
 
 @check("pygments", "Pygments source highlighting library")
@@ -393,7 +393,7 @@
 def has_outer_repo():
     # failing for other reasons than 'no repo' imply that there is a repo
     return not matchoutput('hg root 2>&1',
-                           r'abort: no repository found', True)
+                           br'abort: no repository found', True)
 
 @check("ssl", "ssl module available")
 def has_ssl():
@@ -416,7 +416,7 @@
 @check("defaultcacerts", "can verify SSL certs by system's CA certs store")
 def has_defaultcacerts():
     from mercurial import sslutil
-    return sslutil._defaultcacerts() != '!'
+    return sslutil._defaultcacerts() or sslutil._canloaddefaultcerts
 
 @check("windows", "Windows")
 def has_windows():
@@ -440,7 +440,7 @@
     try:
         import curses
         curses.COLOR_BLUE
-        return matchoutput('test -x "`which tic`"', '')
+        return matchoutput('test -x "`which tic`"', br'')
     except ImportError:
         return False
 
@@ -459,19 +459,19 @@
 @check("osxpackaging", "OS X packaging tools")
 def has_osxpackaging():
     try:
-        return (matchoutput('pkgbuild', 'Usage: pkgbuild ', ignorestatus=1)
+        return (matchoutput('pkgbuild', br'Usage: pkgbuild ', ignorestatus=1)
                 and matchoutput(
-                    'productbuild', 'Usage: productbuild ',
+                    'productbuild', br'Usage: productbuild ',
                     ignorestatus=1)
-                and matchoutput('lsbom', 'Usage: lsbom', ignorestatus=1)
+                and matchoutput('lsbom', br'Usage: lsbom', ignorestatus=1)
                 and matchoutput(
-                    'xar --help', 'Usage: xar', ignorestatus=1))
+                    'xar --help', br'Usage: xar', ignorestatus=1))
     except ImportError:
         return False
 
 @check("docker", "docker support")
 def has_docker():
-    pat = r'A self-sufficient runtime for'
+    pat = br'A self-sufficient runtime for'
     if matchoutput('docker --help', pat):
         if 'linux' not in sys.platform:
             # TODO: in theory we should be able to test docker-based
@@ -489,11 +489,11 @@
 @check("debhelper", "debian packaging tools")
 def has_debhelper():
     dpkg = matchoutput('dpkg --version',
-                       "Debian `dpkg' package management program")
+                       br"Debian `dpkg' package management program")
     dh = matchoutput('dh --help',
-                     'dh is a part of debhelper.', ignorestatus=True)
+                     br'dh is a part of debhelper.', ignorestatus=True)
     dh_py2 = matchoutput('dh_python2 --help',
-                         'other supported Python versions')
+                         br'other supported Python versions')
     return dpkg and dh and dh_py2
 
 @check("absimport", "absolute_import in __future__")
--- a/tests/readlink.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/readlink.py	Tue Jun 14 14:52:58 2016 -0500
@@ -1,13 +1,15 @@
 #!/usr/bin/env python
 
+from __future__ import print_function
+
 import errno, os, sys
 
 for f in sys.argv[1:]:
     try:
-        print f, '->', os.readlink(f)
+        print(f, '->', os.readlink(f))
     except OSError as err:
         if err.errno != errno.EINVAL:
             raise
-        print f, 'not a symlink'
+        print(f, '->', f, 'not a symlink')
 
 sys.exit(0)
--- a/tests/run-tests.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/run-tests.py	Tue Jun 14 14:52:58 2016 -0500
@@ -69,6 +69,13 @@
 from xml.dom import minidom
 import unittest
 
+if os.environ.get('RTUNICODEPEDANTRY', False):
+    try:
+        reload(sys)
+        sys.setdefaultencoding("undefined")
+    except NameError:
+        pass
+
 osenvironb = getattr(os, 'environb', os.environ)
 processlock = threading.Lock()
 
@@ -1836,7 +1843,8 @@
                                 tres = {'result': res}
 
                             outcome[tc.name] = tres
-                    jsonout = json.dumps(outcome, sort_keys=True, indent=4)
+                    jsonout = json.dumps(outcome, sort_keys=True, indent=4,
+                                         separators=(',', ': '))
                     fp.writelines(("testreport =", jsonout))
 
             self._runner._checkhglib('Tested')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/sslcerts/README	Tue Jun 14 14:52:58 2016 -0500
@@ -0,0 +1,26 @@
+Certificates created with:
+ printf '.\n.\n.\n.\n.\nlocalhost\nhg@localhost\n' | \
+ openssl req -newkey rsa:512 -keyout priv.pem -nodes -x509 -days 9000 -out pub.pem
+Can be dumped with:
+ openssl x509 -in pub.pem -text
+
+ - priv.pem
+ - pub.pem
+ - pub-other.pem
+
+pub.pem patched with other notBefore / notAfter:
+
+ - pub-not-yet.pem
+ - pub-expired.pem
+
+Client certificates created with:
+ openssl genrsa -aes128 -passout pass:1234 -out client-key.pem 512
+ openssl rsa -in client-key.pem -passin pass:1234 -out client-key-decrypted.pem
+ printf '.\n.\n.\n.\n.\n.\nhg-client@localhost\n.\n.\n' | \
+ openssl req -new -key client-key.pem -passin pass:1234 -out client-csr.pem
+ openssl x509 -req -days 9000 -in client-csr.pem -CA pub.pem -CAkey priv.pem \
+ -set_serial 01 -out client-cert.pem
+
+ - client-key.pem
+ - client-key-decrypted.pem
+ - client-cert.pem
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/sslcerts/client-cert.pem	Tue Jun 14 14:52:58 2016 -0500
@@ -0,0 +1,9 @@
+-----BEGIN CERTIFICATE-----
+MIIBPjCB6QIBATANBgkqhkiG9w0BAQsFADAxMRIwEAYDVQQDDAlsb2NhbGhvc3Qx
+GzAZBgkqhkiG9w0BCQEWDGhnQGxvY2FsaG9zdDAeFw0xNTA1MDcwNjI5NDVaFw0z
+OTEyMjcwNjI5NDVaMCQxIjAgBgkqhkiG9w0BCQEWE2hnLWNsaWVudEBsb2NhbGhv
+c3QwXDANBgkqhkiG9w0BAQEFAANLADBIAkEAmzgtLeCUBhT3ZuDmQ+BE81bzh7AH
+R9Yl8ApxwKnUAIcB1k95opsUKKdUxgoBVtWoGTKtn9PKvxpJ8zPjE7j4qwIDAQAB
+MA0GCSqGSIb3DQEBCwUAA0EAfBTqBG5pYhuGk+ZnyUufgS+d7Nk/sZAZjNdCAEj/
+NFPo5fR1jM6jlEWoWbeg298+SkjV7tfO+2nt0otUFkdM6A==
+-----END CERTIFICATE-----
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/sslcerts/client-key-decrypted.pem	Tue Jun 14 14:52:58 2016 -0500
@@ -0,0 +1,9 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIBOgIBAAJBAJs4LS3glAYU92bg5kPgRPNW84ewB0fWJfAKccCp1ACHAdZPeaKb
+FCinVMYKAVbVqBkyrZ/Tyr8aSfMz4xO4+KsCAwEAAQJAeKDr25+Q6jkZHEbkLRP6
+AfMtR+Ixhk6TJT24sbZKIC2V8KuJTDEvUhLU0CAr1nH79bDqiSsecOiVCr2HHyfT
+AQIhAM2C5rHbTs9R3PkywFEqq1gU3ztCnpiWglO7/cIkuGBhAiEAwVpMSAf77kop
+4h/1kWsgMALQTJNsXd4CEUK4BOxvJIsCIQCbarVAKBQvoT81jfX27AfscsxnKnh5
++MjSvkanvdFZwQIgbbcTefwt1LV4trtz2SR0i0nNcOZmo40Kl0jIquKO3qkCIH01
+mJHzZr3+jQqeIFtr5P+Xqi30DJxgrnEobbJ0KFjY
+-----END RSA PRIVATE KEY-----
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/sslcerts/client-key.pem	Tue Jun 14 14:52:58 2016 -0500
@@ -0,0 +1,12 @@
+-----BEGIN RSA PRIVATE KEY-----
+Proc-Type: 4,ENCRYPTED
+DEK-Info: AES-128-CBC,C8B8F103A61A336FB0716D1C0F8BB2E8
+
+JolMlCFjEW3q3JJjO9z99NJWeJbFgF5DpUOkfSCxH56hxxtZb9x++rBvBZkxX1bF
+BAIe+iI90+jdCLwxbILWuFcrJUaLC5WmO14XDKYVmr2eW9e4MiCYOlO0Q6a9rDFS
+jctRCfvubOXFHbBGLH8uKEMpXEkP7Lc60FiIukqjuQEivJjrQirVtZCGwyk3qUi7
+Eyh4Lo63IKGu8T1Bkmn2kaMvFhu7nC/CQLBjSq0YYI1tmCOkVb/3tPrz8oqgDJp2
+u7bLS3q0xDNZ52nVrKIoZC/UlRXGlPyzPpa70/jPIdfCbkwDaBpRVXc+62Pj2n5/
+CnO2xaKwfOG6pDvanBhFD72vuBOkAYlFZPiEku4sc2WlNggsSWCPCIFwzmiHjKIl
+bWmdoTq3nb7sNfnBbV0OCa7fS1dFwCm4R1NC7ELENu0=
+-----END RSA PRIVATE KEY-----
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/sslcerts/priv.pem	Tue Jun 14 14:52:58 2016 -0500
@@ -0,0 +1,10 @@
+-----BEGIN PRIVATE KEY-----
+MIIBVAIBADANBgkqhkiG9w0BAQEFAASCAT4wggE6AgEAAkEApjCWeYGrIa/Vo7LH
+aRF8ou0tbgHKE33Use/whCnKEUm34rDaXQd4lxxX6aDWg06n9tiVStAKTgQAHJY8
+j/xgSwIDAQABAkBxHC6+Qlf0VJXGlb6NL16yEVVTQxqDS6hA9zqu6TZjrr0YMfzc
+EGNIiZGt7HCBL0zO+cPDg/LeCZc6HQhf0KrhAiEAzlJq4hWWzvguWFIJWSoBeBUG
+MF1ACazQO7PYE8M0qfECIQDONHHP0SKZzz/ZwBZcAveC5K61f/v9hONFwbeYulzR
++wIgc9SvbtgB/5Yzpp//4ZAEnR7oh5SClCvyB+KSx52K3nECICbhQphhoXmI10wy
+aMTellaq0bpNMHFDziqH9RsqAHhjAiEAgYGxfzkftt5IUUn/iFK89aaIpyrpuaAh
+HY8gUVkVRVs=
+-----END PRIVATE KEY-----
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/sslcerts/pub-expired.pem	Tue Jun 14 14:52:58 2016 -0500
@@ -0,0 +1,10 @@
+-----BEGIN CERTIFICATE-----
+MIIBqzCCAVWgAwIBAgIJANAXFFyWjGnRMA0GCSqGSIb3DQEBBQUAMDExEjAQBgNVBAMMCWxvY2Fs
+aG9zdDEbMBkGCSqGSIb3DQEJARYMaGdAbG9jYWxob3N0MB4XDTEwMTAxNDIwMzAxNFoXDTEwMTAx
+NDIwMzAxNFowMTESMBAGA1UEAwwJbG9jYWxob3N0MRswGQYJKoZIhvcNAQkBFgxoZ0Bsb2NhbGhv
+c3QwXDANBgkqhkiG9w0BAQEFAANLADBIAkEApjCWeYGrIa/Vo7LHaRF8ou0tbgHKE33Use/whCnK
+EUm34rDaXQd4lxxX6aDWg06n9tiVStAKTgQAHJY8j/xgSwIDAQABo1AwTjAdBgNVHQ4EFgQUE6sA
++ammr24dGX0kpjxOgO45hzQwHwYDVR0jBBgwFoAUE6sA+ammr24dGX0kpjxOgO45hzQwDAYDVR0T
+BAUwAwEB/zANBgkqhkiG9w0BAQUFAANBAJfk57DTRf2nUbYaMSlVAARxMNbFGOjQhAUtY400GhKt
+2uiKCNGKXVXD3AHWe13yHc5KttzbHQStE5Nm/DlWBWQ=
+-----END CERTIFICATE-----
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/sslcerts/pub-not-yet.pem	Tue Jun 14 14:52:58 2016 -0500
@@ -0,0 +1,10 @@
+-----BEGIN CERTIFICATE-----
+MIIBqzCCAVWgAwIBAgIJANAXFFyWjGnRMA0GCSqGSIb3DQEBBQUAMDExEjAQBgNVBAMMCWxvY2Fs
+aG9zdDEbMBkGCSqGSIb3DQEJARYMaGdAbG9jYWxob3N0MB4XDTM1MDYwNTIwMzAxNFoXDTM1MDYw
+NTIwMzAxNFowMTESMBAGA1UEAwwJbG9jYWxob3N0MRswGQYJKoZIhvcNAQkBFgxoZ0Bsb2NhbGhv
+c3QwXDANBgkqhkiG9w0BAQEFAANLADBIAkEApjCWeYGrIa/Vo7LHaRF8ou0tbgHKE33Use/whCnK
+EUm34rDaXQd4lxxX6aDWg06n9tiVStAKTgQAHJY8j/xgSwIDAQABo1AwTjAdBgNVHQ4EFgQUE6sA
++ammr24dGX0kpjxOgO45hzQwHwYDVR0jBBgwFoAUE6sA+ammr24dGX0kpjxOgO45hzQwDAYDVR0T
+BAUwAwEB/zANBgkqhkiG9w0BAQUFAANBAJXV41gWnkgC7jcpPpFRSUSZaxyzrXmD1CIqQf0WgVDb
+/12E0vR2DuZitgzUYtBaofM81aTtc0a2/YsrmqePGm0=
+-----END CERTIFICATE-----
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/sslcerts/pub-other.pem	Tue Jun 14 14:52:58 2016 -0500
@@ -0,0 +1,11 @@
+-----BEGIN CERTIFICATE-----
+MIIBqzCCAVWgAwIBAgIJALwZS731c/ORMA0GCSqGSIb3DQEBBQUAMDExEjAQBgNV
+BAMMCWxvY2FsaG9zdDEbMBkGCSqGSIb3DQEJARYMaGdAbG9jYWxob3N0MB4XDTEw
+MTAxNDIwNDUxNloXDTM1MDYwNTIwNDUxNlowMTESMBAGA1UEAwwJbG9jYWxob3N0
+MRswGQYJKoZIhvcNAQkBFgxoZ0Bsb2NhbGhvc3QwXDANBgkqhkiG9w0BAQEFAANL
+ADBIAkEAsxsapLbHrqqUKuQBxdpK4G3m2LjtyrTSdpzzzFlecxd5yhNP6AyWrufo
+K4VMGo2xlu9xOo88nDSUNSKPuD09MwIDAQABo1AwTjAdBgNVHQ4EFgQUoIB1iMhN
+y868rpQ2qk9dHnU6ebswHwYDVR0jBBgwFoAUoIB1iMhNy868rpQ2qk9dHnU6ebsw
+DAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQUFAANBAJ544f125CsE7J2t55PdFaF6
+bBlNBb91FCywBgSjhBjf+GG3TNPwrPdc3yqeq+hzJiuInqbOBv9abmMyq8Wsoig=
+-----END CERTIFICATE-----
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/sslcerts/pub.pem	Tue Jun 14 14:52:58 2016 -0500
@@ -0,0 +1,11 @@
+-----BEGIN CERTIFICATE-----
+MIIBqzCCAVWgAwIBAgIJANAXFFyWjGnRMA0GCSqGSIb3DQEBBQUAMDExEjAQBgNV
+BAMMCWxvY2FsaG9zdDEbMBkGCSqGSIb3DQEJARYMaGdAbG9jYWxob3N0MB4XDTEw
+MTAxNDIwMzAxNFoXDTM1MDYwNTIwMzAxNFowMTESMBAGA1UEAwwJbG9jYWxob3N0
+MRswGQYJKoZIhvcNAQkBFgxoZ0Bsb2NhbGhvc3QwXDANBgkqhkiG9w0BAQEFAANL
+ADBIAkEApjCWeYGrIa/Vo7LHaRF8ou0tbgHKE33Use/whCnKEUm34rDaXQd4lxxX
+6aDWg06n9tiVStAKTgQAHJY8j/xgSwIDAQABo1AwTjAdBgNVHQ4EFgQUE6sA+amm
+r24dGX0kpjxOgO45hzQwHwYDVR0jBBgwFoAUE6sA+ammr24dGX0kpjxOgO45hzQw
+DAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQUFAANBAFArvQFiAZJgQczRsbYlG1xl
+t+truk37w5B3m3Ick1ntRcQrqs+hf0CO1q6Squ144geYaQ8CDirSR92fICELI1c=
+-----END CERTIFICATE-----
--- a/tests/svn-safe-append.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/svn-safe-append.py	Tue Jun 14 14:52:58 2016 -0500
@@ -1,9 +1,12 @@
 #!/usr/bin/env python
 
+from __future__ import absolute_import
+
 __doc__ = """Same as `echo a >> b`, but ensures a changed mtime of b.
 Without this svn will not detect workspace changes."""
 
-import sys, os
+import os
+import sys
 
 text = sys.argv[1]
 fname = sys.argv[2]
--- a/tests/test-addremove-similar.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-addremove-similar.t	Tue Jun 14 14:52:58 2016 -0500
@@ -1,7 +1,7 @@
   $ hg init rep; cd rep
 
   $ touch empty-file
-  $ $PYTHON -c 'for x in range(10000): print x' > large-file
+  $ $PYTHON -c 'for x in range(10000): print(x)' > large-file
 
   $ hg addremove
   adding empty-file
@@ -10,7 +10,7 @@
   $ hg commit -m A
 
   $ rm large-file empty-file
-  $ $PYTHON -c 'for x in range(10,10000): print x' > another-file
+  $ $PYTHON -c 'for x in range(10,10000): print(x)' > another-file
 
   $ hg addremove -s50
   adding another-file
@@ -34,8 +34,8 @@
 
   $ hg init rep2; cd rep2
 
-  $ $PYTHON -c 'for x in range(10000): print x' > large-file
-  $ $PYTHON -c 'for x in range(50): print x' > tiny-file
+  $ $PYTHON -c 'for x in range(10000): print(x)' > large-file
+  $ $PYTHON -c 'for x in range(50): print(x)' > tiny-file
 
   $ hg addremove
   adding large-file
@@ -43,7 +43,7 @@
 
   $ hg commit -m A
 
-  $ $PYTHON -c 'for x in range(70): print x' > small-file
+  $ $PYTHON -c 'for x in range(70): print(x)' > small-file
   $ rm tiny-file
   $ rm large-file
 
--- a/tests/test-alias.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-alias.t	Tue Jun 14 14:52:58 2016 -0500
@@ -525,6 +525,24 @@
   (use "hg help" for the full list of commands or "hg -v" for details)
   [255]
 
+environment variable changes in alias commands
+
+  $ cat > $TESTTMP/setcount.py <<EOF
+  > import os
+  > def uisetup(ui):
+  >     os.environ['COUNT'] = '2'
+  > EOF
+
+  $ cat >> $HGRCPATH <<'EOF'
+  > [extensions]
+  > setcount = $TESTTMP/setcount.py
+  > [alias]
+  > showcount = log -T "$COUNT\n" -r .
+  > EOF
+
+  $ COUNT=1 hg showcount
+  2
+
 This should show id:
 
   $ hg --config alias.log='id' log
--- a/tests/test-archive.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-archive.t	Tue Jun 14 14:52:58 2016 -0500
@@ -69,7 +69,12 @@
   $ TIP=`hg id -v | cut -f1 -d' '`
   $ QTIP=`hg id -q`
   $ cat > getarchive.py <<EOF
-  > import os, sys, urllib2
+  > from __future__ import absolute_import
+  > import os
+  > import sys
+  > from mercurial import (
+  >     util,
+  > )
   > try:
   >     # Set stdout to binary mode for win32 platforms
   >     import msvcrt
@@ -83,10 +88,14 @@
   >     node, archive, file = sys.argv[1:]
   >     requeststr = 'cmd=archive;node=%s;type=%s;file=%s' % (node, archive, file)
   > try:
-  >     f = urllib2.urlopen('http://127.0.0.1:%s/?%s'
+  >     stdout = sys.stdout.buffer
+  > except AttributeError:
+  >     stdout = sys.stdout
+  > try:
+  >     f = util.urlreq.urlopen('http://127.0.0.1:%s/?%s'
   >                     % (os.environ['HGPORT'], requeststr))
-  >     sys.stdout.write(f.read())
-  > except urllib2.HTTPError, e:
+  >     stdout.write(f.read())
+  > except util.urlerr.httperror as e:
   >     sys.stderr.write(str(e) + '\n')
   > EOF
   $ python getarchive.py "$TIP" gz | gunzip | tar tf - 2>/dev/null
@@ -195,15 +204,16 @@
   > done
 
   $ cat > md5comp.py <<EOF
+  > from __future__ import print_function
   > try:
   >     from hashlib import md5
   > except ImportError:
   >     from md5 import md5
   > import sys
   > f1, f2 = sys.argv[1:3]
-  > h1 = md5(file(f1, 'rb').read()).hexdigest()
-  > h2 = md5(file(f2, 'rb').read()).hexdigest()
-  > print h1 == h2 or "md5 differ: " + repr((h1, h2))
+  > h1 = md5(open(f1, 'rb').read()).hexdigest()
+  > h2 = md5(open(f2, 'rb').read()).hexdigest()
+  > print(h1 == h2 or "md5 differ: " + repr((h1, h2)))
   > EOF
 
 archive name is stored in the archive, so create similar archives and
@@ -343,8 +353,9 @@
   $ hg -R repo add repo/a
   $ hg -R repo commit -m '#0' -d '456789012 21600'
   $ cat > show_mtime.py <<EOF
+  > from __future__ import print_function
   > import sys, os
-  > print int(os.stat(sys.argv[1]).st_mtime)
+  > print(int(os.stat(sys.argv[1]).st_mtime))
   > EOF
 
   $ hg -R repo archive --prefix tar-extracted archive.tar
--- a/tests/test-atomictempfile.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-atomictempfile.py	Tue Jun 14 14:52:58 2016 -0500
@@ -1,9 +1,13 @@
+from __future__ import absolute_import
+
+import glob
 import os
-import glob
 import unittest
-import silenttestrunner
 
-from mercurial.util import atomictempfile
+from mercurial import (
+    util,
+)
+atomictempfile = util.atomictempfile
 
 class testatomictempfile(unittest.TestCase):
     def test1_simple(self):
@@ -14,7 +18,7 @@
         self.assertFalse(os.path.isfile('foo'))
         self.assertTrue(basename in glob.glob('.foo-*'))
 
-        file.write('argh\n')
+        file.write(b'argh\n')
         file.close()
 
         self.assertTrue(os.path.isfile('foo'))
@@ -27,7 +31,7 @@
         file = atomictempfile('foo')
         (dir, basename) = os.path.split(file._tempname)
 
-        file.write('yo\n')
+        file.write(b'yo\n')
         file.discard()
 
         self.assertFalse(os.path.isfile('foo'))
@@ -38,5 +42,46 @@
     def test3_oops(self):
         self.assertRaises(TypeError, atomictempfile)
 
+    # checkambig=True avoids timestamp ambiguity
+    def test4_checkambig(self):
+        def atomicwrite(checkambig):
+            f = atomictempfile('foo', checkambig=checkambig)
+            f.write('FOO')
+            f.close()
+
+        # try a few times, because reproducing the ambiguity depends on
+        # "filesystem time"
+        for i in xrange(5):
+            atomicwrite(False)
+            oldstat = os.stat('foo')
+            if oldstat.st_ctime != oldstat.st_mtime:
+                # a subsequent change never causes ambiguity
+                continue
+
+            repetition = 3
+
+            # repeat atomic write with checkambig=True, to examine
+            # whether st_mtime is advanced multiple times as expected
+            for j in xrange(repetition):
+                atomicwrite(True)
+            newstat = os.stat('foo')
+            if oldstat.st_ctime != newstat.st_ctime:
+                # timestamp ambiguity was naturally avoided during the repetition
+                continue
+
+            # st_mtime should be advanced "repetition" times, because
+            # all atomicwrite() calls occurred at the same time (in seconds)
+            self.assertTrue(newstat.st_mtime ==
+                            ((oldstat.st_mtime + repetition) & 0x7fffffff))
+            # no further examination is needed if the assumption above holds
+            break
+        else:
+            # This platform seems too slow to exercise the anti-ambiguity
+            # check on file timestamps (or the test happened to run at a
+            # bad time). Exit silently in this case, because runs on
+            # other, faster platforms can still detect the problem.
+            pass
+
 if __name__ == '__main__':
+    import silenttestrunner
     silenttestrunner.main(__name__)
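For readers following the new test4_checkambig above: a minimal sketch of how the checkambig flag is meant to be used, assuming mercurial.util.atomictempfile accepts the checkambig keyword added by this series and that the filesystem records timestamps with one-second granularity (an illustration only, not part of the patch):

import os
from mercurial import util

def atomicwrite(data, checkambig):
    # replace 'demo' atomically; checkambig asks atomictempfile to keep the
    # new stat distinguishable from the old one
    f = util.atomictempfile('demo', checkambig=checkambig)
    f.write(data)
    f.close()

atomicwrite(b'one\n', False)
before = os.stat('demo')
atomicwrite(b'two\n', True)      # rewrite within the same wall-clock second
after = os.stat('demo')
# with checkambig=True the second write is expected to advance st_mtime so
# cached stats can tell the file changed, even though both writes may share
# the same second
print(before.st_mtime, after.st_mtime)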
--- a/tests/test-bugzilla.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-bugzilla.t	Tue Jun 14 14:52:58 2016 -0500
@@ -54,7 +54,7 @@
   $ cat bzmock.log && rm bzmock.log
   update bugid=123, newstate={}, committer='test'
   ----
-  changeset 7875a8342c6f in repo $TESTTMP/mockremote refers to bug 123.
+  changeset 7875a8342c6f in repo $TESTTMP/mockremote refers to bug 123. (glob)
   details:
   	Fixes bug 123
   ----
--- a/tests/test-check-code.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-check-code.t	Tue Jun 14 14:52:58 2016 -0500
@@ -1,5 +1,6 @@
 #require test-repo
 
+  $ . "$TESTDIR/helpers-testrepo.sh"
   $ check_code="$TESTDIR"/../contrib/check-code.py
   $ cd "$TESTDIR"/..
 
--- a/tests/test-check-commit.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-check-commit.t	Tue Jun 14 14:52:58 2016 -0500
@@ -2,10 +2,7 @@
 
 Enable obsolescence to avoid the warning issue when obsmarker are found
 
-  $ cat >> $HGRCPATH << EOF
-  > [experimental]
-  > evolution=createmarkers
-  > EOF
+  $ . "$TESTDIR/helpers-testrepo.sh"
 
 Go back in the hg repo
 
--- a/tests/test-check-config.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-check-config.t	Tue Jun 14 14:52:58 2016 -0500
@@ -1,5 +1,6 @@
 #require test-repo
 
+  $ . "$TESTDIR/helpers-testrepo.sh"
   $ cd "$TESTDIR"/..
 
 New errors are not allowed. Warnings are strongly discouraged.
--- a/tests/test-check-execute.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-check-execute.t	Tue Jun 14 14:52:58 2016 -0500
@@ -1,5 +1,6 @@
 #require test-repo execbit
 
+  $ . "$TESTDIR/helpers-testrepo.sh"
   $ cd "`dirname "$TESTDIR"`"
 
 look for python scripts without the execute bit
--- a/tests/test-check-module-imports.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-check-module-imports.t	Tue Jun 14 14:52:58 2016 -0500
@@ -1,5 +1,6 @@
 #require test-repo
 
+  $ . "$TESTDIR/helpers-testrepo.sh"
   $ import_checker="$TESTDIR"/../contrib/import-checker.py
 
 Run the doctests from the import checker, and make sure
@@ -11,6 +12,7 @@
 Run additional tests for the import checker
 
   $ mkdir testpackage
+  $ touch testpackage/__init__.py
 
   $ cat > testpackage/multiple.py << EOF
   > from __future__ import absolute_import
@@ -113,7 +115,16 @@
   > from testpackage.unsorted import foo
   > EOF
 
-  $ python "$import_checker" testpackage/*.py testpackage/subpackage/*.py
+  $ mkdir testpackage2
+  $ touch testpackage2/__init__.py
+
+  $ cat > testpackage2/latesymbolimport.py << EOF
+  > from __future__ import absolute_import
+  > from testpackage import unsorted
+  > from mercurial.node import hex
+  > EOF
+
+  $ python "$import_checker" testpackage*/*.py testpackage/subpackage/*.py
   testpackage/importalias.py:2: ui module must be "as" aliased to uimod
   testpackage/importfromalias.py:2: ui from testpackage must be "as" aliased to uimod
   testpackage/importfromrelative.py:2: import should be relative: testpackage.unsorted
@@ -131,6 +142,7 @@
   testpackage/subpackage/localimport.py:8: import should be relative: testpackage.subpackage.levelpriority
   testpackage/symbolimport.py:2: direct symbol import foo from testpackage.unsorted
   testpackage/unsorted.py:3: imports not lexically sorted: os < sys
+  testpackage2/latesymbolimport.py:3: symbol import follows non-symbol import: mercurial.node
   [1]
 
   $ cd "$TESTDIR"/..
@@ -144,8 +156,13 @@
 Known-bad files are excluded by -X as some of them would produce unstable
 outputs, which should be fixed later.
 
-  $ hg locate 'mercurial/**.py' 'hgext/**.py' 'tests/**.py' \
+  $ hg locate 'set:**.py or grep(r"^#!.*?python")' \
   > 'tests/**.t' \
+  > -X contrib/debugshell.py \
+  > -X contrib/win32/hgwebdir_wsgi.py \
+  > -X doc/gendoc.py \
+  > -X doc/hgmanpage.py \
+  > -X i18n/posplit \
   > -X tests/test-hgweb-auth.py \
   > -X tests/hypothesishelpers.py \
   > -X tests/test-ctxmanager.py \
@@ -162,5 +179,3 @@
   > -X tests/test-hgweb-no-request-uri.t \
   > -X tests/test-hgweb-non-interactive.t \
   > | sed 's-\\-/-g' | python "$import_checker" -
-  Import cycle: hgext.largefiles.basestore -> hgext.largefiles.localstore -> hgext.largefiles.basestore
-  [1]
--- a/tests/test-check-py3-compat.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-check-py3-compat.t	Tue Jun 14 14:52:58 2016 -0500
@@ -1,33 +1,15 @@
 #require test-repo
 
+  $ . "$TESTDIR/helpers-testrepo.sh"
   $ cd "$TESTDIR"/..
 
   $ hg files 'set:(**.py)' | sed 's|\\|/|g' | xargs python contrib/check-py3-compat.py
-  hgext/fetch.py not using absolute_import
   hgext/fsmonitor/pywatchman/__init__.py not using absolute_import
   hgext/fsmonitor/pywatchman/__init__.py requires print_function
   hgext/fsmonitor/pywatchman/capabilities.py not using absolute_import
   hgext/fsmonitor/pywatchman/pybser.py not using absolute_import
-  hgext/gpg.py not using absolute_import
-  hgext/graphlog.py not using absolute_import
-  hgext/hgcia.py not using absolute_import
-  hgext/hgk.py not using absolute_import
   hgext/highlight/__init__.py not using absolute_import
   hgext/highlight/highlight.py not using absolute_import
-  hgext/histedit.py not using absolute_import
-  hgext/largefiles/__init__.py not using absolute_import
-  hgext/largefiles/basestore.py not using absolute_import
-  hgext/largefiles/lfcommands.py not using absolute_import
-  hgext/largefiles/lfutil.py not using absolute_import
-  hgext/largefiles/localstore.py not using absolute_import
-  hgext/largefiles/overrides.py not using absolute_import
-  hgext/largefiles/proto.py not using absolute_import
-  hgext/largefiles/remotestore.py not using absolute_import
-  hgext/largefiles/reposetup.py not using absolute_import
-  hgext/largefiles/uisetup.py not using absolute_import
-  hgext/largefiles/wirestore.py not using absolute_import
-  hgext/mq.py not using absolute_import
-  hgext/rebase.py not using absolute_import
   hgext/share.py not using absolute_import
   hgext/win32text.py not using absolute_import
   i18n/check-translation.py not using absolute_import
@@ -36,15 +18,11 @@
   tests/heredoctest.py requires print_function
   tests/md5sum.py not using absolute_import
   tests/readlink.py not using absolute_import
-  tests/readlink.py requires print_function
   tests/run-tests.py not using absolute_import
-  tests/svn-safe-append.py not using absolute_import
-  tests/test-atomictempfile.py not using absolute_import
   tests/test-demandimport.py not using absolute_import
 
 #if py3exe
   $ hg files 'set:(**.py)' | sed 's|\\|/|g' | xargs $PYTHON3 contrib/check-py3-compat.py
-  contrib/check-code.py: invalid syntax: (unicode error) 'unicodeescape' codec can't decode bytes in position *-*: malformed \N character escape (<unknown>, line *) (glob)
   doc/hgmanpage.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
   hgext/automv.py: error importing module: <SyntaxError> invalid syntax (commands.py, line *) (line *) (glob)
   hgext/blackbox.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
@@ -56,10 +34,9 @@
   hgext/clonebundles.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
   hgext/color.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
   hgext/convert/bzr.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
-  hgext/convert/common.py: error importing module: <ImportError> No module named 'cPickle' (line *) (glob)
   hgext/convert/convcmd.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at bundlerepo.py:*) (glob)
   hgext/convert/cvs.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
-  hgext/convert/cvsps.py: error importing module: <ImportError> No module named 'cPickle' (line *) (glob)
+  hgext/convert/cvsps.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
   hgext/convert/darcs.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
   hgext/convert/filemap.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
   hgext/convert/git.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
@@ -67,16 +44,15 @@
   hgext/convert/hg.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
   hgext/convert/monotone.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
   hgext/convert/p*.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
-  hgext/convert/subversion.py: error importing module: <ImportError> No module named 'cPickle' (line *) (glob)
+  hgext/convert/subversion.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
   hgext/convert/transport.py: error importing module: <ImportError> No module named 'svn.client' (line *) (glob)
   hgext/eol.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
   hgext/extdiff.py: error importing module: <SyntaxError> invalid syntax (archival.py, line *) (line *) (glob)
-  hgext/factotum.py: error importing: <ImportError> No module named 'httplib' (error at __init__.py:*) (glob)
-  hgext/fetch.py: error importing module: <SyntaxError> invalid syntax (commands.py, line *) (line *) (glob)
+  hgext/factotum.py: error importing: <ImportError> No module named 'rfc822' (error at __init__.py:*) (glob)
+  hgext/fetch.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
   hgext/fsmonitor/watchmanclient.py: error importing module: <SystemError> Parent module 'hgext.fsmonitor' not loaded, cannot perform relative import (line *) (glob)
-  hgext/gpg.py: error importing module: <SyntaxError> invalid syntax (commands.py, line *) (line *) (glob)
+  hgext/gpg.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
   hgext/graphlog.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  hgext/hgcia.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
   hgext/hgk.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
   hgext/histedit.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
   hgext/keyword.py: error importing: <ImportError> No module named 'BaseHTTPServer' (error at common.py:*) (glob)
@@ -90,12 +66,12 @@
   hgext/largefiles/reposetup.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
   hgext/largefiles/uisetup.py: error importing module: <SyntaxError> invalid syntax (archival.py, line *) (line *) (glob)
   hgext/largefiles/wirestore.py: error importing module: <ImportError> No module named 'lfutil' (line *) (glob)
-  hgext/mq.py: error importing module: <SyntaxError> invalid syntax (commands.py, line *) (line *) (glob)
+  hgext/mq.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
   hgext/notify.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
   hgext/pager.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
   hgext/patchbomb.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
   hgext/purge.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  hgext/rebase.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at bundlerepo.py:*) (glob)
+  hgext/rebase.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
   hgext/record.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
   hgext/relink.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
   hgext/schemes.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
@@ -123,7 +99,7 @@
   mercurial/filelog.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
   mercurial/filemerge.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
   mercurial/fileset.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/formatter.py: error importing module: <ImportError> No module named 'cPickle' (line *) (glob)
+  mercurial/formatter.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
   mercurial/graphmod.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
   mercurial/help.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
   mercurial/hg.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at bundlerepo.py:*) (glob)
@@ -137,8 +113,7 @@
   mercurial/hgweb/webutil.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
   mercurial/hgweb/wsgicgi.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
   mercurial/hook.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/httpclient/_readers.py: error importing module: <ImportError> No module named 'httplib' (line *) (glob)
-  mercurial/httpconnection.py: error importing: <ImportError> No module named 'httplib' (error at __init__.py:*) (glob)
+  mercurial/httpconnection.py: error importing: <ImportError> No module named 'rfc822' (error at __init__.py:*) (glob)
   mercurial/httppeer.py: error importing module: <ImportError> No module named 'httplib' (line *) (glob)
   mercurial/keepalive.py: error importing module: <ImportError> No module named 'httplib' (line *) (glob)
   mercurial/localrepo.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
@@ -164,13 +139,12 @@
   mercurial/templatefilters.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
   mercurial/templatekw.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
   mercurial/templater.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/ui.py: error importing: <ImportError> No module named 'cPickle' (error at formatter.py:*) (glob)
+  mercurial/ui.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
   mercurial/unionrepo.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
   mercurial/url.py: error importing module: <ImportError> No module named 'httplib' (line *) (glob)
   mercurial/verify.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
   mercurial/win*.py: error importing module: <ImportError> No module named 'msvcrt' (line *) (glob)
   mercurial/windows.py: error importing module: <ImportError> No module named '_winreg' (line *) (glob)
   mercurial/wireproto.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
-  tests/readlink.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
 
 #endif
--- a/tests/test-check-pyflakes.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-check-pyflakes.t	Tue Jun 14 14:52:58 2016 -0500
@@ -1,11 +1,12 @@
-#require test-repo pyflakes
+#require test-repo pyflakes hg10
 
+  $ . "$TESTDIR/helpers-testrepo.sh"
   $ cd "`dirname "$TESTDIR"`"
 
 run pyflakes on all tracked files ending in .py or without a file ending
 (skipping binary file random-seed)
 
-  $ hg locate 'set:**.py or grep("^!#.*python")' 2>/dev/null \
+  $ hg locate 'set:**.py or grep("^#!.*python")' 2>/dev/null \
   > | xargs pyflakes 2>/dev/null | "$TESTDIR/filterpyflakes.py"
   tests/filterpyflakes.py:61: undefined name 'undefinedname'
   
--- a/tests/test-check-shbang.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-check-shbang.t	Tue Jun 14 14:52:58 2016 -0500
@@ -1,5 +1,6 @@
 #require test-repo
 
+  $ . "$TESTDIR/helpers-testrepo.sh"
   $ cd "`dirname "$TESTDIR"`"
 
 look for python scripts that do not use /usr/bin/env
--- a/tests/test-chg.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-chg.t	Tue Jun 14 14:52:58 2016 -0500
@@ -1,12 +1,86 @@
+#require chg
+
+  $ cp $HGRCPATH $HGRCPATH.orig
+
 init repo
 
-  $ hg init foo
+  $ chg init foo
   $ cd foo
 
 ill-formed config
 
-  $ hg status
+  $ chg status
   $ echo '=brokenconfig' >> $HGRCPATH
-  $ hg status
+  $ chg status
   hg: parse error at * (glob)
   [255]
+
+  $ cp $HGRCPATH.orig $HGRCPATH
+  $ cd ..
+
+server lifecycle
+----------------
+
+the chg server should be restarted on a code change, and the old server will
+shut down automatically. In this test, we use the following timing parameters:
+
+ - "sleep 1" to make mtime different
+ - "sleep 2" to notice mtime change (polling interval is 1 sec)
+
+set up repository with an extension:
+
+  $ chg init extreload
+  $ cd extreload
+  $ touch dummyext.py
+  $ cat <<EOF >> .hg/hgrc
+  > [extensions]
+  > dummyext = dummyext.py
+  > EOF
+
+isolate socket directory for stable result:
+
+  $ OLDCHGSOCKNAME=$CHGSOCKNAME
+  $ mkdir chgsock
+  $ CHGSOCKNAME=`pwd`/chgsock/server
+
+warm up server:
+
+  $ CHGDEBUG= chg log 2>&1 | egrep 'instruction|start'
+  chg: debug: start cmdserver at $TESTTMP/extreload/chgsock/server
+
+a new server should be started if the extension is modified:
+
+  $ sleep 1
+  $ touch dummyext.py
+  $ CHGDEBUG= chg log 2>&1 | egrep 'instruction|start'
+  chg: debug: instruction: unlink $TESTTMP/extreload/chgsock/server-* (glob)
+  chg: debug: instruction: reconnect
+  chg: debug: start cmdserver at $TESTTMP/extreload/chgsock/server
+
+the old server will shut down, while the new server should still be reachable:
+
+  $ sleep 2
+  $ CHGDEBUG= chg log 2>&1 | (egrep 'instruction|start' || true)
+
+the socket file should never be unlinked by the old server:
+(simulate an unowned socket by updating its mtime, which makes sure the old
+server exits at the next polling cycle)
+
+  $ ls chgsock/server-*
+  chgsock/server-* (glob)
+  $ touch chgsock/server-*
+  $ sleep 2
+  $ ls chgsock/server-*
+  chgsock/server-* (glob)
+
+since no server is reachable from the socket file, a new server should be started:
+(this test makes sure that the old server shut down automatically)
+
+  $ CHGDEBUG= chg log 2>&1 | egrep 'instruction|start'
+  chg: debug: start cmdserver at $TESTTMP/extreload/chgsock/server
+
+shut down servers and restore environment:
+
+  $ rm -R chgsock
+  $ CHGSOCKNAME=$OLDCHGSOCKNAME
+  $ cd ..
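The lifecycle test above depends on the server noticing, within its one-second polling interval, that a loaded extension file changed on disk. A rough sketch of that kind of mtime polling, offered only as an illustration of the mechanism the test exercises (not chg's actual implementation):

import os
import time

def watch_mtimes(paths, interval=1.0):
    # return the first path whose mtime changes; a command server would then
    # unlink its stale socket and let the client start a fresh server
    stamps = {p: os.stat(p).st_mtime for p in paths}
    while True:
        time.sleep(interval)
        for path, old in stamps.items():
            if os.stat(path).st_mtime != old:
                return path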
--- a/tests/test-command-template.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-command-template.t	Tue Jun 14 14:52:58 2016 -0500
@@ -3320,6 +3320,15 @@
   hg: parse error: pad() expects an integer width
   [255]
 
+Test separate function
+
+  $ hg log -r 0 -T '{separate("-", "", "a", "b", "", "", "c", "")}\n'
+  a-b-c
+  $ hg log -r 0 -T '{separate(" ", "{rev}:{node|short}", author|user, branch)}\n'
+  0:f7769ec2ab97 test default
+  $ hg log -r 0 --color=always -T '{separate(" ", "a", label(red, "b"), "c", label(red, ""), "d")}\n'
+  a \x1b[0;31mb\x1b[0m c d (esc)
+
 Test ifcontains function
 
   $ hg log --template '{rev} {ifcontains(rev, "2 two 0", "is in the string", "is not")}\n'
@@ -3768,10 +3777,10 @@
   $ hg debugtemplate --config templatealias.bad='x(' -v '{bad}'
   (template
     ('symbol', 'bad'))
-  abort: failed to parse the definition of template alias "bad": at 2: not a prefix: end
+  abort: bad definition of template alias "bad": at 2: not a prefix: end
   [255]
   $ hg log --config templatealias.bad='x(' -T '{bad}'
-  abort: failed to parse the definition of template alias "bad": at 2: not a prefix: end
+  abort: bad definition of template alias "bad": at 2: not a prefix: end
   [255]
 
   $ cd ..
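As background for the separate() tests added earlier in this file's diff: the function joins its arguments with the given separator while dropping arguments that render empty, which is why the first test prints "a-b-c". A rough Python equivalent, written here only as an illustrative assumption about the semantics:

def separate(sep, *args):
    # join non-empty arguments with the separator, skipping empty ones
    return sep.join(a for a in args if a != '')

assert separate('-', '', 'a', 'b', '', '', 'c', '') == 'a-b-c'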
--- a/tests/test-context.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-context.py	Tue Jun 14 14:52:58 2016 -0500
@@ -14,7 +14,7 @@
 
 # create 'foo' with fixed time stamp
 f = open('foo', 'wb')
-f.write('foo\n')
+f.write(b'foo\n')
 f.close()
 os.utime('foo', (1000, 1000))
 
--- a/tests/test-contrib-check-code.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-contrib-check-code.t	Tue Jun 14 14:52:58 2016 -0500
@@ -248,3 +248,43 @@
    > {desc|escape}
    warning: follow desc keyword with either firstline or websub
   [1]
+
+'string join across lines with no space' detection
+
+  $ cat > stringjoin.py <<EOF
+  > foo = (' foo'
+  >        'bar foo.'
+  >        'bar foo:'
+  >        'bar foo@'
+  >        'bar foo%'
+  >        'bar foo*'
+  >        'bar foo+'
+  >        'bar foo-'
+  >        'bar')
+  > EOF
+  $ "$check_code" stringjoin.py
+  stringjoin.py:1:
+   > foo = (' foo'
+   string join across lines with no space
+  stringjoin.py:2:
+   >        'bar foo.'
+   string join across lines with no space
+  stringjoin.py:3:
+   >        'bar foo:'
+   string join across lines with no space
+  stringjoin.py:4:
+   >        'bar foo@'
+   string join across lines with no space
+  stringjoin.py:5:
+   >        'bar foo%'
+   string join across lines with no space
+  stringjoin.py:6:
+   >        'bar foo*'
+   string join across lines with no space
+  stringjoin.py:7:
+   >        'bar foo+'
+   string join across lines with no space
+  stringjoin.py:8:
+   >        'bar foo-'
+   string join across lines with no space
+  [1]
--- a/tests/test-contrib-perf.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-contrib-perf.t	Tue Jun 14 14:52:58 2016 -0500
@@ -2,6 +2,7 @@
 
 Set vars:
 
+  $ . "$TESTDIR/helpers-testrepo.sh"
   $ CONTRIBDIR="$TESTDIR/../contrib"
 
 Prepare repo:
--- a/tests/test-convert.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-convert.t	Tue Jun 14 14:52:58 2016 -0500
@@ -422,7 +422,7 @@
   assuming destination emptydir-hg
   initializing destination emptydir-hg repository
   emptydir does not look like a CVS checkout
-  $TESTTMP/emptydir does not look like a Git repository
+  $TESTTMP/emptydir does not look like a Git repository (glob)
   emptydir does not look like a Subversion repository
   emptydir is not a local Mercurial repository
   emptydir does not look like a darcs repository
--- a/tests/test-debian-packages.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-debian-packages.t	Tue Jun 14 14:52:58 2016 -0500
@@ -1,5 +1,7 @@
 #require test-repo slow debhelper
 
+  $ . "$TESTDIR/helpers-testrepo.sh"
+
 Ensure debuild doesn't run the testsuite, as that could get silly.
   $ DEB_BUILD_OPTIONS=nocheck
   $ export DEB_BUILD_OPTIONS
--- a/tests/test-debugbundle.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-debugbundle.t	Tue Jun 14 14:52:58 2016 -0500
@@ -13,6 +13,13 @@
        282 (manifests)
         93  b
         93  c
+  $ hg bundle --base 0 --rev tip bundle2.hg -v --type none-v2
+  2 changesets found
+  uncompressed size of bundle content:
+       372 (changelog)
+       322 (manifests)
+       113  b
+       113  c
 
 Terse output:
 
@@ -20,6 +27,14 @@
   0e067c57feba1a5694ca4844f05588bb1bf82342
   991a3460af53952d10ec8a295d3d2cc2e5fa9690
 
+Terse output (bundle2):
+
+  $ hg debugbundle bundle2.hg
+  Stream params: {}
+  changegroup -- "{'version': '02'}"
+      0e067c57feba1a5694ca4844f05588bb1bf82342
+      991a3460af53952d10ec8a295d3d2cc2e5fa9690
+
 Verbose output:
 
   $ hg debugbundle --all bundle.hg
@@ -39,4 +54,23 @@
   c
   b80de5d138758541c5f05265ad144ab9fa86d1db 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 991a3460af53952d10ec8a295d3d2cc2e5fa9690 0000000000000000000000000000000000000000 0
 
+  $ hg debugbundle --all bundle2.hg
+  Stream params: {}
+  changegroup -- "{'version': '02'}"
+      format: id, p1, p2, cset, delta base, len(delta)
+  
+      changelog
+      0e067c57feba1a5694ca4844f05588bb1bf82342 3903775176ed42b1458a6281db4a0ccf4d9f287a 0000000000000000000000000000000000000000 0e067c57feba1a5694ca4844f05588bb1bf82342 3903775176ed42b1458a6281db4a0ccf4d9f287a 80
+      991a3460af53952d10ec8a295d3d2cc2e5fa9690 0e067c57feba1a5694ca4844f05588bb1bf82342 0000000000000000000000000000000000000000 991a3460af53952d10ec8a295d3d2cc2e5fa9690 0e067c57feba1a5694ca4844f05588bb1bf82342 80
+  
+      manifest
+      686dbf0aeca417636fa26a9121c681eabbb15a20 8515d4bfda768e04af4c13a69a72e28c7effbea7 0000000000000000000000000000000000000000 0e067c57feba1a5694ca4844f05588bb1bf82342 8515d4bfda768e04af4c13a69a72e28c7effbea7 55
+      ae25a31b30b3490a981e7b96a3238cc69583fda1 686dbf0aeca417636fa26a9121c681eabbb15a20 0000000000000000000000000000000000000000 991a3460af53952d10ec8a295d3d2cc2e5fa9690 686dbf0aeca417636fa26a9121c681eabbb15a20 55
+  
+      b
+      b80de5d138758541c5f05265ad144ab9fa86d1db 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 0e067c57feba1a5694ca4844f05588bb1bf82342 0000000000000000000000000000000000000000 0
+  
+      c
+      b80de5d138758541c5f05265ad144ab9fa86d1db 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 991a3460af53952d10ec8a295d3d2cc2e5fa9690 0000000000000000000000000000000000000000 0
+
   $ cd ..
--- a/tests/test-devel-warnings.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-devel-warnings.t	Tue Jun 14 14:52:58 2016 -0500
@@ -10,14 +10,17 @@
   > 
   > @command('buggylocking', [], '')
   > def buggylocking(ui, repo):
-  >     tr = repo.transaction('buggy')
-  >     # make sure we rollback the transaction as we don't want to rely on the__del__
-  >     tr.release()
   >     lo = repo.lock()
   >     wl = repo.wlock()
   >     wl.release()
   >     lo.release()
   > 
+  > @command('buggytransaction', [], '')
+  > def buggylocking(ui, repo):
+  >     tr = repo.transaction('buggy')
+  >     # make sure we roll back the transaction as we don't want to rely on the __del__
+  >     tr.release()
+  > 
   > @command('properlocking', [], '')
   > def properlocking(ui, repo):
   >     """check that reentrance is fine"""
@@ -74,7 +77,6 @@
   $ hg init lock-checker
   $ cd lock-checker
   $ hg buggylocking
-  devel-warn: transaction with no lock at: $TESTTMP/buggylocking.py:* (buggylocking) (glob)
   devel-warn: "wlock" acquired after "lock" at: $TESTTMP/buggylocking.py:* (buggylocking) (glob)
   $ cat << EOF >> $HGRCPATH
   > [devel]
@@ -82,21 +84,8 @@
   > check-locks=1
   > EOF
   $ hg buggylocking
-  devel-warn: transaction with no lock at: $TESTTMP/buggylocking.py:* (buggylocking) (glob)
   devel-warn: "wlock" acquired after "lock" at: $TESTTMP/buggylocking.py:* (buggylocking) (glob)
   $ hg buggylocking --traceback
-  devel-warn: transaction with no lock at:
-   */hg:* in * (glob)
-   */mercurial/dispatch.py:* in run (glob)
-   */mercurial/dispatch.py:* in dispatch (glob)
-   */mercurial/dispatch.py:* in _runcatch (glob)
-   */mercurial/dispatch.py:* in _dispatch (glob)
-   */mercurial/dispatch.py:* in runcommand (glob)
-   */mercurial/dispatch.py:* in _runcommand (glob)
-   */mercurial/dispatch.py:* in checkargs (glob)
-   */mercurial/dispatch.py:* in <lambda> (glob)
-   */mercurial/util.py:* in check (glob)
-   $TESTTMP/buggylocking.py:* in buggylocking (glob)
   devel-warn: "wlock" acquired after "lock" at:
    */hg:* in * (glob)
    */mercurial/dispatch.py:* in run (glob)
@@ -122,7 +111,8 @@
   [255]
 
   $ hg log -r "oldstyle()" -T '{rev}\n'
-  devel-warn: revset "oldstyle" use list instead of smartset, (upgrade your code) at: */mercurial/revset.py:* (mfunc) (glob)
+  devel-warn: revset "oldstyle" uses list instead of smartset
+  (compatibility will be dropped after Mercurial-3.9, update your code.) at: *mercurial/revset.py:* (mfunc) (glob)
   0
   $ hg oldanddeprecated
   devel-warn: foorbar is deprecated, go shopping
@@ -143,7 +133,8 @@
    */mercurial/util.py:* in check (glob)
    $TESTTMP/buggylocking.py:* in oldanddeprecated (glob)
   $ hg blackbox -l 9
-  1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> devel-warn: revset "oldstyle" use list instead of smartset, (upgrade your code) at: */mercurial/revset.py:* (mfunc) (glob)
+  1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> devel-warn: revset "oldstyle" uses list instead of smartset
+  (compatibility will be dropped after Mercurial-3.9, update your code.) at: *mercurial/revset.py:* (mfunc) (glob)
   1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> log -r oldstyle() -T {rev}\n exited 0 after * seconds (glob)
   1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> oldanddeprecated
   1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> devel-warn: foorbar is deprecated, go shopping
@@ -165,4 +156,18 @@
    $TESTTMP/buggylocking.py:* in oldanddeprecated (glob)
   1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> oldanddeprecated --traceback exited 0 after * seconds (glob)
   1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> blackbox -l 9
+
+Test programming error failure:
+
+  $ hg buggytransaction 2>&1 | egrep -v '^  '
+  ** Unknown exception encountered with possibly-broken third-party extension buggylocking
+  ** which supports versions unknown of Mercurial.
+  ** Please disable buggylocking and try your action again.
+  ** If that fixes the bug please report it to the extension author.
+  ** Python * (glob)
+  ** Mercurial Distributed SCM (*) (glob)
+  ** Extensions loaded: * (glob)
+  Traceback (most recent call last):
+  RuntimeError: programming error: transaction requires locking
+
   $ cd ..
--- a/tests/test-docker-packaging.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-docker-packaging.t	Tue Jun 14 14:52:58 2016 -0500
@@ -1,5 +1,7 @@
 #require test-repo slow docker
 
+  $ . "$TESTDIR/helpers-testrepo.sh"
+
 Ensure debuild doesn't run the testsuite, as that could get silly.
   $ DEB_BUILD_OPTIONS=nocheck
   $ export DEB_BUILD_OPTIONS
--- a/tests/test-extension.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-extension.t	Tue Jun 14 14:52:58 2016 -0500
@@ -958,7 +958,7 @@
   ** Extensions loaded: throw, older
 
 Declare the version as supporting this hg version, show regular bts link:
-  $ hgver=`$PYTHON -c 'from mercurial import util; print util.version().split("+")[0]'`
+  $ hgver=`hg debuginstall -T '{hgver}'`
   $ echo 'testedwith = """'"$hgver"'"""' >> throw.py
   $ if [ -z "$hgver" ]; then
   >   echo "unable to fetch a mercurial version. Make sure __version__ is correct";
--- a/tests/test-filelog.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-filelog.py	Tue Jun 14 14:52:58 2016 -0500
@@ -3,14 +3,15 @@
 Tests the behavior of filelog w.r.t. data starting with '\1\n'
 """
 from __future__ import absolute_import, print_function
+
+from mercurial.node import (
+    hex,
+    nullid,
+)
 from mercurial import (
     hg,
     ui as uimod,
 )
-from mercurial.node import (
-    hex,
-    nullid,
-)
 
 myui = uimod.ui()
 repo = hg.repository(myui, path='.', create=True)
--- a/tests/test-glog-topological.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-glog-topological.t	Tue Jun 14 14:52:58 2016 -0500
@@ -40,7 +40,7 @@
 
 (display all nodes)
 
-  $ hg --config experimental.graph-group-branches=1 log -G
+  $ hg log -G -r 'sort(all(), topo)'
   o  8
   |
   o  3
@@ -62,7 +62,7 @@
 
 (revset skipping nodes)
 
-  $ hg --config experimental.graph-group-branches=1 log -G --rev 'not (2+6)'
+  $ hg log -G --rev 'sort(not (2+6), topo)'
   o  8
   |
   o  3
@@ -80,7 +80,7 @@
 
 (begin) from the other branch
 
-  $ hg --config experimental.graph-group-branches=1 --config experimental.graph-group-branches.firstbranch=5 log -G
+  $ hg log -G -r 'sort(all(), topo, topo.firstbranch=5)'
   o  7
   |
   o  6
--- a/tests/test-glog.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-glog.t	Tue Jun 14 14:52:58 2016 -0500
@@ -3036,7 +3036,229 @@
        date:        Thu Jan 01 00:00:04 1970 +0000
        summary:     (4) merge two known; one immediate left, one immediate right
   
+Draw only part of a grandparent line differently with "<N><char>"; only the
+last N lines (for positive N) or everything but the first N lines (for
+negative N) along the current node use the style; the rest of the edge uses
+the parent edge styling.
 
+Last 3 lines:
+
+  $ cat << EOF >> $HGRCPATH
+  > [experimental]
+  > graphstyle.parent = !
+  > graphstyle.grandparent = 3.
+  > graphstyle.missing =
+  > EOF
+  $ hg log -G -r '36:18 & file("a")' -m
+  @  changeset:   36:08a19a744424
+  !  branch:      branch
+  !  tag:         tip
+  !  parent:      35:9159c3644c5e
+  !  parent:      35:9159c3644c5e
+  !  user:        test
+  .  date:        Thu Jan 01 00:00:36 1970 +0000
+  .  summary:     (36) buggy merge: identical parents
+  .
+  o    changeset:   32:d06dffa21a31
+  !\   parent:      27:886ed638191b
+  ! !  parent:      31:621d83e11f67
+  ! !  user:        test
+  ! .  date:        Thu Jan 01 00:00:32 1970 +0000
+  ! .  summary:     (32) expand
+  ! .
+  o !  changeset:   31:621d83e11f67
+  !\!  parent:      21:d42a756af44d
+  ! !  parent:      30:6e11cd4b648f
+  ! !  user:        test
+  ! !  date:        Thu Jan 01 00:00:31 1970 +0000
+  ! !  summary:     (31) expand
+  ! !
+  o !    changeset:   30:6e11cd4b648f
+  !\ \   parent:      28:44ecd0b9ae99
+  ! ~ !  parent:      29:cd9bb2be7593
+  !   !  user:        test
+  !   !  date:        Thu Jan 01 00:00:30 1970 +0000
+  !   !  summary:     (30) expand
+  !  /
+  o !    changeset:   28:44ecd0b9ae99
+  !\ \   parent:      1:6db2ef61d156
+  ! ~ !  parent:      26:7f25b6c2f0b9
+  !   !  user:        test
+  !   !  date:        Thu Jan 01 00:00:28 1970 +0000
+  !   !  summary:     (28) merge zero known
+  !  /
+  o !    changeset:   26:7f25b6c2f0b9
+  !\ \   parent:      18:1aa84d96232a
+  ! ! !  parent:      25:91da8ed57247
+  ! ! !  user:        test
+  ! ! !  date:        Thu Jan 01 00:00:26 1970 +0000
+  ! ! !  summary:     (26) merge one known; far right
+  ! ! !
+  ! o !  changeset:   25:91da8ed57247
+  ! !\!  parent:      21:d42a756af44d
+  ! ! !  parent:      24:a9c19a3d96b7
+  ! ! !  user:        test
+  ! ! !  date:        Thu Jan 01 00:00:25 1970 +0000
+  ! ! !  summary:     (25) merge one known; far left
+  ! ! !
+  ! o !    changeset:   24:a9c19a3d96b7
+  ! !\ \   parent:      0:e6eb3150255d
+  ! ! ~ !  parent:      23:a01cddf0766d
+  ! !   !  user:        test
+  ! !   !  date:        Thu Jan 01 00:00:24 1970 +0000
+  ! !   !  summary:     (24) merge one known; immediate right
+  ! !  /
+  ! o !    changeset:   23:a01cddf0766d
+  ! !\ \   parent:      1:6db2ef61d156
+  ! ! ~ !  parent:      22:e0d9cccacb5d
+  ! !   !  user:        test
+  ! !   !  date:        Thu Jan 01 00:00:23 1970 +0000
+  ! !   !  summary:     (23) merge one known; immediate left
+  ! !  /
+  ! o !  changeset:   22:e0d9cccacb5d
+  !/!/   parent:      18:1aa84d96232a
+  ! !    parent:      21:d42a756af44d
+  ! !    user:        test
+  ! !    date:        Thu Jan 01 00:00:22 1970 +0000
+  ! !    summary:     (22) merge two known; one far left, one far right
+  ! !
+  ! o    changeset:   21:d42a756af44d
+  ! !\   parent:      19:31ddc2c1573b
+  ! ! !  parent:      20:d30ed6450e32
+  ! ! !  user:        test
+  ! ! !  date:        Thu Jan 01 00:00:21 1970 +0000
+  ! ! !  summary:     (21) expand
+  ! ! !
+  +---o  changeset:   20:d30ed6450e32
+  ! ! |  parent:      0:e6eb3150255d
+  ! ! ~  parent:      18:1aa84d96232a
+  ! !    user:        test
+  ! !    date:        Thu Jan 01 00:00:20 1970 +0000
+  ! !    summary:     (20) merge two known; two far right
+  ! !
+  ! o    changeset:   19:31ddc2c1573b
+  ! |\   parent:      15:1dda3f72782d
+  ! ~ ~  parent:      17:44765d7c06e0
+  !      user:        test
+  !      date:        Thu Jan 01 00:00:19 1970 +0000
+  !      summary:     (19) expand
+  !
+  o    changeset:   18:1aa84d96232a
+  |\   parent:      1:6db2ef61d156
+  ~ ~  parent:      15:1dda3f72782d
+       user:        test
+       date:        Thu Jan 01 00:00:18 1970 +0000
+       summary:     (18) merge two known; two far left
+  
+All but the first 3 lines:
+
+  $ cat << EOF >> $HGRCPATH
+  > [experimental]
+  > graphstyle.parent = !
+  > graphstyle.grandparent = -3.
+  > graphstyle.missing =
+  > EOF
+  $ hg log -G -r '36:18 & file("a")' -m
+  @  changeset:   36:08a19a744424
+  !  branch:      branch
+  !  tag:         tip
+  .  parent:      35:9159c3644c5e
+  .  parent:      35:9159c3644c5e
+  .  user:        test
+  .  date:        Thu Jan 01 00:00:36 1970 +0000
+  .  summary:     (36) buggy merge: identical parents
+  .
+  o    changeset:   32:d06dffa21a31
+  !\   parent:      27:886ed638191b
+  ! !  parent:      31:621d83e11f67
+  ! .  user:        test
+  ! .  date:        Thu Jan 01 00:00:32 1970 +0000
+  ! .  summary:     (32) expand
+  ! .
+  o !  changeset:   31:621d83e11f67
+  !\!  parent:      21:d42a756af44d
+  ! !  parent:      30:6e11cd4b648f
+  ! !  user:        test
+  ! !  date:        Thu Jan 01 00:00:31 1970 +0000
+  ! !  summary:     (31) expand
+  ! !
+  o !    changeset:   30:6e11cd4b648f
+  !\ \   parent:      28:44ecd0b9ae99
+  ! ~ !  parent:      29:cd9bb2be7593
+  !   !  user:        test
+  !   !  date:        Thu Jan 01 00:00:30 1970 +0000
+  !   !  summary:     (30) expand
+  !  /
+  o !    changeset:   28:44ecd0b9ae99
+  !\ \   parent:      1:6db2ef61d156
+  ! ~ !  parent:      26:7f25b6c2f0b9
+  !   !  user:        test
+  !   !  date:        Thu Jan 01 00:00:28 1970 +0000
+  !   !  summary:     (28) merge zero known
+  !  /
+  o !    changeset:   26:7f25b6c2f0b9
+  !\ \   parent:      18:1aa84d96232a
+  ! ! !  parent:      25:91da8ed57247
+  ! ! !  user:        test
+  ! ! !  date:        Thu Jan 01 00:00:26 1970 +0000
+  ! ! !  summary:     (26) merge one known; far right
+  ! ! !
+  ! o !  changeset:   25:91da8ed57247
+  ! !\!  parent:      21:d42a756af44d
+  ! ! !  parent:      24:a9c19a3d96b7
+  ! ! !  user:        test
+  ! ! !  date:        Thu Jan 01 00:00:25 1970 +0000
+  ! ! !  summary:     (25) merge one known; far left
+  ! ! !
+  ! o !    changeset:   24:a9c19a3d96b7
+  ! !\ \   parent:      0:e6eb3150255d
+  ! ! ~ !  parent:      23:a01cddf0766d
+  ! !   !  user:        test
+  ! !   !  date:        Thu Jan 01 00:00:24 1970 +0000
+  ! !   !  summary:     (24) merge one known; immediate right
+  ! !  /
+  ! o !    changeset:   23:a01cddf0766d
+  ! !\ \   parent:      1:6db2ef61d156
+  ! ! ~ !  parent:      22:e0d9cccacb5d
+  ! !   !  user:        test
+  ! !   !  date:        Thu Jan 01 00:00:23 1970 +0000
+  ! !   !  summary:     (23) merge one known; immediate left
+  ! !  /
+  ! o !  changeset:   22:e0d9cccacb5d
+  !/!/   parent:      18:1aa84d96232a
+  ! !    parent:      21:d42a756af44d
+  ! !    user:        test
+  ! !    date:        Thu Jan 01 00:00:22 1970 +0000
+  ! !    summary:     (22) merge two known; one far left, one far right
+  ! !
+  ! o    changeset:   21:d42a756af44d
+  ! !\   parent:      19:31ddc2c1573b
+  ! ! !  parent:      20:d30ed6450e32
+  ! ! !  user:        test
+  ! ! !  date:        Thu Jan 01 00:00:21 1970 +0000
+  ! ! !  summary:     (21) expand
+  ! ! !
+  +---o  changeset:   20:d30ed6450e32
+  ! ! |  parent:      0:e6eb3150255d
+  ! ! ~  parent:      18:1aa84d96232a
+  ! !    user:        test
+  ! !    date:        Thu Jan 01 00:00:20 1970 +0000
+  ! !    summary:     (20) merge two known; two far right
+  ! !
+  ! o    changeset:   19:31ddc2c1573b
+  ! |\   parent:      15:1dda3f72782d
+  ! ~ ~  parent:      17:44765d7c06e0
+  !      user:        test
+  !      date:        Thu Jan 01 00:00:19 1970 +0000
+  !      summary:     (19) expand
+  !
+  o    changeset:   18:1aa84d96232a
+  |\   parent:      1:6db2ef61d156
+  ~ ~  parent:      15:1dda3f72782d
+       user:        test
+       date:        Thu Jan 01 00:00:18 1970 +0000
+       summary:     (18) merge two known; two far left
+  
   $ cd ..
 
 Change graph shorten, test better with graphstyle.missing not none
--- a/tests/test-help.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-help.t	Tue Jun 14 14:52:58 2016 -0500
@@ -260,7 +260,6 @@
        extdiff       command to allow external programs to compare revisions
        factotum      http authentication with factotum
        gpg           commands to sign and verify changesets
-       hgcia         hooks for integrating with the CIA.vc notification service
        hgk           browse the repository in a graphical way
        highlight     syntax highlighting for hgweb (requires Pygments)
        histedit      interactive history editing
@@ -429,6 +428,22 @@
    -h --help              display help and exit
       --hidden            consider hidden changesets
 
+Test the textwidth config option
+
+  $ hg root -h  --config ui.textwidth=50
+  hg root
+  
+  print the root (top) of the current working
+  directory
+  
+      Print the root directory of the current
+      repository.
+  
+      Returns 0 on success.
+  
+  (some details hidden, use --verbose to show
+  complete help)
+
 Test help option with version option
 
   $ hg add -h --version
--- a/tests/test-hgcia.t	Tue Jun 07 08:32:33 2016 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,94 +0,0 @@
-Test the CIA extension
-
-  $ cat >> $HGRCPATH <<EOF
-  > [extensions]
-  > hgcia=
-  > 
-  > [hooks]
-  > changegroup.cia = python:hgext.hgcia.hook
-  > 
-  > [web]
-  > baseurl = http://hgserver/
-  > 
-  > [cia]
-  > user = testuser
-  > project = testproject
-  > test = True
-  > EOF
-
-  $ hg init src
-  $ hg init cia
-  $ cd src
-  $ echo foo > foo
-  $ hg ci -Amfoo
-  adding foo
-  $ hg push ../cia
-  pushing to ../cia
-  searching for changes
-  adding changesets
-  adding manifests
-  adding file changes
-  added 1 changesets with 1 changes to 1 files
-  
-  <message>
-    <generator>
-      <name>Mercurial (hgcia)</name>
-      <version>0.1</version>
-      <url>http://hg.kublai.com/mercurial/hgcia</url>
-      <user>testuser</user>
-    </generator>
-    <source>
-  <project>testproject</project>
-  <branch>default</branch>
-  </source>
-    <body>
-      <commit>
-        <author>test</author>
-        <version>0:e63c23eaa88a</version>
-        <log>foo</log>
-        <url>http://hgserver/rev/e63c23eaa88a</url>
-        <files><file uri="http://hgserver/file/e63c23eaa88a/foo" action="add">foo</file></files>
-      </commit>
-    </body>
-    <timestamp>0</timestamp>
-  </message>
-
-  $ cat >> $HGRCPATH <<EOF
-  > strip = 0
-  > EOF
-
-  $ echo bar > bar
-  $ hg ci -Ambar
-  adding bar
-  $ hg push ../cia
-  pushing to ../cia
-  searching for changes
-  adding changesets
-  adding manifests
-  adding file changes
-  added 1 changesets with 1 changes to 1 files
-  
-  <message>
-    <generator>
-      <name>Mercurial (hgcia)</name>
-      <version>0.1</version>
-      <url>http://hg.kublai.com/mercurial/hgcia</url>
-      <user>testuser</user>
-    </generator>
-    <source>
-  <project>testproject</project>
-  <branch>default</branch>
-  </source>
-    <body>
-      <commit>
-        <author>test</author>
-        <version>1:c0c7cf58edc5</version>
-        <log>bar</log>
-        <url>http://hgserver/$TESTTMP/cia/rev/c0c7cf58edc5</url>
-        <files><file uri="http://hgserver/$TESTTMP/cia/file/c0c7cf58edc5/bar" action="add">bar</file></files>
-      </commit>
-    </body>
-    <timestamp>0</timestamp>
-  </message>
-
-  $ cd ..
--- a/tests/test-histedit-arguments.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-histedit-arguments.t	Tue Jun 14 14:52:58 2016 -0500
@@ -499,3 +499,53 @@
   > EOF
   $ hg commit --amend -m 'allow this fold'
   $ hg histedit --continue
+
+  $ cd ..
+
+Test autoverb feature
+
+  $ hg init autoverb
+  $ cd autoverb
+  $ echo alpha >> alpha
+  $ hg addr
+  adding alpha
+  $ hg ci -m one
+  $ echo alpha >> alpha
+  $ hg ci -m two
+  $ echo alpha >> alpha
+  $ hg ci -m "roll! three"
+
+  $ hg log --style compact --graph
+  @  2[tip]   1b0b0b04c8fe   1970-01-01 00:00 +0000   test
+  |    roll! three
+  |
+  o  1   579e40513370   1970-01-01 00:00 +0000   test
+  |    two
+  |
+  o  0   6058cbb6cfd7   1970-01-01 00:00 +0000   test
+       one
+  
+
+Check that 'roll' is selected by default
+
+  $ HGEDITOR=cat hg histedit 1 --config experimental.histedit.autoverb=True
+  pick 579e40513370 1 two
+  roll 1b0b0b04c8fe 2 roll! three
+  
+  # Edit history between 579e40513370 and 1b0b0b04c8fe
+  #
+  # Commits are listed from least to most recent
+  #
+  # You can reorder changesets by reordering the lines
+  #
+  # Commands:
+  #
+  #  e, edit = use commit, but stop for amending
+  #  m, mess = edit commit message without changing commit content
+  #  p, pick = use commit
+  #  d, drop = remove commit from history
+  #  f, fold = use commit, but combine it with the one above
+  #  r, roll = like fold, but discard this commit's description
+  #
+
+  $ cd ..
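The autoverb test above pre-selects 'roll' because the commit summary begins with "roll!". A hedged sketch of that mapping, stated as an assumption for illustration rather than histedit's actual rule-generation code:

KNOWN_VERBS = ('pick', 'edit', 'mess', 'drop', 'fold', 'roll')

def autoverb(summary, default='pick'):
    # if the summary's first word names a histedit action followed by '!',
    # use that action instead of the default verb for the generated rule
    words = summary.split(None, 1)
    first = words[0] if words else ''
    if first.endswith('!') and first[:-1] in KNOWN_VERBS:
        return first[:-1]
    return default

assert autoverb('roll! three') == 'roll'
assert autoverb('two') == 'pick'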
--- a/tests/test-histedit-base.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-histedit-base.t	Tue Jun 14 14:52:58 2016 -0500
@@ -39,7 +39,10 @@
   |/
   o  0:cd010b8cd998f3981a5a8115f94f8da4ab506089:draft 'A'
   
+Verify that implicit base command and help are listed
 
+  $ HGEDITOR=cat hg histedit |grep base
+  #  b, base = checkout changeset and apply further changesets from there
 
 Go to D
   $ hg update 3
--- a/tests/test-https.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-https.t	Tue Jun 14 14:52:58 2016 -0500
@@ -2,131 +2,13 @@
 
 Proper https client requires the built-in ssl from Python 2.6.
 
-Certificates created with:
- printf '.\n.\n.\n.\n.\nlocalhost\nhg@localhost\n' | \
- openssl req -newkey rsa:512 -keyout priv.pem -nodes -x509 -days 9000 -out pub.pem
-Can be dumped with:
- openssl x509 -in pub.pem -text
-
-  $ cat << EOT > priv.pem
-  > -----BEGIN PRIVATE KEY-----
-  > MIIBVAIBADANBgkqhkiG9w0BAQEFAASCAT4wggE6AgEAAkEApjCWeYGrIa/Vo7LH
-  > aRF8ou0tbgHKE33Use/whCnKEUm34rDaXQd4lxxX6aDWg06n9tiVStAKTgQAHJY8
-  > j/xgSwIDAQABAkBxHC6+Qlf0VJXGlb6NL16yEVVTQxqDS6hA9zqu6TZjrr0YMfzc
-  > EGNIiZGt7HCBL0zO+cPDg/LeCZc6HQhf0KrhAiEAzlJq4hWWzvguWFIJWSoBeBUG
-  > MF1ACazQO7PYE8M0qfECIQDONHHP0SKZzz/ZwBZcAveC5K61f/v9hONFwbeYulzR
-  > +wIgc9SvbtgB/5Yzpp//4ZAEnR7oh5SClCvyB+KSx52K3nECICbhQphhoXmI10wy
-  > aMTellaq0bpNMHFDziqH9RsqAHhjAiEAgYGxfzkftt5IUUn/iFK89aaIpyrpuaAh
-  > HY8gUVkVRVs=
-  > -----END PRIVATE KEY-----
-  > EOT
-
-  $ cat << EOT > pub.pem
-  > -----BEGIN CERTIFICATE-----
-  > MIIBqzCCAVWgAwIBAgIJANAXFFyWjGnRMA0GCSqGSIb3DQEBBQUAMDExEjAQBgNV
-  > BAMMCWxvY2FsaG9zdDEbMBkGCSqGSIb3DQEJARYMaGdAbG9jYWxob3N0MB4XDTEw
-  > MTAxNDIwMzAxNFoXDTM1MDYwNTIwMzAxNFowMTESMBAGA1UEAwwJbG9jYWxob3N0
-  > MRswGQYJKoZIhvcNAQkBFgxoZ0Bsb2NhbGhvc3QwXDANBgkqhkiG9w0BAQEFAANL
-  > ADBIAkEApjCWeYGrIa/Vo7LHaRF8ou0tbgHKE33Use/whCnKEUm34rDaXQd4lxxX
-  > 6aDWg06n9tiVStAKTgQAHJY8j/xgSwIDAQABo1AwTjAdBgNVHQ4EFgQUE6sA+amm
-  > r24dGX0kpjxOgO45hzQwHwYDVR0jBBgwFoAUE6sA+ammr24dGX0kpjxOgO45hzQw
-  > DAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQUFAANBAFArvQFiAZJgQczRsbYlG1xl
-  > t+truk37w5B3m3Ick1ntRcQrqs+hf0CO1q6Squ144geYaQ8CDirSR92fICELI1c=
-  > -----END CERTIFICATE-----
-  > EOT
-  $ cat priv.pem pub.pem >> server.pem
-  $ PRIV=`pwd`/server.pem
-
-  $ cat << EOT > pub-other.pem
-  > -----BEGIN CERTIFICATE-----
-  > MIIBqzCCAVWgAwIBAgIJALwZS731c/ORMA0GCSqGSIb3DQEBBQUAMDExEjAQBgNV
-  > BAMMCWxvY2FsaG9zdDEbMBkGCSqGSIb3DQEJARYMaGdAbG9jYWxob3N0MB4XDTEw
-  > MTAxNDIwNDUxNloXDTM1MDYwNTIwNDUxNlowMTESMBAGA1UEAwwJbG9jYWxob3N0
-  > MRswGQYJKoZIhvcNAQkBFgxoZ0Bsb2NhbGhvc3QwXDANBgkqhkiG9w0BAQEFAANL
-  > ADBIAkEAsxsapLbHrqqUKuQBxdpK4G3m2LjtyrTSdpzzzFlecxd5yhNP6AyWrufo
-  > K4VMGo2xlu9xOo88nDSUNSKPuD09MwIDAQABo1AwTjAdBgNVHQ4EFgQUoIB1iMhN
-  > y868rpQ2qk9dHnU6ebswHwYDVR0jBBgwFoAUoIB1iMhNy868rpQ2qk9dHnU6ebsw
-  > DAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQUFAANBAJ544f125CsE7J2t55PdFaF6
-  > bBlNBb91FCywBgSjhBjf+GG3TNPwrPdc3yqeq+hzJiuInqbOBv9abmMyq8Wsoig=
-  > -----END CERTIFICATE-----
-  > EOT
-
-pub.pem patched with other notBefore / notAfter:
+Make server certificates:
 
-  $ cat << EOT > pub-not-yet.pem
-  > -----BEGIN CERTIFICATE-----
-  > MIIBqzCCAVWgAwIBAgIJANAXFFyWjGnRMA0GCSqGSIb3DQEBBQUAMDExEjAQBgNVBAMMCWxvY2Fs
-  > aG9zdDEbMBkGCSqGSIb3DQEJARYMaGdAbG9jYWxob3N0MB4XDTM1MDYwNTIwMzAxNFoXDTM1MDYw
-  > NTIwMzAxNFowMTESMBAGA1UEAwwJbG9jYWxob3N0MRswGQYJKoZIhvcNAQkBFgxoZ0Bsb2NhbGhv
-  > c3QwXDANBgkqhkiG9w0BAQEFAANLADBIAkEApjCWeYGrIa/Vo7LHaRF8ou0tbgHKE33Use/whCnK
-  > EUm34rDaXQd4lxxX6aDWg06n9tiVStAKTgQAHJY8j/xgSwIDAQABo1AwTjAdBgNVHQ4EFgQUE6sA
-  > +ammr24dGX0kpjxOgO45hzQwHwYDVR0jBBgwFoAUE6sA+ammr24dGX0kpjxOgO45hzQwDAYDVR0T
-  > BAUwAwEB/zANBgkqhkiG9w0BAQUFAANBAJXV41gWnkgC7jcpPpFRSUSZaxyzrXmD1CIqQf0WgVDb
-  > /12E0vR2DuZitgzUYtBaofM81aTtc0a2/YsrmqePGm0=
-  > -----END CERTIFICATE-----
-  > EOT
-  $ cat priv.pem pub-not-yet.pem > server-not-yet.pem
-
-  $ cat << EOT > pub-expired.pem
-  > -----BEGIN CERTIFICATE-----
-  > MIIBqzCCAVWgAwIBAgIJANAXFFyWjGnRMA0GCSqGSIb3DQEBBQUAMDExEjAQBgNVBAMMCWxvY2Fs
-  > aG9zdDEbMBkGCSqGSIb3DQEJARYMaGdAbG9jYWxob3N0MB4XDTEwMTAxNDIwMzAxNFoXDTEwMTAx
-  > NDIwMzAxNFowMTESMBAGA1UEAwwJbG9jYWxob3N0MRswGQYJKoZIhvcNAQkBFgxoZ0Bsb2NhbGhv
-  > c3QwXDANBgkqhkiG9w0BAQEFAANLADBIAkEApjCWeYGrIa/Vo7LHaRF8ou0tbgHKE33Use/whCnK
-  > EUm34rDaXQd4lxxX6aDWg06n9tiVStAKTgQAHJY8j/xgSwIDAQABo1AwTjAdBgNVHQ4EFgQUE6sA
-  > +ammr24dGX0kpjxOgO45hzQwHwYDVR0jBBgwFoAUE6sA+ammr24dGX0kpjxOgO45hzQwDAYDVR0T
-  > BAUwAwEB/zANBgkqhkiG9w0BAQUFAANBAJfk57DTRf2nUbYaMSlVAARxMNbFGOjQhAUtY400GhKt
-  > 2uiKCNGKXVXD3AHWe13yHc5KttzbHQStE5Nm/DlWBWQ=
-  > -----END CERTIFICATE-----
-  > EOT
-  $ cat priv.pem pub-expired.pem > server-expired.pem
-
-Client certificates created with:
- openssl genrsa -aes128 -passout pass:1234 -out client-key.pem 512
- openssl rsa -in client-key.pem -passin pass:1234 -out client-key-decrypted.pem
- printf '.\n.\n.\n.\n.\n.\nhg-client@localhost\n.\n.\n' | \
- openssl req -new -key client-key.pem -passin pass:1234 -out client-csr.pem
- openssl x509 -req -days 9000 -in client-csr.pem -CA pub.pem -CAkey priv.pem \
- -set_serial 01 -out client-cert.pem
-
-  $ cat << EOT > client-key.pem
-  > -----BEGIN RSA PRIVATE KEY-----
-  > Proc-Type: 4,ENCRYPTED
-  > DEK-Info: AES-128-CBC,C8B8F103A61A336FB0716D1C0F8BB2E8
-  > 
-  > JolMlCFjEW3q3JJjO9z99NJWeJbFgF5DpUOkfSCxH56hxxtZb9x++rBvBZkxX1bF
-  > BAIe+iI90+jdCLwxbILWuFcrJUaLC5WmO14XDKYVmr2eW9e4MiCYOlO0Q6a9rDFS
-  > jctRCfvubOXFHbBGLH8uKEMpXEkP7Lc60FiIukqjuQEivJjrQirVtZCGwyk3qUi7
-  > Eyh4Lo63IKGu8T1Bkmn2kaMvFhu7nC/CQLBjSq0YYI1tmCOkVb/3tPrz8oqgDJp2
-  > u7bLS3q0xDNZ52nVrKIoZC/UlRXGlPyzPpa70/jPIdfCbkwDaBpRVXc+62Pj2n5/
-  > CnO2xaKwfOG6pDvanBhFD72vuBOkAYlFZPiEku4sc2WlNggsSWCPCIFwzmiHjKIl
-  > bWmdoTq3nb7sNfnBbV0OCa7fS1dFwCm4R1NC7ELENu0=
-  > -----END RSA PRIVATE KEY-----
-  > EOT
-
-  $ cat << EOT > client-key-decrypted.pem
-  > -----BEGIN RSA PRIVATE KEY-----
-  > MIIBOgIBAAJBAJs4LS3glAYU92bg5kPgRPNW84ewB0fWJfAKccCp1ACHAdZPeaKb
-  > FCinVMYKAVbVqBkyrZ/Tyr8aSfMz4xO4+KsCAwEAAQJAeKDr25+Q6jkZHEbkLRP6
-  > AfMtR+Ixhk6TJT24sbZKIC2V8KuJTDEvUhLU0CAr1nH79bDqiSsecOiVCr2HHyfT
-  > AQIhAM2C5rHbTs9R3PkywFEqq1gU3ztCnpiWglO7/cIkuGBhAiEAwVpMSAf77kop
-  > 4h/1kWsgMALQTJNsXd4CEUK4BOxvJIsCIQCbarVAKBQvoT81jfX27AfscsxnKnh5
-  > +MjSvkanvdFZwQIgbbcTefwt1LV4trtz2SR0i0nNcOZmo40Kl0jIquKO3qkCIH01
-  > mJHzZr3+jQqeIFtr5P+Xqi30DJxgrnEobbJ0KFjY
-  > -----END RSA PRIVATE KEY-----
-  > EOT
-
-  $ cat << EOT > client-cert.pem
-  > -----BEGIN CERTIFICATE-----
-  > MIIBPjCB6QIBATANBgkqhkiG9w0BAQsFADAxMRIwEAYDVQQDDAlsb2NhbGhvc3Qx
-  > GzAZBgkqhkiG9w0BCQEWDGhnQGxvY2FsaG9zdDAeFw0xNTA1MDcwNjI5NDVaFw0z
-  > OTEyMjcwNjI5NDVaMCQxIjAgBgkqhkiG9w0BCQEWE2hnLWNsaWVudEBsb2NhbGhv
-  > c3QwXDANBgkqhkiG9w0BAQEFAANLADBIAkEAmzgtLeCUBhT3ZuDmQ+BE81bzh7AH
-  > R9Yl8ApxwKnUAIcB1k95opsUKKdUxgoBVtWoGTKtn9PKvxpJ8zPjE7j4qwIDAQAB
-  > MA0GCSqGSIb3DQEBCwUAA0EAfBTqBG5pYhuGk+ZnyUufgS+d7Nk/sZAZjNdCAEj/
-  > NFPo5fR1jM6jlEWoWbeg298+SkjV7tfO+2nt0otUFkdM6A==
-  > -----END CERTIFICATE-----
-  > EOT
+  $ CERTSDIR="$TESTDIR/sslcerts"
+  $ cat "$CERTSDIR/priv.pem" "$CERTSDIR/pub.pem" >> server.pem
+  $ PRIV=`pwd`/server.pem
+  $ cat "$CERTSDIR/priv.pem" "$CERTSDIR/pub-not-yet.pem" > server-not-yet.pem
+  $ cat "$CERTSDIR/priv.pem" "$CERTSDIR/pub-expired.pem" > server-expired.pem
 
   $ hg init test
   $ cd test
@@ -162,22 +44,69 @@
 #endif
   $ cd ..
 
-OS X has a dummy CA cert that enables use of the system CA store when using
-Apple's OpenSSL. This trick do not work with plain OpenSSL.
+Our test cert is not signed by a trusted CA. It should fail to verify if
+we are able to load CA certs.
 
-  $ DISABLEOSXDUMMYCERT=
 #if defaultcacerts
   $ hg clone https://localhost:$HGPORT/ copy-pull
   abort: error: *certificate verify failed* (glob)
   [255]
+#endif
 
-  $ DISABLEOSXDUMMYCERT="--insecure"
-#endif
+Specifying a per-host certificate file that doesn't exist will abort
+
+  $ hg --config hostsecurity.localhost:verifycertsfile=/does/not/exist clone https://localhost:$HGPORT/
+  abort: path specified by hostsecurity.localhost:verifycertsfile does not exist: /does/not/exist
+  [255]
+
+A malformed per-host certificate file will raise an error
+
+  $ echo baddata > badca.pem
+  $ hg --config hostsecurity.localhost:verifycertsfile=badca.pem clone https://localhost:$HGPORT/
+  abort: error: unknown error* (glob)
+  [255]
+
+A per-host certificate mismatching the server will fail verification
+
+  $ hg --config hostsecurity.localhost:verifycertsfile="$CERTSDIR/client-cert.pem" clone https://localhost:$HGPORT/
+  abort: error: *certificate verify failed* (glob)
+  [255]
+
+A per-host certificate matching the server's cert will be accepted
+
+  $ hg --config hostsecurity.localhost:verifycertsfile="$CERTSDIR/pub.pem" clone -U https://localhost:$HGPORT/ perhostgood1
+  requesting all changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 4 changes to 4 files
+
+A per-host certificate file containing multiple certs, one of which matches, will be accepted
+
+  $ cat "$CERTSDIR/client-cert.pem" "$CERTSDIR/pub.pem" > perhost.pem
+  $ hg --config hostsecurity.localhost:verifycertsfile=perhost.pem clone -U https://localhost:$HGPORT/ perhostgood2
+  requesting all changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 4 changes to 4 files
+
+Defining both a per-host certificate and a fingerprint will print a warning
+
+  $ hg --config hostsecurity.localhost:verifycertsfile="$CERTSDIR/pub.pem" --config hostsecurity.localhost:fingerprints=sha1:914f1aff87249c09b6859b88b1906d30756491ca clone -U https://localhost:$HGPORT/ caandfingerwarning
+  (hostsecurity.localhost:verifycertsfile ignored when host fingerprints defined; using host fingerprints for verification)
+  requesting all changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 4 changes to 4 files
+
+  $ DISABLECACERTS="--config devel.disableloaddefaultcerts=true"
 
 clone via pull
 
-  $ hg clone https://localhost:$HGPORT/ copy-pull $DISABLEOSXDUMMYCERT
-  warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
+  $ hg clone https://localhost:$HGPORT/ copy-pull $DISABLECACERTS
+  warning: certificate for localhost not verified (set hostsecurity.localhost:certfingerprints=sha256:62:09:97:2f:97:60:e3:65:8f:12:5d:78:9e:35:a1:36:7a:65:4b:0e:9f:ac:db:c3:bc:6e:b6:a3:c0:16:e0:30 or web.cacerts config settings)
   requesting all changes
   adding changesets
   adding manifests
@@ -202,9 +131,9 @@
   $ cd copy-pull
   $ echo '[hooks]' >> .hg/hgrc
   $ echo "changegroup = printenv.py changegroup" >> .hg/hgrc
-  $ hg pull $DISABLEOSXDUMMYCERT
+  $ hg pull $DISABLECACERTS
   pulling from https://localhost:$HGPORT/
-  warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
+  warning: certificate for localhost not verified (set hostsecurity.localhost:certfingerprints=sha256:62:09:97:2f:97:60:e3:65:8f:12:5d:78:9e:35:a1:36:7a:65:4b:0e:9f:ac:db:c3:bc:6e:b6:a3:c0:16:e0:30 or web.cacerts config settings)
   searching for changes
   adding changesets
   adding manifests
@@ -218,7 +147,7 @@
 
   $ cp copy-pull/.hg/hgrc copy-pull/.hg/hgrc.bu
   $ echo "[web]" >> copy-pull/.hg/hgrc
-  $ echo "cacerts=`pwd`/pub.pem" >> copy-pull/.hg/hgrc
+  $ echo "cacerts=$CERTSDIR/pub.pem" >> copy-pull/.hg/hgrc
   $ hg -R copy-pull pull --traceback
   pulling from https://localhost:$HGPORT/
   searching for changes
@@ -230,35 +159,38 @@
 
   $ echo "[web]" >> $HGRCPATH
   $ echo 'cacerts=$P/pub.pem' >> $HGRCPATH
-  $ P=`pwd` hg -R copy-pull pull
+  $ P="$CERTSDIR" hg -R copy-pull pull
   pulling from https://localhost:$HGPORT/
   searching for changes
   no changes found
-  $ P=`pwd` hg -R copy-pull pull --insecure
+  $ P="$CERTSDIR" hg -R copy-pull pull --insecure
   pulling from https://localhost:$HGPORT/
-  warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
+  warning: connection security to localhost is disabled per current settings; communication is susceptible to eavesdropping and tampering
   searching for changes
   no changes found
 
 cacert mismatch
 
-  $ hg -R copy-pull pull --config web.cacerts=pub.pem https://127.0.0.1:$HGPORT/
+  $ hg -R copy-pull pull --config web.cacerts="$CERTSDIR/pub.pem" \
+  > https://127.0.0.1:$HGPORT/
   pulling from https://127.0.0.1:$HGPORT/
   abort: 127.0.0.1 certificate error: certificate is for localhost
-  (configure hostfingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca or use --insecure to connect insecurely)
+  (set hostsecurity.127.0.0.1:certfingerprints=sha256:62:09:97:2f:97:60:e3:65:8f:12:5d:78:9e:35:a1:36:7a:65:4b:0e:9f:ac:db:c3:bc:6e:b6:a3:c0:16:e0:30 config setting or use --insecure to connect insecurely)
   [255]
-  $ hg -R copy-pull pull --config web.cacerts=pub.pem https://127.0.0.1:$HGPORT/ --insecure
+  $ hg -R copy-pull pull --config web.cacerts="$CERTSDIR/pub.pem" \
+  > https://127.0.0.1:$HGPORT/ --insecure
   pulling from https://127.0.0.1:$HGPORT/
-  warning: 127.0.0.1 certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
+  warning: connection security to 127.0.0.1 is disabled per current settings; communication is susceptible to eavesdropping and tampering
   searching for changes
   no changes found
-  $ hg -R copy-pull pull --config web.cacerts=pub-other.pem
+  $ hg -R copy-pull pull --config web.cacerts="$CERTSDIR/pub-other.pem"
   pulling from https://localhost:$HGPORT/
   abort: error: *certificate verify failed* (glob)
   [255]
-  $ hg -R copy-pull pull --config web.cacerts=pub-other.pem --insecure
+  $ hg -R copy-pull pull --config web.cacerts="$CERTSDIR/pub-other.pem" \
+  > --insecure
   pulling from https://localhost:$HGPORT/
-  warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
+  warning: connection security to localhost is disabled per current settings; communication is susceptible to eavesdropping and tampering
   searching for changes
   no changes found
 
@@ -266,7 +198,8 @@
 
   $ hg serve -R test -p $HGPORT1 -d --pid-file=hg1.pid --certificate=server-not-yet.pem
   $ cat hg1.pid >> $DAEMON_PIDS
-  $ hg -R copy-pull pull --config web.cacerts=pub-not-yet.pem https://localhost:$HGPORT1/
+  $ hg -R copy-pull pull --config web.cacerts="$CERTSDIR/pub-not-yet.pem" \
+  > https://localhost:$HGPORT1/
   pulling from https://localhost:$HGPORT1/
   abort: error: *certificate verify failed* (glob)
   [255]
@@ -275,29 +208,39 @@
 
   $ hg serve -R test -p $HGPORT2 -d --pid-file=hg2.pid --certificate=server-expired.pem
   $ cat hg2.pid >> $DAEMON_PIDS
-  $ hg -R copy-pull pull --config web.cacerts=pub-expired.pem https://localhost:$HGPORT2/
+  $ hg -R copy-pull pull --config web.cacerts="$CERTSDIR/pub-expired.pem" \
+  > https://localhost:$HGPORT2/
   pulling from https://localhost:$HGPORT2/
   abort: error: *certificate verify failed* (glob)
   [255]
 
 Fingerprints
 
-  $ echo "[hostfingerprints]" >> copy-pull/.hg/hgrc
-  $ echo "localhost = 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca" >> copy-pull/.hg/hgrc
-  $ echo "127.0.0.1 = 914f1aff87249c09b6859b88b1906d30756491ca" >> copy-pull/.hg/hgrc
+- works without cacerts (hostfingerprints)
+  $ hg -R copy-pull id https://localhost:$HGPORT/ --insecure --config hostfingerprints.localhost=91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca
+  5fed3813f7f5
 
-- works without cacerts
-  $ hg -R copy-pull id https://localhost:$HGPORT/ --insecure
+- works without cacerts (hostsecurity)
+  $ hg -R copy-pull id https://localhost:$HGPORT/ --config hostsecurity.localhost:fingerprints=sha1:914f1aff87249c09b6859b88b1906d30756491ca
+  5fed3813f7f5
+
+  $ hg -R copy-pull id https://localhost:$HGPORT/ --config hostsecurity.localhost:fingerprints=sha256:62:09:97:2f:97:60:e3:65:8f:12:5d:78:9e:35:a1:36:7a:65:4b:0e:9f:ac:db:c3:bc:6e:b6:a3:c0:16:e0:30
   5fed3813f7f5
 
 - multiple fingerprints specified and first matches
   $ hg --config 'hostfingerprints.localhost=914f1aff87249c09b6859b88b1906d30756491ca, deadbeefdeadbeefdeadbeefdeadbeefdeadbeef' -R copy-pull id https://localhost:$HGPORT/ --insecure
   5fed3813f7f5
 
+  $ hg --config 'hostsecurity.localhost:fingerprints=sha1:914f1aff87249c09b6859b88b1906d30756491ca, sha1:deadbeefdeadbeefdeadbeefdeadbeefdeadbeef' -R copy-pull id https://localhost:$HGPORT/
+  5fed3813f7f5
+
 - multiple fingerprints specified and last matches
   $ hg --config 'hostfingerprints.localhost=deadbeefdeadbeefdeadbeefdeadbeefdeadbeef, 914f1aff87249c09b6859b88b1906d30756491ca' -R copy-pull id https://localhost:$HGPORT/ --insecure
   5fed3813f7f5
 
+  $ hg --config 'hostsecurity.localhost:fingerprints=sha1:deadbeefdeadbeefdeadbeefdeadbeefdeadbeef, sha1:914f1aff87249c09b6859b88b1906d30756491ca' -R copy-pull id https://localhost:$HGPORT/
+  5fed3813f7f5
+
 - multiple fingerprints specified and none match
 
   $ hg --config 'hostfingerprints.localhost=deadbeefdeadbeefdeadbeefdeadbeefdeadbeef, aeadbeefdeadbeefdeadbeefdeadbeefdeadbeef' -R copy-pull id https://localhost:$HGPORT/ --insecure
@@ -305,15 +248,20 @@
   (check hostfingerprint configuration)
   [255]
 
+  $ hg --config 'hostsecurity.localhost:fingerprints=sha1:deadbeefdeadbeefdeadbeefdeadbeefdeadbeef, sha1:aeadbeefdeadbeefdeadbeefdeadbeefdeadbeef' -R copy-pull id https://localhost:$HGPORT/
+  abort: certificate for localhost has unexpected fingerprint sha1:91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca
+  (check hostsecurity configuration)
+  [255]
+
 - fails when cert doesn't match hostname (port is ignored)
-  $ hg -R copy-pull id https://localhost:$HGPORT1/
+  $ hg -R copy-pull id https://localhost:$HGPORT1/ --config hostfingerprints.localhost=914f1aff87249c09b6859b88b1906d30756491ca
   abort: certificate for localhost has unexpected fingerprint 28:ff:71:bf:65:31:14:23:ad:62:92:b4:0e:31:99:18:fc:83:e3:9b
   (check hostfingerprint configuration)
   [255]
 
 
 - ignores that certificate doesn't match hostname
-  $ hg -R copy-pull id https://127.0.0.1:$HGPORT/
+  $ hg -R copy-pull id https://127.0.0.1:$HGPORT/ --config hostfingerprints.127.0.0.1=914f1aff87249c09b6859b88b1906d30756491ca
   5fed3813f7f5
 
 HGPORT1 is reused below for tinyproxy tests. Kill that server.
@@ -334,28 +282,31 @@
 
   $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull --insecure --traceback
   pulling from https://localhost:$HGPORT/
-  warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
+  warning: connection security to localhost is disabled per current settings; communication is susceptible to eavesdropping and tampering
   searching for changes
   no changes found
 
 Test https with cacert and fingerprint through proxy
 
-  $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull --config web.cacerts=pub.pem
+  $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull \
+  > --config web.cacerts="$CERTSDIR/pub.pem"
   pulling from https://localhost:$HGPORT/
   searching for changes
   no changes found
-  $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull https://127.0.0.1:$HGPORT/
+  $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull https://127.0.0.1:$HGPORT/ --config hostfingerprints.127.0.0.1=914f1aff87249c09b6859b88b1906d30756491ca
   pulling from https://127.0.0.1:$HGPORT/
   searching for changes
   no changes found
 
 Test https with cert problems through proxy
 
-  $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull --config web.cacerts=pub-other.pem
+  $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull \
+  > --config web.cacerts="$CERTSDIR/pub-other.pem"
   pulling from https://localhost:$HGPORT/
   abort: error: *certificate verify failed* (glob)
   [255]
-  $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull --config web.cacerts=pub-expired.pem https://localhost:$HGPORT2/
+  $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull \
+  > --config web.cacerts="$CERTSDIR/pub-expired.pem" https://localhost:$HGPORT2/
   pulling from https://localhost:$HGPORT2/
   abort: error: *certificate verify failed* (glob)
   [255]
@@ -390,7 +341,7 @@
 
 without client certificate:
 
-  $ P=`pwd` hg id https://localhost:$HGPORT/
+  $ P="$CERTSDIR" hg id https://localhost:$HGPORT/
   abort: error: *handshake failure* (glob)
   [255]
 
@@ -399,19 +350,19 @@
   $ cat << EOT >> $HGRCPATH
   > [auth]
   > l.prefix = localhost
-  > l.cert = client-cert.pem
-  > l.key = client-key.pem
+  > l.cert = $CERTSDIR/client-cert.pem
+  > l.key = $CERTSDIR/client-key.pem
   > EOT
 
-  $ P=`pwd` hg id https://localhost:$HGPORT/ \
-  > --config auth.l.key=client-key-decrypted.pem
+  $ P="$CERTSDIR" hg id https://localhost:$HGPORT/ \
+  > --config auth.l.key="$CERTSDIR/client-key-decrypted.pem"
   5fed3813f7f5
 
-  $ printf '1234\n' | env P=`pwd` hg id https://localhost:$HGPORT/ \
+  $ printf '1234\n' | env P="$CERTSDIR" hg id https://localhost:$HGPORT/ \
   > --config ui.interactive=True --config ui.nontty=True
-  passphrase for client-key.pem: 5fed3813f7f5
+  passphrase for */client-key.pem: 5fed3813f7f5 (glob)
 
-  $ env P=`pwd` hg id https://localhost:$HGPORT/
+  $ env P="$CERTSDIR" hg id https://localhost:$HGPORT/
   abort: error: * (glob)
   [255]
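
Note: the commands above exercise the new per-host "hostsecurity" options via
--config, replacing the old [hostfingerprints] section and ad-hoc web.cacerts
paths. The same settings can be made persistent in an hgrc; a minimal sketch,
using the sha256 fingerprint shown in the warnings above and a placeholder
certificate path:

  [hostsecurity]
  localhost:fingerprints = sha256:62:09:97:2f:97:60:e3:65:8f:12:5d:78:9e:35:a1:36:7a:65:4b:0e:9f:ac:db:c3:bc:6e:b6:a3:c0:16:e0:30
  localhost:verifycertsfile = /path/to/pub.pem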
 
--- a/tests/test-inherit-mode.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-inherit-mode.t	Tue Jun 14 14:52:58 2016 -0500
@@ -117,6 +117,7 @@
   00660 ../push/.hg/cache/branch2-base
   00660 ../push/.hg/cache/rbc-names-v1
   00660 ../push/.hg/cache/rbc-revs-v1
+  00660 ../push/.hg/dirstate
   00660 ../push/.hg/requires
   00770 ../push/.hg/store/
   00660 ../push/.hg/store/00changelog.i
--- a/tests/test-install.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-install.t	Tue Jun 14 14:52:58 2016 -0500
@@ -4,6 +4,9 @@
   checking Python executable (*) (glob)
   checking Python version (2.*) (glob)
   checking Python lib (*lib*)... (glob)
+  checking Mercurial version (*) (glob)
+  checking Mercurial custom build (*) (glob)
+  checking module policy (*) (glob)
   checking installed modules (*mercurial)... (glob)
   checking templates (*mercurial?templates)... (glob)
   checking default template (*mercurial?templates?map-cmdline.default) (glob)
@@ -23,7 +26,10 @@
     "encoding": "ascii",
     "encodingerror": null,
     "extensionserror": null,
+    "hgmodulepolicy": "*", (glob)
     "hgmodules": "*mercurial", (glob)
+    "hgver": "*", (glob)
+    "hgverextra": "*", (glob)
     "problems": 0,
     "pythonexe": "*", (glob)
     "pythonlib": "*", (glob)
@@ -41,6 +47,9 @@
   checking Python executable (*) (glob)
   checking Python version (2.*) (glob)
   checking Python lib (*lib*)... (glob)
+  checking Mercurial version (*) (glob)
+  checking Mercurial custom build (*) (glob)
+  checking module policy (*) (glob)
   checking installed modules (*mercurial)... (glob)
   checking templates (*mercurial?templates)... (glob)
   checking default template (*mercurial?templates?map-cmdline.default) (glob)
@@ -62,6 +71,9 @@
   checking Python executable (*) (glob)
   checking Python version (*) (glob)
   checking Python lib (*lib*)... (glob)
+  checking Mercurial version (*) (glob)
+  checking Mercurial custom build (*) (glob)
+  checking module policy (*) (glob)
   checking installed modules (*mercurial)... (glob)
   checking templates (*mercurial?templates)... (glob)
   checking default template (*mercurial?templates?map-cmdline.default) (glob)
@@ -70,6 +82,8 @@
   no problems detected
 
 #if test-repo
+  $ . "$TESTDIR/helpers-testrepo.sh"
+
   $ cat >> wixxml.py << EOF
   > import os, subprocess, sys
   > import xml.etree.ElementTree as ET
--- a/tests/test-largefiles-wireproto.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-largefiles-wireproto.t	Tue Jun 14 14:52:58 2016 -0500
@@ -306,4 +306,78 @@
 used all HGPORTs, kill all daemons
   $ killdaemons.py
 
+largefiles should batch verify remote calls
+
+  $ hg init batchverifymain
+  $ cd batchverifymain
+  $ echo "aaa" >> a
+  $ hg add --large a
+  $ hg commit -m "a"
+  Invoking status precommit hook
+  A a
+  $ echo "bbb" >> b
+  $ hg add --large b
+  $ hg commit -m "b"
+  Invoking status precommit hook
+  A b
+  $ cd ..
+  $ hg serve -R batchverifymain -d -p $HGPORT --pid-file hg.pid \
+  > -A access.log
+  $ cat hg.pid >> $DAEMON_PIDS
+  $ hg clone --noupdate http://localhost:$HGPORT batchverifyclone
+  requesting all changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 2 changes to 2 files
+  $ hg -R batchverifyclone verify --large --lfa
+  checking changesets
+  checking manifests
+  crosschecking files in changesets and manifests
+  checking files
+  2 files, 2 changesets, 2 total revisions
+  searching 2 changesets for largefiles
+  verified existence of 2 revisions of 2 largefiles
+  $ tail -1 access.log
+  127.0.0.1 - - [*] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=statlfile+sha%3D972a1a11f19934401291cc99117ec614933374ce%3Bstatlfile+sha%3Dc801c9cfe94400963fcb683246217d5db77f9a9a (glob)
+  $ hg -R batchverifyclone update
+  getting changed largefiles
+  2 largefiles updated, 0 removed
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Clear log file before next test
+
+  $ printf "" > access.log
+
+Verify should check a file on the remote server only when the file is not
+available locally.
+
+  $ echo "ccc" >> batchverifymain/c
+  $ hg -R batchverifymain status
+  ? c
+  $ hg -R batchverifymain add --large batchverifymain/c
+  $ hg -R batchverifymain commit -m "c"
+  Invoking status precommit hook
+  A c
+  $ hg -R batchverifyclone pull
+  pulling from http://localhost:$HGPORT/
+  searching for changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 1 changes to 1 files
+  (run 'hg update' to get a working copy)
+  $ hg -R batchverifyclone verify --lfa
+  checking changesets
+  checking manifests
+  crosschecking files in changesets and manifests
+  checking files
+  3 files, 3 changesets, 3 total revisions
+  searching 3 changesets for largefiles
+  verified existence of 3 revisions of 3 largefiles
+  $ tail -1 access.log
+  127.0.0.1 - - [*] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=statlfile+sha%3Dc8559c3c9cfb42131794b7d8009230403b9b454c (glob)
+
+  $ killdaemons.py
+
 #endif
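
Note: the x-hgarg-1 values logged above are URL-encoded; decoding them shows
the individual statlfile requests that largefiles packs into a single
cmd=batch round trip. A throwaway decode, assuming the Python 2 interpreter
these tests already run under (a sketch, not part of the test):

  $ python -c "import urllib; print urllib.unquote_plus('statlfile+sha%3D972a1a11f19934401291cc99117ec614933374ce%3Bstatlfile+sha%3Dc801c9cfe94400963fcb683246217d5db77f9a9a')"
  statlfile sha=972a1a11f19934401291cc99117ec614933374ce;statlfile sha=c801c9cfe94400963fcb683246217d5db77f9a9a
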
--- a/tests/test-mac-packages.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-mac-packages.t	Tue Jun 14 14:52:58 2016 -0500
@@ -1,4 +1,7 @@
 #require test-repo slow osx osxpackaging
+
+  $ . "$TESTDIR/helpers-testrepo.sh"
+
   $ OUTPUTDIR=`pwd`
   $ export OUTPUTDIR
   $ KEEPMPKG=yes
--- a/tests/test-newbranch.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-newbranch.t	Tue Jun 14 14:52:58 2016 -0500
@@ -463,3 +463,72 @@
   -1 new
 
   $ cd ..
+
+We expect that update --clean discards changes in the working directory,
+and updates to the head of the parent branch.
+
+  $ hg init updatebareclean
+  $ cd updatebareclean
+  $ hg update --clean
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ touch a
+  $ hg commit -A -m "a"
+  adding a
+  $ touch b
+  $ hg commit -A -m "b"
+  adding b
+  $ touch c
+  $ hg commit -A -m "c"
+  adding c
+  $ hg log
+  changeset:   2:991a3460af53
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     c
+  
+  changeset:   1:0e067c57feba
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     b
+  
+  changeset:   0:3903775176ed
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     a
+  
+  $ hg update -r 1
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ hg branch new-branch
+  marked working directory as branch new-branch
+  (branches are permanent and global, did you want a bookmark?)
+  $ echo "aa" >> a
+  $ hg update --clean
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ hg status
+  $ hg branch
+  default
+  $ hg parent
+  changeset:   2:991a3460af53
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     c
+  
+We expect that update --clean on a non-existent parent discards a new branch
+and updates to the tipmost non-closed branch head
+
+  $ hg update null
+  0 files updated, 0 files merged, 3 files removed, 0 files unresolved
+  $ hg branch newbranch
+  marked working directory as branch newbranch
+  (branches are permanent and global, did you want a bookmark?)
+  $ hg update -C
+  3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ hg summary
+  parent: 2:991a3460af53 tip
+   c
+  branch: default
+  commit: (clean)
+  update: (current)
+  phases: 3 draft
--- a/tests/test-notify.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-notify.t	Tue Jun 14 14:52:58 2016 -0500
@@ -571,7 +571,7 @@
   Message-Id: <hg.3548c9e294b6.*.*@*> (glob)
   To: baz@test.com, foo@bar
   
-  changeset 3548c9e294b6 in $TESTTMP/b
+  changeset 3548c9e294b6 in $TESTTMP/b (glob)
   details: http://test/b?cmd=changeset;node=3548c9e294b6
   description: default template
 
--- a/tests/test-pager.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-pager.t	Tue Jun 14 14:52:58 2016 -0500
@@ -177,3 +177,48 @@
   paged! 'date:        Thu Jan 01 00:00:00 1970 +0000\n'
   paged! 'summary:     modify a 8\n'
   paged! '\n'
+
+Pager works with shell aliases.
+
+  $ cat >> $HGRCPATH <<EOF
+  > [alias]
+  > echoa = !echo a
+  > EOF
+
+  $ hg echoa
+  a
+  $ hg --config pager.attend-echoa=yes echoa
+  paged! 'a\n'
+
+Pager works with hg aliases including environment variables.
+
+  $ cat >> $HGRCPATH <<'EOF'
+  > [alias]
+  > printa = log -T "$A\n" -r 0
+  > EOF
+
+  $ A=1 hg --config pager.attend-printa=yes printa
+  paged! '1\n'
+  $ A=2 hg --config pager.attend-printa=yes printa
+  paged! '2\n'
+
+Pager should not override the exit code of other commands
+
+  $ cat >> $TESTTMP/fortytwo.py <<'EOF'
+  > from mercurial import cmdutil, commands
+  > cmdtable = {}
+  > command = cmdutil.command(cmdtable)
+  > @command('fortytwo', [], 'fortytwo', norepo=True)
+  > def fortytwo(ui, *opts):
+  >     ui.write('42\n')
+  >     return 42
+  > EOF
+
+  $ cat >> $HGRCPATH <<'EOF'
+  > [extensions]
+  > fortytwo = $TESTTMP/fortytwo.py
+  > EOF
+
+  $ hg fortytwo --pager=on
+  paged! '42\n'
+  [42]
--- a/tests/test-parseindex2.py	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-parseindex2.py	Tue Jun 14 14:52:58 2016 -0500
@@ -9,13 +9,13 @@
 import subprocess
 import sys
 
-from mercurial import (
-    parsers,
-)
 from mercurial.node import (
     nullid,
     nullrev,
 )
+from mercurial import (
+    parsers,
+)
 
 # original python implementation
 def gettype(q):
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-patchbomb-tls.t	Tue Jun 14 14:52:58 2016 -0500
@@ -0,0 +1,89 @@
+#require serve ssl
+
+Set up SMTP server:
+
+  $ CERTSDIR="$TESTDIR/sslcerts"
+  $ cat "$CERTSDIR/priv.pem" "$CERTSDIR/pub.pem" >> server.pem
+
+  $ python "$TESTDIR/dummysmtpd.py" -p $HGPORT --pid-file a.pid -d \
+  > --tls smtps --certificate `pwd`/server.pem
+  listening at localhost:$HGPORT
+  $ cat a.pid >> $DAEMON_PIDS
+
+Ensure hg email output is sent to stdout:
+
+  $ unset PAGER
+
+Set up repository:
+
+  $ hg init t
+  $ cd t
+  $ cat <<EOF >> .hg/hgrc
+  > [extensions]
+  > patchbomb =
+  > [email]
+  > method = smtp
+  > [smtp]
+  > host = localhost
+  > port = $HGPORT
+  > tls = smtps
+  > EOF
+
+  $ echo a > a
+  $ hg commit -Ama -d '1 0'
+  adding a
+
+Utility functions:
+
+  $ DISABLECACERTS=
+  $ try () {
+  >   hg email $DISABLECACERTS -f quux -t foo -c bar -r tip "$@"
+  > }
+
+Our test cert is not signed by a trusted CA. It should fail to verify if
+we are able to load CA certs:
+
+#if defaultcacerts
+  $ try
+  this patch series consists of 1 patches.
+  
+  
+  (?i)abort: .*?certificate.verify.failed.* (re)
+  [255]
+#endif
+
+  $ DISABLECACERTS="--config devel.disableloaddefaultcerts=true"
+
+Without certificates:
+
+  $ try --debug
+  this patch series consists of 1 patches.
+  
+  
+  (using smtps)
+  sending mail: smtp host localhost, port * (glob)
+  (verifying remote certificate)
+  warning: certificate for localhost not verified (set hostsecurity.localhost:certfingerprints=sha256:62:09:97:2f:97:60:e3:65:8f:12:5d:78:9e:35:a1:36:7a:65:4b:0e:9f:ac:db:c3:bc:6e:b6:a3:c0:16:e0:30 or web.cacerts config settings)
+  sending [PATCH] a ...
+
+With global certificates:
+
+  $ try --debug --config web.cacerts="$CERTSDIR/pub.pem"
+  this patch series consists of 1 patches.
+  
+  
+  (using smtps)
+  sending mail: smtp host localhost, port * (glob)
+  (verifying remote certificate)
+  sending [PATCH] a ...
+
+With invalid certificates:
+
+  $ try --config web.cacerts="$CERTSDIR/pub-other.pem"
+  this patch series consists of 1 patches.
+  
+  
+  (?i)abort: .*?certificate.verify.failed.* (re)
+  [255]
+
+  $ cd ..
--- a/tests/test-push-warn.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-push-warn.t	Tue Jun 14 14:52:58 2016 -0500
@@ -785,4 +785,14 @@
   no changes found
   [1]
 
+Test fail hook
+
+  $ hg push inner --config hooks.fail-push="echo running fail-push hook"
+  pushing to inner
+  searching for changes
+  running fail-push hook
+  abort: push creates new remote head 7d0f4fb6cf04 on branch 'A'!
+  (merge or see "hg help push" for details about pushing new heads)
+  [255]
+
   $ cd ..
--- a/tests/test-rebase-obsolete.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-rebase-obsolete.t	Tue Jun 14 14:52:58 2016 -0500
@@ -863,3 +863,56 @@
   rebasing 20:b82fb57ea638 "willconflict second version"
   note: not rebasing 21:8b31da3c4919 "dummy change", already in destination as 19:601db7a18f51 "dummy change successor"
   rebasing 22:7bdc8a87673d "dummy change" (tip)
+  $ cd ..
+
+rebase source is obsoleted (issue5198)
+--------------------------------------
+
+  $ hg clone base amended
+  updating to branch default
+  3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cd amended
+  $ hg up 9520eea781bc
+  1 files updated, 0 files merged, 2 files removed, 0 files unresolved
+  $ echo 1 >> E
+  $ hg commit --amend -m "E'"
+  $ hg log -G
+  @  9:69abe8906104 E'
+  |
+  | o  7:02de42196ebe H
+  | |
+  | | o  6:eea13746799a G
+  | |/|
+  | o |  5:24b6387c8c8c F
+  |/ /
+  | x  4:9520eea781bc E
+  |/
+  | o  3:32af7686d403 D
+  | |
+  | o  2:5fddd98957c8 C
+  | |
+  | o  1:42ccdea3bb16 B
+  |/
+  o  0:cd010b8cd998 A
+  
+  $ hg rebase -d . -s 9520eea781bc
+  note: not rebasing 4:9520eea781bc "E", already in destination as 9:69abe8906104 "E'"
+  rebasing 6:eea13746799a "G"
+  $ hg log -G
+  o    10:17be06e82e95 G
+  |\
+  | @  9:69abe8906104 E'
+  | |
+  +---o  7:02de42196ebe H
+  | |
+  o |  5:24b6387c8c8c F
+  |/
+  | o  3:32af7686d403 D
+  | |
+  | o  2:5fddd98957c8 C
+  | |
+  | o  1:42ccdea3bb16 B
+  |/
+  o  0:cd010b8cd998 A
+  
+  $ cd ..
--- a/tests/test-revert-interactive.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-revert-interactive.t	Tue Jun 14 14:52:58 2016 -0500
@@ -64,7 +64,7 @@
    3
    4
    5
-  record change 1/6 to 'f'? [Ynesfdaq?] y
+  revert change 1/6 to 'f'? [Ynesfdaq?] y
   
   @@ -1,5 +2,6 @@
    1
@@ -73,7 +73,7 @@
    4
    5
   +b
-  record change 2/6 to 'f'? [Ynesfdaq?] y
+  revert change 2/6 to 'f'? [Ynesfdaq?] y
   
   diff --git a/folder1/g b/folder1/g
   2 hunks, 2 lines changed
@@ -86,7 +86,7 @@
    3
    4
    5
-  record change 3/6 to 'folder1/g'? [Ynesfdaq?] y
+  revert change 3/6 to 'folder1/g'? [Ynesfdaq?] y
   
   @@ -1,5 +2,6 @@
    1
@@ -95,7 +95,7 @@
    4
    5
   +d
-  record change 4/6 to 'folder1/g'? [Ynesfdaq?] n
+  revert change 4/6 to 'folder1/g'? [Ynesfdaq?] n
   
   diff --git a/folder2/h b/folder2/h
   2 hunks, 2 lines changed
@@ -163,7 +163,7 @@
    3
    4
    5
-  record change 1/6 to 'f'? [Ynesfdaq?] y
+  revert change 1/6 to 'f'? [Ynesfdaq?] y
   
   @@ -1,5 +2,6 @@
    1
@@ -172,7 +172,7 @@
    4
    5
   +b
-  record change 2/6 to 'f'? [Ynesfdaq?] y
+  revert change 2/6 to 'f'? [Ynesfdaq?] y
   
   diff --git a/folder1/g b/folder1/g
   2 hunks, 2 lines changed
@@ -185,7 +185,7 @@
    3
    4
    5
-  record change 3/6 to 'folder1/g'? [Ynesfdaq?] y
+  revert change 3/6 to 'folder1/g'? [Ynesfdaq?] y
   
   @@ -1,5 +2,6 @@
    1
@@ -194,7 +194,7 @@
    4
    5
   +d
-  record change 4/6 to 'folder1/g'? [Ynesfdaq?] n
+  revert change 4/6 to 'folder1/g'? [Ynesfdaq?] n
   
   diff --git a/folder2/h b/folder2/h
   2 hunks, 2 lines changed
@@ -242,7 +242,7 @@
    3
    4
    5
-  record change 1/2 to 'f'? [Ynesfdaq?] y
+  discard change 1/2 to 'f'? [Ynesfdaq?] y
   
   @@ -2,6 +1,5 @@
    1
@@ -251,7 +251,7 @@
    4
    5
   -b
-  record change 2/2 to 'f'? [Ynesfdaq?] n
+  discard change 2/2 to 'f'? [Ynesfdaq?] n
   
   $ hg st
   M f
@@ -303,7 +303,7 @@
   -1
   +0
   +2
-  record this change to 'k'? [Ynesfdaq?] e
+  discard this change to 'k'? [Ynesfdaq?] e
   
   $ cat k
   42
@@ -350,7 +350,7 @@
    1
    2
    3
-  record change 1/3 to 'folder1/g'? [Ynesfdaq?] y
+  discard change 1/3 to 'folder1/g'? [Ynesfdaq?] y
   
   @@ -2,7 +1,7 @@
    c
@@ -361,13 +361,13 @@
   +4
    5
    d
-  record change 2/3 to 'folder1/g'? [Ynesfdaq?] y
+  discard change 2/3 to 'folder1/g'? [Ynesfdaq?] y
   
   @@ -7,3 +6,2 @@
    5
    d
   -lastline
-  record change 3/3 to 'folder1/g'? [Ynesfdaq?] n
+  discard change 3/3 to 'folder1/g'? [Ynesfdaq?] n
   
   $ hg diff --nodates
   diff -r a3d963a027aa folder1/g
--- a/tests/test-revset.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-revset.t	Tue Jun 14 14:52:58 2016 -0500
@@ -898,6 +898,16 @@
   $ log 'tag(tip)'
   9
 
+Test order of revisions in compound expression
+----------------------------------------------
+
+ 'A & B' should follow the order of 'A':
+
+  $ log '2:0 & 0::2'
+  2
+  1
+  0
+
 test sort revset
 --------------------------------------------
 
@@ -952,6 +962,12 @@
   6
   2
 
+test invalid sort keys
+
+  $ log 'sort(all(), -invalid)'
+  hg: parse error: unknown sort key '-invalid'
+  [255]
+
   $ cd ..
 
 test sorting by multiple keys including variable-length strings
@@ -1090,6 +1106,67 @@
   0 b12  m111 u112 111 10800
   2 b111 m11  u12  111 3600
 
+ toposort prioritises graph branches
+
+  $ hg up 2
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ touch a
+  $ hg addremove
+  adding a
+  $ hg ci -m 't1' -u 'tu' -d '130 0'
+  created new head
+  $ echo 'a' >> a
+  $ hg ci -m 't2' -u 'tu' -d '130 0'
+  $ hg book book1
+  $ hg up 4
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  (leaving bookmark book1)
+  $ touch a
+  $ hg addremove
+  adding a
+  $ hg ci -m 't3' -u 'tu' -d '130 0'
+
+  $ hg log -r 'sort(all(), topo)'
+  7 b111 t3   tu   130 0
+  4 b111 m112 u111 110 14400
+  3 b112 m111 u11  120 0
+  6 b111 t2   tu   130 0
+  5 b111 t1   tu   130 0
+  2 b111 m11  u12  111 3600
+  1 b11  m12  u111 112 7200
+  0 b12  m111 u112 111 10800
+
+  $ hg log -r 'sort(all(), -topo)'
+  0 b12  m111 u112 111 10800
+  1 b11  m12  u111 112 7200
+  2 b111 m11  u12  111 3600
+  5 b111 t1   tu   130 0
+  6 b111 t2   tu   130 0
+  3 b112 m111 u11  120 0
+  4 b111 m112 u111 110 14400
+  7 b111 t3   tu   130 0
+
+  $ hg log -r 'sort(all(), topo, topo.firstbranch=book1)'
+  6 b111 t2   tu   130 0
+  5 b111 t1   tu   130 0
+  7 b111 t3   tu   130 0
+  4 b111 m112 u111 110 14400
+  3 b112 m111 u11  120 0
+  2 b111 m11  u12  111 3600
+  1 b11  m12  u111 112 7200
+  0 b12  m111 u112 111 10800
+
+topological sorting can't be combined with other sort keys, and you can't
+use the topo.firstbranch option when topo sort is not active:
+
+  $ hg log -r 'sort(all(), "topo user")'
+  hg: parse error: topo sort order cannot be combined with other sort keys
+  [255]
+
+  $ hg log -r 'sort(all(), user, topo.firstbranch=book1)'
+  hg: parse error: topo.firstbranch can only be used when using the topo sort key
+  [255]
+
   $ cd ..
   $ cd repo
 
@@ -1550,7 +1627,10 @@
   0
   $ log '4::8 - 8'
   4
-  $ log 'matching(1 or 2 or 3) and (2 or 3 or 1)'
+
+matching() should preserve the order of the input set:
+
+  $ log '(2 or 3 or 1) and matching(1 or 2 or 3)'
   2
   3
   1
@@ -1967,12 +2047,12 @@
   (func
     ('symbol', 'unknownref')
     ('symbol', '0'))
-  abort: failed to parse the definition of revset alias "unknownref": '$' not for alias arguments
+  abort: bad definition of revset alias "unknownref": invalid symbol '$2'
   [255]
 
   $ hg debugrevspec --debug --config revsetalias.anotherbadone='branch(' "tip"
   ('symbol', 'tip')
-  warning: failed to parse the definition of revset alias "anotherbadone": at 7: not a prefix: end
+  warning: bad definition of revset alias "anotherbadone": at 7: not a prefix: end
   * set:
   <baseset [9]>
   9
@@ -1985,7 +2065,7 @@
 
   $ hg debugrevspec --debug --config revsetalias.'bad name'='tip' "tip"
   ('symbol', 'tip')
-  warning: failed to parse the declaration of revset alias "bad name": at 4: invalid token
+  warning: bad declaration of revset alias "bad name": at 4: invalid token
   * set:
   <baseset [9]>
   9
--- a/tests/test-rollback.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-rollback.t	Tue Jun 14 14:52:58 2016 -0500
@@ -196,3 +196,15 @@
   checking files
   1 files, 2 changesets, 2 total revisions
 
+rollback disabled by config
+  $ cat >> $HGRCPATH <<EOF
+  > [ui]
+  > rollback = false
+  > EOF
+  $ echo narf >> pinky-sayings.txt
+  $ hg add pinky-sayings.txt
+  $ hg ci -m 'First one.'
+  $ hg rollback
+  abort: rollback is disabled because it is unsafe
+  (see `hg help -v rollback` for information)
+  [255]
--- a/tests/test-run-tests.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-run-tests.t	Tue Jun 14 14:52:58 2016 -0500
@@ -2,10 +2,7 @@
 
 Avoid interference from actual test env:
 
-  $ unset HGTEST_JOBS
-  $ unset HGTEST_TIMEOUT
-  $ unset HGTEST_PORT
-  $ unset HGTEST_SHELL
+  $ . "$TESTDIR/helper-runtests.sh"
 
 Smoke test with install
 ============
@@ -196,6 +193,10 @@
   ]]>  </testcase>
   </testsuite>
 
+  $ cat .testtimes
+  test-failure-unicode.t * (glob)
+  test-failure.t * (glob)
+  test-success.t * (glob)
   $ rm test-failure-unicode.t
 
 test for --retest
@@ -304,6 +305,8 @@
   .
   # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
   $ rm test-serve-inuse.t
+  $ killdaemons.py $DAEMON_PIDS
+  $ rm $DAEMON_PIDS
 
 Running In Debug Mode
 ======================
@@ -586,11 +589,35 @@
   testreport ={
       "test-bogus.t": {
           "result": "skip"
-      }, 
+      },
       "test-failure.t": {
           "result": "skip"
       }
   } (no-eol)
+
+Whitelist trumps blacklist
+  $ echo test-failure.t > whitelist
+  $ rt --blacklist=blacklist --whitelist=whitelist --json\
+  >   test-failure.t test-bogus.t
+  s
+  --- $TESTTMP/test-failure.t
+  +++ $TESTTMP/test-failure.t.err
+  @@ -1,5 +1,5 @@
+     $ echo babar
+  -  rataxes
+  +  babar
+   This is a noop statement so that
+   this test is still more bytes than success.
+   pad pad pad pad............................................................
+  
+  ERROR: test-failure.t output changed
+  !
+  Skipped test-bogus.t: Doesn't exist
+  Failed test-failure.t: output changed
+  # Ran 1 tests, 1 skipped, 0 warned, 1 failed.
+  python hash seed: * (glob)
+  [1]
+
 test for --json
 ==================
 
--- a/tests/test-subrepo-deep-nested-change.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-subrepo-deep-nested-change.t	Tue Jun 14 14:52:58 2016 -0500
@@ -303,9 +303,9 @@
   archiving (sub1) [===================================>] 4/4\r (no-eol) (esc)
                                                               \r (no-eol) (esc)
   \r (no-eol) (esc)
-  archiving (sub1/sub2) [                               ] 0/2\r (no-eol) (esc)
-  archiving (sub1/sub2) [==============>                ] 1/2\r (no-eol) (esc)
-  archiving (sub1/sub2) [==============================>] 2/2\r (no-eol) (esc)
+  archiving (sub1/sub2) [                               ] 0/2\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [==============>                ] 1/2\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [==============================>] 2/2\r (no-eol) (glob) (esc)
                                                               \r (no-eol) (esc)
   $ diff -r . ../wdir | egrep -v '\.hg$|^Common subdirectories:'
   Only in ../wdir: .hg_archival.txt
@@ -347,9 +347,9 @@
   archiving (sub1) [===================================>] 3/3\r (no-eol) (esc)
                                                               \r (no-eol) (esc)
   \r (no-eol) (esc)
-  archiving (sub1/sub2) [                               ] 0/2\r (no-eol) (esc)
-  archiving (sub1/sub2) [==============>                ] 1/2\r (no-eol) (esc)
-  archiving (sub1/sub2) [==============================>] 2/2\r (no-eol) (esc)
+  archiving (sub1/sub2) [                               ] 0/2\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [==============>                ] 1/2\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [==============================>] 2/2\r (no-eol) (glob) (esc)
                                                               \r (no-eol) (esc)
   $ find ../wdir -type f | sort
   ../wdir/.hg_archival.txt
@@ -379,10 +379,10 @@
   archiving (sub1) [===================================>] 3/3\r (no-eol) (esc)
                                                               \r (no-eol) (esc)
   \r (no-eol) (esc)
-  archiving (sub1/sub2) [                               ] 0/3\r (no-eol) (esc)
-  archiving (sub1/sub2) [=========>                     ] 1/3\r (no-eol) (esc)
-  archiving (sub1/sub2) [===================>           ] 2/3\r (no-eol) (esc)
-  archiving (sub1/sub2) [==============================>] 3/3\r (no-eol) (esc)
+  archiving (sub1/sub2) [                               ] 0/3\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [=========>                     ] 1/3\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [===================>           ] 2/3\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [==============================>] 3/3\r (no-eol) (glob) (esc)
                                                               \r (no-eol) (esc)
   $ cat ../wdir/.hg_archival.txt
   repo: 7f491f53a367861f47ee64a80eb997d1f341b77a
@@ -510,10 +510,10 @@
   archiving (sub1) [===================================>] 3/3\r (no-eol) (esc)
                                                               \r (no-eol) (esc)
   \r (no-eol) (esc)
-  archiving (sub1/sub2) [                               ] 0/3\r (no-eol) (esc)
-  archiving (sub1/sub2) [=========>                     ] 1/3\r (no-eol) (esc)
-  archiving (sub1/sub2) [===================>           ] 2/3\r (no-eol) (esc)
-  archiving (sub1/sub2) [==============================>] 3/3\r (no-eol) (esc)
+  archiving (sub1/sub2) [                               ] 0/3\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [=========>                     ] 1/3\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [===================>           ] 2/3\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [==============================>] 3/3\r (no-eol) (glob) (esc)
                                                               \r (no-eol) (esc)
   $ find ../archive_all | sort
   ../archive_all
@@ -547,8 +547,8 @@
   archiving (sub1) [===================================>] 3/3\r (no-eol) (esc)
                                                               \r (no-eol) (esc)
   \r (no-eol) (esc)
-  archiving (sub1/sub2) [                               ] 0/1\r (no-eol) (esc)
-  archiving (sub1/sub2) [==============================>] 1/1\r (no-eol) (esc)
+  archiving (sub1/sub2) [                               ] 0/1\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [==============================>] 1/1\r (no-eol) (glob) (esc)
                                                               \r (no-eol) (esc)
   $ find ../archive_exclude | sort
   ../archive_exclude
@@ -568,9 +568,9 @@
   archiving (sub1) [ <=>                                  ] 0\r (no-eol) (esc)
                                                               \r (no-eol) (esc)
   \r (no-eol) (esc)
-  archiving (sub1/sub2) [                               ] 0/2\r (no-eol) (esc)
-  archiving (sub1/sub2) [==============>                ] 1/2\r (no-eol) (esc)
-  archiving (sub1/sub2) [==============================>] 2/2\r (no-eol) (esc)
+  archiving (sub1/sub2) [                               ] 0/2\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [==============>                ] 1/2\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [==============================>] 2/2\r (no-eol) (glob) (esc)
                                                               \r (no-eol) (esc)
   $ find ../archive_include | sort
   ../archive_include
@@ -945,7 +945,7 @@
   archiving (sub1) [ <=>                                  ] 0\r (no-eol) (esc)
                                                               \r (no-eol) (esc)
   \r (no-eol) (esc)
-  archiving (sub1/sub2) [ <=>                             ] 0\r (no-eol) (esc)
+  archiving (sub1/sub2) [ <=>                             ] 0\r (no-eol) (glob) (esc)
                                                               \r (no-eol) (esc)
   \r (no-eol) (esc)
   archiving (sub3) [ <=>                                  ] 0\r (no-eol) (esc)
@@ -959,7 +959,7 @@
   archiving (sub1) [ <=>                                  ] 0\r (no-eol) (esc)
                                                               \r (no-eol) (esc)
   \r (no-eol) (esc)
-  archiving (sub1/sub2) [ <=>                             ] 0\r (no-eol) (esc)
+  archiving (sub1/sub2) [ <=>                             ] 0\r (no-eol) (glob) (esc)
                                                               \r (no-eol) (esc)
   diff -Nru cloned.*/.hgsub cloned/.hgsub (glob)
   --- cloned.*/.hgsub	* (glob)
@@ -987,8 +987,8 @@
   archiving (sub1) [===================================>] 1/1\r (no-eol) (esc)
                                                               \r (no-eol) (esc)
   \r (no-eol) (esc)
-  archiving (sub1/sub2) [                               ] 0/1\r (no-eol) (esc)
-  archiving (sub1/sub2) [==============================>] 1/1\r (no-eol) (esc)
+  archiving (sub1/sub2) [                               ] 0/1\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [==============================>] 1/1\r (no-eol) (glob) (esc)
                                                               \r (no-eol) (esc)
   \r (no-eol) (esc)
   archiving [                                           ] 0/8\r (no-eol) (esc)
@@ -1006,10 +1006,10 @@
   archiving (sub1) [===================================>] 1/1\r (no-eol) (esc)
                                                               \r (no-eol) (esc)
   \r (no-eol) (esc)
-  archiving (sub1/sub2) [                               ] 0/3\r (no-eol) (esc)
-  archiving (sub1/sub2) [=========>                     ] 1/3\r (no-eol) (esc)
-  archiving (sub1/sub2) [===================>           ] 2/3\r (no-eol) (esc)
-  archiving (sub1/sub2) [==============================>] 3/3\r (no-eol) (esc)
+  archiving (sub1/sub2) [                               ] 0/3\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [=========>                     ] 1/3\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [===================>           ] 2/3\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [==============================>] 3/3\r (no-eol) (glob) (esc)
                                                               \r (no-eol) (esc)
   \r (no-eol) (esc)
   archiving (sub3) [                                    ] 0/1\r (no-eol) (esc)
@@ -1084,8 +1084,8 @@
   archiving (sub1) [ <=>                                  ] 0\r (no-eol) (esc)
                                                               \r (no-eol) (esc)
   \r (no-eol) (esc)
-  archiving (sub1/sub2) [                               ] 0/1\r (no-eol) (esc)
-  archiving (sub1/sub2) [==============================>] 1/1\r (no-eol) (esc)
+  archiving (sub1/sub2) [                               ] 0/1\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [==============================>] 1/1\r (no-eol) (glob) (esc)
                                                               \r (no-eol) (esc)
   --- */cloned.*/sub1/sub2/sub2	* (glob)
   +++ */cloned/sub1/sub2/sub2	* (glob)
--- a/tests/test-subrepo-git.t	Tue Jun 07 08:32:33 2016 +0200
+++ b/tests/test-subrepo-git.t	Tue Jun 14 14:52:58 2016 -0500
@@ -1146,8 +1146,8 @@
   $ hg commit -m "add subrepo"
   $ cd ..
   $ rm -f pwned.txt
-  $ env -u GIT_ALLOW_PROTOCOL \
-  > PWNED_MSG="your git is too old or mercurial has regressed" hg clone \
+  $ unset GIT_ALLOW_PROTOCOL
+  $ PWNED_MSG="your git is too old or mercurial has regressed" hg clone \
   > malicious-subrepository malicious-subrepository-protected
   Cloning into '$TESTTMP/tc/malicious-subrepository-protected/s'... (glob)
   fatal: transport 'ext' not allowed