merge default into stable for 3.9 code freeze
author    Matt Mackall <mpm@selenic.com>
date      Mon, 18 Jul 2016 23:28:14 -0500
branch    stable
tag       3.9-rc
changeset 29605:519bb4f9d3a4
parent    29460:a7d1532b26a1 (current diff)
parent    29604:db0095c83344 (diff)
child     29606:59a0cbd71921
merge default into stable for 3.9 code freeze
contrib/macosx/macosx-build.txt
hgext/hgcia.py
mercurial/httpclient/socketutil.py
tests/test-hgcia.t
tests/test-update-renames.t
--- a/Makefile	Sat Jul 02 09:41:40 2016 -0700
+++ b/Makefile	Mon Jul 18 23:28:14 2016 -0500
@@ -156,7 +156,7 @@
 # Packaging targets
 
 osx:
-	python setup.py install --optimize=1 \
+	/usr/bin/python2.7 setup.py install --optimize=1 \
 	  --root=build/mercurial/ --prefix=/usr/local/ \
 	  --install-lib=/Library/Python/2.7/site-packages/
 	make -C doc all install DESTDIR="$(PWD)/build/mercurial/"
--- a/contrib/bash_completion	Sat Jul 02 09:41:40 2016 -0700
+++ b/contrib/bash_completion	Mon Jul 18 23:28:14 2016 -0500
@@ -184,7 +184,7 @@
             return
         fi
 
-        opts=$(_hg_cmd debugcomplete --options "$cmd")
+        opts=$(HGPLAINEXCEPT=alias _hg_cmd debugcomplete --options "$cmd")
 
         COMPREPLY=(${COMPREPLY[@]:-} $(compgen -W '$opts' -- "$cur"))
         _hg_fix_wordlist
--- a/contrib/bdiff-torture.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/contrib/bdiff-torture.py	Mon Jul 18 23:28:14 2016 -0500
@@ -1,7 +1,9 @@
 # Randomized torture test generation for bdiff
 
 from __future__ import absolute_import, print_function
-import random, sys
+import random
+import sys
+
 from mercurial import (
     bdiff,
     mpatch,
--- a/contrib/check-code.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/contrib/check-code.py	Mon Jul 18 23:28:14 2016 -0500
@@ -26,6 +26,15 @@
 import os
 import re
 import sys
+if sys.version_info[0] < 3:
+    opentext = open
+else:
+    def opentext(f):
+        return open(f, encoding='ascii')
+try:
+    xrange
+except NameError:
+    xrange = range
 try:
     import re2
 except ImportError:
@@ -41,26 +50,26 @@
             pass
     return re.compile(pat)
 
+# check the "rules depending on implementation of repquote()" in each
+# pattern list (especially pypats) before changing repquote()
+_repquotefixedmap = {' ': ' ', '\n': '\n', '.': 'p', ':': 'q',
+                     '%': '%', '\\': 'b', '*': 'A', '+': 'P', '-': 'M'}
+def _repquoteencodechr(i):
+    if i > 255:
+        return 'u'
+    c = chr(i)
+    if c in _repquotefixedmap:
+        return _repquotefixedmap[c]
+    if c.isalpha():
+        return 'x'
+    if c.isdigit():
+        return 'n'
+    return 'o'
+_repquotett = ''.join(_repquoteencodechr(i) for i in xrange(256))
+
 def repquote(m):
-    fromc = '.:'
-    tochr = 'pq'
-    def encodechr(i):
-        if i > 255:
-            return 'u'
-        c = chr(i)
-        if c in ' \n':
-            return c
-        if c.isalpha():
-            return 'x'
-        if c.isdigit():
-            return 'n'
-        try:
-            return tochr[fromc.find(c)]
-        except (ValueError, IndexError):
-            return 'o'
     t = m.group('text')
-    tt = ''.join(encodechr(i) for i in xrange(256))
-    t = t.translate(tt)
+    t = t.translate(_repquotett)
     return m.group('quote') + t + m.group('quote')
 
 def reppython(m):
@@ -103,7 +112,7 @@
     (r'tail -n', "don't use the '-n' option to tail, just use '-<num>'"),
     (r'sha1sum', "don't use sha1sum, use $TESTDIR/md5sum.py"),
     (r'ls.*-\w*R', "don't use 'ls -R', use 'find'"),
-    (r'printf.*[^\\]\\([1-9]|0\d)', "don't use 'printf \NNN', use Python"),
+    (r'printf.*[^\\]\\([1-9]|0\d)', r"don't use 'printf \NNN', use Python"),
     (r'printf.*[^\\]\\x', "don't use printf \\x, use Python"),
     (r'\$\(.*\)', "don't use $(expr), use `expr`"),
     (r'rm -rf \*', "don't use naked rm -rf, target a directory"),
@@ -114,7 +123,7 @@
     (r'export .*=', "don't export and assign at once"),
     (r'^source\b', "don't use 'source', use '.'"),
     (r'touch -d', "don't use 'touch -d', use 'touch -t' instead"),
-    (r'ls +[^|\n-]+ +-', "options to 'ls' must come before filenames"),
+    (r'\bls +[^|\n-]+ +-', "options to 'ls' must come before filenames"),
     (r'[^>\n]>\s*\$HGRCPATH', "don't overwrite $HGRCPATH, append to it"),
     (r'^stop\(\)', "don't use 'stop' as a shell function name"),
     (r'(\[|\btest\b).*-e ', "don't use 'test -e', use 'test -f'"),
@@ -133,6 +142,7 @@
     (r'\|&', "don't use |&, use 2>&1"),
     (r'\w =  +\w', "only one space after = allowed"),
     (r'\bsed\b.*[^\\]\\n', "don't use 'sed ... \\n', use a \\ and a newline"),
+    (r'env.*-u', "don't use 'env -u VAR', use 'unset VAR'")
   ],
   # warnings
   [
@@ -179,6 +189,8 @@
     (r'^  .*: largefile \S+ not available from file:.*/.*[^)]$', winglobmsg),
     (r'^  .*file://\$TESTTMP',
      'write "file:/*/$TESTTMP" + (glob) to match on windows too'),
+    (r'^  [^$>].*27\.0\.0\.1.*[^)]$',
+     'use (glob) to match localhost IP on hosts without 127.0.0.1 too'),
     (r'^  (cat|find): .*: No such file or directory',
      'use test -f to test for file existence'),
     (r'^  diff -[^ -]*p',
@@ -197,8 +209,8 @@
   ],
   # warnings
   [
-    (r'^  [^*?/\n]* \(glob\)$',
-     "glob match with no glob character (?*/)"),
+    (r'^  (?!.*127\.0\.0\.1)[^*?/\n]* \(glob\)$',
+     "glob match with no glob string (?, *, /, and 127.0.0.1)"),
   ]
 ]
 
@@ -214,7 +226,7 @@
 
 utestfilters = [
     (r"<<(\S+)((.|\n)*?\n  > \1)", rephere),
-    (r"( *)(#([^\n]*\S)?)", repcomment),
+    (r"( +)(#([^\n]*\S)?)", repcomment),
 ]
 
 pypats = [
@@ -238,7 +250,6 @@
     (r'^\s+(\w|\.)+=\w[^,()\n]*$', "missing whitespace in assignment"),
     (r'\w\s=\s\s+\w', "gratuitous whitespace after ="),
     (r'.{81}', "line too long"),
-    (r' x+[xo][\'"]\n\s+[\'"]x', 'string join across lines with no space'),
     (r'[^\n]\Z', "no trailing newline"),
     (r'(\S[ \t]+|^[ \t]+)\n', "trailing whitespace"),
 #    (r'^\s+[^_ \n][^_. \n]+_[^_\n]+\s*=',
@@ -305,8 +316,6 @@
     (r'^\s*except\s([^\(,]+|\([^\)]+\))\s*,',
      'legacy exception syntax; use "as" instead of ","'),
     (r':\n(    )*( ){1,3}[^ ]', "must indent 4 spaces"),
-    (r'ui\.(status|progress|write|note|warn)\([\'\"]x',
-     "missing _() in ui message (use () to hide false-positives)"),
     (r'release\(.*wlock, .*lock\)', "wrong lock release order"),
     (r'\b__bool__\b', "__bool__ should be __nonzero__ in Python 2"),
     (r'os\.path\.join\(.*, *(""|\'\')\)',
@@ -318,9 +327,37 @@
     (r'^import Queue', "don't use Queue, use util.queue + util.empty"),
     (r'^import cStringIO', "don't use cStringIO.StringIO, use util.stringio"),
     (r'^import urllib', "don't use urllib, use util.urlreq/util.urlerr"),
+    (r'^import SocketServer', "don't use SocketServer, use util.socketserver"),
+    (r'^import urlparse', "don't use urlparse, use util.urlparse"),
+    (r'^import xmlrpclib', "don't use xmlrpclib, use util.xmlrpclib"),
+    (r'^import cPickle', "don't use cPickle, use util.pickle"),
+    (r'^import pickle', "don't use pickle, use util.pickle"),
+    (r'^import httplib', "don't use httplib, use util.httplib"),
+    (r'^import BaseHTTPServer', "use util.httpserver instead"),
+    (r'\.next\(\)', "don't use .next(), use next(...)"),
+
+    # rules depending on implementation of repquote()
+    (r' x+[xpqo%APM][\'"]\n\s+[\'"]x',
+     'string join across lines with no space'),
+    (r'''(?x)ui\.(status|progress|write|note|warn)\(
+         [ \t\n#]*
+         (?# any strings/comments might precede a string, which
+           # contains translatable message)
+         ((['"]|\'\'\'|""")[ \npq%bAPMxno]*(['"]|\'\'\'|""")[ \t\n#]+)*
+         (?# sequence consisting of below might precede translatable message
+           # - formatting string: "% 10s", "%05d", "% -3.2f", "%*s", "%%" ...
+           # - escaped character: "\\", "\n", "\0" ...
+           # - character other than '%', 'b' as '\', and 'x' as alphabet)
+         (['"]|\'\'\'|""")
+         ((%([ n]?[PM]?([np]+|A))?x)|%%|b[bnx]|[ \nnpqAPMo])*x
+         (?# this regexp can't use [^...] style,
+           # because _preparepats forcibly adds "\n" into [^...],
+           # even though this regexp wants match it against "\n")''',
+     "missing _() in ui message (use () to hide false-positives)"),
   ],
   # warnings
   [
+    # rules depending on implementation of repquote()
     (r'(^| )pp +xxxxqq[ \n][^\n]', "add two newlines after '.. note::'"),
   ]
 ]
@@ -365,9 +402,13 @@
     (r'^\s*#import\b', "use only #include in standard C code"),
     (r'strcpy\(', "don't use strcpy, use strlcpy or memcpy"),
     (r'strcat\(', "don't use strcat"),
+
+    # rules depending on implementation of repquote()
   ],
   # warnings
-  []
+  [
+    # rules depending on implementation of repquote()
+  ]
 ]
 
 cfilters = [
@@ -433,7 +474,6 @@
         filters = c[3]
         for i, flt in enumerate(filters):
             filters[i] = re.compile(flt[0]), flt[1]
-_preparepats()
 
 class norepeatlogger(object):
     def __init__(self):
@@ -486,12 +526,15 @@
     result = True
 
     try:
-        fp = open(f)
+        with opentext(f) as fp:
+            try:
+                pre = post = fp.read()
+            except UnicodeDecodeError as e:
+                print("%s while reading %s" % (e, f))
+                return result
     except IOError as e:
         print("Skipping %s, %s" % (f, str(e).split(':', 1)[0]))
         return result
-    pre = post = fp.read()
-    fp.close()
 
     for name, match, magic, filters, pats in checks:
         if debug:
@@ -578,7 +621,7 @@
 
     return result
 
-if __name__ == "__main__":
+def main():
     parser = optparse.OptionParser("%prog [options] [files]")
     parser.add_option("-w", "--warnings", action="store_true",
                       help="include warning-level checks")
@@ -600,10 +643,15 @@
     else:
         check = args
 
+    _preparepats()
+
     ret = 0
     for f in check:
         if not checkfile(f, maxerr=options.per_file, warnings=options.warnings,
                          blame=options.blame, debug=options.debug,
                          lineno=options.lineno):
             ret = 1
-    sys.exit(ret)
+    return ret
+
+if __name__ == "__main__":
+    sys.exit(main())
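
The check-code.py hunks above replace repquote()'s per-call table construction with a module-level 256-entry table that is consumed by str.translate(). A minimal sketch of that technique, adapted to Python 3's dict-based translate; the class letters and names here are illustrative, not the real check-code.py table:

# Classify every possible byte once at import time, then reuse the table
# on each call instead of rebuilding it.
_fixed = {' ': ' ', '\n': '\n', '.': 'p', ':': 'q'}

def _encodechr(i):
    c = chr(i)
    if c in _fixed:
        return _fixed[c]
    if c.isalpha():
        return 'x'
    if c.isdigit():
        return 'n'
    return 'o'

# build the table once instead of on every call
_table = {i: _encodechr(i) for i in range(256)}

def classify(text):
    """Replace each character of `text` with its class letter."""
    return text.translate(_table)

assert classify("a1. ") == "xnp "
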
--- a/contrib/check-commit	Sat Jul 02 09:41:40 2016 -0700
+++ b/contrib/check-commit	Mon Jul 18 23:28:14 2016 -0500
@@ -15,7 +15,11 @@
 #
 # See also: https://mercurial-scm.org/wiki/ContributingChanges
 
-import re, sys, os
+from __future__ import absolute_import, print_function
+
+import os
+import re
+import sys
 
 commitheader = r"^(?:# [^\n]*\n)*"
 afterheader = commitheader + r"(?!#)"
@@ -69,9 +73,9 @@
                     break
                 if not printed:
                     printed = True
-                    print "node: %s" % node
-                print "%d: %s" % (n, msg)
-                print " %s" % nonempty(l, last)[:-1]
+                    print("node: %s" % node)
+                print("%d: %s" % (n, msg))
+                print(" %s" % nonempty(l, last)[:-1])
                 if "BYPASS" not in os.environ:
                     exitcode = 1
                 del hits[0]
--- a/contrib/check-py3-compat.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/contrib/check-py3-compat.py	Mon Jul 18 23:28:14 2016 -0500
@@ -61,7 +61,20 @@
                 imp.load_module(name, fh, '', ('py', 'r', imp.PY_SOURCE))
             except Exception as e:
                 exc_type, exc_value, tb = sys.exc_info()
-                frame = traceback.extract_tb(tb)[-1]
+                # We walk the stack and ignore frames from our custom importer,
+                # import mechanisms, and stdlib modules. This kinda/sorta
+                # emulates CPython behavior in import.c while also attempting
+                # to pin blame on a Mercurial file.
+                for frame in reversed(traceback.extract_tb(tb)):
+                    if frame.name == '_call_with_frames_removed':
+                        continue
+                    if 'importlib' in frame.filename:
+                        continue
+                    if 'mercurial/__init__.py' in frame.filename:
+                        continue
+                    if frame.filename.startswith(sys.prefix):
+                        continue
+                    break
 
                 if frame.filename:
                     filename = os.path.basename(frame.filename)
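
The check-py3-compat.py change walks the extracted traceback from the innermost frame outwards and skips frames that belong to the import machinery, so the report blames a Mercurial file rather than importlib. A small Python 3 sketch of the same filtering idea; the skip rules are taken from the hunk above, the demo exception is arbitrary:

import sys
import traceback

def interesting_frame(tb, prefixes=('importlib',)):
    """Return the deepest frame that is not part of the import machinery."""
    frames = traceback.extract_tb(tb)
    for frame in reversed(frames):
        if frame.name == '_call_with_frames_removed':
            continue
        if any(p in frame.filename for p in prefixes):
            continue
        if frame.filename.startswith(sys.prefix):
            continue
        return frame
    return frames[-1]  # fall back to the innermost frame

try:
    raise ValueError('boom')
except ValueError:
    _, _, tb = sys.exc_info()
    frame = interesting_frame(tb)
    print(frame.filename, frame.lineno, frame.name)
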
--- a/contrib/chg/README	Sat Jul 02 09:41:40 2016 -0700
+++ b/contrib/chg/README	Mon Jul 18 23:28:14 2016 -0500
@@ -28,3 +28,5 @@
 
  * CHGDEBUG enables debug messages.
  * CHGSOCKNAME specifies the socket path of the background cmdserver.
+ * CHGTIMEOUT specifies how many seconds chg will wait before giving up
+   connecting to a cmdserver. If it is 0, chg will wait forever. Default: 60
--- a/contrib/chg/chg.c	Sat Jul 02 09:41:40 2016 -0700
+++ b/contrib/chg/chg.c	Mon Jul 18 23:28:14 2016 -0500
@@ -249,7 +249,13 @@
 	int pst = 0;
 
 	debugmsg("try connect to %s repeatedly", opts->sockname);
-	for (unsigned int i = 0; i < 10 * 100; i++) {
+
+	unsigned int timeoutsec = 60;  /* default: 60 seconds */
+	const char *timeoutenv = getenv("CHGTIMEOUT");
+	if (timeoutenv)
+		sscanf(timeoutenv, "%u", &timeoutsec);
+
+	for (unsigned int i = 0; !timeoutsec || i < timeoutsec * 100; i++) {
 		hgclient_t *hgc = hgc_open(opts->sockname);
 		if (hgc)
 			return hgc;
@@ -332,6 +338,7 @@
 	}
 }
 
+static pid_t pagerpid = 0;
 static pid_t peerpid = 0;
 
 static void forwardsignal(int sig)
@@ -374,6 +381,17 @@
 	abortmsgerrno("failed to handle stop signal");
 }
 
+static void handlechildsignal(int sig UNUSED_)
+{
+	if (peerpid == 0 || pagerpid == 0)
+		return;
+	/* if pager exits, notify the server with SIGPIPE immediately.
+	 * otherwise the server won't get SIGPIPE if it does not write
+	 * anything. (issue5278) */
+	if (waitpid(pagerpid, NULL, WNOHANG) == pagerpid)
+		kill(peerpid, SIGPIPE);
+}
+
 static void setupsignalhandler(pid_t pid)
 {
 	if (pid <= 0)
@@ -410,6 +428,11 @@
 	sa.sa_flags = SA_RESTART;
 	if (sigaction(SIGTSTP, &sa, NULL) < 0)
 		goto error;
+	/* get notified when pager exits */
+	sa.sa_handler = handlechildsignal;
+	sa.sa_flags = SA_RESTART;
+	if (sigaction(SIGCHLD, &sa, NULL) < 0)
+		goto error;
 
 	return;
 
@@ -417,21 +440,56 @@
 	abortmsgerrno("failed to set up signal handlers");
 }
 
-/* This implementation is based on hgext/pager.py (pre 369741ef7253) */
-static void setuppager(hgclient_t *hgc, const char *const args[],
+static void restoresignalhandler()
+{
+	struct sigaction sa;
+	memset(&sa, 0, sizeof(sa));
+	sa.sa_handler = SIG_DFL;
+	sa.sa_flags = SA_RESTART;
+	if (sigemptyset(&sa.sa_mask) < 0)
+		goto error;
+
+	if (sigaction(SIGHUP, &sa, NULL) < 0)
+		goto error;
+	if (sigaction(SIGTERM, &sa, NULL) < 0)
+		goto error;
+	if (sigaction(SIGWINCH, &sa, NULL) < 0)
+		goto error;
+	if (sigaction(SIGCONT, &sa, NULL) < 0)
+		goto error;
+	if (sigaction(SIGTSTP, &sa, NULL) < 0)
+		goto error;
+	if (sigaction(SIGCHLD, &sa, NULL) < 0)
+		goto error;
+
+	/* ignore Ctrl+C while shutting down so the pager exits cleanly */
+	sa.sa_handler = SIG_IGN;
+	if (sigaction(SIGINT, &sa, NULL) < 0)
+		goto error;
+
+	peerpid = 0;
+	return;
+
+error:
+	abortmsgerrno("failed to restore signal handlers");
+}
+
+/* This implementation is based on hgext/pager.py (post 369741ef7253)
+ * Return 0 if pager is not started, or pid of the pager */
+static pid_t setuppager(hgclient_t *hgc, const char *const args[],
 		       size_t argsize)
 {
 	const char *pagercmd = hgc_getpager(hgc, args, argsize);
 	if (!pagercmd)
-		return;
+		return 0;
 
 	int pipefds[2];
 	if (pipe(pipefds) < 0)
-		return;
+		return 0;
 	pid_t pid = fork();
 	if (pid < 0)
 		goto error;
-	if (pid == 0) {
+	if (pid > 0) {
 		close(pipefds[0]);
 		if (dup2(pipefds[1], fileno(stdout)) < 0)
 			goto error;
@@ -441,7 +499,7 @@
 		}
 		close(pipefds[1]);
 		hgc_attachio(hgc);  /* reattach to pager */
-		return;
+		return pid;
 	} else {
 		dup2(pipefds[0], fileno(stdin));
 		close(pipefds[0]);
@@ -451,13 +509,27 @@
 		if (r < 0) {
 			abortmsgerrno("cannot start pager '%s'", pagercmd);
 		}
-		return;
+		return 0;
 	}
 
 error:
 	close(pipefds[0]);
 	close(pipefds[1]);
 	abortmsgerrno("failed to prepare pager");
+	return 0;
+}
+
+static void waitpager(pid_t pid)
+{
+	/* close output streams to notify the pager its input ends */
+	fclose(stdout);
+	fclose(stderr);
+	while (1) {
+		pid_t ret = waitpid(pid, NULL, 0);
+		if (ret == -1 && errno == EINTR)
+			continue;
+		break;
+	}
 }
 
 /* Run instructions sent from the server like unlink and set redirect path
@@ -585,9 +657,13 @@
 	}
 
 	setupsignalhandler(hgc_peerpid(hgc));
-	setuppager(hgc, argv + 1, argc - 1);
+	pagerpid = setuppager(hgc, argv + 1, argc - 1);
 	int exitcode = hgc_runcommand(hgc, argv + 1, argc - 1);
+	restoresignalhandler();
 	hgc_close(hgc);
 	freecmdserveropts(&opts);
+	if (pagerpid)
+		waitpager(pagerpid);
+
 	return exitcode;
 }
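
The chg.c change above makes the connect-retry loop configurable through CHGTIMEOUT (default 60 seconds, with 0 meaning wait forever). A rough Python rendering of that retry policy, with try_connect standing in for hgc_open(); the 10 ms sleep is an assumption inferred from the `timeoutsec * 100` iteration count:

import os
import time

def connect_with_timeout(try_connect, default_timeout=60):
    """Retry try_connect() until it yields a connection or CHGTIMEOUT expires.

    CHGTIMEOUT semantics as documented in contrib/chg/README above: a number
    of seconds to wait, and 0 means wait forever.
    """
    timeout = default_timeout
    env = os.environ.get('CHGTIMEOUT')
    if env and env.isdigit():
        timeout = int(env)
    attempt = 0
    while timeout == 0 or attempt < timeout * 100:
        conn = try_connect()
        if conn is not None:
            return conn
        time.sleep(0.01)  # assumed ~10 ms pause between attempts
        attempt += 1
    raise TimeoutError('giving up waiting for the command server')
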
--- a/contrib/chg/hgclient.c	Sat Jul 02 09:41:40 2016 -0700
+++ b/contrib/chg/hgclient.c	Mon Jul 18 23:28:14 2016 -0500
@@ -63,6 +63,7 @@
 
 struct hgclient_tag_ {
 	int sockfd;
+	pid_t pgid;
 	pid_t pid;
 	context_t ctx;
 	unsigned int capflags;
@@ -125,10 +126,15 @@
 		return;  /* assumes input request */
 
 	size_t cursize = 0;
+	int emptycount = 0;
 	while (cursize < hgc->ctx.datasize) {
 		rsize = recv(hgc->sockfd, hgc->ctx.data + cursize,
 			     hgc->ctx.datasize - cursize, 0);
-		if (rsize < 0)
+		/* rsize == 0 normally indicates EOF, while it's also a valid
+		 * packet size for unix socket. treat it as EOF and abort if
+		 * we get many empty responses in a row. */
+		emptycount = (rsize == 0 ? emptycount + 1 : 0);
+		if (rsize < 0 || emptycount > 20)
 			abortmsg("failed to read data block");
 		cursize += rsize;
 	}
@@ -339,6 +345,8 @@
 			u = dataend;
 		if (strncmp(s, "capabilities:", t - s + 1) == 0) {
 			hgc->capflags = parsecapabilities(t + 2, u);
+		} else if (strncmp(s, "pgid:", t - s + 1) == 0) {
+			hgc->pgid = strtol(t + 2, NULL, 10);
 		} else if (strncmp(s, "pid:", t - s + 1) == 0) {
 			hgc->pid = strtol(t + 2, NULL, 10);
 		}
@@ -463,6 +471,12 @@
 	free(hgc);
 }
 
+pid_t hgc_peerpgid(const hgclient_t *hgc)
+{
+	assert(hgc);
+	return hgc->pgid;
+}
+
 pid_t hgc_peerpid(const hgclient_t *hgc)
 {
 	assert(hgc);
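
The readchannel() fix in hgclient.c treats a zero-byte recv() as a probable EOF and aborts after too many consecutive empty reads instead of spinning forever. The same guard expressed as a standalone Python helper (not the chg client API):

def recv_exact(sock, size, max_empty=20):
    """Read exactly `size` bytes from a socket.

    A zero-byte recv() normally means EOF but is also a legal return value,
    so only give up after `max_empty` empty reads in a row, mirroring the
    emptycount guard above.
    """
    buf = b''
    empty = 0
    while len(buf) < size:
        chunk = sock.recv(size - len(buf))
        empty = empty + 1 if not chunk else 0
        if empty > max_empty:
            raise EOFError('connection closed while reading data block')
        buf += chunk
    return buf
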
--- a/contrib/chg/hgclient.h	Sat Jul 02 09:41:40 2016 -0700
+++ b/contrib/chg/hgclient.h	Mon Jul 18 23:28:14 2016 -0500
@@ -18,6 +18,7 @@
 hgclient_t *hgc_open(const char *sockname);
 void hgc_close(hgclient_t *hgc);
 
+pid_t hgc_peerpgid(const hgclient_t *hgc);
 pid_t hgc_peerpid(const hgclient_t *hgc);
 
 const char **hgc_validate(hgclient_t *hgc, const char *const args[],
--- a/contrib/chg/util.h	Sat Jul 02 09:41:40 2016 -0700
+++ b/contrib/chg/util.h	Mon Jul 18 23:28:14 2016 -0500
@@ -12,8 +12,10 @@
 
 #ifdef __GNUC__
 #define PRINTF_FORMAT_ __attribute__((format(printf, 1, 2)))
+#define UNUSED_ __attribute__((unused))
 #else
 #define PRINTF_FORMAT_
+#define UNUSED_
 #endif
 
 void abortmsg(const char *fmt, ...) PRINTF_FORMAT_;
--- a/contrib/debugshell.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/contrib/debugshell.py	Mon Jul 18 23:28:14 2016 -0500
@@ -52,7 +52,7 @@
         with demandimport.deactivated():
             __import__(pdbmap[debugger])
     except ImportError:
-        ui.warn("%s debugger specified but %s module was not found\n"
+        ui.warn(("%s debugger specified but %s module was not found\n")
                 % (debugger, pdbmap[debugger]))
         debugger = 'pdb'
 
--- a/contrib/dirstatenonnormalcheck.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/contrib/dirstatenonnormalcheck.py	Mon Jul 18 23:28:14 2016 -0500
@@ -25,10 +25,10 @@
     """Compute nonnormalset from dmap, check that it matches _nonnormalset"""
     nonnormalcomputedmap = nonnormalentries(dmap)
     if _nonnormalset != nonnormalcomputedmap:
-        ui.develwarn("%s call to %s\n" % (label, orig))
-        ui.develwarn("inconsistency in nonnormalset\n")
-        ui.develwarn("[nonnormalset] %s\n" % _nonnormalset)
-        ui.develwarn("[map] %s\n" % nonnormalcomputedmap)
+        ui.develwarn("%s call to %s\n" % (label, orig), config='dirstate')
+        ui.develwarn("inconsistency in nonnormalset\n", config='dirstate')
+        ui.develwarn("[nonnormalset] %s\n" % _nonnormalset, config='dirstate')
+        ui.develwarn("[map] %s\n" % nonnormalcomputedmap, config='dirstate')
 
 def _checkdirstate(orig, self, arg):
     """Check nonnormal set consistency before and after the call to orig"""
--- a/contrib/dumprevlog	Sat Jul 02 09:41:40 2016 -0700
+++ b/contrib/dumprevlog	Mon Jul 18 23:28:14 2016 -0500
@@ -2,8 +2,14 @@
 # Dump revlogs as raw data stream
 # $ find .hg/store/ -name "*.i" | xargs dumprevlog > repo.dump
 
+from __future__ import absolute_import, print_function
+
 import sys
-from mercurial import revlog, node, util
+from mercurial import (
+    node,
+    revlog,
+    util,
+)
 
 for fp in (sys.stdin, sys.stdout, sys.stderr):
     util.setbinary(fp)
@@ -11,15 +17,15 @@
 for f in sys.argv[1:]:
     binopen = lambda fn: open(fn, 'rb')
     r = revlog.revlog(binopen, f)
-    print "file:", f
+    print("file:", f)
     for i in r:
         n = r.node(i)
         p = r.parents(n)
         d = r.revision(n)
-        print "node:", node.hex(n)
-        print "linkrev:", r.linkrev(i)
-        print "parents:", node.hex(p[0]), node.hex(p[1])
-        print "length:", len(d)
-        print "-start-"
-        print d
-        print "-end-"
+        print("node:", node.hex(n))
+        print("linkrev:", r.linkrev(i))
+        print("parents:", node.hex(p[0]), node.hex(p[1]))
+        print("length:", len(d))
+        print("-start-")
+        print(d)
+        print("-end-")
--- a/contrib/import-checker.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/contrib/import-checker.py	Mon Jul 18 23:28:14 2016 -0500
@@ -11,8 +11,9 @@
 # Import a minimal set of stdlib modules needed for list_stdlib_modules()
 # to work when run from a virtualenv.  The modules were chosen empirically
 # so that the return value matches the return value without virtualenv.
-import BaseHTTPServer
-import zlib
+if True: # disable lexical sorting checks
+    import BaseHTTPServer
+    import zlib
 
 # Whitelist of modules that symbols can be directly imported from.
 allowsymbolimports = (
@@ -126,22 +127,32 @@
     False
     >>> fromlocal(None, 1)
     ('foo', 'foo.__init__', True)
+    >>> fromlocal('foo1', 1)
+    ('foo.foo1', 'foo.foo1', False)
     >>> fromlocal2 = fromlocalfunc('foo.xxx.yyy', localmods)
     >>> fromlocal2(None, 2)
     ('foo', 'foo.__init__', True)
+    >>> fromlocal2('bar2', 1)
+    False
+    >>> fromlocal2('bar', 2)
+    ('foo.bar', 'foo.bar.__init__', True)
     """
     prefix = '.'.join(modulename.split('.')[:-1])
     if prefix:
         prefix += '.'
     def fromlocal(name, level=0):
-        # name is None when relative imports are used.
-        if name is None:
+        # name is false value when relative imports are used.
+        if not name:
             # If relative imports are used, level must not be absolute.
             assert level > 0
             candidates = ['.'.join(modulename.split('.')[:-level])]
         else:
-            # Check relative name first.
-            candidates = [prefix + name, name]
+            if not level:
+                # Check relative name first.
+                candidates = [prefix + name, name]
+            else:
+                candidates = ['.'.join(modulename.split('.')[:-level]) +
+                              '.' + name]
 
         for n in candidates:
             if n in localmods:
@@ -175,6 +186,9 @@
 
     >>> 'cStringIO' in mods
     True
+
+    >>> 'cffi' in mods
+    True
     """
     for m in sys.builtin_module_names:
         yield m
@@ -187,6 +201,8 @@
         yield m
     for m in 'cPickle', 'datetime': # in Python (not C) on PyPy
         yield m
+    for m in ['cffi']:
+        yield m
     stdlib_prefixes = set([sys.prefix, sys.exec_prefix])
     # We need to supplement the list of prefixes for the search to work
     # when run from within a virtualenv.
@@ -360,7 +376,7 @@
     * Symbols can only be imported from specific modules (see
       `allowsymbolimports`). For other modules, first import the module then
       assign the symbol to a module-level variable. In addition, these imports
-      must be performed before other relative imports. This rule only
+      must be performed before other local imports. This rule only
       applies to import statements outside of any blocks.
     * Relative imports from the standard library are not allowed.
     * Certain modules must be aliased to alternate names to avoid aliasing
@@ -371,8 +387,8 @@
 
     # Whether a local/non-stdlib import has been performed.
     seenlocal = None
-    # Whether a relative, non-symbol import has been seen.
-    seennonsymbolrelative = False
+    # Whether a local/non-stdlib, non-symbol import has been seen.
+    seennonsymbollocal = False
     # The last name to be imported (for sorting).
     lastname = None
     # Relative import levels encountered so far.
@@ -446,26 +462,26 @@
 
             # Direct symbol import is only allowed from certain modules and
             # must occur before non-symbol imports.
+            found = fromlocal(node.module, node.level)
+            if found and found[2]:  # node.module is a package
+                prefix = found[0] + '.'
+                symbols = [n.name for n in node.names
+                           if not fromlocal(prefix + n.name)]
+            else:
+                symbols = [n.name for n in node.names]
             if node.module and node.col_offset == root_col_offset:
-                found = fromlocal(node.module, node.level)
-                if found and found[2]:  # node.module is a package
-                    prefix = found[0] + '.'
-                    symbols = [n.name for n in node.names
-                               if not fromlocal(prefix + n.name)]
-                else:
-                    symbols = [n.name for n in node.names]
-
                 if symbols and fullname not in allowsymbolimports:
                     yield msg('direct symbol import %s from %s',
                               ', '.join(symbols), fullname)
 
-                if symbols and seennonsymbolrelative:
+                if symbols and seennonsymbollocal:
                     yield msg('symbol import follows non-symbol import: %s',
                               fullname)
+            if not symbols and fullname not in stdlib_modules:
+                seennonsymbollocal = True
 
             if not node.module:
                 assert node.level
-                seennonsymbolrelative = True
 
                 # Only allow 1 group per level.
                 if (node.level in seenlevels
@@ -652,7 +668,7 @@
     the input file.
     """
     py = False
-    if f.endswith('.py'):
+    if not f.endswith('.t'):
         with open(f) as src:
             yield src.read(), modname, f, 0
             py = True
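
The fromlocal() change in import-checker.py resolves explicit relative imports by stripping `level` trailing components from the importing module's dotted name before appending the imported name. The arithmetic in isolation, a sketch matching the doctests added above:

def resolve_relative(modulename, name, level):
    """Absolute dotted name for `from <level dots><name> import ...`."""
    base = '.'.join(modulename.split('.')[:-level])
    if not name:  # bare "from .. import something"
        return base
    return base + '.' + name

assert resolve_relative('foo.xxx.yyy', 'bar', 2) == 'foo.bar'
assert resolve_relative('foo.xxx.yyy', None, 2) == 'foo'
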
--- a/contrib/macosx/Welcome.html	Sat Jul 02 09:41:40 2016 -0700
+++ b/contrib/macosx/Welcome.html	Mon Jul 18 23:28:14 2016 -0500
@@ -1,5 +1,5 @@
 <!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">
-<!-- This is the second screen displayed during the install. -->
+<!-- This is the first screen displayed during the install. -->
 <html>
 <head>
   <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
--- a/contrib/macosx/macosx-build.txt	Sat Jul 02 09:41:40 2016 -0700
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,11 +0,0 @@
-to build a new macosx binary package:
-
-install macpython from http://www.python.org/download/mac
-
-install py2app from http://pythonmac.org/packages/
-
-make sure /usr/local/bin is in your path
-
-run bdist_mpkg in top-level hg directory
-
-find packaged stuff in dist directory
--- a/contrib/perf.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/contrib/perf.py	Mon Jul 18 23:28:14 2016 -0500
@@ -1,6 +1,23 @@
 # perf.py - performance test routines
 '''helper extension to measure performance'''
 
+# "historical portability" policy of perf.py:
+#
+# We have to do:
+# - make perf.py "loadable" with as wide Mercurial version as possible
+#   This doesn't mean that perf commands work correctly with that Mercurial.
+#   BTW, perf.py itself has been available since 1.1 (or eb240755386d).
+# - make historical perf command work correctly with as wide Mercurial
+#   version as possible
+#
+# We have to do, if possible with reasonable cost:
+# - make recent perf command for historical feature work correctly
+#   with early Mercurial
+#
+# We don't have to do:
+# - make perf command for recent feature work correctly with early
+#   Mercurial
+
 from __future__ import absolute_import
 import functools
 import os
@@ -8,25 +25,97 @@
 import sys
 import time
 from mercurial import (
-    branchmap,
     cmdutil,
     commands,
     copies,
     error,
+    extensions,
     mdiff,
     merge,
-    obsolete,
-    repoview,
     revlog,
-    scmutil,
     util,
 )
 
-formatteropts = commands.formatteropts
-revlogopts = commands.debugrevlogopts
+# for "historical portability":
+# try to import modules separately (in dict order), and ignore
+# failure, because these aren't available with early Mercurial
+try:
+    from mercurial import branchmap # since 2.5 (or bcee63733aad)
+except ImportError:
+    pass
+try:
+    from mercurial import obsolete # since 2.3 (or ad0d6c2b3279)
+except ImportError:
+    pass
+try:
+    from mercurial import repoview # since 2.5 (or 3a6ddacb7198)
+except ImportError:
+    pass
+try:
+    from mercurial import scmutil # since 1.9 (or 8b252e826c68)
+except ImportError:
+    pass
+
+# for "historical portability":
+# define util.safehasattr forcibly, because util.safehasattr has been
+# available since 1.9.3 (or 94b200a11cf7)
+_undefined = object()
+def safehasattr(thing, attr):
+    return getattr(thing, attr, _undefined) is not _undefined
+setattr(util, 'safehasattr', safehasattr)
+
+# for "historical portability":
+# use locally defined empty option list, if formatteropts isn't
+# available, because commands.formatteropts has been available since
+# 3.2 (or 7a7eed5176a4), even though formatting itself has been
+# available since 2.2 (or ae5f92e154d3)
+formatteropts = getattr(commands, "formatteropts", [])
+
+# for "historical portability":
+# use locally defined option list, if debugrevlogopts isn't available,
+# because commands.debugrevlogopts has been available since 3.7 (or
+# 5606f7d0d063), even though cmdutil.openrevlog() has been available
+# since 1.9 (or a79fea6b3e77).
+revlogopts = getattr(commands, "debugrevlogopts", [
+        ('c', 'changelog', False, ('open changelog')),
+        ('m', 'manifest', False, ('open manifest')),
+        ('', 'dir', False, ('open directory manifest')),
+        ])
 
 cmdtable = {}
-command = cmdutil.command(cmdtable)
+
+# for "historical portability":
+# define parsealiases locally, because cmdutil.parsealiases has been
+# available since 1.5 (or 6252852b4332)
+def parsealiases(cmd):
+    return cmd.lstrip("^").split("|")
+
+if safehasattr(cmdutil, 'command'):
+    import inspect
+    command = cmdutil.command(cmdtable)
+    if 'norepo' not in inspect.getargspec(command)[0]:
+        # for "historical portability":
+        # wrap original cmdutil.command, because "norepo" option has
+        # been available since 3.1 (or 75a96326cecb)
+        _command = command
+        def command(name, options=(), synopsis=None, norepo=False):
+            if norepo:
+                commands.norepo += ' %s' % ' '.join(parsealiases(name))
+            return _command(name, list(options), synopsis)
+else:
+    # for "historical portability":
+    # define "@command" annotation locally, because cmdutil.command
+    # has been available since 1.9 (or 2daa5179e73f)
+    def command(name, options=(), synopsis=None, norepo=False):
+        def decorator(func):
+            if synopsis:
+                cmdtable[name] = func, list(options), synopsis
+            else:
+                cmdtable[name] = func, list(options)
+            if norepo:
+                commands.norepo += ' %s' % ' '.join(parsealiases(name))
+            return func
+        return decorator
 
 def getlen(ui):
     if ui.configbool("perf", "stub"):
@@ -796,3 +885,18 @@
         timer, fm = gettimer(ui, opts)
         timer(fn, title=title)
         fm.end()
+
+def uisetup(ui):
+    if (util.safehasattr(cmdutil, 'openrevlog') and
+        not util.safehasattr(commands, 'debugrevlogopts')):
+        # for "historical portability":
+        # In this case, Mercurial should be 1.9 (or a79fea6b3e77) -
+        # 3.7 (or 5606f7d0d063). Therefore, '--dir' option for
+        # openrevlog() should cause failure, because it has been
+        # available since 3.5 (or 49c583ca48c4).
+        def openrevlog(orig, repo, cmd, file_, opts):
+            if opts.get('dir') and not util.safehasattr(repo, 'dirlog'):
+                raise error.Abort("This version doesn't support --dir option",
+                                  hint="use 3.5 or later")
+            return orig(repo, cmd, file_, opts)
+        extensions.wrapfunction(cmdutil, 'openrevlog', openrevlog)
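
Most of the perf.py churn above is the "historical portability" policy in action: guard imports that may not exist in old Mercurial with try/except, and fetch later-added attributes with getattr() plus a local fallback. The two idioms in isolation; the stand-in class below is hypothetical:

_undefined = object()

def safehasattr(thing, attr):
    # same shape as the helper perf.py installs onto mercurial.util,
    # because util.safehasattr only appeared in Mercurial 1.9.3
    return getattr(thing, attr, _undefined) is not _undefined

class _oldcommands(object):
    """Stand-in for a commands module from an older Mercurial."""

# getattr() with a local default, the same idiom used above for
# formatteropts and debugrevlogopts
formatteropts = getattr(_oldcommands, 'formatteropts', [])

assert not safehasattr(_oldcommands, 'formatteropts')
assert formatteropts == []
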
--- a/contrib/revsetbenchmarks.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/contrib/revsetbenchmarks.py	Mon Jul 18 23:28:14 2016 -0500
@@ -10,41 +10,32 @@
 
 from __future__ import absolute_import, print_function
 import math
+import optparse  # cannot use argparse, python 2.7 only
 import os
 import re
+import subprocess
 import sys
-from subprocess import (
-    CalledProcessError,
-    check_call,
-    PIPE,
-    Popen,
-    STDOUT,
-)
-# cannot use argparse, python 2.7 only
-from optparse import (
-    OptionParser,
-)
 
 DEFAULTVARIANTS = ['plain', 'min', 'max', 'first', 'last',
                    'reverse', 'reverse+first', 'reverse+last',
                    'sort', 'sort+first', 'sort+last']
 
 def check_output(*args, **kwargs):
-    kwargs.setdefault('stderr', PIPE)
-    kwargs.setdefault('stdout', PIPE)
-    proc = Popen(*args, **kwargs)
+    kwargs.setdefault('stderr', subprocess.PIPE)
+    kwargs.setdefault('stdout', subprocess.PIPE)
+    proc = subprocess.Popen(*args, **kwargs)
     output, error = proc.communicate()
     if proc.returncode != 0:
-        raise CalledProcessError(proc.returncode, ' '.join(args[0]))
+        raise subprocess.CalledProcessError(proc.returncode, ' '.join(args[0]))
     return output
 
 def update(rev):
     """update the repo to a revision"""
     try:
-        check_call(['hg', 'update', '--quiet', '--check', str(rev)])
+        subprocess.check_call(['hg', 'update', '--quiet', '--check', str(rev)])
         check_output(['make', 'local'],
                      stderr=None)  # suppress output except for error/warning
-    except CalledProcessError as exc:
+    except subprocess.CalledProcessError as exc:
         print('update to revision %s failed, aborting'%rev, file=sys.stderr)
         sys.exit(exc.returncode)
 
@@ -60,7 +51,7 @@
     fullcmd += ['--config',
                 'extensions.perf=' + os.path.join(contribdir, 'perf.py')]
     fullcmd += cmd
-    return check_output(fullcmd, stderr=STDOUT)
+    return check_output(fullcmd, stderr=subprocess.STDOUT)
 
 def perf(revset, target=None, contexts=False):
     """run benchmark for this very revset"""
@@ -70,7 +61,7 @@
             args.append('--contexts')
         output = hg(args, repo=target)
         return parseoutput(output)
-    except CalledProcessError as exc:
+    except subprocess.CalledProcessError as exc:
         print('abort: cannot run revset benchmark: %s'%exc.cmd, file=sys.stderr)
         if getattr(exc, 'output', None) is None: # no output before 2.7
             print('(no output)', file=sys.stderr)
@@ -103,9 +94,9 @@
     """print data about a revision"""
     sys.stdout.write("Revision ")
     sys.stdout.flush()
-    check_call(['hg', 'log', '--rev', str(rev), '--template',
-                '{if(tags, " ({tags})")} '
-                '{rev}:{node|short}: {desc|firstline}\n'])
+    subprocess.check_call(['hg', 'log', '--rev', str(rev), '--template',
+                           '{if(tags, " ({tags})")} '
+                           '{rev}:{node|short}: {desc|firstline}\n'])
 
 def idxwidth(nbidx):
     """return the max width of number used for index
@@ -215,7 +206,7 @@
     """get the list of rev matched by a revset"""
     try:
         out = check_output(['hg', 'log', '--template={rev}\n', '--rev', spec])
-    except CalledProcessError as exc:
+    except subprocess.CalledProcessError as exc:
         print("abort, can't get revision from %s"%spec, file=sys.stderr)
         sys.exit(exc.returncode)
     return [r for r in out.split() if r]
@@ -234,8 +225,8 @@
 point regressions. Revsets to run are specified in a file (or from stdin), one
 revsets per line. Line starting with '#' will be ignored, allowing insertion of
 comments."""
-parser = OptionParser(usage="usage: %prog [options] <revs>",
-                      description=helptext)
+parser = optparse.OptionParser(usage="usage: %prog [options] <revs>",
+                               description=helptext)
 parser.add_option("-f", "--file",
                   help="read revset from FILE (stdin if omitted)",
                   metavar="FILE")
--- a/contrib/synthrepo.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/contrib/synthrepo.py	Mon Jul 18 23:28:14 2016 -0500
@@ -45,6 +45,13 @@
 import random
 import sys
 import time
+
+from mercurial.i18n import _
+from mercurial.node import (
+    nullid,
+    nullrev,
+    short,
+)
 from mercurial import (
     cmdutil,
     context,
@@ -54,12 +61,6 @@
     scmutil,
     util,
 )
-from mercurial.i18n import _
-from mercurial.node import (
-    nullid,
-    nullrev,
-    short,
-)
 
 # Note for extension authors: ONLY specify testedwith = 'internal' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
@@ -506,7 +507,7 @@
             head = rename(head)
         else:
             head = ''
-        renamed = os.path.join(head, wordgen.next())
+        renamed = os.path.join(head, next(wordgen))
         replacements[dirpath] = renamed
         return renamed
     result = []
--- a/contrib/undumprevlog	Sat Jul 02 09:41:40 2016 -0700
+++ b/contrib/undumprevlog	Mon Jul 18 23:28:14 2016 -0500
@@ -3,8 +3,16 @@
 # $ hg init
 # $ undumprevlog < repo.dump
 
+from __future__ import absolute_import
+
 import sys
-from mercurial import revlog, node, scmutil, util, transaction
+from mercurial import (
+    node,
+    revlog,
+    scmutil,
+    transaction,
+    util,
+)
 
 for fp in (sys.stdin, sys.stdout, sys.stderr):
     util.setbinary(fp)
--- a/contrib/win32/hgwebdir_wsgi.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/contrib/win32/hgwebdir_wsgi.py	Mon Jul 18 23:28:14 2016 -0500
@@ -79,6 +79,8 @@
 # - Restart the web server and see if things are running.
 #
 
+from __future__ import absolute_import
+
 # Configuration file location
 hgweb_config = r'c:\your\directory\wsgi.config'
 
@@ -87,7 +89,6 @@
 path_prefix = 1  # This many path elements are prefixes (depends on the
                  # virtual path of the IIS application).
 
-from __future__ import absolute_import
 import sys
 
 # Adjust python path if this is not a system-wide install
--- a/contrib/win32/mercurial.ini	Sat Jul 02 09:41:40 2016 -0700
+++ b/contrib/win32/mercurial.ini	Mon Jul 18 23:28:14 2016 -0500
@@ -46,7 +46,6 @@
 ;extdiff =
 ;fetch =
 ;gpg =
-;hgcia =
 ;hgk =
 ;highlight = 
 ;histedit =
--- a/doc/docchecker	Sat Jul 02 09:41:40 2016 -0700
+++ b/doc/docchecker	Mon Jul 18 23:28:14 2016 -0500
@@ -6,8 +6,11 @@
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
+
+from __future__ import absolute_import, print_function
+
+import re
 import sys
-import re
 
 leadingline = re.compile(r'(^\s*)(\S.*)$')
 
--- a/doc/gendoc.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/doc/gendoc.py	Mon Jul 18 23:28:14 2016 -0500
@@ -117,11 +117,11 @@
     ui.write(_("This section contains help for extensions that are "
                "distributed together with Mercurial. Help for other "
                "extensions is available in the help system."))
-    ui.write("\n\n"
+    ui.write(("\n\n"
              ".. contents::\n"
              "   :class: htmlonly\n"
              "   :local:\n"
-             "   :depth: 1\n\n")
+             "   :depth: 1\n\n"))
 
     for extensionname in sorted(allextensionnames()):
         mod = extensions.load(ui, extensionname, None)
--- a/doc/hgmanpage.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/doc/hgmanpage.py	Mon Jul 18 23:28:14 2016 -0500
@@ -415,7 +415,7 @@
         else:
             self._docinfo[name] = node.astext()
         self._docinfo_keys.append(name)
-        raise nodes.SkipNode
+        raise nodes.SkipNode()
 
     def depart_docinfo_item(self, node):
         pass
@@ -469,7 +469,7 @@
 
     def visit_citation_reference(self, node):
         self.body.append('['+node.astext()+']')
-        raise nodes.SkipNode
+        raise nodes.SkipNode()
 
     def visit_classifier(self, node):
         pass
@@ -489,7 +489,7 @@
     def visit_comment(self, node,
                       sub=re.compile('-(?=-)').sub):
         self.body.append(self.comment(node.astext()))
-        raise nodes.SkipNode
+        raise nodes.SkipNode()
 
     def visit_contact(self, node):
         self.visit_docinfo_item(node, 'contact')
@@ -643,7 +643,7 @@
             name_normalized = self._field_name.lower().replace(" ","_")
             self._docinfo_names[name_normalized] = self._field_name
             self.visit_docinfo_item(node, name_normalized)
-            raise nodes.SkipNode
+            raise nodes.SkipNode()
 
     def depart_field_body(self, node):
         pass
@@ -657,7 +657,7 @@
     def visit_field_name(self, node):
         if self._in_docinfo:
             self._field_name = node.astext()
-            raise nodes.SkipNode
+            raise nodes.SkipNode()
         else:
             self.body.append(self.defs['field_name'][0])
 
@@ -693,7 +693,7 @@
 
     def visit_footnote_reference(self, node):
         self.body.append('['+self.deunicode(node.astext())+']')
-        raise nodes.SkipNode
+        raise nodes.SkipNode()
 
     def depart_footnote_reference(self, node):
         pass
@@ -705,7 +705,7 @@
         pass
 
     def visit_header(self, node):
-        raise NotImplementedError, node.astext()
+        raise NotImplementedError(node.astext())
 
     def depart_header(self, node):
         pass
@@ -742,7 +742,7 @@
         if 'uri' in node.attributes:
             text.append(node.attributes['uri'])
         self.body.append('[image: %s]\n' % ('/'.join(text)))
-        raise nodes.SkipNode
+        raise nodes.SkipNode()
 
     def visit_important(self, node):
         self.visit_admonition(node, 'important')
@@ -753,7 +753,7 @@
         # footnote and citation
         if (isinstance(node.parent, nodes.footnote)
             or isinstance(node.parent, nodes.citation)):
-            raise nodes.SkipNode
+            raise nodes.SkipNode()
         self.document.reporter.warning('"unsupported "label"',
                 base_node=node)
         self.body.append('[')
@@ -793,7 +793,7 @@
     def visit_list_item(self, node):
         # man 7 man argues to use ".IP" instead of ".TP"
         self.body.append('.IP %s %d\n' % (
-                self._list_char[-1].next(),
+                next(self._list_char[-1]),
                 self._list_char[-1].get_width(),))
 
     def depart_list_item(self, node):
@@ -814,7 +814,7 @@
         self.body.append(self.defs['literal_block'][1])
 
     def visit_meta(self, node):
-        raise NotImplementedError, node.astext()
+        raise NotImplementedError(node.astext())
 
     def depart_meta(self, node):
         pass
@@ -924,7 +924,7 @@
         if node.get('format') == 'manpage':
             self.body.append(node.astext() + "\n")
         # Keep non-manpage raw text out of output:
-        raise nodes.SkipNode
+        raise nodes.SkipNode()
 
     def visit_reference(self, node):
         """E.g. link or email address."""
@@ -963,7 +963,7 @@
 
     def visit_substitution_definition(self, node):
         """Internal only."""
-        raise nodes.SkipNode
+        raise nodes.SkipNode()
 
     def visit_substitution_reference(self, node):
         self.document.reporter.warning('"substitution_reference" not supported',
@@ -1009,7 +1009,7 @@
 
     def visit_target(self, node):
         # targets are in-document hyper targets, without any use for man-pages.
-        raise nodes.SkipNode
+        raise nodes.SkipNode()
 
     def visit_tbody(self, node):
         pass
@@ -1053,7 +1053,7 @@
             self._docinfo['title'] = node.astext()
             # document title for .TH
             self._docinfo['title_upper'] = node.astext().upper()
-            raise nodes.SkipNode
+            raise nodes.SkipNode()
         elif self.section_level == 1:
             self.body.append('.SH ')
             for n in node.traverse(nodes.Text):
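
The doc/hgmanpage.py hunks are part of the Python 3 cleanup: `raise NotImplementedError, node.astext()` is a syntax error on Python 3, and the bare `raise nodes.SkipNode` sites are switched to explicit instantiation for consistency. A tiny illustration with an arbitrary exception class:

class SkipNode(Exception):
    pass

# Python 2 only:    raise SkipNode, 'detail'    (comma form)
# Python 2 and 3:   raise SkipNode('detail')    (explicit instance)
try:
    raise SkipNode('detail')
except SkipNode as exc:
    print('skipped:', exc)
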
--- a/hg	Sat Jul 02 09:41:40 2016 -0700
+++ b/hg	Mon Jul 18 23:28:14 2016 -0500
@@ -11,9 +11,11 @@
 import sys
 
 if os.environ.get('HGUNICODEPEDANTRY', False):
-    reload(sys)
-    sys.setdefaultencoding("undefined")
-
+    try:
+        reload(sys)
+        sys.setdefaultencoding("undefined")
+    except NameError:
+        pass
 
 libdir = '@LIBDIR@'
 
@@ -26,9 +28,9 @@
 
 # enable importing on demand to reduce startup time
 try:
-    from mercurial import demandimport; demandimport.enable()
+    if sys.version_info[0] < 3:
+        from mercurial import demandimport; demandimport.enable()
 except ImportError:
-    import sys
     sys.stderr.write("abort: couldn't find mercurial libraries in [%s]\n" %
                      ' '.join(sys.path))
     sys.stderr.write("(check your install and PYTHONPATH)\n")
--- a/hgext/automv.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/automv.py	Mon Jul 18 23:28:14 2016 -0500
@@ -26,6 +26,7 @@
 
 from __future__ import absolute_import
 
+from mercurial.i18n import _
 from mercurial import (
     commands,
     copies,
@@ -34,7 +35,6 @@
     scmutil,
     similar
 )
-from mercurial.i18n import _
 
 def extsetup(ui):
     entry = extensions.wrapcommand(
--- a/hgext/bugzilla.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/bugzilla.py	Mon Jul 18 23:28:14 2016 -0500
@@ -281,8 +281,6 @@
 
 import re
 import time
-import urlparse
-import xmlrpclib
 
 from mercurial.i18n import _
 from mercurial.node import short
@@ -293,6 +291,9 @@
     util,
 )
 
+urlparse = util.urlparse
+xmlrpclib = util.xmlrpclib
+
 # Note for extension authors: ONLY specify testedwith = 'internal' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
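
bugzilla.py now binds urlparse and xmlrpclib from mercurial.util instead of importing the Python 2 stdlib modules directly, in line with the new check-code rules earlier in this changeset. A minimal sketch of what such a compatibility alias layer can look like; this is not mercurial.util's actual code:

# compat_shim.py - choose the right stdlib module once; consumers then bind
# module-level aliases, just as bugzilla.py does "urlparse = util.urlparse".
import sys

if sys.version_info[0] >= 3:
    import urllib.parse as urlparse
    import xmlrpc.client as xmlrpclib
else:
    import urlparse
    import xmlrpclib

print(urlparse.urlsplit('https://example.com/bug?id=1').netloc)
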
--- a/hgext/chgserver.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/chgserver.py	Mon Jul 18 23:28:14 2016 -0500
@@ -40,18 +40,15 @@
 
 from __future__ import absolute_import
 
-import SocketServer
 import errno
-import gc
+import hashlib
 import inspect
 import os
-import random
 import re
+import signal
 import struct
 import sys
-import threading
 import time
-import traceback
 
 from mercurial.i18n import _
 
@@ -76,10 +73,11 @@
 
 def _hashlist(items):
     """return sha1 hexdigest for a list"""
-    return util.sha1(str(items)).hexdigest()
+    return hashlib.sha1(str(items)).hexdigest()
 
 # sensitive config sections affecting confighash
 _configsections = [
+    'alias',  # affects global state commands.table
     'extdiff',  # uisetup will register new commands
     'extensions',
 ]
@@ -150,6 +148,10 @@
 
     for chgserver, it is designed that once mtimehash changes, the server is
     considered outdated immediately and should no longer provide service.
+
+    mtimehash is not included in confighash because we only know the paths of
+    extensions after importing them (there is imp.find_module but that faces
+    race conditions). We need to calculate confighash without importing.
     """
     def trystat(path):
         try:
@@ -213,18 +215,6 @@
         ui.setconfig('ui', 'interactive', False, 'pager')
         return p
 
-_envvarre = re.compile(r'\$[a-zA-Z_]+')
-
-def _clearenvaliases(cmdtable):
-    """Remove stale command aliases referencing env vars; variable expansion
-    is done at dispatch.addaliases()"""
-    for name, tab in cmdtable.items():
-        cmddef = tab[0]
-        if (isinstance(cmddef, dispatch.cmdalias) and
-            not cmddef.definition.startswith('!') and  # shell alias
-            _envvarre.search(cmddef.definition)):
-            del cmdtable[name]
-
 def _newchgui(srcui, csystem):
     class chgui(srcui.__class__):
         def __init__(self, src=None):
@@ -357,6 +347,7 @@
             self.capabilities['validate'] = chgcmdserver.validate
 
     def cleanup(self):
+        super(chgcmdserver, self).cleanup()
         # dispatch._runcatch() does not flush outputs if exception is not
         # handled by dispatch._dispatch()
         self.ui.flush()
@@ -508,6 +499,11 @@
 
         pagercmd = _setuppagercmd(self.ui, options, cmd)
         if pagercmd:
+            # Python's SIGPIPE is SIG_IGN by default. change to SIG_DFL so
+            # we can exit if the pipe to the pager is closed
+            if util.safehasattr(signal, 'SIGPIPE') and \
+                    signal.getsignal(signal.SIGPIPE) == signal.SIG_IGN:
+                signal.signal(signal.SIGPIPE, signal.SIG_DFL)
             self.cresult.write(pagercmd)
         else:
             self.cresult.write('\0')
@@ -525,7 +521,6 @@
         _log('setenv: %r\n' % sorted(newenv.keys()))
         os.environ.clear()
         os.environ.update(newenv)
-        _clearenvaliases(commands.table)
 
     capabilities = commandserver.server.capabilities.copy()
     capabilities.update({'attachio': attachio,
@@ -534,174 +529,110 @@
                          'setenv': setenv,
                          'setumask': setumask})
 
-# copied from mercurial/commandserver.py
-class _requesthandler(SocketServer.StreamRequestHandler):
-    def handle(self):
-        # use a different process group from the master process, making this
-        # process pass kernel "is_current_pgrp_orphaned" check so signals like
-        # SIGTSTP, SIGTTIN, SIGTTOU are not ignored.
-        os.setpgid(0, 0)
-        # change random state otherwise forked request handlers would have a
-        # same state inherited from parent.
-        random.seed()
-        ui = self.server.ui
-        repo = self.server.repo
-        sv = None
-        try:
-            sv = chgcmdserver(ui, repo, self.rfile, self.wfile, self.connection,
-                              self.server.hashstate, self.server.baseaddress)
-            try:
-                sv.serve()
-            # handle exceptions that may be raised by command server. most of
-            # known exceptions are caught by dispatch.
-            except error.Abort as inst:
-                ui.warn(_('abort: %s\n') % inst)
-            except IOError as inst:
-                if inst.errno != errno.EPIPE:
-                    raise
-            except KeyboardInterrupt:
-                pass
-            finally:
-                sv.cleanup()
-        except: # re-raises
-            # also write traceback to error channel. otherwise client cannot
-            # see it because it is written to server's stderr by default.
-            if sv:
-                cerr = sv.cerr
-            else:
-                cerr = commandserver.channeledoutput(self.wfile, 'e')
-            traceback.print_exc(file=cerr)
-            raise
-        finally:
-            # trigger __del__ since ForkingMixIn uses os._exit
-            gc.collect()
-
 def _tempaddress(address):
     return '%s.%d.tmp' % (address, os.getpid())
 
 def _hashaddress(address, hashstr):
     return '%s-%s' % (address, hashstr)
 
-class AutoExitMixIn:  # use old-style to comply with SocketServer design
-    lastactive = time.time()
-    idletimeout = 3600  # default 1 hour
+class chgunixservicehandler(object):
+    """Set of operations for chg services"""
+
+    pollinterval = 1  # [sec]
 
-    def startautoexitthread(self):
-        # note: the auto-exit check here is cheap enough to not use a thread,
-        # be done in serve_forever. however SocketServer is hook-unfriendly,
-        # you simply cannot hook serve_forever without copying a lot of code.
-        # besides, serve_forever's docstring suggests using thread.
-        thread = threading.Thread(target=self._autoexitloop)
-        thread.daemon = True
-        thread.start()
+    def __init__(self, ui):
+        self.ui = ui
+        self._idletimeout = ui.configint('chgserver', 'idletimeout', 3600)
+        self._lastactive = time.time()
+
+    def bindsocket(self, sock, address):
+        self._inithashstate(address)
+        self._checkextensions()
+        self._bind(sock)
+        self._createsymlink()
 
-    def _autoexitloop(self, interval=1):
-        while True:
-            time.sleep(interval)
-            if not self.issocketowner():
-                _log('%s is not owned, exiting.\n' % self.server_address)
-                break
-            if time.time() - self.lastactive > self.idletimeout:
-                _log('being idle too long. exiting.\n')
-                break
-        self.shutdown()
+    def _inithashstate(self, address):
+        self._baseaddress = address
+        if self.ui.configbool('chgserver', 'skiphash', False):
+            self._hashstate = None
+            self._realaddress = address
+            return
+        self._hashstate = hashstate.fromui(self.ui)
+        self._realaddress = _hashaddress(address, self._hashstate.confighash)
 
-    def process_request(self, request, address):
-        self.lastactive = time.time()
-        return SocketServer.ForkingMixIn.process_request(
-            self, request, address)
+    def _checkextensions(self):
+        if not self._hashstate:
+            return
+        if extensions.notloaded():
+            # one or more extensions failed to load. mtimehash becomes
+            # meaningless because we do not know the paths of those extensions.
+            # set mtimehash to an illegal hash value to invalidate the server.
+            self._hashstate.mtimehash = ''
 
-    def server_bind(self):
+    def _bind(self, sock):
         # use a unique temp address so we can stat the file and do ownership
         # check later
-        tempaddress = _tempaddress(self.server_address)
-        # use relative path instead of full path at bind() if possible, since
-        # AF_UNIX path has very small length limit (107 chars) on common
-        # platforms (see sys/un.h)
-        dirname, basename = os.path.split(tempaddress)
-        bakwdfd = None
-        if dirname:
-            bakwdfd = os.open('.', os.O_DIRECTORY)
-            os.chdir(dirname)
-        self.socket.bind(basename)
-        self._socketstat = os.stat(basename)
+        tempaddress = _tempaddress(self._realaddress)
+        util.bindunixsocket(sock, tempaddress)
+        self._socketstat = os.stat(tempaddress)
         # rename will replace the old socket file if exists atomically. the
         # old server will detect ownership change and exit.
-        util.rename(basename, self.server_address)
-        if bakwdfd:
-            os.fchdir(bakwdfd)
-            os.close(bakwdfd)
+        util.rename(tempaddress, self._realaddress)
 
-    def issocketowner(self):
+    def _createsymlink(self):
+        if self._baseaddress == self._realaddress:
+            return
+        tempaddress = _tempaddress(self._baseaddress)
+        os.symlink(os.path.basename(self._realaddress), tempaddress)
+        util.rename(tempaddress, self._baseaddress)
+
+    def _issocketowner(self):
         try:
-            stat = os.stat(self.server_address)
+            stat = os.stat(self._realaddress)
             return (stat.st_ino == self._socketstat.st_ino and
                     stat.st_mtime == self._socketstat.st_mtime)
         except OSError:
             return False
 
-    def unlinksocketfile(self):
-        if not self.issocketowner():
+    def unlinksocket(self, address):
+        if not self._issocketowner():
             return
         # it is possible to have a race condition here that we may
         # remove another server's socket file. but that's okay
         # since that server will detect and exit automatically and
         # the client will start a new server on demand.
         try:
-            os.unlink(self.server_address)
+            os.unlink(self._realaddress)
         except OSError as exc:
             if exc.errno != errno.ENOENT:
                 raise
 
-class chgunixservice(commandserver.unixservice):
-    def init(self):
-        if self.repo:
-            # one chgserver can serve multiple repos. drop repo infomation
-            self.ui.setconfig('bundle', 'mainreporoot', '', 'repo')
-            self.repo = None
-        self._inithashstate()
-        self._checkextensions()
-        class cls(AutoExitMixIn, SocketServer.ForkingMixIn,
-                  SocketServer.UnixStreamServer):
-            ui = self.ui
-            repo = self.repo
-            hashstate = self.hashstate
-            baseaddress = self.baseaddress
-        self.server = cls(self.address, _requesthandler)
-        self.server.idletimeout = self.ui.configint(
-            'chgserver', 'idletimeout', self.server.idletimeout)
-        self.server.startautoexitthread()
-        self._createsymlink()
+    def printbanner(self, address):
+        # no "listening at" message should be printed to simulate hg behavior
+        pass
+
+    def shouldexit(self):
+        if not self._issocketowner():
+            self.ui.debug('%s is not owned, exiting.\n' % self._realaddress)
+            return True
+        if time.time() - self._lastactive > self._idletimeout:
+            self.ui.debug('being idle too long. exiting.\n')
+            return True
+        return False
 
-    def _inithashstate(self):
-        self.baseaddress = self.address
-        if self.ui.configbool('chgserver', 'skiphash', False):
-            self.hashstate = None
-            return
-        self.hashstate = hashstate.fromui(self.ui)
-        self.address = _hashaddress(self.address, self.hashstate.confighash)
+    def newconnection(self):
+        self._lastactive = time.time()
+
+    def createcmdserver(self, repo, conn, fin, fout):
+        return chgcmdserver(self.ui, repo, fin, fout, conn,
+                            self._hashstate, self._baseaddress)
 
-    def _checkextensions(self):
-        if not self.hashstate:
-            return
-        if extensions.notloaded():
-            # one or more extensions failed to load. mtimehash becomes
-            # meaningless because we do not know the paths of those extensions.
-            # set mtimehash to an illegal hash value to invalidate the server.
-            self.hashstate.mtimehash = ''
-
-    def _createsymlink(self):
-        if self.baseaddress == self.address:
-            return
-        tempaddress = _tempaddress(self.baseaddress)
-        os.symlink(os.path.basename(self.address), tempaddress)
-        util.rename(tempaddress, self.baseaddress)
-
-    def run(self):
-        try:
-            self.server.serve_forever()
-        finally:
-            self.server.unlinksocketfile()
+def chgunixservice(ui, repo, opts):
+    if repo:
+        # one chgserver can serve multiple repos. drop repo information
+        ui.setconfig('bundle', 'mainreporoot', '', 'repo')
+    h = chgunixservicehandler(ui)
+    return commandserver.unixforkingservice(ui, repo=None, opts=opts, handler=h)
 
 def uisetup(ui):
     commandserver._servicemap['chgunix'] = chgunixservice
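
The hunk above replaces the SocketServer-based chgunixservice class with a handler object for commandserver.unixforkingservice: the real socket is bound at an address derived from the config hash, and the requested base address becomes a symlink to it. A minimal sketch of that bind-and-symlink step, assuming a hypothetical _hashaddress that simply appends the hash (the real helper and the temp-address handling live outside this hunk):

    import os
    import socket

    def _hashaddress(address, confighash):
        # assumption: append the config hash to the base socket path
        return '%s-%s' % (address, confighash)

    def bindhashedsocket(baseaddress, confighash):
        # bind the real socket at the hashed path, then point the plain
        # path at it via a symlink, roughly as _bind/_createsymlink do
        realaddress = _hashaddress(baseaddress, confighash)
        sock = socket.socket(socket.AF_UNIX)
        sock.bind(realaddress)
        if realaddress != baseaddress:
            tempaddress = baseaddress + '.tmp'   # stand-in for _tempaddress()
            os.symlink(os.path.basename(realaddress), tempaddress)
            os.rename(tempaddress, baseaddress)  # atomic replace, like util.rename
        return sock
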
--- a/hgext/color.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/color.py	Mon Jul 18 23:28:14 2016 -0500
@@ -156,6 +156,8 @@
 from __future__ import absolute_import
 
 import os
+
+from mercurial.i18n import _
 from mercurial import (
     cmdutil,
     commands,
@@ -165,7 +167,6 @@
     ui as uimod,
     util,
 )
-from mercurial.i18n import _
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
--- a/hgext/convert/__init__.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/convert/__init__.py	Mon Jul 18 23:28:14 2016 -0500
@@ -9,11 +9,11 @@
 
 from __future__ import absolute_import
 
+from mercurial.i18n import _
 from mercurial import (
     cmdutil,
     registrar,
 )
-from mercurial.i18n import _
 
 from . import (
     convcmd,
--- a/hgext/convert/bzr.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/convert/bzr.py	Mon Jul 18 23:28:14 2016 -0500
@@ -10,11 +10,12 @@
 from __future__ import absolute_import
 
 import os
+
+from mercurial.i18n import _
 from mercurial import (
     demandimport,
     error
 )
-from mercurial.i18n import _
 from . import common
 
 # these do not work with demandimport, blacklist
--- a/hgext/convert/common.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/convert/common.py	Mon Jul 18 23:28:14 2016 -0500
@@ -7,20 +7,20 @@
 from __future__ import absolute_import
 
 import base64
-import cPickle as pickle
 import datetime
 import errno
 import os
 import re
 import subprocess
 
+from mercurial.i18n import _
 from mercurial import (
     error,
     phases,
     util,
 )
-from mercurial.i18n import _
 
+pickle = util.pickle
 propertycache = util.propertycache
 
 def encodeargs(args):
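
Several files in this changeset (convert/common.py, convert/cvsps.py, convert/subversion.py, histedit.py) replace "import cPickle as pickle" with "pickle = util.pickle". The alias presumably wraps the usual Python 2/3 fallback; a sketch of that pattern, not Mercurial's exact shim:

    try:
        import cPickle as pickle    # Python 2: C implementation
    except ImportError:
        import pickle               # Python 3: single merged module

    blob = pickle.dumps({'rev': 0, 'files': ['a', 'b']})
    assert pickle.loads(blob)['rev'] == 0
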
--- a/hgext/convert/convcmd.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/convert/convcmd.py	Mon Jul 18 23:28:14 2016 -0500
@@ -10,13 +10,13 @@
 import shlex
 import shutil
 
+from mercurial.i18n import _
 from mercurial import (
     encoding,
     error,
     hg,
     util,
 )
-from mercurial.i18n import _
 
 from . import (
     bzr,
--- a/hgext/convert/cvs.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/convert/cvs.py	Mon Jul 18 23:28:14 2016 -0500
@@ -11,12 +11,12 @@
 import re
 import socket
 
+from mercurial.i18n import _
 from mercurial import (
     encoding,
     error,
     util,
 )
-from mercurial.i18n import _
 
 from . import (
     common,
--- a/hgext/convert/cvsps.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/convert/cvsps.py	Mon Jul 18 23:28:14 2016 -0500
@@ -6,15 +6,16 @@
 # GNU General Public License version 2 or any later version.
 from __future__ import absolute_import
 
-import cPickle as pickle
 import os
 import re
 
+from mercurial.i18n import _
 from mercurial import (
     hook,
     util,
 )
-from mercurial.i18n import _
+
+pickle = util.pickle
 
 class logentry(object):
     '''Class logentry has the following attributes:
--- a/hgext/convert/filemap.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/convert/filemap.py	Mon Jul 18 23:28:14 2016 -0500
@@ -7,10 +7,11 @@
 
 import posixpath
 import shlex
+
+from mercurial.i18n import _
 from mercurial import (
     error,
 )
-from mercurial.i18n import _
 from . import common
 SKIPREV = common.SKIPREV
 
--- a/hgext/convert/git.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/convert/git.py	Mon Jul 18 23:28:14 2016 -0500
@@ -7,12 +7,13 @@
 from __future__ import absolute_import
 
 import os
+
+from mercurial.i18n import _
 from mercurial import (
     config,
     error,
     node as nodemod,
 )
-from mercurial.i18n import _
 
 from . import (
     common,
--- a/hgext/convert/gnuarch.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/convert/gnuarch.py	Mon Jul 18 23:28:14 2016 -0500
@@ -12,12 +12,13 @@
 import shutil
 import stat
 import tempfile
+
+from mercurial.i18n import _
 from mercurial import (
     encoding,
     error,
     util,
 )
-from mercurial.i18n import _
 from . import common
 
 class gnuarch_source(common.converter_source, common.commandline):
--- a/hgext/convert/hg.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/convert/hg.py	Mon Jul 18 23:28:14 2016 -0500
@@ -22,6 +22,7 @@
 import re
 import time
 
+from mercurial.i18n import _
 from mercurial import (
     bookmarks,
     context,
@@ -37,7 +38,6 @@
 )
 stringio = util.stringio
 
-from mercurial.i18n import _
 from . import common
 mapfile = common.mapfile
 NoRepo = common.NoRepo
--- a/hgext/convert/monotone.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/convert/monotone.py	Mon Jul 18 23:28:14 2016 -0500
@@ -10,11 +10,11 @@
 import os
 import re
 
+from mercurial.i18n import _
 from mercurial import (
     error,
     util,
 )
-from mercurial.i18n import _
 
 from . import common
 
--- a/hgext/convert/p4.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/convert/p4.py	Mon Jul 18 23:28:14 2016 -0500
@@ -9,11 +9,11 @@
 import marshal
 import re
 
+from mercurial.i18n import _
 from mercurial import (
     error,
     util,
 )
-from mercurial.i18n import _
 
 from . import common
 
--- a/hgext/convert/subversion.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/convert/subversion.py	Mon Jul 18 23:28:14 2016 -0500
@@ -3,13 +3,13 @@
 # Copyright(C) 2007 Daniel Holth et al
 from __future__ import absolute_import
 
-import cPickle as pickle
 import os
 import re
 import sys
 import tempfile
 import xml.dom.minidom
 
+from mercurial.i18n import _
 from mercurial import (
     encoding,
     error,
@@ -17,10 +17,10 @@
     strutil,
     util,
 )
-from mercurial.i18n import _
 
 from . import common
 
+pickle = util.pickle
 stringio = util.stringio
 propertycache = util.propertycache
 urlerr = util.urlerr
--- a/hgext/factotum.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/factotum.py	Mon Jul 18 23:28:14 2016 -0500
@@ -102,8 +102,7 @@
 
 @monkeypatch_method(passwordmgr)
 def find_user_password(self, realm, authuri):
-    user, passwd = urlreq.httppasswordmgrwithdefaultrealm.find_user_password(
-        self, realm, authuri)
+    user, passwd = self.passwddb.find_user_password(realm, authuri)
     if user and passwd:
         self._writedebug(user, passwd)
         return (user, passwd)
--- a/hgext/fetch.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/fetch.py	Mon Jul 18 23:28:14 2016 -0500
@@ -7,12 +7,23 @@
 
 '''pull, update and merge in one command (DEPRECATED)'''
 
+from __future__ import absolute_import
+
 from mercurial.i18n import _
-from mercurial.node import short
-from mercurial import commands, cmdutil, hg, util, error
-from mercurial.lock import release
-from mercurial import exchange
+from mercurial.node import (
+    short,
+)
+from mercurial import (
+    cmdutil,
+    commands,
+    error,
+    exchange,
+    hg,
+    lock,
+    util,
+)
 
+release = lock.release
 cmdtable = {}
 command = cmdutil.command(cmdtable)
 # Note for extension authors: ONLY specify testedwith = 'internal' for
--- a/hgext/fsmonitor/__init__.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/fsmonitor/__init__.py	Mon Jul 18 23:28:14 2016 -0500
@@ -91,10 +91,12 @@
 
 from __future__ import absolute_import
 
+import hashlib
 import os
 import stat
 import sys
 
+from mercurial.i18n import _
 from mercurial import (
     context,
     extensions,
@@ -105,7 +107,6 @@
     util,
 )
 from mercurial import match as matchmod
-from mercurial.i18n import _
 
 from . import (
     state,
@@ -141,7 +142,7 @@
     copy.
 
     """
-    sha1 = util.sha1()
+    sha1 = hashlib.sha1()
     if util.safehasattr(ignore, 'includepat'):
         sha1.update(ignore.includepat)
     sha1.update('\0\0')
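
util.sha1() calls are swapped for the stdlib hashlib.sha1() here and in the largefiles changes below; both produce the same hasher. A reduced sketch of the ignore-hash computation in the hunk above (bytes inputs assumed, and the helper name is made up):

    import hashlib

    def hashpatterns(includepat, excludepat):
        # SHA-1 over the matcher's pattern strings, NUL-separated,
        # mirroring the hunk above
        sha1 = hashlib.sha1()
        sha1.update(includepat)
        sha1.update(b'\0\0')
        sha1.update(excludepat)
        return sha1.hexdigest()

    print(hashpatterns(b'relglob:*.py', b'relglob:*.orig'))
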
--- a/hgext/fsmonitor/state.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/fsmonitor/state.py	Mon Jul 18 23:28:14 2016 -0500
@@ -12,8 +12,8 @@
 import socket
 import struct
 
+from mercurial.i18n import _
 from mercurial import pathutil
-from mercurial.i18n import _
 
 _version = 4
 _versionformat = ">I"
--- a/hgext/gpg.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/gpg.py	Mon Jul 18 23:28:14 2016 -0500
@@ -5,10 +5,21 @@
 
 '''commands to sign and verify changesets'''
 
-import os, tempfile, binascii
-from mercurial import util, commands, match, cmdutil, error
-from mercurial import node as hgnode
+from __future__ import absolute_import
+
+import binascii
+import os
+import tempfile
+
 from mercurial.i18n import _
+from mercurial import (
+    cmdutil,
+    commands,
+    error,
+    match,
+    node as hgnode,
+    util,
+)
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
@@ -187,7 +198,7 @@
         return
 
     # print summary
-    ui.write("%s is signed by:\n" % hgnode.short(rev))
+    ui.write(_("%s is signed by:\n") % hgnode.short(rev))
     for key in keys:
         ui.write(" %s\n" % keystr(ui, key))
 
--- a/hgext/graphlog.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/graphlog.py	Mon Jul 18 23:28:14 2016 -0500
@@ -15,8 +15,13 @@
 revision graph is also shown.
 '''
 
+from __future__ import absolute_import
+
 from mercurial.i18n import _
-from mercurial import cmdutil, commands
+from mercurial import (
+    cmdutil,
+    commands,
+)
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
--- a/hgext/hgcia.py	Sat Jul 02 09:41:40 2016 -0700
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,285 +0,0 @@
-# Copyright (C) 2007-8 Brendan Cully <brendan@kublai.com>
-#
-# This software may be used and distributed according to the terms of the
-# GNU General Public License version 2 or any later version.
-
-"""hooks for integrating with the CIA.vc notification service
-
-This is meant to be run as a changegroup or incoming hook. To
-configure it, set the following options in your hgrc::
-
-  [cia]
-  # your registered CIA user name
-  user = foo
-  # the name of the project in CIA
-  project = foo
-  # the module (subproject) (optional)
-  #module = foo
-  # Append a diffstat to the log message (optional)
-  #diffstat = False
-  # Template to use for log messages (optional)
-  #template = {desc}\\n{baseurl}{webroot}/rev/{node}-- {diffstat}
-  # Style to use (optional)
-  #style = foo
-  # The URL of the CIA notification service (optional)
-  # You can use mailto: URLs to send by email, e.g.
-  # mailto:cia@cia.vc
-  # Make sure to set email.from if you do this.
-  #url = http://cia.vc/
-  # print message instead of sending it (optional)
-  #test = False
-  # number of slashes to strip for url paths
-  #strip = 0
-
-  [hooks]
-  # one of these:
-  changegroup.cia = python:hgcia.hook
-  #incoming.cia = python:hgcia.hook
-
-  [web]
-  # If you want hyperlinks (optional)
-  baseurl = http://server/path/to/repo
-"""
-
-from mercurial.i18n import _
-from mercurial.node import bin, short
-from mercurial import cmdutil, patch, util, mail, error
-import email.Parser
-
-import socket, xmlrpclib
-from xml.sax import saxutils
-# Note for extension authors: ONLY specify testedwith = 'internal' for
-# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
-# be specifying the version(s) of Mercurial they are tested with, or
-# leave the attribute unspecified.
-testedwith = 'internal'
-
-socket_timeout = 30 # seconds
-if util.safehasattr(socket, 'setdefaulttimeout'):
-    # set a timeout for the socket so you don't have to wait so looooong
-    # when cia.vc is having problems. requires python >= 2.3:
-    socket.setdefaulttimeout(socket_timeout)
-
-HGCIA_VERSION = '0.1'
-HGCIA_URL = 'http://hg.kublai.com/mercurial/hgcia'
-
-
-class ciamsg(object):
-    """ A CIA message """
-    def __init__(self, cia, ctx):
-        self.cia = cia
-        self.ctx = ctx
-        self.url = self.cia.url
-        if self.url:
-            self.url += self.cia.root
-
-    def fileelem(self, path, uri, action):
-        if uri:
-            uri = ' uri=%s' % saxutils.quoteattr(uri)
-        return '<file%s action=%s>%s</file>' % (
-            uri, saxutils.quoteattr(action), saxutils.escape(path))
-
-    def fileelems(self):
-        n = self.ctx.node()
-        f = self.cia.repo.status(self.ctx.p1().node(), n)
-        url = self.url or ''
-        if url and url[-1] == '/':
-            url = url[:-1]
-        elems = []
-        for path in f.modified:
-            uri = '%s/diff/%s/%s' % (url, short(n), path)
-            elems.append(self.fileelem(path, url and uri, 'modify'))
-        for path in f.added:
-            # TODO: copy/rename ?
-            uri = '%s/file/%s/%s' % (url, short(n), path)
-            elems.append(self.fileelem(path, url and uri, 'add'))
-        for path in f.removed:
-            elems.append(self.fileelem(path, '', 'remove'))
-
-        return '\n'.join(elems)
-
-    def sourceelem(self, project, module=None, branch=None):
-        msg = ['<source>', '<project>%s</project>' % saxutils.escape(project)]
-        if module:
-            msg.append('<module>%s</module>' % saxutils.escape(module))
-        if branch:
-            msg.append('<branch>%s</branch>' % saxutils.escape(branch))
-        msg.append('</source>')
-
-        return '\n'.join(msg)
-
-    def diffstat(self):
-        class patchbuf(object):
-            def __init__(self):
-                self.lines = []
-                # diffstat is stupid
-                self.name = 'cia'
-            def write(self, data):
-                self.lines += data.splitlines(True)
-            def close(self):
-                pass
-
-        n = self.ctx.node()
-        pbuf = patchbuf()
-        cmdutil.export(self.cia.repo, [n], fp=pbuf)
-        return patch.diffstat(pbuf.lines) or ''
-
-    def logmsg(self):
-        if self.cia.diffstat:
-            diffstat = self.diffstat()
-        else:
-            diffstat = ''
-        self.cia.ui.pushbuffer()
-        self.cia.templater.show(self.ctx, changes=self.ctx.changeset(),
-                                baseurl=self.cia.ui.config('web', 'baseurl'),
-                                url=self.url, diffstat=diffstat,
-                                webroot=self.cia.root)
-        return self.cia.ui.popbuffer()
-
-    def xml(self):
-        n = short(self.ctx.node())
-        src = self.sourceelem(self.cia.project, module=self.cia.module,
-                              branch=self.ctx.branch())
-        # unix timestamp
-        dt = self.ctx.date()
-        timestamp = dt[0]
-
-        author = saxutils.escape(self.ctx.user())
-        rev = '%d:%s' % (self.ctx.rev(), n)
-        log = saxutils.escape(self.logmsg())
-
-        url = self.url
-        if url and url[-1] == '/':
-            url = url[:-1]
-        url = url and '<url>%s/rev/%s</url>' % (saxutils.escape(url), n) or ''
-
-        msg = """
-<message>
-  <generator>
-    <name>Mercurial (hgcia)</name>
-    <version>%s</version>
-    <url>%s</url>
-    <user>%s</user>
-  </generator>
-  %s
-  <body>
-    <commit>
-      <author>%s</author>
-      <version>%s</version>
-      <log>%s</log>
-      %s
-      <files>%s</files>
-    </commit>
-  </body>
-  <timestamp>%d</timestamp>
-</message>
-""" % \
-            (HGCIA_VERSION, saxutils.escape(HGCIA_URL),
-            saxutils.escape(self.cia.user), src, author, rev, log, url,
-            self.fileelems(), timestamp)
-
-        return msg
-
-
-class hgcia(object):
-    """ CIA notification class """
-
-    deftemplate = '{desc}'
-    dstemplate = '{desc}\n-- \n{diffstat}'
-
-    def __init__(self, ui, repo):
-        self.ui = ui
-        self.repo = repo
-
-        self.ciaurl = self.ui.config('cia', 'url', 'http://cia.vc')
-        self.user = self.ui.config('cia', 'user')
-        self.project = self.ui.config('cia', 'project')
-        self.module = self.ui.config('cia', 'module')
-        self.diffstat = self.ui.configbool('cia', 'diffstat')
-        self.emailfrom = self.ui.config('email', 'from')
-        self.dryrun = self.ui.configbool('cia', 'test')
-        self.url = self.ui.config('web', 'baseurl')
-        # Default to -1 for backward compatibility
-        self.stripcount = int(self.ui.config('cia', 'strip', -1))
-        self.root = self.strip(self.repo.root)
-
-        style = self.ui.config('cia', 'style')
-        template = self.ui.config('cia', 'template')
-        if not template:
-            if self.diffstat:
-                template = self.dstemplate
-            else:
-                template = self.deftemplate
-        t = cmdutil.changeset_templater(self.ui, self.repo, False, None,
-                                        template, style, False)
-        self.templater = t
-
-    def strip(self, path):
-        '''strip leading slashes from local path, turn into web-safe path.'''
-
-        path = util.pconvert(path)
-        count = self.stripcount
-        if count < 0:
-            return ''
-        while count > 0:
-            c = path.find('/')
-            if c == -1:
-                break
-            path = path[c + 1:]
-            count -= 1
-        return path
-
-    def sendrpc(self, msg):
-        srv = xmlrpclib.Server(self.ciaurl)
-        res = srv.hub.deliver(msg)
-        if res is not True and res != 'queued.':
-            raise error.Abort(_('%s returned an error: %s') %
-                             (self.ciaurl, res))
-
-    def sendemail(self, address, data):
-        p = email.Parser.Parser()
-        msg = p.parsestr(data)
-        msg['Date'] = util.datestr(format="%a, %d %b %Y %H:%M:%S %1%2")
-        msg['To'] = address
-        msg['From'] = self.emailfrom
-        msg['Subject'] = 'DeliverXML'
-        msg['Content-type'] = 'text/xml'
-        msgtext = msg.as_string()
-
-        self.ui.status(_('hgcia: sending update to %s\n') % address)
-        mail.sendmail(self.ui, util.email(self.emailfrom),
-                      [address], msgtext)
-
-
-def hook(ui, repo, hooktype, node=None, url=None, **kwargs):
-    """ send CIA notification """
-    def sendmsg(cia, ctx):
-        msg = ciamsg(cia, ctx).xml()
-        if cia.dryrun:
-            ui.write(msg)
-        elif cia.ciaurl.startswith('mailto:'):
-            if not cia.emailfrom:
-                raise error.Abort(_('email.from must be defined when '
-                                   'sending by email'))
-            cia.sendemail(cia.ciaurl[7:], msg)
-        else:
-            cia.sendrpc(msg)
-
-    n = bin(node)
-    cia = hgcia(ui, repo)
-    if not cia.user:
-        ui.debug('cia: no user specified')
-        return
-    if not cia.project:
-        ui.debug('cia: no project specified')
-        return
-    if hooktype == 'changegroup':
-        start = repo.changelog.rev(n)
-        end = len(repo.changelog)
-        for rev in xrange(start, end):
-            n = repo.changelog.node(rev)
-            ctx = repo.changectx(n)
-            sendmsg(cia, ctx)
-    else:
-        ctx = repo.changectx(n)
-        sendmsg(cia, ctx)
--- a/hgext/hgk.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/hgk.py	Mon Jul 18 23:28:14 2016 -0500
@@ -34,10 +34,23 @@
 vdiff on hovered and selected revisions.
 '''
 
+from __future__ import absolute_import
+
 import os
-from mercurial import cmdutil, commands, patch, scmutil, obsolete
-from mercurial.node import nullid, nullrev, short
+
 from mercurial.i18n import _
+from mercurial.node import (
+    nullid,
+    nullrev,
+    short,
+)
+from mercurial import (
+    cmdutil,
+    commands,
+    obsolete,
+    patch,
+    scmutil,
+)
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
@@ -68,13 +81,13 @@
 
         for f in modified:
             # TODO get file permissions
-            ui.write(":100664 100664 %s %s M\t%s\t%s\n" %
+            ui.write((":100664 100664 %s %s M\t%s\t%s\n") %
                      (short(mmap[f]), short(mmap2[f]), f, f))
         for f in added:
-            ui.write(":000000 100664 %s %s N\t%s\t%s\n" %
+            ui.write((":000000 100664 %s %s N\t%s\t%s\n") %
                      (empty, short(mmap2[f]), f, f))
         for f in removed:
-            ui.write(":100664 000000 %s %s D\t%s\t%s\n" %
+            ui.write((":100664 000000 %s %s D\t%s\t%s\n") %
                      (short(mmap[f]), empty, f, f))
     ##
 
--- a/hgext/highlight/__init__.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/highlight/__init__.py	Mon Jul 18 23:28:14 2016 -0500
@@ -26,9 +26,21 @@
 match (even matches with a low confidence score) will be used.
 """
 
-import highlight
-from mercurial.hgweb import webcommands, webutil, common
-from mercurial import extensions, encoding, fileset
+from __future__ import absolute_import
+
+from . import highlight
+from mercurial.hgweb import (
+    common,
+    webcommands,
+    webutil,
+)
+
+from mercurial import (
+    encoding,
+    extensions,
+    fileset,
+)
+
 # Note for extension authors: ONLY specify testedwith = 'internal' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
--- a/hgext/highlight/highlight.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/highlight/highlight.py	Mon Jul 18 23:28:14 2016 -0500
@@ -8,14 +8,27 @@
 # The original module was split in an interface and an implementation
 # file to defer pygments loading and speedup extension setup.
 
+from __future__ import absolute_import
+
+import pygments
+import pygments.formatters
+import pygments.lexers
+import pygments.util
+
 from mercurial import demandimport
 demandimport.ignore.extend(['pkgutil', 'pkg_resources', '__main__'])
-from mercurial import util, encoding
+
+from mercurial import (
+    encoding,
+    util,
+)
 
-from pygments import highlight
-from pygments.util import ClassNotFound
-from pygments.lexers import guess_lexer, guess_lexer_for_filename, TextLexer
-from pygments.formatters import HtmlFormatter
+highlight = pygments.highlight
+ClassNotFound = pygments.util.ClassNotFound
+guess_lexer = pygments.lexers.guess_lexer
+guess_lexer_for_filename = pygments.lexers.guess_lexer_for_filename
+TextLexer = pygments.lexers.TextLexer
+HtmlFormatter = pygments.formatters.HtmlFormatter
 
 SYNTAX_CSS = ('\n<link rel="stylesheet" href="{url}highlightcss" '
               'type="text/css" />')
@@ -68,7 +81,7 @@
     coloriter = (s.encode(encoding.encoding, 'replace')
                  for s in colorized.splitlines())
 
-    tmpl.filters['colorize'] = lambda x: coloriter.next()
+    tmpl.filters['colorize'] = lambda x: next(coloriter)
 
     oldl = tmpl.cache[field]
     newl = oldl.replace('line|escape', 'line|colorize')
--- a/hgext/histedit.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/histedit.py	Mon Jul 18 23:28:14 2016 -0500
@@ -169,30 +169,35 @@
 
 """
 
-import pickle
+from __future__ import absolute_import
+
 import errno
 import os
 import sys
 
-from mercurial import bundle2
-from mercurial import cmdutil
-from mercurial import discovery
-from mercurial import error
-from mercurial import copies
-from mercurial import context
-from mercurial import destutil
-from mercurial import exchange
-from mercurial import extensions
-from mercurial import hg
-from mercurial import node
-from mercurial import repair
-from mercurial import scmutil
-from mercurial import util
-from mercurial import obsolete
-from mercurial import merge as mergemod
-from mercurial.lock import release
 from mercurial.i18n import _
+from mercurial import (
+    bundle2,
+    cmdutil,
+    context,
+    copies,
+    destutil,
+    discovery,
+    error,
+    exchange,
+    extensions,
+    hg,
+    lock,
+    merge as mergemod,
+    node,
+    obsolete,
+    repair,
+    scmutil,
+    util,
+)
 
+pickle = util.pickle
+release = lock.release
 cmdtable = {}
 command = cmdutil.command(cmdtable)
 
@@ -415,9 +420,7 @@
         <hash> <rev> <summary>
         """
         ctx = self.repo[self.node]
-        summary = ''
-        if ctx.description():
-            summary = ctx.description().splitlines()[0]
+        summary = _getsummary(ctx)
         line = '%s %s %d %s' % (self.verb, ctx, ctx.rev(), summary)
         # trim to 75 columns by default so it's not stupidly wide in my editor
         # (the 5 more are left for verb)
@@ -1264,6 +1267,14 @@
                                     'histedit')
     state.backupfile = backupfile
 
+def _getsummary(ctx):
+    # a common pattern is to extract the summary but default to the empty
+    # string
+    summary = ctx.description() or ''
+    if summary:
+        summary = summary.splitlines()[0]
+    return summary
+
 def bootstrapcontinue(ui, state, opts):
     repo = state.repo
     if state.actions:
@@ -1304,6 +1315,40 @@
 
     rules are in the format [ [act, ctx], ...] like in state.rules
     """
+    if repo.ui.configbool("experimental", "histedit.autoverb"):
+        newact = util.sortdict()
+        for act in actions:
+            ctx = repo[act.node]
+            summary = _getsummary(ctx)
+            fword = summary.split(' ', 1)[0].lower()
+            added = False
+
+            # if it doesn't end with the special character '!' just skip this
+            if fword.endswith('!'):
+                fword = fword[:-1]
+                if fword in primaryactions | secondaryactions | tertiaryactions:
+                    act.verb = fword
+                    # get the target summary
+                    tsum = summary[len(fword) + 1:].lstrip()
+                    # safe but slow: reverse iterate over the actions so we
+                    # don't clash on two commits having the same summary
+                    for na, l in reversed(list(newact.iteritems())):
+                        actx = repo[na.node]
+                        asum = _getsummary(actx)
+                        if asum == tsum:
+                            added = True
+                            l.append(act)
+                            break
+
+            if not added:
+                newact[act] = []
+
+        # copy over and flatten the new list
+        actions = []
+        for na, l in newact.iteritems():
+            actions.append(na)
+            actions += l
+
     rules = '\n'.join([act.torule() for act in actions])
     rules += '\n\n'
     rules += editcomment
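
The experimental histedit.autoverb block added above lets a commit summary such as "fold! fix typo" pick its own histedit verb and queue the action after the commit whose summary matches the rest of the line. A standalone sketch of just the first-word parsing; the verb set is an assumption standing in for primaryactions | secondaryactions | tertiaryactions:

    KNOWNVERBS = {'pick', 'edit', 'fold', 'roll', 'drop', 'mess', 'base'}

    def parseautoverb(summary):
        # return (verb, target summary) if the summary requests an action,
        # else (None, summary)
        fword = summary.split(' ', 1)[0].lower()
        if fword.endswith('!') and fword[:-1] in KNOWNVERBS:
            return fword[:-1], summary[len(fword) + 1:].lstrip()
        return None, summary

    print(parseautoverb('fold! add journal extension'))  # ('fold', 'add journal extension')
    print(parseautoverb('add journal extension'))        # (None, 'add journal extension')
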
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/journal.py	Mon Jul 18 23:28:14 2016 -0500
@@ -0,0 +1,506 @@
+# journal.py
+#
+# Copyright 2014-2016 Facebook, Inc.
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+"""Track previous positions of bookmarks (EXPERIMENTAL)
+
+This extension adds a new command: `hg journal`, which shows you where
+bookmarks were previously located.
+
+"""
+
+from __future__ import absolute_import
+
+import collections
+import errno
+import os
+import weakref
+
+from mercurial.i18n import _
+
+from mercurial import (
+    bookmarks,
+    cmdutil,
+    commands,
+    dirstate,
+    dispatch,
+    error,
+    extensions,
+    hg,
+    localrepo,
+    lock,
+    node,
+    util,
+)
+
+from . import share
+
+cmdtable = {}
+command = cmdutil.command(cmdtable)
+
+# Note for extension authors: ONLY specify testedwith = 'internal' for
+# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
+# be specifying the version(s) of Mercurial they are tested with, or
+# leave the attribute unspecified.
+testedwith = 'internal'
+
+# storage format version; increment when the format changes
+storageversion = 0
+
+# namespaces
+bookmarktype = 'bookmark'
+wdirparenttype = 'wdirparent'
+# In a shared repository, what shared feature name is used
+# to indicate this namespace is shared with the source?
+sharednamespaces = {
+    bookmarktype: hg.sharedbookmarks,
+}
+
+# Journal recording, register hooks and storage object
+def extsetup(ui):
+    extensions.wrapfunction(dispatch, 'runcommand', runcommand)
+    extensions.wrapfunction(bookmarks.bmstore, '_write', recordbookmarks)
+    extensions.wrapfunction(
+        dirstate.dirstate, '_writedirstate', recorddirstateparents)
+    extensions.wrapfunction(
+        localrepo.localrepository.dirstate, 'func', wrapdirstate)
+    extensions.wrapfunction(hg, 'postshare', wrappostshare)
+    extensions.wrapfunction(hg, 'copystore', unsharejournal)
+
+def reposetup(ui, repo):
+    if repo.local():
+        repo.journal = journalstorage(repo)
+
+def runcommand(orig, lui, repo, cmd, fullargs, *args):
+    """Track the command line options for recording in the journal"""
+    journalstorage.recordcommand(*fullargs)
+    return orig(lui, repo, cmd, fullargs, *args)
+
+# hooks to record dirstate changes
+def wrapdirstate(orig, repo):
+    """Make journal storage available to the dirstate object"""
+    dirstate = orig(repo)
+    if util.safehasattr(repo, 'journal'):
+        dirstate.journalstorage = repo.journal
+    return dirstate
+
+def recorddirstateparents(orig, dirstate, dirstatefp):
+    """Records all dirstate parent changes in the journal."""
+    if util.safehasattr(dirstate, 'journalstorage'):
+        old = [node.nullid, node.nullid]
+        nodesize = len(node.nullid)
+        try:
+            # The only source for the old state is in the dirstate file still
+            # on disk; the in-memory dirstate object only contains the new
+            # state. dirstate._opendirstatefile() switches between .hg/dirstate
+            # and .hg/dirstate.pending depending on the transaction state.
+            with dirstate._opendirstatefile() as fp:
+                state = fp.read(2 * nodesize)
+            if len(state) == 2 * nodesize:
+                old = [state[:nodesize], state[nodesize:]]
+        except IOError:
+            pass
+
+        new = dirstate.parents()
+        if old != new:
+            # only record two hashes if there was a merge
+            oldhashes = old[:1] if old[1] == node.nullid else old
+            newhashes = new[:1] if new[1] == node.nullid else new
+            dirstate.journalstorage.record(
+                wdirparenttype, '.', oldhashes, newhashes)
+
+    return orig(dirstate, dirstatefp)
+
+# hooks to record bookmark changes (both local and remote)
+def recordbookmarks(orig, store, fp):
+    """Records all bookmark changes in the journal."""
+    repo = store._repo
+    if util.safehasattr(repo, 'journal'):
+        oldmarks = bookmarks.bmstore(repo)
+        for mark, value in store.iteritems():
+            oldvalue = oldmarks.get(mark, node.nullid)
+            if value != oldvalue:
+                repo.journal.record(bookmarktype, mark, oldvalue, value)
+    return orig(store, fp)
+
+# shared repository support
+def _readsharedfeatures(repo):
+    """A set of shared features for this repository"""
+    try:
+        return set(repo.vfs.read('shared').splitlines())
+    except IOError as inst:
+        if inst.errno != errno.ENOENT:
+            raise
+        return set()
+
+def _mergeentriesiter(*iterables, **kwargs):
+    """Given a set of sorted iterables, yield the next entry in merged order
+
+    Note that by default entries go from most recent to oldest.
+    """
+    order = kwargs.pop('order', max)
+    iterables = [iter(it) for it in iterables]
+    # this tracks still active iterables; iterables are deleted as they are
+    # exhausted, which is why this is a dictionary and why each entry also
+    # stores the key. Entries are mutable so we can store the next value each
+    # time.
+    iterable_map = {}
+    for key, it in enumerate(iterables):
+        try:
+            iterable_map[key] = [next(it), key, it]
+        except StopIteration:
+            # empty entry, can be ignored
+            pass
+
+    while iterable_map:
+        value, key, it = order(iterable_map.itervalues())
+        yield value
+        try:
+            iterable_map[key][0] = next(it)
+        except StopIteration:
+            # this iterable is empty, remove it from consideration
+            del iterable_map[key]
+
+def wrappostshare(orig, sourcerepo, destrepo, **kwargs):
+    """Mark this shared working copy as sharing journal information"""
+    orig(sourcerepo, destrepo, **kwargs)
+    with destrepo.vfs('shared', 'a') as fp:
+        fp.write('journal\n')
+
+def unsharejournal(orig, ui, repo, repopath):
+    """Copy shared journal entries into this repo when unsharing"""
+    if (repo.path == repopath and repo.shared() and
+            util.safehasattr(repo, 'journal')):
+        sharedrepo = share._getsrcrepo(repo)
+        sharedfeatures = _readsharedfeatures(repo)
+        if sharedrepo and sharedfeatures > set(['journal']):
+            # there is a shared repository and there are shared journal entries
+            # to copy. move shared data over from source to destination but
+            # move the local file first
+            if repo.vfs.exists('journal'):
+                journalpath = repo.join('journal')
+                util.rename(journalpath, journalpath + '.bak')
+            storage = repo.journal
+            local = storage._open(
+                repo.vfs, filename='journal.bak', _newestfirst=False)
+            shared = (
+                e for e in storage._open(sharedrepo.vfs, _newestfirst=False)
+                if sharednamespaces.get(e.namespace) in sharedfeatures)
+            for entry in _mergeentriesiter(local, shared, order=min):
+                storage._write(repo.vfs, entry)
+
+    return orig(ui, repo, repopath)
+
+class journalentry(collections.namedtuple(
+        'journalentry',
+        'timestamp user command namespace name oldhashes newhashes')):
+    """Individual journal entry
+
+    * timestamp: a mercurial (time, timezone) tuple
+    * user: the username that ran the command
+    * namespace: the entry namespace, an opaque string
+    * name: the name of the changed item, opaque string with meaning in the
+      namespace
+    * command: the hg command that triggered this record
+    * oldhashes: a tuple of one or more binary hashes for the old location
+    * newhashes: a tuple of one or more binary hashes for the new location
+
+    Handles serialisation from and to the storage format. Fields are
+    separated by newlines, hashes are written out in hex separated by commas,
+    timestamp and timezone are separated by a space.
+
+    """
+    @classmethod
+    def fromstorage(cls, line):
+        (time, user, command, namespace, name,
+         oldhashes, newhashes) = line.split('\n')
+        timestamp, tz = time.split()
+        timestamp, tz = float(timestamp), int(tz)
+        oldhashes = tuple(node.bin(hash) for hash in oldhashes.split(','))
+        newhashes = tuple(node.bin(hash) for hash in newhashes.split(','))
+        return cls(
+            (timestamp, tz), user, command, namespace, name,
+            oldhashes, newhashes)
+
+    def __str__(self):
+        """String representation for storage"""
+        time = ' '.join(map(str, self.timestamp))
+        oldhashes = ','.join([node.hex(hash) for hash in self.oldhashes])
+        newhashes = ','.join([node.hex(hash) for hash in self.newhashes])
+        return '\n'.join((
+            time, self.user, self.command, self.namespace, self.name,
+            oldhashes, newhashes))
+
+class journalstorage(object):
+    """Storage for journal entries
+
+    Entries are divided over two files; one with entries that pertain to the
+    local working copy *only*, and one with entries that are shared across
+    multiple working copies when shared using the share extension.
+
+    Entries are stored with NUL bytes as separators. See the journalentry
+    class for the per-entry structure.
+
+    The file format starts with an integer version, delimited by a NUL.
+
+    This storage uses a dedicated lock; this makes it easier to avoid issues
+    with entries being added while the regular wlock is unlocked (e.g. the
+    dirstate).
+
+    """
+    _currentcommand = ()
+    _lockref = None
+
+    def __init__(self, repo):
+        self.user = util.getuser()
+        self.ui = repo.ui
+        self.vfs = repo.vfs
+
+        # is this working copy using a shared storage?
+        self.sharedfeatures = self.sharedvfs = None
+        if repo.shared():
+            features = _readsharedfeatures(repo)
+            sharedrepo = share._getsrcrepo(repo)
+            if sharedrepo is not None and 'journal' in features:
+                self.sharedvfs = sharedrepo.vfs
+                self.sharedfeatures = features
+
+    # track the current command for recording in journal entries
+    @property
+    def command(self):
+        commandstr = ' '.join(
+            map(util.shellquote, journalstorage._currentcommand))
+        if '\n' in commandstr:
+            # truncate multi-line commands
+            commandstr = commandstr.partition('\n')[0] + ' ...'
+        return commandstr
+
+    @classmethod
+    def recordcommand(cls, *fullargs):
+        """Set the current hg arguments, stored with recorded entries"""
+        # Set the current command on the class because we may have started
+        # with a non-local repo (cloning for example).
+        cls._currentcommand = fullargs
+
+    def jlock(self, vfs):
+        """Create a lock for the journal file"""
+        if self._lockref and self._lockref():
+            raise error.Abort(_('journal lock does not support nesting'))
+        desc = _('journal of %s') % vfs.base
+        try:
+            l = lock.lock(vfs, 'journal.lock', 0, desc=desc)
+        except error.LockHeld as inst:
+            self.ui.warn(
+                _("waiting for lock on %s held by %r\n") % (desc, inst.locker))
+            # default to 600 seconds timeout
+            l = lock.lock(
+                vfs, 'journal.lock',
+                int(self.ui.config("ui", "timeout", "600")), desc=desc)
+            self.ui.warn(_("got lock after %s seconds\n") % l.delay)
+        self._lockref = weakref.ref(l)
+        return l
+
+    def record(self, namespace, name, oldhashes, newhashes):
+        """Record a new journal entry
+
+        * namespace: an opaque string; this can be used to filter on the type
+          of recorded entries.
+        * name: the name defining this entry; for bookmarks, this is the
+          bookmark name. Can be filtered on when retrieving entries.
+        * oldhashes and newhashes: each a single binary hash, or a list of
+          binary hashes. These represent the old and new position of the named
+          item.
+
+        """
+        if not isinstance(oldhashes, list):
+            oldhashes = [oldhashes]
+        if not isinstance(newhashes, list):
+            newhashes = [newhashes]
+
+        entry = journalentry(
+            util.makedate(), self.user, self.command, namespace, name,
+            oldhashes, newhashes)
+
+        vfs = self.vfs
+        if self.sharedvfs is not None:
+            # write to the shared repository if this feature is being
+            # shared between working copies.
+            if sharednamespaces.get(namespace) in self.sharedfeatures:
+                vfs = self.sharedvfs
+
+        self._write(vfs, entry)
+
+    def _write(self, vfs, entry):
+        with self.jlock(vfs):
+            version = None
+            # open file in append mode to ensure it is created if missing
+            with vfs('journal', mode='a+b', atomictemp=True) as f:
+                f.seek(0, os.SEEK_SET)
+                # Read just enough bytes to get a version number (up to 2
+                # digits plus separator)
+                version = f.read(3).partition('\0')[0]
+                if version and version != str(storageversion):
+                    # different version of the storage. Exit early (and not
+                    # write anything) if this is not a version we can handle or
+                    # the file is corrupt. In future, perhaps rotate the file
+                    # instead?
+                    self.ui.warn(
+                        _("unsupported journal file version '%s'\n") % version)
+                    return
+                if not version:
+                    # empty file, write version first
+                    f.write(str(storageversion) + '\0')
+                f.seek(0, os.SEEK_END)
+                f.write(str(entry) + '\0')
+
+    def filtered(self, namespace=None, name=None):
+        """Yield all journal entries with the given namespace or name
+
+        Both the namespace and the name are optional; if neither is given all
+        entries in the journal are produced.
+
+        Matching supports regular expressions by using the `re:` prefix
+        (use `literal:` to match names or namespaces that start with `re:`)
+
+        """
+        if namespace is not None:
+            namespace = util.stringmatcher(namespace)[-1]
+        if name is not None:
+            name = util.stringmatcher(name)[-1]
+        for entry in self:
+            if namespace is not None and not namespace(entry.namespace):
+                continue
+            if name is not None and not name(entry.name):
+                continue
+            yield entry
+
+    def __iter__(self):
+        """Iterate over the storage
+
+        Yields journalentry instances for each contained journal record.
+
+        """
+        local = self._open(self.vfs)
+
+        if self.sharedvfs is None:
+            return local
+
+        # iterate over both local and shared entries, but only those
+        # shared entries that are among the currently shared features
+        shared = (
+            e for e in self._open(self.sharedvfs)
+            if sharednamespaces.get(e.namespace) in self.sharedfeatures)
+        return _mergeentriesiter(local, shared)
+
+    def _open(self, vfs, filename='journal', _newestfirst=True):
+        if not vfs.exists(filename):
+            return
+
+        with vfs(filename) as f:
+            raw = f.read()
+
+        lines = raw.split('\0')
+        version = lines and lines[0]
+        if version != str(storageversion):
+            version = version or _('not available')
+            raise error.Abort(_("unknown journal file version '%s'") % version)
+
+        # Skip the first line, it's a version number. Normally we iterate over
+        # these in reverse order to list newest first; only when copying across
+        # a shared storage do we forgo reversing.
+        lines = lines[1:]
+        if _newestfirst:
+            lines = reversed(lines)
+        for line in lines:
+            if not line:
+                continue
+            yield journalentry.fromstorage(line)
+
+# journal reading
+# log options that don't make sense for journal
+_ignoreopts = ('no-merges', 'graph')
+@command(
+    'journal', [
+        ('', 'all', None, 'show history for all names'),
+        ('c', 'commits', None, 'show commit metadata'),
+    ] + [opt for opt in commands.logopts if opt[1] not in _ignoreopts],
+    '[OPTION]... [BOOKMARKNAME]')
+def journal(ui, repo, *args, **opts):
+    """show the previous position of bookmarks and the working copy
+
+    The journal is used to see the previous commits that bookmarks and the
+    working copy pointed to. By default the previous locations for the working
+    copy.  Passing a bookmark name will show all the previous positions of
+    that bookmark. Use the --all switch to show previous locations for all
+    bookmarks and the working copy; each line will then include the bookmark
+    name, or '.' for the working copy, as well.
+
+    If `name` starts with `re:`, the remainder of the name is treated as
+    a regular expression. To match a name that actually starts with `re:`,
+    use the prefix `literal:`.
+
+    By default hg journal only shows the commit hash and the command that was
+    running at that time. -v/--verbose will show the prior hash, the user, and
+    the time at which it happened.
+
+    Use -c/--commits to output log information on each commit hash; at this
+    point you can use the usual `--patch`, `--git`, `--stat` and `--template`
+    switches to alter the log output for these.
+
+    `hg journal -T json` can be used to produce machine readable output.
+
+    """
+    name = '.'
+    if opts.get('all'):
+        if args:
+            raise error.Abort(
+                _("You can't combine --all and filtering on a name"))
+        name = None
+    if args:
+        name = args[0]
+
+    fm = ui.formatter('journal', opts)
+
+    if opts.get("template") != "json":
+        if name is None:
+            displayname = _('the working copy and bookmarks')
+        else:
+            displayname = "'%s'" % name
+        ui.status(_("previous locations of %s:\n") % displayname)
+
+    limit = cmdutil.loglimit(opts)
+    entry = None
+    for count, entry in enumerate(repo.journal.filtered(name=name)):
+        if count == limit:
+            break
+        newhashesstr = ','.join([node.short(hash) for hash in entry.newhashes])
+        oldhashesstr = ','.join([node.short(hash) for hash in entry.oldhashes])
+
+        fm.startitem()
+        fm.condwrite(ui.verbose, 'oldhashes', '%s -> ', oldhashesstr)
+        fm.write('newhashes', '%s', newhashesstr)
+        fm.condwrite(ui.verbose, 'user', ' %-8s', entry.user)
+        fm.condwrite(
+            opts.get('all') or name.startswith('re:'),
+            'name', '  %-8s', entry.name)
+
+        timestring = util.datestr(entry.timestamp, '%Y-%m-%d %H:%M %1%2')
+        fm.condwrite(ui.verbose, 'date', ' %s', timestring)
+        fm.write('command', '  %s\n', entry.command)
+
+        if opts.get("commits"):
+            displayer = cmdutil.show_changeset(ui, repo, opts, buffered=False)
+            for hash in entry.newhashes:
+                try:
+                    ctx = repo[hash]
+                    displayer.show(ctx)
+                except error.RepoLookupError as e:
+                    fm.write('repolookuperror', "%s\n\n", str(e))
+            displayer.close()
+
+    fm.end()
+
+    if entry is None:
+        ui.status(_("no recorded locations\n"))
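
The journalentry and journalstorage docstrings above specify the storage layout: a version number followed by NUL-separated records, each record newline-separated with hex hashes joined by commas and the timestamp stored as "seconds timezone". A self-contained round-trip of that layout, with plain tuples standing in for the real classes:

    STORAGEVERSION = 0

    def serialize(timestamp, tz, user, command, namespace, name, old, new):
        return '\n'.join(('%s %d' % (timestamp, tz), user, command, namespace,
                          name, ','.join(old), ','.join(new)))

    def parse(line):
        (time, user, command, namespace, name, old, new) = line.split('\n')
        ts, tz = time.split()
        return (float(ts), int(tz), user, command, namespace, name,
                old.split(','), new.split(','))

    record = serialize(1468887000.0, 18000, 'alice', 'hg up tip', 'wdirparent',
                       '.', ['0' * 40], ['f' * 40])
    blob = '%d\0%s\0' % (STORAGEVERSION, record)   # file layout: version, then records
    assert parse(blob.split('\0')[1]) == (
        1468887000.0, 18000, 'alice', 'hg up tip', 'wdirparent', '.',
        ['0' * 40], ['f' * 40])
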
--- a/hgext/keyword.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/keyword.py	Mon Jul 18 23:28:14 2016 -0500
@@ -89,8 +89,8 @@
 import re
 import tempfile
 
+from mercurial.i18n import _
 from mercurial.hgweb import webcommands
-from mercurial.i18n import _
 
 from mercurial import (
     cmdutil,
@@ -455,7 +455,7 @@
 
     uisetup(ui)
     reposetup(ui, repo)
-    ui.write('[extensions]\nkeyword =\n')
+    ui.write(('[extensions]\nkeyword =\n'))
     demoitems('keyword', ui.configitems('keyword'))
     demoitems('keywordset', ui.configitems('keywordset'))
     demoitems('keywordmaps', kwmaps.iteritems())
@@ -735,7 +735,7 @@
     def kwfilectx_cmp(orig, self, fctx):
         # keyword affects data size, comparing wdir and filelog size does
         # not make sense
-        if (fctx._filerev is None and
+        if (fctx._filenode is None and
             (self._repo._encodefilterpats or
              kwt.match(fctx.path()) and 'l' not in fctx.flags() or
              self.size() - 4 == fctx.size()) or
--- a/hgext/largefiles/__init__.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/largefiles/__init__.py	Mon Jul 18 23:28:14 2016 -0500
@@ -104,14 +104,20 @@
 explicitly do so with the --large flag passed to the :hg:`add`
 command.
 '''
+from __future__ import absolute_import
 
-from mercurial import hg, localrepo
+from mercurial import (
+    hg,
+    localrepo,
+)
 
-import lfcommands
-import proto
-import reposetup
-import uisetup as uisetupmod
-import overrides
+from . import (
+    lfcommands,
+    overrides,
+    proto,
+    reposetup,
+    uisetup as uisetupmod,
+)
 
 # Note for extension authors: ONLY specify testedwith = 'internal' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
--- a/hgext/largefiles/basestore.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/largefiles/basestore.py	Mon Jul 18 23:28:14 2016 -0500
@@ -7,13 +7,13 @@
 # GNU General Public License version 2 or any later version.
 
 '''base class for store implementations and store-related utility code'''
+from __future__ import absolute_import
 
-import re
-
-from mercurial import util, node, hg, error
 from mercurial.i18n import _
 
-import lfutil
+from mercurial import node, util
+
+from . import lfutil
 
 class StoreError(Exception):
     '''Raised when there is a problem getting files from or putting
@@ -116,19 +116,26 @@
         '''Verify the existence (and, optionally, contents) of every big
         file revision referenced by every changeset in revs.
         Return 0 if all is well, non-zero on any errors.'''
-        failed = False
 
         self.ui.status(_('searching %d changesets for largefiles\n') %
                        len(revs))
         verified = set()                # set of (filename, filenode) tuples
-
+        filestocheck = []               # list of (cset, filename, expectedhash)
         for rev in revs:
             cctx = self.repo[rev]
             cset = "%d:%s" % (cctx.rev(), node.short(cctx.node()))
 
             for standin in cctx:
-                if self._verifyfile(cctx, cset, contents, standin, verified):
-                    failed = True
+                filename = lfutil.splitstandin(standin)
+                if filename:
+                    fctx = cctx[standin]
+                    key = (filename, fctx.filenode())
+                    if key not in verified:
+                        verified.add(key)
+                        expectedhash = fctx.data()[0:40]
+                        filestocheck.append((cset, filename, expectedhash))
+
+        failed = self._verifyfiles(contents, filestocheck)
 
         numrevs = len(verified)
         numlfiles = len(set([fname for (fname, fnode) in verified]))
@@ -150,72 +157,10 @@
         exist in the store).'''
         raise NotImplementedError('abstract method')
 
-    def _verifyfile(self, cctx, cset, contents, standin, verified):
-        '''Perform the actual verification of a file in the store.
-        'cset' is only used in warnings.
+    def _verifyfiles(self, contents, filestocheck):
+        '''Perform the actual verification of files in the store.
         'contents' controls verification of content hash.
-        'standin' is the standin path of the largefile to verify.
-        'verified' is maintained as a set of already verified files.
-        Returns _true_ if it is a standin and any problems are found!
+        'filestocheck' is list of files to check.
+        Returns _true_ if any problems are found!
         '''
         raise NotImplementedError('abstract method')
-
-import localstore, wirestore
-
-_storeprovider = {
-    'file':  [localstore.localstore],
-    'http':  [wirestore.wirestore],
-    'https': [wirestore.wirestore],
-    'ssh': [wirestore.wirestore],
-    }
-
-_scheme_re = re.compile(r'^([a-zA-Z0-9+-.]+)://')
-
-# During clone this function is passed the src's ui object
-# but it needs the dest's ui object so it can read out of
-# the config file. Use repo.ui instead.
-def _openstore(repo, remote=None, put=False):
-    ui = repo.ui
-
-    if not remote:
-        lfpullsource = getattr(repo, 'lfpullsource', None)
-        if lfpullsource:
-            path = ui.expandpath(lfpullsource)
-        elif put:
-            path = ui.expandpath('default-push', 'default')
-        else:
-            path = ui.expandpath('default')
-
-        # ui.expandpath() leaves 'default-push' and 'default' alone if
-        # they cannot be expanded: fallback to the empty string,
-        # meaning the current directory.
-        if path == 'default-push' or path == 'default':
-            path = ''
-            remote = repo
-        else:
-            path, _branches = hg.parseurl(path)
-            remote = hg.peer(repo, {}, path)
-
-    # The path could be a scheme so use Mercurial's normal functionality
-    # to resolve the scheme to a repository and use its path
-    path = util.safehasattr(remote, 'url') and remote.url() or remote.path
-
-    match = _scheme_re.match(path)
-    if not match:                       # regular filesystem path
-        scheme = 'file'
-    else:
-        scheme = match.group(1)
-
-    try:
-        storeproviders = _storeprovider[scheme]
-    except KeyError:
-        raise error.Abort(_('unsupported URL scheme %r') % scheme)
-
-    for classobj in storeproviders:
-        try:
-            return classobj(ui, repo, remote)
-        except lfutil.storeprotonotcapable:
-            pass
-
-    raise error.Abort(_('%s does not appear to be a largefile store') %
-                     util.hidepassword(path))
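
The verification refactor above collects (cset, filename, expectedhash) tuples first and hands them to a single _verifyfiles call, which lets remote stores batch their lookups. A hedged sketch of what a local implementation of that interface could look like; the flat store layout and the hashing helper are assumptions standing in for lfutil:

    import hashlib
    import os

    def _hashfile(path):
        # stand-in for lfutil.hashfile: SHA-1 of the file contents
        with open(path, 'rb') as fp:
            return hashlib.sha1(fp.read()).hexdigest()

    def verifyfiles_local(storedir, contents, filestocheck):
        # return True if any problem is found, mirroring the new interface
        failed = False
        for cset, filename, expectedhash in filestocheck:
            path = os.path.join(storedir, expectedhash)
            if not os.path.exists(path):
                print('%s: missing largefile %s' % (cset, filename))
                failed = True
            elif contents and _hashfile(path) != expectedhash:
                print('%s: corrupted largefile %s' % (cset, filename))
                failed = True
        return failed
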
--- a/hgext/largefiles/lfcommands.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/largefiles/lfcommands.py	Mon Jul 18 23:28:14 2016 -0500
@@ -7,20 +7,39 @@
 # GNU General Public License version 2 or any later version.
 
 '''High-level command function for lfconvert, plus the cmdtable.'''
+from __future__ import absolute_import
 
-import os, errno
+import errno
+import hashlib
+import os
 import shutil
 
-from mercurial import util, match as match_, hg, node, context, error, \
-    cmdutil, scmutil, commands
 from mercurial.i18n import _
-from mercurial.lock import release
 
-from hgext.convert import convcmd
-from hgext.convert import filemap
+from mercurial import (
+    cmdutil,
+    commands,
+    context,
+    error,
+    hg,
+    lock,
+    match as matchmod,
+    node,
+    scmutil,
+    util,
+)
 
-import lfutil
-import basestore
+from ..convert import (
+    convcmd,
+    filemap,
+)
+
+from . import (
+    lfutil,
+    storefactory
+)
+
+release = lock.release
 
 # -- Commands ----------------------------------------------------------
 
@@ -92,7 +111,7 @@
             if not pats:
                 pats = ui.configlist(lfutil.longname, 'patterns', default=[])
             if pats:
-                matcher = match_.match(rsrc.root, '', list(pats))
+                matcher = matchmod.match(rsrc.root, '', list(pats))
             else:
                 matcher = None
 
@@ -211,7 +230,7 @@
                         raise error.Abort(_('largefile %s becomes symlink') % f)
 
                 # largefile was modified, update standins
-                m = util.sha1('')
+                m = hashlib.sha1('')
                 m.update(ctx[f].data())
                 hash = m.hexdigest()
                 if f not in lfiletohash or lfiletohash[f] != hash:
@@ -337,7 +356,7 @@
     if not files:
         return
 
-    store = basestore._openstore(rsrc, rdst, put=True)
+    store = storefactory.openstore(rsrc, rdst, put=True)
 
     at = 0
     ui.debug("sending statlfile command for %d largefiles\n" % len(files))
@@ -368,7 +387,7 @@
     else:
         revs = ['.']
 
-    store = basestore._openstore(repo)
+    store = storefactory.openstore(repo)
     return store.verify(revs, contents=contents)
 
 def cachelfiles(ui, repo, node, filelist=None):
@@ -394,7 +413,7 @@
             toget.append((lfile, expectedhash))
 
     if toget:
-        store = basestore._openstore(repo)
+        store = storefactory.openstore(repo)
         ret = store.get(toget)
         return ret
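
The remaining hgext modules in this changeset get the same treatment as lfcommands.py above: from __future__ import absolute_import, stdlib imports one per line, a single parenthesized "from mercurial import (...)" block, explicit relative imports for sibling modules, and module-level aliases instead of symbol imports. A minimal sketch of that layout, assuming only that Mercurial's own modules are importable; the names are illustrative, not taken from the changeset.

from __future__ import absolute_import

import os                      # stdlib modules, one import per line

from mercurial.i18n import _   # i18n comes before the grouped import

from mercurial import (
    error,
    lock as lockmod,
    util,
)

# symbol imports such as "from mercurial.lock import release" become
# module-level aliases after the grouped import:
release = lockmod.release

# sibling modules inside an extension package use the explicit relative
# form, e.g. "from . import (lfutil, storefactory)"
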
 
--- a/hgext/largefiles/lfutil.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/largefiles/lfutil.py	Mon Jul 18 23:28:14 2016 -0500
@@ -7,21 +7,30 @@
 # GNU General Public License version 2 or any later version.
 
 '''largefiles utility code: must not import other modules in this package.'''
+from __future__ import absolute_import
 
+import copy
+import hashlib
 import os
 import platform
 import stat
-import copy
+
+from mercurial.i18n import _
 
-from mercurial import dirstate, httpconnection, match as match_, util, scmutil
-from mercurial.i18n import _
-from mercurial import node, error
+from mercurial import (
+    dirstate,
+    error,
+    httpconnection,
+    match as matchmod,
+    node,
+    scmutil,
+    util,
+)
 
 shortname = '.hglf'
 shortnameslash = shortname + '/'
 longname = 'largefiles'
 
-
 # -- Private worker functions ------------------------------------------
 
 def getminsize(ui, assumelfiles, opt, default=10):
@@ -152,7 +161,7 @@
 
 def lfdirstatestatus(lfdirstate, repo):
     wctx = repo['.']
-    match = match_.always(repo.root, repo.getcwd())
+    match = matchmod.always(repo.root, repo.getcwd())
     unsure, s = lfdirstate.status(match, [], False, False, False)
     modified, clean = s.modified, s.clean
     for lfile in unsure:
@@ -180,12 +189,11 @@
             if rev is not None or repo.dirstate[f] != '?']
 
 def instore(repo, hash, forcelocal=False):
-    '''Return true if a largefile with the given hash exists in the user
-    cache.'''
+    '''Return true if a largefile with the given hash exists in the store'''
     return os.path.exists(storepath(repo, hash, forcelocal))
 
 def storepath(repo, hash, forcelocal=False):
-    '''Return the correct location in the repository largefiles cache for a
+    '''Return the correct location in the repository largefiles store for a
     file with the given hash.'''
     if not forcelocal and repo.shared():
         return repo.vfs.reljoin(repo.sharedpath, longname, hash)
@@ -251,7 +259,6 @@
             realfile = splitstandin(filename)
             copytostore(repo, ctx.node(), realfile)
 
-
 def copytostoreabsolute(repo, file, hash):
     if inusercache(repo.ui, hash):
         link(usercachepath(repo.ui, hash), storepath(repo, hash))
@@ -350,7 +357,7 @@
 def copyandhash(instream, outfile):
     '''Read bytes from instream (iterable) and write them to outfile,
     computing the SHA-1 hash of the data along the way. Return the hash.'''
-    hasher = util.sha1('')
+    hasher = hashlib.sha1('')
     for data in instream:
         hasher.update(data)
         outfile.write(data)
@@ -362,7 +369,7 @@
 def hashfile(file):
     if not os.path.exists(file):
         return ''
-    hasher = util.sha1('')
+    hasher = hashlib.sha1('')
     fd = open(file, 'rb')
     for data in util.filechunkiter(fd, 128 * 1024):
         hasher.update(data)
@@ -391,7 +398,7 @@
 def hexsha1(data):
     """hexsha1 returns the hex-encoded sha1 sum of the data in the file-like
     object data"""
-    h = util.sha1()
+    h = hashlib.sha1()
     for chunk in util.filechunkiter(data):
         h.update(chunk)
     return h.hexdigest()
@@ -533,7 +540,7 @@
         # otherwise to update all standins if the largefiles are
         # large.
         lfdirstate = openlfdirstate(ui, repo)
-        dirtymatch = match_.always(repo.root, repo.getcwd())
+        dirtymatch = matchmod.always(repo.root, repo.getcwd())
         unsure, s = lfdirstate.status(dirtymatch, [], False, False,
                                       False)
         modifiedfiles = unsure + s.modified + s.added + s.removed
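
The lfutil.py hunks above swap util.sha1 for the standard library's hashlib.sha1 without changing the hashing pattern. Below is a standalone sketch of the chunked-hashing idiom behind hashfile() and copyandhash(); a plain read loop stands in for util.filechunkiter so the example runs without Mercurial.

import hashlib

def hashfile_sketch(path, chunksize=128 * 1024):
    """Return the hex SHA-1 of a file, hashing it in 128 KiB chunks.

    Mirrors the shape of lfutil.hashfile(); the read loop replaces
    util.filechunkiter() so this runs standalone.
    """
    hasher = hashlib.sha1()
    with open(path, 'rb') as fd:
        while True:
            data = fd.read(chunksize)
            if not data:
                break
            hasher.update(data)
    return hasher.hexdigest()
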
--- a/hgext/largefiles/localstore.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/largefiles/localstore.py	Mon Jul 18 23:28:14 2016 -0500
@@ -7,11 +7,14 @@
 # GNU General Public License version 2 or any later version.
 
 '''store class for local filesystem'''
+from __future__ import absolute_import
 
 from mercurial.i18n import _
 
-import lfutil
-import basestore
+from . import (
+    basestore,
+    lfutil,
+)
 
 class localstore(basestore.basestore):
     '''localstore first attempts to grab files out of the store in the remote
@@ -33,7 +36,6 @@
             retval[hash] = lfutil.instore(self.remote, hash)
         return retval
 
-
     def _getfile(self, tmpfile, filename, hash):
         path = lfutil.findfile(self.remote, hash)
         if not path:
@@ -42,29 +44,23 @@
         with open(path, 'rb') as fd:
             return lfutil.copyandhash(fd, tmpfile)
 
-    def _verifyfile(self, cctx, cset, contents, standin, verified):
-        filename = lfutil.splitstandin(standin)
-        if not filename:
-            return False
-        fctx = cctx[standin]
-        key = (filename, fctx.filenode())
-        if key in verified:
-            return False
-
-        expecthash = fctx.data()[0:40]
-        storepath, exists = lfutil.findstorepath(self.remote, expecthash)
-        verified.add(key)
-        if not exists:
-            self.ui.warn(
-                _('changeset %s: %s references missing %s\n')
-                % (cset, filename, storepath))
-            return True                 # failed
-
-        if contents:
-            actualhash = lfutil.hashfile(storepath)
-            if actualhash != expecthash:
+    def _verifyfiles(self, contents, filestocheck):
+        failed = False
+        for cset, filename, expectedhash in filestocheck:
+            storepath, exists = lfutil.findstorepath(self.repo, expectedhash)
+            if not exists:
+                storepath, exists = lfutil.findstorepath(
+                    self.remote, expectedhash)
+            if not exists:
                 self.ui.warn(
-                    _('changeset %s: %s references corrupted %s\n')
+                    _('changeset %s: %s references missing %s\n')
                     % (cset, filename, storepath))
-                return True             # failed
-        return False
+                failed = True
+            elif contents:
+                actualhash = lfutil.hashfile(storepath)
+                if actualhash != expectedhash:
+                    self.ui.warn(
+                        _('changeset %s: %s references corrupted %s\n')
+                        % (cset, filename, storepath))
+                    failed = True
+        return failed
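
Both localstore (here) and remotestore (below) replace the per-standin _verifyfile() hook with a batched _verifyfiles(contents, filestocheck) hook: filestocheck is an iterable of (cset, filename, expectedhash) tuples and the return value is True if any entry failed. A toy store sketching the new signature follows; the class and its known-hash set are illustrative only, not changeset code.

class toystore(object):
    """Illustrative stand-in for a largefiles store."""

    def __init__(self, knownhashes):
        self.knownhashes = set(knownhashes)

    def _verifyfiles(self, contents, filestocheck):
        # filestocheck: iterable of (cset, filename, expectedhash)
        failed = False
        for cset, filename, expectedhash in filestocheck:
            if expectedhash not in self.knownhashes:
                print('changeset %s: %s references missing %s'
                      % (cset, filename, expectedhash))
                failed = True
            # with contents=True a real store would also re-hash the file
        return failed

store = toystore(['aa' * 20])
print(store._verifyfiles(False, [('3:abcdef012345', 'big.bin', 'bb' * 20)]))
# warns about the missing hash, then prints True
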
--- a/hgext/largefiles/overrides.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/largefiles/overrides.py	Mon Jul 18 23:28:14 2016 -0500
@@ -7,17 +7,31 @@
 # GNU General Public License version 2 or any later version.
 
 '''Overridden Mercurial commands and functions for the largefiles extension'''
+from __future__ import absolute_import
 
-import os
 import copy
+import os
 
-from mercurial import hg, util, cmdutil, scmutil, match as match_, \
-        archival, pathutil, registrar, revset, error
 from mercurial.i18n import _
 
-import lfutil
-import lfcommands
-import basestore
+from mercurial import (
+    archival,
+    cmdutil,
+    error,
+    hg,
+    match as matchmod,
+    pathutil,
+    registrar,
+    revset,
+    scmutil,
+    util,
+)
+
+from . import (
+    lfcommands,
+    lfutil,
+    storefactory,
+)
 
 # -- Utility functions: commonly/repeatedly needed functionality ---------------
 
@@ -99,13 +113,13 @@
     if lfutil.islfilesrepo(repo):
         lfpats = ui.configlist(lfutil.longname, 'patterns', default=[])
         if lfpats:
-            lfmatcher = match_.match(repo.root, '', list(lfpats))
+            lfmatcher = matchmod.match(repo.root, '', list(lfpats))
 
     lfnames = []
     m = matcher
 
     wctx = repo[None]
-    for f in repo.walk(match_.badmatch(m, lambda x, y: None)):
+    for f in repo.walk(matchmod.badmatch(m, lambda x, y: None)):
         exact = m.exact(f)
         lfile = lfutil.standin(f) in wctx
         nfile = f in wctx
@@ -307,7 +321,7 @@
             if pat.startswith('set:'):
                 return pat
 
-            kindpat = match_._patsplit(pat, None)
+            kindpat = matchmod._patsplit(pat, None)
 
             if kindpat[0] is not None:
                 return kindpat[0] + ':' + tostandin(kindpat[1])
@@ -532,7 +546,6 @@
 
     return orig(repo, actions, branchmerge)
 
-
 # Override filemerge to prompt the user about how they wish to merge
 # largefiles. This will handle identical edits without prompting the user.
 def overridefilemerge(origfn, premerge, repo, mynode, orig, fcd, fco, fca,
@@ -626,7 +639,7 @@
             # The patterns were previously mangled to add the standin
             # directory; we need to remove that now
             for pat in pats:
-                if match_.patkind(pat) is None and lfutil.shortname in pat:
+                if matchmod.patkind(pat) is None and lfutil.shortname in pat:
                     newpats.append(pat.replace(lfutil.shortname, ''))
                 else:
                     newpats.append(pat)
@@ -644,7 +657,7 @@
         oldmatch = installmatchfn(overridematch)
         listpats = []
         for pat in pats:
-            if match_.patkind(pat) is not None:
+            if matchmod.patkind(pat) is not None:
                 listpats.append(pat)
             else:
                 listpats.append(makestandin(pat))
@@ -977,7 +990,7 @@
     if subrepos:
         for subpath in sorted(ctx.substate):
             sub = ctx.workingsub(subpath)
-            submatch = match_.subdirmatcher(subpath, matchfn)
+            submatch = matchmod.subdirmatcher(subpath, matchfn)
             sub._repo.lfstatus = True
             sub.archive(archiver, prefix, submatch)
 
@@ -1025,7 +1038,7 @@
 
     for subpath in sorted(ctx.substate):
         sub = ctx.workingsub(subpath)
-        submatch = match_.subdirmatcher(subpath, match)
+        submatch = matchmod.subdirmatcher(subpath, match)
         sub._repo.lfstatus = True
         sub.archive(archiver, prefix + repo._path + '/', submatch)
 
@@ -1109,7 +1122,7 @@
             lfhashes.add(lfhash)
     lfutil.getlfilestoupload(repo, missing, dedup)
     if lfhashes:
-        lfexists = basestore._openstore(repo, other).exists(lfhashes)
+        lfexists = storefactory.openstore(repo, other).exists(lfhashes)
         for fn, lfhash in knowns:
             if not lfexists[lfhash]: # lfhash doesn't exist on "other"
                 addfunc(fn, lfhash)
@@ -1190,7 +1203,7 @@
         return orig(repo, matcher, prefix, opts, dry_run, similarity)
     # Get the list of missing largefiles so we can remove them
     lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
-    unsure, s = lfdirstate.status(match_.always(repo.root, repo.getcwd()), [],
+    unsure, s = lfdirstate.status(matchmod.always(repo.root, repo.getcwd()), [],
                                   False, False, False)
 
     # Call into the normal remove code, but the removing of the standin, we want
@@ -1338,7 +1351,7 @@
         else:
             hash = lfutil.readstandin(repo, lf, ctx.rev())
             if not lfutil.inusercache(repo.ui, hash):
-                store = basestore._openstore(repo)
+                store = storefactory.openstore(repo)
                 success, missing = store.get([(lf, hash)])
                 if len(success) != 1:
                     raise error.Abort(
@@ -1375,7 +1388,7 @@
         # (*1) deprecated, but used internally (e.g: "rebase --collapse")
 
         lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
-        unsure, s = lfdirstate.status(match_.always(repo.root,
+        unsure, s = lfdirstate.status(matchmod.always(repo.root,
                                                     repo.getcwd()),
                                       [], False, False, False)
         pctx = repo['.']
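
Apart from renaming match_ to matchmod and routing store access through storefactory, the overrides.py hunks keep using the same matcher helpers; matchmod.badmatch(m, badfn) returns a copy of m with its bad() callback replaced, which is how addlargefiles() above silences warnings about not-yet-existing files during repo.walk(). A small sketch, assuming Mercurial's modules are importable; the root path and pattern are made up.

from mercurial import match as matchmod

# Build a matcher outside any repo: root, cwd, patterns.
m = matchmod.match('/tmp/repo', '', ['glob:*.bin'])
# Replace the bad-file callback with a no-op, as the walk above does.
quiet = matchmod.badmatch(m, lambda f, msg: None)
print(quiet('data.bin'))    # True: the pattern matches
print(quiet('notes.txt'))   # False
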
--- a/hgext/largefiles/proto.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/largefiles/proto.py	Mon Jul 18 23:28:14 2016 -0500
@@ -2,18 +2,27 @@
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
+from __future__ import absolute_import
 
 import os
 import re
 
-from mercurial import error, httppeer, util, wireproto
 from mercurial.i18n import _
 
+from mercurial import (
+    error,
+    httppeer,
+    util,
+    wireproto,
+)
+
+from . import (
+    lfutil,
+)
+
 urlerr = util.urlerr
 urlreq = util.urlreq
 
-import lfutil
-
 LARGEFILES_REQUIRED_MSG = ('\nThis repository uses the largefiles extension.'
                            '\n\nPlease enable it in your Mercurial config '
                            'file.\n')
--- a/hgext/largefiles/remotestore.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/largefiles/remotestore.py	Mon Jul 18 23:28:14 2016 -0500
@@ -5,20 +5,30 @@
 # GNU General Public License version 2 or any later version.
 
 '''remote largefile store; the base class for wirestore'''
+from __future__ import absolute_import
 
-from mercurial import util, wireproto, error
 from mercurial.i18n import _
 
+from mercurial import (
+    error,
+    util,
+    wireproto,
+)
+
+from . import (
+    basestore,
+    lfutil,
+    localstore,
+)
+
 urlerr = util.urlerr
 urlreq = util.urlreq
 
-import lfutil
-import basestore
-
 class remotestore(basestore.basestore):
     '''a largefile store accessed over a network'''
     def __init__(self, ui, repo, url):
         super(remotestore, self).__init__(ui, repo, url)
+        self._lstore = localstore.localstore(self.ui, self.repo, self.repo)
 
     def put(self, source, hash):
         if self.sendfile(source, hash):
@@ -65,34 +75,43 @@
 
         return lfutil.copyandhash(chunks, tmpfile)
 
-    def _verifyfile(self, cctx, cset, contents, standin, verified):
-        filename = lfutil.splitstandin(standin)
-        if not filename:
-            return False
-        fctx = cctx[standin]
-        key = (filename, fctx.filenode())
-        if key in verified:
-            return False
+    def _hashesavailablelocally(self, hashes):
+        existslocallymap = self._lstore.exists(hashes)
+        localhashes = [hash for hash in hashes if existslocallymap[hash]]
+        return localhashes
 
-        verified.add(key)
+    def _verifyfiles(self, contents, filestocheck):
+        failed = False
+        expectedhashes = [expectedhash
+                          for cset, filename, expectedhash in filestocheck]
+        localhashes = self._hashesavailablelocally(expectedhashes)
+        stats = self._stat([expectedhash for expectedhash in expectedhashes
+                            if expectedhash not in localhashes])
 
-        expecthash = fctx.data()[0:40]
-        stat = self._stat([expecthash])[expecthash]
-        if not stat:
-            return False
-        elif stat == 1:
-            self.ui.warn(
-                _('changeset %s: %s: contents differ\n')
-                % (cset, filename))
-            return True # failed
-        elif stat == 2:
-            self.ui.warn(
-                _('changeset %s: %s missing\n')
-                % (cset, filename))
-            return True # failed
-        else:
-            raise RuntimeError('verify failed: unexpected response from '
-                               'statlfile (%r)' % stat)
+        for cset, filename, expectedhash in filestocheck:
+            if expectedhash in localhashes:
+                filetocheck = (cset, filename, expectedhash)
+                verifyresult = self._lstore._verifyfiles(contents,
+                                                         [filetocheck])
+                if verifyresult:
+                    failed = True
+            else:
+                stat = stats[expectedhash]
+                if stat:
+                    if stat == 1:
+                        self.ui.warn(
+                            _('changeset %s: %s: contents differ\n')
+                            % (cset, filename))
+                        failed = True
+                    elif stat == 2:
+                        self.ui.warn(
+                            _('changeset %s: %s missing\n')
+                            % (cset, filename))
+                        failed = True
+                    else:
+                        raise RuntimeError('verify failed: unexpected response '
+                                           'from statlfile (%r)' % stat)
+        return failed
 
     def batch(self):
         '''Support for remote batching.'''
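
remotestore._verifyfiles() now keeps a wrapped localstore, verifies locally whatever hashes it already has, and issues a single batched _stat() for the rest; the stat codes keep their statlfile meaning (0 good, 1 contents differ, 2 missing). A standalone sketch of that split, with the local store and the remote stat call replaced by simple stand-ins.

def verify_sketch(filestocheck, localhashes, remotestat):
    """localhashes and remotestat stand in for localstore.exists() and
    remotestore._stat(); filestocheck holds (cset, filename, hash) tuples."""
    failed = False
    wanted = [h for _cset, _fn, h in filestocheck if h not in localhashes]
    stats = remotestat(wanted)            # one batched round trip
    for cset, filename, expectedhash in filestocheck:
        if expectedhash in localhashes:
            # the real hook delegates to the local store's _verifyfiles()
            continue
        code = stats[expectedhash]
        if code == 1:
            print('changeset %s: %s: contents differ' % (cset, filename))
            failed = True
        elif code == 2:
            print('changeset %s: %s missing' % (cset, filename))
            failed = True
    return failed

print(verify_sketch([('2:0123abcd', 'big.bin', 'cc' * 20)],
                    localhashes=set(),
                    remotestat=lambda hashes: dict((h, 2) for h in hashes)))
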
--- a/hgext/largefiles/reposetup.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/largefiles/reposetup.py	Mon Jul 18 23:28:14 2016 -0500
@@ -7,14 +7,23 @@
 # GNU General Public License version 2 or any later version.
 
 '''setup for largefiles repositories: reposetup'''
+from __future__ import absolute_import
+
 import copy
 
-from mercurial import error, match as match_, error
 from mercurial.i18n import _
-from mercurial import scmutil, localrepo
 
-import lfcommands
-import lfutil
+from mercurial import (
+    error,
+    localrepo,
+    match as matchmod,
+    scmutil,
+)
+
+from . import (
+    lfcommands,
+    lfutil,
+)
 
 def reposetup(ui, repo):
     # wire repositories should be given new wireproto functions
@@ -94,7 +103,7 @@
             parentworking = working and ctx1 == self['.']
 
             if match is None:
-                match = match_.always(self.root, self.getcwd())
+                match = matchmod.always(self.root, self.getcwd())
 
             wlock = None
             try:
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/largefiles/storefactory.py	Mon Jul 18 23:28:14 2016 -0500
@@ -0,0 +1,78 @@
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from __future__ import absolute_import
+
+import re
+
+from mercurial.i18n import _
+
+from mercurial import (
+    error,
+    hg,
+    util,
+)
+
+from . import (
+    lfutil,
+    localstore,
+    wirestore,
+)
+
+# During clone this function is passed the src's ui object
+# but it needs the dest's ui object so it can read out of
+# the config file. Use repo.ui instead.
+def openstore(repo, remote=None, put=False):
+    ui = repo.ui
+
+    if not remote:
+        lfpullsource = getattr(repo, 'lfpullsource', None)
+        if lfpullsource:
+            path = ui.expandpath(lfpullsource)
+        elif put:
+            path = ui.expandpath('default-push', 'default')
+        else:
+            path = ui.expandpath('default')
+
+        # ui.expandpath() leaves 'default-push' and 'default' alone if
+    # they cannot be expanded: fall back to the empty string,
+        # meaning the current directory.
+        if path == 'default-push' or path == 'default':
+            path = ''
+            remote = repo
+        else:
+            path, _branches = hg.parseurl(path)
+            remote = hg.peer(repo, {}, path)
+
+    # The path could be a scheme so use Mercurial's normal functionality
+    # to resolve the scheme to a repository and use its path
+    path = util.safehasattr(remote, 'url') and remote.url() or remote.path
+
+    match = _scheme_re.match(path)
+    if not match:                       # regular filesystem path
+        scheme = 'file'
+    else:
+        scheme = match.group(1)
+
+    try:
+        storeproviders = _storeprovider[scheme]
+    except KeyError:
+        raise error.Abort(_('unsupported URL scheme %r') % scheme)
+
+    for classobj in storeproviders:
+        try:
+            return classobj(ui, repo, remote)
+        except lfutil.storeprotonotcapable:
+            pass
+
+    raise error.Abort(_('%s does not appear to be a largefile store') %
+                     util.hidepassword(path))
+
+_storeprovider = {
+    'file':  [localstore.localstore],
+    'http':  [wirestore.wirestore],
+    'https': [wirestore.wirestore],
+    'ssh': [wirestore.wirestore],
+    }
+
+_scheme_re = re.compile(r'^([a-zA-Z0-9+-.]+)://')
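
storefactory.py is the new home of the old basestore._openstore(); callers in this changeset now use storefactory.openstore(repo), openstore(repo, other), or openstore(rsrc, rdst, put=True). The scheme dispatch at its core is a small table lookup, sketched standalone below with stub classes standing in for localstore.localstore and wirestore.wirestore.

import re

_scheme_re = re.compile(r'^([a-zA-Z0-9+-.]+)://')

class filestore(object):          # stand-in for localstore.localstore
    def __init__(self, path):
        self.path = path

class wirestore(object):          # stand-in for wirestore.wirestore
    def __init__(self, path):
        self.path = path

_providers = {
    'file': filestore,
    'http': wirestore,
    'https': wirestore,
    'ssh': wirestore,
}

def pickstore(path):
    m = _scheme_re.match(path)
    scheme = m.group(1) if m else 'file'   # bare paths mean the filesystem
    if scheme not in _providers:
        raise ValueError('unsupported URL scheme %r' % scheme)
    return _providers[scheme](path)

print(type(pickstore('ssh://example.com/repo')).__name__)   # wirestore
print(type(pickstore('/srv/largefiles')).__name__)          # filestore
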
--- a/hgext/largefiles/uisetup.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/largefiles/uisetup.py	Mon Jul 18 23:28:14 2016 -0500
@@ -7,14 +7,36 @@
 # GNU General Public License version 2 or any later version.
 
 '''setup for largefiles extension: uisetup'''
+from __future__ import absolute_import
 
-from mercurial import archival, cmdutil, commands, extensions, filemerge, hg, \
-    httppeer, merge, scmutil, sshpeer, wireproto, subrepo, copies, exchange
 from mercurial.i18n import _
-from mercurial.hgweb import hgweb_mod, webcommands
+
+from mercurial.hgweb import (
+    hgweb_mod,
+    webcommands,
+)
 
-import overrides
-import proto
+from mercurial import (
+    archival,
+    cmdutil,
+    commands,
+    copies,
+    exchange,
+    extensions,
+    filemerge,
+    hg,
+    httppeer,
+    merge,
+    scmutil,
+    sshpeer,
+    subrepo,
+    wireproto,
+)
+
+from . import (
+    overrides,
+    proto,
+)
 
 def uisetup(ui):
     # Disable auto-status for some commands which assume that all
--- a/hgext/largefiles/wirestore.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/largefiles/wirestore.py	Mon Jul 18 23:28:14 2016 -0500
@@ -4,9 +4,12 @@
 # GNU General Public License version 2 or any later version.
 
 '''largefile store working over Mercurial's wire protocol'''
+from __future__ import absolute_import
 
-import lfutil
-import remotestore
+from . import (
+    lfutil,
+    remotestore,
+)
 
 class wirestore(remotestore.remotestore):
     def __init__(self, ui, repo, remote):
--- a/hgext/logtoprocess.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/logtoprocess.py	Mon Jul 18 23:28:14 2016 -0500
@@ -92,7 +92,7 @@
             Arguments are passed on as environment variables.
 
             """
-            script = ui.config('logtoprocess', event)
+            script = self.config('logtoprocess', event)
             if script:
                 if msg:
                     # try to format the log message given the remaining
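
The logtoprocess change swaps ui.config for self.config inside the wrapped ui class's log() method, presumably so the hook reads the configuration of the ui instance it is actually invoked on rather than the instance that happened to be passed to uisetup(). A much-simplified standalone sketch of why that distinction matters; baseui and the config key are made up for illustration.

class baseui(object):
    """Toy stand-in for Mercurial's ui; only config() is modelled."""
    def __init__(self, cfg=None):
        self._cfg = cfg or {}
    def config(self, section, name):
        return self._cfg.get((section, name))

def uisetup_sketch(ui):
    class logtoprocessui(ui.__class__):
        def log(self, event, *msg, **opts):
            # self.config: the instance this log() call runs on
            # ui.config:   the instance captured at uisetup() time
            return self.config('logtoprocess', event)
    ui.__class__ = logtoprocessui

setupui = baseui()                       # what uisetup() saw: empty config
uisetup_sketch(setupui)
liveui = setupui.__class__({('logtoprocess', 'commandfinish'): 'echo done'})
print(liveui.log('commandfinish'))       # 'echo done'; ui.config would be None
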
--- a/hgext/mq.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/mq.py	Mon Jul 18 23:28:14 2016 -0500
@@ -62,19 +62,39 @@
 in the strip extension.
 '''
 
+from __future__ import absolute_import
+
+import errno
+import os
+import re
+import shutil
 from mercurial.i18n import _
-from mercurial.node import bin, hex, short, nullid, nullrev
-from mercurial.lock import release
-from mercurial import commands, cmdutil, hg, scmutil, util, revset
-from mercurial import dispatch
-from mercurial import extensions, error, phases
-from mercurial import patch as patchmod
-from mercurial import lock as lockmod
-from mercurial import localrepo
-from mercurial import registrar
-from mercurial import subrepo
-import os, re, errno, shutil
-
+from mercurial.node import (
+    bin,
+    hex,
+    nullid,
+    nullrev,
+    short,
+)
+from mercurial import (
+    cmdutil,
+    commands,
+    dispatch,
+    error,
+    extensions,
+    hg,
+    localrepo,
+    lock as lockmod,
+    patch as patchmod,
+    phases,
+    registrar,
+    revset,
+    scmutil,
+    subrepo,
+    util,
+)
+
+release = lockmod.release
 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
 
 cmdtable = {}
--- a/hgext/notify.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/notify.py	Mon Jul 18 23:28:14 2016 -0500
@@ -139,6 +139,7 @@
 import socket
 import time
 
+from mercurial.i18n import _
 from mercurial import (
     cmdutil,
     error,
@@ -146,7 +147,6 @@
     patch,
     util,
 )
-from mercurial.i18n import _
 
 # Note for extension authors: ONLY specify testedwith = 'internal' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
@@ -363,7 +363,7 @@
             s = patch.diffstat(difflines)
             # s may be nil, don't include the header if it is
             if s:
-                self.ui.write('\ndiffstat:\n\n%s' % s)
+                self.ui.write(_('\ndiffstat:\n\n%s') % s)
 
         if maxdiff == 0:
             return
--- a/hgext/pager.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/pager.py	Mon Jul 18 23:28:14 2016 -0500
@@ -66,6 +66,7 @@
 import subprocess
 import sys
 
+from mercurial.i18n import _
 from mercurial import (
     cmdutil,
     commands,
@@ -73,7 +74,6 @@
     extensions,
     util,
     )
-from mercurial.i18n import _
 
 # Note for extension authors: ONLY specify testedwith = 'internal' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
--- a/hgext/patchbomb.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/patchbomb.py	Mon Jul 18 23:28:14 2016 -0500
@@ -71,6 +71,7 @@
 import socket
 import tempfile
 
+from mercurial.i18n import _
 from mercurial import (
     cmdutil,
     commands,
@@ -83,7 +84,6 @@
     util,
 )
 stringio = util.stringio
-from mercurial.i18n import _
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
@@ -708,13 +708,7 @@
                 fp.close()
         else:
             if not sendmail:
-                verifycert = ui.config('smtp', 'verifycert', 'strict')
-                if opts.get('insecure'):
-                    ui.setconfig('smtp', 'verifycert', 'loose', 'patchbomb')
-                try:
-                    sendmail = mail.connect(ui, mbox=mbox)
-                finally:
-                    ui.setconfig('smtp', 'verifycert', verifycert, 'patchbomb')
+                sendmail = mail.connect(ui, mbox=mbox)
             ui.status(_('sending '), subj, ' ...\n')
             ui.progress(_('sending'), i, item=subj, total=len(msgs),
                         unit=_('emails'))
--- a/hgext/purge.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/purge.py	Mon Jul 18 23:28:14 2016 -0500
@@ -27,6 +27,7 @@
 
 import os
 
+from mercurial.i18n import _
 from mercurial import (
     cmdutil,
     commands,
@@ -34,7 +35,6 @@
     scmutil,
     util,
 )
-from mercurial.i18n import _
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
@@ -84,13 +84,13 @@
     list of files that this program would delete, use the --print
     option.
     '''
-    act = not opts['print']
+    act = not opts.get('print')
     eol = '\n'
-    if opts['print0']:
+    if opts.get('print0'):
         eol = '\0'
         act = False # --print0 implies --print
-    removefiles = opts['files']
-    removedirs = opts['dirs']
+    removefiles = opts.get('files')
+    removedirs = opts.get('dirs')
     if not removefiles and not removedirs:
         removefiles = True
         removedirs = True
@@ -101,7 +101,7 @@
                 remove_func(repo.wjoin(name))
             except OSError:
                 m = _('%s cannot be removed') % name
-                if opts['abort_on_err']:
+                if opts.get('abort_on_err'):
                     raise error.Abort(m)
                 ui.warn(_('warning: %s\n') % m)
         else:
@@ -111,7 +111,7 @@
     if removedirs:
         directories = []
         match.explicitdir = match.traversedir = directories.append
-    status = repo.status(match=match, ignored=opts['all'], unknown=True)
+    status = repo.status(match=match, ignored=opts.get('all'), unknown=True)
 
     if removefiles:
         for f in sorted(status.unknown + status.ignored):
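
The purge hunks replace opts['...'] indexing with opts.get('...'), so purge() no longer raises KeyError when another caller invokes it with a partial options dict; the command-line parser always populates every option, but programmatic callers may not. A tiny standalone illustration with a made-up partial opts dict.

opts = {'print': True}                    # 'print0', 'all', ... missing

act = not opts.get('print')               # False, and no KeyError
if opts.get('print0'):                    # missing key -> None -> falsy
    act = False
print('%s %s' % (act, opts.get('abort_on_err')))   # "False None"
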
--- a/hgext/rebase.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/rebase.py	Mon Jul 18 23:28:14 2016 -0500
@@ -14,14 +14,42 @@
 https://mercurial-scm.org/wiki/RebaseExtension
 '''
 
-from mercurial import hg, util, repair, merge, cmdutil, commands, bookmarks
-from mercurial import extensions, patch, scmutil, phases, obsolete, error
-from mercurial import copies, destutil, repoview, registrar, revset
-from mercurial.commands import templateopts
-from mercurial.node import nullrev, nullid, hex, short
-from mercurial.lock import release
+from __future__ import absolute_import
+
+import errno
+import os
+
 from mercurial.i18n import _
-import os, errno
+from mercurial.node import (
+    hex,
+    nullid,
+    nullrev,
+    short,
+)
+from mercurial import (
+    bookmarks,
+    cmdutil,
+    commands,
+    copies,
+    destutil,
+    error,
+    extensions,
+    hg,
+    lock,
+    merge,
+    obsolete,
+    patch,
+    phases,
+    registrar,
+    repair,
+    repoview,
+    revset,
+    scmutil,
+    util,
+)
+
+release = lock.release
+templateopts = commands.templateopts
 
 # The following constants are used throughout the rebase module. The ordering of
 # their values must be maintained.
@@ -91,6 +119,394 @@
         sourceset = revset.getset(repo, revset.fullreposet(repo), x)
     return subset & revset.baseset([_destrebase(repo, sourceset)])
 
+class rebaseruntime(object):
+    """This class is a container for rebase runtime state"""
+    def __init__(self, repo, ui, opts=None):
+        if opts is None:
+            opts = {}
+
+        self.repo = repo
+        self.ui = ui
+        self.opts = opts
+        self.originalwd = None
+        self.external = nullrev
+        # Mapping between the old revision id and either the new rebased
+        # revision or what needs to be done with the old revision. The state
+        # dict holds most of the rebase progress state.
+        self.state = {}
+        self.activebookmark = None
+        self.currentbookmarks = None
+        self.target = None
+        self.skipped = set()
+        self.targetancestors = set()
+
+        self.collapsef = opts.get('collapse', False)
+        self.collapsemsg = cmdutil.logmessage(ui, opts)
+        self.date = opts.get('date', None)
+
+        e = opts.get('extrafn') # internal, used by e.g. hgsubversion
+        self.extrafns = [_savegraft]
+        if e:
+            self.extrafns = [e]
+
+        self.keepf = opts.get('keep', False)
+        self.keepbranchesf = opts.get('keepbranches', False)
+        # keepopen is not meant for use on the command line, but by
+        # other extensions
+        self.keepopen = opts.get('keepopen', False)
+        self.obsoletenotrebased = {}
+
+    def restorestatus(self):
+        """Restore a previously stored status"""
+        repo = self.repo
+        keepbranches = None
+        target = None
+        collapse = False
+        external = nullrev
+        activebookmark = None
+        state = {}
+
+        try:
+            f = repo.vfs("rebasestate")
+            for i, l in enumerate(f.read().splitlines()):
+                if i == 0:
+                    originalwd = repo[l].rev()
+                elif i == 1:
+                    target = repo[l].rev()
+                elif i == 2:
+                    external = repo[l].rev()
+                elif i == 3:
+                    collapse = bool(int(l))
+                elif i == 4:
+                    keep = bool(int(l))
+                elif i == 5:
+                    keepbranches = bool(int(l))
+                elif i == 6 and not (len(l) == 81 and ':' in l):
+                    # line 6 is a recent addition, so for backwards
+                    # compatibility check that the line doesn't look like the
+                    # oldrev:newrev lines
+                    activebookmark = l
+                else:
+                    oldrev, newrev = l.split(':')
+                    if newrev in (str(nullmerge), str(revignored),
+                                  str(revprecursor), str(revpruned)):
+                        state[repo[oldrev].rev()] = int(newrev)
+                    elif newrev == nullid:
+                        state[repo[oldrev].rev()] = revtodo
+                        # Legacy compat special case
+                    else:
+                        state[repo[oldrev].rev()] = repo[newrev].rev()
+
+        except IOError as err:
+            if err.errno != errno.ENOENT:
+                raise
+            cmdutil.wrongtooltocontinue(repo, _('rebase'))
+
+        if keepbranches is None:
+            raise error.Abort(_('.hg/rebasestate is incomplete'))
+
+        skipped = set()
+        # recompute the set of skipped revs
+        if not collapse:
+            seen = set([target])
+            for old, new in sorted(state.items()):
+                if new != revtodo and new in seen:
+                    skipped.add(old)
+                seen.add(new)
+        repo.ui.debug('computed skipped revs: %s\n' %
+                        (' '.join(str(r) for r in sorted(skipped)) or None))
+        repo.ui.debug('rebase status resumed\n')
+        _setrebasesetvisibility(repo, state.keys())
+
+        self.originalwd = originalwd
+        self.target = target
+        self.state = state
+        self.skipped = skipped
+        self.collapsef = collapse
+        self.keepf = keep
+        self.keepbranchesf = keepbranches
+        self.external = external
+        self.activebookmark = activebookmark
+
+    def _handleskippingobsolete(self, rebaserevs, obsoleterevs, target):
+        """Compute structures necessary for skipping obsolete revisions
+
+        rebaserevs:     iterable of all revisions that are to be rebased
+        obsoleterevs:   iterable of all obsolete revisions in rebaseset
+        target:         a destination revision for the rebase operation
+        """
+        self.obsoletenotrebased = {}
+        if not self.ui.configbool('experimental', 'rebaseskipobsolete',
+                                  default=True):
+            return
+        rebaseset = set(rebaserevs)
+        obsoleteset = set(obsoleterevs)
+        self.obsoletenotrebased = _computeobsoletenotrebased(self.repo,
+                                    obsoleteset, target)
+        skippedset = set(self.obsoletenotrebased)
+        _checkobsrebase(self.repo, self.ui, obsoleteset, rebaseset, skippedset)
+
+    def _prepareabortorcontinue(self, isabort):
+        try:
+            self.restorestatus()
+            self.collapsemsg = restorecollapsemsg(self.repo)
+        except error.RepoLookupError:
+            if isabort:
+                clearstatus(self.repo)
+                clearcollapsemsg(self.repo)
+                self.repo.ui.warn(_('rebase aborted (no revision is removed,'
+                                    ' only broken state is cleared)\n'))
+                return 0
+            else:
+                msg = _('cannot continue inconsistent rebase')
+                hint = _('use "hg rebase --abort" to clear broken state')
+                raise error.Abort(msg, hint=hint)
+        if isabort:
+            return abort(self.repo, self.originalwd, self.target,
+                         self.state, activebookmark=self.activebookmark)
+
+        obsrevs = (r for r, st in self.state.items() if st == revprecursor)
+        self._handleskippingobsolete(self.state.keys(), obsrevs, self.target)
+
+    def _preparenewrebase(self, dest, rebaseset):
+        if dest is None:
+            return _nothingtorebase()
+
+        allowunstable = obsolete.isenabled(self.repo, obsolete.allowunstableopt)
+        if (not (self.keepf or allowunstable)
+              and self.repo.revs('first(children(%ld) - %ld)',
+                                 rebaseset, rebaseset)):
+            raise error.Abort(
+                _("can't remove original changesets with"
+                  " unrebased descendants"),
+                hint=_('use --keep to keep original changesets'))
+
+        obsrevs = _filterobsoleterevs(self.repo, rebaseset)
+        self._handleskippingobsolete(rebaseset, obsrevs, dest)
+
+        result = buildstate(self.repo, dest, rebaseset, self.collapsef,
+                            self.obsoletenotrebased)
+
+        if not result:
+            # Empty state built, nothing to rebase
+            self.ui.status(_('nothing to rebase\n'))
+            return _nothingtorebase()
+
+        root = min(rebaseset)
+        if not self.keepf and not self.repo[root].mutable():
+            raise error.Abort(_("can't rebase public changeset %s")
+                             % self.repo[root],
+                             hint=_('see "hg help phases" for details'))
+
+        (self.originalwd, self.target, self.state) = result
+        if self.collapsef:
+            self.targetancestors = self.repo.changelog.ancestors(
+                                        [self.target],
+                                        inclusive=True)
+            self.external = externalparent(self.repo, self.state,
+                                              self.targetancestors)
+
+        if dest.closesbranch() and not self.keepbranchesf:
+            self.ui.status(_('reopening closed branch head %s\n') % dest)
+
+    def _performrebase(self):
+        repo, ui, opts = self.repo, self.ui, self.opts
+        if self.keepbranchesf:
+            # insert _savebranch at the start of extrafns so if
+            # there's a user-provided extrafn it can clobber branch if
+            # desired
+            self.extrafns.insert(0, _savebranch)
+            if self.collapsef:
+                branches = set()
+                for rev in self.state:
+                    branches.add(repo[rev].branch())
+                    if len(branches) > 1:
+                        raise error.Abort(_('cannot collapse multiple named '
+                            'branches'))
+
+        # Rebase
+        if not self.targetancestors:
+            self.targetancestors = repo.changelog.ancestors([self.target],
+                                                               inclusive=True)
+
+        # Keep track of the current bookmarks in order to reset them later
+        self.currentbookmarks = repo._bookmarks.copy()
+        self.activebookmark = self.activebookmark or repo._activebookmark
+        if self.activebookmark:
+            bookmarks.deactivate(repo)
+
+        sortedrevs = sorted(self.state)
+        total = len(self.state)
+        pos = 0
+        for rev in sortedrevs:
+            ctx = repo[rev]
+            desc = '%d:%s "%s"' % (ctx.rev(), ctx,
+                                   ctx.description().split('\n', 1)[0])
+            names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
+            if names:
+                desc += ' (%s)' % ' '.join(names)
+            pos += 1
+            if self.state[rev] == revtodo:
+                ui.status(_('rebasing %s\n') % desc)
+                ui.progress(_("rebasing"), pos, ("%d:%s" % (rev, ctx)),
+                            _('changesets'), total)
+                p1, p2, base = defineparents(repo, rev, self.target,
+                                             self.state,
+                                             self.targetancestors,
+                                             self.obsoletenotrebased)
+                storestatus(repo, self.originalwd, self.target,
+                            self.state, self.collapsef, self.keepf,
+                            self.keepbranchesf, self.external,
+                            self.activebookmark)
+                storecollapsemsg(repo, self.collapsemsg)
+                if len(repo[None].parents()) == 2:
+                    repo.ui.debug('resuming interrupted rebase\n')
+                else:
+                    try:
+                        ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
+                                     'rebase')
+                        stats = rebasenode(repo, rev, p1, base, self.state,
+                                           self.collapsef, self.target)
+                        if stats and stats[3] > 0:
+                            raise error.InterventionRequired(
+                                _('unresolved conflicts (see hg '
+                                  'resolve, then hg rebase --continue)'))
+                    finally:
+                        ui.setconfig('ui', 'forcemerge', '', 'rebase')
+                if not self.collapsef:
+                    merging = p2 != nullrev
+                    editform = cmdutil.mergeeditform(merging, 'rebase')
+                    editor = cmdutil.getcommiteditor(editform=editform, **opts)
+                    newnode = concludenode(repo, rev, p1, p2,
+                                           extrafn=_makeextrafn(self.extrafns),
+                                           editor=editor,
+                                           keepbranches=self.keepbranchesf,
+                                           date=self.date)
+                else:
+                    # Skip commit if we are collapsing
+                    repo.dirstate.beginparentchange()
+                    repo.setparents(repo[p1].node())
+                    repo.dirstate.endparentchange()
+                    newnode = None
+                # Update the state
+                if newnode is not None:
+                    self.state[rev] = repo[newnode].rev()
+                    ui.debug('rebased as %s\n' % short(newnode))
+                else:
+                    if not self.collapsef:
+                        ui.warn(_('note: rebase of %d:%s created no changes '
+                                  'to commit\n') % (rev, ctx))
+                        self.skipped.add(rev)
+                    self.state[rev] = p1
+                    ui.debug('next revision set to %s\n' % p1)
+            elif self.state[rev] == nullmerge:
+                ui.debug('ignoring null merge rebase of %s\n' % rev)
+            elif self.state[rev] == revignored:
+                ui.status(_('not rebasing ignored %s\n') % desc)
+            elif self.state[rev] == revprecursor:
+                targetctx = repo[self.obsoletenotrebased[rev]]
+                desctarget = '%d:%s "%s"' % (targetctx.rev(), targetctx,
+                             targetctx.description().split('\n', 1)[0])
+                msg = _('note: not rebasing %s, already in destination as %s\n')
+                ui.status(msg % (desc, desctarget))
+            elif self.state[rev] == revpruned:
+                msg = _('note: not rebasing %s, it has no successor\n')
+                ui.status(msg % desc)
+            else:
+                ui.status(_('already rebased %s as %s\n') %
+                          (desc, repo[self.state[rev]]))
+
+        ui.progress(_('rebasing'), None)
+        ui.note(_('rebase merging completed\n'))
+
+    def _finishrebase(self):
+        repo, ui, opts = self.repo, self.ui, self.opts
+        if self.collapsef and not self.keepopen:
+            p1, p2, _base = defineparents(repo, min(self.state),
+                                          self.target, self.state,
+                                          self.targetancestors,
+                                          self.obsoletenotrebased)
+            editopt = opts.get('edit')
+            editform = 'rebase.collapse'
+            if self.collapsemsg:
+                commitmsg = self.collapsemsg
+            else:
+                commitmsg = 'Collapsed revision'
+                for rebased in self.state:
+                    if rebased not in self.skipped and\
+                       self.state[rebased] > nullmerge:
+                        commitmsg += '\n* %s' % repo[rebased].description()
+                editopt = True
+            editor = cmdutil.getcommiteditor(edit=editopt, editform=editform)
+            revtoreuse = max(self.state)
+            newnode = concludenode(repo, revtoreuse, p1, self.external,
+                                   commitmsg=commitmsg,
+                                   extrafn=_makeextrafn(self.extrafns),
+                                   editor=editor,
+                                   keepbranches=self.keepbranchesf,
+                                   date=self.date)
+            if newnode is None:
+                newrev = self.target
+            else:
+                newrev = repo[newnode].rev()
+            for oldrev in self.state.iterkeys():
+                if self.state[oldrev] > nullmerge:
+                    self.state[oldrev] = newrev
+
+        if 'qtip' in repo.tags():
+            updatemq(repo, self.state, self.skipped, **opts)
+
+        if self.currentbookmarks:
+            # Nodeids are needed to reset bookmarks
+            nstate = {}
+            for k, v in self.state.iteritems():
+                if v > nullmerge:
+                    nstate[repo[k].node()] = repo[v].node()
+                elif v == revprecursor:
+                    succ = self.obsoletenotrebased[k]
+                    nstate[repo[k].node()] = repo[succ].node()
+            # XXX this is the same as dest.node() for the non-continue path --
+            # this should probably be cleaned up
+            targetnode = repo[self.target].node()
+
+        # restore original working directory
+        # (we do this before stripping)
+        newwd = self.state.get(self.originalwd, self.originalwd)
+        if newwd == revprecursor:
+            newwd = self.obsoletenotrebased[self.originalwd]
+        elif newwd < 0:
+            # original directory is a parent of rebase set root or ignored
+            newwd = self.originalwd
+        if newwd not in [c.rev() for c in repo[None].parents()]:
+            ui.note(_("update back to initial working directory parent\n"))
+            hg.updaterepo(repo, newwd, False)
+
+        if not self.keepf:
+            collapsedas = None
+            if self.collapsef:
+                collapsedas = newnode
+            clearrebased(ui, repo, self.state, self.skipped, collapsedas)
+
+        with repo.transaction('bookmark') as tr:
+            if self.currentbookmarks:
+                updatebookmarks(repo, targetnode, nstate,
+                                self.currentbookmarks, tr)
+                if self.activebookmark not in repo._bookmarks:
+                    # active bookmark was divergent one and has been deleted
+                    self.activebookmark = None
+        clearstatus(repo)
+        clearcollapsemsg(repo)
+
+        ui.note(_("rebase completed\n"))
+        util.unlinkpath(repo.sjoin('undo'), ignoremissing=True)
+        if self.skipped:
+            skippedlen = len(self.skipped)
+            ui.note(_("%d revisions have been skipped\n") % skippedlen)
+
+        if (self.activebookmark and
+            repo['.'].node() == repo._bookmarks[self.activebookmark]):
+                bookmarks.activate(repo, self.activebookmark)
+
 @command('rebase',
     [('s', 'source', '',
      _('rebase the specified changeset and descendants'), _('REV')),
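
The rebaseruntime class above collects what used to be a dozen-plus local variables of rebase(); the hunks below then shrink the command body to preparing and driving that one object. The following is a control-flow sketch written as if it lived inside hgext/rebase.py next to the class; locking, option validation, and the exact call sites of _performrebase()/_finishrebase() fall outside the hunks shown here, so this is a hedged reconstruction, not the changeset's code.

def rebase_sketch(ui, repo, **opts):
    # Sketch only: error handling and wlock/lock acquisition are elided.
    rbsrt = rebaseruntime(repo, ui, opts)
    if opts.get('continue') or opts.get('abort'):
        retcode = rbsrt._prepareabortorcontinue(opts.get('abort'))
    else:
        dest, rebaseset = _definesets(ui, repo, opts.get('dest'),
                                      opts.get('source'), opts.get('base'),
                                      opts.get('rev'))
        retcode = rbsrt._preparenewrebase(dest, rebaseset)
    if retcode is not None:
        return retcode
    rbsrt._performrebase()      # assumed call order, see note above
    rbsrt._finishrebase()
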
@@ -201,16 +617,7 @@
     unresolved conflicts.
 
     """
-    originalwd = target = None
-    activebookmark = None
-    external = nullrev
-    # Mapping between the old revision id and either what is the new rebased
-    # revision or what needs to be done with the old revision. The state dict
-    # will be what contains most of the rebase progress state.
-    state = {}
-    skipped = set()
-    targetancestors = set()
-
+    rbsrt = rebaseruntime(repo, ui, opts)
 
     lock = wlock = None
     try:
@@ -227,19 +634,6 @@
         destspace = opts.get('_destspace')
         contf = opts.get('continue')
         abortf = opts.get('abort')
-        collapsef = opts.get('collapse', False)
-        collapsemsg = cmdutil.logmessage(ui, opts)
-        date = opts.get('date', None)
-        e = opts.get('extrafn') # internal, used by e.g. hgsubversion
-        extrafns = [_savegraft]
-        if e:
-            extrafns = [e]
-        keepf = opts.get('keep', False)
-        keepbranchesf = opts.get('keepbranches', False)
-        # keepopen is not meant for use on the command line, but by
-        # other extensions
-        keepopen = opts.get('keepopen', False)
-
         if opts.get('interactive'):
             try:
                 if extensions.find('histedit'):
@@ -251,14 +645,14 @@
                     "'histedit' extension (see \"%s\")") % help
             raise error.Abort(msg)
 
-        if collapsemsg and not collapsef:
+        if rbsrt.collapsemsg and not rbsrt.collapsef:
             raise error.Abort(
                 _('message can only be specified with collapse'))
 
         if contf or abortf:
             if contf and abortf:
                 raise error.Abort(_('cannot use both abort and continue'))
-            if collapsef:
+            if rbsrt.collapsef:
                 raise error.Abort(
                     _('cannot use collapse with continue or abort'))
             if srcf or basef or destf:
@@ -267,265 +661,18 @@
             if abortf and opts.get('tool', False):
                 ui.warn(_('tool option will be ignored\n'))
 
-            try:
-                (originalwd, target, state, skipped, collapsef, keepf,
-                 keepbranchesf, external, activebookmark) = restorestatus(repo)
-                collapsemsg = restorecollapsemsg(repo)
-            except error.RepoLookupError:
-                if abortf:
-                    clearstatus(repo)
-                    clearcollapsemsg(repo)
-                    repo.ui.warn(_('rebase aborted (no revision is removed,'
-                                   ' only broken state is cleared)\n'))
-                    return 0
-                else:
-                    msg = _('cannot continue inconsistent rebase')
-                    hint = _('use "hg rebase --abort" to clear broken state')
-                    raise error.Abort(msg, hint=hint)
-            if abortf:
-                return abort(repo, originalwd, target, state,
-                             activebookmark=activebookmark)
-
-            obsoletenotrebased = {}
-            if ui.configbool('experimental', 'rebaseskipobsolete',
-                             default=True):
-                rebaseobsrevs = set([r for r, status in state.items()
-                                     if status == revprecursor])
-                rebasesetrevs = set(state.keys())
-                obsoletenotrebased = _computeobsoletenotrebased(repo,
-                                                                rebaseobsrevs,
-                                                                target)
-                rebaseobsskipped = set(obsoletenotrebased)
-                _checkobsrebase(repo, ui, rebaseobsrevs, rebasesetrevs,
-                                rebaseobsskipped)
+            retcode = rbsrt._prepareabortorcontinue(abortf)
+            if retcode is not None:
+                return retcode
         else:
             dest, rebaseset = _definesets(ui, repo, destf, srcf, basef, revf,
                                           destspace=destspace)
-            if dest is None:
-                return _nothingtorebase()
-
-            allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
-            if (not (keepf or allowunstable)
-                  and repo.revs('first(children(%ld) - %ld)',
-                                rebaseset, rebaseset)):
-                raise error.Abort(
-                    _("can't remove original changesets with"
-                      " unrebased descendants"),
-                    hint=_('use --keep to keep original changesets'))
-
-            obsoletenotrebased = {}
-            if ui.configbool('experimental', 'rebaseskipobsolete',
-                             default=True):
-                rebasesetrevs = set(rebaseset)
-                rebaseobsrevs = _filterobsoleterevs(repo, rebasesetrevs)
-                obsoletenotrebased = _computeobsoletenotrebased(repo,
-                                                                rebaseobsrevs,
-                                                                dest)
-                rebaseobsskipped = set(obsoletenotrebased)
-                _checkobsrebase(repo, ui, rebaseobsrevs,
-                                              rebasesetrevs,
-                                              rebaseobsskipped)
-
-            result = buildstate(repo, dest, rebaseset, collapsef,
-                                obsoletenotrebased)
-
-            if not result:
-                # Empty state built, nothing to rebase
-                ui.status(_('nothing to rebase\n'))
-                return _nothingtorebase()
-
-            root = min(rebaseset)
-            if not keepf and not repo[root].mutable():
-                raise error.Abort(_("can't rebase public changeset %s")
-                                 % repo[root],
-                                 hint=_('see "hg help phases" for details'))
-
-            originalwd, target, state = result
-            if collapsef:
-                targetancestors = repo.changelog.ancestors([target],
-                                                           inclusive=True)
-                external = externalparent(repo, state, targetancestors)
-
-            if dest.closesbranch() and not keepbranchesf:
-                ui.status(_('reopening closed branch head %s\n') % dest)
-
-        if keepbranchesf:
-            # insert _savebranch at the start of extrafns so if
-            # there's a user-provided extrafn it can clobber branch if
-            # desired
-            extrafns.insert(0, _savebranch)
-            if collapsef:
-                branches = set()
-                for rev in state:
-                    branches.add(repo[rev].branch())
-                    if len(branches) > 1:
-                        raise error.Abort(_('cannot collapse multiple named '
-                            'branches'))
-
-        # Rebase
-        if not targetancestors:
-            targetancestors = repo.changelog.ancestors([target], inclusive=True)
-
-        # Keep track of the current bookmarks in order to reset them later
-        currentbookmarks = repo._bookmarks.copy()
-        activebookmark = activebookmark or repo._activebookmark
-        if activebookmark:
-            bookmarks.deactivate(repo)
-
-        extrafn = _makeextrafn(extrafns)
+            retcode = rbsrt._preparenewrebase(dest, rebaseset)
+            if retcode is not None:
+                return retcode
 
-        sortedstate = sorted(state)
-        total = len(sortedstate)
-        pos = 0
-        for rev in sortedstate:
-            ctx = repo[rev]
-            desc = '%d:%s "%s"' % (ctx.rev(), ctx,
-                                   ctx.description().split('\n', 1)[0])
-            names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
-            if names:
-                desc += ' (%s)' % ' '.join(names)
-            pos += 1
-            if state[rev] == revtodo:
-                ui.status(_('rebasing %s\n') % desc)
-                ui.progress(_("rebasing"), pos, ("%d:%s" % (rev, ctx)),
-                            _('changesets'), total)
-                p1, p2, base = defineparents(repo, rev, target, state,
-                                             targetancestors)
-                storestatus(repo, originalwd, target, state, collapsef, keepf,
-                            keepbranchesf, external, activebookmark)
-                storecollapsemsg(repo, collapsemsg)
-                if len(repo[None].parents()) == 2:
-                    repo.ui.debug('resuming interrupted rebase\n')
-                else:
-                    try:
-                        ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
-                                     'rebase')
-                        stats = rebasenode(repo, rev, p1, base, state,
-                                           collapsef, target)
-                        if stats and stats[3] > 0:
-                            raise error.InterventionRequired(
-                                _('unresolved conflicts (see hg '
-                                  'resolve, then hg rebase --continue)'))
-                    finally:
-                        ui.setconfig('ui', 'forcemerge', '', 'rebase')
-                if not collapsef:
-                    merging = p2 != nullrev
-                    editform = cmdutil.mergeeditform(merging, 'rebase')
-                    editor = cmdutil.getcommiteditor(editform=editform, **opts)
-                    newnode = concludenode(repo, rev, p1, p2, extrafn=extrafn,
-                                           editor=editor,
-                                           keepbranches=keepbranchesf,
-                                           date=date)
-                else:
-                    # Skip commit if we are collapsing
-                    repo.dirstate.beginparentchange()
-                    repo.setparents(repo[p1].node())
-                    repo.dirstate.endparentchange()
-                    newnode = None
-                # Update the state
-                if newnode is not None:
-                    state[rev] = repo[newnode].rev()
-                    ui.debug('rebased as %s\n' % short(newnode))
-                else:
-                    if not collapsef:
-                        ui.warn(_('note: rebase of %d:%s created no changes '
-                                  'to commit\n') % (rev, ctx))
-                        skipped.add(rev)
-                    state[rev] = p1
-                    ui.debug('next revision set to %s\n' % p1)
-            elif state[rev] == nullmerge:
-                ui.debug('ignoring null merge rebase of %s\n' % rev)
-            elif state[rev] == revignored:
-                ui.status(_('not rebasing ignored %s\n') % desc)
-            elif state[rev] == revprecursor:
-                targetctx = repo[obsoletenotrebased[rev]]
-                desctarget = '%d:%s "%s"' % (targetctx.rev(), targetctx,
-                             targetctx.description().split('\n', 1)[0])
-                msg = _('note: not rebasing %s, already in destination as %s\n')
-                ui.status(msg % (desc, desctarget))
-            elif state[rev] == revpruned:
-                msg = _('note: not rebasing %s, it has no successor\n')
-                ui.status(msg % desc)
-            else:
-                ui.status(_('already rebased %s as %s\n') %
-                          (desc, repo[state[rev]]))
-
-        ui.progress(_('rebasing'), None)
-        ui.note(_('rebase merging completed\n'))
-
-        if collapsef and not keepopen:
-            p1, p2, _base = defineparents(repo, min(state), target,
-                                          state, targetancestors)
-            editopt = opts.get('edit')
-            editform = 'rebase.collapse'
-            if collapsemsg:
-                commitmsg = collapsemsg
-            else:
-                commitmsg = 'Collapsed revision'
-                for rebased in state:
-                    if rebased not in skipped and state[rebased] > nullmerge:
-                        commitmsg += '\n* %s' % repo[rebased].description()
-                editopt = True
-            editor = cmdutil.getcommiteditor(edit=editopt, editform=editform)
-            newnode = concludenode(repo, rev, p1, external, commitmsg=commitmsg,
-                                   extrafn=extrafn, editor=editor,
-                                   keepbranches=keepbranchesf,
-                                   date=date)
-            if newnode is None:
-                newrev = target
-            else:
-                newrev = repo[newnode].rev()
-            for oldrev in state.iterkeys():
-                if state[oldrev] > nullmerge:
-                    state[oldrev] = newrev
-
-        if 'qtip' in repo.tags():
-            updatemq(repo, state, skipped, **opts)
-
-        if currentbookmarks:
-            # Nodeids are needed to reset bookmarks
-            nstate = {}
-            for k, v in state.iteritems():
-                if v > nullmerge:
-                    nstate[repo[k].node()] = repo[v].node()
-            # XXX this is the same as dest.node() for the non-continue path --
-            # this should probably be cleaned up
-            targetnode = repo[target].node()
-
-        # restore original working directory
-        # (we do this before stripping)
-        newwd = state.get(originalwd, originalwd)
-        if newwd < 0:
-            # original directory is a parent of rebase set root or ignored
-            newwd = originalwd
-        if newwd not in [c.rev() for c in repo[None].parents()]:
-            ui.note(_("update back to initial working directory parent\n"))
-            hg.updaterepo(repo, newwd, False)
-
-        if not keepf:
-            collapsedas = None
-            if collapsef:
-                collapsedas = newnode
-            clearrebased(ui, repo, state, skipped, collapsedas)
-
-        with repo.transaction('bookmark') as tr:
-            if currentbookmarks:
-                updatebookmarks(repo, targetnode, nstate, currentbookmarks, tr)
-                if activebookmark not in repo._bookmarks:
-                    # active bookmark was divergent one and has been deleted
-                    activebookmark = None
-        clearstatus(repo)
-        clearcollapsemsg(repo)
-
-        ui.note(_("rebase completed\n"))
-        util.unlinkpath(repo.sjoin('undo'), ignoremissing=True)
-        if skipped:
-            ui.note(_("%d revisions have been skipped\n") % len(skipped))
-
-        if (activebookmark and
-            repo['.'].node() == repo._bookmarks[activebookmark]):
-                bookmarks.activate(repo, activebookmark)
-
+        rbsrt._performrebase()
+        rbsrt._finishrebase()
     finally:
         release(lock, wlock)
 
@@ -733,21 +880,12 @@
               "experimental.allowdivergence=True")
         raise error.Abort(msg % (",".join(divhashes),), hint=h)
 
-    # - plain prune (no successor) changesets are rebased
-    # - split changesets are not rebased if at least one of the
-    # changeset resulting from the split is an ancestor of dest
-    rebaseset = rebasesetrevs - rebaseobsskipped
-    if rebasesetrevs and not rebaseset:
-        msg = _('all requested changesets have equivalents '
-                'or were marked as obsolete')
-        hint = _('to force the rebase, set the config '
-                 'experimental.rebaseskipobsolete to False')
-        raise error.Abort(msg, hint=hint)
-
-def defineparents(repo, rev, target, state, targetancestors):
+def defineparents(repo, rev, target, state, targetancestors,
+                  obsoletenotrebased):
     'Return the new parent relationship of the revision that will be rebased'
     parents = repo[rev].parents()
     p1 = p2 = nullrev
+    rp1 = None
 
     p1n = parents[0].rev()
     if p1n in targetancestors:
@@ -771,6 +909,8 @@
         if p2n in state:
             if p1 == target: # p1n in targetancestors or external
                 p1 = state[p2n]
+                if p1 == revprecursor:
+                    rp1 = obsoletenotrebased[p2n]
             elif state[p2n] in revskipped:
                 p2 = nearestrebased(repo, p2n, state)
                 if p2 is None:
@@ -784,7 +924,7 @@
                         'would have 3 parents') % rev)
             p2 = p2n
     repo.ui.debug(" future parents are %d and %d\n" %
-                            (repo[p1].rev(), repo[p2].rev()))
+                            (repo[rp1 or p1].rev(), repo[p2].rev()))
 
     if not any(p.rev() in state for p in parents):
         # Case (1) root changeset of a non-detaching rebase set.
@@ -828,6 +968,8 @@
         # make it feasible to consider different cases separately. In these
         # other cases we currently just leave it to the user to correctly
         # resolve an impossible merge using a wrong ancestor.
+        #
+        # xx, p1 could be -4, and both parents could probably be -4...
         for p in repo[rev].parents():
             if state.get(p.rev()) == p1:
                 base = p.rev()
@@ -838,7 +980,7 @@
             # Raise because this function is called wrong (see issue 4106)
             raise AssertionError('no base found to rebase on '
                                  '(defineparents called wrong)')
-    return p1, p2, base
+    return rp1 or p1, p2, base
 
 def isagitpatch(repo, patchname):
     'Return true if the given patch is in git format'
@@ -952,68 +1094,6 @@
     _clearrebasesetvisibiliy(repo)
     util.unlinkpath(repo.join("rebasestate"), ignoremissing=True)
 
-def restorestatus(repo):
-    'Restore a previously stored status'
-    keepbranches = None
-    target = None
-    collapse = False
-    external = nullrev
-    activebookmark = None
-    state = {}
-
-    try:
-        f = repo.vfs("rebasestate")
-        for i, l in enumerate(f.read().splitlines()):
-            if i == 0:
-                originalwd = repo[l].rev()
-            elif i == 1:
-                target = repo[l].rev()
-            elif i == 2:
-                external = repo[l].rev()
-            elif i == 3:
-                collapse = bool(int(l))
-            elif i == 4:
-                keep = bool(int(l))
-            elif i == 5:
-                keepbranches = bool(int(l))
-            elif i == 6 and not (len(l) == 81 and ':' in l):
-                # line 6 is a recent addition, so for backwards compatibility
-                # check that the line doesn't look like the oldrev:newrev lines
-                activebookmark = l
-            else:
-                oldrev, newrev = l.split(':')
-                if newrev in (str(nullmerge), str(revignored),
-                              str(revprecursor), str(revpruned)):
-                    state[repo[oldrev].rev()] = int(newrev)
-                elif newrev == nullid:
-                    state[repo[oldrev].rev()] = revtodo
-                    # Legacy compat special case
-                else:
-                    state[repo[oldrev].rev()] = repo[newrev].rev()
-
-    except IOError as err:
-        if err.errno != errno.ENOENT:
-            raise
-        cmdutil.wrongtooltocontinue(repo, _('rebase'))
-
-    if keepbranches is None:
-        raise error.Abort(_('.hg/rebasestate is incomplete'))
-
-    skipped = set()
-    # recompute the set of skipped revs
-    if not collapse:
-        seen = set([target])
-        for old, new in sorted(state.items()):
-            if new != revtodo and new in seen:
-                skipped.add(old)
-            seen.add(new)
-    repo.ui.debug('computed skipped revs: %s\n' %
-                    (' '.join(str(r) for r in sorted(skipped)) or None))
-    repo.ui.debug('rebase status resumed\n')
-    _setrebasesetvisibility(repo, state.keys())
-    return (originalwd, target, state, skipped,
-            collapse, keep, keepbranches, external, activebookmark)
-
 def needupdate(repo, state):
     '''check whether we should `update --clean` away from a merge, or if
     somehow the working dir got forcibly updated, e.g. by older hg'''
@@ -1336,7 +1416,9 @@
     if not os.path.exists(repo.join('rebasestate')):
         return
     try:
-        state = restorestatus(repo)[2]
+        rbsrt = rebaseruntime(repo, ui, {})
+        rbsrt.restorestatus()
+        state = rbsrt.state
     except error.RepoLookupError:
         # i18n: column positioning for "hg summary"
         msg = _('rebase: (use "hg rebase --abort" to clear broken state)\n')
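
The deleted restorestatus() above doubles as documentation of the .hg/rebasestate line layout, which the new rebaseruntime.restorestatus() keeps reading. A rough sketch of a parser for that layout, mirroring the deleted code (illustration only, not Mercurial API):

    # Illustration only: field meanings taken from the removed restorestatus().
    def parserebasestate(text):
        lines = text.splitlines()
        state = {
            'originalwd': lines[0],     # node of the original working dir parent
            'target': lines[1],         # node of the rebase destination
            'external': lines[2],       # node of the external parent, if any
            'collapse': bool(int(lines[3])),
            'keep': bool(int(lines[4])),
            'keepbranches': bool(int(lines[5])),
            'activebookmark': None,
            'revs': {},                 # old node -> new node or sentinel value
        }
        rest = lines[6:]
        # line 6 is a later addition (the active bookmark); it is told apart
        # from the oldrev:newrev records, which are 81 chars and contain ':'
        if rest and not (len(rest[0]) == 81 and ':' in rest[0]):
            state['activebookmark'] = rest[0]
            rest = rest[1:]
        for l in rest:
            oldrev, newrev = l.split(':')
            state['revs'][oldrev] = newrev
        return state
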
--- a/hgext/record.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/record.py	Mon Jul 18 23:28:14 2016 -0500
@@ -12,13 +12,13 @@
 
 from __future__ import absolute_import
 
+from mercurial.i18n import _
 from mercurial import (
     cmdutil,
     commands,
     error,
     extensions,
 )
-from mercurial.i18n import _
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
--- a/hgext/relink.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/relink.py	Mon Jul 18 23:28:14 2016 -0500
@@ -11,13 +11,13 @@
 import os
 import stat
 
+from mercurial.i18n import _
 from mercurial import (
     cmdutil,
     error,
     hg,
     util,
 )
-from mercurial.i18n import _
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
--- a/hgext/schemes.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/schemes.py	Mon Jul 18 23:28:14 2016 -0500
@@ -43,6 +43,8 @@
 
 import os
 import re
+
+from mercurial.i18n import _
 from mercurial import (
     cmdutil,
     error,
@@ -51,7 +53,6 @@
     templater,
     util,
 )
-from mercurial.i18n import _
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
--- a/hgext/share.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/share.py	Mon Jul 18 23:28:14 2016 -0500
@@ -37,10 +37,22 @@
     The default naming mode is "identity."
 '''
 
+from __future__ import absolute_import
+
+import errno
 from mercurial.i18n import _
-from mercurial import cmdutil, commands, hg, util, extensions, bookmarks, error
-from mercurial.hg import repository, parseurl
-import errno
+from mercurial import (
+    bookmarks,
+    cmdutil,
+    commands,
+    error,
+    extensions,
+    hg,
+    util,
+)
+
+repository = hg.repository
+parseurl = hg.parseurl
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
@@ -135,7 +147,7 @@
         if inst.errno != errno.ENOENT:
             raise
         return False
-    return 'bookmarks' in shared
+    return hg.sharedbookmarks in shared
 
 def _getsrcrepo(repo):
     """
@@ -145,10 +157,15 @@
     if repo.sharedpath == repo.path:
         return None
 
+    if util.safehasattr(repo, 'srcrepo') and repo.srcrepo:
+        return repo.srcrepo
+
     # the sharedpath always ends in the .hg; we want the path to the repo
     source = repo.vfs.split(repo.sharedpath)[0]
     srcurl, branches = parseurl(source)
-    return repository(repo.ui, srcurl)
+    srcrepo = repository(repo.ui, srcurl)
+    repo.srcrepo = srcrepo
+    return srcrepo
 
 def getbkfile(orig, repo):
     if _hassharedbookmarks(repo):
--- a/hgext/shelve.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/shelve.py	Mon Jul 18 23:28:14 2016 -0500
@@ -25,6 +25,8 @@
 import collections
 import errno
 import itertools
+
+from mercurial.i18n import _
 from mercurial import (
     bundle2,
     bundlerepo,
@@ -45,7 +47,6 @@
     templatefilters,
     util,
 )
-from mercurial.i18n import _
 
 from . import (
     rebase,
@@ -164,21 +165,26 @@
                 raise error.Abort(_('this version of shelve is incompatible '
                                    'with the version used in this repo'))
             name = fp.readline().strip()
-            wctx = fp.readline().strip()
-            pendingctx = fp.readline().strip()
+            wctx = nodemod.bin(fp.readline().strip())
+            pendingctx = nodemod.bin(fp.readline().strip())
             parents = [nodemod.bin(h) for h in fp.readline().split()]
             stripnodes = [nodemod.bin(h) for h in fp.readline().split()]
             branchtorestore = fp.readline().strip()
+        except (ValueError, TypeError) as err:
+            raise error.CorruptedState(str(err))
         finally:
             fp.close()
 
-        obj = cls()
-        obj.name = name
-        obj.wctx = repo[nodemod.bin(wctx)]
-        obj.pendingctx = repo[nodemod.bin(pendingctx)]
-        obj.parents = parents
-        obj.stripnodes = stripnodes
-        obj.branchtorestore = branchtorestore
+        try:
+            obj = cls()
+            obj.name = name
+            obj.wctx = repo[wctx]
+            obj.pendingctx = repo[pendingctx]
+            obj.parents = parents
+            obj.stripnodes = stripnodes
+            obj.branchtorestore = branchtorestore
+        except error.RepoLookupError as err:
+            raise error.CorruptedState(str(err))
 
         return obj
 
@@ -225,28 +231,10 @@
 def _aborttransaction(repo):
     '''Abort current transaction for shelve/unshelve, but keep dirstate
     '''
-    backupname = 'dirstate.shelve'
-    dirstatebackup = None
-    try:
-        # create backup of (un)shelved dirstate, because aborting transaction
-        # should restore dirstate to one at the beginning of the
-        # transaction, which doesn't include the result of (un)shelving
-        fp = repo.vfs.open(backupname, "w")
-        dirstatebackup = backupname
-        # clearing _dirty/_dirtypl of dirstate by _writedirstate below
-        # is unintentional. but it doesn't cause problem in this case,
-        # because no code path refers them until transaction is aborted.
-        repo.dirstate._writedirstate(fp) # write in-memory changes forcibly
-
-        tr = repo.currenttransaction()
-        tr.abort()
-
-        # restore to backuped dirstate
-        repo.vfs.rename(dirstatebackup, 'dirstate')
-        dirstatebackup = None
-    finally:
-        if dirstatebackup:
-            repo.vfs.unlink(dirstatebackup)
+    tr = repo.currenttransaction()
+    repo.dirstate.savebackup(tr, suffix='.shelve')
+    tr.abort()
+    repo.dirstate.restorebackup(None, suffix='.shelve')
 
 def createcmd(ui, repo, pats, opts):
     """subcommand that creates a new shelve"""
@@ -683,6 +671,20 @@
             if err.errno != errno.ENOENT:
                 raise
             cmdutil.wrongtooltocontinue(repo, _('unshelve'))
+        except error.CorruptedState as err:
+            ui.debug(str(err) + '\n')
+            if continuef:
+                msg = _('corrupted shelved state file')
+                hint = _('please run hg unshelve --abort to abort unshelve '
+                         'operation')
+                raise error.Abort(msg, hint=hint)
+            elif abortf:
+                msg = _('could not read shelved state file, your working copy '
+                        'may be in an unexpected state\nplease update to some '
+                        'commit\n')
+                ui.warn(msg)
+                shelvedstate.clear(repo)
+            return
 
         if abortf:
             return unshelveabort(ui, repo, state, opts)
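
The shelvedstate.load() hunk above also shows the line-oriented layout of the shelved state file: a format version, then the shelve name, the working and pending context nodes, the parents, the nodes to strip, and the branch to restore. A minimal reader mirroring that order (illustration only; the helper name is hypothetical, not shelve's API), where bad hex or a missing field maps naturally onto the new CorruptedState handling:

    import binascii

    def readshelvedstate(lines):
        # lines[0] carries the format version, checked before the fields below
        return {
            'name': lines[1],
            'wctx': binascii.unhexlify(lines[2]),       # nodemod.bin() equivalent
            'pendingctx': binascii.unhexlify(lines[3]),
            'parents': [binascii.unhexlify(h) for h in lines[4].split()],
            'stripnodes': [binascii.unhexlify(h) for h in lines[5].split()],
            'branchtorestore': lines[6],
        }
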
--- a/hgext/strip.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/strip.py	Mon Jul 18 23:28:14 2016 -0500
@@ -5,6 +5,7 @@
 """
 from __future__ import absolute_import
 
+from mercurial.i18n import _
 from mercurial import (
     bookmarks as bookmarksmod,
     cmdutil,
@@ -17,7 +18,6 @@
     scmutil,
     util,
 )
-from mercurial.i18n import _
 nullid = nodemod.nullid
 release = lockmod.release
 
--- a/hgext/win32mbcs.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/win32mbcs.py	Mon Jul 18 23:28:14 2016 -0500
@@ -49,11 +49,11 @@
 import os
 import sys
 
+from mercurial.i18n import _
 from mercurial import (
     encoding,
     error,
 )
-from mercurial.i18n import _
 
 # Note for extension authors: ONLY specify testedwith = 'internal' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
@@ -192,5 +192,5 @@
         # command line options is not yet applied when
         # extensions.loadall() is called.
         if '--debug' in sys.argv:
-            ui.write("[win32mbcs] activated with encoding: %s\n"
+            ui.write(("[win32mbcs] activated with encoding: %s\n")
                      % _encoding)
--- a/hgext/win32text.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/hgext/win32text.py	Mon Jul 18 23:28:14 2016 -0500
@@ -41,10 +41,16 @@
   # or pretxnchangegroup.cr = python:hgext.win32text.forbidcr
 '''
 
+from __future__ import absolute_import
+
+import re
 from mercurial.i18n import _
-from mercurial.node import short
-from mercurial import util
-import re
+from mercurial.node import (
+    short,
+)
+from mercurial import (
+    util,
+)
 
 # Note for extension authors: ONLY specify testedwith = 'internal' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
--- a/i18n/hggettext	Sat Jul 02 09:41:40 2016 -0700
+++ b/i18n/hggettext	Mon Jul 18 23:28:14 2016 -0500
@@ -20,7 +20,11 @@
 join the message catalogs to get the final catalog.
 """
 
-import os, sys, inspect
+from __future__ import absolute_import, print_function
+
+import inspect
+import os
+import sys
 
 
 def escape(s):
@@ -95,7 +99,7 @@
     if mod.__doc__:
         src = open(path).read()
         lineno = 1 + offset(src, mod.__doc__, path, 7)
-        print poentry(path, lineno, mod.__doc__)
+        print(poentry(path, lineno, mod.__doc__))
 
     functions = list(getattr(mod, 'i18nfunctions', []))
     functions = [(f, True) for f in functions]
@@ -115,12 +119,12 @@
             if rstrip:
                 doc = doc.rstrip()
             lineno += offset(src, doc, name, 1)
-            print poentry(path, lineno, doc)
+            print(poentry(path, lineno, doc))
 
 
 def rawtext(path):
     src = open(path).read()
-    print poentry(path, 1, src)
+    print(poentry(path, 1, src))
 
 
 if __name__ == "__main__":
--- a/i18n/polib.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/i18n/polib.py	Mon Jul 18 23:28:14 2016 -0500
@@ -13,6 +13,8 @@
 :func:`~polib.mofile` convenience functions.
 """
 
+from __future__ import absolute_import
+
 __author__    = 'David Jean Louis <izimobil@gmail.com>'
 __version__   = '0.6.4'
 __all__       = ['pofile', 'POFile', 'POEntry', 'mofile', 'MOFile', 'MOEntry',
--- a/i18n/posplit	Sat Jul 02 09:41:40 2016 -0700
+++ b/i18n/posplit	Mon Jul 18 23:28:14 2016 -0500
@@ -5,9 +5,11 @@
 # license: MIT/X11/Expat
 #
 
+from __future__ import absolute_import, print_function
+
+import polib
 import re
 import sys
-import polib
 
 def addentry(po, entry, cache):
     e = cache.get(entry.msgid)
@@ -67,8 +69,8 @@
                             continue
                         else:
                             # lines following directly, unexpected
-                            print 'Warning: text follows line with directive' \
-                                  ' %s' % directive
+                            print('Warning: text follows line with directive' \
+                                  ' %s' % directive)
                     comment = 'do not translate: .. %s::' % directive
                     if not newentry.comment:
                         newentry.comment = comment
--- a/mercurial/__init__.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/__init__.py	Mon Jul 18 23:28:14 2016 -0500
@@ -12,36 +12,13 @@
 import sys
 import zipimport
 
+from . import (
+    policy
+)
+
 __all__ = []
 
-# Rules for how modules can be loaded. Values are:
-#
-#    c - require C extensions
-#    allow - allow pure Python implementation when C loading fails
-#    py - only load pure Python modules
-#
-# By default, require the C extensions for performance reasons.
-modulepolicy = 'c'
-try:
-    from . import __modulepolicy__
-    modulepolicy = __modulepolicy__.modulepolicy
-except ImportError:
-    pass
-
-# PyPy doesn't load C extensions.
-#
-# The canonical way to do this is to test platform.python_implementation().
-# But we don't import platform and don't bloat for it here.
-if '__pypy__' in sys.builtin_module_names:
-    modulepolicy = 'py'
-
-# Our C extensions aren't yet compatible with Python 3. So use pure Python
-# on Python 3 for now.
-if sys.version_info[0] >= 3:
-    modulepolicy = 'py'
-
-# Environment variable can always force settings.
-modulepolicy = os.environ.get('HGMODULEPOLICY', modulepolicy)
+modulepolicy = policy.policy
 
 # Modules that have both Python and C implementations. See also the
 # set of .py files under mercurial/pure/.
@@ -82,7 +59,7 @@
                 return zl
 
             try:
-                if modulepolicy == 'py':
+                if modulepolicy in policy.policynoc:
                     raise ImportError()
 
                 zl = ziploader('mercurial')
@@ -109,7 +86,7 @@
         stem = name.split('.')[-1]
 
         try:
-            if modulepolicy == 'py':
+            if modulepolicy in policy.policynoc:
                 raise ImportError()
 
             modinfo = imp.find_module(stem, mercurial.__path__)
@@ -144,9 +121,238 @@
         sys.modules[name] = mod
         return mod
 
+# Python 3 uses a custom module loader that transforms source code between
+# source file reading and compilation. This is done by registering a custom
+# finder that changes the spec for Mercurial modules to use a custom loader.
+if sys.version_info[0] >= 3:
+    from . import pure
+    import importlib
+    import io
+    import token
+    import tokenize
+
+    class hgpathentryfinder(importlib.abc.MetaPathFinder):
+        """A sys.meta_path finder that uses a custom module loader."""
+        def find_spec(self, fullname, path, target=None):
+            # Only handle Mercurial-related modules.
+            if not fullname.startswith(('mercurial.', 'hgext.', 'hgext3rd.')):
+                return None
+
+            # This assumes Python 3 doesn't support loading C modules.
+            if fullname in _dualmodules:
+                stem = fullname.split('.')[-1]
+                fullname = 'mercurial.pure.%s' % stem
+                target = pure
+                assert len(path) == 1
+                path = [os.path.join(path[0], 'pure')]
+
+            # Try to find the module using other registered finders.
+            spec = None
+            for finder in sys.meta_path:
+                if finder == self:
+                    continue
+
+                spec = finder.find_spec(fullname, path, target=target)
+                if spec:
+                    break
+
+            # This is a Mercurial-related module but we couldn't find it
+            # using the previously-registered finders. This likely means
+            # the module doesn't exist.
+            if not spec:
+                return None
+
+            if fullname.startswith('mercurial.pure.'):
+                spec.name = spec.name.replace('.pure.', '.')
+
+            # TODO need to support loaders from alternate specs, like zip
+            # loaders.
+            spec.loader = hgloader(spec.name, spec.origin)
+            return spec
+
+    def replacetokens(tokens):
+        """Transform a stream of tokens from raw to Python 3.
+
+        It is called by the custom module loading machinery to rewrite
+        source/tokens between source decoding and compilation.
+
+        Returns a generator of possibly rewritten tokens.
+
+        The input token list may be mutated as part of processing. However,
+        its changes do not necessarily match the output token stream.
+
+        REMEMBER TO CHANGE ``BYTECODEHEADER`` WHEN CHANGING THIS FUNCTION
+        OR CACHED FILES WON'T GET INVALIDATED PROPERLY.
+        """
+        for i, t in enumerate(tokens):
+            # Convert most string literals to byte literals. String literals
+            # in Python 2 are bytes. String literals in Python 3 are unicode.
+            # Most strings in Mercurial are bytes and unicode strings are rare.
+            # Rather than rewrite all string literals to use ``b''`` to indicate
+            # byte strings, we apply this token transformer to insert the ``b``
+            # prefix nearly everywhere.
+            if t.type == token.STRING:
+                s = t.string
+
+                # Preserve docstrings as string literals. This is inconsistent
+                # with regular unprefixed strings. However, the
+                # "from __future__" parsing (which allows a module docstring to
+                # exist before it) doesn't properly handle the docstring if it
+                # is b''' prefixed, leading to a SyntaxError. We leave all
+                # docstrings as unprefixed to avoid this. This means Mercurial
+                # components touching docstrings need to handle unicode,
+                # unfortunately.
+                if s[0:3] in ("'''", '"""'):
+                    yield t
+                    continue
+
+                # If the first character isn't a quote, it is likely a string
+                # prefixing character (such as 'b', 'u', or 'r'). Ignore.
+                if s[0] not in ("'", '"'):
+                    yield t
+                    continue
+
+                # String literal. Prefix to make a b'' string.
+                yield tokenize.TokenInfo(t.type, 'b%s' % s, t.start, t.end,
+                                          t.line)
+                continue
+
+            try:
+                nexttoken = tokens[i + 1]
+            except IndexError:
+                nexttoken = None
+
+            try:
+                prevtoken = tokens[i - 1]
+            except IndexError:
+                prevtoken = None
+
+            # This looks like a function call.
+            if (t.type == token.NAME and nexttoken and
+                nexttoken.type == token.OP and nexttoken.string == '('):
+                fn = t.string
+
+                # *attr() builtins don't accept bytes as their 2nd argument.
+                # Rewrite the token to include the unicode literal prefix so
+                # the string transformer above doesn't add the byte prefix.
+                if fn in ('getattr', 'setattr', 'hasattr', 'safehasattr'):
+                    try:
+                        # (NAME, 'getattr')
+                        # (OP, '(')
+                        # (NAME, 'foo')
+                        # (OP, ',')
+                        # (NAME|STRING, foo)
+                        st = tokens[i + 4]
+                        if (st.type == token.STRING and
+                            st.string[0] in ("'", '"')):
+                            rt = tokenize.TokenInfo(st.type, 'u%s' % st.string,
+                                                    st.start, st.end, st.line)
+                            tokens[i + 4] = rt
+                    except IndexError:
+                        pass
+
+                # .encode() and .decode() on str/bytes/unicode don't accept
+                # byte strings on Python 3. Rewrite the token to include the
+                # unicode literal prefix so the string transformer above doesn't
+                # add the byte prefix.
+                if (fn in ('encode', 'decode') and
+                    prevtoken.type == token.OP and prevtoken.string == '.'):
+                    # (OP, '.')
+                    # (NAME, 'encode')
+                    # (OP, '(')
+                    # (STRING, 'utf-8')
+                    # (OP, ')')
+                    try:
+                        st = tokens[i + 2]
+                        if (st.type == token.STRING and
+                            st.string[0] in ("'", '"')):
+                            rt = tokenize.TokenInfo(st.type, 'u%s' % st.string,
+                                                    st.start, st.end, st.line)
+                            tokens[i + 2] = rt
+                    except IndexError:
+                        pass
+
+            # Emit unmodified token.
+            yield t
+
+    # Header to add to bytecode files. This MUST be changed when
+    # ``replacetokens`` or any mechanism that changes semantics of module
+    # loading is changed. Otherwise cached bytecode may get loaded without
+    # the new transformation mechanisms applied.
+    BYTECODEHEADER = b'HG\x00\x01'
+
+    class hgloader(importlib.machinery.SourceFileLoader):
+        """Custom module loader that transforms source code.
+
+        When the source code is converted to a code object, we transform
+        certain patterns to be Python 3 compatible. This allows us to write code
+        that is natively Python 2 and compatible with Python 3 without
+        making the code excessively ugly.
+
+        We do this by transforming the token stream between parse and compile.
+
+        Implementing transformations invalidates caching assumptions made
+        by the built-in importer. The built-in importer stores a header on
+        saved bytecode files indicating the Python/bytecode version. If the
+        version changes, the cached bytecode is ignored. The Mercurial
+        transformations could change at any time. This means we need to check
+        that cached bytecode was generated with the current transformation
+        code or there could be a mismatch between cached bytecode and what
+        would be generated from this class.
+
+        We supplement the bytecode caching layer by wrapping ``get_data``
+        and ``set_data``. These functions are called when the
+        ``SourceFileLoader`` retrieves and saves bytecode cache files,
+        respectively. We simply add an additional header on the file. As
+        long as the version in this file is changed when semantics change,
+        cached bytecode should be invalidated when transformations change.
+
+        The added header has the form ``HG<VERSION>``. That is a literal
+        ``HG`` with 2 binary bytes indicating the transformation version.
+        """
+        def get_data(self, path):
+            data = super(hgloader, self).get_data(path)
+
+            if not path.endswith(tuple(importlib.machinery.BYTECODE_SUFFIXES)):
+                return data
+
+            # There should be a header indicating the Mercurial transformation
+            # version. If it doesn't exist or doesn't match the current version,
+            # we raise an OSError because that is what
+            # ``SourceFileLoader.get_code()`` expects when loading bytecode
+            # paths to indicate the cached file is "bad."
+            if data[0:2] != b'HG':
+                raise OSError('no hg header')
+            if data[0:4] != BYTECODEHEADER:
+                raise OSError('hg header version mismatch')
+
+            return data[4:]
+
+        def set_data(self, path, data, *args, **kwargs):
+            if path.endswith(tuple(importlib.machinery.BYTECODE_SUFFIXES)):
+                data = BYTECODEHEADER + data
+
+            return super(hgloader, self).set_data(path, data, *args, **kwargs)
+
+        def source_to_code(self, data, path):
+            """Perform token transformation before compilation."""
+            buf = io.BytesIO(data)
+            tokens = tokenize.tokenize(buf.readline)
+            data = tokenize.untokenize(replacetokens(list(tokens)))
+            # Python's built-in importer strips frames from exceptions raised
+            # for this code. Unfortunately, that mechanism isn't extensible
+            # and our frame will be blamed for the import failure. There
+            # are extremely hacky ways to do frame stripping. We haven't
+            # implemented them because they are very ugly.
+            return super(hgloader, self).source_to_code(data, path)
+
 # We automagically register our custom importer as a side-effect of loading.
 # This is necessary to ensure that any entry points are able to import
 # mercurial.* modules without having to perform this registration themselves.
-if not any(isinstance(x, hgimporter) for x in sys.meta_path):
+if sys.version_info[0] >= 3:
+    _importercls = hgpathentryfinder
+else:
+    _importercls = hgimporter
+if not any(isinstance(x, _importercls) for x in sys.meta_path):
     # meta_path is used before any implicit finders and before sys.path.
-    sys.meta_path.insert(0, hgimporter())
+    sys.meta_path.insert(0, _importercls())
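
The replacetokens() transformer above works on the token stream rather than on raw text. A stripped-down sketch of the same b''-prefixing idea, outside the loader machinery (illustration only; the real transformer also special-cases docstrings and the arguments of *attr() and encode()/decode()):

    import io
    import token
    import tokenize

    def byteify(source):
        out = []
        for t in tokenize.tokenize(io.BytesIO(source).readline):
            # plain, unprefixed, non-triple-quoted string literals get a b prefix
            if (t.type == token.STRING and t.string[0] in ("'", '"')
                    and not t.string.startswith(("'''", '"""'))):
                t = tokenize.TokenInfo(t.type, 'b%s' % t.string,
                                       t.start, t.end, t.line)
            out.append(t)
        return tokenize.untokenize(out)

    print(byteify(b"x = 'abc'\n"))    # b"x = b'abc'\n"
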
--- a/mercurial/ancestor.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/ancestor.py	Mon Jul 18 23:28:14 2016 -0500
@@ -291,7 +291,7 @@
     def __nonzero__(self):
         """False if the set is empty, True otherwise."""
         try:
-            iter(self).next()
+            next(iter(self))
             return True
         except StopIteration:
             return False
--- a/mercurial/bdiff.c	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/bdiff.c	Mon Jul 18 23:28:14 2016 -0500
@@ -9,37 +9,25 @@
  Based roughly on Python difflib
 */
 
-#define PY_SSIZE_T_CLEAN
-#include <Python.h>
 #include <stdlib.h>
 #include <string.h>
 #include <limits.h>
 
-#include "util.h"
-
-struct line {
-	int hash, n, e;
-	Py_ssize_t len;
-	const char *l;
-};
+#include "compat.h"
+#include "bitmanipulation.h"
+#include "bdiff.h"
 
 struct pos {
 	int pos, len;
 };
 
-struct hunk;
-struct hunk {
-	int a1, a2, b1, b2;
-	struct hunk *next;
-};
-
-static int splitlines(const char *a, Py_ssize_t len, struct line **lr)
+int bdiff_splitlines(const char *a, ssize_t len, struct bdiff_line **lr)
 {
 	unsigned hash;
 	int i;
 	const char *p, *b = a;
 	const char * const plast = a + len - 1;
-	struct line *l;
+	struct bdiff_line *l;
 
 	/* count the lines */
 	i = 1; /* extra line for sentinel */
@@ -47,7 +35,7 @@
 		if (*p == '\n' || p == plast)
 			i++;
 
-	*lr = l = (struct line *)malloc(sizeof(struct line) * i);
+	*lr = l = (struct bdiff_line *)malloc(sizeof(struct bdiff_line) * i);
 	if (!l)
 		return -1;
 
@@ -75,12 +63,13 @@
 	return i - 1;
 }
 
-static inline int cmp(struct line *a, struct line *b)
+static inline int cmp(struct bdiff_line *a, struct bdiff_line *b)
 {
 	return a->hash != b->hash || a->len != b->len || memcmp(a->l, b->l, a->len);
 }
 
-static int equatelines(struct line *a, int an, struct line *b, int bn)
+static int equatelines(struct bdiff_line *a, int an, struct bdiff_line *b,
+	int bn)
 {
 	int i, j, buckets = 1, t, scale;
 	struct pos *h = NULL;
@@ -145,7 +134,8 @@
 	return 1;
 }
 
-static int longest_match(struct line *a, struct line *b, struct pos *pos,
+static int longest_match(struct bdiff_line *a, struct bdiff_line *b,
+			struct pos *pos,
 			 int a1, int a2, int b1, int b2, int *omi, int *omj)
 {
 	int mi = a1, mj = b1, mk = 0, i, j, k, half;
@@ -206,8 +196,9 @@
 	return mk;
 }
 
-static struct hunk *recurse(struct line *a, struct line *b, struct pos *pos,
-			    int a1, int a2, int b1, int b2, struct hunk *l)
+static struct bdiff_hunk *recurse(struct bdiff_line *a, struct bdiff_line *b,
+				struct pos *pos,
+			    int a1, int a2, int b1, int b2, struct bdiff_hunk *l)
 {
 	int i, j, k;
 
@@ -222,7 +213,7 @@
 		if (!l)
 			return NULL;
 
-		l->next = (struct hunk *)malloc(sizeof(struct hunk));
+		l->next = (struct bdiff_hunk *)malloc(sizeof(struct bdiff_hunk));
 		if (!l->next)
 			return NULL;
 
@@ -239,10 +230,10 @@
 	}
 }
 
-static int diff(struct line *a, int an, struct line *b, int bn,
-		 struct hunk *base)
+int bdiff_diff(struct bdiff_line *a, int an, struct bdiff_line *b,
+		int bn, struct bdiff_hunk *base)
 {
-	struct hunk *curr;
+	struct bdiff_hunk *curr;
 	struct pos *pos;
 	int t, count = 0;
 
@@ -258,7 +249,7 @@
 			return -1;
 
 		/* sentinel end hunk */
-		curr->next = (struct hunk *)malloc(sizeof(struct hunk));
+		curr->next = (struct bdiff_hunk *)malloc(sizeof(struct bdiff_hunk));
 		if (!curr->next)
 			return -1;
 		curr = curr->next;
@@ -271,7 +262,7 @@
 
 	/* normalize the hunk list, try to push each hunk towards the end */
 	for (curr = base->next; curr; curr = curr->next) {
-		struct hunk *next = curr->next;
+		struct bdiff_hunk *next = curr->next;
 
 		if (!next)
 			break;
@@ -293,195 +284,13 @@
 	return count;
 }
 
-static void freehunks(struct hunk *l)
+void bdiff_freehunks(struct bdiff_hunk *l)
 {
-	struct hunk *n;
+	struct bdiff_hunk *n;
 	for (; l; l = n) {
 		n = l->next;
 		free(l);
 	}
 }
 
-static PyObject *blocks(PyObject *self, PyObject *args)
-{
-	PyObject *sa, *sb, *rl = NULL, *m;
-	struct line *a, *b;
-	struct hunk l, *h;
-	int an, bn, count, pos = 0;
 
-	l.next = NULL;
-
-	if (!PyArg_ParseTuple(args, "SS:bdiff", &sa, &sb))
-		return NULL;
-
-	an = splitlines(PyBytes_AsString(sa), PyBytes_Size(sa), &a);
-	bn = splitlines(PyBytes_AsString(sb), PyBytes_Size(sb), &b);
-
-	if (!a || !b)
-		goto nomem;
-
-	count = diff(a, an, b, bn, &l);
-	if (count < 0)
-		goto nomem;
-
-	rl = PyList_New(count);
-	if (!rl)
-		goto nomem;
-
-	for (h = l.next; h; h = h->next) {
-		m = Py_BuildValue("iiii", h->a1, h->a2, h->b1, h->b2);
-		PyList_SetItem(rl, pos, m);
-		pos++;
-	}
-
-nomem:
-	free(a);
-	free(b);
-	freehunks(l.next);
-	return rl ? rl : PyErr_NoMemory();
-}
-
-static PyObject *bdiff(PyObject *self, PyObject *args)
-{
-	char *sa, *sb, *rb;
-	PyObject *result = NULL;
-	struct line *al, *bl;
-	struct hunk l, *h;
-	int an, bn, count;
-	Py_ssize_t len = 0, la, lb;
-	PyThreadState *_save;
-
-	l.next = NULL;
-
-	if (!PyArg_ParseTuple(args, "s#s#:bdiff", &sa, &la, &sb, &lb))
-		return NULL;
-
-	if (la > UINT_MAX || lb > UINT_MAX) {
-		PyErr_SetString(PyExc_ValueError, "bdiff inputs too large");
-		return NULL;
-	}
-
-	_save = PyEval_SaveThread();
-	an = splitlines(sa, la, &al);
-	bn = splitlines(sb, lb, &bl);
-	if (!al || !bl)
-		goto nomem;
-
-	count = diff(al, an, bl, bn, &l);
-	if (count < 0)
-		goto nomem;
-
-	/* calculate length of output */
-	la = lb = 0;
-	for (h = l.next; h; h = h->next) {
-		if (h->a1 != la || h->b1 != lb)
-			len += 12 + bl[h->b1].l - bl[lb].l;
-		la = h->a2;
-		lb = h->b2;
-	}
-	PyEval_RestoreThread(_save);
-	_save = NULL;
-
-	result = PyBytes_FromStringAndSize(NULL, len);
-
-	if (!result)
-		goto nomem;
-
-	/* build binary patch */
-	rb = PyBytes_AsString(result);
-	la = lb = 0;
-
-	for (h = l.next; h; h = h->next) {
-		if (h->a1 != la || h->b1 != lb) {
-			len = bl[h->b1].l - bl[lb].l;
-			putbe32((uint32_t)(al[la].l - al->l), rb);
-			putbe32((uint32_t)(al[h->a1].l - al->l), rb + 4);
-			putbe32((uint32_t)len, rb + 8);
-			memcpy(rb + 12, bl[lb].l, len);
-			rb += 12 + len;
-		}
-		la = h->a2;
-		lb = h->b2;
-	}
-
-nomem:
-	if (_save)
-		PyEval_RestoreThread(_save);
-	free(al);
-	free(bl);
-	freehunks(l.next);
-	return result ? result : PyErr_NoMemory();
-}
-
-/*
- * If allws != 0, remove all whitespace (' ', \t and \r). Otherwise,
- * reduce whitespace sequences to a single space and trim remaining whitespace
- * from end of lines.
- */
-static PyObject *fixws(PyObject *self, PyObject *args)
-{
-	PyObject *s, *result = NULL;
-	char allws, c;
-	const char *r;
-	Py_ssize_t i, rlen, wlen = 0;
-	char *w;
-
-	if (!PyArg_ParseTuple(args, "Sb:fixws", &s, &allws))
-		return NULL;
-	r = PyBytes_AsString(s);
-	rlen = PyBytes_Size(s);
-
-	w = (char *)malloc(rlen ? rlen : 1);
-	if (!w)
-		goto nomem;
-
-	for (i = 0; i != rlen; i++) {
-		c = r[i];
-		if (c == ' ' || c == '\t' || c == '\r') {
-			if (!allws && (wlen == 0 || w[wlen - 1] != ' '))
-				w[wlen++] = ' ';
-		} else if (c == '\n' && !allws
-			  && wlen > 0 && w[wlen - 1] == ' ') {
-			w[wlen - 1] = '\n';
-		} else {
-			w[wlen++] = c;
-		}
-	}
-
-	result = PyBytes_FromStringAndSize(w, wlen);
-
-nomem:
-	free(w);
-	return result ? result : PyErr_NoMemory();
-}
-
-
-static char mdiff_doc[] = "Efficient binary diff.";
-
-static PyMethodDef methods[] = {
-	{"bdiff", bdiff, METH_VARARGS, "calculate a binary diff\n"},
-	{"blocks", blocks, METH_VARARGS, "find a list of matching lines\n"},
-	{"fixws", fixws, METH_VARARGS, "normalize diff whitespaces\n"},
-	{NULL, NULL}
-};
-
-#ifdef IS_PY3K
-static struct PyModuleDef bdiff_module = {
-	PyModuleDef_HEAD_INIT,
-	"bdiff",
-	mdiff_doc,
-	-1,
-	methods
-};
-
-PyMODINIT_FUNC PyInit_bdiff(void)
-{
-	return PyModule_Create(&bdiff_module);
-}
-#else
-PyMODINIT_FUNC initbdiff(void)
-{
-	Py_InitModule3("bdiff", methods, mdiff_doc);
-}
-#endif
-
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/bdiff.h	Mon Jul 18 23:28:14 2016 -0500
@@ -0,0 +1,21 @@
+#ifndef _HG_BDIFF_H_
+#define _HG_BDIFF_H_
+
+struct bdiff_line {
+	int hash, n, e;
+	ssize_t len;
+	const char *l;
+};
+
+struct bdiff_hunk;
+struct bdiff_hunk {
+	int a1, a2, b1, b2;
+	struct bdiff_hunk *next;
+};
+
+int bdiff_splitlines(const char *a, ssize_t len, struct bdiff_line **lr);
+int bdiff_diff(struct bdiff_line *a, int an, struct bdiff_line *b, int bn,
+	struct bdiff_hunk *base);
+void bdiff_freehunks(struct bdiff_hunk *l);
+
+#endif
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/bdiff_module.c	Mon Jul 18 23:28:14 2016 -0500
@@ -0,0 +1,203 @@
+/*
+ bdiff.c - efficient binary diff extension for Mercurial
+
+ Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
+
+ This software may be used and distributed according to the terms of
+ the GNU General Public License, incorporated herein by reference.
+
+ Based roughly on Python difflib
+*/
+
+#define PY_SSIZE_T_CLEAN
+#include <Python.h>
+#include <stdlib.h>
+#include <string.h>
+#include <limits.h>
+
+#include "bdiff.h"
+#include "bitmanipulation.h"
+
+
+static PyObject *blocks(PyObject *self, PyObject *args)
+{
+	PyObject *sa, *sb, *rl = NULL, *m;
+	struct bdiff_line *a, *b;
+	struct bdiff_hunk l, *h;
+	int an, bn, count, pos = 0;
+
+	l.next = NULL;
+
+	if (!PyArg_ParseTuple(args, "SS:bdiff", &sa, &sb))
+		return NULL;
+
+	an = bdiff_splitlines(PyBytes_AsString(sa), PyBytes_Size(sa), &a);
+	bn = bdiff_splitlines(PyBytes_AsString(sb), PyBytes_Size(sb), &b);
+
+	if (!a || !b)
+		goto nomem;
+
+	count = bdiff_diff(a, an, b, bn, &l);
+	if (count < 0)
+		goto nomem;
+
+	rl = PyList_New(count);
+	if (!rl)
+		goto nomem;
+
+	for (h = l.next; h; h = h->next) {
+		m = Py_BuildValue("iiii", h->a1, h->a2, h->b1, h->b2);
+		PyList_SetItem(rl, pos, m);
+		pos++;
+	}
+
+nomem:
+	free(a);
+	free(b);
+	bdiff_freehunks(l.next);
+	return rl ? rl : PyErr_NoMemory();
+}
+
+static PyObject *bdiff(PyObject *self, PyObject *args)
+{
+	char *sa, *sb, *rb;
+	PyObject *result = NULL;
+	struct bdiff_line *al, *bl;
+	struct bdiff_hunk l, *h;
+	int an, bn, count;
+	Py_ssize_t len = 0, la, lb;
+	PyThreadState *_save;
+
+	l.next = NULL;
+
+	if (!PyArg_ParseTuple(args, "s#s#:bdiff", &sa, &la, &sb, &lb))
+		return NULL;
+
+	if (la > UINT_MAX || lb > UINT_MAX) {
+		PyErr_SetString(PyExc_ValueError, "bdiff inputs too large");
+		return NULL;
+	}
+
+	_save = PyEval_SaveThread();
+	an = bdiff_splitlines(sa, la, &al);
+	bn = bdiff_splitlines(sb, lb, &bl);
+	if (!al || !bl)
+		goto nomem;
+
+	count = bdiff_diff(al, an, bl, bn, &l);
+	if (count < 0)
+		goto nomem;
+
+	/* calculate length of output */
+	la = lb = 0;
+	for (h = l.next; h; h = h->next) {
+		if (h->a1 != la || h->b1 != lb)
+			len += 12 + bl[h->b1].l - bl[lb].l;
+		la = h->a2;
+		lb = h->b2;
+	}
+	PyEval_RestoreThread(_save);
+	_save = NULL;
+
+	result = PyBytes_FromStringAndSize(NULL, len);
+
+	if (!result)
+		goto nomem;
+
+	/* build binary patch */
+	rb = PyBytes_AsString(result);
+	la = lb = 0;
+
+	for (h = l.next; h; h = h->next) {
+		if (h->a1 != la || h->b1 != lb) {
+			len = bl[h->b1].l - bl[lb].l;
+			putbe32((uint32_t)(al[la].l - al->l), rb);
+			putbe32((uint32_t)(al[h->a1].l - al->l), rb + 4);
+			putbe32((uint32_t)len, rb + 8);
+			memcpy(rb + 12, bl[lb].l, len);
+			rb += 12 + len;
+		}
+		la = h->a2;
+		lb = h->b2;
+	}
+
+nomem:
+	if (_save)
+		PyEval_RestoreThread(_save);
+	free(al);
+	free(bl);
+	bdiff_freehunks(l.next);
+	return result ? result : PyErr_NoMemory();
+}
+
+/*
+ * If allws != 0, remove all whitespace (' ', \t and \r). Otherwise,
+ * reduce whitespace sequences to a single space and trim remaining whitespace
+ * from end of lines.
+ */
+static PyObject *fixws(PyObject *self, PyObject *args)
+{
+	PyObject *s, *result = NULL;
+	char allws, c;
+	const char *r;
+	Py_ssize_t i, rlen, wlen = 0;
+	char *w;
+
+	if (!PyArg_ParseTuple(args, "Sb:fixws", &s, &allws))
+		return NULL;
+	r = PyBytes_AsString(s);
+	rlen = PyBytes_Size(s);
+
+	w = (char *)malloc(rlen ? rlen : 1);
+	if (!w)
+		goto nomem;
+
+	for (i = 0; i != rlen; i++) {
+		c = r[i];
+		if (c == ' ' || c == '\t' || c == '\r') {
+			if (!allws && (wlen == 0 || w[wlen - 1] != ' '))
+				w[wlen++] = ' ';
+		} else if (c == '\n' && !allws
+			  && wlen > 0 && w[wlen - 1] == ' ') {
+			w[wlen - 1] = '\n';
+		} else {
+			w[wlen++] = c;
+		}
+	}
+
+	result = PyBytes_FromStringAndSize(w, wlen);
+
+nomem:
+	free(w);
+	return result ? result : PyErr_NoMemory();
+}
+
+
+static char mdiff_doc[] = "Efficient binary diff.";
+
+static PyMethodDef methods[] = {
+	{"bdiff", bdiff, METH_VARARGS, "calculate a binary diff\n"},
+	{"blocks", blocks, METH_VARARGS, "find a list of matching lines\n"},
+	{"fixws", fixws, METH_VARARGS, "normalize diff whitespaces\n"},
+	{NULL, NULL}
+};
+
+#ifdef IS_PY3K
+static struct PyModuleDef bdiff_module = {
+	PyModuleDef_HEAD_INIT,
+	"bdiff",
+	mdiff_doc,
+	-1,
+	methods
+};
+
+PyMODINIT_FUNC PyInit_bdiff(void)
+{
+	return PyModule_Create(&bdiff_module);
+}
+#else
+PyMODINIT_FUNC initbdiff(void)
+{
+	Py_InitModule3("bdiff", methods, mdiff_doc);
+}
+#endif
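
As the patch-building loop in bdiff() above shows, the binary delta is a sequence of hunks, each a 12-byte big-endian header (start offset, end offset, replacement length) followed by the replacement bytes. A small sketch that walks and applies such a delta (illustration only; real delta application is mercurial.mpatch's job):

    import struct

    def iterhunks(delta):
        pos = 0
        while pos < len(delta):
            start, end, length = struct.unpack('>III', delta[pos:pos + 12])
            yield start, end, delta[pos + 12:pos + 12 + length]
            pos += 12 + length

    def applydelta(old, delta):
        out, last = [], 0
        for start, end, data in iterhunks(delta):
            out.append(old[last:start])   # unchanged run before the hunk
            out.append(data)              # replacement for old[start:end]
            last = end
        out.append(old[last:])
        return b''.join(out)

    # replace bytes 6..11 of the old text with b'world'
    delta = struct.pack('>III', 6, 11, 5) + b'world'
    print(applydelta(b'hello there\n', delta))   # b'hello world\n'
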
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/bitmanipulation.h	Mon Jul 18 23:28:14 2016 -0500
@@ -0,0 +1,53 @@
+#ifndef _HG_BITMANIPULATION_H_
+#define _HG_BITMANIPULATION_H_
+
+#include "compat.h"
+
+static inline uint32_t getbe32(const char *c)
+{
+	const unsigned char *d = (const unsigned char *)c;
+
+	return ((d[0] << 24) |
+		(d[1] << 16) |
+		(d[2] << 8) |
+		(d[3]));
+}
+
+static inline int16_t getbeint16(const char *c)
+{
+	const unsigned char *d = (const unsigned char *)c;
+
+	return ((d[0] << 8) |
+		(d[1]));
+}
+
+static inline uint16_t getbeuint16(const char *c)
+{
+	const unsigned char *d = (const unsigned char *)c;
+
+	return ((d[0] << 8) |
+		(d[1]));
+}
+
+static inline void putbe32(uint32_t x, char *c)
+{
+	c[0] = (x >> 24) & 0xff;
+	c[1] = (x >> 16) & 0xff;
+	c[2] = (x >> 8) & 0xff;
+	c[3] = (x) & 0xff;
+}
+
+static inline double getbefloat64(const char *c)
+{
+	const unsigned char *d = (const unsigned char *)c;
+	double ret;
+	int i;
+	uint64_t t = 0;
+	for (i = 0; i < 8; i++) {
+		t = (t<<8) + d[i];
+	}
+	memcpy(&ret, &t, sizeof(t));
+	return ret;
+}
+
+#endif
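
For comparison, the inline getbe32()/putbe32() helpers above are the C spelling of packing and unpacking an unsigned 32-bit integer in big-endian byte order; struct's '>I' format expresses the same thing in Python:

    import struct

    def getbe32(c):
        d = bytearray(c[:4])
        return (d[0] << 24) | (d[1] << 16) | (d[2] << 8) | d[3]

    def putbe32(x):
        return bytes(((x >> 24) & 0xff, (x >> 16) & 0xff,
                      (x >> 8) & 0xff, x & 0xff))

    assert putbe32(0x01020304) == struct.pack('>I', 0x01020304)
    assert getbe32(struct.pack('>I', 0xdeadbeef)) == 0xdeadbeef
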
--- a/mercurial/bookmarks.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/bookmarks.py	Mon Jul 18 23:28:14 2016 -0500
@@ -17,6 +17,7 @@
 )
 from . import (
     encoding,
+    error,
     lock as lockmod,
     obsolete,
     util,
@@ -109,39 +110,6 @@
                             location='plain')
         tr.hookargs['bookmark_moved'] = '1'
 
-    def write(self):
-        '''Write bookmarks
-
-        Write the given bookmark => hash dictionary to the .hg/bookmarks file
-        in a format equal to those of localtags.
-
-        We also store a backup of the previous state in undo.bookmarks that
-        can be copied back on rollback.
-        '''
-        msg = 'bm.write() is deprecated, use bm.recordchange(transaction)'
-        self._repo.ui.deprecwarn(msg, '3.7')
-        # TODO: writing the active bookmark should probably also use a
-        # transaction.
-        self._writeactive()
-        if self._clean:
-            return
-        repo = self._repo
-        if (repo.ui.configbool('devel', 'all-warnings')
-                or repo.ui.configbool('devel', 'check-locks')):
-            l = repo._wlockref and repo._wlockref()
-            if l is None or not l.held:
-                repo.ui.develwarn('bookmarks write with no wlock')
-
-        tr = repo.currenttransaction()
-        if tr:
-            self.recordchange(tr)
-            # invalidatevolatilesets() is omitted because this doesn't
-            # write changes out actually
-            return
-
-        self._writerepo(repo)
-        repo.invalidatevolatilesets()
-
     def _writerepo(self, repo):
         """Factored out for extensibility"""
         rbm = repo._bookmarks
@@ -150,7 +118,8 @@
             rbm._writeactive()
 
         with repo.wlock():
-            file_ = repo.vfs('bookmarks', 'w', atomictemp=True)
+            file_ = repo.vfs('bookmarks', 'w', atomictemp=True,
+                             checkambig=True)
             try:
                 self._write(file_)
             except: # re-raises
@@ -164,7 +133,8 @@
             return
         with self._repo.wlock():
             if self._active is not None:
-                f = self._repo.vfs('bookmarks.current', 'w', atomictemp=True)
+                f = self._repo.vfs('bookmarks.current', 'w', atomictemp=True,
+                                   checkambig=True)
                 try:
                     f.write(encoding.fromlocal(self._active))
                 finally:
@@ -185,7 +155,10 @@
 
     def expandname(self, bname):
         if bname == '.':
-            return self.active
+            if self.active:
+                return self.active
+            else:
+                raise error.Abort(_("no active bookmark"))
         return bname
 
 def _readactive(repo, marks):
--- a/mercurial/branchmap.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/branchmap.py	Mon Jul 18 23:28:14 2016 -0500
@@ -363,7 +363,7 @@
             bndata = repo.vfs.read(_rbcnames)
             self._rbcsnameslen = len(bndata) # for verification before writing
             self._names = [encoding.tolocal(bn) for bn in bndata.split('\0')]
-        except (IOError, OSError) as inst:
+        except (IOError, OSError):
             if readonly:
                 # don't try to use cache - fall back to the slow path
                 self.branchinfo = self._branchinfo
@@ -402,10 +402,9 @@
         if rev == nullrev:
             return changelog.branchinfo(rev)
 
-        # if requested rev is missing, add and populate all missing revs
+        # if requested rev isn't allocated, grow and cache the rev info
         if len(self._rbcrevs) < rbcrevidx + _rbcrecsize:
-            self._rbcrevs.extend('\0' * (len(changelog) * _rbcrecsize -
-                                         len(self._rbcrevs)))
+            return self._branchinfo(rev)
 
         # fast path: extract data from cache, use it if node is matching
         reponode = changelog.node(rev)[:_rbcnodelen]
@@ -452,6 +451,10 @@
         rbcrevidx = rev * _rbcrecsize
         rec = array('c')
         rec.fromstring(pack(_rbcrecfmt, node, branchidx))
+        if len(self._rbcrevs) < rbcrevidx + _rbcrecsize:
+            self._rbcrevs.extend('\0' *
+                                 (len(self._repo.changelog) * _rbcrecsize -
+                                  len(self._rbcrevs)))
         self._rbcrevs[rbcrevidx:rbcrevidx + _rbcrecsize] = rec
         self._rbcrevslen = min(self._rbcrevslen, rev)
 
--- a/mercurial/bundle2.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/bundle2.py	Mon Jul 18 23:28:14 2016 -0500
@@ -690,7 +690,7 @@
 
     def _processallparams(self, paramsblock):
         """"""
-        params = {}
+        params = util.sortdict()
         for p in paramsblock.split(' '):
             p = p.split('=', 1)
             p = [urlreq.unquote(i) for i in p]
@@ -1115,8 +1115,8 @@
         self.mandatoryparams = tuple(mandatoryparams)
         self.advisoryparams  = tuple(advisoryparams)
         # user friendly UI
-        self.params = dict(self.mandatoryparams)
-        self.params.update(dict(self.advisoryparams))
+        self.params = util.sortdict(self.mandatoryparams)
+        self.params.update(self.advisoryparams)
         self.mandatorykeys = frozenset(p[0] for p in mandatoryparams)
 
     def _payloadchunks(self, chunknum=0):
@@ -1294,6 +1294,9 @@
         bundle.setcompression(compression)
         part = bundle.newpart('changegroup', data=cg.getchunks())
         part.addparam('version', cg.version)
+        if 'clcount' in cg.extras:
+            part.addparam('nbchanges', str(cg.extras['clcount']),
+                          mandatory=False)
         chunkiter = bundle.getchunks()
     else:
         # compression argument is only for the bundle2 case
--- a/mercurial/bundlerepo.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/bundlerepo.py	Mon Jul 18 23:28:14 2016 -0500
@@ -291,7 +291,7 @@
                                                     ".cg%sun" % version)
 
             if cgstream is None:
-                raise error.Abort('No changegroups found')
+                raise error.Abort(_('No changegroups found'))
             cgstream.seek(0)
 
             self.bundle = changegroup.getunbundler(version, cgstream, 'UN')
--- a/mercurial/changegroup.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/changegroup.py	Mon Jul 18 23:28:14 2016 -0500
@@ -135,7 +135,7 @@
     version = '01'
     _grouplistcount = 1 # One list of files after the manifests
 
-    def __init__(self, fh, alg):
+    def __init__(self, fh, alg, extras=None):
         if alg == 'UN':
             alg = None # get more modern without breaking too much
         if not alg in util.decompressors:
@@ -145,6 +145,7 @@
             alg = '_truncatedBZ'
         self._stream = util.decompressors[alg](fh)
         self._type = alg
+        self.extras = extras or {}
         self.callback = None
 
     # These methods (compressed, read, seek, tell) all appear to only
@@ -530,6 +531,17 @@
     def fileheader(self, fname):
         return chunkheader(len(fname)) + fname
 
+    # Extracted both for clarity and for overriding in extensions.
+    def _sortgroup(self, revlog, nodelist, lookup):
+        """Sort nodes for change group and turn them into revnums."""
+        # for generaldelta revlogs, we linearize the revs; this will both be
+        # much quicker and generate a much smaller bundle
+        if (revlog._generaldelta and self._reorder is None) or self._reorder:
+            dag = dagutil.revlogdag(revlog)
+            return dag.linearize(set(revlog.rev(n) for n in nodelist))
+        else:
+            return sorted([revlog.rev(n) for n in nodelist])
+
     def group(self, nodelist, revlog, lookup, units=None):
         """Calculate a delta group, yielding a sequence of changegroup chunks
         (strings).
@@ -549,14 +561,7 @@
             yield self.close()
             return
 
-        # for generaldelta revlogs, we linearize the revs; this will both be
-        # much quicker and generate a much smaller bundle
-        if (revlog._generaldelta and self._reorder is None) or self._reorder:
-            dag = dagutil.revlogdag(revlog)
-            revs = set(revlog.rev(n) for n in nodelist)
-            revs = dag.linearize(revs)
-        else:
-            revs = sorted([revlog.rev(n) for n in nodelist])
+        revs = self._sortgroup(revlog, nodelist, lookup)
 
         # add the parent of the first rev
         p = revlog.parentrevs(revs[0])[0]
@@ -724,10 +729,11 @@
             dir = min(tmfnodes)
             nodes = tmfnodes[dir]
             prunednodes = self.prune(dirlog(dir), nodes, commonrevs)
-            for x in self._packmanifests(dir, prunednodes,
-                                         makelookupmflinknode(dir)):
-                size += len(x)
-                yield x
+            if not dir or prunednodes:
+                for x in self._packmanifests(dir, prunednodes,
+                                             makelookupmflinknode(dir)):
+                    size += len(x)
+                    yield x
             del tmfnodes[dir]
         self._verbosenote(_('%8.i (manifests)\n') % size)
         yield self._manifestsdone()
@@ -895,8 +901,8 @@
     assert version in supportedoutgoingversions(repo)
     return _packermap[version][0](repo, bundlecaps)
 
-def getunbundler(version, fh, alg):
-    return _packermap[version][1](fh, alg)
+def getunbundler(version, fh, alg, extras=None):
+    return _packermap[version][1](fh, alg, extras=extras)
 
 def _changegroupinfo(repo, nodes, source):
     if repo.ui.verbose or source == 'bundle':
@@ -924,7 +930,8 @@
 
 def getsubset(repo, outgoing, bundler, source, fastpath=False):
     gengroup = getsubsetraw(repo, outgoing, bundler, source, fastpath)
-    return getunbundler(bundler.version, util.chunkbuffer(gengroup), None)
+    return getunbundler(bundler.version, util.chunkbuffer(gengroup), None,
+                        {'clcount': len(outgoing.missing)})
 
 def changegroupsubset(repo, roots, heads, source, version='01'):
     """Compute a changegroup consisting of all the nodes that are
--- a/mercurial/cmdutil.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/cmdutil.py	Mon Jul 18 23:28:14 2016 -0500
@@ -83,7 +83,7 @@
         else:
             recordfn = crecordmod.chunkselector
 
-        return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
+        return crecordmod.filterpatch(ui, originalhunks, recordfn)
 
     else:
         return patch.filterpatch(ui, originalhunks, operation)
@@ -91,9 +91,9 @@
 def recordfilter(ui, originalhunks, operation=None):
     """ Prompts the user to filter the originalhunks and return a list of
     selected hunks.
-    *operation* is used for ui purposes to indicate the user
-    what kind of filtering they are doing: reverting, committing, shelving, etc.
-    *operation* has to be a translated string.
+    *operation* is used to build ui messages to indicate to the user what
+    kind of filtering they are doing: reverting, committing, shelving, etc.
+    (see patch.filterpatch).
     """
     usecurses = crecordmod.checkcurses(ui)
     testfile = ui.config('experimental', 'crecordtest', None)
@@ -532,7 +532,7 @@
         msg = _('cannot specify --changelog and --manifest at the same time')
     elif cl and dir:
         msg = _('cannot specify --changelog and --dir at the same time')
-    elif cl or mf:
+    elif cl or mf or dir:
         if file_:
             msg = _('cannot specify filename with --changelog or --manifest')
         elif not repo:
@@ -549,7 +549,7 @@
             if 'treemanifest' not in repo.requirements:
                 raise error.Abort(_("--dir can only be used on repos with "
                                    "treemanifest enabled"))
-            dirlog = repo.dirlog(file_)
+            dirlog = repo.dirlog(dir)
             if len(dirlog):
                 r = dirlog
         elif mf:
@@ -1405,24 +1405,24 @@
             self.ui.write(",\n {")
 
         if self.ui.quiet:
-            self.ui.write('\n  "rev": %s' % jrev)
-            self.ui.write(',\n  "node": %s' % jnode)
+            self.ui.write(('\n  "rev": %s') % jrev)
+            self.ui.write((',\n  "node": %s') % jnode)
             self.ui.write('\n }')
             return
 
-        self.ui.write('\n  "rev": %s' % jrev)
-        self.ui.write(',\n  "node": %s' % jnode)
-        self.ui.write(',\n  "branch": "%s"' % j(ctx.branch()))
-        self.ui.write(',\n  "phase": "%s"' % ctx.phasestr())
-        self.ui.write(',\n  "user": "%s"' % j(ctx.user()))
-        self.ui.write(',\n  "date": [%d, %d]' % ctx.date())
-        self.ui.write(',\n  "desc": "%s"' % j(ctx.description()))
-
-        self.ui.write(',\n  "bookmarks": [%s]' %
+        self.ui.write(('\n  "rev": %s') % jrev)
+        self.ui.write((',\n  "node": %s') % jnode)
+        self.ui.write((',\n  "branch": "%s"') % j(ctx.branch()))
+        self.ui.write((',\n  "phase": "%s"') % ctx.phasestr())
+        self.ui.write((',\n  "user": "%s"') % j(ctx.user()))
+        self.ui.write((',\n  "date": [%d, %d]') % ctx.date())
+        self.ui.write((',\n  "desc": "%s"') % j(ctx.description()))
+
+        self.ui.write((',\n  "bookmarks": [%s]') %
                       ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
-        self.ui.write(',\n  "tags": [%s]' %
+        self.ui.write((',\n  "tags": [%s]') %
                       ", ".join('"%s"' % j(t) for t in ctx.tags()))
-        self.ui.write(',\n  "parents": [%s]' %
+        self.ui.write((',\n  "parents": [%s]') %
                       ", ".join('"%s"' % c.hex() for c in ctx.parents()))
 
         if self.ui.debugflag:
@@ -1430,26 +1430,26 @@
                 jmanifestnode = 'null'
             else:
                 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
-            self.ui.write(',\n  "manifest": %s' % jmanifestnode)
-
-            self.ui.write(',\n  "extra": {%s}' %
+            self.ui.write((',\n  "manifest": %s') % jmanifestnode)
+
+            self.ui.write((',\n  "extra": {%s}') %
                           ", ".join('"%s": "%s"' % (j(k), j(v))
                                     for k, v in ctx.extra().items()))
 
             files = ctx.p1().status(ctx)
-            self.ui.write(',\n  "modified": [%s]' %
+            self.ui.write((',\n  "modified": [%s]') %
                           ", ".join('"%s"' % j(f) for f in files[0]))
-            self.ui.write(',\n  "added": [%s]' %
+            self.ui.write((',\n  "added": [%s]') %
                           ", ".join('"%s"' % j(f) for f in files[1]))
-            self.ui.write(',\n  "removed": [%s]' %
+            self.ui.write((',\n  "removed": [%s]') %
                           ", ".join('"%s"' % j(f) for f in files[2]))
 
         elif self.ui.verbose:
-            self.ui.write(',\n  "files": [%s]' %
+            self.ui.write((',\n  "files": [%s]') %
                           ", ".join('"%s"' % j(f) for f in ctx.files()))
 
             if copies:
-                self.ui.write(',\n  "copies": {%s}' %
+                self.ui.write((',\n  "copies": {%s}') %
                               ", ".join('"%s": "%s"' % (j(k), j(v))
                                                         for k, v in copies))
 
@@ -1463,12 +1463,13 @@
                 self.ui.pushbuffer()
                 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                                match=matchfn, stat=True)
-                self.ui.write(',\n  "diffstat": "%s"' % j(self.ui.popbuffer()))
+                self.ui.write((',\n  "diffstat": "%s"')
+                              % j(self.ui.popbuffer()))
             if diff:
                 self.ui.pushbuffer()
                 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                                match=matchfn, stat=False)
-                self.ui.write(',\n  "diff": "%s"' % j(self.ui.popbuffer()))
+                self.ui.write((',\n  "diff": "%s"') % j(self.ui.popbuffer()))
 
         self.ui.write("\n }")
 
@@ -1998,7 +1999,7 @@
         followfirst = 0
     # --follow with FILE behavior depends on revs...
     it = iter(revs)
-    startrev = it.next()
+    startrev = next(it)
     followdescendants = startrev < next(it, startrev)
 
     # branch and only_branch are really aliases and must be handled at
@@ -2147,7 +2148,8 @@
     if opts.get('rev'):
         # User-specified revs might be unsorted, but don't sort before
         # _makelogrevset because it might depend on the order of revs
-        revs.sort(reverse=True)
+        if not (revs.isdescending() or revs.istopo()):
+            revs.sort(reverse=True)
     if expr:
         # Revset matchers often operate faster on revisions in changelog
         # order, because most filters deal with the changelog.
@@ -3071,7 +3073,7 @@
 
             # tell newly modified apart.
             dsmodified &= modified
-            dsmodified |= modified & dsadded # dirstate added may needs backup
+            dsmodified |= modified & dsadded # dirstate added may need backup
             modified -= dsmodified
 
             # We need to wait for some post-processing to update this set
@@ -3141,11 +3143,17 @@
         # All set to `discard` if `no-backup` is set to avoid checking
         # no_backup lower in the code.
         # These values are ordered for comparison purposes
+        backupinteractive = 3 # do backup if interactively modified
         backup = 2  # unconditionally do backup
         check = 1   # check if the existing file differs from target
         discard = 0 # never do backup
         if opts.get('no_backup'):
-            backup = check = discard
+            backupinteractive = backup = check = discard
+        if interactive:
+            dsmodifiedbackup = backupinteractive
+        else:
+            dsmodifiedbackup = backup
+        tobackup = set()
 
         backupanddel = actions['remove']
         if not opts.get('no_backup'):
@@ -3163,7 +3171,7 @@
             # Modified compared to target, but local file is deleted
             (deleted,       actions['revert'],   discard),
             # Modified compared to target, local change
-            (dsmodified,    actions['revert'],   backup),
+            (dsmodified,    actions['revert'],   dsmodifiedbackup),
             # Added since target
             (added,         actions['remove'],   discard),
             # Added in working directory
@@ -3198,8 +3206,12 @@
                     continue
                 if xlist is not None:
                     xlist.append(abs)
-                    if dobackup and (backup <= dobackup
-                                     or wctx[abs].cmp(ctx[abs])):
+                    if dobackup:
+                        # If in interactive mode, don't automatically create
+                        # .orig files (issue4793)
+                        if dobackup == backupinteractive:
+                            tobackup.add(abs)
+                        elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
                             bakname = scmutil.origpath(ui, repo, rel)
                             ui.note(_('saving current version of %s as %s\n') %
                                     (rel, bakname))
@@ -3219,7 +3231,7 @@
         if not opts.get('dry_run'):
             needdata = ('revert', 'add', 'undelete')
             _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
-            _performrevert(repo, parents, ctx, actions, interactive)
+            _performrevert(repo, parents, ctx, actions, interactive, tobackup)
 
         if targetsubs:
             # Revert the subrepos on the revert list
@@ -3234,7 +3246,8 @@
     """Let extension changing the storage layer prefetch content"""
     pass
 
-def _performrevert(repo, parents, ctx, actions, interactive=False):
+def _performrevert(repo, parents, ctx, actions, interactive=False,
+                   tobackup=None):
     """function that actually perform all the actions computed for revert
 
     This is an independent function to let extension to plug in and react to
@@ -3301,10 +3314,12 @@
         else:
             diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
         originalchunks = patch.parsepatch(diff)
+        operation = 'discard' if node == parent else 'revert'
 
         try:
 
-            chunks, opts = recordfilter(repo.ui, originalchunks)
+            chunks, opts = recordfilter(repo.ui, originalchunks,
+                                        operation=operation)
             if reversehunks:
                 chunks = patch.reversehunks(chunks)
 
@@ -3312,9 +3327,18 @@
             raise error.Abort(_('error parsing patch: %s') % err)
 
         newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
+        if tobackup is None:
+            tobackup = set()
         # Apply changes
         fp = stringio()
         for c in chunks:
+            # Create a backup file only if this hunk should be backed up
+            if ishunk(c) and c.header.filename() in tobackup:
+                abs = c.header.filename()
+                target = repo.wjoin(abs)
+                bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
+                util.copyfile(target, bakname)
+                tobackup.remove(abs)
             c.write(fp)
         dopatch = fp.tell()
         fp.seek(0)
@@ -3518,7 +3542,7 @@
     def __init__(self, repo, name):
         self._repo = repo
         self._suffix = '.backup.%s.%d' % (name, id(self))
-        repo.dirstate._savebackup(repo.currenttransaction(), self._suffix)
+        repo.dirstate.savebackup(repo.currenttransaction(), self._suffix)
         self._active = True
         self._closed = False
 
@@ -3536,13 +3560,13 @@
                    % self._suffix)
             raise error.Abort(msg)
 
-        self._repo.dirstate._clearbackup(self._repo.currenttransaction(),
+        self._repo.dirstate.clearbackup(self._repo.currenttransaction(),
                                          self._suffix)
         self._active = False
         self._closed = True
 
     def _abort(self):
-        self._repo.dirstate._restorebackup(self._repo.currenttransaction(),
+        self._repo.dirstate.restorebackup(self._repo.currenttransaction(),
                                            self._suffix)
         self._active = False
 
--- a/mercurial/commands.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/commands.py	Mon Jul 18 23:28:14 2016 -0500
@@ -59,6 +59,7 @@
     obsolete,
     patch,
     phases,
+    policy,
     pvec,
     repair,
     revlog,
@@ -215,7 +216,7 @@
 debugrevlogopts = [
     ('c', 'changelog', False, _('open changelog')),
     ('m', 'manifest', False, _('open manifest')),
-    ('', 'dir', False, _('open directory manifest')),
+    ('', 'dir', '', _('open directory manifest')),
 ]
 
 # Commands start here, listed alphabetically
@@ -468,26 +469,27 @@
 
         lines = fctx.annotate(follow=follow, linenumber=linenumber,
                               diffopts=diffopts)
+        if not lines:
+            continue
         formats = []
         pieces = []
 
         for f, sep in funcmap:
             l = [f(n) for n, dummy in lines]
-            if l:
-                if fm:
-                    formats.append(['%s' for x in l])
-                else:
-                    sizes = [encoding.colwidth(x) for x in l]
-                    ml = max(sizes)
-                    formats.append([sep + ' ' * (ml - w) + '%s' for w in sizes])
-                pieces.append(l)
+            if fm:
+                formats.append(['%s' for x in l])
+            else:
+                sizes = [encoding.colwidth(x) for x in l]
+                ml = max(sizes)
+                formats.append([sep + ' ' * (ml - w) + '%s' for w in sizes])
+            pieces.append(l)
 
         for f, p, l in zip(zip(*formats), zip(*pieces), lines):
             fm.startitem()
             fm.write(fields, "".join(f), *p)
             fm.write('line', ": %s", l[1])
 
-        if lines and not lines[-1][1].endswith('\n'):
+        if not lines[-1][1].endswith('\n'):
             fm.plain('\n')
 
     fm.end()
@@ -2089,51 +2091,56 @@
         gen = exchange.readbundle(ui, f, bundlepath)
         if isinstance(gen, bundle2.unbundle20):
             return _debugbundle2(ui, gen, all=all, **opts)
-        if all:
-            ui.write(("format: id, p1, p2, cset, delta base, len(delta)\n"))
-
-            def showchunks(named):
-                ui.write("\n%s\n" % named)
-                chain = None
-                while True:
-                    chunkdata = gen.deltachunk(chain)
-                    if not chunkdata:
-                        break
-                    node = chunkdata['node']
-                    p1 = chunkdata['p1']
-                    p2 = chunkdata['p2']
-                    cs = chunkdata['cs']
-                    deltabase = chunkdata['deltabase']
-                    delta = chunkdata['delta']
-                    ui.write("%s %s %s %s %s %s\n" %
-                             (hex(node), hex(p1), hex(p2),
-                              hex(cs), hex(deltabase), len(delta)))
-                    chain = node
-
-            chunkdata = gen.changelogheader()
-            showchunks("changelog")
-            chunkdata = gen.manifestheader()
-            showchunks("manifest")
-            while True:
-                chunkdata = gen.filelogheader()
-                if not chunkdata:
-                    break
-                fname = chunkdata['filename']
-                showchunks(fname)
-        else:
-            if isinstance(gen, bundle2.unbundle20):
-                raise error.Abort(_('use debugbundle2 for this file'))
-            chunkdata = gen.changelogheader()
+        _debugchangegroup(ui, gen, all=all, **opts)
+
+def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
+    indent_string = ' ' * indent
+    if all:
+        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
+                 % indent_string)
+
+        def showchunks(named):
+            ui.write("\n%s%s\n" % (indent_string, named))
             chain = None
             while True:
                 chunkdata = gen.deltachunk(chain)
                 if not chunkdata:
                     break
                 node = chunkdata['node']
-                ui.write("%s\n" % hex(node))
+                p1 = chunkdata['p1']
+                p2 = chunkdata['p2']
+                cs = chunkdata['cs']
+                deltabase = chunkdata['deltabase']
+                delta = chunkdata['delta']
+                ui.write("%s%s %s %s %s %s %s\n" %
+                         (indent_string, hex(node), hex(p1), hex(p2),
+                          hex(cs), hex(deltabase), len(delta)))
                 chain = node
 
-def _debugbundle2(ui, gen, **opts):
+        chunkdata = gen.changelogheader()
+        showchunks("changelog")
+        chunkdata = gen.manifestheader()
+        showchunks("manifest")
+        while True:
+            chunkdata = gen.filelogheader()
+            if not chunkdata:
+                break
+            fname = chunkdata['filename']
+            showchunks(fname)
+    else:
+        if isinstance(gen, bundle2.unbundle20):
+            raise error.Abort(_('use debugbundle2 for this file'))
+        chunkdata = gen.changelogheader()
+        chain = None
+        while True:
+            chunkdata = gen.deltachunk(chain)
+            if not chunkdata:
+                break
+            node = chunkdata['node']
+            ui.write("%s%s\n" % (indent_string, hex(node)))
+            chain = node
+
+def _debugbundle2(ui, gen, all=None, **opts):
     """lists the contents of a bundle2"""
     if not isinstance(gen, bundle2.unbundle20):
         raise error.Abort(_('not a bundle2 file'))
@@ -2143,15 +2150,7 @@
         if part.type == 'changegroup':
             version = part.params.get('version', '01')
             cg = changegroup.getunbundler(version, part, 'UN')
-            chunkdata = cg.changelogheader()
-            chain = None
-            while True:
-                chunkdata = cg.deltachunk(chain)
-                if not chunkdata:
-                    break
-                node = chunkdata['node']
-                ui.write("    %s\n" % hex(node))
-                chain = node
+            _debugchangegroup(ui, cg, all=all, indent=4, **opts)
 
 @command('debugcreatestreamclonebundle', [], 'FILE')
 def debugcreatestreamclonebundle(ui, repo, fname):
@@ -2301,7 +2300,9 @@
 @command('debugdata', debugrevlogopts, _('-c|-m|FILE REV'))
 def debugdata(ui, repo, file_, rev=None, **opts):
     """dump the contents of a data file revision"""
-    if opts.get('changelog') or opts.get('manifest'):
+    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
+        if rev is not None:
+            raise error.CommandError('debugdata', _('invalid arguments'))
         file_, rev = None, file_
     elif rev is None:
         raise error.CommandError('debugdata', _('invalid arguments'))
@@ -2524,15 +2525,16 @@
                             break
             if ignored:
                 if ignored == nf:
-                    ui.write("%s is ignored\n" % f)
+                    ui.write(_("%s is ignored\n") % f)
                 else:
-                    ui.write("%s is ignored because of containing folder %s\n"
+                    ui.write(_("%s is ignored because of "
+                               "containing folder %s\n")
                              % (f, ignored))
                 ignorefile, lineno, line = ignoredata
-                ui.write("(ignore rule in %s, line %d: '%s')\n"
+                ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                          % (ignorefile, lineno, line))
             else:
-                ui.write("%s is not ignored\n" % f)
+                ui.write(_("%s is not ignored\n") % f)
 
 @command('debugindex', debugrevlogopts +
     [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
@@ -2563,12 +2565,12 @@
         break
 
     if format == 0:
-        ui.write("   rev    offset  length " + basehdr + " linkrev"
-                 " %s %s p2\n" % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
+        ui.write(("   rev    offset  length " + basehdr + " linkrev"
+                 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
     elif format == 1:
-        ui.write("   rev flag   offset   length"
+        ui.write(("   rev flag   offset   length"
                  "     size " + basehdr + "   link     p1     p2"
-                 " %s\n" % "nodeid".rjust(idlen))
+                 " %s\n") % "nodeid".rjust(idlen))
 
     for i in r:
         node = r.node(i)
@@ -2743,7 +2745,16 @@
     fm.write('pythonlib', _("checking Python lib (%s)...\n"),
              os.path.dirname(os.__file__))
 
+    # hg version
+    hgver = util.version()
+    fm.write('hgver', _("checking Mercurial version (%s)\n"),
+             hgver.split('+')[0])
+    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
+             '+'.join(hgver.split('+')[1:]))
+
     # compiled modules
+    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
+             policy.policy)
     fm.write('hgmodules', _("checking installed modules (%s)...\n"),
              os.path.dirname(__file__))
 
@@ -3022,13 +3033,13 @@
                     else:
                         locker = 'user %s, process %s, host %s' \
                                  % (user, pid, host)
-                ui.write("%-6s %s (%ds)\n" % (name + ":", locker, age))
+                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                 return 1
             except OSError as e:
                 if e.errno != errno.ENOENT:
                     raise
 
-        ui.write("%-6s free\n" % (name + ":"))
+        ui.write(("%-6s free\n") % (name + ":"))
         return 0
 
     held += report(repo.svfs, "lock", repo.lock)
@@ -3321,8 +3332,8 @@
 
     if opts.get("dump"):
         numrevs = len(r)
-        ui.write("# rev p1rev p2rev start   end deltastart base   p1   p2"
-                 " rawsize totalsize compression heads chainlen\n")
+        ui.write(("# rev p1rev p2rev start   end deltastart base   p1   p2"
+                 " rawsize totalsize compression heads chainlen\n"))
         ts = 0
         heads = set()
 
@@ -3511,18 +3522,19 @@
         ui.note(revset.prettyformat(tree), "\n")
         newtree = revset.expandaliases(ui, tree)
         if newtree != tree:
-            ui.note("* expanded:\n", revset.prettyformat(newtree), "\n")
+            ui.note(("* expanded:\n"), revset.prettyformat(newtree), "\n")
         tree = newtree
         newtree = revset.foldconcat(tree)
         if newtree != tree:
-            ui.note("* concatenated:\n", revset.prettyformat(newtree), "\n")
+            ui.note(("* concatenated:\n"), revset.prettyformat(newtree), "\n")
         if opts["optimize"]:
-            weight, optimizedtree = revset.optimize(newtree, True)
-            ui.note("* optimized:\n", revset.prettyformat(optimizedtree), "\n")
+            optimizedtree = revset.optimize(newtree)
+            ui.note(("* optimized:\n"),
+                    revset.prettyformat(optimizedtree), "\n")
     func = revset.match(ui, expr, repo)
     revs = func(repo)
     if ui.verbose:
-        ui.note("* set:\n", revset.prettyformatset(revs), "\n")
+        ui.note(("* set:\n"), revset.prettyformatset(revs), "\n")
     for c in revs:
         ui.write("%s\n" % c)
 
@@ -3677,7 +3689,7 @@
         ui.note(templater.prettyformat(tree), '\n')
         newtree = templater.expandaliases(tree, aliases)
         if newtree != tree:
-            ui.note("* expanded:\n", templater.prettyformat(newtree), '\n')
+            ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
 
     mapfile = None
     if revs is None:
@@ -4406,7 +4418,7 @@
             if not opts.get('files_with_matches'):
                 ui.write(sep, label='grep.sep')
                 if not opts.get('text') and binary():
-                    ui.write(" Binary file matches")
+                    ui.write(_(" Binary file matches"))
                 else:
                     for s, label in l:
                         ui.write(s, label=label)
@@ -4570,7 +4582,10 @@
     Returns 0 if successful.
     """
 
-    textwidth = min(ui.termwidth(), 80) - 2
+    textwidth = ui.configint('ui', 'textwidth', 78)
+    termwidth = ui.termwidth() - 2
+    if textwidth <= 0 or termwidth < textwidth:
+        textwidth = termwidth
 
     keep = opts.get('system') or []
     if len(keep) == 0:
@@ -5773,6 +5788,9 @@
     If SOURCE is omitted, the 'default' path will be used.
     See :hg:`help urls` for more information.
 
+    Specifying ``.`` as a bookmark is equivalent to specifying the active
+    bookmark's name.
+
     Returns 0 on success, 1 if an update had unresolved files.
     """
     source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
@@ -5794,6 +5812,7 @@
             remotebookmarks = other.listkeys('bookmarks')
             pullopargs['remotebookmarks'] = remotebookmarks
             for b in opts['bookmark']:
+                b = repo._bookmarks.expandname(b)
                 if b not in remotebookmarks:
                     raise error.Abort(_('remote bookmark %s not found!') % b)
                 revs.append(remotebookmarks[b])
@@ -5926,6 +5945,15 @@
         if not revs:
             raise error.Abort(_("specified revisions evaluate to an empty set"),
                              hint=_("use different revision arguments"))
+    elif path.pushrev:
+        # It doesn't make any sense to specify ancestor revisions. So limit
+        # to DAG heads to make discovery simpler.
+        expr = revset.formatspec('heads(%r)', path.pushrev)
+        revs = scmutil.revrange(repo, [expr])
+        revs = [repo[rev].node() for rev in revs]
+        if not revs:
+            raise error.Abort(_('default push revset for path evaluates to an '
+                                'empty set'))
 
     repo._subtoppath = dest
     try:
@@ -6300,7 +6328,10 @@
     related method.
 
     Modified files are saved with a .orig suffix before reverting.
-    To disable these backups, use --no-backup.
+    To disable these backups, use --no-backup. It is possible to store
+    the backup files in a custom directory relative to the root of the
+    repository by setting the ``ui.origbackuppath`` configuration
+    option.
 
     See :hg:`help dates` for a list of formats valid for -d/--date.
 
@@ -6380,6 +6411,11 @@
       commit transaction if it isn't checked out. Use --force to
       override this protection.
 
+      The rollback command can be entirely disabled by setting the
+      ``ui.rollback`` configuration setting to false. If you're here
+      because you want to use rollback and it's disabled, you can
+      re-enable the command by setting ``ui.rollback`` to true.
+
     This command is not intended for use on public repositories. Once
     changes are visible for pull by other users, rolling a transaction
     back locally is ineffective (someone else may already have pulled
@@ -6389,6 +6425,9 @@
 
     Returns 0 on success, 1 if no rollback data is available.
     """
+    if not ui.configbool('ui', 'rollback', True):
+        raise error.Abort(_('rollback is disabled because it is unsafe'),
+                          hint=('see `hg help -v rollback` for information'))
     return repo.rollback(dryrun=opts.get('dry_run'),
                          force=opts.get('force'))
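
Two new user-facing knobs appear in commands.py: ``ui.textwidth`` caps the
width used when rendering help text, and ``ui.rollback`` can disable the
rollback command outright. A sketch of the checks, condensed from the hunks
above:

    textwidth = ui.configint('ui', 'textwidth', 78)
    termwidth = ui.termwidth() - 2
    if textwidth <= 0 or termwidth < textwidth:
        textwidth = termwidth      # fall back to the terminal width

    if not ui.configbool('ui', 'rollback', True):
        raise error.Abort(_('rollback is disabled because it is unsafe'))
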
 
--- a/mercurial/commandserver.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/commandserver.py	Mon Jul 18 23:28:14 2016 -0500
@@ -7,9 +7,13 @@
 
 from __future__ import absolute_import
 
-import SocketServer
 import errno
+import gc
 import os
+import random
+import select
+import signal
+import socket
 import struct
 import sys
 import traceback
@@ -178,6 +182,10 @@
 
         self.client = fin
 
+    def cleanup(self):
+        """release and restore resources taken during server session"""
+        pass
+
     def _read(self, size):
         if not size:
             return ''
@@ -229,12 +237,8 @@
             self.repo.ui = self.repo.dirstate._ui = repoui
             self.repo.invalidateall()
 
-        # reset last-print time of progress bar per command
-        # (progbar is singleton, we don't have to do for all uis)
-        if copiedui._progbar:
-            copiedui._progbar.resetstate()
-
         for ui in uis:
+            ui.resetstate()
             # any kind of interaction must use server channels, but chg may
             # replace channels by fully functional tty files. so nontty is
             # enforced only if cin is a channel.
@@ -278,6 +282,9 @@
         hellomsg += 'encoding: ' + encoding.encoding
         hellomsg += '\n'
         hellomsg += 'pid: %d' % util.getpid()
+        if util.safehasattr(os, 'getpgid'):
+            hellomsg += '\n'
+            hellomsg += 'pgid: %d' % os.getpgid(0)
 
         # write the hello msg in -one- chunk
         self.cout.write(hellomsg)
@@ -332,66 +339,193 @@
             sv = server(ui, self.repo, fin, fout)
             return sv.serve()
         finally:
+            sv.cleanup()
             _restoreio(ui, fin, fout)
 
-class _requesthandler(SocketServer.StreamRequestHandler):
-    def handle(self):
-        ui = self.server.ui
-        repo = self.server.repo
-        sv = None
+def _initworkerprocess():
+    # use a different process group from the master process, making this
+    # process pass kernel "is_current_pgrp_orphaned" check so signals like
+    # SIGTSTP, SIGTTIN, SIGTTOU are not ignored.
+    os.setpgid(0, 0)
+    # change random state otherwise forked request handlers would have a
+    # same state inherited from parent.
+    random.seed()
+
+def _serverequest(ui, repo, conn, createcmdserver):
+    fin = conn.makefile('rb')
+    fout = conn.makefile('wb')
+    sv = None
+    try:
+        sv = createcmdserver(repo, conn, fin, fout)
+        try:
+            sv.serve()
+        # handle exceptions that may be raised by the command server. most
+        # of the known exceptions are caught by dispatch.
+        except error.Abort as inst:
+            ui.warn(_('abort: %s\n') % inst)
+        except IOError as inst:
+            if inst.errno != errno.EPIPE:
+                raise
+        except KeyboardInterrupt:
+            pass
+        finally:
+            sv.cleanup()
+    except: # re-raises
+        # also write the traceback to the error channel; otherwise the client
+        # cannot see it, as it goes to the server's stderr by default.
+        if sv:
+            cerr = sv.cerr
+        else:
+            cerr = channeledoutput(fout, 'e')
+        traceback.print_exc(file=cerr)
+        raise
+    finally:
+        fin.close()
         try:
-            sv = server(ui, repo, self.rfile, self.wfile)
-            try:
-                sv.serve()
-            # handle exceptions that may be raised by command server. most of
-            # known exceptions are caught by dispatch.
-            except error.Abort as inst:
-                ui.warn(_('abort: %s\n') % inst)
-            except IOError as inst:
-                if inst.errno != errno.EPIPE:
-                    raise
-            except KeyboardInterrupt:
-                pass
-        except: # re-raises
-            # also write traceback to error channel. otherwise client cannot
-            # see it because it is written to server's stderr by default.
-            if sv:
-                cerr = sv.cerr
-            else:
-                cerr = channeledoutput(self.wfile, 'e')
-            traceback.print_exc(file=cerr)
-            raise
+            fout.close()  # implicit flush() may cause another EPIPE
+        except IOError as inst:
+            if inst.errno != errno.EPIPE:
+                raise
+
+class unixservicehandler(object):
+    """Set of pluggable operations for unix-mode services
+
+    All methods except createcmdserver() are called in the main process.
+    You can't pass a mutable resource back from createcmdserver().
+    """
+
+    pollinterval = None
+
+    def __init__(self, ui):
+        self.ui = ui
+
+    def bindsocket(self, sock, address):
+        util.bindunixsocket(sock, address)
 
-class unixservice(object):
+    def unlinksocket(self, address):
+        os.unlink(address)
+
+    def printbanner(self, address):
+        self.ui.status(_('listening at %s\n') % address)
+        self.ui.flush()  # avoid buffering of status message
+
+    def shouldexit(self):
+        """True if server should shut down; checked per pollinterval"""
+        return False
+
+    def newconnection(self):
+        """Called when main process notices new connection"""
+        pass
+
+    def createcmdserver(self, repo, conn, fin, fout):
+        """Create new command server instance; called in the process that
+        serves for the current connection"""
+        return server(self.ui, repo, fin, fout)
+
+class unixforkingservice(object):
     """
     Listens on unix domain socket and forks server per connection
     """
-    def __init__(self, ui, repo, opts):
+
+    def __init__(self, ui, repo, opts, handler=None):
         self.ui = ui
         self.repo = repo
         self.address = opts['address']
-        if not util.safehasattr(SocketServer, 'UnixStreamServer'):
+        if not util.safehasattr(socket, 'AF_UNIX'):
             raise error.Abort(_('unsupported platform'))
         if not self.address:
             raise error.Abort(_('no socket path specified with --address'))
+        self._servicehandler = handler or unixservicehandler(ui)
+        self._sock = None
+        self._oldsigchldhandler = None
+        self._workerpids = set()  # updated by signal handler; do not iterate
 
     def init(self):
-        class cls(SocketServer.ForkingMixIn, SocketServer.UnixStreamServer):
-            ui = self.ui
-            repo = self.repo
-        self.server = cls(self.address, _requesthandler)
-        self.ui.status(_('listening at %s\n') % self.address)
-        self.ui.flush()  # avoid buffering of status message
+        self._sock = socket.socket(socket.AF_UNIX)
+        self._servicehandler.bindsocket(self._sock, self.address)
+        self._sock.listen(socket.SOMAXCONN)
+        o = signal.signal(signal.SIGCHLD, self._sigchldhandler)
+        self._oldsigchldhandler = o
+        self._servicehandler.printbanner(self.address)
+
+    def _cleanup(self):
+        signal.signal(signal.SIGCHLD, self._oldsigchldhandler)
+        self._sock.close()
+        self._servicehandler.unlinksocket(self.address)
+        # don't kill child processes as they have active clients, just wait
+        self._reapworkers(0)
 
     def run(self):
         try:
-            self.server.serve_forever()
+            self._mainloop()
         finally:
-            os.unlink(self.address)
+            self._cleanup()
+
+    def _mainloop(self):
+        h = self._servicehandler
+        while not h.shouldexit():
+            try:
+                ready = select.select([self._sock], [], [], h.pollinterval)[0]
+                if not ready:
+                    continue
+                conn, _addr = self._sock.accept()
+            except (select.error, socket.error) as inst:
+                if inst.args[0] == errno.EINTR:
+                    continue
+                raise
+
+            pid = os.fork()
+            if pid:
+                try:
+                    self.ui.debug('forked worker process (pid=%d)\n' % pid)
+                    self._workerpids.add(pid)
+                    h.newconnection()
+                finally:
+                    conn.close()  # release handle in parent process
+            else:
+                try:
+                    self._runworker(conn)
+                    conn.close()
+                    os._exit(0)
+                except:  # never return, hence no re-raises
+                    try:
+                        self.ui.traceback(force=True)
+                    finally:
+                        os._exit(255)
+
+    def _sigchldhandler(self, signal, frame):
+        self._reapworkers(os.WNOHANG)
+
+    def _reapworkers(self, options):
+        while self._workerpids:
+            try:
+                pid, _status = os.waitpid(-1, options)
+            except OSError as inst:
+                if inst.errno == errno.EINTR:
+                    continue
+                if inst.errno != errno.ECHILD:
+                    raise
+                # no child processes at all (reaped by other waitpid()?)
+                self._workerpids.clear()
+                return
+            if pid == 0:
+                # no waitable child processes
+                return
+            self.ui.debug('worker process exited (pid=%d)\n' % pid)
+            self._workerpids.discard(pid)
+
+    def _runworker(self, conn):
+        signal.signal(signal.SIGCHLD, self._oldsigchldhandler)
+        _initworkerprocess()
+        h = self._servicehandler
+        try:
+            _serverequest(self.ui, self.repo, conn, h.createcmdserver)
+        finally:
+            gc.collect()  # trigger __del__ since worker process uses os._exit
 
 _servicemap = {
     'pipe': pipeservice,
-    'unix': unixservice,
+    'unix': unixforkingservice,
     }
 
 def createservice(ui, repo, opts):
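
The SocketServer-based unix service is replaced by a hand-rolled forking
server with a pluggable ``unixservicehandler``, so other services can
customize socket setup, shutdown and per-connection server creation. A hedged
sketch of a custom handler (the subclass and its exit condition are made up
for illustration):

    from mercurial import commandserver

    class countinghandler(commandserver.unixservicehandler):
        pollinterval = 1.0        # wake up to re-check shouldexit()

        def __init__(self, ui, maxconnections=10):
            super(countinghandler, self).__init__(ui)
            self._maxconnections = maxconnections
            self._seen = 0

        def newconnection(self):  # called in the main process
            self._seen += 1

        def shouldexit(self):
            return self._seen >= self._maxconnections

    # service = commandserver.unixforkingservice(
    #     ui, repo, {'address': sockpath}, handler=countinghandler(ui))
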
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/compat.h	Mon Jul 18 23:28:14 2016 -0500
@@ -0,0 +1,43 @@
+#ifndef _HG_COMPAT_H_
+#define _HG_COMPAT_H_
+
+#ifdef _WIN32
+#ifdef _MSC_VER
+/* msvc 6.0 has problems */
+#define inline __inline
+#if defined(_WIN64)
+typedef __int64 ssize_t;
+#else
+typedef int ssize_t;
+#endif
+typedef signed char int8_t;
+typedef short int16_t;
+typedef long int32_t;
+typedef __int64 int64_t;
+typedef unsigned char uint8_t;
+typedef unsigned short uint16_t;
+typedef unsigned long uint32_t;
+typedef unsigned __int64 uint64_t;
+#else
+#include <stdint.h>
+#endif
+#else
+/* not windows */
+#include <sys/types.h>
+#if defined __BEOS__ && !defined __HAIKU__
+#include <ByteOrder.h>
+#else
+#include <arpa/inet.h>
+#endif
+#include <inttypes.h>
+#endif
+
+#if defined __hpux || defined __SUNPRO_C || defined _AIX
+#define inline
+#endif
+
+#ifdef __linux
+#define inline __inline
+#endif
+
+#endif
--- a/mercurial/context.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/context.py	Mon Jul 18 23:28:14 2016 -0500
@@ -918,28 +918,25 @@
             return p[1]
         return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
 
-    def annotate(self, follow=False, linenumber=None, diffopts=None):
-        '''returns a list of tuples of (ctx, line) for each line
+    def annotate(self, follow=False, linenumber=False, diffopts=None):
+        '''returns a list of tuples of ((ctx, number), line) for each line
         in the file, where ctx is the filectx of the node where
-        that line was last changed.
-        This returns tuples of ((ctx, linenumber), line) for each line,
-        if "linenumber" parameter is NOT "None".
-        In such tuples, linenumber means one at the first appearance
-        in the managed file.
-        To reduce annotation cost,
-        this returns fixed value(False is used) as linenumber,
-        if "linenumber" parameter is "False".'''
+        that line was last changed; if the linenumber parameter is true,
+        number is the line number at its first appearance in the managed
+        file; otherwise number has the fixed value False.
+        '''
 
-        if linenumber is None:
+        def lines(text):
+            if text.endswith("\n"):
+                return text.count("\n")
+            return text.count("\n") + 1
+
+        if linenumber:
             def decorate(text, rev):
-                return ([rev] * len(text.splitlines()), text)
-        elif linenumber:
-            def decorate(text, rev):
-                size = len(text.splitlines())
-                return ([(rev, i) for i in xrange(1, size + 1)], text)
+                return ([(rev, i) for i in xrange(1, lines(text) + 1)], text)
         else:
             def decorate(text, rev):
-                return ([(rev, False)] * len(text.splitlines()), text)
+                return ([(rev, False)] * lines(text), text)
 
         def pair(parent, child):
             blocks = mdiff.allblocks(parent[1], child[1], opts=diffopts,
--- a/mercurial/copies.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/copies.py	Mon Jul 18 23:28:14 2016 -0500
@@ -484,16 +484,16 @@
             f1r, f2r = f1.linkrev(), f2.linkrev()
 
             if f1r is None:
-                f1 = g1.next()
+                f1 = next(g1)
             if f2r is None:
-                f2 = g2.next()
+                f2 = next(g2)
 
             while True:
                 f1r, f2r = f1.linkrev(), f2.linkrev()
                 if f1r > f2r:
-                    f1 = g1.next()
+                    f1 = next(g1)
                 elif f2r > f1r:
-                    f2 = g2.next()
+                    f2 = next(g2)
                 elif f1 == f2:
                     return f1 # a match
                 elif f1r == f2r or f1r < limit or f2r < limit:
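
The ``g.next()`` calls become ``next(g)``: the builtin works on both Python 2
and Python 3, while generator objects lost their ``.next()`` method in
Python 3. For illustration:

    gen = iter([1, 2, 3])
    assert next(gen) == 1    # portable spelling
    # gen.next()             # Python 2 only; AttributeError on Python 3
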
--- a/mercurial/crecord.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/crecord.py	Mon Jul 18 23:28:14 2016 -0500
@@ -91,6 +91,7 @@
     def allchildren(self):
         "Return a list of all of the direct children of this node"
         raise NotImplementedError("method must be implemented by subclass")
+
     def nextsibling(self):
         """
         Return the closest next item of the same type where there are no items
@@ -110,18 +111,12 @@
     def parentitem(self):
         raise NotImplementedError("method must be implemented by subclass")
 
-
-    def nextitem(self, constrainlevel=True, skipfolded=True):
+    def nextitem(self, skipfolded=True):
         """
-        If constrainLevel == True, return the closest next item
-        of the same type where there are no items of different types between
-        the current item and this closest item.
+        Try to return the next item closest to this item, regardless of the
+        item's type (header, hunk, or hunkline).
 
-        If constrainLevel == False, then try to return the next item
-        closest to this item, regardless of item's type (header, hunk, or
-        HunkLine).
-
-        If skipFolded == True, and the current item is folded, then the child
+        If skipfolded == True, and the current item is folded, then the child
         items that are hidden due to folding will be skipped when determining
         the next item.
 
@@ -131,9 +126,7 @@
             itemfolded = self.folded
         except AttributeError:
             itemfolded = False
-        if constrainlevel:
-            return self.nextsibling()
-        elif skipfolded and itemfolded:
+        if skipfolded and itemfolded:
             nextitem = self.nextsibling()
             if nextitem is None:
                 try:
@@ -164,43 +157,31 @@
             except AttributeError: # parent and/or grandparent was None
                 return None
 
-    def previtem(self, constrainlevel=True, skipfolded=True):
+    def previtem(self):
         """
-        If constrainLevel == True, return the closest previous item
-        of the same type where there are no items of different types between
-        the current item and this closest item.
-
-        If constrainLevel == False, then try to return the previous item
-        closest to this item, regardless of item's type (header, hunk, or
-        HunkLine).
-
-        If skipFolded == True, and the current item is folded, then the items
-        that are hidden due to folding will be skipped when determining the
-        next item.
+        Try to return the previous item closest to this item, regardless of
+        the item's type (header, hunk, or hunkline).
 
         If it is not possible to get the previous item, return None.
         """
-        if constrainlevel:
-            return self.prevsibling()
-        else:
-            # try previous sibling's last child's last child,
-            # else try previous sibling's last child, else try previous sibling
-            prevsibling = self.prevsibling()
-            if prevsibling is not None:
-                prevsiblinglastchild = prevsibling.lastchild()
-                if ((prevsiblinglastchild is not None) and
-                    not prevsibling.folded):
-                    prevsiblinglclc = prevsiblinglastchild.lastchild()
-                    if ((prevsiblinglclc is not None) and
-                        not prevsiblinglastchild.folded):
-                        return prevsiblinglclc
-                    else:
-                        return prevsiblinglastchild
+        # try previous sibling's last child's last child,
+        # else try previous sibling's last child, else try previous sibling
+        prevsibling = self.prevsibling()
+        if prevsibling is not None:
+            prevsiblinglastchild = prevsibling.lastchild()
+            if ((prevsiblinglastchild is not None) and
+                not prevsibling.folded):
+                prevsiblinglclc = prevsiblinglastchild.lastchild()
+                if ((prevsiblinglclc is not None) and
+                    not prevsiblinglastchild.folded):
+                    return prevsiblinglclc
                 else:
-                    return prevsibling
+                    return prevsiblinglastchild
+            else:
+                return prevsibling
 
-            # try parent (or None)
-            return self.parentitem()
+        # try parent (or None)
+        return self.parentitem()
 
 class patch(patchnode, list): # todo: rename patchroot
     """
@@ -236,7 +217,6 @@
         self.neverunfolded = True
         self.hunks = [uihunk(h, self) for h in self.hunks]
 
-
     def prettystr(self):
         x = stringio()
         self.pretty(x)
@@ -392,6 +372,7 @@
     def allchildren(self):
         "return a list of all of the direct children of this node"
         return self.changedlines
+
     def countchanges(self):
         """changedlines -> (n+,n-)"""
         add = len([l for l in self.changedlines if l.applied
@@ -455,14 +436,12 @@
 
     def __getattr__(self, name):
         return getattr(self._hunk, name)
+
     def __repr__(self):
         return '<hunk %r@%d>' % (self.filename(), self.fromline)
 
-def filterpatch(ui, chunks, chunkselector, operation=None):
+def filterpatch(ui, chunks, chunkselector):
     """interactively filter patch chunks into applied-only chunks"""
-
-    if operation is None:
-        operation = _('confirm')
     chunks = list(chunks)
     # convert chunks list into structure suitable for displaying/modifying
     # with curses.  create a list of headers only.
@@ -603,13 +582,10 @@
         the last hunkline of the hunk prior to the selected hunk.  or, if
         the first hunkline of a hunk is currently selected, then select the
         hunk itself.
-
-        if the currently selected item is already at the top of the screen,
-        scroll the screen down to show the new-selected item.
         """
         currentitem = self.currentselecteditem
 
-        nextitem = currentitem.previtem(constrainlevel=False)
+        nextitem = currentitem.previtem()
 
         if nextitem is None:
             # if no parent item (i.e. currentitem is the first header), then
@@ -623,13 +599,10 @@
         select (if possible) the previous item on the same level as the
         currently selected item.  otherwise, select (if possible) the
         parent-item of the currently selected item.
-
-        if the currently selected item is already at the top of the screen,
-        scroll the screen down to show the new-selected item.
         """
         currentitem = self.currentselecteditem
-        nextitem = currentitem.previtem()
-        # if there's no previous item on this level, try choosing the parent
+        nextitem = currentitem.prevsibling()
+        # if there's no previous sibling, try choosing the parent
         if nextitem is None:
             nextitem = currentitem.parentitem()
         if nextitem is None:
@@ -646,14 +619,11 @@
         the first hunkline of the selected hunk.  or, if the last hunkline of
         a hunk is currently selected, then select the next hunk, if one exists,
         or if not, the next header if one exists.
-
-        if the currently selected item is already at the bottom of the screen,
-        scroll the screen up to show the new-selected item.
         """
         #self.startprintline += 1 #debug
         currentitem = self.currentselecteditem
 
-        nextitem = currentitem.nextitem(constrainlevel=False)
+        nextitem = currentitem.nextitem()
         # if there's no next item, keep the selection as-is
         if nextitem is None:
             nextitem = currentitem
@@ -662,24 +632,21 @@
 
     def downarrowshiftevent(self):
         """
-        if the cursor is already at the bottom chunk, scroll the screen up and
-        move the cursor-position to the subsequent chunk.  otherwise, only move
-        the cursor position down one chunk.
+        select (if possible) the next item on the same level as the currently
+        selected item.  otherwise, select (if possible) the next item on the
+        same level as the parent item of the currently selected item.
         """
-        # todo: update docstring
-
         currentitem = self.currentselecteditem
-        nextitem = currentitem.nextitem()
-        # if there's no previous item on this level, try choosing the parent's
-        # nextitem.
+        nextitem = currentitem.nextsibling()
+        # if there's no next sibling, try choosing the parent's nextsibling
         if nextitem is None:
             try:
-                nextitem = currentitem.parentitem().nextitem()
+                nextitem = currentitem.parentitem().nextsibling()
             except AttributeError:
-                # parentitem returned None, so nextitem() can't be called
+                # parentitem returned None, so nextsibling() can't be called
                 nextitem = None
         if nextitem is None:
-            # if no next item on parent-level, then no change...
+            # if parent has no next sibling, then no change...
             nextitem = currentitem
 
         self.currentselecteditem = nextitem
@@ -766,7 +733,6 @@
             # negative values scroll in pgup direction
             self.scrolllines(selstart - padstartbuffered)
 
-
     def scrolllines(self, numlines):
         "scroll the screen up (down) by numlines when numlines >0 (<0)."
         self.firstlineofpadtoprint += numlines
@@ -894,7 +860,6 @@
         if isinstance(item, (uiheader, uihunk)):
             item.folded = not item.folded
 
-
     def alignstring(self, instr, window):
         """
         add whitespace to the end of a string in order to make it fill
@@ -1133,7 +1098,6 @@
         lineprefix = " "*self.hunkindentnumchars + checkbox
         frtoline = "   " + hunk.getfromtoline().strip("\n")
 
-
         outstr += self.printstring(self.chunkpad, lineprefix, towin=towin,
                                    align=False) # add uncolored checkbox/indent
         outstr += self.printstring(self.chunkpad, frtoline, pair=colorpair,
@@ -1377,7 +1341,7 @@
                       F : fold / unfold parent item and all of its ancestors
                       m : edit / resume editing the commit message
                       e : edit the currently selected hunk
-                      a : toggle amend mode (hg rev >= 2.2), only with commit -i
+                      a : toggle amend mode, only with commit -i
                       c : confirm selected changes
                       r : review/edit and confirm selected changes
                       q : quit without confirming (no changes will be made)
--- a/mercurial/demandimport.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/demandimport.py	Mon Jul 18 23:28:14 2016 -0500
@@ -188,15 +188,23 @@
             if globalname and isinstance(symbol, _demandmod):
                 symbol._addref(globalname)
 
+        def chainmodules(rootmod, modname):
+            # recurse down the module chain, and return the leaf module
+            mod = rootmod
+            for comp in modname.split('.')[1:]:
+                if getattr(mod, comp, nothing) is nothing:
+                    setattr(mod, comp,
+                            _demandmod(comp, mod.__dict__, mod.__dict__))
+                mod = getattr(mod, comp)
+            return mod
+
         if level >= 0:
-            # The "from a import b,c,d" or "from .a import b,c,d"
-            # syntax gives errors with some modules for unknown
-            # reasons. Work around the problem.
             if name:
-                return _hgextimport(_origimport, name, globals, locals,
-                                    fromlist, level)
-
-            if _pypy:
+                # "from a import b" or "from .a import b" style
+                rootmod = _hgextimport(_origimport, name, globals, locals,
+                                       level=level)
+                mod = chainmodules(rootmod, name)
+            elif _pypy:
                 # PyPy's __import__ throws an exception if invoked
                 # with an empty name and no fromlist.  Recreate the
                 # desired behaviour by hand.
@@ -220,12 +228,7 @@
         # But, we still need to support lazy loading of standard library and 3rd
         # party modules. So handle level == -1.
         mod = _hgextimport(_origimport, name, globals, locals)
-        # recurse down the module chain
-        for comp in name.split('.')[1:]:
-            if getattr(mod, comp, nothing) is nothing:
-                setattr(mod, comp,
-                        _demandmod(comp, mod.__dict__, mod.__dict__))
-            mod = getattr(mod, comp)
+        mod = chainmodules(mod, name)
 
         for x in fromlist:
             processfromitem(mod, x)
--- a/mercurial/destutil.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/destutil.py	Mon Jul 18 23:28:14 2016 -0500
@@ -95,6 +95,10 @@
     wc = repo[None]
     movemark = node = None
     currentbranch = wc.branch()
+
+    if clean:
+        currentbranch = repo['.'].branch()
+
     if currentbranch in repo.branchmap():
         heads = repo.branchheads(currentbranch)
         if heads:
--- a/mercurial/dirstate.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/dirstate.py	Mon Jul 18 23:28:14 2016 -0500
@@ -74,6 +74,8 @@
                 raise
     return (vfs(filename), False)
 
+_token = object()
+
 class dirstate(object):
 
     def __init__(self, opener, ui, root, validate):
@@ -365,7 +367,7 @@
 
     def setbranch(self, branch):
         self._branch = encoding.fromlocal(branch)
-        f = self._opener('branch', 'w', atomictemp=True)
+        f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
         try:
             f.write(self._branch + '\n')
             f.close()
@@ -580,6 +582,8 @@
             del self._map[f]
             if f in self._nonnormalset:
                 self._nonnormalset.remove(f)
+            if f in self._copymap:
+                del self._copymap[f]
 
     def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
         if exists is None:
@@ -688,16 +692,15 @@
         self._pl = (parent, nullid)
         self._dirty = True
 
-    def write(self, tr=False):
+    def write(self, tr=_token):
         if not self._dirty:
             return
 
         filename = self._filename
-        if tr is False: # not explicitly specified
-            if (self._ui.configbool('devel', 'all-warnings')
-                or self._ui.configbool('devel', 'check-dirstate-write')):
-                self._ui.develwarn('use dirstate.write with '
-                                   'repo.currenttransaction()')
+        if tr is _token: # not explicitly specified
+            self._ui.deprecwarn('use dirstate.write with '
+                               'repo.currenttransaction()',
+                               '3.9')
 
             if self._opener.lexists(self._pendingfilename):
                 # if pending file already exists, in-memory changes
@@ -727,7 +730,7 @@
                                 self._writedirstate, location='plain')
             return
 
-        st = self._opener(filename, "w", atomictemp=True)
+        st = self._opener(filename, "w", atomictemp=True, checkambig=True)
         self._writedirstate(st)
 
     def _writedirstate(self, st):
@@ -1206,14 +1209,16 @@
         else:
             return self._filename
 
-    def _savebackup(self, tr, suffix):
+    def savebackup(self, tr, suffix='', prefix=''):
         '''Save current dirstate into backup file with suffix'''
+        assert len(suffix) > 0 or len(prefix) > 0
         filename = self._actualfilename(tr)
 
         # use '_writedirstate' instead of 'write' to write changes certainly,
         # because the latter omits writing out if transaction is running.
         # output file will be used to create backup of dirstate at this point.
-        self._writedirstate(self._opener(filename, "w", atomictemp=True))
+        self._writedirstate(self._opener(filename, "w", atomictemp=True,
+                                         checkambig=True))
 
         if tr:
             # ensure that subsequent tr.writepending returns True for
@@ -1227,17 +1232,22 @@
             # end of this transaction
             tr.registertmp(filename, location='plain')
 
-        self._opener.write(filename + suffix, self._opener.tryread(filename))
+        self._opener.write(prefix + self._filename + suffix,
+                           self._opener.tryread(filename))
 
-    def _restorebackup(self, tr, suffix):
+    def restorebackup(self, tr, suffix='', prefix=''):
         '''Restore dirstate by backup file with suffix'''
+        assert len(suffix) > 0 or len(prefix) > 0
         # this "invalidate()" prevents "wlock.release()" from writing
         # changes of dirstate out after restoring from backup file
         self.invalidate()
         filename = self._actualfilename(tr)
-        self._opener.rename(filename + suffix, filename)
+        # using self._filename to avoid having "pending" in the backup filename
+        self._opener.rename(prefix + self._filename + suffix, filename,
+                            checkambig=True)
 
-    def _clearbackup(self, tr, suffix):
+    def clearbackup(self, tr, suffix='', prefix=''):
         '''Clear backup file with suffix'''
-        filename = self._actualfilename(tr)
-        self._opener.unlink(filename + suffix)
+        assert len(suffix) > 0 or len(prefix) > 0
+        # using self._filename to avoid having "pending" in the backup filename
+        self._opener.unlink(prefix + self._filename + suffix)
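The module-level ``_token = object()`` sentinel lets ``dirstate.write()`` tell "the caller passed no transaction at all" apart from "the caller explicitly passed None", so the deprecation warning fires only for the legacy calling style. A minimal sketch of the sentinel-default pattern with generic names (not Mercurial's API)::

    _token = object()  # unique object: no caller can pass it by accident

    def write(tr=_token):
        if tr is _token:
            # the argument was omitted entirely: warn about the old style
            print('deprecated: pass the current transaction to write()')
            tr = None
        # ... continue, treating tr (possibly None) as the real value ...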
--- a/mercurial/dispatch.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/dispatch.py	Mon Jul 18 23:28:14 2016 -0500
@@ -384,7 +384,7 @@
         self.cmdname = ''
         self.definition = definition
         self.fn = None
-        self.args = []
+        self.givenargs = []
         self.opts = []
         self.help = ''
         self.badalias = None
@@ -432,7 +432,7 @@
                              % (self.name, inst))
             return
         self.cmdname = cmd = args.pop(0)
-        args = map(util.expandpath, args)
+        self.givenargs = args
 
         for invalidarg in ("--cwd", "-R", "--repository", "--repo", "--config"):
             if _earlygetopt([invalidarg], args):
@@ -448,7 +448,6 @@
             else:
                 self.fn, self.opts = tableentry
 
-            self.args = aliasargs(self.fn, args)
             if self.help.startswith("hg " + cmd):
                 # drop prefix in old-style help lines so hg shows the alias
                 self.help = self.help[4 + len(cmd):]
@@ -462,6 +461,11 @@
             self.badalias = (_("alias '%s' resolves to ambiguous command '%s'")
                              % (self.name, cmd))
 
+    @property
+    def args(self):
+        args = map(util.expandpath, self.givenargs)
+        return aliasargs(self.fn, args)
+
     def __getattr__(self, name):
         adefaults = {'norepo': True, 'optionalrepo': False, 'inferrepo': False}
         if name not in adefaults:
@@ -629,10 +633,16 @@
     # run pre-hook, and abort if it fails
     hook.hook(lui, repo, "pre-%s" % cmd, True, args=" ".join(fullargs),
               pats=cmdpats, opts=cmdoptions)
-    ret = _runcommand(ui, options, cmd, d)
-    # run post-hook, passing command result
-    hook.hook(lui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
-              result=ret, pats=cmdpats, opts=cmdoptions)
+    try:
+        ret = _runcommand(ui, options, cmd, d)
+        # run post-hook, passing command result
+        hook.hook(lui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
+                  result=ret, pats=cmdpats, opts=cmdoptions)
+    except Exception:
+        # run failure hook and re-raise
+        hook.hook(lui, repo, "fail-%s" % cmd, False, args=" ".join(fullargs),
+                  pats=cmdpats, opts=cmdoptions)
+        raise
     return ret
 
 def _getlocal(ui, rpath, wd=None):
@@ -660,12 +670,8 @@
 
     return path, lui
 
-def _checkshellalias(lui, ui, args, precheck=True):
-    """Return the function to run the shell alias, if it is required
-
-    'precheck' is whether this function is invoked before adding
-    aliases or not.
-    """
+def _checkshellalias(lui, ui, args):
+    """Return the function to run the shell alias, if it is required"""
     options = {}
 
     try:
@@ -676,16 +682,11 @@
     if not args:
         return
 
-    if precheck:
-        strict = True
-        cmdtable = commands.table.copy()
-        addaliases(lui, cmdtable)
-    else:
-        strict = False
-        cmdtable = commands.table
+    cmdtable = commands.table
 
     cmd = args[0]
     try:
+        strict = ui.configbool("ui", "strict")
         aliases, entry = cmdutil.findcmd(cmd, cmdtable, strict)
     except (error.AmbiguousCommand, error.UnknownCommand):
         return
@@ -735,12 +736,6 @@
     rpath = _earlygetopt(["-R", "--repository", "--repo"], args)
     path, lui = _getlocal(ui, rpath)
 
-    # Now that we're operating in the right directory/repository with
-    # the right config settings, check for shell aliases
-    shellaliasfn = _checkshellalias(lui, ui, args)
-    if shellaliasfn:
-        return shellaliasfn()
-
     # Configure extensions in phases: uisetup, extsetup, cmdtable, and
     # reposetup. Programs like TortoiseHg will call _dispatch several
     # times so we keep track of configured extensions in _loaded.
@@ -762,13 +757,11 @@
 
     addaliases(lui, commands.table)
 
-    if not lui.configbool("ui", "strict"):
-        # All aliases and commands are completely defined, now.
-        # Check abbreviation/ambiguity of shell alias again, because shell
-        # alias may cause failure of "_parse" (see issue4355)
-        shellaliasfn = _checkshellalias(lui, ui, args, precheck=False)
-        if shellaliasfn:
-            return shellaliasfn()
+    # All aliases and commands are completely defined, now.
+    # Check abbreviation/ambiguity of shell alias.
+    shellaliasfn = _checkshellalias(lui, ui, args)
+    if shellaliasfn:
+        return shellaliasfn()
 
     # check for fallback encoding
     fallback = lui.config('ui', 'fallbackencoding')
@@ -825,7 +818,7 @@
 
     if cmdoptions.get('insecure', False):
         for ui_ in uis:
-            ui_.setconfig('web', 'cacerts', '!', '--insecure')
+            ui_.insecureconnections = True
 
     if options['version']:
         return commands.version_(ui)
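The ``fail-<command>`` hook is wired in with a plain try/except around the command runner: on any exception the failure hook runs and the exception is re-raised, so exit codes and traceback handling stay unchanged. A generic sketch of the pattern; ``runcommand`` and ``runhook`` are hypothetical callables standing in for Mercurial's internals::

    def runwithhooks(cmd, runcommand, runhook):
        runhook('pre-%s' % cmd)
        try:
            ret = runcommand()
            # success: the post hook sees the command's result
            runhook('post-%s' % cmd, result=ret)
        except Exception:
            # failure: give the fail hook a chance, then propagate unchanged
            runhook('fail-%s' % cmd)
            raise
        return ret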
--- a/mercurial/error.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/error.py	Mon Jul 18 23:28:14 2016 -0500
@@ -15,12 +15,17 @@
 
 # Do not import anything here, please
 
-class HintException(Exception):
+class Hint(object):
+    """Mix-in to provide a hint of an error
+
+    This should come first in the inheritance list to consume a hint and
+    pass remaining arguments to the exception class.
+    """
     def __init__(self, *args, **kw):
-        Exception.__init__(self, *args)
-        self.hint = kw.get('hint')
+        self.hint = kw.pop('hint', None)
+        super(Hint, self).__init__(*args, **kw)
 
-class RevlogError(HintException):
+class RevlogError(Hint, Exception):
     pass
 
 class FilteredIndexError(IndexError):
@@ -50,10 +55,10 @@
 class CommandError(Exception):
     """Exception raised on errors in parsing the command line."""
 
-class InterventionRequired(HintException):
+class InterventionRequired(Hint, Exception):
     """Exception raised when a command requires human intervention."""
 
-class Abort(HintException):
+class Abort(Hint, Exception):
     """Raised if a command needs to print an error and exit."""
 
 class HookLoadError(Abort):
@@ -87,10 +92,10 @@
         from .i18n import _
         Abort.__init__(self, _('response expected'))
 
-class OutOfBandError(HintException):
+class OutOfBandError(Hint, Exception):
     """Exception raised when a remote repo reports failure"""
 
-class ParseError(HintException):
+class ParseError(Hint, Exception):
     """Raised when parsing config files and {rev,file}sets (msg[, pos])"""
 
 class UnknownIdentifier(ParseError):
@@ -102,7 +107,7 @@
         self.function = function
         self.symbols = symbols
 
-class RepoError(HintException):
+class RepoError(Hint, Exception):
     pass
 
 class RepoLookupError(RepoError):
@@ -235,3 +240,6 @@
 
 class UnsupportedBundleSpecification(Exception):
     """error raised when a bundle specification is not supported."""
+
+class CorruptedState(Exception):
+    """error raised when a command is not able to read its state from file"""
--- a/mercurial/exchange.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/exchange.py	Mon Jul 18 23:28:14 2016 -0500
@@ -8,6 +8,7 @@
 from __future__ import absolute_import
 
 import errno
+import hashlib
 
 from .i18n import _
 from .node import (
@@ -857,14 +858,14 @@
         try:
             reply = pushop.remote.unbundle(stream, ['force'], 'push')
         except error.BundleValueError as exc:
-            raise error.Abort('missing support for %s' % exc)
+            raise error.Abort(_('missing support for %s') % exc)
         try:
             trgetter = None
             if pushback:
                 trgetter = pushop.trmanager.transaction
             op = bundle2.processbundle(pushop.repo, reply, trgetter)
         except error.BundleValueError as exc:
-            raise error.Abort('missing support for %s' % exc)
+            raise error.Abort(_('missing support for %s') % exc)
         except bundle2.AbortFromPart as exc:
             pushop.ui.status(_('remote: %s\n') % exc)
             raise error.Abort(_('push failed on remote'), hint=exc.hint)
@@ -1055,7 +1056,8 @@
         # revision we try to pull (None is "all")
         self.heads = heads
         # bookmark pulled explicitly
-        self.explicitbookmarks = bookmarks
+        self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
+                                  for bookmark in bookmarks]
         # do we force pull?
         self.force = force
         # whether a streaming clone was requested
@@ -1323,7 +1325,7 @@
     try:
         op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
     except error.BundleValueError as exc:
-        raise error.Abort('missing support for %s' % exc)
+        raise error.Abort(_('missing support for %s') % exc)
 
     if pullop.fetch:
         results = [cg['return'] for cg in op.records['changegroup']]
@@ -1646,7 +1648,7 @@
     Used by peer for unbundling.
     """
     heads = repo.heads()
-    heads_hash = util.sha1(''.join(sorted(heads))).digest()
+    heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
     if not (their_heads == ['force'] or their_heads == heads or
             their_heads == ['hashed', heads_hash]):
         # someone else committed/pushed/unbundled while we
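The heads consistency check now hashes with ``hashlib`` directly: the client sends either the literal heads it saw or a ``['hashed', sha1-digest]`` pair, and the server recomputes the digest over its own sorted heads to detect a concurrent push. A simplified sketch of that comparison (function name is illustrative)::

    import hashlib

    def headsmatch(current_heads, their_heads):
        # their_heads is ['force'], a list of head nodes, or ['hashed', digest]
        heads_hash = hashlib.sha1(''.join(sorted(current_heads))).digest()
        return (their_heads == ['force'] or
                their_heads == current_heads or
                their_heads == ['hashed', heads_hash])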
--- a/mercurial/extensions.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/extensions.py	Mon Jul 18 23:28:14 2016 -0500
@@ -25,7 +25,7 @@
 _aftercallbacks = {}
 _order = []
 _builtin = set(['hbisect', 'bookmarks', 'parentrevspec', 'progress', 'interhg',
-                'inotify'])
+                'inotify', 'hgcia'])
 
 def extensions(ui=None):
     if ui:
@@ -127,6 +127,21 @@
         fn(loaded=True)
     return mod
 
+def _runuisetup(name, ui):
+    uisetup = getattr(_extensions[name], 'uisetup', None)
+    if uisetup:
+        uisetup(ui)
+
+def _runextsetup(name, ui):
+    extsetup = getattr(_extensions[name], 'extsetup', None)
+    if extsetup:
+        try:
+            extsetup(ui)
+        except TypeError:
+            if extsetup.func_code.co_argcount != 0:
+                raise
+            extsetup() # old extsetup with no ui argument
+
 def loadall(ui):
     result = ui.configitems("extensions")
     newindex = len(_order)
@@ -148,19 +163,10 @@
             ui.traceback()
 
     for name in _order[newindex:]:
-        uisetup = getattr(_extensions[name], 'uisetup', None)
-        if uisetup:
-            uisetup(ui)
+        _runuisetup(name, ui)
 
     for name in _order[newindex:]:
-        extsetup = getattr(_extensions[name], 'extsetup', None)
-        if extsetup:
-            try:
-                extsetup(ui)
-            except TypeError:
-                if extsetup.func_code.co_argcount != 0:
-                    raise
-                extsetup() # old extsetup with no ui argument
+        _runextsetup(name, ui)
 
     # Call aftercallbacks that were never met.
     for shortname in _aftercallbacks:
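``_runextsetup`` keeps compatibility with extensions whose ``extsetup`` takes no ``ui`` argument: it tries ``extsetup(ui)`` and only falls back to the zero-argument call when the function's code object shows it accepts no parameters, so a ``TypeError`` raised inside a modern ``extsetup`` still propagates. The same check could be made up front with ``inspect`` (a sketch of an alternative, not what Mercurial does)::

    import inspect

    def callextsetup(extsetup, ui):
        # old-style extensions define extsetup() with no parameters
        if inspect.getargspec(extsetup).args:
            extsetup(ui)
        else:
            extsetup()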
--- a/mercurial/formatter.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/formatter.py	Mon Jul 18 23:28:14 2016 -0500
@@ -7,7 +7,6 @@
 
 from __future__ import absolute_import
 
-import cPickle
 import os
 
 from .i18n import _
@@ -20,8 +19,11 @@
     encoding,
     error,
     templater,
+    util,
 )
 
+pickle = util.pickle
+
 class baseformatter(object):
     def __init__(self, ui, topic, opts):
         self._ui = ui
@@ -107,7 +109,7 @@
         self._data.append(self._item)
     def end(self):
         baseformatter.end(self)
-        self._ui.write(cPickle.dumps(self._data))
+        self._ui.write(pickle.dumps(self._data))
 
 def _jsonifyobj(v):
     if isinstance(v, tuple):
--- a/mercurial/graphmod.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/graphmod.py	Mon Jul 18 23:28:14 2016 -0500
@@ -19,8 +19,6 @@
 
 from __future__ import absolute_import
 
-import heapq
-
 from .node import nullrev
 from . import (
     revset,
@@ -32,207 +30,11 @@
 GRANDPARENT = 'G'
 MISSINGPARENT = 'M'
 # Style of line to draw. None signals a line that ends and is removed at this
-# point.
+# point. A number prefix means only the last N lines of the current block
+# will use that style, the rest will use the PARENT style. Add a - sign
+# (so making N negative) and all but the first N lines use that style.
 EDGES = {PARENT: '|', GRANDPARENT: ':', MISSINGPARENT: None}
 
-def groupbranchiter(revs, parentsfunc, firstbranch=()):
-    """Yield revisions from heads to roots one (topo) branch at a time.
-
-    This function aims to be used by a graph generator that wishes to minimize
-    the number of parallel branches and their interleaving.
-
-    Example iteration order (numbers show the "true" order in a changelog):
-
-      o  4
-      |
-      o  1
-      |
-      | o  3
-      | |
-      | o  2
-      |/
-      o  0
-
-    Note that the ancestors of merges are understood by the current
-    algorithm to be on the same branch. This means no reordering will
-    occur behind a merge.
-    """
-
-    ### Quick summary of the algorithm
-    #
-    # This function is based around a "retention" principle. We keep revisions
-    # in memory until we are ready to emit a whole branch that immediately
-    # "merges" into an existing one. This reduces the number of parallel
-    # branches with interleaved revisions.
-    #
-    # During iteration revs are split into two groups:
-    # A) revision already emitted
-    # B) revision in "retention". They are stored as different subgroups.
-    #
-    # for each REV, we do the following logic:
-    #
-    #   1) if REV is a parent of (A), we will emit it. If there is a
-    #   retention group ((B) above) that is blocked on REV being
-    #   available, we emit all the revisions out of that retention
-    #   group first.
-    #
-    #   2) else, we'll search for a subgroup in (B) awaiting for REV to be
-    #   available, if such subgroup exist, we add REV to it and the subgroup is
-    #   now awaiting for REV.parents() to be available.
-    #
-    #   3) finally if no such group existed in (B), we create a new subgroup.
-    #
-    #
-    # To bootstrap the algorithm, we emit the tipmost revision (which
-    # puts it in group (A) from above).
-
-    revs.sort(reverse=True)
-
-    # Set of parents of revision that have been emitted. They can be considered
-    # unblocked as the graph generator is already aware of them so there is no
-    # need to delay the revisions that reference them.
-    #
-    # If someone wants to prioritize a branch over the others, pre-filling this
-    # set will force all other branches to wait until this branch is ready to be
-    # emitted.
-    unblocked = set(firstbranch)
-
-    # list of groups waiting to be displayed, each group is defined by:
-    #
-    #   (revs:    lists of revs waiting to be displayed,
-    #    blocked: set of that cannot be displayed before those in 'revs')
-    #
-    # The second value ('blocked') correspond to parents of any revision in the
-    # group ('revs') that is not itself contained in the group. The main idea
-    # of this algorithm is to delay as much as possible the emission of any
-    # revision.  This means waiting for the moment we are about to display
-    # these parents to display the revs in a group.
-    #
-    # This first implementation is smart until it encounters a merge: it will
-    # emit revs as soon as any parent is about to be emitted and can grow an
-    # arbitrary number of revs in 'blocked'. In practice this mean we properly
-    # retains new branches but gives up on any special ordering for ancestors
-    # of merges. The implementation can be improved to handle this better.
-    #
-    # The first subgroup is special. It corresponds to all the revision that
-    # were already emitted. The 'revs' lists is expected to be empty and the
-    # 'blocked' set contains the parents revisions of already emitted revision.
-    #
-    # You could pre-seed the <parents> set of groups[0] to a specific
-    # changesets to select what the first emitted branch should be.
-    groups = [([], unblocked)]
-    pendingheap = []
-    pendingset = set()
-
-    heapq.heapify(pendingheap)
-    heappop = heapq.heappop
-    heappush = heapq.heappush
-    for currentrev in revs:
-        # Heap works with smallest element, we want highest so we invert
-        if currentrev not in pendingset:
-            heappush(pendingheap, -currentrev)
-            pendingset.add(currentrev)
-        # iterates on pending rev until after the current rev have been
-        # processed.
-        rev = None
-        while rev != currentrev:
-            rev = -heappop(pendingheap)
-            pendingset.remove(rev)
-
-            # Seek for a subgroup blocked, waiting for the current revision.
-            matching = [i for i, g in enumerate(groups) if rev in g[1]]
-
-            if matching:
-                # The main idea is to gather together all sets that are blocked
-                # on the same revision.
-                #
-                # Groups are merged when a common blocking ancestor is
-                # observed. For example, given two groups:
-                #
-                # revs [5, 4] waiting for 1
-                # revs [3, 2] waiting for 1
-                #
-                # These two groups will be merged when we process
-                # 1. In theory, we could have merged the groups when
-                # we added 2 to the group it is now in (we could have
-                # noticed the groups were both blocked on 1 then), but
-                # the way it works now makes the algorithm simpler.
-                #
-                # We also always keep the oldest subgroup first. We can
-                # probably improve the behavior by having the longest set
-                # first. That way, graph algorithms could minimise the length
-                # of parallel lines their drawing. This is currently not done.
-                targetidx = matching.pop(0)
-                trevs, tparents = groups[targetidx]
-                for i in matching:
-                    gr = groups[i]
-                    trevs.extend(gr[0])
-                    tparents |= gr[1]
-                # delete all merged subgroups (except the one we kept)
-                # (starting from the last subgroup for performance and
-                # sanity reasons)
-                for i in reversed(matching):
-                    del groups[i]
-            else:
-                # This is a new head. We create a new subgroup for it.
-                targetidx = len(groups)
-                groups.append(([], set([rev])))
-
-            gr = groups[targetidx]
-
-            # We now add the current nodes to this subgroups. This is done
-            # after the subgroup merging because all elements from a subgroup
-            # that relied on this rev must precede it.
-            #
-            # we also update the <parents> set to include the parents of the
-            # new nodes.
-            if rev == currentrev: # only display stuff in rev
-                gr[0].append(rev)
-            gr[1].remove(rev)
-            parents = [p for p in parentsfunc(rev) if p > nullrev]
-            gr[1].update(parents)
-            for p in parents:
-                if p not in pendingset:
-                    pendingset.add(p)
-                    heappush(pendingheap, -p)
-
-            # Look for a subgroup to display
-            #
-            # When unblocked is empty (if clause), we were not waiting for any
-            # revisions during the first iteration (if no priority was given) or
-            # if we emitted a whole disconnected set of the graph (reached a
-            # root).  In that case we arbitrarily take the oldest known
-            # subgroup. The heuristic could probably be better.
-            #
-            # Otherwise (elif clause) if the subgroup is blocked on
-            # a revision we just emitted, we can safely emit it as
-            # well.
-            if not unblocked:
-                if len(groups) > 1:  # display other subset
-                    targetidx = 1
-                    gr = groups[1]
-            elif not gr[1] & unblocked:
-                gr = None
-
-            if gr is not None:
-                # update the set of awaited revisions with the one from the
-                # subgroup
-                unblocked |= gr[1]
-                # output all revisions in the subgroup
-                for r in gr[0]:
-                    yield r
-                # delete the subgroup that you just output
-                # unless it is groups[0] in which case you just empty it.
-                if targetidx:
-                    del groups[targetidx]
-                else:
-                    gr[0][:] = []
-    # Check if we have some subgroup waiting for revisions we are not going to
-    # iterate over
-    for g in groups:
-        for r in g[0]:
-            yield r
-
 def dagwalker(repo, revs):
     """cset DAG generator yielding (id, CHANGESET, ctx, [parentinfo]) tuples
 
@@ -250,16 +52,6 @@
 
     gpcache = {}
 
-    if repo.ui.configbool('experimental', 'graph-group-branches', False):
-        firstbranch = ()
-        firstbranchrevset = repo.ui.config(
-            'experimental', 'graph-group-branches.firstbranch', '')
-        if firstbranchrevset:
-            firstbranch = repo.revs(firstbranchrevset)
-        parentrevs = repo.changelog.parentrevs
-        revs = groupbranchiter(revs, parentrevs, firstbranch)
-        revs = revset.baseset(revs)
-
     for rev in revs:
         ctx = repo[rev]
         # partition into parents in the rev set and missing parents, then
@@ -653,6 +445,22 @@
     while len(text) < len(lines):
         text.append("")
 
+    if any(len(char) > 1 for char in edgemap.values()):
+        # limit drawing an edge to the first or last N lines of the current
+        # section; the rest of the edge is drawn like a parent line.
+        parent = state['styles'][PARENT][-1]
+        def _drawgp(char, i):
+            # should a grandparent character be drawn for this line?
+            if len(char) < 2:
+                return True
+            num = int(char[:-1])
+            # either skip first num lines or take last num lines, based on sign
+            return -num <= i if num < 0 else (len(lines) - i) <= num
+        for i, line in enumerate(lines):
+            line[:] = [c[-1] if _drawgp(c, i) else parent for c in line]
+        edgemap.update(
+            (e, (c if len(c) < 2 else parent)) for e, c in edgemap.items())
+
     # print lines
     indentation_level = max(ncols, ncols + coldiff)
     for (line, logstr) in zip(lines, text):
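The numeric prefix on an edge style is interpreted per output line: with style ``'N<char>'`` only the last N lines of the current block use ``<char>``, and a negative N flips that to "all but the first N lines", everything else falling back to the PARENT style. A standalone sketch of the decision mirrored from ``_drawgp`` (function and argument names are illustrative)::

    def usespecialstyle(char, lineno, nlines):
        # char is e.g. ':' (plain), '2:' (last 2 lines) or '-2:' (all but first 2)
        if len(char) < 2:
            return True                    # no numeric prefix: style every line
        num = int(char[:-1])
        if num < 0:
            return -num <= lineno          # skip the first |num| lines
        return (nlines - lineno) <= num    # only the last num lines

    # with char='2:' and nlines=5, lines 3 and 4 use ':' and the rest '|'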
--- a/mercurial/help/config.txt	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/help/config.txt	Mon Jul 18 23:28:14 2016 -0500
@@ -811,6 +811,15 @@
   dictionary of options (with unspecified options set to their defaults).
   ``$HG_PATS`` is a list of arguments. Hook failure is ignored.
 
+``fail-<command>``
+  Run after a failed invocation of an associated command. The contents
+  of the command line are passed as ``$HG_ARGS``. Parsed command line
+  arguments are passed as ``$HG_PATS`` and ``$HG_OPTS``. These contain
+  string representations of the python data internally passed to
+  <command>. ``$HG_OPTS`` is a dictionary of options (with unspecified
+  options set to their defaults). ``$HG_PATS`` is a list of arguments.
+  Hook failure is ignored.
+
 ``pre-<command>``
   Run before executing the associated command. The contents of the
   command line are passed as ``$HG_ARGS``. Parsed command line arguments
@@ -967,6 +976,8 @@
 ``hostfingerprints``
 --------------------
 
+(Deprecated. Use ``[hostsecurity]``'s ``fingerprints`` options instead.)
+
 Fingerprints of the certificates of known HTTPS servers.
 
 A HTTPS connection to a server with a fingerprint configured here will
@@ -986,6 +997,114 @@
     hg.intevation.de = fc:e2:8d:d9:51:cd:cb:c1:4d:18:6b:b7:44:8d:49:72:57:e6:cd:33
     hg.intevation.org = fc:e2:8d:d9:51:cd:cb:c1:4d:18:6b:b7:44:8d:49:72:57:e6:cd:33
 
+``hostsecurity``
+----------------
+
+Used to specify global and per-host security settings for connecting to
+other machines.
+
+The following options control default behavior for all hosts.
+
+``ciphers``
+    Defines the cryptographic ciphers to use for connections.
+
+    Value must be a valid OpenSSL Cipher List Format as documented at
+    https://www.openssl.org/docs/manmaster/apps/ciphers.html#CIPHER-LIST-FORMAT.
+
+    This setting is for advanced users only. Setting to incorrect values
+    can significantly lower connection security or decrease performance.
+    You have been warned.
+
+    This option requires Python 2.7.
+
+``minimumprotocol``
+    Defines the minimum channel encryption protocol to use.
+
+    By default, the highest version of TLS supported by both client and server
+    is used.
+
+    Allowed values are: ``tls1.0``, ``tls1.1``, ``tls1.2``.
+
+    When running on an old Python version, only ``tls1.0`` is allowed since
+    old versions of Python only support up to TLS 1.0.
+
+    When running a Python that supports modern TLS versions, the default is
+    ``tls1.1``. ``tls1.0`` can still be used to allow TLS 1.0. However, this
+    weakens security and should only be used as a feature of last resort if
+    a server does not support TLS 1.1+.
+
+Options in the ``[hostsecurity]`` section can have the form
+``hostname``:``setting``. This allows multiple settings to be defined on a
+per-host basis.
+
+The following per-host settings can be defined.
+
+``ciphers``
+    This behaves like ``ciphers`` as described above except it only applies
+    to the host on which it is defined.
+
+``fingerprints``
+    A list of hashes of the DER encoded peer/remote certificate. Values have
+    the form ``algorithm``:``fingerprint``. e.g.
+    ``sha256:c3ab8ff13720e8ad9047dd39466b3c8974e592c2fa383d4a3960714caef0c4f2``.
+
+    The following algorithms/prefixes are supported: ``sha1``, ``sha256``,
+    ``sha512``.
+
+    Use of ``sha256`` or ``sha512`` is preferred.
+
+    If a fingerprint is specified, the CA chain is not validated for this
+    host and Mercurial will require the remote certificate to match one
+    of the fingerprints specified. This means if the server updates its
+    certificate, Mercurial will abort until a new fingerprint is defined.
+    This can provide stronger security than traditional CA-based validation
+    at the expense of convenience.
+
+    This option takes precedence over ``verifycertsfile``.
+
+``minimumprotocol``
+    This behaves like ``minimumprotocol`` as described above except it
+    only applies to the host on which it is defined.
+
+``verifycertsfile``
+    Path to a file containing a list of PEM encoded certificates used to
+    verify the server certificate. Environment variables and ``~user``
+    constructs are expanded in the filename.
+
+    The server certificate or the certificate's certificate authority (CA)
+    must match a certificate from this file or certificate verification
+    will fail and connections to the server will be refused.
+
+    If defined, only certificates provided by this file will be used:
+    ``web.cacerts`` and any system/default certificates will not be
+    used.
+
+    This option has no effect if the per-host ``fingerprints`` option
+    is set.
+
+    The format of the file is as follows::
+
+        -----BEGIN CERTIFICATE-----
+        ... (certificate in base64 PEM encoding) ...
+        -----END CERTIFICATE-----
+        -----BEGIN CERTIFICATE-----
+        ... (certificate in base64 PEM encoding) ...
+        -----END CERTIFICATE-----
+
+For example::
+
+    [hostsecurity]
+    hg.example.com:fingerprints = sha256:c3ab8ff13720e8ad9047dd39466b3c8974e592c2fa383d4a3960714caef0c4f2
+    hg2.example.com:fingerprints = sha1:914f1aff87249c09b6859b88b1906d30756491ca, sha1:fc:e2:8d:d9:51:cd:cb:c1:4d:18:6b:b7:44:8d:49:72:57:e6:cd:33
+    foo.example.com:verifycertsfile = /etc/ssl/trusted-ca-certs.pem
+
+To change the default minimum protocol version to TLS 1.2 but to allow TLS 1.1
+when connecting to ``hg.example.com``::
+
+    [hostsecurity]
+    minimumprotocol = tls1.2
+    hg.example.com:minimumprotocol = tls1.1
+
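A ``sha256`` value for the ``fingerprints`` option above can be computed from the server's certificate with the standard library alone; for example (a sketch using Python's ``ssl`` and ``hashlib`` modules, host name illustrative)::

    import hashlib
    import ssl

    # fetch the remote certificate in PEM form and hash its DER encoding
    pem = ssl.get_server_certificate(('hg.example.com', 443))
    der = ssl.PEM_cert_to_DER_cert(pem)
    print('sha256:' + hashlib.sha256(der).hexdigest())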
 ``http_proxy``
 --------------
 
@@ -1020,8 +1139,8 @@
    file in the changeset being merged or updated to, and has different
    contents. Options are ``abort``, ``warn`` and ``ignore``. With ``abort``,
    abort on such files. With ``warn``, warn on such files and back them up as
-   .orig. With ``ignore``, don't print a warning and back them up as
-   .orig. (default: ``abort``)
+   ``.orig``. With ``ignore``, don't print a warning and back them up as
+   ``.orig``. (default: ``abort``)
 
 ``checkunknown``
    Controls behavior when an unknown file that isn't ignored has the same name
@@ -1210,6 +1329,18 @@
    The URL to use for push operations. If not defined, the location
    defined by the path's main entry is used.
 
+``pushrev``
+   A revset defining which revisions to push by default.
+
+   When :hg:`push` is executed without a ``-r`` argument, the revset
+   defined by this sub-option is evaluated to determine what to push.
+
+   For example, a value of ``.`` will push the working directory's
+   revision by default.
+
+   Revsets specifying bookmarks will not result in the bookmark being
+   pushed.
+
 The following special named paths exist:
 
 ``default``
@@ -1442,16 +1573,6 @@
     Optional. Method to enable TLS when connecting to mail server: starttls,
     smtps or none. (default: none)
 
-``verifycert``
-    Optional. Verification for the certificate of mail server, when
-    ``tls`` is starttls or smtps. "strict", "loose" or False. For
-    "strict" or "loose", the certificate is verified as same as the
-    verification for HTTPS connections (see ``[hostfingerprints]`` and
-    ``[web] cacerts`` also). For "strict", sending email is also
-    aborted, if there is no configuration for mail server in
-    ``[hostfingerprints]`` and ``[web] cacerts``.  --insecure for
-    :hg:`email` overwrites this as "loose". (default: strict)
-
 ``username``
     Optional. User name for authenticating with the SMTP server.
     (default: None)
@@ -1738,6 +1859,13 @@
     large organisation with its own Mercurial deployment process and crash
     reports should be addressed to your internal support.
 
+``textwidth``
+    Maximum width of help text. A longer line generated by ``hg help`` or
+    ``hg subcommand --help`` will be broken after white space to get this
+    width or the terminal width, whichever comes first.
+    A non-positive value will disable this and the terminal width will be
+    used. (default: 78)
+
 ``timeout``
     The timeout used when a lock is held (in seconds), a negative value
     means no timeout. (default: 600)
@@ -1945,6 +2073,14 @@
 ``ipv6``
     Whether to use IPv6. (default: False)
 
+``labels``
+    List of string *labels* associated with the repository.
+
+    Labels are exposed as a template keyword and can be used to customize
+    output. e.g. the ``index`` template can group or filter repositories
+    by labels and the ``summary`` template can display additional content
+    if a specific label is present.
+
 ``logoimg``
     File name of the logo image that some templates display on each page.
     The file name is relative to ``staticurl``. That is, the full path to
--- a/mercurial/help/templates.txt	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/help/templates.txt	Mon Jul 18 23:28:14 2016 -0500
@@ -81,6 +81,10 @@
 
    $ hg log -r 0 --template "files: {join(files, ', ')}\n"
 
+- Separate non-empty arguments by a " "::
+
+   $ hg log -r 0 --template "{separate(' ', node, bookmarks, tags)}\n"
+
 - Modify each line of a commit description::
 
    $ hg log --template "{splitlines(desc) % '**** {line}\n'}"
--- a/mercurial/hg.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/hg.py	Mon Jul 18 23:28:14 2016 -0500
@@ -9,6 +9,7 @@
 from __future__ import absolute_import
 
 import errno
+import hashlib
 import os
 import shutil
 
@@ -43,6 +44,9 @@
 
 release = lock.release
 
+# shared features
+sharedbookmarks = 'bookmarks'
+
 def _local(path):
     path = util.expandpath(util.urllocalpath(path))
     return (os.path.isfile(path) and bundlerepo or localrepo)
@@ -257,7 +261,7 @@
 
     if bookmarks:
         fp = destrepo.vfs('shared', 'w')
-        fp.write('bookmarks\n')
+        fp.write(sharedbookmarks + '\n')
         fp.close()
 
 def _postshareupdate(repo, update, checkout=None):
@@ -480,9 +484,11 @@
                 ui.status(_('(not using pooled storage: '
                             'unable to resolve identity of remote)\n'))
         elif sharenamemode == 'remote':
-            sharepath = os.path.join(sharepool, util.sha1(source).hexdigest())
+            sharepath = os.path.join(
+                sharepool, hashlib.sha1(source).hexdigest())
         else:
-            raise error.Abort('unknown share naming mode: %s' % sharenamemode)
+            raise error.Abort(_('unknown share naming mode: %s') %
+                              sharenamemode)
 
         if sharepath:
             return clonewithshare(ui, peeropts, sharepath, source, srcpeer,
@@ -921,9 +927,7 @@
         for key, val in src.configitems(sect):
             dst.setconfig(sect, key, val, 'copied')
     v = src.config('web', 'cacerts')
-    if v == '!':
-        dst.setconfig('web', 'cacerts', v, 'copied')
-    elif v:
+    if v:
         dst.setconfig('web', 'cacerts', util.expandpath(v), 'copied')
 
     return dst
--- a/mercurial/hgweb/common.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/hgweb/common.py	Mon Jul 18 23:28:14 2016 -0500
@@ -8,11 +8,14 @@
 
 from __future__ import absolute_import
 
-import BaseHTTPServer
 import errno
 import mimetypes
 import os
 
+from .. import util
+
+httpserver = util.httpserver
+
 HTTP_OK = 200
 HTTP_NOT_MODIFIED = 304
 HTTP_BAD_REQUEST = 400
@@ -107,7 +110,7 @@
         raise AttributeError
 
 def _statusmessage(code):
-    responses = BaseHTTPServer.BaseHTTPRequestHandler.responses
+    responses = httpserver.basehttprequesthandler.responses
     return responses.get(code, ('Error', 'Unknown error'))[0]
 
 def statusmessage(code, message=None):
@@ -187,7 +190,7 @@
             os.environ.get("EMAIL") or "")
 
 def caching(web, req):
-    tag = str(web.mtime)
+    tag = 'W/"%s"' % web.mtime
     if req.env.get('HTTP_IF_NONE_MATCH') == tag:
         raise ErrorResponse(HTTP_NOT_MODIFIED)
     req.headers.append(('ETag', tag))
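hgweb now advertises a weak ETag (``W/"<mtime>"``) and answers 304 Not Modified when the client echoes it back in ``If-None-Match``. A tiny sketch of that exchange with generic names (not hgweb's request object)::

    def checknotmodified(mtime, if_none_match):
        tag = 'W/"%s"' % mtime
        if if_none_match == tag:
            return 304, tag    # client copy is current, send no body
        return 200, tag        # send the page along with the new ETag

    # checknotmodified(1468880894, 'W/"1468880894"') -> (304, 'W/"1468880894"')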
--- a/mercurial/hgweb/hgwebdir_mod.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/hgweb/hgwebdir_mod.py	Mon Jul 18 23:28:14 2016 -0500
@@ -366,7 +366,9 @@
                            'lastchange': d,
                            'lastchange_sort': d[1]-d[0],
                            'archives': [],
-                           'isdirectory': True}
+                           'isdirectory': True,
+                           'labels': [],
+                           }
 
                     seendirs.add(name)
                     yield row
@@ -416,6 +418,7 @@
                        'lastchange_sort': d[1]-d[0],
                        'archives': archivelist(u, "tip", url),
                        'isdirectory': None,
+                       'labels': u.configlist('web', 'labels', untrusted=True),
                        }
 
                 yield row
--- a/mercurial/hgweb/server.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/hgweb/server.py	Mon Jul 18 23:28:14 2016 -0500
@@ -8,8 +8,6 @@
 
 from __future__ import absolute_import
 
-import BaseHTTPServer
-import SocketServer
 import errno
 import os
 import socket
@@ -23,6 +21,8 @@
     util,
 )
 
+httpservermod = util.httpserver
+socketserver = util.socketserver
 urlerr = util.urlerr
 urlreq = util.urlreq
 
@@ -53,18 +53,18 @@
         for msg in seq:
             self.handler.log_error("HG error:  %s", msg)
 
-class _httprequesthandler(BaseHTTPServer.BaseHTTPRequestHandler):
+class _httprequesthandler(httpservermod.basehttprequesthandler):
 
     url_scheme = 'http'
 
     @staticmethod
-    def preparehttpserver(httpserver, ssl_cert):
+    def preparehttpserver(httpserver, ui):
         """Prepare .socket of new HTTPServer instance"""
         pass
 
     def __init__(self, *args, **kargs):
         self.protocol_version = 'HTTP/1.1'
-        BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kargs)
+        httpservermod.basehttprequesthandler.__init__(self, *args, **kargs)
 
     def _log_any(self, fp, format, *args):
         fp.write("%s - - [%s] %s\n" % (self.client_address[0],
@@ -147,9 +147,9 @@
         env['wsgi.input'] = self.rfile
         env['wsgi.errors'] = _error_logger(self)
         env['wsgi.multithread'] = isinstance(self.server,
-                                             SocketServer.ThreadingMixIn)
+                                             socketserver.ThreadingMixIn)
         env['wsgi.multiprocess'] = isinstance(self.server,
-                                              SocketServer.ForkingMixIn)
+                                              socketserver.ForkingMixIn)
         env['wsgi.run_once'] = 0
 
         self.saved_status = None
@@ -222,15 +222,25 @@
     url_scheme = 'https'
 
     @staticmethod
-    def preparehttpserver(httpserver, ssl_cert):
+    def preparehttpserver(httpserver, ui):
         try:
-            import ssl
-            ssl.wrap_socket
+            from .. import sslutil
+            sslutil.modernssl
         except ImportError:
             raise error.Abort(_("SSL support is unavailable"))
-        httpserver.socket = ssl.wrap_socket(
-            httpserver.socket, server_side=True,
-            certfile=ssl_cert, ssl_version=ssl.PROTOCOL_TLSv1)
+
+        certfile = ui.config('web', 'certificate')
+
+        # These config options are currently only meant for testing. Use
+        # at your own risk.
+        cafile = ui.config('devel', 'servercafile')
+        reqcert = ui.configbool('devel', 'serverrequirecert')
+
+        httpserver.socket = sslutil.wrapserversocket(httpserver.socket,
+                                                     ui,
+                                                     certfile=certfile,
+                                                     cafile=cafile,
+                                                     requireclientcert=reqcert)
 
     def setup(self):
         self.connection = self.request
@@ -240,10 +250,10 @@
 try:
     import threading
     threading.activeCount() # silence pyflakes and bypass demandimport
-    _mixin = SocketServer.ThreadingMixIn
+    _mixin = socketserver.ThreadingMixIn
 except ImportError:
     if util.safehasattr(os, "fork"):
-        _mixin = SocketServer.ForkingMixIn
+        _mixin = socketserver.ForkingMixIn
     else:
         class _mixin(object):
             pass
@@ -253,18 +263,18 @@
         return open(opt, 'a')
     return default
 
-class MercurialHTTPServer(object, _mixin, BaseHTTPServer.HTTPServer):
+class MercurialHTTPServer(object, _mixin, httpservermod.httpserver):
 
     # SO_REUSEADDR has broken semantics on windows
     if os.name == 'nt':
         allow_reuse_address = 0
 
     def __init__(self, ui, app, addr, handler, **kwargs):
-        BaseHTTPServer.HTTPServer.__init__(self, addr, handler, **kwargs)
+        httpservermod.httpserver.__init__(self, addr, handler, **kwargs)
         self.daemon_threads = True
         self.application = app
 
-        handler.preparehttpserver(self, ui.config('web', 'certificate'))
+        handler.preparehttpserver(self, ui)
 
         prefix = ui.config('web', 'prefix', '')
         if prefix:
--- a/mercurial/hgweb/webcommands.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/hgweb/webcommands.py	Mon Jul 18 23:28:14 2016 -0500
@@ -139,7 +139,7 @@
             yield {"line": t,
                    "lineid": "l%d" % (lineno + 1),
                    "linenumber": "% 6d" % (lineno + 1),
-                   "parity": parity.next()}
+                   "parity": next(parity)}
 
     return tmpl("filerevision",
                 file=f,
@@ -278,7 +278,7 @@
             files = webutil.listfilediffs(tmpl, ctx.files(), n, web.maxfiles)
 
             yield tmpl('searchentry',
-                       parity=parity.next(),
+                       parity=next(parity),
                        changelogtag=showtags,
                        files=files,
                        **webutil.commonentry(web.repo, ctx))
@@ -375,7 +375,7 @@
                 break
 
             entry = webutil.changelistentry(web, web.repo[rev], tmpl)
-            entry['parity'] = parity.next()
+            entry['parity'] = next(parity)
             yield entry
 
     if shortlog:
@@ -527,7 +527,7 @@
 
             fctx = ctx.filectx(full)
             yield {"file": full,
-                   "parity": parity.next(),
+                   "parity": next(parity),
                    "basename": f,
                    "date": fctx.date(),
                    "size": fctx.size(),
@@ -545,7 +545,7 @@
                 h = v
 
             path = "%s%s" % (abspath, d)
-            yield {"parity": parity.next(),
+            yield {"parity": next(parity),
                    "path": path,
                    "emptydirs": "/".join(emptydirs),
                    "basename": d}
@@ -554,7 +554,7 @@
                 symrev=symrev,
                 path=abspath,
                 up=webutil.up(abspath),
-                upparity=parity.next(),
+                upparity=next(parity),
                 fentries=filelist,
                 dentries=dirlist,
                 archives=web.archivelist(hex(node)),
@@ -582,7 +582,7 @@
         if latestonly:
             t = t[:1]
         for k, n in t:
-            yield {"parity": parity.next(),
+            yield {"parity": next(parity),
                    "tag": k,
                    "date": web.repo[n].date(),
                    "node": hex(n)}
@@ -615,7 +615,7 @@
         if latestonly:
             t = i[:1]
         for k, n in t:
-            yield {"parity": parity.next(),
+            yield {"parity": next(parity),
                    "bookmark": k,
                    "date": web.repo[n].date(),
                    "node": hex(n)}
@@ -677,7 +677,7 @@
                 break
 
             yield tmpl("tagentry",
-                       parity=parity.next(),
+                       parity=next(parity),
                        tag=k,
                        node=hex(n),
                        date=web.repo[n].date())
@@ -688,7 +688,7 @@
         sortkey = lambda b: (web.repo[b[1]].rev(), b[0])
         marks = sorted(marks, key=sortkey, reverse=True)
         for k, n in marks[:10]:  # limit to 10 bookmarks
-            yield {'parity': parity.next(),
+            yield {'parity': next(parity),
                    'bookmark': k,
                    'date': web.repo[n].date(),
                    'node': hex(n)}
@@ -704,11 +704,11 @@
 
             l.append(tmpl(
                 'shortlogentry',
-                parity=parity.next(),
+                parity=next(parity),
                 **webutil.commonentry(web.repo, ctx)))
 
-        l.reverse()
-        yield l
+        for entry in reversed(l):
+            yield entry
 
     tip = web.repo['tip']
     count = len(web.repo)
@@ -725,7 +725,8 @@
                 shortlog=changelist,
                 node=tip.hex(),
                 symrev='tip',
-                archives=web.archivelist("tip"))
+                archives=web.archivelist("tip"),
+                labels=web.configlist('web', 'labels'))
 
 @webcommand('filediff')
 def filediff(web, req, tmpl):
@@ -863,29 +864,41 @@
     diffopts = patch.difffeatureopts(web.repo.ui, untrusted=True,
                                      section='annotate', whitespace=True)
 
+    def parents(f):
+        for p in f.parents():
+            yield {
+                "node": p.hex(),
+                "rev": p.rev(),
+            }
+
     def annotate(**map):
-        last = None
         if util.binary(fctx.data()):
             mt = (mimetypes.guess_type(fctx.path())[0]
                   or 'application/octet-stream')
-            lines = enumerate([((fctx.filectx(fctx.filerev()), 1),
-                                '(binary:%s)' % mt)])
+            lines = [((fctx.filectx(fctx.filerev()), 1), '(binary:%s)' % mt)]
         else:
-            lines = enumerate(fctx.annotate(follow=True, linenumber=True,
-                                            diffopts=diffopts))
-        for lineno, ((f, targetline), l) in lines:
-            fnode = f.filenode()
-
-            if last != fnode:
-                last = fnode
-
-            yield {"parity": parity.next(),
+            lines = fctx.annotate(follow=True, linenumber=True,
+                                  diffopts=diffopts)
+        previousrev = None
+        blockparitygen = paritygen(1)
+        for lineno, ((f, targetline), l) in enumerate(lines):
+            rev = f.rev()
+            if rev != previousrev:
+                blockhead = True
+                blockparity = next(blockparitygen)
+            else:
+                blockhead = None
+            previousrev = rev
+            yield {"parity": next(parity),
                    "node": f.hex(),
-                   "rev": f.rev(),
+                   "rev": rev,
                    "author": f.user(),
+                   "parents": parents(f),
                    "desc": f.description(),
                    "extra": f.extra(),
                    "file": f.path(),
+                   "blockhead": blockhead,
+                   "blockparity": blockparity,
                    "targetline": targetline,
                    "line": l,
                    "lineno": lineno + 1,
@@ -963,7 +976,7 @@
             iterfctx = fctx.filectx(i)
 
             l.append(dict(
-                parity=parity.next(),
+                parity=next(parity),
                 filerev=i,
                 file=f,
                 rename=webutil.renamelink(iterfctx),
--- a/mercurial/hgweb/webutil.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/hgweb/webutil.py	Mon Jul 18 23:28:14 2016 -0500
@@ -75,7 +75,7 @@
     def _first(self):
         """return the minimum non-filtered changeset or None"""
         try:
-            return iter(self._revlog).next()
+            return next(iter(self._revlog))
         except StopIteration:
             return None
 
@@ -247,7 +247,7 @@
             else:
                 status = 'open'
             yield {
-                'parity': parity.next(),
+                'parity': next(parity),
                 'branch': ctx.branch(),
                 'status': status,
                 'node': ctx.hex(),
@@ -369,7 +369,7 @@
         template = f in ctx and 'filenodelink' or 'filenolink'
         files.append(tmpl(template,
                           node=ctx.hex(), file=f, blockno=blockno + 1,
-                          parity=parity.next()))
+                          parity=next(parity)))
 
     basectx = basechangectx(web.repo, req)
     if basectx is None:
@@ -450,15 +450,15 @@
     block = []
     for chunk in patch.diff(repo, node1, node2, m, opts=diffopts):
         if chunk.startswith('diff') and block:
-            blockno = blockcount.next()
-            yield tmpl('diffblock', parity=parity.next(), blockno=blockno,
+            blockno = next(blockcount)
+            yield tmpl('diffblock', parity=next(parity), blockno=blockno,
                        lines=prettyprintlines(''.join(block), blockno))
             block = []
         if chunk.startswith('diff') and style != 'raw':
             chunk = ''.join(chunk.splitlines(True)[1:])
         block.append(chunk)
-    blockno = blockcount.next()
-    yield tmpl('diffblock', parity=parity.next(), blockno=blockno,
+    blockno = next(blockcount)
+    yield tmpl('diffblock', parity=next(parity), blockno=blockno,
                lines=prettyprintlines(''.join(block), blockno))
 
 def compare(tmpl, context, leftlines, rightlines):
@@ -521,14 +521,14 @@
 def diffsummary(statgen):
     '''Return a short summary of the diff.'''
 
-    stats, maxname, maxtotal, addtotal, removetotal, binary = statgen.next()
+    stats, maxname, maxtotal, addtotal, removetotal, binary = next(statgen)
     return _(' %d files changed, %d insertions(+), %d deletions(-)\n') % (
              len(stats), addtotal, removetotal)
 
 def diffstat(tmpl, ctx, statgen, parity):
     '''Return a diffstat template for each file in the diff.'''
 
-    stats, maxname, maxtotal, addtotal, removetotal, binary = statgen.next()
+    stats, maxname, maxtotal, addtotal, removetotal, binary = next(statgen)
     files = ctx.files()
 
     def pct(i):
@@ -543,7 +543,7 @@
         fileno += 1
         yield tmpl(template, node=ctx.hex(), file=filename, fileno=fileno,
                    total=total, addpct=pct(adds), removepct=pct(removes),
-                   parity=parity.next())
+                   parity=next(parity))
 
 class sessionvars(object):
     def __init__(self, vars, start='?'):
--- a/mercurial/httpclient/__init__.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/httpclient/__init__.py	Mon Jul 18 23:28:14 2016 -0500
@@ -40,26 +40,38 @@
 
 # Many functions in this file have too many arguments.
 # pylint: disable=R0913
-
-import cStringIO
+import email
+import email.message
 import errno
-import httplib
+import inspect
 import logging
-import rfc822
 import select
 import socket
+import ssl
+import sys
+
+try:
+    import cStringIO as io
+    io.StringIO
+except ImportError:
+    import io
+
+try:
+    import httplib
+    httplib.HTTPException
+except ImportError:
+    import http.client as httplib
 
 from . import (
     _readers,
-    socketutil,
-    )
+)
 
 logger = logging.getLogger(__name__)
 
 __all__ = ['HTTPConnection', 'HTTPResponse']
 
-HTTP_VER_1_0 = 'HTTP/1.0'
-HTTP_VER_1_1 = 'HTTP/1.1'
+HTTP_VER_1_0 = b'HTTP/1.0'
+HTTP_VER_1_1 = b'HTTP/1.1'
 
 OUTGOING_BUFFER_SIZE = 1 << 15
 INCOMING_BUFFER_SIZE = 1 << 20
@@ -73,7 +85,7 @@
 
 CONNECTION_CLOSE = 'close'
 
-EOL = '\r\n'
+EOL = b'\r\n'
 _END_HEADERS = EOL * 2
 
 # Based on some searching around, 1 second seems like a reasonable
@@ -81,6 +93,57 @@
 TIMEOUT_ASSUME_CONTINUE = 1
 TIMEOUT_DEFAULT = None
 
+if sys.version_info > (3, 0):
+    _unicode = str
+else:
+    _unicode = unicode
+
+def _ensurebytes(data):
+    if not isinstance(data, (_unicode, bytes)):
+        data = str(data)
+    if not isinstance(data, bytes):
+        try:
+            return data.encode('latin-1')
+        except UnicodeEncodeError as err:
+            raise UnicodeEncodeError(
+                err.encoding,
+                err.object,
+                err.start,
+                err.end,
+                '%r is not valid Latin-1. Use .encode("utf-8") '
+                'if sending as utf-8 is desired.' % (
+                    data[err.start:err.end],))
+    return data
+
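``_ensurebytes`` normalizes whatever value it is handed to Latin-1 bytes and re-raises encoding failures with a pointer toward explicit UTF-8 encoding. A short usage sketch, assuming the helper defined above::

    assert _ensurebytes(b'abc') == b'abc'          # bytes pass through untouched
    assert _ensurebytes(u'caf\xe9') == b'caf\xe9'  # Latin-1 text round-trips
    assert _ensurebytes(42) == b'42'               # non-strings go through str()
    try:
        _ensurebytes(u'\u2603')                    # a snowman is not Latin-1
    except UnicodeEncodeError as err:
        print(err.reason)                          # suggests .encode("utf-8")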
+class _CompatMessage(email.message.Message):
+    """Workaround for rfc822.Message and email.message.Message API diffs."""
+
+    @classmethod
+    def from_string(cls, s):
+        if sys.version_info > (3, 0):
+            # Python 3 can't decode headers from bytes, so we have to
+            # trust RFC 2616 and decode the headers as iso-8859-1
+            # bytes.
+            s = s.decode('iso-8859-1')
+        headers = email.message_from_string(s, _class=_CompatMessage)
+        # Fix multi-line headers to match httplib's behavior from
+        # Python 2.x, since email.message.Message handles them in
+        # slightly different ways.
+        if sys.version_info < (3, 0):
+            new = []
+            for h, v in headers._headers:
+                if '\r\n' in v:
+                    v = '\n'.join([' ' + x.lstrip() for x in v.split('\r\n')])[1:]
+                new.append((h, v))
+            headers._headers = new
+        return headers
+
+    def getheaders(self, key):
+        return self.get_all(key)
+
+    def getheader(self, key, default=None):
+        return self.get(key, failobj=default)
+
 
 class HTTPResponse(object):
     """Response from an HTTP server.
@@ -91,11 +154,11 @@
     def __init__(self, sock, timeout, method):
         self.sock = sock
         self.method = method
-        self.raw_response = ''
+        self.raw_response = b''
         self._headers_len = 0
         self.headers = None
         self.will_close = False
-        self.status_line = ''
+        self.status_line = b''
         self.status = None
         self.continued = False
         self.http_version = None
@@ -131,6 +194,10 @@
         return self.headers.getheader(header, default=default)
 
     def getheaders(self):
+        if sys.version_info < (3, 0):
+            return [(k.lower(), v) for k, v in self.headers.items()]
+        # Starting in Python 3, headers aren't lowercased before being
+        # returned here.
         return self.headers.items()
 
     def readline(self):
@@ -141,14 +208,14 @@
         """
         blocks = []
         while True:
-            self._reader.readto('\n', blocks)
+            self._reader.readto(b'\n', blocks)
 
-            if blocks and blocks[-1][-1] == '\n' or self.complete():
+            if blocks and blocks[-1][-1:] == b'\n' or self.complete():
                 break
 
             self._select()
 
-        return ''.join(blocks)
+        return b''.join(blocks)
 
     def read(self, length=None):
         """Read data from the response body."""
@@ -175,8 +242,8 @@
                 raise HTTPTimeoutException('timeout reading data')
         try:
             data = self.sock.recv(INCOMING_BUFFER_SIZE)
-        except socket.sslerror as e:
-            if e.args[0] != socket.SSL_ERROR_WANT_READ:
+        except ssl.SSLError as e:
+            if e.args[0] != ssl.SSL_ERROR_WANT_READ:
                 raise
             logger.debug('SSL_ERROR_WANT_READ in _select, should retry later')
             return True
@@ -203,7 +270,7 @@
         self.raw_response += data
         # This is a bogus server with bad line endings
         if self._eol not in self.raw_response:
-            for bad_eol in ('\n', '\r'):
+            for bad_eol in (b'\n', b'\r'):
                 if (bad_eol in self.raw_response
                     # verify that bad_eol is not the end of the incoming data
                     # as this could be a response line that just got
@@ -220,8 +287,8 @@
 
         # handle 100-continue response
         hdrs, body = self.raw_response.split(self._end_headers, 1)
-        unused_http_ver, status = hdrs.split(' ', 1)
-        if status.startswith('100'):
+        unused_http_ver, status = hdrs.split(b' ', 1)
+        if status.startswith(b'100'):
             self.raw_response = body
             self.continued = True
             logger.debug('continue seen, setting body to %r', body)
@@ -235,14 +302,14 @@
             self.status_line, hdrs = hdrs.split(self._eol, 1)
         else:
             self.status_line = hdrs
-            hdrs = ''
+            hdrs = b''
         # TODO HTTP < 1.0 support
         (self.http_version, self.status,
-         self.reason) = self.status_line.split(' ', 2)
+         self.reason) = self.status_line.split(b' ', 2)
         self.status = int(self.status)
         if self._eol != EOL:
-            hdrs = hdrs.replace(self._eol, '\r\n')
-        headers = rfc822.Message(cStringIO.StringIO(hdrs))
+            hdrs = hdrs.replace(self._eol, b'\r\n')
+        headers = _CompatMessage.from_string(hdrs)
         content_len = None
         if HDR_CONTENT_LENGTH in headers:
             content_len = int(headers[HDR_CONTENT_LENGTH])
@@ -259,8 +326,8 @@
             # HEAD responses are forbidden from returning a body, and
             # it's implausible for a CONNECT response to use
             # close-is-end logic for an OK response.
-            if (self.method == 'HEAD' or
-                (self.method == 'CONNECT' and content_len is None)):
+            if (self.method == b'HEAD' or
+                (self.method == b'CONNECT' and content_len is None)):
                 content_len = 0
             if content_len is not None:
                 logger.debug('using a content-length reader with length %d',
@@ -294,8 +361,48 @@
     >>> _foldheaders({'Accept-Encoding': 'wat'})
     {'accept-encoding': ('Accept-Encoding', 'wat')}
     """
-    return dict((k.lower(), (k, v)) for k, v in headers.iteritems())
+    return dict((k.lower(), (k, v)) for k, v in headers.items())
+
+try:
+    inspect.signature
+    def _handlesarg(func, arg):
+        """ Try to determine if func accepts arg
+
+        If it takes arg, return True
+        If it happens to take **args, then it could do anything:
+            * It could throw a different TypeError, just for fun
+            * It could throw an ArgumentError or anything else
+            * It could choose not to throw an Exception at all
+        ... return 'unknown'
 
+        Otherwise, return False
+        """
+        params = inspect.signature(func).parameters
+        if arg in params:
+            return True
+        for p in params:
+            if params[p].kind == inspect._ParameterKind.VAR_KEYWORD:
+                return 'unknown'
+        return False
+except AttributeError:
+    def _handlesarg(func, arg):
+        """ Try to determine if func accepts arg
+
+        If it takes arg, return True
+        If it happens to take **args, then it could do anything:
+            * It could throw a different TypeError, just for fun
+            * It could throw an ArgumentError or anything else
+            * It could choose not to throw an Exception at all
+        ... return 'unknown'
+
+        Otherwise, return False
+        """
+        spec = inspect.getargspec(func)
+        if arg in spec.args:
+            return True
+        if spec.keywords:
+            return 'unknown'
+        return False
 
 class HTTPConnection(object):
     """Connection to a single http server.
@@ -340,15 +447,38 @@
         Any extra keyword arguments to this function will be provided
         to the ssl_wrap_socket method. If no ssl
         """
-        if port is None and host.count(':') == 1 or ']:' in host:
-            host, port = host.rsplit(':', 1)
+        host = _ensurebytes(host)
+        if port is None and host.count(b':') == 1 or b']:' in host:
+            host, port = host.rsplit(b':', 1)
             port = int(port)
-            if '[' in host:
+            if b'[' in host:
                 host = host[1:-1]
         if ssl_wrap_socket is not None:
-            self._ssl_wrap_socket = ssl_wrap_socket
+            _wrap_socket = ssl_wrap_socket
         else:
-            self._ssl_wrap_socket = socketutil.wrap_socket
+            _wrap_socket = ssl.wrap_socket
+        call_wrap_socket = None
+        handlesubar = _handlesarg(_wrap_socket, 'server_hostname')
+        if handlesubar is True:
+            # supports server_hostname
+            call_wrap_socket = _wrap_socket
+        handlesnobar = _handlesarg(_wrap_socket, 'serverhostname')
+        if handlesnobar is True and handlesubar is not True:
+            # supports serverhostname
+            def call_wrap_socket(sock, server_hostname=None, **ssl_opts):
+                return _wrap_socket(sock, serverhostname=server_hostname,
+                                    **ssl_opts)
+        if handlesubar is False and handlesnobar is False:
+            # does not support either
+            def call_wrap_socket(sock, server_hostname=None, **ssl_opts):
+                return _wrap_socket(sock, **ssl_opts)
+        if call_wrap_socket is None:
+            # we assume it takes **args
+            def call_wrap_socket(sock, **ssl_opts):
+                if 'server_hostname' in ssl_opts:
+                    ssl_opts['serverhostname'] = ssl_opts['server_hostname']
+                return _wrap_socket(sock, **ssl_opts)
+        self._ssl_wrap_socket = call_wrap_socket
         if use_ssl is None and port is None:
             use_ssl = False
             port = 80
@@ -357,8 +487,6 @@
         elif port is None:
             port = (use_ssl and 443 or 80)
         self.port = port
-        if use_ssl and not socketutil.have_ssl:
-            raise Exception('ssl requested but unavailable on this Python')
         self.ssl = use_ssl
         self.ssl_opts = ssl_opts
         self._ssl_validator = ssl_validator
@@ -388,15 +516,15 @@
         if self._proxy_host is not None:
             logger.info('Connecting to http proxy %s:%s',
                         self._proxy_host, self._proxy_port)
-            sock = socketutil.create_connection((self._proxy_host,
-                                                 self._proxy_port))
+            sock = socket.create_connection((self._proxy_host,
+                                             self._proxy_port))
             if self.ssl:
-                data = self._buildheaders('CONNECT', '%s:%d' % (self.host,
-                                                                self.port),
+                data = self._buildheaders(b'CONNECT', b'%s:%d' % (self.host,
+                                                                  self.port),
                                           proxy_headers, HTTP_VER_1_0)
                 sock.send(data)
                 sock.setblocking(0)
-                r = self.response_class(sock, self.timeout, 'CONNECT')
+                r = self.response_class(sock, self.timeout, b'CONNECT')
                 timeout_exc = HTTPTimeoutException(
                     'Timed out waiting for CONNECT response from proxy')
                 while not r.complete():
@@ -421,7 +549,7 @@
                 logger.info('CONNECT (for SSL) to %s:%s via proxy succeeded.',
                             self.host, self.port)
         else:
-            sock = socketutil.create_connection((self.host, self.port))
+            sock = socket.create_connection((self.host, self.port))
         if self.ssl:
             # This is the default, but in the case of proxied SSL
             # requests the proxy logic above will have cleared
@@ -429,7 +557,8 @@
             sock.setblocking(1)
             logger.debug('wrapping socket for ssl with options %r',
                          self.ssl_opts)
-            sock = self._ssl_wrap_socket(sock, **self.ssl_opts)
+            sock = self._ssl_wrap_socket(sock, server_hostname=self.host,
+                                         **self.ssl_opts)
             if self._ssl_validator:
                 self._ssl_validator(sock)
         sock.setblocking(0)
@@ -441,25 +570,26 @@
             hdrhost = self.host
         else:
             # include nonstandard port in header
-            if ':' in self.host:  # must be IPv6
-                hdrhost = '[%s]:%d' % (self.host, self.port)
+            if b':' in self.host:  # must be IPv6
+                hdrhost = b'[%s]:%d' % (self.host, self.port)
             else:
-                hdrhost = '%s:%d' % (self.host, self.port)
+                hdrhost = b'%s:%d' % (self.host, self.port)
         if self._proxy_host and not self.ssl:
             # When talking to a regular http proxy we must send the
             # full URI, but in all other cases we must not (although
             # technically RFC 2616 says servers must accept our
             # request if we screw up, experimentally few do that
             # correctly.)
-            assert path[0] == '/', 'path must start with a /'
-            path = 'http://%s%s' % (hdrhost, path)
-        outgoing = ['%s %s %s%s' % (method, path, http_ver, EOL)]
-        headers['host'] = ('Host', hdrhost)
+            assert path[0:1] == b'/', 'path must start with a /'
+            path = b'http://%s%s' % (hdrhost, path)
+        outgoing = [b'%s %s %s%s' % (method, path, http_ver, EOL)]
+        headers[b'host'] = (b'Host', hdrhost)
         headers[HDR_ACCEPT_ENCODING] = (HDR_ACCEPT_ENCODING, 'identity')
-        for hdr, val in headers.itervalues():
-            outgoing.append('%s: %s%s' % (hdr, val, EOL))
+        for hdr, val in sorted((_ensurebytes(h), _ensurebytes(v))
+                               for h, v in headers.values()):
+            outgoing.append(b'%s: %s%s' % (hdr, val, EOL))
         outgoing.append(EOL)
-        return ''.join(outgoing)
+        return b''.join(outgoing)
 
     def close(self):
         """Close the connection to the server.
@@ -512,6 +642,8 @@
         available. Use the `getresponse()` method to retrieve the
         response.
         """
+        method = _ensurebytes(method)
+        path = _ensurebytes(path)
         if self.busy():
             raise httplib.CannotSendRequest(
                 'Can not send another request before '
@@ -520,11 +652,26 @@
 
         logger.info('sending %s request for %s to %s on port %s',
                     method, path, self.host, self.port)
+
         hdrs = _foldheaders(headers)
-        if hdrs.get('expect', ('', ''))[1].lower() == '100-continue':
+        # Figure out headers that have to be computed from the request
+        # body.
+        chunked = False
+        if body and HDR_CONTENT_LENGTH not in hdrs:
+            if getattr(body, '__len__', False):
+                hdrs[HDR_CONTENT_LENGTH] = (HDR_CONTENT_LENGTH,
+                                            b'%d' % len(body))
+            elif getattr(body, 'read', False):
+                hdrs[HDR_XFER_ENCODING] = (HDR_XFER_ENCODING,
+                                           XFER_ENCODING_CHUNKED)
+                chunked = True
+            else:
+                raise BadRequestData('body has no __len__() nor read()')
+        # Figure out expect-continue header
+        if hdrs.get('expect', ('', ''))[1].lower() == b'100-continue':
             expect_continue = True
         elif expect_continue:
-            hdrs['expect'] = ('Expect', '100-Continue')
+            hdrs['expect'] = (b'Expect', b'100-Continue')
         # httplib compatibility: if the user specified a
         # proxy-authorization header, that's actually intended for a
         # proxy CONNECT action, not the real request, but only if
@@ -534,25 +681,15 @@
             pa = hdrs.pop('proxy-authorization', None)
             if pa is not None:
                 pheaders['proxy-authorization'] = pa
-
-        chunked = False
-        if body and HDR_CONTENT_LENGTH not in hdrs:
-            if getattr(body, '__len__', False):
-                hdrs[HDR_CONTENT_LENGTH] = (HDR_CONTENT_LENGTH, len(body))
-            elif getattr(body, 'read', False):
-                hdrs[HDR_XFER_ENCODING] = (HDR_XFER_ENCODING,
-                                           XFER_ENCODING_CHUNKED)
-                chunked = True
-            else:
-                raise BadRequestData('body has no __len__() nor read()')
+        # Build header data
+        outgoing_headers = self._buildheaders(
+            method, path, hdrs, self.http_version)
 
         # If we're reusing the underlying socket, there are some
         # conditions where we'll want to retry, so make a note of the
         # state of self.sock
         fresh_socket = self.sock is None
         self._connect(pheaders)
-        outgoing_headers = self._buildheaders(
-            method, path, hdrs, self.http_version)
         response = None
         first = True
 
@@ -592,8 +729,8 @@
                 try:
                     try:
                         data = r[0].recv(INCOMING_BUFFER_SIZE)
-                    except socket.sslerror as e:
-                        if e.args[0] != socket.SSL_ERROR_WANT_READ:
+                    except ssl.SSLError as e:
+                        if e.args[0] != ssl.SSL_ERROR_WANT_READ:
                             raise
                         logger.debug('SSL_ERROR_WANT_READ while sending '
                                      'data, retrying...')
@@ -662,16 +799,20 @@
                             continue
                         if len(data) < OUTGOING_BUFFER_SIZE:
                             if chunked:
-                                body = '0' + EOL + EOL
+                                body = b'0' + EOL + EOL
                             else:
                                 body = None
                         if chunked:
-                            out = hex(len(data))[2:] + EOL + data + EOL
+                            # This encode is okay because we know
+                            # hex() is building us only 0-9 and a-f
+                            # digits.
+                            asciilen = hex(len(data))[2:].encode('ascii')
+                            out = asciilen + EOL + data + EOL
                         else:
                             out = data
                     amt = w[0].send(out)
                 except socket.error as e:
-                    if e[0] == socket.SSL_ERROR_WANT_WRITE and self.ssl:
+                    if e[0] == ssl.SSL_ERROR_WANT_WRITE and self.ssl:
                         # This means that SSL hasn't flushed its buffer into
                         # the socket yet.
                         # TODO: find a way to block on ssl flushing its buffer
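
The chunked path above frames each block as an ASCII hex length, CRLF, the data, CRLF, and ends the stream with a zero-length chunk; the hex digits are plain ASCII, which is why the .encode('ascii') is safe. A minimal sketch of that framing, independent of the connection code:

    EOL = b'\r\n'

    def framechunks(blocks):
        # <hex length>CRLF<data>CRLF ... terminated by a zero chunk.
        for data in blocks:
            if data:
                yield hex(len(data))[2:].encode('ascii') + EOL + data + EOL
        yield b'0' + EOL + EOL

    assert b''.join(framechunks([b'hello', b'world!'])) == (
        b'5\r\nhello\r\n6\r\nworld!\r\n0\r\n\r\n')
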
@@ -690,6 +831,7 @@
                     body = out[amt:]
                 else:
                     outgoing_headers = out[amt:]
+        # End of request-sending loop.
 
         # close if the server response said to or responded before eating
         # the whole request
--- a/mercurial/httpclient/_readers.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/httpclient/_readers.py	Mon Jul 18 23:28:14 2016 -0500
@@ -33,7 +33,12 @@
 """
 from __future__ import absolute_import
 
-import httplib
+try:
+    import httplib
+    httplib.HTTPException
+except ImportError:
+    import http.client as httplib
+
 import logging
 
 logger = logging.getLogger(__name__)
@@ -93,7 +98,7 @@
             need -= len(b)
             if need == 0:
                 break
-        result = ''.join(blocks)
+        result = b''.join(blocks)
         assert len(result) == amt or (self._finished and len(result) < amt)
 
         return result
--- a/mercurial/httpclient/socketutil.py	Sat Jul 02 09:41:40 2016 -0700
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,140 +0,0 @@
-# Copyright 2010, Google Inc.
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-"""Abstraction to simplify socket use for Python < 2.6
-
-This will attempt to use the ssl module and the new
-socket.create_connection method, but fall back to the old
-methods if those are unavailable.
-"""
-from __future__ import absolute_import
-
-import logging
-import socket
-
-logger = logging.getLogger(__name__)
-
-try:
-    import ssl
-    # make demandimporters load the module
-    ssl.wrap_socket # pylint: disable=W0104
-    have_ssl = True
-except ImportError:
-    import httplib
-    import urllib2
-    have_ssl = getattr(urllib2, 'HTTPSHandler', False)
-    ssl = False
-
-
-try:
-    create_connection = socket.create_connection
-except AttributeError:
-    def create_connection(address):
-        """Backport of socket.create_connection from Python 2.6."""
-        host, port = address
-        msg = "getaddrinfo returns an empty list"
-        sock = None
-        for res in socket.getaddrinfo(host, port, 0,
-                                      socket.SOCK_STREAM):
-            af, socktype, proto, unused_canonname, sa = res
-            try:
-                sock = socket.socket(af, socktype, proto)
-                logger.info("connect: (%s, %s)", host, port)
-                sock.connect(sa)
-            except socket.error as msg:
-                logger.info('connect fail: %s %s', host, port)
-                if sock:
-                    sock.close()
-                sock = None
-                continue
-            break
-        if not sock:
-            raise socket.error(msg)
-        return sock
-
-if ssl:
-    wrap_socket = ssl.wrap_socket
-    CERT_NONE = ssl.CERT_NONE
-    CERT_OPTIONAL = ssl.CERT_OPTIONAL
-    CERT_REQUIRED = ssl.CERT_REQUIRED
-else:
-    class FakeSocket(httplib.FakeSocket):
-        """Socket wrapper that supports SSL."""
-
-        # Silence lint about this goofy backport class
-        # pylint: disable=W0232,E1101,R0903,R0913,C0111
-
-        # backport the behavior from Python 2.6, which is to busy wait
-        # on the socket instead of anything nice. Sigh.
-        # See http://bugs.python.org/issue3890 for more info.
-        def recv(self, buflen=1024, flags=0):
-            """ssl-aware wrapper around socket.recv
-            """
-            if flags != 0:
-                raise ValueError(
-                    "non-zero flags not allowed in calls to recv() on %s" %
-                    self.__class__)
-            while True:
-                try:
-                    return self._ssl.read(buflen)
-                except socket.sslerror as x:
-                    if x.args[0] == socket.SSL_ERROR_WANT_READ:
-                        continue
-                    else:
-                        raise x
-
-    _PROTOCOL_SSLv23 = 2
-
-    CERT_NONE = 0
-    CERT_OPTIONAL = 1
-    CERT_REQUIRED = 2
-
-    # Disable unused-argument because we're making a dumb wrapper
-    # that's like an upstream method.
-    #
-    # pylint: disable=W0613,R0913
-    def wrap_socket(sock, keyfile=None, certfile=None,
-                server_side=False, cert_reqs=CERT_NONE,
-                ssl_version=_PROTOCOL_SSLv23, ca_certs=None,
-                do_handshake_on_connect=True,
-                suppress_ragged_eofs=True):
-        """Backport of ssl.wrap_socket from Python 2.6."""
-        if cert_reqs != CERT_NONE and ca_certs:
-            raise CertificateValidationUnsupported(
-                'SSL certificate validation requires the ssl module'
-                '(included in Python 2.6 and later.)')
-        sslob = socket.ssl(sock)
-        # borrow httplib's workaround for no ssl.wrap_socket
-        sock = FakeSocket(sock, sslob)
-        return sock
-    # pylint: enable=W0613,R0913
-
-
-class CertificateValidationUnsupported(Exception):
-    """Exception raised when cert validation is requested but unavailable."""
-# no-check-code
--- a/mercurial/httpconnection.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/httpconnection.py	Mon Jul 18 23:28:14 2016 -0500
@@ -280,10 +280,9 @@
         kwargs['keyfile'] = keyfile
         kwargs['certfile'] = certfile
 
-        kwargs.update(sslutil.sslkwargs(self.ui, host))
-
         con = HTTPConnection(host, port, use_ssl=True,
                              ssl_wrap_socket=sslutil.wrapsocket,
-                             ssl_validator=sslutil.validator(self.ui, host),
+                             ssl_validator=sslutil.validatesocket,
+                             ui=self.ui,
                              **kwargs)
         return con
--- a/mercurial/httppeer.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/httppeer.py	Mon Jul 18 23:28:14 2016 -0500
@@ -9,7 +9,6 @@
 from __future__ import absolute_import
 
 import errno
-import httplib
 import os
 import socket
 import tempfile
@@ -27,6 +26,7 @@
     wireproto,
 )
 
+httplib = util.httplib
 urlerr = util.urlerr
 urlreq = util.urlreq
 
@@ -302,7 +302,7 @@
     except error.RepoError as httpexception:
         try:
             r = statichttprepo.instance(ui, "static-" + path, create)
-            ui.note('(falling back to static-http)\n')
+            ui.note(_('(falling back to static-http)\n'))
             return r
         except error.RepoError:
             raise httpexception # use the original http RepoError instead
--- a/mercurial/i18n.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/i18n.py	Mon Jul 18 23:28:14 2016 -0500
@@ -78,7 +78,7 @@
             paragraphs = [p.decode("ascii") for p in message.split('\n\n')]
         # Be careful not to translate the empty string -- it holds the
         # meta data of the .po file.
-        u = u'\n\n'.join([p and _ugettext(p) or '' for p in paragraphs])
+        u = u'\n\n'.join([p and _ugettext(p) or u'' for p in paragraphs])
         try:
             # encoding.tolocal cannot be used since it will first try to
             # decode the Unicode string. Calling u.decode(enc) really
--- a/mercurial/keepalive.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/keepalive.py	Mon Jul 18 23:28:14 2016 -0500
@@ -110,15 +110,16 @@
 from __future__ import absolute_import, print_function
 
 import errno
-import httplib
+import hashlib
 import socket
 import sys
-import thread
+import threading
 
 from . import (
     util,
 )
 
+httplib = util.httplib
 urlerr = util.urlerr
 urlreq = util.urlreq
 
@@ -134,7 +135,7 @@
       * keep track of all existing
       """
     def __init__(self):
-        self._lock = thread.allocate_lock()
+        self._lock = threading.Lock()
         self._hostmap = {} # map hosts to a list of connections
         self._connmap = {} # map connections to host
         self._readymap = {} # map connection to ready state
@@ -624,8 +625,7 @@
     keepalive_handler.close_all()
 
 def continuity(url):
-    from . import util
-    md5 = util.md5
+    md5 = hashlib.md5
     format = '%25s: %s'
 
     # first fetch the file with the normal http handler
--- a/mercurial/localrepo.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/localrepo.py	Mon Jul 18 23:28:14 2016 -0500
@@ -8,6 +8,7 @@
 from __future__ import absolute_import
 
 import errno
+import hashlib
 import inspect
 import os
 import random
@@ -57,16 +58,16 @@
 )
 
 release = lockmod.release
-propertycache = util.propertycache
 urlerr = util.urlerr
 urlreq = util.urlreq
-filecache = scmutil.filecache
 
-class repofilecache(filecache):
+class repofilecache(scmutil.filecache):
     """All filecache usage on repo are done for logic that should be unfiltered
     """
 
     def __get__(self, repo, type=None):
+        if repo is None:
+            return self
         return super(repofilecache, self).__get__(repo.unfiltered(), type)
     def __set__(self, repo, value):
         return super(repofilecache, self).__set__(repo.unfiltered(), value)
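
The new `repo is None` guard follows the usual descriptor convention: when the attribute is looked up on the class rather than an instance, the descriptor returns itself instead of trying to dereference a missing repo. A toy descriptor illustrating the same pattern (names are illustrative only):

    class cachedprop(object):
        # Class-level access returns the descriptor itself, mirroring
        # how `property` behaves; instance access computes the value.
        def __init__(self, func):
            self.func = func
        def __get__(self, obj, objtype=None):
            if obj is None:
                return self
            return self.func(obj)

    class Repo(object):
        @cachedprop
        def branch(self):
            return 'default'

    assert isinstance(Repo.branch, cachedprop)   # class access
    assert Repo().branch == 'default'            # instance access
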
@@ -78,7 +79,7 @@
     def join(self, obj, fname):
         return obj.sjoin(fname)
 
-class unfilteredpropertycache(propertycache):
+class unfilteredpropertycache(util.propertycache):
     """propertycache that apply to unfiltered repo only"""
 
     def __get__(self, repo, type=None):
@@ -87,7 +88,7 @@
             return super(unfilteredpropertycache, self).__get__(unfi)
         return getattr(unfi, self.name)
 
-class filteredpropertycache(propertycache):
+class filteredpropertycache(util.propertycache):
     """propertycache that must take filtering in account"""
 
     def cachevalue(self, obj, value):
@@ -553,7 +554,10 @@
         The revset is specified as a string ``expr`` that may contain
         %-formatting to escape certain types. See ``revset.formatspec``.
 
-        Return a revset.abstractsmartset, which is a list-like interface
+        Revset aliases from the configuration are not expanded. To expand
+        user aliases, consider calling ``scmutil.revrange()``.
+
+        Returns a revset.abstractsmartset, which is a list-like interface
         that contains integer revisions.
         '''
         expr = revset.formatspec(expr, *args)
@@ -565,6 +569,9 @@
 
         This is a convenience wrapper around ``revs()`` that iterates the
         result and is a generator of changectx instances.
+
+        Revset aliases from the configuration are not expanded. To expand
+        user aliases, consider calling ``scmutil.revrange()``.
         '''
         for r in self.revs(expr, *args):
             yield self[r]
@@ -881,12 +888,6 @@
             f = f[1:]
         return filelog.filelog(self.svfs, f)
 
-    def parents(self, changeid=None):
-        '''get list of changectxs for parents of changeid'''
-        msg = 'repo.parents() is deprecated, use repo[%r].parents()' % changeid
-        self.ui.deprecwarn(msg, '3.7')
-        return self[changeid].parents()
-
     def changectx(self, changeid):
         return self[changeid]
 
@@ -1008,7 +1009,8 @@
                 or self.ui.configbool('devel', 'check-locks')):
             l = self._lockref and self._lockref()
             if l is None or not l.held:
-                self.ui.develwarn('transaction with no lock')
+                raise RuntimeError('programming error: transaction requires '
+                                   'locking')
         tr = self.currenttransaction()
         if tr is not None:
             return tr.nest()
@@ -1019,11 +1021,8 @@
                 _("abandoned transaction found"),
                 hint=_("run 'hg recover' to clean up transaction"))
 
-        # make journal.dirstate contain in-memory changes at this point
-        self.dirstate.write(None)
-
         idbase = "%.40f#%f" % (random.random(), time.time())
-        txnid = 'TXN:' + util.sha1(idbase).hexdigest()
+        txnid = 'TXN:' + hashlib.sha1(idbase).hexdigest()
         self.hook('pretxnopen', throw=True, txnname=desc, txnid=txnid)
 
         self._writejournal(desc)
@@ -1049,13 +1048,9 @@
                 # transaction running
                 repo.dirstate.write(None)
             else:
-                # prevent in-memory changes from being written out at
-                # the end of outer wlock scope or so
-                repo.dirstate.invalidate()
-
                 # discard all changes (including ones already written
                 # out) in this transaction
-                repo.vfs.rename('journal.dirstate', 'dirstate')
+                repo.dirstate.restorebackup(None, prefix='journal.')
 
                 repo.invalidate(clearfilecache=True)
 
@@ -1110,8 +1105,7 @@
         return [(vfs, undoname(x)) for vfs, x in self._journalfiles()]
 
     def _writejournal(self, desc):
-        self.vfs.write("journal.dirstate",
-                          self.vfs.tryread("dirstate"))
+        self.dirstate.savebackup(None, prefix='journal.')
         self.vfs.write("journal.branch",
                           encoding.fromlocal(self.dirstate.branch()))
         self.vfs.write("journal.desc",
@@ -1186,9 +1180,9 @@
         vfsmap = {'plain': self.vfs, '': self.svfs}
         transaction.rollback(self.svfs, vfsmap, 'undo', ui.warn)
         if self.vfs.exists('undo.bookmarks'):
-            self.vfs.rename('undo.bookmarks', 'bookmarks')
+            self.vfs.rename('undo.bookmarks', 'bookmarks', checkambig=True)
         if self.svfs.exists('undo.phaseroots'):
-            self.svfs.rename('undo.phaseroots', 'phaseroots')
+            self.svfs.rename('undo.phaseroots', 'phaseroots', checkambig=True)
         self.invalidate()
 
         parentgone = (parents[0] not in self.changelog.nodemap or
@@ -1197,7 +1191,7 @@
             # prevent dirstateguard from overwriting already restored one
             dsguard.close()
 
-            self.vfs.rename('undo.dirstate', 'dirstate')
+            self.dirstate.restorebackup(None, prefix='undo.')
             try:
                 branch = self.vfs.read('undo.branch')
                 self.dirstate.setbranch(encoding.tolocal(branch))
@@ -1206,7 +1200,6 @@
                           'current branch is still \'%s\'\n')
                         % self.dirstate.branch())
 
-            self.dirstate.invalidate()
             parents = tuple([p.rev() for p in self[None].parents()])
             if len(parents) > 1:
                 ui.status(_('working directory now based on '
--- a/mercurial/mail.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/mail.py	Mon Jul 18 23:28:14 2016 -0500
@@ -41,16 +41,16 @@
     kw['continuation_ws'] = ' '
     _oldheaderinit(self, *args, **kw)
 
-email.Header.Header.__dict__['__init__'] = _unifiedheaderinit
+setattr(email.header.Header, '__init__', _unifiedheaderinit)
 
 class STARTTLS(smtplib.SMTP):
     '''Derived class to verify the peer certificate for STARTTLS.
 
     This class allows to pass any keyword arguments to SSL socket creation.
     '''
-    def __init__(self, sslkwargs, host=None, **kwargs):
+    def __init__(self, ui, host=None, **kwargs):
         smtplib.SMTP.__init__(self, **kwargs)
-        self._sslkwargs = sslkwargs
+        self._ui = ui
         self._host = host
 
     def starttls(self, keyfile=None, certfile=None):
@@ -60,8 +60,8 @@
         (resp, reply) = self.docmd("STARTTLS")
         if resp == 220:
             self.sock = sslutil.wrapsocket(self.sock, keyfile, certfile,
-                                           serverhostname=self._host,
-                                           **self._sslkwargs)
+                                           ui=self._ui,
+                                           serverhostname=self._host)
             self.file = smtplib.SSLFakeFile(self.sock)
             self.helo_resp = None
             self.ehlo_resp = None
@@ -74,14 +74,14 @@
 
     This class allows to pass any keyword arguments to SSL socket creation.
     '''
-    def __init__(self, sslkwargs, keyfile=None, certfile=None, host=None,
+    def __init__(self, ui, keyfile=None, certfile=None, host=None,
                  **kwargs):
         self.keyfile = keyfile
         self.certfile = certfile
         smtplib.SMTP.__init__(self, **kwargs)
         self._host = host
         self.default_port = smtplib.SMTP_SSL_PORT
-        self._sslkwargs = sslkwargs
+        self._ui = ui
 
     def _get_socket(self, host, port, timeout):
         if self.debuglevel > 0:
@@ -89,8 +89,8 @@
         new_socket = socket.create_connection((host, port), timeout)
         new_socket = sslutil.wrapsocket(new_socket,
                                         self.keyfile, self.certfile,
-                                        serverhostname=self._host,
-                                        **self._sslkwargs)
+                                        ui=self._ui,
+                                        serverhostname=self._host)
         self.file = smtplib.SSLFakeFile(new_socket)
         return new_socket
 
@@ -106,22 +106,11 @@
     mailhost = ui.config('smtp', 'host')
     if not mailhost:
         raise error.Abort(_('smtp.host not configured - cannot send mail'))
-    verifycert = ui.config('smtp', 'verifycert', 'strict')
-    if verifycert not in ['strict', 'loose']:
-        if util.parsebool(verifycert) is not False:
-            raise error.Abort(_('invalid smtp.verifycert configuration: %s')
-                             % (verifycert))
-        verifycert = False
-    if (starttls or smtps) and verifycert:
-        sslkwargs = sslutil.sslkwargs(ui, mailhost)
-    else:
-        # 'ui' is required by sslutil.wrapsocket() and set by sslkwargs()
-        sslkwargs = {'ui': ui}
     if smtps:
         ui.note(_('(using smtps)\n'))
-        s = SMTPS(sslkwargs, local_hostname=local_hostname, host=mailhost)
+        s = SMTPS(ui, local_hostname=local_hostname, host=mailhost)
     elif starttls:
-        s = STARTTLS(sslkwargs, local_hostname=local_hostname, host=mailhost)
+        s = STARTTLS(ui, local_hostname=local_hostname, host=mailhost)
     else:
         s = smtplib.SMTP(local_hostname=local_hostname)
     if smtps:
@@ -137,9 +126,9 @@
         s.ehlo()
         s.starttls()
         s.ehlo()
-    if (starttls or smtps) and verifycert:
+    if starttls or smtps:
         ui.note(_('(verifying remote certificate)\n'))
-        sslutil.validator(ui, mailhost)(s.sock, verifycert == 'strict')
+        sslutil.validatesocket(s.sock)
     username = ui.config('smtp', 'username')
     password = ui.config('smtp', 'password')
     if username and not password:
--- a/mercurial/manifest.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/manifest.py	Mon Jul 18 23:28:14 2016 -0500
@@ -211,8 +211,10 @@
 
     def filesnotin(self, m2):
         '''Set of files in this manifest that are not in the other'''
-        files = set(self)
-        files.difference_update(m2)
+        diff = self.diff(m2)
+        files = set(filepath
+                    for filepath, hashflags in diff.iteritems()
+                    if hashflags[1][0] is None)
         return files
 
     @propertycache
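
filesnotin() now derives its answer from a manifest diff, where each entry maps a path to ((node1, flags1), (node2, flags2)) and a None node on the m2 side means the file is absent there. A toy illustration of that selection, using made-up data in the shape the code above assumes:

    diff = {
        'a.txt': (('0a1b2c', ''), ('3d4e5f', '')),   # changed in both
        'b.txt': (('0a1b2c', ''), (None, None)),     # only in self
        'c.txt': ((None, None), ('3d4e5f', '')),     # only in m2
    }
    onlyinself = set(path for path, hashflags in diff.items()
                     if hashflags[1][0] is None)
    assert onlyinself == set(['b.txt'])
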
@@ -966,7 +968,7 @@
             return self.readdelta(node)
         if self._usemanifestv2:
             raise error.Abort(
-                "readshallowdelta() not implemented for manifestv2")
+                _("readshallowdelta() not implemented for manifestv2"))
         r = self.rev(node)
         d = mdiff.patchtext(self.revdiff(self.deltaparent(r), r))
         return manifestdict(d)
--- a/mercurial/match.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/match.py	Mon Jul 18 23:28:14 2016 -0500
@@ -38,7 +38,7 @@
     for kind, pat, source in kindpats:
         if kind == 'set':
             if not ctx:
-                raise error.Abort("fileset expression with no context")
+                raise error.Abort(_("fileset expression with no context"))
             s = ctx.getfileset(pat)
             fset.update(s)
 
--- a/mercurial/mdiff.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/mdiff.py	Mon Jul 18 23:28:14 2016 -0500
@@ -58,10 +58,8 @@
         'upgrade': False,
         }
 
-    __slots__ = defaults.keys()
-
     def __init__(self, **opts):
-        for k in self.__slots__:
+        for k in self.defaults.keys():
             v = opts.get(k)
             if v is None:
                 v = self.defaults[k]
--- a/mercurial/merge.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/merge.py	Mon Jul 18 23:28:14 2016 -0500
@@ -8,6 +8,7 @@
 from __future__ import absolute_import
 
 import errno
+import hashlib
 import os
 import shutil
 import struct
@@ -373,7 +374,7 @@
         """Write current state on disk in a version 1 file"""
         f = self._repo.vfs(self.statepathv1, 'w')
         irecords = iter(records)
-        lrecords = irecords.next()
+        lrecords = next(irecords)
         assert lrecords[0] == 'L'
         f.write(hex(self._local) + '\n')
         for rtype, data in irecords:
@@ -408,7 +409,7 @@
         if fcl.isabsent():
             hash = nullhex
         else:
-            hash = util.sha1(fcl.path()).hexdigest()
+            hash = hashlib.sha1(fcl.path()).hexdigest()
             self._repo.vfs.write('merge/' + hash, fcl.data())
         self._state[fd] = ['u', hash, fcl.path(),
                            fca.path(), hex(fca.filenode()),
@@ -989,19 +990,19 @@
             if len(bids) == 1: # all bids are the same kind of method
                 m, l = bids.items()[0]
                 if all(a == l[0] for a in l[1:]): # len(bids) is > 1
-                    repo.ui.note(" %s: consensus for %s\n" % (f, m))
+                    repo.ui.note(_(" %s: consensus for %s\n") % (f, m))
                     actions[f] = l[0]
                     continue
             # If keep is an option, just do it.
             if 'k' in bids:
-                repo.ui.note(" %s: picking 'keep' action\n" % f)
+                repo.ui.note(_(" %s: picking 'keep' action\n") % f)
                 actions[f] = bids['k'][0]
                 continue
             # If there are gets and they all agree [how could they not?], do it.
             if 'g' in bids:
                 ga0 = bids['g'][0]
                 if all(a == ga0 for a in bids['g'][1:]):
-                    repo.ui.note(" %s: picking 'get' action\n" % f)
+                    repo.ui.note(_(" %s: picking 'get' action\n") % f)
                     actions[f] = ga0
                     continue
             # TODO: Consider other simple actions such as mode changes
@@ -1075,15 +1076,14 @@
                 absf = repo.wjoin(f)
                 orig = scmutil.origpath(ui, repo, absf)
                 try:
-                    # TODO Mercurial has always aborted if an untracked
-                    # directory is replaced by a tracked file, or generally
-                    # with file/directory merges. This needs to be sorted out.
                     if repo.wvfs.isfileorlink(f):
                         util.rename(absf, orig)
                 except OSError as e:
                     if e.errno != errno.ENOENT:
                         raise
 
+            if repo.wvfs.isdir(f):
+                repo.wvfs.removedirs(f)
             wwrite(f, fctx(f).data(), flags, backgroundclose=True)
             if i == 100:
                 yield i, f
@@ -1442,9 +1442,7 @@
             pas = [repo[ancestor]]
 
         if node is None:
-            if (repo.ui.configbool('devel', 'all-warnings')
-                    or repo.ui.configbool('devel', 'oldapi')):
-                repo.ui.develwarn('update with no target')
+            repo.ui.deprecwarn('update with no target', '3.9')
             rev, _mark, _act = destutil.destupdate(repo)
             node = repo[rev].node()
 
--- a/mercurial/mpatch.c	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/mpatch.c	Mon Jul 18 23:28:14 2016 -0500
@@ -26,6 +26,7 @@
 #include <string.h>
 
 #include "util.h"
+#include "bitmanipulation.h"
 
 static char mpatch_doc[] = "Efficient binary patching.";
 static PyObject *mpatch_Error;
--- a/mercurial/obsolete.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/obsolete.py	Mon Jul 18 23:28:14 2016 -0500
@@ -600,8 +600,8 @@
         Take care of filtering duplicate.
         Return the number of new marker."""
         if self._readonly:
-            raise error.Abort('creating obsolete markers is not enabled on '
-                              'this repo')
+            raise error.Abort(_('creating obsolete markers is not enabled on '
+                              'this repo'))
         known = set(self._all)
         new = []
         for m in markers:
@@ -1171,7 +1171,7 @@
                                    ignoreflags=bumpedfix):
             prev = torev(pnode) # unfiltered! but so is phasecache
             if (prev is not None) and (phase(repo, prev) <= public):
-                # we have a public precursors
+                # we have a public precursor
                 bumped.add(rev)
                 break # Next draft!
     return bumped
@@ -1234,7 +1234,7 @@
                 localmetadata.update(rel[2])
 
             if not prec.mutable():
-                raise error.Abort("cannot obsolete public changeset: %s"
+                raise error.Abort(_("cannot obsolete public changeset: %s")
                                  % prec,
                                  hint='see "hg help phases" for details')
             nprec = prec.node()
@@ -1243,7 +1243,8 @@
             if not nsucs:
                 npare = tuple(p.node() for p in prec.parents())
             if nprec in nsucs:
-                raise error.Abort("changeset %s cannot obsolete itself" % prec)
+                raise error.Abort(_("changeset %s cannot obsolete itself")
+                                  % prec)
 
             # Creating the marker causes the hidden cache to become invalid,
             # which causes recomputation when we ask for prec.parents() above.
--- a/mercurial/parser.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/parser.py	Mon Jul 18 23:28:14 2016 -0500
@@ -325,13 +325,13 @@
         >>> builddecl('foo')
         ('foo', None, None)
         >>> builddecl('$foo')
-        ('$foo', None, "'$' not for alias arguments")
+        ('$foo', None, "invalid symbol '$foo'")
         >>> builddecl('foo::bar')
         ('foo::bar', None, 'invalid format')
         >>> builddecl('foo()')
         ('foo', [], None)
         >>> builddecl('$foo()')
-        ('$foo()', None, "'$' not for alias arguments")
+        ('$foo()', None, "invalid function '$foo'")
         >>> builddecl('foo($1, $2)')
         ('foo', ['$1', '$2'], None)
         >>> builddecl('foo(bar_bar, baz.baz)')
@@ -358,7 +358,7 @@
             # "name = ...." style
             name = tree[1]
             if name.startswith('$'):
-                return (decl, None, _("'$' not for alias arguments"))
+                return (decl, None, _("invalid symbol '%s'") % name)
             return (name, None, None)
 
         func = cls._trygetfunc(tree)
@@ -366,7 +366,7 @@
             # "name(arg, ....) = ...." style
             name, args = func
             if name.startswith('$'):
-                return (decl, None, _("'$' not for alias arguments"))
+                return (decl, None, _("invalid function '%s'") % name)
             if any(t[0] != cls._symbolnode for t in args):
                 return (decl, None, _("invalid argument list"))
             if len(args) != len(set(args)):
@@ -389,7 +389,7 @@
         if sym in args:
             op = '_aliasarg'
         elif sym.startswith('$'):
-            raise error.ParseError(_("'$' not for alias arguments"))
+            raise error.ParseError(_("invalid symbol '%s'") % sym)
         return (op, sym)
 
     @classmethod
@@ -423,7 +423,7 @@
         ...     builddefn('$1 or $bar', args)
         ... except error.ParseError as inst:
         ...     print parseerrordetail(inst)
-        '$' not for alias arguments
+        invalid symbol '$bar'
         >>> args = ['$1', '$10', 'foo']
         >>> pprint(builddefn('$10 or baz', args))
         (or
@@ -447,15 +447,13 @@
         repl = efmt = None
         name, args, err = cls._builddecl(decl)
         if err:
-            efmt = _('failed to parse the declaration of %(section)s '
-                     '"%(name)s": %(error)s')
+            efmt = _('bad declaration of %(section)s "%(name)s": %(error)s')
         else:
             try:
                 repl = cls._builddefn(defn, args)
             except error.ParseError as inst:
                 err = parseerrordetail(inst)
-                efmt = _('failed to parse the definition of %(section)s '
-                         '"%(name)s": %(error)s')
+                efmt = _('bad definition of %(section)s "%(name)s": %(error)s')
         if err:
             err = efmt % {'section': cls._section, 'name': name, 'error': err}
         return alias(name, args, err, repl)
--- a/mercurial/parsers.c	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/parsers.c	Mon Jul 18 23:28:14 2016 -0500
@@ -13,6 +13,7 @@
 #include <string.h>
 
 #include "util.h"
+#include "bitmanipulation.h"
 
 static char *versionerrortext = "Python minor version mismatch";
 
--- a/mercurial/patch.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/patch.py	Mon Jul 18 23:28:14 2016 -0500
@@ -12,6 +12,7 @@
 import copy
 import email
 import errno
+import hashlib
 import os
 import posixpath
 import re
@@ -978,7 +979,19 @@
 def filterpatch(ui, headers, operation=None):
     """Interactively filter patch chunks into applied-only chunks"""
     if operation is None:
-        operation = _('record')
+        operation = 'record'
+    messages = {
+        'multiple': {
+            'discard': _("discard change %d/%d to '%s'?"),
+            'record': _("record change %d/%d to '%s'?"),
+            'revert': _("revert change %d/%d to '%s'?"),
+        }[operation],
+        'single': {
+            'discard': _("discard this change to '%s'?"),
+            'record': _("record this change to '%s'?"),
+            'revert': _("revert this change to '%s'?"),
+        }[operation],
+    }
 
     def prompt(skipfile, skipall, query, chunk):
         """prompt query, and process base inputs
@@ -1109,11 +1122,10 @@
             if skipfile is None and skipall is None:
                 chunk.pretty(ui)
             if total == 1:
-                msg = _("record this change to '%s'?") % chunk.filename()
+                msg = messages['single'] % chunk.filename()
             else:
                 idx = pos - len(h.hunks) + i
-                msg = _("record change %d/%d to '%s'?") % (idx, total,
-                                                           chunk.filename())
+                msg = messages['multiple'] % (idx, total, chunk.filename())
             r, skipfile, skipall, newpatches = prompt(skipfile,
                     skipall, msg, chunk)
             if r:
@@ -2172,7 +2184,7 @@
     return mdiff.diffopts(**buildopts)
 
 def diff(repo, node1=None, node2=None, match=None, changes=None, opts=None,
-         losedatafn=None, prefix='', relroot=''):
+         losedatafn=None, prefix='', relroot='', copy=None):
     '''yields diff of changes to files between two nodes, or node and
     working directory.
 
@@ -2191,7 +2203,10 @@
     display (used for subrepos).
 
     relroot, if not empty, must be normalized with a trailing /. Any match
-    patterns that fall outside it will be ignored.'''
+    patterns that fall outside it will be ignored.
+
+    copy, if not empty, should contain mappings {dst@y: src@x} of copy
+    information.'''
 
     if opts is None:
         opts = mdiff.defaultopts
@@ -2238,9 +2253,10 @@
         hexfunc = short
     revs = [hexfunc(node) for node in [ctx1.node(), ctx2.node()] if node]
 
-    copy = {}
-    if opts.git or opts.upgrade:
-        copy = copies.pathcopies(ctx1, ctx2, match=match)
+    if copy is None:
+        copy = {}
+        if opts.git or opts.upgrade:
+            copy = copies.pathcopies(ctx1, ctx2, match=match)
 
     if relroot is not None:
         if not relfiltered:
@@ -2401,7 +2417,7 @@
         if not text:
             text = ""
         l = len(text)
-        s = util.sha1('blob %d\0' % l)
+        s = hashlib.sha1('blob %d\0' % l)
         s.update(text)
         return s.hexdigest()
 
--- a/mercurial/pathencode.c	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/pathencode.c	Mon Jul 18 23:28:14 2016 -0500
@@ -653,24 +653,24 @@
 	PyObject *shaobj, *hashobj;
 
 	if (shafunc == NULL) {
-		PyObject *util, *name = PyString_FromString("mercurial.util");
+		PyObject *hashlib, *name = PyString_FromString("hashlib");
 
 		if (name == NULL)
 			return -1;
 
-		util = PyImport_Import(name);
+		hashlib = PyImport_Import(name);
 		Py_DECREF(name);
 
-		if (util == NULL) {
-			PyErr_SetString(PyExc_ImportError, "mercurial.util");
+		if (hashlib == NULL) {
+			PyErr_SetString(PyExc_ImportError, "hashlib");
 			return -1;
 		}
-		shafunc = PyObject_GetAttrString(util, "sha1");
-		Py_DECREF(util);
+		shafunc = PyObject_GetAttrString(hashlib, "sha1");
+		Py_DECREF(hashlib);
 
 		if (shafunc == NULL) {
 			PyErr_SetString(PyExc_AttributeError,
-					"module 'mercurial.util' has no "
+					"module 'hashlib' has no "
 					"attribute 'sha1'");
 			return -1;
 		}
--- a/mercurial/peer.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/peer.py	Mon Jul 18 23:28:14 2016 -0500
@@ -98,12 +98,12 @@
     '''
     def plain(*args, **opts):
         batchable = f(*args, **opts)
-        encargsorres, encresref = batchable.next()
+        encargsorres, encresref = next(batchable)
         if not encresref:
             return encargsorres # a local result in this case
         self = args[0]
         encresref.set(self._submitone(f.func_name, encargsorres))
-        return batchable.next()
+        return next(batchable)
     setattr(plain, 'batchable', f)
     return plain
 
--- a/mercurial/phases.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/phases.py	Mon Jul 18 23:28:14 2016 -0500
@@ -251,7 +251,7 @@
     def write(self):
         if not self.dirty:
             return
-        f = self.opener('phaseroots', 'w', atomictemp=True)
+        f = self.opener('phaseroots', 'w', atomictemp=True, checkambig=True)
         try:
             self._write(f)
         finally:
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/policy.py	Mon Jul 18 23:28:14 2016 -0500
@@ -0,0 +1,45 @@
+# policy.py - module policy logic for Mercurial.
+#
+# Copyright 2015 Gregory Szorc <gregory.szorc@gmail.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from __future__ import absolute_import
+
+import os
+import sys
+
+# Rules for how modules can be loaded. Values are:
+#
+#    c - require C extensions
+#    allow - allow pure Python implementation when C loading fails
+#    cffi - required cffi versions (implemented within pure module)
+#    cffi-allow - allow pure Python implementation if cffi version is missing
+#    py - only load pure Python modules
+#
+# By default, require the C extensions for performance reasons.
+policy = 'c'
+policynoc = ('cffi', 'cffi-allow', 'py')
+policynocffi = ('c', 'py')
+
+try:
+    from . import __modulepolicy__
+    policy = __modulepolicy__.modulepolicy
+except ImportError:
+    pass
+
+# PyPy doesn't load C extensions.
+#
+# The canonical way to do this is to test platform.python_implementation().
+# But we don't import platform and don't bloat for it here.
+if '__pypy__' in sys.builtin_module_names:
+    policy = 'cffi'
+
+# Our C extensions aren't yet compatible with Python 3. So use pure Python
+# on Python 3 for now.
+if sys.version_info[0] >= 3:
+    policy = 'py'
+
+# Environment variable can always force settings.
+policy = os.environ.get('HGMODULEPOLICY', policy)
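
A rough sketch of how a loader might act on these policy values — the helper and module names are hypothetical, not Mercurial APIs, and the cffi variants are glossed over:

    import importlib
    import os

    def loadmodule(cname, purename):
        policy = os.environ.get('HGMODULEPOLICY', 'c')
        if policy != 'py':
            try:
                return importlib.import_module(cname)
            except ImportError:
                if policy == 'c':   # strict: the C extension is required
                    raise
        return importlib.import_module(purename)
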
--- a/mercurial/posix.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/posix.py	Mon Jul 18 23:28:14 2016 -0500
@@ -598,3 +598,18 @@
         return ''.join(chunks)
     finally:
         fcntl.fcntl(pipe, fcntl.F_SETFL, oldflags)
+
+def bindunixsocket(sock, path):
+    """Bind the UNIX domain socket to the specified path"""
+    # use relative path instead of full path at bind() if possible, since
+    # AF_UNIX path has very small length limit (107 chars) on common
+    # platforms (see sys/un.h)
+    dirname, basename = os.path.split(path)
+    bakwdfd = None
+    if dirname:
+        bakwdfd = os.open('.', os.O_DIRECTORY)
+        os.chdir(dirname)
+    sock.bind(basename)
+    if bakwdfd:
+        os.fchdir(bakwdfd)
+        os.close(bakwdfd)
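
A usage sketch for the helper above (POSIX only); it assumes the directory /tmp/hg-demo already exists and that bindunixsocket() is importable in the current scope:

    import socket

    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    bindunixsocket(sock, '/tmp/hg-demo/server.sock')
    sock.listen(1)
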
--- a/mercurial/pure/osutil.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/pure/osutil.py	Mon Jul 18 23:28:14 2016 -0500
@@ -14,6 +14,10 @@
 import stat as statmod
 import sys
 
+from . import policy
+modulepolicy = policy.policy
+policynocffi = policy.policynocffi
+
 def _mode_to_kind(mode):
     if statmod.S_ISREG(mode):
         return statmod.S_IFREG
@@ -31,7 +35,7 @@
         return statmod.S_IFSOCK
     return mode
 
-def listdir(path, stat=False, skip=None):
+def listdirpure(path, stat=False, skip=None):
     '''listdir(path, stat=False) -> list_of_tuples
 
     Return a sorted list containing information about the entries
@@ -61,6 +65,95 @@
             result.append((fn, _mode_to_kind(st.st_mode)))
     return result
 
+ffi = None
+if modulepolicy not in policynocffi and sys.platform == 'darwin':
+    try:
+        from _osutil_cffi import ffi, lib
+    except ImportError:
+        if modulepolicy == 'cffi': # strict cffi import
+            raise
+
+if sys.platform == 'darwin' and ffi is not None:
+    listdir_batch_size = 4096
+    # tweakable number; only affects performance: how many bytes of
+    # getattrlistbulk results we ask for per call
+
+    attrkinds = [None] * 20 # we need the max no for enum VXXX, 20 is plenty
+
+    attrkinds[lib.VREG] = statmod.S_IFREG
+    attrkinds[lib.VDIR] = statmod.S_IFDIR
+    attrkinds[lib.VLNK] = statmod.S_IFLNK
+    attrkinds[lib.VBLK] = statmod.S_IFBLK
+    attrkinds[lib.VCHR] = statmod.S_IFCHR
+    attrkinds[lib.VFIFO] = statmod.S_IFIFO
+    attrkinds[lib.VSOCK] = statmod.S_IFSOCK
+
+    class stat_res(object):
+        def __init__(self, st_mode, st_mtime, st_size):
+            self.st_mode = st_mode
+            self.st_mtime = st_mtime
+            self.st_size = st_size
+
+    tv_sec_ofs = ffi.offsetof("struct timespec", "tv_sec")
+    buf = ffi.new("char[]", listdir_batch_size)
+
+    def listdirinternal(dfd, req, stat, skip):
+        ret = []
+        while True:
+            r = lib.getattrlistbulk(dfd, req, buf, listdir_batch_size, 0)
+            if r == 0:
+                break
+            if r == -1:
+                raise OSError(ffi.errno, os.strerror(ffi.errno))
+            cur = ffi.cast("val_attrs_t*", buf)
+            for i in range(r):
+                lgt = cur.length
+                assert lgt == ffi.cast('uint32_t*', cur)[0]
+                ofs = cur.name_info.attr_dataoffset
+                str_lgt = cur.name_info.attr_length
+                base_ofs = ffi.offsetof('val_attrs_t', 'name_info')
+                name = str(ffi.buffer(ffi.cast("char*", cur) + base_ofs + ofs,
+                           str_lgt - 1))
+                tp = attrkinds[cur.obj_type]
+                if name == "." or name == "..":
+                    continue
+                if skip == name and tp == statmod.S_IFDIR:
+                    return []
+                if stat:
+                    mtime = cur.time.tv_sec
+                    mode = (cur.accessmask & ~lib.S_IFMT) | tp
+                    ret.append((name, tp, stat_res(st_mode=mode, st_mtime=mtime,
+                                st_size=cur.datalength)))
+                else:
+                    ret.append((name, tp))
+                cur += lgt
+        return ret
+
+    def listdir(path, stat=False, skip=None):
+        req = ffi.new("struct attrlist*")
+        req.bitmapcount = lib.ATTR_BIT_MAP_COUNT
+        req.commonattr = (lib.ATTR_CMN_RETURNED_ATTRS |
+                          lib.ATTR_CMN_NAME |
+                          lib.ATTR_CMN_OBJTYPE |
+                          lib.ATTR_CMN_ACCESSMASK |
+                          lib.ATTR_CMN_MODTIME)
+        req.fileattr = lib.ATTR_FILE_DATALENGTH
+        dfd = lib.open(path, lib.O_RDONLY, 0)
+        if dfd == -1:
+            raise OSError(ffi.errno, os.strerror(ffi.errno))
+
+        try:
+            ret = listdirinternal(dfd, req, stat, skip)
+        finally:
+            try:
+                lib.close(dfd)
+            except BaseException:
+                # we ignore all the errors from closing, not much we can
+                # do about that
+                pass
+        return ret
+else:
+    listdir = listdirpure
+
 if os.name != 'nt':
     posixfile = open
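
A usage sketch (not part of the patch): whichever implementation ends up bound
to listdir, callers get the same (name, kind) or (name, kind, stat) tuples, so
the cffi fast path stays transparent. This assumes the module shown here is
importable as mercurial.pure.osutil.

    from mercurial.pure import osutil

    # kind is a stat.S_IF* constant; with stat=True the third element exposes
    # at least st_mode, st_mtime and st_size on both code paths.
    # skip='.hg' makes the whole listing come back empty if a '.hg' directory
    # is present.
    for name, kind, st in osutil.listdir('.', stat=True, skip='.hg'):
        print('%s %o %d' % (name, kind, st.st_size))
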
 
--- a/mercurial/pure/parsers.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/pure/parsers.py	Mon Jul 18 23:28:14 2016 -0500
@@ -25,49 +25,111 @@
     # x is a tuple
     return x
 
-def parse_index2(data, inline):
-    def gettype(q):
-        return int(q & 0xFFFF)
+indexformatng = ">Qiiiiii20s12x"
+indexfirst = struct.calcsize('Q')
+sizeint = struct.calcsize('i')
+indexsize = struct.calcsize(indexformatng)
+
+def gettype(q):
+    return int(q & 0xFFFF)
 
-    def offset_type(offset, type):
-        return long(long(offset) << 16 | type)
+def offset_type(offset, type):
+    return long(long(offset) << 16 | type)
+
+class BaseIndexObject(object):
+    def __len__(self):
+        return self._lgt + len(self._extra) + 1
+
+    def insert(self, i, tup):
+        assert i == -1
+        self._extra.append(tup)
 
-    indexformatng = ">Qiiiiii20s12x"
+    def _fix_index(self, i):
+        if not isinstance(i, int):
+            raise TypeError("expecting int indexes")
+        if i < 0:
+            i = len(self) + i
+        if i < 0 or i >= len(self):
+            raise IndexError
+        return i
 
-    s = struct.calcsize(indexformatng)
-    index = []
-    cache = None
-    off = 0
+    def __getitem__(self, i):
+        i = self._fix_index(i)
+        if i == len(self) - 1:
+            return (0, 0, 0, -1, -1, -1, -1, nullid)
+        if i >= self._lgt:
+            return self._extra[i - self._lgt]
+        index = self._calculate_index(i)
+        r = struct.unpack(indexformatng, self._data[index:index + indexsize])
+        if i == 0:
+            e = list(r)
+            type = gettype(e[0])
+            e[0] = offset_type(0, type)
+            return tuple(e)
+        return r
+
+class IndexObject(BaseIndexObject):
+    def __init__(self, data):
+        assert len(data) % indexsize == 0
+        self._data = data
+        self._lgt = len(data) // indexsize
+        self._extra = []
+
+    def _calculate_index(self, i):
+        return i * indexsize
 
-    l = len(data) - s
-    append = index.append
-    if inline:
-        cache = (0, data)
-        while off <= l:
-            e = _unpack(indexformatng, data[off:off + s])
-            append(e)
-            if e[1] < 0:
-                break
-            off += e[1] + s
-    else:
-        while off <= l:
-            e = _unpack(indexformatng, data[off:off + s])
-            append(e)
-            off += s
+    def __delitem__(self, i):
+        if not isinstance(i, slice) or not i.stop == -1 or not i.step is None:
+            raise ValueError("deleting slices only supports a:-1 with step 1")
+        i = self._fix_index(i.start)
+        if i < self._lgt:
+            self._data = self._data[:i * indexsize]
+            self._lgt = i
+            self._extra = []
+        else:
+            self._extra = self._extra[:i - self._lgt]
+
+class InlinedIndexObject(BaseIndexObject):
+    def __init__(self, data, inline=0):
+        self._data = data
+        self._lgt = self._inline_scan(None)
+        self._inline_scan(self._lgt)
+        self._extra = []
 
-    if off != len(data):
-        raise ValueError('corrupt index file')
+    def _inline_scan(self, lgt):
+        off = 0
+        if lgt is not None:
+            self._offsets = [0] * lgt
+        count = 0
+        while off <= len(self._data) - indexsize:
+            s, = struct.unpack('>i',
+                self._data[off + indexfirst:off + sizeint + indexfirst])
+            if lgt is not None:
+                self._offsets[count] = off
+            count += 1
+            off += indexsize + s
+        if off != len(self._data):
+            raise ValueError("corrupted data")
+        return count
 
-    if index:
-        e = list(index[0])
-        type = gettype(e[0])
-        e[0] = offset_type(0, type)
-        index[0] = tuple(e)
+    def __delitem__(self, i):
+        if not isinstance(i, slice) or not i.stop == -1 or not i.step is None:
+            raise ValueError("deleting slices only supports a:-1 with step 1")
+        i = self._fix_index(i.start)
+        if i < self._lgt:
+            self._offsets = self._offsets[:i]
+            self._lgt = i
+            self._extra = []
+        else:
+            self._extra = self._extra[:i - self._lgt]
 
-    # add the magic null revision at -1
-    index.append((0, 0, 0, -1, -1, -1, -1, nullid))
+    def _calculate_index(self, i):
+        return self._offsets[i]
 
-    return index, cache
+def parse_index2(data, inline):
+    if not inline:
+        return IndexObject(data), None
+    return InlinedIndexObject(data, inline), (0, data)
 
 def parse_dirstate(dmap, copymap, st):
     parents = [st[:20], st[20: 40]]
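
The index classes above replace the old eager loop with objects that unpack a
single 64-byte record (indexformatng is ">Qiiiiii20s12x") per access. A rough
usage sketch, assuming `data` holds the raw bytes of a non-inline revlog index
file:

    from mercurial.pure import parsers

    index, cache = parsers.parse_index2(data, inline=False)
    assert cache is None           # a chunk cache only exists for inline revlogs
    print(len(index))              # stored revisions plus the magic null revision
    print(index[0])                # unpacked lazily via struct on first access
    print(index[len(index) - 1])   # (0, 0, 0, -1, -1, -1, -1, nullid)
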
--- a/mercurial/pycompat.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/pycompat.py	Mon Jul 18 23:28:14 2016 -0500
@@ -10,18 +10,26 @@
 
 from __future__ import absolute_import
 
-try:
+import sys
+
+if sys.version_info[0] < 3:
+    import cPickle as pickle
     import cStringIO as io
-    stringio = io.StringIO
-except ImportError:
+    import httplib
+    import Queue as _queue
+    import SocketServer as socketserver
+    import urlparse
+    import xmlrpclib
+else:
+    import http.client as httplib
     import io
-    stringio = io.StringIO
+    import pickle
+    import queue as _queue
+    import socketserver
+    import urllib.parse as urlparse
+    import xmlrpc.client as xmlrpclib
 
-try:
-    import Queue as _queue
-    _queue.Queue
-except ImportError:
-    import queue as _queue
+stringio = io.StringIO
 empty = _queue.Empty
 queue = _queue.Queue
 
@@ -41,9 +49,13 @@
         except AttributeError:
             pass
 
+httpserver = _pycompatstub()
 urlreq = _pycompatstub()
 urlerr = _pycompatstub()
 try:
+    import BaseHTTPServer
+    import CGIHTTPServer
+    import SimpleHTTPServer
     import urllib2
     import urllib
     _alias(urlreq, urllib, (
@@ -81,6 +93,16 @@
         "HTTPError",
         "URLError",
     ))
+    _alias(httpserver, BaseHTTPServer, (
+        "HTTPServer",
+        "BaseHTTPRequestHandler",
+    ))
+    _alias(httpserver, SimpleHTTPServer, (
+        "SimpleHTTPRequestHandler",
+    ))
+    _alias(httpserver, CGIHTTPServer, (
+        "CGIHTTPRequestHandler",
+    ))
 
 except ImportError:
     import urllib.request
@@ -99,6 +121,7 @@
         "pathname2url",
         "HTTPBasicAuthHandler",
         "HTTPDigestAuthHandler",
+        "HTTPPasswordMgrWithDefaultRealm",
         "ProxyHandler",
         "quote",
         "Request",
@@ -115,6 +138,13 @@
         "HTTPError",
         "URLError",
     ))
+    import http.server
+    _alias(httpserver, http.server, (
+        "HTTPServer",
+        "BaseHTTPRequestHandler",
+        "SimpleHTTPRequestHandler",
+        "CGIHTTPRequestHandler",
+    ))
 
 try:
     xrange
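
A quick usage sketch (not part of the patch): callers import the stubs and
never touch the renamed stdlib modules directly. The attribute names copied by
_alias are assumed to be exposed lower-cased (e.g. urlreq.urlopen); the names
used below are all defined directly above.

    from mercurial import pycompat

    buf = pycompat.stringio()    # cStringIO.StringIO on py2, io.StringIO on py3
    q = pycompat.queue()         # Queue.Queue on py2, queue.Queue on py3
    try:
        q.get(block=False)
    except pycompat.empty:       # Queue.Empty vs queue.Empty
        pass
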
--- a/mercurial/repair.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/repair.py	Mon Jul 18 23:28:14 2016 -0500
@@ -9,6 +9,7 @@
 from __future__ import absolute_import
 
 import errno
+import hashlib
 
 from .i18n import _
 from .node import short
@@ -35,7 +36,7 @@
     # Include a hash of all the nodes in the filename for uniqueness
     allcommits = repo.set('%ln::%ln', bases, heads)
     allhashes = sorted(c.hex() for c in allcommits)
-    totalhash = util.sha1(''.join(allhashes)).hexdigest()
+    totalhash = hashlib.sha1(''.join(allhashes)).hexdigest()
     name = "%s/%s-%s-%s.hg" % (backupdir, short(node), totalhash[:8], suffix)
 
     comp = None
@@ -166,6 +167,13 @@
             tr.startgroup()
             cl.strip(striprev, tr)
             mfst.strip(striprev, tr)
+            if 'treemanifest' in repo.requirements: # safe but unnecessary
+                                                    # otherwise
+                for unencoded, encoded, size in repo.store.datafiles():
+                    if (unencoded.startswith('meta/') and
+                        unencoded.endswith('00manifest.i')):
+                        dir = unencoded[5:-12]
+                        repo.dirlog(dir).strip(striprev, tr)
             for fn in files:
                 repo.file(fn).strip(striprev, tr)
             tr.endgroup()
--- a/mercurial/repoview.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/repoview.py	Mon Jul 18 23:28:14 2016 -0500
@@ -9,6 +9,7 @@
 from __future__ import absolute_import
 
 import copy
+import hashlib
 import heapq
 import struct
 
@@ -18,7 +19,6 @@
     obsolete,
     phases,
     tags as tagsmod,
-    util,
 )
 
 def hideablerevs(repo):
@@ -102,7 +102,7 @@
     it to the cache. Upon reading we can easily validate by checking the hash
     against the stored one and discard the cache in case the hashes don't match.
     """
-    h = util.sha1()
+    h = hashlib.sha1()
     h.update(''.join(repo.heads()))
     h.update(str(hash(frozenset(hideable))))
     return h.digest()
--- a/mercurial/revlog.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/revlog.py	Mon Jul 18 23:28:14 2016 -0500
@@ -15,6 +15,7 @@
 
 import collections
 import errno
+import hashlib
 import os
 import struct
 import zlib
@@ -40,7 +41,6 @@
 _unpack = struct.unpack
 _compress = zlib.compress
 _decompress = zlib.decompress
-_sha = util.sha1
 
 # revlog header flags
 REVLOGV0 = 0
@@ -74,7 +74,7 @@
 def offset_type(offset, type):
     return long(long(offset) << 16 | type)
 
-_nullhash = _sha(nullid)
+_nullhash = hashlib.sha1(nullid)
 
 def hash(text, p1, p2):
     """generate a hash from the given text and its parent hashes
@@ -92,7 +92,7 @@
         # none of the parent nodes are nullid
         l = [p1, p2]
         l.sort()
-        s = _sha(l[0])
+        s = hashlib.sha1(l[0])
         s.update(l[1])
     s.update(text)
     return s.digest()
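
Setting aside the cached-nullid fast path kept above, the nodeid formula is
simply SHA-1 over the two parent nodes in sorted order followed by the text; a
standalone sketch of just that computation:

    import hashlib

    def nodehash(text, p1, p2):
        # sort the parents so the result does not depend on their order
        lo, hi = sorted([p1, p2])
        s = hashlib.sha1(lo)
        s.update(hi)
        s.update(text)
        return s.digest()
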
@@ -941,8 +941,11 @@
             return None
         except RevlogError:
             # parsers.c radix tree lookup gave multiple matches
+            # fast path: for unfiltered changelog, radix tree is accurate
+            if not getattr(self, 'filteredrevs', None):
+                raise LookupError(id, self.indexfile,
+                                  _('ambiguous identifier'))
             # fall through to slow path that filters hidden revisions
-            pass
         except (AttributeError, ValueError):
             # we are pure python, or key was too short to search radix tree
             pass
--- a/mercurial/revset.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/revset.py	Mon Jul 18 23:28:14 2016 -0500
@@ -302,6 +302,11 @@
 
 # helpers
 
+def getsymbol(x):
+    if x and x[0] == 'symbol':
+        return x[1]
+    raise error.ParseError(_('not a symbol'))
+
 def getstring(x, err):
     if x and (x[0] == 'string' or x[0] == 'symbol'):
         return x[1]
@@ -330,13 +335,12 @@
     s = methods[x[0]](repo, subset, *x[1:])
     if util.safehasattr(s, 'isascending'):
         return s
-    if (repo.ui.configbool('devel', 'all-warnings')
-            or repo.ui.configbool('devel', 'old-revset')):
-        # else case should not happen, because all non-func are internal,
-        # ignoring for now.
-        if x[0] == 'func' and x[1][0] == 'symbol' and x[1][1] in symbols:
-            repo.ui.develwarn('revset "%s" use list instead of smartset, '
-                              '(upgrade your code)' % x[1][1])
+    # else case should not happen, because all non-func are internal,
+    # ignoring for now.
+    if x[0] == 'func' and x[1][0] == 'symbol' and x[1][1] in symbols:
+        repo.ui.deprecwarn('revset "%s" uses list instead of smartset'
+                           % x[1][1],
+                           '3.9')
     return baseset(s)
 
 def _getrevsource(repo, r):
@@ -387,9 +391,7 @@
     r = fullreposet(repo)
     xs = reachableroots(repo, getset(repo, r, x), getset(repo, r, y),
                          includepath=True)
-    # XXX We should combine with subset first: 'subset & baseset(...)'. This is
-    # necessary to ensure we preserve the order in subset.
-    return xs & subset
+    return subset & xs
 
 def andset(repo, subset, x, y):
     return getset(repo, getset(repo, subset, x), y)
@@ -417,13 +419,14 @@
     raise error.ParseError(_("can't use a key-value pair in this context"))
 
 def func(repo, subset, a, b):
-    if a[0] == 'symbol' and a[1] in symbols:
-        return symbols[a[1]](repo, subset, b)
+    f = getsymbol(a)
+    if f in symbols:
+        return symbols[f](repo, subset, b)
 
     keep = lambda fn: getattr(fn, '__doc__', None) is not None
 
     syms = [s for (s, fn) in symbols.items() if keep(fn)]
-    raise error.UnknownIdentifier(a[1], syms)
+    raise error.UnknownIdentifier(f, syms)
 
 # functions
 
@@ -695,20 +698,18 @@
 
     return subset.filter(matches, condrepr=('<status[%r] %r>', field, pat))
 
-def _children(repo, narrow, parentset):
+def _children(repo, subset, parentset):
     if not parentset:
         return baseset()
     cs = set()
     pr = repo.changelog.parentrevs
     minrev = parentset.min()
-    for r in narrow:
+    for r in subset:
         if r <= minrev:
             continue
         for p in pr(r):
             if p in parentset:
                 cs.add(r)
-    # XXX using a set to feed the baseset is wrong. Sets are not ordered.
-    # This does not break because of other fullreposet misbehavior.
     return baseset(cs)
 
 @predicate('children(set)', safe=True)
@@ -1150,13 +1151,9 @@
     getargs(x, 0, 0, _("head takes no arguments"))
     hs = set()
     cl = repo.changelog
-    for b, ls in repo.branchmap().iteritems():
+    for ls in repo.branchmap().itervalues():
         hs.update(cl.rev(h) for h in ls)
-    # XXX using a set to feed the baseset is wrong. Sets are not ordered.
-    # This does not break because of other fullreposet misbehavior.
-    # XXX We should combine with subset first: 'subset & baseset(...)'. This is
-    # necessary to ensure we preserve the order in subset.
-    return baseset(hs) & subset
+    return subset & baseset(hs)
 
 @predicate('heads(set)', safe=True)
 def heads(repo, subset, x):
@@ -1837,7 +1834,54 @@
         return True
     return subset & s.filter(filter, condrepr='<roots>')
 
-@predicate('sort(set[, [-]key...])', safe=True)
+_sortkeyfuncs = {
+    'rev': lambda c: c.rev(),
+    'branch': lambda c: c.branch(),
+    'desc': lambda c: c.description(),
+    'user': lambda c: c.user(),
+    'author': lambda c: c.user(),
+    'date': lambda c: c.date()[0],
+}
+
+def _getsortargs(x):
+    """Parse sort options into (set, [(key, reverse)], opts)"""
+    args = getargsdict(x, 'sort', 'set keys topo.firstbranch')
+    if 'set' not in args:
+        # i18n: "sort" is a keyword
+        raise error.ParseError(_('sort requires one or two arguments'))
+    keys = "rev"
+    if 'keys' in args:
+        # i18n: "sort" is a keyword
+        keys = getstring(args['keys'], _("sort spec must be a string"))
+
+    keyflags = []
+    for k in keys.split():
+        fk = k
+        reverse = (k[0] == '-')
+        if reverse:
+            k = k[1:]
+        if k not in _sortkeyfuncs and k != 'topo':
+            raise error.ParseError(_("unknown sort key %r") % fk)
+        keyflags.append((k, reverse))
+
+    if len(keyflags) > 1 and any(k == 'topo' for k, reverse in keyflags):
+        # i18n: "topo" is a keyword
+        raise error.ParseError(_(
+            'topo sort order cannot be combined with other sort keys'))
+
+    opts = {}
+    if 'topo.firstbranch' in args:
+        if any(k == 'topo' for k, reverse in keyflags):
+            opts['topo.firstbranch'] = args['topo.firstbranch']
+        else:
+            # i18n: "topo" and "topo.firstbranch" are keywords
+            raise error.ParseError(_(
+                'topo.firstbranch can only be used when using the topo sort '
+                'key'))
+
+    return args['set'], keyflags, opts
+
+@predicate('sort(set[, [-]key... [, ...]])', safe=True)
 def sort(repo, subset, x):
     """Sort set by keys. The default sort order is ascending, specify a key
     as ``-key`` to sort in descending order.
@@ -1849,50 +1893,235 @@
     - ``desc`` for the commit message (description),
     - ``user`` for user name (``author`` can be used as an alias),
     - ``date`` for the commit date
+    - ``topo`` for a reverse topological sort
+
+    The ``topo`` sort order cannot be combined with other sort keys. This sort
+    takes one optional argument, ``topo.firstbranch``, which takes a revset that
+    specifies what topological branches to prioritize in the sort.
+
     """
-    # i18n: "sort" is a keyword
-    l = getargs(x, 1, 2, _("sort requires one or two arguments"))
-    keys = "rev"
-    if len(l) == 2:
-        # i18n: "sort" is a keyword
-        keys = getstring(l[1], _("sort spec must be a string"))
-
-    s = l[0]
-    keys = keys.split()
+    s, keyflags, opts = _getsortargs(x)
     revs = getset(repo, subset, s)
-    if keys == ["rev"]:
-        revs.sort()
+
+    if not keyflags:
+        return revs
+    if len(keyflags) == 1 and keyflags[0][0] == "rev":
+        revs.sort(reverse=keyflags[0][1])
         return revs
-    elif keys == ["-rev"]:
-        revs.sort(reverse=True)
+    elif keyflags[0][0] == "topo":
+        firstbranch = ()
+        if 'topo.firstbranch' in opts:
+            firstbranch = getset(repo, subset, opts['topo.firstbranch'])
+        revs = baseset(_toposort(revs, repo.changelog.parentrevs, firstbranch),
+                       istopo=True)
+        if keyflags[0][1]:
+            revs.reverse()
         return revs
+
     # sort() is guaranteed to be stable
     ctxs = [repo[r] for r in revs]
-    for k in reversed(keys):
-        if k == 'rev':
-            ctxs.sort(key=lambda c: c.rev())
-        elif k == '-rev':
-            ctxs.sort(key=lambda c: c.rev(), reverse=True)
-        elif k == 'branch':
-            ctxs.sort(key=lambda c: c.branch())
-        elif k == '-branch':
-            ctxs.sort(key=lambda c: c.branch(), reverse=True)
-        elif k == 'desc':
-            ctxs.sort(key=lambda c: c.description())
-        elif k == '-desc':
-            ctxs.sort(key=lambda c: c.description(), reverse=True)
-        elif k in 'user author':
-            ctxs.sort(key=lambda c: c.user())
-        elif k in '-user -author':
-            ctxs.sort(key=lambda c: c.user(), reverse=True)
-        elif k == 'date':
-            ctxs.sort(key=lambda c: c.date()[0])
-        elif k == '-date':
-            ctxs.sort(key=lambda c: c.date()[0], reverse=True)
-        else:
-            raise error.ParseError(_("unknown sort key %r") % k)
+    for k, reverse in reversed(keyflags):
+        ctxs.sort(key=_sortkeyfuncs[k], reverse=reverse)
     return baseset([c.rev() for c in ctxs])
 
+def _toposort(revs, parentsfunc, firstbranch=()):
+    """Yield revisions from heads to roots one (topo) branch at a time.
+
+    This function aims to be used by a graph generator that wishes to minimize
+    the number of parallel branches and their interleaving.
+
+    Example iteration order (numbers show the "true" order in a changelog):
+
+      o  4
+      |
+      o  1
+      |
+      | o  3
+      | |
+      | o  2
+      |/
+      o  0
+
+    Note that the ancestors of merges are understood by the current
+    algorithm to be on the same branch. This means no reordering will
+    occur behind a merge.
+    """
+
+    ### Quick summary of the algorithm
+    #
+    # This function is based around a "retention" principle. We keep revisions
+    # in memory until we are ready to emit a whole branch that immediately
+    # "merges" into an existing one. This reduces the number of parallel
+    # branches with interleaved revisions.
+    #
+    # During iteration revs are split into two groups:
+    # A) revisions already emitted
+    # B) revisions in "retention". They are stored as different subgroups.
+    #
+    # for each REV, we do the following logic:
+    #
+    #   1) if REV is a parent of (A), we will emit it. If there is a
+    #   retention group ((B) above) that is blocked on REV being
+    #   available, we emit all the revisions out of that retention
+    #   group first.
+    #
+    #   2) else, we search for a subgroup in (B) waiting for REV to become
+    #   available; if such a subgroup exists, we add REV to it and the
+    #   subgroup now waits for REV.parents() to become available.
+    #
+    #   3) finally if no such group existed in (B), we create a new subgroup.
+    #
+    #
+    # To bootstrap the algorithm, we emit the tipmost revision (which
+    # puts it in group (A) from above).
+
+    revs.sort(reverse=True)
+
+    # Set of parents of revisions that have been emitted. They can be
+    # considered unblocked as the graph generator is already aware of them so
+    # there is no need to delay the revisions that reference them.
+    #
+    # If someone wants to prioritize a branch over the others, pre-filling this
+    # set will force all other branches to wait until this branch is ready to be
+    # emitted.
+    unblocked = set(firstbranch)
+
+    # list of groups waiting to be displayed, each group is defined by:
+    #
+    #   (revs:    list of revs waiting to be displayed,
+    #    blocked: set of revs that cannot be displayed before those in 'revs')
+    #
+    # The second value ('blocked') corresponds to parents of revisions in the
+    # group ('revs') that are not themselves contained in the group. The main
+    # idea of this algorithm is to delay as much as possible the emission of
+    # any revision.  This means waiting for the moment we are about to display
+    # these parents to display the revs in a group.
+    #
+    # This first implementation is smart until it encounters a merge: it will
+    # emit revs as soon as any parent is about to be emitted and can grow an
+    # arbitrary number of revs in 'blocked'. In practice this means we properly
+    # retain new branches but give up on any special ordering for ancestors
+    # of merges. The implementation can be improved to handle this better.
+    #
+    # The first subgroup is special. It corresponds to all the revisions that
+    # were already emitted. Its 'revs' list is expected to be empty and its
+    # 'blocked' set contains the parent revisions of already emitted revisions.
+    #
+    # You could pre-seed the <parents> set of groups[0] with specific
+    # changesets to select what the first emitted branch should be.
+    groups = [([], unblocked)]
+    pendingheap = []
+    pendingset = set()
+
+    heapq.heapify(pendingheap)
+    heappop = heapq.heappop
+    heappush = heapq.heappush
+    for currentrev in revs:
+        # Heap works with smallest element, we want highest so we invert
+        if currentrev not in pendingset:
+            heappush(pendingheap, -currentrev)
+            pendingset.add(currentrev)
+        # iterate over pending revs until the current rev has been
+        # processed.
+        rev = None
+        while rev != currentrev:
+            rev = -heappop(pendingheap)
+            pendingset.remove(rev)
+
+            # Look for a blocked subgroup waiting for the current revision.
+            matching = [i for i, g in enumerate(groups) if rev in g[1]]
+
+            if matching:
+                # The main idea is to gather together all sets that are blocked
+                # on the same revision.
+                #
+                # Groups are merged when a common blocking ancestor is
+                # observed. For example, given two groups:
+                #
+                # revs [5, 4] waiting for 1
+                # revs [3, 2] waiting for 1
+                #
+                # These two groups will be merged when we process
+                # 1. In theory, we could have merged the groups when
+                # we added 2 to the group it is now in (we could have
+                # noticed the groups were both blocked on 1 then), but
+                # the way it works now makes the algorithm simpler.
+                #
+                # We also always keep the oldest subgroup first. We can
+                # probably improve the behavior by having the longest set
+                # first. That way, graph algorithms could minimise the length
+                # of parallel lines in their drawing. This is currently not
+                # done.
+                targetidx = matching.pop(0)
+                trevs, tparents = groups[targetidx]
+                for i in matching:
+                    gr = groups[i]
+                    trevs.extend(gr[0])
+                    tparents |= gr[1]
+                # delete all merged subgroups (except the one we kept)
+                # (starting from the last subgroup for performance and
+                # sanity reasons)
+                for i in reversed(matching):
+                    del groups[i]
+            else:
+                # This is a new head. We create a new subgroup for it.
+                targetidx = len(groups)
+                groups.append(([], set([rev])))
+
+            gr = groups[targetidx]
+
+            # We now add the current nodes to this subgroup. This is done
+            # after the subgroup merging because all elements from a subgroup
+            # that relied on this rev must precede it.
+            #
+            # we also update the <parents> set to include the parents of the
+            # new nodes.
+            if rev == currentrev: # only display stuff in rev
+                gr[0].append(rev)
+            gr[1].remove(rev)
+            parents = [p for p in parentsfunc(rev) if p > node.nullrev]
+            gr[1].update(parents)
+            for p in parents:
+                if p not in pendingset:
+                    pendingset.add(p)
+                    heappush(pendingheap, -p)
+
+            # Look for a subgroup to display
+            #
+            # When unblocked is empty (if clause), we were not waiting for any
+            # revisions during the first iteration (if no priority was given) or
+            # if we emitted a whole disconnected set of the graph (reached a
+            # root).  In that case we arbitrarily take the oldest known
+            # subgroup. The heuristic could probably be better.
+            #
+            # Otherwise (elif clause) if the subgroup is blocked on
+            # a revision we just emitted, we can safely emit it as
+            # well.
+            if not unblocked:
+                if len(groups) > 1:  # display other subset
+                    targetidx = 1
+                    gr = groups[1]
+            elif not gr[1] & unblocked:
+                gr = None
+
+            if gr is not None:
+                # update the set of awaited revisions with the one from the
+                # subgroup
+                unblocked |= gr[1]
+                # output all revisions in the subgroup
+                for r in gr[0]:
+                    yield r
+                # delete the subgroup that you just output
+                # unless it is groups[0] in which case you just empty it.
+                if targetidx:
+                    del groups[targetidx]
+                else:
+                    gr[0][:] = []
+    # Check if we have some subgroup waiting for revisions we are not going to
+    # iterate over
+    for g in groups:
+        for r in g[0]:
+            yield r
+
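
A usage sketch of the new sort keys (assumes a `repo` object; match() is
defined further down in this file): the topo key walks heads to roots one
branch at a time, and topo.firstbranch selects which branch is emitted first.

    from mercurial import revset

    m = revset.match(repo.ui, "sort(all(), topo)", repo)
    print([r for r in m(repo)])   # heads to roots, one topo branch at a time

    m = revset.match(repo.ui, "sort(all(), -date user)", repo)
    print([r for r in m(repo)])   # newest first, ties broken by user name
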
 @predicate('subrepo([pattern])')
 def subrepo(repo, subset, x):
     """Changesets that add, modify or remove the given subrepo.  If no subrepo
@@ -2073,7 +2302,22 @@
     "parentpost": p1,
 }
 
-def optimize(x, small):
+def _matchonly(revs, bases):
+    """
+    >>> f = lambda *args: _matchonly(*map(parse, args))
+    >>> f('ancestors(A)', 'not ancestors(B)')
+    ('list', ('symbol', 'A'), ('symbol', 'B'))
+    """
+    if (revs is not None
+        and revs[0] == 'func'
+        and getsymbol(revs[1]) == 'ancestors'
+        and bases is not None
+        and bases[0] == 'not'
+        and bases[1][0] == 'func'
+        and getsymbol(bases[1][1]) == 'ancestors'):
+        return ('list', revs[2], bases[1][2])
+
+def _optimize(x, small):
     if x is None:
         return 0, x
 
@@ -2083,47 +2327,36 @@
 
     op = x[0]
     if op == 'minus':
-        return optimize(('and', x[1], ('not', x[2])), small)
+        return _optimize(('and', x[1], ('not', x[2])), small)
     elif op == 'only':
-        return optimize(('func', ('symbol', 'only'),
-                         ('list', x[1], x[2])), small)
+        t = ('func', ('symbol', 'only'), ('list', x[1], x[2]))
+        return _optimize(t, small)
     elif op == 'onlypost':
-        return optimize(('func', ('symbol', 'only'), x[1]), small)
+        return _optimize(('func', ('symbol', 'only'), x[1]), small)
     elif op == 'dagrangepre':
-        return optimize(('func', ('symbol', 'ancestors'), x[1]), small)
+        return _optimize(('func', ('symbol', 'ancestors'), x[1]), small)
     elif op == 'dagrangepost':
-        return optimize(('func', ('symbol', 'descendants'), x[1]), small)
+        return _optimize(('func', ('symbol', 'descendants'), x[1]), small)
     elif op == 'rangeall':
-        return optimize(('range', ('string', '0'), ('string', 'tip')), small)
+        return _optimize(('range', ('string', '0'), ('string', 'tip')), small)
     elif op == 'rangepre':
-        return optimize(('range', ('string', '0'), x[1]), small)
+        return _optimize(('range', ('string', '0'), x[1]), small)
     elif op == 'rangepost':
-        return optimize(('range', x[1], ('string', 'tip')), small)
+        return _optimize(('range', x[1], ('string', 'tip')), small)
     elif op == 'negate':
-        return optimize(('string',
-                         '-' + getstring(x[1], _("can't negate that"))), small)
+        s = getstring(x[1], _("can't negate that"))
+        return _optimize(('string', '-' + s), small)
     elif op in 'string symbol negate':
         return smallbonus, x # single revisions are small
     elif op == 'and':
-        wa, ta = optimize(x[1], True)
-        wb, tb = optimize(x[2], True)
+        wa, ta = _optimize(x[1], True)
+        wb, tb = _optimize(x[2], True)
+        w = min(wa, wb)
 
         # (::x and not ::y)/(not ::y and ::x) have a fast path
-        def isonly(revs, bases):
-            return (
-                revs is not None
-                and revs[0] == 'func'
-                and getstring(revs[1], _('not a symbol')) == 'ancestors'
-                and bases is not None
-                and bases[0] == 'not'
-                and bases[1][0] == 'func'
-                and getstring(bases[1][1], _('not a symbol')) == 'ancestors')
-
-        w = min(wa, wb)
-        if isonly(ta, tb):
-            return w, ('func', ('symbol', 'only'), ('list', ta[2], tb[1][2]))
-        if isonly(tb, ta):
-            return w, ('func', ('symbol', 'only'), ('list', tb[2], ta[1][2]))
+        tm = _matchonly(ta, tb) or _matchonly(tb, ta)
+        if tm:
+            return w, ('func', ('symbol', 'only'), tm)
 
         if tb is not None and tb[0] == 'not':
             return wa, ('difference', ta, tb[1])
@@ -2143,12 +2376,12 @@
             else:
                 s = '\0'.join(t[1] for w, t in ss)
                 y = ('func', ('symbol', '_list'), ('string', s))
-                w, t = optimize(y, False)
+                w, t = _optimize(y, False)
             ws.append(w)
             ts.append(t)
             del ss[:]
         for y in x[1:]:
-            w, t = optimize(y, False)
+            w, t = _optimize(y, False)
             if t is not None and (t[0] == 'string' or t[0] == 'symbol'):
                 ss.append((w, t))
                 continue
@@ -2166,34 +2399,34 @@
         # Optimize not public() to _notpublic() because we have a fast version
         if x[1] == ('func', ('symbol', 'public'), None):
             newsym = ('func', ('symbol', '_notpublic'), None)
-            o = optimize(newsym, not small)
+            o = _optimize(newsym, not small)
             return o[0], o[1]
         else:
-            o = optimize(x[1], not small)
+            o = _optimize(x[1], not small)
             return o[0], (op, o[1])
     elif op == 'parentpost':
-        o = optimize(x[1], small)
+        o = _optimize(x[1], small)
         return o[0], (op, o[1])
     elif op == 'group':
-        return optimize(x[1], small)
+        return _optimize(x[1], small)
     elif op in 'dagrange range parent ancestorspec':
         if op == 'parent':
             # x^:y means (x^) : y, not x ^ (:y)
             post = ('parentpost', x[1])
             if x[2][0] == 'dagrangepre':
-                return optimize(('dagrange', post, x[2][1]), small)
+                return _optimize(('dagrange', post, x[2][1]), small)
             elif x[2][0] == 'rangepre':
-                return optimize(('range', post, x[2][1]), small)
-
-        wa, ta = optimize(x[1], small)
-        wb, tb = optimize(x[2], small)
+                return _optimize(('range', post, x[2][1]), small)
+
+        wa, ta = _optimize(x[1], small)
+        wb, tb = _optimize(x[2], small)
         return wa + wb, (op, ta, tb)
     elif op == 'list':
-        ws, ts = zip(*(optimize(y, small) for y in x[1:]))
+        ws, ts = zip(*(_optimize(y, small) for y in x[1:]))
         return sum(ws), (op,) + ts
     elif op == 'func':
-        f = getstring(x[1], _("not a symbol"))
-        wa, ta = optimize(x[2], small)
+        f = getsymbol(x[1])
+        wa, ta = _optimize(x[2], small)
         if f in ("author branch closed date desc file grep keyword "
                  "outgoing user"):
             w = 10 # slow
@@ -2212,33 +2445,32 @@
         return w + wa, (op, x[1], ta)
     return 1, x
 
+def optimize(tree):
+    _weight, newtree = _optimize(tree, small=True)
+    return newtree
+
 # the set of valid characters for the initial letter of symbols in
 # alias declarations and definitions
 _aliassyminitletters = set(c for c in [chr(i) for i in xrange(256)]
                            if c.isalnum() or c in '._@$' or ord(c) > 127)
 
-def _tokenizealias(program, lookup=None):
-    """Parse alias declaration/definition into a stream of tokens
-
-    This allows symbol names to use also ``$`` as an initial letter
-    (for backward compatibility), and callers of this function should
-    examine whether ``$`` is used also for unexpected symbols or not.
-    """
-    return tokenize(program, lookup=lookup,
-                    syminitletters=_aliassyminitletters)
-
-def _parsealias(spec):
-    """Parse alias declaration/definition ``spec``
-
-    >>> _parsealias('foo($1)')
+def _parsewith(spec, lookup=None, syminitletters=None):
+    """Generate a parse tree of given spec with given tokenizing options
+
+    >>> _parsewith('foo($1)', syminitletters=_aliassyminitletters)
     ('func', ('symbol', 'foo'), ('symbol', '$1'))
-    >>> _parsealias('foo bar')
+    >>> _parsewith('$1')
+    Traceback (most recent call last):
+      ...
+    ParseError: ("syntax error in revset '$1'", 0)
+    >>> _parsewith('foo bar')
     Traceback (most recent call last):
       ...
     ParseError: ('invalid token', 4)
     """
     p = parser.parser(elements)
-    tree, pos = p.parse(_tokenizealias(spec))
+    tree, pos = p.parse(tokenize(spec, lookup=lookup,
+                                 syminitletters=syminitletters))
     if pos != len(spec):
         raise error.ParseError(_('invalid token'), pos)
     return parser.simplifyinfixops(tree, ('list', 'or'))
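
The '::x and not ::y' fast path documented in _matchonly can be exercised end
to end through the new single-argument optimize() defined just above; roughly,
given the parsing rules in this file:

    from mercurial import revset

    tree = revset.parse('ancestors(A) and not ancestors(B)')
    print(revset.optimize(tree))
    # expected, per _matchonly:
    # ('func', ('symbol', 'only'), ('list', ('symbol', 'A'), ('symbol', 'B')))
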
@@ -2246,7 +2478,16 @@
 class _aliasrules(parser.basealiasrules):
     """Parsing and expansion rule set of revset aliases"""
     _section = _('revset alias')
-    _parse = staticmethod(_parsealias)
+
+    @staticmethod
+    def _parse(spec):
+        """Parse alias declaration/definition ``spec``
+
+        This allows symbol names to use also ``$`` as an initial letter
+        (for backward compatibility), and callers of this function should
+        examine whether ``$`` is used also for unexpected symbols or not.
+        """
+        return _parsewith(spec, syminitletters=_aliassyminitletters)
 
     @staticmethod
     def _trygetfunc(tree):
@@ -2286,24 +2527,15 @@
         return tuple(foldconcat(t) for t in tree)
 
 def parse(spec, lookup=None):
-    p = parser.parser(elements)
-    tree, pos = p.parse(tokenize(spec, lookup=lookup))
-    if pos != len(spec):
-        raise error.ParseError(_("invalid token"), pos)
-    return parser.simplifyinfixops(tree, ('list', 'or'))
+    return _parsewith(spec, lookup=lookup)
 
 def posttreebuilthook(tree, repo):
     # hook for extensions to execute code on the optimized tree
     pass
 
 def match(ui, spec, repo=None):
-    if not spec:
-        raise error.ParseError(_("empty query"))
-    lookup = None
-    if repo:
-        lookup = repo.__contains__
-    tree = parse(spec, lookup)
-    return _makematcher(ui, tree, repo)
+    """Create a matcher for a single revision spec."""
+    return matchany(ui, [spec], repo=repo)
 
 def matchany(ui, specs, repo=None):
     """Create a matcher that will include any revisions matching one of the
@@ -2327,7 +2559,7 @@
     if ui:
         tree = expandaliases(ui, tree, showwarning=ui.warn)
     tree = foldconcat(tree)
-    weight, tree = optimize(tree, True)
+    tree = optimize(tree)
     posttreebuilthook(tree, repo)
     def mfunc(repo, subset=None):
         if subset is None:
@@ -2426,7 +2658,8 @@
                 ret += listexp(list(args[arg]), d)
                 arg += 1
             else:
-                raise error.Abort('unexpected revspec format character %s' % d)
+                raise error.Abort(_('unexpected revspec format character %s')
+                                  % d)
         else:
             ret += c
         pos += 1
@@ -2506,6 +2739,10 @@
         """True if the set will iterate in descending order"""
         raise NotImplementedError()
 
+    def istopo(self):
+        """True if the set will iterate in topological order"""
+        raise NotImplementedError()
+
     @util.cachefunc
     def min(self):
         """return the minimum element in the set"""
@@ -2591,12 +2828,13 @@
 
     Every method in this class should be implemented by any smartset class.
     """
-    def __init__(self, data=(), datarepr=None):
+    def __init__(self, data=(), datarepr=None, istopo=False):
         """
         datarepr: a tuple of (format, obj, ...), a function or an object that
                   provides a printable representation of the given data.
         """
         self._ascending = None
+        self._istopo = istopo
         if not isinstance(data, list):
             if isinstance(data, set):
                 self._set = data
@@ -2639,12 +2877,14 @@
 
     def sort(self, reverse=False):
         self._ascending = not bool(reverse)
+        self._istopo = False
 
     def reverse(self):
         if self._ascending is None:
             self._list.reverse()
         else:
             self._ascending = not self._ascending
+        self._istopo = False
 
     def __len__(self):
         return len(self._list)
@@ -2665,6 +2905,14 @@
             return True
         return self._ascending is not None and not self._ascending
 
+    def istopo(self):
+        """True if the collection is in topological order.
+
+        This is part of the mandatory API for smartset."""
+        if len(self) <= 1:
+            return True
+        return self._istopo
+
     def first(self):
         if self:
             if self._ascending is None:
@@ -2741,9 +2989,16 @@
         return lambda: self._iterfilter(it())
 
     def __nonzero__(self):
-        fast = self.fastasc
-        if fast is None:
-            fast = self.fastdesc
+        fast = None
+        candidates = [self.fastasc if self.isascending() else None,
+                      self.fastdesc if self.isdescending() else None,
+                      self.fastasc,
+                      self.fastdesc]
+        for candidate in candidates:
+            if candidate is not None:
+                fast = candidate
+                break
+
         if fast is not None:
             it = fast()
         else:
@@ -2773,6 +3028,9 @@
     def isdescending(self):
         return self._subset.isdescending()
 
+    def istopo(self):
+        return self._subset.istopo()
+
     def first(self):
         for x in self:
             return x
@@ -2816,14 +3074,14 @@
         # Consume both iterators in an ordered way until one is empty
         while True:
             if val1 is None:
-                val1 = iter1.next()
+                val1 = next(iter1)
             if val2 is None:
-                val2 = iter2.next()
-            next = choice(val1, val2)
-            yield next
-            if val1 == next:
+                val2 = next(iter2)
+            n = choice(val1, val2)
+            yield n
+            if val1 == n:
                 val1 = None
-            if val2 == next:
+            if val2 == n:
                 val2 = None
     except StopIteration:
         # Flush any remaining values and consume the other one
@@ -3019,6 +3277,12 @@
     def isdescending(self):
         return self._ascending is not None and not self._ascending
 
+    def istopo(self):
+        # not worth the trouble asserting if the two sets combined are still
+        # in topological order. Use the sort() predicate to explicitly sort
+        # again instead.
+        return False
+
     def reverse(self):
         if self._ascending is None:
             self._list.reverse()
@@ -3186,6 +3450,12 @@
     def isdescending(self):
         return not self._ascending
 
+    def istopo(self):
+        # not worth the trouble asserting if the two sets combined are still
+        # in topological order. Use the sort() predicate to explicitly sort
+        # again instead.
+        return False
+
     def first(self):
         if self._ascending:
             it = self.fastasc
@@ -3248,6 +3518,12 @@
     def reverse(self):
         self._ascending = not self._ascending
 
+    def istopo(self):
+        # not worth the trouble asserting if the two sets combined are still
+        # in topological order. Use the sort() predicate to explicitly sort
+        # again instead.
+        return False
+
     def _iterfilter(self, iterrange):
         s = self._hiddenrevs
         for r in iterrange:
--- a/mercurial/scmutil.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/scmutil.py	Mon Jul 18 23:28:14 2016 -0500
@@ -10,6 +10,7 @@
 import contextlib
 import errno
 import glob
+import hashlib
 import os
 import re
 import shutil
@@ -224,7 +225,7 @@
     key = None
     revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
     if revs:
-        s = util.sha1()
+        s = hashlib.sha1()
         for rev in revs:
             s.update('%s;' % rev)
         key = s.digest()
@@ -377,8 +378,24 @@
     def readlock(self, path):
         return util.readlock(self.join(path))
 
-    def rename(self, src, dst):
-        return util.rename(self.join(src), self.join(dst))
+    def rename(self, src, dst, checkambig=False):
+        """Rename from src to dst
+
+        checkambig argument is used with util.filestat, and is useful
+        only if the destination file is guarded by a lock
+        (e.g. repo.lock or repo.wlock).
+        """
+        dstpath = self.join(dst)
+        oldstat = checkambig and util.filestat(dstpath)
+        if oldstat and oldstat.stat:
+            ret = util.rename(self.join(src), dstpath)
+            newstat = util.filestat(dstpath)
+            if newstat.isambig(oldstat):
+                # stat of renamed file is ambiguous to original one
+                advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
+                os.utime(dstpath, (advanced, advanced))
+            return ret
+        return util.rename(self.join(src), dstpath)
 
     def readlink(self, path):
         return os.readlink(self.join(path))
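
The ambiguity check itself lives in util.filestat, but the idea can be sketched
standalone: if replacing a file leaves the stat data a cache would key on
unchanged, nudge the mtime forward one second so stat-based caches notice the
change. The helper below is hypothetical, not Mercurial's API.

    import os

    def rename_unambiguously(src, dst):
        try:
            old = os.stat(dst)
        except OSError:
            old = None
        os.rename(src, dst)
        if old is None:
            return
        new = os.stat(dst)
        if (new.st_size, int(new.st_mtime)) == (old.st_size, int(old.st_mtime)):
            # same size and mtime as before: advance mtime so readers notice
            advanced = (int(old.st_mtime) + 1) & 0x7fffffff
            os.utime(dst, (advanced, advanced))
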
@@ -451,7 +468,8 @@
         # have a use case.
         vfs = getattr(self, 'vfs', self)
         if getattr(vfs, '_backgroundfilecloser', None):
-            raise error.Abort('can only have 1 active background file closer')
+            raise error.Abort(
+                _('can only have 1 active background file closer'))
 
         with backgroundfilecloser(ui, expectedcount=expectedcount) as bfc:
             try:
@@ -502,7 +520,7 @@
         os.chmod(name, self.createmode & 0o666)
 
     def __call__(self, path, mode="r", text=False, atomictemp=False,
-                 notindexed=False, backgroundclose=False):
+                 notindexed=False, backgroundclose=False, checkambig=False):
         '''Open ``path`` file, which is relative to vfs root.
 
         Newly created directories are marked as "not to be indexed by
@@ -521,6 +539,10 @@
            closing a file on a background thread and reopening it. (If the
            file were opened multiple times, there could be unflushed data
            because the original file handle hasn't been flushed/closed yet.)
+
+        ``checkambig`` argument is passed to atomictempfile (valid
+        only for writing), and is useful only if the target file is
+        guarded by a lock (e.g. repo.lock or repo.wlock).
         '''
         if self._audit:
             r = util.checkosfilename(path)
@@ -540,7 +562,8 @@
             if basename:
                 if atomictemp:
                     util.makedirs(dirname, self.createmode, notindexed)
-                    return util.atomictempfile(f, mode, self.createmode)
+                    return util.atomictempfile(f, mode, self.createmode,
+                                               checkambig=checkambig)
                 try:
                     if 'w' in mode:
                         util.unlink(f)
@@ -568,8 +591,9 @@
 
         if backgroundclose:
             if not self._backgroundfilecloser:
-                raise error.Abort('backgroundclose can only be used when a '
+                raise error.Abort(_('backgroundclose can only be used when a '
                                   'backgroundclosing context manager is active')
+                                  )
 
             fp = delayclosedfile(fp, self._backgroundfilecloser)
 
@@ -640,7 +664,7 @@
 
     def __call__(self, path, mode='r', *args, **kw):
         if mode not in ('r', 'rb'):
-            raise error.Abort('this vfs is read only')
+            raise error.Abort(_('this vfs is read only'))
         return self.vfs(path, mode, *args, **kw)
 
     def join(self, path, *insidef):
@@ -751,7 +775,7 @@
 
 def _pairspec(revspec):
     tree = revset.parse(revspec)
-    tree = revset.optimize(tree, True)[1]  # fix up "x^:y" -> "(x^):y"
+    tree = revset.optimize(tree)  # fix up "x^:y" -> "(x^):y"
     return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
 
 def revpair(repo, revs):
@@ -784,10 +808,29 @@
 
     return repo.lookup(first), repo.lookup(second)
 
-def revrange(repo, revs):
-    """Yield revision as strings from a list of revision specifications."""
+def revrange(repo, specs):
+    """Execute 1 to many revsets and return the union.
+
+    This is the preferred mechanism for executing revsets using user-specified
+    config options, such as revset aliases.
+
+    The revsets specified by ``specs`` will be executed via a chained ``OR``
+    expression. If ``specs`` is empty, an empty result is returned.
+
+    ``specs`` can contain integers, in which case they are assumed to be
+    revision numbers.
+
+    It is assumed the revsets are already formatted. If you have arguments
+    that need to be expanded in the revset, call ``revset.formatspec()``
+    and pass the result as an element of ``specs``.
+
+    Specifying a single revset is allowed.
+
+    Returns a ``revset.abstractsmartset`` which is a list-like interface over
+    integer revisions.
+    """
     allspecs = []
-    for spec in revs:
+    for spec in specs:
         if isinstance(spec, int):
             spec = revset.formatspec('rev(%d)', spec)
         allspecs.append(spec)
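
A short usage sketch of the documented contract (assumes a `repo` object): each
spec is a fully formatted revset and the results are OR-ed into a single
smartset.

    from mercurial import revset, scmutil

    specs = ['draft()', revset.formatspec('rev(%d)', 0)]
    revs = scmutil.revrange(repo, specs)   # revset.abstractsmartset of ints
    for r in revs:
        print(repo[r].hex())
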
@@ -1183,6 +1226,9 @@
         return self
 
     def __get__(self, obj, type=None):
+        # if accessed on the class, return the descriptor itself.
+        if obj is None:
+            return self
         # do we need to check if the file changed?
         if self.name in obj.__dict__:
             assert self.name in obj._filecache, self.name
@@ -1358,8 +1404,8 @@
     def close(self, fh):
         """Schedule a file for closing."""
         if not self._entered:
-            raise error.Abort('can only call close() when context manager '
-                              'active')
+            raise error.Abort(_('can only call close() when context manager '
+                              'active'))
 
         # If a background thread encountered an exception, raise now so we fail
         # fast. Otherwise we may potentially go on for minutes until the error
@@ -1375,4 +1421,3 @@
             return
 
         self._queue.put(fh, block=True, timeout=None)
-
--- a/mercurial/similar.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/similar.py	Mon Jul 18 23:28:14 2016 -0500
@@ -7,6 +7,8 @@
 
 from __future__ import absolute_import
 
+import hashlib
+
 from .i18n import _
 from . import (
     bdiff,
@@ -27,14 +29,14 @@
     for i, fctx in enumerate(removed):
         repo.ui.progress(_('searching for exact renames'), i, total=numfiles,
                          unit=_('files'))
-        h = util.sha1(fctx.data()).digest()
+        h = hashlib.sha1(fctx.data()).digest()
         hashes[h] = fctx
 
     # For each added file, see if it corresponds to a removed file.
     for i, fctx in enumerate(added):
         repo.ui.progress(_('searching for exact renames'), i + len(removed),
                 total=numfiles, unit=_('files'))
-        h = util.sha1(fctx.data()).digest()
+        h = hashlib.sha1(fctx.data()).digest()
         if h in hashes:
             yield (hashes[h], fctx)
 
@@ -106,4 +108,3 @@
         for (a, b, score) in _findsimilarmatches(repo,
                 sorted(addedfiles), sorted(removedfiles), threshold):
             yield (a.path(), b.path(), score)
-
--- a/mercurial/sshpeer.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/sshpeer.py	Mon Jul 18 23:28:14 2016 -0500
@@ -307,7 +307,7 @@
         r = self._call(cmd, **args)
         if r:
             # XXX needs to be made better
-            raise error.Abort('unexpected remote reply: %s' % r)
+            raise error.Abort(_('unexpected remote reply: %s') % r)
         while True:
             d = fp.read(4096)
             if not d:
--- a/mercurial/sshserver.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/sshserver.py	Mon Jul 18 23:28:14 2016 -0500
@@ -11,6 +11,7 @@
 import os
 import sys
 
+from .i18n import _
 from . import (
     error,
     hook,
@@ -40,7 +41,7 @@
             argline = self.fin.readline()[:-1]
             arg, l = argline.split()
             if arg not in keys:
-                raise error.Abort("unexpected parameter %r" % arg)
+                raise error.Abort(_("unexpected parameter %r") % arg)
             if arg == '*':
                 star = {}
                 for k in xrange(int(l)):
--- a/mercurial/sslutil.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/sslutil.py	Mon Jul 18 23:28:14 2016 -0500
@@ -9,6 +9,7 @@
 
 from __future__ import absolute_import
 
+import hashlib
 import os
 import re
 import ssl
@@ -28,14 +29,21 @@
 # modern/secure or legacy/insecure. Many operations in this module have
 # separate code paths depending on support in Python.
 
+configprotocols = set([
+    'tls1.0',
+    'tls1.1',
+    'tls1.2',
+])
+
 hassni = getattr(ssl, 'HAS_SNI', False)
 
-try:
-    OP_NO_SSLv2 = ssl.OP_NO_SSLv2
-    OP_NO_SSLv3 = ssl.OP_NO_SSLv3
-except AttributeError:
-    OP_NO_SSLv2 = 0x1000000
-    OP_NO_SSLv3 = 0x2000000
+# TLS 1.1 and 1.2 may not be supported if the OpenSSL that Python is compiled
+# against doesn't support them.
+supportedprotocols = set(['tls1.0'])
+if util.safehasattr(ssl, 'PROTOCOL_TLSv1_1'):
+    supportedprotocols.add('tls1.1')
+if util.safehasattr(ssl, 'PROTOCOL_TLSv1_2'):
+    supportedprotocols.add('tls1.2')
 
 try:
     # ssl.SSLContext was added in 2.7.9 and presence indicates modern
@@ -76,15 +84,19 @@
 
         def load_verify_locations(self, cafile=None, capath=None, cadata=None):
             if capath:
-                raise error.Abort('capath not supported')
+                raise error.Abort(_('capath not supported'))
             if cadata:
-                raise error.Abort('cadata not supported')
+                raise error.Abort(_('cadata not supported'))
 
             self._cacerts = cafile
 
         def set_ciphers(self, ciphers):
             if not self._supportsciphers:
-                raise error.Abort('setting ciphers not supported')
+                raise error.Abort(_('setting ciphers in [hostsecurity] is not '
+                                    'supported by this version of Python'),
+                                  hint=_('remove the config option or run '
+                                         'Mercurial with a modern Python '
+                                         'version (preferred)'))
 
             self._ciphers = ciphers
 
@@ -107,8 +119,213 @@
 
             return ssl.wrap_socket(socket, **args)
 
-def wrapsocket(sock, keyfile, certfile, ui, cert_reqs=ssl.CERT_NONE,
-               ca_certs=None, serverhostname=None):
+def _hostsettings(ui, hostname):
+    """Obtain security settings for a hostname.
+
+    Returns a dict of settings relevant to that hostname.
+    """
+    s = {
+        # Whether we should attempt to load default/available CA certs
+        # if an explicit ``cafile`` is not defined.
+        'allowloaddefaultcerts': True,
+        # List of 2-tuple of (hash algorithm, hash).
+        'certfingerprints': [],
+        # Path to file containing concatenated CA certs. Used by
+        # SSLContext.load_verify_locations().
+        'cafile': None,
+        # Whether certificate verification should be disabled.
+        'disablecertverification': False,
+        # Whether the legacy [hostfingerprints] section has data for this host.
+        'legacyfingerprint': False,
+        # PROTOCOL_* constant to use for SSLContext.__init__.
+        'protocol': None,
+        # ssl.CERT_* constant used by SSLContext.verify_mode.
+        'verifymode': None,
+        # Defines extra ssl.OP* bitwise options to set.
+        'ctxoptions': None,
+        # OpenSSL Cipher List to use (instead of default).
+        'ciphers': None,
+    }
+
+    # Allow minimum TLS protocol to be specified in the config.
+    def validateprotocol(protocol, key):
+        if protocol not in configprotocols:
+            raise error.Abort(
+                _('unsupported protocol from hostsecurity.%s: %s') %
+                (key, protocol),
+                hint=_('valid protocols: %s') %
+                     ' '.join(sorted(configprotocols)))
+
+    # We default to TLS 1.1+ where we can because TLS 1.0 has known
+    # vulnerabilities (like BEAST and POODLE). We allow users to downgrade to
+    # TLS 1.0+ via config options in case a legacy server is encountered.
+    if 'tls1.1' in supportedprotocols:
+        defaultprotocol = 'tls1.1'
+    else:
+        # Let people know they are borderline secure.
+        # We don't document this config option because we want people to see
+        # the bold warnings on the web site.
+        # internal config: hostsecurity.disabletls10warning
+        if not ui.configbool('hostsecurity', 'disabletls10warning'):
+            ui.warn(_('warning: connecting to %s using legacy security '
+                      'technology (TLS 1.0); see '
+                      'https://mercurial-scm.org/wiki/SecureConnections for '
+                      'more info\n') % hostname)
+        defaultprotocol = 'tls1.0'
+
+    key = 'minimumprotocol'
+    protocol = ui.config('hostsecurity', key, defaultprotocol)
+    validateprotocol(protocol, key)
+
+    key = '%s:minimumprotocol' % hostname
+    protocol = ui.config('hostsecurity', key, protocol)
+    validateprotocol(protocol, key)
+
+    s['protocol'], s['ctxoptions'] = protocolsettings(protocol)
+
+    ciphers = ui.config('hostsecurity', 'ciphers')
+    ciphers = ui.config('hostsecurity', '%s:ciphers' % hostname, ciphers)
+    s['ciphers'] = ciphers
+
+    # Look for fingerprints in [hostsecurity] section. Value is a list
+    # of <alg>:<fingerprint> strings.
+    fingerprints = ui.configlist('hostsecurity', '%s:fingerprints' % hostname,
+                                 [])
+    for fingerprint in fingerprints:
+        if not (fingerprint.startswith(('sha1:', 'sha256:', 'sha512:'))):
+            raise error.Abort(_('invalid fingerprint for %s: %s') % (
+                                hostname, fingerprint),
+                              hint=_('must begin with "sha1:", "sha256:", '
+                                     'or "sha512:"'))
+
+        alg, fingerprint = fingerprint.split(':', 1)
+        fingerprint = fingerprint.replace(':', '').lower()
+        s['certfingerprints'].append((alg, fingerprint))
+
+    # Fingerprints from [hostfingerprints] are always SHA-1.
+    for fingerprint in ui.configlist('hostfingerprints', hostname, []):
+        fingerprint = fingerprint.replace(':', '').lower()
+        s['certfingerprints'].append(('sha1', fingerprint))
+        s['legacyfingerprint'] = True
+
+    # If a host cert fingerprint is defined, it is the only thing that
+    # matters. No need to validate CA certs.
+    if s['certfingerprints']:
+        s['verifymode'] = ssl.CERT_NONE
+        s['allowloaddefaultcerts'] = False
+
+    # If --insecure is used, don't take CAs into consideration.
+    elif ui.insecureconnections:
+        s['disablecertverification'] = True
+        s['verifymode'] = ssl.CERT_NONE
+        s['allowloaddefaultcerts'] = False
+
+    if ui.configbool('devel', 'disableloaddefaultcerts'):
+        s['allowloaddefaultcerts'] = False
+
+    # If both fingerprints and a per-host ca file are specified, issue a warning
+    # because users should not be surprised about which checks are or aren't
+    # being performed.
+    cafile = ui.config('hostsecurity', '%s:verifycertsfile' % hostname)
+    if s['certfingerprints'] and cafile:
+        ui.warn(_('(hostsecurity.%s:verifycertsfile ignored when host '
+                  'fingerprints defined; using host fingerprints for '
+                  'verification)\n') % hostname)
+
+    # Try to hook up CA certificate validation unless something above
+    # makes it not necessary.
+    if s['verifymode'] is None:
+        # Look at per-host ca file first.
+        if cafile:
+            cafile = util.expandpath(cafile)
+            if not os.path.exists(cafile):
+                raise error.Abort(_('path specified by %s does not exist: %s') %
+                                  ('hostsecurity.%s:verifycertsfile' % hostname,
+                                   cafile))
+            s['cafile'] = cafile
+        else:
+            # Find global certificates file in config.
+            cafile = ui.config('web', 'cacerts')
+
+            if cafile:
+                cafile = util.expandpath(cafile)
+                if not os.path.exists(cafile):
+                    raise error.Abort(_('could not find web.cacerts: %s') %
+                                      cafile)
+            elif s['allowloaddefaultcerts']:
+                # CAs not defined in config. Try to find system bundles.
+                cafile = _defaultcacerts(ui)
+                if cafile:
+                    ui.debug('using %s for CA file\n' % cafile)
+
+            s['cafile'] = cafile
+
+        # Require certificate validation if CA certs are being loaded and
+        # verification hasn't been disabled above.
+        if cafile or (_canloaddefaultcerts and s['allowloaddefaultcerts']):
+            s['verifymode'] = ssl.CERT_REQUIRED
+        else:
+            # At this point we don't have a fingerprint, aren't being
+            # explicitly insecure, and can't load CA certs. Connecting
+            # is insecure. We allow the connection and abort during
+            # validation (once we have the fingerprint to print to the
+            # user).
+            s['verifymode'] = ssl.CERT_NONE
+
+    assert s['protocol'] is not None
+    assert s['ctxoptions'] is not None
+    assert s['verifymode'] is not None
+
+    return s
+
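The [hostsecurity] fingerprint values parsed above take the form <alg>:<hex>; the algorithm prefix is split off, the colons are dropped, and the digest is lowercased before being stored in the settings dict. A minimal standalone sketch of that normalization (the config value shown is made up for illustration):

    def parsefingerprint(value):
        # "sha256:AB:CD:..." -> ('sha256', 'abcd...'), mirroring the
        # normalization performed in _hostsettings() above
        alg, _, fingerprint = value.partition(':')
        if alg not in ('sha1', 'sha256', 'sha512'):
            raise ValueError('unsupported fingerprint algorithm: %s' % alg)
        return alg, fingerprint.replace(':', '').lower()

    print(parsefingerprint('sha256:AB:CD:EF:01:23:45'))
    # -> ('sha256', 'abcdef012345')
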
+def protocolsettings(protocol):
+    """Resolve the protocol and context options for a config value."""
+    if protocol not in configprotocols:
+        raise ValueError('protocol value not supported: %s' % protocol)
+
+    # Despite its name, PROTOCOL_SSLv23 selects the highest protocol
+    # that both ends support, including TLS protocols. On legacy stacks,
+    # the highest it likely goes is TLS 1.0. On modern stacks, it can
+    # support TLS 1.2.
+    #
+    # The PROTOCOL_TLSv* constants select a specific TLS version
+    # only (as opposed to multiple versions). So the method for
+    # supporting multiple TLS versions is to use PROTOCOL_SSLv23 and
+    # disable protocols via SSLContext.options and OP_NO_* constants.
+    # However, SSLContext.options doesn't work unless we have the
+    # full/real SSLContext available to us.
+    if supportedprotocols == set(['tls1.0']):
+        if protocol != 'tls1.0':
+            raise error.Abort(_('current Python does not support protocol '
+                                'setting %s') % protocol,
+                              hint=_('upgrade Python or disable setting since '
+                                     'only TLS 1.0 is supported'))
+
+        return ssl.PROTOCOL_TLSv1, 0
+
+    # WARNING: returned options don't work unless the modern ssl module
+    # is available. Be careful when adding options here.
+
+    # SSLv2 and SSLv3 are broken. We ban them outright.
+    options = ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3
+
+    if protocol == 'tls1.0':
+        # Defaults above are to use TLS 1.0+
+        pass
+    elif protocol == 'tls1.1':
+        options |= ssl.OP_NO_TLSv1
+    elif protocol == 'tls1.2':
+        options |= ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1
+    else:
+        raise error.Abort(_('this should not happen'))
+
+    # Prevent CRIME.
+    # There is no guarantee this attribute is defined on the module.
+    options |= getattr(ssl, 'OP_NO_COMPRESSION', 0)
+
+    return ssl.PROTOCOL_SSLv23, options
+
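Outside Mercurial, the same minimum-version idea can be expressed directly with the ssl module: start from PROTOCOL_SSLv23 (negotiate the best version both ends support) and mask off everything below the requested floor with OP_NO_* flags. A rough sketch, assuming a Python whose ssl module exposes these constants:

    import ssl

    def minimumtlsoptions(minimum):
        # Start from "negotiate the best version both sides support" and
        # mask off everything below the requested floor; SSLv2 and SSLv3
        # stay banned unconditionally.
        options = ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3
        if minimum in ('tls1.1', 'tls1.2'):
            options |= ssl.OP_NO_TLSv1
        if minimum == 'tls1.2':
            options |= getattr(ssl, 'OP_NO_TLSv1_1', 0)
        return ssl.PROTOCOL_SSLv23, options

    protocol, options = minimumtlsoptions('tls1.1')
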
+def wrapsocket(sock, keyfile, certfile, ui, serverhostname=None):
     """Add SSL/TLS to a socket.
 
     This is a glorified wrapper for ``ssl.wrap_socket()``. It makes sane
@@ -121,32 +338,33 @@
       server (and client) support SNI, this tells the server which certificate
       to use.
     """
-    # Despite its name, PROTOCOL_SSLv23 selects the highest protocol
-    # that both ends support, including TLS protocols. On legacy stacks,
-    # the highest it likely goes in TLS 1.0. On modern stacks, it can
-    # support TLS 1.2.
-    #
-    # The PROTOCOL_TLSv* constants select a specific TLS version
-    # only (as opposed to multiple versions). So the method for
-    # supporting multiple TLS versions is to use PROTOCOL_SSLv23 and
-    # disable protocols via SSLContext.options and OP_NO_* constants.
-    # However, SSLContext.options doesn't work unless we have the
-    # full/real SSLContext available to us.
-    #
-    # SSLv2 and SSLv3 are broken. We ban them outright.
-    if modernssl:
-        protocol = ssl.PROTOCOL_SSLv23
-    else:
-        protocol = ssl.PROTOCOL_TLSv1
+    if not serverhostname:
+        raise error.Abort(_('serverhostname argument is required'))
+
+    settings = _hostsettings(ui, serverhostname)
 
-    # TODO use ssl.create_default_context() on modernssl.
-    sslcontext = SSLContext(protocol)
+    # We can't use ssl.create_default_context() because it calls
+    # load_default_certs() unless CA arguments are passed to it. We want to
+    # have explicit control over CA loading because implicitly loading
+    # CAs may undermine the user's intent. For example, a user may define a CA
+    # bundle with a specific CA cert removed. If the system/default CA bundle
+    # is loaded and contains that removed CA, you've just undone the user's
+    # choice.
+    sslcontext = SSLContext(settings['protocol'])
 
-    # This is a no-op on old Python.
-    sslcontext.options |= OP_NO_SSLv2 | OP_NO_SSLv3
+    # This is a no-op unless using modern ssl.
+    sslcontext.options |= settings['ctxoptions']
 
     # This still works on our fake SSLContext.
-    sslcontext.verify_mode = cert_reqs
+    sslcontext.verify_mode = settings['verifymode']
+
+    if settings['ciphers']:
+        try:
+            sslcontext.set_ciphers(settings['ciphers'])
+        except ssl.SSLError as e:
+            raise error.Abort(_('could not set ciphers: %s') % e.args[0],
+                              hint=_('change cipher string (%s) in config') %
+                                   settings['ciphers'])
 
     if certfile is not None:
         def password():
@@ -154,20 +372,123 @@
             return ui.getpass(_('passphrase for %s: ') % f, '')
         sslcontext.load_cert_chain(certfile, keyfile, password)
 
-    if ca_certs is not None:
-        sslcontext.load_verify_locations(cafile=ca_certs)
-    else:
+    if settings['cafile'] is not None:
+        try:
+            sslcontext.load_verify_locations(cafile=settings['cafile'])
+        except ssl.SSLError as e:
+            raise error.Abort(_('error loading CA file %s: %s') % (
+                              settings['cafile'], e.args[1]),
+                              hint=_('file is empty or malformed?'))
+        caloaded = True
+    elif settings['allowloaddefaultcerts']:
         # This is a no-op on old Python.
         sslcontext.load_default_certs()
+        caloaded = True
+    else:
+        caloaded = False
 
-    sslsocket = sslcontext.wrap_socket(sock, server_hostname=serverhostname)
+    try:
+        sslsocket = sslcontext.wrap_socket(sock, server_hostname=serverhostname)
+    except ssl.SSLError as e:
+        # If we're doing certificate verification and no CA certs are loaded,
+        # that is almost certainly the reason why verification failed. Provide
+        # a hint to the user.
+        # Only modern ssl module exposes SSLContext.get_ca_certs() so we can
+        # only show this warning if modern ssl is available.
+        if (caloaded and settings['verifymode'] == ssl.CERT_REQUIRED and
+            modernssl and not sslcontext.get_ca_certs()):
+            ui.warn(_('(an attempt was made to load CA certificates but none '
+                      'were loaded; see '
+                      'https://mercurial-scm.org/wiki/SecureConnections for '
+                      'how to configure Mercurial to avoid this error)\n'))
+        # Try to print more helpful error messages for known failures.
+        if util.safehasattr(e, 'reason'):
+            if e.reason == 'UNSUPPORTED_PROTOCOL':
+                ui.warn(_('(could not negotiate a common protocol; see '
+                          'https://mercurial-scm.org/wiki/SecureConnections '
+                          'for how to configure Mercurial to avoid this '
+                          'error)\n'))
+        raise
+
     # check if wrap_socket failed silently because socket had been
     # closed
     # - see http://bugs.python.org/issue13721
     if not sslsocket.cipher():
         raise error.Abort(_('ssl connection failed'))
+
+    sslsocket._hgstate = {
+        'caloaded': caloaded,
+        'hostname': serverhostname,
+        'settings': settings,
+        'ui': ui,
+    }
+
     return sslsocket
 
+def wrapserversocket(sock, ui, certfile=None, keyfile=None, cafile=None,
+                     requireclientcert=False):
+    """Wrap a socket for use by servers.
+
+    ``certfile`` and ``keyfile`` specify the files containing the certificate's
+    public and private keys, respectively. Both keys can be defined in the same
+    file via ``certfile`` (the private key must come first in the file).
+
+    ``cafile`` defines the path to certificate authorities.
+
+    ``requireclientcert`` specifies whether to require client certificates.
+
+    Typically ``cafile`` is only defined if ``requireclientcert`` is true.
+    """
+    protocol, options = protocolsettings('tls1.0')
+
+    # This config option is intended for use in tests only. It is a giant
+    # footgun to kill security. Don't define it.
+    exactprotocol = ui.config('devel', 'serverexactprotocol')
+    if exactprotocol == 'tls1.0':
+        protocol = ssl.PROTOCOL_TLSv1
+    elif exactprotocol == 'tls1.1':
+        if 'tls1.1' not in supportedprotocols:
+            raise error.Abort(_('TLS 1.1 not supported by this Python'))
+        protocol = ssl.PROTOCOL_TLSv1_1
+    elif exactprotocol == 'tls1.2':
+        if 'tls1.2' not in supportedprotocols:
+            raise error.Abort(_('TLS 1.2 not supported by this Python'))
+        protocol = ssl.PROTOCOL_TLSv1_2
+    elif exactprotocol:
+        raise error.Abort(_('invalid value for serverexactprotocol: %s') %
+                          exactprotocol)
+
+    if modernssl:
+        # We /could/ use create_default_context() here since it doesn't load
+        # CAs when configured for client auth. However, it is hard-coded to
+        # use ssl.PROTOCOL_SSLv23 which may not be appropriate here.
+        sslcontext = SSLContext(protocol)
+        sslcontext.options |= options
+
+        # Improve forward secrecy.
+        sslcontext.options |= getattr(ssl, 'OP_SINGLE_DH_USE', 0)
+        sslcontext.options |= getattr(ssl, 'OP_SINGLE_ECDH_USE', 0)
+
+        # Use the list of more secure ciphers if found in the ssl module.
+        if util.safehasattr(ssl, '_RESTRICTED_SERVER_CIPHERS'):
+            sslcontext.options |= getattr(ssl, 'OP_CIPHER_SERVER_PREFERENCE', 0)
+            sslcontext.set_ciphers(ssl._RESTRICTED_SERVER_CIPHERS)
+    else:
+        sslcontext = SSLContext(ssl.PROTOCOL_TLSv1)
+
+    if requireclientcert:
+        sslcontext.verify_mode = ssl.CERT_REQUIRED
+    else:
+        sslcontext.verify_mode = ssl.CERT_NONE
+
+    if certfile or keyfile:
+        sslcontext.load_cert_chain(certfile=certfile, keyfile=keyfile)
+
+    if cafile:
+        sslcontext.load_verify_locations(cafile=cafile)
+
+    return sslcontext.wrap_socket(sock, server_side=True)
+
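For comparison, a bare-bones server-side sketch using only the standard ssl module, roughly parallel to wrapserversocket() above; the certificate path is a placeholder and, as the docstring notes, the file is expected to contain the private key followed by the certificate:

    import socket
    import ssl

    def servetls(certfile, port=0):
        # certfile holds the private key followed by the certificate
        ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        ctx.options |= ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3
        ctx.load_cert_chain(certfile=certfile)
        listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        listener.bind(('localhost', port))
        listener.listen(1)
        conn, _addr = listener.accept()
        return ctx.wrap_socket(conn, server_side=True)
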
 class wildcarderror(Exception):
     """Represents an error parsing wildcards in DNS name."""
 
@@ -268,10 +589,6 @@
     else:
         return _('no commonName or subjectAltName found in certificate')
 
-
-# CERT_REQUIRED means fetch the cert from the server all the time AND
-# validate it against the CA store provided in web.cacerts.
-
 def _plainapplepython():
     """return true if this seems to be a pure Apple Python that
     * is unfrozen and presumably has the whole mercurial module in the file
@@ -286,97 +603,172 @@
     return (exe.startswith('/usr/bin/python') or
             exe.startswith('/system/library/frameworks/python.framework/'))
 
-def _defaultcacerts():
-    """return path to CA certificates; None for system's store; ! to disable"""
+_systemcacertpaths = [
+    # RHEL, CentOS, and Fedora
+    '/etc/pki/tls/certs/ca-bundle.trust.crt',
+    # Debian, Ubuntu, Gentoo
+    '/etc/ssl/certs/ca-certificates.crt',
+]
+
+def _defaultcacerts(ui):
+    """return path to default CA certificates or None.
+
+    It is assumed this function is called when the returned certificates
+    file will actually be used to validate connections. Therefore this
+    function may print warnings or debug messages assuming this usage.
+
+    We don't print a message when the Python is able to load default
+    CA certs because this scenario is detected at socket connect time.
+    """
+    # The "certifi" Python package provides certificates. If it is installed,
+    # assume the user intends it to be used and use it.
+    try:
+        import certifi
+        certs = certifi.where()
+        ui.debug('using ca certificates from certifi\n')
+        return certs
+    except ImportError:
+        pass
+
+    # On Windows, only the modern ssl module is capable of loading the system
+    # CA certificates. If we're not capable of doing that, emit a warning
+    # because we'll get a certificate verification error later and the lack
+    # of loaded CA certificates will be the reason why.
+    # Assertion: this code is only called if certificates are being verified.
+    if os.name == 'nt':
+        if not _canloaddefaultcerts:
+            ui.warn(_('(unable to load Windows CA certificates; see '
+                      'https://mercurial-scm.org/wiki/SecureConnections for '
+                      'how to configure Mercurial to avoid this message)\n'))
+
+        return None
+
+    # Apple's OpenSSL has patches that allow a specially constructed certificate
+    # to load the system CA store. If we're running on Apple Python, use this
+    # trick.
     if _plainapplepython():
         dummycert = os.path.join(os.path.dirname(__file__), 'dummycert.pem')
         if os.path.exists(dummycert):
             return dummycert
-    if _canloaddefaultcerts:
+
+    # The Apple OpenSSL trick isn't available to us. If Python isn't able to
+    # load system certs, we're out of luck.
+    if sys.platform == 'darwin':
+        # FUTURE Consider looking for Homebrew or MacPorts installed certs
+        # files. Also consider exporting the keychain certs to a file during
+        # Mercurial install.
+        if not _canloaddefaultcerts:
+            ui.warn(_('(unable to load CA certificates; see '
+                      'https://mercurial-scm.org/wiki/SecureConnections for '
+                      'how to configure Mercurial to avoid this message)\n'))
         return None
-    return '!'
+
+    # / is writable on Windows. Out of an abundance of caution, make sure
+    # we're not on Windows because paths from _systemcacertpaths could be
+    # installed by non-admin users.
+    assert os.name != 'nt'
 
-def sslkwargs(ui, host):
-    kws = {'ui': ui}
-    hostfingerprint = ui.config('hostfingerprints', host)
-    if hostfingerprint:
-        return kws
-    cacerts = ui.config('web', 'cacerts')
-    if cacerts == '!':
-        pass
-    elif cacerts:
-        cacerts = util.expandpath(cacerts)
-        if not os.path.exists(cacerts):
-            raise error.Abort(_('could not find web.cacerts: %s') % cacerts)
-    else:
-        cacerts = _defaultcacerts()
-        if cacerts and cacerts != '!':
-            ui.debug('using %s to enable OS X system CA\n' % cacerts)
-        ui.setconfig('web', 'cacerts', cacerts, 'defaultcacerts')
-    if cacerts != '!':
-        kws.update({'ca_certs': cacerts,
-                    'cert_reqs': ssl.CERT_REQUIRED,
-                    })
-    return kws
+    # Try to find CA certificates in well-known locations. We print a warning
+    # when using a found file because we don't want too much silent magic
+    # for security settings. The expectation is that proper Mercurial
+    # installs will have the CA certs path defined at install time and the
+    # installer/packager will make an appropriate decision on the user's
+    # behalf. We only get here and perform this setting as a feature of
+    # last resort.
+    if not _canloaddefaultcerts:
+        for path in _systemcacertpaths:
+            if os.path.isfile(path):
+                ui.warn(_('(using CA certificates from %s; if you see this '
+                          'message, your Mercurial install is not properly '
+                          'configured; see '
+                          'https://mercurial-scm.org/wiki/SecureConnections '
+                          'for how to configure Mercurial to avoid this '
+                          'message)\n') % path)
+                return path
 
-class validator(object):
-    def __init__(self, ui, host):
-        self.ui = ui
-        self.host = host
+        ui.warn(_('(unable to load CA certificates; see '
+                  'https://mercurial-scm.org/wiki/SecureConnections for '
+                  'how to configure Mercurial to avoid this message)\n'))
 
-    def __call__(self, sock, strict=False):
-        host = self.host
+    return None
+
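The certifi probe above is the first fallback tried when no CA file is configured; if certifi is missing, the well-known Linux bundle paths in _systemcacertpaths are checked. A condensed standalone sketch of that lookup order:

    import os

    def findcabundle():
        # Prefer the certifi package if it is installed; otherwise probe a
        # couple of well-known Linux bundle locations.
        try:
            import certifi
            return certifi.where()
        except ImportError:
            pass
        for path in ('/etc/pki/tls/certs/ca-bundle.trust.crt',
                     '/etc/ssl/certs/ca-certificates.crt'):
            if os.path.isfile(path):
                return path
        return None
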
+def validatesocket(sock):
+    """Validate that a socket meets security requirements.
 
-        if not sock.cipher(): # work around http://bugs.python.org/issue13721
-            raise error.Abort(_('%s ssl connection error') % host)
-        try:
-            peercert = sock.getpeercert(True)
-            peercert2 = sock.getpeercert()
-        except AttributeError:
-            raise error.Abort(_('%s ssl connection error') % host)
+    The passed socket must have been created with ``wrapsocket()``.
+    """
+    host = sock._hgstate['hostname']
+    ui = sock._hgstate['ui']
+    settings = sock._hgstate['settings']
+
+    try:
+        peercert = sock.getpeercert(True)
+        peercert2 = sock.getpeercert()
+    except AttributeError:
+        raise error.Abort(_('%s ssl connection error') % host)
+
+    if not peercert:
+        raise error.Abort(_('%s certificate error: '
+                           'no certificate received') % host)
 
-        if not peercert:
-            raise error.Abort(_('%s certificate error: '
-                               'no certificate received') % host)
+    if settings['disablecertverification']:
+        # We don't print the certificate fingerprint because it shouldn't
+        # be necessary: if the user requested certificate verification be
+        # disabled, they presumably already saw a message about the inability
+        # to verify the certificate and this message would have printed the
+        # fingerprint. So printing the fingerprint here adds little to no
+        # value.
+        ui.warn(_('warning: connection security to %s is disabled per current '
+                  'settings; communication is susceptible to eavesdropping '
+                  'and tampering\n') % host)
+        return
+
+    # If a certificate fingerprint is pinned, use it and only it to
+    # validate the remote cert.
+    peerfingerprints = {
+        'sha1': hashlib.sha1(peercert).hexdigest(),
+        'sha256': hashlib.sha256(peercert).hexdigest(),
+        'sha512': hashlib.sha512(peercert).hexdigest(),
+    }
+
+    def fmtfingerprint(s):
+        return ':'.join([s[x:x + 2] for x in range(0, len(s), 2)])
+
+    nicefingerprint = 'sha256:%s' % fmtfingerprint(peerfingerprints['sha256'])
 
-        # If a certificate fingerprint is pinned, use it and only it to
-        # validate the remote cert.
-        hostfingerprints = self.ui.configlist('hostfingerprints', host)
-        peerfingerprint = util.sha1(peercert).hexdigest()
-        nicefingerprint = ":".join([peerfingerprint[x:x + 2]
-            for x in xrange(0, len(peerfingerprint), 2)])
-        if hostfingerprints:
-            fingerprintmatch = False
-            for hostfingerprint in hostfingerprints:
-                if peerfingerprint.lower() == \
-                        hostfingerprint.replace(':', '').lower():
-                    fingerprintmatch = True
-                    break
-            if not fingerprintmatch:
-                raise error.Abort(_('certificate for %s has unexpected '
-                                   'fingerprint %s') % (host, nicefingerprint),
-                                 hint=_('check hostfingerprint configuration'))
-            self.ui.debug('%s certificate matched fingerprint %s\n' %
-                          (host, nicefingerprint))
-            return
+    if settings['certfingerprints']:
+        for hash, fingerprint in settings['certfingerprints']:
+            if peerfingerprints[hash].lower() == fingerprint:
+                ui.debug('%s certificate matched fingerprint %s:%s\n' %
+                         (host, hash, fmtfingerprint(fingerprint)))
+                return
+
+        # Pinned fingerprint didn't match. This is a fatal error.
+        if settings['legacyfingerprint']:
+            section = 'hostfingerprint'
+            nice = fmtfingerprint(peerfingerprints['sha1'])
+        else:
+            section = 'hostsecurity'
+            nice = '%s:%s' % (hash, fmtfingerprint(peerfingerprints[hash]))
+        raise error.Abort(_('certificate for %s has unexpected '
+                            'fingerprint %s') % (host, nice),
+                          hint=_('check %s configuration') % section)
 
-        # No pinned fingerprint. Establish trust by looking at the CAs.
-        cacerts = self.ui.config('web', 'cacerts')
-        if cacerts != '!':
-            msg = _verifycert(peercert2, host)
-            if msg:
-                raise error.Abort(_('%s certificate error: %s') % (host, msg),
-                                 hint=_('configure hostfingerprint %s or use '
-                                        '--insecure to connect insecurely') %
-                                      nicefingerprint)
-            self.ui.debug('%s certificate successfully verified\n' % host)
-        elif strict:
-            raise error.Abort(_('%s certificate with fingerprint %s not '
-                               'verified') % (host, nicefingerprint),
-                             hint=_('check hostfingerprints or web.cacerts '
-                                     'config setting'))
-        else:
-            self.ui.warn(_('warning: %s certificate with fingerprint %s not '
-                           'verified (check hostfingerprints or web.cacerts '
-                           'config setting)\n') %
-                         (host, nicefingerprint))
+    # Security is enabled but no CAs are loaded. We can't establish trust
+    # for the cert so abort.
+    if not sock._hgstate['caloaded']:
+        raise error.Abort(
+            _('unable to verify security of %s (no loaded CA certificates); '
+              'refusing to connect') % host,
+            hint=_('see https://mercurial-scm.org/wiki/SecureConnections for '
+                   'how to configure Mercurial to avoid this error or set '
+                   'hostsecurity.%s:fingerprints=%s to trust this server') %
+                   (host, nicefingerprint))
+
+    msg = _verifycert(peercert2, host)
+    if msg:
+        raise error.Abort(_('%s certificate error: %s') % (host, msg),
+                         hint=_('set hostsecurity.%s:fingerprints=%s '
+                                'config setting or use --insecure to connect '
+                                'insecurely') %
+                              (host, nicefingerprint))
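Fingerprint pinning in validatesocket() compares hex digests of the DER-encoded peer certificate; the sha256:aa:bb:... string suggested in the abort hints can be produced the same way. A small sketch (the socket call in the comment assumes a connection wrapped by wrapsocket()):

    import hashlib

    def formatfingerprint(dercert):
        # Hash the DER-encoded peer certificate and insert a colon every
        # two hex digits, matching the fmtfingerprint() helper above.
        digest = hashlib.sha256(dercert).hexdigest()
        return 'sha256:' + ':'.join(digest[i:i + 2]
                                    for i in range(0, len(digest), 2))

    # typical use with a wrapped socket:
    #   der = sslsocket.getpeercert(True)
    #   print(formatfingerprint(der))
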
--- a/mercurial/store.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/store.py	Mon Jul 18 23:28:14 2016 -0500
@@ -8,6 +8,7 @@
 from __future__ import absolute_import
 
 import errno
+import hashlib
 import os
 import stat
 
@@ -19,8 +20,6 @@
     util,
 )
 
-_sha = util.sha1
-
 # This avoids a collision between a file named foo and a dir named
 # foo.i or foo.d
 def _encodedir(path):
@@ -57,6 +56,23 @@
             .replace(".i.hg/", ".i/")
             .replace(".hg.hg/", ".hg/"))
 
+def _reserved():
+    ''' characters that are problematic for filesystems
+
+    * ascii escapes (0..31)
+    * ascii hi (126..255)
+    * windows specials
+
+    these characters will be escaped by encodefunctions
+    '''
+    winreserved = [ord(x) for x in '\\:*?"<>|']
+    for x in range(32):
+        yield x
+    for x in range(126, 256):
+        yield x
+    for x in winreserved:
+        yield x
+
 def _buildencodefun():
     '''
     >>> enc, dec = _buildencodefun()
@@ -82,11 +98,10 @@
     'the\\x07quick\\xadshot'
     '''
     e = '_'
-    winreserved = [ord(x) for x in '\\:*?"<>|']
     cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
-    for x in (range(32) + range(126, 256) + winreserved):
+    for x in _reserved():
         cmap[chr(x)] = "~%02x" % x
-    for x in range(ord("A"), ord("Z") + 1) + [ord(e)]:
+    for x in list(range(ord("A"), ord("Z") + 1)) + [ord(e)]:
         cmap[chr(x)] = e + chr(x).lower()
     dmap = {}
     for k, v in cmap.iteritems():
@@ -134,9 +149,8 @@
     >>> f('the\x07quick\xADshot')
     'the~07quick~adshot'
     '''
-    winreserved = [ord(x) for x in '\\:*?"<>|']
     cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
-    for x in (range(32) + range(126, 256) + winreserved):
+    for x in _reserved():
         cmap[chr(x)] = "~%02x" % x
     for x in range(ord("A"), ord("Z") + 1):
         cmap[chr(x)] = chr(x).lower()
@@ -196,7 +210,7 @@
 _maxshortdirslen = 8 * (_dirprefixlen + 1) - 4
 
 def _hashencode(path, dotencode):
-    digest = _sha(path).hexdigest()
+    digest = hashlib.sha1(path).hexdigest()
     le = lowerencode(path[5:]).split('/') # skips prefix 'data/' or 'meta/'
     parts = _auxencode(le, dotencode)
     basename = parts[-1]
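The new _reserved() generator centralizes the bytes that the store encoders escape: ASCII control characters, bytes 126-255, and the Windows-special punctuation. A simplified sketch of building an escape function from such a generator (escapename() is a toy for illustration, not the real encoder):

    def reservedbytes():
        # control characters, high bytes, and Windows-special punctuation,
        # mirroring the _reserved() generator above
        for x in range(32):
            yield x
        for x in range(126, 256):
            yield x
        for x in (ord(c) for c in '\\:*?"<>|'):
            yield x

    def escapename(name):
        bad = set(reservedbytes())
        return ''.join('~%02x' % ord(c) if ord(c) in bad else c for c in name)

    print(escapename('foo:bar?'))   # -> 'foo~3abar~3f'
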
--- a/mercurial/subrepo.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/subrepo.py	Mon Jul 18 23:28:14 2016 -0500
@@ -9,6 +9,7 @@
 
 import copy
 import errno
+import hashlib
 import os
 import posixpath
 import re
@@ -50,14 +51,14 @@
 
 def _getstorehashcachename(remotepath):
     '''get a unique filename for the store hash cache of a remote repository'''
-    return util.sha1(_expandedabspath(remotepath)).hexdigest()[0:12]
+    return hashlib.sha1(_expandedabspath(remotepath)).hexdigest()[0:12]
 
 class SubrepoAbort(error.Abort):
     """Exception class used to avoid handling a subrepo error more than once"""
     def __init__(self, *args, **kw):
+        self.subrepo = kw.pop('subrepo', None)
+        self.cause = kw.pop('cause', None)
         error.Abort.__init__(self, *args, **kw)
-        self.subrepo = kw.get('subrepo')
-        self.cause = kw.get('cause')
 
 def annotatesubrepoerror(func):
     def decoratedmethod(self, *args, **kargs):
@@ -585,7 +586,7 @@
         return 1
 
     def revert(self, substate, *pats, **opts):
-        self.ui.warn('%s: reverting %s subrepos is unsupported\n' \
+        self.ui.warn(_('%s: reverting %s subrepos is unsupported\n') \
             % (substate[0], substate[2]))
         return []
 
@@ -659,7 +660,7 @@
         yield '# %s\n' % _expandedabspath(remotepath)
         vfs = self._repo.vfs
         for relname in filelist:
-            filehash = util.sha1(vfs.tryread(relname)).hexdigest()
+            filehash = hashlib.sha1(vfs.tryread(relname)).hexdigest()
             yield '%s = %s\n' % (relname, filehash)
 
     @propertycache
@@ -1413,7 +1414,7 @@
             if command in ('cat-file', 'symbolic-ref'):
                 return retdata, p.returncode
             # for all others, abort
-            raise error.Abort('git %s error %d in %s' %
+            raise error.Abort(_('git %s error %d in %s') %
                              (command, p.returncode, self._relpath))
 
         return retdata, p.returncode
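The SubrepoAbort change pops its extra keyword arguments before delegating to error.Abort.__init__, since the base constructor would reject them. A tiny generic illustration of that pattern with made-up exception classes:

    class BaseError(Exception):
        def __init__(self, message, hint=None):
            super(BaseError, self).__init__(message)
            self.hint = hint

    class WrappedError(BaseError):
        def __init__(self, *args, **kw):
            # Remove the extra keywords first; passing them through would
            # raise TypeError in BaseError.__init__.
            self.subrepo = kw.pop('subrepo', None)
            self.cause = kw.pop('cause', None)
            BaseError.__init__(self, *args, **kw)

    err = WrappedError('boom', subrepo='sub/repo', hint='check the subrepo')
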
--- a/mercurial/tags.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/tags.py	Mon Jul 18 23:28:14 2016 -0500
@@ -292,7 +292,7 @@
     cachehash = None
     if cachefile:
         try:
-            validline = cachelines.next()
+            validline = next(cachelines)
             validline = validline.split()
             cacherev = int(validline[0])
             cachenode = bin(validline[1])
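The tags cache change swaps the Python 2-only iterator.next() method for the next() builtin, which works on both Python 2 and 3 (the cache lines shown are placeholders):

    cachelines = iter(['42 0123abcdef', '17 fedcba98 deadbeef'])
    # next(iterator) is available on Python 2 and Python 3 alike;
    # iterator.next() exists only on Python 2.
    validline = next(cachelines)
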
--- a/mercurial/templater.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/templater.py	Mon Jul 18 23:28:14 2016 -0500
@@ -724,6 +724,25 @@
 
     return minirst.format(text, style=style, keep=['verbose'])
 
+@templatefunc('separate(sep, args)')
+def separate(context, mapping, args):
+    """Add a separator between non-empty arguments."""
+    if not args:
+        # i18n: "separate" is a keyword
+        raise error.ParseError(_("separate expects at least one argument"))
+
+    sep = evalstring(context, mapping, args[0])
+    first = True
+    for arg in args[1:]:
+        argstr = evalstring(context, mapping, arg)
+        if not argstr:
+            continue
+        if first:
+            first = False
+        else:
+            yield sep
+        yield argstr
+
 @templatefunc('shortest(node, minlength=4)')
 def shortest(context, mapping, args):
     """Obtain the shortest representation of
--- a/mercurial/templates/atom/branchentry.tmpl	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/templates/atom/branchentry.tmpl	Mon Jul 18 23:28:14 2016 -0500
@@ -4,5 +4,5 @@
   <id>{urlbase}{url|urlescape}#branch-{node}</id>
   <updated>{date|rfc3339date}</updated>
   <published>{date|rfc3339date}</published>
-  <content type="text"><![CDATA[{branch|strip|escape|addbreaks}]]></content>
+  <content type="text">{branch|strip|escape}</content>
  </entry>
--- a/mercurial/templates/atom/changelogentry.tmpl	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/templates/atom/changelogentry.tmpl	Mon Jul 18 23:28:14 2016 -0500
@@ -9,35 +9,35 @@
   <updated>{date|rfc3339date}</updated>
   <published>{date|rfc3339date}</published>
   <content type="xhtml">
-	<table xmlns="http://www.w3.org/1999/xhtml">
-	<tr>
-		<th style="text-align:left;">changeset</th>
-		<td>{node|short}</td>
-              </tr>
-              <tr>
-                              <th style="text-align:left;">branch</th>
-                              <td>{inbranch%"{name|escape}"}{branches%"{name|escape}"}</td>
-              </tr>
-              <tr>
-                              <th style="text-align:left;">bookmark</th>
-		<td>{bookmarks%"{name|escape}"}</td>
-	</tr>
-	<tr>
-		<th style="text-align:left;">tag</th>
-		<td>{tags%"{name|escape}"}</td>
-	</tr>
-	<tr>
-		<th style="text-align:left;">user</th>
-		<td>{author|obfuscate}</td>
-	</tr>
-	<tr>
-		<th style="text-align:left;vertical-align:top;">description</th>
-		<td>{desc|strip|escape|websub|addbreaks|nonempty}</td>
-	</tr>
-	<tr>
-		<th style="text-align:left;vertical-align:top;">files</th>
-		<td>{files}</td>
-	</tr>
-	</table>
+   <table xmlns="http://www.w3.org/1999/xhtml">
+    <tr>
+     <th style="text-align:left;">changeset</th>
+     <td>{node|short}</td>
+    </tr>
+    <tr>
+     <th style="text-align:left;">branch</th>
+     <td>{inbranch%"{name|escape}"}{branches%"{name|escape}"}</td>
+    </tr>
+    <tr>
+     <th style="text-align:left;">bookmark</th>
+     <td>{bookmarks%"{name|escape}"}</td>
+    </tr>
+    <tr>
+     <th style="text-align:left;">tag</th>
+     <td>{tags%"{name|escape}"}</td>
+    </tr>
+    <tr>
+     <th style="text-align:left;">user</th>
+     <td>{author|obfuscate}</td>
+    </tr>
+    <tr>
+     <th style="text-align:left;vertical-align:top;">description</th>
+     <td>{desc|strip|escape|websub|addbreaks|nonempty}</td>
+    </tr>
+    <tr>
+     <th style="text-align:left;vertical-align:top;">files</th>
+     <td>{files}</td>
+    </tr>
+   </table>
   </content>
  </entry>
--- a/mercurial/templates/atom/map	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/templates/atom/map	Mon Jul 18 23:28:14 2016 -0500
@@ -5,7 +5,6 @@
 changelog = changelog.tmpl
 changelogentry = changelogentry.tmpl
 filelog = filelog.tmpl
-filelogentry = filelogentry.tmpl
 tags = tags.tmpl
 tagentry = tagentry.tmpl
 bookmarks = bookmarks.tmpl
--- a/mercurial/templates/gitweb/map	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/templates/gitweb/map	Mon Jul 18 23:28:14 2016 -0500
@@ -95,14 +95,29 @@
 fileline = '
   <a href="#{lineid}"></a><span id="{lineid}">{strip(line|escape, '\r\n')}</span>'
 annotateline = '
-  <tr id="{lineid}" style="font-family:monospace" class="parity{parity}">
-    <td class="linenr" style="text-align: right;">
-      <a href="{url|urlescape}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}#l{targetline}"
-         title="{node|short}: {desc|escape|firstline}">{author|user}@{rev}</a>
+  <tr id="{lineid}" style="font-family:monospace" class="parity{parity}{ifeq(node, originalnode, ' thisrev')}">
+    <td class="annotate linenr parity{blockparity}" style="text-align: right;">
+      {if(blockhead,
+          '<a href="{url|urlescape}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}#l{targetline}">
+             {rev}
+           </a>')}
+      <div class="annotate-info">
+        <div>
+          <a href="{url|urlescape}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}#l{targetline}">
+            {node|short}</a>
+          {desc|escape|firstline}
+        </div>
+        <div><em>{author|obfuscate}</em></div>
+        <div>parents: {parents%annotateparent}</div>
+        <a href="{url|urlescape}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a>
+        <a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">changeset</a>
+      </div>
     </td>
     <td><pre><a class="linenr" href="#{lineid}">{linenumber}</a></pre></td>
     <td><pre>{line|escape}</pre></td>
   </tr>'
+annotateparent = '
+  <a href="{url|urlescape}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{rev}</a>'
 difflineplus = '
   <a href="#{lineid}"></a><span id="{lineid}" class="difflineplus">{strip(line|escape, '\r\n')}</span>'
 difflineminus = '
--- a/mercurial/templates/json/map	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/templates/json/map	Mon Jul 18 23:28:14 2016 -0500
@@ -1,6 +1,26 @@
 mimetype = 'application/json'
-filerevision = '"not yet implemented"'
-search = '"not yet implemented"'
+filerevision = '\{
+  "node": {node|json},
+  "path": {file|json},
+  "date": {date|json},
+  "desc": {desc|utf8|json},
+  "branch": {if(branch, branch%changesetbranch, "default"|json)},
+  "bookmarks": [{join(bookmarks%changelistentryname, ", ")}],
+  "tags": [{join(tags%changelistentryname, ", ")}],
+  "user": {author|utf8|json},
+  "parents": [{join(parent%changesetparent, ", ")}],
+  "phase": {phase|json},
+  "lines": [{join(text%lineentry, ", ")}]
+  }'
+lineentry = '\{
+  "line": {line|json}
+  }'
+search = '\{
+  "node": {node|json},
+  "query": {query|json},
+  "entries": [{join(entries%searchentry, ", ")}]
+  }'
+searchentry = '{changelistentry}'
 # changelog and shortlog are the same web API but with different
 # number of entries.
 changelog = changelist.tmpl
@@ -9,10 +29,13 @@
   "node": {node|json},
   "date": {date|json},
   "desc": {desc|utf8|json},
+  "branch": {if(branch, branch%changesetbranch, "default"|json)},
   "bookmarks": [{join(bookmarks%changelistentryname, ", ")}],
   "tags": [{join(tags%changelistentryname, ", ")}],
   "user": {author|utf8|json},
-  "parents": [{join(allparents%changesetparent, ", ")}]
+  "phase": {phase|json},
+  "parents": [{if(allparents, join(allparents%changesetparent, ", "),
+                  join(parent%changesetparent, ", "))}]
   }'
 changelistentryname = '{name|utf8|json}'
 changeset = '\{
@@ -78,7 +101,23 @@
   "date": {date|json},
   "status": {status|json}
   }'
-summary = '"not yet implemented"'
+shortlogentry = '{changelistentry}'
+summary = '\{
+  "node": {node|json},
+  "lastchange": {lastchange|json},
+  "bookmarks": [{join(bookmarks%bookmarkentry, ", ")}],
+  "branches": [{join(branches%branchentry, ", ")}],
+  "shortlog": [{join(shortlog%shortlogentry, ", ")}],
+  "tags": [{join(tags%tagentry, ", ")}],
+  "archives": [{join(archives%archiveentry, ", ")}],
+  "labels": {labels|json}
+  }'
+archiveentry = '\{
+  "node": {node|json},
+  "extension": {extension|json},
+  "type": {type|json},
+  "url": {"{urlbase}{url}archive/{node}{extension}"|json}
+  }'
 filediff = '\{
   "path": {file|json},
   "node": {node|json},
@@ -156,7 +195,9 @@
   "lineno": {lineno|json},
   "revdate": {revdate|json}
   }'
-filelog = '"not yet implemented"'
+filelog = '\{
+  "entries": [{join(entries%changelistentry, ", ")}]
+  }'
 graph = '"not yet implemented"'
 helptopics = '\{
   "topics": [{join(topics%helptopicentry, ", ")}],
@@ -180,5 +221,6 @@
   "name": {name|utf8|json},
   "description": {description|utf8|json},
   "contact": {contact|utf8|json},
-  "lastchange": {lastchange|json}
+  "lastchange": {lastchange|json},
+  "labels": {labels|json}
   }'
--- a/mercurial/templates/monoblue/map	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/templates/monoblue/map	Mon Jul 18 23:28:14 2016 -0500
@@ -91,16 +91,31 @@
 fileline = '
   <a href="#{lineid}"></a><span id="{lineid}">{strip(line|escape, '\r\n')}</span>'
 annotateline = '
-  <tr id="{lineid}" class="parity{parity}">
-    <td class="linenr">
-      <a href="{url|urlescape}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}#l{targetline}"
-         title="{node|short}: {desc|escape|firstline}">{author|user}@{rev}</a>
+  <tr id="{lineid}" class="parity{parity}{ifeq(node, originalnode, ' thisrev')}">
+    <td class="annotate linenr parity{blockparity}">
+      {if(blockhead,
+          '<a href="{url|urlescape}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}#l{targetline}">
+             {rev}
+           </a>')}
+      <div class="annotate-info">
+        <div>
+          <a href="{url|urlescape}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}#l{targetline}">
+            {node|short}</a>
+          {desc|escape|firstline}
+        </div>
+        <div><em>{author|obfuscate}</em></div>
+        <div>parents: {parents%annotateparent}</div>
+        <a href="{url|urlescape}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a>
+        <a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">changeset</a>
+      </div>
     </td>
     <td class="lineno">
       <a href="#{lineid}">{linenumber}</a>
     </td>
     <td class="source">{line|escape}</td>
   </tr>'
+annotateparent = '
+  <a href="{url|urlescape}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{rev}</a>'
 difflineplus = '
   <a href="#{lineid}"></a><span id="{lineid}" class="difflineplus">{strip(line|escape, '\r\n')}</span>'
 difflineminus = '
--- a/mercurial/templates/paper/map	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/templates/paper/map	Mon Jul 18 23:28:14 2016 -0500
@@ -76,14 +76,28 @@
 filelogentry = filelogentry.tmpl
 
 annotateline = '
-  <tr id="{lineid}">
-    <td class="annotate">
-      <a href="{url|urlescape}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}#l{targetline}"
-         title="{node|short}: {desc|escape|firstline}">{author|user}@{rev}</a>
+  <tr id="{lineid}"{ifeq(node, originalnode, ' class="thisrev"')}>
+    <td class="annotate parity{blockparity}">
+      {if(blockhead,
+          '<a href="{url|urlescape}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}#l{targetline}">
+             {rev}
+           </a>')}
+      <div class="annotate-info">
+        <div>
+          <a href="{url|urlescape}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}#l{targetline}">
+            {node|short}</a>
+          {desc|escape|firstline}
+        </div>
+        <div><em>{author|obfuscate}</em></div>
+        <div>parents: {parents%annotateparent}</div>
+        <a href="{url|urlescape}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a>
+        <a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">changeset</a>
+      </div>
     </td>
     <td class="source"><a href="#{lineid}">{linenumber}</a> {line|escape}</td>
   </tr>'
-
+annotateparent = '
+  <a href="{url|urlescape}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{rev}</a>'
 diffblock = '<div class="bottomline inc-lineno"><pre class="sourcelines wrap">{lines}</pre></div>'
 difflineplus = '
   <span id="{lineid}" class="plusline">{strip(line|escape, '\r\n')}</span><a href="#{lineid}"></a>'
--- a/mercurial/templates/rss/bookmarks.tmpl	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/templates/rss/bookmarks.tmpl	Mon Jul 18 23:28:14 2016 -0500
@@ -1,5 +1,5 @@
 {header}
-    <title>{repo|escape}: bookmarks </title>
+    <title>{repo|escape}: bookmarks</title>
     <description>{repo|escape} bookmark history</description>
     {entries%bookmarkentry}
   </channel>
--- a/mercurial/templates/rss/changelogentry.tmpl	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/templates/rss/changelogentry.tmpl	Mon Jul 18 23:28:14 2016 -0500
@@ -1,40 +1,41 @@
 <item>
     <title>{inbranch%"{if(name, '[{name|escape}] ')}"}{branches%"{if(name, '[{name|escape}] ')}"}{desc|strip|firstline|strip|escape}</title>
     <guid isPermaLink="true">{urlbase}{url|urlescape}rev/{node|short}</guid>
-             <link>{urlbase}{url|urlescape}rev/{node|short}</link>
+    <link>{urlbase}{url|urlescape}rev/{node|short}</link>
     <description>
-              <![CDATA[
-	<table>
-	<tr>
-		<th style="text-align:left;">changeset</th>
-		<td>{node|short}</td>
-              </tr>
-              <tr>
-                              <th style="text-align:left;">branch</th>
-                              <td>{inbranch%"{name|escape}"}{branches%"{name|escape}"}</td>
-              </tr>
-              <tr>
-                              <th style="text-align:left;">bookmark</th>
-		<td>{bookmarks%"{name|escape}"}</td>
-	</tr>
-	<tr>
-		<th style="text-align:left;">tag</th>
-		<td>{tags%"{name|escape}"}</td>
-	</tr>
-	<tr>
-		<th style="text-align:left;vertical-align:top;">user</th>
-		<td>{author|obfuscate}</td>
-	</tr>
-	<tr>
-		<th style="text-align:left;vertical-align:top;">description</th>
-		<td>{desc|strip|escape|websub|addbreaks|nonempty}</td>
-	</tr>
-	<tr>
-		<th style="text-align:left;vertical-align:top;">files</th>
-		<td>{files}</td>
-	</tr>
-	</table>
-	]]></description>
+    <![CDATA[
+        <table>
+            <tr>
+                <th style="text-align:left;">changeset</th>
+                <td>{node|short}</td>
+            </tr>
+            <tr>
+                <th style="text-align:left;">branch</th>
+                <td>{inbranch%"{name|escape}"}{branches%"{name|escape}"}</td>
+            </tr>
+            <tr>
+                <th style="text-align:left;">bookmark</th>
+                <td>{bookmarks%"{name|escape}"}</td>
+            </tr>
+            <tr>
+                <th style="text-align:left;">tag</th>
+                <td>{tags%"{name|escape}"}</td>
+            </tr>
+            <tr>
+                <th style="text-align:left;vertical-align:top;">user</th>
+                <td>{author|obfuscate}</td>
+            </tr>
+            <tr>
+                <th style="text-align:left;vertical-align:top;">description</th>
+                <td>{desc|strip|escape|websub|addbreaks|nonempty}</td>
+            </tr>
+            <tr>
+                <th style="text-align:left;vertical-align:top;">files</th>
+                <td>{files}</td>
+            </tr>
+        </table>
+    ]]>
+    </description>
     <author>{author|obfuscate}</author>
     <pubDate>{date|rfc822date}</pubDate>
 </item>
--- a/mercurial/templates/rss/tags.tmpl	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/templates/rss/tags.tmpl	Mon Jul 18 23:28:14 2016 -0500
@@ -1,5 +1,5 @@
 {header}
-    <title>{repo|escape}: tags </title>
+    <title>{repo|escape}: tags</title>
     <description>{repo|escape} tag history</description>
     {entriesnotip%tagentry}
   </channel>
--- a/mercurial/templates/spartan/map	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/templates/spartan/map	Mon Jul 18 23:28:14 2016 -0500
@@ -54,16 +54,31 @@
 # is an empty line in the annotated file), which in turn ensures that
 # all table rows have equal height.
 annotateline = '
-  <tr class="parity{parity}">
-    <td class="annotate">
-      <a href="{url|urlescape}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}#l{targetline}"
-         title="{node|short}: {desc|escape|firstline}">{author|user}@{rev}</a>
+  <tr class="parity{parity}{ifeq(node, originalnode, ' thisrev')}">
+    <td class="annotate parity{blockparity}">
+      {if(blockhead,
+          '<a href="{url|urlescape}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}#l{targetline}">
+             {rev}
+           </a>')}
+      <div class="annotate-info">
+        <div>
+          <a href="{url|urlescape}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}#l{targetline}">
+            {node|short}</a>
+          {desc|escape|firstline}
+        </div>
+        <div><em>{author|obfuscate}</em></div>
+        <div>parents: {parents%annotateparent}</div>
+        <a href="{url|urlescape}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a>
+        <a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">changeset</a>
+      </div>
     </td>
     <td>
       <a class="lineno" href="#{lineid}" id="{lineid}">{linenumber}</a>
     </td>
     <td><pre>&nbsp;{line|escape}</pre></td>
   </tr>'
+annotateparent = '
+  <a href="{url|urlescape}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{rev}</a>'
 difflineplus = '<span class="plusline"><a class="lineno" href="#{lineid}" id="{lineid}">{linenumber}</a>{line|escape}</span>'
 difflineminus = '<span class="minusline"><a class="lineno" href="#{lineid}" id="{lineid}">{linenumber}</a>{line|escape}</span>'
 difflineat = '<span class="atline"><a class="lineno" href="#{lineid}" id="{lineid}">{linenumber}</a>{line|escape}</span>'
--- a/mercurial/templates/static/style-gitweb.css	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/templates/static/style-gitweb.css	Mon Jul 18 23:28:14 2016 -0500
@@ -29,6 +29,7 @@
 a.list:hover { text-decoration:underline; color:#880000; }
 table { padding:8px 4px; }
 th { padding:2px 5px; font-size:12px; text-align:left; }
+.parity0 { background-color:#ffffff; }
 tr.dark, .parity1, pre.sourcelines.stripes > :nth-child(4n+4) { background-color:#f6f6f0; }
 tr.light:hover, .parity0:hover, tr.dark:hover, .parity1:hover,
 pre.sourcelines.stripes > :nth-child(4n+2):hover,
@@ -52,6 +53,19 @@
 div.diff_info { font-family:monospace; color:#000099; background-color:#edece6; font-style:italic; }
 div.index_include { border:solid #d9d8d1; border-width:0px 0px 1px; padding:12px 8px; }
 div.search { margin:4px 8px; position:absolute; top:56px; right:12px }
+tr.thisrev a { color:#999999; text-decoration: none; }
+tr.thisrev pre { color:#009900; }
+div.annotate-info {
+  display: none;
+  position: absolute;
+  background-color: #FFFFFF;
+  border: 1px solid #000000;
+  text-align: left;
+  color: #000000;
+  padding: 5px;
+}
+div.annotate-info a { color: #0000FF; text-decoration: underline; }
+td.annotate:hover div.annotate-info { display: inline; }
 .linenr { color:#999999; text-decoration:none }
 div.rss_logo { float: right; white-space: nowrap; }
 div.rss_logo a {
--- a/mercurial/templates/static/style-monoblue.css	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/templates/static/style-monoblue.css	Mon Jul 18 23:28:14 2016 -0500
@@ -210,10 +210,12 @@
   background: #D5E1E6;
 }
 */
-table tr.parity0 {
+table tr.parity0,
+table td.annotate.parity0 {
   background: #F1F6F7;
 }
-table tr.parity1 {
+table tr.parity1,
+table td.annotate.parity1 {
   background: #FFFFFF;
 }
 table tr td {
@@ -331,6 +333,19 @@
 td.linenr {
   width: 60px;
 }
+tr.thisrev a { color:#999999; text-decoration: none; }
+tr.thisrev td.source { color:#009900; }
+div.annotate-info {
+  display: none;
+  position: absolute;
+  background-color: #FFFFFF;
+  border: 1px solid #000000;
+  text-align: left;
+  color: #000000;
+  padding: 5px;
+}
+div.annotate-info a { color: #0000FF; }
+td.annotate:hover div.annotate-info { display: inline; }
 
 div#powered-by {
   position: absolute;
--- a/mercurial/templates/static/style-paper.css	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/templates/static/style-paper.css	Mon Jul 18 23:28:14 2016 -0500
@@ -208,6 +208,19 @@
 .bigtable .annotate { text-align: right; }
 .bigtable td.annotate { font-size: smaller; }
 .bigtable td.source { font-size: inherit; }
+tr.thisrev a { color:#999999; text-decoration: none; }
+tr.thisrev td.source { color:#009900; }
+div.annotate-info {
+  display: none;
+  position: absolute;
+  background-color: #FFFFFF;
+  border: 1px solid #000000;
+  text-align: left;
+  color: #000000;
+  padding: 5px;
+}
+div.annotate-info a { color: #0000FF; }
+td.annotate:hover div.annotate-info { display: inline; }
 
 .source, .sourcefirst {
   font-family: monospace;
--- a/mercurial/templates/static/style.css	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/templates/static/style.css	Mon Jul 18 23:28:14 2016 -0500
@@ -10,6 +10,19 @@
 .minusline { color: red; }
 .atline { color: purple; }
 .annotate { font-size: smaller; text-align: right; padding-right: 1em; }
+tr.thisrev a { color:#999999; text-decoration: none; }
+tr.thisrev pre { color:#009900; }
+div.annotate-info {
+  display: none;
+  position: absolute;
+  background-color: #FFFFFF;
+  border: 1px solid #000000;
+  text-align: left;
+  color: #000000;
+  padding: 5px;
+}
+div.annotate-info a { color: #0000FF; }
+td.annotate:hover div.annotate-info { display: inline; }
 .buttons a {
   background-color: #666;
   padding: 2pt;
--- a/mercurial/transaction.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/transaction.py	Mon Jul 18 23:28:14 2016 -0500
@@ -31,10 +31,9 @@
     'dirstate'
 ])
 
-class GenerationGroup(object):
-    ALL='all'
-    PREFINALIZE='prefinalize'
-    POSTFINALIZE='postfinalize'
+gengroupall='all'
+gengroupprefinalize='prefinalize'
+gengrouppostfinalize='postfinalize'
 
 def active(func):
     def _active(self, *args, **kwds):
@@ -73,7 +72,7 @@
                 filepath = vfs.join(f)
                 backuppath = vfs.join(b)
                 try:
-                    util.copyfile(backuppath, filepath)
+                    util.copyfile(backuppath, filepath, checkambig=True)
                     backupfiles.append(b)
                 except IOError:
                     report(_("failed to recover %s\n") % f)
@@ -289,7 +288,7 @@
         # but for bookmarks that are handled outside this mechanism.
         self._filegenerators[genid] = (order, filenames, genfunc, location)
 
-    def _generatefiles(self, suffix='', group=GenerationGroup.ALL):
+    def _generatefiles(self, suffix='', group=gengroupall):
         # write files registered for generation
         any = False
         for id, entry in sorted(self._filegenerators.iteritems()):
@@ -297,8 +296,8 @@
             order, filenames, genfunc, location = entry
 
             # for generation at closing, check if it's before or after finalize
-            postfinalize = group == GenerationGroup.POSTFINALIZE
-            if (group != GenerationGroup.ALL and
+            postfinalize = group == gengrouppostfinalize
+            if (group != gengroupall and
                 (id in postfinalizegenerators) != (postfinalize)):
                 continue
 
@@ -311,7 +310,8 @@
                         self.registertmp(name, location=location)
                     else:
                         self.addbackup(name, location=location)
-                    files.append(vfs(name, 'w', atomictemp=True))
+                    files.append(vfs(name, 'w', atomictemp=True,
+                                     checkambig=not suffix))
                 genfunc(*files)
             finally:
                 for f in files:
@@ -427,13 +427,13 @@
         '''commit the transaction'''
         if self.count == 1:
             self.validator(self)  # will raise exception if needed
-            self._generatefiles(group=GenerationGroup.PREFINALIZE)
+            self._generatefiles(group=gengroupprefinalize)
             categories = sorted(self._finalizecallback)
             for cat in categories:
                 self._finalizecallback[cat](self)
             # Prevent double usage and help clear cycles.
             self._finalizecallback = None
-            self._generatefiles(group=GenerationGroup.POSTFINALIZE)
+            self._generatefiles(group=gengrouppostfinalize)
 
         self.count -= 1
         if self.count != 0:
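The GenerationGroup class is replaced by plain module-level constants, and _generatefiles() filters registered file generators by whether their id belongs to the post-finalize set. A simplified sketch of that selection logic (the generator ids shown are illustrative):

    gengroupall = 'all'
    gengroupprefinalize = 'prefinalize'
    gengrouppostfinalize = 'postfinalize'

    postfinalizegenerators = set(['bookmarks', 'dirstate'])

    def selectedgenerators(generators, group):
        # Keep everything for 'all'; otherwise keep only generators whose
        # membership in postfinalizegenerators matches the requested group.
        for genid in sorted(generators):
            postfinalize = group == gengrouppostfinalize
            if (group != gengroupall and
                (genid in postfinalizegenerators) != postfinalize):
                continue
            yield genid

    gens = {'bookmarks': None, 'phase': None, 'dirstate': None}
    print(list(selectedgenerators(gens, gengroupprefinalize)))  # -> ['phase']
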
--- a/mercurial/ui.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/ui.py	Mon Jul 18 23:28:14 2016 -0500
@@ -29,6 +29,8 @@
     util,
 )
 
+urlreq = util.urlreq
+
 samplehgrcs = {
     'user':
 """# example user config (see "hg help config" for more info)
@@ -107,6 +109,8 @@
         self._trustusers = set()
         self._trustgroups = set()
         self.callhooks = True
+        # Insecure server connections requested.
+        self.insecureconnections = False
 
         if src:
             self.fout = src.fout
@@ -120,7 +124,10 @@
             self._trustgroups = src._trustgroups.copy()
             self.environ = src.environ
             self.callhooks = src.callhooks
+            self.insecureconnections = src.insecureconnections
             self.fixconfig()
+
+            self.httppasswordmgrdb = src.httppasswordmgrdb
         else:
             self.fout = sys.stdout
             self.ferr = sys.stderr
@@ -132,9 +139,17 @@
             for f in scmutil.rcpath():
                 self.readconfig(f, trust=True)
 
+            self.httppasswordmgrdb = urlreq.httppasswordmgrwithdefaultrealm()
+
     def copy(self):
         return self.__class__(self)
 
+    def resetstate(self):
+        """Clear internal state that shouldn't persist across commands"""
+        if self._progbar:
+            self._progbar.resetstate()  # reset last-print time of progress bar
+        self.httppasswordmgrdb = urlreq.httppasswordmgrwithdefaultrealm()
+
     def formatter(self, topic, opts):
         return formatter.formatter(self, topic, opts)
 
@@ -213,6 +228,9 @@
             root = root or os.getcwd()
             for c in self._tcfg, self._ucfg, self._ocfg:
                 for n, p in c.items('paths'):
+                    # Ignore sub-options.
+                    if ':' in n:
+                        continue
                     if not p:
                         continue
                     if '%%' in p:
@@ -1135,12 +1153,15 @@
         '''
         return msg
 
-    def develwarn(self, msg, stacklevel=1):
+    def develwarn(self, msg, stacklevel=1, config=None):
         """issue a developer warning message
 
         Use 'stacklevel' to report the offender some layers further up in the
         stack.
         """
+        if not self.configbool('devel', 'all-warnings'):
+            if config is not None and not self.configbool('devel', config):
+                return
         msg = 'devel-warn: ' + msg
         stacklevel += 1 # get in develwarn
         if self.tracebackflag:
@@ -1166,7 +1187,7 @@
             return
         msg += ("\n(compatibility will be dropped after Mercurial-%s,"
                 " update your code.)") % version
-        self.develwarn(msg, stacklevel=2)
+        self.develwarn(msg, stacklevel=2, config='deprec-warn')
 
 class paths(dict):
     """Represents a collection of paths and their configs.
@@ -1260,6 +1281,10 @@
 
     return str(u)
 
+@pathsuboption('pushrev', 'pushrev')
+def pushrevpathoption(ui, path, value):
+    return value
+
 class path(object):
     """Represents an individual path and its configuration."""
 
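
With the ui changes above, the HTTP password database now hangs off the ui object: it is shared by ui.copy(), carried over to derived uis, and dropped again by resetstate(). A rough sketch of the effect on url.opener(), with the behaviour described here inferred from the passwordmgr changes later in this patch:

  from mercurial import ui as uimod, url as urlmod

  u = uimod.ui()
  # the first opener prompts (or reads auth config) and records the result
  # in u.httppasswordmgrdb via passwordmgr.add_password()
  opener1 = urlmod.opener(u)
  # a second opener built from the same ui reuses the stored credentials,
  # so the user is not prompted again within the same command
  opener2 = urlmod.opener(u)
  # u.resetstate() drops the database between commands, as a long-running
  # server process would
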
--- a/mercurial/url.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/url.py	Mon Jul 18 23:28:14 2016 -0500
@@ -10,7 +10,6 @@
 from __future__ import absolute_import
 
 import base64
-import httplib
 import os
 import socket
 
@@ -22,19 +21,22 @@
     sslutil,
     util,
 )
+
+httplib = util.httplib
 stringio = util.stringio
-
 urlerr = util.urlerr
 urlreq = util.urlreq
 
-class passwordmgr(urlreq.httppasswordmgrwithdefaultrealm):
-    def __init__(self, ui):
-        urlreq.httppasswordmgrwithdefaultrealm.__init__(self)
+class passwordmgr(object):
+    def __init__(self, ui, passwddb):
         self.ui = ui
+        self.passwddb = passwddb
+
+    def add_password(self, realm, uri, user, passwd):
+        return self.passwddb.add_password(realm, uri, user, passwd)
 
     def find_user_password(self, realm, authuri):
-        authinfo = urlreq.httppasswordmgrwithdefaultrealm.find_user_password(
-            self, realm, authuri)
+        authinfo = self.passwddb.find_user_password(realm, authuri)
         user, passwd = authinfo
         if user and passwd:
             self._writedebug(user, passwd)
@@ -64,7 +66,7 @@
             if not passwd:
                 passwd = self.ui.getpass()
 
-        self.add_password(realm, authuri, user, passwd)
+        self.passwddb.add_password(realm, authuri, user, passwd)
         self._writedebug(user, passwd)
         return (user, passwd)
 
@@ -73,8 +75,7 @@
         self.ui.debug(msg % (user, passwd and '*' * len(passwd) or 'not set'))
 
     def find_stored_password(self, authuri):
-        return urlreq.httppasswordmgrwithdefaultrealm.find_user_password(
-            self, None, authuri)
+        return self.passwddb.find_user_password(None, authuri)
 
 class proxyhandler(urlreq.proxyhandler):
     def __init__(self, ui):
@@ -184,17 +185,6 @@
     # must be able to send big bundle as stream.
     send = _gen_sendfile(keepalive.HTTPConnection.send)
 
-    def connect(self):
-        if has_https and self.realhostport: # use CONNECT proxy
-            self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-            self.sock.connect((self.host, self.port))
-            if _generic_proxytunnel(self):
-                # we do not support client X.509 certificates
-                self.sock = sslutil.wrapsocket(self.sock, None, None, None,
-                                               serverhostname=self.host)
-        else:
-            keepalive.HTTPConnection.connect(self)
-
     def getresponse(self):
         proxyres = getattr(self, 'proxyres', None)
         if proxyres:
@@ -354,16 +344,17 @@
                 _generic_proxytunnel(self)
                 host = self.realhostport.rsplit(':', 1)[0]
             self.sock = sslutil.wrapsocket(
-                self.sock, self.key_file, self.cert_file, serverhostname=host,
-                **sslutil.sslkwargs(self.ui, host))
-            sslutil.validator(self.ui, host)(self.sock)
+                self.sock, self.key_file, self.cert_file, ui=self.ui,
+                serverhostname=host)
+            sslutil.validatesocket(self.sock)
 
     class httpshandler(keepalive.KeepAliveHandler, urlreq.httpshandler):
         def __init__(self, ui):
             keepalive.KeepAliveHandler.__init__(self)
             urlreq.httpshandler.__init__(self)
             self.ui = ui
-            self.pwmgr = passwordmgr(self.ui)
+            self.pwmgr = passwordmgr(self.ui,
+                                     self.ui.httppasswordmgrdb)
 
         def _start_transaction(self, h, req):
             _generic_start_transaction(self, h, req)
@@ -477,7 +468,11 @@
     '''
     # experimental config: ui.usehttp2
     if ui.configbool('ui', 'usehttp2', False):
-        handlers = [httpconnectionmod.http2handler(ui, passwordmgr(ui))]
+        handlers = [
+            httpconnectionmod.http2handler(
+                ui,
+                passwordmgr(ui, ui.httppasswordmgrdb))
+        ]
     else:
         handlers = [httphandler()]
         if has_https:
@@ -485,10 +480,12 @@
 
     handlers.append(proxyhandler(ui))
 
-    passmgr = passwordmgr(ui)
+    passmgr = passwordmgr(ui, ui.httppasswordmgrdb)
     if authinfo is not None:
-        passmgr.add_password(*authinfo)
-        user, passwd = authinfo[2:4]
+        realm, uris, user, passwd = authinfo
+        saveduser, savedpass = passmgr.find_stored_password(uris[0])
+        if user != saveduser or passwd:
+            passmgr.add_password(realm, uris, user, passwd)
         ui.debug('http auth: user %s, password %s\n' %
                  (user, passwd and '*' * len(passwd) or 'not set'))
 
@@ -497,8 +494,20 @@
     handlers.extend([h(ui, passmgr) for h in handlerfuncs])
     opener = urlreq.buildopener(*handlers)
 
-    # 1.0 here is the _protocol_ version
-    opener.addheaders = [('User-agent', 'mercurial/proto-1.0')]
+    # The user agent should *NOT* be used by servers for e.g.
+    # protocol detection or feature negotiation: there are other
+    # facilities for that.
+    #
+    # "mercurial/proto-1.0" was the original user agent string and
+    # exists for backwards compatibility reasons.
+    #
+    # The "(Mercurial %s)" string contains the distribution
+    # name and version. Other client implementations should choose their
+    # own distribution name. Since servers should not be using the user
+    # agent string for anything, clients should be able to define whatever
+    # user agent they deem appropriate.
+    agent = 'mercurial/proto-1.0 (Mercurial %s)' % util.version()
+    opener.addheaders = [('User-agent', agent)]
     opener.addheaders.append(('Accept', 'application/mercurial-0.1'))
     return opener
 
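
The comment block above replaces the bare 'mercurial/proto-1.0' user agent with one that also carries the distribution name and version, while insisting that servers must not use it for protocol or feature detection. A quick way to see what the opener now advertises (the printed version is illustrative; it comes from util.version()):

  from mercurial import ui as uimod, url as urlmod

  opener = urlmod.opener(uimod.ui())
  print(dict(opener.addheaders)['User-agent'])
  # e.g. "mercurial/proto-1.0 (Mercurial 3.9-rc)" -- the exact version
  # string differs per build
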
--- a/mercurial/util.h	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/util.h	Mon Jul 18 23:28:14 2016 -0500
@@ -8,6 +8,8 @@
 #ifndef _HG_UTIL_H_
 #define _HG_UTIL_H_
 
+#include "compat.h"
+
 #if PY_MAJOR_VERSION >= 3
 
 #define IS_PY3K
@@ -57,40 +59,6 @@
 
 #endif /* PY_MAJOR_VERSION */
 
-#ifdef _WIN32
-#ifdef _MSC_VER
-/* msvc 6.0 has problems */
-#define inline __inline
-typedef signed char int8_t;
-typedef short int16_t;
-typedef long int32_t;
-typedef __int64 int64_t;
-typedef unsigned char uint8_t;
-typedef unsigned short uint16_t;
-typedef unsigned long uint32_t;
-typedef unsigned __int64 uint64_t;
-#else
-#include <stdint.h>
-#endif
-#else
-/* not windows */
-#include <sys/types.h>
-#if defined __BEOS__ && !defined __HAIKU__
-#include <ByteOrder.h>
-#else
-#include <arpa/inet.h>
-#endif
-#include <inttypes.h>
-#endif
-
-#if defined __hpux || defined __SUNPRO_C || defined _AIX
-#define inline
-#endif
-
-#ifdef __linux
-#define inline __inline
-#endif
-
 typedef struct {
 	PyObject_HEAD
 	char state;
@@ -102,53 +70,6 @@
 extern PyTypeObject dirstateTupleType;
 #define dirstate_tuple_check(op) (Py_TYPE(op) == &dirstateTupleType)
 
-static inline uint32_t getbe32(const char *c)
-{
-	const unsigned char *d = (const unsigned char *)c;
-
-	return ((d[0] << 24) |
-		(d[1] << 16) |
-		(d[2] << 8) |
-		(d[3]));
-}
-
-static inline int16_t getbeint16(const char *c)
-{
-	const unsigned char *d = (const unsigned char *)c;
-
-	return ((d[0] << 8) |
-		(d[1]));
-}
-
-static inline uint16_t getbeuint16(const char *c)
-{
-	const unsigned char *d = (const unsigned char *)c;
-
-	return ((d[0] << 8) |
-		(d[1]));
-}
-
-static inline void putbe32(uint32_t x, char *c)
-{
-	c[0] = (x >> 24) & 0xff;
-	c[1] = (x >> 16) & 0xff;
-	c[2] = (x >> 8) & 0xff;
-	c[3] = (x) & 0xff;
-}
-
-static inline double getbefloat64(const char *c)
-{
-	const unsigned char *d = (const unsigned char *)c;
-	double ret;
-	int i;
-	uint64_t t = 0;
-	for (i = 0; i < 8; i++) {
-		t = (t<<8) + d[i];
-	}
-	memcpy(&ret, &t, sizeof(t));
-	return ret;
-}
-
 /* This should be kept in sync with normcasespecs in encoding.py. */
 enum normcase_spec {
 	NORMCASE_LOWER = -1,
--- a/mercurial/util.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/util.py	Mon Jul 18 23:28:14 2016 -0500
@@ -47,11 +47,17 @@
 
 for attr in (
     'empty',
+    'httplib',
+    'httpserver',
+    'pickle',
     'queue',
     'urlerr',
+    'urlparse',
     # we do import urlreq, but we do it outside the loop
     #'urlreq',
     'stringio',
+    'socketserver',
+    'xmlrpclib',
 ):
     globals()[attr] = getattr(pycompat, attr)
 
@@ -63,11 +69,9 @@
 else:
     from . import posix as platform
 
-md5 = hashlib.md5
-sha1 = hashlib.sha1
-sha512 = hashlib.sha512
 _ = i18n._
 
+bindunixsocket = platform.bindunixsocket
 cachestat = platform.cachestat
 checkexec = platform.checkexec
 checklink = platform.checklink
@@ -136,9 +140,9 @@
     return getattr(thing, attr, _notset) is not _notset
 
 DIGESTS = {
-    'md5': md5,
-    'sha1': sha1,
-    'sha512': sha512,
+    'md5': hashlib.md5,
+    'sha1': hashlib.sha1,
+    'sha512': hashlib.sha512,
 }
 # List of digest types from strongest to weakest
 DIGESTS_BY_STRENGTH = ['sha512', 'sha1', 'md5']
@@ -523,6 +527,10 @@
     def insert(self, index, key, val):
         self._list.insert(index, key)
         dict.__setitem__(self, key, val)
+    def __repr__(self):
+        if not self:
+            return '%s()' % self.__class__.__name__
+        return '%s(%r)' % (self.__class__.__name__, self.items())
 
 class _lrucachenode(object):
     """A node in a doubly linked list.
@@ -1010,10 +1018,21 @@
 
     return check
 
-def copyfile(src, dest, hardlink=False, copystat=False):
+def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
     '''copy a file, preserving mode and optionally other stat info like
-    atime/mtime'''
+    atime/mtime
+
+    The checkambig argument is used with filestat, and is useful only
+    if the destination file is guarded by a lock (e.g. repo.lock or
+    repo.wlock).
+
+    copystat and checkambig are mutually exclusive.
+    '''
+    assert not (copystat and checkambig)
+    oldstat = None
     if os.path.lexists(dest):
+        if checkambig:
+            oldstat = checkambig and filestat(dest)
         unlink(dest)
     # hardlinks are problematic on CIFS, quietly ignore this flag
     # until we find a way to work around it cleanly (issue4546)
@@ -1035,6 +1054,12 @@
                 shutil.copystat(src, dest)
             else:
                 shutil.copymode(src, dest)
+                if oldstat and oldstat.stat:
+                    newstat = filestat(dest)
+                    if newstat.isambig(oldstat):
+                        # stat of copied file is ambiguous to original one
+                        advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
+                        os.utime(dest, (advanced, advanced))
         except shutil.Error as inst:
             raise Abort(str(inst))
 
@@ -1381,6 +1406,72 @@
         raise
     return temp
 
+class filestat(object):
+    """help to exactly detect change of a file
+
+    'stat' attribute is the result of 'os.stat()' if the specified
+    'path' exists; otherwise it is None. This saves callers of this
+    class a separate preliminary 'exists()' check.
+    """
+    def __init__(self, path):
+        try:
+            self.stat = os.stat(path)
+        except OSError as err:
+            if err.errno != errno.ENOENT:
+                raise
+            self.stat = None
+
+    __hash__ = object.__hash__
+
+    def __eq__(self, old):
+        try:
+            # if ambiguity between stat of new and old file is
+            # avoided, comparison of size, ctime and mtime is enough
+            # to exactly detect change of a file regardless of platform
+            return (self.stat.st_size == old.stat.st_size and
+                    self.stat.st_ctime == old.stat.st_ctime and
+                    self.stat.st_mtime == old.stat.st_mtime)
+        except AttributeError:
+            return False
+
+    def isambig(self, old):
+        """Examine whether new (= self) stat is ambiguous against old one
+
+        "S[N]" below means stat of a file at N-th change:
+
+        - S[n-1].ctime  < S[n].ctime: can detect change of a file
+        - S[n-1].ctime == S[n].ctime
+          - S[n-1].ctime  < S[n].mtime: means natural advancing (*1)
+          - S[n-1].ctime == S[n].mtime: is ambiguous (*2)
+          - S[n-1].ctime  > S[n].mtime: never occurs naturally (don't care)
+        - S[n-1].ctime  > S[n].ctime: never occurs naturally (don't care)
+
+        Case (*2) above means that a file was changed twice or more
+        within the same second (= S[n-1].ctime), so comparing
+        timestamps is ambiguous.
+
+        The basic idea to avoid such ambiguity is "advance mtime by 1
+        sec if the timestamp is ambiguous".
+
+        But advancing mtime only in case (*2) doesn't work as
+        expected, because a naturally advanced S[n].mtime in case (*1)
+        might equal a manually advanced S[n-1 or earlier].mtime.
+
+        Therefore, all "S[n-1].ctime == S[n].ctime" cases should be
+        treated as ambiguous regardless of mtime, so that collisions
+        between such mtimes are not overlooked.
+
+        Advancing mtime "if isambig(oldstat)" ensures "S[n-1].mtime !=
+        S[n].mtime", even if size of a file isn't changed.
+        """
+        try:
+            return (self.stat.st_ctime == old.stat.st_ctime)
+        except AttributeError:
+            return False
+
+    def __ne__(self, other):
+        return not self == other
+
 class atomictempfile(object):
     '''writable file object that atomically updates a file
 
@@ -1389,14 +1480,20 @@
     the temporary copy to the original name, making the changes
     visible. If the object is destroyed without being closed, all your
     writes are discarded.
+
+    The checkambig argument of the constructor is used with filestat,
+    and is useful only if the target file is guarded by a lock
+    (e.g. repo.lock or repo.wlock).
     '''
-    def __init__(self, name, mode='w+b', createmode=None):
+    def __init__(self, name, mode='w+b', createmode=None, checkambig=False):
         self.__name = name      # permanent name
         self._tempname = mktempcopy(name, emptyok=('w' in mode),
                                     createmode=createmode)
         self._fp = posixfile(self._tempname, mode)
+        self._checkambig = checkambig
 
         # delegated methods
+        self.read = self._fp.read
         self.write = self._fp.write
         self.seek = self._fp.seek
         self.tell = self._fp.tell
@@ -1405,7 +1502,17 @@
     def close(self):
         if not self._fp.closed:
             self._fp.close()
-            rename(self._tempname, localpath(self.__name))
+            filename = localpath(self.__name)
+            oldstat = self._checkambig and filestat(filename)
+            if oldstat and oldstat.stat:
+                rename(self._tempname, filename)
+                newstat = filestat(filename)
+                if newstat.isambig(oldstat):
+                    # stat of changed file is ambiguous to original one
+                    advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
+                    os.utime(filename, (advanced, advanced))
+            else:
+                rename(self._tempname, filename)
 
     def discard(self):
         if not self._fp.closed:
@@ -1419,6 +1526,15 @@
         if safehasattr(self, '_fp'): # constructor actually did something
             self.discard()
 
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exctype, excvalue, traceback):
+        if exctype is not None:
+            self.discard()
+        else:
+            self.close()
+
 def makedirs(name, mode=None, notindexed=False):
     """recursive directory creation with parent mode inheritance
 
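
The new filestat class above exists to catch the case where a file is rewritten within the same second, so a size/ctime/mtime comparison alone could miss the change; copyfile() and atomictempfile both answer an ambiguous stat by pushing mtime one second forward. A condensed sketch of that check-and-fix, using a hypothetical file name:

  import os
  from mercurial import util

  target = 'somefile'                  # hypothetical, guarded by a lock
  oldstat = util.filestat(target)
  with open(target, 'w') as fp:        # rewrite the file in place
      fp.write('new content\n')
  newstat = util.filestat(target)
  if newstat.isambig(oldstat):
      # same ctime as the old file: nudge mtime forward so a later
      # filestat.__eq__ comparison cannot mistake this for "unchanged"
      advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
      os.utime(target, (advanced, advanced))
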
--- a/mercurial/windows.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/windows.py	Mon Jul 18 23:28:14 2016 -0500
@@ -471,3 +471,6 @@
         chunks.append(s)
 
     return ''.join(chunks)
+
+def bindunixsocket(sock, path):
+    raise NotImplementedError('unsupported platform')
--- a/mercurial/wireproto.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/mercurial/wireproto.py	Mon Jul 18 23:28:14 2016 -0500
@@ -7,6 +7,7 @@
 
 from __future__ import absolute_import
 
+import hashlib
 import itertools
 import os
 import sys
@@ -97,7 +98,7 @@
             batchablefn = getattr(mtd, 'batchable', None)
             if batchablefn is not None:
                 batchable = batchablefn(mtd.im_self, *args, **opts)
-                encargsorres, encresref = batchable.next()
+                encargsorres, encresref = next(batchable)
                 if encresref:
                     req.append((name, encargsorres,))
                     rsp.append((batchable, encresref, resref,))
@@ -115,7 +116,7 @@
         for encres, r in zip(encresults, rsp):
             batchable, encresref, resref = r
             encresref.set(encres)
-            resref.set(batchable.next())
+            resref.set(next(batchable))
 
 class remoteiterbatcher(peer.iterbatcher):
     def __init__(self, remote):
@@ -138,7 +139,7 @@
         for name, args, opts, resref in self.calls:
             mtd = getattr(self._remote, name)
             batchable = mtd.batchable(mtd.im_self, *args, **opts)
-            encargsorres, encresref = batchable.next()
+            encargsorres, encresref = next(batchable)
             assert encresref
             req.append((name, encargsorres))
             rsp.append((batchable, encresref))
@@ -150,7 +151,7 @@
         for (batchable, encresref), encres in itertools.izip(
                 self._rsp, self._resultiter):
             encresref.set(encres)
-            yield batchable.next()
+            yield next(batchable)
 
 # Forward a couple of names from peer to make wireproto interactions
 # slightly more sensible.
@@ -231,17 +232,19 @@
                             for k, v in argsdict.iteritems())
             cmds.append('%s %s' % (op, args))
         rsp = self._callstream("batch", cmds=';'.join(cmds))
-        # TODO this response parsing is probably suboptimal for large
-        # batches with large responses.
-        work = rsp.read(1024)
-        chunk = work
+        chunk = rsp.read(1024)
+        work = [chunk]
         while chunk:
-            while ';' in work:
-                one, work = work.split(';', 1)
+            while ';' not in chunk and chunk:
+                chunk = rsp.read(1024)
+                work.append(chunk)
+            merged = ''.join(work)
+            while ';' in merged:
+                one, merged = merged.split(';', 1)
                 yield unescapearg(one)
             chunk = rsp.read(1024)
-            work += chunk
-        yield unescapearg(work)
+            work = [merged, chunk]
+        yield unescapearg(''.join(work))
 
     def _submitone(self, op, args):
         return self._call(op, **args)
@@ -408,7 +411,7 @@
 
         if heads != ['force'] and self.capable('unbundlehash'):
             heads = encodelist(['hashed',
-                                util.sha1(''.join(sorted(heads))).digest()])
+                                hashlib.sha1(''.join(sorted(heads))).digest()])
         else:
             heads = encodelist(heads)
 
@@ -533,8 +536,17 @@
     def __init__(self, message):
         self.message = message
 
+def getdispatchrepo(repo, proto, command):
+    """Obtain the repo used for processing wire protocol commands.
+
+    The intent of this function is to serve as a monkeypatch point for
+    extensions that need commands to operate on different repo views under
+    specialized circumstances.
+    """
+    return repo.filtered('served')
+
 def dispatch(repo, proto, command):
-    repo = repo.filtered("served")
+    repo = getdispatchrepo(repo, proto, command)
     func, spec = commands[command]
     args = proto.getargs(spec)
     return func(repo, proto, *args)
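
The _submitbatch() rewrite above stops concatenating the entire batch response into one growing string and instead splits complete ';'-separated results out of each 1024-byte chunk as it arrives. The same loop, lifted into a standalone generator with unescaping stubbed out, behaves like this:

  import io

  def iterbatchresults(rsp, unescapearg=lambda s: s):
      # mirror of the loop above: read 1024-byte chunks and keep only the
      # unsplit tail between reads, instead of the whole response so far
      chunk = rsp.read(1024)
      work = [chunk]
      while chunk:
          while ';' not in chunk and chunk:
              chunk = rsp.read(1024)
              work.append(chunk)
          merged = ''.join(work)
          while ';' in merged:
              one, merged = merged.split(';', 1)
              yield unescapearg(one)
          chunk = rsp.read(1024)
          work = [merged, chunk]
      yield unescapearg(''.join(work))

  print(list(iterbatchresults(io.BytesIO('a;b;c'))))   # ['a', 'b', 'c']
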
--- a/setup.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/setup.py	Mon Jul 18 23:28:14 2016 -0500
@@ -262,7 +262,8 @@
 
 
 class hgdist(Distribution):
-    pure = ispypy
+    pure = False
+    cffi = ispypy
 
     global_options = Distribution.global_options + \
                      [('pure', None, "use pure (slow) Python "
@@ -316,6 +317,13 @@
 
         if self.distribution.pure:
             self.distribution.ext_modules = []
+        elif self.distribution.cffi:
+            exts = []
+            # cffi modules go here
+            if sys.platform == 'darwin':
+                import setup_osutil_cffi
+                exts.append(setup_osutil_cffi.ffi.distutils_extension())
+            self.distribution.ext_modules = exts
         else:
             h = os.path.join(get_python_inc(), 'Python.h')
             if not os.path.exists(h):
@@ -536,7 +544,9 @@
             'hgext.fsmonitor.pywatchman', 'hgext.highlight',
             'hgext.largefiles', 'hgext.zeroconf', 'hgext3rd']
 
-common_depends = ['mercurial/util.h']
+common_depends = ['mercurial/bitmanipulation.h',
+                  'mercurial/compat.h',
+                  'mercurial/util.h']
 
 osutil_ldflags = []
 
@@ -546,8 +556,9 @@
 extmodules = [
     Extension('mercurial.base85', ['mercurial/base85.c'],
               depends=common_depends),
-    Extension('mercurial.bdiff', ['mercurial/bdiff.c'],
-              depends=common_depends),
+    Extension('mercurial.bdiff', ['mercurial/bdiff.c',
+                                  'mercurial/bdiff_module.c'],
+              depends=common_depends + ['mercurial/bdiff.h']),
     Extension('mercurial.diffhelpers', ['mercurial/diffhelpers.c'],
               depends=common_depends),
     Extension('mercurial.mpatch', ['mercurial/mpatch.c'],
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/setup_osutil_cffi.py	Mon Jul 18 23:28:14 2016 -0500
@@ -0,0 +1,101 @@
+from __future__ import absolute_import
+
+import cffi
+
+ffi = cffi.FFI()
+ffi.set_source("_osutil_cffi", """
+#include <sys/attr.h>
+#include <sys/vnode.h>
+#include <unistd.h>
+#include <fcntl.h>
+#include <time.h>
+
+typedef struct val_attrs {
+    uint32_t          length;
+    attribute_set_t   returned;
+    attrreference_t   name_info;
+    fsobj_type_t      obj_type;
+    struct timespec   mtime;
+    uint32_t          accessmask;
+    off_t             datalength;
+} __attribute__((aligned(4), packed)) val_attrs_t;
+""", include_dirs=['mercurial'])
+ffi.cdef('''
+
+typedef uint32_t attrgroup_t;
+
+typedef struct attrlist {
+    uint16_t     bitmapcount; /* number of attr. bit sets in list */
+    uint16_t   reserved;    /* (to maintain 4-byte alignment) */
+    attrgroup_t commonattr;  /* common attribute group */
+    attrgroup_t volattr;     /* volume attribute group */
+    attrgroup_t dirattr;     /* directory attribute group */
+    attrgroup_t fileattr;    /* file attribute group */
+    attrgroup_t forkattr;    /* fork attribute group */
+    ...;
+};
+
+typedef struct attribute_set {
+    ...;
+} attribute_set_t;
+
+typedef struct attrreference {
+    int attr_dataoffset;
+    int attr_length;
+    ...;
+} attrreference_t;
+
+typedef struct val_attrs {
+    uint32_t          length;
+    attribute_set_t   returned;
+    attrreference_t   name_info;
+    uint32_t          obj_type;
+    struct timespec   mtime;
+    uint32_t          accessmask;
+    int               datalength;
+    ...;
+} val_attrs_t;
+
+/* the exact layout of the above struct will be figured out during build time */
+
+typedef int ... time_t;
+typedef int ... off_t;
+
+typedef struct timespec {
+    time_t tv_sec;
+    ...;
+};
+
+int getattrlist(const char* path, struct attrlist * attrList, void * attrBuf,
+                size_t attrBufSize, unsigned int options);
+
+int getattrlistbulk(int dirfd, struct attrlist * attrList, void * attrBuf,
+                    size_t attrBufSize, uint64_t options);
+
+#define ATTR_BIT_MAP_COUNT ...
+#define ATTR_CMN_NAME ...
+#define ATTR_CMN_OBJTYPE ...
+#define ATTR_CMN_MODTIME ...
+#define ATTR_CMN_ACCESSMASK ...
+#define ATTR_CMN_ERROR ...
+#define ATTR_CMN_RETURNED_ATTRS ...
+#define ATTR_FILE_DATALENGTH ...
+
+#define VREG ...
+#define VDIR ...
+#define VLNK ...
+#define VBLK ...
+#define VCHR ...
+#define VFIFO ...
+#define VSOCK ...
+
+#define S_IFMT ...
+
+int open(const char *path, int oflag, int perm);
+int close(int);
+
+#define O_RDONLY ...
+''')
+
+if __name__ == '__main__':
+    ffi.compile()
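
setup_osutil_cffi.py above uses cffi's out-of-line API mode: set_source() carries real C that is compiled at build time, while cdef() declarations may leave field layouts, integer sizes and constants as '...' for the compiler to resolve. A tiny self-contained example of the same pattern, under an assumed module name _example_cffi:

  import cffi

  ffi = cffi.FFI()
  # real C source, compiled into the _example_cffi extension module
  ffi.set_source('_example_cffi', '#include <sys/stat.h>')
  # declarations with '...' are completed by the C compiler at build time
  ffi.cdef('''
      typedef int... mode_t;
      #define S_IFMT ...
      #define S_IFDIR ...
  ''')

  if __name__ == '__main__':
      ffi.compile()

Once built, 'from _example_cffi import ffi, lib' exposes the resolved values, e.g. lib.S_IFMT.
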
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/check-perf-code.py	Mon Jul 18 23:28:14 2016 -0500
@@ -0,0 +1,73 @@
+#!/usr/bin/env python
+#
+# check-perf-code - (historical) portability checker for contrib/perf.py
+
+from __future__ import absolute_import
+
+import os
+import sys
+
+# write static check patterns here
+perfpypats = [
+  [
+  ],
+  # warnings
+  [
+  ]
+]
+
+def modulewhitelist(names):
+    replacement = [('.py', ''), ('.c', ''), # trim suffix
+                   ('mercurial%s' % (os.sep), ''), # trim "mercurial/" path
+                  ]
+    ignored = set(['__init__'])
+    modules = {}
+
+    # convert from file name to module name, and count # of appearances
+    for name in names:
+        name = name.strip()
+        for old, new in replacement:
+            name = name.replace(old, new)
+        if name not in ignored:
+            modules[name] = modules.get(name, 0) + 1
+
+    # list module names which appear multiple times
+    whitelist = []
+    for name, count in modules.items():
+        if count > 1:
+            whitelist.append(name)
+
+    return whitelist
+
+if __name__ == "__main__":
+    # in this case, it is assumed that result of "hg files" at
+    # multiple revisions is given via stdin
+    whitelist = modulewhitelist(sys.stdin)
+    assert whitelist, "module whitelist is empty"
+
+    # build up module whitelist check from file names given at runtime
+    perfpypats[0].append(
+        # for simplicity, this matching pattern assumes that modules are
+        # imported from the "mercurial" package in the style shown below
+        #
+        #    from mercurial import (
+        #        foo,
+        #        bar,
+        #        baz
+        #    )
+        ((r'from mercurial import [(][a-z0-9, \n#]*\n(?! *%s,|^[ #]*\n|[)])'
+          % ',| *'.join(whitelist)),
+         "import newer module separately in try clause for early Mercurial"
+         ))
+
+    # import contrib/check-code.py as checkcode
+    assert 'RUNTESTDIR' in os.environ, "use check-perf-code.py in *.t script"
+    contribpath = os.path.join(os.environ['RUNTESTDIR'], '..', 'contrib')
+    sys.path.insert(0, contribpath)
+    checkcode = __import__('check-code')
+
+    # register perf.py specific entry with "checks" in check-code.py
+    checkcode.checks.append(('perf.py', r'contrib/perf.py$', '',
+                             checkcode.pyfilters, perfpypats))
+
+    sys.exit(checkcode.main())
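
check-perf-code.py builds its module whitelist from "hg files" output taken at several revisions: only modules that show up for more than one revision qualify. A standalone sketch of that counting step, with invented file names:

  import collections

  names = ['mercurial/util.py', 'mercurial/node.py',      # revision A
           'mercurial/util.py', 'mercurial/scmutil.py']   # revision B
  counts = collections.Counter(
      n.replace('mercurial/', '').replace('.py', '') for n in names)
  print([mod for mod, count in counts.items() if count > 1])   # ['util']
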
--- a/tests/dumbhttp.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/dumbhttp.py	Mon Jul 18 23:28:14 2016 -0500
@@ -6,24 +6,24 @@
 Small and dumb HTTP server for use in tests.
 """
 
-import BaseHTTPServer
-import SimpleHTTPServer
 import optparse
 import signal
 import sys
 
 from mercurial import (
     cmdutil,
+    util,
 )
 
+httpserver = util.httpserver
 OptionParser = optparse.OptionParser
 
 class simplehttpservice(object):
     def __init__(self, host, port):
         self.address = (host, port)
     def init(self):
-        self.httpd = BaseHTTPServer.HTTPServer(
-            self.address, SimpleHTTPServer.SimpleHTTPRequestHandler)
+        self.httpd = httpserver.httpserver(
+            self.address, httpserver.simplehttprequesthandler)
     def run(self):
         self.httpd.serve_forever()
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/dummysmtpd.py	Mon Jul 18 23:28:14 2016 -0500
@@ -0,0 +1,82 @@
+#!/usr/bin/env python
+
+"""dummy SMTP server for use in tests"""
+
+from __future__ import absolute_import
+
+import asyncore
+import optparse
+import smtpd
+import ssl
+import sys
+
+from mercurial import (
+    cmdutil,
+    sslutil,
+    ui as uimod,
+)
+
+def log(msg):
+    sys.stdout.write(msg)
+    sys.stdout.flush()
+
+class dummysmtpserver(smtpd.SMTPServer):
+    def __init__(self, localaddr):
+        smtpd.SMTPServer.__init__(self, localaddr, remoteaddr=None)
+
+    def process_message(self, peer, mailfrom, rcpttos, data):
+        log('%s from=%s to=%s\n' % (peer[0], mailfrom, ', '.join(rcpttos)))
+
+class dummysmtpsecureserver(dummysmtpserver):
+    def __init__(self, localaddr, certfile):
+        dummysmtpserver.__init__(self, localaddr)
+        self._certfile = certfile
+
+    def handle_accept(self):
+        pair = self.accept()
+        if not pair:
+            return
+        conn, addr = pair
+        ui = uimod.ui()
+        try:
+            # wrap_socket() would block, but we don't care
+            conn = sslutil.wrapserversocket(conn, ui, certfile=self._certfile)
+        except ssl.SSLError:
+            log('%s ssl error\n' % addr[0])
+            conn.close()
+            return
+        smtpd.SMTPChannel(self, conn, addr)
+
+def run():
+    try:
+        asyncore.loop()
+    except KeyboardInterrupt:
+        pass
+
+def main():
+    op = optparse.OptionParser()
+    op.add_option('-d', '--daemon', action='store_true')
+    op.add_option('--daemon-postexec', action='append')
+    op.add_option('-p', '--port', type=int, default=8025)
+    op.add_option('-a', '--address', default='localhost')
+    op.add_option('--pid-file', metavar='FILE')
+    op.add_option('--tls', choices=['none', 'smtps'], default='none')
+    op.add_option('--certificate', metavar='FILE')
+
+    opts, args = op.parse_args()
+    if opts.tls == 'smtps' and not opts.certificate:
+        op.error('--certificate must be specified')
+
+    addr = (opts.address, opts.port)
+    def init():
+        if opts.tls == 'none':
+            dummysmtpserver(addr)
+        else:
+            dummysmtpsecureserver(addr, opts.certificate)
+        log('listening at %s:%d\n' % addr)
+
+    cmdutil.service(vars(opts), initfn=init, runfn=run,
+                    runargs=[sys.executable, __file__] + sys.argv[1:])
+
+if __name__ == '__main__':
+    main()
--- a/tests/dummyssh	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/dummyssh	Mon Jul 18 23:28:14 2016 -0500
@@ -1,7 +1,9 @@
 #!/usr/bin/env python
 
+from __future__ import absolute_import
+
+import os
 import sys
-import os
 
 os.chdir(os.getenv('TESTTMP'))
 
--- a/tests/f	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/f	Mon Jul 18 23:28:14 2016 -0500
@@ -23,7 +23,14 @@
   md5sum.py
 """
 
-import sys, os, errno, re, glob, optparse
+from __future__ import absolute_import
+
+import glob
+import hashlib
+import optparse
+import os
+import re
+import sys
 
 def visit(opts, filenames, outfile):
     """Process filenames in the way specified in opts, writing output to
@@ -74,17 +81,11 @@
                 else:
                     facts.append('older than %s' % opts.newer)
         if opts.md5 and content is not None:
-            try:
-                from hashlib import md5
-            except ImportError:
-                from md5 import md5
-            facts.append('md5=%s' % md5(content).hexdigest()[:opts.bytes])
+            h = hashlib.md5(content)
+            facts.append('md5=%s' % h.hexdigest()[:opts.bytes])
         if opts.sha1 and content is not None:
-            try:
-                from hashlib import sha1
-            except ImportError:
-                from sha import sha as sha1
-            facts.append('sha1=%s' % sha1(content).hexdigest()[:opts.bytes])
+            h = hashlib.sha1(content)
+            facts.append('sha1=%s' % h.hexdigest()[:opts.bytes])
         if isstdin:
             outfile.write(', '.join(facts) + '\n')
         elif facts:
--- a/tests/get-with-headers.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/get-with-headers.py	Mon Jul 18 23:28:14 2016 -0500
@@ -5,11 +5,16 @@
 
 from __future__ import absolute_import, print_function
 
-import httplib
 import json
 import os
 import sys
 
+from mercurial import (
+    util,
+)
+
+httplib = util.httplib
+
 try:
     import msvcrt
     msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/helper-runtests.sh	Mon Jul 18 23:28:14 2016 -0500
@@ -0,0 +1,7 @@
+#
+# Avoid interference from actual test env:
+
+unset HGTEST_JOBS
+unset HGTEST_TIMEOUT
+unset HGTEST_PORT
+unset HGTEST_SHELL
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/helpers-testrepo.sh	Mon Jul 18 23:28:14 2016 -0500
@@ -0,0 +1,19 @@
+# The test-repo is a live hg repository which may have evolution
+# markers created, e.g. when a ~/.hgrc enabled evolution.
+#
+# Tests are run using a custom HGRCPATH, which does not
+# enable evolution markers by default.
+#
+# If test-repo includes evolution markers, and we do not
+# enable evolution markers, hg will occasionally complain
+# when it notices them, which disrupts tests resulting in
+# sporadic failures.
+#
+# Since we aren't performing any write operations on the
+# test-repo, there's no harm in telling hg that we support
+# evolution markers, which is what the following lines
+# for the hgrc file do:
+cat >> $HGRCPATH << EOF
+[experimental]
+evolution=createmarkers
+EOF
--- a/tests/heredoctest.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/heredoctest.py	Mon Jul 18 23:28:14 2016 -0500
@@ -1,4 +1,4 @@
-from __future__ import absolute_import
+from __future__ import absolute_import, print_function
 
 import sys
 
--- a/tests/hghave	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/hghave	Mon Jul 18 23:28:14 2016 -0500
@@ -4,22 +4,23 @@
 prefixed with "no-", the absence of feature is tested.
 """
 
-from __future__ import print_function
+from __future__ import absolute_import, print_function
 
+import hghave
 import optparse
-import os, sys
-import hghave
+import os
+import sys
 
 checks = hghave.checks
 
 def list_features():
-    for name, feature in sorted(checks.iteritems()):
+    for name, feature in sorted(checks.items()):
         desc = feature[1]
         print(name + ':', desc)
 
 def test_features():
     failed = 0
-    for name, feature in checks.iteritems():
+    for name, feature in checks.items():
         check, _ = feature
         try:
             check()
@@ -48,6 +49,7 @@
     sys.path.insert(0, path)
     try:
         import hghaveaddon
+        assert hghaveaddon  # silence pyflakes
     except BaseException as inst:
         sys.stderr.write('failed to import hghaveaddon.py from %r: %s\n'
                          % (path, inst))
--- a/tests/hghave.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/hghave.py	Mon Jul 18 23:28:14 2016 -0500
@@ -104,7 +104,7 @@
 
 @check("baz", "GNU Arch baz client")
 def has_baz():
-    return matchoutput('baz --version 2>&1', r'baz Bazaar version')
+    return matchoutput('baz --version 2>&1', br'baz Bazaar version')
 
 @check("bzr", "Canonical's Bazaar client")
 def has_bzr():
@@ -130,27 +130,27 @@
 
 @check("cvs", "cvs client/server")
 def has_cvs():
-    re = r'Concurrent Versions System.*?server'
+    re = br'Concurrent Versions System.*?server'
     return matchoutput('cvs --version 2>&1', re) and not has_msys()
 
 @check("cvs112", "cvs client/server 1.12.* (not cvsnt)")
 def has_cvs112():
-    re = r'Concurrent Versions System \(CVS\) 1.12.*?server'
+    re = br'Concurrent Versions System \(CVS\) 1.12.*?server'
     return matchoutput('cvs --version 2>&1', re) and not has_msys()
 
 @check("cvsnt", "cvsnt client/server")
 def has_cvsnt():
-    re = r'Concurrent Versions System \(CVSNT\) (\d+).(\d+).*\(client/server\)'
+    re = br'Concurrent Versions System \(CVSNT\) (\d+).(\d+).*\(client/server\)'
     return matchoutput('cvsnt --version 2>&1', re)
 
 @check("darcs", "darcs client")
 def has_darcs():
-    return matchoutput('darcs --version', r'2\.[2-9]', True)
+    return matchoutput('darcs --version', br'2\.[2-9]', True)
 
 @check("mtn", "monotone client (>= 1.0)")
 def has_mtn():
-    return matchoutput('mtn --version', r'monotone', True) and not matchoutput(
-        'mtn --version', r'monotone 0\.', True)
+    return matchoutput('mtn --version', br'monotone', True) and not matchoutput(
+        'mtn --version', br'monotone 0\.', True)
 
 @check("eol-in-paths", "end-of-lines in paths")
 def has_eol_in_paths():
@@ -236,7 +236,7 @@
         return False
 
 def gethgversion():
-    m = matchoutput('hg --version --quiet 2>&1', r'(\d+)\.(\d+)')
+    m = matchoutput('hg --version --quiet 2>&1', br'(\d+)\.(\d+)')
     if not m:
         return (0, 0)
     return (int(m.group(1)), int(m.group(2)))
@@ -267,11 +267,11 @@
 
 @check("gettext", "GNU Gettext (msgfmt)")
 def has_gettext():
-    return matchoutput('msgfmt --version', 'GNU gettext-tools')
+    return matchoutput('msgfmt --version', br'GNU gettext-tools')
 
 @check("git", "git command line client")
 def has_git():
-    return matchoutput('git --version 2>&1', r'^git version')
+    return matchoutput('git --version 2>&1', br'^git version')
 
 @check("docutils", "Docutils text processing library")
 def has_docutils():
@@ -283,7 +283,7 @@
         return False
 
 def getsvnversion():
-    m = matchoutput('svn --version --quiet 2>&1', r'^(\d+)\.(\d+)')
+    m = matchoutput('svn --version --quiet 2>&1', br'^(\d+)\.(\d+)')
     if not m:
         return (0, 0)
     return (int(m.group(1)), int(m.group(2)))
@@ -295,8 +295,8 @@
 
 @check("svn", "subversion client and admin tools")
 def has_svn():
-    return matchoutput('svn --version 2>&1', r'^svn, version') and \
-        matchoutput('svnadmin --version 2>&1', r'^svnadmin, version')
+    return matchoutput('svn --version 2>&1', br'^svn, version') and \
+        matchoutput('svnadmin --version 2>&1', br'^svnadmin, version')
 
 @check("svn-bindings", "subversion python bindings")
 def has_svn_bindings():
@@ -311,8 +311,8 @@
 
 @check("p4", "Perforce server and client")
 def has_p4():
-    return (matchoutput('p4 -V', r'Rev\. P4/') and
-            matchoutput('p4d -V', r'Rev\. P4D/'))
+    return (matchoutput('p4 -V', br'Rev\. P4/') and
+            matchoutput('p4d -V', br'Rev\. P4D/'))
 
 @check("symlink", "symbolic links")
 def has_symlink():
@@ -343,11 +343,11 @@
 
 @check("tla", "GNU Arch tla client")
 def has_tla():
-    return matchoutput('tla --version 2>&1', r'The GNU Arch Revision')
+    return matchoutput('tla --version 2>&1', br'The GNU Arch Revision')
 
 @check("gpg", "gpg client")
 def has_gpg():
-    return matchoutput('gpg --version 2>&1', r'GnuPG')
+    return matchoutput('gpg --version 2>&1', br'GnuPG')
 
 @check("unix-permissions", "unix-style permissions")
 def has_unix_permissions():
@@ -377,7 +377,7 @@
 @check("pyflakes", "Pyflakes python linter")
 def has_pyflakes():
     return matchoutput("sh -c \"echo 'import re' 2>&1 | pyflakes\"",
-                       r"<stdin>:1: 're' imported but unused",
+                       br"<stdin>:1: 're' imported but unused",
                        True)
 
 @check("pygments", "Pygments source highlighting library")
@@ -393,7 +393,7 @@
 def has_outer_repo():
     # failing for other reasons than 'no repo' imply that there is a repo
     return not matchoutput('hg root 2>&1',
-                           r'abort: no repository found', True)
+                           br'abort: no repository found', True)
 
 @check("ssl", "ssl module available")
 def has_ssl():
@@ -415,8 +415,34 @@
 
 @check("defaultcacerts", "can verify SSL certs by system's CA certs store")
 def has_defaultcacerts():
+    from mercurial import sslutil, ui as uimod
+    ui = uimod.ui()
+    return sslutil._defaultcacerts(ui) or sslutil._canloaddefaultcerts
+
+@check("defaultcacertsloaded", "detected presence of loaded system CA certs")
+def has_defaultcacertsloaded():
+    import ssl
+    from mercurial import sslutil, ui as uimod
+
+    if not has_defaultcacerts():
+        return False
+    if not has_sslcontext():
+        return False
+
+    ui = uimod.ui()
+    cafile = sslutil._defaultcacerts(ui)
+    ctx = ssl.create_default_context()
+    if cafile:
+        ctx.load_verify_locations(cafile=cafile)
+    else:
+        ctx.load_default_certs()
+
+    return len(ctx.get_ca_certs()) > 0
+
+@check("tls1.2", "TLS 1.2 protocol support")
+def has_tls1_2():
     from mercurial import sslutil
-    return sslutil._defaultcacerts() != '!'
+    return 'tls1.2' in sslutil.supportprotocols
 
 @check("windows", "Windows")
 def has_windows():
@@ -440,7 +466,7 @@
     try:
         import curses
         curses.COLOR_BLUE
-        return matchoutput('test -x "`which tic`"', '')
+        return matchoutput('test -x "`which tic`"', br'')
     except ImportError:
         return False
 
@@ -459,19 +485,19 @@
 @check("osxpackaging", "OS X packaging tools")
 def has_osxpackaging():
     try:
-        return (matchoutput('pkgbuild', 'Usage: pkgbuild ', ignorestatus=1)
+        return (matchoutput('pkgbuild', br'Usage: pkgbuild ', ignorestatus=1)
                 and matchoutput(
-                    'productbuild', 'Usage: productbuild ',
+                    'productbuild', br'Usage: productbuild ',
                     ignorestatus=1)
-                and matchoutput('lsbom', 'Usage: lsbom', ignorestatus=1)
+                and matchoutput('lsbom', br'Usage: lsbom', ignorestatus=1)
                 and matchoutput(
-                    'xar --help', 'Usage: xar', ignorestatus=1))
+                    'xar --help', br'Usage: xar', ignorestatus=1))
     except ImportError:
         return False
 
 @check("docker", "docker support")
 def has_docker():
-    pat = r'A self-sufficient runtime for'
+    pat = br'A self-sufficient runtime for'
     if matchoutput('docker --help', pat):
         if 'linux' not in sys.platform:
             # TODO: in theory we should be able to test docker-based
@@ -489,11 +515,11 @@
 @check("debhelper", "debian packaging tools")
 def has_debhelper():
     dpkg = matchoutput('dpkg --version',
-                       "Debian `dpkg' package management program")
+                       br"Debian `dpkg' package management program")
     dh = matchoutput('dh --help',
-                     'dh is a part of debhelper.', ignorestatus=True)
+                     br'dh is a part of debhelper.', ignorestatus=True)
     dh_py2 = matchoutput('dh_python2 --help',
-                         'other supported Python versions')
+                         br'other supported Python versions')
     return dpkg and dh and dh_py2
 
 @check("absimport", "absolute_import in __future__")
@@ -502,6 +528,10 @@
     from mercurial import util
     return util.safehasattr(__future__, "absolute_import")
 
+@check("py27+", "running with Python 2.7+")
+def has_python27ornewer():
+    return sys.version_info[0:2] >= (2, 7)
+
 @check("py3k", "running with Python 3.x")
 def has_py3k():
     return 3 == sys.version_info[0]
--- a/tests/md5sum.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/md5sum.py	Mon Jul 18 23:28:14 2016 -0500
@@ -6,12 +6,17 @@
 # of the PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2, which is
 # GPL-compatible.
 
-import sys, os
+from __future__ import absolute_import
+
+import os
+import sys
 
 try:
-    from hashlib import md5
+    import hashlib
+    md5 = hashlib.md5
 except ImportError:
-    from md5 import md5
+    import md5
+    md5 = md5.md5
 
 try:
     import msvcrt
--- a/tests/readlink.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/readlink.py	Mon Jul 18 23:28:14 2016 -0500
@@ -1,13 +1,17 @@
 #!/usr/bin/env python
 
-import errno, os, sys
+from __future__ import absolute_import, print_function
+
+import errno
+import os
+import sys
 
 for f in sys.argv[1:]:
     try:
-        print f, '->', os.readlink(f)
+        print(f, '->', os.readlink(f))
     except OSError as err:
         if err.errno != errno.EINVAL:
             raise
-        print f, 'not a symlink'
+        print(f, 'not a symlink')
 
 sys.exit(0)
--- a/tests/run-tests.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/run-tests.py	Mon Jul 18 23:28:14 2016 -0500
@@ -43,31 +43,38 @@
 # completes fairly quickly, includes both shell and Python scripts, and
 # includes some scripts that run daemon processes.)
 
-from __future__ import print_function
+from __future__ import absolute_import, print_function
 
-from distutils import version
 import difflib
+import distutils.version as version
 import errno
 import json
 import optparse
 import os
+import random
+import re
 import shutil
-import subprocess
 import signal
 import socket
+import subprocess
 import sys
 import tempfile
+import threading
 import time
-import random
-import re
-import threading
-import killdaemons as killmod
+import unittest
+import xml.dom.minidom as minidom
+
 try:
     import Queue as queue
 except ImportError:
     import queue
-from xml.dom import minidom
-import unittest
+
+if os.environ.get('RTUNICODEPEDANTRY', False):
+    try:
+        reload(sys)
+        sys.setdefaultencoding("undefined")
+    except NameError:
+        pass
 
 osenvironb = getattr(os, 'environb', os.environ)
 processlock = threading.Lock()
@@ -207,7 +214,8 @@
     parser.add_option("-k", "--keywords",
         help="run tests matching keywords")
     parser.add_option("-l", "--local", action="store_true",
-        help="shortcut for --with-hg=<testdir>/../hg")
+        help="shortcut for --with-hg=<testdir>/../hg, "
+             "and --with-chg=<testdir>/../contrib/chg/chg if --chg is set")
     parser.add_option("--loop", action="store_true",
         help="loop tests repeatedly")
     parser.add_option("--runs-per-test", type="int", dest="runs_per_test",
@@ -301,11 +309,16 @@
             sys.stderr.write('warning: --with-hg should specify an hg script\n')
     if options.local:
         testdir = os.path.dirname(_bytespath(canonpath(sys.argv[0])))
-        hgbin = os.path.join(os.path.dirname(testdir), b'hg')
-        if os.name != 'nt' and not os.access(hgbin, os.X_OK):
-            parser.error('--local specified, but %r not found or not executable'
-                         % hgbin)
-        options.with_hg = hgbin
+        reporootdir = os.path.dirname(testdir)
+        pathandattrs = [(b'hg', 'with_hg')]
+        if options.chg:
+            pathandattrs.append((b'contrib/chg/chg', 'with_chg'))
+        for relpath, attr in pathandattrs:
+            binpath = os.path.join(reporootdir, relpath)
+            if os.name != 'nt' and not os.access(binpath, os.X_OK):
+                parser.error('--local specified, but %r not found or '
+                             'not executable' % binpath)
+            setattr(options, attr, binpath)
 
     if (options.chg or options.with_chg) and os.name == 'nt':
         parser.error('chg does not work on %s' % os.name)
@@ -468,6 +481,7 @@
         pass
 
 def killdaemons(pidfile):
+    import killdaemons as killmod
     return killmod.killdaemons(pidfile, tryhard=False, remove=True,
                                logfn=vlog)
 
@@ -941,13 +955,18 @@
 
         return result
 
-# This script may want to drop globs from lines matching these patterns on
-# Windows, but check-code.py wants a glob on these lines unconditionally.  Don't
-# warn if that is the case for anything matching these lines.
+# Some glob patterns apply only in some circumstances, so the script
+# might want to remove (glob) annotations that otherwise should be
+# retained.
 checkcodeglobpats = [
+    # On Windows it looks like \ doesn't require a (glob), but we know
+    # better.
     re.compile(br'^pushing to \$TESTTMP/.*[^)]$'),
     re.compile(br'^moving \S+/.*[^)]$'),
-    re.compile(br'^pulling from \$TESTTMP/.*[^)]$')
+    re.compile(br'^pulling from \$TESTTMP/.*[^)]$'),
+    # Not all platforms have 127.0.0.1 as loopback (though most do),
+    # so we always glob that too.
+    re.compile(br'.*127.0.0.1.*$'),
 ]
 
 bchr = chr
@@ -1255,6 +1274,7 @@
                         return True
                 return b'-glob'
             return True
+        el = el.replace(b'127.0.0.1', b'*')
         i, n = 0, len(el)
         res = b''
         while i < n:
@@ -1836,7 +1856,8 @@
                                 tres = {'result': res}
 
                             outcome[tc.name] = tres
-                    jsonout = json.dumps(outcome, sort_keys=True, indent=4)
+                    jsonout = json.dumps(outcome, sort_keys=True, indent=4,
+                                         separators=(',', ': '))
                     fp.writelines(("testreport =", jsonout))
 
             self._runner._checkhglib('Tested')
@@ -2485,7 +2506,8 @@
 
     def _outputcoverage(self):
         """Produce code coverage output."""
-        from coverage import coverage
+        import coverage
+        coverage = coverage.coverage
 
         vlog('# Producing coverage report')
         # chdir is the easiest way to get short, relative paths in the
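
Two of the run-tests.py changes above cooperate: expected-output lines containing 127.0.0.1 are always treated as globs, and the matcher rewrites the literal address to '*' so hosts whose loopback prints differently still match. A rough illustration with fnmatch standing in for the real glob matcher:

  import fnmatch

  expected = 'listening at http://127.0.0.1:8000/'
  actual = 'listening at http://localhost:8000/'
  # run-tests.py rewrites the literal loopback address into a wildcard
  pattern = expected.replace('127.0.0.1', '*')
  print(fnmatch.fnmatch(actual, pattern))   # True
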
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/sslcerts/README	Mon Jul 18 23:28:14 2016 -0500
@@ -0,0 +1,45 @@
+Generate a private key (priv.pem):
+
+  $ openssl genrsa -out priv.pem 2048
+
+Generate 2 self-signed certificates from this key (pub.pem, pub-other.pem):
+
+  $ openssl req -new -x509 -key priv.pem -nodes -sha256 -days 9000 \
+    -out pub.pem -batch -subj '/CN=localhost/emailAddress=hg@localhost/'
+  $ openssl req -new -x509 -key priv.pem -nodes -sha256 -days 9000 \
+    -out pub-other.pem -batch -subj '/CN=localhost/emailAddress=hg@localhost/'
+
+Now generate an expired certificate by turning back the system time:
+
+  $ faketime 2016-01-01T00:00:00Z \
+    openssl req -new -x509 -key priv.pem -nodes -sha256 -days 1 \
+    -out pub-expired.pem -batch -subj '/CN=localhost/emailAddress=hg@localhost/'
+
+Generate a certificate not yet active by advancing the system time:
+
+  $ faketime 2030-01-01T00:00:00Z \
+    openssl req -new -x509 -key priv.pem -nodes -sha256 -days 1 \
+    -out pub-not-yet.pem -batch -subj '/CN=localhost/emailAddress=hg@localhost/'
+
+Generate a passphrase protected client certificate private key:
+
+  $ openssl genrsa -aes256 -passout pass:1234 -out client-key.pem 2048
+
+Create a copy of the private key without a passphrase:
+
+  $ openssl rsa -in client-key.pem -passin pass:1234 -out client-key-decrypted.pem
+
+Create a CSR and sign the key using the server keypair:
+
+  $ printf '.\n.\n.\n.\n.\n.\nhg-client@localhost\n.\n.\n' | \
+    openssl req -new -key client-key.pem -passin pass:1234 -out client-csr.pem
+  $ openssl x509 -req -days 9000 -in client-csr.pem -CA pub.pem -CAkey priv.pem \
+    -set_serial 01 -out client-cert.pem
+
+When replacing the certificates, references to certificate fingerprints will
+need to be updated in test files.
+
+Fingerprints for certs can be obtained by running:
+
+  $ openssl x509 -in pub.pem -noout -sha1 -fingerprint
+  $ openssl x509 -in pub.pem -noout -sha256 -fingerprint
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/sslcerts/client-cert.pem	Mon Jul 18 23:28:14 2016 -0500
@@ -0,0 +1,17 @@
+-----BEGIN CERTIFICATE-----
+MIICyTCCAbECAQEwDQYJKoZIhvcNAQELBQAwMTESMBAGA1UEAwwJbG9jYWxob3N0
+MRswGQYJKoZIhvcNAQkBFgxoZ0Bsb2NhbGhvc3QwHhcNMTYwNzEzMDQ0NzIxWhcN
+NDEwMzA0MDQ0NzIxWjAkMSIwIAYJKoZIhvcNAQkBFhNoZy1jbGllbnRAbG9jYWxo
+b3N0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA6upuVmEs1dTpBWRe
+4LLM1ARhnMQpI6jaQ8JKzQghMU/3T3n6Qkimt2HmxuiczvsawAbUPpBAxZbBnKmX
+bKMiXjtQaO4o4gnyNZVuBgkq2Grc2BREOf0vtUvnPumlnjyAcMNRm6iVbbOerPzV
+Dn1nH7Ljf9UKyGl/Qj6eOAgez/TDui2fo5FUfaqUzF8B7FoaRmsErZZU9pJ+etKX
+M2DlLGofYNbOi+K0RbPypKNzeInNUnvh9JXKntmLQHRwXDSvcGveKepfVlmz/qme
+DqhQSonIXTektdyZ5g9dOvxEjQSYHp+7exIKvrpXLfou3s9nCUTs6ekQLi1Tb4Pn
+gbhauwIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQDVgUHJlu4quQCfeHPoemj+6Jp+
+M140lY7DGFyiGfHP7KcxXiJHagbUC5D1IPYARwhh7Rdssy0FsmWQKYl8LXKvstz4
+zCgz9gxb7vydkZLF49lP1I13Pekoz99381RrXUYomHbx6jKPiOha7ikfAUefop0n
+uwfeQ5f6mfr0AcXmu6W7PHYMcPTK0ZyzoZwobRktKZ+OiwjW/nyolbdXxwU+kRQs
+r0224+GBuwPWmXAobHgPhtClHXYa2ltL1qFFQJETJt0HjhH89jl5HWJl8g3rqccn
+AkyiRIGDAWJsiQTOK7iOy0JSbmT1ePrhAyUoZO8GPbBsOdSdBMM32Y3HAKQz
+-----END CERTIFICATE-----
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/sslcerts/client-key-decrypted.pem	Mon Jul 18 23:28:14 2016 -0500
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpQIBAAKCAQEA6upuVmEs1dTpBWRe4LLM1ARhnMQpI6jaQ8JKzQghMU/3T3n6
+Qkimt2HmxuiczvsawAbUPpBAxZbBnKmXbKMiXjtQaO4o4gnyNZVuBgkq2Grc2BRE
+Of0vtUvnPumlnjyAcMNRm6iVbbOerPzVDn1nH7Ljf9UKyGl/Qj6eOAgez/TDui2f
+o5FUfaqUzF8B7FoaRmsErZZU9pJ+etKXM2DlLGofYNbOi+K0RbPypKNzeInNUnvh
+9JXKntmLQHRwXDSvcGveKepfVlmz/qmeDqhQSonIXTektdyZ5g9dOvxEjQSYHp+7
+exIKvrpXLfou3s9nCUTs6ekQLi1Tb4PngbhauwIDAQABAoIBABATjQuCSPQ1RuEk
+lk2gTt4vkpKM5hfXpWA/uqi/Zq4eP9mDinngyPAB1i5Emv6bNqBvlzTU4GnlQEi9
+XmyD2YVDX+RecBPQBHBgUpA9Ll5zKbvr3yNszUgF8sRebwQeNdgBteMGLXu9cB18
+jAQa1uTXdDQ6WyuN9LSO3nsNKzal8uucnZxdfFDIHx0MahPlrPfAkqzeKxxfyyRE
+jzia24oE+ewE8GHX/TvYnPybCPmBtRwbldA32vx8HbDCvlJanw3dyL98isBa5prr
+DsFaDltWzTKdJOIntdTJXRUDwYp7526bUEdGo/1FddbjW6Th8sXiJu91nL3BD/Qk
+mW102bECgYEA/zEtKgXjPeV9e3/vvAYU2Bsq8TkmhU6ZiZOQCdPWUNOsyfxibJBk
+XXsldtZ111vX/+fdGVPFJRoL1Qf4Xjf3MILVhAAcmfTpnWkdbveOrdCjbACE/ReQ
+xkExZdXhBd9YTS8IelL/Hv45FUo7UWWitgtvTG6caN3LaBTx1o2DiTkCgYEA66jS
+RQrsjRNT+cf7HBmKrKd7EknAH2v83ZyPd49BSBiNnmWaqPG2NxCLWpKks20xvRo2
+j8nftCsu9vSXv+KLnSb2CfOefvNoui7wQyiiWxrMBEPn8DS5E7ctqAiIhQsWEK+e
+n9E0PW/wyKI1Gk5U1nHwEJt196kYPzD8QgnwB5MCgYEAloVrHl5aqyex3CaaQU1U
+/iMIMUCEeBzkc0GWtDU/NG2mfX1gkKiaiokYj//vgheqUEdzIn1Gy5uRXxZUaT6Z
+jwOc7T8jn6vWIALgWZOrlNp7ijjEOISt4EKT4H1HPS9/5gbX+U77LEzHXsdqNZi9
+YKNeArc7ip9IWxv/iY3vCAECgYEAgMjOuGqC4Ynpf3x5T17p+PbB/DmPo9xY4ScZ
+BEamb2bzpddy0NbfNHJ3JXU0StXms6gqnyO8e/KJhO4gK/37PFO5a7DWMhyFZBIY
+vSrspwsa6U3O5+d4KT0W11hqewKW+WFwN3iVqumM1ahHiuu500OK5RiAPGsNVCNK
+CDD0Gr8CgYEAzwTt62ke3zCFOZ2E6EIR6eM+0Q9+B5zceU8DAZaTTxP4532HnT6/
+iHMQvUh0KBE4Rxw3MeSgpe7gKIcMKXubrcDIsEz8vhhOM1xsYIzVEWLtoCLPTaSF
+OWQsk98VDt3SgEjb25nOjJ24zZzUVk45OiGUoxfn1Bp6BbJH7IDihCk=
+-----END RSA PRIVATE KEY-----
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/sslcerts/client-key.pem	Mon Jul 18 23:28:14 2016 -0500
@@ -0,0 +1,30 @@
+-----BEGIN RSA PRIVATE KEY-----
+Proc-Type: 4,ENCRYPTED
+DEK-Info: AES-256-CBC,ADE9D82AA8D8023CD4E9B67FECD9FE08
+
+tjMPfTx/dFicleUbh4pH4f5RUtgZwamcU/uy246wk+f2EBG7pVKEEmoXm8rWW2tW
+xlp9BjL6yCBxoB/GGPjFAoqjQmnUQMxy/P0OWqur3t0+GrB4Fw9hB82fxgnAaydF
+10fw+bRMCfxJMRfa2nEkLzL9za6TF0IOvAYYza/rCxgOQiLg/py9V29wjVnIW9Dt
+B/GxfblTv9K2JBEVdKNWIGT1ZGxem8qiXctbufIXDr+dEEoFUKh+wvkmwVhBaSXi
+gw6fAoATz0Lpd+9d0bqEC1wC3NFdxABYUjZMQ7+xtNzaSCdXiWgv4ix1kzoY8rIi
+mnaSH1VdO27fzA0aOgi6/FAYCT0H3bEQIPgcA47kpty8a27OCylHZGa+vnmBnEtv
+qZeO9kX3Dmoi7vzXL8vjf41ZY7eTU6kYWktdBw/gM65goGINPFx85gli3k5I7+TR
+DQ1shyAmmMU9rH+YamZ9Hs4SLfAe7xPI/7i/upMsz56c57/HlvUwHr0as+L7WDZP
+iX/oW2DQmwN/C5owMPttM7dg2PvSw/Blte5lvloLbmhQTzzw0MDkPHkGt+5Hhjcl
+NwoaVCzT4Kg3E7fcXrKr80vYP9fOQIbCT5qtZ2/cTNLk8XYmLJm8Q7e1XqvuY9sQ
+K7xQ5iLz0PjWDtgbculcb3tQIIUcf/Ss9nCakWr6r4pPIQjDVJh07L7ou76n2PVs
+zJh6cJBgTEUaRWTQgGVH9euyQU3pXHLR0nk5zN4uAOVWdR7eiiskYwT3pM6HiER8
+ZYTs+fJtQD9gJPhBAa3LX5L7kWADxGFdAH5qoTn1SSJY4RIVFVfRfxXmQuTGlRQB
+UEh5Q3bdYKeauw3E9kBaYMYu19223XsAyuvs7/nB02DV6dFjTCGLsrv3JEgf+Wx6
+biCfoOrR1Kt2ez8QR9/6TIbz36kc2Jo3m2jKqUrNx1/gLj+coklSET09IwRZ0voi
+7ype+4mHFEzwiSxmugLfdnU8d9PkzFzUiu3qSYeD2DR9hBgnZtgu0fFnSCmqFDXG
+H1yWy6X6Wiqx6abPVq1ODZgeTmsjJsMLDB6PUbQyESp9ICRJyPPCrMi6UpLrWMto
+A764n5w8B2g/GPJfz1sPePZYi6sumd9UqTQ8UhM644oOlxPWufiBeTiPm1W73PSZ
+6DmLyVEh+kcfID6xq3tWVAuiPO1jMpQGoLKXO7oxGvmTNY/Va++j22DpzNoj1hTJ
+cnFOQZARKrSooAnngwUP68tGVo/+fxzWG95t7IZy8BvszP09VT1jcHOfFIZqHa/V
+rI/JrWSK+tu75Ot63QQpm1x7xSctMZg71w7riVipA+8F1FBdmp+lhOQkEMytngIA
+jKovkuwo8AiQvYCDspEcGSroQmOh1d5TraRyhTuRdiefLVSh05kVGCd6/UsVqdZs
+j+HEyepn4/A9xpHRBTWfCwBFFktAgSdCUOLh5xsT2MbbT/0wDoneD/uay0NakWXB
+zuVsaasx0Yl2cqvXKVUMphmbqMa859BNVqEK3l3tYZdvHiwT8J1LnEEK4KiBa2zZ
++8FcFvD8x1NZBcCBArYP59MbCQOC2QBPJe/oCiUVhN8kRIwlwOhytbW+QIuLZHi4
+-----END RSA PRIVATE KEY-----
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/sslcerts/priv.pem	Mon Jul 18 23:28:14 2016 -0500
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpQIBAAKCAQEA2Ugt7jQrD+u+JtIfXZpVepzOAufcX4CMoHV95qZXZml2juGp
+x3T7wjQPB/IPoOpRG9CoCaekKK+bIqQX1qNuiUa2CsqchNQcua2js5DTttmRYC+f
+wHaQc0UY1QKe/0r8NFX1XoeIWfuL+0UAERoI1zmhu9px5326C7PoyBPIubT0ejLV
+LfciFgyHDmqvYGu6cUBpNFrAi8csPNGcyie1Axh0wZ/9jvHdN+iGmaV9GZObGv0G
+ZpbWlJm8fG+mH1qMFYA6mnknJbEBBTnV0IWdGJalGnz+5GfCvhxzYcEWmLDeO/7F
+NrWMVT9L8Ky65cygCeJ4lEW1XB1w/6rQYjaSnwIDAQABAoIBAAwDAH8FpUfJCYcN
+4KwFByqzFnR0qusgqSWJuT8R/QztUZ+OfBtJrU1MIXSX/iMwMPGvtEpsWRfitVnR
+5nt4J3kxTokEMGjrbPca0Uzw+bNHDdFacKNsKookzL2h2nZUh+LAycLDDVekH1Xx
+t5I6dTiot/cxmVBp0+ontPuylEsnyrQio6eljBfPzxBdRp2lkiymKf3jvbGXRnZ4
+jSFTRuUlbnVbZ3CKnFPU+d5tvn2nEwU/DVbGpJNZAPl99Q0XUcNF3AtGlwGMvi0X
+azcIIOn+swLjn+U2S6i3K234ItYS5I+c9Xi+9DO4fuVko+CQ8PWXP2HdAze7DENc
+zADmd0kCgYEA7nN+qUFAmMOcRE8nSNLt7mcwq6fYQ1MVGikCIXn/PI/wfEqY0lws
+ZhwykBXog0S7PzYkR3LcDOqN0wDcdJ3K4c/a6Z6IqbXMgxaosYfHCCMtdhy0g0F2
+ek0SaY3WQhpFRIG19hvB+ZJSc7JQt+TaXeb8HM1452kmOLpfQGiqqTsCgYEA6UXZ
+bI7c2jO1X+rWF2tZfZdtdeVrIVcm8BunF7ETC4iK/iH2phRQQAh4TFZm6wkX57Tv
+LKDGxmohFlEK7FOtSCeSSVfkvZYRBuHOYcwBgBr1XzXXjHcMoyr0+LflZysht151
+9F0hJwdGQZrivZnv9clJ632RlgE4XlPGskQhRe0CgYEAxVGdhsIQilmUfpJhl8m0
+SovpoqKKO2wNElDNCpbBt4QFJVU1kR3lP7olvUXj2nyN1okfDGDn52hRZEJaK8ZH
+lQVDyf7+aDGgwvmFLyOEeB9kB1FJrzQErsAIdICCxMCogUA1KytdIQEMaeEtGn+u
+k/YIumztl9FTZ64SFGKIlvECgYEA25Kb7csrp1g0yWxKyRCK0+TNa8Pe6ysVw7zD
+s1FCFAEak8t0Vy+Xui4+zdwmU+XjUn7FAsTzVaBgNJlkJr88xEY7ND4/WRUAQfIa
+SYO1hdfaTxxnIBiPFKdCnzq5/DplKi0H6lQe+JWoU+hutPlJHZmysq8ncoMDhAZn
+aTUn/KECgYEAvxGaWt4Fn2tRrHeaG0qT+nMBxd8cTiFInOcYDeS/FlQo3DTDK2Ai
+qLBa4DinnGN2hSKwnN3R5R2VRxk4I6+ljG0yuNBhJBcAgAFpnHfkuY1maQJB+1xY
+A07WcM4J3yuPfjcDkipNFQa4Y8oJCaS2yiOPvlUfNQrCLAV+YqHZiiQ=
+-----END RSA PRIVATE KEY-----
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/sslcerts/pub-expired.pem	Mon Jul 18 23:28:14 2016 -0500
@@ -0,0 +1,20 @@
+-----BEGIN CERTIFICATE-----
+MIIDNTCCAh2gAwIBAgIJANRJCnkBtkkOMA0GCSqGSIb3DQEBCwUAMDExEjAQBgNV
+BAMMCWxvY2FsaG9zdDEbMBkGCSqGSIb3DQEJARYMaGdAbG9jYWxob3N0MB4XDTE2
+MDEwMTA4MDAzMFoXDTE2MDEwMjA4MDAzMFowMTESMBAGA1UEAwwJbG9jYWxob3N0
+MRswGQYJKoZIhvcNAQkBFgxoZ0Bsb2NhbGhvc3QwggEiMA0GCSqGSIb3DQEBAQUA
+A4IBDwAwggEKAoIBAQDZSC3uNCsP674m0h9dmlV6nM4C59xfgIygdX3mpldmaXaO
+4anHdPvCNA8H8g+g6lEb0KgJp6Qor5sipBfWo26JRrYKypyE1By5raOzkNO22ZFg
+L5/AdpBzRRjVAp7/Svw0VfVeh4hZ+4v7RQARGgjXOaG72nHnfboLs+jIE8i5tPR6
+MtUt9yIWDIcOaq9ga7pxQGk0WsCLxyw80ZzKJ7UDGHTBn/2O8d036IaZpX0Zk5sa
+/QZmltaUmbx8b6YfWowVgDqaeSclsQEFOdXQhZ0YlqUafP7kZ8K+HHNhwRaYsN47
+/sU2tYxVP0vwrLrlzKAJ4niURbVcHXD/qtBiNpKfAgMBAAGjUDBOMB0GA1UdDgQW
+BBT6fA08JcG+SWBN9Y+p575xcFfIVjAfBgNVHSMEGDAWgBT6fA08JcG+SWBN9Y+p
+575xcFfIVjAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQBnY2r60iGg
+0BqR5vOj//XjS1FZKNG6+n3MKgxBY3pqFbqsCJfX5GfWD3GHJRXzv3p1MXIP3BWj
+zFutg+FE2QChQFwZjJu3E1VnIZN5ytYBltGHwaCEUdGq9sAZ9R2Jdf8xhQa5h+1U
+NZJvYbhCyecnUh2/Dkj2pFoF7wv7BtWFJV20WzHesN/Dik51cr6yFSn4nJb6YAMw
+t4/Vnf24v36WwnBoO5VqO+ntISTD6CS3EE5Gqv2ZMQtFaMoRfKIBaDIKHvbYeXdX
+2gDTKWnS5KJYWmsl6N2CPjrHJJphaFGSKFAivmT24Q+JSKcC9hww7gvnGcVmsFan
+H5xwzFQW2cna
+-----END CERTIFICATE-----
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/sslcerts/pub-not-yet.pem	Mon Jul 18 23:28:14 2016 -0500
@@ -0,0 +1,20 @@
+-----BEGIN CERTIFICATE-----
+MIIDNTCCAh2gAwIBAgIJAJvD5nejIHr2MA0GCSqGSIb3DQEBCwUAMDExEjAQBgNV
+BAMMCWxvY2FsaG9zdDEbMBkGCSqGSIb3DQEJARYMaGdAbG9jYWxob3N0MB4XDTMw
+MDEwMTA4MDAwOFoXDTMwMDEwMjA4MDAwOFowMTESMBAGA1UEAwwJbG9jYWxob3N0
+MRswGQYJKoZIhvcNAQkBFgxoZ0Bsb2NhbGhvc3QwggEiMA0GCSqGSIb3DQEBAQUA
+A4IBDwAwggEKAoIBAQDZSC3uNCsP674m0h9dmlV6nM4C59xfgIygdX3mpldmaXaO
+4anHdPvCNA8H8g+g6lEb0KgJp6Qor5sipBfWo26JRrYKypyE1By5raOzkNO22ZFg
+L5/AdpBzRRjVAp7/Svw0VfVeh4hZ+4v7RQARGgjXOaG72nHnfboLs+jIE8i5tPR6
+MtUt9yIWDIcOaq9ga7pxQGk0WsCLxyw80ZzKJ7UDGHTBn/2O8d036IaZpX0Zk5sa
+/QZmltaUmbx8b6YfWowVgDqaeSclsQEFOdXQhZ0YlqUafP7kZ8K+HHNhwRaYsN47
+/sU2tYxVP0vwrLrlzKAJ4niURbVcHXD/qtBiNpKfAgMBAAGjUDBOMB0GA1UdDgQW
+BBT6fA08JcG+SWBN9Y+p575xcFfIVjAfBgNVHSMEGDAWgBT6fA08JcG+SWBN9Y+p
+575xcFfIVjAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQC0VDzAqPiL
+6U8yqaQqXdS6iK49yDQe9qzxzNnAZnj4YCsa5+qYSf+jl49Rak+pGw3AmN9gl6xq
+aaP5xAlS8F0lnfZ5NcXmmp4Lt25qdu9J9qIPEAL4/ucirDr/cphCbDtzaWsrfi9j
+YjVzSqoSEdnV1x9GkkLVwQRmA+D/2+95pgx6UNchqMbXuEQkAv9kVOzSG62OOAzO
+z2Wct6b+DFbfFI0xcvKeJRGogjkd5QrF1XxU7e5u17DAN7/nhahv43ol3eC/fUiH
+ITZpEc+/WdVtUwZQtoEQuBLB1Mc8QvYUUksUv9+KVjZ4o2oqApup7k7oMSPYNPTf
+2O99CXjOCl9k
+-----END CERTIFICATE-----
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/sslcerts/pub-other.pem	Mon Jul 18 23:28:14 2016 -0500
@@ -0,0 +1,20 @@
+-----BEGIN CERTIFICATE-----
+MIIDNTCCAh2gAwIBAgIJAMXBgtbkFDfwMA0GCSqGSIb3DQEBCwUAMDExEjAQBgNV
+BAMMCWxvY2FsaG9zdDEbMBkGCSqGSIb3DQEJARYMaGdAbG9jYWxob3N0MB4XDTE2
+MDcxMzA0MTcyOFoXDTQxMDMwNDA0MTcyOFowMTESMBAGA1UEAwwJbG9jYWxob3N0
+MRswGQYJKoZIhvcNAQkBFgxoZ0Bsb2NhbGhvc3QwggEiMA0GCSqGSIb3DQEBAQUA
+A4IBDwAwggEKAoIBAQDZSC3uNCsP674m0h9dmlV6nM4C59xfgIygdX3mpldmaXaO
+4anHdPvCNA8H8g+g6lEb0KgJp6Qor5sipBfWo26JRrYKypyE1By5raOzkNO22ZFg
+L5/AdpBzRRjVAp7/Svw0VfVeh4hZ+4v7RQARGgjXOaG72nHnfboLs+jIE8i5tPR6
+MtUt9yIWDIcOaq9ga7pxQGk0WsCLxyw80ZzKJ7UDGHTBn/2O8d036IaZpX0Zk5sa
+/QZmltaUmbx8b6YfWowVgDqaeSclsQEFOdXQhZ0YlqUafP7kZ8K+HHNhwRaYsN47
+/sU2tYxVP0vwrLrlzKAJ4niURbVcHXD/qtBiNpKfAgMBAAGjUDBOMB0GA1UdDgQW
+BBT6fA08JcG+SWBN9Y+p575xcFfIVjAfBgNVHSMEGDAWgBT6fA08JcG+SWBN9Y+p
+575xcFfIVjAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQDLxD+Q90Ue
+zrkmq964pzl+9zd0Y1ODSBnwaZfJxaoyFwRpYva1GYyz2CnJZEDjh8nUbo/jmEU1
+9D91YT8e3plgcpsuxp0YhCUJbTz56k2OOq/MyrX+KgrC2VAdGbhr/C3hNkGKBzdu
++8p+z3jBUkiQFRb8xc485v1zkOX1lPN3tSAEOcja/lslmHV1UQhEYI/Ne2z/i/rQ
+uVtC28dTHoPnJykIhXBwgxuAL3G3eFpCRemHOyTlzNDQQxkgMNAYenutWpYXjM2Z
+paplLANjV+X91wyAXZ1XZ+5m7yLA7463MwOPU3Ko+HcyKKjPO+wJwVJbEpXr3rPR
+getT2CfPFLMe
+-----END CERTIFICATE-----
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/sslcerts/pub.pem	Mon Jul 18 23:28:14 2016 -0500
@@ -0,0 +1,20 @@
+-----BEGIN CERTIFICATE-----
+MIIDNTCCAh2gAwIBAgIJAJ12yUL2zGhzMA0GCSqGSIb3DQEBCwUAMDExEjAQBgNV
+BAMMCWxvY2FsaG9zdDEbMBkGCSqGSIb3DQEJARYMaGdAbG9jYWxob3N0MB4XDTE2
+MDcxMzA0MTcxMloXDTQxMDMwNDA0MTcxMlowMTESMBAGA1UEAwwJbG9jYWxob3N0
+MRswGQYJKoZIhvcNAQkBFgxoZ0Bsb2NhbGhvc3QwggEiMA0GCSqGSIb3DQEBAQUA
+A4IBDwAwggEKAoIBAQDZSC3uNCsP674m0h9dmlV6nM4C59xfgIygdX3mpldmaXaO
+4anHdPvCNA8H8g+g6lEb0KgJp6Qor5sipBfWo26JRrYKypyE1By5raOzkNO22ZFg
+L5/AdpBzRRjVAp7/Svw0VfVeh4hZ+4v7RQARGgjXOaG72nHnfboLs+jIE8i5tPR6
+MtUt9yIWDIcOaq9ga7pxQGk0WsCLxyw80ZzKJ7UDGHTBn/2O8d036IaZpX0Zk5sa
+/QZmltaUmbx8b6YfWowVgDqaeSclsQEFOdXQhZ0YlqUafP7kZ8K+HHNhwRaYsN47
+/sU2tYxVP0vwrLrlzKAJ4niURbVcHXD/qtBiNpKfAgMBAAGjUDBOMB0GA1UdDgQW
+BBT6fA08JcG+SWBN9Y+p575xcFfIVjAfBgNVHSMEGDAWgBT6fA08JcG+SWBN9Y+p
+575xcFfIVjAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQCzJhM/OBoS
+JXnjfLhZqi6hTmx1XC7MR05z4fWdyBhZx8PwSDEjxAj/omAm2RMEx/Fv1a7FO6hd
+ClYsxxSfWJO7NQ3V4YLn9AvNr5gcxuXV/4bTtEFNebuzhV06u5nH7pGbHbkxCI+u
+QekmRTvKIojr8F44cyszEk+MZQ5bFBElByjVzgXNvAaDP0ryUL5eQhLrkuwbNFLQ
+mFf7EaerMuM28x1knhiH/39s7t92CJgm9+D60TmJ4XXwue1gZ0v9MVS18iOuWyio
+BklppJsdtDLxHTHGNlBeHdam5VejbXRo7s0y5OfuATwlgcaCMYC/68hVJYwl/GZ7
+3YpdNpMshSaE
+-----END CERTIFICATE-----
--- a/tests/svn-safe-append.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/svn-safe-append.py	Mon Jul 18 23:28:14 2016 -0500
@@ -1,9 +1,12 @@
 #!/usr/bin/env python
 
+from __future__ import absolute_import
+
 __doc__ = """Same as `echo a >> b`, but ensures a changed mtime of b.
 Without this svn will not detect workspace changes."""
 
-import sys, os
+import os
+import sys
 
 text = sys.argv[1]
 fname = sys.argv[2]
--- a/tests/test-addremove-similar.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-addremove-similar.t	Mon Jul 18 23:28:14 2016 -0500
@@ -1,7 +1,7 @@
   $ hg init rep; cd rep
 
   $ touch empty-file
-  $ $PYTHON -c 'for x in range(10000): print x' > large-file
+  $ $PYTHON -c 'for x in range(10000): print(x)' > large-file
 
   $ hg addremove
   adding empty-file
@@ -10,7 +10,7 @@
   $ hg commit -m A
 
   $ rm large-file empty-file
-  $ $PYTHON -c 'for x in range(10,10000): print x' > another-file
+  $ $PYTHON -c 'for x in range(10,10000): print(x)' > another-file
 
   $ hg addremove -s50
   adding another-file
@@ -34,8 +34,8 @@
 
   $ hg init rep2; cd rep2
 
-  $ $PYTHON -c 'for x in range(10000): print x' > large-file
-  $ $PYTHON -c 'for x in range(50): print x' > tiny-file
+  $ $PYTHON -c 'for x in range(10000): print(x)' > large-file
+  $ $PYTHON -c 'for x in range(50): print(x)' > tiny-file
 
   $ hg addremove
   adding large-file
@@ -43,7 +43,7 @@
 
   $ hg commit -m A
 
-  $ $PYTHON -c 'for x in range(70): print x' > small-file
+  $ $PYTHON -c 'for x in range(70): print(x)' > small-file
   $ rm tiny-file
   $ rm large-file
 
--- a/tests/test-alias.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-alias.t	Mon Jul 18 23:28:14 2016 -0500
@@ -525,6 +525,24 @@
   (use "hg help" for the full list of commands or "hg -v" for details)
   [255]
 
+environment variable changes in alias commands
+
+  $ cat > $TESTTMP/setcount.py <<EOF
+  > import os
+  > def uisetup(ui):
+  >     os.environ['COUNT'] = '2'
+  > EOF
+
+  $ cat >> $HGRCPATH <<'EOF'
+  > [extensions]
+  > setcount = $TESTTMP/setcount.py
+  > [alias]
+  > showcount = log -T "$COUNT\n" -r .
+  > EOF
+
+  $ COUNT=1 hg showcount
+  2
+
 This should show id:
 
   $ hg --config alias.log='id' log
--- a/tests/test-archive.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-archive.t	Mon Jul 18 23:28:14 2016 -0500
@@ -69,7 +69,12 @@
   $ TIP=`hg id -v | cut -f1 -d' '`
   $ QTIP=`hg id -q`
   $ cat > getarchive.py <<EOF
-  > import os, sys, urllib2
+  > from __future__ import absolute_import
+  > import os
+  > import sys
+  > from mercurial import (
+  >     util,
+  > )
   > try:
   >     # Set stdout to binary mode for win32 platforms
   >     import msvcrt
@@ -83,10 +88,14 @@
   >     node, archive, file = sys.argv[1:]
   >     requeststr = 'cmd=archive;node=%s;type=%s;file=%s' % (node, archive, file)
   > try:
-  >     f = urllib2.urlopen('http://127.0.0.1:%s/?%s'
+  >     stdout = sys.stdout.buffer
+  > except AttributeError:
+  >     stdout = sys.stdout
+  > try:
+  >     f = util.urlreq.urlopen('http://127.0.0.1:%s/?%s'
   >                     % (os.environ['HGPORT'], requeststr))
-  >     sys.stdout.write(f.read())
-  > except urllib2.HTTPError, e:
+  >     stdout.write(f.read())
+  > except util.urlerr.httperror as e:
   >     sys.stderr.write(str(e) + '\n')
   > EOF
   $ python getarchive.py "$TIP" gz | gunzip | tar tf - 2>/dev/null
@@ -108,13 +117,13 @@
   $ python getarchive.py "$TIP" zip > archive.zip
   $ unzip -t archive.zip
   Archive:  archive.zip
-      testing: test-archive-1701ef1f1510/.hg_archival.txt   OK
-      testing: test-archive-1701ef1f1510/.hgsub   OK
-      testing: test-archive-1701ef1f1510/.hgsubstate   OK
-      testing: test-archive-1701ef1f1510/bar   OK
-      testing: test-archive-1701ef1f1510/baz/bletch   OK
-      testing: test-archive-1701ef1f1510/foo   OK
-      testing: test-archive-1701ef1f1510/subrepo/sub   OK
+      testing: test-archive-1701ef1f1510/.hg_archival.txt*OK (glob)
+      testing: test-archive-1701ef1f1510/.hgsub*OK (glob)
+      testing: test-archive-1701ef1f1510/.hgsubstate*OK (glob)
+      testing: test-archive-1701ef1f1510/bar*OK (glob)
+      testing: test-archive-1701ef1f1510/baz/bletch*OK (glob)
+      testing: test-archive-1701ef1f1510/foo*OK (glob)
+      testing: test-archive-1701ef1f1510/subrepo/sub*OK (glob)
   No errors detected in compressed data of archive.zip.
 
 test that we can download single directories and files
@@ -195,15 +204,16 @@
   > done
 
   $ cat > md5comp.py <<EOF
+  > from __future__ import print_function
   > try:
   >     from hashlib import md5
   > except ImportError:
   >     from md5 import md5
   > import sys
   > f1, f2 = sys.argv[1:3]
-  > h1 = md5(file(f1, 'rb').read()).hexdigest()
-  > h2 = md5(file(f2, 'rb').read()).hexdigest()
-  > print h1 == h2 or "md5 differ: " + repr((h1, h2))
+  > h1 = md5(open(f1, 'rb').read()).hexdigest()
+  > h2 = md5(open(f2, 'rb').read()).hexdigest()
+  > print(h1 == h2 or "md5 differ: " + repr((h1, h2)))
   > EOF
 
 archive name is stored in the archive, so create similar archives and
@@ -225,9 +235,9 @@
   $ hg archive --config ui.archivemeta=false -t zip -r 2 test.zip
   $ unzip -t test.zip
   Archive:  test.zip
-      testing: test/bar                 OK
-      testing: test/baz/bletch          OK
-      testing: test/foo                 OK
+      testing: test/bar*OK (glob)
+      testing: test/baz/bletch*OK (glob)
+      testing: test/foo*OK (glob)
   No errors detected in compressed data of test.zip.
 
   $ hg archive -t tar - | tar tf - 2>/dev/null
@@ -343,8 +353,9 @@
   $ hg -R repo add repo/a
   $ hg -R repo commit -m '#0' -d '456789012 21600'
   $ cat > show_mtime.py <<EOF
+  > from __future__ import print_function
   > import sys, os
-  > print int(os.stat(sys.argv[1]).st_mtime)
+  > print(int(os.stat(sys.argv[1]).st_mtime))
   > EOF
 
   $ hg -R repo archive --prefix tar-extracted archive.tar
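The rewritten getarchive.py helper above follows the Python 2/3 compatibility pattern this series applies to test scripts: fetch URLs through mercurial.util's urlreq/urlerr aliases instead of importing urllib2 directly, and write binary data to sys.stdout.buffer when it exists. A minimal standalone sketch of that pattern (the URL below is only a placeholder, not taken from the patch):

    from __future__ import absolute_import
    import sys
    from mercurial import util

    # Python 3 exposes the binary stdout stream as sys.stdout.buffer;
    # Python 2's sys.stdout is already byte-oriented.
    try:
        stdout = sys.stdout.buffer
    except AttributeError:
        stdout = sys.stdout

    try:
        # urlreq/urlerr map to urllib2 on Python 2 and to
        # urllib.request/urllib.error on Python 3
        f = util.urlreq.urlopen('http://127.0.0.1:8000/?cmd=archive;type=gz')
        stdout.write(f.read())
    except util.urlerr.httperror as e:
        sys.stderr.write(str(e) + '\n')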
--- a/tests/test-atomictempfile.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-atomictempfile.py	Mon Jul 18 23:28:14 2016 -0500
@@ -1,42 +1,119 @@
+from __future__ import absolute_import
+
+import glob
 import os
-import glob
+import shutil
+import tempfile
 import unittest
-import silenttestrunner
 
-from mercurial.util import atomictempfile
+from mercurial import (
+    util,
+)
+atomictempfile = util.atomictempfile
 
 class testatomictempfile(unittest.TestCase):
-    def test1_simple(self):
-        if os.path.exists('foo'):
-            os.remove('foo')
-        file = atomictempfile('foo')
-        (dir, basename) = os.path.split(file._tempname)
-        self.assertFalse(os.path.isfile('foo'))
-        self.assertTrue(basename in glob.glob('.foo-*'))
+    def setUp(self):
+        self._testdir = tempfile.mkdtemp('atomictempfiletest')
+        self._filename = os.path.join(self._testdir, 'testfilename')
+
+    def tearDown(self):
+        shutil.rmtree(self._testdir, True)
 
-        file.write('argh\n')
+    def testsimple(self):
+        file = atomictempfile(self._filename)
+        self.assertFalse(os.path.isfile(self._filename))
+        tempfilename = file._tempname
+        self.assertTrue(tempfilename in glob.glob(
+            os.path.join(self._testdir, '.testfilename-*')))
+
+        file.write(b'argh\n')
         file.close()
 
-        self.assertTrue(os.path.isfile('foo'))
-        self.assertTrue(basename not in glob.glob('.foo-*'))
+        self.assertTrue(os.path.isfile(self._filename))
+        self.assertTrue(tempfilename not in glob.glob(
+            os.path.join(self._testdir, '.testfilename-*')))
 
     # discard() removes the temp file without making the write permanent
-    def test2_discard(self):
-        if os.path.exists('foo'):
-            os.remove('foo')
-        file = atomictempfile('foo')
+    def testdiscard(self):
+        file = atomictempfile(self._filename)
         (dir, basename) = os.path.split(file._tempname)
 
-        file.write('yo\n')
+        file.write(b'yo\n')
         file.discard()
 
-        self.assertFalse(os.path.isfile('foo'))
+        self.assertFalse(os.path.isfile(self._filename))
         self.assertTrue(basename not in os.listdir('.'))
 
     # if a programmer screws up and passes bad args to atomictempfile, they
     # get a plain ordinary TypeError, not infinite recursion
-    def test3_oops(self):
+    def testoops(self):
         self.assertRaises(TypeError, atomictempfile)
 
+    # checkambig=True avoids timestamp ambiguity
+    def testcheckambig(self):
+        def atomicwrite(checkambig):
+            f = atomictempfile(self._filename, checkambig=checkambig)
+            f.write('FOO')
+            f.close()
+
+        # try several times, because reproducing the ambiguity depends on
+        # "filesystem time"
+        for i in xrange(5):
+            atomicwrite(False)
+            oldstat = os.stat(self._filename)
+            if oldstat.st_ctime != oldstat.st_mtime:
+                # a subsequent change never causes ambiguity
+                continue
+
+            repetition = 3
+
+            # repeat atomic write with checkambig=True, to examine
+            # whether st_mtime is advanced multiple times as expected
+            for j in xrange(repetition):
+                atomicwrite(True)
+            newstat = os.stat(self._filename)
+            if oldstat.st_ctime != newstat.st_ctime:
+                # timestamp ambiguity was naturally avoided while repetition
+                continue
+
+            # st_mtime should be advanced "repetition" times, because
+            # all atomicwrite() calls occurred at the same time (in seconds)
+            self.assertTrue(newstat.st_mtime ==
+                            ((oldstat.st_mtime + repetition) & 0x7fffffff))
+            # no more examination is needed if the assumption above is true
+            break
+        else:
+            # This platform seems too slow to exercise the timestamp
+            # anti-ambiguity check (or the test happened to run at an
+            # unlucky moment). Exit silently in this case, because runs
+            # on faster platforms can still detect problems
+            pass
+
+    def testread(self):
+        with open(self._filename, 'wb') as f:
+            f.write(b'foobar\n')
+        file = atomictempfile(self._filename, mode='rb')
+        self.assertTrue(file.read(), b'foobar\n')
+        file.discard()
+
+    def testcontextmanagersuccess(self):
+        """When the context closes, the file is closed"""
+        with atomictempfile('foo') as f:
+            self.assertFalse(os.path.isfile('foo'))
+            f.write(b'argh\n')
+        self.assertTrue(os.path.isfile('foo'))
+
+    def testcontextmanagerfailure(self):
+        """On exception, the file is discarded"""
+        try:
+            with atomictempfile('foo') as f:
+                self.assertFalse(os.path.isfile('foo'))
+                f.write(b'argh\n')
+                raise ValueError
+        except ValueError:
+            pass
+        self.assertFalse(os.path.isfile('foo'))
+
 if __name__ == '__main__':
+    import silenttestrunner
     silenttestrunner.main(__name__)
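The new tests above exercise atomictempfile's context-manager interface and its checkambig flag, which advances st_mtime when a rewrite would otherwise be indistinguishable by timestamp. A minimal sketch of the usage those tests verify, assuming mercurial.util.atomictempfile as extended in this series ('somefile' is just a placeholder path):

    from mercurial import util

    # On a clean exit from the with-block the temporary file is renamed
    # over 'somefile' atomically; if an exception escapes, the pending
    # write is discarded and 'somefile' is left untouched.
    with util.atomictempfile('somefile', checkambig=True) as f:
        f.write(b'new contents\n')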
--- a/tests/test-bad-pull.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-bad-pull.t	Mon Jul 18 23:28:14 2016 -0500
@@ -1,14 +1,8 @@
 #require serve killdaemons
 
-#if windows
   $ hg clone http://localhost:$HGPORT/ copy
   abort: * (glob)
   [255]
-#else
-  $ hg clone http://localhost:$HGPORT/ copy
-  abort: error: Connection refused
-  [255]
-#endif
 
   $ test -d copy
   [1]
--- a/tests/test-bookmarks-pushpull.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-bookmarks-pushpull.t	Mon Jul 18 23:28:14 2016 -0500
@@ -113,6 +113,19 @@
   exporting bookmark V
   [1]
 
+exporting the active bookmark with 'push -B .'
+requires that one of the bookmarks is active
+
+  $ hg update -r default
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  (leaving bookmark V)
+  $ hg push -B . ../a
+  abort: no active bookmark
+  [255]
+  $ hg update -r V
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  (activating bookmark V)
+
 delete the bookmark
 
   $ hg book -d V
@@ -239,7 +252,10 @@
 
 explicit pull should overwrite the local version (issue4439)
 
-  $ hg pull --config paths.foo=../a foo -B X
+  $ hg update -r X
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  (activating bookmark X)
+  $ hg pull --config paths.foo=../a foo -B .
   pulling from $TESTTMP/a (glob)
   no changes found
   divergent bookmark @ stored as @foo
@@ -353,7 +369,10 @@
      X                         1:0d2164f0ce0d
    * Y                         5:35d1ef0a8d1b
      Z                         1:0d2164f0ce0d
-  $ hg pull -B Y
+  $ hg update -r Y
+  1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  (activating bookmark Y)
+  $ hg pull -B .
   pulling from http://localhost:$HGPORT/
   searching for changes
   adding changesets
@@ -363,9 +382,9 @@
   updating bookmark Y
   (run 'hg update' to get a working copy)
   $ hg book
-   * @                         1:0d2164f0ce0d
+     @                         1:0d2164f0ce0d
      X                         1:0d2164f0ce0d
-     Y                         5:35d1ef0a8d1b
+   * Y                         5:35d1ef0a8d1b
      Z                         1:0d2164f0ce0d
 
 (done with this section of the test)
--- a/tests/test-branches.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-branches.t	Mon Jul 18 23:28:14 2016 -0500
@@ -650,4 +650,21 @@
   0050: bf be 84 1b 00 00 00 00 d3 f1 63 45 80 00 00 00 |..........cE....|
   0060: e3 d4 9c 05 80 00 00 00 e2 3b 55 05 00 00 00 00 |.........;U.....|
 
+Test that cache files are created and grow correctly:
+
+  $ rm .hg/cache/rbc*
+  $ hg log -r "5 & branch(5)" -T "{rev}\n"
+  5
+  $ f --size --hexdump .hg/cache/rbc-*
+  .hg/cache/rbc-names-v1: size=1
+  0000: 61                                              |a|
+  .hg/cache/rbc-revs-v1: size=112
+  0000: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
+  0010: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
+  0020: 00 00 00 00 00 00 00 00 d8 cb c6 1d 00 00 00 00 |................|
+  0030: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
+  0040: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
+  0050: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
+  0060: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
+
   $ cd ..
--- a/tests/test-bugzilla.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-bugzilla.t	Mon Jul 18 23:28:14 2016 -0500
@@ -54,7 +54,7 @@
   $ cat bzmock.log && rm bzmock.log
   update bugid=123, newstate={}, committer='test'
   ----
-  changeset 7875a8342c6f in repo $TESTTMP/mockremote refers to bug 123.
+  changeset 7875a8342c6f in repo $TESTTMP/mockremote refers to bug 123. (glob)
   details:
   	Fixes bug 123
   ----
--- a/tests/test-bundle-type.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-bundle-type.t	Mon Jul 18 23:28:14 2016 -0500
@@ -52,7 +52,7 @@
   1 changesets found
   HG20\x00\x00 (esc)
   Stream params: {}
-  changegroup -- "{'version': '02'}"
+  changegroup -- "sortdict([('version', '02'), ('nbchanges', '1')])"
       c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
   none-v2
   
@@ -60,8 +60,8 @@
   searching for changes
   1 changesets found
   HG20\x00\x00 (esc)
-  Stream params: {'Compression': 'BZ'}
-  changegroup -- "{'version': '02'}"
+  Stream params: sortdict([('Compression', 'BZ')])
+  changegroup -- "sortdict([('version', '02'), ('nbchanges', '1')])"
       c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
   bzip2-v2
   
@@ -69,8 +69,8 @@
   searching for changes
   1 changesets found
   HG20\x00\x00 (esc)
-  Stream params: {'Compression': 'GZ'}
-  changegroup -- "{'version': '02'}"
+  Stream params: sortdict([('Compression', 'GZ')])
+  changegroup -- "sortdict([('version', '02'), ('nbchanges', '1')])"
       c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
   gzip-v2
   
@@ -79,7 +79,7 @@
   1 changesets found
   HG20\x00\x00 (esc)
   Stream params: {}
-  changegroup -- "{'version': '02'}"
+  changegroup -- "sortdict([('version', '02'), ('nbchanges', '1')])"
       c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
   none-v2
   
@@ -87,8 +87,8 @@
   searching for changes
   1 changesets found
   HG20\x00\x00 (esc)
-  Stream params: {'Compression': 'BZ'}
-  changegroup -- "{'version': '02'}"
+  Stream params: sortdict([('Compression', 'BZ')])
+  changegroup -- "sortdict([('version', '02'), ('nbchanges', '1')])"
       c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
   bzip2-v2
   
--- a/tests/test-bundle.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-bundle.t	Mon Jul 18 23:28:14 2016 -0500
@@ -708,7 +708,7 @@
   1a38c1b849e8b70c756d2d80b0b9a3ac0b7ea11a
   057f4db07f61970e1c11e83be79e9d08adc4dc31
   bundle2-output-bundle: "HG20", (1 params) 1 parts total
-  bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
+  bundle2-output-part: "changegroup" (params: 1 mandatory 1 advisory) streamed payload
   bundling: 1/2 changesets (50.00%)
   bundling: 2/2 changesets (100.00%)
   bundling: 1/2 manifests (50.00%)
--- a/tests/test-bundle2-format.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-bundle2-format.t	Mon Jul 18 23:28:14 2016 -0500
@@ -990,7 +990,7 @@
 
   $ hg debugbundle ../rev.hg2
   Stream params: {}
-  changegroup -- '{}'
+  changegroup -- 'sortdict()'
       32af7686d403cf45b5d95f2d70cebea587ac806a
       9520eea781bcca16c1e15acc0ba14335a0e8e5ba
       eea13746799a9e0bfd88f29d3c2e9dc9389f524f
@@ -1118,8 +1118,8 @@
   0360: db fb 6a 33 df c1 7d 99 cf ef d4 d5 6d da 77 7c |..j3..}.....m.w||
   0370: 3b 19 fd af c5 3f f1 60 c3 17                   |;....?.`..|
   $ hg debugbundle ../rev.hg2.bz
-  Stream params: {'Compression': 'GZ'}
-  changegroup -- '{}'
+  Stream params: sortdict([('Compression', 'GZ')])
+  changegroup -- 'sortdict()'
       32af7686d403cf45b5d95f2d70cebea587ac806a
       9520eea781bcca16c1e15acc0ba14335a0e8e5ba
       eea13746799a9e0bfd88f29d3c2e9dc9389f524f
@@ -1205,8 +1205,8 @@
   0420: 8b 43 88 57 9c 01 f5 61 b5 e1 27 41 7e af 83 fe |.C.W...a..'A~...|
   0430: 2e e4 8a 70 a1 21 46 96 30 7a                   |...p.!F.0z|
   $ hg debugbundle ../rev.hg2.bz
-  Stream params: {'Compression': 'BZ'}
-  changegroup -- '{}'
+  Stream params: sortdict([('Compression', 'BZ')])
+  changegroup -- 'sortdict()'
       32af7686d403cf45b5d95f2d70cebea587ac806a
       9520eea781bcca16c1e15acc0ba14335a0e8e5ba
       eea13746799a9e0bfd88f29d3c2e9dc9389f524f
--- a/tests/test-check-code.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-check-code.t	Mon Jul 18 23:28:14 2016 -0500
@@ -1,5 +1,6 @@
 #require test-repo
 
+  $ . "$TESTDIR/helpers-testrepo.sh"
   $ check_code="$TESTDIR"/../contrib/check-code.py
   $ cd "$TESTDIR"/..
 
@@ -16,4 +17,3 @@
   Skipping i18n/polib.py it has no-che?k-code (glob)
   Skipping mercurial/httpclient/__init__.py it has no-che?k-code (glob)
   Skipping mercurial/httpclient/_readers.py it has no-che?k-code (glob)
-  Skipping mercurial/httpclient/socketutil.py it has no-che?k-code (glob)
--- a/tests/test-check-commit.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-check-commit.t	Mon Jul 18 23:28:14 2016 -0500
@@ -2,10 +2,7 @@
 
 Enable obsolescence to avoid the warning issue when obsmarker are found
 
-  $ cat >> $HGRCPATH << EOF
-  > [experimental]
-  > evolution=createmarkers
-  > EOF
+  $ . "$TESTDIR/helpers-testrepo.sh"
 
 Go back in the hg repo
 
--- a/tests/test-check-config.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-check-config.t	Mon Jul 18 23:28:14 2016 -0500
@@ -1,5 +1,6 @@
 #require test-repo
 
+  $ . "$TESTDIR/helpers-testrepo.sh"
   $ cd "$TESTDIR"/..
 
 New errors are not allowed. Warnings are strongly discouraged.
--- a/tests/test-check-execute.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-check-execute.t	Mon Jul 18 23:28:14 2016 -0500
@@ -1,5 +1,6 @@
 #require test-repo execbit
 
+  $ . "$TESTDIR/helpers-testrepo.sh"
   $ cd "`dirname "$TESTDIR"`"
 
 look for python scripts without the execute bit
--- a/tests/test-check-module-imports.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-check-module-imports.t	Mon Jul 18 23:28:14 2016 -0500
@@ -1,5 +1,6 @@
 #require test-repo
 
+  $ . "$TESTDIR/helpers-testrepo.sh"
   $ import_checker="$TESTDIR"/../contrib/import-checker.py
 
 Run the doctests from the import checker, and make sure
@@ -11,6 +12,7 @@
 Run additional tests for the import checker
 
   $ mkdir testpackage
+  $ touch testpackage/__init__.py
 
   $ cat > testpackage/multiple.py << EOF
   > from __future__ import absolute_import
@@ -113,7 +115,16 @@
   > from testpackage.unsorted import foo
   > EOF
 
-  $ python "$import_checker" testpackage/*.py testpackage/subpackage/*.py
+  $ mkdir testpackage2
+  $ touch testpackage2/__init__.py
+
+  $ cat > testpackage2/latesymbolimport.py << EOF
+  > from __future__ import absolute_import
+  > from testpackage import unsorted
+  > from mercurial.node import hex
+  > EOF
+
+  $ python "$import_checker" testpackage*/*.py testpackage/subpackage/*.py
   testpackage/importalias.py:2: ui module must be "as" aliased to uimod
   testpackage/importfromalias.py:2: ui from testpackage must be "as" aliased to uimod
   testpackage/importfromrelative.py:2: import should be relative: testpackage.unsorted
@@ -131,6 +142,7 @@
   testpackage/subpackage/localimport.py:8: import should be relative: testpackage.subpackage.levelpriority
   testpackage/symbolimport.py:2: direct symbol import foo from testpackage.unsorted
   testpackage/unsorted.py:3: imports not lexically sorted: os < sys
+  testpackage2/latesymbolimport.py:3: symbol import follows non-symbol import: mercurial.node
   [1]
 
   $ cd "$TESTDIR"/..
@@ -144,8 +156,13 @@
 Known-bad files are excluded by -X as some of them would produce unstable
 outputs, which should be fixed later.
 
-  $ hg locate 'mercurial/**.py' 'hgext/**.py' 'tests/**.py' \
+  $ hg locate 'set:**.py or grep(r"^#!.*?python")' \
   > 'tests/**.t' \
+  > -X contrib/debugshell.py \
+  > -X contrib/win32/hgwebdir_wsgi.py \
+  > -X doc/gendoc.py \
+  > -X doc/hgmanpage.py \
+  > -X i18n/posplit \
   > -X tests/test-hgweb-auth.py \
   > -X tests/hypothesishelpers.py \
   > -X tests/test-ctxmanager.py \
@@ -162,5 +179,3 @@
   > -X tests/test-hgweb-no-request-uri.t \
   > -X tests/test-hgweb-non-interactive.t \
   > | sed 's-\\-/-g' | python "$import_checker" -
-  Import cycle: hgext.largefiles.basestore -> hgext.largefiles.localstore -> hgext.largefiles.basestore
-  [1]
--- a/tests/test-check-py3-compat.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-check-py3-compat.t	Mon Jul 18 23:28:14 2016 -0500
@@ -1,176 +1,175 @@
 #require test-repo
 
+  $ . "$TESTDIR/helpers-testrepo.sh"
   $ cd "$TESTDIR"/..
 
   $ hg files 'set:(**.py)' | sed 's|\\|/|g' | xargs python contrib/check-py3-compat.py
-  hgext/fetch.py not using absolute_import
   hgext/fsmonitor/pywatchman/__init__.py not using absolute_import
   hgext/fsmonitor/pywatchman/__init__.py requires print_function
   hgext/fsmonitor/pywatchman/capabilities.py not using absolute_import
   hgext/fsmonitor/pywatchman/pybser.py not using absolute_import
-  hgext/gpg.py not using absolute_import
-  hgext/graphlog.py not using absolute_import
-  hgext/hgcia.py not using absolute_import
-  hgext/hgk.py not using absolute_import
-  hgext/highlight/__init__.py not using absolute_import
-  hgext/highlight/highlight.py not using absolute_import
-  hgext/histedit.py not using absolute_import
-  hgext/largefiles/__init__.py not using absolute_import
-  hgext/largefiles/basestore.py not using absolute_import
-  hgext/largefiles/lfcommands.py not using absolute_import
-  hgext/largefiles/lfutil.py not using absolute_import
-  hgext/largefiles/localstore.py not using absolute_import
-  hgext/largefiles/overrides.py not using absolute_import
-  hgext/largefiles/proto.py not using absolute_import
-  hgext/largefiles/remotestore.py not using absolute_import
-  hgext/largefiles/reposetup.py not using absolute_import
-  hgext/largefiles/uisetup.py not using absolute_import
-  hgext/largefiles/wirestore.py not using absolute_import
-  hgext/mq.py not using absolute_import
-  hgext/rebase.py not using absolute_import
-  hgext/share.py not using absolute_import
-  hgext/win32text.py not using absolute_import
   i18n/check-translation.py not using absolute_import
-  i18n/polib.py not using absolute_import
   setup.py not using absolute_import
-  tests/heredoctest.py requires print_function
-  tests/md5sum.py not using absolute_import
-  tests/readlink.py not using absolute_import
-  tests/readlink.py requires print_function
-  tests/run-tests.py not using absolute_import
-  tests/svn-safe-append.py not using absolute_import
-  tests/test-atomictempfile.py not using absolute_import
   tests/test-demandimport.py not using absolute_import
 
 #if py3exe
   $ hg files 'set:(**.py)' | sed 's|\\|/|g' | xargs $PYTHON3 contrib/check-py3-compat.py
-  contrib/check-code.py: invalid syntax: (unicode error) 'unicodeescape' codec can't decode bytes in position *-*: malformed \N character escape (<unknown>, line *) (glob)
   doc/hgmanpage.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
-  hgext/automv.py: error importing module: <SyntaxError> invalid syntax (commands.py, line *) (line *) (glob)
-  hgext/blackbox.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  hgext/bugzilla.py: error importing module: <ImportError> No module named 'urlparse' (line *) (glob)
-  hgext/censor.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  hgext/chgserver.py: error importing module: <ImportError> No module named 'SocketServer' (line *) (glob)
-  hgext/children.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  hgext/churn.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  hgext/clonebundles.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
+  hgext/acl.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/automv.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/blackbox.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/bugzilla.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/censor.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/chgserver.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/children.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/churn.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/clonebundles.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
   hgext/color.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
-  hgext/convert/bzr.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
-  hgext/convert/common.py: error importing module: <ImportError> No module named 'cPickle' (line *) (glob)
-  hgext/convert/convcmd.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at bundlerepo.py:*) (glob)
-  hgext/convert/cvs.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
-  hgext/convert/cvsps.py: error importing module: <ImportError> No module named 'cPickle' (line *) (glob)
-  hgext/convert/darcs.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
-  hgext/convert/filemap.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
-  hgext/convert/git.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
-  hgext/convert/gnuarch.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
-  hgext/convert/hg.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  hgext/convert/monotone.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
-  hgext/convert/p*.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
-  hgext/convert/subversion.py: error importing module: <ImportError> No module named 'cPickle' (line *) (glob)
+  hgext/convert/bzr.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/convert/common.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/convert/convcmd.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/convert/cvs.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/convert/cvsps.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/convert/darcs.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/convert/filemap.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/convert/git.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/convert/gnuarch.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/convert/hg.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/convert/monotone.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/convert/p4.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/convert/subversion.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
   hgext/convert/transport.py: error importing module: <ImportError> No module named 'svn.client' (line *) (glob)
-  hgext/eol.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  hgext/extdiff.py: error importing module: <SyntaxError> invalid syntax (archival.py, line *) (line *) (glob)
-  hgext/factotum.py: error importing: <ImportError> No module named 'httplib' (error at __init__.py:*) (glob)
-  hgext/fetch.py: error importing module: <SyntaxError> invalid syntax (commands.py, line *) (line *) (glob)
-  hgext/fsmonitor/watchmanclient.py: error importing module: <SystemError> Parent module 'hgext.fsmonitor' not loaded, cannot perform relative import (line *) (glob)
-  hgext/gpg.py: error importing module: <SyntaxError> invalid syntax (commands.py, line *) (line *) (glob)
-  hgext/graphlog.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  hgext/hgcia.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  hgext/hgk.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  hgext/histedit.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
-  hgext/keyword.py: error importing: <ImportError> No module named 'BaseHTTPServer' (error at common.py:*) (glob)
-  hgext/largefiles/basestore.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at bundlerepo.py:*) (glob)
-  hgext/largefiles/lfcommands.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at bundlerepo.py:*) (glob)
-  hgext/largefiles/lfutil.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  hgext/largefiles/localstore.py: error importing module: <ImportError> No module named 'lfutil' (line *) (glob)
-  hgext/largefiles/overrides.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at bundlerepo.py:*) (glob)
-  hgext/largefiles/proto.py: error importing: <ImportError> No module named 'httplib' (error at httppeer.py:*) (glob)
-  hgext/largefiles/remotestore.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at wireproto.py:*) (glob)
-  hgext/largefiles/reposetup.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  hgext/largefiles/uisetup.py: error importing module: <SyntaxError> invalid syntax (archival.py, line *) (line *) (glob)
-  hgext/largefiles/wirestore.py: error importing module: <ImportError> No module named 'lfutil' (line *) (glob)
-  hgext/mq.py: error importing module: <SyntaxError> invalid syntax (commands.py, line *) (line *) (glob)
-  hgext/notify.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  hgext/pager.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  hgext/patchbomb.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  hgext/purge.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  hgext/rebase.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at bundlerepo.py:*) (glob)
-  hgext/record.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  hgext/relink.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  hgext/schemes.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  hgext/share.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  hgext/shelve.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
-  hgext/strip.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  hgext/transplant.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at bundlerepo.py:*) (glob)
+  hgext/eol.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/extdiff.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/factotum.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/fetch.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/fsmonitor/state.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/fsmonitor/watchmanclient.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/gpg.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/graphlog.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/hgk.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/highlight/highlight.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/histedit.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/journal.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/keyword.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/largefiles/basestore.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/largefiles/lfcommands.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/largefiles/lfutil.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/largefiles/localstore.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/largefiles/overrides.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/largefiles/proto.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/largefiles/remotestore.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/largefiles/reposetup.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/largefiles/storefactory.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/largefiles/uisetup.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/largefiles/wirestore.py: error importing module: <SystemError> Parent module 'hgext.largefiles' not loaded, cannot perform relative import (line *) (glob)
+  hgext/mq.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/notify.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/pager.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/patchbomb.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/purge.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/rebase.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/record.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/relink.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/schemes.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/share.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/shelve.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/strip.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/transplant.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/win32mbcs.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  hgext/win32text.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
   mercurial/archival.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
-  mercurial/branchmap.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/bundle*.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
-  mercurial/bundlerepo.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
-  mercurial/changegroup.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/changelog.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/cmdutil.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
+  mercurial/bookmarks.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  mercurial/branchmap.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  mercurial/bundle2.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
+  mercurial/bundlerepo.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  mercurial/byterange.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  mercurial/changegroup.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  mercurial/changelog.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  mercurial/cmdutil.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
   mercurial/commands.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
-  mercurial/commandserver.py: error importing module: <ImportError> No module named 'SocketServer' (line *) (glob)
-  mercurial/context.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/copies.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/crecord.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/dirstate.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/discovery.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/dispatch.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/exchange.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
-  mercurial/extensions.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/filelog.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/filemerge.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/fileset.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/formatter.py: error importing module: <ImportError> No module named 'cPickle' (line *) (glob)
-  mercurial/graphmod.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/help.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/hg.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at bundlerepo.py:*) (glob)
-  mercurial/hgweb/common.py: error importing module: <ImportError> No module named 'BaseHTTPServer' (line *) (glob)
+  mercurial/commandserver.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  mercurial/config.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  mercurial/context.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  mercurial/copies.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  mercurial/crecord.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  mercurial/dagparser.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  mercurial/dagutil.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  mercurial/destutil.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  mercurial/dirstate.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  mercurial/discovery.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  mercurial/dispatch.py: error importing: <TypeError> str expected, not bytes (error at encoding.py:*) (glob)
+  mercurial/exchange.py: error importing: <TypeError> str expected, not bytes (error at i18n.py:*) (glob)
+  mercurial/extensions.py: error importing: <TypeError> str expected, not bytes (error at i18n.py:*) (glob)
+  mercurial/fancyopts.py: error importing: <TypeError> str expected, not bytes (error at i18n.py:*) (glob)
+  mercurial/filelog.py: error importing: <TypeError> str expected, not bytes (error at i18n.py:*) (glob)
+  mercurial/filemerge.py: error importing: <TypeError> str expected, not bytes (error at i18n.py:*) (glob)
+  mercurial/fileset.py: error importing: <TypeError> str expected, not bytes (error at i18n.py:*) (glob)
+  mercurial/formatter.py: error importing: <TypeError> str expected, not bytes (error at i18n.py:*) (glob)
+  mercurial/graphmod.py: error importing: <TypeError> str expected, not bytes (error at i18n.py:*) (glob)
+  mercurial/hbisect.py: error importing: <TypeError> str expected, not bytes (error at i18n.py:*) (glob)
+  mercurial/help.py: error importing: <TypeError> str expected, not bytes (error at i18n.py:*) (glob)
+  mercurial/hg.py: error importing: <TypeError> str expected, not bytes (error at i18n.py:*) (glob)
+  mercurial/hgweb/common.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
   mercurial/hgweb/hgweb_mod.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
   mercurial/hgweb/hgwebdir_mod.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
   mercurial/hgweb/protocol.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
   mercurial/hgweb/request.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
-  mercurial/hgweb/server.py: error importing module: <ImportError> No module named 'BaseHTTPServer' (line *) (glob)
+  mercurial/hgweb/server.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
   mercurial/hgweb/webcommands.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
   mercurial/hgweb/webutil.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
   mercurial/hgweb/wsgicgi.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
-  mercurial/hook.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/httpclient/_readers.py: error importing module: <ImportError> No module named 'httplib' (line *) (glob)
-  mercurial/httpconnection.py: error importing: <ImportError> No module named 'httplib' (error at __init__.py:*) (glob)
-  mercurial/httppeer.py: error importing module: <ImportError> No module named 'httplib' (line *) (glob)
-  mercurial/keepalive.py: error importing module: <ImportError> No module named 'httplib' (line *) (glob)
-  mercurial/localrepo.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/mail.py: error importing module: <AttributeError> module 'email' has no attribute 'Header' (line *) (glob)
-  mercurial/manifest.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/merge.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/namespaces.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/patch.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/pure/mpatch.py: error importing module: <ImportError> cannot import name 'pycompat' (line *) (glob)
-  mercurial/pure/parsers.py: error importing module: <ImportError> No module named 'mercurial.pure.node' (line *) (glob)
-  mercurial/repair.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
-  mercurial/revlog.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/revset.py: error importing module: <AttributeError> 'dict' object has no attribute 'iteritems' (line *) (glob)
-  mercurial/scmutil.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
+  mercurial/hook.py: error importing: <TypeError> str expected, not bytes (error at i18n.py:*) (glob)
+  mercurial/httpconnection.py: error importing: <TypeError> str expected, not bytes (error at i18n.py:*) (glob)
+  mercurial/httppeer.py: error importing: <TypeError> str expected, not bytes (error at i18n.py:*) (glob)
+  mercurial/keepalive.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/localrepo.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/lock.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/mail.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/manifest.py: error importing: <TypeError> getattr(): attribute name must be string (error at pycompat.py:*) (glob)
+  mercurial/match.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/mdiff.py: error importing: <TypeError> getattr(): attribute name must be string (error at pycompat.py:*) (glob)
+  mercurial/merge.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/minirst.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/namespaces.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/obsolete.py: error importing: <TypeError> getattr(): attribute name must be string (error at pycompat.py:*) (glob)
+  mercurial/patch.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/pathutil.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/peer.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/pure/mpatch.py: error importing module: <AttributeError> 'VendorImporter' object has no attribute 'find_spec' (line *) (glob)
+  mercurial/pure/parsers.py: error importing module: <AttributeError> 'VendorImporter' object has no attribute 'find_spec' (line *) (glob)
+  mercurial/pushkey.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/pvec.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/registrar.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/repair.py: error importing module: <SyntaxError> invalid syntax (bundle2.py, line *) (line *) (glob)
+  mercurial/repoview.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/revlog.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/revset.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/scmposix.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/scmutil.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
   mercurial/scmwindows.py: error importing module: <ImportError> No module named '_winreg' (line *) (glob)
-  mercurial/simplemerge.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/sshpeer.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at wireproto.py:*) (glob)
-  mercurial/sshserver.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/statichttprepo.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/store.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/streamclone.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/subrepo.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/templatefilters.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/templatekw.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/templater.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/ui.py: error importing: <ImportError> No module named 'cPickle' (error at formatter.py:*) (glob)
-  mercurial/unionrepo.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/url.py: error importing module: <ImportError> No module named 'httplib' (line *) (glob)
-  mercurial/verify.py: error importing: <AttributeError> 'dict' object has no attribute 'iteritems' (error at revset.py:*) (glob)
-  mercurial/win*.py: error importing module: <ImportError> No module named 'msvcrt' (line *) (glob)
+  mercurial/similar.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/simplemerge.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/sshpeer.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/sshserver.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/sslutil.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/statichttprepo.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/store.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/streamclone.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/subrepo.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/tagmerge.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/tags.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/templatefilters.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/templatekw.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/templater.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/transaction.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/ui.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/unionrepo.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/url.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/util.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/verify.py: error importing: <TypeError> '_fields_' must be a sequence of (name, C type) pairs (error at osutil.py:*) (glob)
+  mercurial/win32.py: error importing module: <ImportError> No module named 'msvcrt' (line *) (glob)
   mercurial/windows.py: error importing module: <ImportError> No module named '_winreg' (line *) (glob)
-  mercurial/wireproto.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
-  tests/readlink.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
+  mercurial/wireproto.py: error importing module: <SyntaxError> invalid syntax (bundle2.py, line *) (line *) (glob)
 
 #endif
--- a/tests/test-check-pyflakes.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-check-pyflakes.t	Mon Jul 18 23:28:14 2016 -0500
@@ -1,11 +1,14 @@
-#require test-repo pyflakes
+#require test-repo pyflakes hg10
 
+  $ . "$TESTDIR/helpers-testrepo.sh"
   $ cd "`dirname "$TESTDIR"`"
 
 run pyflakes on all tracked files ending in .py or without a file ending
 (skipping binary file random-seed)
 
-  $ hg locate 'set:**.py or grep("^!#.*python")' 2>/dev/null \
+  $ hg locate 'set:**.py or grep("^#!.*python")' \
+  > -X mercurial/pycompat.py \
+  > 2>/dev/null \
   > | xargs pyflakes 2>/dev/null | "$TESTDIR/filterpyflakes.py"
   tests/filterpyflakes.py:61: undefined name 'undefinedname'
   
--- a/tests/test-check-shbang.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-check-shbang.t	Mon Jul 18 23:28:14 2016 -0500
@@ -1,13 +1,14 @@
 #require test-repo
 
+  $ . "$TESTDIR/helpers-testrepo.sh"
   $ cd "`dirname "$TESTDIR"`"
 
 look for python scripts that do not use /usr/bin/env
 
-  $ hg files 'set:grep(r"^#!.*?python") and not grep(r"^#!/usr/bin/env python")'
+  $ hg files 'set:grep(r"^#!.*?python") and not grep(r"^#!/usr/bi{1}n/env python")'
   [1]
 
 look for shell scripts that do not use /bin/sh
 
-  $ hg files 'set:grep(r"^#!.*/bin/sh") and not grep(r"^#!/bin/sh")'
+  $ hg files 'set:grep(r"^#!.*/bi{1}n/sh") and not grep(r"^#!/bi{1}n/sh")'
   [1]
--- a/tests/test-chg.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-chg.t	Mon Jul 18 23:28:14 2016 -0500
@@ -1,12 +1,86 @@
+#require chg
+
+  $ cp $HGRCPATH $HGRCPATH.orig
+
 init repo
 
-  $ hg init foo
+  $ chg init foo
   $ cd foo
 
 ill-formed config
 
-  $ hg status
+  $ chg status
   $ echo '=brokenconfig' >> $HGRCPATH
-  $ hg status
+  $ chg status
   hg: parse error at * (glob)
   [255]
+
+  $ cp $HGRCPATH.orig $HGRCPATH
+  $ cd ..
+
+server lifecycle
+----------------
+
+chg server should be restarted on code change, and old server will shut down
+automatically. In this test, we use the following time parameters:
+
+ - "sleep 1" to make mtime different
+ - "sleep 2" to notice mtime change (polling interval is 1 sec)
+
+set up repository with an extension:
+
+  $ chg init extreload
+  $ cd extreload
+  $ touch dummyext.py
+  $ cat <<EOF >> .hg/hgrc
+  > [extensions]
+  > dummyext = dummyext.py
+  > EOF
+
+isolate socket directory for stable result:
+
+  $ OLDCHGSOCKNAME=$CHGSOCKNAME
+  $ mkdir chgsock
+  $ CHGSOCKNAME=`pwd`/chgsock/server
+
+warm up server:
+
+  $ CHGDEBUG= chg log 2>&1 | egrep 'instruction|start'
+  chg: debug: start cmdserver at $TESTTMP/extreload/chgsock/server
+
+new server should be started if extension modified:
+
+  $ sleep 1
+  $ touch dummyext.py
+  $ CHGDEBUG= chg log 2>&1 | egrep 'instruction|start'
+  chg: debug: instruction: unlink $TESTTMP/extreload/chgsock/server-* (glob)
+  chg: debug: instruction: reconnect
+  chg: debug: start cmdserver at $TESTTMP/extreload/chgsock/server
+
+old server will shut down, while new server should still be reachable:
+
+  $ sleep 2
+  $ CHGDEBUG= chg log 2>&1 | (egrep 'instruction|start' || true)
+
+the socket file should never be unlinked by the old server:
+(we simulate an unowned socket by updating its mtime, which makes sure the old
+server exits at its next polling cycle)
+
+  $ ls chgsock/server-*
+  chgsock/server-* (glob)
+  $ touch chgsock/server-*
+  $ sleep 2
+  $ ls chgsock/server-*
+  chgsock/server-* (glob)
+
+since no server is reachable from socket file, new server should be started:
+(this test makes sure that old server shut down automatically)
+
+  $ CHGDEBUG= chg log 2>&1 | egrep 'instruction|start'
+  chg: debug: start cmdserver at $TESTTMP/extreload/chgsock/server
+
+shut down servers and restore environment:
+
+  $ rm -R chgsock
+  $ CHGSOCKNAME=$OLDCHGSOCKNAME
+  $ cd ..
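
The lifecycle exercised above relies on the server polling the extension's
mtime roughly once per second. As a rough illustration only (not chg's actual
code; the function name and interval here are made up), the detection amounts
to:

    import os
    import time

    def wait_for_change(path, interval=1.0):
        """Block until the file's mtime changes, then return the new mtime.
        A real server would restart itself at this point."""
        last = os.stat(path).st_mtime
        while True:
            time.sleep(interval)
            current = os.stat(path).st_mtime
            if current != last:
                return current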
--- a/tests/test-clonebundles.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-clonebundles.t	Mon Jul 18 23:28:14 2016 -0500
@@ -156,33 +156,34 @@
 by old clients.
 
   $ f --size --hexdump full.hg
-  full.hg: size=406
+  full.hg: size=418
   0000: 48 47 32 30 00 00 00 0e 43 6f 6d 70 72 65 73 73 |HG20....Compress|
-  0010: 69 6f 6e 3d 47 5a 78 9c 63 60 60 90 e5 76 f6 70 |ion=GZx.c``..v.p|
-  0020: f4 73 77 75 0f f2 0f 0d 60 00 02 46 06 76 a6 b2 |.swu....`..F.v..|
-  0030: d4 a2 e2 cc fc 3c 03 23 06 06 e6 65 40 b1 4d c1 |.....<.#...e@.M.|
-  0040: 2a 31 09 cf 9a 3a 52 04 b7 fc db f0 95 e5 a4 f4 |*1...:R.........|
-  0050: 97 17 b2 c9 0c 14 00 02 e6 d9 99 25 1a a7 a4 99 |...........%....|
-  0060: a4 a4 1a 5b 58 a4 19 27 9b a4 59 a4 1a 59 a4 99 |...[X..'..Y..Y..|
-  0070: a4 59 26 5a 18 9a 18 59 5a 26 1a 27 27 25 99 a6 |.Y&Z...YZ&.''%..|
-  0080: 99 1a 70 95 a4 16 97 70 19 28 18 70 a5 e5 e7 73 |..p....p.(.p...s|
-  0090: 71 25 a6 a4 28 00 19 40 13 0e ac fa df ab ff 7b |q%..(..@.......{|
-  00a0: 3f fb 92 dc 8b 1f 62 bb 9e b7 d7 d9 87 3d 5a 44 |?.....b......=ZD|
-  00b0: ac 2f b0 a9 c3 66 1e 54 b9 26 08 a7 1a 1b 1a a7 |./...f.T.&......|
-  00c0: 25 1b 9a 1b 99 19 9a 5a 18 9b a6 18 19 00 dd 67 |%......Z.......g|
-  00d0: 61 61 98 06 f4 80 49 4a 8a 65 52 92 41 9a 81 81 |aa....IJ.eR.A...|
-  00e0: a5 11 17 50 31 30 58 19 cc 80 98 25 29 b1 08 c4 |...P10X....%)...|
-  00f0: 37 07 79 19 88 d9 41 ee 07 8a 41 cd 5d 98 65 fb |7.y...A...A.].e.|
-  0100: e5 9e 45 bf 8d 7f 9f c6 97 9f 2b 44 34 67 d9 ec |..E.......+D4g..|
-  0110: 8e 0f a0 61 a8 eb 82 82 2e c9 c2 20 25 d5 34 c5 |...a....... %.4.|
-  0120: d0 d8 c2 dc d4 c2 d4 c4 30 d9 34 cd c0 d4 c8 cc |........0.4.....|
-  0130: 34 31 c5 d0 c4 24 31 c9 32 2d d1 c2 2c c5 30 25 |41...$1.2-..,.0%|
-  0140: 09 e4 ee 85 8f 85 ff 88 ab 89 36 c7 2a c4 47 34 |..........6.*.G4|
-  0150: fe f8 ec 7b 73 37 3f c3 24 62 1d 8d 4d 1d 9e 40 |...{s7?.$b..M..@|
-  0160: 06 3b 10 14 36 a4 38 10 04 d8 21 01 9a b1 83 f7 |.;..6.8...!.....|
-  0170: e9 45 8b d2 56 c7 a3 1f 82 52 d7 8a 78 ed fc d5 |.E..V....R..x...|
-  0180: 76 f1 36 25 81 89 c7 ad ec 90 34 48 75 2b 89 49 |v.6%......4Hu+.I|
-  0190: bf 00 d6 97 f0 8d                               |......|
+  0010: 69 6f 6e 3d 47 5a 78 9c 63 60 60 d0 e4 76 f6 70 |ion=GZx.c``..v.p|
+  0020: f4 73 77 75 0f f2 0f 0d 60 00 02 46 46 76 26 4e |.swu....`..FFv&N|
+  0030: c6 b2 d4 a2 e2 cc fc 3c 03 a3 bc a4 e4 8c c4 bc |.......<........|
+  0040: f4 d4 62 23 06 06 e6 65 40 f9 4d c1 2a 31 09 cf |..b#...e@.M.*1..|
+  0050: 9a 3a 52 04 b7 fc db f0 95 e5 a4 f4 97 17 b2 c9 |.:R.............|
+  0060: 0c 14 00 02 e6 d9 99 25 1a a7 a4 99 a4 a4 1a 5b |.......%.......[|
+  0070: 58 a4 19 27 9b a4 59 a4 1a 59 a4 99 a4 59 26 5a |X..'..Y..Y...Y&Z|
+  0080: 18 9a 18 59 5a 26 1a 27 27 25 99 a6 99 1a 70 95 |...YZ&.''%....p.|
+  0090: a4 16 97 70 19 28 18 70 a5 e5 e7 73 71 25 a6 a4 |...p.(.p...sq%..|
+  00a0: 28 00 19 40 13 0e ac fa df ab ff 7b 3f fb 92 dc |(..@.......{?...|
+  00b0: 8b 1f 62 bb 9e b7 d7 d9 87 3d 5a 44 ac 2f b0 a9 |..b......=ZD./..|
+  00c0: c3 66 1e 54 b9 26 08 a7 1a 1b 1a a7 25 1b 9a 1b |.f.T.&......%...|
+  00d0: 99 19 9a 5a 18 9b a6 18 19 00 dd 67 61 61 98 06 |...Z.......gaa..|
+  00e0: f4 80 49 4a 8a 65 52 92 41 9a 81 81 a5 11 17 50 |..IJ.eR.A......P|
+  00f0: 31 30 58 19 cc 80 98 25 29 b1 08 c4 37 07 79 19 |10X....%)...7.y.|
+  0100: 88 d9 41 ee 07 8a 41 cd 5d 98 65 fb e5 9e 45 bf |..A...A.].e...E.|
+  0110: 8d 7f 9f c6 97 9f 2b 44 34 67 d9 ec 8e 0f a0 61 |......+D4g.....a|
+  0120: a8 eb 82 82 2e c9 c2 20 25 d5 34 c5 d0 d8 c2 dc |....... %.4.....|
+  0130: d4 c2 d4 c4 30 d9 34 cd c0 d4 c8 cc 34 31 c5 d0 |....0.4.....41..|
+  0140: c4 24 31 c9 32 2d d1 c2 2c c5 30 25 09 e4 ee 85 |.$1.2-..,.0%....|
+  0150: 8f 85 ff 88 ab 89 36 c7 2a c4 47 34 fe f8 ec 7b |......6.*.G4...{|
+  0160: 73 37 3f c3 24 62 1d 8d 4d 1d 9e 40 06 3b 10 14 |s7?.$b..M..@.;..|
+  0170: 36 a4 38 10 04 d8 21 01 9a b1 83 f7 e9 45 8b d2 |6.8...!......E..|
+  0180: 56 c7 a3 1f 82 52 d7 8a 78 ed fc d5 76 f1 36 25 |V....R..x...v.6%|
+  0190: 81 89 c7 ad ec 90 34 48 75 2b 89 49 bf 00 cf 72 |......4Hu+.I...r|
+  01a0: f4 7f                                           |..|
 
   $ echo "http://localhost:$HGPORT1/full.hg" > server/.hg/clonebundles.manifest
   $ hg clone -U http://localhost:$HGPORT full-bundle
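
The hexdump above opens with the bundle2 magic "HG20", a 32-bit big-endian
length (0x0000000e = 14), and the 14-byte stream parameter string
"Compression=GZ". A small Python sketch that peeks at just that header,
assuming only the layout visible in the dump and a local file named full.hg:

    import struct

    with open('full.hg', 'rb') as f:
        magic = f.read(4)                         # b'HG20'
        (plen,) = struct.unpack('>I', f.read(4))  # stream parameter length
        params = f.read(plen)                     # b'Compression=GZ'
    print(magic, params)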
--- a/tests/test-command-template.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-command-template.t	Mon Jul 18 23:28:14 2016 -0500
@@ -3320,6 +3320,15 @@
   hg: parse error: pad() expects an integer width
   [255]
 
+Test separate function
+
+  $ hg log -r 0 -T '{separate("-", "", "a", "b", "", "", "c", "")}\n'
+  a-b-c
+  $ hg log -r 0 -T '{separate(" ", "{rev}:{node|short}", author|user, branch)}\n'
+  0:f7769ec2ab97 test default
+  $ hg log -r 0 --color=always -T '{separate(" ", "a", label(red, "b"), "c", label(red, ""), "d")}\n'
+  a \x1b[0;31mb\x1b[0m c d (esc)
+
 Test ifcontains function
 
   $ hg log --template '{rev} {ifcontains(rev, "2 two 0", "is in the string", "is not")}\n'
@@ -3768,10 +3777,10 @@
   $ hg debugtemplate --config templatealias.bad='x(' -v '{bad}'
   (template
     ('symbol', 'bad'))
-  abort: failed to parse the definition of template alias "bad": at 2: not a prefix: end
+  abort: bad definition of template alias "bad": at 2: not a prefix: end
   [255]
   $ hg log --config templatealias.bad='x(' -T '{bad}'
-  abort: failed to parse the definition of template alias "bad": at 2: not a prefix: end
+  abort: bad definition of template alias "bad": at 2: not a prefix: end
   [255]
 
   $ cd ..
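
The separate() template function tested above joins its non-empty arguments
with the given separator and silently drops empty ones, such as the label
that expands to nothing. A minimal Python sketch of that joining behaviour
(not the templater's real implementation, which operates on template
mappings):

    def separate(sep, *args):
        """Join non-empty arguments with sep, skipping empty strings."""
        return sep.join(a for a in args if a)

    assert separate("-", "", "a", "b", "", "", "c", "") == "a-b-c"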
--- a/tests/test-commandserver.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-commandserver.t	Mon Jul 18 23:28:14 2016 -0500
@@ -13,11 +13,12 @@
   $ hg init repo
   $ cd repo
 
+  >>> from __future__ import print_function
   >>> from hgclient import readchannel, runcommand, check
   >>> @check
   ... def hellomessage(server):
   ...     ch, data = readchannel(server)
-  ...     print '%c, %r' % (ch, data)
+  ...     print('%c, %r' % (ch, data))
   ...     # run an arbitrary command to make sure the next thing the server
   ...     # sends isn't part of the hello message
   ...     runcommand(server, ['id'])
@@ -99,7 +100,7 @@
   ...     server.stdin.close()
   ... 
   ...     # server exits with 1 if the pipe closed while reading the command
-  ...     print 'server exit code =', server.wait()
+  ...     print('server exit code =', server.wait())
   server exit code = 1
 
   >>> from hgclient import readchannel, runcommand, check, stringio
@@ -206,10 +207,11 @@
 #endif
 
   $ cat <<EOF > hook.py
+  > from __future__ import print_function
   > import sys
   > def hook(**args):
-  >     print 'hook talking'
-  >     print 'now try to read something: %r' % sys.stdin.read()
+  >     print('hook talking')
+  >     print('now try to read something: %r' % sys.stdin.read())
   > EOF
 
   >>> from hgclient import readchannel, runcommand, check, stringio
@@ -610,18 +612,19 @@
 
 run commandserver in commandserver, which is silly but should work:
 
+  >>> from __future__ import print_function
   >>> from hgclient import readchannel, runcommand, check, stringio
   >>> @check
   ... def nested(server):
-  ...     print '%c, %r' % readchannel(server)
+  ...     print('%c, %r' % readchannel(server))
   ...     class nestedserver(object):
   ...         stdin = stringio('getencoding\n')
   ...         stdout = stringio()
   ...     runcommand(server, ['serve', '--cmdserver', 'pipe'],
   ...                output=nestedserver.stdout, input=nestedserver.stdin)
   ...     nestedserver.stdout.seek(0)
-  ...     print '%c, %r' % readchannel(nestedserver)  # hello
-  ...     print '%c, %r' % readchannel(nestedserver)  # getencoding
+  ...     print('%c, %r' % readchannel(nestedserver))  # hello
+  ...     print('%c, %r' % readchannel(nestedserver))  # getencoding
   o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob)
   *** runcommand serve --cmdserver pipe
   o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob)
@@ -632,11 +635,12 @@
 
   $ cd ..
 
+  >>> from __future__ import print_function
   >>> from hgclient import readchannel, runcommand, check
   >>> @check
   ... def hellomessage(server):
   ...     ch, data = readchannel(server)
-  ...     print '%c, %r' % (ch, data)
+  ...     print('%c, %r' % (ch, data))
   ...     # run an arbitrary command to make sure the next thing the server
   ...     # sends isn't part of the hello message
   ...     runcommand(server, ['id'])
@@ -672,11 +676,12 @@
 
 #if unix-socket unix-permissions
 
+  >>> from __future__ import print_function
   >>> from hgclient import unixserver, readchannel, runcommand, check, stringio
   >>> server = unixserver('.hg/server.sock', '.hg/server.log')
   >>> def hellomessage(conn):
   ...     ch, data = readchannel(conn)
-  ...     print '%c, %r' % (ch, data)
+  ...     print('%c, %r' % (ch, data))
   ...     runcommand(conn, ['id'])
   >>> check(hellomessage, server.connect)
   o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob)
@@ -723,6 +728,7 @@
   > [cmdserver]
   > log = inexistent/path.log
   > EOF
+  >>> from __future__ import print_function
   >>> from hgclient import unixserver, readchannel, check
   >>> server = unixserver('.hg/server.sock', '.hg/server.log')
   >>> def earlycrash(conn):
@@ -730,7 +736,7 @@
   ...         try:
   ...             ch, data = readchannel(conn)
   ...             if not data.startswith('  '):
-  ...                 print '%c, %r' % (ch, data)
+  ...                 print('%c, %r' % (ch, data))
   ...         except EOFError:
   ...             break
   >>> check(earlycrash, server.connect)
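
The hgclient.readchannel() helper used throughout this test reads one frame
from the command server. A rough sketch of that read, assuming the documented
framing (a one-byte channel identifier, a 32-bit big-endian payload length,
then the payload) rather than the helper's actual source:

    import struct

    def readframe(pipe):
        """Read one output frame; returns (channel, data)."""
        ch = pipe.read(1)
        if not ch:
            raise EOFError('server closed the pipe')
        (length,) = struct.unpack('>I', pipe.read(4))
        return ch, pipe.read(length)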
--- a/tests/test-config.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-config.t	Mon Jul 18 23:28:14 2016 -0500
@@ -90,3 +90,15 @@
 
   $ hg config Section.idontexist
   [1]
+
+sub-options in [paths] aren't expanded
+
+  $ cat > .hg/hgrc << EOF
+  > [paths]
+  > foo = ~/foo
+  > foo:suboption = ~/foo
+  > EOF
+
+  $ hg showconfig paths
+  paths.foo:suboption=~/foo
+  paths.foo=$TESTTMP/foo
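
The output shows that the main path value gets "~" expanded while the
":suboption" entry is left verbatim. A hypothetical helper illustrating how
such a key splits into a path name and a sub-option (illustrative only, not
Mercurial's ui.paths code):

    def splitpathkey(key):
        """Split a [paths] key into (name, suboption); suboption is None
        for a plain path entry."""
        if ':' in key:
            name, sub = key.split(':', 1)
            return name, sub
        return key, None

    assert splitpathkey('foo') == ('foo', None)
    assert splitpathkey('foo:suboption') == ('foo', 'suboption')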
--- a/tests/test-context.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-context.py	Mon Jul 18 23:28:14 2016 -0500
@@ -14,7 +14,7 @@
 
 # create 'foo' with fixed time stamp
 f = open('foo', 'wb')
-f.write('foo\n')
+f.write(b'foo\n')
 f.close()
 os.utime('foo', (1000, 1000))
 
--- a/tests/test-contrib-check-code.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-contrib-check-code.t	Mon Jul 18 23:28:14 2016 -0500
@@ -248,3 +248,64 @@
    > {desc|escape}
    warning: follow desc keyword with either firstline or websub
   [1]
+
+'string join across lines with no space' detection
+
+  $ cat > stringjoin.py <<EOF
+  > foo = (' foo'
+  >        'bar foo.'
+  >        'bar foo:'
+  >        'bar foo@'
+  >        'bar foo%'
+  >        'bar foo*'
+  >        'bar foo+'
+  >        'bar foo-'
+  >        'bar')
+  > EOF
+
+'missing _() in ui message' detection
+
+  $ cat > uigettext.py <<EOF
+  > ui.status("% 10s %05d % -3.2f %*s %%"
+  >           # this use '\\\\' instead of '\\', because the latter in
+  >           # heredoc on shell becomes just '\'
+  >           '\\\\ \n \t \0'
+  >           """12345
+  >           """
+  >           '''.:*+-=
+  >           ''' "%-6d \n 123456 .:*+-= foobar")
+  > EOF
+
+(Checking multiple invalid files at once examines whether caching the
+translation table for repquote() works as expected. In this case, every file
+should break at least one rule that depends on the result of repquote())
+
+  $ "$check_code" stringjoin.py uigettext.py
+  stringjoin.py:1:
+   > foo = (' foo'
+   string join across lines with no space
+  stringjoin.py:2:
+   >        'bar foo.'
+   string join across lines with no space
+  stringjoin.py:3:
+   >        'bar foo:'
+   string join across lines with no space
+  stringjoin.py:4:
+   >        'bar foo@'
+   string join across lines with no space
+  stringjoin.py:5:
+   >        'bar foo%'
+   string join across lines with no space
+  stringjoin.py:6:
+   >        'bar foo*'
+   string join across lines with no space
+  stringjoin.py:7:
+   >        'bar foo+'
+   string join across lines with no space
+  stringjoin.py:8:
+   >        'bar foo-'
+   string join across lines with no space
+  uigettext.py:1:
+   > ui.status("% 10s %05d % -3.2f %*s %%"
+   missing _() in ui message (use () to hide false-positives)
+  [1]
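
The "string join across lines with no space" rule exists because Python
concatenates adjacent string literals, so a missing trailing space silently
produces a run-on word. A tiny runnable reminder of that behaviour:

    s = (' foo'
         'bar')
    assert s == ' foobar'  # the two literals fuse with no space between them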
--- a/tests/test-contrib-perf.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-contrib-perf.t	Mon Jul 18 23:28:14 2016 -0500
@@ -2,6 +2,7 @@
 
 Set vars:
 
+  $ . "$TESTDIR/helpers-testrepo.sh"
   $ CONTRIBDIR="$TESTDIR/../contrib"
 
 Prepare repo:
@@ -147,3 +148,10 @@
   $ hg perfwalk
   $ hg perfparents
 
+Check perf.py for historical portability
+
+  $ cd "$TESTDIR/.."
+
+  $ (hg files -r 1.2 glob:mercurial/*.c glob:mercurial/*.py;
+  >  hg files -r tip glob:mercurial/*.c glob:mercurial/*.py) |
+  > "$TESTDIR"/check-perf-code.py contrib/perf.py
--- a/tests/test-convert.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-convert.t	Mon Jul 18 23:28:14 2016 -0500
@@ -422,7 +422,7 @@
   assuming destination emptydir-hg
   initializing destination emptydir-hg repository
   emptydir does not look like a CVS checkout
-  $TESTTMP/emptydir does not look like a Git repository
+  $TESTTMP/emptydir does not look like a Git repository (glob)
   emptydir does not look like a Subversion repository
   emptydir is not a local Mercurial repository
   emptydir does not look like a darcs repository
--- a/tests/test-debian-packages.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-debian-packages.t	Mon Jul 18 23:28:14 2016 -0500
@@ -1,5 +1,7 @@
 #require test-repo slow debhelper
 
+  $ . "$TESTDIR/helpers-testrepo.sh"
+
 Ensure debuild doesn't run the testsuite, as that could get silly.
   $ DEB_BUILD_OPTIONS=nocheck
   $ export DEB_BUILD_OPTIONS
--- a/tests/test-debugbundle.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-debugbundle.t	Mon Jul 18 23:28:14 2016 -0500
@@ -13,6 +13,13 @@
        282 (manifests)
         93  b
         93  c
+  $ hg bundle --base 0 --rev tip bundle2.hg -v --type none-v2
+  2 changesets found
+  uncompressed size of bundle content:
+       372 (changelog)
+       322 (manifests)
+       113  b
+       113  c
 
 Terse output:
 
@@ -20,6 +27,14 @@
   0e067c57feba1a5694ca4844f05588bb1bf82342
   991a3460af53952d10ec8a295d3d2cc2e5fa9690
 
+Terse output:
+
+  $ hg debugbundle bundle2.hg
+  Stream params: {}
+  changegroup -- "sortdict([('version', '02'), ('nbchanges', '2')])"
+      0e067c57feba1a5694ca4844f05588bb1bf82342
+      991a3460af53952d10ec8a295d3d2cc2e5fa9690
+
 Verbose output:
 
   $ hg debugbundle --all bundle.hg
@@ -39,4 +54,23 @@
   c
   b80de5d138758541c5f05265ad144ab9fa86d1db 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 991a3460af53952d10ec8a295d3d2cc2e5fa9690 0000000000000000000000000000000000000000 0
 
+  $ hg debugbundle --all bundle2.hg
+  Stream params: {}
+  changegroup -- "sortdict([('version', '02'), ('nbchanges', '2')])"
+      format: id, p1, p2, cset, delta base, len(delta)
+  
+      changelog
+      0e067c57feba1a5694ca4844f05588bb1bf82342 3903775176ed42b1458a6281db4a0ccf4d9f287a 0000000000000000000000000000000000000000 0e067c57feba1a5694ca4844f05588bb1bf82342 3903775176ed42b1458a6281db4a0ccf4d9f287a 80
+      991a3460af53952d10ec8a295d3d2cc2e5fa9690 0e067c57feba1a5694ca4844f05588bb1bf82342 0000000000000000000000000000000000000000 991a3460af53952d10ec8a295d3d2cc2e5fa9690 0e067c57feba1a5694ca4844f05588bb1bf82342 80
+  
+      manifest
+      686dbf0aeca417636fa26a9121c681eabbb15a20 8515d4bfda768e04af4c13a69a72e28c7effbea7 0000000000000000000000000000000000000000 0e067c57feba1a5694ca4844f05588bb1bf82342 8515d4bfda768e04af4c13a69a72e28c7effbea7 55
+      ae25a31b30b3490a981e7b96a3238cc69583fda1 686dbf0aeca417636fa26a9121c681eabbb15a20 0000000000000000000000000000000000000000 991a3460af53952d10ec8a295d3d2cc2e5fa9690 686dbf0aeca417636fa26a9121c681eabbb15a20 55
+  
+      b
+      b80de5d138758541c5f05265ad144ab9fa86d1db 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 0e067c57feba1a5694ca4844f05588bb1bf82342 0000000000000000000000000000000000000000 0
+  
+      c
+      b80de5d138758541c5f05265ad144ab9fa86d1db 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 991a3460af53952d10ec8a295d3d2cc2e5fa9690 0000000000000000000000000000000000000000 0
+
   $ cd ..
--- a/tests/test-default-push.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-default-push.t	Mon Jul 18 23:28:14 2016 -0500
@@ -105,4 +105,43 @@
   adding file changes
   added 1 changesets with 1 changes to 1 files
 
+:pushrev is used when no -r is passed
+
+  $ cat >> .hg/hgrc << EOF
+  > default:pushrev = .
+  > EOF
+  $ hg -q up -r 0
+  $ echo head1 > foo
+  $ hg -q commit -A -m head1
+  $ hg -q up -r 0
+  $ echo head2 > foo
+  $ hg -q commit -A -m head2
+  $ hg push -f
+  pushing to file:/*/$TESTTMP/pushurlsource/../pushurldest (glob)
+  searching for changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 1 changes to 1 files (+1 heads)
+
+  $ hg --config 'paths.default:pushrev=draft()' push -f
+  pushing to file:/*/$TESTTMP/pushurlsource/../pushurldest (glob)
+  searching for changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 1 changes to 1 files (+1 heads)
+
+Invalid :pushrev raises appropriately
+
+  $ hg --config 'paths.default:pushrev=notdefined()' push
+  pushing to file:/*/$TESTTMP/pushurlsource/../pushurldest (glob)
+  hg: parse error: unknown identifier: notdefined
+  [255]
+
+  $ hg --config 'paths.default:pushrev=(' push
+  pushing to file:/*/$TESTTMP/pushurlsource/../pushurldest (glob)
+  hg: parse error at 1: not a prefix: end
+  [255]
+
   $ cd ..
--- a/tests/test-devel-warnings.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-devel-warnings.t	Mon Jul 18 23:28:14 2016 -0500
@@ -10,14 +10,17 @@
   > 
   > @command('buggylocking', [], '')
   > def buggylocking(ui, repo):
-  >     tr = repo.transaction('buggy')
-  >     # make sure we rollback the transaction as we don't want to rely on the__del__
-  >     tr.release()
   >     lo = repo.lock()
   >     wl = repo.wlock()
   >     wl.release()
   >     lo.release()
   > 
+  > @command('buggytransaction', [], '')
+  > def buggylocking(ui, repo):
+  >     tr = repo.transaction('buggy')
+  >     # make sure we roll back the transaction as we don't want to rely on the __del__
+  >     tr.release()
+  > 
   > @command('properlocking', [], '')
   > def properlocking(ui, repo):
   >     """check that reentrance is fine"""
@@ -74,7 +77,6 @@
   $ hg init lock-checker
   $ cd lock-checker
   $ hg buggylocking
-  devel-warn: transaction with no lock at: $TESTTMP/buggylocking.py:* (buggylocking) (glob)
   devel-warn: "wlock" acquired after "lock" at: $TESTTMP/buggylocking.py:* (buggylocking) (glob)
   $ cat << EOF >> $HGRCPATH
   > [devel]
@@ -82,21 +84,8 @@
   > check-locks=1
   > EOF
   $ hg buggylocking
-  devel-warn: transaction with no lock at: $TESTTMP/buggylocking.py:* (buggylocking) (glob)
   devel-warn: "wlock" acquired after "lock" at: $TESTTMP/buggylocking.py:* (buggylocking) (glob)
   $ hg buggylocking --traceback
-  devel-warn: transaction with no lock at:
-   */hg:* in * (glob)
-   */mercurial/dispatch.py:* in run (glob)
-   */mercurial/dispatch.py:* in dispatch (glob)
-   */mercurial/dispatch.py:* in _runcatch (glob)
-   */mercurial/dispatch.py:* in _dispatch (glob)
-   */mercurial/dispatch.py:* in runcommand (glob)
-   */mercurial/dispatch.py:* in _runcommand (glob)
-   */mercurial/dispatch.py:* in checkargs (glob)
-   */mercurial/dispatch.py:* in <lambda> (glob)
-   */mercurial/util.py:* in check (glob)
-   $TESTTMP/buggylocking.py:* in buggylocking (glob)
   devel-warn: "wlock" acquired after "lock" at:
    */hg:* in * (glob)
    */mercurial/dispatch.py:* in run (glob)
@@ -122,7 +111,8 @@
   [255]
 
   $ hg log -r "oldstyle()" -T '{rev}\n'
-  devel-warn: revset "oldstyle" use list instead of smartset, (upgrade your code) at: */mercurial/revset.py:* (mfunc) (glob)
+  devel-warn: revset "oldstyle" uses list instead of smartset
+  (compatibility will be dropped after Mercurial-3.9, update your code.) at: *mercurial/revset.py:* (mfunc) (glob)
   0
   $ hg oldanddeprecated
   devel-warn: foorbar is deprecated, go shopping
@@ -143,7 +133,8 @@
    */mercurial/util.py:* in check (glob)
    $TESTTMP/buggylocking.py:* in oldanddeprecated (glob)
   $ hg blackbox -l 9
-  1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> devel-warn: revset "oldstyle" use list instead of smartset, (upgrade your code) at: */mercurial/revset.py:* (mfunc) (glob)
+  1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> devel-warn: revset "oldstyle" uses list instead of smartset
+  (compatibility will be dropped after Mercurial-3.9, update your code.) at: *mercurial/revset.py:* (mfunc) (glob)
   1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> log -r oldstyle() -T {rev}\n exited 0 after * seconds (glob)
   1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> oldanddeprecated
   1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> devel-warn: foorbar is deprecated, go shopping
@@ -165,4 +156,18 @@
    $TESTTMP/buggylocking.py:* in oldanddeprecated (glob)
   1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> oldanddeprecated --traceback exited 0 after * seconds (glob)
   1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> blackbox -l 9
+
+Test programming error failure:
+
+  $ hg buggytransaction 2>&1 | egrep -v '^  '
+  ** Unknown exception encountered with possibly-broken third-party extension buggylocking
+  ** which supports versions unknown of Mercurial.
+  ** Please disable buggylocking and try your action again.
+  ** If that fixes the bug please report it to the extension author.
+  ** Python * (glob)
+  ** Mercurial Distributed SCM (*) (glob)
+  ** Extensions loaded: * (glob)
+  Traceback (most recent call last):
+  RuntimeError: programming error: transaction requires locking
+
   $ cd ..
--- a/tests/test-docker-packaging.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-docker-packaging.t	Mon Jul 18 23:28:14 2016 -0500
@@ -1,5 +1,7 @@
 #require test-repo slow docker
 
+  $ . "$TESTDIR/helpers-testrepo.sh"
+
 Ensure debuild doesn't run the testsuite, as that could get silly.
   $ DEB_BUILD_OPTIONS=nocheck
   $ export DEB_BUILD_OPTIONS
--- a/tests/test-export.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-export.t	Mon Jul 18 23:28:14 2016 -0500
@@ -159,7 +159,7 @@
 Checking if only alphanumeric characters are used in the file name (%m option):
 
   $ echo "line" >> foo
-  $ hg commit -m " !\"#$%&(,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_\`abcdefghijklmnopqrstuvwxyz{|}~"
+  $ hg commit -m " !\"#$%&(,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]"'^'"_\`abcdefghijklmnopqrstuvwxyz{|}~"
   $ hg export -v -o %m.patch tip
   exporting patch:
   ____________0123456789_______ABCDEFGHIJKLMNOPQRSTUVWXYZ______abcdefghijklmnopqrstuvwxyz____.patch
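
As the output shows, every character outside [A-Za-z0-9] in the commit message
is mapped to "_" when building the %m file name. A short Python sketch of that
mapping (illustrative only, not the exporter's actual code):

    import re

    def patchname(message):
        """Reduce a commit summary to the underscore-only form seen above."""
        return re.sub(r'[^A-Za-z0-9]', '_', message) + '.patch'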
--- a/tests/test-extension.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-extension.t	Mon Jul 18 23:28:14 2016 -0500
@@ -249,6 +249,191 @@
   $TESTTMP/a (glob)
 #endif
 
+#if absimport
+
+Examine whether module loading is delayed until the module is actually
+referred to, even though it is imported with the "absolute_import" feature.
+
+The files below in each package are used for the following purposes:
+
+- "called": examine whether "from MODULE import ATTR" works correctly
+- "unused": examine whether loading is delayed correctly
+- "used":   examine whether "from PACKAGE import MODULE" works correctly
+
+Package hierarchy is needed to examine whether demand importing works
+as expected for "from SUB.PACK.AGE import MODULE".
+
+Setup "external library" to be imported with "absolute_import"
+feature.
+
+  $ mkdir -p $TESTTMP/extlibroot/lsub1/lsub2
+  $ touch $TESTTMP/extlibroot/__init__.py
+  $ touch $TESTTMP/extlibroot/lsub1/__init__.py
+  $ touch $TESTTMP/extlibroot/lsub1/lsub2/__init__.py
+
+  $ cat > $TESTTMP/extlibroot/lsub1/lsub2/called.py <<EOF
+  > def func():
+  >     return "this is extlibroot.lsub1.lsub2.called.func()"
+  > EOF
+  $ cat > $TESTTMP/extlibroot/lsub1/lsub2/unused.py <<EOF
+  > raise Exception("extlibroot.lsub1.lsub2.unused is loaded unintentionally")
+  > EOF
+  $ cat > $TESTTMP/extlibroot/lsub1/lsub2/used.py <<EOF
+  > detail = "this is extlibroot.lsub1.lsub2.used"
+  > EOF
+
+Setup a sub-package of the "external library", which causes instantiation of
+demandmod in the "recurse down the module chain" code path. Relative
+importing with the "absolute_import" feature isn't tested, because
+"level >= 1" doesn't cause instantiation of demandmod.
+
+  $ mkdir -p $TESTTMP/extlibroot/recursedown/abs
+  $ cat > $TESTTMP/extlibroot/recursedown/abs/used.py <<EOF
+  > detail = "this is extlibroot.recursedown.abs.used"
+  > EOF
+  $ cat > $TESTTMP/extlibroot/recursedown/abs/__init__.py <<EOF
+  > from __future__ import absolute_import
+  > from extlibroot.recursedown.abs.used import detail
+  > EOF
+
+  $ mkdir -p $TESTTMP/extlibroot/recursedown/legacy
+  $ cat > $TESTTMP/extlibroot/recursedown/legacy/used.py <<EOF
+  > detail = "this is extlibroot.recursedown.legacy.used"
+  > EOF
+  $ cat > $TESTTMP/extlibroot/recursedown/legacy/__init__.py <<EOF
+  > # legacy style (level == -1) import
+  > from extlibroot.recursedown.legacy.used import detail
+  > EOF
+
+  $ cat > $TESTTMP/extlibroot/recursedown/__init__.py <<EOF
+  > from __future__ import absolute_import
+  > from extlibroot.recursedown.abs import detail as absdetail
+  > from .legacy import detail as legacydetail
+  > EOF
+
+Setup extension local modules to be imported with "absolute_import"
+feature.
+
+  $ mkdir -p $TESTTMP/absextroot/xsub1/xsub2
+  $ touch $TESTTMP/absextroot/xsub1/__init__.py
+  $ touch $TESTTMP/absextroot/xsub1/xsub2/__init__.py
+
+  $ cat > $TESTTMP/absextroot/xsub1/xsub2/called.py <<EOF
+  > def func():
+  >     return "this is absextroot.xsub1.xsub2.called.func()"
+  > EOF
+  $ cat > $TESTTMP/absextroot/xsub1/xsub2/unused.py <<EOF
+  > raise Exception("absextroot.xsub1.xsub2.unused is loaded unintentionally")
+  > EOF
+  $ cat > $TESTTMP/absextroot/xsub1/xsub2/used.py <<EOF
+  > detail = "this is absextroot.xsub1.xsub2.used"
+  > EOF
+
+Setup extension local modules to examine whether demand importing
+works as expected in "level > 1" case.
+
+  $ cat > $TESTTMP/absextroot/relimportee.py <<EOF
+  > detail = "this is absextroot.relimportee"
+  > EOF
+  $ cat > $TESTTMP/absextroot/xsub1/xsub2/relimporter.py <<EOF
+  > from __future__ import absolute_import
+  > from ... import relimportee
+  > detail = "this relimporter imports %r" % (relimportee.detail)
+  > EOF
+
+Setup modules, which actually import extension local modules at
+runtime.
+
+  $ cat > $TESTTMP/absextroot/absolute.py << EOF
+  > from __future__ import absolute_import
+  > 
+  > # import extension local modules absolutely (level = 0)
+  > from absextroot.xsub1.xsub2 import used, unused
+  > from absextroot.xsub1.xsub2.called import func
+  > 
+  > def getresult():
+  >     result = []
+  >     result.append(used.detail)
+  >     result.append(func())
+  >     return result
+  > EOF
+
+  $ cat > $TESTTMP/absextroot/relative.py << EOF
+  > from __future__ import absolute_import
+  > 
+  > # import extension local modules relatively (level == 1)
+  > from .xsub1.xsub2 import used, unused
+  > from .xsub1.xsub2.called import func
+  > 
+  > # import a module, which implies "importing with level > 1"
+  > from .xsub1.xsub2 import relimporter
+  > 
+  > def getresult():
+  >     result = []
+  >     result.append(used.detail)
+  >     result.append(func())
+  >     result.append(relimporter.detail)
+  >     return result
+  > EOF
+
+Setup main procedure of extension.
+
+  $ cat > $TESTTMP/absextroot/__init__.py <<EOF
+  > from __future__ import absolute_import
+  > from mercurial import cmdutil
+  > cmdtable = {}
+  > command = cmdutil.command(cmdtable)
+  > 
+  > # "absolute" and "relative" shouldn't be imported before actual
+  > # command execution, because (1) they import same modules, and (2)
+  > # preceding import (= instantiate "demandmod" object instead of
+  > # real "module" object) might hide problem of succeeding import.
+  > 
+  > @command('showabsolute', [], norepo=True)
+  > def showabsolute(ui, *args, **opts):
+  >     from absextroot import absolute
+  >     ui.write('ABS: %s\n' % '\nABS: '.join(absolute.getresult()))
+  > 
+  > @command('showrelative', [], norepo=True)
+  > def showrelative(ui, *args, **opts):
+  >     from . import relative
+  >     ui.write('REL: %s\n' % '\nREL: '.join(relative.getresult()))
+  > 
+  > # import modules from external library
+  > from extlibroot.lsub1.lsub2 import used as lused, unused as lunused
+  > from extlibroot.lsub1.lsub2.called import func as lfunc
+  > from extlibroot.recursedown import absdetail, legacydetail
+  > 
+  > def uisetup(ui):
+  >     result = []
+  >     result.append(lused.detail)
+  >     result.append(lfunc())
+  >     result.append(absdetail)
+  >     result.append(legacydetail)
+  >     ui.write('LIB: %s\n' % '\nLIB: '.join(result))
+  > EOF
+
+Examine module importing.
+
+  $ (PYTHONPATH=${PYTHONPATH}${PATHSEP}${TESTTMP}; hg --config extensions.absextroot=$TESTTMP/absextroot showabsolute)
+  LIB: this is extlibroot.lsub1.lsub2.used
+  LIB: this is extlibroot.lsub1.lsub2.called.func()
+  LIB: this is extlibroot.recursedown.abs.used
+  LIB: this is extlibroot.recursedown.legacy.used
+  ABS: this is absextroot.xsub1.xsub2.used
+  ABS: this is absextroot.xsub1.xsub2.called.func()
+
+  $ (PYTHONPATH=${PYTHONPATH}${PATHSEP}${TESTTMP}; hg --config extensions.absextroot=$TESTTMP/absextroot showrelative)
+  LIB: this is extlibroot.lsub1.lsub2.used
+  LIB: this is extlibroot.lsub1.lsub2.called.func()
+  LIB: this is extlibroot.recursedown.abs.used
+  LIB: this is extlibroot.recursedown.legacy.used
+  REL: this is absextroot.xsub1.xsub2.used
+  REL: this is absextroot.xsub1.xsub2.called.func()
+  REL: this relimporter imports 'this is absextroot.relimportee'
+
+#endif
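
The checks above hinge on demand loading: an import initially binds a
placeholder object, and the real import only runs on first attribute access.
A generic lazy-module sketch in that spirit (not Mercurial's demandimport;
the class name is made up):

    import importlib

    class lazymod(object):
        """Stand-in module object; the real import runs on first use."""
        def __init__(self, name):
            self._name = name
            self._mod = None
        def __getattr__(self, attr):
            if self._mod is None:
                self._mod = importlib.import_module(self._name)
            return getattr(self._mod, attr)

    json = lazymod('json')        # nothing imported yet
    print(json.dumps({'a': 1}))   # the import happens here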
+
   $ cd ..
 
 hide outer repo
@@ -958,7 +1143,7 @@
   ** Extensions loaded: throw, older
 
 Declare the version as supporting this hg version, show regular bts link:
-  $ hgver=`$PYTHON -c 'from mercurial import util; print util.version().split("+")[0]'`
+  $ hgver=`hg debuginstall -T '{hgver}'`
   $ echo 'testedwith = """'"$hgver"'"""' >> throw.py
   $ if [ -z "$hgver" ]; then
   >   echo "unable to fetch a mercurial version. Make sure __version__ is correct";
--- a/tests/test-filelog.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-filelog.py	Mon Jul 18 23:28:14 2016 -0500
@@ -3,14 +3,15 @@
 Tests the behavior of filelog w.r.t. data starting with '\1\n'
 """
 from __future__ import absolute_import, print_function
+
+from mercurial.node import (
+    hex,
+    nullid,
+)
 from mercurial import (
     hg,
     ui as uimod,
 )
-from mercurial.node import (
-    hex,
-    nullid,
-)
 
 myui = uimod.ui()
 repo = hg.repository(myui, path='.', create=True)
--- a/tests/test-generaldelta.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-generaldelta.t	Mon Jul 18 23:28:14 2016 -0500
@@ -18,11 +18,12 @@
   > done
 
   $ cd ..
+  >>> from __future__ import print_function
   >>> import os
   >>> regsize = os.stat("repo/.hg/store/00manifest.i").st_size
   >>> gdsize = os.stat("gdrepo/.hg/store/00manifest.i").st_size
   >>> if regsize < gdsize:
-  ...     print 'generaldata increased size of manifest'
+  ...     print('generaldelta increased size of manifest')
 
 Verify rev reordering doesn't create invalid bundles (issue4462)
 This requires a commit tree that when pulled will reorder manifest revs such
@@ -153,8 +154,8 @@
   0 files updated, 0 files merged, 5 files removed, 0 files unresolved
   saved backup bundle to $TESTTMP/aggressive/.hg/strip-backup/1c5d4dc9a8b8-6c68e60c-backup.hg (glob)
   $ hg debugbundle .hg/strip-backup/*
-  Stream params: {'Compression': 'BZ'}
-  changegroup -- "{'version': '02'}"
+  Stream params: sortdict([('Compression', 'BZ')])
+  changegroup -- "sortdict([('version', '02'), ('nbchanges', '1')])"
       1c5d4dc9a8b8d6e1750966d343e94db665e7a1e9
 
   $ cd ..
--- a/tests/test-getbundle.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-getbundle.t	Mon Jul 18 23:28:14 2016 -0500
@@ -170,7 +170,7 @@
   $ hg debuggetbundle repo bundle -t bundle2
   $ hg debugbundle bundle
   Stream params: {}
-  changegroup -- "{'version': '01'}"
+  changegroup -- "sortdict([('version', '01'), ('nbchanges', '18')])"
       7704483d56b2a7b5db54dcee7c62378ac629b348
       29a4d1f17bd3f0779ca0525bebb1cfb51067c738
       713346a995c363120712aed1aee7e04afd867638
--- a/tests/test-glog-topological.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-glog-topological.t	Mon Jul 18 23:28:14 2016 -0500
@@ -40,7 +40,7 @@
 
 (display all nodes)
 
-  $ hg --config experimental.graph-group-branches=1 log -G
+  $ hg log -G -r 'sort(all(), topo)'
   o  8
   |
   o  3
@@ -62,7 +62,7 @@
 
 (revset skipping nodes)
 
-  $ hg --config experimental.graph-group-branches=1 log -G --rev 'not (2+6)'
+  $ hg log -G --rev 'sort(not (2+6), topo)'
   o  8
   |
   o  3
@@ -80,7 +80,7 @@
 
 (begin) from the other branch
 
-  $ hg --config experimental.graph-group-branches=1 --config experimental.graph-group-branches.firstbranch=5 log -G
+  $ hg log -G -r 'sort(all(), topo, topo.firstbranch=5)'
   o  7
   |
   o  6
--- a/tests/test-glog.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-glog.t	Mon Jul 18 23:28:14 2016 -0500
@@ -3036,7 +3036,229 @@
        date:        Thu Jan 01 00:00:04 1970 +0000
        summary:     (4) merge two known; one immediate left, one immediate right
   
+Draw only part of a grandparent line differently with "<N><char>": only the
+last N lines (for positive N) or everything but the first N lines (for
+negative N) along the current node use the style; the rest of the edge uses
+the parent edge styling.
 
+Last 3 lines:
+
+  $ cat << EOF >> $HGRCPATH
+  > [experimental]
+  > graphstyle.parent = !
+  > graphstyle.grandparent = 3.
+  > graphstyle.missing =
+  > EOF
+  $ hg log -G -r '36:18 & file("a")' -m
+  @  changeset:   36:08a19a744424
+  !  branch:      branch
+  !  tag:         tip
+  !  parent:      35:9159c3644c5e
+  !  parent:      35:9159c3644c5e
+  !  user:        test
+  .  date:        Thu Jan 01 00:00:36 1970 +0000
+  .  summary:     (36) buggy merge: identical parents
+  .
+  o    changeset:   32:d06dffa21a31
+  !\   parent:      27:886ed638191b
+  ! !  parent:      31:621d83e11f67
+  ! !  user:        test
+  ! .  date:        Thu Jan 01 00:00:32 1970 +0000
+  ! .  summary:     (32) expand
+  ! .
+  o !  changeset:   31:621d83e11f67
+  !\!  parent:      21:d42a756af44d
+  ! !  parent:      30:6e11cd4b648f
+  ! !  user:        test
+  ! !  date:        Thu Jan 01 00:00:31 1970 +0000
+  ! !  summary:     (31) expand
+  ! !
+  o !    changeset:   30:6e11cd4b648f
+  !\ \   parent:      28:44ecd0b9ae99
+  ! ~ !  parent:      29:cd9bb2be7593
+  !   !  user:        test
+  !   !  date:        Thu Jan 01 00:00:30 1970 +0000
+  !   !  summary:     (30) expand
+  !  /
+  o !    changeset:   28:44ecd0b9ae99
+  !\ \   parent:      1:6db2ef61d156
+  ! ~ !  parent:      26:7f25b6c2f0b9
+  !   !  user:        test
+  !   !  date:        Thu Jan 01 00:00:28 1970 +0000
+  !   !  summary:     (28) merge zero known
+  !  /
+  o !    changeset:   26:7f25b6c2f0b9
+  !\ \   parent:      18:1aa84d96232a
+  ! ! !  parent:      25:91da8ed57247
+  ! ! !  user:        test
+  ! ! !  date:        Thu Jan 01 00:00:26 1970 +0000
+  ! ! !  summary:     (26) merge one known; far right
+  ! ! !
+  ! o !  changeset:   25:91da8ed57247
+  ! !\!  parent:      21:d42a756af44d
+  ! ! !  parent:      24:a9c19a3d96b7
+  ! ! !  user:        test
+  ! ! !  date:        Thu Jan 01 00:00:25 1970 +0000
+  ! ! !  summary:     (25) merge one known; far left
+  ! ! !
+  ! o !    changeset:   24:a9c19a3d96b7
+  ! !\ \   parent:      0:e6eb3150255d
+  ! ! ~ !  parent:      23:a01cddf0766d
+  ! !   !  user:        test
+  ! !   !  date:        Thu Jan 01 00:00:24 1970 +0000
+  ! !   !  summary:     (24) merge one known; immediate right
+  ! !  /
+  ! o !    changeset:   23:a01cddf0766d
+  ! !\ \   parent:      1:6db2ef61d156
+  ! ! ~ !  parent:      22:e0d9cccacb5d
+  ! !   !  user:        test
+  ! !   !  date:        Thu Jan 01 00:00:23 1970 +0000
+  ! !   !  summary:     (23) merge one known; immediate left
+  ! !  /
+  ! o !  changeset:   22:e0d9cccacb5d
+  !/!/   parent:      18:1aa84d96232a
+  ! !    parent:      21:d42a756af44d
+  ! !    user:        test
+  ! !    date:        Thu Jan 01 00:00:22 1970 +0000
+  ! !    summary:     (22) merge two known; one far left, one far right
+  ! !
+  ! o    changeset:   21:d42a756af44d
+  ! !\   parent:      19:31ddc2c1573b
+  ! ! !  parent:      20:d30ed6450e32
+  ! ! !  user:        test
+  ! ! !  date:        Thu Jan 01 00:00:21 1970 +0000
+  ! ! !  summary:     (21) expand
+  ! ! !
+  +---o  changeset:   20:d30ed6450e32
+  ! ! |  parent:      0:e6eb3150255d
+  ! ! ~  parent:      18:1aa84d96232a
+  ! !    user:        test
+  ! !    date:        Thu Jan 01 00:00:20 1970 +0000
+  ! !    summary:     (20) merge two known; two far right
+  ! !
+  ! o    changeset:   19:31ddc2c1573b
+  ! |\   parent:      15:1dda3f72782d
+  ! ~ ~  parent:      17:44765d7c06e0
+  !      user:        test
+  !      date:        Thu Jan 01 00:00:19 1970 +0000
+  !      summary:     (19) expand
+  !
+  o    changeset:   18:1aa84d96232a
+  |\   parent:      1:6db2ef61d156
+  ~ ~  parent:      15:1dda3f72782d
+       user:        test
+       date:        Thu Jan 01 00:00:18 1970 +0000
+       summary:     (18) merge two known; two far left
+  
+All but the first 3 lines:
+
+  $ cat << EOF >> $HGRCPATH
+  > [experimental]
+  > graphstyle.parent = !
+  > graphstyle.grandparent = -3.
+  > graphstyle.missing =
+  > EOF
+  $ hg log -G -r '36:18 & file("a")' -m
+  @  changeset:   36:08a19a744424
+  !  branch:      branch
+  !  tag:         tip
+  .  parent:      35:9159c3644c5e
+  .  parent:      35:9159c3644c5e
+  .  user:        test
+  .  date:        Thu Jan 01 00:00:36 1970 +0000
+  .  summary:     (36) buggy merge: identical parents
+  .
+  o    changeset:   32:d06dffa21a31
+  !\   parent:      27:886ed638191b
+  ! !  parent:      31:621d83e11f67
+  ! .  user:        test
+  ! .  date:        Thu Jan 01 00:00:32 1970 +0000
+  ! .  summary:     (32) expand
+  ! .
+  o !  changeset:   31:621d83e11f67
+  !\!  parent:      21:d42a756af44d
+  ! !  parent:      30:6e11cd4b648f
+  ! !  user:        test
+  ! !  date:        Thu Jan 01 00:00:31 1970 +0000
+  ! !  summary:     (31) expand
+  ! !
+  o !    changeset:   30:6e11cd4b648f
+  !\ \   parent:      28:44ecd0b9ae99
+  ! ~ !  parent:      29:cd9bb2be7593
+  !   !  user:        test
+  !   !  date:        Thu Jan 01 00:00:30 1970 +0000
+  !   !  summary:     (30) expand
+  !  /
+  o !    changeset:   28:44ecd0b9ae99
+  !\ \   parent:      1:6db2ef61d156
+  ! ~ !  parent:      26:7f25b6c2f0b9
+  !   !  user:        test
+  !   !  date:        Thu Jan 01 00:00:28 1970 +0000
+  !   !  summary:     (28) merge zero known
+  !  /
+  o !    changeset:   26:7f25b6c2f0b9
+  !\ \   parent:      18:1aa84d96232a
+  ! ! !  parent:      25:91da8ed57247
+  ! ! !  user:        test
+  ! ! !  date:        Thu Jan 01 00:00:26 1970 +0000
+  ! ! !  summary:     (26) merge one known; far right
+  ! ! !
+  ! o !  changeset:   25:91da8ed57247
+  ! !\!  parent:      21:d42a756af44d
+  ! ! !  parent:      24:a9c19a3d96b7
+  ! ! !  user:        test
+  ! ! !  date:        Thu Jan 01 00:00:25 1970 +0000
+  ! ! !  summary:     (25) merge one known; far left
+  ! ! !
+  ! o !    changeset:   24:a9c19a3d96b7
+  ! !\ \   parent:      0:e6eb3150255d
+  ! ! ~ !  parent:      23:a01cddf0766d
+  ! !   !  user:        test
+  ! !   !  date:        Thu Jan 01 00:00:24 1970 +0000
+  ! !   !  summary:     (24) merge one known; immediate right
+  ! !  /
+  ! o !    changeset:   23:a01cddf0766d
+  ! !\ \   parent:      1:6db2ef61d156
+  ! ! ~ !  parent:      22:e0d9cccacb5d
+  ! !   !  user:        test
+  ! !   !  date:        Thu Jan 01 00:00:23 1970 +0000
+  ! !   !  summary:     (23) merge one known; immediate left
+  ! !  /
+  ! o !  changeset:   22:e0d9cccacb5d
+  !/!/   parent:      18:1aa84d96232a
+  ! !    parent:      21:d42a756af44d
+  ! !    user:        test
+  ! !    date:        Thu Jan 01 00:00:22 1970 +0000
+  ! !    summary:     (22) merge two known; one far left, one far right
+  ! !
+  ! o    changeset:   21:d42a756af44d
+  ! !\   parent:      19:31ddc2c1573b
+  ! ! !  parent:      20:d30ed6450e32
+  ! ! !  user:        test
+  ! ! !  date:        Thu Jan 01 00:00:21 1970 +0000
+  ! ! !  summary:     (21) expand
+  ! ! !
+  +---o  changeset:   20:d30ed6450e32
+  ! ! |  parent:      0:e6eb3150255d
+  ! ! ~  parent:      18:1aa84d96232a
+  ! !    user:        test
+  ! !    date:        Thu Jan 01 00:00:20 1970 +0000
+  ! !    summary:     (20) merge two known; two far right
+  ! !
+  ! o    changeset:   19:31ddc2c1573b
+  ! |\   parent:      15:1dda3f72782d
+  ! ~ ~  parent:      17:44765d7c06e0
+  !      user:        test
+  !      date:        Thu Jan 01 00:00:19 1970 +0000
+  !      summary:     (19) expand
+  !
+  o    changeset:   18:1aa84d96232a
+  |\   parent:      1:6db2ef61d156
+  ~ ~  parent:      15:1dda3f72782d
+       user:        test
+       date:        Thu Jan 01 00:00:18 1970 +0000
+       summary:     (18) merge two known; two far left
+  
   $ cd ..
 
 Change graph shorten, test better with graphstyle.missing not none
--- a/tests/test-help.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-help.t	Mon Jul 18 23:28:14 2016 -0500
@@ -260,7 +260,6 @@
        extdiff       command to allow external programs to compare revisions
        factotum      http authentication with factotum
        gpg           commands to sign and verify changesets
-       hgcia         hooks for integrating with the CIA.vc notification service
        hgk           browse the repository in a graphical way
        highlight     syntax highlighting for hgweb (requires Pygments)
        histedit      interactive history editing
@@ -429,6 +428,22 @@
    -h --help              display help and exit
       --hidden            consider hidden changesets
 
+Test the textwidth config option
+
+  $ hg root -h  --config ui.textwidth=50
+  hg root
+  
+  print the root (top) of the current working
+  directory
+  
+      Print the root directory of the current
+      repository.
+  
+      Returns 0 on success.
+  
+  (some details hidden, use --verbose to show
+  complete help)
+
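hg's help formatter is not textwrap, but for this particular sentence Python's
textwrap breaks the line at the same place, which makes the 50-column wrapping
easy to check by hand:

    import textwrap

    print(textwrap.fill(
        'print the root (top) of the current working directory', width=50))
    # print the root (top) of the current working
    # directory
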
 Test help option with version option
 
   $ hg add -h --version
@@ -1500,6 +1515,18 @@
          The URL to use for push operations. If not defined, the location
          defined by the path's main entry is used.
   
+      "pushrev"
+         A revset defining which revisions to push by default.
+  
+         When 'hg push' is executed without a "-r" argument, the revset defined
+         by this sub-option is evaluated to determine what to push.
+  
+         For example, a value of "." will push the working directory's revision
+         by default.
+  
+         Revsets specifying bookmarks will not result in the bookmark being
+         pushed.
+  
       The following special named paths exist:
   
       "default"
--- a/tests/test-hgcia.t	Sat Jul 02 09:41:40 2016 -0700
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,94 +0,0 @@
-Test the CIA extension
-
-  $ cat >> $HGRCPATH <<EOF
-  > [extensions]
-  > hgcia=
-  > 
-  > [hooks]
-  > changegroup.cia = python:hgext.hgcia.hook
-  > 
-  > [web]
-  > baseurl = http://hgserver/
-  > 
-  > [cia]
-  > user = testuser
-  > project = testproject
-  > test = True
-  > EOF
-
-  $ hg init src
-  $ hg init cia
-  $ cd src
-  $ echo foo > foo
-  $ hg ci -Amfoo
-  adding foo
-  $ hg push ../cia
-  pushing to ../cia
-  searching for changes
-  adding changesets
-  adding manifests
-  adding file changes
-  added 1 changesets with 1 changes to 1 files
-  
-  <message>
-    <generator>
-      <name>Mercurial (hgcia)</name>
-      <version>0.1</version>
-      <url>http://hg.kublai.com/mercurial/hgcia</url>
-      <user>testuser</user>
-    </generator>
-    <source>
-  <project>testproject</project>
-  <branch>default</branch>
-  </source>
-    <body>
-      <commit>
-        <author>test</author>
-        <version>0:e63c23eaa88a</version>
-        <log>foo</log>
-        <url>http://hgserver/rev/e63c23eaa88a</url>
-        <files><file uri="http://hgserver/file/e63c23eaa88a/foo" action="add">foo</file></files>
-      </commit>
-    </body>
-    <timestamp>0</timestamp>
-  </message>
-
-  $ cat >> $HGRCPATH <<EOF
-  > strip = 0
-  > EOF
-
-  $ echo bar > bar
-  $ hg ci -Ambar
-  adding bar
-  $ hg push ../cia
-  pushing to ../cia
-  searching for changes
-  adding changesets
-  adding manifests
-  adding file changes
-  added 1 changesets with 1 changes to 1 files
-  
-  <message>
-    <generator>
-      <name>Mercurial (hgcia)</name>
-      <version>0.1</version>
-      <url>http://hg.kublai.com/mercurial/hgcia</url>
-      <user>testuser</user>
-    </generator>
-    <source>
-  <project>testproject</project>
-  <branch>default</branch>
-  </source>
-    <body>
-      <commit>
-        <author>test</author>
-        <version>1:c0c7cf58edc5</version>
-        <log>bar</log>
-        <url>http://hgserver/$TESTTMP/cia/rev/c0c7cf58edc5</url>
-        <files><file uri="http://hgserver/$TESTTMP/cia/file/c0c7cf58edc5/bar" action="add">bar</file></files>
-      </commit>
-    </body>
-    <timestamp>0</timestamp>
-  </message>
-
-  $ cd ..
--- a/tests/test-hgweb-auth.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-hgweb-auth.py	Mon Jul 18 23:28:14 2016 -0500
@@ -43,7 +43,7 @@
     def _test(uri):
         print('URI:', uri)
         try:
-            pm = url.passwordmgr(ui)
+            pm = url.passwordmgr(ui, urlreq.httppasswordmgrwithdefaultrealm())
             u, authinfo = util.url(uri).authinfo()
             if authinfo is not None:
                 pm.add_password(*authinfo)
--- a/tests/test-hgweb-commands.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-hgweb-commands.t	Mon Jul 18 23:28:14 2016 -0500
@@ -81,36 +81,36 @@
     <updated>1970-01-01T00:00:00+00:00</updated>
     <published>1970-01-01T00:00:00+00:00</published>
     <content type="xhtml">
-  	<table xmlns="http://www.w3.org/1999/xhtml">
-  	<tr>
-  		<th style="text-align:left;">changeset</th>
-  		<td>cad8025a2e87</td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">branch</th>
-                                <td>unstable</td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">bookmark</th>
-  		<td>something</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;">tag</th>
-  		<td>tip</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;">user</th>
-  		<td>&#116;&#101;&#115;&#116;</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">description</th>
-  		<td>branch commit with null character: (websub)</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">files</th>
-  		<td></td>
-  	</tr>
-  	</table>
+     <table xmlns="http://www.w3.org/1999/xhtml">
+      <tr>
+       <th style="text-align:left;">changeset</th>
+       <td>cad8025a2e87</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">branch</th>
+       <td>unstable</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">bookmark</th>
+       <td>something</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">tag</th>
+       <td>tip</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">user</th>
+       <td>&#116;&#101;&#115;&#116;</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;vertical-align:top;">description</th>
+       <td>branch commit with null character: (websub)</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;vertical-align:top;">files</th>
+       <td></td>
+      </tr>
+     </table>
     </content>
    </entry>
    <entry>
@@ -124,36 +124,36 @@
     <updated>1970-01-01T00:00:00+00:00</updated>
     <published>1970-01-01T00:00:00+00:00</published>
     <content type="xhtml">
-  	<table xmlns="http://www.w3.org/1999/xhtml">
-  	<tr>
-  		<th style="text-align:left;">changeset</th>
-  		<td>1d22e65f027e</td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">branch</th>
-                                <td>stable</td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">bookmark</th>
-  		<td></td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;">tag</th>
-  		<td></td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;">user</th>
-  		<td>&#116;&#101;&#115;&#116;</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">description</th>
-  		<td>branch(websub)</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">files</th>
-  		<td>foo<br /></td>
-  	</tr>
-  	</table>
+     <table xmlns="http://www.w3.org/1999/xhtml">
+      <tr>
+       <th style="text-align:left;">changeset</th>
+       <td>1d22e65f027e</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">branch</th>
+       <td>stable</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">bookmark</th>
+       <td></td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">tag</th>
+       <td></td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">user</th>
+       <td>&#116;&#101;&#115;&#116;</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;vertical-align:top;">description</th>
+       <td>branch(websub)</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;vertical-align:top;">files</th>
+       <td>foo<br /></td>
+      </tr>
+     </table>
     </content>
    </entry>
    <entry>
@@ -167,36 +167,36 @@
     <updated>1970-01-01T00:00:00+00:00</updated>
     <published>1970-01-01T00:00:00+00:00</published>
     <content type="xhtml">
-  	<table xmlns="http://www.w3.org/1999/xhtml">
-  	<tr>
-  		<th style="text-align:left;">changeset</th>
-  		<td>a4f92ed23982</td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">branch</th>
-                                <td>default</td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">bookmark</th>
-  		<td></td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;">tag</th>
-  		<td></td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;">user</th>
-  		<td>&#116;&#101;&#115;&#116;</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">description</th>
-  		<td>Added tag 1.0 for changeset 2ef0ac749a14(websub)</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">files</th>
-  		<td>.hgtags<br /></td>
-  	</tr>
-  	</table>
+     <table xmlns="http://www.w3.org/1999/xhtml">
+      <tr>
+       <th style="text-align:left;">changeset</th>
+       <td>a4f92ed23982</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">branch</th>
+       <td>default</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">bookmark</th>
+       <td></td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">tag</th>
+       <td></td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">user</th>
+       <td>&#116;&#101;&#115;&#116;</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;vertical-align:top;">description</th>
+       <td>Added tag 1.0 for changeset 2ef0ac749a14(websub)</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;vertical-align:top;">files</th>
+       <td>.hgtags<br /></td>
+      </tr>
+     </table>
     </content>
    </entry>
    <entry>
@@ -210,36 +210,36 @@
     <updated>1970-01-01T00:00:00+00:00</updated>
     <published>1970-01-01T00:00:00+00:00</published>
     <content type="xhtml">
-  	<table xmlns="http://www.w3.org/1999/xhtml">
-  	<tr>
-  		<th style="text-align:left;">changeset</th>
-  		<td>2ef0ac749a14</td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">branch</th>
-                                <td></td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">bookmark</th>
-  		<td>anotherthing</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;">tag</th>
-  		<td>1.0</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;">user</th>
-  		<td>&#116;&#101;&#115;&#116;</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">description</th>
-  		<td>base(websub)</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">files</th>
-  		<td>da/foo<br />foo<br /></td>
-  	</tr>
-  	</table>
+     <table xmlns="http://www.w3.org/1999/xhtml">
+      <tr>
+       <th style="text-align:left;">changeset</th>
+       <td>2ef0ac749a14</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">branch</th>
+       <td></td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">bookmark</th>
+       <td>anotherthing</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">tag</th>
+       <td>1.0</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">user</th>
+       <td>&#116;&#101;&#115;&#116;</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;vertical-align:top;">description</th>
+       <td>base(websub)</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;vertical-align:top;">files</th>
+       <td>da/foo<br />foo<br /></td>
+      </tr>
+     </table>
     </content>
    </entry>
   
@@ -258,160 +258,164 @@
       <item>
       <title>[unstable] branch commit with null character: </title>
       <guid isPermaLink="true">http://*:$HGPORT/rev/cad8025a2e87</guid> (glob)
-               <link>http://*:$HGPORT/rev/cad8025a2e87</link> (glob)
+      <link>http://*:$HGPORT/rev/cad8025a2e87</link> (glob)
       <description>
-                <![CDATA[
-  	<table>
-  	<tr>
-  		<th style="text-align:left;">changeset</th>
-  		<td>cad8025a2e87</td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">branch</th>
-                                <td>unstable</td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">bookmark</th>
-  		<td>something</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;">tag</th>
-  		<td>tip</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">user</th>
-  		<td>&#116;&#101;&#115;&#116;</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">description</th>
-  		<td>branch commit with null character: (websub)</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">files</th>
-  		<td></td>
-  	</tr>
-  	</table>
-  	]]></description>
+      <![CDATA[
+          <table>
+              <tr>
+                  <th style="text-align:left;">changeset</th>
+                  <td>cad8025a2e87</td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;">branch</th>
+                  <td>unstable</td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;">bookmark</th>
+                  <td>something</td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;">tag</th>
+                  <td>tip</td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;vertical-align:top;">user</th>
+                  <td>&#116;&#101;&#115;&#116;</td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;vertical-align:top;">description</th>
+                  <td>branch commit with null character: (websub)</td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;vertical-align:top;">files</th>
+                  <td></td>
+              </tr>
+          </table>
+      ]]>
+      </description>
       <author>&#116;&#101;&#115;&#116;</author>
       <pubDate>Thu, 01 Jan 1970 00:00:00 +0000</pubDate>
   </item>
   <item>
       <title>[stable] branch</title>
       <guid isPermaLink="true">http://*:$HGPORT/rev/1d22e65f027e</guid> (glob)
-               <link>http://*:$HGPORT/rev/1d22e65f027e</link> (glob)
+      <link>http://*:$HGPORT/rev/1d22e65f027e</link> (glob)
       <description>
-                <![CDATA[
-  	<table>
-  	<tr>
-  		<th style="text-align:left;">changeset</th>
-  		<td>1d22e65f027e</td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">branch</th>
-                                <td>stable</td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">bookmark</th>
-  		<td></td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;">tag</th>
-  		<td></td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">user</th>
-  		<td>&#116;&#101;&#115;&#116;</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">description</th>
-  		<td>branch(websub)</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">files</th>
-  		<td>foo<br /></td>
-  	</tr>
-  	</table>
-  	]]></description>
+      <![CDATA[
+          <table>
+              <tr>
+                  <th style="text-align:left;">changeset</th>
+                  <td>1d22e65f027e</td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;">branch</th>
+                  <td>stable</td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;">bookmark</th>
+                  <td></td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;">tag</th>
+                  <td></td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;vertical-align:top;">user</th>
+                  <td>&#116;&#101;&#115;&#116;</td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;vertical-align:top;">description</th>
+                  <td>branch(websub)</td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;vertical-align:top;">files</th>
+                  <td>foo<br /></td>
+              </tr>
+          </table>
+      ]]>
+      </description>
       <author>&#116;&#101;&#115;&#116;</author>
       <pubDate>Thu, 01 Jan 1970 00:00:00 +0000</pubDate>
   </item>
   <item>
       <title>[default] Added tag 1.0 for changeset 2ef0ac749a14</title>
       <guid isPermaLink="true">http://*:$HGPORT/rev/a4f92ed23982</guid> (glob)
-               <link>http://*:$HGPORT/rev/a4f92ed23982</link> (glob)
+      <link>http://*:$HGPORT/rev/a4f92ed23982</link> (glob)
       <description>
-                <![CDATA[
-  	<table>
-  	<tr>
-  		<th style="text-align:left;">changeset</th>
-  		<td>a4f92ed23982</td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">branch</th>
-                                <td>default</td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">bookmark</th>
-  		<td></td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;">tag</th>
-  		<td></td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">user</th>
-  		<td>&#116;&#101;&#115;&#116;</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">description</th>
-  		<td>Added tag 1.0 for changeset 2ef0ac749a14(websub)</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">files</th>
-  		<td>.hgtags<br /></td>
-  	</tr>
-  	</table>
-  	]]></description>
+      <![CDATA[
+          <table>
+              <tr>
+                  <th style="text-align:left;">changeset</th>
+                  <td>a4f92ed23982</td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;">branch</th>
+                  <td>default</td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;">bookmark</th>
+                  <td></td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;">tag</th>
+                  <td></td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;vertical-align:top;">user</th>
+                  <td>&#116;&#101;&#115;&#116;</td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;vertical-align:top;">description</th>
+                  <td>Added tag 1.0 for changeset 2ef0ac749a14(websub)</td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;vertical-align:top;">files</th>
+                  <td>.hgtags<br /></td>
+              </tr>
+          </table>
+      ]]>
+      </description>
       <author>&#116;&#101;&#115;&#116;</author>
       <pubDate>Thu, 01 Jan 1970 00:00:00 +0000</pubDate>
   </item>
   <item>
       <title>base</title>
       <guid isPermaLink="true">http://*:$HGPORT/rev/2ef0ac749a14</guid> (glob)
-               <link>http://*:$HGPORT/rev/2ef0ac749a14</link> (glob)
+      <link>http://*:$HGPORT/rev/2ef0ac749a14</link> (glob)
       <description>
-                <![CDATA[
-  	<table>
-  	<tr>
-  		<th style="text-align:left;">changeset</th>
-  		<td>2ef0ac749a14</td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">branch</th>
-                                <td></td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">bookmark</th>
-  		<td>anotherthing</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;">tag</th>
-  		<td>1.0</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">user</th>
-  		<td>&#116;&#101;&#115;&#116;</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">description</th>
-  		<td>base(websub)</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">files</th>
-  		<td>da/foo<br />foo<br /></td>
-  	</tr>
-  	</table>
-  	]]></description>
+      <![CDATA[
+          <table>
+              <tr>
+                  <th style="text-align:left;">changeset</th>
+                  <td>2ef0ac749a14</td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;">branch</th>
+                  <td></td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;">bookmark</th>
+                  <td>anotherthing</td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;">tag</th>
+                  <td>1.0</td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;vertical-align:top;">user</th>
+                  <td>&#116;&#101;&#115;&#116;</td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;vertical-align:top;">description</th>
+                  <td>base(websub)</td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;vertical-align:top;">files</th>
+                  <td>da/foo<br />foo<br /></td>
+              </tr>
+          </table>
+      ]]>
+      </description>
       <author>&#116;&#101;&#115;&#116;</author>
       <pubDate>Thu, 01 Jan 1970 00:00:00 +0000</pubDate>
   </item>
@@ -441,36 +445,36 @@
     <updated>1970-01-01T00:00:00+00:00</updated>
     <published>1970-01-01T00:00:00+00:00</published>
     <content type="xhtml">
-  	<table xmlns="http://www.w3.org/1999/xhtml">
-  	<tr>
-  		<th style="text-align:left;">changeset</th>
-  		<td>a4f92ed23982</td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">branch</th>
-                                <td>default</td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">bookmark</th>
-  		<td></td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;">tag</th>
-  		<td></td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;">user</th>
-  		<td>&#116;&#101;&#115;&#116;</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">description</th>
-  		<td>Added tag 1.0 for changeset 2ef0ac749a14(websub)</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">files</th>
-  		<td>.hgtags<br /></td>
-  	</tr>
-  	</table>
+     <table xmlns="http://www.w3.org/1999/xhtml">
+      <tr>
+       <th style="text-align:left;">changeset</th>
+       <td>a4f92ed23982</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">branch</th>
+       <td>default</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">bookmark</th>
+       <td></td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">tag</th>
+       <td></td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">user</th>
+       <td>&#116;&#101;&#115;&#116;</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;vertical-align:top;">description</th>
+       <td>Added tag 1.0 for changeset 2ef0ac749a14(websub)</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;vertical-align:top;">files</th>
+       <td>.hgtags<br /></td>
+      </tr>
+     </table>
     </content>
    </entry>
    <entry>
@@ -484,36 +488,36 @@
     <updated>1970-01-01T00:00:00+00:00</updated>
     <published>1970-01-01T00:00:00+00:00</published>
     <content type="xhtml">
-  	<table xmlns="http://www.w3.org/1999/xhtml">
-  	<tr>
-  		<th style="text-align:left;">changeset</th>
-  		<td>2ef0ac749a14</td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">branch</th>
-                                <td></td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">bookmark</th>
-  		<td>anotherthing</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;">tag</th>
-  		<td>1.0</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;">user</th>
-  		<td>&#116;&#101;&#115;&#116;</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">description</th>
-  		<td>base(websub)</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">files</th>
-  		<td>da/foo<br />foo<br /></td>
-  	</tr>
-  	</table>
+     <table xmlns="http://www.w3.org/1999/xhtml">
+      <tr>
+       <th style="text-align:left;">changeset</th>
+       <td>2ef0ac749a14</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">branch</th>
+       <td></td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">bookmark</th>
+       <td>anotherthing</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">tag</th>
+       <td>1.0</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">user</th>
+       <td>&#116;&#101;&#115;&#116;</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;vertical-align:top;">description</th>
+       <td>base(websub)</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;vertical-align:top;">files</th>
+       <td>da/foo<br />foo<br /></td>
+      </tr>
+     </table>
     </content>
    </entry>
   
@@ -532,80 +536,82 @@
       <item>
       <title>[default] Added tag 1.0 for changeset 2ef0ac749a14</title>
       <guid isPermaLink="true">http://*:$HGPORT/rev/a4f92ed23982</guid> (glob)
-               <link>http://*:$HGPORT/rev/a4f92ed23982</link> (glob)
+      <link>http://*:$HGPORT/rev/a4f92ed23982</link> (glob)
       <description>
-                <![CDATA[
-  	<table>
-  	<tr>
-  		<th style="text-align:left;">changeset</th>
-  		<td>a4f92ed23982</td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">branch</th>
-                                <td>default</td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">bookmark</th>
-  		<td></td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;">tag</th>
-  		<td></td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">user</th>
-  		<td>&#116;&#101;&#115;&#116;</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">description</th>
-  		<td>Added tag 1.0 for changeset 2ef0ac749a14(websub)</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">files</th>
-  		<td>.hgtags<br /></td>
-  	</tr>
-  	</table>
-  	]]></description>
+      <![CDATA[
+          <table>
+              <tr>
+                  <th style="text-align:left;">changeset</th>
+                  <td>a4f92ed23982</td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;">branch</th>
+                  <td>default</td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;">bookmark</th>
+                  <td></td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;">tag</th>
+                  <td></td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;vertical-align:top;">user</th>
+                  <td>&#116;&#101;&#115;&#116;</td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;vertical-align:top;">description</th>
+                  <td>Added tag 1.0 for changeset 2ef0ac749a14(websub)</td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;vertical-align:top;">files</th>
+                  <td>.hgtags<br /></td>
+              </tr>
+          </table>
+      ]]>
+      </description>
       <author>&#116;&#101;&#115;&#116;</author>
       <pubDate>Thu, 01 Jan 1970 00:00:00 +0000</pubDate>
   </item>
   <item>
       <title>base</title>
       <guid isPermaLink="true">http://*:$HGPORT/rev/2ef0ac749a14</guid> (glob)
-               <link>http://*:$HGPORT/rev/2ef0ac749a14</link> (glob)
+      <link>http://*:$HGPORT/rev/2ef0ac749a14</link> (glob)
       <description>
-                <![CDATA[
-  	<table>
-  	<tr>
-  		<th style="text-align:left;">changeset</th>
-  		<td>2ef0ac749a14</td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">branch</th>
-                                <td></td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">bookmark</th>
-  		<td>anotherthing</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;">tag</th>
-  		<td>1.0</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">user</th>
-  		<td>&#116;&#101;&#115;&#116;</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">description</th>
-  		<td>base(websub)</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">files</th>
-  		<td>da/foo<br />foo<br /></td>
-  	</tr>
-  	</table>
-  	]]></description>
+      <![CDATA[
+          <table>
+              <tr>
+                  <th style="text-align:left;">changeset</th>
+                  <td>2ef0ac749a14</td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;">branch</th>
+                  <td></td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;">bookmark</th>
+                  <td>anotherthing</td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;">tag</th>
+                  <td>1.0</td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;vertical-align:top;">user</th>
+                  <td>&#116;&#101;&#115;&#116;</td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;vertical-align:top;">description</th>
+                  <td>base(websub)</td>
+              </tr>
+              <tr>
+                  <th style="text-align:left;vertical-align:top;">files</th>
+                  <td>da/foo<br />foo<br /></td>
+              </tr>
+          </table>
+      ]]>
+      </description>
       <author>&#116;&#101;&#115;&#116;</author>
       <pubDate>Thu, 01 Jan 1970 00:00:00 +0000</pubDate>
   </item>
@@ -633,36 +639,36 @@
     <updated>1970-01-01T00:00:00+00:00</updated>
     <published>1970-01-01T00:00:00+00:00</published>
     <content type="xhtml">
-  	<table xmlns="http://www.w3.org/1999/xhtml">
-  	<tr>
-  		<th style="text-align:left;">changeset</th>
-  		<td>2ef0ac749a14</td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">branch</th>
-                                <td></td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">bookmark</th>
-  		<td>anotherthing</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;">tag</th>
-  		<td>1.0</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;">user</th>
-  		<td>&#116;&#101;&#115;&#116;</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">description</th>
-  		<td>base(websub)</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">files</th>
-  		<td></td>
-  	</tr>
-  	</table>
+     <table xmlns="http://www.w3.org/1999/xhtml">
+      <tr>
+       <th style="text-align:left;">changeset</th>
+       <td>2ef0ac749a14</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">branch</th>
+       <td></td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">bookmark</th>
+       <td>anotherthing</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">tag</th>
+       <td>1.0</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">user</th>
+       <td>&#116;&#101;&#115;&#116;</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;vertical-align:top;">description</th>
+       <td>base(websub)</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;vertical-align:top;">files</th>
+       <td></td>
+      </tr>
+     </table>
     </content>
    </entry>
   
@@ -1957,6 +1963,19 @@
   .minusline { color: red; }
   .atline { color: purple; }
   .annotate { font-size: smaller; text-align: right; padding-right: 1em; }
+  tr.thisrev a { color:#999999; text-decoration: none; }
+  tr.thisrev pre { color:#009900; }
+  div.annotate-info {
+    display: none;
+    position: absolute;
+    background-color: #FFFFFF;
+    border: 1px solid #000000;
+    text-align: left;
+    color: #000000;
+    padding: 5px;
+  }
+  div.annotate-info a { color: #0000FF; }
+  td.annotate:hover div.annotate-info { display: inline; }
   .buttons a {
     background-color: #666;
     padding: 2pt;
@@ -2096,9 +2115,10 @@
 Graph json escape of multibyte character
 
   $ get-with-headers.py 127.0.0.1:$HGPORT 'graph/' > out
+  >>> from __future__ import print_function
   >>> for line in open("out"):
   ...     if line.startswith("var data ="):
-  ...         print line,
+  ...         print(line, end='')
   var data = [["061dd13ba3c3", [0, 1], [[0, 0, 1, -1, ""]], "\u80fd", "test", "1970-01-01", ["unstable", true], ["tip"], ["something"]], ["cad8025a2e87", [0, 1], [[0, 0, 1, 3, "FF0000"]], "branch commit with null character: \u0000", "test", "1970-01-01", ["unstable", false], [], []], ["1d22e65f027e", [0, 1], [[0, 0, 1, 3, ""]], "branch", "test", "1970-01-01", ["stable", true], [], []], ["a4f92ed23982", [0, 1], [[0, 0, 1, 3, ""]], "Added tag 1.0 for changeset 2ef0ac749a14", "test", "1970-01-01", ["default", true], [], []], ["2ef0ac749a14", [0, 1], [], "base", "test", "1970-01-01", ["default", false], ["1.0"], ["anotherthing"]]];
 
 capabilities
@@ -2252,7 +2272,7 @@
   $ QUERY_STRING='style=raw'
   $ python hgweb.cgi #> search
   Status: 404 Not Found\r (esc)
-  ETag: *\r (glob) (esc)
+  ETag: W/"*"\r (glob) (esc)
   Content-Type: text/plain; charset=ascii\r (esc)
   \r (esc)
   
@@ -2266,7 +2286,7 @@
   $ QUERY_STRING='style=raw'
   $ python hgweb.cgi #> search
   Status: 404 Not Found\r (esc)
-  ETag: *\r (glob) (esc)
+  ETag: W/"*"\r (glob) (esc)
   Content-Type: text/plain; charset=ascii\r (esc)
   \r (esc)
   
--- a/tests/test-hgweb-filelog.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-hgweb-filelog.t	Mon Jul 18 23:28:14 2016 -0500
@@ -822,36 +822,36 @@
     <updated>1970-01-01T00:00:00+00:00</updated>
     <published>1970-01-01T00:00:00+00:00</published>
     <content type="xhtml">
-  	<table xmlns="http://www.w3.org/1999/xhtml">
-  	<tr>
-  		<th style="text-align:left;">changeset</th>
-  		<td>3f41bc784e7e</td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">branch</th>
-                                <td>a-branch</td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">bookmark</th>
-  		<td></td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;">tag</th>
-  		<td></td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;">user</th>
-  		<td>&#116;&#101;&#115;&#116;</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">description</th>
-  		<td>second a</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">files</th>
-  		<td></td>
-  	</tr>
-  	</table>
+     <table xmlns="http://www.w3.org/1999/xhtml">
+      <tr>
+       <th style="text-align:left;">changeset</th>
+       <td>3f41bc784e7e</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">branch</th>
+       <td>a-branch</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">bookmark</th>
+       <td></td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">tag</th>
+       <td></td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">user</th>
+       <td>&#116;&#101;&#115;&#116;</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;vertical-align:top;">description</th>
+       <td>second a</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;vertical-align:top;">files</th>
+       <td></td>
+      </tr>
+     </table>
     </content>
    </entry>
    <entry>
@@ -865,36 +865,36 @@
     <updated>1970-01-01T00:00:00+00:00</updated>
     <published>1970-01-01T00:00:00+00:00</published>
     <content type="xhtml">
-  	<table xmlns="http://www.w3.org/1999/xhtml">
-  	<tr>
-  		<th style="text-align:left;">changeset</th>
-  		<td>5ed941583260</td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">branch</th>
-                                <td></td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">bookmark</th>
-  		<td>a-bookmark</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;">tag</th>
-  		<td>a-tag</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;">user</th>
-  		<td>&#116;&#101;&#115;&#116;</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">description</th>
-  		<td>first a</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">files</th>
-  		<td></td>
-  	</tr>
-  	</table>
+     <table xmlns="http://www.w3.org/1999/xhtml">
+      <tr>
+       <th style="text-align:left;">changeset</th>
+       <td>5ed941583260</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">branch</th>
+       <td></td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">bookmark</th>
+       <td>a-bookmark</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">tag</th>
+       <td>a-tag</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">user</th>
+       <td>&#116;&#101;&#115;&#116;</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;vertical-align:top;">description</th>
+       <td>first a</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;vertical-align:top;">files</th>
+       <td></td>
+      </tr>
+     </table>
     </content>
    </entry>
   
--- a/tests/test-hgweb-json.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-hgweb-json.t	Mon Jul 18 23:28:14 2016 -0500
@@ -100,6 +100,8 @@
      summary:     initial
   
 
+  $ echo '[web]' >> .hg/hgrc
+  $ echo 'allow_archive = bz2' >> .hg/hgrc
   $ hg serve -p $HGPORT -d --pid-file=hg.pid -A access.log -E error.log
   $ cat hg.pid >> $DAEMON_PIDS
 
@@ -111,10 +113,33 @@
 
 file/{revision}/{path} shows file revision
 
-  $ request json-file/06e557f3edf6/foo
+  $ request json-file/78896eb0e102/foo-new
   200 Script output follows
   
-  "not yet implemented"
+  {
+    "bookmarks": [],
+    "branch": "default",
+    "date": [
+      0.0,
+      0
+    ],
+    "desc": "move foo",
+    "lines": [
+      {
+        "line": "bar\n"
+      }
+    ],
+    "node": "78896eb0e102174ce9278438a95e12543e4367a7",
+    "parents": [
+      "f8bbb9024b10f93cdbb8d940337398291d40dea8"
+    ],
+    "path": "foo-new",
+    "phase": "public",
+    "tags": [
+      "tag1"
+    ],
+    "user": "test"
+  }
 
 file/{revision} shows root directory info
 
@@ -169,6 +194,7 @@
     "changesets": [
       {
         "bookmarks": [],
+        "branch": "default",
         "date": [
           0.0,
           0
@@ -179,6 +205,7 @@
           "ceed296fe500c3fac9541e31dad860cb49c89e45",
           "ed66c30e87eb65337c05a4229efaa5f1d5285a90"
         ],
+        "phase": "draft",
         "tags": [
           "tip"
         ],
@@ -186,6 +213,7 @@
       },
       {
         "bookmarks": [],
+        "branch": "test-branch",
         "date": [
           0.0,
           0
@@ -195,11 +223,13 @@
         "parents": [
           "6ab967a8ab3489227a83f80e920faa039a71819f"
         ],
+        "phase": "draft",
         "tags": [],
         "user": "test"
       },
       {
         "bookmarks": [],
+        "branch": "test-branch",
         "date": [
           0.0,
           0
@@ -209,6 +239,7 @@
         "parents": [
           "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e"
         ],
+        "phase": "draft",
         "tags": [],
         "user": "test"
       },
@@ -216,6 +247,7 @@
         "bookmarks": [
           "bookmark2"
         ],
+        "branch": "default",
         "date": [
           0.0,
           0
@@ -225,11 +257,13 @@
         "parents": [
           "f2890a05fea49bfaf9fb27ed5490894eba32da78"
         ],
+        "phase": "draft",
         "tags": [],
         "user": "test"
       },
       {
         "bookmarks": [],
+        "branch": "default",
         "date": [
           0.0,
           0
@@ -239,6 +273,7 @@
         "parents": [
           "93a8ce14f89156426b7fa981af8042da53f03aa0"
         ],
+        "phase": "draft",
         "tags": [
           "tag2"
         ],
@@ -246,6 +281,7 @@
       },
       {
         "bookmarks": [],
+        "branch": "default",
         "date": [
           0.0,
           0
@@ -255,11 +291,13 @@
         "parents": [
           "78896eb0e102174ce9278438a95e12543e4367a7"
         ],
+        "phase": "public",
         "tags": [],
         "user": "test"
       },
       {
         "bookmarks": [],
+        "branch": "default",
         "date": [
           0.0,
           0
@@ -269,6 +307,7 @@
         "parents": [
           "8d7c456572acf3557e8ed8a07286b10c408bcec5"
         ],
+        "phase": "public",
         "tags": [
           "tag1"
         ],
@@ -278,6 +317,7 @@
         "bookmarks": [
           "bookmark1"
         ],
+        "branch": "default",
         "date": [
           0.0,
           0
@@ -287,11 +327,13 @@
         "parents": [
           "f8bbb9024b10f93cdbb8d940337398291d40dea8"
         ],
+        "phase": "public",
         "tags": [],
         "user": "test"
       },
       {
         "bookmarks": [],
+        "branch": "default",
         "date": [
           0.0,
           0
@@ -301,11 +343,13 @@
         "parents": [
           "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e"
         ],
+        "phase": "public",
         "tags": [],
         "user": "test"
       },
       {
         "bookmarks": [],
+        "branch": "default",
         "date": [
           0.0,
           0
@@ -313,6 +357,7 @@
         "desc": "initial",
         "node": "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e",
         "parents": [],
+        "phase": "public",
         "tags": [],
         "user": "test"
       }
@@ -330,6 +375,7 @@
     "changesets": [
       {
         "bookmarks": [],
+        "branch": "default",
         "date": [
           0.0,
           0
@@ -339,11 +385,13 @@
         "parents": [
           "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e"
         ],
+        "phase": "public",
         "tags": [],
         "user": "test"
       },
       {
         "bookmarks": [],
+        "branch": "default",
         "date": [
           0.0,
           0
@@ -351,6 +399,7 @@
         "desc": "initial",
         "node": "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e",
         "parents": [],
+        "phase": "public",
         "tags": [],
         "user": "test"
       }
@@ -368,6 +417,7 @@
     "changesets": [
       {
         "bookmarks": [],
+        "branch": "default",
         "date": [
           0.0,
           0
@@ -378,6 +428,7 @@
           "ceed296fe500c3fac9541e31dad860cb49c89e45",
           "ed66c30e87eb65337c05a4229efaa5f1d5285a90"
         ],
+        "phase": "draft",
         "tags": [
           "tip"
         ],
@@ -385,6 +436,7 @@
       },
       {
         "bookmarks": [],
+        "branch": "test-branch",
         "date": [
           0.0,
           0
@@ -394,11 +446,13 @@
         "parents": [
           "6ab967a8ab3489227a83f80e920faa039a71819f"
         ],
+        "phase": "draft",
         "tags": [],
         "user": "test"
       },
       {
         "bookmarks": [],
+        "branch": "test-branch",
         "date": [
           0.0,
           0
@@ -408,6 +462,7 @@
         "parents": [
           "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e"
         ],
+        "phase": "draft",
         "tags": [],
         "user": "test"
       },
@@ -415,6 +470,7 @@
         "bookmarks": [
           "bookmark2"
         ],
+        "branch": "default",
         "date": [
           0.0,
           0
@@ -424,11 +480,13 @@
         "parents": [
           "f2890a05fea49bfaf9fb27ed5490894eba32da78"
         ],
+        "phase": "draft",
         "tags": [],
         "user": "test"
       },
       {
         "bookmarks": [],
+        "branch": "default",
         "date": [
           0.0,
           0
@@ -438,6 +496,7 @@
         "parents": [
           "93a8ce14f89156426b7fa981af8042da53f03aa0"
         ],
+        "phase": "draft",
         "tags": [
           "tag2"
         ],
@@ -445,6 +504,7 @@
       },
       {
         "bookmarks": [],
+        "branch": "default",
         "date": [
           0.0,
           0
@@ -454,11 +514,13 @@
         "parents": [
           "78896eb0e102174ce9278438a95e12543e4367a7"
         ],
+        "phase": "public",
         "tags": [],
         "user": "test"
       },
       {
         "bookmarks": [],
+        "branch": "default",
         "date": [
           0.0,
           0
@@ -468,6 +530,7 @@
         "parents": [
           "8d7c456572acf3557e8ed8a07286b10c408bcec5"
         ],
+        "phase": "public",
         "tags": [
           "tag1"
         ],
@@ -477,6 +540,7 @@
         "bookmarks": [
           "bookmark1"
         ],
+        "branch": "default",
         "date": [
           0.0,
           0
@@ -486,11 +550,13 @@
         "parents": [
           "f8bbb9024b10f93cdbb8d940337398291d40dea8"
         ],
+        "phase": "public",
         "tags": [],
         "user": "test"
       },
       {
         "bookmarks": [],
+        "branch": "default",
         "date": [
           0.0,
           0
@@ -500,11 +566,13 @@
         "parents": [
           "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e"
         ],
+        "phase": "public",
         "tags": [],
         "user": "test"
       },
       {
         "bookmarks": [],
+        "branch": "default",
         "date": [
           0.0,
           0
@@ -512,6 +580,7 @@
         "desc": "initial",
         "node": "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e",
         "parents": [],
+        "phase": "public",
         "tags": [],
         "user": "test"
       }
@@ -732,7 +801,309 @@
   $ request json-summary
   200 Script output follows
   
-  "not yet implemented"
+  {
+    "archives": [
+      {
+        "extension": ".tar.bz2",
+        "node": "tip",
+        "type": "bz2",
+        "url": "http://*:$HGPORT/archive/tip.tar.bz2" (glob)
+      }
+    ],
+    "bookmarks": [
+      {
+        "bookmark": "bookmark2",
+        "date": [
+          0.0,
+          0
+        ],
+        "node": "ceed296fe500c3fac9541e31dad860cb49c89e45"
+      },
+      {
+        "bookmark": "bookmark1",
+        "date": [
+          0.0,
+          0
+        ],
+        "node": "8d7c456572acf3557e8ed8a07286b10c408bcec5"
+      }
+    ],
+    "branches": [
+      {
+        "branch": "default",
+        "date": [
+          0.0,
+          0
+        ],
+        "node": "cc725e08502a79dd1eda913760fbe06ed7a9abc7",
+        "status": "open"
+      },
+      {
+        "branch": "test-branch",
+        "date": [
+          0.0,
+          0
+        ],
+        "node": "ed66c30e87eb65337c05a4229efaa5f1d5285a90",
+        "status": "inactive"
+      }
+    ],
+    "labels": [],
+    "lastchange": [
+      0.0,
+      0
+    ],
+    "node": "cc725e08502a79dd1eda913760fbe06ed7a9abc7",
+    "shortlog": [
+      {
+        "bookmarks": [],
+        "branch": "default",
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "merge test-branch into default",
+        "node": "cc725e08502a79dd1eda913760fbe06ed7a9abc7",
+        "parents": [
+          "ceed296fe500c3fac9541e31dad860cb49c89e45",
+          "ed66c30e87eb65337c05a4229efaa5f1d5285a90"
+        ],
+        "phase": "draft",
+        "tags": [
+          "tip"
+        ],
+        "user": "test"
+      },
+      {
+        "bookmarks": [],
+        "branch": "test-branch",
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "another commit in test-branch",
+        "node": "ed66c30e87eb65337c05a4229efaa5f1d5285a90",
+        "parents": [
+          "6ab967a8ab3489227a83f80e920faa039a71819f"
+        ],
+        "phase": "draft",
+        "tags": [],
+        "user": "test"
+      },
+      {
+        "bookmarks": [],
+        "branch": "test-branch",
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "create test branch",
+        "node": "6ab967a8ab3489227a83f80e920faa039a71819f",
+        "parents": [
+          "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e"
+        ],
+        "phase": "draft",
+        "tags": [],
+        "user": "test"
+      },
+      {
+        "bookmarks": [
+          "bookmark2"
+        ],
+        "branch": "default",
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "create tag2",
+        "node": "ceed296fe500c3fac9541e31dad860cb49c89e45",
+        "parents": [
+          "f2890a05fea49bfaf9fb27ed5490894eba32da78"
+        ],
+        "phase": "draft",
+        "tags": [],
+        "user": "test"
+      },
+      {
+        "bookmarks": [],
+        "branch": "default",
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "another commit to da/foo",
+        "node": "f2890a05fea49bfaf9fb27ed5490894eba32da78",
+        "parents": [
+          "93a8ce14f89156426b7fa981af8042da53f03aa0"
+        ],
+        "phase": "draft",
+        "tags": [
+          "tag2"
+        ],
+        "user": "test"
+      },
+      {
+        "bookmarks": [],
+        "branch": "default",
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "create tag",
+        "node": "93a8ce14f89156426b7fa981af8042da53f03aa0",
+        "parents": [
+          "78896eb0e102174ce9278438a95e12543e4367a7"
+        ],
+        "phase": "public",
+        "tags": [],
+        "user": "test"
+      },
+      {
+        "bookmarks": [],
+        "branch": "default",
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "move foo",
+        "node": "78896eb0e102174ce9278438a95e12543e4367a7",
+        "parents": [
+          "8d7c456572acf3557e8ed8a07286b10c408bcec5"
+        ],
+        "phase": "public",
+        "tags": [
+          "tag1"
+        ],
+        "user": "test"
+      },
+      {
+        "bookmarks": [
+          "bookmark1"
+        ],
+        "branch": "default",
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "modify da/foo",
+        "node": "8d7c456572acf3557e8ed8a07286b10c408bcec5",
+        "parents": [
+          "f8bbb9024b10f93cdbb8d940337398291d40dea8"
+        ],
+        "phase": "public",
+        "tags": [],
+        "user": "test"
+      },
+      {
+        "bookmarks": [],
+        "branch": "default",
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "modify foo",
+        "node": "f8bbb9024b10f93cdbb8d940337398291d40dea8",
+        "parents": [
+          "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e"
+        ],
+        "phase": "public",
+        "tags": [],
+        "user": "test"
+      },
+      {
+        "bookmarks": [],
+        "branch": "default",
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "initial",
+        "node": "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e",
+        "parents": [],
+        "phase": "public",
+        "tags": [],
+        "user": "test"
+      }
+    ],
+    "tags": [
+      {
+        "date": [
+          0.0,
+          0
+        ],
+        "node": "f2890a05fea49bfaf9fb27ed5490894eba32da78",
+        "tag": "tag2"
+      },
+      {
+        "date": [
+          0.0,
+          0
+        ],
+        "node": "78896eb0e102174ce9278438a95e12543e4367a7",
+        "tag": "tag1"
+      }
+    ]
+  }
+
+  $ request json-changelog?rev=create
+  200 Script output follows
+  
+  {
+    "entries": [
+      {
+        "bookmarks": [],
+        "branch": "test-branch",
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "create test branch",
+        "node": "6ab967a8ab3489227a83f80e920faa039a71819f",
+        "parents": [
+          "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e"
+        ],
+        "phase": "draft",
+        "tags": [],
+        "user": "test"
+      },
+      {
+        "bookmarks": [
+          "bookmark2"
+        ],
+        "branch": "default",
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "create tag2",
+        "node": "ceed296fe500c3fac9541e31dad860cb49c89e45",
+        "parents": [
+          "f2890a05fea49bfaf9fb27ed5490894eba32da78"
+        ],
+        "phase": "draft",
+        "tags": [],
+        "user": "test"
+      },
+      {
+        "bookmarks": [],
+        "branch": "default",
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "create tag",
+        "node": "93a8ce14f89156426b7fa981af8042da53f03aa0",
+        "parents": [
+          "78896eb0e102174ce9278438a95e12543e4367a7"
+        ],
+        "phase": "public",
+        "tags": [],
+        "user": "test"
+      }
+    ],
+    "node": "cc725e08502a79dd1eda913760fbe06ed7a9abc7",
+    "query": "create"
+  }
 
 filediff/{revision}/{path} shows changes to a file in a revision
 
@@ -862,7 +1233,98 @@
   $ request json-filelog/f8bbb9024b10/foo
   200 Script output follows
   
-  "not yet implemented"
+  {
+    "entries": [
+      {
+        "bookmarks": [],
+        "branch": "default",
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "modify foo",
+        "node": "f8bbb9024b10f93cdbb8d940337398291d40dea8",
+        "parents": [
+          "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e"
+        ],
+        "phase": "public",
+        "tags": [],
+        "user": "test"
+      },
+      {
+        "bookmarks": [],
+        "branch": "default",
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "initial",
+        "node": "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e",
+        "parents": [],
+        "phase": "public",
+        "tags": [],
+        "user": "test"
+      }
+    ]
+  }
+
+  $ request json-filelog/cc725e08502a/da/foo
+  200 Script output follows
+  
+  {
+    "entries": [
+      {
+        "bookmarks": [],
+        "branch": "default",
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "another commit to da/foo",
+        "node": "f2890a05fea49bfaf9fb27ed5490894eba32da78",
+        "parents": [
+          "8d7c456572acf3557e8ed8a07286b10c408bcec5"
+        ],
+        "phase": "draft",
+        "tags": [
+          "tag2"
+        ],
+        "user": "test"
+      },
+      {
+        "bookmarks": [
+          "bookmark1"
+        ],
+        "branch": "default",
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "modify da/foo",
+        "node": "8d7c456572acf3557e8ed8a07286b10c408bcec5",
+        "parents": [
+          "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e"
+        ],
+        "phase": "public",
+        "tags": [],
+        "user": "test"
+      },
+      {
+        "bookmarks": [],
+        "branch": "default",
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "initial",
+        "node": "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e",
+        "parents": [],
+        "phase": "public",
+        "tags": [],
+        "user": "test"
+      }
+    ]
+  }
 
 (archive/ doesn't use templating, so ignore it)
 
--- a/tests/test-hgweb-no-path-info.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-hgweb-no-path-info.t	Mon Jul 18 23:28:14 2016 -0500
@@ -79,16 +79,16 @@
   <?xml version="1.0" encoding="ascii"?>
   <feed xmlns="http://www.w3.org/2005/Atom">
    <!-- Changelog -->
-   <id>http://127.0.0.1:$HGPORT/</id>
-   <link rel="self" href="http://127.0.0.1:$HGPORT/atom-log"/>
-   <link rel="alternate" href="http://127.0.0.1:$HGPORT/"/>
+   <id>http://127.0.0.1:$HGPORT/</id> (glob)
+   <link rel="self" href="http://127.0.0.1:$HGPORT/atom-log"/> (glob)
+   <link rel="alternate" href="http://127.0.0.1:$HGPORT/"/> (glob)
    <title>repo Changelog</title>
    <updated>1970-01-01T00:00:00+00:00</updated>
   
    <entry>
     <title>[default] test</title>
-    <id>http://127.0.0.1:$HGPORT/#changeset-61c9426e69fef294feed5e2bbfc97d39944a5b1c</id>
-    <link href="http://127.0.0.1:$HGPORT/rev/61c9426e69fe"/>
+    <id>http://127.0.0.1:$HGPORT/#changeset-61c9426e69fef294feed5e2bbfc97d39944a5b1c</id> (glob)
+    <link href="http://127.0.0.1:$HGPORT/rev/61c9426e69fe"/> (glob)
     <author>
      <name>test</name>
      <email>&#116;&#101;&#115;&#116;</email>
@@ -96,36 +96,36 @@
     <updated>1970-01-01T00:00:00+00:00</updated>
     <published>1970-01-01T00:00:00+00:00</published>
     <content type="xhtml">
-  	<table xmlns="http://www.w3.org/1999/xhtml">
-  	<tr>
-  		<th style="text-align:left;">changeset</th>
-  		<td>61c9426e69fe</td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">branch</th>
-                                <td>default</td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">bookmark</th>
-  		<td></td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;">tag</th>
-  		<td>tip</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;">user</th>
-  		<td>&#116;&#101;&#115;&#116;</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">description</th>
-  		<td>test</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">files</th>
-  		<td>bar<br /></td>
-  	</tr>
-  	</table>
+     <table xmlns="http://www.w3.org/1999/xhtml">
+      <tr>
+       <th style="text-align:left;">changeset</th>
+       <td>61c9426e69fe</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">branch</th>
+       <td>default</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">bookmark</th>
+       <td></td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">tag</th>
+       <td>tip</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">user</th>
+       <td>&#116;&#101;&#115;&#116;</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;vertical-align:top;">description</th>
+       <td>test</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;vertical-align:top;">files</th>
+       <td>bar<br /></td>
+      </tr>
+     </table>
     </content>
    </entry>
   
--- a/tests/test-hgweb-no-request-uri.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-hgweb-no-request-uri.t	Mon Jul 18 23:28:14 2016 -0500
@@ -90,16 +90,16 @@
   <?xml version="1.0" encoding="ascii"?>
   <feed xmlns="http://www.w3.org/2005/Atom">
    <!-- Changelog -->
-   <id>http://127.0.0.1:$HGPORT/</id>
-   <link rel="self" href="http://127.0.0.1:$HGPORT/atom-log"/>
-   <link rel="alternate" href="http://127.0.0.1:$HGPORT/"/>
+   <id>http://127.0.0.1:$HGPORT/</id> (glob)
+   <link rel="self" href="http://127.0.0.1:$HGPORT/atom-log"/> (glob)
+   <link rel="alternate" href="http://127.0.0.1:$HGPORT/"/> (glob)
    <title>repo Changelog</title>
    <updated>1970-01-01T00:00:00+00:00</updated>
   
    <entry>
     <title>[default] test</title>
-    <id>http://127.0.0.1:$HGPORT/#changeset-61c9426e69fef294feed5e2bbfc97d39944a5b1c</id>
-    <link href="http://127.0.0.1:$HGPORT/rev/61c9426e69fe"/>
+    <id>http://127.0.0.1:$HGPORT/#changeset-61c9426e69fef294feed5e2bbfc97d39944a5b1c</id> (glob)
+    <link href="http://127.0.0.1:$HGPORT/rev/61c9426e69fe"/> (glob)
     <author>
      <name>test</name>
      <email>&#116;&#101;&#115;&#116;</email>
@@ -107,36 +107,36 @@
     <updated>1970-01-01T00:00:00+00:00</updated>
     <published>1970-01-01T00:00:00+00:00</published>
     <content type="xhtml">
-  	<table xmlns="http://www.w3.org/1999/xhtml">
-  	<tr>
-  		<th style="text-align:left;">changeset</th>
-  		<td>61c9426e69fe</td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">branch</th>
-                                <td>default</td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">bookmark</th>
-  		<td></td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;">tag</th>
-  		<td>tip</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;">user</th>
-  		<td>&#116;&#101;&#115;&#116;</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">description</th>
-  		<td>test</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">files</th>
-  		<td>bar<br /></td>
-  	</tr>
-  	</table>
+     <table xmlns="http://www.w3.org/1999/xhtml">
+      <tr>
+       <th style="text-align:left;">changeset</th>
+       <td>61c9426e69fe</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">branch</th>
+       <td>default</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">bookmark</th>
+       <td></td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">tag</th>
+       <td>tip</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">user</th>
+       <td>&#116;&#101;&#115;&#116;</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;vertical-align:top;">description</th>
+       <td>test</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;vertical-align:top;">files</th>
+       <td>bar<br /></td>
+      </tr>
+     </table>
     </content>
    </entry>
   
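The two atom-feed hunks above (tests/test-hgweb-no-path-info.t and tests/test-hgweb-no-request-uri.t) make the same change: the lines carrying $HGPORT URLs gain a (glob) annotation and the embedded XHTML table is reindented. As a rough illustration only, assuming the (glob) annotation requests shell-style wildcard comparison of expected output, here is a minimal sketch; glob_match is a hypothetical helper, not anything taken from run-tests.py:

  import fnmatch

  def glob_match(expected, actual):
      # Shell-style comparison: '*' spans any run of characters,
      # '?' a single character, everything else must match literally.
      return fnmatch.fnmatchcase(actual, expected)

  # Hypothetical usage, with the port filled in at run time.
  expected = '<id>http://127.0.0.1:*/</id>'
  actual = '<id>http://127.0.0.1:20059/</id>'
  print(glob_match(expected, actual))  # True
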
--- a/tests/test-hgweb-symrev.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-hgweb-symrev.t	Mon Jul 18 23:28:14 2016 -0500
@@ -190,8 +190,15 @@
    annotate foo @ 1:<a href="/rev/a7c1559b7bba?style=paper">a7c1559b7bba</a>
    <td class="author"><a href="/file/43c799df6e75/foo?style=paper">43c799df6e75</a> </td>
    <td class="author"><a href="/file/9d8c40cba617/foo?style=paper">9d8c40cba617</a> </td>
-  <a href="/annotate/43c799df6e75/foo?style=paper#l1"
-  <a href="/annotate/a7c1559b7bba/foo?style=paper#l2"
+  <a href="/annotate/43c799df6e75/foo?style=paper#l1">
+  <a href="/annotate/43c799df6e75/foo?style=paper#l1">
+  <a href="/diff/43c799df6e75/foo?style=paper">diff</a>
+  <a href="/rev/43c799df6e75?style=paper">changeset</a>
+  <a href="/annotate/a7c1559b7bba/foo?style=paper#l2">
+  <a href="/annotate/a7c1559b7bba/foo?style=paper#l2">
+  <a href="/annotate/43c799df6e75/foo?style=paper">0</a></div>
+  <a href="/diff/a7c1559b7bba/foo?style=paper">diff</a>
+  <a href="/rev/a7c1559b7bba?style=paper">changeset</a>
 
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'diff/xyzzy/foo?style=paper' | egrep $REVLINKS
   <li><a href="/shortlog/xyzzy?style=paper">log</a></li>
@@ -378,8 +385,15 @@
    annotate foo @ 1:<a href="/rev/a7c1559b7bba?style=coal">a7c1559b7bba</a>
    <td class="author"><a href="/file/43c799df6e75/foo?style=coal">43c799df6e75</a> </td>
    <td class="author"><a href="/file/9d8c40cba617/foo?style=coal">9d8c40cba617</a> </td>
-  <a href="/annotate/43c799df6e75/foo?style=coal#l1"
-  <a href="/annotate/a7c1559b7bba/foo?style=coal#l2"
+  <a href="/annotate/43c799df6e75/foo?style=coal#l1">
+  <a href="/annotate/43c799df6e75/foo?style=coal#l1">
+  <a href="/diff/43c799df6e75/foo?style=coal">diff</a>
+  <a href="/rev/43c799df6e75?style=coal">changeset</a>
+  <a href="/annotate/a7c1559b7bba/foo?style=coal#l2">
+  <a href="/annotate/a7c1559b7bba/foo?style=coal#l2">
+  <a href="/annotate/43c799df6e75/foo?style=coal">0</a></div>
+  <a href="/diff/a7c1559b7bba/foo?style=coal">diff</a>
+  <a href="/rev/a7c1559b7bba?style=coal">changeset</a>
 
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'diff/xyzzy/foo?style=coal' | egrep $REVLINKS
   <li><a href="/shortlog/xyzzy?style=coal">log</a></li>
@@ -616,8 +630,15 @@
    <td style="font-family:monospace"><a class="list" href="/rev/a7c1559b7bba?style=gitweb">a7c1559b7bba</a></td>
   <a class="list" href="/annotate/43c799df6e75/foo?style=gitweb">
   <a class="list" href="/annotate/9d8c40cba617/foo?style=gitweb">9d8c40cba617</a></td>
-  <a href="/annotate/43c799df6e75/foo?style=gitweb#l1"
-  <a href="/annotate/a7c1559b7bba/foo?style=gitweb#l2"
+  <a href="/annotate/43c799df6e75/foo?style=gitweb#l1">
+  <a href="/annotate/43c799df6e75/foo?style=gitweb#l1">
+  <a href="/diff/43c799df6e75/foo?style=gitweb">diff</a>
+  <a href="/rev/43c799df6e75?style=gitweb">changeset</a>
+  <a href="/annotate/a7c1559b7bba/foo?style=gitweb#l2">
+  <a href="/annotate/a7c1559b7bba/foo?style=gitweb#l2">
+  <a href="/annotate/43c799df6e75/foo?style=gitweb">0</a></div>
+  <a href="/diff/a7c1559b7bba/foo?style=gitweb">diff</a>
+  <a href="/rev/a7c1559b7bba?style=gitweb">changeset</a>
 
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'diff/xyzzy/foo?style=gitweb' | egrep $REVLINKS
   <a href="/file/xyzzy?style=gitweb">files</a> |
@@ -832,8 +853,15 @@
           <dd><a href="/rev/a7c1559b7bba?style=monoblue">a7c1559b7bba</a></dd>
   <a href="/annotate/43c799df6e75/foo?style=monoblue">
   <a href="/annotate/9d8c40cba617/foo?style=monoblue">9d8c40cba617</a>
-  <a href="/annotate/43c799df6e75/foo?style=monoblue#l1"
-  <a href="/annotate/a7c1559b7bba/foo?style=monoblue#l2"
+  <a href="/annotate/43c799df6e75/foo?style=monoblue#l1">
+  <a href="/annotate/43c799df6e75/foo?style=monoblue#l1">
+  <a href="/diff/43c799df6e75/foo?style=monoblue">diff</a>
+  <a href="/rev/43c799df6e75?style=monoblue">changeset</a>
+  <a href="/annotate/a7c1559b7bba/foo?style=monoblue#l2">
+  <a href="/annotate/a7c1559b7bba/foo?style=monoblue#l2">
+  <a href="/annotate/43c799df6e75/foo?style=monoblue">0</a></div>
+  <a href="/diff/a7c1559b7bba/foo?style=monoblue">diff</a>
+  <a href="/rev/a7c1559b7bba?style=monoblue">changeset</a>
 
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'diff/xyzzy/foo?style=monoblue' | egrep $REVLINKS
               <li><a href="/graph/xyzzy?style=monoblue">graph</a></li>
@@ -1029,8 +1057,15 @@
    <td><a href="/rev/a7c1559b7bba?style=spartan">a7c1559b7bba</a></td>
   <a href="/annotate/43c799df6e75/foo?style=spartan">
   <td><a href="/annotate/9d8c40cba617/foo?style=spartan">9d8c40cba617</a></td>
-  <a href="/annotate/43c799df6e75/foo?style=spartan#l1"
-  <a href="/annotate/a7c1559b7bba/foo?style=spartan#l2"
+  <a href="/annotate/43c799df6e75/foo?style=spartan#l1">
+  <a href="/annotate/43c799df6e75/foo?style=spartan#l1">
+  <a href="/diff/43c799df6e75/foo?style=spartan">diff</a>
+  <a href="/rev/43c799df6e75?style=spartan">changeset</a>
+  <a href="/annotate/a7c1559b7bba/foo?style=spartan#l2">
+  <a href="/annotate/a7c1559b7bba/foo?style=spartan#l2">
+  <a href="/annotate/43c799df6e75/foo?style=spartan">0</a></div>
+  <a href="/diff/a7c1559b7bba/foo?style=spartan">diff</a>
+  <a href="/rev/a7c1559b7bba?style=spartan">changeset</a>
 
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'diff/xyzzy/foo?style=spartan' | egrep $REVLINKS
   <a href="/log/xyzzy?style=spartan">changelog</a>
--- a/tests/test-hgweb.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-hgweb.t	Mon Jul 18 23:28:14 2016 -0500
@@ -340,7 +340,7 @@
 
   $ get-with-headers.py --twice localhost:$HGPORT 'static/style-gitweb.css' - date etag server
   200 Script output follows
-  content-length: 6521
+  content-length: 6947
   content-type: text/css
   
   body { font-family: sans-serif; font-size: 12px; border:solid #d9d8d1; border-width:1px; margin:10px; background: white; color: black; }
@@ -374,6 +374,7 @@
   a.list:hover { text-decoration:underline; color:#880000; }
   table { padding:8px 4px; }
   th { padding:2px 5px; font-size:12px; text-align:left; }
+  .parity0 { background-color:#ffffff; }
   tr.dark, .parity1, pre.sourcelines.stripes > :nth-child(4n+4) { background-color:#f6f6f0; }
   tr.light:hover, .parity0:hover, tr.dark:hover, .parity1:hover,
   pre.sourcelines.stripes > :nth-child(4n+2):hover,
@@ -397,6 +398,19 @@
   div.diff_info { font-family:monospace; color:#000099; background-color:#edece6; font-style:italic; }
   div.index_include { border:solid #d9d8d1; border-width:0px 0px 1px; padding:12px 8px; }
   div.search { margin:4px 8px; position:absolute; top:56px; right:12px }
+  tr.thisrev a { color:#999999; text-decoration: none; }
+  tr.thisrev pre { color:#009900; }
+  div.annotate-info {
+    display: none;
+    position: absolute;
+    background-color: #FFFFFF;
+    border: 1px solid #000000;
+    text-align: left;
+    color: #000000;
+    padding: 5px;
+  }
+  div.annotate-info a { color: #0000FF; text-decoration: underline; }
+  td.annotate:hover div.annotate-info { display: inline; }
   .linenr { color:#999999; text-decoration:none }
   div.rss_logo { float: right; white-space: nowrap; }
   div.rss_logo a {
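The tests/test-hgweb.t hunk above tracks the style-gitweb.css additions (the explicit .parity0 background plus the tr.thisrev and div.annotate-info hover rules), so the expected content-length for the stylesheet grows from 6521 to 6947 bytes. A minimal sketch, assuming a locally running hgweb on a placeholder port, of how that header value could be re-derived instead of hand-counted:

  from urllib.request import urlopen

  # Placeholder URL; point it at whatever hgweb instance serves the
  # stylesheet under test.
  URL = 'http://127.0.0.1:8000/static/style-gitweb.css'

  with urlopen(URL) as resp:
      body = resp.read()
      # The reported header and the actual body length should agree.
      print(resp.headers.get('Content-Length'), len(body))
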
--- a/tests/test-hgwebdir.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-hgwebdir.t	Mon Jul 18 23:28:14 2016 -0500
@@ -62,6 +62,7 @@
   $ cat >> f/.hg/hgrc << EOF
   > [web]
   > name = fancy name for repo f
+  > labels = foo, bar
   > EOF
   $ cd ..
 
@@ -108,12 +109,14 @@
   "name": "a",
   "description": "unknown",
   "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
-  "lastchange": [*, *] (glob)
+  "lastchange": [*, *], (glob)
+  "labels": []
   }, {
   "name": "b",
   "description": "unknown",
   "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
-  "lastchange": [*, *] (glob)
+  "lastchange": [*, *], (glob)
+  "labels": []
   }]
   } (no-eol)
 
@@ -201,6 +204,218 @@
   /astar/
   /astar/.hg/patches/
   
+
+  $ get-with-headers.py localhost:$HGPORT1 '?style=json'
+  200 Script output follows
+  
+  {
+  "entries": [{
+  "name": "t/a",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": []
+  }, {
+  "name": "b",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": []
+  }, {
+  "name": "coll/a",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": []
+  }, {
+  "name": "coll/a/.hg/patches",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": []
+  }, {
+  "name": "coll/b",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": []
+  }, {
+  "name": "coll/c",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": []
+  }, {
+  "name": "coll/notrepo/e",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": []
+  }, {
+  "name": "fancy name for repo f",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": ["foo", "bar"]
+  }, {
+  "name": "rcoll/a",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": []
+  }, {
+  "name": "rcoll/a/.hg/patches",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": []
+  }, {
+  "name": "rcoll/b",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": []
+  }, {
+  "name": "rcoll/b/d",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": []
+  }, {
+  "name": "rcoll/c",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": []
+  }, {
+  "name": "rcoll/notrepo/e",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": []
+  }, {
+  "name": "rcoll/notrepo/e/e2",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": []
+  }, {
+  "name": "fancy name for repo f",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": ["foo", "bar"]
+  }, {
+  "name": "rcoll/notrepo/f/f2",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": []
+  }, {
+  "name": "star/webdir/a",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": []
+  }, {
+  "name": "star/webdir/a/.hg/patches",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": []
+  }, {
+  "name": "star/webdir/b",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": []
+  }, {
+  "name": "star/webdir/c",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": []
+  }, {
+  "name": "star/webdir/notrepo/e",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": []
+  }, {
+  "name": "fancy name for repo f",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": ["foo", "bar"]
+  }, {
+  "name": "starstar/webdir/a",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": []
+  }, {
+  "name": "starstar/webdir/a/.hg/patches",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": []
+  }, {
+  "name": "starstar/webdir/b",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": []
+  }, {
+  "name": "starstar/webdir/b/d",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": []
+  }, {
+  "name": "starstar/webdir/c",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": []
+  }, {
+  "name": "starstar/webdir/notrepo/e",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": []
+  }, {
+  "name": "starstar/webdir/notrepo/e/e2",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": []
+  }, {
+  "name": "fancy name for repo f",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": ["foo", "bar"]
+  }, {
+  "name": "starstar/webdir/notrepo/f/f2",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": []
+  }, {
+  "name": "astar",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": []
+  }, {
+  "name": "astar/.hg/patches",
+  "description": "unknown",
+  "contact": "Foo Bar \u003cfoo.bar@example.com\u003e",
+  "lastchange": [*, *], (glob)
+  "labels": []
+  }]
+  } (no-eol)
+
   $ get-with-headers.py localhost:$HGPORT1 '?style=paper'
   200 Script output follows
   
@@ -783,36 +998,36 @@
     <updated>1970-01-01T00:00:01+00:00</updated>
     <published>1970-01-01T00:00:01+00:00</published>
     <content type="xhtml">
-  	<table xmlns="http://www.w3.org/1999/xhtml">
-  	<tr>
-  		<th style="text-align:left;">changeset</th>
-  		<td>8580ff50825a</td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">branch</th>
-                                <td>default</td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">bookmark</th>
-  		<td></td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;">tag</th>
-  		<td>tip</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;">user</th>
-  		<td>&#116;&#101;&#115;&#116;</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">description</th>
-  		<td>a</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">files</th>
-  		<td>a<br /></td>
-  	</tr>
-  	</table>
+     <table xmlns="http://www.w3.org/1999/xhtml">
+      <tr>
+       <th style="text-align:left;">changeset</th>
+       <td>8580ff50825a</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">branch</th>
+       <td>default</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">bookmark</th>
+       <td></td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">tag</th>
+       <td>tip</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">user</th>
+       <td>&#116;&#101;&#115;&#116;</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;vertical-align:top;">description</th>
+       <td>a</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;vertical-align:top;">files</th>
+       <td>a<br /></td>
+      </tr>
+     </table>
     </content>
    </entry>
   
@@ -840,36 +1055,36 @@
     <updated>1970-01-01T00:00:01+00:00</updated>
     <published>1970-01-01T00:00:01+00:00</published>
     <content type="xhtml">
-  	<table xmlns="http://www.w3.org/1999/xhtml">
-  	<tr>
-  		<th style="text-align:left;">changeset</th>
-  		<td>8580ff50825a</td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">branch</th>
-                                <td>default</td>
-                </tr>
-                <tr>
-                                <th style="text-align:left;">bookmark</th>
-  		<td></td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;">tag</th>
-  		<td>tip</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;">user</th>
-  		<td>&#116;&#101;&#115;&#116;</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">description</th>
-  		<td>a</td>
-  	</tr>
-  	<tr>
-  		<th style="text-align:left;vertical-align:top;">files</th>
-  		<td>a<br /></td>
-  	</tr>
-  	</table>
+     <table xmlns="http://www.w3.org/1999/xhtml">
+      <tr>
+       <th style="text-align:left;">changeset</th>
+       <td>8580ff50825a</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">branch</th>
+       <td>default</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">bookmark</th>
+       <td></td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">tag</th>
+       <td>tip</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;">user</th>
+       <td>&#116;&#101;&#115;&#116;</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;vertical-align:top;">description</th>
+       <td>a</td>
+      </tr>
+      <tr>
+       <th style="text-align:left;vertical-align:top;">files</th>
+       <td>a<br /></td>
+      </tr>
+     </table>
     </content>
    </entry>
   
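The tests/test-hgwebdir.t hunks above introduce the [web] labels option and a matching "labels" list in the JSON index output (an empty list for repositories that do not set it). A minimal sketch, with a placeholder host and port, of consuming that index from Python:

  import json
  from urllib.request import urlopen

  # Placeholder address for a running hgwebdir instance.
  URL = 'http://127.0.0.1:8000/?style=json'

  with urlopen(URL) as resp:
      index = json.loads(resp.read().decode('utf-8'))

  for entry in index['entries']:
      # 'labels' mirrors each repository's [web] labels setting and is
      # an empty list when the option is unset.
      print(entry['name'], entry.get('labels', []))
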
--- a/tests/test-highlight.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-highlight.t	Mon Jul 18 23:28:14 2016 -0500
@@ -288,234 +288,566 @@
   </thead>
   <tbody class="stripes2">
     
-  <tr id="l1">
-  <td class="annotate">
-  <a href="/annotate/06824edf55d0/primes.py#l1"
-  title="06824edf55d0: a">test@0</a>
+  <tr id="l1" class="thisrev">
+  <td class="annotate parity0">
+  <a href="/annotate/06824edf55d0/primes.py#l1">
+  0
+  </a>
+  <div class="annotate-info">
+  <div>
+  <a href="/annotate/06824edf55d0/primes.py#l1">
+  06824edf55d0</a>
+  a
+  </div>
+  <div><em>&#116;&#101;&#115;&#116;</em></div>
+  <div>parents: </div>
+  <a href="/diff/06824edf55d0/primes.py">diff</a>
+  <a href="/rev/06824edf55d0">changeset</a>
+  </div>
   </td>
   <td class="source"><a href="#l1">     1</a> <span class="c">#!/usr/bin/env python</span></td>
   </tr>
-  <tr id="l2">
-  <td class="annotate">
-  <a href="/annotate/06824edf55d0/primes.py#l2"
-  title="06824edf55d0: a">test@0</a>
+  <tr id="l2" class="thisrev">
+  <td class="annotate parity0">
+  
+  <div class="annotate-info">
+  <div>
+  <a href="/annotate/06824edf55d0/primes.py#l2">
+  06824edf55d0</a>
+  a
+  </div>
+  <div><em>&#116;&#101;&#115;&#116;</em></div>
+  <div>parents: </div>
+  <a href="/diff/06824edf55d0/primes.py">diff</a>
+  <a href="/rev/06824edf55d0">changeset</a>
+  </div>
   </td>
   <td class="source"><a href="#l2">     2</a> </td>
   </tr>
-  <tr id="l3">
-  <td class="annotate">
-  <a href="/annotate/06824edf55d0/primes.py#l3"
-  title="06824edf55d0: a">test@0</a>
+  <tr id="l3" class="thisrev">
+  <td class="annotate parity0">
+  
+  <div class="annotate-info">
+  <div>
+  <a href="/annotate/06824edf55d0/primes.py#l3">
+  06824edf55d0</a>
+  a
+  </div>
+  <div><em>&#116;&#101;&#115;&#116;</em></div>
+  <div>parents: </div>
+  <a href="/diff/06824edf55d0/primes.py">diff</a>
+  <a href="/rev/06824edf55d0">changeset</a>
+  </div>
   </td>
   <td class="source"><a href="#l3">     3</a> <span class="sd">&quot;&quot;&quot;Fun with generators. Corresponding Haskell implementation:</span></td>
   </tr>
-  <tr id="l4">
-  <td class="annotate">
-  <a href="/annotate/06824edf55d0/primes.py#l4"
-  title="06824edf55d0: a">test@0</a>
+  <tr id="l4" class="thisrev">
+  <td class="annotate parity0">
+  
+  <div class="annotate-info">
+  <div>
+  <a href="/annotate/06824edf55d0/primes.py#l4">
+  06824edf55d0</a>
+  a
+  </div>
+  <div><em>&#116;&#101;&#115;&#116;</em></div>
+  <div>parents: </div>
+  <a href="/diff/06824edf55d0/primes.py">diff</a>
+  <a href="/rev/06824edf55d0">changeset</a>
+  </div>
   </td>
   <td class="source"><a href="#l4">     4</a> </td>
   </tr>
-  <tr id="l5">
-  <td class="annotate">
-  <a href="/annotate/06824edf55d0/primes.py#l5"
-  title="06824edf55d0: a">test@0</a>
+  <tr id="l5" class="thisrev">
+  <td class="annotate parity0">
+  
+  <div class="annotate-info">
+  <div>
+  <a href="/annotate/06824edf55d0/primes.py#l5">
+  06824edf55d0</a>
+  a
+  </div>
+  <div><em>&#116;&#101;&#115;&#116;</em></div>
+  <div>parents: </div>
+  <a href="/diff/06824edf55d0/primes.py">diff</a>
+  <a href="/rev/06824edf55d0">changeset</a>
+  </div>
   </td>
   <td class="source"><a href="#l5">     5</a> <span class="sd">primes = 2 : sieve [3, 5..]</span></td>
   </tr>
-  <tr id="l6">
-  <td class="annotate">
-  <a href="/annotate/06824edf55d0/primes.py#l6"
-  title="06824edf55d0: a">test@0</a>
+  <tr id="l6" class="thisrev">
+  <td class="annotate parity0">
+  
+  <div class="annotate-info">
+  <div>
+  <a href="/annotate/06824edf55d0/primes.py#l6">
+  06824edf55d0</a>
+  a
+  </div>
+  <div><em>&#116;&#101;&#115;&#116;</em></div>
+  <div>parents: </div>
+  <a href="/diff/06824edf55d0/primes.py">diff</a>
+  <a href="/rev/06824edf55d0">changeset</a>
+  </div>
   </td>
   <td class="source"><a href="#l6">     6</a> <span class="sd">    where sieve (p:ns) = p : sieve [n | n &lt;- ns, mod n p /= 0]</span></td>
   </tr>
-  <tr id="l7">
-  <td class="annotate">
-  <a href="/annotate/06824edf55d0/primes.py#l7"
-  title="06824edf55d0: a">test@0</a>
+  <tr id="l7" class="thisrev">
+  <td class="annotate parity0">
+  
+  <div class="annotate-info">
+  <div>
+  <a href="/annotate/06824edf55d0/primes.py#l7">
+  06824edf55d0</a>
+  a
+  </div>
+  <div><em>&#116;&#101;&#115;&#116;</em></div>
+  <div>parents: </div>
+  <a href="/diff/06824edf55d0/primes.py">diff</a>
+  <a href="/rev/06824edf55d0">changeset</a>
+  </div>
   </td>
   <td class="source"><a href="#l7">     7</a> <span class="sd">&quot;&quot;&quot;</span></td>
   </tr>
-  <tr id="l8">
-  <td class="annotate">
-  <a href="/annotate/06824edf55d0/primes.py#l8"
-  title="06824edf55d0: a">test@0</a>
+  <tr id="l8" class="thisrev">
+  <td class="annotate parity0">
+  
+  <div class="annotate-info">
+  <div>
+  <a href="/annotate/06824edf55d0/primes.py#l8">
+  06824edf55d0</a>
+  a
+  </div>
+  <div><em>&#116;&#101;&#115;&#116;</em></div>
+  <div>parents: </div>
+  <a href="/diff/06824edf55d0/primes.py">diff</a>
+  <a href="/rev/06824edf55d0">changeset</a>
+  </div>
   </td>
   <td class="source"><a href="#l8">     8</a> </td>
   </tr>
-  <tr id="l9">
-  <td class="annotate">
-  <a href="/annotate/06824edf55d0/primes.py#l9"
-  title="06824edf55d0: a">test@0</a>
+  <tr id="l9" class="thisrev">
+  <td class="annotate parity0">
+  
+  <div class="annotate-info">
+  <div>
+  <a href="/annotate/06824edf55d0/primes.py#l9">
+  06824edf55d0</a>
+  a
+  </div>
+  <div><em>&#116;&#101;&#115;&#116;</em></div>
+  <div>parents: </div>
+  <a href="/diff/06824edf55d0/primes.py">diff</a>
+  <a href="/rev/06824edf55d0">changeset</a>
+  </div>
   </td>
   <td class="source"><a href="#l9">     9</a> <span class="kn">from</span> <span class="nn">itertools</span> <span class="kn">import</span> <span class="n">dropwhile</span><span class="p">,</span> <span class="n">ifilter</span><span class="p">,</span> <span class="n">islice</span><span class="p">,</span> <span class="n">count</span><span class="p">,</span> <span class="n">chain</span></td>
   </tr>
-  <tr id="l10">
-  <td class="annotate">
-  <a href="/annotate/06824edf55d0/primes.py#l10"
-  title="06824edf55d0: a">test@0</a>
+  <tr id="l10" class="thisrev">
+  <td class="annotate parity0">
+  
+  <div class="annotate-info">
+  <div>
+  <a href="/annotate/06824edf55d0/primes.py#l10">
+  06824edf55d0</a>
+  a
+  </div>
+  <div><em>&#116;&#101;&#115;&#116;</em></div>
+  <div>parents: </div>
+  <a href="/diff/06824edf55d0/primes.py">diff</a>
+  <a href="/rev/06824edf55d0">changeset</a>
+  </div>
   </td>
   <td class="source"><a href="#l10">    10</a> </td>
   </tr>
-  <tr id="l11">
-  <td class="annotate">
-  <a href="/annotate/06824edf55d0/primes.py#l11"
-  title="06824edf55d0: a">test@0</a>
+  <tr id="l11" class="thisrev">
+  <td class="annotate parity0">
+  
+  <div class="annotate-info">
+  <div>
+  <a href="/annotate/06824edf55d0/primes.py#l11">
+  06824edf55d0</a>
+  a
+  </div>
+  <div><em>&#116;&#101;&#115;&#116;</em></div>
+  <div>parents: </div>
+  <a href="/diff/06824edf55d0/primes.py">diff</a>
+  <a href="/rev/06824edf55d0">changeset</a>
+  </div>
   </td>
   <td class="source"><a href="#l11">    11</a> <span class="kn">def</span> <span class="nf">primes</span><span class="p">():</span></td>
   </tr>
-  <tr id="l12">
-  <td class="annotate">
-  <a href="/annotate/06824edf55d0/primes.py#l12"
-  title="06824edf55d0: a">test@0</a>
+  <tr id="l12" class="thisrev">
+  <td class="annotate parity0">
+  
+  <div class="annotate-info">
+  <div>
+  <a href="/annotate/06824edf55d0/primes.py#l12">
+  06824edf55d0</a>
+  a
+  </div>
+  <div><em>&#116;&#101;&#115;&#116;</em></div>
+  <div>parents: </div>
+  <a href="/diff/06824edf55d0/primes.py">diff</a>
+  <a href="/rev/06824edf55d0">changeset</a>
+  </div>
   </td>
   <td class="source"><a href="#l12">    12</a>     <span class="sd">&quot;&quot;&quot;Generate all primes.&quot;&quot;&quot;</span></td>
   </tr>
-  <tr id="l13">
-  <td class="annotate">
-  <a href="/annotate/06824edf55d0/primes.py#l13"
-  title="06824edf55d0: a">test@0</a>
+  <tr id="l13" class="thisrev">
+  <td class="annotate parity0">
+  
+  <div class="annotate-info">
+  <div>
+  <a href="/annotate/06824edf55d0/primes.py#l13">
+  06824edf55d0</a>
+  a
+  </div>
+  <div><em>&#116;&#101;&#115;&#116;</em></div>
+  <div>parents: </div>
+  <a href="/diff/06824edf55d0/primes.py">diff</a>
+  <a href="/rev/06824edf55d0">changeset</a>
+  </div>
   </td>
   <td class="source"><a href="#l13">    13</a>     <span class="kn">def</span> <span class="nf">sieve</span><span class="p">(</span><span class="n">ns</span><span class="p">):</span></td>
   </tr>
-  <tr id="l14">
-  <td class="annotate">
-  <a href="/annotate/06824edf55d0/primes.py#l14"
-  title="06824edf55d0: a">test@0</a>
+  <tr id="l14" class="thisrev">
+  <td class="annotate parity0">
+  
+  <div class="annotate-info">
+  <div>
+  <a href="/annotate/06824edf55d0/primes.py#l14">
+  06824edf55d0</a>
+  a
+  </div>
+  <div><em>&#116;&#101;&#115;&#116;</em></div>
+  <div>parents: </div>
+  <a href="/diff/06824edf55d0/primes.py">diff</a>
+  <a href="/rev/06824edf55d0">changeset</a>
+  </div>
   </td>
   <td class="source"><a href="#l14">    14</a>         <span class="n">p</span> <span class="o">=</span> <span class="n">ns</span><span class="o">.</span><span class="n">next</span><span class="p">()</span></td>
   </tr>
-  <tr id="l15">
-  <td class="annotate">
-  <a href="/annotate/06824edf55d0/primes.py#l15"
-  title="06824edf55d0: a">test@0</a>
+  <tr id="l15" class="thisrev">
+  <td class="annotate parity0">
+  
+  <div class="annotate-info">
+  <div>
+  <a href="/annotate/06824edf55d0/primes.py#l15">
+  06824edf55d0</a>
+  a
+  </div>
+  <div><em>&#116;&#101;&#115;&#116;</em></div>
+  <div>parents: </div>
+  <a href="/diff/06824edf55d0/primes.py">diff</a>
+  <a href="/rev/06824edf55d0">changeset</a>
+  </div>
   </td>
   <td class="source"><a href="#l15">    15</a>         <span class="c"># It is important to yield *here* in order to stop the</span></td>
   </tr>
-  <tr id="l16">
-  <td class="annotate">
-  <a href="/annotate/06824edf55d0/primes.py#l16"
-  title="06824edf55d0: a">test@0</a>
+  <tr id="l16" class="thisrev">
+  <td class="annotate parity0">
+  
+  <div class="annotate-info">
+  <div>
+  <a href="/annotate/06824edf55d0/primes.py#l16">
+  06824edf55d0</a>
+  a
+  </div>
+  <div><em>&#116;&#101;&#115;&#116;</em></div>
+  <div>parents: </div>
+  <a href="/diff/06824edf55d0/primes.py">diff</a>
+  <a href="/rev/06824edf55d0">changeset</a>
+  </div>
   </td>
   <td class="source"><a href="#l16">    16</a>         <span class="c"># infinite recursion.</span></td>
   </tr>
-  <tr id="l17">
-  <td class="annotate">
-  <a href="/annotate/06824edf55d0/primes.py#l17"
-  title="06824edf55d0: a">test@0</a>
+  <tr id="l17" class="thisrev">
+  <td class="annotate parity0">
+  
+  <div class="annotate-info">
+  <div>
+  <a href="/annotate/06824edf55d0/primes.py#l17">
+  06824edf55d0</a>
+  a
+  </div>
+  <div><em>&#116;&#101;&#115;&#116;</em></div>
+  <div>parents: </div>
+  <a href="/diff/06824edf55d0/primes.py">diff</a>
+  <a href="/rev/06824edf55d0">changeset</a>
+  </div>
   </td>
   <td class="source"><a href="#l17">    17</a>         <span class="kn">yield</span> <span class="n">p</span></td>
   </tr>
-  <tr id="l18">
-  <td class="annotate">
-  <a href="/annotate/06824edf55d0/primes.py#l18"
-  title="06824edf55d0: a">test@0</a>
+  <tr id="l18" class="thisrev">
+  <td class="annotate parity0">
+  
+  <div class="annotate-info">
+  <div>
+  <a href="/annotate/06824edf55d0/primes.py#l18">
+  06824edf55d0</a>
+  a
+  </div>
+  <div><em>&#116;&#101;&#115;&#116;</em></div>
+  <div>parents: </div>
+  <a href="/diff/06824edf55d0/primes.py">diff</a>
+  <a href="/rev/06824edf55d0">changeset</a>
+  </div>
   </td>
   <td class="source"><a href="#l18">    18</a>         <span class="n">ns</span> <span class="o">=</span> <span class="n">ifilter</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">n</span><span class="p">:</span> <span class="n">n</span> <span class="o">%</span> <span class="n">p</span> <span class="o">!=</span> <span class="mi">0</span><span class="p">,</span> <span class="n">ns</span><span class="p">)</span></td>
   </tr>
-  <tr id="l19">
-  <td class="annotate">
-  <a href="/annotate/06824edf55d0/primes.py#l19"
-  title="06824edf55d0: a">test@0</a>
+  <tr id="l19" class="thisrev">
+  <td class="annotate parity0">
+  
+  <div class="annotate-info">
+  <div>
+  <a href="/annotate/06824edf55d0/primes.py#l19">
+  06824edf55d0</a>
+  a
+  </div>
+  <div><em>&#116;&#101;&#115;&#116;</em></div>
+  <div>parents: </div>
+  <a href="/diff/06824edf55d0/primes.py">diff</a>
+  <a href="/rev/06824edf55d0">changeset</a>
+  </div>
   </td>
   <td class="source"><a href="#l19">    19</a>         <span class="kn">for</span> <span class="n">n</span> <span class="ow">in</span> <span class="n">sieve</span><span class="p">(</span><span class="n">ns</span><span class="p">):</span></td>
   </tr>
-  <tr id="l20">
-  <td class="annotate">
-  <a href="/annotate/06824edf55d0/primes.py#l20"
-  title="06824edf55d0: a">test@0</a>
+  <tr id="l20" class="thisrev">
+  <td class="annotate parity0">
+  
+  <div class="annotate-info">
+  <div>
+  <a href="/annotate/06824edf55d0/primes.py#l20">
+  06824edf55d0</a>
+  a
+  </div>
+  <div><em>&#116;&#101;&#115;&#116;</em></div>
+  <div>parents: </div>
+  <a href="/diff/06824edf55d0/primes.py">diff</a>
+  <a href="/rev/06824edf55d0">changeset</a>
+  </div>
   </td>
   <td class="source"><a href="#l20">    20</a>             <span class="kn">yield</span> <span class="n">n</span></td>
   </tr>
-  <tr id="l21">
-  <td class="annotate">
-  <a href="/annotate/06824edf55d0/primes.py#l21"
-  title="06824edf55d0: a">test@0</a>
+  <tr id="l21" class="thisrev">
+  <td class="annotate parity0">
+  
+  <div class="annotate-info">
+  <div>
+  <a href="/annotate/06824edf55d0/primes.py#l21">
+  06824edf55d0</a>
+  a
+  </div>
+  <div><em>&#116;&#101;&#115;&#116;</em></div>
+  <div>parents: </div>
+  <a href="/diff/06824edf55d0/primes.py">diff</a>
+  <a href="/rev/06824edf55d0">changeset</a>
+  </div>
   </td>
   <td class="source"><a href="#l21">    21</a> </td>
   </tr>
-  <tr id="l22">
-  <td class="annotate">
-  <a href="/annotate/06824edf55d0/primes.py#l22"
-  title="06824edf55d0: a">test@0</a>
+  <tr id="l22" class="thisrev">
+  <td class="annotate parity0">
+  
+  <div class="annotate-info">
+  <div>
+  <a href="/annotate/06824edf55d0/primes.py#l22">
+  06824edf55d0</a>
+  a
+  </div>
+  <div><em>&#116;&#101;&#115;&#116;</em></div>
+  <div>parents: </div>
+  <a href="/diff/06824edf55d0/primes.py">diff</a>
+  <a href="/rev/06824edf55d0">changeset</a>
+  </div>
   </td>
   <td class="source"><a href="#l22">    22</a>     <span class="n">odds</span> <span class="o">=</span> <span class="n">ifilter</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">i</span><span class="p">:</span> <span class="n">i</span> <span class="o">%</span> <span class="mi">2</span> <span class="o">==</span> <span class="mi">1</span><span class="p">,</span> <span class="n">count</span><span class="p">())</span></td>
   </tr>
-  <tr id="l23">
-  <td class="annotate">
-  <a href="/annotate/06824edf55d0/primes.py#l23"
-  title="06824edf55d0: a">test@0</a>
+  <tr id="l23" class="thisrev">
+  <td class="annotate parity0">
+  
+  <div class="annotate-info">
+  <div>
+  <a href="/annotate/06824edf55d0/primes.py#l23">
+  06824edf55d0</a>
+  a
+  </div>
+  <div><em>&#116;&#101;&#115;&#116;</em></div>
+  <div>parents: </div>
+  <a href="/diff/06824edf55d0/primes.py">diff</a>
+  <a href="/rev/06824edf55d0">changeset</a>
+  </div>
   </td>
   <td class="source"><a href="#l23">    23</a>     <span class="kn">return</span> <span class="n">chain</span><span class="p">([</span><span class="mi">2</span><span class="p">],</span> <span class="n">sieve</span><span class="p">(</span><span class="n">dropwhile</span><span class="p">(</span><span class="kn">lambda</span> <span class="n">n</span><span class="p">:</span> <span class="n">n</span> <span class="o">&lt;</span> <span class="mi">3</span><span class="p">,</span> <span class="n">odds</span><span class="p">)))</span></td>
   </tr>
-  <tr id="l24">
-  <td class="annotate">
-  <a href="/annotate/06824edf55d0/primes.py#l24"
-  title="06824edf55d0: a">test@0</a>
+  <tr id="l24" class="thisrev">
+  <td class="annotate parity0">
+  
+  <div class="annotate-info">
+  <div>
+  <a href="/annotate/06824edf55d0/primes.py#l24">
+  06824edf55d0</a>
+  a
+  </div>
+  <div><em>&#116;&#101;&#115;&#116;</em></div>
+  <div>parents: </div>
+  <a href="/diff/06824edf55d0/primes.py">diff</a>
+  <a href="/rev/06824edf55d0">changeset</a>
+  </div>
   </td>
   <td class="source"><a href="#l24">    24</a> </td>
   </tr>
-  <tr id="l25">
-  <td class="annotate">
-  <a href="/annotate/06824edf55d0/primes.py#l25"
-  title="06824edf55d0: a">test@0</a>
+  <tr id="l25" class="thisrev">
+  <td class="annotate parity0">
+  
+  <div class="annotate-info">
+  <div>
+  <a href="/annotate/06824edf55d0/primes.py#l25">
+  06824edf55d0</a>
+  a
+  </div>
+  <div><em>&#116;&#101;&#115;&#116;</em></div>
+  <div>parents: </div>
+  <a href="/diff/06824edf55d0/primes.py">diff</a>
+  <a href="/rev/06824edf55d0">changeset</a>
+  </div>
   </td>
   <td class="source"><a href="#l25">    25</a> <span class="kn">if</span> <span class="n">__name__</span> <span class="o">==</span> <span class="s">&quot;__main__&quot;</span><span class="p">:</span></td>
   </tr>
-  <tr id="l26">
-  <td class="annotate">
-  <a href="/annotate/06824edf55d0/primes.py#l26"
-  title="06824edf55d0: a">test@0</a>
+  <tr id="l26" class="thisrev">
+  <td class="annotate parity0">
+  
+  <div class="annotate-info">
+  <div>
+  <a href="/annotate/06824edf55d0/primes.py#l26">
+  06824edf55d0</a>
+  a
+  </div>
+  <div><em>&#116;&#101;&#115;&#116;</em></div>
+  <div>parents: </div>
+  <a href="/diff/06824edf55d0/primes.py">diff</a>
+  <a href="/rev/06824edf55d0">changeset</a>
+  </div>
   </td>
   <td class="source"><a href="#l26">    26</a>     <span class="kn">import</span> <span class="nn">sys</span></td>
   </tr>
-  <tr id="l27">
-  <td class="annotate">
-  <a href="/annotate/06824edf55d0/primes.py#l27"
-  title="06824edf55d0: a">test@0</a>
+  <tr id="l27" class="thisrev">
+  <td class="annotate parity0">
+  
+  <div class="annotate-info">
+  <div>
+  <a href="/annotate/06824edf55d0/primes.py#l27">
+  06824edf55d0</a>
+  a
+  </div>
+  <div><em>&#116;&#101;&#115;&#116;</em></div>
+  <div>parents: </div>
+  <a href="/diff/06824edf55d0/primes.py">diff</a>
+  <a href="/rev/06824edf55d0">changeset</a>
+  </div>
   </td>
   <td class="source"><a href="#l27">    27</a>     <span class="kn">try</span><span class="p">:</span></td>
   </tr>
-  <tr id="l28">
-  <td class="annotate">
-  <a href="/annotate/06824edf55d0/primes.py#l28"
-  title="06824edf55d0: a">test@0</a>
+  <tr id="l28" class="thisrev">
+  <td class="annotate parity0">
+  
+  <div class="annotate-info">
+  <div>
+  <a href="/annotate/06824edf55d0/primes.py#l28">
+  06824edf55d0</a>
+  a
+  </div>
+  <div><em>&#116;&#101;&#115;&#116;</em></div>
+  <div>parents: </div>
+  <a href="/diff/06824edf55d0/primes.py">diff</a>
+  <a href="/rev/06824edf55d0">changeset</a>
+  </div>
   </td>
   <td class="source"><a href="#l28">    28</a>         <span class="n">n</span> <span class="o">=</span> <span class="nb">int</span><span class="p">(</span><span class="n">sys</span><span class="o">.</span><span class="n">argv</span><span class="p">[</span><span class="mi">1</span><span class="p">])</span></td>
   </tr>
-  <tr id="l29">
-  <td class="annotate">
-  <a href="/annotate/06824edf55d0/primes.py#l29"
-  title="06824edf55d0: a">test@0</a>
+  <tr id="l29" class="thisrev">
+  <td class="annotate parity0">
+  
+  <div class="annotate-info">
+  <div>
+  <a href="/annotate/06824edf55d0/primes.py#l29">
+  06824edf55d0</a>
+  a
+  </div>
+  <div><em>&#116;&#101;&#115;&#116;</em></div>
+  <div>parents: </div>
+  <a href="/diff/06824edf55d0/primes.py">diff</a>
+  <a href="/rev/06824edf55d0">changeset</a>
+  </div>
   </td>
   <td class="source"><a href="#l29">    29</a>     <span class="kn">except</span> <span class="p">(</span><span class="ne">ValueError</span><span class="p">,</span> <span class="ne">IndexError</span><span class="p">):</span></td>
   </tr>
-  <tr id="l30">
-  <td class="annotate">
-  <a href="/annotate/06824edf55d0/primes.py#l30"
-  title="06824edf55d0: a">test@0</a>
+  <tr id="l30" class="thisrev">
+  <td class="annotate parity0">
+  
+  <div class="annotate-info">
+  <div>
+  <a href="/annotate/06824edf55d0/primes.py#l30">
+  06824edf55d0</a>
+  a
+  </div>
+  <div><em>&#116;&#101;&#115;&#116;</em></div>
+  <div>parents: </div>
+  <a href="/diff/06824edf55d0/primes.py">diff</a>
+  <a href="/rev/06824edf55d0">changeset</a>
+  </div>
   </td>
   <td class="source"><a href="#l30">    30</a>         <span class="n">n</span> <span class="o">=</span> <span class="mi">10</span></td>
   </tr>
-  <tr id="l31">
-  <td class="annotate">
-  <a href="/annotate/06824edf55d0/primes.py#l31"
-  title="06824edf55d0: a">test@0</a>
+  <tr id="l31" class="thisrev">
+  <td class="annotate parity0">
+  
+  <div class="annotate-info">
+  <div>
+  <a href="/annotate/06824edf55d0/primes.py#l31">
+  06824edf55d0</a>
+  a
+  </div>
+  <div><em>&#116;&#101;&#115;&#116;</em></div>
+  <div>parents: </div>
+  <a href="/diff/06824edf55d0/primes.py">diff</a>
+  <a href="/rev/06824edf55d0">changeset</a>
+  </div>
   </td>
   <td class="source"><a href="#l31">    31</a>     <span class="n">p</span> <span class="o">=</span> <span class="n">primes</span><span class="p">()</span></td>
   </tr>
-  <tr id="l32">
-  <td class="annotate">
-  <a href="/annotate/06824edf55d0/primes.py#l32"
-  title="06824edf55d0: a">test@0</a>
+  <tr id="l32" class="thisrev">
+  <td class="annotate parity0">
+  
+  <div class="annotate-info">
+  <div>
+  <a href="/annotate/06824edf55d0/primes.py#l32">
+  06824edf55d0</a>
+  a
+  </div>
+  <div><em>&#116;&#101;&#115;&#116;</em></div>
+  <div>parents: </div>
+  <a href="/diff/06824edf55d0/primes.py">diff</a>
+  <a href="/rev/06824edf55d0">changeset</a>
+  </div>
   </td>
   <td class="source"><a href="#l32">    32</a>     <span class="kn">print</span> <span class="s">&quot;The first </span><span class="si">%d</span><span class="s"> primes: </span><span class="si">%s</span><span class="s">&quot;</span> <span class="o">%</span> <span class="p">(</span><span class="n">n</span><span class="p">,</span> <span class="nb">list</span><span class="p">(</span><span class="n">islice</span><span class="p">(</span><span class="n">p</span><span class="p">,</span> <span class="n">n</span><span class="p">)))</span></td>
   </tr>
-  <tr id="l33">
-  <td class="annotate">
-  <a href="/annotate/06824edf55d0/primes.py#l33"
-  title="06824edf55d0: a">test@0</a>
+  <tr id="l33" class="thisrev">
+  <td class="annotate parity0">
+  
+  <div class="annotate-info">
+  <div>
+  <a href="/annotate/06824edf55d0/primes.py#l33">
+  06824edf55d0</a>
+  a
+  </div>
+  <div><em>&#116;&#101;&#115;&#116;</em></div>
+  <div>parents: </div>
+  <a href="/diff/06824edf55d0/primes.py">diff</a>
+  <a href="/rev/06824edf55d0">changeset</a>
+  </div>
   </td>
   <td class="source"><a href="#l33">    33</a> </td>
   </tr>
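The tests/test-highlight.t hunk above reflects the reworked annotate column: each row is tagged thisrev, the cell carries a parity class, and a hover popup (annotate-info) now holds the author, the parent list, and per-line diff and changeset links. A minimal regex-based sketch, not part of the test suite, that pulls those two links back out of a saved annotate page; the file name is a placeholder:

  import re

  # Hypothetical local copy of an /annotate/<rev>/primes.py page.
  page = open('annotate.html').read()

  blocks = re.findall(
      r'<div class="annotate-info">(.*?)</div>\s*</td>', page, re.S)
  for block in blocks:
      diff = re.search(r'href="(/diff/[^"]+)"', block)
      rev = re.search(r'href="(/rev/[^"]+)"', block)
      if diff and rev:
          print(diff.group(1), rev.group(1))
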
--- a/tests/test-histedit-arguments.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-histedit-arguments.t	Mon Jul 18 23:28:14 2016 -0500
@@ -499,3 +499,52 @@
   > EOF
   $ hg commit --amend -m 'allow this fold'
   $ hg histedit --continue
+
+  $ cd ..
+
+Test autoverb feature
+
+  $ hg init autoverb
+  $ cd autoverb
+  $ echo alpha >> alpha
+  $ hg ci -qAm one
+  $ echo alpha >> alpha
+  $ hg ci -qm two
+  $ echo beta >> beta
+  $ hg ci -qAm "roll! one"
+
+  $ hg log --style compact --graph
+  @  2[tip]   4f34d0f8b5fa   1970-01-01 00:00 +0000   test
+  |    roll! one
+  |
+  o  1   579e40513370   1970-01-01 00:00 +0000   test
+  |    two
+  |
+  o  0   6058cbb6cfd7   1970-01-01 00:00 +0000   test
+       one
+  
+
+Check that 'roll' is selected by default
+
+  $ HGEDITOR=cat hg histedit 0 --config experimental.histedit.autoverb=True
+  pick 6058cbb6cfd7 0 one
+  roll 4f34d0f8b5fa 2 roll! one
+  pick 579e40513370 1 two
+  
+  # Edit history between 6058cbb6cfd7 and 4f34d0f8b5fa
+  #
+  # Commits are listed from least to most recent
+  #
+  # You can reorder changesets by reordering the lines
+  #
+  # Commands:
+  #
+  #  e, edit = use commit, but stop for amending
+  #  m, mess = edit commit message without changing commit content
+  #  p, pick = use commit
+  #  d, drop = remove commit from history
+  #  f, fold = use commit, but combine it with the one above
+  #  r, roll = like fold, but discard this commit's description
+  #
+
+  $ cd ..
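The tests/test-histedit-arguments.t hunk above exercises the experimental.histedit.autoverb option: a commit whose summary starts with "roll! " is listed with the roll action and queued directly after the commit named by the rest of the summary. An illustrative sketch of that convention, deliberately not Mercurial's implementation, which reproduces the rule ordering shown in the test output:

  KNOWN_VERBS = ['edit', 'mess', 'pick', 'drop', 'fold', 'roll']

  def autoverb_rules(commits):
      # commits: list of (node, summary) pairs, oldest first.
      rules = [['pick', node, summary] for node, summary in commits]
      for rule in list(rules):
          verb, rest = (rule[2].split('! ', 1) + [''])[:2]
          if rest and verb in KNOWN_VERBS:
              rule[0] = verb
              rules.remove(rule)
              # Re-queue it right after the commit it names, if any.
              for i, other in enumerate(rules):
                  if other[2] == rest:
                      rules.insert(i + 1, rule)
                      break
              else:
                  rules.append(rule)
      return rules

  for verb, node, summary in autoverb_rules(
          [('6058cbb6cfd7', 'one'),
           ('579e40513370', 'two'),
           ('4f34d0f8b5fa', 'roll! one')]):
      print(verb, node, summary)
  # pick 6058cbb6cfd7 one
  # roll 4f34d0f8b5fa roll! one
  # pick 579e40513370 two
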
--- a/tests/test-histedit-base.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-histedit-base.t	Mon Jul 18 23:28:14 2016 -0500
@@ -39,7 +39,10 @@
   |/
   o  0:cd010b8cd998f3981a5a8115f94f8da4ab506089:draft 'A'
   
+Verify that implicit base command and help are listed
 
+  $ HGEDITOR=cat hg histedit |grep base
+  #  b, base = checkout changeset and apply further changesets from there
 
 Go to D
   $ hg update 3
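The next two hunks (tests/test-http-bundle1.t and tests/test-http.t) swap the fixed-width cut -c38- for a sed substitution, so the access-log prefix is stripped by pattern rather than by column position. The same normalization expressed in Python, shown on a made-up log line rather than one captured from the test run:

  import re

  line = ('127.0.0.1 - - [01/Jan/1970 00:00:00] '
          '"GET /?cmd=capabilities HTTP/1.1" 200 -')
  # Drop everything up to and including the '] ' that ends the
  # timestamp/client prefix, keeping the quoted request and status.
  print(re.sub(r'.*\] "', '"', line))
  # "GET /?cmd=capabilities HTTP/1.1" 200 -
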
--- a/tests/test-http-bundle1.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-http-bundle1.t	Mon Jul 18 23:28:14 2016 -0500
@@ -234,7 +234,7 @@
   remote: added 1 changesets with 1 changes to 1 files
   $ hg rollback -q
 
-  $ cut -c38- ../access.log
+  $ sed 's/.*] "/"/' < ../access.log
   "GET /?cmd=capabilities HTTP/1.1" 200 -
   "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
   "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
--- a/tests/test-http.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-http.t	Mon Jul 18 23:28:14 2016 -0500
@@ -225,7 +225,7 @@
   remote: added 1 changesets with 1 changes to 1 files
   $ hg rollback -q
 
-  $ cut -c38- ../access.log
+  $ sed 's/.*] "/"/' < ../access.log
   "GET /?cmd=capabilities HTTP/1.1" 200 -
   "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
   "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
--- a/tests/test-https.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-https.t	Mon Jul 18 23:28:14 2016 -0500
@@ -2,131 +2,13 @@
 
 Proper https client requires the built-in ssl from Python 2.6.
 
-Certificates created with:
- printf '.\n.\n.\n.\n.\nlocalhost\nhg@localhost\n' | \
- openssl req -newkey rsa:512 -keyout priv.pem -nodes -x509 -days 9000 -out pub.pem
-Can be dumped with:
- openssl x509 -in pub.pem -text
-
-  $ cat << EOT > priv.pem
-  > -----BEGIN PRIVATE KEY-----
-  > MIIBVAIBADANBgkqhkiG9w0BAQEFAASCAT4wggE6AgEAAkEApjCWeYGrIa/Vo7LH
-  > aRF8ou0tbgHKE33Use/whCnKEUm34rDaXQd4lxxX6aDWg06n9tiVStAKTgQAHJY8
-  > j/xgSwIDAQABAkBxHC6+Qlf0VJXGlb6NL16yEVVTQxqDS6hA9zqu6TZjrr0YMfzc
-  > EGNIiZGt7HCBL0zO+cPDg/LeCZc6HQhf0KrhAiEAzlJq4hWWzvguWFIJWSoBeBUG
-  > MF1ACazQO7PYE8M0qfECIQDONHHP0SKZzz/ZwBZcAveC5K61f/v9hONFwbeYulzR
-  > +wIgc9SvbtgB/5Yzpp//4ZAEnR7oh5SClCvyB+KSx52K3nECICbhQphhoXmI10wy
-  > aMTellaq0bpNMHFDziqH9RsqAHhjAiEAgYGxfzkftt5IUUn/iFK89aaIpyrpuaAh
-  > HY8gUVkVRVs=
-  > -----END PRIVATE KEY-----
-  > EOT
-
-  $ cat << EOT > pub.pem
-  > -----BEGIN CERTIFICATE-----
-  > MIIBqzCCAVWgAwIBAgIJANAXFFyWjGnRMA0GCSqGSIb3DQEBBQUAMDExEjAQBgNV
-  > BAMMCWxvY2FsaG9zdDEbMBkGCSqGSIb3DQEJARYMaGdAbG9jYWxob3N0MB4XDTEw
-  > MTAxNDIwMzAxNFoXDTM1MDYwNTIwMzAxNFowMTESMBAGA1UEAwwJbG9jYWxob3N0
-  > MRswGQYJKoZIhvcNAQkBFgxoZ0Bsb2NhbGhvc3QwXDANBgkqhkiG9w0BAQEFAANL
-  > ADBIAkEApjCWeYGrIa/Vo7LHaRF8ou0tbgHKE33Use/whCnKEUm34rDaXQd4lxxX
-  > 6aDWg06n9tiVStAKTgQAHJY8j/xgSwIDAQABo1AwTjAdBgNVHQ4EFgQUE6sA+amm
-  > r24dGX0kpjxOgO45hzQwHwYDVR0jBBgwFoAUE6sA+ammr24dGX0kpjxOgO45hzQw
-  > DAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQUFAANBAFArvQFiAZJgQczRsbYlG1xl
-  > t+truk37w5B3m3Ick1ntRcQrqs+hf0CO1q6Squ144geYaQ8CDirSR92fICELI1c=
-  > -----END CERTIFICATE-----
-  > EOT
-  $ cat priv.pem pub.pem >> server.pem
-  $ PRIV=`pwd`/server.pem
-
-  $ cat << EOT > pub-other.pem
-  > -----BEGIN CERTIFICATE-----
-  > MIIBqzCCAVWgAwIBAgIJALwZS731c/ORMA0GCSqGSIb3DQEBBQUAMDExEjAQBgNV
-  > BAMMCWxvY2FsaG9zdDEbMBkGCSqGSIb3DQEJARYMaGdAbG9jYWxob3N0MB4XDTEw
-  > MTAxNDIwNDUxNloXDTM1MDYwNTIwNDUxNlowMTESMBAGA1UEAwwJbG9jYWxob3N0
-  > MRswGQYJKoZIhvcNAQkBFgxoZ0Bsb2NhbGhvc3QwXDANBgkqhkiG9w0BAQEFAANL
-  > ADBIAkEAsxsapLbHrqqUKuQBxdpK4G3m2LjtyrTSdpzzzFlecxd5yhNP6AyWrufo
-  > K4VMGo2xlu9xOo88nDSUNSKPuD09MwIDAQABo1AwTjAdBgNVHQ4EFgQUoIB1iMhN
-  > y868rpQ2qk9dHnU6ebswHwYDVR0jBBgwFoAUoIB1iMhNy868rpQ2qk9dHnU6ebsw
-  > DAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQUFAANBAJ544f125CsE7J2t55PdFaF6
-  > bBlNBb91FCywBgSjhBjf+GG3TNPwrPdc3yqeq+hzJiuInqbOBv9abmMyq8Wsoig=
-  > -----END CERTIFICATE-----
-  > EOT
-
-pub.pem patched with other notBefore / notAfter:
+Make server certificates:
 
-  $ cat << EOT > pub-not-yet.pem
-  > -----BEGIN CERTIFICATE-----
-  > MIIBqzCCAVWgAwIBAgIJANAXFFyWjGnRMA0GCSqGSIb3DQEBBQUAMDExEjAQBgNVBAMMCWxvY2Fs
-  > aG9zdDEbMBkGCSqGSIb3DQEJARYMaGdAbG9jYWxob3N0MB4XDTM1MDYwNTIwMzAxNFoXDTM1MDYw
-  > NTIwMzAxNFowMTESMBAGA1UEAwwJbG9jYWxob3N0MRswGQYJKoZIhvcNAQkBFgxoZ0Bsb2NhbGhv
-  > c3QwXDANBgkqhkiG9w0BAQEFAANLADBIAkEApjCWeYGrIa/Vo7LHaRF8ou0tbgHKE33Use/whCnK
-  > EUm34rDaXQd4lxxX6aDWg06n9tiVStAKTgQAHJY8j/xgSwIDAQABo1AwTjAdBgNVHQ4EFgQUE6sA
-  > +ammr24dGX0kpjxOgO45hzQwHwYDVR0jBBgwFoAUE6sA+ammr24dGX0kpjxOgO45hzQwDAYDVR0T
-  > BAUwAwEB/zANBgkqhkiG9w0BAQUFAANBAJXV41gWnkgC7jcpPpFRSUSZaxyzrXmD1CIqQf0WgVDb
-  > /12E0vR2DuZitgzUYtBaofM81aTtc0a2/YsrmqePGm0=
-  > -----END CERTIFICATE-----
-  > EOT
-  $ cat priv.pem pub-not-yet.pem > server-not-yet.pem
-
-  $ cat << EOT > pub-expired.pem
-  > -----BEGIN CERTIFICATE-----
-  > MIIBqzCCAVWgAwIBAgIJANAXFFyWjGnRMA0GCSqGSIb3DQEBBQUAMDExEjAQBgNVBAMMCWxvY2Fs
-  > aG9zdDEbMBkGCSqGSIb3DQEJARYMaGdAbG9jYWxob3N0MB4XDTEwMTAxNDIwMzAxNFoXDTEwMTAx
-  > NDIwMzAxNFowMTESMBAGA1UEAwwJbG9jYWxob3N0MRswGQYJKoZIhvcNAQkBFgxoZ0Bsb2NhbGhv
-  > c3QwXDANBgkqhkiG9w0BAQEFAANLADBIAkEApjCWeYGrIa/Vo7LHaRF8ou0tbgHKE33Use/whCnK
-  > EUm34rDaXQd4lxxX6aDWg06n9tiVStAKTgQAHJY8j/xgSwIDAQABo1AwTjAdBgNVHQ4EFgQUE6sA
-  > +ammr24dGX0kpjxOgO45hzQwHwYDVR0jBBgwFoAUE6sA+ammr24dGX0kpjxOgO45hzQwDAYDVR0T
-  > BAUwAwEB/zANBgkqhkiG9w0BAQUFAANBAJfk57DTRf2nUbYaMSlVAARxMNbFGOjQhAUtY400GhKt
-  > 2uiKCNGKXVXD3AHWe13yHc5KttzbHQStE5Nm/DlWBWQ=
-  > -----END CERTIFICATE-----
-  > EOT
-  $ cat priv.pem pub-expired.pem > server-expired.pem
-
-Client certificates created with:
- openssl genrsa -aes128 -passout pass:1234 -out client-key.pem 512
- openssl rsa -in client-key.pem -passin pass:1234 -out client-key-decrypted.pem
- printf '.\n.\n.\n.\n.\n.\nhg-client@localhost\n.\n.\n' | \
- openssl req -new -key client-key.pem -passin pass:1234 -out client-csr.pem
- openssl x509 -req -days 9000 -in client-csr.pem -CA pub.pem -CAkey priv.pem \
- -set_serial 01 -out client-cert.pem
-
-  $ cat << EOT > client-key.pem
-  > -----BEGIN RSA PRIVATE KEY-----
-  > Proc-Type: 4,ENCRYPTED
-  > DEK-Info: AES-128-CBC,C8B8F103A61A336FB0716D1C0F8BB2E8
-  > 
-  > JolMlCFjEW3q3JJjO9z99NJWeJbFgF5DpUOkfSCxH56hxxtZb9x++rBvBZkxX1bF
-  > BAIe+iI90+jdCLwxbILWuFcrJUaLC5WmO14XDKYVmr2eW9e4MiCYOlO0Q6a9rDFS
-  > jctRCfvubOXFHbBGLH8uKEMpXEkP7Lc60FiIukqjuQEivJjrQirVtZCGwyk3qUi7
-  > Eyh4Lo63IKGu8T1Bkmn2kaMvFhu7nC/CQLBjSq0YYI1tmCOkVb/3tPrz8oqgDJp2
-  > u7bLS3q0xDNZ52nVrKIoZC/UlRXGlPyzPpa70/jPIdfCbkwDaBpRVXc+62Pj2n5/
-  > CnO2xaKwfOG6pDvanBhFD72vuBOkAYlFZPiEku4sc2WlNggsSWCPCIFwzmiHjKIl
-  > bWmdoTq3nb7sNfnBbV0OCa7fS1dFwCm4R1NC7ELENu0=
-  > -----END RSA PRIVATE KEY-----
-  > EOT
-
-  $ cat << EOT > client-key-decrypted.pem
-  > -----BEGIN RSA PRIVATE KEY-----
-  > MIIBOgIBAAJBAJs4LS3glAYU92bg5kPgRPNW84ewB0fWJfAKccCp1ACHAdZPeaKb
-  > FCinVMYKAVbVqBkyrZ/Tyr8aSfMz4xO4+KsCAwEAAQJAeKDr25+Q6jkZHEbkLRP6
-  > AfMtR+Ixhk6TJT24sbZKIC2V8KuJTDEvUhLU0CAr1nH79bDqiSsecOiVCr2HHyfT
-  > AQIhAM2C5rHbTs9R3PkywFEqq1gU3ztCnpiWglO7/cIkuGBhAiEAwVpMSAf77kop
-  > 4h/1kWsgMALQTJNsXd4CEUK4BOxvJIsCIQCbarVAKBQvoT81jfX27AfscsxnKnh5
-  > +MjSvkanvdFZwQIgbbcTefwt1LV4trtz2SR0i0nNcOZmo40Kl0jIquKO3qkCIH01
-  > mJHzZr3+jQqeIFtr5P+Xqi30DJxgrnEobbJ0KFjY
-  > -----END RSA PRIVATE KEY-----
-  > EOT
-
-  $ cat << EOT > client-cert.pem
-  > -----BEGIN CERTIFICATE-----
-  > MIIBPjCB6QIBATANBgkqhkiG9w0BAQsFADAxMRIwEAYDVQQDDAlsb2NhbGhvc3Qx
-  > GzAZBgkqhkiG9w0BCQEWDGhnQGxvY2FsaG9zdDAeFw0xNTA1MDcwNjI5NDVaFw0z
-  > OTEyMjcwNjI5NDVaMCQxIjAgBgkqhkiG9w0BCQEWE2hnLWNsaWVudEBsb2NhbGhv
-  > c3QwXDANBgkqhkiG9w0BAQEFAANLADBIAkEAmzgtLeCUBhT3ZuDmQ+BE81bzh7AH
-  > R9Yl8ApxwKnUAIcB1k95opsUKKdUxgoBVtWoGTKtn9PKvxpJ8zPjE7j4qwIDAQAB
-  > MA0GCSqGSIb3DQEBCwUAA0EAfBTqBG5pYhuGk+ZnyUufgS+d7Nk/sZAZjNdCAEj/
-  > NFPo5fR1jM6jlEWoWbeg298+SkjV7tfO+2nt0otUFkdM6A==
-  > -----END CERTIFICATE-----
-  > EOT
+  $ CERTSDIR="$TESTDIR/sslcerts"
+  $ cat "$CERTSDIR/priv.pem" "$CERTSDIR/pub.pem" >> server.pem
+  $ PRIV=`pwd`/server.pem
+  $ cat "$CERTSDIR/priv.pem" "$CERTSDIR/pub-not-yet.pem" > server-not-yet.pem
+  $ cat "$CERTSDIR/priv.pem" "$CERTSDIR/pub-expired.pem" > server-expired.pem
 
   $ hg init test
   $ cd test
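The hunk above replaces the inline PEM blobs with certificates shipped in $TESTDIR/sslcerts, concatenating priv.pem and pub.pem into a single server.pem. A minimal sketch of why that concatenation is convenient: Python's ssl module can read both the private key and the certificate from one combined PEM file. The path here simply reuses the server.pem the test builds, purely as an example:

  import ssl

  # load_cert_chain() falls back to reading the private key from the
  # certificate file when no separate keyfile is given.
  ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
  ctx.load_cert_chain('server.pem')
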
@@ -146,6 +28,7 @@
 cacert not found
 
   $ hg in --config web.cacerts=no-such.pem https://localhost:$HGPORT/
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
   abort: could not find web.cacerts: no-such.pem
   [255]
 
@@ -162,22 +45,142 @@
 #endif
   $ cd ..
 
-OS X has a dummy CA cert that enables use of the system CA store when using
-Apple's OpenSSL. This trick do not work with plain OpenSSL.
+Our test cert is not signed by a trusted CA. It should fail to verify if
+we are able to load CA certs.
+
+#if sslcontext defaultcacerts no-defaultcacertsloaded
+  $ hg clone https://localhost:$HGPORT/ copy-pull
+  (an attempt was made to load CA certificates but none were loaded; see https://mercurial-scm.org/wiki/SecureConnections for how to configure Mercurial to avoid this error)
+  abort: error: *certificate verify failed* (glob)
+  [255]
+#endif
+
+#if no-sslcontext defaultcacerts
+  $ hg clone https://localhost:$HGPORT/ copy-pull
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  (using CA certificates from *; if you see this message, your Mercurial install is not properly configured; see https://mercurial-scm.org/wiki/SecureConnections for how to configure Mercurial to avoid this message) (glob) (?)
+  abort: error: *certificate verify failed* (glob)
+  [255]
+#endif
 
-  $ DISABLEOSXDUMMYCERT=
-#if defaultcacerts
+#if no-sslcontext windows
+  $ hg clone https://localhost:$HGPORT/ copy-pull
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info
+  (unable to load Windows CA certificates; see https://mercurial-scm.org/wiki/SecureConnections for how to configure Mercurial to avoid this message)
+  abort: error: *certificate verify failed* (glob)
+  [255]
+#endif
+
+#if no-sslcontext osx
   $ hg clone https://localhost:$HGPORT/ copy-pull
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info
+  (unable to load CA certificates; see https://mercurial-scm.org/wiki/SecureConnections for how to configure Mercurial to avoid this message)
+  abort: localhost certificate error: no certificate received
+  (set hostsecurity.localhost:certfingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e config setting or use --insecure to connect insecurely)
+  [255]
+#endif
+
+#if defaultcacertsloaded
+  $ hg clone https://localhost:$HGPORT/ copy-pull
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  (using CA certificates from *; if you see this message, your Mercurial install is not properly configured; see https://mercurial-scm.org/wiki/SecureConnections for how to configure Mercurial to avoid this message) (glob) (?)
   abort: error: *certificate verify failed* (glob)
   [255]
+#endif
 
-  $ DISABLEOSXDUMMYCERT="--insecure"
+#if no-defaultcacerts
+  $ hg clone https://localhost:$HGPORT/ copy-pull
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  (unable to load * certificates; see https://mercurial-scm.org/wiki/SecureConnections for how to configure Mercurial to avoid this message) (glob) (?)
+  abort: localhost certificate error: no certificate received
+  (set hostsecurity.localhost:certfingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e config setting or use --insecure to connect insecurely)
+  [255]
+#endif
+
+Specifying a per-host certificate file that doesn't exist will abort
+
+  $ hg --config hostsecurity.localhost:verifycertsfile=/does/not/exist clone https://localhost:$HGPORT/
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  abort: path specified by hostsecurity.localhost:verifycertsfile does not exist: /does/not/exist
+  [255]
+
+A malformed per-host certificate file will raise an error
+
+  $ echo baddata > badca.pem
+#if sslcontext
+  $ hg --config hostsecurity.localhost:verifycertsfile=badca.pem clone https://localhost:$HGPORT/
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  abort: error loading CA file badca.pem: * (glob)
+  (file is empty or malformed?)
+  [255]
+#else
+  $ hg --config hostsecurity.localhost:verifycertsfile=badca.pem clone https://localhost:$HGPORT/
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  abort: error: * (glob)
+  [255]
 #endif
 
-clone via pull
+A per-host certificate mismatching the server will fail verification
+
+(modern ssl is able to discern whether the loaded cert is a CA cert)
+#if sslcontext
+  $ hg --config hostsecurity.localhost:verifycertsfile="$CERTSDIR/client-cert.pem" clone https://localhost:$HGPORT/
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  (an attempt was made to load CA certificates but none were loaded; see https://mercurial-scm.org/wiki/SecureConnections for how to configure Mercurial to avoid this error)
+  abort: error: *certificate verify failed* (glob)
+  [255]
+#else
+  $ hg --config hostsecurity.localhost:verifycertsfile="$CERTSDIR/client-cert.pem" clone https://localhost:$HGPORT/
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  abort: error: *certificate verify failed* (glob)
+  [255]
+#endif
+
+A per-host certificate matching the server's cert will be accepted
+
+  $ hg --config hostsecurity.localhost:verifycertsfile="$CERTSDIR/pub.pem" clone -U https://localhost:$HGPORT/ perhostgood1
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  requesting all changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 4 changes to 4 files
+
+A per-host certificate file with multiple certs where one matches will be accepted
 
-  $ hg clone https://localhost:$HGPORT/ copy-pull $DISABLEOSXDUMMYCERT
-  warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
+  $ cat "$CERTSDIR/client-cert.pem" "$CERTSDIR/pub.pem" > perhost.pem
+  $ hg --config hostsecurity.localhost:verifycertsfile=perhost.pem clone -U https://localhost:$HGPORT/ perhostgood2
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  requesting all changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 4 changes to 4 files
+
+Defining both a per-host certificate and a fingerprint will print a warning
+
+  $ hg --config hostsecurity.localhost:verifycertsfile="$CERTSDIR/pub.pem" --config hostsecurity.localhost:fingerprints=sha1:ecd87cd6b386d04fc1b8b41c9d8f5e168eef1c03 clone -U https://localhost:$HGPORT/ caandfingerwarning
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  (hostsecurity.localhost:verifycertsfile ignored when host fingerprints defined; using host fingerprints for verification)
+  requesting all changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 4 changes to 4 files
+
+  $ DISABLECACERTS="--config devel.disableloaddefaultcerts=true"
+
+Inability to verify peer certificate will result in abort
+
+  $ hg clone https://localhost:$HGPORT/ copy-pull $DISABLECACERTS
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  abort: unable to verify security of localhost (no loaded CA certificates); refusing to connect
+  (see https://mercurial-scm.org/wiki/SecureConnections for how to configure Mercurial to avoid this error or set hostsecurity.localhost:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e to trust this server)
+  [255]
+
+  $ hg clone --insecure https://localhost:$HGPORT/ copy-pull
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  warning: connection security to localhost is disabled per current settings; communication is susceptible to eavesdropping and tampering
   requesting all changes
   adding changesets
   adding manifests
@@ -202,9 +205,17 @@
   $ cd copy-pull
   $ echo '[hooks]' >> .hg/hgrc
   $ echo "changegroup = printenv.py changegroup" >> .hg/hgrc
-  $ hg pull $DISABLEOSXDUMMYCERT
+  $ hg pull $DISABLECACERTS
   pulling from https://localhost:$HGPORT/
-  warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  abort: unable to verify security of localhost (no loaded CA certificates); refusing to connect
+  (see https://mercurial-scm.org/wiki/SecureConnections for how to configure Mercurial to avoid this error or set hostsecurity.localhost:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e to trust this server)
+  [255]
+
+  $ hg pull --insecure
+  pulling from https://localhost:$HGPORT/
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  warning: connection security to localhost is disabled per current settings; communication is susceptible to eavesdropping and tampering
   searching for changes
   adding changesets
   adding manifests
@@ -218,9 +229,10 @@
 
   $ cp copy-pull/.hg/hgrc copy-pull/.hg/hgrc.bu
   $ echo "[web]" >> copy-pull/.hg/hgrc
-  $ echo "cacerts=`pwd`/pub.pem" >> copy-pull/.hg/hgrc
+  $ echo "cacerts=$CERTSDIR/pub.pem" >> copy-pull/.hg/hgrc
   $ hg -R copy-pull pull --traceback
   pulling from https://localhost:$HGPORT/
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
   searching for changes
   no changes found
   $ mv copy-pull/.hg/hgrc.bu copy-pull/.hg/hgrc
@@ -230,35 +242,63 @@
 
   $ echo "[web]" >> $HGRCPATH
   $ echo 'cacerts=$P/pub.pem' >> $HGRCPATH
-  $ P=`pwd` hg -R copy-pull pull
+  $ P="$CERTSDIR" hg -R copy-pull pull
   pulling from https://localhost:$HGPORT/
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  searching for changes
+  no changes found
+  $ P="$CERTSDIR" hg -R copy-pull pull --insecure
+  pulling from https://localhost:$HGPORT/
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  warning: connection security to localhost is disabled per current settings; communication is susceptible to eavesdropping and tampering
   searching for changes
   no changes found
-  $ P=`pwd` hg -R copy-pull pull --insecure
+
+empty cacert file
+
+  $ touch emptycafile
+
+#if sslcontext
+  $ hg --config web.cacerts=emptycafile -R copy-pull pull
   pulling from https://localhost:$HGPORT/
-  warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
-  searching for changes
-  no changes found
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  abort: error loading CA file emptycafile: * (glob)
+  (file is empty or malformed?)
+  [255]
+#else
+  $ hg --config web.cacerts=emptycafile -R copy-pull pull
+  pulling from https://localhost:$HGPORT/
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  abort: error: * (glob)
+  [255]
+#endif
 
 cacert mismatch
 
-  $ hg -R copy-pull pull --config web.cacerts=pub.pem https://127.0.0.1:$HGPORT/
-  pulling from https://127.0.0.1:$HGPORT/
-  abort: 127.0.0.1 certificate error: certificate is for localhost
-  (configure hostfingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca or use --insecure to connect insecurely)
+  $ hg -R copy-pull pull --config web.cacerts="$CERTSDIR/pub.pem" \
+  > https://127.0.0.1:$HGPORT/
+  pulling from https://127.0.0.1:$HGPORT/ (glob)
+  warning: connecting to 127.0.0.1 using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  abort: 127.0.0.1 certificate error: certificate is for localhost (glob)
+  (set hostsecurity.127.0.0.1:certfingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e config setting or use --insecure to connect insecurely) (glob)
   [255]
-  $ hg -R copy-pull pull --config web.cacerts=pub.pem https://127.0.0.1:$HGPORT/ --insecure
-  pulling from https://127.0.0.1:$HGPORT/
-  warning: 127.0.0.1 certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
+  $ hg -R copy-pull pull --config web.cacerts="$CERTSDIR/pub.pem" \
+  > https://127.0.0.1:$HGPORT/ --insecure
+  pulling from https://127.0.0.1:$HGPORT/ (glob)
+  warning: connecting to 127.0.0.1 using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  warning: connection security to 127.0.0.1 is disabled per current settings; communication is susceptible to eavesdropping and tampering (glob)
   searching for changes
   no changes found
-  $ hg -R copy-pull pull --config web.cacerts=pub-other.pem
+  $ hg -R copy-pull pull --config web.cacerts="$CERTSDIR/pub-other.pem"
   pulling from https://localhost:$HGPORT/
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
   abort: error: *certificate verify failed* (glob)
   [255]
-  $ hg -R copy-pull pull --config web.cacerts=pub-other.pem --insecure
+  $ hg -R copy-pull pull --config web.cacerts="$CERTSDIR/pub-other.pem" \
+  > --insecure
   pulling from https://localhost:$HGPORT/
-  warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  warning: connection security to localhost is disabled per current settings; communication is susceptible to eavesdropping and tampering
   searching for changes
   no changes found
 
@@ -266,8 +306,10 @@
 
   $ hg serve -R test -p $HGPORT1 -d --pid-file=hg1.pid --certificate=server-not-yet.pem
   $ cat hg1.pid >> $DAEMON_PIDS
-  $ hg -R copy-pull pull --config web.cacerts=pub-not-yet.pem https://localhost:$HGPORT1/
+  $ hg -R copy-pull pull --config web.cacerts="$CERTSDIR/pub-not-yet.pem" \
+  > https://localhost:$HGPORT1/
   pulling from https://localhost:$HGPORT1/
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
   abort: error: *certificate verify failed* (glob)
   [255]
 
@@ -275,52 +317,203 @@
 
   $ hg serve -R test -p $HGPORT2 -d --pid-file=hg2.pid --certificate=server-expired.pem
   $ cat hg2.pid >> $DAEMON_PIDS
-  $ hg -R copy-pull pull --config web.cacerts=pub-expired.pem https://localhost:$HGPORT2/
+  $ hg -R copy-pull pull --config web.cacerts="$CERTSDIR/pub-expired.pem" \
+  > https://localhost:$HGPORT2/
   pulling from https://localhost:$HGPORT2/
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
   abort: error: *certificate verify failed* (glob)
   [255]
 
+Disabling the TLS 1.0 warning works
+  $ hg -R copy-pull id https://localhost:$HGPORT/ \
+  > --config hostsecurity.localhost:fingerprints=sha1:ecd87cd6b386d04fc1b8b41c9d8f5e168eef1c03 \
+  > --config hostsecurity.disabletls10warning=true
+  5fed3813f7f5
+
+#if no-sslcontext no-py27+
+Setting ciphers doesn't work in Python 2.6
+  $ P="$CERTSDIR" hg --config hostsecurity.ciphers=HIGH -R copy-pull id https://localhost:$HGPORT/
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info
+  abort: setting ciphers in [hostsecurity] is not supported by this version of Python
+  (remove the config option or run Mercurial with a modern Python version (preferred))
+  [255]
+#endif
+
+Setting ciphers works in Python 2.7+ but the error message is different on
+legacy ssl. We test legacy once and do more feature checking on modern
+configs.
+
+#if py27+ no-sslcontext
+  $ P="$CERTSDIR" hg --config hostsecurity.ciphers=invalid -R copy-pull id https://localhost:$HGPORT/
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info
+  abort: *No cipher can be selected. (glob)
+  [255]
+
+  $ P="$CERTSDIR" hg --config hostsecurity.ciphers=HIGH -R copy-pull id https://localhost:$HGPORT/
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info
+  5fed3813f7f5
+#endif
+
+#if sslcontext
+Setting ciphers to an invalid value aborts
+  $ P="$CERTSDIR" hg --config hostsecurity.ciphers=invalid -R copy-pull id https://localhost:$HGPORT/
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  abort: could not set ciphers: No cipher can be selected.
+  (change cipher string (invalid) in config)
+  [255]
+
+  $ P="$CERTSDIR" hg --config hostsecurity.localhost:ciphers=invalid -R copy-pull id https://localhost:$HGPORT/
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  abort: could not set ciphers: No cipher can be selected.
+  (change cipher string (invalid) in config)
+  [255]
+
+Changing the cipher string works
+
+  $ P="$CERTSDIR" hg --config hostsecurity.ciphers=HIGH -R copy-pull id https://localhost:$HGPORT/
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  5fed3813f7f5
+#endif
+
 Fingerprints
 
-  $ echo "[hostfingerprints]" >> copy-pull/.hg/hgrc
-  $ echo "localhost = 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca" >> copy-pull/.hg/hgrc
-  $ echo "127.0.0.1 = 914f1aff87249c09b6859b88b1906d30756491ca" >> copy-pull/.hg/hgrc
+- works without cacerts (hostkeyfingerprints)
+  $ hg -R copy-pull id https://localhost:$HGPORT/ --insecure --config hostfingerprints.localhost=ec:d8:7c:d6:b3:86:d0:4f:c1:b8:b4:1c:9d:8f:5e:16:8e:ef:1c:03
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  5fed3813f7f5
 
-- works without cacerts
-  $ hg -R copy-pull id https://localhost:$HGPORT/ --insecure
+- works without cacerts (hostsecurity)
+  $ hg -R copy-pull id https://localhost:$HGPORT/ --config hostsecurity.localhost:fingerprints=sha1:ecd87cd6b386d04fc1b8b41c9d8f5e168eef1c03
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  5fed3813f7f5
+
+  $ hg -R copy-pull id https://localhost:$HGPORT/ --config hostsecurity.localhost:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
   5fed3813f7f5
 
 - multiple fingerprints specified and first matches
-  $ hg --config 'hostfingerprints.localhost=914f1aff87249c09b6859b88b1906d30756491ca, deadbeefdeadbeefdeadbeefdeadbeefdeadbeef' -R copy-pull id https://localhost:$HGPORT/ --insecure
+  $ hg --config 'hostfingerprints.localhost=ecd87cd6b386d04fc1b8b41c9d8f5e168eef1c03, deadbeefdeadbeefdeadbeefdeadbeefdeadbeef' -R copy-pull id https://localhost:$HGPORT/ --insecure
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  5fed3813f7f5
+
+  $ hg --config 'hostsecurity.localhost:fingerprints=sha1:ecd87cd6b386d04fc1b8b41c9d8f5e168eef1c03, sha1:deadbeefdeadbeefdeadbeefdeadbeefdeadbeef' -R copy-pull id https://localhost:$HGPORT/
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
   5fed3813f7f5
 
 - multiple fingerprints specified and last matches
-  $ hg --config 'hostfingerprints.localhost=deadbeefdeadbeefdeadbeefdeadbeefdeadbeef, 914f1aff87249c09b6859b88b1906d30756491ca' -R copy-pull id https://localhost:$HGPORT/ --insecure
+  $ hg --config 'hostfingerprints.localhost=deadbeefdeadbeefdeadbeefdeadbeefdeadbeef, ecd87cd6b386d04fc1b8b41c9d8f5e168eef1c03' -R copy-pull id https://localhost:$HGPORT/ --insecure
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  5fed3813f7f5
+
+  $ hg --config 'hostsecurity.localhost:fingerprints=sha1:deadbeefdeadbeefdeadbeefdeadbeefdeadbeef, sha1:ecd87cd6b386d04fc1b8b41c9d8f5e168eef1c03' -R copy-pull id https://localhost:$HGPORT/
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
   5fed3813f7f5
 
 - multiple fingerprints specified and none match
 
   $ hg --config 'hostfingerprints.localhost=deadbeefdeadbeefdeadbeefdeadbeefdeadbeef, aeadbeefdeadbeefdeadbeefdeadbeefdeadbeef' -R copy-pull id https://localhost:$HGPORT/ --insecure
-  abort: certificate for localhost has unexpected fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  abort: certificate for localhost has unexpected fingerprint ec:d8:7c:d6:b3:86:d0:4f:c1:b8:b4:1c:9d:8f:5e:16:8e:ef:1c:03
   (check hostfingerprint configuration)
   [255]
 
+  $ hg --config 'hostsecurity.localhost:fingerprints=sha1:deadbeefdeadbeefdeadbeefdeadbeefdeadbeef, sha1:aeadbeefdeadbeefdeadbeefdeadbeefdeadbeef' -R copy-pull id https://localhost:$HGPORT/
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  abort: certificate for localhost has unexpected fingerprint sha1:ec:d8:7c:d6:b3:86:d0:4f:c1:b8:b4:1c:9d:8f:5e:16:8e:ef:1c:03
+  (check hostsecurity configuration)
+  [255]
+
 - fails when cert doesn't match hostname (port is ignored)
-  $ hg -R copy-pull id https://localhost:$HGPORT1/
-  abort: certificate for localhost has unexpected fingerprint 28:ff:71:bf:65:31:14:23:ad:62:92:b4:0e:31:99:18:fc:83:e3:9b
+  $ hg -R copy-pull id https://localhost:$HGPORT1/ --config hostfingerprints.localhost=ecd87cd6b386d04fc1b8b41c9d8f5e168eef1c03
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  abort: certificate for localhost has unexpected fingerprint f4:2f:5a:0c:3e:52:5b:db:e7:24:a8:32:1d:18:97:6d:69:b5:87:84
   (check hostfingerprint configuration)
   [255]
 
 
 - ignores that certificate doesn't match hostname
-  $ hg -R copy-pull id https://127.0.0.1:$HGPORT/
+  $ hg -R copy-pull id https://127.0.0.1:$HGPORT/ --config hostfingerprints.127.0.0.1=ecd87cd6b386d04fc1b8b41c9d8f5e168eef1c03
+  warning: connecting to 127.0.0.1 using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  5fed3813f7f5
+
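The fingerprint checks above boil down to hashing the DER-encoded peer
certificate and comparing the digest against the configured value. A minimal
illustrative sketch of that computation with Python's standard library (not
Mercurial's actual code; host and port are whatever the test server uses):

  import hashlib
  import socket
  import ssl

  def peerfingerprint(host, port):
      # Fetch the peer certificate in DER form without validating it;
      # pinning replaces chain validation entirely.
      sock = socket.create_connection((host, port))
      wrapped = ssl.wrap_socket(sock, cert_reqs=ssl.CERT_NONE)
      der = wrapped.getpeercert(binary_form=True)
      wrapped.close()
      digest = hashlib.sha256(der).hexdigest()
      # hostsecurity.<host>:fingerprints is compared against this value,
      # conventionally shown colon-separated (20:de:b3:...).
      return 'sha256:' + ':'.join(digest[i:i + 2]
                                  for i in range(0, len(digest), 2))
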
+Ports used by next test. Kill servers.
+
+  $ killdaemons.py hg0.pid
+  $ killdaemons.py hg1.pid
+  $ killdaemons.py hg2.pid
+
+#if sslcontext tls1.2
+Start servers running supported TLS versions
+
+  $ cd test
+  $ hg serve -p $HGPORT -d --pid-file=../hg0.pid --certificate=$PRIV \
+  > --config devel.serverexactprotocol=tls1.0
+  $ cat ../hg0.pid >> $DAEMON_PIDS
+  $ hg serve -p $HGPORT1 -d --pid-file=../hg1.pid --certificate=$PRIV \
+  > --config devel.serverexactprotocol=tls1.1
+  $ cat ../hg1.pid >> $DAEMON_PIDS
+  $ hg serve -p $HGPORT2 -d --pid-file=../hg2.pid --certificate=$PRIV \
+  > --config devel.serverexactprotocol=tls1.2
+  $ cat ../hg2.pid >> $DAEMON_PIDS
+  $ cd ..
+
+Clients talking same TLS versions work
+
+  $ P="$CERTSDIR" hg --config hostsecurity.minimumprotocol=tls1.0 id https://localhost:$HGPORT/
+  5fed3813f7f5
+  $ P="$CERTSDIR" hg --config hostsecurity.minimumprotocol=tls1.1 id https://localhost:$HGPORT1/
+  5fed3813f7f5
+  $ P="$CERTSDIR" hg --config hostsecurity.minimumprotocol=tls1.2 id https://localhost:$HGPORT2/
   5fed3813f7f5
 
-HGPORT1 is reused below for tinyproxy tests. Kill that server.
+Clients requiring a newer TLS version than the server supports fail
+
+  $ P="$CERTSDIR" hg id https://localhost:$HGPORT/
+  (could not negotiate a common protocol; see https://mercurial-scm.org/wiki/SecureConnections for how to configure Mercurial to avoid this error)
+  abort: error: *unsupported protocol* (glob)
+  [255]
+
+  $ P="$CERTSDIR" hg --config hostsecurity.minimumprotocol=tls1.1 id https://localhost:$HGPORT/
+  (could not negotiate a common protocol; see https://mercurial-scm.org/wiki/SecureConnections for how to configure Mercurial to avoid this error)
+  abort: error: *unsupported protocol* (glob)
+  [255]
+  $ P="$CERTSDIR" hg --config hostsecurity.minimumprotocol=tls1.2 id https://localhost:$HGPORT/
+  (could not negotiate a common protocol; see https://mercurial-scm.org/wiki/SecureConnections for how to configure Mercurial to avoid this error)
+  abort: error: *unsupported protocol* (glob)
+  [255]
+  $ P="$CERTSDIR" hg --config hostsecurity.minimumprotocol=tls1.2 id https://localhost:$HGPORT1/
+  (could not negotiate a common protocol; see https://mercurial-scm.org/wiki/SecureConnections for how to configure Mercurial to avoid this error)
+  abort: error: *unsupported protocol* (glob)
+  [255]
+
+The per-host config option overrides the default
+
+  $ P="$CERTSDIR" hg id https://localhost:$HGPORT/ \
+  > --config hostsecurity.minimumprotocol=tls1.2 \
+  > --config hostsecurity.localhost:minimumprotocol=tls1.0
+  5fed3813f7f5
+
+The per-host config option by itself works
+
+  $ P="$CERTSDIR" hg id https://localhost:$HGPORT/ \
+  > --config hostsecurity.localhost:minimumprotocol=tls1.2
+  (could not negotiate a common protocol; see https://mercurial-scm.org/wiki/SecureConnections for how to configure Mercurial to avoid this error)
+  abort: error: *unsupported protocol* (glob)
+  [255]
+
+  $ killdaemons.py hg0.pid
   $ killdaemons.py hg1.pid
+  $ killdaemons.py hg2.pid
+#endif
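
The minimumprotocol checks above depend on the client being able to impose a
protocol floor. A rough sketch, assuming a modern ssl module (the sslcontext
requirement), of how such a floor can be expressed by masking out older
protocols; illustrative only, not Mercurial's implementation:

  import ssl

  def contextwithfloor(minimumprotocol):
      # Start from a context that can negotiate any TLS version, then
      # disable everything below the configured floor.
      ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
      ctx.options |= ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3
      if minimumprotocol in ('tls1.1', 'tls1.2'):
          ctx.options |= ssl.OP_NO_TLSv1
      if minimumprotocol == 'tls1.2':
          ctx.options |= ssl.OP_NO_TLSv1_1
      return ctx

A client built this way with 'tls1.2' cannot complete a handshake with a
server pinned to tls1.0 via devel.serverexactprotocol, which is the "could
not negotiate a common protocol" failure exercised above.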
 
 Prepare for connecting through proxy
 
+  $ hg serve -R test -p $HGPORT -d --pid-file=hg0.pid --certificate=$PRIV
+  $ cat hg0.pid >> $DAEMON_PIDS
+  $ hg serve -R test -p $HGPORT2 -d --pid-file=hg2.pid --certificate=server-expired.pem
+  $ cat hg2.pid >> $DAEMON_PIDS
+tinyproxy.py doesn't fully detach, so killing it may result in extra output
+from the shell, so don't kill it.
   $ tinyproxy.py $HGPORT1 localhost >proxy.log </dev/null 2>&1 &
   $ while [ ! -f proxy.pid ]; do sleep 0; done
   $ cat proxy.pid >> $DAEMON_PIDS
@@ -334,29 +527,37 @@
 
   $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull --insecure --traceback
   pulling from https://localhost:$HGPORT/
-  warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  warning: connection security to localhost is disabled per current settings; communication is susceptible to eavesdropping and tampering
   searching for changes
   no changes found
 
 Test https with cacert and fingerprint through proxy
 
-  $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull --config web.cacerts=pub.pem
+  $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull \
+  > --config web.cacerts="$CERTSDIR/pub.pem"
   pulling from https://localhost:$HGPORT/
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
   searching for changes
   no changes found
-  $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull https://127.0.0.1:$HGPORT/
-  pulling from https://127.0.0.1:$HGPORT/
+  $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull https://127.0.0.1:$HGPORT/ --config hostfingerprints.127.0.0.1=ecd87cd6b386d04fc1b8b41c9d8f5e168eef1c03
+  pulling from https://127.0.0.1:$HGPORT/ (glob)
+  warning: connecting to 127.0.0.1 using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
   searching for changes
   no changes found
 
 Test https with cert problems through proxy
 
-  $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull --config web.cacerts=pub-other.pem
+  $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull \
+  > --config web.cacerts="$CERTSDIR/pub-other.pem"
   pulling from https://localhost:$HGPORT/
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
   abort: error: *certificate verify failed* (glob)
   [255]
-  $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull --config web.cacerts=pub-expired.pem https://localhost:$HGPORT2/
+  $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull \
+  > --config web.cacerts="$CERTSDIR/pub-expired.pem" https://localhost:$HGPORT2/
   pulling from https://localhost:$HGPORT2/
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
   abort: error: *certificate verify failed* (glob)
   [255]
 
@@ -365,32 +566,18 @@
 
 #if sslcontext
 
-Start patched hgweb that requires client certificates:
+Start hgweb that requires client certificates:
 
-  $ cat << EOT > reqclientcert.py
-  > import ssl
-  > from mercurial.hgweb import server
-  > class _httprequesthandlersslclientcert(server._httprequesthandlerssl):
-  >     @staticmethod
-  >     def preparehttpserver(httpserver, ssl_cert):
-  >         sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
-  >         sslcontext.verify_mode = ssl.CERT_REQUIRED
-  >         sslcontext.load_cert_chain(ssl_cert)
-  >         # verify clients by server certificate
-  >         sslcontext.load_verify_locations(ssl_cert)
-  >         httpserver.socket = sslcontext.wrap_socket(httpserver.socket,
-  >                                                    server_side=True)
-  > server._httprequesthandlerssl = _httprequesthandlersslclientcert
-  > EOT
   $ cd test
   $ hg serve -p $HGPORT -d --pid-file=../hg0.pid --certificate=$PRIV \
-  > --config extensions.reqclientcert=../reqclientcert.py
+  > --config devel.servercafile=$PRIV --config devel.serverrequirecert=true
   $ cat ../hg0.pid >> $DAEMON_PIDS
   $ cd ..
 
 without client certificate:
 
-  $ P=`pwd` hg id https://localhost:$HGPORT/
+  $ P="$CERTSDIR" hg id https://localhost:$HGPORT/
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
   abort: error: *handshake failure* (glob)
   [255]
 
@@ -399,19 +586,22 @@
   $ cat << EOT >> $HGRCPATH
   > [auth]
   > l.prefix = localhost
-  > l.cert = client-cert.pem
-  > l.key = client-key.pem
+  > l.cert = $CERTSDIR/client-cert.pem
+  > l.key = $CERTSDIR/client-key.pem
   > EOT
 
-  $ P=`pwd` hg id https://localhost:$HGPORT/ \
-  > --config auth.l.key=client-key-decrypted.pem
+  $ P="$CERTSDIR" hg id https://localhost:$HGPORT/ \
+  > --config auth.l.key="$CERTSDIR/client-key-decrypted.pem"
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
   5fed3813f7f5
 
-  $ printf '1234\n' | env P=`pwd` hg id https://localhost:$HGPORT/ \
+  $ printf '1234\n' | env P="$CERTSDIR" hg id https://localhost:$HGPORT/ \
   > --config ui.interactive=True --config ui.nontty=True
-  passphrase for client-key.pem: 5fed3813f7f5
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  passphrase for */client-key.pem: 5fed3813f7f5 (glob)
 
-  $ env P=`pwd` hg id https://localhost:$HGPORT/
+  $ env P="$CERTSDIR" hg id https://localhost:$HGPORT/
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
   abort: error: * (glob)
   [255]
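
At the TLS level, the client-certificate tests above amount to the server
requiring and verifying a client certificate and the client presenting the
pair configured via [auth] l.cert / l.key. A hedged sketch of both halves,
mirroring the removed reqclientcert.py helper; the file names and the '1234'
passphrase are taken from the test, not from Mercurial's code:

  import ssl

  def requireclientcert(sock, servercert):
      # As in the removed reqclientcert.py: demand a client certificate
      # and verify clients against the server's own certificate file.
      ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
      ctx.verify_mode = ssl.CERT_REQUIRED
      ctx.load_cert_chain(servercert)
      ctx.load_verify_locations(servercert)
      return ctx.wrap_socket(sock, server_side=True)

  def clientcontext(certfile, keyfile, passphrase=None):
      # Client side of the same handshake; passphrase unlocks an
      # encrypted key, like the '1234' prompt above.
      ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
      ctx.load_cert_chain(certfile, keyfile, password=passphrase)
      return ctx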
 
--- a/tests/test-import.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-import.t	Mon Jul 18 23:28:14 2016 -0500
@@ -53,7 +53,8 @@
 regardless of the commit message in the patch)
 
   $ cat > dummypatch.py <<EOF
-  > print 'patching file a'
+  > from __future__ import print_function
+  > print('patching file a')
   > file('a', 'wb').write('line2\n')
   > EOF
   $ hg clone -r0 a b
--- a/tests/test-inherit-mode.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-inherit-mode.t	Mon Jul 18 23:28:14 2016 -0500
@@ -117,6 +117,7 @@
   00660 ../push/.hg/cache/branch2-base
   00660 ../push/.hg/cache/rbc-names-v1
   00660 ../push/.hg/cache/rbc-revs-v1
+  00660 ../push/.hg/dirstate
   00660 ../push/.hg/requires
   00770 ../push/.hg/store/
   00660 ../push/.hg/store/00changelog.i
--- a/tests/test-install.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-install.t	Mon Jul 18 23:28:14 2016 -0500
@@ -4,6 +4,9 @@
   checking Python executable (*) (glob)
   checking Python version (2.*) (glob)
   checking Python lib (*lib*)... (glob)
+  checking Mercurial version (*) (glob)
+  checking Mercurial custom build (*) (glob)
+  checking module policy (*) (glob)
   checking installed modules (*mercurial)... (glob)
   checking templates (*mercurial?templates)... (glob)
   checking default template (*mercurial?templates?map-cmdline.default) (glob)
@@ -23,7 +26,10 @@
     "encoding": "ascii",
     "encodingerror": null,
     "extensionserror": null,
+    "hgmodulepolicy": "*", (glob)
     "hgmodules": "*mercurial", (glob)
+    "hgver": "*", (glob)
+    "hgverextra": "*", (glob)
     "problems": 0,
     "pythonexe": "*", (glob)
     "pythonlib": "*", (glob)
@@ -41,6 +47,9 @@
   checking Python executable (*) (glob)
   checking Python version (2.*) (glob)
   checking Python lib (*lib*)... (glob)
+  checking Mercurial version (*) (glob)
+  checking Mercurial custom build (*) (glob)
+  checking module policy (*) (glob)
   checking installed modules (*mercurial)... (glob)
   checking templates (*mercurial?templates)... (glob)
   checking default template (*mercurial?templates?map-cmdline.default) (glob)
@@ -62,6 +71,9 @@
   checking Python executable (*) (glob)
   checking Python version (*) (glob)
   checking Python lib (*lib*)... (glob)
+  checking Mercurial version (*) (glob)
+  checking Mercurial custom build (*) (glob)
+  checking module policy (*) (glob)
   checking installed modules (*mercurial)... (glob)
   checking templates (*mercurial?templates)... (glob)
   checking default template (*mercurial?templates?map-cmdline.default) (glob)
@@ -70,6 +82,8 @@
   no problems detected
 
 #if test-repo
+  $ . "$TESTDIR/helpers-testrepo.sh"
+
   $ cat >> wixxml.py << EOF
   > import os, subprocess, sys
   > import xml.etree.ElementTree as ET
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-journal-share.t	Mon Jul 18 23:28:14 2016 -0500
@@ -0,0 +1,153 @@
+Journal extension test: tests the share extension support
+
+  $ cat >> testmocks.py << EOF
+  > # mock out util.getuser() and util.makedate() to supply testable values
+  > import os
+  > from mercurial import util
+  > def mockgetuser():
+  >     return 'foobar'
+  > 
+  > def mockmakedate():
+  >     filename = os.path.join(os.environ['TESTTMP'], 'testtime')
+  >     try:
+  >         with open(filename, 'rb') as timef:
+  >             time = float(timef.read()) + 1
+  >     except IOError:
+  >         time = 0.0
+  >     with open(filename, 'wb') as timef:
+  >         timef.write(str(time))
+  >     return (time, 0)
+  > 
+  > util.getuser = mockgetuser
+  > util.makedate = mockmakedate
+  > EOF
+
+  $ cat >> $HGRCPATH << EOF
+  > [extensions]
+  > journal=
+  > share=
+  > testmocks=`pwd`/testmocks.py
+  > [remotenames]
+  > rename.default=remote
+  > EOF
+
+  $ hg init repo
+  $ cd repo
+  $ hg bookmark bm
+  $ touch file0
+  $ hg commit -Am 'file0 added'
+  adding file0
+  $ hg journal --all
+  previous locations of the working copy and bookmarks:
+  5640b525682e  .         commit -Am 'file0 added'
+  5640b525682e  bm        commit -Am 'file0 added'
+
+A shared working copy initially receives the same bookmarks and working copy
+
+  $ cd ..
+  $ hg share repo shared1
+  updating working directory
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cd shared1
+  $ hg journal --all
+  previous locations of the working copy and bookmarks:
+  5640b525682e  .         share repo shared1
+
+unless you explicitly share bookmarks
+
+  $ cd ..
+  $ hg share --bookmarks repo shared2
+  updating working directory
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cd shared2
+  $ hg journal --all
+  previous locations of the working copy and bookmarks:
+  5640b525682e  .         share --bookmarks repo shared2
+  5640b525682e  bm        commit -Am 'file0 added'
+
+Moving the bookmark in the original repository only shows up in the repository
+that shares bookmarks
+
+  $ cd ../repo
+  $ touch file1
+  $ hg commit -Am "file1 added"
+  adding file1
+  $ cd ../shared1
+  $ hg journal --all
+  previous locations of the working copy and bookmarks:
+  5640b525682e  .         share repo shared1
+  $ cd ../shared2
+  $ hg journal --all
+  previous locations of the working copy and bookmarks:
+  6432d239ac5d  bm        commit -Am 'file1 added'
+  5640b525682e  .         share --bookmarks repo shared2
+  5640b525682e  bm        commit -Am 'file0 added'
+
+But working copy changes are always 'local'
+
+  $ cd ../repo
+  $ hg up 0
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  (leaving bookmark bm)
+  $ hg journal --all
+  previous locations of the working copy and bookmarks:
+  5640b525682e  .         up 0
+  6432d239ac5d  .         commit -Am 'file1 added'
+  6432d239ac5d  bm        commit -Am 'file1 added'
+  5640b525682e  .         commit -Am 'file0 added'
+  5640b525682e  bm        commit -Am 'file0 added'
+  $ cd ../shared2
+  $ hg journal --all
+  previous locations of the working copy and bookmarks:
+  6432d239ac5d  bm        commit -Am 'file1 added'
+  5640b525682e  .         share --bookmarks repo shared2
+  5640b525682e  bm        commit -Am 'file0 added'
+  $ hg up tip
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ hg up 0
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ hg journal
+  previous locations of '.':
+  5640b525682e  up 0
+  6432d239ac5d  up tip
+  5640b525682e  share --bookmarks repo shared2
+
+Unsharing works as expected; the journal remains consistent
+
+  $ cd ../shared1
+  $ hg unshare
+  $ hg journal --all
+  previous locations of the working copy and bookmarks:
+  5640b525682e  .         share repo shared1
+  $ cd ../shared2
+  $ hg unshare
+  $ hg journal --all
+  previous locations of the working copy and bookmarks:
+  5640b525682e  .         up 0
+  6432d239ac5d  .         up tip
+  6432d239ac5d  bm        commit -Am 'file1 added'
+  5640b525682e  .         share --bookmarks repo shared2
+  5640b525682e  bm        commit -Am 'file0 added'
+
+New journal entries in the source repo no longer show up in the other working copies
+
+  $ cd ../repo
+  $ hg bookmark newbm -r tip
+  $ hg journal newbm
+  previous locations of 'newbm':
+  6432d239ac5d  bookmark newbm -r tip
+  $ cd ../shared2
+  $ hg journal newbm
+  previous locations of 'newbm':
+  no recorded locations
+
+This applies for both directions
+
+  $ hg bookmark shared2bm -r tip
+  $ hg journal shared2bm
+  previous locations of 'shared2bm':
+  6432d239ac5d  bookmark shared2bm -r tip
+  $ cd ../repo
+  $ hg journal shared2bm
+  previous locations of 'shared2bm':
+  no recorded locations
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-journal.t	Mon Jul 18 23:28:14 2016 -0500
@@ -0,0 +1,221 @@
+Tests for the journal extension; records bookmark locations.
+
+  $ cat >> testmocks.py << EOF
+  > # mock out util.getuser() and util.makedate() to supply testable values
+  > import os
+  > from mercurial import util
+  > def mockgetuser():
+  >     return 'foobar'
+  > 
+  > def mockmakedate():
+  >     filename = os.path.join(os.environ['TESTTMP'], 'testtime')
+  >     try:
+  >         with open(filename, 'rb') as timef:
+  >             time = float(timef.read()) + 1
+  >     except IOError:
+  >         time = 0.0
+  >     with open(filename, 'wb') as timef:
+  >         timef.write(str(time))
+  >     return (time, 0)
+  > 
+  > util.getuser = mockgetuser
+  > util.makedate = mockmakedate
+  > EOF
+
+  $ cat >> $HGRCPATH << EOF
+  > [extensions]
+  > journal=
+  > testmocks=`pwd`/testmocks.py
+  > EOF
+
+Setup repo
+
+  $ hg init repo
+  $ cd repo
+
+Test empty journal
+
+  $ hg journal
+  previous locations of '.':
+  no recorded locations
+  $ hg journal foo
+  previous locations of 'foo':
+  no recorded locations
+
+Test that working copy changes are tracked
+
+  $ echo a > a
+  $ hg commit -Aqm a
+  $ hg journal
+  previous locations of '.':
+  cb9a9f314b8b  commit -Aqm a
+  $ echo b > a
+  $ hg commit -Aqm b
+  $ hg journal
+  previous locations of '.':
+  1e6c11564562  commit -Aqm b
+  cb9a9f314b8b  commit -Aqm a
+  $ hg up 0
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ hg journal
+  previous locations of '.':
+  cb9a9f314b8b  up 0
+  1e6c11564562  commit -Aqm b
+  cb9a9f314b8b  commit -Aqm a
+
+Test that bookmarks are tracked
+
+  $ hg book -r tip bar
+  $ hg journal bar
+  previous locations of 'bar':
+  1e6c11564562  book -r tip bar
+  $ hg book -f bar
+  $ hg journal bar
+  previous locations of 'bar':
+  cb9a9f314b8b  book -f bar
+  1e6c11564562  book -r tip bar
+  $ hg up
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  updating bookmark bar
+  $ hg journal bar
+  previous locations of 'bar':
+  1e6c11564562  up
+  cb9a9f314b8b  book -f bar
+  1e6c11564562  book -r tip bar
+
+Test that bookmarks and working copy tracking is not mixed
+
+  $ hg journal
+  previous locations of '.':
+  1e6c11564562  up
+  cb9a9f314b8b  up 0
+  1e6c11564562  commit -Aqm b
+  cb9a9f314b8b  commit -Aqm a
+
+Test that you can list all entries as well as limit the list or filter on them
+
+  $ hg book -r tip baz
+  $ hg journal --all
+  previous locations of the working copy and bookmarks:
+  1e6c11564562  baz       book -r tip baz
+  1e6c11564562  bar       up
+  1e6c11564562  .         up
+  cb9a9f314b8b  bar       book -f bar
+  1e6c11564562  bar       book -r tip bar
+  cb9a9f314b8b  .         up 0
+  1e6c11564562  .         commit -Aqm b
+  cb9a9f314b8b  .         commit -Aqm a
+  $ hg journal --limit 2
+  previous locations of '.':
+  1e6c11564562  up
+  cb9a9f314b8b  up 0
+  $ hg journal bar
+  previous locations of 'bar':
+  1e6c11564562  up
+  cb9a9f314b8b  book -f bar
+  1e6c11564562  book -r tip bar
+  $ hg journal foo
+  previous locations of 'foo':
+  no recorded locations
+  $ hg journal .
+  previous locations of '.':
+  1e6c11564562  up
+  cb9a9f314b8b  up 0
+  1e6c11564562  commit -Aqm b
+  cb9a9f314b8b  commit -Aqm a
+  $ hg journal "re:ba."
+  previous locations of 're:ba.':
+  1e6c11564562  baz       book -r tip baz
+  1e6c11564562  bar       up
+  cb9a9f314b8b  bar       book -f bar
+  1e6c11564562  bar       book -r tip bar
+
+Test that verbose, JSON and commit output work
+
+  $ hg journal --verbose --all
+  previous locations of the working copy and bookmarks:
+  000000000000 -> 1e6c11564562 foobar    baz      1970-01-01 00:00 +0000  book -r tip baz
+  cb9a9f314b8b -> 1e6c11564562 foobar    bar      1970-01-01 00:00 +0000  up
+  cb9a9f314b8b -> 1e6c11564562 foobar    .        1970-01-01 00:00 +0000  up
+  1e6c11564562 -> cb9a9f314b8b foobar    bar      1970-01-01 00:00 +0000  book -f bar
+  000000000000 -> 1e6c11564562 foobar    bar      1970-01-01 00:00 +0000  book -r tip bar
+  1e6c11564562 -> cb9a9f314b8b foobar    .        1970-01-01 00:00 +0000  up 0
+  cb9a9f314b8b -> 1e6c11564562 foobar    .        1970-01-01 00:00 +0000  commit -Aqm b
+  000000000000 -> cb9a9f314b8b foobar    .        1970-01-01 00:00 +0000  commit -Aqm a
+  $ hg journal --verbose -Tjson
+  [
+   {
+    "command": "up",
+    "date": "1970-01-01 00:00 +0000",
+    "name": ".",
+    "newhashes": "1e6c11564562",
+    "oldhashes": "cb9a9f314b8b",
+    "user": "foobar"
+   },
+   {
+    "command": "up 0",
+    "date": "1970-01-01 00:00 +0000",
+    "name": ".",
+    "newhashes": "cb9a9f314b8b",
+    "oldhashes": "1e6c11564562",
+    "user": "foobar"
+   },
+   {
+    "command": "commit -Aqm b",
+    "date": "1970-01-01 00:00 +0000",
+    "name": ".",
+    "newhashes": "1e6c11564562",
+    "oldhashes": "cb9a9f314b8b",
+    "user": "foobar"
+   },
+   {
+    "command": "commit -Aqm a",
+    "date": "1970-01-01 00:00 +0000",
+    "name": ".",
+    "newhashes": "cb9a9f314b8b",
+    "oldhashes": "000000000000",
+    "user": "foobar"
+   }
+  ]
+  $ hg journal --commit
+  previous locations of '.':
+  1e6c11564562  up
+  changeset:   1:1e6c11564562
+  bookmark:    bar
+  bookmark:    baz
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     b
+  
+  cb9a9f314b8b  up 0
+  changeset:   0:cb9a9f314b8b
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     a
+  
+  1e6c11564562  commit -Aqm b
+  changeset:   1:1e6c11564562
+  bookmark:    bar
+  bookmark:    baz
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     b
+  
+  cb9a9f314b8b  commit -Aqm a
+  changeset:   0:cb9a9f314b8b
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     a
+  
+
+Test for behaviour on unexpected storage version information
+
+  $ printf '42\0' > .hg/journal
+  $ hg journal
+  previous locations of '.':
+  abort: unknown journal file version '42'
+  [255]
+  $ hg book -r tip doomed
+  unsupported journal file version '42'
--- a/tests/test-largefiles-wireproto.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-largefiles-wireproto.t	Mon Jul 18 23:28:14 2016 -0500
@@ -149,6 +149,14 @@
   $ hg commit -m "m2"
   Invoking status precommit hook
   A f2
+  $ hg verify --large
+  checking changesets
+  checking manifests
+  crosschecking files in changesets and manifests
+  checking files
+  2 files, 2 changesets, 2 total revisions
+  searching 1 changesets for largefiles
+  verified existence of 1 revisions of 1 largefiles
   $ hg serve --config extensions.largefiles=! -R ../r6 -d -p $HGPORT --pid-file ../hg.pid
   $ cat ../hg.pid >> $DAEMON_PIDS
   $ hg push http://localhost:$HGPORT
@@ -224,8 +232,8 @@
   ...     f.write(urllib2.urlopen(u).read())
   $ unzip -t archive.zip
   Archive:  archive.zip
-      testing: empty-default/.hg_archival.txt   OK
-      testing: empty-default/f1         OK
+      testing: empty-default/.hg_archival.txt*OK (glob)
+      testing: empty-default/f1*OK (glob)
   No errors detected in compressed data of archive.zip.
 
 test 'verify' with remotestore:
@@ -306,4 +314,134 @@
 used all HGPORTs, kill all daemons
   $ killdaemons.py
 
+largefiles should batch verify remote calls
+
+  $ hg init batchverifymain
+  $ cd batchverifymain
+  $ echo "aaa" >> a
+  $ hg add --large a
+  $ hg commit -m "a"
+  Invoking status precommit hook
+  A a
+  $ echo "bbb" >> b
+  $ hg add --large b
+  $ hg commit -m "b"
+  Invoking status precommit hook
+  A b
+  $ cd ..
+  $ hg serve -R batchverifymain -d -p $HGPORT --pid-file hg.pid \
+  > -A access.log
+  $ cat hg.pid >> $DAEMON_PIDS
+  $ hg clone --noupdate http://localhost:$HGPORT batchverifyclone
+  requesting all changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 2 changes to 2 files
+  $ hg -R batchverifyclone verify --large --lfa
+  checking changesets
+  checking manifests
+  crosschecking files in changesets and manifests
+  checking files
+  2 files, 2 changesets, 2 total revisions
+  searching 2 changesets for largefiles
+  verified existence of 2 revisions of 2 largefiles
+  $ tail -1 access.log
+  127.0.0.1 - - [*] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=statlfile+sha%3D972a1a11f19934401291cc99117ec614933374ce%3Bstatlfile+sha%3Dc801c9cfe94400963fcb683246217d5db77f9a9a (glob)
+  $ hg -R batchverifyclone update
+  getting changed largefiles
+  2 largefiles updated, 0 removed
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Clear log file before next test
+
+  $ printf "" > access.log
+
+Verify should check a file on the remote server only when the file is not
+available locally.
+
+  $ echo "ccc" >> batchverifymain/c
+  $ hg -R batchverifymain status
+  ? c
+  $ hg -R batchverifymain add --large batchverifymain/c
+  $ hg -R batchverifymain commit -m "c"
+  Invoking status precommit hook
+  A c
+  $ hg -R batchverifyclone pull
+  pulling from http://localhost:$HGPORT/
+  searching for changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 1 changes to 1 files
+  (run 'hg update' to get a working copy)
+  $ hg -R batchverifyclone verify --lfa
+  checking changesets
+  checking manifests
+  crosschecking files in changesets and manifests
+  checking files
+  3 files, 3 changesets, 3 total revisions
+  searching 3 changesets for largefiles
+  verified existence of 3 revisions of 3 largefiles
+  $ tail -1 access.log
+  127.0.0.1 - - [*] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=statlfile+sha%3Dc8559c3c9cfb42131794b7d8009230403b9b454c (glob)
+
+  $ killdaemons.py
+
+largefiles should not ask for password again after successful authorization
+
+  $ hg init credentialmain
+  $ cd credentialmain
+  $ echo "aaa" >> a
+  $ hg add --large a
+  $ hg commit -m "a"
+  Invoking status precommit hook
+  A a
+
+Before running the server, clear the user cache to force clone to download
+the large file from the server rather than getting it from the cache
+
+  $ rm "${USERCACHE}"/*
+
+  $ cd ..
+  $ cat << EOT > userpass.py
+  > import base64
+  > from mercurial.hgweb import common
+  > def perform_authentication(hgweb, req, op):
+  >     auth = req.env.get('HTTP_AUTHORIZATION')
+  >     if not auth:
+  >         raise common.ErrorResponse(common.HTTP_UNAUTHORIZED, 'who',
+  >                 [('WWW-Authenticate', 'Basic Realm="mercurial"')])
+  >     if base64.b64decode(auth.split()[1]).split(':', 1) != ['user', 'pass']:
+  >         raise common.ErrorResponse(common.HTTP_FORBIDDEN, 'no')
+  > def extsetup():
+  >     common.permhooks.insert(0, perform_authentication)
+  > EOT
+  $ hg serve --config extensions.x=userpass.py -R credentialmain \
+  >          -d -p $HGPORT --pid-file hg.pid -A access.log
+  $ cat hg.pid >> $DAEMON_PIDS
+  $ cat << EOF > get_pass.py
+  > import getpass
+  > def newgetpass(arg):
+  >   return "pass"
+  > getpass.getpass = newgetpass
+  > EOF
+  $ hg clone --config ui.interactive=true --config extensions.getpass=get_pass.py \
+  >          http://user@localhost:$HGPORT credentialclone
+  requesting all changes
+  http authorization required for http://localhost:$HGPORT/
+  realm: mercurial
+  user: user
+  password: adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 1 changes to 1 files
+  updating to branch default
+  getting changed largefiles
+  1 largefiles updated, 0 removed
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+  $ rm hg.pid access.log
+  $ killdaemons.py
+
 #endif
--- a/tests/test-largefiles.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-largefiles.t	Mon Jul 18 23:28:14 2016 -0500
@@ -1536,8 +1536,11 @@
   searching 1 changesets for largefiles
   verified existence of 3 revisions of 3 largefiles
 
-- introduce missing blob in local store repo and make sure that this is caught:
+- introduce missing blob in local store repo and remote store
+and make sure that this is caught:
+
   $ mv $TESTTMP/d/.hg/largefiles/e166e74c7303192238d60af5a9c4ce9bef0b7928 .
+  $ rm .hg/largefiles/e166e74c7303192238d60af5a9c4ce9bef0b7928
   $ hg verify --large
   checking changesets
   checking manifests
@@ -1556,7 +1559,8 @@
   [1]
 
 - cleanup
-  $ mv e166e74c7303192238d60af5a9c4ce9bef0b7928 $TESTTMP/d/.hg/largefiles/
+  $ cp e166e74c7303192238d60af5a9c4ce9bef0b7928 $TESTTMP/d/.hg/largefiles/
+  $ mv e166e74c7303192238d60af5a9c4ce9bef0b7928 .hg/largefiles/
 
 - verifying all revisions will fail because we didn't clone all largefiles to d:
   $ echo 'T-shirt' > $TESTTMP/d/.hg/largefiles/eb7338044dc27f9bc59b8dd5a246b065ead7a9c4
--- a/tests/test-mac-packages.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-mac-packages.t	Mon Jul 18 23:28:14 2016 -0500
@@ -1,4 +1,7 @@
 #require test-repo slow osx osxpackaging
+
+  $ . "$TESTDIR/helpers-testrepo.sh"
+
   $ OUTPUTDIR=`pwd`
   $ export OUTPUTDIR
   $ KEEPMPKG=yes
@@ -40,5 +43,11 @@
   $ grep '/hg	' boms.txt | cut -d '	' -f 1,2,3
   ./usr/local/bin/hg	100755	0/0
 
+Make sure the built binary uses the system Python interpreter
+  $ bsdtar xf mercurial.pkg/Payload usr/local/bin
+Use a glob to find this to avoid check-code whining about a fixed path.
+  $ head -n 1 usr/local/b?n/hg
+  #!/System/Library/Frameworks/Python.framework/Versions/2.7/Resources/Python.app/Contents/MacOS/Python
+
 Note that we're not currently installing any /etc/mercurial stuff,
 including merge-tool configurations.
--- a/tests/test-merge1.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-merge1.t	Mon Jul 18 23:28:14 2016 -0500
@@ -24,9 +24,10 @@
   $ hg update 0
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
 
-Test interrupted updates by exploiting our non-handling of directory collisions
+Test interrupted updates by having a non-empty dir with the same name as one
+of the files in a commit we're updating to
 
-  $ mkdir b
+  $ mkdir b && touch b/nonempty
   $ hg up
   abort: *: '$TESTTMP/t/b' (glob)
   [255]
@@ -38,10 +39,10 @@
   parent: 0:538afb845929 
    commit #0
   branch: default
-  commit: (interrupted update)
+  commit: 1 unknown (interrupted update)
   update: 1 new changesets (update)
   phases: 2 draft
-  $ rmdir b
+  $ rm b/nonempty
   $ hg up
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg sum
--- a/tests/test-newbranch.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-newbranch.t	Mon Jul 18 23:28:14 2016 -0500
@@ -463,3 +463,72 @@
   -1 new
 
   $ cd ..
+
+We expect that update --clean discards changes in the working directory,
+and updates to the head of the parent branch.
+
+  $ hg init updatebareclean
+  $ cd updatebareclean
+  $ hg update --clean
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ touch a
+  $ hg commit -A -m "a"
+  adding a
+  $ touch b
+  $ hg commit -A -m "b"
+  adding b
+  $ touch c
+  $ hg commit -A -m "c"
+  adding c
+  $ hg log
+  changeset:   2:991a3460af53
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     c
+  
+  changeset:   1:0e067c57feba
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     b
+  
+  changeset:   0:3903775176ed
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     a
+  
+  $ hg update -r 1
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ hg branch new-branch
+  marked working directory as branch new-branch
+  (branches are permanent and global, did you want a bookmark?)
+  $ echo "aa" >> a
+  $ hg update --clean
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ hg status
+  $ hg branch
+  default
+  $ hg parent
+  changeset:   2:991a3460af53
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     c
+  
+We expect that update --clean on a nonexistent parent discards a new branch
+and updates to the tipmost non-closed branch head
+
+  $ hg update null
+  0 files updated, 0 files merged, 3 files removed, 0 files unresolved
+  $ hg branch newbranch
+  marked working directory as branch newbranch
+  (branches are permanent and global, did you want a bookmark?)
+  $ hg update -C
+  3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ hg summary
+  parent: 2:991a3460af53 tip
+   c
+  branch: default
+  commit: (clean)
+  update: (current)
+  phases: 3 draft
--- a/tests/test-notify.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-notify.t	Mon Jul 18 23:28:14 2016 -0500
@@ -571,7 +571,7 @@
   Message-Id: <hg.3548c9e294b6.*.*@*> (glob)
   To: baz@test.com, foo@bar
   
-  changeset 3548c9e294b6 in $TESTTMP/b
+  changeset 3548c9e294b6 in $TESTTMP/b (glob)
   details: http://test/b?cmd=changeset;node=3548c9e294b6
   description: default template
 
--- a/tests/test-pager.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-pager.t	Mon Jul 18 23:28:14 2016 -0500
@@ -177,3 +177,48 @@
   paged! 'date:        Thu Jan 01 00:00:00 1970 +0000\n'
   paged! 'summary:     modify a 8\n'
   paged! '\n'
+
+Pager works with shell aliases.
+
+  $ cat >> $HGRCPATH <<EOF
+  > [alias]
+  > echoa = !echo a
+  > EOF
+
+  $ hg echoa
+  a
+  $ hg --config pager.attend-echoa=yes echoa
+  paged! 'a\n'
+
+Pager works with hg aliases including environment variables.
+
+  $ cat >> $HGRCPATH <<'EOF'
+  > [alias]
+  > printa = log -T "$A\n" -r 0
+  > EOF
+
+  $ A=1 hg --config pager.attend-printa=yes printa
+  paged! '1\n'
+  $ A=2 hg --config pager.attend-printa=yes printa
+  paged! '2\n'
+
+Pager should not override the exit code of other commands
+
+  $ cat >> $TESTTMP/fortytwo.py <<'EOF'
+  > from mercurial import cmdutil, commands
+  > cmdtable = {}
+  > command = cmdutil.command(cmdtable)
+  > @command('fortytwo', [], 'fortytwo', norepo=True)
+  > def fortytwo(ui, *opts):
+  >     ui.write('42\n')
+  >     return 42
+  > EOF
+
+  $ cat >> $HGRCPATH <<'EOF'
+  > [extensions]
+  > fortytwo = $TESTTMP/fortytwo.py
+  > EOF
+
+  $ hg fortytwo --pager=on
+  paged! '42\n'
+  [42]
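
The fortytwo extension above exists to pin down one property: paging the
output must not clobber the command's own exit status. A minimal sketch of
that property in plain Python (illustrative only, not Mercurial's pager code):

    import subprocess

    def run_paged(cmd, pager=("cat",)):
        # pipe cmd's stdout through a pager, but report cmd's exit status
        pager_proc = subprocess.Popen(pager, stdin=subprocess.PIPE)
        cmd_proc = subprocess.Popen(cmd, stdout=pager_proc.stdin)
        ret = cmd_proc.wait()        # this is the status that matters
        pager_proc.stdin.close()     # let the pager see end-of-file
        pager_proc.wait()            # the pager's own status is ignored
        return ret

    print(run_paged(["sh", "-c", "echo 42; exit 42"]))  # prints 42, like the test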
--- a/tests/test-parseindex2.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-parseindex2.py	Mon Jul 18 23:28:14 2016 -0500
@@ -9,13 +9,13 @@
 import subprocess
 import sys
 
-from mercurial import (
-    parsers,
-)
 from mercurial.node import (
     nullid,
     nullrev,
 )
+from mercurial import (
+    parsers,
+)
 
 # original python implementation
 def gettype(q):
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-patchbomb-tls.t	Mon Jul 18 23:28:14 2016 -0500
@@ -0,0 +1,128 @@
+#require serve ssl
+
+Set up SMTP server:
+
+  $ CERTSDIR="$TESTDIR/sslcerts"
+  $ cat "$CERTSDIR/priv.pem" "$CERTSDIR/pub.pem" >> server.pem
+
+  $ python "$TESTDIR/dummysmtpd.py" -p $HGPORT --pid-file a.pid -d \
+  > --tls smtps --certificate `pwd`/server.pem
+  listening at localhost:$HGPORT
+  $ cat a.pid >> $DAEMON_PIDS
+
+Ensure hg email output is sent to stdout:
+
+  $ unset PAGER
+
+Set up repository:
+
+  $ hg init t
+  $ cd t
+  $ cat <<EOF >> .hg/hgrc
+  > [extensions]
+  > patchbomb =
+  > [email]
+  > method = smtp
+  > [smtp]
+  > host = localhost
+  > port = $HGPORT
+  > tls = smtps
+  > EOF
+
+  $ echo a > a
+  $ hg commit -Ama -d '1 0'
+  adding a
+
+Utility functions:
+
+  $ DISABLECACERTS=
+  $ try () {
+  >   hg email $DISABLECACERTS -f quux -t foo -c bar -r tip "$@"
+  > }
+
+Our test cert is not signed by a trusted CA. It should fail to verify if
+we are able to load CA certs:
+
+#if sslcontext defaultcacerts no-defaultcacertsloaded
+  $ try
+  this patch series consists of 1 patches.
+  
+  
+  (an attempt was made to load CA certificates but none were loaded; see https://mercurial-scm.org/wiki/SecureConnections for how to configure Mercurial to avoid this error)
+  (?i)abort: .*?certificate.verify.failed.* (re)
+  [255]
+#endif
+
+#if no-sslcontext defaultcacerts
+  $ try
+  this patch series consists of 1 patches.
+  
+  
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info
+  (using CA certificates from *; if you see this message, your Mercurial install is not properly configured; see https://mercurial-scm.org/wiki/SecureConnections for how to configure Mercurial to avoid this message) (glob) (?)
+  (?i)abort: .*?certificate.verify.failed.* (re)
+  [255]
+#endif
+
+#if defaultcacertsloaded
+  $ try
+  this patch series consists of 1 patches.
+  
+  
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  (using CA certificates from *; if you see this message, your Mercurial install is not properly configured; see https://mercurial-scm.org/wiki/SecureConnections for how to configure Mercurial to avoid this message) (glob) (?)
+  (?i)abort: .*?certificate.verify.failed.* (re)
+  [255]
+
+#endif
+
+#if no-defaultcacerts
+  $ try
+  this patch series consists of 1 patches.
+  
+  
+  (unable to load * certificates; see https://mercurial-scm.org/wiki/SecureConnections for how to configure Mercurial to avoid this message) (glob) (?)
+  abort: localhost certificate error: no certificate received
+  (set hostsecurity.localhost:certfingerprints=sha256:62:09:97:2f:97:60:e3:65:8f:12:5d:78:9e:35:a1:36:7a:65:4b:0e:9f:ac:db:c3:bc:6e:b6:a3:c0:16:e0:30 config setting or use --insecure to connect insecurely)
+  [255]
+#endif
+
+  $ DISABLECACERTS="--config devel.disableloaddefaultcerts=true"
+
+Without certificates:
+
+  $ try --debug
+  this patch series consists of 1 patches.
+  
+  
+  (using smtps)
+  sending mail: smtp host localhost, port * (glob)
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  (verifying remote certificate)
+  abort: unable to verify security of localhost (no loaded CA certificates); refusing to connect
+  (see https://mercurial-scm.org/wiki/SecureConnections for how to configure Mercurial to avoid this error or set hostsecurity.localhost:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e to trust this server)
+  [255]
+
+With global certificates:
+
+  $ try --debug --config web.cacerts="$CERTSDIR/pub.pem"
+  this patch series consists of 1 patches.
+  
+  
+  (using smtps)
+  sending mail: smtp host localhost, port * (glob)
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  (verifying remote certificate)
+  sending [PATCH] a ...
+
+With invalid certificates:
+
+  $ try --config web.cacerts="$CERTSDIR/pub-other.pem"
+  this patch series consists of 1 patches.
+  
+  
+  warning: connecting to localhost using legacy security technology (TLS 1.0); see https://mercurial-scm.org/wiki/SecureConnections for more info (?)
+  (?i)abort: .*?certificate.verify.failed.* (re)
+  [255]
+
+  $ cd ..
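
For reference, the verification behaviour this new test pins down maps onto
the Python 3 standard-library pattern below (a sketch, not Mercurial's mail or
sslutil code; host, port and file names are placeholders):

    import smtplib
    import ssl

    def connect_smtps(host, port, cafile=None):
        # verify the server certificate against an explicit CA bundle if given,
        # otherwise fall back to the system CA store (self-signed certs fail)
        context = ssl.create_default_context(cafile=cafile)
        server = smtplib.SMTP_SSL(host, port, context=context)
        server.noop()
        return server

    # connect_smtps("localhost", 1025, cafile="pub.pem")  # like web.cacerts
    # connect_smtps("localhost", 1025)                    # like the default CA path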
--- a/tests/test-patchbomb.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-patchbomb.t	Mon Jul 18 23:28:14 2016 -0500
@@ -352,14 +352,14 @@
   Content-Disposition: attachment; filename="bundle.hg"
   Content-Transfer-Encoding: base64
   
-  SEcyMAAAAA5Db21wcmVzc2lvbj1CWkJaaDkxQVkmU1lCZFwPAAAKf//7nFYSWD1/4H7R09C/I70I
-  Ak0E4peoSIYIgQCgGUQOcLABGY2hqoAAAaBMTTAAAahgTCZoAAAAAMQaqn5GmapojQ00DEGI/VGJ
-  kDAJoGTDUAAyM0QaAEqalPTUaMhoyDIDR6IxAGEGmgAehMRhDRsoyB6TYTC8JyLN+jTGqitRAgRJ
-  b3SRlhd8/+VxlAUqAilLoKPEEyxFQkaEGo+DzItFeNiFAo8NMMweVtvXJFIMhjoKC18DeYwjLKBz
-  wrMcs86qJrctDNJorwBMuLcqvTVWHh1IlsIaaaYSUIP2IZsogT1+pSSZS+bSTJrgfKsO9go/f0HF
-  uW4Yr2vXpxDreOgSIAdK/xC8Yay48SLpxIuqc/BZ6rVZCgG21rr0zhCaEgXOTqNaYEvANvg0B0Qo
-  dgtqAs1FDcZgzYitwJh6ZAG0C4mA7FPrp9b7h0h/A44Xgd+0it1gvF0mFE/CCPwymXS+OisOOCAF
-  mDUDAC1pBvsXckU4UJBCZFwP
+  SEcyMAAAAA5Db21wcmVzc2lvbj1CWkJaaDkxQVkmU1kIqE7KAAAKf//7vFYQWD1/4H7R09C/470I
+  Ak0E4peoSIYIgQCgGUQOcLABGY2hqoTTCYaBqaYAAACaaMAATIwAA1MIYDaQaqn6p+jRop+oJkA2
+  oNqD1PU0PUBoxqaMmjMUepoBoDT1GmQNBKmlTT1GTCNMEAYTQ0NNDI0BoMQHpAZAA8o2pkyNJHfX
+  RRbXoyxKRUlAg41B3lpmMOnr77dEpFKAvEUGEkWuC4wioiMjC2Y2a84EXhsNCFIrbXUGId07PJnS
+  ELAOIpL/gE8R8CUeXuw2NKMtkFoLPkcTSomXtgHSg1IKaCNlWwVU3CpmMYqh5gkFYJKOD4UhVVQ6
+  SiF1DpE8ghWvF1ih+fYgagfYHI96w/QsrRATpYiP7VRbINFrQy2c21mZ7M4pXXrPBypoXAIhtum7
+  aKDJCpUqMDF5dfiDChMfgH9nQ4B60Uvgb4AK9dsbSYc+O3tEyNq9g9gZeA5Je2T82GzjC4DbY4F2
+  0kdrTBwslErFshCgDzeEBwICg13oQaQawQA1WWd3F3JFOFCQCKhOyg==
   --===============*==-- (glob)
 
 with a specific bundle type
--- a/tests/test-push-warn.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-push-warn.t	Mon Jul 18 23:28:14 2016 -0500
@@ -785,4 +785,14 @@
   no changes found
   [1]
 
+Test fail hook
+
+  $ hg push inner --config hooks.fail-push="echo running fail-push hook"
+  pushing to inner
+  searching for changes
+  running fail-push hook
+  abort: push creates new remote head 7d0f4fb6cf04 on branch 'A'!
+  (merge or see "hg help push" for details about pushing new heads)
+  [255]
+
   $ cd ..
--- a/tests/test-rebase-conflicts.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-rebase-conflicts.t	Mon Jul 18 23:28:14 2016 -0500
@@ -279,7 +279,7 @@
   e31216eec445e44352c5f01588856059466a24c9
   2f2496ddf49d69b5ef23ad8cf9fb2e0e4faf0ac2
   bundle2-output-bundle: "HG20", (1 params) 1 parts total
-  bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
+  bundle2-output-part: "changegroup" (params: 1 mandatory 1 advisory) streamed payload
   saved backup bundle to $TESTTMP/issue4041/.hg/strip-backup/e31216eec445-15f7a814-backup.hg (glob)
   3 changesets found
   list of changesets:
@@ -287,10 +287,10 @@
   19c888675e133ab5dff84516926a65672eaf04d9
   2a7f09cac94c7f4b73ebd5cd1a62d3b2e8e336bf
   bundle2-output-bundle: "HG20", 1 parts total
-  bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
+  bundle2-output-part: "changegroup" (params: 1 mandatory 1 advisory) streamed payload
   adding branch
   bundle2-input-bundle: with-transaction
-  bundle2-input-part: "changegroup" (params: 1 mandatory) supported
+  bundle2-input-part: "changegroup" (params: 1 mandatory 1 advisory) supported
   adding changesets
   add changeset 4c9fbe56a16f
   add changeset 19c888675e13
--- a/tests/test-rebase-obsolete.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-rebase-obsolete.t	Mon Jul 18 23:28:14 2016 -0500
@@ -709,9 +709,7 @@
   created new head
   $ hg debugobsolete `hg log -r 11 -T '{node}\n'` --config experimental.evolution=all
   $ hg rebase -r . -d 10
-  abort: all requested changesets have equivalents or were marked as obsolete
-  (to force the rebase, set the config experimental.rebaseskipobsolete to False)
-  [255]
+  note: not rebasing 11:f44da1f4954c "nonrelevant" (tip), it has no successor
 
 If a rebase is going to create divergence, it should abort
 
@@ -863,3 +861,91 @@
   rebasing 20:b82fb57ea638 "willconflict second version"
   note: not rebasing 21:8b31da3c4919 "dummy change", already in destination as 19:601db7a18f51 "dummy change successor"
   rebasing 22:7bdc8a87673d "dummy change" (tip)
+  $ cd ..
+
+rebase source is obsoleted (issue5198)
+---------------------------------
+
+  $ hg clone base amended
+  updating to branch default
+  3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cd amended
+  $ hg up 9520eea781bc
+  1 files updated, 0 files merged, 2 files removed, 0 files unresolved
+  $ echo 1 >> E
+  $ hg commit --amend -m "E'"
+  $ hg log -G
+  @  9:69abe8906104 E'
+  |
+  | o  7:02de42196ebe H
+  | |
+  | | o  6:eea13746799a G
+  | |/|
+  | o |  5:24b6387c8c8c F
+  |/ /
+  | x  4:9520eea781bc E
+  |/
+  | o  3:32af7686d403 D
+  | |
+  | o  2:5fddd98957c8 C
+  | |
+  | o  1:42ccdea3bb16 B
+  |/
+  o  0:cd010b8cd998 A
+  
+  $ hg rebase -d . -s 9520eea781bc
+  note: not rebasing 4:9520eea781bc "E", already in destination as 9:69abe8906104 "E'"
+  rebasing 6:eea13746799a "G"
+  $ hg log -G
+  o    10:17be06e82e95 G
+  |\
+  | @  9:69abe8906104 E'
+  | |
+  +---o  7:02de42196ebe H
+  | |
+  o |  5:24b6387c8c8c F
+  |/
+  | o  3:32af7686d403 D
+  | |
+  | o  2:5fddd98957c8 C
+  | |
+  | o  1:42ccdea3bb16 B
+  |/
+  o  0:cd010b8cd998 A
+  
+  $ cd ..
+
+Test that the bookmark is moved and the working dir is updated when all
+changesets have equivalents in the destination
+  $ hg init rbsrepo && cd rbsrepo
+  $ echo "[experimental]" > .hg/hgrc
+  $ echo "evolution=all" >> .hg/hgrc
+  $ echo "rebaseskipobsolete=on" >> .hg/hgrc
+  $ echo root > root && hg ci -Am root
+  adding root
+  $ echo a > a && hg ci -Am a
+  adding a
+  $ hg up 0
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ echo b > b && hg ci -Am b
+  adding b
+  created new head
+  $ hg rebase -r 2 -d 1
+  rebasing 2:1e9a3c00cbe9 "b" (tip)
+  $ hg log -r .  # working dir is at rev 3 (successor of 2)
+  3:be1832deae9a b (no-eol)
+  $ hg book -r 2 mybook --hidden  # rev 2 has a bookmark on it now
+  $ hg up 2 && hg log -r .  # working dir is at rev 2 again
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  2:1e9a3c00cbe9 b (no-eol)
+  $ hg rebase -r 2 -d 3
+  note: not rebasing 2:1e9a3c00cbe9 "b" (mybook), already in destination as 3:be1832deae9a "b"
+Check that working directory was updated to rev 3 although rev 2 was skipped
+during the rebase operation
+  $ hg log -r .
+  3:be1832deae9a b (no-eol)
+
+Check that bookmark was moved to rev 3 although rev 2 was skipped
+during the rebase operation
+  $ hg bookmarks
+     mybook                    3:be1832deae9a
--- a/tests/test-revert-interactive.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-revert-interactive.t	Mon Jul 18 23:28:14 2016 -0500
@@ -64,7 +64,7 @@
    3
    4
    5
-  record change 1/6 to 'f'? [Ynesfdaq?] y
+  revert change 1/6 to 'f'? [Ynesfdaq?] y
   
   @@ -1,5 +2,6 @@
    1
@@ -73,7 +73,7 @@
    4
    5
   +b
-  record change 2/6 to 'f'? [Ynesfdaq?] y
+  revert change 2/6 to 'f'? [Ynesfdaq?] y
   
   diff --git a/folder1/g b/folder1/g
   2 hunks, 2 lines changed
@@ -86,7 +86,7 @@
    3
    4
    5
-  record change 3/6 to 'folder1/g'? [Ynesfdaq?] y
+  revert change 3/6 to 'folder1/g'? [Ynesfdaq?] y
   
   @@ -1,5 +2,6 @@
    1
@@ -95,7 +95,7 @@
    4
    5
   +d
-  record change 4/6 to 'folder1/g'? [Ynesfdaq?] n
+  revert change 4/6 to 'folder1/g'? [Ynesfdaq?] n
   
   diff --git a/folder2/h b/folder2/h
   2 hunks, 2 lines changed
@@ -134,7 +134,41 @@
   
   abort: user quit
   [255]
-  $ rm folder1/g.orig
+  $ ls folder1/
+  g
+
+Test that a noop revert doesn't do an unnecessary backup
+  $ (echo y; echo n) | hg revert -i -r 2 folder1/g
+  diff --git a/folder1/g b/folder1/g
+  1 hunks, 1 lines changed
+  examine changes to 'folder1/g'? [Ynesfdaq?] y
+  
+  @@ -3,3 +3,4 @@
+   3
+   4
+   5
+  +d
+  revert this change to 'folder1/g'? [Ynesfdaq?] n
+  
+  $ ls folder1/
+  g
+
+Test --no-backup
+  $ (echo y; echo y) | hg revert -i -C -r 2 folder1/g
+  diff --git a/folder1/g b/folder1/g
+  1 hunks, 1 lines changed
+  examine changes to 'folder1/g'? [Ynesfdaq?] y
+  
+  @@ -3,3 +3,4 @@
+   3
+   4
+   5
+  +d
+  revert this change to 'folder1/g'? [Ynesfdaq?] y
+  
+  $ ls folder1/
+  g
+  >>> open('folder1/g', 'wb').write("1\n2\n3\n4\n5\nd\n")
 
 
   $ hg update -C 6
@@ -163,7 +197,7 @@
    3
    4
    5
-  record change 1/6 to 'f'? [Ynesfdaq?] y
+  revert change 1/6 to 'f'? [Ynesfdaq?] y
   
   @@ -1,5 +2,6 @@
    1
@@ -172,7 +206,7 @@
    4
    5
   +b
-  record change 2/6 to 'f'? [Ynesfdaq?] y
+  revert change 2/6 to 'f'? [Ynesfdaq?] y
   
   diff --git a/folder1/g b/folder1/g
   2 hunks, 2 lines changed
@@ -185,7 +219,7 @@
    3
    4
    5
-  record change 3/6 to 'folder1/g'? [Ynesfdaq?] y
+  revert change 3/6 to 'folder1/g'? [Ynesfdaq?] y
   
   @@ -1,5 +2,6 @@
    1
@@ -194,7 +228,7 @@
    4
    5
   +d
-  record change 4/6 to 'folder1/g'? [Ynesfdaq?] n
+  revert change 4/6 to 'folder1/g'? [Ynesfdaq?] n
   
   diff --git a/folder2/h b/folder2/h
   2 hunks, 2 lines changed
@@ -242,7 +276,7 @@
    3
    4
    5
-  record change 1/2 to 'f'? [Ynesfdaq?] y
+  discard change 1/2 to 'f'? [Ynesfdaq?] y
   
   @@ -2,6 +1,5 @@
    1
@@ -251,7 +285,7 @@
    4
    5
   -b
-  record change 2/2 to 'f'? [Ynesfdaq?] n
+  discard change 2/2 to 'f'? [Ynesfdaq?] n
   
   $ hg st
   M f
@@ -303,7 +337,7 @@
   -1
   +0
   +2
-  record this change to 'k'? [Ynesfdaq?] e
+  discard this change to 'k'? [Ynesfdaq?] e
   
   $ cat k
   42
@@ -350,7 +384,7 @@
    1
    2
    3
-  record change 1/3 to 'folder1/g'? [Ynesfdaq?] y
+  discard change 1/3 to 'folder1/g'? [Ynesfdaq?] y
   
   @@ -2,7 +1,7 @@
    c
@@ -361,13 +395,13 @@
   +4
    5
    d
-  record change 2/3 to 'folder1/g'? [Ynesfdaq?] y
+  discard change 2/3 to 'folder1/g'? [Ynesfdaq?] y
   
   @@ -7,3 +6,2 @@
    5
    d
   -lastline
-  record change 3/3 to 'folder1/g'? [Ynesfdaq?] n
+  discard change 3/3 to 'folder1/g'? [Ynesfdaq?] n
   
   $ hg diff --nodates
   diff -r a3d963a027aa folder1/g
--- a/tests/test-revset.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-revset.t	Mon Jul 18 23:28:14 2016 -0500
@@ -31,6 +31,46 @@
   >   hg log --template '{rev}\n' -r "$1"
   > }
 
+an extension to build '_intlist()' and '_hexlist()' expressions, which is
+necessary because these predicates use '\0' as a separator:
+
+  $ cat <<EOF > debugrevlistspec.py
+  > from __future__ import absolute_import
+  > from mercurial import (
+  >     cmdutil,
+  >     node as nodemod,
+  >     revset,
+  > )
+  > cmdtable = {}
+  > command = cmdutil.command(cmdtable)
+  > @command('debugrevlistspec',
+  >     [('', 'optimize', None, 'print parsed tree after optimizing'),
+  >      ('', 'bin', None, 'unhexlify arguments')])
+  > def debugrevlistspec(ui, repo, fmt, *args, **opts):
+  >     if opts['bin']:
+  >         args = map(nodemod.bin, args)
+  >     expr = revset.formatspec(fmt, list(args))
+  >     if ui.verbose:
+  >         tree = revset.parse(expr, lookup=repo.__contains__)
+  >         ui.note(revset.prettyformat(tree), "\n")
+  >         if opts["optimize"]:
+  >             opttree = revset.optimize(tree)
+  >             ui.note("* optimized:\n", revset.prettyformat(opttree), "\n")
+  >     func = revset.match(ui, expr, repo)
+  >     revs = func(repo)
+  >     if ui.verbose:
+  >         ui.note("* set:\n", revset.prettyformatset(revs), "\n")
+  >     for c in revs:
+  >         ui.write("%s\n" % c)
+  > EOF
+  $ cat <<EOF >> $HGRCPATH
+  > [extensions]
+  > debugrevlistspec = $TESTTMP/debugrevlistspec.py
+  > EOF
+  $ trylist() {
+  >   hg debugrevlistspec --debug "$@"
+  > }
+
   $ hg init repo
   $ cd repo
 
@@ -394,6 +434,12 @@
   4
   $ hg book -d date
 
+function name should be a symbol
+
+  $ log '"date"(2005)'
+  hg: parse error: not a symbol
+  [255]
+
 keyword arguments
 
   $ log 'extra(branch, value=a)'
@@ -898,6 +944,445 @@
   $ log 'tag(tip)'
   9
 
+Test order of revisions in compound expression
+----------------------------------------------
+
+The general rule is that only the outermost (= leftmost) predicate can
+enforce its ordering requirement. The other predicates should take the
+ordering defined by it.
+
+ 'A & B' should follow the order of 'A':
+
+  $ log '2:0 & 0::2'
+  2
+  1
+  0
+
+ 'head()' combines sets in right order:
+
+  $ log '2:0 & head()'
+  2
+  1
+  0
+
+ 'a + b', which is optimized to '_list(a b)', should take the ordering of
+ the left expression:
+
+  $ try --optimize '2:0 & (0 + 1 + 2)'
+  (and
+    (range
+      ('symbol', '2')
+      ('symbol', '0'))
+    (group
+      (or
+        ('symbol', '0')
+        ('symbol', '1')
+        ('symbol', '2'))))
+  * optimized:
+  (and
+    (range
+      ('symbol', '2')
+      ('symbol', '0'))
+    (func
+      ('symbol', '_list')
+      ('string', '0\x001\x002')))
+  * set:
+  <baseset [0, 1, 2]>
+  0
+  1
+  2
+ BROKEN: should be '2 1 0'
+
+ 'A + B' should take the ordering of the left expression:
+
+  $ try --optimize '2:0 & (0:1 + 2)'
+  (and
+    (range
+      ('symbol', '2')
+      ('symbol', '0'))
+    (group
+      (or
+        (range
+          ('symbol', '0')
+          ('symbol', '1'))
+        ('symbol', '2'))))
+  * optimized:
+  (and
+    (range
+      ('symbol', '2')
+      ('symbol', '0'))
+    (or
+      (range
+        ('symbol', '0')
+        ('symbol', '1'))
+      ('symbol', '2')))
+  * set:
+  <addset
+    <filteredset
+      <spanset+ 0:1>,
+      <spanset- 0:2>>,
+    <baseset [2]>>
+  0
+  1
+  2
+ BROKEN: should be '2 1 0'
+
+ '_intlist(a b)' should behave like 'a + b':
+
+  $ trylist --optimize '2:0 & %ld' 0 1 2
+  (and
+    (range
+      ('symbol', '2')
+      ('symbol', '0'))
+    (func
+      ('symbol', '_intlist')
+      ('string', '0\x001\x002')))
+  * optimized:
+  (and
+    (func
+      ('symbol', '_intlist')
+      ('string', '0\x001\x002'))
+    (range
+      ('symbol', '2')
+      ('symbol', '0')))
+  * set:
+  <filteredset
+    <spanset- 0:2>,
+    <baseset [0, 1, 2]>>
+  2
+  1
+  0
+
+  $ trylist --optimize '%ld & 2:0' 0 2 1
+  (and
+    (func
+      ('symbol', '_intlist')
+      ('string', '0\x002\x001'))
+    (range
+      ('symbol', '2')
+      ('symbol', '0')))
+  * optimized:
+  (and
+    (func
+      ('symbol', '_intlist')
+      ('string', '0\x002\x001'))
+    (range
+      ('symbol', '2')
+      ('symbol', '0')))
+  * set:
+  <filteredset
+    <spanset- 0:2>,
+    <baseset [0, 2, 1]>>
+  2
+  1
+  0
+ BROKEN: should be '0 2 1'
+
+ '_hexlist(a b)' should behave like 'a + b':
+
+  $ trylist --optimize --bin '2:0 & %ln' `hg log -T '{node} ' -r0:2`
+  (and
+    (range
+      ('symbol', '2')
+      ('symbol', '0'))
+    (func
+      ('symbol', '_hexlist')
+      ('string', '*'))) (glob)
+  * optimized:
+  (and
+    (range
+      ('symbol', '2')
+      ('symbol', '0'))
+    (func
+      ('symbol', '_hexlist')
+      ('string', '*'))) (glob)
+  * set:
+  <baseset [0, 1, 2]>
+  0
+  1
+  2
+ BROKEN: should be '2 1 0'
+
+  $ trylist --optimize --bin '%ln & 2:0' `hg log -T '{node} ' -r0+2+1`
+  (and
+    (func
+      ('symbol', '_hexlist')
+      ('string', '*')) (glob)
+    (range
+      ('symbol', '2')
+      ('symbol', '0')))
+  * optimized:
+  (and
+    (range
+      ('symbol', '2')
+      ('symbol', '0'))
+    (func
+      ('symbol', '_hexlist')
+      ('string', '*'))) (glob)
+  * set:
+  <baseset [0, 2, 1]>
+  0
+  2
+  1
+
+ 'present()' should do nothing other than suppressing an error:
+
+  $ try --optimize '2:0 & present(0 + 1 + 2)'
+  (and
+    (range
+      ('symbol', '2')
+      ('symbol', '0'))
+    (func
+      ('symbol', 'present')
+      (or
+        ('symbol', '0')
+        ('symbol', '1')
+        ('symbol', '2'))))
+  * optimized:
+  (and
+    (range
+      ('symbol', '2')
+      ('symbol', '0'))
+    (func
+      ('symbol', 'present')
+      (func
+        ('symbol', '_list')
+        ('string', '0\x001\x002'))))
+  * set:
+  <baseset [0, 1, 2]>
+  0
+  1
+  2
+ BROKEN: should be '2 1 0'
+
+ 'reverse()' should take effect only if it is the outermost expression:
+
+  $ try --optimize '0:2 & reverse(all())'
+  (and
+    (range
+      ('symbol', '0')
+      ('symbol', '2'))
+    (func
+      ('symbol', 'reverse')
+      (func
+        ('symbol', 'all')
+        None)))
+  * optimized:
+  (and
+    (range
+      ('symbol', '0')
+      ('symbol', '2'))
+    (func
+      ('symbol', 'reverse')
+      (func
+        ('symbol', 'all')
+        None)))
+  * set:
+  <filteredset
+    <spanset- 0:2>,
+    <spanset+ 0:9>>
+  2
+  1
+  0
+ BROKEN: should be '0 1 2'
+
+ 'sort()' should take effect only if it is the outermost expression:
+
+  $ try --optimize '0:2 & sort(all(), -rev)'
+  (and
+    (range
+      ('symbol', '0')
+      ('symbol', '2'))
+    (func
+      ('symbol', 'sort')
+      (list
+        (func
+          ('symbol', 'all')
+          None)
+        (negate
+          ('symbol', 'rev')))))
+  * optimized:
+  (and
+    (range
+      ('symbol', '0')
+      ('symbol', '2'))
+    (func
+      ('symbol', 'sort')
+      (list
+        (func
+          ('symbol', 'all')
+          None)
+        ('string', '-rev'))))
+  * set:
+  <filteredset
+    <spanset- 0:2>,
+    <spanset+ 0:9>>
+  2
+  1
+  0
+ BROKEN: should be '0 1 2'
+
+ for 'A & f(B)', 'B' should not be affected by the order of 'A':
+
+  $ try --optimize '2:0 & first(1 + 0 + 2)'
+  (and
+    (range
+      ('symbol', '2')
+      ('symbol', '0'))
+    (func
+      ('symbol', 'first')
+      (or
+        ('symbol', '1')
+        ('symbol', '0')
+        ('symbol', '2'))))
+  * optimized:
+  (and
+    (range
+      ('symbol', '2')
+      ('symbol', '0'))
+    (func
+      ('symbol', 'first')
+      (func
+        ('symbol', '_list')
+        ('string', '1\x000\x002'))))
+  * set:
+  <baseset
+    <limit n=1, offset=0,
+      <spanset- 0:2>,
+      <baseset [1, 0, 2]>>>
+  1
+
+  $ try --optimize '2:0 & not last(0 + 2 + 1)'
+  (and
+    (range
+      ('symbol', '2')
+      ('symbol', '0'))
+    (not
+      (func
+        ('symbol', 'last')
+        (or
+          ('symbol', '0')
+          ('symbol', '2')
+          ('symbol', '1')))))
+  * optimized:
+  (difference
+    (range
+      ('symbol', '2')
+      ('symbol', '0'))
+    (func
+      ('symbol', 'last')
+      (func
+        ('symbol', '_list')
+        ('string', '0\x002\x001'))))
+  * set:
+  <filteredset
+    <spanset- 0:2>,
+    <not
+      <baseset
+        <last n=1,
+          <fullreposet+ 0:9>,
+          <baseset [1, 2, 0]>>>>>
+  2
+  0
+
+ for 'A & (op)(B)', 'B' should not be affected by the order of 'A':
+
+  $ try --optimize '2:0 & (1 + 0 + 2):(0 + 2 + 1)'
+  (and
+    (range
+      ('symbol', '2')
+      ('symbol', '0'))
+    (range
+      (group
+        (or
+          ('symbol', '1')
+          ('symbol', '0')
+          ('symbol', '2')))
+      (group
+        (or
+          ('symbol', '0')
+          ('symbol', '2')
+          ('symbol', '1')))))
+  * optimized:
+  (and
+    (range
+      ('symbol', '2')
+      ('symbol', '0'))
+    (range
+      (func
+        ('symbol', '_list')
+        ('string', '1\x000\x002'))
+      (func
+        ('symbol', '_list')
+        ('string', '0\x002\x001'))))
+  * set:
+  <filteredset
+    <baseset [1]>,
+    <spanset- 0:2>>
+  1
+
+ 'A & B' can be rewritten as 'B & A' by weight, but the ordering rule should
+ be determined before the optimization (i.e. 'B' should take the ordering of
+ 'A'):
+
+  $ try --optimize 'contains("glob:*") & (2 + 0 + 1)'
+  (and
+    (func
+      ('symbol', 'contains')
+      ('string', 'glob:*'))
+    (group
+      (or
+        ('symbol', '2')
+        ('symbol', '0')
+        ('symbol', '1'))))
+  * optimized:
+  (and
+    (func
+      ('symbol', '_list')
+      ('string', '2\x000\x001'))
+    (func
+      ('symbol', 'contains')
+      ('string', 'glob:*')))
+  * set:
+  <filteredset
+    <baseset [2, 0, 1]>,
+    <contains 'glob:*'>>
+  2
+  0
+  1
+ BROKEN: should be '0 1 2'
+
+  $ try --optimize 'reverse(contains("glob:*")) & (0 + 2 + 1)'
+  (and
+    (func
+      ('symbol', 'reverse')
+      (func
+        ('symbol', 'contains')
+        ('string', 'glob:*')))
+    (group
+      (or
+        ('symbol', '0')
+        ('symbol', '2')
+        ('symbol', '1'))))
+  * optimized:
+  (and
+    (func
+      ('symbol', '_list')
+      ('string', '0\x002\x001'))
+    (func
+      ('symbol', 'reverse')
+      (func
+        ('symbol', 'contains')
+        ('string', 'glob:*'))))
+  * set:
+  <filteredset
+    <baseset [1, 2, 0]>,
+    <contains 'glob:*'>>
+  1
+  2
+  0
+ BROKEN: should be '2 1 0'
+
 test sort revset
 --------------------------------------------
 
@@ -952,6 +1437,19 @@
   6
   2
 
+test empty sort key, which is a noop
+
+  $ log 'sort(0 + 2 + 1, "")'
+  0
+  2
+  1
+
+test invalid sort keys
+
+  $ log 'sort(all(), -invalid)'
+  hg: parse error: unknown sort key '-invalid'
+  [255]
+
   $ cd ..
 
 test sorting by multiple keys including variable-length strings
@@ -1090,6 +1588,67 @@
   0 b12  m111 u112 111 10800
   2 b111 m11  u12  111 3600
 
+ toposort prioritises graph branches
+
+  $ hg up 2
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ touch a
+  $ hg addremove
+  adding a
+  $ hg ci -m 't1' -u 'tu' -d '130 0'
+  created new head
+  $ echo 'a' >> a
+  $ hg ci -m 't2' -u 'tu' -d '130 0'
+  $ hg book book1
+  $ hg up 4
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  (leaving bookmark book1)
+  $ touch a
+  $ hg addremove
+  adding a
+  $ hg ci -m 't3' -u 'tu' -d '130 0'
+
+  $ hg log -r 'sort(all(), topo)'
+  7 b111 t3   tu   130 0
+  4 b111 m112 u111 110 14400
+  3 b112 m111 u11  120 0
+  6 b111 t2   tu   130 0
+  5 b111 t1   tu   130 0
+  2 b111 m11  u12  111 3600
+  1 b11  m12  u111 112 7200
+  0 b12  m111 u112 111 10800
+
+  $ hg log -r 'sort(all(), -topo)'
+  0 b12  m111 u112 111 10800
+  1 b11  m12  u111 112 7200
+  2 b111 m11  u12  111 3600
+  5 b111 t1   tu   130 0
+  6 b111 t2   tu   130 0
+  3 b112 m111 u11  120 0
+  4 b111 m112 u111 110 14400
+  7 b111 t3   tu   130 0
+
+  $ hg log -r 'sort(all(), topo, topo.firstbranch=book1)'
+  6 b111 t2   tu   130 0
+  5 b111 t1   tu   130 0
+  7 b111 t3   tu   130 0
+  4 b111 m112 u111 110 14400
+  3 b112 m111 u11  120 0
+  2 b111 m11  u12  111 3600
+  1 b11  m12  u111 112 7200
+  0 b12  m111 u112 111 10800
+
+topological sorting can't be combined with other sort keys, and the
+topo.firstbranch option can't be used when the topo sort key is not active:
+
+  $ hg log -r 'sort(all(), "topo user")'
+  hg: parse error: topo sort order cannot be combined with other sort keys
+  [255]
+
+  $ hg log -r 'sort(all(), user, topo.firstbranch=book1)'
+  hg: parse error: topo.firstbranch can only be used when using the topo sort key
+  [255]
+
   $ cd ..
   $ cd repo
 
@@ -1480,6 +2039,16 @@
   hg: parse error: missing argument
   [255]
 
+invalid function call should not be optimized to only()
+
+  $ log '"ancestors"(6) and not ancestors(4)'
+  hg: parse error: not a symbol
+  [255]
+
+  $ log 'ancestors(6) and not "ancestors"(4)'
+  hg: parse error: not a symbol
+  [255]
+
 we can use patterns when searching for tags
 
   $ log 'tag("1..*")'
@@ -1550,7 +2119,10 @@
   0
   $ log '4::8 - 8'
   4
-  $ log 'matching(1 or 2 or 3) and (2 or 3 or 1)'
+
+matching() should preserve the order of the input set:
+
+  $ log '(2 or 3 or 1) and matching(1 or 2 or 3)'
   2
   3
   1
@@ -1967,12 +2539,12 @@
   (func
     ('symbol', 'unknownref')
     ('symbol', '0'))
-  abort: failed to parse the definition of revset alias "unknownref": '$' not for alias arguments
+  abort: bad definition of revset alias "unknownref": invalid symbol '$2'
   [255]
 
   $ hg debugrevspec --debug --config revsetalias.anotherbadone='branch(' "tip"
   ('symbol', 'tip')
-  warning: failed to parse the definition of revset alias "anotherbadone": at 7: not a prefix: end
+  warning: bad definition of revset alias "anotherbadone": at 7: not a prefix: end
   * set:
   <baseset [9]>
   9
@@ -1985,7 +2557,7 @@
 
   $ hg debugrevspec --debug --config revsetalias.'bad name'='tip' "tip"
   ('symbol', 'tip')
-  warning: failed to parse the declaration of revset alias "bad name": at 4: invalid token
+  warning: bad declaration of revset alias "bad name": at 4: invalid token
   * set:
   <baseset [9]>
   9
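
The long "order of revisions in compound expression" block added above follows
a single rule; a plain-Python restatement of it (schematic, not revset code):

    def ordered_and(a, b):
        # 'A & B': keep A's order, use B only as a membership filter
        bset = set(b)
        return [r for r in a if r in bset]

    assert ordered_and([2, 1, 0], [0, 1, 2]) == [2, 1, 0]  # '2:0 & 0::2' -> 2 1 0
    assert ordered_and([0, 1, 2], [2, 1, 0]) == [0, 1, 2]  # '0:2 & ...'  -> 0 1 2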
--- a/tests/test-rollback.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-rollback.t	Mon Jul 18 23:28:14 2016 -0500
@@ -196,3 +196,15 @@
   checking files
   1 files, 2 changesets, 2 total revisions
 
+rollback disabled by config
+  $ cat >> $HGRCPATH <<EOF
+  > [ui]
+  > rollback = false
+  > EOF
+  $ echo narf >> pinky-sayings.txt
+  $ hg add pinky-sayings.txt
+  $ hg ci -m 'First one.'
+  $ hg rollback
+  abort: rollback is disabled because it is unsafe
+  (see `hg help -v rollback` for information)
+  [255]
--- a/tests/test-run-tests.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-run-tests.t	Mon Jul 18 23:28:14 2016 -0500
@@ -2,10 +2,7 @@
 
 Avoid interference from actual test env:
 
-  $ unset HGTEST_JOBS
-  $ unset HGTEST_TIMEOUT
-  $ unset HGTEST_PORT
-  $ unset HGTEST_SHELL
+  $ . "$TESTDIR/helper-runtests.sh"
 
 Smoke test with install
 ============
@@ -196,6 +193,10 @@
   ]]>  </testcase>
   </testsuite>
 
+  $ cat .testtimes
+  test-failure-unicode.t * (glob)
+  test-failure.t * (glob)
+  test-success.t * (glob)
   $ rm test-failure-unicode.t
 
 test for --retest
@@ -304,6 +305,8 @@
   .
   # Ran 1 tests, 0 skipped, 0 warned, 0 failed.
   $ rm test-serve-inuse.t
+  $ killdaemons.py $DAEMON_PIDS
+  $ rm $DAEMON_PIDS
 
 Running In Debug Mode
 ======================
@@ -586,11 +589,35 @@
   testreport ={
       "test-bogus.t": {
           "result": "skip"
-      }, 
+      },
       "test-failure.t": {
           "result": "skip"
       }
   } (no-eol)
+
+Whitelist trumps blacklist
+  $ echo test-failure.t > whitelist
+  $ rt --blacklist=blacklist --whitelist=whitelist --json\
+  >   test-failure.t test-bogus.t
+  s
+  --- $TESTTMP/test-failure.t
+  +++ $TESTTMP/test-failure.t.err
+  @@ -1,5 +1,5 @@
+     $ echo babar
+  -  rataxes
+  +  babar
+   This is a noop statement so that
+   this test is still more bytes than success.
+   pad pad pad pad............................................................
+  
+  ERROR: test-failure.t output changed
+  !
+  Skipped test-bogus.t: Doesn't exist
+  Failed test-failure.t: output changed
+  # Ran 1 tests, 1 skipped, 0 warned, 1 failed.
+  python hash seed: * (glob)
+  [1]
+
 test for --json
 ==================
 
@@ -708,6 +735,10 @@
 
   $ rm -f test-glob-backslash.t
 
+Test globbing of 127.0.0.1
+  $ echo 172.16.18.1
+  127.0.0.1 (glob)
+
 Test reusability for third party tools
 ======================================
 
--- a/tests/test-serve.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-serve.t	Mon Jul 18 23:28:14 2016 -0500
@@ -34,13 +34,13 @@
 With -v
 
   $ hgserve
-  listening at http://localhost/ (bound to 127.0.0.1:HGPORT1)
+  listening at http://localhost/ (bound to 127.0.0.1:HGPORT1) (glob)
   % errors
 
 With -v and -p HGPORT2
 
   $ hgserve -p "$HGPORT2"
-  listening at http://localhost/ (bound to 127.0.0.1:HGPORT2)
+  listening at http://localhost/ (bound to 127.0.0.1:HGPORT2) (glob)
   % errors
 
 With -v and -p daytime (should fail because low port)
@@ -57,25 +57,25 @@
 With --prefix foo
 
   $ hgserve --prefix foo
-  listening at http://localhost/foo/ (bound to 127.0.0.1:HGPORT1)
+  listening at http://localhost/foo/ (bound to 127.0.0.1:HGPORT1) (glob)
   % errors
 
 With --prefix /foo
 
   $ hgserve --prefix /foo
-  listening at http://localhost/foo/ (bound to 127.0.0.1:HGPORT1)
+  listening at http://localhost/foo/ (bound to 127.0.0.1:HGPORT1) (glob)
   % errors
 
 With --prefix foo/
 
   $ hgserve --prefix foo/
-  listening at http://localhost/foo/ (bound to 127.0.0.1:HGPORT1)
+  listening at http://localhost/foo/ (bound to 127.0.0.1:HGPORT1) (glob)
   % errors
 
 With --prefix /foo/
 
   $ hgserve --prefix /foo/
-  listening at http://localhost/foo/ (bound to 127.0.0.1:HGPORT1)
+  listening at http://localhost/foo/ (bound to 127.0.0.1:HGPORT1) (glob)
   % errors
 
   $ cd ..
--- a/tests/test-shelve.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-shelve.t	Mon Jul 18 23:28:14 2016 -0500
@@ -1044,8 +1044,8 @@
   shelved as default
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
   $ hg debugbundle .hg/shelved/*.hg
-  Stream params: {'Compression': 'BZ'}
-  changegroup -- "{'version': '02'}"
+  Stream params: sortdict([('Compression', 'BZ')])
+  changegroup -- "sortdict([('version', '02'), ('nbchanges', '1')])"
       45993d65fe9dc3c6d8764b9c3b07fa831ee7d92d
   $ cd ..
 
@@ -1585,3 +1585,40 @@
   ? b
   $ hg branch
   default
+  $ cd ..
+
+Prepare an unshelve with a corrupted shelvedstate
+  $ hg init r1 && cd r1
+  $ echo text1 > file && hg add file
+  $ hg shelve
+  shelved as default
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ echo text2 > file && hg ci -Am text1
+  adding file
+  $ hg unshelve
+  unshelving change 'default'
+  rebasing shelved changes
+  rebasing 1:396ea74229f9 "(changes in empty repository)" (tip)
+  merging file
+  warning: conflicts while merging file! (edit, then use 'hg resolve --mark')
+  unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
+  [1]
+  $ echo somethingsomething > .hg/shelvedstate
+
+Unshelve --continue fails with an appropriate message if shelvedstate is corrupted
+  $ hg unshelve --continue
+  abort: corrupted shelved state file
+  (please run hg unshelve --abort to abort unshelve operation)
+  [255]
+
+Unshelve --abort works with a corrupted shelvedstate
+  $ hg unshelve --abort
+  could not read shelved state file, your working copy may be in an unexpected state
+  please update to some commit
+
+Unshelve --abort fails with an appropriate message if there's no unshelve in
+progress
+  $ hg unshelve --abort
+  abort: no unshelve in progress
+  [255]
+  $ cd ..
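
The corrupted-shelvedstate cases above exercise the defensive-read pattern
sketched below (hypothetical code, not Mercurial's shelve module; the file
layout shown is made up for illustration):

    class CorruptedState(Exception):
        """Raised instead of a traceback when a state file cannot be parsed."""

    def read_state(path):
        try:
            with open(path) as fp:
                lines = fp.read().splitlines()
            version = int(lines[0])  # assumed 'version first' layout
            return version, lines[1:]
        except (IOError, OSError, ValueError, IndexError):
            raise CorruptedState("corrupted shelved state file")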
--- a/tests/test-static-http.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-static-http.t	Mon Jul 18 23:28:14 2016 -0500
@@ -1,14 +1,8 @@
 #require killdaemons
 
-#if windows
   $ hg clone http://localhost:$HGPORT/ copy
   abort: * (glob)
   [255]
-#else
-  $ hg clone http://localhost:$HGPORT/ copy
-  abort: error: Connection refused
-  [255]
-#endif
   $ test -d copy
   [1]
 
--- a/tests/test-status.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-status.t	Mon Jul 18 23:28:14 2016 -0500
@@ -203,8 +203,9 @@
   ]
 
   $ hg status -A -Tpickle > pickle
+  >>> from __future__ import print_function
   >>> import pickle
-  >>> print sorted((x['status'], x['path']) for x in pickle.load(open("pickle")))
+  >>> print(sorted((x['status'], x['path']) for x in pickle.load(open("pickle"))))
   [('!', 'deleted'), ('?', 'pickle'), ('?', 'unknown'), ('A', 'added'), ('A', 'copied'), ('C', '.hgignore'), ('C', 'modified'), ('I', 'ignored'), ('R', 'removed')]
   $ rm pickle
 
--- a/tests/test-strip.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-strip.t	Mon Jul 18 23:28:14 2016 -0500
@@ -210,8 +210,8 @@
   summary:     b
   
   $ hg debugbundle .hg/strip-backup/*
-  Stream params: {'Compression': 'BZ'}
-  changegroup -- "{'version': '02'}"
+  Stream params: sortdict([('Compression', 'BZ')])
+  changegroup -- "sortdict([('version', '02'), ('nbchanges', '1')])"
       264128213d290d868c54642d13aeaa3675551a78
   $ hg pull .hg/strip-backup/*
   pulling from .hg/strip-backup/264128213d29-0b39d6bf-backup.hg
@@ -799,7 +799,7 @@
   6625a516847449b6f0fa3737b9ba56e9f0f3032c
   d8db9d1372214336d2b5570f20ee468d2c72fa8b
   bundle2-output-bundle: "HG20", (1 params) 1 parts total
-  bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
+  bundle2-output-part: "changegroup" (params: 1 mandatory 1 advisory) streamed payload
   saved backup bundle to $TESTTMP/issue4736/.hg/strip-backup/6625a5168474-345bb43d-backup.hg (glob)
   invalid branchheads cache (served): tip differs
   truncating cache/rbc-revs-v1 to 24
--- a/tests/test-subrepo-deep-nested-change.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-subrepo-deep-nested-change.t	Mon Jul 18 23:28:14 2016 -0500
@@ -303,9 +303,9 @@
   archiving (sub1) [===================================>] 4/4\r (no-eol) (esc)
                                                               \r (no-eol) (esc)
   \r (no-eol) (esc)
-  archiving (sub1/sub2) [                               ] 0/2\r (no-eol) (esc)
-  archiving (sub1/sub2) [==============>                ] 1/2\r (no-eol) (esc)
-  archiving (sub1/sub2) [==============================>] 2/2\r (no-eol) (esc)
+  archiving (sub1/sub2) [                               ] 0/2\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [==============>                ] 1/2\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [==============================>] 2/2\r (no-eol) (glob) (esc)
                                                               \r (no-eol) (esc)
   $ diff -r . ../wdir | egrep -v '\.hg$|^Common subdirectories:'
   Only in ../wdir: .hg_archival.txt
@@ -347,9 +347,9 @@
   archiving (sub1) [===================================>] 3/3\r (no-eol) (esc)
                                                               \r (no-eol) (esc)
   \r (no-eol) (esc)
-  archiving (sub1/sub2) [                               ] 0/2\r (no-eol) (esc)
-  archiving (sub1/sub2) [==============>                ] 1/2\r (no-eol) (esc)
-  archiving (sub1/sub2) [==============================>] 2/2\r (no-eol) (esc)
+  archiving (sub1/sub2) [                               ] 0/2\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [==============>                ] 1/2\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [==============================>] 2/2\r (no-eol) (glob) (esc)
                                                               \r (no-eol) (esc)
   $ find ../wdir -type f | sort
   ../wdir/.hg_archival.txt
@@ -379,10 +379,10 @@
   archiving (sub1) [===================================>] 3/3\r (no-eol) (esc)
                                                               \r (no-eol) (esc)
   \r (no-eol) (esc)
-  archiving (sub1/sub2) [                               ] 0/3\r (no-eol) (esc)
-  archiving (sub1/sub2) [=========>                     ] 1/3\r (no-eol) (esc)
-  archiving (sub1/sub2) [===================>           ] 2/3\r (no-eol) (esc)
-  archiving (sub1/sub2) [==============================>] 3/3\r (no-eol) (esc)
+  archiving (sub1/sub2) [                               ] 0/3\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [=========>                     ] 1/3\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [===================>           ] 2/3\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [==============================>] 3/3\r (no-eol) (glob) (esc)
                                                               \r (no-eol) (esc)
   $ cat ../wdir/.hg_archival.txt
   repo: 7f491f53a367861f47ee64a80eb997d1f341b77a
@@ -510,10 +510,10 @@
   archiving (sub1) [===================================>] 3/3\r (no-eol) (esc)
                                                               \r (no-eol) (esc)
   \r (no-eol) (esc)
-  archiving (sub1/sub2) [                               ] 0/3\r (no-eol) (esc)
-  archiving (sub1/sub2) [=========>                     ] 1/3\r (no-eol) (esc)
-  archiving (sub1/sub2) [===================>           ] 2/3\r (no-eol) (esc)
-  archiving (sub1/sub2) [==============================>] 3/3\r (no-eol) (esc)
+  archiving (sub1/sub2) [                               ] 0/3\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [=========>                     ] 1/3\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [===================>           ] 2/3\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [==============================>] 3/3\r (no-eol) (glob) (esc)
                                                               \r (no-eol) (esc)
   $ find ../archive_all | sort
   ../archive_all
@@ -547,8 +547,8 @@
   archiving (sub1) [===================================>] 3/3\r (no-eol) (esc)
                                                               \r (no-eol) (esc)
   \r (no-eol) (esc)
-  archiving (sub1/sub2) [                               ] 0/1\r (no-eol) (esc)
-  archiving (sub1/sub2) [==============================>] 1/1\r (no-eol) (esc)
+  archiving (sub1/sub2) [                               ] 0/1\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [==============================>] 1/1\r (no-eol) (glob) (esc)
                                                               \r (no-eol) (esc)
   $ find ../archive_exclude | sort
   ../archive_exclude
@@ -568,9 +568,9 @@
   archiving (sub1) [ <=>                                  ] 0\r (no-eol) (esc)
                                                               \r (no-eol) (esc)
   \r (no-eol) (esc)
-  archiving (sub1/sub2) [                               ] 0/2\r (no-eol) (esc)
-  archiving (sub1/sub2) [==============>                ] 1/2\r (no-eol) (esc)
-  archiving (sub1/sub2) [==============================>] 2/2\r (no-eol) (esc)
+  archiving (sub1/sub2) [                               ] 0/2\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [==============>                ] 1/2\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [==============================>] 2/2\r (no-eol) (glob) (esc)
                                                               \r (no-eol) (esc)
   $ find ../archive_include | sort
   ../archive_include
@@ -945,7 +945,7 @@
   archiving (sub1) [ <=>                                  ] 0\r (no-eol) (esc)
                                                               \r (no-eol) (esc)
   \r (no-eol) (esc)
-  archiving (sub1/sub2) [ <=>                             ] 0\r (no-eol) (esc)
+  archiving (sub1/sub2) [ <=>                             ] 0\r (no-eol) (glob) (esc)
                                                               \r (no-eol) (esc)
   \r (no-eol) (esc)
   archiving (sub3) [ <=>                                  ] 0\r (no-eol) (esc)
@@ -959,7 +959,7 @@
   archiving (sub1) [ <=>                                  ] 0\r (no-eol) (esc)
                                                               \r (no-eol) (esc)
   \r (no-eol) (esc)
-  archiving (sub1/sub2) [ <=>                             ] 0\r (no-eol) (esc)
+  archiving (sub1/sub2) [ <=>                             ] 0\r (no-eol) (glob) (esc)
                                                               \r (no-eol) (esc)
   diff -Nru cloned.*/.hgsub cloned/.hgsub (glob)
   --- cloned.*/.hgsub	* (glob)
@@ -987,8 +987,8 @@
   archiving (sub1) [===================================>] 1/1\r (no-eol) (esc)
                                                               \r (no-eol) (esc)
   \r (no-eol) (esc)
-  archiving (sub1/sub2) [                               ] 0/1\r (no-eol) (esc)
-  archiving (sub1/sub2) [==============================>] 1/1\r (no-eol) (esc)
+  archiving (sub1/sub2) [                               ] 0/1\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [==============================>] 1/1\r (no-eol) (glob) (esc)
                                                               \r (no-eol) (esc)
   \r (no-eol) (esc)
   archiving [                                           ] 0/8\r (no-eol) (esc)
@@ -1006,10 +1006,10 @@
   archiving (sub1) [===================================>] 1/1\r (no-eol) (esc)
                                                               \r (no-eol) (esc)
   \r (no-eol) (esc)
-  archiving (sub1/sub2) [                               ] 0/3\r (no-eol) (esc)
-  archiving (sub1/sub2) [=========>                     ] 1/3\r (no-eol) (esc)
-  archiving (sub1/sub2) [===================>           ] 2/3\r (no-eol) (esc)
-  archiving (sub1/sub2) [==============================>] 3/3\r (no-eol) (esc)
+  archiving (sub1/sub2) [                               ] 0/3\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [=========>                     ] 1/3\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [===================>           ] 2/3\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [==============================>] 3/3\r (no-eol) (glob) (esc)
                                                               \r (no-eol) (esc)
   \r (no-eol) (esc)
   archiving (sub3) [                                    ] 0/1\r (no-eol) (esc)
@@ -1084,8 +1084,8 @@
   archiving (sub1) [ <=>                                  ] 0\r (no-eol) (esc)
                                                               \r (no-eol) (esc)
   \r (no-eol) (esc)
-  archiving (sub1/sub2) [                               ] 0/1\r (no-eol) (esc)
-  archiving (sub1/sub2) [==============================>] 1/1\r (no-eol) (esc)
+  archiving (sub1/sub2) [                               ] 0/1\r (no-eol) (glob) (esc)
+  archiving (sub1/sub2) [==============================>] 1/1\r (no-eol) (glob) (esc)
                                                               \r (no-eol) (esc)
   --- */cloned.*/sub1/sub2/sub2	* (glob)
   +++ */cloned/sub1/sub2/sub2	* (glob)
--- a/tests/test-subrepo-git.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-subrepo-git.t	Mon Jul 18 23:28:14 2016 -0500
@@ -1146,8 +1146,8 @@
   $ hg commit -m "add subrepo"
   $ cd ..
   $ rm -f pwned.txt
-  $ env -u GIT_ALLOW_PROTOCOL \
-  > PWNED_MSG="your git is too old or mercurial has regressed" hg clone \
+  $ unset GIT_ALLOW_PROTOCOL
+  $ PWNED_MSG="your git is too old or mercurial has regressed" hg clone \
   > malicious-subrepository malicious-subrepository-protected
   Cloning into '$TESTTMP/tc/malicious-subrepository-protected/s'... (glob)
   fatal: transport 'ext' not allowed
--- a/tests/test-treemanifest.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-treemanifest.t	Mon Jul 18 23:28:14 2016 -0500
@@ -309,7 +309,16 @@
   $ hg --config extensions.strip= strip tip
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   saved backup bundle to $TESTTMP/repo-mixed/.hg/strip-backup/51cfd7b1e13b-78a2f3ed-backup.hg (glob)
+  $ hg debugindex --dir dir1
+     rev    offset  length  delta linkrev nodeid       p1           p2
+       0         0     127     -1       4 064927a0648a 000000000000 000000000000
+       1       127     111      0       5 25ecb8cb8618 000000000000 000000000000
   $ hg unbundle -q .hg/strip-backup/*
+  $ hg debugindex --dir dir1
+     rev    offset  length  delta linkrev nodeid       p1           p2
+       0         0     127     -1       4 064927a0648a 000000000000 000000000000
+       1       127     111      0       5 25ecb8cb8618 000000000000 000000000000
+       2       238      55      1       6 5b16163a30c6 25ecb8cb8618 000000000000
   $ hg st --change tip
   M dir1/a
 
@@ -742,3 +751,45 @@
   $ hg -R deeprepo bundle --all -t v2 deeprepo.bundle
   abort: repository does not support bundle version 02
   [255]
+
+Pull does not include a changegroup for a manifest the client already has
+from another branch
+
+  $ mkdir grafted-dir-repo
+  $ cd grafted-dir-repo
+  $ hg --config experimental.treemanifest=1 init
+  $ mkdir dir
+  $ echo a > dir/file
+  $ echo a > file
+  $ hg ci -Am initial
+  adding dir/file
+  adding file
+  $ echo b > dir/file
+  $ hg ci -m updated
+  $ hg co '.^'
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ hg revert -r tip dir/
+  reverting dir/file (glob)
+  $ echo b > file # to make sure root manifest is sent
+  $ hg ci -m grafted
+  created new head
+  $ cd ..
+
+  $ hg --config experimental.treemanifest=1 clone --pull -r 1 \
+  >   grafted-dir-repo grafted-dir-repo-clone
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 3 changes to 2 files
+  updating to branch default
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cd grafted-dir-repo-clone
+  $ hg pull -r 2
+  pulling from $TESTTMP/grafted-dir-repo (glob)
+  searching for changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 1 changes to 1 files (+1 heads)
+  (run 'hg heads' to see heads, 'hg merge' to merge)
+
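
The new treemanifest pull test checks that directory manifests the client
already has are not resent; schematically (not Mercurial's exchange code):

    def manifests_to_send(server_nodes, client_nodes):
        # only send manifest nodes the client does not already have
        have = set(client_nodes)
        return [n for n in server_nodes if n not in have]

    assert manifests_to_send(["m0", "m1", "m2"], ["m0", "m1"]) == ["m2"]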
--- a/tests/test-unified-test.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-unified-test.t	Mon Jul 18 23:28:14 2016 -0500
@@ -26,24 +26,25 @@
 
 Doctest commands:
 
-  >>> print 'foo'
+  >>> from __future__ import print_function
+  >>> print('foo')
   foo
   $ echo interleaved
   interleaved
   >>> for c in 'xyz':
-  ...     print c
+  ...     print(c)
   x
   y
   z
-  >>> print
+  >>> print()
   
   >>> foo = 'global name'
   >>> def func():
-  ...     print foo, 'should be visible in func()'
+  ...     print(foo, 'should be visible in func()')
   >>> func()
   global name should be visible in func()
-  >>> print '''multiline
-  ... string'''
+  >>> print('''multiline
+  ... string''')
   multiline
   string
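
The switch to print() plus the __future__ import above matters on Python 2,
where the parentheses would otherwise be read as a tuple display, for example:

    from __future__ import print_function

    print('global name', 'should be visible in func()')
    # with the import:  global name should be visible in func()
    # without it (py2): ('global name', 'should be visible in func()')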
 
--- a/tests/test-up-local-change.t	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/test-up-local-change.t	Mon Jul 18 23:28:14 2016 -0500
@@ -90,12 +90,6 @@
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     1
   
-  $ hg parents
-  changeset:   0:c19d34741b0a
-  user:        test
-  date:        Thu Jan 01 00:00:00 1970 +0000
-  summary:     1
-  
   $ hg --debug up
     searching for copies back to rev 1
     unmatched files in other:
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-update-names.t	Mon Jul 18 23:28:14 2016 -0500
@@ -0,0 +1,55 @@
+Test update logic when there are renames or weird same-name cases between dirs
+and files
+
+Update with local changes across a file rename
+
+  $ hg init r1 && cd r1
+
+  $ echo a > a
+  $ hg add a
+  $ hg ci -m a
+
+  $ hg mv a b
+  $ hg ci -m rename
+
+  $ echo b > b
+  $ hg ci -m change
+
+  $ hg up -q 0
+
+  $ echo c > a
+
+  $ hg up
+  merging a and b to b
+  warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
+  0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+  use 'hg resolve' to retry unresolved file merges
+  [1]
+
+Test update when a local untracked directory exists with the same name as a
+tracked file in the commit we are updating to
+  $ hg init r2 && cd r2
+  $ echo root > root && hg ci -Am root  # rev 0
+  adding root
+  $ echo text > name && hg ci -Am "name is a file"  # rev 1
+  adding name
+  $ hg up 0
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ mkdir name
+  $ hg up 1
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Test update when a local untracked directory exists with some files in it and
+has the same name as a tracked file in the commit we are updating to. In the
+future this should give a friendlier error message, but for now we just make
+sure that it does not erase untracked data
+  $ hg up 0
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ mkdir name
+  $ echo text > name/file
+  $ hg st
+  ? name/file
+  $ hg up 1
+  abort: *: '$TESTTMP/r1/r2/name' (glob)
+  [255]
+  $ cd ..
--- a/tests/test-update-renames.t	Sat Jul 02 09:41:40 2016 -0700
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,26 +0,0 @@
-Test update logic when there are renames
-
-Update with local changes across a file rename
-
-  $ hg init
-
-  $ echo a > a
-  $ hg add a
-  $ hg ci -m a
-
-  $ hg mv a b
-  $ hg ci -m rename
-
-  $ echo b > b
-  $ hg ci -m change
-
-  $ hg up -q 0
-
-  $ echo c > a
-
-  $ hg up
-  merging a and b to b
-  warning: conflicts while merging b! (edit, then use 'hg resolve --mark')
-  0 files updated, 0 files merged, 0 files removed, 1 files unresolved
-  use 'hg resolve' to retry unresolved file merges
-  [1]
--- a/tests/tinyproxy.py	Sat Jul 02 09:41:40 2016 -0700
+++ b/tests/tinyproxy.py	Mon Jul 18 23:28:14 2016 -0500
@@ -14,16 +14,20 @@
 
 __version__ = "0.2.1"
 
-import BaseHTTPServer
-import SocketServer
+import optparse
 import os
 import select
 import socket
 import sys
-import urlparse
+
+from mercurial import util
 
-class ProxyHandler (BaseHTTPServer.BaseHTTPRequestHandler):
-    __base = BaseHTTPServer.BaseHTTPRequestHandler
+httpserver = util.httpserver
+urlparse = util.urlparse
+socketserver = util.socketserver
+
+class ProxyHandler (httpserver.basehttprequesthandler):
+    __base = httpserver.basehttprequesthandler
     __base_handle = __base.handle
 
     server_version = "TinyHTTPProxy/" + __version__
@@ -132,14 +136,27 @@
     do_PUT  = do_GET
     do_DELETE = do_GET
 
-class ThreadingHTTPServer (SocketServer.ThreadingMixIn,
-                           BaseHTTPServer.HTTPServer):
+class ThreadingHTTPServer (socketserver.ThreadingMixIn,
+                           httpserver.httpserver):
     def __init__(self, *args, **kwargs):
-        BaseHTTPServer.HTTPServer.__init__(self, *args, **kwargs)
+        httpserver.httpserver.__init__(self, *args, **kwargs)
         a = open("proxy.pid", "w")
         a.write(str(os.getpid()) + "\n")
         a.close()
 
+def runserver(port=8000, bind=""):
+    server_address = (bind, port)
+    ProxyHandler.protocol_version = "HTTP/1.0"
+    httpd = ThreadingHTTPServer(server_address, ProxyHandler)
+    sa = httpd.socket.getsockname()
+    print("Serving HTTP on", sa[0], "port", sa[1], "...")
+    try:
+        httpd.serve_forever()
+    except KeyboardInterrupt:
+        print("\nKeyboard interrupt received, exiting.")
+        httpd.server_close()
+        sys.exit(0)
+
 if __name__ == '__main__':
     argv = sys.argv
     if argv[1:] and argv[1] in ('-h', '--help'):
@@ -155,4 +172,13 @@
             del argv[2:]
         else:
             print("Any clients will be served...")
-        BaseHTTPServer.test(ProxyHandler, ThreadingHTTPServer)
+
+        parser = optparse.OptionParser()
+        parser.add_option('-b', '--bind', metavar='ADDRESS',
+                          help='Specify alternate bind address '
+                               '[default: all interfaces]', default='')
+        (options, args) = parser.parse_args()
+        port = 8000
+        if len(args) == 1:
+            port = int(args[0])
+        runserver(port, options.bind)
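
A small driver for the runserver() helper added above (assuming tinyproxy.py
is importable from the tests directory; the port and bind address are
arbitrary):

    import tinyproxy

    if __name__ == '__main__':
        # serve on localhost only, mirroring the new -b/--bind option
        tinyproxy.runserver(port=8000, bind='127.0.0.1')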