changeset:   24803:e89f909edffa
branch:      stable
tag:         3.4-rc
parent:      24753:612ed41ae359
parent:      24802:2ee10789d66b
child:       24804:2fe9fd88db0b
user:        Matt Mackall <mpm@selenic.com>
date:        Thu, 16 Apr 2015 20:57:51 -0500
files:       contrib/lock-checker.py
summary:     merge default into stable for 3.4 freeze
--- a/Makefile	Thu Apr 16 22:33:53 2015 +0900
+++ b/Makefile	Thu Apr 16 20:57:51 2015 -0500
@@ -7,11 +7,14 @@
 PREFIX=/usr/local
 export PREFIX
 PYTHON=python
+$(eval HGROOT := $(shell pwd))
+HGPYTHONS ?= $(HGROOT)/build/pythons
 PURE=
 PYFILES:=$(shell find mercurial hgext doc -name '*.py')
 DOCFILES=mercurial/help/*.txt
 export LANGUAGE=C
 export LC_ALL=C
+TESTFLAGS ?= $(shell echo $$HGTESTFLAGS)
 
 # Set this to e.g. "mingw32" to use a non-default compiler.
 COMPILER=
@@ -98,6 +101,13 @@
 test-%:
 	cd tests && $(PYTHON) run-tests.py $(TESTFLAGS) $@
 
+testpy-%:
+	@echo Looking for Python $* in $(HGPYTHONS)
+	[ -e $(HGPYTHONS)/$*/bin/python ] || ( \
+	cd $$(mktemp --directory --tmpdir) && \
+        $(MAKE) -f $(HGROOT)/contrib/Makefile.python PYTHONVER=$* PREFIX=$(HGPYTHONS)/$* python )
+	cd tests && $(HGPYTHONS)/$*/bin/python run-tests.py $(TESTFLAGS)
+
 check-code:
 	hg manifest | xargs python contrib/check-code.py
 
@@ -108,6 +118,7 @@
 	  hgext/*.py hgext/*/__init__.py \
 	  mercurial/fileset.py mercurial/revset.py \
 	  mercurial/templatefilters.py mercurial/templatekw.py \
+	  mercurial/templater.py \
 	  mercurial/filemerge.py \
 	  $(DOCFILES) > i18n/hg.pot.tmp
         # All strings marked for translation in Mercurial contain
--- a/contrib/buildrpm	Thu Apr 16 22:33:53 2015 +0900
+++ b/contrib/buildrpm	Thu Apr 16 20:57:51 2015 -0500
@@ -74,6 +74,7 @@
 $HG archive -t tgz $RPMBUILDDIR/SOURCES/mercurial-$version-$release.tar.gz
 if [ "$PYTHONVER" ]; then
 (
+    mkdir -p build
     cd build
     PYTHON_SRCFILE=Python-$PYTHONVER.tgz
     [ -f $PYTHON_SRCFILE ] || curl -Lo $PYTHON_SRCFILE http://www.python.org/ftp/python/$PYTHONVER/$PYTHON_SRCFILE
--- a/contrib/check-code.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/contrib/check-code.py	Thu Apr 16 20:57:51 2015 -0500
@@ -122,6 +122,7 @@
     (r'sed (-e )?\'(\d+|/[^/]*/)i(?!\\\n)',
      "put a backslash-escaped newline after sed 'i' command"),
     (r'^diff *-\w*u.*$\n(^  \$ |^$)', "prefix diff -u with cmp"),
+    (r'seq ', "don't use 'seq', use $TESTDIR/seq.py")
   ],
   # warnings
   [
@@ -153,7 +154,7 @@
     (uprefix + r'(\s|fi\b|done\b)', "use > for continued lines"),
     (uprefix + r'.*:\.\S*/', "x:.y in a path does not work on msys, rewrite "
      "as x://.y, or see `hg log -k msys` for alternatives", r'-\S+:\.|' #-Rxxx
-     'hg pull -q file:../test'), # in test-pull.t which is skipped on windows
+     '# no-msys'), # in test-pull.t which is skipped on windows
     (r'^  saved backup bundle to \$TESTTMP.*\.hg$', winglobmsg),
     (r'^  changeset .* references (corrupted|missing) \$TESTTMP/.*[^)]$',
      winglobmsg),
@@ -334,6 +335,7 @@
     (r'(while|if|do|for)\(', "use space after while/if/do/for"),
     (r'return\(', "return is not a function"),
     (r' ;', "no space before ;"),
+    (r'[^;] \)', "no space before )"),
     (r'[)][{]', "space between ) and {"),
     (r'\w+\* \w+', "use int *foo, not int* foo"),
     (r'\W\([^\)]+\) \w+', "use (int)foo, not (int) foo"),
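
The two checks added above are plain per-line regular expressions. A small standalone sketch of how the new C rule fires (the pattern string is copied from the hunk; the sample lines are invented for illustration)::

  import re

  # added rule: complain about a space before ')' unless it follows ';'
  pat = re.compile(r'[^;] \)')
  for line in ['foo(a );', 'foo(a);', 'for (i = 0;; )']:
      print(line, '->', 'warn' if pat.search(line) else 'ok')
  # only 'foo(a );' is flagged; the "for (;; )" idiom is not matched
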
--- a/contrib/check-commit	Thu Apr 16 22:33:53 2015 +0900
+++ b/contrib/check-commit	Thu Apr 16 20:57:51 2015 -0500
@@ -20,11 +20,12 @@
 errors = [
     (r"[(]bc[)]", "(BC) needs to be uppercase"),
     (r"[(]issue \d\d\d", "no space allowed between issue and number"),
-    (r"[(]bug", "use (issueDDDD) instead of bug"),
+    (r"[(]bug(\d|\s)", "use (issueDDDD) instead of bug"),
     (r"^# User [^@\n]+$", "username is not an email address"),
     (r"^# .*\n(?!merge with )[^#]\S+[^:] ",
      "summary line doesn't start with 'topic: '"),
     (r"^# .*\n[A-Z][a-z]\S+", "don't capitalize summary lines"),
+    (r"^# .*\n[^\n]*: *[A-Z][a-z]\S+", "don't capitalize summary lines"),
     (r"^# .*\n.*\.\s+$", "don't add trailing period on summary line"),
     (r"^# .*\n.{78,}", "summary line too long"),
     (r"^\+\n \n", "adds double empty line"),
--- a/contrib/hgk	Thu Apr 16 22:33:53 2015 +0900
+++ b/contrib/hgk	Thu Apr 16 20:57:51 2015 -0500
@@ -177,18 +177,21 @@
     set ncmupdate 1
     set limit 0
     set revargs {}
+    set showhidden no
     for {set i 0} {$i < [llength $rargs]} {incr i} {
 	set opt [lindex $rargs $i]
-	if {$opt == "--limit"} {
+	switch -- $opt --limit {
 	    incr i
 	    set limit [lindex $rargs $i]
-	} else {
+	} --hidden {
+	    set showhidden yes
+	} default {
 	    lappend revargs $opt
 	}
     }
     if [catch {
-	set parse_args [concat --default HEAD $revargs]
-	set parse_temp [eval exec {$env(HG)} --config ui.report_untrusted=false debug-rev-parse $parse_args]
+	set parse_args [concat tip $revargs]
+	set parse_temp [eval exec {$env(HG)} --config ui.report_untrusted=false log --template '{node}\n' $parse_args]
 	regsub -all "\r\n" $parse_temp "\n" parse_temp
 	set parsed_args [split $parse_temp "\n"]
     } err] {
@@ -201,6 +204,9 @@
     if {$limit > 0} {
 	set parsed_args [concat -n $limit $parsed_args]
     }
+    if {$showhidden} {
+	append parsed_args --hidden
+    }
     if [catch {
 	set commfd [open "|{$env(HG)} --config ui.report_untrusted=false debug-rev-list --header --topo-order --parents $parsed_args" r]
     } err] {
@@ -331,7 +337,7 @@
 
 proc parsecommit {id contents listed olds} {
     global commitinfo children nchildren parents nparents cdate ncleft
-    global firstparents
+    global firstparents obsolete
 
     set inhdr 1
     set comment {}
@@ -369,21 +375,25 @@
 		set inhdr 0
 	    } else {
 		set tag [lindex $line 0]
-		if {$tag == "author"} {
+		switch -- $tag "author" {
 		    set x [expr {[llength $line] - 2}]
 		    set audate [lindex $line $x]
 		    set auname [join [lrange $line 1 [expr {$x - 1}]]]
-		} elseif {$tag == "committer"} {
+		} "committer" {
 		    set x [expr {[llength $line] - 2}]
 		    set comdate [lindex $line $x]
 		    set comname [join [lrange $line 1 [expr {$x - 1}]]]
-		} elseif {$tag == "revision"} {
+		} "revision" {
 		    set rev [lindex $line 1]
-        } elseif {$tag == "branch"} {
+		} "branch" {
 		    set branch [join [lrange $line 1 end]]
-        } elseif {$tag == "bookmark"} {
+		} "bookmark" {
 		    set bookmark [join [lrange $line 1 end]]
-        }
+		} "obsolete" {
+		    set obsolete($id) ""
+		} "phase" {
+		    set phase [lindex $line 1 end]
+		}
 	    }
 	} else {
 	    if {$comment == {}} {
@@ -407,7 +417,7 @@
 	set comdate [clock format $comdate]
     }
     set commitinfo($id) [list $headline $auname $audate \
-			     $comname $comdate $comment $rev $branch $bookmark]
+			     $comname $comdate $comment $rev $branch $bookmark $phase]
 
     if {[info exists firstparents]} {
         set i [lsearch $firstparents $id]
@@ -1133,7 +1143,7 @@
     global lineno lthickness mainline mainlinearrow sidelines
     global commitlisted rowtextx idpos lastuse displist
     global oldnlines olddlevel olddisplist
-    global aucolormap curid curidfont
+    global aucolormap curid curidfont obsolete
 
     incr numcommits
     incr lineno
@@ -1141,13 +1151,26 @@
     set lastuse($id) $lineno
     set lineid($lineno) $id
     set idline($id) $lineno
-    set ofill [expr {[info exists commitlisted($id)]? "blue": "white"}]
+    set shape oval
+    set outline #000080
+    set ofill [expr {[info exists commitlisted($id)]? "#7f7fff": "white"}]
     if {![info exists commitinfo($id)]} {
 	readcommit $id
 	if {![info exists commitinfo($id)]} {
 	    set commitinfo($id) {"No commit information available"}
 	    set nparents($id) 0
 	}
+    } else {
+	switch [lindex $commitinfo($id) 9] secret {
+	    set shape rect
+	} public {
+	    set outline black
+	    set ofill blue
+	}
+    }
+    if {[info exists obsolete($id)]} {
+	set outline darkgrey
+	set ofill lightgrey
     }
     assigncolor $id
     set currentparents {}
@@ -1175,9 +1198,9 @@
     }
     drawlines $id 0
     set orad [expr {$linespc / 3}]
-    set t [$canv create oval [expr $x - $orad] [expr $y1 - $orad] \
+    set t [$canv create $shape [expr $x - $orad] [expr $y1 - $orad] \
 	       [expr $x + $orad - 1] [expr $y1 + $orad - 1] \
-	       -fill $ofill -outline black -width 1]
+	       -fill $ofill -outline $outline -width 1]
     $canv raise $t
     $canv bind $t <1> {selcanvline {} %x %y}
     set xt [xcoord [llength $displist] $level $lineno]
@@ -2493,6 +2516,9 @@
     }
     $ctext insert end "User: [lindex $info 1]\n"
     $ctext insert end "Date: [lindex $info 2]\n"
+    if {[lindex $info 3] ne ""} {
+	$ctext insert end "Committer: [lindex $info 3]\n"
+    }
     if {[info exists idbookmarks($id)]} {
 	$ctext insert end "Bookmarks:"
 	foreach bookmark $idbookmarks($id) {
@@ -2520,6 +2546,12 @@
 	    append comment "Child:  [commit_descriptor $c]\n"
 	}
     }
+
+    if {[lindex $info 9] eq "secret"} {
+	# for now, display phase for secret changesets only
+	append comment "Phase: [lindex $info 9]\n"
+    }
+
     append comment "\n"
     append comment [lindex $info 5]
 
@@ -4040,13 +4072,15 @@
 
 proc getconfig {} {
     global env
-
-    set lines [exec $env(HG) debug-config]
-    regsub -all "\r\n" $lines "\n" config
     set config {}
-    foreach line [split $lines "\n"] {
-	regsub "^(k|v)=" $line "" line
-	lappend config $line
+
+    set lines [exec $env(HG) debugconfig]
+    foreach line [split $lines \n] {
+	set line [string trimright $line \r]
+	if {[string match hgk.* $line]} {
+	    regexp {(.*)=(.*)} $line - k v
+	    lappend config $k $v
+	}
     }
     return $config
 }
@@ -4110,8 +4144,9 @@
 set stuffsaved 0
 set patchnum 0
 
+set config(hgk.vdiff) ""
 array set config [getconfig]
-set hgvdiff $config(vdiff)
+set hgvdiff $config(hgk.vdiff)
 setcoords
 makewindow
 readrefs
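
getconfig now shells out to ``hg debugconfig`` and keeps only the ``hgk.*`` entries instead of relying on the removed ``debug-config`` command. A rough Python rendering of the same filtering, for illustration only (hgk itself does this in Tcl)::

  import subprocess

  def getconfig():
      # "hg debugconfig" prints one "section.name=value" line per setting
      out = subprocess.check_output(['hg', 'debugconfig']).decode()
      config = {}
      for line in out.splitlines():
          line = line.rstrip('\r')
          if line.startswith('hgk.'):
              key, _, value = line.partition('=')
              config[key] = value
      return config
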
--- a/contrib/import-checker.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/contrib/import-checker.py	Thu Apr 16 20:57:51 2015 -0500
@@ -61,6 +61,8 @@
     for m in 'ctypes', 'email':
         yield m
     yield 'builtins' # python3 only
+    for m in 'fcntl', 'grp', 'pwd', 'termios':  # Unix only
+        yield m
     stdlib_prefixes = set([sys.prefix, sys.exec_prefix])
     # We need to supplement the list of prefixes for the search to work
     # when run from within a virtualenv.
@@ -90,7 +92,8 @@
             for name in files:
                 if name == '__init__.py':
                     continue
-                if not (name.endswith('.py') or name.endswith('.so')):
+                if not (name.endswith('.py') or name.endswith('.so')
+                        or name.endswith('.pyd')):
                     continue
                 full_path = os.path.join(top, name)
                 if 'site-packages' in full_path:
@@ -162,36 +165,31 @@
 class CircularImport(Exception):
     pass
 
-
-def cyclekey(names):
-    return tuple(sorted(set(names)))
-
-def check_one_mod(mod, imports, path=None, ignore=None):
-    if path is None:
-        path = []
-    if ignore is None:
-        ignore = []
-    path = path + [mod]
-    for i in sorted(imports.get(mod, [])):
-        if i not in stdlib_modules and not i.startswith('mercurial.'):
-            i = mod.rsplit('.', 1)[0] + '.' + i
-        if i in path:
-            firstspot = path.index(i)
-            cycle = path[firstspot:] + [i]
-            if cyclekey(cycle) not in ignore:
-                raise CircularImport(cycle)
-            continue
-        check_one_mod(i, imports, path=path, ignore=ignore)
+def checkmod(mod, imports):
+    shortest = {}
+    visit = [[mod]]
+    while visit:
+        path = visit.pop(0)
+        for i in sorted(imports.get(path[-1], [])):
+            if i not in stdlib_modules and not i.startswith('mercurial.'):
+                i = mod.rsplit('.', 1)[0] + '.' + i
+            if len(path) < shortest.get(i, 1000):
+                shortest[i] = len(path)
+                if i in path:
+                    if i == path[0]:
+                        raise CircularImport(path)
+                    continue
+                visit.append(path + [i])
 
 def rotatecycle(cycle):
     """arrange a cycle so that the lexicographically first module listed first
 
-    >>> rotatecycle(['foo', 'bar', 'foo'])
+    >>> rotatecycle(['foo', 'bar'])
     ['bar', 'foo', 'bar']
     """
     lowest = min(cycle)
     idx = cycle.index(lowest)
-    return cycle[idx:] + cycle[1:idx] + [lowest]
+    return cycle[idx:] + cycle[:idx] + [lowest]
 
 def find_cycles(imports):
     """Find cycles in an already-loaded import graph.
@@ -201,17 +199,17 @@
     ...            'top.baz': ['foo'],
     ...            'top.qux': ['foo']}
     >>> print '\\n'.join(sorted(find_cycles(imports)))
-    top.bar -> top.baz -> top.foo -> top.bar -> top.bar
-    top.foo -> top.qux -> top.foo -> top.foo
+    top.bar -> top.baz -> top.foo -> top.bar
+    top.foo -> top.qux -> top.foo
     """
-    cycles = {}
+    cycles = set()
     for mod in sorted(imports.iterkeys()):
         try:
-            check_one_mod(mod, imports, ignore=cycles)
+            checkmod(mod, imports)
         except CircularImport, e:
             cycle = e.args[0]
-            cycles[cyclekey(cycle)] = ' -> '.join(rotatecycle(cycle))
-    return cycles.values()
+            cycles.add(" -> ".join(rotatecycle(cycle)))
+    return cycles
 
 def _cycle_sortkey(c):
     return len(c), c
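
checkmod() replaces the recursive walk with a breadth-first search, so the first cycle reported for a module is also a shortest one. A self-contained sketch of that search over a plain import graph (the stdlib and package name resolution done by the real function is omitted)::

  def findcycle(mod, imports):
      visit = [[mod]]
      while visit:
          path = visit.pop(0)          # BFS: shorter paths are expanded first
          for i in sorted(imports.get(path[-1], [])):
              if i == path[0]:
                  return path          # shortest cycle back to the start module
              if i not in path:
                  visit.append(path + [i])
      return None

  imports = {'top.foo': ['top.bar', 'top.qux'],
             'top.bar': ['top.baz'],
             'top.baz': ['top.foo'],
             'top.qux': ['top.foo']}
  print(findcycle('top.foo', imports))
  # ['top.foo', 'top.qux'] -- the two-module cycle wins over the three-module one
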
--- a/contrib/lock-checker.py	Thu Apr 16 22:33:53 2015 +0900
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,39 +0,0 @@
-"""Extension to verify locks are obtained in the required places.
-
-This works by wrapping functions that should be surrounded by a lock
-and asserting the lock is held. Missing locks are called out with a
-traceback printed to stderr.
-
-This currently only checks store locks, not working copy locks.
-"""
-import os
-from mercurial import util
-
-def _checklock(repo):
-    l = repo._lockref and repo._lockref()
-    if l is None or not l.held:
-        util.debugstacktrace('missing lock', skip=1)
-
-def reposetup(ui, repo):
-    orig = repo.__class__
-    class lockcheckrepo(repo.__class__):
-        def _writejournal(self, *args, **kwargs):
-            _checklock(self)
-            return orig._writejournal(self, *args, **kwargs)
-
-        def transaction(self, *args, **kwargs):
-            _checklock(self)
-            return orig.transaction(self, *args, **kwargs)
-
-        # TODO(durin42): kiilerix had a commented-out lock check in
-        # _writebranchcache and _writerequirements
-
-        def _tag(self, *args, **kwargs):
-            _checklock(self)
-            return orig._tag(self, *args, **kwargs)
-
-        def write(self, *args, **kwargs):
-            assert os.path.lexists(self._join('.hg/wlock'))
-            return orig.write(self, *args, **kwargs)
-
-    repo.__class__ = lockcheckrepo
--- a/contrib/mercurial.spec	Thu Apr 16 22:33:53 2015 +0900
+++ b/contrib/mercurial.spec	Thu Apr 16 20:57:51 2015 -0500
@@ -9,7 +9,7 @@
 %global docutilsname docutils-0.12
 %global docutilsmd5 4622263b62c5c771c03502afa3157768
 %global pythonhg python-hg
-%global hgpyprefix /usr/%{pythonhg}
+%global hgpyprefix /opt/%{pythonhg}
 # byte compilation will fail on some Python /test/ files
 %global _python_bytecompile_errors_terminate_build 0
 
--- a/contrib/perf.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/contrib/perf.py	Thu Apr 16 20:57:51 2015 -0500
@@ -189,14 +189,25 @@
     timer(d)
     fm.end()
 
-@command('perfdirstatefoldmap')
-def perffoldmap(ui, repo):
+@command('perffilefoldmap')
+def perffilefoldmap(ui, repo):
     timer, fm = gettimer(ui)
     dirstate = repo.dirstate
     'a' in dirstate
     def d():
-        dirstate._foldmap.get('a')
-        del dirstate._foldmap
+        dirstate._filefoldmap.get('a')
+        del dirstate._filefoldmap
+    timer(d)
+    fm.end()
+
+@command('perfdirfoldmap')
+def perfdirfoldmap(ui, repo):
+    timer, fm = gettimer(ui)
+    dirstate = repo.dirstate
+    'a' in dirstate
+    def d():
+        dirstate._dirfoldmap.get('a')
+        del dirstate._dirfoldmap
         del dirstate._dirs
     timer(d)
     fm.end()
@@ -293,6 +304,25 @@
     timer(d)
     fm.end()
 
+@command('perfctxfiles')
+def perfparents(ui, repo, x):
+    x = int(x)
+    timer, fm = gettimer(ui)
+    def d():
+        len(repo[x].files())
+    timer(d)
+    fm.end()
+
+@command('perfrawfiles')
+def perfparents(ui, repo, x):
+    x = int(x)
+    timer, fm = gettimer(ui)
+    cl = repo.changelog
+    def d():
+        len(cl.read(x)[3])
+    timer(d)
+    fm.end()
+
 @command('perflookup')
 def perflookup(ui, repo, rev):
     timer, fm = gettimer(ui)
--- a/contrib/synthrepo.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/contrib/synthrepo.py	Thu Apr 16 20:57:51 2015 -0500
@@ -359,7 +359,10 @@
                             files.iterkeys(), filectxfn, ui.username(),
                             '%d %d' % util.makedate())
         initnode = mc.commit()
-        hexfn = ui.debugflag and hex or short
+        if ui.debugflag:
+            hexfn = hex
+        else:
+            hexfn = short
         ui.status(_('added commit %s with %d files\n')
                   % (hexfn(initnode), len(files)))
 
@@ -475,7 +478,10 @@
         if dirpath in replacements:
             return replacements[dirpath]
         head, _ = os.path.split(dirpath)
-        head = head and rename(head) or ''
+        if head:
+            head = rename(head)
+        else:
+            head = ''
         renamed = os.path.join(head, wordgen.next())
         replacements[dirpath] = renamed
         return renamed
--- a/contrib/win32/ReadMe.html	Thu Apr 16 22:33:53 2015 +0900
+++ b/contrib/win32/ReadMe.html	Thu Apr 16 20:57:51 2015 -0500
@@ -140,7 +140,7 @@
     </p>
 
     <p>
-      Mercurial is Copyright 2005-2014 Matt Mackall and others. See
+      Mercurial is Copyright 2005-2015 Matt Mackall and others. See
       the <tt>Contributors.txt</tt> file for a list of contributors.
     </p>
 
--- a/contrib/win32/mercurial.iss	Thu Apr 16 22:33:53 2015 +0900
+++ b/contrib/win32/mercurial.iss	Thu Apr 16 20:57:51 2015 -0500
@@ -21,7 +21,7 @@
 #endif
 
 [Setup]
-AppCopyright=Copyright 2005-2010 Matt Mackall and others
+AppCopyright=Copyright 2005-2015 Matt Mackall and others
 AppName=Mercurial
 #if ARCH == "x64"
 AppVerName=Mercurial {#VERSION} (64-bit)
@@ -44,7 +44,7 @@
 DefaultDirName={pf}\Mercurial
 SourceDir=..\..
 VersionInfoDescription=Mercurial distributed SCM (version {#VERSION})
-VersionInfoCopyright=Copyright 2005-2010 Matt Mackall and others
+VersionInfoCopyright=Copyright 2005-2015 Matt Mackall and others
 VersionInfoCompany=Matt Mackall and others
 InternalCompressLevel=max
 SolidCompression=true
Binary file contrib/wix/COPYING.rtf has changed
--- a/doc/hgmanpage.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/doc/hgmanpage.py	Thu Apr 16 20:57:51 2015 -0500
@@ -427,7 +427,7 @@
         pass
 
     def visit_block_quote(self, node):
-        # BUG/HACK: indent alway uses the _last_ indention,
+        # BUG/HACK: indent always uses the _last_ indention,
         # thus we need two of them.
         self.indent(BLOCKQOUTE_INDENT)
         self.indent(0)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/censor.py	Thu Apr 16 20:57:51 2015 -0500
@@ -0,0 +1,168 @@
+# Copyright (C) 2015 - Mike Edgar <adgar@google.com>
+#
+# This extension enables removal of file content at a given revision,
+# rewriting the data/metadata of successive revisions to preserve revision log
+# integrity.
+
+"""erase file content at a given revision
+
+The censor command instructs Mercurial to erase all content of a file at a given
+revision *without updating the changeset hash.* This allows existing history to
+remain valid while preventing future clones/pulls from receiving the erased
+data.
+
+Typical uses for censor are due to security or legal requirements, including::
+
+ * Passwords, private keys, cryptographic material
+ * Licensed data/code/libraries for which the license has expired
+ * Personally Identifiable Information or other private data
+
+Censored file revisions are listed in a tracked file called .hgcensored stored
+in the repository root. The censor command adds an entry to the .hgcensored file
+in the working directory and commits it (much like ``hg tag`` and .hgtags). The
+censored file data is then replaced with a pointer to the new commit, enabling
+verification.
+
+Censored nodes can interrupt mercurial's typical operation whenever the excised
+data needs to be materialized. Some commands, like ``hg cat``/``hg revert``,
+simply fail when asked to produce censored data. Others, like ``hg verify`` and
+``hg update``, must be capable of tolerating censored data to continue to
+function in a meaningful way. Such commands only tolerate censored file
+revisions if they are allowed by the policy specified by the "censor.allow"
+config option.
+"""
+
+from mercurial.node import short
+from mercurial import cmdutil, error, filelog, revlog, scmutil, util
+from mercurial.i18n import _
+
+cmdtable = {}
+command = cmdutil.command(cmdtable)
+testedwith = 'internal'
+
+@command('censor',
+    [('r', 'rev', '', _('censor file from specified revision'), _('REV')),
+     ('t', 'tombstone', '', _('replacement tombstone data'), _('TEXT'))],
+    _('-r REV [-t TEXT] [FILE]'))
+def censor(ui, repo, path, rev='', tombstone='', **opts):
+    if not path:
+        raise util.Abort(_('must specify file path to censor'))
+    if not rev:
+        raise util.Abort(_('must specify revision to censor'))
+
+    flog = repo.file(path)
+    if not len(flog):
+        raise util.Abort(_('cannot censor file with no history'))
+
+    rev = scmutil.revsingle(repo, rev, rev).rev()
+    try:
+        ctx = repo[rev]
+    except KeyError:
+        raise util.Abort(_('invalid revision identifier %s') % rev)
+
+    try:
+        fctx = ctx.filectx(path)
+    except error.LookupError:
+        raise util.Abort(_('file does not exist at revision %s') % rev)
+
+    fnode = fctx.filenode()
+    headctxs = [repo[c] for c in repo.heads()]
+    heads = [c for c in headctxs if path in c and c.filenode(path) == fnode]
+    if heads:
+        headlist = ', '.join([short(c.node()) for c in heads])
+        raise util.Abort(_('cannot censor file in heads (%s)') % headlist,
+            hint=_('clean/delete and commit first'))
+
+    wctx = repo[None]
+    wp = wctx.parents()
+    if ctx.node() in [p.node() for p in wp]:
+        raise util.Abort(_('cannot censor working directory'),
+            hint=_('clean/delete/update first'))
+
+    flogv = flog.version & 0xFFFF
+    if flogv != revlog.REVLOGNG:
+        raise util.Abort(
+            _('censor does not support revlog version %d') % (flogv,))
+
+    tombstone = filelog.packmeta({"censored": tombstone}, "")
+
+    crev = fctx.filerev()
+
+    if len(tombstone) > flog.rawsize(crev):
+        raise util.Abort(_(
+            'censor tombstone must be no longer than censored data'))
+
+    # Using two files instead of one makes it easy to rewrite entry-by-entry
+    idxread = repo.svfs(flog.indexfile, 'r')
+    idxwrite = repo.svfs(flog.indexfile, 'wb', atomictemp=True)
+    if flog.version & revlog.REVLOGNGINLINEDATA:
+        dataread, datawrite = idxread, idxwrite
+    else:
+        dataread = repo.svfs(flog.datafile, 'r')
+        datawrite = repo.svfs(flog.datafile, 'wb', atomictemp=True)
+
+    # Copy all revlog data up to the entry to be censored.
+    rio = revlog.revlogio()
+    offset = flog.start(crev)
+
+    for chunk in util.filechunkiter(idxread, limit=crev * rio.size):
+        idxwrite.write(chunk)
+    for chunk in util.filechunkiter(dataread, limit=offset):
+        datawrite.write(chunk)
+
+    def rewriteindex(r, newoffs, newdata=None):
+        """Rewrite the index entry with a new data offset and optional new data.
+
+        The newdata argument, if given, is a tuple of three positive integers:
+        (new compressed, new uncompressed, added flag bits).
+        """
+        offlags, comp, uncomp, base, link, p1, p2, nodeid = flog.index[r]
+        flags = revlog.gettype(offlags)
+        if newdata:
+            comp, uncomp, nflags = newdata
+            flags |= nflags
+        offlags = revlog.offset_type(newoffs, flags)
+        e = (offlags, comp, uncomp, r, link, p1, p2, nodeid)
+        idxwrite.write(rio.packentry(e, None, flog.version, r))
+        idxread.seek(rio.size, 1)
+
+    def rewrite(r, offs, data, nflags=revlog.REVIDX_DEFAULT_FLAGS):
+        """Write the given full text to the filelog with the given data offset.
+
+        Returns:
+            The integer number of data bytes written, for tracking data offsets.
+        """
+        flag, compdata = flog.compress(data)
+        newcomp = len(flag) + len(compdata)
+        rewriteindex(r, offs, (newcomp, len(data), nflags))
+        datawrite.write(flag)
+        datawrite.write(compdata)
+        dataread.seek(flog.length(r), 1)
+        return newcomp
+
+    # Rewrite censored revlog entry with (padded) tombstone data.
+    pad = ' ' * (flog.rawsize(crev) - len(tombstone))
+    offset += rewrite(crev, offset, tombstone + pad, revlog.REVIDX_ISCENSORED)
+
+    # Rewrite all following filelog revisions fixing up offsets and deltas.
+    for srev in xrange(crev + 1, len(flog)):
+        if crev in flog.parentrevs(srev):
+            # Immediate children of censored node must be re-added as fulltext.
+            try:
+                revdata = flog.revision(srev)
+            except error.CensoredNodeError, e:
+                revdata = e.tombstone
+            dlen = rewrite(srev, offset, revdata)
+        else:
+            # Copy any other revision data verbatim after fixing up the offset.
+            rewriteindex(srev, offset)
+            dlen = flog.length(srev)
+            for chunk in util.filechunkiter(dataread, limit=dlen):
+                datawrite.write(chunk)
+        offset += dlen
+
+    idxread.close()
+    idxwrite.close()
+    if dataread is not idxread:
+        dataread.close()
+        datawrite.close()
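
The space constraint enforced above is that the packed tombstone, plus space padding, must occupy exactly the raw size of the censored revision so that later revlog offsets remain valid. A standalone sketch of that invariant (the metadata framing imitates filelog.packmeta; the raw size is a made-up number)::

  def maketombstone(text, rawsize):
      # imitate filelog.packmeta({"censored": text}, ""): metadata framed by \1\n markers
      packed = '\x01\ncensored: %s\n\x01\n' % text
      if len(packed) > rawsize:
          raise ValueError('censor tombstone must be no longer than censored data')
      # pad with spaces so the rewritten entry fills exactly the original raw size
      return packed + ' ' * (rawsize - len(packed))

  data = maketombstone('removed per legal request', 4096)
  assert len(data) == 4096
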
--- a/hgext/children.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/children.py	Thu Apr 16 20:57:51 2015 -0500
@@ -39,11 +39,13 @@
     """
     rev = opts.get('rev')
     if file_:
-        ctx = repo.filectx(file_, changeid=rev)
+        fctx = repo.filectx(file_, changeid=rev)
+        childctxs = [fcctx.changectx() for fcctx in fctx.children()]
     else:
         ctx = repo[rev]
+        childctxs = ctx.children()
 
     displayer = cmdutil.show_changeset(ui, repo, opts)
-    for cctx in ctx.children():
+    for cctx in childctxs:
         displayer.show(cctx)
     displayer.close()
--- a/hgext/churn.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/churn.py	Thu Apr 16 20:57:51 2015 -0500
@@ -46,7 +46,7 @@
             date = datetime.datetime(*time.gmtime(float(t) - tz)[:6])
             return date.strftime(opts['dateformat'])
     else:
-        tmpl = opts.get('template', '{author|email}')
+        tmpl = opts.get('oldtemplate') or opts.get('template')
         tmpl = maketemplater(ui, repo, tmpl)
         def getkey(ctx):
             ui.pushbuffer()
@@ -95,7 +95,9 @@
      _('count rate for the specified revision or revset'), _('REV')),
     ('d', 'date', '',
      _('count rate for revisions matching date spec'), _('DATE')),
-    ('t', 'template', '{author|email}',
+    ('t', 'oldtemplate', '',
+     _('template to group changesets (DEPRECATED)'), _('TEMPLATE')),
+    ('T', 'template', '{author|email}',
      _('template to group changesets'), _('TEMPLATE')),
     ('f', 'dateformat', '',
      _('strftime-compatible format for grouping by date'), _('FORMAT')),
--- a/hgext/color.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/color.py	Thu Apr 16 20:57:51 2015 -0500
@@ -140,6 +140,17 @@
 either using ansi mode (or auto mode), or by using less -r (which will
 pass through all terminal control codes, not just color control
 codes).
+
+On some systems (such as MSYS in Windows), the terminal may support
+a different color mode than the pager (activated via the "pager"
+extension). It is possible to define separate modes depending on whether
+the pager is active::
+
+  [color]
+  mode = auto
+  pagermode = ansi
+
+If ``pagermode`` is not defined, the ``mode`` will be used.
 '''
 
 import os
@@ -213,20 +224,41 @@
     formatted = always or (os.environ.get('TERM') != 'dumb' and ui.formatted())
 
     mode = ui.config('color', 'mode', 'auto')
+
+    # If pager is active, color.pagermode overrides color.mode.
+    if getattr(ui, 'pageractive', False):
+        mode = ui.config('color', 'pagermode', mode)
+
     realmode = mode
     if mode == 'auto':
-        if os.name == 'nt' and 'TERM' not in os.environ:
-            # looks line a cmd.exe console, use win32 API or nothing
-            realmode = 'win32'
+        if os.name == 'nt':
+            term = os.environ.get('TERM')
+            # TERM won't be defined in a vanilla cmd.exe environment.
+
+            # UNIX-like environments on Windows such as Cygwin and MSYS will
+            # set TERM. They appear to make a best effort attempt at setting it
+            # to something appropriate. However, not all environments with TERM
+            # defined support ANSI. Since "ansi" could result in terminal
+            # gibberish, we err on the side of selecting "win32". However, if
+            # w32effects is not defined, we almost certainly don't support
+            # "win32", so don't even try.
+            if (term and 'xterm' in term) or not w32effects:
+                realmode = 'ansi'
+            else:
+                realmode = 'win32'
         else:
             realmode = 'ansi'
 
+    def modewarn():
+        # only warn if color.mode was explicitly set and we're in
+        # an interactive terminal
+        if mode == realmode and ui.interactive():
+            ui.warn(_('warning: failed to set color mode to %s\n') % mode)
+
     if realmode == 'win32':
         _terminfo_params = {}
         if not w32effects:
-            if mode == 'win32':
-                # only warn if color.mode is explicitly set to win32
-                ui.warn(_('warning: failed to set color mode to %s\n') % mode)
+            modewarn()
             return None
         _effects.update(w32effects)
     elif realmode == 'ansi':
@@ -234,10 +266,8 @@
     elif realmode == 'terminfo':
         _terminfosetup(ui, mode)
         if not _terminfo_params:
-            if mode == 'terminfo':
-                ## FIXME Shouldn't we return None in this case too?
-                # only warn if color.mode is explicitly set to win32
-                ui.warn(_('warning: failed to set color mode to %s\n') % mode)
+            ## FIXME Shouldn't we return None in this case too?
+            modewarn()
             realmode = 'ansi'
     else:
         return None
--- a/hgext/convert/bzr.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/convert/bzr.py	Thu Apr 16 20:57:51 2015 -0500
@@ -143,7 +143,8 @@
         parentids = self._parentids.pop(version)
         # only diff against first parent id
         prevtree = self.sourcerepo.revision_tree(parentids[0])
-        return self._gettreechanges(self._revtree, prevtree)
+        files, changes = self._gettreechanges(self._revtree, prevtree)
+        return files, changes, set()
 
     def getcommit(self, version):
         rev = self.sourcerepo.get_revision(version)
--- a/hgext/convert/common.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/convert/common.py	Thu Apr 16 20:57:51 2015 -0500
@@ -31,7 +31,10 @@
 def checktool(exe, name=None, abort=True):
     name = name or exe
     if not util.findexe(exe):
-        exc = abort and util.Abort or MissingTool
+        if abort:
+            exc = util.Abort
+        else:
+            exc = MissingTool
         raise exc(_('cannot find required "%s" tool') % name)
 
 class NoRepo(Exception):
@@ -94,7 +97,7 @@
         raise NotImplementedError
 
     def getchanges(self, version, full):
-        """Returns a tuple of (files, copies).
+        """Returns a tuple of (files, copies, cleanp2).
 
         files is a sorted list of (filename, id) tuples for all files
         changed between version and its first parent returned by
@@ -102,6 +105,10 @@
         id is the source revision id of the file.
 
         copies is a dictionary of dest: source
+
+        cleanp2 is the set of filenames that are clean against p2.
+        (Files that are clean against p1 are already not in files (unless
+        full). This makes it possible to handle p2 clean files similarly.)
         """
         raise NotImplementedError
 
@@ -212,7 +219,8 @@
         mapping equivalent authors identifiers for each system."""
         return None
 
-    def putcommit(self, files, copies, parents, commit, source, revmap, full):
+    def putcommit(self, files, copies, parents, commit, source, revmap, full,
+                  cleanp2):
         """Create a revision with all changed files listed in 'files'
         and having listed parents. 'commit' is a commit object
         containing at a minimum the author, date, and message for this
@@ -222,6 +230,8 @@
         of source revisions to converted revisions. Only getfile() and
         lookuprev() should be called on 'source'. 'full' means that 'files'
         is complete and all other files should be removed.
+        'cleanp2' is a set of the filenames that are unchanged from p2
+        (only in the common merge case where there are two parents).
 
         Note that the sink repository is not told to update itself to
         a particular revision (or even what that revision would be)
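
Concretely, getchanges() now returns a three-element tuple and putcommit() consumes the extra set; a toy value with invented file names::

  files   = [('src/main.c', 'rev-id-1'), ('docs/README', 'rev-id-2')]  # (filename, id) pairs
  copies  = {'docs/README': 'README.old'}                              # dest -> source
  cleanp2 = {'vendor/lib.c'}           # unchanged relative to the second merge parent
  result  = (files, copies, cleanp2)   # shape of the new getchanges() return value
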
--- a/hgext/convert/convcmd.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/convert/convcmd.py	Thu Apr 16 20:57:51 2015 -0500
@@ -397,7 +397,7 @@
                 dest = self.map[changes]
             self.map[rev] = dest
             return
-        files, copies = changes
+        files, copies, cleanp2 = changes
         pbranches = []
         if commit.parents:
             for prev in commit.parents:
@@ -413,9 +413,19 @@
             parents = [self.map.get(p, p) for p in parents]
         except KeyError:
             parents = [b[0] for b in pbranches]
-        source = progresssource(self.ui, self.source, len(files))
+        if len(pbranches) != 2:
+            cleanp2 = set()
+        if len(parents) < 3:
+            source = progresssource(self.ui, self.source, len(files))
+        else:
+            # For an octopus merge, we end up traversing the list of
+            # changed files N-1 times. This tweak to the number of
+            # files makes it so the progress bar doesn't overflow
+            # itself.
+            source = progresssource(self.ui, self.source,
+                                    len(files) * (len(parents) - 1))
         newnode = self.dest.putcommit(files, copies, parents, commit,
-                                      source, self.map, full)
+                                      source, self.map, full, cleanp2)
         source.close()
         self.source.converted(rev, newnode)
         self.map[rev] = newnode
@@ -515,7 +525,11 @@
     sortmode = [m for m in sortmodes if opts.get(m)]
     if len(sortmode) > 1:
         raise util.Abort(_('more than one sort mode specified'))
-    sortmode = sortmode and sortmode[0] or defaultsort
+    if sortmode:
+        sortmode = sortmode[0]
+    else:
+        sortmode = defaultsort
+
     if sortmode == 'sourcesort' and not srcc.hasnativeorder():
         raise util.Abort(_('--sourcesort is not supported by this data source'))
     if sortmode == 'closesort' and not srcc.hasnativeclose():
@@ -531,4 +545,3 @@
 
     c = converter(ui, srcc, destc, revmapfile, opts)
     c.convert(sortmode)
-
--- a/hgext/convert/cvs.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/convert/cvs.py	Thu Apr 16 20:57:51 2015 -0500
@@ -262,7 +262,7 @@
         if full:
             raise util.Abort(_("convert from cvs do not support --full"))
         self._parse()
-        return sorted(self.files[rev].iteritems()), {}
+        return sorted(self.files[rev].iteritems()), {}, set()
 
     def getcommit(self, rev):
         self._parse()
--- a/hgext/convert/cvsps.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/convert/cvsps.py	Thu Apr 16 20:57:51 2015 -0500
@@ -634,14 +634,21 @@
         # By this point, the changesets are sufficiently compared that
         # we don't really care about ordering. However, this leaves
         # some race conditions in the tests, so we compare on the
-        # number of files modified and the number of branchpoints in
-        # each changeset to ensure test output remains stable.
+        # number of files modified, the files contained in each
+        # changeset, and the branchpoints in the change to ensure test
+        # output remains stable.
 
         # recommended replacement for cmp from
         # https://docs.python.org/3.0/whatsnew/3.0.html
         c = lambda x, y: (x > y) - (x < y)
+        # Sort bigger changes first.
         if not d:
             d = c(len(l.entries), len(r.entries))
+        # Try sorting by filename in the change.
+        if not d:
+            d = c([e.file for e in l.entries], [e.file for e in r.entries])
+        # Try and put changes without a branch point before ones with
+        # a branch point.
         if not d:
             d = c(len(l.branchpoints), len(r.branchpoints))
         return d
--- a/hgext/convert/darcs.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/convert/darcs.py	Thu Apr 16 20:57:51 2015 -0500
@@ -188,7 +188,7 @@
                 changes.append((elt.text.strip(), rev))
         self.pull(rev)
         self.lastrev = rev
-        return sorted(changes), copies
+        return sorted(changes), copies, set()
 
     def getfile(self, name, rev):
         if rev != self.lastrev:
--- a/hgext/convert/filemap.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/convert/filemap.py	Thu Apr 16 20:57:51 2015 -0500
@@ -384,12 +384,15 @@
         # Get the real changes and do the filtering/mapping. To be
         # able to get the files later on in getfile, we hide the
         # original filename in the rev part of the return value.
-        changes, copies = self.base.getchanges(rev, full)
+        changes, copies, cleanp2 = self.base.getchanges(rev, full)
         files = {}
+        ncleanp2 = set(cleanp2)
         for f, r in changes:
             newf = self.filemapper(f)
             if newf and (newf != f or newf not in files):
                 files[newf] = (f, r)
+                if newf != f:
+                    ncleanp2.discard(f)
         files = sorted(files.items())
 
         ncopies = {}
@@ -400,7 +403,7 @@
                 if newsource:
                     ncopies[newc] = newsource
 
-        return files, ncopies
+        return files, ncopies, ncleanp2
 
     def getfile(self, name, rev):
         realname, realrev = rev
--- a/hgext/convert/git.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/convert/git.py	Thu Apr 16 20:57:51 2015 -0500
@@ -264,7 +264,7 @@
             else:
                 self.retrievegitmodules(version)
                 changes.append(('.hgsubstate', ''))
-        return (changes, copies)
+        return (changes, copies, set())
 
     def getcommit(self, version):
         c = self.catfile(version, "commit") # read the commit hash
--- a/hgext/convert/gnuarch.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/convert/gnuarch.py	Thu Apr 16 20:57:51 2015 -0500
@@ -171,7 +171,7 @@
             copies.update(cps)
 
         self.lastrev = rev
-        return sorted(set(changes)), copies
+        return sorted(set(changes)), copies, set()
 
     def getcommit(self, rev):
         changes = self.changes[rev]
@@ -209,7 +209,10 @@
         mode = os.lstat(os.path.join(self.tmppath, name)).st_mode
         if stat.S_ISLNK(mode):
             data = os.readlink(os.path.join(self.tmppath, name))
-            mode = mode and 'l' or ''
+            if mode:
+                mode = 'l'
+            else:
+                mode = ''
         else:
             data = open(os.path.join(self.tmppath, name), 'rb').read()
             mode = (mode & 0111) and 'x' or ''
--- a/hgext/convert/hg.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/convert/hg.py	Thu Apr 16 20:57:51 2015 -0500
@@ -87,7 +87,10 @@
         if not branch:
             branch = 'default'
         pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
-        pbranch = pbranches and pbranches[0][1] or 'default'
+        if pbranches:
+            pbranch = pbranches[0][1]
+        else:
+            pbranch = 'default'
 
         branchpath = os.path.join(self.path, branch)
         if setbranch:
@@ -129,9 +132,14 @@
             fp.write('%s %s\n' % (revid, s[1]))
         return fp.getvalue()
 
-    def putcommit(self, files, copies, parents, commit, source, revmap, full):
+    def putcommit(self, files, copies, parents, commit, source, revmap, full,
+                  cleanp2):
         files = dict(files)
+
         def getfilectx(repo, memctx, f):
+            if p2ctx and f in cleanp2 and f not in copies:
+                self.ui.debug('reusing %s from p2\n' % f)
+                return p2ctx[f]
             try:
                 v = files[f]
             except KeyError:
@@ -196,6 +204,9 @@
         while parents:
             p1 = p2
             p2 = parents.pop(0)
+            p2ctx = None
+            if p2 != nullid:
+                p2ctx = self.repo[p2]
             fileset = set(files)
             if full:
                 fileset.update(self.repo[p1])
@@ -379,9 +390,13 @@
         # getcopies() is also run for roots and before filtering so missing
         # revlogs are detected early
         copies = self.getcopies(ctx, parents, copyfiles)
+        cleanp2 = set()
+        if len(parents) == 2:
+            cleanp2.update(self.repo.status(parents[1].node(), ctx.node(),
+                                            clean=True).clean)
         changes = [(f, rev) for f in files if f not in self.ignored]
         changes.sort()
-        return changes, copies
+        return changes, copies, cleanp2
 
     def getcopies(self, ctx, parents, files):
         copies = {}
--- a/hgext/convert/monotone.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/convert/monotone.py	Thu Apr 16 20:57:51 2015 -0500
@@ -280,7 +280,7 @@
             for fromfile in renamed.values():
                 files[fromfile] = rev
 
-        return (files.items(), copies)
+        return (files.items(), copies, set())
 
     def getfile(self, name, rev):
         if not self.mtnisfile(name, rev):
@@ -297,7 +297,7 @@
         extra = {}
         certs = self.mtngetcerts(rev)
         if certs.get('suspend') == certs["branch"]:
-            extra['close'] = '1'
+            extra['close'] = 1
         return commit(
             author=certs["author"],
             date=util.datestr(util.strdate(certs["date"], "%Y-%m-%dT%H:%M:%S")),
--- a/hgext/convert/p4.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/convert/p4.py	Thu Apr 16 20:57:51 2015 -0500
@@ -195,7 +195,7 @@
     def getchanges(self, rev, full):
         if full:
             raise util.Abort(_("convert from p4 do not support --full"))
-        return self.files[rev], {}
+        return self.files[rev], {}, set()
 
     def getcommit(self, rev):
         return self.changeset[rev]
--- a/hgext/convert/subversion.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/convert/subversion.py	Thu Apr 16 20:57:51 2015 -0500
@@ -474,7 +474,7 @@
             (files, copies) = self._getchanges(rev, full)
             # caller caches the result, so free it here to release memory
             del self.paths[rev]
-        return (files, copies)
+        return (files, copies, set())
 
     def getchangedfiles(self, rev, i):
         # called from filemap - cache computed values for reuse in getchanges
@@ -871,8 +871,16 @@
             if self.ui.configbool('convert', 'localtimezone'):
                 date = makedatetimestamp(date[0])
 
-            log = message and self.recode(message) or ''
-            author = author and self.recode(author) or ''
+            if message:
+                log = self.recode(message)
+            else:
+                log = ''
+
+            if author:
+                author = self.recode(author)
+            else:
+                author = ''
+
             try:
                 branch = self.module.split("/")[-1]
                 if branch == self.trunkname:
@@ -1118,7 +1126,10 @@
         self.opener = scmutil.opener(self.wc)
         self.wopener = scmutil.opener(self.wc)
         self.childmap = mapfile(ui, self.join('hg-childmap'))
-        self.is_exec = util.checkexec(self.wc) and util.isexec or None
+        if util.checkexec(self.wc):
+            self.is_exec = util.isexec
+        else:
+            self.is_exec = None
 
         if created:
             hook = os.path.join(created, 'hooks', 'pre-revprop-change')
@@ -1229,7 +1240,8 @@
     def revid(self, rev):
         return u"svn:%s@%s" % (self.uuid, rev)
 
-    def putcommit(self, files, copies, parents, commit, source, revmap, full):
+    def putcommit(self, files, copies, parents, commit, source, revmap, full,
+                  cleanp2):
         for parent in parents:
             try:
                 return self.revid(self.childmap[parent])
--- a/hgext/eol.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/eol.py	Thu Apr 16 20:57:51 2015 -0500
@@ -6,13 +6,13 @@
 Unix/Mac, thereby letting everybody use their OS native line endings.
 
 The extension reads its configuration from a versioned ``.hgeol``
-configuration file found in the root of the working copy. The
+configuration file found in the root of the working directory. The
 ``.hgeol`` file uses the same syntax as all other Mercurial
 configuration files. It uses two sections, ``[patterns]`` and
 ``[repository]``.
 
 The ``[patterns]`` section specifies how line endings should be
-converted between the working copy and the repository. The format is
+converted between the working directory and the repository. The format is
 specified by a file pattern. The first match is used, so put more
 specific patterns first. The available line endings are ``LF``,
 ``CRLF``, and ``BIN``.
@@ -51,7 +51,7 @@
 .. note::
 
    The rules will first apply when files are touched in the working
-   copy, e.g. by updating to null and back to tip to touch all files.
+   directory, e.g. by updating to null and back to tip to touch all files.
 
 The extension uses an optional ``[eol]`` section read from both the
 normal Mercurial configuration files and the ``.hgeol`` file, with the
--- a/hgext/extdiff.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/extdiff.py	Thu Apr 16 20:57:51 2015 -0500
@@ -276,6 +276,7 @@
 
 def uisetup(ui):
     for cmd, path in ui.configitems('extdiff'):
+        path = util.expandpath(path)
         if cmd.startswith('cmd.'):
             cmd = cmd[4:]
             if not path:
--- a/hgext/fetch.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/fetch.py	Thu Apr 16 20:57:51 2015 -0500
@@ -56,8 +56,8 @@
     except error.RepoLookupError:
         branchnode = None
     if parent != branchnode:
-        raise util.Abort(_('working dir not at branch tip '
-                           '(use "hg update" to check out branch tip)'))
+        raise util.Abort(_('working directory not at branch tip'),
+                         hint=_('use "hg update" to check out branch tip'))
 
     wlock = lock = None
     try:
--- a/hgext/graphlog.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/graphlog.py	Thu Apr 16 20:57:51 2015 -0500
@@ -54,4 +54,5 @@
     Nodes printed as an @ character are parents of the working
     directory.
     """
-    return cmdutil.graphlog(ui, repo, *pats, **opts)
+    opts['graph'] = True
+    return commands.log(ui, repo, *pats, **opts)
--- a/hgext/hgcia.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/hgcia.py	Thu Apr 16 20:57:51 2015 -0500
@@ -121,7 +121,10 @@
         return patch.diffstat(pbuf.lines) or ''
 
     def logmsg(self):
-        diffstat = self.cia.diffstat and self.diffstat() or ''
+        if self.cia.diffstat:
+            diffstat = self.diffstat()
+        else:
+            diffstat = ''
         self.cia.ui.pushbuffer()
         self.cia.templater.show(self.ctx, changes=self.ctx.changeset(),
                                 baseurl=self.cia.ui.config('web', 'baseurl'),
@@ -199,7 +202,10 @@
         style = self.ui.config('cia', 'style')
         template = self.ui.config('cia', 'template')
         if not template:
-            template = self.diffstat and self.dstemplate or self.deftemplate
+            if self.diffstat:
+                template = self.dstemplate
+            else:
+                template = self.deftemplate
         template = templater.parsestring(template, quoted=False)
         t = cmdutil.changeset_templater(self.ui, self.repo, False, None,
                                         template, style, False)
--- a/hgext/hgk.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/hgk.py	Thu Apr 16 20:57:51 2015 -0500
@@ -35,7 +35,7 @@
 '''
 
 import os
-from mercurial import cmdutil, commands, patch, revlog, scmutil
+from mercurial import cmdutil, commands, patch, scmutil, obsolete
 from mercurial.node import nullid, nullrev, short
 from mercurial.i18n import _
 
@@ -50,7 +50,7 @@
     ('s', 'stdin', None, _('stdin')),
     ('C', 'copy', None, _('detect copies')),
     ('S', 'search', "", _('search'))],
-    ('hg git-diff-tree [OPTION]... NODE1 NODE2 [FILE]...'),
+    ('[OPTION]... NODE1 NODE2 [FILE]...'),
     inferrepo=True)
 def difftree(ui, repo, node1=None, node2=None, *files, **opts):
     """diff trees from two commits"""
@@ -117,17 +117,16 @@
 
     date = ctx.date()
     description = ctx.description().replace("\0", "")
-    lines = description.splitlines()
-    if lines and lines[-1].startswith('committer:'):
-        committer = lines[-1].split(': ')[1].rstrip()
-    else:
-        committer = ""
+    ui.write(("author %s %s %s\n" % (ctx.user(), int(date[0]), date[1])))
 
-    ui.write(("author %s %s %s\n" % (ctx.user(), int(date[0]), date[1])))
-    if committer != '':
-        ui.write(("committer %s %s %s\n" % (committer, int(date[0]), date[1])))
+    if 'committer' in ctx.extra():
+        ui.write(("committer %s\n" % ctx.extra()['committer']))
+
     ui.write(("revision %d\n" % ctx.rev()))
     ui.write(("branch %s\n" % ctx.branch()))
+    if obsolete.isenabled(repo, obsolete.createmarkersopt):
+        if ctx.obsolete():
+            ui.write(("obsolete\n"))
     ui.write(("phase %s\n\n" % ctx.phasestr()))
 
     if prefix != "":
@@ -138,7 +137,7 @@
     if prefix:
         ui.write('\0')
 
-@command('debug-merge-base', [], _('hg debug-merge-base REV REV'))
+@command('debug-merge-base', [], _('REV REV'))
 def base(ui, repo, node1, node2):
     """output common ancestor information"""
     node1 = repo.lookup(node1)
@@ -148,7 +147,7 @@
 
 @command('debug-cat-file',
     [('s', 'stdin', None, _('stdin'))],
-    _('hg debug-cat-file [OPTION]... TYPE FILE'),
+    _('[OPTION]... TYPE FILE'),
     inferrepo=True)
 def catfile(ui, repo, type=None, r=None, **opts):
     """cat a specific revision"""
@@ -298,22 +297,6 @@
                 break
             count += 1
 
-@command('debug-rev-parse',
-    [('', 'default', '', _('ignored'))],
-    _('hg debug-rev-parse REV'))
-def revparse(ui, repo, *revs, **opts):
-    """parse given revisions"""
-    def revstr(rev):
-        if rev == 'HEAD':
-            rev = 'tip'
-        return revlog.hex(repo.lookup(rev))
-
-    for r in revs:
-        revrange = r.split(':', 1)
-        ui.write('%s\n' % revstr(revrange[0]))
-        if len(revrange) == 2:
-            ui.write('^%s\n' % revstr(revrange[1]))
-
 # git rev-list tries to order things by date, and has the ability to stop
 # at a given commit without walking the whole repo.  TODO add the stop
 # parameter
@@ -322,7 +305,7 @@
     ('t', 'topo-order', None, _('topo-order')),
     ('p', 'parents', None, _('parents')),
     ('n', 'max-count', 0, _('max-count'))],
-    ('hg debug-rev-list [OPTION]... REV...'))
+    ('[OPTION]... REV...'))
 def revlist(ui, repo, *revs, **opts):
     """print revisions"""
     if opts['header']:
@@ -332,23 +315,17 @@
     copy = [x for x in revs]
     revtree(ui, copy, repo, full, opts['max_count'], opts['parents'])
 
-@command('debug-config', [], _('hg debug-config'))
-def config(ui, repo, **opts):
-    """print extension options"""
-    def writeopt(name, value):
-        ui.write(('k=%s\nv=%s\n' % (name, value)))
-
-    writeopt('vdiff', ui.config('hgk', 'vdiff', ''))
-
-
 @command('view',
     [('l', 'limit', '',
      _('limit number of changes displayed'), _('NUM'))],
-    _('hg view [-l LIMIT] [REVRANGE]'))
+    _('[-l LIMIT] [REVRANGE]'))
 def view(ui, repo, *etc, **opts):
     "start interactive history viewer"
     os.chdir(repo.root)
     optstr = ' '.join(['--%s %s' % (k, v) for k, v in opts.iteritems() if v])
+    if repo.filtername is None:
+        optstr += '--hidden'
+
     cmd = ui.config("hgk", "path", "hgk") + " %s %s" % (optstr, " ".join(etc))
     ui.debug("running %s\n" % cmd)
     ui.system(cmd)
--- a/hgext/histedit.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/histedit.py	Thu Apr 16 20:57:51 2015 -0500
@@ -142,6 +142,13 @@
 as running ``hg histedit 836302820282``. If you plan to push to a
 repository that Mercurial does not detect to be related to the source
 repo, you can add a ``--force`` option.
+
+Histedit rule lines are truncated to 80 characters by default. You
+can customise this behaviour by setting a different length in your
+configuration file:
+
+[histedit]
+linelen = 120      # truncate rule lines at 120 characters
 """
 
 try:
@@ -156,8 +163,11 @@
 from mercurial import cmdutil
 from mercurial import discovery
 from mercurial import error
+from mercurial import changegroup
 from mercurial import copies
 from mercurial import context
+from mercurial import exchange
+from mercurial import extensions
 from mercurial import hg
 from mercurial import node
 from mercurial import repair
@@ -189,15 +199,16 @@
 """)
 
 class histeditstate(object):
-    def __init__(self, repo, parentctx=None, rules=None, keep=None,
+    def __init__(self, repo, parentctxnode=None, rules=None, keep=None,
             topmost=None, replacements=None, lock=None, wlock=None):
         self.repo = repo
         self.rules = rules
         self.keep = keep
         self.topmost = topmost
-        self.parentctx = parentctx
+        self.parentctxnode = parentctxnode
         self.lock = lock
         self.wlock = wlock
+        self.backupfile = None
         if replacements is None:
             self.replacements = []
         else:
@@ -212,23 +223,153 @@
                 raise
             raise util.Abort(_('no histedit in progress'))
 
-        parentctxnode, rules, keep, topmost, replacements = pickle.load(fp)
+        try:
+            data = pickle.load(fp)
+            parentctxnode, rules, keep, topmost, replacements = data
+            backupfile = None
+        except pickle.UnpicklingError:
+            data = self._load()
+            parentctxnode, rules, keep, topmost, replacements, backupfile = data
 
-        self.parentctx = self.repo[parentctxnode]
+        self.parentctxnode = parentctxnode
         self.rules = rules
         self.keep = keep
         self.topmost = topmost
         self.replacements = replacements
+        self.backupfile = backupfile
 
     def write(self):
         fp = self.repo.vfs('histedit-state', 'w')
-        pickle.dump((self.parentctx.node(), self.rules, self.keep,
-                     self.topmost, self.replacements), fp)
+        fp.write('v1\n')
+        fp.write('%s\n' % node.hex(self.parentctxnode))
+        fp.write('%s\n' % node.hex(self.topmost))
+        fp.write('%s\n' % self.keep)
+        fp.write('%d\n' % len(self.rules))
+        for rule in self.rules:
+            fp.write('%s%s\n' % (rule[1], rule[0]))
+        fp.write('%d\n' % len(self.replacements))
+        for replacement in self.replacements:
+            fp.write('%s%s\n' % (node.hex(replacement[0]), ''.join(node.hex(r)
+                for r in replacement[1])))
+        fp.write('%s\n' % self.backupfile)
         fp.close()
 
+    def _load(self):
+        fp = self.repo.vfs('histedit-state', 'r')
+        lines = [l[:-1] for l in fp.readlines()]
+
+        index = 0
+        lines[index] # version number
+        index += 1
+
+        parentctxnode = node.bin(lines[index])
+        index += 1
+
+        topmost = node.bin(lines[index])
+        index += 1
+
+        keep = lines[index] == 'True'
+        index += 1
+
+        # Rules
+        rules = []
+        rulelen = int(lines[index])
+        index += 1
+        for i in xrange(rulelen):
+            rule = lines[index]
+            rulehash = rule[:40]
+            ruleaction = rule[40:]
+            rules.append((ruleaction, rulehash))
+            index += 1
+
+        # Replacements
+        replacements = []
+        replacementlen = int(lines[index])
+        index += 1
+        for i in xrange(replacementlen):
+            replacement = lines[index]
+            original = node.bin(replacement[:40])
+            succ = [node.bin(replacement[i:i + 40]) for i in
+                    range(40, len(replacement), 40)]
+            replacements.append((original, succ))
+            index += 1
+
+        backupfile = lines[index]
+        index += 1
+
+        fp.close()
+
+        return parentctxnode, rules, keep, topmost, replacements, backupfile
+
     def clear(self):
         self.repo.vfs.unlink('histedit-state')
 
+class histeditaction(object):
+    def __init__(self, state, node):
+        self.state = state
+        self.repo = state.repo
+        self.node = node
+
+    @classmethod
+    def fromrule(cls, state, rule):
+        """Parses the given rule, returning an instance of the histeditaction.
+        """
+        repo = state.repo
+        rulehash = rule.strip().split(' ', 1)[0]
+        try:
+            node = repo[rulehash].node()
+        except error.RepoError:
+            raise util.Abort(_('unknown changeset %s listed') % rulehash[:12])
+        return cls(state, node)
+
+    def run(self):
+        """Runs the action. The default behavior is simply to apply the
+        action's rulectx onto the current parentctx."""
+        self.applychange()
+        self.continuedirty()
+        return self.continueclean()
+
+    def applychange(self):
+        """Applies the changes from this action's rulectx onto the current
+        parentctx, but does not commit them."""
+        repo = self.repo
+        rulectx = repo[self.node]
+        hg.update(repo, self.state.parentctxnode)
+        stats = applychanges(repo.ui, repo, rulectx, {})
+        if stats and stats[3] > 0:
+            raise error.InterventionRequired(_('Fix up the change and run '
+                                            'hg histedit --continue'))
+
+    def continuedirty(self):
+        """Continues the action when changes have been applied to the working
+        copy. The default behavior is to commit the dirty changes."""
+        repo = self.repo
+        rulectx = repo[self.node]
+
+        editor = self.commiteditor()
+        commit = commitfuncfor(repo, rulectx)
+
+        commit(text=rulectx.description(), user=rulectx.user(),
+               date=rulectx.date(), extra=rulectx.extra(), editor=editor)
+
+    def commiteditor(self):
+        """The editor to be used to edit the commit message."""
+        return False
+
+    def continueclean(self):
+        """Continues the action when the working copy is clean. The default
+        behavior is to accept the current commit as the new version of the
+        rulectx."""
+        ctx = self.repo['.']
+        if ctx.node() == self.state.parentctxnode:
+            self.repo.ui.warn(_('%s: empty changeset\n') %
+                              node.short(self.node))
+            return ctx, [(self.node, tuple())]
+        if ctx.node() == self.node:
+            # Nothing changed
+            return ctx, []
+        return ctx, [(self.node, (ctx.node(),))]
+
 def commitfuncfor(repo, src):
     """Build a commit function for the replacement of <src>
 
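The ``write()``/``_load()`` pair in the hunk above define the new plain-text
'v1' layout of ``.hg/histedit-state``. As a rough illustration (hash values
and counts below are invented, not taken from this changeset), the file
written by ``write()`` corresponds to the string built here:

    # Hypothetical sketch of the 'v1' histedit-state layout produced by
    # histeditstate.write() above; every value is made up for illustration.
    statefile = '\n'.join([
        'v1',                # version marker skipped over by _load()
        'a' * 40,            # parentctxnode as 40-char hex
        'b' * 40,            # topmost as 40-char hex
        'False',             # keep flag, written as str(True)/str(False)
        '2',                 # number of remaining rules
        'c' * 40 + 'pick',   # each rule: hash immediately followed by action
        'd' * 40 + 'fold',
        '0',                 # number of replacements
        'None',              # backupfile (str(None) when no backup was made)
    ]) + '\n'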
@@ -345,121 +486,120 @@
                          editor=editor)
     return repo.commitctx(new)
 
-def pick(ui, state, ha, opts):
-    repo, ctx = state.repo, state.parentctx
-    oldctx = repo[ha]
-    if oldctx.parents()[0] == ctx:
-        ui.debug('node %s unchanged\n' % ha)
-        return oldctx, []
-    hg.update(repo, ctx.node())
-    stats = applychanges(ui, repo, oldctx, opts)
-    if stats and stats[3] > 0:
-        raise error.InterventionRequired(_('Fix up the change and run '
-                                           'hg histedit --continue'))
-    # drop the second merge parent
-    commit = commitfuncfor(repo, oldctx)
-    n = commit(text=oldctx.description(), user=oldctx.user(),
-               date=oldctx.date(), extra=oldctx.extra())
-    if n is None:
-        ui.warn(_('%s: empty changeset\n') % node.hex(ha))
-        return ctx, []
-    new = repo[n]
-    return new, [(oldctx.node(), (n,))]
+class pick(histeditaction):
+    def run(self):
+        rulectx = self.repo[self.node]
+        if rulectx.parents()[0].node() == self.state.parentctxnode:
+            self.repo.ui.debug('node %s unchanged\n' % node.short(self.node))
+            return rulectx, []
+
+        return super(pick, self).run()
 
+class edit(histeditaction):
+    def run(self):
+        repo = self.repo
+        rulectx = repo[self.node]
+        hg.update(repo, self.state.parentctxnode)
+        applychanges(repo.ui, repo, rulectx, {})
+        raise error.InterventionRequired(
+            _('Make changes as needed, you may commit or record as needed '
+              'now.\nWhen you are finished, run hg histedit --continue to '
+              'resume.'))
+
+    def commiteditor(self):
+        return cmdutil.getcommiteditor(edit=True, editform='histedit.edit')
 
-def edit(ui, state, ha, opts):
-    repo, ctx = state.repo, state.parentctx
-    oldctx = repo[ha]
-    hg.update(repo, ctx.node())
-    applychanges(ui, repo, oldctx, opts)
-    raise error.InterventionRequired(
-        _('Make changes as needed, you may commit or record as needed now.\n'
-          'When you are finished, run hg histedit --continue to resume.'))
+class fold(histeditaction):
+    def continuedirty(self):
+        repo = self.repo
+        rulectx = repo[self.node]
 
-def rollup(ui, state, ha, opts):
-    rollupopts = opts.copy()
-    rollupopts['rollup'] = True
-    return fold(ui, state, ha, rollupopts)
+        commit = commitfuncfor(repo, rulectx)
+        commit(text='fold-temp-revision %s' % node.short(self.node),
+               user=rulectx.user(), date=rulectx.date(),
+               extra=rulectx.extra())
 
-def fold(ui, state, ha, opts):
-    repo, ctx = state.repo, state.parentctx
-    oldctx = repo[ha]
-    hg.update(repo, ctx.node())
-    stats = applychanges(ui, repo, oldctx, opts)
-    if stats and stats[3] > 0:
-        raise error.InterventionRequired(
-            _('Fix up the change and run hg histedit --continue'))
-    n = repo.commit(text='fold-temp-revision %s' % ha, user=oldctx.user(),
-                    date=oldctx.date(), extra=oldctx.extra())
-    if n is None:
-        ui.warn(_('%s: empty changeset') % node.hex(ha))
-        return ctx, []
-    return finishfold(ui, repo, ctx, oldctx, n, opts, [])
+    def continueclean(self):
+        repo = self.repo
+        ctx = repo['.']
+        rulectx = repo[self.node]
+        parentctxnode = self.state.parentctxnode
+        if ctx.node() == parentctxnode:
+            repo.ui.warn(_('%s: empty changeset\n') %
+                              node.short(self.node))
+            return ctx, [(self.node, (parentctxnode,))]
+
+        parentctx = repo[parentctxnode]
+        newcommits = set(c.node() for c in repo.set('(%d::. - %d)', parentctx,
+                                                 parentctx))
+        if not newcommits:
+            repo.ui.warn(_('%s: cannot fold - working copy is not a '
+                           'descendant of previous commit %s\n') %
+                           (node.short(self.node), node.short(parentctxnode)))
+            return ctx, [(self.node, (ctx.node(),))]
+
+        middlecommits = newcommits.copy()
+        middlecommits.discard(ctx.node())
 
-def finishfold(ui, repo, ctx, oldctx, newnode, opts, internalchanges):
-    parent = ctx.parents()[0].node()
-    hg.update(repo, parent)
-    ### prepare new commit data
-    commitopts = opts.copy()
-    commitopts['user'] = ctx.user()
-    # commit message
-    if opts.get('rollup'):
-        newmessage = ctx.description()
-    else:
-        newmessage = '\n***\n'.join(
-            [ctx.description()] +
-            [repo[r].description() for r in internalchanges] +
-            [oldctx.description()]) + '\n'
-    commitopts['message'] = newmessage
-    # date
-    commitopts['date'] = max(ctx.date(), oldctx.date())
-    extra = ctx.extra().copy()
-    # histedit_source
-    # note: ctx is likely a temporary commit but that the best we can do here
-    #       This is sufficient to solve issue3681 anyway
-    extra['histedit_source'] = '%s,%s' % (ctx.hex(), oldctx.hex())
-    commitopts['extra'] = extra
-    phasebackup = repo.ui.backupconfig('phases', 'new-commit')
-    try:
-        phasemin = max(ctx.phase(), oldctx.phase())
-        repo.ui.setconfig('phases', 'new-commit', phasemin, 'histedit')
-        n = collapse(repo, ctx, repo[newnode], commitopts)
-    finally:
-        repo.ui.restoreconfig(phasebackup)
-    if n is None:
-        return ctx, []
-    hg.update(repo, n)
-    replacements = [(oldctx.node(), (newnode,)),
-                    (ctx.node(), (n,)),
-                    (newnode, (n,)),
-                   ]
-    for ich in internalchanges:
-        replacements.append((ich, (n,)))
-    return repo[n], replacements
+        return self.finishfold(repo.ui, repo, parentctx, rulectx, ctx.node(),
+                               middlecommits)
+
+    def skipprompt(self):
+        return False
 
-def drop(ui, state, ha, opts):
-    repo, ctx = state.repo, state.parentctx
-    return ctx, [(repo[ha].node(), ())]
-
+    def finishfold(self, ui, repo, ctx, oldctx, newnode, internalchanges):
+        parent = ctx.parents()[0].node()
+        hg.update(repo, parent)
+        ### prepare new commit data
+        commitopts = {}
+        commitopts['user'] = ctx.user()
+        # commit message
+        if self.skipprompt():
+            newmessage = ctx.description()
+        else:
+            newmessage = '\n***\n'.join(
+                [ctx.description()] +
+                [repo[r].description() for r in internalchanges] +
+                [oldctx.description()]) + '\n'
+        commitopts['message'] = newmessage
+        # date
+        commitopts['date'] = max(ctx.date(), oldctx.date())
+        extra = ctx.extra().copy()
+        # histedit_source
+        # note: ctx is likely a temporary commit but that's the best we can do
+        #       here. This is sufficient to solve issue3681 anyway.
+        extra['histedit_source'] = '%s,%s' % (ctx.hex(), oldctx.hex())
+        commitopts['extra'] = extra
+        phasebackup = repo.ui.backupconfig('phases', 'new-commit')
+        try:
+            phasemin = max(ctx.phase(), oldctx.phase())
+            repo.ui.setconfig('phases', 'new-commit', phasemin, 'histedit')
+            n = collapse(repo, ctx, repo[newnode], commitopts)
+        finally:
+            repo.ui.restoreconfig(phasebackup)
+        if n is None:
+            return ctx, []
+        hg.update(repo, n)
+        replacements = [(oldctx.node(), (newnode,)),
+                        (ctx.node(), (n,)),
+                        (newnode, (n,)),
+                       ]
+        for ich in internalchanges:
+            replacements.append((ich, (n,)))
+        return repo[n], replacements
 
-def message(ui, state, ha, opts):
-    repo, ctx = state.repo, state.parentctx
-    oldctx = repo[ha]
-    hg.update(repo, ctx.node())
-    stats = applychanges(ui, repo, oldctx, opts)
-    if stats and stats[3] > 0:
-        raise error.InterventionRequired(
-            _('Fix up the change and run hg histedit --continue'))
-    message = oldctx.description()
-    commit = commitfuncfor(repo, oldctx)
-    editor = cmdutil.getcommiteditor(edit=True, editform='histedit.mess')
-    new = commit(text=message, user=oldctx.user(), date=oldctx.date(),
-                 extra=oldctx.extra(), editor=editor)
-    newctx = repo[new]
-    if oldctx.node() != newctx.node():
-        return newctx, [(oldctx.node(), (new,))]
-    # We didn't make an edit, so just indicate no replaced nodes
-    return newctx, []
+class rollup(fold):
+    def skipprompt(self):
+        return True
+
+class drop(histeditaction):
+    def run(self):
+        parentctx = self.repo[self.state.parentctxnode]
+        return parentctx, [(self.node, tuple())]
+
+class message(histeditaction):
+    def commiteditor(self):
+        return cmdutil.getcommiteditor(edit=True, editform='histedit.mess')
 
 def findoutgoing(ui, repo, remote=None, force=False, opts={}):
     """utility function to find the first outgoing changeset
@@ -501,15 +641,16 @@
 
 @command('histedit',
     [('', 'commands', '',
-      _('Read history edits from the specified file.')),
+      _('read history edits from the specified file'), _('FILE')),
      ('c', 'continue', False, _('continue an edit already in progress')),
+     ('', 'edit-plan', False, _('edit remaining actions list')),
      ('k', 'keep', False,
       _("don't strip old nodes after edit is complete")),
      ('', 'abort', False, _('abort an edit in progress')),
      ('o', 'outgoing', False, _('changesets not found in destination')),
      ('f', 'force', False,
       _('force outgoing even for unrelated repositories')),
-     ('r', 'rev', [], _('first revision to be edited'))],
+     ('r', 'rev', [], _('first revision to be edited'), _('REV'))],
      _("ANCESTOR | --outgoing [URL]"))
 def histedit(ui, repo, *freeargs, **opts):
     """interactively edit changeset history
@@ -552,6 +693,7 @@
     # basic argument incompatibility processing
     outg = opts.get('outgoing')
     cont = opts.get('continue')
+    editplan = opts.get('edit_plan')
     abort = opts.get('abort')
     force = opts.get('force')
     rules = opts.get('commands', '')
@@ -560,13 +702,18 @@
     if force and not outg:
         raise util.Abort(_('--force only allowed with --outgoing'))
     if cont:
-        if util.any((outg, abort, revs, freeargs, rules)):
+        if util.any((outg, abort, revs, freeargs, rules, editplan)):
             raise util.Abort(_('no arguments allowed with --continue'))
         goal = 'continue'
     elif abort:
-        if util.any((outg, revs, freeargs, rules)):
+        if util.any((outg, revs, freeargs, rules, editplan)):
             raise util.Abort(_('no arguments allowed with --abort'))
         goal = 'abort'
+    elif editplan:
+        if util.any((outg, revs, freeargs)):
+            raise util.Abort(_('only --commands argument allowed with '
+                               '--edit-plan'))
+        goal = 'edit-plan'
     else:
         if os.path.exists(os.path.join(repo.path, 'histedit-state')):
             raise util.Abort(_('history edit already in progress, try '
@@ -579,6 +726,10 @@
                     _('only one repo argument allowed with --outgoing'))
         else:
             revs.extend(freeargs)
+            if len(revs) == 0:
+                histeditdefault = ui.config('histedit', 'defaultrev')
+                if histeditdefault:
+                    revs.append(histeditdefault)
             if len(revs) != 1:
                 raise util.Abort(
                     _('histedit requires exactly one ancestor revision'))
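The ``histedit.defaultrev`` option read above supplies a revset to use when
no revision argument is given on the command line. A hypothetical
configuration entry (the revset shown is only an example, not a value shipped
by this changeset):

    [histedit]
    defaultrev = only(.) & draft()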
@@ -589,17 +740,43 @@
 
     # rebuild state
     if goal == 'continue':
-        state = histeditstate(repo)
         state.read()
         state = bootstrapcontinue(ui, state, opts)
+    elif goal == 'edit-plan':
+        state.read()
+        if not rules:
+            comment = editcomment % (node.short(state.parentctxnode),
+                                     node.short(state.topmost))
+            rules = ruleeditor(repo, ui, state.rules, comment)
+        else:
+            if rules == '-':
+                f = sys.stdin
+            else:
+                f = open(rules)
+            rules = f.read()
+            f.close()
+        rules = [l for l in (r.strip() for r in rules.splitlines())
+                 if l and not l.startswith('#')]
+        rules = verifyrules(rules, repo, [repo[c] for [_a, c] in state.rules])
+        state.rules = rules
+        state.write()
+        return
     elif goal == 'abort':
-        state = histeditstate(repo)
         state.read()
         mapping, tmpnodes, leafs, _ntm = processreplacement(state)
         ui.debug('restore wc to old parent %s\n' % node.short(state.topmost))
+
+        # Recover our old commits if necessary
+        if state.topmost not in repo and state.backupfile:
+            backupfile = repo.join(state.backupfile)
+            f = hg.openpath(ui, backupfile)
+            gen = exchange.readbundle(ui, f, backupfile)
+            changegroup.addchangegroup(repo, gen, 'histedit',
+                                       'bundle:' + backupfile)
+            os.remove(backupfile)
+
         # check whether we should update away
         parentnodes = [c.node() for c in repo[None].parents()]
-        for n in leafs | set([state.parentctx.node()]):
+        for n in leafs | set([state.parentctxnode]):
             if n in parentnodes:
                 hg.clean(repo, state.topmost)
                 break
@@ -634,16 +811,8 @@
 
         ctxs = [repo[r] for r in revs]
         if not rules:
-            rules = '\n'.join([makedesc(c) for c in ctxs])
-            rules += '\n\n'
-            rules += editcomment % (node.short(root), node.short(topmost))
-            rules = ui.edit(rules, ui.username())
-            # Save edit rules in .hg/histedit-last-edit.txt in case
-            # the user needs to ask for help after something
-            # surprising happens.
-            f = open(repo.join('histedit-last-edit.txt'), 'w')
-            f.write(rules)
-            f.close()
+            comment = editcomment % (node.short(root), node.short(topmost))
+            rules = ruleeditor(repo, ui, [['pick', c] for c in ctxs], comment)
         else:
             if rules == '-':
                 f = sys.stdin
@@ -655,23 +824,32 @@
                  if l and not l.startswith('#')]
         rules = verifyrules(rules, repo, ctxs)
 
-        parentctx = repo[root].parents()[0]
+        parentctxnode = repo[root].parents()[0].node()
 
-        state.parentctx = parentctx
+        state.parentctxnode = parentctxnode
         state.rules = rules
         state.keep = keep
         state.topmost = topmost
         state.replacements = replacements
 
+        # Create a backup so we can always abort completely.
+        backupfile = None
+        if not obsolete.isenabled(repo, obsolete.createmarkersopt):
+            backupfile = repair._bundle(repo, [parentctxnode], [topmost], root,
+                                        'histedit')
+        state.backupfile = backupfile
+
     while state.rules:
         state.write()
         action, ha = state.rules.pop(0)
-        ui.debug('histedit: processing %s %s\n' % (action, ha))
-        actfunc = actiontable[action]
-        state.parentctx, replacement_ = actfunc(ui, state, ha, opts)
+        ui.debug('histedit: processing %s %s\n' % (action, ha[:12]))
+        actobj = actiontable[action].fromrule(state, ha)
+        parentctx, replacement_ = actobj.run()
+        state.parentctxnode = parentctx.node()
         state.replacements.extend(replacement_)
+    state.write()
 
-    hg.update(repo, state.parentctx.node())
+    hg.update(repo, state.parentctxnode)
 
     mapping, tmpnodes, created, ntm = processreplacement(state)
     if mapping:
@@ -707,80 +885,22 @@
     if os.path.exists(repo.sjoin('undo')):
         os.unlink(repo.sjoin('undo'))
 
-def gatherchildren(repo, ctx):
-    # is there any new commit between the expected parent and "."
-    #
-    # note: does not take non linear new change in account (but previous
-    #       implementation didn't used them anyway (issue3655)
-    newchildren = [c.node() for c in repo.set('(%d::.)', ctx)]
-    if ctx.node() != node.nullid:
-        if not newchildren:
-            # `ctx` should match but no result. This means that
-            # currentnode is not a descendant from ctx.
-            msg = _('%s is not an ancestor of working directory')
-            hint = _('use "histedit --abort" to clear broken state')
-            raise util.Abort(msg % ctx, hint=hint)
-        newchildren.pop(0)  # remove ctx
-    return newchildren
+def bootstrapcontinue(ui, state, opts):
+    repo = state.repo
+    action, currentnode = state.rules.pop(0)
 
-def bootstrapcontinue(ui, state, opts):
-    repo, parentctx = state.repo, state.parentctx
-    action, currentnode = state.rules.pop(0)
-    ctx = repo[currentnode]
+    actobj = actiontable[action].fromrule(state, currentnode)
 
-    newchildren = gatherchildren(repo, parentctx)
-
-    # Commit dirty working directory if necessary
-    new = None
     s = repo.status()
     if s.modified or s.added or s.removed or s.deleted:
-        # prepare the message for the commit to comes
-        if action in ('f', 'fold', 'r', 'roll'):
-            message = 'fold-temp-revision %s' % currentnode
-        else:
-            message = ctx.description()
-        editopt = action in ('e', 'edit', 'm', 'mess')
-        canonaction = {'e': 'edit', 'm': 'mess', 'p': 'pick'}
-        editform = 'histedit.%s' % canonaction.get(action, action)
-        editor = cmdutil.getcommiteditor(edit=editopt, editform=editform)
-        commit = commitfuncfor(repo, ctx)
-        new = commit(text=message, user=ctx.user(), date=ctx.date(),
-                     extra=ctx.extra(), editor=editor)
-        if new is not None:
-            newchildren.append(new)
-
-    replacements = []
-    # track replacements
-    if ctx.node() not in newchildren:
-        # note: new children may be empty when the changeset is dropped.
-        # this happen e.g during conflicting pick where we revert content
-        # to parent.
-        replacements.append((ctx.node(), tuple(newchildren)))
+        actobj.continuedirty()
+        s = repo.status()
+        if s.modified or s.added or s.removed or s.deleted:
+            raise util.Abort(_("working copy still dirty"))
 
-    if action in ('f', 'fold', 'r', 'roll'):
-        if newchildren:
-            # finalize fold operation if applicable
-            if new is None:
-                new = newchildren[-1]
-            else:
-                newchildren.pop()  # remove new from internal changes
-            foldopts = opts
-            if action in ('r', 'roll'):
-                foldopts = foldopts.copy()
-                foldopts['rollup'] = True
-            parentctx, repl = finishfold(ui, repo, parentctx, ctx, new,
-                                         foldopts, newchildren)
-            replacements.extend(repl)
-        else:
-            # newchildren is empty if the fold did not result in any commit
-            # this happen when all folded change are discarded during the
-            # merge.
-            replacements.append((ctx.node(), (parentctx.node(),)))
-    elif newchildren:
-        # otherwise update "parentctx" before proceeding to further operation
-        parentctx = repo[newchildren[-1]]
+    parentctx, replacements = actobj.continueclean()
 
-    state.parentctx = parentctx
+    state.parentctxnode = parentctx.node()
     state.replacements.extend(replacements)
 
     return state
@@ -801,19 +921,41 @@
             raise util.Abort(_('cannot edit immutable changeset: %s') % root)
     return [c.node() for c in ctxs]
 
-def makedesc(c):
-    """build a initial action line for a ctx `c`
+def makedesc(repo, action, rev):
+    """build an initial action line for a ctx
 
     lines are in the form:
 
-      pick <hash> <rev> <summary>
+      <action> <hash> <rev> <summary>
     """
+    ctx = repo[rev]
     summary = ''
-    if c.description():
-        summary = c.description().splitlines()[0]
-    line = 'pick %s %d %s' % (c, c.rev(), summary)
+    if ctx.description():
+        summary = ctx.description().splitlines()[0]
+    line = '%s %s %d %s' % (action, ctx, ctx.rev(), summary)
     # trim to 80 columns so it's not stupidly wide in my editor
-    return util.ellipsis(line, 80)
+    maxlen = repo.ui.configint('histedit', 'linelen', default=80)
+    maxlen = max(maxlen, 22) # avoid truncating hash
+    return util.ellipsis(line, maxlen)
+
+def ruleeditor(repo, ui, rules, editcomment=""):
+    """open an editor to edit rules
+
+    rules are in the format [ [act, ctx], ...] like in state.rules
+    """
+    rules = '\n'.join([makedesc(repo, act, rev) for [act, rev] in rules])
+    rules += '\n\n'
+    rules += editcomment
+    rules = ui.edit(rules, ui.username())
+
+    # Save edit rules in .hg/histedit-last-edit.txt in case
+    # the user needs to ask for help after something
+    # surprising happens.
+    f = open(repo.join('histedit-last-edit.txt'), 'w')
+    f.write(rules)
+    f.close()
+
+    return rules
 
 def verifyrules(rules, repo, ctxs):
     """Verify that there exists exactly one edit rule per given changeset.
@@ -822,7 +964,7 @@
     or a rule on a changeset outside of the user-given range.
     """
     parsed = []
-    expected = set(str(c) for c in ctxs)
+    expected = set(c.hex() for c in ctxs)
     seen = set()
     for r in rules:
         if ' ' not in r:
@@ -830,22 +972,24 @@
         action, rest = r.split(' ', 1)
         ha = rest.strip().split(' ', 1)[0]
         try:
-            ha = str(repo[ha])  # ensure its a short hash
+            ha = repo[ha].hex()
         except error.RepoError:
-            raise util.Abort(_('unknown changeset %s listed') % ha)
+            raise util.Abort(_('unknown changeset %s listed') % ha[:12])
         if ha not in expected:
             raise util.Abort(
                 _('may not use changesets other than the ones listed'))
         if ha in seen:
-            raise util.Abort(_('duplicated command for changeset %s') % ha)
+            raise util.Abort(_('duplicated command for changeset %s') %
+                    ha[:12])
         seen.add(ha)
         if action not in actiontable:
             raise util.Abort(_('unknown action "%s"') % action)
         parsed.append([action, ha])
     missing = sorted(expected - seen)  # sort to stabilize output
     if missing:
-        raise util.Abort(_('missing rules for changeset %s') % missing[0],
-                         hint=_('do you want to use the drop action?'))
+        raise util.Abort(_('missing rules for changeset %s') %
+                missing[0][:12],
+                hint=_('do you want to use the drop action?'))
     return parsed
 
 def processreplacement(state):
@@ -965,6 +1109,23 @@
     finally:
         release(lock)
 
+def stripwrapper(orig, ui, repo, nodelist, *args, **kwargs):
+    if isinstance(nodelist, str):
+        nodelist = [nodelist]
+    if os.path.exists(os.path.join(repo.path, 'histedit-state')):
+        state = histeditstate(repo)
+        state.read()
+        histedit_nodes = set([repo[rulehash].node() for (action, rulehash)
+                             in state.rules if rulehash in repo])
+        strip_nodes = set([repo[n].node() for n in nodelist])
+        common_nodes = histedit_nodes & strip_nodes
+        if common_nodes:
+            raise util.Abort(_("histedit in progress, can't strip %s")
+                             % ', '.join(node.short(x) for x in common_nodes))
+    return orig(ui, repo, nodelist, *args, **kwargs)
+
+extensions.wrapfunction(repair, 'strip', stripwrapper)
+
 def summaryhook(ui, repo):
     if not os.path.exists(repo.join('histedit-state')):
         return
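The histedit.py changes above replace the old per-action functions with
``histeditaction`` subclasses dispatched by name through ``actiontable``. As
a purely hypothetical sketch of how the new protocol composes (the
``verbosepick`` name and its registration are illustrative only, not part of
this changeset):

    # Hypothetical example: behaves like 'pick' but announces the changeset
    # it is about to apply, reusing only the protocol introduced above.
    class verbosepick(pick):
        def run(self):
            self.repo.ui.status('picking %s\n' % node.short(self.node))
            return super(verbosepick, self).run()

    # A new action only becomes reachable from rule parsing once it is
    # registered by name in actiontable, e.g. (illustrative):
    #     actiontable['verbosepick'] = verbosepick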
--- a/hgext/keyword.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/keyword.py	Thu Apr 16 20:57:51 2015 -0500
@@ -506,7 +506,10 @@
     kwt = kwtools['templater']
     wctx = repo[None]
     status = _status(ui, repo, wctx, kwt, *pats, **opts)
-    cwd = pats and repo.getcwd() or ''
+    if pats:
+        cwd = repo.getcwd()
+    else:
+        cwd = ''
     files = []
     if not opts.get('unknown') or opts.get('all'):
         files = sorted(status.modified + status.added + status.clean)
@@ -640,11 +643,10 @@
         # shrink keywords read from working dir
         self.lines = kwt.shrinklines(self.fname, self.lines)
 
-    def kw_diff(orig, repo, node1=None, node2=None, match=None, changes=None,
-                opts=None, prefix=''):
+    def kwdiff(orig, *args, **kwargs):
         '''Monkeypatch patch.diff to avoid expansion.'''
         kwt.restrict = True
-        return orig(repo, node1, node2, match, changes, opts, prefix)
+        return orig(*args, **kwargs)
 
     def kwweb_skip(orig, web, req, tmpl):
         '''Wraps webcommands.x turning off keyword expansion.'''
@@ -734,16 +736,10 @@
 
     extensions.wrapfunction(context.filectx, 'cmp', kwfilectx_cmp)
     extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init)
-    extensions.wrapfunction(patch, 'diff', kw_diff)
+    extensions.wrapfunction(patch, 'diff', kwdiff)
     extensions.wrapfunction(cmdutil, 'amend', kw_amend)
     extensions.wrapfunction(cmdutil, 'copy', kw_copy)
+    extensions.wrapfunction(cmdutil, 'dorecord', kw_dorecord)
     for c in 'annotate changeset rev filediff diff'.split():
         extensions.wrapfunction(webcommands, c, kwweb_skip)
-    for name in recordextensions.split():
-        try:
-            record = extensions.find(name)
-            extensions.wrapfunction(record, 'dorecord', kw_dorecord)
-        except KeyError:
-            pass
-
     repo.__class__ = kwrepo
--- a/hgext/largefiles/lfcommands.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/largefiles/lfcommands.py	Thu Apr 16 20:57:51 2015 -0500
@@ -437,7 +437,7 @@
     return totalsuccess, totalmissing
 
 def updatelfiles(ui, repo, filelist=None, printmessage=None,
-                 normallookup=False, checked=False):
+                 normallookup=False):
     '''Update largefiles according to standins in the working directory
 
     If ``printmessage`` is other than ``None``, it means "print (or
@@ -464,16 +464,12 @@
                     shutil.copyfile(abslfile, abslfile + '.orig')
                     util.unlinkpath(absstandin + '.orig')
                 expecthash = lfutil.readstandin(repo, lfile)
-                if (expecthash != '' and
-                    (checked or
-                     not os.path.exists(abslfile) or
-                     expecthash != lfutil.hashfile(abslfile))):
+                if expecthash != '':
                     if lfile not in repo[None]: # not switched to normal file
                         util.unlinkpath(abslfile, ignoremissing=True)
                     # use normallookup() to allocate an entry in largefiles
-                    # dirstate, because lack of it misleads
-                    # lfilesrepo.status() into recognition that such cache
-                    # missing files are removed.
+                    # dirstate to prevent lfilesrepo.status() from reporting
+                    # missing files as removed.
                     lfdirstate.normallookup(lfile)
                     update[lfile] = expecthash
             else:
--- a/hgext/largefiles/lfutil.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/largefiles/lfutil.py	Thu Apr 16 20:57:51 2015 -0500
@@ -82,9 +82,10 @@
     return path and os.path.exists(path)
 
 def findfile(repo, hash):
-    if instore(repo, hash):
+    path, exists = findstorepath(repo, hash)
+    if exists:
         repo.ui.note(_('found %s in store\n') % hash)
-        return storepath(repo, hash)
+        return path
     elif inusercache(repo.ui, hash):
         repo.ui.note(_('found %s in system cache\n') % hash)
         path = storepath(repo, hash)
@@ -164,11 +165,30 @@
             for f in repo[rev].walk(matcher)
             if rev is not None or repo.dirstate[f] != '?']
 
-def instore(repo, hash):
-    return os.path.exists(storepath(repo, hash))
+def instore(repo, hash, forcelocal=False):
+    return os.path.exists(storepath(repo, hash, forcelocal))
+
+def storepath(repo, hash, forcelocal=False):
+    if not forcelocal and repo.shared():
+        return repo.vfs.reljoin(repo.sharedpath, longname, hash)
+    return repo.join(longname, hash)
 
-def storepath(repo, hash):
-    return repo.join(os.path.join(longname, hash))
+def findstorepath(repo, hash):
+    '''Search through the local store path(s) to find the file for the given
+    hash.  If the file is not found, its path in the primary store is returned.
+    The return value is a tuple of (path, exists(path)).
+    '''
+    # For shared repos, the primary store is in the share source.  But for
+    # backward compatibility, force a lookup in the local store if it wasn't
+    # found in the share source.
+    path = storepath(repo, hash, False)
+
+    if instore(repo, hash):
+        return (path, True)
+    elif repo.shared() and instore(repo, hash, True):
+        return (storepath(repo, hash, True), True)
+
+    return (path, False)
 
 def copyfromcache(repo, hash, filename):
     '''Copy the specified largefile from the repo or system cache to
@@ -388,7 +408,7 @@
         lfdirstate.drop(lfile)
 
 def markcommitted(orig, ctx, node):
-    repo = ctx._repo
+    repo = ctx.repo()
 
     orig(node)
 
--- a/hgext/largefiles/localstore.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/largefiles/localstore.py	Thu Apr 16 20:57:51 2015 -0500
@@ -55,9 +55,9 @@
             return False
 
         expecthash = fctx.data()[0:40]
-        storepath = lfutil.storepath(self.remote, expecthash)
+        storepath, exists = lfutil.findstorepath(self.remote, expecthash)
         verified.add(key)
-        if not lfutil.instore(self.remote, expecthash):
+        if not exists:
             self.ui.warn(
                 _('changeset %s: %s references missing %s\n')
                 % (cset, filename, storepath))
--- a/hgext/largefiles/overrides.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/largefiles/overrides.py	Thu Apr 16 20:57:51 2015 -0500
@@ -14,7 +14,6 @@
 from mercurial import hg, util, cmdutil, scmutil, match as match_, \
         archival, pathutil, revset
 from mercurial.i18n import _
-from mercurial.node import hex
 
 import lfutil
 import lfcommands
@@ -304,17 +303,47 @@
             return matchandpats
 
         pats = set(p)
-        # TODO: handling of patterns in both cases below
+
+        def fixpats(pat, tostandin=lfutil.standin):
+            kindpat = match_._patsplit(pat, None)
+
+            if kindpat[0] is not None:
+                return kindpat[0] + ':' + tostandin(kindpat[1])
+            return tostandin(kindpat[1])
+
         if m._cwd:
-            if os.path.isabs(m._cwd):
-                # TODO: handle largefile magic when invoked from other cwd
-                return matchandpats
-            back = (m._cwd.count('/') + 1) * '../'
-            pats.update(back + lfutil.standin(m._cwd + '/' + f) for f in p)
+            hglf = lfutil.shortname
+            back = util.pconvert(m.rel(hglf)[:-len(hglf)])
+
+            def tostandin(f):
+                # The file may already be a standin, so truncate the back
+                # prefix and test before mangling it.  This avoids turning
+                # 'glob:../.hglf/foo*' into 'glob:../.hglf/../.hglf/foo*'.
+                if f.startswith(back) and lfutil.splitstandin(f[len(back):]):
+                    return f
+
+                # An absolute path is from outside the repo, so truncate the
+                # path to the root before building the standin.  Otherwise cwd
+                # is somewhere in the repo, relative to root, and needs to be
+                # prepended before building the standin.
+                if os.path.isabs(m._cwd):
+                    f = f[len(back):]
+                else:
+                    f = m._cwd + '/' + f
+                return back + lfutil.standin(f)
+
+            pats.update(fixpats(f, tostandin) for f in p)
         else:
-            pats.update(lfutil.standin(f) for f in p)
+            def tostandin(f):
+                if lfutil.splitstandin(f):
+                    return f
+                return lfutil.standin(f)
+            pats.update(fixpats(f, tostandin) for f in p)
 
         for i in range(0, len(m._files)):
+            # Don't add '.hglf' to m.files, since that is already covered by '.'
+            if m._files[i] == '.':
+                continue
             standin = lfutil.standin(m._files[i])
             # If the "standin" is a directory, append instead of replace to
             # support naming a directory on the command line with only
@@ -325,7 +354,6 @@
             elif m._files[i] not in repo[ctx.node()] \
                     and repo.wvfs.isdir(standin):
                 m._files.append(standin)
-            pats.add(standin)
 
         m._fmap = set(m._files)
         m._always = False
@@ -338,6 +366,7 @@
             return r
         m.matchfn = lfmatchfn
 
+        ui.debug('updated patterns: %s\n' % sorted(pats))
         return m, pats
 
     # For hg log --patch, the match object is used in two different senses:
@@ -346,8 +375,8 @@
     # The magic matchandpats override should be used for case (1) but not for
     # case (2).
     def overridemakelogfilematcher(repo, pats, opts):
-        pctx = repo[None]
-        match, pats = oldmatchandpats(pctx, pats, opts)
+        wctx = repo[None]
+        match, pats = oldmatchandpats(wctx, pats, opts)
         return lambda rev: match
 
     oldmatchandpats = installmatchandpatsfn(overridematchandpats)
@@ -379,36 +408,6 @@
     else:
         orig(ui, repo, *pats, **opts)
 
-# Override needs to refresh standins so that update's normal merge
-# will go through properly. Then the other update hook (overriding repo.update)
-# will get the new files. Filemerge is also overridden so that the merge
-# will merge standins correctly.
-def overrideupdate(orig, ui, repo, *pats, **opts):
-    # Need to lock between the standins getting updated and their
-    # largefiles getting updated
-    wlock = repo.wlock()
-    try:
-        if opts['check']:
-            lfdirstate = lfutil.openlfdirstate(ui, repo)
-            unsure, s = lfdirstate.status(
-                match_.always(repo.root, repo.getcwd()),
-                [], False, False, False)
-
-            mod = len(s.modified) > 0
-            for lfile in unsure:
-                standin = lfutil.standin(lfile)
-                if repo['.'][standin].data().strip() != \
-                        lfutil.hashfile(repo.wjoin(lfile)):
-                    mod = True
-                else:
-                    lfdirstate.normal(lfile)
-            lfdirstate.write()
-            if mod:
-                raise util.Abort(_('uncommitted changes'))
-        return orig(ui, repo, *pats, **opts)
-    finally:
-        wlock.release()
-
 # Before starting the manifest merge, merge.updates will call
 # _checkunknownfile to check if there are any files in the merged-in
 # changeset that collide with unknown files in the working copy.
@@ -548,6 +547,15 @@
         repo.wwrite(fcd.path(), fco.data(), fco.flags())
     return 0
 
+def copiespathcopies(orig, ctx1, ctx2, match=None):
+    copies = orig(ctx1, ctx2, match=match)
+    updated = {}
+
+    for k, v in copies.iteritems():
+        updated[lfutil.splitstandin(k) or k] = lfutil.splitstandin(v) or v
+
+    return updated
+
 # Copy first changes the matchers to match standins instead of
 # largefiles.  Then it overrides util.copyfile in that function it
 # checks if the destination largefile already exists. It also keeps a
@@ -559,16 +567,6 @@
         # this isn't legal, let the original function deal with it
         return orig(ui, repo, pats, opts, rename)
 
-    def makestandin(relpath):
-        path = pathutil.canonpath(repo.root, repo.getcwd(), relpath)
-        return os.path.join(repo.wjoin(lfutil.standin(path)))
-
-    fullpats = scmutil.expandpats(pats)
-    dest = fullpats[-1]
-
-    if os.path.isdir(dest):
-        if not os.path.isdir(makestandin(dest)):
-            os.makedirs(makestandin(dest))
     # This could copy both lfiles and normal files in one command,
     # but we don't want to do that. First replace their matcher to
     # only match normal files and run it, then replace it to just
@@ -595,6 +593,17 @@
     except OSError:
         return result
 
+    def makestandin(relpath):
+        path = pathutil.canonpath(repo.root, repo.getcwd(), relpath)
+        return os.path.join(repo.wjoin(lfutil.standin(path)))
+
+    fullpats = scmutil.expandpats(pats)
+    dest = fullpats[-1]
+
+    if os.path.isdir(dest):
+        if not os.path.isdir(makestandin(dest)):
+            os.makedirs(makestandin(dest))
+
     try:
         try:
             # When we call orig below it creates the standins but we don't add
@@ -694,7 +703,7 @@
 # commits. Update the standins then run the original revert, changing
 # the matcher to hit standins instead of largefiles. Based on the
 # resulting standins update the largefiles.
-def overriderevert(orig, ui, repo, *pats, **opts):
+def overriderevert(orig, ui, repo, ctx, parents, *pats, **opts):
     # Because we put the standins in a bad state (by updating them)
     # and then return them to a correct state we need to lock to
     # prevent others from changing them in their incorrect state.
@@ -711,14 +720,23 @@
 
         oldstandins = lfutil.getstandinsstate(repo)
 
-        def overridematch(ctx, pats=[], opts={}, globbed=False,
+        def overridematch(mctx, pats=[], opts={}, globbed=False,
                 default='relpath'):
-            match = oldmatch(ctx, pats, opts, globbed, default)
+            match = oldmatch(mctx, pats, opts, globbed, default)
             m = copy.copy(match)
+
+            # revert supports recursing into subrepos, and though largefiles
+            # currently doesn't work correctly in that case, this match is
+            # called, so the lfdirstate above may not be the correct one for
+            # this invocation of match.
+            lfdirstate = lfutil.openlfdirstate(mctx.repo().ui, mctx.repo(),
+                                               False)
+
             def tostandin(f):
-                if lfutil.standin(f) in ctx:
-                    return lfutil.standin(f)
-                elif lfutil.standin(f) in repo[None]:
+                standin = lfutil.standin(f)
+                if standin in ctx or standin in mctx:
+                    return standin
+                elif standin in repo[None] or lfdirstate[f] == 'r':
                     return None
                 return f
             m._files = [tostandin(f) for f in m._files]
@@ -728,13 +746,13 @@
             def matchfn(f):
                 if lfutil.isstandin(f):
                     return (origmatchfn(lfutil.splitstandin(f)) and
-                            (f in repo[None] or f in ctx))
+                            (f in ctx or f in mctx))
                 return origmatchfn(f)
             m.matchfn = matchfn
             return m
         oldmatch = installmatchfn(overridematch)
         try:
-            orig(ui, repo, *pats, **opts)
+            orig(ui, repo, ctx, parents, *pats, **opts)
         finally:
             restorematchfn()
 
@@ -820,6 +838,14 @@
         sourcerepo, destrepo = result
         repo = destrepo.local()
 
+        # If largefiles is required for this repo, permanently enable it locally
+        if 'largefiles' in repo.requirements:
+            fp = repo.vfs('hgrc', 'a', text=True)
+            try:
+                fp.write('\n[extensions]\nlargefiles=\n')
+            finally:
+                fp.close()
+
         # Caching is implicitly limited to 'rev' option, since the dest repo was
         # truncated at that point.  The user may expect a download count with
         # this option, so attempt whether or not this is a largefile repo.
@@ -845,7 +871,7 @@
         repo._lfcommithooks.pop()
 
 def overridearchive(orig, repo, dest, node, kind, decode=True, matchfn=None,
-            prefix=None, mtime=None, subrepos=None):
+            prefix='', mtime=None, subrepos=None):
     # No need to lock because we are only reading history and
     # largefile caches, neither of which are modified.
     lfcommands.cachelfiles(repo.ui, repo, node)
@@ -873,24 +899,8 @@
     archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])
 
     if repo.ui.configbool("ui", "archivemeta", True):
-        def metadata():
-            base = 'repo: %s\nnode: %s\nbranch: %s\n' % (
-                hex(repo.changelog.node(0)), hex(node), ctx.branch())
-
-            tags = ''.join('tag: %s\n' % t for t in ctx.tags()
-                           if repo.tagtype(t) == 'global')
-            if not tags:
-                repo.ui.pushbuffer()
-                opts = {'template': '{latesttag}\n{latesttagdistance}',
-                        'style': '', 'patch': None, 'git': None}
-                cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
-                ltags, dist = repo.ui.popbuffer().split('\n')
-                tags = ''.join('latesttag: %s\n' % t for t in ltags.split(':'))
-                tags += 'latesttagdistance: %s\n' % dist
-
-            return base + tags
-
-        write('.hg_archival.txt', 0644, False, metadata)
+        write('.hg_archival.txt', 0644, False,
+              lambda: archival.buildmetadata(ctx))
 
     for f in ctx:
         ff = ctx.flags(f)
@@ -972,8 +982,8 @@
 # standin until a commit. cmdutil.bailifchanged() raises an exception
 # if the repo has uncommitted changes. Wrap it to also check if
 # largefiles were changed. This is used by bisect, backout and fetch.
-def overridebailifchanged(orig, repo):
-    orig(repo)
+def overridebailifchanged(orig, repo, *args, **kwargs):
+    orig(repo, *args, **kwargs)
     repo.lfstatus = True
     s = repo.status()
     repo.lfstatus = False
@@ -1247,6 +1257,20 @@
         if not f in notbad:
             origbadfn(f, msg)
     m.bad = lfbadfn
+
+    origvisitdirfn = m.visitdir
+    def lfvisitdirfn(dir):
+        if dir == lfutil.shortname:
+            return True
+        ret = origvisitdirfn(dir)
+        if ret:
+            return ret
+        lf = lfutil.splitstandin(dir)
+        if lf is None:
+            return False
+        return origvisitdirfn(lf)
+    m.visitdir = lfvisitdirfn
+
     for f in ctx.walk(m):
         fp = cmdutil.makefileobj(repo, opts.get('output'), ctx.node(),
                                  pathname=f)
@@ -1294,45 +1318,37 @@
         # (*) don't care
         # (*1) deprecated, but used internally (e.g: "rebase --collapse")
 
-        linearmerge = not branchmerge and not force and not partial
+        lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
+        unsure, s = lfdirstate.status(match_.always(repo.root,
+                                                    repo.getcwd()),
+                                      [], False, False, False)
+        pctx = repo['.']
+        for lfile in unsure + s.modified:
+            lfileabs = repo.wvfs.join(lfile)
+            if not os.path.exists(lfileabs):
+                continue
+            lfhash = lfutil.hashrepofile(repo, lfile)
+            standin = lfutil.standin(lfile)
+            lfutil.writestandin(repo, standin, lfhash,
+                                lfutil.getexecutable(lfileabs))
+            if (standin in pctx and
+                lfhash == lfutil.readstandin(repo, lfile, '.')):
+                lfdirstate.normal(lfile)
+        for lfile in s.added:
+            lfutil.updatestandin(repo, lfutil.standin(lfile))
+        lfdirstate.write()
 
-        if linearmerge or (branchmerge and force and not partial):
-            # update standins for linear-merge or force-branch-merge,
-            # because largefiles in the working directory may be modified
-            lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
-            unsure, s = lfdirstate.status(match_.always(repo.root,
-                                                        repo.getcwd()),
-                                          [], False, False, False)
-            pctx = repo['.']
-            for lfile in unsure + s.modified:
-                lfileabs = repo.wvfs.join(lfile)
-                if not os.path.exists(lfileabs):
-                    continue
-                lfhash = lfutil.hashrepofile(repo, lfile)
-                standin = lfutil.standin(lfile)
-                lfutil.writestandin(repo, standin, lfhash,
-                                    lfutil.getexecutable(lfileabs))
-                if (standin in pctx and
-                    lfhash == lfutil.readstandin(repo, lfile, '.')):
-                    lfdirstate.normal(lfile)
-            for lfile in s.added:
-                lfutil.updatestandin(repo, lfutil.standin(lfile))
-            lfdirstate.write()
-
-        if linearmerge:
-            # Only call updatelfiles on the standins that have changed
-            # to save time
-            oldstandins = lfutil.getstandinsstate(repo)
+        oldstandins = lfutil.getstandinsstate(repo)
 
         result = orig(repo, node, branchmerge, force, partial, *args, **kwargs)
 
-        filelist = None
-        if linearmerge:
-            newstandins = lfutil.getstandinsstate(repo)
-            filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
+        newstandins = lfutil.getstandinsstate(repo)
+        filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
+        if branchmerge or force or partial:
+            filelist.extend(s.deleted + s.removed)
 
         lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
-                                normallookup=partial, checked=linearmerge)
+                                normallookup=partial)
 
         return result
     finally:
--- a/hgext/largefiles/reposetup.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/largefiles/reposetup.py	Thu Apr 16 20:57:51 2015 -0500
@@ -10,7 +10,7 @@
 import copy
 import os
 
-from mercurial import error, manifest, match as match_, util
+from mercurial import error, match as match_, util
 from mercurial.i18n import _
 from mercurial import scmutil, localrepo
 
@@ -38,17 +38,18 @@
         def __getitem__(self, changeid):
             ctx = super(lfilesrepo, self).__getitem__(changeid)
             if self.lfstatus:
-                class lfilesmanifestdict(manifest.manifestdict):
-                    def __contains__(self, filename):
-                        orig = super(lfilesmanifestdict, self).__contains__
-                        return orig(filename) or orig(lfutil.standin(filename))
                 class lfilesctx(ctx.__class__):
                     def files(self):
                         filenames = super(lfilesctx, self).files()
                         return [lfutil.splitstandin(f) or f for f in filenames]
                     def manifest(self):
                         man1 = super(lfilesctx, self).manifest()
-                        man1.__class__ = lfilesmanifestdict
+                        class lfilesmanifest(man1.__class__):
+                            def __contains__(self, filename):
+                                orig = super(lfilesmanifest, self).__contains__
+                                return (orig(filename) or
+                                        orig(lfutil.standin(filename)))
+                        man1.__class__ = lfilesmanifest
                         return man1
                     def filectx(self, path, fileid=None, filelog=None):
                         orig = super(lfilesctx, self).filectx
@@ -329,10 +330,10 @@
                             actualfiles.append(lf)
                             if not matcheddir:
                                 # There may still be normal files in the dir, so
-                                # make sure a directory is in the list, which
-                                # forces status to walk and call the match
-                                # function on the matcher.  Windows does NOT
-                                # require this.
+                                # add a directory to the list, which
+                                # forces status/dirstate to walk all files and
+                                # call the match function on the matcher, even
+                                # on case sensitive filesystems.
                                 actualfiles.append('.')
                                 matcheddir = True
                 # Nothing in dir, so readd it
--- a/hgext/largefiles/uisetup.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/largefiles/uisetup.py	Thu Apr 16 20:57:51 2015 -0500
@@ -9,7 +9,7 @@
 '''setup for largefiles extension: uisetup'''
 
 from mercurial import archival, cmdutil, commands, extensions, filemerge, hg, \
-    httppeer, merge, scmutil, sshpeer, wireproto, revset, subrepo
+    httppeer, merge, scmutil, sshpeer, wireproto, revset, subrepo, copies
 from mercurial.i18n import _
 from mercurial.hgweb import hgweb_mod, webcommands
 
@@ -37,6 +37,8 @@
     extensions.wrapfunction(cmdutil, 'remove', overrides.cmdutilremove)
     extensions.wrapfunction(cmdutil, 'forget', overrides.cmdutilforget)
 
+    extensions.wrapfunction(copies, 'pathcopies', overrides.copiespathcopies)
+
     # Subrepos call status function
     entry = extensions.wrapcommand(commands.table, 'status',
                                    overrides.overridestatus)
@@ -74,8 +76,6 @@
     entry[1].extend(summaryopt)
     cmdutil.summaryremotehooks.add('largefiles', overrides.summaryremotehook)
 
-    entry = extensions.wrapcommand(commands.table, 'update',
-                                   overrides.overrideupdate)
     entry = extensions.wrapcommand(commands.table, 'pull',
                                    overrides.overridepull)
     pullopt = [('', 'all-largefiles', None,
@@ -111,11 +111,7 @@
     entry = extensions.wrapfunction(subrepo.hgsubrepo, 'dirty',
                                     overrides.overridedirty)
 
-    # Backout calls revert so we need to override both the command and the
-    # function
-    entry = extensions.wrapcommand(commands.table, 'revert',
-                                   overrides.overriderevert)
-    entry = extensions.wrapfunction(commands, 'revert',
+    entry = extensions.wrapfunction(cmdutil, 'revert',
                                     overrides.overriderevert)
 
     extensions.wrapfunction(archival, 'archive', overrides.overridearchive)
--- a/hgext/mq.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/mq.py	Thu Apr 16 20:57:51 2015 -0500
@@ -418,7 +418,10 @@
             gitmode = ui.configbool('mq', 'git', None)
             if gitmode is None:
                 raise error.ConfigError
-            self.gitmode = gitmode and 'yes' or 'no'
+            if gitmode:
+                self.gitmode = 'yes'
+            else:
+                self.gitmode = 'no'
         except error.ConfigError:
             self.gitmode = ui.config('mq', 'git', 'auto').lower()
         self.plainmode = ui.configbool('mq', 'plain', False)
@@ -610,7 +613,11 @@
         return True, ''
 
     def explainpushable(self, idx, all_patches=False):
-        write = all_patches and self.ui.write or self.ui.warn
+        if all_patches:
+            write = self.ui.write
+        else:
+            write = self.ui.warn
+
         if all_patches or self.ui.verbose:
             if isinstance(idx, str):
                 idx = self.series.index(idx)
@@ -923,7 +930,8 @@
                 self.applied.append(statusentry(n, patchname))
 
             if patcherr:
-                self.ui.warn(_("patch failed, rejects left in working dir\n"))
+                self.ui.warn(_("patch failed, rejects left in working "
+                               "directory\n"))
                 err = 2
                 break
 
@@ -1825,7 +1833,11 @@
                 self.ui.write(pfx)
             if summary:
                 ph = patchheader(self.join(patchname), self.plainmode)
-                msg = ph.message and ph.message[0] or ''
+                if ph.message:
+                    msg = ph.message[0]
+                else:
+                    msg = ''
+
                 if self.ui.formatted():
                     width = self.ui.termwidth() - len(pfx) - len(patchname) - 2
                     if width > 0:
@@ -2228,7 +2240,10 @@
         ui.write(_("all patches applied\n"))
         return 1
 
-    length = opts.get('first') and 1 or None
+    if opts.get('first'):
+        length = 1
+    else:
+        length = None
     q.qseries(repo, start=start, length=length, status='U',
               summary=opts.get('summary'))
 
@@ -2454,7 +2469,11 @@
 
     Returns 0 on success."""
     q = repo.mq
-    t = q.applied and q.seriesend(True) or 0
+    if q.applied:
+        t = q.seriesend(True)
+    else:
+        t = 0
+
     if t:
         q.qseries(repo, start=t - 1, length=1, status='A',
                   summary=opts.get('summary'))
--- a/hgext/notify.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/notify.py	Thu Apr 16 20:57:51 2015 -0500
@@ -340,7 +340,10 @@
 
         maxdiff = int(self.ui.config('notify', 'maxdiff', 300))
         prev = ctx.p1().node()
-        ref = ref and ref.node() or ctx.node()
+        if ref:
+            ref = ref.node()
+        else:
+            ref = ctx.node()
         chunks = patch.diff(self.repo, prev, ref,
                             opts=patch.diffallopts(self.ui))
         difflines = ''.join(chunks).splitlines()
--- a/hgext/pager.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/pager.py	Thu Apr 16 20:57:51 2015 -0500
@@ -149,6 +149,8 @@
                     usepager = True
                     break
 
+        setattr(ui, 'pageractive', usepager)
+
         if usepager:
             ui.setconfig('ui', 'formatted', ui.formatted(), 'pager')
             ui.setconfig('ui', 'interactive', False, 'pager')
@@ -157,7 +159,12 @@
             _runpager(ui, p)
         return orig(ui, options, cmd, cmdfunc)
 
-    extensions.wrapfunction(dispatch, '_runcommand', pagecmd)
+    # Wrap dispatch._runcommand after color is loaded so color can see
+    # ui.pageractive. Otherwise, if we loaded first, color's wrapped
+    # dispatch._runcommand would run without having access to ui.pageractive.
+    def afterloaded(loaded):
+        extensions.wrapfunction(dispatch, '_runcommand', pagecmd)
+    extensions.afterloaded('color', afterloaded)
 
 def extsetup(ui):
     commands.globalopts.append(
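
The pager hunk above defers its wrapping of dispatch._runcommand until the color extension has loaded, via extensions.afterloaded. A minimal sketch of that deferred-callback pattern with stand-in names (not the real extension loader):

_afterloaded = {}                      # extension name -> callbacks to run later

def afterloaded(extname, callback):
    _afterloaded.setdefault(extname, []).append(callback)

def loadextension(extname):
    # ... the extension's own setup would run here, possibly wrapping things ...
    for cb in _afterloaded.pop(extname, []):
        cb(True)                       # True: the extension was actually loaded

def wrapruncommand(loaded):
    print('pager wraps _runcommand now; color loaded: %s' % loaded)

afterloaded('color', wrapruncommand)
loadextension('color')                 # pager's wrapper is installed only now
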
--- a/hgext/patchbomb.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/patchbomb.py	Thu Apr 16 20:57:51 2015 -0500
@@ -186,7 +186,7 @@
     """
     ui = repo.ui
     prev = repo['.'].rev()
-    for r in scmutil.revrange(repo, revs):
+    for r in revs:
         if r == prev and (repo[None].files() or repo[None].deleted()):
             ui.warn(_('warning: working directory has '
                       'uncommitted changes\n'))
@@ -339,14 +339,13 @@
     url = hg.parseurl(url)[0]
     ui.status(_('comparing with %s\n') % util.hidepassword(url))
 
-    revs = [r for r in scmutil.revrange(repo, revs) if r >= 0]
+    revs = [r for r in revs if r >= 0]
     if not revs:
         revs = [len(repo) - 1]
     revs = repo.revs('outgoing(%s) and ::%ld', dest or '', revs)
     if not revs:
         ui.status(_("no changes found\n"))
-        return []
-    return [str(r) for r in revs]
+    return revs
 
 emailopts = [
     ('', 'body', None, _('send patches as inline message text (default)')),
@@ -489,7 +488,10 @@
     if outgoing or bundle:
         if len(revs) > 1:
             raise util.Abort(_("too many destinations"))
-        dest = revs and revs[0] or None
+        if revs:
+            dest = revs[0]
+        else:
+            dest = None
         revs = []
 
     if rev:
@@ -497,10 +499,11 @@
             raise util.Abort(_('use only one form to specify the revision'))
         revs = rev
 
+    revs = scmutil.revrange(repo, revs)
     if outgoing:
-        revs = _getoutgoing(repo, dest, rev)
+        revs = _getoutgoing(repo, dest, revs)
     if bundle:
-        opts['revs'] = revs
+        opts['revs'] = [str(r) for r in revs]
 
     # start
     if date:
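
The patchbomb hunks above parse the user's revision specs once with scmutil.revrange and keep numeric revisions internally, converting to strings only where needed. A toy illustration of the same shape, with a stand-in parser instead of revrange:

def revrange_stub(specs):
    # stand-in: pretend every spec is already a plain numeric revision
    return sorted(int(s) for s in specs)

revs = revrange_stub(['3', '1', '2'])
revs = [r for r in revs if r >= 0]          # numeric filtering stays simple
bundle_revs = [str(r) for r in revs]        # stringify only at the boundary
print((revs, bundle_revs))                  # ([1, 2, 3], ['1', '2', '3'])
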
--- a/hgext/rebase.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/rebase.py	Thu Apr 16 20:57:51 2015 -0500
@@ -231,7 +231,8 @@
                     hint = _('use "hg rebase --abort" to clear broken state')
                     raise util.Abort(msg, hint=hint)
             if abortf:
-                return abort(repo, originalwd, target, state)
+                return abort(repo, originalwd, target, state,
+                             activebookmark=activebookmark)
         else:
             if srcf and basef:
                 raise util.Abort(_('cannot specify both a '
@@ -852,8 +853,11 @@
 
     return False
 
-def abort(repo, originalwd, target, state):
-    'Restore the repository to its original state'
+def abort(repo, originalwd, target, state, activebookmark=None):
+    '''Restore the repository to its original state.  Additional args:
+
+    activebookmark: the name of the bookmark that should be active after the
+        restore'''
     dstates = [s for s in state.values() if s >= 0]
     immutable = [d for d in dstates if not repo[d].mutable()]
     cleanup = True
@@ -883,6 +887,9 @@
             # no backup of rebased cset versions needed
             repair.strip(repo.ui, repo, strippoints)
 
+    if activebookmark:
+        bookmarks.setcurrent(repo, activebookmark)
+
     clearstatus(repo)
     repo.ui.warn(_('rebase aborted\n'))
     return 0
--- a/hgext/record.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/record.py	Thu Apr 16 20:57:51 2015 -0500
@@ -8,409 +8,18 @@
 '''commands to interactively select changes for commit/qrefresh'''
 
 from mercurial.i18n import _
-from mercurial import cmdutil, commands, extensions, hg, patch
+from mercurial import cmdutil, commands, extensions
 from mercurial import util
-import copy, cStringIO, errno, os, re, shutil, tempfile
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
 testedwith = 'internal'
 
-lines_re = re.compile(r'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)')
-
-def scanpatch(fp):
-    """like patch.iterhunks, but yield different events
-
-    - ('file',    [header_lines + fromfile + tofile])
-    - ('context', [context_lines])
-    - ('hunk',    [hunk_lines])
-    - ('range',   (-start,len, +start,len, proc))
-    """
-    lr = patch.linereader(fp)
-
-    def scanwhile(first, p):
-        """scan lr while predicate holds"""
-        lines = [first]
-        while True:
-            line = lr.readline()
-            if not line:
-                break
-            if p(line):
-                lines.append(line)
-            else:
-                lr.push(line)
-                break
-        return lines
-
-    while True:
-        line = lr.readline()
-        if not line:
-            break
-        if line.startswith('diff --git a/') or line.startswith('diff -r '):
-            def notheader(line):
-                s = line.split(None, 1)
-                return not s or s[0] not in ('---', 'diff')
-            header = scanwhile(line, notheader)
-            fromfile = lr.readline()
-            if fromfile.startswith('---'):
-                tofile = lr.readline()
-                header += [fromfile, tofile]
-            else:
-                lr.push(fromfile)
-            yield 'file', header
-        elif line[0] == ' ':
-            yield 'context', scanwhile(line, lambda l: l[0] in ' \\')
-        elif line[0] in '-+':
-            yield 'hunk', scanwhile(line, lambda l: l[0] in '-+\\')
-        else:
-            m = lines_re.match(line)
-            if m:
-                yield 'range', m.groups()
-            else:
-                yield 'other', line
-
-class header(object):
-    """patch header
-
-    XXX shouldn't we move this to mercurial/patch.py ?
-    """
-    diffgit_re = re.compile('diff --git a/(.*) b/(.*)$')
-    diff_re = re.compile('diff -r .* (.*)$')
-    allhunks_re = re.compile('(?:index|new file|deleted file) ')
-    pretty_re = re.compile('(?:new file|deleted file) ')
-    special_re = re.compile('(?:index|new|deleted|copy|rename) ')
-
-    def __init__(self, header):
-        self.header = header
-        self.hunks = []
-
-    def binary(self):
-        return util.any(h.startswith('index ') for h in self.header)
-
-    def pretty(self, fp):
-        for h in self.header:
-            if h.startswith('index '):
-                fp.write(_('this modifies a binary file (all or nothing)\n'))
-                break
-            if self.pretty_re.match(h):
-                fp.write(h)
-                if self.binary():
-                    fp.write(_('this is a binary file\n'))
-                break
-            if h.startswith('---'):
-                fp.write(_('%d hunks, %d lines changed\n') %
-                         (len(self.hunks),
-                          sum([max(h.added, h.removed) for h in self.hunks])))
-                break
-            fp.write(h)
-
-    def write(self, fp):
-        fp.write(''.join(self.header))
-
-    def allhunks(self):
-        return util.any(self.allhunks_re.match(h) for h in self.header)
-
-    def files(self):
-        match = self.diffgit_re.match(self.header[0])
-        if match:
-            fromfile, tofile = match.groups()
-            if fromfile == tofile:
-                return [fromfile]
-            return [fromfile, tofile]
-        else:
-            return self.diff_re.match(self.header[0]).groups()
-
-    def filename(self):
-        return self.files()[-1]
-
-    def __repr__(self):
-        return '<header %s>' % (' '.join(map(repr, self.files())))
-
-    def special(self):
-        return util.any(self.special_re.match(h) for h in self.header)
-
-def countchanges(hunk):
-    """hunk -> (n+,n-)"""
-    add = len([h for h in hunk if h[0] == '+'])
-    rem = len([h for h in hunk if h[0] == '-'])
-    return add, rem
-
-class hunk(object):
-    """patch hunk
-
-    XXX shouldn't we merge this with patch.hunk ?
-    """
-    maxcontext = 3
-
-    def __init__(self, header, fromline, toline, proc, before, hunk, after):
-        def trimcontext(number, lines):
-            delta = len(lines) - self.maxcontext
-            if False and delta > 0:
-                return number + delta, lines[:self.maxcontext]
-            return number, lines
-
-        self.header = header
-        self.fromline, self.before = trimcontext(fromline, before)
-        self.toline, self.after = trimcontext(toline, after)
-        self.proc = proc
-        self.hunk = hunk
-        self.added, self.removed = countchanges(self.hunk)
-
-    def write(self, fp):
-        delta = len(self.before) + len(self.after)
-        if self.after and self.after[-1] == '\\ No newline at end of file\n':
-            delta -= 1
-        fromlen = delta + self.removed
-        tolen = delta + self.added
-        fp.write('@@ -%d,%d +%d,%d @@%s\n' %
-                 (self.fromline, fromlen, self.toline, tolen,
-                  self.proc and (' ' + self.proc)))
-        fp.write(''.join(self.before + self.hunk + self.after))
-
-    pretty = write
-
-    def filename(self):
-        return self.header.filename()
-
-    def __repr__(self):
-        return '<hunk %r@%d>' % (self.filename(), self.fromline)
-
-def parsepatch(fp):
-    """patch -> [] of headers -> [] of hunks """
-    class parser(object):
-        """patch parsing state machine"""
-        def __init__(self):
-            self.fromline = 0
-            self.toline = 0
-            self.proc = ''
-            self.header = None
-            self.context = []
-            self.before = []
-            self.hunk = []
-            self.headers = []
-
-        def addrange(self, limits):
-            fromstart, fromend, tostart, toend, proc = limits
-            self.fromline = int(fromstart)
-            self.toline = int(tostart)
-            self.proc = proc
-
-        def addcontext(self, context):
-            if self.hunk:
-                h = hunk(self.header, self.fromline, self.toline, self.proc,
-                         self.before, self.hunk, context)
-                self.header.hunks.append(h)
-                self.fromline += len(self.before) + h.removed
-                self.toline += len(self.before) + h.added
-                self.before = []
-                self.hunk = []
-                self.proc = ''
-            self.context = context
-
-        def addhunk(self, hunk):
-            if self.context:
-                self.before = self.context
-                self.context = []
-            self.hunk = hunk
-
-        def newfile(self, hdr):
-            self.addcontext([])
-            h = header(hdr)
-            self.headers.append(h)
-            self.header = h
-
-        def addother(self, line):
-            pass # 'other' lines are ignored
-
-        def finished(self):
-            self.addcontext([])
-            return self.headers
-
-        transitions = {
-            'file': {'context': addcontext,
-                     'file': newfile,
-                     'hunk': addhunk,
-                     'range': addrange},
-            'context': {'file': newfile,
-                        'hunk': addhunk,
-                        'range': addrange,
-                        'other': addother},
-            'hunk': {'context': addcontext,
-                     'file': newfile,
-                     'range': addrange},
-            'range': {'context': addcontext,
-                      'hunk': addhunk},
-            'other': {'other': addother},
-            }
-
-    p = parser()
-
-    state = 'context'
-    for newstate, data in scanpatch(fp):
-        try:
-            p.transitions[state][newstate](p, data)
-        except KeyError:
-            raise patch.PatchError('unhandled transition: %s -> %s' %
-                                   (state, newstate))
-        state = newstate
-    return p.finished()
-
-def filterpatch(ui, headers):
-    """Interactively filter patch chunks into applied-only chunks"""
-
-    def prompt(skipfile, skipall, query, chunk):
-        """prompt query, and process base inputs
-
-        - y/n for the rest of file
-        - y/n for the rest
-        - ? (help)
-        - q (quit)
-
-        Return True/False and possibly updated skipfile and skipall.
-        """
-        newpatches = None
-        if skipall is not None:
-            return skipall, skipfile, skipall, newpatches
-        if skipfile is not None:
-            return skipfile, skipfile, skipall, newpatches
-        while True:
-            resps = _('[Ynesfdaq?]'
-                      '$$ &Yes, record this change'
-                      '$$ &No, skip this change'
-                      '$$ &Edit this change manually'
-                      '$$ &Skip remaining changes to this file'
-                      '$$ Record remaining changes to this &file'
-                      '$$ &Done, skip remaining changes and files'
-                      '$$ Record &all changes to all remaining files'
-                      '$$ &Quit, recording no changes'
-                      '$$ &? (display help)')
-            r = ui.promptchoice("%s %s" % (query, resps))
-            ui.write("\n")
-            if r == 8: # ?
-                for c, t in ui.extractchoices(resps)[1]:
-                    ui.write('%s - %s\n' % (c, t.lower()))
-                continue
-            elif r == 0: # yes
-                ret = True
-            elif r == 1: # no
-                ret = False
-            elif r == 2: # Edit patch
-                if chunk is None:
-                    ui.write(_('cannot edit patch for whole file'))
-                    ui.write("\n")
-                    continue
-                if chunk.header.binary():
-                    ui.write(_('cannot edit patch for binary file'))
-                    ui.write("\n")
-                    continue
-                # Patch comment based on the Git one (based on comment at end of
-                # http://mercurial.selenic.com/wiki/RecordExtension)
-                phelp = '---' + _("""
-To remove '-' lines, make them ' ' lines (context).
-To remove '+' lines, delete them.
-Lines starting with # will be removed from the patch.
-
-If the patch applies cleanly, the edited hunk will immediately be
-added to the record list. If it does not apply cleanly, a rejects
-file will be generated: you can use that when you try again. If
-all lines of the hunk are removed, then the edit is aborted and
-the hunk is left unchanged.
-""")
-                (patchfd, patchfn) = tempfile.mkstemp(prefix="hg-editor-",
-                        suffix=".diff", text=True)
-                ncpatchfp = None
-                try:
-                    # Write the initial patch
-                    f = os.fdopen(patchfd, "w")
-                    chunk.header.write(f)
-                    chunk.write(f)
-                    f.write('\n'.join(['# ' + i for i in phelp.splitlines()]))
-                    f.close()
-                    # Start the editor and wait for it to complete
-                    editor = ui.geteditor()
-                    ui.system("%s \"%s\"" % (editor, patchfn),
-                              environ={'HGUSER': ui.username()},
-                              onerr=util.Abort, errprefix=_("edit failed"))
-                    # Remove comment lines
-                    patchfp = open(patchfn)
-                    ncpatchfp = cStringIO.StringIO()
-                    for line in patchfp:
-                        if not line.startswith('#'):
-                            ncpatchfp.write(line)
-                    patchfp.close()
-                    ncpatchfp.seek(0)
-                    newpatches = parsepatch(ncpatchfp)
-                finally:
-                    os.unlink(patchfn)
-                    del ncpatchfp
-                # Signal that the chunk shouldn't be applied as-is, but
-                # provide the new patch to be used instead.
-                ret = False
-            elif r == 3: # Skip
-                ret = skipfile = False
-            elif r == 4: # file (Record remaining)
-                ret = skipfile = True
-            elif r == 5: # done, skip remaining
-                ret = skipall = False
-            elif r == 6: # all
-                ret = skipall = True
-            elif r == 7: # quit
-                raise util.Abort(_('user quit'))
-            return ret, skipfile, skipall, newpatches
-
-    seen = set()
-    applied = {}        # 'filename' -> [] of chunks
-    skipfile, skipall = None, None
-    pos, total = 1, sum(len(h.hunks) for h in headers)
-    for h in headers:
-        pos += len(h.hunks)
-        skipfile = None
-        fixoffset = 0
-        hdr = ''.join(h.header)
-        if hdr in seen:
-            continue
-        seen.add(hdr)
-        if skipall is None:
-            h.pretty(ui)
-        msg = (_('examine changes to %s?') %
-               _(' and ').join("'%s'" % f for f in h.files()))
-        r, skipfile, skipall, np = prompt(skipfile, skipall, msg, None)
-        if not r:
-            continue
-        applied[h.filename()] = [h]
-        if h.allhunks():
-            applied[h.filename()] += h.hunks
-            continue
-        for i, chunk in enumerate(h.hunks):
-            if skipfile is None and skipall is None:
-                chunk.pretty(ui)
-            if total == 1:
-                msg = _("record this change to '%s'?") % chunk.filename()
-            else:
-                idx = pos - len(h.hunks) + i
-                msg = _("record change %d/%d to '%s'?") % (idx, total,
-                                                           chunk.filename())
-            r, skipfile, skipall, newpatches = prompt(skipfile,
-                    skipall, msg, chunk)
-            if r:
-                if fixoffset:
-                    chunk = copy.copy(chunk)
-                    chunk.toline += fixoffset
-                applied[chunk.filename()].append(chunk)
-            elif newpatches is not None:
-                for newpatch in newpatches:
-                    for newhunk in newpatch.hunks:
-                        if fixoffset:
-                            newhunk.toline += fixoffset
-                        applied[newhunk.filename()].append(newhunk)
-            else:
-                fixoffset += chunk.removed - chunk.added
-    return sum([h for h in applied.itervalues()
-               if h[0].special() or len(h) > 1], [])
 
 @command("record",
          # same options as commit + white space diff options
-         commands.table['^commit|ci'][1][:] + commands.diffwsopts,
+        [c for c in commands.table['^commit|ci'][1][:]
+            if c[1] != "interactive"] + commands.diffwsopts,
           _('hg record [OPTION]... [FILE]...'))
 def record(ui, repo, *pats, **opts):
     '''interactively select changes to commit
@@ -440,7 +49,8 @@
 
     This command is not available when committing a merge.'''
 
-    dorecord(ui, repo, commands.commit, 'commit', False, *pats, **opts)
+    opts["interactive"] = True
+    commands.commit(ui, repo, *pats, **opts)
 
 def qrefresh(origfn, ui, repo, *pats, **opts):
     if not opts['interactive']:
@@ -456,7 +66,8 @@
         mq.refresh(ui, repo, **opts)
 
     # backup all changed files
-    dorecord(ui, repo, committomq, 'qrefresh', True, *pats, **opts)
+    cmdutil.dorecord(ui, repo, committomq, 'qrefresh', True,
+                    cmdutil.recordfilter, *pats, **opts)
 
 # This command registration is replaced during uisetup().
 @command('qrecord',
@@ -481,162 +92,14 @@
         opts['checkname'] = False
         mq.new(ui, repo, patch, *pats, **opts)
 
-    dorecord(ui, repo, committomq, 'qnew', False, *pats, **opts)
+    cmdutil.dorecord(ui, repo, committomq, 'qnew', False,
+                    cmdutil.recordfilter, *pats, **opts)
 
 def qnew(origfn, ui, repo, patch, *args, **opts):
     if opts['interactive']:
         return qrecord(ui, repo, patch, *args, **opts)
     return origfn(ui, repo, patch, *args, **opts)
 
-def dorecord(ui, repo, commitfunc, cmdsuggest, backupall, *pats, **opts):
-    if not ui.interactive():
-        raise util.Abort(_('running non-interactively, use %s instead') %
-                         cmdsuggest)
-
-    # make sure username is set before going interactive
-    if not opts.get('user'):
-        ui.username() # raise exception, username not provided
-
-    def recordfunc(ui, repo, message, match, opts):
-        """This is generic record driver.
-
-        Its job is to interactively filter local changes, and
-        accordingly prepare working directory into a state in which the
-        job can be delegated to a non-interactive commit command such as
-        'commit' or 'qrefresh'.
-
-        After the actual job is done by non-interactive command, the
-        working directory is restored to its original state.
-
-        In the end we'll record interesting changes, and everything else
-        will be left in place, so the user can continue working.
-        """
-
-        cmdutil.checkunfinished(repo, commit=True)
-        merge = len(repo[None].parents()) > 1
-        if merge:
-            raise util.Abort(_('cannot partially commit a merge '
-                               '(use "hg commit" instead)'))
-
-        status = repo.status(match=match)
-        diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
-        diffopts.nodates = True
-        diffopts.git = True
-        chunks = patch.diff(repo, changes=status, opts=diffopts)
-        fp = cStringIO.StringIO()
-        fp.write(''.join(chunks))
-        fp.seek(0)
-
-        # 1. filter patch, so we have intending-to apply subset of it
-        try:
-            chunks = filterpatch(ui, parsepatch(fp))
-        except patch.PatchError, err:
-            raise util.Abort(_('error parsing patch: %s') % err)
-
-        del fp
-
-        contenders = set()
-        for h in chunks:
-            try:
-                contenders.update(set(h.files()))
-            except AttributeError:
-                pass
-
-        changed = status.modified + status.added + status.removed
-        newfiles = [f for f in changed if f in contenders]
-        if not newfiles:
-            ui.status(_('no changes to record\n'))
-            return 0
-
-        modified = set(status.modified)
-
-        # 2. backup changed files, so we can restore them in the end
-        if backupall:
-            tobackup = changed
-        else:
-            tobackup = [f for f in newfiles if f in modified]
-
-        backups = {}
-        if tobackup:
-            backupdir = repo.join('record-backups')
-            try:
-                os.mkdir(backupdir)
-            except OSError, err:
-                if err.errno != errno.EEXIST:
-                    raise
-        try:
-            # backup continues
-            for f in tobackup:
-                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
-                                               dir=backupdir)
-                os.close(fd)
-                ui.debug('backup %r as %r\n' % (f, tmpname))
-                util.copyfile(repo.wjoin(f), tmpname)
-                shutil.copystat(repo.wjoin(f), tmpname)
-                backups[f] = tmpname
-
-            fp = cStringIO.StringIO()
-            for c in chunks:
-                if c.filename() in backups:
-                    c.write(fp)
-            dopatch = fp.tell()
-            fp.seek(0)
-
-            # 3a. apply filtered patch to clean repo  (clean)
-            if backups:
-                hg.revert(repo, repo.dirstate.p1(),
-                          lambda key: key in backups)
-
-            # 3b. (apply)
-            if dopatch:
-                try:
-                    ui.debug('applying patch\n')
-                    ui.debug(fp.getvalue())
-                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
-                except patch.PatchError, err:
-                    raise util.Abort(str(err))
-            del fp
-
-            # 4. We prepared working directory according to filtered
-            #    patch. Now is the time to delegate the job to
-            #    commit/qrefresh or the like!
-
-            # Make all of the pathnames absolute.
-            newfiles = [repo.wjoin(nf) for nf in newfiles]
-            commitfunc(ui, repo, *newfiles, **opts)
-
-            return 0
-        finally:
-            # 5. finally restore backed-up files
-            try:
-                for realname, tmpname in backups.iteritems():
-                    ui.debug('restoring %r to %r\n' % (tmpname, realname))
-                    util.copyfile(tmpname, repo.wjoin(realname))
-                    # Our calls to copystat() here and above are a
-                    # hack to trick any editors that have f open that
-                    # we haven't modified them.
-                    #
-                    # Also note that this racy as an editor could
-                    # notice the file's mtime before we've finished
-                    # writing it.
-                    shutil.copystat(tmpname, repo.wjoin(realname))
-                    os.unlink(tmpname)
-                if tobackup:
-                    os.rmdir(backupdir)
-            except OSError:
-                pass
-
-    # wrap ui.write so diff output can be labeled/colorized
-    def wrapwrite(orig, *args, **kw):
-        label = kw.pop('label', '')
-        for chunk, l in patch.difflabel(lambda: args):
-            orig(chunk, label=label + l)
-    oldwrite = ui.write
-    extensions.wrapfunction(ui, 'write', wrapwrite)
-    try:
-        return cmdutil.commit(ui, repo, recordfunc, pats, opts)
-    finally:
-        ui.write = oldwrite
 
 def uisetup(ui):
     try:
--- a/hgext/share.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/share.py	Thu Apr 16 20:57:51 2015 -0500
@@ -15,7 +15,7 @@
 testedwith = 'internal'
 
 @command('share',
-    [('U', 'noupdate', None, _('do not create a working copy')),
+    [('U', 'noupdate', None, _('do not create a working directory')),
      ('B', 'bookmarks', None, _('also share bookmarks'))],
     _('[-U] [-B] SOURCE [DEST]'),
     norepo=True)
--- a/hgext/shelve.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/shelve.py	Thu Apr 16 20:57:51 2015 -0500
@@ -226,9 +226,17 @@
             raise util.Abort(_('shelved change names may not contain slashes'))
         if name.startswith('.'):
             raise util.Abort(_("shelved change names may not start with '.'"))
+        interactive = opts.get('interactive', False)
 
-        node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
-
+        def interactivecommitfunc(ui, repo, *pats, **opts):
+            match = scmutil.match(repo['.'], pats, {})
+            message = opts['message']
+            return commitfunc(ui, repo, message, match, opts)
+        if not interactive:
+            node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
+        else:
+            node = cmdutil.dorecord(ui, repo, interactivecommitfunc, 'commit',
+                                    False, cmdutil.recordfilter, *pats, **opts)
         if not node:
             stat = repo.status(match=scmutil.match(repo[None], pats, opts))
             if stat.deleted:
@@ -536,8 +544,8 @@
     oldquiet = ui.quiet
     wlock = lock = tr = None
     try:
+        wlock = repo.wlock()
         lock = repo.lock()
-        wlock = repo.wlock()
 
         tr = repo.transaction('unshelve', report=lambda x: None)
         oldtiprev = len(repo)
@@ -649,6 +657,8 @@
            _('use the given name for the shelved commit'), _('NAME')),
           ('p', 'patch', None,
            _('show patch')),
+          ('i', 'interactive', None,
+           _('interactive mode, only works while creating a shelve')),
           ('', 'stat', None,
            _('output diffstat-style summary of changes'))] + commands.walkopts,
          _('hg shelve [OPTION]... [FILE]...'))
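
The shelve hunk above swaps the acquisition order so the working-directory lock is taken before the store lock, matching Mercurial's locking convention and avoiding lock-order inversions. A plain threading sketch of the same discipline (stand-in locks, not repo.wlock()/repo.lock()):

import threading

wlock = threading.Lock()        # stands in for repo.wlock()
slock = threading.Lock()        # stands in for repo.lock()

def unshelve_like_operation():
    with wlock:                 # working-directory lock first...
        with slock:             # ...then the store lock
            pass                # the transaction work would happen here

unshelve_like_operation()
print('locks acquired and released in wlock -> lock order')
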
--- a/hgext/strip.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/strip.py	Thu Apr 16 20:57:51 2015 -0500
@@ -7,7 +7,7 @@
 from mercurial.node import nullid
 from mercurial.lock import release
 from mercurial import cmdutil, hg, scmutil, util
-from mercurial import repair, bookmarks
+from mercurial import repair, bookmarks, merge
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
@@ -23,10 +23,8 @@
     else:
         bctx = wctx.parents()[0]
     for s in sorted(wctx.substate):
-        if wctx.sub(s).dirty(True):
-            raise util.Abort(
-                _("uncommitted changes in subrepository %s") % s)
-        elif s not in bctx.substate or bctx.sub(s).dirty():
+        wctx.sub(s).bailifchanged(True)
+        if s not in bctx.substate or bctx.sub(s).dirty():
             inclsubs.append(s)
     return inclsubs
 
@@ -81,7 +79,8 @@
           ('', 'no-backup', None, _('no backups')),
           ('', 'nobackup', None, _('no backups (DEPRECATED)')),
           ('n', '', None, _('ignored  (DEPRECATED)')),
-          ('k', 'keep', None, _("do not modify working copy during strip")),
+          ('k', 'keep', None, _("do not modify working directory during "
+                                "strip")),
           ('B', 'bookmark', '', _("remove revs only reachable from given"
                                   " bookmark"))],
           _('hg strip [-k] [-f] [-n] [-B bookmark] [-r] REV...'))
@@ -206,6 +205,11 @@
 
             repo.dirstate.rebuild(urev, uctx.manifest(), changedfiles)
             repo.dirstate.write()
+
+            # clear resolve state
+            ms = merge.mergestate(repo)
+            ms.reset(repo['.'].node())
+
             update = False
 
 
--- a/hgext/transplant.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/hgext/transplant.py	Thu Apr 16 20:57:51 2015 -0500
@@ -342,9 +342,8 @@
         try:
             p1, p2 = repo.dirstate.parents()
             if p1 != parent:
-                raise util.Abort(
-                    _('working dir not at transplant parent %s') %
-                                 revlog.hex(parent))
+                raise util.Abort(_('working directory not at transplant '
+                                   'parent %s') % revlog.hex(parent))
             if merge:
                 repo.setparents(p1, parents[1])
             modified, added, removed, deleted = repo.status()[:4]
--- a/i18n/ja.po	Thu Apr 16 22:33:53 2015 +0900
+++ b/i18n/ja.po	Thu Apr 16 20:57:51 2015 -0500
@@ -27151,8 +27151,8 @@
 msgid "couldn't read journal entry %r!\n"
 msgstr "ジャーナルファイル中のエントリ %r の解析に失敗\n"
 
-msgid "journal was created by a different version of Mercurial"
-msgstr "ジャーナルファイルは異なる版の mercurial で作成されたものです"
+msgid "journal was created by a different version of Mercurial\n"
+msgstr "ジャーナルファイルは異なる版の mercurial で作成されたものです\n"
 
 msgid "already have changeset "
 msgstr "既にあるリビジョンです "
--- a/i18n/polib.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/i18n/polib.py	Thu Apr 16 20:57:51 2015 -0500
@@ -437,8 +437,15 @@
         # the keys are sorted in the .mo file
         def cmp(_self, other):
             # msgfmt compares entries with msgctxt if it exists
-            self_msgid = _self.msgctxt and _self.msgctxt or _self.msgid
-            other_msgid = other.msgctxt and other.msgctxt or other.msgid
+            if _self.msgctxt:
+                self_msgid = _self.msgctxt
+            else:
+                self_msgid = _self.msgid
+
+            if other.msgctxt:
+                other_msgid = other.msgctxt
+            else:
+                other_msgid = other.msgid
             if self_msgid > other_msgid:
                 return 1
             elif self_msgid < other_msgid:
--- a/mercurial/archival.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/archival.py	Thu Apr 16 20:57:51 2015 -0500
@@ -6,7 +6,6 @@
 # GNU General Public License version 2 or any later version.
 
 from i18n import _
-from node import hex
 import match as matchmod
 import cmdutil
 import scmutil, util, encoding
@@ -55,6 +54,33 @@
             return kind
     return None
 
+def _rootctx(repo):
+    # repo[0] may be hidden
+    for rev in repo:
+        return repo[rev]
+    return repo['null']
+
+def buildmetadata(ctx):
+    '''build content of .hg_archival.txt'''
+    repo = ctx.repo()
+    base = 'repo: %s\nnode: %s\nbranch: %s\n' % (
+        _rootctx(repo).hex(), ctx.hex(), encoding.fromlocal(ctx.branch()))
+
+    tags = ''.join('tag: %s\n' % t for t in ctx.tags()
+                   if repo.tagtype(t) == 'global')
+    if not tags:
+        repo.ui.pushbuffer()
+        opts = {'template': '{latesttag}\n{latesttagdistance}',
+                'style': '', 'patch': None, 'git': None}
+        cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
+        ltags, dist = repo.ui.popbuffer().split('\n')
+        ltags = ltags.split(':')
+        changessince = len(repo.revs('only(.,%s)', ltags[0]))
+        tags = ''.join('latesttag: %s\n' % t for t in ltags)
+        tags += 'latesttagdistance: %s\n' % dist
+        tags += 'changessincelatesttag: %s\n' % changessince
+
+    return base + tags
 
 class tarit(object):
     '''write archive to tar file or stream.  can write uncompressed,
@@ -230,7 +256,7 @@
     }
 
 def archive(repo, dest, node, kind, decode=True, matchfn=None,
-            prefix=None, mtime=None, subrepos=False):
+            prefix='', mtime=None, subrepos=False):
     '''create archive of repo as it was at node.
 
     dest can be name of directory, name of archive file, or file
@@ -264,29 +290,9 @@
     archiver = archivers[kind](dest, mtime or ctx.date()[0])
 
     if repo.ui.configbool("ui", "archivemeta", True):
-        def metadata():
-            base = 'repo: %s\nnode: %s\nbranch: %s\n' % (
-                repo[0].hex(), hex(node), encoding.fromlocal(ctx.branch()))
-
-            tags = ''.join('tag: %s\n' % t for t in ctx.tags()
-                           if repo.tagtype(t) == 'global')
-            if not tags:
-                repo.ui.pushbuffer()
-                opts = {'template': '{latesttag}\n{latesttagdistance}',
-                        'style': '', 'patch': None, 'git': None}
-                cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
-                ltags, dist = repo.ui.popbuffer().split('\n')
-                ltags = ltags.split(':')
-                changessince = len(repo.revs('only(.,%s)', ltags[0]))
-                tags = ''.join('latesttag: %s\n' % t for t in ltags)
-                tags += 'latesttagdistance: %s\n' % dist
-                tags += 'changessincelatesttag: %s\n' % changessince
-
-            return base + tags
-
         name = '.hg_archival.txt'
         if not matchfn or matchfn(name):
-            write(name, 0644, False, metadata)
+            write(name, 0644, False, lambda: buildmetadata(ctx))
 
     if matchfn:
         files = [f for f in ctx.manifest().keys() if matchfn(f)]
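
The archival hunks above lift the .hg_archival.txt generation out of archive() into a reusable buildmetadata(ctx). A rough sketch of the key/value layout it produces, with placeholder values rather than real hashes:

def buildmetadata_example(rootnode, node, branch, globaltags):
    base = 'repo: %s\nnode: %s\nbranch: %s\n' % (rootnode, node, branch)
    tags = ''.join('tag: %s\n' % t for t in globaltags)
    return base + tags

print(buildmetadata_example('0' * 40, 'f' * 40, 'default', ['1.0', 'stable']))
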
--- a/mercurial/bookmarks.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/bookmarks.py	Thu Apr 16 20:57:51 2015 -0500
@@ -362,14 +362,17 @@
 
     return results
 
-def _diverge(ui, b, path, localmarks):
+def _diverge(ui, b, path, localmarks, remotenode):
+    '''Return appropriate diverged bookmark for specified ``path``
+
+    This returns None if it fails to assign any divergent
+    bookmark name.
+
+    This reuses an already existing one with an "@number" suffix, if it
+    refers to ``remotenode``.
+    '''
     if b == '@':
         b = ''
-    # find a unique @ suffix
-    for x in range(1, 100):
-        n = '%s@%d' % (b, x)
-        if n not in localmarks:
-            break
     # try to use an @pathalias suffix
     # if an @pathalias already exists, we overwrite (update) it
     if path.startswith("file:"):
@@ -378,8 +381,15 @@
         if u.startswith("file:"):
             u = util.url(u).path
         if path == u:
-            n = '%s@%s' % (b, p)
-    return n
+            return '%s@%s' % (b, p)
+
+    # assign a new, unique "@number" suffix
+    for x in range(1, 100):
+        n = '%s@%d' % (b, x)
+        if n not in localmarks or localmarks[n] == remotenode:
+            return n
+
+    return None
 
 def updatefromremote(ui, repo, remotemarks, path, trfunc, explicit=()):
     ui.debug("checking for updated bookmarks\n")
@@ -410,10 +420,15 @@
             changed.append((b, bin(scid), status,
                             _("importing bookmark %s\n") % (b)))
         else:
-            db = _diverge(ui, b, path, localmarks)
-            changed.append((db, bin(scid), warn,
-                            _("divergent bookmark %s stored as %s\n")
-                            % (b, db)))
+            snode = bin(scid)
+            db = _diverge(ui, b, path, localmarks, snode)
+            if db:
+                changed.append((db, snode, warn,
+                                _("divergent bookmark %s stored as %s\n") %
+                                (b, db)))
+            else:
+                warn(_("warning: failed to assign numbered name "
+                       "to divergent bookmark %s\n") % (b))
     for b, scid, dcid in adddst + advdst:
         if b in explicit:
             explicit.discard(b)
@@ -427,22 +442,94 @@
             writer(msg)
         localmarks.recordchange(tr)
 
-def diff(ui, dst, src):
+def incoming(ui, repo, other):
+    '''Show bookmarks incoming from other to repo
+    '''
+    ui.status(_("searching for changed bookmarks\n"))
+
+    r = compare(repo, other.listkeys('bookmarks'), repo._bookmarks,
+                dsthex=hex)
+    addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = r
+
+    incomings = []
+    if ui.debugflag:
+        getid = lambda id: id
+    else:
+        getid = lambda id: id[:12]
+    if ui.verbose:
+        def add(b, id, st):
+            incomings.append("   %-25s %s %s\n" % (b, getid(id), st))
+    else:
+        def add(b, id, st):
+            incomings.append("   %-25s %s\n" % (b, getid(id)))
+    for b, scid, dcid in addsrc:
+        add(b, scid, _('added'))
+    for b, scid, dcid in advsrc:
+        add(b, scid, _('advanced'))
+    for b, scid, dcid in diverge:
+        add(b, scid, _('diverged'))
+    for b, scid, dcid in differ:
+        add(b, scid, _('changed'))
+
+    if not incomings:
+        ui.status(_("no changed bookmarks found\n"))
+        return 1
+
+    for s in sorted(incomings):
+        ui.write(s)
+
+    return 0
+
+def outgoing(ui, repo, other):
+    '''Show bookmarks outgoing from repo to other
+    '''
     ui.status(_("searching for changed bookmarks\n"))
 
-    smarks = src.listkeys('bookmarks')
-    dmarks = dst.listkeys('bookmarks')
+    r = compare(repo, repo._bookmarks, other.listkeys('bookmarks'),
+                srchex=hex)
+    addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = r
 
-    diff = sorted(set(smarks) - set(dmarks))
-    for k in diff:
-        mark = ui.debugflag and smarks[k] or smarks[k][:12]
-        ui.write("   %-25s %s\n" % (k, mark))
+    outgoings = []
+    if ui.debugflag:
+        getid = lambda id: id
+    else:
+        getid = lambda id: id[:12]
+    if ui.verbose:
+        def add(b, id, st):
+            outgoings.append("   %-25s %s %s\n" % (b, getid(id), st))
+    else:
+        def add(b, id, st):
+            outgoings.append("   %-25s %s\n" % (b, getid(id)))
+    for b, scid, dcid in addsrc:
+        add(b, scid, _('added'))
+    for b, scid, dcid in adddst:
+        add(b, ' ' * 40, _('deleted'))
+    for b, scid, dcid in advsrc:
+        add(b, scid, _('advanced'))
+    for b, scid, dcid in diverge:
+        add(b, scid, _('diverged'))
+    for b, scid, dcid in differ:
+        add(b, scid, _('changed'))
 
-    if len(diff) <= 0:
+    if not outgoings:
         ui.status(_("no changed bookmarks found\n"))
         return 1
+
+    for s in sorted(outgoings):
+        ui.write(s)
+
     return 0
 
+def summary(repo, other):
+    '''Compare bookmarks between repo and other for "hg summary" output
+
+    This returns "(# of incoming, # of outgoing)" tuple.
+    '''
+    r = compare(repo, other.listkeys('bookmarks'), repo._bookmarks,
+                dsthex=hex)
+    addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = r
+    return (len(addsrc), len(adddst))
+
 def validdest(repo, old, new):
     """Is the new bookmark destination a valid update from the old one"""
     repo = repo.unfiltered()
@@ -456,5 +543,5 @@
     elif repo.obsstore:
         return new.node() in obsolete.foreground(repo, [old.node()])
     else:
-        # still an independent clause as it is lazyer (and therefore faster)
+        # still an independent clause as it is lazier (and therefore faster)
         return old.descendant(new)
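
The bookmarks hunks above make _diverge reuse an existing "@number" name when it already points at the incoming node, and report failure (None) once the 99 numbered slots are exhausted. A standalone sketch of just that numbering logic (the "@pathalias" branch is omitted):

def divergedname(name, localmarks, remotenode):
    if name == '@':
        name = ''
    for x in range(1, 100):
        candidate = '%s@%d' % (name, x)
        # free slot, or an existing slot that already points at the remote node
        if candidate not in localmarks or localmarks[candidate] == remotenode:
            return candidate
    return None                      # no usable numbered name left

marks = {'feature@1': 'aaa', 'feature@2': 'bbb'}
print(divergedname('feature', marks, 'bbb'))    # reuses feature@2
print(divergedname('feature', marks, 'ccc'))    # picks feature@3
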
--- a/mercurial/branchmap.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/branchmap.py	Thu Apr 16 20:57:51 2015 -0500
@@ -7,6 +7,7 @@
 
 from node import bin, hex, nullid, nullrev
 import encoding
+import scmutil
 import util
 import time
 from array import array
@@ -96,6 +97,7 @@
     if revs:
         partial.update(repo, revs)
         partial.write(repo)
+
     assert partial.validfor(repo), filtername
     repo._branchcaches[repo.filtername] = partial
 
@@ -134,28 +136,6 @@
             self._closednodes = set()
         else:
             self._closednodes = closednodes
-        self._revbranchcache = None
-
-    def _hashfiltered(self, repo):
-        """build hash of revision filtered in the current cache
-
-        Tracking tipnode and tiprev is not enough to ensure validity of the
-        cache as they do not help to distinct cache that ignored various
-        revision bellow tiprev.
-
-        To detect such difference, we build a cache of all ignored revisions.
-        """
-        cl = repo.changelog
-        if not cl.filteredrevs:
-            return None
-        key = None
-        revs = sorted(r for r in cl.filteredrevs if r <= self.tiprev)
-        if revs:
-            s = util.sha1()
-            for rev in revs:
-                s.update('%s;' % rev)
-            key = s.digest()
-        return key
 
     def validfor(self, repo):
         """Is the cache content valid regarding a repo
@@ -164,7 +144,8 @@
         - True when cache is up to date or a subset of current repo."""
         try:
             return ((self.tipnode == repo.changelog.node(self.tiprev))
-                    and (self.filteredhash == self._hashfiltered(repo)))
+                    and (self.filteredhash == \
+                         scmutil.filteredhash(repo, self.tiprev)))
         except IndexError:
             return False
 
@@ -226,9 +207,6 @@
             repo.ui.debug("couldn't write branch cache: %s\n" % inst)
             # Abort may be raise by read only opener
             pass
-        if self._revbranchcache:
-            self._revbranchcache.write(repo.unfiltered())
-            self._revbranchcache = None
 
     def update(self, repo, revgen):
         """Given a branchhead cache, self, that may have extra nodes or be
@@ -239,12 +217,9 @@
         cl = repo.changelog
         # collect new branch entries
         newbranches = {}
-        urepo = repo.unfiltered()
-        self._revbranchcache = revbranchcache(urepo)
-        getbranchinfo = self._revbranchcache.branchinfo
-        ucl = urepo.changelog
+        getbranchinfo = repo.revbranchcache().branchinfo
         for r in revgen:
-            branch, closesbranch = getbranchinfo(ucl, r)
+            branch, closesbranch = getbranchinfo(r)
             newbranches.setdefault(branch, []).append(r)
             if closesbranch:
                 self._closednodes.add(cl.node(r))
@@ -289,7 +264,7 @@
                 if tiprev > self.tiprev:
                     self.tipnode = cl.node(tiprev)
                     self.tiprev = tiprev
-        self.filteredhash = self._hashfiltered(repo)
+        self.filteredhash = scmutil.filteredhash(repo, self.tiprev)
 
         duration = time.time() - starttime
         repo.ui.log('branchcache', 'updated %s branch cache in %.4f seconds\n',
@@ -332,6 +307,7 @@
 
     def __init__(self, repo, readonly=True):
         assert repo.filtername is None
+        self._repo = repo
         self._names = [] # branch names in local encoding with static index
         self._rbcrevs = array('c') # structs of type _rbcrecfmt
         self._rbcsnameslen = 0
@@ -340,8 +316,6 @@
             self._rbcsnameslen = len(bndata) # for verification before writing
             self._names = [encoding.tolocal(bn) for bn in bndata.split('\0')]
         except (IOError, OSError), inst:
-            repo.ui.debug("couldn't read revision branch cache names: %s\n" %
-                          inst)
             if readonly:
                 # don't try to use cache - fall back to the slow path
                 self.branchinfo = self._branchinfo
@@ -361,18 +335,16 @@
         self._rbcnamescount = len(self._names) # number of good names on disk
         self._namesreverse = dict((b, r) for r, b in enumerate(self._names))
 
-    def branchinfo(self, changelog, rev):
+    def branchinfo(self, rev):
         """Return branch name and close flag for rev, using and updating
         persistent cache."""
+        changelog = self._repo.changelog
         rbcrevidx = rev * _rbcrecsize
 
         # if requested rev is missing, add and populate all missing revs
         if len(self._rbcrevs) < rbcrevidx + _rbcrecsize:
-            first = len(self._rbcrevs) // _rbcrecsize
             self._rbcrevs.extend('\0' * (len(changelog) * _rbcrecsize -
                                          len(self._rbcrevs)))
-            for r in xrange(first, len(changelog)):
-                self._branchinfo(changelog, r)
 
         # fast path: extract data from cache, use it if node is matching
         reponode = changelog.node(rev)[:_rbcnodelen]
@@ -381,14 +353,22 @@
         close = bool(branchidx & _rbccloseflag)
         if close:
             branchidx &= _rbcbranchidxmask
-        if cachenode == reponode:
+        if cachenode == '\0\0\0\0':
+            pass
+        elif cachenode == reponode:
             return self._names[branchidx], close
+        else:
+            # rev/node map has changed, invalidate the cache from here up
+            truncate = rbcrevidx + _rbcrecsize
+            del self._rbcrevs[truncate:]
+            self._rbcrevslen = min(self._rbcrevslen, truncate)
+
         # fall back to slow path and make sure it will be written to disk
-        self._rbcrevslen = min(self._rbcrevslen, rev)
-        return self._branchinfo(changelog, rev)
+        return self._branchinfo(rev)
 
-    def _branchinfo(self, changelog, rev):
+    def _branchinfo(self, rev):
         """Retrieve branch info from changelog and update _rbcrevs"""
+        changelog = self._repo.changelog
         b, close = changelog.branchinfo(rev)
         if b in self._namesreverse:
             branchidx = self._namesreverse[b]
@@ -399,21 +379,28 @@
         reponode = changelog.node(rev)
         if close:
             branchidx |= _rbccloseflag
+        self._setcachedata(rev, reponode, branchidx)
+        return b, close
+
+    def _setcachedata(self, rev, node, branchidx):
+        """Writes the node's branch data to the in-memory cache data."""
         rbcrevidx = rev * _rbcrecsize
         rec = array('c')
-        rec.fromstring(pack(_rbcrecfmt, reponode, branchidx))
+        rec.fromstring(pack(_rbcrecfmt, node, branchidx))
         self._rbcrevs[rbcrevidx:rbcrevidx + _rbcrecsize] = rec
-        return b, close
+        self._rbcrevslen = min(self._rbcrevslen, rev)
 
-    def write(self, repo):
+        tr = self._repo.currenttransaction()
+        if tr:
+            tr.addfinalize('write-revbranchcache', self.write)
+
+    def write(self, tr=None):
         """Save branch cache if it is dirty."""
+        repo = self._repo
         if self._rbcnamescount < len(self._names):
             try:
                 if self._rbcnamescount != 0:
                     f = repo.vfs.open(_rbcnames, 'ab')
-                    # The position after open(x, 'a') is implementation defined-
-                    # see issue3543.  SEEK_END was added in 2.5
-                    f.seek(0, 2) #os.SEEK_END
                     if f.tell() == self._rbcsnameslen:
                         f.write('\0')
                     else:
@@ -438,9 +425,6 @@
             revs = min(len(repo.changelog), len(self._rbcrevs) // _rbcrecsize)
             try:
                 f = repo.vfs.open(_rbcrevs, 'ab')
-                # The position after open(x, 'a') is implementation defined-
-                # see issue3543.  SEEK_END was added in 2.5
-                f.seek(0, 2) #os.SEEK_END
                 if f.tell() != start:
                     repo.ui.debug("truncating %s to %s\n" % (_rbcrevs, start))
                     f.seek(start)
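
The branchmap hunks above make revbranchcache records lazily populated and self-validating: an all-zero node means "not computed yet", and a node mismatch invalidates the cache from that revision onward. A simplified dictionary-based sketch of that lookup (the real code packs fixed-size records into an array):

EMPTY = '\0\0\0\0'

def branchinfo(rev, cache, changelog):
    cachenode, branch = cache.get(rev, (EMPTY, None))
    reponode = changelog[rev][:4]
    if cachenode == reponode:
        return branch                           # fast path: cached record still valid
    if cachenode != EMPTY:
        for r in [k for k in cache if k >= rev]:
            del cache[r]                        # rev/node map changed: drop the tail
    branch = 'branch-of-%d' % rev               # slow path stand-in for changelog lookup
    cache[rev] = (reponode, branch)
    return branch

changelog = {0: '\x11' * 4 + 'rest', 1: '\x22' * 4 + 'rest'}
cache = {}
print(branchinfo(1, cache, changelog))
print(branchinfo(1, cache, changelog))          # second call hits the fast path
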
--- a/mercurial/bundle2.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/bundle2.py	Thu Apr 16 20:57:51 2015 -0500
@@ -145,6 +145,7 @@
 preserve.
 """
 
+import errno
 import sys
 import util
 import struct
@@ -161,8 +162,6 @@
 _pack = struct.pack
 _unpack = struct.unpack
 
-_magicstring = 'HG2Y'
-
 _fstreamparamsize = '>i'
 _fpartheadersize = '>i'
 _fparttypesize = '>B'
@@ -312,13 +311,17 @@
     except Exception, exc:
         for part in iterparts:
             # consume the bundle content
-            part.read()
+            part.seek(0, 2)
         # Small hack to let caller code distinguish exceptions from bundle2
         # processing from processing the old format. This is mostly
         # needed to handle different return codes to unbundle according to the
         # type of bundle. We should probably clean up or drop this return code
         # craziness in a future version.
         exc.duringunbundle2 = True
+        salvaged = []
+        if op.reply is not None:
+            salvaged = op.reply.salvageoutput()
+        exc._bundle2salvagedoutput = salvaged
         raise
     return op
 
@@ -358,13 +361,13 @@
         finally:
             if output is not None:
                 output = op.ui.popbuffer()
-        if output:
-            outpart = op.reply.newpart('b2x:output', data=output,
-                                       mandatory=False)
-            outpart.addparam('in-reply-to', str(part.id), mandatory=False)
+            if output:
+                outpart = op.reply.newpart('output', data=output,
+                                           mandatory=False)
+                outpart.addparam('in-reply-to', str(part.id), mandatory=False)
     finally:
         # consume the part content to not corrupt the stream.
-        part.read()
+        part.seek(0, 2)
 
 
 def decodecaps(blob):
@@ -409,6 +412,8 @@
     populate it. Then call `getchunks` to retrieve all the binary chunks of
     data that compose the bundle2 container."""
 
+    _magicstring = 'HG20'
+
     def __init__(self, ui, capabilities=()):
         self.ui = ui
         self._params = []
@@ -452,8 +457,8 @@
 
     # methods used to generate the bundle2 stream
     def getchunks(self):
-        self.ui.debug('start emission of %s stream\n' % _magicstring)
-        yield _magicstring
+        self.ui.debug('start emission of %s stream\n' % self._magicstring)
+        yield self._magicstring
         param = self._paramchunk()
         self.ui.debug('bundle parameter: %s\n' % param)
         yield _pack(_fstreamparamsize, len(param))
@@ -479,11 +484,25 @@
             blocks.append(par)
         return ' '.join(blocks)
 
+    def salvageoutput(self):
+        """return a list with a copy of all output parts in the bundle
+
+        This is meant to be used during error handling to make sure we preserve
+        server output"""
+        salvaged = []
+        for part in self._parts:
+            if part.type.startswith('output'):
+                salvaged.append(part.copy())
+        return salvaged
+
+
 class unpackermixin(object):
     """A mixin to extract bytes and struct data from a stream"""
 
     def __init__(self, fp):
         self._fp = fp
+        self._seekable = (util.safehasattr(fp, 'seek') and
+                          util.safehasattr(fp, 'tell'))
 
     def _unpack(self, format):
         """unpack this struct format from the stream"""
@@ -494,6 +513,43 @@
         """read exactly <size> bytes from the stream"""
         return changegroup.readexactly(self._fp, size)
 
+    def seek(self, offset, whence=0):
+        """move the underlying file pointer"""
+        if self._seekable:
+            return self._fp.seek(offset, whence)
+        else:
+            raise NotImplementedError(_('File pointer is not seekable'))
+
+    def tell(self):
+        """return the file offset, or None if file is not seekable"""
+        if self._seekable:
+            try:
+                return self._fp.tell()
+            except IOError, e:
+                if e.errno == errno.ESPIPE:
+                    self._seekable = False
+                else:
+                    raise
+        return None
+
+    def close(self):
+        """close underlying file"""
+        if util.safehasattr(self._fp, 'close'):
+            return self._fp.close()
+
+def getunbundler(ui, fp, header=None):
+    """return a valid unbundler object for a given header"""
+    if header is None:
+        header = changegroup.readexactly(fp, 4)
+    magic, version = header[0:2], header[2:4]
+    if magic != 'HG':
+        raise util.Abort(_('not a Mercurial bundle'))
+    unbundlerclass = formatmap.get(version)
+    if unbundlerclass is None:
+        raise util.Abort(_('unknown bundle version %s') % version)
+    unbundler = unbundlerclass(ui, fp)
+    ui.debug('start processing of %s stream\n' % header)
+    return unbundler
 
 class unbundle20(unpackermixin):
     """interpret a bundle2 stream
@@ -501,18 +557,10 @@
     This class is fed with a binary stream and yields parts through its
     `iterparts` methods."""
 
-    def __init__(self, ui, fp, header=None):
+    def __init__(self, ui, fp):
         """If header is specified, we do not read it out of the stream."""
         self.ui = ui
         super(unbundle20, self).__init__(fp)
-        if header is None:
-            header = self._readexact(4)
-            magic, version = header[0:2], header[2:4]
-            if magic != 'HG':
-                raise util.Abort(_('not a Mercurial bundle'))
-            if version != '2Y':
-                raise util.Abort(_('unknown bundle version %s') % version)
-        self.ui.debug('start processing of %s stream\n' % header)
 
     @util.propertycache
     def params(self):
@@ -564,6 +612,7 @@
         while headerblock is not None:
             part = unbundlepart(self.ui, headerblock, self._fp)
             yield part
+            part.seek(0, 2)
             headerblock = self._readpartheader()
         self.ui.debug('end of bundle2 stream\n')
 
@@ -580,6 +629,10 @@
             return self._readexact(headersize)
         return None
 
+    def compressed(self):
+        return False
+
+formatmap = {'20': unbundle20}
 
 class bundlepart(object):
     """A bundle2 part contains application level payload
@@ -618,6 +671,15 @@
         self._generated = None
         self.mandatory = mandatory
 
+    def copy(self):
+        """return a copy of the part
+
+        The new part has the very same content but no partid assigned yet.
+        Parts with generated data cannot be copied."""
+        assert not util.safehasattr(self.data, 'next')
+        return self.__class__(self.type, self._mandatoryparams,
+                              self._advisoryparams, self._data, self.mandatory)
+
     # methods used to defines the part content
     def __setdata(self, data):
         if self._generated is not None:
@@ -697,7 +759,7 @@
             # backup exception data for later
             exc_info = sys.exc_info()
             msg = 'unexpected error: %s' % exc
-            interpart = bundlepart('b2x:error:abort', [('message', msg)],
+            interpart = bundlepart('error:abort', [('message', msg)],
                                    mandatory=False)
             interpart.id = 0
             yield _pack(_fpayloadsize, -1)
@@ -801,6 +863,8 @@
         self._payloadstream = None
         self._readheader()
         self._mandatory = None
+        self._chunkindex = [] #(payload, file) position tuples for chunk starts
+        self._pos = 0
 
     def _fromheader(self, size):
         """return the next <size> byte from the header"""
@@ -826,6 +890,47 @@
         self.params.update(dict(self.advisoryparams))
         self.mandatorykeys = frozenset(p[0] for p in mandatoryparams)
 
+    def _payloadchunks(self, chunknum=0):
+        '''seek to specified chunk and start yielding data'''
+        if len(self._chunkindex) == 0:
+            assert chunknum == 0, 'Must start with chunk 0'
+            self._chunkindex.append((0, super(unbundlepart, self).tell()))
+        else:
+            assert chunknum < len(self._chunkindex), \
+                   'Unknown chunk %d' % chunknum
+            super(unbundlepart, self).seek(self._chunkindex[chunknum][1])
+
+        pos = self._chunkindex[chunknum][0]
+        payloadsize = self._unpack(_fpayloadsize)[0]
+        self.ui.debug('payload chunk size: %i\n' % payloadsize)
+        while payloadsize:
+            if payloadsize == flaginterrupt:
+                # interruption detection, the handler will now read a
+                # single part and process it.
+                interrupthandler(self.ui, self._fp)()
+            elif payloadsize < 0:
+                msg = 'negative payload chunk size: %i' %  payloadsize
+                raise error.BundleValueError(msg)
+            else:
+                result = self._readexact(payloadsize)
+                chunknum += 1
+                pos += payloadsize
+                if chunknum == len(self._chunkindex):
+                    self._chunkindex.append((pos,
+                                             super(unbundlepart, self).tell()))
+                yield result
+            payloadsize = self._unpack(_fpayloadsize)[0]
+            self.ui.debug('payload chunk size: %i\n' % payloadsize)
+
+    def _findchunk(self, pos):
+        '''for a given payload position, return a chunk number and offset'''
+        for chunk, (ppos, fpos) in enumerate(self._chunkindex):
+            if ppos == pos:
+                return chunk, 0
+            elif ppos > pos:
+                return chunk - 1, pos - self._chunkindex[chunk - 1][0]
+        raise ValueError('Unknown chunk')
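
_findchunk() is a linear scan over the (payload offset, file offset) pairs recorded by _payloadchunks(). The same lookup, worked on a toy index:

    def findchunk(chunkindex, pos):
        """Map a payload position to (chunk number, offset inside that chunk)."""
        for chunk, (ppos, fpos) in enumerate(chunkindex):
            if ppos == pos:
                return chunk, 0
            elif ppos > pos:
                return chunk - 1, pos - chunkindex[chunk - 1][0]
        raise ValueError('Unknown chunk')

    # chunks of 10, 20 and 5 payload bytes
    index = [(0, 100), (10, 114), (30, 138), (35, 147)]
    assert findchunk(index, 0) == (0, 0)
    assert findchunk(index, 12) == (1, 2)   # 2 bytes into the second chunk
    assert findchunk(index, 30) == (2, 0)
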
+
     def _readheader(self):
         """read the header and setup the object"""
         typesize = self._unpackheader(_fparttypesize)[0]
@@ -857,22 +962,7 @@
             advparams.append((self._fromheader(key), self._fromheader(value)))
         self._initparams(manparams, advparams)
         ## part payload
-        def payloadchunks():
-            payloadsize = self._unpack(_fpayloadsize)[0]
-            self.ui.debug('payload chunk size: %i\n' % payloadsize)
-            while payloadsize:
-                if payloadsize == flaginterrupt:
-                    # interruption detection, the handler will now read a
-                    # single part and process it.
-                    interrupthandler(self.ui, self._fp)()
-                elif payloadsize < 0:
-                    msg = 'negative payload chunk size: %i' %  payloadsize
-                    raise error.BundleValueError(msg)
-                else:
-                    yield self._readexact(payloadsize)
-                payloadsize = self._unpack(_fpayloadsize)[0]
-                self.ui.debug('payload chunk size: %i\n' % payloadsize)
-        self._payloadstream = util.chunkbuffer(payloadchunks())
+        self._payloadstream = util.chunkbuffer(self._payloadchunks())
         # we read the data, tell it
         self._initialized = True
 
@@ -886,13 +976,42 @@
             data = self._payloadstream.read(size)
         if size is None or len(data) < size:
             self.consumed = True
+        self._pos += len(data)
         return data
 
-capabilities = {'HG2Y': (),
-                'b2x:listkeys': (),
-                'b2x:pushkey': (),
+    def tell(self):
+        return self._pos
+
+    def seek(self, offset, whence=0):
+        if whence == 0:
+            newpos = offset
+        elif whence == 1:
+            newpos = self._pos + offset
+        elif whence == 2:
+            if not self.consumed:
+                self.read()
+            newpos = self._chunkindex[-1][0] - offset
+        else:
+            raise ValueError('Unknown whence value: %r' % (whence,))
+
+        if newpos > self._chunkindex[-1][0] and not self.consumed:
+            self.read()
+        if not 0 <= newpos <= self._chunkindex[-1][0]:
+            raise ValueError('Offset out of range')
+
+        if self._pos != newpos:
+            chunk, internaloffset = self._findchunk(newpos)
+            self._payloadstream = util.chunkbuffer(self._payloadchunks(chunk))
+            adjust = self.read(internaloffset)
+            if len(adjust) != internaloffset:
+                raise util.Abort(_('Seek failed\n'))
+            self._pos = newpos
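
The whence values follow the usual file-object convention (0 = absolute, 1 = relative to the current position, 2 = relative to the end), except that unbundlepart computes the end-relative case as end minus offset rather than end plus a negative offset. For comparison, the standard semantics on an ordinary in-memory stream:

    import io

    buf = io.BytesIO(b'0123456789')
    buf.seek(3)          # whence=0: absolute position 3
    buf.seek(2, 1)       # whence=1: two bytes further, now at 5
    buf.seek(-4, 2)      # whence=2: four bytes before the end, now at 6
    assert buf.tell() == 6
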
+
+capabilities = {'HG20': (),
+                'listkeys': (),
+                'pushkey': (),
                 'digests': tuple(sorted(util.DIGESTS.keys())),
-                'b2x:remote-changegroup': ('http', 'https'),
+                'remote-changegroup': ('http', 'https'),
                }
 
 def getrepocaps(repo, allowpushback=False):
@@ -901,29 +1020,29 @@
     Exists to allow extensions (like evolution) to mutate the capabilities.
     """
     caps = capabilities.copy()
-    caps['b2x:changegroup'] = tuple(sorted(changegroup.packermap.keys()))
+    caps['changegroup'] = tuple(sorted(changegroup.packermap.keys()))
     if obsolete.isenabled(repo, obsolete.exchangeopt):
         supportedformat = tuple('V%i' % v for v in obsolete.formats)
-        caps['b2x:obsmarkers'] = supportedformat
+        caps['obsmarkers'] = supportedformat
     if allowpushback:
-        caps['b2x:pushback'] = ()
+        caps['pushback'] = ()
     return caps
 
 def bundle2caps(remote):
     """return the bundle capabilities of a peer as dict"""
-    raw = remote.capable('bundle2-exp')
+    raw = remote.capable('bundle2')
     if not raw and raw != '':
         return {}
-    capsblob = urllib.unquote(remote.capable('bundle2-exp'))
+    capsblob = urllib.unquote(remote.capable('bundle2'))
     return decodecaps(capsblob)
 
 def obsmarkersversion(caps):
     """extract the list of supported obsmarkers versions from a bundle2caps dict
     """
-    obscaps = caps.get('b2x:obsmarkers', ())
+    obscaps = caps.get('obsmarkers', ())
     return [int(c[1:]) for c in obscaps if c.startswith('V')]
 
-@parthandler('b2x:changegroup', ('version',))
+@parthandler('changegroup', ('version',))
 def handlechangegroup(op, inpart):
     """apply a changegroup part on the repo
 
@@ -947,14 +1066,14 @@
     if op.reply is not None:
         # This is definitely not the final form of this
         # return. But one needs to start somewhere.
-        part = op.reply.newpart('b2x:reply:changegroup', mandatory=False)
+        part = op.reply.newpart('reply:changegroup', mandatory=False)
         part.addparam('in-reply-to', str(inpart.id), mandatory=False)
         part.addparam('return', '%i' % ret, mandatory=False)
     assert not inpart.read()
 
 _remotechangegroupparams = tuple(['url', 'size', 'digests'] +
     ['digest:%s' % k for k in util.DIGESTS.keys()])
-@parthandler('b2x:remote-changegroup', _remotechangegroupparams)
+@parthandler('remote-changegroup', _remotechangegroupparams)
 def handleremotechangegroup(op, inpart):
     """apply a bundle10 on the repo, given an url and validation information
 
@@ -976,7 +1095,7 @@
     except KeyError:
         raise util.Abort(_('remote-changegroup: missing "%s" param') % 'url')
     parsed_url = util.url(raw_url)
-    if parsed_url.scheme not in capabilities['b2x:remote-changegroup']:
+    if parsed_url.scheme not in capabilities['remote-changegroup']:
         raise util.Abort(_('remote-changegroup does not support %s urls') %
             parsed_url.scheme)
 
@@ -1016,7 +1135,7 @@
     if op.reply is not None:
         # This is definitely not the final form of this
         # return. But one needs to start somewhere.
-        part = op.reply.newpart('b2x:reply:changegroup')
+        part = op.reply.newpart('reply:changegroup')
         part.addparam('in-reply-to', str(inpart.id), mandatory=False)
         part.addparam('return', '%i' % ret, mandatory=False)
     try:
@@ -1026,13 +1145,13 @@
             (util.hidepassword(raw_url), str(e)))
     assert not inpart.read()
 
-@parthandler('b2x:reply:changegroup', ('return', 'in-reply-to'))
+@parthandler('reply:changegroup', ('return', 'in-reply-to'))
 def handlereplychangegroup(op, inpart):
     ret = int(inpart.params['return'])
     replyto = int(inpart.params['in-reply-to'])
     op.records.add('changegroup', {'return': ret}, replyto)
 
-@parthandler('b2x:check:heads')
+@parthandler('check:heads')
 def handlecheckheads(op, inpart):
     """check that head of the repo did not change
 
@@ -1048,13 +1167,13 @@
         raise error.PushRaced('repository changed while pushing - '
                               'please try again')
 
-@parthandler('b2x:output')
+@parthandler('output')
 def handleoutput(op, inpart):
     """forward output captured on the server to the client"""
     for line in inpart.read().splitlines():
         op.ui.write(('remote: %s\n' % line))
 
-@parthandler('b2x:replycaps')
+@parthandler('replycaps')
 def handlereplycaps(op, inpart):
     """Notify that a reply bundle should be created
 
@@ -1063,13 +1182,13 @@
     if op.reply is None:
         op.reply = bundle20(op.ui, caps)
 
-@parthandler('b2x:error:abort', ('message', 'hint'))
-def handlereplycaps(op, inpart):
+@parthandler('error:abort', ('message', 'hint'))
+def handleerrorabort(op, inpart):
     """Used to transmit abort error over the wire"""
     raise util.Abort(inpart.params['message'], hint=inpart.params.get('hint'))
 
-@parthandler('b2x:error:unsupportedcontent', ('parttype', 'params'))
-def handlereplycaps(op, inpart):
+@parthandler('error:unsupportedcontent', ('parttype', 'params'))
+def handleerrorunsupportedcontent(op, inpart):
     """Used to transmit unknown content error over the wire"""
     kwargs = {}
     parttype = inpart.params.get('parttype')
@@ -1081,19 +1200,19 @@
 
     raise error.UnsupportedPartError(**kwargs)
 
-@parthandler('b2x:error:pushraced', ('message',))
-def handlereplycaps(op, inpart):
+@parthandler('error:pushraced', ('message',))
+def handleerrorpushraced(op, inpart):
     """Used to transmit push race error over the wire"""
     raise error.ResponseError(_('push failed:'), inpart.params['message'])
 
-@parthandler('b2x:listkeys', ('namespace',))
+@parthandler('listkeys', ('namespace',))
 def handlelistkeys(op, inpart):
     """retrieve pushkey namespace content stored in a bundle2"""
     namespace = inpart.params['namespace']
     r = pushkey.decodekeys(inpart.read())
     op.records.add('listkeys', (namespace, r))
 
-@parthandler('b2x:pushkey', ('namespace', 'key', 'old', 'new'))
+@parthandler('pushkey', ('namespace', 'key', 'old', 'new'))
 def handlepushkey(op, inpart):
     """process a pushkey request"""
     dec = pushkey.decode
@@ -1108,32 +1227,36 @@
               'new': new}
     op.records.add('pushkey', record)
     if op.reply is not None:
-        rpart = op.reply.newpart('b2x:reply:pushkey')
+        rpart = op.reply.newpart('reply:pushkey')
         rpart.addparam('in-reply-to', str(inpart.id), mandatory=False)
         rpart.addparam('return', '%i' % ret, mandatory=False)
 
-@parthandler('b2x:reply:pushkey', ('return', 'in-reply-to'))
+@parthandler('reply:pushkey', ('return', 'in-reply-to'))
 def handlepushkeyreply(op, inpart):
     """retrieve the result of a pushkey request"""
     ret = int(inpart.params['return'])
     partid = int(inpart.params['in-reply-to'])
     op.records.add('pushkey', {'return': ret}, partid)
 
-@parthandler('b2x:obsmarkers')
+@parthandler('obsmarkers')
 def handleobsmarker(op, inpart):
     """add a stream of obsmarkers to the repo"""
     tr = op.gettransaction()
-    new = op.repo.obsstore.mergemarkers(tr, inpart.read())
+    markerdata = inpart.read()
+    if op.ui.config('experimental', 'obsmarkers-exchange-debug', False):
+        op.ui.write(('obsmarker-exchange: %i bytes received\n')
+                    % len(markerdata))
+    new = op.repo.obsstore.mergemarkers(tr, markerdata)
     if new:
         op.repo.ui.status(_('%i new obsolescence markers\n') % new)
     op.records.add('obsmarkers', {'new': new})
     if op.reply is not None:
-        rpart = op.reply.newpart('b2x:reply:obsmarkers')
+        rpart = op.reply.newpart('reply:obsmarkers')
         rpart.addparam('in-reply-to', str(inpart.id), mandatory=False)
         rpart.addparam('new', '%i' % new, mandatory=False)
 
 
-@parthandler('b2x:reply:obsmarkers', ('new', 'in-reply-to'))
+@parthandler('reply:obsmarkers', ('new', 'in-reply-to'))
 def handlepushkeyreply(op, inpart):
     """retrieve the result of a pushkey request"""
     ret = int(inpart.params['new'])
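
All part handlers above lose their experimental 'b2x:' prefix. The @parthandler registration they rely on can be mimicked with a plain dict; a simplified sketch (not the real bundle2 implementation, handler names are illustrative):

    handlers = {}

    def parthandler(parttype, params=()):
        """Register a function as the handler for one bundle2 part type."""
        def decorator(func):
            handlers[parttype] = (func, frozenset(params))
            return func
        return decorator

    @parthandler('output')
    def handleoutput(op, data):
        print('remote: %s' % data)

    # dispatching a part is then a dict lookup by (unprefixed) part type
    func, knownparams = handlers['output']
    func(None, 'hello')
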
--- a/mercurial/bundlerepo.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/bundlerepo.py	Thu Apr 16 20:57:51 2015 -0500
@@ -15,7 +15,7 @@
 from i18n import _
 import os, tempfile, shutil
 import changegroup, util, mdiff, discovery, cmdutil, scmutil, exchange
-import localrepo, changelog, manifest, filelog, revlog, error, phases
+import localrepo, changelog, manifest, filelog, revlog, error, phases, bundle2
 
 class bundlerevlog(revlog.revlog):
     def __init__(self, opener, indexfile, bundle, linkmapper):
@@ -177,9 +177,6 @@
     def baserevision(self, nodeorrev):
         return filelog.filelog.revision(self, nodeorrev)
 
-    def _file(self, f):
-        self._repo.file(f)
-
 class bundlepeer(localrepo.localpeer):
     def canpush(self):
         return False
@@ -219,7 +216,7 @@
 
         self.tempfile = None
         f = util.posixfile(bundlename, "rb")
-        self.bundle = exchange.readbundle(ui, f, bundlename)
+        self.bundlefile = self.bundle = exchange.readbundle(ui, f, bundlename)
         if self.bundle.compressed():
             fdtemp, temp = self.vfs.mkstemp(prefix="hg-bundle-",
                                             suffix=".hg10un")
@@ -237,7 +234,27 @@
                 fptemp.close()
 
             f = self.vfs.open(self.tempfile, mode="rb")
-            self.bundle = exchange.readbundle(ui, f, bundlename, self.vfs)
+            self.bundlefile = self.bundle = exchange.readbundle(ui, f,
+                                                                bundlename,
+                                                                self.vfs)
+
+        if isinstance(self.bundle, bundle2.unbundle20):
+            cgparts = [part for part in self.bundle.iterparts()
+                       if (part.type == 'changegroup')
+                       and (part.params.get('version', '01')
+                            in changegroup.packermap)]
+
+            if not cgparts:
+                raise util.Abort('No changegroups found')
+            version = cgparts[0].params.get('version', '01')
+            cgparts = [p for p in cgparts
+                       if p.params.get('version', '01') == version]
+            if len(cgparts) > 1:
+                raise NotImplementedError("Can't process multiple changegroups")
+            part = cgparts[0]
+
+            part.seek(0)
+            self.bundle = changegroup.packermap[version][1](part, 'UN')
 
         # dict with the mapping 'filename' -> position in the bundle
         self.bundlefilespos = {}
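
The new bundle2 branch above keeps exactly one changegroup part whose version is known to changegroup.packermap. The same selection logic, sketched over plain (parttype, params) tuples instead of real bundle2 parts:

    def pickchangegroup(parts, supported=('01', '02')):
        """parts is an iterable of (parttype, params) pairs, params a dict."""
        cgparts = [(t, p) for t, p in parts
                   if t == 'changegroup' and p.get('version', '01') in supported]
        if not cgparts:
            raise LookupError('no changegroup parts found')
        version = cgparts[0][1].get('version', '01')
        cgparts = [c for c in cgparts if c[1].get('version', '01') == version]
        if len(cgparts) > 1:
            raise NotImplementedError("can't process multiple changegroups")
        return cgparts[0]

    result = pickchangegroup([('listkeys', {}), ('changegroup', {'version': '02'})])
    assert result == ('changegroup', {'version': '02'})
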
@@ -303,7 +320,7 @@
 
     def close(self):
         """Close assigned bundle file immediately."""
-        self.bundle.close()
+        self.bundlefile.close()
         if self.tempfile is not None:
             self.vfs.unlink(self.tempfile)
         if self._tempparent:
@@ -409,7 +426,10 @@
             rheads = None
         else:
             cg = other.changegroupsubset(incoming, rheads, 'incoming')
-        bundletype = localrepo and "HG10BZ" or "HG10UN"
+        if localrepo:
+            bundletype = "HG10BZ"
+        else:
+            bundletype = "HG10UN"
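
The old `localrepo and "HG10BZ" or "HG10UN"` idiom rewritten above (and in several other hunks of this change) silently misbehaves whenever the middle operand is falsy, which is presumably why the explicit if/else form is used throughout:

    prefix = ''
    # and/or idiom: yields 'default' even though the condition is true,
    # because the falsy middle operand ('') is skipped over
    broken = True and prefix or 'default'
    assert broken == 'default'

    # the explicit form keeps the intended value
    if True:
        fixed = prefix
    else:
        fixed = 'default'
    assert fixed == ''
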
         fname = bundle = changegroup.writebundle(ui, cg, bundlename, bundletype)
         # keep written bundle?
         if bundlename:
--- a/mercurial/byterange.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/byterange.py	Thu Apr 16 20:57:51 2015 -0500
@@ -274,7 +274,11 @@
             dirs = dirs[1:]
         try:
             fw = self.connect_ftp(user, passwd, host, port, dirs)
-            type = file and 'I' or 'D'
+            if file:
+                type = 'I'
+            else:
+                type = 'D'
+
             for attr in attrs:
                 attr, value = splitattr(attr)
                 if attr.lower() == 'type' and \
--- a/mercurial/changegroup.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/changegroup.py	Thu Apr 16 20:57:51 2015 -0500
@@ -71,7 +71,7 @@
     "": ("", nocompress), # only when using unbundle on ssh and old http servers
                           # since the unification ssh accepts a header but there
                           # is no capability signaling it.
-    "HG2Y": (), # special-cased below
+    "HG20": (), # special-cased below
     "HG10UN": ("HG10UN", nocompress),
     "HG10BZ": ("HG10", lambda: bz2.BZ2Compressor()),
     "HG10GZ": ("HG10GZ", lambda: zlib.compressobj()),
@@ -102,16 +102,17 @@
             fh = os.fdopen(fd, "wb")
         cleanup = filename
 
-        if bundletype == "HG2Y":
+        if bundletype == "HG20":
             import bundle2
             bundle = bundle2.bundle20(ui)
-            part = bundle.newpart('b2x:changegroup', data=cg.getchunks())
+            part = bundle.newpart('changegroup', data=cg.getchunks())
             part.addparam('version', cg.version)
             z = nocompress()
             chunkiter = bundle.getchunks()
         else:
             if cg.version != '01':
-                raise util.Abort(_('Bundle1 only supports v1 changegroups\n'))
+                raise util.Abort(_('old bundle types only support v1 '
+                                   'changegroups'))
             header, compressor = bundletypes[bundletype]
             fh.write(header)
             z = compressor()
@@ -481,7 +482,17 @@
         base = self.deltaparent(revlog, rev, p1, p2, prev)
 
         prefix = ''
-        if base == nullrev:
+        if revlog.iscensored(base) or revlog.iscensored(rev):
+            try:
+                delta = revlog.revision(node)
+            except error.CensoredNodeError, e:
+                delta = e.tombstone
+            if base == nullrev:
+                prefix = mdiff.trivialdiffheader(len(delta))
+            else:
+                baselen = revlog.rawsize(base)
+                prefix = mdiff.replacediffheader(baselen, len(delta))
+        elif base == nullrev:
             delta = revlog.revision(node)
             prefix = mdiff.trivialdiffheader(len(delta))
         else:
@@ -659,8 +670,11 @@
         pr()
         fl = repo.file(f)
         o = len(fl)
-        if not fl.addgroup(source, revmap, trp):
-            raise util.Abort(_("received file revlog group is empty"))
+        try:
+            if not fl.addgroup(source, revmap, trp):
+                raise util.Abort(_("received file revlog group is empty"))
+        except error.CensoredBaseError, e:
+            raise util.Abort(_("received delta base is censored: %s") % e)
         revisions += len(fl) - o
         files += 1
         if f in needfiles:
@@ -877,6 +891,7 @@
 
     finally:
         tr.release()
+        repo.ui.flush()
     # never return 0 here:
     if dh < 0:
         return dh - 1
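
bundletypes maps a bundle type name to an on-disk header plus a compressor factory, with "HG20" special-cased because bundle2 produces its own framing. A stripped-down sketch of the v1-style path (hypothetical helper, stdlib compressors only):

    import bz2
    import io
    import zlib

    class nocompressor(object):
        def compress(self, data):
            return data
        def flush(self):
            return b''

    bundletypes = {
        'HG10UN': (b'HG10UN', nocompressor),
        'HG10BZ': (b'HG10', bz2.BZ2Compressor),
        'HG10GZ': (b'HG10GZ', zlib.compressobj),
    }

    def writechunks(fh, chunks, bundletype='HG10BZ'):
        header, compressor = bundletypes[bundletype]
        fh.write(header)
        z = compressor()
        for chunk in chunks:
            fh.write(z.compress(chunk))
        fh.write(z.flush())

    writechunks(io.BytesIO(), [b'chunk one', b'chunk two'])
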
--- a/mercurial/changelog.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/changelog.py	Thu Apr 16 20:57:51 2015 -0500
@@ -143,6 +143,11 @@
             if i not in self.filteredrevs:
                 return self.node(i)
 
+    def __contains__(self, rev):
+        """filtered version of revlog.__contains__"""
+        return (0 <= rev < len(self)
+                and rev not in self.filteredrevs)
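
The new changelog.__contains__ combines a range check with the filtered-revision set, so `rev in changelog` now respects hidden revisions. The same idea in isolation:

    class filteredrange(object):
        """Toy stand-in: revisions 0..length-1 minus an excluded set."""
        def __init__(self, length, filteredrevs):
            self._length = length
            self.filteredrevs = frozenset(filteredrevs)

        def __len__(self):
            return self._length

        def __contains__(self, rev):
            return 0 <= rev < len(self) and rev not in self.filteredrevs

    cl = filteredrange(5, [3])
    assert 2 in cl
    assert 3 not in cl     # filtered (e.g. hidden) revision
    assert 5 not in cl     # out of range
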
+
     def __iter__(self):
         """filtered version of revlog.__iter__"""
         if len(self.filteredrevs) == 0:
--- a/mercurial/cmdutil.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/cmdutil.py	Thu Apr 16 20:57:51 2015 -0500
@@ -7,18 +7,207 @@
 
 from node import hex, nullid, nullrev, short
 from i18n import _
-import os, sys, errno, re, tempfile
+import os, sys, errno, re, tempfile, cStringIO, shutil
 import util, scmutil, templater, patch, error, templatekw, revlog, copies
 import match as matchmod
 import context, repair, graphmod, revset, phases, obsolete, pathutil
 import changelog
 import bookmarks
 import encoding
+import crecord as crecordmod
 import lock as lockmod
 
 def parsealiases(cmd):
     return cmd.lstrip("^").split("|")
 
+def setupwrapcolorwrite(ui):
+    # wrap ui.write so diff output can be labeled/colorized
+    def wrapwrite(orig, *args, **kw):
+        label = kw.pop('label', '')
+        for chunk, l in patch.difflabel(lambda: args):
+            orig(chunk, label=label + l)
+
+    oldwrite = ui.write
+    def wrap(*args, **kwargs):
+        return wrapwrite(oldwrite, *args, **kwargs)
+    setattr(ui, 'write', wrap)
+    return oldwrite
+
+def filterchunks(ui, originalhunks, usecurses, testfile):
+    if usecurses:
+        if testfile:
+            recordfn = crecordmod.testdecorator(testfile,
+                                                crecordmod.testchunkselector)
+        else:
+            recordfn = crecordmod.chunkselector
+
+        return crecordmod.filterpatch(ui, originalhunks, recordfn)
+
+    else:
+        return patch.filterpatch(ui, originalhunks)
+
+def recordfilter(ui, originalhunks):
+    usecurses = ui.configbool('experimental', 'crecord', False)
+    testfile = ui.config('experimental', 'crecordtest', None)
+    oldwrite = setupwrapcolorwrite(ui)
+    try:
+        newchunks = filterchunks(ui, originalhunks, usecurses, testfile)
+    finally:
+        ui.write = oldwrite
+    return newchunks
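
setupwrapcolorwrite() monkey-patches ui.write and recordfilter() restores the original in a finally block. The general wrap-and-restore shape, sketched without any Mercurial objects:

    class fakeui(object):
        def write(self, msg):
            print(msg)

    def wrapwrite(ui, decorate):
        """Replace ui.write with a decorated version; return the original."""
        oldwrite = ui.write
        def wrapped(msg):
            oldwrite(decorate(msg))
        ui.write = wrapped
        return oldwrite

    ui = fakeui()
    oldwrite = wrapwrite(ui, lambda m: '[diff] ' + m)
    try:
        ui.write('hello')          # prints "[diff] hello"
    finally:
        ui.write = oldwrite        # always restore, as recordfilter() does
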
+
+def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
+            filterfn, *pats, **opts):
+    import merge as mergemod
+    hunkclasses = (crecordmod.uihunk, patch.recordhunk)
+    ishunk = lambda x: isinstance(x, hunkclasses)
+
+    if not ui.interactive():
+        raise util.Abort(_('running non-interactively, use %s instead') %
+                         cmdsuggest)
+
+    # make sure username is set before going interactive
+    if not opts.get('user'):
+        ui.username() # raise exception, username not provided
+
+    def recordfunc(ui, repo, message, match, opts):
+        """This is the generic record driver.
+
+        Its job is to interactively filter local changes, and
+        accordingly prepare the working directory into a state in which the
+        job can be delegated to a non-interactive commit command such as
+        'commit' or 'qrefresh'.
+
+        After the actual job is done by the non-interactive command, the
+        working directory is restored to its original state.
+
+        In the end we'll record interesting changes, and everything else
+        will be left in place, so the user can continue working.
+        """
+
+        checkunfinished(repo, commit=True)
+        merge = len(repo[None].parents()) > 1
+        if merge:
+            raise util.Abort(_('cannot partially commit a merge '
+                               '(use "hg commit" instead)'))
+
+        status = repo.status(match=match)
+        diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
+        diffopts.nodates = True
+        diffopts.git = True
+        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
+        originalchunks = patch.parsepatch(originaldiff)
+
+        # 1. filter patch, so we have an intending-to-apply subset of it
+        try:
+            chunks = filterfn(ui, originalchunks)
+        except patch.PatchError, err:
+            raise util.Abort(_('error parsing patch: %s') % err)
+
+        contenders = set()
+        for h in chunks:
+            try:
+                contenders.update(set(h.files()))
+            except AttributeError:
+                pass
+
+        changed = status.modified + status.added + status.removed
+        newfiles = [f for f in changed if f in contenders]
+        if not newfiles:
+            ui.status(_('no changes to record\n'))
+            return 0
+
+        newandmodifiedfiles = set()
+        for h in chunks:
+            isnew = h.filename() in status.added
+            if ishunk(h) and isnew and h not in originalchunks:
+                newandmodifiedfiles.add(h.filename())
+
+        modified = set(status.modified)
+
+        # 2. backup changed files, so we can restore them in the end
+
+        if backupall:
+            tobackup = changed
+        else:
+            tobackup = [f for f in newfiles
+                        if f in modified or f in newandmodifiedfiles]
+
+        backups = {}
+        if tobackup:
+            backupdir = repo.join('record-backups')
+            try:
+                os.mkdir(backupdir)
+            except OSError, err:
+                if err.errno != errno.EEXIST:
+                    raise
+        try:
+            # backup continues
+            for f in tobackup:
+                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
+                                               dir=backupdir)
+                os.close(fd)
+                ui.debug('backup %r as %r\n' % (f, tmpname))
+                util.copyfile(repo.wjoin(f), tmpname)
+                shutil.copystat(repo.wjoin(f), tmpname)
+                backups[f] = tmpname
+
+            fp = cStringIO.StringIO()
+            for c in chunks:
+                fname = c.filename()
+                if fname in backups or fname in newandmodifiedfiles:
+                    c.write(fp)
+            dopatch = fp.tell()
+            fp.seek(0)
+
+            for c in newandmodifiedfiles:
+                os.unlink(c)
+
+            # 3a. apply filtered patch to clean repo  (clean)
+            if backups:
+                # Equivalent to hg.revert
+                choices = lambda key: key in backups
+                mergemod.update(repo, repo.dirstate.p1(),
+                        False, True, choices)
+
+            # 3b. (apply)
+            if dopatch:
+                try:
+                    ui.debug('applying patch\n')
+                    ui.debug(fp.getvalue())
+                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
+                except patch.PatchError, err:
+                    raise util.Abort(str(err))
+            del fp
+
+            # 4. We prepared working directory according to filtered
+            #    patch. Now is the time to delegate the job to
+            #    commit/qrefresh or the like!
+
+            # Make all of the pathnames absolute.
+            newfiles = [repo.wjoin(nf) for nf in newfiles]
+            return commitfunc(ui, repo, *newfiles, **opts)
+        finally:
+            # 5. finally restore backed-up files
+            try:
+                for realname, tmpname in backups.iteritems():
+                    ui.debug('restoring %r to %r\n' % (tmpname, realname))
+                    util.copyfile(tmpname, repo.wjoin(realname))
+                    # Our calls to copystat() here and above are a
+                    # hack to trick any editors that have f open into
+                    # thinking that we haven't modified it.
+                    #
+                    # Also note that this is racy, as an editor could
+                    # notice the file's mtime before we've finished
+                    # writing it.
+                    shutil.copystat(tmpname, repo.wjoin(realname))
+                    os.unlink(tmpname)
+                if tobackup:
+                    os.rmdir(backupdir)
+            except OSError:
+                pass
+
+    return commit(ui, repo, recordfunc, pats, opts)
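
Step 2 of recordfunc() above backs the affected files up with mkstemp(), copies content and stat, and puts everything back in the finally block. A stripped-down sketch of that backup/restore cycle using only the stdlib (shutil.copyfile standing in for util.copyfile):

    import os
    import shutil
    import tempfile

    def backupfiles(paths, backupdir):
        """Copy each file to a unique temp name; return {path: tmpname}."""
        backups = {}
        for path in paths:
            fd, tmpname = tempfile.mkstemp(prefix=path.replace('/', '_') + '.',
                                           dir=backupdir)
            os.close(fd)
            shutil.copyfile(path, tmpname)
            shutil.copystat(path, tmpname)  # keep mtime so editors stay quiet
            backups[path] = tmpname
        return backups

    def restorefiles(backups):
        for path, tmpname in backups.items():
            shutil.copyfile(tmpname, path)
            shutil.copystat(tmpname, path)
            os.unlink(tmpname)
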
+
 def findpossible(cmd, table, strict=False):
     """
     Return cmd -> (aliases, command table entry)
@@ -34,8 +223,10 @@
     else:
         keys = table.keys()
 
+    allcmds = []
     for e in keys:
         aliases = parsealiases(e)
+        allcmds.extend(aliases)
         found = None
         if cmd in aliases:
             found = cmd
@@ -53,11 +244,11 @@
     if not choice and debugchoice:
         choice = debugchoice
 
-    return choice
+    return choice, allcmds
 
 def findcmd(cmd, table, strict=True):
     """Return (aliases, command table entry) for command string."""
-    choice = findpossible(cmd, table, strict)
+    choice, allcmds = findpossible(cmd, table, strict)
 
     if cmd in choice:
         return choice[cmd]
@@ -70,7 +261,7 @@
     if choice:
         return choice.values()[0]
 
-    raise error.UnknownCommand(cmd)
+    raise error.UnknownCommand(cmd, allcmds)
 
 def findrepo(p):
     while not os.path.isdir(os.path.join(p, ".hg")):
@@ -80,16 +271,15 @@
 
     return p
 
-def bailifchanged(repo):
-    if repo.dirstate.p2() != nullid:
+def bailifchanged(repo, merge=True):
+    if merge and repo.dirstate.p2() != nullid:
         raise util.Abort(_('outstanding uncommitted merge'))
     modified, added, removed, deleted = repo.status()[:4]
     if modified or added or removed or deleted:
         raise util.Abort(_('uncommitted changes'))
     ctx = repo[None]
     for s in sorted(ctx.substate):
-        if ctx.sub(s).dirty():
-            raise util.Abort(_("uncommitted changes in subrepo %s") % s)
+        ctx.sub(s).bailifchanged()
 
 def logmessage(ui, opts):
     """ get the log message according to -m and -l option """
@@ -110,22 +300,22 @@
                              (logfile, inst.strerror))
     return message
 
-def mergeeditform(ctxorbool, baseform):
-    """build appropriate editform from ctxorbool and baseform
-
-    'ctxorbool' is one of a ctx to be committed, or a bool whether
+def mergeeditform(ctxorbool, baseformname):
+    """return appropriate editform name (referencing a committemplate)
+
+    'ctxorbool' is either a ctx to be committed, or a bool indicating whether
     merging is committed.
 
-    This returns editform 'baseform' with '.merge' if merging is
-    committed, or one with '.normal' suffix otherwise.
+    This returns baseformname with '.merge' appended if it is a merge,
+    otherwise '.normal' is appended.
     """
     if isinstance(ctxorbool, bool):
         if ctxorbool:
-            return baseform + ".merge"
+            return baseformname + ".merge"
     elif 1 < len(ctxorbool.parents()):
-        return baseform + ".merge"
-
-    return baseform + ".normal"
+        return baseformname + ".merge"
+
+    return baseformname + ".normal"
 
 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                     editform='', **opts):
@@ -225,7 +415,10 @@
     writable = mode not in ('r', 'rb')
 
     if not pat or pat == '-':
-        fp = writable and repo.ui.fout or repo.ui.fin
+        if writable:
+            fp = repo.ui.fout
+        else:
+            fp = repo.ui.fin
         if util.safehasattr(fp, 'fileno'):
             return os.fdopen(os.dup(fp.fileno()), mode)
         else:
@@ -301,7 +494,10 @@
 
     def walkpat(pat):
         srcs = []
-        badstates = after and '?' or '?r'
+        if after:
+            badstates = '?'
+        else:
+            badstates = '?r'
         m = scmutil.match(repo[None], [pat], opts, globbed=True)
         for abs in repo.walk(m):
             state = repo.dirstate[abs]
@@ -387,7 +583,7 @@
                 srcexists = True
             except IOError, inst:
                 if inst.errno == errno.ENOENT:
-                    ui.warn(_('%s: deleted in working copy\n') % relsrc)
+                    ui.warn(_('%s: deleted in working directory\n') % relsrc)
                     srcexists = False
                 else:
                     ui.warn(_('%s: cannot copy - %s\n') %
@@ -476,7 +672,6 @@
                     res = lambda p: dest
         return res
 
-
     pats = scmutil.expandpats(pats)
     if not pats:
         raise util.Abort(_('no source or destination specified'))
@@ -520,7 +715,10 @@
 
     def writepid(pid):
         if opts['pid_file']:
-            mode = appendpid and 'a' or 'w'
+            if appendpid:
+                mode = 'a'
+            else:
+                mode = 'w'
             fp = open(opts['pid_file'], mode)
             fp.write(str(pid) + '\n')
             fp.close()
@@ -613,6 +811,7 @@
 
     update = not opts.get('bypass')
     strip = opts["strip"]
+    prefix = opts["prefix"]
     sim = float(opts.get('similarity') or 0)
     if not tmpname:
         return (None, None, False)
@@ -672,8 +871,8 @@
             partial = opts.get('partial', False)
             files = set()
             try:
-                patch.patch(ui, repo, tmpname, strip=strip, files=files,
-                            eolmode=None, similarity=sim / 100.0)
+                patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
+                            files=files, eolmode=None, similarity=sim / 100.0)
             except patch.PatchError, e:
                 if not partial:
                     raise util.Abort(str(e))
@@ -710,7 +909,7 @@
             try:
                 files = set()
                 try:
-                    patch.patchrepo(ui, repo, p1, store, tmpname, strip,
+                    patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
                                     files, eolmode=None)
                 except patch.PatchError, e:
                     raise util.Abort(str(e))
@@ -755,7 +954,11 @@
         branch = ctx.branch()
         if switch_parent:
             parents.reverse()
-        prev = (parents and parents[0]) or nullid
+
+        if parents:
+            prev = parents[0]
+        else:
+            prev = nullid
 
         shouldclose = False
         if not fp and len(template) > 0:
@@ -775,7 +978,6 @@
             def write(s, **kw):
                 fp.write(s)
 
-
         write("# HG changeset patch\n")
         write("# User %s\n" % ctx.user())
         write("# Date %d %d\n" % ctx.date())
@@ -800,7 +1002,7 @@
 
 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
                    changes=None, stat=False, fp=None, prefix='',
-                   listsubrepos=False):
+                   root='', listsubrepos=False):
     '''show diff or diffstat.'''
     if fp is None:
         write = ui.write
@@ -808,20 +1010,35 @@
         def write(s, **kw):
             fp.write(s)
 
+    if root:
+        relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
+    else:
+        relroot = ''
+    if relroot != '':
+        # XXX relative roots currently don't work if the root is within a
+        # subrepo
+        uirelroot = match.uipath(relroot)
+        relroot += '/'
+        for matchroot in match.files():
+            if not matchroot.startswith(relroot):
+                ui.warn(_('warning: %s not inside relative root %s\n') % (
+                    match.uipath(matchroot), uirelroot))
+
     if stat:
         diffopts = diffopts.copy(context=0)
         width = 80
         if not ui.plain():
             width = ui.termwidth()
         chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
-                            prefix=prefix)
+                            prefix=prefix, relroot=relroot)
         for chunk, label in patch.diffstatui(util.iterlines(chunks),
                                              width=width,
                                              git=diffopts.git):
             write(chunk, label=label)
     else:
         for chunk, label in patch.diffui(repo, node1, node2, match,
-                                         changes, diffopts, prefix=prefix):
+                                         changes, diffopts, prefix=prefix,
+                                         relroot=relroot):
             write(chunk, label=label)
 
     if listsubrepos:
@@ -884,22 +1101,24 @@
         '''show a single changeset or file revision'''
         changenode = ctx.node()
         rev = ctx.rev()
+        if self.ui.debugflag:
+            hexfunc = hex
+        else:
+            hexfunc = short
+        if rev is None:
+            pctx = ctx.p1()
+            revnode = (pctx.rev(), hexfunc(pctx.node()) + '+')
+        else:
+            revnode = (rev, hexfunc(changenode))
 
         if self.ui.quiet:
-            self.ui.write("%d:%s\n" % (rev, short(changenode)),
-                          label='log.node')
+            self.ui.write("%d:%s\n" % revnode, label='log.node')
             return
 
-        log = self.repo.changelog
         date = util.datestr(ctx.date())
 
-        hexfunc = self.ui.debugflag and hex or short
-
-        parents = [(p, hexfunc(log.node(p)))
-                   for p in self._meaningful_parentrevs(log, rev)]
-
         # i18n: column positioning for "hg log"
-        self.ui.write(_("changeset:   %d:%s\n") % (rev, hexfunc(changenode)),
+        self.ui.write(_("changeset:   %d:%s\n") % revnode,
                       label='log.changeset changeset.%s' % ctx.phasestr())
 
         # branches are shown first before any other names due to backwards
@@ -925,13 +1144,14 @@
             # i18n: column positioning for "hg log"
             self.ui.write(_("phase:       %s\n") % _(ctx.phasestr()),
                           label='log.phase')
-        for parent in parents:
-            label = 'log.parent changeset.%s' % self.repo[parent[0]].phasestr()
+        for pctx in self._meaningful_parentrevs(ctx):
+            label = 'log.parent changeset.%s' % pctx.phasestr()
             # i18n: column positioning for "hg log"
-            self.ui.write(_("parent:      %d:%s\n") % parent,
+            self.ui.write(_("parent:      %d:%s\n")
+                          % (pctx.rev(), hexfunc(pctx.node())),
                           label=label)
 
-        if self.ui.debugflag:
+        if self.ui.debugflag and rev is not None:
             mnode = ctx.manifestnode()
             # i18n: column positioning for "hg log"
             self.ui.write(_("manifest:    %d:%s\n") %
@@ -945,7 +1165,7 @@
                       label='log.date')
 
         if self.ui.debugflag:
-            files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
+            files = ctx.p1().status(ctx)[:3]
             for key, value in zip([# i18n: column positioning for "hg log"
                                    _("files:"),
                                    # i18n: column positioning for "hg log"
@@ -1008,19 +1228,20 @@
                                match=matchfn, stat=False)
             self.ui.write("\n")
 
-    def _meaningful_parentrevs(self, log, rev):
+    def _meaningful_parentrevs(self, ctx):
         """Return list of meaningful (or all if debug) parentrevs for rev.
 
         For merges (two non-nullrev revisions) both parents are meaningful.
         Otherwise the first parent revision is considered meaningful if it
         is not the preceding revision.
         """
-        parents = log.parentrevs(rev)
-        if not self.ui.debugflag and parents[1] == nullrev:
-            if parents[0] >= rev - 1:
-                parents = []
-            else:
-                parents = [parents[0]]
+        parents = ctx.parents()
+        if len(parents) > 1:
+            return parents
+        if self.ui.debugflag:
+            return [parents[0], self.repo['null']]
+        if parents[0].rev() >= scmutil.intrev(self.repo, ctx.rev()) - 1:
+            return []
         return parents
 
 class jsonchangeset(changeset_printer):
@@ -1039,8 +1260,12 @@
 
     def _show(self, ctx, copies, matchfn, props):
         '''show a single changeset or file revision'''
-        hexnode = hex(ctx.node())
         rev = ctx.rev()
+        if rev is None:
+            jrev = jnode = 'null'
+        else:
+            jrev = str(rev)
+            jnode = '"%s"' % hex(ctx.node())
         j = encoding.jsonescape
 
         if self._first:
@@ -1050,13 +1275,13 @@
             self.ui.write(",\n {")
 
         if self.ui.quiet:
-            self.ui.write('\n  "rev": %d' % rev)
-            self.ui.write(',\n  "node": "%s"' % hexnode)
+            self.ui.write('\n  "rev": %s' % jrev)
+            self.ui.write(',\n  "node": %s' % jnode)
             self.ui.write('\n }')
             return
 
-        self.ui.write('\n  "rev": %d' % rev)
-        self.ui.write(',\n  "node": "%s"' % hexnode)
+        self.ui.write('\n  "rev": %s' % jrev)
+        self.ui.write(',\n  "node": %s' % jnode)
         self.ui.write(',\n  "branch": "%s"' % j(ctx.branch()))
         self.ui.write(',\n  "phase": "%s"' % ctx.phasestr())
         self.ui.write(',\n  "user": "%s"' % j(ctx.user()))
@@ -1071,7 +1296,11 @@
                       ", ".join('"%s"' % c.hex() for c in ctx.parents()))
 
         if self.ui.debugflag:
-            self.ui.write(',\n  "manifest": "%s"' % hex(ctx.manifestnode()))
+            if rev is None:
+                jmanifestnode = 'null'
+            else:
+                jmanifestnode = '"%s"' % hex(ctx.manifestnode())
+            self.ui.write(',\n  "manifest": %s' % jmanifestnode)
 
             self.ui.write(',\n  "extra": {%s}' %
                           ", ".join('"%s": "%s"' % (j(k), j(v))
@@ -1134,18 +1363,6 @@
 
         self.cache = {}
 
-    def _meaningful_parentrevs(self, ctx):
-        """Return list of meaningful (or all if debug) parentrevs for rev.
-        """
-        parents = ctx.parents()
-        if len(parents) > 1:
-            return parents
-        if self.ui.debugflag:
-            return [parents[0], self.repo['null']]
-        if parents[0].rev() >= ctx.rev() - 1:
-            return []
-        return parents
-
     def _show(self, ctx, copies, matchfn, props):
         '''show a single changeset or file revision'''
 
@@ -1429,7 +1646,6 @@
         else:
             last = filelog.rev(node)
 
-
         # keep track of all ancestors of the file
         ancestors = set([filelog.linkrev(last)])
 
@@ -1457,6 +1673,44 @@
 
     return wanted
 
+class _followfilter(object):
+    def __init__(self, repo, onlyfirst=False):
+        self.repo = repo
+        self.startrev = nullrev
+        self.roots = set()
+        self.onlyfirst = onlyfirst
+
+    def match(self, rev):
+        def realparents(rev):
+            if self.onlyfirst:
+                return self.repo.changelog.parentrevs(rev)[0:1]
+            else:
+                return filter(lambda x: x != nullrev,
+                              self.repo.changelog.parentrevs(rev))
+
+        if self.startrev == nullrev:
+            self.startrev = rev
+            return True
+
+        if rev > self.startrev:
+            # forward: all descendants
+            if not self.roots:
+                self.roots.add(self.startrev)
+            for parent in realparents(rev):
+                if parent in self.roots:
+                    self.roots.add(rev)
+                    return True
+        else:
+            # backwards: all parents
+            if not self.roots:
+                self.roots.update(realparents(self.startrev))
+            if rev in self.roots:
+                self.roots.remove(rev)
+                self.roots.update(realparents(rev))
+                return True
+
+        return False
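
_followfilter walks forward (descendants of the start rev) when revisions arrive above the start, and backward (ancestors) otherwise. A toy run against a hand-built parent table, standing in for repo.changelog.parentrevs:

    nullrev = -1
    # rev -> (p1, p2); rev 5 branches off the root, the rest descend from 1
    parenttable = {0: (nullrev, nullrev), 1: (0, nullrev), 2: (1, nullrev),
                   3: (1, nullrev), 4: (3, nullrev), 5: (0, nullrev)}

    class toyfollowfilter(object):
        def __init__(self):
            self.startrev = nullrev
            self.roots = set()

        def realparents(self, rev):
            return [p for p in parenttable[rev] if p != nullrev]

        def match(self, rev):
            if self.startrev == nullrev:
                self.startrev = rev
                return True
            if rev > self.startrev:       # forward: descendants only
                if not self.roots:
                    self.roots.add(self.startrev)
                for parent in self.realparents(rev):
                    if parent in self.roots:
                        self.roots.add(rev)
                        return True
            else:                         # backward: ancestors only
                if not self.roots:
                    self.roots.update(self.realparents(self.startrev))
                if rev in self.roots:
                    self.roots.remove(rev)
                    self.roots.update(self.realparents(rev))
                    return True
            return False

    ff = toyfollowfilter()
    assert [r for r in (1, 2, 5, 3, 4) if ff.match(r)] == [1, 2, 3, 4]
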
+
 def walkchangerevs(repo, match, opts, prepare):
     '''Iterate over files and the revs in which they changed.
 
@@ -1473,14 +1727,7 @@
     function on each context in the window in forward order.'''
 
     follow = opts.get('follow') or opts.get('follow_first')
-
-    if opts.get('rev'):
-        revs = scmutil.revrange(repo, opts.get('rev'))
-    elif follow:
-        revs = repo.revs('reverse(:.)')
-    else:
-        revs = revset.spanset(repo)
-        revs.reverse()
+    revs = _logrevs(repo, opts)
     if not revs:
         return []
     wanted = set()
@@ -1493,7 +1740,7 @@
     # wanted: a cache of filenames that were changed (ctx.files()) and that
     # match the file filtering conditions.
 
-    if not slowpath and not match.files():
+    if match.always():
         # No files, no patterns.  Display all revs.
         wanted = revs
 
@@ -1552,48 +1799,11 @@
 
         wanted = lazywantedset()
 
-    class followfilter(object):
-        def __init__(self, onlyfirst=False):
-            self.startrev = nullrev
-            self.roots = set()
-            self.onlyfirst = onlyfirst
-
-        def match(self, rev):
-            def realparents(rev):
-                if self.onlyfirst:
-                    return repo.changelog.parentrevs(rev)[0:1]
-                else:
-                    return filter(lambda x: x != nullrev,
-                                  repo.changelog.parentrevs(rev))
-
-            if self.startrev == nullrev:
-                self.startrev = rev
-                return True
-
-            if rev > self.startrev:
-                # forward: all descendants
-                if not self.roots:
-                    self.roots.add(self.startrev)
-                for parent in realparents(rev):
-                    if parent in self.roots:
-                        self.roots.add(rev)
-                        return True
-            else:
-                # backwards: all parents
-                if not self.roots:
-                    self.roots.update(realparents(self.startrev))
-                if rev in self.roots:
-                    self.roots.remove(rev)
-                    self.roots.update(realparents(rev))
-                    return True
-
-            return False
-
     # it might be worthwhile to do this in the iterator if the rev range
     # is descending and the prune args are all within that range
     for rev in opts.get('prune', ()):
         rev = repo[rev].rev()
-        ff = followfilter()
+        ff = _followfilter(repo)
         stop = min(revs[0], revs[-1])
         for x in xrange(rev, stop - 1, -1):
             if ff.match(x):
@@ -1603,7 +1813,7 @@
     # revision range, yielding only revisions in wanted.
     def iterate():
         if follow and not match.files():
-            ff = followfilter(onlyfirst=opts.get('follow_first'))
+            ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
             def want(rev):
                 return ff.match(rev) and rev in wanted
         else:
@@ -1699,7 +1909,10 @@
     opts = dict(opts)
     # follow or not follow?
     follow = opts.get('follow') or opts.get('follow_first')
-    followfirst = opts.get('follow_first') and 1 or 0
+    if opts.get('follow_first'):
+        followfirst = 1
+    else:
+        followfirst = 0
     # --follow with FILE behaviour depends on revs...
     it = iter(revs)
     startrev = it.next()
@@ -1716,12 +1929,12 @@
     # _matchfiles() revset but walkchangerevs() builds its matcher with
     # scmutil.match(). The difference is input pats are globbed on
     # platforms without shell expansion (windows).
-    pctx = repo[None]
-    match, pats = scmutil.matchandpats(pctx, pats, opts)
+    wctx = repo[None]
+    match, pats = scmutil.matchandpats(wctx, pats, opts)
     slowpath = match.anypats() or (match.files() and opts.get('removed'))
     if not slowpath:
         for f in match.files():
-            if follow and f not in pctx:
+            if follow and f not in wctx:
                 # If the file exists, it may be a directory, so let it
                 # take the slow path.
                 if os.path.exists(repo.wjoin(f)):
@@ -1822,6 +2035,21 @@
         expr = None
     return expr, filematcher
 
+def _logrevs(repo, opts):
+    # Default --rev value depends on --follow but --follow behaviour
+    # depends on revisions resolved from --rev...
+    follow = opts.get('follow') or opts.get('follow_first')
+    if opts.get('rev'):
+        revs = scmutil.revrange(repo, opts['rev'])
+    elif follow and repo.dirstate.p1() == nullid:
+        revs = revset.baseset()
+    elif follow:
+        revs = repo.revs('reverse(:.)')
+    else:
+        revs = revset.spanset(repo)
+        revs.reverse()
+    return revs
+
 def getgraphlogrevs(repo, pats, opts):
     """Return (revs, expr, filematcher) where revs is an iterable of
     revision numbers, expr is a revset string built from log options
@@ -1830,28 +2058,14 @@
     callable taking a revision number and returning a match objects
     filtering the files to be detailed when displaying the revision.
     """
-    if not len(repo):
-        return [], None, None
     limit = loglimit(opts)
-    # Default --rev value depends on --follow but --follow behaviour
-    # depends on revisions resolved from --rev...
-    follow = opts.get('follow') or opts.get('follow_first')
-    possiblyunsorted = False # whether revs might need sorting
-    if opts.get('rev'):
-        revs = scmutil.revrange(repo, opts['rev'])
-        # Don't sort here because _makelogrevset might depend on the
-        # order of revs
-        possiblyunsorted = True
-    else:
-        if follow and len(repo) > 0:
-            revs = repo.revs('reverse(:.)')
-        else:
-            revs = revset.spanset(repo)
-            revs.reverse()
+    revs = _logrevs(repo, opts)
     if not revs:
         return revset.baseset(), None, None
     expr, filematcher = _makelogrevset(repo, pats, opts, revs)
-    if possiblyunsorted:
+    if opts.get('rev'):
+        # User-specified revs might be unsorted, but don't sort before
+        # _makelogrevset because it might depend on the order of revs
         revs.sort(reverse=True)
     if expr:
         # Revset matchers often operate faster on revisions in changelog
@@ -1882,16 +2096,7 @@
     filtering the files to be detailed when displaying the revision.
     """
     limit = loglimit(opts)
-    # Default --rev value depends on --follow but --follow behaviour
-    # depends on revisions resolved from --rev...
-    follow = opts.get('follow') or opts.get('follow_first')
-    if opts.get('rev'):
-        revs = scmutil.revrange(repo, opts['rev'])
-    elif follow:
-        revs = repo.revs('reverse(:.)')
-    else:
-        revs = revset.spanset(repo)
-        revs.reverse()
+    revs = _logrevs(repo, opts)
     if not revs:
         return revset.baseset([]), None, None
     expr, filematcher = _makelogrevset(repo, pats, opts, revs)
@@ -1930,6 +2135,8 @@
             char = '@'
         elif ctx.obsolete():
             char = 'x'
+        elif ctx.closesbranch():
+            char = '_'
         copies = None
         if getrenamed and ctx.rev():
             copies = []
@@ -2064,6 +2271,35 @@
     forgot.extend(f for f in forget if f not in rejected)
     return bad, forgot
 
+def files(ui, ctx, m, fm, fmt, subrepos):
+    rev = ctx.rev()
+    ret = 1
+    ds = ctx.repo().dirstate
+
+    for f in ctx.matches(m):
+        if rev is None and ds[f] == 'r':
+            continue
+        fm.startitem()
+        if ui.verbose:
+            fc = ctx[f]
+            fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
+        fm.data(abspath=f)
+        fm.write('path', fmt, m.rel(f))
+        ret = 0
+
+    if subrepos:
+        for subpath in sorted(ctx.substate):
+            sub = ctx.sub(subpath)
+            try:
+                submatch = matchmod.narrowmatcher(subpath, m)
+                if sub.printfiles(ui, submatch, fm, fmt) == 0:
+                    ret = 0
+            except error.LookupError:
+                ui.status(_("skipping missing subrepository: %s\n")
+                               % m.abs(subpath))
+
+    return ret
+
 def remove(ui, repo, m, prefix, after, force, subrepos):
     join = lambda f: os.path.join(prefix, f)
     ret = 0
@@ -2092,6 +2328,7 @@
                                % join(subpath))
 
     # warn about failure to delete explicit files/dirs
+    deleteddirs = util.dirs(deleted)
     for f in m.files():
         def insubrepo():
             for subpath in wctx.substate:
@@ -2099,7 +2336,8 @@
                     return True
             return False
 
-        if f in repo.dirstate or f in wctx.dirs() or f == '.' or insubrepo():
+        isdir = f in deleteddirs or f in wctx.dirs()
+        if f in repo.dirstate or isdir or f == '.' or insubrepo():
             continue
 
         if repo.wvfs.exists(f):
@@ -2164,8 +2402,8 @@
     if len(matcher.files()) == 1 and not matcher.anypats():
         file = matcher.files()[0]
         mf = repo.manifest
-        mfnode = ctx._changeset[0]
-        if mf.find(mfnode, file)[0]:
+        mfnode = ctx.manifestnode()
+        if mfnode and mf.find(mfnode, file)[0]:
             write(file)
             return 0
 
@@ -2220,7 +2458,7 @@
 def amend(ui, repo, commitfunc, old, extra, pats, opts):
     # amend will reuse the existing user if not specified, but the obsolete
     # marker creation requires that the current user's name is specified.
-    if obsolete._enabled:
+    if obsolete.isenabled(repo, obsolete.createmarkersopt):
         ui.username() # raise exception if username not set
 
     ui.note(_('amending changeset %s\n') % old)
@@ -2555,7 +2793,9 @@
 
     # need all matching names in dirstate and manifest of target rev,
     # so have to walk both. do not print errors if files exist in one
-    # but not other.
+    # but not the other. in both cases, filesets should be evaluated against
+    # workingctx to get a consistent result (issue4497). this means 'set:**'
+    # cannot be used to select missing files from target rev.
 
     # `names` is a mapping for all elements in working copy and target revision
     # The mapping is in the form:
@@ -2567,8 +2807,14 @@
         ## filling of the `names` mapping
         # walk dirstate to fill `names`
 
-        m = scmutil.match(repo[None], pats, opts)
-        if not m.always() or node != parent:
+        interactive = opts.get('interactive', False)
+        wctx = repo[None]
+        m = scmutil.match(wctx, pats, opts)
+
+        # we'll need this later
+        targetsubs = sorted(s for s in wctx.substate if m(s))
+
+        if not m.always():
             m.bad = lambda x, y: False
             for abs in repo.walk(m):
                 names[abs] = m.rel(abs), m.exact(abs)
@@ -2586,7 +2832,6 @@
                         return
                 ui.warn("%s: %s\n" % (m.rel(path), msg))
 
-            m = scmutil.match(ctx, pats, opts)
             m.bad = badfn
             for abs in ctx.walk(m):
                 if abs not in names:
@@ -2598,7 +2843,7 @@
             changes = repo.status(node1=node, match=m,
                                   unknown=True, ignored=True, clean=True)
         else:
-            changes = repo.status(match=m)
+            changes = repo.status(node1=node, match=m)
             for kind in changes:
                 for abs in kind:
                     names[abs] = m.rel(abs), m.exact(abs)
@@ -2621,9 +2866,8 @@
         deladded = _deleted - smf
         deleted = _deleted - deladded
 
-        # We need to account for the state of file in the dirstate.
-        #
-        # Even, when we revert against something else than parent. This will
+        # We need to account for the state of the file in the dirstate,
+        # even when we revert against something other than the parent. This will
         # slightly alter the behavior of revert (doing back up or not, delete
         # or just forget etc).
         if parent == node:
@@ -2772,7 +3016,6 @@
             (unknown,       actions['unknown'],  discard),
             )
 
-        wctx = repo[None]
         for abs, (rel, exact) in sorted(names.items()):
             # target file to be touch on disk (relative to cwd)
             target = repo.wjoin(abs)
@@ -2790,7 +3033,10 @@
                             ui.note(_('saving current version of %s as %s\n') %
                                     (rel, bakname))
                             if not opts.get('dry_run'):
-                                util.rename(target, bakname)
+                                if interactive:
+                                    util.copyfile(target, bakname)
+                                else:
+                                    util.rename(target, bakname)
                     if ui.verbose or not exact:
                         if not isinstance(msg, basestring):
                             msg = msg(abs)
@@ -2799,21 +3045,19 @@
                     ui.warn(msg % rel)
                 break
 
-
         if not opts.get('dry_run'):
             needdata = ('revert', 'add', 'undelete')
             _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
-
-            _performrevert(repo, parents, ctx, actions)
-
-            # get the list of subrepos that must be reverted
-            subrepomatch = scmutil.match(ctx, pats, opts)
-            targetsubs = sorted(s for s in ctx.substate if subrepomatch(s))
-
-            if targetsubs:
-                # Revert the subrepos on the revert list
-                for sub in targetsubs:
-                    ctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
+            _performrevert(repo, parents, ctx, actions, interactive)
+
+        if targetsubs:
+            # Revert the subrepos on the revert list
+            for sub in targetsubs:
+                try:
+                    wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
+                except KeyError:
+                    raise util.Abort("subrepository '%s' does not exist in %s!"
+                                      % (sub, short(ctx.node())))
     finally:
         wlock.release()
 
@@ -2821,7 +3065,7 @@
     """Let extension changing the storage layer prefetch content"""
     pass
 
-def _performrevert(repo, parents, ctx, actions):
+def _performrevert(repo, parents, ctx, actions, interactive=False):
     """function that actually perform all the actions computed for revert
 
     This is an independent function to let extension to plug in and react to
@@ -2855,10 +3099,35 @@
             normal = repo.dirstate.normallookup
         else:
             normal = repo.dirstate.normal
-    for f in actions['revert'][0]:
-        checkout(f)
-        if normal:
-            normal(f)
+
+    if interactive:
+        # Prompt the user for changes to revert
+        torevert = [repo.wjoin(f) for f in actions['revert'][0]]
+        m = scmutil.match(ctx, torevert, {})
+        diff = patch.diff(repo, None, ctx.node(), m)
+        originalchunks = patch.parsepatch(diff)
+        try:
+            chunks = recordfilter(repo.ui, originalchunks)
+        except patch.PatchError, err:
+            raise util.Abort(_('error parsing patch: %s') % err)
+
+        # Apply changes
+        fp = cStringIO.StringIO()
+        for c in chunks:
+            c.write(fp)
+        dopatch = fp.tell()
+        fp.seek(0)
+        if dopatch:
+            try:
+                patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
+            except patch.PatchError, err:
+                raise util.Abort(str(err))
+        del fp
+    else:
+        for f in actions['revert'][0]:
+            checkout(f)
+            if normal:
+                normal(f)
 
     for f in actions['add'][0]:
         checkout(f)
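
A minimal stand-alone sketch of the select-then-apply pattern used by the
interactive branch of _performrevert above: parse the computed diff into
hunks, let the record-style filter keep some of them, re-serialize the
survivors into an in-memory patch, and apply it only if anything was kept
(the parse/choose/applyfn callables are hypothetical placeholders, not
Mercurial APIs):

    import cStringIO

    def selectandapply(difftext, parse, choose, applyfn):
        # parse() returns hunk objects exposing a write(fp) method
        chunks = [c for c in parse(difftext) if choose(c)]
        fp = cStringIO.StringIO()
        for c in chunks:
            c.write(fp)        # re-serialize only the hunks the user kept
        if fp.tell():          # was anything selected at all?
            fp.seek(0)
            applyfn(fp)        # hand the reduced patch to the applier
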
--- a/mercurial/commands.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/commands.py	Thu Apr 16 20:57:51 2015 -0500
@@ -148,6 +148,7 @@
     ('U', 'unified', '',
      _('number of lines of context to show'), _('NUM')),
     ('', 'stat', None, _('output diffstat-style summary of changes')),
+    ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
 ]
 
 mergetoolopts = [
@@ -276,13 +277,44 @@
         # to mimic the behavior of Mercurial before version 1.5
         opts['file'] = True
 
+    ctx = scmutil.revsingle(repo, opts.get('rev'))
+
     fm = ui.formatter('annotate', opts)
-    datefunc = ui.quiet and util.shortdate or util.datestr
-    hexfn = fm.hexfunc
+    if ui.quiet:
+        datefunc = util.shortdate
+    else:
+        datefunc = util.datestr
+    if ctx.rev() is None:
+        def hexfn(node):
+            if node is None:
+                return None
+            else:
+                return fm.hexfunc(node)
+        if opts.get('changeset'):
+            # omit "+" suffix which is appended to node hex
+            def formatrev(rev):
+                if rev is None:
+                    return '%d' % ctx.p1().rev()
+                else:
+                    return '%d' % rev
+        else:
+            def formatrev(rev):
+                if rev is None:
+                    return '%d+' % ctx.p1().rev()
+                else:
+                    return '%d ' % rev
+        def formathex(hex):
+            if hex is None:
+                return '%s+' % fm.hexfunc(ctx.p1().node())
+            else:
+                return '%s ' % hex
+    else:
+        hexfn = fm.hexfunc
+        formatrev = formathex = str
 
     opmap = [('user', ' ', lambda x: x[0].user(), ui.shortuser),
-             ('number', ' ', lambda x: x[0].rev(), str),
-             ('changeset', ' ', lambda x: hexfn(x[0].node()), str),
+             ('number', ' ', lambda x: x[0].rev(), formatrev),
+             ('changeset', ' ', lambda x: hexfn(x[0].node()), formathex),
              ('date', ' ', lambda x: x[0].date(), util.cachefunc(datefunc)),
              ('file', ' ', lambda x: x[0].path(), str),
              ('line_number', ':', lambda x: x[1], str),
@@ -312,7 +344,6 @@
     def bad(x, y):
         raise util.Abort("%s: %s" % (x, y))
 
-    ctx = scmutil.revsingle(repo, opts.get('rev'))
     m = scmutil.match(ctx, pats, opts)
     m.bad = bad
     follow = not opts.get('no_follow')
@@ -664,7 +695,10 @@
         # one of the parent was not checked.
         parents = repo[nodes[0]].parents()
         if len(parents) > 1:
-            side = good and state['bad'] or state['good']
+            if good:
+                side = state['bad']
+            else:
+                side = state['good']
             num = len(set(i.node() for i in parents) & set(side))
             if num == 1:
                 return parents[0].ancestor(parents[1])
@@ -1184,7 +1218,7 @@
     btypes = {'none': 'HG10UN',
               'bzip2': 'HG10BZ',
               'gzip': 'HG10GZ',
-              'bundle2': 'HG2Y'}
+              'bundle2': 'HG20'}
     bundletype = btypes.get(bundletype)
     if bundletype not in changegroup.bundletypes:
         raise util.Abort(_('unknown bundle type specified with --type'))
@@ -1257,8 +1291,8 @@
     return cmdutil.cat(ui, repo, ctx, m, '', **opts)
 
 @command('^clone',
-    [('U', 'noupdate', None,
-     _('the clone will include an empty working copy (only a repository)')),
+    [('U', 'noupdate', None, _('the clone will include an empty working '
+                               'directory (only a repository)')),
     ('u', 'updaterev', '', _('revision, tag or branch to check out'), _('REV')),
     ('r', 'rev', [], _('include the specified changeset'), _('REV')),
     ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
@@ -1380,9 +1414,10 @@
      _('mark new/missing files as added/removed before committing')),
     ('', 'close-branch', None,
      _('mark a branch as closed, hiding it from the branch list')),
-    ('', 'amend', None, _('amend the parent of the working dir')),
+    ('', 'amend', None, _('amend the parent of the working directory')),
     ('s', 'secret', None, _('use the secret phase for committing')),
     ('e', 'edit', None, _('invoke editor on commit messages')),
+    ('i', 'interactive', None, _('use interactive mode')),
     ] + walkopts + commitopts + commitopts2 + subrepoopts,
     _('[OPTION]... [FILE]...'),
     inferrepo=True)
@@ -1422,6 +1457,12 @@
 
     Returns 0 on success, 1 if nothing changed.
     """
+    if opts.get('interactive'):
+        opts.pop('interactive')
+        cmdutil.dorecord(ui, repo, commit, 'commit', False,
+                        cmdutil.recordfilter, *pats, **opts)
+        return
+
     if opts.get('subrepos'):
         if opts.get('amend'):
             raise util.Abort(_('cannot amend with --subrepos'))
@@ -1874,7 +1915,7 @@
     ui.write(('Stream params: %s\n' % repr(gen.params)))
     for part in gen.iterparts():
         ui.write('%s -- %r\n' % (part.type, repr(part.params)))
-        if part.type == 'b2x:changegroup':
+        if part.type == 'changegroup':
             version = part.params.get('version', '01')
             cg = changegroup.packermap[version][1](part, 'UN')
             chunkdata = cg.changelogheader()
@@ -1946,7 +1987,7 @@
         ui.write("%s\n" % "\n".join(options))
         return
 
-    cmdlist = cmdutil.findpossible(cmd, table)
+    cmdlist, unused_allcmds = cmdutil.findpossible(cmd, table)
     if ui.verbose:
         cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
     ui.write("%s\n" % "\n".join(sorted(cmdlist)))
@@ -2167,7 +2208,7 @@
     btypes = {'none': 'HG10UN',
               'bzip2': 'HG10BZ',
               'gzip': 'HG10GZ',
-              'bundle2': 'HG2Y'}
+              'bundle2': 'HG20'}
     bundletype = btypes.get(bundletype)
     if bundletype not in changegroup.bundletypes:
         raise util.Abort(_('unknown bundle type specified with --type'))
@@ -2799,6 +2840,7 @@
         deltasize[2] /= numrevs - numfull
     totalsize = fulltotal + deltatotal
     avgchainlen = sum(chainlengths) / numrevs
+    maxchainlen = max(chainlengths)
     compratio = totalrawsize / totalsize
 
     basedfmtstr = '%%%dd\n'
@@ -2831,6 +2873,7 @@
     ui.write('\n')
     fmt = dfmtstr(max(avgchainlen, compratio))
     ui.write(('avg chain length  : ') + fmt % avgchainlen)
+    ui.write(('max chain length  : ') + fmt % maxchainlen)
     ui.write(('compression ratio : ') + fmt % compratio)
 
     if format > 0:
@@ -2885,7 +2928,10 @@
             weight, optimizedtree = revset.optimize(newtree, True)
             ui.note("* optimized:\n", revset.prettyformat(optimizedtree), "\n")
     func = revset.match(ui, expr)
-    for c in func(repo, revset.spanset(repo)):
+    revs = func(repo)
+    if ui.verbose:
+        ui.note("* set:\n", revset.prettyformatset(revs), "\n")
+    for c in revs:
         ui.write("%s\n" % c)
 
 @command('debugsetparents', [], _('REV1 [REV2]'))
@@ -2893,7 +2939,9 @@
     """manually set the parents of the current working directory
 
     This is useful for writing repository conversion tools, but should
-    be used with care.
+    be used with care. For example, neither the working directory nor the
+    dirstate is updated, so file status may be incorrect after running this
+    command.
 
     Returns 0 on success.
     """
@@ -3124,7 +3172,8 @@
     diffopts = patch.diffallopts(ui, opts)
     m = scmutil.match(repo[node2], pats, opts)
     cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
-                           listsubrepos=opts.get('subrepos'))
+                           listsubrepos=opts.get('subrepos'),
+                           root=opts.get('root'))
 
 @command('^export',
     [('o', 'output', '',
@@ -3210,7 +3259,7 @@
 @command('files',
     [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
      ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
-    ] + walkopts + formatteropts,
+    ] + walkopts + formatteropts + subrepoopts,
     _('[OPTION]... [PATTERN]...'))
 def files(ui, repo, *pats, **opts):
     """list tracked files
@@ -3220,7 +3269,7 @@
     removed files).
 
     If no patterns are given to match, this command prints the names
-    of all files under Mercurial control in the working copy.
+    of all files under Mercurial control in the working directory.
 
     .. container:: verbose
 
@@ -3257,8 +3306,6 @@
 
     """
     ctx = scmutil.revsingle(repo, opts.get('rev'), None)
-    rev = ctx.rev()
-    ret = 1
 
     end = '\n'
     if opts.get('print0'):
@@ -3267,17 +3314,7 @@
     fmt = '%s' + end
 
     m = scmutil.match(ctx, pats, opts)
-    ds = repo.dirstate
-    for f in ctx.matches(m):
-        if rev is None and ds[f] == 'r':
-            continue
-        fm.startitem()
-        if ui.verbose:
-            fc = ctx[f]
-            fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
-        fm.data(abspath=f)
-        fm.write('path', fmt, m.rel(f))
-        ret = 0
+    ret = cmdutil.files(ui, ctx, m, fm, fmt, opts.get('subrepos'))
 
     fm.end()
 
@@ -3507,9 +3544,12 @@
                 continue
 
             source = ctx.extra().get('source')
-            if not source:
-                source = ctx.hex()
-            extra = {'source': source}
+            extra = {}
+            if source:
+                extra['source'] = source
+                extra['intermediate-source'] = ctx.hex()
+            else:
+                extra['source'] = ctx.hex()
             user = ctx.user()
             if opts.get('user'):
                 user = opts['user']
@@ -3675,7 +3715,10 @@
 
     def display(fn, ctx, pstates, states):
         rev = ctx.rev()
-        datefunc = ui.quiet and util.shortdate or util.datestr
+        if ui.quiet:
+            datefunc = util.shortdate
+        else:
+            datefunc = util.datestr
         found = False
         @util.cachefunc
         def binary():
@@ -3915,7 +3958,7 @@
     optionalrepo=True)
 def identify(ui, repo, source=None, rev=None,
              num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
-    """identify the working copy or specified revision
+    """identify the working directory or specified revision
 
     Print a summary identifying the repository state at REV using one or
     two parent hash identifiers, followed by a "+" if the working
@@ -3951,7 +3994,10 @@
         raise util.Abort(_("there is no Mercurial repository here "
                            "(.hg not found)"))
 
-    hexfunc = ui.debugflag and hex or short
+    if ui.debugflag:
+        hexfunc = hex
+    else:
+        hexfunc = short
     default = not (num or id or branch or tags or bookmarks)
     output = []
     revs = []
@@ -4056,6 +4102,8 @@
      _('commit even if some hunks fail')),
     ('', 'exact', None,
      _('apply patch to the nodes from which it was generated')),
+    ('', 'prefix', '',
+     _('apply patch to subdirectory'), _('DIR')),
     ('', 'import-branch', None,
      _('use any branch information in patch (implied by --exact)'))] +
     commitopts + commitopts2 + similarityopts,
@@ -4155,6 +4203,8 @@
         raise util.Abort(_('cannot use --similarity with --bypass'))
     if opts.get('exact') and opts.get('edit'):
         raise util.Abort(_('cannot use --exact with --edit'))
+    if opts.get('exact') and opts.get('prefix'):
+        raise util.Abort(_('cannot use --exact with --prefix'))
 
     if update:
         cmdutil.checkunfinished(repo)
@@ -4243,13 +4293,36 @@
     pull location. These are the changesets that would have been pulled
     if a pull at the time you issued this command.
 
-    For remote repository, using --bundle avoids downloading the
-    changesets twice if the incoming is followed by a pull.
-
     See pull for valid source format details.
 
     .. container:: verbose
 
+      With -B/--bookmarks, the result of bookmark comparison between
+      local and remote repositories is displayed. With -v/--verbose,
+      status is also displayed for each bookmark like below::
+
+        BM1               01234567890a added
+        BM2               1234567890ab advanced
+        BM3               234567890abc diverged
+        BM4               34567890abcd changed
+
+      The action taken locally when pulling depends on the
+      status of each bookmark:
+
+      :``added``: pull will create it
+      :``advanced``: pull will update it
+      :``diverged``: pull will create a divergent bookmark
+      :``changed``: result depends on remote changesets
+
+      From the point of view of pulling behavior, bookmarks
+      existing only in the remote repository are treated as ``added``,
+      even if they have in fact been deleted locally.
+
+    .. container:: verbose
+
+      For a remote repository, using --bundle avoids downloading the
+      changesets twice if the incoming is followed by a pull.
+
       Examples:
 
       - show incoming changes with patches and full description::
@@ -4289,7 +4362,7 @@
             ui.warn(_("remote doesn't support bookmarks\n"))
             return 0
         ui.status(_('comparing with %s\n') % util.hidepassword(source))
-        return bookmarks.diff(ui, repo, other)
+        return bookmarks.incoming(ui, repo, other)
 
     repo._subtoppath = ui.expandpath(source)
     try:
@@ -4343,7 +4416,10 @@
 
     Returns 0 if a match is found, 1 otherwise.
     """
-    end = opts.get('print0') and '\0' or '\n'
+    if opts.get('print0'):
+        end = '\0'
+    else:
+        end = '\n'
     rev = scmutil.revsingle(repo, opts.get('rev'), None).node()
 
     ret = 1
@@ -4477,6 +4553,10 @@
     Returns 0 on success.
 
     """
+    if opts.get('follow') and opts.get('rev'):
+        opts['rev'] = [revset.formatspec('reverse(::%lr)', opts.get('rev'))]
+        del opts['follow']
+
     if opts.get('graph'):
         return cmdutil.graphlog(ui, repo, *pats, **opts)
 
@@ -4503,7 +4583,10 @@
                 rename = getrenamed(fn, rev)
                 if rename:
                     copies.append((fn, rename[0]))
-        revmatchfn = filematcher and filematcher(ctx.rev()) or None
+        if filematcher:
+            revmatchfn = filematcher(ctx.rev())
+        else:
+            revmatchfn = None
         displayer.show(ctx, copies=copies, matchfn=revmatchfn)
         if displayer.flush(rev):
             count += 1
@@ -4709,6 +4792,31 @@
 
     See pull for details of valid destination formats.
 
+    .. container:: verbose
+
+      With -B/--bookmarks, the result of bookmark comparison between
+      local and remote repositories is displayed. With -v/--verbose,
+      status is also displayed for each bookmark like below::
+
+        BM1               01234567890a added
+        BM2                            deleted
+        BM3               234567890abc advanced
+        BM4               34567890abcd diverged
+        BM5               4567890abcde changed
+
+      The action taken when pushing depends on the
+      status of each bookmark:
+
+      :``added``: push with ``-B`` will create it
+      :``deleted``: push with ``-B`` will delete it
+      :``advanced``: push will update it
+      :``diverged``: push with ``-B`` will update it
+      :``changed``: push with ``-B`` will update it
+
+      From the point of view of pushing behavior, bookmarks
+      existing only in the remote repository are treated as
+      ``deleted``, even if they were in fact added remotely.
+
     Returns 0 if there are outgoing changes, 1 otherwise.
     """
     if opts.get('graph'):
@@ -4734,7 +4842,7 @@
             ui.warn(_("remote doesn't support bookmarks\n"))
             return 0
         ui.status(_('comparing with %s\n') % util.hidepassword(dest))
-        return bookmarks.diff(ui, other, repo)
+        return bookmarks.outgoing(ui, repo, other)
 
     repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
     try:
@@ -4821,19 +4929,20 @@
     Returns 0 on success.
     """
     if search:
-        for name, path in ui.configitems("paths"):
+        for name, path in sorted(ui.paths.iteritems()):
             if name == search:
-                ui.status("%s\n" % util.hidepassword(path))
+                ui.status("%s\n" % util.hidepassword(path.loc))
                 return
         if not ui.quiet:
             ui.warn(_("not found!\n"))
         return 1
     else:
-        for name, path in ui.configitems("paths"):
+        for name, path in sorted(ui.paths.iteritems()):
             if ui.quiet:
                 ui.write("%s\n" % name)
             else:
-                ui.write("%s = %s\n" % (name, util.hidepassword(path)))
+                ui.write("%s = %s\n" % (name,
+                                        util.hidepassword(path.loc)))
 
 @command('phase',
     [('p', 'public', False, _('set changeset phase to public')),
@@ -4984,9 +5093,9 @@
     Returns 0 on success, 1 if an update had unresolved files.
     """
     source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
+    ui.status(_('pulling from %s\n') % util.hidepassword(source))
     other = hg.peer(repo, opts, source)
     try:
-        ui.status(_('pulling from %s\n') % util.hidepassword(source))
         revs, checkout = hg.addbranchrevs(repo, other, branches,
                                           opts.get('rev'))
 
@@ -5098,6 +5207,9 @@
 
     if revs:
         revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]
+        if not revs:
+            raise util.Abort(_("specified revisions evaluate to an empty set"),
+                             hint=_("use different revision arguments"))
 
     repo._subtoppath = dest
     try:
@@ -5225,7 +5337,7 @@
     ('m', 'mark', None, _('mark files as resolved')),
     ('u', 'unmark', None, _('mark files as unresolved')),
     ('n', 'no-status', None, _('hide status prefix'))]
-    + mergetoolopts + walkopts,
+    + mergetoolopts + walkopts + formatteropts,
     _('[OPTION]... [FILE]...'),
     inferrepo=True)
 def resolve(ui, repo, *pats, **opts):
@@ -5277,11 +5389,25 @@
         raise util.Abort(_('no files or directories specified'),
                          hint=('use --all to remerge all files'))
 
+    if show:
+        fm = ui.formatter('resolve', opts)
+        ms = mergemod.mergestate(repo)
+        m = scmutil.match(repo[None], pats, opts)
+        for f in ms:
+            if not m(f):
+                continue
+            l = 'resolve.' + {'u': 'unresolved', 'r': 'resolved'}[ms[f]]
+            fm.startitem()
+            fm.condwrite(not nostatus, 'status', '%s ', ms[f].upper(), label=l)
+            fm.write('path', '%s\n', f, label=l)
+        fm.end()
+        return 0
+
     wlock = repo.wlock()
     try:
         ms = mergemod.mergestate(repo)
 
-        if not (ms.active() or repo.dirstate.p2() != nullid) and not show:
+        if not (ms.active() or repo.dirstate.p2() != nullid):
             raise util.Abort(
                 _('resolve command not applicable when not merging'))
 
@@ -5295,14 +5421,7 @@
 
             didwork = True
 
-            if show:
-                if nostatus:
-                    ui.write("%s\n" % f)
-                else:
-                    ui.write("%s %s\n" % (ms[f].upper(), f),
-                             label='resolve.' +
-                             {'u': 'unresolved', 'r': 'resolved'}[ms[f]])
-            elif mark:
+            if mark:
                 ms.mark(f, "r")
             elif unmark:
                 ms.mark(f, "u")
@@ -5334,10 +5453,8 @@
     finally:
         wlock.release()
 
-    # Nudge users into finishing an unfinished operation. We don't print
-    # this with the list/show operation because we want list/show to remain
-    # machine readable.
-    if not list(ms.unresolved()) and not show:
+    # Nudge users into finishing an unfinished operation
+    if not list(ms.unresolved()):
         ui.status(_('(no more unresolved files)\n'))
 
     return ret
@@ -5347,6 +5464,7 @@
     ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
     ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
     ('C', 'no-backup', None, _('do not save backup copies of files')),
+    ('i', 'interactive', None, _('interactively select the changes')),
     ] + walkopts + dryrunopts,
     _('[OPTION]... [-r REV] [NAME]...'))
 def revert(ui, repo, *pats, **opts):
@@ -5393,7 +5511,7 @@
 
     ctx = scmutil.revsingle(repo, opts.get('rev'))
 
-    if not pats and not opts.get('all'):
+    if not pats and not (opts.get('all') or opts.get('interactive')):
         msg = _("no files or directories specified")
         if p2 != nullid:
             hint = _("uncommitted merge, use --all to discard all changes,"
@@ -5541,7 +5659,10 @@
     if opts.get('port'):
         opts['port'] = util.getport(opts.get('port'))
 
-    baseui = repo and repo.baseui or ui
+    if repo:
+        baseui = repo.baseui
+    else:
+        baseui = ui
     optlist = ("name templates style address port prefix ipv6"
                " accesslog errorlog certificate encoding")
     for o in optlist.split():
@@ -5668,6 +5789,11 @@
 
           hg status --rev 9353
 
+      - show changes in the working directory relative to the
+        current directory (see :hg:`help patterns` for more information)::
+
+          hg status re:
+
       - show all changes including copies in an existing changeset::
 
           hg status --copies --change 9353
@@ -5691,22 +5817,33 @@
     else:
         node1, node2 = scmutil.revpair(repo, revs)
 
-    cwd = (pats and repo.getcwd()) or ''
-    end = opts.get('print0') and '\0' or '\n'
+    if pats:
+        cwd = repo.getcwd()
+    else:
+        cwd = ''
+
+    if opts.get('print0'):
+        end = '\0'
+    else:
+        end = '\n'
     copy = {}
     states = 'modified added removed deleted unknown ignored clean'.split()
     show = [k for k in states if opts.get(k)]
     if opts.get('all'):
         show += ui.quiet and (states[:4] + ['clean']) or states
     if not show:
-        show = ui.quiet and states[:4] or states[:5]
+        if ui.quiet:
+            show = states[:4]
+        else:
+            show = states[:5]
 
     stat = repo.status(node1, node2, scmutil.match(repo[node2], pats, opts),
                        'ignored' in show, 'clean' in show, 'unknown' in show,
                        opts.get('subrepos'))
     changestates = zip(states, 'MAR!?IC', stat)
 
-    if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
+    if (opts.get('all') or opts.get('copies')
+        or ui.configbool('ui', 'statuscopies')) and not opts.get('no_status'):
         copy = copies.pathcopies(repo[node1], repo[node2])
 
     fm = ui.formatter('status', opts)
@@ -5939,14 +6076,11 @@
             t.append(_('%d outgoing') % len(o))
         other = dother or sother
         if 'bookmarks' in other.listkeys('namespaces'):
-            lmarks = repo.listkeys('bookmarks')
-            rmarks = other.listkeys('bookmarks')
-            diff = set(rmarks) - set(lmarks)
-            if len(diff) > 0:
-                t.append(_('%d incoming bookmarks') % len(diff))
-            diff = set(lmarks) - set(rmarks)
-            if len(diff) > 0:
-                t.append(_('%d outgoing bookmarks') % len(diff))
+            counts = bookmarks.summary(repo, other)
+            if counts[0] > 0:
+                t.append(_('%d incoming bookmarks') % counts[0])
+            if counts[1] > 0:
+                t.append(_('%d outgoing bookmarks') % counts[1])
 
         if t:
             # i18n: column positioning for "hg summary"
@@ -6020,7 +6154,11 @@
             rev_ = opts['rev']
         message = opts.get('message')
         if opts.get('remove'):
-            expectedtype = opts.get('local') and 'local' or 'global'
+            if opts.get('local'):
+                expectedtype = 'local'
+            else:
+                expectedtype = 'global'
+
             for n in names:
                 if not repo.tagtype(n):
                     raise util.Abort(_("tag '%s' does not exist") % n)
@@ -6250,9 +6388,7 @@
         rev = cmdutil.finddate(ui, repo, date)
 
     if check:
-        c = repo[None]
-        if c.dirty(merge=False, branch=False, missing=True):
-            raise util.Abort(_("uncommitted changes"))
+        cmdutil.bailifchanged(repo, merge=False)
         if rev is None:
             rev = repo[repo[None].branch()].rev()
 
@@ -6303,7 +6439,7 @@
              % util.version())
     ui.status(_(
         "(see http://mercurial.selenic.com for more information)\n"
-        "\nCopyright (C) 2005-2014 Matt Mackall and others\n"
+        "\nCopyright (C) 2005-2015 Matt Mackall and others\n"
         "This is free software; see the source for copying conditions. "
         "There is NO\nwarranty; "
         "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
--- a/mercurial/context.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/context.py	Thu Apr 16 20:57:51 2015 -0500
@@ -66,8 +66,7 @@
         return self.filectx(key)
 
     def __iter__(self):
-        for f in sorted(self._manifest):
-            yield f
+        return iter(self._manifest)
 
     def _manifestmatches(self, match, s):
         """generate a new manifest filtered by the match argument
@@ -153,6 +152,8 @@
         return hex(self.node())
     def manifest(self):
         return self._manifest
+    def repo(self):
+        return self._repo
     def phasestr(self):
         return phases.phasenames[self.phase()]
     def mutable(self):
@@ -265,12 +266,11 @@
         diffopts = patch.diffopts(self._repo.ui, opts)
         return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)
 
-    @propertycache
-    def _dirs(self):
-        return scmutil.dirs(self._manifest)
+    def dirs(self):
+        return self._manifest.dirs()
 
-    def dirs(self):
-        return self._dirs
+    def hasdir(self, dir):
+        return self._manifest.hasdir(dir)
 
     def dirty(self, missing=False, merge=True, branch=True):
         return False
@@ -376,10 +376,6 @@
                 return
             if isinstance(changeid, long):
                 changeid = str(changeid)
-            if changeid == '.':
-                self._node = repo.dirstate.p1()
-                self._rev = repo.changelog.rev(self._node)
-                return
             if changeid == 'null':
                 self._node = nullid
                 self._rev = nullrev
@@ -388,6 +384,12 @@
                 self._node = repo.changelog.tip()
                 self._rev = repo.changelog.rev(self._node)
                 return
+            if changeid == '.' or changeid == repo.dirstate.p1():
+                # this is a hack to delay/avoid loading obsmarkers
+                # when we know that '.' won't be hidden
+                self._node = repo.dirstate.p1()
+                self._rev = repo.unfiltered().changelog.rev(self._node)
+                return
             if len(changeid) == 20:
                 try:
                     self._node = changeid
@@ -585,30 +587,15 @@
         return self._repo.changelog.descendant(self._rev, other._rev)
 
     def walk(self, match):
-        fset = set(match.files())
-        # for dirstate.walk, files=['.'] means "walk the whole tree".
-        # follow that here, too
-        fset.discard('.')
+        '''Generates matching file names.'''
 
-        # avoid the entire walk if we're only looking for specific files
-        if fset and not match.anypats():
-            if util.all([fn in self for fn in fset]):
-                for fn in sorted(fset):
-                    if match(fn):
-                        yield fn
-                raise StopIteration
+        # Override match.bad method to include the nodeid in the message
+        oldbad = match.bad
+        def bad(fn, msg):
+            oldbad(fn, _('no such file in rev %s') % self)
+        match.bad = bad
 
-        for fn in self:
-            if fn in fset:
-                # specified pattern is the exact name
-                fset.remove(fn)
-            if match(fn):
-                yield fn
-        for fn in sorted(fset):
-            if fn in self._dirs:
-                # specified pattern is a directory
-                continue
-            match.bad(fn, _('no such file in rev %s') % self)
+        return self._manifest.walk(match)
 
     def matches(self, match):
         return self.walk(match)
@@ -722,6 +709,8 @@
         return self._changectx.manifest()
     def changectx(self):
         return self._changectx
+    def repo(self):
+        return self._repo
 
     def path(self):
         return self._path
@@ -752,7 +741,7 @@
         return True
 
     def _adjustlinkrev(self, path, filelog, fnode, srcrev, inclusive=False):
-        """return the first ancestor of <srcrev> introducting <fnode>
+        """return the first ancestor of <srcrev> introducing <fnode>
 
         If the linkrev of the file revision does not point to an ancestor of
         srcrev, we'll walk down the ancestors until we find one introducing
@@ -830,7 +819,7 @@
             # be replaced with the rename information. This parent is -always-
             # the first one.
             #
-            # As null id have alway been filtered out in the previous list
+            # As null id have always been filtered out in the previous list
             # comprehension, inserting to 0 will always result in "replacing
             # first nullid parent with rename information.
             pl.insert(0, (r[0], r[1], self._repo.file(r[0])))
@@ -919,7 +908,7 @@
         introrev = self.introrev()
         if self.rev() != introrev:
             base = self.filectx(self.filenode(), changeid=introrev)
-        if getattr(base, '_ancestrycontext', None) is None:
+        if introrev and getattr(base, '_ancestrycontext', None) is None:
             ac = self._repo.changelog.ancestors([introrev], inclusive=True)
             base._ancestrycontext = ac
 
@@ -969,7 +958,11 @@
     def ancestors(self, followfirst=False):
         visit = {}
         c = self
-        cut = followfirst and 1 or None
+        if followfirst:
+            cut = 1
+        else:
+            cut = None
+
         while True:
             for parent in c.parents()[:cut]:
                 visit[(parent.linkrev(), parent.filenode())] = parent
@@ -1199,6 +1192,8 @@
     def subrev(self, subpath):
         return None
 
+    def manifestnode(self):
+        return None
     def user(self):
         return self._user or self._repo.ui.username()
     def date(self):
@@ -1265,6 +1260,7 @@
         return self._parents[0].ancestor(c2) # punt on two parents for now
 
     def walk(self, match):
+        '''Generates matching file names.'''
         return sorted(self._repo.dirstate.walk(match, sorted(self.substate),
                                                True, False))
 
@@ -1296,9 +1292,6 @@
         self._repo.dirstate.setparents(node)
         self._repo.dirstate.endparentchange()
 
-    def dirs(self):
-        return self._repo.dirstate.dirs()
-
 class workingctx(committablectx):
     """A workingctx object makes access to data related to
     the current working directory convenient.
@@ -1434,6 +1427,18 @@
             finally:
                 wlock.release()
 
+    def match(self, pats=[], include=None, exclude=None, default='glob'):
+        r = self._repo
+
+        # Only a case-insensitive filesystem needs magic to translate user input
+        # to actual case in the filesystem.
+        if not util.checkcase(r.root):
+            return matchmod.icasefsmatcher(r.root, r.getcwd(), pats, include,
+                                           exclude, default, r.auditor, self)
+        return matchmod.match(r.root, r.getcwd(), pats,
+                              include, exclude, default,
+                              auditor=r.auditor, ctx=self)
+
     def _filtersuspectsymlink(self, files):
         if not files or self._repo.dirstate._checklink:
             return files
@@ -1570,7 +1575,7 @@
             def bad(f, msg):
                 # 'f' may be a directory pattern from 'match.files()',
                 # so 'f not in ctx1' is not enough
-                if f not in other and f not in other.dirs():
+                if f not in other and not other.hasdir(f):
                     self._repo.ui.warn('%s: %s\n' %
                                        (self._repo.dirstate.pathto(f), msg))
             match.bad = bad
@@ -1593,6 +1598,10 @@
     def __nonzero__(self):
         return True
 
+    def linkrev(self):
+        # linked to self._changectx no matter if file is modified or not
+        return self.rev()
+
     def parents(self):
         '''return parent filectxs, following copies if necessary'''
         def filenode(ctx, path):
@@ -1764,7 +1773,11 @@
             # "filectxfn" for performance (e.g. converting from another VCS)
             self._filectxfn = util.cachefunc(filectxfn)
 
-        self._extra = extra and extra.copy() or {}
+        if extra:
+            self._extra = extra.copy()
+        else:
+            self._extra = {}
+
         if self._extra.get('branch', '') == '':
             self._extra['branch'] = 'default'
 
--- a/mercurial/copies.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/copies.py	Thu Apr 16 20:57:51 2015 -0500
@@ -8,10 +8,6 @@
 import util
 import heapq
 
-def _nonoverlap(d1, d2, d3):
-    "Return list of elements in d1 not in d2 or d3"
-    return sorted([d for d in d1 if d not in d3 and d not in d2])
-
 def _dirname(f):
     s = f.rfind("/")
     if s == -1:
@@ -144,7 +140,19 @@
             del c[k]
     return c
 
-def _forwardcopies(a, b):
+def _computeforwardmissing(a, b, match=None):
+    """Computes which files are in b but not a.
+    This is its own function so extensions can easily wrap this call to see what
+    files _forwardcopies is about to process.
+    """
+    ma = a.manifest()
+    mb = b.manifest()
+    if match:
+        ma = ma.matches(match)
+        mb = mb.matches(match)
+    return mb.filesnotin(ma)
+
+def _forwardcopies(a, b, match=None):
     '''find {dst@b: src@a} copy mapping where a is an ancestor of b'''
 
     # check for working copy
@@ -167,9 +175,7 @@
     # we currently don't try to find where old files went, too expensive
     # this means we can miss a case like 'hg rm b; hg cp a b'
     cm = {}
-    missing = set(b.manifest().iterkeys())
-    missing.difference_update(a.manifest().iterkeys())
-
+    missing = _computeforwardmissing(a, b, match=match)
     ancestrycontext = a._repo.changelog.ancestors([b.rev()], inclusive=True)
     for f in missing:
         fctx = b[f]
@@ -197,16 +203,36 @@
         r[v] = k
     return r
 
-def pathcopies(x, y):
+def pathcopies(x, y, match=None):
     '''find {dst@y: src@x} copy mapping for directed compare'''
     if x == y or not x or not y:
         return {}
     a = y.ancestor(x)
     if a == x:
-        return _forwardcopies(x, y)
+        return _forwardcopies(x, y, match=match)
     if a == y:
         return _backwardrenames(x, y)
-    return _chain(x, y, _backwardrenames(x, a), _forwardcopies(a, y))
+    return _chain(x, y, _backwardrenames(x, a),
+                  _forwardcopies(a, y, match=match))
+
+def _computenonoverlap(repo, c1, c2, addedinm1, addedinm2):
+    """Computes, based on addedinm1 and addedinm2, the files exclusive to c1
+    and c2. This is its own function so extensions can easily wrap this call
+    to see what files mergecopies is about to process.
+
+    Even though c1 and c2 are not used in this function, they are useful to
+    extensions that need to read the file nodes of the changed files.
+    """
+    u1 = sorted(addedinm1 - addedinm2)
+    u2 = sorted(addedinm2 - addedinm1)
+
+    if u1:
+        repo.ui.debug("  unmatched files in local:\n   %s\n"
+                      % "\n   ".join(u1))
+    if u2:
+        repo.ui.debug("  unmatched files in other:\n   %s\n"
+                      % "\n   ".join(u2))
+    return u1, u2
 
 def mergecopies(repo, c1, c2, ca):
     """
@@ -288,15 +314,9 @@
 
     repo.ui.debug("  searching for copies back to rev %d\n" % limit)
 
-    u1 = _nonoverlap(m1, m2, ma)
-    u2 = _nonoverlap(m2, m1, ma)
-
-    if u1:
-        repo.ui.debug("  unmatched files in local:\n   %s\n"
-                      % "\n   ".join(u1))
-    if u2:
-        repo.ui.debug("  unmatched files in other:\n   %s\n"
-                      % "\n   ".join(u2))
+    addedinm1 = m1.filesnotin(ma)
+    addedinm2 = m2.filesnotin(ma)
+    u1, u2 = _computenonoverlap(repo, c1, c2, addedinm1, addedinm2)
 
     for f in u1:
         ctx = setupctx(c1)
@@ -320,7 +340,7 @@
         else:
             diverge2.update(fl) # reverse map for below
 
-    bothnew = sorted([d for d in m1 if d in m2 and d not in ma])
+    bothnew = sorted(addedinm1 & addedinm2)
     if bothnew:
         repo.ui.debug("  unmatched files new in both:\n   %s\n"
                       % "\n   ".join(bothnew))
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/crecord.py	Thu Apr 16 20:57:51 2015 -0500
@@ -0,0 +1,1597 @@
+# stuff related specifically to patch manipulation / parsing
+#
+# Copyright 2008 Mark Edgington <edgimar@gmail.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+#
+# This code is based on Mark Edgington's crecord extension.
+# (Itself based on Bryan O'Sullivan's record extension.)
+
+from i18n import _
+import patch as patchmod
+import util, encoding
+
+import os, re, sys, struct, signal, tempfile, locale, cStringIO
+
+# This is required for ncurses to display non-ASCII characters in default user
+# locale encoding correctly.  --immerrr
+locale.setlocale(locale.LC_ALL, '')
+
+# os.name is one of: 'posix', 'nt', 'dos', 'os2', 'mac', or 'ce'
+if os.name == 'posix':
+    import curses, fcntl, termios
+else:
+    # I have no idea if wcurses works with crecord...
+    try:
+        import wcurses as curses
+    except ImportError:
+        # wcurses is not shipped on Windows by default
+        pass
+
+try:
+    curses
+except NameError:
+    if os.name != 'nt':  # Temporary hack to get running on Windows again
+        raise util.Abort(
+            _('the python curses/wcurses module is not available/installed'))
+
+_origstdout = sys.__stdout__ # used by gethw()
+
+class patchnode(object):
+    """abstract class for patch graph nodes
+    (i.e. patchroot, header, hunk, hunkline)
+    """
+
+    def firstchild(self):
+        raise NotImplementedError("method must be implemented by subclass")
+
+    def lastchild(self):
+        raise NotImplementedError("method must be implemented by subclass")
+
+    def allchildren(self):
+        "Return a list of all of the direct children of this node"
+        raise NotImplementedError("method must be implemented by subclass")
+    def nextsibling(self):
+        """
+        Return the closest next item of the same type where there are no items
+        of different types between the current item and this closest item.
+        If no such item exists, return None.
+
+        """
+        raise NotImplementedError("method must be implemented by subclass")
+
+    def prevsibling(self):
+        """
+        Return the closest previous item of the same type where there are no
+        items of different types between the current item and this closest item.
+        If no such item exists, return None.
+
+        """
+        raise NotImplementedError("method must be implemented by subclass")
+
+    def parentitem(self):
+        raise NotImplementedError("method must be implemented by subclass")
+
+
+    def nextitem(self, constrainlevel=True, skipfolded=True):
+        """
+        If constrainLevel == True, return the closest next item
+        of the same type where there are no items of different types between
+        the current item and this closest item.
+
+        If constrainLevel == False, then try to return the next item
+        closest to this item, regardless of item's type (header, hunk, or
+        HunkLine).
+
+        If skipFolded == True, and the current item is folded, then the child
+        items that are hidden due to folding will be skipped when determining
+        the next item.
+
+        If it is not possible to get the next item, return None.
+
+        """
+        try:
+            itemfolded = self.folded
+        except AttributeError:
+            itemfolded = False
+        if constrainlevel:
+            return self.nextsibling()
+        elif skipfolded and itemfolded:
+            nextitem = self.nextsibling()
+            if nextitem is None:
+                try:
+                    nextitem = self.parentitem().nextsibling()
+                except AttributeError:
+                    nextitem = None
+            return nextitem
+        else:
+            # try child
+            item = self.firstchild()
+            if item is not None:
+                return item
+
+            # else try next sibling
+            item = self.nextsibling()
+            if item is not None:
+                return item
+
+            try:
+                # else try parent's next sibling
+                item = self.parentitem().nextsibling()
+                if item is not None:
+                    return item
+
+                # else return grandparent's next sibling (or None)
+                return self.parentitem().parentitem().nextsibling()
+
+            except AttributeError: # parent and/or grandparent was None
+                return None
+
+    def previtem(self, constrainlevel=True, skipfolded=True):
+        """
+        If constrainLevel == True, return the closest previous item
+        of the same type where there are no items of different types between
+        the current item and this closest item.
+
+        If constrainLevel == False, then try to return the previous item
+        closest to this item, regardless of item's type (header, hunk, or
+        HunkLine).
+
+        If skipFolded == True, and the current item is folded, then the items
+        that are hidden due to folding will be skipped when determining the
+        next item.
+
+        If it is not possible to get the previous item, return None.
+
+        """
+        if constrainlevel:
+            return self.prevsibling()
+        else:
+            # try previous sibling's last child's last child,
+            # else try previous sibling's last child, else try previous sibling
+            prevsibling = self.prevsibling()
+            if prevsibling is not None:
+                prevsiblinglastchild = prevsibling.lastchild()
+                if ((prevsiblinglastchild is not None) and
+                    not prevsibling.folded):
+                    prevsiblinglclc = prevsiblinglastchild.lastchild()
+                    if ((prevsiblinglclc is not None) and
+                        not prevsiblinglastchild.folded):
+                        return prevsiblinglclc
+                    else:
+                        return prevsiblinglastchild
+                else:
+                    return prevsibling
+
+            # try parent (or None)
+            return self.parentitem()
+
+class patch(patchnode, list): # todo: rename patchroot
+    """
+    list of header objects representing the patch.
+
+    """
+    def __init__(self, headerlist):
+        self.extend(headerlist)
+        # add parent patch object reference to each header
+        for header in self:
+            header.patch = self
+
+class uiheader(patchnode):
+    """patch header
+
+    xxx shouldn't we move this to mercurial/patch.py?
+    """
+
+    def __init__(self, header):
+        self.nonuiheader = header
+        # flag to indicate whether to apply this chunk
+        self.applied = True
+        # flag which only affects the status display indicating if a node's
+        # children are partially applied (i.e. some applied, some not).
+        self.partial = False
+
+        # flag to indicate whether to display as folded/unfolded to user
+        self.folded = True
+
+        # list of all headers in patch
+        self.patch = None
+
+        # flag is False if this header was ever unfolded from initial state
+        self.neverunfolded = True
+        self.hunks = [uihunk(h, self) for h in self.hunks]
+
+
+    def prettystr(self):
+        x = cStringIO.StringIO()
+        self.pretty(x)
+        return x.getvalue()
+
+    def nextsibling(self):
+        numheadersinpatch = len(self.patch)
+        indexofthisheader = self.patch.index(self)
+
+        if indexofthisheader < numheadersinpatch - 1:
+            nextheader = self.patch[indexofthisheader + 1]
+            return nextheader
+        else:
+            return None
+
+    def prevsibling(self):
+        indexofthisheader = self.patch.index(self)
+        if indexofthisheader > 0:
+            previousheader = self.patch[indexofthisheader - 1]
+            return previousheader
+        else:
+            return None
+
+    def parentitem(self):
+        """
+        there is no 'real' parent item of a header that can be selected,
+        so return None.
+        """
+        return None
+
+    def firstchild(self):
+        "return the first child of this item, if one exists.  otherwise None."
+        if len(self.hunks) > 0:
+            return self.hunks[0]
+        else:
+            return None
+
+    def lastchild(self):
+        "return the last child of this item, if one exists.  otherwise None."
+        if len(self.hunks) > 0:
+            return self.hunks[-1]
+        else:
+            return None
+
+    def allchildren(self):
+        "return a list of all of the direct children of this node"
+        return self.hunks
+
+    def __getattr__(self, name):
+        return getattr(self.nonuiheader, name)
+
+class uihunkline(patchnode):
+    "represents a changed line in a hunk"
+    def __init__(self, linetext, hunk):
+        self.linetext = linetext
+        self.applied = True
+        # the parent hunk to which this line belongs
+        self.hunk = hunk
+        # folding lines currently is not used/needed, but this flag is needed
+        # in the previtem method.
+        self.folded = False
+
+    def prettystr(self):
+        return self.linetext
+
+    def nextsibling(self):
+        numlinesinhunk = len(self.hunk.changedlines)
+        indexofthisline = self.hunk.changedlines.index(self)
+
+        if (indexofthisline < numlinesinhunk - 1):
+            nextline = self.hunk.changedlines[indexofthisline + 1]
+            return nextline
+        else:
+            return None
+
+    def prevsibling(self):
+        indexofthisline = self.hunk.changedlines.index(self)
+        if indexofthisline > 0:
+            previousline = self.hunk.changedlines[indexofthisline - 1]
+            return previousline
+        else:
+            return None
+
+    def parentitem(self):
+        "return the parent to the current item"
+        return self.hunk
+
+    def firstchild(self):
+        "return the first child of this item, if one exists.  otherwise None."
+        # hunk-lines don't have children
+        return None
+
+    def lastchild(self):
+        "return the last child of this item, if one exists.  otherwise None."
+        # hunk-lines don't have children
+        return None
+
+class uihunk(patchnode):
+    """ui patch hunk, wraps a hunk and keep track of ui behavior """
+    maxcontext = 3
+
+    def __init__(self, hunk, header):
+        self._hunk = hunk
+        self.changedlines = [uihunkline(line, self) for line in hunk.hunk]
+        self.header = header
+        # used at end for detecting how many removed lines were un-applied
+        self.originalremoved = self.removed
+
+        # flag to indicate whether to display as folded/unfolded to user
+        self.folded = True
+        # flag to indicate whether to apply this chunk
+        self.applied = True
+        # flag which only affects the status display indicating if a node's
+        # children are partially applied (i.e. some applied, some not).
+        self.partial = False
+
+    def nextsibling(self):
+        numhunksinheader = len(self.header.hunks)
+        indexofthishunk = self.header.hunks.index(self)
+
+        if (indexofthishunk < numhunksinheader - 1):
+            nexthunk = self.header.hunks[indexofthishunk + 1]
+            return nexthunk
+        else:
+            return None
+
+    def prevsibling(self):
+        indexofthishunk = self.header.hunks.index(self)
+        if indexofthishunk > 0:
+            previoushunk = self.header.hunks[indexofthishunk - 1]
+            return previoushunk
+        else:
+            return None
+
+    def parentitem(self):
+        "return the parent to the current item"
+        return self.header
+
+    def firstchild(self):
+        "return the first child of this item, if one exists.  otherwise None."
+        if len(self.changedlines) > 0:
+            return self.changedlines[0]
+        else:
+            return None
+
+    def lastchild(self):
+        "return the last child of this item, if one exists.  otherwise None."
+        if len(self.changedlines) > 0:
+            return self.changedlines[-1]
+        else:
+            return None
+
+    def allchildren(self):
+        "return a list of all of the direct children of this node"
+        return self.changedlines
+    def countchanges(self):
+        """changedlines -> (n+,n-)"""
+        add = len([l for l in self.changedlines if l.applied
+                   and l.prettystr()[0] == '+'])
+        rem = len([l for l in self.changedlines if l.applied
+                   and l.prettystr()[0] == '-'])
+        return add, rem
+
+    def getfromtoline(self):
+        # calculate the number of removed lines converted to context lines
+        removedconvertedtocontext = self.originalremoved - self.removed
+
+        contextlen = (len(self.before) + len(self.after) +
+                      removedconvertedtocontext)
+        if self.after and self.after[-1] == '\\ no newline at end of file\n':
+            contextlen -= 1
+        fromlen = contextlen + self.removed
+        tolen = contextlen + self.added
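+        # e.g. with 3 leading and 3 trailing context lines, one of two removed
+        # lines left un-applied (so converted back to context) and one added
+        # line kept: contextlen = 3 + 3 + 1 = 7, fromlen = 7 + 1 = 8 and
+        # tolen = 7 + 1 = 8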
+
+        # diffutils manual, section "2.2.2.2 detailed description of unified
+        # format": "an empty hunk is considered to end at the line that
+        # precedes the hunk."
+        #
+        # so, if either of hunks is empty, decrease its line start. --immerrr
+        # but only do this if fromline > 0, to avoid having, e.g fromline=-1.
+        fromline, toline = self.fromline, self.toline
+        if fromline != 0:
+            if fromlen == 0:
+                fromline -= 1
+            if tolen == 0:
+                toline -= 1
+
+        fromtoline = '@@ -%d,%d +%d,%d @@%s\n' % (
+            fromline, fromlen, toline, tolen,
+            self.proc and (' ' + self.proc))
+        return fromtoline
+
+    def write(self, fp):
+        # update self.added/removed, which are used by getfromtoline()
+        self.added, self.removed = self.countchanges()
+        fp.write(self.getfromtoline())
+
+        hunklinelist = []
+        # add the following to the list: (1) all applied lines, and
+        # (2) all unapplied removal lines (convert these to context lines)
+        for changedline in self.changedlines:
+            changedlinestr = changedline.prettystr()
+            if changedline.applied:
+                hunklinelist.append(changedlinestr)
+            elif changedlinestr[0] == "-":
+                hunklinelist.append(" " + changedlinestr[1:])
+
+        fp.write(''.join(self.before + hunklinelist + self.after))
+
+    pretty = write
+
+    def prettystr(self):
+        x = cStringIO.StringIO()
+        self.pretty(x)
+        return x.getvalue()
+
+    def __getattr__(self, name):
+        return getattr(self._hunk, name)
+    def __repr__(self):
+        return '<hunk %r@%d>' % (self.filename(), self.fromline)
+
+def filterpatch(ui, chunks, chunkselector):
+    """interactively filter patch chunks into applied-only chunks"""
+
+    chunks = list(chunks)
+    # convert chunks list into structure suitable for displaying/modifying
+    # with curses.  create a list of headers only.
+    headers = [c for c in chunks if isinstance(c, patchmod.header)]
+
+    # if there are no changed files
+    if len(headers) == 0:
+        return []
+    uiheaders = [uiheader(h) for h in headers]
+    # let user choose headers/hunks/lines, and mark their applied flags
+    # accordingly
+    chunkselector(ui, uiheaders)
+    appliedhunklist = []
+    for hdr in uiheaders:
+        if (hdr.applied and
+            (hdr.special() or len([h for h in hdr.hunks if h.applied]) > 0)):
+            appliedhunklist.append(hdr)
+            fixoffset = 0
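+            # fixoffset tracks how far later hunks of this file shift in the
+            # 'to' file when earlier hunks are dropped: skipping a hunk that
+            # removed r lines and added a lines moves what follows by r - a
+            # lines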
+            for hnk in hdr.hunks:
+                if hnk.applied:
+                    appliedhunklist.append(hnk)
+                    # adjust the 'to'-line offset of the hunk to be correct
+                    # after de-activating some of the other hunks for this file
+                    if fixoffset:
+                        #hnk = copy.copy(hnk) # necessary??
+                        hnk.toline += fixoffset
+                else:
+                    fixoffset += hnk.removed - hnk.added
+
+    return appliedhunklist
+
+def gethw():
+    """
+    magically get the current height and width of the window (without initscr)
+
+    this is a rip-off of a rip-off - taken from the bpython code.  it is
+    useful / necessary because otherwise curses.initscr() must be called,
+    which can leave the terminal in a nasty state after exiting.
+
+    """
+    h, w = struct.unpack(
+        "hhhh", fcntl.ioctl(_origstdout, termios.TIOCGWINSZ, "\000"*8))[0:2]
+    return h, w
+
+def chunkselector(ui, headerlist):
+    """
+    curses interface to get selection of chunks, and mark the applied flags
+    of the chosen chunks.
+
+    """
+    ui.write(_('starting interactive selection\n'))
+    chunkselector = curseschunkselector(headerlist, ui)
+    curses.wrapper(chunkselector.main)
+
+def testdecorator(testfn, f):
+    def u(*args, **kwargs):
+        return f(testfn, *args, **kwargs)
+    return u
+
+def testchunkselector(testfn, ui, headerlist):
+    """
+    test interface to get selection of chunks, and mark the applied flags
+    of the chosen chunks.
+
+    """
+    chunkselector = curseschunkselector(headerlist, ui)
+    if testfn and os.path.exists(testfn):
+        testf = open(testfn)
+        testcommands = map(lambda x: x.rstrip('\n'), testf.readlines())
+        testf.close()
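+        # replay the recorded keystrokes one at a time until the chunk
+        # selector reports that the session is finished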
+        while True:
+            if chunkselector.handlekeypressed(testcommands.pop(0), test=True):
+                break
+
+class curseschunkselector(object):
+    def __init__(self, headerlist, ui):
+        # put the headers into a patch object
+        self.headerlist = patch(headerlist)
+
+        self.ui = ui
+
+        # list of all chunks
+        self.chunklist = []
+        for h in headerlist:
+            self.chunklist.append(h)
+            self.chunklist.extend(h.hunks)
+
+        # dictionary mapping (fgcolor, bgcolor) pairs to the
+        # corresponding curses color-pair value.
+        self.colorpairs = {}
+        # maps custom nicknames of color-pairs to curses color-pair values
+        self.colorpairnames = {}
+
+        # the currently selected header, hunk, or hunk-line
+        self.currentselecteditem = self.headerlist[0]
+
+        # updated when printing out patch-display -- the 'lines' here are the
+        # line positions *in the pad*, not on the screen.
+        self.selecteditemstartline = 0
+        self.selecteditemendline = None
+
+        # define indentation levels
+        self.headerindentnumchars = 0
+        self.hunkindentnumchars = 3
+        self.hunklineindentnumchars = 6
+
+        # the first line of the pad to print to the screen
+        self.firstlineofpadtoprint = 0
+
+        # keeps track of the number of lines in the pad
+        self.numpadlines = None
+
+        self.numstatuslines = 2
+
+        # keep a running count of the number of lines printed to the pad
+        # (used for determining when the selected item begins/ends)
+        self.linesprintedtopadsofar = 0
+
+        # the first line of the pad which is visible on the screen
+        self.firstlineofpadtoprint = 0
+
+        # stores optional text for a commit comment provided by the user
+        self.commenttext = ""
+
+        # if the last 'toggle all' command caused all changes to be applied
+        self.waslasttoggleallapplied = True
+
+    def uparrowevent(self):
+        """
+        try to select the previous item to the current item that has the
+        most-indented level.  for example, if a hunk is selected, try to select
+        the last hunkline of the hunk prior to the selected hunk.  or, if
+        the first hunkline of a hunk is currently selected, then select the
+        hunk itself.
+
+        if the currently selected item is already at the top of the screen,
+        scroll the screen down to show the new-selected item.
+
+        """
+        currentitem = self.currentselecteditem
+
+        nextitem = currentitem.previtem(constrainlevel=False)
+
+        if nextitem is None:
+            # if there's no previous item (i.e. currentitem is the first
+            # header), then no change...
+            nextitem = currentitem
+
+        self.currentselecteditem = nextitem
+
+    def uparrowshiftevent(self):
+        """
+        select (if possible) the previous item on the same level as the
+        currently selected item.  otherwise, select (if possible) the
+        parent-item of the currently selected item.
+
+        if the currently selected item is already at the top of the screen,
+        scroll the screen down to show the new-selected item.
+
+        """
+        currentitem = self.currentselecteditem
+        nextitem = currentitem.previtem()
+        # if there's no previous item on this level, try choosing the parent
+        if nextitem is None:
+            nextitem = currentitem.parentitem()
+        if nextitem is None:
+            # if no parent item (i.e. currentitem is the first header), then
+            # no change...
+            nextitem = currentitem
+
+        self.currentselecteditem = nextitem
+
+    def downarrowevent(self):
+        """
+        try to select the next item to the current item that has the
+        most-indented level.  for example, if a hunk is selected, select
+        the first hunkline of the selected hunk.  or, if the last hunkline of
+        a hunk is currently selected, then select the next hunk, if one exists,
+        or if not, the next header if one exists.
+
+        if the currently selected item is already at the bottom of the screen,
+        scroll the screen up to show the new-selected item.
+
+        """
+        #self.startprintline += 1 #debug
+        currentitem = self.currentselecteditem
+
+        nextitem = currentitem.nextitem(constrainlevel=False)
+        # if there's no next item, keep the selection as-is
+        if nextitem is None:
+            nextitem = currentitem
+
+        self.currentselecteditem = nextitem
+
+    def downarrowshiftevent(self):
+        """
+        if the cursor is already at the bottom chunk, scroll the screen up and
+        move the cursor-position to the subsequent chunk.  otherwise, only move
+        the cursor position down one chunk.
+
+        """
+        # todo: update docstring
+
+        currentitem = self.currentselecteditem
+        nextitem = currentitem.nextitem()
+        # if there's no next item on this level, try choosing the parent's
+        # nextitem.
+        if nextitem is None:
+            try:
+                nextitem = currentitem.parentitem().nextitem()
+            except AttributeError:
+                # parentitem returned None, so nextitem() can't be called
+                nextitem = None
+        if nextitem is None:
+            # if no next item on parent-level, then no change...
+            nextitem = currentitem
+
+        self.currentselecteditem = nextitem
+
+    def rightarrowevent(self):
+        """
+        select (if possible) the first of this item's child-items.
+
+        """
+        currentitem = self.currentselecteditem
+        nextitem = currentitem.firstchild()
+
+        # turn off folding if we want to show a child-item
+        if currentitem.folded:
+            self.togglefolded(currentitem)
+
+        if nextitem is None:
+            # if there is no child item, then no change...
+            nextitem = currentitem
+
+        self.currentselecteditem = nextitem
+
+    def leftarrowevent(self):
+        """
+        if the current item can be folded (i.e. it is an unfolded header or
+        hunk), then fold it.  otherwise try to select (if possible) the parent
+        of this item.
+
+        """
+        currentitem = self.currentselecteditem
+
+        # try to fold the item
+        if not isinstance(currentitem, uihunkline):
+            if not currentitem.folded:
+                self.togglefolded(item=currentitem)
+                return
+
+        # if it can't be folded, try to select the parent item
+        nextitem = currentitem.parentitem()
+
+        if nextitem is None:
+            # if no item on parent-level, then no change...
+            nextitem = currentitem
+            if not nextitem.folded:
+                self.togglefolded(item=nextitem)
+
+        self.currentselecteditem = nextitem
+
+    def leftarrowshiftevent(self):
+        """
+        select the header of the current item (or fold current item if the
+        current item is already a header).
+
+        """
+        currentitem = self.currentselecteditem
+
+        if isinstance(currentitem, uiheader):
+            if not currentitem.folded:
+                self.togglefolded(item=currentitem)
+                return
+
+        # select the parent item recursively until we're at a header
+        while True:
+            nextitem = currentitem.parentitem()
+            if nextitem is None:
+                break
+            else:
+                currentitem = nextitem
+
+        self.currentselecteditem = currentitem
+
+    def updatescroll(self):
+        "scroll the screen to fully show the currently-selected"
+        selstart = self.selecteditemstartline
+        selend = self.selecteditemendline
+        #selnumlines = selend - selstart
+        padstart = self.firstlineofpadtoprint
+        padend = padstart + self.yscreensize - self.numstatuslines - 1
+        # 'buffered' pad start/end values which scroll with a certain
+        # top/bottom context margin
+        padstartbuffered = padstart + 3
+        padendbuffered = padend - 3
+
+        if selend > padendbuffered:
+            self.scrolllines(selend - padendbuffered)
+        elif selstart < padstartbuffered:
+            # negative values scroll in pgup direction
+            self.scrolllines(selstart - padstartbuffered)
+
+
+    def scrolllines(self, numlines):
+        "scroll the screen up (down) by numlines when numlines >0 (<0)."
+        self.firstlineofpadtoprint += numlines
+        if self.firstlineofpadtoprint < 0:
+            self.firstlineofpadtoprint = 0
+        if self.firstlineofpadtoprint > self.numpadlines - 1:
+            self.firstlineofpadtoprint = self.numpadlines - 1
+
+    def toggleapply(self, item=None):
+        """
+        toggle the applied flag of the specified item.  if no item is specified,
+        toggle the flag of the currently selected item.
+
+        """
+        if item is None:
+            item = self.currentselecteditem
+
+        item.applied = not item.applied
+
+        if isinstance(item, uiheader):
+            item.partial = False
+            if item.applied:
+                # apply all its hunks
+                for hnk in item.hunks:
+                    hnk.applied = True
+                    # apply all their hunklines
+                    for hunkline in hnk.changedlines:
+                        hunkline.applied = True
+            else:
+                # un-apply all its hunks
+                for hnk in item.hunks:
+                    hnk.applied = False
+                    hnk.partial = False
+                    # un-apply all their hunklines
+                    for hunkline in hnk.changedlines:
+                        hunkline.applied = False
+        elif isinstance(item, uihunk):
+            item.partial = False
+            # apply all its hunklines
+            for hunkline in item.changedlines:
+                hunkline.applied = item.applied
+
+            siblingappliedstatus = [hnk.applied for hnk in item.header.hunks]
+            allsiblingsapplied = not (False in siblingappliedstatus)
+            nosiblingsapplied = not (True in siblingappliedstatus)
+
+            siblingspartialstatus = [hnk.partial for hnk in item.header.hunks]
+            somesiblingspartial = (True in siblingspartialstatus)
+
+            #cases where applied or partial should be removed from header
+
+            # if no 'sibling' hunks are applied (including this hunk)
+            if nosiblingsapplied:
+                if not item.header.special():
+                    item.header.applied = False
+                    item.header.partial = False
+            else: # some/all parent siblings are applied
+                item.header.applied = True
+                item.header.partial = (somesiblingspartial or
+                                        not allsiblingsapplied)
+
+        elif isinstance(item, uihunkline):
+            siblingappliedstatus = [ln.applied for ln in item.hunk.changedlines]
+            allsiblingsapplied = not (False in siblingappliedstatus)
+            nosiblingsapplied = not (True in siblingappliedstatus)
+
+            # if no 'sibling' lines are applied
+            if nosiblingsapplied:
+                item.hunk.applied = False
+                item.hunk.partial = False
+            elif allsiblingsapplied:
+                item.hunk.applied = True
+                item.hunk.partial = False
+            else: # some siblings applied
+                item.hunk.applied = True
+                item.hunk.partial = True
+
+            parentsiblingsapplied = [hnk.applied for hnk
+                                     in item.hunk.header.hunks]
+            noparentsiblingsapplied = not (True in parentsiblingsapplied)
+            allparentsiblingsapplied = not (False in parentsiblingsapplied)
+
+            parentsiblingspartial = [hnk.partial for hnk
+                                     in item.hunk.header.hunks]
+            someparentsiblingspartial = (True in parentsiblingspartial)
+
+            # if all parent hunks are not applied, un-apply header
+            if noparentsiblingsapplied:
+                if not item.hunk.header.special():
+                    item.hunk.header.applied = False
+                    item.hunk.header.partial = False
+            # set the applied and partial status of the header if needed
+            else: # some/all parent siblings are applied
+                item.hunk.header.applied = True
+                item.hunk.header.partial = (someparentsiblingspartial or
+                                            not allparentsiblingsapplied)
+
+    def toggleall(self):
+        "toggle the applied flag of all items."
+        if self.waslasttoggleallapplied: # then unapply them this time
+            for item in self.headerlist:
+                if item.applied:
+                    self.toggleapply(item)
+        else:
+            for item in self.headerlist:
+                if not item.applied:
+                    self.toggleapply(item)
+        self.waslasttoggleallapplied = not self.waslasttoggleallapplied
+
+    def togglefolded(self, item=None, foldparent=False):
+        "toggle folded flag of specified item (defaults to currently selected)"
+        if item is None:
+            item = self.currentselecteditem
+        if foldparent or (isinstance(item, uiheader) and item.neverunfolded):
+            if not isinstance(item, uiheader):
+                # we need to select the parent item in this case
+                self.currentselecteditem = item = item.parentitem()
+            elif item.neverunfolded:
+                item.neverunfolded = False
+
+            # also fold any foldable children of the parent/current item
+            if isinstance(item, uiheader): # the original or 'new' item
+                for child in item.allchildren():
+                    child.folded = not item.folded
+
+        if isinstance(item, (uiheader, uihunk)):
+            item.folded = not item.folded
+
+
+    def alignstring(self, instr, window):
+        """
+        add whitespace to the end of a string in order to make it fill
+        the screen in the x direction.  the current cursor position is
+        taken into account when making this calculation.  the string can span
+        multiple lines.
+
+        """
+        y, xstart = window.getyx()
+        width = self.xscreensize
+        # turn tabs into spaces
+        instr = instr.expandtabs(4)
+        strwidth = encoding.colwidth(instr)
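+        # pad out to one column short of the screen width, leaving the last
+        # column free; filling it could make curses advance to the next line
+        # before the explicit newline below is written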
+        numspaces = (width - ((strwidth + xstart) % width) - 1)
+        return instr + " " * numspaces + "\n"
+
+    def printstring(self, window, text, fgcolor=None, bgcolor=None, pair=None,
+        pairname=None, attrlist=None, towin=True, align=True, showwhtspc=False):
+        """
+        print the string, text, with the specified colors and attributes, to
+        the specified curses window object.
+
+        the foreground and background colors are of the form
+        curses.color_xxxx, where xxxx is one of: [black, blue, cyan, green,
+        magenta, red, white, yellow].  if pairname is provided, a color
+        pair will be looked up in the self.colorpairnames dictionary.
+
+        attrlist is a list containing text attributes in the form of
+        curses.a_xxxx, where xxxx can be: [bold, dim, normal, standout,
+        underline].
+
+        if align == True, whitespace is added to the printed string such that
+        the string stretches to the right border of the window.
+
+        if showwhtspc == True, trailing whitespace of a string is highlighted.
+
+        """
+        # preprocess the text, converting tabs to spaces
+        text = text.expandtabs(4)
+        # strip \n, and convert control characters to ^[char] representation
+        text = re.sub(r'[\x00-\x08\x0a-\x1f]',
+                lambda m:'^' + chr(ord(m.group()) + 64), text.strip('\n'))
+
+        if pair is not None:
+            colorpair = pair
+        elif pairname is not None:
+            colorpair = self.colorpairnames[pairname]
+        else:
+            if fgcolor is None:
+                fgcolor = -1
+            if bgcolor is None:
+                bgcolor = -1
+            if (fgcolor, bgcolor) in self.colorpairs:
+                colorpair = self.colorpairs[(fgcolor, bgcolor)]
+            else:
+                colorpair = self.getcolorpair(fgcolor, bgcolor)
+        # add attributes if possible
+        if attrlist is None:
+            attrlist = []
+        if colorpair < 256:
+            # then it is safe to apply all attributes
+            for textattr in attrlist:
+                colorpair |= textattr
+        else:
+            # just apply a select few (safe?) attributes
+            for textattr in (curses.A_UNDERLINE, curses.A_BOLD):
+                if textattr in attrlist:
+                    colorpair |= textattr
+
+        y, xstart = self.chunkpad.getyx()
+        t = "" # variable for counting lines printed
+        # if requested, show trailing whitespace
+        if showwhtspc:
+            origlen = len(text)
+            text = text.rstrip(' \n') # tabs have already been expanded
+            strippedlen = len(text)
+            numtrailingspaces = origlen - strippedlen
+
+        if towin:
+            window.addstr(text, colorpair)
+        t += text
+
+        if showwhtspc:
+            wscolorpair = colorpair | curses.A_REVERSE
+            if towin:
+                for i in range(numtrailingspaces):
+                    window.addch(curses.ACS_CKBOARD, wscolorpair)
+            t += " " * numtrailingspaces
+
+        if align:
+            if towin:
+                extrawhitespace = self.alignstring("", window)
+                window.addstr(extrawhitespace, colorpair)
+            else:
+                # need to use t, since the x position hasn't incremented
+                extrawhitespace = self.alignstring(t, window)
+            t += extrawhitespace
+
+        # self.linesprintedtopadsofar is reset to 0 at the beginning of
+        # printitem()
+
+        linesprinted = (xstart + len(t)) / self.xscreensize
+        self.linesprintedtopadsofar += linesprinted
+        return t
+
+    def updatescreen(self):
+        self.statuswin.erase()
+        self.chunkpad.erase()
+
+        printstring = self.printstring
+
+        # print out the status lines at the top
+        try:
+            printstring(self.statuswin,
+                        "SELECT CHUNKS: (j/k/up/dn/pgup/pgdn) move cursor; "
+                        "(space/A) toggle hunk/all; (e)dit hunk;",
+                        pairname="legend")
+            printstring(self.statuswin,
+                        " (f)old/unfold; (c)ommit applied; (q)uit; (?) help "
+                        "| [X]=hunk applied **=folded",
+                        pairname="legend")
+        except curses.error:
+            pass
+
+        # print out the patch in the remaining part of the window
+        try:
+            self.printitem()
+            self.updatescroll()
+            self.chunkpad.refresh(self.firstlineofpadtoprint, 0,
+                                  self.numstatuslines, 0,
+                                  self.yscreensize + 1 - self.numstatuslines,
+                                  self.xscreensize)
+        except curses.error:
+            pass
+
+        # refresh([pminrow, pmincol, sminrow, smincol, smaxrow, smaxcol])
+        self.statuswin.refresh()
+
+    def getstatusprefixstring(self, item):
+        """
+        create a string to prefix a line with, indicating whether 'item'
+        is applied and/or folded.
+
+        """
+        # create checkbox string
+        if item.applied:
+            if not isinstance(item, uihunkline) and item.partial:
+                checkbox = "[~]"
+            else:
+                checkbox = "[x]"
+        else:
+            checkbox = "[ ]"
+
+        try:
+            if item.folded:
+                checkbox += "**"
+                if isinstance(item, uiheader):
+                    # one of "m", "a", or "d" (modified, added, deleted)
+                    filestatus = item.changetype
+
+                    checkbox += filestatus + " "
+            else:
+                checkbox += "  "
+                if isinstance(item, uiheader):
+                    # add two more spaces for headers
+                    checkbox += "  "
+        except AttributeError: # not foldable
+            checkbox += "  "
+
+        return checkbox
+
+    def printheader(self, header, selected=False, towin=True,
+                    ignorefolding=False):
+        """
+        print the header to the pad.  if towin is False, don't print anything
+        to the window; just build and return the string that would have been
+        printed.
+
+        """
+        outstr = ""
+        text = header.prettystr()
+        chunkindex = self.chunklist.index(header)
+
+        if chunkindex != 0 and not header.folded:
+            # add separating line before headers
+            outstr += self.printstring(self.chunkpad, '_' * self.xscreensize,
+                                       towin=towin, align=False)
+        # select color-pair based on if the header is selected
+        colorpair = self.getcolorpair(name=selected and "selected" or "normal",
+                                      attrlist=[curses.A_BOLD])
+
+        # print out each line of the chunk, expanding it to screen width
+
+        # number of characters to indent lines on this level by
+        indentnumchars = 0
+        checkbox = self.getstatusprefixstring(header)
+        if not header.folded or ignorefolding:
+            textlist = text.split("\n")
+            linestr = checkbox + textlist[0]
+        else:
+            linestr = checkbox + header.filename()
+        outstr += self.printstring(self.chunkpad, linestr, pair=colorpair,
+                                   towin=towin)
+        if not header.folded or ignorefolding:
+            if len(textlist) > 1:
+                for line in textlist[1:]:
+                    linestr = " "*(indentnumchars + len(checkbox)) + line
+                    outstr += self.printstring(self.chunkpad, linestr,
+                                               pair=colorpair, towin=towin)
+
+        return outstr
+
+    def printhunklinesbefore(self, hunk, selected=False, towin=True,
+                             ignorefolding=False):
+        "includes start/end line indicator"
+        outstr = ""
+        # where hunk is in list of siblings
+        hunkindex = hunk.header.hunks.index(hunk)
+
+        if hunkindex != 0:
+            # add separating line before headers
+            outstr += self.printstring(self.chunkpad, ' '*self.xscreensize,
+                                       towin=towin, align=False)
+
+        colorpair = self.getcolorpair(name=selected and "selected" or "normal",
+                                      attrlist=[curses.A_BOLD])
+
+        # print out from-to line with checkbox
+        checkbox = self.getstatusprefixstring(hunk)
+
+        lineprefix = " "*self.hunkindentnumchars + checkbox
+        frtoline = "   " + hunk.getfromtoline().strip("\n")
+
+
+        outstr += self.printstring(self.chunkpad, lineprefix, towin=towin,
+                                   align=False) # add uncolored checkbox/indent
+        outstr += self.printstring(self.chunkpad, frtoline, pair=colorpair,
+                                   towin=towin)
+
+        if hunk.folded and not ignorefolding:
+            # skip remainder of output
+            return outstr
+
+        # print out lines of the chunk preceding changed-lines
+        for line in hunk.before:
+            linestr = " "*(self.hunklineindentnumchars + len(checkbox)) + line
+            outstr += self.printstring(self.chunkpad, linestr, towin=towin)
+
+        return outstr
+
+    def printhunklinesafter(self, hunk, towin=True, ignorefolding=False):
+        outstr = ""
+        if hunk.folded and not ignorefolding:
+            return outstr
+
+        # a bit superfluous, but to avoid hard-coding indent amount
+        checkbox = self.getstatusprefixstring(hunk)
+        for line in hunk.after:
+            linestr = " "*(self.hunklineindentnumchars + len(checkbox)) + line
+            outstr += self.printstring(self.chunkpad, linestr, towin=towin)
+
+        return outstr
+
+    def printhunkchangedline(self, hunkline, selected=False, towin=True):
+        outstr = ""
+        checkbox = self.getstatusprefixstring(hunkline)
+
+        linestr = hunkline.prettystr().strip("\n")
+
+        # select color-pair based on whether line is an addition/removal
+        if selected:
+            colorpair = self.getcolorpair(name="selected")
+        elif linestr.startswith("+"):
+            colorpair = self.getcolorpair(name="addition")
+        elif linestr.startswith("-"):
+            colorpair = self.getcolorpair(name="deletion")
+        elif linestr.startswith("\\"):
+            colorpair = self.getcolorpair(name="normal")
+
+        lineprefix = " "*self.hunklineindentnumchars + checkbox
+        outstr += self.printstring(self.chunkpad, lineprefix, towin=towin,
+                                   align=False) # add uncolored checkbox/indent
+        outstr += self.printstring(self.chunkpad, linestr, pair=colorpair,
+                                   towin=towin, showwhtspc=True)
+        return outstr
+
+    def printitem(self, item=None, ignorefolding=False, recursechildren=True,
+                  towin=True):
+        """
+        use __printitem() to print the specified item.
+        if item is not specified, then print the entire patch.
+        (hiding folded elements, etc. -- see __printitem() docstring)
+        """
+        if item is None:
+            item = self.headerlist
+        if recursechildren:
+            self.linesprintedtopadsofar = 0
+
+        outstr = []
+        self.__printitem(item, ignorefolding, recursechildren, outstr,
+                                  towin=towin)
+        return ''.join(outstr)
+
+    def outofdisplayedarea(self):
+        y, _ = self.chunkpad.getyx() # cursor location
+        # * 2 here works, but an optimization would be to use the max number
+        # of consecutive non-selectable lines, i.e. the max number of context
+        # lines for any hunk in the patch
+        miny = min(0, self.firstlineofpadtoprint - self.yscreensize)
+        maxy = self.firstlineofpadtoprint + self.yscreensize * 2
+        return y < miny or y > maxy
+
+    def handleselection(self, item, recursechildren):
+        selected = (item is self.currentselecteditem)
+        if selected and recursechildren:
+            # assumes line numbering starting from line 0
+            self.selecteditemstartline = self.linesprintedtopadsofar
+            selecteditemlines = self.getnumlinesdisplayed(item,
+                                                          recursechildren=False)
+            self.selecteditemendline = (self.selecteditemstartline +
+                                        selecteditemlines - 1)
+        return selected
+
+    def __printitem(self, item, ignorefolding, recursechildren, outstr,
+                    towin=True):
+        """
+        recursive method for printing out patch/header/hunk/hunk-line data to
+        screen.  also returns a string with all of the content of the displayed
+        patch (not including coloring, etc.).
+
+        if ignorefolding is True, then folded items are printed out.
+
+        if recursechildren is False, then only print the item without its
+        child items.
+
+        """
+        if towin and self.outofdisplayedarea():
+            return
+
+        selected = self.handleselection(item, recursechildren)
+
+        # patch object is a list of headers
+        if isinstance(item, patch):
+            if recursechildren:
+                for hdr in item:
+                    self.__printitem(hdr, ignorefolding,
+                            recursechildren, outstr, towin)
+        # todo: eliminate all isinstance() calls
+        if isinstance(item, uiheader):
+            outstr.append(self.printheader(item, selected, towin=towin,
+                                       ignorefolding=ignorefolding))
+            if recursechildren:
+                for hnk in item.hunks:
+                    self.__printitem(hnk, ignorefolding,
+                            recursechildren, outstr, towin)
+        elif (isinstance(item, uihunk) and
+              ((not item.header.folded) or ignorefolding)):
+            # print the hunk data which comes before the changed-lines
+            outstr.append(self.printhunklinesbefore(item, selected, towin=towin,
+                                                ignorefolding=ignorefolding))
+            if recursechildren:
+                for l in item.changedlines:
+                    self.__printitem(l, ignorefolding,
+                            recursechildren, outstr, towin)
+                outstr.append(self.printhunklinesafter(item, towin=towin,
+                                                ignorefolding=ignorefolding))
+        elif (isinstance(item, uihunkline) and
+              ((not item.hunk.folded) or ignorefolding)):
+            outstr.append(self.printhunkchangedline(item, selected,
+                towin=towin))
+
+        return outstr
+
+    def getnumlinesdisplayed(self, item=None, ignorefolding=False,
+                             recursechildren=True):
+        """
+        return the number of lines which would be displayed if the item were
+        to be printed to the display.  the item will not be printed to the
+        display (pad).
+        if no item is given, assume the entire patch.
+        if ignorefolding is True, folded items will be unfolded when counting
+        the number of lines.
+
+        """
+        # temporarily disable printing to windows by printstring
+        patchdisplaystring = self.printitem(item, ignorefolding,
+                                            recursechildren, towin=False)
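+        # every line rendered by printstring is padded so that its length
+        # (newline included) is a multiple of the screen width, so dividing
+        # the total length by the width gives the number of pad lines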
+        numlines = len(patchdisplaystring) / self.xscreensize
+        return numlines
+
+    def sigwinchhandler(self, n, frame):
+        "handle window resizing"
+        try:
+            curses.endwin()
+            self.yscreensize, self.xscreensize = gethw()
+            self.statuswin.resize(self.numstatuslines, self.xscreensize)
+            self.numpadlines = self.getnumlinesdisplayed(ignorefolding=True) + 1
+            self.chunkpad = curses.newpad(self.numpadlines, self.xscreensize)
+            # todo: try to resize commit message window if possible
+        except curses.error:
+            pass
+
+    def getcolorpair(self, fgcolor=None, bgcolor=None, name=None,
+                     attrlist=None):
+        """
+        get a curses color pair, adding it to self.colorpairs if it is not
+        already defined.  an optional string, name, can be passed as a shortcut
+        for referring to the color-pair.  by default, if no arguments are
+        specified, the white foreground / black background color-pair is
+        returned.
+
+        it is expected that this function will be used for initializing color
+        pairs, instead of calling curses.init_pair() directly.
+
+        attrlist is used to 'flavor' the returned color-pair.  this information
+        is not stored in self.colorpairs.  it contains attribute values like
+        curses.A_BOLD.
+
+        """
+        if (name is not None) and name in self.colorpairnames:
+            # then get the associated color pair and return it
+            colorpair = self.colorpairnames[name]
+        else:
+            if fgcolor is None:
+                fgcolor = -1
+            if bgcolor is None:
+                bgcolor = -1
+            if (fgcolor, bgcolor) in self.colorpairs:
+                colorpair = self.colorpairs[(fgcolor, bgcolor)]
+            else:
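+                # pair number 0 is reserved by curses for the terminal's
+                # default colors and cannot be redefined, so new pairs are
+                # allocated starting at index 1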
+                pairindex = len(self.colorpairs) + 1
+                curses.init_pair(pairindex, fgcolor, bgcolor)
+                colorpair = self.colorpairs[(fgcolor, bgcolor)] = (
+                    curses.color_pair(pairindex))
+                if name is not None:
+                    self.colorpairnames[name] = curses.color_pair(pairindex)
+
+        # add attributes if possible
+        if attrlist is None:
+            attrlist = []
+        if colorpair < 256:
+            # then it is safe to apply all attributes
+            for textattr in attrlist:
+                colorpair |= textattr
+        else:
+            # just apply a select few (safe?) attributes
+            for textattrib in (curses.A_UNDERLINE, curses.A_BOLD):
+                if textattrib in attrlist:
+                    colorpair |= textattrib
+        return colorpair
+
+    def initcolorpair(self, *args, **kwargs):
+        "same as getcolorpair."
+        self.getcolorpair(*args, **kwargs)
+
+    def helpwindow(self):
+        "print a help window to the screen.  exit after any keypress."
+        helptext = """            [press any key to return to the patch-display]
+
+crecord allows you to interactively choose among the changes you have made,
+and commit only those changes you select.  after committing the selected
+changes, the unselected changes are still present in your working copy, so you
+can use crecord multiple times to split large changes into smaller changesets.
+the following are valid keystrokes:
+
+                [space] : (un-)select item ([~]/[x] = partly/fully applied)
+                      A : (un-)select all items
+    up/down-arrow [k/j] : go to previous/next unfolded item
+        pgup/pgdn [K/J] : go to previous/next item of same type
+ right/left-arrow [l/h] : go to child item / parent item
+ shift-left-arrow   [H] : go to parent header / fold selected header
+                      f : fold / unfold item, hiding/revealing its children
+                      F : fold / unfold parent item and all of its ancestors
+                      m : edit / resume editing the commit message
+                      e : edit the currently selected hunk
+                      a : toggle amend mode (hg rev >= 2.2)
+                      c : commit selected changes
+                      r : review/edit and commit selected changes
+                      q : quit without committing (no changes will be made)
+                      ? : help (what you're currently reading)"""
+
+        helpwin = curses.newwin(self.yscreensize, 0, 0, 0)
+        helplines = helptext.split("\n")
+        helplines = helplines + [" "]*(
+            self.yscreensize - self.numstatuslines - len(helplines) - 1)
+        try:
+            for line in helplines:
+                self.printstring(helpwin, line, pairname="legend")
+        except curses.error:
+            pass
+        helpwin.refresh()
+        try:
+            helpwin.getkey()
+        except curses.error:
+            pass
+
+    def confirmationwindow(self, windowtext):
+        "display an informational window, then wait for and return a keypress."
+
+        confirmwin = curses.newwin(self.yscreensize, 0, 0, 0)
+        try:
+            lines = windowtext.split("\n")
+            for line in lines:
+                self.printstring(confirmwin, line, pairname="selected")
+        except curses.error:
+            pass
+        self.stdscr.refresh()
+        confirmwin.refresh()
+        try:
+            response = chr(self.stdscr.getch())
+        except ValueError:
+            response = None
+
+        return response
+
+    def confirmcommit(self, review=False):
+        "ask for 'y' to be pressed to confirm commit. return True if confirmed."
+        if review:
+            confirmtext = (
+"""if you answer yes to the following, the your currently chosen patch chunks
+will be loaded into an editor.  you may modify the patch from the editor, and
+save the changes if you wish to change the patch.  otherwise, you can just
+close the editor without saving to accept the current patch as-is.
+
+note: don't add/remove lines unless you also modify the range information.
+      failing to follow this rule will result in the commit aborting.
+
+are you sure you want to review/edit and commit the selected changes [yn]? """)
+        else:
+            confirmtext = (
+                "are you sure you want to commit the selected changes [yn]? ")
+
+        response = self.confirmationwindow(confirmtext)
+        if response is None:
+            response = "n"
+        if response.lower().startswith("y"):
+            return True
+        else:
+            return False
+
+    def recenterdisplayedarea(self):
+        """
+        once we have scrolled with pgup/pgdn, the selected item may lie
+        outside of the display zone. we print the patch with towin=False to
+        compute the location of the selected item even though it is outside
+        of the displayed zone, and then update the scroll.
+        """
+        self.printitem(towin=False)
+        self.updatescroll()
+
+    def toggleedit(self, item=None, test=False):
+        """
+        edit the currently selected chunk
+        """
+
+        def editpatchwitheditor(self, chunk):
+            if chunk is None:
+                self.ui.write(_('cannot edit patch for whole file'))
+                self.ui.write("\n")
+                return None
+            if chunk.header.binary():
+                self.ui.write(_('cannot edit patch for binary file'))
+                self.ui.write("\n")
+                return None
+            # patch comment based on the git one (based on comment at end of
+            # http://mercurial.selenic.com/wiki/recordextension)
+            phelp = '---' + _("""
+    to remove '-' lines, make them ' ' lines (context).
+    to remove '+' lines, delete them.
+    lines starting with # will be removed from the patch.
+
+    if the patch applies cleanly, the edited hunk will immediately be
+    added to the record list. if it does not apply cleanly, a rejects
+    file will be generated: you can use that when you try again. if
+    all lines of the hunk are removed, then the edit is aborted and
+    the hunk is left unchanged.
+    """)
+            (patchfd, patchfn) = tempfile.mkstemp(prefix="hg-editor-",
+                    suffix=".diff", text=True)
+            ncpatchfp = None
+            try:
+                # write the initial patch
+                f = os.fdopen(patchfd, "w")
+                chunk.header.write(f)
+                chunk.write(f)
+                f.write('\n'.join(['# ' + i for i in phelp.splitlines()]))
+                f.close()
+                # start the editor and wait for it to complete
+                editor = self.ui.geteditor()
+                self.ui.system("%s \"%s\"" % (editor, patchfn),
+                          environ={'hguser': self.ui.username()},
+                          onerr=util.Abort, errprefix=_("edit failed"))
+                # remove comment lines
+                patchfp = open(patchfn)
+                ncpatchfp = cStringIO.StringIO()
+                for line in patchfp:
+                    if not line.startswith('#'):
+                        ncpatchfp.write(line)
+                patchfp.close()
+                ncpatchfp.seek(0)
+                newpatches = patchmod.parsepatch(ncpatchfp)
+            finally:
+                os.unlink(patchfn)
+                del ncpatchfp
+            return newpatches
+        if item is None:
+            item = self.currentselecteditem
+        if isinstance(item, uiheader):
+            return
+        if isinstance(item, uihunkline):
+            item = item.parentitem()
+        if not isinstance(item, uihunk):
+            return
+
+        beforeadded, beforeremoved = item.added, item.removed
+        newpatches = editpatchwitheditor(self, item)
+        header = item.header
+        editedhunkindex = header.hunks.index(item)
+        hunksbefore = header.hunks[:editedhunkindex]
+        hunksafter = header.hunks[editedhunkindex + 1:]
+        newpatchheader = newpatches[0]
+        newhunks = [uihunk(h, header) for h in newpatchheader.hunks]
+        newadded = sum([h.added for h in newhunks])
+        newremoved = sum([h.removed for h in newhunks])
+        offset = (newadded - beforeadded) - (newremoved - beforeremoved)
+
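+        # the edit may have changed this hunk's length; shift the 'to' line
+        # numbers of every later hunk in the same file by the net difference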
+        for h in hunksafter:
+            h.toline += offset
+        for h in newhunks:
+            h.folded = False
+        header.hunks = hunksbefore + newhunks + hunksafter
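+        # if the edit left the patch with no hunks at all, restore the
+        # original (unedited) hunk rather than dropping it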
+        if self.emptypatch():
+            header.hunks = hunksbefore + [item] + hunksafter
+        self.currentselecteditem = header
+
+        if not test:
+            self.numpadlines = self.getnumlinesdisplayed(ignorefolding=True) + 1
+            self.chunkpad = curses.newpad(self.numpadlines, self.xscreensize)
+            self.updatescroll()
+            self.stdscr.refresh()
+            self.statuswin.refresh()
+            self.stdscr.keypad(1)
+
+    def emptypatch(self):
+        item = self.headerlist
+        if not item:
+            return True
+        for header in item:
+            if header.hunks:
+                return False
+        return True
+
+    def handlekeypressed(self, keypressed, test=False):
+        if keypressed in ["k", "KEY_UP"]:
+            self.uparrowevent()
+        if keypressed in ["k", "KEY_PPAGE"]:
+            self.uparrowshiftevent()
+        elif keypressed in ["j", "KEY_DOWN"]:
+            self.downarrowevent()
+        elif keypressed in ["j", "KEY_NPAGE"]:
+            self.downarrowshiftevent()
+        elif keypressed in ["l", "KEY_RIGHT"]:
+            self.rightarrowevent()
+        elif keypressed in ["h", "KEY_LEFT"]:
+            self.leftarrowevent()
+        elif keypressed in ["h", "KEY_SLEFT"]:
+            self.leftarrowshiftevent()
+        elif keypressed in ["q"]:
+            raise util.Abort(_('user quit'))
+        elif keypressed in ["c"]:
+            if self.confirmcommit():
+                return True
+        elif keypressed in ["r"]:
+            if self.confirmcommit(review=True):
+                return True
+        elif test and keypressed in ['X']:
+            return True
+        elif keypressed in [' '] or (test and keypressed in ["TOGGLE"]):
+            self.toggleapply()
+        elif keypressed in ['A']:
+            self.toggleall()
+        elif keypressed in ['e']:
+            self.toggleedit(test=test)
+        elif keypressed in ["f"]:
+            self.togglefolded()
+        elif keypressed in ["f"]:
+            self.togglefolded(foldparent=True)
+        elif keypressed in ["?"]:
+            self.helpwindow()
+
+    def main(self, stdscr):
+        """
+        method to be wrapped by curses.wrapper() for selecting chunks.
+
+        """
+        signal.signal(signal.SIGWINCH, self.sigwinchhandler)
+        self.stdscr = stdscr
+        self.yscreensize, self.xscreensize = self.stdscr.getmaxyx()
+
+        curses.start_color()
+        curses.use_default_colors()
+
+        # available colors: black, blue, cyan, green, magenta, white, yellow
+        # init_pair(color_id, foreground_color, background_color)
+        self.initcolorpair(None, None, name="normal")
+        self.initcolorpair(curses.COLOR_WHITE, curses.COLOR_MAGENTA,
+                           name="selected")
+        self.initcolorpair(curses.COLOR_RED, None, name="deletion")
+        self.initcolorpair(curses.COLOR_GREEN, None, name="addition")
+        self.initcolorpair(curses.COLOR_WHITE, curses.COLOR_BLUE, name="legend")
+        # newwin([height, width,] begin_y, begin_x)
+        self.statuswin = curses.newwin(self.numstatuslines, 0, 0, 0)
+        self.statuswin.keypad(1) # interpret arrow-key, etc. esc sequences
+
+        # figure out how much space to allocate for the chunk-pad which is
+        # used for displaying the patch
+
+        # stupid hack to prevent getnumlinesdisplayed from failing
+        self.chunkpad = curses.newpad(1, self.xscreensize)
+
+        # add 1 to account for the last line reaching the end of the line
+        self.numpadlines = self.getnumlinesdisplayed(ignorefolding=True) + 1
+        self.chunkpad = curses.newpad(self.numpadlines, self.xscreensize)
+
+        # initialize selecteditemendline (initial start-line is 0)
+        self.selecteditemendline = self.getnumlinesdisplayed(
+            self.currentselecteditem, recursechildren=False)
+
+        while True:
+            self.updatescreen()
+            try:
+                keypressed = self.statuswin.getkey()
+            except curses.error:
+                keypressed = "foobar"
+            if self.handlekeypressed(keypressed):
+                break
--- a/mercurial/dagutil.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/dagutil.py	Thu Apr 16 20:57:51 2015 -0500
@@ -88,7 +88,10 @@
     '''generic implementations for DAGs'''
 
     def ancestorset(self, starts, stops=None):
-        stops = stops and set(stops) or set()
+        if stops:
+            stops = set(stops)
+        else:
+            stops = set()
         seen = set()
         pending = list(starts)
         while pending:
@@ -179,7 +182,10 @@
     def ancestorset(self, starts, stops=None):
         rlog = self._revlog
         idx = rlog.index
-        stops = stops and set(stops) or set()
+        if stops:
+            stops = set(stops)
+        else:
+            stops = set()
         seen = set()
         pending = list(starts)
         while pending:
--- a/mercurial/default.d/mergetools.rc	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/default.d/mergetools.rc	Thu Apr 16 20:57:51 2015 -0500
@@ -102,6 +102,13 @@
 bcompare.priority=-1
 bcompare.diffargs=-lro -lefttitle=$plabel1 -righttitle=$clabel -solo -expandall $parent $child
 
+; OS X version of Beyond Compare
+bcomposx.executable = /Applications/Beyond Compare.app/Contents/MacOS/bcomp
+bcomposx.args=$local $other $base -mergeoutput=$output -ro -lefttitle=parent1 -centertitle=base -righttitle=parent2 -outputtitle=merged -automerge -reviewconflicts -solo
+bcomposx.gui=True
+bcomposx.priority=-1
+bcomposx.diffargs=-lro -lefttitle=$plabel1 -righttitle=$clabel -solo -expandall $parent $child
+
 winmerge.args=/e /x /wl /ub /dl other /dr local $other $local $output
 winmerge.regkey=Software\Thingamahoochie\WinMerge
 winmerge.regkeyalt=Software\Wow6432Node\Thingamahoochie\WinMerge\
--- a/mercurial/dirs.c	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/dirs.c	Thu Apr 16 20:57:51 2015 -0500
@@ -9,6 +9,7 @@
 
 #define PY_SSIZE_T_CLEAN
 #include <Python.h>
+#include <string.h>
 #include "util.h"
 
 /*
@@ -32,23 +33,19 @@
 {
 	const char *s = PyString_AS_STRING(path);
 
-	while (pos != -1) {
-		if (s[pos] == '/')
-			break;
-		pos -= 1;
-	}
-
-	return pos;
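+	/* scan forward for the next '/' at or after pos, returning its
+	   offset or -1 if there are no more separators */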
+	const char *ret = strchr(s + pos, '/');
+	return (ret != NULL) ? (ret - s) : -1;
 }
 
 static int _addpath(PyObject *dirs, PyObject *path)
 {
-	const char *cpath = PyString_AS_STRING(path);
-	Py_ssize_t pos = PyString_GET_SIZE(path);
+	char *cpath = PyString_AS_STRING(path);
+	Py_ssize_t len = PyString_GET_SIZE(path);
+	Py_ssize_t pos = -1;
 	PyObject *key = NULL;
 	int ret = -1;
 
-	while ((pos = _finddir(path, pos - 1)) != -1) {
+	while ((pos = _finddir(path, pos + 1)) != -1) {
 		PyObject *val;
 
 		/* It's likely that every prefix already has an entry
@@ -56,10 +53,18 @@
 		   deallocating a string for each prefix we check. */
 		if (key != NULL)
 			((PyStringObject *)key)->ob_shash = -1;
-		else {
-			/* Force Python to not reuse a small shared string. */
-			key = PyString_FromStringAndSize(cpath,
-							 pos < 2 ? 2 : pos);
+		else if (pos != 0) {
+			/* pos >= 1, which means that len >= 2. This is
+			   guaranteed to produce a non-interned string. */
+			key = PyString_FromStringAndSize(cpath, len);
+			if (key == NULL)
+				goto bail;
+		} else {
+			/* pos == 0, which means we need to increment the dir
+			   count for the empty string. We need to make sure we
+			   don't muck around with interned strings, so throw it
+			   away later. */
+			key = PyString_FromString("");
 			if (key == NULL)
 				goto bail;
 		}
@@ -69,6 +74,10 @@
 		val = PyDict_GetItem(dirs, key);
 		if (val != NULL) {
 			PyInt_AS_LONG(val) += 1;
+			if (pos != 0)
+				PyString_AS_STRING(key)[pos] = '/';
+			else
+				key = NULL;
 			continue;
 		}
 
@@ -83,6 +92,9 @@
 		Py_DECREF(val);
 		if (ret == -1)
 			goto bail;
+
+		/* Clear the key out since we've already exposed it to Python
+		   and can't mutate it further. */
 		Py_CLEAR(key);
 	}
 	ret = 0;
@@ -95,11 +107,11 @@
 
 static int _delpath(PyObject *dirs, PyObject *path)
 {
-	Py_ssize_t pos = PyString_GET_SIZE(path);
+	Py_ssize_t pos = -1;
 	PyObject *key = NULL;
 	int ret = -1;
 
-	while ((pos = _finddir(path, pos - 1)) != -1) {
+	while ((pos = _finddir(path, pos + 1)) != -1) {
 		PyObject *val;
 
 		key = PyString_FromStringAndSize(PyString_AS_STRING(path), pos);
--- a/mercurial/dirstate.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/dirstate.py	Thu Apr 16 20:57:51 2015 -0500
@@ -87,15 +87,29 @@
         return self._copymap
 
     @propertycache
-    def _foldmap(self):
+    def _filefoldmap(self):
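+        # use the C implementation from the parsers module when it is
+        # available; otherwise fall back to the pure-Python loop below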
+        try:
+            makefilefoldmap = parsers.make_file_foldmap
+        except AttributeError:
+            pass
+        else:
+            return makefilefoldmap(self._map, util.normcasespec,
+                                   util.normcasefallback)
+
         f = {}
         normcase = util.normcase
         for name, s in self._map.iteritems():
             if s[0] != 'r':
                 f[normcase(name)] = name
+        f['.'] = '.' # prevents useless util.fspath() invocation
+        return f
+
+    @propertycache
+    def _dirfoldmap(self):
+        f = {}
+        normcase = util.normcase
         for name in self._dirs:
             f[normcase(name)] = name
-        f['.'] = '.' # prevents useless util.fspath() invocation
         return f
 
     @repocache('branch')
@@ -125,7 +139,7 @@
 
     @propertycache
     def _dirs(self):
-        return scmutil.dirs(self._map, 'r')
+        return util.dirs(self._map, 'r')
 
     def dirs(self):
         return self._dirs
@@ -332,8 +346,8 @@
             self._pl = p
 
     def invalidate(self):
-        for a in ("_map", "_copymap", "_foldmap", "_branch", "_pl", "_dirs",
-                "_ignore"):
+        for a in ("_map", "_copymap", "_filefoldmap", "_dirfoldmap", "_branch",
+                  "_pl", "_dirs", "_ignore"):
             if a in self.__dict__:
                 delattr(self, a)
         self._lastnormaltime = 0
@@ -367,7 +381,7 @@
             if f in self._dirs:
                 raise util.Abort(_('directory %r already in dirstate') % f)
             # shadows
-            for d in scmutil.finddirs(f):
+            for d in util.finddirs(f):
                 if d in self._dirs:
                     break
                 if d in self._map and self[d] != 'r':
@@ -464,36 +478,56 @@
             self._droppath(f)
             del self._map[f]
 
-    def _normalize(self, path, isknown, ignoremissing=False, exists=None):
+    def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
+        if exists is None:
+            exists = os.path.lexists(os.path.join(self._root, path))
+        if not exists:
+            # Maybe a path component exists
+            if not ignoremissing and '/' in path:
+                d, f = path.rsplit('/', 1)
+                d = self._normalize(d, False, ignoremissing, None)
+                folded = d + "/" + f
+            else:
+                # No path components, preserve original case
+                folded = path
+        else:
+            # recursively normalize leading directory components
+            # against dirstate
+            if '/' in normed:
+                d, f = normed.rsplit('/', 1)
+                d = self._normalize(d, False, ignoremissing, True)
+                r = self._root + "/" + d
+                folded = d + "/" + util.fspath(f, r)
+            else:
+                folded = util.fspath(normed, self._root)
+            storemap[normed] = folded
+
+        return folded
+
+    def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
         normed = util.normcase(path)
-        folded = self._foldmap.get(normed, None)
+        folded = self._filefoldmap.get(normed, None)
         if folded is None:
             if isknown:
                 folded = path
             else:
-                if exists is None:
-                    exists = os.path.lexists(os.path.join(self._root, path))
-                if not exists:
-                    # Maybe a path component exists
-                    if not ignoremissing and '/' in path:
-                        d, f = path.rsplit('/', 1)
-                        d = self._normalize(d, isknown, ignoremissing, None)
-                        folded = d + "/" + f
-                    else:
-                        # No path components, preserve original case
-                        folded = path
-                else:
-                    # recursively normalize leading directory components
-                    # against dirstate
-                    if '/' in normed:
-                        d, f = normed.rsplit('/', 1)
-                        d = self._normalize(d, isknown, ignoremissing, True)
-                        r = self._root + "/" + d
-                        folded = d + "/" + util.fspath(f, r)
-                    else:
-                        folded = util.fspath(normed, self._root)
-                    self._foldmap[normed] = folded
+                folded = self._discoverpath(path, normed, ignoremissing, exists,
+                                            self._filefoldmap)
+        return folded
 
+    def _normalize(self, path, isknown, ignoremissing=False, exists=None):
+        normed = util.normcase(path)
+        folded = self._filefoldmap.get(normed, None)
+        if folded is None:
+            folded = self._dirfoldmap.get(normed, None)
+        if folded is None:
+            if isknown:
+                folded = path
+            else:
+                # store discovered result in dirfoldmap so that future
+                # normalizefile calls don't start matching directories
+                folded = self._discoverpath(path, normed, ignoremissing, exists,
+                                            self._dirfoldmap)
         return folded
 
     def normalize(self, path, isknown=False, ignoremissing=False):
@@ -567,7 +601,7 @@
             return False
         if self._ignore(f):
             return True
-        for p in scmutil.finddirs(f):
+        for p in util.finddirs(f):
             if self._ignore(p):
                 return True
         return False
@@ -599,7 +633,6 @@
         matchedir = match.explicitdir
         badfn = match.bad
         dmap = self._map
-        normpath = util.normpath
         lstat = os.lstat
         getkind = stat.S_IFMT
         dirkind = stat.S_IFDIR
@@ -611,7 +644,7 @@
         dirsnotfound = []
         notfoundadd = dirsnotfound.append
 
-        if match.matchfn != match.exact and self._checkcase:
+        if not match.isexact() and self._checkcase:
             normalize = self._normalize
         else:
             normalize = None
@@ -629,16 +662,18 @@
             j += 1
 
         if not files or '.' in files:
-            files = ['']
+            files = ['.']
         results = dict.fromkeys(subrepos)
         results['.hg'] = None
 
         alldirs = None
         for ff in files:
-            if normalize:
-                nf = normalize(normpath(ff), False, True)
+            # constructing the foldmap is expensive, so don't do it for the
+            # common case where files is ['.']
+            if normalize and ff != '.':
+                nf = normalize(ff, False, True)
             else:
-                nf = normpath(ff)
+                nf = ff
             if nf in results:
                 continue
 
@@ -663,7 +698,7 @@
                     results[nf] = None
                 else: # does it match a missing directory?
                     if alldirs is None:
-                        alldirs = scmutil.dirs(dmap)
+                        alldirs = util.dirs(dmap)
                     if nf in alldirs:
                         if matchedir:
                             matchedir(nf)
@@ -711,7 +746,7 @@
         join = self._join
 
         exact = skipstep3 = False
-        if matchfn == match.exact: # match.exact
+        if match.isexact(): # match.exact
             exact = True
             dirignore = util.always # skip step 2
         elif match.files() and not match.anypats(): # match.match, no patterns
@@ -719,56 +754,70 @@
 
         if not exact and self._checkcase:
             normalize = self._normalize
+            normalizefile = self._normalizefile
             skipstep3 = False
         else:
-            normalize = None
+            normalize = self._normalize
+            normalizefile = None
 
         # step 1: find all explicit files
         results, work, dirsnotfound = self._walkexplicit(match, subrepos)
 
         skipstep3 = skipstep3 and not (work or dirsnotfound)
         work = [d for d in work if not dirignore(d[0])]
-        wadd = work.append
 
         # step 2: visit subdirectories
-        while work:
-            nd, d = work.pop()
-            skip = None
-            if nd == '.':
-                nd = ''
-                d = ''
-            else:
-                skip = '.hg'
-            try:
-                entries = listdir(join(nd), stat=True, skip=skip)
-            except OSError, inst:
-                if inst.errno in (errno.EACCES, errno.ENOENT):
-                    match.bad(self.pathto(nd), inst.strerror)
-                    continue
-                raise
-            for f, kind, st in entries:
-                if normalize:
-                    nf = normalize(nd and (nd + "/" + f) or f, True, True)
-                    f = d and (d + "/" + f) or f
+        def traverse(work, alreadynormed):
+            wadd = work.append
+            while work:
+                nd = work.pop()
+                skip = None
+                if nd == '.':
+                    nd = ''
                 else:
-                    nf = nd and (nd + "/" + f) or f
-                    f = nf
-                if nf not in results:
-                    if kind == dirkind:
-                        if not ignore(nf):
-                            if matchtdir:
-                                matchtdir(nf)
-                            wadd((nf, f))
-                        if nf in dmap and (matchalways or matchfn(nf)):
+                    skip = '.hg'
+                try:
+                    entries = listdir(join(nd), stat=True, skip=skip)
+                except OSError, inst:
+                    if inst.errno in (errno.EACCES, errno.ENOENT):
+                        match.bad(self.pathto(nd), inst.strerror)
+                        continue
+                    raise
+                for f, kind, st in entries:
+                    if normalizefile:
+                        # even though f might be a directory, we're only
+                        # interested in comparing it to files currently in the
+                        # dmap -- therefore normalizefile is enough
+                        nf = normalizefile(nd and (nd + "/" + f) or f, True,
+                                           True)
+                    else:
+                        nf = nd and (nd + "/" + f) or f
+                    if nf not in results:
+                        if kind == dirkind:
+                            if not ignore(nf):
+                                if matchtdir:
+                                    matchtdir(nf)
+                                wadd(nf)
+                            if nf in dmap and (matchalways or matchfn(nf)):
+                                results[nf] = None
+                        elif kind == regkind or kind == lnkkind:
+                            if nf in dmap:
+                                if matchalways or matchfn(nf):
+                                    results[nf] = st
+                            elif ((matchalways or matchfn(nf))
+                                  and not ignore(nf)):
+                                # unknown file -- normalize if necessary
+                                if not alreadynormed:
+                                    nf = normalize(nf, False, True)
+                                results[nf] = st
+                        elif nf in dmap and (matchalways or matchfn(nf)):
                             results[nf] = None
-                    elif kind == regkind or kind == lnkkind:
-                        if nf in dmap:
-                            if matchalways or matchfn(nf):
-                                results[nf] = st
-                        elif (matchalways or matchfn(f)) and not ignore(nf):
-                            results[nf] = st
-                    elif nf in dmap and (matchalways or matchfn(nf)):
-                        results[nf] = None
+
+        for nd, d in work:
+            # alreadynormed means that traverse() doesn't have to do any
+            # expensive directory normalization
+            alreadynormed = not normalize or nd == d
+            traverse([d], alreadynormed)
 
         for s in subrepos:
             del results[s]
@@ -797,7 +846,8 @@
                     # different case, don't add one for this, since that would
                     # make it appear as if the file exists under both names
                     # on disk.
-                    if normalize and normalize(nf, True, True) in results:
+                    if (normalizefile and
+                        normalizefile(nf, True, True) in results):
                         results[nf] = None
                     # Report ignored items in the dmap as long as they are not
                     # under a symlink directory.
@@ -896,9 +946,9 @@
                 elif time != mtime and time != mtime & _rangemask:
                     ladd(fn)
                 elif mtime == lastnormaltime:
-                    # fn may have been changed in the same timeslot without
-                    # changing its size. This can happen if we quickly do
-                    # multiple commits in a single transaction.
+                    # fn may have just been marked as normal and it may have
+                    # changed in the same second without changing its size.
+                    # This can happen if we quickly do multiple commits.
                     # Force lookup, so we don't miss such a racy file change.
                     ladd(fn)
                 elif listclean:
@@ -921,7 +971,7 @@
         if match.always():
             return dmap.keys()
         files = match.files()
-        if match.matchfn == match.exact:
+        if match.isexact():
             # fast path -- filter the other way around, since typically files is
             # much smaller than dmap
             return [f for f in files if f in dmap]
--- a/mercurial/discovery.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/discovery.py	Thu Apr 16 20:57:51 2015 -0500
@@ -218,7 +218,10 @@
     r = repo.set('heads(%ln + %ln)', oldheads, outgoing.missing)
     newheads = list(c.node() for c in r)
     # set some unsynced head to issue the "unsynced changes" warning
-    unsynced = inc and set([None]) or set()
+    if inc:
+        unsynced = set([None])
+    else:
+        unsynced = set()
     return {None: (oldheads, newheads, unsynced)}
 
 def checkheads(repo, remote, outgoing, remoteheads, newbranch=False, inc=False,
@@ -269,9 +272,13 @@
     # If there are more heads after the push than before, a suitable
     # error message, depending on unsynced status, is displayed.
     error = None
-    allmissing = set(outgoing.missing)
-    allfuturecommon = set(c.node() for c in repo.set('%ld', outgoing.common))
-    allfuturecommon.update(allmissing)
+    # If there is no obsstore, allfuturecommon won't be used, so no
+    # need to compute it.
+    if repo.obsstore:
+        allmissing = set(outgoing.missing)
+        cctx = repo.set('%ld', outgoing.common)
+        allfuturecommon = set(c.node() for c in cctx)
+        allfuturecommon.update(allmissing)
     for branch, heads in sorted(headssum.iteritems()):
         remoteheads, newheads, unsyncedheads = heads
         candidate_newhs = set(newheads)
--- a/mercurial/dispatch.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/dispatch.py	Thu Apr 16 20:57:51 2015 -0500
@@ -7,6 +7,7 @@
 
 from i18n import _
 import os, sys, atexit, signal, pdb, socket, errno, shlex, time, traceback, re
+import difflib
 import util, commands, hg, fancyopts, extensions, hook, error
 import cmdutil, encoding
 import ui as uimod
@@ -27,6 +28,31 @@
     "run the command in sys.argv"
     sys.exit((dispatch(request(sys.argv[1:])) or 0) & 255)
 
+def _getsimilar(symbols, value):
+    sim = lambda x: difflib.SequenceMatcher(None, value, x).ratio()
+    # The cutoff for similarity here is pretty arbitrary. It should
+    # probably be investigated and tweaked.
+    return [s for s in symbols if sim(s) > 0.6]
+
+def _formatparse(write, inst):
+    similar = []
+    if isinstance(inst, error.UnknownIdentifier):
+        # make sure to check fileset first, as revset can invoke fileset
+        similar = _getsimilar(inst.symbols, inst.function)
+    if len(inst.args) > 1:
+        write(_("hg: parse error at %s: %s\n") %
+                         (inst.args[1], inst.args[0]))
+        if (inst.args[0][0] == ' '):
+            write(_("unexpected leading whitespace\n"))
+    else:
+        write(_("hg: parse error: %s\n") % inst.args[0])
+        if similar:
+            if len(similar) == 1:
+                write(_("(did you mean %r?)\n") % similar[0])
+            else:
+                ss = ", ".join(sorted(similar))
+                write(_("(did you mean one of %s?)\n") % ss)
+
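
For illustration, a minimal standalone sketch of the fuzzy-matching approach used by _getsimilar() above; the candidate symbol list and the misspelling are made up:

    import difflib

    def getsimilar(symbols, value, cutoff=0.6):
        # keep every known symbol whose similarity to the misspelled input
        # exceeds the (admittedly arbitrary) cutoff, mirroring _getsimilar()
        sim = lambda x: difflib.SequenceMatcher(None, value, x).ratio()
        return [s for s in symbols if sim(s) > cutoff]

    print(getsimilar(['branch', 'bookmark', 'bisect'], 'brnch'))  # ['branch']
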
 def dispatch(req):
     "run the command specified in req.args"
     if req.ferr:
@@ -55,13 +81,7 @@
             ferr.write(_("(%s)\n") % inst.hint)
         return -1
     except error.ParseError, inst:
-        if len(inst.args) > 1:
-            ferr.write(_("hg: parse error at %s: %s\n") %
-                             (inst.args[1], inst.args[0]))
-            if (inst.args[0][0] == ' '):
-                ferr.write(_("unexpected leading whitespace\n"))
-        else:
-            ferr.write(_("hg: parse error: %s\n") % inst.args[0])
+        _formatparse(ferr.write, inst)
         return -1
 
     msg = ' '.join(' ' in a and repr(a) or a for a in req.args)
@@ -154,13 +174,7 @@
         ui.warn(_("hg: command '%s' is ambiguous:\n    %s\n") %
                 (inst.args[0], " ".join(inst.args[1])))
     except error.ParseError, inst:
-        if len(inst.args) > 1:
-            ui.warn(_("hg: parse error at %s: %s\n") %
-                             (inst.args[1], inst.args[0]))
-            if (inst.args[0][0] == ' '):
-                ui.warn(_("unexpected leading whitespace\n"))
-        else:
-            ui.warn(_("hg: parse error: %s\n") % inst.args[0])
+        _formatparse(ui.warn, inst)
         return -1
     except error.LockHeld, inst:
         if inst.errno == errno.ETIMEDOUT:
@@ -206,7 +220,15 @@
             # (but don't check for extensions themselves)
             commands.help_(ui, inst.args[0], unknowncmd=True)
         except error.UnknownCommand:
-            commands.help_(ui, 'shortlist')
+            suggested = False
+            if len(inst.args) == 2:
+                sim = _getsimilar(inst.args[1], inst.args[0])
+                if sim:
+                    ui.warn(_('(did you mean one of %s?)\n') %
+                            ', '.join(sorted(sim)))
+                    suggested = True
+            if not suggested:
+                commands.help_(ui, 'shortlist')
     except error.InterventionRequired, inst:
         ui.warn("%s\n" % inst)
         return 1
@@ -804,7 +826,7 @@
 
     if cmdoptions.get('insecure', False):
         for ui_ in uis:
-            ui_.setconfig('web', 'cacerts', '', '--insecure')
+            ui_.setconfig('web', 'cacerts', '!', '--insecure')
 
     if options['version']:
         return commands.version_(ui)
--- a/mercurial/encoding.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/encoding.py	Thu Apr 16 20:57:51 2015 -0500
@@ -296,6 +296,22 @@
     asciilower = impl
     return impl(s)
 
+def _asciiupper(s):
+    '''convert a string to uppercase if ASCII
+
+    Raises UnicodeDecodeError if non-ASCII characters are found.'''
+    s.decode('ascii')
+    return s.upper()
+
+def asciiupper(s):
+    # delaying the import avoids a cyclic dependency around "parsers" in
+    # pure Python build (util => i18n => encoding => parsers => util)
+    import parsers
+    impl = getattr(parsers, 'asciiupper', _asciiupper)
+    global asciiupper
+    asciiupper = impl
+    return impl(s)
+
 def lower(s):
     "best-effort encoding-aware case-folding of local string s"
     try:
@@ -320,10 +336,11 @@
 def upper(s):
     "best-effort encoding-aware case-folding of local string s"
     try:
-        s.decode('ascii') # throw exception for non-ASCII character
-        return s.upper()
+        return asciiupper(s)
     except UnicodeDecodeError:
-        pass
+        return upperfallback(s)
+
+def upperfallback(s):
     try:
         if isinstance(s, localstr):
             u = s._utf8.decode("utf-8")
@@ -339,6 +356,21 @@
     except LookupError, k:
         raise error.Abort(k, hint="please check your locale settings")
 
+class normcasespecs(object):
+    '''what a platform's normcase does to ASCII strings
+
+    This is specified per platform, and should be consistent with what normcase
+    on that platform actually does.
+
+    lower: normcase lowercases ASCII strings
+    upper: normcase uppercases ASCII strings
+    other: the fallback function should always be called
+
+    This should be kept in sync with normcase_spec in util.h.'''
+    lower = -1
+    upper = 1
+    other = 0
+
 _jsonmap = {}
 
 def jsonescape(s):
--- a/mercurial/error.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/error.py	Thu Apr 16 20:57:51 2015 -0500
@@ -22,6 +22,10 @@
 class LookupError(RevlogError, KeyError):
     def __init__(self, name, index, message):
         self.name = name
+        self.index = index
+        # this can't be called 'message' because at least some installs of
+        # Python 2.6+ complain about the 'message' property being deprecated
+        self.lookupmessage = message
         if isinstance(name, str) and len(name) == 20:
             from node import short
             name = short(name)
@@ -61,7 +65,16 @@
     """Exception raised when a remote repo reports failure"""
 
 class ParseError(Exception):
-    """Exception raised when parsing config files (msg[, pos])"""
+    """Raised when parsing config files and {rev,file}sets (msg[, pos])"""
+
+class UnknownIdentifier(ParseError):
+    """Exception raised when a {rev,file}set references an unknown identifier"""
+
+    def __init__(self, function, symbols):
+        from i18n import _
+        ParseError.__init__(self, _("unknown identifier: %s") % function)
+        self.function = function
+        self.symbols = symbols
 
 class RepoError(Exception):
     def __init__(self, *args, **kw):
@@ -134,8 +147,20 @@
     pass
 
 class CensoredNodeError(RevlogError):
-    """error raised when content verification fails on a censored node"""
+    """error raised when content verification fails on a censored node
 
-    def __init__(self, filename, node):
+    Also contains the tombstone data substituted for the uncensored data.
+    """
+
+    def __init__(self, filename, node, tombstone):
         from node import short
         RevlogError.__init__(self, '%s:%s' % (filename, short(node)))
+        self.tombstone = tombstone
+
+class CensoredBaseError(RevlogError):
+    """error raised when a delta is rejected because its base is censored
+
+    A delta based on a censored revision must be formed as a single patch
+    operation that replaces the entire base with new content. This ensures
+    the delta may be applied by clones which have not censored the base.
+    """
--- a/mercurial/exchange.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/exchange.py	Thu Apr 16 20:57:51 2015 -0500
@@ -10,6 +10,7 @@
 import errno, urllib
 import util, scmutil, changegroup, base85, error
 import discovery, phases, obsolete, bookmarks as bookmod, bundle2, pushkey
+import lock as lockmod
 
 def readbundle(ui, fh, fname, vfs=None):
     header = changegroup.readexactly(fh, 4)
@@ -32,8 +33,8 @@
         if alg is None:
             alg = changegroup.readexactly(fh, 2)
         return changegroup.cg1unpacker(fh, alg)
-    elif version == '2Y':
-        return bundle2.unbundle20(ui, fh, header=magic + version)
+    elif version.startswith('2'):
+        return bundle2.getunbundler(ui, fh, header=magic + version)
     else:
         raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
 
@@ -49,9 +50,17 @@
         if version is None:
             raise ValueError('bundler do not support common obsmarker format')
         stream = obsolete.encodemarkers(markers, True, version=version)
-        return bundler.newpart('b2x:obsmarkers', data=stream)
+        return bundler.newpart('obsmarkers', data=stream)
     return None
 
+def _canusebundle2(op):
+    """return true if a pull/push can use bundle2
+
+    Feel free to nuke this function when we drop the experimental option"""
+    return (op.repo.ui.configbool('experimental', 'bundle2-exp', False)
+            and op.remote.capable('bundle2'))
+
+
 class pushoperation(object):
     """A object that represent a single push operation
 
@@ -192,8 +201,13 @@
     if not pushop.remote.canpush():
         raise util.Abort(_("destination does not support push"))
     # get local lock as we might write phase data
-    locallock = None
+    localwlock = locallock = None
     try:
+        # bundle2 push may receive a reply bundle touching bookmarks or other
+        # things requiring the wlock. Take it now to ensure proper ordering.
+        maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
+        if _canusebundle2(pushop) and maypushback:
+            localwlock = pushop.repo.wlock()
         locallock = pushop.repo.lock()
         pushop.locallocked = True
     except IOError, err:
@@ -217,9 +231,7 @@
             lock = pushop.remote.lock()
         try:
             _pushdiscovery(pushop)
-            if (pushop.repo.ui.configbool('experimental', 'bundle2-exp',
-                                          False)
-                and pushop.remote.capable('bundle2-exp')):
+            if _canusebundle2(pushop):
                 _pushbundle2(pushop)
             _pushchangeset(pushop)
             _pushsyncphase(pushop)
@@ -235,6 +247,8 @@
             pushop.trmanager.release()
         if locallock is not None:
             locallock.release()
+        if localwlock is not None:
+            localwlock.release()
 
     return pushop
 
@@ -421,7 +435,7 @@
 # This exists to help extensions wrap steps if necessary
 b2partsgenmapping = {}
 
-def b2partsgenerator(stepname):
+def b2partsgenerator(stepname, idx=None):
     """decorator for function generating bundle2 part
 
     The function is added to the step -> function mapping and appended to the
@@ -433,7 +447,10 @@
     def dec(func):
         assert stepname not in b2partsgenmapping
         b2partsgenmapping[stepname] = func
-        b2partsgenorder.append(stepname)
+        if idx is None:
+            b2partsgenorder.append(stepname)
+        else:
+            b2partsgenorder.insert(idx, stepname)
         return func
     return dec
 
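
For illustration, a hedged sketch of how an extension might use the new idx parameter to place its bundle2 part generator at a specific point in the order; the part name, the generator body, and the (pushop, bundler) signature are assumptions, not taken verbatim from this patch:

    from mercurial import exchange

    # hypothetical extension code: register a part generator to run first
    @exchange.b2partsgenerator('x-myext-metadata', idx=0)
    def _pushmetadatapart(pushop, bundler):
        if 'x-myext-metadata' in pushop.stepsdone:
            return                        # another wrapper already handled it
        pushop.stepsdone.add('x-myext-metadata')
        bundler.newpart('x-myext-metadata', data='extension payload')
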
@@ -453,10 +470,10 @@
                                      pushop.remote,
                                      pushop.outgoing)
     if not pushop.force:
-        bundler.newpart('b2x:check:heads', data=iter(pushop.remoteheads))
+        bundler.newpart('check:heads', data=iter(pushop.remoteheads))
     b2caps = bundle2.bundle2caps(pushop.remote)
     version = None
-    cgversions = b2caps.get('b2x:changegroup')
+    cgversions = b2caps.get('changegroup')
     if not cgversions:  # 3.1 and 3.2 ship with an empty value
         cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push',
                                                 pushop.outgoing)
@@ -468,7 +485,7 @@
         cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push',
                                                 pushop.outgoing,
                                                 version=version)
-    cgpart = bundler.newpart('b2x:changegroup', data=cg)
+    cgpart = bundler.newpart('changegroup', data=cg)
     if version is not None:
         cgpart.addparam('version', version)
     def handlereply(op):
@@ -484,13 +501,13 @@
     if 'phases' in pushop.stepsdone:
         return
     b2caps = bundle2.bundle2caps(pushop.remote)
-    if not 'b2x:pushkey' in b2caps:
+    if not 'pushkey' in b2caps:
         return
     pushop.stepsdone.add('phases')
     part2node = []
     enc = pushkey.encode
     for newremotehead in pushop.outdatedphases:
-        part = bundler.newpart('b2x:pushkey')
+        part = bundler.newpart('pushkey')
         part.addparam('namespace', enc('phases'))
         part.addparam('key', enc(newremotehead.hex()))
         part.addparam('old', enc(str(phases.draft)))
@@ -527,13 +544,13 @@
     if 'bookmarks' in pushop.stepsdone:
         return
     b2caps = bundle2.bundle2caps(pushop.remote)
-    if 'b2x:pushkey' not in b2caps:
+    if 'pushkey' not in b2caps:
         return
     pushop.stepsdone.add('bookmarks')
     part2book = []
     enc = pushkey.encode
     for book, old, new in pushop.outbookmarks:
-        part = bundler.newpart('b2x:pushkey')
+        part = bundler.newpart('pushkey')
         part.addparam('namespace', enc('bookmarks'))
         part.addparam('key', enc(book))
         part.addparam('old', enc(old))
@@ -577,7 +594,7 @@
     # create reply capability
     capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
                                                       allowpushback=pushback))
-    bundler.newpart('b2x:replycaps', data=capsblob)
+    bundler.newpart('replycaps', data=capsblob)
     replyhandlers = []
     for partgenname in b2partsgenorder:
         partgen = b2partsgenmapping[partgenname]
@@ -845,15 +862,6 @@
     def close(self):
         """close transaction if created"""
         if self._tr is not None:
-            repo = self.repo
-            p = lambda: self._tr.writepending() and repo.root or ""
-            repo.hook('b2x-pretransactionclose', throw=True, pending=p,
-                      **self._tr.hookargs)
-            hookargs = dict(self._tr.hookargs)
-            def runhooks():
-                repo.hook('b2x-transactionclose', **hookargs)
-            self._tr.addpostclose('b2x-hook-transactionclose',
-                                  lambda tr: repo._afterlock(runhooks))
             self._tr.close()
 
     def release(self):
@@ -876,8 +884,7 @@
     try:
         pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
         _pulldiscovery(pullop)
-        if (pullop.repo.ui.configbool('experimental', 'bundle2-exp', False)
-            and pullop.remote.capable('bundle2-exp')):
+        if _canusebundle2(pullop):
             _pullbundle2(pullop)
         _pullchangeset(pullop)
         _pullphase(pullop)
@@ -970,7 +977,7 @@
     kwargs['common'] = pullop.common
     kwargs['heads'] = pullop.heads or pullop.rheads
     kwargs['cg'] = pullop.fetch
-    if 'b2x:listkeys' in remotecaps:
+    if 'listkeys' in remotecaps:
         kwargs['listkeys'] = ['phase', 'bookmarks']
     if not pullop.fetch:
         pullop.repo.ui.status(_("no changes found\n"))
@@ -984,8 +991,6 @@
             kwargs['obsmarkers'] = True
             pullop.stepsdone.add('obsmarkers')
     _pullbundle2extraprepare(pullop, kwargs)
-    if kwargs.keys() == ['format']:
-        return # nothing to pull
     bundle = pullop.remote.getbundle('pull', **kwargs)
     try:
         op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
@@ -1125,7 +1130,7 @@
 
 def caps20to10(repo):
     """return a set with appropriate options to use bundle20 during getbundle"""
-    caps = set(['HG2Y'])
+    caps = set(['HG20'])
     capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
     caps.add('bundle2=' + urllib.quote(capsblob))
     return caps
@@ -1138,7 +1143,7 @@
 # This exists to help extensions wrap steps if necessary
 getbundle2partsmapping = {}
 
-def getbundle2partsgenerator(stepname):
+def getbundle2partsgenerator(stepname, idx=None):
     """decorator for function generating bundle2 part for getbundle
 
     The function is added to the step -> function mapping and appended to the
@@ -1150,7 +1155,10 @@
     def dec(func):
         assert stepname not in getbundle2partsmapping
         getbundle2partsmapping[stepname] = func
-        getbundle2partsorder.append(stepname)
+        if idx is None:
+            getbundle2partsorder.append(stepname)
+        else:
+            getbundle2partsorder.insert(idx, stepname)
         return func
     return dec
 
@@ -1158,7 +1166,7 @@
               **kwargs):
     """return a full bundle (with potentially multiple kind of parts)
 
-    Could be a bundle HG10 or a bundle HG2Y depending on bundlecaps
+    Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
     passed. For now, the bundle can contain only changegroup, but this will
     changes when more part type will be available for bundle2.
 
@@ -1170,7 +1178,10 @@
     when the API of bundle is refined.
     """
     # bundle10 case
-    if bundlecaps is None or 'HG2Y' not in bundlecaps:
+    usebundle2 = False
+    if bundlecaps is not None:
+        usebundle2 = util.any((cap.startswith('HG2') for cap in bundlecaps))
+    if not usebundle2:
         if bundlecaps and not kwargs.get('cg', True):
             raise ValueError(_('request for bundle10 must include changegroup'))
 
@@ -1206,7 +1217,7 @@
     if kwargs.get('cg', True):
         # build changegroup bundle here.
         version = None
-        cgversions = b2caps.get('b2x:changegroup')
+        cgversions = b2caps.get('changegroup')
         if not cgversions:  # 3.1 and 3.2 ship with an empty value
             cg = changegroup.getchangegroupraw(repo, source, heads=heads,
                                                common=common,
@@ -1222,7 +1233,7 @@
                                                version=version)
 
     if cg:
-        part = bundler.newpart('b2x:changegroup', data=cg)
+        part = bundler.newpart('changegroup', data=cg)
         if version is not None:
             part.addparam('version', version)
 
@@ -1232,7 +1243,7 @@
     """add parts containing listkeys namespaces to the requested bundle"""
     listkeys = kwargs.get('listkeys', ())
     for namespace in listkeys:
-        part = bundler.newpart('b2x:listkeys')
+        part = bundler.newpart('listkeys')
         part.addparam('namespace', namespace)
         keys = repo.listkeys(namespace).items()
         part.data = pushkey.encodekeys(keys)
@@ -1272,34 +1283,29 @@
     If the push was raced as PushRaced exception is raised."""
     r = 0
     # need a transaction when processing a bundle2 stream
-    tr = None
-    lock = repo.lock()
+    wlock = lock = tr = None
     try:
         check_heads(repo, heads, 'uploading changes')
         # push can proceed
         if util.safehasattr(cg, 'params'):
+            r = None
             try:
-                tr = repo.transaction('unbundle')
+                wlock = repo.wlock()
+                lock = repo.lock()
+                tr = repo.transaction(source)
                 tr.hookargs['source'] = source
                 tr.hookargs['url'] = url
-                tr.hookargs['bundle2-exp'] = '1'
+                tr.hookargs['bundle2'] = '1'
                 r = bundle2.processbundle(repo, cg, lambda: tr).reply
-                p = lambda: tr.writepending() and repo.root or ""
-                repo.hook('b2x-pretransactionclose', throw=True, pending=p,
-                          **tr.hookargs)
-                hookargs = dict(tr.hookargs)
-                def runhooks():
-                    repo.hook('b2x-transactionclose', **hookargs)
-                tr.addpostclose('b2x-hook-transactionclose',
-                                lambda tr: repo._afterlock(runhooks))
                 tr.close()
             except Exception, exc:
                 exc.duringunbundle2 = True
+                if r is not None:
+                    exc._bundle2salvagedoutput = r.salvageoutput()
                 raise
         else:
+            lock = repo.lock()
             r = changegroup.addchangegroup(repo, cg, source, url)
     finally:
-        if tr is not None:
-            tr.release()
-        lock.release()
+        lockmod.release(tr, lock, wlock)
     return r
--- a/mercurial/extensions.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/extensions.py	Thu Apr 16 20:57:51 2015 -0500
@@ -10,6 +10,7 @@
 from i18n import _, gettext
 
 _extensions = {}
+_aftercallbacks = {}
 _order = []
 _ignore = ['hbisect', 'bookmarks', 'parentrevspec', 'interhg', 'inotify']
 
@@ -87,6 +88,8 @@
             mod = importh(name)
     _extensions[shortname] = mod
     _order.append(shortname)
+    for fn in _aftercallbacks.get(shortname, []):
+        fn(loaded=True)
     return mod
 
 def loadall(ui):
@@ -123,7 +126,45 @@
                     raise
                 extsetup() # old extsetup with no ui argument
 
-def wrapcommand(table, command, wrapper):
+    # Call aftercallbacks that were never met.
+    for shortname in _aftercallbacks:
+        if shortname in _extensions:
+            continue
+
+        for fn in _aftercallbacks[shortname]:
+            fn(loaded=False)
+
+def afterloaded(extension, callback):
+    '''Run the specified function after a named extension is loaded.
+
+    If the named extension is already loaded, the callback will be called
+    immediately.
+
+    If the named extension never loads, the callback will be called after
+    all extensions have been loaded.
+
+    The callback receives the named argument ``loaded``, which is a boolean
+    indicating whether the dependent extension actually loaded.
+    '''
+
+    if extension in _extensions:
+        callback(loaded=True)
+    else:
+        _aftercallbacks.setdefault(extension, []).append(callback)
+
+def bind(func, *args):
+    '''Partial function application
+
+      Returns a new function that is the partial application of args to
+      func; keyword arguments are passed through at call time. For example,
+
+          f(1, 2, bar=3) === bind(f, 1)(2, bar=3)'''
+    assert callable(func)
+    def closure(*a, **kw):
+        return func(*(args + a), **kw)
+    return closure
+
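
For illustration, a small sketch of how the two helpers above could be used; the extension name and the callback are hypothetical:

    from mercurial import extensions

    def _rebaseloaded(loaded):
        # called once, with loaded=True if the rebase extension is active
        if loaded:
            pass  # safe to adjust behaviour that depends on rebase here
    extensions.afterloaded('rebase', _rebaseloaded)

    add = lambda a, b, scale=1: (a + b) * scale
    add2 = extensions.bind(add, 2)     # partially apply a=2
    assert add2(3) == 5                # same as add(2, 3)
    assert add2(3, scale=10) == 50     # keyword arguments pass through
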
+def wrapcommand(table, command, wrapper, synopsis=None, docstring=None):
     '''Wrap the command named `command' in table
 
     Replace command in the command table with wrapper. The wrapped command will
@@ -135,6 +176,22 @@
 
     where orig is the original (wrapped) function, and *args, **kwargs
     are the arguments passed to it.
+
+    Optionally append to the command synopsis and docstring, used for help.
+    For example, if your extension wraps the ``bookmarks`` command to add the
+    flags ``--remote`` and ``--all``, you might call this function like so:
+
+      synopsis = ' [-a] [--remote]'
+      docstring = """
+
+      The ``remotenames`` extension adds the ``--remote`` and ``--all`` (``-a``)
+      flags to the bookmarks command. Either flag will show the remote bookmarks
+      known to the repository; ``--remote`` will also suppress the output of the
+      local bookmarks.
+      """
+
+      extensions.wrapcommand(commands.table, 'bookmarks', exbookmarks,
+                             synopsis, docstring)
     '''
     assert callable(wrapper)
     aliases, entry = cmdutil.findcmd(command, table)
@@ -144,15 +201,19 @@
             break
 
     origfn = entry[0]
-    def wrap(*args, **kwargs):
-        return util.checksignature(wrapper)(
-            util.checksignature(origfn), *args, **kwargs)
+    wrap = bind(util.checksignature(wrapper), util.checksignature(origfn))
+
+    wrap.__module__ = getattr(origfn, '__module__')
 
-    wrap.__doc__ = getattr(origfn, '__doc__')
-    wrap.__module__ = getattr(origfn, '__module__')
+    doc = getattr(origfn, '__doc__')
+    if docstring is not None:
+        doc += docstring
+    wrap.__doc__ = doc
 
     newentry = list(entry)
     newentry[0] = wrap
+    if synopsis is not None:
+        newentry[2] += synopsis
     table[key] = tuple(newentry)
     return entry
 
@@ -190,12 +251,10 @@
     subclass trick.
     '''
     assert callable(wrapper)
-    def wrap(*args, **kwargs):
-        return wrapper(origfn, *args, **kwargs)
 
     origfn = getattr(container, funcname)
     assert callable(origfn)
-    setattr(container, funcname, wrap)
+    setattr(container, funcname, bind(wrapper, origfn))
     return origfn
 
 def _disabledpaths(strip_init=False):
--- a/mercurial/filelog.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/filelog.py	Thu Apr 16 20:57:51 2015 -0500
@@ -5,8 +5,8 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-import error, revlog
-import re
+import error, mdiff, revlog
+import re, struct
 
 _mdre = re.compile('\1\n')
 def parsemeta(text):
@@ -29,7 +29,7 @@
 
 def _censoredtext(text):
     m, offs = parsemeta(text)
-    return m and "censored" in m and not text[offs:]
+    return m and "censored" in m
 
 class filelog(revlog.revlog):
     def __init__(self, opener, path):
@@ -64,7 +64,7 @@
         node = self.node(rev)
         if self.renamed(node):
             return len(self.read(node))
-        if self._iscensored(rev):
+        if self.iscensored(rev):
             return 0
 
         # XXX if self.read(node).startswith("\1\n"), this returns (size+4)
@@ -85,7 +85,7 @@
             return False
 
         # censored files compare against the empty file
-        if self._iscensored(self.rev(node)):
+        if self.iscensored(self.rev(node)):
             return text != ''
 
         # renaming a file produces a different hash, even if the data
@@ -101,12 +101,29 @@
             super(filelog, self).checkhash(text, p1, p2, node, rev=rev)
         except error.RevlogError:
             if _censoredtext(text):
-                raise error.CensoredNodeError(self.indexfile, node)
+                raise error.CensoredNodeError(self.indexfile, node, text)
             raise
 
-    def _file(self, f):
-        return filelog(self.opener, f)
-
-    def _iscensored(self, rev):
+    def iscensored(self, rev):
         """Check if a file revision is censored."""
         return self.flags(rev) & revlog.REVIDX_ISCENSORED
+
+    def _peek_iscensored(self, baserev, delta, flush):
+        """Quickly check if a delta produces a censored revision."""
+        # Fragile heuristic: unless new file meta keys are added alphabetically
+        # preceding "censored", all censored revisions are prefixed by
+        # "\1\ncensored:". A delta producing such a censored revision must be a
+        # full-replacement delta, so we inspect the first and only patch in the
+        # delta for this prefix.
+        hlen = struct.calcsize(">lll")
+        if len(delta) <= hlen:
+            return False
+
+        oldlen = self.rawsize(baserev)
+        newlen = len(delta) - hlen
+        if delta[:hlen] != mdiff.replacediffheader(oldlen, newlen):
+            return False
+
+        add = "\1\ncensored:"
+        addlen = len(add)
+        return newlen >= addlen and delta[hlen:hlen + addlen] == add
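
As context for the prefix comparison in _peek_iscensored() above, a hedged sketch of the full-replacement delta layout it relies on; the packing shown for mdiff.replacediffheader() is an assumption inferred from the surrounding checks, and the payload is only illustrative:

    import struct

    def replacediffheader(oldlen, newlen):
        # assumed equivalent of mdiff.replacediffheader(): one patch hunk
        # covering the whole base revision (start=0, end=oldlen, length=newlen)
        return struct.pack(">lll", 0, oldlen, newlen)

    def makereplacementdelta(oldtext, newtext):
        # a delta that replaces the entire base with new content
        return replacediffheader(len(oldtext), len(newtext)) + newtext

    delta = makereplacementdelta("old data", "\1\ncensored:tombstone\1\n")
    hlen = struct.calcsize(">lll")
    assert delta[hlen:hlen + len("\1\ncensored:")] == "\1\ncensored:"
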
--- a/mercurial/filemerge.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/filemerge.py	Thu Apr 16 20:57:51 2015 -0500
@@ -21,6 +21,8 @@
     return ui.configlist("merge-tools", tool + "." + part, default)
 
 internals = {}
+# Merge tools to document.
+internalsdoc = {}
 
 def internaltool(name, trymerge, onfailure=None):
     '''return a decorator for populating internal merge tool table'''
@@ -29,6 +31,7 @@
         func.__doc__ = "``%s``\n" % fullname + func.__doc__.strip()
         internals[fullname] = func
         internals['internal:' + name] = func
+        internalsdoc[fullname] = func
         func.trymerge = trymerge
         func.onfailure = onfailure
         return func
@@ -301,7 +304,10 @@
         replace = {'local': a, 'base': b, 'other': c, 'output': out}
         args = util.interpolate(r'\$', replace, args,
                                 lambda s: util.shellquote(util.localpath(s)))
-        r = ui.system(toolpath + ' ' + args, cwd=repo.root, environ=env)
+        cmd = toolpath + ' ' + args
+        repo.ui.debug('launching merge tool: %s\n' % cmd)
+        r = ui.system(cmd, cwd=repo.root, environ=env)
+        repo.ui.debug('merge tool returned: %s\n' % r)
         return True, r
     return False, 0
 
--- a/mercurial/fileset.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/fileset.py	Thu Apr 16 20:57:51 2015 -0500
@@ -186,7 +186,7 @@
 def func(mctx, a, b):
     if a[0] == 'symbol' and a[1] in symbols:
         return symbols[a[1]](mctx, b)
-    raise error.ParseError(_("not a function: %s") % a[1])
+    raise error.UnknownIdentifier(a[1], symbols.keys())
 
 def getlist(x):
     if not x:
@@ -233,7 +233,7 @@
     getargs(x, 0, 0, _("resolved takes no arguments"))
     if mctx.ctx.rev() is not None:
         return []
-    ms = merge.mergestate(mctx.ctx._repo)
+    ms = merge.mergestate(mctx.ctx.repo())
     return [f for f in mctx.subset if f in ms and ms[f] == 'r']
 
 def unresolved(mctx, x):
@@ -244,7 +244,7 @@
     getargs(x, 0, 0, _("unresolved takes no arguments"))
     if mctx.ctx.rev() is not None:
         return []
-    ms = merge.mergestate(mctx.ctx._repo)
+    ms = merge.mergestate(mctx.ctx.repo())
     return [f for f in mctx.subset if f in ms and ms[f] == 'u']
 
 def hgignore(mctx, x):
@@ -253,9 +253,19 @@
     """
     # i18n: "hgignore" is a keyword
     getargs(x, 0, 0, _("hgignore takes no arguments"))
-    ignore = mctx.ctx._repo.dirstate._ignore
+    ignore = mctx.ctx.repo().dirstate._ignore
     return [f for f in mctx.subset if ignore(f)]
 
+def portable(mctx, x):
+    """``portable()``
+    File that has a portable name. (This doesn't include filenames with case
+    collisions.)
+    """
+    # i18n: "portable" is a keyword
+    getargs(x, 0, 0, _("portable takes no arguments"))
+    checkwinfilename = util.checkwinfilename
+    return [f for f in mctx.subset if checkwinfilename(f) is None]
+
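
For illustration, a quick sketch of what the new predicate builds on: util.checkwinfilename() returns None for portable names and a message describing the problem otherwise (the file names below are made up):

    from mercurial import util

    for f in ['src/main.c', 'aux.c', 'notes<draft>.txt']:
        problem = util.checkwinfilename(f)
        print('%s -> %s' % (f, problem or 'portable'))
    # expected: only 'src/main.c' is reported as portable
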
 def grep(mctx, x):
     """``grep(regex)``
     File contains the given regular expression.
@@ -398,7 +408,7 @@
             def m(s):
                 return (s == pat)
         else:
-            m = matchmod.match(ctx._repo.root, '', [pat], ctx=ctx)
+            m = matchmod.match(ctx.repo().root, '', [pat], ctx=ctx)
         return [sub for sub in sstate if m(sub)]
     else:
         return [sub for sub in sstate]
@@ -416,6 +426,7 @@
     'ignored': ignored,
     'hgignore': hgignore,
     'modified': modified,
+    'portable': portable,
     'removed': removed,
     'resolved': resolved,
     'size': size,
@@ -493,7 +504,7 @@
         unknown = _intree(['unknown'], tree)
         ignored = _intree(['ignored'], tree)
 
-        r = ctx._repo
+        r = ctx.repo()
         status = r.status(ctx.p1(), ctx,
                           unknown=unknown, ignored=ignored, clean=True)
         subset = []
--- a/mercurial/formatter.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/formatter.py	Thu Apr 16 20:57:51 2015 -0500
@@ -98,6 +98,8 @@
 def _jsonifyobj(v):
     if isinstance(v, tuple):
         return '[' + ', '.join(_jsonifyobj(e) for e in v) + ']'
+    elif v is None:
+        return 'null'
     elif v is True:
         return 'true'
     elif v is False:
--- a/mercurial/graphmod.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/graphmod.py	Thu Apr 16 20:57:51 2015 -0500
@@ -122,7 +122,7 @@
             heappush(pendingheap, -currentrev)
             pendingset.add(currentrev)
         # iterates on pending rev until after the current rev have been
-        # processeed.
+        # processed.
         rev = None
         while rev != currentrev:
             rev = -heappop(pendingheap)
--- a/mercurial/help.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/help.py	Thu Apr 16 20:57:51 2015 -0500
@@ -6,11 +6,13 @@
 # GNU General Public License version 2 or any later version.
 
 from i18n import gettext, _
-import itertools, os
+import itertools, os, textwrap
 import error
 import extensions, revset, fileset, templatekw, templatefilters, filemerge
+import templater
 import encoding, util, minirst
 import cmdutil
+import hgweb.webcommands as webcommands
 
 def listexts(header, exts, indent=1, showdeprecated=False):
     '''return a text listing of the given extensions'''
@@ -171,7 +173,7 @@
 def addtopichook(topic, rewriter):
     helphooks.setdefault(topic, []).append(rewriter)
 
-def makeitemsdoc(topic, doc, marker, items):
+def makeitemsdoc(topic, doc, marker, items, dedent=False):
     """Extract docstring from the items key to function mapping, build a
     single documentation block and use it to overwrite the marker in doc
     """
@@ -181,27 +183,36 @@
         if not text:
             continue
         text = gettext(text)
+        if dedent:
+            text = textwrap.dedent(text)
         lines = text.splitlines()
         doclines = [(lines[0])]
         for l in lines[1:]:
             # Stop once we find some Python doctest
             if l.strip().startswith('>>>'):
                 break
-            doclines.append('  ' + l.strip())
+            if dedent:
+                doclines.append(l.rstrip())
+            else:
+                doclines.append('  ' + l.strip())
         entries.append('\n'.join(doclines))
     entries = '\n\n'.join(entries)
     return doc.replace(marker, entries)
 
-def addtopicsymbols(topic, marker, symbols):
+def addtopicsymbols(topic, marker, symbols, dedent=False):
     def add(topic, doc):
-        return makeitemsdoc(topic, doc, marker, symbols)
+        return makeitemsdoc(topic, doc, marker, symbols, dedent=dedent)
     addtopichook(topic, add)
 
 addtopicsymbols('filesets', '.. predicatesmarker', fileset.symbols)
-addtopicsymbols('merge-tools', '.. internaltoolsmarker', filemerge.internals)
+addtopicsymbols('merge-tools', '.. internaltoolsmarker',
+                filemerge.internalsdoc)
 addtopicsymbols('revsets', '.. predicatesmarker', revset.symbols)
 addtopicsymbols('templates', '.. keywordsmarker', templatekw.dockeywords)
 addtopicsymbols('templates', '.. filtersmarker', templatefilters.filters)
+addtopicsymbols('templates', '.. functionsmarker', templater.funcs)
+addtopicsymbols('hgweb', '.. webcommandsmarker', webcommands.commands,
+                dedent=True)
 
 def help_(ui, name, unknowncmd=False, full=True, **opts):
     '''
--- a/mercurial/help/config.txt	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/help/config.txt	Thu Apr 16 20:57:51 2015 -0500
@@ -808,6 +808,35 @@
   changeset to tag is in ``$HG_NODE``. Name of tag is in ``$HG_TAG``. Tag is
   local if ``$HG_LOCAL=1``, in repository if ``$HG_LOCAL=0``.
 
+``pretxnopen``
+  Run before any new repository transaction is opened. The reason for the
+  transaction will be in ``$HG_TXNNAME``, and a unique identifier for the
+  transaction will be in ``$HG_TXNID``. A non-zero status will prevent the
+  transaction from being opened.
+
+``pretxnclose``
+  Run right before the transaction is actually finalized. Any
+  repository change will be visible to the hook program. This lets you
+  validate the transaction content or change it. Exit status 0 allows
+  the commit to proceed. Non-zero status will cause the transaction to
+  be rolled back. The reason for the transaction opening will be in
+  ``$HG_TXNNAME`` and a unique identifier for the transaction will be in
+  ``$HG_TXNID``. The rest of the available data will vary according to the
+  transaction type. New changesets will add ``$HG_NODE`` (id of the
+  first added changeset), ``$HG_URL`` and ``$HG_SOURCE`` variables;
+  bookmark and phase changes will set ``$HG_BOOKMARK_MOVED`` and
+  ``$HG_PHASES_MOVED`` to ``1``, etc.
+
+``txnclose``
+  Run after any repository transaction has been committed. At this
+  point, the transaction can no longer be rolled back. The hook will run
+  after the lock is released. See ``pretxnclose`` docs for details about
+  available variables.
+
+``txnabort``
+  Run when a transaction is aborted. See ``pretxnclose`` docs for details about
+  available variables.
+
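
For illustration, a minimal sketch of an in-process hook for the new ``pretxnclose`` event; the file path and hook name are hypothetical, and the keyword argument names are assumed to match the environment variables above (lowercased, without the ``HG_`` prefix), as is usual for Python hooks:

    # hypothetical file: /path/to/txnhooks.py
    def pretxnclose(ui, repo, hooktype, txnname=None, txnid=None, **kwargs):
        ui.note('about to close transaction %s (%s)\n' % (txnid, txnname))
        return 0   # a non-zero/True return would roll the transaction back

    # enabled with an hgrc entry such as:
    #   [hooks]
    #   pretxnclose.log = python:/path/to/txnhooks.py:pretxnclose
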
 ``pretxnchangegroup``
   Run after a changegroup has been added via push, pull or unbundle,
   but before the transaction has been committed. Changegroup is
@@ -1409,6 +1438,9 @@
     backslash character (``\``)).
     Default is False.
 
+``statuscopies``
+    Display copies in the status command.
+
 ``ssh``
     command to use for SSH connections. Default is ``ssh``.
 
--- a/mercurial/help/hg.1.txt	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/help/hg.1.txt	Thu Apr 16 20:57:51 2015 -0500
@@ -112,7 +112,7 @@
 
 Copying
 """""""
-Copyright (C) 2005-2014 Matt Mackall.
+Copyright (C) 2005-2015 Matt Mackall.
 Free use of this software is granted under the terms of the GNU General
 Public License version 2 or any later version.
 
--- a/mercurial/help/hgignore.5.txt	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/help/hgignore.5.txt	Thu Apr 16 20:57:51 2015 -0500
@@ -26,7 +26,7 @@
 Copying
 =======
 This manual page is copyright 2006 Vadim Gelfer.
-Mercurial is copyright 2005-2014 Matt Mackall.
+Mercurial is copyright 2005-2015 Matt Mackall.
 Free use of this software is granted under the terms of the GNU General
 Public License version 2 or any later version.
 
--- a/mercurial/help/hgrc.5.txt	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/help/hgrc.5.txt	Thu Apr 16 20:57:51 2015 -0500
@@ -34,7 +34,7 @@
 Copying
 =======
 This manual page is copyright 2005 Bryan O'Sullivan.
-Mercurial is copyright 2005-2014 Matt Mackall.
+Mercurial is copyright 2005-2015 Matt Mackall.
 Free use of this software is granted under the terms of the GNU General
 Public License version 2 or any later version.
 
--- a/mercurial/help/hgweb.txt	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/help/hgweb.txt	Thu Apr 16 20:57:51 2015 -0500
@@ -48,3 +48,39 @@
 
 The ``collections`` section is deprecated and has been superseded by
 ``paths``.
+
+URLs and Common Arguments
+=========================
+
+URLs under each repository have the form ``/{command}[/{arguments}]``
+where ``{command}`` represents the name of a command or handler and
+``{arguments}`` represents any number of additional URL parameters
+to that command.
+
+The web server has a default style associated with it. Styles map to
+a collection of named templates. Each template is used to render a
+specific piece of data, such as a changeset or diff.
+
+The style for the current request can be overridden in two ways. First,
+if ``{command}`` contains a hyphen (``-``), the text before the hyphen
+defines the style. For example, ``/atom-log`` will render the ``log``
+command handler with the ``atom`` style. The second way to set the
+style is with the ``style`` query string argument. For example,
+``/log?style=atom``. The hyphenated URL parameter is preferred.
+
+Not all templates are available for all styles. Attempting to use
+a style that doesn't have all templates defined may result in an error
+rendering the page.
+
+Many commands take a ``{revision}`` URL parameter. This defines the
+changeset to operate on. This is commonly specified as the short,
+12-digit hexadecimal abbreviation for the full 40-character unique
+revision identifier. However, any value described by
+:hg:`help revisions` typically works.
+
+Commands and URLs
+=================
+
+The following web commands and their URLs are available:
+
+  .. webcommandsmarker
--- a/mercurial/help/subrepos.txt	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/help/subrepos.txt	Thu Apr 16 20:57:51 2015 -0500
@@ -78,7 +78,7 @@
 :add: add does not recurse in subrepos unless -S/--subrepos is
     specified.  However, if you specify the full path of a file in a
     subrepo, it will be added even without -S/--subrepos specified.
-    Git and Subversion subrepositories are currently silently
+    Subversion subrepositories are currently silently
     ignored.
 
 :addremove: addremove does not recurse into subrepos unless
@@ -91,7 +91,7 @@
     -S/--subrepos is specified.
 
 :cat: cat currently only handles exact file matches in subrepos.
-    Git and Subversion subrepositories are currently ignored.
+    Subversion subrepositories are currently ignored.
 
 :commit: commit creates a consistent snapshot of the state of the
     entire project and its subrepositories. If any subrepositories
@@ -109,6 +109,10 @@
     elements. Git subrepositories do not support --include/--exclude.
     Subversion subrepositories are currently silently ignored.
 
+:files: files does not recurse into subrepos unless -S/--subrepos is
+    specified.  Git and Subversion subrepositories are currently
+    silently ignored.
+
 :forget: forget currently only handles exact file matches in subrepos.
     Git and Subversion subrepositories are currently silently ignored.
 
--- a/mercurial/help/templates.txt	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/help/templates.txt	Thu Apr 16 20:57:51 2015 -0500
@@ -41,39 +41,7 @@
 
 In addition to filters, there are some basic built-in functions:
 
-- date(date[, fmt])
-
-- diff([includepattern [, excludepattern]])
-
-- fill(text[, width])
-
-- get(dict, key)
-
-- if(expr, then[, else])
-
-- ifcontains(expr, expr, then[, else])
-
-- ifeq(expr, expr, then[, else])
-
-- join(list, sep)
-
-- label(label, expr)
-
-- pad(text, width[, fillchar, right])
-
-- revset(query[, formatargs])
-
-- rstdoc(text, style)
-
-- shortest(node)
-
-- startswith(string, text)
-
-- strip(text[, chars])
-
-- sub(pat, repl, expr)
-
-- word(number, text[, separator])
+.. functionsmarker
 
 Also, for any expression that returns a list, there is a list operator:
 
--- a/mercurial/hg.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/hg.py	Thu Apr 16 20:57:51 2015 -0500
@@ -34,7 +34,11 @@
         else:
             y = None
         return x, y
-    revs = revs and list(revs) or []
+    if revs:
+        revs = list(revs)
+    else:
+        revs = []
+
     if not peer.capable('branchmap'):
         if branches:
             raise util.Abort(_("remote branch lookup not supported"))
@@ -239,6 +243,12 @@
     try:
         hardlink = None
         num = 0
+        closetopic = [None]
+        def prog(topic, pos):
+            if pos is None:
+                closetopic[0] = topic
+            else:
+                ui.progress(topic, pos + num)
         srcpublishing = srcrepo.ui.configbool('phases', 'publish', True)
         srcvfs = scmutil.vfs(srcrepo.sharedpath)
         dstvfs = scmutil.vfs(destpath)
@@ -255,12 +265,16 @@
                     # lock to avoid premature writing to the target
                     destlock = lock.lock(dstvfs, lockfile)
                 hardlink, n = util.copyfiles(srcvfs.join(f), dstvfs.join(f),
-                                             hardlink)
+                                             hardlink, progress=prog)
                 num += n
         if hardlink:
             ui.debug("linked %d files\n" % num)
+            if closetopic[0]:
+                ui.progress(closetopic[0], None)
         else:
             ui.debug("copied %d files\n" % num)
+            if closetopic[0]:
+                ui.progress(closetopic[0], None)
         return destlock
     except: # re-raises
         release(destlock)
@@ -672,7 +686,9 @@
         for key, val in src.configitems(sect):
             dst.setconfig(sect, key, val, 'copied')
     v = src.config('web', 'cacerts')
-    if v:
+    if v == '!':
+        dst.setconfig('web', 'cacerts', v, 'copied')
+    elif v:
         dst.setconfig('web', 'cacerts', util.expandpath(v), 'copied')
 
     return dst
--- a/mercurial/hgweb/webcommands.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/hgweb/webcommands.py	Thu Apr 16 20:57:51 2015 -0500
@@ -13,27 +13,58 @@
 from common import paritygen, staticfile, get_contact, ErrorResponse
 from common import HTTP_OK, HTTP_FORBIDDEN, HTTP_NOT_FOUND
 from mercurial import graphmod, patch
-from mercurial import help as helpmod
 from mercurial import scmutil
 from mercurial.i18n import _
 from mercurial.error import ParseError, RepoLookupError, Abort
 from mercurial import revset
 
-# __all__ is populated with the allowed commands. Be sure to add to it if
-# you're adding a new command, or the new command won't work.
+__all__ = []
+commands = {}
+
+class webcommand(object):
+    """Decorator used to register a web command handler.
+
+    The decorator takes as its positional argument the name/path the
+    command should be accessible under.
+
+    Usage:
+
+    @webcommand('mycommand')
+    def mycommand(web, req, tmpl):
+        pass
+    """
+
+    def __init__(self, name):
+        self.name = name
 
-__all__ = [
-   'log', 'rawfile', 'file', 'changelog', 'shortlog', 'changeset', 'rev',
-   'manifest', 'tags', 'bookmarks', 'branches', 'summary', 'filediff', 'diff',
-   'comparison', 'annotate', 'filelog', 'archive', 'static', 'graph', 'help',
-]
+    def __call__(self, func):
+        __all__.append(self.name)
+        commands[self.name] = func
+        return func
+
+@webcommand('log')
+def log(web, req, tmpl):
+    """
+    /log[/{revision}[/{path}]]
+    --------------------------
 
-def log(web, req, tmpl):
+    Show repository or file history.
+
+    For URLs of the form ``/log/{revision}``, a list of changesets starting at
+    the specified changeset identifier is shown. If ``{revision}`` is not
+    defined, the default is ``tip``. This form is equivalent to the
+    ``changelog`` handler.
+
+    For URLs of the form ``/log/{revision}/{file}``, the history for a specific
+    file will be shown. This form is equivalent to the ``filelog`` handler.
+    """
+
     if 'file' in req.form and req.form['file'][0]:
         return filelog(web, req, tmpl)
     else:
         return changelog(web, req, tmpl)
 
+@webcommand('rawfile')
 def rawfile(web, req, tmpl):
     guessmime = web.configbool('web', 'guessmime', False)
 
@@ -59,7 +90,10 @@
     if guessmime:
         mt = mimetypes.guess_type(path)[0]
         if mt is None:
-            mt = util.binary(text) and 'application/binary' or 'text/plain'
+            if util.binary(text):
+                mt = 'application/binary'
+            else:
+                mt = 'text/plain'
     if mt.startswith('text/'):
         mt += '; charset="%s"' % encoding.encoding
 
@@ -98,7 +132,26 @@
                 rename=webutil.renamelink(fctx),
                 permissions=fctx.manifest().flags(f))
 
+@webcommand('file')
 def file(web, req, tmpl):
+    """
+    /file/{revision}[/{path}]
+    -------------------------
+
+    Show information about a directory or file in the repository.
+
+    Info about the ``path`` given as a URL parameter will be rendered.
+
+    If ``path`` is a directory, information about the entries in that
+    directory will be rendered. This form is equivalent to the ``manifest``
+    handler.
+
+    If ``path`` is a file, information about that file will be shown via
+    the ``filerevision`` template.
+
+    If ``path`` is not defined, information about the root directory will
+    be rendered.
+    """
     path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
     if not path:
         return manifest(web, req, tmpl)
@@ -187,7 +240,7 @@
 
         mfunc = revset.match(web.repo.ui, revdef)
         try:
-            revs = mfunc(web.repo, revset.baseset(web.repo))
+            revs = mfunc(web.repo)
             return MODE_REVSET, revs
             # ParseError: wrongly placed tokens, wrongs arguments, etc
             # RepoLookupError: no such revision, e.g. in 'revision:'
@@ -267,7 +320,31 @@
                 modedesc=searchfunc[1],
                 showforcekw=showforcekw, showunforcekw=showunforcekw)
 
+@webcommand('changelog')
 def changelog(web, req, tmpl, shortlog=False):
+    """
+    /changelog[/{revision}]
+    -----------------------
+
+    Show information about multiple changesets.
+
+    If the optional ``revision`` URL argument is absent, information about
+    all changesets starting at ``tip`` will be rendered. If the ``revision``
+    argument is present, changesets will be shown starting from the specified
+    revision.
+
+    If ``revision`` is absent, the ``rev`` query string argument may be
+    defined. This will perform a search for changesets.
+
+    The argument for ``rev`` can be a single revision, a revision set,
+    or a literal keyword to search for in changeset data (equivalent to
+    :hg:`log -k`).
+
+    The ``revcount`` query string argument defines the maximum number of
+    changesets to render.
+
+    For non-searches, the ``changelog`` template will be rendered.
+    """
 
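As illustrative examples only (the values are made up): ``?rev=branch(stable)`` would be evaluated as a revset, a value that resolves to a changeset in the repository would be shown as that single revision, and anything else, say ``?rev=typofix``, would fall back to a keyword search over changeset data.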
     query = ''
     if 'node' in req.form:
@@ -291,7 +368,11 @@
             entry['parity'] = parity.next()
             yield entry
 
-    revcount = shortlog and web.maxshortchanges or web.maxchanges
+    if shortlog:
+        revcount = web.maxshortchanges
+    else:
+        revcount = web.maxchanges
+
     if 'revcount' in req.form:
         try:
             revcount = int(req.form.get('revcount', [revcount])[0])
@@ -326,63 +407,41 @@
                 archives=web.archivelist("tip"), revcount=revcount,
                 morevars=morevars, lessvars=lessvars, query=query)
 
+@webcommand('shortlog')
 def shortlog(web, req, tmpl):
+    """
+    /shortlog
+    ---------
+
+    Show basic information about a set of changesets.
+
+    This accepts the same parameters as the ``changelog`` handler. The only
+    difference is that the ``shortlog`` template will be rendered instead of
+    the ``changelog`` template.
+    """
     return changelog(web, req, tmpl, shortlog=True)
 
+@webcommand('changeset')
 def changeset(web, req, tmpl):
-    ctx = webutil.changectx(web.repo, req)
-    basectx = webutil.basechangectx(web.repo, req)
-    if basectx is None:
-        basectx = ctx.p1()
-    showtags = webutil.showtag(web.repo, tmpl, 'changesettag', ctx.node())
-    showbookmarks = webutil.showbookmark(web.repo, tmpl, 'changesetbookmark',
-                                         ctx.node())
-    showbranch = webutil.nodebranchnodefault(ctx)
+    """
+    /changeset[/{revision}]
+    -----------------------
 
-    files = []
-    parity = paritygen(web.stripecount)
-    for blockno, f in enumerate(ctx.files()):
-        template = f in ctx and 'filenodelink' or 'filenolink'
-        files.append(tmpl(template,
-                          node=ctx.hex(), file=f, blockno=blockno + 1,
-                          parity=parity.next()))
-
-    style = web.config('web', 'style', 'paper')
-    if 'style' in req.form:
-        style = req.form['style'][0]
-
-    parity = paritygen(web.stripecount)
-    diffs = webutil.diffs(web.repo, tmpl, ctx, basectx, None, parity, style)
+    Show information about a single changeset.
 
-    parity = paritygen(web.stripecount)
-    diffstatgen = webutil.diffstatgen(ctx, basectx)
-    diffstat = webutil.diffstat(tmpl, ctx, diffstatgen, parity)
+    A URL path argument is the changeset identifier to show. See ``hg help
+    revisions`` for possible values. If not defined, the ``tip`` changeset
+    will be shown.
 
-    return tmpl('changeset',
-                diff=diffs,
-                rev=ctx.rev(),
-                node=ctx.hex(),
-                parent=tuple(webutil.parents(ctx)),
-                child=webutil.children(ctx),
-                basenode=basectx.hex(),
-                changesettag=showtags,
-                changesetbookmark=showbookmarks,
-                changesetbranch=showbranch,
-                author=ctx.user(),
-                desc=ctx.description(),
-                extra=ctx.extra(),
-                date=ctx.date(),
-                files=files,
-                diffsummary=lambda **x: webutil.diffsummary(diffstatgen),
-                diffstat=diffstat,
-                archives=web.archivelist(ctx.hex()),
-                tags=webutil.nodetagsdict(web.repo, ctx.node()),
-                bookmarks=webutil.nodebookmarksdict(web.repo, ctx.node()),
-                branch=webutil.nodebranchnodefault(ctx),
-                inbranch=webutil.nodeinbranch(web.repo, ctx),
-                branches=webutil.nodebranchdict(web.repo, ctx))
+    The ``changeset`` template is rendered. Contents of the ``changesettag``,
+    ``changesetbookmark``, ``filenodelink``, ``filenolink``, and the many
+    templates related to diffs may all be used to produce the output.
+    """
+    ctx = webutil.changectx(web.repo, req)
 
-rev = changeset
+    return tmpl('changeset', **webutil.changesetentry(web, req, tmpl, ctx))
+
+rev = webcommand('rev')(changeset)
 
 def decodepath(path):
     """Hook for mapping a path in the repository to a path in the
@@ -392,7 +451,23 @@
     the virtual file system presented by the manifest command below."""
     return path
 
+@webcommand('manifest')
 def manifest(web, req, tmpl):
+    """
+    /manifest[/{revision}[/{path}]]
+    -------------------------------
+
+    Show information about a directory.
+
+    If the URL path arguments are omitted, information about the root
+    directory for the ``tip`` changeset will be shown.
+
+    Because this handler can only show information for directories, it
+    is recommended to use the ``file`` handler instead, as it can handle both
+    directories and files.
+
+    The ``manifest`` template will be rendered for this handler.
+    """
     ctx = webutil.changectx(web.repo, req)
     path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
     mf = ctx.manifest()
@@ -474,7 +549,18 @@
                 inbranch=webutil.nodeinbranch(web.repo, ctx),
                 branches=webutil.nodebranchdict(web.repo, ctx))
 
+@webcommand('tags')
 def tags(web, req, tmpl):
+    """
+    /tags
+    -----
+
+    Show information about tags.
+
+    No arguments are accepted.
+
+    The ``tags`` template is rendered.
+    """
     i = list(reversed(web.repo.tagslist()))
     parity = paritygen(web.stripecount)
 
@@ -496,7 +582,18 @@
                 entriesnotip=lambda **x: entries(True, False, **x),
                 latestentry=lambda **x: entries(True, True, **x))
 
+@webcommand('bookmarks')
 def bookmarks(web, req, tmpl):
+    """
+    /bookmarks
+    ----------
+
+    Show information about bookmarks.
+
+    No arguments are accepted.
+
+    The ``bookmarks`` template is rendered.
+    """
     i = [b for b in web.repo._bookmarks.items() if b[1] in web.repo]
     parity = paritygen(web.stripecount)
 
@@ -516,7 +613,20 @@
                 entries=lambda **x: entries(latestonly=False, **x),
                 latestentry=lambda **x: entries(latestonly=True, **x))
 
+@webcommand('branches')
 def branches(web, req, tmpl):
+    """
+    /branches
+    ---------
+
+    Show information about branches.
+
+    All known branches are contained in the output, even closed branches.
+
+    No arguments are accepted.
+
+    The ``branches`` template is rendered.
+    """
     tips = []
     heads = web.repo.heads()
     parity = paritygen(web.stripecount)
@@ -547,7 +657,19 @@
                 entries=lambda **x: entries(0, **x),
                 latestentry=lambda **x: entries(1, **x))
 
+@webcommand('summary')
 def summary(web, req, tmpl):
+    """
+    /summary
+    --------
+
+    Show a summary of repository state.
+
+    Information about the latest changesets, bookmarks, tags, and branches
+    is captured by this handler.
+
+    The ``summary`` template is rendered.
+    """
     i = reversed(web.repo.tagslist())
 
     def tagentries(**map):
@@ -632,7 +754,19 @@
                 node=tip.hex(),
                 archives=web.archivelist("tip"))
 
+@webcommand('filediff')
 def filediff(web, req, tmpl):
+    """
+    /diff/{revision}/{path}
+    -----------------------
+
+    Show how a file changed in a particular commit.
+
+    The ``filediff`` template is rendered.
+
+    This handler is registered under both the ``/diff`` and ``/filediff``
+    paths. ``/diff`` is used in modern code.
+    """
     fctx, ctx = None, None
     try:
         fctx = webutil.filectx(web.repo, req)
@@ -656,8 +790,12 @@
         style = req.form['style'][0]
 
     diffs = webutil.diffs(web.repo, tmpl, ctx, None, [path], parity, style)
-    rename = fctx and webutil.renamelink(fctx) or []
-    ctx = fctx and fctx or ctx
+    if fctx:
+        rename = webutil.renamelink(fctx)
+        ctx = fctx
+    else:
+        rename = []
+        ctx = ctx
     return tmpl("filediff",
                 file=path,
                 node=hex(n),
@@ -672,9 +810,25 @@
                 child=webutil.children(ctx),
                 diff=diffs)
 
-diff = filediff
+diff = webcommand('diff')(filediff)
+
+@webcommand('comparison')
+def comparison(web, req, tmpl):
+    """
+    /comparison/{revision}/{path}
+    -----------------------------
 
-def comparison(web, req, tmpl):
+    Show a comparison between the old and new versions of a file from changes
+    made on a particular revision.
+
+    This is similar to the ``diff`` handler. However, this form features
+    a split or side-by-side diff rather than a unified diff.
+
+    The ``context`` query string argument can be used to control the lines of
+    context in the diff.
+
+    The ``filecomparison`` template is rendered.
+    """
     ctx = webutil.changectx(web.repo, req)
     if 'file' not in req.form:
         raise ErrorResponse(HTTP_NOT_FOUND, 'file not given')
@@ -732,7 +886,16 @@
                 rightnode=hex(rightnode),
                 comparison=comparison)
 
+@webcommand('annotate')
 def annotate(web, req, tmpl):
+    """
+    /annotate/{revision}/{path}
+    ---------------------------
+
+    Show changeset information for each line in a file.
+
+    The ``fileannotate`` template is rendered.
+    """
     fctx = webutil.filectx(web.repo, req)
     f = fctx.path()
     parity = paritygen(web.stripecount)
@@ -764,6 +927,7 @@
                    "file": f.path(),
                    "targetline": targetline,
                    "line": l,
+                   "lineno": lineno + 1,
                    "lineid": "l%d" % (lineno + 1),
                    "linenumber": "% 6d" % (lineno + 1),
                    "revdate": f.date()}
@@ -784,7 +948,19 @@
                 child=webutil.children(fctx),
                 permissions=fctx.manifest().flags(f))
 
+@webcommand('filelog')
 def filelog(web, req, tmpl):
+    """
+    /filelog/{revision}/{path}
+    --------------------------
+
+    Show information about the history of a file in the repository.
+
+    The ``revcount`` query string argument can be defined to control the
+    maximum number of entries to show.
+
+    The ``filelog`` template will be rendered.
+    """
 
     try:
         fctx = webutil.filectx(web.repo, req)
@@ -862,7 +1038,27 @@
                 latestentry=latestentry,
                 revcount=revcount, morevars=morevars, lessvars=lessvars)
 
+@webcommand('archive')
 def archive(web, req, tmpl):
+    """
+    /archive/{revision}.{format}[/{path}]
+    -------------------------------------
+
+    Obtain an archive of repository content.
+
+    The content and type of the archive are defined by a URL path parameter.
+    ``format`` is the file extension of the archive type to be generated, e.g.
+    ``zip`` or ``tar.bz2``. Not all archive types may be allowed by your
+    server configuration.
+
+    The optional ``path`` URL parameter controls content to include in the
+    archive. If omitted, every file in the specified revision is present in the
+    archive. If included, only the specified file or contents of the specified
+    directory will be included in the archive.
+
+    No template is used for this handler. Raw, binary content is generated.
+    """
+
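As an illustration only (revision and path are made up): ``/archive/tip.zip`` would produce a zip of every file at ``tip``, while ``/archive/a1b2c3d4.tar.gz/docs`` would contain only the ``docs`` directory at that revision, provided the server's ``allow_archive`` configuration permits those formats.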
     type_ = req.form.get('type', [None])[0]
     allowed = web.configlist("web", "allow_archive")
     key = req.form['node'][0]
@@ -911,6 +1107,7 @@
     return []
 
 
+@webcommand('static')
 def static(web, req, tmpl):
     fname = req.form['file'][0]
     # a repo owner may set web.static in .hg/hgrc to get any file
@@ -924,7 +1121,24 @@
     staticfile(static, fname, req)
     return []
 
+@webcommand('graph')
 def graph(web, req, tmpl):
+    """
+    /graph[/{revision}]
+    -------------------
+
+    Show information about the graphical topology of the repository.
+
+    Information rendered by this handler can be used to create visual
+    representations of repository topology.
+
+    The ``revision`` URL parameter controls the starting changeset.
+
+    The ``revcount`` query string argument can define the number of changesets
+    to show information for.
+
+    This handler will render the ``graph`` template.
+    """
 
     ctx = webutil.changectx(web.repo, req)
     rev = ctx.rev()
@@ -1047,8 +1261,23 @@
         doc = _('(no help text available)')
     return doc
 
+@webcommand('help')
 def help(web, req, tmpl):
+    """
+    /help[/{topic}]
+    ---------------
+
+    Render help documentation.
+
+    This web command is roughly equivalent to :hg:`help`. If a ``topic``
+    is defined, that help topic will be rendered. If not, an index of
+    available help topics will be rendered.
+
+    The ``help`` template will be rendered when requesting help for a topic.
+    ``helptopics`` will be rendered for the index of help topics.
+    """
     from mercurial import commands # avoid cycle
+    from mercurial import help as helpmod # avoid cycle
 
     topicname = req.form.get('node', [None])[0]
     if not topicname:
--- a/mercurial/hgweb/webutil.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/hgweb/webutil.py	Thu Apr 16 20:57:51 2015 -0500
@@ -10,7 +10,7 @@
 from mercurial import match, patch, error, ui, util, pathutil, context
 from mercurial.i18n import _
 from mercurial.node import hex, nullid
-from common import ErrorResponse
+from common import ErrorResponse, paritygen
 from common import HTTP_NOT_FOUND
 import difflib
 
@@ -138,9 +138,10 @@
         yield d
 
 def parents(ctx, hide=None):
-    if (isinstance(ctx, context.basefilectx) and
-        ctx.changectx().rev() != ctx.linkrev()):
-        return _siblings([ctx._repo[ctx.linkrev()]], hide)
+    if isinstance(ctx, context.basefilectx):
+        introrev = ctx.introrev()
+        if ctx.changectx().rev() != introrev:
+            return _siblings([ctx.repo()[introrev]], hide)
     return _siblings(ctx.parents(), hide)
 
 def children(ctx, hide=None):
@@ -278,6 +279,62 @@
         "branches": nodebranchdict(repo, ctx)
     }
 
+def changesetentry(web, req, tmpl, ctx):
+    '''Obtain a dictionary to be used to render the "changeset" template.'''
+
+    showtags = showtag(web.repo, tmpl, 'changesettag', ctx.node())
+    showbookmarks = showbookmark(web.repo, tmpl, 'changesetbookmark',
+                                 ctx.node())
+    showbranch = nodebranchnodefault(ctx)
+
+    files = []
+    parity = paritygen(web.stripecount)
+    for blockno, f in enumerate(ctx.files()):
+        template = f in ctx and 'filenodelink' or 'filenolink'
+        files.append(tmpl(template,
+                          node=ctx.hex(), file=f, blockno=blockno + 1,
+                          parity=parity.next()))
+
+    basectx = basechangectx(web.repo, req)
+    if basectx is None:
+        basectx = ctx.p1()
+
+    style = web.config('web', 'style', 'paper')
+    if 'style' in req.form:
+        style = req.form['style'][0]
+
+    parity = paritygen(web.stripecount)
+    diff = diffs(web.repo, tmpl, ctx, basectx, None, parity, style)
+
+    parity = paritygen(web.stripecount)
+    diffstatsgen = diffstatgen(ctx, basectx)
+    diffstats = diffstat(tmpl, ctx, diffstatsgen, parity)
+
+    return dict(
+        diff=diff,
+        rev=ctx.rev(),
+        node=ctx.hex(),
+        parent=tuple(parents(ctx)),
+        child=children(ctx),
+        basenode=basectx.hex(),
+        changesettag=showtags,
+        changesetbookmark=showbookmarks,
+        changesetbranch=showbranch,
+        author=ctx.user(),
+        desc=ctx.description(),
+        extra=ctx.extra(),
+        date=ctx.date(),
+        phase=ctx.phasestr(),
+        files=files,
+        diffsummary=lambda **x: diffsummary(diffstatsgen),
+        diffstat=diffstats,
+        archives=web.archivelist(ctx.hex()),
+        tags=nodetagsdict(web.repo, ctx.node()),
+        bookmarks=nodebookmarksdict(web.repo, ctx.node()),
+        branch=nodebranchnodefault(ctx),
+        inbranch=nodeinbranch(web.repo, ctx),
+        branches=nodebranchdict(web.repo, ctx))
+
 def listfilediffs(tmpl, files, node, max):
     for f in files[:max]:
         yield tmpl('filedifflink', node=hex(node), file=f)
@@ -295,7 +352,7 @@
     blockcount = countgen()
     def prettyprintlines(diff, blockno):
         for lineno, l in enumerate(diff.splitlines(True)):
-            lineno = "%d.%d" % (blockno, lineno + 1)
+            difflineno = "%d.%d" % (blockno, lineno + 1)
             if l.startswith('+'):
                 ltype = "difflineplus"
             elif l.startswith('-'):
@@ -306,8 +363,9 @@
                 ltype = "diffline"
             yield tmpl(ltype,
                        line=l,
-                       lineid="l%s" % lineno,
-                       linenumber="% 8s" % lineno)
+                       lineno=lineno + 1,
+                       lineid="l%s" % difflineno,
+                       linenumber="% 8s" % difflineno)
 
     if files:
         m = match.exact(repo.root, repo.getcwd(), files)
@@ -317,7 +375,10 @@
     diffopts = patch.diffopts(repo.ui, untrusted=True)
     if basectx is None:
         parents = ctx.parents()
-        node1 = parents and parents[0].node() or nullid
+        if parents:
+            node1 = parents[0].node()
+        else:
+            node1 = nullid
     else:
         node1 = basectx.node()
     node2 = ctx.node()
@@ -345,8 +406,10 @@
         return tmpl('comparisonline',
                     type=type,
                     lineid=lineid,
+                    leftlineno=leftlineno,
                     leftlinenumber="% 6s" % (leftlineno or ''),
                     leftline=leftline or '',
+                    rightlineno=rightlineno,
                     rightlinenumber="% 6s" % (rightlineno or ''),
                     rightline=rightline or '')
 
--- a/mercurial/hook.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/hook.py	Thu Apr 16 20:57:51 2015 -0500
@@ -200,6 +200,11 @@
                 r = _pythonhook(ui, repo, name, hname, hookfn, args, throw) or r
             else:
                 r = _exthook(ui, repo, hname, cmd, args, throw) or r
+
+            # The stderr is fully buffered on Windows when connected to a pipe.
+            # A forcible flush is required to make small stderr data on the
+            # remote side available to the client immediately.
+            sys.stderr.flush()
     finally:
         if _redirect and oldstdout >= 0:
             os.dup2(oldstdout, stdoutno)
--- a/mercurial/httpclient/__init__.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/httpclient/__init__.py	Thu Apr 16 20:57:51 2015 -0500
@@ -330,7 +330,10 @@
         elif use_ssl is None:
             use_ssl = (port == 443)
         elif port is None:
-            port = (use_ssl and 443 or 80)
+            if use_ssl:
+                port = 443
+            else:
+                port = 80
         self.port = port
         if use_ssl and not socketutil.have_ssl:
             raise Exception('ssl requested but unavailable on this Python')
--- a/mercurial/localrepo.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/localrepo.py	Thu Apr 16 20:57:51 2015 -0500
@@ -107,14 +107,14 @@
         return self._repo.known(nodes)
 
     def getbundle(self, source, heads=None, common=None, bundlecaps=None,
-                  format='HG10', **kwargs):
+                  **kwargs):
         cg = exchange.getbundle(self._repo, source, heads=heads,
                                 common=common, bundlecaps=bundlecaps, **kwargs)
-        if bundlecaps is not None and 'HG2Y' in bundlecaps:
+        if bundlecaps is not None and 'HG20' in bundlecaps:
             # When requesting a bundle2, getbundle returns a stream to make the
             # wire level function happier. We need to build a proper object
             # from it in local peer.
-            cg = bundle2.unbundle20(self.ui, cg)
+            cg = bundle2.getunbundler(self.ui, cg)
         return cg
 
     # TODO We might want to move the next two calls into legacypeer and add
@@ -125,15 +125,33 @@
 
         This function handles the repo locking itself."""
         try:
-            cg = exchange.readbundle(self.ui, cg, None)
-            ret = exchange.unbundle(self._repo, cg, heads, 'push', url)
-            if util.safehasattr(ret, 'getchunks'):
-                # This is a bundle20 object, turn it into an unbundler.
-                # This little dance should be dropped eventually when the API
-                # is finally improved.
-                stream = util.chunkbuffer(ret.getchunks())
-                ret = bundle2.unbundle20(self.ui, stream)
-            return ret
+            try:
+                cg = exchange.readbundle(self.ui, cg, None)
+                ret = exchange.unbundle(self._repo, cg, heads, 'push', url)
+                if util.safehasattr(ret, 'getchunks'):
+                    # This is a bundle20 object, turn it into an unbundler.
+                    # This little dance should be dropped eventually when the
+                    # API is finally improved.
+                    stream = util.chunkbuffer(ret.getchunks())
+                    ret = bundle2.getunbundler(self.ui, stream)
+                return ret
+            except Exception, exc:
+                # If the exception contains output salvaged from a bundle2
+                # reply, we need to make sure it is printed before continuing
+                # to fail. So we build a bundle2 with such output and consume
+                # it directly.
+                #
+                # This is not very elegant but allows a "simple" solution for
+                # issue4594
+                output = getattr(exc, '_bundle2salvagedoutput', ())
+                if output:
+                    bundler = bundle2.bundle20(self._repo.ui)
+                    for out in output:
+                        bundler.addpart(out)
+                    stream = util.chunkbuffer(bundler.getchunks())
+                    b = bundle2.getunbundler(self.ui, stream)
+                    bundle2.processbundle(self._repo, b)
+                raise
         except error.PushRaced, exc:
             raise error.ResponseError(_('push failed:'), str(exc))
 
@@ -174,10 +192,10 @@
 
 class localrepository(object):
 
-    supportedformats = set(('revlogv1', 'generaldelta'))
+    supportedformats = set(('revlogv1', 'generaldelta', 'manifestv2'))
     _basesupported = supportedformats | set(('store', 'fncache', 'shared',
                                              'dotencode'))
-    openerreqs = set(('revlogv1', 'generaldelta'))
+    openerreqs = set(('revlogv1', 'generaldelta', 'manifestv2'))
     requirements = ['revlogv1']
     filtername = None
 
@@ -241,6 +259,8 @@
                     )
                 if self.ui.configbool('format', 'generaldelta', False):
                     requirements.append("generaldelta")
+                if self.ui.configbool('experimental', 'manifestv2', False):
+                    requirements.append("manifestv2")
                 requirements = set(requirements)
             else:
                 raise error.RepoError(_("repository %s not found") % path)
@@ -279,6 +299,7 @@
 
 
         self._branchcaches = {}
+        self._revbranchcache = None
         self.filterpats = {}
         self._datafilters = {}
         self._transref = self._lockref = self._wlockref = None
@@ -302,15 +323,17 @@
         self.names = namespaces.namespaces()
 
     def close(self):
-        pass
+        self._writecaches()
+
+    def _writecaches(self):
+        if self._revbranchcache:
+            self._revbranchcache.write()
 
     def _restrictcapabilities(self, caps):
-        # bundle2 is not ready for prime time, drop it unless explicitly
-        # required by the tests (or some brave tester)
-        if self.ui.configbool('experimental', 'bundle2-exp', False):
+        if self.ui.configbool('experimental', 'bundle2-advertise', True):
             caps = set(caps)
             capsblob = bundle2.encodecaps(bundle2.getrepocaps(self))
-            caps.add('bundle2-exp=' + urllib.quote(capsblob))
+            caps.add('bundle2=' + urllib.quote(capsblob))
         return caps
 
     def _applyrequirements(self, requirements):
@@ -323,6 +346,12 @@
         maxchainlen = self.ui.configint('format', 'maxchainlen')
         if maxchainlen is not None:
             self.svfs.options['maxchainlen'] = maxchainlen
+        manifestcachesize = self.ui.configint('format', 'manifestcachesize')
+        if manifestcachesize is not None:
+            self.svfs.options['manifestcachesize'] = manifestcachesize
+        usetreemanifest = self.ui.configbool('experimental', 'treemanifest')
+        if usetreemanifest is not None:
+            self.svfs.options['usetreemanifest'] = usetreemanifest
 
     def _writerequirements(self):
         reqfile = self.vfs("requires", "w")
@@ -417,9 +446,9 @@
         store = obsolete.obsstore(self.svfs, readonly=readonly,
                                   **kwargs)
         if store and readonly:
-            # message is rare enough to not be translated
-            msg = 'obsolete feature not enabled but %i markers found!\n'
-            self.ui.warn(msg % len(list(store)))
+            self.ui.warn(
+                _('obsolete feature not enabled but %i markers found!\n')
+                % len(list(store)))
         return store
 
     @storecache('00changelog.i')
@@ -462,7 +491,8 @@
 
     def __contains__(self, changeid):
         try:
-            return bool(self.lookup(changeid))
+            self[changeid]
+            return True
         except error.RepoLookupError:
             return False
 
@@ -479,7 +509,7 @@
         '''Return a list of revisions matching the given revset'''
         expr = revset.formatspec(expr, *args)
         m = revset.match(None, expr)
-        return m(self, revset.spanset(self))
+        return m(self)
 
     def set(self, expr, *args):
         '''
@@ -520,7 +550,11 @@
             if prevtags and prevtags[-1] != '\n':
                 fp.write('\n')
             for name in names:
-                m = munge and munge(name) or name
+                if munge:
+                    m = munge(name)
+                else:
+                    m = name
+
                 if (self._tagscache.tagtypes and
                     name in self._tagscache.tagtypes):
                     old = self.tags().get(name, nullid)
@@ -718,6 +752,12 @@
         branchmap.updatecache(self)
         return self._branchcaches[self.filtername]
 
+    @unfilteredmethod
+    def revbranchcache(self):
+        if not self._revbranchcache:
+            self._revbranchcache = branchmap.revbranchcache(self.unfiltered())
+        return self._revbranchcache
+
     def branchtip(self, branch, ignoremissing=False):
         '''return the tip node for a given branch
 
@@ -890,12 +930,21 @@
 
     def currenttransaction(self):
         """return the current transaction or None if none exists"""
-        tr = self._transref and self._transref() or None
+        if self._transref:
+            tr = self._transref()
+        else:
+            tr = None
+
         if tr and tr.running():
             return tr
         return None
 
     def transaction(self, desc, report=None):
+        if (self.ui.configbool('devel', 'all')
+                or self.ui.configbool('devel', 'check-locks')):
+            l = self._lockref and self._lockref()
+            if l is None or not l.held:
+                scmutil.develwarn(self.ui, 'transaction with no lock')
         tr = self.currenttransaction()
         if tr is not None:
             return tr.nest()
@@ -906,19 +955,50 @@
                 _("abandoned transaction found"),
                 hint=_("run 'hg recover' to clean up transaction"))
 
+        self.hook('pretxnopen', throw=True, txnname=desc)
+
         self._writejournal(desc)
         renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
-        rp = report and report or self.ui.warn
+        if report:
+            rp = report
+        else:
+            rp = self.ui.warn
         vfsmap = {'plain': self.vfs} # root of .hg/
-        tr = transaction.transaction(rp, self.svfs, vfsmap,
+        # we must avoid cyclic reference between repo and transaction.
+        reporef = weakref.ref(self)
+        def validate(tr):
+            """will run pre-closing hooks"""
+            pending = lambda: tr.writepending() and self.root or ""
+            reporef().hook('pretxnclose', throw=True, pending=pending,
+                           txnname=desc, **tr.hookargs)
+
+        tr = transaction.transaction(rp, self.sopener, vfsmap,
                                      "journal",
                                      "undo",
                                      aftertrans(renames),
-                                     self.store.createmode)
+                                     self.store.createmode,
+                                     validator=validate)
+
+        trid = 'TXN:' + util.sha1("%s#%f" % (id(tr), time.time())).hexdigest()
+        tr.hookargs['TXNID'] = trid
         # note: writing the fncache only during finalize mean that the file is
         # outdated when running hooks. As fncache is used for streaming clone,
         # this is not expected to break anything that happen during the hooks.
         tr.addfinalize('flush-fncache', self.store.write)
+        def txnclosehook(tr2):
+            """To be run if transaction is successful, will schedule a hook run
+            """
+            def hook():
+                reporef().hook('txnclose', throw=False, txnname=desc,
+                               **tr2.hookargs)
+            reporef()._afterlock(hook)
+        tr.addfinalize('txnclose-hook', txnclosehook)
+        def txnaborthook(tr2):
+            """To be run if transaction is aborted
+            """
+            reporef().hook('txnabort', throw=False, txnname=desc,
+                           **tr2.hookargs)
+        tr.addabort('txnabort-hook', txnaborthook)
         self._transref = weakref.ref(tr)
         return tr
 
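With this change the transaction fires ``pretxnopen`` when it is opened, ``pretxnclose`` just before it closes (via the ``validate`` callback), ``txnclose`` after it succeeds, and ``txnabort`` if it is aborted. A hedged sketch of an in-process Python hook that could be attached to any of these through the ``[hooks]`` configuration; the function name and output format are illustrative:

    # Hypothetical hook: report which transaction event fired and its id.
    # Python hooks receive ui, repo, hooktype plus keyword arguments, which
    # for these events include txnname and the TXNID set up in tr.hookargs.
    def logtxn(ui, repo, hooktype, **kwargs):
        ui.write('%s: %s %s\n' % (hooktype,
                                  kwargs.get('txnname', '<unknown>'),
                                  kwargs.get('TXNID', '')))
        return False  # a true return value would abort the pretxn* hooks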
@@ -1036,6 +1116,9 @@
             else:
                 ui.status(_('working directory now based on '
                             'revision %d\n') % parents)
+            ms = mergemod.mergestate(self)
+            ms.reset(self['.'].node())
+
         # TODO: if we know which new heads may result from this rollback, pass
         # them to destroy(), which will prevent the branchhead cache from being
         # invalidated.
@@ -1123,7 +1206,10 @@
     def lock(self, wait=True):
         '''Lock the repository store (.hg/store) and return a weak reference
         to the lock. Use this before modifying the store (e.g. committing or
-        stripping). If you are opening a transaction, get a lock as well.)'''
+        stripping). If you are opening a transaction, get a lock as well.
+
+        If both 'lock' and 'wlock' must be acquired, ensure you always acquire
+        'wlock' first to avoid a dead-lock hazard.'''
         l = self._lockref and self._lockref()
         if l is not None and l.held:
             l.lock()
@@ -1143,12 +1229,24 @@
     def wlock(self, wait=True):
         '''Lock the non-store parts of the repository (everything under
         .hg except .hg/store) and return a weak reference to the lock.
-        Use this before modifying files in .hg.'''
+
+        Use this before modifying files in .hg.
+
+        If both 'lock' and 'wlock' must be acquired, ensure you always acquire
+        'wlock' first to avoid a dead-lock hazard.'''
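A short sketch of the acquisition order both docstrings prescribe; illustrative only, with error handling reduced to a ``finally``:

    # Take 'wlock' before 'lock' and release in the reverse order.
    def locked_operation(repo):
        wlock = lock = None
        try:
            wlock = repo.wlock()   # working-directory lock first
            lock = repo.lock()     # then the store lock
            # ... work that touches both the store and the working copy ...
        finally:
            if lock is not None:
                lock.release()
            if wlock is not None:
                wlock.release()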
         l = self._wlockref and self._wlockref()
         if l is not None and l.held:
             l.lock()
             return l
 
+        # We do not need to check for non-waiting lock acquisition.  Such
+        # acquisition would not cause a dead-lock as it would just fail.
+        if wait and (self.ui.configbool('devel', 'all')
+                     or self.ui.configbool('devel', 'check-locks')):
+            l = self._lockref and self._lockref()
+            if l is not None and l.held:
+                scmutil.develwarn(self.ui, '"wlock" acquired after "lock"')
+
         def unlock():
             if self.dirstate.pendingparentchange():
                 self.dirstate.invalidate()
@@ -1169,11 +1267,15 @@
         """
 
         fname = fctx.path()
-        text = fctx.data()
-        flog = self.file(fname)
         fparent1 = manifest1.get(fname, nullid)
         fparent2 = manifest2.get(fname, nullid)
+        if isinstance(fctx, context.filectx):
+            node = fctx.filenode()
+            if node in [fparent1, fparent2]:
+                self.ui.debug('reusing %s filelog entry\n' % fname)
+                return node
 
+        flog = self.file(fname)
         meta = {}
         copy = fctx.renamed()
         if copy and copy[0] != fname:
@@ -1208,7 +1310,7 @@
 
             # Here, we used to search backwards through history to try to find
             # where the file copy came from if the source of a copy was not in
-            # the parent diretory. However, this doesn't actually make sense to
+            # the parent directory. However, this doesn't actually make sense to
             # do (what does a copy from something not in your working copy even
             # mean?) and it causes bugs (eg, issue4476). Instead, we will warn
             # the user that copy information was dropped, so if they didn't
@@ -1235,6 +1337,7 @@
                 fparent2 = nullid
 
         # is the file changed?
+        text = fctx.data()
         if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
             changelist.append(fname)
             return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
@@ -1270,8 +1373,7 @@
             wctx = self[None]
             merge = len(wctx.parents()) > 1
 
-            if (not force and merge and match and
-                (match.files() or match.anypats())):
+            if not force and merge and not match.always():
                 raise util.Abort(_('cannot partially commit a merge '
                                    '(do not specify files or patterns)'))
 
@@ -1302,10 +1404,10 @@
                         if not force:
                             raise util.Abort(
                                 _("commit with new subrepo %s excluded") % s)
-                    if wctx.sub(s).dirty(True):
+                    dirtyreason = wctx.sub(s).dirtyreason(True)
+                    if dirtyreason:
                         if not self.ui.configbool('ui', 'commitsubrepos'):
-                            raise util.Abort(
-                                _("uncommitted changes in subrepo %s") % s,
+                            raise util.Abort(dirtyreason,
                                 hint=_("use --subrepos for recursive commit"))
                         subs.append(s)
                         commitsubs.add(s)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/manifest.c	Thu Apr 16 20:57:51 2015 -0500
@@ -0,0 +1,921 @@
+/*
+ * manifest.c - manifest type that does on-demand parsing.
+ *
+ * Copyright 2015, Google Inc.
+ *
+ * This software may be used and distributed according to the terms of
+ * the GNU General Public License, incorporated herein by reference.
+ */
+#include <Python.h>
+
+#include <assert.h>
+#include <string.h>
+#include <stdlib.h>
+
+#include "util.h"
+
+#define DEFAULT_LINES 100000
+
+typedef struct {
+	char *start;
+	Py_ssize_t len; /* length of line including terminal newline */
+	char hash_suffix;
+	bool from_malloc;
+	bool deleted;
+} line;
+
+typedef struct {
+	PyObject_HEAD
+	PyObject *pydata;
+	line *lines;
+	int numlines; /* number of line entries */
+	int livelines; /* number of non-deleted lines */
+	int maxlines; /* allocated number of lines */
+	bool dirty;
+} lazymanifest;
+
+#define MANIFEST_OOM -1
+#define MANIFEST_NOT_SORTED -2
+#define MANIFEST_MALFORMED -3
+
+/* defined in parsers.c */
+PyObject *unhexlify(const char *str, int len);
+
+/* get the length of the path for a line */
+static size_t pathlen(line *l) {
+	return strlen(l->start);
+}
+
+/* get the node value of a single line */
+static PyObject *nodeof(line *l) {
+	char *s = l->start;
+	ssize_t llen = pathlen(l);
+	PyObject *hash = unhexlify(s + llen + 1, 40);
+	if (!hash) {
+		return NULL;
+	}
+	if (l->hash_suffix != '\0') {
+		char newhash[21];
+		memcpy(newhash, PyString_AsString(hash), 20);
+		Py_DECREF(hash);
+		newhash[20] = l->hash_suffix;
+		hash = PyString_FromStringAndSize(newhash, 21);
+	}
+	return hash;
+}
+
+/* get the node hash and flags of a line as a tuple */
+static PyObject *hashflags(line *l)
+{
+	char *s = l->start;
+	size_t plen = pathlen(l);
+	PyObject *hash = nodeof(l);
+
+	/* 40 for hash, 1 for null byte, 1 for newline */
+	size_t hplen = plen + 42;
+	Py_ssize_t flen = l->len - hplen;
+	PyObject *flags;
+	PyObject *tup;
+
+	if (!hash)
+		return NULL;
+	flags = PyString_FromStringAndSize(s + hplen - 1, flen);
+	if (!flags) {
+		Py_DECREF(hash);
+		return NULL;
+	}
+	tup = PyTuple_Pack(2, hash, flags);
+	Py_DECREF(flags);
+	Py_DECREF(hash);
+	return tup;
+}
+
+/* if we're about to run out of space in the line index, add more */
+static bool realloc_if_full(lazymanifest *self)
+{
+	if (self->numlines == self->maxlines) {
+		self->maxlines *= 2;
+		self->lines = realloc(self->lines, self->maxlines * sizeof(line));
+	}
+	return !!self->lines;
+}
+
+/*
+ * Find the line boundaries in the manifest that 'data' points to and store
+ * information about each line in 'self'.
+ */
+static int find_lines(lazymanifest *self, char *data, Py_ssize_t len)
+{
+	char *prev = NULL;
+	while (len > 0) {
+		line *l;
+		char *next = memchr(data, '\n', len);
+		if (!next) {
+			return MANIFEST_MALFORMED;
+		}
+		next++; /* advance past newline */
+		if (!realloc_if_full(self)) {
+			return MANIFEST_OOM; /* no memory */
+		}
+		if (prev && strcmp(prev, data) > -1) {
+			/* This data isn't sorted, so we have to abort. */
+			return MANIFEST_NOT_SORTED;
+		}
+		l = self->lines + ((self->numlines)++);
+		l->start = data;
+		l->len = next - data;
+		l->hash_suffix = '\0';
+		l->from_malloc = false;
+		l->deleted = false;
+		len = len - l->len;
+		prev = data;
+		data = next;
+	}
+	self->livelines = self->numlines;
+	return 0;
+}
+
+static int lazymanifest_init(lazymanifest *self, PyObject *args)
+{
+	char *data;
+	Py_ssize_t len;
+	int err, ret;
+	PyObject *pydata;
+	if (!PyArg_ParseTuple(args, "S", &pydata)) {
+		return -1;
+	}
+	err = PyString_AsStringAndSize(pydata, &data, &len);
+
+	self->dirty = false;
+	if (err == -1)
+		return -1;
+	self->pydata = pydata;
+	Py_INCREF(self->pydata);
+	Py_BEGIN_ALLOW_THREADS
+	self->lines = malloc(DEFAULT_LINES * sizeof(line));
+	self->maxlines = DEFAULT_LINES;
+	self->numlines = 0;
+	if (!self->lines)
+		ret = MANIFEST_OOM;
+	else
+		ret = find_lines(self, data, len);
+	Py_END_ALLOW_THREADS
+	switch (ret) {
+	case 0:
+		break;
+	case MANIFEST_OOM:
+		PyErr_NoMemory();
+		break;
+	case MANIFEST_NOT_SORTED:
+		PyErr_Format(PyExc_ValueError,
+			     "Manifest lines not in sorted order.");
+		break;
+	case MANIFEST_MALFORMED:
+		PyErr_Format(PyExc_ValueError,
+			     "Manifest did not end in a newline.");
+		break;
+	default:
+		PyErr_Format(PyExc_ValueError,
+			     "Unknown problem parsing manifest.");
+	}
+	return ret == 0 ? 0 : -1;
+}
+
+static void lazymanifest_dealloc(lazymanifest *self)
+{
+	/* free any extra lines we had to allocate */
+	int i;
+	for (i = 0; i < self->numlines; i++) {
+		if (self->lines[i].from_malloc) {
+			free(self->lines[i].start);
+		}
+	}
+	if (self->lines) {
+		free(self->lines);
+		self->lines = NULL;
+	}
+	if (self->pydata) {
+		Py_DECREF(self->pydata);
+		self->pydata = NULL;
+	}
+	PyObject_Del(self);
+}
+
+/* iteration support */
+
+typedef struct {
+	PyObject_HEAD lazymanifest *m;
+	Py_ssize_t pos;
+} lmIter;
+
+static void lmiter_dealloc(PyObject *o)
+{
+	lmIter *self = (lmIter *)o;
+	Py_DECREF(self->m);
+	PyObject_Del(self);
+}
+
+static line *lmiter_nextline(lmIter *self)
+{
+	do {
+		self->pos++;
+		if (self->pos >= self->m->numlines) {
+			return NULL;
+		}
+		/* skip over deleted manifest entries */
+	} while (self->m->lines[self->pos].deleted);
+	return self->m->lines + self->pos;
+}
+
+static PyObject *lmiter_iterentriesnext(PyObject *o)
+{
+	size_t pl;
+	line *l;
+	Py_ssize_t consumed;
+	PyObject *ret = NULL, *path = NULL, *hash = NULL, *flags = NULL;
+	l = lmiter_nextline((lmIter *)o);
+	if (!l) {
+		goto bail;
+	}
+	pl = pathlen(l);
+	path = PyString_FromStringAndSize(l->start, pl);
+	hash = nodeof(l);
+	consumed = pl + 41;
+	flags = PyString_FromStringAndSize(l->start + consumed,
+									   l->len - consumed - 1);
+	if (!path || !hash || !flags) {
+		goto bail;
+	}
+	ret = PyTuple_Pack(3, path, hash, flags);
+ bail:
+	Py_XDECREF(path);
+	Py_XDECREF(hash);
+	Py_XDECREF(flags);
+	return ret;
+}
+
+static PyTypeObject lazymanifestEntriesIterator = {
+	PyObject_HEAD_INIT(NULL)
+	0,                               /*ob_size */
+	"parsers.lazymanifest.entriesiterator", /*tp_name */
+	sizeof(lmIter),                  /*tp_basicsize */
+	0,                               /*tp_itemsize */
+	lmiter_dealloc,                  /*tp_dealloc */
+	0,                               /*tp_print */
+	0,                               /*tp_getattr */
+	0,                               /*tp_setattr */
+	0,                               /*tp_compare */
+	0,                               /*tp_repr */
+	0,                               /*tp_as_number */
+	0,                               /*tp_as_sequence */
+	0,                               /*tp_as_mapping */
+	0,                               /*tp_hash */
+	0,                               /*tp_call */
+	0,                               /*tp_str */
+	0,                               /*tp_getattro */
+	0,                               /*tp_setattro */
+	0,                               /*tp_as_buffer */
+	/* tp_flags: Py_TPFLAGS_HAVE_ITER tells python to
+	   use tp_iter and tp_iternext fields. */
+	Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_ITER,
+	"Iterator for 3-tuples in a lazymanifest.",  /* tp_doc */
+	0,                               /* tp_traverse */
+	0,                               /* tp_clear */
+	0,                               /* tp_richcompare */
+	0,                               /* tp_weaklistoffset */
+	PyObject_SelfIter,               /* tp_iter: __iter__() method */
+	lmiter_iterentriesnext,          /* tp_iternext: next() method */
+};
+
+static PyObject *lmiter_iterkeysnext(PyObject *o)
+{
+	size_t pl;
+	line *l = lmiter_nextline((lmIter *)o);
+	if (!l) {
+		return NULL;
+	}
+	pl = pathlen(l);
+	return PyString_FromStringAndSize(l->start, pl);
+}
+
+static PyTypeObject lazymanifestKeysIterator = {
+	PyObject_HEAD_INIT(NULL)
+	0,                               /*ob_size */
+	"parsers.lazymanifest.keysiterator", /*tp_name */
+	sizeof(lmIter),                  /*tp_basicsize */
+	0,                               /*tp_itemsize */
+	lmiter_dealloc,                  /*tp_dealloc */
+	0,                               /*tp_print */
+	0,                               /*tp_getattr */
+	0,                               /*tp_setattr */
+	0,                               /*tp_compare */
+	0,                               /*tp_repr */
+	0,                               /*tp_as_number */
+	0,                               /*tp_as_sequence */
+	0,                               /*tp_as_mapping */
+	0,                               /*tp_hash */
+	0,                               /*tp_call */
+	0,                               /*tp_str */
+	0,                               /*tp_getattro */
+	0,                               /*tp_setattro */
+	0,                               /*tp_as_buffer */
+	/* tp_flags: Py_TPFLAGS_HAVE_ITER tells python to
+	   use tp_iter and tp_iternext fields. */
+	Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_ITER,
+	"Keys iterator for a lazymanifest.",  /* tp_doc */
+	0,                               /* tp_traverse */
+	0,                               /* tp_clear */
+	0,                               /* tp_richcompare */
+	0,                               /* tp_weaklistoffset */
+	PyObject_SelfIter,               /* tp_iter: __iter__() method */
+	lmiter_iterkeysnext,             /* tp_iternext: next() method */
+};
+
+static lazymanifest *lazymanifest_copy(lazymanifest *self);
+
+static PyObject *lazymanifest_getentriesiter(lazymanifest *self)
+{
+	lmIter *i = NULL;
+	lazymanifest *t = lazymanifest_copy(self);
+	if (!t) {
+		PyErr_NoMemory();
+		return NULL;
+	}
+	i = PyObject_New(lmIter, &lazymanifestEntriesIterator);
+	if (i) {
+		i->m = t;
+		i->pos = -1;
+	} else {
+		Py_DECREF(t);
+		PyErr_NoMemory();
+	}
+	return (PyObject *)i;
+}
+
+static PyObject *lazymanifest_getkeysiter(lazymanifest *self)
+{
+	lmIter *i = NULL;
+	lazymanifest *t = lazymanifest_copy(self);
+	if (!t) {
+		PyErr_NoMemory();
+		return NULL;
+	}
+	i = PyObject_New(lmIter, &lazymanifestKeysIterator);
+	if (i) {
+		i->m = t;
+		i->pos = -1;
+	} else {
+		Py_DECREF(t);
+		PyErr_NoMemory();
+	}
+	return (PyObject *)i;
+}
+
+/* __getitem__ and __setitem__ support */
+
+static Py_ssize_t lazymanifest_size(lazymanifest *self)
+{
+	return self->livelines;
+}
+
+static int linecmp(const void *left, const void *right)
+{
+	return strcmp(((const line *)left)->start,
+		      ((const line *)right)->start);
+}
+
+static PyObject *lazymanifest_getitem(lazymanifest *self, PyObject *key)
+{
+	line needle;
+	line *hit;
+	if (!PyString_Check(key)) {
+		PyErr_Format(PyExc_TypeError,
+			     "getitem: manifest keys must be a string.");
+		return NULL;
+	}
+	needle.start = PyString_AsString(key);
+	hit = bsearch(&needle, self->lines, self->numlines, sizeof(line),
+		      &linecmp);
+	if (!hit || hit->deleted) {
+		PyErr_Format(PyExc_KeyError, "No such manifest entry.");
+		return NULL;
+	}
+	return hashflags(hit);
+}
+
+static int lazymanifest_delitem(lazymanifest *self, PyObject *key)
+{
+	line needle;
+	line *hit;
+	if (!PyString_Check(key)) {
+		PyErr_Format(PyExc_TypeError,
+			     "delitem: manifest keys must be a string.");
+		return -1;
+	}
+	needle.start = PyString_AsString(key);
+	hit = bsearch(&needle, self->lines, self->numlines, sizeof(line),
+		      &linecmp);
+	if (!hit || hit->deleted) {
+		PyErr_Format(PyExc_KeyError,
+			     "Tried to delete nonexistent manifest entry.");
+		return -1;
+	}
+	self->dirty = true;
+	hit->deleted = true;
+	self->livelines--;
+	return 0;
+}
+
+/* Do a binary search for the insertion point for new, creating the
+ * new entry if needed. */
+static int internalsetitem(lazymanifest *self, line *new) {
+	int start = 0, end = self->numlines;
+	while (start < end) {
+		int pos = start + (end - start) / 2;
+		int c = linecmp(new, self->lines + pos);
+		if (c < 0)
+			end = pos;
+		else if (c > 0)
+			start = pos + 1;
+		else {
+			if (self->lines[pos].deleted)
+				self->livelines++;
+			if (self->lines[pos].from_malloc)
+				free(self->lines[pos].start);
+			start = pos;
+			goto finish;
+		}
+	}
+	/* being here means we need to do an insert */
+	if (!realloc_if_full(self)) {
+		PyErr_NoMemory();
+		return -1;
+	}
+	memmove(self->lines + start + 1, self->lines + start,
+		(self->numlines - start) * sizeof(line));
+	self->numlines++;
+	self->livelines++;
+finish:
+	self->lines[start] = *new;
+	self->dirty = true;
+	return 0;
+}
+
+static int lazymanifest_setitem(
+	lazymanifest *self, PyObject *key, PyObject *value)
+{
+	char *path;
+	Py_ssize_t plen;
+	PyObject *pyhash;
+	Py_ssize_t hlen;
+	char *hash;
+	PyObject *pyflags;
+	char *flags;
+	Py_ssize_t flen;
+	size_t dlen;
+	char *dest;
+	int i;
+	line new;
+	if (!PyString_Check(key)) {
+		PyErr_Format(PyExc_TypeError,
+			     "setitem: manifest keys must be a string.");
+		return -1;
+	}
+	if (!value) {
+		return lazymanifest_delitem(self, key);
+	}
+	if (!PyTuple_Check(value) || PyTuple_Size(value) != 2) {
+		PyErr_Format(PyExc_TypeError,
+			     "Manifest values must be a tuple of (node, flags).");
+		return -1;
+	}
+	if (PyString_AsStringAndSize(key, &path, &plen) == -1) {
+		return -1;
+	}
+
+	pyhash = PyTuple_GetItem(value, 0);
+	if (!PyString_Check(pyhash)) {
+		PyErr_Format(PyExc_TypeError,
+			     "node must be a 20-byte string");
+		return -1;
+	}
+	hlen = PyString_Size(pyhash);
+	/* Some parts of the codebase try and set 21 or 22
+	 * byte "hash" values in order to perturb things for
+	 * status. We have to preserve at least the 21st
+	 * byte. Sigh. If there's a 22nd byte, we drop it on
+	 * the floor, which works fine.
+	 */
+	if (hlen != 20 && hlen != 21 && hlen != 22) {
+		PyErr_Format(PyExc_TypeError,
+			     "node must be a 20-byte string");
+		return -1;
+	}
+	hash = PyString_AsString(pyhash);
+
+	pyflags = PyTuple_GetItem(value, 1);
+	if (!PyString_Check(pyflags) || PyString_Size(pyflags) > 1) {
+		PyErr_Format(PyExc_TypeError,
+			     "flags must be a 0 or 1 byte string");
+		return -1;
+	}
+	if (PyString_AsStringAndSize(pyflags, &flags, &flen) == -1) {
+		return -1;
+	}
+	/* one null byte and one newline */
+	dlen = plen + 41 + flen + 1;
+	dest = malloc(dlen);
+	if (!dest) {
+		PyErr_NoMemory();
+		return -1;
+	}
+	memcpy(dest, path, plen + 1);
+	for (i = 0; i < 20; i++) {
+		/* Cast to unsigned, so it will not get sign-extended when promoted
+		 * to int (as is done when passing to a variadic function)
+		 */
+		sprintf(dest + plen + 1 + (i * 2), "%02x", (unsigned char)hash[i]);
+	}
+	memcpy(dest + plen + 41, flags, flen);
+	dest[plen + 41 + flen] = '\n';
+	new.start = dest;
+	new.len = dlen;
+	new.hash_suffix = '\0';
+	if (hlen > 20) {
+		new.hash_suffix = hash[20];
+	}
+	new.from_malloc = true;     /* is `start` a pointer we allocated? */
+	new.deleted = false;        /* is this entry deleted? */
+	if (internalsetitem(self, &new)) {
+		return -1;
+	}
+	return 0;
+}
+
+static PyMappingMethods lazymanifest_mapping_methods = {
+	(lenfunc)lazymanifest_size,             /* mp_length */
+	(binaryfunc)lazymanifest_getitem,       /* mp_subscript */
+	(objobjargproc)lazymanifest_setitem,    /* mp_ass_subscript */
+};
+
+/* sequence methods (important, or __contains__ builds an iterator) */
+
+static int lazymanifest_contains(lazymanifest *self, PyObject *key)
+{
+	line needle;
+	line *hit;
+	if (!PyString_Check(key)) {
+		/* Our keys are always strings, so if the contains
+		 * check is for a non-string, just return false. */
+		return 0;
+	}
+	needle.start = PyString_AsString(key);
+	hit = bsearch(&needle, self->lines, self->numlines, sizeof(line),
+		      &linecmp);
+	if (!hit || hit->deleted) {
+		return 0;
+	}
+	return 1;
+}
+
+static PySequenceMethods lazymanifest_seq_meths = {
+	(lenfunc)lazymanifest_size, /* sq_length */
+	0, /* sq_concat */
+	0, /* sq_repeat */
+	0, /* sq_item */
+	0, /* sq_slice */
+	0, /* sq_ass_item */
+	0, /* sq_ass_slice */
+	(objobjproc)lazymanifest_contains, /* sq_contains */
+	0, /* sq_inplace_concat */
+	0, /* sq_inplace_repeat */
+};
+
+
+/* Other methods (copy, diff, etc) */
+static PyTypeObject lazymanifestType;
+
+/* If the manifest has changes, build the new manifest text and reindex it. */
+static int compact(lazymanifest *self) {
+	int i;
+	ssize_t need = 0;
+	char *data;
+	line *src, *dst;
+	PyObject *pydata;
+	if (!self->dirty)
+		return 0;
+	for (i = 0; i < self->numlines; i++) {
+		if (!self->lines[i].deleted) {
+			need += self->lines[i].len;
+		}
+	}
+	pydata = PyString_FromStringAndSize(NULL, need);
+	if (!pydata)
+		return -1;
+	data = PyString_AsString(pydata);
+	if (!data) {
+		return -1;
+	}
+	src = self->lines;
+	dst = self->lines;
+	for (i = 0; i < self->numlines; i++, src++) {
+		char *tofree = NULL;
+		if (src->from_malloc) {
+			tofree = src->start;
+		}
+		if (!src->deleted) {
+			memcpy(data, src->start, src->len);
+			*dst = *src;
+			dst->start = data;
+			dst->from_malloc = false;
+			data += dst->len;
+			dst++;
+		}
+		free(tofree);
+	}
+	Py_DECREF(self->pydata);
+	self->pydata = pydata;
+	self->numlines = self->livelines;
+	self->dirty = false;
+	return 0;
+}
+
+static PyObject *lazymanifest_text(lazymanifest *self)
+{
+	if (compact(self) != 0) {
+		PyErr_NoMemory();
+		return NULL;
+	}
+	Py_INCREF(self->pydata);
+	return self->pydata;
+}
+
+static lazymanifest *lazymanifest_copy(lazymanifest *self)
+{
+	lazymanifest *copy = NULL;
+	if (compact(self) != 0) {
+		goto nomem;
+	}
+	copy = PyObject_New(lazymanifest, &lazymanifestType);
+	if (!copy) {
+		goto nomem;
+	}
+	copy->numlines = self->numlines;
+	copy->livelines = self->livelines;
+	copy->dirty = false;
+	copy->lines = malloc(self->maxlines *sizeof(line));
+	if (!copy->lines) {
+		goto nomem;
+	}
+	memcpy(copy->lines, self->lines, self->numlines * sizeof(line));
+	copy->maxlines = self->maxlines;
+	copy->pydata = self->pydata;
+	Py_INCREF(copy->pydata);
+	return copy;
+ nomem:
+	PyErr_NoMemory();
+	Py_XDECREF(copy);
+	return NULL;
+}
+
+static lazymanifest *lazymanifest_filtercopy(
+	lazymanifest *self, PyObject *matchfn)
+{
+	lazymanifest *copy = NULL;
+	int i;
+	if (!PyCallable_Check(matchfn)) {
+		PyErr_SetString(PyExc_TypeError, "matchfn must be callable");
+		return NULL;
+	}
+	/* compact ourselves first to avoid double-frees later when we
+	 * compact tmp so that it doesn't have random pointers to our
+	 * underlying from_malloc-data (self->pydata is safe) */
+	if (compact(self) != 0) {
+		goto nomem;
+	}
+	copy = PyObject_New(lazymanifest, &lazymanifestType);
+	copy->dirty = true;
+	copy->lines = malloc(self->maxlines * sizeof(line));
+	if (!copy->lines) {
+		goto nomem;
+	}
+	copy->maxlines = self->maxlines;
+	copy->numlines = 0;
+	copy->pydata = self->pydata;
+	Py_INCREF(self->pydata);
+	for (i = 0; i < self->numlines; i++) {
+		PyObject *arg = PyString_FromString(self->lines[i].start);
+		PyObject *arglist = PyTuple_Pack(1, arg);
+		PyObject *result = PyObject_CallObject(matchfn, arglist);
+		Py_DECREF(arglist);
+		Py_DECREF(arg);
+		/* if the callback raised an exception, just let it
+		 * through and give up */
+		if (!result) {
+			free(copy->lines);
+			Py_DECREF(self->pydata);
+			return NULL;
+		}
+		if (PyObject_IsTrue(result)) {
+			assert(!(self->lines[i].from_malloc));
+			copy->lines[copy->numlines++] = self->lines[i];
+		}
+		Py_DECREF(result);
+	}
+	copy->livelines = copy->numlines;
+	return copy;
+ nomem:
+	PyErr_NoMemory();
+	Py_XDECREF(copy);
+	return NULL;
+}
+
+static PyObject *lazymanifest_diff(lazymanifest *self, PyObject *args)
+{
+	lazymanifest *other;
+	PyObject *pyclean = NULL;
+	bool listclean;
+	PyObject *emptyTup = NULL, *ret = NULL;
+	PyObject *es;
+	int sneedle = 0, oneedle = 0;
+	if (!PyArg_ParseTuple(args, "O!|O", &lazymanifestType, &other, &pyclean)) {
+		return NULL;
+	}
+	listclean = (!pyclean) ? false : PyObject_IsTrue(pyclean);
+	es = PyString_FromString("");
+	if (!es) {
+		goto nomem;
+	}
+	emptyTup = PyTuple_Pack(2, Py_None, es);
+	Py_DECREF(es);
+	if (!emptyTup) {
+		goto nomem;
+	}
+	ret = PyDict_New();
+	if (!ret) {
+		goto nomem;
+	}
+	while (sneedle != self->numlines || oneedle != other->numlines) {
+		line *left = self->lines + sneedle;
+		line *right = other->lines + oneedle;
+		int result;
+		PyObject *key;
+		PyObject *outer;
+		/* If we're looking at a deleted entry and it's not
+		 * the end of the manifest, just skip it. */
+		if (left->deleted && sneedle < self->numlines) {
+			sneedle++;
+			continue;
+		}
+		if (right->deleted && oneedle < other->numlines) {
+			oneedle++;
+			continue;
+		}
+		/* if we're at the end of either manifest, then we
+		 * know the remaining items are adds so we can skip
+		 * the strcmp. */
+		if (sneedle == self->numlines) {
+			result = 1;
+		} else if (oneedle == other->numlines) {
+			result = -1;
+		} else {
+			result = linecmp(left, right);
+		}
+		key = result <= 0 ?
+			PyString_FromString(left->start) :
+			PyString_FromString(right->start);
+		if (!key)
+			goto nomem;
+		if (result < 0) {
+			PyObject *l = hashflags(left);
+			if (!l) {
+				goto nomem;
+			}
+			outer = PyTuple_Pack(2, l, emptyTup);
+			Py_DECREF(l);
+			if (!outer) {
+				goto nomem;
+			}
+			PyDict_SetItem(ret, key, outer);
+			Py_DECREF(outer);
+			sneedle++;
+		} else if (result > 0) {
+			PyObject *r = hashflags(right);
+			if (!r) {
+				goto nomem;
+			}
+			outer = PyTuple_Pack(2, emptyTup, r);
+			Py_DECREF(r);
+			if (!outer) {
+				goto nomem;
+			}
+			PyDict_SetItem(ret, key, outer);
+			Py_DECREF(outer);
+			oneedle++;
+		} else {
+			/* file exists in both manifests */
+			if (left->len != right->len
+			    || memcmp(left->start, right->start, left->len)
+			    || left->hash_suffix != right->hash_suffix) {
+				PyObject *l = hashflags(left);
+				PyObject *r;
+				if (!l) {
+					goto nomem;
+				}
+				r = hashflags(right);
+				if (!r) {
+					Py_DECREF(l);
+					goto nomem;
+				}
+				outer = PyTuple_Pack(2, l, r);
+				Py_DECREF(l);
+				Py_DECREF(r);
+				if (!outer) {
+					goto nomem;
+				}
+				PyDict_SetItem(ret, key, outer);
+				Py_DECREF(outer);
+			} else if (listclean) {
+				PyDict_SetItem(ret, key, Py_None);
+			}
+			sneedle++;
+			oneedle++;
+		}
+		Py_DECREF(key);
+	}
+	Py_DECREF(emptyTup);
+	return ret;
+ nomem:
+	PyErr_NoMemory();
+	Py_XDECREF(ret);
+	Py_XDECREF(emptyTup);
+	return NULL;
+}
+
+static PyMethodDef lazymanifest_methods[] = {
+	{"iterkeys", (PyCFunction)lazymanifest_getkeysiter, METH_NOARGS,
+	 "Iterate over file names in this lazymanifest."},
+	{"iterentries", (PyCFunction)lazymanifest_getentriesiter, METH_NOARGS,
+	 "Iterate over (path, nodeid, flags) typles in this lazymanifest."},
+	{"copy", (PyCFunction)lazymanifest_copy, METH_NOARGS,
+	 "Make a copy of this lazymanifest."},
+	{"filtercopy", (PyCFunction)lazymanifest_filtercopy, METH_O,
+	 "Make a copy of this manifest filtered by matchfn."},
+	{"diff", (PyCFunction)lazymanifest_diff, METH_VARARGS,
+	 "Compare this lazymanifest to another one."},
+	{"text", (PyCFunction)lazymanifest_text, METH_NOARGS,
+	 "Encode this manifest to text."},
+	{NULL},
+};
+
+static PyTypeObject lazymanifestType = {
+	PyObject_HEAD_INIT(NULL)
+	0,                                                /* ob_size */
+	"parsers.lazymanifest",                           /* tp_name */
+	sizeof(lazymanifest),                             /* tp_basicsize */
+	0,                                                /* tp_itemsize */
+	(destructor)lazymanifest_dealloc,                 /* tp_dealloc */
+	0,                                                /* tp_print */
+	0,                                                /* tp_getattr */
+	0,                                                /* tp_setattr */
+	0,                                                /* tp_compare */
+	0,                                                /* tp_repr */
+	0,                                                /* tp_as_number */
+	&lazymanifest_seq_meths,                          /* tp_as_sequence */
+	&lazymanifest_mapping_methods,                    /* tp_as_mapping */
+	0,                                                /* tp_hash */
+	0,                                                /* tp_call */
+	0,                                                /* tp_str */
+	0,                                                /* tp_getattro */
+	0,                                                /* tp_setattro */
+	0,                                                /* tp_as_buffer */
+	Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_SEQUENCE_IN, /* tp_flags */
+	"TODO(augie)",                                    /* tp_doc */
+	0,                                                /* tp_traverse */
+	0,                                                /* tp_clear */
+	0,                                                /* tp_richcompare */
+	0,                                             /* tp_weaklistoffset */
+	(getiterfunc)lazymanifest_getkeysiter,                /* tp_iter */
+	0,                                                /* tp_iternext */
+	lazymanifest_methods,                             /* tp_methods */
+	0,                                                /* tp_members */
+	0,                                                /* tp_getset */
+	0,                                                /* tp_base */
+	0,                                                /* tp_dict */
+	0,                                                /* tp_descr_get */
+	0,                                                /* tp_descr_set */
+	0,                                                /* tp_dictoffset */
+	(initproc)lazymanifest_init,                      /* tp_init */
+	0,                                                /* tp_alloc */
+};
+
+void manifest_module_init(PyObject * mod)
+{
+	lazymanifestType.tp_new = PyType_GenericNew;
+	if (PyType_Ready(&lazymanifestType) < 0)
+		return;
+	Py_INCREF(&lazymanifestType);
+
+	PyModule_AddObject(mod, "lazymanifest",
+			   (PyObject *)&lazymanifestType);
+}
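
A rough usage sketch of the new C type from the Python side (assuming the extension is built and the usual v1 manifest text of "path\0<40-hex-node><flags>\n" lines); the pure-Python _lazymanifest fallback added to manifest.py below is intended to behave the same way:

    from mercurial import parsers
    text = "bar/baz\0" + "1" * 40 + "\n" + "foo\0" + "2" * 40 + "x\n"
    lm = parsers.lazymanifest(text)
    node, flags = lm["foo"]                      # 20-byte binary node, 'x'
    assert list(lm.iterkeys()) == ["bar/baz", "foo"]
    sub = lm.filtercopy(lambda f: f.startswith("bar/"))
    # diff maps filename -> ((node1, flag1), (node2, flag2)); missing files
    # show up as (None, '')
    assert lm.diff(sub) == {"foo": ((node, "x"), (None, ""))}
    assert lm.text() == text
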
--- a/mercurial/manifest.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/manifest.py	Thu Apr 16 20:57:51 2015 -0500
@@ -8,53 +8,271 @@
 from i18n import _
 import mdiff, parsers, error, revlog, util
 import array, struct
+import os
 
-class manifestdict(dict):
-    def __init__(self, mapping=None, flags=None):
-        if mapping is None:
-            mapping = {}
-        if flags is None:
-            flags = {}
-        dict.__init__(self, mapping)
-        self._flags = flags
+propertycache = util.propertycache
+
+def _parsev1(data):
+    # This method does a little bit of excessive-looking
+    # precondition checking. This is so that the behavior of this
+    # class exactly matches its C counterpart to try and help
+    # prevent surprise breakage for anyone that develops against
+    # the pure version.
+    if data and data[-1] != '\n':
+        raise ValueError('Manifest did not end in a newline.')
+    prev = None
+    for l in data.splitlines():
+        if prev is not None and prev > l:
+            raise ValueError('Manifest lines not in sorted order.')
+        prev = l
+        f, n = l.split('\0')
+        if len(n) > 40:
+            yield f, revlog.bin(n[:40]), n[40:]
+        else:
+            yield f, revlog.bin(n), ''
+
+def _parsev2(data):
+    metadataend = data.find('\n')
+    # Just ignore metadata for now
+    pos = metadataend + 1
+    prevf = ''
+    while pos < len(data):
+        end = data.find('\n', pos + 1) # +1 to skip stem length byte
+        if end == -1:
+            raise ValueError('Manifest ended with incomplete file entry.')
+        stemlen = ord(data[pos])
+        items = data[pos + 1:end].split('\0')
+        f = prevf[:stemlen] + items[0]
+        if prevf > f:
+            raise ValueError('Manifest entries not in sorted order.')
+        fl = items[1]
+        # Just ignore metadata (items[2:] for now)
+        n = data[end + 1:end + 21]
+        yield f, n, fl
+        pos = end + 22
+        prevf = f
+
+def _parse(data):
+    """Generates (path, node, flags) tuples from a manifest text"""
+    if data.startswith('\0'):
+        return iter(_parsev2(data))
+    else:
+        return iter(_parsev1(data))
+
+def _text(it, usemanifestv2):
+    """Given an iterator over (path, node, flags) tuples, returns a manifest
+    text"""
+    if usemanifestv2:
+        return _textv2(it)
+    else:
+        return _textv1(it)
+
+def _textv1(it):
+    files = []
+    lines = []
+    _hex = revlog.hex
+    for f, n, fl in it:
+        files.append(f)
+        # if this is changed to support newlines in filenames,
+        # be sure to check the templates/ dir again (especially *-raw.tmpl)
+        lines.append("%s\0%s%s\n" % (f, _hex(n), fl))
+
+    _checkforbidden(files)
+    return ''.join(lines)
+
+def _textv2(it):
+    files = []
+    lines = ['\0\n']
+    prevf = ''
+    for f, n, fl in it:
+        files.append(f)
+        stem = os.path.commonprefix([prevf, f])
+        stemlen = min(len(stem), 255)
+        lines.append("%c%s\0%s\n%s\n" % (stemlen, f[stemlen:], fl, n))
+        prevf = f
+    _checkforbidden(files)
+    return ''.join(lines)
+
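A small, hedged round-trip sketch of the experimental v2 encoding above (a metadata line, stem-compressed paths, raw 20-byte nodes), relying only on the _textv2/_parsev2 helpers defined in this file:

    entries = [('foo/a', '\x11' * 20, ''), ('foo/b', '\x22' * 20, 'x')]
    data = _textv2(iter(entries))
    # data starts with the (currently empty) metadata line '\0\n'; the second
    # entry stores only the suffix 'b' plus a stem length byte of 4 ('foo/')
    assert list(_parsev2(data)) == entries
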
+class _lazymanifest(dict):
+    """This is the pure implementation of lazymanifest.
+
+    It has not been optimized *at all* and is not lazy.
+    """
+
+    def __init__(self, data):
+        dict.__init__(self)
+        for f, n, fl in _parse(data):
+            self[f] = n, fl
+
     def __setitem__(self, k, v):
-        assert v is not None
-        dict.__setitem__(self, k, v)
-    def flags(self, f):
-        return self._flags.get(f, "")
-    def setflag(self, f, flags):
-        """Set the flags (symlink, executable) for path f."""
-        self._flags[f] = flags
+        node, flag = v
+        assert node is not None
+        if len(node) > 21:
+            node = node[:21] # match c implementation behavior
+        dict.__setitem__(self, k, (node, flag))
+
+    def __iter__(self):
+        return iter(sorted(dict.keys(self)))
+
+    def iterkeys(self):
+        return iter(sorted(dict.keys(self)))
+
+    def iterentries(self):
+        return ((f, e[0], e[1]) for f, e in sorted(self.iteritems()))
+
     def copy(self):
-        return manifestdict(self, dict.copy(self._flags))
-    def intersectfiles(self, files):
-        '''make a new manifestdict with the intersection of self with files
+        c = _lazymanifest('')
+        c.update(self)
+        return c
+
+    def diff(self, m2, clean=False):
+        '''Finds changes between the current manifest and m2.'''
+        diff = {}
+
+        for fn, e1 in self.iteritems():
+            if fn not in m2:
+                diff[fn] = e1, (None, '')
+            else:
+                e2 = m2[fn]
+                if e1 != e2:
+                    diff[fn] = e1, e2
+                elif clean:
+                    diff[fn] = None
+
+        for fn, e2 in m2.iteritems():
+            if fn not in self:
+                diff[fn] = (None, ''), e2
+
+        return diff
+
+    def filtercopy(self, filterfn):
+        c = _lazymanifest('')
+        for f, n, fl in self.iterentries():
+            if filterfn(f):
+                c[f] = n, fl
+        return c
+
+    def text(self):
+        """Get the full data of this manifest as a bytestring."""
+        return _textv1(self.iterentries())
+
+try:
+    _lazymanifest = parsers.lazymanifest
+except AttributeError:
+    pass
+
+class manifestdict(object):
+    def __init__(self, data=''):
+        if data.startswith('\0'):
+            #_lazymanifest can not parse v2
+            self._lm = _lazymanifest('')
+            for f, n, fl in _parsev2(data):
+                self._lm[f] = n, fl
+        else:
+            self._lm = _lazymanifest(data)
+
+    def __getitem__(self, key):
+        return self._lm[key][0]
+
+    def find(self, key):
+        return self._lm[key]
+
+    def __len__(self):
+        return len(self._lm)
+
+    def __setitem__(self, key, node):
+        self._lm[key] = node, self.flags(key, '')
+
+    def __contains__(self, key):
+        return key in self._lm
+
+    def __delitem__(self, key):
+        del self._lm[key]
 
-        The algorithm assumes that files is much smaller than self.'''
-        ret = manifestdict()
-        for fn in files:
-            if fn in self:
-                ret[fn] = self[fn]
-                flags = self._flags.get(fn, None)
-                if flags:
-                    ret._flags[fn] = flags
-        return ret
+    def __iter__(self):
+        return self._lm.__iter__()
+
+    def iterkeys(self):
+        return self._lm.iterkeys()
+
+    def keys(self):
+        return list(self.iterkeys())
+
+    def filesnotin(self, m2):
+        '''Set of files in this manifest that are not in the other'''
+        files = set(self)
+        files.difference_update(m2)
+        return files
+
+    @propertycache
+    def _dirs(self):
+        return util.dirs(self)
+
+    def dirs(self):
+        return self._dirs
+
+    def hasdir(self, dir):
+        return dir in self._dirs
+
+    def _filesfastpath(self, match):
+        '''Checks whether we can correctly and quickly iterate over matcher
+        files instead of over manifest files.'''
+        files = match.files()
+        return (len(files) < 100 and (match.isexact() or
+            (not match.anypats() and util.all(fn in self for fn in files))))
+
+    def walk(self, match):
+        '''Generates matching file names.
+
+        Equivalent to manifest.matches(match).iterkeys(), but without creating
+        an entirely new manifest.
+
+        It also reports nonexistent files by marking them bad with match.bad().
+        '''
+        if match.always():
+            for f in iter(self):
+                yield f
+            return
+
+        fset = set(match.files())
+
+        # avoid the entire walk if we're only looking for specific files
+        if self._filesfastpath(match):
+            for fn in sorted(fset):
+                yield fn
+            return
+
+        for fn in self:
+            if fn in fset:
+                # specified pattern is the exact name
+                fset.remove(fn)
+            if match(fn):
+                yield fn
+
+        # for dirstate.walk, files=['.'] means "walk the whole tree".
+        # follow that here, too
+        fset.discard('.')
+
+        for fn in sorted(fset):
+            if not self.hasdir(fn):
+                match.bad(fn, None)
 
     def matches(self, match):
         '''generate a new manifest filtered by the match argument'''
         if match.always():
             return self.copy()
 
-        files = match.files()
-        if (match.matchfn == match.exact or
-            (not match.anypats() and util.all(fn in self for fn in files))):
-            return self.intersectfiles(files)
+        if self._filesfastpath(match):
+            m = manifestdict()
+            lm = self._lm
+            for fn in match.files():
+                if fn in lm:
+                    m._lm[fn] = lm[fn]
+            return m
 
-        mf = self.copy()
-        for fn in mf.keys():
-            if not match(fn):
-                del mf[fn]
-        return mf
+        m = manifestdict()
+        m._lm = self._lm.filtercopy(match)
+        return m
 
     def diff(self, m2, clean=False):
         '''Finds changes between the current manifest and m2.
@@ -71,35 +289,37 @@
         the nodeid will be None and the flags will be the empty
         string.
         '''
-        diff = {}
+        return self._lm.diff(m2._lm, clean)
+
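For reference, a minimal sketch of the returned shape (nodes shown as raw 20-byte strings, flags as '' or e.g. 'x'):

    m1 = manifestdict("a\0" + "1" * 40 + "\n")
    m2 = m1.copy()
    m2["a"] = "\x22" * 20
    m2.setflag("a", "x")
    # changed in both: ((old node, old flag), (new node, new flag))
    assert m1.diff(m2) == {"a": (("\x11" * 20, ""), ("\x22" * 20, "x"))}
    # with clean=True, unchanged files map to None instead of being omitted
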
+    def setflag(self, key, flag):
+        self._lm[key] = self[key], flag
 
-        for fn, n1 in self.iteritems():
-            fl1 = self._flags.get(fn, '')
-            n2 = m2.get(fn, None)
-            fl2 = m2._flags.get(fn, '')
-            if n2 is None:
-                fl2 = ''
-            if n1 != n2 or fl1 != fl2:
-                diff[fn] = ((n1, fl1), (n2, fl2))
-            elif clean:
-                diff[fn] = None
+    def get(self, key, default=None):
+        try:
+            return self._lm[key][0]
+        except KeyError:
+            return default
 
-        for fn, n2 in m2.iteritems():
-            if fn not in self:
-                fl2 = m2._flags.get(fn, '')
-                diff[fn] = ((None, ''), (n2, fl2))
-
-        return diff
+    def flags(self, key, default=''):
+        try:
+            return self._lm[key][1]
+        except KeyError:
+            return default
 
-    def text(self):
-        """Get the full data of this manifest as a bytestring."""
-        fl = sorted(self)
-        _checkforbidden(fl)
+    def copy(self):
+        c = manifestdict()
+        c._lm = self._lm.copy()
+        return c
 
-        hex, flags = revlog.hex, self.flags
-        # if this is changed to support newlines in filenames,
-        # be sure to check the templates/ dir again (especially *-raw.tmpl)
-        return ''.join("%s\0%s%s\n" % (f, hex(self[f]), flags(f)) for f in fl)
+    def iteritems(self):
+        return (x[:2] for x in self._lm.iterentries())
+
+    def text(self, usemanifestv2=False):
+        if usemanifestv2:
+            return _textv2(self._lm.iterentries())
+        else:
+            # use (probably) native version for v1
+            return self._lm.text()
 
     def fastdelta(self, base, changes):
         """Given a base manifest text as an array.array and a list of changes
@@ -119,7 +339,8 @@
             # bs will either be the index of the item or the insert point
             start, end = _msearch(addbuf, f, start)
             if not todelete:
-                l = "%s\0%s%s\n" % (f, revlog.hex(self[f]), self.flags(f))
+                h, fl = self._lm[f]
+                l = "%s\0%s%s\n" % (f, revlog.hex(h), fl)
             else:
                 if start == end:
                     # item we want to delete was not found, error out
@@ -213,21 +434,363 @@
                    + content for start, end, content in x)
     return deltatext, newaddlist
 
-def _parse(lines):
-    mfdict = manifestdict()
-    parsers.parse_manifest(mfdict, mfdict._flags, lines)
-    return mfdict
+def _splittopdir(f):
+    if '/' in f:
+        dir, subpath = f.split('/', 1)
+        return dir + '/', subpath
+    else:
+        return '', f
+
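The helper splits off only the first path component, keeping the trailing slash on the directory part:

    _splittopdir('foo/bar/baz')   # -> ('foo/', 'bar/baz')
    _splittopdir('baz')           # -> ('', 'baz')
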
+class treemanifest(object):
+    def __init__(self, dir='', text=''):
+        self._dir = dir
+        self._dirs = {}
+        # Using _lazymanifest here is a little slower than plain old dicts
+        self._files = {}
+        self._flags = {}
+        self.parse(text)
+
+    def _subpath(self, path):
+        return self._dir + path
+
+    def __len__(self):
+        size = len(self._files)
+        for m in self._dirs.values():
+            size += m.__len__()
+        return size
+
+    def _isempty(self):
+        return (not self._files and (not self._dirs or
+                util.all(m._isempty() for m in self._dirs.values())))
+
+    def __str__(self):
+        return '<treemanifest dir=%s>' % self._dir
+
+    def iteritems(self):
+        for p, n in sorted(self._dirs.items() + self._files.items()):
+            if p in self._files:
+                yield self._subpath(p), n
+            else:
+                for f, sn in n.iteritems():
+                    yield f, sn
+
+    def iterkeys(self):
+        for p in sorted(self._dirs.keys() + self._files.keys()):
+            if p in self._files:
+                yield self._subpath(p)
+            else:
+                for f in self._dirs[p].iterkeys():
+                    yield f
+
+    def keys(self):
+        return list(self.iterkeys())
+
+    def __iter__(self):
+        return self.iterkeys()
+
+    def __contains__(self, f):
+        if f is None:
+            return False
+        dir, subpath = _splittopdir(f)
+        if dir:
+            if dir not in self._dirs:
+                return False
+            return self._dirs[dir].__contains__(subpath)
+        else:
+            return f in self._files
+
+    def get(self, f, default=None):
+        dir, subpath = _splittopdir(f)
+        if dir:
+            if dir not in self._dirs:
+                return default
+            return self._dirs[dir].get(subpath, default)
+        else:
+            return self._files.get(f, default)
+
+    def __getitem__(self, f):
+        dir, subpath = _splittopdir(f)
+        if dir:
+            return self._dirs[dir].__getitem__(subpath)
+        else:
+            return self._files[f]
+
+    def flags(self, f):
+        dir, subpath = _splittopdir(f)
+        if dir:
+            if dir not in self._dirs:
+                return ''
+            return self._dirs[dir].flags(subpath)
+        else:
+            if f in self._dirs:
+                return ''
+            return self._flags.get(f, '')
+
+    def find(self, f):
+        dir, subpath = _splittopdir(f)
+        if dir:
+            return self._dirs[dir].find(subpath)
+        else:
+            return self._files[f], self._flags.get(f, '')
+
+    def __delitem__(self, f):
+        dir, subpath = _splittopdir(f)
+        if dir:
+            self._dirs[dir].__delitem__(subpath)
+            # If the directory is now empty, remove it
+            if self._dirs[dir]._isempty():
+                del self._dirs[dir]
+        else:
+            del self._files[f]
+            if f in self._flags:
+                del self._flags[f]
+
+    def __setitem__(self, f, n):
+        assert n is not None
+        dir, subpath = _splittopdir(f)
+        if dir:
+            if dir not in self._dirs:
+                self._dirs[dir] = treemanifest(self._subpath(dir))
+            self._dirs[dir].__setitem__(subpath, n)
+        else:
+            self._files[f] = n[:21] # to match manifestdict's behavior
+
+    def setflag(self, f, flags):
+        """Set the flags (symlink, executable) for path f."""
+        dir, subpath = _splittopdir(f)
+        if dir:
+            if dir not in self._dirs:
+                self._dirs[dir] = treemanifest(self._subpath(dir))
+            self._dirs[dir].setflag(subpath, flags)
+        else:
+            self._flags[f] = flags
+
+    def copy(self):
+        copy = treemanifest(self._dir)
+        for d in self._dirs:
+            copy._dirs[d] = self._dirs[d].copy()
+        copy._files = dict.copy(self._files)
+        copy._flags = dict.copy(self._flags)
+        return copy
+
+    def filesnotin(self, m2):
+        '''Set of files in this manifest that are not in the other'''
+        files = set()
+        def _filesnotin(t1, t2):
+            for d, m1 in t1._dirs.iteritems():
+                if d in t2._dirs:
+                    m2 = t2._dirs[d]
+                    _filesnotin(m1, m2)
+                else:
+                    files.update(m1.iterkeys())
+
+            for fn in t1._files.iterkeys():
+                if fn not in t2._files:
+                    files.add(t1._subpath(fn))
+
+        _filesnotin(self, m2)
+        return files
+
+    @propertycache
+    def _alldirs(self):
+        return util.dirs(self)
+
+    def dirs(self):
+        return self._alldirs
+
+    def hasdir(self, dir):
+        topdir, subdir = _splittopdir(dir)
+        if topdir:
+            if topdir in self._dirs:
+                return self._dirs[topdir].hasdir(subdir)
+            return False
+        return (dir + '/') in self._dirs
+
+    def walk(self, match):
+        '''Generates matching file names.
+
+        Equivalent to manifest.matches(match).iterkeys(), but without creating
+        an entirely new manifest.
+
+        It also reports nonexistent files by marking them bad with match.bad().
+        '''
+        if match.always():
+            for f in iter(self):
+                yield f
+            return
+
+        fset = set(match.files())
+
+        for fn in self._walk(match):
+            if fn in fset:
+                # specified pattern is the exact name
+                fset.remove(fn)
+            yield fn
+
+        # for dirstate.walk, files=['.'] means "walk the whole tree".
+        # follow that here, too
+        fset.discard('.')
+
+        for fn in sorted(fset):
+            if not self.hasdir(fn):
+                match.bad(fn, None)
+
+    def _walk(self, match, alldirs=False):
+        '''Recursively generates matching file names for walk().
+
+        Will visit all subdirectories if alldirs is True, otherwise it will
+        only visit subdirectories for which match.visitdir is True.'''
+
+        if not alldirs:
+            # substring to strip trailing slash
+            visit = match.visitdir(self._dir[:-1] or '.')
+            if not visit:
+                return
+            alldirs = (visit == 'all')
+
+        # yield this dir's files and walk its submanifests
+        for p in sorted(self._dirs.keys() + self._files.keys()):
+            if p in self._files:
+                fullp = self._subpath(p)
+                if match(fullp):
+                    yield fullp
+            else:
+                for f in self._dirs[p]._walk(match, alldirs):
+                    yield f
+
+    def matches(self, match):
+        '''generate a new manifest filtered by the match argument'''
+        if match.always():
+            return self.copy()
+
+        return self._matches(match)
+
+    def _matches(self, match, alldirs=False):
+        '''recursively generate a new manifest filtered by the match argument.
+
+        Will visit all subdirectories if alldirs is True, otherwise it will
+        only visit subdirectories for which match.visitdir is True.'''
+
+        ret = treemanifest(self._dir)
+        if not alldirs:
+            # substring to strip trailing slash
+            visit = match.visitdir(self._dir[:-1] or '.')
+            if not visit:
+                return ret
+            alldirs = (visit == 'all')
+
+        for fn in self._files:
+            fullp = self._subpath(fn)
+            if not match(fullp):
+                continue
+            ret._files[fn] = self._files[fn]
+            if fn in self._flags:
+                ret._flags[fn] = self._flags[fn]
+
+        for dir, subm in self._dirs.iteritems():
+            m = subm._matches(match, alldirs)
+            if not m._isempty():
+                ret._dirs[dir] = m
+
+        return ret
+
+    def diff(self, m2, clean=False):
+        '''Finds changes between the current manifest and m2.
+
+        Args:
+          m2: the manifest to which this manifest should be compared.
+          clean: if true, include files unchanged between these manifests
+                 with a None value in the returned dictionary.
+
+        The result is returned as a dict with filename as key and
+        values of the form ((n1,fl1),(n2,fl2)), where n1/n2 is the
+        nodeid in the current/other manifest and fl1/fl2 is the flag
+        in the current/other manifest. Where the file does not exist,
+        the nodeid will be None and the flags will be the empty
+        string.
+        '''
+        result = {}
+        emptytree = treemanifest()
+        def _diff(t1, t2):
+            for d, m1 in t1._dirs.iteritems():
+                m2 = t2._dirs.get(d, emptytree)
+                _diff(m1, m2)
+
+            for d, m2 in t2._dirs.iteritems():
+                if d not in t1._dirs:
+                    _diff(emptytree, m2)
+
+            for fn, n1 in t1._files.iteritems():
+                fl1 = t1._flags.get(fn, '')
+                n2 = t2._files.get(fn, None)
+                fl2 = t2._flags.get(fn, '')
+                if n1 != n2 or fl1 != fl2:
+                    result[t1._subpath(fn)] = ((n1, fl1), (n2, fl2))
+                elif clean:
+                    result[t1._subpath(fn)] = None
+
+            for fn, n2 in t2._files.iteritems():
+                if fn not in t1._files:
+                    fl2 = t2._flags.get(fn, '')
+                    result[t2._subpath(fn)] = ((None, ''), (n2, fl2))
+
+        _diff(self, m2)
+        return result
+
+    def parse(self, text):
+        for f, n, fl in _parse(text):
+            self[f] = n
+            if fl:
+                self.setflag(f, fl)
+
+    def text(self, usemanifestv2=False):
+        """Get the full data of this manifest as a bytestring."""
+        flags = self.flags
+        return _text(((f, self[f], flags(f)) for f in self.keys()),
+                     usemanifestv2)
 
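A hedged sketch of how the new treemanifest behaves for a couple of entries (nodes written as raw 20-byte strings, as elsewhere in this file):

    tm = treemanifest()
    tm['foo/bar'] = '\x11' * 20
    tm.setflag('foo/bar', 'x')
    tm['baz'] = '\x22' * 20
    assert len(tm) == 2
    assert tm.hasdir('foo')
    assert tm.flags('foo/bar') == 'x'
    # iteration is depth-first over sorted names, yielding full paths
    assert list(tm) == ['baz', 'foo/bar']
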
 class manifest(revlog.revlog):
     def __init__(self, opener):
-        # we expect to deal with not more than four revs at a time,
-        # during a commit --amend
-        self._mancache = util.lrucachedict(4)
+        # During normal operations, we expect to deal with not more than four
+        # revs at a time (such as during commit --amend). When rebasing large
+        # stacks of commits, the number can go up, hence the config knob below.
+        cachesize = 4
+        usetreemanifest = False
+        usemanifestv2 = False
+        opts = getattr(opener, 'options', None)
+        if opts is not None:
+            cachesize = opts.get('manifestcachesize', cachesize)
+            usetreemanifest = opts.get('usetreemanifest', usetreemanifest)
+            usemanifestv2 = opts.get('manifestv2', usemanifestv2)
+        self._mancache = util.lrucachedict(cachesize)
         revlog.revlog.__init__(self, opener, "00manifest.i")
+        self._treeinmem = usetreemanifest
+        self._treeondisk = usetreemanifest
+        self._usemanifestv2 = usemanifestv2
+
+    def _newmanifest(self, data=''):
+        if self._treeinmem:
+            return treemanifest('', data)
+        return manifestdict(data)
+
+    def _slowreaddelta(self, node):
+        r0 = self.deltaparent(self.rev(node))
+        m0 = self.read(self.node(r0))
+        m1 = self.read(node)
+        md = self._newmanifest()
+        for f, ((n0, fl0), (n1, fl1)) in m0.diff(m1).iteritems():
+            if n1:
+                md[f] = n1
+                if fl1:
+                    md.setflag(f, fl1)
+        return md
 
     def readdelta(self, node):
+        if self._usemanifestv2 or self._treeondisk:
+            return self._slowreaddelta(node)
         r = self.rev(node)
-        return _parse(mdiff.patchtext(self.revdiff(self.deltaparent(r), r)))
+        d = mdiff.patchtext(self.revdiff(self.deltaparent(r), r))
+        return self._newmanifest(d)
 
     def readfast(self, node):
         '''use the faster of readdelta or read'''
@@ -239,31 +802,27 @@
 
     def read(self, node):
         if node == revlog.nullid:
-            return manifestdict() # don't upset local cache
+            return self._newmanifest() # don't upset local cache
         if node in self._mancache:
             return self._mancache[node][0]
         text = self.revision(node)
         arraytext = array.array('c', text)
-        mapping = _parse(text)
-        self._mancache[node] = (mapping, arraytext)
-        return mapping
+        m = self._newmanifest(text)
+        self._mancache[node] = (m, arraytext)
+        return m
 
     def find(self, node, f):
         '''look up entry for a single file efficiently.
         return (node, flags) pair if found, (None, None) if not.'''
-        if node in self._mancache:
-            mapping = self._mancache[node][0]
-            return mapping.get(f), mapping.flags(f)
-        text = self.revision(node)
-        start, end = _msearch(text, f)
-        if start == end:
+        m = self.read(node)
+        try:
+            return m.find(f)
+        except KeyError:
             return None, None
-        l = text[start:end]
-        f, n = l.split('\0')
-        return revlog.bin(n[:40]), n[40:-1]
 
-    def add(self, map, transaction, link, p1, p2, added, removed):
-        if p1 in self._mancache:
+    def add(self, m, transaction, link, p1, p2, added, removed):
+        if (p1 in self._mancache and not self._treeinmem
+            and not self._usemanifestv2):
             # If our first parent is in the manifest cache, we can
             # compute a delta here using properties we know about the
             # manifest up-front, which may save time later for the
@@ -277,19 +836,19 @@
             # since the lists are already sorted
             work.sort()
 
-            arraytext, deltatext = map.fastdelta(self._mancache[p1][1], work)
+            arraytext, deltatext = m.fastdelta(self._mancache[p1][1], work)
             cachedelta = self.rev(p1), deltatext
             text = util.buffer(arraytext)
+            n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
         else:
             # The first parent manifest isn't already loaded, so we'll
             # just encode a fulltext of the manifest and pass that
             # through to the revlog layer, and let it handle the delta
             # process.
-            text = map.text()
+            text = m.text(self._usemanifestv2)
             arraytext = array.array('c', text)
-            cachedelta = None
+            n = self.addrevision(text, transaction, link, p1, p2)
 
-        n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
-        self._mancache[n] = (map, arraytext)
+        self._mancache[n] = (m, arraytext)
 
         return n
--- a/mercurial/match.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/match.py	Thu Apr 16 20:57:51 2015 -0500
@@ -9,6 +9,8 @@
 import util, pathutil
 from i18n import _
 
+propertycache = util.propertycache
+
 def _rematcher(regex):
     '''compile the regexp with the best available regexp engine and return a
     matcher function'''
@@ -34,6 +36,15 @@
         other.append((kind, pat))
     return fset, other
 
+def _kindpatsalwaysmatch(kindpats):
+    """"Checks whether the kindspats match everything, as e.g.
+    'relpath:.' does.
+    """
+    for kind, pat in kindpats:
+        if pat != '' or kind not in ['relpath', 'glob']:
+            return False
+    return True
+
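Concretely, based on the definition above:

    _kindpatsalwaysmatch([('relpath', ''), ('glob', '')])   # True
    _kindpatsalwaysmatch([('glob', '*.py')])                # False
    _kindpatsalwaysmatch([('re', '')])                      # False
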
 class match(object):
     def __init__(self, root, cwd, patterns, include=[], exclude=[],
                  default='glob', exact=False, auditor=None, ctx=None):
@@ -63,17 +74,16 @@
         self._cwd = cwd
         self._files = [] # exact files and roots of patterns
         self._anypats = bool(include or exclude)
-        self._ctx = ctx
         self._always = False
         self._pathrestricted = bool(include or exclude or patterns)
 
         matchfns = []
         if include:
-            kindpats = _normalize(include, 'glob', root, cwd, auditor)
+            kindpats = self._normalize(include, 'glob', root, cwd, auditor)
             self.includepat, im = _buildmatch(ctx, kindpats, '(?:/|$)')
             matchfns.append(im)
         if exclude:
-            kindpats = _normalize(exclude, 'glob', root, cwd, auditor)
+            kindpats = self._normalize(exclude, 'glob', root, cwd, auditor)
             self.excludepat, em = _buildmatch(ctx, kindpats, '(?:/|$)')
             matchfns.append(lambda f: not em(f))
         if exact:
@@ -83,11 +93,12 @@
                 self._files = list(patterns)
             matchfns.append(self.exact)
         elif patterns:
-            kindpats = _normalize(patterns, default, root, cwd, auditor)
-            self._files = _roots(kindpats)
-            self._anypats = self._anypats or _anypats(kindpats)
-            self.patternspat, pm = _buildmatch(ctx, kindpats, '$')
-            matchfns.append(pm)
+            kindpats = self._normalize(patterns, default, root, cwd, auditor)
+            if not _kindpatsalwaysmatch(kindpats):
+                self._files = _roots(kindpats)
+                self._anypats = self._anypats or _anypats(kindpats)
+                self.patternspat, pm = _buildmatch(ctx, kindpats, '$')
+                matchfns.append(pm)
 
         if not matchfns:
             m = util.always
@@ -148,6 +159,20 @@
         else: optimal roots'''
         return self._files
 
+    @propertycache
+    def _dirs(self):
+        return set(util.dirs(self._fmap)) | set(['.'])
+
+    def visitdir(self, dir):
+        '''Helps while traversing a directory tree. Returns the string 'all' if
+        the given directory and all subdirectories should be visited. Otherwise
+        returns True or False indicating whether the given directory should be
+        visited. If 'all' is returned, calling this method on a subdirectory
+        gives an undefined result.'''
+        if not self._fmap or self.exact(dir):
+            return 'all'
+        return dir in self._dirs
+
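A minimal illustration of how a tree walker is expected to consume visitdir (mirroring treemanifest._walk in manifest.py above); the helper name is made up for the example:

    def visiteddirs(m, dirs):
        '''Return the subset of dirs worth descending into for matcher m.'''
        keep = []
        for d in dirs:
            visit = m.visitdir(d)
            if visit == 'all':
                keep.append(d)   # descend; no further matcher checks needed
            elif visit:
                keep.append(d)   # descend, but keep consulting the matcher
            # a falsy result means the whole subtree can be pruned
        return keep
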
     def exact(self, f):
         '''Returns True if f is in .files().'''
         return f in self._fmap
@@ -161,6 +186,34 @@
         - optimization might be possible and necessary.'''
         return self._always
 
+    def isexact(self):
+        return self.matchfn == self.exact
+
+    def _normalize(self, patterns, default, root, cwd, auditor):
+        '''Convert 'kind:pat' from the patterns list to tuples with kind and
+        normalized and rooted patterns and with listfiles expanded.'''
+        kindpats = []
+        for kind, pat in [_patsplit(p, default) for p in patterns]:
+            if kind in ('glob', 'relpath'):
+                pat = pathutil.canonpath(root, cwd, pat, auditor)
+            elif kind in ('relglob', 'path'):
+                pat = util.normpath(pat)
+            elif kind in ('listfile', 'listfile0'):
+                try:
+                    files = util.readfile(pat)
+                    if kind == 'listfile0':
+                        files = files.split('\0')
+                    else:
+                        files = files.splitlines()
+                    files = [f for f in files if f]
+                except EnvironmentError:
+                    raise util.Abort(_("unable to read file list (%s)") % pat)
+                kindpats += self._normalize(files, default, root, cwd, auditor)
+                continue
+            # else: re or relre - which cannot be normalized
+            kindpats.append((kind, pat))
+        return kindpats
+
 def exact(root, cwd, files):
     return match(root, cwd, files, exact=True)
 
@@ -220,6 +273,34 @@
     def rel(self, f):
         return self._matcher.rel(self._path + "/" + f)
 
+class icasefsmatcher(match):
+    """A matcher for wdir on case insensitive filesystems, which normalizes the
+    given patterns to the case in the filesystem.
+    """
+
+    def __init__(self, root, cwd, patterns, include, exclude, default, auditor,
+                 ctx):
+        init = super(icasefsmatcher, self).__init__
+        self._dsnormalize = ctx.repo().dirstate.normalize
+
+        init(root, cwd, patterns, include, exclude, default, auditor=auditor,
+             ctx=ctx)
+
+        # m.exact(file) must be based off of the actual user input, otherwise
+        # inexact case matches are treated as exact, and not noted without -v.
+        if self._files:
+            self._fmap = set(_roots(self._kp))
+
+    def _normalize(self, patterns, default, root, cwd, auditor):
+        self._kp = super(icasefsmatcher, self)._normalize(patterns, default,
+                                                          root, cwd, auditor)
+        kindpats = []
+        for kind, pats in self._kp:
+            if kind not in ('re', 'relre'):  # regex can't be normalized
+                pats = self._dsnormalize(pats)
+            kindpats.append((kind, pats))
+        return kindpats
+
 def patkind(pattern, default=None):
     '''If pattern is 'kind:pat' with a known kind, return kind.'''
     return _patsplit(pattern, default)[0]
@@ -370,31 +451,6 @@
                 raise util.Abort(_("invalid pattern (%s): %s") % (k, p))
         raise util.Abort(_("invalid pattern"))
 
-def _normalize(patterns, default, root, cwd, auditor):
-    '''Convert 'kind:pat' from the patterns list to tuples with kind and
-    normalized and rooted patterns and with listfiles expanded.'''
-    kindpats = []
-    for kind, pat in [_patsplit(p, default) for p in patterns]:
-        if kind in ('glob', 'relpath'):
-            pat = pathutil.canonpath(root, cwd, pat, auditor)
-        elif kind in ('relglob', 'path'):
-            pat = util.normpath(pat)
-        elif kind in ('listfile', 'listfile0'):
-            try:
-                files = util.readfile(pat)
-                if kind == 'listfile0':
-                    files = files.split('\0')
-                else:
-                    files = files.splitlines()
-                files = [f for f in files if f]
-            except EnvironmentError:
-                raise util.Abort(_("unable to read file list (%s)") % pat)
-            kindpats += _normalize(files, default, root, cwd, auditor)
-            continue
-        # else: re or relre - which cannot be normalized
-        kindpats.append((kind, pat))
-    return kindpats
-
 def _roots(kindpats):
     '''return roots and exact explicitly listed files from patterns
 
--- a/mercurial/mdiff.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/mdiff.py	Thu Apr 16 20:57:51 2015 -0500
@@ -367,6 +367,9 @@
 def trivialdiffheader(length):
     return struct.pack(">lll", 0, 0, length)
 
+def replacediffheader(oldlen, newlen):
+    return struct.pack(">lll", 0, oldlen, newlen)
+
 patches = mpatch.patches
 patchedsize = mpatch.patchedsize
 textdiff = bdiff.bdiff
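A hedged sketch of how the new header is meant to be used, assuming mpatch's usual (start, end, length)-plus-data delta encoding: prefixing a new fulltext with replacediffheader(len(old), len(new)) yields a delta that replaces the entire previous revision.

    old, new = "spam\n", "eggs\nham\n"
    delta = replacediffheader(len(old), len(new)) + new
    # mpatch.patches(old, [delta]) should reproduce 'new'
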
--- a/mercurial/merge.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/merge.py	Thu Apr 16 20:57:51 2015 -0500
@@ -1045,9 +1045,7 @@
                 raise util.Abort(_("uncommitted changes"),
                                  hint=_("use 'hg status' to list changes"))
             for s in sorted(wc.substate):
-                if wc.sub(s).dirty():
-                    raise util.Abort(_("uncommitted changes in "
-                                       "subrepository '%s'") % s)
+                wc.sub(s).bailifchanged()
 
         elif not overwrite:
             if p1 == p2: # no-op update
@@ -1186,9 +1184,17 @@
     labels - merge labels eg ['local', 'graft']
 
     """
+    # If we're grafting a descendant onto an ancestor, be sure to pass
+    # mergeancestor=True to update. This does two things: 1) allows the merge if
+    # the destination is the same as the parent of the ctx (so we can use graft
+    # to copy commits), and 2) informs update that the incoming changes are
+    # newer than the destination so it doesn't prompt about "remote changed foo
+    # which local deleted".
+    mergeancestor = repo.changelog.isancestor(repo['.'].node(), ctx.node())
 
     stats = update(repo, ctx.node(), True, True, False, pctx.node(),
-                   labels=labels)
+                   mergeancestor=mergeancestor, labels=labels)
+
     # drop the second merge parent
     repo.dirstate.beginparentchange()
     repo.setparents(repo['.'].node(), nullid)
--- a/mercurial/namespaces.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/namespaces.py	Thu Apr 16 20:57:51 2015 -0500
@@ -142,7 +142,7 @@
                  is used
         colorname: the name to use for colored log output; if not specified
                    logname is used
-        logfmt: the format to use for (l10n-ed) log output; if not specified
+        logfmt: the format to use for (i18n-ed) log output; if not specified
                 it is composed from logname
         listnames: function to list all names
         namemap: function that inputs a node, output name(s)
--- a/mercurial/obsolete.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/obsolete.py	Thu Apr 16 20:57:51 2015 -0500
@@ -68,15 +68,14 @@
 
 """
 import struct
-import util, base85, node
+import util, base85, node, parsers
 import phases
 from i18n import _
 
 _pack = struct.pack
 _unpack = struct.unpack
 _calcsize = struct.calcsize
-
-_SEEK_END = 2 # os.SEEK_END was introduced in Python 2.5
+propertycache = util.propertycache
 
 # the obsolete feature is not mature enough to be enabled by default.
 # you have to rely on a third party extension to enable this.
@@ -146,7 +145,7 @@
 _fm0fsize = _calcsize(_fm0fixed)
 _fm0fnodesize = _calcsize(_fm0node)
 
-def _fm0readmarkers(data, off=0):
+def _fm0readmarkers(data, off):
     # Loop on markers
     l = len(data)
     while off + _fm0fsize <= l:
@@ -285,7 +284,7 @@
 _fm1metapair = 'BB'
 _fm1metapairsize = _calcsize('BB')
 
-def _fm1readmarkers(data, off=0):
+def _fm1purereadmarkers(data, off):
     # make some global constants local for performance
     noneflag = _fm1parentnone
     sha2flag = usingsha256
@@ -301,6 +300,7 @@
     # Loop on markers
     stop = len(data) - _fm1fsize
     ufixed = util.unpacker(_fm1fixed)
+
     while off <= stop:
         # read fixed part
         o1 = off + fsize
@@ -395,6 +395,13 @@
         data.append(value)
     return ''.join(data)
 
+def _fm1readmarkers(data, off):
+    native = getattr(parsers, 'fm1readmarkers', None)
+    if not native:
+        return _fm1purereadmarkers(data, off)
+    stop = len(data) - _fm1fsize
+    return native(data, off, stop)
+
 # mapping to read/write various marker formats
 # <version> -> (decoder, encoder)
 formats = {_fm0version: (_fm0readmarkers, _fm0encodeonemarker),
@@ -462,15 +469,35 @@
         """The flags field of the marker"""
         return self._data[2]
 
-def _checkinvalidmarkers(obsstore):
+@util.nogc
+def _addsuccessors(successors, markers):
+    for mark in markers:
+        successors.setdefault(mark[0], set()).add(mark)
+
+@util.nogc
+def _addprecursors(precursors, markers):
+    for mark in markers:
+        for suc in mark[1]:
+            precursors.setdefault(suc, set()).add(mark)
+
+@util.nogc
+def _addchildren(children, markers):
+    for mark in markers:
+        parents = mark[5]
+        if parents is not None:
+            for p in parents:
+                children.setdefault(p, set()).add(mark)
+
+def _checkinvalidmarkers(markers):
     """search for marker with invalid data and raise error if needed
 
    Exists as a separate function to allow the evolve extension more
    subtle handling.
     """
-    if node.nullid in obsstore.precursors:
-        raise util.Abort(_('bad obsolescence marker detected: '
-                           'invalid successors nullid'))
+    for mark in markers:
+        if node.nullid in mark[1]:
+            raise util.Abort(_('bad obsolescence marker detected: '
+                               'invalid successors nullid'))
 
 class obsstore(object):
     """Store obsolete markers
@@ -494,16 +521,13 @@
         # caches for various obsolescence related cache
         self.caches = {}
         self._all = []
-        self.precursors = {}
-        self.successors = {}
-        self.children = {}
         self.sopener = sopener
         data = sopener.tryread('obsstore')
         self._version = defaultformat
         self._readonly = readonly
         if data:
             self._version, markers = _readmarkers(data)
-            self._load(markers)
+            self._addmarkers(markers)
 
     def __iter__(self):
         return iter(self._all)
@@ -566,12 +590,6 @@
         if new:
             f = self.sopener('obsstore', 'ab')
             try:
-                # Whether the file's current position is at the begin or at
-                # the end after opening a file for appending is implementation
-                # defined. So we must seek to the end before calling tell(),
-                # or we may get a zero offset for non-zero sized files on
-                # some platforms (issue3543).
-                f.seek(0, _SEEK_END)
                 offset = f.tell()
                 transaction.add('obsstore', offset)
                 # offset == 0: new file - add the version header
@@ -581,7 +599,7 @@
                 # XXX: f.close() == filecache invalidation == obsstore rebuilt.
                 # call 'filecacheentry.refresh()'  here
                 f.close()
-            self._load(new)
+            self._addmarkers(new)
             # new marker *may* have changed several set. invalidate the cache.
             self.caches.clear()
         # records the number of new markers for the transaction hooks
@@ -596,19 +614,37 @@
         version, markers = _readmarkers(data)
         return self.add(transaction, markers)
 
-    @util.nogc
-    def _load(self, markers):
-        for mark in markers:
-            self._all.append(mark)
-            pre, sucs = mark[:2]
-            self.successors.setdefault(pre, set()).add(mark)
-            for suc in sucs:
-                self.precursors.setdefault(suc, set()).add(mark)
-            parents = mark[5]
-            if parents is not None:
-                for p in parents:
-                    self.children.setdefault(p, set()).add(mark)
-        _checkinvalidmarkers(self)
+    @propertycache
+    def successors(self):
+        successors = {}
+        _addsuccessors(successors, self._all)
+        return successors
+
+    @propertycache
+    def precursors(self):
+        precursors = {}
+        _addprecursors(precursors, self._all)
+        return precursors
+
+    @propertycache
+    def children(self):
+        children = {}
+        _addchildren(children, self._all)
+        return children
+
+    def _cached(self, attr):
+        return attr in self.__dict__
+
+    def _addmarkers(self, markers):
+        markers = list(markers) # to allow repeated iteration
+        self._all.extend(markers)
+        if self._cached('successors'):
+            _addsuccessors(self.successors, markers)
+        if self._cached('precursors'):
+            _addprecursors(self.precursors, markers)
+        if self._cached('children'):
+            _addchildren(self.children, markers)
+        _checkinvalidmarkers(markers)
 
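The pattern above, shown in isolation (a hedged, generic sketch rather than obsstore itself): indexes are built lazily on first access via propertycache and only kept up to date incrementally once they exist.

    class store(object):
        def __init__(self):
            self._all = []

        @util.propertycache
        def byprec(self):
            d = {}
            _addsuccessors(d, self._all)   # full build on first access
            return d

        def add(self, markers):
            markers = list(markers)
            self._all.extend(markers)
            if 'byprec' in self.__dict__:  # incremental update only if built
                _addsuccessors(self.byprec, markers)
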
     def relevantmarkers(self, nodes):
         """return a set of all obsolescence markers relevant to a set of nodes.
@@ -726,13 +762,13 @@
 
 def precursormarkers(ctx):
     """obsolete marker marking this changeset as a successors"""
-    for data in ctx._repo.obsstore.precursors.get(ctx.node(), ()):
-        yield marker(ctx._repo, data)
+    for data in ctx.repo().obsstore.precursors.get(ctx.node(), ()):
+        yield marker(ctx.repo(), data)
 
 def successormarkers(ctx):
     """obsolete marker making this changeset obsolete"""
-    for data in ctx._repo.obsstore.successors.get(ctx.node(), ()):
-        yield marker(ctx._repo, data)
+    for data in ctx.repo().obsstore.successors.get(ctx.node(), ()):
+        yield marker(ctx.repo(), data)
 
 def allsuccessors(obsstore, nodes, ignoreflags=0):
     """Yield node for every successor of <nodes>.
@@ -1128,8 +1164,12 @@
     for ctx in repo.set('(not public()) - obsolete()'):
         mark = obsstore.precursors.get(ctx.node(), ())
         toprocess = set(mark)
+        seen = set()
         while toprocess:
             prec = toprocess.pop()[0]
+            if prec in seen:
+                continue # emergency cycle hanging prevention
+            seen.add(prec)
             if prec not in newermap:
                 successorssets(repo, prec, newermap)
             newer = [n for n in newermap[prec] if n]
--- a/mercurial/osutil.c	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/osutil.c	Thu Apr 16 20:57:51 2015 -0500
@@ -24,6 +24,11 @@
 #include <unistd.h>
 #endif
 
+#ifdef __APPLE__
+#include <sys/attr.h>
+#include <sys/vnode.h>
+#endif
+
 #include "util.h"
 
 /* some platforms lack the PATH_MAX definition (eg. GNU/Hurd) */
@@ -286,7 +291,8 @@
 	return stat;
 }
 
-static PyObject *_listdir(char *path, int pathlen, int keepstat, char *skip)
+static PyObject *_listdir_stat(char *path, int pathlen, int keepstat,
+			       char *skip)
 {
 	PyObject *list, *elem, *stat = NULL, *ret = NULL;
 	char fullpath[PATH_MAX + 10];
@@ -337,7 +343,7 @@
 #else
 			strncpy(fullpath + pathlen + 1, ent->d_name,
 				PATH_MAX - pathlen);
-			fullpath[PATH_MAX] = 0;
+			fullpath[PATH_MAX] = '\0';
 			err = lstat(fullpath, &st);
 #endif
 			if (err == -1) {
@@ -391,6 +397,198 @@
 	return ret;
 }
 
+#ifdef __APPLE__
+
+typedef struct {
+	u_int32_t length;
+	attrreference_t name;
+	fsobj_type_t obj_type;
+	struct timespec mtime;
+#if __LITTLE_ENDIAN__
+	mode_t access_mask;
+	uint16_t padding;
+#else
+	uint16_t padding;
+	mode_t access_mask;
+#endif
+	off_t size;
+} __attribute__((packed)) attrbuf_entry;
+
+int attrkind(attrbuf_entry *entry)
+{
+	switch (entry->obj_type) {
+	case VREG: return S_IFREG;
+	case VDIR: return S_IFDIR;
+	case VLNK: return S_IFLNK;
+	case VBLK: return S_IFBLK;
+	case VCHR: return S_IFCHR;
+	case VFIFO: return S_IFIFO;
+	case VSOCK: return S_IFSOCK;
+	}
+	return -1;
+}
+
+/* get these many entries at a time */
+#define LISTDIR_BATCH_SIZE 50
+
+static PyObject *_listdir_batch(char *path, int pathlen, int keepstat,
+				char *skip, bool *fallback)
+{
+	PyObject *list, *elem, *stat = NULL, *ret = NULL;
+	int kind, err;
+	unsigned long index;
+	unsigned int count, old_state, new_state;
+	bool state_seen = false;
+	attrbuf_entry *entry;
+	/* from the getattrlist(2) man page: a path can be no longer than
+	   (NAME_MAX * 3 + 1) bytes. Also, "The getattrlist() function will
+	   silently truncate attribute data if attrBufSize is too small." So
+	   pass in a buffer big enough for the worst case. */
+	char attrbuf[LISTDIR_BATCH_SIZE * (sizeof(attrbuf_entry) + NAME_MAX * 3 + 1)];
+	unsigned int basep_unused;
+
+	struct stat st;
+	int dfd = -1;
+
+	/* these must match the attrbuf_entry struct, otherwise you'll end up
+	   with garbage */
+	struct attrlist requested_attr = {0};
+	requested_attr.bitmapcount = ATTR_BIT_MAP_COUNT;
+	requested_attr.commonattr = (ATTR_CMN_NAME | ATTR_CMN_OBJTYPE |
+				     ATTR_CMN_MODTIME | ATTR_CMN_ACCESSMASK);
+	requested_attr.fileattr = ATTR_FILE_TOTALSIZE;
+
+	*fallback = false;
+
+	if (pathlen >= PATH_MAX) {
+		errno = ENAMETOOLONG;
+		PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
+		goto error_value;
+	}
+
+	dfd = open(path, O_RDONLY);
+	if (dfd == -1) {
+		PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
+		goto error_value;
+	}
+
+	list = PyList_New(0);
+	if (!list)
+		goto error_dir;
+
+	do {
+		count = LISTDIR_BATCH_SIZE;
+		err = getdirentriesattr(dfd, &requested_attr, &attrbuf,
+					sizeof(attrbuf), &count, &basep_unused,
+					&new_state, 0);
+		if (err < 0) {
+			if (errno == ENOTSUP) {
+				/* We're on a filesystem that doesn't support
+				   getdirentriesattr. Fall back to the
+				   stat-based implementation. */
+				*fallback = true;
+			} else
+				PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
+			goto error;
+		}
+
+		if (!state_seen) {
+			old_state = new_state;
+			state_seen = true;
+		} else if (old_state != new_state) {
+			/* There's an edge case with getdirentriesattr. Consider
+			   the following initial list of files:
+
+			   a
+			   b
+			   <--
+			   c
+			   d
+
+			   If the iteration is paused at the arrow, and b is
+			   deleted before it is resumed, getdirentriesattr will
+			   not return d at all!  Ordinarily we're expected to
+			   restart the iteration from the beginning. To avoid
+			   getting stuck in a retry loop here, fall back to
+			   stat. */
+			*fallback = true;
+			goto error;
+		}
+
+		entry = (attrbuf_entry *)attrbuf;
+
+		for (index = 0; index < count; index++) {
+			char *filename = ((char *)&entry->name) +
+				entry->name.attr_dataoffset;
+
+			if (!strcmp(filename, ".") || !strcmp(filename, ".."))
+				continue;
+
+			kind = attrkind(entry);
+			if (kind == -1) {
+				PyErr_Format(PyExc_OSError,
+					     "unknown object type %u for file "
+					     "%s%s!",
+					     entry->obj_type, path, filename);
+				goto error;
+			}
+
+			/* quit early? */
+			if (skip && kind == S_IFDIR && !strcmp(filename, skip)) {
+				ret = PyList_New(0);
+				goto error;
+			}
+
+			if (keepstat) {
+				/* from the getattrlist(2) man page: "Only the
+				   permission bits ... are valid". */
+				st.st_mode = (entry->access_mask & ~S_IFMT) | kind;
+				st.st_mtime = entry->mtime.tv_sec;
+				st.st_size = entry->size;
+				stat = makestat(&st);
+				if (!stat)
+					goto error;
+				elem = Py_BuildValue("siN", filename, kind, stat);
+			} else
+				elem = Py_BuildValue("si", filename, kind);
+			if (!elem)
+				goto error;
+			stat = NULL;
+
+			PyList_Append(list, elem);
+			Py_DECREF(elem);
+
+			entry = (attrbuf_entry *)((char *)entry + entry->length);
+		}
+	} while (err == 0);
+
+	ret = list;
+	Py_INCREF(ret);
+
+error:
+	Py_DECREF(list);
+	Py_XDECREF(stat);
+error_dir:
+	close(dfd);
+error_value:
+	return ret;
+}
+
+#endif /* __APPLE__ */
+
+static PyObject *_listdir(char *path, int pathlen, int keepstat, char *skip)
+{
+#ifdef __APPLE__
+	PyObject *ret;
+	bool fallback = false;
+
+	ret = _listdir_batch(path, pathlen, keepstat, skip, &fallback);
+	if (ret != NULL || !fallback)
+		return ret;
+#endif
+	return _listdir_stat(path, pathlen, keepstat, skip);
+}
+
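A hedged sketch of the Python-level call that exercises this code path on OS X; the keyword name is assumed from the existing listdir entry point, and the batch variant only fills in st_mode, st_mtime and st_size:

    from mercurial import osutil
    for entry in osutil.listdir('.', stat=True):
        name, kind, st = entry          # kind is an S_IF* constant
        print name, kind, st.st_size
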
 static PyObject *statfiles(PyObject *self, PyObject *args)
 {
 	PyObject *names, *stats;
--- a/mercurial/parsers.c	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/parsers.c	Thu Apr 16 20:57:51 2015 -0500
@@ -56,6 +56,27 @@
 	'\x78', '\x79', '\x7a', '\x7b', '\x7c', '\x7d', '\x7e', '\x7f'
 };
 
+static char uppertable[128] = {
+	'\x00', '\x01', '\x02', '\x03', '\x04', '\x05', '\x06', '\x07',
+	'\x08', '\x09', '\x0a', '\x0b', '\x0c', '\x0d', '\x0e', '\x0f',
+	'\x10', '\x11', '\x12', '\x13', '\x14', '\x15', '\x16', '\x17',
+	'\x18', '\x19', '\x1a', '\x1b', '\x1c', '\x1d', '\x1e', '\x1f',
+	'\x20', '\x21', '\x22', '\x23', '\x24', '\x25', '\x26', '\x27',
+	'\x28', '\x29', '\x2a', '\x2b', '\x2c', '\x2d', '\x2e', '\x2f',
+	'\x30', '\x31', '\x32', '\x33', '\x34', '\x35', '\x36', '\x37',
+	'\x38', '\x39', '\x3a', '\x3b', '\x3c', '\x3d', '\x3e', '\x3f',
+	'\x40', '\x41', '\x42', '\x43', '\x44', '\x45', '\x46', '\x47',
+	'\x48', '\x49', '\x4a', '\x4b', '\x4c', '\x4d', '\x4e', '\x4f',
+	'\x50', '\x51', '\x52', '\x53', '\x54', '\x55', '\x56', '\x57',
+	'\x58', '\x59', '\x5a', '\x5b', '\x5c', '\x5d', '\x5e', '\x5f',
+	'\x60',
+		'\x41', '\x42', '\x43', '\x44', '\x45', '\x46', '\x47', /* a-g */
+	'\x48', '\x49', '\x4a', '\x4b', '\x4c', '\x4d', '\x4e', '\x4f', /* h-o */
+	'\x50', '\x51', '\x52', '\x53', '\x54', '\x55', '\x56', '\x57', /* p-w */
+	'\x58', '\x59', '\x5a', 					/* x-z */
+				'\x7b', '\x7c', '\x7d', '\x7e', '\x7f'
+};
+
 static inline int hexdigit(const char *p, Py_ssize_t off)
 {
 	int8_t val = hextable[(unsigned char)p[off]];
@@ -71,7 +92,7 @@
 /*
  * Turn a hex-encoded string into binary.
  */
-static PyObject *unhexlify(const char *str, int len)
+PyObject *unhexlify(const char *str, int len)
 {
 	PyObject *ret;
 	char *d;
@@ -93,14 +114,17 @@
 	return ret;
 }
 
-static PyObject *asciilower(PyObject *self, PyObject *args)
+static inline PyObject *_asciitransform(PyObject *str_obj,
+					const char table[128],
+					PyObject *fallback_fn)
 {
 	char *str, *newstr;
-	int i, len;
+	Py_ssize_t i, len;
 	PyObject *newobj = NULL;
+	PyObject *ret = NULL;
 
-	if (!PyArg_ParseTuple(args, "s#", &str, &len))
-		goto quit;
+	str = PyBytes_AS_STRING(str_obj);
+	len = PyBytes_GET_SIZE(str_obj);
 
 	newobj = PyBytes_FromStringAndSize(NULL, len);
 	if (!newobj)
@@ -111,19 +135,120 @@
 	for (i = 0; i < len; i++) {
 		char c = str[i];
 		if (c & 0x80) {
-			PyObject *err = PyUnicodeDecodeError_Create(
-				"ascii", str, len, i, (i + 1),
-				"unexpected code byte");
-			PyErr_SetObject(PyExc_UnicodeDecodeError, err);
-			Py_XDECREF(err);
+			if (fallback_fn != NULL) {
+				ret = PyObject_CallFunctionObjArgs(fallback_fn,
+					str_obj, NULL);
+			} else {
+				PyObject *err = PyUnicodeDecodeError_Create(
+					"ascii", str, len, i, (i + 1),
+					"unexpected code byte");
+				PyErr_SetObject(PyExc_UnicodeDecodeError, err);
+				Py_XDECREF(err);
+			}
 			goto quit;
 		}
-		newstr[i] = lowertable[(unsigned char)c];
+		newstr[i] = table[(unsigned char)c];
 	}
 
-	return newobj;
+	ret = newobj;
+	Py_INCREF(ret);
 quit:
 	Py_XDECREF(newobj);
+	return ret;
+}
+
+static PyObject *asciilower(PyObject *self, PyObject *args)
+{
+	PyObject *str_obj;
+	if (!PyArg_ParseTuple(args, "O!:asciilower", &PyBytes_Type, &str_obj))
+		return NULL;
+	return _asciitransform(str_obj, lowertable, NULL);
+}
+
+static PyObject *asciiupper(PyObject *self, PyObject *args)
+{
+	PyObject *str_obj;
+	if (!PyArg_ParseTuple(args, "O!:asciiupper", &PyBytes_Type, &str_obj))
+		return NULL;
+	return _asciitransform(str_obj, uppertable, NULL);
+}
+
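A rough usage sketch of the new asciiupper entry point (illustration only, not part of the patch; assumes the C extension is importable as mercurial.parsers): like asciilower, it raises UnicodeDecodeError on any byte with the high bit set, so callers keep a slower Unicode-aware fallback, as the darwin normcase in posix.py does for asciilower further down in this change.

from mercurial import parsers

def upperfold(path):
    try:
        # fast path: pure-ASCII bytes are folded via the C lookup table
        return parsers.asciiupper(path)
    except UnicodeDecodeError:
        # hypothetical fallback for non-ASCII names; real callers use
        # their platform normcase fallback instead
        return path.decode('utf-8').upper().encode('utf-8')
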
+static PyObject *make_file_foldmap(PyObject *self, PyObject *args)
+{
+	PyObject *dmap, *spec_obj, *normcase_fallback;
+	PyObject *file_foldmap = NULL;
+	enum normcase_spec spec;
+	PyObject *k, *v;
+	dirstateTupleObject *tuple;
+	Py_ssize_t pos = 0;
+	const char *table;
+
+	if (!PyArg_ParseTuple(args, "O!O!O!:make_file_foldmap",
+			      &PyDict_Type, &dmap,
+			      &PyInt_Type, &spec_obj,
+			      &PyFunction_Type, &normcase_fallback))
+		goto quit;
+
+	spec = (int)PyInt_AS_LONG(spec_obj);
+	switch (spec) {
+	case NORMCASE_LOWER:
+		table = lowertable;
+		break;
+	case NORMCASE_UPPER:
+		table = uppertable;
+		break;
+	case NORMCASE_OTHER:
+		table = NULL;
+		break;
+	default:
+		PyErr_SetString(PyExc_TypeError, "invalid normcasespec");
+		goto quit;
+	}
+
+#if PY_VERSION_HEX >= 0x02060000
+	/* _PyDict_NewPresized expects a minused parameter, but it actually
+	   creates a dictionary that's the nearest power of two bigger than the
+	   parameter. For example, with the initial minused = 1000, the
+	   dictionary created has size 1024. Of course in a lot of cases that
+	   can be greater than the maximum load factor Python's dict object
+	   expects (= 2/3), so as soon as we cross the threshold we'll resize
+	   anyway. So create a dictionary that's 3/2 the size. Also add some
+	   more to deal with additions outside this function. */
+	file_foldmap = _PyDict_NewPresized((PyDict_Size(dmap) / 5) * 8);
+#else
+	file_foldmap = PyDict_New();
+#endif
+
+	if (file_foldmap == NULL)
+		goto quit;
+
+	while (PyDict_Next(dmap, &pos, &k, &v)) {
+		if (!dirstate_tuple_check(v)) {
+			PyErr_SetString(PyExc_TypeError,
+					"expected a dirstate tuple");
+			goto quit;
+		}
+
+		tuple = (dirstateTupleObject *)v;
+		if (tuple->state != 'r') {
+			PyObject *normed;
+			if (table != NULL) {
+				normed = _asciitransform(k, table,
+					normcase_fallback);
+			} else {
+				normed = PyObject_CallFunctionObjArgs(
+					normcase_fallback, k, NULL);
+			}
+
+			if (normed == NULL)
+				goto quit;
+			if (PyDict_SetItem(file_foldmap, normed, k) == -1)
+				goto quit;
+		}
+	}
+	return file_foldmap;
+quit:
+	Py_XDECREF(file_foldmap);
 	return NULL;
 }
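A rough pure-Python equivalent of what make_file_foldmap computes (illustration only; the function and parameter names below are descriptive, not from the source): for every dirstate entry not marked removed, map the case-folded name back to the stored name, taking the table-driven path for ASCII names and the supplied normcase fallback otherwise.

def make_file_foldmap_py(dmap, asciifold, normcasefallback):
    # dmap: {filename: dirstate tuple}, asciifold: e.g. str.lower or str.upper,
    # normcasefallback: full normcase function for non-ASCII names
    foldmap = {}
    for f, entry in dmap.iteritems():
        if entry[0] == 'r':          # skip entries marked removed
            continue
        try:
            f.decode('ascii')        # ASCII names take the fast, table-driven path
            normed = asciifold(f)
        except UnicodeDecodeError:
            normed = normcasefallback(f)
        foldmap[normed] = f
    return foldmap
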
 
@@ -911,6 +1036,111 @@
 	}
 }
 
+static Py_ssize_t add_roots_get_min(indexObject *self, PyObject *list,
+                                    Py_ssize_t marker, char *phases)
+{
+	PyObject *iter = NULL;
+	PyObject *iter_item = NULL;
+	Py_ssize_t min_idx = index_length(self) + 1;
+	long iter_item_long;
+
+	if (PyList_GET_SIZE(list) != 0) {
+		iter = PyObject_GetIter(list);
+		if (iter == NULL)
+			return -2;
+		while ((iter_item = PyIter_Next(iter)))
+		{
+			iter_item_long = PyInt_AS_LONG(iter_item);
+			Py_DECREF(iter_item);
+			if (iter_item_long < min_idx)
+				min_idx = iter_item_long;
+			phases[iter_item_long] = marker;
+		}
+		Py_DECREF(iter);
+	}
+
+	return min_idx;
+}
+
+static inline void set_phase_from_parents(char *phases, int parent_1,
+                                          int parent_2, Py_ssize_t i)
+{
+	if (parent_1 >= 0 && phases[parent_1] > phases[i])
+		phases[i] = phases[parent_1];
+	if (parent_2 >= 0 && phases[parent_2] > phases[i])
+		phases[i] = phases[parent_2];
+}
+
+static PyObject *compute_phases(indexObject *self, PyObject *args)
+{
+	PyObject *roots = Py_None;
+	PyObject *phaseslist = NULL;
+	PyObject *phaseroots = NULL;
+	PyObject *rev = NULL;
+	PyObject *p1 = NULL;
+	PyObject *p2 = NULL;
+	Py_ssize_t addlen = self->added ? PyList_GET_SIZE(self->added) : 0;
+	Py_ssize_t len = index_length(self) - 1;
+	Py_ssize_t numphase = 0;
+	Py_ssize_t minrevallphases = 0;
+	Py_ssize_t minrevphase = 0;
+	Py_ssize_t i = 0;
+	int parent_1, parent_2;
+	char *phases = NULL;
+	const char *data;
+
+	if (!PyArg_ParseTuple(args, "O", &roots))
+		goto release_none;
+	if (roots == NULL || !PyList_Check(roots))
+		goto release_none;
+
+	phases = calloc(len, 1); /* phase per rev: {0: public, 1: draft, 2: secret} */
+	if (phases == NULL)
+		goto release_none;
+	/* Put the phase information of all the roots in phases */
+	numphase = PyList_GET_SIZE(roots)+1;
+	minrevallphases = len + 1;
+	for (i = 0; i < numphase-1; i++) {
+		phaseroots = PyList_GET_ITEM(roots, i);
+		if (!PyList_Check(phaseroots))
+			goto release_phases;
+		minrevphase = add_roots_get_min(self, phaseroots, i+1, phases);
+		if (minrevphase == -2) /* Error from add_roots_get_min */
+			goto release_phases;
+		minrevallphases = MIN(minrevallphases, minrevphase);
+	}
+	/* Propagate the phase information from the roots to the revs */
+	if (minrevallphases != -1) {
+		for (i = minrevallphases; i < self->raw_length; i++) {
+			data = index_deref(self, i);
+			set_phase_from_parents(phases, getbe32(data+24), getbe32(data+28), i);
+		}
+		for (i = 0; i < addlen; i++) {
+			rev = PyList_GET_ITEM(self->added, i);
+			p1 = PyTuple_GET_ITEM(rev, 5);
+			p2 = PyTuple_GET_ITEM(rev, 6);
+			if (!PyInt_Check(p1) || !PyInt_Check(p2)) {
+				PyErr_SetString(PyExc_TypeError, "revlog parents are invalid");
+				goto release_phases;
+			}
+			parent_1 = (int)PyInt_AS_LONG(p1);
+			parent_2 = (int)PyInt_AS_LONG(p2);
+			set_phase_from_parents(phases, parent_1, parent_2, i+self->raw_length);
+		}
+	}
+	/* Transform phase list to a python list */
+	phaseslist = PyList_New(len);
+	if (phaseslist == NULL)
+		goto release_phases;
+	for (i = 0; i < len; i++)
+		PyList_SET_ITEM(phaseslist, i, PyInt_FromLong(phases[i]));
+
+release_phases:
+	free(phases);
+release_none:
+	return phaseslist;
+}
+
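The same algorithm in rough Python form (illustration only; the pure-Python fallback added to phases.py later in this changeset is the canonical version): stamp each phase's roots with their phase number, then make one forward pass from the smallest root, raising every revision to the maximum phase of its parents.

def computephases_py(parentrevs, nrevs, roots):
    # parentrevs(rev) -> (p1, p2); roots: one list of root revs per
    # tracked phase (index 0 = draft, index 1 = secret)
    phases = [0] * nrevs                  # 0 = public
    minroot = nrevs
    for i, revs in enumerate(roots):
        for r in revs:
            phases[r] = i + 1
            minroot = min(minroot, r)
    for rev in xrange(minroot, nrevs):    # propagate to descendants
        for p in parentrevs(rev):
            if p >= 0 and phases[p] > phases[rev]:
                phases[rev] = phases[p]
    return phases
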
 static PyObject *index_headrevs(indexObject *self, PyObject *args)
 {
 	Py_ssize_t i, len, addlen;
@@ -1102,6 +1332,11 @@
 static int nt_new(indexObject *self)
 {
 	if (self->ntlength == self->ntcapacity) {
+		if (self->ntcapacity >= INT_MAX / (sizeof(nodetree) * 2)) {
+			PyErr_SetString(PyExc_MemoryError,
+					"overflow in nt_new");
+			return -1;
+		}
 		self->ntcapacity *= 2;
 		self->nt = realloc(self->nt,
 				   self->ntcapacity * sizeof(nodetree));
@@ -1163,7 +1398,7 @@
 static int nt_init(indexObject *self)
 {
 	if (self->nt == NULL) {
-		if (self->raw_length > INT_MAX) {
+		if (self->raw_length > INT_MAX / sizeof(nodetree)) {
 			PyErr_SetString(PyExc_ValueError, "overflow in nt_init");
 			return -1;
 		}
@@ -1676,108 +1911,6 @@
 }
 
 /*
- * Given a (possibly overlapping) set of revs, return the greatest
- * common ancestors: those with the longest path to the root.
- */
-static PyObject *index_ancestors(indexObject *self, PyObject *args)
-{
-	PyObject *ret = NULL, *gca = NULL;
-	Py_ssize_t argcount, i, len;
-	bitmask repeat = 0;
-	int revcount = 0;
-	int *revs;
-
-	argcount = PySequence_Length(args);
-	revs = malloc(argcount * sizeof(*revs));
-	if (argcount > 0 && revs == NULL)
-		return PyErr_NoMemory();
-	len = index_length(self) - 1;
-
-	for (i = 0; i < argcount; i++) {
-		static const int capacity = 24;
-		PyObject *obj = PySequence_GetItem(args, i);
-		bitmask x;
-		long val;
-
-		if (!PyInt_Check(obj)) {
-			PyErr_SetString(PyExc_TypeError,
-					"arguments must all be ints");
-			Py_DECREF(obj);
-			goto bail;
-		}
-		val = PyInt_AsLong(obj);
-		Py_DECREF(obj);
-		if (val == -1) {
-			ret = PyList_New(0);
-			goto done;
-		}
-		if (val < 0 || val >= len) {
-			PyErr_SetString(PyExc_IndexError,
-					"index out of range");
-			goto bail;
-		}
-		/* this cheesy bloom filter lets us avoid some more
-		 * expensive duplicate checks in the common set-is-disjoint
-		 * case */
-		x = 1ull << (val & 0x3f);
-		if (repeat & x) {
-			int k;
-			for (k = 0; k < revcount; k++) {
-				if (val == revs[k])
-					goto duplicate;
-			}
-		}
-		else repeat |= x;
-		if (revcount >= capacity) {
-			PyErr_Format(PyExc_OverflowError,
-				     "bitset size (%d) > capacity (%d)",
-				     revcount, capacity);
-			goto bail;
-		}
-		revs[revcount++] = (int)val;
-	duplicate:;
-	}
-
-	if (revcount == 0) {
-		ret = PyList_New(0);
-		goto done;
-	}
-	if (revcount == 1) {
-		PyObject *obj;
-		ret = PyList_New(1);
-		if (ret == NULL)
-			goto bail;
-		obj = PyInt_FromLong(revs[0]);
-		if (obj == NULL)
-			goto bail;
-		PyList_SET_ITEM(ret, 0, obj);
-		goto done;
-	}
-
-	gca = find_gca_candidates(self, revs, revcount);
-	if (gca == NULL)
-		goto bail;
-
-	if (PyList_GET_SIZE(gca) <= 1) {
-		ret = gca;
-		Py_INCREF(gca);
-	}
-	else ret = find_deepest(self, gca);
-
-done:
-	free(revs);
-	Py_XDECREF(gca);
-
-	return ret;
-
-bail:
-	free(revs);
-	Py_XDECREF(gca);
-	Py_XDECREF(ret);
-	return NULL;
-}
-
-/*
  * Given a (possibly overlapping) set of revs, return all the
  * common ancestors heads: heads(::args[0] and ::a[1] and ...)
  */
@@ -1871,6 +2004,24 @@
 }
 
 /*
+ * Given a (possibly overlapping) set of revs, return the greatest
+ * common ancestors: those with the longest path to the root.
+ */
+static PyObject *index_ancestors(indexObject *self, PyObject *args)
+{
+	PyObject *gca = index_commonancestorsheads(self, args);
+	if (gca == NULL)
+		return NULL;
+
+	if (PyList_GET_SIZE(gca) <= 1) {
+		Py_INCREF(gca);
+		return gca;
+	}
+
+	return find_deepest(self, gca);
+}
+
+/*
  * Invalidate any trie entries introduced by added revs.
  */
 static void nt_invalidate_added(indexObject *self, Py_ssize_t start)
@@ -2127,6 +2278,8 @@
 	 "clear the index caches"},
 	{"get", (PyCFunction)index_m_get, METH_VARARGS,
 	 "get an index entry"},
+	{"computephases", (PyCFunction)compute_phases, METH_VARARGS,
+		"compute phases"},
 	{"headrevs", (PyCFunction)index_headrevs, METH_VARARGS,
 	 "get head revisions"}, /* Can do filtering since 3.2 */
 	{"headrevsfiltered", (PyCFunction)index_headrevs, METH_VARARGS,
@@ -2230,6 +2383,157 @@
 	return NULL;
 }
 
+#define BUMPED_FIX 1
+#define USING_SHA_256 2
+
+static PyObject *readshas(
+	const char *source, unsigned char num, Py_ssize_t hashwidth)
+{
+	int i;
+	PyObject *list = PyTuple_New(num);
+	if (list == NULL) {
+		return NULL;
+	}
+	for (i = 0; i < num; i++) {
+		PyObject *hash = PyString_FromStringAndSize(source, hashwidth);
+		if (hash == NULL) {
+			Py_DECREF(list);
+			return NULL;
+		}
+		PyTuple_SetItem(list, i, hash);
+		source += hashwidth;
+	}
+	return list;
+}
+
+static PyObject *fm1readmarker(const char *data, uint32_t *msize)
+{
+	const char *meta;
+
+	double mtime;
+	int16_t tz;
+	uint16_t flags;
+	unsigned char nsuccs, nparents, nmetadata;
+	Py_ssize_t hashwidth = 20;
+
+	PyObject *prec = NULL, *parents = NULL, *succs = NULL;
+	PyObject *metadata = NULL, *ret = NULL;
+	int i;
+
+	*msize = getbe32(data);
+	data += 4;
+	mtime = getbefloat64(data);
+	data += 8;
+	tz = getbeint16(data);
+	data += 2;
+	flags = getbeuint16(data);
+	data += 2;
+
+	if (flags & USING_SHA_256) {
+		hashwidth = 32;
+	}
+
+	nsuccs = (unsigned char)(*data++);
+	nparents = (unsigned char)(*data++);
+	nmetadata = (unsigned char)(*data++);
+
+	prec = PyString_FromStringAndSize(data, hashwidth);
+	data += hashwidth;
+	if (prec == NULL) {
+		goto bail;
+	}
+
+	succs = readshas(data, nsuccs, hashwidth);
+	if (succs == NULL) {
+		goto bail;
+	}
+	data += nsuccs * hashwidth;
+
+	if (nparents == 1 || nparents == 2) {
+		parents = readshas(data, nparents, hashwidth);
+		if (parents == NULL) {
+			goto bail;
+		}
+		data += nparents * hashwidth;
+	} else {
+		parents = Py_None;
+	}
+
+	meta = data + (2 * nmetadata);
+	metadata = PyTuple_New(nmetadata);
+	if (metadata == NULL) {
+		goto bail;
+	}
+	for (i = 0; i < nmetadata; i++) {
+		PyObject *tmp, *left = NULL, *right = NULL;
+		Py_ssize_t metasize = (unsigned char)(*data++);
+		left = PyString_FromStringAndSize(meta, metasize);
+		meta += metasize;
+		metasize = (unsigned char)(*data++);
+		right = PyString_FromStringAndSize(meta, metasize);
+		meta += metasize;
+		if (!left || !right) {
+			Py_XDECREF(left);
+			Py_XDECREF(right);
+			goto bail;
+		}
+		tmp = PyTuple_Pack(2, left, right);
+		Py_DECREF(left);
+		Py_DECREF(right);
+		if (!tmp) {
+			goto bail;
+		}
+		PyTuple_SetItem(metadata, i, tmp);
+	}
+	ret = Py_BuildValue("(OOHO(di)O)", prec, succs, flags,
+			    metadata, mtime, (int)tz * 60, parents);
+bail:
+	Py_XDECREF(prec);
+	Py_XDECREF(succs);
+	Py_XDECREF(metadata);
+	if (parents != Py_None)
+		Py_XDECREF(parents);
+	return ret;
+}
+
+
+static PyObject *fm1readmarkers(PyObject *self, PyObject *args) {
+	const char *data;
+	Py_ssize_t datalen;
+	/* only unsigned long because python 2.4, should be Py_ssize_t */
+	unsigned long offset, stop;
+	PyObject *markers = NULL;
+
+	/* replace kk with nn when we drop Python 2.4 */
+	if (!PyArg_ParseTuple(args, "s#kk", &data, &datalen, &offset, &stop)) {
+		return NULL;
+	}
+	data += offset;
+	markers = PyList_New(0);
+	if (!markers) {
+		return NULL;
+	}
+	while (offset < stop) {
+		uint32_t msize;
+		int error;
+		PyObject *record = fm1readmarker(data, &msize);
+		if (!record) {
+			goto bail;
+		}
+		error = PyList_Append(markers, record);
+		Py_DECREF(record);
+		if (error) {
+			goto bail;
+		}
+		data += msize;
+		offset += msize;
+	}
+	return markers;
+bail:
+	Py_DECREF(markers);
+	return NULL;
+}
+
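For reference, a rough Python sketch of the fixed-size prefix of a version-1 obsolescence marker that fm1readmarker walks (illustration of the layout only; field names are descriptive, not from the source):

import struct

def readfm1prefix(data):
    # >I record size, >d mtime (big-endian double), >h timezone, >H flags
    size, mtime, tz, flags = struct.unpack('>IdhH', data[:16])
    nsuccs, nparents, nmetadata = struct.unpack('3B', data[16:19])
    hashwidth = 32 if flags & 2 else 20   # the USING_SHA_256 flag (value 2) selects 32-byte hashes
    # the prefix is followed by: the precursor hash, nsuccs successor hashes,
    # nparents parent hashes (only present when nparents is 1 or 2),
    # 2*nmetadata length bytes, then the concatenated metadata key/value strings
    return size, mtime, tz, flags, nsuccs, nparents, nmetadata, hashwidth
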
 static char parsers_doc[] = "Efficient content parsing.";
 
 PyObject *encodedir(PyObject *self, PyObject *args);
@@ -2242,13 +2546,19 @@
 	{"parse_dirstate", parse_dirstate, METH_VARARGS, "parse a dirstate\n"},
 	{"parse_index2", parse_index2, METH_VARARGS, "parse a revlog index\n"},
 	{"asciilower", asciilower, METH_VARARGS, "lowercase an ASCII string\n"},
+	{"asciiupper", asciiupper, METH_VARARGS, "uppercase an ASCII string\n"},
+	{"make_file_foldmap", make_file_foldmap, METH_VARARGS,
+	 "make file foldmap\n"},
 	{"encodedir", encodedir, METH_VARARGS, "encodedir a path\n"},
 	{"pathencode", pathencode, METH_VARARGS, "fncache-encode a path\n"},
 	{"lowerencode", lowerencode, METH_VARARGS, "lower-encode a path\n"},
+	{"fm1readmarkers", fm1readmarkers, METH_VARARGS,
+			"parse v1 obsolete markers\n"},
 	{NULL, NULL}
 };
 
 void dirs_module_init(PyObject *mod);
+void manifest_module_init(PyObject *mod);
 
 static void module_init(PyObject *mod)
 {
@@ -2263,6 +2573,7 @@
 	PyModule_AddStringConstant(mod, "versionerrortext", versionerrortext);
 
 	dirs_module_init(mod);
+	manifest_module_init(mod);
 
 	indexType.tp_new = PyType_GenericNew;
 	if (PyType_Ready(&indexType) < 0 ||
--- a/mercurial/patch.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/patch.py	Thu Apr 16 20:57:51 2015 -0500
@@ -6,7 +6,7 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-import cStringIO, email, os, errno, re, posixpath
+import cStringIO, email, os, errno, re, posixpath, copy
 import tempfile, zlib, shutil
 # On python2.4 you have to import these by name or they fail to
 # load. This was not a problem on Python 2.7.
@@ -15,7 +15,9 @@
 
 from i18n import _
 from node import hex, short
+import cStringIO
 import base85, mdiff, scmutil, util, diffhelpers, copies, encoding, error
+import pathutil
 
 gitre = re.compile('diff --git a/(.*) b/(.*)')
 tabsplitter = re.compile(r'(\t+|[^\t]+)')
@@ -259,8 +261,17 @@
     if not diffs_seen:
         os.unlink(tmpname)
         return None, message, user, date, branch, None, None, None
-    p1 = parents and parents.pop(0) or None
-    p2 = parents and parents.pop(0) or None
+
+    if parents:
+        p1 = parents.pop(0)
+    else:
+        p1 = None
+
+    if parents:
+        p2 = parents.pop(0)
+    else:
+        p2 = None
+
     return tmpname, message, user, date, branch, nodeid, p1, p2
 
 class patchmeta(object):
@@ -804,6 +815,277 @@
         self.write_rej()
         return len(self.rej)
 
+class header(object):
+    """patch header
+    """
+    diffgit_re = re.compile('diff --git a/(.*) b/(.*)$')
+    diff_re = re.compile('diff -r .* (.*)$')
+    allhunks_re = re.compile('(?:index|deleted file) ')
+    pretty_re = re.compile('(?:new file|deleted file) ')
+    special_re = re.compile('(?:index|new|deleted|copy|rename) ')
+
+    def __init__(self, header):
+        self.header = header
+        self.hunks = []
+
+    def binary(self):
+        return util.any(h.startswith('index ') for h in self.header)
+
+    def pretty(self, fp):
+        for h in self.header:
+            if h.startswith('index '):
+                fp.write(_('this modifies a binary file (all or nothing)\n'))
+                break
+            if self.pretty_re.match(h):
+                fp.write(h)
+                if self.binary():
+                    fp.write(_('this is a binary file\n'))
+                break
+            if h.startswith('---'):
+                fp.write(_('%d hunks, %d lines changed\n') %
+                         (len(self.hunks),
+                          sum([max(h.added, h.removed) for h in self.hunks])))
+                break
+            fp.write(h)
+
+    def write(self, fp):
+        fp.write(''.join(self.header))
+
+    def allhunks(self):
+        return util.any(self.allhunks_re.match(h) for h in self.header)
+
+    def files(self):
+        match = self.diffgit_re.match(self.header[0])
+        if match:
+            fromfile, tofile = match.groups()
+            if fromfile == tofile:
+                return [fromfile]
+            return [fromfile, tofile]
+        else:
+            return self.diff_re.match(self.header[0]).groups()
+
+    def filename(self):
+        return self.files()[-1]
+
+    def __repr__(self):
+        return '<header %s>' % (' '.join(map(repr, self.files())))
+
+    def special(self):
+        return util.any(self.special_re.match(h) for h in self.header)
+
+class recordhunk(object):
+    """patch hunk
+
+    XXX shouldn't we merge this with the other hunk class?
+    """
+    maxcontext = 3
+
+    def __init__(self, header, fromline, toline, proc, before, hunk, after):
+        def trimcontext(number, lines):
+            delta = len(lines) - self.maxcontext
+            if False and delta > 0:
+                return number + delta, lines[:self.maxcontext]
+            return number, lines
+
+        self.header = header
+        self.fromline, self.before = trimcontext(fromline, before)
+        self.toline, self.after = trimcontext(toline, after)
+        self.proc = proc
+        self.hunk = hunk
+        self.added, self.removed = self.countchanges(self.hunk)
+
+    def __eq__(self, v):
+        if not isinstance(v, recordhunk):
+            return False
+
+        return ((v.hunk == self.hunk) and
+                (v.proc == self.proc) and
+                (self.fromline == v.fromline) and
+                (self.header.files() == v.header.files()))
+
+    def __hash__(self):
+        return hash((tuple(self.hunk),
+            tuple(self.header.files()),
+            self.fromline,
+            self.proc))
+
+    def countchanges(self, hunk):
+        """hunk -> (n+,n-)"""
+        add = len([h for h in hunk if h[0] == '+'])
+        rem = len([h for h in hunk if h[0] == '-'])
+        return add, rem
+
+    def write(self, fp):
+        delta = len(self.before) + len(self.after)
+        if self.after and self.after[-1] == '\\ No newline at end of file\n':
+            delta -= 1
+        fromlen = delta + self.removed
+        tolen = delta + self.added
+        fp.write('@@ -%d,%d +%d,%d @@%s\n' %
+                 (self.fromline, fromlen, self.toline, tolen,
+                  self.proc and (' ' + self.proc)))
+        fp.write(''.join(self.before + self.hunk + self.after))
+
+    pretty = write
+
+    def filename(self):
+        return self.header.filename()
+
+    def __repr__(self):
+        return '<hunk %r@%d>' % (self.filename(), self.fromline)
+
+def filterpatch(ui, headers):
+    """Interactively filter patch chunks into applied-only chunks"""
+
+    def prompt(skipfile, skipall, query, chunk):
+        """prompt query, and process base inputs
+
+        - y/n for the rest of file
+        - y/n for the rest
+        - ? (help)
+        - q (quit)
+
+        Return True/False and possibly updated skipfile and skipall.
+        """
+        newpatches = None
+        if skipall is not None:
+            return skipall, skipfile, skipall, newpatches
+        if skipfile is not None:
+            return skipfile, skipfile, skipall, newpatches
+        while True:
+            resps = _('[Ynesfdaq?]'
+                      '$$ &Yes, record this change'
+                      '$$ &No, skip this change'
+                      '$$ &Edit this change manually'
+                      '$$ &Skip remaining changes to this file'
+                      '$$ Record remaining changes to this &file'
+                      '$$ &Done, skip remaining changes and files'
+                      '$$ Record &all changes to all remaining files'
+                      '$$ &Quit, recording no changes'
+                      '$$ &? (display help)')
+            r = ui.promptchoice("%s %s" % (query, resps))
+            ui.write("\n")
+            if r == 8: # ?
+                for c, t in ui.extractchoices(resps)[1]:
+                    ui.write('%s - %s\n' % (c, t.lower()))
+                continue
+            elif r == 0: # yes
+                ret = True
+            elif r == 1: # no
+                ret = False
+            elif r == 2: # Edit patch
+                if chunk is None:
+                    ui.write(_('cannot edit patch for whole file'))
+                    ui.write("\n")
+                    continue
+                if chunk.header.binary():
+                    ui.write(_('cannot edit patch for binary file'))
+                    ui.write("\n")
+                    continue
+                # Patch comment based on the Git one (based on comment at end of
+                # http://mercurial.selenic.com/wiki/RecordExtension)
+                phelp = '---' + _("""
+To remove '-' lines, make them ' ' lines (context).
+To remove '+' lines, delete them.
+Lines starting with # will be removed from the patch.
+
+If the patch applies cleanly, the edited hunk will immediately be
+added to the record list. If it does not apply cleanly, a rejects
+file will be generated: you can use that when you try again. If
+all lines of the hunk are removed, then the edit is aborted and
+the hunk is left unchanged.
+""")
+                (patchfd, patchfn) = tempfile.mkstemp(prefix="hg-editor-",
+                        suffix=".diff", text=True)
+                ncpatchfp = None
+                try:
+                    # Write the initial patch
+                    f = os.fdopen(patchfd, "w")
+                    chunk.header.write(f)
+                    chunk.write(f)
+                    f.write('\n'.join(['# ' + i for i in phelp.splitlines()]))
+                    f.close()
+                    # Start the editor and wait for it to complete
+                    editor = ui.geteditor()
+                    ui.system("%s \"%s\"" % (editor, patchfn),
+                              environ={'HGUSER': ui.username()},
+                              onerr=util.Abort, errprefix=_("edit failed"))
+                    # Remove comment lines
+                    patchfp = open(patchfn)
+                    ncpatchfp = cStringIO.StringIO()
+                    for line in patchfp:
+                        if not line.startswith('#'):
+                            ncpatchfp.write(line)
+                    patchfp.close()
+                    ncpatchfp.seek(0)
+                    newpatches = parsepatch(ncpatchfp)
+                finally:
+                    os.unlink(patchfn)
+                    del ncpatchfp
+                # Signal that the chunk shouldn't be applied as-is, but
+                # provide the new patch to be used instead.
+                ret = False
+            elif r == 3: # Skip
+                ret = skipfile = False
+            elif r == 4: # file (Record remaining)
+                ret = skipfile = True
+            elif r == 5: # done, skip remaining
+                ret = skipall = False
+            elif r == 6: # all
+                ret = skipall = True
+            elif r == 7: # quit
+                raise util.Abort(_('user quit'))
+            return ret, skipfile, skipall, newpatches
+
+    seen = set()
+    applied = {}        # 'filename' -> [] of chunks
+    skipfile, skipall = None, None
+    pos, total = 1, sum(len(h.hunks) for h in headers)
+    for h in headers:
+        pos += len(h.hunks)
+        skipfile = None
+        fixoffset = 0
+        hdr = ''.join(h.header)
+        if hdr in seen:
+            continue
+        seen.add(hdr)
+        if skipall is None:
+            h.pretty(ui)
+        msg = (_('examine changes to %s?') %
+               _(' and ').join("'%s'" % f for f in h.files()))
+        r, skipfile, skipall, np = prompt(skipfile, skipall, msg, None)
+        if not r:
+            continue
+        applied[h.filename()] = [h]
+        if h.allhunks():
+            applied[h.filename()] += h.hunks
+            continue
+        for i, chunk in enumerate(h.hunks):
+            if skipfile is None and skipall is None:
+                chunk.pretty(ui)
+            if total == 1:
+                msg = _("record this change to '%s'?") % chunk.filename()
+            else:
+                idx = pos - len(h.hunks) + i
+                msg = _("record change %d/%d to '%s'?") % (idx, total,
+                                                           chunk.filename())
+            r, skipfile, skipall, newpatches = prompt(skipfile,
+                    skipall, msg, chunk)
+            if r:
+                if fixoffset:
+                    chunk = copy.copy(chunk)
+                    chunk.toline += fixoffset
+                applied[chunk.filename()].append(chunk)
+            elif newpatches is not None:
+                for newpatch in newpatches:
+                    for newhunk in newpatch.hunks:
+                        if fixoffset:
+                            newhunk.toline += fixoffset
+                        applied[newhunk.filename()].append(newhunk)
+            else:
+                fixoffset += chunk.removed - chunk.added
+    return sum([h for h in applied.itervalues()
+               if h[0].special() or len(h) > 1], [])
 class hunk(object):
     def __init__(self, desc, num, lr, context):
         self.number = num
@@ -1087,11 +1369,115 @@
             return s
     return s[:i]
 
-def pathstrip(path, strip):
+def parsepatch(originalchunks):
+    """patch -> [] of headers -> [] of hunks """
+    class parser(object):
+        """patch parsing state machine"""
+        def __init__(self):
+            self.fromline = 0
+            self.toline = 0
+            self.proc = ''
+            self.header = None
+            self.context = []
+            self.before = []
+            self.hunk = []
+            self.headers = []
+
+        def addrange(self, limits):
+            fromstart, fromend, tostart, toend, proc = limits
+            self.fromline = int(fromstart)
+            self.toline = int(tostart)
+            self.proc = proc
+
+        def addcontext(self, context):
+            if self.hunk:
+                h = recordhunk(self.header, self.fromline, self.toline,
+                        self.proc, self.before, self.hunk, context)
+                self.header.hunks.append(h)
+                self.fromline += len(self.before) + h.removed
+                self.toline += len(self.before) + h.added
+                self.before = []
+                self.hunk = []
+                self.proc = ''
+            self.context = context
+
+        def addhunk(self, hunk):
+            if self.context:
+                self.before = self.context
+                self.context = []
+            self.hunk = hunk
+
+        def newfile(self, hdr):
+            self.addcontext([])
+            h = header(hdr)
+            self.headers.append(h)
+            self.header = h
+
+        def addother(self, line):
+            pass # 'other' lines are ignored
+
+        def finished(self):
+            self.addcontext([])
+            return self.headers
+
+        transitions = {
+            'file': {'context': addcontext,
+                     'file': newfile,
+                     'hunk': addhunk,
+                     'range': addrange},
+            'context': {'file': newfile,
+                        'hunk': addhunk,
+                        'range': addrange,
+                        'other': addother},
+            'hunk': {'context': addcontext,
+                     'file': newfile,
+                     'range': addrange},
+            'range': {'context': addcontext,
+                      'hunk': addhunk},
+            'other': {'other': addother},
+            }
+
+    p = parser()
+    fp = cStringIO.StringIO()
+    fp.write(''.join(originalchunks))
+    fp.seek(0)
+
+    state = 'context'
+    for newstate, data in scanpatch(fp):
+        try:
+            p.transitions[state][newstate](p, data)
+        except KeyError:
+            raise PatchError('unhandled transition: %s -> %s' %
+                                   (state, newstate))
+        state = newstate
+    del fp
+    return p.finished()
+
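A rough usage sketch for the parser above (illustration only): fed the lines of a plain one-hunk diff, parsepatch returns header objects whose hunks carry the added/removed counts.

chunks = [
    'diff -r 000000000000 a\n',
    '--- a/a\n',
    '+++ b/a\n',
    '@@ -1,1 +1,2 @@\n',
    ' x\n',
    '+y\n',
]
headers = parsepatch(chunks)
# headers[0].filename() == 'a'
# headers[0].hunks[0].added == 1 and headers[0].hunks[0].removed == 0
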
+def pathtransform(path, strip, prefix):
+    '''turn a path from a patch into a path suitable for the repository
+
+    prefix, if not empty, is expected to be normalized with a / at the end.
+
+    Returns (stripped components, path in repository).
+
+    >>> pathtransform('a/b/c', 0, '')
+    ('', 'a/b/c')
+    >>> pathtransform('   a/b/c   ', 0, '')
+    ('', '   a/b/c')
+    >>> pathtransform('   a/b/c   ', 2, '')
+    ('a/b/', 'c')
+    >>> pathtransform('a/b/c', 0, 'd/e/')
+    ('', 'd/e/a/b/c')
+    >>> pathtransform('   a//b/c   ', 2, 'd/e/')
+    ('a//b/', 'd/e/c')
+    >>> pathtransform('a/b/c', 3, '')
+    Traceback (most recent call last):
+    PatchError: unable to strip away 1 of 3 dirs from a/b/c
+    '''
     pathlen = len(path)
     i = 0
     if strip == 0:
-        return '', path.rstrip()
+        return '', prefix + path.rstrip()
     count = strip
     while count > 0:
         i = path.find('/', i)
@@ -1103,16 +1489,16 @@
         while i < pathlen - 1 and path[i] == '/':
             i += 1
         count -= 1
-    return path[:i].lstrip(), path[i:].rstrip()
+    return path[:i].lstrip(), prefix + path[i:].rstrip()
 
-def makepatchmeta(backend, afile_orig, bfile_orig, hunk, strip):
+def makepatchmeta(backend, afile_orig, bfile_orig, hunk, strip, prefix):
     nulla = afile_orig == "/dev/null"
     nullb = bfile_orig == "/dev/null"
     create = nulla and hunk.starta == 0 and hunk.lena == 0
     remove = nullb and hunk.startb == 0 and hunk.lenb == 0
-    abase, afile = pathstrip(afile_orig, strip)
+    abase, afile = pathtransform(afile_orig, strip, prefix)
     gooda = not nulla and backend.exists(afile)
-    bbase, bfile = pathstrip(bfile_orig, strip)
+    bbase, bfile = pathtransform(bfile_orig, strip, prefix)
     if afile == bfile:
         goodb = gooda
     else:
@@ -1135,13 +1521,19 @@
     fname = None
     if not missing:
         if gooda and goodb:
-            fname = isbackup and afile or bfile
+            if isbackup:
+                fname = afile
+            else:
+                fname = bfile
         elif gooda:
             fname = afile
 
     if not fname:
         if not nullb:
-            fname = isbackup and afile or bfile
+            if isbackup:
+                fname = afile
+            else:
+                fname = bfile
         elif not nulla:
             fname = afile
         else:
@@ -1154,6 +1546,58 @@
         gp.op = 'DELETE'
     return gp
 
+def scanpatch(fp):
+    """like patch.iterhunks, but yield different events
+
+    - ('file',    [header_lines + fromfile + tofile])
+    - ('context', [context_lines])
+    - ('hunk',    [hunk_lines])
+    - ('range',   (-start,len, +start,len, proc))
+    """
+    lines_re = re.compile(r'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)')
+    lr = linereader(fp)
+
+    def scanwhile(first, p):
+        """scan lr while predicate holds"""
+        lines = [first]
+        while True:
+            line = lr.readline()
+            if not line:
+                break
+            if p(line):
+                lines.append(line)
+            else:
+                lr.push(line)
+                break
+        return lines
+
+    while True:
+        line = lr.readline()
+        if not line:
+            break
+        if line.startswith('diff --git a/') or line.startswith('diff -r '):
+            def notheader(line):
+                s = line.split(None, 1)
+                return not s or s[0] not in ('---', 'diff')
+            header = scanwhile(line, notheader)
+            fromfile = lr.readline()
+            if fromfile.startswith('---'):
+                tofile = lr.readline()
+                header += [fromfile, tofile]
+            else:
+                lr.push(fromfile)
+            yield 'file', header
+        elif line[0] == ' ':
+            yield 'context', scanwhile(line, lambda l: l[0] in ' \\')
+        elif line[0] in '-+':
+            yield 'hunk', scanwhile(line, lambda l: l[0] in '-+\\')
+        else:
+            m = lines_re.match(line)
+            if m:
+                yield 'range', m.groups()
+            else:
+                yield 'other', line
+
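For the same one-hunk diff used in the parsepatch sketch above, scanpatch would yield roughly this event stream (illustration only):

# ('file',    ['diff -r 000000000000 a\n', '--- a/a\n', '+++ b/a\n'])
# ('range',   ('1', '1', '1', '2', ''))
# ('context', [' x\n'])
# ('hunk',    ['+y\n'])
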
 def scangitpatch(lr, firstline):
     """
     Git patches can emit:
@@ -1335,7 +1779,7 @@
             raise PatchError(_('unexpected delta opcode 0'))
     return out
 
-def applydiff(ui, fp, backend, store, strip=1, eolmode='strict'):
+def applydiff(ui, fp, backend, store, strip=1, prefix='', eolmode='strict'):
     """Reads a patch from fp and tries to apply it.
 
     Returns 0 for a clean patch, -1 if any rejects were found and 1 if
@@ -1346,13 +1790,18 @@
     patching then normalized according to 'eolmode'.
     """
     return _applydiff(ui, fp, patchfile, backend, store, strip=strip,
-                      eolmode=eolmode)
+                      prefix=prefix, eolmode=eolmode)
 
-def _applydiff(ui, fp, patcher, backend, store, strip=1,
+def _applydiff(ui, fp, patcher, backend, store, strip=1, prefix='',
                eolmode='strict'):
 
+    if prefix:
+        prefix = pathutil.canonpath(backend.repo.root, backend.repo.getcwd(),
+                                    prefix)
+        if prefix != '':
+            prefix += '/'
     def pstrip(p):
-        return pathstrip(p, strip - 1)[1]
+        return pathtransform(p, strip - 1, prefix)[1]
 
     rejects = 0
     err = 0
@@ -1375,7 +1824,8 @@
                 if gp.oldpath:
                     gp.oldpath = pstrip(gp.oldpath)
             else:
-                gp = makepatchmeta(backend, afile, bfile, first_hunk, strip)
+                gp = makepatchmeta(backend, afile, bfile, first_hunk, strip,
+                                   prefix)
             if gp.op == 'RENAME':
                 backend.unlink(gp.oldpath)
             if not first_hunk:
@@ -1472,7 +1922,8 @@
                          util.explainexit(code)[0])
     return fuzz
 
-def patchbackend(ui, backend, patchobj, strip, files=None, eolmode='strict'):
+def patchbackend(ui, backend, patchobj, strip, prefix, files=None,
+                 eolmode='strict'):
     if files is None:
         files = set()
     if eolmode is None:
@@ -1487,7 +1938,7 @@
     except TypeError:
         fp = patchobj
     try:
-        ret = applydiff(ui, fp, backend, store, strip=strip,
+        ret = applydiff(ui, fp, backend, store, strip=strip, prefix=prefix,
                         eolmode=eolmode)
     finally:
         if fp != patchobj:
@@ -1498,19 +1949,19 @@
         raise PatchError(_('patch failed to apply'))
     return ret > 0
 
-def internalpatch(ui, repo, patchobj, strip, files=None, eolmode='strict',
-                  similarity=0):
+def internalpatch(ui, repo, patchobj, strip, prefix='', files=None,
+                  eolmode='strict', similarity=0):
     """use builtin patch to apply <patchobj> to the working directory.
     returns whether patch was applied with fuzz factor."""
     backend = workingbackend(ui, repo, similarity)
-    return patchbackend(ui, backend, patchobj, strip, files, eolmode)
+    return patchbackend(ui, backend, patchobj, strip, prefix, files, eolmode)
 
-def patchrepo(ui, repo, ctx, store, patchobj, strip, files=None,
+def patchrepo(ui, repo, ctx, store, patchobj, strip, prefix, files=None,
               eolmode='strict'):
     backend = repobackend(ui, repo, ctx, store)
-    return patchbackend(ui, backend, patchobj, strip, files, eolmode)
+    return patchbackend(ui, backend, patchobj, strip, prefix, files, eolmode)
 
-def patch(ui, repo, patchname, strip=1, files=None, eolmode='strict',
+def patch(ui, repo, patchname, strip=1, prefix='', files=None, eolmode='strict',
           similarity=0):
     """Apply <patchname> to the working directory.
 
@@ -1529,7 +1980,7 @@
     if patcher:
         return _externalpatch(ui, repo, patcher, patchname, strip,
                               files, similarity)
-    return internalpatch(ui, repo, patchname, strip, files, eolmode,
+    return internalpatch(ui, repo, patchname, strip, prefix, files, eolmode,
                          similarity)
 
 def changedfiles(ui, repo, patchpath, strip=1):
@@ -1541,11 +1992,12 @@
             if state == 'file':
                 afile, bfile, first_hunk, gp = values
                 if gp:
-                    gp.path = pathstrip(gp.path, strip - 1)[1]
+                    gp.path = pathtransform(gp.path, strip - 1, '')[1]
                     if gp.oldpath:
-                        gp.oldpath = pathstrip(gp.oldpath, strip - 1)[1]
+                        gp.oldpath = pathtransform(gp.oldpath, strip - 1, '')[1]
                 else:
-                    gp = makepatchmeta(backend, afile, bfile, first_hunk, strip)
+                    gp = makepatchmeta(backend, afile, bfile, first_hunk, strip,
+                                       '')
                 changed.add(gp.path)
                 if gp.op == 'RENAME':
                     changed.add(gp.oldpath)
@@ -1607,7 +2059,7 @@
     return mdiff.diffopts(**buildopts)
 
 def diff(repo, node1=None, node2=None, match=None, changes=None, opts=None,
-         losedatafn=None, prefix=''):
+         losedatafn=None, prefix='', relroot=''):
     '''yields diff of changes to files between two nodes, or node and
     working directory.
 
@@ -1624,7 +2076,9 @@
 
     prefix is a filename prefix that is prepended to all filenames on
     display (used for subrepos).
-    '''
+
+    relroot, if not empty, must be normalized with a trailing /. Any match
+    patterns that fall outside it will be ignored.'''
 
     if opts is None:
         opts = mdiff.defaultopts
@@ -1651,6 +2105,13 @@
     ctx1 = repo[node1]
     ctx2 = repo[node2]
 
+    relfiltered = False
+    if relroot != '' and match.always():
+        # as a special case, create a new matcher with just the relroot
+        pats = [relroot]
+        match = scmutil.match(ctx2, pats, default='path')
+        relfiltered = True
+
     if not changes:
         changes = repo.status(ctx1, ctx2, match=match)
     modified, added, removed = changes[:3]
@@ -1658,16 +2119,35 @@
     if not modified and not added and not removed:
         return []
 
-    hexfunc = repo.ui.debugflag and hex or short
+    if repo.ui.debugflag:
+        hexfunc = hex
+    else:
+        hexfunc = short
     revs = [hexfunc(node) for node in [ctx1.node(), ctx2.node()] if node]
 
     copy = {}
     if opts.git or opts.upgrade:
-        copy = copies.pathcopies(ctx1, ctx2)
+        copy = copies.pathcopies(ctx1, ctx2, match=match)
+
+    if relroot is not None:
+        if not relfiltered:
+            # XXX this would ideally be done in the matcher, but that is
+            # generally meant to 'or' patterns, not 'and' them. In this case we
+            # need to 'and' all the patterns from the matcher with relroot.
+            def filterrel(l):
+                return [f for f in l if f.startswith(relroot)]
+            modified = filterrel(modified)
+            added = filterrel(added)
+            removed = filterrel(removed)
+            relfiltered = True
+        # filter out copies where either side isn't inside the relative root
+        copy = dict(((dst, src) for (dst, src) in copy.iteritems()
+                     if dst.startswith(relroot)
+                     and src.startswith(relroot)))
 
     def difffn(opts, losedata):
         return trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
-                       copy, getfilectx, opts, losedata, prefix)
+                       copy, getfilectx, opts, losedata, prefix, relroot)
     if opts.upgrade and not opts.git:
         try:
             def losedata(fn):
@@ -1736,19 +2216,55 @@
     '''like diff(), but yields 2-tuples of (output, label) for ui.write()'''
     return difflabel(diff, *args, **kw)
 
-def trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
-            copy, getfilectx, opts, losedatafn, prefix):
+def _filepairs(ctx1, modified, added, removed, copy, opts):
+    '''generates tuples (f1, f2, copyop), where f1 is the name of the file
+    before and f2 is the name after. For added files, f1 will be None,
+    and for removed files, f2 will be None. copyop may be set to None, 'copy'
+    or 'rename' (the latter two only if opts.git is set).'''
+    gone = set()
 
-    def join(f):
-        return posixpath.join(prefix, f)
+    copyto = dict([(v, k) for k, v in copy.items()])
+
+    addedset, removedset = set(added), set(removed)
+    # Fix up  added, since merged-in additions appear as
+    # modifications during merges
+    for f in modified:
+        if f not in ctx1:
+            addedset.add(f)
 
-    def addmodehdr(header, omode, nmode):
-        if omode != nmode:
-            header.append('old mode %s\n' % omode)
-            header.append('new mode %s\n' % nmode)
+    for f in sorted(modified + added + removed):
+        copyop = None
+        f1, f2 = f, f
+        if f in addedset:
+            f1 = None
+            if f in copy:
+                if opts.git:
+                    f1 = copy[f]
+                    if f1 in removedset and f1 not in gone:
+                        copyop = 'rename'
+                        gone.add(f1)
+                    else:
+                        copyop = 'copy'
+        elif f in removedset:
+            f2 = None
+            if opts.git:
+                # have we already reported a copy above?
+                if (f in copyto and copyto[f] in addedset
+                    and copy[copyto[f]] == f):
+                    continue
+        yield f1, f2, copyop
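A rough worked example of what the generator above yields (my reading of the code, illustration only): with modified=['m'], added=['b'], removed=['a'], copy={'b': 'a'}, opts.git set, and 'm' present in ctx1:

# _filepairs(ctx1, ['m'], ['b'], ['a'], {'b': 'a'}, opts) yields, in order:
#   ('a', 'b', 'rename')   # 'b' reported as a rename of the removed 'a'
#   ('m', 'm', None)       # plain modification
# the standalone entry for removed 'a' is skipped, since the rename above
# already reported it
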
 
-    def addindexmeta(meta, oindex, nindex):
-        meta.append('index %s..%s\n' % (oindex, nindex))
+def trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
+            copy, getfilectx, opts, losedatafn, prefix, relroot):
+    '''given input data, generate a diff and yield it in blocks
+
+    If generating a diff would lose data like flags or binary data and
+    losedatafn is not None, it will be called.
+
+    relroot is removed and prefix is added to every path in the diff output.
+
+    If relroot is not empty, this function expects every path in modified,
+    added, removed and copy to start with it.'''
 
     def gitindex(text):
         if not text:
@@ -1764,120 +2280,88 @@
         aprefix = 'a/'
         bprefix = 'b/'
 
-    def diffline(a, b, revs):
-        if opts.git:
-            line = 'diff --git %s%s %s%s\n' % (aprefix, a, bprefix, b)
-        elif not repo.ui.quiet:
-            if revs:
-                revinfo = ' '.join(["-r %s" % rev for rev in revs])
-                line = 'diff %s %s\n' % (revinfo, a)
-            else:
-                line = 'diff %s\n' % a
-        else:
-            line = ''
-        return line
+    def diffline(f, revs):
+        revinfo = ' '.join(["-r %s" % rev for rev in revs])
+        return 'diff %s %s' % (revinfo, f)
 
     date1 = util.datestr(ctx1.date())
     date2 = util.datestr(ctx2.date())
 
-    gone = set()
     gitmode = {'l': '120000', 'x': '100755', '': '100644'}
 
-    copyto = dict([(v, k) for k, v in copy.items()])
-
-    if opts.git:
-        revs = None
+    if relroot != '' and (repo.ui.configbool('devel', 'all')
+                          or repo.ui.configbool('devel', 'check-relroot')):
+        for f in modified + added + removed + copy.keys() + copy.values():
+            if f is not None and not f.startswith(relroot):
+                raise AssertionError(
+                    "file %s doesn't start with relroot %s" % (f, relroot))
 
-    modifiedset, addedset, removedset = set(modified), set(added), set(removed)
-    # Fix up modified and added, since merged-in additions appear as
-    # modifications during merges
-    for f in modifiedset.copy():
-        if f not in ctx1:
-            addedset.add(f)
-            modifiedset.remove(f)
-    for f in sorted(modified + added + removed):
-        to = None
-        tn = None
-        binarydiff = False
-        header = []
-        if f not in addedset:
-            to = getfilectx(f, ctx1).data()
-        if f not in removedset:
-            tn = getfilectx(f, ctx2).data()
-        a, b = f, f
+    for f1, f2, copyop in _filepairs(
+            ctx1, modified, added, removed, copy, opts):
+        content1 = None
+        content2 = None
+        flag1 = None
+        flag2 = None
+        if f1:
+            content1 = getfilectx(f1, ctx1).data()
+            if opts.git or losedatafn:
+                flag1 = ctx1.flags(f1)
+        if f2:
+            content2 = getfilectx(f2, ctx2).data()
+            if opts.git or losedatafn:
+                flag2 = ctx2.flags(f2)
+        binary = False
         if opts.git or losedatafn:
-            if f in addedset:
-                mode = gitmode[ctx2.flags(f)]
-                if f in copy or f in copyto:
-                    if opts.git:
-                        if f in copy:
-                            a = copy[f]
-                        else:
-                            a = copyto[f]
-                        omode = gitmode[ctx1.flags(a)]
-                        addmodehdr(header, omode, mode)
-                        if a in removedset and a not in gone:
-                            op = 'rename'
-                            gone.add(a)
-                        else:
-                            op = 'copy'
-                        header.append('%s from %s\n' % (op, join(a)))
-                        header.append('%s to %s\n' % (op, join(f)))
-                        to = getfilectx(a, ctx1).data()
-                    else:
-                        losedatafn(f)
-                else:
-                    if opts.git:
-                        header.append('new file mode %s\n' % mode)
-                    elif ctx2.flags(f):
-                        losedatafn(f)
-                if util.binary(to) or util.binary(tn):
-                    if opts.git:
-                        binarydiff = True
-                    else:
-                        losedatafn(f)
-                if not opts.git and not tn:
-                    # regular diffs cannot represent new empty file
-                    losedatafn(f)
-            elif f in removedset:
-                if opts.git:
-                    # have we already reported a copy above?
-                    if ((f in copy and copy[f] in addedset
-                         and copyto[copy[f]] == f) or
-                        (f in copyto and copyto[f] in addedset
-                         and copy[copyto[f]] == f)):
-                        continue
-                    else:
-                        header.append('deleted file mode %s\n' %
-                                      gitmode[ctx1.flags(f)])
-                        if util.binary(to):
-                            binarydiff = True
-                elif not to or util.binary(to):
-                    # regular diffs cannot represent empty file deletion
-                    losedatafn(f)
-            else:
-                oflag = ctx1.flags(f)
-                nflag = ctx2.flags(f)
-                binary = util.binary(to) or util.binary(tn)
-                if opts.git:
-                    addmodehdr(header, gitmode[oflag], gitmode[nflag])
-                    if binary:
-                        binarydiff = True
-                elif binary or nflag != oflag:
-                    losedatafn(f)
+            binary = util.binary(content1) or util.binary(content2)
+
+        if losedatafn and not opts.git:
+            if (binary or
+                # copy/rename
+                f2 in copy or
+                # empty file creation
+                (not f1 and not content2) or
+                # empty file deletion
+                (not content1 and not f2) or
+                # create with flags
+                (not f1 and flag2) or
+                # change flags
+                (f1 and f2 and flag1 != flag2)):
+                losedatafn(f2 or f1)
 
-        if opts.git or revs:
-            header.insert(0, diffline(join(a), join(b), revs))
-        if binarydiff and not opts.nobinary:
-            text = mdiff.b85diff(to, tn)
-            if text and opts.git:
-                addindexmeta(header, gitindex(to), gitindex(tn))
+        path1 = f1 or f2
+        path2 = f2 or f1
+        path1 = posixpath.join(prefix, path1[len(relroot):])
+        path2 = posixpath.join(prefix, path2[len(relroot):])
+        header = []
+        if opts.git:
+            header.append('diff --git %s%s %s%s' %
+                          (aprefix, path1, bprefix, path2))
+            if not f1: # added
+                header.append('new file mode %s' % gitmode[flag2])
+            elif not f2: # removed
+                header.append('deleted file mode %s' % gitmode[flag1])
+            else:  # modified/copied/renamed
+                mode1, mode2 = gitmode[flag1], gitmode[flag2]
+                if mode1 != mode2:
+                    header.append('old mode %s' % mode1)
+                    header.append('new mode %s' % mode2)
+                if copyop is not None:
+                    header.append('%s from %s' % (copyop, path1))
+                    header.append('%s to %s' % (copyop, path2))
+        elif revs and not repo.ui.quiet:
+            header.append(diffline(path1, revs))
+
+        if binary and opts.git and not opts.nobinary:
+            text = mdiff.b85diff(content1, content2)
+            if text:
+                header.append('index %s..%s' %
+                              (gitindex(content1), gitindex(content2)))
         else:
-            text = mdiff.unidiff(to, date1,
-                                 tn, date2,
-                                 join(a), join(b), opts=opts)
+            text = mdiff.unidiff(content1, date1,
+                                 content2, date2,
+                                 path1, path2, opts=opts)
         if header and (text or len(header) > 1):
-            yield ''.join(header)
+            yield '\n'.join(header) + '\n'
         if text:
             yield text
 
--- a/mercurial/phases.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/phases.py	Thu Apr 16 20:57:51 2015 -0500
@@ -172,19 +172,36 @@
         for a in 'phaseroots dirty opener _phaserevs'.split():
             setattr(self, a, getattr(phcache, a))
 
+    def _getphaserevsnative(self, repo):
+        repo = repo.unfiltered()
+        nativeroots = []
+        for phase in trackedphases:
+            nativeroots.append(map(repo.changelog.rev, self.phaseroots[phase]))
+        return repo.changelog.computephases(nativeroots)
+
+    def _computephaserevspure(self, repo):
+        repo = repo.unfiltered()
+        revs = [public] * len(repo.changelog)
+        self._phaserevs = revs
+        self._populatephaseroots(repo)
+        for phase in trackedphases:
+            roots = map(repo.changelog.rev, self.phaseroots[phase])
+            if roots:
+                for rev in roots:
+                    revs[rev] = phase
+                for rev in repo.changelog.descendants(roots):
+                    revs[rev] = phase
+
     def getphaserevs(self, repo):
         if self._phaserevs is None:
-            repo = repo.unfiltered()
-            revs = [public] * len(repo.changelog)
-            self._phaserevs = revs
-            self._populatephaseroots(repo)
-            for phase in trackedphases:
-                roots = map(repo.changelog.rev, self.phaseroots[phase])
-                if roots:
-                    for rev in roots:
-                        revs[rev] = phase
-                    for rev in repo.changelog.descendants(roots):
-                        revs[rev] = phase
+            try:
+                if repo.ui.configbool('experimental',
+                                      'nativephaseskillswitch'):
+                    self._computephaserevspure(repo)
+                else:
+                    self._phaserevs = self._getphaserevsnative(repo)
+            except AttributeError:
+                self._computephaserevspure(repo)
         return self._phaserevs
 
     def invalidate(self):
--- a/mercurial/posix.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/posix.py	Thu Apr 16 20:57:51 2015 -0500
@@ -16,6 +16,7 @@
 oslink = os.link
 unlink = os.unlink
 rename = os.rename
+removedirs = os.removedirs
 expandglobs = False
 
 umask = os.umask(0)
@@ -200,6 +201,11 @@
 def normcase(path):
     return path.lower()
 
+# what normcase does to ASCII strings
+normcasespec = encoding.normcasespecs.lower
+# fallback normcase function for non-ASCII strings
+normcasefallback = normcase
+
 if sys.platform == 'darwin':
 
     def normcase(path):
@@ -223,7 +229,11 @@
         try:
             return encoding.asciilower(path)  # exception for non-ASCII
         except UnicodeDecodeError:
-            pass
+            return normcasefallback(path)
+
+    normcasespec = encoding.normcasespecs.lower
+
+    def normcasefallback(path):
         try:
             u = path.decode('utf-8')
         except UnicodeDecodeError:
@@ -302,6 +312,9 @@
 
         return encoding.upper(path)
 
+    normcasespec = encoding.normcasespecs.other
+    normcasefallback = normcase
+
     # Cygwin translates native ACLs to POSIX permissions,
     # but these translations are not supported by native
     # tools, so the exec bit tends to be set erroneously.
--- a/mercurial/pure/parsers.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/pure/parsers.py	Thu Apr 16 20:57:51 2015 -0500
@@ -5,15 +5,13 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-from mercurial.node import bin, nullid
-from mercurial import util
+from mercurial.node import nullid
 import struct, zlib, cStringIO
 
 _pack = struct.pack
 _unpack = struct.unpack
 _compress = zlib.compress
 _decompress = zlib.decompress
-_sha = util.sha1
 
 # Some code below makes tuples directly because it's more convenient. However,
 # code outside this module should always use dirstatetuple.
@@ -21,15 +19,6 @@
     # x is a tuple
     return x
 
-def parse_manifest(mfdict, fdict, lines):
-    for l in lines.splitlines():
-        f, n = l.split('\0')
-        if len(n) > 40:
-            fdict[f] = n[40:]
-            mfdict[f] = bin(n[:40])
-        else:
-            mfdict[f] = bin(n)
-
 def parse_index2(data, inline):
     def gettype(q):
         return int(q & 0xFFFF)
--- a/mercurial/pvec.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/pvec.py	Thu Apr 16 20:57:51 2015 -0500
@@ -142,7 +142,7 @@
 
 def ctxpvec(ctx):
     '''construct a pvec for ctx while filling in the cache'''
-    r = ctx._repo
+    r = ctx.repo()
     if not util.safehasattr(r, "_pveccache"):
         r._pveccache = {}
     pvc = r._pveccache
--- a/mercurial/repair.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/repair.py	Thu Apr 16 20:57:51 2015 -0500
@@ -42,7 +42,7 @@
     name = "%s/%s-%s-%s.hg" % (backupdir, short(node), totalhash[:8], suffix)
 
     if usebundle2:
-        bundletype = "HG2Y"
+        bundletype = "HG20"
     elif compress:
         bundletype = "HG10BZ"
     else:
@@ -137,6 +137,7 @@
     # create a changegroup for all the branches we need to keep
     backupfile = None
     vfs = repo.vfs
+    node = nodelist[-1]
     if backup:
         backupfile = _bundle(repo, stripbases, cl.heads(), node, topic)
         repo.ui.status(_("saved backup bundle to %s\n") %
@@ -181,6 +182,8 @@
                 repo.ui.pushbuffer()
             if isinstance(gen, bundle2.unbundle20):
                 tr = repo.transaction('strip')
+                tr.hookargs = {'source': 'strip',
+                               'url': 'bundle:' + vfs.join(chgrpfile)}
                 try:
                     bundle2.processbundle(repo, gen, lambda: tr)
                     tr.close()
--- a/mercurial/repoview.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/repoview.py	Thu Apr 16 20:57:51 2015 -0500
@@ -6,6 +6,7 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
+import heapq
 import copy
 import error
 import phases
@@ -13,6 +14,7 @@
 import obsolete
 import struct
 import tags as tagsmod
+from node import nullrev
 
 def hideablerevs(repo):
     """Revisions candidates to be hidden
@@ -20,23 +22,46 @@
     This is a standalone function to help extensions to wrap it."""
     return obsolete.getrevs(repo, 'obsolete')
 
-def _getstaticblockers(repo):
-    """Cacheable revisions blocking hidden changesets from being filtered.
+def _getstatichidden(repo):
+    """Revision to be hidden (disregarding dynamic blocker)
 
-    Additional non-cached hidden blockers are computed in _getdynamicblockers.
-    This is a standalone function to help extensions to wrap it."""
+    To keep a consistent graph, we cannot hide any revisions with
+    non-hidden descendants. This function computes the set of
+    revisions that could be hidden while keeping the graph consistent.
+
+    A second pass will be done to apply "dynamic blockers" like bookmarks or
+    working directory parents.
+
+    """
     assert not repo.changelog.filteredrevs
-    hideable = hideablerevs(repo)
-    blockers = set()
-    if hideable:
-        # We use cl to avoid recursive lookup from repo[xxx]
-        cl = repo.changelog
-        firsthideable = min(hideable)
-        revs = cl.revs(start=firsthideable)
-        tofilter = repo.revs(
-            '(%ld) and children(%ld)', list(revs), list(hideable))
-        blockers.update([r for r in tofilter if r not in hideable])
-    return blockers
+    hidden = set(hideablerevs(repo))
+    if hidden:
+        getphase = repo._phasecache.phase
+        getparentrevs = repo.changelog.parentrevs
+        # Skip heads which are public (guaranteed to not be hidden)
+        heap = [-r for r in repo.changelog.headrevs() if getphase(repo, r)]
+        heapq.heapify(heap)
+        heappop = heapq.heappop
+        heappush = heapq.heappush
+        seen = set() # no need to init it with heads, they have no children
+        while heap:
+            rev = -heappop(heap)
+            # All children have been processed, so at this point, if no child
+            # removed 'rev' from the 'hidden' set, 'rev' is going to be hidden.
+            blocker = rev not in hidden
+            for parent in getparentrevs(rev):
+                if parent == nullrev:
+                    continue
+                if blocker:
+                    # If visible, ensure parent will be visible too
+                    hidden.discard(parent)
+                # - Avoid adding the same revision twice
+                # - Skip nodes which are public (guaranteed to not be hidden)
+                pre = len(seen)
+                seen.add(parent)
+                if pre < len(seen) and getphase(repo, rev):
+                    heappush(heap, -parent)
+    return hidden
 
 def _getdynamicblockers(repo):
     """Non-cacheable revisions blocking hidden changesets from being filtered.
@@ -137,8 +162,7 @@
         cl = repo.changelog
         hidden = tryreadcache(repo, hideable)
         if hidden is None:
-            blocked = cl.ancestors(_getstaticblockers(repo), inclusive=True)
-            hidden = frozenset(r for r in hideable if r not in blocked)
+            hidden = frozenset(_getstatichidden(repo))
             trywritehiddencache(repo, hideable, hidden)
 
         # check if we have wd parents, bookmarks or tags pointing to hidden
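The replacement _getstatichidden() walks the graph from the heads down with a max-heap (revisions are pushed negated) and discards any candidate whose child stays visible. A toy standalone version of the same idea, assuming revision numbers grow from parents to children and ignoring the phase-based shortcuts of the real code:

    import heapq

    def statichidden(parentrevs, headrevs, hideable):
        # parentrevs[rev] lists the parents of rev (-1 means "no parent"),
        # headrevs are revisions without children, hideable is the set of
        # candidates; only candidates with no visible descendant survive
        hidden = set(hideable)
        heap = [-r for r in headrevs]      # max-heap via negation
        heapq.heapify(heap)
        seen = set()
        while heap:
            rev = -heapq.heappop(heap)
            blocker = rev not in hidden    # this revision stays visible
            for parent in parentrevs[rev]:
                if parent == -1:
                    continue
                if blocker:
                    hidden.discard(parent) # a visible child blocks hiding
                if parent not in seen:
                    seen.add(parent)
                    heapq.heappush(heap, -parent)
        return hidden

    # linear history 0-1-2-3: hiding only 2 is blocked by its visible
    # descendant 3, while 2 and 3 together can be hidden
    parents = [(-1,), (0,), (1,), (2,)]
    assert statichidden(parents, [3], set([2])) == set()
    assert statichidden(parents, [3], set([2, 3])) == set([2, 3])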
--- a/mercurial/revlog.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/revlog.py	Thu Apr 16 20:57:51 2015 -0500
@@ -277,6 +277,8 @@
 
     def tip(self):
         return self.node(len(self.index) - 2)
+    def __contains__(self, rev):
+        return 0 <= rev < len(self)
     def __len__(self):
         return len(self.index) - 1
     def __iter__(self):
@@ -722,6 +724,9 @@
         except AttributeError:
             return self._headrevs()
 
+    def computephases(self, roots):
+        return self.index.computephases(roots)
+
     def _headrevs(self):
         count = len(self)
         if not count:
@@ -1124,7 +1129,12 @@
                               % self.indexfile)
 
         trindex = trinfo[2]
-        dataoff = self.start(trindex)
+        if trindex is not None:
+            dataoff = self.start(trindex)
+        else:
+            # revlog was stripped at start of transaction, use all leftover data
+            trindex = len(self) - 1
+            dataoff = self.end(-2)
 
         tr.add(self.datafile, dataoff)
 
@@ -1231,8 +1241,18 @@
             if dfh:
                 dfh.flush()
             ifh.flush()
-            basetext = self.revision(self.node(cachedelta[0]))
-            btext[0] = mdiff.patch(basetext, cachedelta[1])
+            baserev = cachedelta[0]
+            delta = cachedelta[1]
+            # special-case deltas which replace the entire base; no need to
+            # decode the base revision. This neatly avoids censored bases,
+            # which throw when they're decoded.
+            hlen = struct.calcsize(">lll")
+            if delta[:hlen] == mdiff.replacediffheader(self.rawsize(baserev),
+                                                       len(delta) - hlen):
+                btext[0] = delta[hlen:]
+            else:
+                basetext = self.revision(self.node(baserev))
+                btext[0] = mdiff.patch(basetext, delta)
             try:
                 self.checkhash(btext[0], p1, p2, node)
                 if flags & REVIDX_ISCENSORED:
@@ -1249,8 +1269,14 @@
                 delta = cachedelta[1]
             else:
                 t = buildtext()
-                ptext = self.revision(self.node(rev))
-                delta = mdiff.textdiff(ptext, t)
+                if self.iscensored(rev):
+                    # deltas based on a censored revision must replace the
+                    # full content in one patch, so delta works everywhere
+                    header = mdiff.replacediffheader(self.rawsize(rev), len(t))
+                    delta = header + t
+                else:
+                    ptext = self.revision(self.node(rev))
+                    delta = mdiff.textdiff(ptext, t)
             data = self.compress(delta)
             l = len(data[1]) + len(data[0])
             if basecache[0] == rev:
@@ -1368,7 +1394,10 @@
             transaction.add(self.indexfile, isize, r)
             transaction.add(self.datafile, end)
             dfh = self.opener(self.datafile, "a")
-
+        def flush():
+            if dfh:
+                dfh.flush()
+            ifh.flush()
         try:
             # loop through our set of deltas
             chain = None
@@ -1401,9 +1430,24 @@
                                       _('unknown delta base'))
 
                 baserev = self.rev(deltabase)
+
+                if baserev != nullrev and self.iscensored(baserev):
+                    # if base is censored, delta must be full replacement in a
+                    # single patch operation
+                    hlen = struct.calcsize(">lll")
+                    oldlen = self.rawsize(baserev)
+                    newlen = len(delta) - hlen
+                    if delta[:hlen] != mdiff.replacediffheader(oldlen, newlen):
+                        raise error.CensoredBaseError(self.indexfile,
+                                                      self.node(baserev))
+
+                flags = REVIDX_DEFAULT_FLAGS
+                if self._peek_iscensored(baserev, delta, flush):
+                    flags |= REVIDX_ISCENSORED
+
                 chain = self._addrevision(node, None, transaction, link,
-                                          p1, p2, REVIDX_DEFAULT_FLAGS,
-                                          (baserev, delta), ifh, dfh)
+                                          p1, p2, flags, (baserev, delta),
+                                          ifh, dfh)
                 if not dfh and not self._inline:
                     # addrevision switched from inline to conventional
                     # reopen the index
@@ -1417,6 +1461,14 @@
 
         return content
 
+    def iscensored(self, rev):
+        """Check if a file revision is censored."""
+        return False
+
+    def _peek_iscensored(self, baserev, delta, flush):
+        """Quickly check if a delta produces a censored revision."""
+        return False
+
     def getstrippoint(self, minlink):
         """find the minimum rev that must be stripped to strip the linkrev
 
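Several of the revlog changes above hinge on "full replacement" deltas: a delta whose single patch record covers the whole base text, so a censored base never has to be decoded. A small standalone sketch of that record layout (a big-endian start/end/newlength header followed by the replacement bytes, mirroring what mdiff.replacediffheader produces), using toy helper names:

    import struct

    def fullreplacement(base, newtext):
        # one (start, end, newlength) record replacing base[0:len(base)]
        header = struct.pack(">lll", 0, len(base), len(newtext))
        return header + newtext

    def isfullreplacement(base, delta):
        hlen = struct.calcsize(">lll")
        expected = struct.pack(">lll", 0, len(base), len(delta) - hlen)
        return delta[:hlen] == expected

    def patch(base, delta):
        # minimal single-record patcher, enough for the full-replacement case
        hlen = struct.calcsize(">lll")
        start, end, newlen = struct.unpack(">lll", delta[:hlen])
        return base[:start] + delta[hlen:hlen + newlen] + base[end:]

    base, tombstone = b"censored file content", b"tombstone"
    delta = fullreplacement(base, tombstone)
    assert isfullreplacement(base, delta)
    assert patch(base, delta) == tombstone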
--- a/mercurial/revset.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/revset.py	Thu Apr 16 20:57:51 2015 -0500
@@ -6,7 +6,7 @@
 # GNU General Public License version 2 or any later version.
 
 import re
-import parser, util, error, discovery, hbisect, phases
+import parser, util, error, hbisect, phases
 import node
 import heapq
 import match as matchmod
@@ -18,7 +18,10 @@
 
 def _revancestors(repo, revs, followfirst):
     """Like revlog.ancestors(), but supports followfirst."""
-    cut = followfirst and 1 or None
+    if followfirst:
+        cut = 1
+    else:
+        cut = None
     cl = repo.changelog
 
     def iterate():
@@ -49,7 +52,10 @@
 
 def _revdescendants(repo, revs, followfirst):
     """Like revlog.descendants() but supports followfirst."""
-    cut = followfirst and 1 or None
+    if followfirst:
+        cut = 1
+    else:
+        cut = None
 
     def iterate():
         cl = repo.changelog
@@ -235,7 +241,8 @@
                 yield ('symbol', sym, s)
             pos -= 1
         else:
-            raise error.ParseError(_("syntax error"), pos)
+            raise error.ParseError(_("syntax error in revset '%s'") %
+                                   program, pos)
         pos += 1
     yield ('end', None, pos)
 
@@ -323,8 +330,6 @@
 
 def stringset(repo, subset, x):
     x = repo[x].rev()
-    if x == -1 and len(subset) == len(repo):
-        return baseset([-1])
     if x in subset:
         return baseset([x])
     return baseset()
@@ -349,7 +354,7 @@
     return r & subset
 
 def dagrange(repo, subset, x, y):
-    r = spanset(repo)
+    r = fullreposet(repo)
     xs = _revsbetween(repo, getset(repo, r, x), getset(repo, r, y))
     return xs & subset
 
@@ -370,7 +375,7 @@
 def func(repo, subset, a, b):
     if a[0] == 'symbol' and a[1] in symbols:
         return symbols[a[1]](repo, subset, b)
-    raise error.ParseError(_("not a function: %s") % a[1])
+    raise error.UnknownIdentifier(a[1], symbols.keys())
 
 # functions
 
@@ -396,7 +401,7 @@
     """
     # i18n: "ancestor" is a keyword
     l = getlist(x)
-    rl = spanset(repo)
+    rl = fullreposet(repo)
     anc = None
 
     # (getset(repo, rl, i) for i in l) generates a list of lists
@@ -412,7 +417,7 @@
     return baseset()
 
 def _ancestors(repo, subset, x, followfirst=False):
-    heads = getset(repo, spanset(repo), x)
+    heads = getset(repo, fullreposet(repo), x)
     if not heads:
         return baseset()
     s = _revancestors(repo, heads, followfirst)
@@ -524,10 +529,7 @@
     a regular expression. To match a branch that actually starts with `re:`,
     use the prefix `literal:`.
     """
-    import branchmap
-    urepo = repo.unfiltered()
-    ucl = urepo.changelog
-    getbi = branchmap.revbranchcache(urepo, readonly=True).branchinfo
+    getbi = repo.revbranchcache().branchinfo
 
     try:
         b = getstring(x, '')
@@ -540,16 +542,16 @@
             # note: falls through to the revspec case if no branch with
             # this name exists
             if pattern in repo.branchmap():
-                return subset.filter(lambda r: matcher(getbi(ucl, r)[0]))
+                return subset.filter(lambda r: matcher(getbi(r)[0]))
         else:
-            return subset.filter(lambda r: matcher(getbi(ucl, r)[0]))
-
-    s = getset(repo, spanset(repo), x)
+            return subset.filter(lambda r: matcher(getbi(r)[0]))
+
+    s = getset(repo, fullreposet(repo), x)
     b = set()
     for r in s:
-        b.add(getbi(ucl, r)[0])
+        b.add(getbi(r)[0])
     c = s.__contains__
-    return subset.filter(lambda r: c(r) or getbi(ucl, r)[0] in b)
+    return subset.filter(lambda r: c(r) or getbi(r)[0] in b)
 
 def bumped(repo, subset, x):
     """``bumped()``
@@ -708,7 +710,7 @@
     return subset.filter(matches)
 
 def _descendants(repo, subset, x, followfirst=False):
-    roots = getset(repo, spanset(repo), x)
+    roots = getset(repo, fullreposet(repo), x)
     if not roots:
         return baseset()
     s = _revdescendants(repo, roots, followfirst)
@@ -744,9 +746,9 @@
     is the same as passing all().
     """
     if x is not None:
-        sources = getset(repo, spanset(repo), x)
+        sources = getset(repo, fullreposet(repo), x)
     else:
-        sources = getall(repo, spanset(repo), x)
+        sources = fullreposet(repo)
 
     dests = set()
 
@@ -976,7 +978,7 @@
 
 def follow(repo, subset, x):
     """``follow([file])``
-    An alias for ``::.`` (ancestors of the working copy's first parent).
+    An alias for ``::.`` (ancestors of the working directory's first parent).
     If a filename is specified, the history of the given file is followed,
     including copies.
     """
@@ -994,7 +996,7 @@
     """
     # i18n: "all" is a keyword
     getargs(x, 0, 0, _("all takes no arguments"))
-    return subset
+    return subset & spanset(repo)  # drop "null" if any
 
 def grep(repo, subset, x):
     """``grep(regex)``
@@ -1145,7 +1147,7 @@
         # i18n: "limit" is a keyword
         raise error.ParseError(_("limit expects a number"))
     ss = subset
-    os = getset(repo, spanset(repo), l[0])
+    os = getset(repo, fullreposet(repo), l[0])
     result = []
     it = iter(os)
     for x in xrange(lim):
@@ -1172,7 +1174,7 @@
         # i18n: "last" is a keyword
         raise error.ParseError(_("last expects a number"))
     ss = subset
-    os = getset(repo, spanset(repo), l[0])
+    os = getset(repo, fullreposet(repo), l[0])
     os.reverse()
     result = []
     it = iter(os)
@@ -1189,7 +1191,7 @@
     """``max(set)``
     Changeset with highest revision number in set.
     """
-    os = getset(repo, spanset(repo), x)
+    os = getset(repo, fullreposet(repo), x)
     if os:
         m = os.max()
         if m in subset:
@@ -1226,7 +1228,7 @@
     """``min(set)``
     Changeset with lowest revision number in set.
     """
-    os = getset(repo, spanset(repo), x)
+    os = getset(repo, fullreposet(repo), x)
     if os:
         m = os.min()
         if m in subset:
@@ -1322,7 +1324,7 @@
     cl = repo.changelog
     # i18n: "only" is a keyword
     args = getargs(x, 1, 2, _('only takes one or two arguments'))
-    include = getset(repo, spanset(repo), args[0])
+    include = getset(repo, fullreposet(repo), args[0])
     if len(args) == 1:
         if not include:
             return baseset()
@@ -1331,7 +1333,7 @@
         exclude = [rev for rev in cl.headrevs()
             if not rev in descendants and not rev in include]
     else:
-        exclude = getset(repo, spanset(repo), args[1])
+        exclude = getset(repo, fullreposet(repo), args[1])
 
     results = set(cl.findmissingrevs(common=exclude, heads=include))
     return subset & results
@@ -1345,9 +1347,9 @@
     for the first operation is selected.
     """
     if x is not None:
-        dests = getset(repo, spanset(repo), x)
+        dests = getset(repo, fullreposet(repo), x)
     else:
-        dests = getall(repo, spanset(repo), x)
+        dests = fullreposet(repo)
 
     def _firstsrc(rev):
         src = _getrevsource(repo, rev)
@@ -1370,7 +1372,9 @@
     Changesets not found in the specified destination repository, or the
     default push location.
     """
-    import hg # avoid start-up nasties
+    # Avoid cycles.
+    import discovery
+    import hg
     # i18n: "outgoing" is a keyword
     l = getargs(x, 0, 1, _("outgoing takes one or no arguments"))
     # i18n: "outgoing" is a keyword
@@ -1400,7 +1404,7 @@
 
     ps = set()
     cl = repo.changelog
-    for r in getset(repo, spanset(repo), x):
+    for r in getset(repo, fullreposet(repo), x):
         ps.add(cl.parentrevs(r)[0])
     ps -= set([node.nullrev])
     return subset & ps
@@ -1421,7 +1425,7 @@
 
     ps = set()
     cl = repo.changelog
-    for r in getset(repo, spanset(repo), x):
+    for r in getset(repo, fullreposet(repo), x):
         ps.add(cl.parentrevs(r)[1])
     ps -= set([node.nullrev])
     return subset & ps
@@ -1435,7 +1439,7 @@
     else:
         ps = set()
         cl = repo.changelog
-        for r in getset(repo, spanset(repo), x):
+        for r in getset(repo, fullreposet(repo), x):
             ps.update(cl.parentrevs(r))
     ps -= set([node.nullrev])
     return subset & ps
@@ -1548,7 +1552,7 @@
     except (TypeError, ValueError):
         # i18n: "rev" is a keyword
         raise error.ParseError(_("rev expects a number"))
-    if l not in fullreposet(repo) and l != node.nullrev:
+    if l not in repo.changelog and l != node.nullrev:
         return baseset()
     return subset & baseset([l])
 
@@ -1676,7 +1680,7 @@
     """``roots(set)``
     Changesets in set with no parent changeset in set.
     """
-    s = getset(repo, spanset(repo), x)
+    s = getset(repo, fullreposet(repo), x)
     subset = baseset([r for r in s if r in subset])
     cs = _children(repo, subset, s)
     return subset - cs
@@ -1754,6 +1758,49 @@
     l.sort()
     return baseset([e[-1] for e in l])
 
+def subrepo(repo, subset, x):
+    """``subrepo([pattern])``
+    Changesets that add, modify or remove the given subrepo.  If no subrepo
+    pattern is named, any subrepo changes are returned.
+    """
+    # i18n: "subrepo" is a keyword
+    args = getargs(x, 0, 1, _('subrepo takes at most one argument'))
+    if len(args) != 0:
+        pat = getstring(args[0], _("subrepo requires a pattern"))
+
+    m = matchmod.exact(repo.root, repo.root, ['.hgsubstate'])
+
+    def submatches(names):
+        k, p, m = _stringmatcher(pat)
+        for name in names:
+            if m(name):
+                yield name
+
+    def matches(x):
+        c = repo[x]
+        s = repo.status(c.p1().node(), c.node(), match=m)
+
+        if len(args) == 0:
+            return s.added or s.modified or s.removed
+
+        if s.added:
+            return util.any(submatches(c.substate.keys()))
+
+        if s.modified:
+            subs = set(c.p1().substate.keys())
+            subs.update(c.substate.keys())
+
+            for path in submatches(subs):
+                if c.p1().substate.get(path) != c.substate.get(path):
+                    return True
+
+        if s.removed:
+            return util.any(submatches(c.p1().substate.keys()))
+
+        return False
+
+    return subset.filter(matches)
+
 def _stringmatcher(pattern):
     """
     accepts a string, possibly starting with 're:' or 'literal:' prefix.
@@ -1851,6 +1898,14 @@
     """
     return author(repo, subset, x)
 
+# experimental
+def wdir(repo, subset, x):
+    # i18n: "wdir" is a keyword
+    getargs(x, 0, 0, _("wdir takes no arguments"))
+    if None in subset:
+        return baseset([None])
+    return baseset()
+
 # for internal use
 def _list(repo, subset, x):
     s = getstring(x, "internal error")
@@ -1941,11 +1996,13 @@
     "roots": roots,
     "sort": sort,
     "secret": secret,
+    "subrepo": subrepo,
     "matching": matching,
     "tag": tag,
     "tagged": tagged,
     "user": user,
     "unstable": unstable,
+    "wdir": wdir,
     "_list": _list,
     "_intlist": _intlist,
     "_hexlist": _hexlist,
@@ -2018,6 +2075,7 @@
     "tagged",
     "user",
     "unstable",
+    "wdir",
     "_list",
     "_intlist",
     "_hexlist",
@@ -2153,7 +2211,7 @@
     if isinstance(tree, tuple):
         arg = _getaliasarg(tree)
         if arg is not None and (not known or arg not in known):
-            raise error.ParseError(_("not a function: %s") % '_aliasarg')
+            raise error.UnknownIdentifier('_aliasarg', [])
         for t in tree:
             _checkaliasarg(t, known)
 
@@ -2243,6 +2301,71 @@
     except error.ParseError, inst:
         return (decl, None, None, parseerrordetail(inst))
 
+def _parsealiasdefn(defn, args):
+    """Parse alias definition ``defn``
+
+    This function also replaces alias argument references in the
+    specified definition by ``_aliasarg(ARGNAME)``.
+
+    ``args`` is a list of alias argument names, or None if the alias
+    is declared as a symbol.
+
+    This returns "tree" as parsing result.
+
+    >>> args = ['$1', '$2', 'foo']
+    >>> print prettyformat(_parsealiasdefn('$1 or foo', args))
+    (or
+      (func
+        ('symbol', '_aliasarg')
+        ('string', '$1'))
+      (func
+        ('symbol', '_aliasarg')
+        ('string', 'foo')))
+    >>> try:
+    ...     _parsealiasdefn('$1 or $bar', args)
+    ... except error.ParseError, inst:
+    ...     print parseerrordetail(inst)
+    at 6: '$' not for alias arguments
+    >>> args = ['$1', '$10', 'foo']
+    >>> print prettyformat(_parsealiasdefn('$10 or foobar', args))
+    (or
+      (func
+        ('symbol', '_aliasarg')
+        ('string', '$10'))
+      ('symbol', 'foobar'))
+    >>> print prettyformat(_parsealiasdefn('"$1" or "foo"', args))
+    (or
+      ('string', '$1')
+      ('string', 'foo'))
+    """
+    def tokenizedefn(program, lookup=None):
+        if args:
+            argset = set(args)
+        else:
+            argset = set()
+
+        for t, value, pos in _tokenizealias(program, lookup=lookup):
+            if t == 'symbol':
+                if value in argset:
+                    # emulate tokenization of "_aliasarg('ARGNAME')":
+                    # "_aliasarg()" is an unknown symbol only used separate
+                    # alias argument placeholders from regular strings.
+                    yield ('symbol', '_aliasarg', pos)
+                    yield ('(', None, pos)
+                    yield ('string', value, pos)
+                    yield (')', None, pos)
+                    continue
+                elif value.startswith('$'):
+                    raise error.ParseError(_("'$' not for alias arguments"),
+                                           pos)
+            yield (t, value, pos)
+
+    p = parser.parser(tokenizedefn, elements)
+    tree, pos = p.parse(defn)
+    if pos != len(defn):
+        raise error.ParseError(_('invalid token'), pos)
+    return tree
+
 class revsetalias(object):
     # whether own `error` information is already shown or not.
     # this avoids showing same warning multiple times at each `findaliases`.
@@ -2260,16 +2383,8 @@
                            ' "%s": %s') % (self.name, self.error)
             return
 
-        if self.args:
-            for arg in self.args:
-                # _aliasarg() is an unknown symbol only used separate
-                # alias argument placeholders from regular strings.
-                value = value.replace(arg, '_aliasarg(%r)' % (arg,))
-
         try:
-            self.replacement, pos = parse(value)
-            if pos != len(value):
-                raise error.ParseError(_('invalid token'), pos)
+            self.replacement = _parsealiasdefn(value, self.args)
             # Check for placeholder injection
             _checkaliasarg(self.replacement, self.args)
         except error.ParseError, inst:
@@ -2379,6 +2494,10 @@
     p = parser.parser(tokenize, elements)
     return p.parse(spec, lookup=lookup)
 
+def posttreebuilthook(tree, repo):
+    # hook for extensions to execute code on the optimized tree
+    pass
+
 def match(ui, spec, repo=None):
     if not spec:
         raise error.ParseError(_("empty query"))
@@ -2392,7 +2511,10 @@
         tree = findaliases(ui, tree, showwarning=ui.warn)
     tree = foldconcat(tree)
     weight, tree = optimize(tree, True)
-    def mfunc(repo, subset):
+    posttreebuilthook(tree, repo)
+    def mfunc(repo, subset=None):
+        if subset is None:
+            subset = fullreposet(repo)
         if util.safehasattr(subset, 'isascending'):
             result = getset(repo, subset, tree)
         else:
@@ -2602,6 +2724,8 @@
         """Returns a new object with the intersection of the two collections.
 
         This is part of the mandatory API for smartset."""
+        if isinstance(other, fullreposet):
+            return self
         return self.filter(other.__contains__, cache=False)
 
     def __add__(self, other):
@@ -2720,6 +2844,10 @@
                 return self._asclist[0]
         return None
 
+    def __repr__(self):
+        d = {None: '', False: '-', True: '+'}[self._ascending]
+        return '<%s%s %r>' % (type(self).__name__, d, self._list)
+
 class filteredset(abstractsmartset):
     """Duck type for baseset class which iterates lazily over the revisions in
     the subset and contains a function which tests for membership in the
@@ -2804,6 +2932,9 @@
             return x
         return None
 
+    def __repr__(self):
+        return '<%s %r>' % (type(self).__name__, self._subset)
+
 class addset(abstractsmartset):
     """Represent the addition of two sets
 
@@ -2977,6 +3108,10 @@
         self.reverse()
         return val
 
+    def __repr__(self):
+        d = {None: '', False: '-', True: '+'}[self._ascending]
+        return '<%s%s %r, %r>' % (type(self).__name__, d, self._r1, self._r2)
+
 class generatorset(abstractsmartset):
     """Wrap a generator for lazy iteration
 
@@ -3146,18 +3281,11 @@
             return it().next()
         return None
 
-def spanset(repo, start=None, end=None):
-    """factory function to dispatch between fullreposet and actual spanset
-
-    Feel free to update all spanset call sites and kill this function at some
-    point.
-    """
-    if start is None and end is None:
-        return fullreposet(repo)
-    return _spanset(repo, start, end)
-
-
-class _spanset(abstractsmartset):
+    def __repr__(self):
+        d = {False: '-', True: '+'}[self._ascending]
+        return '<%s%s>' % (type(self).__name__, d)
+
+class spanset(abstractsmartset):
     """Duck type for baseset class which represents a range of revisions and
     can work lazily and without having all the range in memory
 
@@ -3261,15 +3389,26 @@
             return x
         return None
 
-class fullreposet(_spanset):
+    def __repr__(self):
+        d = {False: '-', True: '+'}[self._ascending]
+        return '<%s%s %d:%d>' % (type(self).__name__, d,
+                                 self._start, self._end - 1)
+
+class fullreposet(spanset):
     """a set containing all revisions in the repo
 
-    This class exists to host special optimization.
+    This class exists to host special optimization and magic to handle virtual
+    revisions such as "null".
     """
 
     def __init__(self, repo):
         super(fullreposet, self).__init__(repo)
 
+    def __contains__(self, rev):
+        # assumes the given rev is valid
+        hidden = self._hiddenrevs
+        return not (hidden and rev in hidden)
+
     def __and__(self, other):
         """As self contains the whole repo, all of the other set should also be
         in self. Therefore `self & other = other`.
@@ -3288,5 +3427,19 @@
         other.sort(reverse=self.isdescending())
         return other
 
+def prettyformatset(revs):
+    lines = []
+    rs = repr(revs)
+    p = 0
+    while p < len(rs):
+        q = rs.find('<', p + 1)
+        if q < 0:
+            q = len(rs)
+        l = rs.count('<', 0, p) - rs.count('>', 0, p)
+        assert l >= 0
+        lines.append((l, rs[p:q].rstrip()))
+        p = q
+    return '\n'.join('  ' * l + s for l, s in lines)
+
 # tell hggettext to extract docstrings from these functions:
 i18nfunctions = symbols.values()
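The new subrepo([pattern]) predicate reuses _stringmatcher(), so patterns follow the usual revset convention: a bare string matches literally, 're:' switches to a regular expression, and 'literal:' escapes a name that really does start with 're:'. A simplified standalone sketch of that convention (the real helper also returns the pattern kind and text alongside the matcher):

    import re

    def stringmatcher(pattern):
        # 're:<pat>' -> regex search, 'literal:<name>' or bare -> equality
        if pattern.startswith('re:'):
            regex = re.compile(pattern[3:])
            return lambda s: regex.search(s) is not None
        if pattern.startswith('literal:'):
            pattern = pattern[len('literal:'):]
        return lambda s: s == pattern

    assert stringmatcher('re:^sub')('subrepo1')
    assert not stringmatcher('literal:sub')('subrepo1')
    assert stringmatcher('sub')('sub')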
--- a/mercurial/scmutil.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/scmutil.py	Thu Apr 16 20:57:51 2015 -0500
@@ -7,10 +7,10 @@
 
 from i18n import _
 from mercurial.node import nullrev
-import util, error, osutil, revset, similar, encoding, phases, parsers
+import util, error, osutil, revset, similar, encoding, phases
 import pathutil
 import match as matchmod
-import os, errno, re, glob, tempfile
+import os, errno, re, glob, tempfile, shutil, stat, inspect
 
 if os.name == 'nt':
     import scmwindows as scmplatform
@@ -172,6 +172,40 @@
         self._loweredfiles.add(fl)
         self._newfiles.add(f)
 
+def develwarn(tui, msg):
+    """issue a developer warning message"""
+    msg = 'devel-warn: ' + msg
+    if tui.tracebackflag:
+        util.debugstacktrace(msg, 2)
+    else:
+        curframe = inspect.currentframe()
+        calframe = inspect.getouterframes(curframe, 2)
+        tui.write_err('%s at: %s:%s (%s)\n' % ((msg,) + calframe[2][1:4]))
+
+def filteredhash(repo, maxrev):
+    """build hash of filtered revisions in the current repoview.
+
+    Multiple caches perform up-to-date validation by checking that the
+    tiprev and tipnode stored in the cache file match the current repository.
+    However, this is not sufficient for validating repoviews because the set
+    of revisions in the view may change without the repository tiprev and
+    tipnode changing.
+
+    This function hashes all the revs filtered from the view and returns
+    that SHA-1 digest.
+    """
+    cl = repo.changelog
+    if not cl.filteredrevs:
+        return None
+    key = None
+    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
+    if revs:
+        s = util.sha1()
+        for rev in revs:
+            s.update('%s;' % rev)
+        key = s.digest()
+    return key
+
 class abstractvfs(object):
     """Abstract base class; cannot be instantiated"""
 
@@ -316,6 +350,31 @@
     def readlink(self, path):
         return os.readlink(self.join(path))
 
+    def removedirs(self, path=None):
+        """Remove a leaf directory and all empty intermediate ones
+        """
+        return util.removedirs(self.join(path))
+
+    def rmtree(self, path=None, ignore_errors=False, forcibly=False):
+        """Remove a directory tree recursively
+
+        If ``forcibly``, this tries to remove READ-ONLY files, too.
+        """
+        if forcibly:
+            def onerror(function, path, excinfo):
+                if function is not os.remove:
+                    raise
+                # read-only files cannot be unlinked under Windows
+                s = os.stat(path)
+                if (s.st_mode & stat.S_IWRITE) != 0:
+                    raise
+                os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
+                os.remove(path)
+        else:
+            onerror = None
+        return shutil.rmtree(self.join(path),
+                             ignore_errors=ignore_errors, onerror=onerror)
+
     def setflags(self, path, l, x):
         return util.setflags(self.join(path), l, x)
 
@@ -331,6 +390,22 @@
     def utime(self, path=None, t=None):
         return os.utime(self.join(path), t)
 
+    def walk(self, path=None, onerror=None):
+        """Yield (dirpath, dirs, files) tuple for each directories under path
+
+        ``dirpath`` is relative one from the root of this vfs. This
+        uses ``os.sep`` as path separator, even you specify POSIX
+        style ``path``.
+
+        "The root of this vfs" is represented as empty ``dirpath``.
+        """
+        root = os.path.normpath(self.join(None))
+        # when dirpath == root, dirpath[prefixlen:] becomes empty
+        # because len(dirpath) < prefixlen.
+        prefixlen = len(pathutil.normasprefix(root))
+        for dirpath, dirs, files in os.walk(self.join(path), onerror=onerror):
+            yield (dirpath[prefixlen:], dirs, files)
+
 class vfs(abstractvfs):
     '''Operate files relative to a base directory
 
@@ -445,9 +520,9 @@
         else:
             self.write(dst, src)
 
-    def join(self, path):
+    def join(self, path, *insidef):
         if path:
-            return os.path.join(self.base, path)
+            return os.path.join(self.base, path, *insidef)
         else:
             return self.base
 
@@ -475,9 +550,9 @@
     def __call__(self, path, *args, **kwargs):
         return self.vfs(self._filter(path), *args, **kwargs)
 
-    def join(self, path):
+    def join(self, path, *insidef):
         if path:
-            return self.vfs.join(self._filter(path))
+            return self.vfs.join(self._filter(self.vfs.reljoin(path, *insidef)))
         else:
             return self.vfs.join(path)
 
@@ -582,6 +657,13 @@
             _rcpath = osrcpath()
     return _rcpath
 
+def intrev(repo, rev):
+    """Return integer for a given revision that can be used in comparison or
+    arithmetic operation"""
+    if rev is None:
+        return len(repo)
+    return rev
+
 def revsingle(repo, revspec, default='.'):
     if not revspec and revspec != 0:
         return repo[default]
@@ -628,12 +710,22 @@
         return repo[val].rev()
 
     seen, l = set(), revset.baseset([])
+
+    revsetaliases = [alias for (alias, _) in
+                     repo.ui.configitems("revsetalias")]
+
     for spec in revs:
         if l and not seen:
             seen = set(l)
         # attempt to parse old-style ranges first to deal with
         # things like old-tag which contain query metacharacters
         try:
+            # ... except for revset aliases without arguments. These
+            # should be parsed as soon as possible, because they might
+            # clash with a hash prefix.
+            if spec in revsetaliases:
+                raise error.RepoLookupError
+
             if isinstance(spec, int):
                 seen.add(spec)
                 l = l + revset.baseset([spec])
@@ -641,6 +733,9 @@
 
             if _revrangesep in spec:
                 start, end = spec.split(_revrangesep, 1)
+                if start in revsetaliases or end in revsetaliases:
+                    raise error.RepoLookupError
+
                 start = revfix(repo, start, 0)
                 end = revfix(repo, end, len(repo) - 1)
                 if end == nullrev and start < 0:
@@ -672,11 +767,11 @@
         # fall through to new-style queries if old-style fails
         m = revset.match(repo.ui, spec, repo)
         if seen or l:
-            dl = [r for r in m(repo, revset.spanset(repo)) if r not in seen]
+            dl = [r for r in m(repo) if r not in seen]
             l = l + revset.baseset(dl)
             seen.update(dl)
         else:
-            l = m(repo, revset.spanset(repo))
+            l = m(repo)
 
     return l
 
@@ -710,8 +805,10 @@
     m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                          default)
     def badfn(f, msg):
-        ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
+        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
     m.bad = badfn
+    if m.always():
+        pats = []
     return m, pats
 
 def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
@@ -1061,48 +1158,3 @@
             del obj.__dict__[self.name]
         except KeyError:
             raise AttributeError(self.name)
-
-class dirs(object):
-    '''a multiset of directory names from a dirstate or manifest'''
-
-    def __init__(self, map, skip=None):
-        self._dirs = {}
-        addpath = self.addpath
-        if util.safehasattr(map, 'iteritems') and skip is not None:
-            for f, s in map.iteritems():
-                if s[0] != skip:
-                    addpath(f)
-        else:
-            for f in map:
-                addpath(f)
-
-    def addpath(self, path):
-        dirs = self._dirs
-        for base in finddirs(path):
-            if base in dirs:
-                dirs[base] += 1
-                return
-            dirs[base] = 1
-
-    def delpath(self, path):
-        dirs = self._dirs
-        for base in finddirs(path):
-            if dirs[base] > 1:
-                dirs[base] -= 1
-                return
-            del dirs[base]
-
-    def __iter__(self):
-        return self._dirs.iterkeys()
-
-    def __contains__(self, d):
-        return d in self._dirs
-
-if util.safehasattr(parsers, 'dirs'):
-    dirs = parsers.dirs
-
-def finddirs(path):
-    pos = path.rfind('/')
-    while pos != -1:
-        yield path[:pos]
-        pos = path.rfind('/', 0, pos)
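The new scmutil.filteredhash() gives caches a second validation key besides tiprev/tipnode: a SHA-1 over the sorted filtered revisions, so a cache is invalidated when the repoview changes even though the tip did not. A standalone sketch of the same idea using hashlib directly (the real code goes through util.sha1 and unencoded strings):

    import hashlib

    def filteredhash(filteredrevs, maxrev):
        # hash the filtered revisions at or below maxrev; None when nothing
        # is filtered, so callers can record "no filtering" cheaply
        revs = sorted(r for r in filteredrevs if r <= maxrev)
        if not revs:
            return None
        s = hashlib.sha1()
        for rev in revs:
            s.update(('%s;' % rev).encode('ascii'))
        return s.digest()

    # order does not matter, membership does
    assert filteredhash(set([9, 2, 5]), 10) == filteredhash(set([2, 5, 9]), 9)
    assert filteredhash(set([2, 5]), 9) != filteredhash(set([2, 5, 9]), 9)
    assert filteredhash(set(), 9) is None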
--- a/mercurial/sslutil.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/sslutil.py	Thu Apr 16 20:57:51 2015 -0500
@@ -10,12 +10,16 @@
 
 from mercurial import util
 from mercurial.i18n import _
+
+_canloaddefaultcerts = False
 try:
     # avoid using deprecated/broken FakeSocket in python 2.6
     import ssl
     CERT_REQUIRED = ssl.CERT_REQUIRED
     try:
         ssl_context = ssl.SSLContext
+        _canloaddefaultcerts = util.safehasattr(ssl_context,
+                                                'load_default_certs')
 
         def ssl_wrap_socket(sock, keyfile, certfile, cert_reqs=ssl.CERT_NONE,
                             ca_certs=None, serverhostname=None):
@@ -35,6 +39,8 @@
             sslcontext.verify_mode = cert_reqs
             if ca_certs is not None:
                 sslcontext.load_verify_locations(cafile=ca_certs)
+            elif _canloaddefaultcerts:
+                sslcontext.load_default_certs()
 
             sslsocket = sslcontext.wrap_socket(sock,
                                                server_hostname=serverhostname)
@@ -123,29 +129,40 @@
       for using system certificate store CAs in addition to the provided
       cacerts file
     """
-    if sys.platform != 'darwin' or util.mainfrozen():
+    if sys.platform != 'darwin' or util.mainfrozen() or not sys.executable:
         return False
-    exe = (sys.executable or '').lower()
+    exe = os.path.realpath(sys.executable).lower()
     return (exe.startswith('/usr/bin/python') or
             exe.startswith('/system/library/frameworks/python.framework/'))
 
+def _defaultcacerts():
+    """return path to CA certificates; None for system's store; ! to disable"""
+    if _plainapplepython():
+        dummycert = os.path.join(os.path.dirname(__file__), 'dummycert.pem')
+        if os.path.exists(dummycert):
+            return dummycert
+    if _canloaddefaultcerts:
+        return None
+    return '!'
+
 def sslkwargs(ui, host):
     kws = {}
     hostfingerprint = ui.config('hostfingerprints', host)
     if hostfingerprint:
         return kws
     cacerts = ui.config('web', 'cacerts')
-    if cacerts:
+    if cacerts == '!':
+        pass
+    elif cacerts:
         cacerts = util.expandpath(cacerts)
         if not os.path.exists(cacerts):
             raise util.Abort(_('could not find web.cacerts: %s') % cacerts)
-    elif cacerts is None and _plainapplepython():
-        dummycert = os.path.join(os.path.dirname(__file__), 'dummycert.pem')
-        if os.path.exists(dummycert):
-            ui.debug('using %s to enable OS X system CA\n' % dummycert)
-            ui.setconfig('web', 'cacerts', dummycert, 'dummy')
-            cacerts = dummycert
-    if cacerts:
+    else:
+        cacerts = _defaultcacerts()
+        if cacerts and cacerts != '!':
+            ui.debug('using %s to enable OS X system CA\n' % cacerts)
+        ui.setconfig('web', 'cacerts', cacerts, 'defaultcacerts')
+    if cacerts != '!':
         kws.update({'ca_certs': cacerts,
                     'cert_reqs': CERT_REQUIRED,
                     })
@@ -194,7 +211,7 @@
                                  hint=_('check hostfingerprint configuration'))
             self.ui.debug('%s certificate matched fingerprint %s\n' %
                           (host, nicefingerprint))
-        elif cacerts:
+        elif cacerts != '!':
             msg = _verifycert(peercert2, host)
             if msg:
                 raise util.Abort(_('%s certificate error: %s') % (host, msg),
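The sslutil changes amount to a three-way web.cacerts convention: an explicit path loads that file, an unset value falls back to the interpreter's default certificate store when ssl.SSLContext.load_default_certs exists, and '!' disables verification. A rough standalone sketch of the context-building side of that logic (protocol choice and error handling are simplified, and this is not the patch's sslkwargs code):

    import ssl

    def makecontext(cafile=None):
        ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        ctx.verify_mode = ssl.CERT_REQUIRED
        if cafile == '!':
            ctx.verify_mode = ssl.CERT_NONE       # verification disabled
        elif cafile is not None:
            ctx.load_verify_locations(cafile=cafile)
        elif hasattr(ctx, 'load_default_certs'):
            ctx.load_default_certs()              # system certificate store
        else:
            ctx.verify_mode = ssl.CERT_NONE       # nothing to verify against
        return ctx

    assert makecontext('!').verify_mode == ssl.CERT_NONE
    assert makecontext().verify_mode in (ssl.CERT_REQUIRED, ssl.CERT_NONE)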
--- a/mercurial/statichttprepo.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/statichttprepo.py	Thu Apr 16 20:57:51 2015 -0500
@@ -141,8 +141,10 @@
         self._tags = None
         self.nodetagscache = None
         self._branchcaches = {}
+        self._revbranchcache = None
         self.encodepats = None
         self.decodepats = None
+        self._transref = None
 
     def _restrictcapabilities(self, caps):
         caps = super(statichttprepository, self)._restrictcapabilities(caps)
--- a/mercurial/subrepo.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/subrepo.py	Thu Apr 16 20:57:51 2015 -0500
@@ -6,7 +6,7 @@
 # GNU General Public License version 2 or any later version.
 
 import copy
-import errno, os, re, shutil, posixpath, sys
+import errno, os, re, posixpath, sys
 import xml.dom.minidom
 import stat, subprocess, tarfile
 from i18n import _
@@ -70,11 +70,14 @@
                 if err.errno != errno.ENOENT:
                     raise
                 # handle missing subrepo spec files as removed
-                ui.warn(_("warning: subrepo spec file %s not found\n") % f)
+                ui.warn(_("warning: subrepo spec file \'%s\' not found\n") %
+                        util.pathto(ctx.repo().root, ctx.repo().getcwd(), f))
                 return
             p.parse(f, data, sections, remap, read)
         else:
-            raise util.Abort(_("subrepo spec file %s not found") % f)
+            repo = ctx.repo()
+            raise util.Abort(_("subrepo spec file \'%s\' not found") %
+                             util.pathto(repo.root, repo.getcwd(), f))
 
     if '.hgsub' in ctx:
         read('.hgsub')
@@ -92,9 +95,11 @@
                 try:
                     revision, path = l.split(" ", 1)
                 except ValueError:
+                    repo = ctx.repo()
                     raise util.Abort(_("invalid subrepository revision "
-                                       "specifier in .hgsubstate line %d")
-                                     % (i + 1))
+                                       "specifier in \'%s\' line %d")
+                                     % (util.pathto(repo.root, repo.getcwd(),
+                                        '.hgsubstate'), (i + 1)))
                 rev[path] = revision
         except IOError, err:
             if err.errno != errno.ENOENT:
@@ -127,7 +132,7 @@
             src = src.lstrip() # strip any extra whitespace after ']'
 
         if not util.url(src).isabs():
-            parent = _abssource(ctx._repo, abort=False)
+            parent = _abssource(ctx.repo(), abort=False)
             if parent:
                 parent = util.url(parent)
                 parent.path = posixpath.join(parent.path or '', src)
@@ -275,11 +280,7 @@
 
 def subrelpath(sub):
     """return path to this subrepo as seen from outermost repo"""
-    if util.safehasattr(sub, '_relpath'):
-        return sub._relpath
-    if not util.safehasattr(sub, '_repo'):
-        return sub._path
-    return reporelpath(sub._repo)
+    return sub._relpath
 
 def _abssource(repo, push=False, abort=True):
     """return pull/push path of repo - either based on parent repo .hgsub info
@@ -308,8 +309,8 @@
     if abort:
         raise util.Abort(_("default path for subrepository not found"))
 
-def _sanitize(ui, path, ignore):
-    for dirname, dirs, names in os.walk(path):
+def _sanitize(ui, vfs, ignore):
+    for dirname, dirs, names in vfs.walk():
         for i, d in enumerate(dirs):
             if d.lower() == ignore:
                 del dirs[i]
@@ -319,8 +320,8 @@
         for f in names:
             if f.lower() == 'hgrc':
                 ui.warn(_("warning: removing potentially hostile 'hgrc' "
-                          "in '%s'\n") % dirname)
-                os.unlink(os.path.join(dirname, f))
+                          "in '%s'\n") % vfs.join(dirname))
+                vfs.unlink(vfs.reljoin(dirname, f))
 
 def subrepo(ctx, path):
     """return instance of the right subrepo class for subrepo in path"""
@@ -332,7 +333,7 @@
     import hg as h
     hg = h
 
-    pathutil.pathauditor(ctx._repo.root)(path)
+    pathutil.pathauditor(ctx.repo().root)(path)
     state = ctx.substate[path]
     if state[2] not in types:
         raise util.Abort(_('unknown subrepo type %s') % state[2])
@@ -373,8 +374,18 @@
 
 class abstractsubrepo(object):
 
-    def __init__(self, ui):
-        self.ui = ui
+    def __init__(self, ctx, path):
+        """Initialize abstractsubrepo part
+
+        ``ctx`` is the context referring to this subrepository in the
+        parent repository.
+
+        ``path`` is the path to this subrepository as seen from the
+        innermost repository.
+        """
+        self.ui = ctx.repo().ui
+        self._ctx = ctx
+        self._path = path
 
     def storeclean(self, path):
         """
@@ -390,6 +401,25 @@
         """
         raise NotImplementedError
 
+    def dirtyreason(self, ignoreupdate=False):
+        """return reason string if it is ``dirty()``
+
+        Returned string should have enough information for the message
+        of exception.
+
+        This returns None, otherwise.
+        """
+        if self.dirty(ignoreupdate=ignoreupdate):
+            return _("uncommitted changes in subrepository '%s'"
+                     ) % subrelpath(self)
+
+    def bailifchanged(self, ignoreupdate=False):
+        """raise Abort if subrepository is ``dirty()``
+        """
+        dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate)
+        if dirtyreason:
+            raise util.Abort(dirtyreason)
+
     def basestate(self):
         """current working directory base state, disregarding .hgsubstate
         state and working directory modifications"""
@@ -469,6 +499,10 @@
         """return file flags"""
         return ''
 
+    def printfiles(self, ui, m, fm, fmt):
+        """handle the files command for this subrepo"""
+        return 1
+
     def archive(self, archiver, prefix, match=None):
         if match is not None:
             files = [f for f in self.files() if match(f)]
@@ -482,7 +516,7 @@
             flags = self.fileflags(name)
             mode = 'x' in flags and 0755 or 0644
             symlink = 'l' in flags
-            archiver.addfile(os.path.join(prefix, self._path, name),
+            archiver.addfile(self.wvfs.reljoin(prefix, self._path, name),
                              mode, symlink, self.filedata(name))
             self.ui.progress(_('archiving (%s)') % relpath, i + 1,
                              unit=_('files'), total=total)
@@ -514,12 +548,23 @@
     def shortid(self, revid):
         return revid
 
+    @propertycache
+    def wvfs(self):
+        """return vfs to access the working directory of this subrepository
+        """
+        return scmutil.vfs(self._ctx.repo().wvfs.join(self._path))
+
+    @propertycache
+    def _relpath(self):
+        """return path to this subrepository as seen from outermost repository
+        """
+        return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
+
 class hgsubrepo(abstractsubrepo):
     def __init__(self, ctx, path, state):
-        super(hgsubrepo, self).__init__(ctx._repo.ui)
-        self._path = path
+        super(hgsubrepo, self).__init__(ctx, path)
         self._state = state
-        r = ctx._repo
+        r = ctx.repo()
         root = r.wjoin(path)
         create = not r.wvfs.exists('%s/.hg' % path)
         self._repo = hg.repository(r.baseui, root, create=create)
@@ -623,9 +668,10 @@
     @annotatesubrepoerror
     def add(self, ui, match, prefix, explicitonly, **opts):
         return cmdutil.add(ui, self._repo, match,
-                           os.path.join(prefix, self._path), explicitonly,
-                           **opts)
+                           self.wvfs.reljoin(prefix, self._path),
+                           explicitonly, **opts)
 
+    @annotatesubrepoerror
     def addremove(self, m, prefix, opts, dry_run, similarity):
         # In the same way as sub directories are processed, once in a subrepo,
         # always entry any of its subrepos.  Don't corrupt the options that will
@@ -633,7 +679,7 @@
         opts = copy.copy(opts)
         opts['subrepos'] = True
         return scmutil.addremove(self._repo, m,
-                                 os.path.join(prefix, self._path), opts,
+                                 self.wvfs.reljoin(prefix, self._path), opts,
                                  dry_run, similarity)
 
     @annotatesubrepoerror
@@ -680,7 +726,7 @@
             s = subrepo(ctx, subpath)
             submatch = matchmod.narrowmatcher(subpath, match)
             total += s.archive(
-                archiver, os.path.join(prefix, self._path), submatch)
+                archiver, self.wvfs.reljoin(prefix, self._path), submatch)
         return total
 
     @annotatesubrepoerror
@@ -734,7 +780,8 @@
             self.ui.status(_('cloning subrepo %s from %s\n')
                            % (subrelpath(self), srcurl))
             parentrepo = self._repo._subparent
-            shutil.rmtree(self._repo.path)
+            # use self._repo.vfs instead of self.wvfs to remove .hg only
+            self._repo.vfs.rmtree()
             other, cloned = hg.clone(self._repo._subparent.baseui, {},
                                      other, self._repo.root,
                                      update=False)
@@ -835,7 +882,7 @@
     def files(self):
         rev = self._state[1]
         ctx = self._repo[rev]
-        return ctx.manifest()
+        return ctx.manifest().keys()
 
     def filedata(self, name):
         rev = self._state[1]
@@ -846,6 +893,17 @@
         ctx = self._repo[rev]
         return ctx.flags(name)
 
+    @annotatesubrepoerror
+    def printfiles(self, ui, m, fm, fmt):
+        # If the parent context is a workingctx, use the workingctx here for
+        # consistency.
+        if self._ctx.rev() is None:
+            ctx = self._repo[None]
+        else:
+            rev = self._state[1]
+            ctx = self._repo[rev]
+        return cmdutil.files(ui, ctx, m, fm, fmt, True)
+
     def walk(self, match):
         ctx = self._repo[None]
         return ctx.walk(match)
@@ -853,13 +911,13 @@
     @annotatesubrepoerror
     def forget(self, match, prefix):
         return cmdutil.forget(self.ui, self._repo, match,
-                              os.path.join(prefix, self._path), True)
+                              self.wvfs.reljoin(prefix, self._path), True)
 
     @annotatesubrepoerror
     def removefiles(self, matcher, prefix, after, force, subrepos):
         return cmdutil.remove(self.ui, self._repo, matcher,
-                              os.path.join(prefix, self._path), after, force,
-                              subrepos)
+                              self.wvfs.reljoin(prefix, self._path),
+                              after, force, subrepos)
 
     @annotatesubrepoerror
     def revert(self, substate, *pats, **opts):
@@ -877,13 +935,11 @@
             opts['date'] = None
             opts['rev'] = substate[1]
 
-            pats = []
-            if not opts.get('all'):
-                pats = ['set:modified()']
             self.filerevert(*pats, **opts)
 
         # Update the repo to the revision specified in the given substate
-        self.get(substate, overwrite=True)
+        if not opts.get('dry_run'):
+            self.get(substate, overwrite=True)
 
     def filerevert(self, *pats, **opts):
         ctx = self._repo[opts['rev']]
@@ -897,12 +953,23 @@
     def shortid(self, revid):
         return revid[:12]
 
+    @propertycache
+    def wvfs(self):
+        """return own wvfs for efficiency and consitency
+        """
+        return self._repo.wvfs
+
+    @propertycache
+    def _relpath(self):
+        """return path to this subrepository as seen from outermost repository
+        """
+        # Keep consistent dir separators by avoiding vfs.join(self._path)
+        return reporelpath(self._repo)
+
 class svnsubrepo(abstractsubrepo):
     def __init__(self, ctx, path, state):
-        super(svnsubrepo, self).__init__(ctx._repo.ui)
-        self._path = path
+        super(svnsubrepo, self).__init__(ctx, path)
         self._state = state
-        self._ctx = ctx
         self._exe = util.findexe('svn')
         if not self._exe:
             raise util.Abort(_("'svn' executable not found for subrepo '%s'")
@@ -923,7 +990,8 @@
                 cmd.append('--non-interactive')
         cmd.extend(commands)
         if filename is not None:
-            path = os.path.join(self._ctx._repo.origroot, self._path, filename)
+            path = self.wvfs.reljoin(self._ctx.repo().origroot,
+                                     self._path, filename)
             cmd.append(path)
         env = dict(os.environ)
         # Avoid localized output, preserve current locale for everything else.
@@ -1055,20 +1123,9 @@
             return
         self.ui.note(_('removing subrepo %s\n') % self._path)
 
-        def onerror(function, path, excinfo):
-            if function is not os.remove:
-                raise
-            # read-only files cannot be unlinked under Windows
-            s = os.stat(path)
-            if (s.st_mode & stat.S_IWRITE) != 0:
-                raise
-            os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
-            os.remove(path)
-
-        path = self._ctx._repo.wjoin(self._path)
-        shutil.rmtree(path, onerror=onerror)
+        self.wvfs.rmtree(forcibly=True)
         try:
-            os.removedirs(os.path.dirname(path))
+            self._ctx.repo().wvfs.removedirs(os.path.dirname(self._path))
         except OSError:
             pass
 
@@ -1083,7 +1140,7 @@
         # update to a directory which has since been deleted and recreated.
         args.append('%s@%s' % (state[0], state[1]))
         status, err = self._svncommand(args, failok=True)
-        _sanitize(self.ui, self._ctx._repo.wjoin(self._path), '.svn')
+        _sanitize(self.ui, self.wvfs, '.svn')
         if not re.search('Checked out revision [0-9]+.', status):
             if ('is already a working copy for a different URL' in err
                 and (self._wcchanged()[:2] == (False, False))):
@@ -1129,13 +1186,10 @@
 
 class gitsubrepo(abstractsubrepo):
     def __init__(self, ctx, path, state):
-        super(gitsubrepo, self).__init__(ctx._repo.ui)
+        super(gitsubrepo, self).__init__(ctx, path)
         self._state = state
-        self._ctx = ctx
-        self._path = path
-        self._relpath = os.path.join(reporelpath(ctx._repo), path)
-        self._abspath = ctx._repo.wjoin(path)
-        self._subparent = ctx._repo
+        self._abspath = ctx.repo().wjoin(path)
+        self._subparent = ctx.repo()
         self._ensuregit()
 
     def _ensuregit(self):
@@ -1244,7 +1298,7 @@
         return retdata, p.returncode
 
     def _gitmissing(self):
-        return not os.path.exists(os.path.join(self._abspath, '.git'))
+        return not self.wvfs.exists('.git')
 
     def _gitstate(self):
         return self._gitcommand(['rev-parse', 'HEAD'])
@@ -1387,7 +1441,7 @@
                 self._gitcommand(['reset', 'HEAD'])
                 cmd.append('-f')
             self._gitcommand(cmd + args)
-            _sanitize(self.ui, self._abspath, '.git')
+            _sanitize(self.ui, self.wvfs, '.git')
 
         def rawcheckout():
             # no branch to checkout, check it out with no branch
@@ -1436,7 +1490,7 @@
             if tracking[remote] != self._gitcurrentbranch():
                 checkout([tracking[remote]])
             self._gitcommand(['merge', '--ff', remote])
-            _sanitize(self.ui, self._abspath, '.git')
+            _sanitize(self.ui, self.wvfs, '.git')
         else:
             # a real merge would be required, just checkout the revision
             rawcheckout()
@@ -1472,7 +1526,7 @@
                 self.get(state) # fast forward merge
             elif base != self._state[1]:
                 self._gitcommand(['merge', '--no-commit', revision])
-            _sanitize(self.ui, self._abspath, '.git')
+            _sanitize(self.ui, self.wvfs, '.git')
 
         if self.dirty():
             if self._gitstate() != revision:
@@ -1524,6 +1578,47 @@
             return False
 
     @annotatesubrepoerror
+    def add(self, ui, match, prefix, explicitonly, **opts):
+        if self._gitmissing():
+            return []
+
+        (modified, added, removed,
+         deleted, unknown, ignored, clean) = self.status(None, unknown=True,
+                                                         clean=True)
+
+        tracked = set()
+        # 'a'/'m'/'n' dirstate files warn as already tracked; 'r' is added again
+        for l in (modified, added, deleted, clean):
+            tracked.update(l)
+
+        # Unknown files not of interest will be rejected by the matcher
+        files = unknown
+        files.extend(match.files())
+
+        rejected = []
+
+        files = [f for f in sorted(set(files)) if match(f)]
+        for f in files:
+            exact = match.exact(f)
+            command = ["add"]
+            if exact:
+                command.append("-f") #should be added, even if ignored
+            if ui.verbose or not exact:
+                ui.status(_('adding %s\n') % match.rel(f))
+
+            if f in tracked:  # hg prints 'adding' even if already tracked
+                if exact:
+                    rejected.append(f)
+                continue
+            if not opts.get('dry_run'):
+                self._gitcommand(command + [f])
+
+        for f in rejected:
+            ui.warn(_("%s already tracked!\n") % match.abs(f))
+
+        return rejected
+
+    @annotatesubrepoerror
     def remove(self):
         if self._gitmissing():
             return
@@ -1535,14 +1630,13 @@
         # local-only history
         self.ui.note(_('removing subrepo %s\n') % self._relpath)
         self._gitcommand(['config', 'core.bare', 'true'])
-        for f in os.listdir(self._abspath):
+        for f, kind in self.wvfs.readdir():
             if f == '.git':
                 continue
-            path = os.path.join(self._abspath, f)
-            if os.path.isdir(path) and not os.path.islink(path):
-                shutil.rmtree(path)
+            if kind == stat.S_IFDIR:
+                self.wvfs.rmtree(f)
             else:
-                os.remove(path)
+                self.wvfs.unlink(f)
 
     def archive(self, archiver, prefix, match=None):
         total = 0
@@ -1567,7 +1661,7 @@
                 data = info.linkname
             else:
                 data = tar.extractfile(info).read()
-            archiver.addfile(os.path.join(prefix, self._path, info.name),
+            archiver.addfile(self.wvfs.reljoin(prefix, self._path, info.name),
                              info.mode, info.issym(), data)
             total += 1
             self.ui.progress(_('archiving (%s)') % relpath, i + 1,
@@ -1577,11 +1671,30 @@
 
 
     @annotatesubrepoerror
+    def cat(self, match, prefix, **opts):
+        rev = self._state[1]
+        if match.anypats():
+            return 1 # no support for include/exclude yet
+
+        if not match.files():
+            return 1
+
+        for f in match.files():
+            output = self._gitcommand(["show", "%s:%s" % (rev, f)])
+            fp = cmdutil.makefileobj(self._subparent, opts.get('output'),
+                                     self._ctx.node(),
+                                     pathname=self.wvfs.reljoin(prefix, f))
+            fp.write(output)
+            fp.close()
+        return 0
+
+
+    @annotatesubrepoerror
     def status(self, rev2, **opts):
         rev1 = self._state[1]
         if self._gitmissing() or not rev1:
             # if the repo is missing, return no results
-            return [], [], [], [], [], [], []
+            return scmutil.status([], [], [], [], [], [], [])
         modified, added, removed = [], [], []
         self._gitupdatestat()
         if rev2:
@@ -1603,13 +1716,42 @@
 
         deleted, unknown, ignored, clean = [], [], [], []
 
-        if not rev2:
-            command = ['ls-files', '--others', '--exclude-standard']
-            out = self._gitcommand(command)
-            for line in out.split('\n'):
-                if len(line) == 0:
-                    continue
-                unknown.append(line)
+        command = ['status', '--porcelain', '-z']
+        if opts.get('unknown'):
+            command += ['--untracked-files=all']
+        if opts.get('ignored'):
+            command += ['--ignored']
+        out = self._gitcommand(command)
+
+        changedfiles = set()
+        changedfiles.update(modified)
+        changedfiles.update(added)
+        changedfiles.update(removed)
+        for line in out.split('\0'):
+            if not line:
+                continue
+            st = line[0:2]
+            # moves and copies show two files on one line
+            if line.find('\0') >= 0:
+                filename1, filename2 = line[3:].split('\0')
+            else:
+                filename1 = line[3:]
+                filename2 = None
+
+            changedfiles.add(filename1)
+            if filename2:
+                changedfiles.add(filename2)
+
+            if st == '??':
+                unknown.append(filename1)
+            elif st == '!!':
+                ignored.append(filename1)
+
+        if opts.get('clean'):
+            out = self._gitcommand(['ls-files'])
+            for f in out.split('\n'):
+                if f not in changedfiles:
+                    clean.append(f)
 
         return scmutil.status(modified, added, removed, deleted,
                               unknown, ignored, clean)
@@ -1624,7 +1766,7 @@
             # for Git, this also implies '-p'
             cmd.append('-U%d' % diffopts.context)
 
-        gitprefix = os.path.join(prefix, self._path)
+        gitprefix = self.wvfs.reljoin(prefix, self._path)
 
         if diffopts.noprefix:
             cmd.extend(['--src-prefix=%s/' % gitprefix,
@@ -1645,17 +1787,15 @@
         if node2:
             cmd.append(node2)
 
-        if match.anypats():
-            return #No support for include/exclude yet
-
         output = ""
         if match.always():
             output += self._gitcommand(cmd) + '\n'
-        elif match.files():
-            for f in match.files():
-                output += self._gitcommand(cmd + [f]) + '\n'
-        elif match(gitprefix): #Subrepo is matched
-            output += self._gitcommand(cmd) + '\n'
+        else:
+            st = self.status(node2)[:3]
+            files = [f for sublist in st for f in sublist]
+            for f in files:
+                if match(f):
+                    output += self._gitcommand(cmd + ['--', f]) + '\n'
 
         if output.strip():
             ui.write(output)
@@ -1670,10 +1810,10 @@
                 bakname = "%s.orig" % name
                 self.ui.note(_('saving current version of %s as %s\n') %
                         (name, bakname))
-                util.rename(os.path.join(self._abspath, name),
-                            os.path.join(self._abspath, bakname))
+                self.wvfs.rename(name, bakname)
 
-        self.get(substate, overwrite=True)
+        if not opts.get('dry_run'):
+            self.get(substate, overwrite=True)
         return []
 
     def shortid(self, revid):
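
As an aside (not part of the patch), the rewritten gitsubrepo.status() above classifies entries from NUL-terminated `git status --porcelain -z` output. A minimal standalone sketch of that classification, ignoring rename/copy records (which carry an extra NUL-terminated origin path), might look like this:

    # Illustrative sketch only; assumes a git checkout at `path`.
    import subprocess

    def classifystatus(path):
        out = subprocess.check_output(
            ['git', 'status', '--porcelain', '-z',
             '--untracked-files=all', '--ignored'], cwd=path)
        changed, unknown, ignored = [], [], []
        for entry in out.split('\0'):
            if not entry:
                continue
            st, filename = entry[0:2], entry[3:]
            if st == '??':                # untracked
                unknown.append(filename)
            elif st == '!!':              # ignored
                ignored.append(filename)
            else:                         # modified/added/removed/etc.
                changed.append(filename)
        return changed, unknown, ignored
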
--- a/mercurial/tags.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/tags.py	Thu Apr 16 20:57:51 2015 -0500
@@ -15,21 +15,78 @@
 import util
 import encoding
 import error
+from array import array
 import errno
 import time
 
+# Tags computation can be expensive and caches exist to make it fast in
+# the common case.
+#
+# The "hgtagsfnodes1" cache file caches the .hgtags filenode values for
+# each revision in the repository. The file is effectively an array of
+# fixed length records. Read the docs for "hgtagsfnodescache" for technical
+# details.
+#
+# The .hgtags filenode cache grows in proportion to the length of the
+# changelog. The file is truncated when the changelog is stripped.
+#
+# The purpose of the filenode cache is to avoid the most expensive part
+# of finding global tags, which is looking up the .hgtags filenode in the
+# manifest for each head. This can take tens of milliseconds or over 100ms
+# for repositories with very large manifests. Multiplied by dozens or even
+# hundreds of heads, this becomes a significant performance concern.
+#
+# There also exists a separate cache file for each repository filter.
+# These "tags-*" files store information about the history of tags.
+#
+# The tags cache files consist of a cache validation line followed by
+# a history of tags.
+#
+# The cache validation line has the format:
+#
+#   <tiprev> <tipnode> [<filteredhash>]
+#
+# <tiprev> is an integer revision and <tipnode> is a 40 character hex
+# node for that changeset. These redundantly identify the repository
+# tip from the time the cache was written. In addition, <filteredhash>,
+# if present, is a 40 character hex hash of the contents of the filtered
+# revisions for this filter. If the set of filtered revs changes, the
+# hash will change and invalidate the cache.
+#
+# The history part of the tags cache consists of lines of the form:
+#
+#   <node> <tag>
+#
+# (This format is identical to that of .hgtags files.)
+#
+# <tag> is the tag name and <node> is the 40 character hex changeset
+# the tag is associated with.
+#
+# Tags are written sorted by tag name.
+#
+# Tags associated with multiple changesets have an entry for each changeset.
+# The most recent changeset (in terms of revlog ordering for the head
+# setting it) for each tag is last.
+
 def findglobaltags(ui, repo, alltags, tagtypes):
-    '''Find global tags in repo by reading .hgtags from every head that
-    has a distinct version of it, using a cache to avoid excess work.
-    Updates the dicts alltags, tagtypes in place: alltags maps tag name
-    to (node, hist) pair (see _readtags() below), and tagtypes maps tag
-    name to tag type ("global" in this case).'''
+    '''Find global tags in a repo.
+
+    "alltags" maps tag name to (node, hist) 2-tuples.
+
+    "tagtypes" maps tag name to tag type. Global tags always have the
+    "global" tag type.
+
+    The "alltags" and "tagtypes" dicts are updated in place. Empty dicts
+    should be passed in.
+
+    The tags cache is read and updated as a side effect of this call.
+    '''
     # This is so we can be lazy and assume alltags contains only global
     # tags when we pass it to _writetagcache().
     assert len(alltags) == len(tagtypes) == 0, \
            "findglobaltags() should be called first"
 
-    (heads, tagfnode, cachetags, shouldwrite) = _readtagcache(ui, repo)
+    (heads, tagfnode, valid, cachetags, shouldwrite) = _readtagcache(ui, repo)
     if cachetags is not None:
         assert not shouldwrite
         # XXX is this really 100% correct?  are there oddball special
@@ -38,9 +95,9 @@
         _updatetags(cachetags, 'global', alltags, tagtypes)
         return
 
-    seen = set()                    # set of fnode
+    seen = set()  # set of fnode
     fctx = None
-    for head in reversed(heads):        # oldest to newest
+    for head in reversed(heads):  # oldest to newest
         assert head in repo.changelog.nodemap, \
                "tag cache returned bogus head %s" % short(head)
 
@@ -57,10 +114,10 @@
 
     # and update the cache (if necessary)
     if shouldwrite:
-        _writetagcache(ui, repo, heads, tagfnode, alltags)
+        _writetagcache(ui, repo, valid, alltags)
 
 def readlocaltags(ui, repo, alltags, tagtypes):
-    '''Read local tags in repo.  Update alltags and tagtypes.'''
+    '''Read local tags in repo. Update alltags and tagtypes.'''
     try:
         data = repo.vfs.read("localtags")
     except IOError, inst:
@@ -86,14 +143,18 @@
 
 def _readtaghist(ui, repo, lines, fn, recode=None, calcnodelines=False):
     '''Read tag definitions from a file (or any source of lines).
+
     This function returns two sortdicts with similar information:
+
     - the first dict, bintaghist, contains the tag information as expected by
       the _readtags function, i.e. a mapping from tag name to (node, hist):
         - node is the node id from the last line read for that name,
         - hist is the list of node ids previously associated with it (in file
-          order).  All node ids are binary, not hex.
+          order). All node ids are binary, not hex.
+
     - the second dict, hextaglines, is a mapping from tag name to a list of
       [hexnode, line number] pairs, ordered from the oldest to the newest node.
+
     When calcnodelines is False the hextaglines dict is not calculated (an
     empty dict is returned). This is done to improve this function's
     performance in cases where the line numbers are not needed.
@@ -139,10 +200,13 @@
 
 def _readtags(ui, repo, lines, fn, recode=None, calcnodelines=False):
     '''Read tag definitions from a file (or any source of lines).
-    Return a mapping from tag name to (node, hist): node is the node id
-    from the last line read for that name, and hist is the list of node
-    ids previously associated with it (in file order).  All node ids are
-    binary, not hex.'''
+
+    Returns a mapping from tag name to (node, hist).
+
+    "node" is the node id from the last line read for that name. "hist"
+    is the list of node ids previously associated with it (in file order).
+    All node ids are binary, not hex.
+    '''
     filetags, nodelines = _readtaghist(ui, repo, lines, fn, recode=recode,
                                        calcnodelines=calcnodelines)
     for tag, taghist in filetags.items():
@@ -174,64 +238,54 @@
         ahist.extend([n for n in bhist if n not in ahist])
         alltags[name] = anode, ahist
 
-
-# The tag cache only stores info about heads, not the tag contents
-# from each head.  I.e. it doesn't try to squeeze out the maximum
-# performance, but is simpler has a better chance of actually
-# working correctly.  And this gives the biggest performance win: it
-# avoids looking up .hgtags in the manifest for every head, and it
-# can avoid calling heads() at all if there have been no changes to
-# the repo.
+def _filename(repo):
+    """name of a tagcache file for a given repo or repoview"""
+    filename = 'cache/tags2'
+    if repo.filtername:
+        filename = '%s-%s' % (filename, repo.filtername)
+    return filename
 
 def _readtagcache(ui, repo):
-    '''Read the tag cache and return a tuple (heads, fnodes, cachetags,
-    shouldwrite).  If the cache is completely up-to-date, cachetags is a
-    dict of the form returned by _readtags(); otherwise, it is None and
-    heads and fnodes are set.  In that case, heads is the list of all
-    heads currently in the repository (ordered from tip to oldest) and
-    fnodes is a mapping from head to .hgtags filenode.  If those two are
-    set, caller is responsible for reading tag info from each head.'''
+    '''Read the tag cache.
+
+    Returns a tuple (heads, fnodes, validinfo, cachetags, shouldwrite).
+
+    If the cache is completely up-to-date, "cachetags" is a dict of the
+    form returned by _readtags() and "heads", "fnodes", and "validinfo" are
+    None and "shouldwrite" is False.
+
+    If the cache is not up to date, "cachetags" is None. "heads" is a list
+    of all heads currently in the repository, ordered from tip to oldest.
+    "validinfo" is a tuple describing cache validation info. This is used
+    when writing the tags cache. "fnodes" is a mapping from head to .hgtags
+    filenode. "shouldwrite" is True.
+
+    If the cache is not up to date, the caller is responsible for reading tag
+    info from each returned head. (See findglobaltags().)
+    '''
+    import scmutil  # avoid cycle
 
     try:
-        cachefile = repo.vfs('cache/tags', 'r')
+        cachefile = repo.vfs(_filename(repo), 'r')
         # force reading the file for static-http
         cachelines = iter(cachefile)
     except IOError:
         cachefile = None
 
-    # The cache file consists of lines like
-    #   <headrev> <headnode> [<tagnode>]
-    # where <headrev> and <headnode> redundantly identify a repository
-    # head from the time the cache was written, and <tagnode> is the
-    # filenode of .hgtags on that head.  Heads with no .hgtags file will
-    # have no <tagnode>.  The cache is ordered from tip to oldest (which
-    # is part of why <headrev> is there: a quick visual check is all
-    # that's required to ensure correct order).
-    #
-    # This information is enough to let us avoid the most expensive part
-    # of finding global tags, which is looking up <tagnode> in the
-    # manifest for each head.
-    cacherevs = []                      # list of headrev
-    cacheheads = []                     # list of headnode
-    cachefnode = {}                     # map headnode to filenode
+    cacherev = None
+    cachenode = None
+    cachehash = None
     if cachefile:
         try:
-            for line in cachelines:
-                if line == "\n":
-                    break
-                line = line.split()
-                cacherevs.append(int(line[0]))
-                headnode = bin(line[1])
-                cacheheads.append(headnode)
-                if len(line) == 3:
-                    fnode = bin(line[2])
-                    cachefnode[headnode] = fnode
+            validline = cachelines.next()
+            validline = validline.split()
+            cacherev = int(validline[0])
+            cachenode = bin(validline[1])
+            if len(validline) > 2:
+                cachehash = bin(validline[2])
         except Exception:
-            # corruption of the tags cache, just recompute it
-            ui.warn(_('.hg/cache/tags is corrupt, rebuilding it\n'))
-            cacheheads = []
-            cacherevs = []
-            cachefnode = {}
+            # corruption of the cache, just recompute it.
+            pass
 
     tipnode = repo.changelog.tip()
     tiprev = len(repo.changelog) - 1
@@ -240,18 +294,22 @@
     # (Unchanged tip trivially means no changesets have been added.
     # But, thanks to localrepository.destroyed(), it also means none
     # have been destroyed by strip or rollback.)
-    if cacheheads and cacheheads[0] == tipnode and cacherevs[0] == tiprev:
+    if (cacherev == tiprev
+            and cachenode == tipnode
+            and cachehash == scmutil.filteredhash(repo, tiprev)):
         tags = _readtags(ui, repo, cachelines, cachefile.name)
         cachefile.close()
-        return (None, None, tags, False)
+        return (None, None, None, tags, False)
     if cachefile:
         cachefile.close()               # ignore rest of file
 
+    valid = (tiprev, tipnode, scmutil.filteredhash(repo, tiprev))
+
     repoheads = repo.heads()
     # Case 2 (uncommon): empty repo; get out quickly and don't bother
     # writing an empty cache.
     if repoheads == [nullid]:
-        return ([], {}, {}, False)
+        return ([], {}, valid, {}, False)
 
     # Case 3 (uncommon): cache file missing or empty.
 
@@ -269,75 +327,53 @@
     if not len(repo.file('.hgtags')):
         # No tags have ever been committed, so we can avoid a
         # potentially expensive search.
-        return (repoheads, cachefnode, None, True)
+        return ([], {}, valid, None, True)
 
     starttime = time.time()
 
-    newheads = [head
-                for head in repoheads
-                if head not in set(cacheheads)]
-
     # Now we have to lookup the .hgtags filenode for every new head.
     # This is the most expensive part of finding tags, so performance
     # depends primarily on the size of newheads.  Worst case: no cache
     # file, so newheads == repoheads.
-    for head in reversed(newheads):
-        cctx = repo[head]
-        try:
-            fnode = cctx.filenode('.hgtags')
+    fnodescache = hgtagsfnodescache(repo.unfiltered())
+    cachefnode = {}
+    for head in reversed(repoheads):
+        fnode = fnodescache.getfnode(head)
+        if fnode != nullid:
             cachefnode[head] = fnode
-        except error.LookupError:
-            # no .hgtags file on this head
-            pass
+
+    fnodescache.write()
 
     duration = time.time() - starttime
     ui.log('tagscache',
-           'resolved %d tags cache entries from %d manifests in %0.4f '
+           '%d/%d cache hits/lookups in %0.4f '
            'seconds\n',
-           len(cachefnode), len(newheads), duration)
+           fnodescache.hitcount, fnodescache.lookupcount, duration)
 
     # Caller has to iterate over all heads, but can use the filenodes in
     # cachefnode to get to each .hgtags revision quickly.
-    return (repoheads, cachefnode, None, True)
+    return (repoheads, cachefnode, valid, None, True)
 
-def _writetagcache(ui, repo, heads, tagfnode, cachetags):
-
+def _writetagcache(ui, repo, valid, cachetags):
+    filename = _filename(repo)
     try:
-        cachefile = repo.vfs('cache/tags', 'w', atomictemp=True)
+        cachefile = repo.vfs(filename, 'w', atomictemp=True)
     except (OSError, IOError):
         return
 
-    ui.log('tagscache', 'writing tags cache file with %d heads and %d tags\n',
-            len(heads), len(cachetags))
+    ui.log('tagscache', 'writing .hg/%s with %d tags\n',
+           filename, len(cachetags))
 
-    realheads = repo.heads()            # for sanity checks below
-    for head in heads:
-        # temporary sanity checks; these can probably be removed
-        # once this code has been in crew for a few weeks
-        assert head in repo.changelog.nodemap, \
-               'trying to write non-existent node %s to tag cache' % short(head)
-        assert head in realheads, \
-               'trying to write non-head %s to tag cache' % short(head)
-        assert head != nullid, \
-               'trying to write nullid to tag cache'
-
-        # This can't fail because of the first assert above.  When/if we
-        # remove that assert, we might want to catch LookupError here
-        # and downgrade it to a warning.
-        rev = repo.changelog.rev(head)
-
-        fnode = tagfnode.get(head)
-        if fnode:
-            cachefile.write('%d %s %s\n' % (rev, hex(head), hex(fnode)))
-        else:
-            cachefile.write('%d %s\n' % (rev, hex(head)))
+    if valid[2]:
+        cachefile.write('%d %s %s\n' % (valid[0], hex(valid[1]), hex(valid[2])))
+    else:
+        cachefile.write('%d %s\n' % (valid[0], hex(valid[1])))
 
     # Tag names in the cache are in UTF-8 -- which is the whole reason
     # we keep them in UTF-8 throughout this module.  If we converted
     # them local encoding on input, we would lose info writing them to
     # the cache.
-    cachefile.write('\n')
-    for (name, (node, hist)) in cachetags.iteritems():
+    for (name, (node, hist)) in sorted(cachetags.iteritems()):
         for n in hist:
             cachefile.write("%s %s\n" % (hex(n), name))
         cachefile.write("%s %s\n" % (hex(node), name))
@@ -346,3 +382,153 @@
         cachefile.close()
     except (OSError, IOError):
         pass
+
+_fnodescachefile = 'cache/hgtagsfnodes1'
+_fnodesrecsize = 4 + 20 # changeset fragment + filenode
+_fnodesmissingrec = '\xff' * 24
+
+class hgtagsfnodescache(object):
+    """Persistent cache mapping revisions to .hgtags filenodes.
+
+    The cache is an array of records. Each item in the array corresponds to
+    a changelog revision. Values in the array contain the first 4 bytes of
+    the node hash and the 20-byte .hgtags filenode for that revision.
+
+    The first 4 bytes are present as a form of verification. Repository
+    stripping and rewriting may change the node at a numeric revision in the
+    changelog. The changeset fragment serves as a verifier to detect
+    rewriting. This logic is shared with the rev branch cache (see
+    branchmap.py).
+
+    The instance holds in memory the full cache content but entries are
+    only parsed on read.
+
+    Entries are accessed through ``getfnode(node)``; missing or stale
+    entries are computed and stored automatically on access.
+    """
+    def __init__(self, repo):
+        assert repo.filtername is None
+
+        self._repo = repo
+
+        # Only for reporting purposes.
+        self.lookupcount = 0
+        self.hitcount = 0
+
+        self._raw = array('c')
+
+        data = repo.vfs.tryread(_fnodescachefile)
+        self._raw.fromstring(data)
+
+        # The end state of self._raw is an array that is of the exact length
+        # required to hold a record for every revision in the repository.
+        # We truncate or extend the array as necessary. self._dirtyoffset is
+        # defined to be the start offset at which we need to write the output
+        # file. This offset is also adjusted when new entries are calculated
+        # for array members.
+        cllen = len(repo.changelog)
+        wantedlen = cllen * _fnodesrecsize
+        rawlen = len(self._raw)
+
+        self._dirtyoffset = None
+
+        if rawlen < wantedlen:
+            self._dirtyoffset = rawlen
+            self._raw.extend('\xff' * (wantedlen - rawlen))
+        elif rawlen > wantedlen:
+            # There's no easy way to truncate array instances. This seems
+            # slightly less evil than copying a potentially large array slice.
+            for i in range(rawlen - wantedlen):
+                self._raw.pop()
+            self._dirtyoffset = len(self._raw)
+
+    def getfnode(self, node):
+        """Obtain the filenode of the .hgtags file at a specified revision.
+
+        If the value is in the cache, the entry will be validated and returned.
+        Otherwise, the filenode will be computed and returned.
+
+        If an .hgtags file does not exist at the specified revision, nullid is
+        returned.
+        """
+        ctx = self._repo[node]
+        rev = ctx.rev()
+
+        self.lookupcount += 1
+
+        offset = rev * _fnodesrecsize
+        record = self._raw[offset:offset + _fnodesrecsize].tostring()
+        properprefix = node[0:4]
+
+        # Validate and return existing entry.
+        if record != _fnodesmissingrec:
+            fileprefix = record[0:4]
+
+            if fileprefix == properprefix:
+                self.hitcount += 1
+                return record[4:]
+
+            # Fall through.
+
+        # If we get here, the entry is either missing or invalid. Populate it.
+        try:
+            fnode = ctx.filenode('.hgtags')
+        except error.LookupError:
+            # No .hgtags file on this revision.
+            fnode = nullid
+
+        # Slices on array instances only accept other array instances.
+        entry = array('c', properprefix + fnode)
+        self._raw[offset:offset + _fnodesrecsize] = entry
+        # self._dirtyoffset could be None.
+        self._dirtyoffset = min(self._dirtyoffset, offset) or 0
+
+        return fnode
+
+    def write(self):
+        """Perform all necessary writes to cache file.
+
+        This may be a no-op if no writes are needed or if a write lock
+        could not be obtained.
+        """
+        if self._dirtyoffset is None:
+            return
+
+        data = self._raw[self._dirtyoffset:]
+        if not data:
+            return
+
+        repo = self._repo
+
+        try:
+            lock = repo.wlock(wait=False)
+        except error.LockHeld:
+            repo.ui.log('tagscache',
+                        'not writing .hg/%s because lock held\n' %
+                        (_fnodescachefile))
+            return
+
+        try:
+            try:
+                f = repo.vfs.open(_fnodescachefile, 'ab')
+                try:
+                    # if the file has been truncated, write from its current end
+                    actualoffset = f.tell()
+                    if actualoffset < self._dirtyoffset:
+                        self._dirtyoffset = actualoffset
+                        data = self._raw[self._dirtyoffset:]
+                    f.seek(self._dirtyoffset)
+                    f.truncate()
+                    repo.ui.log('tagscache',
+                                'writing %d bytes to %s\n' % (
+                                len(data), _fnodescachefile))
+                    f.write(data)
+                    self._dirtyoffset = None
+                finally:
+                    f.close()
+            except (IOError, OSError), inst:
+                repo.ui.log('tagscache',
+                            "couldn't write %s: %s\n" % (
+                            _fnodescachefile, inst))
+        finally:
+            lock.release()
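
To make the hgtagsfnodes1 record layout described above concrete, here is a small standalone sketch (not part of the patch, names illustrative) of decoding and validating a single 24-byte record:

    # One record per changelog revision: 4-byte changeset node prefix
    # plus 20-byte .hgtags filenode; missing entries are \xff bytes.
    _RECSIZE = 4 + 20
    _MISSING = '\xff' * _RECSIZE

    def readfnode(raw, rev, node):
        """Return the cached .hgtags filenode for `rev`, or None when the
        record is absent or stale (node prefix mismatch after a rewrite)."""
        record = raw[rev * _RECSIZE:(rev + 1) * _RECSIZE]
        if len(record) < _RECSIZE or record == _MISSING:
            return None
        if record[0:4] != node[0:4]:
            return None
        return record[4:]
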
--- a/mercurial/templatefilters.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/templatefilters.py	Thu Apr 16 20:57:51 2015 -0500
@@ -234,6 +234,10 @@
     """:localdate: Date. Converts a date to local date."""
     return (util.parsedate(text)[0], util.makedate()[1])
 
+def lower(text):
+    """:lower: Any text. Converts the text to lowercase."""
+    return encoding.lower(text)
+
 def nonempty(str):
     """:nonempty: Any text. Returns '(none)' if the string is empty."""
     return str or "(none)"
@@ -344,6 +348,10 @@
     """
     return indent(text, '\t')
 
+def upper(text):
+    """:upper: Any text. Converts the text to uppercase."""
+    return encoding.upper(text)
+
 def urlescape(text):
     """:urlescape: Any text. Escapes all "special" characters. For example,
     "foo bar" becomes "foo%20bar".
@@ -387,6 +395,7 @@
     "json": json,
     "jsonescape": jsonescape,
     "localdate": localdate,
+    "lower": lower,
     "nonempty": nonempty,
     "obfuscate": obfuscate,
     "permissions": permissions,
@@ -402,6 +411,7 @@
     "strip": strip,
     "stripdir": stripdir,
     "tabindent": tabindent,
+    "upper": upper,
     "urlescape": urlescape,
     "user": userfilter,
     "emailuser": emailuser,
--- a/mercurial/templatekw.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/templatekw.py	Thu Apr 16 20:57:51 2015 -0500
@@ -12,11 +12,15 @@
 # This helper class allows us to handle both:
 #  "{files}" (legacy command-line-specific list hack) and
 #  "{files % '{file}\n'}" (hgweb-style with inlining and function support)
+# and to access raw values:
+#  "{ifcontains(file, files, ...)}", "{ifcontains(key, extras, ...)}"
+#  "{get(extras, key)}"
 
 class _hybrid(object):
-    def __init__(self, gen, values, joinfmt=None):
+    def __init__(self, gen, values, makemap, joinfmt=None):
         self.gen = gen
         self.values = values
+        self._makemap = makemap
         if joinfmt:
             self.joinfmt = joinfmt
         else:
@@ -24,16 +28,23 @@
     def __iter__(self):
         return self.gen
     def __call__(self):
+        makemap = self._makemap
         for x in self.values:
-            yield x
+            yield makemap(x)
+    def __contains__(self, x):
+        return x in self.values
     def __len__(self):
         return len(self.values)
+    def __getattr__(self, name):
+        if name != 'get':
+            raise AttributeError(name)
+        return getattr(self.values, name)
 
 def showlist(name, values, plural=None, element=None, **args):
     if not element:
         element = name
     f = _showlist(name, values, plural, **args)
-    return _hybrid(f, [{element: x} for x in values])
+    return _hybrid(f, values, lambda x: {element: x})
 
 def _showlist(name, values, plural=None, **args):
     '''expand set of values.
@@ -200,9 +211,9 @@
     repo = args['ctx']._repo
     bookmarks = args['ctx'].bookmarks()
     current = repo._bookmarkcurrent
-    c = [{'bookmark': x, 'current': current} for x in bookmarks]
+    makemap = lambda v: {'bookmark': v, 'current': current}
     f = _showlist('bookmark', bookmarks, **args)
-    return _hybrid(f, c, lambda x: x['bookmark'])
+    return _hybrid(f, bookmarks, makemap, lambda x: x['bookmark'])
 
 def showchildren(**args):
     """:children: List of strings. The children of the changeset."""
@@ -241,9 +252,12 @@
     """:extras: List of dicts with key, value entries of the 'extras'
     field of this changeset."""
     extras = args['ctx'].extra()
-    c = [{'key': x[0], 'value': x[1]} for x in sorted(extras.items())]
+    extras = util.sortdict((k, extras[k]) for k in sorted(extras))
+    makemap = lambda k: {'key': k, 'value': extras[k]}
+    c = [makemap(k) for k in extras]
     f = _showlist('extra', c, plural='extras', **args)
-    return _hybrid(f, c, lambda x: '%s=%s' % (x['key'], x['value']))
+    return _hybrid(f, extras, makemap,
+                   lambda x: '%s=%s' % (x['key'], x['value']))
 
 def showfileadds(**args):
     """:file_adds: List of strings. Files added by this changeset."""
@@ -267,9 +281,12 @@
             if rename:
                 copies.append((fn, rename[0]))
 
-    c = [{'name': x[0], 'source': x[1]} for x in copies]
+    copies = util.sortdict(copies)
+    makemap = lambda k: {'name': k, 'source': copies[k]}
+    c = [makemap(k) for k in copies]
     f = _showlist('file_copy', c, plural='file_copies', **args)
-    return _hybrid(f, c, lambda x: '%s (%s)' % (x['name'], x['source']))
+    return _hybrid(f, copies, makemap,
+                   lambda x: '%s (%s)' % (x['name'], x['source']))
 
 # showfilecopiesswitch() displays file copies only if copy records are
 # provided before calling the templater, usually with a --copies
@@ -279,9 +296,12 @@
     only if the --copied switch is set.
     """
     copies = args['revcache'].get('copies') or []
-    c = [{'name': x[0], 'source': x[1]} for x in copies]
+    copies = util.sortdict(copies)
+    makemap = lambda k: {'name': k, 'source': copies[k]}
+    c = [makemap(k) for k in copies]
     f = _showlist('file_copy', c, plural='file_copies', **args)
-    return _hybrid(f, c, lambda x: '%s (%s)' % (x['name'], x['source']))
+    return _hybrid(f, copies, makemap,
+                   lambda x: '%s (%s)' % (x['name'], x['source']))
 
 def showfiledels(**args):
     """:file_dels: List of strings. Files removed by this changeset."""
@@ -313,9 +333,9 @@
 
 def showmanifest(**args):
     repo, ctx, templ = args['repo'], args['ctx'], args['templ']
+    mnode = ctx.manifestnode()
     args = args.copy()
-    args.update({'rev': repo.manifest.rev(ctx.changeset()[0]),
-                 'node': hex(ctx.changeset()[0])})
+    args.update({'rev': repo.manifest.rev(mnode), 'node': hex(mnode)})
     return templ('manifest', **args)
 
 def shownode(repo, ctx, templ, **args):
@@ -377,7 +397,7 @@
 def shownames(namespace, **args):
     """helper method to generate a template keyword for a namespace"""
     ctx = args['ctx']
-    repo = ctx._repo
+    repo = ctx.repo()
     ns = repo.names[namespace]
     names = ns.names(repo, ctx.node())
     return showlist(ns.templatename, names, plural=namespace, **args)
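
For readers unfamiliar with the hybrid pattern the keyword changes above rely on, a toy standalone analogue (not part of the patch) shows how one object can both render as text and expose raw values to ifcontains(), get() and '%' expansion:

    # Toy analogue of _hybrid -- illustrative only.
    class hybrid(object):
        def __init__(self, gen, values, makemap):
            self.gen = gen            # generator yielding the legacy text form
            self.values = values      # raw values (list or dict-like)
            self._makemap = makemap   # raw value -> mapping for '%' expansion
        def __iter__(self):           # "{extras}" -> formatted text
            return self.gen
        def __call__(self):           # "{extras % '{key}={value}'}" -> mappings
            for v in self.values:
                yield self._makemap(v)
        def __contains__(self, x):    # "{ifcontains(key, extras, ...)}"
            return x in self.values
        def __len__(self):
            return len(self.values)

    extras = {'close': '1'}
    h = hybrid(iter(['close=1\n']), extras,
               lambda k: {'key': k, 'value': extras[k]})
    assert 'close' in h
    assert [d['key'] for d in h()] == ['close']
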
--- a/mercurial/templater.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/templater.py	Thu Apr 16 20:57:51 2015 -0500
@@ -162,8 +162,13 @@
 
 def runfilter(context, mapping, data):
     func, data, filt = data
+    # func() may return a string, a generator of strings or an arbitrary
+    # object such as a date tuple, but a filter does not want a generator.
+    thing = func(context, mapping, data)
+    if isinstance(thing, types.GeneratorType):
+        thing = stringify(thing)
     try:
-        return filt(func(context, mapping, data))
+        return filt(thing)
     except (ValueError, AttributeError, TypeError):
         if isinstance(data, tuple):
             dt = data[1]
@@ -214,6 +219,8 @@
     raise error.ParseError(_("unknown function '%s'") % n)
 
 def date(context, mapping, args):
+    """:date(date[, fmt]): Format a date. See :hg:`help dates` for formatting
+    strings."""
     if not (1 <= len(args) <= 2):
         # i18n: "date" is a keyword
         raise error.ParseError(_("date expects one or two arguments"))
@@ -225,6 +232,8 @@
     return util.datestr(date)
 
 def diff(context, mapping, args):
+    """:diff([includepattern [, excludepattern]]): Show a diff, optionally
+    specifying files to include or exclude."""
     if len(args) > 2:
         # i18n: "diff" is a keyword
         raise error.ParseError(_("diff expects one, two or no arguments"))
@@ -242,6 +251,8 @@
     return ''.join(chunks)
 
 def fill(context, mapping, args):
+    """:fill(text[, width[, initialident[, hangindent]]]): Fill many
+    paragraphs with optional indentation. See the "fill" filter."""
     if not (1 <= len(args) <= 4):
         # i18n: "fill" is a keyword
         raise error.ParseError(_("fill expects one to four arguments"))
@@ -265,8 +276,8 @@
     return templatefilters.fill(text, width, initindent, hangindent)
 
 def pad(context, mapping, args):
-    """usage: pad(text, width, fillchar=' ', right=False)
-    """
+    """:pad(text, width[, fillchar=' '[, right=False]]): Pad text with a
+    fill character."""
     if not (2 <= len(args) <= 4):
         # i18n: "pad" is a keyword
         raise error.ParseError(_("pad() expects two to four arguments"))
@@ -291,6 +302,9 @@
         return text.ljust(width, fillchar)
 
 def get(context, mapping, args):
+    """:get(dict, key): Get an attribute/key from an object. Some keywords
+    are complex types. This function allows you to obtain the value of an
+    attribute on these types."""
     if len(args) != 2:
         # i18n: "get" is a keyword
         raise error.ParseError(_("get() expects two arguments"))
@@ -312,6 +326,8 @@
         yield t
 
 def if_(context, mapping, args):
+    """:if(expr, then[, else]): Conditionally execute based on the result of
+    an expression."""
     if not (2 <= len(args) <= 3):
         # i18n: "if" is a keyword
         raise error.ParseError(_("if expects two or three arguments"))
@@ -323,6 +339,8 @@
         yield _evalifliteral(args[2], context, mapping)
 
 def ifcontains(context, mapping, args):
+    """:ifcontains(search, thing, then[, else]): Conditionally execute based
+    on whether the item "search" is in "thing"."""
     if not (3 <= len(args) <= 4):
         # i18n: "ifcontains" is a keyword
         raise error.ParseError(_("ifcontains expects three or four arguments"))
@@ -330,15 +348,14 @@
     item = stringify(args[0][0](context, mapping, args[0][1]))
     items = args[1][0](context, mapping, args[1][1])
 
-    # Iterating over items gives a formatted string, so we iterate
-    # directly over the raw values.
-    if ((callable(items) and item in [i.values()[0] for i in items()]) or
-        (isinstance(items, str) and item in items)):
+    if item in items:
         yield _evalifliteral(args[2], context, mapping)
     elif len(args) == 4:
         yield _evalifliteral(args[3], context, mapping)
 
 def ifeq(context, mapping, args):
+    """:ifeq(expr1, expr2, then[, else]): Conditionally execute based on
+    whether 2 items are equivalent."""
     if not (3 <= len(args) <= 4):
         # i18n: "ifeq" is a keyword
         raise error.ParseError(_("ifeq expects three or four arguments"))
@@ -351,6 +368,7 @@
         yield _evalifliteral(args[3], context, mapping)
 
 def join(context, mapping, args):
+    """:join(list, sep): Join items in a list with a delimiter."""
     if not (1 <= len(args) <= 2):
         # i18n: "join" is a keyword
         raise error.ParseError(_("join expects one or two arguments"))
@@ -373,6 +391,9 @@
         yield x
 
 def label(context, mapping, args):
+    """:label(label, expr): Apply a label to generated content. Content with
+    a label applied can result in additional post-processing, such as
+    automatic colorization."""
     if len(args) != 2:
         # i18n: "label" is a keyword
         raise error.ParseError(_("label expects two arguments"))
@@ -381,19 +402,19 @@
     yield _evalifliteral(args[1], context, mapping)
 
 def revset(context, mapping, args):
-    """usage: revset(query[, formatargs...])
-    """
+    """:revset(query[, formatargs...]): Execute a revision set query. See
+    :hg:`help revset`."""
     if not len(args) > 0:
         # i18n: "revset" is a keyword
         raise error.ParseError(_("revset expects one or more arguments"))
 
     raw = args[0][1]
     ctx = mapping['ctx']
-    repo = ctx._repo
+    repo = ctx.repo()
 
     def query(expr):
         m = revsetmod.match(repo.ui, expr)
-        return m(repo, revsetmod.spanset(repo))
+        return m(repo)
 
     if len(args) > 1:
         formatargs = list([a[0](context, mapping, a[1]) for a in args[1:]])
@@ -411,6 +432,7 @@
     return templatekw.showlist("revision", revs, **mapping)
 
 def rstdoc(context, mapping, args):
+    """:rstdoc(text, style): Format ReStructuredText."""
     if len(args) != 2:
         # i18n: "rstdoc" is a keyword
         raise error.ParseError(_("rstdoc expects two arguments"))
@@ -421,8 +443,8 @@
     return minirst.format(text, style=style, keep=['verbose'])
 
 def shortest(context, mapping, args):
-    """usage: shortest(node, minlength=4)
-    """
+    """:shortest(node, minlength=4): Obtain the shortest representation of
+    a node."""
     if not (1 <= len(args) <= 2):
         # i18n: "shortest" is a keyword
         raise error.ParseError(_("shortest() expects one or two arguments"))
@@ -473,6 +495,7 @@
                 return shortest
 
 def strip(context, mapping, args):
+    """:strip(text[, chars]): Strip characters from a string."""
     if not (1 <= len(args) <= 2):
         # i18n: "strip" is a keyword
         raise error.ParseError(_("strip expects one or two arguments"))
@@ -484,6 +507,8 @@
     return text.strip()
 
 def sub(context, mapping, args):
+    """:sub(pattern, replacement, expression): Perform text substitution
+    using regular expressions."""
     if len(args) != 3:
         # i18n: "sub" is a keyword
         raise error.ParseError(_("sub expects three arguments"))
@@ -494,6 +519,8 @@
     yield re.sub(pat, rpl, src)
 
 def startswith(context, mapping, args):
+    """:startswith(pattern, text): Returns the value from the "text" argument
+    if it begins with the content from the "pattern" argument."""
     if len(args) != 2:
         # i18n: "startswith" is a keyword
         raise error.ParseError(_("startswith expects two arguments"))
@@ -506,7 +533,7 @@
 
 
 def word(context, mapping, args):
-    """return nth word from a string"""
+    """:word(number, text[, separator]): Return the nth word from a string."""
     if not (2 <= len(args) <= 3):
         # i18n: "word" is a keyword
         raise error.ParseError(_("word expects two or three arguments, got %d")
@@ -654,7 +681,10 @@
         self.mapfile = mapfile or 'template'
         self.cache = cache.copy()
         self.map = {}
-        self.base = (mapfile and os.path.dirname(mapfile)) or ''
+        if mapfile:
+            self.base = os.path.dirname(mapfile)
+        else:
+            self.base = ''
         self.filters = templatefilters.filters.copy()
         self.filters.update(filters)
         self.defaults = defaults
@@ -763,3 +793,6 @@
                     return style, mapfile
 
     raise RuntimeError("No hgweb templates found in %r" % paths)
+
+# tell hggettext to extract docstrings from these functions:
+i18nfunctions = funcs.values()
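
As a side note (not part of the patch), the runfilter() change above exists because template functions may yield generators of strings while text filters expect a plain string. A self-contained toy sketch of that flattening step, with a simplified stand-in for stringify():

    import types

    def stringify(thing):
        # simplified stand-in for templatefilters-style stringification
        if isinstance(thing, types.GeneratorType):
            return ''.join(stringify(t) for t in thing)
        return str(thing)

    def applyfilter(value, filt):
        if isinstance(value, types.GeneratorType):
            value = stringify(value)   # flatten before handing to the filter
        return filt(value)

    assert applyfilter((s for s in ('ab', 'cd')), str.upper) == 'ABCD'
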
--- a/mercurial/templates/gitweb/map	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/templates/gitweb/map	Thu Apr 16 20:57:51 2015 -0500
@@ -140,7 +140,7 @@
   <tr>
     <td>parent {rev}</td>
     <td style="font-family:monospace">
-      {changesetlink} {ifeq(node, basenode, '(current diff)', \'({difffrom})\')}
+      {changesetlink} {ifeq(node, basenode, '(current diff)', '({difffrom})')}
     </td>
   </tr>'
 difffrom = '<a href="{url|urlescape}rev/{node|short}:{originalnode|short}{sessionvars%urlparameter}">diff</a>'
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/templates/json/changelist.tmpl	Thu Apr 16 20:57:51 2015 -0500
@@ -0,0 +1,5 @@
+\{
+  "node": {node|json},
+  "changeset_count": {changesets|json},
+  "changesets": [{join(entries%changelistentry, ", ")}]
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/templates/json/map	Thu Apr 16 20:57:51 2015 -0500
@@ -0,0 +1,174 @@
+mimetype = 'application/json'
+filerevision = '"not yet implemented"'
+search = '"not yet implemented"'
+# changelog and shortlog are the same web API but with different
+# number of entries.
+changelog = changelist.tmpl
+shortlog = changelist.tmpl
+changelistentry = '\{
+  "node": {node|json},
+  "date": {date|json},
+  "desc": {desc|json},
+  "bookmarks": [{join(bookmarks%changelistentryname, ", ")}],
+  "tags": [{join(tags%changelistentryname, ", ")}],
+  "user": {author|json}
+  }'
+changelistentryname = '{name|json}'
+changeset = '\{
+  "node": {node|json},
+  "date": {date|json},
+  "desc": {desc|json},
+  "branch": {if(branch, branch%changesetbranch, "default"|json)},
+  "bookmarks": [{join(changesetbookmark, ", ")}],
+  "tags": [{join(changesettag, ", ")}],
+  "user": {author|json},
+  "parents": [{join(parent%changesetparent, ", ")}],
+  "phase": {phase|json}
+  }'
+changesetbranch = '{name|json}'
+changesetbookmark = '{bookmark|json}'
+changesettag = '{tag|json}'
+changesetparent = '{node|json}'
+manifest = '\{
+  "node": {node|json},
+  "abspath": {path|json},
+  "directories": [{join(dentries%direntry, ", ")}],
+  "files": [{join(fentries%fileentry, ", ")}],
+  "bookmarks": [{join(bookmarks%name, ", ")}],
+  "tags": [{join(tags%name, ", ")}]
+  }'
+name = '{name|json}'
+direntry = '\{
+  "abspath": {path|json},
+  "basename": {basename|json},
+  "emptydirs": {emptydirs|json}
+  }'
+fileentry = '\{
+  "abspath": {file|json},
+  "basename": {basename|json},
+  "date": {date|json},
+  "size": {size|json},
+  "flags": {permissions|json}
+  }'
+tags = '\{
+  "node": {node|json},
+  "tags": [{join(entriesnotip%tagentry, ", ")}]
+  }'
+tagentry = '\{
+  "tag": {tag|json},
+  "node": {node|json},
+  "date": {date|json}
+  }'
+bookmarks = '\{
+  "node": {node|json},
+  "bookmarks": [{join(entries%bookmarkentry, ", ")}]
+  }'
+bookmarkentry = '\{
+  "bookmark": {bookmark|json},
+  "node": {node|json},
+  "date": {date|json}
+  }'
+branches = '\{
+  "branches": [{join(entries%branchentry, ", ")}]
+  }'
+branchentry = '\{
+  "branch": {branch|json},
+  "node": {node|json},
+  "date": {date|json},
+  "status": {status|json}
+  }'
+summary = '"not yet implemented"'
+filediff = '\{
+  "path": {file|json},
+  "node": {node|json},
+  "date": {date|json},
+  "desc": {desc|json},
+  "author": {author|json},
+  "parents": [{join(parent%changesetparent, ", ")}],
+  "children": [{join(child%changesetparent, ", ")}],
+  "diff": [{join(diff%diffblock, ", ")}]
+  }'
+diffblock = '\{
+  "blockno": {blockno|json},
+  "lines": [{join(lines, ", ")}]
+  }'
+difflineplus = '\{
+  "t": "+",
+  "n": {lineno|json},
+  "l": {line|json}
+  }'
+difflineminus = '\{
+  "t": "-",
+  "n": {lineno|json},
+  "l": {line|json}
+  }'
+difflineat = '\{
+  "t": "@",
+  "n": {lineno|json},
+  "l": {line|json}
+  }'
+diffline = '\{
+  "t": "",
+  "n": {lineno|json},
+  "l": {line|json}
+  }'
+filecomparison = '\{
+  "path": {file|json},
+  "node": {node|json},
+  "date": {date|json},
+  "desc": {desc|json},
+  "author": {author|json},
+  "parents": [{join(parent%changesetparent, ", ")}],
+  "children": [{join(child%changesetparent, ", ")}],
+  "leftnode": {leftnode|json},
+  "rightnode": {rightnode|json},
+  "comparison": [{join(comparison, ", ")}]
+  }'
+comparisonblock = '\{
+  "lines": [{join(lines, ", ")}]
+  }'
+comparisonline = '\{
+  "t": {type|json},
+  "ln": {leftlineno|json},
+  "ll": {leftline|json},
+  "rn": {rightlineno|json},
+  "rl": {rightline|json}
+  }'
+fileannotate = '\{
+  "abspath": {file|json},
+  "node": {node|json},
+  "author": {author|json},
+  "date": {date|json},
+  "desc": {desc|json},
+  "parents": [{join(parent%changesetparent, ", ")}],
+  "children": [{join(child%changesetparent, ", ")}],
+  "permissions": {permissions|json},
+  "annotate": [{join(annotate%fileannotation, ", ")}]
+  }'
+fileannotation = '\{
+  "node": {node|json},
+  "author": {author|json},
+  "desc": {desc|json},
+  "abspath": {file|json},
+  "targetline": {targetline|json},
+  "line": {line|json},
+  "lineno": {lineno|json},
+  "revdate": {revdate|json}
+  }'
+filelog = '"not yet implemented"'
+graph = '"not yet implemented"'
+helptopics = '\{
+  "topics": [{join(topics%helptopicentry, ", ")}],
+  "earlycommands": [{join(earlycommands%helptopicentry, ", ")}],
+  "othercommands": [{join(othercommands%helptopicentry, ", ")}]
+  }'
+helptopicentry = '\{
+  "topic": {topic|json},
+  "summary": {summary|json}
+  }'
+help = '\{
+  "topic": {topic|json},
+  "rawdoc": {doc|json}
+  }'
+filenodelink = ''
+filenolink = ''
--- a/mercurial/templates/map-cmdline.default	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/templates/map-cmdline.default	Thu Apr 16 20:57:51 2015 -0500
@@ -58,8 +58,8 @@
 user = '{label("log.user",
                "user:        {author}")}\n'
 
-summary = '{label("log.summary",
-                  "summary:     {desc|firstline}")}\n'
+summary = '{if(desc|strip, "{label('log.summary',
+                                   'summary:     {desc|firstline}')}\n")}'
 
 ldate = '{label("log.date",
                 "date:        {date|date}")}\n'
@@ -67,7 +67,7 @@
 extra = '{label("ui.debug log.extra",
                 "extra:       {key}={value|stringescape}")}\n'
 
-description = '{label("ui.note log.description",
-                       "description:")}
-               {label("ui.note log.description",
-                       "{desc|strip}")}\n\n'
+description = '{if(desc|strip, "{label('ui.note log.description',
+                                       'description:')}
+                                {label('ui.note log.description',
+                                       '{desc|strip}')}\n\n")}'
--- a/mercurial/templates/monoblue/bookmarks.tmpl	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/templates/monoblue/bookmarks.tmpl	Thu Apr 16 20:57:51 2015 -0500
@@ -26,7 +26,7 @@
             <li class="current">bookmarks</li>
             <li><a href="{url|urlescape}branches{sessionvars%urlparameter}">branches</a></li>
             <li><a href="{url|urlescape}file/{node|short}{sessionvars%urlparameter}">files</a></li>
-	    <li><a href="{url|urlescape}help{sessionvars%urlparameter}">help</a></li>
+            <li><a href="{url|urlescape}help{sessionvars%urlparameter}">help</a></li>
         </ul>
     </div>
 
--- a/mercurial/templates/monoblue/branches.tmpl	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/templates/monoblue/branches.tmpl	Thu Apr 16 20:57:51 2015 -0500
@@ -26,7 +26,7 @@
             <li><a href="{url|urlescape}bookmarks{sessionvars%urlparameter}">bookmarks</a></li>
             <li class="current">branches</li>
             <li><a href="{url|urlescape}file/{node|short}{sessionvars%urlparameter}">files</a></li>
-	    <li><a href="{url|urlescape}help{sessionvars%urlparameter}">help</a></li>
+            <li><a href="{url|urlescape}help{sessionvars%urlparameter}">help</a></li>
         </ul>
     </div>
 
--- a/mercurial/templates/monoblue/changelog.tmpl	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/templates/monoblue/changelog.tmpl	Thu Apr 16 20:57:51 2015 -0500
@@ -27,7 +27,7 @@
             <li><a href="{url|urlescape}branches{sessionvars%urlparameter}">branches</a></li>
             <li><a href="{url|urlescape}file/{node|short}{sessionvars%urlparameter}">files</a></li>
             {archives%archiveentry}
-	    <li><a href="{url|urlescape}help{sessionvars%urlparameter}">help</a></li>
+            <li><a href="{url|urlescape}help{sessionvars%urlparameter}">help</a></li>
         </ul>
     </div>
 
--- a/mercurial/templates/monoblue/graph.tmpl	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/templates/monoblue/graph.tmpl	Thu Apr 16 20:57:51 2015 -0500
@@ -27,7 +27,7 @@
             <li><a href="{url|urlescape}bookmarks{sessionvars%urlparameter}">bookmarks</a></li>
             <li><a href="{url|urlescape}branches{sessionvars%urlparameter}">branches</a></li>
             <li><a href="{url|urlescape}file/{node|short}{sessionvars%urlparameter}">files</a></li>
-	    <li><a href="{url|urlescape}help{sessionvars%urlparameter}">help</a></li>
+            <li><a href="{url|urlescape}help{sessionvars%urlparameter}">help</a></li>
         </ul>
     </div>
 
--- a/mercurial/templates/monoblue/help.tmpl	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/templates/monoblue/help.tmpl	Thu Apr 16 20:57:51 2015 -0500
@@ -26,7 +26,7 @@
             <li><a href="{url|urlescape}bookmarks{sessionvars%urlparameter}">bookmarks</a></li>
             <li><a href="{url|urlescape}branches{sessionvars%urlparameter}">branches</a></li>
             <li><a href="{url|urlescape}file/{node|short}{sessionvars%urlparameter}">files</a></li>
-	    <li class="current">help</li>
+            <li class="current">help</li>
         </ul>
     </div>
 
--- a/mercurial/templates/monoblue/helptopics.tmpl	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/templates/monoblue/helptopics.tmpl	Thu Apr 16 20:57:51 2015 -0500
@@ -26,7 +26,7 @@
             <li><a href="{url|urlescape}bookmarks{sessionvars%urlparameter}">bookmarks</a></li>
             <li><a href="{url|urlescape}help{sessionvars%urlparameter}">branches</a></li>
             <li><a href="{url|urlescape}file/{node|short}{sessionvars%urlparameter}">files</a></li>
-	    <li class="current">help</li>
+            <li class="current">help</li>
         </ul>
     </div>
 
--- a/mercurial/templates/monoblue/manifest.tmpl	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/templates/monoblue/manifest.tmpl	Thu Apr 16 20:57:51 2015 -0500
@@ -26,7 +26,7 @@
             <li><a href="{url|urlescape}bookmarks{sessionvars%urlparameter}">bookmarks</a></li>
             <li><a href="{url|urlescape}branches{sessionvars%urlparameter}">branches</a></li>
             <li class="current">files</li>
-	    <li><a href="{url|urlescape}help{sessionvars%urlparameter}">help</a></li>
+            <li><a href="{url|urlescape}help{sessionvars%urlparameter}">help</a></li>
         </ul>
     </div>
 
--- a/mercurial/templates/monoblue/map	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/templates/monoblue/map	Thu Apr 16 20:57:51 2015 -0500
@@ -93,7 +93,7 @@
   <tr class="parity{parity}">
     <td class="linenr">
       <a href="{url|urlescape}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}#l{targetline}"
-	 title="{node|short}: {desc|escape|firstline}">{author|user}@{rev}</a>
+         title="{node|short}: {desc|escape|firstline}">{author|user}@{rev}</a>
     </td>
     <td class="lineno">
       <a href="#{lineid}" id="{lineid}">{linenumber}</a>
@@ -129,7 +129,7 @@
   <dd>{changesetlink}</dd>'
 changesetparentdiff = '
   <dt>parent {rev}</dt>
-  <dd>{changesetlink} {ifeq(node, basenode, '(current diff)', \'({difffrom})\')}</dd>'
+  <dd>{changesetlink} {ifeq(node, basenode, '(current diff)', '({difffrom})')}</dd>'
 difffrom = '<a href="{url|urlescape}rev/{node|short}:{originalnode|short}{sessionvars%urlparameter}">diff</a>'
 filerevbranch = '<dt>branch</dt><dd>{name|escape}</dd>'
 filerevparent = '
--- a/mercurial/templates/monoblue/shortlog.tmpl	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/templates/monoblue/shortlog.tmpl	Thu Apr 16 20:57:51 2015 -0500
@@ -26,8 +26,8 @@
             <li><a href="{url|urlescape}bookmarks{sessionvars%urlparameter}">bookmarks</a></li>
             <li><a href="{url|urlescape}branches{sessionvars%urlparameter}">branches</a></li>
             <li><a href="{url|urlescape}file/{node|short}{sessionvars%urlparameter}">files</a></li>
-	    {archives%archiveentry}
-	    <li><a href="{url|urlescape}help{sessionvars%urlparameter}">help</a></li>
+            {archives%archiveentry}
+            <li><a href="{url|urlescape}help{sessionvars%urlparameter}">help</a></li>
         </ul>
     </div>
 
--- a/mercurial/templates/monoblue/summary.tmpl	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/templates/monoblue/summary.tmpl	Thu Apr 16 20:57:51 2015 -0500
@@ -26,7 +26,7 @@
             <li><a href="{url|urlescape}bookmarks{sessionvars%urlparameter}">bookmarks</a></li>
             <li><a href="{url|urlescape}branches{sessionvars%urlparameter}">branches</a></li>
             <li><a href="{url|urlescape}file/{node|short}{sessionvars%urlparameter}">files</a></li>
-	    <li><a href="{url|urlescape}help{sessionvars%urlparameter}">help</a></li>
+            <li><a href="{url|urlescape}help{sessionvars%urlparameter}">help</a></li>
         </ul>
     </div>
 
--- a/mercurial/templates/monoblue/tags.tmpl	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/templates/monoblue/tags.tmpl	Thu Apr 16 20:57:51 2015 -0500
@@ -26,7 +26,7 @@
             <li><a href="{url|urlescape}bookmarks{sessionvars%urlparameter}">bookmarks</a></li>
             <li><a href="{url|urlescape}branches{sessionvars%urlparameter}">branches</a></li>
             <li><a href="{url|urlescape}file/{node|short}{sessionvars%urlparameter}">files</a></li>
-	    <li><a href="{url|urlescape}help{sessionvars%urlparameter}">help</a></li>
+            <li><a href="{url|urlescape}help{sessionvars%urlparameter}">help</a></li>
         </ul>
     </div>
 
--- a/mercurial/templates/paper/bookmarks.tmpl	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/templates/paper/bookmarks.tmpl	Thu Apr 16 20:57:51 2015 -0500
@@ -23,7 +23,6 @@
 <ul>
 <li><a href="{url|urlescape}help{sessionvars%urlparameter}">help</a></li>
 </ul>
-<p></p>
 <div class="atom-logo">
 <a href="{url|urlescape}atom-bookmarks" title="subscribe to atom feed">
 <img class="atom-logo" src="{staticurl|urlescape}feed-icon-14x14.png" alt="atom feed" />
@@ -42,10 +41,12 @@
 </form>
 
 <table class="bigtable">
+<thead>
 <tr>
  <th>bookmark</th>
  <th>node</th>
 </tr>
+</thead>
 <tbody class="stripes2">
 {entries%bookmarkentry}
 </tbody>
--- a/mercurial/templates/paper/branches.tmpl	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/templates/paper/branches.tmpl	Thu Apr 16 20:57:51 2015 -0500
@@ -23,7 +23,6 @@
 <ul>
  <li><a href="{url|urlescape}help{sessionvars%urlparameter}">help</a></li>
 </ul>
-<p></p>
 <div class="atom-logo">
 <a href="{url|urlescape}atom-branches" title="subscribe to atom feed">
 <img class="atom-logo" src="{staticurl|urlescape}feed-icon-14x14.png" alt="atom feed" />
@@ -42,10 +41,12 @@
 </form>
 
 <table class="bigtable">
+<thead>
 <tr>
  <th>branch</th>
  <th>node</th>
 </tr>
+</thead>
 <tbody class="stripes2">
 {entries % branchentry}
 </tbody>
--- a/mercurial/templates/paper/changeset.tmpl	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/templates/paper/changeset.tmpl	Thu Apr 16 20:57:51 2015 -0500
@@ -48,7 +48,8 @@
 </tr>
 <tr>
  <th class="date">date</th>
- <td class="date age">{date|rfc822date}</td></tr>
+ <td class="date age">{date|rfc822date}</td>
+</tr>
 <tr>
  <th class="author">parents</th>
  <td class="author">{ifeq(count(parent), '2', parent%changesetparentdiff, parent%changesetparent)}</td>
@@ -68,8 +69,7 @@
     <a id="diffstatexpand" href="javascript:toggleDiffstat()">[<tt>+</tt>]</a>
     <div id="diffstatdetails" style="display:none;">
       <a href="javascript:toggleDiffstat()">[<tt>-</tt>]</a>
-      <p></p>
-      <table class="stripes2">{diffstat}</table>
+      <table class="diffstat-table stripes2">{diffstat}</table>
     </div>
   </td>
 </tr>
--- a/mercurial/templates/paper/fileannotate.tmpl	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/templates/paper/fileannotate.tmpl	Thu Apr 16 20:57:51 2015 -0500
@@ -68,10 +68,12 @@
 
 <div class="overflow">
 <table class="bigtable">
+<thead>
 <tr>
  <th class="annotate">rev</th>
  <th class="line">&nbsp;&nbsp;line source</th>
 </tr>
+</thead>
 <tbody class="stripes2">
   {annotate%annotateline}
 </tbody>
--- a/mercurial/templates/paper/filelog.tmpl	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/templates/paper/filelog.tmpl	Thu Apr 16 20:57:51 2015 -0500
@@ -35,7 +35,6 @@
 <ul>
 <li><a href="{url|urlescape}help{sessionvars%urlparameter}">help</a></li>
 </ul>
-<p></p>
 <div class="atom-logo">
 <a href="{url|urlescape}atom-log/{node|short}/{file|urlescape}" title="subscribe to atom feed">
 <img class="atom-logo" src="{staticurl|urlescape}feed-icon-14x14.png" alt="atom feed" />
@@ -59,11 +58,13 @@
 | {nav%filenav}</div>
 
 <table class="bigtable">
+<thead>
  <tr>
   <th class="age">age</th>
   <th class="author">author</th>
   <th class="description">description</th>
  </tr>
+</thead>
 <tbody class="stripes2">
 {entries%filelogentry}
 </tbody>
--- a/mercurial/templates/paper/graph.tmpl	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/templates/paper/graph.tmpl	Thu Apr 16 20:57:51 2015 -0500
@@ -28,7 +28,6 @@
 <ul>
  <li><a href="{url|urlescape}help{sessionvars%urlparameter}">help</a></li>
 </ul>
-<p></p>
 <div class="atom-logo">
 <a href="{url|urlescape}atom-log" title="subscribe to atom feed">
 <img class="atom-logo" src="{staticurl|urlescape}feed-icon-14x14.png" alt="atom feed" />
--- a/mercurial/templates/paper/index.tmpl	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/templates/paper/index.tmpl	Thu Apr 16 20:57:51 2015 -0500
@@ -12,6 +12,7 @@
 <h2 class="breadcrumb"><a href="/">Mercurial</a> {pathdef%breadcrumb}</h2>
 
 <table class="bigtable">
+    <thead>
     <tr>
         <th><a href="?sort={sort_name}">Name</a></th>
         <th><a href="?sort={sort_description}">Description</a></th>
@@ -20,6 +21,7 @@
         <th>&nbsp;</th>
         <th>&nbsp;</th>
     </tr>
+    </thead>
     <tbody class="stripes2">
     {entries%indexentry}
     </tbody>
--- a/mercurial/templates/paper/manifest.tmpl	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/templates/paper/manifest.tmpl	Thu Apr 16 20:57:51 2015 -0500
@@ -39,11 +39,13 @@
 </form>
 
 <table class="bigtable">
+<thead>
 <tr>
   <th class="name">name</th>
   <th class="size">size</th>
   <th class="permissions">permissions</th>
 </tr>
+</thead>
 <tbody class="stripes2">
 <tr class="fileline">
   <td class="name"><a href="{url|urlescape}file/{node|short}{up|urlescape}{sessionvars%urlparameter}">[up]</a></td>
--- a/mercurial/templates/paper/search.tmpl	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/templates/paper/search.tmpl	Thu Apr 16 20:57:51 2015 -0500
@@ -43,11 +43,13 @@
 </div>
 
 <table class="bigtable">
+<thead>
  <tr>
   <th class="age">age</th>
   <th class="author">author</th>
   <th class="description">description</th>
  </tr>
+</thead>
 <tbody class="stripes2">
 {entries}
 </tbody>
--- a/mercurial/templates/paper/shortlog.tmpl	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/templates/paper/shortlog.tmpl	Thu Apr 16 20:57:51 2015 -0500
@@ -30,7 +30,6 @@
 <ul>
  <li><a href="{url|urlescape}help{sessionvars%urlparameter}">help</a></li>
 </ul>
-<p></p>
 <div class="atom-logo">
 <a href="{url|urlescape}atom-log" title="subscribe to atom feed">
 <img class="atom-logo" src="{staticurl|urlescape}feed-icon-14x14.png" alt="atom feed" />
@@ -55,11 +54,13 @@
 </div>
 
 <table class="bigtable">
+<thead>
  <tr>
   <th class="age">age</th>
   <th class="author">author</th>
   <th class="description">description</th>
  </tr>
+</thead>
 <tbody class="stripes2">
 {entries%shortlogentry}
 </tbody>
--- a/mercurial/templates/paper/tags.tmpl	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/templates/paper/tags.tmpl	Thu Apr 16 20:57:51 2015 -0500
@@ -23,7 +23,6 @@
 <ul>
 <li><a href="{url|urlescape}help{sessionvars%urlparameter}">help</a></li>
 </ul>
-<p></p>
 <div class="atom-logo">
 <a href="{url|urlescape}atom-tags" title="subscribe to atom feed">
 <img class="atom-logo" src="{staticurl|urlescape}feed-icon-14x14.png" alt="atom feed" />
@@ -42,10 +41,12 @@
 </form>
 
 <table class="bigtable">
+<thead>
 <tr>
  <th>tag</th>
  <th>node</th>
 </tr>
+</thead>
 <tbody class="stripes2">
 {entries%tagentry}
 </tbody>
--- a/mercurial/templates/static/style-paper.css	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/templates/static/style-paper.css	Thu Apr 16 20:57:51 2015 -0500
@@ -60,6 +60,10 @@
   border: 0;
 }
 
+div.atom-logo {
+  margin-top: 10px;
+}
+
 .atom-logo img{
   width: 14px;
   height: 14px;
@@ -104,6 +108,9 @@
 .minusline { color: #dc143c; } /* crimson */
 .atline { color: purple; }
 
+.diffstat-table {
+  margin-top: 1em;
+}
 .diffstat-file {
   white-space: nowrap;
   font-size: 90%;
@@ -232,8 +239,9 @@
 
 .sourcelines > span {
   display: inline-block;
+  box-sizing: border-box;
   width: 100%;
-  padding: 1px 0px;
+  padding: 1px 0px 1px 5em;
   counter-increment: lineno;
 }
 
@@ -244,8 +252,8 @@
   -ms-user-select: none;
   user-select: none;
   display: inline-block;
+  margin-left: -5em;
   width: 4em;
-  margin-right: 1em;
   font-size: smaller;
   color: #999;
   text-align: right;
--- a/mercurial/transaction.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/transaction.py	Thu Apr 16 20:57:51 2015 -0500
@@ -83,7 +83,7 @@
 
 class transaction(object):
     def __init__(self, report, opener, vfsmap, journalname, undoname=None,
-                 after=None, createmode=None):
+                 after=None, createmode=None, validator=None):
         """Begin a new transaction
 
         Begins a new transaction that allows rolling back writes in the event of
@@ -107,6 +107,12 @@
         self.journal = journalname
         self.undoname = undoname
         self._queue = []
+        # A callback to validate transaction content before closing it.
+        # Should raise an exception if anything is wrong.
+        # The target user is repository hooks.
+        if validator is None:
+            validator = lambda tr: None
+        self.validator = validator
         # a dict of arguments to be passed to hooks
         self.hookargs = {}
         self.file = opener.open(self.journal, "w")
@@ -378,6 +384,7 @@
     def close(self):
         '''commit the transaction'''
         if self.count == 1:
+            self.validator(self)  # will raise exception if needed
             self._generatefiles()
             categories = sorted(self._finalizecallback)
             for cat in categories:
@@ -535,6 +542,6 @@
                         backupentries.append((l, f, b, bool(c)))
             else:
                 report(_("journal was created by a different version of "
-                         "Mercurial"))
+                         "Mercurial\n"))
 
     _playback(file, report, opener, vfsmap, entries, backupentries)
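
The validator callback added here gives repository hooks a way to veto a transaction: close() invokes it first and lets any exception propagate, so nothing is committed if the content is rejected. A minimal sketch of the pattern follows; 'minitransaction' and 'refuse' are illustrative stand-ins, not Mercurial code.

    # Sketch of the validator callback pattern (stand-in class, not
    # Mercurial's transaction).
    class minitransaction(object):
        def __init__(self, validator=None):
            if validator is None:
                validator = lambda tr: None   # default validator accepts anything
            self.validator = validator

        def close(self):
            self.validator(self)              # raises if the content is rejected
            # ... generate files, run finalize callbacks, etc.

    def refuse(tr):
        raise RuntimeError('transaction rejected by hook')

    minitransaction().close()                 # no-op validator: succeeds
    try:
        minitransaction(refuse).close()
    except RuntimeError, err:
        print err                             # transaction rejected by hook
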
--- a/mercurial/ui.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/ui.py	Thu Apr 16 20:57:51 2015 -0500
@@ -158,7 +158,7 @@
 
         if self.plain():
             for k in ('debug', 'fallbackencoding', 'quiet', 'slash',
-                      'logtemplate', 'style',
+                      'logtemplate', 'statuscopies', 'style',
                       'traceback', 'verbose'):
                 if k in cfg['ui']:
                     del cfg['ui'][k]
@@ -531,10 +531,14 @@
         if util.hasscheme(loc) or os.path.isdir(os.path.join(loc, '.hg')):
             return loc
 
-        path = self.config('paths', loc)
-        if not path and default is not None:
-            path = self.config('paths', default)
-        return path or loc
+        p = self.paths.getpath(loc, default=default)
+        if p:
+            return p.loc
+        return loc
+
+    @util.propertycache
+    def paths(self):
+        return paths(self)
 
     def pushbuffer(self, error=False):
         """install a buffer to capture standard output of the ui object
@@ -805,7 +809,7 @@
             environ = {'HGUSER': user}
             if 'transplant_source' in extra:
                 environ.update({'HGREVISION': hex(extra['transplant_source'])})
-            for label in ('source', 'rebase_source'):
+            for label in ('intermediate-source', 'source', 'rebase_source'):
                 if label in extra:
                     environ.update({'HGREVISION': extra[label]})
                     break
@@ -923,3 +927,48 @@
         ui.write(ui.label(s, 'label')).
         '''
         return msg
+
+class paths(dict):
+    """Represents a collection of paths and their configs.
+
+    Data is initially derived from ui instances and the config files they have
+    loaded.
+    """
+    def __init__(self, ui):
+        dict.__init__(self)
+
+        for name, loc in ui.configitems('paths'):
+            # No location is the same as not existing.
+            if not loc:
+                continue
+            self[name] = path(name, rawloc=loc)
+
+    def getpath(self, name, default=None):
+        """Return a ``path`` for the specified name, falling back to a default.
+
+        Returns the first of ``name`` or ``default`` that is present, or None
+        if neither is present.
+        """
+        try:
+            return self[name]
+        except KeyError:
+            if default is not None:
+                try:
+                    return self[default]
+                except KeyError:
+                    pass
+
+        return None
+
+class path(object):
+    """Represents an individual path and its configuration."""
+
+    def __init__(self, name, rawloc=None):
+        """Construct a path from its config options.
+
+        ``name`` is the symbolic name of the path.
+        ``rawloc`` is the raw location, as defined in the config.
+        """
+        self.name = name
+        # We'll do more intelligent things with rawloc in the future.
+        self.loc = rawloc
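
ui.expandpath now resolves symbolic names through a cached paths object built from the [paths] config section. The name-then-default fallback in getpath() can be shown in isolation; the dict contents below are made-up sample data.

    # Sketch of the getpath() fallback logic with sample data.
    class pathtable(dict):
        def getpath(self, name, default=None):
            try:
                return self[name]
            except KeyError:
                if default is not None:
                    try:
                        return self[default]
                    except KeyError:
                        pass
            return None

    t = pathtable({'default': 'http://example.com/repo'})
    print t.getpath('upstream', default='default')  # http://example.com/repo
    print t.getpath('upstream')                     # None: no fallback requested
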
--- a/mercurial/unionrepo.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/unionrepo.py	Thu Apr 16 20:57:51 2015 -0500
@@ -160,8 +160,11 @@
     def baserevdiff(self, rev1, rev2):
         return filelog.filelog.revdiff(self, rev1, rev2)
 
-    def _file(self, f):
-        self._repo.file(f)
+    def iscensored(self, rev):
+        """Check if a revision is censored."""
+        if rev <= self.repotiprev:
+            return filelog.filelog.iscensored(self, rev)
+        return self.revlog2.iscensored(rev)
 
 class unionpeer(localrepo.localpeer):
     def canpush(self):
--- a/mercurial/util.h	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/util.h	Thu Apr 16 20:57:51 2015 -0500
@@ -172,6 +172,22 @@
 		(d[3]));
 }
 
+static inline int16_t getbeint16(const char *c)
+{
+	const unsigned char *d = (const unsigned char *)c;
+
+	return ((d[0] << 8) |
+		(d[1]));
+}
+
+static inline uint16_t getbeuint16(const char *c)
+{
+	const unsigned char *d = (const unsigned char *)c;
+
+	return ((d[0] << 8) |
+		(d[1]));
+}
+
 static inline void putbe32(uint32_t x, char *c)
 {
 	c[0] = (x >> 24) & 0xff;
@@ -180,4 +196,34 @@
 	c[3] = (x) & 0xff;
 }
 
+static inline double getbefloat64(const char *c)
+{
+	const unsigned char *d = (const unsigned char *)c;
+	double ret;
+	int i;
+	uint64_t t = 0;
+	for (i = 0; i < 8; i++) {
+		t = (t<<8) + d[i];
+	}
+	memcpy(&ret, &t, sizeof(t));
+	return ret;
+}
+
+/* This should be kept in sync with normcasespecs in encoding.py. */
+enum normcase_spec {
+	NORMCASE_LOWER = -1,
+	NORMCASE_UPPER = 1,
+	NORMCASE_OTHER = 0
+};
+
+#define MIN(a, b) (((a)<(b))?(a):(b))
+/* VC9 doesn't include bool and lacks stdbool.h based on my searching */
+#ifdef _MSC_VER
+#define true 1
+#define false 0
+typedef unsigned char bool;
+#else
+#include <stdbool.h>
+#endif
+
 #endif /* _HG_UTIL_H_ */
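
For reference, the values computed by the new big-endian readers match what Python's struct module produces; the byte strings below are sample inputs of the length the C code requires.

    # Python equivalents of getbeint16/getbeuint16/getbefloat64 (illustrative).
    import struct

    def getbeint16(c):
        return struct.unpack('>h', c[:2])[0]   # signed 16-bit, big-endian

    def getbeuint16(c):
        return struct.unpack('>H', c[:2])[0]   # unsigned 16-bit, big-endian

    def getbefloat64(c):
        return struct.unpack('>d', c[:8])[0]   # IEEE 754 double, big-endian

    assert getbeint16('\xff\xfe') == -2
    assert getbeuint16('\xff\xfe') == 0xfffe
    assert getbefloat64('\x3f\xf0\x00\x00\x00\x00\x00\x00') == 1.0
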
--- a/mercurial/util.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/util.py	Thu Apr 16 20:57:51 2015 -0500
@@ -15,7 +15,7 @@
 
 import i18n
 _ = i18n._
-import error, osutil, encoding
+import error, osutil, encoding, parsers
 import errno, shutil, sys, tempfile, traceback
 import re as remod
 import os, time, datetime, calendar, textwrap, signal, collections
@@ -48,6 +48,8 @@
 nlinks = platform.nlinks
 normpath = platform.normpath
 normcase = platform.normcase
+normcasespec = platform.normcasespec
+normcasefallback = platform.normcasefallback
 openhardlinks = platform.openhardlinks
 oslink = platform.oslink
 parsepatchoutput = platform.parsepatchoutput
@@ -57,6 +59,7 @@
 quotecommand = platform.quotecommand
 readpipe = platform.readpipe
 rename = platform.rename
+removedirs = platform.removedirs
 samedevice = platform.samedevice
 samefile = platform.samefile
 samestat = platform.samestat
@@ -359,8 +362,10 @@
     def __iter__(self):
         return self._list.__iter__()
     def update(self, src):
-        for k in src:
-            self[k] = src[k]
+        if isinstance(src, dict):
+            src = src.iteritems()
+        for k, v in src:
+            self[k] = v
     def clear(self):
         dict.clear(self)
         self._list = []
@@ -737,20 +742,27 @@
         except shutil.Error, inst:
             raise Abort(str(inst))
 
-def copyfiles(src, dst, hardlink=None):
-    """Copy a directory tree using hardlinks if possible"""
+def copyfiles(src, dst, hardlink=None, progress=lambda t, pos: None):
+    """Copy a directory tree using hardlinks if possible."""
+    num = 0
 
     if hardlink is None:
         hardlink = (os.stat(src).st_dev ==
                     os.stat(os.path.dirname(dst)).st_dev)
+    if hardlink:
+        topic = _('linking')
+    else:
+        topic = _('copying')
 
-    num = 0
     if os.path.isdir(src):
         os.mkdir(dst)
         for name, kind in osutil.listdir(src):
             srcname = os.path.join(src, name)
             dstname = os.path.join(dst, name)
-            hardlink, n = copyfiles(srcname, dstname, hardlink)
+            def nprog(t, pos):
+                if pos is not None:
+                    return progress(t, pos + num)
+            hardlink, n = copyfiles(srcname, dstname, hardlink, progress=nprog)
             num += n
     else:
         if hardlink:
@@ -762,6 +774,8 @@
         else:
             shutil.copy(src, dst)
         num += 1
+        progress(topic, num)
+    progress(topic, None)
 
     return hardlink, num
 
@@ -1352,11 +1366,11 @@
         formats = defaultdateformats
     date = date.strip()
 
-    if date == _('now'):
+    if date == 'now' or date == _('now'):
         return makedate()
-    if date == _('today'):
+    if date == 'today' or date == _('today'):
         date = datetime.date.today().strftime('%b %d')
-    elif date == _('yesterday'):
+    elif date == 'yesterday' or date == _('yesterday'):
         date = (datetime.date.today() -
                 datetime.timedelta(days=1)).strftime('%b %d')
 
@@ -2227,5 +2241,50 @@
             f.write(' %-*s in %s\n' % (fnmax, fnln, func))
     f.flush()
 
+class dirs(object):
+    '''a multiset of directory names from a dirstate or manifest'''
+
+    def __init__(self, map, skip=None):
+        self._dirs = {}
+        addpath = self.addpath
+        if safehasattr(map, 'iteritems') and skip is not None:
+            for f, s in map.iteritems():
+                if s[0] != skip:
+                    addpath(f)
+        else:
+            for f in map:
+                addpath(f)
+
+    def addpath(self, path):
+        dirs = self._dirs
+        for base in finddirs(path):
+            if base in dirs:
+                dirs[base] += 1
+                return
+            dirs[base] = 1
+
+    def delpath(self, path):
+        dirs = self._dirs
+        for base in finddirs(path):
+            if dirs[base] > 1:
+                dirs[base] -= 1
+                return
+            del dirs[base]
+
+    def __iter__(self):
+        return self._dirs.iterkeys()
+
+    def __contains__(self, d):
+        return d in self._dirs
+
+if safehasattr(parsers, 'dirs'):
+    dirs = parsers.dirs
+
+def finddirs(path):
+    pos = path.rfind('/')
+    while pos != -1:
+        yield path[:pos]
+        pos = path.rfind('/', 0, pos)
+
 # convenient shortcut
 dst = debugstacktrace
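
The pure-Python dirs class above is only a fallback; the C implementation from parsers is used when available. Its trick is that addpath() stops at the first base directory it has already seen, so an ancestor is bumped once per immediate child rather than once per file, and __contains__ answers whether any tracked file lives under a directory. A standalone demonstration, copied from the code above:

    # finddirs() and the addpath() counting trick, standalone (illustrative).
    def finddirs(path):
        pos = path.rfind('/')
        while pos != -1:
            yield path[:pos]
            pos = path.rfind('/', 0, pos)

    print list(finddirs('a/b/c.txt'))   # ['a/b', 'a']

    counts = {}
    def addpath(path):
        for base in finddirs(path):
            if base in counts:
                counts[base] += 1
                return                  # shallower prefixes already counted
            counts[base] = 1

    addpath('a/b/c.txt')
    addpath('a/b/d.txt')
    print counts['a/b'], counts['a']    # 2 1
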
--- a/mercurial/verify.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/verify.py	Thu Apr 16 20:57:51 2015 -0500
@@ -169,7 +169,7 @@
             for f, fn in mf.readdelta(n).iteritems():
                 if not f:
                     err(lr, _("file without name in manifest"))
-                elif f != "/dev/null":
+                elif f != "/dev/null": # ignore this in very old repos
                     filenodes.setdefault(_normpath(f), {}).setdefault(fn, lr)
         except Exception, inst:
             exc(lr, _("reading manifest delta %s") % short(n), inst)
--- a/mercurial/win32.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/win32.py	Thu Apr 16 20:57:51 2015 -0500
@@ -5,7 +5,7 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-import ctypes, errno, os, subprocess, random
+import ctypes, errno, msvcrt, os, subprocess, random
 
 _kernel32 = ctypes.windll.kernel32
 _advapi32 = ctypes.windll.advapi32
@@ -26,6 +26,7 @@
 _ERROR_SUCCESS = 0
 _ERROR_NO_MORE_FILES = 18
 _ERROR_INVALID_PARAMETER = 87
+_ERROR_BROKEN_PIPE = 109
 _ERROR_INSUFFICIENT_BUFFER = 122
 
 # WPARAM is defined as UINT_PTR (unsigned type)
@@ -211,6 +212,10 @@
 _kernel32.CreateToolhelp32Snapshot.argtypes = [_DWORD, _DWORD]
 _kernel32.CreateToolhelp32Snapshot.restype = _BOOL
 
+_kernel32.PeekNamedPipe.argtypes = [_HANDLE, ctypes.c_void_p, _DWORD,
+    ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p]
+_kernel32.PeekNamedPipe.restype = _BOOL
+
 _kernel32.Process32First.argtypes = [_HANDLE, ctypes.c_void_p]
 _kernel32.Process32First.restype = _BOOL
 
@@ -260,6 +265,19 @@
     res2 = _getfileinfo(path2)
     return res1.dwVolumeSerialNumber == res2.dwVolumeSerialNumber
 
+def peekpipe(pipe):
+    handle = msvcrt.get_osfhandle(pipe.fileno())
+    avail = _DWORD()
+
+    if not _kernel32.PeekNamedPipe(handle, None, 0, None, ctypes.byref(avail),
+                                   None):
+        err = _kernel32.GetLastError()
+        if err == _ERROR_BROKEN_PIPE:
+            return 0
+        raise ctypes.WinError(err)
+
+    return avail.value
+
 def testpid(pid):
     '''return True if pid is still running or unable to
     determine, False otherwise'''
@@ -279,7 +297,7 @@
     buf = ctypes.create_string_buffer(size + 1)
     len = _kernel32.GetModuleFileNameA(None, ctypes.byref(buf), size)
     if len == 0:
-        raise ctypes.WinError()
+        raise ctypes.WinError() # Note: WinError is a function
     elif len == size:
         raise ctypes.WinError(_ERROR_INSUFFICIENT_BUFFER)
     return buf.value
--- a/mercurial/windows.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/windows.py	Thu Apr 16 20:57:51 2015 -0500
@@ -26,14 +26,22 @@
 unlink = win32.unlink
 
 umask = 0022
+_SEEK_END = 2 # os.SEEK_END was introduced in Python 2.5
 
-# wrap osutil.posixfile to provide friendlier exceptions
 def posixfile(name, mode='r', buffering=-1):
+    '''Open a file with even more POSIX-like semantics'''
     try:
-        return osutil.posixfile(name, mode, buffering)
+        fp = osutil.posixfile(name, mode, buffering) # may raise WindowsError
+
+        # The position when opening in append mode is implementation defined, so
+        # make it consistent with other platforms, which position at EOF.
+        if 'a' in mode:
+            fp.seek(0, _SEEK_END)
+
+        return fp
     except WindowsError, err:
+        # convert to a friendlier exception
         raise IOError(err.errno, '%s: %s' % (name, err.strerror))
-posixfile.__doc__ = osutil.posixfile.__doc__
 
 class winstdout(object):
     '''stdout on windows misbehaves if sent through a pipe'''
@@ -133,6 +141,10 @@
 def normcase(path):
     return encoding.upper(path)
 
+# see posix.py for definitions
+normcasespec = encoding.normcasespecs.upper
+normcasefallback = encoding.upperfallback
+
 def samestat(s1, s2):
     return False
 
@@ -258,7 +270,7 @@
     If gid is None, return the name of the current group."""
     return None
 
-def _removedirs(name):
+def removedirs(name):
     """special version of os.removedirs that does not remove symlinked
     directories or junction points if they actually contain files"""
     if osutil.listdir(name):
@@ -285,7 +297,7 @@
             raise
     # try removing directories that might now be empty
     try:
-        _removedirs(os.path.dirname(f))
+        removedirs(os.path.dirname(f))
     except OSError:
         pass
 
@@ -351,7 +363,7 @@
     """Read all available data from a pipe."""
     chunks = []
     while True:
-        size = os.fstat(pipe.fileno()).st_size
+        size = win32.peekpipe(pipe)
         if not size:
             break
 
--- a/mercurial/wireproto.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/mercurial/wireproto.py	Thu Apr 16 20:57:51 2015 -0500
@@ -363,8 +363,10 @@
             opts[key] = value
         f = self._callcompressable("getbundle", **opts)
         bundlecaps = kwargs.get('bundlecaps')
-        if bundlecaps is not None and 'HG2Y' in bundlecaps:
-            return bundle2.unbundle20(self.ui, f)
+        if bundlecaps is None:
+            bundlecaps = () # kwargs could have it set to None
+        if util.any((cap.startswith('HG2') for cap in bundlecaps)):
+            return bundle2.getunbundler(self.ui, f)
         else:
             return changegroupmod.cg1unpacker(f, 'UN')
 
@@ -401,7 +403,7 @@
         else:
             # bundle2 push. Send a stream, fetch a stream.
             stream = self._calltwowaystream('unbundle', cg, heads=heads)
-            ret = bundle2.unbundle20(self.ui, stream)
+            ret = bundle2.getunbundler(self.ui, stream)
         return ret
 
     def debugwireargs(self, one, two, three=None, four=None, five=None):
@@ -613,9 +615,9 @@
         # otherwise, add 'streamreqs' detailing our local revlog format
         else:
             caps.append('streamreqs=%s' % ','.join(requiredformats))
-    if repo.ui.configbool('experimental', 'bundle2-exp', False):
+    if repo.ui.configbool('experimental', 'bundle2-advertise', True):
         capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
-        caps.append('bundle2-exp=' + urllib.quote(capsblob))
+        caps.append('bundle2=' + urllib.quote(capsblob))
     caps.append('unbundle=%s' % ','.join(changegroupmod.bundlepriority))
     caps.append('httpheader=1024')
     return caps
@@ -839,35 +841,40 @@
         finally:
             fp.close()
             os.unlink(tempname)
-    except error.BundleValueError, exc:
-            bundler = bundle2.bundle20(repo.ui)
-            errpart = bundler.newpart('b2x:error:unsupportedcontent')
+
+    except (error.BundleValueError, util.Abort, error.PushRaced), exc:
+        # handle non-bundle2 case first
+        if not getattr(exc, 'duringunbundle2', False):
+            try:
+                raise
+            except util.Abort:
+                # The old code we moved used sys.stderr directly.
+                # We did not change it to minimise code change.
+                # This needs to be moved to something proper.
+                # Feel free to do it.
+                sys.stderr.write("abort: %s\n" % exc)
+                return pushres(0)
+            except error.PushRaced:
+                return pusherr(str(exc))
+
+        bundler = bundle2.bundle20(repo.ui)
+        for out in getattr(exc, '_bundle2salvagedoutput', ()):
+            bundler.addpart(out)
+        try:
+            raise
+        except error.BundleValueError, exc:
+            errpart = bundler.newpart('error:unsupportedcontent')
             if exc.parttype is not None:
                 errpart.addparam('parttype', exc.parttype)
             if exc.params:
                 errpart.addparam('params', '\0'.join(exc.params))
-            return streamres(bundler.getchunks())
-    except util.Abort, inst:
-        # The old code we moved used sys.stderr directly.
-        # We did not change it to minimise code change.
-        # This need to be moved to something proper.
-        # Feel free to do it.
-        if getattr(inst, 'duringunbundle2', False):
-            bundler = bundle2.bundle20(repo.ui)
-            manargs = [('message', str(inst))]
+        except util.Abort, exc:
+            manargs = [('message', str(exc))]
             advargs = []
-            if inst.hint is not None:
-                advargs.append(('hint', inst.hint))
-            bundler.addpart(bundle2.bundlepart('b2x:error:abort',
+            if exc.hint is not None:
+                advargs.append(('hint', exc.hint))
+            bundler.addpart(bundle2.bundlepart('error:abort',
                                                manargs, advargs))
-            return streamres(bundler.getchunks())
-        else:
-            sys.stderr.write("abort: %s\n" % inst)
-            return pushres(0)
-    except error.PushRaced, exc:
-        if getattr(exc, 'duringunbundle2', False):
-            bundler = bundle2.bundle20(repo.ui)
-            bundler.newpart('b2x:error:pushraced', [('message', str(exc))])
-            return streamres(bundler.getchunks())
-        else:
-            return pusherr(str(exc))
+        except error.PushRaced, exc:
+            bundler.newpart('error:pushraced', [('message', str(exc))])
+        return streamres(bundler.getchunks())
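
With the experimental 'HG2Y'/'bundle2-exp' names dropped, the client now picks the bundle2 unpacker whenever any advertised capability starts with 'HG2'. The check in isolation, with made-up capability values:

    # The capability-prefix test used to choose the unpacker (illustrative).
    bundlecaps = ['HG20', 'listkeys', 'changegroup=01,02']
    usebundle2 = any(cap.startswith('HG2') for cap in bundlecaps)
    print usebundle2    # True -> bundle2.getunbundler; False -> cg1unpacker
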
--- a/setup.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/setup.py	Thu Apr 16 20:57:51 2015 -0500
@@ -63,6 +63,8 @@
         raise SystemExit(
             "Couldn't import standard bz2 (incomplete Python install).")
 
+ispypy = "PyPy" in sys.version
+
 import os, stat, subprocess, time
 import re
 import shutil
@@ -276,7 +278,7 @@
 
 
 class hgdist(Distribution):
-    pure = 0
+    pure = ispypy
 
     global_options = Distribution.global_options + \
                      [('pure', None, "use pure (slow) Python "
@@ -491,6 +493,7 @@
     Extension('mercurial.mpatch', ['mercurial/mpatch.c'],
               depends=common_depends),
     Extension('mercurial.parsers', ['mercurial/dirs.c',
+                                    'mercurial/manifest.c',
                                     'mercurial/parsers.c',
                                     'mercurial/pathencode.c'],
               depends=common_depends),
@@ -555,7 +558,7 @@
 if py2exeloaded:
     extra['console'] = [
         {'script':'hg',
-         'copyright':'Copyright (C) 2005-2010 Matt Mackall and others',
+         'copyright':'Copyright (C) 2005-2015 Matt Mackall and others',
          'product_version':version}]
     # sub command of 'build' because 'py2exe' does not handle sub_commands
     build.sub_commands.insert(0, ('build_hgextindex', None))
--- a/tests/get-with-headers.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/get-with-headers.py	Thu Apr 16 20:57:51 2015 -0500
@@ -6,6 +6,14 @@
 import httplib, sys
 
 try:
+    import json
+except ImportError:
+    try:
+        import simplejson as json
+    except ImportError:
+        json = None
+
+try:
     import msvcrt, os
     msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
     msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
@@ -20,6 +28,10 @@
 if '--headeronly' in sys.argv:
     sys.argv.remove('--headeronly')
     headeronly = True
+formatjson = False
+if '--json' in sys.argv:
+    sys.argv.remove('--json')
+    formatjson = True
 
 reasons = {'Not modified': 'Not Modified'} # python 2.4
 
@@ -44,7 +56,23 @@
     if not headeronly:
         print
         data = response.read()
-        sys.stdout.write(data)
+
+        # Pretty print JSON. This also has the beneficial side-effect
+        # of verifying emitted JSON is well-formed.
+        if formatjson:
+            if not json:
+                print 'json module not available'
+                print 'did you forget a #require json?'
+                sys.exit(1)
+
+            # json.dumps() will print trailing newlines. Eliminate them
+            # to make tests easier to write.
+            data = json.loads(data)
+            lines = json.dumps(data, sort_keys=True, indent=2).splitlines()
+            for line in lines:
+                print line.rstrip()
+        else:
+            sys.stdout.write(data)
 
         if twice and response.getheader('ETag', None):
             tag = response.getheader('ETag')
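
The --json flag re-emits the server's JSON with sorted keys, fixed indentation and no trailing whitespace, which keeps test output stable and verifies the payload is well-formed. The same normalization on a sample document:

    # The pretty-printing applied by --json (sample input).
    import json

    data = json.loads('{"b": 1, "a": [2, 3]}')
    for line in json.dumps(data, sort_keys=True, indent=2).splitlines():
        print line.rstrip()
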
--- a/tests/hghave.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/hghave.py	Thu Apr 16 20:57:51 2015 -0500
@@ -320,6 +320,11 @@
     except ImportError:
         return False
 
+@check("defaultcacerts", "can verify SSL certs by system's CA certs store")
+def has_defaultcacerts():
+    from mercurial import sslutil
+    return sslutil._defaultcacerts() != '!'
+
 @check("windows", "Windows")
 def has_windows():
     return os.name == 'nt'
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/mockblackbox.py	Thu Apr 16 20:57:51 2015 -0500
@@ -0,0 +1,11 @@
+from mercurial import util
+
+def makedate():
+    return 0, 0
+def getuser():
+    return 'bob'
+
+# mock the date and user apis so the output is always the same
+def uisetup(ui):
+    util.makedate = makedate
+    util.getuser = getuser
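
Tests pick up this shared mock through their HGRCPATH instead of generating a local mock.py, as test-blackbox.t now does below:

    [extensions]
    blackbox=
    mock=$TESTDIR/mockblackbox.py
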
--- a/tests/run-tests.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/run-tests.py	Thu Apr 16 20:57:51 2015 -0500
@@ -76,6 +76,8 @@
 if sys.version_info < (2, 5):
     subprocess._cleanup = lambda: None
 
+wifexited = getattr(os, "WIFEXITED", lambda x: False)
+
 closefds = os.name == 'posix'
 def Popen4(cmd, wd, timeout, env=None):
     processlock.acquire()
@@ -170,6 +172,8 @@
         help="shortcut for --with-hg=<testdir>/../hg")
     parser.add_option("--loop", action="store_true",
         help="loop tests repeatedly")
+    parser.add_option("--runs-per-test", type="int", dest="runs_per_test",
+        help="run each test N times (default=1)", default=1)
     parser.add_option("-n", "--nodiff", action="store_true",
         help="skip showing test changes")
     parser.add_option("-p", "--port", type="int",
@@ -258,6 +262,10 @@
         parser.error("sorry, coverage options do not work when --local "
                      "is specified")
 
+    if options.anycoverage and options.with_hg:
+        parser.error("sorry, coverage options do not work when --with-hg "
+                     "is specified")
+
     global verbose
     if options.verbose:
         verbose = ''
@@ -459,7 +467,14 @@
 
         # Remove any previous output files.
         if os.path.exists(self.errpath):
-            os.remove(self.errpath)
+            try:
+                os.remove(self.errpath)
+            except OSError, e:
+                # We might have raced another test to clean up a .err
+                # file, so ignore ENOENT when removing a previous .err
+                # file.
+                if e.errno != errno.ENOENT:
+                    raise
 
     def run(self, result):
         """Run this test and report results against a TestResult instance."""
@@ -528,14 +543,13 @@
 
         This will return a tuple describing the result of the test.
         """
-        replacements = self._getreplacements()
         env = self._getenv()
         self._daemonpids.append(env['DAEMON_PIDS'])
         self._createhgrc(env['HGRCPATH'])
 
         vlog('# Test', self.name)
 
-        ret, out = self._run(replacements, env)
+        ret, out = self._run(env)
         self._finished = True
         self._ret = ret
         self._out = out
@@ -608,7 +622,7 @@
 
         vlog("# Ret was:", self._ret)
 
-    def _run(self, replacements, env):
+    def _run(self, env):
         # This should be implemented in child classes to run tests.
         raise SkipTest('unknown test type')
 
@@ -691,6 +705,8 @@
         hgrc.write('commit = -d "0 0"\n')
         hgrc.write('shelve = --date "0 0"\n')
         hgrc.write('tag = -d "0 0"\n')
+        hgrc.write('[devel]\n')
+        hgrc.write('all = true\n')
         hgrc.write('[largefiles]\n')
         hgrc.write('usercache = %s\n' %
                    (os.path.join(self._testtmp, '.cache/largefiles')))
@@ -707,6 +723,55 @@
         # Failed is denoted by AssertionError (by default at least).
         raise AssertionError(msg)
 
+    def _runcommand(self, cmd, env, normalizenewlines=False):
+        """Run command in a sub-process, capturing the output (stdout and
+        stderr).
+
+        Return a tuple (exitcode, output). output is None in debug mode.
+        """
+        if self._debug:
+            proc = subprocess.Popen(cmd, shell=True, cwd=self._testtmp,
+                                    env=env)
+            ret = proc.wait()
+            return (ret, None)
+
+        proc = Popen4(cmd, self._testtmp, self._timeout, env)
+        def cleanup():
+            terminate(proc)
+            ret = proc.wait()
+            if ret == 0:
+                ret = signal.SIGTERM << 8
+            killdaemons(env['DAEMON_PIDS'])
+            return ret
+
+        output = ''
+        proc.tochild.close()
+
+        try:
+            output = proc.fromchild.read()
+        except KeyboardInterrupt:
+            vlog('# Handling keyboard interrupt')
+            cleanup()
+            raise
+
+        ret = proc.wait()
+        if wifexited(ret):
+            ret = os.WEXITSTATUS(ret)
+
+        if proc.timeout:
+            ret = 'timeout'
+
+        if ret:
+            killdaemons(env['DAEMON_PIDS'])
+
+        for s, r in self._getreplacements():
+            output = re.sub(s, r, output)
+
+        if normalizenewlines:
+            output = output.replace('\r\n', '\n')
+
+        return ret, output.splitlines(True)
+
 class PythonTest(Test):
     """A Python-based test."""
 
@@ -714,14 +779,13 @@
     def refpath(self):
         return os.path.join(self._testdir, '%s.out' % self.name)
 
-    def _run(self, replacements, env):
+    def _run(self, env):
         py3kswitch = self._py3kwarnings and ' -3' or ''
         cmd = '%s%s "%s"' % (PYTHON, py3kswitch, self.path)
         vlog("# Running", cmd)
-        if os.name == 'nt':
-            replacements.append((r'\r\n', '\n'))
-        result = run(cmd, self._testtmp, replacements, env,
-                   debug=self._debug, timeout=self._timeout)
+        normalizenewlines = os.name == 'nt'
+        result = self._runcommand(cmd, env,
+                                  normalizenewlines=normalizenewlines)
         if self._aborted:
             raise KeyboardInterrupt()
 
@@ -751,7 +815,7 @@
     def refpath(self):
         return os.path.join(self._testdir, self.name)
 
-    def _run(self, replacements, env):
+    def _run(self, env):
         f = open(self.path, 'rb')
         lines = f.readlines()
         f.close()
@@ -768,8 +832,7 @@
         cmd = '%s "%s"' % (self._shell, fname)
         vlog("# Running", cmd)
 
-        exitcode, output = run(cmd, self._testtmp, replacements, env,
-                               debug=self._debug, timeout=self._timeout)
+        exitcode, output = self._runcommand(cmd, env)
 
         if self._aborted:
             raise KeyboardInterrupt()
@@ -1062,49 +1125,6 @@
     def _stringescape(s):
         return TTest.ESCAPESUB(TTest._escapef, s)
 
-
-wifexited = getattr(os, "WIFEXITED", lambda x: False)
-def run(cmd, wd, replacements, env, debug=False, timeout=None):
-    """Run command in a sub-process, capturing the output (stdout and stderr).
-    Return a tuple (exitcode, output).  output is None in debug mode."""
-    if debug:
-        proc = subprocess.Popen(cmd, shell=True, cwd=wd, env=env)
-        ret = proc.wait()
-        return (ret, None)
-
-    proc = Popen4(cmd, wd, timeout, env)
-    def cleanup():
-        terminate(proc)
-        ret = proc.wait()
-        if ret == 0:
-            ret = signal.SIGTERM << 8
-        killdaemons(env['DAEMON_PIDS'])
-        return ret
-
-    output = ''
-    proc.tochild.close()
-
-    try:
-        output = proc.fromchild.read()
-    except KeyboardInterrupt:
-        vlog('# Handling keyboard interrupt')
-        cleanup()
-        raise
-
-    ret = proc.wait()
-    if wifexited(ret):
-        ret = os.WEXITSTATUS(ret)
-
-    if proc.timeout:
-        ret = 'timeout'
-
-    if ret:
-        killdaemons(env['DAEMON_PIDS'])
-
-    for s, r in replacements:
-        output = re.sub(s, r, output)
-    return ret, output.splitlines(True)
-
 iolock = threading.RLock()
 
 class SkipTest(Exception):
@@ -1140,8 +1160,6 @@
         self.warned = []
 
         self.times = []
-        self._started = {}
-        self._stopped = {}
         # Data stored for the benefit of generating xunit reports.
         self.successes = []
         self.faildata = {}
@@ -1263,21 +1281,18 @@
         # child's processes along with real elapsed time taken by a process.
         # This module has one limitation. It can only work for Linux user
         # and not for Windows.
-        self._started[test.name] = os.times()
+        test.started = os.times()
 
     def stopTest(self, test, interrupted=False):
         super(TestResult, self).stopTest(test)
 
-        self._stopped[test.name] = os.times()
+        test.stopped = os.times()
 
-        starttime = self._started[test.name]
-        endtime = self._stopped[test.name]
+        starttime = test.started
+        endtime = test.stopped
         self.times.append((test.name, endtime[2] - starttime[2],
                     endtime[3] - starttime[3], endtime[4] - starttime[4]))
 
-        del self._started[test.name]
-        del self._stopped[test.name]
-
         if interrupted:
             iolock.acquire()
             self.stream.writeln('INTERRUPTED: %s (after %d seconds)' % (
@@ -1288,7 +1303,8 @@
     """Custom unittest TestSuite that knows how to execute Mercurial tests."""
 
     def __init__(self, testdir, jobs=1, whitelist=None, blacklist=None,
-                 retest=False, keywords=None, loop=False,
+                 retest=False, keywords=None, loop=False, runs_per_test=1,
+                 loadtest=None,
                  *args, **kwargs):
         """Create a new instance that can run tests with a configuration.
 
@@ -1323,13 +1339,21 @@
         self._retest = retest
         self._keywords = keywords
         self._loop = loop
+        self._runs_per_test = runs_per_test
+        self._loadtest = loadtest
 
     def run(self, result):
         # We have a number of filters that need to be applied. We do this
         # here instead of inside Test because it makes the running logic for
         # Test simpler.
         tests = []
+        num_tests = [0]
         for test in self._tests:
+            def get():
+                num_tests[0] += 1
+                if getattr(test, 'should_reload', False):
+                    return self._loadtest(test.name, num_tests[0])
+                return test
             if not os.path.exists(test.path):
                 result.addSkip(test, "Doesn't exist")
                 continue
@@ -1356,8 +1380,8 @@
 
                     if ignored:
                         continue
-
-            tests.append(test)
+            for _ in xrange(self._runs_per_test):
+                tests.append(get())
 
         runtests = list(tests)
         done = queue.Queue()
@@ -1373,24 +1397,44 @@
                 done.put(('!', test, 'run-test raised an error, see traceback'))
                 raise
 
+        stoppedearly = False
+
         try:
             while tests or running:
                 if not done.empty() or running == self._jobs or not tests:
                     try:
                         done.get(True, 1)
+                        running -= 1
                         if result and result.shouldStop:
+                            stoppedearly = True
                             break
                     except queue.Empty:
                         continue
-                    running -= 1
                 if tests and not running == self._jobs:
                     test = tests.pop(0)
                     if self._loop:
-                        tests.append(test)
+                        if getattr(test, 'should_reload', False):
+                            num_tests[0] += 1
+                            tests.append(
+                                self._loadtest(test.name, num_tests[0]))
+                        else:
+                            tests.append(test)
                     t = threading.Thread(target=job, name=test.name,
                                          args=(test, result))
                     t.start()
                     running += 1
+
+            # If we stop early we still need to wait on started tests to
+            # finish. Otherwise, there is a race between the test completing
+            # and the test's cleanup code running. This could result in the
+            # test reporting incorrect results.
+            if stoppedearly:
+                while running:
+                    try:
+                        done.get(True, 1)
+                        running -= 1
+                    except queue.Empty:
+                        continue
         except KeyboardInterrupt:
             for test in runtests:
                 test.abort()
@@ -1451,7 +1495,11 @@
                     t = doc.createElement('testcase')
                     t.setAttribute('name', tc)
                     t.setAttribute('time', '%.3f' % timesd[tc])
-                    cd = doc.createCDATASection(cdatasafe(err))
+                    # createCDATASection expects a unicode or it will convert
+                    # using default conversion rules, which will fail if
+                    # the string isn't ASCII.
+                    err = cdatasafe(err).decode('utf-8', 'replace')
+                    cd = doc.createCDATASection(err)
                     t.appendChild(cd)
                     s.appendChild(t)
                 xuf.write(doc.toprettyxml(indent='  ', encoding='utf-8'))
@@ -1545,6 +1593,7 @@
 
     def __init__(self):
         self.options = None
+        self._hgroot = None
         self._testdir = None
         self._hgtmp = None
         self._installdir = None
@@ -1646,6 +1695,11 @@
 
         runtestdir = os.path.abspath(os.path.dirname(__file__))
         path = [self._bindir, runtestdir] + os.environ["PATH"].split(os.pathsep)
+        if os.path.islink(__file__):
+            # test helper will likely be at the end of the symlink
+            realfile = os.path.realpath(__file__)
+            realdir = os.path.abspath(os.path.dirname(realfile))
+            path.insert(2, realdir)
         if self._tmpbindir != self._bindir:
             path = [self._tmpbindir] + path
         os.environ["PATH"] = os.pathsep.join(path)
@@ -1729,7 +1783,8 @@
                               retest=self.options.retest,
                               keywords=self.options.keywords,
                               loop=self.options.loop,
-                              tests=tests)
+                              runs_per_test=self.options.runs_per_test,
+                              tests=tests, loadtest=self._gettest)
             verbosity = 1
             if self.options.verbose:
                 verbosity = 2
@@ -1769,14 +1824,16 @@
         refpath = os.path.join(self._testdir, test)
         tmpdir = os.path.join(self._hgtmp, 'child%d' % count)
 
-        return testcls(refpath, tmpdir,
-                       keeptmpdir=self.options.keep_tmpdir,
-                       debug=self.options.debug,
-                       timeout=self.options.timeout,
-                       startport=self.options.port + count * 3,
-                       extraconfigopts=self.options.extra_config_opt,
-                       py3kwarnings=self.options.py3k_warnings,
-                       shell=self.options.shell)
+        t = testcls(refpath, tmpdir,
+                    keeptmpdir=self.options.keep_tmpdir,
+                    debug=self.options.debug,
+                    timeout=self.options.timeout,
+                    startport=self.options.port + count * 3,
+                    extraconfigopts=self.options.extra_config_opt,
+                    py3kwarnings=self.options.py3k_warnings,
+                    shell=self.options.shell)
+        t.should_reload = True
+        return t
 
     def _cleanup(self):
         """Clean up state from this test invocation."""
@@ -1836,7 +1893,10 @@
         compiler = ''
         if self.options.compiler:
             compiler = '--compiler ' + self.options.compiler
-        pure = self.options.pure and "--pure" or ""
+        if self.options.pure:
+            pure = "--pure"
+        else:
+            pure = ""
         py3 = ''
         if sys.version_info[0] == 3:
             py3 = '--c2to3'
@@ -1844,6 +1904,7 @@
         # Run installer in hg root
         script = os.path.realpath(sys.argv[0])
         hgroot = os.path.dirname(os.path.dirname(script))
+        self._hgroot = hgroot
         os.chdir(hgroot)
         nohome = '--home=""'
         if os.name == 'nt':
@@ -1863,6 +1924,17 @@
                   'prefix': self._installdir, 'libdir': self._pythondir,
                   'bindir': self._bindir,
                   'nohome': nohome, 'logfile': installerrs})
+
+        # setuptools requires install directories to exist.
+        def makedirs(p):
+            try:
+                os.makedirs(p)
+            except OSError, e:
+                if e.errno != errno.EEXIST:
+                    raise
+        makedirs(self._pythondir)
+        makedirs(self._bindir)
+
         vlog("# Running", cmd)
         if os.system(cmd) == 0:
             if not self.options.verbose:
@@ -1870,7 +1942,7 @@
         else:
             f = open(installerrs, 'rb')
             for line in f:
-                print line
+                sys.stdout.write(line)
             f.close()
             sys.exit(1)
         os.chdir(self._testdir)
@@ -1912,8 +1984,14 @@
             rc = os.path.join(self._testdir, '.coveragerc')
             vlog('# Installing coverage rc to %s' % rc)
             os.environ['COVERAGE_PROCESS_START'] = rc
-            fn = os.path.join(self._installdir, '..', '.coverage')
-            os.environ['COVERAGE_FILE'] = fn
+            covdir = os.path.join(self._installdir, '..', 'coverage')
+            try:
+                os.mkdir(covdir)
+            except OSError, e:
+                if e.errno != errno.EEXIST:
+                    raise
+
+            os.environ['COVERAGE_DIR'] = covdir
 
     def _checkhglib(self, verb):
         """Ensure that the 'mercurial' package imported by python is
@@ -1946,27 +2024,31 @@
 
     def _outputcoverage(self):
         """Produce code coverage output."""
+        from coverage import coverage
+
         vlog('# Producing coverage report')
-        os.chdir(self._pythondir)
+        # chdir is the easiest way to get short, relative paths in the
+        # output.
+        os.chdir(self._hgroot)
+        covdir = os.path.join(self._installdir, '..', 'coverage')
+        cov = coverage(data_file=os.path.join(covdir, 'cov'))
 
-        def covrun(*args):
-            cmd = 'coverage %s' % ' '.join(args)
-            vlog('# Running: %s' % cmd)
-            os.system(cmd)
+        # Map install directory paths back to source directory.
+        cov.config.paths['srcdir'] = ['.', self._pythondir]
 
-        covrun('-c')
-        omit = ','.join(os.path.join(x, '*') for x in
-                        [self._bindir, self._testdir])
-        covrun('-i', '-r', '"--omit=%s"' % omit) # report
+        cov.combine()
+
+        omit = [os.path.join(x, '*') for x in [self._bindir, self._testdir]]
+        cov.report(ignore_errors=True, omit=omit)
+
         if self.options.htmlcov:
             htmldir = os.path.join(self._testdir, 'htmlcov')
-            covrun('-i', '-b', '"--directory=%s"' % htmldir,
-                   '"--omit=%s"' % omit)
+            cov.html_report(directory=htmldir, omit=omit)
         if self.options.annotate:
             adir = os.path.join(self._testdir, 'annotated')
             if not os.path.isdir(adir):
                 os.mkdir(adir)
-            covrun('-i', '-a', '"--directory=%s"' % adir, '"--omit=%s"' % omit)
+            cov.annotate(directory=adir, omit=omit)
 
     def _findprogram(self, program):
         """Search PATH for a executable program"""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/seq.py	Thu Apr 16 20:57:51 2015 -0500
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+#
+# A portable replacement for 'seq'
+#
+# Usage:
+#   seq STOP              [1, STOP] stepping by 1
+#   seq START STOP        [START, STOP] stepping by 1
+#   seq START STEP STOP   [START, STOP] stepping by STEP
+
+import sys
+
+start = 1
+if len(sys.argv) > 2:
+    start = int(sys.argv[1])
+
+step = 1
+if len(sys.argv) > 3:
+    step = int(sys.argv[2])
+
+stop = int(sys.argv[-1]) + 1
+
+for i in xrange(start, stop, step):
+    print i
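
Given the argument handling above, some representative invocations (each number is printed on its own line):

    $ python seq.py 3          -> 1 2 3
    $ python seq.py 2 4        -> 2 3 4
    $ python seq.py 2 2 10     -> 2 4 6 8 10
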
--- a/tests/sitecustomize.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/sitecustomize.py	Thu Apr 16 20:57:51 2015 -0500
@@ -1,5 +1,16 @@
-try:
-    import coverage
-    getattr(coverage, 'process_startup', lambda: None)()
-except ImportError:
-    pass
+import os
+
+if os.environ.get('COVERAGE_PROCESS_START'):
+    try:
+        import coverage
+        import random
+
+        # uuid is better, but not available in Python 2.4.
+        covpath = os.path.join(os.environ['COVERAGE_DIR'],
+                               'cov.%s' % random.randrange(0, 1000000000000))
+        cov = coverage.coverage(data_file=covpath, auto_data=True)
+        cov._warn_no_data = False
+        cov._warn_unimported_source = False
+        cov.start()
+    except ImportError:
+        pass
--- a/tests/test-add.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-add.t	Thu Apr 16 20:57:51 2015 -0500
@@ -176,12 +176,48 @@
   $ mkdir CapsDir1/CapsDir/SubDir
   $ echo def > CapsDir1/CapsDir/SubDir/Def.txt
 
-  $ hg add -v capsdir1/capsdir
+  $ hg add capsdir1/capsdir
   adding CapsDir1/CapsDir/AbC.txt (glob)
   adding CapsDir1/CapsDir/SubDir/Def.txt (glob)
 
   $ hg forget capsdir1/capsdir/abc.txt
   removing CapsDir1/CapsDir/AbC.txt (glob)
+
+  $ hg forget capsdir1/capsdir
+  removing CapsDir1/CapsDir/SubDir/Def.txt (glob)
+
+  $ hg add capsdir1
+  adding CapsDir1/CapsDir/AbC.txt (glob)
+  adding CapsDir1/CapsDir/SubDir/Def.txt (glob)
+
+  $ hg ci -m "AbCDef" capsdir1/capsdir
+
+  $ hg status -A capsdir1/capsdir
+  C CapsDir1/CapsDir/AbC.txt
+  C CapsDir1/CapsDir/SubDir/Def.txt
+
+  $ hg files capsdir1/capsdir
+  CapsDir1/CapsDir/AbC.txt (glob)
+  CapsDir1/CapsDir/SubDir/Def.txt (glob)
+
+  $ echo xyz > CapsDir1/CapsDir/SubDir/Def.txt
+  $ hg ci -m xyz capsdir1/capsdir/subdir/def.txt
+
+  $ hg revert -r '.^' capsdir1/capsdir
+  reverting CapsDir1/CapsDir/SubDir/Def.txt (glob)
+
+  $ hg diff capsdir1/capsdir
+  diff -r 5112e00e781d CapsDir1/CapsDir/SubDir/Def.txt
+  --- a/CapsDir1/CapsDir/SubDir/Def.txt	Thu Jan 01 00:00:00 1970 +0000
+  +++ b/CapsDir1/CapsDir/SubDir/Def.txt	* +0000 (glob)
+  @@ -1,1 +1,1 @@
+  -xyz
+  +def
+
+  $ hg remove -f 'glob:**.txt' -X capsdir1/capsdir
+  $ hg remove -f 'glob:**.txt' -I capsdir1/capsdir
+  removing CapsDir1/CapsDir/AbC.txt (glob)
+  removing CapsDir1/CapsDir/SubDir/Def.txt (glob)
 #endif
 
   $ cd ..
--- a/tests/test-addremove.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-addremove.t	Thu Apr 16 20:57:51 2015 -0500
@@ -30,12 +30,12 @@
   adding foo
   $ hg forget foo
 #if windows
-  $ hg -v addremove nonexistant
-  nonexistant: The system cannot find the file specified
+  $ hg -v addremove nonexistent
+  nonexistent: The system cannot find the file specified
   [1]
 #else
-  $ hg -v addremove nonexistant
-  nonexistant: No such file or directory
+  $ hg -v addremove nonexistent
+  nonexistent: No such file or directory
   [1]
 #endif
   $ cd ..
@@ -88,13 +88,13 @@
 
   $ rm c
 #if windows
-  $ hg ci -A -m "c" nonexistant
-  nonexistant: The system cannot find the file specified
+  $ hg ci -A -m "c" nonexistent
+  nonexistent: The system cannot find the file specified
   abort: failed to mark all new/missing files as added/removed
   [255]
 #else
-  $ hg ci -A -m "c" nonexistant
-  nonexistant: No such file or directory
+  $ hg ci -A -m "c" nonexistent
+  nonexistent: No such file or directory
   abort: failed to mark all new/missing files as added/removed
   [255]
 #endif
--- a/tests/test-alias.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-alias.t	Thu Apr 16 20:57:51 2015 -0500
@@ -360,9 +360,11 @@
   sub
   $ hg --cwd .. subalias > /dev/null
   hg: unknown command 'subalias'
+  (did you mean one of idalias?)
   [255]
   $ hg -R .. subalias > /dev/null
   hg: unknown command 'subalias'
+  (did you mean one of idalias?)
   [255]
 
 
@@ -370,12 +372,18 @@
 
   $ hg mainalias > /dev/null
   hg: unknown command 'mainalias'
+  (did you mean one of idalias?)
   [255]
   $ hg -R .. mainalias
   main
   $ hg --cwd .. mainalias
   main
 
+typos get useful suggestions
+  $ hg --cwd .. manalias
+  hg: unknown command 'manalias'
+  (did you mean one of idalias, mainalias, manifest?)
+  [255]
 
 shell aliases with escaped $ chars
 
--- a/tests/test-annotate.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-annotate.t	Thu Apr 16 20:57:51 2015 -0500
@@ -398,6 +398,88 @@
   20: 4 baz:4
   16: 5
 
+annotate clean file
+
+  $ hg annotate -ncr "wdir()" foo
+  11 472b18db256d : foo
+
+annotate modified file
+
+  $ echo foofoo >> foo
+  $ hg annotate -r "wdir()" foo
+  11 : foo
+  20+: foofoo
+
+  $ hg annotate -cr "wdir()" foo
+  472b18db256d : foo
+  b6bedd5477e7+: foofoo
+
+  $ hg annotate -ncr "wdir()" foo
+  11 472b18db256d : foo
+  20 b6bedd5477e7+: foofoo
+
+  $ hg annotate --debug -ncr "wdir()" foo
+  11 472b18db256d1e8282064eab4bfdaf48cbfe83cd : foo
+  20 b6bedd5477e797f25e568a6402d4697f3f895a72+: foofoo
+
+  $ hg annotate -udr "wdir()" foo
+  test Thu Jan 01 00:00:00 1970 +0000: foo
+  test [A-Za-z0-9:+ ]+: foofoo (re)
+
+  $ hg annotate -ncr "wdir()" -Tjson foo
+  [
+   {
+    "line": "foo\n",
+    "node": "472b18db256d1e8282064eab4bfdaf48cbfe83cd",
+    "rev": 11
+   },
+   {
+    "line": "foofoo\n",
+    "node": null,
+    "rev": null
+   }
+  ]
+
+annotate added file
+
+  $ echo bar > bar
+  $ hg add bar
+  $ hg annotate -ncr "wdir()" bar
+  20 b6bedd5477e7+: bar
+
+annotate renamed file
+
+  $ hg rename foo renamefoo2
+  $ hg annotate -ncr "wdir()" renamefoo2
+  11 472b18db256d : foo
+  20 b6bedd5477e7+: foofoo
+
+annotate missing file
+
+  $ rm baz
+#if windows
+  $ hg annotate -ncr "wdir()" baz
+  abort: $TESTTMP\repo\baz: The system cannot find the file specified
+  [255]
+#else
+  $ hg annotate -ncr "wdir()" baz
+  abort: No such file or directory: $TESTTMP/repo/baz
+  [255]
+#endif
+
+annotate removed file
+
+  $ hg rm baz
+#if windows
+  $ hg annotate -ncr "wdir()" baz
+  abort: $TESTTMP\repo\baz: The system cannot find the file specified
+  [255]
+#else
+  $ hg annotate -ncr "wdir()" baz
+  abort: No such file or directory: $TESTTMP/repo/baz
+  [255]
+#endif
+
 Test annotate with whitespace options
 
   $ cd ..
--- a/tests/test-basic.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-basic.t	Thu Apr 16 20:57:51 2015 -0500
@@ -5,6 +5,7 @@
   defaults.commit=-d "0 0"
   defaults.shelve=--date "0 0"
   defaults.tag=-d "0 0"
+  devel.all=true
   largefiles.usercache=$TESTTMP/.cache/largefiles (glob)
   ui.slash=True
   ui.interactive=False
--- a/tests/test-blackbox.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-blackbox.t	Thu Apr 16 20:57:51 2015 -0500
@@ -1,20 +1,8 @@
 setup
-  $ cat > mock.py <<EOF
-  > from mercurial import util
-  > 
-  > def makedate():
-  >     return 0, 0
-  > def getuser():
-  >     return 'bob'
-  > # mock the date and user apis so the output is always the same
-  > def uisetup(ui):
-  >     util.makedate = makedate
-  >     util.getuser = getuser
-  > EOF
   $ cat >> $HGRCPATH <<EOF
   > [extensions]
   > blackbox=
-  > mock=`pwd`/mock.py
+  > mock=$TESTDIR/mockblackbox.py
   > mq=
   > EOF
   $ hg init blackboxtest
@@ -124,18 +112,6 @@
   1970/01/01 00:00:00 bob> wrote base branch cache with 1 labels and 2 nodes
   1970/01/01 00:00:00 bob> strip tip exited 0 after * seconds (glob)
 
-tags cache gets logged
-  $ hg up tip
-  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  $ hg tag -m 'create test tag' test-tag
-  $ hg tags
-  tip                                3:5b5562c08298
-  test-tag                           2:d02f48003e62
-  $ hg blackbox -l 3
-  1970/01/01 00:00:00 bob> resolved 1 tags cache entries from 1 manifests in ?.???? seconds (glob)
-  1970/01/01 00:00:00 bob> writing tags cache file with 2 heads and 1 tags
-  1970/01/01 00:00:00 bob> tags exited 0 after ?.?? seconds (glob)
-
 extension and python hooks - use the eol extension for a pythonhook
 
   $ echo '[extensions]' >> .hg/hgrc
@@ -144,9 +120,10 @@
   $ echo 'update = echo hooked' >> .hg/hgrc
   $ hg update
   hooked
-  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  $ hg blackbox -l 4
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ hg blackbox -l 5
   1970/01/01 00:00:00 bob> update
+  1970/01/01 00:00:00 bob> writing .hg/cache/tags2-visible with 0 tags
   1970/01/01 00:00:00 bob> pythonhook-preupdate: hgext.eol.preupdate finished in * seconds (glob)
   1970/01/01 00:00:00 bob> exthook-update: echo hooked finished in * seconds (glob)
   1970/01/01 00:00:00 bob> update exited 0 after * seconds (glob)
@@ -160,7 +137,7 @@
   $ hg status
   $ hg status
   $ hg tip -q
-  3:5b5562c08298
+  2:d02f48003e62
   $ ls .hg/blackbox.log*
   .hg/blackbox.log
   .hg/blackbox.log.1
--- a/tests/test-bookmarks-pushpull.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-bookmarks-pushpull.t	Thu Apr 16 20:57:51 2015 -0500
@@ -164,6 +164,40 @@
      Z                         2:0d2164f0ce0d
      foo                       -1:000000000000
    * foobar                    1:9b140be10808
+
+(test that too many divergent bookmarks are handled)
+
+  $ python $TESTDIR/seq.py 1 100 | while read i; do hg bookmarks -r 000000000000 "X@${i}"; done
+  $ hg pull ../a
+  pulling from ../a
+  searching for changes
+  no changes found
+  warning: failed to assign numbered name to divergent bookmark X
+  divergent bookmark @ stored as @1
+  $ hg bookmarks | grep '^   X' | grep -v ':000000000000'
+     X                         1:9b140be10808
+     X@foo                     2:0d2164f0ce0d
+
+(test that remotely diverged bookmarks are reused if they aren't changed)
+
+  $ hg bookmarks | grep '^   @'
+     @                         1:9b140be10808
+     @1                        2:0d2164f0ce0d
+     @foo                      2:0d2164f0ce0d
+  $ hg pull ../a
+  pulling from ../a
+  searching for changes
+  no changes found
+  warning: failed to assign numbered name to divergent bookmark X
+  divergent bookmark @ stored as @1
+  $ hg bookmarks | grep '^   @'
+     @                         1:9b140be10808
+     @1                        2:0d2164f0ce0d
+     @foo                      2:0d2164f0ce0d
+
+  $ python $TESTDIR/seq.py 1 100 | while read i; do hg bookmarks -d "X@${i}"; done
+  $ hg bookmarks -d "@1"
+
   $ hg push -f ../a
   pushing to ../a
   searching for changes
@@ -368,8 +402,11 @@
   $ hg out -B http://localhost:$HGPORT/
   comparing with http://localhost:$HGPORT/
   searching for changed bookmarks
-  no changed bookmarks found
-  [1]
+     @                         0d2164f0ce0d
+     X                         0d2164f0ce0d
+     Z                         0d2164f0ce0d
+     foo                                   
+     foobar                                
   $ hg push -B Z http://localhost:$HGPORT/
   pushing to http://localhost:$HGPORT/
   searching for changes
@@ -380,6 +417,8 @@
   $ hg in -B http://localhost:$HGPORT/
   comparing with http://localhost:$HGPORT/
   searching for changed bookmarks
+     @                         9b140be10808
+     X                         9b140be10808
      Z                         0d2164f0ce0d
      foo                       000000000000
      foobar                    9b140be10808
@@ -409,6 +448,121 @@
 
   $ cd ..
 
+Test to show the result of bookmark comparison
+
+  $ mkdir bmcomparison
+  $ cd bmcomparison
+
+  $ hg init source
+  $ hg -R source debugbuilddag '+2*2*3*4'
+  $ hg -R source log -G --template '{rev}:{node|short}'
+  o  4:e7bd5218ca15
+  |
+  | o  3:6100d3090acf
+  |/
+  | o  2:fa942426a6fd
+  |/
+  | o  1:66f7d451a68b
+  |/
+  o  0:1ea73414a91b
+  
+  $ hg -R source bookmarks -r 0 SAME
+  $ hg -R source bookmarks -r 0 ADV_ON_REPO1
+  $ hg -R source bookmarks -r 0 ADV_ON_REPO2
+  $ hg -R source bookmarks -r 0 DIFF_ADV_ON_REPO1
+  $ hg -R source bookmarks -r 0 DIFF_ADV_ON_REPO2
+  $ hg -R source bookmarks -r 1 DIVERGED
+
+  $ hg clone -U source repo1
+
+(test that incoming/outgoing exit with 1 if there is no bookmark to
+be exchanged)
+
+  $ hg -R repo1 incoming -B
+  comparing with $TESTTMP/bmcomparison/source
+  searching for changed bookmarks
+  no changed bookmarks found
+  [1]
+  $ hg -R repo1 outgoing -B
+  comparing with $TESTTMP/bmcomparison/source
+  searching for changed bookmarks
+  no changed bookmarks found
+  [1]
+
+  $ hg -R repo1 bookmarks -f -r 1 ADD_ON_REPO1
+  $ hg -R repo1 bookmarks -f -r 2 ADV_ON_REPO1
+  $ hg -R repo1 bookmarks -f -r 3 DIFF_ADV_ON_REPO1
+  $ hg -R repo1 bookmarks -f -r 3 DIFF_DIVERGED
+  $ hg -R repo1 -q --config extensions.mq= strip 4
+  $ hg -R repo1 log -G --template '{node|short} ({bookmarks})'
+  o  6100d3090acf (DIFF_ADV_ON_REPO1 DIFF_DIVERGED)
+  |
+  | o  fa942426a6fd (ADV_ON_REPO1)
+  |/
+  | o  66f7d451a68b (ADD_ON_REPO1 DIVERGED)
+  |/
+  o  1ea73414a91b (ADV_ON_REPO2 DIFF_ADV_ON_REPO2 SAME)
+  
+
+  $ hg clone -U source repo2
+  $ hg -R repo2 bookmarks -f -r 1 ADD_ON_REPO2
+  $ hg -R repo2 bookmarks -f -r 1 ADV_ON_REPO2
+  $ hg -R repo2 bookmarks -f -r 2 DIVERGED
+  $ hg -R repo2 bookmarks -f -r 4 DIFF_ADV_ON_REPO2
+  $ hg -R repo2 bookmarks -f -r 4 DIFF_DIVERGED
+  $ hg -R repo2 -q --config extensions.mq= strip 3
+  $ hg -R repo2 log -G --template '{node|short} ({bookmarks})'
+  o  e7bd5218ca15 (DIFF_ADV_ON_REPO2 DIFF_DIVERGED)
+  |
+  | o  fa942426a6fd (DIVERGED)
+  |/
+  | o  66f7d451a68b (ADD_ON_REPO2 ADV_ON_REPO2)
+  |/
+  o  1ea73414a91b (ADV_ON_REPO1 DIFF_ADV_ON_REPO1 SAME)
+  
+
+(test that differences in bookmarks between repositories are fully shown)
+
+  $ hg -R repo1 incoming -B repo2 -v
+  comparing with repo2
+  searching for changed bookmarks
+     ADD_ON_REPO2              66f7d451a68b added
+     ADV_ON_REPO2              66f7d451a68b advanced
+     DIFF_ADV_ON_REPO2         e7bd5218ca15 changed
+     DIFF_DIVERGED             e7bd5218ca15 changed
+     DIVERGED                  fa942426a6fd diverged
+  $ hg -R repo1 outgoing -B repo2 -v
+  comparing with repo2
+  searching for changed bookmarks
+     ADD_ON_REPO1              66f7d451a68b added
+     ADD_ON_REPO2                           deleted
+     ADV_ON_REPO1              fa942426a6fd advanced
+     DIFF_ADV_ON_REPO1         6100d3090acf advanced
+     DIFF_ADV_ON_REPO2         1ea73414a91b changed
+     DIFF_DIVERGED             6100d3090acf changed
+     DIVERGED                  66f7d451a68b diverged
+
+  $ hg -R repo2 incoming -B repo1 -v
+  comparing with repo1
+  searching for changed bookmarks
+     ADD_ON_REPO1              66f7d451a68b added
+     ADV_ON_REPO1              fa942426a6fd advanced
+     DIFF_ADV_ON_REPO1         6100d3090acf changed
+     DIFF_DIVERGED             6100d3090acf changed
+     DIVERGED                  66f7d451a68b diverged
+  $ hg -R repo2 outgoing -B repo1 -v
+  comparing with repo1
+  searching for changed bookmarks
+     ADD_ON_REPO1                           deleted
+     ADD_ON_REPO2              66f7d451a68b added
+     ADV_ON_REPO2              66f7d451a68b advanced
+     DIFF_ADV_ON_REPO1         1ea73414a91b changed
+     DIFF_ADV_ON_REPO2         e7bd5218ca15 advanced
+     DIFF_DIVERGED             e7bd5218ca15 changed
+     DIVERGED                  fa942426a6fd diverged
+
+  $ cd ..
+
 Pushing a bookmark should only push the changes required by that
 bookmark, not all outgoing changes:
   $ hg clone http://localhost:$HGPORT/ addmarks
@@ -460,6 +614,13 @@
   $ hg -R ../b id -r W
   cc978a373a53 tip W
 
+Check summary output for incoming/outgoing bookmarks
+
+  $ hg bookmarks -d X
+  $ hg bookmarks -d Y
+  $ hg summary --remote | grep '^remote:'
+  remote: *, 2 incoming bookmarks, 1 outgoing bookmarks (glob)
+
   $ cd ..
 
 pushing an unchanged bookmark should result in no changes
--- a/tests/test-bookmarks-rebase.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-bookmarks-rebase.t	Thu Apr 16 20:57:51 2015 -0500
@@ -66,3 +66,27 @@
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     0
   
+aborted rebase should restore active bookmark.
+
+  $ hg up 1
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  (leaving bookmark two)
+  $ echo 'e' > d
+  $ hg ci -A -m "4"
+  adding d
+  created new head
+  $ hg bookmark three
+  $ hg rebase -s three -d two
+  rebasing 4:dd7c838e8362 "4" (tip three)
+  merging d
+  warning: conflicts during merge.
+  merging d incomplete! (edit conflicts, then use 'hg resolve --mark')
+  unresolved conflicts (see hg resolve, then hg rebase --continue)
+  [1]
+  $ hg rebase --abort
+  rebase aborted
+  $ hg bookmark
+     one                       1:925d80f479bb
+   * three                     4:dd7c838e8362
+     two                       3:42e5ed2cdcf4
+
--- a/tests/test-branches.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-branches.t	Thu Apr 16 20:57:51 2015 -0500
@@ -547,11 +547,22 @@
   0050: bf be 84 1b 00 00 00 02 d3 f1 63 45 80 00 00 02 |..........cE....|
   0060: e3 d4 9c 05 80 00 00 02 e2 3b 55 05 00 00 00 02 |.........;U.....|
   0070: f8 94 c2 56 80 00 00 03                         |...V....|
+
+#if unix-permissions no-root
+no errors when revbranchcache is not writable
+
+  $ echo >> .hg/cache/rbc-revs-v1
+  $ chmod a-w .hg/cache/rbc-revs-v1
+  $ rm -f .hg/cache/branch* && hg head a -T '{rev}\n'
+  5
+  $ chmod a+w .hg/cache/rbc-revs-v1
+#endif
+
 recovery from invalid cache revs file with trailing data
   $ echo >> .hg/cache/rbc-revs-v1
   $ rm -f .hg/cache/branch* && hg head a -T '{rev}\n' --debug
+  5
   truncating cache/rbc-revs-v1 to 120
-  5
   $ f --size .hg/cache/rbc-revs*
   .hg/cache/rbc-revs-v1: size=120
 recovery from invalid cache file with partial last record
@@ -560,8 +571,8 @@
   $ f --size .hg/cache/rbc-revs*
   .hg/cache/rbc-revs-v1: size=119
   $ rm -f .hg/cache/branch* && hg head a -T '{rev}\n' --debug
+  5
   truncating cache/rbc-revs-v1 to 112
-  5
   $ f --size .hg/cache/rbc-revs*
   .hg/cache/rbc-revs-v1: size=120
 recovery from invalid cache file with missing record - no truncation
@@ -579,11 +590,11 @@
   $ f -qDB 112 rbc-revs-v1 >> .hg/cache/rbc-revs-v1
   $ f --size .hg/cache/rbc-revs*
   .hg/cache/rbc-revs-v1: size=120
-  $ hg log -r 'branch(.)' -T '{rev} '
-  3 4 8 9 10 11 12 13  (no-eol)
+  $ hg log -r 'branch(.)' -T '{rev} ' --debug
+  3 4 8 9 10 11 12 13 truncating cache/rbc-revs-v1 to 8
   $ rm -f .hg/cache/branch* && hg head a -T '{rev}\n' --debug
-  truncating cache/rbc-revs-v1 to 8
   5
+  truncating cache/rbc-revs-v1 to 104
   $ f --size --hexdump --bytes=16 .hg/cache/rbc-revs*
   .hg/cache/rbc-revs-v1: size=120
   0000: 19 70 9c 5a 00 00 00 00 dd 6b 44 0d 00 00 00 01 |.p.Z.....kD.....|
--- a/tests/test-bundle-type.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-bundle-type.t	Thu Apr 16 20:57:51 2015 -0500
@@ -87,6 +87,7 @@
   $ hg init tgarbage
   $ cd tgarbage
   $ hg pull ../bgarbage
+  pulling from ../bgarbage
   abort: ../bgarbage: not a Mercurial bundle
   [255]
   $ cd ..
--- a/tests/test-bundle.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-bundle.t	Thu Apr 16 20:57:51 2015 -0500
@@ -224,7 +224,7 @@
   adding manifests
   adding file changes
   added 9 changesets with 7 changes to 4 files (+1 heads)
-  changegroup hook: HG_NODE=f9ee2f85a263049e9ae6d37a0e67e96194ffb735 HG_SOURCE=pull HG_URL=bundle:../full.hg
+  changegroup hook: HG_NODE=f9ee2f85a263049e9ae6d37a0e67e96194ffb735 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=bundle:../full.hg (glob)
   (run 'hg heads' to see heads, 'hg merge' to merge)
 
 Rollback empty
@@ -247,7 +247,7 @@
   adding manifests
   adding file changes
   added 9 changesets with 7 changes to 4 files (+1 heads)
-  changegroup hook: HG_NODE=f9ee2f85a263049e9ae6d37a0e67e96194ffb735 HG_SOURCE=pull HG_URL=bundle:empty+full.hg
+  changegroup hook: HG_NODE=f9ee2f85a263049e9ae6d37a0e67e96194ffb735 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=bundle:empty+full.hg (glob)
   (run 'hg heads' to see heads, 'hg merge' to merge)
 
 Create partial clones
--- a/tests/test-bundle2-exchange.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-bundle2-exchange.t	Thu Apr 16 20:57:51 2015 -0500
@@ -25,10 +25,9 @@
   > [phases]
   > publish=False
   > [hooks]
-  > changegroup = sh -c  "HG_LOCAL= python \"$TESTDIR/printenv.py\" changegroup"
-  > b2x-pretransactionclose.tip = hg log -r tip -T "pre-close-tip:{node|short} {phase} {bookmarks}\n"
-  > b2x-transactionclose.tip = hg log -r tip -T "postclose-tip:{node|short} {phase} {bookmarks}\n"
-  > b2x-transactionclose.env = sh -c  "HG_LOCAL= python \"$TESTDIR/printenv.py\" b2x-transactionclose"
+  > pretxnclose.tip = hg log -r tip -T "pre-close-tip:{node|short} {phase} {bookmarks}\n"
+  > txnclose.tip = hg log -r tip -T "postclose-tip:{node|short} {phase} {bookmarks}\n"
+  > txnclose.env = sh -c  "HG_LOCAL= python \"$TESTDIR/printenv.py\" txnclose"
   > pushkey= sh "$TESTTMP/bundle2-pushkey-hook.sh"
   > EOF
 
@@ -39,13 +38,19 @@
   $ touch a
   $ hg add a
   $ hg commit -m 'a'
+  pre-close-tip:3903775176ed draft 
+  postclose-tip:3903775176ed draft 
+  txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
 
   $ hg unbundle $TESTDIR/bundles/rebase.hg
   adding changesets
   adding manifests
   adding file changes
   added 8 changesets with 7 changes to 7 files (+3 heads)
-  changegroup hook: HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_SOURCE=unbundle HG_URL=bundle:*/rebase.hg (glob)
+  pre-close-tip:02de42196ebe draft 
+  postclose-tip:02de42196ebe draft 
+  txnclose hook: HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_PHASES_MOVED=1 HG_SOURCE=unbundle HG_TXNID=TXN:* HG_TXNNAME=unbundle (glob)
+  bundle:*/tests/bundles/rebase.hg HG_URL=bundle:*/tests/bundles/rebase.hg (glob)
   (run 'hg heads' to see heads, 'hg merge' to merge)
 
   $ cd ..
@@ -56,11 +61,20 @@
 Add more obsolescence information
 
   $ hg -R main debugobsolete -d '0 0' 1111111111111111111111111111111111111111 `getmainid 9520eea781bc`
+  pre-close-tip:02de42196ebe draft 
+  postclose-tip:02de42196ebe draft 
+  txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
   $ hg -R main debugobsolete -d '0 0' 2222222222222222222222222222222222222222 `getmainid 24b6387c8c8c`
+  pre-close-tip:02de42196ebe draft 
+  postclose-tip:02de42196ebe draft 
+  txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
 
 clone --pull
 
   $ hg -R main phase --public cd010b8cd998
+  pre-close-tip:000000000000 public 
+  postclose-tip:02de42196ebe draft 
+  txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
   $ hg clone main other --pull --rev 9520eea781bc
   adding changesets
   adding manifests
@@ -69,8 +83,8 @@
   1 new obsolescence markers
   pre-close-tip:9520eea781bc draft 
   postclose-tip:9520eea781bc draft 
-  b2x-transactionclose hook: HG_NEW_OBSMARKERS=1 HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/main
-  changegroup hook: HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_SOURCE=pull HG_URL=file:$TESTTMP/main
+  txnclose hook: HG_NEW_OBSMARKERS=1 HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
+  file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
   updating to branch default
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg -R other log -G
@@ -84,6 +98,9 @@
 pull
 
   $ hg -R main phase --public 9520eea781bc
+  pre-close-tip:000000000000 public 
+  postclose-tip:02de42196ebe draft 
+  txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
   $ hg -R other pull -r 24b6387c8c8c
   pulling from $TESTTMP/main (glob)
   searching for changes
@@ -94,8 +111,8 @@
   1 new obsolescence markers
   pre-close-tip:24b6387c8c8c draft 
   postclose-tip:24b6387c8c8c draft 
-  b2x-transactionclose hook: HG_NEW_OBSMARKERS=1 HG_NODE=24b6387c8c8cae37178880f3fa95ded3cb1cf785 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/main
-  changegroup hook: HG_NODE=24b6387c8c8cae37178880f3fa95ded3cb1cf785 HG_SOURCE=pull HG_URL=file:$TESTTMP/main
+  txnclose hook: HG_NEW_OBSMARKERS=1 HG_NODE=24b6387c8c8cae37178880f3fa95ded3cb1cf785 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
+  file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
   (run 'hg heads' to see heads, 'hg merge' to merge)
   $ hg -R other log -G
   o  2:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com>  F
@@ -111,12 +128,16 @@
 pull empty (with phase movement)
 
   $ hg -R main phase --public 24b6387c8c8c
+  pre-close-tip:000000000000 public 
+  postclose-tip:02de42196ebe draft 
+  txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
   $ hg -R other pull -r 24b6387c8c8c
   pulling from $TESTTMP/main (glob)
   no changes found
   pre-close-tip:000000000000 public 
   postclose-tip:24b6387c8c8c public 
-  b2x-transactionclose hook: HG_NEW_OBSMARKERS=0 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/main
+  txnclose hook: HG_NEW_OBSMARKERS=0 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
+  file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
   $ hg -R other log -G
   o  2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com>  F
   |
@@ -135,7 +156,8 @@
   no changes found
   pre-close-tip:24b6387c8c8c public 
   postclose-tip:24b6387c8c8c public 
-  b2x-transactionclose hook: HG_NEW_OBSMARKERS=0 HG_SOURCE=pull HG_URL=file:$TESTTMP/main
+  txnclose hook: HG_NEW_OBSMARKERS=0 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
+  file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
   $ hg -R other log -G
   o  2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com>  F
   |
@@ -151,14 +173,29 @@
 
   $ hg -R main bookmark --rev eea13746799a book_eea1
   $ hg -R main debugobsolete -d '0 0' 3333333333333333333333333333333333333333 `getmainid eea13746799a`
+  pre-close-tip:02de42196ebe draft 
+  postclose-tip:02de42196ebe draft 
+  txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
   $ hg -R main bookmark --rev 02de42196ebe book_02de
   $ hg -R main debugobsolete -d '0 0' 4444444444444444444444444444444444444444 `getmainid 02de42196ebe`
+  pre-close-tip:02de42196ebe draft book_02de
+  postclose-tip:02de42196ebe draft book_02de
+  txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
   $ hg -R main bookmark --rev 42ccdea3bb16 book_42cc
   $ hg -R main debugobsolete -d '0 0' 5555555555555555555555555555555555555555 `getmainid 42ccdea3bb16`
+  pre-close-tip:02de42196ebe draft book_02de
+  postclose-tip:02de42196ebe draft book_02de
+  txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
   $ hg -R main bookmark --rev 5fddd98957c8 book_5fdd
   $ hg -R main debugobsolete -d '0 0' 6666666666666666666666666666666666666666 `getmainid 5fddd98957c8`
+  pre-close-tip:02de42196ebe draft book_02de
+  postclose-tip:02de42196ebe draft book_02de
+  txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
   $ hg -R main bookmark --rev 32af7686d403 book_32af
   $ hg -R main debugobsolete -d '0 0' 7777777777777777777777777777777777777777 `getmainid 32af7686d403`
+  pre-close-tip:02de42196ebe draft book_02de
+  postclose-tip:02de42196ebe draft book_02de
+  txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob)
 
   $ hg -R other bookmark --rev cd010b8cd998 book_eea1
   $ hg -R other bookmark --rev cd010b8cd998 book_02de
@@ -167,6 +204,9 @@
   $ hg -R other bookmark --rev cd010b8cd998 book_32af
 
   $ hg -R main phase --public eea13746799a
+  pre-close-tip:000000000000 public 
+  postclose-tip:02de42196ebe draft book_02de
+  txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
 
 push
   $ hg -R main push other --rev eea13746799a --bookmark book_eea1
@@ -180,8 +220,7 @@
   lock:  free
   wlock: free
   postclose-tip:eea13746799a public book_eea1
-  b2x-transactionclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2-EXP=1 HG_NEW_OBSMARKERS=1 HG_NODE=eea13746799a9e0bfd88f29d3c2e9dc9389f524f HG_PHASES_MOVED=1 HG_SOURCE=push HG_URL=push
-  changegroup hook: HG_BUNDLE2-EXP=1 HG_NODE=eea13746799a9e0bfd88f29d3c2e9dc9389f524f HG_SOURCE=push HG_URL=push
+  txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_NEW_OBSMARKERS=1 HG_NODE=eea13746799a9e0bfd88f29d3c2e9dc9389f524f HG_PHASES_MOVED=1 HG_SOURCE=push HG_TXNID=TXN:* HG_TXNNAME=push HG_URL=push (glob)
   remote: adding changesets
   remote: adding manifests
   remote: adding file changes
@@ -190,7 +229,8 @@
   updating bookmark book_eea1
   pre-close-tip:02de42196ebe draft book_02de
   postclose-tip:02de42196ebe draft book_02de
-  b2x-transactionclose hook: HG_SOURCE=push-response HG_URL=file:$TESTTMP/other
+  txnclose hook: HG_SOURCE=push-response HG_TXNID=TXN:* HG_TXNNAME=push-response (glob)
+  file:/*/$TESTTMP/other HG_URL=file:$TESTTMP/other (glob)
   $ hg -R other log -G
   o    3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
   |\
@@ -218,8 +258,8 @@
   updating bookmark book_02de
   pre-close-tip:02de42196ebe draft book_02de
   postclose-tip:02de42196ebe draft book_02de
-  b2x-transactionclose hook: HG_BOOKMARK_MOVED=1 HG_NEW_OBSMARKERS=1 HG_NODE=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=ssh://user@dummy/main
-  changegroup hook: HG_NODE=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_SOURCE=pull HG_URL=ssh://user@dummy/main
+  txnclose hook: HG_BOOKMARK_MOVED=1 HG_NEW_OBSMARKERS=1 HG_NODE=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
+  ssh://user@dummy/main HG_URL=ssh://user@dummy/main
   (run 'hg heads' to see heads, 'hg merge' to merge)
   $ hg -R other debugobsolete
   1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
@@ -243,8 +283,8 @@
   updating bookmark book_42cc
   pre-close-tip:42ccdea3bb16 draft book_42cc
   postclose-tip:42ccdea3bb16 draft book_42cc
-  b2x-transactionclose hook: HG_BOOKMARK_MOVED=1 HG_NEW_OBSMARKERS=1 HG_NODE=42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=http://localhost:$HGPORT/
-  changegroup hook: HG_NODE=42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 HG_SOURCE=pull HG_URL=http://localhost:$HGPORT/
+  txnclose hook: HG_BOOKMARK_MOVED=1 HG_NEW_OBSMARKERS=1 HG_NODE=42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob)
+  http://localhost:$HGPORT/ HG_URL=http://localhost:$HGPORT/
   (run 'hg heads .' to see heads, 'hg merge' to merge)
   $ cat main-error.log
   $ hg -R other debugobsolete
@@ -270,11 +310,11 @@
   remote: lock:  free
   remote: wlock: free
   remote: postclose-tip:5fddd98957c8 draft book_5fdd
-  remote: b2x-transactionclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2-EXP=1 HG_NEW_OBSMARKERS=1 HG_NODE=5fddd98957c8a54a4d436dfe1da9d87f21a1b97b HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
-  remote: changegroup hook: HG_BUNDLE2-EXP=1 HG_NODE=5fddd98957c8a54a4d436dfe1da9d87f21a1b97b HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
+  remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_NEW_OBSMARKERS=1 HG_NODE=5fddd98957c8a54a4d436dfe1da9d87f21a1b97b HG_SOURCE=serve HG_TXNID=TXN:* HG_TXNNAME=serve HG_URL=remote:ssh:127.0.0.1 (glob)
   pre-close-tip:02de42196ebe draft book_02de
   postclose-tip:02de42196ebe draft book_02de
-  b2x-transactionclose hook: HG_SOURCE=push-response HG_URL=ssh://user@dummy/other
+  txnclose hook: HG_SOURCE=push-response HG_TXNID=TXN:* HG_TXNNAME=push-response (glob)
+  ssh://user@dummy/other HG_URL=ssh://user@dummy/other
   $ hg -R other log -G
   o  6:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
   |
@@ -304,6 +344,9 @@
   $ cat other.pid >> $DAEMON_PIDS
 
   $ hg -R main phase --public 32af7686d403
+  pre-close-tip:000000000000 public 
+  postclose-tip:02de42196ebe draft book_02de
+  txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob)
   $ hg -R main push http://localhost:$HGPORT2/ -r 32af7686d403 --bookmark book_32af
   pushing to http://localhost:$HGPORT2/
   searching for changes
@@ -315,7 +358,8 @@
   updating bookmark book_32af
   pre-close-tip:02de42196ebe draft book_02de
   postclose-tip:02de42196ebe draft book_02de
-  b2x-transactionclose hook: HG_SOURCE=push-response HG_URL=http://localhost:$HGPORT2/
+  txnclose hook: HG_SOURCE=push-response HG_TXNID=TXN:* HG_TXNNAME=push-response (glob)
+  http://localhost:$HGPORT2/ HG_URL=http://localhost:$HGPORT2/
   $ cat other-error.log
 
 Check final content.
@@ -382,7 +426,7 @@
   >         bundler.newpart('test:unknown')
   >     if reason == 'race':
   >         # 20 Bytes of crap
-  >         bundler.newpart('b2x:check:heads', data='01234567890123456789')
+  >         bundler.newpart('check:heads', data='01234567890123456789')
   > 
   > @bundle2.parthandler("test:abort")
   > def handleabort(op, part):
@@ -400,6 +444,9 @@
   $ echo 'I' > I
   $ hg add I
   $ hg ci -m 'I'
+  pre-close-tip:e7ec4e813ba6 draft 
+  postclose-tip:e7ec4e813ba6 draft 
+  txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
   $ hg id
   e7ec4e813ba6 tip
   $ cd ..
@@ -501,7 +548,7 @@
   > [failpush]
   > reason =
   > [hooks]
-  > b2x-pretransactionclose.failpush = false
+  > pretxnclose.failpush = false
   > EOF
 
   $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
@@ -514,13 +561,21 @@
   pre-close-tip:e7ec4e813ba6 draft 
   transaction abort!
   rollback completed
-  abort: b2x-pretransactionclose.failpush hook exited with status 1
+  remote: adding changesets
+  remote: adding manifests
+  remote: adding file changes
+  remote: added 1 changesets with 1 changes to 1 files
+  abort: pretxnclose.failpush hook exited with status 1
   [255]
 
   $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
   pushing to ssh://user@dummy/other
   searching for changes
-  abort: b2x-pretransactionclose.failpush hook exited with status 1
+  remote: adding changesets
+  remote: adding manifests
+  remote: adding file changes
+  remote: added 1 changesets with 1 changes to 1 files
+  abort: pretxnclose.failpush hook exited with status 1
   remote: pre-close-tip:e7ec4e813ba6 draft 
   remote: transaction abort!
   remote: rollback completed
@@ -529,7 +584,11 @@
   $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
   pushing to http://localhost:$HGPORT2/
   searching for changes
-  abort: b2x-pretransactionclose.failpush hook exited with status 1
+  remote: adding changesets
+  remote: adding manifests
+  remote: adding file changes
+  remote: added 1 changesets with 1 changes to 1 files
+  abort: pretxnclose.failpush hook exited with status 1
   [255]
 
 (check that no 'pending' files remain)
--- a/tests/test-bundle2-format.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-bundle2-format.t	Thu Apr 16 20:57:51 2015 -0500
@@ -92,11 +92,11 @@
   > 
   >     if opts['reply']:
   >         capsstring = 'ping-pong\nelephants=babar,celeste\ncity%3D%21=celeste%2Cville'
-  >         bundler.newpart('b2x:replycaps', data=capsstring)
+  >         bundler.newpart('replycaps', data=capsstring)
   > 
   >     if opts['pushrace']:
   >         # also serve to test the assignement of data outside of init
-  >         part = bundler.newpart('b2x:check:heads')
+  >         part = bundler.newpart('check:heads')
   >         part.data = '01234567890123456789'
   > 
   >     revs = opts['rev']
@@ -109,7 +109,7 @@
   >             headcommon  = [c.node() for c in repo.set('parents(%ld) - %ld', revs, revs)]
   >             outgoing = discovery.outgoing(repo.changelog, headcommon, headmissing)
   >             cg = changegroup.getlocalchangegroup(repo, 'test:bundle2', outgoing, None)
-  >             bundler.newpart('b2x:changegroup', data=cg.getchunks(),
+  >             bundler.newpart('changegroup', data=cg.getchunks(),
   >                             mandatory=False)
   > 
   >     if opts['parts']:
@@ -136,7 +136,7 @@
   >        def genraise():
   >            yield 'first line\n'
   >            raise RuntimeError('Someone set up us the bomb!')
-  >        bundler.newpart('b2x:output', data=genraise(), mandatory=False)
+  >        bundler.newpart('output', data=genraise(), mandatory=False)
   > 
   >     if path is None:
   >        file = sys.stdout
@@ -157,7 +157,7 @@
   >         lock = repo.lock()
   >         tr = repo.transaction('processbundle')
   >         try:
-  >             unbundler = bundle2.unbundle20(ui, sys.stdin)
+  >             unbundler = bundle2.getunbundler(ui, sys.stdin)
   >             op = bundle2.processbundle(repo, unbundler, lambda: tr)
   >             tr.close()
   >         except error.BundleValueError, exc:
@@ -183,7 +183,7 @@
   > @command('statbundle2', [], '')
   > def cmdstatbundle2(ui, repo):
   >     """print statistic on the bundle2 container read from stdin"""
-  >     unbundler = bundle2.unbundle20(ui, sys.stdin)
+  >     unbundler = bundle2.getunbundler(ui, sys.stdin)
   >     try:
   >         params = unbundler.params
   >     except error.BundleValueError, exc:
@@ -237,7 +237,7 @@
 Test bundling
 
   $ hg bundle2
-  HG2Y\x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
+  HG20\x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
 
 Test unbundling
 
@@ -267,7 +267,7 @@
 Test generation simple option
 
   $ hg bundle2 --param 'caution'
-  HG2Y\x00\x00\x00\x07caution\x00\x00\x00\x00 (no-eol) (esc)
+  HG20\x00\x00\x00\x07caution\x00\x00\x00\x00 (no-eol) (esc)
 
 Test unbundling
 
@@ -279,7 +279,7 @@
 Test generation multiple option
 
   $ hg bundle2 --param 'caution' --param 'meal'
-  HG2Y\x00\x00\x00\x0ccaution meal\x00\x00\x00\x00 (no-eol) (esc)
+  HG20\x00\x00\x00\x0ccaution meal\x00\x00\x00\x00 (no-eol) (esc)
 
 Test unbundling
 
@@ -295,7 +295,7 @@
 Test generation
 
   $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants'
-  HG2Y\x00\x00\x00\x1ccaution meal=vegan elephants\x00\x00\x00\x00 (no-eol) (esc)
+  HG20\x00\x00\x00\x1ccaution meal=vegan elephants\x00\x00\x00\x00 (no-eol) (esc)
 
 Test unbundling
 
@@ -313,7 +313,7 @@
 Test generation
 
   $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple
-  HG2Y\x00\x00\x00)e%7C%21%207/=babar%25%23%3D%3Dtutu simple\x00\x00\x00\x00 (no-eol) (esc)
+  HG20\x00\x00\x00)e%7C%21%207/=babar%25%23%3D%3Dtutu simple\x00\x00\x00\x00 (no-eol) (esc)
 
 Test unbundling
 
@@ -337,7 +337,7 @@
 bundling debug
 
   $ hg bundle2 --debug --param 'e|! 7/=babar%#==tutu' --param simple ../out.hg2
-  start emission of HG2Y stream
+  start emission of HG20 stream
   bundle parameter: e%7C%21%207/=babar%25%23%3D%3Dtutu simple
   start of parts
   end of bundle
@@ -345,12 +345,12 @@
 file content is ok
 
   $ cat ../out.hg2
-  HG2Y\x00\x00\x00)e%7C%21%207/=babar%25%23%3D%3Dtutu simple\x00\x00\x00\x00 (no-eol) (esc)
+  HG20\x00\x00\x00)e%7C%21%207/=babar%25%23%3D%3Dtutu simple\x00\x00\x00\x00 (no-eol) (esc)
 
 unbundling debug
 
   $ hg statbundle2 --debug < ../out.hg2
-  start processing of HG2Y stream
+  start processing of HG20 stream
   reading bundle2 stream parameters
   ignoring unknown parameter 'e|! 7/'
   ignoring unknown parameter 'simple'
@@ -384,7 +384,7 @@
 =================
 
   $ hg bundle2 --parts ../parts.hg2 --debug
-  start emission of HG2Y stream
+  start emission of HG20 stream
   bundle parameter: 
   start of parts
   bundle part: "test:empty"
@@ -397,7 +397,7 @@
   end of bundle
 
   $ cat ../parts.hg2
-  HG2Y\x00\x00\x00\x00\x00\x00\x00\x11 (esc)
+  HG20\x00\x00\x00\x00\x00\x00\x00\x11 (esc)
   test:empty\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11 (esc)
   test:empty\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10	test:song\x00\x00\x00\x02\x00\x00\x00\x00\x00\xb2Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko (esc)
   Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
@@ -437,7 +437,7 @@
   parts count:   7
 
   $ hg statbundle2 --debug < ../parts.hg2
-  start processing of HG2Y stream
+  start processing of HG20 stream
   reading bundle2 stream parameters
   options count: 0
   start extraction of bundle2 parts
@@ -516,7 +516,7 @@
 Process the bundle
 
   $ hg unbundle2 --debug < ../parts.hg2
-  start processing of HG2Y stream
+  start processing of HG20 stream
   reading bundle2 stream parameters
   start extraction of bundle2 parts
   part header size: 17
@@ -610,21 +610,18 @@
 The reply is a bundle
 
   $ cat ../reply.hg2
-  HG2Y\x00\x00\x00\x00\x00\x00\x00\x1f (esc)
-  b2x:output\x00\x00\x00\x00\x00\x01\x0b\x01in-reply-to3\x00\x00\x00\xd9The choir starts singing: (esc)
+  HG20\x00\x00\x00\x00\x00\x00\x00\x1b\x06output\x00\x00\x00\x00\x00\x01\x0b\x01in-reply-to3\x00\x00\x00\xd9The choir starts singing: (esc)
       Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
       Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
       Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
-  \x00\x00\x00\x00\x00\x00\x00\x1f (esc)
-  b2x:output\x00\x00\x00\x01\x00\x01\x0b\x01in-reply-to4\x00\x00\x00\xc9debugreply: capabilities: (esc)
+  \x00\x00\x00\x00\x00\x00\x00\x1b\x06output\x00\x00\x00\x01\x00\x01\x0b\x01in-reply-to4\x00\x00\x00\xc9debugreply: capabilities: (esc)
   debugreply:     'city=!'
   debugreply:         'celeste,ville'
   debugreply:     'elephants'
   debugreply:         'babar'
   debugreply:         'celeste'
   debugreply:     'ping-pong'
-  \x00\x00\x00\x00\x00\x00\x00\x1e	test:pong\x00\x00\x00\x02\x01\x00\x0b\x01in-reply-to7\x00\x00\x00\x00\x00\x00\x00\x1f (esc)
-  b2x:output\x00\x00\x00\x03\x00\x01\x0b\x01in-reply-to7\x00\x00\x00=received ping request (id 7) (esc)
+  \x00\x00\x00\x00\x00\x00\x00\x1e	test:pong\x00\x00\x00\x02\x01\x00\x0b\x01in-reply-to7\x00\x00\x00\x00\x00\x00\x00\x1b\x06output\x00\x00\x00\x03\x00\x01\x0b\x01in-reply-to7\x00\x00\x00=received ping request (id 7) (esc)
   replying to ping request (id 7)
   \x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
 
@@ -632,11 +629,11 @@
 
   $ hg statbundle2 < ../reply.hg2
   options count: 0
-    :b2x:output:
+    :output:
       mandatory: 0
       advisory: 1
       payload: 217 bytes
-    :b2x:output:
+    :output:
       mandatory: 0
       advisory: 1
       payload: 201 bytes
@@ -644,7 +641,7 @@
       mandatory: 1
       advisory: 0
       payload: 0 bytes
-    :b2x:output:
+    :output:
       mandatory: 0
       advisory: 1
       payload: 61 bytes
@@ -714,10 +711,10 @@
   9520eea781bcca16c1e15acc0ba14335a0e8e5ba
   eea13746799a9e0bfd88f29d3c2e9dc9389f524f
   02de42196ebee42ef284b6780a87cdc96e8eaab6
-  start emission of HG2Y stream
+  start emission of HG20 stream
   bundle parameter: 
   start of parts
-  bundle part: "b2x:changegroup"
+  bundle part: "changegroup"
   bundling: 1/4 changesets (25.00%)
   bundling: 2/4 changesets (50.00%)
   bundling: 3/4 changesets (75.00%)
@@ -732,7 +729,7 @@
   end of bundle
 
   $ cat ../rev.hg2
-  HG2Y\x00\x00\x00\x00\x00\x00\x00\x16\x0fb2x:changegroup\x00\x00\x00\x00\x00\x00\x00\x00\x06\x13\x00\x00\x00\xa42\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j_\xdd\xd9\x89W\xc8\xa5JMCm\xfe\x1d\xa9\xd8\x7f!\xa1\xb9{\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)6e1f4c47ecb533ffd0c8e52cdc88afb6cd39e20c (esc)
+  HG20\x00\x00\x00\x00\x00\x00\x00\x12\x0bchangegroup\x00\x00\x00\x00\x00\x00\x00\x00\x06\x13\x00\x00\x00\xa42\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j_\xdd\xd9\x89W\xc8\xa5JMCm\xfe\x1d\xa9\xd8\x7f!\xa1\xb9{\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)6e1f4c47ecb533ffd0c8e52cdc88afb6cd39e20c (esc)
   \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x02D (esc)
   \x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01D\x00\x00\x00\xa4\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\xcd\x01\x0b\x8c\xd9\x98\xf3\x98\x1aZ\x81\x15\xf9O\x8d\xa4\xabP`\x89\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)4dece9c826f69490507b98c6383a3009b295837d (esc)
   \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x02E (esc)
@@ -757,7 +754,7 @@
 
   $ hg debugbundle ../rev.hg2
   Stream params: {}
-  b2x:changegroup -- '{}'
+  changegroup -- '{}'
       32af7686d403cf45b5d95f2d70cebea587ac806a
       9520eea781bcca16c1e15acc0ba14335a0e8e5ba
       eea13746799a9e0bfd88f29d3c2e9dc9389f524f
@@ -776,8 +773,7 @@
   addchangegroup return: 1
 
   $ cat ../rev-reply.hg2
-  HG2Y\x00\x00\x00\x00\x00\x00\x003\x15b2x:reply:changegroup\x00\x00\x00\x00\x00\x02\x0b\x01\x06\x01in-reply-to1return1\x00\x00\x00\x00\x00\x00\x00\x1f (esc)
-  b2x:output\x00\x00\x00\x01\x00\x01\x0b\x01in-reply-to1\x00\x00\x00dadding changesets (esc)
+  HG20\x00\x00\x00\x00\x00\x00\x00/\x11reply:changegroup\x00\x00\x00\x00\x00\x02\x0b\x01\x06\x01in-reply-to1return1\x00\x00\x00\x00\x00\x00\x00\x1b\x06output\x00\x00\x00\x01\x00\x01\x0b\x01in-reply-to1\x00\x00\x00dadding changesets (esc)
   adding manifests
   adding file changes
   added 0 changesets with 0 changes to 3 files
@@ -793,8 +789,8 @@
 Should still be a valid bundle
 
   $ cat ../genfailed.hg2
-  HG2Y\x00\x00\x00\x00\x00\x00\x00\x11 (esc)
-  b2x:output\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00L\x0fb2x:error:abort\x00\x00\x00\x00\x01\x00\x07-messageunexpected error: Someone set up us the bomb!\x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
+  HG20\x00\x00\x00\x00\x00\x00\x00\r (no-eol) (esc)
+  \x06output\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00H\x0berror:abort\x00\x00\x00\x00\x01\x00\x07-messageunexpected error: Someone set up us the bomb!\x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
 
 And its handling on the other side raises a clean exception
 
--- a/tests/test-bundle2-multiple-changegroups.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-bundle2-multiple-changegroups.t	Thu Apr 16 20:57:51 2015 -0500
@@ -14,13 +14,13 @@
   >     intermediates = [repo[r].p1().node() for r in heads]
   >     cg = changegroup.getchangegroup(repo, source, heads=intermediates,
   >                                      common=common, bundlecaps=bundlecaps)
-  >     bundler.newpart('b2x:output', data='changegroup1')
-  >     bundler.newpart('b2x:changegroup', data=cg.getchunks())
+  >     bundler.newpart('output', data='changegroup1')
+  >     bundler.newpart('changegroup', data=cg.getchunks())
   >     cg = changegroup.getchangegroup(repo, source, heads=heads,
   >                                      common=common + intermediates,
   >                                      bundlecaps=bundlecaps)
-  >     bundler.newpart('b2x:output', data='changegroup2')
-  >     bundler.newpart('b2x:changegroup', data=cg.getchunks())
+  >     bundler.newpart('output', data='changegroup2')
+  >     bundler.newpart('changegroup', data=cg.getchunks())
   > 
   > def _pull(repo, *args, **kwargs):
   >   pullop = _orig_pull(repo, *args, **kwargs)
@@ -82,17 +82,17 @@
   adding manifests
   adding file changes
   added 1 changesets with 1 changes to 1 files
-  pretxnchangegroup hook: HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
+  pretxnchangegroup hook: HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
   remote: changegroup2
   adding changesets
   adding manifests
   adding file changes
   added 1 changesets with 1 changes to 1 files
-  pretxnchangegroup hook: HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
-  changegroup hook: HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
-  incoming hook: HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
-  changegroup hook: HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
-  incoming hook: HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
+  pretxnchangegroup hook: HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
+  changegroup hook: HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
+  incoming hook: HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
+  changegroup hook: HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
+  incoming hook: HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
   pullop.cgresult is 1
   (run 'hg update' to get a working copy)
   $ hg update
@@ -152,20 +152,20 @@
   adding manifests
   adding file changes
   added 2 changesets with 2 changes to 2 files (+1 heads)
-  pretxnchangegroup hook: HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
+  pretxnchangegroup hook: HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
   remote: changegroup2
   adding changesets
   adding manifests
   adding file changes
   added 3 changesets with 3 changes to 3 files (+1 heads)
-  pretxnchangegroup hook: HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
-  changegroup hook: HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
-  incoming hook: HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
-  incoming hook: HG_NODE=8a5212ebc8527f9fb821601504794e3eb11a1ed3 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
-  changegroup hook: HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
-  incoming hook: HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
-  incoming hook: HG_NODE=1d14c3ce6ac0582d2809220d33e8cd7a696e0156 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
-  incoming hook: HG_NODE=5cd59d311f6508b8e0ed28a266756c859419c9f1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
+  pretxnchangegroup hook: HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
+  changegroup hook: HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
+  incoming hook: HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
+  incoming hook: HG_NODE=8a5212ebc8527f9fb821601504794e3eb11a1ed3 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
+  changegroup hook: HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
+  incoming hook: HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
+  incoming hook: HG_NODE=1d14c3ce6ac0582d2809220d33e8cd7a696e0156 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
+  incoming hook: HG_NODE=5cd59d311f6508b8e0ed28a266756c859419c9f1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
   pullop.cgresult is 3
   (run 'hg heads' to see heads, 'hg merge' to merge)
   $ hg log -G
@@ -225,17 +225,17 @@
   adding manifests
   adding file changes
   added 1 changesets with 0 changes to 0 files (-1 heads)
-  pretxnchangegroup hook: HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
+  pretxnchangegroup hook: HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
   remote: changegroup2
   adding changesets
   adding manifests
   adding file changes
   added 1 changesets with 1 changes to 1 files
-  pretxnchangegroup hook: HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
-  changegroup hook: HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
-  incoming hook: HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
-  changegroup hook: HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
-  incoming hook: HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/repo
+  pretxnchangegroup hook: HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
+  changegroup hook: HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
+  incoming hook: HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
+  changegroup hook: HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
+  incoming hook: HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
   pullop.cgresult is -2
   (run 'hg update' to get a working copy)
   $ hg log -G
--- a/tests/test-bundle2-pushback.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-bundle2-pushback.t	Thu Apr 16 20:57:51 2015 -0500
@@ -6,21 +6,21 @@
   > from mercurial import bundle2, pushkey, exchange, util
   > def _newhandlechangegroup(op, inpart):
   >     """This function wraps the changegroup part handler for getbundle.
-  >     It issues an additional b2x:pushkey part to send a new
+  >     It issues an additional pushkey part to send a new
   >     bookmark back to the client"""
   >     result = bundle2.handlechangegroup(op, inpart)
-  >     if 'b2x:pushback' in op.reply.capabilities:
+  >     if 'pushback' in op.reply.capabilities:
   >         params = {'namespace': 'bookmarks',
   >                   'key': 'new-server-mark',
   >                   'old': '',
   >                   'new': 'tip'}
   >         encodedparams = [(k, pushkey.encode(v)) for (k,v) in params.items()]
-  >         op.reply.newpart('b2x:pushkey', mandatoryparams=encodedparams)
+  >         op.reply.newpart('pushkey', mandatoryparams=encodedparams)
   >     else:
-  >         op.reply.newpart('b2x:output', data='pushback not enabled')
+  >         op.reply.newpart('output', data='pushback not enabled')
   >     return result
   > _newhandlechangegroup.params = bundle2.handlechangegroup.params
-  > bundle2.parthandlermapping['b2x:changegroup'] = _newhandlechangegroup
+  > bundle2.parthandlermapping['changegroup'] = _newhandlechangegroup
   > EOF
 
   $ cat >> $HGRCPATH <<EOF
--- a/tests/test-bundle2-remote-changegroup.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-bundle2-remote-changegroup.t	Thu Apr 16 20:57:51 2015 -0500
@@ -35,7 +35,7 @@
   >     def newpart(name, data=''):
   >         """wrapper around bundler.newpart adding an extra part making the
   >         client output information about each processed part"""
-  >         bundler.newpart('b2x:output', data=name)
+  >         bundler.newpart('output', data=name)
   >         part = bundler.newpart(name, data=data)
   >         return part
   > 
@@ -50,13 +50,13 @@
   >            bundledata = open(file, 'rb').read()
   >            digest = util.digester.preferred(b2caps['digests'])
   >            d = util.digester([digest], bundledata)
-  >            part = newpart('b2x:remote-changegroup')
+  >            part = newpart('remote-changegroup')
   >            part.addparam('url', url)
   >            part.addparam('size', str(len(bundledata)))
   >            part.addparam('digests', digest)
   >            part.addparam('digest:%s' % digest, d[digest])
   >         elif verb == 'raw-remote-changegroup':
-  >            part = newpart('b2x:remote-changegroup')
+  >            part = newpart('remote-changegroup')
   >            for k, v in eval(args).items():
   >                part.addparam(k, str(v))
   >         elif verb == 'changegroup':
@@ -65,7 +65,7 @@
   >             heads = [repo.lookup(r) for r in repo.revs(heads)]
   >             cg = changegroup.getchangegroup(repo, 'changegroup',
   >                 heads=heads, common=common)
-  >             newpart('b2x:changegroup', cg.getchunks())
+  >             newpart('changegroup', cg.getchunks())
   >         else:
   >             raise Exception('unknown verb')
   > 
@@ -137,7 +137,7 @@
   $ hg pull -R clone ssh://user@dummy/repo
   pulling from ssh://user@dummy/repo
   searching for changes
-  remote: b2x:remote-changegroup
+  remote: remote-changegroup
   adding changesets
   adding manifests
   adding file changes
@@ -180,12 +180,12 @@
   $ hg pull -R clone ssh://user@dummy/repo
   pulling from ssh://user@dummy/repo
   searching for changes
-  remote: b2x:remote-changegroup
+  remote: remote-changegroup
   adding changesets
   adding manifests
   adding file changes
   added 2 changesets with 2 changes to 2 files (+1 heads)
-  remote: b2x:changegroup
+  remote: changegroup
   adding changesets
   adding manifests
   adding file changes
@@ -228,12 +228,12 @@
   $ hg pull -R clone ssh://user@dummy/repo
   pulling from ssh://user@dummy/repo
   searching for changes
-  remote: b2x:changegroup
+  remote: changegroup
   adding changesets
   adding manifests
   adding file changes
   added 2 changesets with 2 changes to 2 files (+1 heads)
-  remote: b2x:remote-changegroup
+  remote: remote-changegroup
   adding changesets
   adding manifests
   adding file changes
@@ -279,17 +279,17 @@
   $ hg pull -R clone ssh://user@dummy/repo
   pulling from ssh://user@dummy/repo
   searching for changes
-  remote: b2x:remote-changegroup
+  remote: remote-changegroup
   adding changesets
   adding manifests
   adding file changes
   added 2 changesets with 2 changes to 2 files (+1 heads)
-  remote: b2x:remote-changegroup
+  remote: remote-changegroup
   adding changesets
   adding manifests
   adding file changes
   added 2 changesets with 1 changes to 1 files
-  remote: b2x:changegroup
+  remote: changegroup
   adding changesets
   adding manifests
   adding file changes
@@ -324,7 +324,7 @@
   > EOF
   $ hg clone ssh://user@dummy/repo clone
   requesting all changes
-  remote: b2x:remote-changegroup
+  remote: remote-changegroup
   adding changesets
   adding manifests
   adding file changes
@@ -338,7 +338,7 @@
   > EOF
   $ hg clone ssh://user@dummy/repo clone
   requesting all changes
-  remote: b2x:remote-changegroup
+  remote: remote-changegroup
   adding changesets
   adding manifests
   adding file changes
@@ -354,7 +354,7 @@
   > EOF
   $ hg clone ssh://user@dummy/repo clone
   requesting all changes
-  remote: b2x:remote-changegroup
+  remote: remote-changegroup
   adding changesets
   adding manifests
   adding file changes
@@ -372,7 +372,7 @@
   > EOF
   $ hg clone ssh://user@dummy/repo clone
   requesting all changes
-  remote: b2x:remote-changegroup
+  remote: remote-changegroup
   adding changesets
   adding manifests
   adding file changes
@@ -388,7 +388,7 @@
   > EOF
   $ hg clone ssh://user@dummy/repo clone
   requesting all changes
-  remote: b2x:remote-changegroup
+  remote: remote-changegroup
   adding changesets
   adding manifests
   adding file changes
@@ -404,7 +404,7 @@
   > EOF
   $ hg clone ssh://user@dummy/repo clone
   requesting all changes
-  remote: b2x:remote-changegroup
+  remote: remote-changegroup
   adding changesets
   adding manifests
   adding file changes
@@ -433,12 +433,12 @@
   $ hg pull -R clone ssh://user@dummy/repo
   pulling from ssh://user@dummy/repo
   searching for changes
-  remote: b2x:remote-changegroup
+  remote: remote-changegroup
   adding changesets
   adding manifests
   adding file changes
   added 2 changesets with 2 changes to 2 files (+1 heads)
-  remote: b2x:remote-changegroup
+  remote: remote-changegroup
   adding changesets
   adding manifests
   adding file changes
@@ -467,7 +467,7 @@
   $ hg pull -R clone ssh://user@dummy/repo
   pulling from ssh://user@dummy/repo
   searching for changes
-  remote: b2x:remote-changegroup
+  remote: remote-changegroup
   abort: remote-changegroup: missing "url" param
   [255]
 
@@ -479,7 +479,7 @@
   $ hg pull -R clone ssh://user@dummy/repo
   pulling from ssh://user@dummy/repo
   searching for changes
-  remote: b2x:remote-changegroup
+  remote: remote-changegroup
   abort: remote-changegroup: missing "size" param
   [255]
 
@@ -491,7 +491,7 @@
   $ hg pull -R clone ssh://user@dummy/repo
   pulling from ssh://user@dummy/repo
   searching for changes
-  remote: b2x:remote-changegroup
+  remote: remote-changegroup
   abort: remote-changegroup: invalid value for param "size"
   [255]
 
@@ -503,7 +503,7 @@
   $ hg pull -R clone ssh://user@dummy/repo
   pulling from ssh://user@dummy/repo
   searching for changes
-  remote: b2x:remote-changegroup
+  remote: remote-changegroup
   adding changesets
   adding manifests
   adding file changes
@@ -522,8 +522,8 @@
   $ hg pull -R clone ssh://user@dummy/repo
   pulling from ssh://user@dummy/repo
   searching for changes
-  remote: b2x:remote-changegroup
-  abort: missing support for b2x:remote-changegroup - digest:foo
+  remote: remote-changegroup
+  abort: missing support for remote-changegroup - digest:foo
   [255]
 
 Missing digest
@@ -534,7 +534,7 @@
   $ hg pull -R clone ssh://user@dummy/repo
   pulling from ssh://user@dummy/repo
   searching for changes
-  remote: b2x:remote-changegroup
+  remote: remote-changegroup
   abort: remote-changegroup: missing "digest:sha1" param
   [255]
 
@@ -546,7 +546,7 @@
   $ hg pull -R clone ssh://user@dummy/repo
   pulling from ssh://user@dummy/repo
   searching for changes
-  remote: b2x:remote-changegroup
+  remote: remote-changegroup
   abort: remote-changegroup does not support ssh urls
   [255]
 
@@ -561,14 +561,14 @@
   $ hg pull -R clone ssh://user@dummy/repo
   pulling from ssh://user@dummy/repo
   searching for changes
-  remote: b2x:remote-changegroup
+  remote: remote-changegroup
   abort: http://localhost:$HGPORT/notbundle.hg: not a Mercurial bundle
   [255]
 
 Not a bundle 1.0
 
   $ cat > notbundle10.hg << EOF
-  > HG2Y
+  > HG20
   > EOF
   $ cat > repo/.hg/bundle2maker << EOF
   > remote-changegroup http://localhost:$HGPORT/notbundle10.hg notbundle10.hg
@@ -576,7 +576,7 @@
   $ hg pull -R clone ssh://user@dummy/repo
   pulling from ssh://user@dummy/repo
   searching for changes
-  remote: b2x:remote-changegroup
+  remote: remote-changegroup
   abort: http://localhost:$HGPORT/notbundle10.hg: not a bundle version 1.0
   [255]
 
--- a/tests/test-casefolding.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-casefolding.t	Thu Apr 16 20:57:51 2015 -0500
@@ -28,7 +28,6 @@
   a
   committing manifest
   committing changelog
-  couldn't read revision branch cache names: * (glob)
   committed changeset 0:07f4944404050f47db2e5c5071e0e84e7a27bba9
 
 Case-changing renames should work:
--- a/tests/test-cat.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-cat.t	Thu Apr 16 20:57:51 2015 -0500
@@ -22,10 +22,22 @@
   $ hg cat -r 1 b
   1
 
-Test fileset
+Test multiple files
 
   $ echo 3 > c
   $ hg ci -Am addmore c
+  $ hg cat b c
+  1
+  3
+  $ hg cat .
+  1
+  3
+  $ hg cat . c
+  1
+  3
+
+Test fileset
+
   $ hg cat 'set:not(b) or a'
   3
   $ hg cat 'set:c or b'
@@ -51,3 +63,8 @@
   tmp/h_45116003780e
   tmp/r_2
 
+Test working directory
+
+  $ echo b-wdir > b
+  $ hg cat -r 'wdir()' b
+  b-wdir
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-censor.t	Thu Apr 16 20:57:51 2015 -0500
@@ -0,0 +1,480 @@
+  $ cat >> $HGRCPATH <<EOF
+  > [extensions]
+  > censor=
+  > EOF
+  $ cp $HGRCPATH $HGRCPATH.orig
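+
+The censor extension replaces the stored contents of a chosen file revision
+with a tombstone, so sensitive data can be expunged while the surrounding
+history stays usable; the rest of this file exercises that behaviour.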
+
+Create repo with unimpeachable content
+
+  $ hg init r
+  $ cd r
+  $ echo 'Initially untainted file' > target
+  $ echo 'Normal file here' > bystander
+  $ hg add target bystander
+  $ hg ci -m init
+
+Clone repo so we can test pull later
+
+  $ cd ..
+  $ hg clone r rpull
+  updating to branch default
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cd r
+
+Introduce content which will ultimately require censorship. Name the first
+censored node C1, the second C2, and so on
+
+  $ echo 'Tainted file' > target
+  $ echo 'Passwords: hunter2' >> target
+  $ hg ci -m taint target
+  $ C1=`hg id --debug -i`
+
+  $ echo 'hunter3' >> target
+  $ echo 'Normal file v2' > bystander
+  $ hg ci -m moretaint target bystander
+  $ C2=`hg id --debug -i`
+
+Add new sanitized versions to correct our mistake. Name the first head H1,
+the second head H2, and so on
+
+  $ echo 'Tainted file is now sanitized' > target
+  $ hg ci -m sanitized target
+  $ H1=`hg id --debug -i`
+
+  $ hg update -r $C2
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ echo 'Tainted file now super sanitized' > target
+  $ hg ci -m 'super sanitized' target
+  created new head
+  $ H2=`hg id --debug -i`
+
+Verify target contents before censorship at each revision
+
+  $ hg cat -r $H1 target
+  Tainted file is now sanitized
+  $ hg cat -r $H2 target
+  Tainted file now super sanitized
+  $ hg cat -r $C2 target
+  Tainted file
+  Passwords: hunter2
+  hunter3
+  $ hg cat -r $C1 target
+  Tainted file
+  Passwords: hunter2
+  $ hg cat -r 0 target
+  Initially untainted file
+
+Try to censor a revision with a tombstone message that is too large
+
+  $ hg censor -r $C1 -t 'blah blah blah blah blah blah blah blah bla' target
+  abort: censor tombstone must be no longer than censored data
+  [255]
+
+Censor revision with 2 offenses
+
+  $ hg censor -r $C2 -t "remove password" target
+  $ hg cat -r $H1 target
+  Tainted file is now sanitized
+  $ hg cat -r $H2 target
+  Tainted file now super sanitized
+  $ hg cat -r $C2 target
+  abort: censored node: 1e0247a9a4b7
+  (set censor.policy to ignore errors)
+  [255]
+  $ hg cat -r $C1 target
+  Tainted file
+  Passwords: hunter2
+  $ hg cat -r 0 target
+  Initially untainted file
+
+Censor revision with 1 offense
+
+  $ hg censor -r $C1 target
+  $ hg cat -r $H1 target
+  Tainted file is now sanitized
+  $ hg cat -r $H2 target
+  Tainted file now super sanitized
+  $ hg cat -r $C2 target
+  abort: censored node: 1e0247a9a4b7
+  (set censor.policy to ignore errors)
+  [255]
+  $ hg cat -r $C1 target
+  abort: censored node: 613bc869fceb
+  (set censor.policy to ignore errors)
+  [255]
+  $ hg cat -r 0 target
+  Initially untainted file
+
+Can only check out target at uncensored revisions; -X is a workaround for --all
+
+  $ hg revert -r $C2 target
+  abort: censored node: 1e0247a9a4b7
+  (set censor.policy to ignore errors)
+  [255]
+  $ hg revert -r $C1 target
+  abort: censored node: 613bc869fceb
+  (set censor.policy to ignore errors)
+  [255]
+  $ hg revert -r $C1 --all
+  reverting bystander
+  reverting target
+  abort: censored node: 613bc869fceb
+  (set censor.policy to ignore errors)
+  [255]
+  $ hg revert -r $C1 --all -X target
+  $ cat target
+  Tainted file now super sanitized
+  $ hg revert -r 0 --all
+  reverting target
+  $ cat target
+  Initially untainted file
+  $ hg revert -r $H2 --all
+  reverting bystander
+  reverting target
+  $ cat target
+  Tainted file now super sanitized
+
+Uncensored file can be viewed at any revision
+
+  $ hg cat -r $H1 bystander
+  Normal file v2
+  $ hg cat -r $C2 bystander
+  Normal file v2
+  $ hg cat -r $C1 bystander
+  Normal file here
+  $ hg cat -r 0 bystander
+  Normal file here
+
+Can update to children of censored revision
+
+  $ hg update -r $H1
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cat target
+  Tainted file is now sanitized
+  $ hg update -r $H2
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cat target
+  Tainted file now super sanitized
+
+Set censor policy to abort in trusted $HGRC so hg verify fails
+
+  $ cp $HGRCPATH.orig $HGRCPATH
+  $ cat >> $HGRCPATH <<EOF
+  > [censor]
+  > policy = abort
+  > EOF
+
+Repo fails verification due to censorship
+
+  $ hg verify
+  checking changesets
+  checking manifests
+  crosschecking files in changesets and manifests
+  checking files
+   target@1: censored file data
+   target@2: censored file data
+  2 files, 5 changesets, 7 total revisions
+  2 integrity errors encountered!
+  (first damaged changeset appears to be 1)
+  [1]
+
+Cannot update to revision with censored data
+
+  $ hg update -r $C2
+  abort: censored node: 1e0247a9a4b7
+  (set censor.policy to ignore errors)
+  [255]
+  $ hg update -r $C1
+  abort: censored node: 613bc869fceb
+  (set censor.policy to ignore errors)
+  [255]
+  $ hg update -r 0
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ hg update -r $H2
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Set censor policy to ignore in trusted $HGRC so hg verify passes
+
+  $ cp $HGRCPATH.orig $HGRCPATH
+  $ cat >> $HGRCPATH <<EOF
+  > [censor]
+  > policy = ignore
+  > EOF
+
+Repo passes verification with warnings with explicit config
+
+  $ hg verify
+  checking changesets
+  checking manifests
+  crosschecking files in changesets and manifests
+  checking files
+  2 files, 5 changesets, 7 total revisions
+
+May update to revision with censored data with explicit config
+
+  $ hg update -r $C2
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cat target
+  $ hg update -r $C1
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cat target
+  $ hg update -r 0
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cat target
+  Initially untainted file
+  $ hg update -r $H2
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cat target
+  Tainted file now super sanitized
+
+Can merge in a revision with censored data. The test requires one branch of
+history with the file censored, but we can't censor at a head, so advance H1.
+
+  $ hg update -r $H1
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ C3=$H1
+  $ echo 'advanced head H1' > target
+  $ hg ci -m 'advance head H1' target
+  $ H1=`hg id --debug -i`
+  $ hg censor -r $C3 target
+  $ hg update -r $H2
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ hg merge -r $C3
+  merging target
+  0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+  (branch merge, don't forget to commit)
+
+Revisions present in repository heads may not be censored
+
+  $ hg update -C -r $H2
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ hg censor -r $H2 target
+  abort: cannot censor file in heads (78a8fc215e79)
+  (clean/delete and commit first)
+  [255]
+  $ echo 'twiddling thumbs' > bystander
+  $ hg ci -m 'bystander commit'
+  $ H2=`hg id --debug -i`
+  $ hg censor -r "$H2^" target
+  abort: cannot censor file in heads (efbe78065929)
+  (clean/delete and commit first)
+  [255]
+
+Cannot censor working directory
+
+  $ echo 'seriously no passwords' > target
+  $ hg ci -m 'extend second head arbitrarily' target
+  $ H2=`hg id --debug -i`
+  $ hg update -r "$H2^"
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ hg censor -r . target
+  abort: cannot censor working directory
+  (clean/delete/update first)
+  [255]
+  $ hg update -r $H2
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+Can re-add file after being deleted + censored
+
+  $ C4=$H2
+  $ hg rm target
+  $ hg ci -m 'delete target so it may be censored'
+  $ H2=`hg id --debug -i`
+  $ hg censor -r $C4 target
+  $ hg cat -r $C4 target
+  $ hg cat -r "$H2^^" target
+  Tainted file now super sanitized
+  $ echo 'fresh start' > target
+  $ hg add target
+  $ hg ci -m reincarnated target
+  $ H2=`hg id --debug -i`
+  $ hg cat -r $H2 target
+  fresh start
+  $ hg cat -r "$H2^" target
+  target: no such file in rev 452ec1762369
+  [1]
+  $ hg cat -r $C4 target
+  $ hg cat -r "$H2^^^" target
+  Tainted file now super sanitized
+
+Can censor after revlog has expanded to no longer permit inline storage
+
+  $ for x in `python $TESTDIR/seq.py 0 50000`
+  > do
+  >   echo "Password: hunter$x" >> target
+  > done
+  $ hg ci -m 'add 100k passwords'
+  $ H2=`hg id --debug -i`
+  $ C5=$H2
+  $ hg revert -r "$H2^" target
+  $ hg ci -m 'cleaned 100k passwords'
+  $ H2=`hg id --debug -i`
+  $ hg censor -r $C5 target
+  $ hg cat -r $C5 target
+  $ hg cat -r $H2 target
+  fresh start
+
+Repo with censored nodes can be cloned and cloned nodes are censored
+
+  $ cd ..
+  $ hg clone r rclone
+  updating to branch default
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cd rclone
+  $ hg cat -r $H1 target
+  advanced head H1
+  $ hg cat -r $H2~5 target
+  Tainted file now super sanitized
+  $ hg cat -r $C2 target
+  $ hg cat -r $C1 target
+  $ hg cat -r 0 target
+  Initially untainted file
+  $ hg verify
+  checking changesets
+  checking manifests
+  crosschecking files in changesets and manifests
+  checking files
+  2 files, 12 changesets, 13 total revisions
+
+Repo cloned before tainted content introduced can pull censored nodes
+
+  $ cd ../rpull
+  $ hg cat -r tip target
+  Initially untainted file
+  $ hg verify
+  checking changesets
+  checking manifests
+  crosschecking files in changesets and manifests
+  checking files
+  2 files, 1 changesets, 2 total revisions
+  $ hg pull -r $H1 -r $H2
+  pulling from $TESTTMP/r (glob)
+  searching for changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 11 changesets with 11 changes to 2 files (+1 heads)
+  (run 'hg heads' to see heads, 'hg merge' to merge)
+  $ hg update 4
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cat target
+  Tainted file now super sanitized
+  $ hg cat -r $H1 target
+  advanced head H1
+  $ hg cat -r $H2~5 target
+  Tainted file now super sanitized
+  $ hg cat -r $C2 target
+  $ hg cat -r $C1 target
+  $ hg cat -r 0 target
+  Initially untainted file
+  $ hg verify
+  checking changesets
+  checking manifests
+  crosschecking files in changesets and manifests
+  checking files
+  2 files, 12 changesets, 13 total revisions
+
+Censored nodes can be pushed if they censor previously unexchanged nodes
+
+  $ echo 'Passwords: hunter2hunter2' > target
+  $ hg ci -m 're-add password from clone' target
+  created new head
+  $ H3=`hg id --debug -i`
+  $ REV=$H3
+  $ echo 'Re-sanitized; nothing to see here' > target
+  $ hg ci -m 're-sanitized' target
+  $ H2=`hg id --debug -i`
+  $ CLEANREV=$H2
+  $ hg cat -r $REV target
+  Passwords: hunter2hunter2
+  $ hg censor -r $REV target
+  $ hg cat -r $REV target
+  $ hg cat -r $CLEANREV target
+  Re-sanitized; nothing to see here
+  $ hg push -f -r $H2
+  pushing to $TESTTMP/r (glob)
+  searching for changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 2 changes to 1 files (+1 heads)
+
+  $ cd ../r
+  $ hg cat -r $REV target
+  $ hg cat -r $CLEANREV target
+  Re-sanitized; nothing to see here
+  $ hg update $CLEANREV
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cat target
+  Re-sanitized; nothing to see here
+
+Censored nodes can be bundled up and unbundled in another repo
+
+  $ hg bundle --base 0 ../pwbundle
+  13 changesets found
+  $ cd ../rclone
+  $ hg unbundle ../pwbundle
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 2 changes to 2 files (+1 heads)
+  (run 'hg heads .' to see heads, 'hg merge' to merge)
+  $ hg cat -r $REV target
+  $ hg cat -r $CLEANREV target
+  Re-sanitized; nothing to see here
+  $ hg update $CLEANREV
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cat target
+  Re-sanitized; nothing to see here
+  $ hg verify
+  checking changesets
+  checking manifests
+  crosschecking files in changesets and manifests
+  checking files
+  2 files, 14 changesets, 15 total revisions
+
+Censored nodes can be imported on top of censored nodes, consecutively
+
+  $ hg init ../rimport
+  $ hg bundle --base 1 ../rimport/splitbundle
+  12 changesets found
+  $ cd ../rimport
+  $ hg pull -r $H1 -r $H2 ../r
+  pulling from ../r
+  adding changesets
+  adding manifests
+  adding file changes
+  added 8 changesets with 10 changes to 2 files (+1 heads)
+  (run 'hg heads' to see heads, 'hg merge' to merge)
+  $ hg unbundle splitbundle
+  adding changesets
+  adding manifests
+  adding file changes
+  added 6 changesets with 5 changes to 2 files (+1 heads)
+  (run 'hg heads .' to see heads, 'hg merge' to merge)
+  $ hg update $H2
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cat target
+  Re-sanitized; nothing to see here
+  $ hg verify
+  checking changesets
+  checking manifests
+  crosschecking files in changesets and manifests
+  checking files
+  2 files, 14 changesets, 15 total revisions
+  $ cd ../r
+
+Can import bundle where first revision of a file is censored
+
+  $ hg init ../rinit
+  $ hg censor -r 0 target
+  $ hg bundle -r 0 --base null ../rinit/initbundle
+  1 changesets found
+  $ cd ../rinit
+  $ hg unbundle initbundle
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 2 changes to 2 files
+  (run 'hg update' to get a working copy)
+  $ hg cat -r 0 target
--- a/tests/test-children.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-children.t	Thu Apr 16 20:57:51 2015 -0500
@@ -122,4 +122,12 @@
   summary:     2
   
 
+should be compatible with templater (don't pass fctx to displayer)
+  $ hg children file0 -Tdefault
+  changeset:   2:8f5eea5023c2
+  user:        test
+  date:        Thu Jan 01 00:00:02 1970 +0000
+  summary:     2
+  
+
   $ cd ..
--- a/tests/test-churn.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-churn.t	Thu Apr 16 20:57:51 2015 -0500
@@ -171,4 +171,27 @@
   El Ni\xc3\xb1o         1 *************** (esc)
   with space      1 ***************
 
+Test --template argument, with backwards compatibility
+
+  $ hg churn -t '{author|user}'
+  user1      4 ***************************************************************
+  user3      3 ***********************************************
+  user2      2 ********************************
+  nino       1 ****************
+  with       1 ****************
+             0 
+  user4      0 
+  $ hg churn -T '{author|user}'
+  user1      4 ***************************************************************
+  user3      3 ***********************************************
+  user2      2 ********************************
+  nino       1 ****************
+  with       1 ****************
+             0 
+  user4      0 
+  $ hg churn -t 'alltogether'
+  alltogether     11 *********************************************************
+  $ hg churn -T 'alltogether'
+  alltogether     11 *********************************************************
+
   $ cd ..
--- a/tests/test-clone.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-clone.t	Thu Apr 16 20:57:51 2015 -0500
@@ -65,9 +65,25 @@
 
 #if hardlink
   $ hg --debug clone -U . ../c
+  linking: 1
+  linking: 2
+  linking: 3
+  linking: 4
+  linking: 5
+  linking: 6
+  linking: 7
+  linking: 8
   linked 8 files
 #else
   $ hg --debug clone -U . ../c
+  linking: 1
+  copying: 2
+  copying: 3
+  copying: 4
+  copying: 5
+  copying: 6
+  copying: 7
+  copying: 8
   copied 8 files
 #endif
   $ cd ../c
--- a/tests/test-command-template.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-command-template.t	Thu Apr 16 20:57:51 2015 -0500
@@ -47,6 +47,9 @@
   fourth (second)
   $ hg log -T '{file_copies % "{source} -> {name}\n"}' -r .
   second -> fourth
+  $ hg log -T '{rev} {ifcontains("fourth", file_copies, "t", "f")}\n' -r .:7
+  8 t
+  7 f
 
 Quoting for ui.logtemplate
 
@@ -93,6 +96,10 @@
 
 Default style is like normal output:
 
+  $ echo c >> c
+  $ hg add c
+  $ hg commit -qm ' '
+
   $ hg log > log.out
   $ hg log --style default > style.out
   $ cmp log.out style.out || diff -u log.out style.out
@@ -132,6 +139,8 @@
 
   $ mv $HGRCPATH-bak $HGRCPATH
 
+  $ hg --config extensions.strip= strip -q .
+
 Revision with no copies (used to print a traceback):
 
   $ hg tip -v --template '\n'
@@ -1868,6 +1877,16 @@
   o  0: children: 1, tags: 0, file_adds: 1, ancestors: 1
   
 
+Upper/lower filters:
+
+  $ hg log -r0 --template '{branch|upper}\n'
+  DEFAULT
+  $ hg log -r0 --template '{author|lower}\n'
+  user name <user@hostname>
+  $ hg log -r0 --template '{date|upper}\n'
+  abort: template filter 'upper' is not compatible with keyword 'date'
+  [255]
+
 Error on syntax:
 
   $ echo 'x = "f' >> t
@@ -1905,6 +1924,11 @@
   hg: parse error: unknown function 'foo'
   [255]
 
+Pass generator object created by template function to filter
+
+  $ hg log -l 1 --template '{if(author, author)|user}\n'
+  test
+
 Test diff function:
 
   $ hg diff -c 8
@@ -2290,6 +2314,14 @@
   $ hg log -r 0 --template '{if(branches, "yes", "no")}\n'
   no
 
+Test get function:
+
+  $ hg log -r 0 --template '{get(extras, "branch")}\n'
+  default
+  $ hg log -r 0 --template '{get(files, "should_fail")}\n'
+  hg: parse error: get() expects a dict as first argument
+  [255]
+
 Test shortest(node) function:
 
   $ echo b > b
@@ -2393,6 +2425,10 @@
   2 bar foo
   1 baz
   0 
+  $ hg log --template "{rev} {ifcontains('foo', bookmarks, 't', 'f')}\n"
+  2 t
+  1 f
+  0 f
 
 Test stringify on sub expressions
 
--- a/tests/test-commandserver.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-commandserver.t	Thu Apr 16 20:57:51 2015 -0500
@@ -178,6 +178,7 @@
   defaults.commit=-d "0 0"
   defaults.shelve=--date "0 0"
   defaults.tag=-d "0 0"
+  devel.all=true
   largefiles.usercache=$TESTTMP/.cache/largefiles
   ui.slash=True
   ui.interactive=False
--- a/tests/test-commit-amend.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-commit-amend.t	Thu Apr 16 20:57:51 2015 -0500
@@ -1096,7 +1096,7 @@
   $ hg ci -m add
   $ 
   $ hg debugrename newdirname/newfile.py
-  newdirname/newfile.py renamed from olddirname/newfile.py:690b295714aed510803d3020da9c70fca8336def
+  newdirname/newfile.py renamed from olddirname/newfile.py:690b295714aed510803d3020da9c70fca8336def (glob)
   $ hg status -C --change .
   A newdirname/newfile.py
   $ hg status -C --rev 1
@@ -1115,7 +1115,7 @@
   $ echo a >> newdirname/commonfile.py
   $ hg ci --amend -m bug
   $ hg debugrename newdirname/newfile.py
-  newdirname/newfile.py renamed from olddirname/newfile.py:690b295714aed510803d3020da9c70fca8336def
+  newdirname/newfile.py renamed from olddirname/newfile.py:690b295714aed510803d3020da9c70fca8336def (glob)
   $ hg debugindex newdirname/newfile.py
      rev    offset  length   base linkrev nodeid       p1           p2
        0         0      88      0       3 34a4d536c0c0 000000000000 000000000000
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-commit-interactive-curses.t	Thu Apr 16 20:57:51 2015 -0500
@@ -0,0 +1,203 @@
+Set up a repo
+
+  $ cat <<EOF >> $HGRCPATH
+  > [ui]
+  > interactive = true
+  > [experimental]
+  > crecord = true
+  > crecordtest = testModeCommands
+  > EOF
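+
+The crecord test hook configured above makes the curses interface read its
+keystrokes (TOGGLE, KEY_DOWN, X, ...) from the testModeCommands file instead
+of a terminal, so "hg commit -i" can run unattended below.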
+
+  $ hg init a
+  $ cd a
+
+Committing some changes but stopping on the way
+
+  $ echo "a" > a
+  $ hg add a
+  $ cat <<EOF >testModeCommands
+  > TOGGLE
+  > X
+  > EOF
+  $ hg commit -i  -m "a" -d "0 0"
+  no changes to record
+  $ hg tip
+  changeset:   -1:000000000000
+  tag:         tip
+  user:        
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  
+
+Committing some changes
+
+  $ cat <<EOF >testModeCommands
+  > X
+  > EOF
+  $ hg commit -i  -m "a" -d "0 0"
+  $ hg tip
+  changeset:   0:cb9a9f314b8b
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     a
+  
+Committing only one file
+
+  $ echo "a" >> a
+  >>> open('b', 'wb').write("1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")
+  $ hg add b
+  $ cat <<EOF >testModeCommands
+  > TOGGLE
+  > KEY_DOWN
+  > X
+  > EOF
+  $ hg commit -i  -m "one file" -d "0 0"
+  $ hg tip
+  changeset:   1:fb2705a663ea
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     one file
+  
+  $ hg cat -r tip a
+  a
+  $ cat a
+  a
+  a
+
+Committing only one hunk
+
+- Untoggle all the hunks, go down to the second file
+- unfold it
+- go down to the second hunk (1 for the first hunk, 1 for the first hunkline, 1 for the second hunk, 1 for the second hunkline)
+- toggle the second hunk
+- commit
+
+  $ echo "x" > c
+  $ cat b >> c
+  $ echo "y" >> c
+  $ mv c b
+  $ cat <<EOF >testModeCommands
+  > A
+  > KEY_DOWN
+  > f
+  > KEY_DOWN
+  > KEY_DOWN
+  > KEY_DOWN
+  > KEY_DOWN
+  > TOGGLE
+  > X
+  > EOF
+  $ hg commit -i  -m "one hunk" -d "0 0"
+  $ hg tip
+  changeset:   2:7d10dfe755a8
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     one hunk
+  
+  $ hg cat -r tip b
+  1
+  2
+  3
+  4
+  5
+  6
+  7
+  8
+  9
+  10
+  y
+  $ cat b
+  x
+  1
+  2
+  3
+  4
+  5
+  6
+  7
+  8
+  9
+  10
+  y
+  $ hg commit -m "other hunks"
+  $ hg tip
+  changeset:   3:a6735021574d
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     other hunks
+  
+  $ hg cat -r tip b
+  x
+  1
+  2
+  3
+  4
+  5
+  6
+  7
+  8
+  9
+  10
+  y
+
+Editing patch of newly added file
+
+  $ cat > editor.sh << '__EOF__'
+  > cat "$1"  | sed "s/first/very/g"  > tt
+  > mv tt  "$1"
+  > __EOF__
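+
+The editor script replaces "first" with "very", so the committed version of
+the new file will differ from the working copy left on disk.
+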
+  $ cat > newfile << '__EOF__'
+  > This is the first line
+  > This is the second line
+  > This is the third line
+  > __EOF__
+  $ hg add newfile
+  $ cat <<EOF >testModeCommands
+  > f
+  > KEY_DOWN
+  > KEY_DOWN
+  > KEY_DOWN
+  > e
+  > X
+  > EOF
+  $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit -i -d '23 0' -medit-patch-new
+  $ hg tip
+  changeset:   4:6a0a43e9eff5
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:23 1970 +0000
+  summary:     edit-patch-new
+  
+  $ hg cat -r tip newfile
+  This is the very line
+  This is the second line
+  This is the third line
+
+  $ cat newfile
+  This is the first line
+  This is the second line
+  This is the third line
+
+Newly added files can be selected with the curses interface
+
+  $ hg update -C .
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ echo "hello" > x
+  $ hg add x
+  $ cat <<EOF >testModeCommands
+  > TOGGLE
+  > TOGGLE
+  > X
+  > EOF
+  $ hg st
+  A x
+  ? editor.sh
+  ? testModeCommands
+  $ hg commit -i  -m "newly added file" -d "0 0"
+  $ hg st
+  ? editor.sh
+  ? testModeCommands
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-commit-interactive.t	Thu Apr 16 20:57:51 2015 -0500
@@ -0,0 +1,1434 @@
+Set up a repo
+
+  $ cat <<EOF >> $HGRCPATH
+  > [ui]
+  > interactive = true
+  > [extensions]
+  > record =
+  > EOF
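+
+With the record extension enabled, "hg commit -i" prompts for each changed
+file and hunk ([Ynesfdaq?]); throughout this test the answers are supplied on
+stdin via here-documents.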
+
+  $ hg init a
+  $ cd a
+
+Select no files
+
+  $ touch empty-rw
+  $ hg add empty-rw
+
+  $ hg commit -i empty-rw<<EOF
+  > n
+  > EOF
+  diff --git a/empty-rw b/empty-rw
+  new file mode 100644
+  examine changes to 'empty-rw'? [Ynesfdaq?] n
+  
+  no changes to record
+
+  $ hg tip -p
+  changeset:   -1:000000000000
+  tag:         tip
+  user:        
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  
+  
+
+Select files but no hunks
+
+  $ hg commit -i  empty-rw<<EOF
+  > y
+  > n
+  > EOF
+  diff --git a/empty-rw b/empty-rw
+  new file mode 100644
+  examine changes to 'empty-rw'? [Ynesfdaq?] y
+  
+  abort: empty commit message
+  [255]
+
+  $ hg tip -p
+  changeset:   -1:000000000000
+  tag:         tip
+  user:        
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  
+  
+
+Record empty file
+
+  $ hg commit -i -d '0 0' -m empty empty-rw<<EOF
+  > y
+  > y
+  > EOF
+  diff --git a/empty-rw b/empty-rw
+  new file mode 100644
+  examine changes to 'empty-rw'? [Ynesfdaq?] y
+  
+
+  $ hg tip -p
+  changeset:   0:c0708cf4e46e
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     empty
+  
+  
+
+Summary shows we updated to the new cset
+
+  $ hg summary
+  parent: 0:c0708cf4e46e tip
+   empty
+  branch: default
+  commit: (clean)
+  update: (current)
+
+Rename empty file
+
+  $ hg mv empty-rw empty-rename
+  $ hg commit -i -d '1 0' -m rename<<EOF
+  > y
+  > EOF
+  diff --git a/empty-rw b/empty-rename
+  rename from empty-rw
+  rename to empty-rename
+  examine changes to 'empty-rw' and 'empty-rename'? [Ynesfdaq?] y
+  
+
+  $ hg tip -p
+  changeset:   1:d695e8dcb197
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:01 1970 +0000
+  summary:     rename
+  
+  
+
+Copy empty file
+
+  $ hg cp empty-rename empty-copy
+  $ hg commit -i -d '2 0' -m copy<<EOF
+  > y
+  > EOF
+  diff --git a/empty-rename b/empty-copy
+  copy from empty-rename
+  copy to empty-copy
+  examine changes to 'empty-rename' and 'empty-copy'? [Ynesfdaq?] y
+  
+
+  $ hg tip -p
+  changeset:   2:1d4b90bea524
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:02 1970 +0000
+  summary:     copy
+  
+  
+
+Delete empty file
+
+  $ hg rm empty-copy
+  $ hg commit -i -d '3 0' -m delete<<EOF
+  > y
+  > EOF
+  diff --git a/empty-copy b/empty-copy
+  deleted file mode 100644
+  examine changes to 'empty-copy'? [Ynesfdaq?] y
+  
+
+  $ hg tip -p
+  changeset:   3:b39a238f01a1
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:03 1970 +0000
+  summary:     delete
+  
+  
+
+Add binary file
+
+  $ hg bundle --base -2 tip.bundle
+  1 changesets found
+  $ hg add tip.bundle
+  $ hg commit -i -d '4 0' -m binary<<EOF
+  > y
+  > EOF
+  diff --git a/tip.bundle b/tip.bundle
+  new file mode 100644
+  this is a binary file
+  examine changes to 'tip.bundle'? [Ynesfdaq?] y
+  
+
+  $ hg tip -p
+  changeset:   4:ad816da3711e
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:04 1970 +0000
+  summary:     binary
+  
+  diff -r b39a238f01a1 -r ad816da3711e tip.bundle
+  Binary file tip.bundle has changed
+  
+
+Change binary file
+
+  $ hg bundle --base -2 tip.bundle
+  1 changesets found
+  $ hg commit -i -d '5 0' -m binary-change<<EOF
+  > y
+  > EOF
+  diff --git a/tip.bundle b/tip.bundle
+  this modifies a binary file (all or nothing)
+  examine changes to 'tip.bundle'? [Ynesfdaq?] y
+  
+
+  $ hg tip -p
+  changeset:   5:dccd6f3eb485
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:05 1970 +0000
+  summary:     binary-change
+  
+  diff -r ad816da3711e -r dccd6f3eb485 tip.bundle
+  Binary file tip.bundle has changed
+  
+
+Rename and change binary file
+
+  $ hg mv tip.bundle top.bundle
+  $ hg bundle --base -2 top.bundle
+  1 changesets found
+  $ hg commit -i -d '6 0' -m binary-change-rename<<EOF
+  > y
+  > EOF
+  diff --git a/tip.bundle b/top.bundle
+  rename from tip.bundle
+  rename to top.bundle
+  this modifies a binary file (all or nothing)
+  examine changes to 'tip.bundle' and 'top.bundle'? [Ynesfdaq?] y
+  
+
+  $ hg tip -p
+  changeset:   6:7fa44105f5b3
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:06 1970 +0000
+  summary:     binary-change-rename
+  
+  diff -r dccd6f3eb485 -r 7fa44105f5b3 tip.bundle
+  Binary file tip.bundle has changed
+  diff -r dccd6f3eb485 -r 7fa44105f5b3 top.bundle
+  Binary file top.bundle has changed
+  
+
+Add plain file
+
+  $ for i in 1 2 3 4 5 6 7 8 9 10; do
+  >     echo $i >> plain
+  > done
+
+  $ hg add plain
+  $ hg commit -i -d '7 0' -m plain plain<<EOF
+  > y
+  > y
+  > EOF
+  diff --git a/plain b/plain
+  new file mode 100644
+  examine changes to 'plain'? [Ynesfdaq?] y
+  
+  @@ -0,0 +1,10 @@
+  +1
+  +2
+  +3
+  +4
+  +5
+  +6
+  +7
+  +8
+  +9
+  +10
+  record this change to 'plain'? [Ynesfdaq?] y
+  
+  $ hg tip -p
+  changeset:   7:11fb457c1be4
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:07 1970 +0000
+  summary:     plain
+  
+  diff -r 7fa44105f5b3 -r 11fb457c1be4 plain
+  --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+  +++ b/plain	Thu Jan 01 00:00:07 1970 +0000
+  @@ -0,0 +1,10 @@
+  +1
+  +2
+  +3
+  +4
+  +5
+  +6
+  +7
+  +8
+  +9
+  +10
+  
+Modify end of plain file with username unset
+
+  $ echo 11 >> plain
+  $ unset HGUSER
+  $ hg commit -i --config ui.username= -d '8 0' -m end plain
+  abort: no username supplied
+  (use "hg config --edit" to set your username)
+  [255]
+
+
+Modify end of plain file, also test that diffopts are accounted for
+
+  $ HGUSER="test"
+  $ export HGUSER
+  $ hg commit -i --config diff.showfunc=true -d '8 0' -m end plain <<EOF
+  > y
+  > y
+  > EOF
+  diff --git a/plain b/plain
+  1 hunks, 1 lines changed
+  examine changes to 'plain'? [Ynesfdaq?] y
+  
+  @@ -8,3 +8,4 @@ 7
+   8
+   9
+   10
+  +11
+  record this change to 'plain'? [Ynesfdaq?] y
+  
+
+Modify end of plain file, no EOL
+
+  $ hg tip --template '{node}' >> plain
+  $ hg commit -i -d '9 0' -m noeol plain <<EOF
+  > y
+  > y
+  > EOF
+  diff --git a/plain b/plain
+  1 hunks, 1 lines changed
+  examine changes to 'plain'? [Ynesfdaq?] y
+  
+  @@ -9,3 +9,4 @@
+   9
+   10
+   11
+  +7264f99c5f5ff3261504828afa4fb4d406c3af54
+  \ No newline at end of file
+  record this change to 'plain'? [Ynesfdaq?] y
+  
+
+Modify end of plain file, add EOL
+
+  $ echo >> plain
+  $ echo 1 > plain2
+  $ hg add plain2
+  $ hg commit -i -d '10 0' -m eol plain plain2 <<EOF
+  > y
+  > y
+  > y
+  > y
+  > EOF
+  diff --git a/plain b/plain
+  1 hunks, 1 lines changed
+  examine changes to 'plain'? [Ynesfdaq?] y
+  
+  @@ -9,4 +9,4 @@
+   9
+   10
+   11
+  -7264f99c5f5ff3261504828afa4fb4d406c3af54
+  \ No newline at end of file
+  +7264f99c5f5ff3261504828afa4fb4d406c3af54
+  record change 1/2 to 'plain'? [Ynesfdaq?] y
+  
+  diff --git a/plain2 b/plain2
+  new file mode 100644
+  examine changes to 'plain2'? [Ynesfdaq?] y
+  
+  @@ -0,0 +1,1 @@
+  +1
+  record change 2/2 to 'plain2'? [Ynesfdaq?] y
+  
+Modify beginning, trim end, record both, add another file to test
+change numbering
+
+  $ rm plain
+  $ for i in 2 2 3 4 5 6 7 8 9 10; do
+  >   echo $i >> plain
+  > done
+  $ echo 2 >> plain2
+
+  $ hg commit -i -d '10 0' -m begin-and-end plain plain2 <<EOF
+  > y
+  > y
+  > y
+  > y
+  > y
+  > EOF
+  diff --git a/plain b/plain
+  2 hunks, 3 lines changed
+  examine changes to 'plain'? [Ynesfdaq?] y
+  
+  @@ -1,4 +1,4 @@
+  -1
+  +2
+   2
+   3
+   4
+  record change 1/3 to 'plain'? [Ynesfdaq?] y
+  
+  @@ -8,5 +8,3 @@
+   8
+   9
+   10
+  -11
+  -7264f99c5f5ff3261504828afa4fb4d406c3af54
+  record change 2/3 to 'plain'? [Ynesfdaq?] y
+  
+  diff --git a/plain2 b/plain2
+  1 hunks, 1 lines changed
+  examine changes to 'plain2'? [Ynesfdaq?] y
+  
+  @@ -1,1 +1,2 @@
+   1
+  +2
+  record change 3/3 to 'plain2'? [Ynesfdaq?] y
+  
+
+  $ hg tip -p
+  changeset:   11:21df83db12b8
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:10 1970 +0000
+  summary:     begin-and-end
+  
+  diff -r ddb8b281c3ff -r 21df83db12b8 plain
+  --- a/plain	Thu Jan 01 00:00:10 1970 +0000
+  +++ b/plain	Thu Jan 01 00:00:10 1970 +0000
+  @@ -1,4 +1,4 @@
+  -1
+  +2
+   2
+   3
+   4
+  @@ -8,5 +8,3 @@
+   8
+   9
+   10
+  -11
+  -7264f99c5f5ff3261504828afa4fb4d406c3af54
+  diff -r ddb8b281c3ff -r 21df83db12b8 plain2
+  --- a/plain2	Thu Jan 01 00:00:10 1970 +0000
+  +++ b/plain2	Thu Jan 01 00:00:10 1970 +0000
+  @@ -1,1 +1,2 @@
+   1
+  +2
+  
+
+Trim beginning, modify end
+
+  $ rm plain
+  > for i in 4 5 6 7 8 9 10.new; do
+  >   echo $i >> plain
+  > done
+
+Record end
+
+  $ hg commit -i -d '11 0' -m end-only plain <<EOF
+  > y
+  > n
+  > y
+  > EOF
+  diff --git a/plain b/plain
+  2 hunks, 4 lines changed
+  examine changes to 'plain'? [Ynesfdaq?] y
+  
+  @@ -1,9 +1,6 @@
+  -2
+  -2
+  -3
+   4
+   5
+   6
+   7
+   8
+   9
+  record change 1/2 to 'plain'? [Ynesfdaq?] n
+  
+  @@ -4,7 +1,7 @@
+   4
+   5
+   6
+   7
+   8
+   9
+  -10
+  +10.new
+  record change 2/2 to 'plain'? [Ynesfdaq?] y
+  
+
+  $ hg tip -p
+  changeset:   12:99337501826f
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:11 1970 +0000
+  summary:     end-only
+  
+  diff -r 21df83db12b8 -r 99337501826f plain
+  --- a/plain	Thu Jan 01 00:00:10 1970 +0000
+  +++ b/plain	Thu Jan 01 00:00:11 1970 +0000
+  @@ -7,4 +7,4 @@
+   7
+   8
+   9
+  -10
+  +10.new
+  
+
+Record beginning
+
+  $ hg commit -i -d '12 0' -m begin-only plain <<EOF
+  > y
+  > y
+  > EOF
+  diff --git a/plain b/plain
+  1 hunks, 3 lines changed
+  examine changes to 'plain'? [Ynesfdaq?] y
+  
+  @@ -1,6 +1,3 @@
+  -2
+  -2
+  -3
+   4
+   5
+   6
+  record this change to 'plain'? [Ynesfdaq?] y
+  
+
+  $ hg tip -p
+  changeset:   13:bbd45465d540
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:12 1970 +0000
+  summary:     begin-only
+  
+  diff -r 99337501826f -r bbd45465d540 plain
+  --- a/plain	Thu Jan 01 00:00:11 1970 +0000
+  +++ b/plain	Thu Jan 01 00:00:12 1970 +0000
+  @@ -1,6 +1,3 @@
+  -2
+  -2
+  -3
+   4
+   5
+   6
+  
+
+Add to beginning, trim from end
+
+  $ rm plain
+  $ for i in 1 2 3 4 5 6 7 8 9; do
+  >  echo $i >> plain
+  > done
+
+Record end
+
+  $ hg commit -i --traceback -d '13 0' -m end-again plain<<EOF
+  > y
+  > n
+  > y
+  > EOF
+  diff --git a/plain b/plain
+  2 hunks, 4 lines changed
+  examine changes to 'plain'? [Ynesfdaq?] y
+  
+  @@ -1,6 +1,9 @@
+  +1
+  +2
+  +3
+   4
+   5
+   6
+   7
+   8
+   9
+  record change 1/2 to 'plain'? [Ynesfdaq?] n
+  
+  @@ -1,7 +4,6 @@
+   4
+   5
+   6
+   7
+   8
+   9
+  -10.new
+  record change 2/2 to 'plain'? [Ynesfdaq?] y
+  
+
+Add to beginning, middle, end
+
+  $ rm plain
+  $ for i in 1 2 3 4 5 5.new 5.reallynew 6 7 8 9 10 11; do
+  >   echo $i >> plain
+  > done
+
+Record beginning, middle, and test that format-breaking diffopts are ignored
+
+  $ hg commit -i --config diff.noprefix=True -d '14 0' -m middle-only plain <<EOF
+  > y
+  > y
+  > y
+  > n
+  > EOF
+  diff --git a/plain b/plain
+  3 hunks, 7 lines changed
+  examine changes to 'plain'? [Ynesfdaq?] y
+  
+  @@ -1,2 +1,5 @@
+  +1
+  +2
+  +3
+   4
+   5
+  record change 1/3 to 'plain'? [Ynesfdaq?] y
+  
+  @@ -1,6 +4,8 @@
+   4
+   5
+  +5.new
+  +5.reallynew
+   6
+   7
+   8
+   9
+  record change 2/3 to 'plain'? [Ynesfdaq?] y
+  
+  @@ -3,4 +8,6 @@
+   6
+   7
+   8
+   9
+  +10
+  +11
+  record change 3/3 to 'plain'? [Ynesfdaq?] n
+  
+
+  $ hg tip -p
+  changeset:   15:f34a7937ec33
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:14 1970 +0000
+  summary:     middle-only
+  
+  diff -r 82c065d0b850 -r f34a7937ec33 plain
+  --- a/plain	Thu Jan 01 00:00:13 1970 +0000
+  +++ b/plain	Thu Jan 01 00:00:14 1970 +0000
+  @@ -1,5 +1,10 @@
+  +1
+  +2
+  +3
+   4
+   5
+  +5.new
+  +5.reallynew
+   6
+   7
+   8
+  
+
+Record end
+
+  $ hg commit -i -d '15 0' -m end-only plain <<EOF
+  > y
+  > y
+  > EOF
+  diff --git a/plain b/plain
+  1 hunks, 2 lines changed
+  examine changes to 'plain'? [Ynesfdaq?] y
+  
+  @@ -9,3 +9,5 @@
+   7
+   8
+   9
+  +10
+  +11
+  record this change to 'plain'? [Ynesfdaq?] y
+  
+
+  $ hg tip -p
+  changeset:   16:f9900b71a04c
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:15 1970 +0000
+  summary:     end-only
+  
+  diff -r f34a7937ec33 -r f9900b71a04c plain
+  --- a/plain	Thu Jan 01 00:00:14 1970 +0000
+  +++ b/plain	Thu Jan 01 00:00:15 1970 +0000
+  @@ -9,3 +9,5 @@
+   7
+   8
+   9
+  +10
+  +11
+  
+
+  $ mkdir subdir
+  $ cd subdir
+  $ echo a > a
+  $ hg ci -d '16 0' -Amsubdir
+  adding subdir/a
+
+  $ echo a >> a
+  $ hg commit -i -d '16 0' -m subdir-change a <<EOF
+  > y
+  > y
+  > EOF
+  diff --git a/subdir/a b/subdir/a
+  1 hunks, 1 lines changed
+  examine changes to 'subdir/a'? [Ynesfdaq?] y
+  
+  @@ -1,1 +1,2 @@
+   a
+  +a
+  record this change to 'subdir/a'? [Ynesfdaq?] y
+  
+
+  $ hg tip -p
+  changeset:   18:61be427a9deb
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:16 1970 +0000
+  summary:     subdir-change
+  
+  diff -r a7ffae4d61cb -r 61be427a9deb subdir/a
+  --- a/subdir/a	Thu Jan 01 00:00:16 1970 +0000
+  +++ b/subdir/a	Thu Jan 01 00:00:16 1970 +0000
+  @@ -1,1 +1,2 @@
+   a
+  +a
+  
+
+  $ echo a > f1
+  $ echo b > f2
+  $ hg add f1 f2
+
+  $ hg ci -mz -d '17 0'
+
+  $ echo a >> f1
+  $ echo b >> f2
+
+Help, quit
+
+  $ hg commit -i <<EOF
+  > ?
+  > q
+  > EOF
+  diff --git a/subdir/f1 b/subdir/f1
+  1 hunks, 1 lines changed
+  examine changes to 'subdir/f1'? [Ynesfdaq?] ?
+  
+  y - yes, record this change
+  n - no, skip this change
+  e - edit this change manually
+  s - skip remaining changes to this file
+  f - record remaining changes to this file
+  d - done, skip remaining changes and files
+  a - record all changes to all remaining files
+  q - quit, recording no changes
+  ? - ? (display help)
+  examine changes to 'subdir/f1'? [Ynesfdaq?] q
+  
+  abort: user quit
+  [255]
+
+Skip
+
+  $ hg commit -i <<EOF
+  > s
+  > EOF
+  diff --git a/subdir/f1 b/subdir/f1
+  1 hunks, 1 lines changed
+  examine changes to 'subdir/f1'? [Ynesfdaq?] s
+  
+  diff --git a/subdir/f2 b/subdir/f2
+  1 hunks, 1 lines changed
+  examine changes to 'subdir/f2'? [Ynesfdaq?] abort: response expected
+  [255]
+
+No
+
+  $ hg commit -i <<EOF
+  > n
+  > EOF
+  diff --git a/subdir/f1 b/subdir/f1
+  1 hunks, 1 lines changed
+  examine changes to 'subdir/f1'? [Ynesfdaq?] n
+  
+  diff --git a/subdir/f2 b/subdir/f2
+  1 hunks, 1 lines changed
+  examine changes to 'subdir/f2'? [Ynesfdaq?] abort: response expected
+  [255]
+
+f, quit
+
+  $ hg commit -i <<EOF
+  > f
+  > q
+  > EOF
+  diff --git a/subdir/f1 b/subdir/f1
+  1 hunks, 1 lines changed
+  examine changes to 'subdir/f1'? [Ynesfdaq?] f
+  
+  diff --git a/subdir/f2 b/subdir/f2
+  1 hunks, 1 lines changed
+  examine changes to 'subdir/f2'? [Ynesfdaq?] q
+  
+  abort: user quit
+  [255]
+
+s, all
+
+  $ hg commit -i -d '18 0' -mx <<EOF
+  > s
+  > a
+  > EOF
+  diff --git a/subdir/f1 b/subdir/f1
+  1 hunks, 1 lines changed
+  examine changes to 'subdir/f1'? [Ynesfdaq?] s
+  
+  diff --git a/subdir/f2 b/subdir/f2
+  1 hunks, 1 lines changed
+  examine changes to 'subdir/f2'? [Ynesfdaq?] a
+  
+
+  $ hg tip -p
+  changeset:   20:b3df3dda369a
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:18 1970 +0000
+  summary:     x
+  
+  diff -r 6e02d6c9906d -r b3df3dda369a subdir/f2
+  --- a/subdir/f2	Thu Jan 01 00:00:17 1970 +0000
+  +++ b/subdir/f2	Thu Jan 01 00:00:18 1970 +0000
+  @@ -1,1 +1,2 @@
+   b
+  +b
+  
+
+f
+
+  $ hg commit -i -d '19 0' -my <<EOF
+  > f
+  > EOF
+  diff --git a/subdir/f1 b/subdir/f1
+  1 hunks, 1 lines changed
+  examine changes to 'subdir/f1'? [Ynesfdaq?] f
+  
+
+  $ hg tip -p
+  changeset:   21:38ec577f126b
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:19 1970 +0000
+  summary:     y
+  
+  diff -r b3df3dda369a -r 38ec577f126b subdir/f1
+  --- a/subdir/f1	Thu Jan 01 00:00:18 1970 +0000
+  +++ b/subdir/f1	Thu Jan 01 00:00:19 1970 +0000
+  @@ -1,1 +1,2 @@
+   a
+  +a
+  
+
+#if execbit
+
+Preserve chmod +x
+
+  $ chmod +x f1
+  $ echo a >> f1
+  $ hg commit -i -d '20 0' -mz <<EOF
+  > y
+  > y
+  > y
+  > EOF
+  diff --git a/subdir/f1 b/subdir/f1
+  old mode 100644
+  new mode 100755
+  1 hunks, 1 lines changed
+  examine changes to 'subdir/f1'? [Ynesfdaq?] y
+  
+  @@ -1,2 +1,3 @@
+   a
+   a
+  +a
+  record this change to 'subdir/f1'? [Ynesfdaq?] y
+  
+
+  $ hg tip --config diff.git=True -p
+  changeset:   22:3261adceb075
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:20 1970 +0000
+  summary:     z
+  
+  diff --git a/subdir/f1 b/subdir/f1
+  old mode 100644
+  new mode 100755
+  --- a/subdir/f1
+  +++ b/subdir/f1
+  @@ -1,2 +1,3 @@
+   a
+   a
+  +a
+  
+
+Preserve execute permission on original
+
+  $ echo b >> f1
+  $ hg commit -i -d '21 0' -maa <<EOF
+  > y
+  > y
+  > y
+  > EOF
+  diff --git a/subdir/f1 b/subdir/f1
+  1 hunks, 1 lines changed
+  examine changes to 'subdir/f1'? [Ynesfdaq?] y
+  
+  @@ -1,3 +1,4 @@
+   a
+   a
+   a
+  +b
+  record this change to 'subdir/f1'? [Ynesfdaq?] y
+  
+
+  $ hg tip --config diff.git=True -p
+  changeset:   23:b429867550db
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:21 1970 +0000
+  summary:     aa
+  
+  diff --git a/subdir/f1 b/subdir/f1
+  --- a/subdir/f1
+  +++ b/subdir/f1
+  @@ -1,3 +1,4 @@
+   a
+   a
+   a
+  +b
+  
+
+Preserve chmod -x
+
+  $ chmod -x f1
+  $ echo c >> f1
+  $ hg commit -i -d '22 0' -mab <<EOF
+  > y
+  > y
+  > y
+  > EOF
+  diff --git a/subdir/f1 b/subdir/f1
+  old mode 100755
+  new mode 100644
+  1 hunks, 1 lines changed
+  examine changes to 'subdir/f1'? [Ynesfdaq?] y
+  
+  @@ -2,3 +2,4 @@
+   a
+   a
+   b
+  +c
+  record this change to 'subdir/f1'? [Ynesfdaq?] y
+  
+
+  $ hg tip --config diff.git=True -p
+  changeset:   24:0b082130c20a
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:22 1970 +0000
+  summary:     ab
+  
+  diff --git a/subdir/f1 b/subdir/f1
+  old mode 100755
+  new mode 100644
+  --- a/subdir/f1
+  +++ b/subdir/f1
+  @@ -2,3 +2,4 @@
+   a
+   a
+   b
+  +c
+  
+
+#else
+
+Slightly bogus tests to get almost the same repo structure as when the x bit is used
+- but with different hashes.
+
+Mock "Preserve chmod +x"
+
+  $ echo a >> f1
+  $ hg commit -i -d '20 0' -mz <<EOF
+  > y
+  > y
+  > y
+  > EOF
+  diff --git a/subdir/f1 b/subdir/f1
+  1 hunks, 1 lines changed
+  examine changes to 'subdir/f1'? [Ynesfdaq?] y
+  
+  @@ -1,2 +1,3 @@
+   a
+   a
+  +a
+  record this change to 'subdir/f1'? [Ynesfdaq?] y
+  
+
+  $ hg tip --config diff.git=True -p
+  changeset:   22:0d463bd428f5
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:20 1970 +0000
+  summary:     z
+  
+  diff --git a/subdir/f1 b/subdir/f1
+  --- a/subdir/f1
+  +++ b/subdir/f1
+  @@ -1,2 +1,3 @@
+   a
+   a
+  +a
+  
+
+Mock "Preserve execute permission on original"
+
+  $ echo b >> f1
+  $ hg commit -i -d '21 0' -maa <<EOF
+  > y
+  > y
+  > y
+  > EOF
+  diff --git a/subdir/f1 b/subdir/f1
+  1 hunks, 1 lines changed
+  examine changes to 'subdir/f1'? [Ynesfdaq?] y
+  
+  @@ -1,3 +1,4 @@
+   a
+   a
+   a
+  +b
+  record this change to 'subdir/f1'? [Ynesfdaq?] y
+  
+
+  $ hg tip --config diff.git=True -p
+  changeset:   23:0eab41a3e524
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:21 1970 +0000
+  summary:     aa
+  
+  diff --git a/subdir/f1 b/subdir/f1
+  --- a/subdir/f1
+  +++ b/subdir/f1
+  @@ -1,3 +1,4 @@
+   a
+   a
+   a
+  +b
+  
+
+Mock "Preserve chmod -x"
+
+  $ chmod -x f1
+  $ echo c >> f1
+  $ hg commit -i -d '22 0' -mab <<EOF
+  > y
+  > y
+  > y
+  > EOF
+  diff --git a/subdir/f1 b/subdir/f1
+  1 hunks, 1 lines changed
+  examine changes to 'subdir/f1'? [Ynesfdaq?] y
+  
+  @@ -2,3 +2,4 @@
+   a
+   a
+   b
+  +c
+  record this change to 'subdir/f1'? [Ynesfdaq?] y
+  
+
+  $ hg tip --config diff.git=True -p
+  changeset:   24:f4f718f27b7c
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:22 1970 +0000
+  summary:     ab
+  
+  diff --git a/subdir/f1 b/subdir/f1
+  --- a/subdir/f1
+  +++ b/subdir/f1
+  @@ -2,3 +2,4 @@
+   a
+   a
+   b
+  +c
+  
+
+#endif
+
+  $ cd ..
+
+
+Abort early when a merge is in progress
+
+  $ hg up 4
+  1 files updated, 0 files merged, 6 files removed, 0 files unresolved
+
+  $ touch iwillmergethat
+  $ hg add iwillmergethat
+
+  $ hg branch thatbranch
+  marked working directory as branch thatbranch
+  (branches are permanent and global, did you want a bookmark?)
+
+  $ hg ci -m'new head'
+
+  $ hg up default
+  6 files updated, 0 files merged, 2 files removed, 0 files unresolved
+
+  $ hg merge thatbranch
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  (branch merge, don't forget to commit)
+
+  $ hg commit -i -m'will abort'
+  abort: cannot partially commit a merge (use "hg commit" instead)
+  [255]
+
+  $ hg up -C
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+
+Editing patch (and ignoring trailing text)
+
+  $ cat > editor.sh << '__EOF__'
+  > sed -e 7d -e '5s/^-/ /' -e '/^# ---/i\
+  > trailing\nditto' "$1" > tmp
+  > mv tmp "$1"
+  > __EOF__
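+
+The editor script above tweaks the offered hunk (one removal is kept as
+context, another line is dropped) and also inserts stray "trailing"/"ditto"
+lines before the "# ---" separator; the output below shows that such trailing
+text is ignored.
+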
+  $ cat > editedfile << '__EOF__'
+  > This is the first line
+  > This is the second line
+  > This is the third line
+  > __EOF__
+  $ hg add editedfile
+  $ hg commit -medit-patch-1
+  $ cat > editedfile << '__EOF__'
+  > This line has changed
+  > This change will be committed
+  > This is the third line
+  > __EOF__
+  $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit -i -d '23 0' -medit-patch-2 <<EOF
+  > y
+  > e
+  > EOF
+  diff --git a/editedfile b/editedfile
+  1 hunks, 2 lines changed
+  examine changes to 'editedfile'? [Ynesfdaq?] y
+  
+  @@ -1,3 +1,3 @@
+  -This is the first line
+  -This is the second line
+  +This line has changed
+  +This change will be committed
+   This is the third line
+  record this change to 'editedfile'? [Ynesfdaq?] e
+  
+  $ cat editedfile
+  This line has changed
+  This change will be committed
+  This is the third line
+  $ hg cat -r tip editedfile
+  This is the first line
+  This change will be committed
+  This is the third line
+  $ hg revert editedfile
+
+Trying to edit patch for whole file
+
+  $ echo "This is the fourth line" >> editedfile
+  $ hg commit -i <<EOF
+  > e
+  > q
+  > EOF
+  diff --git a/editedfile b/editedfile
+  1 hunks, 1 lines changed
+  examine changes to 'editedfile'? [Ynesfdaq?] e
+  
+  cannot edit patch for whole file
+  examine changes to 'editedfile'? [Ynesfdaq?] q
+  
+  abort: user quit
+  [255]
+  $ hg revert editedfile
+
+Removing changes from patch
+
+  $ sed -e '3s/third/second/' -e '2s/will/will not/' -e 1d editedfile > tmp
+  $ mv tmp editedfile
+  $ echo "This line has been added" >> editedfile
+  $ cat > editor.sh << '__EOF__'
+  > sed -e 's/^[-+]/ /' "$1" > tmp
+  > mv tmp "$1"
+  > __EOF__
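+
+This editor script converts every "+" and "-" line of the hunk into context,
+leaving nothing to record.
+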
+  $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit -i <<EOF
+  > y
+  > e
+  > EOF
+  diff --git a/editedfile b/editedfile
+  1 hunks, 3 lines changed
+  examine changes to 'editedfile'? [Ynesfdaq?] y
+  
+  @@ -1,3 +1,3 @@
+  -This is the first line
+  -This change will be committed
+  -This is the third line
+  +This change will not be committed
+  +This is the second line
+  +This line has been added
+  record this change to 'editedfile'? [Ynesfdaq?] e
+  
+  no changes to record
+  $ cat editedfile
+  This change will not be committed
+  This is the second line
+  This line has been added
+  $ hg cat -r tip editedfile
+  This is the first line
+  This change will be committed
+  This is the third line
+  $ hg revert editedfile
+
+Invalid patch
+
+  $ sed -e '3s/third/second/' -e '2s/will/will not/' -e 1d editedfile > tmp
+  $ mv tmp editedfile
+  $ echo "This line has been added" >> editedfile
+  $ cat > editor.sh << '__EOF__'
+  > sed s/This/That/ "$1" > tmp
+  > mv tmp "$1"
+  > __EOF__
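+
+The edited hunk's lines are rewritten from "This" to "That", so they no longer
+match the file being patched; applying the result fails and the rejects are
+saved to editedfile.rej.
+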
+  $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit -i <<EOF
+  > y
+  > e
+  > EOF
+  diff --git a/editedfile b/editedfile
+  1 hunks, 3 lines changed
+  examine changes to 'editedfile'? [Ynesfdaq?] y
+  
+  @@ -1,3 +1,3 @@
+  -This is the first line
+  -This change will be committed
+  -This is the third line
+  +This change will not be committed
+  +This is the second line
+  +This line has been added
+  record this change to 'editedfile'? [Ynesfdaq?] e
+  
+  patching file editedfile
+  Hunk #1 FAILED at 0
+  1 out of 1 hunks FAILED -- saving rejects to file editedfile.rej
+  abort: patch failed to apply
+  [255]
+  $ cat editedfile
+  This change will not be committed
+  This is the second line
+  This line has been added
+  $ hg cat -r tip editedfile
+  This is the first line
+  This change will be committed
+  This is the third line
+  $ cat editedfile.rej
+  --- editedfile
+  +++ editedfile
+  @@ -1,3 +1,3 @@
+  -That is the first line
+  -That change will be committed
+  -That is the third line
+  +That change will not be committed
+  +That is the second line
+  +That line has been added
+
+Malformed patch - error handling
+
+  $ cat > editor.sh << '__EOF__'
+  > sed -e '/^@/p' "$1" > tmp
+  > mv tmp "$1"
+  > __EOF__
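+
+Duplicating every "@@" line yields a structurally malformed patch; the parser
+aborts with the "range -> range" error shown below.
+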
+  $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit -i <<EOF
+  > y
+  > e
+  > EOF
+  diff --git a/editedfile b/editedfile
+  1 hunks, 3 lines changed
+  examine changes to 'editedfile'? [Ynesfdaq?] y
+  
+  @@ -1,3 +1,3 @@
+  -This is the first line
+  -This change will be committed
+  -This is the third line
+  +This change will not be committed
+  +This is the second line
+  +This line has been added
+  record this change to 'editedfile'? [Ynesfdaq?] e
+  
+  abort: error parsing patch: unhandled transition: range -> range
+  [255]
+
+Random text in random positions is still an error
+
+  $ cat > editor.sh << '__EOF__'
+  > sed -e '/^@/i\
+  > other' "$1" > tmp
+  > mv tmp "$1"
+  > __EOF__
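+
+Inserting a stray "other" line before the hunk header likewise fails to parse,
+this time as an unhandled "file -> other" transition.
+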
+  $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit -i <<EOF
+  > y
+  > e
+  > EOF
+  diff --git a/editedfile b/editedfile
+  1 hunks, 3 lines changed
+  examine changes to 'editedfile'? [Ynesfdaq?] y
+  
+  @@ -1,3 +1,3 @@
+  -This is the first line
+  -This change will be committed
+  -This is the third line
+  +This change will not be committed
+  +This is the second line
+  +This line has been added
+  record this change to 'editedfile'? [Ynesfdaq?] e
+  
+  abort: error parsing patch: unhandled transition: file -> other
+  [255]
+
+  $ hg up -C
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+With win32text
+
+  $ echo '[extensions]' >> .hg/hgrc
+  $ echo 'win32text = ' >> .hg/hgrc
+  $ echo '[decode]' >> .hg/hgrc
+  $ echo '** = cleverdecode:' >> .hg/hgrc
+  $ echo '[encode]' >> .hg/hgrc
+  $ echo '** = cleverencode:' >> .hg/hgrc
+  $ echo '[patch]' >> .hg/hgrc
+  $ echo 'eol = crlf' >> .hg/hgrc
+
+Ignore win32text deprecation warning for now:
+
+  $ echo '[win32text]' >> .hg/hgrc
+  $ echo 'warn = no' >> .hg/hgrc
+
+  $ echo d >> subdir/f1
+  $ hg commit -i -d '24 0' -mw1 <<EOF
+  > y
+  > y
+  > EOF
+  diff --git a/subdir/f1 b/subdir/f1
+  1 hunks, 1 lines changed
+  examine changes to 'subdir/f1'? [Ynesfdaq?] y
+  
+  @@ -3,3 +3,4 @@
+   a
+   b
+   c
+  +d
+  record this change to 'subdir/f1'? [Ynesfdaq?] y
+  
+
+  $ hg tip -p
+  changeset:   28:* (glob)
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:24 1970 +0000
+  summary:     w1
+  
+  diff -r ???????????? -r ???????????? subdir/f1 (glob)
+  --- a/subdir/f1	Thu Jan 01 00:00:23 1970 +0000
+  +++ b/subdir/f1	Thu Jan 01 00:00:24 1970 +0000
+  @@ -3,3 +3,4 @@
+   a
+   b
+   c
+  +d
+  
+
+
+Test --user when ui.username not set
+  $ unset HGUSER
+  $ echo e >> subdir/f1
+  $ hg commit -i  --config ui.username= -d '8 0' --user xyz -m "user flag" <<EOF
+  > y
+  > y
+  > EOF
+  diff --git a/subdir/f1 b/subdir/f1
+  1 hunks, 1 lines changed
+  examine changes to 'subdir/f1'? [Ynesfdaq?] y
+  
+  @@ -4,3 +4,4 @@
+   b
+   c
+   d
+  +e
+  record this change to 'subdir/f1'? [Ynesfdaq?] y
+  
+  $ hg log --template '{author}\n' -l 1
+  xyz
+  $ HGUSER="test"
+  $ export HGUSER
+
+
+Editing patch of newly added file
+
+  $ cat > editor.sh << '__EOF__'
+  > cat "$1"  | sed "s/first/very/g"  > tt
+  > mv tt  "$1"
+  > __EOF__
+  $ cat > newfile << '__EOF__'
+  > This is the first line
+  > This is the second line
+  > This is the third line
+  > __EOF__
+  $ hg add newfile
+  $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit -i -d '23 0' -medit-patch-new <<EOF
+  > y
+  > e
+  > EOF
+  diff --git a/newfile b/newfile
+  new file mode 100644
+  examine changes to 'newfile'? [Ynesfdaq?] y
+  
+  @@ -0,0 +1,3 @@
+  +This is the first line
+  +This is the second line
+  +This is the third line
+  record this change to 'newfile'? [Ynesfdaq?] e
+  
+  $ hg cat -r tip newfile
+  This is the very line
+  This is the second line
+  This is the third line
+
+  $ cat newfile
+  This is the first line
+  This is the second line
+  This is the third line
+
+  $ cd ..
--- a/tests/test-completion.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-completion.t	Thu Apr 16 20:57:51 2015 -0500
@@ -202,8 +202,8 @@
   add: include, exclude, subrepos, dry-run
   annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, ignore-all-space, ignore-space-change, ignore-blank-lines, include, exclude, template
   clone: noupdate, updaterev, rev, branch, pull, uncompressed, ssh, remotecmd, insecure
-  commit: addremove, close-branch, amend, secret, edit, include, exclude, message, logfile, date, user, subrepos
-  diff: rev, change, text, git, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, unified, stat, include, exclude, subrepos
+  commit: addremove, close-branch, amend, secret, edit, interactive, include, exclude, message, logfile, date, user, subrepos
+  diff: rev, change, text, git, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, unified, stat, root, include, exclude, subrepos
   export: output, switch-parent, rev, text, git, nodates
   forget: include, exclude
   init: ssh, remotecmd, insecure
@@ -262,13 +262,13 @@
   debugsuccessorssets: 
   debugwalk: include, exclude
   debugwireargs: three, four, five, ssh, remotecmd, insecure
-  files: rev, print0, include, exclude, template
+  files: rev, print0, include, exclude, template, subrepos
   graft: rev, continue, edit, log, force, currentdate, currentuser, date, user, tool, dry-run
   grep: print0, all, text, follow, ignore-case, files-with-matches, line-number, rev, user, date, include, exclude
   heads: rev, topo, active, closed, style, template
   help: extension, command, keyword
   identify: rev, num, id, branch, tags, bookmarks, ssh, remotecmd, insecure
-  import: strip, base, edit, force, no-commit, bypass, partial, exact, import-branch, message, logfile, date, user, similarity
+  import: strip, base, edit, force, no-commit, bypass, partial, exact, prefix, import-branch, message, logfile, date, user, similarity
   incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
   locate: rev, print0, fullpath, include, exclude
   manifest: rev, all, template
@@ -278,8 +278,8 @@
   phase: public, draft, secret, force, rev
   recover: 
   rename: after, force, include, exclude, dry-run
-  resolve: all, list, mark, unmark, no-status, tool, include, exclude
-  revert: all, date, rev, no-backup, include, exclude, dry-run
+  resolve: all, list, mark, unmark, no-status, tool, include, exclude, template
+  revert: all, date, rev, no-backup, interactive, include, exclude, dry-run
   rollback: dry-run, force
   root: 
   tag: force, local, rev, remove, edit, message, date, user
--- a/tests/test-context.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-context.py	Thu Apr 16 20:57:51 2015 -0500
@@ -51,7 +51,7 @@
 for d in ctxb.diff(ctxa, git=True):
     print d
 
-# test safeness and correctness of "cxt.status()"
+# test safeness and correctness of "ctx.status()"
 print '= checking context.status():'
 
 # ancestor "wcctx ~ 2"
--- a/tests/test-convert-cvs.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-convert-cvs.t	Thu Apr 16 20:57:51 2015 -0500
@@ -397,11 +397,12 @@
   Author: * (glob)
   Branch: HEAD
   Tag: (none) 
+  Branchpoints: branch 
   Log:
   ci1
   
   Members: 
-  	b/c:1.2->1.3 
+  	a:1.1->1.2 
   
   ---------------------
   PatchSet 6 
@@ -409,12 +410,11 @@
   Author: * (glob)
   Branch: HEAD
   Tag: (none) 
-  Branchpoints: branch 
   Log:
   ci1
   
   Members: 
-  	a:1.1->1.2 
+  	b/c:1.2->1.3 
   
   ---------------------
   PatchSet 7 
--- a/tests/test-convert-datesort.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-convert-datesort.t	Thu Apr 16 20:57:51 2015 -0500
@@ -85,9 +85,9 @@
   $ hg -R t-datesort log -G --template '{rev} "{desc}"\n'
   o    12 "c1"
   |\
-  | o  11 "b2x"
+  | _  11 "b2x"
   | |
-  | | o  10 "a7x"
+  | | _  10 "a7x"
   | | |
   o | |  9 "c0"
   | | |
@@ -136,9 +136,9 @@
   $ hg -R t-sourcesort log -G --template '{rev} "{desc}"\n'
   o    12 "c1"
   |\
-  | o  11 "b2x"
+  | _  11 "b2x"
   | |
-  | | o  10 "a7x"
+  | | _  10 "a7x"
   | | |
   o | |  9 "c0"
   | | |
@@ -189,11 +189,11 @@
   |\
   | o  11 "c0"
   | |
-  o |  10 "b2x"
+  _ |  10 "b2x"
   | |
   o |  9 "b1"
   | |
-  | | o  8 "a7x"
+  | | _  8 "a7x"
   | | |
   | | o  7 "a6"
   | | |
--- a/tests/test-convert-filemap.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-convert-filemap.t	Thu Apr 16 20:57:51 2015 -0500
@@ -387,15 +387,15 @@
   1 addb
   0 closedefault
   $ glog -R branchpruning-hg1
-  o  5 "closedefault" files:
+  _  5 "closedefault" files:
   |
   o  4 "addb" files: b
   |
-  | o  3 "closeempty" files:
+  | _  3 "closeempty" files:
   | |
   | o  2 "emptybranch" files:
   |/
-  | o  1 "closefoo" files:
+  | _  1 "closefoo" files:
   |/
   o  0 "adda" files: a
   
@@ -422,7 +422,7 @@
   1 closeempty
   0 closedefault
   $ glog -R branchpruning-hg2
-  o  1 "closedefault" files:
+  _  1 "closedefault" files:
   |
   o  0 "addb" files: b
   
--- a/tests/test-convert-git.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-convert-git.t	Thu Apr 16 20:57:51 2015 -0500
@@ -170,7 +170,79 @@
 
 full conversion
 
-  $ hg -q convert --datesort git-repo2 fullrepo
+  $ hg convert --datesort git-repo2 fullrepo \
+  > --config extensions.progress= --config progress.assume-tty=1 \
+  > --config progress.delay=0 --config progress.changedelay=0 \
+  > --config progress.refresh=0 --config progress.width=60
+  \r (no-eol) (esc)
+  scanning [===>                                        ] 1/9\r (no-eol) (esc)
+  scanning [========>                                   ] 2/9\r (no-eol) (esc)
+  scanning [=============>                              ] 3/9\r (no-eol) (esc)
+  scanning [==================>                         ] 4/9\r (no-eol) (esc)
+  scanning [=======================>                    ] 5/9\r (no-eol) (esc)
+  scanning [============================>               ] 6/9\r (no-eol) (esc)
+  scanning [=================================>          ] 7/9\r (no-eol) (esc)
+  scanning [======================================>     ] 8/9\r (no-eol) (esc)
+  scanning [===========================================>] 9/9\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
+  \r (no-eol) (esc)
+  converting [                                          ] 0/9\r (no-eol) (esc)
+  getting files [======================================>] 1/1\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
+  \r (no-eol) (esc)
+  converting [===>                                      ] 1/9\r (no-eol) (esc)
+  getting files [======================================>] 1/1\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
+  \r (no-eol) (esc)
+  converting [========>                                 ] 2/9\r (no-eol) (esc)
+  getting files [======================================>] 1/1\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
+  \r (no-eol) (esc)
+  converting [=============>                            ] 3/9\r (no-eol) (esc)
+  getting files [======================================>] 1/1\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
+  \r (no-eol) (esc)
+  converting [=================>                        ] 4/9\r (no-eol) (esc)
+  getting files [======================================>] 1/1\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
+  \r (no-eol) (esc)
+  converting [======================>                   ] 5/9\r (no-eol) (esc)
+  getting files [===>                                   ] 1/8\r (no-eol) (esc)
+  getting files [========>                              ] 2/8\r (no-eol) (esc)
+  getting files [=============>                         ] 3/8\r (no-eol) (esc)
+  getting files [==================>                    ] 4/8\r (no-eol) (esc)
+  getting files [=======================>               ] 5/8\r (no-eol) (esc)
+  getting files [============================>          ] 6/8\r (no-eol) (esc)
+  getting files [=================================>     ] 7/8\r (no-eol) (esc)
+  getting files [======================================>] 8/8\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
+  \r (no-eol) (esc)
+  converting [===========================>              ] 6/9\r (no-eol) (esc)
+  getting files [======================================>] 1/1\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
+  \r (no-eol) (esc)
+  converting [===============================>          ] 7/9\r (no-eol) (esc)
+  getting files [======================================>] 1/1\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
+  \r (no-eol) (esc)
+  converting [====================================>     ] 8/9\r (no-eol) (esc)
+  getting files [==================>                    ] 1/2\r (no-eol) (esc)
+  getting files [======================================>] 2/2\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
+  initializing destination fullrepo repository
+  scanning source...
+  sorting...
+  converting...
+  8 add foo
+  7 change foo
+  6 add quux
+  5 add bar
+  4 add baz
+  3 Octopus merge
+  2 change bar
+  1 change foo
+  0 Discard change to foo
+  updating bookmarks
   $ hg up -q -R fullrepo
   $ glog -R fullrepo
   @    9 "Discard change to foo" files: foo
--- a/tests/test-convert-svn-encoding.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-convert-svn-encoding.t	Thu Apr 16 20:57:51 2015 -0500
@@ -53,7 +53,6 @@
   source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/trunk@1
   converting: 0/6 revisions (0.00%)
   committing changelog
-  couldn't read revision branch cache names: * (glob)
   4 hello
   source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/trunk@2
   converting: 1/6 revisions (16.67%)
--- a/tests/test-copy.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-copy.t	Thu Apr 16 20:57:51 2015 -0500
@@ -138,7 +138,7 @@
 moving a missing file
   $ rm foo
   $ hg mv foo foo3
-  foo: deleted in working copy
+  foo: deleted in working directory
   foo3 does not exist!
   $ hg up -qC .
 
--- a/tests/test-debugcommands.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-debugcommands.t	Thu Apr 16 20:57:51 2015 -0500
@@ -18,6 +18,7 @@
       deltas    :  0 ( 0.00%)
   
   avg chain length  : 0
+  max chain length  : 0
   compression ratio : 0
   
   uncompressed data size (min/max/avg) : 43 / 43 / 43
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-devel-warnings.t	Thu Apr 16 20:57:51 2015 -0500
@@ -0,0 +1,90 @@
+
+  $ cat << EOF > buggylocking.py
+  > """A small extension that acquire locks in the wrong order
+  > """
+  > 
+  > from mercurial import cmdutil
+  > 
+  > cmdtable = {}
+  > command = cmdutil.command(cmdtable)
+  > 
+  > @command('buggylocking', [], '')
+  > def buggylocking(ui, repo):
+  >     tr = repo.transaction('buggy')
+  >     lo = repo.lock()
+  >     wl = repo.wlock()
+  >     wl.release()
+  >     lo.release()
+  > 
+  > @command('properlocking', [], '')
+  > def properlocking(ui, repo):
+  >     """check that reentrance is fine"""
+  >     wl = repo.wlock()
+  >     lo = repo.lock()
+  >     tr = repo.transaction('proper')
+  >     tr2 = repo.transaction('proper')
+  >     lo2 = repo.lock()
+  >     wl2 = repo.wlock()
+  >     wl2.release()
+  >     lo2.release()
+  >     tr2.close()
+  >     tr.close()
+  >     lo.release()
+  >     wl.release()
+  > 
+  > @command('nowaitlocking', [], '')
+  > def nowaitlocking(ui, repo):
+  >     lo = repo.lock()
+  >     wl = repo.wlock(wait=False)
+  >     wl.release()
+  >     lo.release()
+  > EOF
+
+  $ cat << EOF >> $HGRCPATH
+  > [extensions]
+  > buggylocking=$TESTTMP/buggylocking.py
+  > [devel]
+  > all=1
+  > EOF
+
+  $ hg init lock-checker
+  $ cd lock-checker
+  $ hg buggylocking
+  devel-warn: transaction with no lock at: $TESTTMP/buggylocking.py:11 (buggylocking)
+  devel-warn: "wlock" acquired after "lock" at: $TESTTMP/buggylocking.py:13 (buggylocking)
+  $ cat << EOF >> $HGRCPATH
+  > [devel]
+  > all=0
+  > check-locks=1
+  > EOF
+  $ hg buggylocking
+  devel-warn: transaction with no lock at: $TESTTMP/buggylocking.py:11 (buggylocking)
+  devel-warn: "wlock" acquired after "lock" at: $TESTTMP/buggylocking.py:13 (buggylocking)
+  $ hg buggylocking --traceback
+  devel-warn: transaction with no lock at:
+   */hg:* in * (glob)
+   */mercurial/dispatch.py:* in run (glob)
+   */mercurial/dispatch.py:* in dispatch (glob)
+   */mercurial/dispatch.py:* in _runcatch (glob)
+   */mercurial/dispatch.py:* in _dispatch (glob)
+   */mercurial/dispatch.py:* in runcommand (glob)
+   */mercurial/dispatch.py:* in _runcommand (glob)
+   */mercurial/dispatch.py:* in checkargs (glob)
+   */mercurial/dispatch.py:* in <lambda> (glob)
+   */mercurial/util.py:* in check (glob)
+   $TESTTMP/buggylocking.py:* in buggylocking (glob)
+  devel-warn: "wlock" acquired after "lock" at:
+   */hg:* in * (glob)
+   */mercurial/dispatch.py:* in run (glob)
+   */mercurial/dispatch.py:* in dispatch (glob)
+   */mercurial/dispatch.py:* in _runcatch (glob)
+   */mercurial/dispatch.py:* in _dispatch (glob)
+   */mercurial/dispatch.py:* in runcommand (glob)
+   */mercurial/dispatch.py:* in _runcommand (glob)
+   */mercurial/dispatch.py:* in checkargs (glob)
+   */mercurial/dispatch.py:* in <lambda> (glob)
+   */mercurial/util.py:* in check (glob)
+   $TESTTMP/buggylocking.py:* in buggylocking (glob)
+  $ hg properlocking
+  $ hg nowaitlocking
+  $ cd ..
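
The new test-devel-warnings.t above exercises the devel.all and devel.check-locks
warnings for transaction and lock ordering. As a minimal sketch (not part of this
patch; the command name 'orderedlocking' is made up for illustration), an extension
command that keeps the checker quiet takes "wlock" before "lock" and opens the
transaction only once both locks are held:

    from mercurial import cmdutil

    cmdtable = {}
    command = cmdutil.command(cmdtable)

    @command('orderedlocking', [], '')
    def orderedlocking(ui, repo):
        # "wlock" before "lock", mirroring the properlocking command above
        wl = repo.wlock()
        lo = repo.lock()
        # open the transaction only while the store lock is held
        tr = repo.transaction('ordered')
        tr.close()
        lo.release()
        wl.release()
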
--- a/tests/test-diff-subdir.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-diff-subdir.t	Thu Apr 16 20:57:51 2015 -0500
@@ -44,4 +44,24 @@
   @@ -0,0 +1,1 @@
   +2
 
+relative to beta
+
   $ cd ..
+  $ hg diff --nodates --root beta
+  diff -r 7d5ef1aea329 two
+  --- a/two
+  +++ b/two
+  @@ -0,0 +1,1 @@
+  +2
+
+inside beta
+
+  $ cd beta
+  $ hg diff --nodates --root .
+  diff -r 7d5ef1aea329 two
+  --- a/two
+  +++ b/two
+  @@ -0,0 +1,1 @@
+  +2
+
+  $ cd ..
--- a/tests/test-diff-unified.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-diff-unified.t	Thu Apr 16 20:57:51 2015 -0500
@@ -286,4 +286,51 @@
   -b
   +a
 
+showfunc diff
+  $ cat > f1 << EOF
+  > int main() {
+  >     int a = 0;
+  >     int b = 1;
+  >     int c = 2;
+  >     int d = 3;
+  >     return a + b + c + d;
+  > }
+  > EOF
+  $ hg commit -m addfunction
+  $ cat > f1 << EOF
+  > int main() {
+  >     int a = 0;
+  >     int b = 1;
+  >     int c = 2;
+  >     int e = 3;
+  >     return a + b + c + e;
+  > }
+  > EOF
+  $ hg diff --git
+  diff --git a/f1 b/f1
+  --- a/f1
+  +++ b/f1
+  @@ -2,6 +2,6 @@
+       int a = 0;
+       int b = 1;
+       int c = 2;
+  -    int d = 3;
+  -    return a + b + c + d;
+  +    int e = 3;
+  +    return a + b + c + e;
+   }
+  $ hg diff --config diff.showfunc=True --git
+  diff --git a/f1 b/f1
+  --- a/f1
+  +++ b/f1
+  @@ -2,6 +2,6 @@ int main() {
+       int a = 0;
+       int b = 1;
+       int c = 2;
+  -    int d = 3;
+  -    return a + b + c + d;
+  +    int e = 3;
+  +    return a + b + c + e;
+   }
+
   $ cd ..
--- a/tests/test-diffstat.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-diffstat.t	Thu Apr 16 20:57:51 2015 -0500
@@ -69,4 +69,39 @@
    file with spaces |  Bin 
    1 files changed, 0 insertions(+), 0 deletions(-)
 
+diffstat within directories:
+
+  $ hg rm -f 'file with spaces'
+
+  $ mkdir dir1 dir2
+  $ echo new1 > dir1/new
+  $ echo new2 > dir2/new
+  $ hg add dir1/new dir2/new
+  $ hg diff --stat
+   dir1/new |  1 +
+   dir2/new |  1 +
+   2 files changed, 2 insertions(+), 0 deletions(-)
+
+  $ hg diff --stat --root dir1
+   new |  1 +
+   1 files changed, 1 insertions(+), 0 deletions(-)
+
+  $ hg diff --stat --root dir1 dir2
+  warning: dir2 not inside relative root dir1
+
+  $ hg diff --stat --root dir1 -I dir1/old
+
+  $ cd dir1
+  $ hg diff --stat .
+   dir1/new |  1 +
+   1 files changed, 1 insertions(+), 0 deletions(-)
+  $ hg diff --stat --root .
+   new |  1 +
+   1 files changed, 1 insertions(+), 0 deletions(-)
+
+  $ hg diff --stat --root ../dir1 ../dir2
+  warning: ../dir2 not inside relative root . (glob)
+
+  $ hg diff --stat --root . -I old
+
   $ cd ..
--- a/tests/test-doctest.py	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-doctest.py	Thu Apr 16 20:57:51 2015 -0500
@@ -19,6 +19,7 @@
 testmod('mercurial.hgweb.hgwebdir_mod')
 testmod('mercurial.match')
 testmod('mercurial.minirst')
+testmod('mercurial.patch')
 testmod('mercurial.pathutil')
 testmod('mercurial.revset')
 testmod('mercurial.store')
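
With mercurial.patch added to the doctest run above, any ">>>" examples in that
module's docstrings are now executed by this test. As a generic illustration of
the format testmod() collects (this function is hypothetical, not taken from
mercurial.patch):

    def trim(s, width):
        """Return s truncated to at most width characters.

        >>> trim('abcdef', 4)
        'abcd'
        >>> trim('ab', 4)
        'ab'
        """
        return s[:width]
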
--- a/tests/test-extension.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-extension.t	Thu Apr 16 20:57:51 2015 -0500
@@ -946,6 +946,9 @@
 Declare the version as supporting this hg version, show regular bts link:
   $ hgver=`$PYTHON -c 'from mercurial import util; print util.version().split("+")[0]'`
   $ echo 'testedwith = """'"$hgver"'"""' >> throw.py
+  $ if [ -z "$hgver" ]; then
+  >   echo "unable to fetch a mercurial version. Make sure __version__ is correct";
+  > fi
   $ rm -f throw.pyc throw.pyo
   $ hg --config extensions.throw=throw.py throw 2>&1 | egrep '^\*\*'
   ** unknown exception encountered, please report by visiting
@@ -1140,3 +1143,27 @@
   C sub3/3
 
   $ cd ..
+
+Test synopsis and docstring extending
+
+  $ hg init exthelp
+  $ cat > exthelp.py <<EOF
+  > from mercurial import commands, extensions
+  > def exbookmarks(orig, *args, **opts):
+  >     return orig(*args, **opts)
+  > def uisetup(ui):
+  >     synopsis = ' GREPME [--foo] [-x]'
+  >     docstring = '''
+  >     GREPME make sure that this is in the help!
+  >     '''
+  >     extensions.wrapcommand(commands.table, 'bookmarks', exbookmarks,
+  >                            synopsis, docstring)
+  > EOF
+  $ abspath=`pwd`/exthelp.py
+  $ echo '[extensions]' >> $HGRCPATH
+  $ echo "exthelp = $abspath" >> $HGRCPATH
+  $ cd exthelp
+  $ hg help bookmarks | grep GREPME
+  hg bookmarks [OPTIONS]... [NAME]... GREPME [--foo] [-x]
+      GREPME make sure that this is in the help!
+
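
The exthelp test above checks the optional synopsis and docstring arguments of
extensions.wrapcommand(), which get appended to the wrapped command's help. A
minimal sketch of the same pattern (the extra synopsis and help text below are
made up; only the wrapcommand() call shape comes from the test):

    from mercurial import commands, extensions

    def exbookmarks(orig, *args, **opts):
        # run any extra logic here, then fall through to the original command
        return orig(*args, **opts)

    def uisetup(ui):
        extensions.wrapcommand(commands.table, 'bookmarks', exbookmarks,
                               ' [--extra]',
                               '\n    extra text shown by "hg help bookmarks"\n')
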
--- a/tests/test-fetch.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-fetch.t	Thu Apr 16 20:57:51 2015 -0500
@@ -339,7 +339,8 @@
   marked working directory as branch topic
   (branches are permanent and global, did you want a bookmark?)
   $ hg -R n2 fetch -m merge n1
-  abort: working dir not at branch tip (use "hg update" to check out branch tip)
+  abort: working directory not at branch tip
+  (use "hg update" to check out branch tip)
   [255]
 
 parent should be 0 (fetch did not update or merge anything)
--- a/tests/test-fileset-generated.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-fileset-generated.t	Thu Apr 16 20:57:51 2015 -0500
@@ -141,39 +141,34 @@
   
 Test revert
 
-BROKEN: the files that get undeleted were not modified, they were removed,
-and content1_content2_missing-tracked was also not modified, it was deleted
-
   $ hg revert 'set:modified()'
   reverting content1_content1_content3-tracked
   reverting content1_content2_content1-tracked
-  undeleting content1_content2_content1-untracked
-  undeleting content1_content2_content2-untracked
   reverting content1_content2_content3-tracked
-  undeleting content1_content2_content3-untracked
-  reverting content1_content2_missing-tracked
-  undeleting content1_content2_missing-untracked
   reverting missing_content2_content3-tracked
 
-BROKEN: only the files that get forgotten are correct
-
   $ hg revert 'set:added()'
   forgetting content1_missing_content1-tracked
   forgetting content1_missing_content3-tracked
-  undeleting missing_content2_content2-untracked
-  undeleting missing_content2_content3-untracked
-  reverting missing_content2_missing-tracked
-  undeleting missing_content2_missing-untracked
   forgetting missing_missing_content3-tracked
 
   $ hg revert 'set:removed()'
   undeleting content1_content1_content1-untracked
   undeleting content1_content1_content3-untracked
   undeleting content1_content1_missing-untracked
+  undeleting content1_content2_content1-untracked
+  undeleting content1_content2_content2-untracked
+  undeleting content1_content2_content3-untracked
+  undeleting content1_content2_missing-untracked
+  undeleting missing_content2_content2-untracked
+  undeleting missing_content2_content3-untracked
+  undeleting missing_content2_missing-untracked
 
   $ hg revert 'set:deleted()'
   reverting content1_content1_missing-tracked
+  reverting content1_content2_missing-tracked
   forgetting content1_missing_missing-tracked
+  reverting missing_content2_missing-tracked
   forgetting missing_missing_missing-tracked
 
   $ hg revert 'set:unknown()'
--- a/tests/test-fileset.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-fileset.t	Thu Apr 16 20:57:51 2015 -0500
@@ -111,6 +111,13 @@
   $ hg add b2link
 #endif
 
+#if no-windows
+  $ echo foo > con.xml
+  $ fileset 'not portable()'
+  con.xml
+  $ hg --config ui.portablefilenames=ignore add con.xml
+#endif
+
   >>> file('1k', 'wb').write(' '*1024)
   >>> file('2k', 'wb').write(' '*2048)
   $ hg add 1k 2k
@@ -220,6 +227,12 @@
   b2link
 #endif
 
+#if no-windows
+  $ fileset -r1 'not portable()'
+  con.xml
+  $ hg forget 'con.xml'
+#endif
+
   $ fileset -r4 'subrepo("re:su.*")'
   sub
   $ fileset -r4 'subrepo("sub")'
--- a/tests/test-gendoc.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-gendoc.t	Thu Apr 16 20:57:51 2015 -0500
@@ -1,4 +1,5 @@
 #require docutils
+#require gettext
 
 Test document extraction
 
--- a/tests/test-getbundle.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-getbundle.t	Thu Apr 16 20:57:51 2015 -0500
@@ -170,7 +170,7 @@
   $ hg debuggetbundle repo bundle -t bundle2
   $ hg debugbundle bundle
   Stream params: {}
-  b2x:changegroup -- "{'version': '01'}"
+  changegroup -- "{'version': '01'}"
       7704483d56b2a7b5db54dcee7c62378ac629b348
       29a4d1f17bd3f0779ca0525bebb1cfb51067c738
       713346a995c363120712aed1aee7e04afd867638
--- a/tests/test-git-export.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-git-export.t	Thu Apr 16 20:57:51 2015 -0500
@@ -5,46 +5,279 @@
 
 New file:
 
-  $ echo new > new
+  $ mkdir dir1
+  $ echo new > dir1/new
   $ hg ci -Amnew
-  adding new
+  adding dir1/new
   $ hg diff --git -r 0
-  diff --git a/new b/new
+  diff --git a/dir1/new b/dir1/new
   new file mode 100644
   --- /dev/null
-  +++ b/new
+  +++ b/dir1/new
   @@ -0,0 +1,1 @@
   +new
 
 Copy:
 
-  $ hg cp new copy
+  $ mkdir dir2
+  $ hg cp dir1/new dir1/copy
+  $ echo copy1 >> dir1/copy
+  $ hg cp dir1/new dir2/copy
+  $ echo copy2 >> dir2/copy
   $ hg ci -mcopy
   $ hg diff --git -r 1:tip
+  diff --git a/dir1/new b/dir1/copy
+  copy from dir1/new
+  copy to dir1/copy
+  --- a/dir1/new
+  +++ b/dir1/copy
+  @@ -1,1 +1,2 @@
+   new
+  +copy1
+  diff --git a/dir1/new b/dir2/copy
+  copy from dir1/new
+  copy to dir2/copy
+  --- a/dir1/new
+  +++ b/dir2/copy
+  @@ -1,1 +1,2 @@
+   new
+  +copy2
+
+Cross and same-directory copies with a relative root:
+
+  $ hg diff --git --root .. -r 1:tip
+  abort: .. not under root '$TESTTMP'
+  [255]
+  $ hg diff --git --root doesnotexist -r 1:tip
+  $ hg diff --git --root . -r 1:tip
+  diff --git a/dir1/new b/dir1/copy
+  copy from dir1/new
+  copy to dir1/copy
+  --- a/dir1/new
+  +++ b/dir1/copy
+  @@ -1,1 +1,2 @@
+   new
+  +copy1
+  diff --git a/dir1/new b/dir2/copy
+  copy from dir1/new
+  copy to dir2/copy
+  --- a/dir1/new
+  +++ b/dir2/copy
+  @@ -1,1 +1,2 @@
+   new
+  +copy2
+  $ hg diff --git --root dir1 -r 1:tip
+  diff --git a/new b/copy
+  copy from new
+  copy to copy
+  --- a/new
+  +++ b/copy
+  @@ -1,1 +1,2 @@
+   new
+  +copy1
+
+  $ hg diff --git --root dir2/ -r 1:tip
+  diff --git a/copy b/copy
+  new file mode 100644
+  --- /dev/null
+  +++ b/copy
+  @@ -0,0 +1,2 @@
+  +new
+  +copy2
+
+  $ hg diff --git --root dir1 -r 1:tip -I '**/copy'
   diff --git a/new b/copy
   copy from new
   copy to copy
+  --- a/new
+  +++ b/copy
+  @@ -1,1 +1,2 @@
+   new
+  +copy1
+
+  $ hg diff --git --root dir1 -r 1:tip dir2
+  warning: dir2 not inside relative root dir1
+
+  $ hg diff --git --root dir1 -r 1:tip 'dir2/{copy}'
+  warning: dir2/{copy} not inside relative root dir1 (glob)
+
+  $ cd dir1
+  $ hg diff --git --root .. -r 1:tip
+  diff --git a/dir1/new b/dir1/copy
+  copy from dir1/new
+  copy to dir1/copy
+  --- a/dir1/new
+  +++ b/dir1/copy
+  @@ -1,1 +1,2 @@
+   new
+  +copy1
+  diff --git a/dir1/new b/dir2/copy
+  copy from dir1/new
+  copy to dir2/copy
+  --- a/dir1/new
+  +++ b/dir2/copy
+  @@ -1,1 +1,2 @@
+   new
+  +copy2
+
+  $ hg diff --git --root ../.. -r 1:tip
+  abort: ../.. not under root '$TESTTMP'
+  [255]
+  $ hg diff --git --root ../doesnotexist -r 1:tip
+  $ hg diff --git --root .. -r 1:tip
+  diff --git a/dir1/new b/dir1/copy
+  copy from dir1/new
+  copy to dir1/copy
+  --- a/dir1/new
+  +++ b/dir1/copy
+  @@ -1,1 +1,2 @@
+   new
+  +copy1
+  diff --git a/dir1/new b/dir2/copy
+  copy from dir1/new
+  copy to dir2/copy
+  --- a/dir1/new
+  +++ b/dir2/copy
+  @@ -1,1 +1,2 @@
+   new
+  +copy2
+
+  $ hg diff --git --root . -r 1:tip
+  diff --git a/new b/copy
+  copy from new
+  copy to copy
+  --- a/new
+  +++ b/copy
+  @@ -1,1 +1,2 @@
+   new
+  +copy1
+  $ hg diff --git --root . -r 1:tip copy
+  diff --git a/new b/copy
+  copy from new
+  copy to copy
+  --- a/new
+  +++ b/copy
+  @@ -1,1 +1,2 @@
+   new
+  +copy1
+  $ hg diff --git --root . -r 1:tip ../dir2
+  warning: ../dir2 not inside relative root . (glob)
+  $ hg diff --git --root . -r 1:tip '../dir2/*'
+  warning: ../dir2/* not inside relative root . (glob)
+  $ cd ..
 
 Rename:
 
-  $ hg mv copy rename
+  $ hg mv dir1/copy dir1/rename1
+  $ echo rename1 >> dir1/rename1
+  $ hg mv dir2/copy dir1/rename2
+  $ echo rename2 >> dir1/rename2
   $ hg ci -mrename
   $ hg diff --git -r 2:tip
-  diff --git a/copy b/rename
+  diff --git a/dir1/copy b/dir1/rename1
+  rename from dir1/copy
+  rename to dir1/rename1
+  --- a/dir1/copy
+  +++ b/dir1/rename1
+  @@ -1,2 +1,3 @@
+   new
+   copy1
+  +rename1
+  diff --git a/dir2/copy b/dir1/rename2
+  rename from dir2/copy
+  rename to dir1/rename2
+  --- a/dir2/copy
+  +++ b/dir1/rename2
+  @@ -1,2 +1,3 @@
+   new
+   copy2
+  +rename2
+
+Cross and same-directory renames with a relative root:
+
+  $ hg diff --root dir1 --git -r 2:tip
+  diff --git a/copy b/rename1
   rename from copy
-  rename to rename
+  rename to rename1
+  --- a/copy
+  +++ b/rename1
+  @@ -1,2 +1,3 @@
+   new
+   copy1
+  +rename1
+  diff --git a/rename2 b/rename2
+  new file mode 100644
+  --- /dev/null
+  +++ b/rename2
+  @@ -0,0 +1,3 @@
+  +new
+  +copy2
+  +rename2
+
+  $ hg diff --root dir2 --git -r 2:tip
+  diff --git a/copy b/copy
+  deleted file mode 100644
+  --- a/copy
+  +++ /dev/null
+  @@ -1,2 +0,0 @@
+  -new
+  -copy2
+
+  $ hg diff --root dir1 --git -r 2:tip -I '**/copy'
+  diff --git a/copy b/copy
+  deleted file mode 100644
+  --- a/copy
+  +++ /dev/null
+  @@ -1,2 +0,0 @@
+  -new
+  -copy1
+
+  $ hg diff --root dir1 --git -r 2:tip -I '**/rename*'
+  diff --git a/copy b/rename1
+  copy from copy
+  copy to rename1
+  --- a/copy
+  +++ b/rename1
+  @@ -1,2 +1,3 @@
+   new
+   copy1
+  +rename1
+  diff --git a/rename2 b/rename2
+  new file mode 100644
+  --- /dev/null
+  +++ b/rename2
+  @@ -0,0 +1,3 @@
+  +new
+  +copy2
+  +rename2
 
 Delete:
 
-  $ hg rm rename
+  $ hg rm dir1/*
   $ hg ci -mdelete
   $ hg diff --git -r 3:tip
-  diff --git a/rename b/rename
+  diff --git a/dir1/new b/dir1/new
   deleted file mode 100644
-  --- a/rename
+  --- a/dir1/new
   +++ /dev/null
   @@ -1,1 +0,0 @@
   -new
+  diff --git a/dir1/rename1 b/dir1/rename1
+  deleted file mode 100644
+  --- a/dir1/rename1
+  +++ /dev/null
+  @@ -1,3 +0,0 @@
+  -new
+  -copy1
+  -rename1
+  diff --git a/dir1/rename2 b/dir1/rename2
+  deleted file mode 100644
+  --- a/dir1/rename2
+  +++ /dev/null
+  @@ -1,3 +0,0 @@
+  -new
+  -copy2
+  -rename2
 
   $ cat > src <<EOF
   > 1
--- a/tests/test-globalopts.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-globalopts.t	Thu Apr 16 20:57:51 2015 -0500
@@ -309,7 +309,7 @@
    grep          search for a pattern in specified files and revisions
    heads         show branch heads
    help          show help for a given topic or a help overview
-   identify      identify the working copy or specified revision
+   identify      identify the working directory or specified revision
    import        import an ordered set of patches
    incoming      show new changesets found in source
    init          create a new repository in the given directory
@@ -390,7 +390,7 @@
    grep          search for a pattern in specified files and revisions
    heads         show branch heads
    help          show help for a given topic or a help overview
-   identify      identify the working copy or specified revision
+   identify      identify the working directory or specified revision
    import        import an ordered set of patches
    incoming      show new changesets found in source
    init          create a new repository in the given directory
--- a/tests/test-glog.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-glog.t	Thu Apr 16 20:57:51 2015 -0500
@@ -1541,6 +1541,9 @@
   $ testlog --follow
   []
   []
+  $ testlog -rnull
+  ['null']
+  []
   $ echo a > a
   $ echo aa > aa
   $ echo f > f
@@ -1764,6 +1767,13 @@
   nodetag 1
   nodetag 0
 
+Test --follow null parent
+
+  $ hg up -q null
+  $ testlog -f
+  []
+  []
+
 Test --follow-first
 
   $ hg up -q 3
@@ -2192,13 +2202,6 @@
       (func
         ('symbol', 'rev')
         ('symbol', '6'))))
-  --- log.nodes	* (glob)
-  +++ glog.nodes	* (glob)
-  @@ -1,3 +1,3 @@
-  -nodetag 6
-   nodetag 8
-   nodetag 7
-  +nodetag 6
 
 Test --follow-first and forward --rev
 
@@ -2240,6 +2243,14 @@
         ('symbol', 'rev')
         ('symbol', '6'))))
 
+Test --follow with --rev of graphlog extension
+
+  $ hg --config extensions.graphlog= glog -qfr1
+  o  1:216d4c92cf98
+  |
+  o  0:f8035bb17114
+  
+
 Test subdir
 
   $ hg up -q 3
@@ -2354,4 +2365,14 @@
      date:        Thu Jan 01 00:00:00 1970 +0000
   
 
+should not draw line down to null due to the magic of fullreposet
+
+  $ hg log -G -r 'all()' | tail -6
+  |
+  o  changeset:   0:f8035bb17114
+     user:        test
+     date:        Thu Jan 01 00:00:00 1970 +0000
+     summary:     add a
+  
+
   $ cd ..
--- a/tests/test-graft.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-graft.t	Thu Apr 16 20:57:51 2015 -0500
@@ -313,7 +313,7 @@
   2:5c095ad7e90f871700f02dd1fa5012cb4498a2d4
 
   $ hg log --debug -r tip
-  changeset:   13:9db0f28fd3747e92c57d015f53b5593aeec53c2d
+  changeset:   13:7a4785234d87ec1aa420ed6b11afe40fa73e12a9
   tag:         tip
   phase:       draft
   parent:      12:b592ea63bb0c19a6c5c44685ee29a2284f9f1b8f
@@ -324,6 +324,7 @@
   files+:      b
   files-:      a
   extra:       branch=default
+  extra:       intermediate-source=ef0ef43d49e79e81ddafdc7997401ba0041efc82
   extra:       source=5c095ad7e90f871700f02dd1fa5012cb4498a2d4
   description:
   2
@@ -338,10 +339,10 @@
 Disallow grafting already grafted csets with the same origin onto each other
   $ hg up -q 13
   $ hg graft 2
-  skipping revision 2:5c095ad7e90f (already grafted to 13:9db0f28fd374)
+  skipping revision 2:5c095ad7e90f (already grafted to 13:7a4785234d87)
   [255]
   $ hg graft 7
-  skipping already grafted revision 7:ef0ef43d49e7 (13:9db0f28fd374 also has origin 2:5c095ad7e90f)
+  skipping already grafted revision 7:ef0ef43d49e7 (13:7a4785234d87 also has origin 2:5c095ad7e90f)
   [255]
 
   $ hg up -q 7
@@ -349,7 +350,7 @@
   skipping revision 2:5c095ad7e90f (already grafted to 7:ef0ef43d49e7)
   [255]
   $ hg graft tip
-  skipping already grafted revision 13:9db0f28fd374 (7:ef0ef43d49e7 also has origin 2:5c095ad7e90f)
+  skipping already grafted revision 13:7a4785234d87 (7:ef0ef43d49e7 also has origin 2:5c095ad7e90f)
   [255]
 
 Graft with --log
@@ -543,7 +544,7 @@
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     3
   
-  changeset:   13:9db0f28fd374
+  changeset:   13:7a4785234d87
   user:        foo
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     2
@@ -578,7 +579,7 @@
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     2
   
-  changeset:   13:9db0f28fd374
+  changeset:   13:7a4785234d87
   user:        foo
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     2
@@ -621,7 +622,7 @@
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     2
   
-  changeset:   13:9db0f28fd374
+  changeset:   13:7a4785234d87
   user:        foo
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     2
@@ -637,7 +638,7 @@
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     2
   
-  changeset:   22:e95864da75a0
+  changeset:   22:d1cb6591fa4b
   branch:      dev
   tag:         tip
   user:        foo
@@ -649,11 +650,11 @@
 
   $ hg graft 'origin(13) or destination(origin(13))'
   skipping ancestor revision 21:7e61b508e709
-  skipping ancestor revision 22:e95864da75a0
-  skipping revision 2:5c095ad7e90f (already grafted to 22:e95864da75a0)
+  skipping ancestor revision 22:d1cb6591fa4b
+  skipping revision 2:5c095ad7e90f (already grafted to 22:d1cb6591fa4b)
   grafting 7:ef0ef43d49e7 "2"
   warning: can't find ancestor for 'b' copied from 'a'!
-  grafting 13:9db0f28fd374 "2"
+  grafting 13:7a4785234d87 "2"
   warning: can't find ancestor for 'b' copied from 'a'!
   grafting 19:9627f653b421 "2"
   merging b
@@ -664,7 +665,7 @@
   $ hg graft 19 0 6
   skipping ungraftable merge revision 6
   skipping ancestor revision 0:68795b066622
-  skipping already grafted revision 19:9627f653b421 (22:e95864da75a0 also has origin 2:5c095ad7e90f)
+  skipping already grafted revision 19:9627f653b421 (22:d1cb6591fa4b also has origin 2:5c095ad7e90f)
   [255]
   $ hg graft 19 0 6 --force
   skipping ungraftable merge revision 6
@@ -679,12 +680,12 @@
   $ hg ci -m 28
   $ hg backout 28
   reverting a
-  changeset 29:8389853bba65 backs out changeset 28:cd42a33e1848
+  changeset 29:53177ba928f6 backs out changeset 28:50a516bb8b57
   $ hg graft 28
-  skipping ancestor revision 28:cd42a33e1848
+  skipping ancestor revision 28:50a516bb8b57
   [255]
   $ hg graft 28 --force
-  grafting 28:cd42a33e1848 "28"
+  grafting 28:50a516bb8b57 "28"
   merging a
   $ cat a
   abc
@@ -694,7 +695,7 @@
   $ echo def > a
   $ hg ci -m 31
   $ hg graft 28 --force --tool internal:fail
-  grafting 28:cd42a33e1848 "28"
+  grafting 28:50a516bb8b57 "28"
   abort: unresolved conflicts, can't continue
   (use hg resolve and hg graft --continue)
   [255]
@@ -707,7 +708,7 @@
   $ hg resolve -m a
   (no more unresolved files)
   $ hg graft -c
-  grafting 28:cd42a33e1848 "28"
+  grafting 28:50a516bb8b57 "28"
   $ cat a
   abc
 
@@ -719,7 +720,7 @@
   $ hg --config extensions.strip= strip 2
   saved backup bundle to $TESTTMP/a/.hg/strip-backup/5c095ad7e90f-d323a1e4-backup.hg (glob)
   $ hg graft tmp
-  skipping already grafted revision 8:9db0f28fd374 (2:ef0ef43d49e7 also has unknown origin 5c095ad7e90f)
+  skipping already grafted revision 8:7a4785234d87 (2:ef0ef43d49e7 also has unknown origin 5c095ad7e90f)
   [255]
 
 Empty graft
@@ -728,5 +729,45 @@
   $ hg tag -f something
   $ hg graft -qr 27
   $ hg graft -f 27
-  grafting 27:3d35c4c79e5a "28"
-  note: graft of 27:3d35c4c79e5a created no changes to commit
+  grafting 27:ed6c7e54e319 "28"
+  note: graft of 27:ed6c7e54e319 created no changes to commit
+
+  $ cd ..
+
+Graft to duplicate a commit
+
+  $ hg init graftsibling
+  $ cd graftsibling
+  $ touch a
+  $ hg commit -qAm a
+  $ touch b
+  $ hg commit -qAm b
+  $ hg log -G -T '{rev}\n'
+  @  1
+  |
+  o  0
+  
+  $ hg up -q 0
+  $ hg graft -r 1
+  grafting 1:0e067c57feba "b" (tip)
+  $ hg log -G -T '{rev}\n'
+  @  2
+  |
+  | o  1
+  |/
+  o  0
+  
+Graft to duplicate a commit twice
+
+  $ hg up -q 0
+  $ hg graft -r 2
+  grafting 2:044ec77f6389 "b" (tip)
+  $ hg log -G -T '{rev}\n'
+  @  3
+  |
+  | o  2
+  |/
+  | o  1
+  |/
+  o  0
+  
--- a/tests/test-grep.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-grep.t	Thu Apr 16 20:57:51 2015 -0500
@@ -82,6 +82,10 @@
   port:1:2:+:eggs:export
   port:0:1:+:spam:import
 
+  $ hg up -q null
+  $ hg grep -f port
+  [1]
+
   $ cd ..
   $ hg init t2
   $ cd t2
--- a/tests/test-hardlinks.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-hardlinks.t	Thu Apr 16 20:57:51 2015 -0500
@@ -58,6 +58,13 @@
 Create hardlinked clone r2:
 
   $ hg clone -U --debug r1 r2
+  linking: 1
+  linking: 2
+  linking: 3
+  linking: 4
+  linking: 5
+  linking: 6
+  linking: 7
   linked 7 files
 
 Create non-hardlinked clone r3:
--- a/tests/test-help.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-help.t	Thu Apr 16 20:57:51 2015 -0500
@@ -72,7 +72,7 @@
    grep          search for a pattern in specified files and revisions
    heads         show branch heads
    help          show help for a given topic or a help overview
-   identify      identify the working copy or specified revision
+   identify      identify the working directory or specified revision
    import        import an ordered set of patches
    incoming      show new changesets found in source
    init          create a new repository in the given directory
@@ -147,7 +147,7 @@
    grep          search for a pattern in specified files and revisions
    heads         show branch heads
    help          show help for a given topic or a help overview
-   identify      identify the working copy or specified revision
+   identify      identify the working directory or specified revision
    import        import an ordered set of patches
    incoming      show new changesets found in source
    init          create a new repository in the given directory
@@ -245,6 +245,7 @@
        acl           hooks for controlling repository access
        blackbox      log repository events to a blackbox for debugging
        bugzilla      hooks for integrating with the Bugzilla bug tracker
+       censor        erase file content at a given revision
        churn         command to display statistics about repository history
        color         colorize output from some commands
        convert       import revisions from foreign VCS repositories into
@@ -411,7 +412,7 @@
   Mercurial Distributed SCM (version *) (glob)
   (see http://mercurial.selenic.com for more information)
   
-  Copyright (C) 2005-2014 Matt Mackall and others
+  Copyright (C) 2005-2015 Matt Mackall and others
   This is free software; see the source for copying conditions. There is NO
   warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
 
@@ -508,6 +509,7 @@
    -B --ignore-blank-lines  ignore changes whose lines are all blank
    -U --unified NUM         number of lines of context to show
       --stat                output diffstat-style summary of changes
+      --root DIR            produce diffs relative to subdirectory
    -I --include PATTERN [+] include names matching the given patterns
    -X --exclude PATTERN [+] exclude names matching the given patterns
    -S --subrepos            recurse into subrepositories
@@ -689,7 +691,7 @@
    grep          search for a pattern in specified files and revisions
    heads         show branch heads
    help          show help for a given topic or a help overview
-   identify      identify the working copy or specified revision
+   identify      identify the working directory or specified revision
    import        import an ordered set of patches
    incoming      show new changesets found in source
    init          create a new repository in the given directory
@@ -1101,6 +1103,125 @@
   abort: help section not found
   [255]
 
+Test dynamic list of merge tools only shows up once
+  $ hg help merge-tools
+  Merge Tools
+  """""""""""
+  
+      To merge files Mercurial uses merge tools.
+  
+      A merge tool combines two different versions of a file into a merged file.
+      Merge tools are given the two files and the greatest common ancestor of
+      the two file versions, so they can determine the changes made on both
+      branches.
+  
+      Merge tools are used both for "hg resolve", "hg merge", "hg update", "hg
+      backout" and in several extensions.
+  
+      Usually, the merge tool tries to automatically reconcile the files by
+      combining all non-overlapping changes that occurred separately in the two
+      different evolutions of the same initial base file. Furthermore, some
+      interactive merge programs make it easier to manually resolve conflicting
+      merges, either in a graphical way, or by inserting some conflict markers.
+      Mercurial does not include any interactive merge programs but relies on
+      external tools for that.
+  
+      Available merge tools
+      =====================
+  
+      External merge tools and their properties are configured in the merge-
+      tools configuration section - see hgrc(5) - but they can often just be
+      named by their executable.
+  
+      A merge tool is generally usable if its executable can be found on the
+      system and if it can handle the merge. The executable is found if it is an
+      absolute or relative executable path or the name of an application in the
+      executable search path. The tool is assumed to be able to handle the merge
+      if it can handle symlinks if the file is a symlink, if it can handle
+      binary files if the file is binary, and if a GUI is available if the tool
+      requires a GUI.
+  
+      There are some internal merge tools which can be used. The internal merge
+      tools are:
+  
+      ":dump"
+        Creates three versions of the files to merge, containing the contents of
+        local, other and base. These files can then be used to perform a merge
+        manually. If the file to be merged is named "a.txt", these files will
+        accordingly be named "a.txt.local", "a.txt.other" and "a.txt.base" and
+        they will be placed in the same directory as "a.txt".
+  
+      ":fail"
+        Rather than attempting to merge files that were modified on both
+        branches, it marks them as unresolved. The resolve command must be used
+        to resolve these conflicts.
+  
+      ":local"
+        Uses the local version of files as the merged version.
+  
+      ":merge"
+        Uses the internal non-interactive simple merge algorithm for merging
+        files. It will fail if there are any conflicts and leave markers in the
+        partially merged file. Markers will have two sections, one for each side
+        of merge.
+  
+      ":merge3"
+        Uses the internal non-interactive simple merge algorithm for merging
+        files. It will fail if there are any conflicts and leave markers in the
+        partially merged file. Marker will have three sections, one from each
+        side of the merge and one for the base content.
+  
+      ":other"
+        Uses the other version of files as the merged version.
+  
+      ":prompt"
+        Asks the user which of the local or the other version to keep as the
+        merged version.
+  
+      ":tagmerge"
+        Uses the internal tag merge algorithm (experimental).
+  
+      Internal tools are always available and do not require a GUI but will by
+      default not handle symlinks or binary files.
+  
+      Choosing a merge tool
+      =====================
+  
+      Mercurial uses these rules when deciding which merge tool to use:
+  
+      1. If a tool has been specified with the --tool option to merge or
+         resolve, it is used.  If it is the name of a tool in the merge-tools
+         configuration, its configuration is used. Otherwise the specified tool
+         must be executable by the shell.
+      2. If the "HGMERGE" environment variable is present, its value is used and
+         must be executable by the shell.
+      3. If the filename of the file to be merged matches any of the patterns in
+         the merge-patterns configuration section, the first usable merge tool
+         corresponding to a matching pattern is used. Here, binary capabilities
+         of the merge tool are not considered.
+      4. If ui.merge is set it will be considered next. If the value is not the
+         name of a configured tool, the specified value is used and must be
+         executable by the shell. Otherwise the named tool is used if it is
+         usable.
+      5. If any usable merge tools are present in the merge-tools configuration
+         section, the one with the highest priority is used.
+      6. If a program named "hgmerge" can be found on the system, it is used -
+         but it will by default not be used for symlinks and binary files.
+      7. If the file to be merged is not binary and is not a symlink, then
+         internal ":merge" is used.
+      8. The merge of the file fails and must be resolved before commit.
+  
+      Note:
+         After selecting a merge program, Mercurial will by default attempt to
+         merge the files using a simple merge algorithm first. Only if it
+         doesn't succeed because of conflicting changes Mercurial will actually
+         execute the merge program. Whether to use the simple merge algorithm
+         first can be controlled by the premerge setting of the merge tool.
+         Premerge is enabled by default unless the file is binary or a symlink.
+  
+      See the merge-tools and ui sections of hgrc(5) for details on the
+      configuration of merge tools.
+
 Test usage of section marks in help documents
 
   $ cd "$TESTDIR"/../doc
@@ -1536,7 +1657,7 @@
   identify
   </a>
   </td><td>
-  identify the working copy or specified revision
+  identify the working directory or specified revision
   </td></tr>
   <tr><td>
   <a href="/help/import">
--- a/tests/test-hgrc.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-hgrc.t	Thu Apr 16 20:57:51 2015 -0500
@@ -71,7 +71,7 @@
   Mercurial Distributed SCM (version *) (glob)
   (see http://mercurial.selenic.com for more information)
   
-  Copyright (C) 2005-2014 Matt Mackall and others
+  Copyright (C) 2005-2015 Matt Mackall and others
   This is free software; see the source for copying conditions. There is NO
   warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
   $ unset FAKEPATH
--- a/tests/test-hgweb-commands.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-hgweb-commands.t	Thu Apr 16 20:57:51 2015 -0500
@@ -726,7 +726,6 @@
   <ul>
    <li><a href="/help">help</a></li>
   </ul>
-  <p></p>
   <div class="atom-logo">
   <a href="/atom-log" title="subscribe to atom feed">
   <img class="atom-logo" src="/static/feed-icon-14x14.png" alt="atom feed" />
@@ -752,11 +751,13 @@
   </div>
   
   <table class="bigtable">
+  <thead>
    <tr>
     <th class="age">age</th>
     <th class="author">author</th>
     <th class="description">description</th>
    </tr>
+  </thead>
   <tbody class="stripes2">
    <tr>
     <td class="age">Thu, 01 Jan 1970 00:00:00 +0000</td>
@@ -873,7 +874,8 @@
   </tr>
   <tr>
    <th class="date">date</th>
-   <td class="date age">Thu, 01 Jan 1970 00:00:00 +0000</td></tr>
+   <td class="date age">Thu, 01 Jan 1970 00:00:00 +0000</td>
+  </tr>
   <tr>
    <th class="author">parents</th>
    <td class="author"></td>
@@ -894,8 +896,7 @@
       <a id="diffstatexpand" href="javascript:toggleDiffstat()">[<tt>+</tt>]</a>
       <div id="diffstatdetails" style="display:none;">
         <a href="javascript:toggleDiffstat()">[<tt>-</tt>]</a>
-        <p></p>
-        <table class="stripes2">  <tr>
+        <table class="diffstat-table stripes2">  <tr>
       <td class="diffstat-file"><a href="#l1.1">da/foo</a></td>
       <td class="diffstat-total" align="right">1</td>
       <td class="diffstat-graph">
@@ -1012,11 +1013,13 @@
   </div>
   
   <table class="bigtable">
+  <thead>
    <tr>
     <th class="age">age</th>
     <th class="author">author</th>
     <th class="description">description</th>
    </tr>
+  </thead>
   <tbody class="stripes2">
    <tr>
     <td class="age">Thu, 01 Jan 1970 00:00:00 +0000</td>
@@ -1869,7 +1872,7 @@
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=capabilities'; echo
   200 Script output follows
   
-  lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024
+  lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1*%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 (glob)
 
 heads
 
@@ -2049,7 +2052,7 @@
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=capabilities'; echo
   200 Script output follows
   
-  lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch stream-preferred stream unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024
+  lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch stream-preferred stream bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1*%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 (glob)
 
 heads
 
--- a/tests/test-hgweb-descend-empties.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-hgweb-descend-empties.t	Thu Apr 16 20:57:51 2015 -0500
@@ -81,11 +81,13 @@
   </form>
   
   <table class="bigtable">
+  <thead>
   <tr>
     <th class="name">name</th>
     <th class="size">size</th>
     <th class="permissions">permissions</th>
   </tr>
+  </thead>
   <tbody class="stripes2">
   <tr class="fileline">
     <td class="name"><a href="/file/9087c84a0f5d/">[up]</a></td>
--- a/tests/test-hgweb-diffs.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-hgweb-diffs.t	Thu Apr 16 20:57:51 2015 -0500
@@ -97,7 +97,8 @@
   </tr>
   <tr>
    <th class="date">date</th>
-   <td class="date age">Thu, 01 Jan 1970 00:00:00 +0000</td></tr>
+   <td class="date age">Thu, 01 Jan 1970 00:00:00 +0000</td>
+  </tr>
   <tr>
    <th class="author">parents</th>
    <td class="author"></td>
@@ -118,8 +119,7 @@
       <a id="diffstatexpand" href="javascript:toggleDiffstat()">[<tt>+</tt>]</a>
       <div id="diffstatdetails" style="display:none;">
         <a href="javascript:toggleDiffstat()">[<tt>-</tt>]</a>
-        <p></p>
-        <table class="stripes2">  <tr>
+        <table class="diffstat-table stripes2">  <tr>
       <td class="diffstat-file"><a href="#l1.1">a</a></td>
       <td class="diffstat-total" align="right">1</td>
       <td class="diffstat-graph">
@@ -369,7 +369,8 @@
   </tr>
   <tr>
    <th class="date">date</th>
-   <td class="date age">Thu, 01 Jan 1970 00:00:00 +0000</td></tr>
+   <td class="date age">Thu, 01 Jan 1970 00:00:00 +0000</td>
+  </tr>
   <tr>
    <th class="author">parents</th>
    <td class="author"></td>
@@ -390,8 +391,7 @@
       <a id="diffstatexpand" href="javascript:toggleDiffstat()">[<tt>+</tt>]</a>
       <div id="diffstatdetails" style="display:none;">
         <a href="javascript:toggleDiffstat()">[<tt>-</tt>]</a>
-        <p></p>
-        <table class="stripes2">  <tr>
+        <table class="diffstat-table stripes2">  <tr>
       <td class="diffstat-file"><a href="#l1.1">a</a></td>
       <td class="diffstat-total" align="right">1</td>
       <td class="diffstat-graph">
--- a/tests/test-hgweb-empty.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-hgweb-empty.t	Thu Apr 16 20:57:51 2015 -0500
@@ -48,7 +48,6 @@
   <ul>
    <li><a href="/help">help</a></li>
   </ul>
-  <p></p>
   <div class="atom-logo">
   <a href="/atom-log" title="subscribe to atom feed">
   <img class="atom-logo" src="/static/feed-icon-14x14.png" alt="atom feed" />
@@ -74,11 +73,13 @@
   </div>
   
   <table class="bigtable">
+  <thead>
    <tr>
     <th class="age">age</th>
     <th class="author">author</th>
     <th class="description">description</th>
    </tr>
+  </thead>
   <tbody class="stripes2">
   
   </tbody>
@@ -158,7 +159,6 @@
   <ul>
    <li><a href="/help">help</a></li>
   </ul>
-  <p></p>
   <div class="atom-logo">
   <a href="/atom-log" title="subscribe to atom feed">
   <img class="atom-logo" src="/static/feed-icon-14x14.png" alt="atom feed" />
@@ -184,11 +184,13 @@
   </div>
   
   <table class="bigtable">
+  <thead>
    <tr>
     <th class="age">age</th>
     <th class="author">author</th>
     <th class="description">description</th>
    </tr>
+  </thead>
   <tbody class="stripes2">
   
   </tbody>
@@ -264,7 +266,6 @@
   <ul>
    <li><a href="/help">help</a></li>
   </ul>
-  <p></p>
   <div class="atom-logo">
   <a href="/atom-log" title="subscribe to atom feed">
   <img class="atom-logo" src="/static/feed-icon-14x14.png" alt="atom feed" />
@@ -431,11 +432,13 @@
   </form>
   
   <table class="bigtable">
+  <thead>
   <tr>
     <th class="name">name</th>
     <th class="size">size</th>
     <th class="permissions">permissions</th>
   </tr>
+  </thead>
   <tbody class="stripes2">
   <tr class="fileline">
     <td class="name"><a href="/file/000000000000/">[up]</a></td>
--- a/tests/test-hgweb-filelog.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-hgweb-filelog.t	Thu Apr 16 20:57:51 2015 -0500
@@ -156,7 +156,6 @@
   <ul>
   <li><a href="/help">help</a></li>
   </ul>
-  <p></p>
   <div class="atom-logo">
   <a href="/atom-log/01de2d66a28d/a" title="subscribe to atom feed">
   <img class="atom-logo" src="/static/feed-icon-14x14.png" alt="atom feed" />
@@ -181,11 +180,13 @@
   | <a href="/log/5ed941583260/a">(0)</a> <a href="/log/tip/a">tip</a> </div>
   
   <table class="bigtable">
+  <thead>
    <tr>
     <th class="age">age</th>
     <th class="author">author</th>
     <th class="description">description</th>
    </tr>
+  </thead>
   <tbody class="stripes2">
    <tr>
     <td class="age">Thu, 01 Jan 1970 00:00:00 +0000</td>
@@ -266,7 +267,6 @@
   <ul>
   <li><a href="/help">help</a></li>
   </ul>
-  <p></p>
   <div class="atom-logo">
   <a href="/atom-log/01de2d66a28d/a" title="subscribe to atom feed">
   <img class="atom-logo" src="/static/feed-icon-14x14.png" alt="atom feed" />
@@ -291,11 +291,13 @@
   | <a href="/log/5ed941583260/a">(0)</a> <a href="/log/tip/a">tip</a> </div>
   
   <table class="bigtable">
+  <thead>
    <tr>
     <th class="age">age</th>
     <th class="author">author</th>
     <th class="description">description</th>
    </tr>
+  </thead>
   <tbody class="stripes2">
    <tr>
     <td class="age">Thu, 01 Jan 1970 00:00:00 +0000</td>
@@ -376,7 +378,6 @@
   <ul>
   <li><a href="/help">help</a></li>
   </ul>
-  <p></p>
   <div class="atom-logo">
   <a href="/atom-log/5ed941583260/a" title="subscribe to atom feed">
   <img class="atom-logo" src="/static/feed-icon-14x14.png" alt="atom feed" />
@@ -401,11 +402,13 @@
   | <a href="/log/5ed941583260/a">(0)</a> <a href="/log/tip/a">tip</a> </div>
   
   <table class="bigtable">
+  <thead>
    <tr>
     <th class="age">age</th>
     <th class="author">author</th>
     <th class="description">description</th>
    </tr>
+  </thead>
   <tbody class="stripes2">
    <tr>
     <td class="age">Thu, 01 Jan 1970 00:00:00 +0000</td>
@@ -481,7 +484,6 @@
   <ul>
   <li><a href="/help">help</a></li>
   </ul>
-  <p></p>
   <div class="atom-logo">
   <a href="/atom-log/5ed941583260/a" title="subscribe to atom feed">
   <img class="atom-logo" src="/static/feed-icon-14x14.png" alt="atom feed" />
@@ -506,11 +508,13 @@
   | <a href="/log/5ed941583260/a">(0)</a> <a href="/log/tip/a">tip</a> </div>
   
   <table class="bigtable">
+  <thead>
    <tr>
     <th class="age">age</th>
     <th class="author">author</th>
     <th class="description">description</th>
    </tr>
+  </thead>
   <tbody class="stripes2">
    <tr>
     <td class="age">Thu, 01 Jan 1970 00:00:00 +0000</td>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-hgweb-json.t	Thu Apr 16 20:57:51 2015 -0500
@@ -0,0 +1,1111 @@
+#require json
+#require serve
+
+  $ request() {
+  >   $TESTDIR/get-with-headers.py --json localhost:$HGPORT "$1"
+  > }
+
+  $ hg init test
+  $ cd test
+  $ mkdir da
+  $ echo foo > da/foo
+  $ echo foo > foo
+  $ hg -q ci -A -m initial
+  $ echo bar > foo
+  $ hg ci -m 'modify foo'
+  $ echo bar > da/foo
+  $ hg ci -m 'modify da/foo'
+  $ hg bookmark bookmark1
+  $ hg up default
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  (leaving bookmark bookmark1)
+  $ hg mv foo foo-new
+  $ hg commit -m 'move foo'
+  $ hg tag -m 'create tag' tag1
+  $ hg phase --public -r .
+  $ echo baz > da/foo
+  $ hg commit -m 'another commit to da/foo'
+  $ hg tag -m 'create tag2' tag2
+  $ hg bookmark bookmark2
+  $ hg -q up -r 0
+  $ hg -q branch test-branch
+  $ echo branch > foo
+  $ hg commit -m 'create test branch'
+  $ echo branch_commit_2 > foo
+  $ hg commit -m 'another commit in test-branch'
+  $ hg -q up default
+  $ hg merge --tool :local test-branch
+  0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+  (branch merge, don't forget to commit)
+  $ hg commit -m 'merge test-branch into default'
+
+  $ hg log -G
+  @    changeset:   9:cc725e08502a
+  |\   tag:         tip
+  | |  parent:      6:ceed296fe500
+  | |  parent:      8:ed66c30e87eb
+  | |  user:        test
+  | |  date:        Thu Jan 01 00:00:00 1970 +0000
+  | |  summary:     merge test-branch into default
+  | |
+  | o  changeset:   8:ed66c30e87eb
+  | |  branch:      test-branch
+  | |  user:        test
+  | |  date:        Thu Jan 01 00:00:00 1970 +0000
+  | |  summary:     another commit in test-branch
+  | |
+  | o  changeset:   7:6ab967a8ab34
+  | |  branch:      test-branch
+  | |  parent:      0:06e557f3edf6
+  | |  user:        test
+  | |  date:        Thu Jan 01 00:00:00 1970 +0000
+  | |  summary:     create test branch
+  | |
+  o |  changeset:   6:ceed296fe500
+  | |  bookmark:    bookmark2
+  | |  user:        test
+  | |  date:        Thu Jan 01 00:00:00 1970 +0000
+  | |  summary:     create tag2
+  | |
+  o |  changeset:   5:f2890a05fea4
+  | |  tag:         tag2
+  | |  user:        test
+  | |  date:        Thu Jan 01 00:00:00 1970 +0000
+  | |  summary:     another commit to da/foo
+  | |
+  o |  changeset:   4:93a8ce14f891
+  | |  user:        test
+  | |  date:        Thu Jan 01 00:00:00 1970 +0000
+  | |  summary:     create tag
+  | |
+  o |  changeset:   3:78896eb0e102
+  | |  tag:         tag1
+  | |  user:        test
+  | |  date:        Thu Jan 01 00:00:00 1970 +0000
+  | |  summary:     move foo
+  | |
+  o |  changeset:   2:8d7c456572ac
+  | |  bookmark:    bookmark1
+  | |  user:        test
+  | |  date:        Thu Jan 01 00:00:00 1970 +0000
+  | |  summary:     modify da/foo
+  | |
+  o |  changeset:   1:f8bbb9024b10
+  |/   user:        test
+  |    date:        Thu Jan 01 00:00:00 1970 +0000
+  |    summary:     modify foo
+  |
+  o  changeset:   0:06e557f3edf6
+     user:        test
+     date:        Thu Jan 01 00:00:00 1970 +0000
+     summary:     initial
+  
+
+  $ hg serve -p $HGPORT -d --pid-file=hg.pid -A access.log -E error.log
+  $ cat hg.pid >> $DAEMON_PIDS
+
+(Try to keep these in roughly the order they are defined in webcommands.py)
+
+(log is handled by filelog/ and changelog/ - ignore it)
+
+(rawfile/ doesn't use templating - nothing to test)
+
+file/{revision}/{path} shows file revision
+
+  $ request json-file/06e557f3edf6/foo
+  200 Script output follows
+  
+  "not yet implemented"
+
+file/{revision} shows root directory info
+
+  $ request json-file/cc725e08502a
+  200 Script output follows
+  
+  {
+    "abspath": "/",
+    "bookmarks": [],
+    "directories": [
+      {
+        "abspath": "/da",
+        "basename": "da",
+        "emptydirs": ""
+      }
+    ],
+    "files": [
+      {
+        "abspath": ".hgtags",
+        "basename": ".hgtags",
+        "date": [
+          0.0,
+          0
+        ],
+        "flags": "",
+        "size": 92
+      },
+      {
+        "abspath": "foo-new",
+        "basename": "foo-new",
+        "date": [
+          0.0,
+          0
+        ],
+        "flags": "",
+        "size": 4
+      }
+    ],
+    "node": "cc725e08502a79dd1eda913760fbe06ed7a9abc7",
+    "tags": [
+      "tip"
+    ]
+  }
+
+changelog/ shows information about several changesets
+
+  $ request json-changelog
+  200 Script output follows
+  
+  {
+    "changeset_count": 10,
+    "changesets": [
+      {
+        "bookmarks": [],
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "merge test-branch into default",
+        "node": "cc725e08502a79dd1eda913760fbe06ed7a9abc7",
+        "tags": [
+          "tip"
+        ],
+        "user": "test"
+      },
+      {
+        "bookmarks": [],
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "another commit in test-branch",
+        "node": "ed66c30e87eb65337c05a4229efaa5f1d5285a90",
+        "tags": [],
+        "user": "test"
+      },
+      {
+        "bookmarks": [],
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "create test branch",
+        "node": "6ab967a8ab3489227a83f80e920faa039a71819f",
+        "tags": [],
+        "user": "test"
+      },
+      {
+        "bookmarks": [
+          "bookmark2"
+        ],
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "create tag2",
+        "node": "ceed296fe500c3fac9541e31dad860cb49c89e45",
+        "tags": [],
+        "user": "test"
+      },
+      {
+        "bookmarks": [],
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "another commit to da/foo",
+        "node": "f2890a05fea49bfaf9fb27ed5490894eba32da78",
+        "tags": [
+          "tag2"
+        ],
+        "user": "test"
+      },
+      {
+        "bookmarks": [],
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "create tag",
+        "node": "93a8ce14f89156426b7fa981af8042da53f03aa0",
+        "tags": [],
+        "user": "test"
+      },
+      {
+        "bookmarks": [],
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "move foo",
+        "node": "78896eb0e102174ce9278438a95e12543e4367a7",
+        "tags": [
+          "tag1"
+        ],
+        "user": "test"
+      },
+      {
+        "bookmarks": [
+          "bookmark1"
+        ],
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "modify da/foo",
+        "node": "8d7c456572acf3557e8ed8a07286b10c408bcec5",
+        "tags": [],
+        "user": "test"
+      },
+      {
+        "bookmarks": [],
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "modify foo",
+        "node": "f8bbb9024b10f93cdbb8d940337398291d40dea8",
+        "tags": [],
+        "user": "test"
+      },
+      {
+        "bookmarks": [],
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "initial",
+        "node": "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e",
+        "tags": [],
+        "user": "test"
+      }
+    ],
+    "node": "cc725e08502a79dd1eda913760fbe06ed7a9abc7"
+  }
+
+changelog/{revision} shows information starting at a specific changeset
+
+  $ request json-changelog/f8bbb9024b10
+  200 Script output follows
+  
+  {
+    "changeset_count": 10,
+    "changesets": [
+      {
+        "bookmarks": [],
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "modify foo",
+        "node": "f8bbb9024b10f93cdbb8d940337398291d40dea8",
+        "tags": [],
+        "user": "test"
+      },
+      {
+        "bookmarks": [],
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "initial",
+        "node": "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e",
+        "tags": [],
+        "user": "test"
+      }
+    ],
+    "node": "f8bbb9024b10f93cdbb8d940337398291d40dea8"
+  }
+
+shortlog/ shows information about a set of changesets
+
+  $ request json-shortlog
+  200 Script output follows
+  
+  {
+    "changeset_count": 10,
+    "changesets": [
+      {
+        "bookmarks": [],
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "merge test-branch into default",
+        "node": "cc725e08502a79dd1eda913760fbe06ed7a9abc7",
+        "tags": [
+          "tip"
+        ],
+        "user": "test"
+      },
+      {
+        "bookmarks": [],
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "another commit in test-branch",
+        "node": "ed66c30e87eb65337c05a4229efaa5f1d5285a90",
+        "tags": [],
+        "user": "test"
+      },
+      {
+        "bookmarks": [],
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "create test branch",
+        "node": "6ab967a8ab3489227a83f80e920faa039a71819f",
+        "tags": [],
+        "user": "test"
+      },
+      {
+        "bookmarks": [
+          "bookmark2"
+        ],
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "create tag2",
+        "node": "ceed296fe500c3fac9541e31dad860cb49c89e45",
+        "tags": [],
+        "user": "test"
+      },
+      {
+        "bookmarks": [],
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "another commit to da/foo",
+        "node": "f2890a05fea49bfaf9fb27ed5490894eba32da78",
+        "tags": [
+          "tag2"
+        ],
+        "user": "test"
+      },
+      {
+        "bookmarks": [],
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "create tag",
+        "node": "93a8ce14f89156426b7fa981af8042da53f03aa0",
+        "tags": [],
+        "user": "test"
+      },
+      {
+        "bookmarks": [],
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "move foo",
+        "node": "78896eb0e102174ce9278438a95e12543e4367a7",
+        "tags": [
+          "tag1"
+        ],
+        "user": "test"
+      },
+      {
+        "bookmarks": [
+          "bookmark1"
+        ],
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "modify da/foo",
+        "node": "8d7c456572acf3557e8ed8a07286b10c408bcec5",
+        "tags": [],
+        "user": "test"
+      },
+      {
+        "bookmarks": [],
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "modify foo",
+        "node": "f8bbb9024b10f93cdbb8d940337398291d40dea8",
+        "tags": [],
+        "user": "test"
+      },
+      {
+        "bookmarks": [],
+        "date": [
+          0.0,
+          0
+        ],
+        "desc": "initial",
+        "node": "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e",
+        "tags": [],
+        "user": "test"
+      }
+    ],
+    "node": "cc725e08502a79dd1eda913760fbe06ed7a9abc7"
+  }
+
+changeset/ renders the tip changeset
+
+  $ request json-rev
+  200 Script output follows
+  
+  {
+    "bookmarks": [],
+    "branch": "default",
+    "date": [
+      0.0,
+      0
+    ],
+    "desc": "merge test-branch into default",
+    "node": "cc725e08502a79dd1eda913760fbe06ed7a9abc7",
+    "parents": [
+      "ceed296fe500c3fac9541e31dad860cb49c89e45",
+      "ed66c30e87eb65337c05a4229efaa5f1d5285a90"
+    ],
+    "phase": "draft",
+    "tags": [
+      "tip"
+    ],
+    "user": "test"
+  }
+
+changeset/{revision} shows tags
+
+  $ request json-rev/78896eb0e102
+  200 Script output follows
+  
+  {
+    "bookmarks": [],
+    "branch": "default",
+    "date": [
+      0.0,
+      0
+    ],
+    "desc": "move foo",
+    "node": "78896eb0e102174ce9278438a95e12543e4367a7",
+    "parents": [
+      "8d7c456572acf3557e8ed8a07286b10c408bcec5"
+    ],
+    "phase": "public",
+    "tags": [
+      "tag1"
+    ],
+    "user": "test"
+  }
+
+changeset/{revision} shows bookmarks
+
+  $ request json-rev/8d7c456572ac
+  200 Script output follows
+  
+  {
+    "bookmarks": [
+      "bookmark1"
+    ],
+    "branch": "default",
+    "date": [
+      0.0,
+      0
+    ],
+    "desc": "modify da/foo",
+    "node": "8d7c456572acf3557e8ed8a07286b10c408bcec5",
+    "parents": [
+      "f8bbb9024b10f93cdbb8d940337398291d40dea8"
+    ],
+    "phase": "public",
+    "tags": [],
+    "user": "test"
+  }
+
+changeset/{revision} shows branches
+
+  $ request json-rev/6ab967a8ab34
+  200 Script output follows
+  
+  {
+    "bookmarks": [],
+    "branch": "test-branch",
+    "date": [
+      0.0,
+      0
+    ],
+    "desc": "create test branch",
+    "node": "6ab967a8ab3489227a83f80e920faa039a71819f",
+    "parents": [
+      "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e"
+    ],
+    "phase": "draft",
+    "tags": [],
+    "user": "test"
+  }
+
+manifest/{revision}/{path} shows info about a directory at a revision
+
+  $ request json-manifest/06e557f3edf6/
+  200 Script output follows
+  
+  {
+    "abspath": "/",
+    "bookmarks": [],
+    "directories": [
+      {
+        "abspath": "/da",
+        "basename": "da",
+        "emptydirs": ""
+      }
+    ],
+    "files": [
+      {
+        "abspath": "foo",
+        "basename": "foo",
+        "date": [
+          0.0,
+          0
+        ],
+        "flags": "",
+        "size": 4
+      }
+    ],
+    "node": "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e",
+    "tags": []
+  }
+
+tags/ shows tags info
+
+  $ request json-tags
+  200 Script output follows
+  
+  {
+    "node": "cc725e08502a79dd1eda913760fbe06ed7a9abc7",
+    "tags": [
+      {
+        "date": [
+          0.0,
+          0
+        ],
+        "node": "f2890a05fea49bfaf9fb27ed5490894eba32da78",
+        "tag": "tag2"
+      },
+      {
+        "date": [
+          0.0,
+          0
+        ],
+        "node": "78896eb0e102174ce9278438a95e12543e4367a7",
+        "tag": "tag1"
+      }
+    ]
+  }
+
+bookmarks/ shows bookmarks info
+
+  $ request json-bookmarks
+  200 Script output follows
+  
+  {
+    "bookmarks": [
+      {
+        "bookmark": "bookmark1",
+        "date": [
+          0.0,
+          0
+        ],
+        "node": "8d7c456572acf3557e8ed8a07286b10c408bcec5"
+      },
+      {
+        "bookmark": "bookmark2",
+        "date": [
+          0.0,
+          0
+        ],
+        "node": "ceed296fe500c3fac9541e31dad860cb49c89e45"
+      }
+    ],
+    "node": "cc725e08502a79dd1eda913760fbe06ed7a9abc7"
+  }
+
+branches/ shows branches info
+
+  $ request json-branches
+  200 Script output follows
+  
+  {
+    "branches": [
+      {
+        "branch": "default",
+        "date": [
+          0.0,
+          0
+        ],
+        "node": "cc725e08502a79dd1eda913760fbe06ed7a9abc7",
+        "status": "open"
+      },
+      {
+        "branch": "test-branch",
+        "date": [
+          0.0,
+          0
+        ],
+        "node": "ed66c30e87eb65337c05a4229efaa5f1d5285a90",
+        "status": "inactive"
+      }
+    ]
+  }
+
+summary/ shows a summary of repository state
+
+  $ request json-summary
+  200 Script output follows
+  
+  "not yet implemented"
+
+filediff/{revision}/{path} shows changes to a file in a revision
+
+  $ request json-diff/f8bbb9024b10/foo
+  200 Script output follows
+  
+  {
+    "author": "test",
+    "children": [],
+    "date": [
+      0.0,
+      0
+    ],
+    "desc": "modify foo",
+    "diff": [
+      {
+        "blockno": 1,
+        "lines": [
+          {
+            "l": "--- a/foo\tThu Jan 01 00:00:00 1970 +0000\n",
+            "n": 1,
+            "t": "-"
+          },
+          {
+            "l": "+++ b/foo\tThu Jan 01 00:00:00 1970 +0000\n",
+            "n": 2,
+            "t": "+"
+          },
+          {
+            "l": "@@ -1,1 +1,1 @@\n",
+            "n": 3,
+            "t": "@"
+          },
+          {
+            "l": "-foo\n",
+            "n": 4,
+            "t": "-"
+          },
+          {
+            "l": "+bar\n",
+            "n": 5,
+            "t": "+"
+          }
+        ]
+      }
+    ],
+    "node": "f8bbb9024b10f93cdbb8d940337398291d40dea8",
+    "parents": [
+      "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e"
+    ],
+    "path": "foo"
+  }
+
+comparison/{revision}/{path} shows the before and after content of a file in a revision
+
+  $ request json-comparison/f8bbb9024b10/foo
+  200 Script output follows
+  
+  {
+    "author": "test",
+    "children": [],
+    "comparison": [
+      {
+        "lines": [
+          {
+            "ll": "foo",
+            "ln": 1,
+            "rl": "bar",
+            "rn": 1,
+            "t": "replace"
+          }
+        ]
+      }
+    ],
+    "date": [
+      0.0,
+      0
+    ],
+    "desc": "modify foo",
+    "leftnode": "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e",
+    "node": "f8bbb9024b10f93cdbb8d940337398291d40dea8",
+    "parents": [
+      "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e"
+    ],
+    "path": "foo",
+    "rightnode": "f8bbb9024b10f93cdbb8d940337398291d40dea8"
+  }
+
+annotate/{revision}/{path} shows annotations for each line
+
+  $ request json-annotate/f8bbb9024b10/foo
+  200 Script output follows
+  
+  {
+    "abspath": "foo",
+    "annotate": [
+      {
+        "abspath": "foo",
+        "author": "test",
+        "desc": "modify foo",
+        "line": "bar\n",
+        "lineno": 1,
+        "node": "f8bbb9024b10f93cdbb8d940337398291d40dea8",
+        "revdate": [
+          0.0,
+          0
+        ],
+        "targetline": 1
+      }
+    ],
+    "author": "test",
+    "children": [],
+    "date": [
+      0.0,
+      0
+    ],
+    "desc": "modify foo",
+    "node": "f8bbb9024b10f93cdbb8d940337398291d40dea8",
+    "parents": [
+      "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e"
+    ],
+    "permissions": ""
+  }
+
+filelog/{revision}/{path} shows history of a single file
+
+  $ request json-filelog/f8bbb9024b10/foo
+  200 Script output follows
+  
+  "not yet implemented"
+
+(archive/ doesn't use templating, so ignore it)
+
+(static/ doesn't use templating, so ignore it)
+
+graph/ shows information that can be used to render a graph of the DAG
+
+  $ request json-graph
+  200 Script output follows
+  
+  "not yet implemented"
+
+help/ shows help topics
+
+  $ request json-help
+  200 Script output follows
+  
+  {
+    "earlycommands": [
+      {
+        "summary": "add the specified files on the next commit",
+        "topic": "add"
+      },
+      {
+        "summary": "show changeset information by line for each file",
+        "topic": "annotate"
+      },
+      {
+        "summary": "make a copy of an existing repository",
+        "topic": "clone"
+      },
+      {
+        "summary": "commit the specified files or all outstanding changes",
+        "topic": "commit"
+      },
+      {
+        "summary": "diff repository (or selected files)",
+        "topic": "diff"
+      },
+      {
+        "summary": "dump the header and diffs for one or more changesets",
+        "topic": "export"
+      },
+      {
+        "summary": "forget the specified files on the next commit",
+        "topic": "forget"
+      },
+      {
+        "summary": "create a new repository in the given directory",
+        "topic": "init"
+      },
+      {
+        "summary": "show revision history of entire repository or files",
+        "topic": "log"
+      },
+      {
+        "summary": "merge another revision into working directory",
+        "topic": "merge"
+      },
+      {
+        "summary": "pull changes from the specified source",
+        "topic": "pull"
+      },
+      {
+        "summary": "push changes to the specified destination",
+        "topic": "push"
+      },
+      {
+        "summary": "remove the specified files on the next commit",
+        "topic": "remove"
+      },
+      {
+        "summary": "start stand-alone webserver",
+        "topic": "serve"
+      },
+      {
+        "summary": "show changed files in the working directory",
+        "topic": "status"
+      },
+      {
+        "summary": "summarize working directory state",
+        "topic": "summary"
+      },
+      {
+        "summary": "update working directory (or switch revisions)",
+        "topic": "update"
+      }
+    ],
+    "othercommands": [
+      {
+        "summary": "add all new files, delete all missing files",
+        "topic": "addremove"
+      },
+      {
+        "summary": "create an unversioned archive of a repository revision",
+        "topic": "archive"
+      },
+      {
+        "summary": "reverse effect of earlier changeset",
+        "topic": "backout"
+      },
+      {
+        "summary": "subdivision search of changesets",
+        "topic": "bisect"
+      },
+      {
+        "summary": "create a new bookmark or list existing bookmarks",
+        "topic": "bookmarks"
+      },
+      {
+        "summary": "set or show the current branch name",
+        "topic": "branch"
+      },
+      {
+        "summary": "list repository named branches",
+        "topic": "branches"
+      },
+      {
+        "summary": "create a changegroup file",
+        "topic": "bundle"
+      },
+      {
+        "summary": "output the current or given revision of files",
+        "topic": "cat"
+      },
+      {
+        "summary": "show combined config settings from all hgrc files",
+        "topic": "config"
+      },
+      {
+        "summary": "mark files as copied for the next commit",
+        "topic": "copy"
+      },
+      {
+        "summary": "list tracked files",
+        "topic": "files"
+      },
+      {
+        "summary": "copy changes from other branches onto the current branch",
+        "topic": "graft"
+      },
+      {
+        "summary": "search for a pattern in specified files and revisions",
+        "topic": "grep"
+      },
+      {
+        "summary": "show branch heads",
+        "topic": "heads"
+      },
+      {
+        "summary": "show help for a given topic or a help overview",
+        "topic": "help"
+      },
+      {
+        "summary": "identify the working directory or specified revision",
+        "topic": "identify"
+      },
+      {
+        "summary": "import an ordered set of patches",
+        "topic": "import"
+      },
+      {
+        "summary": "show new changesets found in source",
+        "topic": "incoming"
+      },
+      {
+        "summary": "output the current or given revision of the project manifest",
+        "topic": "manifest"
+      },
+      {
+        "summary": "show changesets not found in the destination",
+        "topic": "outgoing"
+      },
+      {
+        "summary": "show aliases for remote repositories",
+        "topic": "paths"
+      },
+      {
+        "summary": "set or show the current phase name",
+        "topic": "phase"
+      },
+      {
+        "summary": "roll back an interrupted transaction",
+        "topic": "recover"
+      },
+      {
+        "summary": "rename files; equivalent of copy + remove",
+        "topic": "rename"
+      },
+      {
+        "summary": "redo merges or set/view the merge status of files",
+        "topic": "resolve"
+      },
+      {
+        "summary": "restore files to their checkout state",
+        "topic": "revert"
+      },
+      {
+        "summary": "print the root (top) of the current working directory",
+        "topic": "root"
+      },
+      {
+        "summary": "add one or more tags for the current or given revision",
+        "topic": "tag"
+      },
+      {
+        "summary": "list repository tags",
+        "topic": "tags"
+      },
+      {
+        "summary": "apply one or more changegroup files",
+        "topic": "unbundle"
+      },
+      {
+        "summary": "verify the integrity of the repository",
+        "topic": "verify"
+      },
+      {
+        "summary": "output version and copyright information",
+        "topic": "version"
+      }
+    ],
+    "topics": [
+      {
+        "summary": "Configuration Files",
+        "topic": "config"
+      },
+      {
+        "summary": "Date Formats",
+        "topic": "dates"
+      },
+      {
+        "summary": "Diff Formats",
+        "topic": "diffs"
+      },
+      {
+        "summary": "Environment Variables",
+        "topic": "environment"
+      },
+      {
+        "summary": "Using Additional Features",
+        "topic": "extensions"
+      },
+      {
+        "summary": "Specifying File Sets",
+        "topic": "filesets"
+      },
+      {
+        "summary": "Glossary",
+        "topic": "glossary"
+      },
+      {
+        "summary": "Syntax for Mercurial Ignore Files",
+        "topic": "hgignore"
+      },
+      {
+        "summary": "Configuring hgweb",
+        "topic": "hgweb"
+      },
+      {
+        "summary": "Merge Tools",
+        "topic": "merge-tools"
+      },
+      {
+        "summary": "Specifying Multiple Revisions",
+        "topic": "multirevs"
+      },
+      {
+        "summary": "File Name Patterns",
+        "topic": "patterns"
+      },
+      {
+        "summary": "Working with Phases",
+        "topic": "phases"
+      },
+      {
+        "summary": "Specifying Single Revisions",
+        "topic": "revisions"
+      },
+      {
+        "summary": "Specifying Revision Sets",
+        "topic": "revsets"
+      },
+      {
+        "summary": "Subrepositories",
+        "topic": "subrepos"
+      },
+      {
+        "summary": "Template Usage",
+        "topic": "templating"
+      },
+      {
+        "summary": "URL Paths",
+        "topic": "urls"
+      }
+    ]
+  }
+
+help/{topic} shows an individual help topic
+
+  $ request json-help/phases
+  200 Script output follows
+  
+  {
+    "rawdoc": "Working with Phases\n*", (glob)
+    "topic": "phases"
+  }
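
The new test above exercises hgweb's JSON templates end to end. As a rough illustration of how the same endpoints could be consumed outside the test harness, a minimal Python client sketch follows; the port and helper names are assumptions, while the endpoint names and JSON keys are the ones shown in the expected output above:

    import json
    import urllib.request

    BASE = "http://localhost:8000"  # assumes a repo served with `hg serve -p 8000`

    def get(path):
        # hgweb renders the "json-" prefixed commands with the JSON templates
        with urllib.request.urlopen("%s/%s" % (BASE, path)) as resp:
            return json.loads(resp.read().decode("utf-8"))

    tip = get("json-rev")                  # same data as the `request json-rev` block above
    print(tip["node"], tip["branch"], tip["desc"])

    for tag in get("json-tags")["tags"]:   # cf. the json-tags block above
        print(tag["tag"], "->", tag["node"])

Note that the endpoints still marked "not yet implemented" above (json-file on a file path, json-summary, json-filelog, json-graph) return a bare string rather than an object, so a client along these lines would need to handle that case.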
--- a/tests/test-hgweb-removed.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-hgweb-removed.t	Thu Apr 16 20:57:51 2015 -0500
@@ -78,7 +78,8 @@
   </tr>
   <tr>
    <th class="date">date</th>
-   <td class="date age">Thu, 01 Jan 1970 00:00:00 +0000</td></tr>
+   <td class="date age">Thu, 01 Jan 1970 00:00:00 +0000</td>
+  </tr>
   <tr>
    <th class="author">parents</th>
    <td class="author"><a href="/rev/cb9a9f314b8b">cb9a9f314b8b</a> </td>
@@ -99,8 +100,7 @@
       <a id="diffstatexpand" href="javascript:toggleDiffstat()">[<tt>+</tt>]</a>
       <div id="diffstatdetails" style="display:none;">
         <a href="javascript:toggleDiffstat()">[<tt>-</tt>]</a>
-        <p></p>
-        <table class="stripes2">  <tr>
+        <table class="diffstat-table stripes2">  <tr>
       <td class="diffstat-file"><a href="#l1.1">a</a></td>
       <td class="diffstat-total" align="right">1</td>
       <td class="diffstat-graph">
--- a/tests/test-hgweb.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-hgweb.t	Thu Apr 16 20:57:51 2015 -0500
@@ -272,11 +272,13 @@
   </form>
   
   <table class="bigtable">
+  <thead>
   <tr>
     <th class="name">name</th>
     <th class="size">size</th>
     <th class="permissions">permissions</th>
   </tr>
+  </thead>
   <tbody class="stripes2">
   <tr class="fileline">
     <td class="name"><a href="/file/2ef0ac749a14/">[up]</a></td>
--- a/tests/test-hgwebdir.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-hgwebdir.t	Thu Apr 16 20:57:51 2015 -0500
@@ -201,6 +201,7 @@
   <h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
   
   <table class="bigtable">
+      <thead>
       <tr>
           <th><a href="?sort=name">Name</a></th>
           <th><a href="?sort=description">Description</a></th>
@@ -209,6 +210,7 @@
           <th>&nbsp;</th>
           <th>&nbsp;</th>
       </tr>
+      </thead>
       <tbody class="stripes2">
       
   <tr>
@@ -699,6 +701,7 @@
   <h2 class="breadcrumb"><a href="/">Mercurial</a> &gt; <a href="/t">t</a> </h2>
   
   <table class="bigtable">
+      <thead>
       <tr>
           <th><a href="?sort=name">Name</a></th>
           <th><a href="?sort=description">Description</a></th>
@@ -707,6 +710,7 @@
           <th>&nbsp;</th>
           <th>&nbsp;</th>
       </tr>
+      </thead>
       <tbody class="stripes2">
       
   <tr>
@@ -1128,6 +1132,7 @@
   <h2 class="breadcrumb"><a href="/">Mercurial</a> </h2>
   
   <table class="bigtable">
+      <thead>
       <tr>
           <th><a href="?sort=name">Name</a></th>
           <th><a href="?sort=description">Description</a></th>
@@ -1136,6 +1141,7 @@
           <th>&nbsp;</th>
           <th>&nbsp;</th>
       </tr>
+      </thead>
       <tbody class="stripes2">
       
       </tbody>
--- a/tests/test-highlight.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-highlight.t	Thu Apr 16 20:57:51 2015 -0500
@@ -268,10 +268,12 @@
   
   <div class="overflow">
   <table class="bigtable">
+  <thead>
   <tr>
    <th class="annotate">rev</th>
    <th class="line">&nbsp;&nbsp;line source</th>
   </tr>
+  </thead>
   <tbody class="stripes2">
     
   <tr id="l1">
--- a/tests/test-histedit-arguments.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-histedit-arguments.t	Thu Apr 16 20:57:51 2015 -0500
@@ -103,6 +103,15 @@
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg up --quiet
 
+Test config-specified default
+-----------------------------
+
+  $ HGEDITOR=cat hg histedit --config "histedit.defaultrev=only(.) - ::eb57da33312f" --commands - << EOF
+  > pick c8e68270e35a 3 four
+  > pick 08d98a8350f3 4 five
+  > EOF
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
 Run on a revision that is not a descendant of the initial parent
 --------------------------------------------------------------------
 
@@ -111,6 +120,13 @@
 Mercurial earlier than 2.7 by renaming ".hg/histedit-state"
 temporarily.
 
+  $ hg log -G -T '{rev} {shortest(node)} {desc}\n' -r 2::
+  @  4 08d9 five
+  |
+  o  3 c8e6 four
+  |
+  o  2 eb57 three
+  |
   $ HGEDITOR=cat hg histedit -r 4 --commands - << EOF
   > edit 08d98a8350f3 4 five
   > EOF
@@ -122,15 +138,23 @@
 
   $ mv .hg/histedit-state .hg/histedit-state.back
   $ hg update --quiet --clean 2
+  $ echo alpha >> alpha
   $ mv .hg/histedit-state.back .hg/histedit-state
 
   $ hg histedit --continue
-  abort: c8e68270e35a is not an ancestor of working directory
-  (use "histedit --abort" to clear broken state)
-  [255]
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  saved backup bundle to $TESTTMP/foo/.hg/strip-backup/08d98a8350f3-02594089-backup.hg (glob)
+  $ hg log -G -T '{rev} {shortest(node)} {desc}\n' -r 2::
+  @  4 f5ed five
+  |
+  | o  3 c8e6 four
+  |/
+  o  2 eb57 three
+  |
 
-  $ hg histedit --abort
-  $ hg update --quiet --clean
+  $ hg unbundle -q $TESTTMP/foo/.hg/strip-backup/08d98a8350f3-02594089-backup.hg
+  $ hg strip -q -r f5ed --config extensions.strip=
+  $ hg up -q 08d98a8350f3
 
 Test that missing revisions are detected
 ---------------------------------------
--- a/tests/test-histedit-bookmark-motion.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-histedit-bookmark-motion.t	Thu Apr 16 20:57:51 2015 -0500
@@ -92,7 +92,7 @@
   histedit: moving bookmarks two from 177f92b77385 to b346ab9a313d
   histedit: moving bookmarks will-move-backwards from d2ae7f538514 to cb9a9f314b8b
   saved backup bundle to $TESTTMP/r/.hg/strip-backup/d2ae7f538514-48787b8d-backup.hg (glob)
-  saved backup bundle to $TESTTMP/r/.hg/strip-backup/96e494a2d553-60cea58b-backup.hg (glob)
+  saved backup bundle to $TESTTMP/r/.hg/strip-backup/96e494a2d553-3c6c5d92-backup.hg (glob)
   $ hg log --graph
   @  changeset:   3:cacdfd884a93
   |  bookmark:    five
--- a/tests/test-histedit-drop.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-histedit-drop.t	Thu Apr 16 20:57:51 2015 -0500
@@ -96,7 +96,6 @@
 Check histedit_source
 
   $ hg log --debug --rev f518305ce889
-  invalid branchheads cache (visible): tip differs
   changeset:   4:f518305ce889c07cb5bd05522176d75590ef3324
   tag:         tip
   phase:       draft
--- a/tests/test-histedit-edit.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-histedit-edit.t	Thu Apr 16 20:57:51 2015 -0500
@@ -3,13 +3,14 @@
   $ cat >> $HGRCPATH <<EOF
   > [extensions]
   > histedit=
+  > strip=
   > EOF
 
   $ initrepo ()
   > {
   >     hg init r
   >     cd r
-  >     for x in a b c d e f ; do
+  >     for x in a b c d e f g; do
   >         echo $x > $x
   >         hg add $x
   >         hg ci -m $x
@@ -20,10 +21,15 @@
 
 log before edit
   $ hg log --graph
-  @  changeset:   5:652413bf663e
+  @  changeset:   6:3c6a8ed2ebe8
   |  tag:         tip
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     g
+  |
+  o  changeset:   5:652413bf663e
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     f
   |
   o  changeset:   4:e860deea161a
@@ -58,11 +64,19 @@
   > pick 055a42cdd887 d
   > edit e860deea161a e
   > pick 652413bf663e f
+  > pick 3c6a8ed2ebe8 g
   > EOF
-  0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+  0 files updated, 0 files merged, 3 files removed, 0 files unresolved
   Make changes as needed, you may commit or record as needed now.
   When you are finished, run hg histedit --continue to resume.
 
+edit the plan
+  $ hg histedit --edit-plan --commands - 2>&1 << EOF
+  > edit e860deea161a e
+  > pick 652413bf663e f
+  > drop 3c6a8ed2ebe8 g
+  > EOF
+
 Go at a random point and try to continue
 
   $ hg id -n
@@ -72,10 +86,22 @@
   (use 'hg histedit --continue' or 'hg histedit --abort')
   [255]
 
+Try to delete necessary commit
+  $ hg strip -r 652413b
+  abort: histedit in progress, can't strip 652413bf663e
+  [255]
+
 commit, then edit the revision
   $ hg ci -m 'wat'
   created new head
   $ echo a > e
+
+qnew should fail while we're in the middle of the edit step
+
+  $ hg --config extensions.mq= qnew please-fail
+  abort: histedit in progress
+  (use 'hg histedit --continue' or 'hg histedit --abort')
+  [255]
   $ HGEDITOR='echo foobaz > ' hg histedit --continue 2>&1 | fixbundle
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -121,6 +147,34 @@
   $ hg cat e
   a
 
+Stripping necessary commits should not break --abort
+
+  $ hg histedit 1a60820cd1f6 --commands - 2>&1 << EOF| fixbundle
+  > edit 1a60820cd1f6 wat
+  > pick a5e1ba2f7afb foobaz
+  > pick b5f70786f9b0 g
+  > EOF
+  0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+  Make changes as needed, you may commit or record as needed now.
+  When you are finished, run hg histedit --continue to resume.
+
+  $ mv .hg/histedit-state .hg/histedit-state.bak
+  $ hg strip -q -r b5f70786f9b0
+  $ mv .hg/histedit-state.bak .hg/histedit-state
+  $ hg histedit --abort
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 1 changes to 3 files
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ hg log -r .
+  changeset:   6:b5f70786f9b0
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     f
+  
+
 check histedit_source
 
   $ hg log --debug --rev 5
--- a/tests/test-histedit-fold-non-commute.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-histedit-fold-non-commute.t	Thu Apr 16 20:57:51 2015 -0500
@@ -132,6 +132,7 @@
   $ hg resolve --mark e
   (no more unresolved files)
   $ hg histedit --continue 2>&1 | fixbundle
+  7b4e2f4b7bcd: empty changeset
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
 
@@ -274,6 +275,7 @@
   $ hg resolve --mark e
   (no more unresolved files)
   $ hg histedit --continue 2>&1 | fixbundle
+  7b4e2f4b7bcd: empty changeset
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
 
--- a/tests/test-histedit-fold.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-histedit-fold.t	Thu Apr 16 20:57:51 2015 -0500
@@ -307,6 +307,7 @@
   $ hg resolve --mark file
   (no more unresolved files)
   $ hg histedit --continue
+  251d831eeec5: empty changeset
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   saved backup bundle to $TESTTMP/*-backup.hg (glob)
   $ hg logt --graph
--- a/tests/test-histedit-non-commute-abort.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-histedit-non-commute-abort.t	Thu Apr 16 20:57:51 2015 -0500
@@ -70,8 +70,6 @@
   > pick 652413bf663e f
   > EOF
   0 files updated, 0 files merged, 2 files removed, 0 files unresolved
-  remote changed e which local deleted
-  use (c)hanged version or leave (d)eleted? c
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   merging e
   warning: conflicts during merge.
--- a/tests/test-histedit-non-commute.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-histedit-non-commute.t	Thu Apr 16 20:57:51 2015 -0500
@@ -170,6 +170,7 @@
   $ hg resolve --mark e
   (no more unresolved files)
   $ hg histedit --continue 2>&1 | fixbundle
+  7b4e2f4b7bcd: empty changeset
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
 
@@ -253,6 +254,7 @@
   $ hg resolve --mark e
   (no more unresolved files)
   $ hg histedit --continue 2>&1 | fixbundle
+  7b4e2f4b7bcd: empty changeset
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
 
--- a/tests/test-histedit-obsolete.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-histedit-obsolete.t	Thu Apr 16 20:57:51 2015 -0500
@@ -64,7 +64,7 @@
   > fold e860deea161a 4 e
   > pick 652413bf663e 5 f
   > EOF
-  saved backup bundle to $TESTTMP/base/.hg/strip-backup/96e494a2d553-60cea58b-backup.hg (glob)
+  saved backup bundle to $TESTTMP/base/.hg/strip-backup/96e494a2d553-3c6c5d92-backup.hg (glob)
   $ hg log --graph --hidden
   @  8:cacdfd884a93 f
   |
@@ -427,9 +427,9 @@
   0 files updated, 0 files merged, 2 files removed, 0 files unresolved
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  saved backup bundle to $TESTTMP/folding/.hg/strip-backup/58019c66f35f-be4b3835-backup.hg (glob)
-  saved backup bundle to $TESTTMP/folding/.hg/strip-backup/83d1858e070b-08306a6b-backup.hg (glob)
-  saved backup bundle to $TESTTMP/folding/.hg/strip-backup/859969f5ed7e-86c99c41-backup.hg (glob)
+  saved backup bundle to $TESTTMP/folding/.hg/strip-backup/58019c66f35f-96092fce-backup.hg (glob)
+  saved backup bundle to $TESTTMP/folding/.hg/strip-backup/83d1858e070b-f3469cf8-backup.hg (glob)
+  saved backup bundle to $TESTTMP/folding/.hg/strip-backup/859969f5ed7e-d89a19d7-backup.hg (glob)
   $ hg log -G
   @  19:f9daec13fb98 (secret) i
   |
--- a/tests/test-hook.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-hook.t	Thu Apr 16 20:57:51 2015 -0500
@@ -12,13 +12,20 @@
   > pre-identify = python "$TESTDIR/printenv.py" pre-identify 1
   > pre-cat = python "$TESTDIR/printenv.py" pre-cat
   > post-cat = python "$TESTDIR/printenv.py" post-cat
+  > pretxnopen = sh -c "HG_LOCAL= HG_TAG= python \"$TESTDIR/printenv.py\" pretxnopen"
+  > pretxnclose = sh -c "HG_LOCAL= HG_TAG= python \"$TESTDIR/printenv.py\" pretxnclose"
+  > txnclose = sh -c "HG_LOCAL= HG_TAG= python \"$TESTDIR/printenv.py\" txnclose"
+  > txnabort = sh -c "HG_LOCAL= HG_TAG= python \"$TESTDIR/printenv.py\" txnabort"
   > EOF
   $ echo a > a
   $ hg add a
   $ hg commit -m a
   precommit hook: HG_PARENT1=0000000000000000000000000000000000000000
+  pretxnopen hook: HG_TXNNAME=commit
   pretxncommit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000 HG_PENDING=$TESTTMP/a
   0:cb9a9f314b8b
+  pretxnclose hook: HG_PENDING=$TESTTMP/a HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_XNNAME=commit (glob)
+  txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
   commit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
   commit.b hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
 
@@ -42,8 +49,11 @@
   $ echo b >> a
   $ hg commit -m a1 -d "1 0"
   precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
+  pretxnopen hook: HG_TXNNAME=commit
   pretxncommit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
   1:ab228980c14d
+  pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_XNNAME=commit (glob)
+  txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
   commit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
   commit.b hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
   $ hg update -C 0
@@ -52,8 +62,11 @@
   $ hg add b
   $ hg commit -m b -d '1 0'
   precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
+  pretxnopen hook: HG_TXNNAME=commit
   pretxncommit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
   2:ee9deb46ab31
+  pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_XNNAME=commit (glob)
+  txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
   commit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
   commit.b hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
   created new head
@@ -62,8 +75,11 @@
   (branch merge, don't forget to commit)
   $ hg commit -m merge -d '2 0'
   precommit hook: HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
+  pretxnopen hook: HG_TXNNAME=commit
   pretxncommit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd HG_PENDING=$TESTTMP/a
   3:07f3376c1e65
+  pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_XNNAME=commit (glob)
+  txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
   commit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
   commit.b hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
 
@@ -82,15 +98,15 @@
   $ hg pull ../a
   pulling from ../a
   searching for changes
-  prechangegroup hook: HG_SOURCE=pull HG_URL=file:$TESTTMP/a
+  prechangegroup hook: HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
   adding changesets
   adding manifests
   adding file changes
   added 3 changesets with 2 changes to 2 files
-  changegroup hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_URL=file:$TESTTMP/a
-  incoming hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_URL=file:$TESTTMP/a
-  incoming hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_SOURCE=pull HG_URL=file:$TESTTMP/a
-  incoming hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_SOURCE=pull HG_URL=file:$TESTTMP/a
+  changegroup hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
+  incoming hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
+  incoming hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
+  incoming hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
   (run 'hg update' to get a working copy)
 
 tag hooks can see env vars
@@ -103,9 +119,12 @@
   $ hg tag -d '3 0' a
   pretag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
   precommit hook: HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
+  pretxnopen hook: HG_TXNNAME=commit
   pretxncommit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PENDING=$TESTTMP/a
   4:539e4b31b6dc
+  pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_XNNAME=commit (glob)
   tag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
+  txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
   commit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
   commit.b hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
   $ hg tag -l la
@@ -137,11 +156,13 @@
   4:539e4b31b6dc
   $ hg commit -m 'fail' -d '4 0'
   precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
+  pretxnopen hook: HG_TXNNAME=commit
   pretxncommit hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
   5:6f611f8018c1
   5:6f611f8018c1
   pretxncommit.forbid hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
   transaction abort!
+  txnabort hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
   rollback completed
   abort: pretxncommit.forbid1 hook exited with status 1
   [255]
@@ -198,6 +219,9 @@
   pushing to ../a
   searching for changes
   no changes found
+  pretxnopen hook: HG_TXNNAME=bookmarks
+  pretxnclose hook: HG_BOOKMARK_MOVED=1 HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_XNNAME=bookmarks (glob)
+  txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmarks (glob)
   pushkey hook: HG_KEY=foo HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000 HG_RET=1
   exporting bookmark foo
   [1]
@@ -260,7 +284,7 @@
   $ hg pull ../a
   pulling from ../a
   searching for changes
-  prechangegroup.forbid hook: HG_SOURCE=pull HG_URL=file:$TESTTMP/a
+  prechangegroup.forbid hook: HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
   abort: prechangegroup.forbid hook exited with status 1
   [255]
 
@@ -280,7 +304,7 @@
   adding file changes
   added 1 changesets with 1 changes to 1 files
   4:539e4b31b6dc
-  pretxnchangegroup.forbid hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/b HG_SOURCE=pull HG_URL=file:$TESTTMP/a
+  pretxnchangegroup.forbid hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/b HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
   transaction abort!
   rollback completed
   abort: pretxnchangegroup.forbid1 hook exited with status 1
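
The test-hook.t hunks above register the new transaction hooks (pretxnopen, pretxnclose, txnclose, txnabort) as shell commands running printenv.py, and the expected output shows the HG_TXNNAME/HG_TXNID variables they receive. For comparison, a hedged sketch of an in-process Python hook doing the same kind of logging; the argument names are assumed to mirror the HG_* variables above (lowercased, without the prefix), and the file and section names are illustrative:

    # myhooks.py -- wired up via something like:
    #   [hooks]
    #   pretxnclose.audit = python:/path/to/myhooks.py:audittxn
    def audittxn(ui, repo, hooktype, **kwargs):
        # assumption: in-process hooks see the HG_TXNNAME/HG_TXNID values from
        # the expected output above as the 'txnname'/'txnid' keyword arguments
        ui.write("%s: txn %s (%s)\n" % (hooktype,
                                        kwargs.get("txnname", "?"),
                                        kwargs.get("txnid", "?")))
        return False  # falsy means success; a truthy return would abort the transaction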
--- a/tests/test-http.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-http.t	Thu Apr 16 20:57:51 2015 -0500
@@ -127,7 +127,7 @@
   adding manifests
   adding file changes
   added 1 changesets with 1 changes to 1 files
-  changegroup hook: HG_NODE=5fed3813f7f5e1824344fdc9cf8f63bb662c292d HG_SOURCE=pull HG_URL=http://localhost:$HGPORT1/
+  changegroup hook: HG_NODE=5fed3813f7f5e1824344fdc9cf8f63bb662c292d HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=http://localhost:$HGPORT1/ (glob)
   (run 'hg update' to get a working copy)
   $ cd ..
 
--- a/tests/test-https.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-https.t	Thu Apr 16 20:57:51 2015 -0500
@@ -119,12 +119,12 @@
 Apple's OpenSSL. This trick does not work with plain OpenSSL.
 
   $ DISABLEOSXDUMMYCERT=
-#if osx
+#if defaultcacerts
   $ hg clone https://localhost:$HGPORT/ copy-pull
   abort: error: *certificate verify failed* (glob)
   [255]
 
-  $ DISABLEOSXDUMMYCERT="--config=web.cacerts="
+  $ DISABLEOSXDUMMYCERT="--config=web.cacerts=!"
 #endif
 
 clone via pull
@@ -156,14 +156,14 @@
   $ echo '[hooks]' >> .hg/hgrc
   $ echo "changegroup = python \"$TESTDIR/printenv.py\" changegroup" >> .hg/hgrc
   $ hg pull $DISABLEOSXDUMMYCERT
+  pulling from https://localhost:$HGPORT/
   warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
-  pulling from https://localhost:$HGPORT/
   searching for changes
   adding changesets
   adding manifests
   adding file changes
   added 1 changesets with 1 changes to 1 files
-  changegroup hook: HG_NODE=5fed3813f7f5e1824344fdc9cf8f63bb662c292d HG_SOURCE=pull HG_URL=https://localhost:$HGPORT/
+  changegroup hook: HG_NODE=5fed3813f7f5e1824344fdc9cf8f63bb662c292d HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=https://localhost:$HGPORT/ (glob)
   (run 'hg update' to get a working copy)
   $ cd ..
 
@@ -188,28 +188,30 @@
   searching for changes
   no changes found
   $ P=`pwd` hg -R copy-pull pull --insecure
+  pulling from https://localhost:$HGPORT/
   warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
-  pulling from https://localhost:$HGPORT/
   searching for changes
   no changes found
 
 cacert mismatch
 
   $ hg -R copy-pull pull --config web.cacerts=pub.pem https://127.0.0.1:$HGPORT/
+  pulling from https://127.0.0.1:$HGPORT/
   abort: 127.0.0.1 certificate error: certificate is for localhost
   (configure hostfingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca or use --insecure to connect insecurely)
   [255]
   $ hg -R copy-pull pull --config web.cacerts=pub.pem https://127.0.0.1:$HGPORT/ --insecure
+  pulling from https://127.0.0.1:$HGPORT/
   warning: 127.0.0.1 certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
-  pulling from https://127.0.0.1:$HGPORT/
   searching for changes
   no changes found
   $ hg -R copy-pull pull --config web.cacerts=pub-other.pem
+  pulling from https://localhost:$HGPORT/
   abort: error: *certificate verify failed* (glob)
   [255]
   $ hg -R copy-pull pull --config web.cacerts=pub-other.pem --insecure
+  pulling from https://localhost:$HGPORT/
   warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
-  pulling from https://localhost:$HGPORT/
   searching for changes
   no changes found
 
@@ -218,6 +220,7 @@
   $ hg -R test serve -p $HGPORT1 -d --pid-file=hg1.pid --certificate=server-not-yet.pem
   $ cat hg1.pid >> $DAEMON_PIDS
   $ hg -R copy-pull pull --config web.cacerts=pub-not-yet.pem https://localhost:$HGPORT1/
+  pulling from https://localhost:$HGPORT1/
   abort: error: *certificate verify failed* (glob)
   [255]
 
@@ -226,6 +229,7 @@
   $ hg -R test serve -p $HGPORT2 -d --pid-file=hg2.pid --certificate=server-expired.pem
   $ cat hg2.pid >> $DAEMON_PIDS
   $ hg -R copy-pull pull --config web.cacerts=pub-expired.pem https://localhost:$HGPORT2/
+  pulling from https://localhost:$HGPORT2/
   abort: error: *certificate verify failed* (glob)
   [255]
 
@@ -236,7 +240,7 @@
   $ echo "127.0.0.1 = 914f1aff87249c09b6859b88b1906d30756491ca" >> copy-pull/.hg/hgrc
 
 - works without cacerts
-  $ hg -R copy-pull id https://localhost:$HGPORT/ --config web.cacerts=
+  $ hg -R copy-pull id https://localhost:$HGPORT/ --config web.cacerts=!
   5fed3813f7f5
 
 - fails when cert doesn't match hostname (port is ignored)
@@ -267,8 +271,8 @@
 Test unvalidated https through proxy
 
   $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull --insecure --traceback
+  pulling from https://localhost:$HGPORT/
   warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
-  pulling from https://localhost:$HGPORT/
   searching for changes
   no changes found
 
@@ -286,8 +290,10 @@
 Test https with cert problems through proxy
 
   $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull --config web.cacerts=pub-other.pem
+  pulling from https://localhost:$HGPORT/
   abort: error: *certificate verify failed* (glob)
   [255]
   $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull --config web.cacerts=pub-expired.pem https://localhost:$HGPORT2/
+  pulling from https://localhost:$HGPORT2/
   abort: error: *certificate verify failed* (glob)
   [255]
--- a/tests/test-import-bypass.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-import-bypass.t	Thu Apr 16 20:57:51 2015 -0500
@@ -104,6 +104,86 @@
   $ hg rollback
   repository tip rolled back to revision 1 (undo import)
 
+Test --strip with --bypass
+
+  $ mkdir -p dir/dir2
+  $ echo bb > dir/dir2/b
+  $ echo cc > dir/dir2/c
+  $ echo d > dir/d
+  $ hg ci -Am 'addabcd'
+  adding dir/d
+  adding dir/dir2/b
+  adding dir/dir2/c
+  $ shortlog
+  @  2:d805bc8236b6 test 0 0 - default - addabcd
+  |
+  | o  1:4e322f7ce8e3 test 0 0 - foo - changea
+  |/
+  o  0:07f494440405 test 0 0 - default - adda
+  
+  $ hg import --bypass --strip 2 --prefix dir/ - <<EOF
+  > # HG changeset patch
+  > # User test
+  > # Date 0 0
+  > # Branch foo
+  > changeabcd
+  > 
+  > diff --git a/foo/a b/foo/a
+  > new file mode 100644
+  > --- /dev/null
+  > +++ b/foo/a
+  > @@ -0,0 +1 @@
+  > +a
+  > diff --git a/foo/dir2/b b/foo/dir2/b2
+  > rename from foo/dir2/b
+  > rename to foo/dir2/b2
+  > diff --git a/foo/dir2/c b/foo/dir2/c
+  > --- a/foo/dir2/c
+  > +++ b/foo/dir2/c
+  > @@ -0,0 +1 @@
+  > +cc
+  > diff --git a/foo/d b/foo/d
+  > deleted file mode 100644
+  > --- a/foo/d
+  > +++ /dev/null
+  > @@ -1,1 +0,0 @@
+  > -d
+  > EOF
+  applying patch from stdin
+
+  $ shortlog
+  o  3:5bd46886ca3e test 0 0 - default - changeabcd
+  |
+  @  2:d805bc8236b6 test 0 0 - default - addabcd
+  |
+  | o  1:4e322f7ce8e3 test 0 0 - foo - changea
+  |/
+  o  0:07f494440405 test 0 0 - default - adda
+  
+  $ hg diff --change 3 --git
+  diff --git a/dir/a b/dir/a
+  new file mode 100644
+  --- /dev/null
+  +++ b/dir/a
+  @@ -0,0 +1,1 @@
+  +a
+  diff --git a/dir/d b/dir/d
+  deleted file mode 100644
+  --- a/dir/d
+  +++ /dev/null
+  @@ -1,1 +0,0 @@
+  -d
+  diff --git a/dir/dir2/b b/dir/dir2/b2
+  rename from dir/dir2/b
+  rename to dir/dir2/b2
+  diff --git a/dir/dir2/c b/dir/dir2/c
+  --- a/dir/dir2/c
+  +++ b/dir/dir2/c
+  @@ -1,1 +1,2 @@
+   cc
+  +cc
+  $ hg -q --config extensions.strip= strip .
+
 Test unsupported combinations
 
   $ hg import --bypass --no-commit ../test.diff
@@ -112,6 +192,9 @@
   $ hg import --bypass --similarity 50 ../test.diff
   abort: cannot use --similarity with --bypass
   [255]
+  $ hg import --exact --prefix dir/ ../test.diff
+  abort: cannot use --exact with --prefix
+  [255]
 
 Test commit editor
 (this also tests that editor is invoked, if the patch doesn't contain
--- a/tests/test-import-git.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-import-git.t	Thu Apr 16 20:57:51 2015 -0500
@@ -612,12 +612,114 @@
     a
   R a
 
-Renames, similarity and git diff
+Prefix with strip, renames, creates etc
 
   $ hg revert -aC
   undeleting a
   forgetting b
   $ rm b
+  $ mkdir -p dir/dir2
+  $ echo b > dir/dir2/b
+  $ echo c > dir/dir2/c
+  $ echo d > dir/d
+  $ hg ci -Am addbcd
+  adding dir/d
+  adding dir/dir2/b
+  adding dir/dir2/c
+
+prefix '.' is the same as no prefix
+  $ hg import --no-commit --prefix . - <<EOF
+  > diff --git a/dir/a b/dir/a
+  > --- /dev/null
+  > +++ b/dir/a
+  > @@ -0,0 +1 @@
+  > +aaaa
+  > diff --git a/dir/d b/dir/d
+  > --- a/dir/d
+  > +++ b/dir/d
+  > @@ -1,1 +1,2 @@
+  >  d
+  > +dddd
+  > EOF
+  applying patch from stdin
+  $ cat dir/a
+  aaaa
+  $ cat dir/d
+  d
+  dddd
+  $ hg revert -aC
+  forgetting dir/a (glob)
+  reverting dir/d (glob)
+  $ rm dir/a
+
+prefix with default strip
+  $ hg import --no-commit --prefix dir/ - <<EOF
+  > diff --git a/a b/a
+  > --- /dev/null
+  > +++ b/a
+  > @@ -0,0 +1 @@
+  > +aaa
+  > diff --git a/d b/d
+  > --- a/d
+  > +++ b/d
+  > @@ -1,1 +1,2 @@
+  >  d
+  > +dd
+  > EOF
+  applying patch from stdin
+  $ cat dir/a
+  aaa
+  $ cat dir/d
+  d
+  dd
+  $ hg revert -aC
+  forgetting dir/a (glob)
+  reverting dir/d (glob)
+  $ rm dir/a
+(test that prefixes are relative to the cwd)
+  $ mkdir tmpdir
+  $ cd tmpdir
+  $ hg import --no-commit -p2 --prefix ../dir/ - <<EOF
+  > diff --git a/foo/a b/foo/a
+  > new file mode 100644
+  > --- /dev/null
+  > +++ b/foo/a
+  > @@ -0,0 +1 @@
+  > +a
+  > diff --git a/foo/dir2/b b/foo/dir2/b2
+  > rename from foo/dir2/b
+  > rename to foo/dir2/b2
+  > diff --git a/foo/dir2/c b/foo/dir2/c
+  > --- a/foo/dir2/c
+  > +++ b/foo/dir2/c
+  > @@ -0,0 +1 @@
+  > +cc
+  > diff --git a/foo/d b/foo/d
+  > deleted file mode 100644
+  > --- a/foo/d
+  > +++ /dev/null
+  > @@ -1,1 +0,0 @@
+  > -d
+  > EOF
+  applying patch from stdin
+  $ hg st --copies
+  M dir/dir2/c
+  A dir/a
+  A dir/dir2/b2
+    dir/dir2/b
+  R dir/d
+  R dir/dir2/b
+  $ cd ..
+
+Renames, similarity and git diff
+
+  $ hg revert -aC
+  forgetting dir/a (glob)
+  undeleting dir/d (glob)
+  undeleting dir/dir2/b (glob)
+  forgetting dir/dir2/b2 (glob)
+  reverting dir/dir2/c (glob)
+  $ rm dir/a dir/dir2/b2
   $ hg import --similarity 90 --no-commit - <<EOF
   > diff --git a/a b/b
   > rename from a
--- a/tests/test-import.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-import.t	Thu Apr 16 20:57:51 2015 -0500
@@ -670,6 +670,25 @@
   $ hg status
   $ cat a
   bb
+
+test --prefix
+
+  $ mkdir -p dir/dir2
+  $ echo b > dir/dir2/b
+  $ hg ci -Am b
+  adding dir/dir2/b
+  $ hg import -p2 --prefix dir - << EOF
+  > foobar
+  > --- drop1/drop2/dir2/b
+  > +++ drop1/drop2/dir2/b
+  > @@ -1,1 +1,1 @@
+  > -b
+  > +cc
+  > EOF
+  applying patch from stdin
+  $ hg status
+  $ cat dir/dir2/b
+  cc
   $ cd ..
 
 
--- a/tests/test-issue3084.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-issue3084.t	Thu Apr 16 20:57:51 2015 -0500
@@ -113,8 +113,6 @@
   $ echo "l" | hg merge --config ui.interactive=Yes
   remote turned local largefile foo into a normal file
   keep (l)argefile or use (n)ormal file? l
-  getting changed largefiles
-  0 largefiles updated, 0 removed
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
 
@@ -249,8 +247,6 @@
 
   $ hg up -Cqr large
   $ hg merge -r normal-id
-  getting changed largefiles
-  0 largefiles updated, 0 removed
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
   $ cat f
@@ -271,8 +267,6 @@
 
   $ hg up -Cqr large
   $ hg merge -r normal-same
-  getting changed largefiles
-  0 largefiles updated, 0 removed
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
   $ cat f
@@ -307,8 +301,6 @@
   $ hg merge -r normal2
   remote turned local largefile f into a normal file
   keep (l)argefile or use (n)ormal file? l
-  getting changed largefiles
-  0 largefiles updated, 0 removed
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
   $ cat f
@@ -372,8 +364,6 @@
   $ hg merge -r normal
   remote turned local largefile f into a normal file
   keep (l)argefile or use (n)ormal file? l
-  getting changed largefiles
-  0 largefiles updated, 0 removed
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
   $ cat f
--- a/tests/test-keyword.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-keyword.t	Thu Apr 16 20:57:51 2015 -0500
@@ -473,18 +473,24 @@
 
   $ hg -v record -l msg -d '12 2' r<<EOF
   > y
+  > y
   > EOF
   diff --git a/r b/r
   new file mode 100644
   examine changes to 'r'? [Ynesfdaq?] y
   
+  @@ -0,0 +1,1 @@
+  +$Id$
+  record this change to 'r'? [Ynesfdaq?] y
+  
+  resolving manifests
+  patching file r
   committing files:
   r
   committing manifest
   committing changelog
   committed changeset 3:82a2f715724d
   overwriting r expanding keywords
- - status call required for dirstate.normallookup() check
   $ hg status r
   $ hg --verbose rollback
   repository tip rolled back to revision 2 (undo commit)
@@ -501,11 +507,18 @@
   $ hg add i
   $ hg --verbose record -d '13 1' -m recignored<<EOF
   > y
+  > y
   > EOF
   diff --git a/i b/i
   new file mode 100644
   examine changes to 'i'? [Ynesfdaq?] y
   
+  @@ -0,0 +1,1 @@
+  +$Id$
+  record this change to 'i'? [Ynesfdaq?] y
+  
+  resolving manifests
+  patching file i
   committing files:
   i
   committing manifest
--- a/tests/test-largefiles-cache.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-largefiles-cache.t	Thu Apr 16 20:57:51 2015 -0500
@@ -136,7 +136,7 @@
 #endif
 
 Test issue 4053 (remove --after on a deleted, uncommitted file shouldn't say
-it is missing, but a remove on a nonexistant unknown file still should.  Same
+it is missing, but a remove on a nonexistent unknown file still should.  Same
 for a forget.)
 
   $ cd src
@@ -153,3 +153,29 @@
   ENOENT: * (glob)
   not removing z: file is already untracked
   [1]
+
+Largefiles are accessible from the share's store
+  $ cd ..
+  $ hg share -q src share_dst --config extensions.share=
+  $ hg -R share_dst update -r0
+  getting changed largefiles
+  1 largefiles updated, 0 removed
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+  $ echo modified > share_dst/large
+  $ hg -R share_dst ci -m modified
+  created new head
+
+Only dirstate is in the local store for the share, and the largefile is in the
+share source's local store.  Avoid the extra largefiles added in the unix
+conditional above.
+  $ hash=`hg -R share_dst cat share_dst/.hglf/large`
+  $ echo $hash
+  e2fb5f2139d086ded2cb600d5a91a196e76bf020
+
+  $ find share_dst/.hg/largefiles/* | sort
+  share_dst/.hg/largefiles/dirstate
+
+  $ find src/.hg/largefiles/* | egrep "(dirstate|$hash)" | sort
+  src/.hg/largefiles/dirstate
+  src/.hg/largefiles/e2fb5f2139d086ded2cb600d5a91a196e76bf020
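The 40-hex string read from the standin (share_dst/.hglf/large) above doubles
as the file name under src/.hg/largefiles/. Assuming it is the sha1 of the
largefile's contents, which is how largefiles names store entries, a small
sketch to recompute it (largefile_store_name is our own helper):

  import hashlib

  def largefile_store_name(path):
      # hash the file contents; the hex digest is the store file name
      with open(path, 'rb') as f:
          return hashlib.sha1(f.read()).hexdigest()
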
--- a/tests/test-largefiles-misc.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-largefiles-misc.t	Thu Apr 16 20:57:51 2015 -0500
@@ -248,7 +248,7 @@
   commit: 1 subrepos
   update: (current)
   $ hg ci -m "this commit should fail without -S"
-  abort: uncommitted changes in subrepo subrepo
+  abort: uncommitted changes in subrepository 'subrepo'
   (use --subrepos for recursive commit)
   [255]
 
@@ -336,6 +336,13 @@
   ../lf_subrepo_archive/subrepo
   ../lf_subrepo_archive/subrepo/large.txt
   ../lf_subrepo_archive/subrepo/normal.txt
+  $ cat ../lf_subrepo_archive/.hg_archival.txt
+  repo: 41bd42f10efa43698cc02052ea0977771cba506d
+  node: d56a95e6522858bc08a724c4fe2bdee066d1c30b
+  branch: default
+  latesttag: null
+  latesttagdistance: 4
+  changessincelatesttag: 4
 
 Test update with subrepos.
 
@@ -357,11 +364,17 @@
   $ hg update -C
   getting changed largefiles
   1 largefiles updated, 0 removed
-  getting changed largefiles
-  0 largefiles updated, 0 removed
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg status -S
 
+  $ hg forget -v subrepo/large.txt
+  removing subrepo/large.txt (glob)
+
+Test reverting a forgotten file
+  $ hg revert -R subrepo subrepo/large.txt
+  $ hg status -SA subrepo/large.txt
+  C subrepo/large.txt
+
   $ hg rm -v subrepo/large.txt
   removing subrepo/large.txt (glob)
   $ hg revert -R subrepo subrepo/large.txt
@@ -443,6 +456,10 @@
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     anotherlarge
   
+  $ hg --debug log -T '{rev}: {desc}\n' ../sub/anotherlarge
+  updated patterns: ['../.hglf/sub/../sub/anotherlarge', '../sub/anotherlarge']
+  1: anotherlarge
+
   $ hg log -G anotherlarge
   @  changeset:   1:9627a577c5e9
   |  tag:         tip
@@ -450,6 +467,30 @@
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     anotherlarge
   |
+
+  $ hg log glob:another*
+  changeset:   1:9627a577c5e9
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     anotherlarge
+  
+  $ hg --debug log -T '{rev}: {desc}\n' -G glob:another*
+  updated patterns: ['glob:../.hglf/sub/another*', 'glob:another*']
+  @  1: anotherlarge
+  |
+
+#if no-msys
+  $ hg --debug log -T '{rev}: {desc}\n' 'glob:../.hglf/sub/another*' # no-msys
+  updated patterns: ['glob:../.hglf/sub/another*']
+  1: anotherlarge
+
+  $ hg --debug log -G -T '{rev}: {desc}\n' 'glob:../.hglf/sub/another*' # no-msys
+  updated patterns: ['glob:../.hglf/sub/another*']
+  @  1: anotherlarge
+  |
+#endif
+
   $ echo more >> anotherlarge
   $ hg st .
   M anotherlarge
@@ -460,8 +501,33 @@
   ? sub/anotherlarge.orig
   $ cd ..
 
+Test glob logging from the root dir
+  $ hg log glob:**another*
+  changeset:   1:9627a577c5e9
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     anotherlarge
+  
+  $ hg log -G glob:**another*
+  @  changeset:   1:9627a577c5e9
+  |  tag:         tip
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     anotherlarge
+  |
+
   $ cd ..
 
+Log from outer space
+  $ hg --debug log -R addrm2 -T '{rev}: {desc}\n' 'addrm2/sub/anotherlarge'
+  updated patterns: ['addrm2/.hglf/sub/anotherlarge', 'addrm2/sub/anotherlarge']
+  1: anotherlarge
+  $ hg --debug log -R addrm2 -T '{rev}: {desc}\n' 'addrm2/.hglf/sub/anotherlarge'
+  updated patterns: ['addrm2/.hglf/sub/anotherlarge']
+  1: anotherlarge
+
+
 Check error message while exchange
 =========================================================
 
@@ -737,8 +803,6 @@
   R d1/f
   $ hg merge
   merging d2/f and d1/f to d2/f
-  getting changed largefiles
-  0 largefiles updated, 0 removed
   1 files updated, 1 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
   $ cd ..
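The "updated patterns" debug lines in the test-largefiles-misc.t hunks above
work because every largefile is tracked through a standin under .hglf/, and
each user-supplied pattern gets paired with the corresponding standin pattern.
The mapping itself is just a path prefix (illustrative sketch, not the
extension's code):

  def standin(path):
      # largefiles keeps the standin for FILE at .hglf/FILE
      return '.hglf/' + path

  standin('sub/anotherlarge')   # -> '.hglf/sub/anotherlarge'
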
--- a/tests/test-largefiles.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-largefiles.t	Thu Apr 16 20:57:51 2015 -0500
@@ -581,8 +581,6 @@
   C sub2/large6
   C sub2/large7
   $ hg up -C '.^'
-  getting changed largefiles
-  0 largefiles updated, 0 removed
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg remove large
   $ hg addremove --traceback
@@ -1183,12 +1181,12 @@
   adding manifests
   adding file changes
   added 1 changesets with 2 changes to 2 files (+1 heads)
-  0 largefiles cached
   rebasing 8:f574fb32bb45 "modify normal file largefile in repo d"
   Invoking status precommit hook
   M sub/normal4
   M sub2/large6
   saved backup bundle to $TESTTMP/d/.hg/strip-backup/f574fb32bb45-dd1d9f80-backup.hg (glob)
+  0 largefiles cached
   $ [ -f .hg/largefiles/e166e74c7303192238d60af5a9c4ce9bef0b7928 ]
   $ hg log --template '{rev}:{node|short}  {desc|firstline}\n'
   9:598410d3eb9a  modify normal file largefile in repo d
@@ -1431,8 +1429,6 @@
 
 verify that largefile .orig file no longer is overwritten on every update -C:
   $ hg update --clean
-  getting changed largefiles
-  0 largefiles updated, 0 removed
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ cat sub2/large7.orig
   mistake
--- a/tests/test-log.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-log.t	Thu Apr 16 20:57:51 2015 -0500
@@ -46,18 +46,31 @@
   $ hg ci -me -d '5 0'
 
 Make sure largefiles doesn't interfere with logging a regular file
-  $ hg log a --config extensions.largefiles=
-  changeset:   0:9161b9aeaf16
-  user:        test
-  date:        Thu Jan 01 00:00:01 1970 +0000
-  summary:     a
-  
+  $ hg --debug log a -T '{rev}: {desc}\n' --config extensions.largefiles=
+  updated patterns: ['.hglf/a', 'a']
+  0: a
   $ hg log a
   changeset:   0:9161b9aeaf16
   user:        test
   date:        Thu Jan 01 00:00:01 1970 +0000
   summary:     a
   
+  $ hg log glob:a*
+  changeset:   3:2ca5ba701980
+  user:        test
+  date:        Thu Jan 01 00:00:04 1970 +0000
+  summary:     d
+  
+  changeset:   0:9161b9aeaf16
+  user:        test
+  date:        Thu Jan 01 00:00:01 1970 +0000
+  summary:     a
+  
+  $ hg --debug log glob:a* -T '{rev}: {desc}\n' --config extensions.largefiles=
+  updated patterns: ['glob:.hglf/a*', 'glob:a*']
+  3: d
+  0: a
+
 log on directory
 
   $ hg log dir
@@ -634,7 +647,7 @@
   
 
 
-log -f -r 1:tip
+log -f -r '1 + 4'
 
   $ hg up -C 0
   1 files updated, 0 files merged, 1 files removed, 0 files unresolved
@@ -642,25 +655,24 @@
   $ hg ci -Amb2 -d '1 0'
   adding b2
   created new head
-  $ hg log -f -r 1:tip
+  $ hg log -f -r '1 + 4'
+  changeset:   4:ddb82e70d1a1
+  tag:         tip
+  parent:      0:67e992f2c4f3
+  user:        test
+  date:        Thu Jan 01 00:00:01 1970 +0000
+  summary:     b2
+  
   changeset:   1:3d5bf5654eda
   user:        test
   date:        Thu Jan 01 00:00:01 1970 +0000
   summary:     r1
   
-  changeset:   2:60c670bf5b30
+  changeset:   0:67e992f2c4f3
   user:        test
   date:        Thu Jan 01 00:00:01 1970 +0000
-  summary:     r2
+  summary:     base
   
-  changeset:   3:e62f78d544b4
-  parent:      1:3d5bf5654eda
-  user:        test
-  date:        Thu Jan 01 00:00:01 1970 +0000
-  summary:     b1
-  
-
-
 log -f -r null
 
   $ hg log -f -r null
@@ -675,10 +687,17 @@
   
 
 
+log -f with null parent
+
+  $ hg up -C null
+  0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+  $ hg log -f
+
+
 log -r .  with two parents
 
   $ hg up -C 3
-  2 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg merge tip
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
@@ -1342,6 +1361,11 @@
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     add foo, related
   
+  changeset:   2:c4c64aedf0f7
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     add unrelated old foo
+  
   $ cd ..
 
 Issue2383: hg log showing _less_ differences than hg diff
@@ -1599,6 +1623,70 @@
   user:        
   date:        Thu Jan 01 00:00:00 1970 +0000
   
+working-directory revision requires special treatment
+
+  $ hg log -r 'wdir()'
+  changeset:   0:65624cd9070a+
+  user:        test
+  date:        [A-Za-z0-9:+ ]+ (re)
+  
+  $ hg log -r 'wdir()' -q
+  0:65624cd9070a+
+
+  $ hg log -r 'wdir()' --debug
+  changeset:   0:65624cd9070a035fa7191a54f2b8af39f16b0c08+
+  phase:       draft
+  parent:      0:65624cd9070a035fa7191a54f2b8af39f16b0c08
+  parent:      -1:0000000000000000000000000000000000000000
+  user:        test
+  date:        [A-Za-z0-9:+ ]+ (re)
+  extra:       branch=default
+  
+  $ hg log -r 'wdir()' -Tjson
+  [
+   {
+    "rev": null,
+    "node": null,
+    "branch": "default",
+    "phase": "draft",
+    "user": "test",
+    "date": [*, 0], (glob)
+    "desc": "",
+    "bookmarks": [],
+    "tags": ["tip"],
+    "parents": ["65624cd9070a035fa7191a54f2b8af39f16b0c08"]
+   }
+  ]
+
+  $ hg log -r 'wdir()' -Tjson -q
+  [
+   {
+    "rev": null,
+    "node": null
+   }
+  ]
+
+  $ hg log -r 'wdir()' -Tjson --debug
+  [
+   {
+    "rev": null,
+    "node": null,
+    "branch": "default",
+    "phase": "draft",
+    "user": "test",
+    "date": [*, 0], (glob)
+    "desc": "",
+    "bookmarks": [],
+    "tags": ["tip"],
+    "parents": ["65624cd9070a035fa7191a54f2b8af39f16b0c08"],
+    "manifest": null,
+    "extra": {"branch": "default"},
+    "modified": [],
+    "added": [],
+    "removed": []
+   }
+  ]
+
 Check that adding an arbitrary name shows up in log automatically
 
   $ cat > ../names.py <<EOF
@@ -1655,7 +1743,7 @@
   |
   o  a
   
-Ensure that largefiles doesn't intefere with following a normal file
+Ensure that largefiles doesn't interfere with following a normal file
   $ hg  --config extensions.largefiles= log -f d -T '{desc}' -G
   @  c
   |
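For consumers of the wdir() support exercised in test-log.t above, the JSON
template is the easiest way to detect the working-directory pseudo-revision:
its "rev" and "node" fields are null while "parents" still lists real nodes.
A small sketch (the fallback behaviour is our choice; only the command and the
keys come from the test output above):

  import json
  import subprocess

  out = subprocess.check_output(['hg', 'log', '-r', 'wdir()', '-Tjson'])
  for entry in json.loads(out):
      if entry['rev'] is None:
          # working directory: no revision number or node yet
          print('working directory on top of %s' % entry['parents'][0])
      else:
          print('%d:%s' % (entry['rev'], entry['node']))
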
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-manifest.py	Thu Apr 16 20:57:51 2015 -0500
@@ -0,0 +1,468 @@
+import binascii
+import unittest
+import itertools
+
+import silenttestrunner
+
+from mercurial import manifest as manifestmod
+from mercurial import match as matchmod
+
+EMTPY_MANIFEST = ''
+EMTPY_MANIFEST_V2 = '\0\n'
+
+HASH_1 = '1' * 40
+BIN_HASH_1 = binascii.unhexlify(HASH_1)
+HASH_2 = 'f' * 40
+BIN_HASH_2 = binascii.unhexlify(HASH_2)
+HASH_3 = '1234567890abcdef0987654321deadbeef0fcafe'
+BIN_HASH_3 = binascii.unhexlify(HASH_3)
+A_SHORT_MANIFEST = (
+    'bar/baz/qux.py\0%(hash2)s%(flag2)s\n'
+    'foo\0%(hash1)s%(flag1)s\n'
+    ) % {'hash1': HASH_1,
+         'flag1': '',
+         'hash2': HASH_2,
+         'flag2': 'l',
+         }
+
+# Same data as A_SHORT_MANIFEST
+A_SHORT_MANIFEST_V2 = (
+    '\0\n'
+    '\x00bar/baz/qux.py\0%(flag2)s\n%(hash2)s\n'
+    '\x00foo\0%(flag1)s\n%(hash1)s\n'
+    ) % {'hash1': BIN_HASH_1,
+         'flag1': '',
+         'hash2': BIN_HASH_2,
+         'flag2': 'l',
+         }
+
+# Same data as A_SHORT_MANIFEST
+A_METADATA_MANIFEST = (
+    '\0foo\0bar\n'
+    '\x00bar/baz/qux.py\0%(flag2)s\0foo\0bar\n%(hash2)s\n' # flag and metadata
+    '\x00foo\0%(flag1)s\0foo\n%(hash1)s\n' # no flag, but metadata
+    ) % {'hash1': BIN_HASH_1,
+         'flag1': '',
+         'hash2': BIN_HASH_2,
+         'flag2': 'l',
+         }
+
+A_STEM_COMPRESSED_MANIFEST = (
+    '\0\n'
+    '\x00bar/baz/qux.py\0%(flag2)s\n%(hash2)s\n'
+    '\x04qux/foo.py\0%(flag1)s\n%(hash1)s\n' # simple case of 4 stem chars
+    '\x0az.py\0%(flag1)s\n%(hash1)s\n' # tricky newline = 10 stem characters
+    '\x00%(verylongdir)sx/x\0\n%(hash1)s\n'
+    '\xffx/y\0\n%(hash2)s\n' # more than 255 stem chars
+    ) % {'hash1': BIN_HASH_1,
+         'flag1': '',
+         'hash2': BIN_HASH_2,
+         'flag2': 'l',
+         'verylongdir': 255 * 'x',
+         }
+
+A_DEEPER_MANIFEST = (
+    'a/b/c/bar.py\0%(hash3)s%(flag1)s\n'
+    'a/b/c/bar.txt\0%(hash1)s%(flag1)s\n'
+    'a/b/c/foo.py\0%(hash3)s%(flag1)s\n'
+    'a/b/c/foo.txt\0%(hash2)s%(flag2)s\n'
+    'a/b/d/baz.py\0%(hash3)s%(flag1)s\n'
+    'a/b/d/qux.py\0%(hash1)s%(flag2)s\n'
+    'a/b/d/ten.txt\0%(hash3)s%(flag2)s\n'
+    'a/b/dog.py\0%(hash3)s%(flag1)s\n'
+    'a/b/fish.py\0%(hash2)s%(flag1)s\n'
+    'a/c/london.py\0%(hash3)s%(flag2)s\n'
+    'a/c/paper.txt\0%(hash2)s%(flag2)s\n'
+    'a/c/paris.py\0%(hash2)s%(flag1)s\n'
+    'a/d/apple.py\0%(hash3)s%(flag1)s\n'
+    'a/d/pizza.py\0%(hash3)s%(flag2)s\n'
+    'a/green.py\0%(hash1)s%(flag2)s\n'
+    'a/purple.py\0%(hash2)s%(flag1)s\n'
+    'app.py\0%(hash3)s%(flag1)s\n'
+    'readme.txt\0%(hash2)s%(flag1)s\n'
+    ) % {'hash1': HASH_1,
+         'flag1': '',
+         'hash2': HASH_2,
+         'flag2': 'l',
+         'hash3': HASH_3,
+         }
+
+HUGE_MANIFEST_ENTRIES = 200001
+
+A_HUGE_MANIFEST = ''.join(sorted(
+    'file%d\0%s%s\n' % (i, h, f) for i, h, f in
+    itertools.izip(xrange(200001),
+                   itertools.cycle((HASH_1, HASH_2)),
+                   itertools.cycle(('', 'x', 'l')))))
+
+class basemanifesttests(object):
+    def parsemanifest(self, text):
+        raise NotImplementedError('parsemanifest not implemented by test case')
+
+    def assertIn(self, thing, container, msg=None):
+        # assertIn new in 2.7, use it if available, otherwise polyfill
+        sup = getattr(unittest.TestCase, 'assertIn', False)
+        if sup:
+            return sup(self, thing, container, msg=msg)
+        if not msg:
+            msg = 'Expected %r in %r' % (thing, container)
+        self.assert_(thing in container, msg)
+
+    def testEmptyManifest(self):
+        m = self.parsemanifest(EMTPY_MANIFEST)
+        self.assertEqual(0, len(m))
+        self.assertEqual([], list(m))
+
+    def testEmptyManifestv2(self):
+        m = self.parsemanifest(EMTPY_MANIFEST_V2)
+        self.assertEqual(0, len(m))
+        self.assertEqual([], list(m))
+
+    def testManifest(self):
+        m = self.parsemanifest(A_SHORT_MANIFEST)
+        self.assertEqual(['bar/baz/qux.py', 'foo'], list(m))
+        self.assertEqual(BIN_HASH_2, m['bar/baz/qux.py'])
+        self.assertEqual('l', m.flags('bar/baz/qux.py'))
+        self.assertEqual(BIN_HASH_1, m['foo'])
+        self.assertEqual('', m.flags('foo'))
+        self.assertRaises(KeyError, lambda : m['wat'])
+
+    def testParseManifestV2(self):
+        m1 = self.parsemanifest(A_SHORT_MANIFEST)
+        m2 = self.parsemanifest(A_SHORT_MANIFEST_V2)
+        # Should have same content as A_SHORT_MANIFEST
+        self.assertEqual(m1.text(), m2.text())
+
+    def testParseManifestMetadata(self):
+        # Metadata is for future-proofing and should be accepted but ignored
+        m = self.parsemanifest(A_METADATA_MANIFEST)
+        self.assertEqual(A_SHORT_MANIFEST, m.text())
+
+    def testParseManifestStemCompression(self):
+        m = self.parsemanifest(A_STEM_COMPRESSED_MANIFEST)
+        self.assertIn('bar/baz/qux.py', m)
+        self.assertIn('bar/qux/foo.py', m)
+        self.assertIn('bar/qux/foz.py', m)
+        self.assertIn(256 * 'x' + '/x', m)
+        self.assertIn(256 * 'x' + '/y', m)
+        self.assertEqual(A_STEM_COMPRESSED_MANIFEST, m.text(usemanifestv2=True))
+
+    def testTextV2(self):
+        m1 = self.parsemanifest(A_SHORT_MANIFEST)
+        v2text = m1.text(usemanifestv2=True)
+        self.assertEqual(A_SHORT_MANIFEST_V2, v2text)
+
+    def testSetItem(self):
+        want = BIN_HASH_1
+
+        m = self.parsemanifest(EMTPY_MANIFEST)
+        m['a'] = want
+        self.assertIn('a', m)
+        self.assertEqual(want, m['a'])
+        self.assertEqual('a\0' + HASH_1 + '\n', m.text())
+
+        m = self.parsemanifest(A_SHORT_MANIFEST)
+        m['a'] = want
+        self.assertEqual(want, m['a'])
+        self.assertEqual('a\0' + HASH_1 + '\n' + A_SHORT_MANIFEST,
+                         m.text())
+
+    def testSetFlag(self):
+        want = 'x'
+
+        m = self.parsemanifest(EMTPY_MANIFEST)
+        # first add a file; a file-less flag makes no sense
+        m['a'] = BIN_HASH_1
+        m.setflag('a', want)
+        self.assertEqual(want, m.flags('a'))
+        self.assertEqual('a\0' + HASH_1 + want + '\n', m.text())
+
+        m = self.parsemanifest(A_SHORT_MANIFEST)
+        # first add a file; a file-less flag makes no sense
+        m['a'] = BIN_HASH_1
+        m.setflag('a', want)
+        self.assertEqual(want, m.flags('a'))
+        self.assertEqual('a\0' + HASH_1 + want + '\n' + A_SHORT_MANIFEST,
+                         m.text())
+
+    def testCopy(self):
+        m = self.parsemanifest(A_SHORT_MANIFEST)
+        m['a'] =  BIN_HASH_1
+        m2 = m.copy()
+        del m
+        del m2 # make sure we don't double free() anything
+
+    def testCompaction(self):
+        unhex = binascii.unhexlify
+        h1, h2 = unhex(HASH_1), unhex(HASH_2)
+        m = self.parsemanifest(A_SHORT_MANIFEST)
+        m['alpha'] = h1
+        m['beta'] = h2
+        del m['foo']
+        want = 'alpha\0%s\nbar/baz/qux.py\0%sl\nbeta\0%s\n' % (
+            HASH_1, HASH_2, HASH_2)
+        self.assertEqual(want, m.text())
+        self.assertEqual(3, len(m))
+        self.assertEqual(['alpha', 'bar/baz/qux.py', 'beta'], list(m))
+        self.assertEqual(h1, m['alpha'])
+        self.assertEqual(h2, m['bar/baz/qux.py'])
+        self.assertEqual(h2, m['beta'])
+        self.assertEqual('', m.flags('alpha'))
+        self.assertEqual('l', m.flags('bar/baz/qux.py'))
+        self.assertEqual('', m.flags('beta'))
+        self.assertRaises(KeyError, lambda : m['foo'])
+
+    def testSetGetNodeSuffix(self):
+        clean = self.parsemanifest(A_SHORT_MANIFEST)
+        m = self.parsemanifest(A_SHORT_MANIFEST)
+        h = m['foo']
+        f = m.flags('foo')
+        want = h + 'a'
+        # Merge code wants to set 21-byte fake hashes at times
+        m['foo'] = want
+        self.assertEqual(want, m['foo'])
+        self.assertEqual([('bar/baz/qux.py', BIN_HASH_2),
+                          ('foo', BIN_HASH_1 + 'a')],
+                         list(m.iteritems()))
+        # Sometimes it even tries a 22-byte fake hash, but we can
+        # return 21 and it'll work out
+        m['foo'] = want + '+'
+        self.assertEqual(want, m['foo'])
+        # make sure the suffix survives a copy
+        match = matchmod.match('', '', ['re:foo'])
+        m2 = m.matches(match)
+        self.assertEqual(want, m2['foo'])
+        self.assertEqual(1, len(m2))
+        m2 = m.copy()
+        self.assertEqual(want, m2['foo'])
+        # suffix with iteration
+        self.assertEqual([('bar/baz/qux.py', BIN_HASH_2),
+                          ('foo', want)],
+                         list(m.iteritems()))
+
+        # shows up in diff
+        self.assertEqual({'foo': ((want, f), (h, ''))}, m.diff(clean))
+        self.assertEqual({'foo': ((h, ''), (want, f))}, clean.diff(m))
+
+    def testMatchException(self):
+        m = self.parsemanifest(A_SHORT_MANIFEST)
+        match = matchmod.match('', '', ['re:.*'])
+        def filt(path):
+            if path == 'foo':
+                assert False
+            return True
+        match.matchfn = filt
+        self.assertRaises(AssertionError, m.matches, match)
+
+    def testRemoveItem(self):
+        m = self.parsemanifest(A_SHORT_MANIFEST)
+        del m['foo']
+        self.assertRaises(KeyError, lambda : m['foo'])
+        self.assertEqual(1, len(m))
+        self.assertEqual(1, len(list(m)))
+        # now restore and make sure everything works right
+        m['foo'] = 'a' * 20
+        self.assertEqual(2, len(m))
+        self.assertEqual(2, len(list(m)))
+
+    def testManifestDiff(self):
+        MISSING = (None, '')
+        addl = 'z-only-in-left\0' + HASH_1 + '\n'
+        addr = 'z-only-in-right\0' + HASH_2 + 'x\n'
+        left = self.parsemanifest(
+            A_SHORT_MANIFEST.replace(HASH_1, HASH_3 + 'x') + addl)
+        right = self.parsemanifest(A_SHORT_MANIFEST + addr)
+        want = {
+            'foo': ((BIN_HASH_3, 'x'),
+                    (BIN_HASH_1, '')),
+            'z-only-in-left': ((BIN_HASH_1, ''), MISSING),
+            'z-only-in-right': (MISSING, (BIN_HASH_2, 'x')),
+            }
+        self.assertEqual(want, left.diff(right))
+
+        want = {
+            'bar/baz/qux.py': (MISSING, (BIN_HASH_2, 'l')),
+            'foo': (MISSING, (BIN_HASH_3, 'x')),
+            'z-only-in-left': (MISSING, (BIN_HASH_1, '')),
+            }
+        self.assertEqual(want, self.parsemanifest(EMTPY_MANIFEST).diff(left))
+
+        want = {
+            'bar/baz/qux.py': ((BIN_HASH_2, 'l'), MISSING),
+            'foo': ((BIN_HASH_3, 'x'), MISSING),
+            'z-only-in-left': ((BIN_HASH_1, ''), MISSING),
+            }
+        self.assertEqual(want, left.diff(self.parsemanifest(EMTPY_MANIFEST)))
+        copy = right.copy()
+        del copy['z-only-in-right']
+        del right['foo']
+        want = {
+            'foo': (MISSING, (BIN_HASH_1, '')),
+            'z-only-in-right': ((BIN_HASH_2, 'x'), MISSING),
+            }
+        self.assertEqual(want, right.diff(copy))
+
+        short = self.parsemanifest(A_SHORT_MANIFEST)
+        pruned = short.copy()
+        del pruned['foo']
+        want = {
+            'foo': ((BIN_HASH_1, ''), MISSING),
+            }
+        self.assertEqual(want, short.diff(pruned))
+        want = {
+            'foo': (MISSING, (BIN_HASH_1, '')),
+            }
+        self.assertEqual(want, pruned.diff(short))
+        want = {
+            'bar/baz/qux.py': None,
+            'foo': (MISSING, (BIN_HASH_1, '')),
+            }
+        self.assertEqual(want, pruned.diff(short, True))
+
+    def testReversedLines(self):
+        backwards = ''.join(
+            l + '\n' for l in reversed(A_SHORT_MANIFEST.split('\n')) if l)
+        try:
+            self.parsemanifest(backwards)
+            self.fail('Should have raised ValueError')
+        except ValueError, v:
+            self.assertIn('Manifest lines not in sorted order.', str(v))
+
+    def testNoTerminalNewline(self):
+        try:
+            self.parsemanifest(A_SHORT_MANIFEST + 'wat')
+            self.fail('Should have raised ValueError')
+        except ValueError, v:
+            self.assertIn('Manifest did not end in a newline.', str(v))
+
+    def testNoNewLineAtAll(self):
+        try:
+            self.parsemanifest('wat')
+            self.fail('Should have raised ValueError')
+        except ValueError, v:
+            self.assertIn('Manifest did not end in a newline.', str(v))
+
+    def testHugeManifest(self):
+        m = self.parsemanifest(A_HUGE_MANIFEST)
+        self.assertEqual(HUGE_MANIFEST_ENTRIES, len(m))
+        self.assertEqual(len(m), len(list(m)))
+
+    def testMatchesMetadata(self):
+        '''Tests matches() for a few specific files to make sure that both
+        the set of files and their flags and nodeids are correct in
+        the resulting manifest.'''
+        m = self.parsemanifest(A_HUGE_MANIFEST)
+
+        match = matchmod.match('/', '',
+                ['file1', 'file200', 'file300'], exact=True)
+        m2 = m.matches(match)
+
+        w = ('file1\0%sx\n'
+             'file200\0%sl\n'
+             'file300\0%s\n') % (HASH_2, HASH_1, HASH_1)
+        self.assertEqual(w, m2.text())
+
+    def testMatchesNonexistentFile(self):
+        '''Tests matches() for a small set of specific files, including one
+        nonexistent file to make sure it only matches against existing files.
+        '''
+        m = self.parsemanifest(A_DEEPER_MANIFEST)
+
+        match = matchmod.match('/', '',
+                ['a/b/c/bar.txt', 'a/b/d/qux.py', 'readme.txt', 'nonexistent'],
+                exact=True)
+        m2 = m.matches(match)
+
+        self.assertEqual(
+                ['a/b/c/bar.txt', 'a/b/d/qux.py', 'readme.txt'],
+                m2.keys())
+
+    def testMatchesNonexistentDirectory(self):
+        '''Tests matches() for a relpath match on a directory that doesn't
+        actually exist.'''
+        m = self.parsemanifest(A_DEEPER_MANIFEST)
+
+        match = matchmod.match('/', '', ['a/f'], default='relpath')
+        m2 = m.matches(match)
+
+        self.assertEqual([], m2.keys())
+
+    def testMatchesExactLarge(self):
+        '''Tests matches() for files matching a large list of exact files.
+        '''
+        m = self.parsemanifest(A_HUGE_MANIFEST)
+
+        flist = m.keys()[80:300]
+        match = matchmod.match('/', '', flist, exact=True)
+        m2 = m.matches(match)
+
+        self.assertEqual(flist, m2.keys())
+
+    def testMatchesFull(self):
+        '''Tests matches() for what should be a full match.'''
+        m = self.parsemanifest(A_DEEPER_MANIFEST)
+
+        match = matchmod.match('/', '', [''])
+        m2 = m.matches(match)
+
+        self.assertEqual(m.keys(), m2.keys())
+
+    def testMatchesDirectory(self):
+        '''Tests matches() on a relpath match on a directory, which should
+        match against all files within said directory.'''
+        m = self.parsemanifest(A_DEEPER_MANIFEST)
+
+        match = matchmod.match('/', '', ['a/b'], default='relpath')
+        m2 = m.matches(match)
+
+        self.assertEqual([
+            'a/b/c/bar.py', 'a/b/c/bar.txt', 'a/b/c/foo.py', 'a/b/c/foo.txt',
+            'a/b/d/baz.py', 'a/b/d/qux.py', 'a/b/d/ten.txt', 'a/b/dog.py',
+            'a/b/fish.py'], m2.keys())
+
+    def testMatchesExactPath(self):
+        '''Tests matches() on an exact match on a directory, which should
+        result in an empty manifest because you can't perform an exact match
+        against a directory.'''
+        m = self.parsemanifest(A_DEEPER_MANIFEST)
+
+        match = matchmod.match('/', '', ['a/b'], exact=True)
+        m2 = m.matches(match)
+
+        self.assertEqual([], m2.keys())
+
+    def testMatchesCwd(self):
+        '''Tests matches() on a relpath match with the current directory ('.')
+        when not in the root directory.'''
+        m = self.parsemanifest(A_DEEPER_MANIFEST)
+
+        match = matchmod.match('/', 'a/b', ['.'], default='relpath')
+        m2 = m.matches(match)
+
+        self.assertEqual([
+            'a/b/c/bar.py', 'a/b/c/bar.txt', 'a/b/c/foo.py', 'a/b/c/foo.txt',
+            'a/b/d/baz.py', 'a/b/d/qux.py', 'a/b/d/ten.txt', 'a/b/dog.py',
+            'a/b/fish.py'], m2.keys())
+
+    def testMatchesWithPattern(self):
+        '''Tests matches() for files matching a pattern that reside
+        deeper than the specified directory.'''
+        m = self.parsemanifest(A_DEEPER_MANIFEST)
+
+        match = matchmod.match('/', '', ['a/b/*/*.txt'])
+        m2 = m.matches(match)
+
+        self.assertEqual(
+                ['a/b/c/bar.txt', 'a/b/c/foo.txt', 'a/b/d/ten.txt'],
+                m2.keys())
+
+class testmanifestdict(unittest.TestCase, basemanifesttests):
+    def parsemanifest(self, text):
+        return manifestmod.manifestdict(text)
+
+class testtreemanifest(unittest.TestCase, basemanifesttests):
+    def parsemanifest(self, text):
+        return manifestmod.treemanifest('', text)
+
+if __name__ == '__main__':
+    silenttestrunner.main(__name__)
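The constants at the top of the new test-manifest.py above spell out the v1
manifest layout: one record per file, "path\0<40 hex characters of node><flags>\n",
with flags drawn from '', 'x' and 'l'. A minimal decoding sketch built only on
that layout (decode_v1 is our helper, not part of the manifest module):

  def decode_v1(text):
      entries = []
      for line in text.splitlines():
          path, rest = line.split('\0', 1)
          # first 40 characters are the hex node, anything after is the flag
          entries.append((path, rest[:40], rest[40:]))
      return entries

Applied to A_SHORT_MANIFEST this yields [('bar/baz/qux.py', HASH_2, 'l'),
('foo', HASH_1, '')], which is exactly what testManifest asserts.
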
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-manifestv2.t	Thu Apr 16 20:57:51 2015 -0500
@@ -0,0 +1,31 @@
+Check that entry is added to .hg/requires
+
+  $ hg --config experimental.manifestv2=True init repo
+  $ cd repo
+  $ grep manifestv2 .hg/requires
+  manifestv2
+
+Set up simple repo
+
+  $ echo a > file1
+  $ echo b > file2
+  $ echo c > file3
+  $ hg ci -Aqm 'initial'
+  $ echo d > file2
+  $ hg ci -m 'modify file2'
+
+Check that 'hg verify', which uses manifest.readdelta(), works
+
+  $ hg verify
+  checking changesets
+  checking manifests
+  crosschecking files in changesets and manifests
+  checking files
+  3 files, 2 changesets, 4 total revisions
+
+Check that manifest revlog is smaller than for v1
+
+  $ hg debugindex -m
+     rev    offset  length   base linkrev nodeid       p1           p2
+       0         0      81      0       0 57361477c778 000000000000 000000000000
+       1        81      33      0       1 aeaab5a2ef74 57361477c778 000000000000
--- a/tests/test-merge-tools.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-merge-tools.t	Thu Apr 16 20:57:51 2015 -0500
@@ -603,7 +603,8 @@
   true.priority=1
   true.executable=cat
   # hg update -C 1
-  $ hg debugsetparent 0
+  $ hg update -q 0
+  $ hg revert -q -r 1 .
   $ hg update -r 2
   merging f
   revision 1
@@ -628,7 +629,8 @@
   true.priority=1
   true.executable=cat
   # hg update -C 1
-  $ hg debugsetparent 0
+  $ hg update -q 0
+  $ hg revert -q -r 1 .
   $ hg update -r 2 --tool false
   merging f
   merging f failed!
--- a/tests/test-module-imports.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-module-imports.t	Thu Apr 16 20:57:51 2015 -0500
@@ -21,6 +21,9 @@
 these may expose other cycles.
 
   $ hg locate 'mercurial/**.py' | sed 's-\\-/-g' | xargs python "$import_checker"
+  mercurial/crecord.py mixed imports
+     stdlib:    fcntl, termios
+     relative:  curses
   mercurial/dispatch.py mixed imports
      stdlib:    commands
      relative:  error, extensions, fancyopts, hg, hook, util
@@ -29,11 +32,11 @@
      relative:  error, merge, util
   mercurial/revset.py mixed imports
      stdlib:    parser
-     relative:  discovery, error, hbisect, phases, util
+     relative:  error, hbisect, phases, util
   mercurial/templater.py mixed imports
      stdlib:    parser
      relative:  config, error, templatefilters, templatekw, util
   mercurial/ui.py mixed imports
      stdlib:    formatter
      relative:  config, error, scmutil, util
-  Import cycle: mercurial.cmdutil -> mercurial.context -> mercurial.subrepo -> mercurial.cmdutil -> mercurial.cmdutil
+  Import cycle: mercurial.cmdutil -> mercurial.context -> mercurial.subrepo -> mercurial.cmdutil
--- a/tests/test-mq-eol.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-mq-eol.t	Thu Apr 16 20:57:51 2015 -0500
@@ -60,7 +60,7 @@
   Hunk #1 FAILED at 0
   1 out of 1 hunks FAILED -- saving rejects to file a.rej
   patch failed, unable to continue (try -v)
-  patch failed, rejects left in working dir
+  patch failed, rejects left in working directory
   errors during apply, please fix and refresh eol.diff
   [2]
   $ hg qpop
@@ -72,7 +72,7 @@
   $ hg --config patch.eol='LFCR' qpush
   applying eol.diff
   patch failed, unable to continue (try -v)
-  patch failed, rejects left in working dir
+  patch failed, rejects left in working directory
   errors during apply, please fix and refresh eol.diff
   [2]
   $ hg qpop
@@ -169,7 +169,7 @@
   Hunk #1 FAILED at 0
   1 out of 1 hunks FAILED -- saving rejects to file a.rej
   patch failed, unable to continue (try -v)
-  patch failed, rejects left in working dir
+  patch failed, rejects left in working directory
   errors during apply, please fix and refresh patch1
   [2]
   $ hg qpop
@@ -192,7 +192,7 @@
   Hunk #1 FAILED at 0
   1 out of 1 hunks FAILED -- saving rejects to file a.rej
   patch failed, unable to continue (try -v)
-  patch failed, rejects left in working dir
+  patch failed, rejects left in working directory
   errors during apply, please fix and refresh patch1
   [2]
   $ hg qpop
--- a/tests/test-mq-missingfiles.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-mq-missingfiles.t	Thu Apr 16 20:57:51 2015 -0500
@@ -44,7 +44,7 @@
   unable to find 'b' for patching
   2 out of 2 hunks FAILED -- saving rejects to file b.rej
   patch failed, unable to continue (try -v)
-  patch failed, rejects left in working dir
+  patch failed, rejects left in working directory
   errors during apply, please fix and refresh changeb
   [2]
 
@@ -97,7 +97,7 @@
   2 out of 2 hunks FAILED -- saving rejects to file bb.rej
   b not tracked!
   patch failed, unable to continue (try -v)
-  patch failed, rejects left in working dir
+  patch failed, rejects left in working directory
   errors during apply, please fix and refresh changebb
   [2]
   $ cat a
@@ -149,7 +149,7 @@
   unable to find 'b' for patching
   1 out of 1 hunks FAILED -- saving rejects to file b.rej
   patch failed, unable to continue (try -v)
-  patch failed, rejects left in working dir
+  patch failed, rejects left in working directory
   errors during apply, please fix and refresh changeb
   [2]
   $ hg st
--- a/tests/test-mq-qpush-exact.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-mq-qpush-exact.t	Thu Apr 16 20:57:51 2015 -0500
@@ -203,7 +203,7 @@
   file fp0 already exists
   1 out of 1 hunks FAILED -- saving rejects to file fp0.rej
   patch failed, unable to continue (try -v)
-  patch failed, rejects left in working dir
+  patch failed, rejects left in working directory
   errors during apply, please fix and refresh p0
   [2]
   $ cat fp0
@@ -230,7 +230,7 @@
   file fp1 already exists
   1 out of 1 hunks FAILED -- saving rejects to file fp1.rej
   patch failed, unable to continue (try -v)
-  patch failed, rejects left in working dir
+  patch failed, rejects left in working directory
   errors during apply, please fix and refresh p1
   [2]
   $ cat fp1
--- a/tests/test-mq-qpush-fail.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-mq-qpush-fail.t	Thu Apr 16 20:57:51 2015 -0500
@@ -284,7 +284,7 @@
   b
   committing manifest
   committing changelog
-  patch failed, rejects left in working dir
+  patch failed, rejects left in working directory
   errors during apply, please fix and refresh p3
   [2]
   $ cat a.orig
--- a/tests/test-mq-subrepo-svn.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-mq-subrepo-svn.t	Thu Apr 16 20:57:51 2015 -0500
@@ -50,7 +50,7 @@
   $ cd ..
   $ hg status -S        # doesn't show status for svn subrepos (yet)
   $ hg qnew -m1 1.diff
-  abort: uncommitted changes in subrepository sub
+  abort: uncommitted changes in subrepository 'sub'
   [255]
 
   $ cd ..
--- a/tests/test-mq-subrepo.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-mq-subrepo.t	Thu Apr 16 20:57:51 2015 -0500
@@ -102,7 +102,7 @@
   A .hgsub
   A sub/a
   % qnew -X path:no-effect -m0 0.diff
-  abort: uncommitted changes in subrepository sub
+  abort: uncommitted changes in subrepository 'sub'
   [255]
   % update substate when adding .hgsub w/clean updated subrepo
   A .hgsub
@@ -117,7 +117,7 @@
   M .hgsub
   A sub2/a
   % qnew --cwd .. -R repo-2499-qnew -X path:no-effect -m1 1.diff
-  abort: uncommitted changes in subrepository sub2
+  abort: uncommitted changes in subrepository 'sub2'
   [255]
   % update substate when modifying .hgsub w/clean updated subrepo
   M .hgsub
@@ -161,7 +161,7 @@
   A .hgsub
   A sub/a
   % qrefresh
-  abort: uncommitted changes in subrepository sub
+  abort: uncommitted changes in subrepository 'sub'
   [255]
   % update substate when adding .hgsub w/clean updated subrepo
   A .hgsub
@@ -177,7 +177,7 @@
   M .hgsub
   A sub2/a
   % qrefresh
-  abort: uncommitted changes in subrepository sub2
+  abort: uncommitted changes in subrepository 'sub2'
   [255]
   % update substate when modifying .hgsub w/clean updated subrepo
   M .hgsub
@@ -295,7 +295,12 @@
   new file mode 100644
   examine changes to '.hgsub'? [Ynesfdaq?] y
   
-  abort: uncommitted changes in subrepository sub
+  @@ -0,0 +1,1 @@
+  +sub = sub
+  record this change to '.hgsub'? [Ynesfdaq?] y
+  
+  warning: subrepo spec file '.hgsub' not found
+  abort: uncommitted changes in subrepository 'sub'
   [255]
   % update substate when adding .hgsub w/clean updated subrepo
   A .hgsub
@@ -304,10 +309,14 @@
   new file mode 100644
   examine changes to '.hgsub'? [Ynesfdaq?] y
   
+  @@ -0,0 +1,1 @@
+  +sub = sub
+  record this change to '.hgsub'? [Ynesfdaq?] y
+  
+  warning: subrepo spec file '.hgsub' not found
   path sub
    source   sub
    revision b2fdb12cd82b021c3b7053d67802e77b6eeaee31
-
   $ testmod qrecord --config ui.interactive=1 -m1 1.diff <<EOF
   > y
   > y
@@ -326,7 +335,7 @@
   +sub2 = sub2
   record this change to '.hgsub'? [Ynesfdaq?] y
   
-  abort: uncommitted changes in subrepository sub2
+  abort: uncommitted changes in subrepository 'sub2'
   [255]
   % update substate when modifying .hgsub w/clean updated subrepo
   M .hgsub
--- a/tests/test-mq.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-mq.t	Thu Apr 16 20:57:51 2015 -0500
@@ -311,14 +311,13 @@
 qpush with dump of tag cache
 Dump the tag cache to ensure that it has exactly one head after qpush.
 
-  $ rm -f .hg/cache/tags
+  $ rm -f .hg/cache/tags2-visible
   $ hg tags > /dev/null
 
-.hg/cache/tags (pre qpush):
+.hg/cache/tags2-visible (pre qpush):
 
-  $ cat .hg/cache/tags
+  $ cat .hg/cache/tags2-visible
   1 [\da-f]{40} (re)
-  
   $ hg qpush
   applying test.patch
   now at: test.patch
@@ -326,11 +325,10 @@
   2: draft
   $ hg tags > /dev/null
 
-.hg/cache/tags (post qpush):
+.hg/cache/tags2-visible (post qpush):
 
-  $ cat .hg/cache/tags
+  $ cat .hg/cache/tags2-visible
   2 [\da-f]{40} (re)
-  
   $ checkundo qpush
   $ cd ..
 
@@ -870,7 +868,7 @@
   file foo already exists
   1 out of 1 hunks FAILED -- saving rejects to file foo.rej
   patch failed, unable to continue (try -v)
-  patch failed, rejects left in working dir
+  patch failed, rejects left in working directory
   errors during apply, please fix and refresh bar
   [2]
   $ hg st
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-obsolete-tag-cache.t	Thu Apr 16 20:57:51 2015 -0500
@@ -0,0 +1,113 @@
+  $ cat >> $HGRCPATH << EOF
+  > [extensions]
+  > blackbox=
+  > rebase=
+  > mock=$TESTDIR/mockblackbox.py
+  > 
+  > [experimental]
+  > evolution = createmarkers
+  > EOF
+
+Create a repo with some tags
+
+  $ hg init repo
+  $ cd repo
+  $ echo initial > foo
+  $ hg -q commit -A -m initial
+  $ hg tag -m 'test tag' test1
+  $ echo first > first
+  $ hg -q commit -A -m first
+  $ hg tag -m 'test2 tag' test2
+  $ hg -q up -r 0
+  $ echo newhead > newhead
+  $ hg commit -A -m newhead
+  adding newhead
+  created new head
+  $ hg tag -m 'test head 2 tag' head2
+
+  $ hg log -G -T '{rev}:{node|short} {tags} {desc}\n'
+  @  5:2942a772f72a tip test head 2 tag
+  |
+  o  4:042eb6bfcc49 head2 newhead
+  |
+  | o  3:c3cb30f2d2cd  test2 tag
+  | |
+  | o  2:d75775ffbc6b test2 first
+  | |
+  | o  1:5f97d42da03f  test tag
+  |/
+  o  0:55482a6fb4b1 test1 initial
+  
+
+Trigger tags cache population by doing something that accesses tags info
+
+  $ hg tags
+  tip                                5:2942a772f72a
+  head2                              4:042eb6bfcc49
+  test2                              2:d75775ffbc6b
+  test1                              0:55482a6fb4b1
+
+  $ cat .hg/cache/tags2-visible
+  5 2942a772f72a444bef4bef13874d515f50fa27b6
+  042eb6bfcc4909bad84a1cbf6eb1ddf0ab587d41 head2
+  55482a6fb4b1881fa8f746fd52cf6f096bb21c89 test1
+  d75775ffbc6bca1794d300f5571272879bd280da test2
+
+Hiding a non-tip changeset should change filtered hash and cause tags recompute
+
+  $ hg debugobsolete -d '0 0' c3cb30f2d2cd0aae008cc91a07876e3c5131fd22 -u dummyuser
+
+  $ hg tags
+  tip                                5:2942a772f72a
+  head2                              4:042eb6bfcc49
+  test1                              0:55482a6fb4b1
+
+  $ cat .hg/cache/tags2-visible
+  5 2942a772f72a444bef4bef13874d515f50fa27b6 f34fbc9a9769ba9eff5aff3d008a6b49f85c08b1
+  042eb6bfcc4909bad84a1cbf6eb1ddf0ab587d41 head2
+  55482a6fb4b1881fa8f746fd52cf6f096bb21c89 test1
+
+  $ hg blackbox -l 4
+  1970/01/01 00:00:00 bob> tags
+  1970/01/01 00:00:00 bob> 2/2 cache hits/lookups in * seconds (glob)
+  1970/01/01 00:00:00 bob> writing .hg/cache/tags2-visible with 2 tags
+  1970/01/01 00:00:00 bob> tags exited 0 after * seconds (glob)
+
+Hiding another changeset should cause the filtered hash to change
+
+  $ hg debugobsolete -d '0 0' d75775ffbc6bca1794d300f5571272879bd280da -u dummyuser
+  $ hg debugobsolete -d '0 0' 5f97d42da03fd56f3b228b03dfe48af5c0adf75b -u dummyuser
+
+  $ hg tags
+  tip                                5:2942a772f72a
+  head2                              4:042eb6bfcc49
+
+  $ cat .hg/cache/tags2-visible
+  5 2942a772f72a444bef4bef13874d515f50fa27b6 2fce1eec33263d08a4d04293960fc73a555230e4
+  042eb6bfcc4909bad84a1cbf6eb1ddf0ab587d41 head2
+
+  $ hg blackbox -l 4
+  1970/01/01 00:00:00 bob> tags
+  1970/01/01 00:00:00 bob> 1/1 cache hits/lookups in * seconds (glob)
+  1970/01/01 00:00:00 bob> writing .hg/cache/tags2-visible with 1 tags
+  1970/01/01 00:00:00 bob> tags exited 0 after * seconds (glob)
+
+Resolving tags on an unfiltered repo writes a separate tags cache
+
+  $ hg --hidden tags
+  tip                                5:2942a772f72a
+  head2                              4:042eb6bfcc49
+  test2                              2:d75775ffbc6b
+  test1                              0:55482a6fb4b1
+
+  $ cat .hg/cache/tags2
+  5 2942a772f72a444bef4bef13874d515f50fa27b6
+  042eb6bfcc4909bad84a1cbf6eb1ddf0ab587d41 head2
+  55482a6fb4b1881fa8f746fd52cf6f096bb21c89 test1
+  d75775ffbc6bca1794d300f5571272879bd280da test2
+
+  $ hg blackbox -l 4
+  1970/01/01 00:00:00 bob> --hidden tags
+  1970/01/01 00:00:00 bob> 2/2 cache hits/lookups in * seconds (glob)
+  1970/01/01 00:00:00 bob> writing .hg/cache/tags2 with 3 tags
+  1970/01/01 00:00:00 bob> --hidden tags exited 0 after * seconds (glob)
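The tags2-visible and tags2 files dumped above share one layout: a header line
with the tip revision number, the tip node and, for a filtered view, a hash of
the hidden set, followed by one "node tag" line per tag. A reading sketch based
purely on that output (read_tags_cache is our helper, not Mercurial's parser):

  def read_tags_cache(path):
      with open(path) as f:
          lines = f.read().splitlines()
      header = lines[0].split()
      tiprev, tipnode = int(header[0]), header[1]
      filteredhash = header[2] if len(header) > 2 else None
      tags = {}
      for line in lines[1:]:
          node, tag = line.split(' ', 1)
          tags[tag] = node
      return tiprev, tipnode, filteredhash, tags
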
--- a/tests/test-obsolete.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-obsolete.t	Thu Apr 16 20:57:51 2015 -0500
@@ -11,7 +11,7 @@
   >    hg ci -m "add $1"
   > }
   $ getid() {
-  >    hg id --debug --hidden -ir "desc('$1')"
+  >    hg log -T "{node}\n" --hidden -r "desc('$1')"
   > }
 
   $ cat > debugkeys.py <<EOF
@@ -187,6 +187,8 @@
   [255]
   $ hg debugrevspec 'rev(6)'
   $ hg debugrevspec 'rev(4)'
+  $ hg debugrevspec 'null'
+  -1
 
 Check that public changeset are not accounted as obsolete:
 
@@ -248,6 +250,30 @@
   o  0:1f0dee641bb7 (public) [ ] add a
   
 
+  $ cd ..
+
+Revision 0 is hidden
+--------------------
+
+  $ hg init rev0hidden
+  $ cd rev0hidden
+
+  $ mkcommit kill0
+  $ hg up -q null
+  $ hg debugobsolete `getid kill0`
+  $ mkcommit a
+  $ mkcommit b
+
+Should pick the first visible revision as "repo" node
+
+  $ hg archive ../archive-null
+  $ cat ../archive-null/.hg_archival.txt
+  repo: 1f0dee641bb7258c56bd60e93edfa2405381c41e
+  node: 7c3bad9141dcb46ff89abf5f61856facd56e476c
+  branch: default
+  latesttag: null
+  latesttagdistance: 2
+  changessincelatesttag: 2
 
 
   $ cd ..
@@ -590,6 +616,15 @@
 
 #if serve
 
+Test the debug output for exchange
+----------------------------------
+
+  $ hg pull ../tmpb --config 'experimental.obsmarkers-exchange-debug=True' --config 'experimental.bundle2-exp=True'
+  pulling from ../tmpb
+  searching for changes
+  no changes found
+  obsmarker-exchange: 346 bytes received
+
 check hgweb does not explode
 ====================================
 
@@ -621,7 +656,7 @@
 
 check filelog view
 
-  $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'log/'`hg id --debug --id`/'babar'
+  $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'log/'`hg log -r . -T "{node}"`/'babar'
   200 Script output follows
 
   $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'rev/68'
@@ -753,3 +788,43 @@
   $ hg tags
   visible                            0:193e9254ce7e
   tip                                0:193e9254ce7e
+
+#if serve
+
+Test issue 4506
+
+  $ cd ..
+  $ hg init repo-issue4506
+  $ cd repo-issue4506
+  $ echo "0" > foo
+  $ hg add foo
+  $ hg ci -m "content-0"
+
+  $ hg up null
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ echo "1" > bar
+  $ hg add bar
+  $ hg ci -m "content-1"
+  created new head
+  $ hg up 0
+  1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ hg graft 1
+  grafting 1:1c9eddb02162 "content-1" (tip)
+
+  $ hg debugobsolete `hg log -r1 -T'{node}'` `hg log -r2 -T'{node}'`
+
+  $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
+  $ cat hg.pid >> $DAEMON_PIDS
+
+  $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'rev/1'
+  404 Not Found
+  [1]
+  $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'file/tip/bar'
+  200 Script output follows
+  $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'annotate/tip/bar'
+  200 Script output follows
+
+  $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
+
+#endif
+
--- a/tests/test-phases.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-phases.t	Thu Apr 16 20:57:51 2015 -0500
@@ -456,8 +456,12 @@
   o  0 public A
   
 
-move changeset forward and backward
+move changeset forward and backward and test kill switch
 
+  $ cat <<EOF >> $HGRCPATH
+  > [experimental]
+  > nativephaseskillswitch = true
+  > EOF
   $ hg phase --draft --force 1::4
   $ hg log -G --template "{rev} {phase} {desc}\n"
   @    7 secret merge B' and E
@@ -478,6 +482,10 @@
   
 test partial failure
 
+  $ cat <<EOF >> $HGRCPATH
+  > [experimental]
+  > nativephaseskillswitch = false
+  > EOF
   $ hg phase --public 7
   $ hg phase --draft '5 or 7'
   cannot move 1 changesets to a higher phase, use --force
--- a/tests/test-pull.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-pull.t	Thu Apr 16 20:57:51 2015 -0500
@@ -76,7 +76,7 @@
   abort: file:// URLs can only refer to localhost
   [255]
 
-  $ hg pull -q file:../test
+  $ hg pull -q file:../test  # no-msys
 
 It's tricky to make file:// URLs working on every platform with
 regular shell commands.
--- a/tests/test-push-http.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-push-http.t	Thu Apr 16 20:57:51 2015 -0500
@@ -67,7 +67,7 @@
   remote: adding manifests
   remote: adding file changes
   remote: added 1 changesets with 1 changes to 1 files
-  remote: changegroup hook: HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_SOURCE=serve HG_URL=remote:http:127.0.0.1:
+  remote: changegroup hook: HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_SOURCE=serve HG_TXNID=TXN:* HG_URL=remote:http:127.0.0.1: (glob)
   % serve errors
   $ hg rollback
   repository tip rolled back to revision 0 (undo serve)
@@ -83,7 +83,7 @@
   remote: adding manifests
   remote: adding file changes
   remote: added 1 changesets with 1 changes to 1 files
-  remote: changegroup hook: HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_SOURCE=serve HG_URL=remote:http:127.0.0.1:
+  remote: changegroup hook: HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_SOURCE=serve HG_TXNID=TXN:* HG_URL=remote:http:127.0.0.1: (glob)
   % serve errors
   $ hg rollback
   repository tip rolled back to revision 0 (undo serve)
@@ -99,7 +99,7 @@
   remote: adding manifests
   remote: adding file changes
   remote: added 1 changesets with 1 changes to 1 files
-  remote: changegroup hook: HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_SOURCE=serve HG_URL=remote:http:127.0.0.1:
+  remote: changegroup hook: HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_SOURCE=serve HG_TXNID=TXN:* HG_URL=remote:http:127.0.0.1: (glob)
   % serve errors
   $ hg rollback
   repository tip rolled back to revision 0 (undo serve)
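The test-push-http.t hunks above extend the data passed to the changegroup
hook with a transaction id. For an external hook command, the kind exercised
here, those values arrive as HG_* environment variables, so a hook can read
them directly; a tiny sketch (hook wiring assumed, only the variable names
come from the output above):

  import os

  for name in ('HG_NODE', 'HG_SOURCE', 'HG_TXNID', 'HG_URL'):
      print('%s=%s' % (name, os.environ.get(name, '')))
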
--- a/tests/test-push-warn.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-push-warn.t	Thu Apr 16 20:57:51 2015 -0500
@@ -19,6 +19,14 @@
   $ hg add t3
   $ hg commit -m "3"
 
+Specifying a revset that evaluates to an empty set will abort
+
+  $ hg push -r '0 & 1' ../a
+  pushing to ../a
+  abort: specified revisions evaluate to an empty set
+  (use different revision arguments)
+  [255]
+
   $ hg push ../a
   pushing to ../a
   searching for changes
@@ -411,7 +419,7 @@
   adding c
   created new head
 
-  $ for i in `seq 3`; do hg -R h up -q 0; echo $i > h/b; hg -R h ci -qAm$i; done
+  $ for i in `python $TESTDIR/seq.py 3`; do hg -R h up -q 0; echo $i > h/b; hg -R h ci -qAm$i; done
 
   $ hg -R i push h
   pushing to h
--- a/tests/test-qrecord.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-qrecord.t	Thu Apr 16 20:57:51 2015 -0500
@@ -60,7 +60,7 @@
                             committing
       --close-branch        mark a branch as closed, hiding it from the branch
                             list
-      --amend               amend the parent of the working dir
+      --amend               amend the parent of the working directory
    -s --secret              use the secret phase for committing
    -e --edit                invoke editor on commit messages
    -I --include PATTERN [+] include names matching the given patterns
--- a/tests/test-rebase-conflicts.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-rebase-conflicts.t	Thu Apr 16 20:57:51 2015 -0500
@@ -322,6 +322,6 @@
   files: 1/1 chunks (100.00%)
   added 2 changesets with 2 changes to 1 files
   invalid branchheads cache (served): tip differs
-  truncating cache/rbc-revs-v1 to 72
   rebase completed
   updating the branch cache
+  truncating cache/rbc-revs-v1 to 72
--- a/tests/test-rebase-named-branches.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-rebase-named-branches.t	Thu Apr 16 20:57:51 2015 -0500
@@ -352,7 +352,7 @@
   $ hg ci -qm 'c2 closed' --close
   $ hg up -qr 2
   $ hg tglog
-  o  4: 'c2 closed' c
+  _  4: 'c2 closed' c
   |
   o  3: 'b1' b
   |
@@ -366,7 +366,7 @@
   nothing to rebase - working directory parent is also destination
   [1]
   $ hg tglog
-  o  4: 'c2 closed' c
+  _  4: 'c2 closed' c
   |
   o  3: 'b1' b
   |
--- a/tests/test-rebase-pull.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-rebase-pull.t	Thu Apr 16 20:57:51 2015 -0500
@@ -165,5 +165,47 @@
   |
   o  0: 'C1'
   
+pull --rebase works with bundle2 turned on
 
-
+  $ cd ../a
+  $ echo R4 > R4
+  $ hg ci -Am R4
+  adding R4
+  $ hg tglog
+  @  5: 'R4'
+  |
+  o  4: 'R3'
+  |
+  o  3: 'R2'
+  |
+  o  2: 'R1'
+  |
+  o  1: 'C2'
+  |
+  o  0: 'C1'
+  
+  $ cd ../c
+  $ hg pull --rebase --config experimental.bundle2-exp=True --config experimental.strip-bundle2-version=02
+  pulling from $TESTTMP/a (glob)
+  searching for changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 1 changes to 1 files (+1 heads)
+  rebasing 5:518d153c0ba3 "L1"
+  saved backup bundle to $TESTTMP/c/.hg/strip-backup/518d153c0ba3-73407f14-backup.hg (glob)
+  $ hg tglog
+  @  6: 'L1'
+  |
+  o  5: 'R4'
+  |
+  o  4: 'R3'
+  |
+  o  3: 'R2'
+  |
+  o  2: 'R1'
+  |
+  o  1: 'C2'
+  |
+  o  0: 'C1'
+  
--- a/tests/test-record.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-record.t	Thu Apr 16 20:57:51 2015 -0500
@@ -10,6 +10,59 @@
   $ hg init a
   $ cd a
 
+Record help
+
+  $ hg record -h
+  hg record [OPTION]... [FILE]...
+  
+  interactively select changes to commit
+  
+      If a list of files is omitted, all changes reported by "hg status" will be
+      candidates for recording.
+  
+      See "hg help dates" for a list of formats valid for -d/--date.
+  
+      You will be prompted for whether to record changes to each modified file,
+      and for files with multiple changes, for each change to use. For each
+      query, the following responses are possible:
+  
+        y - record this change
+        n - skip this change
+        e - edit this change manually
+  
+        s - skip remaining changes to this file
+        f - record remaining changes to this file
+  
+        d - done, skip remaining changes and files
+        a - record all changes to all remaining files
+        q - quit, recording no changes
+  
+        ? - display help
+  
+      This command is not available when committing a merge.
+  
+  options ([+] can be repeated):
+  
+   -A --addremove           mark new/missing files as added/removed before
+                            committing
+      --close-branch        mark a branch as closed, hiding it from the branch
+                            list
+      --amend               amend the parent of the working directory
+   -s --secret              use the secret phase for committing
+   -e --edit                invoke editor on commit messages
+   -I --include PATTERN [+] include names matching the given patterns
+   -X --exclude PATTERN [+] exclude names matching the given patterns
+   -m --message TEXT        use text as commit message
+   -l --logfile FILE        read commit message from file
+   -d --date DATE           record the specified date as commit date
+   -u --user USER           record the specified user as committer
+   -S --subrepos            recurse into subrepositories
+   -w --ignore-all-space    ignore white space when comparing lines
+   -b --ignore-space-change ignore changes in the amount of white space
+   -B --ignore-blank-lines  ignore changes whose lines are all blank
+  
+  (some details hidden, use --verbose to show complete help)
+
 Select no files
 
   $ touch empty-rw
@@ -32,1348 +85,4 @@
   
   
 
-Select files but no hunks
 
-  $ hg record empty-rw<<EOF
-  > y
-  > n
-  > EOF
-  diff --git a/empty-rw b/empty-rw
-  new file mode 100644
-  examine changes to 'empty-rw'? [Ynesfdaq?] y
-  
-  abort: empty commit message
-  [255]
-
-  $ hg tip -p
-  changeset:   -1:000000000000
-  tag:         tip
-  user:        
-  date:        Thu Jan 01 00:00:00 1970 +0000
-  
-  
-
-Record empty file
-
-  $ hg record -d '0 0' -m empty empty-rw<<EOF
-  > y
-  > y
-  > EOF
-  diff --git a/empty-rw b/empty-rw
-  new file mode 100644
-  examine changes to 'empty-rw'? [Ynesfdaq?] y
-  
-
-  $ hg tip -p
-  changeset:   0:c0708cf4e46e
-  tag:         tip
-  user:        test
-  date:        Thu Jan 01 00:00:00 1970 +0000
-  summary:     empty
-  
-  
-
-Summary shows we updated to the new cset
-
-  $ hg summary
-  parent: 0:c0708cf4e46e tip
-   empty
-  branch: default
-  commit: (clean)
-  update: (current)
-
-Rename empty file
-
-  $ hg mv empty-rw empty-rename
-  $ hg record -d '1 0' -m rename<<EOF
-  > y
-  > EOF
-  diff --git a/empty-rw b/empty-rename
-  rename from empty-rw
-  rename to empty-rename
-  examine changes to 'empty-rw' and 'empty-rename'? [Ynesfdaq?] y
-  
-
-  $ hg tip -p
-  changeset:   1:d695e8dcb197
-  tag:         tip
-  user:        test
-  date:        Thu Jan 01 00:00:01 1970 +0000
-  summary:     rename
-  
-  
-
-Copy empty file
-
-  $ hg cp empty-rename empty-copy
-  $ hg record -d '2 0' -m copy<<EOF
-  > y
-  > EOF
-  diff --git a/empty-rename b/empty-copy
-  copy from empty-rename
-  copy to empty-copy
-  examine changes to 'empty-rename' and 'empty-copy'? [Ynesfdaq?] y
-  
-
-  $ hg tip -p
-  changeset:   2:1d4b90bea524
-  tag:         tip
-  user:        test
-  date:        Thu Jan 01 00:00:02 1970 +0000
-  summary:     copy
-  
-  
-
-Delete empty file
-
-  $ hg rm empty-copy
-  $ hg record -d '3 0' -m delete<<EOF
-  > y
-  > EOF
-  diff --git a/empty-copy b/empty-copy
-  deleted file mode 100644
-  examine changes to 'empty-copy'? [Ynesfdaq?] y
-  
-
-  $ hg tip -p
-  changeset:   3:b39a238f01a1
-  tag:         tip
-  user:        test
-  date:        Thu Jan 01 00:00:03 1970 +0000
-  summary:     delete
-  
-  
-
-Add binary file
-
-  $ hg bundle --base -2 tip.bundle
-  1 changesets found
-  $ hg add tip.bundle
-  $ hg record -d '4 0' -m binary<<EOF
-  > y
-  > EOF
-  diff --git a/tip.bundle b/tip.bundle
-  new file mode 100644
-  this is a binary file
-  examine changes to 'tip.bundle'? [Ynesfdaq?] y
-  
-
-  $ hg tip -p
-  changeset:   4:ad816da3711e
-  tag:         tip
-  user:        test
-  date:        Thu Jan 01 00:00:04 1970 +0000
-  summary:     binary
-  
-  diff -r b39a238f01a1 -r ad816da3711e tip.bundle
-  Binary file tip.bundle has changed
-  
-
-Change binary file
-
-  $ hg bundle --base -2 tip.bundle
-  1 changesets found
-  $ hg record -d '5 0' -m binary-change<<EOF
-  > y
-  > EOF
-  diff --git a/tip.bundle b/tip.bundle
-  this modifies a binary file (all or nothing)
-  examine changes to 'tip.bundle'? [Ynesfdaq?] y
-  
-
-  $ hg tip -p
-  changeset:   5:dccd6f3eb485
-  tag:         tip
-  user:        test
-  date:        Thu Jan 01 00:00:05 1970 +0000
-  summary:     binary-change
-  
-  diff -r ad816da3711e -r dccd6f3eb485 tip.bundle
-  Binary file tip.bundle has changed
-  
-
-Rename and change binary file
-
-  $ hg mv tip.bundle top.bundle
-  $ hg bundle --base -2 top.bundle
-  1 changesets found
-  $ hg record -d '6 0' -m binary-change-rename<<EOF
-  > y
-  > EOF
-  diff --git a/tip.bundle b/top.bundle
-  rename from tip.bundle
-  rename to top.bundle
-  this modifies a binary file (all or nothing)
-  examine changes to 'tip.bundle' and 'top.bundle'? [Ynesfdaq?] y
-  
-
-  $ hg tip -p
-  changeset:   6:7fa44105f5b3
-  tag:         tip
-  user:        test
-  date:        Thu Jan 01 00:00:06 1970 +0000
-  summary:     binary-change-rename
-  
-  diff -r dccd6f3eb485 -r 7fa44105f5b3 tip.bundle
-  Binary file tip.bundle has changed
-  diff -r dccd6f3eb485 -r 7fa44105f5b3 top.bundle
-  Binary file top.bundle has changed
-  
-
-Add plain file
-
-  $ for i in 1 2 3 4 5 6 7 8 9 10; do
-  >     echo $i >> plain
-  > done
-
-  $ hg add plain
-  $ hg record -d '7 0' -m plain plain<<EOF
-  > y
-  > y
-  > EOF
-  diff --git a/plain b/plain
-  new file mode 100644
-  examine changes to 'plain'? [Ynesfdaq?] y
-  
-
-  $ hg tip -p
-  changeset:   7:11fb457c1be4
-  tag:         tip
-  user:        test
-  date:        Thu Jan 01 00:00:07 1970 +0000
-  summary:     plain
-  
-  diff -r 7fa44105f5b3 -r 11fb457c1be4 plain
-  --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
-  +++ b/plain	Thu Jan 01 00:00:07 1970 +0000
-  @@ -0,0 +1,10 @@
-  +1
-  +2
-  +3
-  +4
-  +5
-  +6
-  +7
-  +8
-  +9
-  +10
-  
-Modify end of plain file with username unset
-
-  $ echo 11 >> plain
-  $ unset HGUSER
-  $ hg record --config ui.username= -d '8 0' -m end plain
-  abort: no username supplied
-  (use "hg config --edit" to set your username)
-  [255]
-
-
-Modify end of plain file, also test that diffopts are accounted for
-
-  $ HGUSER="test"
-  $ export HGUSER
-  $ hg record --config diff.showfunc=true -d '8 0' -m end plain <<EOF
-  > y
-  > y
-  > EOF
-  diff --git a/plain b/plain
-  1 hunks, 1 lines changed
-  examine changes to 'plain'? [Ynesfdaq?] y
-  
-  @@ -8,3 +8,4 @@ 7
-   8
-   9
-   10
-  +11
-  record this change to 'plain'? [Ynesfdaq?] y
-  
-
-Modify end of plain file, no EOL
-
-  $ hg tip --template '{node}' >> plain
-  $ hg record -d '9 0' -m noeol plain <<EOF
-  > y
-  > y
-  > EOF
-  diff --git a/plain b/plain
-  1 hunks, 1 lines changed
-  examine changes to 'plain'? [Ynesfdaq?] y
-  
-  @@ -9,3 +9,4 @@
-   9
-   10
-   11
-  +7264f99c5f5ff3261504828afa4fb4d406c3af54
-  \ No newline at end of file
-  record this change to 'plain'? [Ynesfdaq?] y
-  
-
-Modify end of plain file, add EOL
-
-  $ echo >> plain
-  $ echo 1 > plain2
-  $ hg add plain2
-  $ hg record -d '10 0' -m eol plain plain2 <<EOF
-  > y
-  > y
-  > y
-  > EOF
-  diff --git a/plain b/plain
-  1 hunks, 1 lines changed
-  examine changes to 'plain'? [Ynesfdaq?] y
-  
-  @@ -9,4 +9,4 @@
-   9
-   10
-   11
-  -7264f99c5f5ff3261504828afa4fb4d406c3af54
-  \ No newline at end of file
-  +7264f99c5f5ff3261504828afa4fb4d406c3af54
-  record change 1/2 to 'plain'? [Ynesfdaq?] y
-  
-  diff --git a/plain2 b/plain2
-  new file mode 100644
-  examine changes to 'plain2'? [Ynesfdaq?] y
-  
-
-Modify beginning, trim end, record both, add another file to test
-changes numbering
-
-  $ rm plain
-  $ for i in 2 2 3 4 5 6 7 8 9 10; do
-  >   echo $i >> plain
-  > done
-  $ echo 2 >> plain2
-
-  $ hg record -d '10 0' -m begin-and-end plain plain2 <<EOF
-  > y
-  > y
-  > y
-  > y
-  > y
-  > EOF
-  diff --git a/plain b/plain
-  2 hunks, 3 lines changed
-  examine changes to 'plain'? [Ynesfdaq?] y
-  
-  @@ -1,4 +1,4 @@
-  -1
-  +2
-   2
-   3
-   4
-  record change 1/3 to 'plain'? [Ynesfdaq?] y
-  
-  @@ -8,5 +8,3 @@
-   8
-   9
-   10
-  -11
-  -7264f99c5f5ff3261504828afa4fb4d406c3af54
-  record change 2/3 to 'plain'? [Ynesfdaq?] y
-  
-  diff --git a/plain2 b/plain2
-  1 hunks, 1 lines changed
-  examine changes to 'plain2'? [Ynesfdaq?] y
-  
-  @@ -1,1 +1,2 @@
-   1
-  +2
-  record change 3/3 to 'plain2'? [Ynesfdaq?] y
-  
-
-  $ hg tip -p
-  changeset:   11:21df83db12b8
-  tag:         tip
-  user:        test
-  date:        Thu Jan 01 00:00:10 1970 +0000
-  summary:     begin-and-end
-  
-  diff -r ddb8b281c3ff -r 21df83db12b8 plain
-  --- a/plain	Thu Jan 01 00:00:10 1970 +0000
-  +++ b/plain	Thu Jan 01 00:00:10 1970 +0000
-  @@ -1,4 +1,4 @@
-  -1
-  +2
-   2
-   3
-   4
-  @@ -8,5 +8,3 @@
-   8
-   9
-   10
-  -11
-  -7264f99c5f5ff3261504828afa4fb4d406c3af54
-  diff -r ddb8b281c3ff -r 21df83db12b8 plain2
-  --- a/plain2	Thu Jan 01 00:00:10 1970 +0000
-  +++ b/plain2	Thu Jan 01 00:00:10 1970 +0000
-  @@ -1,1 +1,2 @@
-   1
-  +2
-  
-
-Trim beginning, modify end
-
-  $ rm plain
-  > for i in 4 5 6 7 8 9 10.new; do
-  >   echo $i >> plain
-  > done
-
-Record end
-
-  $ hg record -d '11 0' -m end-only plain <<EOF
-  > y
-  > n
-  > y
-  > EOF
-  diff --git a/plain b/plain
-  2 hunks, 4 lines changed
-  examine changes to 'plain'? [Ynesfdaq?] y
-  
-  @@ -1,9 +1,6 @@
-  -2
-  -2
-  -3
-   4
-   5
-   6
-   7
-   8
-   9
-  record change 1/2 to 'plain'? [Ynesfdaq?] n
-  
-  @@ -4,7 +1,7 @@
-   4
-   5
-   6
-   7
-   8
-   9
-  -10
-  +10.new
-  record change 2/2 to 'plain'? [Ynesfdaq?] y
-  
-
-  $ hg tip -p
-  changeset:   12:99337501826f
-  tag:         tip
-  user:        test
-  date:        Thu Jan 01 00:00:11 1970 +0000
-  summary:     end-only
-  
-  diff -r 21df83db12b8 -r 99337501826f plain
-  --- a/plain	Thu Jan 01 00:00:10 1970 +0000
-  +++ b/plain	Thu Jan 01 00:00:11 1970 +0000
-  @@ -7,4 +7,4 @@
-   7
-   8
-   9
-  -10
-  +10.new
-  
-
-Record beginning
-
-  $ hg record -d '12 0' -m begin-only plain <<EOF
-  > y
-  > y
-  > EOF
-  diff --git a/plain b/plain
-  1 hunks, 3 lines changed
-  examine changes to 'plain'? [Ynesfdaq?] y
-  
-  @@ -1,6 +1,3 @@
-  -2
-  -2
-  -3
-   4
-   5
-   6
-  record this change to 'plain'? [Ynesfdaq?] y
-  
-
-  $ hg tip -p
-  changeset:   13:bbd45465d540
-  tag:         tip
-  user:        test
-  date:        Thu Jan 01 00:00:12 1970 +0000
-  summary:     begin-only
-  
-  diff -r 99337501826f -r bbd45465d540 plain
-  --- a/plain	Thu Jan 01 00:00:11 1970 +0000
-  +++ b/plain	Thu Jan 01 00:00:12 1970 +0000
-  @@ -1,6 +1,3 @@
-  -2
-  -2
-  -3
-   4
-   5
-   6
-  
-
-Add to beginning, trim from end
-
-  $ rm plain
-  $ for i in 1 2 3 4 5 6 7 8 9; do
-  >  echo $i >> plain
-  > done
-
-Record end
-
-  $ hg record --traceback -d '13 0' -m end-again plain<<EOF
-  > y
-  > n
-  > y
-  > EOF
-  diff --git a/plain b/plain
-  2 hunks, 4 lines changed
-  examine changes to 'plain'? [Ynesfdaq?] y
-  
-  @@ -1,6 +1,9 @@
-  +1
-  +2
-  +3
-   4
-   5
-   6
-   7
-   8
-   9
-  record change 1/2 to 'plain'? [Ynesfdaq?] n
-  
-  @@ -1,7 +4,6 @@
-   4
-   5
-   6
-   7
-   8
-   9
-  -10.new
-  record change 2/2 to 'plain'? [Ynesfdaq?] y
-  
-
-Add to beginning, middle, end
-
-  $ rm plain
-  $ for i in 1 2 3 4 5 5.new 5.reallynew 6 7 8 9 10 11; do
-  >   echo $i >> plain
-  > done
-
-Record beginning, middle, and test that format-breaking diffopts are ignored
-
-  $ hg record --config diff.noprefix=True -d '14 0' -m middle-only plain <<EOF
-  > y
-  > y
-  > y
-  > n
-  > EOF
-  diff --git a/plain b/plain
-  3 hunks, 7 lines changed
-  examine changes to 'plain'? [Ynesfdaq?] y
-  
-  @@ -1,2 +1,5 @@
-  +1
-  +2
-  +3
-   4
-   5
-  record change 1/3 to 'plain'? [Ynesfdaq?] y
-  
-  @@ -1,6 +4,8 @@
-   4
-   5
-  +5.new
-  +5.reallynew
-   6
-   7
-   8
-   9
-  record change 2/3 to 'plain'? [Ynesfdaq?] y
-  
-  @@ -3,4 +8,6 @@
-   6
-   7
-   8
-   9
-  +10
-  +11
-  record change 3/3 to 'plain'? [Ynesfdaq?] n
-  
-
-  $ hg tip -p
-  changeset:   15:f34a7937ec33
-  tag:         tip
-  user:        test
-  date:        Thu Jan 01 00:00:14 1970 +0000
-  summary:     middle-only
-  
-  diff -r 82c065d0b850 -r f34a7937ec33 plain
-  --- a/plain	Thu Jan 01 00:00:13 1970 +0000
-  +++ b/plain	Thu Jan 01 00:00:14 1970 +0000
-  @@ -1,5 +1,10 @@
-  +1
-  +2
-  +3
-   4
-   5
-  +5.new
-  +5.reallynew
-   6
-   7
-   8
-  
-
-Record end
-
-  $ hg record -d '15 0' -m end-only plain <<EOF
-  > y
-  > y
-  > EOF
-  diff --git a/plain b/plain
-  1 hunks, 2 lines changed
-  examine changes to 'plain'? [Ynesfdaq?] y
-  
-  @@ -9,3 +9,5 @@
-   7
-   8
-   9
-  +10
-  +11
-  record this change to 'plain'? [Ynesfdaq?] y
-  
-
-  $ hg tip -p
-  changeset:   16:f9900b71a04c
-  tag:         tip
-  user:        test
-  date:        Thu Jan 01 00:00:15 1970 +0000
-  summary:     end-only
-  
-  diff -r f34a7937ec33 -r f9900b71a04c plain
-  --- a/plain	Thu Jan 01 00:00:14 1970 +0000
-  +++ b/plain	Thu Jan 01 00:00:15 1970 +0000
-  @@ -9,3 +9,5 @@
-   7
-   8
-   9
-  +10
-  +11
-  
-
-  $ mkdir subdir
-  $ cd subdir
-  $ echo a > a
-  $ hg ci -d '16 0' -Amsubdir
-  adding subdir/a
-
-  $ echo a >> a
-  $ hg record -d '16 0' -m subdir-change a <<EOF
-  > y
-  > y
-  > EOF
-  diff --git a/subdir/a b/subdir/a
-  1 hunks, 1 lines changed
-  examine changes to 'subdir/a'? [Ynesfdaq?] y
-  
-  @@ -1,1 +1,2 @@
-   a
-  +a
-  record this change to 'subdir/a'? [Ynesfdaq?] y
-  
-
-  $ hg tip -p
-  changeset:   18:61be427a9deb
-  tag:         tip
-  user:        test
-  date:        Thu Jan 01 00:00:16 1970 +0000
-  summary:     subdir-change
-  
-  diff -r a7ffae4d61cb -r 61be427a9deb subdir/a
-  --- a/subdir/a	Thu Jan 01 00:00:16 1970 +0000
-  +++ b/subdir/a	Thu Jan 01 00:00:16 1970 +0000
-  @@ -1,1 +1,2 @@
-   a
-  +a
-  
-
-  $ echo a > f1
-  $ echo b > f2
-  $ hg add f1 f2
-
-  $ hg ci -mz -d '17 0'
-
-  $ echo a >> f1
-  $ echo b >> f2
-
-Help, quit
-
-  $ hg record <<EOF
-  > ?
-  > q
-  > EOF
-  diff --git a/subdir/f1 b/subdir/f1
-  1 hunks, 1 lines changed
-  examine changes to 'subdir/f1'? [Ynesfdaq?] ?
-  
-  y - yes, record this change
-  n - no, skip this change
-  e - edit this change manually
-  s - skip remaining changes to this file
-  f - record remaining changes to this file
-  d - done, skip remaining changes and files
-  a - record all changes to all remaining files
-  q - quit, recording no changes
-  ? - ? (display help)
-  examine changes to 'subdir/f1'? [Ynesfdaq?] q
-  
-  abort: user quit
-  [255]
-
-Skip
-
-  $ hg record <<EOF
-  > s
-  > EOF
-  diff --git a/subdir/f1 b/subdir/f1
-  1 hunks, 1 lines changed
-  examine changes to 'subdir/f1'? [Ynesfdaq?] s
-  
-  diff --git a/subdir/f2 b/subdir/f2
-  1 hunks, 1 lines changed
-  examine changes to 'subdir/f2'? [Ynesfdaq?] abort: response expected
-  [255]
-
-No
-
-  $ hg record <<EOF
-  > n
-  > EOF
-  diff --git a/subdir/f1 b/subdir/f1
-  1 hunks, 1 lines changed
-  examine changes to 'subdir/f1'? [Ynesfdaq?] n
-  
-  diff --git a/subdir/f2 b/subdir/f2
-  1 hunks, 1 lines changed
-  examine changes to 'subdir/f2'? [Ynesfdaq?] abort: response expected
-  [255]
-
-f, quit
-
-  $ hg record <<EOF
-  > f
-  > q
-  > EOF
-  diff --git a/subdir/f1 b/subdir/f1
-  1 hunks, 1 lines changed
-  examine changes to 'subdir/f1'? [Ynesfdaq?] f
-  
-  diff --git a/subdir/f2 b/subdir/f2
-  1 hunks, 1 lines changed
-  examine changes to 'subdir/f2'? [Ynesfdaq?] q
-  
-  abort: user quit
-  [255]
-
-s, all
-
-  $ hg record -d '18 0' -mx <<EOF
-  > s
-  > a
-  > EOF
-  diff --git a/subdir/f1 b/subdir/f1
-  1 hunks, 1 lines changed
-  examine changes to 'subdir/f1'? [Ynesfdaq?] s
-  
-  diff --git a/subdir/f2 b/subdir/f2
-  1 hunks, 1 lines changed
-  examine changes to 'subdir/f2'? [Ynesfdaq?] a
-  
-
-  $ hg tip -p
-  changeset:   20:b3df3dda369a
-  tag:         tip
-  user:        test
-  date:        Thu Jan 01 00:00:18 1970 +0000
-  summary:     x
-  
-  diff -r 6e02d6c9906d -r b3df3dda369a subdir/f2
-  --- a/subdir/f2	Thu Jan 01 00:00:17 1970 +0000
-  +++ b/subdir/f2	Thu Jan 01 00:00:18 1970 +0000
-  @@ -1,1 +1,2 @@
-   b
-  +b
-  
-
-f
-
-  $ hg record -d '19 0' -my <<EOF
-  > f
-  > EOF
-  diff --git a/subdir/f1 b/subdir/f1
-  1 hunks, 1 lines changed
-  examine changes to 'subdir/f1'? [Ynesfdaq?] f
-  
-
-  $ hg tip -p
-  changeset:   21:38ec577f126b
-  tag:         tip
-  user:        test
-  date:        Thu Jan 01 00:00:19 1970 +0000
-  summary:     y
-  
-  diff -r b3df3dda369a -r 38ec577f126b subdir/f1
-  --- a/subdir/f1	Thu Jan 01 00:00:18 1970 +0000
-  +++ b/subdir/f1	Thu Jan 01 00:00:19 1970 +0000
-  @@ -1,1 +1,2 @@
-   a
-  +a
-  
-
-#if execbit
-
-Preserve chmod +x
-
-  $ chmod +x f1
-  $ echo a >> f1
-  $ hg record -d '20 0' -mz <<EOF
-  > y
-  > y
-  > y
-  > EOF
-  diff --git a/subdir/f1 b/subdir/f1
-  old mode 100644
-  new mode 100755
-  1 hunks, 1 lines changed
-  examine changes to 'subdir/f1'? [Ynesfdaq?] y
-  
-  @@ -1,2 +1,3 @@
-   a
-   a
-  +a
-  record this change to 'subdir/f1'? [Ynesfdaq?] y
-  
-
-  $ hg tip --config diff.git=True -p
-  changeset:   22:3261adceb075
-  tag:         tip
-  user:        test
-  date:        Thu Jan 01 00:00:20 1970 +0000
-  summary:     z
-  
-  diff --git a/subdir/f1 b/subdir/f1
-  old mode 100644
-  new mode 100755
-  --- a/subdir/f1
-  +++ b/subdir/f1
-  @@ -1,2 +1,3 @@
-   a
-   a
-  +a
-  
-
-Preserve execute permission on original
-
-  $ echo b >> f1
-  $ hg record -d '21 0' -maa <<EOF
-  > y
-  > y
-  > y
-  > EOF
-  diff --git a/subdir/f1 b/subdir/f1
-  1 hunks, 1 lines changed
-  examine changes to 'subdir/f1'? [Ynesfdaq?] y
-  
-  @@ -1,3 +1,4 @@
-   a
-   a
-   a
-  +b
-  record this change to 'subdir/f1'? [Ynesfdaq?] y
-  
-
-  $ hg tip --config diff.git=True -p
-  changeset:   23:b429867550db
-  tag:         tip
-  user:        test
-  date:        Thu Jan 01 00:00:21 1970 +0000
-  summary:     aa
-  
-  diff --git a/subdir/f1 b/subdir/f1
-  --- a/subdir/f1
-  +++ b/subdir/f1
-  @@ -1,3 +1,4 @@
-   a
-   a
-   a
-  +b
-  
-
-Preserve chmod -x
-
-  $ chmod -x f1
-  $ echo c >> f1
-  $ hg record -d '22 0' -mab <<EOF
-  > y
-  > y
-  > y
-  > EOF
-  diff --git a/subdir/f1 b/subdir/f1
-  old mode 100755
-  new mode 100644
-  1 hunks, 1 lines changed
-  examine changes to 'subdir/f1'? [Ynesfdaq?] y
-  
-  @@ -2,3 +2,4 @@
-   a
-   a
-   b
-  +c
-  record this change to 'subdir/f1'? [Ynesfdaq?] y
-  
-
-  $ hg tip --config diff.git=True -p
-  changeset:   24:0b082130c20a
-  tag:         tip
-  user:        test
-  date:        Thu Jan 01 00:00:22 1970 +0000
-  summary:     ab
-  
-  diff --git a/subdir/f1 b/subdir/f1
-  old mode 100755
-  new mode 100644
-  --- a/subdir/f1
-  +++ b/subdir/f1
-  @@ -2,3 +2,4 @@
-   a
-   a
-   b
-  +c
-  
-
-#else
-
-Slightly bogus tests to get almost same repo structure as when x bit is used
-- but with different hashes.
-
-Mock "Preserve chmod +x"
-
-  $ echo a >> f1
-  $ hg record -d '20 0' -mz <<EOF
-  > y
-  > y
-  > y
-  > EOF
-  diff --git a/subdir/f1 b/subdir/f1
-  1 hunks, 1 lines changed
-  examine changes to 'subdir/f1'? [Ynesfdaq?] y
-  
-  @@ -1,2 +1,3 @@
-   a
-   a
-  +a
-  record this change to 'subdir/f1'? [Ynesfdaq?] y
-  
-
-  $ hg tip --config diff.git=True -p
-  changeset:   22:0d463bd428f5
-  tag:         tip
-  user:        test
-  date:        Thu Jan 01 00:00:20 1970 +0000
-  summary:     z
-  
-  diff --git a/subdir/f1 b/subdir/f1
-  --- a/subdir/f1
-  +++ b/subdir/f1
-  @@ -1,2 +1,3 @@
-   a
-   a
-  +a
-  
-
-Mock "Preserve execute permission on original"
-
-  $ echo b >> f1
-  $ hg record -d '21 0' -maa <<EOF
-  > y
-  > y
-  > y
-  > EOF
-  diff --git a/subdir/f1 b/subdir/f1
-  1 hunks, 1 lines changed
-  examine changes to 'subdir/f1'? [Ynesfdaq?] y
-  
-  @@ -1,3 +1,4 @@
-   a
-   a
-   a
-  +b
-  record this change to 'subdir/f1'? [Ynesfdaq?] y
-  
-
-  $ hg tip --config diff.git=True -p
-  changeset:   23:0eab41a3e524
-  tag:         tip
-  user:        test
-  date:        Thu Jan 01 00:00:21 1970 +0000
-  summary:     aa
-  
-  diff --git a/subdir/f1 b/subdir/f1
-  --- a/subdir/f1
-  +++ b/subdir/f1
-  @@ -1,3 +1,4 @@
-   a
-   a
-   a
-  +b
-  
-
-Mock "Preserve chmod -x"
-
-  $ chmod -x f1
-  $ echo c >> f1
-  $ hg record -d '22 0' -mab <<EOF
-  > y
-  > y
-  > y
-  > EOF
-  diff --git a/subdir/f1 b/subdir/f1
-  1 hunks, 1 lines changed
-  examine changes to 'subdir/f1'? [Ynesfdaq?] y
-  
-  @@ -2,3 +2,4 @@
-   a
-   a
-   b
-  +c
-  record this change to 'subdir/f1'? [Ynesfdaq?] y
-  
-
-  $ hg tip --config diff.git=True -p
-  changeset:   24:f4f718f27b7c
-  tag:         tip
-  user:        test
-  date:        Thu Jan 01 00:00:22 1970 +0000
-  summary:     ab
-  
-  diff --git a/subdir/f1 b/subdir/f1
-  --- a/subdir/f1
-  +++ b/subdir/f1
-  @@ -2,3 +2,4 @@
-   a
-   a
-   b
-  +c
-  
-
-#endif
-
-  $ cd ..
-
-
-Abort early when a merge is in progress
-
-  $ hg up 4
-  1 files updated, 0 files merged, 6 files removed, 0 files unresolved
-
-  $ touch iwillmergethat
-  $ hg add iwillmergethat
-
-  $ hg branch thatbranch
-  marked working directory as branch thatbranch
-  (branches are permanent and global, did you want a bookmark?)
-
-  $ hg ci -m'new head'
-
-  $ hg up default
-  6 files updated, 0 files merged, 2 files removed, 0 files unresolved
-
-  $ hg merge thatbranch
-  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  (branch merge, don't forget to commit)
-
-  $ hg record -m'will abort'
-  abort: cannot partially commit a merge (use "hg commit" instead)
-  [255]
-
-  $ hg up -C
-  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
-
-Editing patch (and ignoring trailing text)
-
-  $ cat > editor.sh << '__EOF__'
-  > sed -e 7d -e '5s/^-/ /' -e '/^# ---/i\
-  > trailing\nditto' "$1" > tmp
-  > mv tmp "$1"
-  > __EOF__
-  $ cat > editedfile << '__EOF__'
-  > This is the first line
-  > This is the second line
-  > This is the third line
-  > __EOF__
-  $ hg add editedfile
-  $ hg commit -medit-patch-1
-  $ cat > editedfile << '__EOF__'
-  > This line has changed
-  > This change will be committed
-  > This is the third line
-  > __EOF__
-  $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg record -d '23 0' -medit-patch-2 <<EOF
-  > y
-  > e
-  > EOF
-  diff --git a/editedfile b/editedfile
-  1 hunks, 2 lines changed
-  examine changes to 'editedfile'? [Ynesfdaq?] y
-  
-  @@ -1,3 +1,3 @@
-  -This is the first line
-  -This is the second line
-  +This line has changed
-  +This change will be committed
-   This is the third line
-  record this change to 'editedfile'? [Ynesfdaq?] e
-  
-  $ cat editedfile
-  This line has changed
-  This change will be committed
-  This is the third line
-  $ hg cat -r tip editedfile
-  This is the first line
-  This change will be committed
-  This is the third line
-  $ hg revert editedfile
-
-Trying to edit patch for whole file
-
-  $ echo "This is the fourth line" >> editedfile
-  $ hg record <<EOF
-  > e
-  > q
-  > EOF
-  diff --git a/editedfile b/editedfile
-  1 hunks, 1 lines changed
-  examine changes to 'editedfile'? [Ynesfdaq?] e
-  
-  cannot edit patch for whole file
-  examine changes to 'editedfile'? [Ynesfdaq?] q
-  
-  abort: user quit
-  [255]
-  $ hg revert editedfile
-
-Removing changes from patch
-
-  $ sed -e '3s/third/second/' -e '2s/will/will not/' -e 1d editedfile > tmp
-  $ mv tmp editedfile
-  $ echo "This line has been added" >> editedfile
-  $ cat > editor.sh << '__EOF__'
-  > sed -e 's/^[-+]/ /' "$1" > tmp
-  > mv tmp "$1"
-  > __EOF__
-  $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg record <<EOF
-  > y
-  > e
-  > EOF
-  diff --git a/editedfile b/editedfile
-  1 hunks, 3 lines changed
-  examine changes to 'editedfile'? [Ynesfdaq?] y
-  
-  @@ -1,3 +1,3 @@
-  -This is the first line
-  -This change will be committed
-  -This is the third line
-  +This change will not be committed
-  +This is the second line
-  +This line has been added
-  record this change to 'editedfile'? [Ynesfdaq?] e
-  
-  no changes to record
-  $ cat editedfile
-  This change will not be committed
-  This is the second line
-  This line has been added
-  $ hg cat -r tip editedfile
-  This is the first line
-  This change will be committed
-  This is the third line
-  $ hg revert editedfile
-
-Invalid patch
-
-  $ sed -e '3s/third/second/' -e '2s/will/will not/' -e 1d editedfile > tmp
-  $ mv tmp editedfile
-  $ echo "This line has been added" >> editedfile
-  $ cat > editor.sh << '__EOF__'
-  > sed s/This/That/ "$1" > tmp
-  > mv tmp "$1"
-  > __EOF__
-  $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg record <<EOF
-  > y
-  > e
-  > EOF
-  diff --git a/editedfile b/editedfile
-  1 hunks, 3 lines changed
-  examine changes to 'editedfile'? [Ynesfdaq?] y
-  
-  @@ -1,3 +1,3 @@
-  -This is the first line
-  -This change will be committed
-  -This is the third line
-  +This change will not be committed
-  +This is the second line
-  +This line has been added
-  record this change to 'editedfile'? [Ynesfdaq?] e
-  
-  patching file editedfile
-  Hunk #1 FAILED at 0
-  1 out of 1 hunks FAILED -- saving rejects to file editedfile.rej
-  abort: patch failed to apply
-  [255]
-  $ cat editedfile
-  This change will not be committed
-  This is the second line
-  This line has been added
-  $ hg cat -r tip editedfile
-  This is the first line
-  This change will be committed
-  This is the third line
-  $ cat editedfile.rej
-  --- editedfile
-  +++ editedfile
-  @@ -1,3 +1,3 @@
-  -That is the first line
-  -That change will be committed
-  -That is the third line
-  +That change will not be committed
-  +That is the second line
-  +That line has been added
-
-Malformed patch - error handling
-
-  $ cat > editor.sh << '__EOF__'
-  > sed -e '/^@/p' "$1" > tmp
-  > mv tmp "$1"
-  > __EOF__
-  $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg record <<EOF
-  > y
-  > e
-  > EOF
-  diff --git a/editedfile b/editedfile
-  1 hunks, 3 lines changed
-  examine changes to 'editedfile'? [Ynesfdaq?] y
-  
-  @@ -1,3 +1,3 @@
-  -This is the first line
-  -This change will be committed
-  -This is the third line
-  +This change will not be committed
-  +This is the second line
-  +This line has been added
-  record this change to 'editedfile'? [Ynesfdaq?] e
-  
-  abort: error parsing patch: unhandled transition: range -> range
-  [255]
-
-random text in random positions is still an error
-
-  $ cat > editor.sh << '__EOF__'
-  > sed -e '/^@/i\
-  > other' "$1" > tmp
-  > mv tmp "$1"
-  > __EOF__
-  $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg record <<EOF
-  > y
-  > e
-  > EOF
-  diff --git a/editedfile b/editedfile
-  1 hunks, 3 lines changed
-  examine changes to 'editedfile'? [Ynesfdaq?] y
-  
-  @@ -1,3 +1,3 @@
-  -This is the first line
-  -This change will be committed
-  -This is the third line
-  +This change will not be committed
-  +This is the second line
-  +This line has been added
-  record this change to 'editedfile'? [Ynesfdaq?] e
-  
-  abort: error parsing patch: unhandled transition: file -> other
-  [255]
-
-  $ hg up -C
-  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
-
-With win32text
-
-  $ echo '[extensions]' >> .hg/hgrc
-  $ echo 'win32text = ' >> .hg/hgrc
-  $ echo '[decode]' >> .hg/hgrc
-  $ echo '** = cleverdecode:' >> .hg/hgrc
-  $ echo '[encode]' >> .hg/hgrc
-  $ echo '** = cleverencode:' >> .hg/hgrc
-  $ echo '[patch]' >> .hg/hgrc
-  $ echo 'eol = crlf' >> .hg/hgrc
-
-Ignore win32text deprecation warning for now:
-
-  $ echo '[win32text]' >> .hg/hgrc
-  $ echo 'warn = no' >> .hg/hgrc
-
-  $ echo d >> subdir/f1
-  $ hg record -d '24 0' -mw1 <<EOF
-  > y
-  > y
-  > EOF
-  diff --git a/subdir/f1 b/subdir/f1
-  1 hunks, 1 lines changed
-  examine changes to 'subdir/f1'? [Ynesfdaq?] y
-  
-  @@ -3,3 +3,4 @@
-   a
-   b
-   c
-  +d
-  record this change to 'subdir/f1'? [Ynesfdaq?] y
-  
-
-  $ hg tip -p
-  changeset:   28:* (glob)
-  tag:         tip
-  user:        test
-  date:        Thu Jan 01 00:00:24 1970 +0000
-  summary:     w1
-  
-  diff -r ???????????? -r ???????????? subdir/f1 (glob)
-  --- a/subdir/f1	Thu Jan 01 00:00:23 1970 +0000
-  +++ b/subdir/f1	Thu Jan 01 00:00:24 1970 +0000
-  @@ -3,3 +3,4 @@
-   a
-   b
-   c
-  +d
-  
-Test --user when ui.username not set
-  $ unset HGUSER
-  $ echo e >> subdir/f1
-  $ hg record  --config ui.username= -d '8 0' --user xyz -m "user flag" <<EOF
-  > y
-  > y
-  > EOF
-  diff --git a/subdir/f1 b/subdir/f1
-  1 hunks, 1 lines changed
-  examine changes to 'subdir/f1'? [Ynesfdaq?] y
-  
-  @@ -4,3 +4,4 @@
-   b
-   c
-   d
-  +e
-  record this change to 'subdir/f1'? [Ynesfdaq?] y
-  
-  $ hg log --template '{author}\n' -l 1
-  xyz
-  $ HGUSER="test"
-  $ export HGUSER
-
-  $ cd ..
--- a/tests/test-rename-merge2.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-rename-merge2.t	Thu Apr 16 20:57:51 2015 -0500
@@ -100,6 +100,8 @@
   picked tool 'python ../merge' for rev (binary False symlink False)
   merging rev
   my rev@e300d1c794ec+ other rev@4ce40f5aca24 ancestor rev@924404dff337
+  launching merge tool: python ../merge $TESTTMP/t/t/rev '*' '*' (glob)
+  merge tool returned: 0
   0 files updated, 2 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
   --------------
@@ -138,6 +140,8 @@
   picked tool 'python ../merge' for rev (binary False symlink False)
   merging rev
   my rev@86a2aa42fc76+ other rev@f4db7e329e71 ancestor rev@924404dff337
+  launching merge tool: python ../merge $TESTTMP/t/t/rev * (glob)
+  merge tool returned: 0
   1 files updated, 2 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
   --------------
@@ -174,6 +178,8 @@
   picked tool 'python ../merge' for rev (binary False symlink False)
   merging rev
   my rev@e300d1c794ec+ other rev@bdb19105162a ancestor rev@924404dff337
+  launching merge tool: python ../merge $TESTTMP/t/t/rev * * (glob)
+  merge tool returned: 0
   0 files updated, 2 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
   --------------
@@ -208,6 +214,8 @@
   picked tool 'python ../merge' for rev (binary False symlink False)
   merging rev
   my rev@02963e448370+ other rev@f4db7e329e71 ancestor rev@924404dff337
+  launching merge tool: python ../merge $TESTTMP/t/t/rev * * (glob)
+  merge tool returned: 0
   0 files updated, 2 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
   --------------
@@ -238,6 +246,8 @@
   picked tool 'python ../merge' for rev (binary False symlink False)
   merging rev
   my rev@94b33a1b7f2d+ other rev@4ce40f5aca24 ancestor rev@924404dff337
+  launching merge tool: python ../merge $TESTTMP/t/t/rev * * (glob)
+  merge tool returned: 0
   1 files updated, 1 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
   --------------
@@ -265,6 +275,8 @@
   picked tool 'python ../merge' for rev (binary False symlink False)
   merging rev
   my rev@86a2aa42fc76+ other rev@97c705ade336 ancestor rev@924404dff337
+  launching merge tool: python ../merge $TESTTMP/t/t/rev * * (glob)
+  merge tool returned: 0
   0 files updated, 1 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
   --------------
@@ -298,6 +310,8 @@
   picked tool 'python ../merge' for rev (binary False symlink False)
   merging rev
   my rev@94b33a1b7f2d+ other rev@bdb19105162a ancestor rev@924404dff337
+  launching merge tool: python ../merge $TESTTMP/t/t/rev * * (glob)
+  merge tool returned: 0
   1 files updated, 1 files merged, 1 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
   --------------
@@ -324,6 +338,8 @@
   picked tool 'python ../merge' for rev (binary False symlink False)
   merging rev
   my rev@02963e448370+ other rev@97c705ade336 ancestor rev@924404dff337
+  launching merge tool: python ../merge $TESTTMP/t/t/rev * * (glob)
+  merge tool returned: 0
   0 files updated, 1 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
   --------------
@@ -348,11 +364,15 @@
   picked tool 'python ../merge' for b (binary False symlink False)
   merging b
   my b@62e7bf090eba+ other b@49b6d8032493 ancestor a@924404dff337
+  launching merge tool: python ../merge $TESTTMP/t/t/b * * (glob)
+  merge tool returned: 0
    rev: versions differ -> m
   updating: rev 2/2 files (100.00%)
   picked tool 'python ../merge' for rev (binary False symlink False)
   merging rev
   my rev@62e7bf090eba+ other rev@49b6d8032493 ancestor rev@924404dff337
+  launching merge tool: python ../merge $TESTTMP/t/t/rev * * (glob)
+  merge tool returned: 0
   0 files updated, 2 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
   --------------
@@ -388,6 +408,8 @@
   picked tool 'python ../merge' for rev (binary False symlink False)
   merging rev
   my rev@02963e448370+ other rev@fe905ef2c33e ancestor rev@924404dff337
+  launching merge tool: python ../merge $TESTTMP/t/t/rev * * (glob)
+  merge tool returned: 0
   note: possible conflict - a was renamed multiple times to:
    b
    c
@@ -416,11 +438,15 @@
   picked tool 'python ../merge' for b (binary False symlink False)
   merging b
   my b@86a2aa42fc76+ other b@af30c7647fc7 ancestor b@000000000000
+  launching merge tool: python ../merge $TESTTMP/t/t/b * * (glob)
+  merge tool returned: 0
    rev: versions differ -> m
   updating: rev 2/2 files (100.00%)
   picked tool 'python ../merge' for rev (binary False symlink False)
   merging rev
   my rev@86a2aa42fc76+ other rev@af30c7647fc7 ancestor rev@924404dff337
+  launching merge tool: python ../merge $TESTTMP/t/t/rev * * (glob)
+  merge tool returned: 0
   0 files updated, 2 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
   --------------
@@ -449,11 +475,15 @@
   picked tool 'python ../merge' for b (binary False symlink False)
   merging b
   my b@59318016310c+ other b@bdb19105162a ancestor b@000000000000
+  launching merge tool: python ../merge $TESTTMP/t/t/b * * (glob)
+  merge tool returned: 0
    rev: versions differ -> m
   updating: rev 3/3 files (100.00%)
   picked tool 'python ../merge' for rev (binary False symlink False)
   merging rev
   my rev@59318016310c+ other rev@bdb19105162a ancestor rev@924404dff337
+  launching merge tool: python ../merge $TESTTMP/t/t/rev * * (glob)
+  merge tool returned: 0
   0 files updated, 2 files merged, 1 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
   --------------
@@ -481,11 +511,15 @@
   picked tool 'python ../merge' for b (binary False symlink False)
   merging b
   my b@86a2aa42fc76+ other b@8dbce441892a ancestor b@000000000000
+  launching merge tool: python ../merge $TESTTMP/t/t/b * * (glob)
+  merge tool returned: 0
    rev: versions differ -> m
   updating: rev 3/3 files (100.00%)
   picked tool 'python ../merge' for rev (binary False symlink False)
   merging rev
   my rev@86a2aa42fc76+ other rev@8dbce441892a ancestor rev@924404dff337
+  launching merge tool: python ../merge $TESTTMP/t/t/rev * * (glob)
+  merge tool returned: 0
   1 files updated, 2 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
   --------------
@@ -514,11 +548,15 @@
   picked tool 'python ../merge' for b (binary False symlink False)
   merging b
   my b@59318016310c+ other b@bdb19105162a ancestor b@000000000000
+  launching merge tool: python ../merge $TESTTMP/t/t/b * * (glob)
+  merge tool returned: 0
    rev: versions differ -> m
   updating: rev 3/3 files (100.00%)
   picked tool 'python ../merge' for rev (binary False symlink False)
   merging rev
   my rev@59318016310c+ other rev@bdb19105162a ancestor rev@924404dff337
+  launching merge tool: python ../merge $TESTTMP/t/t/rev * * (glob)
+  merge tool returned: 0
   0 files updated, 2 files merged, 1 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
   --------------
@@ -546,11 +584,15 @@
   picked tool 'python ../merge' for b (binary False symlink False)
   merging b
   my b@86a2aa42fc76+ other b@8dbce441892a ancestor b@000000000000
+  launching merge tool: python ../merge $TESTTMP/t/t/b * * (glob)
+  merge tool returned: 0
    rev: versions differ -> m
   updating: rev 3/3 files (100.00%)
   picked tool 'python ../merge' for rev (binary False symlink False)
   merging rev
   my rev@86a2aa42fc76+ other rev@8dbce441892a ancestor rev@924404dff337
+  launching merge tool: python ../merge $TESTTMP/t/t/rev * * (glob)
+  merge tool returned: 0
   1 files updated, 2 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
   --------------
@@ -577,11 +619,15 @@
   picked tool 'python ../merge' for b (binary False symlink False)
   merging b
   my b@0b76e65c8289+ other b@4ce40f5aca24 ancestor b@000000000000
+  launching merge tool: python ../merge $TESTTMP/t/t/b * * (glob)
+  merge tool returned: 0
    rev: versions differ -> m
   updating: rev 2/2 files (100.00%)
   picked tool 'python ../merge' for rev (binary False symlink False)
   merging rev
   my rev@0b76e65c8289+ other rev@4ce40f5aca24 ancestor rev@924404dff337
+  launching merge tool: python ../merge $TESTTMP/t/t/rev * * (glob)
+  merge tool returned: 0
   0 files updated, 2 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
   --------------
@@ -612,11 +658,15 @@
   picked tool 'python ../merge' for b (binary False symlink False)
   merging b
   my b@02963e448370+ other b@8dbce441892a ancestor b@000000000000
+  launching merge tool: python ../merge $TESTTMP/t/t/b * * (glob)
+  merge tool returned: 0
    rev: versions differ -> m
   updating: rev 3/3 files (100.00%)
   picked tool 'python ../merge' for rev (binary False symlink False)
   merging rev
   my rev@02963e448370+ other rev@8dbce441892a ancestor rev@924404dff337
+  launching merge tool: python ../merge $TESTTMP/t/t/rev * * (glob)
+  merge tool returned: 0
   1 files updated, 2 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
   --------------
@@ -646,11 +696,15 @@
   picked tool 'python ../merge' for b (binary False symlink False)
   merging b
   my b@0b76e65c8289+ other b@bdb19105162a ancestor b@000000000000
+  launching merge tool: python ../merge $TESTTMP/t/t/b * * (glob)
+  merge tool returned: 0
    rev: versions differ -> m
   updating: rev 3/3 files (100.00%)
   picked tool 'python ../merge' for rev (binary False symlink False)
   merging rev
   my rev@0b76e65c8289+ other rev@bdb19105162a ancestor rev@924404dff337
+  launching merge tool: python ../merge $TESTTMP/t/t/rev * * (glob)
+  merge tool returned: 0
   0 files updated, 2 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
   --------------
@@ -680,11 +734,15 @@
   picked tool 'python ../merge' for b (binary False symlink False)
   merging a and b to b
   my b@e300d1c794ec+ other b@49b6d8032493 ancestor a@924404dff337
+  launching merge tool: python ../merge $TESTTMP/t/t/b * * (glob)
+  merge tool returned: 0
    rev: versions differ -> m
   updating: rev 2/2 files (100.00%)
   picked tool 'python ../merge' for rev (binary False symlink False)
   merging rev
   my rev@e300d1c794ec+ other rev@49b6d8032493 ancestor rev@924404dff337
+  launching merge tool: python ../merge $TESTTMP/t/t/rev * * (glob)
+  merge tool returned: 0
   0 files updated, 2 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
   --------------
@@ -713,11 +771,15 @@
   picked tool 'python ../merge' for b (binary False symlink False)
   merging b and a to b
   my b@62e7bf090eba+ other a@f4db7e329e71 ancestor a@924404dff337
+  launching merge tool: python ../merge $TESTTMP/t/t/b * * (glob)
+  merge tool returned: 0
    rev: versions differ -> m
   updating: rev 2/2 files (100.00%)
   picked tool 'python ../merge' for rev (binary False symlink False)
   merging rev
   my rev@62e7bf090eba+ other rev@f4db7e329e71 ancestor rev@924404dff337
+  launching merge tool: python ../merge $TESTTMP/t/t/rev * * (glob)
+  merge tool returned: 0
   0 files updated, 2 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
   --------------
@@ -760,6 +822,8 @@
   picked tool 'python ../merge' for rev (binary False symlink False)
   merging rev
   my rev@02963e448370+ other rev@2b958612230f ancestor rev@924404dff337
+  launching merge tool: python ../merge $TESTTMP/t/t/rev * * (glob)
+  merge tool returned: 0
   1 files updated, 2 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
   --------------
--- a/tests/test-rename.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-rename.t	Thu Apr 16 20:57:51 2015 -0500
@@ -620,10 +620,16 @@
   $ hg rename d1/d11/a1 .hg
   abort: path contains illegal component: .hg/a1 (glob)
   [255]
+  $ hg --config extensions.largefiles= rename d1/d11/a1 .hg
+  abort: path contains illegal component: .hg/a1 (glob)
+  [255]
   $ hg status -C
   $ hg rename d1/d11/a1 ..
   abort: ../a1 not under root '$TESTTMP' (glob)
   [255]
+  $ hg --config extensions.largefiles= rename d1/d11/a1 ..
+  abort: ../a1 not under root '$TESTTMP' (glob)
+  [255]
   $ hg status -C
 
   $ mv d1/d11/a1 .hg
--- a/tests/test-resolve.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-resolve.t	Thu Apr 16 20:57:51 2015 -0500
@@ -43,10 +43,15 @@
   U file1
   U file2
 
-resolving an unknown path should emit a warning
+  $ hg resolve -l --no-status
+  file1
+  file2
+
+resolving an unknown path should emit a warning, but not for -l
 
   $ hg resolve -m does-not-exist
   arguments do not match paths that need resolving
+  $ hg resolve -l does-not-exist
 
 resolve the failure
 
@@ -59,6 +64,18 @@
   R file1
   U file2
 
+  $ hg resolve -l -Tjson
+  [
+   {
+    "path": "file1",
+    "status": "R"
+   },
+   {
+    "path": "file2",
+    "status": "U"
+   }
+  ]
+
 resolve -m without paths should mark all resolved
 
   $ hg resolve -m
@@ -69,6 +86,10 @@
 
   $ hg resolve -l
 
+  $ hg resolve -l -Tjson
+  [
+  ]
+
 resolve --all should abort when no merge in progress
 
   $ hg resolve --all
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-revert-interactive.t	Thu Apr 16 20:57:51 2015 -0500
@@ -0,0 +1,272 @@
+Revert interactive tests
+1 add and commit file f
+2 add and commit file folder1/g
+3 add and commit file folder2/h
+4 add and commit file folder1/i
+5 commit change to file f
+6 commit change to file folder1/g
+7 commit change to file folder2/h
+8 revert interactively to commit id 2 (line 3 above), check that folder1/i is removed
+9 make workdir match 7
+10 run the same test as 8 from within folder1 and check the same expectations
+
+  $ cat <<EOF >> $HGRCPATH
+  > [ui]
+  > interactive = true
+  > [extensions]
+  > record =
+  > EOF
+
+
+  $ mkdir -p a/folder1 a/folder2
+  $ cd a
+  $ hg init
+  >>> open('f', 'wb').write("1\n2\n3\n4\n5\n")
+  $ hg add f ; hg commit -m "adding f"
+  $ cat f > folder1/g ; hg add folder1/g ; hg commit -m "adding folder1/g"
+  $ cat f > folder2/h ; hg add folder2/h ; hg commit -m "adding folder2/h"
+  $ cat f > folder1/i ; hg add folder1/i ; hg commit -m "adding folder1/i"
+  >>> open('f', 'wb').write("a\n1\n2\n3\n4\n5\nb\n")
+  $ hg commit -m "modifying f"
+  >>> open('folder1/g', 'wb').write("c\n1\n2\n3\n4\n5\nd\n")
+  $ hg commit -m "modifying folder1/g"
+  >>> open('folder2/h', 'wb').write("e\n1\n2\n3\n4\n5\nf\n")
+  $ hg commit -m "modifying folder2/h"
+  $ hg tip
+  changeset:   6:59dd6e4ab63a
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     modifying folder2/h
+  
+  $ hg revert -i -r 2 --all -- << EOF
+  > y
+  > y
+  > y
+  > y
+  > y
+  > n
+  > n
+  > EOF
+  reverting f
+  reverting folder1/g (glob)
+  removing folder1/i (glob)
+  reverting folder2/h (glob)
+  diff -r 89ac3d72e4a4 f
+  2 hunks, 2 lines changed
+  examine changes to 'f'? [Ynesfdaq?] y
+  
+  @@ -1,6 +1,5 @@
+  -a
+   1
+   2
+   3
+   4
+   5
+  record change 1/6 to 'f'? [Ynesfdaq?] y
+  
+  @@ -2,6 +1,5 @@
+   1
+   2
+   3
+   4
+   5
+  -b
+  record change 2/6 to 'f'? [Ynesfdaq?] y
+  
+  diff -r 89ac3d72e4a4 folder1/g
+  2 hunks, 2 lines changed
+  examine changes to 'folder1/g'? [Ynesfdaq?] y
+  
+  @@ -1,6 +1,5 @@
+  -c
+   1
+   2
+   3
+   4
+   5
+  record change 3/6 to 'folder1/g'? [Ynesfdaq?] y
+  
+  @@ -2,6 +1,5 @@
+   1
+   2
+   3
+   4
+   5
+  -d
+  record change 4/6 to 'folder1/g'? [Ynesfdaq?] n
+  
+  diff -r 89ac3d72e4a4 folder2/h
+  2 hunks, 2 lines changed
+  examine changes to 'folder2/h'? [Ynesfdaq?] n
+  
+  $ cat f
+  1
+  2
+  3
+  4
+  5
+  $ cat folder1/g
+  1
+  2
+  3
+  4
+  5
+  d
+  $ cat folder2/h
+  e
+  1
+  2
+  3
+  4
+  5
+  f
+
+Test that --interactive lifts the need for --all
+
+  $ echo q | hg revert -i -r 2
+  reverting folder1/g (glob)
+  reverting folder2/h (glob)
+  diff -r 89ac3d72e4a4 folder1/g
+  1 hunks, 1 lines changed
+  examine changes to 'folder1/g'? [Ynesfdaq?] q
+  
+  abort: user quit
+  [255]
+  $ rm folder1/g.orig
+
+
+  $ hg update -C 6
+  3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ hg revert -i -r 2 --all -- << EOF
+  > y
+  > y
+  > y
+  > y
+  > y
+  > n
+  > n
+  > EOF
+  reverting f
+  reverting folder1/g (glob)
+  removing folder1/i (glob)
+  reverting folder2/h (glob)
+  diff -r 89ac3d72e4a4 f
+  2 hunks, 2 lines changed
+  examine changes to 'f'? [Ynesfdaq?] y
+  
+  @@ -1,6 +1,5 @@
+  -a
+   1
+   2
+   3
+   4
+   5
+  record change 1/6 to 'f'? [Ynesfdaq?] y
+  
+  @@ -2,6 +1,5 @@
+   1
+   2
+   3
+   4
+   5
+  -b
+  record change 2/6 to 'f'? [Ynesfdaq?] y
+  
+  diff -r 89ac3d72e4a4 folder1/g
+  2 hunks, 2 lines changed
+  examine changes to 'folder1/g'? [Ynesfdaq?] y
+  
+  @@ -1,6 +1,5 @@
+  -c
+   1
+   2
+   3
+   4
+   5
+  record change 3/6 to 'folder1/g'? [Ynesfdaq?] y
+  
+  @@ -2,6 +1,5 @@
+   1
+   2
+   3
+   4
+   5
+  -d
+  record change 4/6 to 'folder1/g'? [Ynesfdaq?] n
+  
+  diff -r 89ac3d72e4a4 folder2/h
+  2 hunks, 2 lines changed
+  examine changes to 'folder2/h'? [Ynesfdaq?] n
+  
+  $ cat f
+  1
+  2
+  3
+  4
+  5
+  $ cat folder1/g
+  1
+  2
+  3
+  4
+  5
+  d
+  $ cat folder2/h
+  e
+  1
+  2
+  3
+  4
+  5
+  f
+  $ hg st
+  M f
+  M folder1/g
+  R folder1/i
+  $ hg revert --interactive f << EOF
+  > y
+  > y
+  > n
+  > n
+  > EOF
+  diff -r 59dd6e4ab63a f
+  2 hunks, 2 lines changed
+  examine changes to 'f'? [Ynesfdaq?] y
+  
+  @@ -1,5 +1,6 @@
+  +a
+   1
+   2
+   3
+   4
+   5
+  record change 1/2 to 'f'? [Ynesfdaq?] y
+  
+  @@ -1,5 +2,6 @@
+   1
+   2
+   3
+   4
+   5
+  +b
+  record change 2/2 to 'f'? [Ynesfdaq?] n
+  
+  $ hg st
+  M f
+  M folder1/g
+  R folder1/i
+  ? f.orig
+  $ cat f
+  a
+  1
+  2
+  3
+  4
+  5
+  $ cat f.orig
+  1
+  2
+  3
+  4
+  5
--- a/tests/test-revset-dirstate-parents.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-revset-dirstate-parents.t	Thu Apr 16 20:57:51 2015 -0500
@@ -16,14 +16,20 @@
   (func
     ('symbol', 'p1')
     None)
+  * set:
+  <baseset []>
   $ try 'p2()'
   (func
     ('symbol', 'p2')
     None)
+  * set:
+  <baseset []>
   $ try 'parents()'
   (func
     ('symbol', 'parents')
     None)
+  * set:
+  <baseset+ []>
 
 null revision
   $ log 'p1()'
--- a/tests/test-revset.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-revset.t	Thu Apr 16 20:57:51 2015 -0500
@@ -93,31 +93,81 @@
   $ echo "[paths]" >> .hg/hgrc
   $ echo "default = ../remote1" >> .hg/hgrc
 
+trivial
+
+  $ try 0:1
+  (range
+    ('symbol', '0')
+    ('symbol', '1'))
+  * set:
+  <spanset+ 0:1>
+  0
+  1
+  $ try 3::6
+  (dagrange
+    ('symbol', '3')
+    ('symbol', '6'))
+  * set:
+  <baseset [3, 5, 6]>
+  3
+  5
+  6
+  $ try '0|1|2'
+  (or
+    (or
+      ('symbol', '0')
+      ('symbol', '1'))
+    ('symbol', '2'))
+  * set:
+  <addset
+    <addset
+      <baseset [0]>,
+      <baseset [1]>>,
+    <baseset [2]>>
+  0
+  1
+  2
+
 names that should work without quoting
 
   $ try a
   ('symbol', 'a')
+  * set:
+  <baseset [0]>
   0
   $ try b-a
   (minus
     ('symbol', 'b')
     ('symbol', 'a'))
+  * set:
+  <filteredset
+    <baseset [1]>>
   1
   $ try _a_b_c_
   ('symbol', '_a_b_c_')
+  * set:
+  <baseset [6]>
   6
   $ try _a_b_c_-a
   (minus
     ('symbol', '_a_b_c_')
     ('symbol', 'a'))
+  * set:
+  <filteredset
+    <baseset [6]>>
   6
   $ try .a.b.c.
   ('symbol', '.a.b.c.')
+  * set:
+  <baseset [7]>
   7
   $ try .a.b.c.-a
   (minus
     ('symbol', '.a.b.c.')
     ('symbol', 'a'))
+  * set:
+  <filteredset
+    <baseset [7]>>
   7
   $ try -- '-a-b-c-' # complains
   hg: parse error at 7: not a prefix: end
@@ -139,6 +189,8 @@
   [255]
   $ try é
   ('symbol', '\xc3\xa9')
+  * set:
+  <baseset [9]>
   9
 
 no quoting needed
@@ -154,6 +206,9 @@
   (minus
     ('string', '-a-b-c-')
     ('symbol', 'a'))
+  * set:
+  <filteredset
+    <baseset [4]>>
   4
 
   $ log '1 or 2'
@@ -170,6 +225,10 @@
       ('symbol', '1')
       ('symbol', '2'))
     ('symbol', '3'))
+  * set:
+  <addset
+    <baseset []>,
+    <baseset [3]>>
   3
   $ try '1|2&3'
   (or
@@ -177,6 +236,10 @@
     (and
       ('symbol', '2')
       ('symbol', '3')))
+  * set:
+  <addset
+    <baseset [1]>,
+    <baseset []>>
   1
   $ try '1&2&3' # associativity
   (and
@@ -184,6 +247,8 @@
       ('symbol', '1')
       ('symbol', '2'))
     ('symbol', '3'))
+  * set:
+  <baseset []>
   $ try '1|(2|3)'
   (or
     ('symbol', '1')
@@ -191,6 +256,12 @@
       (or
         ('symbol', '2')
         ('symbol', '3'))))
+  * set:
+  <addset
+    <baseset [1]>,
+    <addset
+      <baseset [2]>,
+      <baseset [3]>>>
   1
   2
   3
@@ -325,10 +396,16 @@
   (func
     ('symbol', 'grep')
     ('string', '\x08issue\\d+'))
+  * set:
+  <filteredset
+    <fullreposet+ 0:9>>
   $ try 'grep(r"\bissue\d+")'
   (func
     ('symbol', 'grep')
     ('string', '\\bissue\\d+'))
+  * set:
+  <filteredset
+    <fullreposet+ 0:9>>
   6
   $ try 'grep(r"\")'
   hg: parse error at 7: unterminated string
@@ -478,8 +555,44 @@
   [255]
 
 Test null revision
+  $ log '(null)'
+  -1
+  $ log '(null:0)'
+  -1
+  0
+  $ log '(0:null)'
+  0
+  -1
+  $ log 'null::0'
+  -1
+  0
+  $ log 'null:tip - 0:'
+  -1
+  $ log 'null: and null::' | head -1
+  -1
+  $ log 'null: or 0:' | head -2
+  -1
+  0
   $ log 'ancestors(null)'
   -1
+  $ log 'reverse(null:)' | tail -2
+  0
+  -1
+  $ log 'first(null:)'
+  -1
+  $ log 'min(null:)'
+  -1
+  $ log 'tip:null and all()' | tail -2
+  1
+  0
+
+Test working-directory revision
+  $ hg debugrevspec 'wdir()'
+  None
+  $ hg debugrevspec 'tip or wdir()'
+  9
+  None
+  $ hg debugrevspec '0:tip and wdir()'
 
   $ log 'outgoing()'
   8
@@ -655,6 +768,8 @@
     (list
       ('symbol', '3')
       ('symbol', '1')))
+  * set:
+  <baseset+ [3]>
   3
   $ try --optimize 'ancestors(1) - ancestors(3)'
   (minus
@@ -670,6 +785,8 @@
     (list
       ('symbol', '1')
       ('symbol', '3')))
+  * set:
+  <baseset+ []>
   $ try --optimize 'not ::2 and ::6'
   (and
     (not
@@ -683,6 +800,8 @@
     (list
       ('symbol', '6')
       ('symbol', '2')))
+  * set:
+  <baseset+ [3, 4, 5, 6]>
   3
   4
   5
@@ -702,6 +821,8 @@
     (list
       ('symbol', '6')
       ('symbol', '4')))
+  * set:
+  <baseset+ [3, 5, 6]>
   3
   5
   6
@@ -860,6 +981,23 @@
   hg: parse error: ^ expects a number 0, 1, or 2
   [255]
 
+Bogus function gets suggestions
+  $ log 'add()'
+  hg: parse error: unknown identifier: add
+  (did you mean 'adds'?)
+  [255]
+  $ log 'added()'
+  hg: parse error: unknown identifier: added
+  (did you mean 'adds'?)
+  [255]
+  $ log 'remo()'
+  hg: parse error: unknown identifier: remo
+  (did you mean one of remote, removes?)
+  [255]
+  $ log 'babar()'
+  hg: parse error: unknown identifier: babar
+  [255]
+
 multiple revspecs
 
   $ hg log -r 'tip~1:tip' -r 'tip~2:tip~1' --template '{rev}\n'
@@ -921,6 +1059,9 @@
   (func
     ('symbol', 'merge')
     None)
+  * set:
+  <filteredset
+    <fullreposet+ 0:9>>
   6
 
 test alias recursion
@@ -932,6 +1073,11 @@
     (func
       ('symbol', 'merge')
       None))
+  * set:
+  <addset+
+    <filteredset
+      <fullreposet+ 0:9>>,
+    <generatorset+>>
   6
   7
 
@@ -961,6 +1107,12 @@
     (or
       ('symbol', '1')
       ('symbol', '2')))
+  * set:
+  <addset
+    <baseset [3]>,
+    <addset
+      <baseset [1]>,
+      <baseset [2]>>>
   3
   1
   2
@@ -981,6 +1133,8 @@
     (range
       ('symbol', '2')
       ('symbol', '5')))
+  * set:
+  <baseset [5]>
   5
 
 test variable isolation, variable placeholders are rewritten as string
@@ -1011,23 +1165,48 @@
     (range
       ('symbol', '2')
       ('symbol', '5')))
-  abort: failed to parse the definition of revset alias "injectparamasstring2": not a function: _aliasarg
+  abort: failed to parse the definition of revset alias "injectparamasstring2": unknown identifier: _aliasarg
   [255]
   $ hg debugrevspec --debug --config revsetalias.anotherbadone='branch(' "tip"
   ('symbol', 'tip')
   warning: failed to parse the definition of revset alias "anotherbadone": at 7: not a prefix: end
-  warning: failed to parse the definition of revset alias "injectparamasstring2": not a function: _aliasarg
+  warning: failed to parse the definition of revset alias "injectparamasstring2": unknown identifier: _aliasarg
+  * set:
+  <baseset [9]>
   9
   >>> data = file('.hg/hgrc', 'rb').read()
   >>> file('.hg/hgrc', 'wb').write(data.replace('_aliasarg', ''))
 
   $ try 'tip'
   ('symbol', 'tip')
+  * set:
+  <baseset [9]>
   9
 
   $ hg debugrevspec --debug --config revsetalias.'bad name'='tip' "tip"
   ('symbol', 'tip')
   warning: failed to parse the declaration of revset alias "bad name": at 4: invalid token
+  * set:
+  <baseset [9]>
+  9
+  $ echo 'strictreplacing($1, $10) = $10 or desc("$1")' >> .hg/hgrc
+  $ try 'strictreplacing("foo", tip)'
+  (func
+    ('symbol', 'strictreplacing')
+    (list
+      ('string', 'foo')
+      ('symbol', 'tip')))
+  (or
+    ('symbol', 'tip')
+    (func
+      ('symbol', 'desc')
+      ('string', '$1')))
+  * set:
+  <addset
+    <baseset [9]>,
+    <filteredset
+      <filteredset
+        <fullreposet+ 0:9>>>>
   9
 
   $ try 'd(2:5)'
@@ -1045,6 +1224,8 @@
           ('symbol', '2')
           ('symbol', '5'))
         ('symbol', 'date'))))
+  * set:
+  <baseset [4, 5, 3, 2]>
   4
   5
   3
@@ -1066,6 +1247,8 @@
           ('symbol', '2')
           ('symbol', '3'))
         ('symbol', 'date'))))
+  * set:
+  <baseset [3, 2]>
   3
   2
   $ try 'rs()'
@@ -1111,9 +1294,67 @@
           ('symbol', '2')
           ('symbol', '3'))
         ('symbol', 'date'))))
+  * set:
+  <baseset [3, 2]>
   3
   2
 
+issue4553: check that revset aliases override existing hash prefix
+
+  $ hg log -qr e
+  6:e0cc66ef77e8
+
+  $ hg log -qr e --config revsetalias.e="all()"
+  0:2785f51eece5
+  1:d75937da8da0
+  2:5ed5505e9f1c
+  3:8528aa5637f2
+  4:2326846efdab
+  5:904fa392b941
+  6:e0cc66ef77e8
+  7:013af1973af4
+  8:d5d0dcbdc4d9
+  9:24286f4ae135
+
+  $ hg log -qr e: --config revsetalias.e="0"
+  0:2785f51eece5
+  1:d75937da8da0
+  2:5ed5505e9f1c
+  3:8528aa5637f2
+  4:2326846efdab
+  5:904fa392b941
+  6:e0cc66ef77e8
+  7:013af1973af4
+  8:d5d0dcbdc4d9
+  9:24286f4ae135
+
+  $ hg log -qr :e --config revsetalias.e="9"
+  0:2785f51eece5
+  1:d75937da8da0
+  2:5ed5505e9f1c
+  3:8528aa5637f2
+  4:2326846efdab
+  5:904fa392b941
+  6:e0cc66ef77e8
+  7:013af1973af4
+  8:d5d0dcbdc4d9
+  9:24286f4ae135
+
+  $ hg log -qr e:
+  6:e0cc66ef77e8
+  7:013af1973af4
+  8:d5d0dcbdc4d9
+  9:24286f4ae135
+
+  $ hg log -qr :e
+  0:2785f51eece5
+  1:d75937da8da0
+  2:5ed5505e9f1c
+  3:8528aa5637f2
+  4:2326846efdab
+  5:904fa392b941
+  6:e0cc66ef77e8
+
 issue2549 - correct optimizations
 
   $ log 'limit(1 or 2 or 3, 2) and not 2'
@@ -1195,6 +1436,8 @@
       ('symbol', '1ee'))
     ('string', 'ce5'))
   ('string', '2785f51eece5')
+  * set:
+  <baseset [0]>
   0
 
   $ echo 'cat4($1, $2, $3, $4) = $1 ## $2 ## $3 ## $4' >> .hg/hgrc
@@ -1216,6 +1459,8 @@
       ('symbol', '1ee'))
     ('string', 'ce5'))
   ('string', '2785f51eece5')
+  * set:
+  <baseset [0]>
   0
 
 (check concatenation in alias nesting)
@@ -1288,4 +1533,9 @@
   1
   3
 
+test error message of bad revset
+  $ hg log -r 'foo\\'
+  hg: parse error at 3: syntax error in revset 'foo\\'
+  [255]
+
   $ cd ..
--- a/tests/test-run-tests.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-run-tests.t	Thu Apr 16 20:57:51 2015 -0500
@@ -31,6 +31,10 @@
   > this test is still more bytes than success.
   > EOF
 
+  >>> fh = open('test-failure-unicode.t', 'wb')
+  >>> fh.write(u'  $ echo babar\u03b1\n'.encode('utf-8'))
+  >>> fh.write(u'  l\u03b5\u03b5t\n'.encode('utf-8'))
+
   $ $TESTDIR/run-tests.py --with-hg=`which hg`
   
   --- $TESTTMP/test-failure.t
@@ -44,10 +48,21 @@
   
   ERROR: test-failure.t output changed
   !.
+  --- $TESTTMP/test-failure-unicode.t
+  +++ $TESTTMP/test-failure-unicode.t.err
+  @@ -1,2 +1,2 @@
+     $ echo babar\xce\xb1 (esc)
+  -  l\xce\xb5\xce\xb5t (esc)
+  +  babar\xce\xb1 (esc)
+  
+  ERROR: test-failure-unicode.t output changed
+  !
   Failed test-failure.t: output changed
-  # Ran 2 tests, 0 skipped, 0 warned, 1 failed.
+  Failed test-failure-unicode.t: output changed
+  # Ran 3 tests, 0 skipped, 0 warned, 2 failed.
   python hash seed: * (glob)
   [1]
+
 test --xunit support
   $ $TESTDIR/run-tests.py --with-hg=`which hg` --xunit=xunit.xml
   
@@ -62,14 +77,32 @@
   
   ERROR: test-failure.t output changed
   !.
+  --- $TESTTMP/test-failure-unicode.t
+  +++ $TESTTMP/test-failure-unicode.t.err
+  @@ -1,2 +1,2 @@
+     $ echo babar\xce\xb1 (esc)
+  -  l\xce\xb5\xce\xb5t (esc)
+  +  babar\xce\xb1 (esc)
+  
+  ERROR: test-failure-unicode.t output changed
+  !
   Failed test-failure.t: output changed
-  # Ran 2 tests, 0 skipped, 0 warned, 1 failed.
+  Failed test-failure-unicode.t: output changed
+  # Ran 3 tests, 0 skipped, 0 warned, 2 failed.
   python hash seed: * (glob)
   [1]
   $ cat xunit.xml
   <?xml version="1.0" encoding="utf-8"?>
-  <testsuite errors="0" failures="1" name="run-tests" skipped="0" tests="2">
+  <testsuite errors="0" failures="2" name="run-tests" skipped="0" tests="3">
     <testcase name="test-success.t" time="*"/> (glob)
+    <testcase name="test-failure-unicode.t" time="*"> (glob)
+  <![CDATA[--- $TESTTMP/test-failure-unicode.t
+  +++ $TESTTMP/test-failure-unicode.t.err
+  @@ -1,2 +1,2 @@
+     $ echo babar\xce\xb1 (esc)
+  -  l\xce\xb5\xce\xb5t (esc)
+  +  babar\xce\xb1 (esc)
+  ]]>  </testcase>
     <testcase name="test-failure.t" time="*"> (glob)
   <![CDATA[--- $TESTTMP/test-failure.t
   +++ $TESTTMP/test-failure.t.err
@@ -82,6 +115,8 @@
   ]]>  </testcase>
   </testsuite>
 
+  $ rm test-failure-unicode.t
+
 test for --retest
 ====================
 
@@ -230,7 +265,8 @@
    this test is still more bytes than success.
   
   Failed test-failure*.t: output changed (glob)
-  # Ran 2 tests, 0 skipped, 0 warned, 1 failed.
+  Failed test-nothing.t: output changed
+  # Ran 2 tests, 0 skipped, 0 warned, 2 failed.
   python hash seed: * (glob)
   [1]
 
--- a/tests/test-setdiscovery.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-setdiscovery.t	Thu Apr 16 20:57:51 2015 -0500
@@ -364,9 +364,9 @@
 #if false
 generate new bundles:
   $ hg init r1
-  $ for i in `seq 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
+  $ for i in `python $TESTDIR/seq.py 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
   $ hg clone -q r1 r2
-  $ for i in `seq 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
+  $ for i in `python $TESTDIR/seq.py 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
   $ hg -R r2 branch -q r2change && hg -R r2 ci -qmr2change
   $ hg -R r1 bundle -qa $TESTDIR/bundles/issue4438-r1.hg
   $ hg -R r2 bundle -qa $TESTDIR/bundles/issue4438-r2.hg
--- a/tests/test-shelve.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-shelve.t	Thu Apr 16 20:57:51 2015 -0500
@@ -17,6 +17,57 @@
   $ echo x > x
   $ hg addremove -q
 
+shelve has a help message
+  $ hg shelve -h
+  hg shelve [OPTION]... [FILE]...
+  
+  save and set aside changes from the working directory
+  
+      Shelving takes files that "hg status" reports as not clean, saves the
+      modifications to a bundle (a shelved change), and reverts the files so
+      that their state in the working directory becomes clean.
+  
+      To restore these changes to the working directory, using "hg unshelve";
+      this will work even if you switch to a different commit.
+  
+      When no files are specified, "hg shelve" saves all not-clean files. If
+      specific files or directories are named, only changes to those files are
+      shelved.
+  
+      Each shelved change has a name that makes it easier to find later. The
+      name of a shelved change defaults to being based on the active bookmark,
+      or if there is no active bookmark, the current named branch.  To specify a
+      different name, use "--name".
+  
+      To see a list of existing shelved changes, use the "--list" option. For
+      each shelved change, this will print its name, age, and description; use "
+      --patch" or "--stat" for more details.
+  
+      To delete specific shelved changes, use "--delete". To delete all shelved
+      changes, use "--cleanup".
+  
+  (use "hg help -e shelve" to show help for the shelve extension)
+  
+  options ([+] can be repeated):
+  
+   -A --addremove           mark new/missing files as added/removed before
+                            shelving
+      --cleanup             delete all shelved changes
+      --date DATE           shelve with the specified commit date
+   -d --delete              delete the named shelved change(s)
+   -e --edit                invoke editor on commit messages
+   -l --list                list current shelves
+   -m --message TEXT        use text as shelve message
+   -n --name NAME           use the given name for the shelved commit
+   -p --patch               show patch
+   -i --interactive         interactive mode, only works while creating a shelve
+      --stat                output diffstat-style summary of changes
+   -I --include PATTERN [+] include names matching the given patterns
+   -X --exclude PATTERN [+] exclude names matching the given patterns
+      --mq                  operate on patch repository
+  
+  (some details hidden, use --verbose to show complete help)
+
 shelving in an empty repo should be possible
 (this tests also that editor is not invoked, if '--edit' is not
 specified)
@@ -81,11 +132,11 @@
 ensure that our shelved changes exist
 
   $ hg shelve -l
-  default-01      (*)    changes to '[mq]: second.patch' (glob)
-  default         (*)    changes to '[mq]: second.patch' (glob)
+  default-01      (*)* changes to '[mq]: second.patch' (glob)
+  default         (*)* changes to '[mq]: second.patch' (glob)
 
   $ hg shelve -l -p default
-  default         (*)    changes to '[mq]: second.patch' (glob)
+  default         (*)* changes to '[mq]: second.patch' (glob)
   
   diff --git a/a/a b/a/a
   --- a/a/a
@@ -740,4 +791,76 @@
   abort: options '--delete' and '--name' may not be used together
   [255]
 
+Test interactive shelve
+  $ cat <<EOF >> $HGRCPATH
+  > [ui]
+  > interactive = true
+  > EOF
+  $ echo 'a' >> a/b
+  $ cat a/a >> a/b
+  $ echo 'x' >> a/b
+  $ mv a/b a/a
+  $ echo 'a' >> foo/foo
+  $ hg st
+  M a/a
+  ? a/a.orig
+  ? foo/foo
+  $ cat a/a
+  a
+  a
+  c
+  x
+  x
+  $ cat foo/foo
+  foo
+  a
+  $ hg shelve --interactive << EOF
+  > y
+  > y
+  > n
+  > EOF
+  diff --git a/a/a b/a/a
+  2 hunks, 2 lines changed
+  examine changes to 'a/a'? [Ynesfdaq?] y
+  
+  @@ -1,3 +1,4 @@
+  +a
+   a
+   c
+   x
+  record change 1/2 to 'a/a'? [Ynesfdaq?] y
+  
+  @@ -1,3 +2,4 @@
+   a
+   c
+   x
+  +x
+  record change 2/2 to 'a/a'? [Ynesfdaq?] n
+  
+  shelved as test
+  merging a/a
+  0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+  $ cat a/a
+  a
+  c
+  x
+  x
+  $ cat foo/foo
+  foo
+  a
+  $ hg st
+  M a/a
+  ? foo/foo
+  $ hg unshelve
+  unshelving change 'test'
+  temporarily committing pending changes (restore with 'hg unshelve --abort')
+  rebasing shelved changes
+  rebasing 6:65b5d1c34c34 "changes to 'create conflict'" (tip)
+  merging a/a
+  $ cat a/a
+  a
+  a
+  c
+  x
+  x
   $ cd ..
--- a/tests/test-ssh.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-ssh.t	Thu Apr 16 20:57:51 2015 -0500
@@ -116,6 +116,14 @@
   searching for changes
   no changes found
 
+pull from wrong ssh URL
+
+  $ hg pull -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/doesnotexist
+  pulling from ssh://user@dummy/doesnotexist
+  remote: abort: there is no Mercurial repository here (.hg not found)!
+  abort: no suitable response from remote hg!
+  [255]
+
 local change
 
   $ echo bleah > foo
@@ -446,11 +454,12 @@
   Got arguments 1:user@dummy 2:hg -R local-stream serve --stdio
   Got arguments 1:user@dummy 2:hg -R remote serve --stdio
   Got arguments 1:user@dummy 2:hg -R remote serve --stdio
+  Got arguments 1:user@dummy 2:hg -R doesnotexist serve --stdio
   Got arguments 1:user@dummy 2:hg -R remote serve --stdio
   Got arguments 1:user@dummy 2:hg -R local serve --stdio
   Got arguments 1:user@dummy 2:hg -R $TESTTMP/local serve --stdio
   Got arguments 1:user@dummy 2:hg -R remote serve --stdio
-  changegroup-in-remote hook: HG_NODE=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
+  changegroup-in-remote hook: HG_NODE=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 HG_SOURCE=serve HG_TXNID=TXN:* HG_URL=remote:ssh:127.0.0.1 (glob)
   Got arguments 1:user@dummy 2:hg -R remote serve --stdio
   Got arguments 1:user@dummy 2:hg -R remote serve --stdio
   Got arguments 1:user@dummy 2:hg -R remote serve --stdio
@@ -460,7 +469,7 @@
   Got arguments 1:user@dummy 2:hg -R remote serve --stdio
   Got arguments 1:user@dummy 2:hg -R remote serve --stdio
   Got arguments 1:user@dummy 2:hg -R remote serve --stdio
-  changegroup-in-remote hook: HG_NODE=1383141674ec756a6056f6a9097618482fe0f4a6 HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
+  changegroup-in-remote hook: HG_NODE=1383141674ec756a6056f6a9097618482fe0f4a6 HG_SOURCE=serve HG_TXNID=TXN:* HG_URL=remote:ssh:127.0.0.1 (glob)
   Got arguments 1:user@dummy 2:hg -R remote serve --stdio
   Got arguments 1:user@dummy 2:hg init 'a repo'
   Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
@@ -468,4 +477,4 @@
   Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
   Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
   Got arguments 1:user@dummy 2:hg -R remote serve --stdio
-  changegroup-in-remote hook: HG_NODE=65c38f4125f9602c8db4af56530cc221d93b8ef8 HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
+  changegroup-in-remote hook: HG_NODE=65c38f4125f9602c8db4af56530cc221d93b8ef8 HG_SOURCE=serve HG_TXNID=TXN:* HG_URL=remote:ssh:127.0.0.1 (glob)
--- a/tests/test-static-http.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-static-http.t	Thu Apr 16 20:57:51 2015 -0500
@@ -68,7 +68,7 @@
   adding manifests
   adding file changes
   added 1 changesets with 1 changes to 1 files
-  changegroup hook: HG_NODE=4ac2e3648604439c580c69b09ec9d93a88d93432 HG_SOURCE=pull HG_URL=http://localhost:$HGPORT/remote
+  changegroup hook: HG_NODE=4ac2e3648604439c580c69b09ec9d93a88d93432 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=http://localhost:$HGPORT/remote (glob)
   (run 'hg update' to get a working copy)
 
 trying to push
--- a/tests/test-status-color.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-status-color.t	Thu Apr 16 20:57:51 2015 -0500
@@ -338,8 +338,8 @@
 hg resolve with one unresolved, one resolved:
 
   $ hg resolve --color=always -l
-  \x1b[0;31;1mU a\x1b[0m (esc)
-  \x1b[0;32;1mR b\x1b[0m (esc)
+  \x1b[0;31;1mU \x1b[0m\x1b[0;31;1ma\x1b[0m (esc)
+  \x1b[0;32;1mR \x1b[0m\x1b[0;32;1mb\x1b[0m (esc)
 
 color coding of error message with current availability of curses
 
--- a/tests/test-status.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-status.t	Thu Apr 16 20:57:51 2015 -0500
@@ -240,6 +240,17 @@
   $ rm deleted
   $ hg copy modified copied
 
+Specify the working directory revision explicitly; the result should be the
+same as "hg status"
+
+  $ hg status --change "wdir()"
+  M modified
+  A added
+  A copied
+  R removed
+  ! deleted
+  ? unknown
+
 Run status with 2 different flags.
 Check if result is the same or different.
 If result is not as expected, raise error
@@ -427,6 +438,15 @@
     b
   R b
 
+using ui.statuscopies setting
+  $ hg st --config ui.statuscopies=true
+  M a
+    b
+  R b
+  $ hg st --config ui.statuscopies=false
+  M a
+  R b
+
 Other "bug" highlight, the revision status does not report the copy information.
 This is buggy behavior.
 
--- a/tests/test-strip.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-strip.t	Thu Apr 16 20:57:51 2015 -0500
@@ -218,10 +218,71 @@
   
   $ hg debugbundle .hg/strip-backup/*
   Stream params: {}
-  b2x:changegroup -- "{'version': '02'}"
+  changegroup -- "{'version': '02'}"
+      264128213d290d868c54642d13aeaa3675551a78
+  $ hg incoming .hg/strip-backup/*
+  comparing with .hg/strip-backup/264128213d29-0b39d6bf-backup.hg
+  searching for changes
+  changeset:   4:264128213d29
+  tag:         tip
+  parent:      1:ef3a871183d7
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     c
+  
+  $ restore
+  $ hg up -C 4
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ hg --config experimental.bundle2-exp=True --config experimental.strip-bundle2-version=02 --traceback strip 4
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  saved backup bundle to $TESTTMP/test/.hg/strip-backup/264128213d29-0b39d6bf-backup.hg (glob)
+  $ hg parents
+  changeset:   1:ef3a871183d7
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     b
+  
+  $ hg debugbundle .hg/strip-backup/*
+  Stream params: {}
+  changegroup -- "{'version': '02'}"
       264128213d290d868c54642d13aeaa3675551a78
-  $ restore
-
+  $ hg pull .hg/strip-backup/*
+  pulling from .hg/strip-backup/264128213d29-0b39d6bf-backup.hg
+  searching for changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 0 changes to 0 files (+1 heads)
+  (run 'hg heads' to see heads, 'hg merge' to merge)
+  $ rm .hg/strip-backup/*
+  $ hg log --graph
+  o  changeset:   4:264128213d29
+  |  tag:         tip
+  |  parent:      1:ef3a871183d7
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     c
+  |
+  | o  changeset:   3:443431ffac4f
+  | |  user:        test
+  | |  date:        Thu Jan 01 00:00:00 1970 +0000
+  | |  summary:     e
+  | |
+  | o  changeset:   2:65bd5f99a4a3
+  |/   user:        test
+  |    date:        Thu Jan 01 00:00:00 1970 +0000
+  |    summary:     d
+  |
+  @  changeset:   1:ef3a871183d7
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     b
+  |
+  o  changeset:   0:9ab35a2d17cb
+     user:        test
+     date:        Thu Jan 01 00:00:00 1970 +0000
+     summary:     a
+  
   $ hg up -C 2
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg merge 4
@@ -435,10 +496,11 @@
 Verify strip protects against stripping wc parent when there are uncommitted mods
 
   $ echo b > b
+  $ echo bb > bar
   $ hg add b
   $ hg ci -m 'b'
   $ hg log --graph
-  @  changeset:   1:7519abd79d14
+  @  changeset:   1:76dcf9fab855
   |  tag:         tip
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
@@ -449,9 +511,24 @@
      date:        Thu Jan 01 00:00:00 1970 +0000
      summary:     a
   
+  $ hg up 0
+  1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ echo c > bar
+  $ hg up -t false
+  merging bar
+  merging bar failed!
+  1 files updated, 0 files merged, 0 files removed, 1 files unresolved
+  use 'hg resolve' to retry unresolved file merges
+  [1]
+  $ hg sum
+  parent: 1:76dcf9fab855 tip
+   b
+  branch: default
+  commit: 1 modified, 1 unknown, 1 unresolved
+  update: (current)
+  mq:     3 unapplied
 
   $ echo c > b
-  $ echo c > bar
   $ hg strip tip
   abort: local changes found
   [255]
@@ -467,6 +544,16 @@
   $ hg status
   M bar
   ? b
+  ? bar.orig
+
+  $ rm bar.orig
+  $ hg sum
+  parent: 0:9ab35a2d17cb tip
+   a
+  branch: default
+  commit: 1 modified, 1 unknown
+  update: (current)
+  mq:     3 unapplied
 
 Strip adds, removes, modifies with --keep
 
@@ -575,7 +662,7 @@
    -f --force          force removal of changesets, discard uncommitted changes
                        (no backup)
       --no-backup      no backups
-   -k --keep           do not modify working copy during strip
+   -k --keep           do not modify working directory during strip
    -B --bookmark VALUE remove revs only reachable from given bookmark
       --mq             operate on patch repository
   
--- a/tests/test-subrepo-deep-nested-change.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-subrepo-deep-nested-change.t	Thu Apr 16 20:57:51 2015 -0500
@@ -46,12 +46,29 @@
 
 Clone main
 
-  $ hg clone main cloned
+  $ hg --config extensions.largefiles= clone main cloned
   updating to branch default
   cloning subrepo sub1 from $TESTTMP/sub1
   cloning subrepo sub1/sub2 from $TESTTMP/sub2 (glob)
   3 files updated, 0 files merged, 0 files removed, 0 files unresolved
 
+Largefiles is NOT enabled in the clone if the source repo doesn't require it
+  $ cat cloned/.hg/hgrc
+  # example repository config (see "hg help config" for more info)
+  [paths]
+  default = $TESTTMP/main (glob)
+  
+  # path aliases to other clones of this repo in URLs or filesystem paths
+  # (see "hg help config.paths" for more info)
+  #
+  # default-push = ssh://jdoe@example.net/hg/jdoes-fork
+  # my-fork      = ssh://jdoe@example.net/hg/jdoes-fork
+  # my-clone     = /home/jdoe/jdoes-clone
+  
+  [ui]
+  # name and email (local to this repository, optional), e.g.
+  # username = Jane Doe <jdoe@example.com>
+
 Checking cloned repo ids
 
   $ printf "cloned " ; hg id -R cloned
@@ -124,9 +141,14 @@
   $ hg status -S
   $ hg remove sub1/sub2/folder/test.txt
   $ hg remove sub1/.hgsubstate
+  $ mv sub1/.hgsub sub1/x.hgsub
   $ hg status -S
+  warning: subrepo spec file 'sub1/.hgsub' not found (glob)
   R sub1/.hgsubstate
   R sub1/sub2/folder/test.txt
+  ! sub1/.hgsub
+  ? sub1/x.hgsub
+  $ mv sub1/x.hgsub sub1/.hgsub
   $ hg update -Cq
   $ touch sub1/foo
   $ hg forget sub1/sub2/folder/test.txt
@@ -169,6 +191,24 @@
   adding foo/bar/abc
   committing subrepository sub1
   committing subrepository sub1/sub2 (glob)
+
+  $ hg forget sub1/sub2/sub2
+  $ echo x > sub1/sub2/x.txt
+  $ hg add sub1/sub2/x.txt
+
+"hg files" sees uncommitted adds and removes in subrepos
+  $ hg files -S
+  .hgsub
+  .hgsubstate
+  foo/bar/abc (glob)
+  main
+  sub1/.hgsub (glob)
+  sub1/.hgsubstate (glob)
+  sub1/foo (glob)
+  sub1/sub1 (glob)
+  sub1/sub2/folder/bar (glob)
+  sub1/sub2/x.txt (glob)
+
   $ hg rollback -q
   $ hg up -Cq
 
@@ -319,6 +359,31 @@
   ../archive_lf/sub1/sub2/large.bin
   $ rm -rf ../archive_lf
 
+The local repo enables largefiles if a largefiles repo is cloned
+  $ hg showconfig extensions
+  abort: repository requires features unknown to this Mercurial: largefiles!
+  (see http://mercurial.selenic.com/wiki/MissingRequirement for more information)
+  [255]
+  $ hg --config extensions.largefiles= clone -qU . ../lfclone
+  $ cat ../lfclone/.hg/hgrc
+  # example repository config (see "hg help config" for more info)
+  [paths]
+  default = $TESTTMP/cloned (glob)
+  
+  # path aliases to other clones of this repo in URLs or filesystem paths
+  # (see "hg help config.paths" for more info)
+  #
+  # default-push = ssh://jdoe@example.net/hg/jdoes-fork
+  # my-fork      = ssh://jdoe@example.net/hg/jdoes-fork
+  # my-clone     = /home/jdoe/jdoes-clone
+  
+  [ui]
+  # name and email (local to this repository, optional), e.g.
+  # username = Jane Doe <jdoe@example.com>
+  
+  [extensions]
+  largefiles=
+
 Find an exact match to a standin (should archive nothing)
   $ hg --config extensions.largefiles= archive -S -I 'sub/sub2/.hglf/large.bin' ../archive_lf
   $ find ../archive_lf 2> /dev/null | sort
@@ -351,6 +416,7 @@
   R sub1/sub2/test.txt
   ? foo/bar/abc
   ? sub1/sub2/untracked.txt
+  ? sub1/sub2/x.txt
   $ hg add sub1/sub2
   $ hg ci -Sqm 'forget testing'
 
@@ -377,4 +443,105 @@
   A a.dat
   A a.txt
 
+  $ hg ci -m "add a.*"
+  $ hg mv a.dat b.dat
+  $ hg mv foo/bar/abc foo/bar/def
+  $ hg status -C
+  A b.dat
+    a.dat
+  A foo/bar/def
+    foo/bar/abc
+  R a.dat
+  R foo/bar/abc
+
+  $ hg ci -m "move large and normal"
+  $ hg status -C --rev '.^' --rev .
+  A b.dat
+    a.dat
+  A foo/bar/def
+    foo/bar/abc
+  R a.dat
+  R foo/bar/abc
+
+
+  $ echo foo > main
+  $ hg ci -m "mod parent only"
+  $ hg init sub3
+  $ echo "sub3 = sub3" >> .hgsub
+  $ echo xyz > sub3/a.txt
+  $ hg add sub3/a.txt
+  $ hg ci -Sm "add sub3"
+  committing subrepository sub3
+  $ cat .hgsub | grep -v sub3 > .hgsub1
+  $ mv .hgsub1 .hgsub
+  $ hg ci -m "remove sub3"
+
+  $ hg log -r "subrepo()" --style compact
+  0   7f491f53a367   1970-01-01 00:00 +0000   test
+    main import
+  
+  1   ffe6649062fe   1970-01-01 00:00 +0000   test
+    deep nested modif should trigger a commit
+  
+  2   9bb10eebee29   1970-01-01 00:00 +0000   test
+    add test.txt
+  
+  3   7c64f035294f   1970-01-01 00:00 +0000   test
+    add large files
+  
+  4   f734a59e2e35   1970-01-01 00:00 +0000   test
+    forget testing
+  
+  11   9685a22af5db   1970-01-01 00:00 +0000   test
+    add sub3
+  
+  12[tip]   2e0485b475b9   1970-01-01 00:00 +0000   test
+    remove sub3
+  
+  $ hg log -r "subrepo('sub3')" --style compact
+  11   9685a22af5db   1970-01-01 00:00 +0000   test
+    add sub3
+  
+  12[tip]   2e0485b475b9   1970-01-01 00:00 +0000   test
+    remove sub3
+  
+  $ hg log -r "subrepo('bogus')" --style compact
+
+
+Test .hgsubstate in the R state
+
+  $ hg rm .hgsub .hgsubstate
+  $ hg ci -m 'trash subrepo tracking'
+
+  $ hg log -r "subrepo('re:sub\d+')" --style compact
+  0   7f491f53a367   1970-01-01 00:00 +0000   test
+    main import
+  
+  1   ffe6649062fe   1970-01-01 00:00 +0000   test
+    deep nested modif should trigger a commit
+  
+  2   9bb10eebee29   1970-01-01 00:00 +0000   test
+    add test.txt
+  
+  3   7c64f035294f   1970-01-01 00:00 +0000   test
+    add large files
+  
+  4   f734a59e2e35   1970-01-01 00:00 +0000   test
+    forget testing
+  
+  11   9685a22af5db   1970-01-01 00:00 +0000   test
+    add sub3
+  
+  12   2e0485b475b9   1970-01-01 00:00 +0000   test
+    remove sub3
+  
+  13[tip]   a68b2c361653   1970-01-01 00:00 +0000   test
+    trash subrepo tracking
+  
+
+Restore the trashed subrepo tracking
+
+  $ hg rollback -q
+  $ hg update -Cq .
+
   $ cd ..
--- a/tests/test-subrepo-git.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-subrepo-git.t	Thu Apr 16 20:57:51 2015 -0500
@@ -134,6 +134,7 @@
   $ hg status --subrepos
   ? s/f
   $ hg add .
+  adding f
   $ git add f
   $ cd ..
 
@@ -174,6 +175,8 @@
   pulling subrepo s from $TESTTMP/gitroot
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
+  $ hg st --subrepos s
+  A s/f
   $ cat s/f
   f
   $ cat s/g
@@ -782,9 +785,57 @@
   \s*foobar |\s*2 +- (re)
    2 files changed, 2 insertions\(\+\), 1 deletions?\(-\) (re)
 
-ensure adding include/exclude ignores the subrepo
+adding an include should ignore the other elements
   $ hg diff --subrepos -I s/foobar
+  diff --git a/s/foobar b/s/foobar
+  index 8a5a5e2..bd5812a 100644
+  --- a/s/foobar
+  +++ b/s/foobar
+  @@ -1,4 +1,4 @@
+  -woopwoop
+  +woop    woop
+   
+   foo
+   bar
+
+adding an exclude should ignore this element
   $ hg diff --subrepos -X s/foobar
+  diff --git a/s/barfoo b/s/barfoo
+  new file mode 100644
+  index 0000000..257cc56
+  --- /dev/null
+  +++ b/s/barfoo
+  @@ -0,0 +1 @@
+  +foo
+
+moving a file should show a removal and an add
+  $ hg revert --all
+  reverting subrepo ../gitroot
+  $ cd s
+  $ git mv foobar woop
+  $ cd ..
+  $ hg diff --subrepos
+  diff --git a/s/foobar b/s/foobar
+  deleted file mode 100644
+  index 8a5a5e2..0000000
+  --- a/s/foobar
+  +++ /dev/null
+  @@ -1,4 +0,0 @@
+  -woopwoop
+  -
+  -foo
+  -bar
+  diff --git a/s/woop b/s/woop
+  new file mode 100644
+  index 0000000..8a5a5e2
+  --- /dev/null
+  +++ b/s/woop
+  @@ -0,0 +1,4 @@
+  +woopwoop
+  +
+  +foo
+  +bar
+  $ rm s/woop
 
 revert the subrepository
   $ hg revert --all
@@ -802,4 +853,235 @@
   $ hg status --subrepos
   ? s/barfoo
 
+show file at specific revision
+  $ cat > s/foobar << EOF
+  > woop    woop
+  > fooo bar
+  > EOF
+  $ hg commit --subrepos -m "updated foobar"
+  committing subrepository s
+  $ cat > s/foobar << EOF
+  > current foobar
+  > (should not be visible using hg cat)
+  > EOF
+
+  $ hg cat -r . s/foobar
+  woop    woop
+  fooo bar (no-eol)
+  $ hg cat -r "parents(.)" s/foobar > catparents
+
+  $ mkdir -p tmp/s
+
+  $ hg cat -r "parents(.)" --output tmp/%% s/foobar
+  $ diff tmp/% catparents
+
+  $ hg cat -r "parents(.)" --output tmp/%s s/foobar
+  $ diff tmp/foobar catparents
+
+  $ hg cat -r "parents(.)" --output tmp/%d/otherfoobar s/foobar
+  $ diff tmp/s/otherfoobar catparents
+
+  $ hg cat -r "parents(.)" --output tmp/%p s/foobar
+  $ diff tmp/s/foobar catparents
+
+  $ hg cat -r "parents(.)" --output tmp/%H s/foobar
+  $ diff tmp/255ee8cf690ec86e99b1e80147ea93ece117cd9d catparents
+
+  $ hg cat -r "parents(.)" --output tmp/%R s/foobar
+  $ diff tmp/10 catparents
+
+  $ hg cat -r "parents(.)" --output tmp/%h s/foobar
+  $ diff tmp/255ee8cf690e catparents
+
+  $ rm tmp/10
+  $ hg cat -r "parents(.)" --output tmp/%r s/foobar
+  $ diff tmp/10 catparents
+
+  $ mkdir tmp/tc
+  $ hg cat -r "parents(.)" --output tmp/%b/foobar s/foobar
+  $ diff tmp/tc/foobar catparents
+
+cleanup
+  $ rm -r tmp
+  $ rm catparents
+
+add git files, using either files or patterns
+  $ echo "hsss! hsssssssh!" > s/snake.python
+  $ echo "ccc" > s/c.c
+  $ echo "cpp" > s/cpp.cpp
+
+  $ hg add s/snake.python s/c.c s/cpp.cpp
+  $ hg st --subrepos s
+  M s/foobar
+  A s/c.c
+  A s/cpp.cpp
+  A s/snake.python
+  ? s/barfoo
+  $ hg revert s
+  reverting subrepo ../gitroot
+
+  $ hg add --subrepos "glob:**.python"
+  adding s/snake.python (glob)
+  $ hg st --subrepos s
+  A s/snake.python
+  ? s/barfoo
+  ? s/c.c
+  ? s/cpp.cpp
+  ? s/foobar.orig
+  $ hg revert s
+  reverting subrepo ../gitroot
+
+  $ hg add --subrepos s
+  adding s/barfoo (glob)
+  adding s/c.c (glob)
+  adding s/cpp.cpp (glob)
+  adding s/foobar.orig (glob)
+  adding s/snake.python (glob)
+  $ hg st --subrepos s
+  A s/barfoo
+  A s/c.c
+  A s/cpp.cpp
+  A s/foobar.orig
+  A s/snake.python
+  $ hg revert s
+  reverting subrepo ../gitroot
+make sure everything is reverted correctly
+  $ hg st --subrepos s
+  ? s/barfoo
+  ? s/c.c
+  ? s/cpp.cpp
+  ? s/foobar.orig
+  ? s/snake.python
+
+  $ hg add --subrepos --exclude "path:s/c.c"
+  adding s/barfoo (glob)
+  adding s/cpp.cpp (glob)
+  adding s/foobar.orig (glob)
+  adding s/snake.python (glob)
+  $ hg st --subrepos s
+  A s/barfoo
+  A s/cpp.cpp
+  A s/foobar.orig
+  A s/snake.python
+  ? s/c.c
+  $ hg revert --all -q
+
+.hgignore should not have influence in subrepos
+  $ cat > .hgignore << EOF
+  > syntax: glob
+  > *.python
+  > EOF
+  $ hg add .hgignore
+  $ hg add --subrepos "glob:**.python" s/barfoo
+  adding s/snake.python (glob)
+  $ hg st --subrepos s
+  A s/barfoo
+  A s/snake.python
+  ? s/c.c
+  ? s/cpp.cpp
+  ? s/foobar.orig
+  $ hg revert --all -q
+
+.gitignore should have influence,
+except for explicitly added files (no patterns)
+  $ cat > s/.gitignore << EOF
+  > *.python
+  > EOF
+  $ hg add s/.gitignore
+  $ hg st --subrepos s
+  A s/.gitignore
+  ? s/barfoo
+  ? s/c.c
+  ? s/cpp.cpp
+  ? s/foobar.orig
+  $ hg st --subrepos s --all
+  A s/.gitignore
+  ? s/barfoo
+  ? s/c.c
+  ? s/cpp.cpp
+  ? s/foobar.orig
+  I s/snake.python
+  C s/f
+  C s/foobar
+  C s/g
+  $ hg add --subrepos "glob:**.python"
+  $ hg st --subrepos s
+  A s/.gitignore
+  ? s/barfoo
+  ? s/c.c
+  ? s/cpp.cpp
+  ? s/foobar.orig
+  $ hg add --subrepos s/snake.python
+  $ hg st --subrepos s
+  A s/.gitignore
+  A s/snake.python
+  ? s/barfoo
+  ? s/c.c
+  ? s/cpp.cpp
+  ? s/foobar.orig
+
+correctly do a dry run
+  $ hg add --subrepos s --dry-run
+  adding s/barfoo (glob)
+  adding s/c.c (glob)
+  adding s/cpp.cpp (glob)
+  adding s/foobar.orig (glob)
+  $ hg st --subrepos s
+  A s/.gitignore
+  A s/snake.python
+  ? s/barfoo
+  ? s/c.c
+  ? s/cpp.cpp
+  ? s/foobar.orig
+
+error given when adding an already tracked file
+  $ hg add s/.gitignore
+  s/.gitignore already tracked!
+  [1]
+  $ hg add s/g
+  s/g already tracked!
+  [1]
+
+removed files can be re-added
+removing files using 'rm' or 'git rm' has the same effect,
+since we ignore the staging area
+  $ hg ci --subrepos -m 'snake'
+  committing subrepository s
+  $ cd s
+  $ rm snake.python
+(remove leftover .hg so Mercurial doesn't look for a root here)
+  $ rm -rf .hg
+  $ hg status --subrepos --all .
+  R snake.python
+  ? barfoo
+  ? c.c
+  ? cpp.cpp
+  ? foobar.orig
+  C .gitignore
+  C f
+  C foobar
+  C g
+  $ git rm snake.python
+  rm 'snake.python'
+  $ hg status --subrepos --all .
+  R snake.python
+  ? barfoo
+  ? c.c
+  ? cpp.cpp
+  ? foobar.orig
+  C .gitignore
+  C f
+  C foobar
+  C g
+  $ touch snake.python
   $ cd ..
+  $ hg add s/snake.python
+  $ hg status -S
+  M s/snake.python
+  ? .hgignore
+  ? s/barfoo
+  ? s/c.c
+  ? s/cpp.cpp
+  ? s/foobar.orig
+
+  $ cd ..
--- a/tests/test-subrepo-missing.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-subrepo-missing.t	Thu Apr 16 20:57:51 2015 -0500
@@ -24,7 +24,7 @@
   $ cp .hgsubstate .hgsubstate.old
   >>> file('.hgsubstate', 'wb').write('\ninvalid')
   $ hg st --subrepos
-  abort: invalid subrepository revision specifier in .hgsubstate line 2
+  abort: invalid subrepository revision specifier in '.hgsubstate' line 2
   [255]
   $ mv .hgsubstate.old .hgsubstate
 
@@ -32,8 +32,9 @@
 
   $ rm .hgsub
   $ hg revert .hgsub
-  warning: subrepo spec file .hgsub not found
-  warning: subrepo spec file .hgsub not found
+  warning: subrepo spec file '.hgsub' not found
+  warning: subrepo spec file '.hgsub' not found
+  warning: subrepo spec file '.hgsub' not found
 
 delete .hgsubstate and revert it
 
@@ -44,11 +45,11 @@
 
   $ rm .hgsub
   $ hg up 0
-  warning: subrepo spec file .hgsub not found
-  warning: subrepo spec file .hgsub not found
+  warning: subrepo spec file '.hgsub' not found
+  warning: subrepo spec file '.hgsub' not found
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg st
-  warning: subrepo spec file .hgsub not found
+  warning: subrepo spec file '.hgsub' not found
   ! .hgsub
   $ ls subrepo
   a
@@ -56,8 +57,8 @@
 delete .hgsubstate and update
 
   $ hg up -C
-  warning: subrepo spec file .hgsub not found
-  warning: subrepo spec file .hgsub not found
+  warning: subrepo spec file '.hgsub' not found
+  warning: subrepo spec file '.hgsub' not found
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ rm .hgsubstate
   $ hg up 0
--- a/tests/test-subrepo-recursion.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-subrepo-recursion.t	Thu Apr 16 20:57:51 2015 -0500
@@ -59,7 +59,7 @@
 Commits:
 
   $ hg commit -m fails
-  abort: uncommitted changes in subrepo foo
+  abort: uncommitted changes in subrepository 'foo'
   (use --subrepos for recursive commit)
   [255]
 
@@ -260,6 +260,8 @@
   > [progress]
   > assume-tty = 1
   > delay = 0
+  > # set changedelay really large so we don't see nested topics
+  > changedelay = 30000
   > format = topic bar number
   > refresh = 0
   > width = 60
@@ -341,8 +343,26 @@
 Test archiving a revision that references a subrepo that is not yet
 cloned:
 
+#if hardlink
   $ hg clone -U . ../empty
+  \r (no-eol) (esc)
+  linking [ <=>                                           ] 1\r (no-eol) (esc)
+  linking [  <=>                                          ] 2\r (no-eol) (esc)
+  linking [   <=>                                         ] 3\r (no-eol) (esc)
+  linking [    <=>                                        ] 4\r (no-eol) (esc)
+  linking [     <=>                                       ] 5\r (no-eol) (esc)
+  linking [      <=>                                      ] 6\r (no-eol) (esc)
+  linking [       <=>                                     ] 7\r (no-eol) (esc)
+  linking [        <=>                                    ] 8\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
+#else
+  $ hg clone -U . ../empty
+  \r (no-eol) (esc)
+  linking [ <=>                                           ] 1 (no-eol)
+#endif
+
   $ cd ../empty
+#if hardlink
   $ hg archive --subrepos -r tip ../archive.tar.gz
   \r (no-eol) (esc)
   archiving [                                           ] 0/3\r (no-eol) (esc)
@@ -355,6 +375,16 @@
   archiving [==========================================>] 3/3\r (no-eol) (esc)
                                                               \r (no-eol) (esc)
   \r (no-eol) (esc)
+  linking [ <=>                                           ] 1\r (no-eol) (esc)
+  linking [  <=>                                          ] 2\r (no-eol) (esc)
+  linking [   <=>                                         ] 3\r (no-eol) (esc)
+  linking [    <=>                                        ] 4\r (no-eol) (esc)
+  linking [     <=>                                       ] 5\r (no-eol) (esc)
+  linking [      <=>                                      ] 6\r (no-eol) (esc)
+  linking [       <=>                                     ] 7\r (no-eol) (esc)
+  linking [        <=>                                    ] 8\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
+  \r (no-eol) (esc)
   archiving (foo) [                                     ] 0/3\r (no-eol) (esc)
   archiving (foo) [                                     ] 0/3\r (no-eol) (esc)
   archiving (foo) [===========>                         ] 1/3\r (no-eol) (esc)
@@ -365,6 +395,14 @@
   archiving (foo) [====================================>] 3/3\r (no-eol) (esc)
                                                               \r (no-eol) (esc)
   \r (no-eol) (esc)
+  linking [ <=>                                           ] 1\r (no-eol) (esc)
+  linking [  <=>                                          ] 2\r (no-eol) (esc)
+  linking [   <=>                                         ] 3\r (no-eol) (esc)
+  linking [    <=>                                        ] 4\r (no-eol) (esc)
+  linking [     <=>                                       ] 5\r (no-eol) (esc)
+  linking [      <=>                                      ] 6\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
+  \r (no-eol) (esc)
   archiving (foo/bar) [                                 ] 0/1\r (no-eol) (glob) (esc)
   archiving (foo/bar) [                                 ] 0/1\r (no-eol) (glob) (esc)
   archiving (foo/bar) [================================>] 1/1\r (no-eol) (glob) (esc)
@@ -372,6 +410,29 @@
                                                               \r (no-eol) (esc)
   cloning subrepo foo from $TESTTMP/repo/foo
   cloning subrepo foo/bar from $TESTTMP/repo/foo/bar (glob)
+#else
+Note there's a slight output glitch on non-hardlink systems: the last
+"linking" progress topic never gets closed, leading to minor output corruption on that platform.
+  $ hg archive --subrepos -r tip ../archive.tar.gz
+  \r (no-eol) (esc)
+  archiving [                                           ] 0/3\r (no-eol) (esc)
+  archiving [                                           ] 0/3\r (no-eol) (esc)
+  archiving [=============>                             ] 1/3\r (no-eol) (esc)
+  archiving [=============>                             ] 1/3\r (no-eol) (esc)
+  archiving [===========================>               ] 2/3\r (no-eol) (esc)
+  archiving [===========================>               ] 2/3\r (no-eol) (esc)
+  archiving [==========================================>] 3/3\r (no-eol) (esc)
+  archiving [==========================================>] 3/3\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
+  \r (no-eol) (esc)
+  linking [ <=>                                           ] 1\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
+  \r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
+  \r (no-eol) (esc)
+  linking [  <=>                                          ] 1cloning subrepo foo from $TESTTMP/repo/foo
+  cloning subrepo foo/bar from $TESTTMP/repo/foo/bar (glob)
+#endif
 
 The newly cloned subrepos contain no working copy:
 
@@ -500,9 +561,19 @@
   $ hg init test
   $ cd test
   $ hg init x
+  $ echo abc > abc.txt
+  $ hg ci -Am "abc"
+  adding abc.txt
   $ echo "x = x" >> .hgsub
   $ hg add .hgsub
   $ touch a x/a
   $ hg add a x/a
 
+  $ hg ci -Sm "added x"
+  committing subrepository x
+  $ echo abc > x/a
+  $ hg revert --rev '.^' "set:subrepo('glob:x*')"
+  abort: subrepository 'x' does not exist in 25ac2c9b3180!
+  [255]
+
   $ cd ..
--- a/tests/test-subrepo.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-subrepo.t	Thu Apr 16 20:57:51 2015 -0500
@@ -25,8 +25,13 @@
   abort: can't commit subrepos without .hgsub
   [255]
 
+  $ hg -R s add s/a
+  $ hg files -S
+  .hgsub
+  a
+  s/a (glob)
+
   $ hg -R s ci -Ams0
-  adding a
   $ hg sum
   parent: 0:f7b1eb17ad24 tip
    0
@@ -50,9 +55,16 @@
 Revert subrepo and test subrepo fileset keyword:
 
   $ echo b > s/a
+  $ hg revert --dry-run "set:subrepo('glob:s*')"
+  reverting subrepo s
+  reverting s/a (glob)
+  $ cat s/a
+  b
   $ hg revert "set:subrepo('glob:s*')"
   reverting subrepo s
   reverting s/a (glob)
+  $ cat s/a
+  a
   $ rm s/a.orig
 
 Revert subrepo with no backup. The "reverting s/a" line is gone since
@@ -84,7 +96,7 @@
 
   $ echo b >> s/a
   $ hg backout tip
-  abort: uncommitted changes in subrepo s
+  abort: uncommitted changes in subrepository 's'
   [255]
   $ hg revert -C -R s s/a
 
@@ -134,7 +146,7 @@
 
   $ echo c > s/a
   $ hg --config ui.commitsubrepos=no ci -m4
-  abort: uncommitted changes in subrepo s
+  abort: uncommitted changes in subrepository 's'
   (use --subrepos for recursive commit)
   [255]
   $ hg id
--- a/tests/test-tags.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-tags.t	Thu Apr 16 20:57:51 2015 -0500
@@ -1,7 +1,19 @@
+setup
+
+  $ cat >> $HGRCPATH << EOF
+  > [extensions]
+  > blackbox=
+  > mock=$TESTDIR/mockblackbox.py
+  > EOF
+
 Helper functions:
 
   $ cacheexists() {
-  >   [ -f .hg/cache/tags ] && echo "tag cache exists" || echo "no tag cache"
+  >   [ -f .hg/cache/tags2-visible ] && echo "tag cache exists" || echo "no tag cache"
+  > }
+
+  $ fnodescacheexists() {
+  >   [ -f .hg/cache/hgtagsfnodes1 ] && echo "fnodes cache exists" || echo "no fnodes cache"
   > }
 
   $ dumptags() {
@@ -20,10 +32,14 @@
   $ cd t
   $ cacheexists
   no tag cache
+  $ fnodescacheexists
+  no fnodes cache
   $ hg id
   000000000000 tip
   $ cacheexists
   no tag cache
+  $ fnodescacheexists
+  no fnodes cache
   $ echo a > a
   $ hg add a
   $ hg commit -m "test"
@@ -33,15 +49,20 @@
   acb14030fe0a tip
   $ cacheexists
   tag cache exists
+No fnodes cache because .hgtags file doesn't exist
+(this is an implementation detail)
+  $ fnodescacheexists
+  no fnodes cache
 
 Try corrupting the cache
 
-  $ printf 'a b' > .hg/cache/tags
+  $ printf 'a b' > .hg/cache/tags2-visible
   $ hg identify
-  .hg/cache/tags is corrupt, rebuilding it
   acb14030fe0a tip
   $ cacheexists
   tag cache exists
+  $ fnodescacheexists
+  no fnodes cache
   $ hg identify
   acb14030fe0a tip
 
@@ -67,21 +88,75 @@
   $ hg identify
   b9154636be93 tip
 
+We should have a fnodes cache now that we have a real tag
+The cache should have an empty entry for rev 0 and a valid entry for rev 1.
+
+
+  $ fnodescacheexists
+  fnodes cache exists
+  $ f --size --hexdump .hg/cache/hgtagsfnodes1
+  .hg/cache/hgtagsfnodes1: size=48
+  0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
+  0010: ff ff ff ff ff ff ff ff b9 15 46 36 26 b7 b4 a7 |..........F6&...|
+  0020: 73 e0 9e e3 c5 2f 51 0e 19 e0 5e 1f f9 66 d8 59 |s..../Q...^..f.Y|
+
 Repeat with cold tag cache:
 
-  $ rm -f .hg/cache/tags
+  $ rm -f .hg/cache/tags2-visible .hg/cache/hgtagsfnodes1
   $ hg identify
   b9154636be93 tip
 
+  $ fnodescacheexists
+  fnodes cache exists
+  $ f --size --hexdump .hg/cache/hgtagsfnodes1
+  .hg/cache/hgtagsfnodes1: size=48
+  0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
+  0010: ff ff ff ff ff ff ff ff b9 15 46 36 26 b7 b4 a7 |..........F6&...|
+  0020: 73 e0 9e e3 c5 2f 51 0e 19 e0 5e 1f f9 66 d8 59 |s..../Q...^..f.Y|
+
 And again, but now unable to write tag cache:
 
 #if unix-permissions
-  $ rm -f .hg/cache/tags
-  $ chmod 555 .hg
+  $ rm -f .hg/cache/tags2-visible .hg/cache/hgtagsfnodes1
+  $ chmod 555 .hg/cache
+  $ hg identify
+  b9154636be93 tip
+  $ chmod 755 .hg/cache
+#endif
+
+Tag cache debug info written to blackbox log
+
+  $ rm -f .hg/cache/tags2-visible .hg/cache/hgtagsfnodes1
   $ hg identify
   b9154636be93 tip
-  $ chmod 755 .hg
-#endif
+  $ hg blackbox -l 5
+  1970/01/01 00:00:00 bob> identify
+  1970/01/01 00:00:00 bob> writing 48 bytes to cache/hgtagsfnodes1
+  1970/01/01 00:00:00 bob> 0/1 cache hits/lookups in * seconds (glob)
+  1970/01/01 00:00:00 bob> writing .hg/cache/tags2-visible with 1 tags
+  1970/01/01 00:00:00 bob> identify exited 0 after ?.?? seconds (glob)
+
+Failure to acquire lock results in no write
+
+  $ rm -f .hg/cache/tags2-visible .hg/cache/hgtagsfnodes1
+  $ echo 'foo:1' > .hg/wlock
+  $ hg identify
+  b9154636be93 tip
+  $ hg blackbox -l 5
+  1970/01/01 00:00:00 bob> identify
+  1970/01/01 00:00:00 bob> not writing .hg/cache/hgtagsfnodes1 because lock held
+  1970/01/01 00:00:00 bob> 0/1 cache hits/lookups in * seconds (glob)
+  1970/01/01 00:00:00 bob> writing .hg/cache/tags2-visible with 1 tags
+  1970/01/01 00:00:00 bob> identify exited 0 after * seconds (glob)
+
+  $ fnodescacheexists
+  no fnodes cache
+
+  $ rm .hg/wlock
+
+  $ rm -f .hg/cache/tags2-visible .hg/cache/hgtagsfnodes1
+  $ hg identify
+  b9154636be93 tip
 
 Create a branch:
 
@@ -102,9 +177,29 @@
   $ hg add b
   $ hg commit -m "branch"
   created new head
+
+Creating a new commit shouldn't append to the .hgtags fnodes cache until
+tags info is accessed
+
+  $ f --size --hexdump .hg/cache/hgtagsfnodes1
+  .hg/cache/hgtagsfnodes1: size=48
+  0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
+  0010: ff ff ff ff ff ff ff ff b9 15 46 36 26 b7 b4 a7 |..........F6&...|
+  0020: 73 e0 9e e3 c5 2f 51 0e 19 e0 5e 1f f9 66 d8 59 |s..../Q...^..f.Y|
+
   $ hg id
   c8edf04160c7 tip
 
+First 4 bytes of record 3 are the changeset fragment
+
+  $ f --size --hexdump .hg/cache/hgtagsfnodes1
+  .hg/cache/hgtagsfnodes1: size=72
+  0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
+  0010: ff ff ff ff ff ff ff ff b9 15 46 36 26 b7 b4 a7 |..........F6&...|
+  0020: 73 e0 9e e3 c5 2f 51 0e 19 e0 5e 1f f9 66 d8 59 |s..../Q...^..f.Y|
+  0030: c8 ed f0 41 00 00 00 00 00 00 00 00 00 00 00 00 |...A............|
+  0040: 00 00 00 00 00 00 00 00                         |........|
+
 Merge the two heads:
 
   $ hg merge 1
@@ -216,15 +311,123 @@
 
 Dump cache:
 
-  $ cat .hg/cache/tags
-  4 0c192d7d5e6b78a714de54a2e9627952a877e25a 0c04f2a8af31de17fab7422878ee5a2dadbc943d
-  3 6fa450212aeb2a21ed616a54aea39a4a27894cd7 7d3b718c964ef37b89e550ebdafd5789e76ce1b0
-  2 7a94127795a33c10a370c93f731fd9fea0b79af6 0c04f2a8af31de17fab7422878ee5a2dadbc943d
-  
+  $ cat .hg/cache/tags2-visible
+  4 0c192d7d5e6b78a714de54a2e9627952a877e25a
   bbd179dfa0a71671c253b3ae0aa1513b60d199fa bar
   bbd179dfa0a71671c253b3ae0aa1513b60d199fa bar
   78391a272241d70354aa14c874552cad6b51bb42 bar
 
+  $ f --size --hexdump .hg/cache/hgtagsfnodes1
+  .hg/cache/hgtagsfnodes1: size=120
+  0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
+  0010: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
+  0020: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
+  0030: 7a 94 12 77 0c 04 f2 a8 af 31 de 17 fa b7 42 28 |z..w.....1....B(|
+  0040: 78 ee 5a 2d ad bc 94 3d 6f a4 50 21 7d 3b 71 8c |x.Z-...=o.P!};q.|
+  0050: 96 4e f3 7b 89 e5 50 eb da fd 57 89 e7 6c e1 b0 |.N.{..P...W..l..|
+  0060: 0c 19 2d 7d 0c 04 f2 a8 af 31 de 17 fa b7 42 28 |..-}.....1....B(|
+  0070: 78 ee 5a 2d ad bc 94 3d                         |x.Z-...=|
+
+Corrupt the .hgtags fnodes cache
+Extra junk data at the end should get overwritten on next cache update
+
+  $ echo extra >> .hg/cache/hgtagsfnodes1
+  $ echo dummy1 > foo
+  $ hg commit -m throwaway1
+
+  $ hg tags
+  tip                                5:8dbfe60eff30
+  bar                                1:78391a272241
+
+  $ hg blackbox -l 5
+  1970/01/01 00:00:00 bob> tags
+  1970/01/01 00:00:00 bob> writing 24 bytes to cache/hgtagsfnodes1
+  1970/01/01 00:00:00 bob> 2/3 cache hits/lookups in * seconds (glob)
+  1970/01/01 00:00:00 bob> writing .hg/cache/tags2-visible with 1 tags
+  1970/01/01 00:00:00 bob> tags exited 0 after * seconds (glob)
+
+#if unix-permissions no-root
+Errors writing to .hgtags fnodes cache are silently ignored
+
+  $ echo dummy2 > foo
+  $ hg commit -m throwaway2
+
+  $ chmod a-w .hg/cache/hgtagsfnodes1
+  $ rm -f .hg/cache/tags2-visible
+
+  $ hg tags
+  tip                                6:b968051b5cf3
+  bar                                1:78391a272241
+
+  $ hg blackbox -l 5
+  1970/01/01 00:00:00 bob> tags
+  1970/01/01 00:00:00 bob> couldn't write cache/hgtagsfnodes1: [Errno 13] Permission denied: '$TESTTMP/t2/.hg/cache/hgtagsfnodes1'
+  1970/01/01 00:00:00 bob> 2/3 cache hits/lookups in * seconds (glob)
+  1970/01/01 00:00:00 bob> writing .hg/cache/tags2-visible with 1 tags
+  1970/01/01 00:00:00 bob> tags exited 0 after * seconds (glob)
+
+  $ chmod a+w .hg/cache/hgtagsfnodes1
+
+  $ rm -f .hg/cache/tags2-visible
+  $ hg tags
+  tip                                6:b968051b5cf3
+  bar                                1:78391a272241
+
+  $ hg blackbox -l 5
+  1970/01/01 00:00:00 bob> tags
+  1970/01/01 00:00:00 bob> writing 24 bytes to cache/hgtagsfnodes1
+  1970/01/01 00:00:00 bob> 2/3 cache hits/lookups in * seconds (glob)
+  1970/01/01 00:00:00 bob> writing .hg/cache/tags2-visible with 1 tags
+  1970/01/01 00:00:00 bob> tags exited 0 after * seconds (glob)
+
+  $ f --size .hg/cache/hgtagsfnodes1
+  .hg/cache/hgtagsfnodes1: size=168
+
+  $ hg -q --config extensions.strip= strip -r 6 --no-backup
+#endif
+
+Stripping doesn't truncate the tags cache until new data is available
+
+  $ rm -f .hg/cache/hgtagsfnodes1 .hg/cache/tags2-visible
+  $ hg tags
+  tip                                5:8dbfe60eff30
+  bar                                1:78391a272241
+
+  $ f --size .hg/cache/hgtagsfnodes1
+  .hg/cache/hgtagsfnodes1: size=144
+
+  $ hg -q --config extensions.strip= strip -r 5 --no-backup
+  $ hg tags
+  tip                                4:0c192d7d5e6b
+  bar                                1:78391a272241
+
+  $ hg blackbox -l 4
+  1970/01/01 00:00:00 bob> writing 24 bytes to cache/hgtagsfnodes1
+  1970/01/01 00:00:00 bob> 2/3 cache hits/lookups in * seconds (glob)
+  1970/01/01 00:00:00 bob> writing .hg/cache/tags2-visible with 1 tags
+  1970/01/01 00:00:00 bob> tags exited 0 after * seconds (glob)
+
+  $ f --size .hg/cache/hgtagsfnodes1
+  .hg/cache/hgtagsfnodes1: size=120
+
+  $ echo dummy > foo
+  $ hg commit -m throwaway3
+
+  $ hg tags
+  tip                                5:035f65efb448
+  bar                                1:78391a272241
+
+  $ hg blackbox -l 5
+  1970/01/01 00:00:00 bob> tags
+  1970/01/01 00:00:00 bob> writing 24 bytes to cache/hgtagsfnodes1
+  1970/01/01 00:00:00 bob> 2/3 cache hits/lookups in * seconds (glob)
+  1970/01/01 00:00:00 bob> writing .hg/cache/tags2-visible with 1 tags
+  1970/01/01 00:00:00 bob> tags exited 0 after * seconds (glob)
+  $ f --size .hg/cache/hgtagsfnodes1
+  .hg/cache/hgtagsfnodes1: size=144
+
+  $ hg -q --config extensions.strip= strip -r 5 --no-backup
+
 Test tag removal:
 
   $ hg tag --remove bar     # rev 5
@@ -328,7 +531,7 @@
   $ hg tags                  # partly stale
   tip                                4:735c3ca72986
   bar                                0:bbd179dfa0a7
-  $ rm -f .hg/cache/tags
+  $ rm -f .hg/cache/tags2-visible
   $ hg tags                  # cold cache
   tip                                4:735c3ca72986
   bar                                0:bbd179dfa0a7
--- a/tests/test-tools.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-tools.t	Thu Apr 16 20:57:51 2015 -0500
@@ -43,7 +43,7 @@
   foo: mode=644
 #endif
 
-  $ seq 10 > bar
+  $ python $TESTDIR/seq.py 10 > bar
 #if unix-permissions symlink
   $ chmod +x bar
   $ f bar --newer foo --mode --type --size --dump --links --bytes 7
--- a/tests/test-up-local-change.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-up-local-change.t	Thu Apr 16 20:57:51 2015 -0500
@@ -55,6 +55,8 @@
   picked tool 'true' for a (binary False symlink False)
   merging a
   my a@c19d34741b0a+ other a@1e71731e6fbb ancestor a@c19d34741b0a
+  launching merge tool: true $TESTTMP/r2/a * (glob)
+  merge tool returned: 0
   1 files updated, 1 files merged, 0 files removed, 0 files unresolved
   $ hg parents
   changeset:   1:1e71731e6fbb
@@ -76,6 +78,8 @@
   picked tool 'true' for a (binary False symlink False)
   merging a
   my a@1e71731e6fbb+ other a@c19d34741b0a ancestor a@1e71731e6fbb
+  launching merge tool: true $TESTTMP/r2/a * (glob)
+  merge tool returned: 0
   0 files updated, 1 files merged, 1 files removed, 0 files unresolved
   $ hg parents
   changeset:   0:c19d34741b0a
@@ -105,6 +109,8 @@
   picked tool 'true' for a (binary False symlink False)
   merging a
   my a@c19d34741b0a+ other a@1e71731e6fbb ancestor a@c19d34741b0a
+  launching merge tool: true $TESTTMP/r2/a * (glob)
+  merge tool returned: 0
   1 files updated, 1 files merged, 0 files removed, 0 files unresolved
   $ hg parents
   changeset:   1:1e71731e6fbb
--- a/tests/test-update-branches.t	Thu Apr 16 22:33:53 2015 +0900
+++ b/tests/test-update-branches.t	Thu Apr 16 20:57:51 2015 -0500
@@ -161,7 +161,7 @@
   M foo
 
   $ revtest '-c dirtysub linear'   dirtysub 1 2 -c
-  abort: uncommitted changes
+  abort: uncommitted changes in subrepository 'sub'
   parent=1
   M sub/suba