merge default into stable for 3.3 feature freeze (stable, 3.3-rc)
author       Matt Mackall <mpm@selenic.com>
date         Sat, 17 Jan 2015 18:28:30 -0800
branch       stable
changeset    23918:db8e3f7948b1
parent       23826:c90d195320c5 (current diff)
parent       23917:3cbb5bf4035d (diff)
child        23919:50bcd4e51c41
description  merge default into stable for 3.3 feature freeze
files        contrib/mergetools.hgrc
--- a/Makefile	Sat Jan 10 21:31:59 2015 +0900
+++ b/Makefile	Sat Jan 17 18:28:30 2015 -0800
@@ -155,6 +155,17 @@
 	mkdir -p packages/fedora20
 	contrib/dockerrpm fedora20
 
+fedora21:
+	mkdir -p packages/fedora21
+	contrib/buildrpm
+	cp rpmbuild/RPMS/*/* packages/fedora21
+	cp rpmbuild/SRPMS/* packages/fedora21
+	rm -rf rpmbuild
+
+docker-fedora21:
+	mkdir -p packages/fedora21
+	contrib/dockerrpm fedora21
+
 centos5:
 	mkdir -p packages/centos5
 	contrib/buildrpm --withpython
@@ -188,5 +199,5 @@
 .PHONY: help all local build doc clean install install-bin install-doc \
 	install-home install-home-bin install-home-doc \
 	dist dist-notests check tests check-code update-pot \
-	osx fedora20 docker-fedora20 \
+	osx fedora20 docker-fedora20 fedora21 docker-fedora21 \
 	centos5 docker-centos5 centos6 docker-centos6 centos7 docker-centos7
--- a/contrib/Makefile.python	Sat Jan 10 21:31:59 2015 +0900
+++ b/contrib/Makefile.python	Sat Jan 17 18:28:30 2015 -0800
@@ -1,4 +1,4 @@
-PYTHONVER=2.7.6
+PYTHONVER=2.7.9
 PYTHONNAME=python-
 PREFIX=$(HOME)/bin/prefix-$(PYTHONNAME)$(PYTHONVER)
 SYMLINKDIR=$(HOME)/bin
@@ -27,7 +27,7 @@
 # debian: apt-get install zlib1g-dev libbz2-dev libssl-dev
 	@echo
 	@echo 'To build a nice collection of interesting Python versions:'
-	@echo '  $$ for v in 2.{4{,.2,.3},5{,.6},6{,.1,.2,.9},7{,.6}}; do'
+	@echo '  $$ for v in 2.{4{,.2,.3},5{,.6},6{,.1,.2,.9},7{,.8,.9}}; do'
 	@echo '    make -f Makefile.python symlink PYTHONVER=$$v || break; done'
 	@echo 'To run a Mercurial test on all these Python versions:'
 	@echo '  $$ for py in `cd ~/bin && ls $(PYTHONNAME)2.*`; do'
@@ -44,7 +44,7 @@
 PYTHON_SRCFILE=$(PYTHON_SRCDIR).tgz
 
 $(PREFIX)/bin/python:
-	[ -f $(PYTHON_SRCFILE) ] || wget http://www.python.org/ftp/python/$(PYTHONVER)/$(PYTHON_SRCFILE) || [ -f $(PYTHON_SRCFILE) ]
+	[ -f $(PYTHON_SRCFILE) ] || wget http://www.python.org/ftp/python/$(PYTHONVER)/$(PYTHON_SRCFILE) || curl -OL http://www.python.org/ftp/python/$(PYTHONVER)/$(PYTHON_SRCFILE) || [ -f $(PYTHON_SRCFILE) ]
 	rm -rf $(PYTHON_SRCDIR)
 	tar xf $(PYTHON_SRCFILE)
 	# Ubuntu disables SSLv2 the hard way, disable it on old Pythons too
@@ -60,7 +60,7 @@
 	printf 'import sys, zlib, bz2\nif sys.version_info >= (2,6):\n import ssl' | $(PREFIX)/bin/python
 	rm -rf $(PYTHON_SRCDIR)
 
-DOCUTILSVER=0.11
+DOCUTILSVER=0.12
 DOCUTILS_SRCDIR=docutils-$(DOCUTILSVER)
 DOCUTILS_SRCFILE=$(DOCUTILS_SRCDIR).tar.gz
 
--- a/contrib/bash_completion	Sat Jan 10 21:31:59 2015 +0900
+++ b/contrib/bash_completion	Sat Jan 17 18:28:30 2015 -0800
@@ -110,7 +110,7 @@
 
 _hg_labels()
 {
-    local labels="$(_hg_cmd debuglabelcomplete "$cur")"
+    local labels="$(_hg_cmd debugnamecomplete "$cur")"
     local IFS=$'\n'
     COMPREPLY=(${COMPREPLY[@]:-} $(compgen -W '$labels' -- "$cur"))
 }
--- a/contrib/buildrpm	Sat Jan 10 21:31:59 2015 +0900
+++ b/contrib/buildrpm	Sat Jan 17 18:28:30 2015 -0800
@@ -17,7 +17,8 @@
         ;;
     --withpython | --with-python)
         shift
-        PYTHONVER=2.7.8
+        PYTHONVER=2.7.9
+        PYTHONMD5=5eebcaa0030dc4061156d3429657fb83
         ;;
     --rpmbuilddir )
         shift
@@ -76,11 +77,18 @@
     cd build
     PYTHON_SRCFILE=Python-$PYTHONVER.tgz
     [ -f $PYTHON_SRCFILE ] || curl -Lo $PYTHON_SRCFILE http://www.python.org/ftp/python/$PYTHONVER/$PYTHON_SRCFILE
+    if [ "$PYTHONMD5" ]; then
+        echo "$PYTHONMD5 $PYTHON_SRCFILE" | md5sum -w -c
+    fi
     ln -f $PYTHON_SRCFILE $RPMBUILDDIR/SOURCES/$PYTHON_SRCFILE
 
     DOCUTILSVER=`sed -ne "s/^%global docutilsname docutils-//p" $specfile`
     DOCUTILS_SRCFILE=docutils-$DOCUTILSVER.tar.gz
     [ -f $DOCUTILS_SRCFILE ] || curl -Lo $DOCUTILS_SRCFILE http://downloads.sourceforge.net/project/docutils/docutils/$DOCUTILSVER/$DOCUTILS_SRCFILE
+    DOCUTILSMD5=`sed -ne "s/^%global docutilsmd5 //p" $specfile`
+    if [ "$DOCUTILSMD5" ]; then
+        echo "$DOCUTILSMD5 $DOCUTILS_SRCFILE" | md5sum -w -c
+    fi
     ln -f $DOCUTILS_SRCFILE $RPMBUILDDIR/SOURCES/$DOCUTILS_SRCFILE
 )
 fi
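
The two new md5sum checks above verify that each downloaded tarball matches the digest recorded in the script (for Python) or in the spec file (for docutils). Purely as an illustration of the same check (buildrpm itself stays a shell script), here is a minimal Python sketch using hashlib, with the Python 2.7.9 digest hard-coded in the patch::

    import hashlib

    def verifymd5(filename, expected):
        """Return True if filename's MD5 hex digest matches expected."""
        h = hashlib.md5()
        with open(filename, 'rb') as fp:
            for chunk in iter(lambda: fp.read(1 << 20), b''):
                h.update(chunk)
        return h.hexdigest() == expected

    # digest taken from the --with-python branch above
    if not verifymd5('Python-2.7.9.tgz', '5eebcaa0030dc4061156d3429657fb83'):
        raise SystemExit('checksum mismatch, refusing to build')
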
--- a/contrib/check-code.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/contrib/check-code.py	Sat Jan 17 18:28:30 2015 -0800
@@ -94,7 +94,7 @@
     (r'sed.*-i', "don't use 'sed -i', use a temporary file"),
     (r'\becho\b.*\\n', "don't use 'echo \\n', use printf"),
     (r'echo -n', "don't use 'echo -n', use printf"),
-    (r'(^| )\bwc\b[^|]*$\n(?!.*\(re\))', "filter wc output"),
+    (r'(^|\|\s*)\bwc\b[^|]*$\n(?!.*\(re\))', "filter wc output"),
     (r'head -c', "don't use 'head -c', use 'dd'"),
     (r'tail -n', "don't use the '-n' option to tail, just use '-<num>'"),
     (r'sha1sum', "don't use sha1sum, use $TESTDIR/md5sum.py"),
@@ -291,7 +291,7 @@
      "always assign an opened file to a variable, and close it afterwards"),
     (r'[\s\(](open|file)\([^)]*\)\.',
      "always assign an opened file to a variable, and close it afterwards"),
-    (r'(?i)descendent', "the proper spelling is descendAnt"),
+    (r'(?i)descend[e]nt', "the proper spelling is descendAnt"),
     (r'\.debug\(\_', "don't mark debug messages for translation"),
     (r'\.strip\(\)\.split\(\)', "no need to strip before splitting"),
     (r'^\s*except\s*:', "naked except clause", r'#.*re-raises'),
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/docker/apache-server/Dockerfile	Sat Jan 17 18:28:30 2015 -0800
@@ -0,0 +1,23 @@
+FROM debian:wheezy
+
+ENV DEBIAN_FRONTEND noninteractive
+ENV WSGI_PROCESSES 4
+ENV WSGI_THREADS 1
+ENV WSGI_MAX_REQUESTS 100000
+
+EXPOSE 80
+VOLUME ["/var/hg/htdocs", "/var/hg/repos"]
+
+RUN apt-get update && apt-get -y install libapache2-mod-wsgi python-dev vim
+
+# Install our own Apache site.
+RUN a2dissite 000-default
+ADD vhost.conf /etc/apache2/sites-available/hg
+RUN a2ensite hg
+
+ADD hgwebconfig /defaulthgwebconfig
+
+ADD entrypoint.sh /entrypoint.sh
+ENTRYPOINT ["/entrypoint.sh"]
+
+CMD ["/usr/sbin/apache2", "-DFOREGROUND"]
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/docker/apache-server/README.rst	Sat Jan 17 18:28:30 2015 -0800
@@ -0,0 +1,144 @@
+====================
+Apache Docker Server
+====================
+
+This directory contains code for running a Mercurial hgweb server via
+mod_wsgi with the Apache HTTP Server inside a Docker container.
+
+.. important::
+
+   This container is intended for testing purposes only: it is
+   **not** meant to be suitable for production use.
+
+Building Image
+==============
+
+The first step is to build a Docker image containing Apache and mod_wsgi::
+
+  $ docker build -t hg-apache .
+
+.. important::
+
+   You should rebuild the image whenever the content of this directory
+   changes. Rebuilding after pulling or when you haven't run the container
+   in a while is typically a good idea.
+
+Running the Server
+==================
+
+To run the container, you'll execute something like::
+
+  $ docker run --rm -it -v `pwd`/../../..:/var/hg/source -p 8000:80 hg-apache
+
+If you aren't a Docker expert:
+
+* ``--rm`` will remove the container when it stops (so it doesn't clutter
+  your system)
+* ``-i`` will launch the container in interactive mode so stdin is attached
+* ``-t`` will allocate a pseudo TTY
+* ``-v src:dst`` will mount the host filesystem at ``src`` into ``dst``
+  in the container. In our example, we assume you are running from this
+  directory and use the source code a few directories up.
+* ``-p 8000:80`` will publish port ``80`` on the container to port ``8000``
+  on the host, allowing you to access the HTTP server on the host interface.
+* ``hg-apache`` is the container image to run. This should correspond to what
+  we build with ``docker build``.
+
+.. important::
+
+   The container **requires** that ``/var/hg/source`` contain the Mercurial
+   source code.
+
+   Upon start, the container will attempt an install of the source in that
+   directory. If the architecture of the host machine doesn't match that of
+   the Docker host (e.g. when running Boot2Docker under OS X), Mercurial's
+   Python C extensions will fail to run. Be sure to ``make clean`` your
+   host's source tree before mounting it in the container to avoid this.
+
+When starting the container, you should see some start-up actions (including
+a Mercurial install) and some output saying Apache has started::
+
+Now if you load ``http://localhost:8000/`` (or whatever interface Docker
+is using), you should see hgweb running!
+
+For your convenience, we've created an empty repository available at
+``/repo``. Feel free to populate it with ``hg push``.
+
+Customizing the Server
+======================
+
+By default, the Docker container installs a basic hgweb config and an
+empty dummy repository. It also uses some reasonable defaults for
+mod_wsgi.
+
+Customizing the WSGI Dispatcher And Mercurial Config
+----------------------------------------------------
+
+By default, the Docker environment installs a custom ``hgweb.wsgi``
+file (based on the example in ``contrib/hgweb.wsgi``). The file
+is installed into ``/var/hg/htdocs/hgweb.wsgi``.
+
+A default hgweb configuration file is also installed. The ``hgwebconfig``
+file from this directory is installed into ``/var/hg/htdocs/config``.
+
+You have a few options for customizing these files.
+
+The simplest is to hack up ``hgwebconfig`` and ``entrypoint.sh`` in
+this directory and to rebuild the Docker image. This has the downside
+that the Mercurial working copy is modified and you may accidentally
+commit unwanted changes.
+
+The next simplest is to copy this directory somewhere, make your changes,
+then rebuild the image. No working copy changes involved.
+
+The preferred solution is to mount a host file into the container and
+overwrite the built-in defaults.
+
+For example, say we create a custom hgweb config file in ``~/hgweb``. We
+can start the container like so to install our custom config file::
+
+  $ docker run -v ~/hgweb:/var/hg/htdocs/config ...
+
+You can do something similar to install a custom WSGI dispatcher::
+
+  $ docker run -v ~/hgweb.wsgi:/var/hg/htdocs/hgweb.wsgi ...
+
+Managing Repositories
+---------------------
+
+Repositories are served from ``/var/hg/repos`` by default. This directory
+is configured as a Docker volume. This means you can mount an existing
+data volume container in the container so repository data is persisted
+across container invocations. See
+https://docs.docker.com/userguide/dockervolumes/ for more.
+
+Alternatively, if you just want to perform lightweight repository
+manipulation, open a shell in the container::
+
+  $ docker exec -it <container> /bin/bash
+
+Then run ``hg init``, etc to manipulate the repositories in ``/var/hg/repos``.
+
+mod_wsgi Configuration Settings
+-------------------------------
+
+mod_wsgi settings can be controlled with the following environment
+variables.
+
+WSGI_PROCESSES
+   Number of WSGI processes to run.
+WSGI_THREADS
+   Number of threads to run in each WSGI process.
+WSGI_MAX_REQUESTS
+   Maximum number of requests each WSGI process may serve before it is
+   reaped.
+
+See https://code.google.com/p/modwsgi/wiki/ConfigurationDirectives#WSGIDaemonProcess
+for more on these settings.
+
+.. note::
+
+   The default is to use 1 thread per process. The reason is that Mercurial
+   doesn't perform well in multi-threaded mode due to the GIL. Most people
+   run a single thread per process in production for this reason, so that's
+   what we default to.
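
The WSGI dispatcher you can mount over ``/var/hg/htdocs/hgweb.wsgi`` should have the same shape as the one the container writes on first start (see ``entrypoint.sh`` in this patch). A minimal sketch, assuming the default install prefix ``/var/hg/install`` and config path ``/var/hg/htdocs/config``::

    # hgweb.wsgi - mirrors what entrypoint.sh generates by default
    config = '/var/hg/htdocs/config'

    import sys
    sys.path.insert(0, '/var/hg/install/lib/python2.7/site-packages')

    from mercurial import demandimport
    demandimport.enable()

    from mercurial.hgweb import hgweb
    application = hgweb(config)

Mount it with ``docker run -v ~/hgweb.wsgi:/var/hg/htdocs/hgweb.wsgi ...`` as shown earlier.
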
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/docker/apache-server/entrypoint.sh	Sat Jan 17 18:28:30 2015 -0800
@@ -0,0 +1,80 @@
+#!/bin/bash
+
+# This script gets executed on container start. Its job is to set up
+# the Mercurial environment and invoke the server.
+
+# The container requires the Mercurial source to be mounted at
+# /var/hg/source. On start, Mercurial is installed from that directory
+# into /var/hg/install before the server command is exec'd; if the
+# source is missing, the script prints an error and exits.
+
+set -e
+
+SOURCE_DIR=/var/hg/source
+INSTALL_DIR=/var/hg/install
+REPOS_DIR=/var/hg/repos
+HTDOCS_DIR=/var/hg/htdocs
+
+if [ ! -d ${SOURCE_DIR} ]; then
+  echo "Mercurial source not available at ${SOURCE_DIR}"
+  echo "You need to mount a volume containing the Mercurial source code"
+  echo "when running the container. For example:"
+  echo ""
+  echo "  $ docker run -v ~/src/hg:/${SOURCE_DIR} hg-apache"
+  echo ""
+  echo "This container will now stop running."
+  exit 1
+fi
+
+echo "Installing Mercurial from ${SOURCE_DIR} into ${INSTALL_DIR}"
+pushd ${SOURCE_DIR}
+/usr/bin/python2.7 setup.py install --root=/ --prefix=${INSTALL_DIR} --force
+popd
+
+mkdir -p ${HTDOCS_DIR}
+
+# Provide a default config if the user hasn't supplied one.
+if [ ! -f ${HTDOCS_DIR}/config ]; then
+  cp /defaulthgwebconfig ${HTDOCS_DIR}/config
+fi
+
+if [ ! -f ${HTDOCS_DIR}/hgweb.wsgi ]; then
+  cat >> ${HTDOCS_DIR}/hgweb.wsgi << EOF
+config = '${HTDOCS_DIR}/config'
+
+import sys
+sys.path.insert(0, '${INSTALL_DIR}/lib/python2.7/site-packages')
+
+from mercurial import demandimport
+demandimport.enable()
+
+from mercurial.hgweb import hgweb
+application = hgweb(config)
+EOF
+fi
+
+mkdir -p ${REPOS_DIR}
+
+if [ ! -d ${REPOS_DIR}/repo ]; then
+  ${INSTALL_DIR}/bin/hg init ${REPOS_DIR}/repo
+  chown -R www-data:www-data ${REPOS_DIR}/repo
+fi
+
+# This is necessary to make debuginstall happy.
+if [ ! -f ~/.hgrc ]; then
+  cat >> ~/.hgrc << EOF
+[ui]
+username = Dummy User <nobody@example.com>
+EOF
+fi
+
+echo "Verifying Mercurial installation looks happy"
+${INSTALL_DIR}/bin/hg debuginstall
+
+. /etc/apache2/envvars
+
+echo "Starting Apache HTTP Server on port 80"
+echo "We hope you remembered to publish this port when running the container!"
+echo "If this is an interactive container, simply CTRL^C to stop."
+
+exec "$@"
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/docker/apache-server/hgwebconfig	Sat Jan 17 18:28:30 2015 -0800
@@ -0,0 +1,6 @@
+[paths]
+/ = /var/hg/repos/*
+
+[web]
+allow_push = *
+push_ssl = False
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/docker/apache-server/vhost.conf	Sat Jan 17 18:28:30 2015 -0800
@@ -0,0 +1,24 @@
+# Apache won't be able to resolve its own hostname, so we sneak this
+# into the global context to silence a confusing-to-user warning on
+# server start.
+ServerName hg
+
+<VirtualHost *:80>
+  DocumentRoot /var/hg/htdocs
+  <Directory />
+    Options FollowSymLinks
+    AllowOverride None
+  </Directory>
+
+  SetEnv HGENCODING UTF-8
+  SetEnv LC_CTYPE UTF-8
+
+  WSGIDaemonProcess hg processes=${WSGI_PROCESSES} threads=${WSGI_THREADS} maximum-requests=${WSGI_MAX_REQUESTS} user=www-data group=www-data display-name=hg-wsgi
+  WSGIProcessGroup hg
+
+  WSGIScriptAliasMatch ^(.*) /var/hg/htdocs/hgweb.wsgi$1
+
+  ErrorLog ${APACHE_LOG_DIR}/error.log
+  LogLevel warn
+  CustomLog ${APACHE_LOG_DIR}/access.log combined
+</VirtualHost>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/docker/fedora21	Sat Jan 17 18:28:30 2015 -0800
@@ -0,0 +1,8 @@
+FROM fedora:21
+RUN yum install -y gcc
+RUN yum install -y python-devel python-docutils
+RUN yum install -y make
+RUN yum install -y rpm-build
+RUN yum install -y gettext
+# For creating repo meta data
+RUN yum install -y createrepo
--- a/contrib/mercurial.spec	Sat Jan 10 21:31:59 2015 +0900
+++ b/contrib/mercurial.spec	Sat Jan 17 18:28:30 2015 -0800
@@ -6,7 +6,8 @@
 
 %global pythonver %{withpython}
 %global pythonname Python-%{withpython}
-%global docutilsname docutils-0.11
+%global docutilsname docutils-0.12
+%global docutilsmd5 4622263b62c5c771c03502afa3157768
 %global pythonhg python-hg
 %global hgpyprefix /usr/%{pythonhg}
 # byte compilation will fail on some some Python /test/ files
@@ -126,7 +127,6 @@
 install -m 644 contrib/mq.el $RPM_BUILD_ROOT%{emacs_lispdir}/
 
 mkdir -p $RPM_BUILD_ROOT/%{_sysconfdir}/mercurial/hgrc.d
-install -m 644 contrib/mergetools.hgrc $RPM_BUILD_ROOT%{_sysconfdir}/mercurial/hgrc.d/mergetools.rc
 
 %clean
 rm -rf $RPM_BUILD_ROOT
@@ -149,7 +149,6 @@
 %config(noreplace) %{_sysconfdir}/bash_completion.d/mercurial.sh
 %dir %{_sysconfdir}/mercurial
 %dir %{_sysconfdir}/mercurial/hgrc.d
-%config(noreplace) %{_sysconfdir}/mercurial/hgrc.d/mergetools.rc
 %if "%{?withpython}"
 %{_bindir}/%{pythonhg}
 %{hgpyprefix}
--- a/contrib/mergetools.hgrc	Sat Jan 10 21:31:59 2015 +0900
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,135 +0,0 @@
-# Some default global settings for common merge tools
-
-[merge-tools]
-kdiff3.args=--auto --L1 base --L2 local --L3 other $base $local $other -o $output
-kdiff3.regkey=Software\KDiff3
-kdiff3.regkeyalt=Software\Wow6432Node\KDiff3
-kdiff3.regappend=\kdiff3.exe
-kdiff3.fixeol=True
-kdiff3.gui=True
-kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child
-
-gvimdiff.args=--nofork -d -g -O $local $other $base
-gvimdiff.regkey=Software\Vim\GVim
-gvimdiff.regkeyalt=Software\Wow6432Node\Vim\GVim
-gvimdiff.regname=path
-gvimdiff.priority=-9
-gvimdiff.diffargs=--nofork -d -g -O $parent $child
-
-vimdiff.args=$local $other $base -c 'redraw | echomsg "hg merge conflict, type \":cq\" to abort vimdiff"'
-vimdiff.check=changed
-vimdiff.priority=-10
-
-merge.check=conflicts
-merge.priority=-100
-
-gpyfm.gui=True
-
-meld.gui=True
-meld.args=--label='local' $local --label='merged' $base --label='other' $other -o $output
-meld.check=changed
-meld.diffargs=-a --label='$plabel1' $parent --label='$clabel' $child
-
-tkdiff.args=$local $other -a $base -o $output
-tkdiff.gui=True
-tkdiff.priority=-8
-tkdiff.diffargs=-L '$plabel1' $parent -L '$clabel' $child
-
-xxdiff.args=--show-merged-pane --exit-with-merge-status --title1 local --title2 base --title3 other --merged-filename $output --merge $local $base $other
-xxdiff.gui=True
-xxdiff.priority=-8
-xxdiff.diffargs=--title1 '$plabel1' $parent --title2 '$clabel' $child
-
-diffmerge.regkey=Software\SourceGear\SourceGear DiffMerge\
-diffmerge.regkeyalt=Software\Wow6432Node\SourceGear\SourceGear DiffMerge\
-diffmerge.regname=Location
-diffmerge.priority=-7
-diffmerge.args=-nosplash -merge -title1=local -title2=merged -title3=other $local $base $other -result=$output
-diffmerge.check=changed
-diffmerge.gui=True
-diffmerge.diffargs=--nosplash --title1='$plabel1' --title2='$clabel' $parent $child
-
-p4merge.args=$base $local $other $output
-p4merge.regkey=Software\Perforce\Environment
-p4merge.regkeyalt=Software\Wow6432Node\Perforce\Environment
-p4merge.regname=P4INSTROOT
-p4merge.regappend=\p4merge.exe
-p4merge.gui=True
-p4merge.priority=-8
-p4merge.diffargs=$parent $child
-
-p4mergeosx.executable = /Applications/p4merge.app/Contents/MacOS/p4merge
-p4mergeosx.args = $base $local $other $output
-p4mergeosx.gui = True
-p4mergeosx.priority=-8
-p4mergeosx.diffargs=$parent $child
-
-tortoisemerge.args=/base:$base /mine:$local /theirs:$other /merged:$output
-tortoisemerge.regkey=Software\TortoiseSVN
-tortoisemerge.regkeyalt=Software\Wow6432Node\TortoiseSVN
-tortoisemerge.check=changed
-tortoisemerge.gui=True
-tortoisemerge.priority=-8
-tortoisemerge.diffargs=/base:$parent /mine:$child /basename:'$plabel1' /minename:'$clabel'
-
-ecmerge.args=$base $local $other --mode=merge3 --title0=base --title1=local --title2=other --to=$output
-ecmerge.regkey=Software\Elli\xc3\xa9 Computing\Merge
-ecmerge.regkeyalt=Software\Wow6432Node\Elli\xc3\xa9 Computing\Merge
-ecmerge.gui=True
-ecmerge.diffargs=$parent $child --mode=diff2 --title1='$plabel1' --title2='$clabel'
-
-# editmerge is a small script shipped in contrib.
-# It needs this config otherwise it behaves the same as internal:local
-editmerge.args=$output
-editmerge.check=changed
-editmerge.premerge=keep
-
-filemerge.executable=/Developer/Applications/Utilities/FileMerge.app/Contents/MacOS/FileMerge
-filemerge.args=-left $other -right $local -ancestor $base -merge $output
-filemerge.gui=True
-
-; Windows version of Beyond Compare
-beyondcompare3.args=$local $other $base $output /ro /lefttitle=local /centertitle=base /righttitle=other /automerge /reviewconflicts /solo
-beyondcompare3.regkey=Software\Scooter Software\Beyond Compare 3
-beyondcompare3.regname=ExePath
-beyondcompare3.gui=True
-beyondcompare3.priority=-2
-beyondcompare3.diffargs=/lro /lefttitle='$plabel1' /righttitle='$clabel' /solo /expandall $parent $child
-
-; Linux version of Beyond Compare
-bcompare.args=$local $other $base -mergeoutput=$output -ro -lefttitle=parent1 -centertitle=base -righttitle=parent2 -outputtitle=merged -automerge -reviewconflicts -solo
-bcompare.gui=True
-bcompare.priority=-1
-bcompare.diffargs=-lro -lefttitle='$plabel1' -righttitle='$clabel' -solo -expandall $parent $child
-
-winmerge.args=/e /x /wl /ub /dl other /dr local $other $local $output
-winmerge.regkey=Software\Thingamahoochie\WinMerge
-winmerge.regkeyalt=Software\Wow6432Node\Thingamahoochie\WinMerge\
-winmerge.regname=Executable
-winmerge.check=changed
-winmerge.gui=True
-winmerge.priority=-10
-winmerge.diffargs=/r /e /x /ub /wl /dl '$plabel1' /dr '$clabel' $parent $child
-
-araxis.regkey=SOFTWARE\Classes\TypeLib\{46799e0a-7bd1-4330-911c-9660bb964ea2}\7.0\HELPDIR
-araxis.regappend=\ConsoleCompare.exe
-araxis.priority=-2
-araxis.args=/3 /a2 /wait /merge /title1:"Other" /title2:"Base" /title3:"Local :"$local $other $base $local $output
-araxis.checkconflict=True
-araxis.binary=True
-araxis.gui=True
-araxis.diffargs=/2 /wait /title1:"$plabel1" /title2:"$clabel" $parent $child
-
-diffuse.priority=-3
-diffuse.args=$local $base $other
-diffuse.gui=True
-diffuse.diffargs=$parent $child
-
-UltraCompare.regkey=Software\Microsoft\Windows\CurrentVersion\App Paths\UC.exe
-UltraCompare.regkeyalt=Software\Wow6432Node\Microsoft\Windows\CurrentVersion\App Paths\UC.exe
-UltraCompare.args = $base $local $other -title1 base -title3 other
-UltraCompare.priority = -2
-UltraCompare.gui = True
-UltraCompare.binary = True
-UltraCompare.check = conflicts,changed
-UltraCompare.diffargs=$child $parent -title1 $clabel -title2 $plabel1
--- a/contrib/perf.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/contrib/perf.py	Sat Jan 17 18:28:30 2015 -0800
@@ -4,11 +4,30 @@
 from mercurial import cmdutil, scmutil, util, commands, obsolete
 from mercurial import repoview, branchmap, merge, copies
 import time, os, sys
+import functools
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
 
-def timer(func, title=None):
+def gettimer(ui, opts=None):
+    """return a timer function and formatter: (timer, formatter)
+
+    This function exists to gather the creation of the formatter in a
+    single place instead of duplicating it in every performance command."""
+
+    # enforce an idle period before execution to counteract power management
+    time.sleep(ui.configint("perf", "presleep", 1))
+
+    if opts is None:
+        opts = {}
+    # redirect all to stderr
+    ui = ui.copy()
+    ui.fout = ui.ferr
+    # get a formatter
+    fm = ui.formatter('perf', opts)
+    return functools.partial(_timer, fm), fm
+
+def _timer(fm, func, title=None):
     results = []
     begin = time.time()
     count = 0
@@ -25,16 +44,25 @@
             break
         if cstop - begin > 10 and count >= 3:
             break
+
+    fm.startitem()
+
     if title:
-        sys.stderr.write("! %s\n" % title)
+        fm.write('title', '! %s\n', title)
     if r:
-        sys.stderr.write("! result: %s\n" % r)
+        fm.write('result', '! result: %s\n', r)
     m = min(results)
-    sys.stderr.write("! wall %f comb %f user %f sys %f (best of %d)\n"
-                     % (m[0], m[1] + m[2], m[1], m[2], count))
+    fm.plain('!')
+    fm.write('wall', ' wall %f', m[0])
+    fm.write('comb', ' comb %f', m[1] + m[2])
+    fm.write('user', ' user %f', m[1])
+    fm.write('sys',  ' sys %f', m[2])
+    fm.write('count',  ' (best of %d)', count)
+    fm.plain('\n')
 
 @command('perfwalk')
 def perfwalk(ui, repo, *pats):
+    timer, fm = gettimer(ui)
     try:
         m = scmutil.match(repo[None], pats, {})
         timer(lambda: len(list(repo.dirstate.walk(m, [], True, False))))
@@ -44,11 +72,14 @@
             timer(lambda: len([b for a, b, c in repo.dirstate.statwalk([], m)]))
         except Exception:
             timer(lambda: len(list(cmdutil.walk(repo, pats, {}))))
+    fm.end()
 
 @command('perfannotate')
 def perfannotate(ui, repo, f):
+    timer, fm = gettimer(ui)
     fc = repo['.'][f]
     timer(lambda: len(fc.annotate(True)))
+    fm.end()
 
 @command('perfstatus',
          [('u', 'unknown', False,
@@ -57,16 +88,21 @@
     #m = match.always(repo.root, repo.getcwd())
     #timer(lambda: sum(map(len, repo.dirstate.status(m, [], False, False,
     #                                                False))))
+    timer, fm = gettimer(ui)
     timer(lambda: sum(map(len, repo.status(**opts))))
+    fm.end()
 
 @command('perfaddremove')
 def perfaddremove(ui, repo):
+    timer, fm = gettimer(ui)
     try:
         oldquiet = repo.ui.quiet
         repo.ui.quiet = True
-        timer(lambda: scmutil.addremove(repo, dry_run=True))
+        matcher = scmutil.match(repo[None])
+        timer(lambda: scmutil.addremove(repo, matcher, "", dry_run=True))
     finally:
         repo.ui.quiet = oldquiet
+        fm.end()
 
 def clearcaches(cl):
     # behave somewhat consistently across internal API changes
@@ -79,33 +115,40 @@
 
 @command('perfheads')
 def perfheads(ui, repo):
+    timer, fm = gettimer(ui)
     cl = repo.changelog
     def d():
         len(cl.headrevs())
         clearcaches(cl)
     timer(d)
+    fm.end()
 
 @command('perftags')
 def perftags(ui, repo):
     import mercurial.changelog
     import mercurial.manifest
+    timer, fm = gettimer(ui)
     def t():
-        repo.changelog = mercurial.changelog.changelog(repo.sopener)
-        repo.manifest = mercurial.manifest.manifest(repo.sopener)
+        repo.changelog = mercurial.changelog.changelog(repo.svfs)
+        repo.manifest = mercurial.manifest.manifest(repo.svfs)
         repo._tags = None
         return len(repo.tags())
     timer(t)
+    fm.end()
 
 @command('perfancestors')
 def perfancestors(ui, repo):
+    timer, fm = gettimer(ui)
     heads = repo.changelog.headrevs()
     def d():
         for a in repo.changelog.ancestors(heads):
             pass
     timer(d)
+    fm.end()
 
 @command('perfancestorset')
 def perfancestorset(ui, repo, revset):
+    timer, fm = gettimer(ui)
     revs = repo.revs(revset)
     heads = repo.changelog.headrevs()
     def d():
@@ -113,34 +156,42 @@
         for rev in revs:
             rev in s
     timer(d)
+    fm.end()
 
 @command('perfdirs')
 def perfdirs(ui, repo):
+    timer, fm = gettimer(ui)
     dirstate = repo.dirstate
     'a' in dirstate
     def d():
         dirstate.dirs()
         del dirstate._dirs
     timer(d)
+    fm.end()
 
 @command('perfdirstate')
 def perfdirstate(ui, repo):
+    timer, fm = gettimer(ui)
     "a" in repo.dirstate
     def d():
         repo.dirstate.invalidate()
         "a" in repo.dirstate
     timer(d)
+    fm.end()
 
 @command('perfdirstatedirs')
 def perfdirstatedirs(ui, repo):
+    timer, fm = gettimer(ui)
     "a" in repo.dirstate
     def d():
         "a" in repo.dirstate._dirs
         del repo.dirstate._dirs
     timer(d)
+    fm.end()
 
 @command('perfdirstatefoldmap')
 def perffoldmap(ui, repo):
+    timer, fm = gettimer(ui)
     dirstate = repo.dirstate
     'a' in dirstate
     def d():
@@ -148,19 +199,23 @@
         del dirstate._foldmap
         del dirstate._dirs
     timer(d)
+    fm.end()
 
 @command('perfdirstatewrite')
 def perfdirstatewrite(ui, repo):
+    timer, fm = gettimer(ui)
     ds = repo.dirstate
     "a" in ds
     def d():
         ds._dirty = True
         ds.write()
     timer(d)
+    fm.end()
 
 @command('perfmergecalculate',
          [('r', 'rev', '.', 'rev to merge against')])
 def perfmergecalculate(ui, repo, rev):
+    timer, fm = gettimer(ui)
     wctx = repo[None]
     rctx = scmutil.revsingle(repo, rev, rev)
     ancestor = wctx.ancestor(rctx)
@@ -173,17 +228,21 @@
         merge.calculateupdates(repo, wctx, rctx, ancestor, False, False, False,
                                acceptremote=True)
     timer(d)
+    fm.end()
 
 @command('perfpathcopies', [], "REV REV")
 def perfpathcopies(ui, repo, rev1, rev2):
+    timer, fm = gettimer(ui)
     ctx1 = scmutil.revsingle(repo, rev1, rev1)
     ctx2 = scmutil.revsingle(repo, rev2, rev2)
     def d():
         copies.pathcopies(ctx1, ctx2)
     timer(d)
+    fm.end()
 
 @command('perfmanifest', [], 'REV')
 def perfmanifest(ui, repo, rev):
+    timer, fm = gettimer(ui)
     ctx = scmutil.revsingle(repo, rev, rev)
     t = ctx.manifestnode()
     def d():
@@ -191,67 +250,84 @@
         repo.manifest._cache = None
         repo.manifest.read(t)
     timer(d)
+    fm.end()
 
 @command('perfchangeset')
 def perfchangeset(ui, repo, rev):
+    timer, fm = gettimer(ui)
     n = repo[rev].node()
     def d():
         repo.changelog.read(n)
         #repo.changelog._cache = None
     timer(d)
+    fm.end()
 
 @command('perfindex')
 def perfindex(ui, repo):
     import mercurial.revlog
+    timer, fm = gettimer(ui)
     mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg
     n = repo["tip"].node()
     def d():
-        cl = mercurial.revlog.revlog(repo.sopener, "00changelog.i")
+        cl = mercurial.revlog.revlog(repo.svfs, "00changelog.i")
         cl.rev(n)
     timer(d)
+    fm.end()
 
 @command('perfstartup')
 def perfstartup(ui, repo):
+    timer, fm = gettimer(ui)
     cmd = sys.argv[0]
     def d():
         os.system("HGRCPATH= %s version -q > /dev/null" % cmd)
     timer(d)
+    fm.end()
 
 @command('perfparents')
 def perfparents(ui, repo):
+    timer, fm = gettimer(ui)
     nl = [repo.changelog.node(i) for i in xrange(1000)]
     def d():
         for n in nl:
             repo.changelog.parents(n)
     timer(d)
+    fm.end()
 
 @command('perflookup')
 def perflookup(ui, repo, rev):
+    timer, fm = gettimer(ui)
     timer(lambda: len(repo.lookup(rev)))
+    fm.end()
 
 @command('perfrevrange')
 def perfrevrange(ui, repo, *specs):
+    timer, fm = gettimer(ui)
     revrange = scmutil.revrange
     timer(lambda: len(revrange(repo, specs)))
+    fm.end()
 
 @command('perfnodelookup')
 def perfnodelookup(ui, repo, rev):
+    timer, fm = gettimer(ui)
     import mercurial.revlog
     mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg
     n = repo[rev].node()
-    cl = mercurial.revlog.revlog(repo.sopener, "00changelog.i")
+    cl = mercurial.revlog.revlog(repo.svfs, "00changelog.i")
     def d():
         cl.rev(n)
         clearcaches(cl)
     timer(d)
+    fm.end()
 
 @command('perflog',
          [('', 'rename', False, 'ask log to follow renames')])
 def perflog(ui, repo, **opts):
+    timer, fm = gettimer(ui)
     ui.pushbuffer()
     timer(lambda: commands.log(ui, repo, rev=[], date='', user='',
                                copies=opts.get('rename')))
     ui.popbuffer()
+    fm.end()
 
 @command('perfmoonwalk')
 def perfmoonwalk(ui, repo):
@@ -259,52 +335,65 @@
 
     This also loads the changelog data for each revision in the changelog.
     """
+    timer, fm = gettimer(ui)
     def moonwalk():
         for i in xrange(len(repo), -1, -1):
             ctx = repo[i]
             ctx.branch() # read changelog data (in addition to the index)
     timer(moonwalk)
+    fm.end()
 
 @command('perftemplating')
 def perftemplating(ui, repo):
+    timer, fm = gettimer(ui)
     ui.pushbuffer()
     timer(lambda: commands.log(ui, repo, rev=[], date='', user='',
                                template='{date|shortdate} [{rev}:{node|short}]'
                                ' {author|person}: {desc|firstline}\n'))
     ui.popbuffer()
+    fm.end()
 
 @command('perfcca')
 def perfcca(ui, repo):
+    timer, fm = gettimer(ui)
     timer(lambda: scmutil.casecollisionauditor(ui, False, repo.dirstate))
+    fm.end()
 
 @command('perffncacheload')
 def perffncacheload(ui, repo):
+    timer, fm = gettimer(ui)
     s = repo.store
     def d():
         s.fncache._load()
     timer(d)
+    fm.end()
 
 @command('perffncachewrite')
 def perffncachewrite(ui, repo):
+    timer, fm = gettimer(ui)
     s = repo.store
     s.fncache._load()
     def d():
         s.fncache._dirty = True
         s.fncache.write()
     timer(d)
+    fm.end()
 
 @command('perffncacheencode')
 def perffncacheencode(ui, repo):
+    timer, fm = gettimer(ui)
     s = repo.store
     s.fncache._load()
     def d():
         for p in s.fncache.entries:
             s.encode(p)
     timer(d)
+    fm.end()
 
 @command('perfdiffwd')
 def perfdiffwd(ui, repo):
     """Profile diff of working directory changes"""
+    timer, fm = gettimer(ui)
     options = {
         'w': 'ignore_all_space',
         'b': 'ignore_space_change',
@@ -319,11 +408,13 @@
             ui.popbuffer()
         title = 'diffopts: %s' % (diffopt and ('-' + diffopt) or 'none')
         timer(d, title)
+    fm.end()
 
 @command('perfrevlog',
          [('d', 'dist', 100, 'distance between the revisions')],
          "[INDEXFILE]")
 def perfrevlog(ui, repo, file_, **opts):
+    timer, fm = gettimer(ui)
     from mercurial import revlog
     dist = opts['dist']
     def d():
@@ -332,6 +423,7 @@
             r.revision(r.node(x))
 
     timer(d)
+    fm.end()
 
 @command('perfrevset',
          [('C', 'clear', False, 'clear volatile cache between each call.')],
@@ -342,17 +434,20 @@
     Use the --clean option if need to evaluate the impact of build volatile
     revisions set cache on the revset execution. Volatile cache hold filtered
     and obsolete related cache."""
+    timer, fm = gettimer(ui)
     def d():
         if clear:
             repo.invalidatevolatilesets()
         for r in repo.revs(expr): pass
     timer(d)
+    fm.end()
 
 @command('perfvolatilesets')
 def perfvolatilesets(ui, repo, *names):
     """benchmark the computation of various volatile set
 
     Volatile set computes element related to filtering and obsolescence."""
+    timer, fm = gettimer(ui)
     repo = repo.unfiltered()
 
     def getobs(name):
@@ -380,6 +475,7 @@
 
     for name in allfilter:
         timer(getfiltered(name), title=name)
+    fm.end()
 
 @command('perfbranchmap',
          [('f', 'full', False,
@@ -390,6 +486,7 @@
 
     This benchmarks the full repo.branchmap() call with read and write disabled
     """
+    timer, fm = gettimer(ui)
     def getbranchmap(filtername):
         """generate a benchmark function for the filtername"""
         if filtername is None:
@@ -432,3 +529,13 @@
     finally:
         branchmap.read = oldread
         branchmap.branchcache.write = oldwrite
+    fm.end()
+
+@command('perfloadmarkers')
+def perfloadmarkers(ui, repo):
+    """benchmark the time to parse the on-disk markers for a repo
+
+    Result is the number of markers in the repo."""
+    timer, fm = gettimer(ui)
+    timer(lambda: len(obsolete.obsstore(repo.svfs)))
+    fm.end()
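
The perf.py changes above all follow one pattern: a command asks ``gettimer(ui)`` for a ``(timer, formatter)`` pair, runs its measurement through ``timer()``, and finishes with ``fm.end()`` so the formatter output is finalized. A minimal sketch of a new command written against that pattern (the command name and revset are only examples)::

    @command('perfexample')
    def perfexample(ui, repo):
        """example only: time a trivial revset via the new timer/formatter pair"""
        timer, fm = gettimer(ui)
        timer(lambda: len(repo.revs('all()')), title='all()')
        fm.end()
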
--- a/contrib/revsetbenchmarks.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/contrib/revsetbenchmarks.py	Sat Jan 17 18:28:30 2015 -0800
@@ -74,7 +74,7 @@
 
 parser = OptionParser(usage="usage: %prog [options] <revs>")
 parser.add_option("-f", "--file",
-                  help="read revset from FILE (stdin if omited)",
+                  help="read revset from FILE (stdin if omitted)",
                   metavar="FILE")
 parser.add_option("-R", "--repo",
                   help="run benchmark on REPO", metavar="REPO")
--- a/contrib/synthrepo.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/contrib/synthrepo.py	Sat Jan 17 18:28:30 2015 -0800
@@ -323,15 +323,32 @@
     initcount = int(opts['initfiles'])
     if initcount and initdirs:
         pctx = repo[None].parents()[0]
+        dirs = set(pctx.dirs())
         files = {}
+
+        def validpath(path):
+            # Don't pick filenames which are already directory names.
+            if path in dirs:
+                return False
+            # Don't pick directories which were used as file names.
+            while path:
+                if path in files:
+                    return False
+                path = os.path.dirname(path)
+            return True
+
         for i in xrange(0, initcount):
             ui.progress(_synthesizing, i, unit=_files, total=initcount)
 
             path = pickpath()
-            while path in pctx.dirs():
+            while not validpath(path):
                 path = pickpath()
             data = '%s contents\n' % path
             files[path] = context.memfilectx(repo, path, data)
+            dir = os.path.dirname(path)
+            while dir and dir not in dirs:
+                dirs.add(dir)
+                dir = os.path.dirname(dir)
 
         def filectxfn(repo, memctx, path):
             return files[path]
@@ -410,16 +427,18 @@
                         break
         if filesadded:
             dirs = list(pctx.dirs())
-            dirs.append('')
+            dirs.insert(0, '')
         for __ in xrange(pick(filesadded)):
-            path = [random.choice(dirs)]
-            if pick(dirsadded):
+            pathstr = ''
+            while pathstr in dirs:
+                path = [random.choice(dirs)]
+                if pick(dirsadded):
+                    path.append(random.choice(words))
                 path.append(random.choice(words))
-            path.append(random.choice(words))
-            path = '/'.join(filter(None, path))
+                pathstr = '/'.join(filter(None, path))
             data = '\n'.join(makeline()
                              for __ in xrange(pick(linesinfilesadded))) + '\n'
-            changes[path] = context.memfilectx(repo, path, data)
+            changes[pathstr] = context.memfilectx(repo, pathstr, data)
         def filectxfn(repo, memctx, path):
             return changes[path]
         if not changes:
@@ -428,6 +447,8 @@
             date = repo['tip'].date()[0] + pick(interarrival)
         else:
             date = time.time() - (86400 * count)
+        # dates in mercurial must be positive and fit in 32-bit signed integers.
+        date = min(0x7fffffff, max(0, date))
         user = random.choice(words) + '@' + random.choice(words)
         mc = context.memctx(repo, pl, makeline(minimum=2),
                             sorted(changes.iterkeys()),
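
The new ``validpath`` helper above rejects a candidate name when it is already a directory, or when any of its parent components was already used as a file name; the ``dirs`` set is then kept up to date as files are created. A standalone sketch of that check, with ``dirs`` and ``files`` passed in explicitly and hypothetical sample data::

    import os

    def validpath(path, dirs, files):
        """Reject names that collide with directories or whose parents are files."""
        if path in dirs:
            return False
        while path:
            if path in files:
                return False
            path = os.path.dirname(path)
        return True

    dirs = set(['src', 'src/util'])
    files = {'src/util/helpers.py': 'contents'}
    print(validpath('src/util/helpers.py/extra', dirs, files))  # False: parent is a file
    print(validpath('src', dirs, files))                        # False: already a directory
    print(validpath('src/util/new.py', dirs, files))            # True
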
--- a/contrib/undumprevlog	Sat Jan 10 21:31:59 2015 +0900
+++ b/contrib/undumprevlog	Sat Jan 17 18:28:30 2015 -0800
@@ -10,7 +10,8 @@
     util.setbinary(fp)
 
 opener = scmutil.opener('.', False)
-tr = transaction.transaction(sys.stderr.write, opener, "undump.journal")
+tr = transaction.transaction(sys.stderr.write, opener, {'store': opener},
+                             "undump.journal")
 while True:
     l = sys.stdin.readline()
     if not l:
--- a/contrib/win32/mercurial.iss	Sat Jan 10 21:31:59 2015 +0900
+++ b/contrib/win32/mercurial.iss	Sat Jan 17 18:28:30 2015 -0800
@@ -67,8 +67,6 @@
 Source: contrib\hgweb.fcgi; DestDir: {app}/Contrib
 Source: contrib\hgweb.wsgi; DestDir: {app}/Contrib
 Source: contrib\win32\ReadMe.html; DestDir: {app}; Flags: isreadme
-Source: contrib\mergetools.hgrc; DestDir: {tmp};
-Source: contrib\win32\mercurial.ini; DestDir: {app}; DestName: Mercurial.ini; Check: CheckFile; AfterInstall: ConcatenateFiles;
 Source: contrib\win32\postinstall.txt; DestDir: {app}; DestName: ReleaseNotes.txt
 Source: dist\hg.exe; DestDir: {app}; AfterInstall: Touch('{app}\hg.exe.local')
 #if ARCH == "x64"
@@ -86,6 +84,7 @@
 Source: doc\*.html; DestDir: {app}\Docs
 Source: doc\style.css; DestDir: {app}\Docs
 Source: mercurial\help\*.txt; DestDir: {app}\help
+Source: mercurial\default.d\*.rc; DestDir: {app}\default.d
 Source: mercurial\locale\*.*; DestDir: {app}\locale; Flags: recursesubdirs createallsubdirs skipifsourcedoesntexist
 Source: mercurial\templates\*.*; DestDir: {app}\Templates; Flags: recursesubdirs createallsubdirs
 Source: CONTRIBUTORS; DestDir: {app}; DestName: Contributors.txt
@@ -93,10 +92,13 @@
 
 [INI]
 Filename: {app}\Mercurial.url; Section: InternetShortcut; Key: URL; String: http://mercurial.selenic.com/
-Filename: {app}\Mercurial.ini; Section: web; Key: cacerts; String: {app}\cacert.pem
+Filename: {app}\default.d\editor.rc; Section: ui; Key: editor; String: notepad
+Filename: {app}\default.d\cacerts.rc; Section: web; Key: cacerts; String: {app}\cacert.pem
 
 [UninstallDelete]
 Type: files; Name: {app}\Mercurial.url
+Type: filesandordirs; Name: {app}\default.d
+Type: files; Name: "{app}\hg.exe.local"
 
 [Icons]
 Name: {group}\Uninstall Mercurial; Filename: {uninstallexe}
@@ -111,35 +113,7 @@
 [UninstallRun]
 Filename: "{app}\add_path.exe"; Parameters: "/del {app}"
 
-[UninstallDelete]
-Type: files; Name: "{app}\hg.exe.local"
-
 [Code]
-var
-  WriteFile: Boolean;
-  CheckDone: Boolean;
-
-function CheckFile(): Boolean;
-begin
-  if not CheckDone then begin
-    WriteFile := True;
-    if FileExists(ExpandConstant(CurrentFileName)) then begin
-        WriteFile := MsgBox('' + ExpandConstant(CurrentFileName) + '' #13#13 'The file already exists.' #13#13 'Would you like Setup to overwrite it?', mbConfirmation, MB_YESNO) = idYes;
-    end;
-    CheckDone := True;
-  end;
-  Result := WriteFile;
-end;
-
-procedure ConcatenateFiles();
-var
-  MergeConfigs: TArrayOfString;
-begin
-  if LoadStringsFromFile(ExpandConstant('{tmp}\mergetools.hgrc'),MergeConfigs) then begin
-    SaveStringsToFile(ExpandConstant(CurrentFileName),MergeConfigs,True);
-  end;
-end;
-
 procedure Touch(fn: String);
 begin
   SaveStringToFile(ExpandConstant(fn), '', False);
--- a/contrib/wix/mercurial.wxs	Sat Jan 10 21:31:59 2015 +0900
+++ b/contrib/wix/mercurial.wxs	Sat Jan 17 18:28:30 2015 -0800
@@ -79,7 +79,7 @@
                     ReadOnly='yes' KeyPath='yes'/>
             </Component>
             <Component Id='mergetools.rc' Guid='$(var.mergetools.rc.guid)' Win64='$(var.IsX64)'>
-              <File Id='mergetools.rc' Name='MergeTools.rc' Source='contrib\mergetools.hgrc'
+              <File Id='mergetools.rc' Name='MergeTools.rc' Source='mercurial\default.d\mergetools.rc'
                     ReadOnly='yes' KeyPath='yes'/>
             </Component>
             <Component Id='paths.rc' Guid='$(var.paths.rc.guid)' Win64='$(var.IsX64)'>
--- a/contrib/zsh_completion	Sat Jan 10 21:31:59 2015 +0900
+++ b/contrib/zsh_completion	Sat Jan 17 18:28:30 2015 -0800
@@ -163,7 +163,7 @@
 }
 
 _hg_labels() {
-  labels=("${(f)$(_hg_cmd debuglabelcomplete)}")
+  labels=("${(f)$(_hg_cmd debugnamecomplete)}")
   (( $#labels )) && _describe -t labels 'labels' labels
 }
 
--- a/hgext/blackbox.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/hgext/blackbox.py	Sat Jan 17 18:28:30 2015 -0800
@@ -112,7 +112,7 @@
                 lastblackbox = blackbox
 
         def setrepo(self, repo):
-            self._bbopener = repo.opener
+            self._bbopener = repo.vfs
 
     ui.__class__ = blackboxui
 
@@ -141,7 +141,7 @@
         return
 
     limit = opts.get('limit')
-    blackbox = repo.opener('blackbox.log', 'r')
+    blackbox = repo.vfs('blackbox.log', 'r')
     lines = blackbox.read().split('\n')
 
     count = 0
--- a/hgext/color.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/hgext/color.py	Sat Jan 17 18:28:30 2015 -0800
@@ -144,7 +144,8 @@
 
 import os
 
-from mercurial import cmdutil, commands, dispatch, extensions, ui as uimod, util
+from mercurial import cmdutil, commands, dispatch, extensions, subrepo, util
+from mercurial import ui as uimod
 from mercurial import templater, error
 from mercurial.i18n import _
 
@@ -301,6 +302,11 @@
            'histedit.remaining': 'red bold',
            'ui.prompt': 'yellow',
            'log.changeset': 'yellow',
+           'patchbomb.finalsummary': '',
+           'patchbomb.from': 'magenta',
+           'patchbomb.to': 'cyan',
+           'patchbomb.subject': 'green',
+           'patchbomb.diffstats': '',
            'rebase.rebased': 'blue',
            'rebase.remaining': 'red bold',
            'resolve.resolved': 'green bold',
@@ -483,7 +489,14 @@
             extstyles()
             configstyles(ui_)
         return orig(ui_, opts, cmd, cmdfunc)
+    def colorgit(orig, gitsub, commands, env=None, stream=False, cwd=None):
+        if gitsub.ui._colormode and len(commands) and commands[0] == "diff":
+                # insert the argument in the front,
+                # the end of git diff arguments is used for paths
+                commands.insert(1, '--color')
+        return orig(gitsub, commands, env, stream, cwd)
     extensions.wrapfunction(dispatch, '_runcommand', colorcmd)
+    extensions.wrapfunction(subrepo.gitsubrepo, '_gitnodir', colorgit)
     templater.funcs['label'] = templatelabel
 
 def extsetup(ui):
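
Both color.py hooks rely on ``extensions.wrapfunction(container, name, wrapper)``: the attribute is replaced so that ``wrapper`` runs with the original callable as its first argument, which is how ``colorgit`` can inspect the git argument list before deferring to ``_gitnodir``. A generic sketch of that pattern, wrapping an unrelated helper purely for illustration::

    from mercurial import extensions, util

    def tracingshellquote(orig, s):
        """Observe the argument, then defer to the original implementation."""
        # a real wrapper would adjust `s` or the result here, the way
        # colorgit inserts '--color' into the git command list
        return orig(s)

    extensions.wrapfunction(util, 'shellquote', tracingshellquote)
    print(util.shellquote('a b'))  # now routed through the wrapper
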
--- a/hgext/convert/subversion.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/hgext/convert/subversion.py	Sat Jan 17 18:28:30 2015 -0800
@@ -318,8 +318,12 @@
             self.uuid = svn.ra.get_uuid(self.ra)
         except SubversionException:
             ui.traceback()
-            raise NoRepo(_("%s does not look like a Subversion repository")
-                         % self.url)
+            svnversion = '%d.%d.%d' % (svn.core.SVN_VER_MAJOR,
+                                       svn.core.SVN_VER_MINOR,
+                                       svn.core.SVN_VER_MICRO)
+            raise NoRepo(_("%s does not look like a Subversion repository "
+                           "to libsvn version %s")
+                         % (self.url, svnversion))
 
         if rev:
             try:
--- a/hgext/eol.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/hgext/eol.py	Sat Jan 17 18:28:30 2015 -0800
@@ -319,7 +319,7 @@
                             # longer match a file it matched before
                             self.dirstate.normallookup(f)
                     # Create or touch the cache to update mtime
-                    self.opener("eol.cache", "w").close()
+                    self.vfs("eol.cache", "w").close()
                     wlock.release()
                 except error.LockUnavailable:
                     # If we cannot lock the repository and clear the
--- a/hgext/extdiff.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/hgext/extdiff.py	Sat Jan 17 18:28:30 2015 -0800
@@ -23,10 +23,9 @@
   #cmd.cdiff = gdiff
   #opts.cdiff = -Nprc5
 
-  # add new command called vdiff, runs kdiff3
-  vdiff = kdiff3
-
-  # add new command called meld, runs meld (no need to name twice)
+  # add new command called meld, runs meld (no need to name twice).  If
+  # the meld executable is not available, the meld tool in [merge-tools]
+  # will be used, if available
   meld =
 
   # add new command called vimdiff, runs gvimdiff with DirDiff plugin
@@ -63,7 +62,7 @@
 
 from mercurial.i18n import _
 from mercurial.node import short, nullid
-from mercurial import cmdutil, scmutil, util, commands, encoding
+from mercurial import cmdutil, scmutil, util, commands, encoding, filemerge
 import os, shlex, shutil, tempfile, re
 
 cmdtable = {}
@@ -90,7 +89,7 @@
     wopener = scmutil.opener(base)
     fns_and_mtime = []
     ctx = repo[node]
-    for fn in files:
+    for fn in sorted(files):
         wfn = util.pconvert(fn)
         if wfn not in ctx:
             # File doesn't exist; could be a bogus modify
@@ -110,7 +109,7 @@
                                   os.lstat(dest).st_mtime))
     return dirname, fns_and_mtime
 
-def dodiff(ui, repo, diffcmd, diffopts, pats, opts):
+def dodiff(ui, repo, cmdline, pats, opts):
     '''Do the actual diff:
 
     - copy to a temp structure if diffing 2 internal revisions
@@ -121,8 +120,7 @@
 
     revs = opts.get('rev')
     change = opts.get('change')
-    args = ' '.join(map(util.shellquote, diffopts))
-    do3way = '$parent2' in args
+    do3way = '$parent2' in cmdline
 
     if revs and change:
         msg = _('cannot specify --rev and --change at the same time')
@@ -221,13 +219,12 @@
 
         # Match parent2 first, so 'parent1?' will match both parent1 and parent
         regex = '\$(parent2|parent1?|child|plabel1|plabel2|clabel|root)'
-        if not do3way and not re.search(regex, args):
-            args += ' $parent1 $child'
-        args = re.sub(regex, quote, args)
-        cmdline = util.shellquote(diffcmd) + ' ' + args
+        if not do3way and not re.search(regex, cmdline):
+            cmdline += ' $parent1 $child'
+        cmdline = re.sub(regex, quote, cmdline)
 
         ui.debug('running %r in %s\n' % (cmdline, tmproot))
-        util.system(cmdline, cwd=tmproot, out=ui.fout)
+        ui.system(cmdline, cwd=tmproot)
 
         for copy_fn, working_fn, mtime in fns_and_mtime:
             if os.lstat(copy_fn).st_mtime != mtime:
@@ -272,35 +269,48 @@
     if not program:
         program = 'diff'
         option = option or ['-Npru']
-    return dodiff(ui, repo, program, option, pats, opts)
+    cmdline = ' '.join(map(util.shellquote, [program] + option))
+    return dodiff(ui, repo, cmdline, pats, opts)
 
 def uisetup(ui):
     for cmd, path in ui.configitems('extdiff'):
         if cmd.startswith('cmd.'):
             cmd = cmd[4:]
             if not path:
-                path = cmd
-            diffopts = shlex.split(ui.config('extdiff', 'opts.' + cmd, ''))
+                path = util.findexe(cmd)
+                if path is None:
+                    path = filemerge.findexternaltool(ui, cmd) or cmd
+            diffopts = ui.config('extdiff', 'opts.' + cmd, '')
+            cmdline = util.shellquote(path)
+            if diffopts:
+                cmdline += ' ' + diffopts
         elif cmd.startswith('opts.'):
             continue
         else:
-            # command = path opts
             if path:
-                diffopts = shlex.split(path)
-                path = diffopts.pop(0)
+                # case "cmd = path opts"
+                cmdline = path
+                diffopts = len(shlex.split(cmdline)) > 1
             else:
-                path, diffopts = cmd, []
+                # case "cmd ="
+                path = util.findexe(cmd)
+                if path is None:
+                    path = filemerge.findexternaltool(ui, cmd) or cmd
+                cmdline = util.shellquote(path)
+                diffopts = False
         # look for diff arguments in [diff-tools] then [merge-tools]
-        if diffopts == []:
+        if not diffopts:
             args = ui.config('diff-tools', cmd+'.diffargs') or \
                    ui.config('merge-tools', cmd+'.diffargs')
             if args:
-                diffopts = shlex.split(args)
-        def save(cmd, path, diffopts):
+                cmdline += ' ' + args
+        def save(cmdline):
             '''use closure to save diff command to use'''
             def mydiff(ui, repo, *pats, **opts):
-                return dodiff(ui, repo, path, diffopts + opts['option'],
-                              pats, opts)
+                options = ' '.join(map(util.shellquote, opts['option']))
+                if options:
+                    options = ' ' + options
+                return dodiff(ui, repo, cmdline + options, pats, opts)
             doc = _('''\
 use %(path)s to diff repository (or selected files)
 
@@ -322,6 +332,6 @@
             # right encoding) prevents that.
             mydiff.__doc__ = doc.decode(encoding.encoding)
             return mydiff
-        cmdtable[cmd] = (save(cmd, path, diffopts),
+        cmdtable[cmd] = (save(cmdline),
                          cmdtable['extdiff'][1][1:],
                          _('hg %s [OPTION]... [FILE]...') % cmd)
--- a/hgext/factotum.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/hgext/factotum.py	Sat Jan 17 18:28:30 2015 -0800
@@ -72,9 +72,14 @@
                 l = os.read(fd, ERRMAX).split()
                 if l[0] == 'ok':
                     os.write(fd, 'read')
-                    l = os.read(fd, ERRMAX).split()
-                    if l[0] == 'ok':
-                        return l[1:]
+                    status, user, passwd = os.read(fd, ERRMAX).split(None, 2)
+                    if status == 'ok':
+                        if passwd.startswith("'"):
+                            if passwd.endswith("'"):
+                                passwd = passwd[1:-1].replace("''", "'")
+                            else:
+                                raise util.Abort(_('malformed password string'))
+                        return (user, passwd)
             except (OSError, IOError):
                 raise util.Abort(_('factotum not responding'))
         finally:
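
The factotum change above also unescapes the returned password: a value may come back wrapped in single quotes, with a literal quote written as two quotes, and anything else quoted is rejected as malformed. A small standalone sketch of just that unquoting step::

    def unquote(passwd):
        """Undo the quoting handled above: 'it''s' -> it's."""
        if passwd.startswith("'"):
            if not passwd.endswith("'"):
                raise ValueError('malformed password string')
            passwd = passwd[1:-1].replace("''", "'")
        return passwd

    print(unquote("'it''s a secret'"))  # it's a secret
    print(unquote("hunter2"))           # unquoted values pass through unchanged
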
--- a/hgext/gpg.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/hgext/gpg.py	Sat Jan 17 18:28:30 2015 -0800
@@ -103,7 +103,7 @@
     try:
         # read local signatures
         fn = "localsigs"
-        for item in parsefile(repo.opener(fn), fn):
+        for item in parsefile(repo.vfs(fn), fn):
             yield item
     except IOError:
         pass
@@ -250,7 +250,7 @@
 
     # write it
     if opts['local']:
-        repo.opener.append("localsigs", sigmessage)
+        repo.vfs.append("localsigs", sigmessage)
         return
 
     if not opts["force"]:
--- a/hgext/hgk.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/hgext/hgk.py	Sat Jan 17 18:28:30 2015 -0800
@@ -35,7 +35,7 @@
 '''
 
 import os
-from mercurial import cmdutil, commands, util, patch, revlog, scmutil
+from mercurial import cmdutil, commands, patch, revlog, scmutil
 from mercurial.node import nullid, nullrev, short
 from mercurial.i18n import _
 
@@ -95,8 +95,10 @@
             if opts['pretty']:
                 catcommit(ui, repo, node2, "")
             m = scmutil.match(repo[node1], files)
+            diffopts = patch.difffeatureopts(ui)
+            diffopts.git = True
             chunks = patch.diff(repo, node1, node2, match=m,
-                                opts=patch.diffopts(ui, {'git': True}))
+                                opts=diffopts)
             for chunk in chunks:
                 ui.write(chunk)
         else:
@@ -349,4 +351,4 @@
     optstr = ' '.join(['--%s %s' % (k, v) for k, v in opts.iteritems() if v])
     cmd = ui.config("hgk", "path", "hgk") + " %s %s" % (optstr, " ".join(etc))
     ui.debug("running %s\n" % cmd)
-    util.system(cmd)
+    ui.system(cmd)
--- a/hgext/highlight/highlight.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/hgext/highlight/highlight.py	Sat Jan 17 18:28:30 2015 -0800
@@ -32,6 +32,11 @@
     if util.binary(text):
         return
 
+    # str.splitlines() != unicode.splitlines() because "reasons"
+    for c in "\x0c\x1c\x1d\x1e":
+        if c in text:
+            text = text.replace(c, '')
+
     # Pygments is best used with Unicode strings:
     # <http://pygments.org/docs/unicode/>
     text = text.decode(encoding.encoding, 'replace')
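
The stripped characters (``\x0c``, ``\x1c``, ``\x1d``, ``\x1e``) are ones that, on Python 2, ``unicode.splitlines()`` treats as line boundaries while ``str.splitlines()`` does not, so leaving them in could change the line count once the text is decoded for Pygments below. A quick Python 2 demonstration of the mismatch::

    # form feed (\x0c) splits a unicode string but not a byte string
    s = 'first\x0csecond'
    print(s.splitlines())                   # ['first\x0csecond'] - one line
    print(s.decode('ascii').splitlines())   # [u'first', u'second'] - two lines
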
--- a/hgext/keyword.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/hgext/keyword.py	Sat Jan 17 18:28:30 2015 -0800
@@ -1,6 +1,6 @@
 # keyword.py - $Keyword$ expansion for Mercurial
 #
-# Copyright 2007-2014 Christian Ebert <blacktrash@gmx.net>
+# Copyright 2007-2015 Christian Ebert <blacktrash@gmx.net>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
@@ -264,8 +264,17 @@
             if util.binary(data):
                 continue
             if expand:
+                parents = ctx.parents()
                 if lookup:
                     ctx = self.linkctx(f, mf[f])
+                elif self.restrict and len(parents) > 1:
+                    # merge commit
+                    # in case of conflict f is in modified state during
+                    # merge, even if f does not differ from f in parent
+                    for p in parents:
+                        if f in p and not p[f].cmp(ctx[f]):
+                            ctx = p[f].changectx()
+                            break
                 data, found = self.substitute(data, f, ctx, re_kw.subn)
             elif self.restrict:
                 found = re_kw.search(data)
@@ -273,7 +282,7 @@
                 data, found = _shrinktext(data, re_kw.subn)
             if found:
                 self.ui.note(msg % f)
-                fp = self.repo.wopener(f, "wb", atomictemp=True)
+                fp = self.repo.wvfs(f, "wb", atomictemp=True)
                 fp.write(data)
                 fp.close()
                 if kwcmd:
@@ -402,7 +411,7 @@
         if args:
             # simulate hgrc parsing
             rcmaps = ['[keywordmaps]\n'] + [a + '\n' for a in args]
-            fp = repo.opener('hgrc', 'w')
+            fp = repo.vfs('hgrc', 'w')
             fp.writelines(rcmaps)
             fp.close()
             ui.readconfig(repo.join('hgrc'))
@@ -431,7 +440,7 @@
     demoitems('keywordset', ui.configitems('keywordset'))
     demoitems('keywordmaps', kwmaps.iteritems())
     keywords = '$' + '$\n$'.join(sorted(kwmaps.keys())) + '$\n'
-    repo.wopener.write(fn, keywords)
+    repo.wvfs.write(fn, keywords)
     repo[None].add([fn])
     ui.note(_('\nkeywords written to %s:\n') % fn)
     ui.note(keywords)
@@ -448,12 +457,9 @@
     repo.commit(text=msg)
     ui.status(_('\n\tkeywords expanded\n'))
     ui.write(repo.wread(fn))
-    for root, dirs, files in os.walk(tmpdir, topdown=False):
+    for root, dirs, files in os.walk(tmpdir):
         for f in files:
-            util.unlink(os.path.join(root, f))
-        for d in dirs:
-            os.rmdir(os.path.join(root, d))
-    os.rmdir(tmpdir)
+            util.unlinkpath(repo.vfs.reljoin(root, f))
 
 @command('kwexpand',
     commands.walkopts,
@@ -585,7 +591,7 @@
         def file(self, f):
             if f[0] == '/':
                 f = f[1:]
-            return kwfilelog(self.sopener, kwt, f)
+            return kwfilelog(self.svfs, kwt, f)
 
         def wread(self, filename):
             data = super(kwrepo, self).wread(filename)
--- a/hgext/largefiles/lfcommands.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/hgext/largefiles/lfcommands.py	Sat Jan 17 18:28:30 2015 -0800
@@ -268,6 +268,7 @@
     mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
                           getfilectx, ctx.user(), ctx.date(), ctx.extra())
     ret = rdst.commitctx(mctx)
+    lfutil.copyalltostore(rdst, ret)
     rdst.setparents(ret)
     revmap[ctx.node()] = rdst.changelog.tip()
 
@@ -435,8 +436,14 @@
         ui.status(_("%d largefiles failed to download\n") % totalmissing)
     return totalsuccess, totalmissing
 
-def updatelfiles(ui, repo, filelist=None, printmessage=True,
-                 normallookup=False):
+def updatelfiles(ui, repo, filelist=None, printmessage=None,
+                 normallookup=False, checked=False):
+    '''Update largefiles according to standins in the working directory
+
+    If ``printmessage`` is not ``None``, messages are forcibly printed
+    (for a true value) or forcibly suppressed (for a false value).
+    '''
+    statuswriter = lfutil.getstatuswriter(ui, repo, printmessage)
     wlock = repo.wlock()
     try:
         lfdirstate = lfutil.openlfdirstate(ui, repo)
@@ -458,14 +465,15 @@
                     util.unlinkpath(absstandin + '.orig')
                 expecthash = lfutil.readstandin(repo, lfile)
                 if (expecthash != '' and
-                    (not os.path.exists(abslfile) or
+                    (checked or
+                     not os.path.exists(abslfile) or
                      expecthash != lfutil.hashfile(abslfile))):
                     if lfile not in repo[None]: # not switched to normal file
                         util.unlinkpath(abslfile, ignoremissing=True)
-                    # use normallookup() to allocate entry in largefiles
+                    # use normallookup() to allocate an entry in largefiles
                     # dirstate, because lack of it misleads
                     # lfilesrepo.status() into recognition that such cache
-                    # missing files are REMOVED.
+                    # missing files are removed.
                     lfdirstate.normallookup(lfile)
                     update[lfile] = expecthash
             else:
@@ -482,8 +490,7 @@
         lfdirstate.write()
 
         if lfiles:
-            if printmessage:
-                ui.status(_('getting changed largefiles\n'))
+            statuswriter(_('getting changed largefiles\n'))
             cachelfiles(ui, repo, None, lfiles)
 
         for lfile in lfiles:
@@ -514,8 +521,8 @@
             lfutil.synclfdirstate(repo, lfdirstate, lfile, normallookup)
 
         lfdirstate.write()
-        if printmessage and lfiles:
-            ui.status(_('%d largefiles updated, %d removed\n') % (updated,
+        if lfiles:
+            statuswriter(_('%d largefiles updated, %d removed\n') % (updated,
                 removed))
     finally:
         wlock.release()
--- a/hgext/largefiles/lfutil.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/hgext/largefiles/lfutil.py	Sat Jan 17 18:28:30 2015 -0800
@@ -12,6 +12,7 @@
 import platform
 import shutil
 import stat
+import copy
 
 from mercurial import dirstate, httpconnection, match as match_, util, scmutil
 from mercurial.i18n import _
@@ -203,7 +204,7 @@
 def copytostoreabsolute(repo, file, hash):
     if inusercache(repo.ui, hash):
         link(usercachepath(repo.ui, hash), storepath(repo, hash))
-    elif not getattr(repo, "_isconverting", False):
+    else:
         util.makedirs(os.path.dirname(storepath(repo, hash)))
         dst = util.atomictempfile(storepath(repo, hash),
                                   createmode=repo.store.createmode)
@@ -386,6 +387,30 @@
     elif state == '?':
         lfdirstate.drop(lfile)
 
+def markcommitted(orig, ctx, node):
+    repo = ctx._repo
+
+    orig(node)
+
+    # ATTENTION: "ctx.files()" may differ from "repo[node].files()"
+    # because files coming from the 2nd parent are omitted in the latter.
+    #
+    # The former should be used to get targets of "synclfdirstate",
+    # because such files:
+    # - are marked as "a" by "patch.patch()" (e.g. via transplant), and
+    # - have to be marked as "n" after commit, but
+    # - aren't listed in "repo[node].files()"
+
+    lfdirstate = openlfdirstate(repo.ui, repo)
+    for f in ctx.files():
+        if isstandin(f):
+            lfile = splitstandin(f)
+            synclfdirstate(repo, lfdirstate, lfile, False)
+    lfdirstate.write()
+
+    # As part of committing, copy all of the largefiles into the cache.
+    copyalltostore(repo, node)
+
 def getlfilestoupdate(oldstandins, newstandins):
     changedstandins = set(oldstandins).symmetric_difference(set(newstandins))
     filelist = []
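
As an aside for readers of this series: the ``standin``/``isstandin``/``splitstandin`` helpers used above simply map between a largefile name and its small standin file under the ``.hglf/`` directory. A rough, hypothetical sketch of that mapping (not the extension's actual implementation, which also handles platform path separators):

# Hypothetical sketch of the standin name mapping assumed by the code above.
SHORTNAME = '.hglf'          # directory holding standin files

def standin(filename):
    """Return the standin path for a largefile name."""
    return SHORTNAME + '/' + filename

def isstandin(filename):
    """True if filename refers to a standin file."""
    return filename.startswith(SHORTNAME + '/')

def splitstandin(filename):
    """Return the largefile name for a standin path, or None."""
    if isstandin(filename):
        return filename[len(SHORTNAME) + 1:]
    return None

assert standin('big.bin') == '.hglf/big.bin'
assert splitstandin('.hglf/big.bin') == 'big.bin'
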
@@ -395,9 +420,18 @@
     return filelist
 
 def getlfilestoupload(repo, missing, addfunc):
-    for n in missing:
+    for i, n in enumerate(missing):
+        repo.ui.progress(_('finding outgoing largefiles'), i,
+            unit=_('revision'), total=len(missing))
         parents = [p for p in repo.changelog.parents(n) if p != node.nullid]
-        ctx = repo[n]
+
+        oldlfstatus = repo.lfstatus
+        repo.lfstatus = False
+        try:
+            ctx = repo[n]
+        finally:
+            repo.lfstatus = oldlfstatus
+
         files = set(ctx.files())
         if len(parents) == 2:
             mc = ctx.manifest()
@@ -415,3 +449,138 @@
         for fn in files:
             if isstandin(fn) and fn in ctx:
                 addfunc(fn, ctx[fn].data().strip())
+    repo.ui.progress(_('finding outgoing largefiles'), None)
+
+def updatestandinsbymatch(repo, match):
+    '''Update standins in the working directory according to specified match
+
+    This returns a (possibly modified) ``match`` object to be used for
+    the subsequent commit process.
+    '''
+
+    ui = repo.ui
+
+    # Case 1: user calls commit with no specific files or
+    # include/exclude patterns: refresh and commit all files that
+    # are "dirty".
+    if match is None or match.always():
+        # Spend a bit of time here to get a list of files we know
+        # are modified so we can compare only against those.
+        # It can cost a lot of time (several seconds)
+        # otherwise to update all standins if the largefiles are
+        # large.
+        lfdirstate = openlfdirstate(ui, repo)
+        dirtymatch = match_.always(repo.root, repo.getcwd())
+        unsure, s = lfdirstate.status(dirtymatch, [], False, False,
+                                      False)
+        modifiedfiles = unsure + s.modified + s.added + s.removed
+        lfiles = listlfiles(repo)
+        # this only loops through largefiles that exist (not
+        # removed/renamed)
+        for lfile in lfiles:
+            if lfile in modifiedfiles:
+                if os.path.exists(
+                        repo.wjoin(standin(lfile))):
+                    # this handles the case where a rebase is being
+                    # performed and the working copy is not updated
+                    # yet.
+                    if os.path.exists(repo.wjoin(lfile)):
+                        updatestandin(repo,
+                            standin(lfile))
+
+        return match
+
+    lfiles = listlfiles(repo)
+    match._files = repo._subdirlfs(match.files(), lfiles)
+
+    # Case 2: user calls commit with specified patterns: refresh
+    # any matching big files.
+    smatcher = composestandinmatcher(repo, match)
+    standins = repo.dirstate.walk(smatcher, [], False, False)
+
+    # No matching big files: get out of the way and pass control to
+    # the usual commit() method.
+    if not standins:
+        return match
+
+    # Refresh all matching big files.  It's possible that the
+    # commit will end up failing, in which case the big files will
+    # stay refreshed.  No harm done: the user modified them and
+    # asked to commit them, so sooner or later we're going to
+    # refresh the standins.  Might as well leave them refreshed.
+    lfdirstate = openlfdirstate(ui, repo)
+    for fstandin in standins:
+        lfile = splitstandin(fstandin)
+        if lfdirstate[lfile] != 'r':
+            updatestandin(repo, fstandin)
+
+    # Cook up a new matcher that only matches regular files or
+    # standins corresponding to the big files requested by the
+    # user.  Have to modify _files to prevent commit() from
+    # complaining "not tracked" for big files.
+    match = copy.copy(match)
+    origmatchfn = match.matchfn
+
+    # Check both the list of largefiles and the list of
+    # standins because if a largefile was removed, it
+    # won't be in the list of largefiles at this point
+    match._files += sorted(standins)
+
+    actualfiles = []
+    for f in match._files:
+        fstandin = standin(f)
+
+        # ignore known largefiles and standins
+        if f in lfiles or fstandin in standins:
+            continue
+
+        actualfiles.append(f)
+    match._files = actualfiles
+
+    def matchfn(f):
+        if origmatchfn(f):
+            return f not in lfiles
+        else:
+            return f in standins
+
+    match.matchfn = matchfn
+
+    return match
+
+class automatedcommithook(object):
+    '''Stateful hook to update standins at the 1st commit of resuming
+
+    For efficiency, updating standins in the working directory should
+    be avoided while automated committing (like rebase, transplant and
+    so on), because they should be updated before committing.
+
+    But the 1st commit of resuming automated committing (e.g. ``rebase
+    --continue``) should update them, because largefiles may be
+    modified manually.
+    '''
+    def __init__(self, resuming):
+        self.resuming = resuming
+
+    def __call__(self, repo, match):
+        if self.resuming:
+            self.resuming = False # avoids updating at subsequent commits
+            return updatestandinsbymatch(repo, match)
+        else:
+            return match
+
+def getstatuswriter(ui, repo, forcibly=None):
+    '''Return the function used to write out largefiles-specific status
+
+    If ``forcibly`` is ``None``, this returns the last element of
+    ``repo._lfstatuswriters`` as the "default" writer function.
+
+    Otherwise, this returns a function that always writes status out
+    (or always ignores it, if ``forcibly`` is false).
+    '''
+    if forcibly is None:
+        return repo._lfstatuswriters[-1]
+    else:
+        if forcibly:
+            return ui.status # forcibly WRITE OUT
+        else:
+            return lambda *msg, **opts: None # forcibly IGNORE
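
Together with the ``repo._lfstatuswriters`` stack installed in reposetup.py below, ``getstatuswriter`` implements a simple convention: automated operations push a no-op writer for their duration and pop it afterwards, so only the top of the stack ever writes. A standalone sketch of that convention, using ``sys.stdout.write`` in place of ``ui.status``:

# Standalone sketch of the "status writer stack" convention (illustrative only).
import sys

statuswriters = [sys.stdout.write]        # default: write messages out

def report(msg):
    statuswriters[-1](msg)                # always use the top of the stack

report('getting changed largefiles\n')    # printed

statuswriters.append(lambda *msg, **opts: None)  # push a silent writer
try:
    report('getting changed largefiles\n')       # suppressed
finally:
    statuswriters.pop()                          # restore the default writer

report('3 largefiles updated, 0 removed\n')      # printed again
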
--- a/hgext/largefiles/overrides.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/hgext/largefiles/overrides.py	Sat Jan 17 18:28:30 2015 -0800
@@ -11,11 +11,10 @@
 import os
 import copy
 
-from mercurial import hg, commands, util, cmdutil, scmutil, match as match_, \
+from mercurial import hg, util, cmdutil, scmutil, match as match_, \
         archival, pathutil, revset
 from mercurial.i18n import _
 from mercurial.node import hex
-from hgext import rebase
 
 import lfutil
 import lfcommands
@@ -35,20 +34,27 @@
     m.matchfn = lambda f: lfile(f) and origmatchfn(f)
     return m
 
+def composenormalfilematcher(match, manifest, exclude=None):
+    excluded = set()
+    if exclude is not None:
+        excluded.update(exclude)
+
+    m = copy.copy(match)
+    notlfile = lambda f: not (lfutil.isstandin(f) or lfutil.standin(f) in
+            manifest or f in excluded)
+    m._files = filter(notlfile, m._files)
+    m._fmap = set(m._files)
+    m._always = False
+    origmatchfn = m.matchfn
+    m.matchfn = lambda f: notlfile(f) and origmatchfn(f)
+    return m
+
 def installnormalfilesmatchfn(manifest):
     '''installmatchfn with a matchfn that ignores all largefiles'''
     def overridematch(ctx, pats=[], opts={}, globbed=False,
             default='relpath'):
         match = oldmatch(ctx, pats, opts, globbed, default)
-        m = copy.copy(match)
-        notlfile = lambda f: not (lfutil.isstandin(f) or lfutil.standin(f) in
-                manifest)
-        m._files = filter(notlfile, m._files)
-        m._fmap = set(m._files)
-        m._always = False
-        origmatchfn = m.matchfn
-        m.matchfn = lambda f: notlfile(f) and origmatchfn(f) or None
-        return m
+        return composenormalfilematcher(match, manifest)
     oldmatch = installmatchfn(overridematch)
 
 def installmatchfn(f):
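
``composenormalfilematcher`` is one instance of a pattern this patch uses repeatedly: copy an existing matcher, narrow its file list, and wrap its matchfn with an extra predicate. A toy sketch of the idea with a made-up matcher class (the real matcher also maintains ``_fmap`` and ``_always``, as shown above):

# Toy sketch of the matcher-wrapping pattern (not mercurial's matcher class).
import copy

class ToyMatcher(object):
    """Minimal stand-in for a matcher: a file list plus a predicate."""
    def __init__(self, files):
        self._files = list(files)
        self.matchfn = lambda f: f in self._files

    def files(self):
        return self._files

def excludefiles(match, excluded):
    """Return a copy of ``match`` that never matches files in ``excluded``."""
    m = copy.copy(match)
    keep = lambda f: f not in excluded
    m._files = [f for f in m._files if keep(f)]
    origmatchfn = m.matchfn
    m.matchfn = lambda f: keep(f) and origmatchfn(f)
    return m

m = ToyMatcher(['a.txt', '.hglf/big.bin'])
m2 = excludefiles(m, set(['.hglf/big.bin']))
assert m.matchfn('.hglf/big.bin')
assert not m2.matchfn('.hglf/big.bin')
assert m2.matchfn('a.txt')
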
@@ -64,7 +70,7 @@
     was called.  no-op if scmutil.match is its original function.
 
     Note that n calls to installmatchfn will require n calls to
-    restore matchfn to reverse'''
+    restore the original matchfn.'''
     scmutil.match = getattr(scmutil.match, 'oldmatch')
 
 def installmatchandpatsfn(f):
@@ -75,18 +81,18 @@
 
 def restorematchandpatsfn():
     '''restores scmutil.matchandpats to what it was before
-    installnormalfilesmatchandpatsfn was called.  no-op if scmutil.matchandpats
+    installmatchandpatsfn was called. No-op if scmutil.matchandpats
     is its original function.
 
-    Note that n calls to installnormalfilesmatchandpatsfn will require n calls
-    to restore matchfn to reverse'''
+    Note that n calls to installmatchandpatsfn will require n calls
+    to restore the original matchfn.'''
     scmutil.matchandpats = getattr(scmutil.matchandpats, 'oldmatchandpats',
             scmutil.matchandpats)
 
-def addlargefiles(ui, repo, *pats, **opts):
-    large = opts.pop('large', None)
+def addlargefiles(ui, repo, isaddremove, matcher, **opts):
+    large = opts.get('large')
     lfsize = lfutil.getminsize(
-        ui, lfutil.islfilesrepo(repo), opts.pop('lfsize', None))
+        ui, lfutil.islfilesrepo(repo), opts.get('lfsize'))
 
     lfmatcher = None
     if lfutil.islfilesrepo(repo):
@@ -95,7 +101,7 @@
             lfmatcher = match_.match(repo.root, '', list(lfpats))
 
     lfnames = []
-    m = scmutil.match(repo[None], pats, opts)
+    m = copy.copy(matcher)
     m.bad = lambda x, y: None
     wctx = repo[None]
     for f in repo.walk(m):
@@ -104,27 +110,31 @@
         nfile = f in wctx
         exists = lfile or nfile
 
+        # addremove in core gets fancy with the name, add doesn't
+        if isaddremove:
+            name = m.uipath(f)
+        else:
+            name = m.rel(f)
+
         # Don't warn the user when they attempt to add a normal tracked file.
         # The normal add code will do that for us.
         if exact and exists:
             if lfile:
-                ui.warn(_('%s already a largefile\n') % f)
+                ui.warn(_('%s already a largefile\n') % name)
             continue
 
         if (exact or not exists) and not lfutil.isstandin(f):
-            wfile = repo.wjoin(f)
-
             # In case the file was removed previously, but not committed
             # (issue3507)
-            if not os.path.exists(wfile):
+            if not repo.wvfs.exists(f):
                 continue
 
             abovemin = (lfsize and
-                        os.lstat(wfile).st_size >= lfsize * 1024 * 1024)
+                        repo.wvfs.lstat(f).st_size >= lfsize * 1024 * 1024)
             if large or abovemin or (lfmatcher and lfmatcher(f)):
                 lfnames.append(f)
                 if ui.verbose or not exact:
-                    ui.status(_('adding %s as a largefile\n') % m.rel(f))
+                    ui.status(_('adding %s as a largefile\n') % name)
 
     bad = []
 
@@ -148,19 +158,18 @@
             bad += [lfutil.splitstandin(f)
                     for f in repo[None].add(standins)
                     if f in m.files()]
+
+        added = [f for f in lfnames if f not in bad]
     finally:
         wlock.release()
-    return bad
+    return added, bad
 
-def removelargefiles(ui, repo, isaddremove, *pats, **opts):
+def removelargefiles(ui, repo, isaddremove, matcher, **opts):
     after = opts.get('after')
-    if not pats and not after:
-        raise util.Abort(_('no files specified'))
-    m = composelargefilematcher(scmutil.match(repo[None], pats, opts),
-                                repo[None].manifest())
+    m = composelargefilematcher(matcher, repo[None].manifest())
     try:
         repo.lfstatus = True
-        s = repo.status(match=m, clean=True)
+        s = repo.status(match=m, clean=not isaddremove)
     finally:
         repo.lfstatus = False
     manifest = repo[None].manifest()
@@ -187,21 +196,19 @@
         result = warn(added, _('not removing %s: file has been marked for add'
                                ' (use forget to undo)\n')) or result
 
-    for f in sorted(remove):
-        if ui.verbose or not m.exact(f):
-            ui.status(_('removing %s\n') % m.rel(f))
-
     # Need to lock because standin files are deleted then removed from the
     # repository and we could race in-between.
     wlock = repo.wlock()
     try:
         lfdirstate = lfutil.openlfdirstate(ui, repo)
-        for f in remove:
-            if not after:
-                # If this is being called by addremove, notify the user that we
-                # are removing the file.
+        for f in sorted(remove):
+            if ui.verbose or not m.exact(f):
+                # addremove in core gets fancy with the name, remove doesn't
                 if isaddremove:
-                    ui.status(_('removing %s\n') % f)
+                    name = m.uipath(f)
+                else:
+                    name = m.rel(f)
+                ui.status(_('removing %s\n') % name)
 
             if not opts.get('dry_run'):
                 if not after:
@@ -235,28 +242,29 @@
 
 # -- Wrappers: modify existing commands --------------------------------
 
-# Add works by going through the files that the user wanted to add and
-# checking if they should be added as largefiles. Then it makes a new
-# matcher which matches only the normal files and runs the original
-# version of add.
 def overrideadd(orig, ui, repo, *pats, **opts):
-    normal = opts.pop('normal')
-    if normal:
-        if opts.get('large'):
-            raise util.Abort(_('--normal cannot be used with --large'))
-        return orig(ui, repo, *pats, **opts)
-    bad = addlargefiles(ui, repo, *pats, **opts)
-    installnormalfilesmatchfn(repo[None].manifest())
-    result = orig(ui, repo, *pats, **opts)
-    restorematchfn()
+    if opts.get('normal') and opts.get('large'):
+        raise util.Abort(_('--normal cannot be used with --large'))
+    return orig(ui, repo, *pats, **opts)
+
+def cmdutiladd(orig, ui, repo, matcher, prefix, explicitonly, **opts):
+    # The --normal flag short circuits this override
+    if opts.get('normal'):
+        return orig(ui, repo, matcher, prefix, explicitonly, **opts)
 
-    return (result == 1 or bad) and 1 or 0
+    ladded, lbad = addlargefiles(ui, repo, False, matcher, **opts)
+    normalmatcher = composenormalfilematcher(matcher, repo[None].manifest(),
+                                             ladded)
+    bad = orig(ui, repo, normalmatcher, prefix, explicitonly, **opts)
 
-def overrideremove(orig, ui, repo, *pats, **opts):
-    installnormalfilesmatchfn(repo[None].manifest())
-    result = orig(ui, repo, *pats, **opts)
-    restorematchfn()
-    return removelargefiles(ui, repo, False, *pats, **opts) or result
+    bad.extend(f for f in lbad)
+    return bad
+
+def cmdutilremove(orig, ui, repo, matcher, prefix, after, force, subrepos):
+    normalmatcher = composenormalfilematcher(matcher, repo[None].manifest())
+    result = orig(ui, repo, normalmatcher, prefix, after, force, subrepos)
+    return removelargefiles(ui, repo, False, matcher, after=after,
+                            force=force) or result
 
 def overridestatusfn(orig, repo, rev2, **opts):
     try:
@@ -397,7 +405,7 @@
         wlock.release()
 
 # Before starting the manifest merge, merge.updates will call
-# _checkunknown to check if there are any files in the merged-in
+# _checkunknownfile to check if there are any files in the merged-in
 # changeset that collide with unknown files in the working copy.
 #
 # The largefiles are seen as unknown, so this prevents us from merging
@@ -405,16 +413,16 @@
 #
 # The overridden function filters the unknown files by removing any
 # largefiles. This makes the merge proceed and we can then handle this
-# case further in the overridden manifestmerge function below.
-def overridecheckunknownfile(origfn, repo, wctx, mctx, f):
+# case further in the overridden calculateupdates function below.
+def overridecheckunknownfile(origfn, repo, wctx, mctx, f, f2=None):
     if lfutil.standin(repo.dirstate.normalize(f)) in wctx:
         return False
-    return origfn(repo, wctx, mctx, f)
+    return origfn(repo, wctx, mctx, f, f2)
 
 # The manifest merge handles conflicts on the manifest level. We want
 # to handle changes in largefile-ness of files at this level too.
 #
-# The strategy is to run the original manifestmerge and then process
+# The strategy is to run the original calculateupdates and then process
 # the action list it outputs. There are two cases we need to deal with:
 #
 # 1. Normal file in p1, largefile in p2. Here the largefile is
@@ -440,81 +448,65 @@
 def overridecalculateupdates(origfn, repo, p1, p2, pas, branchmerge, force,
                              partial, acceptremote, followcopies):
     overwrite = force and not branchmerge
-    actions = origfn(repo, p1, p2, pas, branchmerge, force, partial,
-                     acceptremote, followcopies)
+    actions, diverge, renamedelete = origfn(
+        repo, p1, p2, pas, branchmerge, force, partial, acceptremote,
+        followcopies)
 
     if overwrite:
-        return actions
-
-    removes = set(a[0] for a in actions['r'])
+        return actions, diverge, renamedelete
 
-    newglist = []
-    lfmr = [] # LargeFiles: Mark as Removed ... and mark largefile as added
-    for action in actions['g']:
-        f, args, msg = action
+    # Convert to dictionary with filename as key and action as value.
+    lfiles = set()
+    for f in actions:
         splitstandin = f and lfutil.splitstandin(f)
-        if (splitstandin is not None and
-            splitstandin in p1 and splitstandin not in removes):
+        if splitstandin in p1:
+            lfiles.add(splitstandin)
+        elif lfutil.standin(f) in p1:
+            lfiles.add(f)
+
+    for lfile in lfiles:
+        standin = lfutil.standin(lfile)
+        (lm, largs, lmsg) = actions.get(lfile, (None, None, None))
+        (sm, sargs, smsg) = actions.get(standin, (None, None, None))
+        if sm in ('g', 'dc') and lm != 'r':
             # Case 1: normal file in the working copy, largefile in
             # the second parent
-            lfile = splitstandin
-            standin = f
-            msg = _('remote turned local normal file %s into a largefile\n'
-                    'use (l)argefile or keep (n)ormal file?'
-                    '$$ &Largefile $$ &Normal file') % lfile
-            if (# local has unchanged normal file, pick remote largefile
-                pas and lfile in pas[0] and
-                not pas[0][lfile].cmp(p1[lfile]) or
-                # if remote has unchanged largefile, pick local normal file
-                not (pas and standin in pas[0] and
-                     not pas[0][standin].cmp(p2[standin])) and
-                # else, prompt
-                repo.ui.promptchoice(msg, 0) == 0
-                ): # pick remote largefile
-                actions['r'].append((lfile, None, msg))
-                newglist.append((standin, (p2.flags(standin),), msg))
+            usermsg = _('remote turned local normal file %s into a largefile\n'
+                        'use (l)argefile or keep (n)ormal file?'
+                        '$$ &Largefile $$ &Normal file') % lfile
+            if repo.ui.promptchoice(usermsg, 0) == 0: # pick remote largefile
+                actions[lfile] = ('r', None, 'replaced by standin')
+                actions[standin] = ('g', sargs, 'replaces standin')
             else: # keep local normal file
-                actions['r'].append((standin, None, msg))
-        elif lfutil.standin(f) in p1 and lfutil.standin(f) not in removes:
+                actions[lfile] = ('k', None, 'replaces standin')
+                if branchmerge:
+                    actions[standin] = ('k', None, 'replaced by non-standin')
+                else:
+                    actions[standin] = ('r', None, 'replaced by non-standin')
+        elif lm in ('g', 'dc') and sm != 'r':
             # Case 2: largefile in the working copy, normal file in
             # the second parent
-            standin = lfutil.standin(f)
-            lfile = f
-            msg = _('remote turned local largefile %s into a normal file\n'
+            usermsg = _('remote turned local largefile %s into a normal file\n'
                     'keep (l)argefile or use (n)ormal file?'
                     '$$ &Largefile $$ &Normal file') % lfile
-            if (# if remote has unchanged normal file, pick local largefile
-                pas and f in pas[0] and
-                not pas[0][f].cmp(p2[f]) or
-                # if local has unchanged largefile, pick remote normal file
-                not (pas and standin in pas[0] and
-                     not pas[0][standin].cmp(p1[standin])) and
-                # else, prompt
-                repo.ui.promptchoice(msg, 0) == 0
-                ): # keep local largefile
+            if repo.ui.promptchoice(usermsg, 0) == 0: # keep local largefile
                 if branchmerge:
                     # largefile can be restored from standin safely
-                    actions['r'].append((lfile, None, msg))
+                    actions[lfile] = ('k', None, 'replaced by standin')
+                    actions[standin] = ('k', None, 'replaces standin')
                 else:
                     # "lfile" should be marked as "removed" without
                     # removal of itself
-                    lfmr.append((lfile, None, msg))
+                    actions[lfile] = ('lfmr', None,
+                                      'forget non-standin largefile')
 
                     # linear-merge should treat this largefile as 're-added'
-                    actions['a'].append((standin, None, msg))
+                    actions[standin] = ('a', None, 'keep standin')
             else: # pick remote normal file
-                actions['r'].append((standin, None, msg))
-                newglist.append((lfile, (p2.flags(lfile),), msg))
-        else:
-            newglist.append(action)
+                actions[lfile] = ('g', largs, 'replaces standin')
+                actions[standin] = ('r', None, 'replaced by non-standin')
 
-    newglist.sort()
-    actions['g'] = newglist
-    if lfmr:
-        lfmr.sort()
-        actions['lfmr'] = lfmr
-
-    return actions
+    return actions, diverge, renamedelete
 
 def mergerecordupdates(orig, repo, actions, branchmerge):
     if 'lfmr' in actions:
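
For orientation: the rewritten ``overridecalculateupdates`` treats ``actions`` as a dictionary mapping each filename to an ``(action, args, message)`` tuple keyed by Mercurial's single-letter merge action codes ('g' get, 'r' remove, 'k' keep, 'a' add, 'dc' delete/changed, plus the extension-private 'lfmr'). A hedged illustration of how case 1 rewrites a standin/largefile pair; the args tuple and messages are made up for the example:

# Illustrative only: the shape of the actions dict manipulated above.
actions = {
    # remote turned a local normal file into a largefile: the standin
    # shows up in the action list, marked 'g' (get from remote).
    '.hglf/big.bin': ('g', ('',), 'remote created'),
}

# missing entries read as "no action", exactly as in the code above
lm, largs, lmsg = actions.get('big.bin', (None, None, None))
sm, sargs, smsg = actions.get('.hglf/big.bin', (None, None, None))

if sm in ('g', 'dc') and lm != 'r':
    # case 1, user answers "(l)argefile" at the prompt
    actions['big.bin'] = ('r', None, 'replaced by standin')
    actions['.hglf/big.bin'] = ('g', sargs, 'replaces standin')

assert actions['big.bin'][0] == 'r'
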
@@ -620,7 +612,6 @@
                 lfile = lambda f: lfutil.standin(f) in manifest
                 m._files = [lfutil.standin(f) for f in m._files if lfile(f)]
                 m._fmap = set(m._files)
-                m._always = False
                 origmatchfn = m.matchfn
                 m.matchfn = lambda f: (lfutil.isstandin(f) and
                                     (f in manifest) and
@@ -728,7 +719,6 @@
             m._files = [tostandin(f) for f in m._files]
             m._files = [f for f in m._files if f is not None]
             m._fmap = set(m._files)
-            m._always = False
             origmatchfn = m.matchfn
             def matchfn(f):
                 if lfutil.isstandin(f):
@@ -756,37 +746,14 @@
     finally:
         wlock.release()
 
-# When we rebase a repository with remotely changed largefiles, we need to
-# take some extra care so that the largefiles are correctly updated in the
-# working copy
+# after pulling changesets, we need to take some extra care to fetch
+# the largefiles that were changed remotely
 def overridepull(orig, ui, repo, source=None, **opts):
     revsprepull = len(repo)
     if not source:
         source = 'default'
     repo.lfpullsource = source
-    if opts.get('rebase', False):
-        repo._isrebasing = True
-        try:
-            if opts.get('update'):
-                del opts['update']
-                ui.debug('--update and --rebase are not compatible, ignoring '
-                         'the update flag\n')
-            del opts['rebase']
-            origpostincoming = commands.postincoming
-            def _dummy(*args, **kwargs):
-                pass
-            commands.postincoming = _dummy
-            try:
-                result = commands.pull(ui, repo, source, **opts)
-            finally:
-                commands.postincoming = origpostincoming
-            revspostpull = len(repo)
-            if revspostpull > revsprepull:
-                result = result or rebase.rebase(ui, repo)
-        finally:
-            repo._isrebasing = False
-    else:
-        result = orig(ui, repo, source, **opts)
+    result = orig(ui, repo, source, **opts)
     revspostpull = len(repo)
     lfrevs = opts.get('lfrev', [])
     if opts.get('all_largefiles'):
@@ -860,11 +827,14 @@
     return result
 
 def overriderebase(orig, ui, repo, **opts):
-    repo._isrebasing = True
+    resuming = opts.get('continue')
+    repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
+    repo._lfstatuswriters.append(lambda *msg, **opts: None)
     try:
         return orig(ui, repo, **opts)
     finally:
-        repo._isrebasing = False
+        repo._lfstatuswriters.pop()
+        repo._lfcommithooks.pop()
 
 def overridearchive(orig, repo, dest, node, kind, decode=True, matchfn=None,
             prefix=None, mtime=None, subrepos=None):
@@ -941,16 +911,16 @@
         for subpath in sorted(ctx.substate):
             sub = ctx.sub(subpath)
             submatch = match_.narrowmatcher(subpath, matchfn)
-            sub.archive(repo.ui, archiver, prefix, submatch)
+            sub.archive(archiver, prefix, submatch)
 
     archiver.done()
 
-def hgsubrepoarchive(orig, repo, ui, archiver, prefix, match=None):
+def hgsubrepoarchive(orig, repo, archiver, prefix, match=None):
     repo._get(repo._state + ('hg',))
     rev = repo._state[1]
     ctx = repo._repo[rev]
 
-    lfcommands.cachelfiles(ui, repo._repo, ctx.node())
+    lfcommands.cachelfiles(repo.ui, repo._repo, ctx.node())
 
     def write(name, mode, islink, getdata):
         # At this point, the standin has been replaced with the largefile name,
@@ -988,13 +958,12 @@
     for subpath in sorted(ctx.substate):
         sub = ctx.sub(subpath)
         submatch = match_.narrowmatcher(subpath, match)
-        sub.archive(ui, archiver, os.path.join(prefix, repo._path) + '/',
-                    submatch)
+        sub.archive(archiver, os.path.join(prefix, repo._path) + '/', submatch)
 
 # If a largefile is modified, the change is not reflected in its
 # standin until a commit. cmdutil.bailifchanged() raises an exception
 # if the repo has uncommitted changes. Wrap it to also check if
-# largefiles were changed. This is used by bisect and backout.
+# largefiles were changed. This is used by bisect, backout and fetch.
 def overridebailifchanged(orig, repo):
     orig(repo)
     repo.lfstatus = True
@@ -1003,21 +972,10 @@
     if s.modified or s.added or s.removed or s.deleted:
         raise util.Abort(_('uncommitted changes'))
 
-# Fetch doesn't use cmdutil.bailifchanged so override it to add the check
-def overridefetch(orig, ui, repo, *pats, **opts):
-    repo.lfstatus = True
-    s = repo.status()
-    repo.lfstatus = False
-    if s.modified or s.added or s.removed or s.deleted:
-        raise util.Abort(_('uncommitted changes'))
-    return orig(ui, repo, *pats, **opts)
-
-def overrideforget(orig, ui, repo, *pats, **opts):
-    installnormalfilesmatchfn(repo[None].manifest())
-    result = orig(ui, repo, *pats, **opts)
-    restorematchfn()
-    m = composelargefilematcher(scmutil.match(repo[None], pats, opts),
-                                repo[None].manifest())
+def cmdutilforget(orig, ui, repo, match, prefix, explicitonly):
+    normalmatcher = composenormalfilematcher(match, repo[None].manifest())
+    bad, forgot = orig(ui, repo, normalmatcher, prefix, explicitonly)
+    m = composelargefilematcher(match, repo[None].manifest())
 
     try:
         repo.lfstatus = True
@@ -1029,10 +987,10 @@
 
     for f in forget:
         if lfutil.standin(f) not in repo.dirstate and not \
-                os.path.isdir(m.rel(lfutil.standin(f))):
+                repo.wvfs.isdir(lfutil.standin(f)):
             ui.warn(_('not removing %s: file is already untracked\n')
                     % m.rel(f))
-            result = 1
+            bad.append(f)
 
     for f in forget:
         if ui.verbose or not m.exact(f):
@@ -1052,11 +1010,13 @@
         standins = [lfutil.standin(f) for f in forget]
         for f in standins:
             util.unlinkpath(repo.wjoin(f), ignoremissing=True)
-        repo[None].forget(standins)
+        rejected = repo[None].forget(standins)
     finally:
         wlock.release()
 
-    return result
+    bad.extend(f for f in rejected if f in m.files())
+    forgot.extend(f for f in forget if f not in rejected)
+    return bad, forgot
 
 def _getoutgoings(repo, other, missing, addfunc):
     """get pairs of filename and largefile hash in outgoing revisions
@@ -1149,10 +1109,10 @@
     finally:
         repo.lfstatus = False
 
-def scmutiladdremove(orig, repo, pats=[], opts={}, dry_run=None,
+def scmutiladdremove(orig, repo, matcher, prefix, opts={}, dry_run=None,
                      similarity=None):
     if not lfutil.islfilesrepo(repo):
-        return orig(repo, pats, opts, dry_run, similarity)
+        return orig(repo, matcher, prefix, opts, dry_run, similarity)
     # Get the list of missing largefiles so we can remove them
     lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
     unsure, s = lfdirstate.status(match_.always(repo.root, repo.getcwd()), [],
@@ -1163,24 +1123,37 @@
     # we don't remove the standin in the largefiles code, preventing a very
     # confused state later.
     if s.deleted:
-        m = [repo.wjoin(f) for f in s.deleted]
-        removelargefiles(repo.ui, repo, True, *m, **opts)
+        m = copy.copy(matcher)
+
+        # The m._files and m._map attributes are not changed to the deleted list
+        # because that affects the m.exact() test, which in turn governs whether
+        # or not the file name is printed, and how.  Simply limit the original
+        # matches to those in the deleted status list.
+        matchfn = m.matchfn
+        m.matchfn = lambda f: f in s.deleted and matchfn(f)
+
+        removelargefiles(repo.ui, repo, True, m, **opts)
     # Call into the normal add code, and any files that *should* be added as
     # largefiles will be
-    addlargefiles(repo.ui, repo, *pats, **opts)
+    added, bad = addlargefiles(repo.ui, repo, True, matcher, **opts)
     # Now that we've handled largefiles, hand off to the original addremove
     # function to take care of the rest.  Make sure it doesn't do anything with
-    # largefiles by installing a matcher that will ignore them.
-    installnormalfilesmatchfn(repo[None].manifest())
-    result = orig(repo, pats, opts, dry_run, similarity)
-    restorematchfn()
-    return result
+    # largefiles by passing a matcher that will ignore them.
+    matcher = composenormalfilematcher(matcher, repo[None].manifest(), added)
+    return orig(repo, matcher, prefix, opts, dry_run, similarity)
 
 # Calling purge with --all will cause the largefiles to be deleted.
 # Override repo.status to prevent this from happening.
 def overridepurge(orig, ui, repo, *dirs, **opts):
-    # XXX large file status is buggy when used on repo proxy.
-    # XXX this needs to be investigate.
+    # XXX Monkey patching a repoview will not work. The assigned attribute will
+    # be set on the unfiltered repo, but we will only look up attributes in the
+    # unfiltered repo if the lookup in the repoview object itself fails. As the
+    # monkey patched method exists on the repoview class the lookup will not
+    # fail. As a result, the original version will shadow the monkey patched
+    # one, defeating the monkey patch.
+    #
+    # As a workaround we use an unfiltered repo here. We should do something
+    # cleaner instead.
     repo = repo.unfiltered()
     oldstatus = repo.status
     def overridestatus(node1='.', node2=None, match=None, ignored=False,
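
The XXX comment above describes a general Python pitfall rather than anything largefiles-specific: if attribute writes are forwarded to an underlying object while reads are satisfied by a method on the proxy's own class, a monkey patch never takes effect on the proxy. A self-contained toy model of that failure mode (the class names are made up; the real repoview is more involved):

# Toy model of the pitfall: writes are forwarded to the underlying object,
# but reads find the method on the proxy's class first.
class Repo(object):
    def status(self):
        return 'original status'

class RepoView(Repo):                      # the view inherits the real method
    def __init__(self, unfiltered):
        object.__setattr__(self, '_unfiltered', unfiltered)

    def __setattr__(self, name, value):
        setattr(self._unfiltered, name, value)   # writes land on the unfiltered repo

    def __getattr__(self, name):
        return getattr(self._unfiltered, name)   # only reached if lookup fails

repo = Repo()
view = RepoView(repo)

view.status = lambda: 'patched status'     # stored on ``repo``, not on ``view``

assert repo.status() == 'patched status'
assert view.status() == 'original status'  # class method shadows the patch
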
@@ -1236,16 +1209,14 @@
     return result
 
 def overridetransplant(orig, ui, repo, *revs, **opts):
+    resuming = opts.get('continue')
+    repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
+    repo._lfstatuswriters.append(lambda *msg, **opts: None)
     try:
-        oldstandins = lfutil.getstandinsstate(repo)
-        repo._istransplanting = True
         result = orig(ui, repo, *revs, **opts)
-        newstandins = lfutil.getstandinsstate(repo)
-        filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
-        lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
-                                printmessage=True)
     finally:
-        repo._istransplanting = False
+        repo._lfstatuswriters.pop()
+        repo._lfcommithooks.pop()
     return result
 
 def overridecat(orig, ui, repo, file1, *pats, **opts):
@@ -1296,14 +1267,6 @@
         err = 0
     return err
 
-def mercurialsinkbefore(orig, sink):
-    sink.repo._isconverting = True
-    orig(sink)
-
-def mercurialsinkafter(orig, sink):
-    sink.repo._isconverting = False
-    orig(sink)
-
 def mergeupdate(orig, repo, node, branchmerge, force, partial,
                 *args, **kwargs):
     wlock = repo.wlock()
@@ -1332,8 +1295,21 @@
             unsure, s = lfdirstate.status(match_.always(repo.root,
                                                         repo.getcwd()),
                                           [], False, False, False)
-            for lfile in unsure + s.modified + s.added:
+            pctx = repo['.']
+            for lfile in unsure + s.modified:
+                lfileabs = repo.wvfs.join(lfile)
+                if not os.path.exists(lfileabs):
+                    continue
+                lfhash = lfutil.hashrepofile(repo, lfile)
+                standin = lfutil.standin(lfile)
+                lfutil.writestandin(repo, standin, lfhash,
+                                    lfutil.getexecutable(lfileabs))
+                if (standin in pctx and
+                    lfhash == lfutil.readstandin(repo, lfile, '.')):
+                    lfdirstate.normal(lfile)
+            for lfile in s.added:
                 lfutil.updatestandin(repo, lfutil.standin(lfile))
+            lfdirstate.write()
 
         if linearmerge:
             # Only call updatelfiles on the standins that have changed
@@ -1347,12 +1323,8 @@
             newstandins = lfutil.getstandinsstate(repo)
             filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
 
-        # suppress status message while automated committing
-        printmessage = not (getattr(repo, "_isrebasing", False) or
-                            getattr(repo, "_istransplanting", False))
         lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
-                                printmessage=printmessage,
-                                normallookup=partial)
+                                normallookup=partial, checked=linearmerge)
 
         return result
     finally:
--- a/hgext/largefiles/reposetup.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/hgext/largefiles/reposetup.py	Sat Jan 17 18:28:30 2015 -0800
@@ -12,7 +12,7 @@
 
 from mercurial import error, manifest, match as match_, util
 from mercurial.i18n import _
-from mercurial import localrepo, scmutil
+from mercurial import scmutil
 
 import lfcommands
 import lfutil
@@ -34,7 +34,7 @@
         # their actual contents.
         def __getitem__(self, changeid):
             ctx = super(lfilesrepo, self).__getitem__(changeid)
-            if self.lfstatus:
+            if self.unfiltered().lfstatus:
                 class lfilesmanifestdict(manifest.manifestdict):
                     def __contains__(self, filename):
                         orig = super(lfilesmanifestdict, self).__contains__
@@ -72,19 +72,20 @@
         # appropriate list in the result. Also removes standin files
         # from the listing. Revert to the original status if
         # self.lfstatus is False.
-        # XXX large file status is buggy when used on repo proxy.
-        # XXX this needs to be investigated.
-        @localrepo.unfilteredmethod
         def status(self, node1='.', node2=None, match=None, ignored=False,
                 clean=False, unknown=False, listsubrepos=False):
             listignored, listclean, listunknown = ignored, clean, unknown
             orig = super(lfilesrepo, self).status
-            if not self.lfstatus:
+
+            # When various overrides set repo.lfstatus, the change is redirected
+            # to the unfiltered repo, and self.lfstatus is always false when
+            # this repo is filtered.
+            if not self.unfiltered().lfstatus:
                 return orig(node1, node2, match, listignored, listclean,
                             listunknown, listsubrepos)
 
             # some calls in this function rely on the old version of status
-            self.lfstatus = False
+            self.unfiltered().lfstatus = False
             ctx1 = self[node1]
             ctx2 = self[node2]
             working = ctx2.rev() is None
@@ -102,12 +103,12 @@
                 except error.LockError:
                     pass
 
-                # First check if there were files specified on the
-                # command line.  If there were, and none of them were
+                # First check if paths or patterns were specified on the
+                # command line.  If there were, and they don't match any
                 # largefiles, we should just bail here and let super
                 # handle it -- thus gaining a big performance boost.
                 lfdirstate = lfutil.openlfdirstate(ui, self)
-                if match.files() and not match.anypats():
+                if not match.always():
                     for f in lfdirstate:
                         if match(f):
                             break
@@ -240,14 +241,16 @@
                 if wlock:
                     wlock.release()
 
-            self.lfstatus = True
+            self.unfiltered().lfstatus = True
             return scmutil.status(*result)
 
-        # As part of committing, copy all of the largefiles into the
-        # cache.
-        def commitctx(self, *args, **kwargs):
-            node = super(lfilesrepo, self).commitctx(*args, **kwargs)
-            lfutil.copyalltostore(self, node)
+        def commitctx(self, ctx, *args, **kwargs):
+            node = super(lfilesrepo, self).commitctx(ctx, *args, **kwargs)
+            class lfilesctx(ctx.__class__):
+                def markcommitted(self, node):
+                    orig = super(lfilesctx, self).markcommitted
+                    return lfutil.markcommitted(orig, self, node)
+            ctx.__class__ = lfilesctx
             return node
 
         # Before commit, largefile standins have not had their
@@ -259,139 +262,10 @@
 
             wlock = self.wlock()
             try:
-                # Case 0: Automated committing
-                #
-                # While automated committing (like rebase, transplant
-                # and so on), this code path is used to avoid:
-                # (1) updating standins, because standins should
-                #     be already updated at this point
-                # (2) aborting when stadnins are matched by "match",
-                #     because automated committing may specify them directly
-                #
-                if getattr(self, "_isrebasing", False) or \
-                        getattr(self, "_istransplanting", False):
-                    result = orig(text=text, user=user, date=date, match=match,
-                                    force=force, editor=editor, extra=extra)
-
-                    if result:
-                        lfdirstate = lfutil.openlfdirstate(ui, self)
-                        for f in self[result].files():
-                            if lfutil.isstandin(f):
-                                lfile = lfutil.splitstandin(f)
-                                lfutil.synclfdirstate(self, lfdirstate, lfile,
-                                                      False)
-                        lfdirstate.write()
-
-                    return result
-                # Case 1: user calls commit with no specific files or
-                # include/exclude patterns: refresh and commit all files that
-                # are "dirty".
-                if ((match is None) or
-                    (not match.anypats() and not match.files())):
-                    # Spend a bit of time here to get a list of files we know
-                    # are modified so we can compare only against those.
-                    # It can cost a lot of time (several seconds)
-                    # otherwise to update all standins if the largefiles are
-                    # large.
-                    lfdirstate = lfutil.openlfdirstate(ui, self)
-                    dirtymatch = match_.always(self.root, self.getcwd())
-                    unsure, s = lfdirstate.status(dirtymatch, [], False, False,
-                                                  False)
-                    modifiedfiles = unsure + s.modified + s.added + s.removed
-                    lfiles = lfutil.listlfiles(self)
-                    # this only loops through largefiles that exist (not
-                    # removed/renamed)
-                    for lfile in lfiles:
-                        if lfile in modifiedfiles:
-                            if os.path.exists(
-                                    self.wjoin(lfutil.standin(lfile))):
-                                # this handles the case where a rebase is being
-                                # performed and the working copy is not updated
-                                # yet.
-                                if os.path.exists(self.wjoin(lfile)):
-                                    lfutil.updatestandin(self,
-                                        lfutil.standin(lfile))
-                                    lfdirstate.normal(lfile)
-
-                    result = orig(text=text, user=user, date=date, match=match,
-                                    force=force, editor=editor, extra=extra)
-
-                    if result is not None:
-                        for lfile in lfdirstate:
-                            if lfile in modifiedfiles:
-                                if (not os.path.exists(self.wjoin(
-                                   lfutil.standin(lfile)))) or \
-                                   (not os.path.exists(self.wjoin(lfile))):
-                                    lfdirstate.drop(lfile)
-
-                    # This needs to be after commit; otherwise precommit hooks
-                    # get the wrong status
-                    lfdirstate.write()
-                    return result
-
-                lfiles = lfutil.listlfiles(self)
-                match._files = self._subdirlfs(match.files(), lfiles)
-
-                # Case 2: user calls commit with specified patterns: refresh
-                # any matching big files.
-                smatcher = lfutil.composestandinmatcher(self, match)
-                standins = self.dirstate.walk(smatcher, [], False, False)
-
-                # No matching big files: get out of the way and pass control to
-                # the usual commit() method.
-                if not standins:
-                    return orig(text=text, user=user, date=date, match=match,
-                                    force=force, editor=editor, extra=extra)
-
-                # Refresh all matching big files.  It's possible that the
-                # commit will end up failing, in which case the big files will
-                # stay refreshed.  No harm done: the user modified them and
-                # asked to commit them, so sooner or later we're going to
-                # refresh the standins.  Might as well leave them refreshed.
-                lfdirstate = lfutil.openlfdirstate(ui, self)
-                for standin in standins:
-                    lfile = lfutil.splitstandin(standin)
-                    if lfdirstate[lfile] != 'r':
-                        lfutil.updatestandin(self, standin)
-                        lfdirstate.normal(lfile)
-                    else:
-                        lfdirstate.drop(lfile)
-
-                # Cook up a new matcher that only matches regular files or
-                # standins corresponding to the big files requested by the
-                # user.  Have to modify _files to prevent commit() from
-                # complaining "not tracked" for big files.
-                match = copy.copy(match)
-                origmatchfn = match.matchfn
-
-                # Check both the list of largefiles and the list of
-                # standins because if a largefile was removed, it
-                # won't be in the list of largefiles at this point
-                match._files += sorted(standins)
-
-                actualfiles = []
-                for f in match._files:
-                    fstandin = lfutil.standin(f)
-
-                    # ignore known largefiles and standins
-                    if f in lfiles or fstandin in standins:
-                        continue
-
-                    actualfiles.append(f)
-                match._files = actualfiles
-
-                def matchfn(f):
-                    if origmatchfn(f):
-                        return f not in lfiles
-                    else:
-                        return f in standins
-
-                match.matchfn = matchfn
+                lfcommithook = self._lfcommithooks[-1]
+                match = lfcommithook(self, match)
                 result = orig(text=text, user=user, date=date, match=match,
                                 force=force, editor=editor, extra=extra)
-                # This needs to be after commit; otherwise precommit hooks
-                # get the wrong status
-                lfdirstate.write()
                 return result
             finally:
                 wlock.release()
@@ -407,10 +281,12 @@
             return super(lfilesrepo, self).push(remote, force=force, revs=revs,
                 newbranch=newbranch)
 
+        # TODO: _subdirlfs should be moved into "lfutil.py", because
+        # it is referenced only from "lfutil.updatestandinsbymatch"
         def _subdirlfs(self, files, lfiles):
             '''
             Adjust matched file list
-            If we pass a directory to commit whose only commitable files
+            If we pass a directory to commit whose only committable files
             are largefiles, the core commit code aborts before finding
             the largefiles.
             So we do the following:
@@ -463,6 +339,15 @@
 
     repo.__class__ = lfilesrepo
 
+    # Stack of hooks run before committing.
+    # Only the last element ("_lfcommithooks[-1]") is used for each commit.
+    repo._lfcommithooks = [lfutil.updatestandinsbymatch]
+
+    # Stack of status writer functions taking "*msg, **opts" arguments
+    # like "ui.status()". Only last element ("_lfstatuswriters[-1]")
+    # is used to write status out.
+    repo._lfstatuswriters = [ui.status]
+
     def prepushoutgoinghook(local, remote, outgoing):
         if outgoing.missing:
             toupload = set()
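
These two stacks are what the rebase and transplant overrides earlier in this patch push onto and pop from. A standalone toy sketch of the discipline (push a hook, let the commit path consume only the top entry, pop in a finally block):

# Toy sketch of the commit-hook stack discipline used above (illustrative only).
class FakeRepo(object):
    def __init__(self):
        # default hook: leave the match alone
        self._lfcommithooks = [lambda repo, match: ('default', match)]

    def commit(self, match=None):
        hook = self._lfcommithooks[-1]       # only the top entry is consulted
        return hook(self, match)

repo = FakeRepo()
assert repo.commit('m')[0] == 'default'

# an automated command (rebase/transplant) temporarily installs its own hook
repo._lfcommithooks.append(lambda repo, match: ('automated', match))
try:
    assert repo.commit('m')[0] == 'automated'
finally:
    repo._lfcommithooks.pop()

assert repo.commit('m')[0] == 'default'
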
--- a/hgext/largefiles/uisetup.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/hgext/largefiles/uisetup.py	Sat Jan 17 18:28:30 2015 -0800
@@ -33,10 +33,9 @@
     # and in the process of handling commit -A (issue3542)
     entry = extensions.wrapfunction(scmutil, 'addremove',
                                     overrides.scmutiladdremove)
-    entry = extensions.wrapcommand(commands.table, 'remove',
-                                   overrides.overrideremove)
-    entry = extensions.wrapcommand(commands.table, 'forget',
-                                   overrides.overrideforget)
+    extensions.wrapfunction(cmdutil, 'add', overrides.cmdutiladd)
+    extensions.wrapfunction(cmdutil, 'remove', overrides.cmdutilremove)
+    extensions.wrapfunction(cmdutil, 'forget', overrides.cmdutilforget)
 
     # Subrepos call status function
     entry = extensions.wrapcommand(commands.table, 'status',
@@ -160,22 +159,14 @@
 
     # override some extensions' stuff as well
     for name, module in extensions.extensions():
-        if name == 'fetch':
-            extensions.wrapcommand(getattr(module, 'cmdtable'), 'fetch',
-                overrides.overridefetch)
         if name == 'purge':
             extensions.wrapcommand(getattr(module, 'cmdtable'), 'purge',
                 overrides.overridepurge)
         if name == 'rebase':
             extensions.wrapcommand(getattr(module, 'cmdtable'), 'rebase',
                 overrides.overriderebase)
+            extensions.wrapfunction(module, 'rebase',
+                                    overrides.overriderebase)
         if name == 'transplant':
             extensions.wrapcommand(getattr(module, 'cmdtable'), 'transplant',
                 overrides.overridetransplant)
-        if name == 'convert':
-            convcmd = getattr(module, 'convcmd')
-            hgsink = getattr(convcmd, 'mercurial_sink')
-            extensions.wrapfunction(hgsink, 'before',
-                                    overrides.mercurialsinkbefore)
-            extensions.wrapfunction(hgsink, 'after',
-                                    overrides.mercurialsinkafter)
--- a/hgext/mq.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/hgext/mq.py	Sat Jan 17 18:28:30 2015 -0800
@@ -114,6 +114,12 @@
     '# Node ID ',
     '# Parent  ', # can occur twice for merges - but that is not relevant for mq
     ]
+# The order of headers in plain 'mail style' patches:
+PLAINHEADERS = {
+    'from': 0,
+    'date': 1,
+    'subject': 2,
+    }
 
 def inserthgheader(lines, header, value):
     """Assuming lines contains a HG patch header, add a header line with value.
@@ -156,9 +162,40 @@
     return lines
 
 def insertplainheader(lines, header, value):
-    if lines and lines[0] and ':' not in lines[0]:
-        lines.insert(0, '')
-    lines.insert(0, '%s: %s' % (header, value))
+    """For lines containing a plain patch header, add a header line with value.
+    >>> insertplainheader([], 'Date', 'z')
+    ['Date: z']
+    >>> insertplainheader([''], 'Date', 'z')
+    ['Date: z', '']
+    >>> insertplainheader(['x'], 'Date', 'z')
+    ['Date: z', '', 'x']
+    >>> insertplainheader(['From: y', 'x'], 'Date', 'z')
+    ['From: y', 'Date: z', '', 'x']
+    >>> insertplainheader([' date : x', ' from : y', ''], 'From', 'z')
+    [' date : x', 'From: z', '']
+    >>> insertplainheader(['', 'Date: y'], 'Date', 'z')
+    ['Date: z', '', 'Date: y']
+    >>> insertplainheader(['foo: bar', 'DATE: z', 'x'], 'From', 'y')
+    ['From: y', 'foo: bar', 'DATE: z', '', 'x']
+    """
+    newprio = PLAINHEADERS[header.lower()]
+    bestpos = len(lines)
+    for i, line in enumerate(lines):
+        if ':' in line:
+            lheader = line.split(':', 1)[0].strip().lower()
+            lprio = PLAINHEADERS.get(lheader, newprio + 1)
+            if lprio == newprio:
+                lines[i] = '%s: %s' % (header, value)
+                return lines
+            if lprio > newprio and i < bestpos:
+                bestpos = i
+        else:
+            if line:
+                lines.insert(i, '')
+            if i < bestpos:
+                bestpos = i
+            break
+    lines.insert(bestpos, '%s: %s' % (header, value))
     return lines
 
 class patchheader(object):
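The doctests above pin down the new ordering rules; as a quick illustration, a
minimal standalone sketch of calling the helper (assuming a Mercurial tree that
contains this change is importable) might look like::

  from hgext.mq import insertplainheader

  lines = ['fix a crash in foo', '', 'diff --git a/foo.py b/foo.py']
  insertplainheader(lines, 'From', 'alice@example.com')
  insertplainheader(lines, 'Date', 'Sat, 17 Jan 2015 18:28:30 -0800')
  # Headers come out in From/Date order and are separated from the
  # body by a blank line:
  #   From: alice@example.com
  #   Date: Sat, 17 Jan 2015 18:28:30 -0800
  #
  #   fix a crash in foo
  print('\n'.join(lines))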
@@ -266,38 +303,34 @@
                                    for c in self.comments))
 
     def setuser(self, user):
-        if not self.updateheader(['From: ', '# User '], user):
-            try:
-                inserthgheader(self.comments, '# User ', user)
-            except ValueError:
-                if self.plainmode:
-                    insertplainheader(self.comments, 'From', user)
-                else:
-                    tmp = ['# HG changeset patch', '# User ' + user]
-                    self.comments = tmp + self.comments
+        try:
+            inserthgheader(self.comments, '# User ', user)
+        except ValueError:
+            if self.plainmode:
+                insertplainheader(self.comments, 'From', user)
+            else:
+                tmp = ['# HG changeset patch', '# User ' + user]
+                self.comments = tmp + self.comments
         self.user = user
 
     def setdate(self, date):
-        if not self.updateheader(['Date: ', '# Date '], date):
-            try:
-                inserthgheader(self.comments, '# Date ', date)
-            except ValueError:
-                if self.plainmode:
-                    insertplainheader(self.comments, 'Date', date)
-                else:
-                    tmp = ['# HG changeset patch', '# Date ' + date]
-                    self.comments = tmp + self.comments
+        try:
+            inserthgheader(self.comments, '# Date ', date)
+        except ValueError:
+            if self.plainmode:
+                insertplainheader(self.comments, 'Date', date)
+            else:
+                tmp = ['# HG changeset patch', '# Date ' + date]
+                self.comments = tmp + self.comments
         self.date = date
 
     def setparent(self, parent):
-        if not (self.updateheader(['# Parent  '], parent) or
-                self.updateheader(['# Parent '], parent)):
-            try:
-                inserthgheader(self.comments, '# Parent  ', parent)
-            except ValueError:
-                if not self.plainmode:
-                    tmp = ['# HG changeset patch', '# Parent  ' + parent]
-                    self.comments = tmp + self.comments
+        try:
+            inserthgheader(self.comments, '# Parent  ', parent)
+        except ValueError:
+            if not self.plainmode:
+                tmp = ['# HG changeset patch', '# Parent  ' + parent]
+                self.comments = tmp + self.comments
         self.parent = parent
 
     def setmessage(self, message):
@@ -309,18 +342,6 @@
                 self.comments.append('')
             self.comments.append(message)
 
-    def updateheader(self, prefixes, new):
-        '''Update all references to a field in the patch header.
-        Return whether the field is present.'''
-        res = False
-        for prefix in prefixes:
-            for i in xrange(len(self.comments)):
-                if self.comments[i].startswith(prefix):
-                    self.comments[i] = prefix + new
-                    res = True
-                    break
-        return res
-
     def __str__(self):
         s = '\n'.join(self.comments).rstrip()
         if not s:
@@ -2285,7 +2306,7 @@
     q.savedirty()
     if r:
         if not os.path.exists(r.wjoin('.hgignore')):
-            fp = r.wopener('.hgignore', 'w')
+            fp = r.wvfs('.hgignore', 'w')
             fp.write('^\\.hg\n')
             fp.write('^\\.mq\n')
             fp.write('syntax: glob\n')
@@ -2293,7 +2314,7 @@
             fp.write('guards\n')
             fp.close()
         if not os.path.exists(r.wjoin('series')):
-            r.wopener('series', 'w').close()
+            r.wvfs('series', 'w').close()
         r[None].add(['.hgignore', 'series'])
         commands.add(ui, r)
     return 0
@@ -3194,7 +3215,7 @@
 
     def _noqueues():
         try:
-            fh = repo.opener(_allqueues, 'r')
+            fh = repo.vfs(_allqueues, 'r')
             fh.close()
         except IOError:
             return True
@@ -3205,7 +3226,7 @@
         current = _getcurrent()
 
         try:
-            fh = repo.opener(_allqueues, 'r')
+            fh = repo.vfs(_allqueues, 'r')
             queues = [queue.strip() for queue in fh if queue.strip()]
             fh.close()
             if current not in queues:
@@ -3222,13 +3243,13 @@
         _setactivenocheck(name)
 
     def _setactivenocheck(name):
-        fh = repo.opener(_activequeue, 'w')
+        fh = repo.vfs(_activequeue, 'w')
         if name != 'patches':
             fh.write(name)
         fh.close()
 
     def _addqueue(name):
-        fh = repo.opener(_allqueues, 'a')
+        fh = repo.vfs(_allqueues, 'a')
         fh.write('%s\n' % (name,))
         fh.close()
 
@@ -3253,7 +3274,7 @@
         if name == current:
             raise util.Abort(_('cannot delete currently active queue'))
 
-        fh = repo.opener('patches.queues.new', 'w')
+        fh = repo.vfs('patches.queues.new', 'w')
         for queue in existing:
             if queue == name:
                 continue
@@ -3301,7 +3322,7 @@
             raise util.Abort(_('non-queue directory "%s" already exists') %
                     newdir)
 
-        fh = repo.opener('patches.queues.new', 'w')
+        fh = repo.vfs('patches.queues.new', 'w')
         for queue in existing:
             if queue == current:
                 fh.write('%s\n' % (name,))
--- a/hgext/notify.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/hgext/notify.py	Sat Jan 17 18:28:30 2015 -0800
@@ -341,7 +341,8 @@
         maxdiff = int(self.ui.config('notify', 'maxdiff', 300))
         prev = ctx.p1().node()
         ref = ref and ref.node() or ctx.node()
-        chunks = patch.diff(self.repo, prev, ref, opts=patch.diffopts(self.ui))
+        chunks = patch.diff(self.repo, prev, ref,
+                            opts=patch.diffallopts(self.ui))
         difflines = ''.join(chunks).splitlines()
 
         if self.ui.configbool('notify', 'diffstat', True):
--- a/hgext/patchbomb.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/hgext/patchbomb.py	Sat Jan 17 18:28:30 2015 -0800
@@ -43,6 +43,18 @@
 that the patchbomb extension can automatically send patchbombs
 directly from the commandline. See the [email] and [smtp] sections in
 hgrc(5) for details.
+
+You can control the default inclusion of an introduction message with the
+``patchbomb.intro`` configuration option. The configuration is always
+overridden by command line flags like --intro and --desc::
+
+  [patchbomb]
+  intro=auto   # include introduction message if more than 1 patch (default)
+  intro=never  # never include an introduction message
+  intro=always # always include an introduction message
+
+You can set patchbomb to always ask for confirmation by setting
+``patchbomb.confirm`` to true.
 '''
 
 import os, errno, socket, tempfile, cStringIO
@@ -66,9 +78,23 @@
         prompt += ' [%s]' % default
     return ui.prompt(prompt + rest, default)
 
-def introwanted(opts, number):
+def introwanted(ui, opts, number):
     '''is an introductory message apparently wanted?'''
-    return number > 1 or opts.get('intro') or opts.get('desc')
+    introconfig = ui.config('patchbomb', 'intro', 'auto')
+    if opts.get('intro') or opts.get('desc'):
+        intro = True
+    elif introconfig == 'always':
+        intro = True
+    elif introconfig == 'never':
+        intro = False
+    elif introconfig == 'auto':
+        intro = 1 < number
+    else:
+        ui.write_err(_('warning: invalid patchbomb.intro value "%s"\n')
+                     % introconfig)
+        ui.write_err(_('(should be one of always, never, auto)\n'))
+        intro = 1 < number
+    return intro
 
 def makepatch(ui, repo, patchlines, opts, _charsets, idx, total, numbered,
               patchname=None):
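For reference, the mapping from the new ``patchbomb.intro`` setting to the
final decision can be restated as a small standalone sketch (pure Python, no
Mercurial imports; ``forced`` stands in for --intro/--desc being passed on the
command line)::

  def wants_intro(introconfig, number, forced=False):
      if forced:                      # --intro/--desc always win
          return True
      if introconfig == 'always':
          return True
      if introconfig == 'never':
          return False
      # 'auto' (and, after a warning, any unknown value) means
      # "only for a series of more than one patch"
      return number > 1

  assert wants_intro('auto', 1) is False
  assert wants_intro('auto', 3) is True
  assert wants_intro('never', 3) is False
  assert wants_intro('always', 1) is True
  assert wants_intro('never', 1, forced=True) is True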
@@ -153,6 +179,175 @@
     msg['X-Mercurial-Series-Total'] = '%i' % total
     return msg, subj, ds
 
+def _getpatches(repo, revs, **opts):
+    """return a list of patches for a list of revisions
+
+    Each patch in the list is itself a list of lines.
+    """
+    ui = repo.ui
+    prev = repo['.'].rev()
+    for r in scmutil.revrange(repo, revs):
+        if r == prev and (repo[None].files() or repo[None].deleted()):
+            ui.warn(_('warning: working directory has '
+                      'uncommitted changes\n'))
+        output = cStringIO.StringIO()
+        cmdutil.export(repo, [r], fp=output,
+                     opts=patch.difffeatureopts(ui, opts, git=True))
+        yield output.getvalue().split('\n')
+
+def _getbundle(repo, dest, **opts):
+    """return a bundle containing changesets missing in "dest"
+
+    The `opts` keyword-arguments are the same as the ones accepted by the
+    `bundle` command.
+
+    The bundle is returned as a single in-memory binary blob.
+    """
+    ui = repo.ui
+    tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
+    tmpfn = os.path.join(tmpdir, 'bundle')
+    try:
+        commands.bundle(ui, repo, tmpfn, dest, **opts)
+        fp = open(tmpfn, 'rb')
+        data = fp.read()
+        fp.close()
+        return data
+    finally:
+        try:
+            os.unlink(tmpfn)
+        except OSError:
+            pass
+        os.rmdir(tmpdir)
+
+def _getdescription(repo, defaultbody, sender, **opts):
+    """obtain the body of the introduction message and return it
+
+    This is also used for the body of an email with an attached bundle.
+
+    The body can be obtained either from the command line option or entered by
+    the user through the editor.
+    """
+    ui = repo.ui
+    if opts.get('desc'):
+        body = open(opts.get('desc')).read()
+    else:
+        ui.write(_('\nWrite the introductory message for the '
+                   'patch series.\n\n'))
+        body = ui.edit(defaultbody, sender)
+        # Save series description in case sendmail fails
+        msgfile = repo.vfs('last-email.txt', 'wb')
+        msgfile.write(body)
+        msgfile.close()
+    return body
+
+def _getbundlemsgs(repo, sender, bundle, **opts):
+    """Get the full email for sending a given bundle
+
+    This function returns a list of "email" tuples (msg, subject, None);
+    the list always contains exactly one message in this case.
+    """
+    ui = repo.ui
+    _charsets = mail._charsets(ui)
+    subj = (opts.get('subject')
+            or prompt(ui, 'Subject:', 'A bundle for your repository'))
+
+    body = _getdescription(repo, '', sender, **opts)
+    msg = email.MIMEMultipart.MIMEMultipart()
+    if body:
+        msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
+    datapart = email.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
+    datapart.set_payload(bundle)
+    bundlename = '%s.hg' % opts.get('bundlename', 'bundle')
+    datapart.add_header('Content-Disposition', 'attachment',
+                        filename=bundlename)
+    email.Encoders.encode_base64(datapart)
+    msg.attach(datapart)
+    msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
+    return [(msg, subj, None)]
+
+def _makeintro(repo, sender, patches, **opts):
+    """make an introduction email, asking the user for content if needed
+
+    The email is returned as (msg, subject, cumulative-diffstat)"""
+    ui = repo.ui
+    _charsets = mail._charsets(ui)
+    tlen = len(str(len(patches)))
+
+    flag = opts.get('flag') or ''
+    if flag:
+        flag = ' ' + ' '.join(flag)
+    prefix = '[PATCH %0*d of %d%s]' % (tlen, 0, len(patches), flag)
+
+    subj = (opts.get('subject') or
+            prompt(ui, '(optional) Subject: ', rest=prefix, default=''))
+    if not subj:
+        return None         # skip intro if the user doesn't bother
+
+    subj = prefix + ' ' + subj
+
+    body = ''
+    if opts.get('diffstat'):
+        # generate a cumulative diffstat of the whole patch series
+        diffstat = patch.diffstat(sum(patches, []))
+        body = '\n' + diffstat
+    else:
+        diffstat = None
+
+    body = _getdescription(repo, body, sender, **opts)
+    msg = mail.mimeencode(ui, body, _charsets, opts.get('test'))
+    msg['Subject'] = mail.headencode(ui, subj, _charsets,
+                                     opts.get('test'))
+    return (msg, subj, diffstat)
+
+def _getpatchmsgs(repo, sender, patches, patchnames=None, **opts):
+    """return a list of emails from a list of patches
+
+    This involves introduction message creation if necessary.
+
+    This function returns a list of "email" tuples (msg, subject, diffstat).
+    """
+    ui = repo.ui
+    _charsets = mail._charsets(ui)
+    msgs = []
+
+    ui.write(_('this patch series consists of %d patches.\n\n')
+             % len(patches))
+
+    # build the intro message, or skip it if the user declines
+    if introwanted(ui, opts, len(patches)):
+        msg = _makeintro(repo, sender, patches, **opts)
+        if msg:
+            msgs.append(msg)
+
+    # are we going to send more than one message?
+    numbered = len(msgs) + len(patches) > 1
+
+    # now generate the actual patch messages
+    name = None
+    for i, p in enumerate(patches):
+        if patchnames:
+            name = patchnames[i]
+        msg = makepatch(ui, repo, p, opts, _charsets, i + 1,
+                        len(patches), numbered, name)
+        msgs.append(msg)
+
+    return msgs
+
+def _getoutgoing(repo, dest, revs):
+    '''Return the revisions present locally but not in dest'''
+    ui = repo.ui
+    url = ui.expandpath(dest or 'default-push', dest or 'default')
+    url = hg.parseurl(url)[0]
+    ui.status(_('comparing with %s\n') % util.hidepassword(url))
+
+    revs = [r for r in scmutil.revrange(repo, revs) if r >= 0]
+    if not revs:
+        revs = [len(repo) - 1]
+    revs = repo.revs('outgoing(%s) and ::%ld', dest or '', revs)
+    if not revs:
+        ui.status(_("no changes found\n"))
+        return []
+    return [str(r) for r in revs]
+
 emailopts = [
     ('', 'body', None, _('send patches as inline message text (default)')),
     ('a', 'attach', None, _('send patches as attachments')),
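To make the prefix and numbering arithmetic in _makeintro() and
_getpatchmsgs() concrete, here is a toy sketch using stand-in values (plain
Python, nothing Mercurial-specific)::

  patches = [['patch-a'], ['patch-b'], ['patch-c']]  # three fake patches
  flag = ['RFC']                                     # --flag values, if any

  tlen = len(str(len(patches)))                      # zero-padding width
  flagstr = ' ' + ' '.join(flag) if flag else ''
  introprefix = '[PATCH %0*d of %d%s]' % (tlen, 0, len(patches), flagstr)
  print(introprefix)                                 # [PATCH 0 of 3 RFC]

  msgs = [introprefix]                               # the intro is one message
  numbered = len(msgs) + len(patches) > 1            # True: patches are numbered
  print(numbered)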
@@ -240,6 +435,9 @@
     In case email sending fails, you will find a backup of your series
     introductory message in ``.hg/last-email.txt``.
 
+    The default behavior of this command can be customized through
+    configuration (see :hg:`help patchbomb` for details).
+
     Examples::
 
       hg email -r 3000          # send patch 3000 only
@@ -277,48 +475,6 @@
     # internal option used by pbranches
     patches = opts.get('patches')
 
-    def getoutgoing(dest, revs):
-        '''Return the revisions present locally but not in dest'''
-        url = ui.expandpath(dest or 'default-push', dest or 'default')
-        url = hg.parseurl(url)[0]
-        ui.status(_('comparing with %s\n') % util.hidepassword(url))
-
-        revs = [r for r in scmutil.revrange(repo, revs) if r >= 0]
-        if not revs:
-            revs = [len(repo) - 1]
-        revs = repo.revs('outgoing(%s) and ::%ld', dest or '', revs)
-        if not revs:
-            ui.status(_("no changes found\n"))
-            return []
-        return [str(r) for r in revs]
-
-    def getpatches(revs):
-        prev = repo['.'].rev()
-        for r in scmutil.revrange(repo, revs):
-            if r == prev and (repo[None].files() or repo[None].deleted()):
-                ui.warn(_('warning: working directory has '
-                          'uncommitted changes\n'))
-            output = cStringIO.StringIO()
-            cmdutil.export(repo, [r], fp=output,
-                         opts=patch.diffopts(ui, opts))
-            yield output.getvalue().split('\n')
-
-    def getbundle(dest):
-        tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
-        tmpfn = os.path.join(tmpdir, 'bundle')
-        try:
-            commands.bundle(ui, repo, tmpfn, dest, **opts)
-            fp = open(tmpfn, 'rb')
-            data = fp.read()
-            fp.close()
-            return data
-        finally:
-            try:
-                os.unlink(tmpfn)
-            except OSError:
-                pass
-            os.rmdir(tmpdir)
-
     if not (opts.get('test') or mbox):
         # really sending
         mail.validateconfig(ui)
@@ -342,7 +498,7 @@
         revs = rev
 
     if outgoing:
-        revs = getoutgoing(dest, rev)
+        revs = _getoutgoing(repo, dest, rev)
     if bundle:
         opts['revs'] = revs
 
@@ -355,102 +511,21 @@
     def genmsgid(id):
         return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())
 
-    def getdescription(body, sender):
-        if opts.get('desc'):
-            body = open(opts.get('desc')).read()
-        else:
-            ui.write(_('\nWrite the introductory message for the '
-                       'patch series.\n\n'))
-            body = ui.edit(body, sender)
-            # Save series description in case sendmail fails
-            msgfile = repo.opener('last-email.txt', 'wb')
-            msgfile.write(body)
-            msgfile.close()
-        return body
-
-    def getpatchmsgs(patches, patchnames=None):
-        msgs = []
-
-        ui.write(_('this patch series consists of %d patches.\n\n')
-                 % len(patches))
-
-        # build the intro message, or skip it if the user declines
-        if introwanted(opts, len(patches)):
-            msg = makeintro(patches)
-            if msg:
-                msgs.append(msg)
-
-        # are we going to send more than one message?
-        numbered = len(msgs) + len(patches) > 1
-
-        # now generate the actual patch messages
-        name = None
-        for i, p in enumerate(patches):
-            if patchnames:
-                name = patchnames[i]
-            msg = makepatch(ui, repo, p, opts, _charsets, i + 1,
-                            len(patches), numbered, name)
-            msgs.append(msg)
-
-        return msgs
-
-    def makeintro(patches):
-        tlen = len(str(len(patches)))
-
-        flag = opts.get('flag') or ''
-        if flag:
-            flag = ' ' + ' '.join(flag)
-        prefix = '[PATCH %0*d of %d%s]' % (tlen, 0, len(patches), flag)
-
-        subj = (opts.get('subject') or
-                prompt(ui, '(optional) Subject: ', rest=prefix, default=''))
-        if not subj:
-            return None         # skip intro if the user doesn't bother
-
-        subj = prefix + ' ' + subj
-
-        body = ''
-        if opts.get('diffstat'):
-            # generate a cumulative diffstat of the whole patch series
-            diffstat = patch.diffstat(sum(patches, []))
-            body = '\n' + diffstat
-        else:
-            diffstat = None
-
-        body = getdescription(body, sender)
-        msg = mail.mimeencode(ui, body, _charsets, opts.get('test'))
-        msg['Subject'] = mail.headencode(ui, subj, _charsets,
-                                         opts.get('test'))
-        return (msg, subj, diffstat)
-
-    def getbundlemsgs(bundle):
-        subj = (opts.get('subject')
-                or prompt(ui, 'Subject:', 'A bundle for your repository'))
-
-        body = getdescription('', sender)
-        msg = email.MIMEMultipart.MIMEMultipart()
-        if body:
-            msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
-        datapart = email.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
-        datapart.set_payload(bundle)
-        bundlename = '%s.hg' % opts.get('bundlename', 'bundle')
-        datapart.add_header('Content-Disposition', 'attachment',
-                            filename=bundlename)
-        email.Encoders.encode_base64(datapart)
-        msg.attach(datapart)
-        msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
-        return [(msg, subj, None)]
-
     sender = (opts.get('from') or ui.config('email', 'from') or
               ui.config('patchbomb', 'from') or
               prompt(ui, 'From', ui.username()))
 
     if patches:
-        msgs = getpatchmsgs(patches, opts.get('patchnames'))
+        msgs = _getpatchmsgs(repo, sender, patches, opts.get('patchnames'),
+                             **opts)
     elif bundle:
-        msgs = getbundlemsgs(getbundle(dest))
+        bundledata = _getbundle(repo, dest, **opts)
+        bundleopts = opts.copy()
+        bundleopts.pop('bundle', None)  # already processed
+        msgs = _getbundlemsgs(repo, sender, bundledata, **bundleopts)
     else:
-        msgs = getpatchmsgs(list(getpatches(revs)))
+        _patches = list(_getpatches(repo, revs, **opts))
+        msgs = _getpatchmsgs(repo, sender, _patches, **opts)
 
     showaddrs = []
 
@@ -482,15 +557,18 @@
     bcc = getaddrs('Bcc') or []
     replyto = getaddrs('Reply-To')
 
-    if opts.get('diffstat') or opts.get('confirm'):
-        ui.write(_('\nFinal summary:\n\n'))
-        ui.write(('From: %s\n' % sender))
+    confirm = ui.configbool('patchbomb', 'confirm')
+    confirm |= bool(opts.get('diffstat') or opts.get('confirm'))
+
+    if confirm:
+        ui.write(_('\nFinal summary:\n\n'), label='patchbomb.finalsummary')
+        ui.write(('From: %s\n' % sender), label='patchbomb.from')
         for addr in showaddrs:
-            ui.write('%s\n' % addr)
+            ui.write('%s\n' % addr, label='patchbomb.to')
         for m, subj, ds in msgs:
-            ui.write(('Subject: %s\n' % subj))
+            ui.write(('Subject: %s\n' % subj), label='patchbomb.subject')
             if ds:
-                ui.write(ds)
+                ui.write(ds, label='patchbomb.diffstats')
         ui.write('\n')
         if ui.promptchoice(_('are you sure you want to send (yn)?'
                              '$$ &Yes $$ &No')):
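A configuration sketch exercising the new confirmation and labelling
behaviour might look like the following (values are illustrative; the color
keys only take effect when the color extension is enabled)::

  [extensions]
  patchbomb =
  color =

  [patchbomb]
  intro = auto     # introduction message only for multi-patch series
  confirm = true   # always show the final summary and prompt before sending

  [color]
  patchbomb.finalsummary = bold
  patchbomb.subject = green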
--- a/hgext/progress.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/hgext/progress.py	Sat Jan 17 18:28:30 2015 -0800
@@ -37,6 +37,7 @@
 
 import sys
 import time
+import threading
 
 from mercurial.i18n import _
 testedwith = 'internal'
@@ -90,6 +91,7 @@
 class progbar(object):
     def __init__(self, ui):
         self.ui = ui
+        self._refreshlock = threading.Lock()
         self.resetstate()
 
     def resetstate(self):
@@ -100,6 +102,7 @@
         self.printed = False
         self.lastprint = time.time() + float(self.ui.config(
             'progress', 'delay', default=3))
+        self.curtopic = None
         self.lasttopic = None
         self.indetcount = 0
         self.refresh = float(self.ui.config(
@@ -227,41 +230,53 @@
             return _('%d %s/sec') % (delta / elapsed, unit)
         return ''
 
+    def _oktoprint(self, now):
+        '''Check if conditions are met to print - e.g. changedelay elapsed'''
+        if (self.lasttopic is None # first time we printed
+            # not a topic change
+            or self.curtopic == self.lasttopic
+            # it's been long enough we should print anyway
+            or now - self.lastprint >= self.changedelay):
+            return True
+        else:
+            return False
+
     def progress(self, topic, pos, item='', unit='', total=None):
         now = time.time()
-        if pos is None:
-            self.starttimes.pop(topic, None)
-            self.startvals.pop(topic, None)
-            self.topicstates.pop(topic, None)
-            # reset the progress bar if this is the outermost topic
-            if self.topics and self.topics[0] == topic and self.printed:
-                self.complete()
-                self.resetstate()
-            # truncate the list of topics assuming all topics within
-            # this one are also closed
-            if topic in self.topics:
-                self.topics = self.topics[:self.topics.index(topic)]
-                # reset the last topic to the one we just unwound to,
-                # so that higher-level topics will be stickier than
-                # lower-level topics
-                if self.topics:
-                    self.lasttopic = self.topics[-1]
-                else:
-                    self.lasttopic = None
-        else:
-            if topic not in self.topics:
-                self.starttimes[topic] = now
-                self.startvals[topic] = pos
-                self.topics.append(topic)
-            self.topicstates[topic] = pos, item, unit, total
-            if now - self.lastprint >= self.refresh and self.topics:
-                if (self.lasttopic is None # first time we printed
-                    # not a topic change
-                    or topic == self.lasttopic
-                    # it's been long enough we should print anyway
-                    or now - self.lastprint >= self.changedelay):
-                    self.lastprint = now
-                    self.show(now, topic, *self.topicstates[topic])
+        self._refreshlock.acquire()
+        try:
+            if pos is None:
+                self.starttimes.pop(topic, None)
+                self.startvals.pop(topic, None)
+                self.topicstates.pop(topic, None)
+                # reset the progress bar if this is the outermost topic
+                if self.topics and self.topics[0] == topic and self.printed:
+                    self.complete()
+                    self.resetstate()
+                # truncate the list of topics assuming all topics within
+                # this one are also closed
+                if topic in self.topics:
+                    self.topics = self.topics[:self.topics.index(topic)]
+                    # reset the last topic to the one we just unwound to,
+                    # so that higher-level topics will be stickier than
+                    # lower-level topics
+                    if self.topics:
+                        self.lasttopic = self.topics[-1]
+                    else:
+                        self.lasttopic = None
+            else:
+                if topic not in self.topics:
+                    self.starttimes[topic] = now
+                    self.startvals[topic] = pos
+                    self.topics.append(topic)
+                self.topicstates[topic] = pos, item, unit, total
+                self.curtopic = topic
+                if now - self.lastprint >= self.refresh and self.topics:
+                    if self._oktoprint(now):
+                        self.lastprint = now
+                        self.show(now, topic, *self.topicstates[topic])
+        finally:
+            self._refreshlock.release()
 
 _singleton = None
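The new lock simply serializes the read-modify-write of the shared per-topic
state; in isolation the pattern looks like this minimal sketch (toy class, not
the extension's actual code)::

  import threading

  class reporter(object):
      def __init__(self):
          self._lock = threading.Lock()   # same role as progbar._refreshlock
          self.topicstates = {}

      def progress(self, topic, pos):
          with self._lock:                # one caller at a time past this point
              if pos is None:
                  self.topicstates.pop(topic, None)  # topic finished
              else:
                  self.topicstates[topic] = pos      # topic advanced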
 
--- a/hgext/rebase.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/hgext/rebase.py	Sat Jan 17 18:28:30 2015 -0800
@@ -18,11 +18,12 @@
 from mercurial import extensions, patch, scmutil, phases, obsolete, error
 from mercurial import copies
 from mercurial.commands import templateopts
-from mercurial.node import nullrev
+from mercurial.node import nullrev, nullid, hex, short
 from mercurial.lock import release
 from mercurial.i18n import _
 import os, errno
 
+revtodo = -1
 nullmerge = -2
 revignored = -3
 
@@ -197,7 +198,7 @@
 
         if opts.get('interactive'):
             msg = _("interactive history editing is supported by the "
-                    "'histedit' extension (see 'hg help histedit')")
+                    "'histedit' extension (see \"hg help histedit\")")
             raise util.Abort(msg)
 
         if collapsemsg and not collapsef:
@@ -282,7 +283,7 @@
 
                 if not rebaseset:
                     # transform to list because smartsets are not comparable to
-                    # lists. This should be improved to honor lazyness of
+                    # lists. This should be improved to honor laziness of
                     # smartset.
                     if list(base) == [dest.rev()]:
                         if basef:
@@ -326,7 +327,7 @@
             if not keepf and not repo[root].mutable():
                 raise util.Abort(_("can't rebase immutable changeset %s")
                                  % repo[root],
-                                 hint=_('see hg help phases for details'))
+                                 hint=_('see "hg help phases" for details'))
 
             originalwd, target, state = result
             if collapsef:
@@ -366,12 +367,19 @@
         total = len(sortedstate)
         pos = 0
         for rev in sortedstate:
+            ctx = repo[rev]
+            desc = '%d:%s "%s"' % (ctx.rev(), ctx,
+                                   ctx.description().split('\n', 1)[0])
+            names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
+            if names:
+                desc += ' (%s)' % ' '.join(names)
             pos += 1
-            if state[rev] == -1:
-                ui.progress(_("rebasing"), pos, ("%d:%s" % (rev, repo[rev])),
+            if state[rev] == revtodo:
+                ui.status(_('rebasing %s\n') % desc)
+                ui.progress(_("rebasing"), pos, ("%d:%s" % (rev, ctx)),
                             _('changesets'), total)
-                p1, p2 = defineparents(repo, rev, target, state,
-                                                        targetancestors)
+                p1, p2, base = defineparents(repo, rev, target, state,
+                                             targetancestors)
                 storestatus(repo, originalwd, target, state, collapsef, keepf,
                             keepbranchesf, external, activebookmark)
                 if len(repo.parents()) == 2:
@@ -380,8 +388,8 @@
                     try:
                         ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                                      'rebase')
-                        stats = rebasenode(repo, rev, p1, state, collapsef,
-                                           target)
+                        stats = rebasenode(repo, rev, p1, base, state,
+                                           collapsef, target)
                         if stats and stats[3] > 0:
                             raise error.InterventionRequired(
                                 _('unresolved conflicts (see hg '
@@ -389,33 +397,42 @@
                     finally:
                         ui.setconfig('ui', 'forcemerge', '', 'rebase')
                 if not collapsef:
-                    merging = repo[p2].rev() != nullrev
+                    merging = p2 != nullrev
                     editform = cmdutil.mergeeditform(merging, 'rebase')
                     editor = cmdutil.getcommiteditor(editform=editform, **opts)
-                    newrev = concludenode(repo, rev, p1, p2, extrafn=extrafn,
-                                          editor=editor)
+                    newnode = concludenode(repo, rev, p1, p2, extrafn=extrafn,
+                                           editor=editor)
                 else:
                     # Skip commit if we are collapsing
                     repo.dirstate.beginparentchange()
                     repo.setparents(repo[p1].node())
                     repo.dirstate.endparentchange()
-                    newrev = None
+                    newnode = None
                 # Update the state
-                if newrev is not None:
-                    state[rev] = repo[newrev].rev()
+                if newnode is not None:
+                    state[rev] = repo[newnode].rev()
+                    ui.debug('rebased as %s\n' % short(newnode))
                 else:
+                    ui.warn(_('note: rebase of %d:%s created no changes '
+                              'to commit\n') % (rev, ctx))
                     if not collapsef:
-                        ui.note(_('no changes, revision %d skipped\n') % rev)
-                        ui.debug('next revision set to %s\n' % p1)
                         skipped.add(rev)
                     state[rev] = p1
+                    ui.debug('next revision set to %s\n' % p1)
+            elif state[rev] == nullmerge:
+                ui.debug('ignoring null merge rebase of %s\n' % rev)
+            elif state[rev] == revignored:
+                ui.status(_('not rebasing ignored %s\n') % desc)
+            else:
+                ui.status(_('already rebased %s as %s\n') %
+                          (desc, repo[state[rev]]))
 
         ui.progress(_('rebasing'), None)
         ui.note(_('rebase merging completed\n'))
 
         if collapsef and not keepopen:
-            p1, p2 = defineparents(repo, min(state), target,
-                                                        state, targetancestors)
+            p1, p2, _base = defineparents(repo, min(state), target,
+                                          state, targetancestors)
             editopt = opts.get('edit')
             editform = 'rebase.collapse'
             if collapsemsg:
@@ -427,8 +444,12 @@
                         commitmsg += '\n* %s' % repo[rebased].description()
                 editopt = True
             editor = cmdutil.getcommiteditor(edit=editopt, editform=editform)
-            newrev = concludenode(repo, rev, p1, external, commitmsg=commitmsg,
-                                  extrafn=extrafn, editor=editor)
+            newnode = concludenode(repo, rev, p1, external, commitmsg=commitmsg,
+                                   extrafn=extrafn, editor=editor)
+            if newnode is None:
+                newrev = target
+            else:
+                newrev = repo[newnode].rev()
             for oldrev in state.iterkeys():
                 if state[oldrev] > nullmerge:
                     state[oldrev] = newrev
@@ -459,7 +480,7 @@
         if not keepf:
             collapsedas = None
             if collapsef:
-                collapsedas = newrev
+                collapsedas = newnode
             clearrebased(ui, repo, state, skipped, collapsedas)
 
         if currentbookmarks:
@@ -505,7 +526,9 @@
                       ', '.join(str(p) for p in sorted(parents))))
 
 def concludenode(repo, rev, p1, p2, commitmsg=None, editor=None, extrafn=None):
-    'Commit the changes and store useful information in extra'
+    '''Commit the wd changes with parents p1 and p2. Reuse commit info from rev
+    but also store useful information in extra.
+    Return node of committed revision.'''
     try:
         repo.dirstate.beginparentchange()
         repo.setparents(repo[p1].node(), repo[p2].node())
@@ -522,73 +545,31 @@
             targetphase = max(ctx.phase(), phases.draft)
             repo.ui.setconfig('phases', 'new-commit', targetphase, 'rebase')
             # Commit might fail if unresolved files exist
-            newrev = repo.commit(text=commitmsg, user=ctx.user(),
-                                 date=ctx.date(), extra=extra, editor=editor)
+            newnode = repo.commit(text=commitmsg, user=ctx.user(),
+                                  date=ctx.date(), extra=extra, editor=editor)
         finally:
             repo.ui.restoreconfig(backup)
 
-        repo.dirstate.setbranch(repo[newrev].branch())
-        return newrev
+        repo.dirstate.setbranch(repo[newnode].branch())
+        return newnode
     except util.Abort:
         # Invalidate the previous setparents
         repo.dirstate.invalidate()
         raise
 
-def rebasenode(repo, rev, p1, state, collapse, target):
-    'Rebase a single revision'
+def rebasenode(repo, rev, p1, base, state, collapse, target):
+    'Rebase a single revision rev on top of p1 using base as merge ancestor'
     # Merge phase
     # Update to target and merge it with local
-    if repo['.'].rev() != repo[p1].rev():
-        repo.ui.debug(" update to %d:%s\n" % (repo[p1].rev(), repo[p1]))
+    if repo['.'].rev() != p1:
+        repo.ui.debug(" update to %d:%s\n" % (p1, repo[p1]))
         merge.update(repo, p1, False, True, False)
     else:
         repo.ui.debug(" already in target\n")
     repo.dirstate.write()
-    repo.ui.debug(" merge against %d:%s\n" % (repo[rev].rev(), repo[rev]))
-    if repo[rev].rev() == repo[min(state)].rev():
-        # Case (1) initial changeset of a non-detaching rebase.
-        # Let the merge mechanism find the base itself.
-        base = None
-    elif not repo[rev].p2():
-        # Case (2) detaching the node with a single parent, use this parent
-        base = repo[rev].p1().node()
-    else:
-        # In case of merge, we need to pick the right parent as merge base.
-        #
-        # Imagine we have:
-        # - M: currently rebase revision in this step
-        # - A: one parent of M
-        # - B: second parent of M
-        # - D: destination of this merge step (p1 var)
-        #
-        # If we are rebasing on D, D is the successors of A or B. The right
-        # merge base is the one D succeed to. We pretend it is B for the rest
-        # of this comment
-        #
-        # If we pick B as the base, the merge involves:
-        # - changes from B to M (actual changeset payload)
-        # - changes from B to D (induced by rebase) as D is a rebased
-        #   version of B)
-        # Which exactly represent the rebase operation.
-        #
-        # If we pick the A as the base, the merge involves
-        # - changes from A to M (actual changeset payload)
-        # - changes from A to D (with include changes between unrelated A and B
-        #   plus changes induced by rebase)
-        # Which does not represent anything sensible and creates a lot of
-        # conflicts.
-        for p in repo[rev].parents():
-            if state.get(p.rev()) == repo[p1].rev():
-                base = p.node()
-                break
-        else: # fallback when base not found
-            base = None
-
-            # Raise because this function is called wrong (see issue 4106)
-            raise AssertionError('no base found to rebase on '
-                                 '(rebasenode called wrong)')
+    repo.ui.debug(" merge against %d:%s\n" % (rev, repo[rev]))
     if base is not None:
-        repo.ui.debug("   detach base %d:%s\n" % (repo[base].rev(), repo[base]))
+        repo.ui.debug("   detach base %d:%s\n" % (base, repo[base]))
     # When collapsing in-place, the parent is the common ancestor, we
     # have to allow merging with it.
     stats = merge.update(repo, rev, True, True, False, base, collapse,
@@ -655,7 +636,60 @@
             p2 = p2n
     repo.ui.debug(" future parents are %d and %d\n" %
                             (repo[p1].rev(), repo[p2].rev()))
-    return p1, p2
+
+    if rev == min(state):
+        # Case (1) initial changeset of a non-detaching rebase.
+        # Let the merge mechanism find the base itself.
+        base = None
+    elif not repo[rev].p2():
+        # Case (2) detaching the node with a single parent, use this parent
+        base = repo[rev].p1().rev()
+    else:
+        # Assuming there is a p1, this is the case where there also is a p2.
+        # We are thus rebasing a merge and need to pick the right merge base.
+        #
+        # Imagine we have:
+        # - M: current rebase revision in this step
+        # - A: one parent of M
+        # - B: other parent of M
+        # - D: destination of this merge step (p1 var)
+        #
+        # Consider the case where D is a descendant of A or B and the other is
+        # 'outside'. In this case, the right merge base is the D ancestor.
+        #
+        # An informal proof, assuming A is 'outside' and B is the D ancestor:
+        #
+        # If we pick B as the base, the merge involves:
+        # - changes from B to M (actual changeset payload)
+        # - changes from B to D (induced by rebase) as D is a rebased
+        #   version of B
+        # Which exactly represent the rebase operation.
+        #
+        # If we pick A as the base, the merge involves:
+        # - changes from A to M (actual changeset payload)
+        # - changes from A to D (which include changes between unrelated A
+        #   and B, plus changes induced by rebase)
+        # Which does not represent anything sensible and creates a lot of
+        # conflicts. A is thus not the right choice - B is.
+        #
+        # Note: the base found in this 'proof' is only correct in the case
+        # described above. It does not make sense if D is not a descendant
+        # of A or B, or if the other parent is not 'outside' (especially not
+        # if that other parent has been rebased). The current implementation
+        # does not make it feasible to consider these cases separately. In
+        # those other cases we currently just leave it to the user to
+        # correctly resolve an impossible merge using a wrong ancestor.
+        for p in repo[rev].parents():
+            if state.get(p.rev()) == p1:
+                base = p.rev()
+                break
+        else: # fallback when base not found
+            base = None
+
+            # Raise because this function is called wrong (see issue 4106)
+            raise AssertionError('no base found to rebase on '
+                                 '(defineparents called wrong)')
+    return p1, p2, base
 
 def isagitpatch(repo, patchname):
     'Return true if the given patch is in git format'
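As a worked example of the base selection now done in defineparents()
(hypothetical revision numbers), rebasing a merge M whose parent B was already
rebased to D picks B as the merge ancestor::

  A, B, M, D = 2, 3, 4, 5      # A is 'outside'; D is the rebased copy of B
  state = {B: D}               # rebase state so far
  p1 = D                       # new first parent chosen for M
  parentsofm = (A, B)
  base = None
  for p in parentsofm:
      if state.get(p) == p1:
          base = p             # -> B: the merge replays M's own payload
          break                #    plus the B->D delta induced by the rebase
  assert base == B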
@@ -689,7 +723,8 @@
         for rev in sorted(mqrebase, reverse=True):
             if rev not in skipped:
                 name, isgit = mqrebase[rev]
-                repo.ui.debug('import mq patch %d (%s)\n' % (state[rev], name))
+                repo.ui.note(_('updating mq patch %s to %s:%s\n') %
+                             (name, state[rev], repo[state[rev]]))
                 mq.qimport(repo, (), patchname=name, git=isgit,
                                 rev=[str(state[rev])])
             else:
@@ -719,7 +754,7 @@
 def storestatus(repo, originalwd, target, state, collapse, keep, keepbranches,
                 external, activebookmark):
     'Store the current status to allow recovery'
-    f = repo.opener("rebasestate", "w")
+    f = repo.vfs("rebasestate", "w")
     f.write(repo[originalwd].hex() + '\n')
     f.write(repo[target].hex() + '\n')
     f.write(repo[external].hex() + '\n')
@@ -729,8 +764,12 @@
     f.write('%s\n' % (activebookmark or ''))
     for d, v in state.iteritems():
         oldrev = repo[d].hex()
-        if v > nullmerge:
+        if v >= 0:
             newrev = repo[v].hex()
+        elif v == revtodo:
+            # To maintain format compatibility, we have to use nullid.
+            # Please do remove this special case when upgrading the format.
+            newrev = hex(nullid)
         else:
             newrev = v
         f.write("%s:%s\n" % (oldrev, newrev))
@@ -750,7 +789,7 @@
         external = nullrev
         activebookmark = None
         state = {}
-        f = repo.opener("rebasestate")
+        f = repo.vfs("rebasestate")
         for i, l in enumerate(f.read().splitlines()):
             if i == 0:
                 originalwd = repo[l].rev()
@@ -772,6 +811,9 @@
                 oldrev, newrev = l.split(':')
                 if newrev in (str(nullmerge), str(revignored)):
                     state[repo[oldrev].rev()] = int(newrev)
+                elif newrev == nullid:
+                    state[repo[oldrev].rev()] = revtodo
+                    # Legacy compat special case
                 else:
                     state[repo[oldrev].rev()] = repo[newrev].rev()
 
@@ -783,7 +825,7 @@
         if not collapse:
             seen = set([target])
             for old, new in sorted(state.items()):
-                if new != nullrev and new in seen:
+                if new != revtodo and new in seen:
                     skipped.add(old)
                 seen.add(new)
         repo.ui.debug('computed skipped revs: %s\n' %
@@ -810,13 +852,13 @@
 
 def abort(repo, originalwd, target, state):
     'Restore the repository to its original state'
-    dstates = [s for s in state.values() if s > nullrev]
+    dstates = [s for s in state.values() if s >= 0]
     immutable = [d for d in dstates if not repo[d].mutable()]
     cleanup = True
     if immutable:
         repo.ui.warn(_("warning: can't clean up immutable changesets %s\n")
                      % ', '.join(str(repo[r]) for r in immutable),
-                     hint=_('see hg help phases for details'))
+                     hint=_('see "hg help phases" for details'))
         cleanup = False
 
     descendants = set()
@@ -830,10 +872,10 @@
     if cleanup:
         # Update away from the rebase if necessary
         if inrebase(repo, originalwd, state):
-            merge.update(repo, repo[originalwd].rev(), False, True, False)
+            merge.update(repo, originalwd, False, True, False)
 
         # Strip from the first rebased revision
-        rebased = filter(lambda x: x > -1 and x != target, state.values())
+        rebased = filter(lambda x: x >= 0 and x != target, state.values())
         if rebased:
             strippoints = [c.node()  for c in repo.set('roots(%ld)', rebased)]
             # no backup of rebased cset versions needed
@@ -875,7 +917,7 @@
                 return None
 
         repo.ui.debug('rebase onto %d starting from %s\n' % (dest, root))
-        state.update(dict.fromkeys(rebaseset, nullrev))
+        state.update(dict.fromkeys(rebaseset, revtodo))
         # Rebase tries to turn <dest> into a parent of <root> while
         # preserving the number of parents of rebased changesets:
         #
@@ -1012,7 +1054,7 @@
         msg = _('rebase: (use "hg rebase --abort" to clear broken state)\n')
         ui.write(msg)
         return
-    numrebased = len([i for i in state.itervalues() if i != -1])
+    numrebased = len([i for i in state.itervalues() if i >= 0])
     # i18n: column positioning for "hg summary"
     ui.write(_('rebase: %s, %s (rebase --continue)\n') %
              (ui.label(_('%d rebased'), 'rebase.rebased') % numrebased,
--- a/hgext/record.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/hgext/record.py	Sat Jan 17 18:28:30 2015 -0800
@@ -328,10 +328,9 @@
                     f.close()
                     # Start the editor and wait for it to complete
                     editor = ui.geteditor()
-                    util.system("%s \"%s\"" % (editor, patchfn),
-                            environ={'HGUSER': ui.username()},
-                            onerr=util.Abort, errprefix=_("edit failed"),
-                            out=ui.fout)
+                    ui.system("%s \"%s\"" % (editor, patchfn),
+                              environ={'HGUSER': ui.username()},
+                              onerr=util.Abort, errprefix=_("edit failed"))
                     # Remove comment lines
                     patchfp = open(patchfn)
                     ncpatchfp = cStringIO.StringIO()
@@ -520,10 +519,9 @@
                                '(use "hg commit" instead)'))
 
         status = repo.status(match=match)
-        diffopts = opts.copy()
-        diffopts['nodates'] = True
-        diffopts['git'] = True
-        diffopts = patch.diffopts(ui, opts=diffopts)
+        diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
+        diffopts.nodates = True
+        diffopts.git = True
         chunks = patch.diff(repo, changes=status, opts=diffopts)
         fp = cStringIO.StringIO()
         fp.write(''.join(chunks))
--- a/hgext/share.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/hgext/share.py	Sat Jan 17 18:28:30 2015 -0800
@@ -6,21 +6,24 @@
 '''share a common history between several working directories'''
 
 from mercurial.i18n import _
-from mercurial import cmdutil, hg, util
+from mercurial import cmdutil, hg, util, extensions, bookmarks
+from mercurial.hg import repository, parseurl
+import errno
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
 testedwith = 'internal'
 
 @command('share',
-    [('U', 'noupdate', None, _('do not create a working copy'))],
-    _('[-U] SOURCE [DEST]'),
+    [('U', 'noupdate', None, _('do not create a working copy')),
+     ('B', 'bookmarks', None, _('also share bookmarks'))],
+    _('[-U] [-B] SOURCE [DEST]'),
     norepo=True)
-def share(ui, source, dest=None, noupdate=False):
+def share(ui, source, dest=None, noupdate=False, bookmarks=False):
     """create a new shared repository
 
     Initialize a new repository and working directory that shares its
-    history with another repository.
+    history (and optionally bookmarks) with another repository.
 
     .. note::
 
@@ -34,7 +37,7 @@
        the broken clone to reset it to a changeset that still exists.
     """
 
-    return hg.share(ui, source, dest, not noupdate)
+    return hg.share(ui, source, dest, not noupdate, bookmarks)
 
 @command('unshare', [], '')
 def unshare(ui, repo):
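A typical use of the new flag might look like this (paths and bookmark names
are illustrative; with -B, bookmark reads and writes are redirected to the
source repository by the wrappers below)::

  $ hg share -B ~/repos/main ~/repos/feature-copy
  $ cd ~/repos/feature-copy
  $ hg bookmark my-feature   # recorded in ~/repos/main/.hg/bookmarks as well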
@@ -43,7 +46,7 @@
     Copy the store data to the repo and remove the sharedpath data.
     """
 
-    if repo.sharedpath == repo.path:
+    if not repo.shared():
         raise util.Abort(_("this is not a shared repo"))
 
     destlock = lock = None
@@ -67,3 +70,56 @@
 
     # update store, spath, sopener and sjoin of repo
     repo.unfiltered().__init__(repo.baseui, repo.root)
+
+def extsetup(ui):
+    extensions.wrapfunction(bookmarks.bmstore, 'getbkfile', getbkfile)
+    extensions.wrapfunction(bookmarks.bmstore, 'recordchange', recordchange)
+    extensions.wrapfunction(bookmarks.bmstore, 'write', write)
+
+def _hassharedbookmarks(repo):
+    """Returns whether this repo has shared bookmarks"""
+    try:
+        shared = repo.vfs.read('shared').splitlines()
+    except IOError, inst:
+        if inst.errno != errno.ENOENT:
+            raise
+        return False
+    return 'bookmarks' in shared
+
+def _getsrcrepo(repo):
+    """
+    Returns the source repository object for a given shared repository.
+    If repo is not a shared repository, return None.
+    """
+    if repo.sharedpath == repo.path:
+        return None
+
+    # the sharedpath always ends in the .hg; we want the path to the repo
+    source = repo.vfs.split(repo.sharedpath)[0]
+    srcurl, branches = parseurl(source)
+    return repository(repo.ui, srcurl)
+
+def getbkfile(orig, self, repo):
+    if _hassharedbookmarks(repo):
+        srcrepo = _getsrcrepo(repo)
+        if srcrepo is not None:
+            repo = srcrepo
+    return orig(self, repo)
+
+def recordchange(orig, self, tr):
+    # Continue with write to local bookmarks file as usual
+    orig(self, tr)
+
+    if _hassharedbookmarks(self._repo):
+        srcrepo = _getsrcrepo(self._repo)
+        if srcrepo is not None:
+            category = 'share-bookmarks'
+            tr.addpostclose(category, lambda tr: self._writerepo(srcrepo))
+
+def write(orig, self):
+    # First write local bookmarks file in case we ever unshare
+    orig(self)
+    if _hassharedbookmarks(self._repo):
+        srcrepo = _getsrcrepo(self._repo)
+        if srcrepo is not None:
+            self._writerepo(srcrepo)
--- a/hgext/shelve.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/hgext/shelve.py	Sat Jan 17 18:28:30 2015 -0800
@@ -43,6 +43,7 @@
         self.repo = repo
         self.name = name
         self.vfs = scmutil.vfs(repo.join('shelved'))
+        self.ui = self.repo.ui
         if filetype:
             self.fname = name + '.' + filetype
         else:
@@ -82,7 +83,7 @@
         return bundlerepo.bundlerepository(self.repo.baseui, self.repo.root,
                                            self.vfs.join(self.fname))
     def writebundle(self, cg):
-        changegroup.writebundle(cg, self.fname, 'HG10UN', self.vfs)
+        changegroup.writebundle(self.ui, cg, self.fname, 'HG10UN', self.vfs)
 
 class shelvedstate(object):
     """Handle persistence during unshelving operations.
@@ -95,7 +96,7 @@
 
     @classmethod
     def load(cls, repo):
-        fp = repo.opener(cls._filename)
+        fp = repo.vfs(cls._filename)
         try:
             version = int(fp.readline().strip())
 
@@ -121,7 +122,7 @@
 
     @classmethod
     def save(cls, repo, name, originalwctx, pendingctx, stripnodes):
-        fp = repo.opener(cls._filename, 'wb')
+        fp = repo.vfs(cls._filename, 'wb')
         fp.write('%i\n' % cls._version)
         fp.write('%s\n' % name)
         fp.write('%s\n' % hex(originalwctx.node()))
--- a/hgext/strip.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/hgext/strip.py	Sat Jan 17 18:28:30 2015 -0800
@@ -1,4 +1,4 @@
-"""strip changesets and their descendents from history
+"""strip changesets and their descendants from history
 
 This extension allows you to strip changesets and all their descendants from the
 repository. See the command help for details.
--- a/hgext/transplant.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/hgext/transplant.py	Sat Jan 17 18:28:30 2015 -0800
@@ -118,7 +118,7 @@
         revs = sorted(revmap)
         p1, p2 = repo.dirstate.parents()
         pulls = []
-        diffopts = patch.diffopts(self.ui, opts)
+        diffopts = patch.difffeatureopts(self.ui, opts)
         diffopts.git = True
 
         lock = wlock = tr = None
@@ -233,13 +233,12 @@
         fp.close()
 
         try:
-            util.system('%s %s %s' % (filter, util.shellquote(headerfile),
-                                   util.shellquote(patchfile)),
-                        environ={'HGUSER': changelog[1],
-                                 'HGREVISION': revlog.hex(node),
-                                 },
-                        onerr=util.Abort, errprefix=_('filter failed'),
-                        out=self.ui.fout)
+            self.ui.system('%s %s %s' % (filter, util.shellquote(headerfile),
+                                         util.shellquote(patchfile)),
+                           environ={'HGUSER': changelog[1],
+                                    'HGREVISION': revlog.hex(node),
+                                    },
+                           onerr=util.Abort, errprefix=_('filter failed'))
             user, date, msg = self.parselog(file(headerfile))[1:4]
         finally:
             os.unlink(headerfile)
@@ -302,8 +301,12 @@
         '''recover last transaction and apply remaining changesets'''
         if os.path.exists(os.path.join(self.path, 'journal')):
             n, node = self.recover(repo, source, opts)
-            self.ui.status(_('%s transplanted as %s\n') % (short(node),
-                                                           short(n)))
+            if n:
+                self.ui.status(_('%s transplanted as %s\n') % (short(node),
+                                                               short(n)))
+            else:
+                self.ui.status(_('%s skipped due to empty diff\n')
+                               % (short(node),))
         seriespath = os.path.join(self.path, 'series')
         if not os.path.exists(seriespath):
             self.transplants.write()
@@ -344,12 +347,16 @@
                                  revlog.hex(parent))
             if merge:
                 repo.setparents(p1, parents[1])
-            n = repo.commit(message, user, date, extra=extra,
-                            editor=self.getcommiteditor())
-            if not n:
-                raise util.Abort(_('commit failed'))
-            if not merge:
-                self.transplants.set(n, node)
+            modified, added, removed, deleted = repo.status()[:4]
+            if merge or modified or added or removed or deleted:
+                n = repo.commit(message, user, date, extra=extra,
+                                editor=self.getcommiteditor())
+                if not n:
+                    raise util.Abort(_('commit failed'))
+                if not merge:
+                    self.transplants.set(n, node)
+            else:
+                n = None
             self.unlog()
 
             return n, node
--- a/i18n/polib.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/i18n/polib.py	Sat Jan 17 18:28:30 2015 -0800
@@ -396,7 +396,7 @@
     def ordered_metadata(self):
         """
         Convenience method that returns an ordered version of the metadata
-        dictionnary. The return value is list of tuples (metadata name,
+        dictionary. The return value is list of tuples (metadata name,
         metadata_value).
         """
         # copy the dict first
--- a/mercurial/ancestor.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/ancestor.py	Sat Jan 17 18:28:30 2015 -0800
@@ -134,89 +134,128 @@
         return gca
     return deepest(gca)
 
-def missingancestors(revs, bases, pfunc):
-    """Return all the ancestors of revs that are not ancestors of bases.
-
-    This may include elements from revs.
+class incrementalmissingancestors(object):
+    '''persistent state used to calculate missing ancestors incrementally
 
-    Equivalent to the revset (::revs - ::bases). Revs are returned in
-    revision number order, which is a topological order.
+    Although similar in spirit to lazyancestors below, this is a separate class
+    because trying to support contains and missingancestors operations with the
+    same internal data structures adds needless complexity.'''
+    def __init__(self, pfunc, bases):
+        self.bases = set(bases)
+        if not self.bases:
+            self.bases.add(nullrev)
+        self.pfunc = pfunc
 
-    revs and bases should both be iterables. pfunc must return a list of
-    parent revs for a given revs.
-    """
+    def hasbases(self):
+        '''whether the common set has any non-trivial bases'''
+        return self.bases and self.bases != set([nullrev])
+
+    def addbases(self, newbases):
+        '''grow the ancestor set by adding new bases'''
+        self.bases.update(newbases)
 
-    revsvisit = set(revs)
-    basesvisit = set(bases)
-    if not revsvisit:
-        return []
-    if not basesvisit:
-        basesvisit.add(nullrev)
-    start = max(max(revsvisit), max(basesvisit))
-    bothvisit = revsvisit.intersection(basesvisit)
-    revsvisit.difference_update(bothvisit)
-    basesvisit.difference_update(bothvisit)
-    # At this point, we hold the invariants that:
-    # - revsvisit is the set of nodes we know are an ancestor of at least one
-    #   of the nodes in revs
-    # - basesvisit is the same for bases
-    # - bothvisit is the set of nodes we know are ancestors of at least one of
-    #   the nodes in revs and one of the nodes in bases
-    # - a node may be in none or one, but not more, of revsvisit, basesvisit
-    #   and bothvisit at any given time
-    # Now we walk down in reverse topo order, adding parents of nodes already
-    # visited to the sets while maintaining the invariants. When a node is
-    # found in both revsvisit and basesvisit, it is removed from them and
-    # added to bothvisit instead. When revsvisit becomes empty, there are no
-    # more ancestors of revs that aren't also ancestors of bases, so exit.
+    def removeancestorsfrom(self, revs):
+        '''remove all ancestors of bases from the set revs (in place)'''
+        bases = self.bases
+        pfunc = self.pfunc
+        revs.difference_update(bases)
+        # nullrev is always an ancestor
+        revs.discard(nullrev)
+        if not revs:
+            return
+        # anything in revs > start is definitely not an ancestor of bases
+        # revs <= start needs to be investigated
+        start = max(bases)
+        keepcount = sum(1 for r in revs if r > start)
+        if len(revs) == keepcount:
+            # no revs to consider
+            return
 
-    missing = []
-    for curr in xrange(start, nullrev, -1):
+        for curr in xrange(start, min(revs) - 1, -1):
+            if curr not in bases:
+                continue
+            revs.discard(curr)
+            bases.update(pfunc(curr))
+            if len(revs) == keepcount:
+                # no more potential revs to discard
+                break
+
+    def missingancestors(self, revs):
+        '''return all the ancestors of revs that are not ancestors of self.bases
+
+        This may include elements from revs.
+
+        Equivalent to the revset (::revs - ::self.bases). Revs are returned in
+        revision number order, which is a topological order.'''
+        revsvisit = set(revs)
+        basesvisit = self.bases
+        pfunc = self.pfunc
+        bothvisit = revsvisit.intersection(basesvisit)
+        revsvisit.difference_update(bothvisit)
         if not revsvisit:
-            break
+            return []
 
-        if curr in bothvisit:
-            bothvisit.remove(curr)
-            # curr's parents might have made it into revsvisit or basesvisit
-            # through another path
-            for p in pfunc(curr):
-                revsvisit.discard(p)
-                basesvisit.discard(p)
-                bothvisit.add(p)
-            continue
+        start = max(max(revsvisit), max(basesvisit))
+        # At this point, we hold the invariants that:
+        # - revsvisit is the set of nodes we know are an ancestor of at least
+        #   one of the nodes in revs
+        # - basesvisit is the same for bases
+        # - bothvisit is the set of nodes we know are ancestors of at least one
+        #   of the nodes in revs and one of the nodes in bases. bothvisit and
+        #   revsvisit are mutually exclusive, but bothvisit is a subset of
+        #   basesvisit.
+        # Now we walk down in reverse topo order, adding parents of nodes
+        # already visited to the sets while maintaining the invariants. When a
+        # node is found in both revsvisit and basesvisit, it is removed from
+        # revsvisit and added to bothvisit. When revsvisit becomes empty, there
+        # are no more ancestors of revs that aren't also ancestors of bases, so
+        # exit.
+
+        missing = []
+        for curr in xrange(start, nullrev, -1):
+            if not revsvisit:
+                break
 
-        # curr will never be in both revsvisit and basesvisit, since if it
-        # were it'd have been pushed to bothvisit
-        if curr in revsvisit:
-            missing.append(curr)
-            thisvisit = revsvisit
-            othervisit = basesvisit
-        elif curr in basesvisit:
-            thisvisit = basesvisit
-            othervisit = revsvisit
-        else:
-            # not an ancestor of revs or bases: ignore
-            continue
+            if curr in bothvisit:
+                bothvisit.remove(curr)
+                # curr's parents might have made it into revsvisit through
+                # another path
+                for p in pfunc(curr):
+                    revsvisit.discard(p)
+                    basesvisit.add(p)
+                    bothvisit.add(p)
+                continue
 
-        thisvisit.remove(curr)
-        for p in pfunc(curr):
-            if p == nullrev:
-                pass
-            elif p in othervisit or p in bothvisit:
-                # p is implicitly in thisvisit. This means p is or should be
-                # in bothvisit
-                revsvisit.discard(p)
-                basesvisit.discard(p)
-                bothvisit.add(p)
+            if curr in revsvisit:
+                missing.append(curr)
+                revsvisit.remove(curr)
+                thisvisit = revsvisit
+                othervisit = basesvisit
+            elif curr in basesvisit:
+                thisvisit = basesvisit
+                othervisit = revsvisit
             else:
-                # visit later
-                thisvisit.add(p)
+                # not an ancestor of revs or bases: ignore
+                continue
 
-    missing.reverse()
-    return missing
+            for p in pfunc(curr):
+                if p == nullrev:
+                    pass
+                elif p in othervisit or p in bothvisit:
+                    # p is implicitly in thisvisit. This means p is or should be
+                    # in bothvisit
+                    revsvisit.discard(p)
+                    basesvisit.add(p)
+                    bothvisit.add(p)
+                else:
+                    # visit later
+                    thisvisit.add(p)
+
+        missing.reverse()
+        return missing
 
 class lazyancestors(object):
-    def __init__(self, cl, revs, stoprev=0, inclusive=False):
+    def __init__(self, pfunc, revs, stoprev=0, inclusive=False):
         """Create a new object generating ancestors for the given revs. Does
         not generate revs lower than stoprev.
 
@@ -228,7 +267,7 @@
         than stoprev will not be generated.
 
         Result does not include the null revision."""
-        self._parentrevs = cl.parentrevs
+        self._parentrevs = pfunc
         self._initrevs = revs
         self._stoprev = stoprev
         self._inclusive = inclusive
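
For reference, what both the old missingancestors function and the new incrementalmissingancestors class compute is the revset ::revs - ::bases; the class additionally keeps its bases around so the set can be grown and reused incrementally. A small, unoptimized sketch of the semantics only (pfunc maps a rev to its parent list, with -1 standing in for nullrev; the toy graph is made up):

    def missing_ancestors(revs, bases, pfunc):
        """Return sorted(::revs - ::bases), ancestors being inclusive."""
        def ancestors(heads):
            seen, stack = set(), list(heads)
            while stack:
                r = stack.pop()
                if r == -1 or r in seen:
                    continue
                seen.add(r)
                stack.extend(pfunc(r))
            return seen
        return sorted(ancestors(revs) - ancestors(bases))

    # linear history 0..3 plus an extra head 4 branching off rev 1
    parents = {0: [-1], 1: [0], 2: [1], 3: [2], 4: [1]}
    assert missing_ancestors([3], [4], parents.get) == [2, 3]
    assert missing_ancestors([4], [3], parents.get) == [4]
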
--- a/mercurial/archival.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/archival.py	Sat Jan 17 18:28:30 2015 -0800
@@ -276,8 +276,11 @@
                         'style': '', 'patch': None, 'git': None}
                 cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
                 ltags, dist = repo.ui.popbuffer().split('\n')
-                tags = ''.join('latesttag: %s\n' % t for t in ltags.split(':'))
+                ltags = ltags.split(':')
+                changessince = len(repo.revs('only(.,%s)', ltags[0]))
+                tags = ''.join('latesttag: %s\n' % t for t in ltags)
                 tags += 'latesttagdistance: %s\n' % dist
+                tags += 'changessincelatesttag: %s\n' % changessince
 
             return base + tags
 
@@ -304,7 +307,7 @@
         for subpath in sorted(ctx.substate):
             sub = ctx.sub(subpath)
             submatch = matchmod.narrowmatcher(subpath, matchfn)
-            total += sub.archive(repo.ui, archiver, prefix, submatch)
+            total += sub.archive(archiver, prefix, submatch)
 
     if total == 0:
         raise error.Abort(_('no files match the archive pattern'))
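
With the change above the archive metadata gains a changessincelatesttag line next to the existing latesttag and latesttagdistance ones. A small sketch of how those lines are put together, with invented input values (ltags is the ':'-separated latest-tag string captured above, dist and changessince the computed counts):

    def archival_tag_lines(ltags, dist, changessince):
        tags = ''.join('latesttag: %s\n' % t for t in ltags.split(':'))
        tags += 'latesttagdistance: %s\n' % dist
        tags += 'changessincelatesttag: %s\n' % changessince
        return tags

    assert archival_tag_lines('1.0', 3, 2) == ('latesttag: 1.0\n'
                                               'latesttagdistance: 3\n'
                                               'changessincelatesttag: 2\n')
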
--- a/mercurial/bookmarks.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/bookmarks.py	Sat Jan 17 18:28:30 2015 -0800
@@ -5,6 +5,7 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
+import os
 from mercurial.i18n import _
 from mercurial.node import hex, bin
 from mercurial import encoding, error, util, obsolete, lock as lockmod
@@ -29,7 +30,8 @@
         dict.__init__(self)
         self._repo = repo
         try:
-            for line in repo.vfs('bookmarks'):
+            bkfile = self.getbkfile(repo)
+            for line in bkfile:
                 line = line.strip()
                 if not line:
                     continue
@@ -47,12 +49,24 @@
             if inst.errno != errno.ENOENT:
                 raise
 
+    def getbkfile(self, repo):
+        bkfile = None
+        if 'HG_PENDING' in os.environ:
+            try:
+                bkfile = repo.vfs('bookmarks.pending')
+            except IOError, inst:
+                if inst.errno != errno.ENOENT:
+                    raise
+        if bkfile is None:
+            bkfile = repo.vfs('bookmarks')
+        return bkfile
+
     def recordchange(self, tr):
         """record that bookmarks have been changed in a transaction
 
         The transaction is then responsible for updating the file content."""
         tr.addfilegenerator('bookmarks', ('bookmarks',), self._write,
-                            vfs=self._repo.vfs)
+                            location='plain')
         tr.hookargs['bookmark_moved'] = '1'
 
     def write(self):
@@ -65,6 +79,10 @@
         can be copied back on rollback.
         '''
         repo = self._repo
+        self._writerepo(repo)
+
+    def _writerepo(self, repo):
+        """Factored out for extensibility"""
         if repo._bookmarkcurrent not in self:
             unsetcurrent(repo)
 
@@ -97,7 +115,7 @@
     '''
     mark = None
     try:
-        file = repo.opener('bookmarks.current')
+        file = repo.vfs('bookmarks.current')
     except IOError, inst:
         if inst.errno != errno.ENOENT:
             raise
@@ -126,7 +144,7 @@
 
     wlock = repo.wlock()
     try:
-        file = repo.opener('bookmarks.current', 'w', atomictemp=True)
+        file = repo.vfs('bookmarks.current', 'w', atomictemp=True)
         file.write(encoding.fromlocal(mark))
         file.close()
     finally:
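
getbkfile above prefers bookmarks.pending over bookmarks when HG_PENDING is set, presumably so code running inside an open transaction sees bookmark moves that have only been written to the pending file. A generic sketch of that fallback, with plain open() standing in for repo.vfs:

    import errno
    import os

    def open_with_pending(name, opener=open):
        """Open name + '.pending' when HG_PENDING is set and the file exists,
        otherwise fall back to name itself (mirrors getbkfile above)."""
        if 'HG_PENDING' in os.environ:
            try:
                return opener(name + '.pending')
            except (IOError, OSError) as inst:
                if inst.errno != errno.ENOENT:
                    raise
        return opener(name)
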
--- a/mercurial/branchmap.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/branchmap.py	Sat Jan 17 18:28:30 2015 -0800
@@ -9,6 +9,8 @@
 import encoding
 import util
 import time
+from array import array
+from struct import calcsize, pack, unpack
 
 def _filename(repo):
     """name of a branchcache file for a given repo or repoview"""
@@ -19,7 +21,7 @@
 
 def read(repo):
     try:
-        f = repo.opener(_filename(repo))
+        f = repo.vfs(_filename(repo))
         lines = f.read().split('\n')
         f.close()
     except (IOError, OSError):
@@ -132,6 +134,7 @@
             self._closednodes = set()
         else:
             self._closednodes = closednodes
+        self._revbranchcache = None
 
     def _hashfiltered(self, repo):
         """build hash of revision filtered in the current cache
@@ -200,7 +203,7 @@
 
     def write(self, repo):
         try:
-            f = repo.opener(_filename(repo), "w", atomictemp=True)
+            f = repo.vfs(_filename(repo), "w", atomictemp=True)
             cachekey = [hex(self.tipnode), str(self.tiprev)]
             if self.filteredhash is not None:
                 cachekey.append(hex(self.filteredhash))
@@ -223,6 +226,9 @@
             repo.ui.debug("couldn't write branch cache: %s\n" % inst)
             # Abort may be raise by read only opener
             pass
+        if self._revbranchcache:
+            self._revbranchcache.write(repo.unfiltered())
+            self._revbranchcache = None
 
     def update(self, repo, revgen):
         """Given a branchhead cache, self, that may have extra nodes or be
@@ -233,9 +239,12 @@
         cl = repo.changelog
         # collect new branch entries
         newbranches = {}
-        getbranchinfo = cl.branchinfo
+        urepo = repo.unfiltered()
+        self._revbranchcache = revbranchcache(urepo)
+        getbranchinfo = self._revbranchcache.branchinfo
+        ucl = urepo.changelog
         for r in revgen:
-            branch, closesbranch = getbranchinfo(r)
+            branch, closesbranch = getbranchinfo(ucl, r)
             newbranches.setdefault(branch, []).append(r)
             if closesbranch:
                 self._closednodes.add(cl.node(r))
@@ -285,3 +294,158 @@
         duration = time.time() - starttime
         repo.ui.log('branchcache', 'updated %s branch cache in %.4f seconds\n',
                     repo.filtername, duration)
+
+# Revision branch info cache
+
+_rbcversion = '-v1'
+_rbcnames = 'cache/rbc-names' + _rbcversion
+_rbcrevs = 'cache/rbc-revs' + _rbcversion
+# [4 byte hash prefix][4 byte branch name number with sign bit indicating open]
+_rbcrecfmt = '>4sI'
+_rbcrecsize = calcsize(_rbcrecfmt)
+_rbcnodelen = 4
+_rbcbranchidxmask = 0x7fffffff
+_rbccloseflag = 0x80000000
+
+class revbranchcache(object):
+    """Persistent cache, mapping from revision number to branch name and close.
+    This is a low level cache, independent of filtering.
+
+    Branch names are stored in rbc-names in internal encoding separated by 0.
+    rbc-names is append-only, and each branch name is only stored once and will
+    thus have a unique index.
+
+    The branch info for each revision is stored in rbc-revs as constant size
+    records. The whole file is read into memory, but it is only 'parsed' on
+    demand. The file is usually append-only but will be truncated if repo
+    modification is detected.
+    The record for each revision contains the first 4 bytes of the
+    corresponding node hash, and the record is only used if it still matches.
+    Even a completely trashed rbc-revs file will thus still give the right result
+    while converging towards full recovery ... assuming no incorrectly matching
+    node hashes.
+    The record also contains 4 bytes where 31 bits contain the index of the
+    branch and the last bit indicates that it is a branch close commit.
+    The usage pattern for rbc-revs is thus somewhat similar to 00changelog.i
+    and will grow with it but be 1/8th of its size.
+    """
+
+    def __init__(self, repo):
+        assert repo.filtername is None
+        self._names = [] # branch names in local encoding with static index
+        self._rbcrevs = array('c') # structs of type _rbcrecfmt
+        self._rbcsnameslen = 0
+        try:
+            bndata = repo.vfs.read(_rbcnames)
+            self._rbcsnameslen = len(bndata) # for verification before writing
+            self._names = [encoding.tolocal(bn) for bn in bndata.split('\0')]
+        except (IOError, OSError), inst:
+            repo.ui.debug("couldn't read revision branch cache names: %s\n" %
+                          inst)
+        if self._names:
+            try:
+                data = repo.vfs.read(_rbcrevs)
+                self._rbcrevs.fromstring(data)
+            except (IOError, OSError), inst:
+                repo.ui.debug("couldn't read revision branch cache: %s\n" %
+                              inst)
+        # remember number of good records on disk
+        self._rbcrevslen = min(len(self._rbcrevs) // _rbcrecsize,
+                               len(repo.changelog))
+        if self._rbcrevslen == 0:
+            self._names = []
+        self._rbcnamescount = len(self._names) # number of good names on disk
+        self._namesreverse = dict((b, r) for r, b in enumerate(self._names))
+
+    def branchinfo(self, changelog, rev):
+        """Return branch name and close flag for rev, using and updating
+        persistent cache."""
+        rbcrevidx = rev * _rbcrecsize
+
+        # if requested rev is missing, add and populate all missing revs
+        if len(self._rbcrevs) < rbcrevidx + _rbcrecsize:
+            first = len(self._rbcrevs) // _rbcrecsize
+            self._rbcrevs.extend('\0' * (len(changelog) * _rbcrecsize -
+                                         len(self._rbcrevs)))
+            for r in xrange(first, len(changelog)):
+                self._branchinfo(changelog, r)
+
+        # fast path: extract data from cache, use it if node is matching
+        reponode = changelog.node(rev)[:_rbcnodelen]
+        cachenode, branchidx = unpack(
+            _rbcrecfmt, buffer(self._rbcrevs, rbcrevidx, _rbcrecsize))
+        close = bool(branchidx & _rbccloseflag)
+        if close:
+            branchidx &= _rbcbranchidxmask
+        if cachenode == reponode:
+            return self._names[branchidx], close
+        # fall back to slow path and make sure it will be written to disk
+        self._rbcrevslen = min(self._rbcrevslen, rev)
+        return self._branchinfo(changelog, rev)
+
+    def _branchinfo(self, changelog, rev):
+        """Retrieve branch info from changelog and update _rbcrevs"""
+        b, close = changelog.branchinfo(rev)
+        if b in self._namesreverse:
+            branchidx = self._namesreverse[b]
+        else:
+            branchidx = len(self._names)
+            self._names.append(b)
+            self._namesreverse[b] = branchidx
+        reponode = changelog.node(rev)
+        if close:
+            branchidx |= _rbccloseflag
+        rbcrevidx = rev * _rbcrecsize
+        rec = array('c')
+        rec.fromstring(pack(_rbcrecfmt, reponode, branchidx))
+        self._rbcrevs[rbcrevidx:rbcrevidx + _rbcrecsize] = rec
+        return b, close
+
+    def write(self, repo):
+        """Save branch cache if it is dirty."""
+        if self._rbcnamescount < len(self._names):
+            try:
+                if self._rbcnamescount != 0:
+                    f = repo.vfs.open(_rbcnames, 'ab')
+                    # The position after open(x, 'a') is implementation defined-
+                    # see issue3543.  SEEK_END was added in 2.5
+                    f.seek(0, 2) #os.SEEK_END
+                    if f.tell() == self._rbcsnameslen:
+                        f.write('\0')
+                    else:
+                        f.close()
+                        repo.ui.debug("%s changed - rewriting it\n" % _rbcnames)
+                        self._rbcnamescount = 0
+                        self._rbcrevslen = 0
+                if self._rbcnamescount == 0:
+                    f = repo.vfs.open(_rbcnames, 'wb')
+                f.write('\0'.join(encoding.fromlocal(b)
+                                  for b in self._names[self._rbcnamescount:]))
+                self._rbcsnameslen = f.tell()
+                f.close()
+            except (IOError, OSError, util.Abort), inst:
+                repo.ui.debug("couldn't write revision branch cache names: "
+                              "%s\n" % inst)
+                return
+            self._rbcnamescount = len(self._names)
+
+        start = self._rbcrevslen * _rbcrecsize
+        if start != len(self._rbcrevs):
+            revs = min(len(repo.changelog), len(self._rbcrevs) // _rbcrecsize)
+            try:
+                f = repo.vfs.open(_rbcrevs, 'ab')
+                # The position after open(x, 'a') is implementation defined-
+                # see issue3543.  SEEK_END was added in 2.5
+                f.seek(0, 2) #os.SEEK_END
+                if f.tell() != start:
+                    repo.ui.debug("truncating %s to %s\n" % (_rbcrevs, start))
+                    f.seek(start)
+                    f.truncate()
+                end = revs * _rbcrecsize
+                f.write(self._rbcrevs[start:end])
+                f.close()
+            except (IOError, OSError, util.Abort), inst:
+                repo.ui.debug("couldn't write revision branch cache: %s\n" %
+                              inst)
+                return
+            self._rbcrevslen = revs
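
The rbc-revs format introduced above is compact enough to illustrate directly: rbc-names holds the '\0'-separated branch names (so each name gets a stable index), and rbc-revs stores one '>4sI' record per revision, a 4-byte node-hash prefix plus a 32-bit branch index whose top bit flags branch-closing commits. A standalone round-trip sketch (the node prefix is made up):

    from struct import calcsize, pack, unpack

    RBCRECFMT = '>4sI'               # 4-byte node prefix + 4-byte branch index
    RBCCLOSEFLAG = 0x80000000        # high bit: branch-closing commit
    RBCBRANCHIDXMASK = 0x7fffffff
    assert calcsize(RBCRECFMT) == 8  # 8 bytes per revision

    def encode(nodeprefix, branchidx, close):
        if close:
            branchidx |= RBCCLOSEFLAG
        return pack(RBCRECFMT, nodeprefix, branchidx)

    def decode(record):
        cachenode, branchidx = unpack(RBCRECFMT, record)
        close = bool(branchidx & RBCCLOSEFLAG)
        return cachenode, branchidx & RBCBRANCHIDXMASK, close

    rec = encode(b'\xde\xad\xbe\xef', 5, close=True)
    assert decode(rec) == (b'\xde\xad\xbe\xef', 5, True)
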
--- a/mercurial/bundle2.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/bundle2.py	Sat Jan 17 18:28:30 2015 -0800
@@ -85,7 +85,7 @@
 
     :typesize: (one byte)
 
-    :parttype: alphanumerical part name
+    :parttype: alphanumerical part name (restricted to [a-zA-Z0-9_:-]*)
 
     :partid: A 32bits integer (unique in the bundle) that can be used to refer
              to this part.
@@ -153,6 +153,7 @@
 import obsolete
 import pushkey
 import url
+import re
 
 import changegroup, error
 from i18n import _
@@ -171,6 +172,13 @@
 
 preferedchunksize = 4096
 
+_parttypeforbidden = re.compile('[^a-zA-Z0-9_:-]')
+
+def validateparttype(parttype):
+    """raise ValueError if a parttype contains invalid character"""
+    if _parttypeforbidden.search(parttype):
+        raise ValueError(parttype)
+
 def _makefpartparamsizes(nbparams):
     """return a struct format to read part parameter sizes
 
@@ -191,6 +199,7 @@
             '''process a part of type "my part".'''
             ...
     """
+    validateparttype(parttype)
     def _decorator(func):
         lparttype = parttype.lower() # enforce lower case matching.
         assert lparttype not in parthandlermapping
@@ -229,7 +238,7 @@
             self.getreplies(inreplyto).add(category, entry)
 
     def getreplies(self, partid):
-        """get the subrecords that replies to a specific part"""
+        """get the records that are replies to a specific part"""
         return self._replies.setdefault(partid, unbundlerecords())
 
     def __getitem__(self, cat):
@@ -277,7 +286,7 @@
     to be created"""
     raise TransactionUnavailable()
 
-def processbundle(repo, unbundler, transactiongetter=_notransaction):
+def processbundle(repo, unbundler, transactiongetter=None):
     """This function process a bundle, apply effect to/from a repo
 
     It iterates over each part then searches for and uses the proper handling
@@ -288,6 +297,8 @@
 
     Unknown Mandatory part will abort the process.
     """
+    if transactiongetter is None:
+        transactiongetter = _notransaction
     op = bundleoperation(repo, transactiongetter)
     # todo:
     # - replace this is a init function soon.
@@ -303,7 +314,7 @@
             # consume the bundle content
             part.read()
         # Small hack to let caller code distinguish exceptions from bundle2
-        # processing fron the ones from bundle1 processing. This is mostly
+        # processing from processing the old format. This is mostly
         # needed to handle different return codes to unbundle according to the
         # type of bundle. We should probably clean up or drop this return code
         # craziness in a future version.
@@ -317,22 +328,19 @@
     The part is guaranteed to have been fully consumed when the function exits
     (even if an exception is raised)."""
     try:
-        parttype = part.type
-        # part key are matched lower case
-        key = parttype.lower()
         try:
-            handler = parthandlermapping.get(key)
+            handler = parthandlermapping.get(part.type)
             if handler is None:
-                raise error.UnsupportedPartError(parttype=key)
-            op.ui.debug('found a handler for part %r\n' % parttype)
+                raise error.UnsupportedPartError(parttype=part.type)
+            op.ui.debug('found a handler for part %r\n' % part.type)
             unknownparams = part.mandatorykeys - handler.params
             if unknownparams:
                 unknownparams = list(unknownparams)
                 unknownparams.sort()
-                raise error.UnsupportedPartError(parttype=key,
+                raise error.UnsupportedPartError(parttype=part.type,
                                                params=unknownparams)
         except error.UnsupportedPartError, exc:
-            if key != parttype: # mandatory parts
+            if part.mandatory: # mandatory parts
                 raise
             op.ui.debug('ignoring unsupported advisory part %s\n' % exc)
             return # skip to part processing
@@ -351,7 +359,8 @@
             if output is not None:
                 output = op.ui.popbuffer()
         if output:
-            outpart = op.reply.newpart('b2x:output', data=output)
+            outpart = op.reply.newpart('b2x:output', data=output,
+                                       mandatory=False)
             outpart.addparam('in-reply-to', str(part.id), mandatory=False)
     finally:
         # consume the part content to not corrupt the stream.
@@ -359,7 +368,7 @@
 
 
 def decodecaps(blob):
-    """decode a bundle2 caps bytes blob into a dictionnary
+    """decode a bundle2 caps bytes blob into a dictionary
 
     The blob is a list of capabilities (one per line)
     Capabilities may have values using a line of the form::
@@ -589,7 +598,8 @@
     """
 
     def __init__(self, parttype, mandatoryparams=(), advisoryparams=(),
-                 data=''):
+                 data='', mandatory=True):
+        validateparttype(parttype)
         self.id = None
         self.type = parttype
         self._data = data
@@ -606,6 +616,7 @@
         # - False: currently generated,
         # - True: generation done.
         self._generated = None
+        self.mandatory = mandatory
 
     # methods used to defines the part content
     def __setdata(self, data):
@@ -643,9 +654,13 @@
             raise RuntimeError('part can only be consumed once')
         self._generated = False
         #### header
+        if self.mandatory:
+            parttype = self.type.upper()
+        else:
+            parttype = self.type.lower()
         ## parttype
-        header = [_pack(_fparttypesize, len(self.type)),
-                  self.type, _pack(_fpartid, self.id),
+        header = [_pack(_fparttypesize, len(parttype)),
+                  parttype, _pack(_fpartid, self.id),
                  ]
         ## parameters
         # count
@@ -682,7 +697,8 @@
             # backup exception data for later
             exc_info = sys.exc_info()
             msg = 'unexpected error: %s' % exc
-            interpart = bundlepart('b2x:error:abort', [('message', msg)])
+            interpart = bundlepart('b2x:error:abort', [('message', msg)],
+                                   mandatory=False)
             interpart.id = 0
             yield _pack(_fpayloadsize, -1)
             for chunk in interpart.getchunks():
@@ -741,7 +757,7 @@
         self.ui.debug('bundle2 stream interruption, looking for a part.\n')
         headerblock = self._readpartheader()
         if headerblock is None:
-            self.ui.debug('no part found during iterruption.\n')
+            self.ui.debug('no part found during interruption.\n')
             return
         part = unbundlepart(self.ui, headerblock, self._fp)
         op = interruptoperation(self.ui)
@@ -784,6 +800,7 @@
         self.mandatorykeys = ()
         self._payloadstream = None
         self._readheader()
+        self._mandatory = None
 
     def _fromheader(self, size):
         """return the next <size> byte from the header"""
@@ -816,6 +833,9 @@
         self.ui.debug('part type: "%s"\n' % self.type)
         self.id = self._unpackheader(_fpartid)[0]
         self.ui.debug('part id: "%s"\n' % self.id)
+        # extract mandatory bit from type
+        self.mandatory = (self.type != self.type.lower())
+        self.type = self.type.lower()
         ## reading parameters
         # param count
         mancount, advcount = self._unpackheader(_fpartparamcount)
@@ -828,7 +848,7 @@
         # split mandatory from advisory
         mansizes = paramsizes[:mancount]
         advsizes = paramsizes[mancount:]
-        # retrive param value
+        # retrieve param value
         manparams = []
         for key, value in mansizes:
             manparams.append((self._fromheader(key), self._fromheader(value)))
@@ -871,24 +891,26 @@
 capabilities = {'HG2Y': (),
                 'b2x:listkeys': (),
                 'b2x:pushkey': (),
-                'b2x:changegroup': (),
                 'digests': tuple(sorted(util.DIGESTS.keys())),
                 'b2x:remote-changegroup': ('http', 'https'),
                }
 
-def getrepocaps(repo):
+def getrepocaps(repo, allowpushback=False):
     """return the bundle2 capabilities for a given repo
 
     Exists to allow extensions (like evolution) to mutate the capabilities.
     """
     caps = capabilities.copy()
+    caps['b2x:changegroup'] = tuple(sorted(changegroup.packermap.keys()))
     if obsolete.isenabled(repo, obsolete.exchangeopt):
         supportedformat = tuple('V%i' % v for v in obsolete.formats)
         caps['b2x:obsmarkers'] = supportedformat
+    if allowpushback:
+        caps['b2x:pushback'] = ()
     return caps
 
 def bundle2caps(remote):
-    """return the bundlecapabilities of a peer as dict"""
+    """return the bundle capabilities of a peer as dict"""
     raw = remote.capable('bundle2-exp')
     if not raw and raw != '':
         return {}
@@ -901,7 +923,7 @@
     obscaps = caps.get('b2x:obsmarkers', ())
     return [int(c[1:]) for c in obscaps if c.startswith('V')]
 
-@parthandler('b2x:changegroup')
+@parthandler('b2x:changegroup', ('version',))
 def handlechangegroup(op, inpart):
     """apply a changegroup part on the repo
 
@@ -914,15 +936,18 @@
     # we need to make sure we trigger the creation of a transaction object used
     # for the whole processing scope.
     op.gettransaction()
-    cg = changegroup.cg1unpacker(inpart, 'UN')
+    unpackerversion = inpart.params.get('version', '01')
+    # We should raise an appropriate exception here
+    unpacker = changegroup.packermap[unpackerversion][1]
+    cg = unpacker(inpart, 'UN')
     # the source and url passed here are overwritten by the one contained in
     # the transaction.hookargs argument. So 'bundle2' is a placeholder
     ret = changegroup.addchangegroup(op.repo, cg, 'bundle2', 'bundle2')
     op.records.add('changegroup', {'return': ret})
     if op.reply is not None:
-        # This is definitly not the final form of this
+        # This is definitely not the final form of this
         # return. But one need to start somewhere.
-        part = op.reply.newpart('b2x:reply:changegroup')
+        part = op.reply.newpart('b2x:reply:changegroup', mandatory=False)
         part.addparam('in-reply-to', str(inpart.id), mandatory=False)
         part.addparam('return', '%i' % ret, mandatory=False)
     assert not inpart.read()
@@ -989,7 +1014,7 @@
     ret = changegroup.addchangegroup(op.repo, cg, 'bundle2', 'bundle2')
     op.records.add('changegroup', {'return': ret})
     if op.reply is not None:
-        # This is definitly not the final form of this
+        # This is definitely not the final form of this
         # return. But one need to start somewhere.
         part = op.reply.newpart('b2x:reply:changegroup')
         part.addparam('in-reply-to', str(inpart.id), mandatory=False)
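
Two bundle2 conventions appear in the hunks above: part-type names are restricted to [a-zA-Z0-9_:-], and whether a part is mandatory is now carried in the case of the name on the wire (upper case means mandatory, lower case advisory). A standalone sketch of both rules, reusing the same regex:

    import re

    parttypeforbidden = re.compile('[^a-zA-Z0-9_:-]')

    def validateparttype(parttype):
        if parttypeforbidden.search(parttype):
            raise ValueError(parttype)

    def to_wire(parttype, mandatory):
        validateparttype(parttype)
        return parttype.upper() if mandatory else parttype.lower()

    def from_wire(wiretype):
        # a part is mandatory iff its wire name is not entirely lower case
        return wiretype.lower(), wiretype != wiretype.lower()

    assert from_wire(to_wire('b2x:output', False)) == ('b2x:output', False)
    assert from_wire(to_wire('b2x:changegroup', True)) == ('b2x:changegroup', True)
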
--- a/mercurial/bundlerepo.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/bundlerepo.py	Sat Jan 17 18:28:30 2015 -0800
@@ -15,7 +15,7 @@
 from i18n import _
 import os, tempfile, shutil
 import changegroup, util, mdiff, discovery, cmdutil, scmutil, exchange
-import localrepo, changelog, manifest, filelog, revlog, error
+import localrepo, changelog, manifest, filelog, revlog, error, phases
 
 class bundlerevlog(revlog.revlog):
     def __init__(self, opener, indexfile, bundle, linkmapper):
@@ -184,6 +184,23 @@
     def canpush(self):
         return False
 
+class bundlephasecache(phases.phasecache):
+    def __init__(self, *args, **kwargs):
+        super(bundlephasecache, self).__init__(*args, **kwargs)
+        if util.safehasattr(self, 'opener'):
+            self.opener = scmutil.readonlyvfs(self.opener)
+
+    def write(self):
+        raise NotImplementedError
+
+    def _write(self, fp):
+        raise NotImplementedError
+
+    def _updateroots(self, phase, newroots, tr):
+        self.phaseroots[phase] = newroots
+        self.invalidate()
+        self.dirty = True
+
 class bundlerepository(localrepo.localrepository):
     def __init__(self, ui, path, bundlename):
         self._tempparent = None
@@ -225,11 +242,19 @@
         # dict with the mapping 'filename' -> position in the bundle
         self.bundlefilespos = {}
 
+        self.firstnewrev = self.changelog.repotiprev + 1
+        phases.retractboundary(self, None, phases.draft,
+                               [ctx.node() for ctx in self[self.firstnewrev:]])
+
+    @localrepo.unfilteredpropertycache
+    def _phasecache(self):
+        return bundlephasecache(self, self._phasedefaults)
+
     @localrepo.unfilteredpropertycache
     def changelog(self):
         # consume the header if it exists
         self.bundle.changelogheader()
-        c = bundlechangelog(self.sopener, self.bundle)
+        c = bundlechangelog(self.svfs, self.bundle)
         self.manstart = self.bundle.tell()
         return c
 
@@ -238,7 +263,7 @@
         self.bundle.seek(self.manstart)
         # consume the header if it exists
         self.bundle.manifestheader()
-        m = bundlemanifest(self.sopener, self.bundle, self.changelog.rev)
+        m = bundlemanifest(self.svfs, self.bundle, self.changelog.rev)
         self.filestart = self.bundle.tell()
         return m
 
@@ -271,10 +296,10 @@
 
         if f in self.bundlefilespos:
             self.bundle.seek(self.bundlefilespos[f])
-            return bundlefilelog(self.sopener, f, self.bundle,
+            return bundlefilelog(self.svfs, f, self.bundle,
                                  self.changelog.rev, self)
         else:
-            return filelog.filelog(self.sopener, f)
+            return filelog.filelog(self.svfs, f)
 
     def close(self):
         """Close assigned bundle file immediately."""
@@ -325,6 +350,16 @@
         repopath, bundlename = parentpath, path
     return bundlerepository(ui, repopath, bundlename)
 
+class bundletransactionmanager(object):
+    def transaction(self):
+        return None
+
+    def close(self):
+        raise NotImplementedError
+
+    def release(self):
+        raise NotImplementedError
+
 def getremotechanges(ui, repo, other, onlyheads=None, bundlename=None,
                      force=False):
     '''obtains a bundle of changes incoming from other
@@ -375,7 +410,7 @@
         else:
             cg = other.changegroupsubset(incoming, rheads, 'incoming')
         bundletype = localrepo and "HG10BZ" or "HG10UN"
-        fname = bundle = changegroup.writebundle(cg, bundlename, bundletype)
+        fname = bundle = changegroup.writebundle(ui, cg, bundlename, bundletype)
         # keep written bundle?
         if bundlename:
             bundle = None
@@ -393,6 +428,14 @@
 
     csets = localrepo.changelog.findmissing(common, rheads)
 
+    if bundlerepo:
+        reponodes = [ctx.node() for ctx in bundlerepo[bundlerepo.firstnewrev:]]
+        remotephases = other.listkeys('phases')
+
+        pullop = exchange.pulloperation(bundlerepo, other, heads=reponodes)
+        pullop.trmanager = bundletransactionmanager()
+        exchange._pullapplyphases(pullop, remotephases)
+
     def cleanup():
         if bundlerepo:
             bundlerepo.close()
--- a/mercurial/changegroup.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/changegroup.py	Sat Jan 17 18:28:30 2015 -0800
@@ -13,6 +13,7 @@
 import discovery, error, phases, branchmap
 
 _CHANGEGROUPV1_DELTA_HEADER = "20s20s20s20s"
+_CHANGEGROUPV2_DELTA_HEADER = "20s20s20s20s20s"
 
 def readexactly(stream, n):
     '''read n bytes from stream.read and abort if less was available'''
@@ -41,6 +42,25 @@
     """return a changegroup chunk header (string) for a zero-length chunk"""
     return struct.pack(">l", 0)
 
+def combineresults(results):
+    """logic to combine 0 or more addchangegroup results into one"""
+    changedheads = 0
+    result = 1
+    for ret in results:
+        # If any changegroup result is 0, return 0
+        if ret == 0:
+            result = 0
+            break
+        if ret < -1:
+            changedheads += ret + 1
+        elif ret > 1:
+            changedheads += ret - 1
+    if changedheads > 0:
+        result = 1 + changedheads
+    elif changedheads < 0:
+        result = -1 + changedheads
+    return result
+
 class nocompress(object):
     def compress(self, x):
         return x
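
combineresults above folds several addchangegroup return codes into one; reading the arithmetic, 0 means failure, 1 a clean success, 1 + n that n heads were added and -1 - n that n heads were removed. Restated standalone with a few made-up inputs:

    def combineresults(results):
        changedheads = 0
        result = 1
        for ret in results:
            if ret == 0:
                result = 0
                break
            if ret < -1:
                changedheads += ret + 1
            elif ret > 1:
                changedheads += ret - 1
        if changedheads > 0:
            result = 1 + changedheads
        elif changedheads < 0:
            result = -1 + changedheads
        return result

    assert combineresults([]) == 1          # nothing applied, still a success
    assert combineresults([1, 1]) == 1      # two clean applications
    assert combineresults([3, 2]) == 4      # 2 + 1 new heads in total
    assert combineresults([-2, -3]) == -4   # 1 + 2 heads removed
    assert combineresults([1, 0, 3]) == 0   # a failing changegroup fails the run
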
@@ -51,6 +71,7 @@
     "": ("", nocompress), # only when using unbundle on ssh and old http servers
                           # since the unification ssh accepts a header but there
                           # is no capability signaling it.
+    "HG2Y": (), # special-cased below
     "HG10UN": ("HG10UN", nocompress),
     "HG10BZ": ("HG10", lambda: bz2.BZ2Compressor()),
     "HG10GZ": ("HG10GZ", lambda: zlib.compressobj()),
@@ -59,7 +80,7 @@
 # hgweb uses this list to communicate its preferred type
 bundlepriority = ['HG10GZ', 'HG10BZ', 'HG10UN']
 
-def writebundle(cg, filename, bundletype, vfs=None):
+def writebundle(ui, cg, filename, bundletype, vfs=None):
     """Write a bundle file and return its filename.
 
     Existing files will not be overwritten.
@@ -81,9 +102,20 @@
             fh = os.fdopen(fd, "wb")
         cleanup = filename
 
-        header, compressor = bundletypes[bundletype]
-        fh.write(header)
-        z = compressor()
+        if bundletype == "HG2Y":
+            import bundle2
+            bundle = bundle2.bundle20(ui)
+            part = bundle.newpart('b2x:changegroup', data=cg.getchunks())
+            part.addparam('version', cg.version)
+            z = nocompress()
+            chunkiter = bundle.getchunks()
+        else:
+            if cg.version != '01':
+                raise util.Abort(_('Bundle1 only supports v1 changegroups\n'))
+            header, compressor = bundletypes[bundletype]
+            fh.write(header)
+            z = compressor()
+            chunkiter = cg.getchunks()
 
         # parse the changegroup data, otherwise we will block
         # in case of sshrepo because we don't know the end of the stream
@@ -91,7 +123,7 @@
         # an empty chunkgroup is the end of the changegroup
         # a changegroup has at least 2 chunkgroups (changelog and manifest).
         # after that, an empty chunkgroup is the end of the changegroup
-        for chunk in cg.getchunks():
+        for chunk in chunkiter:
             fh.write(z.compress(chunk))
         fh.write(z.flush())
         cleanup = None
@@ -126,6 +158,7 @@
 class cg1unpacker(object):
     deltaheader = _CHANGEGROUPV1_DELTA_HEADER
     deltaheadersize = struct.calcsize(deltaheader)
+    version = '01'
     def __init__(self, fh, alg):
         self._stream = decompressor(fh, alg)
         self._type = alg
@@ -215,6 +248,15 @@
                     pos = next
             yield closechunk()
 
+class cg2unpacker(cg1unpacker):
+    deltaheader = _CHANGEGROUPV2_DELTA_HEADER
+    deltaheadersize = struct.calcsize(deltaheader)
+    version = '02'
+
+    def _deltaheader(self, headertuple, prevnode):
+        node, p1, p2, deltabase, cs = headertuple
+        return node, p1, p2, deltabase, cs
+
 class headerlessfixup(object):
     def __init__(self, fh, h):
         self._h = h
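
The main wire-level difference between the two changegroup formats shown here is the delta header: version 02 adds an explicit 20-byte deltabase between p2 and the link node, so a delta can be stored against a parent (or the revlog's own delta parent) instead of implicitly against the previous entry in the stream, as the cg1 comment below notes. A quick struct sketch with made-up ids:

    from struct import calcsize, pack, unpack

    V1 = '20s20s20s20s'       # node, p1, p2, linknode (base implicitly = prev)
    V2 = '20s20s20s20s20s'    # node, p1, p2, deltabase, linknode
    assert (calcsize(V1), calcsize(V2)) == (80, 100)

    node, p1, p2, base, link = [bytes(bytearray([i] * 20)) for i in range(5)]
    header = pack(V2, node, p1, p2, base, link)
    assert unpack(V2, header) == (node, p1, p2, base, link)
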
@@ -229,6 +271,7 @@
 
 class cg1packer(object):
     deltaheader = _CHANGEGROUPV1_DELTA_HEADER
+    version = '01'
     def __init__(self, repo, bundlecaps=None):
         """Given a source repo, construct a bundler.
 
@@ -249,6 +292,11 @@
         self._repo = repo
         self._reorder = reorder
         self._progress = repo.ui.progress
+        if self._repo.ui.verbose and not self._repo.ui.debugflag:
+            self._verbosenote = self._repo.ui.note
+        else:
+            self._verbosenote = lambda s: None
+
     def close(self):
         return closechunk()
 
@@ -332,9 +380,13 @@
             mfs.setdefault(c[0], x)
             return x
 
+        self._verbosenote(_('uncompressed size of bundle content:\n'))
+        size = 0
         for chunk in self.group(clnodes, cl, lookupcl, units=_('changesets'),
                                 reorder=reorder):
+            size += len(chunk)
             yield chunk
+        self._verbosenote(_('%8.i (changelog)\n') % size)
         progress(msgbundling, None)
 
         # Callback for the manifest, used to collect linkrevs for filelog
@@ -355,9 +407,12 @@
             return clnode
 
         mfnodes = self.prune(mf, mfs, commonrevs, source)
+        size = 0
         for chunk in self.group(mfnodes, mf, lookupmf, units=_('manifests'),
                                 reorder=reorder):
+            size += len(chunk)
             yield chunk
+        self._verbosenote(_('%8.i (manifests)\n') % size)
         progress(msgbundling, None)
 
         mfs.clear()
@@ -408,15 +463,22 @@
             if filenodes:
                 progress(msgbundling, i + 1, item=fname, unit=msgfiles,
                          total=total)
-                yield self.fileheader(fname)
+                h = self.fileheader(fname)
+                size = len(h)
+                yield h
                 for chunk in self.group(filenodes, filerevlog, lookupfilelog,
                                         reorder=reorder):
+                    size += len(chunk)
                     yield chunk
+                self._verbosenote(_('%8.i  %s\n') % (size, fname))
+
+    def deltaparent(self, revlog, rev, p1, p2, prev):
+        return prev
 
     def revchunk(self, revlog, rev, prev, linknode):
         node = revlog.node(rev)
         p1, p2 = revlog.parentrevs(rev)
-        base = prev
+        base = self.deltaparent(revlog, rev, p1, p2, prev)
 
         prefix = ''
         if base == nullrev:
@@ -436,6 +498,30 @@
         # do nothing with basenode, it is implicitly the previous one in HG10
         return struct.pack(self.deltaheader, node, p1n, p2n, linknode)
 
+class cg2packer(cg1packer):
+    version = '02'
+    deltaheader = _CHANGEGROUPV2_DELTA_HEADER
+
+    def group(self, nodelist, revlog, lookup, units=None, reorder=None):
+        if (revlog._generaldelta and reorder is not True):
+            reorder = False
+        return super(cg2packer, self).group(nodelist, revlog, lookup,
+                                            units=units, reorder=reorder)
+
+    def deltaparent(self, revlog, rev, p1, p2, prev):
+        dp = revlog.deltaparent(rev)
+        # avoid storing full revisions; pick prev in those cases
+        # also pick prev when we can't be sure remote has dp
+        if dp == nullrev or (dp != p1 and dp != p2 and dp != prev):
+            return prev
+        return dp
+
+    def builddeltaheader(self, node, p1n, p2n, basenode, linknode):
+        return struct.pack(self.deltaheader, node, p1n, p2n, basenode, linknode)
+
+packermap = {'01': (cg1packer, cg1unpacker),
+             '02': (cg2packer, cg2unpacker)}
+
 def _changegroupinfo(repo, nodes, source):
     if repo.ui.verbose or source == 'bundle':
         repo.ui.status(_("%d changesets found\n") % len(nodes))
@@ -444,7 +530,7 @@
         for node in nodes:
             repo.ui.debug("%s\n" % hex(node))
 
-def getsubset(repo, outgoing, bundler, source, fastpath=False):
+def getsubsetraw(repo, outgoing, bundler, source, fastpath=False):
     repo = repo.unfiltered()
     commonrevs = outgoing.common
     csets = outgoing.missing
@@ -458,10 +544,13 @@
 
     repo.hook('preoutgoing', throw=True, source=source)
     _changegroupinfo(repo, csets, source)
-    gengroup = bundler.generate(commonrevs, csets, fastpathlinkrev, source)
-    return cg1unpacker(util.chunkbuffer(gengroup), 'UN')
+    return bundler.generate(commonrevs, csets, fastpathlinkrev, source)
 
-def changegroupsubset(repo, roots, heads, source):
+def getsubset(repo, outgoing, bundler, source, fastpath=False, version='01'):
+    gengroup = getsubsetraw(repo, outgoing, bundler, source, fastpath)
+    return packermap[version][1](util.chunkbuffer(gengroup), 'UN')
+
+def changegroupsubset(repo, roots, heads, source, version='01'):
     """Compute a changegroup consisting of all the nodes that are
     descendants of any of the roots and ancestors of any of the heads.
     Return a chunkbuffer object whose read() method will return
@@ -483,8 +572,19 @@
     for n in roots:
         discbases.extend([p for p in cl.parents(n) if p != nullid])
     outgoing = discovery.outgoing(cl, discbases, heads)
-    bundler = cg1packer(repo)
-    return getsubset(repo, outgoing, bundler, source)
+    bundler = packermap[version][0](repo)
+    return getsubset(repo, outgoing, bundler, source, version=version)
+
+def getlocalchangegroupraw(repo, source, outgoing, bundlecaps=None,
+                           version='01'):
+    """Like getbundle, but taking a discovery.outgoing as an argument.
+
+    This is only implemented for local repos and reuses potentially
+    precomputed sets in outgoing. Returns a raw changegroup generator."""
+    if not outgoing.missing:
+        return None
+    bundler = packermap[version][0](repo, bundlecaps)
+    return getsubsetraw(repo, outgoing, bundler, source)
 
 def getlocalchangegroup(repo, source, outgoing, bundlecaps=None):
     """Like getbundle, but taking a discovery.outgoing as an argument.
@@ -515,6 +615,22 @@
         heads = cl.heads()
     return discovery.outgoing(cl, common, heads)
 
+def getchangegroupraw(repo, source, heads=None, common=None, bundlecaps=None,
+                      version='01'):
+    """Like changegroupsubset, but returns the set difference between the
+    ancestors of heads and the ancestors common.
+
+    If heads is None, use the local heads. If common is None, use [nullid].
+
+    If version is None, use a version '1' changegroup.
+
+    The nodes in common might not all be known locally due to the way the
+    current discovery protocol works. Returns a raw changegroup generator.
+    """
+    outgoing = _computeoutgoing(repo, heads, common)
+    return getlocalchangegroupraw(repo, source, outgoing, bundlecaps=bundlecaps,
+                                  version=version)
+
 def getchangegroup(repo, source, heads=None, common=None, bundlecaps=None):
     """Like changegroupsubset, but returns the set difference between the
     ancestors of heads and the ancestors common.
@@ -598,12 +714,6 @@
     changesets = files = revisions = 0
     efiles = set()
 
-    # write changelog data to temp files so concurrent readers will not see
-    # inconsistent view
-    cl = repo.changelog
-    cl.delayupdate()
-    oldheads = cl.heads()
-
     tr = repo.transaction("\n".join([srctype, util.hidepassword(url)]))
     # The transaction could have been created before and already carries source
     # information. In this case we use the top level data. We overwrite the
@@ -611,6 +721,12 @@
     # this function.
     srctype = tr.hookargs.setdefault('source', srctype)
     url = tr.hookargs.setdefault('url', url)
+
+    # write changelog data to temp files so concurrent readers will not see
+    # inconsistent view
+    cl = repo.changelog
+    cl.delayupdate(tr)
+    oldheads = cl.heads()
     try:
         repo.hook('prechangegroup', throw=True, **tr.hookargs)
 
@@ -693,7 +809,7 @@
         repo.invalidatevolatilesets()
 
         if changesets > 0:
-            p = lambda: cl.writepending() and repo.root or ""
+            p = lambda: tr.writepending() and repo.root or ""
             if 'node' not in tr.hookargs:
                 tr.hookargs['node'] = hex(cl.node(clstart))
                 hookargs = dict(tr.hookargs)
@@ -725,11 +841,6 @@
             # strip should not touch boundary at all
             phases.retractboundary(repo, tr, targetphase, added)
 
-        # make changelog see real files again
-        cl.finalize(trp)
-
-        tr.close()
-
         if changesets > 0:
             if srctype != 'strip':
                 # During strip, branchcache is invalid but coming call to
@@ -758,7 +869,11 @@
                             "%s incoming changes - new heads: %s\n",
                             len(added),
                             ', '.join([hex(c[:6]) for c in newheads]))
-            repo._afterlock(runhooks)
+
+            tr.addpostclose('changegroup-runhooks-%020i' % clstart,
+                            lambda tr: repo._afterlock(runhooks))
+
+        tr.close()
 
     finally:
         tr.release()
--- a/mercurial/changelog.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/changelog.py	Sat Jan 17 18:28:30 2015 -0800
@@ -108,15 +108,21 @@
         self.data.append(str(s))
         self.offset += len(s)
 
-def delayopener(opener, target, divert, buf):
-    def o(name, mode='r'):
+def _divertopener(opener, target):
+    """build an opener that writes in 'target.a' instead of 'target'"""
+    def _divert(name, mode='r'):
         if name != target:
             return opener(name, mode)
-        if divert:
-            return opener(name + ".a", mode.replace('a', 'w'))
-        # otherwise, divert to memory
+        return opener(name + ".a", mode)
+    return _divert
+
+def _delayopener(opener, target, buf):
+    """build an opener that stores chunks in 'buf' instead of 'target'"""
+    def _delay(name, mode='r'):
+        if name != target:
+            return opener(name, mode)
         return appender(opener, name, mode, buf)
-    return o
+    return _delay
 
 class changelog(revlog.revlog):
     def __init__(self, opener):
@@ -127,7 +133,7 @@
             self._generaldelta = False
         self._realopener = opener
         self._delayed = False
-        self._delaybuf = []
+        self._delaybuf = None
         self._divert = False
         self.filteredrevs = frozenset()
 
@@ -218,20 +224,30 @@
             raise error.FilteredIndexError(rev)
         return super(changelog, self).flags(rev)
 
-    def delayupdate(self):
+    def delayupdate(self, tr):
         "delay visibility of index updates to other readers"
+
+        if not self._delayed:
+            if len(self) == 0:
+                self._divert = True
+                if self._realopener.exists(self.indexfile + '.a'):
+                    self._realopener.unlink(self.indexfile + '.a')
+                self.opener = _divertopener(self._realopener, self.indexfile)
+            else:
+                self._delaybuf = []
+                self.opener = _delayopener(self._realopener, self.indexfile,
+                                           self._delaybuf)
         self._delayed = True
-        self._divert = (len(self) == 0)
-        self._delaybuf = []
-        self.opener = delayopener(self._realopener, self.indexfile,
-                                  self._divert, self._delaybuf)
+        tr.addpending('cl-%i' % id(self), self._writepending)
+        tr.addfinalize('cl-%i' % id(self), self._finalize)
 
-    def finalize(self, tr):
+    def _finalize(self, tr):
         "finalize index updates"
         self._delayed = False
         self.opener = self._realopener
         # move redirected index data back into place
         if self._divert:
+            assert not self._delaybuf
             tmpname = self.indexfile + ".a"
             nfile = self.opener.open(tmpname)
             nfile.close()
@@ -240,7 +256,8 @@
             fp = self.opener(self.indexfile, 'a')
             fp.write("".join(self._delaybuf))
             fp.close()
-            self._delaybuf = []
+            self._delaybuf = None
+        self._divert = False
         # split when we're done
         self.checkinlinesize(tr)
 
@@ -251,19 +268,24 @@
         self._nodecache = r._nodecache
         self._chunkcache = r._chunkcache
 
-    def writepending(self):
+    def _writepending(self, tr):
         "create a file containing the unfinalized state for pretxnchangegroup"
         if self._delaybuf:
             # make a temporary copy of the index
             fp1 = self._realopener(self.indexfile)
-            fp2 = self._realopener(self.indexfile + ".a", "w")
+            pendingfilename = self.indexfile + ".a"
+            # register as a temp file to ensure cleanup on failure
+            tr.registertmp(pendingfilename)
+            # write existing data
+            fp2 = self._realopener(pendingfilename, "w")
             fp2.write(fp1.read())
             # add pending data
             fp2.write("".join(self._delaybuf))
             fp2.close()
             # switch modes so finalize can simply rename
-            self._delaybuf = []
+            self._delaybuf = None
             self._divert = True
+            self.opener = _divertopener(self._realopener, self.indexfile)
 
         if self._divert:
             return True
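
The two opener factories above capture the scheme used while a transaction is open: writes aimed at the changelog index are either diverted to a sibling '.a' file or buffered in memory, so concurrent readers do not see an inconsistent view, and _finalize later moves the redirected data back into place. A sketch of the diverting wrapper around any open()-like callable:

    def divertopener(opener, target):
        """Route accesses to `target` to `target + '.a'` instead,
        mirroring _divertopener above; `opener` stands in for repo.vfs."""
        def _divert(name, mode='r'):
            if name != target:
                return opener(name, mode)
            return opener(name + '.a', mode)
        return _divert

    # usage: writes meant for '00changelog.i' land in '00changelog.i.a'
    # op = divertopener(open, '00changelog.i')
    # op('00changelog.i', 'a').write('pending index data')
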
--- a/mercurial/cmdutil.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/cmdutil.py	Sat Jan 17 18:28:30 2015 -0800
@@ -113,7 +113,7 @@
 def mergeeditform(ctxorbool, baseform):
     """build appropriate editform from ctxorbool and baseform
 
-    'cxtorbool' is one of a ctx to be committed, or a bool whether
+    'ctxorbool' is one of a ctx to be committed, or a bool whether
     merging is committed.
 
     This returns editform 'baseform' with '.merge' if merging is
@@ -902,20 +902,26 @@
         self.ui.write(_("changeset:   %d:%s\n") % (rev, hexfunc(changenode)),
                       label='log.changeset changeset.%s' % ctx.phasestr())
 
+        # branches are shown first before any other names due to backwards
+        # compatibility
         branch = ctx.branch()
         # don't show the default branch name
         if branch != 'default':
             # i18n: column positioning for "hg log"
             self.ui.write(_("branch:      %s\n") % branch,
                           label='log.branch')
-        for bookmark in self.repo.nodebookmarks(changenode):
-            # i18n: column positioning for "hg log"
-            self.ui.write(_("bookmark:    %s\n") % bookmark,
-                    label='log.bookmark')
-        for tag in self.repo.nodetags(changenode):
-            # i18n: column positioning for "hg log"
-            self.ui.write(_("tag:         %s\n") % tag,
-                          label='log.tag')
+
+        for name, ns in self.repo.names.iteritems():
+            # branches has special logic already handled above, so here we just
+            # skip it
+            if name == 'branches':
+                continue
+            # we will use the templatename as the color name since those two
+            # should be the same
+            for name in ns.names(self.repo, changenode):
+                # i18n: column positioning for "hg log"
+                name = _(("%s:" % ns.logname).ljust(13) + "%s\n") % name
+                self.ui.write("%s" % name, label='log.%s' % ns.colorname)
         if self.ui.debugflag:
             # i18n: column positioning for "hg log"
             self.ui.write(_("phase:       %s\n") % _(ctx.phasestr()),
@@ -991,7 +997,7 @@
         if matchfn:
             stat = self.diffopts.get('stat')
             diff = self.diffopts.get('patch')
-            diffopts = patch.diffopts(self.ui, self.diffopts)
+            diffopts = patch.diffallopts(self.ui, self.diffopts)
             prev = self.repo.changelog.parents(node)[0]
             if stat:
                 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
@@ -1093,7 +1099,7 @@
         if matchfn:
             stat = self.diffopts.get('stat')
             diff = self.diffopts.get('patch')
-            diffopts = patch.diffopts(self.ui, self.diffopts)
+            diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
             node, prev = ctx.node(), ctx.p1().node()
             if stat:
                 self.ui.pushbuffer()
@@ -1785,8 +1791,8 @@
         # If we're forced to take the slowpath it means we're following
         # at least one pattern/directory, so don't bother with rename tracking.
         if follow and not match.always() and not slowpath:
-            # _makelogfilematcher expects its files argument to be relative to
-            # the repo root, so use match.files(), not pats.
+            # _makefollowlogfilematcher expects its files argument to be
+            # relative to the repo root, so use match.files(), not pats.
             filematcher = _makefollowlogfilematcher(repo, match.files(),
                                                     followfirst)
         else:
@@ -1973,7 +1979,7 @@
         nodes = nodes[:limit]
     return graphmod.nodes(repo, nodes)
 
-def add(ui, repo, match, dryrun, listsubrepos, prefix, explicitonly):
+def add(ui, repo, match, prefix, explicitonly, **opts):
     join = lambda f: os.path.join(prefix, f)
     bad = []
     oldbad = match.bad
@@ -1984,30 +1990,28 @@
     abort, warn = scmutil.checkportabilityalert(ui)
     if abort or warn:
         cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
-    for f in repo.walk(match):
+    for f in wctx.walk(match):
         exact = match.exact(f)
-        if exact or not explicitonly and f not in repo.dirstate:
+        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
             if cca:
                 cca(f)
             names.append(f)
             if ui.verbose or not exact:
-                ui.status(_('adding %s\n') % match.rel(join(f)))
+                ui.status(_('adding %s\n') % match.rel(f))
 
     for subpath in sorted(wctx.substate):
         sub = wctx.sub(subpath)
         try:
             submatch = matchmod.narrowmatcher(subpath, match)
-            if listsubrepos:
-                bad.extend(sub.add(ui, submatch, dryrun, listsubrepos, prefix,
-                                   False))
+            if opts.get('subrepos'):
+                bad.extend(sub.add(ui, submatch, prefix, False, **opts))
             else:
-                bad.extend(sub.add(ui, submatch, dryrun, listsubrepos, prefix,
-                                   True))
+                bad.extend(sub.add(ui, submatch, prefix, True, **opts))
         except error.LookupError:
             ui.status(_("skipping missing subrepository: %s\n")
                            % join(subpath))
 
-    if not dryrun:
+    if not opts.get('dry_run'):
         rejected = wctx.add(names, prefix)
         bad.extend(f for f in rejected if f in match.files())
     return bad
@@ -2028,7 +2032,7 @@
         sub = wctx.sub(subpath)
         try:
             submatch = matchmod.narrowmatcher(subpath, match)
-            subbad, subforgot = sub.forget(ui, submatch, prefix)
+            subbad, subforgot = sub.forget(submatch, prefix)
             bad.extend([subpath + '/' + f for f in subbad])
             forgot.extend([subpath + '/' + f for f in subforgot])
         except error.LookupError:
@@ -2037,23 +2041,106 @@
 
     if not explicitonly:
         for f in match.files():
-            if f not in repo.dirstate and not os.path.isdir(match.rel(join(f))):
+            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                 if f not in forgot:
-                    if os.path.exists(match.rel(join(f))):
+                    if repo.wvfs.exists(f):
                         ui.warn(_('not removing %s: '
                                   'file is already untracked\n')
-                                % match.rel(join(f)))
+                                % match.rel(f))
                     bad.append(f)
 
     for f in forget:
         if ui.verbose or not match.exact(f):
-            ui.status(_('removing %s\n') % match.rel(join(f)))
+            ui.status(_('removing %s\n') % match.rel(f))
 
     rejected = wctx.forget(forget, prefix)
     bad.extend(f for f in rejected if f in match.files())
-    forgot.extend(forget)
+    forgot.extend(f for f in forget if f not in rejected)
     return bad, forgot
 
+def remove(ui, repo, m, prefix, after, force, subrepos):
+    join = lambda f: os.path.join(prefix, f)
+    ret = 0
+    s = repo.status(match=m, clean=True)
+    modified, added, deleted, clean = s[0], s[1], s[3], s[6]
+
+    wctx = repo[None]
+
+    for subpath in sorted(wctx.substate):
+        def matchessubrepo(matcher, subpath):
+            if matcher.exact(subpath):
+                return True
+            for f in matcher.files():
+                if f.startswith(subpath):
+                    return True
+            return False
+
+        if subrepos or matchessubrepo(m, subpath):
+            sub = wctx.sub(subpath)
+            try:
+                submatch = matchmod.narrowmatcher(subpath, m)
+                if sub.removefiles(submatch, prefix, after, force, subrepos):
+                    ret = 1
+            except error.LookupError:
+                ui.status(_("skipping missing subrepository: %s\n")
+                               % join(subpath))
+
+    # warn about failure to delete explicit files/dirs
+    for f in m.files():
+        def insubrepo():
+            for subpath in wctx.substate:
+                if f.startswith(subpath):
+                    return True
+            return False
+
+        if f in repo.dirstate or f in wctx.dirs() or f == '.' or insubrepo():
+            continue
+
+        if repo.wvfs.exists(f):
+            if repo.wvfs.isdir(f):
+                ui.warn(_('not removing %s: no tracked files\n')
+                        % m.rel(f))
+            else:
+                ui.warn(_('not removing %s: file is untracked\n')
+                        % m.rel(f))
+        # missing files will generate a warning elsewhere
+        ret = 1
+
+    if force:
+        list = modified + deleted + clean + added
+    elif after:
+        list = deleted
+        for f in modified + added + clean:
+            ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
+            ret = 1
+    else:
+        list = deleted + clean
+        for f in modified:
+            ui.warn(_('not removing %s: file is modified (use -f'
+                      ' to force removal)\n') % m.rel(f))
+            ret = 1
+        for f in added:
+            ui.warn(_('not removing %s: file has been marked for add'
+                      ' (use forget to undo)\n') % m.rel(f))
+            ret = 1
+
+    for f in sorted(list):
+        if ui.verbose or not m.exact(f):
+            ui.status(_('removing %s\n') % m.rel(f))
+
+    wlock = repo.wlock()
+    try:
+        if not after:
+            for f in list:
+                if f in added:
+                    continue # we never unlink added files on remove
+                util.unlinkpath(repo.wjoin(f), ignoremissing=True)
+        repo[None].forget(list)
+    finally:
+        wlock.release()
+
+    return ret
+
 def cat(ui, repo, ctx, matcher, prefix, **opts):
     err = 1
 
@@ -2098,7 +2185,7 @@
         try:
             submatch = matchmod.narrowmatcher(subpath, matcher)
 
-            if not sub.cat(ui, submatch, os.path.join(prefix, sub._path),
+            if not sub.cat(submatch, os.path.join(prefix, sub._path),
                            **opts):
                 err = 0
         except error.RepoLookupError:
@@ -2113,14 +2200,16 @@
     if date:
         opts['date'] = util.parsedate(date)
     message = logmessage(ui, opts)
+    matcher = scmutil.match(repo[None], pats, opts)
 
     # extract addremove carefully -- this function can be called from a command
     # that doesn't support addremove
     if opts.get('addremove'):
-        scmutil.addremove(repo, pats, opts)
-
-    return commitfunc(ui, repo, message,
-                      scmutil.match(repo[None], pats, opts), opts)
+        if scmutil.addremove(repo, matcher, "", opts) != 0:
+            raise util.Abort(
+                _("failed to mark all new/missing files as added/removed"))
+
+    return commitfunc(ui, repo, message, matcher, opts)
 
 def amend(ui, repo, commitfunc, old, extra, pats, opts):
     # amend will reuse the existing user if not specified, but the obsolete
@@ -2508,13 +2597,13 @@
 
             m = scmutil.matchfiles(repo, names)
 
-        modified = set(changes[0])
-        added    = set(changes[1])
-        removed  = set(changes[2])
-        _deleted = set(changes[3])
-        unknown  = set(changes[4])
-        unknown.update(changes[5])
-        clean    = set(changes[6])
+        modified = set(changes.modified)
+        added    = set(changes.added)
+        removed  = set(changes.removed)
+        _deleted = set(changes.deleted)
+        unknown  = set(changes.unknown)
+        unknown.update(changes.ignored)
+        clean    = set(changes.clean)
         modadded = set()
 
         # split between files known in target manifest and the others
@@ -2524,11 +2613,11 @@
         deladded = _deleted - smf
         deleted = _deleted - deladded
 
-        # We need to account for the state of file in the dirstate
+        # We need to account for the state of the file in the dirstate.
         #
-        # Even, when we revert agains something else than parent. this will
+        # Even when we revert against something other than the parent, this will
         # slightly alter the behavior of revert (doing back up or not, delete
-        # or just forget etc)
+        # or just forget etc).
         if parent == node:
             dsmodified = modified
             dsadded = added
@@ -2538,9 +2627,9 @@
             modified, added, removed = set(), set(), set()
         else:
             changes = repo.status(node1=parent, match=m)
-            dsmodified = set(changes[0])
-            dsadded    = set(changes[1])
-            dsremoved  = set(changes[2])
+            dsmodified = set(changes.modified)
+            dsadded    = set(changes.added)
+            dsremoved  = set(changes.removed)
             # store all local modifications, useful later for rename detection
             localchanges = dsmodified | dsadded
 
@@ -2716,7 +2805,7 @@
             if targetsubs:
                 # Revert the subrepos on the revert list
                 for sub in targetsubs:
-                    ctx.sub(sub).revert(ui, ctx.substate[sub], *pats, **opts)
+                    ctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
     finally:
         wlock.release()
 
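For reference, the file-selection rules of the new cmdutil.remove() above boil down to three modes. The helper below is only an illustrative sketch; its name and plain-list arguments are assumptions, not Mercurial API:

# illustrative sketch of the --force / --after / default selection used by
# cmdutil.remove(); not Mercurial code, just the decision table as a function
def select_removals(modified, added, deleted, clean, force=False, after=False):
    warnings = []
    if force:
        # --force: remove everything tracked, whatever its current state
        return modified + deleted + clean + added, warnings
    if after:
        # --after: only record files already deleted from the working directory
        for f in modified + added + clean:
            warnings.append('not removing %s: file still exists' % f)
        return deleted, warnings
    # default: refuse modified and added files, take deleted and clean ones
    for f in modified:
        warnings.append('not removing %s: file is modified (use -f'
                        ' to force removal)' % f)
    for f in added:
        warnings.append('not removing %s: file has been marked for add'
                        ' (use forget to undo)' % f)
    return deleted + clean, warnings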
--- a/mercurial/commands.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/commands.py	Sat Jan 17 18:28:30 2015 -0800
@@ -21,7 +21,7 @@
 import dagparser, context, simplemerge, graphmod, copies
 import random
 import setdiscovery, treediscovery, dagutil, pvec, localrepo
-import phases, obsolete, exchange
+import phases, obsolete, exchange, bundle2
 import ui as uimod
 
 table = {}
@@ -141,6 +141,7 @@
     ]
 
 diffopts2 = [
+    ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
     ('p', 'show-function', None, _('show which function each change is in')),
     ('', 'reverse', None, _('produce a diff that undoes the changes')),
     ] + diffwsopts + [
@@ -198,12 +199,11 @@
     """
 
     m = scmutil.match(repo[None], pats, opts)
-    rejected = cmdutil.add(ui, repo, m, opts.get('dry_run'),
-                           opts.get('subrepos'), prefix="", explicitonly=False)
+    rejected = cmdutil.add(ui, repo, m, "", False, **opts)
     return rejected and 1 or 0
 
 @command('addremove',
-    similarityopts + walkopts + dryrunopts,
+    similarityopts + subrepoopts + walkopts + dryrunopts,
     _('[OPTION]... [FILE]...'),
     inferrepo=True)
 def addremove(ui, repo, *pats, **opts):
@@ -234,7 +234,8 @@
         raise util.Abort(_('similarity must be a number'))
     if sim < 0 or sim > 100:
         raise util.Abort(_('similarity must be between 0 and 100'))
-    return scmutil.addremove(repo, pats, opts, similarity=sim / 100.0)
+    matcher = scmutil.match(repo[None], pats, opts)
+    return scmutil.addremove(repo, matcher, "", opts, similarity=sim / 100.0)
 
 @command('^annotate|blame',
     [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
@@ -315,7 +316,8 @@
     m = scmutil.match(ctx, pats, opts)
     m.bad = bad
     follow = not opts.get('no_follow')
-    diffopts = patch.diffopts(ui, opts, section='annotate')
+    diffopts = patch.difffeatureopts(ui, opts, section='annotate',
+                                     whitespace=True)
     for abs in ctx.walk(m):
         fctx = ctx[abs]
         if not opts.get('text') and util.binary(fctx.data()):
@@ -422,13 +424,14 @@
 
 @command('backout',
     [('', 'merge', None, _('merge with old dirstate parent after backout')),
+    ('', 'commit', None, _('commit if no conflicts were encountered')),
     ('', 'parent', '',
      _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
     ('r', 'rev', '', _('revision to backout'), _('REV')),
     ('e', 'edit', False, _('invoke editor on commit messages')),
     ] + mergetoolopts + walkopts + commitopts + commitopts2,
     _('[OPTION]... [-r] REV'))
-def backout(ui, repo, node=None, rev=None, **opts):
+def backout(ui, repo, node=None, rev=None, commit=False, **opts):
     '''reverse effect of earlier changeset
 
     Prepare a new changeset with the effect of REV undone in the
@@ -516,11 +519,12 @@
                 if stats[3]:
                     repo.ui.status(_("use 'hg resolve' to retry unresolved "
                                      "file merges\n"))
-                else:
+                    return 1
+                elif not commit:
                     msg = _("changeset %s backed out, "
                             "don't forget to commit.\n")
                     ui.status(msg % short(node))
-                return stats[3] > 0
+                    return 0
             finally:
                 ui.setconfig('ui', 'forcemerge', '', '')
         else:
@@ -743,9 +747,7 @@
                 # update state
                 state['current'] = [node]
                 hbisect.save_state(repo, state)
-                status = util.system(command,
-                                     environ={'HG_NODE': hex(node)},
-                                     out=ui.fout)
+                status = ui.system(command, environ={'HG_NODE': hex(node)})
                 if status == 125:
                     transition = "skip"
                 elif status == 0:
@@ -1037,7 +1039,7 @@
     branch.
 
     Unless -f/--force is specified, branch will not let you set a
-    branch name that already exists, even if it's inactive.
+    branch name that already exists.
 
     Use -C/--clean to reset the working directory branch to that of
     the parent of the working directory, negating a previous branch
@@ -1077,7 +1079,8 @@
         wlock.release()
 
 @command('branches',
-    [('a', 'active', False, _('show only branches that have unmerged heads')),
+    [('a', 'active', False,
+      _('show only branches that have unmerged heads (DEPRECATED)')),
      ('c', 'closed', False, _('show normal and closed branches')),
     ] + formatteropts,
     _('[-ac]'))
@@ -1088,9 +1091,6 @@
     inactive. If -c/--closed is specified, also list branches which have
     been marked closed (see :hg:`commit --close-branch`).
 
-    If -a/--active is specified, only show active branches. A branch
-    is considered active if it contains repository heads.
-
     Use the command :hg:`update` to switch to an existing branch.
 
     Returns 0.
@@ -1181,7 +1181,10 @@
         revs = scmutil.revrange(repo, opts['rev'])
 
     bundletype = opts.get('type', 'bzip2').lower()
-    btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
+    btypes = {'none': 'HG10UN',
+              'bzip2': 'HG10BZ',
+              'gzip': 'HG10GZ',
+              'bundle2': 'HG2Y'}
     bundletype = btypes.get(bundletype)
     if bundletype not in changegroup.bundletypes:
         raise util.Abort(_('unknown bundle type specified with --type'))
@@ -1217,7 +1220,7 @@
         scmutil.nochangesfound(ui, repo, outgoing and outgoing.excluded)
         return 1
 
-    changegroup.writebundle(cg, fname, bundletype)
+    changegroup.writebundle(ui, cg, fname, bundletype)
 
 @command('cat',
     [('o', 'output', '',
@@ -1573,9 +1576,8 @@
             fp.close()
 
         editor = ui.geteditor()
-        util.system("%s \"%s\"" % (editor, f),
-                    onerr=util.Abort, errprefix=_("edit failed"),
-                    out=ui.fout)
+        ui.system("%s \"%s\"" % (editor, f),
+                  onerr=util.Abort, errprefix=_("edit failed"))
         return
 
     for f in scmutil.rcpath():
@@ -1803,7 +1805,7 @@
         tr.close()
 
         if tags:
-            repo.opener.write("localtags", "".join(tags))
+            repo.vfs.write("localtags", "".join(tags))
     finally:
         ui.progress(_('building'), None)
         release(tr, lock)
@@ -1817,6 +1819,8 @@
     f = hg.openpath(ui, bundlepath)
     try:
         gen = exchange.readbundle(ui, f, bundlepath)
+        if isinstance(gen, bundle2.unbundle20):
+            return _debugbundle2(ui, gen, all=all, **opts)
         if all:
             ui.write(("format: id, p1, p2, cset, delta base, len(delta)\n"))
 
@@ -1849,6 +1853,8 @@
                 fname = chunkdata['filename']
                 showchunks(fname)
         else:
+            if isinstance(gen, bundle2.unbundle20):
+                raise util.Abort(_('use debugbundle2 for this file'))
             chunkdata = gen.changelogheader()
             chain = None
             while True:
@@ -1861,6 +1867,26 @@
     finally:
         f.close()
 
+def _debugbundle2(ui, gen, **opts):
+    """lists the contents of a bundle2"""
+    if not isinstance(gen, bundle2.unbundle20):
+        raise util.Abort(_('not a bundle2 file'))
+    ui.write(('Stream params: %s\n' % repr(gen.params)))
+    for part in gen.iterparts():
+        ui.write('%s -- %r\n' % (part.type, repr(part.params)))
+        if part.type == 'b2x:changegroup':
+            version = part.params.get('version', '01')
+            cg = changegroup.packermap[version][1](part, 'UN')
+            chunkdata = cg.changelogheader()
+            chain = None
+            while True:
+                chunkdata = cg.deltachunk(chain)
+                if not chunkdata:
+                    break
+                node = chunkdata['node']
+                ui.write("    %s\n" % hex(node))
+                chain = node
+
 @command('debugcheckstate', [], '')
 def debugcheckstate(ui, repo):
     """validate the correctness of the current dirstate"""
@@ -2138,11 +2164,14 @@
     bundle = repo.getbundle('debug', **args)
 
     bundletype = opts.get('type', 'bzip2').lower()
-    btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
+    btypes = {'none': 'HG10UN',
+              'bzip2': 'HG10BZ',
+              'gzip': 'HG10GZ',
+              'bundle2': 'HG2Y'}
     bundletype = btypes.get(bundletype)
     if bundletype not in changegroup.bundletypes:
         raise util.Abort(_('unknown bundle type specified with --type'))
-    changegroup.writebundle(bundle, bundlepath, bundletype)
+    changegroup.writebundle(ui, bundle, bundlepath, bundletype)
 
 @command('debugignore', [], '')
 def debugignore(ui, repo, *values, **opts):
@@ -2173,13 +2202,24 @@
     else:
         basehdr = '  base'
 
+    if ui.debugflag:
+        shortfn = hex
+    else:
+        shortfn = short
+
+    # There might not be anything in r, so have a sane default
+    idlen = 12
+    for i in r:
+        idlen = len(shortfn(r.node(i)))
+        break
+
     if format == 0:
         ui.write("   rev    offset  length " + basehdr + " linkrev"
-                 " nodeid       p1           p2\n")
+                 " %s %s p2\n" % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
     elif format == 1:
         ui.write("   rev flag   offset   length"
                  "     size " + basehdr + "   link     p1     p2"
-                 "       nodeid\n")
+                 " %s\n" % "nodeid".rjust(idlen))
 
     for i in r:
         node = r.node(i)
@@ -2194,12 +2234,12 @@
                 pp = [nullid, nullid]
             ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                     i, r.start(i), r.length(i), base, r.linkrev(i),
-                    short(node), short(pp[0]), short(pp[1])))
+                    shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
         elif format == 1:
             pr = r.parentrevs(i)
             ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                     i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
-                    base, r.linkrev(i), pr[0], pr[1], short(node)))
+                    base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
 
 @command('debugindexdot', [], _('FILE'), optionalrepo=True)
 def debugindexdot(ui, repo, file_):
@@ -2333,18 +2373,26 @@
 
 @command('debuglabelcomplete', [], _('LABEL...'))
 def debuglabelcomplete(ui, repo, *args):
-    '''complete "labels" - tags, open branch names, bookmark names'''
-
-    labels = set()
-    labels.update(t[0] for t in repo.tagslist())
-    labels.update(repo._bookmarks.keys())
-    labels.update(tag for (tag, heads, tip, closed)
-                  in repo.branchmap().iterbranches() if not closed)
+    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
+    debugnamecomplete(ui, repo, *args)
+
+@command('debugnamecomplete', [], _('NAME...'))
+def debugnamecomplete(ui, repo, *args):
+    '''complete "names" - tags, open branch names, bookmark names'''
+
+    names = set()
+    # since we previously only listed open branches, we will handle that
+    # specially (after this for loop)
+    for name, ns in repo.names.iteritems():
+        if name != 'branches':
+            names.update(ns.listnames(repo))
+    names.update(tag for (tag, heads, tip, closed)
+                 in repo.branchmap().iterbranches() if not closed)
     completions = set()
     if not args:
         args = ['']
     for a in args:
-        completions.update(l for l in labels if l.startswith(a))
+        completions.update(n for n in names if n.startswith(a))
     ui.write('\n'.join(sorted(completions)))
     ui.write('\n')
 
@@ -2653,22 +2701,13 @@
                  " rawsize totalsize compression heads chainlen\n")
         ts = 0
         heads = set()
-        rindex = r.index
-
-        def chainbaseandlen(rev):
-            clen = 0
-            base = rindex[rev][3]
-            while base != rev:
-                clen += 1
-                rev = base
-                base = rindex[rev][3]
-            return base, clen
 
         for rev in xrange(numrevs):
             dbase = r.deltaparent(rev)
             if dbase == -1:
                 dbase = rev
-            cbase, clen = chainbaseandlen(rev)
+            cbase = r.chainbase(rev)
+            clen = r.chainlen(rev)
             p1, p2 = r.parentrevs(rev)
             rs = r.rawsize(rev)
             ts = ts + rs
@@ -2838,6 +2877,10 @@
         newtree = revset.findaliases(ui, tree)
         if newtree != tree:
             ui.note(revset.prettyformat(newtree), "\n")
+        tree = newtree
+        newtree = revset.foldconcat(tree)
+        if newtree != tree:
+            ui.note(revset.prettyformat(newtree), "\n")
         if opts["optimize"]:
             weight, optimizedtree = revset.optimize(newtree, True)
             ui.note("* optimized:\n", revset.prettyformat(optimizedtree), "\n")
@@ -2873,23 +2916,18 @@
 def debugstate(ui, repo, nodates=None, datesort=None):
     """show the contents of the current dirstate"""
     timestr = ""
-    showdate = not nodates
     if datesort:
         keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
     else:
         keyfunc = None # sort by filename
     for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
-        if showdate:
-            if ent[3] == -1:
-                # Pad or slice to locale representation
-                locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ",
-                                               time.localtime(0)))
-                timestr = 'unset'
-                timestr = (timestr[:locale_len] +
-                           ' ' * (locale_len - len(timestr)))
-            else:
-                timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
-                                        time.localtime(ent[3]))
+        if ent[3] == -1:
+            timestr = 'unset               '
+        elif nodates:
+            timestr = 'set                 '
+        else:
+            timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
+                                    time.localtime(ent[3]))
         if ent[1] & 020000:
             mode = 'lnk'
         else:
@@ -3083,7 +3121,7 @@
     if reverse:
         node1, node2 = node2, node1
 
-    diffopts = patch.diffopts(ui, opts)
+    diffopts = patch.diffallopts(ui, opts)
     m = scmutil.match(repo[node2], pats, opts)
     cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
                            listsubrepos=opts.get('subrepos'))
@@ -3167,7 +3205,7 @@
         ui.note(_('exporting patch:\n'))
     cmdutil.export(repo, revs, template=opts.get('output'),
                  switch_parent=opts.get('switch_parent'),
-                 opts=patch.diffopts(ui, opts))
+                 opts=patch.diffallopts(ui, opts))
 
 @command('files',
     [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
@@ -3368,7 +3406,7 @@
             raise util.Abort(_("can't specify --continue and revisions"))
         # read in unfinished revisions
         try:
-            nodes = repo.opener.read('graftstate').splitlines()
+            nodes = repo.vfs.read('graftstate').splitlines()
             revs = [repo[node].rev() for node in nodes]
         except IOError, inst:
             if inst.errno != errno.ENOENT:
@@ -3406,7 +3444,8 @@
         # don't mutate while iterating, create a copy
         for rev in list(revs):
             if rev in ancestors:
-                ui.warn(_('skipping ancestor revision %s\n') % rev)
+                ui.warn(_('skipping ancestor revision %d:%s\n') %
+                        (rev, repo[rev]))
                 # XXX remove on list is slow
                 revs.remove(rev)
         if not revs:
@@ -3432,23 +3471,25 @@
                 except error.RepoLookupError:
                     r = None
                 if r in revs:
-                    ui.warn(_('skipping revision %s (already grafted to %s)\n')
-                            % (r, rev))
+                    ui.warn(_('skipping revision %d:%s '
+                              '(already grafted to %d:%s)\n')
+                            % (r, repo[r], rev, ctx))
                     revs.remove(r)
                 elif ids[n] in revs:
                     if r is None:
-                        ui.warn(_('skipping already grafted revision %s '
-                                  '(%s also has unknown origin %s)\n')
-                                % (ids[n], rev, n))
+                        ui.warn(_('skipping already grafted revision %d:%s '
+                                  '(%d:%s also has unknown origin %s)\n')
+                                % (ids[n], repo[ids[n]], rev, ctx, n[:12]))
                     else:
-                        ui.warn(_('skipping already grafted revision %s '
-                                  '(%s also has origin %d)\n')
-                                % (ids[n], rev, r))
+                        ui.warn(_('skipping already grafted revision %d:%s '
+                                  '(%d:%s also has origin %d:%s)\n')
+                                % (ids[n], repo[ids[n]], rev, ctx, r, n[:12]))
                     revs.remove(ids[n])
             elif ctx.hex() in ids:
                 r = ids[ctx.hex()]
-                ui.warn(_('skipping already grafted revision %s '
-                                '(was grafted from %d)\n') % (r, rev))
+                ui.warn(_('skipping already grafted revision %d:%s '
+                          '(was grafted from %d:%s)\n') %
+                        (r, repo[r], rev, ctx))
                 revs.remove(r)
         if not revs:
             return -1
@@ -3456,8 +3497,12 @@
     wlock = repo.wlock()
     try:
         for pos, ctx in enumerate(repo.set("%ld", revs)):
-
-            ui.status(_('grafting revision %s\n') % ctx.rev())
+            desc = '%d:%s "%s"' % (ctx.rev(), ctx,
+                                   ctx.description().split('\n', 1)[0])
+            names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
+            if names:
+                desc += ' (%s)' % ' '.join(names)
+            ui.status(_('grafting %s\n') % desc)
             if opts.get('dry_run'):
                 continue
 
@@ -3490,7 +3535,7 @@
                 if stats and stats[3] > 0:
                     # write out state for --continue
                     nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
-                    repo.opener.write('graftstate', ''.join(nodelines))
+                    repo.vfs.write('graftstate', ''.join(nodelines))
                     raise util.Abort(
                         _("unresolved conflicts, can't continue"),
                         hint=_('use hg resolve and hg graft --continue'))
@@ -3501,7 +3546,9 @@
             node = repo.commit(text=message, user=user,
                         date=date, extra=extra, editor=editor)
             if node is None:
-                ui.status(_('graft for revision %s is empty\n') % ctx.rev())
+                ui.warn(
+                    _('note: graft of %d:%s created no changes to commit\n') %
+                    (ctx.rev(), ctx))
     finally:
         wlock.release()
 
@@ -5097,7 +5144,7 @@
     [('A', 'after', None, _('record delete for missing files')),
     ('f', 'force', None,
      _('remove (and delete) file even if added or modified')),
-    ] + walkopts,
+    ] + subrepoopts + walkopts,
     _('[OPTION]... FILE...'),
     inferrepo=True)
 def remove(ui, repo, *pats, **opts):
@@ -5137,62 +5184,13 @@
     Returns 0 on success, 1 if any warnings encountered.
     """
 
-    ret = 0
     after, force = opts.get('after'), opts.get('force')
     if not pats and not after:
         raise util.Abort(_('no files specified'))
 
     m = scmutil.match(repo[None], pats, opts)
-    s = repo.status(match=m, clean=True)
-    modified, added, deleted, clean = s[0], s[1], s[3], s[6]
-
-    # warn about failure to delete explicit files/dirs
-    wctx = repo[None]
-    for f in m.files():
-        if f in repo.dirstate or f in wctx.dirs():
-            continue
-        if os.path.exists(m.rel(f)):
-            if os.path.isdir(m.rel(f)):
-                ui.warn(_('not removing %s: no tracked files\n') % m.rel(f))
-            else:
-                ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
-        # missing files will generate a warning elsewhere
-        ret = 1
-
-    if force:
-        list = modified + deleted + clean + added
-    elif after:
-        list = deleted
-        for f in modified + added + clean:
-            ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
-            ret = 1
-    else:
-        list = deleted + clean
-        for f in modified:
-            ui.warn(_('not removing %s: file is modified (use -f'
-                      ' to force removal)\n') % m.rel(f))
-            ret = 1
-        for f in added:
-            ui.warn(_('not removing %s: file has been marked for add'
-                      ' (use forget to undo)\n') % m.rel(f))
-            ret = 1
-
-    for f in sorted(list):
-        if ui.verbose or not m.exact(f):
-            ui.status(_('removing %s\n') % m.rel(f))
-
-    wlock = repo.wlock()
-    try:
-        if not after:
-            for f in list:
-                if f in added:
-                    continue # we never unlink added files on remove
-                util.unlinkpath(repo.wjoin(f), ignoremissing=True)
-        repo[None].forget(list)
-    finally:
-        wlock.release()
-
-    return ret
+    subrepos = opts.get('subrepos')
+    return cmdutil.remove(ui, repo, m, "", after, force, subrepos)
 
 @command('rename|move|mv',
     [('A', 'after', None, _('record a rename that has already occurred')),
@@ -6150,8 +6148,20 @@
         for fname in fnames:
             f = hg.openpath(ui, fname)
             gen = exchange.readbundle(ui, f, fname)
-            modheads = changegroup.addchangegroup(repo, gen, 'unbundle',
-                                                  'bundle:' + fname)
+            if isinstance(gen, bundle2.unbundle20):
+                tr = repo.transaction('unbundle')
+                try:
+                    op = bundle2.processbundle(repo, gen, lambda: tr)
+                    tr.close()
+                finally:
+                    if tr:
+                        tr.release()
+                changes = [r.get('result', 0)
+                           for r in op.records['changegroup']]
+                modheads = changegroup.combineresults(changes)
+            else:
+                modheads = changegroup.addchangegroup(repo, gen, 'unbundle',
+                                                      'bundle:' + fname)
     finally:
         lock.release()
 
@@ -6245,7 +6255,6 @@
             raise util.Abort(_("uncommitted changes"))
         if rev is None:
             rev = repo[repo[None].branch()].rev()
-        mergemod._checkunknown(repo, repo[None], repo[rev])
 
     repo.ui.setconfig('ui', 'forcemerge', tool, 'update')
 
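The unbundle and debugbundle changes above both branch on whether exchange.readbundle() returned a bundle2 stream. A minimal sketch of that detect-and-list pattern, reusing only the calls visible in the patch (hg.openpath, exchange.readbundle, bundle2.unbundle20, iterparts); the helper name is invented:

# minimal sketch of detecting and listing a bundle2 file; error handling and
# the legacy changegroup walk are omitted
from mercurial import hg, exchange, bundle2

def describebundle(ui, path):
    f = hg.openpath(ui, path)
    try:
        gen = exchange.readbundle(ui, f, path)
        if isinstance(gen, bundle2.unbundle20):
            ui.write('stream params: %r\n' % gen.params)
            for part in gen.iterparts():
                ui.write('%s -- %r\n' % (part.type, part.params))
        else:
            ui.write('legacy changegroup bundle\n')
    finally:
        f.close()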
--- a/mercurial/commandserver.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/commandserver.py	Sat Jan 17 18:28:30 2015 -0800
@@ -248,15 +248,47 @@
 
         return 0
 
+def _protectio(ui):
+    """ duplicates streams and redirect original to null if ui uses stdio """
+    ui.flush()
+    newfiles = []
+    nullfd = os.open(os.devnull, os.O_RDWR)
+    for f, sysf, mode in [(ui.fin, sys.stdin, 'rb'),
+                          (ui.fout, sys.stdout, 'wb')]:
+        if f is sysf:
+            newfd = os.dup(f.fileno())
+            os.dup2(nullfd, f.fileno())
+            f = os.fdopen(newfd, mode)
+        newfiles.append(f)
+    os.close(nullfd)
+    return tuple(newfiles)
+
+def _restoreio(ui, fin, fout):
+    """ restores streams from duplicated ones """
+    ui.flush()
+    for f, uif in [(fin, ui.fin), (fout, ui.fout)]:
+        if f is not uif:
+            os.dup2(f.fileno(), uif.fileno())
+            f.close()
+
 class pipeservice(object):
     def __init__(self, ui, repo, opts):
-        self.server = server(ui, repo, sys.stdin, sys.stdout)
+        self.ui = ui
+        self.repo = repo
 
     def init(self):
         pass
 
     def run(self):
-        return self.server.serve()
+        ui = self.ui
+        # redirect stdio to the null device so that broken extensions or
+        # in-process hooks never corrupt the channel protocol.
+        fin, fout = _protectio(ui)
+        try:
+            sv = server(ui, self.repo, fin, fout)
+            return sv.serve()
+        finally:
+            _restoreio(ui, fin, fout)
 
 class _requesthandler(SocketServer.StreamRequestHandler):
     def handle(self):
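The _protectio/_restoreio pair above relies on ordinary file-descriptor duplication. A standalone, stdlib-only sketch of the same idea (plain Python, not Mercurial code; the function names are invented here):

import os, sys

def protect_stdout():
    # flush any pending Python-level output first
    sys.stdout.flush()
    saved = os.dup(sys.stdout.fileno())      # private duplicate of the real stdout
    nullfd = os.open(os.devnull, os.O_WRONLY)
    os.dup2(nullfd, sys.stdout.fileno())     # fd 1 now points at /dev/null
    os.close(nullfd)
    return os.fdopen(saved, 'w')             # write real output through this object

def restore_stdout(realout):
    realout.flush()
    os.dup2(realout.fileno(), sys.stdout.fileno())  # point fd 1 back at the real target
    realout.close()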
--- a/mercurial/context.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/context.py	Sat Jan 17 18:28:30 2015 -0800
@@ -22,6 +22,41 @@
 # dirty in the working copy.
 _newnode = '!' * 21
 
+def _adjustlinkrev(repo, path, filelog, fnode, srcrev, inclusive=False):
+    """return the first ancestor of <srcrev> introducting <fnode>
+
+    If the linkrev of the file revision does not point to an ancestor of
+    srcrev, we'll walk down the ancestors until we find one introducing this
+    file revision.
+
+    :repo: a localrepository object (used to access changelog and manifest)
+    :path: the file path
+    :fnode: the nodeid of the file revision
+    :filelog: the filelog of this path
+    :srcrev: the changeset revision we search ancestors from
+    :inclusive: if true, the src revision will also be checked
+    """
+    cl = repo.unfiltered().changelog
+    ma = repo.manifest
+    # fetch the linkrev
+    fr = filelog.rev(fnode)
+    lkr = filelog.linkrev(fr)
+    # check if this linkrev is an ancestor of srcrev
+    anc = cl.ancestors([srcrev], lkr, inclusive=inclusive)
+    if lkr not in anc:
+        for a in anc:
+            ac = cl.read(a) # get changeset data (we avoid object creation).
+            if path in ac[3]: # checking the 'files' field.
+                # The file has been touched, check if the content is similar
+                # to the one we search for.
+                if fnode == ma.readfast(ac[0]).get(path):
+                    return a
+        # In theory, we should never get out of that loop without a result.
+        # But if the manifest uses a buggy file revision (not a child of the
+        # one it replaces) we could. Such a buggy situation will likely result
+        # in a crash somewhere else at some point.
+    return lkr
+
 class basectx(object):
     """A basectx object represents the common logic for its children:
     changectx: read-only context that is already present in the repo,
@@ -76,22 +111,9 @@
         object oriented way for other contexts to customize the manifest
         generation.
         """
-        if match.always():
-            return self.manifest().copy()
-
-        files = match.files()
-        if (match.matchfn == match.exact or
-            (not match.anypats() and util.all(fn in self for fn in files))):
-            return self.manifest().intersectfiles(files)
+        return self.manifest().matches(match)
 
-        mf = self.manifest().copy()
-        for fn in mf.keys():
-            if not match(fn):
-                del mf[fn]
-        return mf
-
-    def _matchstatus(self, other, s, match, listignored, listclean,
-                     listunknown):
+    def _matchstatus(self, other, match):
         """return match.always if match is none
 
         This internal method provides a way for child objects to override the
@@ -99,54 +121,57 @@
         """
         return match or matchmod.always(self._repo.root, self._repo.getcwd())
 
-    def _prestatus(self, other, s, match, listignored, listclean, listunknown):
-        """provide a hook to allow child objects to preprocess status results
-
-        For example, this allows other contexts, such as workingctx, to query
-        the dirstate before comparing the manifests.
-        """
-        # load earliest manifest first for caching reasons
-        if self.rev() < other.rev():
-            self.manifest()
-        return s
-
-    def _poststatus(self, other, s, match, listignored, listclean, listunknown):
-        """provide a hook to allow child objects to postprocess status results
-
-        For example, this allows other contexts, such as workingctx, to filter
-        suspect symlinks in the case of FAT32 and NTFS filesytems.
-        """
-        return s
-
     def _buildstatus(self, other, s, match, listignored, listclean,
                      listunknown):
         """build a status with respect to another context"""
+        # Load earliest manifest first for caching reasons. More specifically,
+        # if you have revisions 1000 and 1001, 1001 is probably stored as a
+        # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
+        # 1000 and cache it so that when you read 1001, we just need to apply a
+        # delta to what's in the cache. So that's one full reconstruction + one
+        # delta application.
+        if self.rev() is not None and self.rev() < other.rev():
+            self.manifest()
         mf1 = other._manifestmatches(match, s)
         mf2 = self._manifestmatches(match, s)
 
-        modified, added, clean = [], [], []
-        deleted, unknown, ignored = s[3], s[4], s[5]
+        modified, added = [], []
+        removed = []
+        clean = []
+        deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
         deletedset = set(deleted)
-        withflags = mf1.withflags() | mf2.withflags()
-        for fn, mf2node in mf2.iteritems():
-            if fn in mf1:
-                if (fn not in deletedset and
-                    ((fn in withflags and mf1.flags(fn) != mf2.flags(fn)) or
-                     (mf1[fn] != mf2node and
-                      (mf2node != _newnode or self[fn].cmp(other[fn]))))):
-                    modified.append(fn)
-                elif listclean:
-                    clean.append(fn)
-                del mf1[fn]
-            elif fn not in deletedset:
+        d = mf1.diff(mf2, clean=listclean)
+        for fn, value in d.iteritems():
+            if fn in deletedset:
+                continue
+            if value is None:
+                clean.append(fn)
+                continue
+            (node1, flag1), (node2, flag2) = value
+            if node1 is None:
                 added.append(fn)
-        removed = mf1.keys()
+            elif node2 is None:
+                removed.append(fn)
+            elif node2 != _newnode:
+                # The file was not a new file in mf2, so an entry
+                # from diff is really a difference.
+                modified.append(fn)
+            elif self[fn].cmp(other[fn]):
+                # node2 was newnode, but the working file doesn't
+                # match the one in mf1.
+                modified.append(fn)
+            else:
+                clean.append(fn)
+
         if removed:
             # need to filter files if they are already reported as removed
             unknown = [fn for fn in unknown if fn not in mf1]
             ignored = [fn for fn in ignored if fn not in mf1]
+            # if they're deleted, don't report them as removed
+            removed = [fn for fn in removed if fn not in deletedset]
 
-        return [modified, added, removed, deleted, unknown, ignored, clean]
+        return scmutil.status(modified, added, removed, deleted, unknown,
+                              ignored, clean)
 
     @propertycache
     def substate(self):
@@ -316,18 +341,16 @@
             reversed = True
             ctx1, ctx2 = ctx2, ctx1
 
-        r = [[], [], [], [], [], [], []]
-        match = ctx2._matchstatus(ctx1, r, match, listignored, listclean,
-                                  listunknown)
-        r = ctx2._prestatus(ctx1, r, match, listignored, listclean, listunknown)
+        match = ctx2._matchstatus(ctx1, match)
+        r = scmutil.status([], [], [], [], [], [], [])
         r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
                               listunknown)
-        r = ctx2._poststatus(ctx1, r, match, listignored, listclean,
-                             listunknown)
 
         if reversed:
-            # reverse added and removed
-            r[1], r[2] = r[2], r[1]
+            # Reverse added and removed. Clear deleted, unknown and ignored as
+            # these make no sense to reverse.
+            r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
+                               r.clean)
 
         if listsubrepos:
             for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
@@ -346,8 +369,7 @@
         for l in r:
             l.sort()
 
-        # we return a tuple to signify that this list isn't changing
-        return scmutil.status(*r)
+        return r
 
 
 def makememctx(repo, parents, text, user, date, branch, files, store,
@@ -438,18 +460,13 @@
                 except (TypeError, LookupError):
                     pass
 
-            if changeid in repo._bookmarks:
-                self._node = repo._bookmarks[changeid]
+            # lookup bookmarks through the name interface
+            try:
+                self._node = repo.names.singlenode(repo, changeid)
                 self._rev = repo.changelog.rev(self._node)
                 return
-            if changeid in repo._tagscache.tags:
-                self._node = repo._tagscache.tags[changeid]
-                self._rev = repo.changelog.rev(self._node)
-                return
-            try:
-                self._node = repo.branchtip(changeid)
-                self._rev = repo.changelog.rev(self._node)
-                return
+            except KeyError:
+                pass
             except error.FilteredRepoLookupError:
                 raise
             except error.RepoLookupError:
@@ -764,17 +781,54 @@
 
         return True
 
+    def introrev(self):
+        """return the rev of the changeset which introduced this file revision
+
+        This method is different from linkrev because it takes into account the
+        changeset the filectx was created from. It ensures the returned
+        revision is one of its ancestors. This prevents bugs from
+        'linkrev-shadowing' when a file revision is used by multiple
+        changesets.
+        """
+        lkr = self.linkrev()
+        attrs = vars(self)
+        noctx = not ('_changeid' in attrs or '_changectx' in attrs)
+        if noctx or self.rev() == lkr:
+            return self.linkrev()
+        return _adjustlinkrev(self._repo, self._path, self._filelog,
+                              self._filenode, self.rev(), inclusive=True)
+
     def parents(self):
         _path = self._path
         fl = self._filelog
-        pl = [(_path, n, fl) for n in self._filelog.parents(self._filenode)]
+        parents = self._filelog.parents(self._filenode)
+        pl = [(_path, node, fl) for node in parents if node != nullid]
 
-        r = self._filelog.renamed(self._filenode)
+        r = fl.renamed(self._filenode)
         if r:
-            pl[0] = (r[0], r[1], None)
+            # - In the simple rename case, both parents are nullid and pl is
+            # empty.
+            # - In case of merge, only one of the parents is nullid and should
+            # be replaced with the rename information. This parent is -always-
+            # the first one.
+            #
+            # As nullid parents have always been filtered out by the previous
+            # list comprehension, inserting at 0 will always result in
+            # replacing the first nullid parent with the rename information.
+            pl.insert(0, (r[0], r[1], self._repo.file(r[0])))
 
-        return [filectx(self._repo, p, fileid=n, filelog=l)
-                for p, n, l in pl if n != nullid]
+        ret = []
+        for path, fnode, l in pl:
+            if '_changeid' in vars(self) or '_changectx' in vars(self):
+                # If self is associated with a changeset (probably explicitly
+                # fed), ensure the created filectx is associated with a
+                # changeset that is an ancestor of self.changectx.
+                rev = _adjustlinkrev(self._repo, path, l, fnode, self.rev())
+                fctx = filectx(self._repo, path, fileid=fnode, filelog=l,
+                               changeid=rev)
+            else:
+                fctx = filectx(self._repo, path, fileid=fnode, filelog=l)
+            ret.append(fctx)
+        return ret
 
     def p1(self):
         return self.parents()[0]
@@ -836,10 +890,10 @@
             return pl
 
         # use linkrev to find the first changeset where self appeared
-        if self.rev() != self.linkrev():
-            base = self.filectx(self.filenode())
-        else:
-            base = self
+        base = self
+        introrev = self.introrev()
+        if self.rev() != introrev:
+            base = self.filectx(self.filenode(), changeid=introrev)
 
         # This algorithm would prefer to be recursive, but Python is a
         # bit recursion-hostile. Instead we do an iterative
@@ -926,7 +980,7 @@
     def _changectx(self):
         try:
             return changectx(self._repo, self._changeid)
-        except error.RepoLookupError:
+        except error.FilteredRepoLookupError:
             # Linkrev may point to any revision in the repository.  When the
             # repository is filtered this may lead to `filectx` trying to build
             # `changectx` for filtered revision. In such case we fallback to
@@ -945,11 +999,11 @@
             # considered when solving linkrev issue are on the table.
             return changectx(self._repo.unfiltered(), self._changeid)
 
-    def filectx(self, fileid):
+    def filectx(self, fileid, changeid=None):
         '''opens an arbitrary revision of the file without
         opening a new filelog'''
         return filectx(self._repo, self._path, fileid=fileid,
-                       filelog=self._filelog)
+                       filelog=self._filelog, changeid=changeid)
 
     def data(self):
         try:
@@ -1066,7 +1120,13 @@
 
     @propertycache
     def _manifest(self):
-        """generate a manifest corresponding to the values in self._status"""
+        """generate a manifest corresponding to the values in self._status
+
+        This reuses the file nodeid from the parent, but appends an extra
+        letter when the file has changed. Modified files get an extra 'm'
+        while added files get an extra 'a'. This is used by the manifest
+        merge to see that files are different and by the update logic to
+        avoid deleting newly added files.
+        """
 
         man1 = self._parents[0].manifest()
         man = man1.copy()
@@ -1129,12 +1189,6 @@
         return self._status.removed
     def deleted(self):
         return self._status.deleted
-    def unknown(self):
-        return self._status.unknown
-    def ignored(self):
-        return self._status.ignored
-    def clean(self):
-        return self._status.clean
     def branch(self):
         return encoding.tolocal(self._extra['branch'])
     def closesbranch(self):
@@ -1190,6 +1244,8 @@
         return sorted(self._repo.dirstate.matches(match))
 
     def ancestors(self):
+        for p in self._parents:
+            yield p
         for a in self._repo.changelog.ancestors(
             [p.rev() for p in self._parents]):
             yield changectx(self._repo, a)
@@ -1411,37 +1467,14 @@
         need to build a manifest and return what matches.
         """
         mf = self._repo['.']._manifestmatches(match, s)
-        modified, added, removed = s[0:3]
-        for f in modified + added:
+        for f in s.modified + s.added:
             mf[f] = _newnode
             mf.setflag(f, self.flags(f))
-        for f in removed:
+        for f in s.removed:
             if f in mf:
                 del mf[f]
         return mf
 
-    def _prestatus(self, other, s, match, listignored, listclean, listunknown):
-        """override the parent hook with a dirstate query
-
-        We use this prestatus hook to populate the status with information from
-        the dirstate.
-        """
-        # doesn't need to call super; if that changes, be aware that super
-        # calls self.manifest which would slow down the common case of calling
-        # status against a workingctx's parent
-        return self._dirstatestatus(match, listignored, listclean, listunknown)
-
-    def _poststatus(self, other, s, match, listignored, listclean, listunknown):
-        """override the parent hook with a filter for suspect symlinks
-
-        We use this poststatus hook to filter out symlinks that might have
-        accidentally ended up with the entire contents of the file they are
-        susposed to be linking to.
-        """
-        s[0] = self._filtersuspectsymlink(s[0])
-        self._status = scmutil.status(*s)
-        return s
-
     def _dirstatestatus(self, match=None, ignored=False, clean=False,
                         unknown=False):
         '''Gets the status from the dirstate -- internal use only.'''
@@ -1452,18 +1485,26 @@
             subrepos = sorted(self.substate)
         cmp, s = self._repo.dirstate.status(match, subrepos, listignored,
                                             listclean, listunknown)
-        modified, added, removed, deleted, unknown, ignored, clean = s
 
         # check for any possibly clean files
         if cmp:
             modified2, fixup = self._checklookup(cmp)
-            modified += modified2
+            s.modified.extend(modified2)
 
             # update dirstate for files that are actually clean
             if fixup and listclean:
-                clean += fixup
+                s.clean.extend(fixup)
 
-        return [modified, added, removed, deleted, unknown, ignored, clean]
+        if match.always():
+            # cache for performance
+            if s.unknown or s.ignored or s.clean:
+                # "_status" is cached with list*=False in the normal route
+                self._status = scmutil.status(s.modified, s.added, s.removed,
+                                              s.deleted, [], [], [])
+            else:
+                self._status = s
+
+        return s
 
     def _buildstatus(self, other, s, match, listignored, listclean,
                      listunknown):
@@ -1474,14 +1515,18 @@
         building a new manifest if self (working directory) is not comparing
         against its parent (repo['.']).
         """
+        s = self._dirstatestatus(match, listignored, listclean, listunknown)
+        # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
+        # might have accidentally ended up with the entire contents of the file
+        # they are supposed to be linking to.
+        s.modified[:] = self._filtersuspectsymlink(s.modified)
         if other != self._repo['.']:
             s = super(workingctx, self)._buildstatus(other, s, match,
                                                      listignored, listclean,
                                                      listunknown)
         return s
 
-    def _matchstatus(self, other, s, match, listignored, listclean,
-                     listunknown):
+    def _matchstatus(self, other, match):
         """override the match method with a filter for directory patterns
 
         We use inheritance to customize the match.bad method only in cases of
@@ -1492,8 +1537,7 @@
         just use the default match object sent to us.
         """
         superself = super(workingctx, self)
-        match = superself._matchstatus(other, s, match, listignored, listclean,
-                                       listunknown)
+        match = superself._matchstatus(other, match)
         if other != self._repo['.']:
             def bad(f, msg):
                 # 'f' may be a directory pattern from 'match.files()',
@@ -1504,14 +1548,6 @@
             match.bad = bad
         return match
 
-    def status(self, other='.', match=None, listignored=False,
-               listclean=False, listunknown=False, listsubrepos=False):
-        # yet to be determined: what to do if 'other' is a 'workingctx' or a
-        # 'memctx'?
-        return super(workingctx, self).status(other, match, listignored,
-                                              listclean, listunknown,
-                                              listsubrepos)
-
 class committablefilectx(basefilectx):
     """A committablefilectx provides common functionality for a file context
     that wants the ability to commit, e.g. workingfilectx or memfilectx."""
@@ -1599,6 +1635,44 @@
         """wraps repo.wwrite"""
         self._repo.wwrite(self._path, data, flags)
 
+class workingcommitctx(workingctx):
+    """A workingcommitctx object makes access to data related to
+    the revision being committed convenient.
+
+    This hides changes in the working directory, if they aren't
+    committed in this context.
+    """
+    def __init__(self, repo, changes,
+                 text="", user=None, date=None, extra=None):
+        super(workingctx, self).__init__(repo, text, user, date, extra,
+                                         changes)
+
+    def _dirstatestatus(self, match=None, ignored=False, clean=False,
+                        unknown=False):
+        """Return matched files only in ``self._status``
+
+        Uncommitted files appear "clean" via this context, even if
+        they aren't actually so in the working directory.
+        """
+        match = match or matchmod.always(self._repo.root, self._repo.getcwd())
+        if clean:
+            clean = [f for f in self._manifest if f not in self._changedset]
+        else:
+            clean = []
+        return scmutil.status([f for f in self._status.modified if match(f)],
+                              [f for f in self._status.added if match(f)],
+                              [f for f in self._status.removed if match(f)],
+                              [], [], [], clean)
+
+    @propertycache
+    def _changedset(self):
+        """Return the set of files changed in this context
+        """
+        changed = set(self._status.modified)
+        changed.update(self._status.added)
+        changed.update(self._status.removed)
+        return changed
+
 class memctx(committablectx):
     """Use memctx to perform in-memory commits via localrepo.commitctx().
 
@@ -1641,8 +1715,7 @@
         p1, p2 = parents
         self._parents = [changectx(self._repo, p) for p in (p1, p2)]
         files = sorted(set(files))
-        self._status = scmutil.status(files, [], [], [], [], [], [])
-        self._filectxfn = filectxfn
+        self._files = files
         self.substate = {}
 
         # if store is not callable, wrap it in a function
@@ -1658,6 +1731,10 @@
                                   islink=fctx.islink(), isexec=fctx.isexec(),
                                   copied=copied, memctx=memctx)
             self._filectxfn = getfilectx
+        else:
+            # "util.cachefunc" reduces invocation of possibly expensive
+            # "filectxfn" for performance (e.g. converting from another VCS)
+            self._filectxfn = util.cachefunc(filectxfn)
 
         self._extra = extra and extra.copy() or {}
         if self._extra.get('branch', '') == '':
@@ -1685,7 +1762,7 @@
         pctx = self._parents[0]
         man = pctx.manifest().copy()
 
-        for f, fnode in man.iteritems():
+        for f in self._status.modified:
             p1node = nullid
             p2node = nullid
             p = pctx[f].parents() # if file isn't in pctx, check p2?
@@ -1695,13 +1772,45 @@
                     p2node = p[1].node()
             man[f] = revlog.hash(self[f].data(), p1node, p2node)
 
+        for f in self._status.added:
+            man[f] = revlog.hash(self[f].data(), nullid, nullid)
+
+        for f in self._status.removed:
+            if f in man:
+                del man[f]
+
         return man
 
+    @propertycache
+    def _status(self):
+        """Calculate exact status from ``files`` specified at construction
+        """
+        man1 = self.p1().manifest()
+        p2 = self._parents[1]
+        # "1 < len(self._parents)" can't be used for checking
+        # existence of the 2nd parent, because "memctx._parents" is
+        # explicitly initialized with a list whose length is always 2.
+        if p2.node() != nullid:
+            man2 = p2.manifest()
+            managing = lambda f: f in man1 or f in man2
+        else:
+            managing = lambda f: f in man1
+
+        modified, added, removed = [], [], []
+        for f in self._files:
+            if not managing(f):
+                added.append(f)
+            elif self[f]:
+                modified.append(f)
+            else:
+                removed.append(f)
+
+        return scmutil.status(modified, added, removed, [], [], [], [])
 
 class memfilectx(committablefilectx):
     """memfilectx represents an in-memory file to commit.
 
-    See memctx and commitablefilectx for more details.
+    See memctx and committablefilectx for more details.
     """
     def __init__(self, repo, path, data, islink=False,
                  isexec=False, copied=None, memctx=None):
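
The memctx hunks above wrap a caller-supplied filectxfn in util.cachefunc so that repeated lookups of the same file during a commit do not re-run a potentially expensive callback (e.g. when converting from another VCS). A minimal sketch of that memoisation idea, for illustration only (this is not the actual util.cachefunc):

  def cachefunc(func):
      """Memoize func on its positional arguments (illustrative only)."""
      cache = {}
      def wrapper(*args):
          if args not in cache:
              cache[args] = func(*args)
          return cache[args]
      return wrapper

  calls = []
  def expensive(path):
      calls.append(path)              # pretend this queries another VCS
      return 'data for %s' % path

  cached = cachefunc(expensive)
  cached('a'); cached('a'); cached('b')
  assert calls == ['a', 'b']          # 'a' was only computed once
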
--- a/mercurial/copies.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/copies.py	Sat Jan 17 18:28:30 2015 -0800
@@ -97,7 +97,7 @@
     # |/
     # o  0 a0
     #
-    # When findlimit is called, a and b are revs 3 and 0, so limit will be 2,
+    # When _findlimit is called, a and b are revs 3 and 0, so limit will be 2,
     # yet the filelog has the copy information in rev 1 and we will not look
     # back far enough unless we also look at the a and b as candidates.
     # This only occurs when a is a descendent of b or visa-versa.
--- a/mercurial/dagutil.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/dagutil.py	Sat Jan 17 18:28:30 2015 -0800
@@ -25,7 +25,7 @@
         self._inverse = None
 
     def nodeset(self):
-        '''set of all node idxs'''
+        '''set of all node ixs'''
         raise NotImplementedError
 
     def heads(self):
@@ -77,7 +77,7 @@
         return self._internalize(id)
 
     def internalizeall(self, ids, filterunknown=False):
-        '''return a list of (or set if given a set) of node ids'''
+        '''return a list (or set if given a set) of node ixs'''
         ixs = self._internalizeall(ids, filterunknown)
         if isinstance(ids, set):
             return set(ixs)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/default.d/mergetools.rc	Sat Jan 17 18:28:30 2015 -0800
@@ -0,0 +1,135 @@
+# Some default global settings for common merge tools
+
+[merge-tools]
+kdiff3.args=--auto --L1 base --L2 local --L3 other $base $local $other -o $output
+kdiff3.regkey=Software\KDiff3
+kdiff3.regkeyalt=Software\Wow6432Node\KDiff3
+kdiff3.regappend=\kdiff3.exe
+kdiff3.fixeol=True
+kdiff3.gui=True
+kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child
+
+gvimdiff.args=--nofork -d -g -O $local $other $base
+gvimdiff.regkey=Software\Vim\GVim
+gvimdiff.regkeyalt=Software\Wow6432Node\Vim\GVim
+gvimdiff.regname=path
+gvimdiff.priority=-9
+gvimdiff.diffargs=--nofork -d -g -O $parent $child
+
+vimdiff.args=$local $other $base -c 'redraw | echomsg "hg merge conflict, type \":cq\" to abort vimdiff"'
+vimdiff.check=changed
+vimdiff.priority=-10
+
+merge.check=conflicts
+merge.priority=-100
+
+gpyfm.gui=True
+
+meld.gui=True
+meld.args=--label='local' $local --label='merged' $base --label='other' $other -o $output
+meld.check=changed
+meld.diffargs=-a --label='$plabel1' $parent --label='$clabel' $child
+
+tkdiff.args=$local $other -a $base -o $output
+tkdiff.gui=True
+tkdiff.priority=-8
+tkdiff.diffargs=-L '$plabel1' $parent -L '$clabel' $child
+
+xxdiff.args=--show-merged-pane --exit-with-merge-status --title1 local --title2 base --title3 other --merged-filename $output --merge $local $base $other
+xxdiff.gui=True
+xxdiff.priority=-8
+xxdiff.diffargs=--title1 '$plabel1' $parent --title2 '$clabel' $child
+
+diffmerge.regkey=Software\SourceGear\SourceGear DiffMerge\
+diffmerge.regkeyalt=Software\Wow6432Node\SourceGear\SourceGear DiffMerge\
+diffmerge.regname=Location
+diffmerge.priority=-7
+diffmerge.args=-nosplash -merge -title1=local -title2=merged -title3=other $local $base $other -result=$output
+diffmerge.check=changed
+diffmerge.gui=True
+diffmerge.diffargs=--nosplash --title1='$plabel1' --title2='$clabel' $parent $child
+
+p4merge.args=$base $local $other $output
+p4merge.regkey=Software\Perforce\Environment
+p4merge.regkeyalt=Software\Wow6432Node\Perforce\Environment
+p4merge.regname=P4INSTROOT
+p4merge.regappend=\p4merge.exe
+p4merge.gui=True
+p4merge.priority=-8
+p4merge.diffargs=$parent $child
+
+p4mergeosx.executable = /Applications/p4merge.app/Contents/MacOS/p4merge
+p4mergeosx.args = $base $local $other $output
+p4mergeosx.gui = True
+p4mergeosx.priority=-8
+p4mergeosx.diffargs=$parent $child
+
+tortoisemerge.args=/base:$base /mine:$local /theirs:$other /merged:$output
+tortoisemerge.regkey=Software\TortoiseSVN
+tortoisemerge.regkeyalt=Software\Wow6432Node\TortoiseSVN
+tortoisemerge.check=changed
+tortoisemerge.gui=True
+tortoisemerge.priority=-8
+tortoisemerge.diffargs=/base:$parent /mine:$child /basename:'$plabel1' /minename:'$clabel'
+
+ecmerge.args=$base $local $other --mode=merge3 --title0=base --title1=local --title2=other --to=$output
+ecmerge.regkey=Software\Ellié Computing\Merge
+ecmerge.regkeyalt=Software\Wow6432Node\Ellié Computing\Merge
+ecmerge.gui=True
+ecmerge.diffargs=$parent $child --mode=diff2 --title1='$plabel1' --title2='$clabel'
+
+# editmerge is a small script shipped in contrib.
+# It needs this config otherwise it behaves the same as internal:local
+editmerge.args=$output
+editmerge.check=changed
+editmerge.premerge=keep
+
+filemerge.executable=/Developer/Applications/Utilities/FileMerge.app/Contents/MacOS/FileMerge
+filemerge.args=-left $other -right $local -ancestor $base -merge $output
+filemerge.gui=True
+
+; Windows version of Beyond Compare
+beyondcompare3.args=$local $other $base $output /ro /lefttitle=local /centertitle=base /righttitle=other /automerge /reviewconflicts /solo
+beyondcompare3.regkey=Software\Scooter Software\Beyond Compare 3
+beyondcompare3.regname=ExePath
+beyondcompare3.gui=True
+beyondcompare3.priority=-2
+beyondcompare3.diffargs=/lro /lefttitle='$plabel1' /righttitle='$clabel' /solo /expandall $parent $child
+
+; Linux version of Beyond Compare
+bcompare.args=$local $other $base -mergeoutput=$output -ro -lefttitle=parent1 -centertitle=base -righttitle=parent2 -outputtitle=merged -automerge -reviewconflicts -solo
+bcompare.gui=True
+bcompare.priority=-1
+bcompare.diffargs=-lro -lefttitle='$plabel1' -righttitle='$clabel' -solo -expandall $parent $child
+
+winmerge.args=/e /x /wl /ub /dl other /dr local $other $local $output
+winmerge.regkey=Software\Thingamahoochie\WinMerge
+winmerge.regkeyalt=Software\Wow6432Node\Thingamahoochie\WinMerge\
+winmerge.regname=Executable
+winmerge.check=changed
+winmerge.gui=True
+winmerge.priority=-10
+winmerge.diffargs=/r /e /x /ub /wl /dl '$plabel1' /dr '$clabel' $parent $child
+
+araxis.regkey=SOFTWARE\Classes\TypeLib\{46799e0a-7bd1-4330-911c-9660bb964ea2}\7.0\HELPDIR
+araxis.regappend=\ConsoleCompare.exe
+araxis.priority=-2
+araxis.args=/3 /a2 /wait /merge /title1:"Other" /title2:"Base" /title3:"Local :"$local $other $base $local $output
+araxis.checkconflict=True
+araxis.binary=True
+araxis.gui=True
+araxis.diffargs=/2 /wait /title1:"$plabel1" /title2:"$clabel" $parent $child
+
+diffuse.priority=-3
+diffuse.args=$local $base $other
+diffuse.gui=True
+diffuse.diffargs=$parent $child
+
+UltraCompare.regkey=Software\Microsoft\Windows\CurrentVersion\App Paths\UC.exe
+UltraCompare.regkeyalt=Software\Wow6432Node\Microsoft\Windows\CurrentVersion\App Paths\UC.exe
+UltraCompare.args = $base $local $other -title1 base -title3 other
+UltraCompare.priority = -2
+UltraCompare.gui = True
+UltraCompare.binary = True
+UltraCompare.check = conflicts,changed
+UltraCompare.diffargs=$child $parent -title1 $clabel -title2 $plabel1
--- a/mercurial/dirstate.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/dirstate.py	Sat Jan 17 18:28:30 2015 -0800
@@ -8,7 +8,7 @@
 from node import nullid
 from i18n import _
 import scmutil, util, ignore, osutil, parsers, encoding, pathutil
-import os, stat, errno, gc
+import os, stat, errno
 
 propertycache = util.propertycache
 filecache = scmutil.filecache
@@ -130,7 +130,9 @@
         files = [self._join('.hgignore')]
         for name, path in self._ui.configitems("ui"):
             if name == 'ignore' or name.startswith('ignore.'):
-                files.append(util.expandpath(path))
+                # we need to use os.path.join here rather than self._join
+                # because path is arbitrary and user-specified
+                files.append(os.path.join(self._rootdir, util.expandpath(path)))
         return ignore.ignore(self._root, files, self._ui.warn)
 
     @propertycache
@@ -317,13 +319,10 @@
         # Depending on when in the process's lifetime the dirstate is parsed,
         # this can get very expensive. As a workaround, disable GC while
         # parsing the dirstate.
-        gcenabled = gc.isenabled()
-        gc.disable()
-        try:
-            p = parsers.parse_dirstate(self._map, self._copymap, st)
-        finally:
-            if gcenabled:
-                gc.enable()
+        #
+        # (we cannot decorate the function directly since it is in a C module)
+        parse_dirstate = util.nogc(parsers.parse_dirstate)
+        p = parse_dirstate(self._map, self._copymap, st)
         if not self._dirtypl:
             self._pl = p
 
@@ -545,7 +544,7 @@
         # enough 'delaywrite' prevents 'pack_dirstate' from dropping
         # timestamp of each entries in dirstate, because of 'now > mtime'
         delaywrite = self._ui.configint('debug', 'dirstate.delaywrite', 0)
-        if delaywrite:
+        if delaywrite > 0:
             import time # to avoid useless import
             time.sleep(delaywrite)
 
@@ -629,6 +628,7 @@
         results = dict.fromkeys(subrepos)
         results['.hg'] = None
 
+        alldirs = None
         for ff in files:
             if normalize:
                 nf = normalize(normpath(ff), False, True)
@@ -657,13 +657,12 @@
                 if nf in dmap: # does it exactly match a missing file?
                     results[nf] = None
                 else: # does it match a missing directory?
-                    prefix = nf + "/"
-                    for fn in dmap:
-                        if fn.startswith(prefix):
-                            if matchedir:
-                                matchedir(nf)
-                            notfoundadd(nf)
-                            break
+                    if alldirs is None:
+                        alldirs = scmutil.dirs(dmap)
+                    if nf in alldirs:
+                        if matchedir:
+                            matchedir(nf)
+                        notfoundadd(nf)
                     else:
                         badfn(ff, inst.strerror)
 
--- a/mercurial/dispatch.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/dispatch.py	Sat Jan 17 18:28:30 2015 -0800
@@ -283,12 +283,19 @@
                 # We found an untested extension. It's likely the culprit.
                 worst = name, 'unknown', report
                 break
-            if compare not in testedwith.split() and testedwith != 'internal':
-                tested = [tuplever(v) for v in testedwith.split()]
-                lower = [t for t in tested if t < ct]
-                nearest = max(lower or tested)
-                if worst[0] is None or nearest < worst[1]:
-                    worst = name, nearest, report
+
+            # Never blame on extensions bundled with Mercurial.
+            if testedwith == 'internal':
+                continue
+
+            tested = [tuplever(t) for t in testedwith.split()]
+            if ct in tested:
+                continue
+
+            lower = [t for t in tested if t < ct]
+            nearest = max(lower or tested)
+            if worst[0] is None or nearest < worst[1]:
+                worst = name, nearest, report
         if worst[0] is not None:
             name, testedwith, report = worst
             if not isinstance(testedwith, str):
@@ -315,7 +322,10 @@
 
 def tuplever(v):
     try:
-        return tuple([int(i) for i in v.split('.')])
+        # Assertion: tuplever is only used for extension compatibility
+        # checking. Otherwise, the discarding of extra version fields is
+        # incorrect.
+        return tuple([int(i) for i in v.split('.')[0:2]])
     except ValueError:
         return tuple()
 
@@ -402,7 +412,7 @@
                         return ''
                 cmd = re.sub(r'\$(\d+|\$)', _checkvar, self.definition[1:])
                 cmd = aliasinterpolate(self.name, args, cmd)
-                return util.system(cmd, environ=env, out=ui.fout)
+                return ui.system(cmd, environ=env)
             self.fn = fn
             return
 
--- a/mercurial/exchange.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/exchange.py	Sat Jan 17 18:28:30 2015 -0800
@@ -49,7 +49,7 @@
         if version is None:
             raise ValueError('bundler do not support common obsmarker format')
         stream = obsolete.encodemarkers(markers, True, version=version)
-        return bundler.newpart('B2X:OBSMARKERS', data=stream)
+        return bundler.newpart('b2x:obsmarkers', data=stream)
     return None
 
 class pushoperation(object):
@@ -104,6 +104,8 @@
         self.outobsmarkers = set()
         # outgoing bookmarks
         self.outbookmarks = []
+        # transaction manager
+        self.trmanager = None
 
     @util.propertycache
     def futureheads(self):
@@ -204,6 +206,10 @@
         msg = 'cannot lock source repository: %s\n' % err
         pushop.ui.debug(msg)
     try:
+        if pushop.locallocked:
+            pushop.trmanager = transactionmanager(repo,
+                                                  'push-response',
+                                                  pushop.remote.url())
         pushop.repo.checkpush(pushop)
         lock = None
         unbundle = pushop.remote.capable('unbundle')
@@ -222,7 +228,11 @@
         finally:
             if lock is not None:
                 lock.release()
+        if pushop.trmanager:
+            pushop.trmanager.close()
     finally:
+        if pushop.trmanager:
+            pushop.trmanager.release()
         if locallock is not None:
             locallock.release()
 
@@ -261,12 +271,11 @@
 @pushdiscovery('changeset')
 def _pushdiscoverychangeset(pushop):
     """discover the changeset that need to be pushed"""
-    unfi = pushop.repo.unfiltered()
     fci = discovery.findcommonincoming
-    commoninc = fci(unfi, pushop.remote, force=pushop.force)
+    commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
     common, inc, remoteheads = commoninc
     fco = discovery.findcommonoutgoing
-    outgoing = fco(unfi, pushop.remote, onlyheads=pushop.revs,
+    outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
                    commoninc=commoninc, force=pushop.force)
     pushop.outgoing = outgoing
     pushop.remoteheads = remoteheads
@@ -298,7 +307,7 @@
     else:
         # adds changeset we are going to push as draft
         #
-        # should not be necessary for pushblishing server, but because of an
+        # should not be necessary for publishing server, but because of an
         # issue fixed in xxxxx we have to do it anyway.
         fdroots = list(unfi.set('roots(%ln  + %ln::)',
                        outgoing.missing, droots))
@@ -444,11 +453,26 @@
                                      pushop.remote,
                                      pushop.outgoing)
     if not pushop.force:
-        bundler.newpart('B2X:CHECK:HEADS', data=iter(pushop.remoteheads))
-    cg = changegroup.getlocalchangegroup(pushop.repo, 'push', pushop.outgoing)
-    cgpart = bundler.newpart('B2X:CHANGEGROUP', data=cg.getchunks())
+        bundler.newpart('b2x:check:heads', data=iter(pushop.remoteheads))
+    b2caps = bundle2.bundle2caps(pushop.remote)
+    version = None
+    cgversions = b2caps.get('b2x:changegroup')
+    if not cgversions:  # 3.1 and 3.2 ship with an empty value
+        cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push',
+                                                pushop.outgoing)
+    else:
+        cgversions = [v for v in cgversions if v in changegroup.packermap]
+        if not cgversions:
+            raise ValueError(_('no common changegroup version'))
+        version = max(cgversions)
+        cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push',
+                                                pushop.outgoing,
+                                                version=version)
+    cgpart = bundler.newpart('b2x:changegroup', data=cg)
+    if version is not None:
+        cgpart.addparam('version', version)
     def handlereply(op):
-        """extract addchangroup returns from server reply"""
+        """extract addchangegroup returns from server reply"""
         cgreplies = op.records.getreplies(cgpart.id)
         assert len(cgreplies['changegroup']) == 1
         pushop.cgresult = cgreplies['changegroup'][0]['return']
@@ -547,8 +571,12 @@
     The only currently supported type of data is changegroup but this will
     evolve in the future."""
     bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
+    pushback = (pushop.trmanager
+                and pushop.ui.configbool('experimental', 'bundle2.pushback'))
+
     # create reply capability
-    capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo))
+    capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
+                                                      allowpushback=pushback))
     bundler.newpart('b2x:replycaps', data=capsblob)
     replyhandlers = []
     for partgenname in b2partsgenorder:
@@ -565,7 +593,10 @@
     except error.BundleValueError, exc:
         raise util.Abort('missing support for %s' % exc)
     try:
-        op = bundle2.processbundle(pushop.repo, reply)
+        trgetter = None
+        if pushback:
+            trgetter = pushop.trmanager.transaction
+        op = bundle2.processbundle(pushop.repo, reply, trgetter)
     except error.BundleValueError, exc:
         raise util.Abort('missing support for %s' % exc)
     for rephand in replyhandlers:
@@ -678,13 +709,11 @@
 
 def _localphasemove(pushop, nodes, phase=phases.public):
     """move <nodes> to <phase> in the local source repo"""
-    if pushop.locallocked:
-        tr = pushop.repo.transaction('push-phase-sync')
-        try:
-            phases.advanceboundary(pushop.repo, tr, phase, nodes)
-            tr.close()
-        finally:
-            tr.release()
+    if pushop.trmanager:
+        phases.advanceboundary(pushop.repo,
+                               pushop.trmanager.transaction(),
+                               phase,
+                               nodes)
     else:
         # repo is not locked, do not change any phases!
         # Informs the user that phases should have been moved when
@@ -739,7 +768,7 @@
 class pulloperation(object):
     """A object that represent a single pull operation
 
-    It purpose is to carry push related state and very common operation.
+    Its purpose is to carry pull-related state and very common operations.
 
     A new should be created at the beginning of each pull and discarded
     afterward.
@@ -756,10 +785,8 @@
         self.explicitbookmarks = bookmarks
         # do we force pull?
         self.force = force
-        # the name the pull transaction
-        self._trname = 'pull\n' + util.hidepassword(remote.url())
-        # hold the transaction once created
-        self._tr = None
+        # transaction manager
+        self.trmanager = None
         # set of common changeset between local and remote before pull
         self.common = None
         # set of pulled head
@@ -792,29 +819,44 @@
             return self.heads
 
     def gettransaction(self):
-        """get appropriate pull transaction, creating it if needed"""
-        if self._tr is None:
-            self._tr = self.repo.transaction(self._trname)
-            self._tr.hookargs['source'] = 'pull'
-            self._tr.hookargs['url'] = self.remote.url()
+        # deprecated; talk to trmanager directly
+        return self.trmanager.transaction()
+
+class transactionmanager(object):
+    """An object to manage the life cycle of a transaction
+
+    It creates the transaction on demand and calls the appropriate hooks when
+    closing the transaction."""
+    def __init__(self, repo, source, url):
+        self.repo = repo
+        self.source = source
+        self.url = url
+        self._tr = None
+
+    def transaction(self):
+        """Return an open transaction object, constructing if necessary"""
+        if not self._tr:
+            trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
+            self._tr = self.repo.transaction(trname)
+            self._tr.hookargs['source'] = self.source
+            self._tr.hookargs['url'] = self.url
         return self._tr
 
-    def closetransaction(self):
+    def close(self):
         """close transaction if created"""
         if self._tr is not None:
             repo = self.repo
-            cl = repo.unfiltered().changelog
-            p = cl.writepending() and repo.root or ""
-            p = cl.writepending() and repo.root or ""
+            p = lambda: self._tr.writepending() and repo.root or ""
             repo.hook('b2x-pretransactionclose', throw=True, pending=p,
                       **self._tr.hookargs)
-            self._tr.close()
             hookargs = dict(self._tr.hookargs)
             def runhooks():
                 repo.hook('b2x-transactionclose', **hookargs)
-            repo._afterlock(runhooks)
+            self._tr.addpostclose('b2x-hook-transactionclose',
+                                  lambda tr: repo._afterlock(runhooks))
+            self._tr.close()
 
-    def releasetransaction(self):
+    def release(self):
         """release transaction if created"""
         if self._tr is not None:
             self._tr.release()
@@ -832,6 +874,7 @@
     pullop.remotebookmarks = remote.listkeys('bookmarks')
     lock = pullop.repo.lock()
     try:
+        pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
         _pulldiscovery(pullop)
         if (pullop.repo.ui.configbool('experimental', 'bundle2-exp', False)
             and pullop.remote.capable('bundle2-exp')):
@@ -840,9 +883,9 @@
         _pullphase(pullop)
         _pullbookmarks(pullop)
         _pullobsolete(pullop)
-        pullop.closetransaction()
+        pullop.trmanager.close()
     finally:
-        pullop.releasetransaction()
+        pullop.trmanager.release()
         lock.release()
 
     return pullop
@@ -883,11 +926,36 @@
 
     Current handle changeset discovery only, will change handle all discovery
     at some point."""
-    tmp = discovery.findcommonincoming(pullop.repo.unfiltered(),
+    tmp = discovery.findcommonincoming(pullop.repo,
                                        pullop.remote,
                                        heads=pullop.heads,
                                        force=pullop.force)
-    pullop.common, pullop.fetch, pullop.rheads = tmp
+    common, fetch, rheads = tmp
+    nm = pullop.repo.unfiltered().changelog.nodemap
+    if fetch and rheads:
+        # If a remote head is filtered locally, let's drop it from the unknown
+        # remote heads and put it back in common.
+        #
+        # This is a hackish solution to catch most "common but locally
+        # hidden" situations.  We do not perform discovery on the unfiltered
+        # repository because it ends up doing a pathological number of round
+        # trips for a huge amount of changesets we do not care about.
+        #
+        # If a set of such "common but filtered" changesets exists on the
+        # server but does not include any remote head, we will not detect it.
+        scommon = set(common)
+        filteredrheads = []
+        for n in rheads:
+            if n in nm and n not in scommon:
+                common.append(n)
+            else:
+                filteredrheads.append(n)
+        if not filteredrheads:
+            fetch = []
+        rheads = filteredrheads
+    pullop.common = common
+    pullop.fetch = fetch
+    pullop.rheads = rheads
 
 def _pullbundle2(pullop):
     """pull data using bundle2
@@ -924,22 +992,8 @@
         raise util.Abort('missing support for %s' % exc)
 
     if pullop.fetch:
-        changedheads = 0
-        pullop.cgresult = 1
-        for cg in op.records['changegroup']:
-            ret = cg['return']
-            # If any changegroup result is 0, return 0
-            if ret == 0:
-                pullop.cgresult = 0
-                break
-            if ret < -1:
-                changedheads += ret + 1
-            elif ret > 1:
-                changedheads += ret - 1
-        if changedheads > 0:
-            pullop.cgresult = 1 + changedheads
-        elif changedheads < 0:
-            pullop.cgresult = -1 + changedheads
+        results = [cg['return'] for cg in op.records['changegroup']]
+        pullop.cgresult = changegroup.combineresults(results)
 
     # processing phases change
     for namespace, value in op.records['listkeys']:
@@ -965,9 +1019,9 @@
         return
     pullop.stepsdone.add('changegroup')
     if not pullop.fetch:
-            pullop.repo.ui.status(_("no changes found\n"))
-            pullop.cgresult = 0
-            return
+        pullop.repo.ui.status(_("no changes found\n"))
+        pullop.cgresult = 0
+        return
     pullop.gettransaction()
     if pullop.heads is None and list(pullop.common) == [nullid]:
         pullop.repo.ui.status(_("requesting all changes\n"))
@@ -1133,10 +1187,11 @@
             b2caps.update(bundle2.decodecaps(blob))
     bundler = bundle2.bundle20(repo.ui, b2caps)
 
+    kwargs['heads'] = heads
+    kwargs['common'] = common
+
     for name in getbundle2partsorder:
         func = getbundle2partsmapping[name]
-        kwargs['heads'] = heads
-        kwargs['common'] = common
         func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
              **kwargs)
 
@@ -1149,11 +1204,26 @@
     cg = None
     if kwargs.get('cg', True):
         # build changegroup bundle here.
-        cg = changegroup.getchangegroup(repo, source, heads=heads,
-                                        common=common, bundlecaps=bundlecaps)
+        version = None
+        cgversions = b2caps.get('b2x:changegroup')
+        if not cgversions:  # 3.1 and 3.2 ship with an empty value
+            cg = changegroup.getchangegroupraw(repo, source, heads=heads,
+                                               common=common,
+                                               bundlecaps=bundlecaps)
+        else:
+            cgversions = [v for v in cgversions if v in changegroup.packermap]
+            if not cgversions:
+                raise ValueError(_('no common changegroup version'))
+            version = max(cgversions)
+            cg = changegroup.getchangegroupraw(repo, source, heads=heads,
+                                               common=common,
+                                               bundlecaps=bundlecaps,
+                                               version=version)
 
     if cg:
-        bundler.newpart('b2x:changegroup', data=cg.getchunks())
+        part = bundler.newpart('b2x:changegroup', data=cg)
+        if version is not None:
+            part.addparam('version', version)
 
 @getbundle2partsgenerator('listkeys')
 def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
@@ -1213,15 +1283,15 @@
                 tr.hookargs['url'] = url
                 tr.hookargs['bundle2-exp'] = '1'
                 r = bundle2.processbundle(repo, cg, lambda: tr).reply
-                cl = repo.unfiltered().changelog
-                p = cl.writepending() and repo.root or ""
+                p = lambda: tr.writepending() and repo.root or ""
                 repo.hook('b2x-pretransactionclose', throw=True, pending=p,
                           **tr.hookargs)
-                tr.close()
                 hookargs = dict(tr.hookargs)
                 def runhooks():
                     repo.hook('b2x-transactionclose', **hookargs)
-                repo._afterlock(runhooks)
+                tr.addpostclose('b2x-hook-transactionclose',
+                                lambda tr: repo._afterlock(runhooks))
+                tr.close()
             except Exception, exc:
                 exc.duringunbundle2 = True
                 raise
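
Both the push-side and the getbundle-side changegroup part generators above negotiate the changegroup format the same way: intersect the versions advertised in the peer's b2x:changegroup capability with what the local changegroup.packermap can produce, then pick the highest. A self-contained sketch of that selection; the version strings below are stand-ins, not taken from the source:

  local_packers = set(['01', '02'])   # stand-in for changegroup.packermap keys
  remote_caps = ['01', '02']          # stand-in for b2caps.get('b2x:changegroup')

  if not remote_caps:                 # 3.1 and 3.2 ship with an empty value
      version = None                  # fall back to the unversioned part
  else:
      common = [v for v in remote_caps if v in local_packers]
      if not common:
          raise ValueError('no common changegroup version')
      version = max(common)           # highest mutually supported version

  assert version == '02'
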
--- a/mercurial/filelog.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/filelog.py	Sat Jan 17 18:28:30 2015 -0800
@@ -85,7 +85,7 @@
             return False
 
         # censored files compare against the empty file
-        if self._iscensored(node):
+        if self._iscensored(self.rev(node)):
             return text != ''
 
         # renaming a file produces a different hash, even if the data
@@ -107,10 +107,6 @@
     def _file(self, f):
         return filelog(self.opener, f)
 
-    def _iscensored(self, revornode):
+    def _iscensored(self, rev):
         """Check if a file revision is censored."""
-        try:
-            self.revision(revornode)
-            return False
-        except error.CensoredNodeError:
-            return True
+        return self.flags(rev) & revlog.REVIDX_ISCENSORED
--- a/mercurial/filemerge.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/filemerge.py	Sat Jan 17 18:28:30 2015 -0800
@@ -37,6 +37,9 @@
 def _findtool(ui, tool):
     if tool in internals:
         return tool
+    return findexternaltool(ui, tool)
+
+def findexternaltool(ui, tool):
     for kn in ("regkey", "regkeyalt"):
         k = _toolstr(ui, tool, kn)
         if not k:
@@ -298,8 +301,7 @@
         replace = {'local': a, 'base': b, 'other': c, 'output': out}
         args = util.interpolate(r'\$', replace, args,
                                 lambda s: util.shellquote(util.localpath(s)))
-        r = util.system(toolpath + ' ' + args, cwd=repo.root, environ=env,
-                        out=ui.fout)
+        r = ui.system(toolpath + ' ' + args, cwd=repo.root, environ=env)
         return True, r
     return False, 0
 
--- a/mercurial/graphmod.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/graphmod.py	Sat Jan 17 18:28:30 2015 -0800
@@ -20,8 +20,208 @@
 from mercurial.node import nullrev
 import util
 
+import heapq
+
 CHANGESET = 'C'
 
+def groupbranchiter(revs, parentsfunc, firstbranch=()):
+    """Yield revisions from heads to roots one (topo) branch at a time.
+
+    This function aims to be used by a graph generator that wishes to minimize
+    the number of parallel branches and their interleaving.
+
+    Example iteration order (numbers show the "true" order in a changelog):
+
+      o  4
+      |
+      o  1
+      |
+      | o  3
+      | |
+      | o  2
+      |/
+      o  0
+
+    Note that the ancestors of merges are understood by the current
+    algorithm to be on the same branch. This means no reordering will
+    occur behind a merge.
+    """
+
+    ### Quick summary of the algorithm
+    #
+    # This function is based around a "retention" principle. We keep revisions
+    # in memory until we are ready to emit a whole branch that immediately
+    # "merges" into an existing one. This reduces the number of parallel
+    # branches with interleaved revisions.
+    #
+    # During iteration revs are split into two groups:
+    # A) revisions already emitted
+    # B) revisions in "retention". They are stored as different subgroups.
+    #
+    # for each REV, we do the following logic:
+    #
+    #   1) if REV is a parent of (A), we will emit it. If there is a
+    #   retention group ((B) above) that is blocked on REV being
+    #   available, we emit all the revisions out of that retention
+    #   group first.
+    #
+    #   2) else, we search for a subgroup in (B) waiting for REV to become
+    #   available; if such a subgroup exists, we add REV to it and the
+    #   subgroup now waits for REV.parents() to become available.
+    #
+    #   3) finally, if no such group exists in (B), we create a new subgroup.
+    #
+    #
+    # To bootstrap the algorithm, we emit the tipmost revision (which
+    # puts it in group (A) from above).
+
+    revs.sort(reverse=True)
+
+    # Set of parents of revisions that have been emitted. They can be considered
+    # unblocked as the graph generator is already aware of them so there is no
+    # need to delay the revisions that reference them.
+    #
+    # If someone wants to prioritize a branch over the others, pre-filling this
+    # set will force all other branches to wait until this branch is ready to be
+    # emitted.
+    unblocked = set(firstbranch)
+
+    # list of groups waiting to be displayed, each group is defined by:
+    #
+    #   (revs:    list of revs waiting to be displayed,
+    #    blocked: set of revs that cannot be displayed before those in 'revs')
+    #
+    # The second value ('blocked') corresponds to parents of any revision in the
+    # group ('revs') that is not itself contained in the group. The main idea
+    # of this algorithm is to delay as much as possible the emission of any
+    # revision.  This means waiting for the moment we are about to display
+    # these parents to display the revs in a group.
+    #
+    # This first implementation is smart until it encounters a merge: it will
+    # emit revs as soon as any parent is about to be emitted and can grow an
+    # arbitrary number of revs in 'blocked'. In practice this means we properly
+    # retain new branches but give up on any special ordering for ancestors
+    # of merges. The implementation can be improved to handle this better.
+    #
+    # The first subgroup is special. It corresponds to all the revisions that
+    # were already emitted. The 'revs' list is expected to be empty and the
+    # 'blocked' set contains the parent revisions of already emitted revisions.
+    #
+    # You could pre-seed the <parents> set of groups[0] with specific
+    # changesets to select what the first emitted branch should be.
+    groups = [([], unblocked)]
+    pendingheap = []
+    pendingset = set()
+
+    heapq.heapify(pendingheap)
+    heappop = heapq.heappop
+    heappush = heapq.heappush
+    for currentrev in revs:
+        # The heap returns the smallest element; we want the highest, so we negate
+        if currentrev not in pendingset:
+            heappush(pendingheap, -currentrev)
+            pendingset.add(currentrev)
+        # iterate on pending revs until after the current rev has been
+        # processed.
+        rev = None
+        while rev != currentrev:
+            rev = -heappop(pendingheap)
+            pendingset.remove(rev)
+
+            # Look for a subgroup that is blocked, waiting for the current revision.
+            matching = [i for i, g in enumerate(groups) if rev in g[1]]
+
+            if matching:
+                # The main idea is to gather together all sets that are blocked
+                # on the same revision.
+                #
+                # Groups are merged when a common blocking ancestor is
+                # observed. For example, given two groups:
+                #
+                # revs [5, 4] waiting for 1
+                # revs [3, 2] waiting for 1
+                #
+                # These two groups will be merged when we process
+                # 1. In theory, we could have merged the groups when
+                # we added 2 to the group it is now in (we could have
+                # noticed the groups were both blocked on 1 then), but
+                # the way it works now makes the algorithm simpler.
+                #
+                # We also always keep the oldest subgroup first. We can
+                # probably improve the behavior by having the longest set
+                # first. That way, graph algorithms could minimise the length
+                # of parallel lines in their drawing. This is currently not done.
+                targetidx = matching.pop(0)
+                trevs, tparents = groups[targetidx]
+                for i in matching:
+                    gr = groups[i]
+                    trevs.extend(gr[0])
+                    tparents |= gr[1]
+                # delete all merged subgroups (except the one we kept)
+                # (starting from the last subgroup for performance and
+                # sanity reasons)
+                for i in reversed(matching):
+                    del groups[i]
+            else:
+                # This is a new head. We create a new subgroup for it.
+                targetidx = len(groups)
+                groups.append(([], set([rev])))
+
+            gr = groups[targetidx]
+
+            # We now add the current nodes to this subgroup. This is done
+            # after the subgroup merging because all elements from a subgroup
+            # that relied on this rev must precede it.
+            #
+            # we also update the <parents> set to include the parents of the
+            # new nodes.
+            if rev == currentrev: # only display stuff in rev
+                gr[0].append(rev)
+            gr[1].remove(rev)
+            parents = [p for p in parentsfunc(rev) if p > nullrev]
+            gr[1].update(parents)
+            for p in parents:
+                if p not in pendingset:
+                    pendingset.add(p)
+                    heappush(pendingheap, -p)
+
+            # Look for a subgroup to display
+            #
+            # When unblocked is empty (if clause), we were not waiting for any
+            # revisions during the first iteration (if no priority was given) or
+            # if we emitted a whole disconnected set of the graph (reached a
+            # root).  In that case we arbitrarily take the oldest known
+            # subgroup. The heuristic could probably be better.
+            #
+            # Otherwise (elif clause) if the subgroup is blocked on
+            # a revision we just emitted, we can safely emit it as
+            # well.
+            if not unblocked:
+                if len(groups) > 1:  # display other subset
+                    targetidx = 1
+                    gr = groups[1]
+            elif not gr[1] & unblocked:
+                gr = None
+
+            if gr is not None:
+                # update the set of awaited revisions with the one from the
+                # subgroup
+                unblocked |= gr[1]
+                # output all revisions in the subgroup
+                for r in gr[0]:
+                    yield r
+                # delete the subgroup that you just output
+                # unless it is groups[0] in which case you just empty it.
+                if targetidx:
+                    del groups[targetidx]
+                else:
+                    gr[0][:] = []
+    # Check if we have some subgroup waiting for revisions we are not going to
+    # iterate over
+    for g in groups:
+        for r in g[0]:
+            yield r
+
 def dagwalker(repo, revs):
     """cset DAG generator yielding (id, CHANGESET, ctx, [parentids]) tuples
 
@@ -37,6 +237,15 @@
     lowestrev = revs.min()
     gpcache = {}
 
+    if repo.ui.configbool('experimental', 'graph-group-branches', False):
+        firstbranch = ()
+        firstbranchrevset = repo.ui.config(
+            'experimental', 'graph-group-branches.firstbranch', '')
+        if firstbranchrevset:
+            firstbranch = repo.revs(firstbranchrevset)
+        parentrevs = repo.changelog.parentrevs
+        revs = list(groupbranchiter(revs, parentrevs, firstbranch))
+
     for rev in revs:
         ctx = repo[rev]
         parents = sorted(set([p.rev() for p in ctx.parents()
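
A small usage sketch of groupbranchiter on the DAG from its docstring, assuming this Mercurial checkout is importable under Python 2; the expected result is the branch-grouped order the docstring illustrates:

  from mercurial import graphmod

  # parent revisions of each rev in the docstring example graph
  parents = {0: (), 1: (0,), 2: (0,), 3: (2,), 4: (1,)}

  order = list(graphmod.groupbranchiter([0, 1, 2, 3, 4], parents.get))
  assert order == [4, 1, 3, 2, 0]     # one topological branch at a time
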
--- a/mercurial/hbisect.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/hbisect.py	Sat Jan 17 18:28:30 2015 -0800
@@ -136,7 +136,7 @@
 def load_state(repo):
     state = {'current': [], 'good': [], 'bad': [], 'skip': []}
     if os.path.exists(repo.join("bisect.state")):
-        for l in repo.opener("bisect.state"):
+        for l in repo.vfs("bisect.state"):
             kind, node = l[:-1].split()
             node = repo.lookup(node)
             if kind not in state:
@@ -146,7 +146,7 @@
 
 
 def save_state(repo, state):
-    f = repo.opener("bisect.state", "w", atomictemp=True)
+    f = repo.vfs("bisect.state", "w", atomictemp=True)
     wlock = repo.wlock()
     try:
         for kind in sorted(state):
--- a/mercurial/help.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/help.py	Sat Jan 17 18:28:30 2015 -0800
@@ -371,6 +371,9 @@
             elif name and not full:
                 rst.append(_('\n(use "hg help %s" to show the full help '
                              'text)\n') % name)
+            elif name and cmds and name in cmds.keys():
+                rst.append(_('\n(use "hg help -v -e %s" to show built-in '
+                             'aliases and global options)\n') % name)
             else:
                 rst.append(_('\n(use "hg help -v%s" to show built-in aliases '
                              'and global options)\n')
--- a/mercurial/help/config.txt	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/help/config.txt	Sat Jan 17 18:28:30 2015 -0800
@@ -38,6 +38,7 @@
   - ``<install-root>/etc/mercurial/hgrc.d/*.rc`` (per-installation)
   - ``/etc/mercurial/hgrc`` (per-system)
   - ``/etc/mercurial/hgrc.d/*.rc`` (per-system)
+  - ``<internal>/default.d/*.rc`` (defaults)
 
 .. container:: verbose.windows
 
@@ -51,6 +52,7 @@
   - ``<install-dir>\Mercurial.ini`` (per-installation)
   - ``<install-dir>\hgrc.d\*.rc`` (per-installation)
   - ``HKEY_LOCAL_MACHINE\SOFTWARE\Mercurial`` (per-installation)
+  - ``<internal>/default.d/*.rc`` (defaults)
 
   .. note::
 
@@ -67,6 +69,7 @@
   - ``<install-root>/lib/mercurial/hgrc.d/*.rc`` (per-installation)
   - ``/lib/mercurial/hgrc`` (per-system)
   - ``/lib/mercurial/hgrc.d/*.rc`` (per-system)
+  - ``<internal>/default.d/*.rc`` (defaults)
 
 Per-repository configuration options only apply in a
 particular repository. This file is not version-controlled, and
@@ -102,6 +105,13 @@
 executed by any user in any directory. Options in these files
 override per-installation options.
 
+Mercurial comes with some default configuration. The default configuration
+files are installed with Mercurial and will be overwritten on upgrades. Default
+configuration files should never be edited by users or administrators but can
+be overridden in other configuration files. So far the directory only contains
+merge tool configuration but packagers can also put other default configuration
+there.
+
 Syntax
 ======
 
@@ -537,6 +547,9 @@
 ``nodates``
     Don't include dates in diff headers.
 
+``noprefix``
+    Omit 'a/' and 'b/' prefixes from filenames. Ignored in plain mode.
+
 ``showfunc``
     Show which function each change is in.
 
@@ -1325,11 +1338,11 @@
 
 ``ignore``
     A file to read per-user ignore patterns from. This file should be
-    in the same format as a repository-wide .hgignore file. This
-    option supports hook syntax, so if you want to specify multiple
-    ignore files, you can do so by setting something like
-    ``ignore.other = ~/.hgignore2``. For details of the ignore file
-    format, see the ``hgignore(5)`` man page.
+    in the same format as a repository-wide .hgignore file. Filenames
+    are relative to the repository root. This option supports hook syntax,
+    so if you want to specify multiple ignore files, you can do so by
+    setting something like ``ignore.other = ~/.hgignore2``. For details
+    of the ignore file format, see the ``hgignore(5)`` man page.
 
 ``interactive``
     Allow to prompt the user. True or False. Default is True.
--- a/mercurial/help/revsets.txt	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/help/revsets.txt	Sat Jan 17 18:28:30 2015 -0800
@@ -81,6 +81,19 @@
 defines three aliases, ``h``, ``d``, and ``rs``. ``rs(0:tip, author)`` is
 exactly equivalent to ``reverse(sort(0:tip, author))``.
 
+An infix operator ``##`` can concatenate strings and identifiers into
+one string. For example::
+
+  [revsetalias]
+  issue($1) = grep(r'\bissue[ :]?' ## $1 ## r'\b|\bbug\(' ## $1 ## r'\)')
+
+``issue(1234)`` is equivalent to ``grep(r'\bissue[ :]?1234\b|\bbug\(1234\)')``
+in this case. This matches against all of "issue 1234", "issue:1234",
+"issue1234" and "bug(1234)".
+
+All other prefix, infix and postfix operators have lower priority than
+``##``. For example, ``$1 ## $2~2`` is equivalent to ``($1 ## $2)~2``.
+
 Command line equivalents for :hg:`log`::
 
   -f    ->  ::.
--- a/mercurial/help/subrepos.txt	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/help/subrepos.txt	Sat Jan 17 18:28:30 2015 -0800
@@ -81,6 +81,12 @@
     Git and Subversion subrepositories are currently silently
     ignored.
 
+:addremove: addremove does not recurse into subrepos unless
+    -S/--subrepos is specified.  However, if you specify the full
+    path of a directory in a subrepo, addremove will be performed on
+    it even without -S/--subrepos being specified.  Git and
+    Subversion subrepositories will print a warning and continue.
+
 :archive: archive does not recurse in subrepositories unless
     -S/--subrepos is specified.
 
@@ -94,12 +100,14 @@
     -S/--subrepos, or setting "ui.commitsubrepos=True" in a
     configuration file (see :hg:`help config`).  After there are no
     longer any modified subrepositories, it records their state and
-    finally commits it in the parent repository.
+    finally commits it in the parent repository.  The --addremove
+    option also honors the -S/--subrepos option.  However, Git and
+    Subversion subrepositories will print a warning and abort.
 
 :diff: diff does not recurse in subrepos unless -S/--subrepos is
     specified. Changes are displayed as usual, on the subrepositories
-    elements. Git and Subversion subrepositories are currently
-    silently ignored.
+    elements. Git subrepositories do not support --include/--exclude.
+    Subversion subrepositories are currently silently ignored.
 
 :forget: forget currently only handles exact file matches in subrepos.
     Git and Subversion subrepositories are currently silently ignored.
@@ -129,6 +137,12 @@
     elements. Subversion subrepositories are currently silently
     ignored.
 
+:remove: remove does not recurse into subrepositories unless
+    -S/--subrepos is specified.  However, if you specify a file or
+    directory path in a subrepo, it will be removed even without
+    -S/--subrepos.  Git and Subversion subrepositories are currently
+    silently ignored.
+
 :update: update restores the subrepos in the state they were
     originally committed in target changeset. If the recorded
     changeset is not available in the current subrepository, Mercurial
--- a/mercurial/hg.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/hg.py	Sat Jan 17 18:28:30 2015 -0800
@@ -158,7 +158,7 @@
         return ''
     return os.path.basename(os.path.normpath(path))
 
-def share(ui, source, dest=None, update=True):
+def share(ui, source, dest=None, update=True, bookmarks=True):
     '''create a shared repository'''
 
     if not islocal(source):
@@ -193,7 +193,7 @@
 
     requirements = ''
     try:
-        requirements = srcrepo.opener.read('requires')
+        requirements = srcrepo.vfs.read('requires')
     except IOError, inst:
         if inst.errno != errno.ENOENT:
             raise
@@ -206,7 +206,7 @@
 
     default = srcrepo.ui.config('paths', 'default')
     if default:
-        fp = r.opener("hgrc", "w", text=True)
+        fp = r.vfs("hgrc", "w", text=True)
         fp.write("[paths]\n")
         fp.write("default = %s\n" % default)
         fp.close()
@@ -225,6 +225,11 @@
                 continue
         _update(r, uprev)
 
+    if bookmarks:
+        fp = r.vfs('shared', 'w')
+        fp.write('bookmarks\n')
+        fp.close()
+
 def copystore(ui, srcrepo, destpath):
     '''copy files from store of srcrepo in destpath
 
@@ -284,7 +289,8 @@
     dest: URL of destination repository to create (defaults to base
     name of source repository)
 
-    pull: always pull from source repository, even in local case
+    pull: always pull from source repository, even in local case or if the
+    server prefers streaming
 
     stream: stream raw data uncompressed from repository (fast over
     LAN, slow over WAN)
@@ -390,7 +396,7 @@
 
             dstcachedir = os.path.join(destpath, 'cache')
             # In local clones we're copying all nodes, not just served
-            # ones. Therefore copy all branchcaches over.
+            # ones. Therefore copy all branch caches over.
             copybranchcache('branch2')
             for cachename in repoview.filtertable:
                 copybranchcache('branch2-%s' % cachename)
@@ -420,6 +426,11 @@
                 revs = [srcpeer.lookup(r) for r in rev]
                 checkout = revs[0]
             if destpeer.local():
+                if not stream:
+                    if pull:
+                        stream = False
+                    else:
+                        stream = None
                 destpeer.local().clone(srcpeer, heads=revs, stream=stream)
             elif srcrepo:
                 exchange.push(srcrepo, destpeer, revs=revs,
@@ -432,7 +443,7 @@
         destrepo = destpeer.local()
         if destrepo:
             template = uimod.samplehgrcs['cloned']
-            fp = destrepo.opener("hgrc", "w", text=True)
+            fp = destrepo.vfs("hgrc", "w", text=True)
             u = util.url(abspath)
             u.passwd = None
             defaulturl = str(u)
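
A hypothetical driver for the extended share() signature above; the paths are illustrative and a Python 2 environment with this Mercurial on sys.path is assumed:

  from mercurial import ui as uimod, hg

  myui = uimod.ui()
  # share /tmp/source's store into /tmp/shared-clone, update its working
  # directory, and record bookmark sharing via the new .hg/shared file
  hg.share(myui, '/tmp/source', '/tmp/shared-clone', update=True,
           bookmarks=True)
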
--- a/mercurial/hgweb/webcommands.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/hgweb/webcommands.py	Sat Jan 17 18:28:30 2015 -0800
@@ -282,31 +282,14 @@
         if pos != -1:
             revs = web.repo.changelog.revs(pos, 0)
         curcount = 0
-        for i in revs:
-            ctx = web.repo[i]
-            n = ctx.node()
-            showtags = webutil.showtag(web.repo, tmpl, 'changelogtag', n)
-            files = webutil.listfilediffs(tmpl, ctx.files(), n, web.maxfiles)
-
+        for rev in revs:
             curcount += 1
             if curcount > revcount + 1:
                 break
-            yield {"parity": parity.next(),
-                   "author": ctx.user(),
-                   "parent": webutil.parents(ctx, i - 1),
-                   "child": webutil.children(ctx, i + 1),
-                   "changelogtag": showtags,
-                   "desc": ctx.description(),
-                   "extra": ctx.extra(),
-                   "date": ctx.date(),
-                   "files": files,
-                   "rev": i,
-                   "node": hex(n),
-                   "tags": webutil.nodetagsdict(web.repo, n),
-                   "bookmarks": webutil.nodebookmarksdict(web.repo, n),
-                   "inbranch": webutil.nodeinbranch(web.repo, ctx),
-                   "branches": webutil.nodebranchdict(web.repo, ctx)
-            }
+
+            entry = webutil.changelistentry(web, web.repo[rev], tmpl)
+            entry['parity'] = parity.next()
+            yield entry
 
     revcount = shortlog and web.maxshortchanges or web.maxchanges
     if 'revcount' in req.form:
@@ -379,7 +362,7 @@
                 diff=diffs,
                 rev=ctx.rev(),
                 node=ctx.hex(),
-                parent=webutil.parents(ctx),
+                parent=tuple(webutil.parents(ctx)),
                 child=webutil.children(ctx),
                 basenode=basectx.hex(),
                 changesettag=showtags,
@@ -753,7 +736,8 @@
     fctx = webutil.filectx(web.repo, req)
     f = fctx.path()
     parity = paritygen(web.stripecount)
-    diffopts = patch.diffopts(web.repo.ui, untrusted=True, section='annotate')
+    diffopts = patch.difffeatureopts(web.repo.ui, untrusted=True,
+                                     section='annotate', whitespace=True)
 
     def annotate(**map):
         last = None
--- a/mercurial/hgweb/webutil.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/hgweb/webutil.py	Sat Jan 17 18:28:30 2015 -0800
@@ -249,6 +249,35 @@
 
     return fctx
 
+def changelistentry(web, ctx, tmpl):
+    '''Obtain a dictionary to be used for entries in a changelist.
+
+    This function is called when producing items for the "entries" list passed
+    to the "shortlog" and "changelog" templates.
+    '''
+    repo = web.repo
+    rev = ctx.rev()
+    n = ctx.node()
+    showtags = showtag(repo, tmpl, 'changelogtag', n)
+    files = listfilediffs(tmpl, ctx.files(), n, web.maxfiles)
+
+    return {
+        "author": ctx.user(),
+        "parent": parents(ctx, rev - 1),
+        "child": children(ctx, rev + 1),
+        "changelogtag": showtags,
+        "desc": ctx.description(),
+        "extra": ctx.extra(),
+        "date": ctx.date(),
+        "files": files,
+        "rev": rev,
+        "node": hex(n),
+        "tags": nodetagsdict(repo, n),
+        "bookmarks": nodebookmarksdict(repo, n),
+        "inbranch": nodeinbranch(repo, ctx),
+        "branches": nodebranchdict(repo, ctx)
+    }
+
 def listfilediffs(tmpl, files, node, max):
     for f in files[:max]:
         yield tmpl('filedifflink', node=hex(node), file=f)
--- a/mercurial/hook.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/hook.py	Sat Jan 17 18:28:30 2015 -0800
@@ -131,10 +131,7 @@
         cwd = repo.root
     else:
         cwd = os.getcwd()
-    if 'HG_URL' in env and env['HG_URL'].startswith('remote:http'):
-        r = util.system(cmd, environ=env, cwd=cwd, out=ui)
-    else:
-        r = util.system(cmd, environ=env, cwd=cwd, out=ui.fout)
+    r = ui.system(cmd, environ=env, cwd=cwd)
 
     duration = time.time() - starttime
     ui.log('exthook', 'exthook-%s: %s finished in %0.2f seconds\n',
--- a/mercurial/httppeer.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/httppeer.py	Sat Jan 17 18:28:30 2015 -0800
@@ -193,7 +193,7 @@
                 type = x
                 break
 
-        tempname = changegroup.writebundle(cg, None, type)
+        tempname = changegroup.writebundle(self.ui, cg, None, type)
         fp = httpconnection.httpsendfile(self.ui, tempname, "rb")
         headers = {'Content-Type': 'application/mercurial-0.1'}
 
--- a/mercurial/localrepo.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/localrepo.py	Sat Jan 17 18:28:30 2015 -0800
@@ -18,6 +18,7 @@
 from lock import release
 import weakref, errno, os, time, inspect
 import branchmap, pathutil
+import namespaces
 propertycache = util.propertycache
 filecache = scmutil.filecache
 
@@ -297,6 +298,9 @@
         # - bookmark changes
         self.filteredrevcache = {}
 
+        # generic mapping between names and nodes
+        self.names = namespaces.namespaces()
+
     def close(self):
         pass
 
@@ -311,14 +315,17 @@
 
     def _applyrequirements(self, requirements):
         self.requirements = requirements
-        self.sopener.options = dict((r, 1) for r in requirements
+        self.svfs.options = dict((r, 1) for r in requirements
                                            if r in self.openerreqs)
         chunkcachesize = self.ui.configint('format', 'chunkcachesize')
         if chunkcachesize is not None:
-            self.sopener.options['chunkcachesize'] = chunkcachesize
+            self.svfs.options['chunkcachesize'] = chunkcachesize
+        maxchainlen = self.ui.configint('format', 'maxchainlen')
+        if maxchainlen is not None:
+            self.svfs.options['maxchainlen'] = maxchainlen
 
     def _writerequirements(self):
-        reqfile = self.opener("requires", "w")
+        reqfile = self.vfs("requires", "w")
         for r in sorted(self.requirements):
             reqfile.write("%s\n" % r)
         reqfile.close()
@@ -407,7 +414,7 @@
         if defaultformat is not None:
             kwargs['defaultformat'] = defaultformat
         readonly = not obsolete.isenabled(self, obsolete.createmarkersopt)
-        store = obsolete.obsstore(self.sopener, readonly=readonly,
+        store = obsolete.obsstore(self.svfs, readonly=readonly,
                                   **kwargs)
         if store and readonly:
             # message is rare enough to not be translated
@@ -417,7 +424,7 @@
 
     @storecache('00changelog.i')
     def changelog(self):
-        c = changelog.changelog(self.sopener)
+        c = changelog.changelog(self.svfs)
         if 'HG_PENDING' in os.environ:
             p = os.environ['HG_PENDING']
             if p.startswith(self.root):
@@ -426,7 +433,7 @@
 
     @storecache('00manifest.i')
     def manifest(self):
-        return manifest.manifest(self.sopener)
+        return manifest.manifest(self.svfs)
 
     @repofilecache('dirstate')
     def dirstate(self):
@@ -442,11 +449,15 @@
                                    " working parent %s!\n") % short(node))
                 return nullid
 
-        return dirstate.dirstate(self.opener, self.ui, self.root, validate)
+        return dirstate.dirstate(self.vfs, self.ui, self.root, validate)
 
     def __getitem__(self, changeid):
         if changeid is None:
             return context.workingctx(self)
+        if isinstance(changeid, slice):
+            return [context.changectx(self, i)
+                    for i in xrange(*changeid.indices(len(self)))
+                    if i not in self.changelog.filteredrevs]
         return context.changectx(self, changeid)
 
     def __contains__(self, changeid):
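
The new slice branch above means a repository can be indexed like a sequence of changectx objects, with filtered revisions silently skipped. A minimal sketch, assuming the current directory is a Mercurial repository; the revision ranges are illustrative:

    from mercurial import hg, ui as uimod

    repo = hg.repository(uimod.ui(), '.')   # assumes cwd is a repository
    first_three = repo[0:3]                 # list of changectx for revs 0, 1, 2
    for ctx in repo[-5:]:                   # up to the five most recent revisions
        print ctx.rev(), ctx.description().split('\n', 1)[0]
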
@@ -520,9 +531,9 @@
         prevtags = ''
         if local:
             try:
-                fp = self.opener('localtags', 'r+')
+                fp = self.vfs('localtags', 'r+')
             except IOError:
-                fp = self.opener('localtags', 'a')
+                fp = self.vfs('localtags', 'a')
             else:
                 prevtags = fp.read()
 
@@ -707,12 +718,21 @@
         branchmap.updatecache(self)
         return self._branchcaches[self.filtername]
 
-    def branchtip(self, branch):
-        '''return the tip node for a given branch'''
+    def branchtip(self, branch, ignoremissing=False):
+        '''return the tip node for a given branch
+
+        If ignoremissing is True, then this method will not raise an error.
+        This is helpful for callers that only expect None for a missing branch
+        (e.g. namespace).
+
+        '''
         try:
             return self.branchmap().branchtip(branch)
         except KeyError:
-            raise error.RepoLookupError(_("unknown branch '%s'") % branch)
+            if not ignoremissing:
+                raise error.RepoLookupError(_("unknown branch '%s'") % branch)
+            else:
+                pass
 
     def lookup(self, key):
         return self[key].node()
@@ -747,16 +767,22 @@
         # if publishing we can't copy if there is filtered content
         return not self.filtered('visible').changelog.filteredrevs
 
+    def shared(self):
+        '''the type of shared repository (None if not shared)'''
+        if self.sharedpath != self.path:
+            return 'store'
+        return None
+
     def join(self, f, *insidef):
-        return os.path.join(self.path, f, *insidef)
+        return self.vfs.join(os.path.join(f, *insidef))
 
     def wjoin(self, f, *insidef):
-        return os.path.join(self.root, f, *insidef)
+        return self.vfs.reljoin(self.root, f, *insidef)
 
     def file(self, f):
         if f[0] == '/':
             f = f[1:]
-        return filelog.filelog(self.sopener, f)
+        return filelog.filelog(self.svfs, f)
 
     def changectx(self, changeid):
         return self[changeid]
@@ -794,7 +820,7 @@
         return self.dirstate.pathto(f, cwd)
 
     def wfile(self, f, mode='r'):
-        return self.wopener(f, mode)
+        return self.wvfs(f, mode)
 
     def _link(self, f):
         return self.wvfs.islink(f)
@@ -847,24 +873,31 @@
         if self._link(filename):
             data = self.wvfs.readlink(filename)
         else:
-            data = self.wopener.read(filename)
+            data = self.wvfs.read(filename)
         return self._filter(self._encodefilterpats, filename, data)
 
     def wwrite(self, filename, data, flags):
         data = self._filter(self._decodefilterpats, filename, data)
         if 'l' in flags:
-            self.wopener.symlink(data, filename)
+            self.wvfs.symlink(data, filename)
         else:
-            self.wopener.write(filename, data)
+            self.wvfs.write(filename, data)
             if 'x' in flags:
                 self.wvfs.setflags(filename, False, True)
 
     def wwritedata(self, filename, data):
         return self._filter(self._decodefilterpats, filename, data)
 
-    def transaction(self, desc, report=None):
+    def currenttransaction(self):
+        """return the current transaction or None if non exists"""
         tr = self._transref and self._transref() or None
         if tr and tr.running():
+            return tr
+        return None
+
+    def transaction(self, desc, report=None):
+        tr = self.currenttransaction()
+        if tr is not None:
             return tr.nest()
 
         # abort here if the journal already exists
@@ -873,17 +906,19 @@
                 _("abandoned transaction found"),
                 hint=_("run 'hg recover' to clean up transaction"))
 
-        def onclose():
-            self.store.write(self._transref())
-
         self._writejournal(desc)
         renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
         rp = report and report or self.ui.warn
-        tr = transaction.transaction(rp, self.sopener,
+        vfsmap = {'plain': self.vfs} # root of .hg/
+        tr = transaction.transaction(rp, self.svfs, vfsmap,
                                      "journal",
+                                     "undo",
                                      aftertrans(renames),
-                                     self.store.createmode,
-                                     onclose)
+                                     self.store.createmode)
+        # note: writing the fncache only during finalize means that the file is
+        # outdated when running hooks. As fncache is used for streaming clone,
+        # this is not expected to break anything that happens during the hooks.
+        tr.addfinalize('flush-fncache', self.store.write)
         self._transref = weakref.ref(tr)
         return tr
 
@@ -899,23 +934,25 @@
         return [(vfs, undoname(x)) for vfs, x in self._journalfiles()]
 
     def _writejournal(self, desc):
-        self.opener.write("journal.dirstate",
-                          self.opener.tryread("dirstate"))
-        self.opener.write("journal.branch",
+        self.vfs.write("journal.dirstate",
+                          self.vfs.tryread("dirstate"))
+        self.vfs.write("journal.branch",
                           encoding.fromlocal(self.dirstate.branch()))
-        self.opener.write("journal.desc",
+        self.vfs.write("journal.desc",
                           "%d\n%s\n" % (len(self), desc))
-        self.opener.write("journal.bookmarks",
-                          self.opener.tryread("bookmarks"))
-        self.sopener.write("journal.phaseroots",
-                           self.sopener.tryread("phaseroots"))
+        self.vfs.write("journal.bookmarks",
+                          self.vfs.tryread("bookmarks"))
+        self.svfs.write("journal.phaseroots",
+                           self.svfs.tryread("phaseroots"))
 
     def recover(self):
         lock = self.lock()
         try:
             if self.svfs.exists("journal"):
                 self.ui.status(_("rolling back interrupted transaction\n"))
-                transaction.rollback(self.sopener, "journal",
+                vfsmap = {'': self.svfs,
+                          'plain': self.vfs,}
+                transaction.rollback(self.svfs, vfsmap, "journal",
                                      self.ui.warn)
                 self.invalidate()
                 return True
@@ -942,7 +979,7 @@
     def _rollback(self, dryrun, force):
         ui = self.ui
         try:
-            args = self.opener.read('undo.desc').splitlines()
+            args = self.vfs.read('undo.desc').splitlines()
             (oldlen, desc, detail) = (int(args[0]), args[1], None)
             if len(args) >= 3:
                 detail = args[2]
@@ -971,7 +1008,8 @@
 
         parents = self.dirstate.parents()
         self.destroying()
-        transaction.rollback(self.sopener, 'undo', ui.warn)
+        vfsmap = {'plain': self.vfs, '': self.svfs}
+        transaction.rollback(self.svfs, vfsmap, 'undo', ui.warn)
         if self.vfs.exists('undo.bookmarks'):
             self.vfs.rename('undo.bookmarks', 'bookmarks')
         if self.svfs.exists('undo.phaseroots'):
@@ -983,7 +1021,7 @@
         if parentgone:
             self.vfs.rename('undo.dirstate', 'dirstate')
             try:
-                branch = self.opener.read('undo.branch')
+                branch = self.vfs.read('undo.branch')
                 self.dirstate.setbranch(encoding.tolocal(branch))
             except IOError:
                 ui.warn(_('named branch could not be reset: '
@@ -1315,7 +1353,8 @@
                     elif f not in self.dirstate:
                         fail(f, _("file not tracked!"))
 
-            cctx = context.workingctx(self, text, user, date, extra, status)
+            cctx = context.workingcommitctx(self, status,
+                                            text, user, date, extra)
 
             if (not force and not extra.get("close") and not merge
                 and not cctx.files()
@@ -1328,8 +1367,8 @@
             ms = mergemod.mergestate(self)
             for f in status.modified:
                 if f in ms and ms[f] == 'u':
-                    raise util.Abort(_("unresolved merge conflicts "
-                                       "(see hg help resolve)"))
+                    raise util.Abort(_('unresolved merge conflicts '
+                                       '(see "hg help resolve")'))
 
             if editor:
                 cctx._text = editor(self, cctx, subs)
@@ -1403,6 +1442,7 @@
                 changed = []
                 removed = list(ctx.removed())
                 linkrev = len(self)
+                self.ui.note(_("committing files:\n"))
                 for f in sorted(ctx.modified() + ctx.added()):
                     self.ui.note(f + "\n")
                     try:
@@ -1424,6 +1464,7 @@
                         raise
 
                 # update manifest
+                self.ui.note(_("committing manifest\n"))
                 removed = [f for f in sorted(removed) if f in m1 or f in m2]
                 drop = [f for f in removed if f in m]
                 for f in drop:
@@ -1437,15 +1478,15 @@
                 files = []
 
             # update changelog
-            self.changelog.delayupdate()
+            self.ui.note(_("committing changelog\n"))
+            self.changelog.delayupdate(tr)
             n = self.changelog.add(mn, files, ctx.description(),
                                    trp, p1.node(), p2.node(),
                                    user, ctx.date(), ctx.extra().copy())
-            p = lambda: self.changelog.writepending() and self.root or ""
+            p = lambda: tr.writepending() and self.root or ""
             xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
             self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                       parent2=xp2, pending=p)
-            self.changelog.finalize(trp)
             # set the new commit is proper phase
             targetphase = subrepo.newcommitphase(self.ui, ctx)
             if targetphase:
@@ -1653,7 +1694,7 @@
                         self.ui.debug('adding %s (%s)\n' %
                                       (name, util.bytecount(size)))
                     # for backwards compat, name was partially encoded
-                    ofp = self.sopener(store.decodedir(name), 'w')
+                    ofp = self.svfs(store.decodedir(name), 'w')
                     for chunk in util.filechunkiter(fp, limit=size):
                         handled_bytes += len(chunk)
                         self.ui.progress(_('clone'), handled_bytes,
@@ -1713,7 +1754,7 @@
         finally:
             lock.release()
 
-    def clone(self, remote, heads=[], stream=False):
+    def clone(self, remote, heads=[], stream=None):
         '''clone remote repository.
 
         keyword arguments:
@@ -1728,7 +1769,7 @@
         # and format flags on "stream" capability, and use
         # uncompressed only if compatible.
 
-        if not stream:
+        if stream is None:
             # if the server explicitly prefers to stream (for fast LANs)
             stream = remote.capable('stream-preferred')
 
@@ -1764,8 +1805,10 @@
             return False
         self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
         ret = pushkey.push(self, namespace, key, old, new)
-        self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
-                  ret=ret)
+        def runhook():
+            self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
+                      ret=ret)
+        self._afterlock(runhook)
         return ret
 
     def listkeys(self, namespace):
@@ -1780,7 +1823,7 @@
         return "%s %s %s %s %s" % (one, two, three, four, five)
 
     def savecommitmessage(self, text):
-        fp = self.opener('last-message.txt', 'wb')
+        fp = self.vfs('last-message.txt', 'wb')
         try:
             fp.write(text)
         finally:
--- a/mercurial/manifest.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/manifest.py	Sat Jan 17 18:28:30 2015 -0800
@@ -22,8 +22,6 @@
         dict.__setitem__(self, k, v)
     def flags(self, f):
         return self._flags.get(f, "")
-    def withflags(self):
-        return set(self._flags.keys())
     def setflag(self, f, flags):
         """Set the flags (symlink, executable) for path f."""
         self._flags[f] = flags
@@ -42,13 +40,37 @@
                     ret._flags[fn] = flags
         return ret
 
-    def diff(self, m2):
-        '''Finds changes between the current manifest and m2. The result is
-        returned as a dict with filename as key and values of the form
-        ((n1,fl1),(n2,fl2)), where n1/n2 is the nodeid in the current/other
-        manifest and fl1/fl2 is the flag in the current/other manifest. Where
-        the file does not exist, the nodeid will be None and the flags will be
-        the empty string.'''
+    def matches(self, match):
+        '''generate a new manifest filtered by the match argument'''
+        if match.always():
+            return self.copy()
+
+        files = match.files()
+        if (match.matchfn == match.exact or
+            (not match.anypats() and util.all(fn in self for fn in files))):
+            return self.intersectfiles(files)
+
+        mf = self.copy()
+        for fn in mf.keys():
+            if not match(fn):
+                del mf[fn]
+        return mf
+
+    def diff(self, m2, clean=False):
+        '''Finds changes between the current manifest and m2.
+
+        Args:
+          m2: the manifest to which this manifest should be compared.
+          clean: if true, include files unchanged between these manifests
+                 with a None value in the returned dictionary.
+
+        The result is returned as a dict with filename as key and
+        values of the form ((n1,fl1),(n2,fl2)), where n1/n2 is the
+        nodeid in the current/other manifest and fl1/fl2 is the flag
+        in the current/other manifest. Where the file does not exist,
+        the nodeid will be None and the flags will be the empty
+        string.
+        '''
         diff = {}
 
         for fn, n1 in self.iteritems():
@@ -59,6 +81,8 @@
                 fl2 = ''
             if n1 != n2 or fl1 != fl2:
                 diff[fn] = ((n1, fl1), (n2, fl2))
+            elif clean:
+                diff[fn] = None
 
         for fn, n2 in m2.iteritems():
             if fn not in self:
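
With the clean flag, diff() also reports unchanged files, so a caller can distinguish added, removed, modified, and clean entries in a single pass. A minimal sketch of the returned mapping, assuming an existing repository in the current directory:

    from mercurial import hg, ui as uimod

    repo = hg.repository(uimod.ui(), '.')   # assumes cwd is a repository
    ctx = repo['tip']
    m1, m2 = ctx.p1().manifest(), ctx.manifest()
    for fn, entry in sorted(m1.diff(m2, clean=True).iteritems()):
        if entry is None:
            print fn, 'unchanged'
            continue
        (n1, fl1), (n2, fl2) = entry
        if n1 is None:
            print fn, 'added'
        elif n2 is None:
            print fn, 'removed'
        else:
            print fn, 'modified'
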
--- a/mercurial/match.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/match.py	Sat Jan 17 18:28:30 2015 -0800
@@ -65,6 +65,7 @@
         self._anypats = bool(include or exclude)
         self._ctx = ctx
         self._always = False
+        self._pathrestricted = bool(include or exclude or patterns)
 
         matchfns = []
         if include:
@@ -124,10 +125,21 @@
     # by recursive traversal is visited.
     traversedir = None
 
+    def abs(self, f):
+        '''Convert a repo path back to path that is relative to the root of the
+        matcher.'''
+        return f
+
     def rel(self, f):
         '''Convert repo path back to path that is relative to cwd of matcher.'''
         return util.pathto(self._root, self._cwd, f)
 
+    def uipath(self, f):
+        '''Convert repo path to a display path.  If patterns or -I/-X were used
+        to create this matcher, the display path will be relative to cwd.
+        Otherwise it is relative to the root of the repo.'''
+        return (self._pathrestricted and self.rel(f)) or self.abs(f)
+
     def files(self):
         '''Explicitly listed files or patterns or roots:
         if no patterns or .always(): empty list,
@@ -149,13 +161,11 @@
         - optimization might be possible and necessary.'''
         return self._always
 
-class exact(match):
-    def __init__(self, root, cwd, files):
-        match.__init__(self, root, cwd, files, exact=True)
+def exact(root, cwd, files):
+    return match(root, cwd, files, exact=True)
 
-class always(match):
-    def __init__(self, root, cwd):
-        match.__init__(self, root, cwd, [])
+def always(root, cwd):
+    return match(root, cwd, [])
 
 class narrowmatcher(match):
     """Adapt a matcher to work on a subdirectory only.
@@ -176,13 +186,15 @@
     ['b.txt']
     >>> m2.exact('b.txt')
     True
-    >>> m2.rel('b.txt')
-    'b.txt'
+    >>> util.pconvert(m2.rel('b.txt'))
+    'sub/b.txt'
     >>> def bad(f, msg):
     ...     print "%s: %s" % (f, msg)
     >>> m1.bad = bad
     >>> m2.bad('x.txt', 'No such file')
     sub/x.txt: No such file
+    >>> m2.abs('c.txt')
+    'sub/c.txt'
     """
 
     def __init__(self, path, matcher):
@@ -191,6 +203,7 @@
         self._path = path
         self._matcher = matcher
         self._always = matcher._always
+        self._pathrestricted = matcher._pathrestricted
 
         self._files = [f[len(path) + 1:] for f in matcher._files
                        if f.startswith(path + "/")]
@@ -198,9 +211,15 @@
         self.matchfn = lambda fn: matcher.matchfn(self._path + "/" + fn)
         self._fmap = set(self._files)
 
+    def abs(self, f):
+        return self._matcher.abs(self._path + "/" + f)
+
     def bad(self, f, msg):
         self._matcher.bad(self._path + "/" + f, msg)
 
+    def rel(self, f):
+        return self._matcher.rel(self._path + "/" + f)
+
 def patkind(pattern, default=None):
     '''If pattern is 'kind:pat' with a known kind, return kind.'''
     return _patsplit(pattern, default)[0]
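
uipath() chooses between the new abs() and the existing rel() based on whether patterns or -I/-X were used, so display paths stay cwd-relative when the user pointed at specific files and become repo-relative otherwise. A minimal sketch, assuming a 3.3-era mercurial on the import path; '/repo' and 'sub' are made-up paths:

    from mercurial.match import match

    m = match('/repo', 'sub', ['b.txt'])    # patterns given -> path restricted
    print m.uipath('sub/b.txt')             # 'b.txt' (relative to cwd, via rel())

    m = match('/repo', 'sub', [])           # no patterns and no -I/-X
    print m.uipath('sub/b.txt')             # 'sub/b.txt' (repo-relative, via abs())
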
--- a/mercurial/mdiff.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/mdiff.py	Sat Jan 17 18:28:30 2015 -0800
@@ -25,6 +25,8 @@
     showfunc enables diff -p output
     git enables the git extended patch format
     nodates removes dates from diff headers
+    nobinary ignores binary files
+    noprefix disables the 'a/' and 'b/' prefixes (ignored in plain mode)
     ignorews ignores all whitespace changes in the diff
     ignorewsamount ignores changes in the amount of whitespace
     ignoreblanklines ignores changes whose lines are all blank
@@ -38,6 +40,7 @@
         'git': False,
         'nodates': False,
         'nobinary': False,
+        'noprefix': False,
         'ignorews': False,
         'ignorewsamount': False,
         'ignoreblanklines': False,
@@ -153,6 +156,13 @@
 
     if not a and not b:
         return ""
+
+    if opts.noprefix:
+        aprefix = bprefix = ''
+    else:
+        aprefix = 'a/'
+        bprefix = 'b/'
+
     epoch = util.datestr((0, 0))
 
     fn1 = util.pconvert(fn1)
@@ -167,17 +177,17 @@
         if a is None:
             l1 = '--- /dev/null%s' % datetag(epoch)
         else:
-            l1 = "--- %s%s" % ("a/" + fn1, datetag(ad, fn1))
-        l2 = "+++ %s%s" % ("b/" + fn2, datetag(bd, fn2))
+            l1 = "--- %s%s%s" % (aprefix, fn1, datetag(ad, fn1))
+        l2 = "+++ %s%s" % (bprefix + fn2, datetag(bd, fn2))
         l3 = "@@ -0,0 +1,%d @@\n" % len(b)
         l = [l1, l2, l3] + ["+" + e for e in b]
     elif not b:
         a = splitnewlines(a)
-        l1 = "--- %s%s" % ("a/" + fn1, datetag(ad, fn1))
+        l1 = "--- %s%s%s" % (aprefix, fn1, datetag(ad, fn1))
         if b is None:
             l2 = '+++ /dev/null%s' % datetag(epoch)
         else:
-            l2 = "+++ %s%s" % ("b/" + fn2, datetag(bd, fn2))
+            l2 = "+++ %s%s%s" % (bprefix, fn2, datetag(bd, fn2))
         l3 = "@@ -1,%d +0,0 @@\n" % len(a)
         l = [l1, l2, l3] + ["-" + e for e in a]
     else:
@@ -187,8 +197,8 @@
         if not l:
             return ""
 
-        l.insert(0, "--- a/%s%s" % (fn1, datetag(ad, fn1)))
-        l.insert(1, "+++ b/%s%s" % (fn2, datetag(bd, fn2)))
+        l.insert(0, "--- %s%s%s" % (aprefix, fn1, datetag(ad, fn1)))
+        l.insert(1, "+++ %s%s%s" % (bprefix, fn2, datetag(bd, fn2)))
 
     for ln in xrange(len(l)):
         if l[ln][-1] != '\n':
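
The noprefix flag only changes the '---'/'+++' header lines; the hunks themselves are untouched. A minimal sketch of its effect, assuming this patch is applied (file name and contents are made up):

    from mercurial import mdiff

    opts = mdiff.diffopts(noprefix=True)
    old, new = 'one\n', 'two\n'
    print mdiff.unidiff(old, '', new, '', 'f.txt', 'f.txt', opts=opts)
    # headers read "--- f.txt" / "+++ f.txt" instead of "--- a/f.txt" / "+++ b/f.txt"
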
--- a/mercurial/merge.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/merge.py	Sat Jan 17 18:28:30 2015 -0800
@@ -18,9 +18,9 @@
 
 def _droponode(data):
     # used for compatibility for v1
-    bits = data.split("\0")
+    bits = data.split('\0')
     bits = bits[:-2] + bits[-1:]
-    return "\0".join(bits)
+    return '\0'.join(bits)
 
 class mergestate(object):
     '''track 3-way merge state of individual files
@@ -45,8 +45,8 @@
     O: the node of the "other" part of the merge (hexified version)
     F: a file to be merged entry
     '''
-    statepathv1 = "merge/state"
-    statepathv2 = "merge/state2"
+    statepathv1 = 'merge/state'
+    statepathv2 = 'merge/state2'
 
     def __init__(self, repo):
         self._repo = repo
@@ -60,7 +60,7 @@
         if node:
             self._local = node
             self._other = other
-        shutil.rmtree(self._repo.join("merge"), True)
+        shutil.rmtree(self._repo.join('merge'), True)
         self._dirty = False
 
     def _read(self):
@@ -78,8 +78,8 @@
                 self._local = bin(record)
             elif rtype == 'O':
                 self._other = bin(record)
-            elif rtype == "F":
-                bits = record.split("\0")
+            elif rtype == 'F':
+                bits = record.split('\0')
                 self._state[bits[0]] = bits[1:]
             elif not rtype.islower():
                 raise util.Abort(_('unsupported merge state record: %s')
@@ -121,9 +121,9 @@
                 # if mctx was wrong `mctx[bits[-2]]` may fails.
                 for idx, r in enumerate(v1records):
                     if r[0] == 'F':
-                        bits = r[1].split("\0")
+                        bits = r[1].split('\0')
                         bits.insert(-2, '')
-                        v1records[idx] = (r[0], "\0".join(bits))
+                        v1records[idx] = (r[0], '\0'.join(bits))
                 return v1records
         else:
             return v2records
@@ -138,7 +138,7 @@
         """
         records = []
         try:
-            f = self._repo.opener(self.statepathv1)
+            f = self._repo.vfs(self.statepathv1)
             for i, l in enumerate(f):
                 if i == 0:
                     records.append(('L', l[:-1]))
@@ -157,7 +157,7 @@
         """
         records = []
         try:
-            f = self._repo.opener(self.statepathv2)
+            f = self._repo.vfs(self.statepathv2)
             data = f.read()
             off = 0
             end = len(data)
@@ -184,17 +184,17 @@
         # Check local variables before looking at filesystem for performance
         # reasons.
         return bool(self._local) or bool(self._state) or \
-               self._repo.opener.exists(self.statepathv1) or \
-               self._repo.opener.exists(self.statepathv2)
+               self._repo.vfs.exists(self.statepathv1) or \
+               self._repo.vfs.exists(self.statepathv2)
 
     def commit(self):
         """Write current state on disk (if necessary)"""
         if self._dirty:
             records = []
-            records.append(("L", hex(self._local)))
-            records.append(("O", hex(self._other)))
+            records.append(('L', hex(self._local)))
+            records.append(('O', hex(self._other)))
             for d, v in self._state.iteritems():
-                records.append(("F", "\0".join([d] + v)))
+                records.append(('F', '\0'.join([d] + v)))
             self._writerecords(records)
             self._dirty = False
 
@@ -205,22 +205,22 @@
 
     def _writerecordsv1(self, records):
         """Write current state on disk in a version 1 file"""
-        f = self._repo.opener(self.statepathv1, "w")
+        f = self._repo.vfs(self.statepathv1, 'w')
         irecords = iter(records)
         lrecords = irecords.next()
         assert lrecords[0] == 'L'
-        f.write(hex(self._local) + "\n")
+        f.write(hex(self._local) + '\n')
         for rtype, data in irecords:
-            if rtype == "F":
-                f.write("%s\n" % _droponode(data))
+            if rtype == 'F':
+                f.write('%s\n' % _droponode(data))
         f.close()
 
     def _writerecordsv2(self, records):
         """Write current state on disk in a version 2 file"""
-        f = self._repo.opener(self.statepathv2, "w")
+        f = self._repo.vfs(self.statepathv2, 'w')
         for key, data in records:
             assert len(key) == 1
-            format = ">sI%is" % len(data)
+            format = '>sI%is' % len(data)
             f.write(_pack(format, key, len(data), data))
         f.close()
 
@@ -234,7 +234,7 @@
         note: also write the local version to the `.hg/merge` directory.
         """
         hash = util.sha1(fcl.path()).hexdigest()
-        self._repo.opener.write("merge/" + hash, fcl.data())
+        self._repo.vfs.write('merge/' + hash, fcl.data())
         self._state[fd] = ['u', hash, fcl.path(),
                            fca.path(), hex(fca.filenode()),
                            fco.path(), hex(fco.filenode()),
@@ -284,7 +284,7 @@
             elif flags == fla:
                 flags = flo
         # restore local
-        f = self._repo.opener("merge/" + hash)
+        f = self._repo.vfs('merge/' + hash)
         self._repo.wwrite(dfile, f.read(), flags)
         f.close()
         r = filemerge.filemerge(self._repo, self._local, lfile, fcd, fco, fca,
@@ -297,24 +297,48 @@
             self.mark(dfile, 'r')
         return r
 
-def _checkunknownfile(repo, wctx, mctx, f):
+def _checkunknownfile(repo, wctx, mctx, f, f2=None):
+    if f2 is None:
+        f2 = f
     return (os.path.isfile(repo.wjoin(f))
-        and repo.wopener.audit.check(f)
+        and repo.wvfs.audit.check(f)
         and repo.dirstate.normalize(f) not in repo.dirstate
-        and mctx[f].cmp(wctx[f]))
+        and mctx[f2].cmp(wctx[f]))
 
-def _checkunknown(repo, wctx, mctx):
-    "check for collisions between unknown files and files in mctx"
+def _checkunknownfiles(repo, wctx, mctx, force, actions):
+    """
+    Considers any actions that care about the presence of conflicting unknown
+    files. For some actions, the result is to abort; for others, it is to
+    choose a different action.
+    """
+    aborts = []
+    if not force:
+        for f, (m, args, msg) in actions.iteritems():
+            if m in ('c', 'dc'):
+                if _checkunknownfile(repo, wctx, mctx, f):
+                    aborts.append(f)
+            elif m == 'dg':
+                if _checkunknownfile(repo, wctx, mctx, f, args[0]):
+                    aborts.append(f)
 
-    error = False
-    for f in mctx:
-        if f not in wctx and _checkunknownfile(repo, wctx, mctx, f):
-            error = True
-            wctx._repo.ui.warn(_("%s: untracked file differs\n") % f)
-    if error:
+    for f in sorted(aborts):
+        repo.ui.warn(_("%s: untracked file differs\n") % f)
+    if aborts:
         raise util.Abort(_("untracked files in working directory differ "
                            "from files in requested revision"))
 
+    for f, (m, args, msg) in actions.iteritems():
+        if m == 'c':
+            actions[f] = ('g', args, msg)
+        elif m == 'cm':
+            fl2, anc = args
+            different = _checkunknownfile(repo, wctx, mctx, f)
+            if different:
+                actions[f] = ('m', (f, f, None, False, anc),
+                              "remote differs from untracked local")
+            else:
+                actions[f] = ('g', (fl2,), "remote created")
+
 def _forgetremoved(wctx, mctx, branchmerge):
     """
     Forget removed files
@@ -330,20 +354,20 @@
     as removed.
     """
 
-    ractions = []
-    factions = xactions = []
+    actions = {}
+    m = 'f'
     if branchmerge:
-        xactions = ractions
+        m = 'r'
     for f in wctx.deleted():
         if f not in mctx:
-            xactions.append((f, None, "forget deleted"))
+            actions[f] = m, None, "forget deleted"
 
     if not branchmerge:
         for f in wctx.removed():
             if f not in mctx:
-                factions.append((f, None, "forget removed"))
+                actions[f] = 'f', None, "forget removed"
 
-    return ractions, factions
+    return actions
 
 def _checkcollision(repo, wmf, actions):
     # build provisional merged manifest up
@@ -361,7 +385,6 @@
             pmmf.discard(f2)
             pmmf.add(f)
         for f, args, msg in actions['dg']:
-            f2, flags = args
             pmmf.add(f)
         for f, args, msg in actions['m']:
             f1, f2, fa, move, anc = args
@@ -388,8 +411,7 @@
     acceptremote = accept the incoming changes without prompting
     """
 
-    actions = dict((m, []) for m in 'a f g cd dc r dm dg m dr e rd k'.split())
-    copy, movewithdir = {}, {}
+    copy, movewithdir, diverge, renamedelete = {}, {}, {}, {}
 
     # manifests fetched in order are going to be faster, so prime the caches
     [x.manifest() for x in
@@ -398,10 +420,6 @@
     if followcopies:
         ret = copies.mergecopies(repo, wctx, p2, pa)
         copy, movewithdir, diverge, renamedelete = ret
-        for of, fl in diverge.iteritems():
-            actions['dr'].append((of, (fl,), "divergent renames"))
-        for of, fl in renamedelete.iteritems():
-            actions['rd'].append((of, (fl,), "rename and delete"))
 
     repo.ui.note(_("resolving manifests\n"))
     repo.ui.debug(" branchmerge: %s, force: %s, partial: %s\n"
@@ -416,126 +434,214 @@
         # check whether sub state is modified
         for s in sorted(wctx.substate):
             if wctx.sub(s).dirty():
-                m1['.hgsubstate'] += "+"
+                m1['.hgsubstate'] += '+'
                 break
 
-    aborts = []
     # Compare manifests
     diff = m1.diff(m2)
 
+    actions = {}
     for f, ((n1, fl1), (n2, fl2)) in diff.iteritems():
         if partial and not partial(f):
             continue
-        if n1 and n2:
-            fa = f
-            a = ma.get(f, nullid)
-            if a == nullid:
-                fa = copy.get(f, f)
-                # Note: f as default is wrong - we can't really make a 3-way
-                # merge without an ancestor file.
-            fla = ma.flags(fa)
-            nol = 'l' not in fl1 + fl2 + fla
-            if n2 == a and fl2 == fla:
-                actions['k'].append((f, (), "keep")) # remote unchanged
-            elif n1 == a and fl1 == fla: # local unchanged - use remote
-                if n1 == n2: # optimization: keep local content
-                    actions['e'].append((f, (fl2,), "update permissions"))
+        if n1 and n2: # file exists on both local and remote side
+            if f not in ma:
+                fa = copy.get(f, None)
+                if fa is not None:
+                    actions[f] = ('m', (f, f, fa, False, pa.node()),
+                                  "both renamed from " + fa)
                 else:
-                    actions['g'].append((f, (fl2,), "remote is newer"))
-            elif nol and n2 == a: # remote only changed 'x'
-                actions['e'].append((f, (fl2,), "update permissions"))
-            elif nol and n1 == a: # local only changed 'x'
-                actions['g'].append((f, (fl1,), "remote is newer"))
-            else: # both changed something
-                actions['m'].append((f, (f, f, fa, False, pa.node()),
-                               "versions differ"))
-        elif f in copied: # files we'll deal with on m2 side
-            pass
-        elif n1 and f in movewithdir: # directory rename, move local
-            f2 = movewithdir[f]
-            actions['dm'].append((f2, (f, fl1),
-                            "remote directory rename - move from " + f))
-        elif n1 and f in copy:
-            f2 = copy[f]
-            actions['m'].append((f, (f, f2, f2, False, pa.node()),
-                            "local copied/moved from " + f2))
-        elif n1 and f in ma: # clean, a different, no remote
-            if n1 != ma[f]:
-                if acceptremote:
-                    actions['r'].append((f, None, "remote delete"))
-                else:
-                    actions['cd'].append((f, None, "prompt changed/deleted"))
-            elif n1[20:] == "a": # added, no remote
-                actions['f'].append((f, None, "remote deleted"))
-            else:
-                actions['r'].append((f, None, "other deleted"))
-        elif n2 and f in movewithdir:
-            f2 = movewithdir[f]
-            actions['dg'].append((f2, (f, fl2),
-                            "local directory rename - get from " + f))
-        elif n2 and f in copy:
-            f2 = copy[f]
-            if f2 in m2:
-                actions['m'].append((f, (f2, f, f2, False, pa.node()),
-                                "remote copied from " + f2))
+                    actions[f] = ('m', (f, f, None, False, pa.node()),
+                                  "both created")
             else:
-                actions['m'].append((f, (f2, f, f2, True, pa.node()),
-                                "remote moved from " + f2))
-        elif n2 and f not in ma:
-            # local unknown, remote created: the logic is described by the
-            # following table:
-            #
-            # force  branchmerge  different  |  action
-            #   n         *           n      |    get
-            #   n         *           y      |   abort
-            #   y         n           *      |    get
-            #   y         y           n      |    get
-            #   y         y           y      |   merge
-            #
-            # Checking whether the files are different is expensive, so we
-            # don't do that when we can avoid it.
-            if force and not branchmerge:
-                actions['g'].append((f, (fl2,), "remote created"))
-            else:
-                different = _checkunknownfile(repo, wctx, p2, f)
-                if force and branchmerge and different:
-                    # FIXME: This is wrong - f is not in ma ...
-                    actions['m'].append((f, (f, f, f, False, pa.node()),
-                                    "remote differs from untracked local"))
-                elif not force and different:
-                    aborts.append((f, "ud"))
+                a = ma[f]
+                fla = ma.flags(f)
+                nol = 'l' not in fl1 + fl2 + fla
+                if n2 == a and fl2 == fla:
+                    actions[f] = ('k', (), "remote unchanged")
+                elif n1 == a and fl1 == fla: # local unchanged - use remote
+                    if n1 == n2: # optimization: keep local content
+                        actions[f] = ('e', (fl2,), "update permissions")
+                    else:
+                        actions[f] = ('g', (fl2,), "remote is newer")
+                elif nol and n2 == a: # remote only changed 'x'
+                    actions[f] = ('e', (fl2,), "update permissions")
+                elif nol and n1 == a: # local only changed 'x'
+                    actions[f] = ('g', (fl1,), "remote is newer")
+                else: # both changed something
+                    actions[f] = ('m', (f, f, f, False, pa.node()),
+                                   "versions differ")
+        elif n1: # file exists only on local side
+            if f in copied:
+                pass # we'll deal with it on m2 side
+            elif f in movewithdir: # directory rename, move local
+                f2 = movewithdir[f]
+                if f2 in m2:
+                    actions[f2] = ('m', (f, f2, None, True, pa.node()),
+                                   "remote directory rename, both created")
+                else:
+                    actions[f2] = ('dm', (f, fl1),
+                                   "remote directory rename - move from " + f)
+            elif f in copy:
+                f2 = copy[f]
+                actions[f] = ('m', (f, f2, f2, False, pa.node()),
+                              "local copied/moved from " + f2)
+            elif f in ma: # clean, a different, no remote
+                if n1 != ma[f]:
+                    if acceptremote:
+                        actions[f] = ('r', None, "remote delete")
+                    else:
+                        actions[f] = ('cd', None, "prompt changed/deleted")
+                elif n1[20:] == 'a':
+                    # This extra 'a' is added by working copy manifest to mark
+                    # the file as locally added. We should forget it instead of
+                    # deleting it.
+                    actions[f] = ('f', None, "remote deleted")
+                else:
+                    actions[f] = ('r', None, "other deleted")
+        elif n2: # file exists only on remote side
+            if f in copied:
+                pass # we'll deal with it on m1 side
+            elif f in movewithdir:
+                f2 = movewithdir[f]
+                if f2 in m1:
+                    actions[f2] = ('m', (f2, f, None, False, pa.node()),
+                                   "local directory rename, both created")
+                else:
+                    actions[f2] = ('dg', (f, fl2),
+                                   "local directory rename - get from " + f)
+            elif f in copy:
+                f2 = copy[f]
+                if f2 in m2:
+                    actions[f] = ('m', (f2, f, f2, False, pa.node()),
+                                  "remote copied from " + f2)
+                else:
+                    actions[f] = ('m', (f2, f, f2, True, pa.node()),
+                                  "remote moved from " + f2)
+            elif f not in ma:
+                # local unknown, remote created: the logic is described by the
+                # following table:
+                #
+                # force  branchmerge  different  |  action
+                #   n         *           *      |   create
+                #   y         n           *      |   create
+                #   y         y           n      |   create
+                #   y         y           y      |   merge
+                #
+                # Checking whether the files are different is expensive, so we
+                # don't do that when we can avoid it.
+                if not force:
+                    actions[f] = ('c', (fl2,), "remote created")
+                elif not branchmerge:
+                    actions[f] = ('c', (fl2,), "remote created")
+                else:
+                    actions[f] = ('cm', (fl2, pa.node()),
+                                  "remote created, get or merge")
+            elif n2 != ma[f]:
+                if acceptremote:
+                    actions[f] = ('c', (fl2,), "remote recreating")
                 else:
-                    actions['g'].append((f, (fl2,), "remote created"))
-        elif n2 and n2 != ma[f]:
-            different = _checkunknownfile(repo, wctx, p2, f)
-            if not force and different:
-                aborts.append((f, "ud"))
-            else:
-                # if different: old untracked f may be overwritten and lost
-                if acceptremote:
-                    actions['g'].append((f, (m2.flags(f),),
-                                   "remote recreating"))
-                else:
-                    actions['dc'].append((f, (m2.flags(f),),
-                                   "prompt deleted/changed"))
+                    actions[f] = ('dc', (fl2,), "prompt deleted/changed")
+
+    return actions, diverge, renamedelete
+
+def _resolvetrivial(repo, wctx, mctx, ancestor, actions):
+    """Resolves false conflicts where the nodeid changed but the content
+       remained the same."""
+
+    for f, (m, args, msg) in actions.items():
+        if m == 'cd' and f in ancestor and not wctx[f].cmp(ancestor[f]):
+            # local did change but ended up with same content
+            actions[f] = 'r', None, "prompt same"
+        elif m == 'dc' and f in ancestor and not mctx[f].cmp(ancestor[f]):
+            # remote did change but ended up with same content
+            del actions[f] # don't get = keep local deleted
+
+def calculateupdates(repo, wctx, mctx, ancestors, branchmerge, force, partial,
+                     acceptremote, followcopies):
+    "Calculate the actions needed to merge mctx into wctx using ancestors"
+
+    if len(ancestors) == 1: # default
+        actions, diverge, renamedelete = manifestmerge(
+            repo, wctx, mctx, ancestors[0], branchmerge, force, partial,
+            acceptremote, followcopies)
+        _checkunknownfiles(repo, wctx, mctx, force, actions)
+
+    else: # only when merge.preferancestor=* - the default
+        repo.ui.note(
+            _("note: merging %s and %s using bids from ancestors %s\n") %
+            (wctx, mctx, _(' and ').join(str(anc) for anc in ancestors)))
 
-    for f, m in sorted(aborts):
-        if m == "ud":
-            repo.ui.warn(_("%s: untracked file differs\n") % f)
-        else: assert False, m
-    if aborts:
-        raise util.Abort(_("untracked files in working directory differ "
-                           "from files in requested revision"))
+        # Call for bids
+        fbids = {} # mapping filename to bids (action method to list of actions)
+        diverge, renamedelete = None, None
+        for ancestor in ancestors:
+            repo.ui.note(_('\ncalculating bids for ancestor %s\n') % ancestor)
+            actions, diverge1, renamedelete1 = manifestmerge(
+                repo, wctx, mctx, ancestor, branchmerge, force, partial,
+                acceptremote, followcopies)
+            _checkunknownfiles(repo, wctx, mctx, force, actions)
+            if diverge is None: # and renamedelete is None.
+                # Arbitrarily pick warnings from first iteration
+                diverge = diverge1
+                renamedelete = renamedelete1
+            for f, a in sorted(actions.iteritems()):
+                m, args, msg = a
+                repo.ui.debug(' %s: %s -> %s\n' % (f, msg, m))
+                if f in fbids:
+                    d = fbids[f]
+                    if m in d:
+                        d[m].append(a)
+                    else:
+                        d[m] = [a]
+                else:
+                    fbids[f] = {m: [a]}
 
-    if not util.checkcase(repo.path):
-        # check collision between files only in p2 for clean update
-        if (not branchmerge and
-            (force or not wctx.dirty(missing=True, branch=False))):
-            _checkcollision(repo, m2, None)
-        else:
-            _checkcollision(repo, m1, actions)
+        # Pick the best bid for each file
+        repo.ui.note(_('\nauction for merging merge bids\n'))
+        actions = {}
+        for f, bids in sorted(fbids.items()):
+            # bids is a mapping from action method to list of actions
+            # Consensus?
+            if len(bids) == 1: # all bids are the same kind of method
+                m, l = bids.items()[0]
+                if util.all(a == l[0] for a in l[1:]): # len(bids) is > 1
+                    repo.ui.note(" %s: consensus for %s\n" % (f, m))
+                    actions[f] = l[0]
+                    continue
+            # If keep is an option, just do it.
+            if 'k' in bids:
+                repo.ui.note(" %s: picking 'keep' action\n" % f)
+                actions[f] = bids['k'][0]
+                continue
+            # If there are gets and they all agree [how could they not?], do it.
+            if 'g' in bids:
+                ga0 = bids['g'][0]
+                if util.all(a == ga0 for a in bids['g'][1:]):
+                    repo.ui.note(" %s: picking 'get' action\n" % f)
+                    actions[f] = ga0
+                    continue
+            # TODO: Consider other simple actions such as mode changes
+            # Handle inefficient democrazy.
+            repo.ui.note(_(' %s: multiple bids for merge action:\n') % f)
+            for m, l in sorted(bids.items()):
+                for _f, args, msg in l:
+                    repo.ui.note('  %s -> %s\n' % (msg, m))
+            # Pick random action. TODO: Instead, prompt user when resolving
+            m, l = bids.items()[0]
+            repo.ui.warn(_(' %s: ambiguous merge - picked %s action\n') %
+                         (f, m))
+            actions[f] = l[0]
+            continue
+        repo.ui.note(_('end of auction\n\n'))
 
-    return actions
+    _resolvetrivial(repo, wctx, mctx, ancestors[0], actions)
+
+    if wctx.rev() is None:
+        fractions = _forgetremoved(wctx, mctx, branchmerge)
+        actions.update(fractions)
+
+    return actions, diverge, renamedelete
 
 def batchremove(repo, actions):
     """apply removes to the working directory
@@ -545,7 +651,7 @@
     verbose = repo.ui.verbose
     unlink = util.unlinkpath
     wjoin = repo.wjoin
-    audit = repo.wopener.audit
+    audit = repo.wvfs.audit
     i = 0
     for f, args, msg in actions:
         repo.ui.debug(" %s: %s -> r\n" % (f, msg))
@@ -621,7 +727,7 @@
         if f1 != f and move:
             moves.append(f1)
 
-    audit = repo.wopener.audit
+    audit = repo.wvfs.audit
     _updating = _('updating')
     _files = _('files')
     progress = repo.ui.progress
@@ -678,7 +784,6 @@
         repo.ui.debug(" %s: %s -> m\n" % (f, msg))
         z += 1
         progress(_updating, z, item=f, total=numupdates, unit=_files)
-        f1, f2, fa, move, anc = args
         if f == '.hgsubstate': # subrepo states need updating
             subrepo.submerge(repo, wctx, mctx, wctx.ancestor(mctx),
                              overwrite)
@@ -715,28 +820,6 @@
         repo.wwrite(f, mctx.filectx(f0).data(), flags)
         updated += 1
 
-    # divergent renames
-    for f, args, msg in actions['dr']:
-        repo.ui.debug(" %s: %s -> dr\n" % (f, msg))
-        z += 1
-        progress(_updating, z, item=f, total=numupdates, unit=_files)
-        fl, = args
-        repo.ui.warn(_("note: possible conflict - %s was renamed "
-                       "multiple times to:\n") % f)
-        for nf in fl:
-            repo.ui.warn(" %s\n" % nf)
-
-    # rename and delete
-    for f, args, msg in actions['rd']:
-        repo.ui.debug(" %s: %s -> rd\n" % (f, msg))
-        z += 1
-        progress(_updating, z, item=f, total=numupdates, unit=_files)
-        fl, = args
-        repo.ui.warn(_("note: possible conflict - %s was deleted "
-                       "and renamed to:\n") % f)
-        for nf in fl:
-            repo.ui.warn(" %s\n" % nf)
-
     # exec
     for f, args, msg in actions['e']:
         repo.ui.debug(" %s: %s -> e\n" % (f, msg))
@@ -752,111 +835,6 @@
 
     return updated, merged, removed, unresolved
 
-def calculateupdates(repo, wctx, mctx, ancestors, branchmerge, force, partial,
-                     acceptremote, followcopies):
-    "Calculate the actions needed to merge mctx into wctx using ancestors"
-
-    if len(ancestors) == 1: # default
-        actions = manifestmerge(repo, wctx, mctx, ancestors[0],
-                                branchmerge, force,
-                                partial, acceptremote, followcopies)
-
-    else: # only when merge.preferancestor=* - the default
-        repo.ui.note(
-            _("note: merging %s and %s using bids from ancestors %s\n") %
-            (wctx, mctx, _(' and ').join(str(anc) for anc in ancestors)))
-
-        # Call for bids
-        fbids = {} # mapping filename to bids (action method to list af actions)
-        for ancestor in ancestors:
-            repo.ui.note(_('\ncalculating bids for ancestor %s\n') % ancestor)
-            actions = manifestmerge(repo, wctx, mctx, ancestor,
-                                    branchmerge, force,
-                                    partial, acceptremote, followcopies)
-            for m, l in sorted(actions.items()):
-                for a in l:
-                    f, args, msg = a
-                    repo.ui.debug(' %s: %s -> %s\n' % (f, msg, m))
-                    if f in fbids:
-                        d = fbids[f]
-                        if m in d:
-                            d[m].append(a)
-                        else:
-                            d[m] = [a]
-                    else:
-                        fbids[f] = {m: [a]}
-
-        # Pick the best bid for each file
-        repo.ui.note(_('\nauction for merging merge bids\n'))
-        actions = dict((m, []) for m in actions.keys())
-        for f, bids in sorted(fbids.items()):
-            # bids is a mapping from action method to list af actions
-            # Consensus?
-            if len(bids) == 1: # all bids are the same kind of method
-                m, l = bids.items()[0]
-                if util.all(a == l[0] for a in l[1:]): # len(bids) is > 1
-                    repo.ui.note(" %s: consensus for %s\n" % (f, m))
-                    actions[m].append(l[0])
-                    continue
-            # If keep is an option, just do it.
-            if "k" in bids:
-                repo.ui.note(" %s: picking 'keep' action\n" % f)
-                actions['k'].append(bids["k"][0])
-                continue
-            # If there are gets and they all agree [how could they not?], do it.
-            if "g" in bids:
-                ga0 = bids["g"][0]
-                if util.all(a == ga0 for a in bids["g"][1:]):
-                    repo.ui.note(" %s: picking 'get' action\n" % f)
-                    actions['g'].append(ga0)
-                    continue
-            # TODO: Consider other simple actions such as mode changes
-            # Handle inefficient democrazy.
-            repo.ui.note(_(' %s: multiple bids for merge action:\n') % f)
-            for m, l in sorted(bids.items()):
-                for _f, args, msg in l:
-                    repo.ui.note('  %s -> %s\n' % (msg, m))
-            # Pick random action. TODO: Instead, prompt user when resolving
-            m, l = bids.items()[0]
-            repo.ui.warn(_(' %s: ambiguous merge - picked %s action\n') %
-                         (f, m))
-            actions[m].append(l[0])
-            continue
-        repo.ui.note(_('end of auction\n\n'))
-
-    # Prompt and create actions. TODO: Move this towards resolve phase.
-    for f, args, msg in actions['cd']:
-        if f in ancestors[0] and not wctx[f].cmp(ancestors[0][f]):
-            # local did change but ended up with same content
-            actions['r'].append((f, None, "prompt same"))
-        elif repo.ui.promptchoice(
-            _("local changed %s which remote deleted\n"
-              "use (c)hanged version or (d)elete?"
-              "$$ &Changed $$ &Delete") % f, 0):
-            actions['r'].append((f, None, "prompt delete"))
-        else:
-            actions['a'].append((f, None, "prompt keep"))
-    del actions['cd'][:]
-
-    for f, args, msg in actions['dc']:
-        flags, = args
-        if f in ancestors[0] and not mctx[f].cmp(ancestors[0][f]):
-            # remote did change but ended up with same content
-            pass # don't get = keep local deleted
-        elif repo.ui.promptchoice(
-            _("remote changed %s which local deleted\n"
-              "use (c)hanged version or leave (d)eleted?"
-              "$$ &Changed $$ &Deleted") % f, 0) == 0:
-            actions['g'].append((f, (flags,), "prompt recreating"))
-    del actions['dc'][:]
-
-    if wctx.rev() is None:
-        ractions, factions = _forgetremoved(wctx, mctx, branchmerge)
-        actions['r'].extend(ractions)
-        actions['f'].extend(factions)
-
-    return actions
-
 def recordupdates(repo, actions, branchmerge):
     "record merge actions to the dirstate"
     # remove (must come first)
@@ -918,9 +896,6 @@
     # directory rename, move local
     for f, args, msg in actions['dm']:
         f0, flag = args
-        if f0 not in repo.dirstate:
-            # untracked file moved
-            continue
         if branchmerge:
             repo.dirstate.add(f)
             repo.dirstate.remove(f0)
@@ -990,7 +965,7 @@
         pl = wc.parents()
         p1 = pl[0]
         pas = [None]
-        if ancestor:
+        if ancestor is not None:
             pas = [repo[ancestor]]
 
         if node is None:
@@ -1000,8 +975,8 @@
             try:
                 node = repo.branchtip(wc.branch())
             except errormod.RepoLookupError:
-                if wc.branch() == "default": # no default branch!
-                    node = repo.lookup("tip") # update to tip
+                if wc.branch() == 'default': # no default branch!
+                    node = repo.lookup('tip') # update to tip
                 else:
                     raise util.Abort(_("branch %s not found") % wc.branch())
 
@@ -1029,14 +1004,14 @@
 
                     # get the max revision for the given successors set,
                     # i.e. the 'tip' of a set
-                    node = repo.revs("max(%ln)", successors).first()
+                    node = repo.revs('max(%ln)', successors).first()
                     pas = [p1]
 
         overwrite = force and not branchmerge
 
         p2 = repo[node]
         if pas[0] is None:
-            if repo.ui.config("merge", "preferancestor", '*') == '*':
+            if repo.ui.config('merge', 'preferancestor', '*') == '*':
                 cahs = repo.changelog.commonancestorsheads(p1.node(), p2.node())
                 pas = [repo[anc] for anc in (sorted(cahs) or [nullid])]
             else:
@@ -1104,12 +1079,47 @@
             pas = [wc.p1()]
         elif not branchmerge and not wc.dirty(missing=True):
             pass
-        elif pas[0] and repo.ui.configbool("merge", "followcopies", True):
+        elif pas[0] and repo.ui.configbool('merge', 'followcopies', True):
             followcopies = True
 
         ### calculate phase
-        actions = calculateupdates(repo, wc, p2, pas, branchmerge, force,
-                                   partial, mergeancestor, followcopies)
+        actionbyfile, diverge, renamedelete = calculateupdates(
+            repo, wc, p2, pas, branchmerge, force, partial, mergeancestor,
+            followcopies)
+        # Convert to dictionary-of-lists format
+        actions = dict((m, []) for m in 'a f g cd dc r dm dg m e k'.split())
+        for f, (m, args, msg) in actionbyfile.iteritems():
+            if m not in actions:
+                actions[m] = []
+            actions[m].append((f, args, msg))
+
+        if not util.checkcase(repo.path):
+            # check collision between files only in p2 for clean update
+            if (not branchmerge and
+                (force or not wc.dirty(missing=True, branch=False))):
+                _checkcollision(repo, p2.manifest(), None)
+            else:
+                _checkcollision(repo, wc.manifest(), actions)
+
+        # Prompt and create actions. TODO: Move this towards resolve phase.
+        for f, args, msg in sorted(actions['cd']):
+            if repo.ui.promptchoice(
+                _("local changed %s which remote deleted\n"
+                  "use (c)hanged version or (d)elete?"
+                  "$$ &Changed $$ &Delete") % f, 0):
+                actions['r'].append((f, None, "prompt delete"))
+            else:
+                actions['a'].append((f, None, "prompt keep"))
+        del actions['cd'][:]
+
+        for f, args, msg in sorted(actions['dc']):
+            flags, = args
+            if repo.ui.promptchoice(
+                _("remote changed %s which local deleted\n"
+                  "use (c)hanged version or leave (d)eleted?"
+                  "$$ &Changed $$ &Deleted") % f, 0) == 0:
+                actions['g'].append((f, (flags,), "prompt recreating"))
+        del actions['dc'][:]
 
         ### apply phase
         if not branchmerge: # just jump to the new rev
@@ -1121,6 +1131,20 @@
 
         stats = applyupdates(repo, actions, wc, p2, overwrite, labels=labels)
 
+        # divergent renames
+        for f, fl in sorted(diverge.iteritems()):
+            repo.ui.warn(_("note: possible conflict - %s was renamed "
+                           "multiple times to:\n") % f)
+            for nf in fl:
+                repo.ui.warn(" %s\n" % nf)
+
+        # rename and delete
+        for f, fl in sorted(renamedelete.iteritems()):
+            repo.ui.warn(_("note: possible conflict - %s was deleted "
+                           "and renamed to:\n") % f)
+            for nf in fl:
+                repo.ui.warn(" %s\n" % nf)
+
         if not partial:
             repo.dirstate.beginparentchange()
             repo.setparents(fp1, fp2)
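
The merge.py changes above have calculateupdates() return a per-file mapping
which update() then regroups into per-action lists before the apply phase.
The following standalone sketch mirrors that regrouping step; the sample file
names, action arguments and messages are invented for illustration.

    # regroup {file: (action, args, msg)} into {action: [(file, args, msg)]}
    actionbyfile = {
        'a.txt': ('g', ('',), 'remote created'),
        'b.txt': ('r', None, 'other deleted'),
        'c.txt': ('m', ('c.txt', 'c.txt', None, False), 'versions differ'),
    }

    actions = dict((m, []) for m in 'a f g cd dc r dm dg m e k'.split())
    for f, (m, args, msg) in sorted(actionbyfile.items()):
        actions.setdefault(m, []).append((f, args, msg))

    for m in sorted(actions):
        if actions[m]:
            print('%s: %s' % (m, [f for f, args, msg in actions[m]]))
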
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/namespaces.py	Sat Jan 17 18:28:30 2015 -0800
@@ -0,0 +1,164 @@
+from i18n import _
+from mercurial import util
+import templatekw
+
+def tolist(val):
+    """
+    a convenience method to return an empty list instead of None
+    """
+    if val is None:
+        return []
+    else:
+        return [val]
+
+class namespaces(object):
+    """provides an interface to register and operate on multiple namespaces. See
+    the namespace class below for details on the namespace object.
+
+    """
+
+    _names_version = 0
+
+    def __init__(self):
+        self._names = util.sortdict()
+
+        # we need current mercurial named objects (bookmarks, tags, and
+        # branches) to be initialized somewhere, so that place is here
+        bmknames = lambda repo: repo._bookmarks.keys()
+        bmknamemap = lambda repo, name: tolist(repo._bookmarks.get(name))
+        bmknodemap = lambda repo, name: repo.nodebookmarks(name)
+        n = namespace("bookmarks", templatename="bookmark", listnames=bmknames,
+                      namemap=bmknamemap, nodemap=bmknodemap)
+        self.addnamespace(n)
+
+        tagnames = lambda repo: [t for t, n in repo.tagslist()]
+        tagnamemap = lambda repo, name: tolist(repo._tagscache.tags.get(name))
+        tagnodemap = lambda repo, name: repo.nodetags(name)
+        n = namespace("tags", templatename="tag", listnames=tagnames,
+                      namemap=tagnamemap, nodemap=tagnodemap)
+        self.addnamespace(n)
+
+        bnames = lambda repo: repo.branchmap().keys()
+        bnamemap = lambda repo, name: tolist(repo.branchtip(name, True))
+        bnodemap = lambda repo, node: [repo[node].branch()]
+        n = namespace("branches", templatename="branch", listnames=bnames,
+                      namemap=bnamemap, nodemap=bnodemap)
+        self.addnamespace(n)
+
+    def __getitem__(self, namespace):
+        """returns the namespace object"""
+        return self._names[namespace]
+
+    def __iter__(self):
+        return self._names.__iter__()
+
+    def iteritems(self):
+        return self._names.iteritems()
+
+    def addnamespace(self, namespace, order=None):
+        """register a namespace
+
+        namespace: the namespace object to register (its name in plural form)
+        order: optional argument to specify the order of namespaces
+               (e.g. 'branches' should be listed before 'bookmarks')
+
+        """
+        if order is not None:
+            self._names.insert(order, namespace.name, namespace)
+        else:
+            self._names[namespace.name] = namespace
+
+        # we only generate a template keyword if one does not already exist
+        if namespace.name not in templatekw.keywords:
+            def generatekw(**args):
+                return templatekw.shownames(namespace.name, **args)
+
+            templatekw.keywords[namespace.name] = generatekw
+
+    def singlenode(self, repo, name):
+        """
+        Return the 'best' node for the given name. Best means the first node
+        in the first nonempty list returned by a name-to-nodes mapping function
+        in the defined precedence order.
+
+        Raises a KeyError if there is no such node.
+        """
+        for ns, v in self._names.iteritems():
+            n = v.namemap(repo, name)
+            if n:
+                # return max revision number
+                if len(n) > 1:
+                    cl = repo.changelog
+                    maxrev = max(cl.rev(node) for node in n)
+                    return cl.node(maxrev)
+                return n[0]
+        raise KeyError(_('no such name: %s') % name)
+
+class namespace(object):
+    """provides an interface to a namespace
+
+    Namespaces are basically generic many-to-many mappings between some
+    (namespaced) names and nodes. The goal here is to control the pollution of
+    jamming things into tags or bookmarks (in extension-land) and to simplify
+    internal bits of mercurial: log output, tab completion, etc.
+
+    More precisely, we define a mapping of names to nodes, and a mapping from
+    nodes to names. Each mapping returns a list.
+
+    Furthermore, each name mapping will be passed a name to lookup which might
+    not be in its domain. In this case, each method should return an empty list
+    and not raise an error.
+
+    This namespace object will define the properties we need:
+      'name': the namespace (plural form)
+      'templatename': name to use for templating (usually the singular form
+                      of the plural namespace name)
+      'listnames': list of all names in the namespace (usually the keys of a
+                   dictionary)
+      'namemap': function that takes a name and returns a list of nodes
+      'nodemap': function that takes a node and returns a list of names
+
+    """
+
+    def __init__(self, name, templatename=None, logname=None, colorname=None,
+                 listnames=None, namemap=None, nodemap=None):
+        """create a namespace
+
+        name: the namespace to be registered (in plural form)
+        templatename: the name to use for templating
+        logname: the name to use for log output; if not specified templatename
+                 is used
+        colorname: the name to use for colored log output; if not specified
+                   logname is used
+        listnames: function to list all names
+        namemap: function that takes a name and returns a list of nodes
+        nodemap: function that takes a node and returns a list of names
+
+        """
+        self.name = name
+        self.templatename = templatename
+        self.logname = logname
+        self.colorname = colorname
+        self.listnames = listnames
+        self.namemap = namemap
+        self.nodemap = nodemap
+
+        # if logname is not specified, use the template name as backup
+        if self.logname is None:
+            self.logname = self.templatename
+
+        # if colorname is not specified, just use the logname as a backup
+        if self.colorname is None:
+            self.colorname = self.logname
+
+    def names(self, repo, node):
+        """method that returns a (sorted) list of names in a namespace that
+        match a given node"""
+        return sorted(self.nodemap(repo, node))
+
+    def nodes(self, repo, name):
+        """method that returns a list of nodes in a namespace that
+        match a given name.
+
+        """
+        return sorted(self.namemap(repo, name))
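
The new mercurial/namespaces.py above is aimed at letting extensions register
their own name-to-node mappings next to bookmarks, tags and branches. Below is
a minimal, hypothetical extension-style sketch of such a registration, assuming
the repository exposes the registry as repo.names (as the named() revset later
in this changeset does); the 'guineapigs' namespace and its backing dict are
invented.

    from mercurial import namespaces

    def reposetup(ui, repo):
        pigs = {}  # invented backing store: name -> node

        def listnames(repo):
            return pigs.keys()

        def namemap(repo, name):           # name -> list of nodes
            node = pigs.get(name)
            return [node] if node is not None else []

        def nodemap(repo, node):           # node -> list of names
            return [n for n, v in pigs.items() if v == node]

        ns = namespaces.namespace('guineapigs', templatename='guineapig',
                                  listnames=listnames, namemap=namemap,
                                  nodemap=nodemap)
        repo.names.addnamespace(ns)
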
--- a/mercurial/obsolete.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/obsolete.py	Sat Jan 17 18:28:30 2015 -0800
@@ -74,6 +74,7 @@
 
 _pack = struct.pack
 _unpack = struct.unpack
+_calcsize = struct.calcsize
 
 _SEEK_END = 2 # os.SEEK_END was introduced in Python 2.5
 
@@ -142,8 +143,8 @@
 _fm0version = 0
 _fm0fixed   = '>BIB20s'
 _fm0node = '20s'
-_fm0fsize = struct.calcsize(_fm0fixed)
-_fm0fnodesize = struct.calcsize(_fm0node)
+_fm0fsize = _calcsize(_fm0fixed)
+_fm0fnodesize = _calcsize(_fm0node)
 
 def _fm0readmarkers(data, off=0):
     # Loop on markers
@@ -275,66 +276,88 @@
 _fm1fixed = '>IdhHBBB20s'
 _fm1nodesha1 = '20s'
 _fm1nodesha256 = '32s'
-_fm1fsize = struct.calcsize(_fm1fixed)
+_fm1nodesha1size = _calcsize(_fm1nodesha1)
+_fm1nodesha256size = _calcsize(_fm1nodesha256)
+_fm1fsize = _calcsize(_fm1fixed)
 _fm1parentnone = 3
 _fm1parentshift = 14
 _fm1parentmask = (_fm1parentnone << _fm1parentshift)
 _fm1metapair = 'BB'
-_fm1metapairsize = struct.calcsize('BB')
+_fm1metapairsize = _calcsize('BB')
 
 def _fm1readmarkers(data, off=0):
+    # make some global constants local for performance
+    noneflag = _fm1parentnone
+    sha2flag = usingsha256
+    sha1size = _fm1nodesha1size
+    sha2size = _fm1nodesha256size
+    sha1fmt = _fm1nodesha1
+    sha2fmt = _fm1nodesha256
+    metasize = _fm1metapairsize
+    metafmt = _fm1metapair
+    fsize = _fm1fsize
+    unpack = _unpack
+
     # Loop on markers
-    l = len(data)
-    while off + _fm1fsize <= l:
+    stop = len(data) - _fm1fsize
+    ufixed = util.unpacker(_fm1fixed)
+    while off <= stop:
         # read fixed part
-        cur = data[off:off + _fm1fsize]
-        off += _fm1fsize
-        fixeddata = _unpack(_fm1fixed, cur)
-        ttsize, seconds, tz, flags, numsuc, numpar, nummeta, prec = fixeddata
-        # extract the number of parents information
-        if numpar == _fm1parentnone:
-            numpar = None
-        # build the date tuple (upgrade tz minutes to seconds)
-        date = (seconds, tz * 60)
-        _fm1node = _fm1nodesha1
-        if flags & usingsha256:
-            _fm1node = _fm1nodesha256
-        fnodesize = struct.calcsize(_fm1node)
-        # read replacement
-        sucs = ()
-        if numsuc:
-            s = (fnodesize * numsuc)
-            cur = data[off:off + s]
-            sucs = _unpack(_fm1node * numsuc, cur)
-            off += s
-        # read parents
-        if numpar is None:
-            parents = None
-        elif numpar == 0:
-            parents = ()
-        elif numpar:  # neither None nor zero
-            s = (fnodesize * numpar)
-            cur = data[off:off + s]
-            parents = _unpack(_fm1node * numpar, cur)
-            off += s
+        o1 = off + fsize
+        t, secs, tz, flags, numsuc, numpar, nummeta, prec = ufixed(data[off:o1])
+
+        if flags & sha2flag:
+            # FIXME: prec was read as a SHA1, needs to be amended
+
+            # read 0 or more successors
+            if numsuc == 1:
+                o2 = o1 + sha2size
+                sucs = (data[o1:o2],)
+            else:
+                o2 = o1 + sha2size * numsuc
+                sucs = unpack(sha2fmt * numsuc, data[o1:o2])
+
+            # read parents
+            if numpar == noneflag:
+                o3 = o2
+                parents = None
+            elif numpar == 1:
+                o3 = o2 + sha2size
+                parents = (data[o2:o3],)
+            else:
+                o3 = o2 + sha2size * numpar
+                parents = unpack(sha2fmt * numpar, data[o2:o3])
+        else:
+            # read 0 or more successors
+            if numsuc == 1:
+                o2 = o1 + sha1size
+                sucs = (data[o1:o2],)
+            else:
+                o2 = o1 + sha1size * numsuc
+                sucs = unpack(sha1fmt * numsuc, data[o1:o2])
+
+            # read parents
+            if numpar == noneflag:
+                o3 = o2
+                parents = None
+            elif numpar == 1:
+                o3 = o2 + sha1size
+                parents = (data[o2:o3],)
+            else:
+                o3 = o2 + sha1size * numpar
+                parents = unpack(sha1fmt * numpar, data[o2:o3])
+
         # read metadata
-        metaformat = '>' + (_fm1metapair * nummeta)
-        s = _fm1metapairsize * nummeta
-        metapairsize = _unpack(metaformat, data[off:off + s])
-        off += s
+        off = o3 + metasize * nummeta
+        metapairsize = unpack('>' + (metafmt * nummeta), data[o3:off])
         metadata = []
         for idx in xrange(0, len(metapairsize), 2):
-            sk = metapairsize[idx]
-            sv = metapairsize[idx + 1]
-            key = data[off:off + sk]
-            value = data[off + sk:off + sk + sv]
-            assert len(key) == sk
-            assert len(value) == sv
-            metadata.append((key, value))
-            off += sk + sv
-        metadata = tuple(metadata)
+            o1 = off + metapairsize[idx]
+            o2 = o1 + metapairsize[idx + 1]
+            metadata.append((data[off:o1], data[o1:o2]))
+            off = o2
 
-        yield (prec, sucs, flags, metadata, date, parents)
+        yield (prec, sucs, flags, tuple(metadata), (secs, tz * 60), parents)
 
 def _fm1encodeonemarker(marker):
     pre, sucs, flags, metadata, date, parents = marker
@@ -358,7 +381,7 @@
     data.extend(sucs)
     if parents is not None:
         data.extend(parents)
-    totalsize = struct.calcsize(format)
+    totalsize = _calcsize(format)
     for key, value in metadata:
         lk = len(key)
         lv = len(value)
@@ -377,6 +400,7 @@
 formats = {_fm0version: (_fm0readmarkers, _fm0encodeonemarker),
            _fm1version: (_fm1readmarkers, _fm1encodeonemarker)}
 
+@util.nogc
 def _readmarkers(data):
     """Read and enumerate markers from raw data"""
     off = 0
@@ -562,6 +586,7 @@
         version, markers = _readmarkers(data)
         return self.add(transaction, markers)
 
+    @util.nogc
     def _load(self, markers):
         for mark in markers:
             self._all.append(mark)
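
The _fm1readmarkers() rewrite above leans on two micro-optimizations: binding
frequently used globals to locals, and unpacking the fixed-size part with a
precompiled unpacker (util.unpacker is presumably a thin wrapper around
struct.Struct(fmt).unpack). The standalone sketch below shows the precompiled
pattern on the same fixed-part format; the record contents and timings are
illustrative only.

    import struct
    from timeit import timeit

    fmt = '>IdhHBBB20s'
    blob = struct.pack(fmt, 1, 0.0, 0, 0, 1, 0, 0, b'\x00' * 20) * 10000

    def slow(data):
        size = struct.calcsize(fmt)
        off = 0
        while off < len(data):
            struct.unpack(fmt, data[off:off + size])  # re-parses fmt each call
            off += size

    def fast(data):
        unpack = struct.Struct(fmt).unpack            # parse fmt only once
        size = struct.calcsize(fmt)
        end = len(data)
        off = 0
        while off < end:
            unpack(data[off:off + size])
            off += size

    print(timeit(lambda: slow(blob), number=10))
    print(timeit(lambda: fast(blob), number=10))
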
--- a/mercurial/parsers.c	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/parsers.c	Sat Jan 17 18:28:30 2015 -0800
@@ -1978,6 +1978,9 @@
 			PyErr_SetString(PyExc_ValueError, "rev out of range");
 		return -1;
 	}
+
+	if (nt_init(self) == -1)
+		return -1;
 	return nt_insert(self, node, (int)rev);
 }
 
--- a/mercurial/patch.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/patch.py	Sat Jan 17 18:28:30 2015 -0800
@@ -1558,20 +1558,53 @@
 class GitDiffRequired(Exception):
     pass
 
-def diffopts(ui, opts=None, untrusted=False, section='diff'):
-    def get(key, name=None, getter=ui.configbool):
-        return ((opts and opts.get(key)) or
-                getter(section, name or key, None, untrusted=untrusted))
-    return mdiff.diffopts(
-        text=opts and opts.get('text'),
-        git=get('git'),
-        nodates=get('nodates'),
-        nobinary=get('nobinary'),
-        showfunc=get('show_function', 'showfunc'),
-        ignorews=get('ignore_all_space', 'ignorews'),
-        ignorewsamount=get('ignore_space_change', 'ignorewsamount'),
-        ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines'),
-        context=get('unified', getter=ui.config))
+def diffallopts(ui, opts=None, untrusted=False, section='diff'):
+    '''return diffopts with all features supported and parsed'''
+    return difffeatureopts(ui, opts=opts, untrusted=untrusted, section=section,
+                           git=True, whitespace=True, formatchanging=True)
+
+diffopts = diffallopts
+
+def difffeatureopts(ui, opts=None, untrusted=False, section='diff', git=False,
+                    whitespace=False, formatchanging=False):
+    '''return diffopts with only opted-in features parsed
+
+    Features:
+    - git: git-style diffs
+    - whitespace: whitespace options like ignoreblanklines and ignorews
+    - formatchanging: options that will likely break or cause correctness issues
+      with most diff parsers
+    '''
+    def get(key, name=None, getter=ui.configbool, forceplain=None):
+        if opts:
+            v = opts.get(key)
+            if v:
+                return v
+        if forceplain is not None and ui.plain():
+            return forceplain
+        return getter(section, name or key, None, untrusted=untrusted)
+
+    # core options, expected to be understood by every diff parser
+    buildopts = {
+        'nodates': get('nodates'),
+        'showfunc': get('show_function', 'showfunc'),
+        'context': get('unified', getter=ui.config),
+    }
+
+    if git:
+        buildopts['git'] = get('git')
+    if whitespace:
+        buildopts['ignorews'] = get('ignore_all_space', 'ignorews')
+        buildopts['ignorewsamount'] = get('ignore_space_change',
+                                          'ignorewsamount')
+        buildopts['ignoreblanklines'] = get('ignore_blank_lines',
+                                            'ignoreblanklines')
+    if formatchanging:
+        buildopts['text'] = opts and opts.get('text')
+        buildopts['nobinary'] = get('nobinary')
+        buildopts['noprefix'] = get('noprefix', forceplain=False)
+
+    return mdiff.diffopts(**buildopts)
 
 def diff(repo, node1=None, node2=None, match=None, changes=None, opts=None,
          losedatafn=None, prefix=''):
@@ -1625,7 +1658,6 @@
     if not modified and not added and not removed:
         return []
 
-    revs = None
     hexfunc = repo.ui.debugflag and hex or short
     revs = [hexfunc(node) for node in [ctx1.node(), ctx2.node()] if node]
 
@@ -1715,13 +1747,8 @@
             header.append('old mode %s\n' % omode)
             header.append('new mode %s\n' % nmode)
 
-    def addindexmeta(meta, revs):
-        if opts.git:
-            i = len(revs)
-            if i==2:
-                meta.append('index %s..%s\n' % tuple(revs))
-            elif i==3:
-                meta.append('index %s,%s..%s\n' % tuple(revs))
+    def addindexmeta(meta, oindex, nindex):
+        meta.append('index %s..%s\n' % (oindex, nindex))
 
     def gitindex(text):
         if not text:
@@ -1731,9 +1758,15 @@
         s.update(text)
         return s.hexdigest()
 
+    if opts.noprefix:
+        aprefix = bprefix = ''
+    else:
+        aprefix = 'a/'
+        bprefix = 'b/'
+
     def diffline(a, b, revs):
         if opts.git:
-            line = 'diff --git a/%s b/%s\n' % (a, b)
+            line = 'diff --git %s%s %s%s\n' % (aprefix, a, bprefix, b)
         elif not repo.ui.quiet:
             if revs:
                 revinfo = ' '.join(["-r %s" % rev for rev in revs])
@@ -1745,7 +1778,7 @@
         return line
 
     date1 = util.datestr(ctx1.date())
-    man1 = ctx1.manifest()
+    date2 = util.datestr(ctx2.date())
 
     gone = set()
     gitmode = {'l': '120000', 'x': '100755', '': '100644'}
@@ -1755,18 +1788,25 @@
     if opts.git:
         revs = None
 
+    modifiedset, addedset, removedset = set(modified), set(added), set(removed)
+    # Fix up modified and added, since merged-in additions appear as
+    # modifications during merges
+    for f in modifiedset.copy():
+        if f not in ctx1:
+            addedset.add(f)
+            modifiedset.remove(f)
     for f in sorted(modified + added + removed):
         to = None
         tn = None
-        dodiff = True
+        binarydiff = False
         header = []
-        if f in man1:
+        if f not in addedset:
             to = getfilectx(f, ctx1).data()
-        if f not in removed:
+        if f not in removedset:
             tn = getfilectx(f, ctx2).data()
         a, b = f, f
         if opts.git or losedatafn:
-            if f in added or (f in modified and to is None):
+            if f in addedset:
                 mode = gitmode[ctx2.flags(f)]
                 if f in copy or f in copyto:
                     if opts.git:
@@ -1774,9 +1814,9 @@
                             a = copy[f]
                         else:
                             a = copyto[f]
-                        omode = gitmode[man1.flags(a)]
+                        omode = gitmode[ctx1.flags(a)]
                         addmodehdr(header, omode, mode)
-                        if a in removed and a not in gone:
+                        if a in removedset and a not in gone:
                             op = 'rename'
                             gone.add(a)
                         else:
@@ -1796,55 +1836,53 @@
                 # forces git mode.
                 if util.binary(tn):
                     if opts.git:
-                        dodiff = 'binary'
+                        binarydiff = True
                     else:
                         losedatafn(f)
                 if not opts.git and not tn:
                     # regular diffs cannot represent new empty file
                     losedatafn(f)
-            elif f in removed or (f in modified and tn is None):
+            elif f in removedset:
                 if opts.git:
                     # have we already reported a copy above?
-                    if ((f in copy and copy[f] in added
+                    if ((f in copy and copy[f] in addedset
                          and copyto[copy[f]] == f) or
-                        (f in copyto and copyto[f] in added
+                        (f in copyto and copyto[f] in addedset
                          and copy[copyto[f]] == f)):
-                        dodiff = False
+                        continue
                     else:
                         header.append('deleted file mode %s\n' %
-                                      gitmode[man1.flags(f)])
+                                      gitmode[ctx1.flags(f)])
                         if util.binary(to):
-                            dodiff = 'binary'
+                            binarydiff = True
                 elif not to or util.binary(to):
                     # regular diffs cannot represent empty file deletion
                     losedatafn(f)
             else:
-                oflag = man1.flags(f)
+                oflag = ctx1.flags(f)
                 nflag = ctx2.flags(f)
                 binary = util.binary(to) or util.binary(tn)
                 if opts.git:
                     addmodehdr(header, gitmode[oflag], gitmode[nflag])
                     if binary:
-                        dodiff = 'binary'
+                        binarydiff = True
                 elif binary or nflag != oflag:
                     losedatafn(f)
 
-        if dodiff:
-            if opts.git or revs:
-                header.insert(0, diffline(join(a), join(b), revs))
-            if dodiff == 'binary' and not opts.nobinary:
-                text = mdiff.b85diff(to, tn)
-                if text:
-                    addindexmeta(header, [gitindex(to), gitindex(tn)])
-            else:
-                text = mdiff.unidiff(to, date1,
-                                    # ctx2 date may be dynamic
-                                    tn, util.datestr(ctx2.date()),
-                                    join(a), join(b), opts=opts)
-            if header and (text or len(header) > 1):
-                yield ''.join(header)
-            if text:
-                yield text
+        if opts.git or revs:
+            header.insert(0, diffline(join(a), join(b), revs))
+        if binarydiff and not opts.nobinary:
+            text = mdiff.b85diff(to, tn)
+            if text and opts.git:
+                addindexmeta(header, gitindex(to), gitindex(tn))
+        else:
+            text = mdiff.unidiff(to, date1,
+                                 tn, date2,
+                                 join(a), join(b), opts=opts)
+        if header and (text or len(header) > 1):
+            yield ''.join(header)
+        if text:
+            yield text
 
 def diffstatsum(stats):
     maxfile, maxtotal, addtotal, removetotal, binary = 0, 0, 0, 0, False
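
The diffopts() split above lets callers opt into feature groups instead of
always parsing every diff option. A minimal sketch of the intended call
pattern, assuming a ui object and an opts dict are at hand (for example inside
an extension command); the helper names are invented.

    from mercurial import patch

    def _optsforparser(ui, opts):
        # honour --git and the whitespace knobs, but skip the format-changing
        # group (noprefix, nobinary, ...) that would confuse a diff parser
        return patch.difffeatureopts(ui, opts=opts, git=True, whitespace=True)

    def _optsfordisplay(ui, opts):
        # everything enabled, equivalent to the old diffopts() behaviour
        return patch.diffallopts(ui, opts=opts)
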
--- a/mercurial/pathutil.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/pathutil.py	Sat Jan 17 18:28:30 2015 -0800
@@ -157,7 +157,7 @@
 def normasprefix(path):
     '''normalize the specified path as path prefix
 
-    Returned vaule can be used safely for "p.startswith(prefix)",
+    Returned value can be used safely for "p.startswith(prefix)",
     "p[len(prefix):]", and so on.
 
     For efficiency, this expects "path" argument to be already
--- a/mercurial/phases.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/phases.py	Sat Jan 17 18:28:30 2015 -0800
@@ -100,6 +100,7 @@
 
 """
 
+import os
 import errno
 from node import nullid, nullrev, bin, hex, short
 from i18n import _
@@ -124,7 +125,15 @@
     dirty = False
     roots = [set() for i in allphases]
     try:
-        f = repo.sopener('phaseroots')
+        f = None
+        if 'HG_PENDING' in os.environ:
+            try:
+                f = repo.svfs('phaseroots.pending')
+            except IOError, inst:
+                if inst.errno != errno.ENOENT:
+                    raise
+        if f is None:
+            f = repo.svfs('phaseroots')
         try:
             for line in f:
                 phase, nh = line.split()
@@ -147,12 +156,12 @@
             self.phaseroots, self.dirty = _readroots(repo, phasedefaults)
             self._phaserevs = None
             self.filterunknown(repo)
-            self.opener = repo.sopener
+            self.opener = repo.svfs
 
     def copy(self):
         # Shallow copy meant to ensure isolation in
         # advance/retractboundary(), nothing more.
-        ph = phasecache(None, None, _load=False)
+        ph = self.__class__(None, None, _load=False)
         ph.phaseroots = self.phaseroots[:]
         ph.dirty = self.dirty
         ph.opener = self.opener
--- a/mercurial/posix.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/posix.py	Sat Jan 17 18:28:30 2015 -0800
@@ -8,7 +8,7 @@
 from i18n import _
 import encoding
 import os, sys, errno, stat, getpass, pwd, grp, socket, tempfile, unicodedata
-import fcntl
+import fcntl, re
 
 posixfile = open
 normpath = os.path.normpath
@@ -315,9 +315,16 @@
     def checklink(path):
         return False
 
+_needsshellquote = None
 def shellquote(s):
     if os.sys.platform == 'OpenVMS':
         return '"%s"' % s
+    global _needsshellquote
+    if _needsshellquote is None:
+        _needsshellquote = re.compile(r'[^a-zA-Z0-9._/-]').search
+    if not _needsshellquote(s):
+        # "s" shouldn't have to be quoted
+        return s
     else:
         return "'%s'" % s.replace("'", "'\\''")
 
--- a/mercurial/repair.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/repair.py	Sat Jan 17 18:28:30 2015 -0800
@@ -6,24 +6,42 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-from mercurial import changegroup, exchange
-from mercurial.node import short
+from mercurial import changegroup, exchange, util, bundle2
+from mercurial.node import short, hex
 from mercurial.i18n import _
 import errno
 
 def _bundle(repo, bases, heads, node, suffix, compress=True):
     """create a bundle with the specified revisions as a backup"""
-    cg = changegroup.changegroupsubset(repo, bases, heads, 'strip')
+    usebundle2 = (repo.ui.config('experimental', 'bundle2-exp') and
+                  repo.ui.config('experimental', 'strip-bundle2-version'))
+    if usebundle2:
+        cgversion = repo.ui.config('experimental', 'strip-bundle2-version')
+    else:
+        cgversion = '01'
+
+    cg = changegroup.changegroupsubset(repo, bases, heads, 'strip',
+                                       version=cgversion)
     backupdir = "strip-backup"
     vfs = repo.vfs
     if not vfs.isdir(backupdir):
         vfs.mkdir(backupdir)
-    name = "%s/%s-%s.hg" % (backupdir, short(node), suffix)
-    if compress:
+
+    # Include a hash of all the nodes in the filename for uniqueness
+    hexbases = (hex(n) for n in bases)
+    hexheads = (hex(n) for n in heads)
+    allcommits = repo.set('%ls::%ls', hexbases, hexheads)
+    allhashes = sorted(c.hex() for c in allcommits)
+    totalhash = util.sha1(''.join(allhashes)).hexdigest()
+    name = "%s/%s-%s-%s.hg" % (backupdir, short(node), totalhash[:8], suffix)
+
+    if usebundle2:
+        bundletype = "HG2Y"
+    elif compress:
         bundletype = "HG10BZ"
     else:
         bundletype = "HG10UN"
-    return changegroup.writebundle(cg, name, bundletype, vfs)
+    return changegroup.writebundle(repo.ui, cg, name, bundletype, vfs)
 
 def _collectfiles(repo, striprev):
     """find out the filelogs affected by the strip"""
@@ -140,7 +158,7 @@
         try:
             for i in xrange(offset, len(tr.entries)):
                 file, troffset, ignore = tr.entries[i]
-                repo.sopener(file, 'a').truncate(troffset)
+                repo.svfs(file, 'a').truncate(troffset)
                 if troffset == 0:
                     repo.store.markremoved(file)
             tr.close()
@@ -155,8 +173,17 @@
             if not repo.ui.verbose:
                 # silence internal shuffling chatter
                 repo.ui.pushbuffer()
-            changegroup.addchangegroup(repo, gen, 'strip',
-                                       'bundle:' + vfs.join(chgrpfile), True)
+            if isinstance(gen, bundle2.unbundle20):
+                tr = repo.transaction('strip')
+                try:
+                    bundle2.processbundle(repo, gen, lambda: tr)
+                    tr.close()
+                finally:
+                    tr.release()
+            else:
+                changegroup.addchangegroup(repo, gen, 'strip',
+                                           'bundle:' + vfs.join(chgrpfile),
+                                           True)
             if not repo.ui.verbose:
                 repo.ui.popbuffer()
             f.close()
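
The repair.py change above makes strip-backup bundle names unique by folding a
hash of every stripped changeset into the file name, alongside the short node
of the strip point. A standalone sketch of that naming scheme, assuming the
list of full commit hashes has already been collected; the values below are
fabricated.

    import hashlib

    def backupname(shortnode, allhashes, suffix='backup'):
        joined = ''.join(sorted(allhashes)).encode('ascii')
        totalhash = hashlib.sha1(joined).hexdigest()
        return 'strip-backup/%s-%s-%s.hg' % (shortnode, totalhash[:8], suffix)

    print(backupname('1f0dee64', ['a1' * 20, 'b2' * 20]))
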
--- a/mercurial/repoview.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/repoview.py	Sat Jan 17 18:28:30 2015 -0800
@@ -72,6 +72,13 @@
     h.update(str(hash(frozenset(hideable))))
     return h.digest()
 
+def _writehiddencache(cachefile, cachehash, hidden):
+    """write hidden data to a cache file"""
+    data = struct.pack('>%ii' % len(hidden), *sorted(hidden))
+    cachefile.write(struct.pack(">H", cacheversion))
+    cachefile.write(cachehash)
+    cachefile.write(data)
+
 def trywritehiddencache(repo, hideable, hidden):
     """write cache of hidden changesets to disk
 
@@ -87,12 +94,8 @@
             wlock = repo.wlock(wait=False)
             # write cache to file
             newhash = cachehash(repo, hideable)
-            sortedset = sorted(hidden)
-            data = struct.pack('>%ii' % len(sortedset), *sortedset)
             fh = repo.vfs.open(cachefile, 'w+b', atomictemp=True)
-            fh.write(struct.pack(">H", cacheversion))
-            fh.write(newhash)
-            fh.write(data)
+            _writehiddencache(fh, newhash, hidden)
         except (IOError, OSError):
             repo.ui.debug('error writing hidden changesets cache')
         except error.LockHeld:
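
The refactored _writehiddencache() above pins down the hidden-changeset cache
layout: a big-endian two-byte version, the cache hash, then the sorted hidden
revisions as big-endian 32-bit integers. A standalone round-trip sketch of
that layout, with an invented version number and digest:

    import io
    import struct

    cacheversion = 1            # invented; the real value lives in repoview.py
    cachehash = b'\x00' * 20    # stands in for the sha digest of the inputs
    hidden = {5, 3, 8}

    buf = io.BytesIO()
    buf.write(struct.pack('>H', cacheversion))
    buf.write(cachehash)
    buf.write(struct.pack('>%ii' % len(hidden), *sorted(hidden)))

    raw = buf.getvalue()
    version, = struct.unpack('>H', raw[:2])
    digest = raw[2:22]
    revs = struct.unpack('>%ii' % ((len(raw) - 22) // 4), raw[22:])
    print('version %d, hidden revs %r' % (version, revs))
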
--- a/mercurial/revlog.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/revlog.py	Sat Jan 17 18:28:30 2015 -0800
@@ -34,7 +34,9 @@
 REVLOGNG_FLAGS = REVLOGNGINLINEDATA | REVLOGGENERALDELTA
 
 # revlog index flags
-REVIDX_KNOWN_FLAGS = 0
+REVIDX_ISCENSORED = (1 << 15) # revision has censor metadata, must be verified
+REVIDX_DEFAULT_FLAGS = 0
+REVIDX_KNOWN_FLAGS = REVIDX_ISCENSORED
 
 # max size of revlog with inline data
 _maxinline = 131072
@@ -204,6 +206,7 @@
         self._basecache = None
         self._chunkcache = (0, '')
         self._chunkcachesize = 65536
+        self._maxchainlen = None
         self.index = []
         self._pcache = {}
         self._nodecache = {nullid: nullrev}
@@ -219,6 +222,8 @@
                 v = 0
             if 'chunkcachesize' in opts:
                 self._chunkcachesize = opts['chunkcachesize']
+            if 'maxchainlen' in opts:
+                self._maxchainlen = opts['maxchainlen']
 
         if self._chunkcachesize <= 0:
             raise RevlogError(_('revlog chunk cache size %r is not greater '
@@ -267,6 +272,8 @@
             self.nodemap = self._nodecache = nodemap
         if not self._chunkcache:
             self._chunkclear()
+        # revnum -> (chain-length, sum-delta-length)
+        self._chaininfocache = {}
 
     def tip(self):
         return self.node(len(self.index) - 2)
@@ -350,6 +357,40 @@
             rev = base
             base = index[rev][3]
         return base
+    def chainlen(self, rev):
+        return self._chaininfo(rev)[0]
+
+    def _chaininfo(self, rev):
+        chaininfocache = self._chaininfocache
+        if rev in chaininfocache:
+            return chaininfocache[rev]
+        index = self.index
+        generaldelta = self._generaldelta
+        iterrev = rev
+        e = index[iterrev]
+        clen = 0
+        compresseddeltalen = 0
+        while iterrev != e[3]:
+            clen += 1
+            compresseddeltalen += e[1]
+            if generaldelta:
+                iterrev = e[3]
+            else:
+                iterrev -= 1
+            if iterrev in chaininfocache:
+                t = chaininfocache[iterrev]
+                clen += t[0]
+                compresseddeltalen += t[1]
+                break
+            e = index[iterrev]
+        else:
+            # Add text length of base since decompressing that also takes
+            # work. For cache hits the length is already included.
+            compresseddeltalen += e[1]
+        r = (clen, compresseddeltalen)
+        chaininfocache[rev] = r
+        return r
+
     def flags(self, rev):
         return self.index[rev][0] & 0xFFFF
     def rawsize(self, rev):
@@ -368,7 +409,7 @@
 
         See the documentation for ancestor.lazyancestors for more details."""
 
-        return ancestor.lazyancestors(self, revs, stoprev=stoprev,
+        return ancestor.lazyancestors(self.parentrevs, revs, stoprev=stoprev,
                                       inclusive=inclusive)
 
     def descendants(self, revs):
@@ -456,6 +497,20 @@
         missing.sort()
         return has, [self.node(r) for r in missing]
 
+    def incrementalmissingrevs(self, common=None):
+        """Return an object that can be used to incrementally compute the
+        revision numbers of the ancestors of arbitrary sets that are not
+        ancestors of common. This is an ancestor.incrementalmissingancestors
+        object.
+
+        'common' is a list of revision numbers. If common is not supplied, uses
+        nullrev.
+        """
+        if common is None:
+            common = [nullrev]
+
+        return ancestor.incrementalmissingancestors(self.parentrevs, common)
+
     def findmissingrevs(self, common=None, heads=None):
         """Return the revision numbers of the ancestors of heads that
         are not ancestors of common.
@@ -477,7 +532,8 @@
         if heads is None:
             heads = self.headrevs()
 
-        return ancestor.missingancestors(heads, common, self.parentrevs)
+        inc = self.incrementalmissingrevs(common=common)
+        return inc.missingancestors(heads)
 
     def findmissing(self, common=None, heads=None):
         """Return the ancestors of heads that are not ancestors of common.
@@ -502,8 +558,8 @@
         common = [self.rev(n) for n in common]
         heads = [self.rev(n) for n in heads]
 
-        return [self.node(r) for r in
-                ancestor.missingancestors(heads, common, self.parentrevs)]
+        inc = self.incrementalmissingrevs(common=common)
+        return [self.node(r) for r in inc.missingancestors(heads)]
 
     def nodesbetween(self, roots=None, heads=None):
         """Return a topological path from 'roots' to 'heads'.
@@ -1123,7 +1179,7 @@
         ifh = self.opener(self.indexfile, "a+")
         try:
             return self._addrevision(node, text, transaction, link, p1, p2,
-                                     cachedelta, ifh, dfh)
+                                     REVIDX_DEFAULT_FLAGS, cachedelta, ifh, dfh)
         finally:
             if dfh:
                 dfh.close()
@@ -1158,7 +1214,7 @@
             return ('u', text)
         return ("", bin)
 
-    def _addrevision(self, node, text, transaction, link, p1, p2,
+    def _addrevision(self, node, text, transaction, link, p1, p2, flags,
                      cachedelta, ifh, dfh):
         """internal function to add revisions to the log
 
@@ -1179,8 +1235,12 @@
             btext[0] = mdiff.patch(basetext, cachedelta[1])
             try:
                 self.checkhash(btext[0], p1, p2, node)
+                if flags & REVIDX_ISCENSORED:
+                    raise RevlogError(_('node %s is not censored') % node)
             except CensoredNodeError:
-                pass # always import a censor tombstone.
+                # must pass the censored index flag to add censored revisions
+                if not flags & REVIDX_ISCENSORED:
+                    raise
             return btext[0]
 
         def builddelta(rev):
@@ -1202,13 +1262,16 @@
                 base = rev
             else:
                 base = chainbase
-            return dist, l, data, base, chainbase
+            chainlen, compresseddeltalen = self._chaininfo(rev)
+            chainlen += 1
+            compresseddeltalen += l
+            return dist, l, data, base, chainbase, chainlen, compresseddeltalen
 
         curr = len(self)
         prev = curr - 1
         base = chainbase = curr
+        chainlen = None
         offset = self.end(prev)
-        flags = 0
         d = None
         if self._basecache is None:
             self._basecache = (prev, self.chainbase(prev))
@@ -1226,7 +1289,7 @@
                     d = builddelta(prev)
             else:
                 d = builddelta(prev)
-            dist, l, data, base, chainbase = d
+            dist, l, data, base, chainbase, chainlen, compresseddeltalen = d
 
         # full versions are inserted when the needed deltas
         # become comparable to the uncompressed text
@@ -1235,7 +1298,14 @@
                                         cachedelta[1])
         else:
             textlen = len(text)
-        if d is None or dist > textlen * 2:
+
+        # - 'dist' is the distance from the base revision -- bounding it limits
+        #   the amount of I/O we need to do.
+        # - 'compresseddeltalen' is the sum of the total size of deltas we need
+        #   to apply -- bounding it limits the amount of CPU we consume.
+        if (d is None or dist > textlen * 4 or l > textlen or
+            compresseddeltalen > textlen * 2 or
+            (self._maxchainlen and chainlen > self._maxchainlen)):
             text = buildtext()
             data = self.compress(text)
             l = len(data[1]) + len(data[0])
@@ -1332,7 +1402,8 @@
 
                 baserev = self.rev(deltabase)
                 chain = self._addrevision(node, None, transaction, link,
-                                          p1, p2, (baserev, delta), ifh, dfh)
+                                          p1, p2, REVIDX_DEFAULT_FLAGS,
+                                          (baserev, delta), ifh, dfh)
                 if not dfh and not self._inline:
                     # addrevision switched from inline to conventional
                     # reopen the index
@@ -1419,6 +1490,7 @@
 
         # then reset internal state in memory to forget those revisions
         self._cache = None
+        self._chaininfocache = {}
         self._chunkclear()
         for x in xrange(rev, len(self)):
             del self.nodemap[self.node(x)]
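
The revlog changes above tighten the delta-versus-full-text decision: the read
distance, the size of a single delta, the cumulative compressed delta length
and, when the experimental maxchainlen option is set, the chain length are all
bounded. A standalone sketch of that decision, taking plain integers as input:

    def usefulltext(dist, deltalen, compresseddeltalen, chainlen,
                    textlen, maxchainlen=None):
        if dist > textlen * 4:                   # too much data to read
            return True
        if deltalen > textlen:                   # one delta bigger than the text
            return True
        if compresseddeltalen > textlen * 2:     # too much CPU to replay chain
            return True
        if maxchainlen and chainlen > maxchainlen:
            return True
        return False

    # a 1 KB text whose accumulated deltas exceed twice its size -> store full
    print(usefulltext(dist=3000, deltalen=200, compresseddeltalen=2500,
                      chainlen=900, textlen=1024, maxchainlen=1000))
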
--- a/mercurial/revset.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/revset.py	Sat Jan 17 18:28:30 2015 -0800
@@ -10,7 +10,6 @@
 import node
 import heapq
 import match as matchmod
-import ancestor as ancestormod
 from i18n import _
 import encoding
 import obsolete as obsmod
@@ -103,7 +102,8 @@
     return baseset(sorted(reachable))
 
 elements = {
-    "(": (20, ("group", 1, ")"), ("func", 1, ")")),
+    "(": (21, ("group", 1, ")"), ("func", 1, ")")),
+    "##": (20, None, ("_concat", 20)),
     "~": (18, None, ("ancestor", 18)),
     "^": (18, None, ("parent", 18), ("parentpost", 18)),
     "-": (5, ("negate", 19), ("minus", 5)),
@@ -116,6 +116,7 @@
     "!": (10, ("not", 10)),
     "and": (5, None, ("and", 5)),
     "&": (5, None, ("and", 5)),
+    "%": (5, None, ("only", 5), ("onlypost", 5)),
     "or": (4, None, ("or", 4)),
     "|": (4, None, ("or", 4)),
     "+": (4, None, ("or", 4)),
@@ -128,15 +129,39 @@
 
 keywords = set(['and', 'or', 'not'])
 
-def tokenize(program, lookup=None):
+# default set of valid characters for the initial letter of symbols
+_syminitletters = set(c for c in [chr(i) for i in xrange(256)]
+                      if c.isalnum() or c in '._@' or ord(c) > 127)
+
+# default set of valid characters for non-initial letters of symbols
+_symletters = set(c for c in  [chr(i) for i in xrange(256)]
+                  if c.isalnum() or c in '-._/@' or ord(c) > 127)
+
+def tokenize(program, lookup=None, syminitletters=None, symletters=None):
     '''
     Parse a revset statement into a stream of tokens
 
+    ``syminitletters`` is the set of valid characters for the initial
+    letter of symbols.
+
+    By default, character ``c`` is recognized as valid for initial
+    letter of symbols, if ``c.isalnum() or c in '._@' or ord(c) > 127``.
+
+    ``symletters`` is the set of valid characters for non-initial
+    letters of symbols.
+
+    By default, character ``c`` is recognized as valid for non-initial
+    letters of symbols, if ``c.isalnum() or c in '-._/@' or ord(c) > 127``.
+
     Check that @ is a valid unquoted token character (issue3686):
     >>> list(tokenize("@::"))
     [('symbol', '@', 0), ('::', None, 1), ('end', None, 3)]
 
     '''
+    if syminitletters is None:
+        syminitletters = _syminitletters
+    if symletters is None:
+        symletters = _symletters
 
     pos, l = 0, len(program)
     while pos < l:
@@ -149,7 +174,10 @@
         elif c == '.' and program[pos:pos + 2] == '..': # look ahead carefully
             yield ('..', None, pos)
             pos += 1 # skip ahead
-        elif c in "():,-|&+!~^": # handle simple operators
+        elif c == '#' and program[pos:pos + 2] == '##': # look ahead carefully
+            yield ('##', None, pos)
+            pos += 1 # skip ahead
+        elif c in "():,-|&+!~^%": # handle simple operators
             yield (c, None, pos)
         elif (c in '"\'' or c == 'r' and
               program[pos:pos + 2] in ("r'", 'r"')): # handle quoted strings
@@ -173,12 +201,12 @@
             else:
                 raise error.ParseError(_("unterminated string"), s)
         # gather up a symbol/keyword
-        elif c.isalnum() or c in '._@' or ord(c) > 127:
+        elif c in syminitletters:
             s = pos
             pos += 1
             while pos < l: # find end of symbol
                 d = program[pos]
-                if not (d.isalnum() or d in "-._/@" or ord(d) > 127):
+                if d not in symletters:
                     break
                 if d == '.' and program[pos - 1] == '.': # special case for ..
                     pos -= 1
@@ -211,6 +239,14 @@
         pos += 1
     yield ('end', None, pos)
 
+def parseerrordetail(inst):
+    """Compose error message from specified ParseError object
+    """
+    if len(inst.args) > 1:
+        return _('at %s: %s') % (inst.args[1], inst.args[0])
+    else:
+        return inst.args[0]
+
 # helpers
 
 def getstring(x, err):
@@ -231,6 +267,40 @@
         raise error.ParseError(err)
     return l
 
+def isvalidsymbol(tree):
+    """Examine whether specified ``tree`` is valid ``symbol`` or not
+    """
+    return tree[0] == 'symbol' and len(tree) > 1
+
+def getsymbol(tree):
+    """Get symbol name from valid ``symbol`` in ``tree``
+
+    This assumes that ``tree`` is already examined by ``isvalidsymbol``.
+    """
+    return tree[1]
+
+def isvalidfunc(tree):
+    """Examine whether specified ``tree`` is valid ``func`` or not
+    """
+    return tree[0] == 'func' and len(tree) > 1 and isvalidsymbol(tree[1])
+
+def getfuncname(tree):
+    """Get function name from valid ``func`` in ``tree``
+
+    This assumes that ``tree`` is already examined by ``isvalidfunc``.
+    """
+    return getsymbol(tree[1])
+
+def getfuncargs(tree):
+    """Get list of function arguments from valid ``func`` in ``tree``
+
+    This assumes that ``tree`` is already examined by ``isvalidfunc``.
+    """
+    if len(tree) > 2:
+        return getlist(tree[2])
+    else:
+        return []
+
 def getset(repo, subset, x):
     if not x:
         raise error.ParseError(_("missing argument"))
@@ -265,9 +335,8 @@
     return stringset(repo, subset, x)
 
 def rangeset(repo, subset, x, y):
-    cl = baseset(repo.changelog)
-    m = getset(repo, cl, x)
-    n = getset(repo, cl, y)
+    m = getset(repo, fullreposet(repo), x)
+    n = getset(repo, fullreposet(repo), y)
 
     if not m or not n:
         return baseset()
@@ -371,7 +440,7 @@
         raise error.ParseError(_("~ expects a number"))
     ps = set()
     cl = repo.changelog
-    for r in getset(repo, baseset(cl), x):
+    for r in getset(repo, fullreposet(repo), x):
         for i in range(n):
             r = cl.parentrevs(r)[0]
         ps.add(r)
@@ -386,30 +455,6 @@
     kind, pattern, matcher = _substringmatcher(n)
     return subset.filter(lambda x: matcher(encoding.lower(repo[x].user())))
 
-def only(repo, subset, x):
-    """``only(set, [set])``
-    Changesets that are ancestors of the first set that are not ancestors
-    of any other head in the repo. If a second set is specified, the result
-    is ancestors of the first set that are not ancestors of the second set
-    (i.e. ::<set1> - ::<set2>).
-    """
-    cl = repo.changelog
-    # i18n: "only" is a keyword
-    args = getargs(x, 1, 2, _('only takes one or two arguments'))
-    include = getset(repo, spanset(repo), args[0])
-    if len(args) == 1:
-        if not include:
-            return baseset()
-
-        descendants = set(_revdescendants(repo, include, False))
-        exclude = [rev for rev in cl.headrevs()
-            if not rev in descendants and not rev in include]
-    else:
-        exclude = getset(repo, spanset(repo), args[1])
-
-    results = set(ancestormod.missingancestors(include, exclude, cl.parentrevs))
-    return subset & results
-
 def bisect(repo, subset, x):
     """``bisect(string)``
     Changesets marked in the specified bisect status:
@@ -478,6 +523,11 @@
     a regular expression. To match a branch that actually starts with `re:`,
     use the prefix `literal:`.
     """
+    import branchmap
+    urepo = repo.unfiltered()
+    ucl = urepo.changelog
+    getbi = branchmap.revbranchcache(urepo).branchinfo
+
     try:
         b = getstring(x, '')
     except error.ParseError:
@@ -489,16 +539,16 @@
             # note: falls through to the revspec case if no branch with
             # this name exists
             if pattern in repo.branchmap():
-                return subset.filter(lambda r: matcher(repo[r].branch()))
+                return subset.filter(lambda r: matcher(getbi(ucl, r)[0]))
         else:
-            return subset.filter(lambda r: matcher(repo[r].branch()))
+            return subset.filter(lambda r: matcher(getbi(ucl, r)[0]))
 
     s = getset(repo, spanset(repo), x)
     b = set()
     for r in s:
-        b.add(repo[r].branch())
+        b.add(getbi(ucl, r)[0])
     c = s.__contains__
-    return subset.filter(lambda r: c(r) or repo[r].branch() in b)
+    return subset.filter(lambda r: c(r) or getbi(ucl, r)[0] in b)
 
 def bumped(repo, subset, x):
     """``bumped()``
@@ -573,7 +623,7 @@
     """``children(set)``
     Child changesets of changesets in set.
     """
-    s = getset(repo, baseset(repo), x)
+    s = getset(repo, fullreposet(repo), x)
     cs = _children(repo, subset, s)
     return subset & cs
 
@@ -797,24 +847,106 @@
     The pattern without explicit kind like ``glob:`` is expected to be
     relative to the current directory and match against a file exactly
     for efficiency.
+
+    If some linkrev points to revisions filtered by the current repoview, we'll
+    work around it to return a non-filtered value.
     """
 
     # i18n: "filelog" is a keyword
     pat = getstring(x, _("filelog requires a pattern"))
     s = set()
+    cl = repo.changelog
 
     if not matchmod.patkind(pat):
         f = pathutil.canonpath(repo.root, repo.getcwd(), pat)
-        fl = repo.file(f)
-        for fr in fl:
-            s.add(fl.linkrev(fr))
+        files = [f]
     else:
         m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=repo[None])
-        for f in repo[None]:
-            if m(f):
-                fl = repo.file(f)
-                for fr in fl:
-                    s.add(fl.linkrev(fr))
+        files = (f for f in repo[None] if m(f))
+
+    for f in files:
+        backrevref = {}  # final value for: filerev -> changerev
+        lowestchild = {} # lowest known filerev child of a filerev
+        delayed = []     # filerev with filtered linkrev, for post-processing
+        lowesthead = None # cache for manifest content of all head revisions
+        fl = repo.file(f)
+        for fr in list(fl):
+            rev = fl.linkrev(fr)
+            if rev not in cl:
+                # changerev pointed in linkrev is filtered
+                # record it for post processing.
+                delayed.append((fr, rev))
+                continue
+            for p in fl.parentrevs(fr):
+                if 0 <= p and p not in lowestchild:
+                    lowestchild[p] = fr
+            backrevref[fr] = rev
+            s.add(rev)
+
+        # Post-processing of all filerevs we skipped because they were
+        # filtered. If such filerevs have known and unfiltered children, this
+        # means they have an unfiltered appearance out there. We'll use linkrev
+        # adjustment to find one of these appearances. The lowest known child
+        # will be used as a starting point because it is the best upper-bound we
+        # have.
+        #
+        # This approach will fail when an unfiltered but linkrev-shadowed
+        # appearance exists in a head changeset without unfiltered filerev
+        # children anywhere.
+        while delayed:
+            # must be a descending iteration, so the lowest-child information
+            # filled in along the way can help the next items.
+            fr, rev = delayed.pop()
+            lkr = rev
+
+            child = lowestchild.get(fr)
+
+            if child is None:
+                # search for existence of this file revision in a head revision.
+                # There are three possibilities:
+                # - the revision exists in a head and we can find an
+                #   introduction from there,
+                # - the revision does not exist in a head because it has been
+                #   changed since its introduction: we would have found a child
+                #   and be in the other 'else' clause,
+                # - all versions of the revision are hidden.
+                if lowesthead is None:
+                    lowesthead = {}
+                    for h in repo.heads():
+                        fnode = repo[h].manifest().get(f)
+                        if fnode is not None:
+                            lowesthead[fl.rev(fnode)] = h
+                headrev = lowesthead.get(fr)
+                if headrev is None:
+                    # content is nowhere unfiltered
+                    continue
+                rev = repo[headrev][f].introrev()
+            else:
+                # the lowest known child is a good upper bound
+                childcrev = backrevref[child]
+                # XXX this does not guarantee returning the lowest
+                # introduction of this revision, but this gives a
+                # result which is a good start and will fit in most
+                # cases. We probably need to fix the multiple
+                # introductions case properly (report each
+                # introduction, even for identical file revisions)
+                # once and for all at some point anyway.
+                for p in repo[childcrev][f].parents():
+                    if p.filerev() == fr:
+                        rev = p.rev()
+                        break
+                if rev == lkr:  # no shadowed entry found
+                    # XXX This should never happen unless some manifest points
+                    # to biggish file revisions (like a revision that uses a
+                    # parent that never appears in the manifest ancestors)
+                    continue
+
+            # Fill the data for the next iteration.
+            for p in fl.parentrevs(fr):
+                if 0 <= p and p not in lowestchild:
+                    lowestchild[p] = fr
+            backrevref[fr] = rev
+            s.add(rev)
 
     return subset & s
 
@@ -833,7 +965,7 @@
             cx = c[x]
             s = set(ctx.rev() for ctx in cx.ancestors(followfirst=followfirst))
             # include the revision responsible for the most recent version
-            s.add(cx.linkrev())
+            s.add(cx.introrev())
         else:
             return baseset()
     else:
@@ -1111,6 +1243,42 @@
     pat = getstring(x, _("modifies requires a pattern"))
     return checkstatus(repo, subset, pat, 0)
 
+def named(repo, subset, x):
+    """``named(namespace)``
+    The changesets in a given namespace.
+
+    If `namespace` starts with `re:`, the remainder of the string is treated as
+    a regular expression. To match a namespace that actually starts with `re:`,
+    use the prefix `literal:`.
+    """
+    # i18n: "named" is a keyword
+    args = getargs(x, 1, 1, _('named requires a namespace argument'))
+
+    ns = getstring(args[0],
+                   # i18n: "named" is a keyword
+                   _('the argument to named must be a string'))
+    kind, pattern, matcher = _stringmatcher(ns)
+    namespaces = set()
+    if kind == 'literal':
+        if pattern not in repo.names:
+            raise util.Abort(_("namespace '%s' does not exist") % ns)
+        namespaces.add(repo.names[pattern])
+    else:
+        for name, ns in repo.names.iteritems():
+            if matcher(name):
+                namespaces.add(ns)
+        if not namespaces:
+            raise util.Abort(_("no namespace exists that matches '%s'")
+                             % pattern)
+
+    names = set()
+    for ns in namespaces:
+        for name in ns.listnames(repo):
+            names.update(ns.nodes(repo, name))
+
+    names -= set([node.nullrev])
+    return subset & names
+
 def node_(repo, subset, x):
     """``id(string)``
     Revision non-ambiguously specified by the given hex string prefix.
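
The named() predicate above reads from the repo.names registry introduced
elsewhere in this changeset. A minimal sketch of how a consumer might use
both, assuming ui and repo objects are already in hand (for example inside an
extension command):

    def shownamespaces(ui, repo):
        for nsname, ns in repo.names.iteritems():   # Python 2, as in-tree
            ui.write('%s: %s\n'
                     % (nsname, ', '.join(sorted(ns.listnames(repo)))))
        # every changeset carried by a bookmark, via the new revset predicate
        for rev in repo.revs('named(%s)', 'bookmarks'):
            ui.write('%d\n' % rev)
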
@@ -1140,6 +1308,30 @@
     obsoletes = obsmod.getrevs(repo, 'obsolete')
     return subset & obsoletes
 
+def only(repo, subset, x):
+    """``only(set, [set])``
+    Changesets that are ancestors of the first set that are not ancestors
+    of any other head in the repo. If a second set is specified, the result
+    is ancestors of the first set that are not ancestors of the second set
+    (i.e. ::<set1> - ::<set2>).
+    """
+    cl = repo.changelog
+    # i18n: "only" is a keyword
+    args = getargs(x, 1, 2, _('only takes one or two arguments'))
+    include = getset(repo, spanset(repo), args[0])
+    if len(args) == 1:
+        if not include:
+            return baseset()
+
+        descendants = set(_revdescendants(repo, include, False))
+        exclude = [rev for rev in cl.headrevs()
+            if not rev in descendants and not rev in include]
+    else:
+        exclude = getset(repo, spanset(repo), args[1])
+
+    results = set(cl.findmissingrevs(common=exclude, heads=include))
+    return subset & results
+
 def origin(repo, subset, x):
     """``origin([set])``
     Changesets that were specified as a source for the grafts, transplants or
@@ -1258,7 +1450,7 @@
         raise error.ParseError(_("^ expects a number 0, 1, or 2"))
     ps = set()
     cl = repo.changelog
-    for r in getset(repo, baseset(cl), x):
+    for r in getset(repo, fullreposet(repo), x):
         if n == 0:
             ps.add(r)
         elif n == 1:
@@ -1384,7 +1576,7 @@
     # i18n: "matching" is a keyword
     l = getargs(x, 1, 2, _("matching takes 1 or 2 arguments"))
 
-    revs = getset(repo, baseset(repo.changelog), l[0])
+    revs = getset(repo, fullreposet(repo), l[0])
 
     fieldlist = ['metadata']
     if len(l) > 1:
@@ -1689,7 +1881,6 @@
     "ancestors": ancestors,
     "_firstancestors": _firstancestors,
     "author": author,
-    "only": only,
     "bisect": bisect,
     "bisected": bisected,
     "bookmark": bookmark,
@@ -1728,7 +1919,9 @@
     "merge": merge,
     "min": minrev,
     "modifies": modifies,
+    "named": named,
     "obsolete": obsolete,
+    "only": only,
     "origin": origin,
     "outgoing": outgoing,
     "p1": p1,
@@ -1800,6 +1993,7 @@
     "min",
     "modifies",
     "obsolete",
+    "only",
     "origin",
     "outgoing",
     "p1",
@@ -1837,6 +2031,8 @@
     "ancestor": ancestorspec,
     "parent": parentspec,
     "parentpost": p1,
+    "only": only,
+    "onlypost": only,
 }
 
 def optimize(x, small):
@@ -1850,6 +2046,9 @@
     op = x[0]
     if op == 'minus':
         return optimize(('and', x[1], ('not', x[2])), small)
+    elif op == 'only':
+        return optimize(('func', ('symbol', 'only'),
+                         ('list', x[1], x[2])), small)
     elif op == 'dagrangepre':
         return optimize(('func', ('symbol', 'ancestors'), x[1]), small)
     elif op == 'dagrangepost':
@@ -1953,9 +2152,96 @@
         for t in tree:
             _checkaliasarg(t, known)
 
+# the set of valid characters for the initial letter of symbols in
+# alias declarations and definitions
+_aliassyminitletters = set(c for c in [chr(i) for i in xrange(256)]
+                           if c.isalnum() or c in '._@$' or ord(c) > 127)
+
+def _tokenizealias(program, lookup=None):
+    """Parse alias declaration/definition into a stream of tokens
+
+    This also allows symbol names to start with ``$`` (for backward
+    compatibility); callers of this function should check whether ``$``
+    is also used for unexpected symbols.
+    """
+    return tokenize(program, lookup=lookup,
+                    syminitletters=_aliassyminitletters)
+
+def _parsealiasdecl(decl):
+    """Parse alias declaration ``decl``
+
+    This returns ``(name, tree, args, errorstr)`` tuple:
+
+    - ``name``: name of the declared alias (may be ``decl`` itself on error)
+    - ``tree``: parse result (or ``None`` on error)
+    - ``args``: list of alias argument names (or None for a symbol declaration)
+    - ``errorstr``: detail about the detected error (or None)
+
+    >>> _parsealiasdecl('foo')
+    ('foo', ('symbol', 'foo'), None, None)
+    >>> _parsealiasdecl('$foo')
+    ('$foo', None, None, "'$' not for alias arguments")
+    >>> _parsealiasdecl('foo::bar')
+    ('foo::bar', None, None, 'invalid format')
+    >>> _parsealiasdecl('foo bar')
+    ('foo bar', None, None, 'at 4: invalid token')
+    >>> _parsealiasdecl('foo()')
+    ('foo', ('func', ('symbol', 'foo')), [], None)
+    >>> _parsealiasdecl('$foo()')
+    ('$foo()', None, None, "'$' not for alias arguments")
+    >>> _parsealiasdecl('foo($1, $2)')
+    ('foo', ('func', ('symbol', 'foo')), ['$1', '$2'], None)
+    >>> _parsealiasdecl('foo(bar_bar, baz.baz)')
+    ('foo', ('func', ('symbol', 'foo')), ['bar_bar', 'baz.baz'], None)
+    >>> _parsealiasdecl('foo($1, $2, nested($1, $2))')
+    ('foo($1, $2, nested($1, $2))', None, None, 'invalid argument list')
+    >>> _parsealiasdecl('foo(bar($1, $2))')
+    ('foo(bar($1, $2))', None, None, 'invalid argument list')
+    >>> _parsealiasdecl('foo("string")')
+    ('foo("string")', None, None, 'invalid argument list')
+    >>> _parsealiasdecl('foo($1, $2')
+    ('foo($1, $2', None, None, 'at 10: unexpected token: end')
+    >>> _parsealiasdecl('foo("string')
+    ('foo("string', None, None, 'at 5: unterminated string')
+    >>> _parsealiasdecl('foo($1, $2, $1)')
+    ('foo', None, None, 'argument names collide with each other')
+    """
+    p = parser.parser(_tokenizealias, elements)
+    try:
+        tree, pos = p.parse(decl)
+        if (pos != len(decl)):
+            raise error.ParseError(_('invalid token'), pos)
+
+        if isvalidsymbol(tree):
+            # "name = ...." style
+            name = getsymbol(tree)
+            if name.startswith('$'):
+                return (decl, None, None, _("'$' not for alias arguments"))
+            return (name, ('symbol', name), None, None)
+
+        if isvalidfunc(tree):
+            # "name(arg, ....) = ...." style
+            name = getfuncname(tree)
+            if name.startswith('$'):
+                return (decl, None, None, _("'$' not for alias arguments"))
+            args = []
+            for arg in getfuncargs(tree):
+                if not isvalidsymbol(arg):
+                    return (decl, None, None, _("invalid argument list"))
+                args.append(getsymbol(arg))
+            if len(args) != len(set(args)):
+                return (name, None, None,
+                        _("argument names collide with each other"))
+            return (name, ('func', ('symbol', name)), args, None)
+
+        return (decl, None, None, _("invalid format"))
+    except error.ParseError, inst:
+        return (decl, None, None, parseerrordetail(inst))
+
 class revsetalias(object):
-    funcre = re.compile('^([^(]+)\(([^)]+)\)$')
-    args = None
+    # whether this alias's `error` information has already been shown.
+    # this avoids showing the same warning multiple times per `findaliases`.
+    warned = False
 
     def __init__(self, name, value):
         '''Aliases like:
@@ -1963,24 +2249,27 @@
         h = heads(default)
         b($1) = ancestors($1) - ancestors(default)
         '''
-        m = self.funcre.search(name)
-        if m:
-            self.name = m.group(1)
-            self.tree = ('func', ('symbol', m.group(1)))
-            self.args = [x.strip() for x in m.group(2).split(',')]
+        self.name, self.tree, self.args, self.error = _parsealiasdecl(name)
+        if self.error:
+            self.error = _('failed to parse the declaration of revset alias'
+                           ' "%s": %s') % (self.name, self.error)
+            return
+
+        if self.args:
             for arg in self.args:
                 # _aliasarg() is an unknown symbol only used to separate
                 # alias argument placeholders from regular strings.
                 value = value.replace(arg, '_aliasarg(%r)' % (arg,))
-        else:
-            self.name = name
-            self.tree = ('symbol', name)
-
-        self.replacement, pos = parse(value)
-        if pos != len(value):
-            raise error.ParseError(_('invalid token'), pos)
-        # Check for placeholder injection
-        _checkaliasarg(self.replacement, self.args)
+
+        try:
+            self.replacement, pos = parse(value)
+            if pos != len(value):
+                raise error.ParseError(_('invalid token'), pos)
+            # Check for placeholder injection
+            _checkaliasarg(self.replacement, self.args)
+        except error.ParseError, inst:
+            self.error = _('failed to parse the definition of revset alias'
+                           ' "%s": %s') % (self.name, parseerrordetail(inst))
 
 def _getalias(aliases, tree):
     """If tree looks like an unexpanded alias, return it. Return None
@@ -2022,6 +2311,8 @@
         return tree
     alias = _getalias(aliases, tree)
     if alias is not None:
+        if alias.error:
+            raise util.Abort(alias.error)
         if alias in expanding:
             raise error.ParseError(_('infinite expansion of revset alias "%s" '
                                      'detected') % alias.name)
@@ -2043,13 +2334,41 @@
                        for t in tree)
     return result
 
-def findaliases(ui, tree):
+def findaliases(ui, tree, showwarning=None):
     _checkaliasarg(tree)
     aliases = {}
     for k, v in ui.configitems('revsetalias'):
         alias = revsetalias(k, v)
         aliases[alias.name] = alias
-    return _expandaliases(aliases, tree, [], {})
+    tree = _expandaliases(aliases, tree, [], {})
+    if showwarning:
+        # warn about problematic (but not referred) aliases
+        for name, alias in sorted(aliases.iteritems()):
+            if alias.error and not alias.warned:
+                showwarning(_('warning: %s\n') % (alias.error))
+                alias.warned = True
+    return tree
+
+def foldconcat(tree):
+    """Fold elements to be concatenated by `##`
+    """
+    if not isinstance(tree, tuple) or tree[0] in ('string', 'symbol'):
+        return tree
+    if tree[0] == '_concat':
+        pending = [tree]
+        l = []
+        while pending:
+            e = pending.pop()
+            if e[0] == '_concat':
+                pending.extend(reversed(e[1:]))
+            elif e[0] in ('string', 'symbol'):
+                l.append(e[1])
+            else:
+                msg = _("\"##\" can't concatenate \"%s\" element") % (e[0])
+                raise error.ParseError(msg)
+        return ('string', ''.join(l))
+    else:
+        return tuple(foldconcat(t) for t in tree)
 
 def parse(spec, lookup=None):
     p = parser.parser(tokenize, elements)
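
foldconcat collapses the parse tree produced by the new ``##`` concatenation
operator into a single string node before optimization, so that, for example,
a hypothetical alias body like ``tag("re:" ## $1)`` ends up with one string
argument. A standalone sketch of the same fold over nested
``('_concat', ...)`` tuples, under the simplified tree shapes shown above:

    def foldconcat(tree):
        if not isinstance(tree, tuple) or tree[0] in ('string', 'symbol'):
            return tree
        if tree[0] == '_concat':
            parts = []
            pending = list(tree[1:])
            while pending:
                e = pending.pop(0)
                if e[0] == '_concat':
                    pending = list(e[1:]) + pending
                elif e[0] in ('string', 'symbol'):
                    parts.append(e[1])
                else:
                    raise ValueError('"##" cannot concatenate %r' % (e[0],))
            return ('string', ''.join(parts))
        return tuple(foldconcat(t) for t in tree)

    tree = ('_concat', ('_concat', ('string', 'foo'), ('symbol', 'bar')),
            ('string', 'baz'))
    assert foldconcat(tree) == ('string', 'foobarbaz')
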
@@ -2065,7 +2384,8 @@
     if (pos != len(spec)):
         raise error.ParseError(_("invalid token"), pos)
     if ui:
-        tree = findaliases(ui, tree)
+        tree = findaliases(ui, tree, showwarning=ui.warn)
+    tree = foldconcat(tree)
     weight, tree = optimize(tree, True)
     def mfunc(repo, subset):
         if util.safehasattr(subset, 'isascending'):
@@ -2551,7 +2871,7 @@
         return it()
 
     def _trysetasclist(self):
-        """populate the _asclist attribut if possible and necessary"""
+        """populate the _asclist attribute if possible and necessary"""
         if self._genlist is not None and self._asclist is None:
             self._asclist = sorted(self._genlist)
 
@@ -2744,7 +3064,7 @@
 
         # We have to use this complex iteration strategy to allow multiple
         # iterations at the same time. We need to be able to catch revision
-        # removed from `consumegen` and added to genlist in another instance.
+        # removed from _consumegen and added to genlist in another instance.
         #
         # Getting rid of it would provide about a 15% speed up on this
         # iteration.
@@ -2939,17 +3259,15 @@
 class fullreposet(_spanset):
     """a set containing all revisions in the repo
 
-    This class exists to host special optimisation.
+    This class exists to host special optimization.
     """
 
     def __init__(self, repo):
         super(fullreposet, self).__init__(repo)
 
     def __and__(self, other):
-        """fullrepo & other -> other
-
-        As self contains the whole repo, all of the other set should also be in
-        self. Therefor `self & other = other`.
+        """As self contains the whole repo, all of the other set should also be
+        in self. Therefore `self & other = other`.
 
         This boldly assumes the other contains valid revs only.
         """
@@ -2962,10 +3280,7 @@
             # object.
             other = baseset(other - self._hiddenrevs)
 
-        if self.isascending():
-            other.sort()
-        else:
-            other.sort(reverse)
+        other.sort(reverse=self.isdescending())
         return other
 
 # tell hggettext to extract docstrings from these functions:
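
The rewritten fullreposet.__and__ keeps the identity ``full & other == other``
and only has to hand back `other` sorted in this set's direction (the removed
two-branch form passed an undefined `reverse` name to sort). A toy
illustration of that intersect-with-everything behaviour, not the smartset
classes themselves:

    class fullset(object):
        """Toy stand-in for fullreposet: intersecting with it returns the
        other operand, sorted to match this set's iteration order."""
        def __init__(self, descending=False):
            self.descending = descending

        def __and__(self, other):
            # self contains everything, so self & other is just other,
            # reordered to follow self's direction
            return sorted(other, reverse=self.descending)

    assert fullset() & {3, 1, 2} == [1, 2, 3]
    assert fullset(descending=True) & {3, 1, 2} == [3, 2, 1]
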
--- a/mercurial/scmutil.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/scmutil.py	Sat Jan 17 18:28:30 2015 -0800
@@ -188,9 +188,25 @@
                 raise
         return ""
 
-    def open(self, path, mode="r", text=False, atomictemp=False):
+    def tryreadlines(self, path, mode='rb'):
+        '''gracefully return an empty list for missing files'''
+        try:
+            return self.readlines(path, mode=mode)
+        except IOError, inst:
+            if inst.errno != errno.ENOENT:
+                raise
+        return []
+
+    def open(self, path, mode="r", text=False, atomictemp=False,
+             notindexed=False):
+        '''Open ``path`` file, which is relative to vfs root.
+
+        Newly created directories are marked as "not to be indexed by
+        the content indexing service", if ``notindexed`` is specified
+        for "write" mode access.
+        '''
         self.open = self.__call__
-        return self.__call__(path, mode, text, atomictemp)
+        return self.__call__(path, mode, text, atomictemp, notindexed)
 
     def read(self, path):
         fp = self(path, 'rb')
@@ -199,6 +215,13 @@
         finally:
             fp.close()
 
+    def readlines(self, path, mode='rb'):
+        fp = self(path, mode=mode)
+        try:
+            return fp.readlines()
+        finally:
+            fp.close()
+
     def write(self, path, data):
         fp = self(path, 'wb')
         try:
@@ -206,6 +229,13 @@
         finally:
             fp.close()
 
+    def writelines(self, path, data, mode='wb', notindexed=False):
+        fp = self(path, mode=mode, notindexed=notindexed)
+        try:
+            return fp.writelines(data)
+        finally:
+            fp.close()
+
     def append(self, path, data):
         fp = self(path, 'ab')
         try:
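
tryreadlines follows the existing vfs ``try*`` convention: swallow only ENOENT
and return an empty result, let every other error propagate; writelines is the
symmetric write-side helper used by the subrepo store-hash cache further down.
A minimal plain-path sketch of the pattern (not the vfs class itself):

    import errno

    def tryreadlines(path, mode='rb'):
        """Return the file's lines, or [] if the file does not exist."""
        try:
            fp = open(path, mode)
        except IOError as inst:
            if inst.errno != errno.ENOENT:
                raise
            return []
        try:
            return fp.readlines()
        finally:
            fp.close()

    def writelines(path, lines, mode='wb'):
        fp = open(path, mode)
        try:
            fp.writelines(lines)
        finally:
            fp.close()
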
@@ -231,6 +261,19 @@
     def islink(self, path=None):
         return os.path.islink(self.join(path))
 
+    def reljoin(self, *paths):
+        """join various elements of a path together (as os.path.join would do)
+
+        The vfs base is not injected so that the path stays relative. This
+        exists to allow handling of strange encodings if needed."""
+        return os.path.join(*paths)
+
+    def split(self, path):
+        """split top-most element of a path (as os.path.split would do)
+
+        This exists to allow handling of strange encodings if needed."""
+        return os.path.split(path)
+
     def lexists(self, path=None):
         return os.path.lexists(self.join(path))
 
@@ -329,7 +372,14 @@
             return
         os.chmod(name, self.createmode & 0666)
 
-    def __call__(self, path, mode="r", text=False, atomictemp=False):
+    def __call__(self, path, mode="r", text=False, atomictemp=False,
+                 notindexed=False):
+        '''Open ``path`` file, which is relative to vfs root.
+
+        Newly created directories are marked as "not to be indexed by
+        the content indexing service", if ``notindexed`` is specified
+        for "write" mode access.
+        '''
         if self._audit:
             r = util.checkosfilename(path)
             if r:
@@ -347,7 +397,7 @@
             # to a directory. Let the posixfile() call below raise IOError.
             if basename:
                 if atomictemp:
-                    util.ensuredirs(dirname, self.createmode)
+                    util.ensuredirs(dirname, self.createmode, notindexed)
                     return util.atomictempfile(f, mode, self.createmode)
                 try:
                     if 'w' in mode:
@@ -365,7 +415,7 @@
                     if e.errno != errno.ENOENT:
                         raise
                     nlink = 0
-                    util.ensuredirs(dirname, self.createmode)
+                    util.ensuredirs(dirname, self.createmode, notindexed)
                 if nlink > 0:
                     if self._trustnlink is None:
                         self._trustnlink = nlink > 1 or util.checknlink(f)
@@ -495,7 +545,13 @@
 
 def osrcpath():
     '''return default os-specific hgrc search path'''
-    path = systemrcpath()
+    path = []
+    defaultpath = os.path.join(util.datapath, 'default.d')
+    if os.path.isdir(defaultpath):
+        for f, kind in osutil.listdir(defaultpath):
+            if f.endswith('.rc'):
+                path.append(os.path.join(defaultpath, f))
+    path.extend(systemrcpath())
     path.extend(userrcpath())
     path = [os.path.normpath(f) for f in path]
     return path
@@ -670,40 +726,68 @@
     '''Return a matcher that will efficiently match exactly these files.'''
     return matchmod.exact(repo.root, repo.getcwd(), files)
 
-def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
+def addremove(repo, matcher, prefix, opts={}, dry_run=None, similarity=None):
+    m = matcher
     if dry_run is None:
         dry_run = opts.get('dry_run')
     if similarity is None:
         similarity = float(opts.get('similarity') or 0)
-    # we'd use status here, except handling of symlinks and ignore is tricky
-    m = match(repo[None], pats, opts)
+
+    ret = 0
+    join = lambda f: os.path.join(prefix, f)
+
+    def matchessubrepo(matcher, subpath):
+        if matcher.exact(subpath):
+            return True
+        for f in matcher.files():
+            if f.startswith(subpath):
+                return True
+        return False
+
+    wctx = repo[None]
+    for subpath in sorted(wctx.substate):
+        if opts.get('subrepos') or matchessubrepo(m, subpath):
+            sub = wctx.sub(subpath)
+            try:
+                submatch = matchmod.narrowmatcher(subpath, m)
+                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
+                    ret = 1
+            except error.LookupError:
+                repo.ui.status(_("skipping missing subrepository: %s\n")
+                                 % join(subpath))
+
     rejected = []
-    m.bad = lambda x, y: rejected.append(x)
+    origbad = m.bad
+    def badfn(f, msg):
+        if f in m.files():
+            origbad(f, msg)
+        rejected.append(f)
 
-    added, unknown, deleted, removed = _interestingfiles(repo, m)
+    m.bad = badfn
+    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
+    m.bad = origbad
 
-    unknownset = set(unknown)
+    unknownset = set(unknown + forgotten)
     toprint = unknownset.copy()
     toprint.update(deleted)
     for abs in sorted(toprint):
         if repo.ui.verbose or not m.exact(abs):
-            rel = m.rel(abs)
             if abs in unknownset:
-                status = _('adding %s\n') % ((pats and rel) or abs)
+                status = _('adding %s\n') % m.uipath(abs)
             else:
-                status = _('removing %s\n') % ((pats and rel) or abs)
+                status = _('removing %s\n') % m.uipath(abs)
             repo.ui.status(status)
 
     renames = _findrenames(repo, m, added + unknown, removed + deleted,
                            similarity)
 
     if not dry_run:
-        _markchanges(repo, unknown, deleted, renames)
+        _markchanges(repo, unknown + forgotten, deleted, renames)
 
     for f in rejected:
         if f in m.files():
             return 1
-    return 0
+    return ret
 
 def marktouched(repo, files, similarity=0.0):
     '''Assert that files have somehow been operated upon. files are relative to
@@ -712,10 +796,10 @@
     rejected = []
     m.bad = lambda x, y: rejected.append(x)
 
-    added, unknown, deleted, removed = _interestingfiles(repo, m)
+    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
 
     if repo.ui.verbose:
-        unknownset = set(unknown)
+        unknownset = set(unknown + forgotten)
         toprint = unknownset.copy()
         toprint.update(deleted)
         for abs in sorted(toprint):
@@ -728,7 +812,7 @@
     renames = _findrenames(repo, m, added + unknown, removed + deleted,
                            similarity)
 
-    _markchanges(repo, unknown, deleted, renames)
+    _markchanges(repo, unknown + forgotten, deleted, renames)
 
     for f in rejected:
         if f in m.files():
@@ -741,7 +825,7 @@
 
     This is different from dirstate.status because it doesn't care about
     whether files are modified or clean.'''
-    added, unknown, deleted, removed = [], [], [], []
+    added, unknown, deleted, removed, forgotten = [], [], [], [], []
     audit_path = pathutil.pathauditor(repo.root)
 
     ctx = repo[None]
@@ -754,13 +838,15 @@
             unknown.append(abs)
         elif dstate != 'r' and not st:
             deleted.append(abs)
+        elif dstate == 'r' and st:
+            forgotten.append(abs)
         # for finding renames
-        elif dstate == 'r':
+        elif dstate == 'r' and not st:
             removed.append(abs)
         elif dstate == 'a':
             added.append(abs)
 
-    return added, unknown, deleted, removed
+    return added, unknown, deleted, removed, forgotten
 
 def _findrenames(repo, matcher, added, removed, similarity):
     '''Find renames from removed files to added ones.'''
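
The interesting change in _interestingfiles is that dirstate-'r' files are now
split in two: still present on disk means "forgotten" (so addremove can re-add
them), actually gone means "removed" (still used for rename detection). A
condensed sketch of the classification, with `dstate` as the dirstate status
character and `exists` standing in for the walk's stat result:

    def classify(dstate, exists):
        """Map (dirstate status char, on-disk presence) to a bucket."""
        if dstate == '?' and exists:
            return 'unknown'      # will be printed as "adding"
        elif dstate != 'r' and not exists:
            return 'deleted'      # will be printed as "removing"
        elif dstate == 'r' and exists:
            return 'forgotten'    # forgotten but still on disk: re-add it
        elif dstate == 'r' and not exists:
            return 'removed'      # only used for rename detection
        elif dstate == 'a':
            return 'added'
        return None               # tracked and unchanged: not interesting

    assert classify('r', exists=True) == 'forgotten'
    assert classify('r', exists=False) == 'removed'
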
--- a/mercurial/setdiscovery.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/setdiscovery.py	Sat Jan 17 18:28:30 2015 -0800
@@ -40,12 +40,25 @@
 classified with it (since all ancestors or descendants will be marked as well).
 """
 
-from node import nullid
+from node import nullid, nullrev
 from i18n import _
 import random
 import util, dagutil
 
-def _updatesample(dag, nodes, sample, always, quicksamplesize=0):
+def _updatesample(dag, nodes, sample, quicksamplesize=0):
+    """update an existing sample to match the expected size
+
+    The sample is updated with nodes exponentially distant from each head of the
+    <nodes> set. (H~1, H~2, H~4, H~8, etc).
+
+    If a target size is specified, the sampling will stop once this size is
+    reached. Otherwise sampling will happen until roots of the <nodes> set are
+    reached.
+
+    :dag: a dag object from dagutil
+    :nodes:  set of nodes we want to discover (if None, assume the whole dag)
+    :sample: a sample to update
+    :quicksamplesize: optional target size of the sample"""
     # if nodes is empty we scan the entire graph
     if nodes:
         heads = dag.headsetofconnecteds(nodes)
@@ -63,53 +76,41 @@
         if d > factor:
             factor *= 2
         if d == factor:
-            if curr not in always: # need this check for the early exit below
-                sample.add(curr)
-                if quicksamplesize and (len(sample) >= quicksamplesize):
-                    return
+            sample.add(curr)
+            if quicksamplesize and (len(sample) >= quicksamplesize):
+                return
         seen.add(curr)
         for p in dag.parents(curr):
             if not nodes or p in nodes:
                 dist.setdefault(p, d + 1)
                 visit.append(p)
 
-def _setupsample(dag, nodes, size):
-    if len(nodes) <= size:
-        return set(nodes), None, 0
-    always = dag.headsetofconnecteds(nodes)
-    desiredlen = size - len(always)
-    if desiredlen <= 0:
-        # This could be bad if there are very many heads, all unknown to the
-        # server. We're counting on long request support here.
-        return always, None, desiredlen
-    return always, set(), desiredlen
+def _takequicksample(dag, nodes, size):
+    """takes a quick sample of size <size>
+
+    It is meant for initial sampling and focuses on querying heads and close
+    ancestors of heads.
 
-def _takequicksample(dag, nodes, size, initial):
-    always, sample, desiredlen = _setupsample(dag, nodes, size)
-    if sample is None:
-        return always
-    if initial:
-        fromset = None
-    else:
-        fromset = nodes
-    _updatesample(dag, fromset, sample, always, quicksamplesize=desiredlen)
-    sample.update(always)
+    :dag: a dag object
+    :nodes: set of nodes to discover
+    :size: the maximum size of the sample"""
+    sample = dag.headsetofconnecteds(nodes)
+    if size <= len(sample):
+        return _limitsample(sample, size)
+    _updatesample(dag, None, sample, quicksamplesize=size)
     return sample
 
 def _takefullsample(dag, nodes, size):
-    always, sample, desiredlen = _setupsample(dag, nodes, size)
-    if sample is None:
-        return always
+    sample = dag.headsetofconnecteds(nodes)
     # update from heads
-    _updatesample(dag, nodes, sample, always)
+    _updatesample(dag, nodes, sample)
     # update from roots
-    _updatesample(dag.inverse(), nodes, sample, always)
+    _updatesample(dag.inverse(), nodes, sample)
     assert sample
-    sample = _limitsample(sample, desiredlen)
-    if len(sample) < desiredlen:
-        more = desiredlen - len(sample)
-        sample.update(random.sample(list(nodes - sample - always), more))
-    sample.update(always)
+    sample = _limitsample(sample, size)
+    if len(sample) < size:
+        more = size - len(sample)
+        sample.update(random.sample(list(nodes - sample), more))
     return sample
 
 def _limitsample(sample, desiredlen):
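
_updatesample samples nodes whose distance from a head is a power of two
(1, 2, 4, 8, ...), so a long linear history contributes only O(log n) sample
points. A simplified standalone version of the walk, assuming a
``{rev: [parents]}`` mapping and breadth-first order like the original:

    def exponentialsample(parents, heads, samplesize=None):
        """Pick nodes at distance 1, 2, 4, 8, ... from each head."""
        sample = set(heads)
        dist = dict((h, 1) for h in heads)
        visit = list(heads)
        seen = set()
        factor = 1
        while visit:
            curr = visit.pop(0)
            if curr in seen:
                continue
            d = dist[curr]
            while d > factor:
                factor *= 2
            if d == factor:
                sample.add(curr)
                if samplesize and len(sample) >= samplesize:
                    return sample
            seen.add(curr)
            for p in parents.get(curr, []):
                if p not in dist:
                    dist[p] = d + 1
                visit.append(p)
        return sample

    # a 100-changeset linear history: 0 <- 1 <- ... <- 99, head is 99
    parents = dict((r, [r - 1]) for r in range(1, 100))
    # sampled revs sit at distances 1, 2, 4, ..., 64 from the head
    assert exponentialsample(parents, [99]) == {99, 98, 96, 92, 84, 68, 36}
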
@@ -174,50 +175,46 @@
 
     # full blown discovery
 
+    # own nodes I know we both know
+    # treat remote heads (and maybe own heads) as a first implicit sample
+    # response
+    common = cl.incrementalmissingrevs(srvheads)
+    commoninsample = set(n for i, n in enumerate(sample) if yesno[i])
+    common.addbases(commoninsample)
     # own nodes where I don't know if remote knows them
-    undecided = dag.nodeset()
-    # own nodes I know we both know
-    common = set()
+    undecided = set(common.missingancestors(ownheads))
     # own nodes I know remote lacks
     missing = set()
 
-    # treat remote heads (and maybe own heads) as a first implicit sample
-    # response
-    common.update(dag.ancestorset(srvheads))
-    undecided.difference_update(common)
-
     full = False
     while undecided:
 
         if sample:
-            commoninsample = set(n for i, n in enumerate(sample) if yesno[i])
-            common.update(dag.ancestorset(commoninsample, common))
-
             missinginsample = [n for i, n in enumerate(sample) if not yesno[i]]
             missing.update(dag.descendantset(missinginsample, missing))
 
             undecided.difference_update(missing)
-            undecided.difference_update(common)
 
         if not undecided:
             break
 
-        if full:
-            ui.note(_("sampling from both directions\n"))
-            sample = _takefullsample(dag, undecided, size=fullsamplesize)
-            targetsize = fullsamplesize
-        elif common:
-            # use cheapish initial sample
-            ui.debug("taking initial sample\n")
-            sample = _takefullsample(dag, undecided, size=fullsamplesize)
+        if full or common.hasbases():
+            if full:
+                ui.note(_("sampling from both directions\n"))
+            else:
+                ui.debug("taking initial sample\n")
+            samplefunc = _takefullsample
             targetsize = fullsamplesize
         else:
             # use even cheaper initial sample
             ui.debug("taking quick initial sample\n")
-            sample = _takequicksample(dag, undecided, size=initialsamplesize,
-                                      initial=True)
+            samplefunc = _takequicksample
             targetsize = initialsamplesize
-        sample = _limitsample(sample, targetsize)
+        if len(undecided) < targetsize:
+            sample = list(undecided)
+        else:
+            sample = samplefunc(dag, undecided, targetsize)
+            sample = _limitsample(sample, targetsize)
 
         roundtrips += 1
         ui.progress(_('searching'), roundtrips, unit=_('queries'))
@@ -228,7 +225,17 @@
         yesno = remote.known(dag.externalizeall(sample))
         full = True
 
-    result = dag.headsetofconnecteds(common)
+        if sample:
+            commoninsample = set(n for i, n in enumerate(sample) if yesno[i])
+            common.addbases(commoninsample)
+            common.removeancestorsfrom(undecided)
+
+    # heads(common) == heads(common.bases) since common represents common.bases
+    # and all its ancestors
+    result = dag.headsetofconnecteds(common.bases)
+    # common.bases can include nullrev, but our contract requires us to not
+    # return any heads in that case, so discard that
+    result.discard(nullrev)
     ui.progress(_('searching'), None)
     ui.debug("%d total queries\n" % roundtrips)
 
--- a/mercurial/sshpeer.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/sshpeer.py	Sat Jan 17 18:28:30 2015 -0800
@@ -57,7 +57,7 @@
                 util.shellquote("%s init %s" %
                     (_serverquote(remotecmd), _serverquote(self.path))))
             ui.debug('running %s\n' % cmd)
-            res = util.system(cmd, out=ui.fout)
+            res = ui.system(cmd)
             if res != 0:
                 self._abort(error.RepoError(_("could not create remote repo")))
 
--- a/mercurial/sslutil.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/sslutil.py	Sat Jan 17 18:28:30 2015 -0800
@@ -14,26 +14,55 @@
     # avoid using deprecated/broken FakeSocket in python 2.6
     import ssl
     CERT_REQUIRED = ssl.CERT_REQUIRED
-    PROTOCOL_TLSv1 = ssl.PROTOCOL_TLSv1
-    def ssl_wrap_socket(sock, keyfile, certfile, ssl_version=PROTOCOL_TLSv1,
-                cert_reqs=ssl.CERT_NONE, ca_certs=None):
-        sslsocket = ssl.wrap_socket(sock, keyfile, certfile,
-                                    cert_reqs=cert_reqs, ca_certs=ca_certs,
-                                    ssl_version=ssl_version)
-        # check if wrap_socket failed silently because socket had been closed
-        # - see http://bugs.python.org/issue13721
-        if not sslsocket.cipher():
-            raise util.Abort(_('ssl connection failed'))
-        return sslsocket
+    try:
+        ssl_context = ssl.SSLContext
+
+        def ssl_wrap_socket(sock, keyfile, certfile, cert_reqs=ssl.CERT_NONE,
+                            ca_certs=None, serverhostname=None):
+            # Allow any version of SSL starting with TLSv1 and
+            # up. Note that specifying TLSv1 here prohibits use of
+            # newer standards (like TLSv1_2), so this is the right way
+            # to do this. Note that in the future it'd be better to
+            # support using ssl.create_default_context(), which sets
+            # up a bunch of things in smart ways (strong ciphers,
+            # protocol versions, etc) and is upgraded by Python
+            # maintainers for us, but that breaks too many things to
+            # do it in a hurry.
+            sslcontext = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
+            sslcontext.options &= ssl.OP_NO_SSLv2 & ssl.OP_NO_SSLv3
+            if certfile is not None:
+                sslcontext.load_cert_chain(certfile, keyfile)
+            sslcontext.verify_mode = cert_reqs
+            if ca_certs is not None:
+                sslcontext.load_verify_locations(cafile=ca_certs)
+
+            sslsocket = sslcontext.wrap_socket(sock,
+                                               server_hostname=serverhostname)
+            # check if wrap_socket failed silently because socket had been
+            # closed
+            # - see http://bugs.python.org/issue13721
+            if not sslsocket.cipher():
+                raise util.Abort(_('ssl connection failed'))
+            return sslsocket
+    except AttributeError:
+        def ssl_wrap_socket(sock, keyfile, certfile, cert_reqs=ssl.CERT_NONE,
+                            ca_certs=None, serverhostname=None):
+            sslsocket = ssl.wrap_socket(sock, keyfile, certfile,
+                                        cert_reqs=cert_reqs, ca_certs=ca_certs,
+                                        ssl_version=ssl.PROTOCOL_TLSv1)
+            # check if wrap_socket failed silently because socket had been
+            # closed
+            # - see http://bugs.python.org/issue13721
+            if not sslsocket.cipher():
+                raise util.Abort(_('ssl connection failed'))
+            return sslsocket
 except ImportError:
     CERT_REQUIRED = 2
 
-    PROTOCOL_TLSv1 = 3
-
     import socket, httplib
 
-    def ssl_wrap_socket(sock, keyfile, certfile, ssl_version=PROTOCOL_TLSv1,
-                        cert_reqs=CERT_REQUIRED, ca_certs=None):
+    def ssl_wrap_socket(sock, keyfile, certfile, cert_reqs=CERT_REQUIRED,
+                        ca_certs=None, serverhostname=None):
         if not util.safehasattr(socket, 'ssl'):
             raise util.Abort(_('Python SSL support not found'))
         if ca_certs:
@@ -101,8 +130,7 @@
             exe.startswith('/system/library/frameworks/python.framework/'))
 
 def sslkwargs(ui, host):
-    kws = {'ssl_version': PROTOCOL_TLSv1,
-           }
+    kws = {}
     hostfingerprint = ui.config('hostfingerprints', host)
     if hostfingerprint:
         return kws
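
The SSLContext branch above is the forward-looking path: negotiate with
PROTOCOL_SSLv23 for maximum range, then refuse SSLv2/SSLv3 via the options
bitfield, and fall back to ssl.wrap_socket with PROTOCOL_TLSv1 on Pythons
without SSLContext. A minimal standalone sketch of the context setup (client
side, no Mercurial specifics; host and port are placeholders):

    import ssl

    def makecontext(cafile=None):
        # negotiate the highest protocol version both sides support ...
        ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        # ... but refuse the broken SSLv2/SSLv3 protocols outright
        ctx.options |= ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3
        if cafile is not None:
            ctx.verify_mode = ssl.CERT_REQUIRED
            ctx.load_verify_locations(cafile=cafile)
        return ctx

    # usage sketch: wrap an already-connected socket
    # sock = socket.create_connection(('example.com', 443))
    # ssock = makecontext().wrap_socket(sock, server_hostname='example.com')
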
--- a/mercurial/statichttprepo.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/statichttprepo.py	Sat Jan 17 18:28:30 2015 -0800
@@ -8,7 +8,7 @@
 # GNU General Public License version 2 or any later version.
 
 from i18n import _
-import changelog, byterange, url, error
+import changelog, byterange, url, error, namespaces
 import localrepo, manifest, util, scmutil, store
 import urllib, urllib2, errno, os
 
@@ -70,7 +70,7 @@
         def __init__(self, base):
             self.base = base
 
-        def __call__(self, path, mode="r", atomictemp=None):
+        def __call__(self, path, mode='r', *args, **kw):
             if mode not in ('r', 'rb'):
                 raise IOError('Permission denied')
             f = "/".join((self.base, urllib.quote(path)))
@@ -106,8 +106,10 @@
         self.vfs = self.opener
         self._phasedefaults = []
 
+        self.names = namespaces.namespaces()
+
         try:
-            requirements = scmutil.readrequires(self.opener, self.supported)
+            requirements = scmutil.readrequires(self.vfs, self.supported)
         except IOError, inst:
             if inst.errno != errno.ENOENT:
                 raise
@@ -115,7 +117,7 @@
 
             # check if it is a non-empty old-style repository
             try:
-                fp = self.opener("00changelog.i")
+                fp = self.vfs("00changelog.i")
                 fp.read(1)
                 fp.close()
             except IOError, inst:
@@ -128,14 +130,14 @@
         # setup store
         self.store = store.store(requirements, self.path, opener)
         self.spath = self.store.path
-        self.sopener = self.store.opener
-        self.svfs = self.sopener
+        self.svfs = self.store.opener
+        self.sopener = self.svfs
         self.sjoin = self.store.join
         self._filecache = {}
         self.requirements = requirements
 
-        self.manifest = manifest.manifest(self.sopener)
-        self.changelog = changelog.changelog(self.sopener)
+        self.manifest = manifest.manifest(self.svfs)
+        self.changelog = changelog.changelog(self.svfs)
         self._tags = None
         self.nodetagscache = None
         self._branchcaches = {}
--- a/mercurial/subrepo.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/subrepo.py	Sat Jan 17 18:28:30 2015 -0800
@@ -5,6 +5,7 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
+import copy
 import errno, os, re, shutil, posixpath, sys
 import xml.dom.minidom
 import stat, subprocess, tarfile
@@ -32,16 +33,6 @@
     '''get a unique filename for the store hash cache of a remote repository'''
     return util.sha1(_expandedabspath(remotepath)).hexdigest()[0:12]
 
-def _calcfilehash(filename):
-    data = ''
-    if os.path.exists(filename):
-        fd = open(filename, 'rb')
-        try:
-            data = fd.read()
-        finally:
-            fd.close()
-    return util.sha1(data).hexdigest()
-
 class SubrepoAbort(error.Abort):
     """Exception class used to avoid handling a subrepo error more than once"""
     def __init__(self, *args, **kw):
@@ -311,7 +302,7 @@
             return repo.ui.config('paths', 'default-push')
         if repo.ui.config('paths', 'default'):
             return repo.ui.config('paths', 'default')
-        if repo.sharedpath != repo.path:
+        if repo.shared():
             # chop off the .hg component to get the default path form
             return os.path.dirname(repo.sharedpath)
     if abort:
@@ -382,6 +373,9 @@
 
 class abstractsubrepo(object):
 
+    def __init__(self, ui):
+        self.ui = ui
+
     def storeclean(self, path):
         """
         returns true if the repository has not changed since it was last
@@ -444,7 +438,11 @@
     def add(self, ui, match, dryrun, listsubrepos, prefix, explicitonly):
         return []
 
-    def cat(self, ui, match, prefix, **opts):
+    def addremove(self, matcher, prefix, opts, dry_run, similarity):
+        self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
+        return 1
+
+    def cat(self, match, prefix, **opts):
         return 1
 
     def status(self, rev2, **opts):
@@ -471,24 +469,24 @@
         """return file flags"""
         return ''
 
-    def archive(self, ui, archiver, prefix, match=None):
+    def archive(self, archiver, prefix, match=None):
         if match is not None:
             files = [f for f in self.files() if match(f)]
         else:
             files = self.files()
         total = len(files)
         relpath = subrelpath(self)
-        ui.progress(_('archiving (%s)') % relpath, 0,
-                    unit=_('files'), total=total)
+        self.ui.progress(_('archiving (%s)') % relpath, 0,
+                         unit=_('files'), total=total)
         for i, name in enumerate(files):
             flags = self.fileflags(name)
             mode = 'x' in flags and 0755 or 0644
             symlink = 'l' in flags
             archiver.addfile(os.path.join(prefix, self._path, name),
                              mode, symlink, self.filedata(name))
-            ui.progress(_('archiving (%s)') % relpath, i + 1,
-                        unit=_('files'), total=total)
-        ui.progress(_('archiving (%s)') % relpath, None)
+            self.ui.progress(_('archiving (%s)') % relpath, i + 1,
+                             unit=_('files'), total=total)
+        self.ui.progress(_('archiving (%s)') % relpath, None)
         return total
 
     def walk(self, match):
@@ -498,11 +496,18 @@
         '''
         pass
 
-    def forget(self, ui, match, prefix):
+    def forget(self, match, prefix):
         return ([], [])
 
-    def revert(self, ui, substate, *pats, **opts):
-        ui.warn('%s: reverting %s subrepos is unsupported\n' \
+    def removefiles(self, matcher, prefix, after, force, subrepos):
+        """remove the matched files from the subrepository and the filesystem,
+        possibly by force and/or after the file has been removed from the
+        filesystem.  Return 0 on success, 1 on any warning.
+        """
+        return 1
+
+    def revert(self, substate, *pats, **opts):
+        self.ui.warn('%s: reverting %s subrepos is unsupported\n' \
             % (substate[0], substate[2]))
         return []
 
@@ -511,20 +516,19 @@
 
 class hgsubrepo(abstractsubrepo):
     def __init__(self, ctx, path, state):
+        super(hgsubrepo, self).__init__(ctx._repo.ui)
         self._path = path
         self._state = state
         r = ctx._repo
         root = r.wjoin(path)
-        create = False
-        if not os.path.exists(os.path.join(root, '.hg')):
-            create = True
-            util.makedirs(root)
+        create = not r.wvfs.exists('%s/.hg' % path)
         self._repo = hg.repository(r.baseui, root, create=create)
+        self.ui = self._repo.ui
         for s, k in [('ui', 'commitsubrepos')]:
             v = r.ui.config(s, k)
             if v:
-                self._repo.ui.setconfig(s, k, v, 'subrepo')
-        self._repo.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
+                self.ui.setconfig(s, k, v, 'subrepo')
+        self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
         self._initrepo(r, state[0], create)
 
     def storeclean(self, path):
@@ -562,26 +566,19 @@
         # sort the files that will be hashed in increasing (likely) file size
         filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
         yield '# %s\n' % _expandedabspath(remotepath)
+        vfs = self._repo.vfs
         for relname in filelist:
-            absname = os.path.normpath(self._repo.join(relname))
-            yield '%s = %s\n' % (relname, _calcfilehash(absname))
+            filehash = util.sha1(vfs.tryread(relname)).hexdigest()
+            yield '%s = %s\n' % (relname, filehash)
 
-    def _getstorehashcachepath(self, remotepath):
-        '''get a unique path for the store hash cache'''
-        return self._repo.join(os.path.join(
-            'cache', 'storehash', _getstorehashcachename(remotepath)))
+    @propertycache
+    def _cachestorehashvfs(self):
+        return scmutil.vfs(self._repo.join('cache/storehash'))
 
     def _readstorehashcache(self, remotepath):
         '''read the store hash cache for a given remote repository'''
-        cachefile = self._getstorehashcachepath(remotepath)
-        if not os.path.exists(cachefile):
-            return ''
-        fd = open(cachefile, 'r')
-        try:
-            pullstate = fd.readlines()
-        finally:
-            fd.close()
-        return pullstate
+        cachefile = _getstorehashcachename(remotepath)
+        return self._cachestorehashvfs.tryreadlines(cachefile, 'r')
 
     def _cachestorehash(self, remotepath):
         '''cache the current store hash
@@ -589,18 +586,12 @@
         Each remote repo requires its own store hash cache, because a subrepo
         store may be "clean" versus a given remote repo, but not versus another
         '''
-        cachefile = self._getstorehashcachepath(remotepath)
+        cachefile = _getstorehashcachename(remotepath)
         lock = self._repo.lock()
         try:
             storehash = list(self._calcstorehash(remotepath))
-            cachedir = os.path.dirname(cachefile)
-            if not os.path.exists(cachedir):
-                util.makedirs(cachedir, notindexed=True)
-            fd = open(cachefile, 'w')
-            try:
-                fd.writelines(storehash)
-            finally:
-                fd.close()
+            vfs = self._cachestorehashvfs
+            vfs.writelines(cachefile, storehash, mode='w', notindexed=True)
         finally:
             lock.release()
 
@@ -615,7 +606,7 @@
             def addpathconfig(key, value):
                 if value:
                     lines.append('%s = %s\n' % (key, value))
-                    self._repo.ui.setconfig('paths', key, value, 'subrepo')
+                    self.ui.setconfig('paths', key, value, 'subrepo')
 
             defpath = _abssource(self._repo, abort=False)
             defpushpath = _abssource(self._repo, True, abort=False)
@@ -623,22 +614,33 @@
             if defpath != defpushpath:
                 addpathconfig('default-push', defpushpath)
 
-            fp = self._repo.opener("hgrc", "w", text=True)
+            fp = self._repo.vfs("hgrc", "w", text=True)
             try:
                 fp.write(''.join(lines))
             finally:
                 fp.close()
 
     @annotatesubrepoerror
-    def add(self, ui, match, dryrun, listsubrepos, prefix, explicitonly):
-        return cmdutil.add(ui, self._repo, match, dryrun, listsubrepos,
-                           os.path.join(prefix, self._path), explicitonly)
+    def add(self, ui, match, prefix, explicitonly, **opts):
+        return cmdutil.add(ui, self._repo, match,
+                           os.path.join(prefix, self._path), explicitonly,
+                           **opts)
+
+    def addremove(self, m, prefix, opts, dry_run, similarity):
+        # In the same way as subdirectories are processed, once in a subrepo,
+        # always enter any of its subrepos.  Don't corrupt the options that
+        # will be used to process sibling subrepos, however.
+        opts = copy.copy(opts)
+        opts['subrepos'] = True
+        return scmutil.addremove(self._repo, m,
+                                 os.path.join(prefix, self._path), opts,
+                                 dry_run, similarity)
 
     @annotatesubrepoerror
-    def cat(self, ui, match, prefix, **opts):
+    def cat(self, match, prefix, **opts):
         rev = self._state[1]
         ctx = self._repo[rev]
-        return cmdutil.cat(ui, self._repo, ctx, match, prefix, **opts)
+        return cmdutil.cat(self.ui, self._repo, ctx, match, prefix, **opts)
 
     @annotatesubrepoerror
     def status(self, rev2, **opts):
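
The hgsubrepo.addremove override above copies the options dict before forcing
``subrepos=True``, so recursing into one subrepo does not change how its
siblings (or the parent repo) are processed. The idiom in isolation:

    import copy

    def subrepoopts(opts):
        # copy before mutating so the caller's opts are left untouched
        subopts = copy.copy(opts)
        subopts['subrepos'] = True
        return subopts

    opts = {'subrepos': False, 'dry_run': True}
    assert subrepoopts(opts)['subrepos'] is True
    assert opts['subrepos'] is False
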
@@ -648,8 +650,8 @@
             ctx2 = self._repo[rev2]
             return self._repo.status(ctx1, ctx2, **opts)
         except error.RepoLookupError, inst:
-            self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
-                               % (inst, subrelpath(self)))
+            self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
+                         % (inst, subrelpath(self)))
             return scmutil.status([], [], [], [], [], [], [])
 
     @annotatesubrepoerror
@@ -665,20 +667,20 @@
                                    prefix=posixpath.join(prefix, self._path),
                                    listsubrepos=True, **opts)
         except error.RepoLookupError, inst:
-            self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
-                               % (inst, subrelpath(self)))
+            self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
+                          % (inst, subrelpath(self)))
 
     @annotatesubrepoerror
-    def archive(self, ui, archiver, prefix, match=None):
+    def archive(self, archiver, prefix, match=None):
         self._get(self._state + ('hg',))
-        total = abstractsubrepo.archive(self, ui, archiver, prefix, match)
+        total = abstractsubrepo.archive(self, archiver, prefix, match)
         rev = self._state[1]
         ctx = self._repo[rev]
         for subpath in ctx.substate:
             s = subrepo(ctx, subpath)
             submatch = matchmod.narrowmatcher(subpath, match)
             total += s.archive(
-                ui, archiver, os.path.join(prefix, self._path), submatch)
+                archiver, os.path.join(prefix, self._path), submatch)
         return total
 
     @annotatesubrepoerror
@@ -704,7 +706,7 @@
         # updated
         if not self.dirty(True):
             return self._repo['.'].hex()
-        self._repo.ui.debug("committing subrepo %s\n" % subrelpath(self))
+        self.ui.debug("committing subrepo %s\n" % subrelpath(self))
         n = self._repo.commit(text, user, date)
         if not n:
             return self._repo['.'].hex() # different version checked out
@@ -718,7 +720,7 @@
     def remove(self):
         # we can't fully delete the repository as it may contain
         # local-only history
-        self._repo.ui.note(_('removing subrepo %s\n') % subrelpath(self))
+        self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
         hg.clean(self._repo, node.nullid, False)
 
     def _get(self, state):
@@ -729,8 +731,8 @@
         srcurl = _abssource(self._repo)
         other = hg.peer(self._repo, {}, srcurl)
         if len(self._repo) == 0:
-            self._repo.ui.status(_('cloning subrepo %s from %s\n')
-                                 % (subrelpath(self), srcurl))
+            self.ui.status(_('cloning subrepo %s from %s\n')
+                           % (subrelpath(self), srcurl))
             parentrepo = self._repo._subparent
             shutil.rmtree(self._repo.path)
             other, cloned = hg.clone(self._repo._subparent.baseui, {},
@@ -740,8 +742,8 @@
             self._initrepo(parentrepo, source, create=True)
             self._cachestorehash(srcurl)
         else:
-            self._repo.ui.status(_('pulling subrepo %s from %s\n')
-                                 % (subrelpath(self), srcurl))
+            self.ui.status(_('pulling subrepo %s from %s\n')
+                           % (subrelpath(self), srcurl))
             cleansub = self.storeclean(srcurl)
             exchange.pull(self._repo, other)
             if cleansub:
@@ -774,18 +776,18 @@
 
         def mergefunc():
             if anc == cur and dst.branch() == cur.branch():
-                self._repo.ui.debug("updating subrepo %s\n" % subrelpath(self))
+                self.ui.debug("updating subrepo %s\n" % subrelpath(self))
                 hg.update(self._repo, state[1])
             elif anc == dst:
-                self._repo.ui.debug("skipping subrepo %s\n" % subrelpath(self))
+                self.ui.debug("skipping subrepo %s\n" % subrelpath(self))
             else:
-                self._repo.ui.debug("merging subrepo %s\n" % subrelpath(self))
+                self.ui.debug("merging subrepo %s\n" % subrelpath(self))
                 hg.merge(self._repo, state[1], remind=False)
 
         wctx = self._repo[None]
         if self.dirty():
             if anc != dst:
-                if _updateprompt(self._repo.ui, self, wctx.dirty(), cur, dst):
+                if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
                     mergefunc()
             else:
                 mergefunc()
@@ -808,11 +810,11 @@
         dsturl = _abssource(self._repo, True)
         if not force:
             if self.storeclean(dsturl):
-                self._repo.ui.status(
+                self.ui.status(
                     _('no changes made to subrepo %s since last push to %s\n')
                     % (subrelpath(self), dsturl))
                 return None
-        self._repo.ui.status(_('pushing subrepo %s to %s\n') %
+        self.ui.status(_('pushing subrepo %s to %s\n') %
             (subrelpath(self), dsturl))
         other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
         res = exchange.push(self._repo, other, force, newbranch=newbranch)
@@ -849,18 +851,24 @@
         return ctx.walk(match)
 
     @annotatesubrepoerror
-    def forget(self, ui, match, prefix):
-        return cmdutil.forget(ui, self._repo, match,
+    def forget(self, match, prefix):
+        return cmdutil.forget(self.ui, self._repo, match,
                               os.path.join(prefix, self._path), True)
 
     @annotatesubrepoerror
-    def revert(self, ui, substate, *pats, **opts):
+    def removefiles(self, matcher, prefix, after, force, subrepos):
+        return cmdutil.remove(self.ui, self._repo, matcher,
+                              os.path.join(prefix, self._path), after, force,
+                              subrepos)
+
+    @annotatesubrepoerror
+    def revert(self, substate, *pats, **opts):
         # reverting a subrepo is a 2 step process:
         # 1. if the no_backup is not set, revert all modified
         #    files inside the subrepo
         # 2. update the subrepo to the revision specified in
         #    the corresponding substate dictionary
-        ui.status(_('reverting subrepo %s\n') % substate[0])
+        self.ui.status(_('reverting subrepo %s\n') % substate[0])
         if not opts.get('no_backup'):
             # Revert all files on the subrepo, creating backups
             # Note that this will not recursively revert subrepos
@@ -872,29 +880,29 @@
             pats = []
             if not opts.get('all'):
                 pats = ['set:modified()']
-            self.filerevert(ui, *pats, **opts)
+            self.filerevert(*pats, **opts)
 
         # Update the repo to the revision specified in the given substate
         self.get(substate, overwrite=True)
 
-    def filerevert(self, ui, *pats, **opts):
+    def filerevert(self, *pats, **opts):
         ctx = self._repo[opts['rev']]
         parents = self._repo.dirstate.parents()
         if opts.get('all'):
             pats = ['set:modified()']
         else:
             pats = []
-        cmdutil.revert(ui, self._repo, ctx, parents, *pats, **opts)
+        cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)
 
     def shortid(self, revid):
         return revid[:12]
 
 class svnsubrepo(abstractsubrepo):
     def __init__(self, ctx, path, state):
+        super(svnsubrepo, self).__init__(ctx._repo.ui)
         self._path = path
         self._state = state
         self._ctx = ctx
-        self._ui = ctx._repo.ui
         self._exe = util.findexe('svn')
         if not self._exe:
             raise util.Abort(_("'svn' executable not found for subrepo '%s'")
@@ -903,7 +911,7 @@
     def _svncommand(self, commands, filename='', failok=False):
         cmd = [self._exe]
         extrakw = {}
-        if not self._ui.interactive():
+        if not self.ui.interactive():
             # Making stdin be a pipe should prevent svn from behaving
             # interactively even if we can't pass --non-interactive.
             extrakw['stdin'] = subprocess.PIPE
@@ -933,7 +941,7 @@
             if p.returncode:
                 raise util.Abort(stderr or 'exited with code %d' % p.returncode)
             if stderr:
-                self._ui.warn(stderr + '\n')
+                self.ui.warn(stderr + '\n')
         return stdout, stderr
 
     @propertycache
@@ -1025,7 +1033,7 @@
             # seems a better approach.
             raise util.Abort(_('cannot commit missing svn entries'))
         commitinfo, err = self._svncommand(['commit', '-m', text])
-        self._ui.status(commitinfo)
+        self.ui.status(commitinfo)
         newrev = re.search('Committed revision ([0-9]+).', commitinfo)
         if not newrev:
             if not commitinfo.strip():
@@ -1036,16 +1044,16 @@
                 raise util.Abort(_('failed to commit svn changes'))
             raise util.Abort(commitinfo.splitlines()[-1])
         newrev = newrev.groups()[0]
-        self._ui.status(self._svncommand(['update', '-r', newrev])[0])
+        self.ui.status(self._svncommand(['update', '-r', newrev])[0])
         return newrev
 
     @annotatesubrepoerror
     def remove(self):
         if self.dirty():
-            self._ui.warn(_('not removing repo %s because '
-                            'it has changes.\n') % self._path)
+            self.ui.warn(_('not removing repo %s because '
+                           'it has changes.\n') % self._path)
             return
-        self._ui.note(_('removing subrepo %s\n') % self._path)
+        self.ui.note(_('removing subrepo %s\n') % self._path)
 
         def onerror(function, path, excinfo):
             if function is not os.remove:
@@ -1075,7 +1083,7 @@
         # update to a directory which has since been deleted and recreated.
         args.append('%s@%s' % (state[0], state[1]))
         status, err = self._svncommand(args, failok=True)
-        _sanitize(self._ui, self._ctx._repo.wjoin(self._path), '.svn')
+        _sanitize(self.ui, self._ctx._repo.wjoin(self._path), '.svn')
         if not re.search('Checked out revision [0-9]+.', status):
             if ('is already a working copy for a different URL' in err
                 and (self._wcchanged()[:2] == (False, False))):
@@ -1084,7 +1092,7 @@
                 self.get(state, overwrite=False)
                 return
             raise util.Abort((status or err).splitlines()[-1])
-        self._ui.status(status)
+        self.ui.status(status)
 
     @annotatesubrepoerror
     def merge(self, state):
@@ -1093,7 +1101,7 @@
         wcrev = self._wcrev()
         if new != wcrev:
             dirty = old == wcrev or self._wcchanged()[0]
-            if _updateprompt(self._ui, self, dirty, wcrev, new):
+            if _updateprompt(self.ui, self, dirty, wcrev, new):
                 self.get(state, False)
 
     def push(self, opts):
@@ -1121,13 +1129,13 @@
 
 class gitsubrepo(abstractsubrepo):
     def __init__(self, ctx, path, state):
+        super(gitsubrepo, self).__init__(ctx._repo.ui)
         self._state = state
         self._ctx = ctx
         self._path = path
         self._relpath = os.path.join(reporelpath(ctx._repo), path)
         self._abspath = ctx._repo.wjoin(path)
         self._subparent = ctx._repo
-        self._ui = ctx._repo.ui
         self._ensuregit()
 
     def _ensuregit(self):
@@ -1141,11 +1149,23 @@
             out, err = self._gitnodir(['--version'])
         versionstatus = self._checkversion(out)
         if versionstatus == 'unknown':
-            self._ui.warn(_('cannot retrieve git version\n'))
+            self.ui.warn(_('cannot retrieve git version\n'))
         elif versionstatus == 'abort':
             raise util.Abort(_('git subrepo requires at least 1.6.0 or later'))
         elif versionstatus == 'warning':
-            self._ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
+            self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
+
+    @staticmethod
+    def _gitversion(out):
+        m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out)
+        if m:
+            return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
+
+        m = re.search(r'^git version (\d+)\.(\d+)', out)
+        if m:
+            return (int(m.group(1)), int(m.group(2)), 0)
+
+        return -1
 
     @staticmethod
     def _checkversion(out):
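
With _gitversion returning a full (major, minor, patch) tuple, or -1 when the
output is unrecognized, _checkversion can compare against three-component
thresholds such as (1, 5, 0) below. A standalone copy of the parsing logic
with a few illustrative inputs (the version strings are examples, not output
captured from any particular git build):

    import re

    def gitversion(out):
        """Parse `git --version` output into a (major, minor, patch) tuple."""
        m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out)
        if m:
            return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
        m = re.search(r'^git version (\d+)\.(\d+)', out)
        if m:
            return (int(m.group(1)), int(m.group(2)), 0)
        return -1

    assert gitversion('git version 1.8.3.1') == (1, 8, 3)
    assert gitversion('git version 1.6') == (1, 6, 0)
    assert gitversion('no git here') == -1
    assert gitversion('git version 1.5.1') < (1, 6, 0)
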
@@ -1171,16 +1191,15 @@
         >>> _checkversion('no')
         'unknown'
         '''
-        m = re.search(r'^git version (\d+)\.(\d+)', out)
-        if not m:
-            return 'unknown'
-        version = (int(m.group(1)), int(m.group(2)))
+        version = gitsubrepo._gitversion(out)
         # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
         # despite the docstring comment.  For now, error on 1.4.0, warn on
         # 1.5.0 but attempt to continue.
-        if version < (1, 5):
+        if version == -1:
+            return 'unknown'
+        if version < (1, 5, 0):
             return 'abort'
-        elif version < (1, 6):
+        elif version < (1, 6, 0):
             return 'warning'
         return 'ok'
 
@@ -1197,11 +1216,11 @@
         The method tries to call the git command. Versions prior to 1.6.0
         are not supported and very probably fail.
         """
-        self._ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
+        self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
         # unless ui.quiet is set, print git's stderr,
         # which is mostly progress and useful info
         errpipe = None
-        if self._ui.quiet:
+        if self.ui.quiet:
             errpipe = open(os.devnull, 'w')
         p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
                              cwd=cwd, env=env, close_fds=util.closefds,
@@ -1306,12 +1325,12 @@
     def _fetch(self, source, revision):
         if self._gitmissing():
             source = self._abssource(source)
-            self._ui.status(_('cloning subrepo %s from %s\n') %
+            self.ui.status(_('cloning subrepo %s from %s\n') %
                             (self._relpath, source))
             self._gitnodir(['clone', source, self._abspath])
         if self._githavelocally(revision):
             return
-        self._ui.status(_('pulling subrepo %s from %s\n') %
+        self.ui.status(_('pulling subrepo %s from %s\n') %
                         (self._relpath, self._gitremote('origin')))
         # try only origin: the originally cloned repo
         self._gitcommand(['fetch'])
@@ -1368,13 +1387,13 @@
                 self._gitcommand(['reset', 'HEAD'])
                 cmd.append('-f')
             self._gitcommand(cmd + args)
-            _sanitize(self._ui, self._abspath, '.git')
+            _sanitize(self.ui, self._abspath, '.git')
 
         def rawcheckout():
             # no branch to checkout, check it out with no branch
-            self._ui.warn(_('checking out detached HEAD in subrepo %s\n') %
+            self.ui.warn(_('checking out detached HEAD in subrepo %s\n') %
                           self._relpath)
-            self._ui.warn(_('check out a git branch if you intend '
+            self.ui.warn(_('check out a git branch if you intend '
                             'to make changes\n'))
             checkout(['-q', revision])
 
@@ -1417,7 +1436,7 @@
             if tracking[remote] != self._gitcurrentbranch():
                 checkout([tracking[remote]])
             self._gitcommand(['merge', '--ff', remote])
-            _sanitize(self._ui, self._abspath, '.git')
+            _sanitize(self.ui, self._abspath, '.git')
         else:
             # a real merge would be required, just checkout the revision
             rawcheckout()
@@ -1453,12 +1472,12 @@
                 self.get(state) # fast forward merge
             elif base != self._state[1]:
                 self._gitcommand(['merge', '--no-commit', revision])
-            _sanitize(self._ui, self._abspath, '.git')
+            _sanitize(self.ui, self._abspath, '.git')
 
         if self.dirty():
             if self._gitstate() != revision:
                 dirty = self._gitstate() == self._state[1] or code != 0
-                if _updateprompt(self._ui, self, dirty,
+                if _updateprompt(self.ui, self, dirty,
                                  self._state[1][:7], revision[:7]):
                     mergefunc()
         else:
@@ -1491,16 +1510,16 @@
         if current:
             # determine if the current branch is even useful
             if not self._gitisancestor(self._state[1], current):
-                self._ui.warn(_('unrelated git branch checked out '
+                self.ui.warn(_('unrelated git branch checked out '
                                 'in subrepo %s\n') % self._relpath)
                 return False
-            self._ui.status(_('pushing branch %s of subrepo %s\n') %
-                            (current.split('/', 2)[2], self._relpath))
+            self.ui.status(_('pushing branch %s of subrepo %s\n') %
+                           (current.split('/', 2)[2], self._relpath))
             ret = self._gitdir(cmd + ['origin', current])
             return ret[1] == 0
         else:
-            self._ui.warn(_('no branch checked out in subrepo %s\n'
-                            'cannot push revision %s\n') %
+            self.ui.warn(_('no branch checked out in subrepo %s\n'
+                           'cannot push revision %s\n') %
                           (self._relpath, self._state[1]))
             return False
 
@@ -1509,12 +1528,12 @@
         if self._gitmissing():
             return
         if self.dirty():
-            self._ui.warn(_('not removing repo %s because '
-                            'it has changes.\n') % self._relpath)
+            self.ui.warn(_('not removing repo %s because '
+                           'it has changes.\n') % self._relpath)
             return
         # we can't fully delete the repository as it may contain
         # local-only history
-        self._ui.note(_('removing subrepo %s\n') % self._relpath)
+        self.ui.note(_('removing subrepo %s\n') % self._relpath)
         self._gitcommand(['config', 'core.bare', 'true'])
         for f in os.listdir(self._abspath):
             if f == '.git':
@@ -1525,7 +1544,7 @@
             else:
                 os.remove(path)
 
-    def archive(self, ui, archiver, prefix, match=None):
+    def archive(self, archiver, prefix, match=None):
         total = 0
         source, revision = self._state
         if not revision:
@@ -1538,7 +1557,7 @@
         tarstream = self._gitcommand(['archive', revision], stream=True)
         tar = tarfile.open(fileobj=tarstream, mode='r|')
         relpath = subrelpath(self)
-        ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
+        self.ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
         for i, info in enumerate(tar):
             if info.isdir():
                 continue
@@ -1551,9 +1570,9 @@
             archiver.addfile(os.path.join(prefix, self._path, info.name),
                              info.mode, info.issym(), data)
             total += 1
-            ui.progress(_('archiving (%s)') % relpath, i + 1,
-                        unit=_('files'))
-        ui.progress(_('archiving (%s)') % relpath, None)
+            self.ui.progress(_('archiving (%s)') % relpath, i + 1,
+                             unit=_('files'))
+        self.ui.progress(_('archiving (%s)') % relpath, None)
         return total
 
 
@@ -1583,9 +1602,76 @@
                 removed.append(f)
 
         deleted, unknown, ignored, clean = [], [], [], []
+
+        if not rev2:
+            command = ['ls-files', '--others', '--exclude-standard']
+            out = self._gitcommand(command)
+            for line in out.split('\n'):
+                if len(line) == 0:
+                    continue
+                unknown.append(line)
+
         return scmutil.status(modified, added, removed, deleted,
                               unknown, ignored, clean)
 
+    @annotatesubrepoerror
+    def diff(self, ui, diffopts, node2, match, prefix, **opts):
+        node1 = self._state[1]
+        cmd = ['diff']
+        if opts['stat']:
+            cmd.append('--stat')
+        else:
+            # for Git, this also implies '-p'
+            cmd.append('-U%d' % diffopts.context)
+
+        gitprefix = os.path.join(prefix, self._path)
+
+        if diffopts.noprefix:
+            cmd.extend(['--src-prefix=%s/' % gitprefix,
+                        '--dst-prefix=%s/' % gitprefix])
+        else:
+            cmd.extend(['--src-prefix=a/%s/' % gitprefix,
+                        '--dst-prefix=b/%s/' % gitprefix])
+
+        if diffopts.ignorews:
+            cmd.append('--ignore-all-space')
+        if diffopts.ignorewsamount:
+            cmd.append('--ignore-space-change')
+        if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
+                and diffopts.ignoreblanklines:
+            cmd.append('--ignore-blank-lines')
+
+        cmd.append(node1)
+        if node2:
+            cmd.append(node2)
+
+        if match.anypats():
+            return  # No support for include/exclude yet
+
+        if match.always():
+            ui.write(self._gitcommand(cmd))
+        elif match.files():
+            for f in match.files():
+                ui.write(self._gitcommand(cmd + [f]))
+        elif match(gitprefix):  # Subrepo is matched
+            ui.write(self._gitcommand(cmd))
+
+    @annotatesubrepoerror
+    def revert(self, substate, *pats, **opts):
+        self.ui.status(_('reverting subrepo %s\n') % substate[0])
+        if not opts.get('no_backup'):
+            status = self.status(None)
+            names = status.modified
+            for name in names:
+                bakname = "%s.orig" % name
+                self.ui.note(_('saving current version of %s as %s\n') %
+                        (name, bakname))
+                util.rename(os.path.join(self._abspath, name),
+                            os.path.join(self._abspath, bakname))
+
+        self.get(substate, overwrite=True)
+        return []
+
     def shortid(self, revid):
         return revid[:7]
 
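The new gitsubrepo._gitversion helper above parses `git --version` output into a three-element tuple (a missing patch level becomes 0, unrecognizable output becomes -1), and _checkversion and diff() then compare that tuple against thresholds such as (1, 5, 0) and (1, 8, 4). A minimal standalone sketch of the same parsing idea, with an illustrative function name that is not part of Mercurial's API:

    import re

    def parsegitversion(out):
        """Parse 'git version X.Y[.Z]...' into a tuple, or -1 if unrecognized."""
        m = re.search(r'^git version (\d+)\.(\d+)(?:\.(\d+))?', out)
        if not m:
            return -1
        return (int(m.group(1)), int(m.group(2)), int(m.group(3) or 0))

    # tuples compare lexicographically, so threshold checks read naturally
    assert parsegitversion('git version 1.8.5.2') >= (1, 8, 4)
    assert parsegitversion('git version 1.5') < (1, 6, 0)
    assert parsegitversion('not a git version banner') == -1
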
--- a/mercurial/tagmerge.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/tagmerge.py	Sat Jan 17 18:28:30 2015 -0800
@@ -39,7 +39,7 @@
 #       and between base and p2, possibly on separate clones
 # 4. for each tag found both on p1 and p2 perform the following merge algorithm:
 #     - the tags conflict if their tag "histories" have the same "rank" (i.e.
-#       length) _AND_ the last (current) tag is _NOT_ the same
+#       length) AND the last (current) tag is NOT the same
 #     - for non conflicting tags:
 #         - choose which are the high and the low ranking nodes
 #             - the high ranking list of nodes is the one that is longer.
@@ -57,7 +57,7 @@
 # 5. write the merged tags taking into account to their positions in the first
 #    parent (i.e. try to keep the relative ordering of the nodes that come
 #    from p1). This minimizes the diff between the merged and the p1 tag files
-#    This is donw by using the following algorithm
+#    This is done by using the following algorithm
 #     - group the nodes for a given tag that must be written next to each other
 #         - A: nodes that come from consecutive lines on p1
 #         - B: nodes that come from p2 (i.e. whose associated line number is
@@ -81,9 +81,9 @@
 def readtagsformerge(ui, repo, lines, fn='', keeplinenums=False):
     '''read the .hgtags file into a structure that is suitable for merging
 
-    Sepending on the keeplinenumbers flag, clear the line numbers associated
-    with each tag. Rhis is done because only the line numbers of the first
-    parent are useful for merging
+    Depending on the keeplinenums flag, clear the line numbers associated
+    with each tag. This is done because only the line numbers of the first
+    parent are useful for merging.
     '''
     filetags = tagsmod._readtaghist(ui, repo, lines, fn=fn, recode=None,
                                     calcnodelines=True)[1]
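
The block comment amended above spells out when two tag histories conflict during a .hgtags merge: equal "rank" (history length) but a different current node. A tiny illustration of just that rule on plain lists of node ids; this is a sketch of the stated rule, not the tagmerge.py implementation:

    def tagsconflict(hist1, hist2):
        """Tags conflict when the histories have equal length ('rank')
        but their last (current) nodes differ."""
        return len(hist1) == len(hist2) and hist1[-1] != hist2[-1]

    # same rank, different tip -> conflict, needs manual resolution
    assert tagsconflict(['n1', 'n2'], ['n1', 'n3'])
    # different rank -> the higher-ranking (longer) history simply wins
    assert not tagsconflict(['n1', 'n2', 'n4'], ['n1', 'n3'])
    # same rank and same tip -> nothing to resolve
    assert not tagsconflict(['n1', 'n2'], ['n3', 'n2'])
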
--- a/mercurial/tags.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/tags.py	Sat Jan 17 18:28:30 2015 -0800
@@ -62,7 +62,7 @@
 def readlocaltags(ui, repo, alltags, tagtypes):
     '''Read local tags in repo.  Update alltags and tagtypes.'''
     try:
-        data = repo.opener.read("localtags")
+        data = repo.vfs.read("localtags")
     except IOError, inst:
         if inst.errno != errno.ENOENT:
             raise
@@ -87,7 +87,7 @@
 def _readtaghist(ui, repo, lines, fn, recode=None, calcnodelines=False):
     '''Read tag definitions from a file (or any source of lines).
     This function returns two sortdicts with similar information:
-    - the first dict, bingtaglist, contains the tag information as expected by
+    - the first dict, bintaghist, contains the tag information as expected by
       the _readtags function, i.e. a mapping from tag name to (node, hist):
         - node is the node id from the last line read for that name,
         - hist is the list of node ids previously associated with it (in file
@@ -193,7 +193,7 @@
     set, caller is responsible for reading tag info from each head.'''
 
     try:
-        cachefile = repo.opener('cache/tags', 'r')
+        cachefile = repo.vfs('cache/tags', 'r')
         # force reading the file for static-http
         cachelines = iter(cachefile)
     except IOError:
@@ -303,7 +303,7 @@
 def _writetagcache(ui, repo, heads, tagfnode, cachetags):
 
     try:
-        cachefile = repo.opener('cache/tags', 'w', atomictemp=True)
+        cachefile = repo.vfs('cache/tags', 'w', atomictemp=True)
     except (OSError, IOError):
         return
 
--- a/mercurial/templatefilters.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/templatefilters.py	Sat Jan 17 18:28:30 2015 -0800
@@ -199,7 +199,7 @@
         return '"%s"' % jsonescape(obj)
     elif util.safehasattr(obj, 'keys'):
         out = []
-        for k, v in obj.iteritems():
+        for k, v in sorted(obj.iteritems()):
             s = '%s: %s' % (json(k), json(v))
             out.append(s)
         return '{' + ', '.join(out) + '}'
@@ -208,6 +208,8 @@
         for i in obj:
             out.append(json(i))
         return '[' + ', '.join(out) + ']'
+    elif util.safehasattr(obj, '__call__'):
+        return json(obj())
     else:
         raise TypeError('cannot encode type %s' % obj.__class__.__name__)
 
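The templatefilters change above makes the json filter deterministic by sorting dictionary items, and teaches it to unwrap callables (lazily evaluated template data) before encoding. A simplified standalone sketch of both ideas; the real filter also escapes strings more carefully and handles additional types:

    def tojson(obj):
        if obj is None:
            return 'null'
        elif isinstance(obj, bool):
            return obj and 'true' or 'false'
        elif isinstance(obj, (int, float)):
            return str(obj)
        elif isinstance(obj, str):
            return '"%s"' % obj.replace('\\', '\\\\').replace('"', '\\"')
        elif callable(obj):
            # lazily evaluated template data: call it, then encode the result
            return tojson(obj())
        elif hasattr(obj, 'keys'):
            # sort the items so the output is stable across runs
            items = sorted(obj.items())
            return '{' + ', '.join('%s: %s' % (tojson(k), tojson(v))
                                   for k, v in items) + '}'
        else:
            return '[' + ', '.join(tojson(i) for i in obj) + ']'

    assert tojson({'b': 1, 'a': lambda: 2}) == '{"a": 2, "b": 1}'
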
--- a/mercurial/templatekw.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/templatekw.py	Sat Jan 17 18:28:30 2015 -0800
@@ -374,9 +374,13 @@
             subrepos.append(sub) # removed in ctx
     return showlist('subrepo', sorted(subrepos), **args)
 
-def showtags(**args):
-    """:tags: List of strings. Any tags associated with the changeset."""
-    return showlist('tag', args['ctx'].tags(), **args)
+def shownames(namespace, **args):
+    """helper method to generate a template keyword for a namespace"""
+    ctx = args['ctx']
+    repo = ctx._repo
+    ns = repo.names[namespace]
+    names = ns.names(repo, ctx.node())
+    return showlist(ns.templatename, names, plural=namespace, **args)
 
 # keywords are callables like:
 # fn(repo, ctx, templ, cache, revcache, **args)
@@ -416,7 +420,6 @@
     'phaseidx': showphaseidx,
     'rev': showrev,
     'subrepos': showsubrepos,
-    'tags': showtags,
 }
 
 def _showparents(**args):
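
showtags is folded above into a generic shownames helper that renders whichever namespace it is given via repo.names; the per-namespace keywords ('tags', 'bookmarks', ...) are presumably registered elsewhere by binding the namespace name, which is not shown in this hunk. A hedged, self-contained sketch of that kind of wiring (the stand-in _shownames and the registration loop are illustrative assumptions, not Mercurial's actual registration code):

    import functools

    def _shownames(namespace, **args):
        # stand-in for the shownames() helper added above
        return 'names from %s' % namespace

    keywords = {}
    for name in ['tags', 'bookmarks', 'branches']:
        # one template keyword per namespace, all delegating to _shownames
        keywords[name] = functools.partial(_shownames, name)

    assert keywords['tags'](ctx=None) == 'names from tags'
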
--- a/mercurial/templates/coal/map	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/templates/coal/map	Sat Jan 17 18:28:30 2015 -0800
@@ -109,6 +109,12 @@
 
 changesetparent = '<a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a> '
 
+changesetparentdiff = '
+  {changesetparent}
+  {ifeq(node, basenode, '(current diff)', '({difffrom})')}'
+
+difffrom = '<a href="{url|urlescape}rev/{node|short}:{originalnode|short}{sessionvars%urlparameter}">diff</a>'
+
 filerevparent = '<a href="{url|urlescape}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{rename%filerename}{node|short}</a> '
 filerevchild = '<a href="{url|urlescape}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a> '
 
@@ -231,3 +237,6 @@
 urlparameter = '{separator}{name}={value|urlescape}'
 hiddenformentry = '<input type="hidden" name="{name}" value="{value|escape}" />'
 breadcrumb = '&gt; <a href="{url|urlescape}">{name|escape}</a> '
+
+searchhint = 'Find changesets by keywords (author, files, the commit message), revision
+  number or hash, or <a href="{url|urlescape}help/revsets">revset expression</a>.'
--- a/mercurial/templates/gitweb/changeset.tmpl	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/templates/gitweb/changeset.tmpl	Sat Jan 17 18:28:30 2015 -0800
@@ -36,7 +36,7 @@
 <tr><td></td><td class="date age">{date|rfc822date}</td></tr>
 {branch%changesetbranch}
 <tr><td>changeset {rev}</td><td style="font-family:monospace">{node|short}</td></tr>
-{parent%changesetparent}
+{ifeq(count(parent), '2', parent%changesetparentdiff, parent%changesetparent)}
 {child%changesetchild}
 </table></div>
 
--- a/mercurial/templates/gitweb/map	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/templates/gitweb/map	Sat Jan 17 18:28:30 2015 -0800
@@ -105,10 +105,10 @@
     <td><pre><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a></pre></td>
     <td><pre>{line|escape}</pre></td>
   </tr>'
-difflineplus = '<span style="color:#008800;"><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>'
-difflineminus = '<span style="color:#cc0000;"><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>'
-difflineat = '<span style="color:#990099;"><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>'
-diffline = '<span><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>'
+difflineplus = '<span class="difflineplus"><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>'
+difflineminus = '<span class="difflineminus"><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>'
+difflineat = '<span class="difflineat"><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>'
+diffline = '<a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}'
 
 comparisonblock ='
   <tbody class="block">
@@ -127,14 +127,23 @@
       <a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a>
     </td>
   </tr>'
+changesetlink = '<a class="list" href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a>'
 changesetbranch = '<tr><td>branch</td><td>{name|escape}</td></tr>'
 changesetparent = '
   <tr>
     <td>parent {rev}</td>
     <td style="font-family:monospace">
-      <a class="list" href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a>
+      {changesetlink}
     </td>
   </tr>'
+changesetparentdiff = '
+  <tr>
+    <td>parent {rev}</td>
+    <td style="font-family:monospace">
+      {changesetlink} {ifeq(node, basenode, '(current diff)', \'({difffrom})\')}
+    </td>
+  </tr>'
+difffrom = '<a href="{url|urlescape}rev/{node|short}:{originalnode|short}{sessionvars%urlparameter}">diff</a>'
 filerevbranch = '<tr><td>branch</td><td>{name|escape}</td></tr>'
 filerevparent = '
   <tr>
--- a/mercurial/templates/monoblue/changelog.tmpl	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/templates/monoblue/changelog.tmpl	Sat Jan 17 18:28:30 2015 -0800
@@ -25,7 +25,8 @@
             <li><a href="{url|urlescape}tags{sessionvars%urlparameter}">tags</a></li>
             <li><a href="{url|urlescape}bookmarks{sessionvars%urlparameter}">bookmarks</a></li>
             <li><a href="{url|urlescape}branches{sessionvars%urlparameter}">branches</a></li>
-            <li><a href="{url|urlescape}file/{node|short}{sessionvars%urlparameter}">files</a>{archives%archiveentry}</li>
+            <li><a href="{url|urlescape}file/{node|short}{sessionvars%urlparameter}">files</a></li>
+            {archives%archiveentry}
 	    <li><a href="{url|urlescape}help{sessionvars%urlparameter}">help</a></li>
         </ul>
     </div>
--- a/mercurial/templates/monoblue/changeset.tmpl	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/templates/monoblue/changeset.tmpl	Sat Jan 17 18:28:30 2015 -0800
@@ -32,7 +32,8 @@
 
     <ul class="submenu">
         <li class="current">changeset</li>
-        <li><a href="{url|urlescape}raw-rev/{node|short}">raw</a> {archives%archiveentry}</li>
+        <li><a href="{url|urlescape}raw-rev/{node|short}">raw</a></li>
+        {archives%archiveentry}
     </ul>
 
     <h2 class="no-link no-border">changeset</h2>
@@ -48,7 +49,7 @@
         {branch%changesetbranch}
         <dt>changeset {rev}</dt>
         <dd>{node|short}</dd>
-        {parent%changesetparent}
+        {ifeq(count(parent), '2', parent%changesetparentdiff, parent%changesetparent)}
         {child%changesetchild}
     </dl>
 
--- a/mercurial/templates/monoblue/footer.tmpl	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/templates/monoblue/footer.tmpl	Sat Jan 17 18:28:30 2015 -0800
@@ -9,7 +9,7 @@
     </div>
 
     <div id="powered-by">
-        <p><a href="{logourl}" title="Mercurial"><img src="{staticurl|urlescape}{logoimg}" width=75 height=90 border=0 alt="mercurial"></a></p>
+        <p><a href="{logourl}" title="Mercurial"><img src="{staticurl|urlescape}{logoimg}" width=75 height=90 border=0 alt="mercurial" /></a></p>
     </div>
 
     <div id="corner-top-left"></div>
--- a/mercurial/templates/monoblue/manifest.tmpl	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/templates/monoblue/manifest.tmpl	Sat Jan 17 18:28:30 2015 -0800
@@ -31,7 +31,7 @@
     </div>
 
     <ul class="submenu">
-        <li><a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">changeset</a> {archives%archiveentry}</li>
+        <li><a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">changeset</a></li>
         {archives%archiveentry}
     </ul>
 
--- a/mercurial/templates/monoblue/map	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/templates/monoblue/map	Sat Jan 17 18:28:30 2015 -0800
@@ -100,10 +100,10 @@
     </td>
     <td class="source">{line|escape}</td>
   </tr>'
-difflineplus = '<span style="color:#008800;"><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>'
-difflineminus = '<span style="color:#cc0000;"><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>'
-difflineat = '<span style="color:#990099;"><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>'
-diffline = '<span><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>'
+difflineplus = '<span class="difflineplus"><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>'
+difflineminus = '<span class="difflineminus"><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>'
+difflineat = '<span class="difflineat"><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>'
+diffline = '<a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}'
 
 comparisonblock ='
   <tbody class="block">
@@ -115,17 +115,22 @@
     <td class="source {type}"><a class="linenr" href="#{lineid}" id="{lineid}">{rightlinenumber}</a> {rightline|escape}</td>
   </tr>'
 
+changesetlink = '<a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a>'
 changelogparent = '
   <tr>
     <th class="parent">parent {rev}:</th>
     <td class="parent">
-      <a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a>
+      {changesetlink}
     </td>
   </tr>'
 changesetbranch = '<dt>branch</dt><dd>{name|escape}</dd>'
 changesetparent = '
   <dt>parent {rev}</dt>
-  <dd><a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></dd>'
+  <dd>{changesetlink}</dd>'
+changesetparentdiff = '
+  <dt>parent {rev}</dt>
+  <dd>{changesetlink} {ifeq(node, basenode, '(current diff)', \'({difffrom})\')}</dd>'
+difffrom = '<a href="{url|urlescape}rev/{node|short}:{originalnode|short}{sessionvars%urlparameter}">diff</a>'
 filerevbranch = '<dt>branch</dt><dd>{name|escape}</dd>'
 filerevparent = '
   <dt>parent {rev}</dt>
--- a/mercurial/templates/monoblue/notfound.tmpl	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/templates/monoblue/notfound.tmpl	Sat Jan 17 18:28:30 2015 -0800
@@ -25,7 +25,8 @@
             <li><a href="{url|urlescape}tags{sessionvars%urlparameter}">tags</a></li>
             <li><a href="{url|urlescape}bookmarks{sessionvars%urlparameter}">bookmarks</a></li>
             <li><a href="{url|urlescape}branches{sessionvars%urlparameter}">branches</a></li>
-            <li><a href="{url|urlescape}file/{node|short}{sessionvars%urlparameter}">files</a>{archives%archiveentry}</li>
+            <li><a href="{url|urlescape}file/{node|short}{sessionvars%urlparameter}">files</a></li>
+            {archives%archiveentry}
             <li><a href="{url|urlescape}help{sessionvars%urlparameter}">help</a></li>
         </ul>
     </div>
--- a/mercurial/templates/monoblue/search.tmpl	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/templates/monoblue/search.tmpl	Sat Jan 17 18:28:30 2015 -0800
@@ -25,7 +25,8 @@
             <li><a href="{url|urlescape}tags{sessionvars%urlparameter}">tags</a></li>
             <li><a href="{url|urlescape}bookmarks{sessionvars%urlparameter}">bookmarks</a></li>
             <li><a href="{url|urlescape}branches{sessionvars%urlparameter}">branches</a></li>
-            <li><a href="{url|urlescape}file/{node|short}{sessionvars%urlparameter}">files</a>{archives%archiveentry}
+            <li><a href="{url|urlescape}file/{node|short}{sessionvars%urlparameter}">files</a></li>
+            {archives%archiveentry}
             <li><a href="{url|urlescape}help{sessionvars%urlparameter}">help</a></li>
         </ul>
     </div>
--- a/mercurial/templates/paper/bookmarks.tmpl	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/templates/paper/bookmarks.tmpl	Sat Jan 17 18:28:30 2015 -0800
@@ -23,10 +23,10 @@
 <ul>
 <li><a href="{url|urlescape}help{sessionvars%urlparameter}">help</a></li>
 </ul>
-<p>
+<p></p>
 <div class="atom-logo">
 <a href="{url|urlescape}atom-bookmarks" title="subscribe to atom feed">
-<img class="atom-logo" src="{staticurl|urlescape}feed-icon-14x14.png" alt="atom feed">
+<img class="atom-logo" src="{staticurl|urlescape}feed-icon-14x14.png" alt="atom feed" />
 </a>
 </div>
 </div>
--- a/mercurial/templates/paper/branches.tmpl	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/templates/paper/branches.tmpl	Sat Jan 17 18:28:30 2015 -0800
@@ -23,10 +23,10 @@
 <ul>
  <li><a href="{url|urlescape}help{sessionvars%urlparameter}">help</a></li>
 </ul>
-<p>
+<p></p>
 <div class="atom-logo">
 <a href="{url|urlescape}atom-branches" title="subscribe to atom feed">
-<img class="atom-logo" src="{staticurl|urlescape}feed-icon-14x14.png" alt="atom feed">
+<img class="atom-logo" src="{staticurl|urlescape}feed-icon-14x14.png" alt="atom feed" />
 </a>
 </div>
 </div>
--- a/mercurial/templates/paper/changeset.tmpl	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/templates/paper/changeset.tmpl	Sat Jan 17 18:28:30 2015 -0800
@@ -51,7 +51,7 @@
  <td class="date age">{date|rfc822date}</td></tr>
 <tr>
  <th class="author">parents</th>
- <td class="author">{parent%changesetparent}</td>
+ <td class="author">{ifeq(count(parent), '2', parent%changesetparentdiff, parent%changesetparent)}</td>
 </tr>
 <tr>
  <th class="author">children</th>
@@ -65,10 +65,10 @@
   <th class="diffstat">diffstat</th>
   <td class="diffstat">
     {diffsummary}
-    <a id="diffstatexpand" href="javascript:toggleDiffstat()"/>[<tt>+</tt>]</a>
+    <a id="diffstatexpand" href="javascript:toggleDiffstat()">[<tt>+</tt>]</a>
     <div id="diffstatdetails" style="display:none;">
-      <a href="javascript:toggleDiffstat()"/>[<tt>-</tt>]</a>
-      <p>
+      <a href="javascript:toggleDiffstat()">[<tt>-</tt>]</a>
+      <p></p>
       <table class="stripes2">{diffstat}</table>
     </div>
   </td>
--- a/mercurial/templates/paper/filelog.tmpl	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/templates/paper/filelog.tmpl	Sat Jan 17 18:28:30 2015 -0800
@@ -35,10 +35,11 @@
 <ul>
 <li><a href="{url|urlescape}help{sessionvars%urlparameter}">help</a></li>
 </ul>
-<p>
+<p></p>
 <div class="atom-logo">
 <a href="{url|urlescape}atom-log/{node|short}/{file|urlescape}" title="subscribe to atom feed">
-<img class="atom-logo" src="{staticurl|urlescape}feed-icon-14x14.png" alt="atom feed"></a>
+<img class="atom-logo" src="{staticurl|urlescape}feed-icon-14x14.png" alt="atom feed" />
+</a>
 </div>
 </div>
 
--- a/mercurial/templates/paper/graph.tmpl	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/templates/paper/graph.tmpl	Sat Jan 17 18:28:30 2015 -0800
@@ -28,10 +28,10 @@
 <ul>
  <li><a href="{url|urlescape}help{sessionvars%urlparameter}">help</a></li>
 </ul>
-<p>
+<p></p>
 <div class="atom-logo">
 <a href="{url|urlescape}atom-log" title="subscribe to atom feed">
-<img class="atom-logo" src="{staticurl|urlescape}feed-icon-14x14.png" alt="atom feed">
+<img class="atom-logo" src="{staticurl|urlescape}feed-icon-14x14.png" alt="atom feed" />
 </a>
 </div>
 </div>
--- a/mercurial/templates/paper/map	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/templates/paper/map	Sat Jan 17 18:28:30 2015 -0800
@@ -112,7 +112,11 @@
 
 changesetparent = '<a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a> '
 
-difffrom = '<a href="{url|urlescape}rev/{node|short}:{originalnode|short}{sessionvars%urlparameter}">{node|short}</a> '
+changesetparentdiff = '
+  {changesetparent}
+  {ifeq(node, basenode, '(current diff)', '({difffrom})')}'
+
+difffrom = '<a href="{url|urlescape}rev/{node|short}:{originalnode|short}{sessionvars%urlparameter}">diff</a>'
 
 filerevparent = '<a href="{url|urlescape}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{rename%filerename}{node|short}</a> '
 filerevchild = '<a href="{url|urlescape}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a> '
--- a/mercurial/templates/paper/shortlog.tmpl	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/templates/paper/shortlog.tmpl	Sat Jan 17 18:28:30 2015 -0800
@@ -30,10 +30,10 @@
 <ul>
  <li><a href="{url|urlescape}help{sessionvars%urlparameter}">help</a></li>
 </ul>
-<p>
+<p></p>
 <div class="atom-logo">
 <a href="{url|urlescape}atom-log" title="subscribe to atom feed">
-<img class="atom-logo" src="{staticurl|urlescape}feed-icon-14x14.png" alt="atom feed">
+<img class="atom-logo" src="{staticurl|urlescape}feed-icon-14x14.png" alt="atom feed" />
 </a>
 </div>
 </div>
--- a/mercurial/templates/paper/tags.tmpl	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/templates/paper/tags.tmpl	Sat Jan 17 18:28:30 2015 -0800
@@ -23,10 +23,11 @@
 <ul>
 <li><a href="{url|urlescape}help{sessionvars%urlparameter}">help</a></li>
 </ul>
-<p>
+<p></p>
 <div class="atom-logo">
 <a href="{url|urlescape}atom-tags" title="subscribe to atom feed">
-<img class="atom-logo" src="{staticurl|urlescape}feed-icon-14x14.png" alt="atom feed"></a>
+<img class="atom-logo" src="{staticurl|urlescape}feed-icon-14x14.png" alt="atom feed" />
+</a>
 </div>
 </div>
 
--- a/mercurial/templates/spartan/changeset.tmpl	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/templates/spartan/changeset.tmpl	Sat Jan 17 18:28:30 2015 -0800
@@ -22,7 +22,7 @@
  <th class="changeset">changeset {rev}:</th>
  <td class="changeset"><a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td>
 </tr>
-{parent%changesetparent}
+{ifeq(count(parent), '2', parent%changesetparentdiff, parent%changesetparent)}
 {child%changesetchild}
 {changesettag}
 <tr>
--- a/mercurial/templates/spartan/filelogentry.tmpl	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/templates/spartan/filelogentry.tmpl	Sat Jan 17 18:28:30 2015 -0800
@@ -4,7 +4,7 @@
   <th class="firstline"><a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{desc|strip|firstline|escape|nonempty}</a></th>
  </tr>
  <tr>
-  <th class="revision">revision {filerev}:</td>
+  <th class="revision">revision {filerev}:</th>
   <td class="node">
    <a href="{url|urlescape}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a>
    <a href="{url|urlescape}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">(diff)</a>
--- a/mercurial/templates/spartan/map	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/templates/spartan/map	Sat Jan 17 18:28:30 2015 -0800
@@ -68,18 +68,25 @@
 difflineminus = '<span class="minusline"><a class="lineno" href="#{lineid}" id="{lineid}">{linenumber}</a>{line|escape}</span>'
 difflineat = '<span class="atline"><a class="lineno" href="#{lineid}" id="{lineid}">{linenumber}</a>{line|escape}</span>'
 diffline = '<a class="lineno" href="#{lineid}" id="{lineid}">{linenumber}</a>{line|escape}'
+changesetlink = '<a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a>'
 changelogparent = '
   <tr>
     <th class="parent">parent {rev}:</th>
     <td class="parent">
-      <a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a>
+      {changesetlink}
     </td>
   </tr>'
 changesetparent = '
   <tr>
     <th class="parent">parent {rev}:</th>
-    <td class="parent"><a href="{url|urlescape}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td>
+    <td class="parent">{changesetlink}</td>
   </tr>'
+changesetparentdiff = '
+  <tr>
+    <th class="parent">parent {rev}:</th>
+    <td class="parent">{changesetlink} {ifeq(node, basenode, '(current diff)', '({difffrom})')}</td>
+  </tr>'
+difffrom = '<a href="{url|urlescape}rev/{node|short}:{originalnode|short}{sessionvars%urlparameter}">diff</a>'
 filerevparent = '
   <tr>
     <td class="metatag">parent:</td>
--- a/mercurial/templates/static/style-gitweb.css	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/templates/static/style-gitweb.css	Sat Jan 17 18:28:30 2015 -0800
@@ -84,6 +84,9 @@
 	background-color: #afdffa;
 	border-color: #ccecff #46ace6 #46ace6 #ccecff;
 }
+span.difflineplus { color:#008800; }
+span.difflineminus { color:#cc0000; }
+span.difflineat { color:#990099; }
 
 /* Graph */
 div#wrapper {
--- a/mercurial/templates/static/style-monoblue.css	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/templates/static/style-monoblue.css	Sat Jan 17 18:28:30 2015 -0800
@@ -260,6 +260,10 @@
   font-size: 1.2em;
   padding: 3px 0;
 }
+span.difflineplus { color:#008800; }
+span.difflineminus { color:#cc0000; }
+span.difflineat { color:#990099; }
+
 td.source {
   white-space: pre;
   font-family: monospace;
--- a/mercurial/transaction.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/transaction.py	Sat Jan 17 18:28:30 2015 -0800
@@ -15,7 +15,7 @@
 import errno
 import error, util
 
-version = 1
+version = 2
 
 def active(func):
     def _active(self, *args, **kwds):
@@ -25,7 +25,8 @@
         return func(self, *args, **kwds)
     return _active
 
-def _playback(journal, report, opener, entries, backupentries, unlink=True):
+def _playback(journal, report, opener, vfsmap, entries, backupentries,
+              unlink=True):
     for f, o, _ignore in entries:
         if o or not unlink:
             try:
@@ -43,26 +44,46 @@
                     raise
 
     backupfiles = []
-    for f, b, _ignore in backupentries:
-        filepath = opener.join(f)
-        backuppath = opener.join(b)
+    for l, f, b, c in backupentries:
+        if l not in vfsmap and c:
+            report("couldn't handle %s: unknown cache location %s\n"
+                        % (b, l))
+        vfs = vfsmap[l]
         try:
-            util.copyfile(backuppath, filepath)
-            backupfiles.append(b)
-        except IOError:
-            report(_("failed to recover %s\n") % f)
-            raise
+            if f and b:
+                filepath = vfs.join(f)
+                backuppath = vfs.join(b)
+                try:
+                    util.copyfile(backuppath, filepath)
+                    backupfiles.append(b)
+                except IOError:
+                    report(_("failed to recover %s\n") % f)
+            else:
+                target = f or b
+                try:
+                    vfs.unlink(target)
+                except (IOError, OSError), inst:
+                    if inst.errno != errno.ENOENT:
+                        raise
+        except (IOError, OSError, util.Abort), inst:
+            if not c:
+                raise
 
     opener.unlink(journal)
     backuppath = "%s.backupfiles" % journal
     if opener.exists(backuppath):
         opener.unlink(backuppath)
-    for f in backupfiles:
-        opener.unlink(f)
+    try:
+        for f in backupfiles:
+            if opener.exists(f):
+                opener.unlink(f)
+    except (IOError, OSError, util.Abort), inst:
+        # only pure backup files remain; it is safe to ignore any error
+        pass
 
 class transaction(object):
-    def __init__(self, report, opener, journal, after=None, createmode=None,
-            onclose=None, onabort=None):
+    def __init__(self, report, opener, vfsmap, journalname, undoname=None,
+                 after=None, createmode=None):
         """Begin a new transaction
 
         Begins a new transaction that allows rolling back writes in the event of
@@ -70,37 +91,54 @@
 
         * `after`: called after the transaction has been committed
         * `createmode`: the mode of the journal file that will be created
-        * `onclose`: called as the transaction is closing, but before it is
-        closed
-        * `onabort`: called as the transaction is aborting, but before any files
-        have been truncated
         """
         self.count = 1
         self.usages = 1
         self.report = report
+        # a vfs to access the store content
         self.opener = opener
+        # a map to access files in various locations {location -> vfs}
+        vfsmap = vfsmap.copy()
+        vfsmap[''] = opener  # set default value
+        self._vfsmap = vfsmap
         self.after = after
-        self.onclose = onclose
-        self.onabort = onabort
         self.entries = []
-        self.backupentries = []
         self.map = {}
-        self.backupmap = {}
-        self.journal = journal
+        self.journal = journalname
+        self.undoname = undoname
         self._queue = []
         # a dict of arguments to be passed to hooks
         self.hookargs = {}
+        self.file = opener.open(self.journal, "w")
 
-        self.backupjournal = "%s.backupfiles" % journal
-        self.file = opener.open(self.journal, "w")
-        self.backupsfile = opener.open(self.backupjournal, 'w')
-        self.backupsfile.write('%d\n' % version)
+        # a list of ('location', 'path', 'backuppath', cache) entries.
+        # - if 'backuppath' is empty, no file existed at backup time
+        # - if 'path' is empty, this is a temporary transaction file
+        # - if 'location' is not empty, the path is outside main opener reach.
+        #   use 'location' value as a key in a vfsmap to find the right 'vfs'
+        # (cache is currently unused)
+        self._backupentries = []
+        self._backupmap = {}
+        self._backupjournal = "%s.backupfiles" % self.journal
+        self._backupsfile = opener.open(self._backupjournal, 'w')
+        self._backupsfile.write('%d\n' % version)
+
         if createmode is not None:
             opener.chmod(self.journal, createmode & 0666)
-            opener.chmod(self.backupjournal, createmode & 0666)
+            opener.chmod(self._backupjournal, createmode & 0666)
 
         # hold file generations to be performed on commit
         self._filegenerators = {}
+        # holds callbacks to write pending data for hooks
+        self._pendingcallback = {}
+        # True if any pending data has ever been written
+        self._anypending = False
+        # holds callbacks to call when writing the transaction
+        self._finalizecallback = {}
+        # holds callbacks for post-transaction close
+        self._postclosecallback = {}
+        # holds callbacks to call during abort
+        self._abortcallback = {}
 
     def __del__(self):
         if self.journal:
@@ -108,38 +146,37 @@
 
     @active
     def startgroup(self):
-        self._queue.append(([], []))
+        """delay registration of file entry
+
+        This is used by strip to delay vision of strip offset. The transaction
+        sees either none or all of the strip actions to be done."""
+        self._queue.append([])
 
     @active
     def endgroup(self):
-        q = self._queue.pop()
-        self.entries.extend(q[0])
-        self.backupentries.extend(q[1])
-
-        offsets = []
-        backups = []
-        for f, o, _data in q[0]:
-            offsets.append((f, o))
+        """apply delayed registration of file entry.
 
-        for f, b, _data in q[1]:
-            backups.append((f, b))
-
-        d = ''.join(['%s\0%d\n' % (f, o) for f, o in offsets])
-        self.file.write(d)
-        self.file.flush()
-
-        d = ''.join(['%s\0%s\n' % (f, b) for f, b in backups])
-        self.backupsfile.write(d)
-        self.backupsfile.flush()
+        This is used by strip to delay vision of strip offset. The transaction
+        sees either none or all of the strip actions to be done."""
+        q = self._queue.pop()
+        for f, o, data in q:
+            self._addentry(f, o, data)
 
     @active
     def add(self, file, offset, data=None):
-        if file in self.map or file in self.backupmap:
+        """record the state of an append-only file before update"""
+        if file in self.map or file in self._backupmap:
             return
         if self._queue:
-            self._queue[-1][0].append((file, offset, data))
+            self._queue[-1].append((file, offset, data))
             return
 
+        self._addentry(file, offset, data)
+
+    def _addentry(self, file, offset, data):
+        """add an append-only entry to memory and on-disk state"""
+        if file in self.map or file in self._backupmap:
+            return
         self.entries.append((file, offset, data))
         self.map[file] = len(self.entries) - 1
         # add enough data to the journal to do the truncate
@@ -147,7 +184,7 @@
         self.file.flush()
 
     @active
-    def addbackup(self, file, hardlink=True, vfs=None):
+    def addbackup(self, file, hardlink=True, location=''):
         """Adds a backup of the file to the transaction
 
         Calling addbackup() creates a hardlink backup of the specified file
@@ -157,31 +194,44 @@
         * `file`: the file path, relative to .hg/store
         * `hardlink`: use a hardlink to quickly create the backup
         """
+        if self._queue:
+            msg = 'cannot use transaction.addbackup inside "group"'
+            raise RuntimeError(msg)
 
-        if file in self.map or file in self.backupmap:
+        if file in self.map or file in self._backupmap:
             return
-        backupfile = "%s.backup.%s" % (self.journal, file)
-        if vfs is None:
-            vfs = self.opener
+        vfs = self._vfsmap[location]
+        dirname, filename = vfs.split(file)
+        backupfilename = "%s.backup.%s" % (self.journal, filename)
+        backupfile = vfs.reljoin(dirname, backupfilename)
         if vfs.exists(file):
             filepath = vfs.join(file)
-            backuppath = self.opener.join(backupfile)
-            util.copyfiles(filepath, backuppath, hardlink=hardlink)
+            backuppath = vfs.join(backupfile)
+            util.copyfile(filepath, backuppath, hardlink=hardlink)
         else:
-            self.add(file, 0)
-            return
+            backupfile = ''
+
+        self._addbackupentry((location, file, backupfile, False))
 
-        if self._queue:
-            self._queue[-1][1].append((file, backupfile))
-            return
-
-        self.backupentries.append((file, backupfile, None))
-        self.backupmap[file] = len(self.backupentries) - 1
-        self.backupsfile.write("%s\0%s\n" % (file, backupfile))
-        self.backupsfile.flush()
+    def _addbackupentry(self, entry):
+        """register a new backup entry and write it to disk"""
+        self._backupentries.append(entry)
+        self._backupmap[file] = len(self._backupentries) - 1
+        self._backupsfile.write("%s\0%s\0%s\0%d\n" % entry)
+        self._backupsfile.flush()
 
     @active
-    def addfilegenerator(self, genid, filenames, genfunc, order=0, vfs=None):
+    def registertmp(self, tmpfile, location=''):
+        """register a temporary transaction file
+
+        Such files will be deleted when the transaction exits (on both
+        failure and success).
+        """
+        self._addbackupentry((location, '', tmpfile, False))
+
+    @active
+    def addfilegenerator(self, genid, filenames, genfunc, order=0,
+                         location=''):
         """add a function to generates some files at transaction commit
 
         The `genfunc` argument is a function capable of generating proper
@@ -199,38 +249,43 @@
 
         The `order` argument may be used to control the order in which multiple
         generator will be executed.
+
+        The `location` argument may be used to indicate that the files are
+        located outside of the standard directory for transactions. It should
+        match one of the keys of the `transaction.vfsmap` dictionary.
         """
         # For now, we are unable to do proper backup and restore of custom vfs
         # but for bookmarks that are handled outside this mechanism.
-        assert vfs is None or filenames == ('bookmarks',)
-        self._filegenerators[genid] = (order, filenames, genfunc, vfs)
+        self._filegenerators[genid] = (order, filenames, genfunc, location)
 
-    def _generatefiles(self):
+    def _generatefiles(self, suffix=''):
         # write files registered for generation
+        any = False
         for entry in sorted(self._filegenerators.values()):
-            order, filenames, genfunc, vfs = entry
-            if vfs is None:
-                vfs = self.opener
+            any = True
+            order, filenames, genfunc, location = entry
+            vfs = self._vfsmap[location]
             files = []
             try:
                 for name in filenames:
-                    # Some files are already backed up when creating the
-                    # localrepo. Until this is properly fixed we disable the
-                    # backup for them.
-                    if name not in ('phaseroots', 'bookmarks'):
-                        self.addbackup(name)
+                    name += suffix
+                    if suffix:
+                        self.registertmp(name, location=location)
+                    else:
+                        self.addbackup(name, location=location)
                     files.append(vfs(name, 'w', atomictemp=True))
                 genfunc(*files)
             finally:
                 for f in files:
                     f.close()
+        return any
 
     @active
     def find(self, file):
         if file in self.map:
             return self.entries[self.map[file]]
-        if file in self.backupmap:
-            return self.backupentries[self.backupmap[file]]
+        if file in self._backupmap:
+            return self._backupentries[self._backupmap[file]]
         return None
 
     @active
@@ -263,29 +318,121 @@
     def running(self):
         return self.count > 0
 
+    def addpending(self, category, callback):
+        """add a callback to be called when the transaction is pending
+
+        The transaction will be given as callback's first argument.
+
+        Category is a unique identifier to allow overwriting an old callback
+        with a newer callback.
+        """
+        self._pendingcallback[category] = callback
+
+    @active
+    def writepending(self):
+        '''write pending file to temporary version
+
+        This is used to allow hooks to view a transaction before commit'''
+        categories = sorted(self._pendingcallback)
+        for cat in categories:
+            # remove callback since the data will have been flushed
+            any = self._pendingcallback.pop(cat)(self)
+            self._anypending = self._anypending or any
+        self._anypending |= self._generatefiles(suffix='.pending')
+        return self._anypending
+
+    @active
+    def addfinalize(self, category, callback):
+        """add a callback to be called when the transaction is closed
+
+        The transaction will be given as callback's first argument.
+
+        Category is a unique identifier to allow overwriting old callbacks with
+        newer callbacks.
+        """
+        self._finalizecallback[category] = callback
+
+    @active
+    def addpostclose(self, category, callback):
+        """add a callback to be called after the transaction is closed
+
+        The transaction will be given as callback's first argument.
+
+        Category is a unique identifier to allow overwriting an old callback
+        with a newer callback.
+        """
+        self._postclosecallback[category] = callback
+
+    @active
+    def addabort(self, category, callback):
+        """add a callback to be called when the transaction is aborted.
+
+        The transaction will be given as the first argument to the callback.
+
+        Category is a unique identifier to allow overwriting an old callback
+        with a newer callback.
+        """
+        self._abortcallback[category] = callback
+
     @active
     def close(self):
         '''commit the transaction'''
-        if self.count == 1 and self.onclose is not None:
+        if self.count == 1:
             self._generatefiles()
-            self.onclose()
+            categories = sorted(self._finalizecallback)
+            for cat in categories:
+                self._finalizecallback[cat](self)
 
         self.count -= 1
         if self.count != 0:
             return
         self.file.close()
-        self.backupsfile.close()
+        self._backupsfile.close()
+        # cleanup temporary files
+        for l, f, b, c in self._backupentries:
+            if l not in self._vfsmap and c:
+                self.report("couldn't remove %s: unknown cache location %s\n"
+                            % (b, l))
+                continue
+            vfs = self._vfsmap[l]
+            if not f and b and vfs.exists(b):
+                try:
+                    vfs.unlink(b)
+                except (IOError, OSError, util.Abort), inst:
+                    if not c:
+                        raise
+                    # Abort may be raised by a read-only opener
+                    self.report("couldn't remove %s: %s\n"
+                                % (vfs.join(b), inst))
         self.entries = []
+        self._writeundo()
         if self.after:
             self.after()
         if self.opener.isfile(self.journal):
             self.opener.unlink(self.journal)
-        if self.opener.isfile(self.backupjournal):
-            self.opener.unlink(self.backupjournal)
-            for _f, b, _ignore in self.backupentries:
-                self.opener.unlink(b)
-        self.backupentries = []
+        if self.opener.isfile(self._backupjournal):
+            self.opener.unlink(self._backupjournal)
+            for l, _f, b, c in self._backupentries:
+                if l not in self._vfsmap and c:
+                    self.report("couldn't remove %s: unknown cache location "
+                                "%s\n" % (b, l))
+                    continue
+                vfs = self._vfsmap[l]
+                if b and vfs.exists(b):
+                    try:
+                        vfs.unlink(b)
+                    except (IOError, OSError, util.Abort), inst:
+                        if not c:
+                            raise
+                        # Abort may be raised by a read-only opener
+                        self.report("couldn't remove %s: %s\n"
+                                    % (vfs.join(b), inst))
+        self._backupentries = []
         self.journal = None
+        # run post close action
+        categories = sorted(self._postclosecallback)
+        for cat in categories:
+            self._postclosecallback[cat](self)
 
     @active
     def abort(self):
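
The hunk above introduces four callback registries (addpending, addfinalize, addpostclose, addabort). Each is keyed by a category string, so registering the same category twice overwrites the earlier callback, and each registry is run in sorted-category order at its point in the transaction's life cycle. A tiny standalone sketch of that pattern, not the transaction class itself:

    class callbackregistry(object):
        """category -> callback map, run in sorted category order"""
        def __init__(self):
            self._callbacks = {}

        def add(self, category, callback):
            # a later registration for the same category replaces the older one
            self._callbacks[category] = callback

        def run(self, tr):
            for cat in sorted(self._callbacks):
                self._callbacks[cat](tr)

    calls = []
    reg = callbackregistry()
    reg.add('b-cache', lambda tr: calls.append('b'))
    reg.add('a-bookmarks', lambda tr: calls.append('a'))
    reg.run(tr=None)
    assert calls == ['a', 'b']
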
@@ -294,28 +441,53 @@
         scope)'''
         self._abort()
 
+    def _writeundo(self):
+        """write transaction data for possible future undo call"""
+        if self.undoname is None:
+            return
+        undobackupfile = self.opener.open("%s.backupfiles" % self.undoname, 'w')
+        undobackupfile.write('%d\n' % version)
+        for l, f, b, c in self._backupentries:
+            if not f:  # temporary file
+                continue
+            if not b:
+                u = ''
+            else:
+                if l not in self._vfsmap and c:
+                    self.report("couldn't remove %s: unknown cache location "
+                                "%s\n" % (b, l))
+                    continue
+                vfs = self._vfsmap[l]
+                base, name = vfs.split(b)
+                assert name.startswith(self.journal), name
+                uname = name.replace(self.journal, self.undoname, 1)
+                u = vfs.reljoin(base, uname)
+                util.copyfile(vfs.join(b), vfs.join(u), hardlink=True)
+            undobackupfile.write("%s\0%s\0%s\0%d\n" % (l, f, u, c))
+        undobackupfile.close()
+
+
     def _abort(self):
         self.count = 0
         self.usages = 0
         self.file.close()
-        self.backupsfile.close()
-
-        if self.onabort is not None:
-            self.onabort()
+        self._backupsfile.close()
 
         try:
-            if not self.entries and not self.backupentries:
+            if not self.entries and not self._backupentries:
                 if self.journal:
                     self.opener.unlink(self.journal)
-                if self.backupjournal:
-                    self.opener.unlink(self.backupjournal)
+                if self._backupjournal:
+                    self.opener.unlink(self._backupjournal)
                 return
 
             self.report(_("transaction abort!\n"))
 
             try:
-                _playback(self.journal, self.report, self.opener,
-                          self.entries, self.backupentries, False)
+                for cat in sorted(self._abortcallback):
+                    self._abortcallback[cat](self)
+                _playback(self.journal, self.report, self.opener, self._vfsmap,
+                          self.entries, self._backupentries, False)
                 self.report(_("rollback completed\n"))
             except Exception:
                 self.report(_("rollback failed - please run hg recover\n"))
@@ -323,7 +495,7 @@
             self.journal = None
 
 
-def rollback(opener, file, report):
+def rollback(opener, vfsmap, file, report):
     """Rolls back the transaction contained in the given file
 
     Reads the entries in the specified file, and the corresponding
@@ -359,10 +531,10 @@
                     if line:
                         # Shave off the trailing newline
                         line = line[:-1]
-                        f, b = line.split('\0')
-                        backupentries.append((f, b, None))
+                        l, f, b, c = line.split('\0')
+                        backupentries.append((l, f, b, bool(c)))
             else:
-                report(_("journal was created by a newer version of "
+                report(_("journal was created by a different version of "
                          "Mercurial"))
 
-    _playback(file, report, opener, entries, backupentries)
+    _playback(file, report, opener, vfsmap, entries, backupentries)
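
The version-2 backup journal written above stores one entry per line as four NUL-separated fields: location, file path, backup path, and a cache flag. An empty file path marks a temporary transaction file, and an empty backup path means the file did not exist when the backup was taken. A minimal sketch of parsing such lines back into tuples, mirroring the rollback() hunk above (the sample file names and the 'plain' location key are illustrative):

    def parsebackupentries(data):
        """Parse 'location\\0path\\0backuppath\\0cacheflag' journal lines."""
        entries = []
        for line in data.splitlines():
            if not line:
                continue
            l, f, b, c = line.split('\0')
            # bool(c) mirrors the rollback() code above
            entries.append((l, f, b, bool(c)))
        return entries

    sample = ('plain\x00bookmarks\x00journal.backup.bookmarks\x000\n'
              '\x00\x00journal.temp.changelog\x000\n')
    entries = parsebackupentries(sample)
    assert entries[0][:3] == ('plain', 'bookmarks', 'journal.backup.bookmarks')
    assert entries[1][1] == ''  # empty path: a temporary transaction file
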
--- a/mercurial/ui.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/ui.py	Sat Jan 17 18:28:30 2015 -0800
@@ -537,7 +537,7 @@
         return path or loc
 
     def pushbuffer(self, error=False):
-        """install a buffer to capture standar output of the ui object
+        """install a buffer to capture standard output of the ui object
 
         If error is True, the error output will be captured too."""
         self._buffers.append([])
@@ -814,10 +814,9 @@
 
             editor = self.geteditor()
 
-            util.system("%s \"%s\"" % (editor, name),
+            self.system("%s \"%s\"" % (editor, name),
                         environ=environ,
-                        onerr=util.Abort, errprefix=_("edit failed"),
-                        out=self.fout)
+                        onerr=util.Abort, errprefix=_("edit failed"))
 
             f = open(name)
             t = f.read()
@@ -827,6 +826,13 @@
 
         return t
 
+    def system(self, cmd, environ={}, cwd=None, onerr=None, errprefix=None):
+        '''execute shell command with appropriate output stream. command
+        output will be redirected if fout is not stdout.
+        '''
+        return util.system(cmd, environ=environ, cwd=cwd, onerr=onerr,
+                           errprefix=errprefix, out=self.fout)
+
     def traceback(self, exc=None, force=False):
         '''print exception traceback if traceback printing enabled or forced.
         only to call in exception handler. returns true if traceback
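
The new ui.system() above routes shell commands through util.system with the ui's configured output stream (self.fout), so command output follows whatever buffering or redirection the ui has set up instead of always landing on the process's stdout. A rough standalone illustration of sending a subprocess's output to an arbitrary stream; this uses the subprocess module directly and is not util.system:

    import subprocess
    import sys

    def runtostream(cmd, out=sys.stdout, environ=None):
        """Run a shell command, copying its combined output to 'out'."""
        p = subprocess.Popen(cmd, shell=True, env=environ,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT,
                             universal_newlines=True)
        for line in p.stdout:
            out.write(line)
        return p.wait()

    # e.g. send the output to a log file instead of the terminal:
    # runtostream('echo hello', out=open('cmd.log', 'w'))
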
--- a/mercurial/unionrepo.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/unionrepo.py	Sat Jan 17 18:28:30 2015 -0800
@@ -178,7 +178,7 @@
 
     @localrepo.unfilteredpropertycache
     def changelog(self):
-        return unionchangelog(self.sopener, self.repo2.sopener)
+        return unionchangelog(self.svfs, self.repo2.svfs)
 
     def _clrev(self, rev2):
         """map from repo2 changelog rev to temporary rev in self.changelog"""
@@ -187,14 +187,14 @@
 
     @localrepo.unfilteredpropertycache
     def manifest(self):
-        return unionmanifest(self.sopener, self.repo2.sopener,
+        return unionmanifest(self.svfs, self.repo2.svfs,
                              self._clrev)
 
     def url(self):
         return self._url
 
     def file(self, f):
-        return unionfilelog(self.sopener, f, self.repo2.sopener,
+        return unionfilelog(self.svfs, f, self.repo2.svfs,
                             self._clrev, self)
 
     def close(self):
--- a/mercurial/url.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/url.py	Sat Jan 17 18:28:30 2015 -0800
@@ -185,7 +185,8 @@
             self.sock.connect((self.host, self.port))
             if _generic_proxytunnel(self):
                 # we do not support client X.509 certificates
-                self.sock = sslutil.ssl_wrap_socket(self.sock, None, None)
+                self.sock = sslutil.ssl_wrap_socket(self.sock, None, None,
+                                                    serverhostname=self.host)
         else:
             keepalive.HTTPConnection.connect(self)
 
@@ -341,7 +342,7 @@
                 _generic_proxytunnel(self)
                 host = self.realhostport.rsplit(':', 1)[0]
             self.sock = sslutil.ssl_wrap_socket(
-                self.sock, self.key_file, self.cert_file,
+                self.sock, self.key_file, self.cert_file, serverhostname=host,
                 **sslutil.sslkwargs(self.ui, host))
             sslutil.validator(self.ui, host)(self.sock)
 
--- a/mercurial/util.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/util.py	Sat Jan 17 18:28:30 2015 -0800
@@ -19,7 +19,8 @@
 import errno, shutil, sys, tempfile, traceback
 import re as remod
 import os, time, datetime, calendar, textwrap, signal, collections
-import imp, socket, urllib
+import imp, socket, urllib, struct
+import gc
 
 if os.name == 'nt':
     import windows as platform
@@ -228,6 +229,15 @@
 import subprocess
 closefds = os.name == 'posix'
 
+def unpacker(fmt):
+    """create a struct unpacker for the specified format"""
+    try:
+        # 2.5+
+        return struct.Struct(fmt).unpack
+    except AttributeError:
+        # 2.4
+        return lambda buf: struct.unpack(fmt, buf)
+
 def popen2(cmd, env=None, newlines=False):
     # Setting bufsize to -1 lets the system decide the buffer size.
     # The default for bufsize is 0, meaning unbuffered. This leads to
@@ -369,6 +379,12 @@
         return self._list
     def iterkeys(self):
         return self._list.__iter__()
+    def iteritems(self):
+        for k in self._list:
+            yield k, self[k]
+    def insert(self, index, key, val):
+        self._list.insert(index, key)
+        dict.__setitem__(self, key, val)
 
 class lrucachedict(object):
     '''cache most recent gets from or sets to this dictionary'''
@@ -538,6 +554,28 @@
 def never(fn):
     return False
 
+def nogc(func):
+    """disable garbage collector
+
+    Python's garbage collector triggers a GC each time a certain number of
+    container objects (the number being defined by gc.get_threshold()) are
+    allocated even when marked not to be tracked by the collector. Tracking has
+    no effect on when GCs are triggered, only on what objects the GC looks
+    into. As a workaround, disable GC while building complex (huge)
+    containers.
+
+    This garbage collector issue has been fixed in 2.7.
+    """
+    def wrapper(*args, **kwargs):
+        gcenabled = gc.isenabled()
+        gc.disable()
+        try:
+            return func(*args, **kwargs)
+        finally:
+            if gcenabled:
+                gc.enable()
+    return wrapper
+
 def pathto(root, n1, n2):
     '''return the relative path from one place to another.
     root should use os.sep to separate directories
@@ -613,9 +651,8 @@
     '''enhanced shell command execution.
     run with environment maybe modified, maybe in different dir.
 
-    if command fails and onerr is None, return status.  if ui object,
-    print error message and return status, else raise onerr object as
-    exception.
+    if command fails and onerr is None, return status, else raise onerr
+    object as exception.
 
     if out is specified, it is assumed to be a file-like object that has a
     write() method. stdout and stderr will be redirected to out.'''
@@ -664,10 +701,7 @@
                             explainexit(rc)[0])
         if errprefix:
             errmsg = '%s: %s' % (errprefix, errmsg)
-        try:
-            onerr.warn(errmsg + '\n')
-        except AttributeError:
-            raise onerr(errmsg)
+        raise onerr(errmsg)
     return rc
 
 def checksignature(func):
@@ -682,10 +716,16 @@
 
     return check
 
-def copyfile(src, dest):
+def copyfile(src, dest, hardlink=False):
     "copy a file, preserving mode and atime/mtime"
     if os.path.lexists(dest):
         unlink(dest)
+    if hardlink:
+        try:
+            oslink(src, dest)
+            return
+        except (IOError, OSError):
+            pass # fall back to normal copy
     if os.path.islink(src):
         os.symlink(os.readlink(src), dest)
     else:
@@ -1086,15 +1126,20 @@
     if mode is not None:
         os.chmod(name, mode)
 
-def ensuredirs(name, mode=None):
-    """race-safe recursive directory creation"""
+def ensuredirs(name, mode=None, notindexed=False):
+    """race-safe recursive directory creation
+
+    Newly created directories are marked as "not to be indexed by
+    the content indexing service", if ``notindexed`` is specified
+    for "write" mode access.
+    """
     if os.path.isdir(name):
         return
     parent = os.path.dirname(os.path.abspath(name))
     if parent != name:
-        ensuredirs(parent, mode)
+        ensuredirs(parent, mode, notindexed)
     try:
-        os.mkdir(name)
+        makedir(name, notindexed)
     except OSError, err:
         if err.errno == errno.EEXIST and os.path.isdir(name):
             # someone else seems to have won a directory creation race
@@ -1148,7 +1193,7 @@
         """Read L bytes of data from the iterator of chunks of data.
         Returns less than L bytes if the iterator runs dry.
 
-        If size parameter is ommited, read everything"""
+        If size parameter is omitted, read everything"""
         left = l
         buf = []
         queue = self._queue
@@ -1418,7 +1463,7 @@
         except ValueError:
             raise Abort(_("invalid day spec: %s") % date[1:])
         if days < 0:
-            raise Abort(_("%s must be nonnegative (see 'hg help dates')")
+            raise Abort(_('%s must be nonnegative (see "hg help dates")')
                 % date[1:])
         when = makedate()[0] - days * 3600 * 24
         return lambda x: x >= when
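Among the util.py additions, unpacker() precompiles a struct format when struct.Struct is available (Python 2.5+) and falls back to a plain struct.unpack lambda on 2.4. A standalone check, with an arbitrary example format, that the two call styles agree:

    import struct

    fmt = '>20sQQ'                               # arbitrary example format
    fast = struct.Struct(fmt).unpack             # precompiled, what 2.5+ gets
    slow = lambda buf: struct.unpack(fmt, buf)   # the 2.4 fallback

    data = struct.pack(fmt, b'x' * 20, 1, 2)
    assert fast(data) == slow(data) == (b'x' * 20, 1, 2)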
--- a/mercurial/windows.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/windows.py	Sat Jan 17 18:28:30 2015 -0800
@@ -148,10 +148,20 @@
 # backslash before every double quote (being careful with the double
 # quote we've appended to the end)
 _quotere = None
+_needsshellquote = None
 def shellquote(s):
     global _quotere
     if _quotere is None:
         _quotere = re.compile(r'(\\*)("|\\$)')
+    global _needsshellquote
+    if _needsshellquote is None:
+        # ":" and "\\" are also treated as "safe character", because
+        # they are used as a part of path name (and the latter doesn't
+        # work as "escape character", like one on posix) on Windows
+        _needsshellquote = re.compile(r'[^a-zA-Z0-9._:/\\-]').search
+    if not _needsshellquote(s) and not _quotere.search(s):
+        # "s" shouldn't have to be quoted
+        return s
     return '"%s"' % _quotere.sub(r'\1\1\\\2', s)
 
 def quotecommand(cmd):
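The shellquote() change above skips quoting on Windows when an argument consists only of characters the shell treats as safe (including ":" and "\" because they occur in paths). An isolated sketch of that fast path; the regexes are copied from the hunk, the sample strings are made up:

    import re

    _needsquote = re.compile(r'[^a-zA-Z0-9._:/\\-]').search
    _quotere = re.compile(r'(\\*)("|\\$)')

    def shellquote(s):
        if not _needsquote(s) and not _quotere.search(s):
            return s                        # nothing the shell would mangle
        return '"%s"' % _quotere.sub(r'\1\1\\\2', s)

    assert shellquote(r'C:\work\repo.hg') == r'C:\work\repo.hg'   # untouched
    assert shellquote('two words') == '"two words"'               # quoted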
--- a/mercurial/wireproto.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/mercurial/wireproto.py	Sat Jan 17 18:28:30 2015 -0800
@@ -172,7 +172,11 @@
     return []
 
 def encodelist(l, sep=' '):
-    return sep.join(map(hex, l))
+    try:
+        return sep.join(map(hex, l))
+    except TypeError:
+        print l
+        raise
 
 # batched call argument encoding
 
@@ -778,7 +782,7 @@
                       (len(entries), total_bytes))
         yield '%d %d\n' % (len(entries), total_bytes)
 
-        sopener = repo.sopener
+        sopener = repo.svfs
         oldaudit = sopener.mustaudit
         debugflag = repo.ui.debugflag
         sopener.mustaudit = False
@@ -827,7 +831,7 @@
             r = exchange.unbundle(repo, gen, their_heads, 'serve',
                                   proto._client())
             if util.safehasattr(r, 'addpart'):
-                # The return looks streameable, we are in the bundle2 case and
+                # The return looks streamable, we are in the bundle2 case and
                 # should return a stream.
                 return streamres(r.getchunks())
             return pushres(r)
@@ -837,7 +841,7 @@
             os.unlink(tempname)
     except error.BundleValueError, exc:
             bundler = bundle2.bundle20(repo.ui)
-            errpart = bundler.newpart('B2X:ERROR:UNSUPPORTEDCONTENT')
+            errpart = bundler.newpart('b2x:error:unsupportedcontent')
             if exc.parttype is not None:
                 errpart.addparam('parttype', exc.parttype)
             if exc.params:
@@ -854,7 +858,7 @@
             advargs = []
             if inst.hint is not None:
                 advargs.append(('hint', inst.hint))
-            bundler.addpart(bundle2.bundlepart('B2X:ERROR:ABORT',
+            bundler.addpart(bundle2.bundlepart('b2x:error:abort',
                                                manargs, advargs))
             return streamres(bundler.getchunks())
         else:
@@ -863,7 +867,7 @@
     except error.PushRaced, exc:
         if getattr(exc, 'duringunbundle2', False):
             bundler = bundle2.bundle20(repo.ui)
-            bundler.newpart('B2X:ERROR:PUSHRACED', [('message', str(exc))])
+            bundler.newpart('b2x:error:pushraced', [('message', str(exc))])
             return streamres(bundler.getchunks())
         else:
             return pusherr(str(exc))
--- a/setup.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/setup.py	Sat Jan 17 18:28:30 2015 -0800
@@ -76,7 +76,7 @@
 from distutils.command.install_lib import install_lib
 from distutils.command.install_scripts import install_scripts
 from distutils.spawn import spawn, find_executable
-from distutils import cygwinccompiler, file_util
+from distutils import file_util
 from distutils.errors import CCompilerError, DistutilsExecError
 from distutils.sysconfig import get_python_inc, get_config_var
 from distutils.version import StrictVersion
@@ -141,7 +141,8 @@
     py2exeloaded = False
 
 def runcmd(cmd, env):
-    if sys.platform == 'plan9':
+    if (sys.platform == 'plan9'
+       and (sys.version_info[0] == 2 and sys.version_info[1] < 7)):
         # subprocess kludge to work around issues in half-baked Python
         # ports, notably bichued/python:
         _, out, err = os.popen3(cmd)
@@ -195,9 +196,13 @@
         if hgid.endswith('+'): # propagate the dirty status to the tag
             version += '+'
     else: # no tag found
-        cmd = [sys.executable, 'hg', 'parents', '--template',
-               '{latesttag}+{latesttagdistance}-']
-        version = runhg(cmd, env) + hgid
+        ltagcmd = [sys.executable, 'hg', 'parents', '--template',
+                   '{latesttag}']
+        ltag = runhg(ltagcmd, env)
+        changessincecmd = [sys.executable, 'hg', 'log', '-T', 'x\n', '-r',
+                           "only(.,'%s')" % ltag]
+        changessince = len(runhg(changessincecmd, env).splitlines())
+        version = '%s+%s-%s' % (ltag, changessince, hgid)
     if version.endswith('+'):
         version += time.strftime('%Y%m%d')
 elif os.path.exists('.hg_archival.txt'):
@@ -206,7 +211,10 @@
     if 'tag' in kw:
         version =  kw['tag']
     elif 'latesttag' in kw:
-        version = '%(latesttag)s+%(latesttagdistance)s-%(node).12s' % kw
+        if 'changessincelatesttag' in kw:
+            version = '%(latesttag)s+%(changessincelatesttag)s-%(node).12s' % kw
+        else:
+            version = '%(latesttag)s+%(latesttagdistance)s-%(node).12s' % kw
     else:
         version = kw.get('node', '')[:12]
 
@@ -501,22 +509,32 @@
                                 extra_link_args=osutil_ldflags,
                                 depends=common_depends))
 
-# the -mno-cygwin option has been deprecated for years
-Mingw32CCompiler = cygwinccompiler.Mingw32CCompiler
+try:
+    from distutils import cygwinccompiler
+
+    # the -mno-cygwin option has been deprecated for years
+    compiler = cygwinccompiler.Mingw32CCompiler
 
-class HackedMingw32CCompiler(cygwinccompiler.Mingw32CCompiler):
-    def __init__(self, *args, **kwargs):
-        Mingw32CCompiler.__init__(self, *args, **kwargs)
-        for i in 'compiler compiler_so linker_exe linker_so'.split():
-            try:
-                getattr(self, i).remove('-mno-cygwin')
-            except ValueError:
-                pass
+    class HackedMingw32CCompiler(cygwinccompiler.Mingw32CCompiler):
+        def __init__(self, *args, **kwargs):
+            compiler.__init__(self, *args, **kwargs)
+            for i in 'compiler compiler_so linker_exe linker_so'.split():
+                try:
+                    getattr(self, i).remove('-mno-cygwin')
+                except ValueError:
+                    pass
 
-cygwinccompiler.Mingw32CCompiler = HackedMingw32CCompiler
+    cygwinccompiler.Mingw32CCompiler = HackedMingw32CCompiler
+except ImportError:
+    # the cygwinccompiler package is not available on some Python
+    # distributions like the ones from the optware project for Synology
+    # DiskStation boxes
+    class HackedMingw32CCompiler(object):
+        pass
 
 packagedata = {'mercurial': ['locale/*/LC_MESSAGES/hg.mo',
                              'help/*.txt',
+                             'default.d/*.rc',
                              'dummycert.pem']}
 
 def ordinarypath(p):
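setup.py now derives the version for untagged checkouts as "latest tag + number of changesets since that tag - short node", counting the changesets with an only(.) revset instead of relying on {latesttagdistance}. A trimmed-down sketch of the same computation; it assumes a Mercurial checkout and an hg on PATH, while the real code also propagates the environment and handles the tagged and dirty cases:

    import subprocess

    def hgout(*args):
        return subprocess.check_output(('hg',) + args).decode('ascii').strip()

    def untaggedversion():
        hgid = hgout('id', '-i')                       # short node, '+' if dirty
        ltag = hgout('parents', '--template', '{latesttag}')
        changes = hgout('log', '-T', 'x\n', '-r', "only(.,'%s')" % ltag)
        return '%s+%s-%s' % (ltag, len(changes.splitlines()), hgid)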
--- a/tests/autodiff.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/autodiff.py	Sat Jan 17 18:28:30 2015 -0800
@@ -10,7 +10,7 @@
     [('', 'git', '', 'git upgrade mode (yes/no/auto/warn/abort)')],
     '[OPTION]... [FILE]...')
 def autodiff(ui, repo, *pats, **opts):
-    diffopts = patch.diffopts(ui, opts)
+    diffopts = patch.difffeatureopts(ui, opts)
     git = opts.get('git', 'no')
     brokenfiles = set()
     losedatafn = None
Binary file tests/bundles/issue4438-r1.hg has changed
Binary file tests/bundles/issue4438-r2.hg has changed
--- a/tests/dumbhttp.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/dumbhttp.py	Sat Jan 17 18:28:30 2015 -0800
@@ -5,15 +5,18 @@
 """
 
 from optparse import OptionParser
-import BaseHTTPServer, SimpleHTTPServer, os, signal, subprocess, sys
+import BaseHTTPServer, SimpleHTTPServer, signal, sys
 
+from mercurial import cmdutil
 
-def run(server_class=BaseHTTPServer.HTTPServer,
-        handler_class=SimpleHTTPServer.SimpleHTTPRequestHandler,
-        server_address=('localhost', 8000)):
-    httpd = server_class(server_address, handler_class)
-    httpd.serve_forever()
-
+class simplehttpservice(object):
+    def __init__(self, host, port):
+        self.address = (host, port)
+    def init(self):
+        self.httpd = BaseHTTPServer.HTTPServer(
+            self.address, SimpleHTTPServer.SimpleHTTPRequestHandler)
+    def run(self):
+        self.httpd.serve_forever()
 
 if __name__ == '__main__':
     parser = OptionParser()
@@ -26,6 +29,7 @@
     parser.add_option('-f', '--foreground', dest='foreground',
         action='store_true',
         help='do not start the HTTP server in the background')
+    parser.add_option('--daemon-pipefds')
 
     (options, args) = parser.parse_args()
 
@@ -34,21 +38,9 @@
     if options.foreground and options.pid:
         parser.error("options --pid and --foreground are mutually exclusive")
 
-    if options.foreground:
-        run(server_address=(options.host, options.port))
-    else:
-        # This doesn't attempt to cleanly detach the process, as it's not
-        # meant to be a long-lived, independent process. As a consequence,
-        # it's still part of the same process group, and keeps any file
-        # descriptors it might have inherited besided stdin/stdout/stderr.
-        # Trying to do things cleanly is more complicated, requires
-        # OS-dependent code, and is not worth the effort.
-        proc = subprocess.Popen([sys.executable, __file__, '-f',
-            '-H', options.host, '-p', str(options.port)],
-            stdin=open(os.devnull, 'r'),
-            stdout=open(os.devnull, 'w'),
-            stderr=subprocess.STDOUT)
-        if options.pid:
-            fp = file(options.pid, 'wb')
-            fp.write(str(proc.pid) + '\n')
-            fp.close()
+    opts = {'pid_file': options.pid,
+            'daemon': not options.foreground,
+            'daemon_pipefds': options.daemon_pipefds}
+    service = simplehttpservice(options.host, options.port)
+    cmdutil.service(opts, initfn=service.init, runfn=service.run,
+                    runargs=[sys.executable, __file__] + sys.argv[1:])
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/f	Sat Jan 17 18:28:30 2015 -0800
@@ -0,0 +1,158 @@
+#!/usr/bin/env python
+
+"""
+Utility for inspecting files in various ways.
+
+This tool is like the collection of tools found in a unix environment but is
+cross platform, stable, and suitable for our needs in the test suite.
+
+This can be used instead of tools like:
+  [
+  dd
+  find
+  head
+  hexdump
+  ls
+  md5sum
+  readlink
+  sha1sum
+  stat
+  tail
+  test
+  readlink.py
+  md5sum.py
+"""
+
+import sys, os, errno, re, glob, optparse
+
+def visit(opts, filenames, outfile):
+    """Process filenames in the way specified in opts, writing output to
+    outfile."""
+    for f in sorted(filenames):
+        isstdin = f == '-'
+        if not isstdin and not os.path.lexists(f):
+            outfile.write('%s: file not found\n' % f)
+            continue
+        quiet = opts.quiet and not opts.recurse or isstdin
+        isdir = os.path.isdir(f)
+        islink = os.path.islink(f)
+        isfile = os.path.isfile(f) and not islink
+        dirfiles = None
+        content = None
+        facts = []
+        if isfile:
+            if opts.type:
+                facts.append('file')
+            if opts.hexdump or opts.dump or opts.md5:
+                content = file(f).read()
+        elif islink:
+            if opts.type:
+                facts.append('link')
+            content = os.readlink(f)
+        elif isstdin:
+            content = sys.stdin.read()
+            if opts.size:
+                facts.append('size=%s' % len(content))
+        elif isdir:
+            if opts.recurse or opts.type:
+                dirfiles = glob.glob(f + '/*')
+                facts.append('directory with %s files' % len(dirfiles))
+        elif opts.type:
+            facts.append('type unknown')
+        if not isstdin:
+            stat = os.lstat(f)
+            if opts.size and not isdir:
+                facts.append('size=%s' % stat.st_size)
+            if opts.mode and not islink:
+                facts.append('mode=%o' % (stat.st_mode & 0777))
+            if opts.links:
+                facts.append('links=%s' % stat.st_nlink)
+            if opts.newer:
+                # mtime might be in whole seconds so newer file might be same
+                if stat.st_mtime >= os.stat(opts.newer).st_mtime:
+                    facts.append('newer than %s' % opts.newer)
+                else:
+                    facts.append('older than %s' % opts.newer)
+        if opts.md5 and content is not None:
+            try:
+                from hashlib import md5
+            except ImportError:
+                from md5 import md5
+            facts.append('md5=%s' % md5(content).hexdigest()[:opts.bytes])
+        if opts.sha1 and content is not None:
+            try:
+                from hashlib import sha1
+            except ImportError:
+                from sha import sha as sha1
+            facts.append('sha1=%s' % sha1(content).hexdigest()[:opts.bytes])
+        if isstdin:
+            outfile.write(', '.join(facts) + '\n')
+        elif facts:
+            outfile.write('%s: %s\n' % (f, ', '.join(facts)))
+        elif not quiet:
+            outfile.write('%s:\n' % f)
+        if content is not None:
+            chunk = content
+            if not islink:
+                if opts.lines:
+                    if opts.lines >= 0:
+                        chunk = ''.join(chunk.splitlines(True)[:opts.lines])
+                    else:
+                        chunk = ''.join(chunk.splitlines(True)[opts.lines:])
+                if opts.bytes:
+                    if opts.bytes >= 0:
+                        chunk = chunk[:opts.bytes]
+                    else:
+                        chunk = chunk[opts.bytes:]
+            if opts.hexdump:
+                for i in range(0, len(chunk), 16):
+                    s = chunk[i:i+16]
+                    outfile.write('%04x: %-47s |%s|\n' %
+                                  (i, ' '.join('%02x' % ord(c) for c in s),
+                                   re.sub('[^ -~]', '.', s)))
+            if opts.dump:
+                if not quiet:
+                    outfile.write('>>>\n')
+                outfile.write(chunk)
+                if not quiet:
+                    if chunk.endswith('\n'):
+                        outfile.write('<<<\n')
+                    else:
+                        outfile.write('\n<<< no trailing newline\n')
+        if opts.recurse and dirfiles:
+            assert not isstdin
+            visit(opts, dirfiles, outfile)
+
+if __name__ == "__main__":
+    parser = optparse.OptionParser("%prog [options] [filenames]")
+    parser.add_option("-t", "--type", action="store_true",
+                      help="show file type (file or directory)")
+    parser.add_option("-m", "--mode", action="store_true",
+                      help="show file mode")
+    parser.add_option("-l", "--links", action="store_true",
+                      help="show number of links")
+    parser.add_option("-s", "--size", action="store_true",
+                      help="show size of file")
+    parser.add_option("-n", "--newer", action="store",
+                      help="check if file is newer (or same)")
+    parser.add_option("-r", "--recurse", action="store_true",
+                      help="recurse into directories")
+    parser.add_option("-S", "--sha1", action="store_true",
+                      help="show sha1 hash of the content")
+    parser.add_option("-M", "--md5", action="store_true",
+                      help="show md5 hash of the content")
+    parser.add_option("-D", "--dump", action="store_true",
+                      help="dump file content")
+    parser.add_option("-H", "--hexdump", action="store_true",
+                      help="hexdump file content")
+    parser.add_option("-B", "--bytes", type="int",
+                      help="number of characters to dump")
+    parser.add_option("-L", "--lines", type="int",
+                      help="number of lines to dump")
+    parser.add_option("-q", "--quiet", action="store_true",
+                      help="no default output")
+    (opts, filenames) = parser.parse_args(sys.argv[1:])
+    if not filenames:
+        filenames = ['-']
+
+    visit(opts, filenames, sys.stdout)
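The new tests/f helper bundles several inspection modes; its hexdump output (used later by the rev-branch-cache tests) comes from the 16-bytes-per-row loop above. The same loop, pulled out as a runnable snippet with a made-up input:

    import re

    def hexdump(data):
        rows = []
        for i in range(0, len(data), 16):
            s = data[i:i + 16]
            rows.append('%04x: %-47s |%s|' %
                        (i, ' '.join('%02x' % ord(c) for c in s),
                         re.sub('[^ -~]', '.', s)))
        return '\n'.join(rows)

    print(hexdump('default\0a\0b\0c'))
    # 0000: 64 65 66 61 75 6c 74 00 61 00 62 00 63          |default.a.b.c|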
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/generate-working-copy-states.py	Sat Jan 17 18:28:30 2015 -0800
@@ -0,0 +1,86 @@
+# Helper script used for generating history and working copy files and content.
+# The file's name corresponds to its history. The number of changesets can
+# be specified on the command line. With 2 changesets, files with names like
+# content1_content2_content1-untracked are generated. The first two filename
+# segments describe the contents in the two changesets. The third segment
+# ("content1-untracked") describes the state in the working copy, i.e.
+# the file has content "content1" and is untracked (since it was previously
+# tracked, it has been forgotten).
+#
+# This script generates the filenames and their content, but it's up to the
+# caller to tell hg about the state.
+#
+# There are two subcommands:
+#   filelist <numchangesets>
+#   state <numchangesets> (<changeset>|wc)
+#
+# Typical usage:
+#
+# $ python $TESTDIR/generate-working-copy-states.py state 2 1
+# $ hg addremove --similarity 0
+# $ hg commit -m 'first'
+#
+# $ python $TESTDIR/generate-working-copy-states.py state 2 1
+# $ hg addremove --similarity 0
+# $ hg commit -m 'second'
+#
+# $ python $TESTDIR/generate-working-copy-states.py state 2 wc
+# $ hg addremove --similarity 0
+# $ hg forget *_*_*-untracked
+# $ rm *_*_missing-*
+
+import sys
+import os
+
+# Generates pairs of (filename, contents), where 'contents' is a list
+# describing the file's content at each revision (or in the working copy).
+# At each revision, it is either None or the file's actual content. When not
+# None, it may be either new content or the same content as an earlier
+# revision, so all of (modified,clean,added,removed) can be tested.
+def generatestates(maxchangesets, parentcontents):
+    depth = len(parentcontents)
+    if depth == maxchangesets + 1:
+        for tracked in ('untracked', 'tracked'):
+            filename = "_".join([(content is None and 'missing' or content) for
+                                 content in parentcontents]) + "-" + tracked
+            yield (filename, parentcontents)
+    else:
+        for content in (set([None, 'content' + str(depth + 1)]) |
+                      set(parentcontents)):
+            for combination in generatestates(maxchangesets,
+                                              parentcontents + [content]):
+                yield combination
+
+# retrieve the command line arguments
+target = sys.argv[1]
+maxchangesets = int(sys.argv[2])
+if target == 'state':
+    depth = sys.argv[3]
+
+# sort to make sure we have stable output
+combinations = sorted(generatestates(maxchangesets, []))
+
+# compute file content
+content = []
+for filename, states in combinations:
+    if target == 'filelist':
+        print filename
+    elif target == 'state':
+        if depth == 'wc':
+            # Make sure there is content so the file gets written and can be
+            # tracked. It will be deleted outside of this script.
+            content.append((filename, states[maxchangesets] or 'TOBEDELETED'))
+        else:
+            content.append((filename, states[int(depth) - 1]))
+    else:
+        print >> sys.stderr, "unknown target:", target
+        sys.exit(1)
+
+# write actual content
+for filename, data in content:
+    if data is not None:
+        f = open(filename, 'wb')
+        f.write(data + '\n')
+        f.close()
+    elif os.path.exists(filename):
+        os.remove(filename)
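The header comment of generate-working-copy-states.py explains the filename scheme: one underscore-separated segment per changeset plus a working-copy segment carrying a tracked/untracked suffix. A tiny decoder for that convention, exercised with the example name from the comment:

    def describe(filename):
        segments = filename.split('_')
        wccontent, trackedflag = segments[-1].rsplit('-', 1)
        return {'history': segments[:-1],        # content per changeset
                'wc': wccontent,                 # content in the working copy
                'tracked': trackedflag == 'tracked'}

    assert describe('content1_content2_content1-untracked') == {
        'history': ['content1', 'content2'],
        'wc': 'content1',
        'tracked': False,
    }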
--- a/tests/hghave.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/hghave.py	Sat Jan 17 18:28:30 2015 -0800
@@ -289,14 +289,17 @@
 @check("json", "some json module available")
 def has_json():
     try:
-        if sys.version_info < (2, 7):
-            import simplejson as json
-        else:
-            import json
+        import json
         json.dumps
         return True
     except ImportError:
-        return False
+        try:
+            import simplejson as json
+            json.dumps
+            return True
+        except ImportError:
+            pass
+    return False
 
 @check("outer-repo", "outer repo")
 def has_outer_repo():
@@ -304,11 +307,13 @@
     return not matchoutput('hg root 2>&1',
                            r'abort: no repository found', True)
 
-@check("ssl", "python >= 2.6 ssl module and python OpenSSL")
+@check("ssl", ("(python >= 2.6 ssl module and python OpenSSL) "
+               "OR python >= 2.7.9 ssl"))
 def has_ssl():
     try:
         import ssl
-        ssl.wrap_socket # silence unused import warning
+        if getattr(ssl, 'create_default_context', False):
+            return True
         import OpenSSL
         OpenSSL.SSL.Context
         return True
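The hghave ssl check above now accepts either a stdlib ssl new enough to offer create_default_context (2.7.9+/3.4+) or the older ssl-plus-pyOpenSSL combination. The detection pattern in isolation, as a sketch mirroring the hunk:

    def hasworkablessl():
        try:
            import ssl
        except ImportError:
            return False
        if getattr(ssl, 'create_default_context', None) is not None:
            return True                      # modern stdlib ssl is enough
        try:
            import OpenSSL
            return hasattr(OpenSSL.SSL, 'Context')
        except ImportError:
            return False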
--- a/tests/run-tests.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/run-tests.py	Sat Jan 17 18:28:30 2015 -0800
@@ -61,12 +61,12 @@
 import unittest
 
 try:
-    if sys.version_info < (2, 7):
+    import json
+except ImportError:
+    try:
         import simplejson as json
-    else:
-        import json
-except ImportError:
-    json = None
+    except ImportError:
+        json = None
 
 processlock = threading.Lock()
 
@@ -500,7 +500,7 @@
             except self.failureException, e:
                 # This differs from unittest in that we don't capture
                 # the stack trace. This is for historical reasons and
-                # this decision could be revisted in the future,
+                # this decision could be revisited in the future,
                 # especially for PythonTest instances.
                 if result.addFailure(self, str(e)):
                     success = True
@@ -627,6 +627,8 @@
             (r':%s\b' % self._startport, ':$HGPORT'),
             (r':%s\b' % (self._startport + 1), ':$HGPORT1'),
             (r':%s\b' % (self._startport + 2), ':$HGPORT2'),
+            (r'(?m)^(saved backup bundle to .*\.hg)( \(glob\))?$',
+             r'\1 (glob)'),
             ]
 
         if os.name == 'nt':
@@ -649,7 +651,8 @@
         env["HGPORT2"] = str(self._startport + 2)
         env["HGRCPATH"] = os.path.join(self._threadtmp, '.hgrc')
         env["DAEMON_PIDS"] = os.path.join(self._threadtmp, 'daemon.pids')
-        env["HGEDITOR"] = sys.executable + ' -c "import sys; sys.exit(0)"'
+        env["HGEDITOR"] = ('"' + sys.executable + '"'
+                           + ' -c "import sys; sys.exit(0)"')
         env["HGMERGE"] = "internal:merge"
         env["HGUSER"]   = "test"
         env["HGENCODING"] = "ascii"
@@ -688,6 +691,10 @@
         hgrc.write('commit = -d "0 0"\n')
         hgrc.write('shelve = --date "0 0"\n')
         hgrc.write('tag = -d "0 0"\n')
+        hgrc.write('[largefiles]\n')
+        hgrc.write('usercache = %s\n' %
+                   (os.path.join(self._testtmp, '.cache/largefiles')))
+
         for opt in self._extraconfigopts:
             section, key = opt.split('.', 1)
             assert '=' in key, ('extra config opt %s must '
@@ -720,6 +727,15 @@
 
         return result
 
+# This script may want to drop globs from lines matching these patterns on
+# Windows, but check-code.py wants a glob on these lines unconditionally.  Don't
+# warn if that is the case for anything matching these lines.
+checkcodeglobpats = [
+    re.compile(r'^pushing to \$TESTTMP/.*[^)]$'),
+    re.compile(r'^moving \S+/.*[^)]$'),
+    re.compile(r'^pulling from \$TESTTMP/.*[^)]$')
+]
+
 class TTest(Test):
     """A "t test" is a test backed by a .t file."""
 
@@ -976,6 +992,9 @@
         if el + '\n' == l:
             if os.altsep:
                 # matching on "/" is not needed for this line
+                for pat in checkcodeglobpats:
+                    if pat.match(el):
+                        return True
                 return '-glob'
             return True
         i, n = 0, len(el)
@@ -1008,6 +1027,9 @@
             if el.endswith(" (re)\n"):
                 return TTest.rematch(el[:-6], l)
             if el.endswith(" (glob)\n"):
+                # ignore '(glob)' added to l by 'replacements'
+                if l.endswith(" (glob)\n"):
+                    l = l[:-8] + "\n"
                 return TTest.globmatch(el[:-8], l)
             if os.altsep and l.replace('\\', '/') == el:
                 return '+glob'
@@ -1263,7 +1285,7 @@
             iolock.release()
 
 class TestSuite(unittest.TestSuite):
-    """Custom unitest TestSuite that knows how to execute Mercurial tests."""
+    """Custom unittest TestSuite that knows how to execute Mercurial tests."""
 
     def __init__(self, testdir, jobs=1, whitelist=None, blacklist=None,
                  retest=False, keywords=None, loop=False,
@@ -1622,7 +1644,8 @@
         os.environ["BINDIR"] = self._bindir
         os.environ["PYTHON"] = PYTHON
 
-        path = [self._bindir] + os.environ["PATH"].split(os.pathsep)
+        runtestdir = os.path.abspath(os.path.dirname(__file__))
+        path = [self._bindir, runtestdir] + os.environ["PATH"].split(os.pathsep)
         if self._tmpbindir != self._bindir:
             path = [self._tmpbindir] + path
         os.environ["PATH"] = os.pathsep.join(path)
@@ -1631,8 +1654,7 @@
         # can run .../tests/run-tests.py test-foo where test-foo
         # adds an extension to HGRC. Also include run-test.py directory to
         # import modules like heredoctest.
-        pypath = [self._pythondir, self._testdir,
-                  os.path.abspath(os.path.dirname(__file__))]
+        pypath = [self._pythondir, self._testdir, runtestdir]
         # We have to augment PYTHONPATH, rather than simply replacing
         # it, in case external libraries are only available via current
         # PYTHONPATH.  (In particular, the Subversion bindings on OS X
@@ -1895,8 +1917,8 @@
         the one we expect it to be.  If not, print a warning to stderr."""
         if ((self._bindir == self._pythondir) and
             (self._bindir != self._tmpbindir)):
-            # The pythondir has been infered from --with-hg flag.
-            # We cannot expect anything sensible here
+            # The pythondir has been inferred from --with-hg flag.
+            # We cannot expect anything sensible here.
             return
         expecthg = os.path.join(self._pythondir, 'mercurial')
         actualhg = self._gethgpath()
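run-tests.py above adds a replacement that appends " (glob)" to 'saved backup bundle' lines (and the matcher later strips it back off), so backup-bundle names containing changeset hashes compare stably across runs. The substitution by itself, with a sample line; note it is idempotent because an existing " (glob)" is captured and rewritten:

    import re

    pat = re.compile(r'(?m)^(saved backup bundle to .*\.hg)( \(glob\))?$')

    line = 'saved backup bundle to $TESTTMP/.hg/strip-backup/2ae46b1d99a7-backup.hg'
    once = pat.sub(r'\1 (glob)', line)
    twice = pat.sub(r'\1 (glob)', once)
    assert once == twice == line + ' (glob)'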
--- a/tests/silenttestrunner.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/silenttestrunner.py	Sat Jan 17 18:28:30 2015 -0800
@@ -1,4 +1,4 @@
-import unittest, sys
+import unittest, sys, os
 
 def main(modulename):
     '''run the tests found in module, printing nothing when all tests pass'''
@@ -16,3 +16,6 @@
             print
             sys.stdout.write(exc)
         sys.exit(1)
+
+if os.environ.get('SILENT_BE_NOISY'):
+    main = unittest.main
--- a/tests/test-abort-checkin.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-abort-checkin.t	Sat Jan 17 18:28:30 2015 -0800
@@ -7,9 +7,11 @@
   > EOF
   $ abspath=`pwd`/abortcommit.py
 
-  $ echo "[extensions]" >> $HGRCPATH
-  $ echo "mq=" >> $HGRCPATH
-  $ echo "abortcommit = $abspath" >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > [extensions]
+  > mq =
+  > abortcommit = $abspath
+  > EOF
 
   $ hg init foo
   $ cd foo
--- a/tests/test-add.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-add.t	Sat Jan 17 18:28:30 2015 -0800
@@ -126,6 +126,19 @@
   M a
   ? a.orig
 
+Forgotten file can be added back (as either clean or modified)
+
+  $ hg forget b
+  $ hg add b
+  $ hg st -A b
+  C b
+  $ hg forget b
+  $ echo modified > b
+  $ hg add b
+  $ hg st -A b
+  M b
+  $ hg revert -qC b
+
   $ hg add c && echo "unexpected addition of missing file"
   c: * (glob)
   [1]
--- a/tests/test-addremove.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-addremove.t	Sat Jan 17 18:28:30 2015 -0800
@@ -6,8 +6,11 @@
   adding dir/bar
   adding foo
   $ hg -v commit -m "add 1"
+  committing files:
   dir/bar
   foo
+  committing manifest
+  committing changelog
   committed changeset 0:6f7f953567a2
   $ cd dir/
   $ touch ../foo_2 bar_2
@@ -15,10 +18,42 @@
   adding dir/bar_2
   adding foo_2
   $ hg -v commit -m "add 2"
+  committing files:
   dir/bar_2
   foo_2
+  committing manifest
+  committing changelog
   committed changeset 1:e65414bf35c5
-  $ cd ../..
+  $ cd ..
+  $ hg forget foo
+  $ hg -v addremove
+  adding foo
+  $ hg forget foo
+#if windows
+  $ hg -v addremove nonexistant
+  nonexistant: The system cannot find the file specified
+  [1]
+#else
+  $ hg -v addremove nonexistant
+  nonexistant: No such file or directory
+  [1]
+#endif
+  $ cd ..
+
+  $ hg init subdir
+  $ cd subdir
+  $ mkdir dir
+  $ cd dir
+  $ touch a.py
+  $ hg addremove 'glob:*.py'
+  adding a.py
+  $ hg forget a.py
+  $ hg addremove -I 'glob:*.py'
+  adding a.py
+  $ hg forget a.py
+  $ hg addremove
+  adding dir/a.py
+  $ cd ..
 
   $ hg init sim
   $ cd sim
@@ -45,4 +80,24 @@
   adding d
   recording removal of a as rename to b (100% similar)
   $ hg commit -mb
+  $ cp b c
+  $ hg forget b
+  $ hg addremove -s 50
+  adding b
+  adding c
+
+  $ rm c
+#if windows
+  $ hg ci -A -m "c" nonexistant
+  nonexistant: The system cannot find the file specified
+  abort: failed to mark all new/missing files as added/removed
+  [255]
+#else
+  $ hg ci -A -m "c" nonexistant
+  nonexistant: No such file or directory
+  abort: failed to mark all new/missing files as added/removed
+  [255]
+#endif
+  $ hg st
+  ! c
   $ cd ..
--- a/tests/test-ancestor.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-ancestor.py	Sat Jan 17 18:28:30 2015 -0800
@@ -1,4 +1,133 @@
 from mercurial import ancestor, commands, hg, ui, util
+from mercurial.node import nullrev
+import binascii, getopt, math, os, random, sys, time
+
+def buildgraph(rng, nodes=100, rootprob=0.05, mergeprob=0.2, prevprob=0.7):
+    '''nodes: total number of nodes in the graph
+    rootprob: probability that a new node (not 0) will be a root
+    mergeprob: probability that, excluding a root, a node will be a merge
+    prevprob: probability that p1 will be the previous node
+
+    return value is a graph represented as an adjacency list.
+    '''
+    graph = [None] * nodes
+    for i in xrange(nodes):
+        if i == 0 or rng.random() < rootprob:
+            graph[i] = [nullrev]
+        elif i == 1:
+            graph[i] = [0]
+        elif rng.random() < mergeprob:
+            if i == 2 or rng.random() < prevprob:
+                # p1 is prev
+                p1 = i - 1
+            else:
+                p1 = rng.randrange(i - 1)
+            p2 = rng.choice(range(0, p1) + range(p1 + 1, i))
+            graph[i] = [p1, p2]
+        elif rng.random() < prevprob:
+            graph[i] = [i - 1]
+        else:
+            graph[i] = [rng.randrange(i - 1)]
+
+    return graph
+
+def buildancestorsets(graph):
+    ancs = [None] * len(graph)
+    for i in xrange(len(graph)):
+        ancs[i] = set([i])
+        if graph[i] == [nullrev]:
+            continue
+        for p in graph[i]:
+            ancs[i].update(ancs[p])
+    return ancs
+
+class naiveincrementalmissingancestors(object):
+    def __init__(self, ancs, bases):
+        self.ancs = ancs
+        self.bases = set(bases)
+    def addbases(self, newbases):
+        self.bases.update(newbases)
+    def removeancestorsfrom(self, revs):
+        for base in self.bases:
+            if base != nullrev:
+                revs.difference_update(self.ancs[base])
+        revs.discard(nullrev)
+    def missingancestors(self, revs):
+        res = set()
+        for rev in revs:
+            if rev != nullrev:
+                res.update(self.ancs[rev])
+        for base in self.bases:
+            if base != nullrev:
+                res.difference_update(self.ancs[base])
+        return sorted(res)
+
+def test_missingancestors(seed, rng):
+    # empirically observed to take around 1 second
+    graphcount = 100
+    testcount = 10
+    inccount = 10
+    nerrs = [0]
+    # the default mu and sigma give us a nice distribution of mostly
+    # single-digit counts (including 0) with some higher ones
+    def lognormrandom(mu, sigma):
+        return int(math.floor(rng.lognormvariate(mu, sigma)))
+
+    def samplerevs(nodes, mu=1.1, sigma=0.8):
+        count = min(lognormrandom(mu, sigma), len(nodes))
+        return rng.sample(nodes, count)
+
+    def err(seed, graph, bases, seq, output, expected):
+        if nerrs[0] == 0:
+            print >> sys.stderr, 'seed:', hex(seed)[:-1]
+        if gerrs[0] == 0:
+            print >> sys.stderr, 'graph:', graph
+        print >> sys.stderr, '* bases:', bases
+        print >> sys.stderr, '* seq: ', seq
+        print >> sys.stderr, '*  output:  ', output
+        print >> sys.stderr, '*  expected:', expected
+        nerrs[0] += 1
+        gerrs[0] += 1
+
+    for g in xrange(graphcount):
+        graph = buildgraph(rng)
+        ancs = buildancestorsets(graph)
+        gerrs = [0]
+        for _ in xrange(testcount):
+            # start from nullrev to include it as a possibility
+            graphnodes = range(nullrev, len(graph))
+            bases = samplerevs(graphnodes)
+
+            # fast algorithm
+            inc = ancestor.incrementalmissingancestors(graph.__getitem__, bases)
+            # reference slow algorithm
+            naiveinc = naiveincrementalmissingancestors(ancs, bases)
+            seq = []
+            revs = []
+            for _ in xrange(inccount):
+                if rng.random() < 0.2:
+                    newbases = samplerevs(graphnodes)
+                    seq.append(('addbases', newbases))
+                    inc.addbases(newbases)
+                    naiveinc.addbases(newbases)
+                if rng.random() < 0.4:
+                    # larger set so that there are more revs to remove from
+                    revs = samplerevs(graphnodes, mu=1.5)
+                    seq.append(('removeancestorsfrom', revs))
+                    hrevs = set(revs)
+                    rrevs = set(revs)
+                    inc.removeancestorsfrom(hrevs)
+                    naiveinc.removeancestorsfrom(rrevs)
+                    if hrevs != rrevs:
+                        err(seed, graph, bases, seq, sorted(hrevs),
+                            sorted(rrevs))
+                else:
+                    revs = samplerevs(graphnodes)
+                    seq.append(('missingancestors', revs))
+                    h = inc.missingancestors(revs)
+                    r = naiveinc.missingancestors(revs)
+                    if h != r:
+                        err(seed, graph, bases, seq, h, r)
 
 # graph is a dict of child->parent adjacency lists for this graph:
 # o  13
@@ -32,55 +161,16 @@
 graph = {0: [-1], 1: [0], 2: [1], 3: [1], 4: [2], 5: [4], 6: [4],
          7: [4], 8: [-1], 9: [6, 7], 10: [5], 11: [3, 7], 12: [9],
          13: [8]}
-pfunc = graph.get
-
-class mockchangelog(object):
-    parentrevs = graph.get
-
-def runmissingancestors(revs, bases):
-    print "%% ancestors of %s and not of %s" % (revs, bases)
-    print ancestor.missingancestors(revs, bases, pfunc)
-
-def test_missingancestors():
-    # Empty revs
-    runmissingancestors([], [1])
-    runmissingancestors([], [])
-
-    # If bases is empty, it's the same as if it were [nullrev]
-    runmissingancestors([12], [])
-
-    # Trivial case: revs == bases
-    runmissingancestors([0], [0])
-    runmissingancestors([4, 5, 6], [6, 5, 4])
-
-    # With nullrev
-    runmissingancestors([-1], [12])
-    runmissingancestors([12], [-1])
-
-    # 9 is a parent of 12. 7 is a parent of 9, so an ancestor of 12. 6 is an
-    # ancestor of 12 but not of 7.
-    runmissingancestors([12], [9])
-    runmissingancestors([9], [12])
-    runmissingancestors([12, 9], [7])
-    runmissingancestors([7, 6], [12])
-
-    # More complex cases
-    runmissingancestors([10], [11, 12])
-    runmissingancestors([11], [10])
-    runmissingancestors([11], [10, 12])
-    runmissingancestors([12], [10])
-    runmissingancestors([12], [11])
-    runmissingancestors([10, 11, 12], [13])
-    runmissingancestors([13], [10, 11, 12])
 
 def genlazyancestors(revs, stoprev=0, inclusive=False):
     print ("%% lazy ancestor set for %s, stoprev = %s, inclusive = %s" %
            (revs, stoprev, inclusive))
-    return ancestor.lazyancestors(mockchangelog, revs, stoprev=stoprev,
+    return ancestor.lazyancestors(graph.get, revs, stoprev=stoprev,
                                   inclusive=inclusive)
 
 def printlazyancestors(s, l):
-    print [n for n in l if n in s]
+    print 'membership: %r' % [n for n in l if n in s]
+    print 'iteration:  %r' % list(s)
 
 def test_lazyancestors():
     # Empty revs
@@ -134,7 +224,23 @@
                     print "  C returned:      %s" % cgcas
                     print "  Python returned: %s" % pygcas
 
-if __name__ == '__main__':
-    test_missingancestors()
+def main():
+    seed = None
+    opts, args = getopt.getopt(sys.argv[1:], 's:', ['seed='])
+    for o, a in opts:
+        if o in ('-s', '--seed'):
+            seed = long(a, base=0) # accepts base 10 or 16 strings
+
+    if seed is None:
+        try:
+            seed = long(binascii.hexlify(os.urandom(16)), 16)
+        except AttributeError:
+            seed = long(time.time() * 1000)
+
+    rng = random.Random(seed)
+    test_missingancestors(seed, rng)
     test_lazyancestors()
     test_gca()
+
+if __name__ == '__main__':
+    main()
--- a/tests/test-ancestor.py.out	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-ancestor.py.out	Sat Jan 17 18:28:30 2015 -0800
@@ -1,48 +1,18 @@
-% ancestors of [] and not of [1]
-[]
-% ancestors of [] and not of []
-[]
-% ancestors of [12] and not of []
-[0, 1, 2, 4, 6, 7, 9, 12]
-% ancestors of [0] and not of [0]
-[]
-% ancestors of [4, 5, 6] and not of [6, 5, 4]
-[]
-% ancestors of [-1] and not of [12]
-[]
-% ancestors of [12] and not of [-1]
-[0, 1, 2, 4, 6, 7, 9, 12]
-% ancestors of [12] and not of [9]
-[12]
-% ancestors of [9] and not of [12]
-[]
-% ancestors of [12, 9] and not of [7]
-[6, 9, 12]
-% ancestors of [7, 6] and not of [12]
-[]
-% ancestors of [10] and not of [11, 12]
-[5, 10]
-% ancestors of [11] and not of [10]
-[3, 7, 11]
-% ancestors of [11] and not of [10, 12]
-[3, 11]
-% ancestors of [12] and not of [10]
-[6, 7, 9, 12]
-% ancestors of [12] and not of [11]
-[6, 9, 12]
-% ancestors of [10, 11, 12] and not of [13]
-[0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 11, 12]
-% ancestors of [13] and not of [10, 11, 12]
-[8, 13]
 % lazy ancestor set for [], stoprev = 0, inclusive = False
-[]
+membership: []
+iteration:  []
 % lazy ancestor set for [11, 13], stoprev = 0, inclusive = False
-[7, 8, 3, 4, 1, 0]
+membership: [7, 8, 3, 4, 1, 0]
+iteration:  [3, 7, 8, 1, 4, 0, 2]
 % lazy ancestor set for [1, 3], stoprev = 0, inclusive = False
-[1, 0]
+membership: [1, 0]
+iteration:  [0, 1]
 % lazy ancestor set for [11, 13], stoprev = 0, inclusive = True
-[11, 13, 7, 8, 3, 4, 1, 0]
+membership: [11, 13, 7, 8, 3, 4, 1, 0]
+iteration:  [11, 13, 3, 7, 8, 1, 4, 0, 2]
 % lazy ancestor set for [11, 13], stoprev = 6, inclusive = False
-[7, 8]
+membership: [7, 8]
+iteration:  [7, 8]
 % lazy ancestor set for [11, 13], stoprev = 6, inclusive = True
-[11, 13, 7, 8]
+membership: [11, 13, 7, 8]
+iteration:  [11, 13, 7, 8]
--- a/tests/test-annotate.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-annotate.t	Sat Jan 17 18:28:30 2015 -0800
@@ -451,3 +451,69 @@
   1: b  b
 
   $ cd ..
+
+Annotate with linkrev pointing to another branch
+------------------------------------------------
+
+create history with a filerev whose linkrev points to another branch
+
+  $ hg init branchedlinkrev
+  $ cd branchedlinkrev
+  $ echo A > a
+  $ hg commit -Am 'contentA'
+  adding a
+  $ echo B >> a
+  $ hg commit -m 'contentB'
+  $ hg up --rev 'desc(contentA)'
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ echo unrelated > unrelated
+  $ hg commit -Am 'unrelated'
+  adding unrelated
+  created new head
+  $ hg graft -r 'desc(contentB)'
+  grafting 1:fd27c222e3e6 "contentB"
+  $ echo C >> a
+  $ hg commit -m 'contentC'
+  $ hg log -G
+  @  changeset:   4:072f1e8df249
+  |  tag:         tip
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     contentC
+  |
+  o  changeset:   3:ff38df03cc4b
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     contentB
+  |
+  o  changeset:   2:62aaf3f6fc06
+  |  parent:      0:f0932f74827e
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     unrelated
+  |
+  | o  changeset:   1:fd27c222e3e6
+  |/   user:        test
+  |    date:        Thu Jan 01 00:00:00 1970 +0000
+  |    summary:     contentB
+  |
+  o  changeset:   0:f0932f74827e
+     user:        test
+     date:        Thu Jan 01 00:00:00 1970 +0000
+     summary:     contentA
+  
+
+Annotate should list ancestor of starting revision only
+
+  $ hg annotate a
+  0: A
+  3: B
+  4: C
+
+Even when the starting revision is the linkrev-shadowed one:
+
+  $ hg annotate a -r 3
+  0: A
+  3: B
+
+  $ cd ..
--- a/tests/test-archive.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-archive.t	Sat Jan 17 18:28:30 2015 -0800
@@ -250,6 +250,7 @@
   branch: default
   latesttag: null
   latesttagdistance: 4
+  changessincelatesttag: 4
   $ hg tag -r 2 mytag
   $ hg tag -r 2 anothertag
   $ hg archive -r 2 ../test-lasttag
@@ -322,10 +323,10 @@
   Archive:  ../old.zip
   \s*Length.* (re)
   *-----* (glob)
-  *147*80*00:00*old/.hg_archival.txt (glob)
+  *172*80*00:00*old/.hg_archival.txt (glob)
   *0*80*00:00*old/old (glob)
   *-----* (glob)
-  \s*147\s+2 files (re)
+  \s*172\s+2 files (re)
 
 show an error when a provided pattern matches no files
 
--- a/tests/test-backout.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-backout.t	Sat Jan 17 18:28:30 2015 -0800
@@ -43,6 +43,47 @@
   commit: (clean)
   update: (current)
 
+commit option
+
+  $ cd ..
+  $ hg init commit
+  $ cd commit
+
+  $ echo tomatoes > a
+  $ hg add a
+  $ hg commit -d '0 0' -m tomatoes
+
+  $ echo chair > b
+  $ hg add b
+  $ hg commit -d '1 0' -m chair
+
+  $ echo grapes >> a
+  $ hg commit -d '2 0' -m grapes
+
+  $ hg backout --commit -d '4 0' 1 --tool=:fail
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  changeset 3:1c2161e97c0a backs out changeset 1:22cb4f70d813
+  $ hg summary
+  parent: 3:1c2161e97c0a tip
+   Backed out changeset 22cb4f70d813
+  branch: default
+  commit: (clean)
+  update: (current)
+
+  $ echo ypples > a
+  $ hg commit -d '5 0' -m ypples
+
+  $ hg backout --commit -d '6 0' 2 --tool=:fail
+  0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+  use 'hg resolve' to retry unresolved file merges
+  [1]
+  $ hg summary
+  parent: 4:ed99997b793d tip
+   ypples
+  branch: default
+  commit: 1 unresolved (clean)
+  update: (current)
+
 file that was removed is recreated
 (this also tests that editor is not invoked if the commit message is
 specified explicitly)
--- a/tests/test-bad-extension.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-bad-extension.t	Sat Jan 17 18:28:30 2015 -0800
@@ -1,11 +1,13 @@
   $ echo 'raise Exception("bit bucket overflow")' > badext.py
   $ abspath=`pwd`/badext.py
 
-  $ echo '[extensions]' >> $HGRCPATH
-  $ echo "gpg =" >> $HGRCPATH
-  $ echo "hgext.gpg =" >> $HGRCPATH
-  $ echo "badext = $abspath" >> $HGRCPATH
-  $ echo "badext2 =" >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > [extensions]
+  > gpg =
+  > hgext.gpg =
+  > badext = $abspath
+  > badext2 =
+  > EOF
 
   $ hg -q help help
   *** failed to import extension badext from $TESTTMP/badext.py: bit bucket overflow
--- a/tests/test-basic.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-basic.t	Sat Jan 17 18:28:30 2015 -0800
@@ -5,6 +5,7 @@
   defaults.commit=-d "0 0"
   defaults.shelve=--date "0 0"
   defaults.tag=-d "0 0"
+  largefiles.usercache=$TESTTMP/.cache/largefiles (glob)
   ui.slash=True
   ui.interactive=False
   ui.mergemarkers=detailed
--- a/tests/test-bookmarks-pushpull.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-bookmarks-pushpull.t	Sat Jan 17 18:28:30 2015 -0800
@@ -55,7 +55,7 @@
   Y	4e3505fd95835d721066b76e75dbb8cc554d7f77
   Z	4e3505fd95835d721066b76e75dbb8cc554d7f77
 
-delete the bookmark to repull it
+delete the bookmark to re-pull it
 
   $ hg book -d X
   $ hg pull -B X ../a
@@ -177,7 +177,7 @@
      Y                         0:4e3505fd9583
      Z                         1:0d2164f0ce0d
 
-explicite pull should overwrite the local version (issue4439)
+explicit pull should overwrite the local version (issue4439)
 
   $ hg pull --config paths.foo=../a foo -B X
   pulling from $TESTTMP/a (glob)
--- a/tests/test-bookmarks-rebase.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-bookmarks-rebase.t	Sat Jan 17 18:28:30 2015 -0800
@@ -37,7 +37,8 @@
 rebase
 
   $ hg rebase -s two -d one
-  saved backup bundle to $TESTTMP/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 3:2ae46b1d99a7 "3" (tip two)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/2ae46b1d99a7-e6b057bc-backup.hg (glob)
 
   $ hg log
   changeset:   3:42e5ed2cdcf4
--- a/tests/test-branches.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-branches.t	Sat Jan 17 18:28:30 2015 -0800
@@ -419,10 +419,12 @@
   
 default branch colors:
 
-  $ echo "[extensions]" >> $HGRCPATH
-  $ echo "color =" >> $HGRCPATH
-  $ echo "[color]" >> $HGRCPATH
-  $ echo "mode = ansi" >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > [extensions]
+  > color =
+  > [color]
+  > mode = ansi
+  > EOF
 
   $ hg up -C c
   3 files updated, 0 files merged, 2 files removed, 0 files unresolved
@@ -444,14 +446,16 @@
   \x1b[0;0ma\x1b[0m\x1b[0;33m                              5:d8cbc61dbaa6\x1b[0m (inactive) (esc)
   \x1b[0;0mdefault\x1b[0m\x1b[0;33m                        0:19709c5a4e75\x1b[0m (inactive) (esc)
 
-  $ echo "[extensions]" >> $HGRCPATH
-  $ echo "color =" >> $HGRCPATH
-  $ echo "[color]" >> $HGRCPATH
-  $ echo "branches.active = green" >> $HGRCPATH
-  $ echo "branches.closed = blue" >> $HGRCPATH
-  $ echo "branches.current = red" >> $HGRCPATH
-  $ echo "branches.inactive = magenta" >> $HGRCPATH
-  $ echo "log.changeset = cyan" >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > [extensions]
+  > color =
+  > [color]
+  > branches.active = green
+  > branches.closed = blue
+  > branches.current = red
+  > branches.inactive = magenta
+  > log.changeset = cyan
+  > EOF
 
 custom branch colors:
 
@@ -516,4 +520,107 @@
    }
   ]
 
+
+Tests of revision branch name caching
+
+The rev branch cache is updated automatically. In these tests we use a trick to
+trigger rebuilds: we remove the branch head cache and run 'hg head' to cause a
+rebuild that will also populate the rev branch cache.
+
+revision branch cache is created when building the branch head cache
+  $ rm -rf .hg/cache; hg head a -T '{rev}\n'
+  5
+  $ f --hexdump --size .hg/cache/rbc-*
+  .hg/cache/rbc-names-v1: size=87
+  0000: 64 65 66 61 75 6c 74 00 61 00 62 00 63 00 61 20 |default.a.b.c.a |
+  0010: 62 72 61 6e 63 68 20 6e 61 6d 65 20 6d 75 63 68 |branch name much|
+  0020: 20 6c 6f 6e 67 65 72 20 74 68 61 6e 20 74 68 65 | longer than the|
+  0030: 20 64 65 66 61 75 6c 74 20 6a 75 73 74 69 66 69 | default justifi|
+  0040: 63 61 74 69 6f 6e 20 75 73 65 64 20 62 79 20 62 |cation used by b|
+  0050: 72 61 6e 63 68 65 73                            |ranches|
+  .hg/cache/rbc-revs-v1: size=120
+  0000: 19 70 9c 5a 00 00 00 00 dd 6b 44 0d 00 00 00 01 |.p.Z.....kD.....|
+  0010: 88 1f e2 b9 00 00 00 01 ac 22 03 33 00 00 00 02 |.........".3....|
+  0020: ae e3 9c d1 00 00 00 02 d8 cb c6 1d 00 00 00 01 |................|
+  0030: 58 97 36 a2 00 00 00 03 10 ff 58 95 00 00 00 04 |X.6.......X.....|
+  0040: ee bb 94 44 00 00 00 02 5f 40 61 bb 00 00 00 02 |...D...._@a.....|
+  0050: bf be 84 1b 00 00 00 02 d3 f1 63 45 80 00 00 02 |..........cE....|
+  0060: e3 d4 9c 05 80 00 00 02 e2 3b 55 05 00 00 00 02 |.........;U.....|
+  0070: f8 94 c2 56 80 00 00 03                         |...V....|
+recovery from invalid cache revs file with trailing data
+  $ echo >> .hg/cache/rbc-revs-v1
+  $ rm -f .hg/cache/branch* && hg head a -T '{rev}\n' --debug
+  truncating cache/rbc-revs-v1 to 120
+  5
+  $ f --size .hg/cache/rbc-revs*
+  .hg/cache/rbc-revs-v1: size=120
+recovery from invalid cache file with partial last record
+  $ mv .hg/cache/rbc-revs-v1 .
+  $ f -qDB 119 rbc-revs-v1 > .hg/cache/rbc-revs-v1
+  $ f --size .hg/cache/rbc-revs*
+  .hg/cache/rbc-revs-v1: size=119
+  $ rm -f .hg/cache/branch* && hg head a -T '{rev}\n' --debug
+  truncating cache/rbc-revs-v1 to 112
+  5
+  $ f --size .hg/cache/rbc-revs*
+  .hg/cache/rbc-revs-v1: size=120
+recovery from invalid cache file with missing record - no truncation
+  $ mv .hg/cache/rbc-revs-v1 .
+  $ f -qDB 112 rbc-revs-v1 > .hg/cache/rbc-revs-v1
+  $ rm -f .hg/cache/branch* && hg head a -T '{rev}\n' --debug
+  5
+  $ f --size .hg/cache/rbc-revs*
+  .hg/cache/rbc-revs-v1: size=120
+recovery from invalid cache file with some bad records
+  $ mv .hg/cache/rbc-revs-v1 .
+  $ f -qDB 8 rbc-revs-v1 > .hg/cache/rbc-revs-v1
+  $ f --size .hg/cache/rbc-revs*
+  .hg/cache/rbc-revs-v1: size=8
+  $ f -qDB 112 rbc-revs-v1 >> .hg/cache/rbc-revs-v1
+  $ f --size .hg/cache/rbc-revs*
+  .hg/cache/rbc-revs-v1: size=120
+  $ hg log -r 'branch(.)' -T '{rev} '
+  3 4 8 9 10 11 12 13  (no-eol)
+  $ rm -f .hg/cache/branch* && hg head a -T '{rev}\n' --debug
+  truncating cache/rbc-revs-v1 to 8
+  5
+  $ f --size --hexdump --bytes=16 .hg/cache/rbc-revs*
+  .hg/cache/rbc-revs-v1: size=120
+  0000: 19 70 9c 5a 00 00 00 00 dd 6b 44 0d 00 00 00 01 |.p.Z.....kD.....|
+cache is updated when committing
+  $ hg branch i-will-regret-this
+  marked working directory as branch i-will-regret-this
+  (branches are permanent and global, did you want a bookmark?)
+  $ hg ci -m regrets
+  $ f --size .hg/cache/rbc-*
+  .hg/cache/rbc-names-v1: size=106
+  .hg/cache/rbc-revs-v1: size=128
+update after rollback - the cache will be correct but rbc-names will still
+contain the branch name even though it is no longer used
+  $ hg up -qr '.^'
+  $ hg rollback -qf
+  $ f --size --hexdump .hg/cache/rbc-*
+  .hg/cache/rbc-names-v1: size=106
+  0000: 64 65 66 61 75 6c 74 00 61 00 62 00 63 00 61 20 |default.a.b.c.a |
+  0010: 62 72 61 6e 63 68 20 6e 61 6d 65 20 6d 75 63 68 |branch name much|
+  0020: 20 6c 6f 6e 67 65 72 20 74 68 61 6e 20 74 68 65 | longer than the|
+  0030: 20 64 65 66 61 75 6c 74 20 6a 75 73 74 69 66 69 | default justifi|
+  0040: 63 61 74 69 6f 6e 20 75 73 65 64 20 62 79 20 62 |cation used by b|
+  0050: 72 61 6e 63 68 65 73 00 69 2d 77 69 6c 6c 2d 72 |ranches.i-will-r|
+  0060: 65 67 72 65 74 2d 74 68 69 73                   |egret-this|
+  .hg/cache/rbc-revs-v1: size=120
+  0000: 19 70 9c 5a 00 00 00 00 dd 6b 44 0d 00 00 00 01 |.p.Z.....kD.....|
+  0010: 88 1f e2 b9 00 00 00 01 ac 22 03 33 00 00 00 02 |.........".3....|
+  0020: ae e3 9c d1 00 00 00 02 d8 cb c6 1d 00 00 00 01 |................|
+  0030: 58 97 36 a2 00 00 00 03 10 ff 58 95 00 00 00 04 |X.6.......X.....|
+  0040: ee bb 94 44 00 00 00 02 5f 40 61 bb 00 00 00 02 |...D...._@a.....|
+  0050: bf be 84 1b 00 00 00 02 d3 f1 63 45 80 00 00 02 |..........cE....|
+  0060: e3 d4 9c 05 80 00 00 02 e2 3b 55 05 00 00 00 02 |.........;U.....|
+  0070: f8 94 c2 56 80 00 00 03                         |...V....|
+cache is updated/truncated when stripping - it is thus very hard to get into a
+situation where the cache is out of sync and the hash check detects it
+  $ hg --config extensions.strip= strip -r tip --nob
+  $ f --size .hg/cache/rbc-revs*
+  .hg/cache/rbc-revs-v1: size=112
+
   $ cd ..
--- a/tests/test-bundle.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-bundle.t	Sat Jan 17 18:28:30 2015 -0800
@@ -268,51 +268,60 @@
 
 Log -R full.hg in partial
 
-  $ hg -R bundle://../full.hg log
+  $ hg -R bundle://../full.hg log -T phases
   changeset:   8:aa35859c02ea
   tag:         tip
+  phase:       draft
   parent:      3:eebf5a27f8ca
   user:        test
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     0.3m
   
   changeset:   7:a6a34bfa0076
+  phase:       draft
   user:        test
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     1.3m
   
   changeset:   6:7373c1169842
+  phase:       draft
   user:        test
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     1.3
   
   changeset:   5:1bb50a9436a7
+  phase:       draft
   user:        test
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     1.2
   
   changeset:   4:095197eb4973
+  phase:       draft
   parent:      0:f9ee2f85a263
   user:        test
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     1.1
   
   changeset:   3:eebf5a27f8ca
+  phase:       public
   user:        test
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     0.3
   
   changeset:   2:e38ba6f5b7e0
+  phase:       public
   user:        test
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     0.2
   
   changeset:   1:34c2bf6b0626
+  phase:       public
   user:        test
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     0.1
   
   changeset:   0:f9ee2f85a263
+  phase:       public
   user:        test
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     0.0
--- a/tests/test-bundle2-exchange.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-bundle2-exchange.t	Sat Jan 17 18:28:30 2015 -0800
@@ -7,6 +7,11 @@
 
 enable obsolescence
 
+  $ cat > $TESTTMP/bundle2-pushkey-hook.sh << EOF
+  > echo pushkey: lock state after \"\$HG_NAMESPACE\"
+  > hg debuglock
+  > EOF
+
   $ cat >> $HGRCPATH << EOF
   > [experimental]
   > evolution=createmarkers,exchange
@@ -21,7 +26,10 @@
   > publish=False
   > [hooks]
   > changegroup = sh -c  "HG_LOCAL= python \"$TESTDIR/printenv.py\" changegroup"
-  > b2x-transactionclose = sh -c  "HG_LOCAL= python \"$TESTDIR/printenv.py\" b2x-transactionclose"
+  > b2x-pretransactionclose.tip = hg log -r tip -T "pre-close-tip:{node|short} {phase} {bookmarks}\n"
+  > b2x-transactionclose.tip = hg log -r tip -T "postclose-tip:{node|short} {phase} {bookmarks}\n"
+  > b2x-transactionclose.env = sh -c  "HG_LOCAL= python \"$TESTDIR/printenv.py\" b2x-transactionclose"
+  > pushkey= sh "$TESTTMP/bundle2-pushkey-hook.sh"
   > EOF
 
 The extension requires a repo (currently unused)
@@ -59,8 +67,10 @@
   adding file changes
   added 2 changesets with 2 changes to 2 files
   1 new obsolescence markers
+  pre-close-tip:9520eea781bc draft 
+  postclose-tip:9520eea781bc draft 
+  b2x-transactionclose hook: HG_NEW_OBSMARKERS=1 HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/main
   changegroup hook: HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_SOURCE=pull HG_URL=file:$TESTTMP/main
-  b2x-transactionclose hook: HG_NEW_OBSMARKERS=1 HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/main
   updating to branch default
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg -R other log -G
@@ -82,8 +92,10 @@
   adding file changes
   added 1 changesets with 1 changes to 1 files (+1 heads)
   1 new obsolescence markers
+  pre-close-tip:24b6387c8c8c draft 
+  postclose-tip:24b6387c8c8c draft 
+  b2x-transactionclose hook: HG_NEW_OBSMARKERS=1 HG_NODE=24b6387c8c8cae37178880f3fa95ded3cb1cf785 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/main
   changegroup hook: HG_NODE=24b6387c8c8cae37178880f3fa95ded3cb1cf785 HG_SOURCE=pull HG_URL=file:$TESTTMP/main
-  b2x-transactionclose hook: HG_NEW_OBSMARKERS=1 HG_NODE=24b6387c8c8cae37178880f3fa95ded3cb1cf785 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/main
   (run 'hg heads' to see heads, 'hg merge' to merge)
   $ hg -R other log -G
   o  2:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com>  F
@@ -102,6 +114,8 @@
   $ hg -R other pull -r 24b6387c8c8c
   pulling from $TESTTMP/main (glob)
   no changes found
+  pre-close-tip:000000000000 public 
+  postclose-tip:24b6387c8c8c public 
   b2x-transactionclose hook: HG_NEW_OBSMARKERS=0 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=file:$TESTTMP/main
   $ hg -R other log -G
   o  2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com>  F
@@ -119,6 +133,8 @@
   $ hg -R other pull -r 24b6387c8c8c
   pulling from $TESTTMP/main (glob)
   no changes found
+  pre-close-tip:24b6387c8c8c public 
+  postclose-tip:24b6387c8c8c public 
   b2x-transactionclose hook: HG_NEW_OBSMARKERS=0 HG_SOURCE=pull HG_URL=file:$TESTTMP/main
   $ hg -R other log -G
   o  2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com>  F
@@ -156,14 +172,25 @@
   $ hg -R main push other --rev eea13746799a --bookmark book_eea1
   pushing to other
   searching for changes
+  pre-close-tip:eea13746799a public book_eea1
+  pushkey: lock state after "phases"
+  lock:  free
+  wlock: free
+  pushkey: lock state after "bookmarks"
+  lock:  free
+  wlock: free
+  postclose-tip:eea13746799a public book_eea1
+  b2x-transactionclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2-EXP=1 HG_NEW_OBSMARKERS=1 HG_NODE=eea13746799a9e0bfd88f29d3c2e9dc9389f524f HG_PHASES_MOVED=1 HG_SOURCE=push HG_URL=push
   changegroup hook: HG_BUNDLE2-EXP=1 HG_NODE=eea13746799a9e0bfd88f29d3c2e9dc9389f524f HG_SOURCE=push HG_URL=push
-  b2x-transactionclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2-EXP=1 HG_NEW_OBSMARKERS=1 HG_NODE=eea13746799a9e0bfd88f29d3c2e9dc9389f524f HG_PHASES_MOVED=1 HG_SOURCE=push HG_URL=push
   remote: adding changesets
   remote: adding manifests
   remote: adding file changes
   remote: added 1 changesets with 0 changes to 0 files (-1 heads)
   remote: 1 new obsolescence markers
   updating bookmark book_eea1
+  pre-close-tip:02de42196ebe draft book_02de
+  postclose-tip:02de42196ebe draft book_02de
+  b2x-transactionclose hook: HG_SOURCE=push-response HG_URL=file:$TESTTMP/other
   $ hg -R other log -G
   o    3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
   |\
@@ -189,8 +216,10 @@
   added 1 changesets with 1 changes to 1 files (+1 heads)
   1 new obsolescence markers
   updating bookmark book_02de
+  pre-close-tip:02de42196ebe draft book_02de
+  postclose-tip:02de42196ebe draft book_02de
+  b2x-transactionclose hook: HG_BOOKMARK_MOVED=1 HG_NEW_OBSMARKERS=1 HG_NODE=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=ssh://user@dummy/main
   changegroup hook: HG_NODE=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_SOURCE=pull HG_URL=ssh://user@dummy/main
-  b2x-transactionclose hook: HG_BOOKMARK_MOVED=1 HG_NEW_OBSMARKERS=1 HG_NODE=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=ssh://user@dummy/main
   (run 'hg heads' to see heads, 'hg merge' to merge)
   $ hg -R other debugobsolete
   1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
@@ -212,8 +241,10 @@
   added 1 changesets with 1 changes to 1 files (+1 heads)
   1 new obsolescence markers
   updating bookmark book_42cc
+  pre-close-tip:42ccdea3bb16 draft book_42cc
+  postclose-tip:42ccdea3bb16 draft book_42cc
+  b2x-transactionclose hook: HG_BOOKMARK_MOVED=1 HG_NEW_OBSMARKERS=1 HG_NODE=42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=http://localhost:$HGPORT/
   changegroup hook: HG_NODE=42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 HG_SOURCE=pull HG_URL=http://localhost:$HGPORT/
-  b2x-transactionclose hook: HG_BOOKMARK_MOVED=1 HG_NEW_OBSMARKERS=1 HG_NODE=42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_URL=http://localhost:$HGPORT/
   (run 'hg heads .' to see heads, 'hg merge' to merge)
   $ cat main-error.log
   $ hg -R other debugobsolete
@@ -234,8 +265,16 @@
   remote: added 1 changesets with 1 changes to 1 files
   remote: 1 new obsolescence markers
   updating bookmark book_5fdd
+  remote: pre-close-tip:5fddd98957c8 draft book_5fdd
+  remote: pushkey: lock state after "bookmarks"
+  remote: lock:  free
+  remote: wlock: free
+  remote: postclose-tip:5fddd98957c8 draft book_5fdd
+  remote: b2x-transactionclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2-EXP=1 HG_NEW_OBSMARKERS=1 HG_NODE=5fddd98957c8a54a4d436dfe1da9d87f21a1b97b HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
   remote: changegroup hook: HG_BUNDLE2-EXP=1 HG_NODE=5fddd98957c8a54a4d436dfe1da9d87f21a1b97b HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
-  remote: b2x-transactionclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2-EXP=1 HG_NEW_OBSMARKERS=1 HG_NODE=5fddd98957c8a54a4d436dfe1da9d87f21a1b97b HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
+  pre-close-tip:02de42196ebe draft book_02de
+  postclose-tip:02de42196ebe draft book_02de
+  b2x-transactionclose hook: HG_SOURCE=push-response HG_URL=ssh://user@dummy/other
   $ hg -R other log -G
   o  6:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
   |
@@ -274,6 +313,9 @@
   remote: added 1 changesets with 1 changes to 1 files
   remote: 1 new obsolescence markers
   updating bookmark book_32af
+  pre-close-tip:02de42196ebe draft book_02de
+  postclose-tip:02de42196ebe draft book_02de
+  b2x-transactionclose hook: HG_SOURCE=push-response HG_URL=http://localhost:$HGPORT2/
   $ cat other-error.log
 
 Check final content.
@@ -304,6 +346,15 @@
   6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
   7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
 
+(check that no 'pending' files remain)
+
+  $ ls -1 other/.hg/bookmarks*
+  other/.hg/bookmarks
+  $ ls -1 other/.hg/store/phaseroots*
+  other/.hg/store/phaseroots
+  $ ls -1 other/.hg/store/00changelog.i*
+  other/.hg/store/00changelog.i
+
 Error Handling
 ==============
 
@@ -328,7 +379,7 @@
   >     if reason == 'abort':
   >         bundler.newpart('test:abort')
   >     if reason == 'unknown':
-  >         bundler.newpart('TEST:UNKNOWN')
+  >         bundler.newpart('test:unknown')
   >     if reason == 'race':
   >         # 20 Bytes of crap
   >         bundler.newpart('b2x:check:heads', data='01234567890123456789')
@@ -460,9 +511,9 @@
   $ hg -R main push other -r e7ec4e813ba6
   pushing to other
   searching for changes
+  pre-close-tip:e7ec4e813ba6 draft 
   transaction abort!
   rollback completed
-  changegroup hook: HG_BUNDLE2-EXP=1 HG_NODE=e7ec4e813ba6b07be2a0516ce1a74bb4e503f91a HG_SOURCE=push HG_URL=push
   abort: b2x-pretransactionclose.failpush hook exited with status 1
   [255]
 
@@ -470,9 +521,9 @@
   pushing to ssh://user@dummy/other
   searching for changes
   abort: b2x-pretransactionclose.failpush hook exited with status 1
+  remote: pre-close-tip:e7ec4e813ba6 draft 
   remote: transaction abort!
   remote: rollback completed
-  remote: changegroup hook: HG_BUNDLE2-EXP=1 HG_NODE=e7ec4e813ba6b07be2a0516ce1a74bb4e503f91a HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
   [255]
 
   $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
@@ -481,4 +532,12 @@
   abort: b2x-pretransactionclose.failpush hook exited with status 1
   [255]
 
+(check that no 'pending' files remain)
 
+  $ ls -1 other/.hg/bookmarks*
+  other/.hg/bookmarks
+  $ ls -1 other/.hg/store/phaseroots*
+  other/.hg/store/phaseroots
+  $ ls -1 other/.hg/store/00changelog.i*
+  other/.hg/store/00changelog.i
+
--- a/tests/test-bundle2-format.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-bundle2-format.t	Sat Jan 17 18:28:30 2015 -0800
@@ -1,4 +1,4 @@
-This test is decicated to test the bundle2 container format
+This test is dedicated to test the bundle2 container format
 
 It test multiple existing parts to test different feature of the container. You
 probably do not need to touch this test unless you change the binary encoding
@@ -55,7 +55,8 @@
   >     op.ui.write('received ping request (id %i)\n' % part.id)
   >     if op.reply is not None and 'ping-pong' in op.reply.capabilities:
   >         op.ui.write_err('replying to ping request (id %i)\n' % part.id)
-  >         op.reply.newpart('test:pong', [('in-reply-to', str(part.id))])
+  >         op.reply.newpart('test:pong', [('in-reply-to', str(part.id))],
+  >                          mandatory=False)
   > 
   > @bundle2.parthandler('test:debugreply')
   > def debugreply(op, part):
@@ -80,7 +81,7 @@
   >           ('r', 'rev', [], 'includes those changeset in the bundle'),],
   >          '[OUTPUTFILE]')
   > def cmdbundle2(ui, repo, path=None, **opts):
-  >     """write a bundle2 container on standard ouput"""
+  >     """write a bundle2 container on standard output"""
   >     bundler = bundle2.bundle20(ui)
   >     for p in opts['param']:
   >         p = p.split('=', 1)
@@ -108,32 +109,34 @@
   >             headcommon  = [c.node() for c in repo.set('parents(%ld) - %ld', revs, revs)]
   >             outgoing = discovery.outgoing(repo.changelog, headcommon, headmissing)
   >             cg = changegroup.getlocalchangegroup(repo, 'test:bundle2', outgoing, None)
-  >             bundler.newpart('b2x:changegroup', data=cg.getchunks())
+  >             bundler.newpart('b2x:changegroup', data=cg.getchunks(),
+  >                             mandatory=False)
   > 
   >     if opts['parts']:
-  >        bundler.newpart('test:empty')
+  >        bundler.newpart('test:empty', mandatory=False)
   >        # add a second one to make sure we handle multiple parts
-  >        bundler.newpart('test:empty')
-  >        bundler.newpart('test:song', data=ELEPHANTSSONG)
-  >        bundler.newpart('test:debugreply')
+  >        bundler.newpart('test:empty', mandatory=False)
+  >        bundler.newpart('test:song', data=ELEPHANTSSONG, mandatory=False)
+  >        bundler.newpart('test:debugreply', mandatory=False)
   >        mathpart = bundler.newpart('test:math')
   >        mathpart.addparam('pi', '3.14')
   >        mathpart.addparam('e', '2.72')
   >        mathpart.addparam('cooking', 'raw', mandatory=False)
   >        mathpart.data = '42'
+  >        mathpart.mandatory = False
   >        # advisory known part with unknown mandatory param
-  >        bundler.newpart('test:song', [('randomparam','')])
+  >        bundler.newpart('test:song', [('randomparam','')], mandatory=False)
   >     if opts['unknown']:
-  >        bundler.newpart('test:UNKNOWN', data='some random content')
+  >        bundler.newpart('test:unknown', data='some random content')
   >     if opts['unknownparams']:
-  >        bundler.newpart('test:SONG', [('randomparams', '')])
+  >        bundler.newpart('test:song', [('randomparams', '')])
   >     if opts['parts']:
-  >        bundler.newpart('test:ping')
+  >        bundler.newpart('test:ping', mandatory=False)
   >     if opts['genraise']:
   >        def genraise():
   >            yield 'first line\n'
   >            raise RuntimeError('Someone set up us the bomb!')
-  >        bundler.newpart('b2x:output', data=genraise())
+  >        bundler.newpart('b2x:output', data=genraise(), mandatory=False)
   > 
   >     if path is None:
   >        file = sys.stdout
@@ -752,13 +755,18 @@
   \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02H (esc)
   \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
 
-  $ hg unbundle2 < ../rev.hg2
+  $ hg debugbundle ../rev.hg2
+  Stream params: {}
+  b2x:changegroup -- '{}'
+      32af7686d403cf45b5d95f2d70cebea587ac806a
+      9520eea781bcca16c1e15acc0ba14335a0e8e5ba
+      eea13746799a9e0bfd88f29d3c2e9dc9389f524f
+      02de42196ebee42ef284b6780a87cdc96e8eaab6
+  $ hg unbundle ../rev.hg2
   adding changesets
   adding manifests
   adding file changes
   added 0 changesets with 0 changes to 3 files
-  0 unread bytes
-  addchangegroup return: 1
 
 with reply
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-bundle2-pushback.t	Sat Jan 17 18:28:30 2015 -0800
@@ -0,0 +1,111 @@
+  $ cat > bundle2.py << EOF
+  > """A small extension to test bundle2 pushback parts.
+  > The current bundle2 implementation doesn't provide a way to generate those
+  > parts, so they must be created by extensions.
+  > """
+  > from mercurial import bundle2, pushkey, exchange, util
+  > def _newhandlechangegroup(op, inpart):
+  >     """This function wraps the changegroup part handler for getbundle.
+  >     It issues an additional b2x:pushkey part to send a new
+  >     bookmark back to the client"""
+  >     result = bundle2.handlechangegroup(op, inpart)
+  >     if 'b2x:pushback' in op.reply.capabilities:
+  >         params = {'namespace': 'bookmarks',
+  >                   'key': 'new-server-mark',
+  >                   'old': '',
+  >                   'new': 'tip'}
+  >         encodedparams = [(k, pushkey.encode(v)) for (k,v) in params.items()]
+  >         op.reply.newpart('b2x:pushkey', mandatoryparams=encodedparams)
+  >     else:
+  >         op.reply.newpart('b2x:output', data='pushback not enabled')
+  >     return result
+  > _newhandlechangegroup.params = bundle2.handlechangegroup.params
+  > bundle2.parthandlermapping['b2x:changegroup'] = _newhandlechangegroup
+  > EOF
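The interesting part of the wrapper above is the capability check: the server only
replies with a b2x:pushkey part when the client advertised b2x:pushback, and the
pushkey payload is just the namespace/key/old/new quadruple run through
pushkey.encode. A standalone sketch of that assembly (the bookmark name is
hypothetical; only pushkey.encode and the parameter names are taken from the
extension above):

    from mercurial import pushkey

    def pushbackparams(bookmark='hypothetical-mark', target='tip'):
        # same shape as the params dict built in the extension above
        params = {'namespace': 'bookmarks',
                  'key': bookmark,
                  'old': '',
                  'new': target}
        return [(k, pushkey.encode(v)) for k, v in params.items()]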
+
+  $ cat >> $HGRCPATH <<EOF
+  > [ui]
+  > ssh = python "$TESTDIR/dummyssh"
+  > username = nobody <no.reply@example.com>
+  > 
+  > [alias]
+  > tglog = log -G -T "{desc} [{phase}:{node|short}]"
+  > EOF
+
+Set up server repository
+
+  $ hg init server
+  $ cd server
+  $ echo c0 > f0
+  $ hg commit -Am 0
+  adding f0
+
+Set up client repository
+
+  $ cd ..
+  $ hg clone ssh://user@dummy/server client -q
+  $ cd client
+
+Enable extension
+  $ cat >> $HGRCPATH <<EOF
+  > [extensions]
+  > bundle2=$TESTTMP/bundle2.py
+  > [experimental]
+  > bundle2-exp = True
+  > EOF
+
+Without config
+
+  $ cd ../client
+  $ echo c1 > f1
+  $ hg commit -Am 1
+  adding f1
+  $ hg push
+  pushing to ssh://user@dummy/server
+  searching for changes
+  remote: pushback not enabled
+  remote: adding changesets
+  remote: adding manifests
+  remote: adding file changes
+  remote: added 1 changesets with 1 changes to 1 files
+  $ hg bookmark
+  no bookmarks set
+
+  $ cd ../server
+  $ hg tglog
+  o  1 [public:2b9c7234e035]
+  |
+  @  0 [public:6cee5c8f3e5b]
+  
+
+
+
+With config
+
+  $ cd ../client
+  $ echo '[experimental]' >> .hg/hgrc
+  $ echo 'bundle2.pushback = True' >> .hg/hgrc
+  $ echo c2 > f2
+  $ hg commit -Am 2
+  adding f2
+  $ hg push
+  pushing to ssh://user@dummy/server
+  searching for changes
+  remote: adding changesets
+  remote: adding manifests
+  remote: adding file changes
+  remote: added 1 changesets with 1 changes to 1 files
+  $ hg bookmark
+     new-server-mark           2:0a76dfb2e179
+
+  $ cd ../server
+  $ hg tglog
+  o  2 [public:0a76dfb2e179]
+  |
+  o  1 [public:2b9c7234e035]
+  |
+  @  0 [public:6cee5c8f3e5b]
+  
+
+
+
--- a/tests/test-bundle2-remote-changegroup.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-bundle2-remote-changegroup.t	Sat Jan 17 18:28:30 2015 -0800
@@ -50,13 +50,13 @@
   >            bundledata = open(file, 'rb').read()
   >            digest = util.digester.preferred(b2caps['digests'])
   >            d = util.digester([digest], bundledata)
-  >            part = newpart('B2X:REMOTE-CHANGEGROUP')
+  >            part = newpart('b2x:remote-changegroup')
   >            part.addparam('url', url)
   >            part.addparam('size', str(len(bundledata)))
   >            part.addparam('digests', digest)
   >            part.addparam('digest:%s' % digest, d[digest])
   >         elif verb == 'raw-remote-changegroup':
-  >            part = newpart('B2X:REMOTE-CHANGEGROUP')
+  >            part = newpart('b2x:remote-changegroup')
   >            for k, v in eval(args).items():
   >                part.addparam(k, str(v))
   >         elif verb == 'changegroup':
@@ -65,7 +65,7 @@
   >             heads = [repo.lookup(r) for r in repo.revs(heads)]
   >             cg = changegroup.getchangegroup(repo, 'changegroup',
   >                 heads=heads, common=common)
-  >             newpart('B2X:CHANGEGROUP', cg.getchunks())
+  >             newpart('b2x:changegroup', cg.getchunks())
   >         else:
   >             raise Exception('unknown verb')
   > 
@@ -137,7 +137,7 @@
   $ hg pull -R clone ssh://user@dummy/repo
   pulling from ssh://user@dummy/repo
   searching for changes
-  remote: B2X:REMOTE-CHANGEGROUP
+  remote: b2x:remote-changegroup
   adding changesets
   adding manifests
   adding file changes
@@ -180,12 +180,12 @@
   $ hg pull -R clone ssh://user@dummy/repo
   pulling from ssh://user@dummy/repo
   searching for changes
-  remote: B2X:REMOTE-CHANGEGROUP
+  remote: b2x:remote-changegroup
   adding changesets
   adding manifests
   adding file changes
   added 2 changesets with 2 changes to 2 files (+1 heads)
-  remote: B2X:CHANGEGROUP
+  remote: b2x:changegroup
   adding changesets
   adding manifests
   adding file changes
@@ -228,12 +228,12 @@
   $ hg pull -R clone ssh://user@dummy/repo
   pulling from ssh://user@dummy/repo
   searching for changes
-  remote: B2X:CHANGEGROUP
+  remote: b2x:changegroup
   adding changesets
   adding manifests
   adding file changes
   added 2 changesets with 2 changes to 2 files (+1 heads)
-  remote: B2X:REMOTE-CHANGEGROUP
+  remote: b2x:remote-changegroup
   adding changesets
   adding manifests
   adding file changes
@@ -279,17 +279,17 @@
   $ hg pull -R clone ssh://user@dummy/repo
   pulling from ssh://user@dummy/repo
   searching for changes
-  remote: B2X:REMOTE-CHANGEGROUP
+  remote: b2x:remote-changegroup
   adding changesets
   adding manifests
   adding file changes
   added 2 changesets with 2 changes to 2 files (+1 heads)
-  remote: B2X:REMOTE-CHANGEGROUP
+  remote: b2x:remote-changegroup
   adding changesets
   adding manifests
   adding file changes
   added 2 changesets with 1 changes to 1 files
-  remote: B2X:CHANGEGROUP
+  remote: b2x:changegroup
   adding changesets
   adding manifests
   adding file changes
@@ -324,7 +324,7 @@
   > EOF
   $ hg clone ssh://user@dummy/repo clone
   requesting all changes
-  remote: B2X:REMOTE-CHANGEGROUP
+  remote: b2x:remote-changegroup
   adding changesets
   adding manifests
   adding file changes
@@ -338,7 +338,7 @@
   > EOF
   $ hg clone ssh://user@dummy/repo clone
   requesting all changes
-  remote: B2X:REMOTE-CHANGEGROUP
+  remote: b2x:remote-changegroup
   adding changesets
   adding manifests
   adding file changes
@@ -354,7 +354,7 @@
   > EOF
   $ hg clone ssh://user@dummy/repo clone
   requesting all changes
-  remote: B2X:REMOTE-CHANGEGROUP
+  remote: b2x:remote-changegroup
   adding changesets
   adding manifests
   adding file changes
@@ -372,7 +372,7 @@
   > EOF
   $ hg clone ssh://user@dummy/repo clone
   requesting all changes
-  remote: B2X:REMOTE-CHANGEGROUP
+  remote: b2x:remote-changegroup
   adding changesets
   adding manifests
   adding file changes
@@ -388,7 +388,7 @@
   > EOF
   $ hg clone ssh://user@dummy/repo clone
   requesting all changes
-  remote: B2X:REMOTE-CHANGEGROUP
+  remote: b2x:remote-changegroup
   adding changesets
   adding manifests
   adding file changes
@@ -404,7 +404,7 @@
   > EOF
   $ hg clone ssh://user@dummy/repo clone
   requesting all changes
-  remote: B2X:REMOTE-CHANGEGROUP
+  remote: b2x:remote-changegroup
   adding changesets
   adding manifests
   adding file changes
@@ -433,12 +433,12 @@
   $ hg pull -R clone ssh://user@dummy/repo
   pulling from ssh://user@dummy/repo
   searching for changes
-  remote: B2X:REMOTE-CHANGEGROUP
+  remote: b2x:remote-changegroup
   adding changesets
   adding manifests
   adding file changes
   added 2 changesets with 2 changes to 2 files (+1 heads)
-  remote: B2X:REMOTE-CHANGEGROUP
+  remote: b2x:remote-changegroup
   adding changesets
   adding manifests
   adding file changes
@@ -467,7 +467,7 @@
   $ hg pull -R clone ssh://user@dummy/repo
   pulling from ssh://user@dummy/repo
   searching for changes
-  remote: B2X:REMOTE-CHANGEGROUP
+  remote: b2x:remote-changegroup
   abort: remote-changegroup: missing "url" param
   [255]
 
@@ -479,7 +479,7 @@
   $ hg pull -R clone ssh://user@dummy/repo
   pulling from ssh://user@dummy/repo
   searching for changes
-  remote: B2X:REMOTE-CHANGEGROUP
+  remote: b2x:remote-changegroup
   abort: remote-changegroup: missing "size" param
   [255]
 
@@ -491,7 +491,7 @@
   $ hg pull -R clone ssh://user@dummy/repo
   pulling from ssh://user@dummy/repo
   searching for changes
-  remote: B2X:REMOTE-CHANGEGROUP
+  remote: b2x:remote-changegroup
   abort: remote-changegroup: invalid value for param "size"
   [255]
 
@@ -503,7 +503,7 @@
   $ hg pull -R clone ssh://user@dummy/repo
   pulling from ssh://user@dummy/repo
   searching for changes
-  remote: B2X:REMOTE-CHANGEGROUP
+  remote: b2x:remote-changegroup
   adding changesets
   adding manifests
   adding file changes
@@ -522,7 +522,7 @@
   $ hg pull -R clone ssh://user@dummy/repo
   pulling from ssh://user@dummy/repo
   searching for changes
-  remote: B2X:REMOTE-CHANGEGROUP
+  remote: b2x:remote-changegroup
   abort: missing support for b2x:remote-changegroup - digest:foo
   [255]
 
@@ -534,7 +534,7 @@
   $ hg pull -R clone ssh://user@dummy/repo
   pulling from ssh://user@dummy/repo
   searching for changes
-  remote: B2X:REMOTE-CHANGEGROUP
+  remote: b2x:remote-changegroup
   abort: remote-changegroup: missing "digest:sha1" param
   [255]
 
@@ -546,7 +546,7 @@
   $ hg pull -R clone ssh://user@dummy/repo
   pulling from ssh://user@dummy/repo
   searching for changes
-  remote: B2X:REMOTE-CHANGEGROUP
+  remote: b2x:remote-changegroup
   abort: remote-changegroup does not support ssh urls
   [255]
 
@@ -561,7 +561,7 @@
   $ hg pull -R clone ssh://user@dummy/repo
   pulling from ssh://user@dummy/repo
   searching for changes
-  remote: B2X:REMOTE-CHANGEGROUP
+  remote: b2x:remote-changegroup
   abort: http://localhost:$HGPORT/notbundle.hg: not a Mercurial bundle
   [255]
 
@@ -576,7 +576,7 @@
   $ hg pull -R clone ssh://user@dummy/repo
   pulling from ssh://user@dummy/repo
   searching for changes
-  remote: B2X:REMOTE-CHANGEGROUP
+  remote: b2x:remote-changegroup
   abort: http://localhost:$HGPORT/notbundle10.hg: not a bundle version 1.0
   [255]
 
--- a/tests/test-casefolding.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-casefolding.t	Sat Jan 17 18:28:30 2015 -0800
@@ -24,7 +24,11 @@
   $ echo a > a
   $ hg --debug ci -Am adda
   adding a
+  committing files:
   a
+  committing manifest
+  committing changelog
+  couldn't read revision branch cache names: * (glob)
   committed changeset 0:07f4944404050f47db2e5c5071e0e84e7a27bba9
 
 Case-changing renames should work:
--- a/tests/test-check-code.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-check-code.t	Sat Jan 17 18:28:30 2015 -0800
@@ -261,7 +261,7 @@
   > print _("concatenating " " by " " space %s" % v)
   > print _("concatenating " + " by " + " '+' %s" % v)
   > 
-  > print _("maping operation in different line %s"
+  > print _("mapping operation in different line %s"
   >         % v)
   > 
   > print _(
@@ -278,7 +278,7 @@
    > print _("concatenating " + " by " + " '+' %s" % v)
    don't use % inside _()
   ./map-inside-gettext.py:6:
-   > print _("maping operation in different line %s"
+   > print _("mapping operation in different line %s"
    don't use % inside _()
   ./map-inside-gettext.py:9:
    > print _(
--- a/tests/test-check-commit-hg.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-check-commit-hg.t	Sat Jan 17 18:28:30 2015 -0800
@@ -14,7 +14,7 @@
   $ for node in `hg log --rev 'draft() and ::.' --template '{node|short}\n'`; do
   >    hg export $node | contrib/check-commit > ${TESTTMP}/check-commit.out
   >    if [ $? -ne 0 ]; then
-  >        echo "Revision $node does not comply to commit message rules"
+  >        echo "Revision $node does not comply to rules"
   >        echo '------------------------------------------------------'
   >        cat ${TESTTMP}/check-commit.out
   >        echo
--- a/tests/test-clone.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-clone.t	Sat Jan 17 18:28:30 2015 -0800
@@ -31,6 +31,8 @@
   default                       10:a7949464abda
   $ ls .hg/cache
   branch2-served
+  rbc-names-v1
+  rbc-revs-v1
 
 Default operation:
 
--- a/tests/test-command-template.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-command-template.t	Sat Jan 17 18:28:30 2015 -0800
@@ -525,6 +525,25 @@
    }
   ]
 
+honor --git but not format-breaking diffopts
+  $ hg --config diff.noprefix=True log --git -vpr . -Tjson
+  [
+   {
+    "rev": 8,
+    "node": "95c24699272ef57d062b8bccc32c878bf841784a",
+    "branch": "default",
+    "phase": "draft",
+    "user": "test",
+    "date": [1577872860, 0],
+    "desc": "third",
+    "bookmarks": [],
+    "tags": ["tip"],
+    "parents": ["29114dbae42b9f078cf2714dbe3a86bba8ec7453"],
+    "files": ["fourth", "second", "third"],
+    "diff": "diff --git a/second b/fourth\nrename from second\nrename to fourth\ndiff --git a/third b/third\nnew file mode 100644\n--- /dev/null\n+++ b/third\n@@ -0,0 +1,1 @@\n+third\n"
+   }
+  ]
+
   $ hg log -T json
   [
    {
--- a/tests/test-commandserver.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-commandserver.t	Sat Jan 17 18:28:30 2015 -0800
@@ -178,6 +178,7 @@
   defaults.commit=-d "0 0"
   defaults.shelve=--date "0 0"
   defaults.tag=-d "0 0"
+  largefiles.usercache=$TESTTMP/.cache/largefiles
   ui.slash=True
   ui.interactive=False
   ui.mergemarkers=detailed
@@ -492,6 +493,7 @@
   foo
 
   $ cat <<EOF > dbgui.py
+  > import os, sys
   > from mercurial import cmdutil, commands
   > cmdtable = {}
   > command = cmdutil.command(cmdtable)
@@ -501,6 +503,14 @@
   > @command("debugprompt", norepo=True)
   > def debugprompt(ui):
   >     ui.write("%s\\n" % ui.prompt("prompt:"))
+  > @command("debugreadstdin", norepo=True)
+  > def debugreadstdin(ui):
+  >     ui.write("read: %r\n" % sys.stdin.read(1))
+  > @command("debugwritestdout", norepo=True)
+  > def debugwritestdout(ui):
+  >     os.write(1, "low-level stdout fd and\n")
+  >     sys.stdout.write("stdout should be redirected to /dev/null\n")
+  >     sys.stdout.flush()
   > EOF
   $ cat <<EOF >> .hg/hgrc
   > [extensions]
@@ -518,10 +528,36 @@
   ...     runcommand(server, ['debugprompt', '--config',
   ...                         'ui.interactive=True'],
   ...                input=cStringIO.StringIO('5678\n'))
+  ...     runcommand(server, ['debugreadstdin'])
+  ...     runcommand(server, ['debugwritestdout'])
   *** runcommand debuggetpass --config ui.interactive=True
   password: 1234
   *** runcommand debugprompt --config ui.interactive=True
   prompt: 5678
+  *** runcommand debugreadstdin
+  read: ''
+  *** runcommand debugwritestdout
+
+
+run commandserver in commandserver, which is silly but should work:
+
+  >>> import cStringIO
+  >>> from hgclient import readchannel, runcommand, check
+  >>> @check
+  ... def nested(server):
+  ...     print '%c, %r' % readchannel(server)
+  ...     class nestedserver(object):
+  ...         stdin = cStringIO.StringIO('getencoding\n')
+  ...         stdout = cStringIO.StringIO()
+  ...     runcommand(server, ['serve', '--cmdserver', 'pipe'],
+  ...                output=nestedserver.stdout, input=nestedserver.stdin)
+  ...     nestedserver.stdout.seek(0)
+  ...     print '%c, %r' % readchannel(nestedserver)  # hello
+  ...     print '%c, %r' % readchannel(nestedserver)  # getencoding
+  o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob)
+  *** runcommand serve --cmdserver pipe
+  o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob)
+  r, '*' (glob)
 
 
 start without repository:
--- a/tests/test-commit-amend.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-commit-amend.t	Sat Jan 17 18:28:30 2015 -0800
@@ -35,7 +35,7 @@
   $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit --amend -m 'amend base1'
   pretxncommit 43f1ba15f28a50abf0aae529cf8a16bfced7b149
   43f1ba15f28a tip
-  saved backup bundle to $TESTTMP/.hg/strip-backup/489edb5b847d-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/489edb5b847d-f1bf3ab8-amend-backup.hg (glob)
   $ echo 'pretxncommit.foo = ' >> $HGRCPATH
   $ hg diff -c .
   diff -r ad120869acf0 -r 43f1ba15f28a a
@@ -86,7 +86,7 @@
 
 Add new file:
   $ hg ci --amend -m 'amend base1 new file'
-  saved backup bundle to $TESTTMP/.hg/strip-backup/43f1ba15f28a-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/43f1ba15f28a-7a3b3496-amend-backup.hg (glob)
 
 Remove file that was added in amended commit:
 (and test logfile option)
@@ -95,7 +95,7 @@
   $ hg rm b
   $ echo 'amend base1 remove new file' > ../logfile
   $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg ci --amend --logfile ../logfile
-  saved backup bundle to $TESTTMP/.hg/strip-backup/b8e3cb2b3882-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/b8e3cb2b3882-0b55739a-amend-backup.hg (glob)
 
   $ hg cat b
   b: no such file in rev 74609c7f506e
@@ -106,11 +106,22 @@
   $ hg ci -v --amend -m 'no changes, new message'
   amending changeset 74609c7f506e
   copying changeset 74609c7f506e to ad120869acf0
+  committing files:
   a
+  committing manifest
+  committing changelog
   stripping amended changeset 74609c7f506e
   1 changesets found
-  saved backup bundle to $TESTTMP/.hg/strip-backup/74609c7f506e-amend-backup.hg (glob)
+  uncompressed size of bundle content:
+       250 (changelog)
+       143 (manifests)
+       109  a
+  saved backup bundle to $TESTTMP/.hg/strip-backup/74609c7f506e-1bfde511-amend-backup.hg (glob)
   1 changesets found
+  uncompressed size of bundle content:
+       246 (changelog)
+       143 (manifests)
+       109  a
   adding branch
   adding changesets
   adding manifests
@@ -151,10 +162,10 @@
   > EOF
   $ HGEDITOR="sh .hg/checkeditform.sh" hg ci --amend -u foo -d '1 0'
   HGEDITFORM=commit.amend.normal
-  saved backup bundle to $TESTTMP/.hg/strip-backup/1cd866679df8-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/1cd866679df8-5f5bcb85-amend-backup.hg (glob)
   $ echo a >> a
   $ hg ci --amend -u foo -d '1 0'
-  saved backup bundle to $TESTTMP/.hg/strip-backup/780e6f23e03d-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/780e6f23e03d-83b10a27-amend-backup.hg (glob)
   $ hg log -r .
   changeset:   1:5f357c7560ab
   tag:         tip
@@ -182,7 +193,10 @@
   $ hg commit --amend -v -m "message given from command line"
   amending changeset 5f357c7560ab
   copying changeset 5f357c7560ab to ad120869acf0
+  committing files:
   a
+  committing manifest
+  committing changelog
   running hook pretxncommit.test-saving-last-message: false
   transaction abort!
   rollback completed
@@ -204,7 +218,10 @@
   HG: user: foo
   HG: branch 'default'
   HG: changed a
+  committing files:
   a
+  committing manifest
+  committing changelog
   running hook pretxncommit.test-saving-last-message: false
   transaction abort!
   rollback completed
@@ -233,11 +250,22 @@
   HG: user: foo
   HG: branch 'default'
   HG: changed a
+  committing files:
   a
+  committing manifest
+  committing changelog
   stripping amended changeset 5f357c7560ab
   1 changesets found
-  saved backup bundle to $TESTTMP/.hg/strip-backup/5f357c7560ab-amend-backup.hg (glob)
+  uncompressed size of bundle content:
+       238 (changelog)
+       143 (manifests)
+       111  a
+  saved backup bundle to $TESTTMP/.hg/strip-backup/5f357c7560ab-e7c84ade-amend-backup.hg (glob)
   1 changesets found
+  uncompressed size of bundle content:
+       246 (changelog)
+       143 (manifests)
+       111  a
   adding branch
   adding changesets
   adding manifests
@@ -250,7 +278,10 @@
   $ echo a >> a
   $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit --amend -v
   amending changeset 7ab3bf440b54
+  committing files:
   a
+  committing manifest
+  committing changelog
   copying changeset a0ea9b1a4c8c to ad120869acf0
   another precious commit message
   
@@ -261,12 +292,23 @@
   HG: user: foo
   HG: branch 'default'
   HG: changed a
+  committing files:
   a
+  committing manifest
+  committing changelog
   stripping intermediate changeset a0ea9b1a4c8c
   stripping amended changeset 7ab3bf440b54
   2 changesets found
-  saved backup bundle to $TESTTMP/.hg/strip-backup/7ab3bf440b54-amend-backup.hg (glob)
+  uncompressed size of bundle content:
+       450 (changelog)
+       282 (manifests)
+       209  a
+  saved backup bundle to $TESTTMP/.hg/strip-backup/7ab3bf440b54-8e3b5088-amend-backup.hg (glob)
   1 changesets found
+  uncompressed size of bundle content:
+       246 (changelog)
+       143 (manifests)
+       113  a
   adding branch
   adding changesets
   adding manifests
@@ -288,13 +330,13 @@
   $ hg book book1
   $ hg book book2
   $ hg ci --amend -m 'move bookmarks'
-  saved backup bundle to $TESTTMP/.hg/strip-backup/ea22a388757c-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/ea22a388757c-e51094db-amend-backup.hg (glob)
   $ hg book
      book1                     1:6cec5aa930e2
    * book2                     1:6cec5aa930e2
   $ echo a >> a
   $ hg ci --amend -m 'move bookmarks'
-  saved backup bundle to $TESTTMP/.hg/strip-backup/6cec5aa930e2-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/6cec5aa930e2-e9b06de4-amend-backup.hg (glob)
   $ hg book
      book1                     1:48bb6e53a15f
    * book2                     1:48bb6e53a15f
@@ -331,7 +373,7 @@
   marked working directory as branch default
   (branches are permanent and global, did you want a bookmark?)
   $ hg ci --amend -m 'back to default'
-  saved backup bundle to $TESTTMP/.hg/strip-backup/8ac881fbf49d-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/8ac881fbf49d-fd962fef-amend-backup.hg (glob)
   $ hg branches
   default                        2:ce12b0b57d46
 
@@ -347,7 +389,7 @@
   $ echo b >> b
   $ hg ci -mb
   $ hg ci --amend --close-branch -m 'closing branch foo'
-  saved backup bundle to $TESTTMP/.hg/strip-backup/c962248fa264-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/c962248fa264-6701c392-amend-backup.hg (glob)
 
 Same thing, different code path:
 
@@ -356,7 +398,7 @@
   reopening closed branch head 4
   $ echo b >> b
   $ hg ci --amend --close-branch
-  saved backup bundle to $TESTTMP/.hg/strip-backup/027371728205-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/027371728205-49c0c55d-amend-backup.hg (glob)
   $ hg branches
   default                        2:ce12b0b57d46
 
@@ -377,7 +419,7 @@
   $ hg ci -m 'b -> c'
   $ hg mv c d
   $ hg ci --amend -m 'b -> d'
-  saved backup bundle to $TESTTMP/.hg/strip-backup/b8c6eac7f12e-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/b8c6eac7f12e-adaaa8b1-amend-backup.hg (glob)
   $ hg st --rev '.^' --copies d
   A d
     b
@@ -385,7 +427,7 @@
   $ hg ci -m 'e = d'
   $ hg cp e f
   $ hg ci --amend -m 'f = d'
-  saved backup bundle to $TESTTMP/.hg/strip-backup/7f9761d65613-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/7f9761d65613-d37aa788-amend-backup.hg (glob)
   $ hg st --rev '.^' --copies f
   A f
     d
@@ -396,7 +438,7 @@
   $ hg cp a f
   $ mv f.orig f
   $ hg ci --amend -m replacef
-  saved backup bundle to $TESTTMP/.hg/strip-backup/9e8c5f7e3d95-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/9e8c5f7e3d95-90259f67-amend-backup.hg (glob)
   $ hg st --change . --copies
   $ hg log -r . --template "{file_copies}\n"
   
@@ -408,7 +450,7 @@
   adding g
   $ hg mv g h
   $ hg ci --amend
-  saved backup bundle to $TESTTMP/.hg/strip-backup/24aa8eacce2b-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/24aa8eacce2b-7059e0f1-amend-backup.hg (glob)
   $ hg st --change . --copies h
   A h
   $ hg log -r . --template "{file_copies}\n"
@@ -428,11 +470,11 @@
   $ echo a >> a
   $ hg ci -ma
   $ hg ci --amend -m "a'"
-  saved backup bundle to $TESTTMP/.hg/strip-backup/3837aa2a2fdb-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/3837aa2a2fdb-2be01fd1-amend-backup.hg (glob)
   $ hg log -r . --template "{branch}\n"
   a
   $ hg ci --amend -m "a''"
-  saved backup bundle to $TESTTMP/.hg/strip-backup/c05c06be7514-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/c05c06be7514-ed28c4cd-amend-backup.hg (glob)
   $ hg log -r . --template "{branch}\n"
   a
 
@@ -447,9 +489,9 @@
   $ hg up 11
   5 files updated, 0 files merged, 1 files removed, 0 files unresolved
   $ hg graft 12
-  grafting revision 12
+  grafting 12:2647734878ef "fork" (tip)
   $ hg ci --amend -m 'graft amend'
-  saved backup bundle to $TESTTMP/.hg/strip-backup/bd010aea3f39-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/bd010aea3f39-eedb103b-amend-backup.hg (glob)
   $ hg log -r . --debug | grep extra
   extra:       amend_source=bd010aea3f39f3fb2a2f884b9ccb0471cd77398e
   extra:       branch=a
@@ -805,7 +847,7 @@
   $ hg branch closewithamend
   marked working directory as branch closewithamend
   (branches are permanent and global, did you want a bookmark?)
-  $ touch foo
+  $ echo foo > foo
   $ hg add foo
   $ hg ci -m..
   $ hg ci --amend --close-branch -m 'closing'
@@ -842,6 +884,8 @@
   $ hg parents --template "{desc}\n"
   editor should be suppressed
 
+  $ hg status --rev '.^1::.'
+  A foo
   $ HGEDITOR=cat hg commit --amend -m "editor should be invoked" --edit
   editor should be invoked
   
@@ -851,10 +895,111 @@
   HG: --
   HG: user: test
   HG: branch 'silliness'
-  HG: changed foo
+  HG: added foo
   $ hg parents --template "{desc}\n"
   editor should be invoked
 
+Test that "diff()" in committemplate works correctly for amending
+-----------------------------------------------------------------
+
+  $ cat >> .hg/hgrc <<EOF
+  > [committemplate]
+  > changeset.commit.amend = {desc}\n
+  >     HG: M: {file_mods}
+  >     HG: A: {file_adds}
+  >     HG: R: {file_dels}
+  >     {splitlines(diff()) % 'HG: {line}\n'}
+  > EOF
+
+  $ hg parents --template "M: {file_mods}\nA: {file_adds}\nR: {file_dels}\n"
+  M: 
+  A: foo
+  R: 
+  $ hg status -amr
+  $ HGEDITOR=cat hg commit --amend -e -m "expecting diff of foo"
+  expecting diff of foo
+  
+  HG: M: 
+  HG: A: foo
+  HG: R: 
+  HG: diff -r 6de0c1bde1c8 foo
+  HG: --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+  HG: +++ b/foo	Thu Jan 01 00:00:00 1970 +0000
+  HG: @@ -0,0 +1,1 @@
+  HG: +foo
+
+  $ echo y > y
+  $ hg add y
+  $ HGEDITOR=cat hg commit --amend -e -m "expecting diff of foo and y"
+  expecting diff of foo and y
+  
+  HG: M: 
+  HG: A: foo y
+  HG: R: 
+  HG: diff -r 6de0c1bde1c8 foo
+  HG: --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+  HG: +++ b/foo	Thu Jan 01 00:00:00 1970 +0000
+  HG: @@ -0,0 +1,1 @@
+  HG: +foo
+  HG: diff -r 6de0c1bde1c8 y
+  HG: --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+  HG: +++ b/y	Thu Jan 01 00:00:00 1970 +0000
+  HG: @@ -0,0 +1,1 @@
+  HG: +y
+
+  $ hg rm a
+  $ HGEDITOR=cat hg commit --amend -e -m "expecting diff of a, foo and y"
+  expecting diff of a, foo and y
+  
+  HG: M: 
+  HG: A: foo y
+  HG: R: a
+  HG: diff -r 6de0c1bde1c8 a
+  HG: --- a/a	Thu Jan 01 00:00:00 1970 +0000
+  HG: +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
+  HG: @@ -1,2 +0,0 @@
+  HG: -a
+  HG: -a
+  HG: diff -r 6de0c1bde1c8 foo
+  HG: --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+  HG: +++ b/foo	Thu Jan 01 00:00:00 1970 +0000
+  HG: @@ -0,0 +1,1 @@
+  HG: +foo
+  HG: diff -r 6de0c1bde1c8 y
+  HG: --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+  HG: +++ b/y	Thu Jan 01 00:00:00 1970 +0000
+  HG: @@ -0,0 +1,1 @@
+  HG: +y
+
+  $ hg rm x
+  $ HGEDITOR=cat hg commit --amend -e -m "expecting diff of a, foo, x and y"
+  expecting diff of a, foo, x and y
+  
+  HG: M: 
+  HG: A: foo y
+  HG: R: a x
+  HG: diff -r 6de0c1bde1c8 a
+  HG: --- a/a	Thu Jan 01 00:00:00 1970 +0000
+  HG: +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
+  HG: @@ -1,2 +0,0 @@
+  HG: -a
+  HG: -a
+  HG: diff -r 6de0c1bde1c8 foo
+  HG: --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+  HG: +++ b/foo	Thu Jan 01 00:00:00 1970 +0000
+  HG: @@ -0,0 +1,1 @@
+  HG: +foo
+  HG: diff -r 6de0c1bde1c8 x
+  HG: --- a/x	Thu Jan 01 00:00:00 1970 +0000
+  HG: +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
+  HG: @@ -1,1 +0,0 @@
+  HG: -x
+  HG: diff -r 6de0c1bde1c8 y
+  HG: --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+  HG: +++ b/y	Thu Jan 01 00:00:00 1970 +0000
+  HG: @@ -0,0 +1,1 @@
+  HG: +y
+
 Check for issue4405
 -------------------
 
@@ -889,9 +1034,9 @@
 
 The way mercurial does amends is to create a temporary commit (rev 3) and then
 fold the new and old commits together into another commit (rev 4). During this
-process, findlimit is called to check how far back to look for the transitive
+process, _findlimit is called to check how far back to look for the transitive
 closure of file copy information, but due to the divergence of the filelog
-and changelog graph topologies, before findlimit was fixed, it returned a rev
+and changelog graph topologies, before _findlimit was fixed, it returned a rev
 which was not far enough back in this case.
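The "transitive closure of file copy information" here just means composing the
per-commit copy records along the walk back to the base revision. A toy
illustration of that composition in plain Python (this is not Mercurial's
_findlimit code, and the earlier a -> a1 step is hypothetical; only the a1 -> a2
rename comes from the commands below):

    def composecopies(older, newer):
        """Compose two copy maps, each {destination: source} for one commit,
        into the copy map seen across both commits.
        Files untouched by the newer commit are not handled here."""
        composed = {}
        for dst, src in newer.items():
            # follow the chain one step further back when the older commit
            # already recorded a copy for this source
            composed[dst] = older.get(src, src)
        return composed

    # an older commit copied a to a1 and the newer one renames a1 to a2;
    # walking back far enough must report a2 as ultimately copied from a
    print composecopies({'a1': 'a'}, {'a2': 'a1'})   # {'a2': 'a'}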
   $ hg mv a1 a2
   $ hg status --copies --rev 0
--- a/tests/test-commit-unresolved.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-commit-unresolved.t	Sat Jan 17 18:28:30 2015 -0800
@@ -35,7 +35,7 @@
 
   $ echo "ABCD" > A
   $ hg commit -m "Merged"
-  abort: unresolved merge conflicts (see hg help resolve)
+  abort: unresolved merge conflicts (see "hg help resolve")
   [255]
 
 Mark the conflict as resolved and commit
--- a/tests/test-commit.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-commit.t	Sat Jan 17 18:28:30 2015 -0800
@@ -50,7 +50,10 @@
   $ hg add
   adding dir/file (glob)
   $ hg -v commit -m commit-9 dir
+  committing files:
   dir/file
+  committing manifest
+  committing changelog
   committed changeset 2:d2a76177cb42
 
   $ echo > dir.file
@@ -71,7 +74,10 @@
   abort: dir2: no match under directory!
   [255]
   $ hg -v commit -m commit-13 ../dir
+  committing files:
   dir/file
+  committing manifest
+  committing changelog
   committed changeset 3:1cd62a2d8db5
   $ cd ..
 
@@ -92,7 +98,10 @@
   [255]
   $ echo >> dir/file
   $ hg -v commit -m commit-17 dir/file
+  committing files:
   dir/file
+  committing manifest
+  committing changelog
   committed changeset 4:49176991390e
 
 An empty date was interpreted as epoch origin
@@ -430,6 +439,84 @@
   [255]
 
   $ cat >> .hg/hgrc <<EOF
+  > [committemplate]
+  > changeset = {desc}
+  >     HG: files={files}
+  >     HG:
+  >     {splitlines(diff()) % 'HG: {line}\n'
+  >    }HG:
+  >     HG: files={files}\n
+  > EOF
+  $ hg status -amr
+  M changed
+  A added
+  R removed
+  $ HGEDITOR=cat hg commit -q -e -m "foo bar" changed
+  foo bar
+  HG: files=changed
+  HG:
+  HG: --- a/changed	Thu Jan 01 00:00:00 1970 +0000
+  HG: +++ b/changed	Thu Jan 01 00:00:00 1970 +0000
+  HG: @@ -1,1 +1,2 @@
+  HG:  changed
+  HG: +changed
+  HG:
+  HG: files=changed
+  $ hg status -amr
+  A added
+  R removed
+  $ hg parents --template "M {file_mods}\nA {file_adds}\nR {file_dels}\n"
+  M changed
+  A 
+  R 
+  $ hg rollback -q
+
+  $ cat >> .hg/hgrc <<EOF
+  > [committemplate]
+  > changeset = {desc}
+  >     HG: files={files}
+  >     HG:
+  >     {splitlines(diff("changed")) % 'HG: {line}\n'
+  >    }HG:
+  >     HG: files={files}
+  >     HG:
+  >     {splitlines(diff("added")) % 'HG: {line}\n'
+  >    }HG:
+  >     HG: files={files}
+  >     HG:
+  >     {splitlines(diff("removed")) % 'HG: {line}\n'
+  >    }HG:
+  >     HG: files={files}\n
+  > EOF
+  $ HGEDITOR=cat hg commit -q -e -m "foo bar" added removed
+  foo bar
+  HG: files=added removed
+  HG:
+  HG:
+  HG: files=added removed
+  HG:
+  HG: --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+  HG: +++ b/added	Thu Jan 01 00:00:00 1970 +0000
+  HG: @@ -0,0 +1,1 @@
+  HG: +added
+  HG:
+  HG: files=added removed
+  HG:
+  HG: --- a/removed	Thu Jan 01 00:00:00 1970 +0000
+  HG: +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
+  HG: @@ -1,1 +0,0 @@
+  HG: -removed
+  HG:
+  HG: files=added removed
+  $ hg status -amr
+  M changed
+  $ hg parents --template "M {file_mods}\nA {file_adds}\nR {file_dels}\n"
+  M 
+  A added
+  R removed
+  $ hg rollback -q
+
+  $ cat >> .hg/hgrc <<EOF
   > # disable customizing for subsequent tests
   > [committemplate]
   > changeset =
@@ -470,9 +557,15 @@
   > r.commitctx(c)
   > EOF
   $ $PYTHON evil-commit.py
+#if windows
+  $ hg co --clean tip
+  abort: path contains illegal component: .h\xe2\x80\x8cg\\hgrc (esc)
+  [255]
+#else
   $ hg co --clean tip
   abort: path contains illegal component: .h\xe2\x80\x8cg/hgrc (esc)
   [255]
+#endif
 
   $ hg rollback -f
   repository tip rolled back to revision 1 (undo commit)
@@ -489,7 +582,7 @@
   > EOF
   $ $PYTHON evil-commit.py
   $ hg co --clean tip
-  abort: path contains illegal component: HG~1/hgrc
+  abort: path contains illegal component: HG~1/hgrc (glob)
   [255]
 
   $ hg rollback -f
@@ -507,5 +600,5 @@
   > EOF
   $ $PYTHON evil-commit.py
   $ hg co --clean tip
-  abort: path contains illegal component: HG8B6C~2/hgrc
+  abort: path contains illegal component: HG8B6C~2/hgrc (glob)
   [255]
--- a/tests/test-completion.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-completion.t	Sat Jan 17 18:28:30 2015 -0800
@@ -90,6 +90,7 @@
   debugknown
   debuglabelcomplete
   debuglocks
+  debugnamecomplete
   debugobsolete
   debugpathcomplete
   debugpushkey
@@ -202,7 +203,7 @@
   annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, ignore-all-space, ignore-space-change, ignore-blank-lines, include, exclude, template
   clone: noupdate, updaterev, rev, branch, pull, uncompressed, ssh, remotecmd, insecure
   commit: addremove, close-branch, amend, secret, edit, include, exclude, message, logfile, date, user, subrepos
-  diff: rev, change, text, git, nodates, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, unified, stat, include, exclude, subrepos
+  diff: rev, change, text, git, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, unified, stat, include, exclude, subrepos
   export: output, switch-parent, rev, text, git, nodates
   forget: include, exclude
   init: ssh, remotecmd, insecure
@@ -210,14 +211,14 @@
   merge: force, rev, preview, tool
   pull: update, force, rev, bookmark, branch, ssh, remotecmd, insecure
   push: force, rev, bookmark, branch, new-branch, ssh, remotecmd, insecure
-  remove: after, force, include, exclude
+  remove: after, force, subrepos, include, exclude
   serve: accesslog, daemon, daemon-pipefds, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate
   status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, copies, print0, rev, change, include, exclude, subrepos, template
   summary: remote
   update: clean, check, date, rev, tool
-  addremove: similarity, include, exclude, dry-run
+  addremove: similarity, subrepos, include, exclude, dry-run
   archive: no-decode, prefix, rev, type, subrepos, include, exclude
-  backout: merge, parent, rev, edit, tool, include, exclude, message, logfile, date, user
+  backout: merge, commit, parent, rev, edit, tool, include, exclude, message, logfile, date, user
   bisect: reset, good, bad, skip, extend, command, noupdate
   bookmarks: force, rev, delete, rename, inactive, template
   branch: force, clean
@@ -247,6 +248,7 @@
   debugknown: 
   debuglabelcomplete: 
   debuglocks: force-lock, force-wlock
+  debugnamecomplete: 
   debugobsolete: flags, record-parents, rev, date, user
   debugpathcomplete: full, normal, added, removed
   debugpushkey: 
@@ -321,16 +323,16 @@
   $ hg debugpathcomplete -r F
   Fum
 
-Test debuglabelcomplete
+Test debugnamecomplete
 
-  $ hg debuglabelcomplete
+  $ hg debugnamecomplete
   Fum
   default
   fee
   fie
   fo
   tip
-  $ hg debuglabelcomplete f
+  $ hg debugnamecomplete f
   fee
   fie
   fo
--- a/tests/test-config.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-config.t	Sat Jan 17 18:28:30 2015 -0800
@@ -7,7 +7,7 @@
   > novaluekey
   > EOF
   $ hg showconfig
-  hg: parse error at $TESTTMP/.hg/hgrc:1: novaluekey
+  hg: parse error at $TESTTMP/.hg/hgrc:1: novaluekey (glob)
   [255]
 
 Invalid syntax: no key
@@ -16,7 +16,7 @@
   > =nokeyvalue
   > EOF
   $ hg showconfig
-  hg: parse error at $TESTTMP/.hg/hgrc:1: =nokeyvalue
+  hg: parse error at $TESTTMP/.hg/hgrc:1: =nokeyvalue (glob)
   [255]
 
 Test hint about invalid syntax from leading white space
@@ -25,7 +25,7 @@
   >  key=value
   > EOF
   $ hg showconfig
-  hg: parse error at $TESTTMP/.hg/hgrc:1:  key=value
+  hg: parse error at $TESTTMP/.hg/hgrc:1:  key=value (glob)
   unexpected leading whitespace
   [255]
 
@@ -34,7 +34,7 @@
   > key=value
   > EOF
   $ hg showconfig
-  hg: parse error at $TESTTMP/.hg/hgrc:1:  [section]
+  hg: parse error at $TESTTMP/.hg/hgrc:1:  [section] (glob)
   unexpected leading whitespace
   [255]
 
@@ -44,9 +44,11 @@
 
 Test case sensitive configuration
 
-  $ echo '[Section]' >> $HGRCPATH
-  $ echo 'KeY = Case Sensitive' >> $HGRCPATH
-  $ echo 'key = lower case' >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > [Section]
+  > KeY = Case Sensitive
+  > key = lower case
+  > EOF
 
   $ hg showconfig Section
   Section.KeY=Case Sensitive
--- a/tests/test-context.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-context.py	Sat Jan 17 18:28:30 2015 -0800
@@ -50,3 +50,89 @@
 
 for d in ctxb.diff(ctxa, git=True):
     print d
+
+# test safeness and correctness of "cxt.status()"
+print '= checking context.status():'
+
+# ancestor "wcctx ~ 2"
+actx2 = repo['.']
+
+repo.wwrite('bar-m', 'bar-m\n', '')
+repo.wwrite('bar-r', 'bar-r\n', '')
+repo[None].add(['bar-m', 'bar-r'])
+repo.commit(text='add bar-m, bar-r', date="0 0")
+
+# ancestor "wcctx ~ 1"
+actx1 = repo['.']
+
+repo.wwrite('bar-m', 'bar-m bar-m\n', '')
+repo.wwrite('bar-a', 'bar-a\n', '')
+repo[None].add(['bar-a'])
+repo[None].forget(['bar-r'])
+
+# status at this point:
+#   M bar-m
+#   A bar-a
+#   R bar-r
+#   C foo
+
+from mercurial import scmutil
+
+print '== checking workingctx.status:'
+
+wctx = repo[None]
+print 'wctx._status=%s' % (str(wctx._status))
+
+print '=== with "pattern match":'
+print actx1.status(other=wctx,
+                   match=scmutil.matchfiles(repo, ['bar-m', 'foo']))
+print 'wctx._status=%s' % (str(wctx._status))
+print actx2.status(other=wctx,
+                   match=scmutil.matchfiles(repo, ['bar-m', 'foo']))
+print 'wctx._status=%s' % (str(wctx._status))
+
+print '=== with "always match" and "listclean=True":'
+print actx1.status(other=wctx, listclean=True)
+print 'wctx._status=%s' % (str(wctx._status))
+print actx2.status(other=wctx, listclean=True)
+print 'wctx._status=%s' % (str(wctx._status))
+
+print "== checking workingcommitctx.status:"
+
+wcctx = context.workingcommitctx(repo,
+                                 scmutil.status(['bar-m'],
+                                                ['bar-a'],
+                                                [],
+                                                [], [], [], []),
+                                 text='', date='0 0')
+print 'wcctx._status=%s' % (str(wcctx._status))
+
+print '=== with "always match":'
+print actx1.status(other=wcctx)
+print 'wcctx._status=%s' % (str(wcctx._status))
+print actx2.status(other=wcctx)
+print 'wcctx._status=%s' % (str(wcctx._status))
+
+print '=== with "always match" and "listclean=True":'
+print actx1.status(other=wcctx, listclean=True)
+print 'wcctx._status=%s' % (str(wcctx._status))
+print actx2.status(other=wcctx, listclean=True)
+print 'wcctx._status=%s' % (str(wcctx._status))
+
+print '=== with "pattern match":'
+print actx1.status(other=wcctx,
+                   match=scmutil.matchfiles(repo, ['bar-m', 'foo']))
+print 'wcctx._status=%s' % (str(wcctx._status))
+print actx2.status(other=wcctx,
+                   match=scmutil.matchfiles(repo, ['bar-m', 'foo']))
+print 'wcctx._status=%s' % (str(wcctx._status))
+
+print '=== with "pattern match" and "listclean=True":'
+print actx1.status(other=wcctx,
+                   match=scmutil.matchfiles(repo, ['bar-r', 'foo']),
+                   listclean=True)
+print 'wcctx._status=%s' % (str(wcctx._status))
+print actx2.status(other=wcctx,
+                   match=scmutil.matchfiles(repo, ['bar-r', 'foo']),
+                   listclean=True)
+print 'wcctx._status=%s' % (str(wcctx._status))
--- a/tests/test-context.py.out	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-context.py.out	Sat Jan 17 18:28:30 2015 -0800
@@ -11,3 +11,38 @@
  foo
 +bar
 
+= checking context.status():
+== checking workingctx.status:
+wctx._status=<status modified=['bar-m'], added=['bar-a'], removed=['bar-r'], deleted=[], unknown=[], ignored=[], clean=[]>
+=== with "pattern match":
+<status modified=['bar-m'], added=[], removed=[], deleted=[], unknown=[], ignored=[], clean=[]>
+wctx._status=<status modified=['bar-m'], added=['bar-a'], removed=['bar-r'], deleted=[], unknown=[], ignored=[], clean=[]>
+<status modified=[], added=['bar-m'], removed=[], deleted=[], unknown=[], ignored=[], clean=[]>
+wctx._status=<status modified=['bar-m'], added=['bar-a'], removed=['bar-r'], deleted=[], unknown=[], ignored=[], clean=[]>
+=== with "always match" and "listclean=True":
+<status modified=['bar-m'], added=['bar-a'], removed=['bar-r'], deleted=[], unknown=[], ignored=[], clean=['foo']>
+wctx._status=<status modified=['bar-m'], added=['bar-a'], removed=['bar-r'], deleted=[], unknown=[], ignored=[], clean=[]>
+<status modified=[], added=['bar-a', 'bar-m'], removed=[], deleted=[], unknown=[], ignored=[], clean=['foo']>
+wctx._status=<status modified=['bar-m'], added=['bar-a'], removed=['bar-r'], deleted=[], unknown=[], ignored=[], clean=[]>
+== checking workingcommitctx.status:
+wcctx._status=<status modified=['bar-m'], added=['bar-a'], removed=[], deleted=[], unknown=[], ignored=[], clean=[]>
+=== with "always match":
+<status modified=['bar-m'], added=['bar-a'], removed=[], deleted=[], unknown=[], ignored=[], clean=[]>
+wcctx._status=<status modified=['bar-m'], added=['bar-a'], removed=[], deleted=[], unknown=[], ignored=[], clean=[]>
+<status modified=[], added=['bar-a', 'bar-m', 'bar-r'], removed=[], deleted=[], unknown=[], ignored=[], clean=[]>
+wcctx._status=<status modified=['bar-m'], added=['bar-a'], removed=[], deleted=[], unknown=[], ignored=[], clean=[]>
+=== with "always match" and "listclean=True":
+<status modified=['bar-m'], added=['bar-a'], removed=[], deleted=[], unknown=[], ignored=[], clean=['bar-r', 'foo']>
+wcctx._status=<status modified=['bar-m'], added=['bar-a'], removed=[], deleted=[], unknown=[], ignored=[], clean=[]>
+<status modified=[], added=['bar-a', 'bar-m', 'bar-r'], removed=[], deleted=[], unknown=[], ignored=[], clean=['foo']>
+wcctx._status=<status modified=['bar-m'], added=['bar-a'], removed=[], deleted=[], unknown=[], ignored=[], clean=[]>
+=== with "pattern match":
+<status modified=['bar-m'], added=[], removed=[], deleted=[], unknown=[], ignored=[], clean=[]>
+wcctx._status=<status modified=['bar-m'], added=['bar-a'], removed=[], deleted=[], unknown=[], ignored=[], clean=[]>
+<status modified=[], added=['bar-m'], removed=[], deleted=[], unknown=[], ignored=[], clean=[]>
+wcctx._status=<status modified=['bar-m'], added=['bar-a'], removed=[], deleted=[], unknown=[], ignored=[], clean=[]>
+=== with "pattern match" and "listclean=True":
+<status modified=[], added=[], removed=[], deleted=[], unknown=[], ignored=[], clean=['bar-r', 'foo']>
+wcctx._status=<status modified=['bar-m'], added=['bar-a'], removed=[], deleted=[], unknown=[], ignored=[], clean=[]>
+<status modified=[], added=['bar-r'], removed=[], deleted=[], unknown=[], ignored=[], clean=['foo']>
+wcctx._status=<status modified=['bar-m'], added=['bar-a'], removed=[], deleted=[], unknown=[], ignored=[], clean=[]>
--- a/tests/test-convert-clonebranches.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-convert-clonebranches.t	Sat Jan 17 18:28:30 2015 -0800
@@ -1,8 +1,10 @@
 
-  $ echo "[extensions]" >> $HGRCPATH
-  $ echo "convert = " >> $HGRCPATH
-  $ echo "[convert]" >> $HGRCPATH
-  $ echo "hg.tagsbranch=0" >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > [extensions]
+  > convert =
+  > [convert]
+  > hg.tagsbranch = 0
+  > EOF
   $ hg init source
   $ cd source
   $ echo a > a
--- a/tests/test-convert-cvs-branch.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-convert-cvs-branch.t	Sat Jan 17 18:28:30 2015 -0800
@@ -7,10 +7,12 @@
   > {
   >     cvs -f "$@" > /dev/null
   > }
-  $ echo "[extensions]" >> $HGRCPATH
-  $ echo "convert = " >> $HGRCPATH
-  $ echo "[convert]" >> $HGRCPATH
-  $ echo "cvsps.cache=0" >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > [extensions]
+  > convert =
+  > [convert]
+  > cvsps.cache = 0
+  > EOF
 
 create cvs repository
 
@@ -21,6 +23,7 @@
   $ CVS_OPTIONS=-f
   $ export CVS_OPTIONS
   $ cd ..
+  $ rmdir cvsrepo
   $ cvscall -q -d "$CVSROOT" init
 
 Create a new project
--- a/tests/test-convert-cvs-detectmerge.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-convert-cvs-detectmerge.t	Sat Jan 17 18:28:30 2015 -0800
@@ -23,15 +23,16 @@
 
 XXX copied from test-convert-cvs-synthetic
 
-  $ echo "[extensions]" >> $HGRCPATH
-  $ echo "convert = " >> $HGRCPATH
-  $ echo "[convert]" >> $HGRCPATH
-  $ echo "cvsps.cache=0" >> $HGRCPATH
-  $ echo "cvsps.mergefrom=\[MERGE from (\S+)\]" >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > [extensions]
+  > convert =
+  > [convert]
+  > cvsps.cache = 0
+  > cvsps.mergefrom = \[MERGE from (\S+)\]
+  > EOF
 
 create cvs repository with one project
 
-  $ mkdir cvsrepo
   $ cvscall -q -d "$CVSROOT" init
   $ mkdir cvsrepo/proj
 
--- a/tests/test-convert-cvs-synthetic.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-convert-cvs-synthetic.t	Sat Jan 17 18:28:30 2015 -0800
@@ -14,6 +14,7 @@
   $ CVS_OPTIONS=-f
   $ export CVS_OPTIONS
   $ cd ..
+  $ rmdir cvsrepo
   $ cvscall()
   > {
   >     cvs -f "$@"
--- a/tests/test-convert-cvs.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-convert-cvs.t	Sat Jan 17 18:28:30 2015 -0800
@@ -18,9 +18,11 @@
   >     print "%s hook: %d changesets"%(hooktype,len(changesets))
   > EOF
   $ hookpath=`pwd`
-  $ echo "[hooks]" >> $HGRCPATH
-  $ echo "cvslog=python:$hookpath/cvshooks.py:cvslog" >> $HGRCPATH
-  $ echo "cvschangesets=python:$hookpath/cvshooks.py:cvschangesets" >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > [hooks]
+  > cvslog = python:$hookpath/cvshooks.py:cvslog
+  > cvschangesets = python:$hookpath/cvshooks.py:cvschangesets
+  > EOF
 
 create cvs repository
 
@@ -31,6 +33,7 @@
   $ CVS_OPTIONS=-f
   $ export CVS_OPTIONS
   $ cd ..
+  $ rmdir cvsrepo
   $ cvscall -q -d "$CVSROOT" init
 
 create source directory
--- a/tests/test-convert-cvsnt-mergepoints.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-convert-cvsnt-mergepoints.t	Sat Jan 17 18:28:30 2015 -0800
@@ -32,6 +32,7 @@
   $ CVS_OPTIONS=-f
   $ export CVS_OPTIONS
   $ cd ..
+  $ rmdir cvsmaster
   $ filterpath cvscall -Q -d "$CVSROOT" init
 
 checkout #1: add foo.txt
--- a/tests/test-convert-filemap.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-convert-filemap.t	Sat Jan 17 18:28:30 2015 -0800
@@ -637,7 +637,7 @@
   $ cd namedbranch
   $ hg --config extensions.mq= strip tip
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  saved backup bundle to $TESTTMP/namedbranch/.hg/strip-backup/73899bcbe45c-backup.hg (glob)
+  saved backup bundle to $TESTTMP/namedbranch/.hg/strip-backup/73899bcbe45c-92adf160-backup.hg (glob)
   $ hg up foo
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg merge default
--- a/tests/test-convert-hg-svn.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-convert-hg-svn.t	Sat Jan 17 18:28:30 2015 -0800
@@ -1,8 +1,10 @@
 #require svn svn-bindings
 
-  $ echo "[extensions]" >> $HGRCPATH
-  $ echo "convert = " >> $HGRCPATH
-  $ echo "mq = " >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > [extensions]
+  > convert =
+  > mq =
+  > EOF
 
   $ SVNREPOPATH=`pwd`/svn-repo
 #if windows
--- a/tests/test-convert-svn-encoding.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-convert-svn-encoding.t	Sat Jan 17 18:28:30 2015 -0800
@@ -52,6 +52,8 @@
   5 init projA
   source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/trunk@1
   converting: 0/6 revisions (0.00%)
+  committing changelog
+  couldn't read revision branch cache names: * (glob)
   4 hello
   source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/trunk@2
   converting: 1/6 revisions (16.67%)
@@ -59,10 +61,13 @@
   scanning paths: /trunk/\xc3\xa0 0/3 (0.00%) (esc)
   scanning paths: /trunk/\xc3\xa0/e\xcc\x81 1/3 (33.33%) (esc)
   scanning paths: /trunk/\xc3\xa9 2/3 (66.67%) (esc)
+  committing files:
   \xc3\xa0/e\xcc\x81 (esc)
   getting files: \xc3\xa0/e\xcc\x81 1/2 (50.00%) (esc)
   \xc3\xa9 (esc)
   getting files: \xc3\xa9 2/2 (100.00%) (esc)
+  committing manifest
+  committing changelog
   3 copy files
   source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/trunk@3
   converting: 2/6 revisions (33.33%)
@@ -78,16 +83,17 @@
   reparent to file://*/svn-repo/trunk (glob)
   scanning paths: /trunk/\xc3\xb9 3/4 (75.00%) (esc)
   mark /trunk/\xc3\xb9 came from \xc3\xa0:2 (esc)
-  \xc3\xa0/e\xcc\x81 (esc)
   getting files: \xc3\xa0/e\xcc\x81 1/4 (25.00%) (esc)
+  getting files: \xc3\xa9 2/4 (50.00%) (esc)
+  committing files:
   \xc3\xa8 (esc)
-  getting files: \xc3\xa8 2/4 (50.00%) (esc)
+  getting files: \xc3\xa8 3/4 (75.00%) (esc)
    \xc3\xa8: copy \xc3\xa9:6b67ccefd5ce6de77e7ead4f5292843a0255329f (esc)
-  \xc3\xa9 (esc)
-  getting files: \xc3\xa9 3/4 (75.00%) (esc)
   \xc3\xb9/e\xcc\x81 (esc)
   getting files: \xc3\xb9/e\xcc\x81 4/4 (100.00%) (esc)
    \xc3\xb9/e\xcc\x81: copy \xc3\xa0/e\xcc\x81:a9092a3d84a37b9993b5c73576f6de29b7ea50f6 (esc)
+  committing manifest
+  committing changelog
   2 remove files
   source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/trunk@4
   converting: 3/6 revisions (50.00%)
@@ -99,26 +105,32 @@
   gone from -1
   reparent to file://*/svn-repo (glob)
   reparent to file://*/svn-repo/trunk (glob)
-  \xc3\xa8 (esc)
   getting files: \xc3\xa8 1/2 (50.00%) (esc)
-  \xc3\xb9/e\xcc\x81 (esc)
   getting files: \xc3\xb9/e\xcc\x81 2/2 (100.00%) (esc)
+  committing files:
+  committing manifest
+  committing changelog
   1 branch to branch?
   source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/branches/branch?@5
   converting: 4/6 revisions (66.67%)
   reparent to file://*/svn-repo/branches/branch%C3%A9 (glob)
   scanning paths: /branches/branch\xc3\xa9 0/1 (0.00%) (esc)
+  committing changelog
   0 branch to branch?e
   source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/branches/branch?e@6
   converting: 5/6 revisions (83.33%)
   reparent to file://*/svn-repo/branches/branch%C3%A9e (glob)
   scanning paths: /branches/branch\xc3\xa9e 0/1 (0.00%) (esc)
+  committing changelog
   reparent to file://*/svn-repo (glob)
   reparent to file://*/svn-repo/branches/branch%C3%A9e (glob)
   reparent to file://*/svn-repo (glob)
   reparent to file://*/svn-repo/branches/branch%C3%A9e (glob)
   updating tags
+  committing files:
   .hgtags
+  committing manifest
+  committing changelog
   run hg sink post-conversion action
   $ cd A-hg
   $ hg up
--- a/tests/test-convert-svn-sink.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-convert-svn-sink.t	Sat Jan 17 18:28:30 2015 -0800
@@ -256,7 +256,7 @@
   $ hg --cwd a up 5
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
   $ hg --cwd a --config extensions.strip= strip -r 6
-  saved backup bundle to $TESTTMP/a/.hg/strip-backup/bd4f7b7a7067-backup.hg (glob)
+  saved backup bundle to $TESTTMP/a/.hg/strip-backup/bd4f7b7a7067-ed505e42-backup.hg (glob)
 
 #endif
 
--- a/tests/test-convert-svn-source.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-convert-svn-source.t	Sat Jan 17 18:28:30 2015 -0800
@@ -239,3 +239,16 @@
   converting...
   1 init projA
   0 adddir
+
+Test that a too-new repository format is properly rejected:
+  $ mv svn-empty/format format
+  $ echo 999 > svn-empty/format
+It's important that this command explicitly specify svn; otherwise it
+can have surprising side effects (like falling back to a Perforce
+depot that can be seen from the test environment and slurping from that).
+  $ hg convert --source-type svn svn-empty this-will-fail
+  initializing destination this-will-fail repository
+  file:/*/$TESTTMP/svn-empty does not look like a Subversion repository to libsvn version 1.*.* (glob)
+  abort: svn-empty: missing or unsupported repository
+  [255]
+  $ mv format svn-empty/format
--- a/tests/test-convert-tagsbranch-topology.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-convert-tagsbranch-topology.t	Sat Jan 17 18:28:30 2015 -0800
@@ -4,11 +4,13 @@
   $ echo "autocrlf = false" >> $HOME/.gitconfig
   $ echo "[core]" >> $HOME/.gitconfig
   $ echo "autocrlf = false" >> $HOME/.gitconfig
-  $ echo "[extensions]" >> $HGRCPATH
-  $ echo "convert=" >> $HGRCPATH
-  $ echo '[convert]' >> $HGRCPATH
-  $ echo 'hg.usebranchnames = True' >> $HGRCPATH
-  $ echo 'hg.tagsbranch = tags-update' >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > [extensions]
+  > convert =
+  > [convert]
+  > hg.usebranchnames = True
+  > hg.tagsbranch = tags-update
+  > EOF
   $ GIT_AUTHOR_NAME='test'; export GIT_AUTHOR_NAME
   $ GIT_AUTHOR_EMAIL='test@example.org'; export GIT_AUTHOR_EMAIL
   $ GIT_AUTHOR_DATE="2007-01-01 00:00:00 +0000"; export GIT_AUTHOR_DATE
--- a/tests/test-copy.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-copy.t	Sat Jan 17 18:28:30 2015 -0800
@@ -20,8 +20,11 @@
   commit: 1 copied
   update: (current)
   $ hg --debug commit -m "2"
+  committing files:
   b
    b: copy a:b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3
+  committing manifest
+  committing changelog
   committed changeset 1:93580a2c28a50a56f63526fb305067e6fbf739c4
 
 we should see two history entries
@@ -184,7 +187,7 @@
      rev    offset  length  ..... linkrev nodeid       p1           p2 (re)
        0         0      69  .....       1 7711d36246cc 000000000000 000000000000 (re)
        1        69       6  .....       2 bdf70a2b8d03 7711d36246cc 000000000000 (re)
-       2        75      81  .....       3 b2558327ea8d 000000000000 000000000000 (re)
+       2        75      71  .....       3 b2558327ea8d 000000000000 000000000000 (re)
 should match
   $ hg debugindex foo
      rev    offset  length  ..... linkrev nodeid       p1           p2 (re)
--- a/tests/test-debugbundle.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-debugbundle.t	Sat Jan 17 18:28:30 2015 -0800
@@ -6,8 +6,13 @@
   $ touch a ; hg add a ; hg ci -ma
   $ touch b ; hg add b ; hg ci -mb
   $ touch c ; hg add c ; hg ci -mc
-  $ hg bundle --base 0 --rev tip bundle.hg
+  $ hg bundle --base 0 --rev tip bundle.hg -v
   2 changesets found
+  uncompressed size of bundle content:
+       332 (changelog)
+       282 (manifests)
+       105  b
+       105  c
 
 Terse output:
 
--- a/tests/test-debugcommands.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-debugcommands.t	Sat Jan 17 18:28:30 2015 -0800
@@ -24,6 +24,55 @@
   full revision size (min/max/avg)     : 44 / 44 / 44
   delta size (min/max/avg)             : 0 / 0 / 0
 
+Test debugindex, with and without the --debug flag
+  $ hg debugindex a
+     rev    offset  length   .... linkrev nodeid       p1           p2 (re)
+       0         0       3   ....       0 b789fdd96dc2 000000000000 000000000000 (re)
+  $ hg --debug debugindex a
+     rev    offset  length   .... linkrev nodeid                                   p1                                       p2 (re)
+       0         0       3   ....       0 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 (re)
+  $ hg debugindex -f 1 a
+     rev flag   offset   length     size   ....   link     p1     p2       nodeid (re)
+       0 0000        0        3        2   ....      0     -1     -1 b789fdd96dc2 (re)
+  $ hg --debug debugindex -f 1 a
+     rev flag   offset   length     size   ....   link     p1     p2                                   nodeid (re)
+       0 0000        0        3        2   ....      0     -1     -1 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 (re)
+
+
+Test max chain len
+  $ cat >> $HGRCPATH << EOF
+  > [format]
+  > maxchainlen=4
+  > EOF
+
+  $ printf "This test checks if maxchainlen config value is respected also it can serve as basic test for debugrevlog -d <file>.\n" >> a
+  $ hg ci -m a
+  $ printf "b\n" >> a
+  $ hg ci -m a
+  $ printf "c\n" >> a
+  $ hg ci -m a
+  $ printf "d\n" >> a
+  $ hg ci -m a
+  $ printf "e\n" >> a
+  $ hg ci -m a
+  $ printf "f\n" >> a
+  $ hg ci -m a
+  $ printf 'g\n' >> a
+  $ hg ci -m a
+  $ printf 'h\n' >> a
+  $ hg ci -m a
+  $ hg debugrevlog -d a
+  # rev p1rev p2rev start   end deltastart base   p1   p2 rawsize totalsize compression heads chainlen
+      0    -1    -1     0   ???          0    0    0    0     ???      ????           ?     1        0 (glob)
+      1     0    -1   ???   ???          0    0    0    0     ???      ????           ?     1        1 (glob)
+      2     1    -1   ???   ???        ???  ???  ???    0     ???      ????           ?     1        2 (glob)
+      3     2    -1   ???   ???        ???  ???  ???    0     ???      ????           ?     1        3 (glob)
+      4     3    -1   ???   ???        ???  ???  ???    0     ???      ????           ?     1        4 (glob)
+      5     4    -1   ???   ???        ???  ???  ???    0     ???      ????           ?     1        0 (glob)
+      6     5    -1   ???   ???        ???  ???  ???    0     ???      ????           ?     1        1 (glob)
+      7     6    -1   ???   ???        ???  ???  ???    0     ???      ????           ?     1        2 (glob)
+      8     7    -1   ???   ???        ???  ???  ???    0     ???      ????           ?     1        3 (glob)
+  $ cd ..
 
 Test internal debugstacktrace command
 
--- a/tests/test-diff-color.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-diff-color.t	Sat Jan 17 18:28:30 2015 -0800
@@ -1,9 +1,11 @@
 Setup
 
-  $ echo "[color]" >> $HGRCPATH
-  $ echo "mode = ansi" >> $HGRCPATH
-  $ echo "[extensions]" >> $HGRCPATH
-  $ echo "color=" >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > [color]
+  > mode = ansi
+  > [extensions]
+  > color =
+  > EOF
   $ hg init repo
   $ cd repo
   $ cat > a <<EOF
@@ -66,11 +68,13 @@
   $ hg diff --stat --color=always
    a |  2 \x1b[0;32m+\x1b[0m\x1b[0;31m-\x1b[0m (esc)
    1 files changed, 1 insertions(+), 1 deletions(-)
-  $ echo "record=" >> $HGRCPATH
-  $ echo "[ui]" >> $HGRCPATH
-  $ echo "interactive=true" >> $HGRCPATH
-  $ echo "[diff]" >> $HGRCPATH
-  $ echo "git=True" >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > record =
+  > [ui]
+  > interactive = true
+  > [diff]
+  > git = True
+  > EOF
 
 #if execbit
 
--- a/tests/test-diff-unified.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-diff-unified.t	Sat Jan 17 18:28:30 2015 -0800
@@ -89,6 +89,64 @@
   abort: diff context lines count must be an integer, not 'foo'
   [255]
 
+noprefix config and option
+
+  $ hg --config diff.noprefix=True diff --nodates
+  diff -r cf9f4ba66af2 a
+  --- a
+  +++ a
+  @@ -2,7 +2,7 @@
+   c
+   a
+   a
+  -b
+  +dd
+   a
+   a
+   c
+  $ hg diff --noprefix --nodates
+  diff -r cf9f4ba66af2 a
+  --- a
+  +++ a
+  @@ -2,7 +2,7 @@
+   c
+   a
+   a
+  -b
+  +dd
+   a
+   a
+   c
+
+noprefix config disabled in plain mode, but option still enabled
+
+  $ HGPLAIN=1 hg --config diff.noprefix=True diff --nodates
+  diff -r cf9f4ba66af2 a
+  --- a/a
+  +++ b/a
+  @@ -2,7 +2,7 @@
+   c
+   a
+   a
+  -b
+  +dd
+   a
+   a
+   c
+  $ HGPLAIN=1 hg diff --noprefix --nodates
+  diff -r cf9f4ba66af2 a
+  --- a
+  +++ a
+  @@ -2,7 +2,7 @@
+   c
+   a
+   a
+  -b
+  +dd
+   a
+   a
+   c
+
   $ cd ..
 
 
@@ -171,6 +229,39 @@
   -a
   +b
 
+Git diff with noprefix
+
+  $ hg --config diff.noprefix=True diff --git --nodates
+  diff --git f1 f 1
+  rename from f1
+  rename to f 1
+  --- f1
+  +++ f 1	
+  @@ -1,1 +1,1 @@
+  -a
+  +b
+
+noprefix config disabled in plain mode, but option still enabled
+
+  $ HGPLAIN=1 hg --config diff.noprefix=True diff --git --nodates
+  diff --git a/f1 b/f 1
+  rename from f1
+  rename to f 1
+  --- a/f1
+  +++ b/f 1	
+  @@ -1,1 +1,1 @@
+  -a
+  +b
+  $ HGPLAIN=1 hg diff --git --noprefix --nodates
+  diff --git f1 f 1
+  rename from f1
+  rename to f 1
+  --- f1
+  +++ f 1	
+  @@ -1,1 +1,1 @@
+  -a
+  +b
+
 Regular diff --nodates, file deletion
 
   $ hg ci -m addspace
--- a/tests/test-diff-upgrade.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-diff-upgrade.t	Sat Jan 17 18:28:30 2015 -0800
@@ -1,9 +1,11 @@
 #require execbit
 
-  $ echo "[extensions]" >> $HGRCPATH
-  $ echo "autodiff=$TESTDIR/autodiff.py" >> $HGRCPATH
-  $ echo "[diff]" >> $HGRCPATH
-  $ echo "nodates=1" >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > [extensions]
+  > autodiff = $TESTDIR/autodiff.py
+  > [diff]
+  > nodates = 1
+  > EOF
 
   $ hg init repo
   $ cd repo
--- a/tests/test-eol.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-eol.t	Sat Jan 17 18:28:30 2015 -0800
@@ -408,10 +408,12 @@
 
 Test cleverencode: and cleverdecode: aliases for win32text extension
 
-  $ echo '[encode]' >> $HGRCPATH
-  $ echo '**.txt = cleverencode:' >> $HGRCPATH
-  $ echo '[decode]' >> $HGRCPATH
-  $ echo '**.txt = cleverdecode:' >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > [encode]
+  > **.txt = cleverencode:
+  > [decode]
+  > **.txt = cleverdecode:
+  > EOF
 
   $ hg init win32compat
   $ cd win32compat
--- a/tests/test-eolfilename.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-eolfilename.t	Sat Jan 17 18:28:30 2015 -0800
@@ -59,10 +59,12 @@
 
   $ hg init bar
   $ cd bar
-  $ echo "[extensions]" >> $HGRCPATH
-  $ echo "color=" >> $HGRCPATH
-  $ echo "[color]" >> $HGRCPATH
-  $ echo "mode = ansi" >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > [extensions]
+  > color =
+  > [color]
+  > mode = ansi
+  > EOF
   $ A=`printf 'foo\nbar'`
   $ B=`printf 'foo\nbar.baz'`
   $ touch "$A"
--- a/tests/test-export.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-export.t	Sat Jan 17 18:28:30 2015 -0800
@@ -176,10 +176,12 @@
   [255]
 
 Check for color output
-  $ echo "[color]" >> $HGRCPATH
-  $ echo "mode = ansi" >> $HGRCPATH
-  $ echo "[extensions]" >> $HGRCPATH
-  $ echo "color=" >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > [color]
+  > mode = ansi
+  > [extensions]
+  > color =
+  > EOF
 
   $ hg export --color always --nodates tip
   # HG changeset patch
--- a/tests/test-extdiff.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-extdiff.t	Sat Jan 17 18:28:30 2015 -0800
@@ -16,11 +16,13 @@
   Only in a: b
   [1]
 
-  $ echo "[extdiff]" >> $HGRCPATH
-  $ echo "cmd.falabala=echo" >> $HGRCPATH
-  $ echo "opts.falabala=diffing" >> $HGRCPATH
-  $ echo "cmd.edspace=echo" >> $HGRCPATH
-  $ echo 'opts.edspace="name  <user@example.com>"' >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > [extdiff]
+  > cmd.falabala = echo
+  > opts.falabala = diffing
+  > cmd.edspace = echo
+  > opts.edspace = "name  <user@example.com>"
+  > EOF
 
   $ hg falabala
   diffing a.000000000000 a
@@ -92,6 +94,72 @@
   diffing */extdiff.*/a.2a13a4d2da36/a a.46c0e4daeb72/a (glob)
   diff-like tools yield a non-zero exit code
 
+issue4463: usage of command line configuration without additional quoting
+
+  $ cat <<EOF >> $HGRCPATH
+  > [extdiff]
+  > cmd.4463a = echo
+  > opts.4463a = a-naked 'single quoted' "double quoted"
+  > 4463b = echo b-naked 'single quoted' "double quoted"
+  > echo =
+  > EOF
+  $ hg update -q -C 0
+  $ echo a >> a
+#if windows
+  $ hg --debug 4463a | grep '^running'
+  running 'echo a-naked \'single quoted\' "double quoted" *\\a *\\a' in */extdiff.* (glob)
+  $ hg --debug 4463b | grep '^running'
+  running 'echo b-naked \'single quoted\' "double quoted" *\\a *\\a' in */extdiff.* (glob)
+  $ hg --debug echo | grep '^running'
+  running '*echo* *\\a *\\a' in */extdiff.* (glob)
+#else
+  $ hg --debug 4463a | grep '^running'
+  running 'echo a-naked \'single quoted\' "double quoted" */a $TESTTMP/a/a' in */extdiff.* (glob)
+  $ hg --debug 4463b | grep '^running'
+  running 'echo b-naked \'single quoted\' "double quoted" */a $TESTTMP/a/a' in */extdiff.* (glob)
+  $ hg --debug echo | grep '^running'
+  running '*echo */a $TESTTMP/a/a' in */extdiff.* (glob)
+#endif
+
+(getting options from sections other than extdiff)
+
+  $ cat <<EOF >> $HGRCPATH
+  > [extdiff]
+  > # using diff-tools diffargs
+  > 4463b2 = echo
+  > # using merge-tools diffargs
+  > 4463b3 = echo
+  > # no diffargs
+  > 4463b4 = echo
+  > [diff-tools]
+  > 4463b2.diffargs = b2-naked 'single quoted' "double quoted"
+  > [merge-tools]
+  > 4463b3.diffargs = b3-naked 'single quoted' "double quoted"
+  > EOF
+#if windows
+  $ hg --debug 4463b2 | grep '^running'
+  running 'echo b2-naked \'single quoted\' "double quoted" *\\a *\\a' in */extdiff.* (glob)
+  $ hg --debug 4463b3 | grep '^running'
+  running 'echo b3-naked \'single quoted\' "double quoted" *\\a *\\a' in */extdiff.* (glob)
+  $ hg --debug 4463b4 | grep '^running'
+  running 'echo *\\a *\\a' in */extdiff.* (glob)
+  $ hg --debug 4463b4 --option b4-naked --option 'being quoted' | grep '^running'
+  running 'echo b4-naked "being quoted" *\\a *\\a' in */extdiff.* (glob)
+  $ hg --debug extdiff -p echo --option echo-naked --option 'being quoted' | grep '^running'
+  running 'echo echo-naked "being quoted" *\\a *\\a' in */extdiff.* (glob)
+#else
+  $ hg --debug 4463b2 | grep '^running'
+  running 'echo b2-naked \'single quoted\' "double quoted" */a $TESTTMP/a/a' in */extdiff.* (glob)
+  $ hg --debug 4463b3 | grep '^running'
+  running 'echo b3-naked \'single quoted\' "double quoted" */a $TESTTMP/a/a' in */extdiff.* (glob)
+  $ hg --debug 4463b4 | grep '^running'
+  running 'echo */a $TESTTMP/a/a' in */extdiff.* (glob)
+  $ hg --debug 4463b4 --option b4-naked --option 'being quoted' | grep '^running'
+  running "echo b4-naked 'being quoted' */a $TESTTMP/a/a" in */extdiff.* (glob)
+  $ hg --debug extdiff -p echo --option echo-naked --option 'being quoted' | grep '^running'
+  running "echo echo-naked 'being quoted' */a $TESTTMP/a/a" in */extdiff.* (glob)
+#endif
+
 #if execbit
 
 Test extdiff of multiple files in tmp dir:
@@ -190,6 +258,26 @@
   */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob)
   [1]
 
+Fallback to merge-tools.tool.executable|regkey
+  $ mkdir dir
+  $ cat > 'dir/tool.sh' << EOF
+  > #!/bin/sh
+  > echo "** custom diff **"
+  > EOF
+  $ chmod +x dir/tool.sh
+  $ tool=`pwd`/dir/tool.sh
+  $ hg --debug tl --config extdiff.tl= --config merge-tools.tl.executable=$tool
+  making snapshot of 2 files from rev * (glob)
+    a
+    b
+  making snapshot of 2 files from working directory
+    a
+    b
+  running '$TESTTMP/a/dir/tool.sh a.* a' in */extdiff.* (glob)
+  ** custom diff **
+  cleaning up temp directory
+  [1]
+
   $ cd ..
 
 #endif
--- a/tests/test-extension.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-extension.t	Sat Jan 17 18:28:30 2015 -0800
@@ -424,10 +424,9 @@
     #cmd.cdiff = gdiff
     #opts.cdiff = -Nprc5
   
-    # add new command called vdiff, runs kdiff3
-    vdiff = kdiff3
-  
-    # add new command called meld, runs meld (no need to name twice)
+    # add new command called meld, runs meld (no need to name twice).  If
+    # the meld executable is not available, the meld tool in [merge-tools]
+    # will be used, if available
     meld =
   
     # add new command called vimdiff, runs gvimdiff with DirDiff plugin
@@ -463,7 +462,7 @@
   
    extdiff       use external program to diff repository (or selected files)
   
-  (use "hg help -v extdiff" to show built-in aliases and global options)
+  (use "hg help -v -e extdiff" to show built-in aliases and global options)
 
 
 
@@ -558,11 +557,13 @@
   >     "yet another debug command"
   >     ui.write("%s\n" % '\n'.join([x for x, y in extensions.extensions()]))
   > EOF
-  $ echo "debugissue811 = $debugpath" >> $HGRCPATH
-  $ echo "mq=" >> $HGRCPATH
-  $ echo "strip=" >> $HGRCPATH
-  $ echo "hgext.mq=" >> $HGRCPATH
-  $ echo "hgext/mq=" >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > debugissue811 = $debugpath
+  > mq =
+  > strip =
+  > hgext.mq =
+  > hgext/mq =
+  > EOF
 
 Show extensions:
 (note that mq force load strip, also checking it's not loaded twice)
@@ -572,6 +573,214 @@
   strip
   mq
 
+For extensions whose name matches one of their commands, the help
+message should ask for '-v -e' to get the list of built-in aliases
+along with the extension help itself
+
+  $ mkdir $TESTTMP/d
+  $ cat > $TESTTMP/d/dodo.py <<EOF
+  > """
+  > This is an awesome 'dodo' extension. It does nothing and
+  > writes 'Foo foo'
+  > """
+  > from mercurial import cmdutil, commands
+  > cmdtable = {}
+  > command = cmdutil.command(cmdtable)
+  > @command('dodo', [], 'hg dodo')
+  > def dodo(ui, *args, **kwargs):
+  >     """Does nothing"""
+  >     ui.write("I do nothing. Yay\\n")
+  > @command('foofoo', [], 'hg foofoo')
+  > def foofoo(ui, *args, **kwargs):
+  >     """Writes 'Foo foo'"""
+  >     ui.write("Foo foo\\n")
+  > EOF
+  $ dodopath=$TESTTMP/d/dodo.py
+
+  $ echo "dodo = $dodopath" >> $HGRCPATH
+
+Make sure that the user is asked to enter '-v -e' to get the list of built-in aliases
+  $ hg help -e dodo
+  dodo extension -
+  
+  This is an awesome 'dodo' extension. It does nothing and writes 'Foo foo'
+  
+  list of commands:
+  
+   dodo          Does nothing
+   foofoo        Writes 'Foo foo'
+  
+  (use "hg help -v -e dodo" to show built-in aliases and global options)
+
+Make sure that '-v -e' prints the list of built-in aliases along with
+the extension help itself
+  $ hg help -v -e dodo
+  dodo extension -
+  
+  This is an awesome 'dodo' extension. It does nothing and writes 'Foo foo'
+  
+  list of commands:
+  
+   dodo          Does nothing
+   foofoo        Writes 'Foo foo'
+  
+  global options ([+] can be repeated):
+  
+   -R --repository REPO   repository root directory or name of overlay bundle
+                          file
+      --cwd DIR           change working directory
+   -y --noninteractive    do not prompt, automatically pick the first choice for
+                          all prompts
+   -q --quiet             suppress output
+   -v --verbose           enable additional output
+      --config CONFIG [+] set/override config option (use 'section.name=value')
+      --debug             enable debugging output
+      --debugger          start debugger
+      --encoding ENCODE   set the charset encoding (default: ascii)
+      --encodingmode MODE set the charset encoding mode (default: strict)
+      --traceback         always print a traceback on exception
+      --time              time how long the command takes
+      --profile           print command execution profile
+      --version           output version information and exit
+   -h --help              display help and exit
+      --hidden            consider hidden changesets
+
+Make sure that a single '-v' option shows help and built-ins only for the 'dodo' command
+  $ hg help -v dodo
+  hg dodo
+  
+  Does nothing
+  
+  (use "hg help -e dodo" to show help for the dodo extension)
+  
+  options:
+  
+    --mq operate on patch repository
+  
+  global options ([+] can be repeated):
+  
+   -R --repository REPO   repository root directory or name of overlay bundle
+                          file
+      --cwd DIR           change working directory
+   -y --noninteractive    do not prompt, automatically pick the first choice for
+                          all prompts
+   -q --quiet             suppress output
+   -v --verbose           enable additional output
+      --config CONFIG [+] set/override config option (use 'section.name=value')
+      --debug             enable debugging output
+      --debugger          start debugger
+      --encoding ENCODE   set the charset encoding (default: ascii)
+      --encodingmode MODE set the charset encoding mode (default: strict)
+      --traceback         always print a traceback on exception
+      --time              time how long the command takes
+      --profile           print command execution profile
+      --version           output version information and exit
+   -h --help              display help and exit
+      --hidden            consider hidden changesets
+
+When the extension name doesn't match any of its commands, the help
+message should ask for '-v' to get the list of built-in aliases
+along with the extension help
+  $ cat > $TESTTMP/d/dudu.py <<EOF
+  > """
+  > This is an awesome 'dudu' extension. It does something and
+  > also writes 'Beep beep'
+  > """
+  > from mercurial import cmdutil, commands
+  > cmdtable = {}
+  > command = cmdutil.command(cmdtable)
+  > @command('something', [], 'hg something')
+  > def something(ui, *args, **kwargs):
+  >     """Does something"""
+  >     ui.write("I do something. Yaaay\\n")
+  > @command('beep', [], 'hg beep')
+  > def beep(ui, *args, **kwargs):
+  >     """Writes 'Beep beep'"""
+  >     ui.write("Beep beep\\n")
+  > EOF
+  $ dudupath=$TESTTMP/d/dudu.py
+
+  $ echo "dudu = $dudupath" >> $HGRCPATH
+
+  $ hg help -e dudu
+  dudu extension -
+  
+  This is an awesome 'dudu' extension. It does something and also writes 'Beep
+  beep'
+  
+  list of commands:
+  
+   beep          Writes 'Beep beep'
+   something     Does something
+  
+  (use "hg help -v dudu" to show built-in aliases and global options)
+
+When the extension name doesn't match any of its commands, the
+help options '-v' and '-v -e' should be equivalent
+  $ hg help -v dudu
+  dudu extension -
+  
+  This is an awesome 'dudu' extension. It does something and also writes 'Beep
+  beep'
+  
+  list of commands:
+  
+   beep          Writes 'Beep beep'
+   something     Does something
+  
+  global options ([+] can be repeated):
+  
+   -R --repository REPO   repository root directory or name of overlay bundle
+                          file
+      --cwd DIR           change working directory
+   -y --noninteractive    do not prompt, automatically pick the first choice for
+                          all prompts
+   -q --quiet             suppress output
+   -v --verbose           enable additional output
+      --config CONFIG [+] set/override config option (use 'section.name=value')
+      --debug             enable debugging output
+      --debugger          start debugger
+      --encoding ENCODE   set the charset encoding (default: ascii)
+      --encodingmode MODE set the charset encoding mode (default: strict)
+      --traceback         always print a traceback on exception
+      --time              time how long the command takes
+      --profile           print command execution profile
+      --version           output version information and exit
+   -h --help              display help and exit
+      --hidden            consider hidden changesets
+
+  $ hg help -v -e dudu
+  dudu extension -
+  
+  This is an awesome 'dudu' extension. It does something and also writes 'Beep
+  beep'
+  
+  list of commands:
+  
+   beep          Writes 'Beep beep'
+   something     Does something
+  
+  global options ([+] can be repeated):
+  
+   -R --repository REPO   repository root directory or name of overlay bundle
+                          file
+      --cwd DIR           change working directory
+   -y --noninteractive    do not prompt, automatically pick the first choice for
+                          all prompts
+   -q --quiet             suppress output
+   -v --verbose           enable additional output
+      --config CONFIG [+] set/override config option (use 'section.name=value')
+      --debug             enable debugging output
+      --debugger          start debugger
+      --encoding ENCODE   set the charset encoding (default: ascii)
+      --encodingmode MODE set the charset encoding mode (default: strict)
+      --traceback         always print a traceback on exception
+      --time              time how long the command takes
+      --profile           print command execution profile
+      --version           output version information and exit
+   -h --help              display help and exit
+      --hidden            consider hidden changesets
+
 Disabled extension commands:
 
   $ ORGHGRCPATH=$HGRCPATH
@@ -649,7 +858,7 @@
   [255]
 
   $ cat > throw.py <<EOF
-  > from mercurial import cmdutil, commands
+  > from mercurial import cmdutil, commands, util
   > cmdtable = {}
   > command = cmdutil.command(cmdtable)
   > class Bogon(Exception): pass
@@ -658,6 +867,7 @@
   >     """throws an exception"""
   >     raise Bogon()
   > EOF
+
 No declared supported version, extension complains:
   $ hg --config extensions.throw=throw.py throw 2>&1 | egrep '^\*\*'
   ** Unknown exception encountered with possibly-broken third-party extension throw
@@ -667,6 +877,7 @@
   ** Python * (glob)
   ** Mercurial Distributed SCM * (glob)
   ** Extensions loaded: throw
+
 empty declaration of supported version, extension complains:
   $ echo "testedwith = ''" >> throw.py
   $ hg --config extensions.throw=throw.py throw 2>&1 | egrep '^\*\*'
@@ -677,6 +888,7 @@
   ** Python * (glob)
   ** Mercurial Distributed SCM (*) (glob)
   ** Extensions loaded: throw
+
 If the extension specifies a buglink, show that:
   $ echo 'buglink = "http://example.com/bts"' >> throw.py
   $ rm -f throw.pyc throw.pyo
@@ -688,6 +900,7 @@
   ** Python * (glob)
   ** Mercurial Distributed SCM (*) (glob)
   ** Extensions loaded: throw
+
 If the extensions declare outdated versions, accuse the older extension first:
   $ echo "from mercurial import util" >> older.py
   $ echo "util.version = lambda:'2.2'" >> older.py
@@ -697,31 +910,33 @@
   $ hg --config extensions.throw=throw.py --config extensions.older=older.py \
   >   throw 2>&1 | egrep '^\*\*'
   ** Unknown exception encountered with possibly-broken third-party extension older
-  ** which supports versions 1.9.3 of Mercurial.
+  ** which supports versions 1.9 of Mercurial.
   ** Please disable older and try your action again.
   ** If that fixes the bug please report it to the extension author.
   ** Python * (glob)
   ** Mercurial Distributed SCM (version 2.2)
   ** Extensions loaded: throw, older
+
 One extension only tested with older, one only with newer versions:
-  $ echo "util.version = lambda:'2.1.0'" >> older.py
+  $ echo "util.version = lambda:'2.1'" >> older.py
   $ rm -f older.pyc older.pyo
   $ hg --config extensions.throw=throw.py --config extensions.older=older.py \
   >   throw 2>&1 | egrep '^\*\*'
   ** Unknown exception encountered with possibly-broken third-party extension older
-  ** which supports versions 1.9.3 of Mercurial.
+  ** which supports versions 1.9 of Mercurial.
   ** Please disable older and try your action again.
   ** If that fixes the bug please report it to the extension author.
   ** Python * (glob)
-  ** Mercurial Distributed SCM (version 2.1.0)
+  ** Mercurial Distributed SCM (version 2.1)
   ** Extensions loaded: throw, older
+
 Older extension is tested with current version, the other only with newer:
   $ echo "util.version = lambda:'1.9.3'" >> older.py
   $ rm -f older.pyc older.pyo
   $ hg --config extensions.throw=throw.py --config extensions.older=older.py \
   >   throw 2>&1 | egrep '^\*\*'
   ** Unknown exception encountered with possibly-broken third-party extension throw
-  ** which supports versions 2.1.1 of Mercurial.
+  ** which supports versions 2.1 of Mercurial.
   ** Please disable throw and try your action again.
   ** If that fixes the bug please report it to http://example.com/bts
   ** Python * (glob)
@@ -739,6 +954,17 @@
   ** Mercurial Distributed SCM (*) (glob)
   ** Extensions loaded: throw
 
+Patch version is ignored during compatibility check
+  $ echo "testedwith = '3.2'" >> throw.py
+  $ echo "util.version = lambda:'3.2.2'" >> throw.py
+  $ rm -f throw.pyc throw.pyo
+  $ hg --config extensions.throw=throw.py throw 2>&1 | egrep '^\*\*'
+  ** unknown exception encountered, please report by visiting
+  ** http://mercurial.selenic.com/wiki/BugTracker
+  ** Python * (glob)
+  ** Mercurial Distributed SCM (*) (glob)
+  ** Extensions loaded: throw
+
 Test version number support in 'hg version':
   $ echo '__version__ = (1, 2, 3)' >> throw.py
   $ rm -f throw.pyc throw.pyo
@@ -813,9 +1039,11 @@
   $ hg -q -R pull-src1 pull src
   reposetup() for $TESTTMP/reposetup-test/src (glob)
 
-  $ echo '[extensions]' >> $HGRCPATH
-  $ echo '# disable extension globally and explicitly' >> $HGRCPATH
-  $ echo 'reposetuptest = !' >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > [extensions]
+  > # disable extension globally and explicitly
+  > reposetuptest = !
+  > EOF
   $ hg clone -U src clone-dst2
   reposetup() for $TESTTMP/reposetup-test/src (glob)
   $ hg init push-dst2
@@ -825,9 +1053,11 @@
   $ hg -q -R pull-src2 pull src
   reposetup() for $TESTTMP/reposetup-test/src (glob)
 
-  $ echo '[extensions]' >> $HGRCPATH
-  $ echo '# enable extension globally' >> $HGRCPATH
-  $ echo "reposetuptest = $TESTTMP/reposetuptest.py" >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > [extensions]
+  > # enable extension globally
+  > reposetuptest = $TESTTMP/reposetuptest.py
+  > EOF
   $ hg clone -U src clone-dst3
   reposetup() for $TESTTMP/reposetup-test/src (glob)
   reposetup() for $TESTTMP/reposetup-test/clone-dst3 (glob)
@@ -863,9 +1093,11 @@
   $ hg --config extensions.reposetuptest=! init pull-src5
   $ hg --config extensions.reposetuptest=! -q -R pull-src5 pull src
 
-  $ echo '[extensions]' >> $HGRCPATH
-  $ echo '# disable extension globally and explicitly' >> $HGRCPATH
-  $ echo 'reposetuptest = !' >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > [extensions]
+  > # disable extension globally and explicitly
+  > reposetuptest = !
+  > EOF
   $ hg init parent
   $ hg init parent/sub1
   $ echo 1 > parent/sub1/1
--- a/tests/test-fncache.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-fncache.t	Sat Jan 17 18:28:30 2015 -0800
@@ -71,6 +71,8 @@
   .hg/00manifest.i
   .hg/cache
   .hg/cache/branch2-served
+  .hg/cache/rbc-names-v1
+  .hg/cache/rbc-revs-v1
   .hg/data
   .hg/data/tst.d.hg
   .hg/data/tst.d.hg/foo.i
@@ -79,6 +81,7 @@
   .hg/phaseroots
   .hg/requires
   .hg/undo
+  .hg/undo.backupfiles
   .hg/undo.bookmarks
   .hg/undo.branch
   .hg/undo.desc
@@ -99,6 +102,8 @@
   .hg/00changelog.i
   .hg/cache
   .hg/cache/branch2-served
+  .hg/cache/rbc-names-v1
+  .hg/cache/rbc-revs-v1
   .hg/dirstate
   .hg/last-message.txt
   .hg/requires
@@ -110,6 +115,7 @@
   .hg/store/data/tst.d.hg/_foo.i
   .hg/store/phaseroots
   .hg/store/undo
+  .hg/store/undo.backupfiles
   .hg/store/undo.phaseroots
   .hg/undo.bookmarks
   .hg/undo.branch
@@ -210,19 +216,19 @@
 
   $ cat > ../exceptionext.py <<EOF
   > import os
-  > from mercurial import commands, util, transaction
+  > from mercurial import commands, util, localrepo
   > from mercurial.extensions import wrapfunction
   > 
   > def wrapper(orig, self, *args, **kwargs):
-  >     origonclose = self.onclose
-  >     def onclose():
-  >         origonclose()
+  >     tr = orig(self, *args, **kwargs)
+  >     def fail(tr):
   >         raise util.Abort("forced transaction failure")
-  >     self.onclose = onclose
-  >     return orig(self, *args, **kwargs)
+  >     # zzz prefix to ensure it sorts after store.write
+  >     tr.addfinalize('zzz-forcefails', fail)
+  >     return tr
   > 
   > def uisetup(ui):
-  >     wrapfunction(transaction.transaction, 'close', wrapper)
+  >     wrapfunction(localrepo.localrepository, 'transaction', wrapper)
   > 
   > cmdtable = {}
   > 
@@ -241,22 +247,22 @@
 
   $ cat > ../exceptionext.py <<EOF
   > import os
-  > from mercurial import commands, util, transaction
+  > from mercurial import commands, util, transaction, localrepo
   > from mercurial.extensions import wrapfunction
   > 
-  > def closewrapper(orig, self, *args, **kwargs):
-  >     origonclose = self.onclose
-  >     def onclose():
-  >         origonclose()
+  > def trwrapper(orig, self, *args, **kwargs):
+  >     tr = orig(self, *args, **kwargs)
+  >     def fail(tr):
   >         raise util.Abort("forced transaction failure")
-  >     self.onclose = onclose
-  >     return orig(self, *args, **kwargs)
+  >     # zzz prefix to ensure it sorts after store.write
+  >     tr.addfinalize('zzz-forcefails', fail)
+  >     return tr
   > 
   > def abortwrapper(orig, self, *args, **kwargs):
   >     raise util.Abort("forced transaction failure")
   > 
   > def uisetup(ui):
-  >     wrapfunction(transaction.transaction, 'close', closewrapper)
+  >     wrapfunction(localrepo.localrepository, 'transaction', trwrapper)
   >     wrapfunction(transaction.transaction, '_abort', abortwrapper)
   > 
   > cmdtable = {}
--- a/tests/test-getbundle.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-getbundle.t	Sat Jan 17 18:28:30 2015 -0800
@@ -165,7 +165,30 @@
   8365676dbab05860ce0d9110f2af51368b961bbd
   0b2f73f04880d9cb6a5cd8a757f0db0ad01e32c3
 
+= Test bundle2 =
 
+  $ hg debuggetbundle repo bundle -t bundle2
+  $ hg debugbundle bundle
+  Stream params: {}
+  b2x:changegroup -- "{'version': '01'}"
+      7704483d56b2a7b5db54dcee7c62378ac629b348
+      29a4d1f17bd3f0779ca0525bebb1cfb51067c738
+      713346a995c363120712aed1aee7e04afd867638
+      d5f6e1ea452285324836a49d7d3c2a63cfed1d31
+      ff42371d57168345fdf1a3aac66a51f6a45d41d2
+      bac16991d12ff45f9dc43c52da1946dfadb83e80
+      6621d79f61b23ec74cf4b69464343d9e0980ec8b
+      8931463777131cd73923e560b760061f2aa8a4bc
+      f34414c64173e0ecb61b25dc55e116dbbcc89bee
+      928b5f94cdb278bb536eba552de348a4e92ef24d
+      700b7e19db54103633c4bf4a6a6b6d55f4d50c03
+      63476832d8ec6558cf9bbe3cbe0c757e5cf18043
+      13c0170174366b441dc68e8e33757232fa744458
+      5686dbbd9fc46cb806599c878d02fe1cb56b83d3
+      8365676dbab05860ce0d9110f2af51368b961bbd
+      0b2f73f04880d9cb6a5cd8a757f0db0ad01e32c3
+      4801a72e5d88cb515b0c7e40fae34180f3f837f2
+      10c14a2cc935e1d8c31f9e98587dcf27fb08a6da
 = Test via HTTP =
 
 Get everything:
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-glog-topological.t	Sat Jan 17 18:28:30 2015 -0800
@@ -0,0 +1,101 @@
+This test file aims at testing topological iteration and the various configurations it can have.
+
+  $ cat >> $HGRCPATH << EOF
+  > [ui]
+  > logtemplate={rev}\n
+  > EOF
+
+In this simple example, all topological branches are displayed in turn until we
+can finally display 0. This implies skipping from 8 to 3 and coming back to 7
+later.
+
+  $ hg init test01
+  $ cd test01
+  $ hg unbundle $TESTDIR/bundles/remote.hg
+  adding changesets
+  adding manifests
+  adding file changes
+  added 9 changesets with 7 changes to 4 files (+1 heads)
+  (run 'hg heads' to see heads, 'hg merge' to merge)
+
+  $ hg log -G
+  o  8
+  |
+  | o  7
+  | |
+  | o  6
+  | |
+  | o  5
+  | |
+  | o  4
+  | |
+  o |  3
+  | |
+  o |  2
+  | |
+  o |  1
+  |/
+  o  0
+  
+
+(display all nodes)
+
+  $ hg --config experimental.graph-group-branches=1 log -G
+  o  8
+  |
+  o  3
+  |
+  o  2
+  |
+  o  1
+  |
+  | o  7
+  | |
+  | o  6
+  | |
+  | o  5
+  | |
+  | o  4
+  |/
+  o  0
+  
+
+(revset skipping nodes)
+
+  $ hg --config experimental.graph-group-branches=1 log -G --rev 'not (2+6)'
+  o  8
+  |
+  o  3
+  |
+  o  1
+  |
+  | o  7
+  | |
+  | o  5
+  | |
+  | o  4
+  |/
+  o  0
+  
+
+(begin from the other branch)
+
+  $ hg --config experimental.graph-group-branches=1 --config experimental.graph-group-branches.firstbranch=5 log -G
+  o  7
+  |
+  o  6
+  |
+  o  5
+  |
+  o  4
+  |
+  | o  8
+  | |
+  | o  3
+  | |
+  | o  2
+  | |
+  | o  1
+  |/
+  o  0
+  
--- a/tests/test-graft.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-graft.t	Sat Jan 17 18:28:30 2015 -0800
@@ -50,20 +50,20 @@
 Can't graft ancestor:
 
   $ hg graft 1 2
-  skipping ancestor revision 1
-  skipping ancestor revision 2
+  skipping ancestor revision 1:5d205f8b35b6
+  skipping ancestor revision 2:5c095ad7e90f
   [255]
 
 Specify revisions with -r:
 
   $ hg graft -r 1 -r 2
-  skipping ancestor revision 1
-  skipping ancestor revision 2
+  skipping ancestor revision 1:5d205f8b35b6
+  skipping ancestor revision 2:5c095ad7e90f
   [255]
 
   $ hg graft -r 1 2
-  skipping ancestor revision 2
-  skipping ancestor revision 1
+  skipping ancestor revision 2:5c095ad7e90f
+  skipping ancestor revision 1:5d205f8b35b6
   [255]
 
 Can't graft with dirty wd:
@@ -82,7 +82,7 @@
   A b
   R a
   $ HGEDITOR=cat hg graft 2 -u foo --edit
-  grafting revision 2
+  grafting 2:5c095ad7e90f "2"
   merging a and b to b
   2
   
@@ -132,17 +132,17 @@
 
   $ hg graft 1 5 4 3 'merge()' 2 -n
   skipping ungraftable merge revision 6
-  skipping revision 2 (already grafted to 7)
-  grafting revision 1
-  grafting revision 5
-  grafting revision 4
-  grafting revision 3
+  skipping revision 2:5c095ad7e90f (already grafted to 7:ef0ef43d49e7)
+  grafting 1:5d205f8b35b6 "1"
+  grafting 5:97f8bfe72746 "5"
+  grafting 4:9c233e8e184d "4"
+  grafting 3:4c60f11aa304 "3"
 
   $ HGEDITOR=cat hg graft 1 5 4 3 'merge()' 2 --debug
   skipping ungraftable merge revision 6
   scanning for duplicate grafts
-  skipping revision 2 (already grafted to 7)
-  grafting revision 1
+  skipping revision 2:5c095ad7e90f (already grafted to 7:ef0ef43d49e7)
+  grafting 1:5d205f8b35b6 "1"
     searching for copies back to rev 1
     unmatched files in local:
      b
@@ -159,8 +159,11 @@
   merging b and a to b
   my b@ef0ef43d49e7+ other a@5d205f8b35b6 ancestor a@68795b066622
    premerge successful
+  committing files:
   b
-  grafting revision 5
+  committing manifest
+  committing changelog
+  grafting 5:97f8bfe72746 "5"
     searching for copies back to rev 1
   resolving manifests
    branchmerge: True, force: True, partial: False
@@ -168,9 +171,12 @@
    e: remote is newer -> g
   getting e
   updating: e 1/1 files (100.00%)
-   b: keep -> k
+   b: remote unchanged -> k
+  committing files:
   e
-  grafting revision 4
+  committing manifest
+  committing changelog
+  grafting 4:9c233e8e184d "4"
     searching for copies back to rev 1
   resolving manifests
    branchmerge: True, force: True, partial: False
@@ -179,7 +185,7 @@
    d: remote is newer -> g
   getting d
   updating: d 1/2 files (50.00%)
-   b: keep -> k
+   b: remote unchanged -> k
    e: versions differ -> m
   updating: e 2/2 files (100.00%)
   picked tool 'internal:merge' for e (binary False symlink False)
@@ -205,7 +211,7 @@
   $ echo c >> e
   $ hg ci -mtest
 
-  $ hg strip . --config extensions.mq=
+  $ hg strip . --config extensions.strip=
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   saved backup bundle to $TESTTMP/a/.hg/strip-backup/*-backup.hg (glob)
 
@@ -213,10 +219,10 @@
 
   $ hg graft 1 5 4 3 'merge()' 2
   skipping ungraftable merge revision 6
-  skipping revision 2 (already grafted to 7)
-  skipping revision 1 (already grafted to 8)
-  skipping revision 5 (already grafted to 9)
-  grafting revision 4
+  skipping revision 2:5c095ad7e90f (already grafted to 7:ef0ef43d49e7)
+  skipping revision 1:5d205f8b35b6 (already grafted to 8:6b9e5368ca4e)
+  skipping revision 5:97f8bfe72746 (already grafted to 9:1905859650ec)
+  grafting 4:9c233e8e184d "4"
   merging e
   warning: conflicts during merge.
   merging e incomplete! (edit conflicts, then use 'hg resolve --mark')
@@ -227,8 +233,8 @@
 Continue without resolve should fail:
 
   $ hg graft -c
-  grafting revision 4
-  abort: unresolved merge conflicts (see hg help resolve)
+  grafting 4:9c233e8e184d "4"
+  abort: unresolved merge conflicts (see "hg help resolve")
   [255]
 
 Fix up:
@@ -250,8 +256,8 @@
 Continue for real, clobber usernames
 
   $ hg graft -c -U
-  grafting revision 4
-  grafting revision 3
+  grafting 4:9c233e8e184d "4"
+  grafting 3:4c60f11aa304 "3"
 
 Compare with original:
 
@@ -299,7 +305,7 @@
   $ hg ci -m 7
   created new head
   $ hg graft 7
-  grafting revision 7
+  grafting 7:ef0ef43d49e7 "2"
 
   $ hg log -r 7 --template '{rev}:{node}\n'
   7:ef0ef43d49e79e81ddafdc7997401ba0041efc82
@@ -326,31 +332,31 @@
 Disallow grafting an already grafted cset onto its original branch
   $ hg up -q 6
   $ hg graft 7
-  skipping already grafted revision 7 (was grafted from 2)
+  skipping already grafted revision 7:ef0ef43d49e7 (was grafted from 2:5c095ad7e90f)
   [255]
 
 Disallow grafting already grafted csets with the same origin onto each other
   $ hg up -q 13
   $ hg graft 2
-  skipping revision 2 (already grafted to 13)
+  skipping revision 2:5c095ad7e90f (already grafted to 13:9db0f28fd374)
   [255]
   $ hg graft 7
-  skipping already grafted revision 7 (13 also has origin 2)
+  skipping already grafted revision 7:ef0ef43d49e7 (13:9db0f28fd374 also has origin 2:5c095ad7e90f)
   [255]
 
   $ hg up -q 7
   $ hg graft 2
-  skipping revision 2 (already grafted to 7)
+  skipping revision 2:5c095ad7e90f (already grafted to 7:ef0ef43d49e7)
   [255]
   $ hg graft tip
-  skipping already grafted revision 13 (7 also has origin 2)
+  skipping already grafted revision 13:9db0f28fd374 (7:ef0ef43d49e7 also has origin 2:5c095ad7e90f)
   [255]
 
 Graft with --log
 
   $ hg up -Cq 1
   $ hg graft 3 --log -u foo
-  grafting revision 3
+  grafting 3:4c60f11aa304 "3"
   warning: can't find ancestor for 'c' copied from 'b'!
   $ hg log --template '{rev} {parents} {desc}\n' -r tip
   14 1:5d205f8b35b6  3
@@ -361,39 +367,50 @@
   $ echo b > a
   $ hg ci -m 8
   created new head
-  $ echo a > a
+  $ echo c > a
   $ hg ci -m 9
   $ hg graft 1 --tool internal:fail
-  grafting revision 1
+  grafting 1:5d205f8b35b6 "1"
   abort: unresolved conflicts, can't continue
   (use hg resolve and hg graft --continue)
   [255]
   $ hg resolve --all
   merging a
+  warning: conflicts during merge.
+  merging a incomplete! (edit conflicts, then use 'hg resolve --mark')
+  [1]
+  $ cat a
+  <<<<<<< local: aaa4406d4f0a - test: 9
+  c
+  =======
+  b
+  >>>>>>> other: 5d205f8b35b6  - bar: 1
+  $ echo b > a
+  $ hg resolve -m a
   (no more unresolved files)
   $ hg graft -c
-  grafting revision 1
+  grafting 1:5d205f8b35b6 "1"
   $ hg export tip --git
   # HG changeset patch
   # User bar
   # Date 0 0
   #      Thu Jan 01 00:00:00 1970 +0000
-  # Node ID 64ecd9071ce83c6e62f538d8ce7709d53f32ebf7
-  # Parent  4bdb9a9d0b84ffee1d30f0dfc7744cade17aa19c
+  # Node ID f67661df0c4804d301f064f332b57e7d5ddaf2be
+  # Parent  aaa4406d4f0ae9befd6e58c82ec63706460cbca6
   1
   
   diff --git a/a b/a
   --- a/a
   +++ b/a
   @@ -1,1 +1,1 @@
-  -a
+  -c
   +b
 
 Resolve conflicted graft with rename
   $ echo c > a
   $ hg ci -m 10
   $ hg graft 2 --tool internal:fail
-  grafting revision 2
+  grafting 2:5c095ad7e90f "2"
   abort: unresolved conflicts, can't continue
   (use hg resolve and hg graft --continue)
   [255]
@@ -401,14 +418,14 @@
   merging a and b to b
   (no more unresolved files)
   $ hg graft -c
-  grafting revision 2
+  grafting 2:5c095ad7e90f "2"
   $ hg export tip --git
   # HG changeset patch
   # User test
   # Date 0 0
   #      Thu Jan 01 00:00:00 1970 +0000
-  # Node ID 2e80e1351d6ed50302fe1e05f8bd1d4d412b6e11
-  # Parent  e5a51ae854a8bbaaf25cc5c6a57ff46042dadbb4
+  # Node ID 9627f653b421c61fc1ea4c4e366745070fa3d2bc
+  # Parent  ee295f490a40b97f3d18dd4c4f1c8936c233b612
   2
   
   diff --git a/a b/b
@@ -537,12 +554,12 @@
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     3
   
-  changeset:   17:64ecd9071ce8
+  changeset:   17:f67661df0c48
   user:        bar
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     1
   
-  changeset:   19:2e80e1351d6e
+  changeset:   19:9627f653b421
   user:        test
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     2
@@ -566,7 +583,7 @@
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     2
   
-  changeset:   19:2e80e1351d6e
+  changeset:   19:9627f653b421
   user:        test
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     2
@@ -608,7 +625,7 @@
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     2
   
-  changeset:   19:2e80e1351d6e
+  changeset:   19:9627f653b421
   user:        test
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     2
@@ -630,26 +647,26 @@
 graft works on complex revset
 
   $ hg graft 'origin(13) or destination(origin(13))'
-  skipping ancestor revision 21
-  skipping ancestor revision 22
-  skipping revision 2 (already grafted to 22)
-  grafting revision 7
-  grafting revision 13
-  grafting revision 19
+  skipping ancestor revision 21:7e61b508e709
+  skipping ancestor revision 22:1313d0a825e2
+  skipping revision 2:5c095ad7e90f (already grafted to 22:1313d0a825e2)
+  grafting 7:ef0ef43d49e7 "2"
+  grafting 13:9db0f28fd374 "2"
+  grafting 19:9627f653b421 "2"
   merging b
 
 graft with --force (still doesn't graft merges)
 
   $ hg graft 19 0 6
   skipping ungraftable merge revision 6
-  skipping ancestor revision 0
-  skipping already grafted revision 19 (22 also has origin 2)
+  skipping ancestor revision 0:68795b066622
+  skipping already grafted revision 19:9627f653b421 (22:1313d0a825e2 also has origin 2:5c095ad7e90f)
   [255]
   $ hg graft 19 0 6 --force
   skipping ungraftable merge revision 6
-  grafting revision 19
+  grafting 19:9627f653b421 "2"
   merging b
-  grafting revision 0
+  grafting 0:68795b066622 "0"
 
 graft --force after backout
 
@@ -659,29 +676,33 @@
   reverting a
   changeset 29:484c03b8dfa4 backs out changeset 28:6c56f0f7f033
   $ hg graft 28
-  skipping ancestor revision 28
+  skipping ancestor revision 28:6c56f0f7f033
   [255]
   $ hg graft 28 --force
-  grafting revision 28
+  grafting 28:6c56f0f7f033 "28"
   merging a
   $ cat a
   abc
 
 graft --continue after --force
 
-  $ hg backout 30
-  reverting a
-  changeset 31:3b96c18b7a1b backs out changeset 30:8f539994be33
+  $ echo def > a
+  $ hg ci -m 31
   $ hg graft 28 --force --tool internal:fail
-  grafting revision 28
+  grafting 28:6c56f0f7f033 "28"
   abort: unresolved conflicts, can't continue
   (use hg resolve and hg graft --continue)
   [255]
   $ hg resolve --all
   merging a
+  warning: conflicts during merge.
+  merging a incomplete! (edit conflicts, then use 'hg resolve --mark')
+  [1]
+  $ echo abc > a
+  $ hg resolve -m a
   (no more unresolved files)
   $ hg graft -c
-  grafting revision 28
+  grafting 28:6c56f0f7f033 "28"
   $ cat a
   abc
 
@@ -690,8 +711,17 @@
 
   $ hg up -qC 7
   $ hg tag -l -r 13 tmp
-  $ hg --config extensions.mq= strip 2
-  saved backup bundle to $TESTTMP/a/.hg/strip-backup/5c095ad7e90f-backup.hg (glob)
+  $ hg --config extensions.strip= strip 2
+  saved backup bundle to $TESTTMP/a/.hg/strip-backup/5c095ad7e90f-d323a1e4-backup.hg (glob)
   $ hg graft tmp
-  skipping already grafted revision 8 (2 also has unknown origin 5c095ad7e90f871700f02dd1fa5012cb4498a2d4)
+  skipping already grafted revision 8:9db0f28fd374 (2:ef0ef43d49e7 also has unknown origin 5c095ad7e90f)
   [255]
+
+Empty graft
+
+  $ hg up -qr 26
+  $ hg tag -f something
+  $ hg graft -qr 27
+  $ hg graft -f 27
+  grafting 27:3aaa8b6725f0 "28"
+  note: graft of 27:3aaa8b6725f0 created no changes to commit
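
The graft hunks above also switch the test's strip invocation from the mq
extension to the standalone strip extension, and the backup bundle names now
carry an extra hash component. For readers reproducing these steps outside the
test harness, a minimal sketch of enabling the extension in an hgrc is:

  [extensions]
  strip =
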
--- a/tests/test-hardlinks.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-hardlinks.t	Sat Jan 17 18:28:30 2015 -0800
@@ -50,6 +50,8 @@
   1 r1/.hg/store/fncache
   1 r1/.hg/store/phaseroots
   1 r1/.hg/store/undo
+  1 r1/.hg/store/undo.backup.fncache
+  1 r1/.hg/store/undo.backupfiles
   1 r1/.hg/store/undo.phaseroots
 
 
@@ -80,6 +82,8 @@
   2 r1/.hg/store/fncache
   1 r1/.hg/store/phaseroots
   1 r1/.hg/store/undo
+  1 r1/.hg/store/undo.backup.fncache
+  1 r1/.hg/store/undo.backupfiles
   1 r1/.hg/store/undo.phaseroots
 
   $ nlinksdir r2/.hg/store
@@ -99,6 +103,7 @@
   1 r3/.hg/store/fncache
   1 r3/.hg/store/phaseroots
   1 r3/.hg/store/undo
+  1 r3/.hg/store/undo.backupfiles
   1 r3/.hg/store/undo.phaseroots
 
 
@@ -124,6 +129,9 @@
   1 r3/.hg/store/fncache
   1 r3/.hg/store/phaseroots
   1 r3/.hg/store/undo
+  1 r3/.hg/store/undo.backup.fncache
+  1 r3/.hg/store/undo.backup.phaseroots
+  1 r3/.hg/store/undo.backupfiles
   1 r3/.hg/store/undo.phaseroots
 
 Push to repo r1 should break up most hardlinks in r2:
@@ -151,7 +159,7 @@
   1 r2/.hg/store/00manifest.i
   1 r2/.hg/store/data/d1/f2.i
   2 r2/.hg/store/data/f1.i
-  1 r2/.hg/store/fncache
+  2 r2/.hg/store/fncache
 
   $ hg -R r2 verify
   checking changesets
@@ -176,7 +184,7 @@
   1 r2/.hg/store/00manifest.i
   1 r2/.hg/store/data/d1/f2.i
   1 r2/.hg/store/data/f1.i
-  1 r2/.hg/store/fncache
+  2 r2/.hg/store/fncache
 
 
   $ cd r3
@@ -196,6 +204,8 @@
   2 r4/.hg/00changelog.i
   2 r4/.hg/branch
   2 r4/.hg/cache/branch2-served
+  2 r4/.hg/cache/rbc-names-v1
+  2 r4/.hg/cache/rbc-revs-v1
   2 r4/.hg/dirstate
   2 r4/.hg/hgrc
   2 r4/.hg/last-message.txt
@@ -208,6 +218,9 @@
   2 r4/.hg/store/fncache
   2 r4/.hg/store/phaseroots
   2 r4/.hg/store/undo
+  2 r4/.hg/store/undo.backup.fncache
+  2 r4/.hg/store/undo.backup.phaseroots
+  2 r4/.hg/store/undo.backupfiles
   2 r4/.hg/store/undo.phaseroots
   2 r4/.hg/undo.bookmarks
   2 r4/.hg/undo.branch
@@ -226,6 +239,8 @@
   2 r4/.hg/00changelog.i
   1 r4/.hg/branch
   2 r4/.hg/cache/branch2-served
+  2 r4/.hg/cache/rbc-names-v1
+  2 r4/.hg/cache/rbc-revs-v1
   1 r4/.hg/dirstate
   2 r4/.hg/hgrc
   2 r4/.hg/last-message.txt
@@ -238,6 +253,9 @@
   2 r4/.hg/store/fncache
   2 r4/.hg/store/phaseroots
   2 r4/.hg/store/undo
+  2 r4/.hg/store/undo.backup.fncache
+  2 r4/.hg/store/undo.backup.phaseroots
+  2 r4/.hg/store/undo.backupfiles
   2 r4/.hg/store/undo.phaseroots
   2 r4/.hg/undo.bookmarks
   2 r4/.hg/undo.branch
--- a/tests/test-help.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-help.t	Sat Jan 17 18:28:30 2015 -0800
@@ -272,7 +272,7 @@
        schemes       extend schemes with shortcuts to repository swarms
        share         share a common history between several working directories
        shelve        save and restore changes to the working directory
-       strip         strip changesets and their descendents from history
+       strip         strip changesets and their descendants from history
        transplant    command to transplant changesets from another branch
        win32mbcs     allow the use of MBCS paths with problematic encodings
        zeroconf      discover and advertise repositories on the local network
@@ -500,6 +500,7 @@
    -a --text                treat all files as text
    -g --git                 use git extended diff format
       --nodates             omit dates from diff headers
+      --noprefix            omit a/ and b/ prefixes from filenames
    -p --show-function       show which function each change is in
       --reverse             produce a diff that undoes the changes
    -w --ignore-all-space    ignore white space when comparing lines
@@ -779,9 +780,9 @@
                  dump an index DAG as a graphviz dot file
    debuginstall  test Mercurial installation
    debugknown    test whether node ids are known to a repo
-   debuglabelcomplete
-                 complete "labels" - tags, open branch names, bookmark names
    debuglocks    show or modify state of locks
+   debugnamecomplete
+                 complete "names" - tags, open branch names, bookmark names
    debugobsolete
                  create arbitrary obsolete marker
    debugoptDEP   (no help text available)
@@ -985,7 +986,7 @@
   > .. container:: verbose
   > 
   >   This paragraph is omitted,
-  >   if :hg:\`help\` is invoked witout \`\`-v\`\` (for extension)
+  >   if :hg:\`help\` is invoked without \`\`-v\`\` (for extension)
   > 
   > This paragraph is never omitted, too (for extension)
   > '''
@@ -996,7 +997,7 @@
   > .. container:: verbose
   > 
   >   This paragraph is omitted,
-  >   if :hg:\`help\` is invoked witout \`\`-v\`\` (for topic)
+  >   if :hg:\`help\` is invoked without \`\`-v\`\` (for topic)
   > 
   > This paragraph is never omitted, too (for topic)
   > """
@@ -1022,7 +1023,8 @@
   
   This paragraph is never omitted (for extension)
   
-  This paragraph is omitted, if "hg help" is invoked witout "-v" (for extension)
+  This paragraph is omitted, if "hg help" is invoked without "-v" (for
+  extension)
   
   This paragraph is never omitted, too (for extension)
   
@@ -1042,7 +1044,8 @@
   
       This paragraph is never omitted (for topic).
   
-      This paragraph is omitted, if "hg help" is invoked witout "-v" (for topic)
+      This paragraph is omitted, if "hg help" is invoked without "-v" (for
+      topic)
   
       This paragraph is never omitted, too (for topic)
 
@@ -1943,6 +1946,9 @@
   <tr><td>-f</td>
   <td>--force</td>
   <td>remove (and delete) file even if added or modified</td></tr>
+  <tr><td>-S</td>
+  <td>--subrepos</td>
+  <td>recurse into subrepositories</td></tr>
   <tr><td>-I</td>
   <td>--include PATTERN [+]</td>
   <td>include names matching the given patterns</td></tr>
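
Two user-visible changes surface in the help output above: debuglabelcomplete
is renamed to debugnamecomplete, and "hg diff" gains a --noprefix option. As a
sketch, the option can also be enabled permanently via configuration:

  [diff]
  noprefix = True
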
--- a/tests/test-hgignore.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-hgignore.t	Sat Jan 17 18:28:30 2015 -0800
@@ -64,20 +64,39 @@
   ? dir/c.o
   ? syntax
 
-  $ echo "glob:**.o" > .hgignore
+Test that patterns from ui.ignore options are read:
+
+  $ echo > .hgignore
+  $ cat >> $HGRCPATH << EOF
+  > [ui]
+  > ignore.other = $TESTTMP/.hg/testhgignore
+  > EOF
+  $ echo "glob:**.o" > .hg/testhgignore
   $ hg status
   A dir/b.o
   ? .hgignore
   ? a.c
   ? syntax
 
-  $ echo "glob:*.o" > .hgignore
+empty out testhgignore
+  $ echo > .hg/testhgignore
+
+Test relative ignore path (issue4473):
+
+  $ cat >> $HGRCPATH << EOF
+  > [ui]
+  > ignore.relative = .hg/testhgignorerel
+  > EOF
+  $ echo "glob:*.o" > .hg/testhgignorerel
+  $ cd dir
   $ hg status
   A dir/b.o
   ? .hgignore
   ? a.c
   ? syntax
 
+  $ cd ..
+  $ echo > .hg/testhgignorerel
   $ echo "syntax: glob" > .hgignore
   $ echo "re:.*\.o" >> .hgignore
   $ hg status
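
The new hgignore hunks exercise extra ignore files declared under [ui] as
ignore.<name>, including paths relative to the config file (issue4473). A
sketch of the same setup in a user's hgrc, with an illustrative path, is:

  [ui]
  ignore.extra = ~/.hgignore-extra
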
--- a/tests/test-hgk.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-hgk.t	Sat Jan 17 18:28:30 2015 -0800
@@ -16,5 +16,32 @@
   phase draft
   
   adda
+  $ echo b > b
+  $ hg ci -Am addb
+  adding b
+  $ hg log -T '{node}\n'
+  102a90ea7b4a3361e4082ed620918c261189a36a
+  07f4944404050f47db2e5c5071e0e84e7a27bba9
+
+  $ hg debug-diff-tree 07f494440405 102a90ea7b4a
+  :000000 100664 000000000000 1e88685f5dde N	b	b
+  $ hg debug-diff-tree 07f494440405 102a90ea7b4a --patch
+  diff --git a/b b/b
+  new file mode 100644
+  --- /dev/null
+  +++ b/b
+  @@ -0,0 +1,1 @@
+  +b
+
+Ensure that diff-tree output isn't affected by diffopts
+  $ hg --config diff.noprefix=True debug-diff-tree 07f494440405 102a90ea7b4a
+  :000000 100664 000000000000 1e88685f5dde N	b	b
+  $ hg --config diff.noprefix=True debug-diff-tree --patch 07f494440405 102a90ea7b4a
+  diff --git a/b b/b
+  new file mode 100644
+  --- /dev/null
+  +++ b/b
+  @@ -0,0 +1,1 @@
+  +b
 
   $ cd ..
--- a/tests/test-hgweb-bundle.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-hgweb-bundle.t	Sat Jan 17 18:28:30 2015 -0800
@@ -18,11 +18,11 @@
 
   $ hg strip -r 1
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
-  saved backup bundle to $TESTTMP/server/.hg/strip-backup/ed602e697e0f-backup.hg (glob)
+  saved backup bundle to $TESTTMP/server/.hg/strip-backup/ed602e697e0f-cc9fff6a-backup.hg (glob)
 
 Serve from a bundle file
 
-  $ hg serve -R .hg/strip-backup/ed602e697e0f-backup.hg -d -p $HGPORT --pid-file=hg.pid
+  $ hg serve -R .hg/strip-backup/ed602e697e0f-cc9fff6a-backup.hg -d -p $HGPORT --pid-file=hg.pid
   $ cat hg.pid >> $DAEMON_PIDS
 
 Ensure we're serving from the bundle
--- a/tests/test-hgweb-commands.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-hgweb-commands.t	Sat Jan 17 18:28:30 2015 -0800
@@ -726,10 +726,10 @@
   <ul>
    <li><a href="/help">help</a></li>
   </ul>
-  <p>
+  <p></p>
   <div class="atom-logo">
   <a href="/atom-log" title="subscribe to atom feed">
-  <img class="atom-logo" src="/static/feed-icon-14x14.png" alt="atom feed">
+  <img class="atom-logo" src="/static/feed-icon-14x14.png" alt="atom feed" />
   </a>
   </div>
   </div>
@@ -891,10 +891,10 @@
     <td class="diffstat">
        2 files changed, 2 insertions(+), 0 deletions(-)
   
-      <a id="diffstatexpand" href="javascript:toggleDiffstat()"/>[<tt>+</tt>]</a>
+      <a id="diffstatexpand" href="javascript:toggleDiffstat()">[<tt>+</tt>]</a>
       <div id="diffstatdetails" style="display:none;">
-        <a href="javascript:toggleDiffstat()"/>[<tt>-</tt>]</a>
-        <p>
+        <a href="javascript:toggleDiffstat()">[<tt>-</tt>]</a>
+        <p></p>
         <table class="stripes2">  <tr>
       <td class="diffstat-file"><a href="#l1.1">da/foo</a></td>
       <td class="diffstat-total" align="right">1</td>
@@ -2201,12 +2201,12 @@
   $ hg ci -m 'Babar is in the jungle!'
   created new head
   $ hg graft 0::
-  grafting revision 0
-  grafting revision 1
-  grafting revision 2
-  grafting revision 3
-  grafting revision 4
-  grafting revision 5
+  grafting 0:b4e73ffab476 "0"
+  grafting 1:e06180cbfb0c "1"
+  grafting 2:ab4f1438558b "2"
+  grafting 3:ada793dcc118 "3"
+  grafting 4:b60a39a85a01 "4" (secret)
+  grafting 5:aed2d9c1d0e7 "5"
 (turning the initial root secret (filtered))
   $ hg phase --force --secret 0
   $ PATH_INFO=/graph/; export PATH_INFO
--- a/tests/test-hgweb-diffs.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-hgweb-diffs.t	Sat Jan 17 18:28:30 2015 -0800
@@ -115,10 +115,10 @@
     <td class="diffstat">
        2 files changed, 2 insertions(+), 0 deletions(-)
   
-      <a id="diffstatexpand" href="javascript:toggleDiffstat()"/>[<tt>+</tt>]</a>
+      <a id="diffstatexpand" href="javascript:toggleDiffstat()">[<tt>+</tt>]</a>
       <div id="diffstatdetails" style="display:none;">
-        <a href="javascript:toggleDiffstat()"/>[<tt>-</tt>]</a>
-        <p>
+        <a href="javascript:toggleDiffstat()">[<tt>-</tt>]</a>
+        <p></p>
         <table class="stripes2">  <tr>
       <td class="diffstat-file"><a href="#l1.1">a</a></td>
       <td class="diffstat-total" align="right">1</td>
@@ -387,10 +387,10 @@
     <td class="diffstat">
        2 files changed, 2 insertions(+), 0 deletions(-)
   
-      <a id="diffstatexpand" href="javascript:toggleDiffstat()"/>[<tt>+</tt>]</a>
+      <a id="diffstatexpand" href="javascript:toggleDiffstat()">[<tt>+</tt>]</a>
       <div id="diffstatdetails" style="display:none;">
-        <a href="javascript:toggleDiffstat()"/>[<tt>-</tt>]</a>
-        <p>
+        <a href="javascript:toggleDiffstat()">[<tt>-</tt>]</a>
+        <p></p>
         <table class="stripes2">  <tr>
       <td class="diffstat-file"><a href="#l1.1">a</a></td>
       <td class="diffstat-total" align="right">1</td>
--- a/tests/test-hgweb-empty.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-hgweb-empty.t	Sat Jan 17 18:28:30 2015 -0800
@@ -48,10 +48,10 @@
   <ul>
    <li><a href="/help">help</a></li>
   </ul>
-  <p>
+  <p></p>
   <div class="atom-logo">
   <a href="/atom-log" title="subscribe to atom feed">
-  <img class="atom-logo" src="/static/feed-icon-14x14.png" alt="atom feed">
+  <img class="atom-logo" src="/static/feed-icon-14x14.png" alt="atom feed" />
   </a>
   </div>
   </div>
@@ -158,10 +158,10 @@
   <ul>
    <li><a href="/help">help</a></li>
   </ul>
-  <p>
+  <p></p>
   <div class="atom-logo">
   <a href="/atom-log" title="subscribe to atom feed">
-  <img class="atom-logo" src="/static/feed-icon-14x14.png" alt="atom feed">
+  <img class="atom-logo" src="/static/feed-icon-14x14.png" alt="atom feed" />
   </a>
   </div>
   </div>
@@ -264,10 +264,10 @@
   <ul>
    <li><a href="/help">help</a></li>
   </ul>
-  <p>
+  <p></p>
   <div class="atom-logo">
   <a href="/atom-log" title="subscribe to atom feed">
-  <img class="atom-logo" src="/static/feed-icon-14x14.png" alt="atom feed">
+  <img class="atom-logo" src="/static/feed-icon-14x14.png" alt="atom feed" />
   </a>
   </div>
   </div>
--- a/tests/test-hgweb-filelog.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-hgweb-filelog.t	Sat Jan 17 18:28:30 2015 -0800
@@ -156,10 +156,11 @@
   <ul>
   <li><a href="/help">help</a></li>
   </ul>
-  <p>
+  <p></p>
   <div class="atom-logo">
   <a href="/atom-log/01de2d66a28d/a" title="subscribe to atom feed">
-  <img class="atom-logo" src="/static/feed-icon-14x14.png" alt="atom feed"></a>
+  <img class="atom-logo" src="/static/feed-icon-14x14.png" alt="atom feed" />
+  </a>
   </div>
   </div>
   
@@ -265,10 +266,11 @@
   <ul>
   <li><a href="/help">help</a></li>
   </ul>
-  <p>
+  <p></p>
   <div class="atom-logo">
   <a href="/atom-log/01de2d66a28d/a" title="subscribe to atom feed">
-  <img class="atom-logo" src="/static/feed-icon-14x14.png" alt="atom feed"></a>
+  <img class="atom-logo" src="/static/feed-icon-14x14.png" alt="atom feed" />
+  </a>
   </div>
   </div>
   
@@ -374,10 +376,11 @@
   <ul>
   <li><a href="/help">help</a></li>
   </ul>
-  <p>
+  <p></p>
   <div class="atom-logo">
   <a href="/atom-log/5ed941583260/a" title="subscribe to atom feed">
-  <img class="atom-logo" src="/static/feed-icon-14x14.png" alt="atom feed"></a>
+  <img class="atom-logo" src="/static/feed-icon-14x14.png" alt="atom feed" />
+  </a>
   </div>
   </div>
   
@@ -478,10 +481,11 @@
   <ul>
   <li><a href="/help">help</a></li>
   </ul>
-  <p>
+  <p></p>
   <div class="atom-logo">
   <a href="/atom-log/5ed941583260/a" title="subscribe to atom feed">
-  <img class="atom-logo" src="/static/feed-icon-14x14.png" alt="atom feed"></a>
+  <img class="atom-logo" src="/static/feed-icon-14x14.png" alt="atom feed" />
+  </a>
   </div>
   </div>
   
@@ -643,7 +647,7 @@
     <th class="firstline"><a href="/rev/b7682196df1c?style=spartan">change c</a></th>
    </tr>
    <tr>
-    <th class="revision">revision 1:</td>
+    <th class="revision">revision 1:</th>
     <td class="node">
      <a href="/file/b7682196df1c/c?style=spartan">b7682196df1c</a>
      <a href="/diff/b7682196df1c/c?style=spartan">(diff)</a>
@@ -668,7 +672,7 @@
     <th class="firstline"><a href="/rev/1a6696706df2?style=spartan">mv b</a></th>
    </tr>
    <tr>
-    <th class="revision">revision 0:</td>
+    <th class="revision">revision 0:</th>
     <td class="node">
      <a href="/file/1a6696706df2/c?style=spartan">1a6696706df2</a>
      <a href="/diff/1a6696706df2/c?style=spartan">(diff)</a>
--- a/tests/test-hgweb-removed.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-hgweb-removed.t	Sat Jan 17 18:28:30 2015 -0800
@@ -96,10 +96,10 @@
     <td class="diffstat">
        1 files changed, 0 insertions(+), 1 deletions(-)
   
-      <a id="diffstatexpand" href="javascript:toggleDiffstat()"/>[<tt>+</tt>]</a>
+      <a id="diffstatexpand" href="javascript:toggleDiffstat()">[<tt>+</tt>]</a>
       <div id="diffstatdetails" style="display:none;">
-        <a href="javascript:toggleDiffstat()"/>[<tt>-</tt>]</a>
-        <p>
+        <a href="javascript:toggleDiffstat()">[<tt>-</tt>]</a>
+        <p></p>
         <table class="stripes2">  <tr>
       <td class="diffstat-file"><a href="#l1.1">a</a></td>
       <td class="diffstat-total" align="right">1</td>
--- a/tests/test-hgweb.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-hgweb.t	Sat Jan 17 18:28:30 2015 -0800
@@ -332,7 +332,7 @@
 
   $ "$TESTDIR/get-with-headers.py" --twice localhost:$HGPORT 'static/style-gitweb.css' - date etag server
   200 Script output follows
-  content-length: 5262
+  content-length: 5372
   content-type: text/css
   
   body { font-family: sans-serif; font-size: 12px; border:solid #d9d8d1; border-width:1px; margin:10px; }
@@ -421,6 +421,9 @@
   	background-color: #afdffa;
   	border-color: #ccecff #46ace6 #46ace6 #ccecff;
   }
+  span.difflineplus { color:#008800; }
+  span.difflineminus { color:#cc0000; }
+  span.difflineat { color:#990099; }
   
   /* Graph */
   div#wrapper {
--- a/tests/test-histedit-bookmark-motion.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-histedit-bookmark-motion.t	Sat Jan 17 18:28:30 2015 -0800
@@ -91,8 +91,8 @@
   histedit: moving bookmarks three from 055a42cdd887 to 59d9f330561f
   histedit: moving bookmarks two from 177f92b77385 to b346ab9a313d
   histedit: moving bookmarks will-move-backwards from d2ae7f538514 to cb9a9f314b8b
-  saved backup bundle to $TESTTMP/r/.hg/strip-backup/d2ae7f538514-backup.hg (glob)
-  saved backup bundle to $TESTTMP/r/.hg/strip-backup/96e494a2d553-backup.hg (glob)
+  saved backup bundle to $TESTTMP/r/.hg/strip-backup/d2ae7f538514-48787b8d-backup.hg (glob)
+  saved backup bundle to $TESTTMP/r/.hg/strip-backup/96e494a2d553-60cea58b-backup.hg (glob)
   $ hg log --graph
   @  changeset:   3:cacdfd884a93
   |  bookmark:    five
@@ -147,7 +147,7 @@
   histedit: moving bookmarks five from cacdfd884a93 to c04e50810e4b
   histedit: moving bookmarks four from 59d9f330561f to c04e50810e4b
   histedit: moving bookmarks three from 59d9f330561f to c04e50810e4b
-  saved backup bundle to $TESTTMP/r/.hg/strip-backup/59d9f330561f-backup.hg (glob)
+  saved backup bundle to $TESTTMP/r/.hg/strip-backup/59d9f330561f-073008af-backup.hg (glob)
 
 We expect 'five' to stay at tip, since the tipmost bookmark is most
 likely the useful signal.
--- a/tests/test-histedit-commute.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-histedit-commute.t	Sat Jan 17 18:28:30 2015 -0800
@@ -382,3 +382,78 @@
   $ hg histedit 0
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ cd ..
+
+  $ cd ..
+
+
+Test to make sure folding renames doesn't cause bogus conflicts (issue4251):
+  $ hg init issue4251
+  $ cd issue4251
+
+  $ mkdir initial-dir
+  $ echo foo > initial-dir/initial-file
+  $ hg add initial-dir/initial-file
+  $ hg commit -m "initial commit"
+
+Move the file to a new directory, and in the same commit, change its content:
+  $ mkdir another-dir
+  $ hg mv initial-dir/initial-file another-dir/
+  $ echo changed > another-dir/initial-file
+  $ hg commit -m "moved and changed"
+
+Rename the file:
+  $ hg mv another-dir/initial-file another-dir/renamed-file
+  $ hg commit -m "renamed"
+
+Now, let's try to fold the second commit into the first:
+  $ cat > editor.sh <<EOF
+  > #!/bin/sh
+  > cat > \$1 <<ENDOF
+  > pick b0f4233702ca 0 initial commit
+  > fold 5e8704a8f2d2 1 moved and changed
+  > pick 40e7299e8fa7 2 renamed
+  > ENDOF
+  > EOF
+
+  $ HGEDITOR="sh ./editor.sh" hg histedit 0
+  1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  adding another-dir/initial-file
+  removing initial-dir/initial-file
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  saved backup bundle to $TESTTMP/issue4251/.hg/strip-backup/*-backup.hg (glob)
+
+  $ hg --config diff.git=yes export 0
+  # HG changeset patch
+  # User test
+  # Date 0 0
+  #      Thu Jan 01 00:00:00 1970 +0000
+  # Node ID fffadc26f8f85623ce60b028a3f1ccc3730f8530
+  # Parent  0000000000000000000000000000000000000000
+  pick b0f4233702ca 0 initial commit
+  fold 5e8704a8f2d2 1 moved and changed
+  pick 40e7299e8fa7 2 renamed
+  
+  diff --git a/another-dir/initial-file b/another-dir/initial-file
+  new file mode 100644
+  --- /dev/null
+  +++ b/another-dir/initial-file
+  @@ -0,0 +1,1 @@
+  +changed
+
+  $ hg --config diff.git=yes export 1
+  # HG changeset patch
+  # User test
+  # Date 0 0
+  #      Thu Jan 01 00:00:00 1970 +0000
+  # Node ID 9b730d82b00af8a2766facebfa47cc124405a118
+  # Parent  fffadc26f8f85623ce60b028a3f1ccc3730f8530
+  renamed
+  
+  diff --git a/another-dir/initial-file b/another-dir/renamed-file
+  rename from another-dir/initial-file
+  rename to another-dir/renamed-file
+
+  $ cd ..
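
The issue4251 test above scripts the histedit plan through a throwaway
$HGEDITOR shell script. An equivalent pattern, used elsewhere in this series,
is to pass the plan on stdin with --commands -; a sketch reusing the same plan
lines (hashes as printed by the test) is:

  $ hg histedit 0 --commands - << EOF
  > pick b0f4233702ca 0 initial commit
  > fold 5e8704a8f2d2 1 moved and changed
  > pick 40e7299e8fa7 2 renamed
  > EOF
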
--- a/tests/test-histedit-edit.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-histedit-edit.t	Sat Jan 17 18:28:30 2015 -0800
@@ -170,7 +170,7 @@
   HG: branch 'default'
   HG: added f
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  saved backup bundle to $TESTTMP/r/.hg/strip-backup/b5f70786f9b0-backup.hg (glob)
+  saved backup bundle to $TESTTMP/r/.hg/strip-backup/b5f70786f9b0-c28d9c86-backup.hg (glob)
 
   $ hg status
 
@@ -342,4 +342,4 @@
   [1]
   $ HGEDITOR=true hg histedit --continue
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  saved backup bundle to $TESTTMP/r0/.hg/strip-backup/cb9a9f314b8b-backup.hg (glob)
+  saved backup bundle to $TESTTMP/r0/.hg/strip-backup/cb9a9f314b8b-cc5ccb0b-backup.hg (glob)
--- a/tests/test-histedit-fold.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-histedit-fold.t	Sat Jan 17 18:28:30 2015 -0800
@@ -172,11 +172,16 @@
   > EOF
 
   $ rm -f .hg/last-message.txt
-  $ HGEDITOR="sh $TESTTMP/editor.sh" hg histedit 8e03a72b6f83 --commands - 2>&1 <<EOF | fixbundle
+  $ hg status --rev '8e03a72b6f83^1::c4a9eb7989fc'
+  A c
+  A d
+  A f
+  $ HGEDITOR="sh $TESTTMP/editor.sh" hg histedit 8e03a72b6f83 --commands - 2>&1 <<EOF
   > pick 8e03a72b6f83 f
   > fold c4a9eb7989fc d
   > EOF
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  adding d
   allow non-folding commit
   0 files updated, 0 files merged, 3 files removed, 0 files unresolved
   ==== before editing
@@ -193,13 +198,14 @@
   HG: --
   HG: user: test
   HG: branch 'default'
-  HG: changed c
-  HG: changed d
-  HG: changed f
+  HG: added c
+  HG: added d
+  HG: added f
   ====
   transaction abort!
   rollback completed
   abort: pretxncommit.abortfolding hook failed
+  [255]
 
   $ cat .hg/last-message.txt
   f
@@ -381,7 +387,7 @@
   HG: changed file
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  saved backup bundle to $TESTTMP/fold-with-dropped/.hg/strip-backup/617f94f13c0f-backup.hg (glob)
+  saved backup bundle to $TESTTMP/fold-with-dropped/.hg/strip-backup/617f94f13c0f-3d69522c-backup.hg (glob)
   $ hg logt -G
   @  1:10c647b2cdd5 +4
   |
@@ -470,7 +476,14 @@
   1:199b6bb90248 b
   0:6c795aa153cb a
 
-  $ hg histedit 6c795aa153cb --config hooks.commit="echo commit \$HG_NODE" --commands - 2>&1 << EOF | fixbundle
+Set up the proper environment variable symbol for the platform, to be
+substituted into the hook command.
+#if windows
+  $ NODE="%HG_NODE%"
+#else
+  $ NODE="\$HG_NODE"
+#endif
+  $ hg histedit 6c795aa153cb --config hooks.commit="echo commit $NODE" --commands - 2>&1 << EOF | fixbundle
   > pick 199b6bb90248 b
   > fold a1a953ffb4b0 c
   > pick 6c795aa153cb a
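
The NODE indirection above exists because hook commands reference environment
variables as $HG_NODE under a POSIX shell but as %HG_NODE% under cmd.exe. The
same caveat applies to hooks configured in an hgrc; a POSIX-shell sketch:

  [hooks]
  commit = echo committed $HG_NODE
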
--- a/tests/test-histedit-obsolete.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-histedit-obsolete.t	Sat Jan 17 18:28:30 2015 -0800
@@ -64,7 +64,7 @@
   > fold e860deea161a 4 e
   > pick 652413bf663e 5 f
   > EOF
-  saved backup bundle to $TESTTMP/base/.hg/strip-backup/96e494a2d553-backup.hg (glob)
+  saved backup bundle to $TESTTMP/base/.hg/strip-backup/96e494a2d553-60cea58b-backup.hg (glob)
   $ hg log --graph --hidden
   @  8:cacdfd884a93 f
   |
@@ -146,6 +146,7 @@
 stabilise
 
   $ hg rebase  -r 'unstable()' -d .
+  rebasing 9:c13eb81022ca "f"
   $ hg up tip -q
 
 Test dropping of changeset on the top of the stack
@@ -426,9 +427,9 @@
   0 files updated, 0 files merged, 2 files removed, 0 files unresolved
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  saved backup bundle to $TESTTMP/folding/.hg/strip-backup/58019c66f35f-backup.hg (glob)
-  saved backup bundle to $TESTTMP/folding/.hg/strip-backup/83d1858e070b-backup.hg (glob)
-  saved backup bundle to $TESTTMP/folding/.hg/strip-backup/859969f5ed7e-backup.hg (glob)
+  saved backup bundle to $TESTTMP/folding/.hg/strip-backup/58019c66f35f-be4b3835-backup.hg (glob)
+  saved backup bundle to $TESTTMP/folding/.hg/strip-backup/83d1858e070b-08306a6b-backup.hg (glob)
+  saved backup bundle to $TESTTMP/folding/.hg/strip-backup/859969f5ed7e-86c99c41-backup.hg (glob)
   $ hg log -G
   @  19:f9daec13fb98 (secret) i
   |
--- a/tests/test-hook.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-hook.t	Sat Jan 17 18:28:30 2015 -0800
@@ -148,6 +148,21 @@
   $ hg -q tip
   4:539e4b31b6dc
 
+(Check that no 'changelog.i.a' file were left behind)
+
+  $ ls -1 .hg/store/
+  00changelog.i
+  00manifest.i
+  data
+  fncache
+  journal.phaseroots
+  phaseroots
+  undo
+  undo.backup.fncache
+  undo.backupfiles
+  undo.phaseroots
+
+
 precommit hook can prevent commit
 
   $ echo "precommit.forbid = python \"$TESTDIR/printenv.py\" precommit.forbid 1" >> .hg/hgrc
@@ -498,7 +513,10 @@
   Automatically installed hook
   $ echo >> foo
   $ hg ci --debug -d '0 0' -m 'change foo'
+  committing files:
   foo
+  committing manifest
+  committing changelog
   calling hook commit.auto: hgext_hookext.autohook
   Automatically installed hook
   committed changeset 1:52998019f6252a2b893452765fcb0a47351a5708
--- a/tests/test-http.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-http.t	Sat Jan 17 18:28:30 2015 -0800
@@ -201,6 +201,15 @@
   no changes found
   updating to branch default
   5 files updated, 0 files merged, 0 files removed, 0 files unresolved
+--pull should override server's preferuncompressed
+  $ hg clone --pull http://user:pass@localhost:$HGPORT2/ dest-pull 2>&1
+  requesting all changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 5 changes to 5 files
+  updating to branch default
+  5 files updated, 0 files merged, 0 files removed, 0 files unresolved
 
   $ hg id http://user2@localhost:$HGPORT2/
   abort: http authorization required for http://localhost:$HGPORT2/
@@ -259,6 +268,12 @@
   "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D5fed3813f7f5e1824344fdc9cf8f63bb662c292d
   "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases
   "GET /?cmd=capabilities HTTP/1.1" 200 -
+  "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=bookmarks
+  "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
+  "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D
+  "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:common=0000000000000000000000000000000000000000&heads=5fed3813f7f5e1824344fdc9cf8f63bb662c292d
+  "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases
+  "GET /?cmd=capabilities HTTP/1.1" 200 -
   "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip
   "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces
   "GET /?cmd=capabilities HTTP/1.1" 200 -
--- a/tests/test-https.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-https.t	Sat Jan 17 18:28:30 2015 -0800
@@ -121,7 +121,7 @@
   $ DISABLEOSXDUMMYCERT=
 #if osx
   $ hg clone https://localhost:$HGPORT/ copy-pull
-  abort: error: *:SSL3_GET_SERVER_CERTIFICATE:certificate verify failed (glob)
+  abort: error: *certificate verify failed* (glob)
   [255]
 
   $ DISABLEOSXDUMMYCERT="--config=web.cacerts="
@@ -205,7 +205,7 @@
   searching for changes
   no changes found
   $ hg -R copy-pull pull --config web.cacerts=pub-other.pem
-  abort: error: *:SSL3_GET_SERVER_CERTIFICATE:certificate verify failed (glob)
+  abort: error: *certificate verify failed* (glob)
   [255]
   $ hg -R copy-pull pull --config web.cacerts=pub-other.pem --insecure
   warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
@@ -218,7 +218,7 @@
   $ hg -R test serve -p $HGPORT1 -d --pid-file=hg1.pid --certificate=server-not-yet.pem
   $ cat hg1.pid >> $DAEMON_PIDS
   $ hg -R copy-pull pull --config web.cacerts=pub-not-yet.pem https://localhost:$HGPORT1/
-  abort: error: *:SSL3_GET_SERVER_CERTIFICATE:certificate verify failed (glob)
+  abort: error: *certificate verify failed* (glob)
   [255]
 
 Test server cert which no longer is valid
@@ -226,7 +226,7 @@
   $ hg -R test serve -p $HGPORT2 -d --pid-file=hg2.pid --certificate=server-expired.pem
   $ cat hg2.pid >> $DAEMON_PIDS
   $ hg -R copy-pull pull --config web.cacerts=pub-expired.pem https://localhost:$HGPORT2/
-  abort: error: *:SSL3_GET_SERVER_CERTIFICATE:certificate verify failed (glob)
+  abort: error: *certificate verify failed* (glob)
   [255]
 
 Fingerprints
@@ -286,8 +286,8 @@
 Test https with cert problems through proxy
 
   $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull --config web.cacerts=pub-other.pem
-  abort: error: *:SSL3_GET_SERVER_CERTIFICATE:certificate verify failed (glob)
+  abort: error: *certificate verify failed* (glob)
   [255]
   $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull --config web.cacerts=pub-expired.pem https://localhost:$HGPORT2/
-  abort: error: *:SSL3_GET_SERVER_CERTIFICATE:certificate verify failed (glob)
+  abort: error: *certificate verify failed* (glob)
   [255]
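
The expected error is now globbed down to "certificate verify failed" because
the SSL3_GET_SERVER_CERTIFICATE prefix varies across OpenSSL builds. When no
usable CA bundle is configured, the warning above points at fingerprint
pinning; a sketch using the fingerprint printed earlier in this test:

  [hostfingerprints]
  localhost = 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca
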
--- a/tests/test-hup.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-hup.t	Sat Jan 17 18:28:30 2015 -0800
@@ -34,5 +34,14 @@
   rollback completed
   killed!
 
-  $ echo .hg/* .hg/store/*
-  .hg/00changelog.i .hg/journal.bookmarks .hg/journal.branch .hg/journal.desc .hg/journal.dirstate .hg/requires .hg/store .hg/store/00changelog.i .hg/store/00changelog.i.a .hg/store/journal.phaseroots
+  $ ls -1d .hg/* .hg/store/*
+  .hg/00changelog.i
+  .hg/journal.bookmarks
+  .hg/journal.branch
+  .hg/journal.desc
+  .hg/journal.dirstate
+  .hg/requires
+  .hg/store
+  .hg/store/00changelog.i
+  .hg/store/00changelog.i.a
+  .hg/store/journal.phaseroots
--- a/tests/test-import.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-import.t	Sat Jan 17 18:28:30 2015 -0800
@@ -411,11 +411,17 @@
   $ hg --cwd b import -v ../patch1 ../patch2
   applying ../patch1
   patching file a
+  committing files:
   a
+  committing manifest
+  committing changelog
   created 1d4bd90af0e4
   applying ../patch2
   patching file a
+  committing files:
   a
+  committing manifest
+  committing changelog
   created 6d019af21222
   $ hg --cwd b rollback
   repository tip rolled back to revision 0 (undo import)
--- a/tests/test-inherit-mode.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-inherit-mode.t	Sat Jan 17 18:28:30 2015 -0800
@@ -66,6 +66,8 @@
   00600 ./.hg/00changelog.i
   00770 ./.hg/cache/
   00660 ./.hg/cache/branch2-served
+  00660 ./.hg/cache/rbc-names-v1
+  00660 ./.hg/cache/rbc-revs-v1
   00660 ./.hg/dirstate
   00660 ./.hg/last-message.txt
   00600 ./.hg/requires
@@ -79,6 +81,7 @@
   00660 ./.hg/store/fncache
   00660 ./.hg/store/phaseroots
   00660 ./.hg/store/undo
+  00660 ./.hg/store/undo.backupfiles
   00660 ./.hg/store/undo.phaseroots
   00660 ./.hg/undo.bookmarks
   00660 ./.hg/undo.branch
@@ -111,6 +114,8 @@
   00660 ../push/.hg/00changelog.i
   00770 ../push/.hg/cache/
   00660 ../push/.hg/cache/branch2-base
+  00660 ../push/.hg/cache/rbc-names-v1
+  00660 ../push/.hg/cache/rbc-revs-v1
   00660 ../push/.hg/requires
   00770 ../push/.hg/store/
   00660 ../push/.hg/store/00changelog.i
@@ -121,6 +126,7 @@
   00660 ../push/.hg/store/data/foo.i
   00660 ../push/.hg/store/fncache
   00660 ../push/.hg/store/undo
+  00660 ../push/.hg/store/undo.backupfiles
   00660 ../push/.hg/store/undo.phaseroots
   00660 ../push/.hg/undo.bookmarks
   00660 ../push/.hg/undo.branch
--- a/tests/test-issue1175.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-issue1175.t	Sat Jan 17 18:28:30 2015 -0800
@@ -28,9 +28,12 @@
   $ hg ci -Am4 a
 
   $ hg ci --debug --traceback -Am5 b
+  committing files:
   b
    b: searching for copy revision for a
    b: copy a:b80de5d138758541c5f05265ad144ab9fa86d1db
+  committing manifest
+  committing changelog
   committed changeset 5:732aafbecb501a198b3cc9323ad3899ff04ccf95
 
   $ hg verify
--- a/tests/test-issue3084.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-issue3084.t	Sat Jan 17 18:28:30 2015 -0800
@@ -42,9 +42,7 @@
   $ echo "n" | hg merge --config ui.interactive=Yes
   remote turned local normal file foo into a largefile
   use (l)argefile or keep (n)ormal file? n
-  getting changed largefiles
-  0 largefiles updated, 0 removed
-  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
 
   $ hg status
@@ -116,8 +114,8 @@
   remote turned local largefile foo into a normal file
   keep (l)argefile or use (n)ormal file? l
   getting changed largefiles
-  1 largefiles updated, 0 removed
-  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  0 largefiles updated, 0 removed
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
 
   $ hg status
@@ -285,8 +283,6 @@
 
   $ hg up -Cqr normal2
   $ hg merge -r large
-  local changed f which remote deleted
-  use (c)hanged version or (d)elete? c
   remote turned local normal file f into a largefile
   use (l)argefile or keep (n)ormal file? l
   getting changed largefiles
@@ -297,48 +293,29 @@
   large
 
   $ hg up -Cqr normal2
-  $ ( echo c; echo n ) | hg merge -r large --config ui.interactive=Yes
-  local changed f which remote deleted
-  use (c)hanged version or (d)elete? c
+  $ echo n | hg merge -r large --config ui.interactive=Yes
   remote turned local normal file f into a largefile
   use (l)argefile or keep (n)ormal file? n
-  getting changed largefiles
-  0 largefiles updated, 0 removed
-  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
   $ cat f
   normal2
 
-  $ hg up -Cqr normal2
-  $ echo d | hg merge -r large --config ui.interactive=Yes
-  local changed f which remote deleted
-  use (c)hanged version or (d)elete? d
-  getting changed largefiles
-  1 largefiles updated, 0 removed
-  1 files updated, 0 files merged, 1 files removed, 0 files unresolved
-  (branch merge, don't forget to commit)
-  $ cat f
-  large
-
 swap
 
   $ hg up -Cqr large
   $ hg merge -r normal2
-  remote changed f which local deleted
-  use (c)hanged version or leave (d)eleted? c
   remote turned local largefile f into a normal file
   keep (l)argefile or use (n)ormal file? l
   getting changed largefiles
-  1 largefiles updated, 0 removed
-  1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  0 largefiles updated, 0 removed
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
   $ cat f
   large
 
   $ hg up -Cqr large
-  $ ( echo c; echo n ) | hg merge -r normal2 --config ui.interactive=Yes
-  remote changed f which local deleted
-  use (c)hanged version or leave (d)eleted? c
+  $ echo n | hg merge -r normal2 --config ui.interactive=Yes
   remote turned local largefile f into a normal file
   keep (l)argefile or use (n)ormal file? n
   getting changed largefiles
@@ -348,17 +325,6 @@
   $ cat f
   normal2
 
-  $ hg up -Cqr large
-  $ echo d | hg merge -r normal2 --config ui.interactive=Yes
-  remote changed f which local deleted
-  use (c)hanged version or leave (d)eleted? d
-  getting changed largefiles
-  0 largefiles updated, 0 removed
-  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  (branch merge, don't forget to commit)
-  $ cat f
-  large
-
 Ancestor: large   Parent: large-id   Parent: normal  result: normal
 
   $ hg up -Cqr large-id
@@ -404,21 +370,19 @@
 
   $ hg up -Cqr large2
   $ hg merge -r normal
-  local changed .hglf/f which remote deleted
-  use (c)hanged version or (d)elete? c
   remote turned local largefile f into a normal file
   keep (l)argefile or use (n)ormal file? l
   getting changed largefiles
-  1 largefiles updated, 0 removed
-  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  0 largefiles updated, 0 removed
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
   $ cat f
   large2
 
   $ hg up -Cqr large2
-  $ echo d | hg merge -r normal --config ui.interactive=Yes
-  local changed .hglf/f which remote deleted
-  use (c)hanged version or (d)elete? d
+  $ echo n | hg merge -r normal --config ui.interactive=Yes
+  remote turned local largefile f into a normal file
+  keep (l)argefile or use (n)ormal file? n
   getting changed largefiles
   0 largefiles updated, 0 removed
   1 files updated, 0 files merged, 1 files removed, 0 files unresolved
@@ -430,8 +394,6 @@
 
   $ hg up -Cqr normal
   $ hg merge -r large2
-  remote changed .hglf/f which local deleted
-  use (c)hanged version or leave (d)eleted? c
   remote turned local normal file f into a largefile
   use (l)argefile or keep (n)ormal file? l
   getting changed largefiles
@@ -442,9 +404,9 @@
   large2
 
   $ hg up -Cqr normal
-  $ echo d | hg merge -r large2 --config ui.interactive=Yes
-  remote changed .hglf/f which local deleted
-  use (c)hanged version or leave (d)eleted? d
+  $ echo n | hg merge -r large2 --config ui.interactive=Yes
+  remote turned local normal file f into a largefile
+  use (l)argefile or keep (n)ormal file? n
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
   $ cat f
--- a/tests/test-issue672.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-issue672.t	Sat Jan 17 18:28:30 2015 -0800
@@ -40,7 +40,7 @@
    1a: remote created -> g
   getting 1a
   updating: 1a 2/2 files (100.00%)
-   2: keep -> k
+   2: remote unchanged -> k
   1 files updated, 0 files merged, 1 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
 
--- a/tests/test-keyword.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-keyword.t	Sat Jan 17 18:28:30 2015 -0800
@@ -135,8 +135,11 @@
 Commit with several checks
 
   $ hg --debug commit -mabsym -u 'User Name <user@example.com>'
+  committing files:
   a
   b
+  committing manifest
+  committing changelog
   overwriting a expanding keywords
   running hook commit.test: cp a hooktest
   committed changeset 1:ef63ca68695bc9495032c6fda1350c71e6d256e9
@@ -184,7 +187,7 @@
   $ hg status -A --rev 3:4 i
   C i
 
-  $ hg -q strip -n 2
+  $ hg -q strip --no-backup 2
 
 Test hook execution
 
@@ -475,7 +478,10 @@
   new file mode 100644
   examine changes to 'r'? [Ynesfdaq?] y
   
+  committing files:
   r
+  committing manifest
+  committing changelog
   committed changeset 3:82a2f715724d
   overwriting r expanding keywords
  - status call required for dirstate.normallookup() check
@@ -500,7 +506,10 @@
   new file mode 100644
   examine changes to 'i'? [Ynesfdaq?] y
   
+  committing files:
   i
+  committing manifest
+  committing changelog
   committed changeset 3:9f40ceb5a072
   $ cat i
   $Id$
@@ -521,7 +530,7 @@
   $ head -1 a
   expand $Id: a,v 67d8c481a6be 1970/01/01 00:00:15 test $
 
-  $ hg -q strip -n tip
+  $ hg -q strip --no-backup tip
 
 Test patch queue repo
 
@@ -585,10 +594,11 @@
 Commit and show expansion in original and copy
 
   $ hg --debug commit -ma2c -d '1 0' -u 'User Name <user@example.com>'
-  invalid branchheads cache (served): tip differs
+  committing files:
   c
    c: copy a:0045e12f6c5791aac80ca6cbfd97709a88307292
-  invalid branchheads cache (served): tip differs
+  committing manifest
+  committing changelog
   overwriting c expanding keywords
   committed changeset 2:25736cf2f5cbe41f6be4e6784ef6ecf9f3bbcc7d
   $ cat a c
@@ -757,22 +767,11 @@
 
 Commit with multi-line message and custom expansion
 
-|Note:
-|
-| After the last rollback, the "served" branchheads cache became invalid, but
-| all changesets in the repo were public. For filtering this means:
-|   "immutable" == "served" == ø.
-|
-| As the "served" cache is invalid, we fall back to the "immutable" cache. But
-| no update is needed between "immutable" and "served" and the "served" cache
-| is not updated on disk. The on-disk version therefore stays invalid for some
-| time. This explains why the "served" branchheads cache is detected as
-| invalid here.
-
   $ hg --debug commit -l log -d '2 0' -u 'User Name <user@example.com>'
-  invalid branchheads cache (served): tip differs
+  committing files:
   a
-  invalid branchheads cache (served): tip differs
+  committing manifest
+  committing changelog
   overwriting a expanding keywords
   committed changeset 2:bb948857c743469b22bbf51f7ec8112279ca5d83
   $ rm log
@@ -814,6 +813,9 @@
   $ hg debugrebuildstate
   $ hg remove a
   $ hg --debug commit -m rma
+  committing files:
+  committing manifest
+  committing changelog
   committed changeset 3:d14c712653769de926994cf7fbb06c8fbd68f012
   $ hg status
   ? c
@@ -884,7 +886,10 @@
   >>> text = re.sub(r'(Id.*)', r'\1 rejecttest', open('a').read())
   >>> open('a', 'wb').write(text)
   $ hg --debug commit -m'rejects?' -d '3 0' -u 'User Name <user@example.com>'
+  committing files:
   a
+  committing manifest
+  committing changelog
   overwriting a expanding keywords
   committed changeset 2:85e279d709ffc28c9fdd1b868570985fc3d87082
   $ hg export -o ../rejecttest.diff tip
@@ -924,8 +929,11 @@
   [255]
   $ cd x
   $ hg --debug commit -m xa -d '3 0' -u 'User Name <user@example.com>'
+  committing files:
   x/a
    x/a: copy a:779c764182ce5d43e2b1eb66ce06d7b47bfe342e
+  committing manifest
+  committing changelog
   overwriting x/a expanding keywords
   committed changeset 3:b4560182a3f9a358179fd2d835c15e9da379c1e4
   $ cat a
@@ -1062,13 +1070,13 @@
   foo
   >>>>>>> other: 85d2d2d732a5  - test: simplemerge
 
-resolve to local
+resolve to local, m must contain hash of last change (local parent)
 
-  $ HGMERGE=internal:local hg resolve -a
+  $ hg resolve -t internal:local -a
   (no more unresolved files)
   $ hg commit -m localresolve
   $ cat m
-  $Id: m 800511b3a22d Thu, 01 Jan 1970 00:00:00 +0000 test $
+  $Id: m 88a80c8d172e Thu, 01 Jan 1970 00:00:00 +0000 test $
   bar
 
 Test restricted mode with transplant -b
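
Two command-line updates drive the keyword test changes above: "hg resolve"
now takes -t/--tool, used here in place of setting the HGMERGE environment
variable, and strip's no-backup option is now spelled --no-backup. As a
sketch:

  $ hg resolve --tool internal:local --all
  $ hg strip --no-backup tip
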
--- a/tests/test-largefiles-misc.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-largefiles-misc.t	Sat Jan 17 18:28:30 2015 -0800
@@ -67,6 +67,11 @@
   dirc/baz/largefile
   dirc/dirb
   dirc/dirb/largefile
+
+  $ hg clone -q . ../fetch
+  $ hg --config extensions.fetch= fetch ../fetch
+  abort: uncommitted changes
+  [255]
   $ hg up -qC
   $ cd ..
 
@@ -209,7 +214,7 @@
   A .hgsub
   ? .hgsubstate
   $ echo "rev 1" > subrepo/large.txt
-  $ hg -R subrepo add --large subrepo/large.txt
+  $ hg add --large subrepo/large.txt
   $ hg sum
   parent: 1:8ee150ea2e9c tip
    add subrepo
@@ -252,23 +257,57 @@
   $ echo 'normal file' > subrepo/normal.txt
   $ touch large.dat
   $ mv subrepo/large.txt subrepo/renamed-large.txt
-  $ hg -R subrepo addremove --dry-run
-  removing large.txt
-  adding normal.txt
-  adding renamed-large.txt
+  $ hg addremove -S --dry-run
+  adding large.dat as a largefile
+  removing subrepo/large.txt
+  adding subrepo/normal.txt
+  adding subrepo/renamed-large.txt
+  $ hg status -S
+  ! subrepo/large.txt
+  ? large.dat
+  ? subrepo/normal.txt
+  ? subrepo/renamed-large.txt
+
+  $ hg addremove --dry-run subrepo
+  removing subrepo/large.txt (glob)
+  adding subrepo/normal.txt (glob)
+  adding subrepo/renamed-large.txt (glob)
   $ hg status -S
   ! subrepo/large.txt
   ? large.dat
   ? subrepo/normal.txt
   ? subrepo/renamed-large.txt
-  $ mv subrepo/renamed-large.txt subrepo/large.txt
-  $ hg -R subrepo add subrepo/normal.txt
+  $ cd ..
+
+  $ hg -R statusmatch addremove --dry-run statusmatch/subrepo
+  removing statusmatch/subrepo/large.txt (glob)
+  adding statusmatch/subrepo/normal.txt (glob)
+  adding statusmatch/subrepo/renamed-large.txt (glob)
+  $ hg -R statusmatch status -S
+  ! subrepo/large.txt
+  ? large.dat
+  ? subrepo/normal.txt
+  ? subrepo/renamed-large.txt
 
-  $ hg addremove
+  $ hg -R statusmatch addremove --dry-run -S
   adding large.dat as a largefile
+  removing subrepo/large.txt
+  adding subrepo/normal.txt
+  adding subrepo/renamed-large.txt
+  $ cd statusmatch
+
+  $ mv subrepo/renamed-large.txt subrepo/large.txt
+  $ hg addremove subrepo
+  adding subrepo/normal.txt (glob)
+  $ hg forget subrepo/normal.txt
+
+  $ hg addremove -S
+  adding large.dat as a largefile
+  adding subrepo/normal.txt
   $ rm large.dat
 
-  $ hg addremove
+  $ hg addremove subrepo
+  $ hg addremove -S
   removing large.dat
 
 Lock in subrepo, otherwise the change isn't archived
@@ -323,6 +362,15 @@
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg status -S
 
+  $ hg rm -v subrepo/large.txt
+  removing subrepo/large.txt (glob)
+  $ hg revert -R subrepo subrepo/large.txt
+  $ rm subrepo/large.txt
+  $ hg addremove -S
+  removing subrepo/large.txt
+  $ hg st -S
+  R subrepo/large.txt
+
 Test archiving a revision that references a subrepo that is not yet
 cloned (see test-subrepo-recursion.t):
 
@@ -501,7 +549,7 @@
   b
   
   $ hg -R clone2 outgoing --large --graph --template "{rev}"
-  comparing with $TESTTMP/issue3651/src
+  comparing with $TESTTMP/issue3651/src (glob)
   searching for changes
   @  1
   
@@ -542,6 +590,8 @@
   all remote heads known locally
   1:1acbe71ce432
   2:6095d0695d70
+  finding outgoing largefiles: 0/2 revision (0.00%)
+  finding outgoing largefiles: 1/2 revision (50.00%)
   largefiles to upload (1 entities):
   b
       89e6c98d92887913cadf06b2adb97f26cde4849b
@@ -597,6 +647,11 @@
   3:7983dce246cc
   4:233f12ada4ae
   5:036794ea641c
+  finding outgoing largefiles: 0/5 revision (0.00%)
+  finding outgoing largefiles: 1/5 revision (20.00%)
+  finding outgoing largefiles: 2/5 revision (40.00%)
+  finding outgoing largefiles: 3/5 revision (60.00%)
+  finding outgoing largefiles: 4/5 revision (80.00%)
   largefiles to upload (3 entities):
   b
       13f9ed0898e315bf59dc2973fec52037b6f441a2
@@ -608,7 +663,7 @@
       89e6c98d92887913cadf06b2adb97f26cde4849b
   
 
-Pusing revision #1 causes uploading entity 89e6c98d9288, which is
+Pushing revision #1 causes uploading entity 89e6c98d9288, which is
 shared also by largefiles b1, b2 in revision #2 and b in revision #5.
 
 Then, entity 89e6c98d9288 is not treated as "outgoing entity" at "hg
@@ -642,6 +697,10 @@
   3:7983dce246cc
   4:233f12ada4ae
   5:036794ea641c
+  finding outgoing largefiles: 0/4 revision (0.00%)
+  finding outgoing largefiles: 1/4 revision (25.00%)
+  finding outgoing largefiles: 2/4 revision (50.00%)
+  finding outgoing largefiles: 3/4 revision (75.00%)
   largefiles to upload (2 entities):
   b
       13f9ed0898e315bf59dc2973fec52037b6f441a2
@@ -851,4 +910,33 @@
   $ cd ..
 
 
+Test "pull --rebase" when rebase is enabled before largefiles (issue3861)
+=========================================================================
 
+  $ hg showconfig extensions | grep largefiles
+  extensions.largefiles=!
+
+  $ mkdir issue3861
+  $ cd issue3861
+  $ hg init src
+  $ hg clone -q src dst
+  $ echo a > src/a
+  $ hg -R src commit -Aqm "#0"
+  Invoking status precommit hook
+  A a
+
+  $ cat >> dst/.hg/hgrc <<EOF
+  > [extensions]
+  > largefiles=
+  > EOF
+  $ hg -R dst pull --rebase
+  pulling from $TESTTMP/issue3861/src (glob)
+  requesting all changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 1 changes to 1 files
+  nothing to rebase - working directory parent is already an ancestor of destination bf5e395ced2c
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+  $ cd ..
--- a/tests/test-largefiles-update.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-largefiles-update.t	Sat Jan 17 18:28:30 2015 -0800
@@ -25,19 +25,52 @@
   $ hg commit -m '#2'
   created new head
 
+Test that update also updates the lfdirstate of 'unsure' largefiles after
+hashing them:
+
+The previous operations will usually have left us with largefiles with an mtime
+within the same second as the dirstate was written.
+The lfdirstate entries will thus have been written with an invalidated/unset
+mtime to make sure further changes within the same second are detected.
+We will however occasionally be "lucky" and get a tick between writing
+largefiles and writing dirstate so we get valid lfdirstate timestamps. The
+following check is thus disabled but can be run manually.
+
+#if false
+  $ hg debugdirstate --large --nodate
+  n 644          7 unset               large1
+  n 644         13 unset               large2
+#endif
+
+Wait to make sure we get a tick so the mtime of the largefiles become valid.
+
+  $ sleep 1
+
+A linear merge will update standins before performing the actual merge. It will
+do an lfdirstate status walk and find 'unset'/'unsure' files, hash them, and
+update the corresponding standins.
+Verify that it actually marks the clean files as clean in lfdirstate so
+we don't have to hash them again next time we update.
+
+  $ hg up
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ hg debugdirstate --large --nodate
+  n 644          7 set                 large1
+  n 644         13 set                 large2
+
 Test that lfdirstate keeps track of last modification of largefiles and
 prevents unnecessary hashing of content - also after linear/noop update
 
   $ sleep 1
   $ hg st
   $ hg debugdirstate --large --nodate
-  n 644          7 large1
-  n 644         13 large2
+  n 644          7 set                 large1
+  n 644         13 set                 large2
   $ hg up
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg debugdirstate --large --nodate
-  n 644          7 large1
-  n 644         13 large2
+  n 644          7 set                 large1
+  n 644         13 set                 large2
 
 Test that "hg merge" updates largefiles from "other" correctly
 
@@ -221,6 +254,8 @@
   $ hg commit -m '#4'
 
   $ hg rebase -s 1 -d 2 --keep
+  rebasing 1:72518492caa6 "#1"
+  rebasing 4:07d6153b5c04 "#4" (tip)
 #if windows
   $ hg status -A large1
   large1: * (glob)
@@ -332,13 +367,11 @@
   $ hg update -q -C 2
   $ echo 'modified large2 for linear merge' > large2
   $ hg update -q 5
-  local changed .hglf/large2 which remote deleted
-  use (c)hanged version or (d)elete? c
   remote turned local largefile large2 into a normal file
   keep (l)argefile or use (n)ormal file? l
   $ hg debugdirstate --nodates | grep large2
-  a   0         -1 .hglf/large2
-  r   0          0 large2
+  a   0         -1 unset               .hglf/large2
+  r   0          0 set                 large2
   $ hg status -A large2
   A large2
   $ cat large2
@@ -353,8 +386,8 @@
   remote turned local largefile large3 into a normal file
   keep (l)argefile or use (n)ormal file? l
   $ hg debugdirstate --nodates | grep large3
-  a   0         -1 .hglf/large3
-  r   0          0 large3
+  a   0         -1 unset               .hglf/large3
+  r   0          0 set                 large3
   $ hg status -A large3
   A large3
   $ cat large3
@@ -366,22 +399,20 @@
 
   $ hg update -q -C 2
   $ hg strip 3 4
-  saved backup bundle to $TESTTMP/repo/.hg/strip-backup/9530e27857f7-backup.hg (glob)
-  $ mv .hg/strip-backup/9530e27857f7-backup.hg $TESTTMP
+  saved backup bundle to $TESTTMP/repo/.hg/strip-backup/9530e27857f7-2e7b195d-backup.hg (glob)
+  $ mv .hg/strip-backup/9530e27857f7-2e7b195d-backup.hg $TESTTMP
 
 (internal linear merging at "hg pull --update")
 
   $ echo 'large1 for linear merge (conflict)' > large1
   $ echo 'large2 for linear merge (conflict with normal file)' > large2
-  $ hg pull --update --config debug.dirstate.delaywrite=2 $TESTTMP/9530e27857f7-backup.hg
-  pulling from $TESTTMP/9530e27857f7-backup.hg (glob)
+  $ hg pull --update --config debug.dirstate.delaywrite=2 $TESTTMP/9530e27857f7-2e7b195d-backup.hg
+  pulling from $TESTTMP/9530e27857f7-2e7b195d-backup.hg (glob)
   searching for changes
   adding changesets
   adding manifests
   adding file changes
   added 3 changesets with 5 changes to 5 files
-  local changed .hglf/large2 which remote deleted
-  use (c)hanged version or (d)elete? c
   remote turned local largefile large2 into a normal file
   keep (l)argefile or use (n)ormal file? l
   largefile large1 has a merge conflict
@@ -410,13 +441,11 @@
 
   $ echo 'large1 for linear merge (conflict)' > large1
   $ echo 'large2 for linear merge (conflict with normal file)' > large2
-  $ hg unbundle --update --config debug.dirstate.delaywrite=2 $TESTTMP/9530e27857f7-backup.hg
+  $ hg unbundle --update --config debug.dirstate.delaywrite=2 $TESTTMP/9530e27857f7-2e7b195d-backup.hg
   adding changesets
   adding manifests
   adding file changes
   added 3 changesets with 5 changes to 5 files
-  local changed .hglf/large2 which remote deleted
-  use (c)hanged version or (d)elete? c
   remote turned local largefile large2 into a normal file
   keep (l)argefile or use (n)ormal file? l
   largefile large1 has a merge conflict
@@ -463,7 +492,6 @@
   $ hg update --config ui.interactive=True --config debug.dirstate.delaywrite=2 <<EOF
   > m
   > r
-  > c
   > l
   > l
   > EOF
@@ -471,8 +499,6 @@
   (M)erge, keep (l)ocal or keep (r)emote? m
    subrepository sources for sub differ (in checked out version)
   use (l)ocal source (f74e50bd9e55) or (r)emote source (d65e59e952a9)? r
-  local changed .hglf/large2 which remote deleted
-  use (c)hanged version or (d)elete? c
   remote turned local largefile large2 into a normal file
   keep (l)argefile or use (n)ormal file? l
   largefile large1 has a merge conflict
@@ -509,6 +535,7 @@
   $ hg rebase -s 1 -d 3 --keep --config ui.interactive=True <<EOF
   > o
   > EOF
+  rebasing 1:72518492caa6 "#1"
   largefile large1 has a merge conflict
   ancestor was 4669e532d5b2c093a78eca010077e708a071bb64
   keep (l)ocal e5bb990443d6a92aaf7223813720f7566c9dd05b or
@@ -523,8 +550,27 @@
   $ cat large1
   large1 in #1
 
-  $ hg rebase -q --abort
-  rebase aborted
+Test that rebase updates standins for manually modified largefiles at
+the first commit when resuming.
+
+  $ echo "manually modified before 'hg rebase --continue'" > large1
+  $ hg resolve -m normal1
+  (no more unresolved files)
+  $ hg rebase --continue --config ui.interactive=True <<EOF
+  > c
+  > EOF
+  rebasing 1:72518492caa6 "#1"
+  rebasing 4:07d6153b5c04 "#4"
+  local changed .hglf/large1 which remote deleted
+  use (c)hanged version or (d)elete? c
+
+  $ hg diff -c "tip~1" --nodates .hglf/large1 | grep '^[+-][0-9a-z]'
+  -e5bb990443d6a92aaf7223813720f7566c9dd05b
+  +8a4f783556e7dea21139ca0466eafce954c75c13
+  $ rm -f large1
+  $ hg update -q -C tip
+  $ cat large1
+  manually modified before 'hg rebase --continue'
 
 Test that transplant updates largefiles whose standins are safely
 changed, even if it is aborted by a conflict elsewhere.
@@ -557,6 +603,20 @@
   $ cat largeX
   largeX
 
+Test that transplant updates standins for manually modified largefiles
+at the first commit when resuming.
+
+  $ echo "manually modified before 'hg transplant --continue'" > large1
+  $ hg transplant --continue
+  07d6153b5c04 transplanted as f1bf30eb88cc
+  $ hg diff -c tip .hglf/large1 | grep '^[+-][0-9a-z]'
+  -e5bb990443d6a92aaf7223813720f7566c9dd05b
+  +6a4f36d4075fbe0f30ec1d26ca44e63c05903671
+  $ rm -f large1
+  $ hg update -q -C tip
+  $ cat large1
+  manually modified before 'hg transplant --continue'
+
 Test that "hg status" doesn't show removal of largefiles not managed
 in the target context.
 
@@ -619,3 +679,16 @@
 #endif
 
   $ cd ..
+
+Test that "hg convert" avoids copying largefiles from the working
+directory into the store, because "hg convert" doesn't update largefiles
+in the working directory (removing files under ".cache/largefiles"
+forces "hg convert" to copy the corresponding largefiles)
+
+  $ cat >> $HGRCPATH <<EOF
+  > [extensions]
+  > convert =
+  > EOF
+
+  $ rm $TESTTMP/.cache/largefiles/6a4f36d4075fbe0f30ec1d26ca44e63c05903671
+  $ hg convert -q repo repo.converted
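The debugdirstate hunks above gain a fourth column: "set" when an entry has a recorded modification time (the actual timestamps are hidden by --nodates) and "unset" when it does not, so the file is re-examined on the next status run. A minimal sketch of that formatting rule, assuming the usual (status, mode, size, mtime) entry layout with mtime == -1 meaning "never recorded"; format_entry is an illustrative helper, not part of Mercurial.

  # Rough model of the "set"/"unset" column shown by `hg debugdirstate --nodates`.
  # Assumption: an entry is (status, mode, size, mtime); mtime == -1 means no
  # timestamp was recorded, so the content has to be re-checked on the next status.
  def format_entry(status, mode, size, mtime, path):
      date = 'unset' if mtime == -1 else 'set'   # --nodates hides real timestamps
      return '%s %3o %10d %-19s %s' % (status, mode, size, date, path)

  print(format_entry('n', 0o644, 7, 1421540910, 'large1'))   # n 644  7 set   large1
  print(format_entry('a', 0, -1, -1, '.hglf/large2'))        # a 0 -1 unset .hglf/large2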
--- a/tests/test-largefiles-wireproto.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-largefiles-wireproto.t	Sat Jan 17 18:28:30 2015 -0800
@@ -1,5 +1,5 @@
-This file contains testcases that tend to be related to the wireprotocol part of
-largefile.
+This file contains testcases that tend to be related to the wire protocol part
+of largefiles.
 
   $ USERCACHE="$TESTTMP/cache"; export USERCACHE
   $ mkdir "${USERCACHE}"
--- a/tests/test-largefiles.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-largefiles.t	Sat Jan 17 18:28:30 2015 -0800
@@ -44,13 +44,13 @@
   $ sleep 1
   $ hg st
   $ hg debugstate --nodates
-  n 644         41 .hglf/large1
-  n 644         41 .hglf/sub/large2
-  n 644          8 normal1
-  n 644          8 sub/normal2
+  n 644         41 set                 .hglf/large1
+  n 644         41 set                 .hglf/sub/large2
+  n 644          8 set                 normal1
+  n 644          8 set                 sub/normal2
   $ hg debugstate --large --nodates
-  n 644          7 large1
-  n 644          7 sub/large2
+  n 644          7 set                 large1
+  n 644          7 set                 sub/large2
   $ echo normal11 > normal1
   $ echo normal22 > sub/normal2
   $ echo large11 > large1
@@ -491,9 +491,9 @@
   $ echo "testing addremove with patterns" > testaddremove.dat
   $ echo "normaladdremove" > normaladdremove
   $ cd ..
-  $ hg -R a addremove
+  $ hg -R a -v addremove
   removing sub/large4
-  adding a/testaddremove.dat as a largefile (glob)
+  adding testaddremove.dat as a largefile
   removing normal3
   adding normaladdremove
   $ cd a
@@ -1097,6 +1097,17 @@
   all local heads known remotely
   6 changesets found
   adding changesets
+  uncompressed size of bundle content:
+      1213 (changelog)
+      1479 (manifests)
+       234  .hglf/large1
+       504  .hglf/large3
+       512  .hglf/sub/large4
+       162  .hglf/sub2/large6
+       162  .hglf/sub2/large7
+       192  normal1
+       397  normal3
+       405  sub/normal4
   adding manifests
   adding file changes
   added 6 changesets with 16 changes to 8 files
@@ -1172,12 +1183,12 @@
   adding manifests
   adding file changes
   added 1 changesets with 2 changes to 2 files (+1 heads)
+  0 largefiles cached
+  rebasing 8:f574fb32bb45 "modify normal file largefile in repo d"
   Invoking status precommit hook
   M sub/normal4
   M sub2/large6
-  saved backup bundle to $TESTTMP/d/.hg/strip-backup/f574fb32bb45-backup.hg (glob)
-  0 largefiles cached
-  nothing to rebase - working directory parent is also destination
+  saved backup bundle to $TESTTMP/d/.hg/strip-backup/f574fb32bb45-dd1d9f80-backup.hg (glob)
   $ [ -f .hg/largefiles/e166e74c7303192238d60af5a9c4ce9bef0b7928 ]
   $ hg log --template '{rev}:{node|short}  {desc|firstline}\n'
   9:598410d3eb9a  modify normal file largefile in repo d
@@ -1231,10 +1242,11 @@
   added 1 changesets with 2 changes to 2 files (+1 heads)
   (run 'hg heads' to see heads, 'hg merge' to merge)
   $ hg rebase
+  rebasing 8:f574fb32bb45 "modify normal file largefile in repo d"
   Invoking status precommit hook
   M sub/normal4
   M sub2/large6
-  saved backup bundle to $TESTTMP/e/.hg/strip-backup/f574fb32bb45-backup.hg (glob)
+  saved backup bundle to $TESTTMP/e/.hg/strip-backup/f574fb32bb45-dd1d9f80-backup.hg (glob)
   $ hg log --template '{rev}:{node|short}  {desc|firstline}\n'
   9:598410d3eb9a  modify normal file largefile in repo d
   8:a381d2c8c80e  modify normal file and largefile in repo b
@@ -1719,7 +1731,7 @@
   $ rm sub2/large7
   $ echo "largeasnormal" > sub2/large7
   $ hg add sub2/large7
-  sub2/large7 already a largefile
+  sub2/large7 already a largefile (glob)
 
 Test that transplanting a largefile change works correctly.
 
@@ -1741,8 +1753,6 @@
   adding manifests
   adding file changes
   added 1 changesets with 2 changes to 2 files
-  getting changed largefiles
-  0 largefiles updated, 0 removed
   $ hg log --template '{rev}:{node|short}  {desc|firstline}\n'
   9:598410d3eb9a  modify normal file largefile in repo d
   8:a381d2c8c80e  modify normal file and largefile in repo b
@@ -1790,7 +1800,7 @@
   $ hg cat .hglf/sub/large4
   e166e74c7303192238d60af5a9c4ce9bef0b7928
   $ hg cat .hglf/normal3
-  .hglf/normal3: no such file in rev 598410d3eb9a
+  .hglf/normal3: no such file in rev 598410d3eb9a (glob)
   [1]
 
 Test that renaming a largefile results in correct output for status
--- a/tests/test-locate.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-locate.t	Sat Jan 17 18:28:30 2015 -0800
@@ -96,10 +96,10 @@
 
   $ hg files
   b
-  dir.h/foo
+  dir.h/foo (glob)
   t.h
-  t/e.h
-  t/x
+  t/e.h (glob)
+  t/x (glob)
   $ hg files b
   b
 
@@ -130,11 +130,11 @@
   ../t/e.h (glob)
 
   $ hg files
-  ../b
-  ../dir.h/foo
-  ../t.h
-  ../t/e.h
-  ../t/x
+  ../b (glob)
+  ../dir.h/foo (glob)
+  ../t.h (glob)
+  ../t/e.h (glob)
+  ../t/x (glob)
   $ hg files .
   [1]
 
--- a/tests/test-log.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-log.t	Sat Jan 17 18:28:30 2015 -0800
@@ -1031,7 +1031,25 @@
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     commit on default
   
+#if gettext
 
+Test that all log names are translated (e.g. branches, bookmarks, tags):
+
+  $ hg bookmark babar -r tip
+
+  $ HGENCODING=UTF-8 LANGUAGE=de hg log -r tip
+  \xc3\x84nderung:        3:f5d8de11c2e2 (esc)
+  Zweig:           test
+  Lesezeichen:     babar
+  Marke:           tip
+  Vorg\xc3\xa4nger:       1:d32277701ccb (esc)
+  Nutzer:          test
+  Datum:           Thu Jan 01 00:00:00 1970 +0000
+  Zusammenfassung: commit on test
+  
+  $ hg bookmark -d babar
+
+#endif
 
 log -p --cwd dir (in subdir)
 
@@ -1541,6 +1559,45 @@
   user:        
   date:        Thu Jan 01 00:00:00 1970 +0000
   
+Check that arbitrary names added by an extension show up in log automatically
+
+  $ cat > ../names.py <<EOF
+  > """A small extension to test adding arbitrary names to a repo"""
+  > from mercurial.namespaces import namespace
+  > 
+  > def reposetup(ui, repo):
+  >     foo = {'foo': repo[0].node()}
+  >     names = lambda r: foo.keys()
+  >     namemap = lambda r, name: foo.get(name)
+  >     nodemap = lambda r, node: [name for name, n in foo.iteritems()
+  >                                if n == node]
+  >     ns = namespace("bars", templatename="bar", logname="barlog",
+  >                    colorname="barcolor", listnames=names, namemap=namemap,
+  >                    nodemap=nodemap)
+  > 
+  >     repo.names.addnamespace(ns)
+  > EOF
+
+  $ hg --config extensions.names=../names.py log -r 0
+  changeset:   0:65624cd9070a
+  tag:         tip
+  barlog:      foo
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     a bunch of weird directories
+  
+  $ hg --config extensions.names=../names.py \
+  >  --config extensions.color= --config color.log.barcolor=red \
+  >  --color=always log -r 0
+  \x1b[0;33mchangeset:   0:65624cd9070a\x1b[0m (esc)
+  tag:         tip
+  \x1b[0;31mbarlog:      foo\x1b[0m (esc)
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     a bunch of weird directories
+  
+  $ hg --config extensions.names=../names.py log -r 0 --template '{bars}\n'
+  foo
 
   $ cd ..
 
@@ -1559,8 +1616,303 @@
   o  a
   
   $ hg log -f d/a -T '{desc}' -G
-  o  b
+  @  c
   |
   o  a
   
   $ cd ..
+
+hg log -f with linkrev pointing to another branch
+-------------------------------------------------
+
+create history with a filerev whose linkrev points to another branch
+
+  $ hg init branchedlinkrev
+  $ cd branchedlinkrev
+  $ echo 1 > a
+  $ hg commit -Am 'content1'
+  adding a
+  $ echo 2 > a
+  $ hg commit -m 'content2'
+  $ hg up --rev 'desc(content1)'
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ echo unrelated > unrelated
+  $ hg commit -Am 'unrelated'
+  adding unrelated
+  created new head
+  $ hg graft -r 'desc(content2)'
+  grafting 1:2294ae80ad84 "content2"
+  $ echo 3 > a
+  $ hg commit -m 'content3'
+  $ hg log -G
+  @  changeset:   4:50b9b36e9c5d
+  |  tag:         tip
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     content3
+  |
+  o  changeset:   3:15b2327059e5
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     content2
+  |
+  o  changeset:   2:2029acd1168c
+  |  parent:      0:ae0a3c9f9e95
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     unrelated
+  |
+  | o  changeset:   1:2294ae80ad84
+  |/   user:        test
+  |    date:        Thu Jan 01 00:00:00 1970 +0000
+  |    summary:     content2
+  |
+  o  changeset:   0:ae0a3c9f9e95
+     user:        test
+     date:        Thu Jan 01 00:00:00 1970 +0000
+     summary:     content1
+  
+
+log -f on the file should list the graft result.
+
+  $ hg log -Gf a
+  @  changeset:   4:50b9b36e9c5d
+  |  tag:         tip
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     content3
+  |
+  o  changeset:   3:15b2327059e5
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     content2
+  |
+  o  changeset:   0:ae0a3c9f9e95
+     user:        test
+     date:        Thu Jan 01 00:00:00 1970 +0000
+     summary:     content1
+  
+
+plain log lists the original version
+(XXX we should probably list both)
+
+  $ hg log -G a
+  @  changeset:   4:50b9b36e9c5d
+  |  tag:         tip
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     content3
+  |
+  | o  changeset:   1:2294ae80ad84
+  |/   user:        test
+  |    date:        Thu Jan 01 00:00:00 1970 +0000
+  |    summary:     content2
+  |
+  o  changeset:   0:ae0a3c9f9e95
+     user:        test
+     date:        Thu Jan 01 00:00:00 1970 +0000
+     summary:     content1
+  
+
+hg log -f from the grafted changeset
+(The bootstrap should properly take the topology into account)
+
+  $ hg up 'desc(content3)^'
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ hg log -Gf a
+  @  changeset:   3:15b2327059e5
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     content2
+  |
+  o  changeset:   0:ae0a3c9f9e95
+     user:        test
+     date:        Thu Jan 01 00:00:00 1970 +0000
+     summary:     content1
+  
+
+Test that we use the first non-hidden changeset in that case.
+
+(hide the changeset)
+
+  $ hg log -T '{node}\n' -r 1
+  2294ae80ad8447bc78383182eeac50cb049df623
+  $ hg debugobsolete 2294ae80ad8447bc78383182eeac50cb049df623
+  $ hg log -G
+  o  changeset:   4:50b9b36e9c5d
+  |  tag:         tip
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     content3
+  |
+  @  changeset:   3:15b2327059e5
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     content2
+  |
+  o  changeset:   2:2029acd1168c
+  |  parent:      0:ae0a3c9f9e95
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     unrelated
+  |
+  o  changeset:   0:ae0a3c9f9e95
+     user:        test
+     date:        Thu Jan 01 00:00:00 1970 +0000
+     summary:     content1
+  
+
+Check that log on the file does not drop the file revision.
+
+  $ hg log -G a
+  o  changeset:   4:50b9b36e9c5d
+  |  tag:         tip
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     content3
+  |
+  @  changeset:   3:15b2327059e5
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     content2
+  |
+  o  changeset:   0:ae0a3c9f9e95
+     user:        test
+     date:        Thu Jan 01 00:00:00 1970 +0000
+     summary:     content1
+  
+
+Even when a head revision is linkrev-shadowed.
+
+  $ hg log -T '{node}\n' -r 4
+  50b9b36e9c5df2c6fc6dcefa8ad0da929e84aed2
+  $ hg debugobsolete 50b9b36e9c5df2c6fc6dcefa8ad0da929e84aed2
+  $ hg log -G a
+  @  changeset:   3:15b2327059e5
+  |  tag:         tip
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     content2
+  |
+  o  changeset:   0:ae0a3c9f9e95
+     user:        test
+     date:        Thu Jan 01 00:00:00 1970 +0000
+     summary:     content1
+  
+
+  $ cd ..
+
+Even when the file revision is missing from some head:
+
+  $ hg init issue4490
+  $ cd issue4490
+  $ echo '[experimental]' >> .hg/hgrc
+  $ echo 'evolution=createmarkers' >> .hg/hgrc
+  $ echo a > a
+  $ hg ci -Am0
+  adding a
+  $ echo b > b
+  $ hg ci -Am1
+  adding b
+  $ echo B > b
+  $ hg ci --amend -m 1
+  $ hg up 0
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ echo c > c
+  $ hg ci -Am2
+  adding c
+  created new head
+  $ hg up 'head() and not .'
+  1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ hg log -G
+  o  changeset:   4:db815d6d32e6
+  |  tag:         tip
+  |  parent:      0:f7b1eb17ad24
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     2
+  |
+  | @  changeset:   3:9bc8ce7f9356
+  |/   parent:      0:f7b1eb17ad24
+  |    user:        test
+  |    date:        Thu Jan 01 00:00:00 1970 +0000
+  |    summary:     1
+  |
+  o  changeset:   0:f7b1eb17ad24
+     user:        test
+     date:        Thu Jan 01 00:00:00 1970 +0000
+     summary:     0
+  
+  $ hg log -f -G b
+  @  changeset:   3:9bc8ce7f9356
+  |  parent:      0:f7b1eb17ad24
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     1
+  |
+  $ hg log -G b
+  @  changeset:   3:9bc8ce7f9356
+  |  parent:      0:f7b1eb17ad24
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     1
+  |
+  $ cd ..
+
+Check proper reporting when the manifest changes but not the file (issue4499)
+------------------------------------------------------------------------
+
+  $ hg init issue4499
+  $ cd issue4499
+  $ for f in A B C D F E G H I J K L M N O P Q R S T U; do
+  >     echo 1 > $f;
+  >     hg add $f;
+  > done
+  $ hg commit -m 'A1B1C1'
+  $ echo 2 > A
+  $ echo 2 > B
+  $ echo 2 > C
+  $ hg commit -m 'A2B2C2'
+  $ hg up 0
+  3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ echo 3 > A
+  $ echo 2 > B
+  $ echo 2 > C
+  $ hg commit -m 'A3B2C2'
+  created new head
+
+  $ hg log -G
+  @  changeset:   2:fe5fc3d0eb17
+  |  tag:         tip
+  |  parent:      0:abf4f0e38563
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     A3B2C2
+  |
+  | o  changeset:   1:07dcc6b312c0
+  |/   user:        test
+  |    date:        Thu Jan 01 00:00:00 1970 +0000
+  |    summary:     A2B2C2
+  |
+  o  changeset:   0:abf4f0e38563
+     user:        test
+     date:        Thu Jan 01 00:00:00 1970 +0000
+     summary:     A1B1C1
+  
+
+Log -f on B should report the current changesets
+
+  $ hg log -fG B
+  @  changeset:   2:fe5fc3d0eb17
+  |  tag:         tip
+  |  parent:      0:abf4f0e38563
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     A3B2C2
+  |
+  o  changeset:   0:abf4f0e38563
+     user:        test
+     date:        Thu Jan 01 00:00:00 1970 +0000
+     summary:     A1B1C1
+  
+  $ cd ..
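A filelog revision stores a single linkrev, the revision of the changeset that first introduced that content, so after a graft that reuses the same file content, or once that changeset is hidden, the stored linkrev no longer matches the history being followed; the new test-log.t cases above check that "hg log -f" compensates for this. Below is a self-contained toy model of that adjustment with a made-up history mirroring the branchedlinkrev test; changelog, filelog_a and introrev are illustrative structures, not Mercurial's API.

  # Toy model of linkrev shadowing, loosely following the branchedlinkrev test above.
  # changelog maps rev -> (parents, {file: content}); a filelog entry stores only
  # one linkrev, the first changeset that introduced that content.
  changelog = {
      0: ((), {'a': '1'}),     # content1
      1: ((0,), {'a': '2'}),   # content2 (later obsoleted)
      2: ((0,), {}),           # unrelated
      3: ((2,), {'a': '2'}),   # graft of rev 1: same content, different changeset
      4: ((3,), {'a': '3'}),   # content3
  }
  hidden = {1}
  filelog_a = {'1': 0, '2': 1, '3': 4}   # content -> stored linkrev

  def ancestors(rev):
      seen, stack = set(), [rev]
      while stack:
          r = stack.pop()
          if r not in seen:
              seen.add(r)
              stack.extend(changelog[r][0])
      return seen

  def introrev(content, fromrev):
      # The stored linkrev is only a hint: when it is hidden or not an ancestor
      # of fromrev, fall back to the earliest visible ancestor touching the file.
      linkrev = filelog_a[content]
      if linkrev not in hidden and linkrev in ancestors(fromrev):
          return linkrev
      visible = [r for r in ancestors(fromrev)
                 if changelog[r][1].get('a') == content and r not in hidden]
      return min(visible) if visible else None

  print(introrev('2', 4))   # -> 3, not the shadowed linkrev 1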
--- a/tests/test-manifest.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-manifest.t	Sat Jan 17 18:28:30 2015 -0800
@@ -26,7 +26,7 @@
 
   $ hg files -vr .
            2   a
-           2 x b/a
+           2 x b/a (glob)
            1 l l
   $ hg files -r . -X b
   a
--- a/tests/test-merge-criss-cross.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-merge-criss-cross.t	Sat Jan 17 18:28:30 2015 -0800
@@ -141,8 +141,8 @@
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 40663881a6dd, local: 3b08d01b0ab5+, remote: adfe50279922
-   f2: keep -> k
    f1: versions differ -> m
+   f2: remote unchanged -> k
   
   auction for merging merge bids
    f1: picking 'get' action
@@ -152,7 +152,7 @@
    f1: remote is newer -> g
   getting f1
   updating: f1 1/1 files (100.00%)
-   f2: keep -> k
+   f2: remote unchanged -> k
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
 
@@ -176,7 +176,7 @@
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 0f6b37dbe527, local: adfe50279922+, remote: 3b08d01b0ab5
-   f1: keep -> k
+   f1: remote unchanged -> k
    f2: versions differ -> m
   
   calculating bids for ancestor 40663881a6dd
@@ -184,8 +184,8 @@
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 40663881a6dd, local: adfe50279922+, remote: 3b08d01b0ab5
+   f1: versions differ -> m
    f2: remote is newer -> g
-   f1: versions differ -> m
   
   auction for merging merge bids
    f1: picking 'keep' action
@@ -195,7 +195,7 @@
    f2: remote is newer -> g
   getting f2
   updating: f2 1/1 files (100.00%)
-   f1: keep -> k
+   f1: remote unchanged -> k
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
 
@@ -249,8 +249,8 @@
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 40663881a6dd, local: 3b08d01b0ab5+, remote: adfe50279922
-   f2: keep -> k
    f1: versions differ -> m
+   f2: remote unchanged -> k
   
   auction for merging merge bids
    f1: picking 'get' action
@@ -260,7 +260,7 @@
    f1: remote is newer -> g
   getting f1
   updating: f1 1/1 files (100.00%)
-   f2: keep -> k
+   f2: remote unchanged -> k
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
 
--- a/tests/test-merge-force.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-merge-force.t	Sat Jan 17 18:28:30 2015 -0800
@@ -1,45 +1,675 @@
+Set up a base, local, and remote changeset, as well as the working copy state.
+File names are of the form base_remote_local_working-copy. For example,
+content1_content2_content1_content2-untracked represents a
+file that was modified in the remote changeset, left untouched in the
+local changeset, then modified in the working copy to match the
+remote content, and finally forgotten.
+
   $ hg init
 
-  $ echo a > a
-  $ hg ci -qAm 'add a'
+Create base changeset
+
+  $ python $TESTDIR/generate-working-copy-states.py state 3 1
+  $ hg addremove -q --similarity 0
+  $ hg commit -qm 'base'
+
+Create remote changeset
+
+  $ python $TESTDIR/generate-working-copy-states.py state 3 2
+  $ hg addremove -q --similarity 0
+  $ hg commit -qm 'remote'
+
+Create local changeset
+
+  $ hg update -q 0
+  $ python $TESTDIR/generate-working-copy-states.py state 3 3
+  $ hg addremove -q --similarity 0
+  $ hg commit -qm 'local'
+
+Set up working directory
+
+  $ python $TESTDIR/generate-working-copy-states.py state 3 wc
+  $ hg addremove -q --similarity 0
+  $ hg forget *_*_*_*-untracked
+  $ rm *_*_*_missing-*
 
-  $ echo b > b
-  $ hg ci -qAm 'add b'
+  $ hg status -A
+  M content1_content1_content1_content4-tracked
+  M content1_content1_content3_content1-tracked
+  M content1_content1_content3_content4-tracked
+  M content1_content2_content1_content2-tracked
+  M content1_content2_content1_content4-tracked
+  M content1_content2_content2_content1-tracked
+  M content1_content2_content2_content4-tracked
+  M content1_content2_content3_content1-tracked
+  M content1_content2_content3_content2-tracked
+  M content1_content2_content3_content4-tracked
+  M content1_missing_content1_content4-tracked
+  M content1_missing_content3_content1-tracked
+  M content1_missing_content3_content4-tracked
+  M missing_content2_content2_content4-tracked
+  M missing_content2_content3_content2-tracked
+  M missing_content2_content3_content4-tracked
+  M missing_missing_content3_content4-tracked
+  A content1_content1_missing_content1-tracked
+  A content1_content1_missing_content4-tracked
+  A content1_content2_missing_content1-tracked
+  A content1_content2_missing_content2-tracked
+  A content1_content2_missing_content4-tracked
+  A content1_missing_missing_content1-tracked
+  A content1_missing_missing_content4-tracked
+  A missing_content2_missing_content2-tracked
+  A missing_content2_missing_content4-tracked
+  A missing_missing_missing_content4-tracked
+  R content1_content1_content1_content1-untracked
+  R content1_content1_content1_content4-untracked
+  R content1_content1_content1_missing-untracked
+  R content1_content1_content3_content1-untracked
+  R content1_content1_content3_content3-untracked
+  R content1_content1_content3_content4-untracked
+  R content1_content1_content3_missing-untracked
+  R content1_content2_content1_content1-untracked
+  R content1_content2_content1_content2-untracked
+  R content1_content2_content1_content4-untracked
+  R content1_content2_content1_missing-untracked
+  R content1_content2_content2_content1-untracked
+  R content1_content2_content2_content2-untracked
+  R content1_content2_content2_content4-untracked
+  R content1_content2_content2_missing-untracked
+  R content1_content2_content3_content1-untracked
+  R content1_content2_content3_content2-untracked
+  R content1_content2_content3_content3-untracked
+  R content1_content2_content3_content4-untracked
+  R content1_content2_content3_missing-untracked
+  R content1_missing_content1_content1-untracked
+  R content1_missing_content1_content4-untracked
+  R content1_missing_content1_missing-untracked
+  R content1_missing_content3_content1-untracked
+  R content1_missing_content3_content3-untracked
+  R content1_missing_content3_content4-untracked
+  R content1_missing_content3_missing-untracked
+  R missing_content2_content2_content2-untracked
+  R missing_content2_content2_content4-untracked
+  R missing_content2_content2_missing-untracked
+  R missing_content2_content3_content2-untracked
+  R missing_content2_content3_content3-untracked
+  R missing_content2_content3_content4-untracked
+  R missing_content2_content3_missing-untracked
+  R missing_missing_content3_content3-untracked
+  R missing_missing_content3_content4-untracked
+  R missing_missing_content3_missing-untracked
+  ! content1_content1_content1_missing-tracked
+  ! content1_content1_content3_missing-tracked
+  ! content1_content1_missing_missing-tracked
+  ! content1_content2_content1_missing-tracked
+  ! content1_content2_content2_missing-tracked
+  ! content1_content2_content3_missing-tracked
+  ! content1_content2_missing_missing-tracked
+  ! content1_missing_content1_missing-tracked
+  ! content1_missing_content3_missing-tracked
+  ! content1_missing_missing_missing-tracked
+  ! missing_content2_content2_missing-tracked
+  ! missing_content2_content3_missing-tracked
+  ! missing_content2_missing_missing-tracked
+  ! missing_missing_content3_missing-tracked
+  ! missing_missing_missing_missing-tracked
+  ? content1_content1_missing_content1-untracked
+  ? content1_content1_missing_content4-untracked
+  ? content1_content2_missing_content1-untracked
+  ? content1_content2_missing_content2-untracked
+  ? content1_content2_missing_content4-untracked
+  ? content1_missing_missing_content1-untracked
+  ? content1_missing_missing_content4-untracked
+  ? missing_content2_missing_content2-untracked
+  ? missing_content2_missing_content4-untracked
+  ? missing_missing_missing_content4-untracked
+  C content1_content1_content1_content1-tracked
+  C content1_content1_content3_content3-tracked
+  C content1_content2_content1_content1-tracked
+  C content1_content2_content2_content2-tracked
+  C content1_content2_content3_content3-tracked
+  C content1_missing_content1_content1-tracked
+  C content1_missing_content3_content3-tracked
+  C missing_content2_content2_content2-tracked
+  C missing_content2_content3_content3-tracked
+  C missing_missing_content3_content3-tracked
+
+Merge with remote
 
-  $ hg up -qC 0
-  $ hg rm a
-  $ hg ci -m 'rm a'
-  created new head
+# Notes:
+# - local and remote changed content1_content2_*_content2-untracked
+#   in the same way, so it could potentially be left alone
 
-  $ hg up -qC 1
-  $ rm a
+  $ hg merge -f --tool internal:merge3 'desc("remote")'
+  local changed content1_missing_content1_content4-tracked which remote deleted
+  use (c)hanged version or (d)elete? c
+  local changed content1_missing_content3_content3-tracked which remote deleted
+  use (c)hanged version or (d)elete? c
+  local changed content1_missing_content3_content4-tracked which remote deleted
+  use (c)hanged version or (d)elete? c
+  local changed content1_missing_missing_content4-tracked which remote deleted
+  use (c)hanged version or (d)elete? c
+  remote changed content1_content2_content1_content1-untracked which local deleted
+  use (c)hanged version or leave (d)eleted? c
+  remote changed content1_content2_content1_content2-untracked which local deleted
+  use (c)hanged version or leave (d)eleted? c
+  remote changed content1_content2_content1_content4-untracked which local deleted
+  use (c)hanged version or leave (d)eleted? c
+  remote changed content1_content2_content1_missing-tracked which local deleted
+  use (c)hanged version or leave (d)eleted? c
+  remote changed content1_content2_content1_missing-untracked which local deleted
+  use (c)hanged version or leave (d)eleted? c
+  remote changed content1_content2_content2_content1-untracked which local deleted
+  use (c)hanged version or leave (d)eleted? c
+  remote changed content1_content2_content2_content2-untracked which local deleted
+  use (c)hanged version or leave (d)eleted? c
+  remote changed content1_content2_content2_content4-untracked which local deleted
+  use (c)hanged version or leave (d)eleted? c
+  remote changed content1_content2_content2_missing-tracked which local deleted
+  use (c)hanged version or leave (d)eleted? c
+  remote changed content1_content2_content2_missing-untracked which local deleted
+  use (c)hanged version or leave (d)eleted? c
+  remote changed content1_content2_content3_content1-untracked which local deleted
+  use (c)hanged version or leave (d)eleted? c
+  remote changed content1_content2_content3_content2-untracked which local deleted
+  use (c)hanged version or leave (d)eleted? c
+  remote changed content1_content2_content3_content3-untracked which local deleted
+  use (c)hanged version or leave (d)eleted? c
+  remote changed content1_content2_content3_content4-untracked which local deleted
+  use (c)hanged version or leave (d)eleted? c
+  remote changed content1_content2_content3_missing-tracked which local deleted
+  use (c)hanged version or leave (d)eleted? c
+  remote changed content1_content2_content3_missing-untracked which local deleted
+  use (c)hanged version or leave (d)eleted? c
+  remote changed content1_content2_missing_content1-untracked which local deleted
+  use (c)hanged version or leave (d)eleted? c
+  remote changed content1_content2_missing_content2-untracked which local deleted
+  use (c)hanged version or leave (d)eleted? c
+  remote changed content1_content2_missing_content4-untracked which local deleted
+  use (c)hanged version or leave (d)eleted? c
+  remote changed content1_content2_missing_missing-tracked which local deleted
+  use (c)hanged version or leave (d)eleted? c
+  remote changed content1_content2_missing_missing-untracked which local deleted
+  use (c)hanged version or leave (d)eleted? c
+  merging content1_content2_content1_content4-tracked
+  warning: conflicts during merge.
+  merging content1_content2_content1_content4-tracked incomplete! (edit conflicts, then use 'hg resolve --mark')
+  merging content1_content2_content2_content1-tracked
+  merging content1_content2_content2_content4-tracked
+  warning: conflicts during merge.
+  merging content1_content2_content2_content4-tracked incomplete! (edit conflicts, then use 'hg resolve --mark')
+  merging content1_content2_content3_content1-tracked
+  merging content1_content2_content3_content3-tracked
+  warning: conflicts during merge.
+  merging content1_content2_content3_content3-tracked incomplete! (edit conflicts, then use 'hg resolve --mark')
+  merging content1_content2_content3_content4-tracked
+  warning: conflicts during merge.
+  merging content1_content2_content3_content4-tracked incomplete! (edit conflicts, then use 'hg resolve --mark')
+  merging content1_content2_missing_content1-tracked
+  merging content1_content2_missing_content4-tracked
+  warning: conflicts during merge.
+  merging content1_content2_missing_content4-tracked incomplete! (edit conflicts, then use 'hg resolve --mark')
+  merging missing_content2_content2_content4-tracked
+  warning: conflicts during merge.
+  merging missing_content2_content2_content4-tracked incomplete! (edit conflicts, then use 'hg resolve --mark')
+  merging missing_content2_content3_content3-tracked
+  warning: conflicts during merge.
+  merging missing_content2_content3_content3-tracked incomplete! (edit conflicts, then use 'hg resolve --mark')
+  merging missing_content2_content3_content4-tracked
+  warning: conflicts during merge.
+  merging missing_content2_content3_content4-tracked incomplete! (edit conflicts, then use 'hg resolve --mark')
+  merging missing_content2_missing_content4-tracked
+  warning: conflicts during merge.
+  merging missing_content2_missing_content4-tracked incomplete! (edit conflicts, then use 'hg resolve --mark')
+  merging missing_content2_missing_content4-untracked
+  warning: conflicts during merge.
+  merging missing_content2_missing_content4-untracked incomplete! (edit conflicts, then use 'hg resolve --mark')
+  39 files updated, 3 files merged, 8 files removed, 10 files unresolved
+  use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+  [1]
 
-Local deleted a file, remote removed
+Check which files need to be resolved (should correspond to the output above).
+These should be the files for which the base (1st filename segment), the remote
+(2nd segment), and the working copy (4th segment) are all different.
+
+Interestingly, one untracked file got merged and added, which corresponds to the
+odd 'if force and branchmerge and different' case in manifestmerge().
 
-Should fail, since there are deleted files:
+  $ hg resolve -l
+  U content1_content2_content1_content4-tracked
+  R content1_content2_content2_content1-tracked
+  U content1_content2_content2_content4-tracked
+  R content1_content2_content3_content1-tracked
+  U content1_content2_content3_content3-tracked
+  U content1_content2_content3_content4-tracked
+  R content1_content2_missing_content1-tracked
+  U content1_content2_missing_content4-tracked
+  U missing_content2_content2_content4-tracked
+  U missing_content2_content3_content3-tracked
+  U missing_content2_content3_content4-tracked
+  U missing_content2_missing_content4-tracked
+  U missing_content2_missing_content4-untracked
+
+Check status and file content
+
+Some files get added (e.g. content1_content2_content1_content1-untracked)
+
+It is not intuitive that content1_content2_content1_content4-tracked gets
+merged while content1_content2_content1_content4-untracked gets overwritten.
+Any *_content2_*-untracked triggers the modified/deleted prompt and then gets
+overwritten.
+
+A lot of untracked files become tracked, for example
+content1_content2_content2_content2-untracked.
+
+*_missing_missing_missing-tracked is reported as removed ('R'), which
+doesn't make sense since the file did not exist in the parent, but on the
+other hand, merged-in additions are reported as modifications, which is
+almost as strange.
+
+missing_missing_content3_missing-tracked becomes removed ('R'), even though
+the remote side did not touch the file.
 
-  $ hg merge
-  abort: uncommitted changes
-  (use 'hg status' to list changes)
-  [255]
-
-Should succeed with --force:
-
-  $ hg -v merge --force
-  resolving manifests
-  removing a
-  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
-  (branch merge, don't forget to commit)
-
-Should show 'a' as removed:
-
-  $ hg status
-  R a
-
-  $ hg ci -m merge
-
-Should not show 'a':
-
-  $ hg manifest
-  b
-
+  $ for f in `python $TESTDIR/generate-working-copy-states.py filelist 3`
+  > do
+  >   echo
+  >   hg status -A $f
+  >   if test -f $f
+  >   then
+  >     cat $f
+  >   else
+  >     echo '<missing>'
+  >   fi
+  >   if test -f ${f}.orig
+  >   then
+  >     echo ${f}.orig:
+  >     cat ${f}.orig
+  >   fi
+  > done
+  
+  C content1_content1_content1_content1-tracked
+  content1
+  
+  R content1_content1_content1_content1-untracked
+  content1
+  
+  M content1_content1_content1_content4-tracked
+  content4
+  
+  R content1_content1_content1_content4-untracked
+  content4
+  
+  ! content1_content1_content1_missing-tracked
+  <missing>
+  
+  R content1_content1_content1_missing-untracked
+  <missing>
+  
+  M content1_content1_content3_content1-tracked
+  content1
+  
+  R content1_content1_content3_content1-untracked
+  content1
+  
+  C content1_content1_content3_content3-tracked
+  content3
+  
+  R content1_content1_content3_content3-untracked
+  content3
+  
+  M content1_content1_content3_content4-tracked
+  content4
+  
+  R content1_content1_content3_content4-untracked
+  content4
+  
+  ! content1_content1_content3_missing-tracked
+  <missing>
+  
+  R content1_content1_content3_missing-untracked
+  <missing>
+  
+  A content1_content1_missing_content1-tracked
+  content1
+  
+  ? content1_content1_missing_content1-untracked
+  content1
+  
+  A content1_content1_missing_content4-tracked
+  content4
+  
+  ? content1_content1_missing_content4-untracked
+  content4
+  
+  ! content1_content1_missing_missing-tracked
+  <missing>
+  
+  content1_content1_missing_missing-untracked: * (glob)
+  <missing>
+  
+  M content1_content2_content1_content1-tracked
+  content2
+  
+  M content1_content2_content1_content1-untracked
+  content2
+  
+  M content1_content2_content1_content2-tracked
+  content2
+  
+  M content1_content2_content1_content2-untracked
+  content2
+  
+  M content1_content2_content1_content4-tracked
+  <<<<<<< local: 0447570f1af6 - test: local
+  content4
+  ||||||| base
+  content1
+  =======
+  content2
+  >>>>>>> other: 85100b8c675b  - test: remote
+  content1_content2_content1_content4-tracked.orig:
+  content4
+  
+  M content1_content2_content1_content4-untracked
+  content2
+  
+  M content1_content2_content1_missing-tracked
+  content2
+  
+  M content1_content2_content1_missing-untracked
+  content2
+  
+  M content1_content2_content2_content1-tracked
+  content2
+  
+  M content1_content2_content2_content1-untracked
+  content2
+  
+  C content1_content2_content2_content2-tracked
+  content2
+  
+  M content1_content2_content2_content2-untracked
+  content2
+  
+  M content1_content2_content2_content4-tracked
+  <<<<<<< local: 0447570f1af6 - test: local
+  content4
+  ||||||| base
+  content1
+  =======
+  content2
+  >>>>>>> other: 85100b8c675b  - test: remote
+  content1_content2_content2_content4-tracked.orig:
+  content4
+  
+  M content1_content2_content2_content4-untracked
+  content2
+  
+  M content1_content2_content2_missing-tracked
+  content2
+  
+  M content1_content2_content2_missing-untracked
+  content2
+  
+  M content1_content2_content3_content1-tracked
+  content2
+  
+  M content1_content2_content3_content1-untracked
+  content2
+  
+  M content1_content2_content3_content2-tracked
+  content2
+  
+  M content1_content2_content3_content2-untracked
+  content2
+  
+  M content1_content2_content3_content3-tracked
+  <<<<<<< local: 0447570f1af6 - test: local
+  content3
+  ||||||| base
+  content1
+  =======
+  content2
+  >>>>>>> other: 85100b8c675b  - test: remote
+  content1_content2_content3_content3-tracked.orig:
+  content3
+  
+  M content1_content2_content3_content3-untracked
+  content2
+  
+  M content1_content2_content3_content4-tracked
+  <<<<<<< local: 0447570f1af6 - test: local
+  content4
+  ||||||| base
+  content1
+  =======
+  content2
+  >>>>>>> other: 85100b8c675b  - test: remote
+  content1_content2_content3_content4-tracked.orig:
+  content4
+  
+  M content1_content2_content3_content4-untracked
+  content2
+  
+  M content1_content2_content3_missing-tracked
+  content2
+  
+  M content1_content2_content3_missing-untracked
+  content2
+  
+  M content1_content2_missing_content1-tracked
+  content2
+  
+  M content1_content2_missing_content1-untracked
+  content2
+  
+  M content1_content2_missing_content2-tracked
+  content2
+  
+  M content1_content2_missing_content2-untracked
+  content2
+  
+  M content1_content2_missing_content4-tracked
+  <<<<<<< local: 0447570f1af6 - test: local
+  content4
+  ||||||| base
+  content1
+  =======
+  content2
+  >>>>>>> other: 85100b8c675b  - test: remote
+  content1_content2_missing_content4-tracked.orig:
+  content4
+  
+  M content1_content2_missing_content4-untracked
+  content2
+  
+  M content1_content2_missing_missing-tracked
+  content2
+  
+  M content1_content2_missing_missing-untracked
+  content2
+  
+  R content1_missing_content1_content1-tracked
+  <missing>
+  
+  R content1_missing_content1_content1-untracked
+  content1
+  
+  M content1_missing_content1_content4-tracked
+  content4
+  
+  R content1_missing_content1_content4-untracked
+  content4
+  
+  R content1_missing_content1_missing-tracked
+  <missing>
+  
+  R content1_missing_content1_missing-untracked
+  <missing>
+  
+  R content1_missing_content3_content1-tracked
+  <missing>
+  
+  R content1_missing_content3_content1-untracked
+  content1
+  
+  C content1_missing_content3_content3-tracked
+  content3
+  
+  R content1_missing_content3_content3-untracked
+  content3
+  
+  M content1_missing_content3_content4-tracked
+  content4
+  
+  R content1_missing_content3_content4-untracked
+  content4
+  
+  R content1_missing_content3_missing-tracked
+  <missing>
+  
+  R content1_missing_content3_missing-untracked
+  <missing>
+  
+  R content1_missing_missing_content1-tracked
+  <missing>
+  
+  ? content1_missing_missing_content1-untracked
+  content1
+  
+  A content1_missing_missing_content4-tracked
+  content4
+  
+  ? content1_missing_missing_content4-untracked
+  content4
+  
+  R content1_missing_missing_missing-tracked
+  <missing>
+  
+  content1_missing_missing_missing-untracked: * (glob)
+  <missing>
+  
+  C missing_content2_content2_content2-tracked
+  content2
+  
+  M missing_content2_content2_content2-untracked
+  content2
+  
+  M missing_content2_content2_content4-tracked
+  <<<<<<< local: 0447570f1af6 - test: local
+  content4
+  ||||||| base
+  =======
+  content2
+  >>>>>>> other: 85100b8c675b  - test: remote
+  missing_content2_content2_content4-tracked.orig:
+  content4
+  
+  M missing_content2_content2_content4-untracked
+  content2
+  
+  M missing_content2_content2_missing-tracked
+  content2
+  
+  M missing_content2_content2_missing-untracked
+  content2
+  
+  M missing_content2_content3_content2-tracked
+  content2
+  
+  M missing_content2_content3_content2-untracked
+  content2
+  
+  M missing_content2_content3_content3-tracked
+  <<<<<<< local: 0447570f1af6 - test: local
+  content3
+  ||||||| base
+  =======
+  content2
+  >>>>>>> other: 85100b8c675b  - test: remote
+  missing_content2_content3_content3-tracked.orig:
+  content3
+  
+  M missing_content2_content3_content3-untracked
+  content2
+  
+  M missing_content2_content3_content4-tracked
+  <<<<<<< local: 0447570f1af6 - test: local
+  content4
+  ||||||| base
+  =======
+  content2
+  >>>>>>> other: 85100b8c675b  - test: remote
+  missing_content2_content3_content4-tracked.orig:
+  content4
+  
+  M missing_content2_content3_content4-untracked
+  content2
+  
+  M missing_content2_content3_missing-tracked
+  content2
+  
+  M missing_content2_content3_missing-untracked
+  content2
+  
+  M missing_content2_missing_content2-tracked
+  content2
+  
+  M missing_content2_missing_content2-untracked
+  content2
+  
+  M missing_content2_missing_content4-tracked
+  <<<<<<< local: 0447570f1af6 - test: local
+  content4
+  ||||||| base
+  =======
+  content2
+  >>>>>>> other: 85100b8c675b  - test: remote
+  missing_content2_missing_content4-tracked.orig:
+  content4
+  
+  M missing_content2_missing_content4-untracked
+  <<<<<<< local: 0447570f1af6 - test: local
+  content4
+  ||||||| base
+  =======
+  content2
+  >>>>>>> other: 85100b8c675b  - test: remote
+  missing_content2_missing_content4-untracked.orig:
+  content4
+  
+  M missing_content2_missing_missing-tracked
+  content2
+  
+  M missing_content2_missing_missing-untracked
+  content2
+  
+  C missing_missing_content3_content3-tracked
+  content3
+  
+  R missing_missing_content3_content3-untracked
+  content3
+  
+  M missing_missing_content3_content4-tracked
+  content4
+  
+  R missing_missing_content3_content4-untracked
+  content4
+  
+  R missing_missing_content3_missing-tracked
+  <missing>
+  
+  R missing_missing_content3_missing-untracked
+  <missing>
+  
+  A missing_missing_missing_content4-tracked
+  content4
+  
+  ? missing_missing_missing_content4-untracked
+  content4
+  
+  R missing_missing_missing_missing-tracked
+  <missing>
+  
+  missing_missing_missing_missing-untracked: * (glob)
+  <missing>
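The file names in the test-merge-force.t hunks above encode four states plus a -tracked/-untracked suffix, and the comment before "hg resolve -l" gives the rule of thumb for conflicts: a file ends up unresolved when its base, remote and working-copy contents all differ. Below is a small sketch that parses such a name and applies that stated rule; parse_state and expect_unresolved are illustrative helpers, and the rule is the approximation stated in the test (names whose remote side is missing go through the change/delete prompt instead), not Mercurial's full merge logic.

  # Parse a test-merge-force.t style name such as
  # "content1_content2_content3_content4-tracked" into its four content states
  # plus the tracked flag, and apply the rule of thumb stated in the test:
  # unresolved when base, remote and working copy are all different.
  def parse_state(name):
      states, _, flag = name.partition('-')
      base, remote, local, working = states.split('_')
      return {'base': base, 'remote': remote, 'local': local,
              'working': working, 'tracked': flag == 'tracked'}

  def expect_unresolved(name):
      s = parse_state(name)
      return len({s['base'], s['remote'], s['working']}) == 3

  print(expect_unresolved('content1_content2_content1_content4-tracked'))  # True
  print(expect_unresolved('content1_content2_content2_content1-tracked'))  # False, merges cleanly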
--- a/tests/test-merge-remove.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-merge-remove.t	Sat Jan 17 18:28:30 2015 -0800
@@ -21,8 +21,8 @@
   (branch merge, don't forget to commit)
 
   $ hg debugstate --nodates
-  m   0         -2 bar
-  m   0         -2 foo1
+  m   0         -2 unset               bar
+  m   0         -2 unset               foo1
   copy: foo -> foo1
 
   $ hg st -q
@@ -37,8 +37,8 @@
   $ hg rm -f foo1 bar
 
   $ hg debugstate --nodates
-  r   0         -1 bar
-  r   0         -1 foo1
+  r   0         -1 set                 bar
+  r   0         -1 set                 foo1
   copy: foo -> foo1
 
   $ hg st -qC
@@ -55,8 +55,8 @@
   adding foo1
 
   $ hg debugstate --nodates
-  n   0         -2 bar
-  n   0         -2 foo1
+  n   0         -2 unset               bar
+  n   0         -2 unset               foo1
   copy: foo -> foo1
 
   $ hg st -qC
@@ -74,8 +74,8 @@
   reverting foo1
 
   $ hg debugstate --nodates
-  n   0         -2 bar
-  n   0         -2 foo1
+  n   0         -2 unset               bar
+  n   0         -2 unset               foo1
   copy: foo -> foo1
 
   $ hg st -qC
--- a/tests/test-module-imports.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-module-imports.t	Sat Jan 17 18:28:30 2015 -0800
@@ -1,8 +1,11 @@
+#require test-repo
+
 This code uses the ast module, which was new in 2.6, so we'll skip
 this test on anything earlier.
   $ $PYTHON -c 'import sys ; assert sys.version_info >= (2, 6)' || exit 80
 
   $ import_checker="$TESTDIR"/../contrib/import-checker.py
+
 Run the doctests from the import checker, and make sure
 it's working correctly.
   $ TERM=dumb
@@ -10,11 +13,6 @@
   $ python -m doctest $import_checker
 
   $ cd "$TESTDIR"/..
-  $ if hg identify -q > /dev/null 2>&1; then :
-  > else
-  >     echo "skipped: not a Mercurial working dir" >&2
-  >     exit 80
-  > fi
 
 There are a handful of cases here that require renaming a module so it
 doesn't overlap with a stdlib module name. There are also some cycles
--- a/tests/test-mq-eol.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-mq-eol.t	Sat Jan 17 18:28:30 2015 -0800
@@ -2,10 +2,12 @@
 Test interactions between mq and patch.eol
 
 
-  $ echo "[extensions]" >> $HGRCPATH
-  $ echo "mq=" >> $HGRCPATH
-  $ echo "[diff]" >> $HGRCPATH
-  $ echo "nodates=1" >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > [extensions]
+  > mq =
+  > [diff]
+  > nodates = 1
+  > EOF
 
   $ cat > makepatch.py <<EOF
   > f = file('eol.diff', 'wb')
--- a/tests/test-mq-git.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-mq-git.t	Sat Jan 17 18:28:30 2015 -0800
@@ -1,10 +1,12 @@
 # Test the plumbing of mq.git option
 # Automatic upgrade itself is tested elsewhere.
 
-  $ echo "[extensions]" >> $HGRCPATH
-  $ echo "mq=" >> $HGRCPATH
-  $ echo "[diff]" >> $HGRCPATH
-  $ echo "nodates=1" >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > [extensions]
+  > mq =
+  > [diff]
+  > nodates = 1
+  > EOF
 
   $ hg init repo-auto
   $ cd repo-auto
--- a/tests/test-mq-guards.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-mq-guards.t	Sat Jan 17 18:28:30 2015 -0800
@@ -270,7 +270,10 @@
   applying c.patch
   patching file c
   adding c
+  committing files:
   c
+  committing manifest
+  committing changelog
   now at: c.patch
 
 guards in series file: +1 +2 -3
@@ -568,7 +571,7 @@
   3 G b.patch
 
 test that "qselect --reapply" checks applied patches correctly when no
-applied patche becomes guarded but some of unapplied ones become
+applied patch becomes guarded but some of the unapplied ones become
 unguarded.
 
   $ hg qpop -q -a
--- a/tests/test-mq-header-date.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-mq-header-date.t	Sat Jan 17 18:28:30 2015 -0800
@@ -1,8 +1,10 @@
 
-  $ echo "[extensions]" >> $HGRCPATH
-  $ echo "mq=" >> $HGRCPATH
-  $ echo "[diff]" >> $HGRCPATH
-  $ echo "nodates=true" >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > [extensions]
+  > mq =
+  > [diff]
+  > nodates = true
+  > EOF
   $ catpatch() {
   >     cat .hg/patches/$1.patch | sed -e "s/^diff \-r [0-9a-f]* /diff -r ... /" \
   >                                    -e "s/^\(# Parent \).*/\1/"
@@ -410,8 +412,8 @@
   1: Three (again) - test
   0: [mq]: 1.patch - test
   ==== qref -d
+  From: jane
   Date: 12 0
-  From: jane
   
   diff -r ... 6
   --- /dev/null
@@ -463,8 +465,8 @@
   1: Three (again) - test
   0: [mq]: 1.patch - test
   ==== qref -u -d
+  From: john
   Date: 14 0
-  From: john
   
   diff -r ... 8
   --- /dev/null
@@ -493,8 +495,8 @@
   1: Three (again) - test
   0: [mq]: 1.patch - test
   ==== qref -u -d
+  From: john
   Date: 15 0
-  From: john
   
   Nine
   
--- a/tests/test-mq-merge.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-mq-merge.t	Sat Jan 17 18:28:30 2015 -0800
@@ -1,9 +1,11 @@
 Setup extension:
 
-  $ echo "[extensions]" >> $HGRCPATH
-  $ echo "mq =" >> $HGRCPATH
-  $ echo "[mq]" >> $HGRCPATH
-  $ echo "git = keep" >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > [extensions]
+  > mq =
+  > [mq]
+  > git = keep
+  > EOF
 
 Test merge with mq changeset as the second parent:
 
--- a/tests/test-mq-qdiff.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-mq-qdiff.t	Sat Jan 17 18:28:30 2015 -0800
@@ -1,7 +1,9 @@
-  $ echo "[extensions]" >> $HGRCPATH
-  $ echo "mq=" >> $HGRCPATH
-  $ echo "[mq]" >> $HGRCPATH
-  $ echo "git=keep" >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > [extensions]
+  > mq =
+  > [mq]
+  > git = keep
+  > EOF
 
   $ hg init a
   $ cd a
--- a/tests/test-mq-qfold.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-mq-qfold.t	Sat Jan 17 18:28:30 2015 -0800
@@ -1,9 +1,11 @@
-  $ echo "[extensions]" >> $HGRCPATH
-  $ echo "mq=" >> $HGRCPATH
-  $ echo "[mq]" >> $HGRCPATH
-  $ echo "git=keep" >> $HGRCPATH
-  $ echo "[diff]" >> $HGRCPATH
-  $ echo "nodates=1" >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > [extensions]
+  > mq =
+  > [mq]
+  > git = keep
+  > [diff]
+  > nodates = 1
+  > EOF
 
 init:
 
--- a/tests/test-mq-qimport.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-mq-qimport.t	Sat Jan 17 18:28:30 2015 -0800
@@ -15,10 +15,12 @@
   > f.close()
   > 
   > EOF
-  $ echo "[extensions]" >> $HGRCPATH
-  $ echo "mq=" >> $HGRCPATH
-  $ echo "[diff]" >> $HGRCPATH
-  $ echo "git=1" >> $HGRCPATH
+  > cat <<EOF >> $HGRCPATH
+  > [extensions]
+  > mq =
+  > [diff]
+  > git = 1
+  > EOF
   $ hg init repo
   $ cd repo
 
--- a/tests/test-mq-qpush-fail.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-mq-qpush-fail.t	Sat Jan 17 18:28:30 2015 -0800
@@ -264,7 +264,10 @@
   applying p2
   saving current version of a as a.orig
   patching file a
+  committing files:
   a
+  committing manifest
+  committing changelog
   applying p3
   saving current version of b as b.orig
   saving current version of d as d.orig
@@ -277,7 +280,10 @@
   file e already exists
   1 out of 1 hunks FAILED -- saving rejects to file e.rej
   patch failed to apply
+  committing files:
   b
+  committing manifest
+  committing changelog
   patch failed, rejects left in working dir
   errors during apply, please fix and refresh p3
   [2]
@@ -302,7 +308,10 @@
   $ hg qpush --force --no-backup --verbose
   applying p2
   patching file a
+  committing files:
   a
+  committing manifest
+  committing changelog
   now at: p2
   $ test -f a.orig && echo 'error: backup with --no-backup'
   [1]
@@ -316,7 +325,10 @@
   $ hg qgoto --force --no-backup p2 --verbose
   applying p2
   patching file a
+  committing files:
   a
+  committing manifest
+  committing changelog
   now at: p2
   $ test -f a.orig && echo 'error: backup with --no-backup'
   [1]
--- a/tests/test-mq-qrefresh.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-mq-qrefresh.t	Sat Jan 17 18:28:30 2015 -0800
@@ -1,7 +1,9 @@
-  $ echo "[extensions]" >> $HGRCPATH
-  $ echo "mq=" >> $HGRCPATH
-  $ echo "[diff]" >> $HGRCPATH
-  $ echo "nodates=1" >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > [extensions]
+  > mq =
+  > [diff]
+  > nodates = 1
+  > EOF
 
   $ hg init a
   $ cd a
--- a/tests/test-mq-subrepo-svn.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-mq-subrepo-svn.t	Sat Jan 17 18:28:30 2015 -0800
@@ -1,9 +1,11 @@
 #require svn13
 
-  $ echo "[extensions]" >> $HGRCPATH
-  $ echo "mq=" >> $HGRCPATH
-  $ echo "[diff]" >> $HGRCPATH
-  $ echo "nodates=1" >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > [extensions]
+  > mq =
+  > [diff]
+  > nodates = 1
+  > EOF
 
 fn to create new repository, and cd into it
   $ mkrepo() {
--- a/tests/test-mq-subrepo.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-mq-subrepo.t	Sat Jan 17 18:28:30 2015 -0800
@@ -1,10 +1,12 @@
-  $ echo "[ui]" >> $HGRCPATH
-  $ echo "commitsubrepos = Yes" >> $HGRCPATH
-  $ echo "[extensions]" >> $HGRCPATH
-  $ echo "mq=" >> $HGRCPATH
-  $ echo "record=" >> $HGRCPATH
-  $ echo "[diff]" >> $HGRCPATH
-  $ echo "nodates=1" >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > [ui]
+  > commitsubrepos = Yes
+  > [extensions]
+  > mq =
+  > record =
+  > [diff]
+  > nodates = 1
+  > EOF
 
   $ stdin=`pwd`/stdin.tmp
 
@@ -241,7 +243,7 @@
   [255]
   $ hg revert sub
   reverting subrepo sub
-  adding sub/a
+  adding sub/a (glob)
   $ hg qpop
   popping 1.diff
   now at: 0.diff
@@ -260,7 +262,7 @@
   [255]
   $ hg revert sub
   reverting subrepo sub
-  adding sub/a
+  adding sub/a (glob)
   $ hg qpush
   applying 1.diff
    subrepository sub diverged (local revision: b2fdb12cd82b, remote revision: aa037b301eba)
--- a/tests/test-mq-symlinks.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-mq-symlinks.t	Sat Jan 17 18:28:30 2015 -0800
@@ -47,7 +47,10 @@
   $ hg qpush --debug
   applying updatelink
   patching file a
+  committing files:
   a
+  committing manifest
+  committing changelog
   now at: updatelink
   $ "$TESTDIR/readlink.py" a
   a -> c
--- a/tests/test-mq.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-mq.t	Sat Jan 17 18:28:30 2015 -0800
@@ -5,11 +5,12 @@
   >     fi
   > }
 
-  $ echo "[extensions]" >> $HGRCPATH
-  $ echo "mq=" >> $HGRCPATH
-
-  $ echo "[mq]" >> $HGRCPATH
-  $ echo "plain=true" >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > [extensions]
+  > mq =
+  > [mq]
+  > plain = true
+  > EOF
 
 
 help
@@ -800,7 +801,7 @@
 
   $ hg strip -f tip
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
-  saved backup bundle to $TESTTMP/b/.hg/strip-backup/*-backup.hg (glob)
+  saved backup bundle to $TESTTMP/b/.hg/strip-backup/770eb8fce608-0ddcae0f-backup.hg (glob)
   $ cd ..
 
 
@@ -1399,7 +1400,10 @@
   applying empty
   saving current version of hello.txt as hello.txt.orig
   patching file hello.txt
+  committing files:
   hello.txt
+  committing manifest
+  committing changelog
   now at: empty
   $ hg st
   M bye.txt
@@ -1582,7 +1586,7 @@
 
   $ cd ..
 
-Test interraction with revset (issue4426)
+Test interaction with revset (issue4426)
 
   $ hg init issue4426
   $ cd issue4426
--- a/tests/test-mv-cp-st-diff.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-mv-cp-st-diff.t	Sat Jan 17 18:28:30 2015 -0800
@@ -1605,3 +1605,65 @@
   -
   +f
   $ cd ..
+
+Additional tricky linkrev case
+------------------------------
+
+If the first file revision after the diff base has a linkrev pointing to a
+changeset on another branch with a revision lower than the diff base, we can
+jump past the copy detection limit and fail to detect the rename.
+
+  $ hg init diffstoplinkrev
+  $ cd diffstoplinkrev
+
+  $ touch f
+  $ hg ci -Aqm 'empty f'
+
+Make a simple change
+
+  $ echo change > f
+  $ hg ci -m 'change f'
+
+Make a second branch; we use a named branch to create a simple commit
+that does not touch f.
+
+  $ hg up -qr 'desc(empty)'
+  $ hg branch -q dev
+  $ hg ci -Aqm dev
+
+Graft the initial change; as f was untouched, we reuse the same entry and the
+linkrev points to the older branch.
+
+  $ hg graft -q 'desc(change)'
+
+Make a rename because we want to track renames. It is also important that the
+faulty linkrev is not the "start" commit to ensure the linkrev will be used.
+
+  $ hg mv f renamed
+  $ hg ci -m renamed
+
+  $ hg log -G -T '{rev} {desc}'
+  @  4 renamed
+  |
+  o  3 change f
+  |
+  o  2 dev
+  |
+  | o  1 change f
+  |/
+  o  0 empty f
+  
+
+The copy tracking should still reach rev 2 (branch creation).
+Accessing the parent of 4 (renamed) should not jump us to revision 1.
+
+  $ hg diff --git -r 'desc(dev)' -r .
+  diff --git a/f b/renamed
+  rename from f
+  rename to renamed
+  --- a/f
+  +++ b/renamed
+  @@ -0,0 +1,1 @@
+  +change
+
+  $ cd ..
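
For reference, here is a toy illustration of the pitfall exercised above (hypothetical
data and helpers in Python, not Mercurial's actual API): each file revision only
records the changeset that first introduced it (its linkrev), so the grafted changeset
reuses a file revision whose linkrev is numerically lower than the diff base even
though the graft itself descends from the base. The copy walk therefore has to be
bounded by real changelog ancestry rather than by comparing linkrevs against the base.

    # Toy model of the graph built in the test above (hypothetical, not hg code):
    # 0 empty f -- 1 change f (other branch)
    #          \-- 2 dev -- 3 change f (graft of 1) -- 4 renamed
    changelog_parents = {0: None, 1: 0, 2: 0, 3: 2, 4: 3}
    grafted_filerev_linkrev = 1   # rev 3 reuses the file revision introduced by rev 1
    diff_base = 2                 # the 'dev' changeset

    def changelog_ancestors(rev):
        """Yield rev and its ancestors following the (toy) changelog parents."""
        while rev is not None:
            yield rev
            rev = changelog_parents[rev]

    # A walker that stops as soon as a linkrev drops below the base bails out
    # here and misses the rename made in rev 4:
    naive_stop = grafted_filerev_linkrev < diff_base            # True -> wrong
    # Bounding the walk by actual changelog ancestry still reaches the base:
    reached = [r for r in changelog_ancestors(4) if r >= diff_base]
    print(naive_stop, reached)                                  # True [4, 3, 2]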
--- a/tests/test-obsolete.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-obsolete.t	Sat Jan 17 18:28:30 2015 -0800
@@ -90,8 +90,8 @@
   # rev p1rev p2rev start   end deltastart base   p1   p2 rawsize totalsize compression heads chainlen
       0    -1    -1     0    59          0    0    0    0      58        58           0     1        0
       1     0    -1    59   118         59   59    0    0      58       116           0     1        0
-      2     1    -1   118   204         59   59   59    0      76       192           0     1        1
-      3     1    -1   204   271        204  204   59    0      66       258           0     2        0
+      2     1    -1   118   193        118  118   59    0      76       192           0     1        0
+      3     1    -1   193   260        193  193   59    0      66       258           0     2        0
   $ hg debugobsolete
   245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
 
@@ -702,7 +702,7 @@
   $ hg incoming http://localhost:$HGPORT
   comparing with http://localhost:$HGPORT/
   searching for changes
-  1:3816541e5485 (public) [tip ] A
+  1:3816541e5485 (draft) [tip ] A
   $ hg outgoing http://localhost:$HGPORT
   comparing with http://localhost:$HGPORT/
   searching for changes
--- a/tests/test-parse-date.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-parse-date.t	Sat Jan 17 18:28:30 2015 -0800
@@ -100,7 +100,7 @@
 Negative range
 
   $ hg log -d "--2"
-  abort: -2 must be nonnegative (see 'hg help dates')
+  abort: -2 must be nonnegative (see "hg help dates")
   [255]
 
 Whitespace only
--- a/tests/test-patch-offset.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-patch-offset.t	Sat Jan 17 18:28:30 2015 -0800
@@ -68,7 +68,10 @@
   Hunk #1 succeeded at 43 (offset 34 lines).
   Hunk #2 succeeded at 87 (offset 34 lines).
   Hunk #3 succeeded at 109 (offset 34 lines).
+  committing files:
   a
+  committing manifest
+  committing changelog
   created 189885cecb41
 
 compare imported changes against reference file
--- a/tests/test-patchbomb.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-patchbomb.t	Sat Jan 17 18:28:30 2015 -0800
@@ -86,6 +86,100 @@
   abort: patchbomb canceled
   [255]
 
+  $ hg --config ui.interactive=1 --config patchbomb.confirm=true email -n -f quux -t foo -c bar -r tip<<EOF
+  > n
+  > EOF
+  this patch series consists of 1 patches.
+  
+  
+  Final summary:
+  
+  From: quux
+  To: foo
+  Cc: bar
+  Subject: [PATCH] a
+   a |  1 +
+   1 files changed, 1 insertions(+), 0 deletions(-)
+  
+  are you sure you want to send (yn)? n
+  abort: patchbomb canceled
+  [255]
+
+
+Test diff.git is respected
+  $ hg --config diff.git=True email --date '1970-1-1 0:1' -n -f quux -t foo -c bar -r tip
+  this patch series consists of 1 patches.
+  
+  
+  displaying [PATCH] a ...
+  Content-Type: text/plain; charset="us-ascii"
+  MIME-Version: 1.0
+  Content-Transfer-Encoding: 7bit
+  Subject: [PATCH] a
+  X-Mercurial-Node: 8580ff50825a50c8f716709acdf8de0deddcd6ab
+  X-Mercurial-Series-Index: 1
+  X-Mercurial-Series-Total: 1
+  Message-Id: <8580ff50825a50c8f716.60@*> (glob)
+  X-Mercurial-Series-Id: <8580ff50825a50c8f716.60@*> (glob)
+  User-Agent: Mercurial-patchbomb/* (glob)
+  Date: Thu, 01 Jan 1970 00:01:00 +0000
+  From: quux
+  To: foo
+  Cc: bar
+  
+  # HG changeset patch
+  # User test
+  # Date 1 0
+  #      Thu Jan 01 00:00:01 1970 +0000
+  # Node ID 8580ff50825a50c8f716709acdf8de0deddcd6ab
+  # Parent  0000000000000000000000000000000000000000
+  a
+  
+  diff --git a/a b/a
+  new file mode 100644
+  --- /dev/null
+  +++ b/a
+  @@ -0,0 +1,1 @@
+  +a
+  
+
+
+Test that breaking format changes (diff.noprefix here) aren't respected
+  $ hg --config diff.noprefix=True email --date '1970-1-1 0:1' -n -f quux -t foo -c bar -r tip
+  this patch series consists of 1 patches.
+  
+  
+  displaying [PATCH] a ...
+  Content-Type: text/plain; charset="us-ascii"
+  MIME-Version: 1.0
+  Content-Transfer-Encoding: 7bit
+  Subject: [PATCH] a
+  X-Mercurial-Node: 8580ff50825a50c8f716709acdf8de0deddcd6ab
+  X-Mercurial-Series-Index: 1
+  X-Mercurial-Series-Total: 1
+  Message-Id: <8580ff50825a50c8f716.60@*> (glob)
+  X-Mercurial-Series-Id: <8580ff50825a50c8f716.60@*> (glob)
+  User-Agent: Mercurial-patchbomb/* (glob)
+  Date: Thu, 01 Jan 1970 00:01:00 +0000
+  From: quux
+  To: foo
+  Cc: bar
+  
+  # HG changeset patch
+  # User test
+  # Date 1 0
+  #      Thu Jan 01 00:00:01 1970 +0000
+  # Node ID 8580ff50825a50c8f716709acdf8de0deddcd6ab
+  # Parent  0000000000000000000000000000000000000000
+  a
+  
+  diff -r 000000000000 -r 8580ff50825a a
+  --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+  +++ b/a	Thu Jan 01 00:00:01 1970 +0000
+  @@ -0,0 +1,1 @@
+  +a
+  
+
   $ echo b > b
   $ hg commit -Amb -d '2 0'
   adding b
@@ -2589,4 +2683,127 @@
   +d
   
 
-  $ cd ..
+Test introduction configuration
+=================================
+
+  $ echo '[patchbomb]' >> $HGRCPATH
+
+"auto" setting
+----------------
+
+  $ echo 'intro=auto' >> $HGRCPATH
+
+single rev
+
+  $ hg email --date '1980-1-1 0:1' -n -t foo -s test -r '10' | grep "Write the introductory message for the patch series."
+  [1]
+
+single rev + flag
+
+  $ hg email --date '1980-1-1 0:1' -n -t foo -s test -r '10' --intro | grep "Write the introductory message for the patch series."
+  Write the introductory message for the patch series.
+
+
+Multi rev
+
+  $ hg email --date '1980-1-1 0:1' -n -t foo -s test -r '9::' | grep "Write the introductory message for the patch series."
+  Write the introductory message for the patch series.
+
+"never" setting
+-----------------
+
+  $ echo 'intro=never' >> $HGRCPATH
+
+single rev
+
+  $ hg email --date '1980-1-1 0:1' -n -t foo -s test -r '10' | grep "Write the introductory message for the patch series."
+  [1]
+
+single rev + flag
+
+  $ hg email --date '1980-1-1 0:1' -n -t foo -s test -r '10' --intro | grep "Write the introductory message for the patch series."
+  Write the introductory message for the patch series.
+
+
+Multi rev
+
+  $ hg email --date '1980-1-1 0:1' -n -t foo -s test -r '9::' | grep "Write the introductory message for the patch series."
+  [1]
+
+Multi rev + flag
+
+  $ hg email --date '1980-1-1 0:1' -n -t foo -s test -r '9::' --intro | grep "Write the introductory message for the patch series."
+  Write the introductory message for the patch series.
+
+"always" setting
+-----------------
+
+  $ echo 'intro=always' >> $HGRCPATH
+
+single rev
+
+  $ hg email --date '1980-1-1 0:1' -n -t foo -s test -r '10' | grep "Write the introductory message for the patch series."
+  Write the introductory message for the patch series.
+
+single rev + flag
+
+  $ hg email --date '1980-1-1 0:1' -n -t foo -s test -r '10' --intro | grep "Write the introductory message for the patch series."
+  Write the introductory message for the patch series.
+
+
+Multi rev
+
+  $ hg email --date '1980-1-1 0:1' -n -t foo -s test -r '9::' | grep "Write the introductory message for the patch series."
+  Write the introductory message for the patch series.
+
+Multi rev + flag
+
+  $ hg email --date '1980-1-1 0:1' -n -t foo -s test -r '9::' --intro | grep "Write the introductory message for the patch series."
+  Write the introductory message for the patch series.
+
+bad value setting
+-----------------
+
+  $ echo 'intro=mpmwearaclownnose' >> $HGRCPATH
+
+single rev
+
+  $ hg email --date '1980-1-1 0:1' -n -t foo -s test -r '10'
+  From [test]: test
+  this patch series consists of 1 patches.
+  
+  warning: invalid patchbomb.intro value "mpmwearaclownnose"
+  (should be one of always, never, auto)
+  Cc: 
+  
+  displaying [PATCH] test ...
+  Content-Type: text/plain; charset="us-ascii"
+  MIME-Version: 1.0
+  Content-Transfer-Encoding: 7bit
+  Subject: [PATCH] test
+  X-Mercurial-Node: 3b6f1ec9dde933a40a115a7990f8b320477231af
+  X-Mercurial-Series-Index: 1
+  X-Mercurial-Series-Total: 1
+  Message-Id: <3b6f1ec9dde933a40a11*> (glob)
+  X-Mercurial-Series-Id: <3b6f1ec9dde933a40a11.*> (glob)
+  User-Agent: Mercurial-patchbomb/* (glob)
+  Date: Tue, 01 Jan 1980 00:01:00 +0000
+  From: test
+  To: foo
+  
+  # HG changeset patch
+  # User test
+  # Date 5 0
+  #      Thu Jan 01 00:00:05 1970 +0000
+  # Branch test
+  # Node ID 3b6f1ec9dde933a40a115a7990f8b320477231af
+  # Parent  2f9fa9b998c5fe3ac2bd9a2b14bfcbeecbc7c268
+  dd
+  
+  diff -r 2f9fa9b998c5 -r 3b6f1ec9dde9 d
+  --- a/d	Thu Jan 01 00:00:04 1970 +0000
+  +++ b/d	Thu Jan 01 00:00:05 1970 +0000
+  @@ -1,1 +1,2 @@
+   d
+  +d
+  
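
Taken together, the cases above reduce to a small decision. A minimal sketch of that
decision in Python, inferred from the expected output (assumed behaviour, not the
actual patchbomb implementation):

    def wants_intro(intro_setting, npatches, intro_flag):
        """Should patchbomb prompt for an introductory message? (sketch)"""
        if intro_flag:                  # --intro on the command line always wins
            return True
        if intro_setting == 'always':
            return True
        if intro_setting == 'never':
            return False
        # 'auto' (and, after a warning, any unrecognized value) only prompts
        # when there is more than one patch in the series
        return npatches > 1

    assert wants_intro('auto', 1, False) is False
    assert wants_intro('auto', 2, False) is True
    assert wants_intro('never', 2, False) is False
    assert wants_intro('never', 1, True) is True
    assert wants_intro('always', 1, False) is True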
--- a/tests/test-phases-exchange.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-phases-exchange.t	Sat Jan 17 18:28:30 2015 -0800
@@ -755,7 +755,7 @@
 
 Bare push with next changeset and common changeset needing sync (issue3575)
 
-(reset some stat on remot repo to not confused other test)
+(reset some state on the remote repo to avoid confusing other tests)
 
   $ hg -R ../alpha --config extensions.strip= strip --no-backup 967b449fbc94
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
@@ -764,6 +764,10 @@
   pushing to ../alpha
   searching for changes
   1 changesets found
+  uncompressed size of bundle content:
+       172 (changelog)
+       145 (manifests)
+       111  a-H
   adding changesets
   adding manifests
   adding file changes
--- a/tests/test-progress.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-progress.t	Sat Jan 17 18:28:30 2015 -0800
@@ -296,7 +296,7 @@
   \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88 [=====>   ]\r (no-eol) (esc)
                        \r (no-eol) (esc)
 
-test triming progress items, when they contain multi-byte characters,
+test trimming progress items, when they contain multi-byte characters,
 of which length of byte sequence and columns in display are different
 from each other.
 
--- a/tests/test-push-hook-lock.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-push-hook-lock.t	Sat Jan 17 18:28:30 2015 -0800
@@ -15,8 +15,12 @@
   updating to branch default
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 
+  $ cat <<EOF > $TESTTMP/debuglocks-pretxn-hook.sh
+  > hg debuglocks
+  > true
+  > EOF
   $ echo '[hooks]' >> 2/.hg/hgrc
-  $ echo 'pretxnchangegroup.a = hg debuglocks; true' >> 2/.hg/hgrc
+  $ echo "pretxnchangegroup.a = sh $TESTTMP/debuglocks-pretxn-hook.sh" >> 2/.hg/hgrc
   $ echo 'changegroup.push = hg push -qf ../1' >> 2/.hg/hgrc
 
   $ echo bar >> 3/foo
--- a/tests/test-push-warn.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-push-warn.t	Sat Jan 17 18:28:30 2015 -0800
@@ -142,6 +142,10 @@
   pushing to ../c
   searching for changes
   2 changesets found
+  uncompressed size of bundle content:
+       308 (changelog)
+       286 (manifests)
+       213  foo
   adding changesets
   adding manifests
   adding file changes
--- a/tests/test-rebase-abort.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-rebase-abort.t	Sat Jan 17 18:28:30 2015 -0800
@@ -61,6 +61,8 @@
 Conflicting rebase:
 
   $ hg rebase -s 3 -d 2
+  rebasing 3:3163e20567cc "L1"
+  rebasing 4:46f0b057b5c0 "L2" (tip)
   merging common
   warning: conflicts during merge.
   merging common incomplete! (edit conflicts, then use 'hg resolve --mark')
@@ -70,7 +72,7 @@
 Abort:
 
   $ hg rebase --abort
-  saved backup bundle to $TESTTMP/a/.hg/strip-backup/*-backup.hg (glob)
+  saved backup bundle to $TESTTMP/a/.hg/strip-backup/3e046f2ecedb-6beef7d5-backup.hg (glob)
   rebase aborted
 
   $ hg tglog
@@ -89,6 +91,8 @@
 earlier than 2.7 by renaming ".hg/rebasestate" temporarily.
 
   $ hg rebase -s 3 -d 2
+  rebasing 3:3163e20567cc "L1"
+  rebasing 4:46f0b057b5c0 "L2" (tip)
   merging common
   warning: conflicts during merge.
   merging common incomplete! (edit conflicts, then use 'hg resolve --mark')
@@ -157,6 +161,9 @@
   o  0:public 'A'
   
   $ hg rebase -b 4 -d 2
+  rebasing 3:a6484957d6b9 "B bis"
+  note: rebase of 3:a6484957d6b9 created no changes to commit
+  rebasing 4:145842775fec "C1" (tip)
   merging c
   warning: conflicts during merge.
   merging c incomplete! (edit conflicts, then use 'hg resolve --mark')
@@ -216,6 +223,7 @@
   
 
   $ hg rebase -d master -r foo
+  rebasing 3:6c0f977a22d8 "C" (tip foo)
   merging c
   warning: conflicts during merge.
   merging c incomplete! (edit conflicts, then use 'hg resolve --mark')
--- a/tests/test-rebase-bookmarks.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-rebase-bookmarks.t	Sat Jan 17 18:28:30 2015 -0800
@@ -74,7 +74,8 @@
   o  0: 'A' bookmarks: Y@diverge
   
   $ hg rebase -s Y -d 3
-  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 2:49cb3485fa0c "C" (Y Z)
+  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/49cb3485fa0c-126f3e97-backup.hg (glob)
 
   $ hg tglog
   @  3: 'C' bookmarks: Y Z
@@ -95,7 +96,8 @@
   $ hg book W@diverge
 
   $ hg rebase -s W -d .
-  saved backup bundle to $TESTTMP/a4/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 3:41acb9dca9eb "D" (tip W)
+  saved backup bundle to $TESTTMP/a4/.hg/strip-backup/41acb9dca9eb-b35a6a63-backup.hg (glob)
 
   $ hg bookmarks
      W                         3:0d3554f74897
@@ -112,7 +114,9 @@
   $ hg up -q Z
 
   $ hg rebase -s 1 -d 3
-  saved backup bundle to $TESTTMP/a2/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 1:6c81ed0049f8 "B" (X)
+  rebasing 2:49cb3485fa0c "C" (Y Z)
+  saved backup bundle to $TESTTMP/a2/.hg/strip-backup/6c81ed0049f8-a687065f-backup.hg (glob)
 
   $ hg tglog
   @  3: 'C' bookmarks: Y Z
@@ -133,7 +137,9 @@
   $ hg up -q X
 
   $ hg rebase -d W
-  saved backup bundle to $TESTTMP/a3/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 1:6c81ed0049f8 "B" (X)
+  rebasing 2:49cb3485fa0c "C" (Y Z)
+  saved backup bundle to $TESTTMP/a3/.hg/strip-backup/6c81ed0049f8-a687065f-backup.hg (glob)
 
   $ hg tglog
   o  3: 'C' bookmarks: Y Z
@@ -162,6 +168,7 @@
   $ hg up 3
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg rebase
+  rebasing 3:3d5fa227f4b5 "C" (Y Z)
   merging c
   warning: conflicts during merge.
   merging c incomplete! (edit conflicts, then use 'hg resolve --mark')
@@ -171,7 +178,8 @@
   $ hg resolve --mark c
   (no more unresolved files)
   $ hg rebase --continue
-  saved backup bundle to $TESTTMP/a3/.hg/strip-backup/3d5fa227f4b5-backup.hg (glob)
+  rebasing 3:3d5fa227f4b5 "C" (Y Z)
+  saved backup bundle to $TESTTMP/a3/.hg/strip-backup/3d5fa227f4b5-c6ea2371-backup.hg (glob)
   $ hg tglog
   @  4: 'C' bookmarks: Y Z
   |
@@ -198,4 +206,7 @@
   $ hg book bisect
   $ hg update -q Y
   $ hg rebase -r '"bisect"^^::"bisect"^' -r bisect -d Z
-  saved backup bundle to $TESTTMP/a3/.hg/strip-backup/345c90f326a4-backup.hg (glob)
+  rebasing 5:345c90f326a4 "bisect"
+  rebasing 6:f677a2907404 "bisect2"
+  rebasing 7:325c16001345 "bisect3" (tip bisect)
+  saved backup bundle to $TESTTMP/a3/.hg/strip-backup/345c90f326a4-b4840586-backup.hg (glob)
--- a/tests/test-rebase-cache.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-rebase-cache.t	Sat Jan 17 18:28:30 2015 -0800
@@ -105,7 +105,9 @@
   0: 'A' 
 
   $ hg rebase -s 5 -d 8
-  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 5:635859577d0b "D"
+  rebasing 6:5097051d331d "E"
+  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/635859577d0b-89160bff-backup.hg (glob)
 
   $ hg branches
   branch3                        8:466cdfb14b62
@@ -166,7 +168,8 @@
   o  0: 'A'
   
   $ hg rebase -s 8 -d 6
-  saved backup bundle to $TESTTMP/a2/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 8:4666b71e8e32 "F" (tip)
+  saved backup bundle to $TESTTMP/a2/.hg/strip-backup/4666b71e8e32-fc1c4e96-backup.hg (glob)
 
   $ hg branches
   branch2                        8:6b4bdc1b5ac0
@@ -230,7 +233,10 @@
   o  0: 'A'
   
   $ hg rebase -s 7 -d 6
-  saved backup bundle to $TESTTMP/a3/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 7:653b9feb4616 "branch3"
+  note: rebase of 7:653b9feb4616 created no changes to commit
+  rebasing 8:4666b71e8e32 "F" (tip)
+  saved backup bundle to $TESTTMP/a3/.hg/strip-backup/653b9feb4616-3c88de16-backup.hg (glob)
 
   $ hg branches
   branch2                        7:6b4bdc1b5ac0
@@ -267,7 +273,7 @@
 
   $ hg strip 2
   0 files updated, 0 files merged, 4 files removed, 0 files unresolved
-  saved backup bundle to $TESTTMP/a3/.hg/strip-backup/*-backup.hg (glob)
+  saved backup bundle to $TESTTMP/a3/.hg/strip-backup/0a03079c47fd-11b7c407-backup.hg (glob)
 
   $ hg tglog
   o  3: 'C' branch2
@@ -328,7 +334,7 @@
 
   $ hg strip 2
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
-  saved backup bundle to $TESTTMP/a3/b/.hg/strip-backup/*-backup.hg (glob)
+  saved backup bundle to $TESTTMP/a3/b/.hg/strip-backup/a5b4b27ed7b4-a3b6984e-backup.hg (glob)
 
   $ hg theads
   1: 'branch2' branch2
@@ -373,14 +379,14 @@
 
   $ hg strip 3 4
   0 files updated, 0 files merged, 2 files removed, 0 files unresolved
-  saved backup bundle to $TESTTMP/a3/c/.hg/strip-backup/*-backup.hg (glob)
+  saved backup bundle to $TESTTMP/a3/c/.hg/strip-backup/67a385d4e6f2-b9243789-backup.hg (glob)
 
   $ hg theads
   2: 'C' 
 
   $ hg strip 2 1
   0 files updated, 0 files merged, 2 files removed, 0 files unresolved
-  saved backup bundle to $TESTTMP/a3/c/.hg/strip-backup/*-backup.hg (glob)
+  saved backup bundle to $TESTTMP/a3/c/.hg/strip-backup/6c81ed0049f8-a687065f-backup.hg (glob)
 
   $ hg theads
   0: 'A' 
@@ -475,6 +481,8 @@
   > true
   > EOF
   $ HGEDITOR="sh $TESTTMP/checkeditform.sh" hg rebase --dest 7 --source 5 -e
+  rebasing 5:361a99976cc9 "F"
   HGEDITFORM=rebase.merge
+  rebasing 8:326cfedc031c "I" (tip)
   HGEDITFORM=rebase.normal
-  saved backup bundle to $TESTTMP/a3/c4/.hg/strip-backup/*-backup.hg (glob)
+  saved backup bundle to $TESTTMP/a3/c4/.hg/strip-backup/361a99976cc9-35e980d0-backup.hg (glob)
--- a/tests/test-rebase-check-restore.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-rebase-check-restore.t	Sat Jan 17 18:28:30 2015 -0800
@@ -65,6 +65,8 @@
   o  0:draft 'A'
   
   $ hg rebase -s 1 -d 4 --keep
+  rebasing 1:27547f69f254 "B"
+  rebasing 2:965c486023db "C"
   merging A
   warning: conflicts during merge.
   merging A incomplete! (edit conflicts, then use 'hg resolve --mark')
@@ -78,6 +80,8 @@
   $ hg resolve -m A
   (no more unresolved files)
   $ hg rebase --continue
+  already rebased 1:27547f69f254 "B" as 45396c49d53b
+  rebasing 2:965c486023db "C"
 
   $ hg tglog
   o  7:secret 'C'
@@ -119,6 +123,7 @@
   o  0:draft 'A'
   
   $ hg rebase -s 5 -d 4 --keepbranches
+  rebasing 5:01e6ebbd8272 "F" (tip)
   merging A
   warning: conflicts during merge.
   merging A incomplete! (edit conflicts, then use 'hg resolve --mark')
@@ -132,7 +137,8 @@
   $ hg resolve -m A
   (no more unresolved files)
   $ hg rebase --continue
-  saved backup bundle to $TESTTMP/a2/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 5:01e6ebbd8272 "F" (tip)
+  saved backup bundle to $TESTTMP/a2/.hg/strip-backup/01e6ebbd8272-6fd3a015-backup.hg (glob)
 
   $ hg tglog
   @  5:draft 'F' notdefault
--- a/tests/test-rebase-collapse.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-rebase-collapse.t	Sat Jan 17 18:28:30 2015 -0800
@@ -59,6 +59,12 @@
   > echo "edited manually" >> \$1
   > EOF
   $ HGEDITOR="sh $TESTTMP/editor.sh" hg rebase --collapse --keepbranches -e
+  rebasing 1:42ccdea3bb16 "B"
+  note: rebase of 1:42ccdea3bb16 created no changes to commit
+  rebasing 2:5fddd98957c8 "C"
+  note: rebase of 2:5fddd98957c8 created no changes to commit
+  rebasing 3:32af7686d403 "D"
+  note: rebase of 3:32af7686d403 created no changes to commit
   ==== before editing
   Collapsed revision
   * B
@@ -75,7 +81,7 @@
   HG: added C
   HG: added D
   ====
-  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/*-backup.hg (glob)
+  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob)
 
   $ hg tglogp
   @  5:secret 'Collapsed revision
@@ -113,7 +119,11 @@
 
   $ hg phase --force --secret 6
   $ hg rebase --source 4 --collapse
-  saved backup bundle to $TESTTMP/a2/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 4:9520eea781bc "E"
+  note: rebase of 4:9520eea781bc created no changes to commit
+  rebasing 6:eea13746799a "G"
+  note: rebase of 6:eea13746799a created no changes to commit
+  saved backup bundle to $TESTTMP/a2/.hg/strip-backup/9520eea781bc-fcd8edd4-backup.hg (glob)
 
   $ hg tglog
   o  6: 'Collapsed revision
@@ -153,8 +163,12 @@
   > true
   > EOF
   $ HGEDITOR="sh $TESTTMP/checkeditform.sh" hg rebase --source 4 --collapse -m 'custom message' -e
+  rebasing 4:9520eea781bc "E"
+  note: rebase of 4:9520eea781bc created no changes to commit
+  rebasing 6:eea13746799a "G"
+  note: rebase of 6:eea13746799a created no changes to commit
   HGEDITFORM=rebase.collapse
-  saved backup bundle to $TESTTMP/a3/.hg/strip-backup/*-backup.hg (glob)
+  saved backup bundle to $TESTTMP/a3/.hg/strip-backup/9520eea781bc-fcd8edd4-backup.hg (glob)
 
   $ hg tglog
   o  6: 'custom message'
@@ -261,7 +275,13 @@
 Rebase and collapse - E onto H:
 
   $ hg rebase -s 4 --collapse # root (4) is not a merge
-  saved backup bundle to $TESTTMP/b1/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 4:8a5212ebc852 "E"
+  note: rebase of 4:8a5212ebc852 created no changes to commit
+  rebasing 5:7f219660301f "F"
+  note: rebase of 5:7f219660301f created no changes to commit
+  rebasing 6:c772a8b2dc17 "G"
+  note: rebase of 6:c772a8b2dc17 created no changes to commit
+  saved backup bundle to $TESTTMP/b1/.hg/strip-backup/8a5212ebc852-75046b61-backup.hg (glob)
 
   $ hg tglog
   o    5: 'Collapsed revision
@@ -310,7 +330,7 @@
   c65502d4178782309ce0574c5ae6ee9485a9bafa o default
 
   $ hg strip 4
-  saved backup bundle to $TESTTMP/b2/.hg/strip-backup/8a5212ebc852-backup.hg (glob)
+  saved backup bundle to $TESTTMP/b2/.hg/strip-backup/8a5212ebc852-75046b61-backup.hg (glob)
 
   $ cat $TESTTMP/b2/.hg/cache/branch2-served
   c65502d4178782309ce0574c5ae6ee9485a9bafa 4
@@ -409,8 +429,16 @@
   $ cd c1
 
   $ hg rebase -s 4 --collapse # root (4) is not a merge
+  rebasing 4:8a5212ebc852 "E"
+  note: rebase of 4:8a5212ebc852 created no changes to commit
+  rebasing 5:dca5924bb570 "F"
   merging E
-  saved backup bundle to $TESTTMP/c1/.hg/strip-backup/*-backup.hg (glob)
+  note: rebase of 5:dca5924bb570 created no changes to commit
+  rebasing 6:55a44ad28289 "G"
+  note: rebase of 6:55a44ad28289 created no changes to commit
+  rebasing 7:417d3b648079 "H"
+  note: rebase of 7:417d3b648079 created no changes to commit
+  saved backup bundle to $TESTTMP/c1/.hg/strip-backup/8a5212ebc852-f95d0879-backup.hg (glob)
 
   $ hg tglog
   o    5: 'Collapsed revision
@@ -499,7 +527,15 @@
   $ cd d1
 
   $ hg rebase -s 1 --collapse
-  saved backup bundle to $TESTTMP/d1/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 1:27547f69f254 "B"
+  note: rebase of 1:27547f69f254 created no changes to commit
+  rebasing 2:f838bfaca5c7 "C"
+  note: rebase of 2:f838bfaca5c7 created no changes to commit
+  rebasing 3:7bbcd6078bcc "D"
+  note: rebase of 3:7bbcd6078bcc created no changes to commit
+  rebasing 4:0a42590ed746 "E"
+  note: rebase of 4:0a42590ed746 created no changes to commit
+  saved backup bundle to $TESTTMP/d1/.hg/strip-backup/27547f69f254-9a3f7d92-backup.hg (glob)
 
   $ hg tglog
   o  2: 'Collapsed revision
@@ -583,7 +619,9 @@
   o  0: 'A'
   
   $ hg rebase -s 5 -d 4
-  saved backup bundle to $TESTTMP/e/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 5:fbfb97b1089a "E" (tip)
+  note: rebase of 5:fbfb97b1089a created no changes to commit
+  saved backup bundle to $TESTTMP/e/.hg/strip-backup/fbfb97b1089a-553e1d85-backup.hg (glob)
   $ hg tglog
   @  4: 'E'
   |
@@ -634,12 +672,16 @@
   o  0: 'add'
   
   $ hg rebase --collapse -d 1
+  rebasing 2:6e7340ee38c0 "move1"
   merging a and d to d
   merging b and e to e
   merging c and f to f
+  note: rebase of 2:6e7340ee38c0 created no changes to commit
+  rebasing 3:338e84e2e558 "move2" (tip)
   merging f and c to c
   merging e and g to g
-  saved backup bundle to $TESTTMP/copies/.hg/strip-backup/*-backup.hg (glob)
+  note: rebase of 3:338e84e2e558 created no changes to commit
+  saved backup bundle to $TESTTMP/copies/.hg/strip-backup/6e7340ee38c0-ef8ef003-backup.hg (glob)
   $ hg st
   $ hg st --copies --change tip
   A d
@@ -678,7 +720,11 @@
 Test collapsing in place
 
   $ hg rebase --collapse -b . -d 0
-  saved backup bundle to $TESTTMP/copies/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 1:1352765a01d4 "change"
+  note: rebase of 1:1352765a01d4 created no changes to commit
+  rebasing 2:64b456429f67 "Collapsed revision" (tip)
+  note: rebase of 2:64b456429f67 created no changes to commit
+  saved backup bundle to $TESTTMP/copies/.hg/strip-backup/1352765a01d4-45a352ea-backup.hg (glob)
   $ hg st --change tip --copies
   M a
   M c
@@ -737,7 +783,7 @@
 
   $ hg strip 2
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
-  saved backup bundle to $TESTTMP/f/.hg/strip-backup/*-backup.hg (glob)
+  saved backup bundle to $TESTTMP/f/.hg/strip-backup/c5cefa58fd55-629429f4-backup.hg (glob)
 
   $ hg tglog
   o  1: 'B'
@@ -768,7 +814,11 @@
   adding b
   $ hg book foo
   $ hg rebase -d 0 -r "1::2" --collapse -m collapsed
-  saved backup bundle to $TESTTMP/collapseaddremove/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 1:6d8d9f24eec3 "a"
+  note: rebase of 1:6d8d9f24eec3 created no changes to commit
+  rebasing 2:1cc73eca5ecc "b" (tip foo)
+  note: rebase of 2:1cc73eca5ecc created no changes to commit
+  saved backup bundle to $TESTTMP/collapseaddremove/.hg/strip-backup/6d8d9f24eec3-77d3b6e2-backup.hg (glob)
   $ hg log -G --template "{rev}: '{desc}' {bookmarks}"
   @  1: 'collapsed' foo
   |
--- a/tests/test-rebase-conflicts.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-rebase-conflicts.t	Sat Jan 17 18:28:30 2015 -0800
@@ -61,6 +61,8 @@
 Conflicting rebase:
 
   $ hg rebase -s 3 -d 2
+  rebasing 3:3163e20567cc "L1"
+  rebasing 4:46f0b057b5c0 "L2"
   merging common
   warning: conflicts during merge.
   merging common incomplete! (edit conflicts, then use 'hg resolve --mark')
@@ -70,7 +72,9 @@
 Try to continue without solving the conflict:
 
   $ hg rebase --continue
-  abort: unresolved merge conflicts (see hg help resolve)
+  already rebased 3:3163e20567cc "L1" as 3e046f2ecedb
+  rebasing 4:46f0b057b5c0 "L2"
+  abort: unresolved merge conflicts (see "hg help resolve")
   [255]
 
 Conclude rebase:
@@ -79,7 +83,10 @@
   $ hg resolve -m common
   (no more unresolved files)
   $ hg rebase --continue
-  saved backup bundle to $TESTTMP/a/.hg/strip-backup/*-backup.hg (glob)
+  already rebased 3:3163e20567cc "L1" as 3e046f2ecedb
+  rebasing 4:46f0b057b5c0 "L2"
+  rebasing 5:8029388f38dc "L3" (mybook)
+  saved backup bundle to $TESTTMP/a/.hg/strip-backup/3163e20567cc-5ca4656e-backup.hg (glob)
 
   $ hg tglog
   @  5:secret 'L3'  mybook
@@ -136,7 +143,6 @@
   updating to branch default
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ cd issue4041
-  $ hg phase --draft --force 9
   $ hg log -G
   o    changeset:   10:2f2496ddf49d
   |\   branch:      f1
@@ -212,6 +218,11 @@
   
   $ hg rebase -s9 -d2 --debug # use debug to really check merge base used
   rebase onto 2 starting from e31216eec445
+  ignoring null merge rebase of 3
+  ignoring null merge rebase of 4
+  ignoring null merge rebase of 6
+  ignoring null merge rebase of 8
+  rebasing 9:e31216eec445 "more changes to f1"
   rebasing: 9:e31216eec445 5/6 changesets (83.33%)
    future parents are 2 and -1
   rebase status stored
@@ -234,7 +245,12 @@
    f1.txt: remote is newer -> g
   getting f1.txt
   updating: f1.txt 1/1 files (100.00%)
+  committing files:
   f1.txt
+  committing manifest
+  committing changelog
+  rebased as 19c888675e13
+  rebasing 10:2f2496ddf49d "merge" (tip)
   rebasing: 10:2f2496ddf49d 6/6 changesets (100.00%)
    future parents are 11 and 7
   rebase status stored
@@ -248,7 +264,11 @@
    f1.txt: remote is newer -> g
   getting f1.txt
   updating: f1.txt 1/1 files (100.00%)
+  committing files:
   f1.txt
+  committing manifest
+  committing changelog
+  rebased as 2a7f09cac94c
   rebase merging completed
   update back to initial working directory parent
   resolving manifests
@@ -272,7 +292,7 @@
   bundling: 2/3 manifests (66.67%)
   bundling: 3/3 manifests (100.00%)
   bundling: f1.txt 1/1 files (100.00%)
-  saved backup bundle to $TESTTMP/issue4041/.hg/strip-backup/e31216eec445-backup.hg (glob)
+  saved backup bundle to $TESTTMP/issue4041/.hg/strip-backup/e31216eec445-15f7a814-backup.hg (glob)
   3 changesets found
   list of changesets:
   4c9fbe56a16f30c0d5dcc40ec1a97bbe3325209c
@@ -301,7 +321,7 @@
   adding f1.txt revisions
   files: 1/1 chunks (100.00%)
   added 2 changesets with 2 changes to 1 files
-  removing unknown node e31216eec445 from 1-phase boundary
   invalid branchheads cache (served): tip differs
+  truncating cache/rbc-revs-v1 to 72
   rebase completed
   updating the branch cache
--- a/tests/test-rebase-detach.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-rebase-detach.t	Sat Jan 17 18:28:30 2015 -0800
@@ -48,7 +48,8 @@
   
   $ hg phase --force --secret 3
   $ hg rebase -s 3 -d 7
-  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 3:32af7686d403 "D"
+  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/32af7686d403-6f7dface-backup.hg (glob)
 
   $ hg log -G --template "{rev}:{phase} '{desc}' {branches}\n"
   o  7:secret 'D'
@@ -99,7 +100,9 @@
   o  0: 'A'
   
   $ hg rebase -s 2 -d 7
-  saved backup bundle to $TESTTMP/a2/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 2:5fddd98957c8 "C"
+  rebasing 3:32af7686d403 "D"
+  saved backup bundle to $TESTTMP/a2/.hg/strip-backup/5fddd98957c8-f9244fa1-backup.hg (glob)
 
   $ hg tglog
   o  7: 'D'
@@ -151,7 +154,10 @@
   o  0: 'A'
   
   $ hg rebase -s 1 -d 7
-  saved backup bundle to $TESTTMP/a3/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 1:42ccdea3bb16 "B"
+  rebasing 2:5fddd98957c8 "C"
+  rebasing 3:32af7686d403 "D"
+  saved backup bundle to $TESTTMP/a3/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob)
 
   $ hg tglog
   o  7: 'D'
@@ -205,7 +211,11 @@
   o  0: 'A'
   
   $ hg rebase --collapse -s 2 -d 7
-  saved backup bundle to $TESTTMP/a4/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 2:5fddd98957c8 "C"
+  note: rebase of 2:5fddd98957c8 created no changes to commit
+  rebasing 3:32af7686d403 "D"
+  note: rebase of 3:32af7686d403 created no changes to commit
+  saved backup bundle to $TESTTMP/a4/.hg/strip-backup/5fddd98957c8-f9244fa1-backup.hg (glob)
 
   $ hg  log -G --template "{rev}:{phase} '{desc}' {branches}\n"
   o  6:secret 'Collapsed revision
@@ -264,7 +274,10 @@
   o  0: 'A'
   
   $ hg rebase -s 1 -d tip
-  saved backup bundle to $TESTTMP/a5/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 1:42ccdea3bb16 "B"
+  rebasing 2:5fddd98957c8 "C"
+  rebasing 3:32af7686d403 "D"
+  saved backup bundle to $TESTTMP/a5/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob)
 
   $ hg tglog
   o  8: 'D'
@@ -287,7 +300,9 @@
   
 
   $ hg rebase -d 5 -s 7
-  saved backup bundle to $TESTTMP/a5/.hg/strip-backup/13547172c9c0-backup.hg (glob)
+  rebasing 7:13547172c9c0 "C"
+  rebasing 8:4e27a76c371a "D" (tip)
+  saved backup bundle to $TESTTMP/a5/.hg/strip-backup/13547172c9c0-35685ded-backup.hg (glob)
   $ hg tglog
   o  8: 'D'
   |
@@ -325,7 +340,13 @@
   $ hg ci -m "J"
 
   $ hg rebase -s 8 -d 7 --collapse --config ui.merge=internal:other
-  saved backup bundle to $TESTTMP/a6/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 8:9790e768172d "I"
+  note: rebase of 8:9790e768172d created no changes to commit
+  rebasing 9:5d7b11f5fb97 "Merge"
+  note: rebase of 9:5d7b11f5fb97 created no changes to commit
+  rebasing 10:9427d4d5af81 "J" (tip)
+  note: rebase of 10:9427d4d5af81 created no changes to commit
+  saved backup bundle to $TESTTMP/a6/.hg/strip-backup/9790e768172d-c2111e9d-backup.hg (glob)
 
   $ hg tglog
   @  8: 'Collapsed revision
@@ -368,6 +389,7 @@
   adding H
   $ hg phase --force --secret 8
   $ hg rebase -s 8 -d 7 --config ui.merge=internal:fail
+  rebasing 8:6215fafa5447 "H2" (tip)
   merging H
   warning: conflicts during merge.
   merging H incomplete! (edit conflicts, then use 'hg resolve --mark')
@@ -376,7 +398,9 @@
   $ hg resolve --all -t internal:local
   (no more unresolved files)
   $ hg rebase -c
-  saved backup bundle to $TESTTMP/a7/.hg/strip-backup/6215fafa5447-backup.hg (glob)
+  rebasing 8:6215fafa5447 "H2" (tip)
+  note: rebase of 8:6215fafa5447 created no changes to commit
+  saved backup bundle to $TESTTMP/a7/.hg/strip-backup/6215fafa5447-5804ebd5-backup.hg (glob)
   $ hg  log -G --template "{rev}:{phase} '{desc}' {branches}\n"
   @  7:draft 'H'
   |
--- a/tests/test-rebase-interruptions.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-rebase-interruptions.t	Sat Jan 17 18:28:30 2015 -0800
@@ -57,6 +57,8 @@
 Rebasing B onto E:
 
   $ hg rebase -s 1 -d 4
+  rebasing 1:27547f69f254 "B"
+  rebasing 2:965c486023db "C"
   merging A
   warning: conflicts during merge.
   merging A incomplete! (edit conflicts, then use 'hg resolve --mark')
@@ -93,6 +95,8 @@
 Resume the rebasing:
 
   $ hg rebase --continue
+  already rebased 1:27547f69f254 "B" as 45396c49d53b
+  rebasing 2:965c486023db "C"
   merging A
   warning: conflicts during merge.
   merging A incomplete! (edit conflicts, then use 'hg resolve --mark')
@@ -107,6 +111,8 @@
   (no more unresolved files)
 
   $ hg rebase --continue
+  already rebased 1:27547f69f254 "B" as 45396c49d53b
+  rebasing 2:965c486023db "C"
   warning: new changesets detected on source branch, not stripping
 
   $ hg tglogp
@@ -148,6 +154,8 @@
 Rebasing B onto E:
 
   $ hg rebase -s 1 -d 4
+  rebasing 1:27547f69f254 "B"
+  rebasing 2:965c486023db "C"
   merging A
   warning: conflicts during merge.
   merging A incomplete! (edit conflicts, then use 'hg resolve --mark')
@@ -219,6 +227,8 @@
 Rebasing B onto E:
 
   $ hg rebase -s 1 -d 4
+  rebasing 1:27547f69f254 "B"
+  rebasing 2:965c486023db "C"
   merging A
   warning: conflicts during merge.
   merging A incomplete! (edit conflicts, then use 'hg resolve --mark')
--- a/tests/test-rebase-issue-noparam-single-rev.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-rebase-issue-noparam-single-rev.t	Sat Jan 17 18:28:30 2015 -0800
@@ -52,7 +52,8 @@
   $ hg up -q -C 2
 
   $ hg rebase
-  saved backup bundle to $TESTTMP/a/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 2:87c180a611f2 "l1"
+  saved backup bundle to $TESTTMP/a/.hg/strip-backup/87c180a611f2-a5be192d-backup.hg (glob)
 
   $ hg tglog
   @  4: 'l1'
@@ -110,7 +111,9 @@
   $ hg up -q -C 3
 
   $ hg rebase
-  saved backup bundle to $TESTTMP/b/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 2:87c180a611f2 "l1"
+  rebasing 3:1ac923b736ef "l2"
+  saved backup bundle to $TESTTMP/b/.hg/strip-backup/87c180a611f2-b980535c-backup.hg (glob)
 
   $ hg tglog
   @  4: 'l2'
--- a/tests/test-rebase-mq-skip.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-rebase-mq-skip.t	Sat Jan 17 18:28:30 2015 -0800
@@ -28,12 +28,12 @@
 
   $ hg up -q 0
 
-  $ hg qnew p0.patch
+  $ hg qnew p0.patch -d '1 0'
   $ echo p0 > p0
   $ hg add p0
   $ hg qref -m P0
 
-  $ hg qnew p1.patch
+  $ hg qnew p1.patch -d '2 0'
   $ echo p1 > p1
   $ hg add p1
   $ hg qref -m P1
@@ -49,8 +49,43 @@
 
   $ hg up -q -C qtip
 
-  $ hg rebase
-  saved backup bundle to $TESTTMP/a/.hg/strip-backup/*-backup.hg (glob)
+  $ hg rebase -v
+  rebasing 2:13a46ce44f60 "P0" (p0.patch qbase)
+  resolving manifests
+  removing p0
+  getting r1
+  resolving manifests
+  getting p0
+  committing files:
+  p0
+  committing manifest
+  committing changelog
+  rebasing 3:148775c71080 "P1" (p1.patch qtip)
+  resolving manifests
+  note: rebase of 3:148775c71080 created no changes to commit
+  rebase merging completed
+  updating mq patch p0.patch to 5:9ecc820b1737
+  $TESTTMP/a/.hg/patches/p0.patch (glob)
+  2 changesets found
+  uncompressed size of bundle content:
+       344 (changelog)
+       284 (manifests)
+       109  p0
+       109  p1
+  saved backup bundle to $TESTTMP/a/.hg/strip-backup/13a46ce44f60-5da6ecfb-backup.hg (glob)
+  2 changesets found
+  uncompressed size of bundle content:
+       399 (changelog)
+       284 (manifests)
+       109  p0
+       109  p1
+  adding branch
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 2 changes to 2 files
+  rebase completed
+  1 revisions have been skipped
 
   $ hg tglog
   @  3: 'P0' tags: p0.patch qbase qtip tip
@@ -107,6 +142,12 @@
   $ hg up -q qtip
 
   $ HGMERGE=internal:fail hg rebase
+  rebasing 1:b4bffa6e4776 "r1" (1.diff qbase)
+  note: rebase of 1:b4bffa6e4776 created no changes to commit
+  rebasing 2:c0fd129beb01 "r2" (2.diff)
+  rebasing 3:6ff5b8feed8e "r3" (3.diff)
+  note: rebase of 3:6ff5b8feed8e created no changes to commit
+  rebasing 4:094320fec554 "r4" (4.diff)
   unresolved conflicts (see hg resolve, then hg rebase --continue)
   [1]
 
@@ -114,7 +155,15 @@
   (no more unresolved files)
 
   $ hg rebase --continue
-  saved backup bundle to $TESTTMP/b/.hg/strip-backup/*-backup.hg (glob)
+  already rebased 1:b4bffa6e4776 "r1" (1.diff qbase) as 057f55ff8f44
+  already rebased 2:c0fd129beb01 "r2" (2.diff) as 1660ab13ce9a
+  already rebased 3:6ff5b8feed8e "r3" (3.diff) as 1660ab13ce9a
+  rebasing 4:094320fec554 "r4" (4.diff)
+  note: rebase of 4:094320fec554 created no changes to commit
+  rebasing 5:681a378595ba "r5" (5.diff)
+  rebasing 6:512a1f24768b "r6" (6.diff qtip)
+  note: rebase of 6:512a1f24768b created no changes to commit
+  saved backup bundle to $TESTTMP/b/.hg/strip-backup/b4bffa6e4776-b9bfb84d-backup.hg (glob)
 
   $ hg tglog
   @  8: 'r5' tags: 5.diff qtip tip
--- a/tests/test-rebase-mq.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-rebase-mq.t	Sat Jan 17 18:28:30 2015 -0800
@@ -24,13 +24,13 @@
 
   $ hg up -q 0
 
-  $ hg qnew f.patch
+  $ hg qnew f.patch -d '1 0'
   $ echo mq1 > f
   $ hg qref -m P0
 
   $ hg qnew f2.patch
   $ echo mq2 > f
-  $ hg qref -m P1
+  $ hg qref -m P1 -d '2 0'
 
   $ hg tglog
   @  3: 'P1' tags: f2.patch qtip tip
@@ -59,6 +59,7 @@
 Rebase - generate a conflict:
 
   $ hg rebase -s 2 -d 1
+  rebasing 2:3504f44bffc0 "P0" (f.patch qbase)
   merging f
   warning: conflicts during merge.
   merging f incomplete! (edit conflicts, then use 'hg resolve --mark')
@@ -71,6 +72,8 @@
   $ hg resolve -m f
   (no more unresolved files)
   $ hg rebase -c
+  rebasing 2:3504f44bffc0 "P0" (f.patch qbase)
+  rebasing 3:929394423cd3 "P1" (f2.patch qtip tip)
   merging f
   warning: conflicts during merge.
   merging f incomplete! (edit conflicts, then use 'hg resolve --mark')
@@ -83,7 +86,9 @@
   $ hg resolve -m f
   (no more unresolved files)
   $ hg rebase -c
-  saved backup bundle to $TESTTMP/a/.hg/strip-backup/*-backup.hg (glob)
+  already rebased 2:3504f44bffc0 "P0" (f.patch qbase) as ebe9914c0d1c
+  rebasing 3:929394423cd3 "P1" (f2.patch qtip)
+  saved backup bundle to $TESTTMP/a/.hg/strip-backup/3504f44bffc0-30595b40-backup.hg (glob)
 
   $ hg tglog
   @  3: 'P1' tags: f2.patch qtip tip
@@ -102,15 +107,15 @@
   $ cat .hg/patches/f.patch
   # HG changeset patch
   # User test
-  # Date ?????????? ? (glob)
-  #      * (glob)
-  # Node ID ???????????????????????????????????????? (glob)
+  # Date 1 0
+  #      Thu Jan 01 00:00:01 1970 +0000
+  # Node ID ebe9914c0d1c3f60096e952fa4dbb3d377dea3ab
   # Parent  bac9ed9960d8992bcad75864a879fa76cadaf1b0
   P0
   
-  diff -r bac9ed9960d8 -r ???????????? f (glob)
+  diff -r bac9ed9960d8 -r ebe9914c0d1c f
   --- a/f	Thu Jan 01 00:00:00 1970 +0000
-  +++ b/f	??? ??? ?? ??:??:?? ???? ????? (glob)
+  +++ b/f	Thu Jan 01 00:00:01 1970 +0000
   @@ -1,1 +1,1 @@
   -r1
   +mq1r1
@@ -125,15 +130,15 @@
   $ cat .hg/patches/f2.patch
   # HG changeset patch
   # User test
-  # Date ?????????? ? (glob)
-  #      * (glob)
-  # Node ID ???????????????????????????????????????? (glob)
-  # Parent  ???????????????????????????????????????? (glob)
+  # Date 2 0
+  #      Thu Jan 01 00:00:02 1970 +0000
+  # Node ID 462012cf340c97d44d62377c985a423f6bb82f07
+  # Parent  ebe9914c0d1c3f60096e952fa4dbb3d377dea3ab
   P1
   
-  diff -r ???????????? -r ???????????? f (glob)
-  --- a/f	??? ??? ?? ??:??:?? ???? ????? (glob)
-  +++ b/f	??? ??? ?? ??:??:?? ???? ????? (glob)
+  diff -r ebe9914c0d1c -r 462012cf340c f
+  --- a/f	Thu Jan 01 00:00:01 1970 +0000
+  +++ b/f	Thu Jan 01 00:00:02 1970 +0000
   @@ -1,1 +1,1 @@
   -mq1r1
   +mq1r1mq2
@@ -150,12 +155,12 @@
 
   $ hg up -q 0
 
-  $ hg qnew --git f_git.patch
+  $ hg qnew --git f_git.patch -d '3 0'
   $ echo mq1 > p
   $ hg add p
   $ hg qref --git -m 'P0 (git)'
 
-  $ hg qnew f.patch
+  $ hg qnew f.patch -d '4 0'
   $ echo mq2 > p
   $ hg qref -m P1
   $ hg qci -m 'save patch state'
@@ -171,6 +176,8 @@
   series
 
   $ cat .hg/patches/f_git.patch
+  Date: 3 0
+  
   P0 (git)
   
   diff --git a/p b/p
@@ -181,6 +188,8 @@
   +mq1
 
   $ cat .hg/patches/f.patch
+  Date: 4 0
+  
   P1
   
   diff -r ???????????? p (glob)
@@ -194,7 +203,9 @@
 Rebase the applied mq patches:
 
   $ hg rebase -s 2 -d 1
-  saved backup bundle to $TESTTMP/a/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 2:0c587ffcb480 "P0 (git)" (f_git.patch qbase)
+  rebasing 3:c7f18665e4bc "P1" (f.patch qtip tip)
+  saved backup bundle to $TESTTMP/a/.hg/strip-backup/0c587ffcb480-0ea5695f-backup.hg (glob)
 
   $ hg qci -m 'save patch state'
 
@@ -211,9 +222,9 @@
   $ cat .hg/patches/f_git.patch
   # HG changeset patch
   # User test
-  # Date ?????????? ? (glob)
-  #      * (glob)
-  # Node ID ???????????????????????????????????????? (glob)
+  # Date 3 0
+  #      Thu Jan 01 00:00:03 1970 +0000
+  # Node ID 12d9f6a3bbe560dee50c7c454d434add7fb8e837
   # Parent  bac9ed9960d8992bcad75864a879fa76cadaf1b0
   P0 (git)
   
@@ -227,15 +238,15 @@
   $ cat .hg/patches/f.patch
   # HG changeset patch
   # User test
-  # Date ?????????? ? (glob)
-  #      * (glob)
-  # Node ID ???????????????????????????????????????? (glob)
-  # Parent  ???????????????????????????????????????? (glob)
+  # Date 4 0
+  #      Thu Jan 01 00:00:04 1970 +0000
+  # Node ID c77a2661c64c60d82f63c4f7aefd95b3a948a557
+  # Parent  12d9f6a3bbe560dee50c7c454d434add7fb8e837
   P1
   
-  diff -r ???????????? -r ???????????? p (glob)
-  --- a/p	??? ??? ?? ??:??:?? ???? ????? (glob)
-  +++ b/p	??? ??? ?? ??:??:?? ???? ????? (glob)
+  diff -r 12d9f6a3bbe5 -r c77a2661c64c p
+  --- a/p	Thu Jan 01 00:00:03 1970 +0000
+  +++ b/p	Thu Jan 01 00:00:04 1970 +0000
   @@ -1,1 +1,1 @@
   -mq1
   +mq2
@@ -256,10 +267,10 @@
   $ echo guarded > guarded
   $ hg add guarded
   $ hg qnew guarded
-  $ hg qnew empty-important -m 'important commit message'
+  $ hg qnew empty-important -m 'important commit message' -d '1 0'
   $ echo bar > bar
   $ hg add bar
-  $ hg qnew bar
+  $ hg qnew bar -d '2 0'
   $ echo foo > foo
   $ hg add foo
   $ hg qnew foo
@@ -326,6 +337,7 @@
   foo
   $ [ -f .hg/patches/empty-important ]
   $ hg -q rebase -d 2
+  note: rebase of 1:0aaf4c3af7eb created no changes to commit
   $ hg qseries
   guarded
   bar
@@ -339,10 +351,10 @@
   foo: +baz
 
   $ hg tglog
-  @  2:* '[mq]: bar' tags: bar qbase qtip tip (glob)
+  @  2: '[mq]: bar' tags: bar qbase qtip tip
   |
-  o  1:* 'b' tags: qparent (glob)
+  o  1: 'b' tags: qparent
   |
-  o  0:* 'a' tags: (glob)
+  o  0: 'a' tags:
   
   $ cd ..
--- a/tests/test-rebase-named-branches.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-rebase-named-branches.t	Sat Jan 17 18:28:30 2015 -0800
@@ -69,7 +69,11 @@
 Branch name containing a dash (issue3181)
 
   $ hg rebase -b dev-two -d dev-one --keepbranches
-  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 5:24b6387c8c8c "F"
+  rebasing 6:eea13746799a "G"
+  rebasing 7:02de42196ebe "H"
+  rebasing 9:cb039b7cae8e "dev-two named branch" (tip)
+  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/24b6387c8c8c-24cb8001-backup.hg (glob)
 
   $ hg tglog
   @  9: 'dev-two named branch' dev-two
@@ -93,7 +97,13 @@
   o  0: 'A'
   
   $ hg rebase -s dev-one -d 0 --keepbranches
-  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 5:643fc9128048 "dev-one named branch"
+  note: rebase of 5:643fc9128048 created no changes to commit
+  rebasing 6:24de4aff8e28 "F"
+  rebasing 7:4b988a958030 "G"
+  rebasing 8:31d0e4ba75e6 "H"
+  rebasing 9:9e70cd31750f "dev-two named branch" (tip)
+  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/643fc9128048-c4ee9ef5-backup.hg (glob)
 
   $ hg tglog
   @  8: 'dev-two named branch' dev-two
@@ -143,7 +153,11 @@
   o  0: 'A'
   
   $ hg rebase -b 'max(branch("dev-two"))' -d dev-one --keepbranches
-  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 5:77854864208c "F"
+  rebasing 6:63b4f9c788a1 "G"
+  rebasing 7:87861e68abd3 "H"
+  rebasing 8:ec00d4e0efca "dev-two named branch"
+  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/77854864208c-74d59436-backup.hg (glob)
 
   $ hg tglog
   o  9: 'dev-two named branch' dev-two
@@ -167,7 +181,13 @@
   o  0: 'A'
   
   $ hg rebase -s 'max(branch("dev-one"))' -d 0 --keepbranches
-  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 5:643fc9128048 "dev-one named branch"
+  note: rebase of 5:643fc9128048 created no changes to commit
+  rebasing 6:05584c618d45 "F"
+  rebasing 7:471695f5257d "G"
+  rebasing 8:8382a539a2df "H"
+  rebasing 9:11f718458b32 "dev-two named branch" (tip)
+  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/643fc9128048-177f3c5c-backup.hg (glob)
 
   $ hg tglog
   o  8: 'dev-two named branch' dev-two
@@ -192,7 +212,10 @@
 Rebasing descendant onto ancestor across different named branches
 
   $ hg rebase -s 1 -d 8 --keepbranches
-  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 1:42ccdea3bb16 "B"
+  rebasing 2:5fddd98957c8 "C"
+  rebasing 3:32af7686d403 "D"
+  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob)
 
   $ hg tglog
   o  8: 'D'
@@ -218,7 +241,11 @@
   [255]
 
   $ hg rebase -s 5 -d 4
-  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 5:32d3b0de7f37 "dev-two named branch"
+  rebasing 6:580fcd9fd48f "B"
+  rebasing 7:32aba0402ed2 "C"
+  rebasing 8:e4787b575338 "D" (tip)
+  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/32d3b0de7f37-c37815ca-backup.hg (glob)
 
   $ hg tglog
   o  8: 'D'
@@ -248,7 +275,12 @@
   $ hg ci -m 'close b' --close
   $ hg rebase -b 8 -d b
   reopening closed branch head ea9de14a36c6
-  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 4:86693275b2ef "H"
+  rebasing 5:2149726d0970 "dev-two named branch"
+  rebasing 6:81e55225e95d "B"
+  rebasing 7:09eda3dc3195 "C"
+  rebasing 8:31298fc9d159 "D"
+  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/86693275b2ef-f9fcf4e2-backup.hg (glob)
 
   $ cd ..
 
@@ -302,7 +334,8 @@
   $ cd ../case2
   $ hg up -qr 1
   $ hg rebase
-  saved backup bundle to $TESTTMP/case2/.hg/strip-backup/40039acb7ca5-backup.hg (glob)
+  rebasing 1:40039acb7ca5 "b1"
+  saved backup bundle to $TESTTMP/case2/.hg/strip-backup/40039acb7ca5-342b72d1-backup.hg (glob)
   $ hg tglog
   @  3: 'b1' b
   |
--- a/tests/test-rebase-newancestor.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-rebase-newancestor.t	Sat Jan 17 18:28:30 2015 -0800
@@ -38,9 +38,11 @@
   o  0: 'A'
   
   $ hg rebase -s 1 -d 3
-  merging a
+  rebasing 1:0f4f7cb4f549 "B"
   merging a
-  saved backup bundle to $TESTTMP/repo/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 2:30ae917c0e4f "C"
+  merging a
+  saved backup bundle to $TESTTMP/repo/.hg/strip-backup/0f4f7cb4f549-82b3b163-backup.hg (glob)
 
   $ hg tglog
   o  3: 'C'
@@ -53,3 +55,276 @@
   
 
   $ cd ..
+
+
+Test rebasing of merges with ancestors of the rebase destination - a situation
+that often happens when trying to recover from repeated merging with a mainline
+branch.
+
+The test case creates a dev branch that contains a couple of merges from the
+default branch. When rebasing to the default branch, these merges would be
+merges with ancestors on the same branch. The merges _could_ contain some
+interesting conflict resolutions or additional changes in the merge commit, but
+that is mixed up with the actual merge stuff and there is in general no way to
+separate them.
+
+Note: The dev branch contains _no_ changes to f-default. It might be unclear
+how rebasing of ancestor merges should be handled, but the current behavior
+with spurious prompts for conflicts in files that didn't change seems very
+wrong.
+
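As a quick sanity check of that claim, here is a toy graph mirroring the history the
test builds below, together with a hypothetical ancestry helper (plain Python, not hg
code): both dev merges pull in a second parent that is already an ancestor of the
rebase destination, so once rebased onto default there is nothing of their own left
for them to merge.

    parents = {0: [],        # default: create f-default
               1: [0],       # dev: create branch
               2: [1],       # dev: f-dev stuff
               3: [0],       # default: f-default stuff
               4: [2, 3],    # dev: merge default
               5: [3],       # default: remove f-default
               6: [4, 5],    # dev: merge default
               7: [5]}       # default: f-other stuff (the rebase destination)

    def is_ancestor(a, b):
        """True if a is an ancestor of (or equal to) b in the toy graph."""
        stack = [b]
        while stack:
            n = stack.pop()
            if n == a:
                return True
            stack.extend(parents[n])
        return False

    print(is_ancestor(3, 7), is_ancestor(5, 7))   # True True
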
+  $ hg init ancestor-merge
+  $ cd ancestor-merge
+
+  $ touch f-default
+  $ hg ci -Aqm 'default: create f-default'
+
+  $ hg branch -q dev
+  $ hg ci -qm 'dev: create branch'
+
+  $ echo stuff > f-dev
+  $ hg ci -Aqm 'dev: f-dev stuff'
+
+  $ hg up -q default
+  $ echo stuff > f-default
+  $ hg ci -m 'default: f-default stuff'
+
+  $ hg up -q dev
+  $ hg merge -q default
+  $ hg ci -m 'dev: merge default'
+
+  $ hg up -q default
+  $ hg rm f-default
+  $ hg ci -m 'default: remove f-default'
+
+  $ hg up -q dev
+  $ hg merge -q default
+  $ hg ci -m 'dev: merge default'
+
+  $ hg up -q default
+  $ echo stuff > f-other
+  $ hg ci -Aqm 'default: f-other stuff'
+
+  $ hg tglog
+  @  7: 'default: f-other stuff'
+  |
+  | o  6: 'dev: merge default' dev
+  |/|
+  o |  5: 'default: remove f-default'
+  | |
+  | o  4: 'dev: merge default' dev
+  |/|
+  o |  3: 'default: f-default stuff'
+  | |
+  | o  2: 'dev: f-dev stuff' dev
+  | |
+  | o  1: 'dev: create branch' dev
+  |/
+  o  0: 'default: create f-default'
+  
+  $ hg clone -qU . ../ancestor-merge-2
+
+Full rebase all the way back from branching point:
+
+  $ hg rebase -r 'only(dev,default)' -d default
+  rebasing 1:1d1a643d390e "dev: create branch"
+  note: rebase of 1:1d1a643d390e created no changes to commit
+  rebasing 2:ec2c14fb2984 "dev: f-dev stuff"
+  rebasing 4:4b019212aaf6 "dev: merge default"
+  remote changed f-default which local deleted
+  use (c)hanged version or leave (d)eleted? c
+  rebasing 6:9455ee510502 "dev: merge default"
+  saved backup bundle to $TESTTMP/ancestor-merge/.hg/strip-backup/1d1a643d390e-43e9e04b-backup.hg (glob)
+  $ hg tglog
+  o  6: 'dev: merge default'
+  |
+  o  5: 'dev: merge default'
+  |
+  o  4: 'dev: f-dev stuff'
+  |
+  @  3: 'default: f-other stuff'
+  |
+  o  2: 'default: remove f-default'
+  |
+  o  1: 'default: f-default stuff'
+  |
+  o  0: 'default: create f-default'
+  
+Grafty cherry picking rebasing:
+
+  $ cd ../ancestor-merge-2
+
+  $ hg phase -fdr0:
+  $ hg rebase -r 'children(only(dev,default))' -d default
+  rebasing 2:ec2c14fb2984 "dev: f-dev stuff"
+  rebasing 4:4b019212aaf6 "dev: merge default"
+  remote changed f-default which local deleted
+  use (c)hanged version or leave (d)eleted? c
+  rebasing 6:9455ee510502 "dev: merge default"
+  saved backup bundle to $TESTTMP/ancestor-merge-2/.hg/strip-backup/ec2c14fb2984-62d0b222-backup.hg (glob)
+  $ hg tglog
+  o  7: 'dev: merge default'
+  |
+  o  6: 'dev: merge default'
+  |
+  o  5: 'dev: f-dev stuff'
+  |
+  o  4: 'default: f-other stuff'
+  |
+  o  3: 'default: remove f-default'
+  |
+  o  2: 'default: f-default stuff'
+  |
+  | o  1: 'dev: create branch' dev
+  |/
+  o  0: 'default: create f-default'
+  
+  $ cd ..
+
+
+Test order of parents of a rebased merge with un-rebased changes as p1.
+
+  $ hg init parentorder
+  $ cd parentorder
+  $ touch f
+  $ hg ci -Aqm common
+  $ touch change
+  $ hg ci -Aqm change
+  $ touch target
+  $ hg ci -Aqm target
+  $ hg up -qr 0
+  $ touch outside
+  $ hg ci -Aqm outside
+  $ hg merge -qr 1
+  $ hg ci -m 'merge p1 3=outside p2 1=ancestor'
+  $ hg par
+  changeset:   4:6990226659be
+  tag:         tip
+  parent:      3:f59da8fc0fcf
+  parent:      1:dd40c13f7a6f
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     merge p1 3=outside p2 1=ancestor
+  
+  $ hg up -qr 1
+  $ hg merge -qr 3
+  $ hg ci -qm 'merge p1 1=ancestor p2 3=outside'
+  $ hg par
+  changeset:   5:a57575f79074
+  tag:         tip
+  parent:      1:dd40c13f7a6f
+  parent:      3:f59da8fc0fcf
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     merge p1 1=ancestor p2 3=outside
+  
+  $ hg tglog
+  @    5: 'merge p1 1=ancestor p2 3=outside'
+  |\
+  +---o  4: 'merge p1 3=outside p2 1=ancestor'
+  | |/
+  | o  3: 'outside'
+  | |
+  +---o  2: 'target'
+  | |
+  o |  1: 'change'
+  |/
+  o  0: 'common'
+  
+  $ hg rebase -r 4 -d 2
+  rebasing 4:6990226659be "merge p1 3=outside p2 1=ancestor"
+  saved backup bundle to $TESTTMP/parentorder/.hg/strip-backup/6990226659be-4d67a0d3-backup.hg (glob)
+  $ hg tip
+  changeset:   5:cca50676b1c5
+  tag:         tip
+  parent:      2:a60552eb93fb
+  parent:      3:f59da8fc0fcf
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     merge p1 3=outside p2 1=ancestor
+  
+  $ hg rebase -r 4 -d 2
+  rebasing 4:a57575f79074 "merge p1 1=ancestor p2 3=outside"
+  saved backup bundle to $TESTTMP/parentorder/.hg/strip-backup/a57575f79074-385426e5-backup.hg (glob)
+  $ hg tip
+  changeset:   5:f9daf77ffe76
+  tag:         tip
+  parent:      2:a60552eb93fb
+  parent:      3:f59da8fc0fcf
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     merge p1 1=ancestor p2 3=outside
+  
+  $ hg tglog
+  @    5: 'merge p1 1=ancestor p2 3=outside'
+  |\
+  +---o  4: 'merge p1 3=outside p2 1=ancestor'
+  | |/
+  | o  3: 'outside'
+  | |
+  o |  2: 'target'
+  | |
+  o |  1: 'change'
+  |/
+  o  0: 'common'
+  
+rebase of merge of ancestors
+
+  $ hg up -qr 2
+  $ hg merge -qr 3
+  $ echo 'other change while merging future "rebase ancestors"' > other
+  $ hg ci -Aqm 'merge rebase ancestors'
+  $ hg rebase -d 5 -v
+  rebasing 6:4c5f12f25ebe "merge rebase ancestors" (tip)
+  resolving manifests
+  removing other
+  note: merging f9daf77ffe76+ and 4c5f12f25ebe using bids from ancestors a60552eb93fb and f59da8fc0fcf
+  
+  calculating bids for ancestor a60552eb93fb
+  resolving manifests
+  
+  calculating bids for ancestor f59da8fc0fcf
+  resolving manifests
+  
+  auction for merging merge bids
+   other: consensus for g
+  end of auction
+  
+  getting other
+  committing files:
+  other
+  committing manifest
+  committing changelog
+  rebase merging completed
+  1 changesets found
+  uncompressed size of bundle content:
+       193 (changelog)
+       196 (manifests)
+       162  other
+  saved backup bundle to $TESTTMP/parentorder/.hg/strip-backup/4c5f12f25ebe-f46990e5-backup.hg (glob)
+  1 changesets found
+  uncompressed size of bundle content:
+       252 (changelog)
+       147 (manifests)
+       162  other
+  adding branch
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 1 changes to 1 files
+  rebase completed
+  $ hg tglog
+  @  6: 'merge rebase ancestors'
+  |
+  o    5: 'merge p1 1=ancestor p2 3=outside'
+  |\
+  +---o  4: 'merge p1 3=outside p2 1=ancestor'
+  | |/
+  | o  3: 'outside'
+  | |
+  o |  2: 'target'
+  | |
+  o |  1: 'change'
+  |/
+  o  0: 'common'
+  
--- a/tests/test-rebase-obsolete.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-rebase-obsolete.t	Sat Jan 17 18:28:30 2015 -0800
@@ -56,6 +56,9 @@
   $ hg up 32af7686d403
   3 files updated, 0 files merged, 2 files removed, 0 files unresolved
   $ hg rebase -d eea13746799a
+  rebasing 1:42ccdea3bb16 "B"
+  rebasing 2:5fddd98957c8 "C"
+  rebasing 3:32af7686d403 "D"
   $ hg log -G
   @  10:8eeb3c33ad33 D
   |
@@ -118,9 +121,14 @@
 set.
 
   $ hg graft 42ccdea3bb16 32af7686d403
-  grafting revision 1
-  grafting revision 3
+  grafting 1:42ccdea3bb16 "B"
+  grafting 3:32af7686d403 "D"
   $ hg rebase  -s 42ccdea3bb16 -d .
+  rebasing 1:42ccdea3bb16 "B"
+  note: rebase of 1:42ccdea3bb16 created no changes to commit
+  rebasing 2:5fddd98957c8 "C"
+  rebasing 3:32af7686d403 "D"
+  note: rebase of 3:32af7686d403 created no changes to commit
   $ hg log -G
   o  10:5ae4c968c6ac C
   |
@@ -170,6 +178,7 @@
 More complex case were part of the rebase set were already rebased
 
   $ hg rebase --rev 'desc(D)' --dest 'desc(H)'
+  rebasing 9:08483444fef9 "D"
   $ hg debugobsolete
   42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (*) {'user': 'test'} (glob)
   5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (*) {'user': 'test'} (glob)
@@ -195,6 +204,10 @@
   o  0:cd010b8cd998 A
   
   $ hg rebase --source 'desc(B)' --dest 'tip'
+  rebasing 8:8877864f1edb "B"
+  rebasing 9:08483444fef9 "D"
+  note: rebase of 9:08483444fef9 created no changes to commit
+  rebasing 10:5ae4c968c6ac "C"
   $ hg debugobsolete
   42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (*) {'user': 'test'} (glob)
   5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (*) {'user': 'test'} (glob)
@@ -247,6 +260,12 @@
   3 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ cd collapse
   $ hg rebase  -s 42ccdea3bb16 -d eea13746799a --collapse
+  rebasing 1:42ccdea3bb16 "B"
+  note: rebase of 1:42ccdea3bb16 created no changes to commit
+  rebasing 2:5fddd98957c8 "C"
+  note: rebase of 2:5fddd98957c8 created no changes to commit
+  rebasing 3:32af7686d403 "D"
+  note: rebase of 3:32af7686d403 created no changes to commit
   $ hg log -G
   o  8:4dc2197e807b Collapsed revision
   |
@@ -299,7 +318,10 @@
   3 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ cd hidden
   $ hg rebase -s 5fddd98957c8 -d eea13746799a
+  rebasing 2:5fddd98957c8 "C"
+  rebasing 3:32af7686d403 "D"
   $ hg rebase -s 42ccdea3bb16 -d 02de42196ebe
+  rebasing 1:42ccdea3bb16 "B"
   $ hg log -G
   o  10:7c6027df6a99 B
   |
@@ -351,6 +373,7 @@
   $ hg log -r 'children(8)'
   9:cf44d2f5a9f4 D (no-eol)
   $ hg rebase -r 8
+  rebasing 8:e273c5e7d2d2 "C"
   $ hg log -G
   o  11:0d8f238b634c C
   |
@@ -376,6 +399,10 @@
 ------------------------------------
 
   $ hg rebase --dest 4 --rev '7+11+9'
+  rebasing 7:02de42196ebe "H"
+  rebasing 9:cf44d2f5a9f4 "D"
+  not rebasing ignored 10:7c6027df6a99 "B"
+  rebasing 11:0d8f238b634c "C" (tip)
   $ hg log -G
   o  14:1e8370e38cca C
   |
@@ -442,6 +469,10 @@
 (actual test)
 
   $ hg rebase --dest 6 --rev '((desc(H) + desc(D))::) - desc(M)'
+  rebasing 3:32af7686d403 "D"
+  rebasing 7:02de42196ebe "H"
+  not rebasing ignored 8:53a6a128b2b7 "M"
+  rebasing 9:4bde274eefcf "I" (tip)
   $ hg log -G
   @  12:acd174b7ab39 I
   |
--- a/tests/test-rebase-parameters.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-rebase-parameters.t	Sat Jan 17 18:28:30 2015 -0800
@@ -120,7 +120,10 @@
   $ hg up -q -C 3
 
   $ hg rebase
-  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 1:42ccdea3bb16 "B"
+  rebasing 2:5fddd98957c8 "C"
+  rebasing 3:32af7686d403 "D"
+  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob)
 
   $ hg tglog
   @  8: 'D'
@@ -155,7 +158,10 @@
   $ cd a2
 
   $ hg rebase --base .
-  saved backup bundle to $TESTTMP/a2/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 1:42ccdea3bb16 "B"
+  rebasing 2:5fddd98957c8 "C"
+  rebasing 3:32af7686d403 "D"
+  saved backup bundle to $TESTTMP/a2/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob)
 
   $ hg tglog
   @  8: 'D'
@@ -185,7 +191,10 @@
   $ cd a3
 
   $ hg rebase --dest 'branch(.)'
-  saved backup bundle to $TESTTMP/a3/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 1:42ccdea3bb16 "B"
+  rebasing 2:5fddd98957c8 "C"
+  rebasing 3:32af7686d403 "D"
+  saved backup bundle to $TESTTMP/a3/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob)
 
   $ hg tglog
   @  8: 'D'
@@ -215,7 +224,9 @@
   $ cd a4
 
   $ hg rebase --source 'desc("C")'
-  saved backup bundle to $TESTTMP/a4/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 2:5fddd98957c8 "C"
+  rebasing 3:32af7686d403 "D"
+  saved backup bundle to $TESTTMP/a4/.hg/strip-backup/5fddd98957c8-f9244fa1-backup.hg (glob)
 
   $ hg tglog
   o  8: 'D'
@@ -245,7 +256,10 @@
   $ cd a5
 
   $ hg rebase --dest 6
-  saved backup bundle to $TESTTMP/a5/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 1:42ccdea3bb16 "B"
+  rebasing 2:5fddd98957c8 "C"
+  rebasing 3:32af7686d403 "D"
+  saved backup bundle to $TESTTMP/a5/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob)
 
   $ hg tglog
   @  8: 'D'
@@ -275,7 +289,10 @@
   $ cd a6
 
   $ hg rebase --base 'desc("D")'
-  saved backup bundle to $TESTTMP/a6/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 1:42ccdea3bb16 "B"
+  rebasing 2:5fddd98957c8 "C"
+  rebasing 3:32af7686d403 "D"
+  saved backup bundle to $TESTTMP/a6/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob)
 
   $ hg tglog
   o  8: 'D'
@@ -305,7 +322,9 @@
   $ cd a7
 
   $ hg rebase --source 2 --dest 7
-  saved backup bundle to $TESTTMP/a7/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 2:5fddd98957c8 "C"
+  rebasing 3:32af7686d403 "D"
+  saved backup bundle to $TESTTMP/a7/.hg/strip-backup/5fddd98957c8-f9244fa1-backup.hg (glob)
 
   $ hg tglog
   o  8: 'D'
@@ -335,7 +354,10 @@
   $ cd a8
 
   $ hg rebase --base 3 --dest 7
-  saved backup bundle to $TESTTMP/a8/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 1:42ccdea3bb16 "B"
+  rebasing 2:5fddd98957c8 "C"
+  rebasing 3:32af7686d403 "D"
+  saved backup bundle to $TESTTMP/a8/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob)
 
   $ hg tglog
   o  8: 'D'
@@ -365,7 +387,9 @@
   $ cd a9
 
   $ hg rebase --rev 'desc("C")::'
-  saved backup bundle to $TESTTMP/a9/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 2:5fddd98957c8 "C"
+  rebasing 3:32af7686d403 "D"
+  saved backup bundle to $TESTTMP/a9/.hg/strip-backup/5fddd98957c8-f9244fa1-backup.hg (glob)
 
   $ hg tglog
   o  8: 'D'
@@ -413,7 +437,9 @@
   $ cd b1
 
   $ hg rebase -s 2 -d 1 --tool internal:local
-  saved backup bundle to $TESTTMP/b1/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 2:e4e3f3546619 "c2b" (tip)
+  note: rebase of 2:e4e3f3546619 created no changes to commit
+  saved backup bundle to $TESTTMP/b1/.hg/strip-backup/e4e3f3546619-b0841178-backup.hg (glob)
 
   $ hg cat c2
   c2
@@ -425,7 +451,8 @@
   $ cd b2
 
   $ hg rebase -s 2 -d 1 --tool internal:other
-  saved backup bundle to $TESTTMP/b2/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 2:e4e3f3546619 "c2b" (tip)
+  saved backup bundle to $TESTTMP/b2/.hg/strip-backup/e4e3f3546619-b0841178-backup.hg (glob)
 
   $ hg cat c2
   c2b
@@ -437,6 +464,7 @@
   $ cd b3
 
   $ hg rebase -s 2 -d 1 --tool internal:fail
+  rebasing 2:e4e3f3546619 "c2b" (tip)
   unresolved conflicts (see hg resolve, then hg rebase --continue)
   [1]
 
@@ -457,14 +485,16 @@
   (no more unresolved files)
   $ hg rebase -c --tool internal:fail
   tool option will be ignored
-  saved backup bundle to $TESTTMP/b3/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 2:e4e3f3546619 "c2b" (tip)
+  note: rebase of 2:e4e3f3546619 created no changes to commit
+  saved backup bundle to $TESTTMP/b3/.hg/strip-backup/e4e3f3546619-b0841178-backup.hg (glob)
 
   $ hg rebase -i
-  abort: interactive history editing is supported by the 'histedit' extension (see 'hg help histedit')
+  abort: interactive history editing is supported by the 'histedit' extension (see "hg help histedit")
   [255]
 
   $ hg rebase --interactive
-  abort: interactive history editing is supported by the 'histedit' extension (see 'hg help histedit')
+  abort: interactive history editing is supported by the 'histedit' extension (see "hg help histedit")
   [255]
 
   $ cd ..
--- a/tests/test-rebase-pull.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-rebase-pull.t	Sat Jan 17 18:28:30 2015 -0800
@@ -53,7 +53,8 @@
   adding manifests
   adding file changes
   added 1 changesets with 1 changes to 1 files (+1 heads)
-  saved backup bundle to $TESTTMP/b/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 2:ff8d69a621f9 "L1"
+  saved backup bundle to $TESTTMP/b/.hg/strip-backup/ff8d69a621f9-160fa373-backup.hg (glob)
 
   $ hg tglog
   @  3: 'L1'
@@ -149,7 +150,8 @@
   adding manifests
   adding file changes
   added 2 changesets with 2 changes to 2 files
-  saved backup bundle to $TESTTMP/c/.hg/strip-backup/ff8d69a621f9-backup.hg (glob)
+  rebasing 3:ff8d69a621f9 "L1"
+  saved backup bundle to $TESTTMP/c/.hg/strip-backup/ff8d69a621f9-160fa373-backup.hg (glob)
   $ hg tglog
   @  5: 'L1'
   |
--- a/tests/test-rebase-rename.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-rebase-rename.t	Sat Jan 17 18:28:30 2015 -0800
@@ -60,7 +60,8 @@
 Rebase the revision containing the rename:
 
   $ hg rebase -s 3 -d 2
-  saved backup bundle to $TESTTMP/a/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 3:73a3ee40125d "rename A" (tip)
+  saved backup bundle to $TESTTMP/a/.hg/strip-backup/73a3ee40125d-1d78ebcf-backup.hg (glob)
 
   $ hg tglog
   @  3: 'rename A'
@@ -150,7 +151,8 @@
 Rebase the revision containing the copy:
 
   $ hg rebase -s 3 -d 2
-  saved backup bundle to $TESTTMP/b/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 3:0a8162ff18a8 "copy A" (tip)
+  saved backup bundle to $TESTTMP/b/.hg/strip-backup/0a8162ff18a8-dd06302a-backup.hg (glob)
 
   $ hg tglog
   @  3: 'copy A'
@@ -233,7 +235,8 @@
   
 
   $ hg rebase -s 4 -d 3
-  saved backup bundle to $TESTTMP/repo/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 4:b918d683b091 "Another unrelated change" (tip)
+  saved backup bundle to $TESTTMP/repo/.hg/strip-backup/b918d683b091-3024bc57-backup.hg (glob)
 
   $ hg diff --stat -c .
    unrelated.txt |  1 +
@@ -281,7 +284,10 @@
 
 Rebase the copies on top of the unrelated change.
   $ hg rebase --source 1 --dest 4
-  saved backup bundle to $TESTTMP/copy-gets-preserved/.hg/*.hg (glob)
+  rebasing 1:79d255d24ad2 "File b created as copy of a and modified"
+  rebasing 2:327f772bc074 "File c created as copy of b and modified"
+  rebasing 3:421b7e82bb85 "File d created as copy of c and modified"
+  saved backup bundle to $TESTTMP/copy-gets-preserved/.hg/strip-backup/79d255d24ad2-a2265555-backup.hg (glob)
   $ hg update 4
   3 files updated, 0 files merged, 0 files removed, 0 files unresolved
 
@@ -303,9 +309,15 @@
   $ hg ci -m 'unrelated commit is unrelated'
   created new head
   $ hg rebase -s 2 --dest 5 --collapse
+  rebasing 2:68bf06433839 "File b created as copy of a and modified"
+  note: rebase of 2:68bf06433839 created no changes to commit
+  rebasing 3:af74b229bc02 "File c created as copy of b and modified"
   merging b and c to c
+  note: rebase of 3:af74b229bc02 created no changes to commit
+  rebasing 4:dbb9ba033561 "File d created as copy of c and modified"
   merging c and d to d
-  saved backup bundle to $TESTTMP/copy-gets-preserved/.hg/*.hg (glob)
+  note: rebase of 4:dbb9ba033561 created no changes to commit
+  saved backup bundle to $TESTTMP/copy-gets-preserved/.hg/strip-backup/68bf06433839-dde37595-backup.hg (glob)
   $ hg co tip
   3 files updated, 0 files merged, 0 files removed, 0 files unresolved
 
--- a/tests/test-rebase-scenario-global.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-rebase-scenario-global.t	Sat Jan 17 18:28:30 2015 -0800
@@ -51,6 +51,7 @@
   $ hg status --rev "3^1" --rev 3
   A D
   $ HGEDITOR=cat hg rebase -s 3 -d 7 --edit
+  rebasing 3:32af7686d403 "D"
   D
   
   
@@ -60,7 +61,7 @@
   HG: user: Nicolas Dumazet <nicdumz.commits@gmail.com>
   HG: branch 'default'
   HG: added D
-  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/*-backup.hg (glob)
+  saved backup bundle to $TESTTMP/a1/.hg/strip-backup/32af7686d403-6f7dface-backup.hg (glob)
 
   $ hg tglog
   o  7: 'D'
@@ -89,7 +90,8 @@
   $ cd a2
 
   $ HGEDITOR=cat hg rebase -s 3 -d 5
-  saved backup bundle to $TESTTMP/a2/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 3:32af7686d403 "D"
+  saved backup bundle to $TESTTMP/a2/.hg/strip-backup/32af7686d403-6f7dface-backup.hg (glob)
 
   $ hg tglog
   o  7: 'D'
@@ -117,7 +119,10 @@
   $ cd a3
 
   $ hg rebase -s 4 -d 7
-  saved backup bundle to $TESTTMP/a3/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 4:9520eea781bc "E"
+  rebasing 6:eea13746799a "G"
+  note: rebase of 6:eea13746799a created no changes to commit
+  saved backup bundle to $TESTTMP/a3/.hg/strip-backup/9520eea781bc-fcd8edd4-backup.hg (glob)
 
   $ hg tglog
   o  6: 'E'
@@ -143,7 +148,11 @@
   $ cd a4
 
   $ hg rebase -s 5 -d 4
-  saved backup bundle to $TESTTMP/a4/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 5:24b6387c8c8c "F"
+  rebasing 6:eea13746799a "G"
+  note: rebase of 6:eea13746799a created no changes to commit
+  rebasing 7:02de42196ebe "H" (tip)
+  saved backup bundle to $TESTTMP/a4/.hg/strip-backup/24b6387c8c8c-c3fe765d-backup.hg (glob)
 
   $ hg tglog
   @  6: 'H'
@@ -169,7 +178,8 @@
   $ cd a5
 
   $ hg rebase -s 6 -d 7
-  saved backup bundle to $TESTTMP/a5/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 6:eea13746799a "G"
+  saved backup bundle to $TESTTMP/a5/.hg/strip-backup/eea13746799a-883828ed-backup.hg (glob)
 
   $ hg tglog
   o    7: 'G'
@@ -197,7 +207,10 @@
   $ cd a6
 
   $ hg rebase -s 5 -d 1
-  saved backup bundle to $TESTTMP/a6/.hg/strip-backup/*-backup.hg (glob)
+  rebasing 5:24b6387c8c8c "F"
+  rebasing 6:eea13746799a "G"
+  rebasing 7:02de42196ebe "H" (tip)
+  saved backup bundle to $TESTTMP/a6/.hg/strip-backup/24b6387c8c8c-c3fe765d-backup.hg (glob)
 
   $ hg tglog
   @  7: 'H'
@@ -239,6 +252,7 @@
 G onto B - merge revision with both parents not in ancestors of target:
 
   $ hg rebase -s 6 -d 1
+  rebasing 6:eea13746799a "G"
   abort: cannot use revision 6 as base, result would have 3 parents
   [255]
 
@@ -266,7 +280,9 @@
 C onto A - rebase onto an ancestor:
 
   $ hg rebase -d 0 -s 2
-  saved backup bundle to $TESTTMP/a7/.hg/strip-backup/5fddd98957c8-backup.hg (glob)
+  rebasing 2:5fddd98957c8 "C"
+  rebasing 3:32af7686d403 "D"
+  saved backup bundle to $TESTTMP/a7/.hg/strip-backup/5fddd98957c8-f9244fa1-backup.hg (glob)
   $ hg tglog
   o  7: 'D'
   |
@@ -293,37 +309,44 @@
   [1]
   $ hg rebase -d 5 -b 6
   abort: can't rebase immutable changeset e1c4361dd923
-  (see hg help phases for details)
+  (see "hg help phases" for details)
   [255]
 
   $ hg rebase -d 5 -b 6 --keep
+  rebasing 6:e1c4361dd923 "C"
+  rebasing 7:c9659aac0000 "D" (tip)
 
 Check rebasing mutable changeset
 Source phase greater or equal to destination phase: new changeset get the phase of source:
   $ hg id -n
   5
   $ hg rebase -s9 -d0
-  saved backup bundle to $TESTTMP/a7/.hg/strip-backup/2b23e52411f4-backup.hg (glob)
+  rebasing 9:2b23e52411f4 "D" (tip)
+  saved backup bundle to $TESTTMP/a7/.hg/strip-backup/2b23e52411f4-f942decf-backup.hg (glob)
   $ hg id -n # check we updated back to parent
   5
   $ hg log --template "{phase}\n" -r 9
   draft
   $ hg rebase -s9 -d1
-  saved backup bundle to $TESTTMP/a7/.hg/strip-backup/2cb10d0cfc6c-backup.hg (glob)
+  rebasing 9:2cb10d0cfc6c "D" (tip)
+  saved backup bundle to $TESTTMP/a7/.hg/strip-backup/2cb10d0cfc6c-ddb0f256-backup.hg (glob)
   $ hg log --template "{phase}\n" -r 9
   draft
   $ hg phase --force --secret 9
   $ hg rebase -s9 -d0
-  saved backup bundle to $TESTTMP/a7/.hg/strip-backup/c5b12b67163a-backup.hg (glob)
+  rebasing 9:c5b12b67163a "D" (tip)
+  saved backup bundle to $TESTTMP/a7/.hg/strip-backup/c5b12b67163a-4e372053-backup.hg (glob)
   $ hg log --template "{phase}\n" -r 9
   secret
   $ hg rebase -s9 -d1
-  saved backup bundle to $TESTTMP/a7/.hg/strip-backup/2a0524f868ac-backup.hg (glob)
+  rebasing 9:2a0524f868ac "D" (tip)
+  saved backup bundle to $TESTTMP/a7/.hg/strip-backup/2a0524f868ac-cefd8574-backup.hg (glob)
   $ hg log --template "{phase}\n" -r 9
   secret
 Source phase lower than destination phase: new changeset get the phase of destination:
   $ hg rebase -s8 -d9
-  saved backup bundle to $TESTTMP/a7/.hg/strip-backup/6d4f22462821-backup.hg (glob)
+  rebasing 8:6d4f22462821 "C"
+  saved backup bundle to $TESTTMP/a7/.hg/strip-backup/6d4f22462821-3441f70b-backup.hg (glob)
   $ hg log --template "{phase}\n" -r 'rev(9)'
   secret
 
@@ -375,6 +398,11 @@
   (use --keep to keep original changesets)
   [255]
   $ hg rebase -r '2::8' -d 1 --keep
+  rebasing 2:c9e50f6cdc55 "C"
+  rebasing 3:ffd453c31098 "D"
+  rebasing 6:3d8a618087a7 "G"
+  rebasing 7:72434a4e60b0 "H"
+  rebasing 8:479ddb54a924 "I" (tip)
   $ hg tglog
   o  13: 'I'
   |
@@ -416,6 +444,10 @@
   (use --keep to keep original changesets)
   [255]
   $ hg rebase -r '3::8' -d 1 --keep
+  rebasing 3:ffd453c31098 "D"
+  rebasing 6:3d8a618087a7 "G"
+  rebasing 7:72434a4e60b0 "H"
+  rebasing 8:479ddb54a924 "I" (tip)
   $ hg tglog
   o  12: 'I'
   |
@@ -455,6 +487,9 @@
   (use --keep to keep original changesets)
   [255]
   $ hg rebase -r '3::7' -d 1 --keep
+  rebasing 3:ffd453c31098 "D"
+  rebasing 6:3d8a618087a7 "G"
+  rebasing 7:72434a4e60b0 "H"
   $ hg tglog
   o  11: 'H'
   |
@@ -492,6 +527,11 @@
   (use --keep to keep original changesets)
   [255]
   $ hg rebase -r '3::(7+5)' -d 1 --keep
+  rebasing 3:ffd453c31098 "D"
+  rebasing 4:c01897464e7f "E"
+  rebasing 5:41bfcc75ed73 "F"
+  rebasing 6:3d8a618087a7 "G"
+  rebasing 7:72434a4e60b0 "H"
   $ hg tglog
   o  13: 'H'
   |
@@ -531,7 +571,10 @@
   $ hg clone -q -u . ah ah5
   $ cd ah5
   $ hg rebase -r '6::' -d 2
-  saved backup bundle to $TESTTMP/ah5/.hg/strip-backup/3d8a618087a7-backup.hg (glob)
+  rebasing 6:3d8a618087a7 "G"
+  rebasing 7:72434a4e60b0 "H"
+  rebasing 8:479ddb54a924 "I" (tip)
+  saved backup bundle to $TESTTMP/ah5/.hg/strip-backup/3d8a618087a7-b4f73f31-backup.hg (glob)
   $ hg tglog
   o  8: 'I'
   |
@@ -561,7 +604,12 @@
   $ hg clone -q -u . ah ah6
   $ cd ah6
   $ hg rebase -r '(4+6)::' -d 1
-  saved backup bundle to $TESTTMP/ah6/.hg/strip-backup/3d8a618087a7-backup.hg (glob)
+  rebasing 4:c01897464e7f "E"
+  rebasing 5:41bfcc75ed73 "F"
+  rebasing 6:3d8a618087a7 "G"
+  rebasing 7:72434a4e60b0 "H"
+  rebasing 8:479ddb54a924 "I" (tip)
+  saved backup bundle to $TESTTMP/ah6/.hg/strip-backup/3d8a618087a7-aae93a24-backup.hg (glob)
   $ hg tglog
   o  8: 'I'
   |
@@ -628,7 +676,9 @@
 (actual test)
 
   $ hg rebase --dest 'desc(G)' --rev 'desc(K) + desc(I)'
-  saved backup bundle to $TESTTMP/a8/.hg/strip-backup/23a4ace37988-backup.hg (glob)
+  rebasing 8:e7ec4e813ba6 "I"
+  rebasing 10:23a4ace37988 "K" (tip)
+  saved backup bundle to $TESTTMP/a8/.hg/strip-backup/23a4ace37988-b06984b3-backup.hg (glob)
   $ hg log --rev 'children(desc(G))'
   changeset:   9:adb617877056
   parent:      6:eea13746799a
@@ -690,4 +740,6 @@
   $ hg add subfile
   $ hg commit -m 'second source with subdir'
   $ hg rebase -b . -d 1 --traceback
-  saved backup bundle to $TESTTMP/cwd-vanish/.hg/strip-backup/779a07b1b7a0-backup.hg (glob)
+  rebasing 2:779a07b1b7a0 "first source commit"
+  rebasing 3:a7d6f3a00bf3 "second source with subdir" (tip)
+  saved backup bundle to $TESTTMP/cwd-vanish/.hg/strip-backup/779a07b1b7a0-853e0073-backup.hg (glob)
--- a/tests/test-rebuildstate.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-rebuildstate.t	Sat Jan 17 18:28:30 2015 -0800
@@ -17,8 +17,8 @@
 state dump after
 
   $ hg debugstate --nodates | sort
-  n 644         -1 bar
-  n 644         -1 foo
+  n 644         -1 set                 bar
+  n 644         -1 set                 foo
 
 status
 
--- a/tests/test-record.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-record.t	Sat Jan 17 18:28:30 2015 -0800
@@ -1,9 +1,11 @@
 Set up a repo
 
-  $ echo "[ui]" >> $HGRCPATH
-  $ echo "interactive=true" >> $HGRCPATH
-  $ echo "[extensions]" >> $HGRCPATH
-  $ echo "record=" >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > [ui]
+  > interactive = true
+  > [extensions]
+  > record =
+  > EOF
 
   $ hg init a
   $ cd a
@@ -557,9 +559,9 @@
   >   echo $i >> plain
   > done
 
-Record beginning, middle
+Record beginning, middle, and test that format-breaking diffopts are ignored
 
-  $ hg record -d '14 0' -m middle-only plain <<EOF
+  $ hg record --config diff.noprefix=True -d '14 0' -m middle-only plain <<EOF
   > y
   > y
   > y
--- a/tests/test-relink.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-relink.t	Sat Jan 17 18:28:30 2015 -0800
@@ -87,7 +87,7 @@
   pruned down to 2 probably relinkable files
   relinking: data/a.i 1/2 files (50.00%)
   not linkable: data/dummy.i
-  relinked 1 files (1.37 KB reclaimed)
+  relinked 1 files (1.36 KB reclaimed)
   $ cd ..
 
 
--- a/tests/test-rename-dir-merge.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-rename-dir-merge.t	Sat Jan 17 18:28:30 2015 -0800
@@ -105,6 +105,88 @@
   $ hg debugrename b/c
   b/c renamed from a/c:354ae8da6e890359ef49ade27b68bbc361f3ca88 (glob)
 
+Local directory rename with conflicting file added in remote source directory
+and untracked in local target directory.
+
+  $ hg co -qC 1
+  $ echo target > b/c
+  $ hg merge 2
+  b/c: untracked file differs
+  abort: untracked files in working directory differ from files in requested revision
+  [255]
+  $ cat b/c
+  target
+but it should succeed if the content matches
+  $ hg cat -r 2 a/c > b/c
+  $ hg merge 2
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  (branch merge, don't forget to commit)
+  $ hg st -C
+  A b/c
+    a/c
+  ? a/d
+
+Local directory rename with conflicting file added in remote source directory
+and committed in local target directory.
+
+  $ hg co -qC 1
+  $ echo target > b/c
+  $ hg add b/c
+  $ hg commit -qm 'new file in target directory'
+  $ hg merge 2
+  merging b/c and a/c to b/c
+  warning: conflicts during merge.
+  merging b/c incomplete! (edit conflicts, then use 'hg resolve --mark')
+  0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+  use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+  [1]
+  $ hg st -A
+  M b/c
+    a/c
+  ? a/d
+  ? b/c.orig
+  C b/a
+  C b/b
+  $ cat b/c
+  <<<<<<< local: f1c50ca4f127 - test: new file in target directory
+  target
+  =======
+  baz
+  >>>>>>> other: ce36d17b18fb  - test: 2 add a/c
+  $ rm b/c.orig
+
+Remote directory rename with conflicting file added in remote target directory
+and committed in local source directory.
+
+  $ hg co -qC 2
+  $ hg st -A
+  ? a/d
+  C a/a
+  C a/b
+  C a/c
+  $ hg merge 5
+  merging a/c and b/c to b/c
+  warning: conflicts during merge.
+  merging b/c incomplete! (edit conflicts, then use 'hg resolve --mark')
+  2 files updated, 0 files merged, 2 files removed, 1 files unresolved
+  use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
+  [1]
+  $ hg st -A
+  M b/a
+  M b/b
+  M b/c
+    a/c
+  R a/a
+  R a/b
+  R a/c
+  ? a/d
+  ? b/c.orig
+  $ cat b/c
+  <<<<<<< local: ce36d17b18fb  - test: 2 add a/c
+  baz
+  =======
+  target
+  >>>>>>> other: f1c50ca4f127 - test: new file in target directory
 
 Second scenario with two repos:
 
--- a/tests/test-rename-merge1.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-rename-merge1.t	Sat Jan 17 18:28:30 2015 -0800
@@ -40,15 +40,13 @@
   removing a
    b2: remote created -> g
   getting b2
-  updating: b2 1/3 files (33.33%)
+  updating: b2 1/2 files (50.00%)
    b: remote moved from a -> m
-  updating: b 2/3 files (66.67%)
+  updating: b 2/2 files (100.00%)
   picked tool 'internal:merge' for b (binary False symlink False)
   merging a and b to b
   my b@044f8520aeeb+ other b@85c198ef2f6c ancestor a@af1939970a1c
    premerge successful
-   a2: divergent renames -> dr
-  updating: a2 3/3 files (100.00%)
   note: possible conflict - a2 was renamed multiple times to:
    c2
    b2
@@ -183,9 +181,7 @@
    ancestor: 19d7f95df299, local: 0084274f6b67+, remote: 5d32493049f0
    newfile: remote created -> g
   getting newfile
-  updating: newfile 1/2 files (50.00%)
-   file: rename and delete -> rd
-  updating: file 2/2 files (100.00%)
+  updating: newfile 1/1 files (100.00%)
   note: possible conflict - file was deleted and renamed to:
    newfile
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-rename-merge2.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-rename-merge2.t	Sat Jan 17 18:28:30 2015 -0800
@@ -88,7 +88,7 @@
    ancestor: 924404dff337, local: e300d1c794ec+, remote: 4ce40f5aca24
    preserving a for resolve of b
    preserving rev for resolve of rev
-   a: keep -> k
+   a: remote unchanged -> k
    b: remote copied from a -> m
   updating: b 1/2 files (50.00%)
   picked tool 'python ../merge' for b (binary False symlink False)
@@ -343,7 +343,7 @@
    ancestor: 924404dff337, local: 62e7bf090eba+, remote: 49b6d8032493
    preserving b for resolve of b
    preserving rev for resolve of rev
-   b: versions differ -> m
+   b: both renamed from a -> m
   updating: b 1/2 files (50.00%)
   picked tool 'python ../merge' for b (binary False symlink False)
   merging b
@@ -382,14 +382,12 @@
    preserving rev for resolve of rev
    c: remote created -> g
   getting c
-  updating: c 1/3 files (33.33%)
+  updating: c 1/2 files (50.00%)
    rev: versions differ -> m
-  updating: rev 2/3 files (66.67%)
+  updating: rev 2/2 files (100.00%)
   picked tool 'python ../merge' for rev (binary False symlink False)
   merging rev
   my rev@02963e448370+ other rev@fe905ef2c33e ancestor rev@924404dff337
-   a: divergent renames -> dr
-  updating: a 3/3 files (100.00%)
   note: possible conflict - a was renamed multiple times to:
    b
    c
@@ -413,7 +411,7 @@
    ancestor: 924404dff337, local: 86a2aa42fc76+, remote: af30c7647fc7
    preserving b for resolve of b
    preserving rev for resolve of rev
-   b: versions differ -> m
+   b: both created -> m
   updating: b 1/2 files (50.00%)
   picked tool 'python ../merge' for b (binary False symlink False)
   merging b
@@ -446,7 +444,7 @@
    a: other deleted -> r
   removing a
   updating: a 1/3 files (33.33%)
-   b: versions differ -> m
+   b: both created -> m
   updating: b 2/3 files (66.67%)
   picked tool 'python ../merge' for b (binary False symlink False)
   merging b
@@ -478,7 +476,7 @@
    a: remote is newer -> g
   getting a
   updating: a 1/3 files (33.33%)
-   b: versions differ -> m
+   b: both created -> m
   updating: b 2/3 files (66.67%)
   picked tool 'python ../merge' for b (binary False symlink False)
   merging b
@@ -511,7 +509,7 @@
    a: other deleted -> r
   removing a
   updating: a 1/3 files (33.33%)
-   b: versions differ -> m
+   b: both created -> m
   updating: b 2/3 files (66.67%)
   picked tool 'python ../merge' for b (binary False symlink False)
   merging b
@@ -543,7 +541,7 @@
    a: remote is newer -> g
   getting a
   updating: a 1/3 files (33.33%)
-   b: versions differ -> m
+   b: both created -> m
   updating: b 2/3 files (66.67%)
   picked tool 'python ../merge' for b (binary False symlink False)
   merging b
@@ -573,8 +571,8 @@
    ancestor: 924404dff337, local: 0b76e65c8289+, remote: 4ce40f5aca24
    preserving b for resolve of b
    preserving rev for resolve of rev
-   a: keep -> k
-   b: versions differ -> m
+   a: remote unchanged -> k
+   b: both created -> m
   updating: b 1/2 files (50.00%)
   picked tool 'python ../merge' for b (binary False symlink False)
   merging b
@@ -609,7 +607,7 @@
    a: prompt recreating -> g
   getting a
   updating: a 1/3 files (33.33%)
-   b: versions differ -> m
+   b: both created -> m
   updating: b 2/3 files (66.67%)
   picked tool 'python ../merge' for b (binary False symlink False)
   merging b
@@ -643,7 +641,7 @@
    preserving rev for resolve of rev
    a: prompt keep -> a
   updating: a 1/3 files (33.33%)
-   b: versions differ -> m
+   b: both created -> m
   updating: b 2/3 files (66.67%)
   picked tool 'python ../merge' for b (binary False symlink False)
   merging b
--- a/tests/test-revert.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-revert.t	Sat Jan 17 18:28:30 2015 -0800
@@ -404,157 +404,63 @@
 Systematic behavior validation of most possible cases
 =====================================================
 
-This section tests most of the possible combinations of working directory
-changes and inter-revision changes. The number of possible cases is significant
-but they all have a slighly different handling. So this section commits to
-generating and testing all of them to allow safe refactoring of the revert code.
+This section tests most of the possible combinations of revision states and
+working directory states. The number of possible cases is significant but they
+all have a slightly different handling. So this section commits to generating
+and testing all of them to allow safe refactoring of the revert code.
 
 A python script is used to generate a file history for each combination of
-changes between, on one side the working directory and its parent and on
-the other side, changes between a revert target (--rev) and working directory
-parent. The three states generated are:
+states: on one side, the content (or lack thereof) in two revisions, and
+on the other side, the content and "tracked-ness" of the working directory. The
+three states generated are:
 
 - a "base" revision
 - a "parent" revision
 - the working directory (based on "parent")
 
-The file generated have names of the form:
+The files generated have names of the form:
 
- <changeset-state>_<working-copy-state>
-
-Here, "changeset-state" conveys the state in "base" and "parent" (or the change
-that happen between them), "working-copy-state" is self explanatory.
+ <rev1-content>_<rev2-content>_<working-copy-content>-<tracked-ness>
 
 Not all known states are tested yet. See inline documentation for details.
 Special cases from merge and rename are not tested by this section.
 
-There are also multiple cases where the current revert implementation is known to
-slightly misbehave.
-
 Write the python script to disk
 -------------------------------
 
-  $ cat << EOF > gen-revert-cases.py
-  > # generate proper file state to test revert behavior
-  > import sys
-  > import os
-  > 
-  > # content of the file in "base" and "parent"
-  > # None means no file at all
-  > ctxcontent = {
-  >     # clean: no change from base to parent
-  >     'clean': ['base', 'base'],
-  >     # modified: file content change from base to parent
-  >     'modified': ['base', 'parent'],
-  >     # added: file is missing from base and added in parent
-  >     'added': [None, 'parent'],
-  >     # removed: file exist in base but is removed from parent
-  >     'removed': ['base', None],
-  >     # file exist neither in base not in parent
-  >     'missing': [None, None],
-  > }
-  > 
-  > # content of file in working copy
-  > wccontent = {
-  >     # clean: wc content is the same as parent
-  >     'clean': lambda cc: cc[1],
-  >     # revert: wc content is the same as base
-  >     'revert': lambda cc: cc[0],
-  >     # wc: file exist with a content different from base and parent
-  >     'wc': lambda cc: 'wc',
-  >     # removed: file is missing and marked as untracked
-  >     'removed': lambda cc: None,
-  >     # deleted: file is recorded as tracked but missing
-  >     #          rely on file deletion outside of this script
-  >     'deleted': lambda cc:'TOBEDELETED',
-  > }
-  > # untracked-X is a version of X where the file is not tracked (? unknown)
-  > wccontent['untracked-clean'] = wccontent['clean']
-  > wccontent['untracked-revert'] = wccontent['revert']
-  > wccontent['untracked-wc'] = wccontent['wc']
-  > 
-  > # build the combination of possible states
-  > combination = []
-  > for ctxkey in ctxcontent:
-  >     for wckey in wccontent:
-  >         filename = "%s_%s" % (ctxkey, wckey)
-  >         combination.append((filename, ctxkey, wckey))
-  > 
-  > # make sure we have stable output
-  > combination.sort()
-  > 
-  > # retrieve the state we must generate
-  > target = sys.argv[1]
-  > 
-  > # compute file content
-  > content = []
-  > for filename, ctxkey, wckey in combination:
-  >     cc = ctxcontent[ctxkey]
-  >     if target == 'filelist':
-  >         print filename
-  >     elif target == 'base':
-  >         content.append((filename, cc[0]))
-  >     elif target == 'parent':
-  >         content.append((filename, cc[1]))
-  >     elif target == 'wc':
-  >         content.append((filename, wccontent[wckey](cc)))
-  >     else:
-  >         print >> sys.stderr, "unknown target:", target
-  >         sys.exit(1)
-  > 
-  > # write actual content
-  > for filename, data in content:
-  >     if data is not None:
-  >         f = open(filename, 'w')
-  >         f.write(data + '\n')
-  >         f.close()
-  >     elif os.path.exists(filename):
-  >        os.remove(filename)
-  > EOF
-
 check list of planned files
 
-  $ python gen-revert-cases.py filelist
-  added_clean
-  added_deleted
-  added_removed
-  added_revert
-  added_untracked-clean
-  added_untracked-revert
-  added_untracked-wc
-  added_wc
-  clean_clean
-  clean_deleted
-  clean_removed
-  clean_revert
-  clean_untracked-clean
-  clean_untracked-revert
-  clean_untracked-wc
-  clean_wc
-  missing_clean
-  missing_deleted
-  missing_removed
-  missing_revert
-  missing_untracked-clean
-  missing_untracked-revert
-  missing_untracked-wc
-  missing_wc
-  modified_clean
-  modified_deleted
-  modified_removed
-  modified_revert
-  modified_untracked-clean
-  modified_untracked-revert
-  modified_untracked-wc
-  modified_wc
-  removed_clean
-  removed_deleted
-  removed_removed
-  removed_revert
-  removed_untracked-clean
-  removed_untracked-revert
-  removed_untracked-wc
-  removed_wc
+  $ python $TESTDIR/generate-working-copy-states.py filelist 2
+  content1_content1_content1-tracked
+  content1_content1_content1-untracked
+  content1_content1_content3-tracked
+  content1_content1_content3-untracked
+  content1_content1_missing-tracked
+  content1_content1_missing-untracked
+  content1_content2_content1-tracked
+  content1_content2_content1-untracked
+  content1_content2_content2-tracked
+  content1_content2_content2-untracked
+  content1_content2_content3-tracked
+  content1_content2_content3-untracked
+  content1_content2_missing-tracked
+  content1_content2_missing-untracked
+  content1_missing_content1-tracked
+  content1_missing_content1-untracked
+  content1_missing_content3-tracked
+  content1_missing_content3-untracked
+  content1_missing_missing-tracked
+  content1_missing_missing-untracked
+  missing_content2_content2-tracked
+  missing_content2_content2-untracked
+  missing_content2_content3-tracked
+  missing_content2_content3-untracked
+  missing_content2_missing-tracked
+  missing_content2_missing-untracked
+  missing_missing_content3-tracked
+  missing_missing_content3-untracked
+  missing_missing_missing-tracked
+  missing_missing_missing-untracked
 
 Script to make a simple text version of the content
 ---------------------------------------------------
@@ -579,268 +485,232 @@
 
 Generate base changeset
 
-  $ python ../gen-revert-cases.py base
+  $ python $TESTDIR/generate-working-copy-states.py state 2 1
   $ hg addremove --similarity 0
-  adding clean_clean
-  adding clean_deleted
-  adding clean_removed
-  adding clean_revert
-  adding clean_untracked-clean
-  adding clean_untracked-revert
-  adding clean_untracked-wc
-  adding clean_wc
-  adding modified_clean
-  adding modified_deleted
-  adding modified_removed
-  adding modified_revert
-  adding modified_untracked-clean
-  adding modified_untracked-revert
-  adding modified_untracked-wc
-  adding modified_wc
-  adding removed_clean
-  adding removed_deleted
-  adding removed_removed
-  adding removed_revert
-  adding removed_untracked-clean
-  adding removed_untracked-revert
-  adding removed_untracked-wc
-  adding removed_wc
+  adding content1_content1_content1-tracked
+  adding content1_content1_content1-untracked
+  adding content1_content1_content3-tracked
+  adding content1_content1_content3-untracked
+  adding content1_content1_missing-tracked
+  adding content1_content1_missing-untracked
+  adding content1_content2_content1-tracked
+  adding content1_content2_content1-untracked
+  adding content1_content2_content2-tracked
+  adding content1_content2_content2-untracked
+  adding content1_content2_content3-tracked
+  adding content1_content2_content3-untracked
+  adding content1_content2_missing-tracked
+  adding content1_content2_missing-untracked
+  adding content1_missing_content1-tracked
+  adding content1_missing_content1-untracked
+  adding content1_missing_content3-tracked
+  adding content1_missing_content3-untracked
+  adding content1_missing_missing-tracked
+  adding content1_missing_missing-untracked
   $ hg status
-  A clean_clean
-  A clean_deleted
-  A clean_removed
-  A clean_revert
-  A clean_untracked-clean
-  A clean_untracked-revert
-  A clean_untracked-wc
-  A clean_wc
-  A modified_clean
-  A modified_deleted
-  A modified_removed
-  A modified_revert
-  A modified_untracked-clean
-  A modified_untracked-revert
-  A modified_untracked-wc
-  A modified_wc
-  A removed_clean
-  A removed_deleted
-  A removed_removed
-  A removed_revert
-  A removed_untracked-clean
-  A removed_untracked-revert
-  A removed_untracked-wc
-  A removed_wc
+  A content1_content1_content1-tracked
+  A content1_content1_content1-untracked
+  A content1_content1_content3-tracked
+  A content1_content1_content3-untracked
+  A content1_content1_missing-tracked
+  A content1_content1_missing-untracked
+  A content1_content2_content1-tracked
+  A content1_content2_content1-untracked
+  A content1_content2_content2-tracked
+  A content1_content2_content2-untracked
+  A content1_content2_content3-tracked
+  A content1_content2_content3-untracked
+  A content1_content2_missing-tracked
+  A content1_content2_missing-untracked
+  A content1_missing_content1-tracked
+  A content1_missing_content1-untracked
+  A content1_missing_content3-tracked
+  A content1_missing_content3-untracked
+  A content1_missing_missing-tracked
+  A content1_missing_missing-untracked
   $ hg commit -m 'base'
 
 (create a simple text version of the content)
 
   $ python ../dircontent.py > ../content-base.txt
   $ cat ../content-base.txt
-  base   clean_clean
-  base   clean_deleted
-  base   clean_removed
-  base   clean_revert
-  base   clean_untracked-clean
-  base   clean_untracked-revert
-  base   clean_untracked-wc
-  base   clean_wc
-  base   modified_clean
-  base   modified_deleted
-  base   modified_removed
-  base   modified_revert
-  base   modified_untracked-clean
-  base   modified_untracked-revert
-  base   modified_untracked-wc
-  base   modified_wc
-  base   removed_clean
-  base   removed_deleted
-  base   removed_removed
-  base   removed_revert
-  base   removed_untracked-clean
-  base   removed_untracked-revert
-  base   removed_untracked-wc
-  base   removed_wc
+  content1 content1_content1_content1-tracked
+  content1 content1_content1_content1-untracked
+  content1 content1_content1_content3-tracked
+  content1 content1_content1_content3-untracked
+  content1 content1_content1_missing-tracked
+  content1 content1_content1_missing-untracked
+  content1 content1_content2_content1-tracked
+  content1 content1_content2_content1-untracked
+  content1 content1_content2_content2-tracked
+  content1 content1_content2_content2-untracked
+  content1 content1_content2_content3-tracked
+  content1 content1_content2_content3-untracked
+  content1 content1_content2_missing-tracked
+  content1 content1_content2_missing-untracked
+  content1 content1_missing_content1-tracked
+  content1 content1_missing_content1-untracked
+  content1 content1_missing_content3-tracked
+  content1 content1_missing_content3-untracked
+  content1 content1_missing_missing-tracked
+  content1 content1_missing_missing-untracked
 
 Create parent changeset
 
-  $ python ../gen-revert-cases.py parent
+  $ python $TESTDIR/generate-working-copy-states.py state 2 2
   $ hg addremove --similarity 0
-  adding added_clean
-  adding added_deleted
-  adding added_removed
-  adding added_revert
-  adding added_untracked-clean
-  adding added_untracked-revert
-  adding added_untracked-wc
-  adding added_wc
-  removing removed_clean
-  removing removed_deleted
-  removing removed_removed
-  removing removed_revert
-  removing removed_untracked-clean
-  removing removed_untracked-revert
-  removing removed_untracked-wc
-  removing removed_wc
+  removing content1_missing_content1-tracked
+  removing content1_missing_content1-untracked
+  removing content1_missing_content3-tracked
+  removing content1_missing_content3-untracked
+  removing content1_missing_missing-tracked
+  removing content1_missing_missing-untracked
+  adding missing_content2_content2-tracked
+  adding missing_content2_content2-untracked
+  adding missing_content2_content3-tracked
+  adding missing_content2_content3-untracked
+  adding missing_content2_missing-tracked
+  adding missing_content2_missing-untracked
   $ hg status
-  M modified_clean
-  M modified_deleted
-  M modified_removed
-  M modified_revert
-  M modified_untracked-clean
-  M modified_untracked-revert
-  M modified_untracked-wc
-  M modified_wc
-  A added_clean
-  A added_deleted
-  A added_removed
-  A added_revert
-  A added_untracked-clean
-  A added_untracked-revert
-  A added_untracked-wc
-  A added_wc
-  R removed_clean
-  R removed_deleted
-  R removed_removed
-  R removed_revert
-  R removed_untracked-clean
-  R removed_untracked-revert
-  R removed_untracked-wc
-  R removed_wc
+  M content1_content2_content1-tracked
+  M content1_content2_content1-untracked
+  M content1_content2_content2-tracked
+  M content1_content2_content2-untracked
+  M content1_content2_content3-tracked
+  M content1_content2_content3-untracked
+  M content1_content2_missing-tracked
+  M content1_content2_missing-untracked
+  A missing_content2_content2-tracked
+  A missing_content2_content2-untracked
+  A missing_content2_content3-tracked
+  A missing_content2_content3-untracked
+  A missing_content2_missing-tracked
+  A missing_content2_missing-untracked
+  R content1_missing_content1-tracked
+  R content1_missing_content1-untracked
+  R content1_missing_content3-tracked
+  R content1_missing_content3-untracked
+  R content1_missing_missing-tracked
+  R content1_missing_missing-untracked
   $ hg commit -m 'parent'
 
 (create a simple text version of the content)
 
   $ python ../dircontent.py > ../content-parent.txt
   $ cat ../content-parent.txt
-  parent added_clean
-  parent added_deleted
-  parent added_removed
-  parent added_revert
-  parent added_untracked-clean
-  parent added_untracked-revert
-  parent added_untracked-wc
-  parent added_wc
-  base   clean_clean
-  base   clean_deleted
-  base   clean_removed
-  base   clean_revert
-  base   clean_untracked-clean
-  base   clean_untracked-revert
-  base   clean_untracked-wc
-  base   clean_wc
-  parent modified_clean
-  parent modified_deleted
-  parent modified_removed
-  parent modified_revert
-  parent modified_untracked-clean
-  parent modified_untracked-revert
-  parent modified_untracked-wc
-  parent modified_wc
+  content1 content1_content1_content1-tracked
+  content1 content1_content1_content1-untracked
+  content1 content1_content1_content3-tracked
+  content1 content1_content1_content3-untracked
+  content1 content1_content1_missing-tracked
+  content1 content1_content1_missing-untracked
+  content2 content1_content2_content1-tracked
+  content2 content1_content2_content1-untracked
+  content2 content1_content2_content2-tracked
+  content2 content1_content2_content2-untracked
+  content2 content1_content2_content3-tracked
+  content2 content1_content2_content3-untracked
+  content2 content1_content2_missing-tracked
+  content2 content1_content2_missing-untracked
+  content2 missing_content2_content2-tracked
+  content2 missing_content2_content2-untracked
+  content2 missing_content2_content3-tracked
+  content2 missing_content2_content3-untracked
+  content2 missing_content2_missing-tracked
+  content2 missing_content2_missing-untracked
 
 Setup working directory
 
-  $ python ../gen-revert-cases.py wc | cat
+  $ python $TESTDIR/generate-working-copy-states.py state 2 wc
   $ hg addremove --similarity 0
-  removing added_removed
-  removing added_revert
-  removing added_untracked-revert
-  removing clean_removed
-  adding missing_deleted
-  adding missing_untracked-wc
-  adding missing_wc
-  removing modified_removed
-  adding removed_deleted
-  adding removed_revert
-  adding removed_untracked-revert
-  adding removed_untracked-wc
-  adding removed_wc
-  $ hg forget *untracked*
-  $ rm *deleted*
+  adding content1_missing_content1-tracked
+  adding content1_missing_content1-untracked
+  adding content1_missing_content3-tracked
+  adding content1_missing_content3-untracked
+  adding content1_missing_missing-tracked
+  adding content1_missing_missing-untracked
+  adding missing_missing_content3-tracked
+  adding missing_missing_content3-untracked
+  adding missing_missing_missing-tracked
+  adding missing_missing_missing-untracked
+  $ hg forget *_*_*-untracked
+  $ rm *_*_missing-*
   $ hg status
-  M added_wc
-  M clean_wc
-  M modified_revert
-  M modified_wc
-  A missing_wc
-  A removed_revert
-  A removed_wc
-  R added_removed
-  R added_revert
-  R added_untracked-clean
-  R added_untracked-revert
-  R added_untracked-wc
-  R clean_removed
-  R clean_untracked-clean
-  R clean_untracked-revert
-  R clean_untracked-wc
-  R modified_removed
-  R modified_untracked-clean
-  R modified_untracked-revert
-  R modified_untracked-wc
-  ! added_deleted
-  ! clean_deleted
-  ! missing_deleted
-  ! modified_deleted
-  ! removed_deleted
-  ? missing_untracked-wc
-  ? removed_untracked-revert
-  ? removed_untracked-wc
+  M content1_content1_content3-tracked
+  M content1_content2_content1-tracked
+  M content1_content2_content3-tracked
+  M missing_content2_content3-tracked
+  A content1_missing_content1-tracked
+  A content1_missing_content3-tracked
+  A missing_missing_content3-tracked
+  R content1_content1_content1-untracked
+  R content1_content1_content3-untracked
+  R content1_content1_missing-untracked
+  R content1_content2_content1-untracked
+  R content1_content2_content2-untracked
+  R content1_content2_content3-untracked
+  R content1_content2_missing-untracked
+  R missing_content2_content2-untracked
+  R missing_content2_content3-untracked
+  R missing_content2_missing-untracked
+  ! content1_content1_missing-tracked
+  ! content1_content2_missing-tracked
+  ! content1_missing_missing-tracked
+  ! missing_content2_missing-tracked
+  ! missing_missing_missing-tracked
+  ? content1_missing_content1-untracked
+  ? content1_missing_content3-untracked
+  ? missing_missing_content3-untracked
 
   $ hg status --rev 'desc("base")'
-  M clean_wc
-  M modified_clean
-  M modified_wc
-  M removed_wc
-  A added_clean
-  A added_wc
-  A missing_wc
-  R clean_removed
-  R clean_untracked-clean
-  R clean_untracked-revert
-  R clean_untracked-wc
-  R modified_removed
-  R modified_untracked-clean
-  R modified_untracked-revert
-  R modified_untracked-wc
-  R removed_clean
-  R removed_deleted
-  R removed_removed
-  R removed_untracked-clean
-  R removed_untracked-revert
-  R removed_untracked-wc
-  ! added_deleted
-  ! clean_deleted
-  ! missing_deleted
-  ! modified_deleted
-  ! removed_deleted
-  ? missing_untracked-wc
+  M content1_content1_content3-tracked
+  M content1_content2_content2-tracked
+  M content1_content2_content3-tracked
+  M content1_missing_content3-tracked
+  A missing_content2_content2-tracked
+  A missing_content2_content3-tracked
+  A missing_missing_content3-tracked
+  R content1_content1_content1-untracked
+  R content1_content1_content3-untracked
+  R content1_content1_missing-untracked
+  R content1_content2_content1-untracked
+  R content1_content2_content2-untracked
+  R content1_content2_content3-untracked
+  R content1_content2_missing-untracked
+  R content1_missing_content1-untracked
+  R content1_missing_content3-untracked
+  R content1_missing_missing-untracked
+  ! content1_content1_missing-tracked
+  ! content1_content2_missing-tracked
+  ! content1_missing_missing-tracked
+  ! missing_content2_missing-tracked
+  ! missing_missing_missing-tracked
+  ? missing_missing_content3-untracked
 
 (create a simple text version of the content)
 
   $ python ../dircontent.py > ../content-wc.txt
   $ cat ../content-wc.txt
-  parent added_clean
-  parent added_untracked-clean
-  wc     added_untracked-wc
-  wc     added_wc
-  base   clean_clean
-  base   clean_revert
-  base   clean_untracked-clean
-  base   clean_untracked-revert
-  wc     clean_untracked-wc
-  wc     clean_wc
-  wc     missing_untracked-wc
-  wc     missing_wc
-  parent modified_clean
-  base   modified_revert
-  parent modified_untracked-clean
-  base   modified_untracked-revert
-  wc     modified_untracked-wc
-  wc     modified_wc
-  base   removed_revert
-  base   removed_untracked-revert
-  wc     removed_untracked-wc
-  wc     removed_wc
+  content1 content1_content1_content1-tracked
+  content1 content1_content1_content1-untracked
+  content3 content1_content1_content3-tracked
+  content3 content1_content1_content3-untracked
+  content1 content1_content2_content1-tracked
+  content1 content1_content2_content1-untracked
+  content2 content1_content2_content2-tracked
+  content2 content1_content2_content2-untracked
+  content3 content1_content2_content3-tracked
+  content3 content1_content2_content3-untracked
+  content1 content1_missing_content1-tracked
+  content1 content1_missing_content1-untracked
+  content3 content1_missing_content3-tracked
+  content3 content1_missing_content3-untracked
+  content2 missing_content2_content2-tracked
+  content2 missing_content2_content2-untracked
+  content3 missing_content2_content3-tracked
+  content3 missing_content2_content3-untracked
+  content3 missing_missing_content3-tracked
+  content3 missing_missing_content3-untracked
 
   $ cd ..
 
@@ -855,31 +725,28 @@
 check revert output
 
   $ hg revert --all
-  reverting added_deleted
-  undeleting added_removed
-  undeleting added_revert
-  undeleting added_untracked-clean
-  undeleting added_untracked-revert
-  undeleting added_untracked-wc
-  reverting added_wc
-  reverting clean_deleted
-  undeleting clean_removed
-  undeleting clean_untracked-clean
-  undeleting clean_untracked-revert
-  undeleting clean_untracked-wc
-  reverting clean_wc
-  forgetting missing_deleted
-  forgetting missing_wc
-  reverting modified_deleted
-  undeleting modified_removed
-  reverting modified_revert
-  undeleting modified_untracked-clean
-  undeleting modified_untracked-revert
-  undeleting modified_untracked-wc
-  reverting modified_wc
-  forgetting removed_deleted
-  forgetting removed_revert
-  forgetting removed_wc
+  undeleting content1_content1_content1-untracked
+  reverting content1_content1_content3-tracked
+  undeleting content1_content1_content3-untracked
+  reverting content1_content1_missing-tracked
+  undeleting content1_content1_missing-untracked
+  reverting content1_content2_content1-tracked
+  undeleting content1_content2_content1-untracked
+  undeleting content1_content2_content2-untracked
+  reverting content1_content2_content3-tracked
+  undeleting content1_content2_content3-untracked
+  reverting content1_content2_missing-tracked
+  undeleting content1_content2_missing-untracked
+  forgetting content1_missing_content1-tracked
+  forgetting content1_missing_content3-tracked
+  forgetting content1_missing_missing-tracked
+  undeleting missing_content2_content2-untracked
+  reverting missing_content2_content3-tracked
+  undeleting missing_content2_content3-untracked
+  reverting missing_content2_missing-tracked
+  undeleting missing_content2_missing-untracked
+  forgetting missing_missing_content3-tracked
+  forgetting missing_missing_missing-tracked
 
 Compare resulting directory with revert target.
 
@@ -889,20 +756,20 @@
   $ python ../dircontent.py > ../content-parent-all.txt
   $ cd ..
   $ diff -U 0 -- content-parent.txt content-parent-all.txt | grep _
-  +wc     added_untracked-wc.orig
-  +wc     added_wc.orig
-  +wc     clean_untracked-wc.orig
-  +wc     clean_wc.orig
-  +wc     missing_untracked-wc
-  +wc     missing_wc
-  +base   modified_revert.orig
-  +base   modified_untracked-revert.orig
-  +wc     modified_untracked-wc.orig
-  +wc     modified_wc.orig
-  +base   removed_revert
-  +base   removed_untracked-revert
-  +wc     removed_untracked-wc
-  +wc     removed_wc
+  +content3 content1_content1_content3-tracked.orig
+  +content3 content1_content1_content3-untracked.orig
+  +content1 content1_content2_content1-tracked.orig
+  +content1 content1_content2_content1-untracked.orig
+  +content3 content1_content2_content3-tracked.orig
+  +content3 content1_content2_content3-untracked.orig
+  +content1 content1_missing_content1-tracked
+  +content1 content1_missing_content1-untracked
+  +content3 content1_missing_content3-tracked
+  +content3 content1_missing_content3-untracked
+  +content3 missing_content2_content3-tracked.orig
+  +content3 missing_content2_content3-untracked.orig
+  +content3 missing_missing_content3-tracked
+  +content3 missing_missing_content3-untracked
 
 Test revert --all to "base" content
 -----------------------------------
@@ -915,31 +782,28 @@
 check revert output
 
   $ hg revert --all --rev 'desc(base)'
-  removing added_clean
-  removing added_deleted
-  removing added_wc
-  reverting clean_deleted
-  undeleting clean_removed
-  undeleting clean_untracked-clean
-  undeleting clean_untracked-revert
-  undeleting clean_untracked-wc
-  reverting clean_wc
-  forgetting missing_deleted
-  forgetting missing_wc
-  reverting modified_clean
-  reverting modified_deleted
-  undeleting modified_removed
-  undeleting modified_untracked-clean
-  undeleting modified_untracked-revert
-  undeleting modified_untracked-wc
-  reverting modified_wc
-  adding removed_clean
-  reverting removed_deleted
-  adding removed_removed
-  adding removed_untracked-clean
-  adding removed_untracked-revert
-  adding removed_untracked-wc
-  reverting removed_wc
+  undeleting content1_content1_content1-untracked
+  reverting content1_content1_content3-tracked
+  undeleting content1_content1_content3-untracked
+  reverting content1_content1_missing-tracked
+  undeleting content1_content1_missing-untracked
+  undeleting content1_content2_content1-untracked
+  reverting content1_content2_content2-tracked
+  undeleting content1_content2_content2-untracked
+  reverting content1_content2_content3-tracked
+  undeleting content1_content2_content3-untracked
+  reverting content1_content2_missing-tracked
+  undeleting content1_content2_missing-untracked
+  adding content1_missing_content1-untracked
+  reverting content1_missing_content3-tracked
+  adding content1_missing_content3-untracked
+  reverting content1_missing_missing-tracked
+  adding content1_missing_missing-untracked
+  removing missing_content2_content2-tracked
+  removing missing_content2_content3-tracked
+  removing missing_content2_missing-tracked
+  forgetting missing_missing_content3-tracked
+  forgetting missing_missing_missing-tracked
 
 Compare resulting directory with revert target.
 
@@ -949,18 +813,18 @@
   $ python ../dircontent.py > ../content-base-all.txt
   $ cd ..
   $ diff -U 0 -- content-base.txt content-base-all.txt | grep _
-  +parent added_untracked-clean
-  +wc     added_untracked-wc
-  +wc     added_wc.orig
-  +wc     clean_untracked-wc.orig
-  +wc     clean_wc.orig
-  +wc     missing_untracked-wc
-  +wc     missing_wc
-  +parent modified_untracked-clean.orig
-  +wc     modified_untracked-wc.orig
-  +wc     modified_wc.orig
-  +wc     removed_untracked-wc.orig
-  +wc     removed_wc.orig
+  +content3 content1_content1_content3-tracked.orig
+  +content3 content1_content1_content3-untracked.orig
+  +content2 content1_content2_content2-untracked.orig
+  +content3 content1_content2_content3-tracked.orig
+  +content3 content1_content2_content3-untracked.orig
+  +content3 content1_missing_content3-tracked.orig
+  +content3 content1_missing_content3-untracked.orig
+  +content2 missing_content2_content2-untracked
+  +content3 missing_content2_content3-tracked.orig
+  +content3 missing_content2_content3-untracked
+  +content3 missing_missing_content3-tracked
+  +content3 missing_missing_content3-untracked
 
 Test revert to parent content with explicit file name
 -----------------------------------------------------
@@ -973,108 +837,81 @@
 revert all files individually and check the output
 (output is expected to be different than in the --all case)
 
-  $ for file in `python ../gen-revert-cases.py filelist`; do
+  $ for file in `python $TESTDIR/generate-working-copy-states.py filelist 2`; do
   >   echo '### revert for:' $file;
   >   hg revert $file;
   >   echo
   > done
-  ### revert for: added_clean
-  no changes needed to added_clean
-  
-  ### revert for: added_deleted
+  ### revert for: content1_content1_content1-tracked
+  no changes needed to content1_content1_content1-tracked
   
-  ### revert for: added_removed
+  ### revert for: content1_content1_content1-untracked
   
-  ### revert for: added_revert
-  
-  ### revert for: added_untracked-clean
+  ### revert for: content1_content1_content3-tracked
   
-  ### revert for: added_untracked-revert
+  ### revert for: content1_content1_content3-untracked
   
-  ### revert for: added_untracked-wc
+  ### revert for: content1_content1_missing-tracked
   
-  ### revert for: added_wc
+  ### revert for: content1_content1_missing-untracked
   
-  ### revert for: clean_clean
-  no changes needed to clean_clean
-  
-  ### revert for: clean_deleted
+  ### revert for: content1_content2_content1-tracked
   
-  ### revert for: clean_removed
-  
-  ### revert for: clean_revert
-  no changes needed to clean_revert
+  ### revert for: content1_content2_content1-untracked
   
-  ### revert for: clean_untracked-clean
+  ### revert for: content1_content2_content2-tracked
+  no changes needed to content1_content2_content2-tracked
   
-  ### revert for: clean_untracked-revert
-  
-  ### revert for: clean_untracked-wc
+  ### revert for: content1_content2_content2-untracked
   
-  ### revert for: clean_wc
+  ### revert for: content1_content2_content3-tracked
   
-  ### revert for: missing_clean
-  missing_clean: no such file in rev * (glob)
+  ### revert for: content1_content2_content3-untracked
   
-  ### revert for: missing_deleted
+  ### revert for: content1_content2_missing-tracked
   
-  ### revert for: missing_removed
-  missing_removed: no such file in rev * (glob)
+  ### revert for: content1_content2_missing-untracked
   
-  ### revert for: missing_revert
-  missing_revert: no such file in rev * (glob)
+  ### revert for: content1_missing_content1-tracked
   
-  ### revert for: missing_untracked-clean
-  missing_untracked-clean: no such file in rev * (glob)
+  ### revert for: content1_missing_content1-untracked
+  file not managed: content1_missing_content1-untracked
   
-  ### revert for: missing_untracked-revert
-  missing_untracked-revert: no such file in rev * (glob)
+  ### revert for: content1_missing_content3-tracked
   
-  ### revert for: missing_untracked-wc
-  file not managed: missing_untracked-wc
-  
-  ### revert for: missing_wc
+  ### revert for: content1_missing_content3-untracked
+  file not managed: content1_missing_content3-untracked
   
-  ### revert for: modified_clean
-  no changes needed to modified_clean
+  ### revert for: content1_missing_missing-tracked
   
-  ### revert for: modified_deleted
+  ### revert for: content1_missing_missing-untracked
+  content1_missing_missing-untracked: no such file in rev * (glob)
   
-  ### revert for: modified_removed
-  
-  ### revert for: modified_revert
+  ### revert for: missing_content2_content2-tracked
+  no changes needed to missing_content2_content2-tracked
   
-  ### revert for: modified_untracked-clean
-  
-  ### revert for: modified_untracked-revert
+  ### revert for: missing_content2_content2-untracked
   
-  ### revert for: modified_untracked-wc
+  ### revert for: missing_content2_content3-tracked
   
-  ### revert for: modified_wc
+  ### revert for: missing_content2_content3-untracked
   
-  ### revert for: removed_clean
-  removed_clean: no such file in rev * (glob)
+  ### revert for: missing_content2_missing-tracked
   
-  ### revert for: removed_deleted
-  
-  ### revert for: removed_removed
-  removed_removed: no such file in rev * (glob)
+  ### revert for: missing_content2_missing-untracked
   
-  ### revert for: removed_revert
+  ### revert for: missing_missing_content3-tracked
   
-  ### revert for: removed_untracked-clean
-  removed_untracked-clean: no such file in rev * (glob)
+  ### revert for: missing_missing_content3-untracked
+  file not managed: missing_missing_content3-untracked
   
-  ### revert for: removed_untracked-revert
-  file not managed: removed_untracked-revert
+  ### revert for: missing_missing_missing-tracked
   
-  ### revert for: removed_untracked-wc
-  file not managed: removed_untracked-wc
-  
-  ### revert for: removed_wc
+  ### revert for: missing_missing_missing-untracked
+  missing_missing_missing-untracked: no such file in rev * (glob)
   
 
-check resulting directory againt the --all run
+check resulting directory against the --all run
 (There should be no difference)
 
   $ python ../dircontent.py > ../content-parent-explicit.txt
@@ -1093,108 +930,81 @@
 revert all files individually and check the output
 (output is expected to be different than in the --all case)
 
-  $ for file in `python ../gen-revert-cases.py filelist`; do
+  $ for file in `python $TESTDIR/generate-working-copy-states.py filelist 2`; do
   >   echo '### revert for:' $file;
   >   hg revert $file --rev 'desc(base)';
   >   echo
   > done
-  ### revert for: added_clean
-  
-  ### revert for: added_deleted
+  ### revert for: content1_content1_content1-tracked
+  no changes needed to content1_content1_content1-tracked
   
-  ### revert for: added_removed
-  no changes needed to added_removed
+  ### revert for: content1_content1_content1-untracked
   
-  ### revert for: added_revert
-  no changes needed to added_revert
+  ### revert for: content1_content1_content3-tracked
   
-  ### revert for: added_untracked-clean
-  no changes needed to added_untracked-clean
+  ### revert for: content1_content1_content3-untracked
+  
+  ### revert for: content1_content1_missing-tracked
   
-  ### revert for: added_untracked-revert
-  no changes needed to added_untracked-revert
+  ### revert for: content1_content1_missing-untracked
   
-  ### revert for: added_untracked-wc
-  no changes needed to added_untracked-wc
-  
-  ### revert for: added_wc
+  ### revert for: content1_content2_content1-tracked
+  no changes needed to content1_content2_content1-tracked
   
-  ### revert for: clean_clean
-  no changes needed to clean_clean
+  ### revert for: content1_content2_content1-untracked
   
-  ### revert for: clean_deleted
+  ### revert for: content1_content2_content2-tracked
   
-  ### revert for: clean_removed
+  ### revert for: content1_content2_content2-untracked
   
-  ### revert for: clean_revert
-  no changes needed to clean_revert
-  
-  ### revert for: clean_untracked-clean
+  ### revert for: content1_content2_content3-tracked
   
-  ### revert for: clean_untracked-revert
+  ### revert for: content1_content2_content3-untracked
   
-  ### revert for: clean_untracked-wc
-  
-  ### revert for: clean_wc
+  ### revert for: content1_content2_missing-tracked
   
-  ### revert for: missing_clean
-  missing_clean: no such file in rev * (glob)
+  ### revert for: content1_content2_missing-untracked
   
-  ### revert for: missing_deleted
-  
-  ### revert for: missing_removed
-  missing_removed: no such file in rev * (glob)
+  ### revert for: content1_missing_content1-tracked
+  no changes needed to content1_missing_content1-tracked
   
-  ### revert for: missing_revert
-  missing_revert: no such file in rev * (glob)
+  ### revert for: content1_missing_content1-untracked
+  
+  ### revert for: content1_missing_content3-tracked
   
-  ### revert for: missing_untracked-clean
-  missing_untracked-clean: no such file in rev * (glob)
+  ### revert for: content1_missing_content3-untracked
   
-  ### revert for: missing_untracked-revert
-  missing_untracked-revert: no such file in rev * (glob)
+  ### revert for: content1_missing_missing-tracked
   
-  ### revert for: missing_untracked-wc
-  file not managed: missing_untracked-wc
+  ### revert for: content1_missing_missing-untracked
   
-  ### revert for: missing_wc
-  
-  ### revert for: modified_clean
+  ### revert for: missing_content2_content2-tracked
   
-  ### revert for: modified_deleted
+  ### revert for: missing_content2_content2-untracked
+  no changes needed to missing_content2_content2-untracked
   
-  ### revert for: modified_removed
-  
-  ### revert for: modified_revert
-  no changes needed to modified_revert
+  ### revert for: missing_content2_content3-tracked
   
-  ### revert for: modified_untracked-clean
-  
-  ### revert for: modified_untracked-revert
-  
-  ### revert for: modified_untracked-wc
+  ### revert for: missing_content2_content3-untracked
+  no changes needed to missing_content2_content3-untracked
   
-  ### revert for: modified_wc
+  ### revert for: missing_content2_missing-tracked
   
-  ### revert for: removed_clean
-  
-  ### revert for: removed_deleted
+  ### revert for: missing_content2_missing-untracked
+  no changes needed to missing_content2_missing-untracked
   
-  ### revert for: removed_removed
+  ### revert for: missing_missing_content3-tracked
   
-  ### revert for: removed_revert
-  no changes needed to removed_revert
+  ### revert for: missing_missing_content3-untracked
+  file not managed: missing_missing_content3-untracked
   
-  ### revert for: removed_untracked-clean
-  
-  ### revert for: removed_untracked-revert
+  ### revert for: missing_missing_missing-tracked
   
-  ### revert for: removed_untracked-wc
-  
-  ### revert for: removed_wc
+  ### revert for: missing_missing_missing-untracked
+  missing_missing_missing-untracked: no such file in rev * (glob)
   
 
-check resulting directory againt the --all run
+check resulting directory against the --all run
 (There should be no difference)
 
   $ python ../dircontent.py > ../content-base-explicit.txt
--- a/tests/test-revlog-packentry.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-revlog-packentry.t	Sat Jan 17 18:28:30 2015 -0800
@@ -18,6 +18,6 @@
   $ hg debugindex foo
      rev    offset  length  ..... linkrev nodeid       p1           p2 (re)
        0         0       0  .....       0 b80de5d13875 000000000000 000000000000 (re)
-       1         0      24  .....       1 0376abec49b8 000000000000 000000000000 (re)
+       1         0      13  .....       1 0376abec49b8 000000000000 000000000000 (re)
 
   $ cd ..
--- a/tests/test-revset.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-revset.t	Sat Jan 17 18:28:30 2015 -0800
@@ -438,6 +438,32 @@
   8
   9
 
+Test '%' operator
+
+  $ log '9%'
+  8
+  9
+  $ log '9%5'
+  2
+  4
+  8
+  9
+  $ log '(7 + 9)%(5 + 2)'
+  4
+  6
+  7
+  8
+  9
+
+Test the order of operations
+
+  $ log '7 + 9%5 + 2'
+  7
+  2
+  4
+  8
+  9
+
 Test explicit numeric revision
   $ log 'rev(-1)'
   $ log 'rev(0)'
@@ -959,11 +985,25 @@
     (range
       ('symbol', '2')
       ('symbol', '5')))
-  hg: parse error: not a function: _aliasarg
+  abort: failed to parse the definition of revset alias "injectparamasstring2": not a function: _aliasarg
   [255]
+  $ hg debugrevspec --debug --config revsetalias.anotherbadone='branch(' "tip"
+  ('symbol', 'tip')
+  warning: failed to parse the definition of revset alias "anotherbadone": at 7: not a prefix: end
+  warning: failed to parse the definition of revset alias "injectparamasstring2": not a function: _aliasarg
+  9
   >>> data = file('.hg/hgrc', 'rb').read()
   >>> file('.hg/hgrc', 'wb').write(data.replace('_aliasarg', ''))
 
+  $ try 'tip'
+  ('symbol', 'tip')
+  9
+
+  $ hg debugrevspec --debug --config revsetalias.'bad name'='tip' "tip"
+  ('symbol', 'tip')
+  warning: failed to parse the declaration of revset alias "bad name": at 4: invalid token
+  9
+
   $ try 'd(2:5)'
   (func
     ('symbol', 'd')
@@ -1118,6 +1158,54 @@
   $ cd ../repo
   $ log 'remote(".a.b.c.", "../remote3")'
 
+tests for concatenation of strings/symbols by "##"
+
+  $ try "278 ## '5f5' ## 1ee ## 'ce5'"
+  (_concat
+    (_concat
+      (_concat
+        ('symbol', '278')
+        ('string', '5f5'))
+      ('symbol', '1ee'))
+    ('string', 'ce5'))
+  ('string', '2785f51eece5')
+  0
+
+  $ echo 'cat4($1, $2, $3, $4) = $1 ## $2 ## $3 ## $4' >> .hg/hgrc
+  $ try "cat4(278, '5f5', 1ee, 'ce5')"
+  (func
+    ('symbol', 'cat4')
+    (list
+      (list
+        (list
+          ('symbol', '278')
+          ('string', '5f5'))
+        ('symbol', '1ee'))
+      ('string', 'ce5')))
+  (_concat
+    (_concat
+      (_concat
+        ('symbol', '278')
+        ('string', '5f5'))
+      ('symbol', '1ee'))
+    ('string', 'ce5'))
+  ('string', '2785f51eece5')
+  0
+
+(check concatenation in alias nesting)
+
+  $ echo 'cat2($1, $2) = $1 ## $2' >> .hg/hgrc
+  $ echo 'cat2x2($1, $2, $3, $4) = cat2($1 ## $2, $3 ## $4)' >> .hg/hgrc
+  $ log "cat2x2(278, '5f5', 1ee, 'ce5')"
+  0
+
+(check operator priority)
+
+  $ echo 'cat2n2($1, $2, $3, $4) = $1 ## $2 or $3 ## $4~2' >> .hg/hgrc
+  $ log "cat2n2(2785f5, 1eece5, 24286f, 4ae135)"
+  0
+  4
+
   $ cd ..
 
 test author/desc/keyword in problematic encoding
--- a/tests/test-run-tests.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-run-tests.t	Sat Jan 17 18:28:30 2015 -0800
@@ -33,8 +33,8 @@
 
   $ $TESTDIR/run-tests.py --with-hg=`which hg`
   
-  --- $TESTTMP/test-failure.t (glob)
-  +++ $TESTTMP/test-failure.t.err (glob)
+  --- $TESTTMP/test-failure.t
+  +++ $TESTTMP/test-failure.t.err
   @@ -1,4 +1,4 @@
      $ echo babar
   -  rataxes
@@ -87,8 +87,8 @@
 
   $ $TESTDIR/run-tests.py --with-hg=`which hg` --retest
   
-  --- $TESTTMP/test-failure.t (glob)
-  +++ $TESTTMP/test-failure.t.err (glob)
+  --- $TESTTMP/test-failure.t
+  +++ $TESTTMP/test-failure.t.err
   @@ -1,4 +1,4 @@
      $ echo babar
   -  rataxes
@@ -121,8 +121,8 @@
 
   $ $TESTDIR/run-tests.py --with-hg=`which hg` test-failure.t
   
-  --- $TESTTMP/test-failure.t (glob)
-  +++ $TESTTMP/test-failure.t.err (glob)
+  --- $TESTTMP/test-failure.t
+  +++ $TESTTMP/test-failure.t.err
   @@ -1,4 +1,4 @@
      $ echo babar
   -  rataxes
@@ -181,22 +181,22 @@
 ======================
 
   $ $TESTDIR/run-tests.py --with-hg=`which hg` --debug 2>&1 | grep -v pwd
-  + echo SALT* 0 0 (glob)
-  SALT* 0 0 (glob)
+  + echo *SALT* 0 0 (glob)
+  *SALT* 0 0 (glob)
   + echo babar
   babar
-  + echo SALT* 4 0 (glob)
-  SALT* 4 0 (glob)
-  .+ echo SALT* 0 0 (glob)
-  SALT* 0 0 (glob)
+  + echo *SALT* 4 0 (glob)
+  *SALT* 4 0 (glob)
+  .+ echo *SALT* 0 0 (glob)
+  *SALT* 0 0 (glob)
   + echo babar
   babar
-  + echo SALT* 2 0 (glob)
-  SALT* 2 0 (glob)
+  + echo *SALT* 2 0 (glob)
+  *SALT* 2 0 (glob)
   + echo xyzzy
   xyzzy
-  + echo SALT* 4 0 (glob)
-  SALT* 4 0 (glob)
+  + echo *SALT* 4 0 (glob)
+  *SALT* 4 0 (glob)
   .
   # Ran 2 tests, 0 skipped, 0 warned, 0 failed.
 
@@ -274,7 +274,7 @@
 Interactive with custom view
 
   $ echo 'n' | $TESTDIR/run-tests.py --with-hg=`which hg` -i --view echo
-  $TESTTMP/test-failure.t $TESTTMP/test-failure.t.err
+  $TESTTMP/test-failure.t $TESTTMP/test-failure.t.err (glob)
   Accept this change? [n]* (glob)
   ERROR: test-failure.t output changed
   !.
@@ -286,7 +286,7 @@
 View the fix
 
   $ echo 'y' | $TESTDIR/run-tests.py --with-hg=`which hg` --view echo
-  $TESTTMP/test-failure.t $TESTTMP/test-failure.t.err
+  $TESTTMP/test-failure.t $TESTTMP/test-failure.t.err (glob)
   
   ERROR: test-failure.t output changed
   !.
@@ -297,24 +297,43 @@
 
 Accept the fix
 
-  $ echo 'y' | $TESTDIR/run-tests.py --with-hg=`which hg` -i
+  $ echo "  $ echo 'saved backup bundle to \$TESTTMP/foo.hg'" >> test-failure.t
+  $ echo "  saved backup bundle to \$TESTTMP/foo.hg" >> test-failure.t
+  $ echo "  $ echo 'saved backup bundle to \$TESTTMP/foo.hg'" >> test-failure.t
+  $ echo "  saved backup bundle to \$TESTTMP/foo.hg (glob)" >> test-failure.t
+  $ echo "  $ echo 'saved backup bundle to \$TESTTMP/foo.hg'" >> test-failure.t
+  $ echo "  saved backup bundle to \$TESTTMP/*.hg (glob)" >> test-failure.t
+  $ echo 'y' | $TESTDIR/run-tests.py --with-hg=`which hg` -i 2>&1 | \
+  >   sed -e 's,(glob)$,&<,g'
   
   --- $TESTTMP/test-failure.t
   +++ $TESTTMP/test-failure.t.err
-  @@ -1,4 +1,4 @@
+  @@ -1,9 +1,9 @@
      $ echo babar
   -  rataxes
   +  babar
    This is a noop statement so that
    this test is still more bytes than success.
+     $ echo 'saved backup bundle to $TESTTMP/foo.hg'
+  -  saved backup bundle to $TESTTMP/foo.hg
+  +  saved backup bundle to $TESTTMP/foo.hg (glob)<
+     $ echo 'saved backup bundle to $TESTTMP/foo.hg'
+     saved backup bundle to $TESTTMP/foo.hg (glob)<
+     $ echo 'saved backup bundle to $TESTTMP/foo.hg'
   Accept this change? [n] ..
   # Ran 2 tests, 0 skipped, 0 warned, 0 failed.
 
-  $ cat test-failure.t
+  $ sed -e 's,(glob)$,&<,g' test-failure.t
     $ echo babar
     babar
   This is a noop statement so that
   this test is still more bytes than success.
+    $ echo 'saved backup bundle to $TESTTMP/foo.hg'
+    saved backup bundle to $TESTTMP/foo.hg (glob)<
+    $ echo 'saved backup bundle to $TESTTMP/foo.hg'
+    saved backup bundle to $TESTTMP/foo.hg (glob)<
+    $ echo 'saved backup bundle to $TESTTMP/foo.hg'
+    saved backup bundle to $TESTTMP/*.hg (glob)<
 
 (reinstall)
   $ mv backup test-failure.t
--- a/tests/test-setdiscovery.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-setdiscovery.t	Sat Jan 17 18:28:30 2015 -0800
@@ -317,17 +317,14 @@
   query 3; still undecided: 1140, sample size is: 200
   sampling from both directions
   searching: 4 queries
-  query 4; still undecided: 940, sample size is: 200
+  query 4; still undecided: 592, sample size is: 200
   sampling from both directions
   searching: 5 queries
-  query 5; still undecided: 740, sample size is: 200
+  query 5; still undecided: 292, sample size is: 200
   sampling from both directions
   searching: 6 queries
-  query 6; still undecided: 540, sample size is: 200
-  sampling from both directions
-  searching: 7 queries
-  query 7; still undecided: 44, sample size is: 44
-  7 total queries
+  query 6; still undecided: 51, sample size is: 51
+  6 total queries
   common heads: 3ee37d65064a
 
 Test actual protocol when pulling one new head in addition to common heads
@@ -354,6 +351,56 @@
   "GET /?cmd=capabilities HTTP/1.1" 200 -
   "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db
   "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477
+  "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases
   $ cat errors.log
 
   $ cd ..
+
+
+Issue 4438 - test coverage for the 3ef893520a85 issues.
+
+  $ mkdir issue4438
+  $ cd issue4438
+#if false
+generate new bundles:
+  $ hg init r1
+  $ for i in `seq 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
+  $ hg clone -q r1 r2
+  $ for i in `seq 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
+  $ hg -R r2 branch -q r2change && hg -R r2 ci -qmr2change
+  $ hg -R r1 bundle -qa $TESTDIR/bundles/issue4438-r1.hg
+  $ hg -R r2 bundle -qa $TESTDIR/bundles/issue4438-r2.hg
+#else
+use existing bundles:
+  $ hg clone -q $TESTDIR/bundles/issue4438-r1.hg r1
+  $ hg clone -q $TESTDIR/bundles/issue4438-r2.hg r2
+#endif
+
+Set iteration order could cause wrong and unstable results - fixed in 73cfaa348650:
+
+  $ hg -R r1 outgoing r2 -T'{rev} '
+  comparing with r2
+  searching for changes
+  101 102 103 104 105 106 107 108 109 110  (no-eol)
+
+The case where all the 'initialsamplesize' samples were already common would
+give 'all remote heads known locally' without checking the remaining heads -
+fixed in 86c35b7ae300:
+
+  $ cat >> $TESTTMP/unrandomsample.py << EOF
+  > import random
+  > def sample(population, k):
+  >     return sorted(population)[:k]
+  > random.sample = sample
+  > EOF
+
+  $ cat >> r1/.hg/hgrc << EOF
+  > [extensions]
+  > unrandomsample = $TESTTMP/unrandomsample.py
+  > EOF
+
+  $ hg -R r1 outgoing r2 -T'{rev} '
+  comparing with r2
+  searching for changes
+  101 102 103 104 105 106 107 108 109 110  (no-eol)
+  $ cd ..
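
The unrandomsample.py helper above works by replacing random.sample with a
deterministic pick of the k smallest candidates, which makes the discovery
scenario described above reproducible across runs. A minimal standalone sketch
of that monkeypatching pattern (the names here are illustrative and not part of
the test suite):

    import random

    def deterministic_sample(population, k):
        # Always return the k smallest items instead of a random subset,
        # so repeated runs exercise exactly the same code path.
        return sorted(population)[:k]

    # Install the replacement globally, as the test extension does.
    random.sample = deterministic_sample

    print(random.sample({40, 10, 30, 20}, 2))  # [10, 20]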
--- a/tests/test-share.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-share.t	Sat Jan 17 18:28:30 2015 -0800
@@ -128,6 +128,175 @@
 
   $ cd ..
 
+
+test sharing bookmarks
+
+  $ hg share -B repo1 repo3
+  updating working directory
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cd repo1
+  $ hg bookmark bm1
+  $ hg bookmarks
+   * bm1                       2:c2e0ac586386
+  $ cd ../repo2
+  $ hg book bm2
+  $ hg bookmarks
+   * bm2                       3:0e6e70d1d5f1
+  $ cd ../repo3
+  $ hg bookmarks
+     bm1                       2:c2e0ac586386
+  $ hg book bm3
+  $ hg bookmarks
+     bm1                       2:c2e0ac586386
+   * bm3                       2:c2e0ac586386
+  $ cd ../repo1
+  $ hg bookmarks
+   * bm1                       2:c2e0ac586386
+     bm3                       2:c2e0ac586386
+
+test that commits work
+
+  $ echo 'shared bookmarks' > a
+  $ hg commit -m 'testing shared bookmarks'
+  $ hg bookmarks
+   * bm1                       3:b87954705719
+     bm3                       2:c2e0ac586386
+  $ cd ../repo3
+  $ hg bookmarks
+     bm1                       3:b87954705719
+   * bm3                       2:c2e0ac586386
+  $ echo 'more shared bookmarks' > a
+  $ hg commit -m 'testing shared bookmarks'
+  created new head
+  $ hg bookmarks
+     bm1                       3:b87954705719
+   * bm3                       4:62f4ded848e4
+  $ cd ../repo1
+  $ hg bookmarks
+   * bm1                       3:b87954705719
+     bm3                       4:62f4ded848e4
+  $ cd ..
+
+test pushing bookmarks works
+
+  $ hg clone repo3 repo4
+  updating to branch default
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cd repo4
+  $ hg boo bm4
+  $ echo foo > b
+  $ hg commit -m 'foo in b'
+  $ hg boo
+     bm1                       3:b87954705719
+     bm3                       4:62f4ded848e4
+   * bm4                       5:92793bfc8cad
+  $ hg push -B bm4
+  pushing to $TESTTMP/repo3 (glob)
+  searching for changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 1 changes to 1 files
+  exporting bookmark bm4
+  $ cd ../repo1
+  $ hg bookmarks
+   * bm1                       3:b87954705719
+     bm3                       4:62f4ded848e4
+     bm4                       5:92793bfc8cad
+  $ cd ../repo3
+  $ hg bookmarks
+     bm1                       3:b87954705719
+   * bm3                       4:62f4ded848e4
+     bm4                       5:92793bfc8cad
+  $ cd ..
+
+test behavior when sharing a shared repo
+
+  $ hg share -B repo3 repo5
+  updating working directory
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cd repo5
+  $ hg book
+     bm1                       3:b87954705719
+     bm3                       4:62f4ded848e4
+     bm4                       5:92793bfc8cad
+  $ cd ..
+
+test what happens when an active bookmark is deleted
+
+  $ cd repo1
+  $ hg boo -d bm3
+  $ hg boo
+   * bm1                       3:b87954705719
+     bm4                       5:92793bfc8cad
+  $ cd ../repo3
+  $ hg boo
+     bm1                       3:b87954705719
+     bm4                       5:92793bfc8cad
+  $ cd ..
+
+verify that bookmarks are not written on a failed transaction
+
+  $ cat > failpullbookmarks.py << EOF
+  > """A small extension that makes bookmark pulls fail, for testing"""
+  > from mercurial import extensions, exchange, error
+  > def _pullbookmarks(orig, pullop):
+  >     orig(pullop)
+  >     raise error.HookAbort('forced failure by extension')
+  > def extsetup(ui):
+  >     extensions.wrapfunction(exchange, '_pullbookmarks', _pullbookmarks)
+  > EOF
+  $ cd repo4
+  $ hg boo
+     bm1                       3:b87954705719
+     bm3                       4:62f4ded848e4
+   * bm4                       5:92793bfc8cad
+  $ cd ../repo3
+  $ hg boo
+     bm1                       3:b87954705719
+     bm4                       5:92793bfc8cad
+  $ hg --config "extensions.failpullbookmarks=$TESTTMP/failpullbookmarks.py" pull $TESTTMP/repo4
+  pulling from $TESTTMP/repo4 (glob)
+  searching for changes
+  no changes found
+  adding remote bookmark bm3
+  abort: forced failure by extension
+  [255]
+  $ hg boo
+     bm1                       3:b87954705719
+     bm4                       5:92793bfc8cad
+  $ hg pull $TESTTMP/repo4
+  pulling from $TESTTMP/repo4 (glob)
+  searching for changes
+  no changes found
+  adding remote bookmark bm3
+  $ hg boo
+     bm1                       3:b87954705719
+   * bm3                       4:62f4ded848e4
+     bm4                       5:92793bfc8cad
+  $ cd ..
+
+verify bookmark behavior after unshare
+
+  $ cd repo3
+  $ hg unshare
+  $ hg boo
+     bm1                       3:b87954705719
+   * bm3                       4:62f4ded848e4
+     bm4                       5:92793bfc8cad
+  $ hg boo -d bm4
+  $ hg boo bm5
+  $ hg boo
+     bm1                       3:b87954705719
+     bm3                       4:62f4ded848e4
+   * bm5                       4:62f4ded848e4
+  $ cd ../repo1
+  $ hg boo
+   * bm1                       3:b87954705719
+     bm3                       4:62f4ded848e4
+     bm4                       5:92793bfc8cad
+  $ cd ..
+
 Explicitly kill daemons to let the test exit on Windows
 
   $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
--- a/tests/test-shelve.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-shelve.t	Sat Jan 17 18:28:30 2015 -0800
@@ -1,9 +1,11 @@
-  $ echo "[extensions]" >> $HGRCPATH
-  $ echo "mq=" >> $HGRCPATH
-  $ echo "shelve=" >> $HGRCPATH
-  $ echo "[defaults]" >> $HGRCPATH
-  $ echo "diff = --nodates --git" >> $HGRCPATH
-  $ echo "qnew = --date '0 0'" >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > [extensions]
+  > mq =
+  > shelve =
+  > [defaults]
+  > diff = --nodates --git
+  > qnew = --date '0 0'
+  > EOF
 
   $ hg init repo
   $ cd repo
@@ -108,6 +110,7 @@
   unshelving change 'default-01'
   temporarily committing pending changes (restore with 'hg unshelve --abort')
   rebasing shelved changes
+  rebasing 4:4702e8911fe0 "changes to '[mq]: second.patch'" (tip)
   merging a/a
 
   $ hg revert --all -q
@@ -200,6 +203,7 @@
   unshelving change 'default'
   temporarily committing pending changes (restore with 'hg unshelve --abort')
   rebasing shelved changes
+  rebasing 5:4702e8911fe0 "changes to '[mq]: second.patch'" (tip)
   merging a/a
   warning: conflicts during merge.
   merging a/a incomplete! (edit conflicts, then use 'hg resolve --mark')
@@ -234,12 +238,6 @@
   diff --git a/b/b b/b.rename/b
   rename from b/b
   rename to b.rename/b
-  diff --git a/b/b b/b/b
-  deleted file mode 100644
-  --- a/b/b
-  +++ /dev/null
-  @@ -1,1 +0,0 @@
-  -b
   diff --git a/c b/c.copy
   copy from c
   copy to c.copy
@@ -310,6 +308,7 @@
   [255]
 
   $ hg unshelve -c
+  rebasing 5:4702e8911fe0 "changes to '[mq]: second.patch'" (tip)
   unshelve of 'default' complete
 
 ensure the repo is as we hope
@@ -380,7 +379,9 @@
   unshelving change 'default'
   temporarily committing pending changes (restore with 'hg unshelve --abort')
   rebasing shelved changes
+  rebasing 6:c5e6910e7601 "changes to 'second'" (tip)
   merging a/a
+  note: rebase of 6:c5e6910e7601 created no changes to commit
   $ hg parents -q
   4:33f7f61e6c5e
   $ hg shelve -l
@@ -459,11 +460,13 @@
   shelved as default
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg rebase -d 1 --config extensions.rebase=
+  rebasing 2:323bfa07f744 "xyz" (tip)
   merging x
-  saved backup bundle to $TESTTMP/shelverebase/.hg/strip-backup/323bfa07f744-backup.hg (glob)
+  saved backup bundle to $TESTTMP/shelverebase/.hg/strip-backup/323bfa07f744-78114325-backup.hg (glob)
   $ hg unshelve
   unshelving change 'default'
   rebasing shelved changes
+  rebasing 4:b8fefe789ed0 "changes to 'xyz'" (tip)
   $ hg status
   M z
 
@@ -490,6 +493,7 @@
   $ hg unshelve
   unshelving change 'default'
   rebasing shelved changes
+  rebasing 3:0cae6656c016 "changes to 'c'" (tip)
   $ hg status
   A d
 
@@ -503,6 +507,7 @@
   $ hg unshelve
   unshelving change 'default'
   rebasing shelved changes
+  rebasing 3:be58f65f55fb "changes to 'b'" (tip)
   $ hg status
   A d
 
@@ -600,6 +605,7 @@
   unshelving change 'default'
   temporarily committing pending changes (restore with 'hg unshelve --abort')
   rebasing shelved changes
+  rebasing 5:23b29cada8ba "changes to 'commit stuff'" (tip)
   merging f
   warning: conflicts during merge.
   merging f incomplete! (edit conflicts, then use 'hg resolve --mark')
@@ -639,6 +645,7 @@
   unshelving change 'default'
   temporarily committing pending changes (restore with 'hg unshelve --abort')
   rebasing shelved changes
+  rebasing 5:23b29cada8ba "changes to 'commit stuff'" (tip)
   $ hg st
   M a
   A f
@@ -654,6 +661,7 @@
   $ hg unshelve
   unshelving change 'default'
   rebasing shelved changes
+  rebasing 5:23b29cada8ba "changes to 'commit stuff'" (tip)
   merging f
   warning: conflicts during merge.
   merging f incomplete! (edit conflicts, then use 'hg resolve --mark')
@@ -695,6 +703,7 @@
   $ hg unshelve
   unshelving change 'default'
   rebasing shelved changes
+  rebasing 5:4b555fdb4e96 "changes to 'second'" (tip)
   merging a/a
   warning: conflicts during merge.
   merging a/a incomplete! (edit conflicts, then use 'hg resolve --mark')
@@ -709,6 +718,8 @@
   $ hg resolve -m a/a
   (no more unresolved files)
   $ hg unshelve -c
+  rebasing 5:4b555fdb4e96 "changes to 'second'" (tip)
+  note: rebase of 5:4b555fdb4e96 created no changes to commit
   unshelve of 'default' complete
   $ hg diff
   $ hg status
--- a/tests/test-status-color.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-status-color.t	Sat Jan 17 18:28:30 2015 -0800
@@ -1,7 +1,9 @@
-  $ echo "[extensions]" >> $HGRCPATH
-  $ echo "color=" >> $HGRCPATH
-  $ echo "[color]" >> $HGRCPATH
-  $ echo "mode=ansi" >> $HGRCPATH
+  $ cat <<EOF >> $HGRCPATH
+  > [extensions]
+  > color =
+  > [color]
+  > mode = ansi
+  > EOF
 Terminfo codes compatibility fix
   $ echo "color.none=0" >> $HGRCPATH
 
--- a/tests/test-status-rev.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-status-rev.t	Sat Jan 17 18:28:30 2015 -0800
@@ -1,156 +1,162 @@
 Tests of 'hg status --rev <rev>' to make sure status between <rev> and '.' get
 combined correctly with the dirstate status.
 
-Sets up a history for a number of files where the filename describes the file's
-history. The first two letters of the filename describe the first two commits;
-the third letter describes the dirstate for the file. For example, a file called
-'amr' was added in the first commit, modified in the second and then removed in
-the dirstate.
+  $ hg init
 
-These codes are used for commits:
-x: does not exist
-a: added
-c: clean
-m: modified
-r: removed
+First commit
 
-These codes are used for dirstate:
-d: in dirstate, but deleted from disk
-f: removed from dirstate, but file exists (forgotten)
-r: removed from dirstate and disk
-q: added, but deleted from disk (q for q-rious?)
-u: not in dirstate, but file exists (unknown)
+  $ python $TESTDIR/generate-working-copy-states.py state 2 1
+  $ hg addremove --similarity 0
+  adding content1_content1_content1-tracked
+  adding content1_content1_content1-untracked
+  adding content1_content1_content3-tracked
+  adding content1_content1_content3-untracked
+  adding content1_content1_missing-tracked
+  adding content1_content1_missing-untracked
+  adding content1_content2_content1-tracked
+  adding content1_content2_content1-untracked
+  adding content1_content2_content2-tracked
+  adding content1_content2_content2-untracked
+  adding content1_content2_content3-tracked
+  adding content1_content2_content3-untracked
+  adding content1_content2_missing-tracked
+  adding content1_content2_missing-untracked
+  adding content1_missing_content1-tracked
+  adding content1_missing_content1-untracked
+  adding content1_missing_content3-tracked
+  adding content1_missing_content3-untracked
+  adding content1_missing_missing-tracked
+  adding content1_missing_missing-untracked
+  $ hg commit -m first
 
-  $ hg init
-  $ touch .hgignore
-  $ hg add .hgignore
-  $ hg commit -m initial
-
-First letter: first commit
+Second commit
 
-  $ echo a >acc
-  $ echo a >acd
-  $ echo a >acf
-  $ echo a >acm
-  $ echo a >acr
-  $ echo a >amc
-  $ echo a >amd
-  $ echo a >amf
-  $ echo a >amm
-  $ echo a >amr
-  $ echo a >ara
-  $ echo a >arq
-  $ echo a >aru
-  $ hg commit -Aqm first
-
-Second letter: second commit
+  $ python $TESTDIR/generate-working-copy-states.py state 2 2
+  $ hg addremove --similarity 0
+  removing content1_missing_content1-tracked
+  removing content1_missing_content1-untracked
+  removing content1_missing_content3-tracked
+  removing content1_missing_content3-untracked
+  removing content1_missing_missing-tracked
+  removing content1_missing_missing-untracked
+  adding missing_content2_content2-tracked
+  adding missing_content2_content2-untracked
+  adding missing_content2_content3-tracked
+  adding missing_content2_content3-untracked
+  adding missing_content2_missing-tracked
+  adding missing_content2_missing-untracked
+  $ hg commit -m second
 
-  $ echo b >xad
-  $ echo b >xaf
-  $ echo b >xam
-  $ echo b >xar
-  $ echo b >amc
-  $ echo b >amd
-  $ echo b >amf
-  $ echo b >amm
-  $ echo b >amr
-  $ hg rm ara
-  $ hg rm arq
-  $ hg rm aru
-  $ hg commit -Aqm second
-
-Third letter: dirstate
+Working copy
 
-  $ echo c >acm
-  $ echo c >amm
-  $ echo c >xam
-  $ echo c >ara && hg add ara
-  $ echo c >arq && hg add arq && rm arq
-  $ echo c >aru
-  $ hg rm amr
-  $ hg rm acr
-  $ hg rm xar
-  $ rm acd
-  $ rm amd
-  $ rm xad
-  $ hg forget acf
-  $ hg forget amf
-  $ hg forget xaf
-  $ touch xxu
+  $ python $TESTDIR/generate-working-copy-states.py state 2 wc
+  $ hg addremove --similarity 0
+  adding content1_missing_content1-tracked
+  adding content1_missing_content1-untracked
+  adding content1_missing_content3-tracked
+  adding content1_missing_content3-untracked
+  adding content1_missing_missing-tracked
+  adding content1_missing_missing-untracked
+  adding missing_missing_content3-tracked
+  adding missing_missing_content3-untracked
+  adding missing_missing_missing-tracked
+  adding missing_missing_missing-untracked
+  $ hg forget *_*_*-untracked
+  $ rm *_*_missing-*
+
+Status compared to the parent of the working copy, i.e. the dirstate status
 
-Status compared to one revision back
+  $ hg status -A --rev 1 'glob:missing_content2_content3-tracked'
+  M missing_content2_content3-tracked
+  $ hg status -A --rev 1 'glob:missing_content2_content2-tracked'
+  C missing_content2_content2-tracked
+  $ hg status -A --rev 1 'glob:missing_missing_content3-tracked'
+  A missing_missing_content3-tracked
+  $ hg status -A --rev 1 'glob:missing_missing_content3-untracked'
+  ? missing_missing_content3-untracked
+  $ hg status -A --rev 1 'glob:missing_content2_*-untracked'
+  R missing_content2_content2-untracked
+  R missing_content2_content3-untracked
+  R missing_content2_missing-untracked
+  $ hg status -A --rev 1 'glob:missing_*_missing-tracked'
+  ! missing_content2_missing-tracked
+  ! missing_missing_missing-tracked
+#if windows
+  $ hg status -A --rev 1 'glob:missing_missing_missing-untracked'
+  missing_missing_missing-untracked: The system cannot find the file specified
+#else
+  $ hg status -A --rev 1 'glob:missing_missing_missing-untracked'
+  missing_missing_missing-untracked: No such file or directory
+#endif
+
+Status between first and second commit. Should ignore dirstate status.
 
-  $ hg status -A --rev 1 acc
-  C acc
-BROKEN: file appears twice; should be '!'
-  $ hg status -A --rev 1 acd
-  ! acd
-  C acd
-  $ hg status -A --rev 1 acf
-  R acf
-  $ hg status -A --rev 1 acm
-  M acm
-  $ hg status -A --rev 1 acr
-  R acr
-  $ hg status -A --rev 1 amc
-  M amc
-BROKEN: file appears twice; should be '!'
-  $ hg status -A --rev 1 amd
-  ! amd
-  C amd
-  $ hg status -A --rev 1 amf
-  R amf
-  $ hg status -A --rev 1 amm
-  M amm
-  $ hg status -A --rev 1 amr
-  R amr
-  $ hg status -A --rev 1 ara
-  M ara
-BROKEN: file appears twice; should be '!'
-  $ hg status -A --rev 1 arq
-  R arq
-  ! arq
-  $ hg status -A --rev 1 aru
-  R aru
-  $ hg status -A --rev 1 xad
-  ! xad
-  $ hg status -A --rev 1 xaf
-  $ hg status -A --rev 1 xam
-  A xam
-  $ hg status -A --rev 1 xar
-  $ hg status -A --rev 1 xxu
-  ? xxu
+  $ hg status -A --rev 0:1 'glob:content1_content2_*'
+  M content1_content2_content1-tracked
+  M content1_content2_content1-untracked
+  M content1_content2_content2-tracked
+  M content1_content2_content2-untracked
+  M content1_content2_content3-tracked
+  M content1_content2_content3-untracked
+  M content1_content2_missing-tracked
+  M content1_content2_missing-untracked
+  $ hg status -A --rev 0:1 'glob:content1_content1_*'
+  C content1_content1_content1-tracked
+  C content1_content1_content1-untracked
+  C content1_content1_content3-tracked
+  C content1_content1_content3-untracked
+  C content1_content1_missing-tracked
+  C content1_content1_missing-untracked
+  $ hg status -A --rev 0:1 'glob:missing_content2_*'
+  A missing_content2_content2-tracked
+  A missing_content2_content2-untracked
+  A missing_content2_content3-tracked
+  A missing_content2_content3-untracked
+  A missing_content2_missing-tracked
+  A missing_content2_missing-untracked
+  $ hg status -A --rev 0:1 'glob:content1_missing_*'
+  R content1_missing_content1-tracked
+  R content1_missing_content1-untracked
+  R content1_missing_content3-tracked
+  R content1_missing_content3-untracked
+  R content1_missing_missing-tracked
+  R content1_missing_missing-untracked
+  $ hg status -A --rev 0:1 'glob:missing_missing_*'
+
+Status compared to one revision back, checking that the dirstate status
+is correctly combined with the inter-revision status
 
-Status compared to two revisions back
-
-  $ hg status -A --rev 0 acc
-  A acc
-  $ hg status -A --rev 0 acd
-  ! acd
-BROKEN: file exists, so should be listed (as '?')
-  $ hg status -A --rev 0 acf
-  $ hg status -A --rev 0 acm
-  A acm
-  $ hg status -A --rev 0 acr
-  $ hg status -A --rev 0 amc
-  A amc
-  $ hg status -A --rev 0 amd
-  ! amd
-BROKEN: file exists, so should be listed (as '?')
-  $ hg status -A --rev 0 amf
-  $ hg status -A --rev 0 amm
-  A amm
-  $ hg status -A --rev 0 amr
-  $ hg status -A --rev 0 ara
-  A ara
-  $ hg status -A --rev 0 arq
-  ! arq
-  $ hg status -A --rev 0 aru
-  ? aru
-  $ hg status -A --rev 0 xad
-  ! xad
-BROKEN: file exists, so should be listed (as '?')
-  $ hg status -A --rev 0 xaf
-  $ hg status -A --rev 0 xam
-  A xam
-  $ hg status -A --rev 0 xar
+  $ hg status -A --rev 0 'glob:content1_*_content[23]-tracked'
+  M content1_content1_content3-tracked
+  M content1_content2_content2-tracked
+  M content1_content2_content3-tracked
+  M content1_missing_content3-tracked
+  $ hg status -A --rev 0 'glob:content1_*_content1-tracked'
+  C content1_content1_content1-tracked
+  C content1_content2_content1-tracked
+  C content1_missing_content1-tracked
+  $ hg status -A --rev 0 'glob:missing_*_content?-tracked'
+  A missing_content2_content2-tracked
+  A missing_content2_content3-tracked
+  A missing_missing_content3-tracked
+BROKEN: missing_content2_content[23]-untracked exist, so should be listed
+  $ hg status -A --rev 0 'glob:missing_*_content?-untracked'
+  ? missing_missing_content3-untracked
+  $ hg status -A --rev 0 'glob:content1_*_*-untracked'
+  R content1_content1_content1-untracked
+  R content1_content1_content3-untracked
+  R content1_content1_missing-untracked
+  R content1_content2_content1-untracked
+  R content1_content2_content2-untracked
+  R content1_content2_content3-untracked
+  R content1_content2_missing-untracked
+  R content1_missing_content1-untracked
+  R content1_missing_content3-untracked
+  R content1_missing_missing-untracked
+  $ hg status -A --rev 0 'glob:*_*_missing-tracked'
+  ! content1_content1_missing-tracked
+  ! content1_content2_missing-tracked
+  ! content1_missing_missing-tracked
+  ! missing_content2_missing-tracked
+  ! missing_missing_missing-tracked
+  $ hg status -A --rev 0 'glob:missing_*_missing-untracked'
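
The generated filenames used in this test (and in the test-revert changes
earlier) describe each file's history: the three underscore-separated fields
give the content at the first commit, at the second commit, and in the working
copy ('missing' meaning the file does not exist there), and the
-tracked/-untracked suffix gives the intended dirstate status. A small decoding
helper, purely illustrative and not part of generate-working-copy-states.py:

    def describe(filename):
        # e.g. 'content1_content2_missing-tracked' -> first commit has
        # content1, second commit has content2, the file is deleted from
        # the working copy, and the dirstate still tracks it.
        states, tracking = filename.rsplit('-', 1)
        first, second, wc = states.split('_')
        return {'first commit': first, 'second commit': second,
                'working copy': wc, 'dirstate': tracking}

    print(describe('missing_content2_content3-untracked'))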
--- a/tests/test-strip.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-strip.t	Sat Jan 17 18:28:30 2015 -0800
@@ -187,6 +187,30 @@
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     a
   
+  $ hg up -C 4
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ hg parents
+  changeset:   4:264128213d29
+  tag:         tip
+  parent:      1:ef3a871183d7
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     c
+  
+  $ hg --config experimental.bundle2-exp=True --config experimental.strip-bundle2-version=02 --traceback strip 4
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  saved backup bundle to $TESTTMP/test/.hg/strip-backup/264128213d29-0b39d6bf-backup.hg (glob)
+  $ hg parents
+  changeset:   1:ef3a871183d7
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     b
+  
+  $ hg debugbundle .hg/strip-backup/*
+  Stream params: {}
+  b2x:changegroup -- "{'version': '02'}"
+      264128213d290d868c54642d13aeaa3675551a78
+  $ restore
 
   $ hg up -C 2
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -462,7 +486,7 @@
   $ echo b > b
   $ echo d > d
   $ hg strip --keep tip
-  saved backup bundle to $TESTTMP/test/.hg/strip-backup/*-backup.hg (glob)
+  saved backup bundle to $TESTTMP/test/.hg/strip-backup/57e364c8a475-4cfed93c-backup.hg (glob)
   $ hg status
   M b
   ! bar
@@ -547,3 +571,25 @@
   
   (use "hg strip -h" to show more help)
   [255]
+
+  $ cd ..
+
+Verify bundles don't get overwritten:
+
+  $ hg init doublebundle
+  $ cd doublebundle
+  $ touch a
+  $ hg commit -Aqm a
+  $ touch b
+  $ hg commit -Aqm b
+  $ hg strip -r 0
+  0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+  saved backup bundle to $TESTTMP/doublebundle/.hg/strip-backup/3903775176ed-e68910bd-backup.hg (glob)
+  $ ls .hg/strip-backup
+  3903775176ed-e68910bd-backup.hg
+  $ hg pull -q -r 3903775176ed .hg/strip-backup/3903775176ed-e68910bd-backup.hg
+  $ hg strip -r 0
+  saved backup bundle to $TESTTMP/doublebundle/.hg/strip-backup/3903775176ed-54390173-backup.hg (glob)
+  $ ls .hg/strip-backup
+  3903775176ed-54390173-backup.hg
+  3903775176ed-e68910bd-backup.hg
--- a/tests/test-subrepo-deep-nested-change.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-subrepo-deep-nested-change.t	Sat Jan 17 18:28:30 2015 -0800
@@ -106,10 +106,72 @@
   $ hg --config extensions.largefiles=! add sub1/sub2/test.txt
   $ mkdir sub1/sub2/folder
   $ echo 'subfolder' > sub1/sub2/folder/test.txt
-  $ hg --config extensions.largefiles=! add sub1/sub2/folder/test.txt
-  $ hg ci -Sm "add test.txt"
+  $ hg ci -ASm "add test.txt"
+  adding sub1/sub2/folder/test.txt
   committing subrepository sub1
   committing subrepository sub1/sub2 (glob)
+
+.. but first take a detour through some deep removal testing
+
+  $ hg remove -S -I 're:.*.txt' .
+  removing sub1/sub2/folder/test.txt (glob)
+  removing sub1/sub2/test.txt (glob)
+  $ hg status -S
+  R sub1/sub2/folder/test.txt
+  R sub1/sub2/test.txt
+  $ hg update -Cq
+  $ hg remove -I 're:.*.txt' sub1
+  $ hg status -S
+  $ hg remove sub1/sub2/folder/test.txt
+  $ hg remove sub1/.hgsubstate
+  $ hg status -S
+  R sub1/.hgsubstate
+  R sub1/sub2/folder/test.txt
+  $ hg update -Cq
+  $ touch sub1/foo
+  $ hg forget sub1/sub2/folder/test.txt
+  $ rm sub1/sub2/test.txt
+
+Test relative path printing + subrepos
+  $ mkdir -p foo/bar
+  $ cd foo
+  $ touch bar/abc
+  $ hg addremove -S ..
+  adding ../sub1/sub2/folder/test.txt (glob)
+  removing ../sub1/sub2/test.txt (glob)
+  adding ../sub1/foo (glob)
+  adding bar/abc (glob)
+  $ cd ..
+  $ hg status -S
+  A foo/bar/abc
+  A sub1/foo
+  R sub1/sub2/test.txt
+  $ hg update -Cq
+  $ touch sub1/sub2/folder/bar
+  $ hg addremove sub1/sub2
+  adding sub1/sub2/folder/bar (glob)
+  $ hg status -S
+  A sub1/sub2/folder/bar
+  ? foo/bar/abc
+  ? sub1/foo
+  $ hg update -Cq
+  $ hg addremove sub1
+  adding sub1/sub2/folder/bar (glob)
+  adding sub1/foo (glob)
+  $ hg update -Cq
+  $ rm sub1/sub2/folder/test.txt
+  $ rm sub1/sub2/test.txt
+  $ hg ci -ASm "remove test.txt"
+  adding sub1/sub2/folder/bar
+  removing sub1/sub2/folder/test.txt
+  removing sub1/sub2/test.txt
+  adding sub1/foo
+  adding foo/bar/abc
+  committing subrepository sub1
+  committing subrepository sub1/sub2 (glob)
+  $ hg rollback -q
+  $ hg up -Cq
+
   $ hg --config extensions.largefiles=! archive -S ../archive_all
   $ find ../archive_all | sort
   ../archive_all
@@ -261,4 +323,33 @@
   $ hg --config extensions.largefiles= archive -S -I 'sub/sub2/.hglf/large.bin' ../archive_lf
   $ find ../archive_lf 2> /dev/null | sort
 
+  $ cat >> $HGRCPATH <<EOF
+  > [extensions]
+  > largefiles=
+  > [largefiles]
+  > patterns=glob:**.dat
+  > EOF
+
+Test forget through a deep subrepo with the largefiles extension, both a
+largefile and a normal file.  Then a largefile that hasn't been committed yet.
+  $ touch sub1/sub2/untracked.txt
+  $ touch sub1/sub2/large.dat
+  $ hg forget sub1/sub2/large.bin sub1/sub2/test.txt sub1/sub2/untracked.txt
+  not removing sub1/sub2/untracked.txt: file is already untracked (glob)
+  [1]
+  $ hg add --large --dry-run -v sub1/sub2/untracked.txt
+  adding sub1/sub2/untracked.txt as a largefile (glob)
+  $ hg add --large -v sub1/sub2/untracked.txt
+  adding sub1/sub2/untracked.txt as a largefile (glob)
+  $ hg add --normal -v sub1/sub2/large.dat
+  adding sub1/sub2/large.dat (glob)
+  $ hg forget -v sub1/sub2/untracked.txt
+  removing sub1/sub2/untracked.txt (glob)
+  $ hg status -S
+  A sub1/sub2/large.dat
+  R sub1/sub2/large.bin
+  R sub1/sub2/test.txt
+  ? foo/bar/abc
+  ? sub1/sub2/untracked.txt
+
   $ cd ..
--- a/tests/test-subrepo-git.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-subrepo-git.t	Sat Jan 17 18:28:30 2015 -0800
@@ -10,6 +10,7 @@
   $ GIT_COMMITTER_NAME="$GIT_AUTHOR_NAME"; export GIT_COMMITTER_NAME
   $ GIT_COMMITTER_EMAIL="$GIT_AUTHOR_EMAIL"; export GIT_COMMITTER_EMAIL
   $ GIT_COMMITTER_DATE="$GIT_AUTHOR_DATE"; export GIT_COMMITTER_DATE
+  $ GIT_CONFIG_NOSYSTEM=1; export GIT_CONFIG_NOSYSTEM
 
 root hg repo
 
@@ -103,6 +104,15 @@
   $ echo ggg >> s/g
   $ hg status --subrepos
   M s/g
+  $ hg diff --subrepos
+  diff --git a/s/g b/s/g
+  index 089258f..85341ee 100644
+  --- a/s/g
+  +++ b/s/g
+  @@ -1,2 +1,3 @@
+   g
+   gg
+  +ggg (no-eol)
   $ hg commit --subrepos -m ggg
   committing subrepository s
   $ hg debugsub
@@ -119,7 +129,10 @@
   3 files updated, 0 files merged, 0 files removed, 0 files unresolved
 
   $ cd ../tb/s
+  $ hg status --subrepos
   $ echo f > f
+  $ hg status --subrepos
+  ? s/f
   $ git add f
   $ cd ..
 
@@ -422,6 +435,7 @@
   $ hg status -S
   M s/g
   A s/f1
+  ? s/f2
   $ ls s
   f
   f1
@@ -430,6 +444,8 @@
   $ hg update --clean
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg status -S
+  ? s/f1
+  ? s/f2
   $ ls s
   f
   f1
@@ -658,4 +674,131 @@
   checking out detached HEAD in subrepo s
   check out a git branch if you intend to make changes
 
+check differences made by the most recent change
+  $ cd s
+  $ cat > foobar << EOF
+  > woopwoop
+  > 
+  > foo
+  > bar
+  > EOF
+  $ git add foobar
   $ cd ..
+
+  $ hg diff --subrepos
+  diff --git a/s/foobar b/s/foobar
+  new file mode 100644
+  index 0000000..8a5a5e2
+  --- /dev/null
+  +++ b/s/foobar
+  @@ -0,0 +1,4 @@
+  +woopwoop
+  +
+  +foo
+  +bar (no-eol)
+
+  $ hg commit --subrepos -m "Added foobar"
+  committing subrepository s
+  created new head
+
+  $ hg diff -c . --subrepos --nodates
+  diff -r af6d2edbb0d3 -r 255ee8cf690e .hgsubstate
+  --- a/.hgsubstate
+  +++ b/.hgsubstate
+  @@ -1,1 +1,1 @@
+  -32a343883b74769118bb1d3b4b1fbf9156f4dddc s
+  +fd4dbf828a5b2fcd36b2bcf21ea773820970d129 s
+  diff --git a/s/foobar b/s/foobar
+  new file mode 100644
+  index 0000000..8a5a5e2
+  --- /dev/null
+  +++ b/s/foobar
+  @@ -0,0 +1,4 @@
+  +woopwoop
+  +
+  +foo
+  +bar (no-eol)
+
+check output when only diffing the subrepository
+  $ hg diff -c . --subrepos s
+  diff --git a/s/foobar b/s/foobar
+  new file mode 100644
+  index 0000000..8a5a5e2
+  --- /dev/null
+  +++ b/s/foobar
+  @@ -0,0 +1,4 @@
+  +woopwoop
+  +
+  +foo
+  +bar (no-eol)
+
+check output when diffing something else
+  $ hg diff -c . --subrepos .hgsubstate --nodates
+  diff -r af6d2edbb0d3 -r 255ee8cf690e .hgsubstate
+  --- a/.hgsubstate
+  +++ b/.hgsubstate
+  @@ -1,1 +1,1 @@
+  -32a343883b74769118bb1d3b4b1fbf9156f4dddc s
+  +fd4dbf828a5b2fcd36b2bcf21ea773820970d129 s
+
+add new changes, including whitespace
+  $ cd s
+  $ cat > foobar << EOF
+  > woop    woop
+  > 
+  > foo
+  > bar
+  > EOF
+  $ echo foo > barfoo
+  $ git add barfoo
+  $ cd ..
+
+  $ hg diff --subrepos --ignore-all-space
+  diff --git a/s/barfoo b/s/barfoo
+  new file mode 100644
+  index 0000000..257cc56
+  --- /dev/null
+  +++ b/s/barfoo
+  @@ -0,0 +1 @@
+  +foo (no-eol)
+  $ hg diff --subrepos s/foobar
+  diff --git a/s/foobar b/s/foobar
+  index 8a5a5e2..bd5812a 100644
+  --- a/s/foobar
+  +++ b/s/foobar
+  @@ -1,4 +1,4 @@
+  -woopwoop
+  +woop    woop
+   
+   foo
+   bar (no-eol)
+
+execute a diffstat
+the output contains a regex, because git 1.7.10 and 1.7.11
+ change the amount of whitespace
+  $ hg diff --subrepos --stat
+  \s*barfoo |\s*1 + (re)
+  \s*foobar |\s*2 +- (re)
+   2 files changed, 2 insertions\(\+\), 1 deletions?\(-\) \(no-eol\) (re)
+
+ensure adding include/exclude ignores the subrepo
+  $ hg diff --subrepos -I s/foobar
+  $ hg diff --subrepos -X s/foobar
+
+revert the subrepository
+  $ hg revert --all
+  reverting subrepo ../gitroot (glob)
+
+  $ hg status --subrepos
+  ? s/barfoo
+  ? s/foobar.orig
+
+  $ mv s/foobar.orig s/foobar
+
+  $ hg revert --no-backup s
+  reverting subrepo ../gitroot (glob)
+
+  $ hg status --subrepos
+  ? s/barfoo
+
+  $ cd ..
--- a/tests/test-subrepo.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-subrepo.t	Sat Jan 17 18:28:30 2015 -0800
@@ -1324,7 +1324,7 @@
   $ echo phasecheck4 >>   t/t
   $ hg commit -S -m phasecheck4
   committing subrepository s
-  committing subrepository s/ss
+  committing subrepository s/ss (glob)
   warning: changes are committed in secret phase from subrepository ss
   committing subrepository t
   warning: changes are committed in secret phase from subrepository s
--- a/tests/test-symlinks.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-symlinks.t	Sat Jan 17 18:28:30 2015 -0800
@@ -3,12 +3,18 @@
 == tests added in 0.7 ==
 
   $ hg init test-symlinks-0.7; cd test-symlinks-0.7;
-  $ touch foo; ln -s foo bar;
+  $ touch foo; ln -s foo bar; ln -s nonexistent baz
+
+import with add and addremove -- symlink walking should _not_ screw up.
 
-import with addremove -- symlink walking should _not_ screwup.
-
+  $ hg add
+  adding bar
+  adding baz
+  adding foo
+  $ hg forget bar baz foo
   $ hg addremove
   adding bar
+  adding baz
   adding foo
 
 commit -- the symlink should _not_ appear added to dir state
--- a/tests/test-tag.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-tag.t	Sat Jan 17 18:28:30 2015 -0800
@@ -479,7 +479,7 @@
   4f3e9b90005b68b4d8a3f4355cedc302a8364f5c t3
   79505d5360b07e3e79d1052e347e73c02b8afa5b t3
 
-check that the merge tried to minimize the diff witht he first merge parent
+check that the merge tried to minimize the diff with the first merge parent
 
   $ hg diff --git -r 'p1()' .hgtags
   diff --git a/.hgtags b/.hgtags
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-tools.t	Sat Jan 17 18:28:30 2015 -0800
@@ -0,0 +1,107 @@
+Tests of the file helper tool
+
+  $ f -h
+  ?sage: f [options] [filenames] (glob)
+  
+  ?ptions: (glob)
+    -h, --help            show this help message and exit
+    -t, --type            show file type (file or directory)
+    -m, --mode            show file mode
+    -l, --links           show number of links
+    -s, --size            show size of file
+    -n NEWER, --newer=NEWER
+                          check if file is newer (or same)
+    -r, --recurse         recurse into directories
+    -S, --sha1            show sha1 hash of the content
+    -M, --md5             show md5 hash of the content
+    -D, --dump            dump file content
+    -H, --hexdump         hexdump file content
+    -B BYTES, --bytes=BYTES
+                          number of characters to dump
+    -L LINES, --lines=LINES
+                          number of lines to dump
+    -q, --quiet           no default output
+
+  $ mkdir dir
+  $ cd dir
+
+  $ f --size
+  size=0
+
+  $ echo hello | f --md5 --size
+  size=6, md5=b1946ac92492d2347c6235b4d2611184
+
+  $ f foo
+  foo: file not found
+
+  $ echo foo > foo
+  $ f foo
+  foo:
+
+#if symlink
+  $ f foo --mode
+  foo: mode=644
+#endif
+
+  $ seq 10 > bar
+#if unix-permissions symlink
+  $ chmod +x bar
+  $ f bar --newer foo --mode --type --size --dump --links --bytes 7
+  bar: file, size=21, mode=755, links=1, newer than foo
+  >>>
+  1
+  2
+  3
+  4
+  <<< no trailing newline
+#endif
+
+#if unix-permissions
+  $ ln bar baz
+  $ f bar -n baz -l --hexdump -t --sha1 --lines=9 -B 20
+  bar: file, links=2, newer than baz, sha1=612ca68d0305c821750a
+  0000: 31 0a 32 0a 33 0a 34 0a 35 0a 36 0a 37 0a 38 0a |1.2.3.4.5.6.7.8.|
+  0010: 39 0a                                           |9.|
+  $ rm baz
+#endif
+
+#if unix-permissions symlink
+  $ ln -s yadda l
+  $ f . --recurse -MStmsB4
+  .: directory with 3 files, mode=755
+  ./bar: file, size=21, mode=755, md5=3b03, sha1=612c
+  ./foo: file, size=4, mode=644, md5=d3b0, sha1=f1d2
+  ./l: link, size=5, md5=2faa, sha1=af93
+#endif
+
+  $ f --quiet bar -DL 3
+  1
+  2
+  3
+
+  $ cd ..
+
+Yadda is a symlink
+#if symlink
+  $ f -qr dir -HB 17
+  dir: directory with 3 files
+  dir/bar:
+  0000: 31 0a 32 0a 33 0a 34 0a 35 0a 36 0a 37 0a 38 0a |1.2.3.4.5.6.7.8.|
+  0010: 39                                              |9|
+  dir/foo:
+  0000: 66 6f 6f 0a                                     |foo.|
+  dir/l:
+  0000: 79 61 64 64 61                                  |yadda|
+#else
+  $ f -qr dir -HB 17
+  dir: directory with 3 files
+  dir/bar: (glob)
+  0000: 31 0a 32 0a 33 0a 34 0a 35 0a 36 0a 37 0a 38 0a |1.2.3.4.5.6.7.8.|
+  0010: 39                                              |9|
+  dir/baz: (glob)
+  0000: 31 0a 32 0a 33 0a 34 0a 35 0a 36 0a 37 0a 38 0a |1.2.3.4.5.6.7.8.|
+  0010: 39                                              |9|
+  dir/foo: (glob)
+  0000: 66 6f 6f 0a                                     |foo.|
+#endif
+
--- a/tests/test-transplant.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-transplant.t	Sat Jan 17 18:28:30 2015 -0800
@@ -230,7 +230,8 @@
   (transplanted from e234d668f844e1b1a765f01db83a32c0c7bfa170)
   1  r2
   0  r1
-remote transplant
+remote transplant, and also test that transplant doesn't break with
+format-breaking diffopts
 
   $ hg clone -r 1 ../t ../remote
   adding changesets
@@ -240,7 +241,7 @@
   updating to branch default
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ cd ../remote
-  $ hg transplant --log -s ../t 2 4
+  $ hg --config diff.noprefix=True transplant --log -s ../t 2 4
   searching for changes
   applying 37a1297eb21b
   37a1297eb21b transplanted to c19cf0ccb069
@@ -767,6 +768,22 @@
   searching for changes
   applying 7a7d57e15850
   skipping emptied changeset 7a7d57e15850
+
+Test empty result in --continue
+
+  $ hg transplant -s ../binarysource 1
+  searching for changes
+  applying 645035761929
+  file b already exists
+  1 out of 1 hunks FAILED -- saving rejects to file b.rej
+  patch failed to apply
+  abort: fix up the merge and run hg transplant --continue
+  [255]
+  $ hg status
+  ? b.rej
+  $ hg transplant --continue
+  645035761929 skipped due to empty diff
+
   $ cd ..
 
 Explicitly kill daemons to let the test exit on Windows
--- a/tests/test-treediscovery.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-treediscovery.t	Sat Jan 17 18:28:30 2015 -0800
@@ -509,6 +509,7 @@
   "GET /?cmd=branches HTTP/1.1" 200 - x-hgarg-1:nodes=d8f638ac69e9ae8dea4f09f11d696546a912d961
   "GET /?cmd=between HTTP/1.1" 200 - x-hgarg-1:pairs=d8f638ac69e9ae8dea4f09f11d696546a912d961-d57206cc072a18317c1e381fb60aa31bd3401785
   "GET /?cmd=changegroupsubset HTTP/1.1" 200 - x-hgarg-1:bases=d8f638ac69e9ae8dea4f09f11d696546a912d961&heads=d8f638ac69e9ae8dea4f09f11d696546a912d961
+  "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases
   "GET /?cmd=capabilities HTTP/1.1" 200 -
   "GET /?cmd=heads HTTP/1.1" 200 -
   "GET /?cmd=branches HTTP/1.1" 200 - x-hgarg-1:nodes=d8f638ac69e9ae8dea4f09f11d696546a912d961
--- a/tests/test-up-local-change.t	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-up-local-change.t	Sat Jan 17 18:28:30 2015 -0800
@@ -83,10 +83,6 @@
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     1
   
-  $ hg --debug merge
-  abort: nothing to merge
-  (use 'hg update' instead)
-  [255]
   $ hg parents
   changeset:   0:c19d34741b0a
   user:        test
@@ -170,52 +166,6 @@
   abort: uncommitted changes
   (commit and merge, or update --clean to discard changes)
   [255]
-  $ hg --debug merge
-  abort: uncommitted changes
-  (use 'hg status' to list changes)
-  [255]
-  $ hg --debug merge -f
-    searching for copies back to rev 1
-    unmatched files new in both:
-     b
-  resolving manifests
-   branchmerge: True, force: True, partial: False
-   ancestor: c19d34741b0a, local: 1e71731e6fbb+, remote: 83c51d0caff4
-   preserving a for resolve of a
-   preserving b for resolve of b
-   a: versions differ -> m
-  updating: a 1/2 files (50.00%)
-  picked tool 'true' for a (binary False symlink False)
-  merging a
-  my a@1e71731e6fbb+ other a@83c51d0caff4 ancestor a@c19d34741b0a
-   b: versions differ -> m
-  updating: b 2/2 files (100.00%)
-  picked tool 'true' for b (binary False symlink False)
-  merging b
-  my b@1e71731e6fbb+ other b@83c51d0caff4 ancestor b@000000000000
-  0 files updated, 2 files merged, 0 files removed, 0 files unresolved
-  (branch merge, don't forget to commit)
-  $ hg parents
-  changeset:   1:1e71731e6fbb
-  user:        test
-  date:        Thu Jan 01 00:00:00 1970 +0000
-  summary:     2
-  
-  changeset:   2:83c51d0caff4
-  tag:         tip
-  parent:      0:c19d34741b0a
-  user:        test
-  date:        Thu Jan 01 00:00:00 1970 +0000
-  summary:     3
-  
-  $ hg diff --nodates
-  diff -r 1e71731e6fbb a
-  --- a/a
-  +++ b/a
-  @@ -1,1 +1,1 @@
-  -a2
-  +abc
-
 
 test conflicting untracked files
 
--- a/tests/test-walkrepo.py	Sat Jan 10 21:31:59 2015 +0900
+++ b/tests/test-walkrepo.py	Sat Jan 17 18:28:30 2015 -0800
@@ -41,7 +41,7 @@
     if len(sub2set & reposet) != 1:
         print "sub2set = %r" % (sub2set,)
         print "reposet = %r" % (reposet,)
-        print "sub1set and reposet should have exactly one path in common."
+        print "sub2set and reposet should have exactly one path in common."
     sub3 = pjoin('.', 'circle', 'top1')
     if sym and sub3 not in reposet:
         print "reposet = %r" % (reposet,)