merge default into stable for 2.2 code freeze

author:     Matt Mackall <mpm@selenic.com>
date:       Tue, 17 Apr 2012 17:56:36 -0500
branch:     stable
changeset:  16459:baa06fb69ece
parent:     16450:c9c8c9053119 (current diff)
parent:     16458:55982f62651f (diff)
child:      16460:d9e2f09d5488
--- a/Makefile	Mon Apr 16 11:48:15 2012 +0200
+++ b/Makefile	Tue Apr 17 17:56:36 2012 -0500
@@ -91,8 +91,10 @@
 
 i18n/hg.pot: $(PYFILES) $(DOCFILES)
 	$(PYTHON) i18n/hggettext mercurial/commands.py \
-	  hgext/*.py hgext/*/__init__.py mercurial/fileset.py mercurial/revset.py \
+	  hgext/*.py hgext/*/__init__.py \
+	  mercurial/fileset.py mercurial/revset.py \
 	  mercurial/templatefilters.py mercurial/templatekw.py \
+	  mercurial/filemerge.py \
 	  $(DOCFILES) > i18n/hg.pot
         # All strings marked for translation in Mercurial contain
         # ASCII characters only. But some files contain string
--- a/README	Mon Apr 16 11:48:15 2012 +0200
+++ b/README	Tue Apr 17 17:56:36 2012 -0500
@@ -11,5 +11,10 @@
  $ hg debuginstall # sanity-check setup
  $ hg              # see help
 
+Running without installing:
+
+ $ make local      # build for inplace usage
+ $ ./hg --version  # should show the latest version
+
 See http://mercurial.selenic.com/ for detailed installation
 instructions, platform-specific notes, and Mercurial user information.
--- a/contrib/check-code.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/contrib/check-code.py	Tue Apr 17 17:56:36 2012 -0500
@@ -47,6 +47,7 @@
     (r'\W\$?\(\([^\)\n]*\)\)', "don't use (()) or $(()), use 'expr'"),
     (r'^function', "don't use 'function', use old style"),
     (r'grep.*-q', "don't use 'grep -q', redirect to /dev/null"),
+    (r'sed.*-i', "don't use 'sed -i', use a temporary file"),
     (r'echo.*\\n', "don't use 'echo \\n', use printf"),
     (r'echo -n', "don't use 'echo -n', use printf"),
     (r'^diff.*-\w*N', "don't use 'diff -N'"),
@@ -124,6 +125,8 @@
     (r'\.has_key\b', "dict.has_key is not available in Python 3+"),
     (r'^\s*\t', "don't use tabs"),
     (r'\S;\s*\n', "semicolon"),
+    (r'[^_]_\("[^"]+"\s*%', "don't use % inside _()"),
+    (r"[^_]_\('[^']+'\s*%", "don't use % inside _()"),
     (r'\w,\w', "missing whitespace after ,"),
     (r'\w[+/*\-<>]\w', "missing whitespace in expression"),
     (r'^\s+\w+=\w+[^,)\n]$', "missing whitespace in assignment"),
@@ -169,7 +172,7 @@
      "missing whitespace around operator"),
     (r'\s(\+=|-=|!=|<>|<=|>=|<<=|>>=)\S',
      "missing whitespace around operator"),
-    (r'[^+=*/!<>&| -](\s=|=\s)[^= ]',
+    (r'[^^+=*/!<>&| -](\s=|=\s)[^= ]',
      "wrong whitespace around ="),
     (r'raise Exception', "don't raise generic exceptions"),
     (r' is\s+(not\s+)?["\'0-9-]', "object comparison with literal"),
@@ -224,7 +227,7 @@
     (r' ;', "no space before ;"),
     (r'\w+\* \w+', "use int *foo, not int* foo"),
     (r'\([^\)]+\) \w+', "use (int)foo, not (int) foo"),
-    (r'\S+ (\+\+|--)', "use foo++, not foo ++"),
+    (r'\w+ (\+\+|--)', "use foo++, not foo ++"),
     (r'\w,\w', "missing whitespace after ,"),
     (r'^[^#]\w[+/*]\w', "missing whitespace in expression"),
     (r'^#\s+\w', "use #foo, not # foo"),
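
A note on the new "don't use % inside _()" checks (illustration only, not
part of the changeset): applying % inside the translation call looks up the
already-formatted string in the message catalog, so no msgid matches and the
string comes back untranslated. The preferred form translates the literal
msgid first and formats afterwards. A minimal sketch, with made-up names:

  from mercurial.i18n import _

  count = 3
  # flagged by the new check: the formatted result has no catalog entry
  msg = _("found %d heads\n" % count)
  # preferred: translate the literal msgid, then apply the formatting
  msg = _("found %d heads\n") % count
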
--- a/contrib/perf.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/contrib/perf.py	Tue Apr 17 17:56:36 2012 -0500
@@ -1,7 +1,7 @@
 # perf.py - performance test routines
 '''helper extension to measure performance'''
 
-from mercurial import cmdutil, scmutil, match, commands
+from mercurial import cmdutil, scmutil, util, match, commands
 import time, os, sys
 
 def timer(func, title=None):
@@ -46,7 +46,7 @@
     timer(lambda: sum(map(len, repo.status())))
 
 def perfheads(ui, repo):
-    timer(lambda: len(repo.changelog.heads()))
+    timer(lambda: len(repo.changelog.headrevs()))
 
 def perftags(ui, repo):
     import mercurial.changelog, mercurial.manifest
@@ -79,13 +79,20 @@
         repo.manifest._cache = None
     timer(d)
 
+def perfchangeset(ui, repo, rev):
+    n = repo[rev].node()
+    def d():
+        c = repo.changelog.read(n)
+        #repo.changelog._cache = None
+    timer(d)
+
 def perfindex(ui, repo):
     import mercurial.revlog
     mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg
     n = repo["tip"].node()
     def d():
-        repo.invalidate()
-        repo[n]
+        cl = mercurial.revlog.revlog(repo.sopener, "00changelog.i")
+        cl.rev(n)
     timer(d)
 
 def perfstartup(ui, repo):
@@ -104,6 +111,36 @@
 def perflookup(ui, repo, rev):
     timer(lambda: len(repo.lookup(rev)))
 
+def perfnodelookup(ui, repo, rev):
+    import mercurial.revlog
+    mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg
+    n = repo[rev].node()
+    def d():
+        cl = mercurial.revlog.revlog(repo.sopener, "00changelog.i")
+        cl.rev(n)
+    timer(d)
+
+def perfnodelookup(ui, repo, rev):
+    import mercurial.revlog
+    mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg
+    n = repo[rev].node()
+    cl = mercurial.revlog.revlog(repo.sopener, "00changelog.i")
+    # behave somewhat consistently across internal API changes
+    if util.safehasattr(cl, 'clearcaches'):
+        clearcaches = cl.clearcaches
+    elif util.safehasattr(cl, '_nodecache'):
+        from mercurial.node import nullid, nullrev
+        def clearcaches():
+            cl._nodecache = {nullid: nullrev}
+            cl._nodepos = None
+    else:
+        def clearcaches():
+            pass
+    def d():
+        cl.rev(n)
+        clearcaches()
+    timer(d)
+
 def perflog(ui, repo, **opts):
     ui.pushbuffer()
     timer(lambda: commands.log(ui, repo, rev=[], date='', user='',
@@ -117,6 +154,25 @@
                                ' {author|person}: {desc|firstline}\n'))
     ui.popbuffer()
 
+def perfcca(ui, repo):
+    timer(lambda: scmutil.casecollisionauditor(ui, False, repo[None]))
+
+def perffncacheload(ui, repo):
+    from mercurial import scmutil, store
+    s = store.store(set(['store','fncache']), repo.path, scmutil.opener)
+    def d():
+        s.fncache._load()
+    timer(d)
+
+def perffncachewrite(ui, repo):
+    from mercurial import scmutil, store
+    s = store.store(set(['store','fncache']), repo.path, scmutil.opener)
+    s.fncache._load()
+    def d():
+        s.fncache._dirty = True
+        s.fncache.write()
+    timer(d)
+
 def perfdiffwd(ui, repo):
     """Profile diff of working directory changes"""
     options = {
@@ -145,12 +201,17 @@
     timer(d)
 
 cmdtable = {
+    'perfcca': (perfcca, []),
+    'perffncacheload': (perffncacheload, []),
+    'perffncachewrite': (perffncachewrite, []),
     'perflookup': (perflookup, []),
+    'perfnodelookup': (perfnodelookup, []),
     'perfparents': (perfparents, []),
     'perfstartup': (perfstartup, []),
     'perfstatus': (perfstatus, []),
     'perfwalk': (perfwalk, []),
     'perfmanifest': (perfmanifest, []),
+    'perfchangeset': (perfchangeset, []),
     'perfindex': (perfindex, []),
     'perfheads': (perfheads, []),
     'perftags': (perftags, []),
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/plan9/9diff	Tue Apr 17 17:56:36 2012 -0500
@@ -0,0 +1,42 @@
+#!/bin/rc
+# 9diff - Mercurial extdiff wrapper for diff(1)
+
+rfork e
+
+fn getfiles{
+	cd $1 && \
+	for(f in `{du -as | awk '{print $2}'})
+		test -f $f && echo `{cleanname $f}
+}
+
+fn usage{
+	echo >[1=2] usage: 9diff [diff options] parent child root
+	exit usage
+}
+
+opts=()
+while(~ $1 -*){
+	opts=($opts $1)
+	shift
+}
+if(! ~ $#* 3)
+	usage
+
+# extdiff will set the parent and child to a single file if there is
+# only one change. If there are multiple changes, directories will be
+# set. diff(1) does not cope particularly with directories; instead we
+# do the recursion ourselves and diff each file individually.
+if(test -f $1)
+	diff $opts $1 $2
+if not{
+	# extdiff will create a snapshot of the working copy to prevent
+	# conflicts during the diff. We circumvent this behavior by
+	# diffing against the repository root to produce plumbable
+	# output. This is antisocial.
+	for(f in `{sort -u <{getfiles $1} <{getfiles $2}}){
+		file1=$1/$f; test -f $file1 || file1=/dev/null
+		file2=$3/$f; test -f $file2 || file2=/dev/null
+		diff $opts $file1 $file2
+	}
+}
+exit ''
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/plan9/README	Tue Apr 17 17:56:36 2012 -0500
@@ -0,0 +1,39 @@
+Mercurial for Plan 9 from Bell Labs
+===================================
+
+This directory contains support for Mercurial on Plan 9 from Bell Labs
+platforms. It is assumed that the version of Python running on these
+systems supports the ANSI/POSIX Environment (APE). At the time of this
+writing, the bichued/python port is the most commonly installed version
+of Python on these platforms. If a native port of Python is ever made,
+some minor modification will need to be made to support some of the more
+esoteric requirements of the platform rather than those currently made
+(cf. posix.py).
+
+By default, installations will have the factotum extension enabled; this
+extension permits factotum(4) to act as an authentication agent for
+HTTP repositories. Additionally, an extdiff command named 9diff is
+enabled which generates diff(1) compatible output suitable for use with
+the plumber(4).
+
+Commit messages are plumbed using E if no editor is defined; users must
+update the plumbed file to continue, otherwise the hg process must be
+interrupted.
+
+Some work remains with regard to documentation. Section 5 manual page
+references for hgignore and hgrc need to be re-numbered to section 6 (file
+formats) and a new man page writer should be written to support the
+Plan 9 man macro set. Until these issues can be resolved, manual pages
+are elided from the installation.
+
+Basic install:
+
+  % mk install      # do a system-wide install
+  % hg debuginstall # sanity-check setup
+  % hg              # see help
+
+A proto(2) file is included in this directory as an example of how a
+binary distribution could be packaged, ostensibly with contrib(1).
+
+See http://mercurial.selenic.com/ for detailed installation
+instructions, platform-specific notes, and Mercurial user information.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/plan9/hgrc.d/9diff.rc	Tue Apr 17 17:56:36 2012 -0500
@@ -0,0 +1,7 @@
+# The 9diff extdiff command generates diff(1) compatible output
+# suitable for use with the plumber(4).
+[extensions]
+extdiff =
+
+[extdiff]
+9diff = 9diff -cm $parent $child $root
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/plan9/hgrc.d/factotum.rc	Tue Apr 17 17:56:36 2012 -0500
@@ -0,0 +1,4 @@
+# The factotum extension permits factotum(4) to act as an
+# authentication agent for HTTP repositories.
+[extensions]
+factotum =
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/plan9/mkfile	Tue Apr 17 17:56:36 2012 -0500
@@ -0,0 +1,37 @@
+APE=/sys/src/ape
+<$APE/config
+
+PYTHON=python
+PYTHONBIN=/rc/bin
+SH=ape/psh
+
+PURE=--pure
+ROOT=../..
+
+# This is slightly underhanded; Plan 9 does not support GNU gettext nor
+# does it support dynamically loaded extension modules. We work around
+# this by calling build_py and build_scripts directly; this avoids
+# additional platform hacks in setup.py.
+build:VQ:
+	@{
+		cd $ROOT
+		$SH -c '$PYTHON setup.py $PURE build_py build_scripts'
+	}
+
+clean:VQ:
+	@{
+		cd $ROOT
+		$SH -c '$PYTHON setup.py $PURE clean --all'
+	}
+
+install:VQ:	build
+	@{
+		cd $ROOT
+		$SH -c '$PYTHON setup.py $PURE install \
+			--install-scripts $PYTHONBIN \
+			--skip-build' \
+			--force
+	}
+	mkdir -p /lib/mercurial/hgrc.d
+	dircp hgrc.d /lib/mercurial/hgrc.d
+	cp 9diff /rc/bin
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/plan9/proto	Tue Apr 17 17:56:36 2012 -0500
@@ -0,0 +1,23 @@
+lib	- sys sys
+	mercurial	- sys sys
+		hgrc.d	- sys sys
+			9diff.rc	- sys sys
+			factotum.rc	- sys sys
+rc	- sys sys
+	bin	- sys sys
+		9diff	- sys sys
+		hg	- sys sys
+sys	- sys sys
+	lib	- sys sys
+		python	- sys sys
+			lib	- sys sys
+				python2.5	- sys sys
+					site-packages	- sys sys
+						hgext	- sys sys
+							+	- sys sys
+						mercurial	- sys sys
+							+	- sys sys
+						mercurial-VERSION-py2.5.egg-info	- sys sys
+	src	- sys sys
+		cmd	- sys sys
+			mercurial	- sys sys
--- a/contrib/shrink-revlog.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/contrib/shrink-revlog.py	Tue Apr 17 17:56:36 2012 -0500
@@ -16,7 +16,7 @@
 # e.g. by comparing "before" and "after" states of random changesets
 # (maybe: export before, shrink, export after, diff).
 
-import os, tempfile, errno
+import os, errno
 from mercurial import revlog, transaction, node, util, scmutil
 from mercurial import changegroup
 from mercurial.i18n import _
@@ -191,7 +191,6 @@
                            'will corrupt your repository'))
 
     ui.write(_('shrinking %s\n') % indexfn)
-    prefix = os.path.basename(indexfn)[:-1]
     tmpindexfn = util.mktempcopy(indexfn, emptyok=True)
 
     r1 = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), indexfn)
@@ -270,19 +269,23 @@
         lock.release()
 
     if not opts.get('dry_run'):
-        ui.write(_('note: old revlog saved in:\n'
-                   '  %s\n'
-                   '  %s\n'
-                   '(You can delete those files when you are satisfied that your\n'
-                   'repository is still sane.  '
-                   'Running \'hg verify\' is strongly recommended.)\n')
-                 % (oldindexfn, olddatafn))
+        ui.write(
+            _('note: old revlog saved in:\n'
+              '  %s\n'
+              '  %s\n'
+              '(You can delete those files when you are satisfied that your\n'
+              'repository is still sane.  '
+              'Running \'hg verify\' is strongly recommended.)\n')
+            % (oldindexfn, olddatafn))
 
 cmdtable = {
     'shrink': (shrink,
-               [('', 'revlog', '', _('index (.i) file of the revlog to shrink')),
-                ('n', 'dry-run', None, _('do not shrink, simulate only')),
-                ('', 'sort', 'reversepostorder', 'name of sort algorithm to use'),
+               [('', 'revlog', '',
+                 _('the revlog to shrink (.i)')),
+                ('n', 'dry-run', None,
+                 _('do not shrink, simulate only')),
+                ('', 'sort', 'reversepostorder',
+                 _('name of sort algorithm to use')),
                 ],
                _('hg shrink [--revlog PATH]'))
 }
--- a/contrib/zsh_completion	Mon Apr 16 11:48:15 2012 +0200
+++ b/contrib/zsh_completion	Tue Apr 17 17:56:36 2012 -0500
@@ -882,7 +882,7 @@
 
 _hg_cmd_qfinish() {
   _arguments -s -w : $_hg_global_opts \
-  '(--all -a)'{-a,--all}'[finish all patches]' \
+  '(--applied -a)'{-a,--applied}'[finish all applied patches]' \
   '*:patch:_hg_qapplied'
 }
 
--- a/hgext/bugzilla.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/hgext/bugzilla.py	Tue Apr 17 17:56:36 2012 -0500
@@ -1,7 +1,7 @@
 # bugzilla.py - bugzilla integration for mercurial
 #
 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
-# Copyright 2011 Jim Hague <jim.hague@acm.org>
+# Copyright 2011-2 Jim Hague <jim.hague@acm.org>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
@@ -12,7 +12,8 @@
 that refer to bugs by Bugzilla ID are seen. The comment is formatted using
 the Mercurial template mechanism.
 
-The hook does not change bug status.
+The bug references can optionally include an update for Bugzilla of the
+hours spent working on the bug. Bugs can also be marked fixed.
 
 Three basic modes of access to Bugzilla are provided:
 
@@ -32,13 +33,13 @@
 MySQL user and password to have full access rights to the Bugzilla
 database. For these reasons this access mode is now considered
 deprecated, and will not be updated for new Bugzilla versions going
-forward.
+forward. Only adding comments is supported in this access mode.
 
 Access via XMLRPC needs a Bugzilla username and password to be specified
 in the configuration. Comments are added under that username. Since the
 configuration must be readable by all Mercurial users, it is recommended
 that the rights of that user are restricted in Bugzilla to the minimum
-necessary to add comments.
+necessary to add comments. Marking bugs fixed requires Bugzilla 4.0 and later.
 
 Access via XMLRPC/email uses XMLRPC to query Bugzilla, but sends
 email to the Bugzilla email interface to submit comments to bugs.
@@ -46,7 +47,8 @@
 user, so the comment appears to come from the Mercurial user. In the event
 that the Mercurial user email is not recognised by Bugzilla as a Bugzilla
 user, the email associated with the Bugzilla username used to log into
-Bugzilla is used instead as the source of the comment.
+Bugzilla is used instead as the source of the comment. Marking bugs fixed
+works on all supported Bugzilla versions.
 
 Configuration items common to all access modes:
 
@@ -62,11 +64,34 @@
                      including 2.18.
 
 bugzilla.regexp
-  Regular expression to match bug IDs in changeset commit message.
-  Must contain one "()" group. The default expression matches ``Bug
-  1234``, ``Bug no. 1234``, ``Bug number 1234``, ``Bugs 1234,5678``,
-  ``Bug 1234 and 5678`` and variations thereof. Matching is case
-  insensitive.
+  Regular expression to match bug IDs for update in changeset commit message.
+  It must contain one "()" named group ``<ids>`` containing the bug
+  IDs separated by non-digit characters. It may also contain
+  a named group ``<hours>`` with a floating-point number giving the
+  hours worked on the bug. If no named groups are present, the first
+  "()" group is assumed to contain the bug IDs, and work time is not
+  updated. The default expression matches ``Bug 1234``, ``Bug no. 1234``,
+  ``Bug number 1234``, ``Bugs 1234,5678``, ``Bug 1234 and 5678`` and
+  variations thereof, followed by an hours number prefixed by ``h`` or
+  ``hours``, e.g. ``hours 1.5``. Matching is case insensitive.
+
+bugzilla.fixregexp
+  Regular expression to match bug IDs for marking fixed in changeset
+  commit message. This must contain a "()" named group ``<ids>`` containing
+  the bug IDs separated by non-digit characters. It may also contain
+  a named group ``<hours>`` with a floating-point number giving the
+  hours worked on the bug. If no named groups are present, the first
+  "()" group is assumed to contain the bug IDs, and work time is not
+  updated. The default expression matches ``Fixes 1234``, ``Fixes bug 1234``,
+  ``Fixes bugs 1234,5678``, ``Fixes 1234 and 5678`` and
+  variations thereof, followed by an hours number prefixed by ``h`` or
+  ``hours``, e.g. ``hours 1.5``. Matching is case insensitive.
+
+bugzilla.fixstatus
+  The status to set a bug to when marking fixed. Default ``RESOLVED``.
+
+bugzilla.fixresolution
+  The resolution to set a bug to when marking fixed. Default ``FIXED``.
 
 bugzilla.style
   The style file to use when formatting comments.
@@ -274,24 +299,35 @@
         return user
 
     # Methods to be implemented by access classes.
-    def filter_real_bug_ids(self, ids):
-        '''remove bug IDs that do not exist in Bugzilla from set.'''
+    #
+    # 'bugs' is a dict keyed on bug id, where values are a dict holding
+    # updates to bug state. Recognised dict keys are:
+    #
+    # 'hours': Value, float containing work hours to be updated.
+    # 'fix':   If key present, bug is to be marked fixed. Value ignored.
+
+    def filter_real_bug_ids(self, bugs):
+        '''remove bug IDs that do not exist in Bugzilla from bugs.'''
         pass
 
-    def filter_cset_known_bug_ids(self, node, ids):
-        '''remove bug IDs where node occurs in comment text from set.'''
+    def filter_cset_known_bug_ids(self, node, bugs):
+        '''remove bug IDs where node occurs in comment text from bugs.'''
         pass
 
-    def add_comment(self, bugid, text, committer):
-        '''add comment to bug.
+    def updatebug(self, bugid, newstate, text, committer):
+        '''update the specified bug. Add comment text and set new states.
 
         If possible add the comment as being from the committer of
         the changeset. Otherwise use the default Bugzilla user.
         '''
         pass
 
-    def notify(self, ids, committer):
-        '''Force sending of Bugzilla notification emails.'''
+    def notify(self, bugs, committer):
+        '''Force sending of Bugzilla notification emails.
+
+        Only required if the access method does not trigger notification
+        emails automatically.
+        '''
         pass
 
 # Bugzilla via direct access to MySQL database.
@@ -353,30 +389,31 @@
             raise util.Abort(_('unknown database schema'))
         return ids[0][0]
 
-    def filter_real_bug_ids(self, ids):
-        '''filter not-existing bug ids from set.'''
+    def filter_real_bug_ids(self, bugs):
+        '''filter not-existing bugs from set.'''
         self.run('select bug_id from bugs where bug_id in %s' %
-                 bzmysql.sql_buglist(ids))
-        return set([c[0] for c in self.cursor.fetchall()])
+                 bzmysql.sql_buglist(bugs.keys()))
+        existing = [id for (id,) in self.cursor.fetchall()]
+        for id in bugs.keys():
+            if id not in existing:
+                self.ui.status(_('bug %d does not exist\n') % id)
+                del bugs[id]
 
-    def filter_cset_known_bug_ids(self, node, ids):
+    def filter_cset_known_bug_ids(self, node, bugs):
         '''filter bug ids that already refer to this changeset from set.'''
-
         self.run('''select bug_id from longdescs where
                     bug_id in %s and thetext like "%%%s%%"''' %
-                 (bzmysql.sql_buglist(ids), short(node)))
+                 (bzmysql.sql_buglist(bugs.keys()), short(node)))
         for (id,) in self.cursor.fetchall():
             self.ui.status(_('bug %d already knows about changeset %s\n') %
                            (id, short(node)))
-            ids.discard(id)
-        return ids
+            del bugs[id]
 
-    def notify(self, ids, committer):
+    def notify(self, bugs, committer):
         '''tell bugzilla to send mail.'''
-
         self.ui.status(_('telling bugzilla to send mail:\n'))
         (user, userid) = self.get_bugzilla_user(committer)
-        for id in ids:
+        for id in bugs.keys():
             self.ui.status(_('  bug %s\n') % id)
             cmdfmt = self.ui.config('bugzilla', 'notify', self.default_notify)
             bzdir = self.ui.config('bugzilla', 'bzdir', '/var/www/html/bugzilla')
@@ -435,9 +472,14 @@
                                  (user, defaultuser))
         return (user, userid)
 
-    def add_comment(self, bugid, text, committer):
-        '''add comment to bug. try adding comment as committer of
-        changeset, otherwise as default bugzilla user.'''
+    def updatebug(self, bugid, newstate, text, committer):
+        '''update bug state with comment text.
+
+        Try adding comment as committer of changeset, otherwise as
+        default bugzilla user.'''
+        if len(newstate) > 0:
+            self.ui.warn(_("Bugzilla/MySQL cannot update bug state\n"))
+
         (user, userid) = self.get_bugzilla_user(committer)
         now = time.strftime('%Y-%m-%d %H:%M:%S')
         self.run('''insert into longdescs
@@ -565,7 +607,14 @@
         user = self.ui.config('bugzilla', 'user', 'bugs')
         passwd = self.ui.config('bugzilla', 'password')
 
+        self.fixstatus = self.ui.config('bugzilla', 'fixstatus', 'RESOLVED')
+        self.fixresolution = self.ui.config('bugzilla', 'fixresolution',
+                                            'FIXED')
+
         self.bzproxy = xmlrpclib.ServerProxy(bzweb, self.transport(bzweb))
+        ver = self.bzproxy.Bugzilla.version()['version'].split('.')
+        self.bzvermajor = int(ver[0])
+        self.bzverminor = int(ver[1])
         self.bzproxy.User.login(dict(login=user, password=passwd))
 
     def transport(self, uri):
@@ -576,36 +625,64 @@
 
     def get_bug_comments(self, id):
         """Return a string with all comment text for a bug."""
-        c = self.bzproxy.Bug.comments(dict(ids=[id]))
+        c = self.bzproxy.Bug.comments(dict(ids=[id], include_fields=['text']))
         return ''.join([t['text'] for t in c['bugs'][str(id)]['comments']])
 
-    def filter_real_bug_ids(self, ids):
-        res = set()
-        bugs = self.bzproxy.Bug.get(dict(ids=sorted(ids), permissive=True))
-        for bug in bugs['bugs']:
-            res.add(bug['id'])
-        return res
+    def filter_real_bug_ids(self, bugs):
+        probe = self.bzproxy.Bug.get(dict(ids=sorted(bugs.keys()),
+                                          include_fields=[],
+                                          permissive=True))
+        for badbug in probe['faults']:
+            id = badbug['id']
+            self.ui.status(_('bug %d does not exist\n') % id)
+            del bugs[id]
 
-    def filter_cset_known_bug_ids(self, node, ids):
-        for id in sorted(ids):
+    def filter_cset_known_bug_ids(self, node, bugs):
+        for id in sorted(bugs.keys()):
             if self.get_bug_comments(id).find(short(node)) != -1:
                 self.ui.status(_('bug %d already knows about changeset %s\n') %
                                (id, short(node)))
-                ids.discard(id)
-        return ids
+                del bugs[id]
+
+    def updatebug(self, bugid, newstate, text, committer):
+        args = {}
+        if 'hours' in newstate:
+            args['work_time'] = newstate['hours']
 
-    def add_comment(self, bugid, text, committer):
-        self.bzproxy.Bug.add_comment(dict(id=bugid, comment=text))
+        if self.bzvermajor >= 4:
+            args['ids'] = [bugid]
+            args['comment'] = {'body' : text}
+            args['status'] = self.fixstatus
+            args['resolution'] = self.fixresolution
+            self.bzproxy.Bug.update(args)
+        else:
+            if 'fix' in newstate:
+                self.ui.warn(_("Bugzilla/XMLRPC needs Bugzilla 4.0 or later "
+                               "to mark bugs fixed\n"))
+            args['id'] = bugid
+            args['comment'] = text
+            self.bzproxy.Bug.add_comment(args)
 
 class bzxmlrpcemail(bzxmlrpc):
     """Read data from Bugzilla via XMLRPC, send updates via email.
 
     Advantages of sending updates via email:
       1. Comments can be added as any user, not just logged in user.
-      2. Bug statuses and other fields not accessible via XMLRPC can
-        be updated. This is not currently used.
+      2. Bug statuses or other fields not accessible via XMLRPC can
+         potentially be updated.
+
+    There is no XMLRPC function to change bug status before Bugzilla
+    4.0, so bugs cannot be marked fixed via XMLRPC before Bugzilla 4.0.
+    But bugs can be marked fixed via email from 3.4 onwards.
     """
 
+    # The email interface changes subtly between 3.4 and 3.6. In 3.4,
+    # in-email fields are specified as '@<fieldname> = <value>'. In
+    # 3.6 this becomes '@<fieldname> <value>'. And fieldname @bug_id
+    # in 3.4 becomes @id in 3.6. 3.6 and 4.0 both maintain backwards
+    # compatibility, but rather than rely on this use the new format for
+    # 4.0 onwards.
+
     def __init__(self, ui):
         bzxmlrpc.__init__(self, ui)
 
@@ -614,6 +691,14 @@
             raise util.Abort(_("configuration 'bzemail' missing"))
         mail.validateconfig(self.ui)
 
+    def makecommandline(self, fieldname, value):
+        if self.bzvermajor >= 4:
+            return "@%s %s" % (fieldname, str(value))
+        else:
+            if fieldname == "id":
+                fieldname = "bug_id"
+            return "@%s = %s" % (fieldname, str(value))
+
     def send_bug_modify_email(self, bugid, commands, comment, committer):
         '''send modification message to Bugzilla bug via email.
 
@@ -634,8 +719,9 @@
                 raise util.Abort(_("default bugzilla user %s email not found") %
                                  user)
         user = matches['users'][0]['email']
+        commands.append(self.makecommandline("id", bugid))
 
-        text = "\n".join(commands) + "\n@bug_id = %d\n\n" % bugid + comment
+        text = "\n".join(commands) + "\n\n" + comment
 
         _charsets = mail._charsets(self.ui)
         user = mail.addressencode(self.ui, user, _charsets)
@@ -647,8 +733,14 @@
         sendmail = mail.connect(self.ui)
         sendmail(user, bzemail, msg.as_string())
 
-    def add_comment(self, bugid, text, committer):
-        self.send_bug_modify_email(bugid, [], text, committer)
+    def updatebug(self, bugid, newstate, text, committer):
+        cmds = []
+        if 'hours' in newstate:
+            cmds.append(self.makecommandline("work_time", newstate['hours']))
+        if 'fix' in newstate:
+            cmds.append(self.makecommandline("bug_status", self.fixstatus))
+            cmds.append(self.makecommandline("resolution", self.fixresolution))
+        self.send_bug_modify_email(bugid, cmds, text, committer)
 
 class bugzilla(object):
     # supported versions of bugzilla. different versions have
@@ -662,7 +754,13 @@
         }
 
     _default_bug_re = (r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
-                       r'((?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)')
+                       r'(?P<ids>(?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)'
+                       r'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?')
+
+    _default_fix_re = (r'fix(?:es)?\s*(?:bugs?\s*)?,?\s*'
+                       r'(?:nos?\.?|num(?:ber)?s?)?\s*'
+                       r'(?P<ids>(?:#?\d+\s*(?:,?\s*(?:and)?)?\s*)+)'
+                       r'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?')
 
     _bz = None
 
@@ -688,38 +786,76 @@
         return getattr(self.bz(), key)
 
     _bug_re = None
+    _fix_re = None
     _split_re = None
 
-    def find_bug_ids(self, ctx):
-        '''return set of integer bug IDs from commit comment.
+    def find_bugs(self, ctx):
+        '''return bugs dictionary created from commit comment.
 
-        Extract bug IDs from changeset comments. Filter out any that are
+        Extract bug info from changeset comments. Filter out any that are
         not known to Bugzilla, and any that already have a reference to
         the given changeset in their comments.
         '''
         if bugzilla._bug_re is None:
             bugzilla._bug_re = re.compile(
-                self.ui.config('bugzilla', 'regexp', bugzilla._default_bug_re),
-                re.IGNORECASE)
+                self.ui.config('bugzilla', 'regexp',
+                               bugzilla._default_bug_re), re.IGNORECASE)
+            bugzilla._fix_re = re.compile(
+                self.ui.config('bugzilla', 'fixregexp',
+                               bugzilla._default_fix_re), re.IGNORECASE)
             bugzilla._split_re = re.compile(r'\D+')
         start = 0
-        ids = set()
+        hours = 0.0
+        bugs = {}
+        bugmatch = bugzilla._bug_re.search(ctx.description(), start)
+        fixmatch = bugzilla._fix_re.search(ctx.description(), start)
         while True:
-            m = bugzilla._bug_re.search(ctx.description(), start)
-            if not m:
+            bugattribs = {}
+            if not bugmatch and not fixmatch:
                 break
+            if not bugmatch:
+                m = fixmatch
+            elif not fixmatch:
+                m = bugmatch
+            else:
+                if bugmatch.start() < fixmatch.start():
+                    m = bugmatch
+                else:
+                    m = fixmatch
             start = m.end()
-            for id in bugzilla._split_re.split(m.group(1)):
+            if m is bugmatch:
+                bugmatch = bugzilla._bug_re.search(ctx.description(), start)
+                if 'fix' in bugattribs:
+                    del bugattribs['fix']
+            else:
+                fixmatch = bugzilla._fix_re.search(ctx.description(), start)
+                bugattribs['fix'] = None
+
+            try:
+                ids = m.group('ids')
+            except IndexError:
+                ids = m.group(1)
+            try:
+                hours = float(m.group('hours'))
+                bugattribs['hours'] = hours
+            except IndexError:
+                pass
+            except TypeError:
+                pass
+            except ValueError:
+                self.ui.status(_("%s: invalid hours\n") % m.group('hours'))
+
+            for id in bugzilla._split_re.split(ids):
                 if not id:
                     continue
-                ids.add(int(id))
-        if ids:
-            ids = self.filter_real_bug_ids(ids)
-        if ids:
-            ids = self.filter_cset_known_bug_ids(ctx.node(), ids)
-        return ids
+                bugs[int(id)] = bugattribs
+        if bugs:
+            self.filter_real_bug_ids(bugs)
+        if bugs:
+            self.filter_cset_known_bug_ids(ctx.node(), bugs)
+        return bugs
 
-    def update(self, bugid, ctx):
+    def update(self, bugid, newstate, ctx):
         '''update bugzilla bug with reference to changeset.'''
 
         def webroot(root):
@@ -752,7 +888,7 @@
                root=self.repo.root,
                webroot=webroot(self.repo.root))
         data = self.ui.popbuffer()
-        self.add_comment(bugid, data, util.email(ctx.user()))
+        self.updatebug(bugid, newstate, data, util.email(ctx.user()))
 
 def hook(ui, repo, hooktype, node=None, **kwargs):
     '''add comment to bugzilla for each changeset that refers to a
@@ -764,11 +900,11 @@
     try:
         bz = bugzilla(ui, repo)
         ctx = repo[node]
-        ids = bz.find_bug_ids(ctx)
-        if ids:
-            for id in ids:
-                bz.update(id, ctx)
-            bz.notify(ids, util.email(ctx.user()))
+        bugs = bz.find_bugs(ctx)
+        if bugs:
+            for bug in bugs:
+                bz.update(bug, bugs[bug], ctx)
+            bz.notify(bugs, util.email(ctx.user()))
     except Exception, e:
         raise util.Abort(_('Bugzilla error: %s') % e)
 
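
For orientation (a sketch, not part of the changeset): with the hours and
"fix" support added above, a repository owner might wire up the hook and the
new keys roughly as below. The URL, user and password are placeholders, and
fixstatus/fixresolution are shown at their documented defaults:

  [extensions]
  bugzilla =

  [hooks]
  # run the hook on changesets arriving at this repository
  incoming.bugzilla = python:hgext.bugzilla.hook

  [bugzilla]
  version = xmlrpc
  bzurl = http://bugzilla.example.org/
  user = bugs@example.org
  password = secret
  fixstatus = RESOLVED
  fixresolution = FIXED
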
--- a/hgext/convert/bzr.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/hgext/convert/bzr.py	Tue Apr 17 17:56:36 2012 -0500
@@ -23,7 +23,7 @@
 
 try:
     # bazaar imports
-    from bzrlib import branch, revision, errors
+    from bzrlib import bzrdir, revision, errors
     from bzrlib.revisionspec import RevisionSpec
 except ImportError:
     pass
@@ -42,14 +42,17 @@
 
         try:
             # access bzrlib stuff
-            branch
+            bzrdir
         except NameError:
             raise NoRepo(_('Bazaar modules could not be loaded'))
 
         path = os.path.abspath(path)
         self._checkrepotype(path)
-        self.branch = branch.Branch.open(path)
-        self.sourcerepo = self.branch.repository
+        try:
+            self.sourcerepo = bzrdir.BzrDir.open(path).open_repository()
+        except errors.NoRepositoryPresent:
+            raise NoRepo(_('%s does not look like a Bazaar repository')
+                         % path)
         self._parentids = {}
 
     def _checkrepotype(self, path):
@@ -88,16 +91,28 @@
     def after(self):
         self.sourcerepo.unlock()
 
+    def _bzrbranches(self):
+        return self.sourcerepo.find_branches(using=True)
+
     def getheads(self):
         if not self.rev:
-            return [self.branch.last_revision()]
-        try:
-            r = RevisionSpec.from_string(self.rev)
-            info = r.in_history(self.branch)
-        except errors.BzrError:
-            raise util.Abort(_('%s is not a valid revision in current branch')
-                             % self.rev)
-        return [info.rev_id]
+            # Set using=True to avoid nested repositories (see issue3254)
+            heads = sorted([b.last_revision() for b in self._bzrbranches()])
+        else:
+            revid = None
+            for branch in self._bzrbranches():
+                try:
+                    r = RevisionSpec.from_string(self.rev)
+                    info = r.in_history(branch)
+                except errors.BzrError:
+                    pass
+                revid = info.rev_id
+            if revid is None:
+                raise util.Abort(_('%s is not a valid revision') % self.rev)
+            heads = [revid]
+        # Empty repositories return 'null:', which cannot be retrieved
+        heads = [h for h in heads if h != 'null:']
+        return heads
 
     def getfile(self, name, rev):
         revtree = self.sourcerepo.revision_tree(rev)
@@ -140,20 +155,24 @@
             parents = self._filterghosts(rev.parent_ids)
             self._parentids[version] = parents
 
+        branch = self.recode(rev.properties.get('branch-nick', u'default'))
+        if branch == 'trunk':
+            branch = 'default'
         return commit(parents=parents,
                 date='%d %d' % (rev.timestamp, -rev.timezone),
                 author=self.recode(rev.committer),
-                # bzr returns bytestrings or unicode, depending on the content
                 desc=self.recode(rev.message),
+                branch=branch,
                 rev=version)
 
     def gettags(self):
-        if not self.branch.supports_tags():
-            return {}
-        tagdict = self.branch.tags.get_tag_dict()
         bytetags = {}
-        for name, rev in tagdict.iteritems():
-            bytetags[self.recode(name)] = rev
+        for branch in self._bzrbranches():
+            if not branch.supports_tags():
+                return {}
+            tagdict = branch.tags.get_tag_dict()
+            for name, rev in tagdict.iteritems():
+                bytetags[self.recode(name)] = rev
         return bytetags
 
     def getchangedfiles(self, rev, i):
@@ -231,7 +250,11 @@
                 continue
 
             # we got unicode paths, need to convert them
-            path, topath = [self.recode(part) for part in paths]
+            path, topath = paths
+            if path is not None:
+                path = self.recode(path)
+            if topath is not None:
+                topath = self.recode(topath)
             seen.add(path or topath)
 
             if topath is None:
@@ -260,19 +283,3 @@
         parentmap = self.sourcerepo.get_parent_map(ids)
         parents = tuple([parent for parent in ids if parent in parentmap])
         return parents
-
-    def recode(self, s, encoding=None):
-        """This version of recode tries to encode unicode to bytecode,
-        and preferably using the UTF-8 codec.
-        Other types than Unicode are silently returned, this is by
-        intention, e.g. the None-type is not going to be encoded but instead
-        just passed through
-        """
-        if not encoding:
-            encoding = self.encoding or 'utf-8'
-
-        if isinstance(s, unicode):
-            return s.encode(encoding)
-        else:
-            # leave it alone
-            return s
--- a/hgext/convert/git.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/hgext/convert/git.py	Tue Apr 17 17:56:36 2012 -0500
@@ -145,20 +145,30 @@
 
     def gettags(self):
         tags = {}
+        alltags = {}
         fh = self.gitopen('git ls-remote --tags "%s"' % self.path)
         prefix = 'refs/tags/'
+
+        # Build complete list of tags, both annotated and bare ones
         for line in fh:
             line = line.strip()
-            if not line.endswith("^{}"):
-                continue
             node, tag = line.split(None, 1)
             if not tag.startswith(prefix):
                 continue
-            tag = tag[len(prefix):-3]
-            tags[tag] = node
+            alltags[tag[len(prefix):]] = node
         if fh.close():
             raise util.Abort(_('cannot read tags from %s') % self.path)
 
+        # Filter out tag objects for annotated tag refs
+        for tag in alltags:
+            if tag.endswith('^{}'):
+                tags[tag[:-3]] = alltags[tag]
+            else:
+                if tag + '^{}' in alltags:
+                    continue
+                else:
+                    tags[tag] = alltags[tag]
+
         return tags
 
     def getchangedfiles(self, version, i):
--- a/hgext/eol.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/hgext/eol.py	Tue Apr 17 17:56:36 2012 -0500
@@ -252,7 +252,6 @@
 hook = checkheadshook
 
 def preupdate(ui, repo, hooktype, parent1, parent2):
-    #print "preupdate for %s: %s -> %s" % (repo.root, parent1, parent2)
     repo.loadeol([parent1])
     return False
 
@@ -270,7 +269,6 @@
 
 def reposetup(ui, repo):
     uisetup(repo.ui)
-    #print "reposetup for", repo.root
 
     if not repo.local():
         return
@@ -315,7 +313,7 @@
                             # again since the new .hgeol file might no
                             # longer match a file it matched before
                             self.dirstate.normallookup(f)
-                    # Touch the cache to update mtime.
+                    # Create or touch the cache to update mtime
                     self.opener("eol.cache", "w").close()
                     wlock.release()
                 except error.LockUnavailable:
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hgext/factotum.py	Tue Apr 17 17:56:36 2012 -0500
@@ -0,0 +1,120 @@
+# factotum.py - Plan 9 factotum integration for Mercurial
+#
+# Copyright (C) 2012 Steven Stallion <sstallion@gmail.com>
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the
+# Free Software Foundation; either version 2 of the License, or (at your
+# option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
+# Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+'''http authentication with factotum
+
+This extension allows the factotum facility on Plan 9 from Bell Labs platforms
+to provide authentication information for HTTP access. Configuration entries
+specified in the auth section as well as authentication information provided
+in the repository URL are fully supported. If no prefix is specified, a value
+of ``*`` will be assumed.
+
+By default, keys are specified as::
+
+  proto=pass service=hg prefix=<prefix> user=<username> !password=<password>
+
+If the factotum extension is unable to read the required key, one will be
+requested interactively.
+
+A configuration section is available to customize runtime behavior. By
+default, these entries are::
+
+  [factotum]
+  mount = /mnt/factotum
+  path = /bin/auth/factotum
+  service = hg
+
+The mount entry defines the mount point for the factotum file service. The
+path entry defines the full path to the factotum binary. Lastly, the service
+entry controls the service name used when reading keys.
+
+'''
+
+from mercurial.i18n import _
+from mercurial.url import passwordmgr
+from mercurial import httpconnection, urllib2, util
+import os
+
+ERRMAX = 128
+
+def auth_getkey(self, params):
+    if not self.ui.interactive():
+        raise util.Abort(_('factotum not interactive'))
+    if 'user=' not in params:
+        params = '%s user?' % params
+    params = '%s !password?' % params
+    os.system("%s -g '%s'" % (_path, params))
+
+def auth_getuserpasswd(self, getkey, params):
+    params = 'proto=pass %s' % params
+    while True:
+        fd = os.open('%s/rpc' % _mount, os.O_RDWR)
+        try:
+            try:
+                os.write(fd, 'start %s' % params)
+                l = os.read(fd, ERRMAX).split()
+                if l[0] == 'ok':
+                    os.write(fd, 'read')
+                    l = os.read(fd, ERRMAX).split()
+                    if l[0] == 'ok':
+                        return l[1:]
+            except (OSError, IOError):
+                raise util.Abort(_('factotum not responding'))
+        finally:
+            os.close(fd)
+        getkey(self, params)
+
+def monkeypatch_method(cls):
+    def decorator(func):
+        setattr(cls, func.__name__, func)
+        return func
+    return decorator
+
+@monkeypatch_method(passwordmgr)
+def find_user_password(self, realm, authuri):
+    user, passwd = urllib2.HTTPPasswordMgrWithDefaultRealm.find_user_password(
+        self, realm, authuri)
+    if user and passwd:
+        self._writedebug(user, passwd)
+        return (user, passwd)
+
+    prefix = ''
+    res = httpconnection.readauthforuri(self.ui, authuri, user)
+    if res:
+        _, auth = res
+        prefix = auth.get('prefix')
+        user, passwd = auth.get('username'), auth.get('password')
+    if not user or not passwd:
+        if not prefix:
+            prefix = '*'
+        params = 'service=%s prefix=%s' % (_service, prefix)
+        if user:
+            params = '%s user=%s' % (params, user)
+        user, passwd = auth_getuserpasswd(self, auth_getkey, params)
+
+    self.add_password(realm, authuri, user, passwd)
+    self._writedebug(user, passwd)
+    return (user, passwd)
+
+def uisetup(ui):
+    global _mount
+    _mount = ui.config('factotum', 'mount', '/mnt/factotum')
+    global _path
+    _path = ui.config('factotum', 'path', '/bin/auth/factotum')
+    global _service
+    _service = ui.config('factotum', 'service', 'hg')
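
As an aside (not part of the changeset): the auth-section entries the
factotum docstring refers to are ordinary hgrc [auth] items; a hypothetical
entry the extension would consult for its prefix and username could look
like:

  [auth]
  example.prefix = https://hg.example.org/
  example.username = glenda
  example.schemes = https
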
--- a/hgext/graphlog.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/hgext/graphlog.py	Tue Apr 17 17:56:36 2012 -0500
@@ -13,11 +13,10 @@
 '''
 
 from mercurial.cmdutil import show_changeset
-from mercurial.commands import templateopts
 from mercurial.i18n import _
 from mercurial.node import nullrev
 from mercurial import cmdutil, commands, extensions, scmutil
-from mercurial import hg, util, graphmod
+from mercurial import hg, util, graphmod, templatekw, revset
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
@@ -237,69 +236,211 @@
         return (len(repo) - 1, 0)
 
 def check_unsupported_flags(pats, opts):
-    for op in ["follow_first", "copies", "newest_first"]:
+    for op in ["newest_first"]:
         if op in opts and opts[op]:
             raise util.Abort(_("-G/--graph option is incompatible with --%s")
                              % op.replace("_", "-"))
-    if pats and opts.get('follow'):
-        raise util.Abort(_("-G/--graph option is incompatible with --follow "
-                           "with file argument"))
+
+def _makefilematcher(repo, pats, followfirst):
+    # When displaying a revision with --patch --follow FILE, we have
+    # to know which file of the revision must be diffed. With
+    # --follow, we want the names of the ancestors of FILE in the
+    # revision, stored in "fcache". "fcache" is populated by
+    # reproducing the graph traversal already done by --follow revset
+    # and relating linkrevs to file names (which is not "correct" but
+    # good enough).
+    fcache = {}
+    fcacheready = [False]
+    pctx = repo['.']
+    wctx = repo[None]
 
-def revset(pats, opts):
-    """Return revset str built of revisions, log options and file patterns.
+    def populate():
+        for fn in pats:
+            for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
+                for c in i:
+                    fcache.setdefault(c.linkrev(), set()).add(c.path())
+
+    def filematcher(rev):
+        if not fcacheready[0]:
+            # Lazy initialization
+            fcacheready[0] = True
+            populate()
+        return scmutil.match(wctx, fcache.get(rev, []), default='path')
+
+    return filematcher
+
+def _makelogrevset(repo, pats, opts, revs):
+    """Return (expr, filematcher) where expr is a revset string built
+    from log options and file patterns or None. If --stat or --patch
+    are not passed filematcher is None. Otherwise it is a callable
+    taking a revision number and returning a match object filtering
+    the files to be detailed when displaying the revision.
     """
     opt2revset = {
-        'follow': (0, 'follow()'),
-        'no_merges': (0, 'not merge()'),
-        'only_merges': (0, 'merge()'),
-        'removed': (0, 'removes("*")'),
-        'date': (1, 'date($)'),
-        'branch': (2, 'branch($)'),
-        'exclude': (2, 'not file($)'),
-        'include': (2, 'file($)'),
-        'keyword': (2, 'keyword($)'),
-        'only_branch': (2, 'branch($)'),
-        'prune': (2, 'not ($ or ancestors($))'),
-        'user': (2, 'user($)'),
+        'no_merges':        ('not merge()', None),
+        'only_merges':      ('merge()', None),
+        '_ancestors':       ('ancestors(%(val)s)', None),
+        '_fancestors':      ('_firstancestors(%(val)s)', None),
+        '_descendants':     ('descendants(%(val)s)', None),
+        '_fdescendants':    ('_firstdescendants(%(val)s)', None),
+        '_matchfiles':      ('_matchfiles(%(val)s)', None),
+        'date':             ('date(%(val)r)', None),
+        'branch':           ('branch(%(val)r)', ' or '),
+        '_patslog':         ('filelog(%(val)r)', ' or '),
+        '_patsfollow':      ('follow(%(val)r)', ' or '),
+        '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
+        'keyword':          ('keyword(%(val)r)', ' or '),
+        'prune':            ('not (%(val)r or ancestors(%(val)r))', ' and '),
+        'user':             ('user(%(val)r)', ' or '),
         }
-    optrevset = []
-    revset = []
+
+    opts = dict(opts)
+    # follow or not follow?
+    follow = opts.get('follow') or opts.get('follow_first')
+    followfirst = opts.get('follow_first') and 1 or 0
+    # --follow with FILE behaviour depends on revs...
+    startrev = revs[0]
+    followdescendants = (len(revs) > 1 and revs[0] < revs[1]) and 1 or 0
+
+    # branch and only_branch are really aliases and must be handled at
+    # the same time
+    opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
+    opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
+    # pats/include/exclude are passed to match.match() directly in
+    # _matchfile() revset but walkchangerevs() builds its matcher with
+    # scmutil.match(). The difference is input pats are globbed on
+    # platforms without shell expansion (windows).
+    pctx = repo[None]
+    match, pats = scmutil.matchandpats(pctx, pats, opts)
+    slowpath = match.anypats() or (match.files() and opts.get('removed'))
+    if not slowpath:
+        for f in match.files():
+            if follow and f not in pctx:
+                raise util.Abort(_('cannot follow file not in parent '
+                                   'revision: "%s"') % f)
+            filelog = repo.file(f)
+            if not len(filelog):
+                # A zero count may be a directory or deleted file, so
+                # try to find matching entries on the slow path.
+                if follow:
+                    raise util.Abort(
+                        _('cannot follow nonexistent file: "%s"') % f)
+                slowpath = True
+    if slowpath:
+        # See cmdutil.walkchangerevs() slow path.
+        #
+        if follow:
+            raise util.Abort(_('can only follow copies/renames for explicit '
+                               'filenames'))
+        # pats/include/exclude cannot be represented as separate
+        # revset expressions as their filtering logic applies at file
+        # level. For instance "-I a -X a" matches a revision touching
+        # "a" and "b" while "file(a) and not file(b)" does
+        # not. Besides, filesets are evaluated against the working
+        # directory.
+        matchargs = ['r:', 'd:relpath']
+        for p in pats:
+            matchargs.append('p:' + p)
+        for p in opts.get('include', []):
+            matchargs.append('i:' + p)
+        for p in opts.get('exclude', []):
+            matchargs.append('x:' + p)
+        matchargs = ','.join(('%r' % p) for p in matchargs)
+        opts['_matchfiles'] = matchargs
+    else:
+        if follow:
+            fpats = ('_patsfollow', '_patsfollowfirst')
+            fnopats = (('_ancestors', '_fancestors'),
+                       ('_descendants', '_fdescendants'))
+            if pats:
+                # follow() revset interprets its file argument as a
+                # manifest entry, so use match.files(), not pats.
+                opts[fpats[followfirst]] = list(match.files())
+            else:
+                opts[fnopats[followdescendants][followfirst]] = str(startrev)
+        else:
+            opts['_patslog'] = list(pats)
+
+    filematcher = None
+    if opts.get('patch') or opts.get('stat'):
+        if follow:
+            filematcher = _makefilematcher(repo, pats, followfirst)
+        else:
+            filematcher = lambda rev: match
+
+    expr = []
     for op, val in opts.iteritems():
         if not val:
             continue
-        if op == 'rev':
-            # Already a revset
-            revset.extend(val)
         if op not in opt2revset:
             continue
-        arity, revop = opt2revset[op]
-        revop = revop.replace('$', '%(val)r')
-        if arity == 0:
-            optrevset.append(revop)
-        elif arity == 1:
-            optrevset.append(revop % {'val': val})
+        revop, andor = opt2revset[op]
+        if '%(val)' not in revop:
+            expr.append(revop)
         else:
-            for f in val:
-                optrevset.append(revop % {'val': f})
+            if not isinstance(val, list):
+                e = revop % {'val': val}
+            else:
+                e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
+            expr.append(e)
+
+    if expr:
+        expr = '(' + ' and '.join(expr) + ')'
+    else:
+        expr = None
+    return expr, filematcher
 
-    for path in pats:
-        optrevset.append('file(%r)' % path)
+def getlogrevs(repo, pats, opts):
+    """Return (revs, expr, filematcher) where revs is a list of
+    revision numbers, expr is a revset string built from log options
+    and file patterns or None, and used to filter 'revs'. If --stat or
+    --patch are not passed filematcher is None. Otherwise it is a
+    callable taking a revision number and returning a match object
+    filtering the files to be detailed when displaying the revision.
+    """
+    if not len(repo):
+        return [], None, None
+    # Default --rev value depends on --follow but --follow behaviour
+    # depends on revisions resolved from --rev...
+    follow = opts.get('follow') or opts.get('follow_first')
+    if opts.get('rev'):
+        revs = scmutil.revrange(repo, opts['rev'])
+    else:
+        if follow and len(repo) > 0:
+            revs = scmutil.revrange(repo, ['.:0'])
+        else:
+            revs = range(len(repo) - 1, -1, -1)
+    if not revs:
+        return [], None, None
+    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
+    if expr:
+        # Evaluate revisions in changelog order for performance
+        # reasons but preserve the original sequence order in the
+        # filtered result.
+        matched = set(revset.match(repo.ui, expr)(repo, sorted(revs)))
+        revs = [r for r in revs if r in matched]
+    if not opts.get('hidden'):
+        # --hidden is still experimental and not worth a dedicated revset
+        # yet. Fortunately, filtering revision number is fast.
+        revs = [r for r in revs if r not in repo.changelog.hiddenrevs]
+    return revs, expr, filematcher
 
-    if revset or optrevset:
-        if revset:
-            revset = ['(' + ' or '.join(revset) + ')']
-        if optrevset:
-            revset.append('(' + ' and '.join(optrevset) + ')')
-        revset = ' and '.join(revset)
-    else:
-        revset = 'all()'
-    return revset
-
-def generate(ui, dag, displayer, showparents, edgefn):
+def generate(ui, dag, displayer, showparents, edgefn, getrenamed=None,
+             filematcher=None):
     seen, state = [], asciistate()
     for rev, type, ctx, parents in dag:
         char = ctx.node() in showparents and '@' or 'o'
-        displayer.show(ctx)
+        copies = None
+        if getrenamed and ctx.rev():
+            copies = []
+            for fn in ctx.files():
+                rename = getrenamed(fn, ctx.rev())
+                if rename:
+                    copies.append((fn, rename[0]))
+        revmatchfn = None
+        if filematcher is not None:
+            revmatchfn = filematcher(ctx.rev())
+        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
         lines = displayer.hunk.pop(rev).split('\n')[:-1]
         displayer.flush(rev)
         edges = edgefn(type, char, lines, seen, rev, parents)
@@ -308,12 +449,28 @@
     displayer.close()
 
 @command('glog',
-    [('l', 'limit', '',
-     _('limit number of changes displayed'), _('NUM')),
-    ('p', 'patch', False, _('show patch')),
+    [('f', 'follow', None,
+     _('follow changeset history, or file history across copies and renames')),
+    ('', 'follow-first', None,
+     _('only follow the first parent of merge changesets (DEPRECATED)')),
+    ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
+    ('C', 'copies', None, _('show copied files')),
+    ('k', 'keyword', [],
+     _('do case-insensitive search for a given text'), _('TEXT')),
     ('r', 'rev', [], _('show the specified revision or range'), _('REV')),
-    ] + templateopts,
-    _('hg glog [OPTION]... [FILE]'))
+    ('', 'removed', None, _('include revisions where files were removed')),
+    ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
+    ('u', 'user', [], _('revisions committed by user'), _('USER')),
+    ('', 'only-branch', [],
+     _('show only changesets within the given named branch (DEPRECATED)'),
+     _('BRANCH')),
+    ('b', 'branch', [],
+     _('show changesets within the given named branch'), _('BRANCH')),
+    ('P', 'prune', [],
+     _('do not display revision or any of its ancestors'), _('REV')),
+    ('', 'hidden', False, _('show hidden changesets (DEPRECATED)')),
+    ] + commands.logopts + commands.walkopts,
+    _('[OPTION]... [FILE]'))
 def graphlog(ui, repo, *pats, **opts):
     """show revision history alongside an ASCII revision graph
 
@@ -324,17 +481,23 @@
     directory.
     """
 
-    check_unsupported_flags(pats, opts)
-
-    revs = sorted(scmutil.revrange(repo, [revset(pats, opts)]), reverse=1)
+    revs, expr, filematcher = getlogrevs(repo, pats, opts)
+    revs = sorted(revs, reverse=1)
     limit = cmdutil.loglimit(opts)
     if limit is not None:
         revs = revs[:limit]
     revdag = graphmod.dagwalker(repo, revs)
 
+    getrenamed = None
+    if opts.get('copies'):
+        endrev = None
+        if opts.get('rev'):
+            endrev = max(scmutil.revrange(repo, opts.get('rev'))) + 1
+        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
     displayer = show_changeset(ui, repo, opts, buffered=True)
     showparents = [ctx.node() for ctx in repo[None].parents()]
-    generate(ui, revdag, displayer, showparents, asciiedges)
+    generate(ui, revdag, displayer, showparents, asciiedges, getrenamed,
+             filematcher)
 
 def graphrevs(repo, nodes, opts):
     limit = cmdutil.loglimit(opts)
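
The getlogrevs() docstring above describes the new log pipeline: resolve the candidate revisions first, then filter them through the revset built by _makelogrevset() while keeping the caller's ordering. A minimal, self-contained sketch of that filtering step, with a plain predicate standing in for the compiled matcher that revset.match() would return:

    # Sketch only: 'matchrev' stands in for the matcher returned by
    # revset.match(repo.ui, expr).
    def filterrevs(revs, matchrev):
        # Evaluate candidates in ascending (changelog) order for speed...
        matched = set(r for r in sorted(revs) if matchrev(r))
        # ...but preserve the order the caller asked for (e.g. newest first).
        return [r for r in revs if r in matched]

    # Keep only even revision numbers from a newest-first candidate list.
    print(filterrevs(range(9, -1, -1), lambda r: r % 2 == 0))  # [8, 6, 4, 2, 0]
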
--- a/hgext/largefiles/basestore.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/hgext/largefiles/basestore.py	Tue Apr 17 17:56:36 2012 -0500
@@ -114,14 +114,14 @@
             failed = util.any(self._verifyfile(
                 cctx, cset, contents, standin, verified) for standin in cctx)
 
-        num_revs = len(verified)
-        num_lfiles = len(set([fname for (fname, fnode) in verified]))
+        numrevs = len(verified)
+        numlfiles = len(set([fname for (fname, fnode) in verified]))
         if contents:
             write(_('verified contents of %d revisions of %d largefiles\n')
-                  % (num_revs, num_lfiles))
+                  % (numrevs, numlfiles))
         else:
             write(_('verified existence of %d revisions of %d largefiles\n')
-                  % (num_revs, num_lfiles))
+                  % (numrevs, numlfiles))
 
         return int(failed)
 
@@ -186,9 +186,9 @@
     except KeyError:
         raise util.Abort(_('unsupported URL scheme %r') % scheme)
 
-    for class_obj in storeproviders:
+    for classobj in storeproviders:
         try:
-            return class_obj(ui, repo, remote)
+            return classobj(ui, repo, remote)
         except lfutil.storeprotonotcapable:
             pass
 
--- a/hgext/largefiles/lfcommands.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/hgext/largefiles/lfcommands.py	Tue Apr 17 17:56:36 2012 -0500
@@ -11,7 +11,7 @@
 import os
 import shutil
 
-from mercurial import util, match as match_, hg, node, context, error
+from mercurial import util, match as match_, hg, node, context, error, cmdutil
 from mercurial.i18n import _
 
 import lfutil
@@ -58,7 +58,7 @@
         # Lock destination to prevent modification while it is converted to.
         # Don't need to lock src because we are just reading from its history
         # which can't change.
-        dst_lock = rdst.lock()
+        dstlock = rdst.lock()
 
         # Get a list of all changesets in the source.  The easy way to do this
         # is to simply walk the changelog, using changelog.nodesbetween().
@@ -113,7 +113,7 @@
         if not success:
             # we failed, remove the new directory
             shutil.rmtree(rdst.root)
-        dst_lock.release()
+        dstlock.release()
 
 def _addchangeset(ui, rsrc, rdst, ctx, revmap):
     # Convert src parents to dst parents
@@ -451,7 +451,7 @@
              expecthash != lfutil.hashfile(abslfile))):
             if not lfutil.copyfromcache(repo, expecthash, lfile):
                 # use normallookup() to allocate entry in largefiles dirstate,
-                # because lack of it misleads lfiles_repo.status() into
+                # because lack of it misleads lfilesrepo.status() into
                 # recognition that such cache missing files are REMOVED.
                 lfdirstate.normallookup(lfile)
                 return None # don't try to set the mode
@@ -485,6 +485,23 @@
         lfdirstate.drop(lfile)
     return ret
 
+def catlfile(repo, lfile, rev, filename):
+    hash = lfutil.readstandin(repo, lfile, rev)
+    if not lfutil.inusercache(repo.ui, hash):
+        store = basestore._openstore(repo)
+        success, missing = store.get([(lfile, hash)])
+        if len(success) != 1:
+            raise util.Abort(
+                _('largefile %s is not in cache and could not be downloaded')
+                    % lfile)
+    path = lfutil.usercachepath(repo.ui, hash)
+    fpout = cmdutil.makefileobj(repo, filename)
+    fpin = open(path, "rb")
+    fpout.write(fpin.read())
+    fpout.close()
+    fpin.close()
+    return 0
+
 # -- hg commands declarations ------------------------------------------------
 
 cmdtable = {
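
The new catlfile() helper above follows a cache-then-fetch-then-stream flow: read the hash from the standin, download the blob into the user cache if it is missing, then copy the cached file to the requested output. A self-contained sketch of that flow; the paths and the fetch callback are hypothetical, not the largefiles API:

    import os, shutil

    def catblob(hash, cachedir, fetch, out):
        # 'fetch' is a hypothetical callback that downloads the blob into
        # 'path' and returns True on success (the real code asks the store).
        path = os.path.join(cachedir, hash)
        if not os.path.exists(path):
            if not fetch(hash, path):
                raise IOError('largefile %s is not in cache and could not '
                              'be downloaded' % hash)
        with open(path, 'rb') as fpin:
            shutil.copyfileobj(fpin, out)
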
--- a/hgext/largefiles/lfutil.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/hgext/largefiles/lfutil.py	Tue Apr 17 17:56:36 2012 -0500
@@ -23,14 +23,14 @@
 
 # -- Portability wrappers ----------------------------------------------
 
-def dirstate_walk(dirstate, matcher, unknown=False, ignored=False):
+def dirstatewalk(dirstate, matcher, unknown=False, ignored=False):
     return dirstate.walk(matcher, [], unknown, ignored)
 
-def repo_add(repo, list):
+def repoadd(repo, list):
     add = repo[None].add
     return add(list)
 
-def repo_remove(repo, list, unlink=False):
+def reporemove(repo, list, unlink=False):
     def remove(list, unlink):
         wlock = repo.wlock()
         try:
@@ -46,7 +46,7 @@
             wlock.release()
     return remove(list, unlink=unlink)
 
-def repo_forget(repo, list):
+def repoforget(repo, list):
     forget = repo[None].forget
     return forget(list)
 
@@ -125,21 +125,21 @@
         return path
     return None
 
-class largefiles_dirstate(dirstate.dirstate):
+class largefilesdirstate(dirstate.dirstate):
     def __getitem__(self, key):
-        return super(largefiles_dirstate, self).__getitem__(unixpath(key))
+        return super(largefilesdirstate, self).__getitem__(unixpath(key))
     def normal(self, f):
-        return super(largefiles_dirstate, self).normal(unixpath(f))
+        return super(largefilesdirstate, self).normal(unixpath(f))
     def remove(self, f):
-        return super(largefiles_dirstate, self).remove(unixpath(f))
+        return super(largefilesdirstate, self).remove(unixpath(f))
     def add(self, f):
-        return super(largefiles_dirstate, self).add(unixpath(f))
+        return super(largefilesdirstate, self).add(unixpath(f))
     def drop(self, f):
-        return super(largefiles_dirstate, self).drop(unixpath(f))
+        return super(largefilesdirstate, self).drop(unixpath(f))
     def forget(self, f):
-        return super(largefiles_dirstate, self).forget(unixpath(f))
+        return super(largefilesdirstate, self).forget(unixpath(f))
     def normallookup(self, f):
-        return super(largefiles_dirstate, self).normallookup(unixpath(f))
+        return super(largefilesdirstate, self).normallookup(unixpath(f))
 
 def openlfdirstate(ui, repo):
     '''
@@ -148,7 +148,7 @@
     '''
     admin = repo.join(longname)
     opener = scmutil.opener(admin)
-    lfdirstate = largefiles_dirstate(opener, ui, repo.root,
+    lfdirstate = largefilesdirstate(opener, ui, repo.root,
                                      repo.dirstate._validate)
 
     # If the largefiles dirstate does not exist, populate and create
@@ -157,7 +157,7 @@
     if not os.path.exists(os.path.join(admin, 'dirstate')):
         util.makedirs(admin)
         matcher = getstandinmatcher(repo)
-        for standin in dirstate_walk(repo.dirstate, matcher):
+        for standin in dirstatewalk(repo.dirstate, matcher):
             lfile = splitstandin(standin)
             hash = readstandin(repo, lfile)
             lfdirstate.normallookup(lfile)
@@ -169,7 +169,7 @@
                     raise
     return lfdirstate
 
-def lfdirstate_status(lfdirstate, repo, rev):
+def lfdirstatestatus(lfdirstate, repo, rev):
     match = match_.always(repo.root, repo.getcwd())
     s = lfdirstate.status(match, [], False, False, False)
     unsure, modified, added, removed, missing, unknown, ignored, clean = s
@@ -286,9 +286,9 @@
     as the paths specified by the user.'''
     smatcher = getstandinmatcher(repo, rmatcher.files())
     isstandin = smatcher.matchfn
-    def composed_matchfn(f):
+    def composedmatchfn(f):
         return isstandin(f) and rmatcher.matchfn(splitstandin(f))
-    smatcher.matchfn = composed_matchfn
+    smatcher.matchfn = composedmatchfn
 
     return smatcher
 
@@ -296,8 +296,8 @@
     '''Return the repo-relative path to the standin for the specified big
     file.'''
     # Notes:
-    # 1) Most callers want an absolute path, but _create_standin() needs
-    #    it repo-relative so lfadd() can pass it to repo_add().  So leave
+    # 1) Most callers want an absolute path, but _createstandin() needs
+    #    it repo-relative so lfadd() can pass it to repoadd().  So leave
     #    it up to the caller to use repo.wjoin() to get an absolute path.
     # 2) Join with '/' because that's what dirstate always uses, even on
     #    Windows. Change existing separator to '/' first in case we are
@@ -449,3 +449,19 @@
         newheads = repo.branchheads(branch)
         heads = heads + newheads
     return heads
+
+def getstandinsstate(repo):
+    standins = []
+    matcher = getstandinmatcher(repo)
+    for standin in dirstatewalk(repo.dirstate, matcher):
+        lfile = splitstandin(standin)
+        standins.append((lfile, readstandin(repo, lfile)))
+    return standins
+
+def getlfilestoupdate(oldstandins, newstandins):
+    changedstandins = set(oldstandins).symmetric_difference(set(newstandins))
+    filelist = []
+    for f in changedstandins:
+        if f[0] not in filelist:
+            filelist.append(f[0])
+    return filelist
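
getstandinsstate() and getlfilestoupdate() let callers snapshot the (lfile, hash) pairs before and after an operation and update only the largefiles whose standins changed; hgupdate() below uses exactly this. A small sketch of the before/after comparison with plain tuples:

    def changedlfiles(oldstandins, newstandins):
        # The symmetric difference catches standins that appeared,
        # disappeared or changed hash; keep each filename only once.
        changed = set(oldstandins) ^ set(newstandins)
        filelist = []
        for fname, dummyhash in changed:
            if fname not in filelist:
                filelist.append(fname)
        return filelist

    old = [('big.bin', 'aaa'), ('other.bin', 'bbb')]
    new = [('big.bin', 'ccc'), ('other.bin', 'bbb')]
    print(changedlfiles(old, new))  # ['big.bin']
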
--- a/hgext/largefiles/overrides.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/hgext/largefiles/overrides.py	Tue Apr 17 17:56:36 2012 -0500
@@ -26,7 +26,7 @@
     '''overrides scmutil.match so that the matcher it returns will ignore all
     largefiles'''
     oldmatch = None # for the closure
-    def override_match(ctx, pats=[], opts={}, globbed=False,
+    def overridematch(ctx, pats=[], opts={}, globbed=False,
             default='relpath'):
         match = oldmatch(ctx, pats, opts, globbed, default)
         m = copy.copy(match)
@@ -34,10 +34,10 @@
                 manifest)
         m._files = filter(notlfile, m._files)
         m._fmap = set(m._files)
-        orig_matchfn = m.matchfn
-        m.matchfn = lambda f: notlfile(f) and orig_matchfn(f) or None
+        origmatchfn = m.matchfn
+        m.matchfn = lambda f: notlfile(f) and origmatchfn(f) or None
         return m
-    oldmatch = installmatchfn(override_match)
+    oldmatch = installmatchfn(overridematch)
 
 def installmatchfn(f):
     oldmatch = scmutil.match
@@ -53,7 +53,7 @@
     restore matchfn to reverse'''
     scmutil.match = getattr(scmutil.match, 'oldmatch', scmutil.match)
 
-def add_largefiles(ui, repo, *pats, **opts):
+def addlargefiles(ui, repo, *pats, **opts):
     large = opts.pop('large', None)
     lfsize = lfutil.getminsize(
         ui, lfutil.islfilesrepo(repo), opts.pop('lfsize', None))
@@ -109,13 +109,13 @@
                     lfdirstate.add(f)
             lfdirstate.write()
             bad += [lfutil.splitstandin(f)
-                    for f in lfutil.repo_add(repo, standins)
+                    for f in lfutil.repoadd(repo, standins)
                     if f in m.files()]
     finally:
         wlock.release()
     return bad
 
-def remove_largefiles(ui, repo, *pats, **opts):
+def removelargefiles(ui, repo, *pats, **opts):
     after = opts.get('after')
     if not pats and not after:
         raise util.Abort(_('no files specified'))
@@ -164,54 +164,59 @@
         lfdirstate.write()
         forget = [lfutil.standin(f) for f in forget]
         remove = [lfutil.standin(f) for f in remove]
-        lfutil.repo_forget(repo, forget)
+        lfutil.repoforget(repo, forget)
         # If this is being called by addremove, let the original addremove
         # function handle this.
         if not getattr(repo, "_isaddremove", False):
-            lfutil.repo_remove(repo, remove, unlink=True)
+            lfutil.reporemove(repo, remove, unlink=True)
     finally:
         wlock.release()
 
+# For overriding mercurial.hgweb.webcommands so that largefiles will
+# appear in their proper place in the manifests.
+def decodepath(orig, path):
+    return lfutil.splitstandin(path) or path
+
 # -- Wrappers: modify existing commands --------------------------------
 
 # Add works by going through the files that the user wanted to add and
 # checking if they should be added as largefiles. Then it makes a new
 # matcher which matches only the normal files and runs the original
 # version of add.
-def override_add(orig, ui, repo, *pats, **opts):
+def overrideadd(orig, ui, repo, *pats, **opts):
     normal = opts.pop('normal')
     if normal:
         if opts.get('large'):
             raise util.Abort(_('--normal cannot be used with --large'))
         return orig(ui, repo, *pats, **opts)
-    bad = add_largefiles(ui, repo, *pats, **opts)
+    bad = addlargefiles(ui, repo, *pats, **opts)
     installnormalfilesmatchfn(repo[None].manifest())
     result = orig(ui, repo, *pats, **opts)
     restorematchfn()
 
     return (result == 1 or bad) and 1 or 0
 
-def override_remove(orig, ui, repo, *pats, **opts):
+def overrideremove(orig, ui, repo, *pats, **opts):
     installnormalfilesmatchfn(repo[None].manifest())
     orig(ui, repo, *pats, **opts)
     restorematchfn()
-    remove_largefiles(ui, repo, *pats, **opts)
+    removelargefiles(ui, repo, *pats, **opts)
 
-def override_status(orig, ui, repo, *pats, **opts):
+def overridestatus(orig, ui, repo, *pats, **opts):
     try:
         repo.lfstatus = True
         return orig(ui, repo, *pats, **opts)
     finally:
         repo.lfstatus = False
 
-def override_log(orig, ui, repo, *pats, **opts):
+def overridelog(orig, ui, repo, *pats, **opts):
     try:
         repo.lfstatus = True
         orig(ui, repo, *pats, **opts)
     finally:
         repo.lfstatus = False
 
-def override_verify(orig, ui, repo, *pats, **opts):
+def overrideverify(orig, ui, repo, *pats, **opts):
     large = opts.pop('large', False)
     all = opts.pop('lfa', False)
     contents = opts.pop('lfc', False)
@@ -225,7 +230,7 @@
 # will go through properly. Then the other update hook (overriding repo.update)
 # will get the new files. Filemerge is also overridden so that the merge
 # will merge standins correctly.
-def override_update(orig, ui, repo, *pats, **opts):
+def overrideupdate(orig, ui, repo, *pats, **opts):
     lfdirstate = lfutil.openlfdirstate(ui, repo)
     s = lfdirstate.status(match_.always(repo.root, repo.getcwd()), [], False,
         False, False)
@@ -265,13 +270,10 @@
 # The overridden function filters the unknown files by removing any
 # largefiles. This makes the merge proceed and we can then handle this
 # case further in the overridden manifestmerge function below.
-def override_checkunknown(origfn, wctx, mctx, folding):
-    origunknown = wctx.unknown()
-    wctx._unknown = filter(lambda f: lfutil.standin(f) not in wctx, origunknown)
-    try:
-        return origfn(wctx, mctx, folding)
-    finally:
-        wctx._unknown = origunknown
+def overridecheckunknownfile(origfn, repo, wctx, mctx, f):
+    if lfutil.standin(f) in wctx:
+        return False
+    return origfn(repo, wctx, mctx, f)
 
 # The manifest merge handles conflicts on the manifest level. We want
 # to handle changes in largefile-ness of files at this level too.
@@ -299,7 +301,7 @@
 # Finally, the merge.applyupdates function will then take care of
 # writing the files into the working copy and lfcommands.updatelfiles
 # will update the largefiles.
-def override_manifestmerge(origfn, repo, p1, p2, pa, overwrite, partial):
+def overridemanifestmerge(origfn, repo, p1, p2, pa, overwrite, partial):
     actions = origfn(repo, p1, p2, pa, overwrite, partial)
     processed = []
 
@@ -322,7 +324,7 @@
                 processed.append((standin, "g", p2.flags(standin)))
             else:
                 processed.append((standin, "r"))
-        elif m == "m" and lfutil.standin(f) in p1 and f in p2:
+        elif m == "g" and lfutil.standin(f) in p1 and f in p2:
             # Case 2: largefile in the working copy, normal file in
             # the second parent
             standin = lfutil.standin(f)
@@ -342,7 +344,7 @@
 # Override filemerge to prompt the user about how they wish to merge
 # largefiles. This will handle identical edits, and copy/rename +
 # edit without prompting the user.
-def override_filemerge(origfn, repo, mynode, orig, fcd, fco, fca):
+def overridefilemerge(origfn, repo, mynode, orig, fcd, fco, fca):
     # Use better variable names here. Because this is a wrapper we cannot
     # change the variable names in the function declaration.
     fcdest, fcother, fcancestor = fcd, fco, fca
@@ -387,7 +389,7 @@
 # checks if the destination largefile already exists. It also keeps a
 # list of copied files so that the largefiles can be copied and the
 # dirstate updated.
-def override_copy(orig, ui, repo, pats, opts, rename=False):
+def overridecopy(orig, ui, repo, pats, opts, rename=False):
     # doesn't remove largefile on rename
     if len(pats) < 2:
         # this isn't legal, let the original function deal with it
@@ -431,13 +433,13 @@
 
     try:
         try:
-            # When we call orig below it creates the standins but we don't add them
-            # to the dir state until later so lock during that time.
+            # When we call orig below it creates the standins but we don't add
+            # them to the dir state until later so lock during that time.
             wlock = repo.wlock()
 
             manifest = repo[None].manifest()
             oldmatch = None # for the closure
-            def override_match(ctx, pats=[], opts={}, globbed=False,
+            def overridematch(ctx, pats=[], opts={}, globbed=False,
                     default='relpath'):
                 newpats = []
                 # The patterns were previously mangled to add the standin
@@ -452,13 +454,13 @@
                 lfile = lambda f: lfutil.standin(f) in manifest
                 m._files = [lfutil.standin(f) for f in m._files if lfile(f)]
                 m._fmap = set(m._files)
-                orig_matchfn = m.matchfn
+                origmatchfn = m.matchfn
                 m.matchfn = lambda f: (lfutil.isstandin(f) and
-                                    lfile(lfutil.splitstandin(f)) and
-                                    orig_matchfn(lfutil.splitstandin(f)) or
+                                    (f in manifest) and
+                                    origmatchfn(lfutil.splitstandin(f)) or
                                     None)
                 return m
-            oldmatch = installmatchfn(override_match)
+            oldmatch = installmatchfn(overridematch)
             listpats = []
             for pat in pats:
                 if match_.patkind(pat) is not None:
@@ -469,7 +471,7 @@
             try:
                 origcopyfile = util.copyfile
                 copiedfiles = []
-                def override_copyfile(src, dest):
+                def overridecopyfile(src, dest):
                     if (lfutil.shortname in src and
                         dest.startswith(repo.wjoin(lfutil.shortname))):
                         destlfile = dest.replace(lfutil.shortname, '')
@@ -479,7 +481,7 @@
                     copiedfiles.append((src, dest))
                     origcopyfile(src, dest)
 
-                util.copyfile = override_copyfile
+                util.copyfile = overridecopyfile
                 result += orig(ui, repo, listpats, opts, rename)
             finally:
                 util.copyfile = origcopyfile
@@ -524,7 +526,7 @@
 # the matcher to hit standins instead of largefiles. Based on the
 # resulting standins update the largefiles. Then return the standins
 # to their proper state
-def override_revert(orig, ui, repo, *pats, **opts):
+def overriderevert(orig, ui, repo, *pats, **opts):
     # Because we put the standins in a bad state (by updating them)
     # and then return them to a correct state we need to lock to
     # prevent others from changing them in their incorrect state.
@@ -532,7 +534,7 @@
     try:
         lfdirstate = lfutil.openlfdirstate(ui, repo)
         (modified, added, removed, missing, unknown, ignored, clean) = \
-            lfutil.lfdirstate_status(lfdirstate, repo, repo['.'].rev())
+            lfutil.lfdirstatestatus(lfdirstate, repo, repo['.'].rev())
         for lfile in modified:
             lfutil.updatestandin(repo, lfutil.standin(lfile))
         for lfile in missing:
@@ -541,12 +543,12 @@
         try:
             ctx = repo[opts.get('rev')]
             oldmatch = None # for the closure
-            def override_match(ctx, pats=[], opts={}, globbed=False,
+            def overridematch(ctx, pats=[], opts={}, globbed=False,
                     default='relpath'):
                 match = oldmatch(ctx, pats, opts, globbed, default)
                 m = copy.copy(match)
                 def tostandin(f):
-                    if lfutil.standin(f) in ctx or lfutil.standin(f) in ctx:
+                    if lfutil.standin(f) in ctx:
                         return lfutil.standin(f)
                     elif lfutil.standin(f) in repo[None]:
                         return None
@@ -554,7 +556,7 @@
                 m._files = [tostandin(f) for f in m._files]
                 m._files = [f for f in m._files if f is not None]
                 m._fmap = set(m._files)
-                orig_matchfn = m.matchfn
+                origmatchfn = m.matchfn
                 def matchfn(f):
                     if lfutil.isstandin(f):
                         # We need to keep track of what largefiles are being
@@ -563,7 +565,7 @@
                         # largefiles. This is repo-specific, so duckpunch the
                         # repo object to keep the list of largefiles for us
                         # later.
-                        if orig_matchfn(lfutil.splitstandin(f)) and \
+                        if origmatchfn(lfutil.splitstandin(f)) and \
                                 (f in repo[None] or f in ctx):
                             lfileslist = getattr(repo, '_lfilestoupdate', [])
                             lfileslist.append(lfutil.splitstandin(f))
@@ -571,12 +573,12 @@
                             return True
                         else:
                             return False
-                    return orig_matchfn(f)
+                    return origmatchfn(f)
                 m.matchfn = matchfn
                 return m
-            oldmatch = installmatchfn(override_match)
+            oldmatch = installmatchfn(overridematch)
             scmutil.match
-            matches = override_match(repo[None], pats, opts)
+            matches = overridematch(repo[None], pats, opts)
             orig(ui, repo, *pats, **opts)
         finally:
             restorematchfn()
@@ -604,17 +606,21 @@
     finally:
         wlock.release()
 
-def hg_update(orig, repo, node):
+def hgupdate(orig, repo, node):
+    # Only call updatelfiles on the standins that have changed, to save time
+    oldstandins = lfutil.getstandinsstate(repo)
     result = orig(repo, node)
-    lfcommands.updatelfiles(repo.ui, repo)
+    newstandins = lfutil.getstandinsstate(repo)
+    filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
+    lfcommands.updatelfiles(repo.ui, repo, filelist=filelist, printmessage=True)
     return result
 
-def hg_clean(orig, repo, node, show_stats=True):
+def hgclean(orig, repo, node, show_stats=True):
     result = orig(repo, node, show_stats)
     lfcommands.updatelfiles(repo.ui, repo)
     return result
 
-def hg_merge(orig, repo, node, force=None, remind=True):
+def hgmerge(orig, repo, node, force=None, remind=True):
     # Mark the repo as being in the middle of a merge, so that
     # updatelfiles() will know that it needs to trust the standins in
     # the working copy, not in the standins in the current node
@@ -629,7 +635,7 @@
 # When we rebase a repository with remotely changed largefiles, we need to
 # take some extra care so that the largefiles are correctly updated in the
 # working copy
-def override_pull(orig, ui, repo, source=None, **opts):
+def overridepull(orig, ui, repo, source=None, **opts):
     if opts.get('rebase', False):
         repo._isrebasing = True
         try:
@@ -676,14 +682,14 @@
         ui.status(_("%d largefiles cached\n") % numcached)
     return result
 
-def override_rebase(orig, ui, repo, **opts):
+def overriderebase(orig, ui, repo, **opts):
     repo._isrebasing = True
     try:
         orig(ui, repo, **opts)
     finally:
         repo._isrebasing = False
 
-def override_archive(orig, repo, dest, node, kind, decode=True, matchfn=None,
+def overridearchive(orig, repo, dest, node, kind, decode=True, matchfn=None,
             prefix=None, mtime=None, subrepos=None):
     # No need to lock because we are only reading history and
     # largefile caches, neither of which are modified.
@@ -765,7 +771,7 @@
 # standin until a commit. cmdutil.bailifchanged() raises an exception
 # if the repo has uncommitted changes. Wrap it to also check if
 # largefiles were changed. This is used by bisect and backout.
-def override_bailifchanged(orig, repo):
+def overridebailifchanged(orig, repo):
     orig(repo)
     repo.lfstatus = True
     modified, added, removed, deleted = repo.status()[:4]
@@ -773,8 +779,8 @@
     if modified or added or removed or deleted:
         raise util.Abort(_('outstanding uncommitted changes'))
 
-# Fetch doesn't use cmdutil.bail_if_changed so override it to add the check
-def override_fetch(orig, ui, repo, *pats, **opts):
+# Fetch doesn't use cmdutil.bailifchanged so override it to add the check
+def overridefetch(orig, ui, repo, *pats, **opts):
     repo.lfstatus = True
     modified, added, removed, deleted = repo.status()[:4]
     repo.lfstatus = False
@@ -782,7 +788,7 @@
         raise util.Abort(_('outstanding uncommitted changes'))
     return orig(ui, repo, *pats, **opts)
 
-def override_forget(orig, ui, repo, *pats, **opts):
+def overrideforget(orig, ui, repo, *pats, **opts):
     installnormalfilesmatchfn(repo[None].manifest())
     orig(ui, repo, *pats, **opts)
     restorematchfn()
@@ -817,7 +823,7 @@
             else:
                 lfdirstate.remove(f)
         lfdirstate.write()
-        lfutil.repo_remove(repo, [lfutil.standin(f) for f in forget],
+        lfutil.reporemove(repo, [lfutil.standin(f) for f in forget],
             unlink=True)
     finally:
         wlock.release()
@@ -864,7 +870,7 @@
             set([f for f in files if lfutil.isstandin(f) and f in ctx]))
     return toupload
 
-def override_outgoing(orig, ui, repo, dest=None, **opts):
+def overrideoutgoing(orig, ui, repo, dest=None, **opts):
     orig(ui, repo, dest, **opts)
 
     if opts.pop('large', None):
@@ -877,7 +883,7 @@
                 ui.status(lfutil.splitstandin(file) + '\n')
             ui.status('\n')
 
-def override_summary(orig, ui, repo, *pats, **opts):
+def overridesummary(orig, ui, repo, *pats, **opts):
     try:
         repo.lfstatus = True
         orig(ui, repo, *pats, **opts)
@@ -891,7 +897,7 @@
         else:
             ui.status(_('largefiles: %d to upload\n') % len(toupload))
 
-def override_addremove(orig, ui, repo, *pats, **opts):
+def overrideaddremove(orig, ui, repo, *pats, **opts):
     # Get the list of missing largefiles so we can remove them
     lfdirstate = lfutil.openlfdirstate(ui, repo)
     s = lfdirstate.status(match_.always(repo.root, repo.getcwd()), [], False,
@@ -904,11 +910,11 @@
     # confused state later.
     if missing:
         repo._isaddremove = True
-        remove_largefiles(ui, repo, *missing, **opts)
+        removelargefiles(ui, repo, *missing, **opts)
         repo._isaddremove = False
     # Call into the normal add code, and any files that *should* be added as
     # largefiles will be
-    add_largefiles(ui, repo, *pats, **opts)
+    addlargefiles(ui, repo, *pats, **opts)
     # Now that we've handled largefiles, hand off to the original addremove
     # function to take care of the rest.  Make sure it doesn't do anything with
     # largefiles by installing a matcher that will ignore them.
@@ -919,9 +925,9 @@
 
 # Calling purge with --all will cause the largefiles to be deleted.
 # Override repo.status to prevent this from happening.
-def override_purge(orig, ui, repo, *dirs, **opts):
+def overridepurge(orig, ui, repo, *dirs, **opts):
     oldstatus = repo.status
-    def override_status(node1='.', node2=None, match=None, ignored=False,
+    def overridestatus(node1='.', node2=None, match=None, ignored=False,
                         clean=False, unknown=False, listsubrepos=False):
         r = oldstatus(node1, node2, match, ignored, clean, unknown,
                       listsubrepos)
@@ -930,11 +936,11 @@
         unknown = [f for f in unknown if lfdirstate[f] == '?']
         ignored = [f for f in ignored if lfdirstate[f] == '?']
         return modified, added, removed, deleted, unknown, ignored, clean
-    repo.status = override_status
+    repo.status = overridestatus
     orig(ui, repo, *dirs, **opts)
     repo.status = oldstatus
 
-def override_rollback(orig, ui, repo, **opts):
+def overriderollback(orig, ui, repo, **opts):
     result = orig(ui, repo, **opts)
     merge.update(repo, node=None, branchmerge=False, force=True,
         partial=lfutil.isstandin)
@@ -953,12 +959,22 @@
         wlock.release()
     return result
 
-def override_transplant(orig, ui, repo, *revs, **opts):
+def overridetransplant(orig, ui, repo, *revs, **opts):
     try:
+        oldstandins = lfutil.getstandinsstate(repo)
         repo._istransplanting = True
         result = orig(ui, repo, *revs, **opts)
-        lfcommands.updatelfiles(ui, repo, filelist=None,
-                                printmessage=False)
+        newstandins = lfutil.getstandinsstate(repo)
+        filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
+        lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
+                                printmessage=True)
     finally:
         repo._istransplanting = False
     return result
+
+def overridecat(orig, ui, repo, file1, *pats, **opts):
+    rev = opts.get('rev')
+    if not lfutil.standin(file1) in repo[rev]:
+        result = orig(ui, repo, file1, *pats, **opts)
+        return result
+    return lfcommands.catlfile(repo, file1, opts.get('rev'), opts.get('output'))
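
Most wrappers in this file rely on the installmatchfn()/restorematchfn() pair: temporarily replace scmutil.match with a filtering wrapper, remember the original on the wrapper itself, and put it back afterwards. A generic sketch of that install/restore pattern on a hypothetical module-like object; 'mod.match' stands in for scmutil.match:

    def installwrapper(mod, wrapper):
        wrapper.oldmatch = mod.match   # remembered for the restore step
        mod.match = wrapper
        return wrapper.oldmatch

    def restorewrapper(mod):
        mod.match = getattr(mod.match, 'oldmatch', mod.match)
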
--- a/hgext/largefiles/proto.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/hgext/largefiles/proto.py	Tue Apr 17 17:56:36 2012 -0500
@@ -131,15 +131,15 @@
 
 # advertise the largefiles=serve capability
 def capabilities(repo, proto):
-    return capabilities_orig(repo, proto) + ' largefiles=serve'
+    return capabilitiesorig(repo, proto) + ' largefiles=serve'
 
 # duplicate what Mercurial's new out-of-band errors mechanism does, because
 # clients old and new alike both handle it well
-def webproto_refuseclient(self, message):
+def webprotorefuseclient(self, message):
     self.req.header([('Content-Type', 'application/hg-error')])
     return message
 
-def sshproto_refuseclient(self, message):
+def sshprotorefuseclient(self, message):
     self.ui.write_err('%s\n-\n' % message)
     self.fout.write('\n')
     self.fout.flush()
@@ -151,16 +151,16 @@
         return wireproto.ooberror(LARGEFILES_REQUIRED_MSG)
     return wireproto.heads(repo, proto)
 
-def sshrepo_callstream(self, cmd, **args):
+def sshrepocallstream(self, cmd, **args):
     if cmd == 'heads' and self.capable('largefiles'):
         cmd = 'lheads'
     if cmd == 'batch' and self.capable('largefiles'):
         args['cmds'] = args['cmds'].replace('heads ', 'lheads ')
-    return ssh_oldcallstream(self, cmd, **args)
+    return ssholdcallstream(self, cmd, **args)
 
-def httprepo_callstream(self, cmd, **args):
+def httprepocallstream(self, cmd, **args):
     if cmd == 'heads' and self.capable('largefiles'):
         cmd = 'lheads'
     if cmd == 'batch' and self.capable('largefiles'):
         args['cmds'] = args['cmds'].replace('heads ', 'lheads ')
-    return http_oldcallstream(self, cmd, **args)
+    return httpoldcallstream(self, cmd, **args)
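
The _callstream wrappers above rewrite 'heads' to 'lheads' (and patch batched command strings) when the peer advertises largefiles support, so the server can refuse plain heads from non-largefiles clients. A hedged, self-contained sketch of that rewriting; 'capable' mimics peer.capable('largefiles'):

    def rewritecmd(cmd, args, capable):
        # Only rewrite when the remote understands the largefiles protocol.
        if capable:
            if cmd == 'heads':
                cmd = 'lheads'
            elif cmd == 'batch':
                args['cmds'] = args['cmds'].replace('heads ', 'lheads ')
        return cmd, args

    print(rewritecmd('heads', {}, True))                      # ('lheads', {})
    print(rewritecmd('batch', {'cmds': 'heads ;known '}, True)[1])
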
--- a/hgext/largefiles/reposetup.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/hgext/largefiles/reposetup.py	Tue Apr 17 17:56:36 2012 -0500
@@ -34,54 +34,54 @@
                     'largefiles may behave incorrectly\n')
                     % name)
 
-    class lfiles_repo(repo.__class__):
+    class lfilesrepo(repo.__class__):
         lfstatus = False
         def status_nolfiles(self, *args, **kwargs):
-            return super(lfiles_repo, self).status(*args, **kwargs)
+            return super(lfilesrepo, self).status(*args, **kwargs)
 
         # When lfstatus is set, return a context that gives the names
         # of largefiles instead of their corresponding standins and
         # identifies the largefiles as always binary, regardless of
         # their actual contents.
         def __getitem__(self, changeid):
-            ctx = super(lfiles_repo, self).__getitem__(changeid)
+            ctx = super(lfilesrepo, self).__getitem__(changeid)
             if self.lfstatus:
-                class lfiles_manifestdict(manifest.manifestdict):
+                class lfilesmanifestdict(manifest.manifestdict):
                     def __contains__(self, filename):
-                        if super(lfiles_manifestdict,
+                        if super(lfilesmanifestdict,
                                 self).__contains__(filename):
                             return True
-                        return super(lfiles_manifestdict,
+                        return super(lfilesmanifestdict,
                             self).__contains__(lfutil.standin(filename))
-                class lfiles_ctx(ctx.__class__):
+                class lfilesctx(ctx.__class__):
                     def files(self):
-                        filenames = super(lfiles_ctx, self).files()
+                        filenames = super(lfilesctx, self).files()
                         return [lfutil.splitstandin(f) or f for f in filenames]
                     def manifest(self):
-                        man1 = super(lfiles_ctx, self).manifest()
-                        man1.__class__ = lfiles_manifestdict
+                        man1 = super(lfilesctx, self).manifest()
+                        man1.__class__ = lfilesmanifestdict
                         return man1
                     def filectx(self, path, fileid=None, filelog=None):
                         try:
                             if filelog is not None:
-                                result = super(lfiles_ctx, self).filectx(
+                                result = super(lfilesctx, self).filectx(
                                     path, fileid, filelog)
                             else:
-                                result = super(lfiles_ctx, self).filectx(
+                                result = super(lfilesctx, self).filectx(
                                     path, fileid)
                         except error.LookupError:
                             # Adding a null character will cause Mercurial to
                             # identify this as a binary file.
                             if filelog is not None:
-                                result = super(lfiles_ctx, self).filectx(
+                                result = super(lfilesctx, self).filectx(
                                     lfutil.standin(path), fileid, filelog)
                             else:
-                                result = super(lfiles_ctx, self).filectx(
+                                result = super(lfilesctx, self).filectx(
                                     lfutil.standin(path), fileid)
                             olddata = result.data
                             result.data = lambda: olddata() + '\0'
                         return result
-                ctx.__class__ = lfiles_ctx
+                ctx.__class__ = lfilesctx
             return ctx
 
         # Figure out the status of big files and insert them into the
@@ -92,7 +92,7 @@
                 clean=False, unknown=False, listsubrepos=False):
             listignored, listclean, listunknown = ignored, clean, unknown
             if not self.lfstatus:
-                return super(lfiles_repo, self).status(node1, node2, match,
+                return super(lfilesrepo, self).status(node1, node2, match,
                     listignored, listclean, listunknown, listsubrepos)
             else:
                 # some calls in this function rely on the old version of status
@@ -130,7 +130,7 @@
                         if match(f):
                             break
                     else:
-                        return super(lfiles_repo, self).status(node1, node2,
+                        return super(lfilesrepo, self).status(node1, node2,
                                 match, listignored, listclean,
                                 listunknown, listsubrepos)
 
@@ -157,7 +157,7 @@
 
                 # Get ignored files here even if we weren't asked for them; we
                 # must use the result here for filtering later
-                result = super(lfiles_repo, self).status(node1, node2, m,
+                result = super(lfilesrepo, self).status(node1, node2, m,
                     True, clean, unknown, listsubrepos)
                 if working:
                     try:
@@ -167,7 +167,7 @@
                         # super's status.
                         # Override lfdirstate's ignore matcher to not do
                         # anything
-                        orig_ignore = lfdirstate._ignore
+                        origignore = lfdirstate._ignore
                         lfdirstate._ignore = _ignoreoverride
 
                         def sfindirstate(f):
@@ -216,7 +216,7 @@
                                     added.append(lfile)
                     finally:
                         # Replace the original ignore function
-                        lfdirstate._ignore = orig_ignore
+                        lfdirstate._ignore = origignore
 
                     for standin in ctx1.manifest():
                         if not lfutil.isstandin(standin):
@@ -272,7 +272,7 @@
         # As part of committing, copy all of the largefiles into the
         # cache.
         def commitctx(self, *args, **kwargs):
-            node = super(lfiles_repo, self).commitctx(*args, **kwargs)
+            node = super(lfilesrepo, self).commitctx(*args, **kwargs)
             lfutil.copyalltostore(self, node)
             return node
 
@@ -281,7 +281,7 @@
         # Do that here.
         def commit(self, text="", user=None, date=None, match=None,
                 force=False, editor=False, extra={}):
-            orig = super(lfiles_repo, self).commit
+            orig = super(lfilesrepo, self).commit
 
             wlock = repo.wlock()
             try:
@@ -320,7 +320,8 @@
                     # removed/renamed)
                     for lfile in lfiles:
                         if lfile in modifiedfiles:
-                            if os.path.exists(self.wjoin(lfutil.standin(lfile))):
+                            if os.path.exists(
+                                    self.wjoin(lfutil.standin(lfile))):
                                 # this handles the case where a rebase is being
                                 # performed and the working copy is not updated
                                 # yet.
@@ -350,7 +351,7 @@
                 # Case 2: user calls commit with specified patterns: refresh
                 # any matching big files.
                 smatcher = lfutil.composestandinmatcher(self, match)
-                standins = lfutil.dirstate_walk(self.dirstate, smatcher)
+                standins = lfutil.dirstatewalk(self.dirstate, smatcher)
 
                 # No matching big files: get out of the way and pass control to
                 # the usual commit() method.
@@ -378,7 +379,7 @@
                 # complaining "not tracked" for big files.
                 lfiles = lfutil.listlfiles(repo)
                 match = copy.copy(match)
-                orig_matchfn = match.matchfn
+                origmatchfn = match.matchfn
 
                 # Check both the list of largefiles and the list of
                 # standins because if a largefile was removed, it
@@ -405,7 +406,7 @@
                 match._files = actualfiles
 
                 def matchfn(f):
-                    if orig_matchfn(f):
+                    if origmatchfn(f):
                         return f not in lfiles
                     else:
                         return f in standins
@@ -450,10 +451,10 @@
                              for f in files
                              if lfutil.isstandin(f) and f in ctx]))
                 lfcommands.uploadlfiles(ui, self, remote, toupload)
-            return super(lfiles_repo, self).push(remote, force, revs,
+            return super(lfilesrepo, self).push(remote, force, revs,
                 newbranch)
 
-    repo.__class__ = lfiles_repo
+    repo.__class__ = lfilesrepo
 
     def checkrequireslfiles(ui, repo, **kwargs):
         if 'largefiles' not in repo.requirements and util.any(
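
reposetup() swaps the repository instance's class for the lfilesrepo subclass so that, while lfstatus is set, status and contexts become largefile-aware without touching the original class. A minimal sketch of that reclassing trick on a plain object:

    class repoclass(object):
        lfstatus = False
        def status(self):
            return 'plain status'

    def wraprepo(r):
        class lfilesrepo(r.__class__):
            def status(self):
                if self.lfstatus:
                    return 'largefile-aware status'
                return super(lfilesrepo, self).status()
        r.__class__ = lfilesrepo

    r = repoclass()
    wraprepo(r)
    print(r.status())      # plain status
    r.lfstatus = True
    print(r.status())      # largefile-aware status
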
--- a/hgext/largefiles/uisetup.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/hgext/largefiles/uisetup.py	Tue Apr 17 17:56:36 2012 -0500
@@ -11,7 +11,7 @@
 from mercurial import archival, cmdutil, commands, extensions, filemerge, hg, \
     httprepo, localrepo, merge, sshrepo, sshserver, wireproto
 from mercurial.i18n import _
-from mercurial.hgweb import hgweb_mod, protocol
+from mercurial.hgweb import hgweb_mod, protocol, webcommands
 
 import overrides
 import proto
@@ -21,7 +21,7 @@
     # files in the result are under Mercurial's control
 
     entry = extensions.wrapcommand(commands.table, 'add',
-                                   overrides.override_add)
+                                   overrides.overrideadd)
     addopt = [('', 'large', None, _('add as largefile')),
               ('', 'normal', None, _('add as normal file')),
               ('', 'lfsize', '', _('add all files above this size '
@@ -30,19 +30,19 @@
     entry[1].extend(addopt)
 
     entry = extensions.wrapcommand(commands.table, 'addremove',
-            overrides.override_addremove)
+            overrides.overrideaddremove)
     entry = extensions.wrapcommand(commands.table, 'remove',
-                                   overrides.override_remove)
+                                   overrides.overrideremove)
     entry = extensions.wrapcommand(commands.table, 'forget',
-                                   overrides.override_forget)
+                                   overrides.overrideforget)
     entry = extensions.wrapcommand(commands.table, 'status',
-                                   overrides.override_status)
+                                   overrides.overridestatus)
     entry = extensions.wrapcommand(commands.table, 'log',
-                                   overrides.override_log)
+                                   overrides.overridelog)
     entry = extensions.wrapcommand(commands.table, 'rollback',
-                                   overrides.override_rollback)
+                                   overrides.overriderollback)
     entry = extensions.wrapcommand(commands.table, 'verify',
-                                   overrides.override_verify)
+                                   overrides.overrideverify)
 
     verifyopt = [('', 'large', None, _('verify largefiles')),
                  ('', 'lfa', None,
@@ -52,44 +52,46 @@
     entry[1].extend(verifyopt)
 
     entry = extensions.wrapcommand(commands.table, 'outgoing',
-        overrides.override_outgoing)
+        overrides.overrideoutgoing)
     outgoingopt = [('', 'large', None, _('display outgoing largefiles'))]
     entry[1].extend(outgoingopt)
     entry = extensions.wrapcommand(commands.table, 'summary',
-                                   overrides.override_summary)
+                                   overrides.overridesummary)
     summaryopt = [('', 'large', None, _('display outgoing largefiles'))]
     entry[1].extend(summaryopt)
 
     entry = extensions.wrapcommand(commands.table, 'update',
-                                   overrides.override_update)
+                                   overrides.overrideupdate)
     entry = extensions.wrapcommand(commands.table, 'pull',
-                                   overrides.override_pull)
-    entry = extensions.wrapfunction(merge, '_checkunknown',
-                                    overrides.override_checkunknown)
+                                   overrides.overridepull)
+    entry = extensions.wrapcommand(commands.table, 'cat',
+                                   overrides.overridecat)
+    entry = extensions.wrapfunction(merge, '_checkunknownfile',
+                                    overrides.overridecheckunknownfile)
     entry = extensions.wrapfunction(merge, 'manifestmerge',
-                                    overrides.override_manifestmerge)
+                                    overrides.overridemanifestmerge)
     entry = extensions.wrapfunction(filemerge, 'filemerge',
-                                    overrides.override_filemerge)
+                                    overrides.overridefilemerge)
     entry = extensions.wrapfunction(cmdutil, 'copy',
-                                    overrides.override_copy)
+                                    overrides.overridecopy)
 
     # Backout calls revert so we need to override both the command and the
     # function
     entry = extensions.wrapcommand(commands.table, 'revert',
-                                   overrides.override_revert)
+                                   overrides.overriderevert)
     entry = extensions.wrapfunction(commands, 'revert',
-                                    overrides.override_revert)
+                                    overrides.overriderevert)
 
     # clone uses hg._update instead of hg.update even though they are the
     # same function... so wrap both of them.
-    extensions.wrapfunction(hg, 'update', overrides.hg_update)
-    extensions.wrapfunction(hg, '_update', overrides.hg_update)
-    extensions.wrapfunction(hg, 'clean', overrides.hg_clean)
-    extensions.wrapfunction(hg, 'merge', overrides.hg_merge)
+    extensions.wrapfunction(hg, 'update', overrides.hgupdate)
+    extensions.wrapfunction(hg, '_update', overrides.hgupdate)
+    extensions.wrapfunction(hg, 'clean', overrides.hgclean)
+    extensions.wrapfunction(hg, 'merge', overrides.hgmerge)
 
-    extensions.wrapfunction(archival, 'archive', overrides.override_archive)
+    extensions.wrapfunction(archival, 'archive', overrides.overridearchive)
     extensions.wrapfunction(cmdutil, 'bailifchanged',
-                            overrides.override_bailifchanged)
+                            overrides.overridebailifchanged)
 
     # create the new wireproto commands ...
     wireproto.commands['putlfile'] = (proto.putlfile, 'sha')
@@ -107,22 +109,24 @@
     hgweb_mod.perms['getlfile'] = 'pull'
     hgweb_mod.perms['statlfile'] = 'pull'
 
+    extensions.wrapfunction(webcommands, 'decodepath', overrides.decodepath)
+
     # the hello wireproto command uses wireproto.capabilities, so it won't see
     # our largefiles capability unless we replace the actual function as well.
-    proto.capabilities_orig = wireproto.capabilities
+    proto.capabilitiesorig = wireproto.capabilities
     wireproto.capabilities = proto.capabilities
 
     # these let us reject non-largefiles clients and make them display
     # our error messages
-    protocol.webproto.refuseclient = proto.webproto_refuseclient
-    sshserver.sshserver.refuseclient = proto.sshproto_refuseclient
+    protocol.webproto.refuseclient = proto.webprotorefuseclient
+    sshserver.sshserver.refuseclient = proto.sshprotorefuseclient
 
     # can't do this in reposetup because it needs to have happened before
     # wirerepo.__init__ is called
-    proto.ssh_oldcallstream = sshrepo.sshrepository._callstream
-    proto.http_oldcallstream = httprepo.httprepository._callstream
-    sshrepo.sshrepository._callstream = proto.sshrepo_callstream
-    httprepo.httprepository._callstream = proto.httprepo_callstream
+    proto.ssholdcallstream = sshrepo.sshrepository._callstream
+    proto.httpoldcallstream = httprepo.httprepository._callstream
+    sshrepo.sshrepository._callstream = proto.sshrepocallstream
+    httprepo.httprepository._callstream = proto.httprepocallstream
 
     # don't die on seeing a repo with the largefiles requirement
     localrepo.localrepository.supported |= set(['largefiles'])
@@ -131,13 +135,13 @@
     for name, module in extensions.extensions():
         if name == 'fetch':
             extensions.wrapcommand(getattr(module, 'cmdtable'), 'fetch',
-                overrides.override_fetch)
+                overrides.overridefetch)
         if name == 'purge':
             extensions.wrapcommand(getattr(module, 'cmdtable'), 'purge',
-                overrides.override_purge)
+                overrides.overridepurge)
         if name == 'rebase':
             extensions.wrapcommand(getattr(module, 'cmdtable'), 'rebase',
-                overrides.override_rebase)
+                overrides.overriderebase)
         if name == 'transplant':
             extensions.wrapcommand(getattr(module, 'cmdtable'), 'transplant',
-                overrides.override_transplant)
+                overrides.overridetransplant)
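
uisetup() leans entirely on extensions.wrapcommand()/wrapfunction(): every override receives the original callable as its first argument and decides whether to delegate. A hedged sketch of that calling convention, with a plain attribute standing in for a wrapped command or function:

    def wrapfunction(container, name, wrapper):
        # Same idea as extensions.wrapfunction: replace container.<name>
        # with a closure that always passes the original in first.
        origfn = getattr(container, name)
        def wrapped(*args, **kwargs):
            return wrapper(origfn, *args, **kwargs)
        setattr(container, name, wrapped)
        return origfn
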
--- a/hgext/mq.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/hgext/mq.py	Tue Apr 17 17:56:36 2012 -0500
@@ -257,10 +257,11 @@
                 ci += 1
             del self.comments[ci]
 
-def secretcommit(repo, phase, *args, **kwargs):
-    """helper dedicated to ensure a commit are secret
-
-    It should be used instead of repo.commit inside the mq source
+def newcommit(repo, phase, *args, **kwargs):
+    """helper dedicated to ensure a commit respect mq.secret setting
+
+    It should be used instead of repo.commit inside the mq source for operation
+    creating new changeset.
     """
     if phase is None:
         if repo.ui.configbool('mq', 'secret', False):
@@ -581,7 +582,7 @@
         ret = hg.merge(repo, rev)
         if ret:
             raise util.Abort(_("update returned %d") % ret)
-        n = secretcommit(repo, None, ctx.description(), ctx.user(), force=True)
+        n = newcommit(repo, None, ctx.description(), ctx.user(), force=True)
         if n is None:
             raise util.Abort(_("repo commit failed"))
         try:
@@ -621,7 +622,7 @@
             # the first patch in the queue is never a merge patch
             #
             pname = ".hg.patches.merge.marker"
-            n = secretcommit(repo, None, '[mq]: merge marker', force=True)
+            n = newcommit(repo, None, '[mq]: merge marker', force=True)
             self.removeundo(repo)
             self.applied.append(statusentry(n, pname))
             self.applieddirty = True
@@ -752,8 +753,8 @@
 
             match = scmutil.matchfiles(repo, files or [])
             oldtip = repo['tip']
-            n = secretcommit(repo, None, message, ph.user, ph.date, match=match,
-                             force=True)
+            n = newcommit(repo, None, message, ph.user, ph.date, match=match,
+                          force=True)
             if repo['tip'] == oldtip:
                 raise util.Abort(_("qpush exactly duplicates child changeset"))
             if n is None:
@@ -949,6 +950,7 @@
         inclsubs = self.checksubstate(repo)
         if inclsubs:
             inclsubs.append('.hgsubstate')
+            substatestate = repo.dirstate['.hgsubstate']
         if opts.get('include') or opts.get('exclude') or pats:
             if inclsubs:
                 pats = list(pats or []) + inclsubs
@@ -958,10 +960,12 @@
                 if f != '.hgsubstate': # .hgsubstate is auto-created
                     raise util.Abort('%s: %s' % (f, msg))
             match.bad = badfn
-            m, a, r, d = repo.status(match=match)[:4]
+            changes = repo.status(match=match)
+            m, a, r, d = changes[:4]
         else:
-            m, a, r, d = self.checklocalchanges(repo, force=True)
-            match = scmutil.matchfiles(repo, m + a + r + inclsubs)
+            changes = self.checklocalchanges(repo, force=True)
+            m, a, r, d = changes
+        match = scmutil.matchfiles(repo, m + a + r + inclsubs)
         if len(repo[None].parents()) > 1:
             raise util.Abort(_('cannot manage merge changesets'))
         commitfiles = m + a + r
@@ -994,8 +998,8 @@
                 if util.safehasattr(msg, '__call__'):
                     msg = msg()
                 commitmsg = msg and msg or ("[mq]: %s" % patchfn)
-                n = secretcommit(repo, None, commitmsg, user, date, match=match,
-                                 force=True)
+                n = newcommit(repo, None, commitmsg, user, date, match=match,
+                              force=True)
                 if n is None:
                     raise util.Abort(_("repo commit failed"))
                 try:
@@ -1009,8 +1013,15 @@
                         p.write(msg)
                     if commitfiles:
                         parent = self.qparents(repo, n)
+                        if inclsubs:
+                            if substatestate in 'a?':
+                                changes[1].append('.hgsubstate')
+                            elif substatestate in 'r':
+                                changes[2].append('.hgsubstate')
+                            else: # modified
+                                changes[0].append('.hgsubstate')
                         chunks = patchmod.diff(repo, node1=parent, node2=n,
-                                            match=match, opts=diffopts)
+                                               changes=changes, opts=diffopts)
                         for chunk in chunks:
                             p.write(chunk)
                     p.close()
@@ -1043,11 +1054,7 @@
                 hg.clean(repo, urev)
                 repo.dirstate.write()
 
-            self.removeundo(repo)
             repair.strip(self.ui, repo, revs, backup)
-            # strip may have unbundled a set of backed up revisions after
-            # the actual strip
-            self.removeundo(repo)
         finally:
             release(lock, wlock)
 
@@ -1553,8 +1560,8 @@
 
                 # Ensure we create a new changeset in the same phase as
                 # the old one.
-                n = secretcommit(repo, oldphase, message, user, ph.date,
-                                 match=match, force=True)
+                n = newcommit(repo, oldphase, message, user, ph.date,
+                              match=match, force=True)
                 # only write patch after a successful commit
                 patchf.close()
                 self.applied.append(statusentry(n, patchfn))
@@ -2002,7 +2009,7 @@
           ('P', 'push', None, _('qpush after importing'))],
          _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... FILE...'))
 def qimport(ui, repo, *filename, **opts):
-    """import a patch
+    """import a patch or existing changeset
 
     The patch is inserted into the series after the last applied
     patch. If no patches have been applied, qimport prepends the patch
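
The mq hunk above injects ``.hgsubstate`` into the status lists that are later
handed to ``patchmod.diff(..., changes=changes)``. A minimal sketch of that
classification, assuming a status-like list ``[modified, added, removed]``
(the helper name is illustrative, not mq's internal API)::

  # substatestate is the dirstate character recorded for .hgsubstate:
  # 'a' added, '?' untracked, 'r' removed, anything else -> modified
  def classify_substate(substatestate, changes):
      modified, added, removed = changes[0], changes[1], changes[2]
      if substatestate in 'a?':
          added.append('.hgsubstate')
      elif substatestate in 'r':
          removed.append('.hgsubstate')
      else:
          modified.append('.hgsubstate')
      return changes

  # e.g. a freshly added .hgsubstate ends up in the "added" bucket
  assert classify_substate('a', [[], [], []]) == [[], ['.hgsubstate'], []]
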
--- a/hgext/patchbomb.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/hgext/patchbomb.py	Tue Apr 17 17:56:36 2012 -0500
@@ -84,7 +84,7 @@
     if not patchname and not node:
         raise ValueError
 
-    if opts.get('attach'):
+    if opts.get('attach') and not opts.get('body'):
         body = ('\n'.join(desc[1:]).strip() or
                 'Patch subject is complete summary.')
         body += '\n\n\n'
@@ -101,7 +101,11 @@
     if opts.get('diffstat'):
         body += ds + '\n\n'
 
-    if opts.get('attach') or opts.get('inline'):
+    addattachment = opts.get('attach') or opts.get('inline')
+    if not addattachment or opts.get('body'):
+        body += '\n'.join(patchlines)
+
+    if addattachment:
         msg = email.MIMEMultipart.MIMEMultipart()
         if body:
             msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
@@ -124,7 +128,6 @@
         p['Content-Disposition'] = disposition + '; filename=' + patchname
         msg.attach(p)
     else:
-        body += '\n'.join(patchlines)
         msg = mail.mimetextpatch(body, display=opts.get('test'))
 
     flag = ' '.join(opts.get('flag'))
@@ -142,6 +145,7 @@
     return msg, subj, ds
 
 emailopts = [
+    ('', 'body', None, _('send patches as inline message text (default)')),
     ('a', 'attach', None, _('send patches as attachments')),
     ('i', 'inline', None, _('send patches as inline attachments')),
     ('', 'bcc', [], _('email addresses of blind carbon copy recipients')),
@@ -199,7 +203,9 @@
     By default the patch is included as text in the email body for
     easy reviewing. Using the -a/--attach option will instead create
     an attachment for the patch. With -i/--inline an inline attachment
-    will be created.
+    will be created. You can include a patch both as text in the email
+    body and as a regular or an inline attachment by combining the
+    -a/--attach or -i/--inline option with --body.
 
     With -o/--outgoing, emails will be generated for patches not found
     in the destination repository (or only those which are ancestors
@@ -384,7 +390,7 @@
         prefix = '[PATCH %0*d of %d%s]' % (tlen, 0, len(patches), flag)
 
         subj = (opts.get('subject') or
-                prompt(ui, 'Subject: ', rest=prefix, default=''))
+                prompt(ui, '(optional) Subject: ', rest=prefix, default=''))
         if not subj:
             return None         # skip intro if the user doesn't bother
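
With the new ``--body`` flag above, the patch text stays in the message body
even when ``-a/--attach`` or ``-i/--inline`` is given; without an attachment
it goes into the body as before. A small sketch of that decision (the function
name and opts dict are illustrative only)::

  def patch_goes_into_body(opts):
      addattachment = bool(opts.get('attach') or opts.get('inline'))
      # no attachment requested -> body; --body forces it in addition
      return not addattachment or bool(opts.get('body'))

  assert patch_goes_into_body({})                              # plain mail
  assert not patch_goes_into_body({'attach': True})            # attachment only
  assert patch_goes_into_body({'attach': True, 'body': True})  # both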
 
--- a/hgext/record.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/hgext/record.py	Tue Apr 17 17:56:36 2012 -0500
@@ -261,7 +261,7 @@
 def filterpatch(ui, headers):
     """Interactively filter patch chunks into applied-only chunks"""
 
-    def prompt(skipfile, skipall, query):
+    def prompt(skipfile, skipall, query, chunk):
         """prompt query, and process base inputs
 
         - y/n for the rest of file
@@ -271,14 +271,16 @@
 
         Return True/False and possibly updated skipfile and skipall.
         """
+        newpatches = None
         if skipall is not None:
-            return skipall, skipfile, skipall
+            return skipall, skipfile, skipall, newpatches
         if skipfile is not None:
-            return skipfile, skipfile, skipall
+            return skipfile, skipfile, skipall, newpatches
         while True:
-            resps = _('[Ynsfdaq?]')
+            resps = _('[Ynesfdaq?]')
             choices = (_('&Yes, record this change'),
                     _('&No, skip this change'),
+                    _('&Edit the change manually'),
                     _('&Skip remaining changes to this file'),
                     _('Record remaining changes to this &file'),
                     _('&Done, skip remaining changes and files'),
@@ -287,7 +289,7 @@
                     _('&?'))
             r = ui.promptchoice("%s %s" % (query, resps), choices)
             ui.write("\n")
-            if r == 7: # ?
+            if r == 8: # ?
                 doc = gettext(record.__doc__)
                 c = doc.find('::') + 2
                 for l in doc[c:].splitlines():
@@ -298,17 +300,70 @@
                 ret = True
             elif r == 1: # no
                 ret = False
-            elif r == 2: # Skip
+            elif r == 2: # Edit patch
+                if chunk is None:
+                    ui.write(_('cannot edit patch for whole file'))
+                    ui.write("\n")
+                    continue
+                if chunk.header.binary():
+                    ui.write(_('cannot edit patch for binary file'))
+                    ui.write("\n")
+                    continue
+                # Patch comment based on the Git one (based on comment at end of
+                # http://mercurial.selenic.com/wiki/RecordExtension)
+                phelp = '---' + _("""
+To remove '-' lines, make them ' ' lines (context).
+To remove '+' lines, delete them.
+Lines starting with # will be removed from the patch.
+
+If the patch applies cleanly, the edited hunk will immediately be
+added to the record list. If it does not apply cleanly, a rejects
+file will be generated: you can use that when you try again. If
+all lines of the hunk are removed, then the edit is aborted and
+the hunk is left unchanged.
+""")
+                (patchfd, patchfn) = tempfile.mkstemp(prefix="hg-editor-",
+                        suffix=".diff", text=True)
+                ncpatchfp = None
+                try:
+                    # Write the initial patch
+                    f = os.fdopen(patchfd, "w")
+                    chunk.header.write(f)
+                    chunk.write(f)
+                    f.write('\n'.join(['# ' + i for i in phelp.splitlines()]))
+                    f.close()
+                    # Start the editor and wait for it to complete
+                    editor = ui.geteditor()
+                    util.system("%s \"%s\"" % (editor, patchfn),
+                            environ={'HGUSER': ui.username()},
+                            onerr=util.Abort, errprefix=_("edit failed"),
+                            out=ui.fout)
+                    # Remove comment lines
+                    patchfp = open(patchfn)
+                    ncpatchfp = cStringIO.StringIO()
+                    for line in patchfp:
+                        if not line.startswith('#'):
+                            ncpatchfp.write(line)
+                    patchfp.close()
+                    ncpatchfp.seek(0)
+                    newpatches = parsepatch(ncpatchfp)
+                finally:
+                    os.unlink(patchfn)
+                    del ncpatchfp
+                # Signal that the chunk shouldn't be applied as-is, but
+                # provide the new patch to be used instead.
+                ret = False
+            elif r == 3: # Skip
                 ret = skipfile = False
-            elif r == 3: # file (Record remaining)
+            elif r == 4: # file (Record remaining)
                 ret = skipfile = True
-            elif r == 4: # done, skip remaining
+            elif r == 5: # done, skip remaining
                 ret = skipall = False
-            elif r == 5: # all
+            elif r == 6: # all
                 ret = skipall = True
-            elif r == 6: # quit
+            elif r == 7: # quit
                 raise util.Abort(_('user quit'))
-            return ret, skipfile, skipall
+            return ret, skipfile, skipall, newpatches
 
     seen = set()
     applied = {}        # 'filename' -> [] of chunks
@@ -326,7 +381,7 @@
             h.pretty(ui)
         msg = (_('examine changes to %s?') %
                _(' and ').join(map(repr, h.files())))
-        r, skipfile, skipall = prompt(skipfile, skipall, msg)
+        r, skipfile, skipall, np = prompt(skipfile, skipall, msg, None)
         if not r:
             continue
         applied[h.filename()] = [h]
@@ -342,12 +397,19 @@
                 idx = pos - len(h.hunks) + i
                 msg = _('record change %d/%d to %r?') % (idx, total,
                                                          chunk.filename())
-            r, skipfile, skipall = prompt(skipfile, skipall, msg)
+            r, skipfile, skipall, newpatches = prompt(skipfile,
+                    skipall, msg, chunk)
             if r:
                 if fixoffset:
                     chunk = copy.copy(chunk)
                     chunk.toline += fixoffset
                 applied[chunk.filename()].append(chunk)
+            elif newpatches is not None:
+                for newpatch in newpatches:
+                    for newhunk in newpatch.hunks:
+                        if fixoffset:
+                            newhunk.toline += fixoffset
+                        applied[newhunk.filename()].append(newhunk)
             else:
                 fixoffset += chunk.removed - chunk.added
     return sum([h for h in applied.itervalues()
@@ -372,6 +434,7 @@
 
       y - record this change
       n - skip this change
+      e - edit this change manually
 
       s - skip remaining changes to this file
       f - record remaining changes to this file
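
The record hunks above add an ``e`` choice that writes the hunk to a temporary
file, runs the user's editor on it, drops the appended ``#`` help lines and
re-parses the result into replacement hunks. Roughly, the comment-stripping
step looks like this (helper name is illustrative; record itself feeds the
result to its own ``parsepatch``)::

  import cStringIO

  def strip_editor_comments(patchfp):
      # keep everything except the '# ...' help lines that were appended
      # to the temporary patch before the editor was started
      out = cStringIO.StringIO()
      for line in patchfp:
          if not line.startswith('#'):
              out.write(line)
      out.seek(0)
      return out
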
--- a/hgext/transplant.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/hgext/transplant.py	Tue Apr 17 17:56:36 2012 -0500
@@ -144,14 +144,26 @@
                     if not hasnode(repo, node):
                         repo.pull(source, heads=[node])
 
+                skipmerge = False
                 if parents[1] != revlog.nullid:
-                    self.ui.note(_('skipping merge changeset %s:%s\n')
-                                 % (rev, short(node)))
+                    if not opts.get('parent'):
+                        self.ui.note(_('skipping merge changeset %s:%s\n')
+                                     % (rev, short(node)))
+                        skipmerge = True
+                    else:
+                        parent = source.lookup(opts['parent'])
+                        if parent not in parents:
+                            raise util.Abort(_('%s is not a parent of %s') %
+                                             (short(parent), short(node)))
+                else:
+                    parent = parents[0]
+
+                if skipmerge:
                     patchfile = None
                 else:
                     fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
                     fp = os.fdopen(fd, 'w')
-                    gen = patch.diff(source, parents[0], node, opts=diffopts)
+                    gen = patch.diff(source, parent, node, opts=diffopts)
                     for chunk in gen:
                         fp.write(chunk)
                     fp.close()
@@ -295,19 +307,29 @@
     def recover(self, repo):
         '''commit working directory using journal metadata'''
         node, user, date, message, parents = self.readlog()
-        merge = len(parents) == 2
+        merge = False
 
         if not user or not date or not message or not parents[0]:
             raise util.Abort(_('transplant log file is corrupt'))
 
+        parent = parents[0]
+        if len(parents) > 1:
+            if opts.get('parent'):
+                parent = source.lookup(opts['parent'])
+                if parent not in parents:
+                    raise util.Abort(_('%s is not a parent of %s') %
+                                     (short(parent), short(node)))
+            else:
+                merge = True
+
         extra = {'transplant_source': node}
         wlock = repo.wlock()
         try:
             p1, p2 = repo.dirstate.parents()
-            if p1 != parents[0]:
+            if p1 != parent:
                 raise util.Abort(
                     _('working dir not at transplant parent %s') %
-                                 revlog.hex(parents[0]))
+                                 revlog.hex(parent))
             if merge:
                 repo.dirstate.setparents(p1, parents[1])
             n = repo.commit(message, user, date, extra=extra,
@@ -468,6 +490,8 @@
     ('a', 'all', None, _('pull all changesets up to BRANCH')),
     ('p', 'prune', [], _('skip over REV'), _('REV')),
     ('m', 'merge', [], _('merge at REV'), _('REV')),
+    ('', 'parent', '',
+     _('parent to choose when transplanting merge'), _('REV')),
     ('e', 'edit', False, _('invoke editor on commit messages')),
     ('', 'log', None, _('append transplant info to log message')),
     ('c', 'continue', None, _('continue last transplant session '
@@ -510,6 +534,9 @@
     of a merged transplant, and you can merge descendants of them
     normally instead of transplanting them.
 
+    Merge changesets may be transplanted directly by specifying the
+    proper parent changeset with :hg:`transplant --parent`.
+
     If no merges or revisions are provided, :hg:`transplant` will
     start an interactive changeset browser.
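
With the transplant hunks above, a merge changeset can be transplanted when
``--parent`` names one of its two parents; the patch is then generated against
that parent. A compact sketch of the selection rule (function name is
illustrative)::

  def pick_base_parent(parents, requested, nullid):
      # parents: (p1, p2) of the changeset to transplant
      if parents[1] == nullid:       # not a merge: always diff against p1
          return parents[0]
      if requested is None:          # merge without --parent: skip it
          return None
      if requested not in parents:
          raise ValueError('requested revision is not a parent of the merge')
      return requested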
 
--- a/i18n/de.po	Mon Apr 16 11:48:15 2012 +0200
+++ b/i18n/de.po	Tue Apr 17 17:56:36 2012 -0500
@@ -43,7 +43,7 @@
 #
 # Schlecht:
 #   Mergen Sie zunächst die Branche.
-#   Falls der Tag im Filelog zu finden ist...
+#   Falls der Tag im Filelog nicht gecheckt werden kann...
 #
 # Übersetzungen:
 #  backup        Sicherheitskopie (gewinnt in google-fight gegen Sicherungsk.)
@@ -54,9 +54,12 @@
 #  changeset     Änderungssatz
 #  commit        Commit/Übernahme
 #  commit (v)    übernehmen
+#  comm. message Versionsmeldung
 #  committer     Autor
+#  corrupt       beschädigt (nicht korrumpiert)
 #  deprecated    veraltet
 #  force (v)     erzwingen
+#  history       Historie
 #  merge         zusammenführen
 #  notation      Schreibweise
 #  queue         Reihe
@@ -64,24 +67,25 @@
 #  manage/track  versionieren
 #  pull          abrufen
 #  push          übertragen
-#  rebase        Pfropfung/Verschiebung
+#  rebase        Verschiebung (Pfropfung)
 #  section       Abschnitt (nicht Sektion)
 #  tag           Etikett    (svn verwendet "Marke")
 #
 # Nicht übersetzt bleiben
-#  hook     mangels guter Übersetzung (Einhängeaktion?)
-#  token    Fachbegriff auch im Deutschen
-#  parser   Fachbegriff auch im Deutschen
-#  patch    Bezieht sich auf ein sehr altes Programm mit demselben Namen und
+#  Hook  (m) mangels guter Übersetzung (Einhängeaktion?)
+#  Token (n) Fachbegriff auch im Deutschen
+#  Parser(m) Fachbegriff auch im Deutschen
+#  Patch (m) Bezieht sich auf ein sehr altes Programm mit demselben Namen und
 #           Dateien in dessen Syntax/Format
 #           Die ursprüngliche Bedeutung des englischen Wortes (Flicken) trifft
 #           heute eh nicht mehr zu.
-#  diff     Siehe patch, ausserdem Abkürzung eines auch deutschen Worts
-#  glob
+#  Diff  (n) Siehe patch, ausserdem Abkürzung eines auch deutschen Worts
+#  glob  (n)
 #  dirstate Nur für Entwickler interessant. Im sonstigen Handbuch umschreiben!
 #  .. note:: Dies ist spezielle reStructured-Syntax und darf nicht geändert
 #           werden
-# 
+#  Pager (m) mangels guter Übersetzung (Theoretisch "Verseitener" :)
+#
 # Weiteres:
 #  graft/transplant: Da graft eine Reimplementierung von transplant als
 #         Kernfunktion ist, kann in beiden Fällen "Transplantation" als
@@ -92,12 +96,14 @@
 #         kann die Übersetzung "Revision" lauten.
 #  largefile: Binärriese (neues Wort, vielleicht fällt jemandem ein besseres
 #             ein. Dies ist zur Zeit konsequent verwendet)
+#  default: Sollte dies nicht lieber eine Voreinstellung statt des Standards
+#           sein?
 #
 msgid ""
 msgstr ""
 "Project-Id-Version: Mercurial\n"
 "Report-Msgid-Bugs-To: <mercurial-devel@selenic.com>\n"
-"POT-Creation-Date: 2011-12-04 13:42+0200\n"
+"POT-Creation-Date: 2012-01-21 14:20+0200\n"
 "PO-Revision-Date: 2010-12-24 15:09+0100\n"
 "Last-Translator: Fabian Kreutz <fabian.kreutz@qvantel.com>\n"
 "Language-Team: \n"
@@ -470,8 +476,7 @@
 
 #, python-format
 msgid "acl: user \"%s\" denied on \"%s\" (changeset \"%s\")"
-msgstr ""
-"acl: Zugriff für \"%s\" auf \"%s\" abgelehnt (Änderungssatz \"%s\")"
+msgstr "acl: Zugriff für \"%s\" auf \"%s\" abgelehnt (Änderungssatz \"%s\")"
 
 #, python-format
 msgid "acl: user \"%s\" not allowed on \"%s\" (changeset \"%s\")"
@@ -529,7 +534,8 @@
 msgstr ""
 "Direktes Schreiben in die Datenbank führt leicht zu Problemen bei Schema-\n"
 "veränderungen. Ausserdem wird ein externes Bugzilla Script benötigt, um\n"
-"Benachrichtigungsemails zu versenden. Dieses Script wird mit den Rechten des\n"
+"Benachrichtigungsemails zu versenden. Dieses Script wird mit den Rechten "
+"des\n"
 "Mercurial-Benutzers ausgeführt, benötigt eine lokale Bugzilla-Installation\n"
 "sowie Leserechte an der Bugzilla Konfigurationsdatei und MySQL Benutzername\n"
 "und Passwort, um vollen Zugriff auf die Bugzilla Datenbank zu haben. Aus\n"
@@ -650,7 +656,8 @@
 msgstr ""
 "bugzilla.strip\n"
 "  Die Anzahl von Verzeichnisebenen, die vom Anfang des Archivpfads\n"
-"  (``{root}`` in Vorlagen) abgeschnitten wird, um ``{webroot}`` zu erhalten.\n"
+"  (``{root}`` in Vorlagen) abgeschnitten wird, um ``{webroot}`` zu "
+"erhalten.\n"
 "  Zum Beispiel mit ``{root}`` als ``/var/local/mein-projekt`` und einem\n"
 "  Wert 2, wird ``{webroot}`` auf ``mein-projekt`` gesetzt. Standard 0."
 
@@ -721,8 +728,7 @@
 msgid ""
 "XMLRPC+email access mode uses the XMLRPC access mode configuration items,\n"
 "and also:"
-msgstr ""
-"XMLRPC+email nutzt die gleichen Zugriffskonfiguration wie XMLRPC plus:"
+msgstr "XMLRPC+email nutzt die gleichen Zugriffskonfiguration wie XMLRPC plus:"
 
 msgid ""
 "bugzilla.bzemail\n"
@@ -928,7 +934,8 @@
 "with a collection of Mercurial repositories in ``/var/local/hg/repos/``,\n"
 "with a web interface at ``http://my-project.org/hg``. ::"
 msgstr ""
-"MySQL Beispielkonfiguration. Bugzilla 3.2 ist lokal in ``/opt/bugzilla-3.2``\n"
+"MySQL Beispielkonfiguration. Bugzilla 3.2 ist lokal in ``/opt/"
+"bugzilla-3.2``\n"
 "installiert. Die MySQL Datenbank wird per ``localhost`` angesprochen,\n"
 "der Name der Bugzilla Datenbank ist ``bugs`` und der MySQL Zugriff ist für\n"
 "enutzer ``bugs`` mit Password ``XYZZY`` erlaubt. Die Archive, deren\n"
@@ -1341,7 +1348,8 @@
 "(fettgeschrieben), 'dim' (gedämmt), 'inverse' (Vorder- und Hintergrund\n"
 "getauscht), 'italic' (Schrägschrift), 'standout' (hervorstehend) und\n"
 "'underline' (unterstrichen); im ECMA-48 Modus nur 'bold', 'inverse',\n"
-"'italic' und 'underline'. Wie dies tatsächlich aussieht, hängt vom Terminal-\n"
+"'italic' und 'underline'. Wie dies tatsächlich aussieht, hängt vom "
+"Terminal-\n"
 "emulator ab. Effekte, die nicht realisiert werden können, werden ohne\n"
 "Warnung ignoriert."
 
@@ -1415,13 +1423,11 @@
 "deaktivieren.\n"
 
 msgid "no terminfo entry for setab/setaf: reverting to ECMA-48 color\n"
-msgstr ""
-"Kein terminfo-Eintrag für setab/setaf: Falle auf ECMA-48 zurück\n"
+msgstr "Kein terminfo-Eintrag für setab/setaf: Falle auf ECMA-48 zurück\n"
 
 #, python-format
 msgid "warning: failed to set color mode to %s\n"
-msgstr ""
-"Warnung: Konnte Farbmodus nicht auf %s setzen\n"
+msgstr "Warnung: Konnte Farbmodus nicht auf %s setzen\n"
 
 #, python-format
 msgid "ignoring unknown color/effect %r (configured in color.%s)\n"
@@ -1569,8 +1575,7 @@
 msgstr "      Quellautor = Zielautor"
 
 msgid "    Empty lines and lines starting with a ``#`` are ignored."
-msgstr ""
-"    Leere Zeilen und Zeilen, die mit ``#`` beginnen, werden ignoriert."
+msgstr "    Leere Zeilen und Zeilen, die mit ``#`` beginnen, werden ignoriert."
 
 msgid ""
 "    The filemap is a file that allows filtering and remapping of files\n"
@@ -2476,7 +2481,7 @@
 msgstr ""
 
 msgid "failed to reach end of mtn automate stdio headers"
-msgstr ""
+msgstr "Konnte den Endpunkt der mtn automate-Schnittstelle nicht erreichen"
 
 #, python-format
 msgid "%s does not look like a P4 repository"
@@ -2493,6 +2498,10 @@
 "Mercurial konnte sich selbst nicht ausführen, prüfe, ob die Programmdatei\n"
 "in PATH enthalten ist."
 
+#, python-format
+msgid "log stream exception '%s'"
+msgstr ""
+
 msgid ""
 "svn: cannot probe remote repository, assume it could be a subversion "
 "repository. Use --source-type if you know better.\n"
@@ -3331,10 +3340,10 @@
 "  project = foo\n"
 "  # das Modul (Unterprojekt) (optional)\n"
 "  #module = foo\n"
-"  # Hänge eine Statistik über die Änderungen an die Commit-Nachricht an\n"
+"  # Hänge eine Statistik über Änderungen an die Versionsmeldung an\n"
 "  # (optional)\n"
 "  #diffstat = False\n"
-"  # Vorlage für die Commit-Nachrichten (optional)\n"
+"  # Vorlage für die Versionsmeldungen (optional)\n"
 "  #template = {desc}\\n{baseurl}{webroot}/rev/{node}-- {diffstat}\n"
 "  # zu verwendender Stil (optional)\n"
 "  #style = foo\n"
@@ -4160,9 +4169,10 @@
 "enabled for this to work."
 msgstr ""
 "Wird ein Änderungssatz, welcher einen Binärriesen ändert oder hinzufügt,\n"
-"in ein entferntes Archiv übertragen, so werden auch die Revisionen der\n"
-"Binärdatei übertragen. Das entfernte Mercurial muss hierfür die\n"
-"largefiles-Erweiterung unterstützen."
+"in ein entferntes Archiv übertragen, so werden gleichzeitig (asymmetrisch\n"
+"zum Abrufen per pull) auch die (neuen) Revisionen der Binärdatei "
+"übertragen.\n"
+"Das entfernte Mercurial muss hierfür die largefiles-Erweiterung unterstützen."
 
 msgid ""
 "When you pull a changeset that affects largefiles from a remote\n"
@@ -4242,10 +4252,18 @@
 
 msgid ""
 "Files that match one of these patterns will be added as largefiles\n"
-"regardless of their size.\n"
+"regardless of their size."
 msgstr ""
 "Dateien, die auf ein solches Muster passen, werden unabhängig von ihrer\n"
-"Größe als Binärriesen hinzugefügt.\n"
+"Größe als Binärriesen hinzugefügt."
+
+msgid ""
+"The ``largefiles.minsize`` and ``largefiles.patterns`` config options\n"
+"will be ignored for any repositories not already containing a\n"
+"largefile. To add the first largefile to a repository, you must\n"
+"explicitly do so with the --large flag passed to the :hg:`add`\n"
+"command.\n"
+msgstr ""
 
 msgid "convert a normal repository to a largefiles repository"
 msgstr "Konvertiert ein normales Archiv in ein Archiv mit Binärriesen"
@@ -4335,6 +4353,18 @@
 msgid "largefile %s becomes symlink"
 msgstr "Binärriese %s wird ein symbolischer Verweis"
 
+#, python-format
+msgid "skipping incorrectly formatted tag %s\n"
+msgstr "Überspringe fehlerhaft formatiertes Etikett: %s\n"
+
+#, python-format
+msgid "skipping incorrectly formatted id %s\n"
+msgstr "Überspringe fehlerhaft formatierte ID: %s\n"
+
+#, python-format
+msgid "no mapping for id %s\n"
+msgstr "Keine Abbildung für ID %s\n"
+
 msgid "uploading largefiles"
 msgstr "Lade Binärriesen hoch"
 
@@ -4378,10 +4408,6 @@
 msgid "Found %s in system cache\n"
 msgstr "%s im Zwischenspeicher gefunden\n"
 
-#, python-format
-msgid "bad hash in '%s' (only %d bytes long)"
-msgstr "Prüfsummenfehler in '%s' (nur %d Bytes lang)"
-
 msgid "Can't get file locally"
 msgstr "Kann Datei nicht lokal abrufen"
 
@@ -4417,8 +4443,8 @@
 msgstr "Keine Dateien angegeben"
 
 #, python-format
-msgid "not removing %s: %s (use -f to force removal)\n"
-msgstr "Entferne nicht %s: %s (Nutze -f um Entfernung zu erzwingen)\n"
+msgid "not removing %s: %s (use forget to undo)\n"
+msgstr "Entferne nicht %s: %s (Nutze ``forget`` um rückgängig zu machen)\n"
 
 msgid "file still exists"
 msgstr "Datei existiert noch"
@@ -4436,6 +4462,28 @@
 msgid "uncommitted local changes"
 msgstr "Ausstehende nicht versionierte Änderungen"
 
+msgid "&Largefile"
+msgstr "&Binärriese"
+
+msgid "&Normal file"
+msgstr "&Normale Datei"
+
+#, python-format
+msgid ""
+"%s has been turned into a largefile\n"
+"use (l)argefile or keep as (n)ormal file?"
+msgstr ""
+"%s wurde in einen Binärriesen umgewandelt.\n"
+"Nutze als (B)inärriese oder als (n)ormale Datei?"
+
+#, python-format
+msgid ""
+"%s has been turned into a normal file\n"
+"keep as (l)argefile or use (n)ormal file?"
+msgstr ""
+"%s wurde in eine normale Datei umgewandelt.\n"
+"Nutze als (B)inärriese oder als (n)ormale Datei?"
+
 #, python-format
 msgid "merging %s and %s to %s\n"
 msgstr "Führe %s und %s zusammen zu %s\n"
@@ -4464,6 +4512,13 @@
 msgid "no files to copy"
 msgstr "Keine Dateien zu kopieren"
 
+msgid "caching new largefiles\n"
+msgstr "Lade neue Binärriesen in den Zwischenspeicher\n"
+
+#, python-format
+msgid "%d largefiles cached\n"
+msgstr "%d Binärriesen in den Zwischenspeicher geladen\n"
+
 #, python-format
 msgid "unknown archive type '%s'"
 msgstr "Unbekannter Archivtyp '%s'"
@@ -4472,6 +4527,10 @@
 msgstr "Bei Archivierung in Dateien kann kein Präfix angegeben werden"
 
 #, python-format
+msgid "largefile %s not found in repo store or system cache"
+msgstr "Binärriese %s weder im Archivlager noch Zwischenspeicher gefunden"
+
+#, python-format
 msgid "not removing %s: file is already untracked\n"
 msgstr "Entferne %s nicht: Datei ist nicht versioniert\n"
 
@@ -4485,18 +4544,24 @@
 msgid "largefiles: %d to upload\n"
 msgstr "largefiles: %d hochzuladen\n"
 
-msgid "addremove cannot be run on a repo with largefiles"
-msgstr ""
-"'addremove' kann nicht auf ein Archiv mit Binärriesen angewendet werden"
-
-#, python-format
-msgid "largefiles: failed to put %s (%s) into store: %s"
-msgstr "largefiles: Konnte %s (%s) nicht in einlagern: %s"
+msgid "largefile contents do not match hash"
+msgstr "Inhalt des Binärriesen passt nicht auf Prüfsumme"
+
+#, python-format
+msgid "largefiles: failed to put %s into store: %s"
+msgstr "largefiles: Konnte %s nicht in einlagern: %s"
 
 #, python-format
 msgid "requested largefile %s not present in cache"
 msgstr "Angeforderer Binärriese %s ist nicht im Zwischenspeicher"
 
+msgid "remote: "
+msgstr "Entfernt: "
+
+#, python-format
+msgid "unexpected putlfile response: %s"
+msgstr "Unerwartete Antwort von putlfile: %s"
+
 msgid "putlfile failed:"
 msgstr "putlfile fehlgeschlagen:"
 
@@ -4539,7 +4604,8 @@
 "largefiles: repo method %r appears to have already been wrapped by another "
 "extension: largefiles may behave incorrectly\n"
 msgstr ""
-"largefiles: Aktion %r scheint bereits von einer anderen Erweiterung verändert zu sein. Dadurch kann es zu Fehlern in largefiles kommen\n"
+"largefiles: Aktion %r scheint bereits von einer anderen Erweiterung "
+"verändert zu sein. Dadurch kann es zu Fehlern in largefiles kommen\n"
 
 #, python-format
 msgid "file \"%s\" is a largefile standin"
@@ -5011,6 +5077,13 @@
 msgstr "Nachfahren der Revision %d werden nicht verwaltet"
 
 #, python-format
+msgid "revision %d is not mutable"
+msgstr "Revision %d ist unveränderbar"
+
+msgid "see \"hg help phases\" for details"
+msgstr "siehe \"hg help phases\" für Details"
+
+#, python-format
 msgid "cannot import merge revision %d"
 msgstr "Kann Zusammenführung %d nicht importieren"
 
@@ -5030,7 +5103,8 @@
 msgstr "Benenne %s in %s um\n"
 
 msgid "need --name to import a patch from -"
-msgstr "Beim Import von der Standardeingabe muss die Option --name angegeben werden"
+msgstr ""
+"Beim Import von der Standardeingabe muss die Option --name angegeben werden"
 
 #, python-format
 msgid "unable to read file %s"
@@ -5054,11 +5128,13 @@
 
 msgid ""
 "    The patches must not be applied, and at least one patch is required. "
-"With\n"
-"    -k/--keep, the patch files are preserved in the patch directory."
+"Exact\n"
+"    patch identifiers must be given. With -k/--keep, the patch files are\n"
+"    preserved in the patch directory."
 msgstr ""
 "    Die Patches dürfen nicht angewendet sein und mindestens einer muss\n"
-"    angegeben sein. Mit -k/--keep werden die Patchdateien erhalten."
+"    angegeben sein. Exakte Patch-IDs müssen verwendet werden. Mit -k/--keep\n"
+"    werden die Patchdateien erhalten."
 
 msgid ""
 "    To stop managing a patch and move it into permanent history,\n"
@@ -5896,12 +5972,12 @@
 "    Beispiel::"
 
 msgid ""
-"        qguard foo.patch -stable    (negative guard)\n"
-"        qguard bar.patch +stable    (positive guard)\n"
+"        qguard foo.patch -- -stable    (negative guard)\n"
+"        qguard bar.patch    +stable    (positive guard)\n"
 "        qselect stable"
 msgstr ""
-"        qguard foo.patch -stable    (negativer Wächter)\n"
-"        qguard bar.patch +stable    (positiver Wächter)\n"
+"        qguard foo.patch -- -stable    (negativer Wächter)\n"
+"        qguard bar.patch    +stable    (positiver Wächter)\n"
 "        qselect stable"
 
 msgid ""
@@ -6005,7 +6081,7 @@
 msgstr ""
 "    Schließt die angegebenen Revisionen ab (entspricht den angewandten\n"
 "    Patches), indem sie aus der Kontrolle von mq entfernt und in die\n"
-"    reguläre Projektgeschichte übernommen werden."
+"    reguläre Projekthistorie übernommen werden."
 
 msgid ""
 "    Accepts a revision range or the -a/--applied option. If --applied\n"
@@ -6025,8 +6101,7 @@
 msgstr ""
 "    Dies kann insbes. nützlich sein, wenn Ihre Änderungen in einem\n"
 "    vorgelagerten Projektarchiv angewandt wurden, oder wenn Sie Ihre\n"
-"    Änderungen in ein vorgelagertes Archiv übertragen wollen.\n"
-"    "
+"    Änderungen in ein vorgelagertes Archiv übertragen wollen."
 
 msgid "no revisions specified"
 msgstr "Keine Revisionen angegeben"
@@ -6062,9 +6137,8 @@
 "    Supports switching between different patch queues, as well as creating\n"
 "    new patch queues and deleting existing ones."
 msgstr ""
-"    Unterstützt das Wechseln zwischen verschiedener Patch-Reihen, ebenso "
-"wie\n"
-"    das erstellen neuer Reihen und das Löschen bereits bestehender."
+"    Unterstützt das Wechseln zwischen verschiedener Patch-Reihen, sowie\n"
+"    das Erstellen neuer Reihen und das Löschen bereits bestehender."
 
 msgid ""
 "    Omitting a queue name or specifying -l/--list will show you the "
@@ -6130,7 +6204,7 @@
 
 #, python-format
 msgid "non-queue directory \"%s\" already exists"
-msgstr "Nicht-Reihen-Verzeichnis \"%s\" existiert bereits"
+msgstr "Verzeichnis \"%s\" existiert bereits, ist aber keine Reihe"
 
 msgid "use --create to create a new queue"
 msgstr "Verwenden Sie --create, um eine neue Reihe zu erzeugen"
@@ -6192,6 +6266,9 @@
 "This extension let you run hooks sending email notifications when\n"
 "changesets are being pushed, from the sending or receiving side."
 msgstr ""
+"Diese Erweiterung ermöglicht das Senden von Benachrichtigungsemails,\n"
+"wannimmer Änderungssätze übertragen werden. Dies kann von der über-\n"
+"tragenden oder der empfangenden Seite aus geschehen."
 
 msgid ""
 "First, enable the extension as explained in :hg:`help extensions`, and\n"
@@ -6199,6 +6276,10 @@
 "are run by the changesets receiver while the ``outgoing`` one is for\n"
 "the sender::"
 msgstr ""
+"Zunächst muss die Erweiterung (wie in :hg:`help extensions` beschrieben)\n"
+"aktiviert werden und dann als Hook registriert werden. Für den Empfänger\n"
+"sind dies die ``incoming`` und ``outgoing`` Hooks, für den Sender der\n"
+"``outgoing`` Hook::"
 
 msgid ""
 "  [hooks]\n"
@@ -6217,6 +6298,8 @@
 "  # one email for all outgoing changesets\n"
 "  outgoing.notify = python:hgext.notify.hook"
 msgstr ""
+"  # eine E-Mail für jeden übertragenen Änderungssatz\n"
+"  outgoing.notify = python:hgext.notify.hook"
 
 msgid ""
 "Now the hooks are running, subscribers must be assigned to\n"
@@ -6224,6 +6307,9 @@
 "given email or the ``[reposubs]`` section to map emails to a single\n"
 "repository::"
 msgstr ""
+"Schliesslich müssen noch die Abonnements für die Projektarchive definiert\n"
+"werden. Im Abschnitt ``[usersubs]`` kann man mehrere Archive einer Email\n"
+"zuweisen, in ``[reposubs]`` umgekehrt mehrere Emails für ein Archiv angeben."
 
 msgid ""
 "  [usersubs]\n"
@@ -6252,6 +6338,9 @@
 "root. The subscriptions can be defined in their own file and\n"
 "referenced with::"
 msgstr ""
+"Die glob-Muster müssen auf den absoluten Pfad zum Archiv passen. Alle\n"
+"Abonnements können in einer eigenen Datei gesammelt werden und folgender-\n"
+"maßen in der Konfiguration eingebunden werden::"
 
 msgid ""
 "  [notify]\n"
@@ -6264,23 +6353,33 @@
 "Alternatively, they can be added to Mercurial configuration files by\n"
 "setting the previous entry to an empty value."
 msgstr ""
+"Alternativ (mit leerem Wert für ``notify.config``) können die Abonnements\n"
+"in der Mercurial Konfigurationsdatei angegeben werden."
 
 msgid ""
 "At this point, notifications should be generated but will not be sent until "
 "you\n"
 "set the ``notify.test`` entry to ``False``."
 msgstr ""
+"Nach dieser Konfiguration werden die Benachrichtigungen nun generiert, aber\n"
+"noch nicht gesendet, bis der Wert von ``notify.test`` auf ``False`` gesetzt\n"
+"wird."
 
 msgid ""
 "Notifications content can be tweaked with the following configuration "
 "entries:"
 msgstr ""
+"Der Inhalt der Benachrichtigungen kann mit der folgenden Konfiguration\n"
+"angepasst werden:"
 
 msgid ""
 "notify.test\n"
 "  If ``True``, print messages to stdout instead of sending them. Default: "
 "True."
 msgstr ""
+"notify.test\n"
+"  Falls ``True`` werden die Nachrichten auf die Standardausgabe und nicht\n"
+"  als Email versendet. Standard: True."
 
 msgid ""
 "notify.sources\n"
@@ -6292,6 +6391,14 @@
 "  locally. Outgoing sources are the same except for ``unbundle`` which\n"
 "  is replaced by ``bundle``. Default: serve."
 msgstr ""
+"notify.sources\n"
+"  Kommaseparierte Liste von Quellaktionen. Benachrichtigungen werden nur\n"
+"  gesendet, wenn die Änderungen von einer solchen Aktion ausgelöst wurden.\n"
+"  Quellen für ankommende Änderungen sind ``serve`` (Änderungen via http\n"
+"  oder ssh), ``pull`` (aktiv abgerufen), ``unbundle`` (per :hg:`unbundle`\n"
+"  eingefügt) oder ``push`` (lokal übertragen). Für ausgehende Änderungen\n"
+"  gibt es die gleichen, nur mit ``unbundle`` gegen ``bundle`` getauscht.\n"
+"  Standard: serve."
 
 msgid ""
 "notify.strip\n"
@@ -6303,12 +6410,21 @@
 "change\n"
 "  ``/long/path/repository`` into ``repository``. Default: 0."
 msgstr ""
+"notify.strip\n"
+"  Anzahl der Schrägstriche, die vom URL-Pfad abgeschnitten werden sollen.\n"
+"  Standardmäßig werden Archive mit ihrem absoluten Pfad benannt.\n"
+"  Mit ``notify.strip`` lässt sich das in relative Pfade abwandeln. Zum\n"
+"  Beispiel wird ``/langer/pfad/zum/archiv`` mit ``strip=4`` zu ``archiv``.\n"
+"  Standard: 0."
 
 msgid ""
 "notify.domain\n"
 "  If subscribers emails or the from email have no domain set, complete them\n"
 "  with this value."
 msgstr ""
+"notify.domain\n"
+"  Falls eine Abonnementen- oder die Sender-Adresse keine Domäne haben,\n"
+"  wird dieser Wert eingefügt."
 
 msgid ""
 "notify.style\n"
@@ -6328,62 +6444,103 @@
 "notify.incoming\n"
 "  Template to use when run as incoming hook, override ``notify.template``."
 msgstr ""
+"notify.incoming\n"
+"  Vorlage (mit höherer Priorität als ``notify.template``), falls die Aktion\n"
+"  durch einen 'incoming'-Hook ausgelöst wurde."
 
 msgid ""
 "notify.outgoing\n"
 "  Template to use when run as outgoing hook, override ``notify.template``."
 msgstr ""
+"notify.outcoming\n"
+"  Vorlage (mit höherer Priorität als ``notify.template``), falls die Aktion\n"
+"  durch einen 'outcoming'-Hook ausgelöst wurde."
 
 msgid ""
 "notify.changegroup\n"
 "  Template to use when running as changegroup hook, override\n"
 "  ``notify.template``."
 msgstr ""
+"notify.changegroup\n"
+"  Vorlage (mit höherer Priorität als ``notify.template``), falls die Aktion\n"
+"  durch einen 'changegroup'-Hook ausgelöst wurde."
 
 msgid ""
 "notify.maxdiff\n"
 "  Maximum number of diff lines to include in notification email. Set to 0\n"
 "  to disable the diff, -1 to include all of it. Default: 300."
 msgstr ""
+"notify.maxdiff\n"
+"  Maximale Zeilenanzahl des Diffs in der Benachrichtigungsemail. Der Wert\n"
+"  0 unterbindet die Anzeige des Diffs, -1 wird das gesamte Diff ausgeben.\n"
+"  Standard: 300."
 
 msgid ""
 "notify.maxsubject\n"
 "  Maximum number of characters in emails subject line. Default: 67."
 msgstr ""
+"notify.maxsubject\n"
+"  Maximale Länge der Betreffszeile. Standard: 67."
 
 msgid ""
 "notify.diffstat\n"
 "  Set to True to include a diffstat before diff content. Default: True."
 msgstr ""
+"notify.diffstat\n"
+"  Zeige eine Diff-Statistik vor dem eigentlich Diff an. Standard: True."
 
 msgid ""
 "notify.merge\n"
 "  If True, send notifications for merge changesets. Default: True."
 msgstr ""
+"notify.merge\n"
+"  Sende Nachrichten auch für Zusammenführungen. Standard: True."
 
 msgid ""
 "notify.mbox\n"
 "  If set, append mails to this mbox file instead of sending. Default: None."
 msgstr ""
 "notify.mbox\n"
-"  Schreibe Nachrichten in mbox Datei, anstatt sie zu versenden. Standard: None"
+"  Schreibe Nachrichten in mbox Datei, anstatt sie zu versenden. Standard: "
+"None"
+
+msgid ""
+"notify.fromauthor\n"
+"  If set, use the first committer of the changegroup for the \"From\" field "
+"of\n"
+"  the notification mail. If not set, take the user from the pushing repo.\n"
+"  Default: False."
+msgstr ""
+"notify.fromauthor\n"
+"  Verwende den Autoren der ersten Änderungsgruppe als Absender der\n"
+"  Benachrichtigungsemail. Falls nicht gesetzt, verwende den Nutzer im\n"
+"  übertragenden Archiv. Standard: False."
+
 
 msgid ""
 "If set, the following entries will also be used to customize the "
 "notifications:"
 msgstr ""
+"Durch Setzen der folgenden Einträge können die Benachrichtigungen weiter\n"
+"angepasst werden:"
 
 msgid ""
 "email.from\n"
 "  Email ``From`` address to use if none can be found in generated email "
 "content."
 msgstr ""
+"email.from\n"
+"  Die zu verwendende Senderadresse (``From``), falls die Vorlage keinen\n"
+"  Wert setzt."
 
 msgid ""
 "web.baseurl\n"
 "  Root repository browsing URL to combine with repository paths when making\n"
 "  references. See also ``notify.strip``."
 msgstr ""
+"web.baseurl\n"
+"  Die Basis-URL für alle Archive, die mit dem abgeschnittenen (siehe\n"
+"  ``notify.strip``) Archivpfad für Referenzen verwendet wird."
 
 #, python-format
 msgid "%s: %d new changesets"
@@ -6487,7 +6644,7 @@
 "Befehle den Pager benutzen."
 
 msgid "If pager.attend is present, pager.ignore will be ignored."
-msgstr "Wenn pager.attend vorhanden ist, wird pager.ignore ignoriert."
+msgstr "Wenn pager.attend vorhanden ist, ist pager.ignore wirkungslos."
 
 msgid ""
 "To ignore global commands like :hg:`version` or :hg:`help`, you have\n"
@@ -6550,7 +6707,7 @@
 "configuration file::"
 msgstr ""
 "Andere Standardwerte können beispielsweise durch den folgenden Abschnitt\n"
-"in der hgrc konfiguriert werden::"
+"in der Konfigurationsdatei gesetzt werden::"
 
 msgid ""
 "  [email]\n"
@@ -6571,8 +6728,8 @@
 "Use ``[patchbomb]`` as configuration section name if you need to\n"
 "override global ``[email]`` address settings."
 msgstr ""
-"Benutzen Sie [patchbomb] als Abschnittsnamen, falls Sie globale\n"
-"[email]-Einstellungen überschreiben müssen."
+"Benutzen Sie ``[patchbomb]`` als Abschnittsnamen, falls Sie globale\n"
+"``[email]``-Einstellungen überschreiben müssen."
 
 msgid ""
 "Then you can use the :hg:`email` command to mail a series of\n"
@@ -6709,9 +6866,9 @@
 "    with a final summary of all messages and asked for confirmation before\n"
 "    the messages are sent."
 msgstr ""
-"Bei der Angabe der Optionen -d/--diffstat oder -c/--confirm wird eine\n"
-"abschließende Zusammenfassung aller Nachrichten angezeigt und um\n"
-"Bestätigung gebeten, bevor die Nachrichten versendet werden."
+"    Bei der Angabe der Optionen -d/--diffstat oder -c/--confirm wird eine\n"
+"    abschließende Zusammenfassung aller Nachrichten angezeigt und um\n"
+"    Bestätigung gebeten, bevor die Nachrichten versendet werden."
 
 msgid ""
 "    By default the patch is included as text in the email body for\n"
@@ -6747,9 +6904,9 @@
 "    previewed with any mail user agent which supports UNIX mbox\n"
 "    files."
 msgstr ""
-"Alternativ werden die Nachrichten mit der Option -m/--mbox in eine Datei\n"
-"geschrieben, die von jedem Emailprogramm, welches das UNIX-mbox-Format\n"
-"unterstützt, geprüft werden, zum Beispiel mit mutt::"
+"    Alternativ werden die Nachrichten mit der Option -m/--mbox in eine\n"
+"    Datei geschrieben, die von jedem Emailprogramm, welches das UNIX-mbox-\n"
+"    Format unterstützt, geprüft werden, zum Beispiel mit mutt::"
 
 msgid ""
 "    With -n/--test, all steps will run, but mail will not be sent.\n"
@@ -6759,13 +6916,13 @@
 "    PAGER environment variable is set, your pager will be fired up once\n"
 "    for each patchbomb message, so you can verify everything is alright."
 msgstr ""
-"Um das Versenden verfrühter Patches zu verhindern, sollte man :hg:`email`\n"
-"mit der Option \"-n\" (Testmodus) aufrufen. Sie werden nach einer\n"
-"Empfängeradresse, einem Betreff und einer einleitenden Nachricht gefragt,\n"
-"die die Patches Ihrer Patchbombe beschreibt. Danach werden die\n"
-"Patchbombennachrichten angezeigt. Wenn die PAGER-Umgebungsvariable gesetzt\n"
-"ist, wird Ihr Pager für jede Patchbombe einzeln aufgerufen, so dass alles\n"
-"überprüft werden kann."
+"    Mit der Option -n/--test werden alle Schritte ausgeführt und das\n"
+"    Ergebnis angezeigt, aber keine Email versendet. Sie werden also nach\n"
+"    einer Empfängeradresse, einem Betreff und einer einleitenden Nachricht,\n"
+"    die die Patches Ihrer Patchbombe beschreibt, gefragt. Dann werden alle\n"
+"    Patchbombennachrichten angezeigt. Wenn die PAGER-Umgebungsvariable\n"
+"    gesetzt ist, wird Ihr Pager für jede Patchbombe einzeln aufgerufen, so\n"
+"    dass alles überprüft werden kann."
 
 msgid ""
 "    In case email sending fails, you will find a backup of your series\n"
@@ -6828,7 +6985,7 @@
 "    hgrc. See the [email] section in hgrc(5) for details.\n"
 "    "
 msgstr ""
-"    Um dieses Kommando zu benutzen muss das Email-Versenden im Abschnitt\n"
+"    Um dieses Kommando zu benutzen muss der Emailversand im Abschnitt\n"
 "    [email] der Konfiguration aktiviert sein. Siehe hgrc(5) für Details.\n"
 "    "
 
@@ -6863,23 +7020,23 @@
 msgstr "Diese Patch-Serie besteht aus %d Patches."
 
 msgid "no recipient addresses provided"
-msgstr ""
+msgstr "Keine Empfängeradresse angegeben"
 
 msgid ""
 "\n"
 "Final summary:"
 msgstr ""
 "\n"
-"Zusammenfassung:"
+"Engültige Zusammenfassung:"
 
 msgid "are you sure you want to send (yn)?"
-msgstr "Sicher, dass Sie jetzt senden möchten (y/n)?"
+msgstr "Sicher, dass Sie jetzt senden möchten (j/n)?"
 
 msgid "&No"
 msgstr "&Nein"
 
 msgid "&Yes"
-msgstr "Ja (&y)"
+msgstr "&Ja"
 
 msgid "patchbomb canceled"
 msgstr "patchbomb abgebrochen"
@@ -6957,36 +7114,36 @@
 #. i18n: format XX seconds as "XXs"
 #, python-format
 msgid "%02ds"
-msgstr ""
+msgstr "%02dsek"
 
 #. i18n: format X minutes and YY seconds as "XmYYs"
 #, python-format
 msgid "%dm%02ds"
-msgstr ""
+msgstr "%d:%02d"
 
 #. i18n: format X hours and YY minutes as "XhYYm"
 #, python-format
 msgid "%dh%02dm"
-msgstr ""
+msgstr "%d:%02d"
 
 #. i18n: format X days and YY hours as "XdYYh"
 #, python-format
 msgid "%dd%02dh"
-msgstr ""
+msgstr "%dd%02dh"
 
 #. i18n: format X weeks and YY days as "XwYYd"
 #, python-format
 msgid "%dw%02dd"
-msgstr ""
+msgstr "%dw%02dd"
 
 #. i18n: format X years and YY weeks as "XyYYw"
 #, python-format
 msgid "%dy%02dw"
-msgstr ""
+msgstr "%dy%02dw"
 
 #, python-format
 msgid "%d %s/sec"
-msgstr ""
+msgstr "%d %s/Sek"
 
 msgid "command to delete untracked files from the working directory"
 msgstr "Löscht nicht versionierte Dateien aus dem Arbeitsverzeichnis"
@@ -7056,7 +7213,7 @@
 "    option.\n"
 "    "
 msgstr ""
-"    Seien Sie mit purge vorsichtig, da Sie Dateien unwiderbringlich\n"
+"    Seien Sie mit purge vorsichtig, da Sie Dateien unwiederbringlich\n"
 "    löschen könnten, die Sie nicht zum Projektarchiv hinzugefügt\n"
 "    haben. Wenn Sie nur die Liste der Dateien sehen wollen, die dieses\n"
 "    Programm entfernen würde, nutzen Sie die Option --print.\n"
@@ -7112,16 +7269,16 @@
 msgstr "Verschiebe auf den gegebenen Änderungssatz"
 
 msgid "collapse the rebased changesets"
-msgstr "Faltet die erzeugten Änderungssätze nach dem Rebase zusammen"
+msgstr "Fügt die verschobenen Änderungssätze zu einem einzelnen zusammen"
 
 msgid "use text as collapse commit message"
-msgstr "Nimm Text als gefaltete Commit-Nachricht"
+msgstr "Setzt die Versionsmeldung des zusammengefügten Änderungssatzes"
 
 msgid "invoke editor on commit messages"
-msgstr "Ruft Editor zum setzen der Versionsmeldung auf"
+msgstr "Ruft Editor zum Setzen der Versionsmeldung auf"
 
 msgid "read collapse commit message from file"
-msgstr "Liest gefaltete Commit-Nachricht aus Datei"
+msgstr "Liest Versionsmeldung für den zusammengefügten Änderungssatz aus Datei"
 
 msgid "keep original changesets"
 msgstr "Behält die ursprünglichen Änderungssätze bei"
@@ -7136,15 +7293,17 @@
 msgstr "Methode für das Zusammenführen"
 
 msgid "continue an interrupted rebase"
-msgstr "Führt einen unterbrochenen Rebase fort"
+msgstr "Führt eine unterbrochene Pfropfung fort"
 
 msgid "abort an interrupted rebase"
-msgstr "Bricht einen unterbrochenen Rebase ab"
+msgstr "Bricht eine unterbrochene Pfropfung ab"
 
 msgid ""
 "hg rebase [-s REV | -b REV] [-d REV] [options]\n"
 "hg rebase {-a|-c}"
 msgstr ""
+"hg rebase [-s REV | -b REV] [-d REV] [Optionen]\n"
+"hg rebase {-a|-c}"
 
 msgid "move changeset (and descendants) to a different branch"
 msgstr ""
@@ -7170,7 +7329,7 @@
 "    Sie sollten keine Änderungssätze umpfropfen, die auch andere bereits\n"
 "    haben, ansonsten zwingen Sie jeden anderen die gleiche rebase-\n"
 "    Operation durchzuführen, um die verschobenen Versionen nicht\n"
-"    doppelt zu haben, wenn sie Ihre Änderungen ziehen."
+"    doppelt zu haben, sobald sie Ihre Änderungen abrufen."
 
 msgid ""
 "    If you don't specify a destination changeset (``-d/--dest``),\n"
@@ -7178,10 +7337,10 @@
 "    destination. (The destination changeset is not modified by\n"
 "    rebasing, but new changesets are added as its descendants.)"
 msgstr ""
-"    Wenn Sie keine Zielversion spezifizieren (``-d/--dest``),\n"
-"    verwendet rebase den head des aktuellen named branch, der am \n"
-"    nächsten an tip ist als Ziel (die Zielversion wird durch rebase\n"
-"    nicht verändert. Sie erhält nur neue changesets als Kinder)."
+"    Wenn Sie keine Zielversion spezifizieren (``-d/--dest``), verwendet\n"
+"    rebase als Ziel den Kopf des aktuellen benannten Zweigs, der\n"
+"    der Projektspitze (tip) am nächsten ist. (Die Zielversion wird durch\n"
+"    die Verschiebung nicht verändert, aber erhält neue Kinder.)"
 
 msgid ""
 "    You can specify which changesets to rebase in two ways: as a\n"
@@ -7196,19 +7355,17 @@
 "    the whole branch. If you specify neither ``-s`` nor ``-b``, rebase\n"
 "    uses the parent of the working directory as the base."
 msgstr ""
-"    Sie können auf zwei Arten angeben, welche Changesets rebased werden\n"
-"    sollen: als \"source\" oder als \"base\" Changesets. Beide sind\n"
-"    Abkürzungen für ein Menge von Changesets, die topologisch\n"
-"    zusammenhängen (die \"source\" Branch). Wenn Sie source angeben\n"
-"    (``-s/--source``), rebase wird dieses Changeset und all seine\n"
-"    Descendants nach dest pfropfen. Wenn Sie base angeben (``-b/--base``),\n"
-"    rebase wird Vorgänger von base auswählen, bis zu aber nicht\n"
-"    einschließlich dem gemeinsamen Vorgänger mit dest. Es ist also\n"
-"    ``-b`` weniger präzise, aber bequemer, als ``-s``: Sie können\n"
-"    jegliches Changeset im Quell-Branch angeben, und rebase wird den\n"
-"    gesamten Branch auswählen. Wenn Sie weder ``-s``noch ``-b`` angeben,\n"
-"    wird rebase den Parent des aktuellen Verzeichnisses als Base\n"
-"    auswählen."
+"    Sie können die zu verschiebenden Versionen auf zwei Arten angeben: Als\n"
+"    \"Quell-\" oder als \"Basisversion\". Mit diesen ist jeweils eine Menge\n"
+"    von topologisch verwandten Revisionen gemeint (der \"Quell\"-zweig).\n"
+"    Wenn Sie eine Quellversion (``-s/--source``) angeben, wird Mercurial\n"
+"    diese Version und alle ihrer Nachfahren verschieben. Geben Sie eine\n"
+"    Basisversion (``-b/--base``) an, so sucht Mercurial den jüngsten\n"
+"    gemeinsamen Vorfahren der Basis und des Ziels und verschiebt den Zweig,\n"
+"    der die Basis enthält, nicht jedoch den gemeinsamen Vorfahren.\n"
+"    Somit ist ``-s`` genauer, aber ``-b`` praktischer: Man gibt irgendeine\n"
+"    Version im zu verschiebenden Zweig an. Wenn Sie weder ``-s`` noch\n"
+"    ``-b`` angeben, wird den Vorfahr der Arbeitskopie als Basis verwendet."
 
 msgid ""
 "    By default, rebase recreates the changesets in the source branch\n"
@@ -7217,12 +7374,11 @@
 "    changesets in the source branch (e.g. merges from the destination\n"
 "    branch) may be dropped if they no longer contribute any change."
 msgstr ""
-"    Per Default erzeugt rebase die Changesets im Quell-Branch als\n"
-"    Descendants von dest neu, und zerstört dann die Originale. Benutzen\n"
-"    Sie ``--keep``, um die originalen Quell-Changesets zu bewahren.\n"
-"    Einige Changeset im Quell-Branch (z.B. Merges vom Ziel-Branch),\n"
-"    können gelöscht werden, wenn sie keine weiteren Änderungen\n"
-"    mehr beisteuern."
+"    Standardmäßig werden die Änderungssätze des Quellzweigs als Nachfahren\n"
+"    des Ziels erzeugt und ihr Original zerstört. Mit ``--keep`` werden\n"
+"    die Originale erhalten. Einige Änderungssätze des Quellzweigs könnten\n"
+"    entfernt werden (z.B. Zusammenführungen aus dem Zielzweig, die keine\n"
+"    eigenen Änderungen über die Zusammenführung hinaus enthalten)."
 
 msgid ""
 "    One result of the rules for selecting the destination changeset\n"
@@ -7232,11 +7388,14 @@
 "    destination (or ``update`` to the other head, if it's the head of\n"
 "    the intended source branch)."
 msgstr ""
-"    Ein Ergebnis der Regeln für das Auswählen der Ziel-Changesets und des\n"
-"    Quell-Branches ist, daß, im Gegensatz zu ``merge``, rebase nichts\n"
-"    tun wird, wenn Sie auf dem neuesten (tipmost) Head eines benannten\n"
-"    Branches mit zwei Heads sind. Sie müssen Quelle und/oder Ziel angeben\n"
-"    (oder auf den anderen Head ``update``en)."
+"    Als Folge der automatischen Auswahlregeln für Quelle und Ziel wird\n"
+"    (im Gegensatz zu einer Zusammenführung) keine Verschiebung "
+"durchgeführt,\n"
+"    wenn der jüngere von zwei Köpfen eines Zweiges aktuell ist. In diesem\n"
+"    Falle (z.B. bei lokaler Versionsübernahme nach einem Abrufen), muss\n"
+"    eine Quelle und/oder ein Ziel explizit angegeben werden. Dieser Fall\n"
+"    tritt aber vor allem ein, wenn das Arbeitsverzeichnis im Zielzweig\n"
+"    liegt, also zunächst auf den Quellzweig aktualisiert werden muss."
 
 msgid ""
 "    If a rebase is interrupted to manually resolve a merge, it can be\n"
@@ -7255,9 +7414,11 @@
 
 msgid "message can only be specified with collapse"
 msgstr ""
+"Eine Versionsmeldung kann nur für beim Zusammenfalten (--collapse) gegeben\n"
+"werden"
 
 msgid "cannot use both abort and continue"
-msgstr "abort und continue können nicht gleichzeitig genutzt werden"
+msgstr "abort und continue dürfen nicht gleichzeitig angegeben werden"
 
 msgid "cannot use collapse with continue or abort"
 msgstr "collapse kann nicht mit continue oder abort genutzt werden"
@@ -7266,19 +7427,19 @@
 msgstr "detach kann nicht mit continue oder abort genutzt werden"
 
 msgid "abort and continue do not allow specifying revisions"
-msgstr "abort und continue erlauben die Angabe einer Revision nicht"
+msgstr "abort und continue erlauben keine Angabe einer Revision"
 
 msgid "tool option will be ignored\n"
-msgstr ""
+msgstr "Die Option tool wird ignoriert\n"
 
 msgid "cannot specify both a source and a base"
-msgstr "Es können nicht Quelle und Basis gleichzeitig angegeben werden"
+msgstr "Quelle und Basis dürfen nicht gleichzeitig angegeben werden"
 
 msgid "cannot specify both a revision and a base"
-msgstr "Es können nicht Revision und Basis gleichzeitig angegeben werden"
+msgstr "Revision und Basis dürfen nicht gleichzeitig angegeben werden"
 
 msgid "cannot specify both a revision and a source"
-msgstr "Es können nicht Revision und Quelle gleichzeitig angegeben werden"
+msgstr "Revision und Quelle dürfen nicht gleichzeitig angegeben werden"
 
 msgid "detach requires a revision to be specified"
 msgstr "detach benötigt eine Revision"
@@ -7292,11 +7453,18 @@
 msgid "use --keep to keep original changesets"
 msgstr "Verwende --keep, um die ursprünglichen Änderungssätze zu behalten"
 
+#, python-format
+msgid "Can't rebase immutable changeset %s"
+msgstr "Nicht veränderbarer Änderungssatz %s kann nicht verschoben werden"
+
+msgid "see hg help phases for details"
+msgstr "Siehe hg help phases für Details"
+
 msgid "nothing to rebase\n"
-msgstr "Kein Rebase nötig\n"
+msgstr "Kein Verschiebung nötig\n"
 
 msgid "cannot collapse multiple named branches"
-msgstr "Kann nicht mehrere benannte Zweige kollabieren"
+msgstr "Mehrere benannte Zweige können nicht zusammengefaltet werden"
 
 msgid "rebasing"
 msgstr "Verschiebe"
@@ -7313,13 +7481,13 @@
 msgstr "keine Änderungen, Revision %d übersprungen\n"
 
 msgid "rebase merging completed\n"
-msgstr "Zusammenführungen des Rebase abgeschlossen\n"
+msgstr "Zusammenführungen der Verschiebung abgeschlossen\n"
 
 msgid "warning: new changesets detected on source branch, not stripping\n"
 msgstr "Warnung: Neue Änderungssätze auf Quellzweig gefunden, lösche nicht\n"
 
 msgid "rebase completed\n"
-msgstr "Rebase abgeschlossen\n"
+msgstr "Verschiebung abgeschlossen\n"
 
 #, python-format
 msgid "%d revisions have been skipped\n"
@@ -7332,39 +7500,43 @@
 #, python-format
 msgid "cannot use revision %d as base, result would have 3 parents"
 msgstr ""
-"Revision %d kann nicht als Basis genutzt werden, das Ergebnis hätte 3 "
+"Revision %d kann nicht als Basis genutzt werden: das Ergebnis hätte 3 "
 "Vorgänger"
 
 msgid "no rebase in progress"
-msgstr "Kein vorheriger Rebase zur Wiederaufnahme"
+msgstr "Keine vorherige Verschiebung zur Wiederaufnahme"
+
+msgid "warning: immutable rebased changeset detected, can't abort\n"
+msgstr ""
+"Warnung: Unveränderbare Änderungssätze gefunden. Kann nicht abbrechen\n"
 
 msgid "warning: new changesets detected on target branch, can't abort\n"
 msgstr ""
-"Warnung: Neue Änderungssätze auf Zielzweig gefunden, kann nicht abbrechen\n"
+"Warnung: Neue Änderungssätze auf Zielzweig gefunden. Kann nicht abbrechen\n"
 
 msgid "rebase aborted\n"
-msgstr "Rebase abgebrochen\n"
+msgstr "Verschiebung abgebrochen\n"
 
 msgid "cannot rebase onto an applied mq patch"
-msgstr "Rebase kann auf einem angewandten MQ-Patch nicht aufsetzen"
+msgstr "Verschiebung kann nicht auf einem angewandten MQ-Patch aufsetzen"
 
 msgid "no matching revisions"
 msgstr "keine passenden Revisionen"
 
 msgid "can't rebase multiple roots"
-msgstr ""
+msgstr "Mehrere Wurzeln können nicht verschoben werden"
 
 msgid "source is ancestor of destination"
 msgstr "Quelle ist ein Vorfahr des Ziels"
 
 msgid "--tool can only be used with --rebase"
-msgstr ""
+msgstr "--tool kann nicht gleichzeitig mit --rebase verwendet werden"
 
 msgid "rebase working directory to branch head"
-msgstr "Führt Rebase zu einem Zweigkopf auf dem Arbeitsverzeichnis aus"
+msgstr "Führt Verschiebung des Arbeitsverzeichnisses auf den Zweigkopf"
 
 msgid "specify merge tool for rebase"
-msgstr "Method für Zusammenführungen innerhalb der Verschiebung"
+msgstr "Methode für Zusammenführungen innerhalb der Verschiebung"
 
 msgid "commands to interactively select changes for commit/qrefresh"
 msgstr "Befehle um interaktiv Änderungen für commit/qrefresh zu wählen"
@@ -7389,16 +7561,16 @@
 msgstr "%d Hunks, %d Zeilen geändert\n"
 
 msgid "[Ynsfdaq?]"
-msgstr ""
+msgstr "[Jnsdfab?]"
 
 msgid "&Yes, record this change"
-msgstr "&Yes - übernimmt diese Änderung"
+msgstr "&Ja - übernimmt diese Änderung"
 
 msgid "&No, skip this change"
-msgstr "&No, überspringt diese Änderung"
+msgstr "&Nein, überspringt diese Änderung"
 
 msgid "&Skip remaining changes to this file"
-msgstr "&Überspringe die restlichen Änderungen an dieser Datei"
+msgstr "Über&springe die restlichen Änderungen an dieser Datei"
 
 msgid "Record remaining changes to this &file"
 msgstr "Zeichne die restlichen Änderungen an dieser &Datei auf"
@@ -7407,13 +7579,13 @@
 msgstr "&Fertig, überspringe die restlichen Änderungen und Dateien"
 
 msgid "Record &all changes to all remaining files"
-msgstr "Übernimmt &alle Änderungen aller restlichen Dateien"
+msgstr "Zeichne &alle Änderungen der verbleibenden Dateien auf"
 
 msgid "&Quit, recording no changes"
-msgstr "&Quit, übernimmt keine Änderungen"
+msgstr "&Beende, zeichnet keine Änderungen auf"
 
 msgid "&?"
-msgstr ""
+msgstr "&?"
 
 msgid "user quit"
 msgstr "Abbruch durch Benutzer"
@@ -7460,7 +7632,7 @@
 "      y - record this change\n"
 "      n - skip this change"
 msgstr ""
-"      y - übernimmt diese Änderung\n"
+"      j - übernimmt diese Änderung\n"
 "      n - überspringt diese Änderung"
 
 msgid ""
@@ -7475,9 +7647,9 @@
 "      a - record all changes to all remaining files\n"
 "      q - quit, recording no changes"
 msgstr ""
-"      d - fertig, überspringt verbleibende Änderungen und Dateien\n"
+"      f - fertig, überspringt verbleibende Änderungen und Dateien\n"
 "      a - übernimmt alle Änderungen aller verbleibenden Dateien\n"
-"      q - beendet ohne Änderungen zu übernehmen"
+"      b - beendet ohne Änderungen zu übernehmen"
 
 msgid "      ? - display help"
 msgstr "      ? - zeigt Hilfe an"
@@ -7507,8 +7679,8 @@
 
 msgid "cannot partially commit a merge (use \"hg commit\" instead)"
 msgstr ""
-"Eine Zusammenführung kann nicht teilweise übernommen werden (verwende "
-"stattdessen :h:`commit`)"
+"Eine Zusammenführung kann nicht teilweise übernommen werden (verwende :hg:"
+"`commit`)"
 
 msgid "no changes to record\n"
 msgstr "Keine Änderungen zu übernehmen\n"
@@ -7520,49 +7692,51 @@
 msgstr "Interaktive Auswahl der Änderungen für refresh"
 
 msgid "recreates hardlinks between repository clones"
-msgstr "stellt Hardlinks zwischen Repository Clones wieder her"
+msgstr "stellt Hardlinks zwischen Archivklonen wieder her"
 
 msgid "recreate hardlinks between two repositories"
-msgstr "stellt Hardlinks zwischen zwei Repositories wieder her"
+msgstr "stellt harte Verknüpfungen zwischen zwei Archiven wieder her"
 
 msgid ""
 "    When repositories are cloned locally, their data files will be\n"
 "    hardlinked so that they only use the space of a single repository."
 msgstr ""
-"    Wenn Repositories lokal geklont werden, werden ihre Datendateien\n"
-"    hart gelinkt, sodaß sie nur den Platz eines einzelnen Repositories\n"
-"    belegen."
+"    Wenn Archive lokal geklont werden, werden ihre Datendateien hart\n"
+"    verknüpft, sodass sie nur den Platz eines einzelnen Archivs belegen."
 
 msgid ""
 "    Unfortunately, subsequent pulls into either repository will break\n"
 "    hardlinks for any files touched by the new changesets, even if\n"
 "    both repositories end up pulling the same changes."
 msgstr ""
-"    Unglücklicherweise werden nachfolgende ``pull``s in jedes von diesen\n"
-"    Repositories Hardlinks für alle Dateien, die von dem neuen Changeset\n"
-"    betroffen sind, brechen, selbst wenn beide Repositories schlussendlich\n"
-"    die gleichen Änderungen einbeziehen."
+"    Unglücklicherweise brechen spätere Archivänderungen (z.B. Abrufen) die\n"
+"    Verknüpfungen aller Dateien, die von neuen Änderungssätzen betroffen\n"
+"    sind, selbst wenn beide Archive letzlich dieselben Änderungen enthalten."
 
 msgid ""
 "    Similarly, passing --rev to \"hg clone\" will fail to use any\n"
 "    hardlinks, falling back to a complete copy of the source\n"
 "    repository."
 msgstr ""
-"    In gleicher Weise scheitert \"hg clone\" mit --rev an Hardlinks,\n"
-"    sondern nutzt eine komplette Kopie des Quell-Repositories."
+"    In gleicher Weise scheitert \"hg clone\" mit --rev an harten\n"
+"    Verknüpfungen und nutzt eine komplette Kopie des Quellarchivs."
 
 msgid ""
 "    This command lets you recreate those hardlinks and reclaim that\n"
 "    wasted space."
 msgstr ""
-"    Dieses Kommando erlaubt Ihnen, diese Hardlinks wieder herzustellen und\n"
-"    den verlorenen Platz wieder zurück zu gewinnen."
+"    Dieses Kommando erlaubt es, diese Verknüpfungen wieder herzustellen\n"
+"    und den verlorenen Platz zurückzugewinnen."
 
 msgid ""
 "    This repository will be relinked to share space with ORIGIN, which\n"
 "    must be on the same local disk. If ORIGIN is omitted, looks for\n"
 "    \"default-relink\", then \"default\", in [paths]."
 msgstr ""
+"    Dieses Archiv wird neu verknüpft um den Speicher mit HERKUNFT, welches\n"
+"    auf derselben lokalen Platte sein muss, zu teilen. Wenn HERKUNFT nicht\n"
+"    angegeben ist, wird der Pfade (siehe [paths]) mit dem Namen\n"
+"    \"default-relink\" und schliesslich \"default\" verwendet."
 
 msgid ""
 "    Do not attempt any read operations on this repository while the\n"
@@ -7570,9 +7744,12 @@
 "    writes.)\n"
 "    "
 msgstr ""
+"    Versuchen Sie nicht, während der Aktion Leseoperationen auf diesem\n"
+"    Archiv durchzuführen. Schreiben wird von beiden Archive verhindert.\n"
+"    "
 
 msgid "hardlinks are not supported on this system"
-msgstr "Hardlinks werden von diesem System nicht unterstützt"
+msgstr "Harte Verknüpfungen werden von diesem System nicht unterstützt"
 
 msgid "must specify local origin repository"
 msgstr "Lokales Quellarchiv muss angegeben werden"
@@ -7582,11 +7759,11 @@
 msgstr "Wiederverknüpft: %s nach %s\n"
 
 msgid "there is nothing to relink\n"
-msgstr ""
+msgstr "Es gibt nichts zum wiederverknüpfen\n"
 
 #, python-format
 msgid "tip has %d files, estimated total number of files: %s\n"
-msgstr ""
+msgstr "Die Archivspitze (tip) hat %d Dateien. Geschätzte Gesamtzahl: %s\n"
 
 msgid "collecting"
 msgstr "Sammle"
@@ -7602,21 +7779,21 @@
 msgstr "Quelle und Ziel sind auf unterschiedlichen Geräten"
 
 msgid "pruning"
-msgstr ""
+msgstr "Schränke ein"
 
 #, python-format
 msgid "pruned down to %d probably relinkable files\n"
-msgstr ""
+msgstr "Schränke auf %d wahrscheinlich wiederverknüpfbare Dateien ein\n"
 
 msgid "relinking"
-msgstr ""
+msgstr "Verknüpfe erneut"
 
 #, python-format
 msgid "relinked %d files (%s reclaimed)\n"
-msgstr ""
+msgstr "%d Dateien wiederverknüpft (%s zurückgewonnen)\n"
 
 msgid "[ORIGIN]"
-msgstr ""
+msgstr "[HERKUNFT]"
 
 msgid "extend schemes with shortcuts to repository swarms"
 msgstr ""
@@ -7731,7 +7908,7 @@
 
 #, python-format
 msgid "skipping already applied revision %s\n"
-msgstr ""
+msgstr "Überspringe bereits angewendete Revision %s\n"
 
 #, python-format
 msgid "skipping merge changeset %s:%s\n"
@@ -7750,14 +7927,14 @@
 msgstr ""
 
 msgid "filter failed"
-msgstr ""
+msgstr "Filter fehlgeschlagen"
 
 msgid "can only omit patchfile if merging"
 msgstr ""
 
 #, python-format
 msgid "%s: empty changeset"
-msgstr ""
+msgstr "%s: leerer Änderungssatz"
 
 msgid "fix up the merge and run hg transplant --continue"
 msgstr ""
@@ -7774,7 +7951,7 @@
 msgstr ""
 
 msgid "commit failed"
-msgstr ""
+msgstr "Übernahme der Änderungen schlug fehl"
 
 msgid "filter corrupted changeset (no user or date)"
 msgstr "filtriere beschädigte Änderungssätze (ohne Nutzer oder Datum)"
@@ -8129,8 +8306,8 @@
 msgstr "Aktualisiere Lesezeichen %s\n"
 
 #, python-format
-msgid "not updating divergent bookmark %s\n"
-msgstr "Aktualisiere nicht divergierendes Lesezeichen %s\n"
+msgid "divergent bookmark %s stored as %s\n"
+msgstr "Divergierendes Lesezeichen %s als %s gespeichert\n"
 
 msgid "searching for changed bookmarks\n"
 msgstr "Suche nach geänderten Lesezeichen\n"
@@ -8288,6 +8465,10 @@
 msgstr "Marke:           %s\n"
 
 #, python-format
+msgid "phase:       %s\n"
+msgstr "Phase:       %s\n"
+
+#, python-format
 msgid "parent:      %d:%s\n"
 msgstr "Vorgänger:       %d:%s\n"
 
@@ -8585,6 +8766,8 @@
 "    Returns 0 if all files are successfully added.\n"
 "    "
 msgstr ""
+"    Gibt 0 zurück, wenn alle Dateien erfolgreich hinzugefügt wurden.\n"
+"    "
 
 msgid "add all new files, delete all missing files"
 msgstr "Fügt alle neuen Dateien hinzu, löscht alle fehlenden Dateien"
@@ -8601,8 +8784,8 @@
 "    ``.hgignore``. As with add, these changes take effect at the next\n"
 "    commit."
 msgstr ""
-"    Neue Dateien werden ignoriert, wenn sie einem der Muster aus "
-"``.hgignore``\n"
+"    Neue Dateien werden ignoriert, wenn sie einem der Muster aus ``."
+"hgignore``\n"
 "    entsprechen. Genau wie add, wirken diese Änderungen erst beim nächsten\n"
 "    Übernehmen (commit)."
 
@@ -8632,8 +8815,7 @@
 msgstr "Annotiert die angegebene Revision"
 
 msgid "follow copies/renames and list the filename (DEPRECATED)"
-msgstr ""
-"Folge Kopien/Umbenennungen und liste Dateinamen auf (VERALTET)"
+msgstr "Folge Kopien/Umbenennungen und liste Dateinamen auf (VERALTET)"
 
 msgid "don't follow copies and renames"
 msgstr "Unterläßt das Folgen von Dateikopien und Umbenennungen"
@@ -9086,17 +9268,18 @@
 msgid "track a line of development with movable markers"
 msgstr "Folgt einem Entwicklungsstrang mit einer beweglichen Markierung"
 
-msgid ""
-"    Bookmarks are pointers to certain commits that move when\n"
-"    committing. Bookmarks are local. They can be renamed, copied and\n"
-"    deleted. It is possible to use bookmark names in :hg:`merge` and\n"
-"    :hg:`update` to merge and update respectively to a given bookmark."
+#, fuzzy
+msgid ""
+"    Bookmarks are pointers to certain commits that move when committing.\n"
+"    Bookmarks are local. They can be renamed, copied and deleted. It is\n"
+"    possible to use :hg:`merge NAME` to merge from a given bookmark, and\n"
+"    :hg:`update NAME` to update to a given bookmark."
 msgstr ""
 "    Lesezeichen sind Zeiger auf bestimmte Versionen, die mitwandern,\n"
 "    wenn eine neuen Version erzeugt wird. Lesezeichen sind nur lokal.\n"
 "    Sie können umbenannt, kopiert und gelöscht werden. Es ist möglich,\n"
-"    Lesezeichen bei :hg: `merge` und :hg:`update` zu nutzen, um auf das\n"
-"    angegebene Lesezeichen zu aktualisieren."
+"    Lesezeichen bei :hg: `merge` und :hg:`update` anzugeben, um das an-\n"
+"    gegebene Lesezeichen zusammenzuführen, bzw. darauf zu aktualisieren."
 
 msgid ""
 "    You can use :hg:`bookmark NAME` to set a bookmark on the working\n"
@@ -9165,6 +9348,14 @@
 msgstr "Setzt oder zeigt den Namen des aktuellen Zweigs"
 
 msgid ""
+"    .. note::\n"
+"       Branch names are permanent and global. Use :hg:`bookmark` to create "
+"a\n"
+"       light-weight bookmark instead. See :hg:`help glossary` for more\n"
+"       information about named branches and bookmarks."
+msgstr ""
+
+msgid ""
 "    With no argument, show the current branch name. With one argument,\n"
 "    set the working directory branch name (the branch will not exist\n"
 "    in the repository until the next commit). Standard practice\n"
@@ -9201,13 +9392,6 @@
 "    :hg:`update`. Mit :hg:`commit --close-branch` wird der aktuelle Zweig\n"
 "    geschlossen."
 
-msgid ""
-"    .. note::\n"
-"       Branch names are permanent. Use :hg:`bookmark` to create a\n"
-"       light-weight bookmark instead. See :hg:`help glossary` for more\n"
-"       information about named branches and bookmarks."
-msgstr ""
-
 #, python-format
 msgid "reset working directory to branch %s\n"
 msgstr "Setze Arbeitsverzeichnis auf Zweig %s zurück\n"
@@ -9223,6 +9407,9 @@
 msgid "marked working directory as branch %s\n"
 msgstr "Arbeitsverzeichnis wurde als Zweig %s markiert\n"
 
+msgid "(branches are permanent and global, did you want a bookmark?)\n"
+msgstr ""
+
 msgid "show only branches that have unmerged heads"
 msgstr "Zeigt nur Branches deren Köpfe nicht zusammengeführt wurden"
 
@@ -9259,12 +9446,14 @@
 "    Returns 0.\n"
 "    "
 msgstr ""
+"    Gibt 0 zurück.\n"
+"    "
 
 msgid " (closed)"
-msgstr ""
+msgstr " (geschlossen)"
 
 msgid " (inactive)"
-msgstr ""
+msgstr " (inaktiv)"
 
 msgid "run even when the destination is unrelated"
 msgstr "Auch ausführen, wenn das Ziel keinen Bezug hat"
@@ -9461,7 +9650,8 @@
 "      --pull option to avoid hardlinking."
 msgstr ""
 "      Aus Effizienzgründen werden 'hardlinks' für das Klonen genutzt, wann\n"
-"      immer Quelle und Ziel auf dem selben Dateisystem liegen (dies gilt nur\n"
+"      immer Quelle und Ziel auf dem selben Dateisystem liegen (dies gilt "
+"nur\n"
 "      für die Daten des Archivs, nicht für die Arbeitskopie). Einige\n"
 "      Dateisyteme, wie etwa AFS, implementieren 'hardlinks' fehlerhaft,\n"
 "      erzeugen dabei aber keine Fehlermeldung. Dann muss die --pull Option\n"
@@ -9554,8 +9744,7 @@
 msgstr ""
 
 msgid "    See :hg:`help urls` for details on specifying URLs."
-msgstr ""
-"    Siehe auch :hg:`help urls` für das Format von Adressangaben."
+msgstr "    Siehe auch :hg:`help urls` für das Format von Adressangaben."
 
 msgid "cannot specify both --noupdate and --updaterev"
 msgstr ""
@@ -10171,8 +10360,7 @@
 msgstr "[OPTION]... [-o DATEINAMENMUSTER] REV..."
 
 msgid "dump the header and diffs for one or more changesets"
-msgstr ""
-"Gibt Kopfzeilen und Änderungsverlauf einer oder mehrerer Versionen aus"
+msgstr "Gibt Kopfzeilen und Änderungsverlauf einer oder mehrerer Versionen aus"
 
 msgid "    Print the changeset header and diffs for one or more revisions."
 msgstr ""
@@ -10221,8 +10409,10 @@
 "    :``%N``: Anzahl der generierten Patches\n"
 "    :``%R``: Revisionnummer des Änderungssatzes\n"
 "    :``%b``: Basisname des exportierten Archivs\n"
-"    :``%h``: Kurzform der Prüfsumme des Änderungssatzes (12 Byte hexadezimal)\n"
-"    :``%m``: Erste Zeile der Übernahmenachricht (nur alphanumerische Zeichen)\n"
+"    :``%h``: Kurzform der Prüfsumme des Änderungssatzes (12 Byte "
+"hexadezimal)\n"
+"    :``%m``: Erste Zeile der Übernahmenachricht (nur alphanumerische "
+"Zeichen)\n"
 "    :``%n``: Laufende Nummer mit führenden Nullen, beginnend bei 1\n"
 "    :``%r``: Revisionsnummer mit führenden Nullen"
 
@@ -10349,9 +10539,9 @@
 
 msgid ""
 "    If a graft merge results in conflicts, the graft process is\n"
-"    aborted so that the current merge can be manually resolved. Once\n"
-"    all conflicts are addressed, the graft process can be continued\n"
-"    with the -c/--continue option."
+"    interrupted so that the current merge can be manually resolved.\n"
+"    Once all conflicts are addressed, the graft process can be\n"
+"    continued with the -c/--continue option."
 msgstr ""
 
 msgid ""
@@ -10913,10 +11103,10 @@
 
 msgid ""
 "    With -s/--similarity, hg will attempt to discover renames and\n"
-"    copies in the patch in the same way as 'addremove'."
+"    copies in the patch in the same way as :hg:`addremove`."
 msgstr ""
 "    Mit der Option -s/--similarity werden Umbenennungen und Kopien auf\n"
-"    gleiche Weise wie mit dem Befehl \"hg addremove\" erkannt."
+"    gleiche Weise wie mit dem Befehl :hg:`addremove` erkannt."
 
 msgid ""
 "    To read a patch from standard input, use \"-\" as the patch name. If\n"
@@ -10966,7 +11156,7 @@
 msgstr ""
 
 msgid "patch is damaged or loses information"
-msgstr "Prüfsumme stimmt nicht überein: Patch korrumpiert"
+msgstr "Prüfsumme stimmt nicht überein: Patch beschädigt"
 
 msgid "applied to working directory"
 msgstr "Angewendet aufs Arbeitsverzeichnis"
@@ -11404,12 +11594,11 @@
 msgid "run 'hg heads' to see all heads"
 msgstr "'hg heads' zeigt alle Köpfe"
 
-msgid "there is nothing to merge"
+msgid "nothing to merge"
 msgstr "Es gibt nichts zum Zusammenführen"
 
-#, python-format
-msgid "%s - use \"hg update\" instead"
-msgstr "%s - Nutze \"hg update\" stattdessen"
+msgid "use 'hg update' instead"
+msgstr "Nutze stattdessen 'hg update'"
 
 msgid "working directory not at a head revision"
 msgstr "Arbeitsverzeichnis ist nicht auf Stand der Kopfversion"
@@ -11478,7 +11667,7 @@
 msgstr "'%s' nicht im Manifest gefunden!"
 
 msgid "[NAME]"
-msgstr ""
+msgstr "[NAME]"
 
 msgid "show aliases for remote repositories"
 msgstr "Zeigt Adresse für Aliasnamen von entfernten Projektarchiven an"
@@ -11525,6 +11714,61 @@
 msgid "not found!\n"
 msgstr "nicht gefunden!\n"
 
+msgid "set changeset phase to public"
+msgstr ""
+
+msgid "set changeset phase to draft"
+msgstr ""
+
+msgid "set changeset phase to secret"
+msgstr ""
+
+msgid "allow to move boundary backward"
+msgstr ""
+
+msgid "target revision"
+msgstr ""
+
+msgid "[-p|-d|-s] [-f] [-r] REV..."
+msgstr ""
+
+msgid "set or show the current phase name"
+msgstr ""
+
+msgid "    With no argument, show the phase name of specified revisions."
+msgstr ""
+
+msgid ""
+"    With one of -p/--public, -d/--draft or -s/--secret, change the\n"
+"    phase value of the specified revisions."
+msgstr ""
+
+msgid ""
+"    Unless -f/--force is specified, :hg:`phase` won't move changeset from a\n"
+"    lower phase to an higher phase. Phases are ordered as follows::"
+msgstr ""
+
+msgid "        public < draft < secret"
+msgstr ""
+
+msgid ""
+"    Return 0 on success, 1 if no phases were changed.\n"
+"    "
+msgstr ""
+
+msgid "only one phase can be specified"
+msgstr ""
+
+msgid "no revisions specified!"
+msgstr ""
+
+#, python-format
+msgid "phase change for %i changesets\n"
+msgstr ""
+
+msgid "no phases changed\n"
+msgstr ""
+
 #, python-format
 msgid "not updating: %s\n"
 msgstr "aktualisiere nicht: %s\n"
@@ -11672,8 +11916,7 @@
 msgid ""
 "    Push changesets from the local repository to the specified\n"
 "    destination."
-msgstr ""
-"    Überträgt lokale Änderungen in das angegebene Ziel."
+msgstr "    Überträgt lokale Änderungen in das angegebene Ziel."
 
 msgid ""
 "    This operation is symmetrical to pull: it is identical to a pull\n"
@@ -11728,7 +11971,6 @@
 "gezogen\n"
 "    werden. Beim Weglassen des ZIELs wird standardmäßig der 'default'-Pfad\n"
 "    genutzt. Weitere Hilfe gibt unter :hg:`help urls`."
-"    "
 
 msgid ""
 "    Returns 0 if push was successful, 1 if nothing to push.\n"
@@ -11809,7 +12051,8 @@
 msgstr ""
 "      Option -A/--after kann genutzt werden, um Dateien zu entfernen, die\n"
 "      bereits gelöscht wurden, -f/--force kann genutzt werden, um die\n"
-"      Löschung zu erzwingen. -Af entfernt Dateien aus der nächsten Revision,\n"
+"      Löschung zu erzwingen. -Af entfernt Dateien aus der nächsten "
+"Revision,\n"
 "      ohne sie im Arbeitsverzeichnis zu löschen"
 
 msgid ""
@@ -11819,7 +12062,8 @@
 "      (as reported by :hg:`status`). The actions are Warn, Remove\n"
 "      (from branch) and Delete (from disk):"
 msgstr ""
-"      Die folgende Tabelle beschreibt detailliert das Verhalten von 'remove'\n"
+"      Die folgende Tabelle beschreibt detailliert das Verhalten von "
+"'remove'\n"
 "      für unterschiedliche Dateizustände (Spalten) und Optionskombinationen\n"
 "      (Reihen). Die Dateizustände sind Hinzugefügt (A), Unverändert (C),\n"
 "      Verändert (M) und Fehlend (!) (wie von :hg:`status` angezeigt). Die\n"
@@ -11871,7 +12115,9 @@
 
 #, python-format
 msgid "not removing %s: file has been marked for add (use forget to undo)\n"
-msgstr "Entferne nicht %s: Datei soll hinzugefügt werden ('hg forget' um dies rückgängig zu machen)\n"
+msgstr ""
+"Entferne nicht %s: Datei soll hinzugefügt werden ('hg forget' um dies "
+"rückgängig zu machen)\n"
 
 msgid "record a rename that has already occurred"
 msgstr ""
@@ -11994,8 +12240,7 @@
 msgstr ""
 
 msgid "restore files to their checkout state"
-msgstr ""
-"Setzt gegebene Dateien oder Verzeichnisse auf frühere Version zurück"
+msgstr "Setzt gegebene Dateien oder Verzeichnisse auf frühere Version zurück"
 
 msgid ""
 "    .. note::\n"
@@ -12033,7 +12278,8 @@
 msgstr ""
 "    Mit der -r/--rev oder der -d/--date Option werden die Dateien oder\n"
 "    Verzeichnisse auf die gegebene Revision zurückgesetzt. Da 'revert' aber\n"
-"    nicht die mit dem Arbeitsverzeichnis assoziierte Revisionsnummer ändert,\n"
+"    nicht die mit dem Arbeitsverzeichnis assoziierte Revisionsnummer "
+"ändert,\n"
 "    gelten die betroffenen Dateien dann als modifiziert. Damit kann man\n"
 "    ungewollte aber bereits übernommene Änderungen rückgängig machen. Siehe\n"
 "    auch :hg:`backout` für eine ähnliche Methode."
@@ -12155,17 +12401,14 @@
 "    - unbundle"
 
 msgid ""
-"    It's possible to lose data with rollback: commit, update back to\n"
-"    an older changeset, and then rollback. The update removes the\n"
-"    changes you committed from the working directory, and rollback\n"
-"    removes them from history. To avoid data loss, you must pass\n"
-"    --force in this case."
-msgstr ""
-"    Es gibt die Möglichkeit durch das Zurückrollen Daten zu verlieren, z.B.\n"
-"    die Übernahme neuer Änderungen (commit) gefolgt von einer Aktualisierung\n"
-"    auf eine andere Revision und einem Zurückrollen der Übernahme.\n"
-"    Damit dies nicht aus Versehen passiert, wird in diesem Falle --force als\n"
-"    Parameter gefordert."
+"    To avoid permanent data loss, rollback will refuse to rollback a\n"
+"    commit transaction if it isn't checked out. Use --force to\n"
+"    override this protection."
+msgstr ""
+"    Um einen versehentlichen Datenverlust zu verhindern, wird rollback\n"
+"    keine Änderungssätze aus der Historie entfernen, die nicht im\n"
+"    Arbeitsverzeichnis aktuell sind. Um diesen Schutz abzuschalten,\n"
+"    muss --force (erzwingen) angegeben werden."
 
 msgid ""
 "    This command is not intended for use on public repositories. Once\n"
@@ -12396,7 +12639,8 @@
 "       relative to one merge parent."
 msgstr ""
 "    .. note:\n"
-"       Der Status kann sich vom Diff unterscheiden, wenn sich Berechtigungen\n"
+"       Der Status kann sich vom Diff unterscheiden, wenn sich "
+"Berechtigungen\n"
 "       geändert haben oder eine Zusammenführung durchgeführt wurde. Das\n"
 "       Standard-Diff-Format zeigt keine Berechtigungsänderungen an und\n"
 "       'diff' zeigt nur Änderungen relativ zu einer Vorgängerversion einer\n"
@@ -12436,9 +12680,12 @@
 "      I = ignoriert\n"
 "        = die zuvor hinzugefügt Datei (A) wurde von hier kopiert"
 
-msgid "      - show changes in the working directory relative to a changeset:"
-msgstr ""
-"      - zeigt Änderungen zwischen dem Arbeitsverzeichnis und einer Revision:"
+msgid ""
+"      - show changes in the working directory relative to a\n"
+"        changeset::"
+msgstr ""
+"      - zeigt Änderungen zwischen dem Arbeitsverzeichnis und einer\n"
+"        Revision::"
 
 msgid "          hg status --rev 9353"
 msgstr ""
@@ -12721,7 +12968,8 @@
 "    umbenannt oder manuell einem anderen Änderungssatz angehängt werden."
 
 msgid "update to new branch head if changesets were unbundled"
-msgstr "aktualisiere auf den neuen Zweigkopf when Änderungssätze entpackt wurden"
+msgstr ""
+"aktualisiere auf den neuen Zweigkopf when Änderungssätze entpackt wurden"
 
 msgid "[-u] FILE..."
 msgstr "[-u] DATEI..."
@@ -12745,7 +12993,9 @@
 msgstr "entferne nicht versionierte Änderungen (kein Backup)"
 
 msgid "update across branches if no uncommitted changes"
-msgstr "Aktualisiere auf anderen Zweig (falls keine unversionierte Änderungen ausstehen)"
+msgstr ""
+"Aktualisiere auf anderen Zweig (falls keine unversionierten Änderungen "
+"ausstehen)"
 
 msgid "[-c] [-C] [-d DATE] [[-r] REV]"
 msgstr "[-c] [-C] [-d DATUM] [[-r] REV]"
@@ -12756,10 +13006,11 @@
 msgid ""
 "    Update the repository's working directory to the specified\n"
 "    changeset. If no changeset is specified, update to the tip of the\n"
-"    current named branch."
+"    current named branch and move the current bookmark."
 msgstr ""
 "    Hebt das Arbeitsverzeichnis auf die angegebene Revision an. Ohne\n"
-"    Angabe einer Revision wird der Spitze des aktuellen Zweigs gewählt."
+"    Angabe einer Revision wird der Spitze des aktuellen Zweigs gewählt\n"
+"    und ggf. das aktuelle Lesezeichen verschoben."
 
 msgid ""
 "    If the changeset is not a descendant of the working directory's\n"
@@ -12867,11 +13118,11 @@
 msgstr "(siehe http://mercurial.selenic.com für mehr Information)"
 
 msgid ""
-"Copyright (C) 2005-2011 Matt Mackall and others\n"
+"Copyright (C) 2005-2012 Matt Mackall and others\n"
 "This is free software; see the source for copying conditions. There is NO\n"
 "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
 msgstr ""
-"Copyright (C) 2005-2011 Matt Mackall und andere\n"
+"Copyright (C) 2005-2012 Matt Mackall und andere\n"
 "Dies ist freie Software; siehe Quellen für Kopierbestimmungen. Es besteht\n"
 "KEINE Gewährleistung für das Programm, nicht einmal der Marktreife oder der\n"
 "Verwendbarkeit für einen bestimmten Zweck.\n"
@@ -13175,7 +13426,7 @@
 msgstr "Option --cwd kann nicht abgekürzt werden!"
 
 msgid ""
-"Option -R has to be separated from other options (e.g. not -qR) and --"
+"option -R has to be separated from other options (e.g. not -qR) and --"
 "repository may only be abbreviated as --repo!"
 msgstr ""
 "Option -R muss von anderen Optionen getrennt werden (also z.B. nicht -qR) "
@@ -13899,7 +14150,7 @@
 msgid ""
 "\n"
 "``annotate``\n"
-"\"\"\"\"\"\"\"\""
+"\"\"\"\"\"\"\"\"\"\"\"\""
 msgstr ""
 
 msgid ""
@@ -14502,11 +14753,14 @@
 "various actions such as starting or finishing a commit. Multiple\n"
 "hooks can be run for the same action by appending a suffix to the\n"
 "action. Overriding a site-wide hook can be done by changing its\n"
-"value or setting it to an empty string."
+"value or setting it to an empty string.  Hooks can be prioritized\n"
+"by adding a prefix of ``priority`` to the hook name on a new line\n"
+"and setting the priority.  The default priority is 0 if\n"
+"not specified."
 msgstr ""
 
 msgid "Example ``.hg/hgrc``::"
-msgstr ""
+msgstr "Beispiel ``.hg/hgrc``-Datei::"
 
 msgid ""
 "  [hooks]\n"
@@ -14515,7 +14769,9 @@
 "  # do not use the site-wide hook\n"
 "  incoming =\n"
 "  incoming.email = /my/email/hook\n"
-"  incoming.autobuild = /my/build/hook"
+"  incoming.autobuild = /my/build/hook\n"
+"  # force autobuild hook to run before other incoming hooks\n"
+"  priority.incoming.autobuild = 1"
 msgstr ""
 
 msgid ""
@@ -15798,11 +16054,12 @@
 msgstr ""
 "EDITOR\n"
 "    Manchmal muss Mercurial eine Textdatei in einem Editor öffnen, damit\n"
-"    der Nutzer sie bearbeiten kann, zum Beispiel when eine Commit-\n"
-"    Nachricht geschrieben wird. Der verwendete Editor wird aus den drei\n"
-"    Umgebungsvariablen HGEDITOR, VISUAL und EDITOR (in dieser Reihenfolge)\n"
-"    ermittelt. Der erste nicht-leere wird verwendet. Wenn alle Angaben\n"
-"    leer sind, wird der Standard 'vi' verwendet."
+"    der Nutzer sie bearbeiten kann, zum Beispiel wenn eine Versionsmeldung\n"
+"    geschrieben wird. Der verwendete Editor wird aus den drei Umgebungs-\n"
+"    variablen HGEDITOR, VISUAL und EDITOR (in dieser Reihenfolge) "
+"ermittelt.\n"
+"    Der erste nicht-leere wird verwendet. Wenn alle Angaben leer sind, wird\n"
+"    der Standard 'vi' verwendet."
 
 msgid ""
 "PYTHONPATH\n"
@@ -15993,7 +16250,7 @@
 msgid "- Find C files in a non-standard encoding::"
 msgstr ""
 
-msgid "    hg locate \"set:**.c and not encoding(ascii)\""
+msgid "    hg locate \"set:**.c and not encoding('UTF-8')\""
 msgstr ""
 
 msgid "- Revert copies of large binary files::"
@@ -16656,9 +16913,8 @@
 "character is treated as a comment character, and the ``\\`` character\n"
 "is treated as an escape character."
 msgstr ""
-"Eine ignore-Datei ist eine Textdatei, die aus einer Liste von Patterns "
-"besteht,\n"
-"mit einem Ausdruck pro Zeile. Leere Zeilen werden übersprungen.\n"
+"Eine ignore-Datei ist eine Textdatei, die aus einer Liste von Dateimustern\n"
+"besteht, mit einem Ausdruck pro Zeile. Leere Zeilen werden übersprungen.\n"
 "Das ``#``-Zeichen wird als Kommentarzeichen behandelt und das \n"
 "``\\``-Zeichen als Escape-Zeichen."
 
@@ -16666,11 +16922,11 @@
 "Mercurial supports several pattern syntaxes. The default syntax used\n"
 "is Python/Perl-style regular expressions."
 msgstr ""
-"Mercurial unterstützt verschiedene Pattern-Syntaxen. Im Normalfall\n"
+"Mercurial unterstützt verschiedene Dateimuster-Syntaxen. Im Normalfall\n"
 "werden Python/Perl-artige Reguläre Ausdrücke verwendet."
 
 msgid "To change the syntax used, use a line of the following form::"
-msgstr ""
+msgstr "Die folgende Zeile ändert die von diesem Punkt an verwendete Syntax::"
 
 msgid "  syntax: NAME"
 msgstr "  Syntax: NAME"
@@ -16689,7 +16945,7 @@
 "The chosen syntax stays in effect when parsing all patterns that\n"
 "follow, until another syntax is selected."
 msgstr ""
-"Die gewählte Syntax wird auf auf alle folgenden Patterns angewendet\n"
+"Die gewählte Syntax wird auf auf alle folgenden Muster angewendet\n"
 "bis eine andere Syntax ausgewählt wird."
 
 msgid ""
@@ -17103,8 +17359,7 @@
 msgstr "Beispiel mit regulärem Ausdruck::"
 
 msgid "  re:.*\\.c$      any name ending in \".c\", anywhere in the repository"
-msgstr ""
-"  re:.*\\.c$     jeder Name endend mit \".c\" überall im Projektarchiv"
+msgstr "  re:.*\\.c$     jeder Name endend mit \".c\" überall im Projektarchiv"
 
 msgid "File examples::"
 msgstr "Datei-Beispiele::"
@@ -17398,16 +17653,16 @@
 "subrepositories."
 msgstr ""
 "Mercurial unterstützt im Augenblick Mercurial-, Git- und Subversion-\n"
-"Subrepositories."
+"Unterarchive."
 
 msgid "Subrepositories are made of three components:"
-msgstr "Subrespositories bestehen aus drei Komponenten:"
+msgstr "Unterarchive bestehen aus drei Komponenten:"
 
 msgid ""
 "1. Nested repository checkouts. They can appear anywhere in the\n"
 "   parent working directory."
 msgstr ""
-"1. Verschachtelte Repository Checkouts. Sie können überall im\n"
+"1. Verschachtelte Archivaktualisierungen. Sie können überall im\n"
 "   übergeordneten Arbeitsverzeichnis auftauchen."
 
 msgid ""
@@ -17415,9 +17670,9 @@
 "   tell where the subrepository checkouts come from. Mercurial\n"
 "   subrepositories are referenced like:"
 msgstr ""
-"2. Verschachtelte Repository References. Sie werden in  ``.hgsub``\n"
-"   definiert und geben an, wo Subrepository Checkouts herkommen.\n"
-"   Mercurial Subrepositories werden wie folgt angegeben:"
+"2. Verschachtelte Archivreferenzen. Sie werden in ``.hgsub`` definiert\n"
+"   und geben an, wo Archivaktualisierungen herkommen.\n"
+"   Mercurial-Unterarchive werden wie folgt angegeben:"
 
 msgid "     path/to/nested = https://example.com/nested/repo/path"
 msgstr ""
@@ -17506,8 +17761,8 @@
 "To remove a subrepository from the parent repository, delete its\n"
 "reference from ``.hgsub``, then remove its files."
 msgstr ""
-"Um ein Subrepository aus seinem Parent Repository zu entfernen,\n"
-"entfernen Sie seine Verweise aus ``.hgsub`` und löschen die Dateien."
+"Um ein Unterarchiv aus seinem Elternarchiv zu entfernen, löschen Sie\n"
+"seine Verweise aus ``.hgsub`` und seine Dateien."
 
 msgid ""
 "Interaction with Mercurial Commands\n"
@@ -18125,7 +18380,7 @@
 msgstr ""
 
 msgid "use -f to force"
-msgstr "Benutzen Sie -f, um dennoch fortzufahren"
+msgstr "Benutzen Sie -f zum erzwingen"
 
 #, python-format
 msgid "named branch could not be reset: current branch is still '%s'\n"
@@ -18203,6 +18458,10 @@
 "changegroupsubset nicht unterstützt."
 
 #, python-format
+msgid "updating %s to public failed!\n"
+msgstr ""
+
+#, python-format
 msgid "%d changesets found\n"
 msgstr "%d Änderungssätze gefunden\n"
 
@@ -18339,11 +18598,13 @@
 msgstr "Zeilenangaben im Diff-Kontext müssen Zahlen sein, nicht %r"
 
 #, python-format
-msgid ""
-"untracked file in working directory differs from file in requested revision: "
-"'%s'"
-msgstr ""
-"Unversionierte Datei in der Arbeitskopie unterscheidet sich von der "
+msgid "%s: untracked file differs\n"
+msgstr "%s: Unversionierte Datei verändert\n"
+
+msgid ""
+"untracked files in working directory differ from files in requested revision"
+msgstr ""
+"Unversionierte Dateien in der Arbeitskopie unterscheidet sich von der "
 "angeforderten Revision: '%s'"
 
 #, python-format
@@ -18415,15 +18676,11 @@
 msgid "merging with a working directory ancestor has no effect"
 msgstr "Zusammenführen mit einem Vorfahren der Arbeitskopie hat keinen Effekt"
 
-msgid "nothing to merge (use 'hg update' or check 'hg heads')"
-msgstr ""
-"Nichts zum Zusammenführen gefunden (nutze 'hg update' oder überprüfe 'hg "
-"heads')"
-
-msgid "outstanding uncommitted changes (use 'hg status' to list changes)"
-msgstr ""
-"Ausstehende nicht versionierte Änderungen (nutze 'hg status' zur Auflistung "
-"der Änderungen)"
+msgid "use 'hg update' or check 'hg heads'"
+msgstr "nutze 'hg update' oder überprüfe 'hg heads'"
+
+msgid "use 'hg status' to list changes"
+msgstr "nutze 'hg status' zur Auflistung der Änderungen"
 
 #, python-format
 msgid "outstanding uncommitted changes in subrepository '%s'"
@@ -18571,6 +18828,14 @@
 msgstr ""
 
 #, python-format
+msgid "ignoring inconsistense public root from remote: %s"
+msgstr ""
+
+#, python-format
+msgid "ignoring unexpected root from remote: %i %s"
+msgstr ""
+
+#, python-format
 msgid "exited with status %d"
 msgstr "Beendet mit Status %d"
 
@@ -18796,6 +19061,14 @@
 "    Nachkommen der Änderungssätze in der Liste sind."
 
 msgid ""
+"``draft()``\n"
+"    Changeset in draft phase."
+msgstr ""
+
+msgid "draft takes no arguments"
+msgstr "draft erwartet keine Argumente"
+
+msgid ""
 "``filelog(pattern)``\n"
 "    Changesets connected to the specified filelog."
 msgstr ""
@@ -19047,6 +19320,33 @@
 "    ein Eintrag nicht gefunden wird, die leere Menge."
 
 msgid ""
+"``public()``\n"
+"    Changeset in public phase."
+msgstr ""
+
+msgid "public takes no arguments"
+msgstr "public erwartet keine Argumente"
+
+msgid ""
+"``remote([id], [path])``\n"
+"    Local revision that corresponds to the given identifier in a\n"
+"    remote repository, if present. Here, the '.' identifier is a\n"
+"    synonym for the current local branch."
+msgstr ""
+
+#. i18n: "remote" is a keyword
+msgid "outgoing takes one or two arguments"
+msgstr "'outgoing' erwartet ein oder zwei Argumente"
+
+#. i18n: "remote" is a keyword
+msgid "remote requires a string id"
+msgstr "remote erwartet eine Zeichenkette (ID)"
+
+#. i18n: "remote" is a keyword
+msgid "remote requires a repository path"
+msgstr "'remote' erwartet einen Projektarchivpfad"
+
+msgid ""
 "``removes(pattern)``\n"
 "    Changesets which remove files matching pattern."
 msgstr ""
@@ -19089,6 +19389,14 @@
 "    Änderungssätze, die keine Eltern in der Menge haben."
 
 msgid ""
+"``secret()``\n"
+"    Changeset in secret phase."
+msgstr ""
+
+msgid "secret takes no arguments"
+msgstr "secret erwartet keine Argumente"
+
+msgid ""
 "``sort(set[, [-]key...])``\n"
 "    Sort set by keys. The default sort order is ascending, specify a key\n"
 "    as ``-key`` to sort in descending order."
@@ -19109,7 +19417,7 @@
 msgstr ""
 "    - ``rev`` für die Revisionsnummer,\n"
 "    - ``branch`` für den Zweignamen,\n"
-"    - ``desc`` für die Commit-Nachricht (description),\n"
+"    - ``desc`` für die Versionsmeldung (description),\n"
 "    - ``user`` für den Benutzernamen (Alias ``author``),\n"
 "    - ``date`` für das Datum des Commits"
 
@@ -19163,7 +19471,7 @@
 
 #, python-format
 msgid "possible case-folding collision for %s"
-msgstr ""
+msgstr "Groß-/Kleinschreibungskonflikt bei %s"
 
 #, python-format
 msgid "path ends in directory separator: %s"
@@ -19197,7 +19505,7 @@
 "kopiert).\n"
 
 msgid ".hg/requires file is corrupt"
-msgstr ".hg/requires file ist korrumpiert"
+msgstr ".hg/requires file ist beschädigt"
 
 #, python-format
 msgid "unknown repository format: requires features '%s' (upgrade Mercurial)"
@@ -19216,7 +19524,7 @@
 msgstr ""
 
 msgid "searching"
-msgstr "suchen"
+msgstr ""
 
 msgid "repository is unrelated"
 msgstr "Projektarchiv steht in keinem Zusammenhang"
@@ -19253,13 +19561,13 @@
 msgid "no suitable response from remote hg"
 msgstr "Keine passende Antwort des entfernten hg"
 
-msgid "remote: "
-msgstr "Entfernt: "
-
 #, python-format
 msgid "push refused: %s"
 msgstr "Hochladen abgewiesen: %s"
 
+msgid "ssl connection failed"
+msgstr "ssk-Verbindung fehlgeschlagen"
+
 msgid "Python SSL support not found"
 msgstr "SSL-Unterstützung für Python nicht gefunden"
 
@@ -19276,33 +19584,17 @@
 msgid "IDN in certificate not supported"
 msgstr ""
 
-#, fuzzy
 msgid "no commonName or subjectAltName found in certificate"
-msgstr "Kein commonName oder subjectAltNamt gefunden im Zertifikat"
+msgstr "Kein commonName oder subjectAltName im Zertifikat gefunden"
 
 #, python-format
 msgid "could not find web.cacerts: %s"
 msgstr "Konnte web.cacerts nicht finden: %s"
 
 #, python-format
-msgid "%s certificate error: %s (use --insecure to connect insecurely)"
-msgstr ""
-"%s Zertifikatsfehler: %s (Benutzen Sie --insecure, um unsicher zu verbinden)"
-
-#, python-format
-msgid "invalid certificate for %s with fingerprint %s"
-msgstr ""
-
-#, python-format
-msgid ""
-"warning: %s certificate with fingerprint %s not verified (check "
-"hostfingerprints or web.cacerts config setting)\n"
-msgstr ""
-
-#, python-format
 msgid "host fingerprint for %s can't be verified (Python too old)"
 msgstr ""
-"Host fingerprint für %s kann nicht verifiziert werden (Python ist zu alt)"
+"Server Authentizität für %s kann nicht verifiziert werden (Python ist zu alt)"
 
 #, python-format
 msgid "warning: certificate for %s can't be verified (Python too old)\n"
@@ -19311,6 +19603,34 @@
 "alt)\n"
 
 #, python-format
+msgid "%s ssl connection error"
+msgstr ""
+
+#, python-format
+msgid "%s certificate error: no certificate received"
+msgstr "%s Zertifikatfehler: Kein Zertifikat empfangen"
+
+#, python-format
+msgid "invalid certificate for %s with fingerprint %s"
+msgstr ""
+
+#, python-format
+msgid "%s certificate error: %s"
+msgstr "%s Zertifikatfehler: %s"
+
+#, python-format
+msgid "configure hostfingerprint %s or use --insecure to connect insecurely"
+msgstr ""
+"Erlauben Sie Serverkennung %s in der Konfiguration oder benutzen Sie "
+"--insecure, um unsicher zu verbinden"
+
+#, python-format
+msgid ""
+"warning: %s certificate with fingerprint %s not verified (check "
+"hostfingerprints or web.cacerts config setting)\n"
+msgstr ""
+
+#, python-format
 msgid "'%s' does not appear to be an hg repository"
 msgstr "'%s' scheint kein hg-Projektarchiv zu sein"
 
@@ -19451,13 +19771,13 @@
 msgid "pushing branch %s of subrepo %s\n"
 msgstr "Übertrage Zweig %s von Unterarchiv %s\n"
 
-#, fuzzy, python-format
+#, python-format
 msgid ""
 "no branch checked out in subrepo %s\n"
 "cannot push revision %s"
 msgstr ""
-"kein Branch in Subrepo %s ausgecheckt\n"
-"Revision %s kann nicht gepusht werden"
+"kein Zweig in Unterarchiv %s aktuell\n"
+"Revision %s kann nicht übertragen werden"
 
 #, python-format
 msgid "%s, line %s: %s\n"
@@ -19540,13 +19860,12 @@
 msgid ":firstline: Any text. Returns the first line of text."
 msgstr ":firstline: Beliebiger Text. Gibt die erste Zeile des Texts zurück."
 
-#, fuzzy
 msgid ""
 ":hex: Any text. Convert a binary Mercurial node identifier into\n"
 "    its long hexadecimal representation."
 msgstr ""
-":hex: Beliebiger Text. Konvertiert einen binären Mercurial node identifier \n"
-"    in seine lange hexadezimale Repräsentation."
+":hex: Beliebiger Text. Konvertiert eine binären Mercurial Knoten-ID \n"
+"    in eine lange hexadezimale Repräsentation."
 
 msgid ""
 ":hgdate: Date. Returns the date as a pair of numbers: \"1157407993\n"
@@ -19613,7 +19932,6 @@
 ":short: Prüfsumme. Gibt die Kurzform der Prüfsumme zurück, d.h.\n"
 "    als 12 Zeichen lange hexadezimale Zeichenkette."
 
-#, fuzzy
 msgid ""
 ":shortbisect: Any text. Treats `text` as a bisection status, and\n"
 "    returns a single-character representing the status (G: good, B: bad,\n"
@@ -19621,11 +19939,9 @@
 "    is not a valid bisection status."
 msgstr ""
 ":shortbisect: Beliebiger text. Behandelt `text` als Teilungsstatus, und\n"
-"    gibt ein einzelnes Zeichen zurück, dass den Status repräsentiert (G: "
-"gut, B: schlecht,\n"
-"    S: übersprungen, U: ungetestet, I: ignoriert). Gibt ein einzelnes "
-"Leerzeichen zurück,\n"
-"    wenn `text` kein gültiger Teilungsstatus ist."
+"    gibt einen repräsentatives Buchstaben zurück (G: gut, B: schlecht,\n"
+"    S: übersprungen, U: ungetestet, I: ignoriert). Wenn `text` kein\n"
+"    gültiger Teilungsstatus ist, wird ein Leerzeichen zurückgegeben."
 
 msgid ":shortdate: Date. Returns a date like \"2006-09-18\"."
 msgstr ":shortdate: Datumsangabe. Gibt ein Datum wie \"2006-09-18\" zurück."
@@ -19762,9 +20078,11 @@
 ":node: Zeichenkette. Die Prüfsumme, die einen Änderungssatz identifiziert,\n"
 "    als 40 Zeichen lange hexadezimale Zeichenkette."
 
-#, fuzzy
-msgid ":rev: Integer. The changeset phase."
-msgstr ":rev: Ganze Zahl. Die Phase des Changesets."
+msgid ":rev: String. The changeset phase name."
+msgstr ""
+
+msgid ":rev: Integer. The changeset phase index."
+msgstr ""
 
 msgid ":rev: Integer. The repository-local changeset revision number."
 msgstr ""
--- a/mercurial/bdiff.c	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/bdiff.c	Tue Apr 17 17:56:36 2012 -0500
@@ -14,38 +14,6 @@
 #include <string.h>
 #include <limits.h>
 
-#if defined __hpux || defined __SUNPRO_C || defined _AIX
-#define inline
-#endif
-
-#ifdef __linux
-#define inline __inline
-#endif
-
-#ifdef _WIN32
-#ifdef _MSC_VER
-#define inline __inline
-typedef unsigned long uint32_t;
-#else
-#include <stdint.h>
-#endif
-static uint32_t htonl(uint32_t x)
-{
-	return ((x & 0x000000ffUL) << 24) |
-		((x & 0x0000ff00UL) <<  8) |
-		((x & 0x00ff0000UL) >>  8) |
-		((x & 0xff000000UL) >> 24);
-}
-#else
-#include <sys/types.h>
-#if defined __BEOS__ && !defined __HAIKU__
-#include <ByteOrder.h>
-#else
-#include <arpa/inet.h>
-#endif
-#include <inttypes.h>
-#endif
-
 #include "util.h"
 
 struct line {
@@ -370,7 +338,6 @@
 	PyObject *result = NULL;
 	struct line *al, *bl;
 	struct hunk l, *h;
-	uint32_t encode[3];
 	int an, bn, len = 0, la, lb, count;
 
 	if (!PyArg_ParseTuple(args, "s#s#:bdiff", &sa, &la, &sb, &lb))
@@ -407,10 +374,9 @@
 	for (h = l.next; h; h = h->next) {
 		if (h->a1 != la || h->b1 != lb) {
 			len = bl[h->b1].l - bl[lb].l;
-			encode[0] = htonl(al[la].l - al->l);
-			encode[1] = htonl(al[h->a1].l - al->l);
-			encode[2] = htonl(len);
-			memcpy(rb, encode, 12);
+			putbe32(al[la].l - al->l, rb);
+			putbe32(al[h->a1].l - al->l, rb + 4);
+			putbe32(len, rb + 8);
 			memcpy(rb + 12, bl[lb].l, len);
 			rb += 12 + len;
 		}
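
The bdiff.c hunk above drops the portability #ifdef block and the local htonl() helper in favour of putbe32() from util.h, which stores a 32-bit value in big-endian byte order. As a rough illustration only, the 12-byte hunk header it emits (three big-endian 32-bit integers) can be reproduced with Python's struct module; the function name below is invented for the sketch::

    import struct

    def encode_hunk_header(a_start, a_end, b_len):
        # three consecutive big-endian 32-bit writes, like putbe32() in bdiff.c
        return struct.pack('>III', a_start, a_end, b_len)

    assert encode_hunk_header(0, 4, 7) == (
        b'\x00\x00\x00\x00' b'\x00\x00\x00\x04' b'\x00\x00\x00\x07')
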
--- a/mercurial/bundlerepo.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/bundlerepo.py	Tue Apr 17 17:56:36 2012 -0500
@@ -95,15 +95,23 @@
         return mdiff.textdiff(self.revision(self.node(rev1)),
                          self.revision(self.node(rev2)))
 
-    def revision(self, node):
-        """return an uncompressed revision of a given"""
+    def revision(self, nodeorrev):
+        """return an uncompressed revision of a given node or revision
+        number.
+        """
+        if isinstance(nodeorrev, int):
+            rev = nodeorrev
+            node = self.node(rev)
+        else:
+            node = nodeorrev
+            rev = self.rev(node)
+
         if node == nullid:
             return ""
 
         text = None
         chain = []
         iter_node = node
-        rev = self.rev(iter_node)
         # reconstruct the revision if it is from a changegroup
         while self.inbundle(rev):
             if self._cache and self._cache[0] == iter_node:
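
bundlerepo's revision() now accepts either a binary node or an integer revision number. A self-contained sketch of that dispatch pattern, with hypothetical lookup callables standing in for self.node() and self.rev(), might look like::

    def resolve(nodeorrev, node_of_rev, rev_of_node):
        # int means a revision number, anything else is treated as a node
        if isinstance(nodeorrev, int):
            rev = nodeorrev
            node = node_of_rev(rev)
        else:
            node = nodeorrev
            rev = rev_of_node(node)
        return node, rev
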
--- a/mercurial/changelog.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/changelog.py	Tue Apr 17 17:56:36 2012 -0500
@@ -9,6 +9,8 @@
 from i18n import _
 import util, error, revlog, encoding
 
+_defaultextra = {'branch': 'default'}
+
 def _string_escape(text):
     """
     >>> d = {'nl': chr(10), 'bs': chr(92), 'cr': chr(13), 'nul': chr(0)}
@@ -26,11 +28,11 @@
 def decodeextra(text):
     """
     >>> decodeextra(encodeextra({'foo': 'bar', 'baz': chr(0) + '2'}))
-    {'foo': 'bar', 'baz': '\\x002'}
+    {'foo': 'bar', 'baz': '\\x002', 'branch': 'default'}
     >>> decodeextra(encodeextra({'foo': 'bar', 'baz': chr(92) + chr(0) + '2'}))
-    {'foo': 'bar', 'baz': '\\\\\\x002'}
+    {'foo': 'bar', 'baz': '\\\\\\x002', 'branch': 'default'}
     """
-    extra = {}
+    extra = _defaultextra.copy()
     for l in text.split('\0'):
         if l:
             if '\\0' in l:
@@ -191,28 +193,26 @@
         """
         text = self.revision(node)
         if not text:
-            return (nullid, "", (0, 0), [], "", {'branch': 'default'})
+            return (nullid, "", (0, 0), [], "", _defaultextra)
         last = text.index("\n\n")
         desc = encoding.tolocal(text[last + 2:])
         l = text[:last].split('\n')
         manifest = bin(l[0])
         user = encoding.tolocal(l[1])
 
-        extra_data = l[2].split(' ', 2)
-        if len(extra_data) != 3:
-            time = float(extra_data.pop(0))
+        tdata = l[2].split(' ', 2)
+        if len(tdata) != 3:
+            time = float(tdata[0])
             try:
                 # various tools did silly things with the time zone field.
-                timezone = int(extra_data[0])
+                timezone = int(tdata[1])
             except ValueError:
                 timezone = 0
-            extra = {}
+            extra = _defaultextra
         else:
-            time, timezone, extra = extra_data
-            time, timezone = float(time), int(timezone)
-            extra = decodeextra(extra)
-        if not extra.get('branch'):
-            extra['branch'] = 'default'
+            time, timezone = float(tdata[0]), int(tdata[1])
+            extra = decodeextra(tdata[2])
+
         files = l[3:]
         return (manifest, user, (time, timezone), files, desc, extra)
 
--- a/mercurial/cmdutil.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/cmdutil.py	Tue Apr 17 17:56:36 2012 -0500
@@ -10,7 +10,7 @@
 import os, sys, errno, re, tempfile
 import util, scmutil, templater, patch, error, templatekw, revlog, copies
 import match as matchmod
-import subrepo
+import subrepo, context, repair, bookmarks
 
 def parsealiases(cmd):
     return cmd.lstrip("^").split("|")
@@ -1006,7 +1006,7 @@
         wanted = set(revs)
     copies = []
 
-    if not slowpath:
+    if not slowpath and match.files():
         # We only have to read through the filelog to find wanted revisions
 
         minrev, maxrev = min(revs), max(revs)
@@ -1150,7 +1150,7 @@
     # it might be worthwhile to do this in the iterator if the rev range
     # is descending and the prune args are all within that range
     for rev in opts.get('prune', ()):
-        rev = repo.changelog.rev(repo.lookup(rev))
+        rev = repo[rev].rev()
         ff = followfilter()
         stop = min(revs[0], revs[-1])
         for x in xrange(rev, stop - 1, -1):
@@ -1285,6 +1285,123 @@
     return commitfunc(ui, repo, message,
                       scmutil.match(repo[None], pats, opts), opts)
 
+def amend(ui, repo, commitfunc, old, extra, pats, opts):
+    ui.note(_('amending changeset %s\n') % old)
+    base = old.p1()
+
+    wlock = repo.wlock()
+    try:
+        # Fix up dirstate for copies and renames
+        duplicatecopies(repo, None, base.node())
+
+        # First, do a regular commit to record all changes in the working
+        # directory (if there are any)
+        node = commit(ui, repo, commitfunc, pats, opts)
+        ctx = repo[node]
+
+        # Participating changesets:
+        #
+        # node/ctx o - new (intermediate) commit that contains changes from
+        #          |   working dir to go into amending commit (or a workingctx
+        #          |   if there were no changes)
+        #          |
+        # old      o - changeset to amend
+        #          |
+        # base     o - parent of amending changeset
+
+        files = set(old.files())
+
+        # Second, we use either the commit we just did, or if there were no
+        # changes the parent of the working directory as the version of the
+        # files in the final amend commit
+        if node:
+            ui.note(_('copying changeset %s to %s\n') % (ctx, base))
+
+            user = ctx.user()
+            date = ctx.date()
+            message = ctx.description()
+            extra = ctx.extra()
+
+            # Prune files which were reverted by the updates: if old introduced
+            # file X and our intermediate commit, node, renamed that file, then
+            # those two files are the same and we can discard X from our list
+            # of files. Likewise if X was deleted, it's no longer relevant
+            files.update(ctx.files())
+
+            def samefile(f):
+                if f in ctx.manifest():
+                    a = ctx.filectx(f)
+                    if f in base.manifest():
+                        b = base.filectx(f)
+                        return (a.data() == b.data()
+                                and a.flags() == b.flags()
+                                and a.renamed() == b.renamed())
+                    else:
+                        return False
+                else:
+                    return f not in base.manifest()
+            files = [f for f in files if not samefile(f)]
+
+            def filectxfn(repo, ctx_, path):
+                try:
+                    return ctx.filectx(path)
+                except KeyError:
+                    raise IOError()
+        else:
+            ui.note(_('copying changeset %s to %s\n') % (old, base))
+
+            # Use version of files as in the old cset
+            def filectxfn(repo, ctx_, path):
+                try:
+                    return old.filectx(path)
+                except KeyError:
+                    raise IOError()
+
+            # See if we got a message from -m or -l, if not, open the editor
+            # with the message of the changeset to amend
+            user = opts.get('user') or old.user()
+            date = opts.get('date') or old.date()
+            message = logmessage(ui, opts)
+            if not message:
+                cctx = context.workingctx(repo, old.description(), user, date,
+                                          extra,
+                                          repo.status(base.node(), old.node()))
+                message = commitforceeditor(repo, cctx, [])
+
+        new = context.memctx(repo,
+                             parents=[base.node(), nullid],
+                             text=message,
+                             files=files,
+                             filectxfn=filectxfn,
+                             user=user,
+                             date=date,
+                             extra=extra)
+        newid = repo.commitctx(new)
+        if newid != old.node():
+            # Reroute the working copy parent to the new changeset
+            repo.dirstate.setparents(newid, nullid)
+
+            # Move bookmarks from old parent to amend commit
+            bms = repo.nodebookmarks(old.node())
+            if bms:
+                for bm in bms:
+                    repo._bookmarks[bm] = newid
+                bookmarks.write(repo)
+
+            # Strip the intermediate commit (if there was one) and the amended
+            # commit
+            lock = repo.lock()
+            try:
+                if node:
+                    ui.note(_('stripping intermediate changeset %s\n') % ctx)
+                ui.note(_('stripping amended changeset %s\n') % old)
+                repair.strip(ui, repo, old.node(), topic='amend-backup')
+            finally:
+                lock.release()
+    finally:
+        wlock.release()
+    return newid
+
 def commiteditor(repo, ctx, subs):
     if ctx.description():
         return ctx.description()
@@ -1325,6 +1442,187 @@
 
     return text
 
+def revert(ui, repo, ctx, parents, *pats, **opts):
+    parent, p2 = parents
+    node = ctx.node()
+
+    mf = ctx.manifest()
+    if node == parent:
+        pmf = mf
+    else:
+        pmf = None
+
+    # need all matching names in dirstate and manifest of target rev,
+    # so have to walk both. do not print errors if files exist in one
+    # but not other.
+
+    names = {}
+
+    wlock = repo.wlock()
+    try:
+        # walk dirstate.
+
+        m = scmutil.match(repo[None], pats, opts)
+        m.bad = lambda x, y: False
+        for abs in repo.walk(m):
+            names[abs] = m.rel(abs), m.exact(abs)
+
+        # walk target manifest.
+
+        def badfn(path, msg):
+            if path in names:
+                return
+            if path in repo[node].substate:
+                return
+            path_ = path + '/'
+            for f in names:
+                if f.startswith(path_):
+                    return
+            ui.warn("%s: %s\n" % (m.rel(path), msg))
+
+        m = scmutil.match(repo[node], pats, opts)
+        m.bad = badfn
+        for abs in repo[node].walk(m):
+            if abs not in names:
+                names[abs] = m.rel(abs), m.exact(abs)
+
+        # get the list of subrepos that must be reverted
+        targetsubs = [s for s in repo[node].substate if m(s)]
+        m = scmutil.matchfiles(repo, names)
+        changes = repo.status(match=m)[:4]
+        modified, added, removed, deleted = map(set, changes)
+
+        # if f is a rename, also revert the source
+        cwd = repo.getcwd()
+        for f in added:
+            src = repo.dirstate.copied(f)
+            if src and src not in names and repo.dirstate[src] == 'r':
+                removed.add(src)
+                names[src] = (repo.pathto(src, cwd), True)
+
+        def removeforget(abs):
+            if repo.dirstate[abs] == 'a':
+                return _('forgetting %s\n')
+            return _('removing %s\n')
+
+        revert = ([], _('reverting %s\n'))
+        add = ([], _('adding %s\n'))
+        remove = ([], removeforget)
+        undelete = ([], _('undeleting %s\n'))
+
+        disptable = (
+            # dispatch table:
+            #   file state
+            #   action if in target manifest
+            #   action if not in target manifest
+            #   make backup if in target manifest
+            #   make backup if not in target manifest
+            (modified, revert, remove, True, True),
+            (added, revert, remove, True, False),
+            (removed, undelete, None, False, False),
+            (deleted, revert, remove, False, False),
+            )
+
+        for abs, (rel, exact) in sorted(names.items()):
+            mfentry = mf.get(abs)
+            target = repo.wjoin(abs)
+            def handle(xlist, dobackup):
+                xlist[0].append(abs)
+                if (dobackup and not opts.get('no_backup') and
+                    os.path.lexists(target)):
+                    bakname = "%s.orig" % rel
+                    ui.note(_('saving current version of %s as %s\n') %
+                            (rel, bakname))
+                    if not opts.get('dry_run'):
+                        util.rename(target, bakname)
+                if ui.verbose or not exact:
+                    msg = xlist[1]
+                    if not isinstance(msg, basestring):
+                        msg = msg(abs)
+                    ui.status(msg % rel)
+            for table, hitlist, misslist, backuphit, backupmiss in disptable:
+                if abs not in table:
+                    continue
+                # file has changed in dirstate
+                if mfentry:
+                    handle(hitlist, backuphit)
+                elif misslist is not None:
+                    handle(misslist, backupmiss)
+                break
+            else:
+                if abs not in repo.dirstate:
+                    if mfentry:
+                        handle(add, True)
+                    elif exact:
+                        ui.warn(_('file not managed: %s\n') % rel)
+                    continue
+                # file has not changed in dirstate
+                if node == parent:
+                    if exact:
+                        ui.warn(_('no changes needed to %s\n') % rel)
+                    continue
+                if pmf is None:
+                    # only need parent manifest in this unlikely case,
+                    # so do not read by default
+                    pmf = repo[parent].manifest()
+                if abs in pmf and mfentry:
+                    # if version of file is same in parent and target
+                    # manifests, do nothing
+                    if (pmf[abs] != mfentry or
+                        pmf.flags(abs) != mf.flags(abs)):
+                        handle(revert, False)
+                else:
+                    handle(remove, False)
+
+        if not opts.get('dry_run'):
+            def checkout(f):
+                fc = ctx[f]
+                repo.wwrite(f, fc.data(), fc.flags())
+
+            audit_path = scmutil.pathauditor(repo.root)
+            for f in remove[0]:
+                if repo.dirstate[f] == 'a':
+                    repo.dirstate.drop(f)
+                    continue
+                audit_path(f)
+                try:
+                    util.unlinkpath(repo.wjoin(f))
+                except OSError:
+                    pass
+                repo.dirstate.remove(f)
+
+            normal = None
+            if node == parent:
+                # We're reverting to our parent. If possible, we'd like status
+                # to report the file as clean. We have to use normallookup for
+                # merges to avoid losing information about merged/dirty files.
+                if p2 != nullid:
+                    normal = repo.dirstate.normallookup
+                else:
+                    normal = repo.dirstate.normal
+            for f in revert[0]:
+                checkout(f)
+                if normal:
+                    normal(f)
+
+            for f in add[0]:
+                checkout(f)
+                repo.dirstate.add(f)
+
+            normal = repo.dirstate.normallookup
+            if node == parent and p2 == nullid:
+                normal = repo.dirstate.normal
+            for f in undelete[0]:
+                checkout(f)
+                normal(f)
+
+            if targetsubs:
+                # Revert the subrepos on the revert list
+                for sub in targetsubs:
+                    ctx.sub(sub).revert(ui, ctx.substate[sub], *pats, **opts)
+    finally:
+        wlock.release()
+
 def command(table):
     '''returns a function object bound to table which can be used as
     a decorator for populating table as a command table'''
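
The revert logic moved into cmdutil above is driven by a dispatch table keyed
on file state: each entry names the action to take when the file exists in the
target manifest, the action when it does not, and whether to make a backup in
either case. A minimal, self-contained sketch of that pattern, with simplified
names that are not the real cmdutil API::

    # Illustrative dispatch table (simplified; not the real cmdutil code).
    modified, added, removed, deleted = {'a.txt'}, set(), set(), {'b.txt'}

    def revertaction(f):
        return 'reverting %s' % f
    def removeaction(f):
        return 'removing %s' % f
    def undeleteaction(f):
        return 'undeleting %s' % f

    disptable = (
        # (file state, action if in target manifest, action if not,
        #  backup if in target manifest, backup if not)
        (modified, revertaction, removeaction, True, True),
        (added,    revertaction, removeaction, True, False),
        (removed,  undeleteaction, None,       False, False),
        (deleted,  revertaction, removeaction, False, False),
    )

    targetmanifest = {'a.txt'}     # pretend manifest of the target revision
    for f in ('a.txt', 'b.txt'):
        for state, hit, miss, backuphit, backupmiss in disptable:
            if f not in state:
                continue
            action = hit if f in targetmanifest else miss
            if action is not None:
                print(action(f))
            break
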
--- a/mercurial/commands.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/commands.py	Tue Apr 17 17:56:36 2012 -0500
@@ -16,7 +16,7 @@
 import merge as mergemod
 import minirst, revset, fileset
 import dagparser, context, simplemerge
-import random, setdiscovery, treediscovery, dagutil
+import random, setdiscovery, treediscovery, dagutil, pvec
 import phases
 
 table = {}
@@ -972,6 +972,12 @@
     if 'rev' in opts:
         revs = scmutil.revrange(repo, opts['rev'])
 
+    bundletype = opts.get('type', 'bzip2').lower()
+    btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
+    bundletype = btypes.get(bundletype)
+    if bundletype not in changegroup.bundletypes:
+        raise util.Abort(_('unknown bundle type specified with --type'))
+
     if opts.get('all'):
         base = ['null']
     else:
@@ -998,12 +1004,6 @@
         scmutil.nochangesfound(ui, outgoing and outgoing.excluded)
         return 1
 
-    bundletype = opts.get('type', 'bzip2').lower()
-    btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
-    bundletype = btypes.get(bundletype)
-    if bundletype not in changegroup.bundletypes:
-        raise util.Abort(_('unknown bundle type specified with --type'))
-
     changegroup.writebundle(cg, fname, bundletype)
 
 @command('cat',
@@ -1163,6 +1163,7 @@
      _('mark new/missing files as added/removed before committing')),
     ('', 'close-branch', None,
      _('mark a branch as closed, hiding it from the branch list')),
+    ('', 'amend', None, _('amend the parent of the working dir')),
     ] + walkopts + commitopts + commitopts2 + subrepoopts,
     _('[OPTION]... [FILE]...'))
 def commit(ui, repo, *pats, **opts):
@@ -1183,6 +1184,20 @@
     commit fails, you will find a backup of your message in
     ``.hg/last-message.txt``.
 
+    The --amend flag can be used to amend the parent of the
+    working directory with a new commit that contains the changes
+    in the parent in addition to those currently reported by :hg:`status`,
+    if there are any. The old commit is stored in a backup bundle in
+    ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
+    on how to restore it).
+
+    Message, user and date are taken from the amended commit unless
+    specified. When a message isn't specified on the command line,
+    the editor will open with the message of the amended commit.
+
+    It is not possible to amend public changesets (see :hg:`help phases`)
+    or changesets that have children.
+
     See :hg:`help dates` for a list of formats valid for -d/--date.
 
     Returns 0 on success, 1 if nothing changed.
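
The --amend path below reuses the amended changeset's message, user and date
whenever the corresponding option is not given on the command line. A small
illustrative sketch of that fallback rule in plain Python (not the Mercurial
API; the helper name is made up)::

    # Hypothetical helper showing the fallback; 'old' stands in for the
    # changeset being amended.
    def amendmeta(opts, old):
        return {
            'message': opts.get('message') or old['message'],
            'user':    opts.get('user') or old['user'],
            'date':    opts.get('date') or old['date'],
        }

    old = {'message': 'fix typo', 'user': 'alice', 'date': '2012-04-17'}
    print(amendmeta({'user': 'bob'}, old))
    # message and date come from the old changeset, user is overridden
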
@@ -1198,31 +1213,70 @@
             # current branch, so it's sufficient to test branchheads
             raise util.Abort(_('can only close branch heads'))
         extra['close'] = 1
-    e = cmdutil.commiteditor
-    if opts.get('force_editor'):
-        e = cmdutil.commitforceeditor
-
-    def commitfunc(ui, repo, message, match, opts):
-        return repo.commit(message, opts.get('user'), opts.get('date'), match,
-                           editor=e, extra=extra)
 
     branch = repo[None].branch()
     bheads = repo.branchheads(branch)
 
-    node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
-    if not node:
-        stat = repo.status(match=scmutil.match(repo[None], pats, opts))
-        if stat[3]:
-            ui.status(_("nothing changed (%d missing files, see 'hg status')\n")
-                      % len(stat[3]))
-        else:
+    if opts.get('amend'):
+        if ui.config('ui', 'commitsubrepos'):
+            raise util.Abort(_('cannot amend recursively'))
+
+        old = repo['.']
+        if old.phase() == phases.public:
+            raise util.Abort(_('cannot amend public changesets'))
+        if len(old.parents()) > 1:
+            raise util.Abort(_('cannot amend merge changesets'))
+        if len(repo[None].parents()) > 1:
+            raise util.Abort(_('cannot amend while merging'))
+        if old.children():
+            raise util.Abort(_('cannot amend changeset with children'))
+
+        e = cmdutil.commiteditor
+        if opts.get('force_editor'):
+            e = cmdutil.commitforceeditor
+
+        def commitfunc(ui, repo, message, match, opts):
+            editor = e
+            # message contains text from -m or -l, if it's empty,
+            # open the editor with the old message
+            if not message:
+                message = old.description()
+                editor = cmdutil.commitforceeditor
+            return repo.commit(message,
+                               opts.get('user') or old.user(),
+                               opts.get('date') or old.date(),
+                               match,
+                               editor=editor,
+                               extra=extra)
+
+        node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
+        if node == old.node():
             ui.status(_("nothing changed\n"))
-        return 1
+            return 1
+    else:
+        e = cmdutil.commiteditor
+        if opts.get('force_editor'):
+            e = cmdutil.commitforceeditor
+
+        def commitfunc(ui, repo, message, match, opts):
+            return repo.commit(message, opts.get('user'), opts.get('date'),
+                               match, editor=e, extra=extra)
+
+        node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
+
+        if not node:
+            stat = repo.status(match=scmutil.match(repo[None], pats, opts))
+            if stat[3]:
+                ui.status(_("nothing changed (%d missing files, see "
+                            "'hg status')\n") % len(stat[3]))
+            else:
+                ui.status(_("nothing changed\n"))
+            return 1
 
     ctx = repo[node]
     parents = ctx.parents()
 
-    if (bheads and node not in bheads and not
+    if (not opts.get('amend') and bheads and node not in bheads and not
         [x for x in parents if x.node() in bheads and x.branch() == branch]):
         ui.status(_('created new head\n'))
         # The message is not printed for initial roots. For the other
@@ -1963,6 +2017,27 @@
             ui.write("%s\t%s\n" % (k.encode('string-escape'),
                                    v.encode('string-escape')))
 
+@command('debugpvec', [], _('A B'))
+def debugpvec(ui, repo, a, b=None):
+    ca = scmutil.revsingle(repo, a)
+    cb = scmutil.revsingle(repo, b)
+    pa = pvec.ctxpvec(ca)
+    pb = pvec.ctxpvec(cb)
+    if pa == pb:
+        rel = "="
+    elif pa > pb:
+        rel = ">"
+    elif pa < pb:
+        rel = "<"
+    elif pa | pb:
+        rel = "|"
+    ui.write(_("a: %s\n") % pa)
+    ui.write(_("b: %s\n") % pb)
+    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
+    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
+             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
+              pa.distance(pb), rel))
+
 @command('debugrebuildstate',
     [('r', 'rev', '', _('revision to rebuild to'), _('REV'))],
     _('[-r REV] [REV]'))
@@ -2155,13 +2230,17 @@
 
 @command('debugrevspec', [], ('REVSPEC'))
 def debugrevspec(ui, repo, expr):
-    '''parse and apply a revision specification'''
+    """parse and apply a revision specification
+
+    Use --verbose to print the parsed tree before and after aliases
+    expansion.
+    """
     if ui.verbose:
         tree = revset.parse(expr)[0]
-        ui.note(tree, "\n")
+        ui.note(revset.prettyformat(tree), "\n")
         newtree = revset.findaliases(ui, tree)
         if newtree != tree:
-            ui.note(newtree, "\n")
+            ui.note(revset.prettyformat(newtree), "\n")
     func = revset.match(ui, expr)
     for c in func(repo, range(len(repo))):
         ui.write("%s\n" % c)
@@ -2467,7 +2546,7 @@
       _('record the current date as commit date')),
      ('U', 'currentuser', False,
       _('record the current user as committer'), _('DATE'))]
-    + commitopts2 + mergetoolopts,
+    + commitopts2 + mergetoolopts + dryrunopts,
     _('[OPTION]... REVISION...'))
 def graft(ui, repo, *revs, **opts):
     '''copy changes from other branches onto the current branch
@@ -2586,7 +2665,10 @@
 
     for pos, ctx in enumerate(repo.set("%ld", revs)):
         current = repo['.']
+
         ui.status(_('grafting revision %s\n') % ctx.rev())
+        if opts.get('dry_run'):
+            continue
 
         # we don't merge the first commit when continuing
         if not cont:
@@ -2629,7 +2711,7 @@
                     date=date, extra=extra, editor=editor)
 
     # remove state when we complete successfully
-    if os.path.exists(repo.join('graftstate')):
+    if not opts.get('dry_run') and os.path.exists(repo.join('graftstate')):
         util.unlinkpath(repo.join('graftstate'))
 
     return 0
@@ -3597,8 +3679,9 @@
     try:
         try:
             wlock = repo.wlock()
-            lock = repo.lock()
-            tr = repo.transaction('import')
+            if not opts.get('no_commit'):
+                lock = repo.lock()
+                tr = repo.transaction('import')
             parents = repo.parents()
             for patchurl in patches:
                 if patchurl == '-':
@@ -3624,7 +3707,8 @@
                 if not haspatch:
                     raise util.Abort(_('%s: no diffs found') % patchurl)
 
-            tr.close()
+            if tr:
+                tr.close()
             if msgs:
                 repo.savecommitmessage('\n* * *\n'.join(msgs))
         except:
@@ -4212,7 +4296,8 @@
 
         public < draft < secret
 
-    Return 0 on success, 1 if no phases were changed.
+    Return 0 on success, 1 if no phases were changed or some could not
+    be changed.
     """
     # search for a unique phase argument
     targetphase = None
@@ -4254,8 +4339,18 @@
             changes = 0
             newdata = repo._phaserev
             changes = sum(o != newdata[i] for i, o in enumerate(olddata))
+            rejected = [n for n in nodes
+                        if newdata[repo[n].rev()] < targetphase]
+            if rejected:
+                ui.warn(_('cannot move %i changesets to a more permissive '
+                          'phase, use --force\n') % len(rejected))
+                ret = 1
             if changes:
-                ui.note(_('phase change for %i changesets\n') % changes)
+                msg = _('phase changed for %i changesets\n') % changes
+                if ret:
+                    ui.status(msg)
+                else:
+                    ui.note(msg)
             else:
                 ui.warn(_('no phases changed\n'))
                 ret = 1
@@ -4741,7 +4836,6 @@
                          hint=_('use "hg update" or see "hg help revert"'))
 
     ctx = scmutil.revsingle(repo, opts.get('rev'))
-    node = ctx.node()
 
     if not pats and not opts.get('all'):
         msg = _("no files or directories specified")
@@ -4750,6 +4844,7 @@
                      " or 'hg update -C .' to abort the merge")
             raise util.Abort(msg, hint=hint)
         dirty = util.any(repo.status())
+        node = ctx.node()
         if node != parent:
             if dirty:
                 hint = _("uncommitted changes, use --all to discard all"
@@ -4763,178 +4858,7 @@
             hint = _("use --all to revert all files")
         raise util.Abort(msg, hint=hint)
 
-    mf = ctx.manifest()
-    if node == parent:
-        pmf = mf
-    else:
-        pmf = None
-
-    # need all matching names in dirstate and manifest of target rev,
-    # so have to walk both. do not print errors if files exist in one
-    # but not other.
-
-    names = {}
-
-    wlock = repo.wlock()
-    try:
-        # walk dirstate.
-
-        m = scmutil.match(repo[None], pats, opts)
-        m.bad = lambda x, y: False
-        for abs in repo.walk(m):
-            names[abs] = m.rel(abs), m.exact(abs)
-
-        # walk target manifest.
-
-        def badfn(path, msg):
-            if path in names:
-                return
-            if path in repo[node].substate:
-                ui.warn("%s: %s\n" % (m.rel(path),
-                    'reverting subrepos is unsupported'))
-                return
-            path_ = path + '/'
-            for f in names:
-                if f.startswith(path_):
-                    return
-            ui.warn("%s: %s\n" % (m.rel(path), msg))
-
-        m = scmutil.match(repo[node], pats, opts)
-        m.bad = badfn
-        for abs in repo[node].walk(m):
-            if abs not in names:
-                names[abs] = m.rel(abs), m.exact(abs)
-
-        m = scmutil.matchfiles(repo, names)
-        changes = repo.status(match=m)[:4]
-        modified, added, removed, deleted = map(set, changes)
-
-        # if f is a rename, also revert the source
-        cwd = repo.getcwd()
-        for f in added:
-            src = repo.dirstate.copied(f)
-            if src and src not in names and repo.dirstate[src] == 'r':
-                removed.add(src)
-                names[src] = (repo.pathto(src, cwd), True)
-
-        def removeforget(abs):
-            if repo.dirstate[abs] == 'a':
-                return _('forgetting %s\n')
-            return _('removing %s\n')
-
-        revert = ([], _('reverting %s\n'))
-        add = ([], _('adding %s\n'))
-        remove = ([], removeforget)
-        undelete = ([], _('undeleting %s\n'))
-
-        disptable = (
-            # dispatch table:
-            #   file state
-            #   action if in target manifest
-            #   action if not in target manifest
-            #   make backup if in target manifest
-            #   make backup if not in target manifest
-            (modified, revert, remove, True, True),
-            (added, revert, remove, True, False),
-            (removed, undelete, None, False, False),
-            (deleted, revert, remove, False, False),
-            )
-
-        for abs, (rel, exact) in sorted(names.items()):
-            mfentry = mf.get(abs)
-            target = repo.wjoin(abs)
-            def handle(xlist, dobackup):
-                xlist[0].append(abs)
-                if (dobackup and not opts.get('no_backup') and
-                    os.path.lexists(target)):
-                    bakname = "%s.orig" % rel
-                    ui.note(_('saving current version of %s as %s\n') %
-                            (rel, bakname))
-                    if not opts.get('dry_run'):
-                        util.rename(target, bakname)
-                if ui.verbose or not exact:
-                    msg = xlist[1]
-                    if not isinstance(msg, basestring):
-                        msg = msg(abs)
-                    ui.status(msg % rel)
-            for table, hitlist, misslist, backuphit, backupmiss in disptable:
-                if abs not in table:
-                    continue
-                # file has changed in dirstate
-                if mfentry:
-                    handle(hitlist, backuphit)
-                elif misslist is not None:
-                    handle(misslist, backupmiss)
-                break
-            else:
-                if abs not in repo.dirstate:
-                    if mfentry:
-                        handle(add, True)
-                    elif exact:
-                        ui.warn(_('file not managed: %s\n') % rel)
-                    continue
-                # file has not changed in dirstate
-                if node == parent:
-                    if exact:
-                        ui.warn(_('no changes needed to %s\n') % rel)
-                    continue
-                if pmf is None:
-                    # only need parent manifest in this unlikely case,
-                    # so do not read by default
-                    pmf = repo[parent].manifest()
-                if abs in pmf and mfentry:
-                    # if version of file is same in parent and target
-                    # manifests, do nothing
-                    if (pmf[abs] != mfentry or
-                        pmf.flags(abs) != mf.flags(abs)):
-                        handle(revert, False)
-                else:
-                    handle(remove, False)
-
-        if not opts.get('dry_run'):
-            def checkout(f):
-                fc = ctx[f]
-                repo.wwrite(f, fc.data(), fc.flags())
-
-            audit_path = scmutil.pathauditor(repo.root)
-            for f in remove[0]:
-                if repo.dirstate[f] == 'a':
-                    repo.dirstate.drop(f)
-                    continue
-                audit_path(f)
-                try:
-                    util.unlinkpath(repo.wjoin(f))
-                except OSError:
-                    pass
-                repo.dirstate.remove(f)
-
-            normal = None
-            if node == parent:
-                # We're reverting to our parent. If possible, we'd like status
-                # to report the file as clean. We have to use normallookup for
-                # merges to avoid losing information about merged/dirty files.
-                if p2 != nullid:
-                    normal = repo.dirstate.normallookup
-                else:
-                    normal = repo.dirstate.normal
-            for f in revert[0]:
-                checkout(f)
-                if normal:
-                    normal(f)
-
-            for f in add[0]:
-                checkout(f)
-                repo.dirstate.add(f)
-
-            normal = repo.dirstate.normallookup
-            if node == parent and p2 == nullid:
-                normal = repo.dirstate.normal
-            for f in undelete[0]:
-                checkout(f)
-                normal(f)
-
-    finally:
-        wlock.release()
+    return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)
 
 @command('rollback', dryrunopts +
          [('f', 'force', False, _('ignore safety measures'))])
@@ -5264,18 +5188,22 @@
     if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
         copy = copies.pathcopies(repo[node1], repo[node2])
 
+    fm = ui.formatter('status', opts)
+    format = '%s %s' + end
+    if opts.get('no_status'):
+        format = '%.0s%s' + end
+
     for state, char, files in changestates:
         if state in show:
-            format = "%s %%s%s" % (char, end)
-            if opts.get('no_status'):
-                format = "%%s%s" % end
-
+            label = 'status.' + state
             for f in files:
-                ui.write(format % repo.pathto(f, cwd),
-                         label='status.' + state)
+                fm.startitem()
+                fm.write("status path", format, char,
+                         repo.pathto(f, cwd), label=label)
                 if f in copy:
-                    ui.write('  %s%s' % (repo.pathto(copy[f], cwd), end),
+                    fm.write("copy", '  %s' + end, repo.pathto(copy[f], cwd),
                              label='status.copied')
+    fm.end()
 
 @command('^summary|sum',
     [('', 'remote', None, _('check for push and pull'))], '[--remote]')
@@ -5712,18 +5640,21 @@
     if check and clean:
         raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
 
+    if date:
+        if rev is not None:
+            raise util.Abort(_("you can't specify a revision and a date"))
+        rev = cmdutil.finddate(ui, repo, date)
+
     if check:
         # we could use dirty() but we can ignore merge and branch trivia
         c = repo[None]
         if c.modified() or c.added() or c.removed():
             raise util.Abort(_("uncommitted local changes"))
-
-    if date:
-        if rev is not None:
-            raise util.Abort(_("you can't specify a revision and a date"))
-        rev = cmdutil.finddate(ui, repo, date)
-
-    if clean or check:
+        if not rev:
+            rev = repo[repo[None].branch()].rev()
+        mergemod._checkunknown(repo, repo[None], repo[rev])
+
+    if clean:
         ret = hg.clean(repo, rev)
     else:
         ret = hg.update(repo, rev)
--- a/mercurial/config.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/config.py	Tue Apr 17 17:56:36 2012 -0500
@@ -115,6 +115,9 @@
 
         for l in data.splitlines(True):
             line += 1
+            if line == 1 and l.startswith('\xef\xbb\xbf'):
+                # Someone set us up the BOM
+                l = l[3:]
             if cont:
                 if commentre.match(l):
                     continue
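
The config change above skips a UTF-8 byte-order mark if one starts the first
line of a file. The same check in isolation (the helper name is made up)::

    # Illustrative only: strip a UTF-8 BOM from the first line of a config file.
    BOM = '\xef\xbb\xbf'

    def stripbom(lines):
        if lines and lines[0].startswith(BOM):
            lines[0] = lines[0][len(BOM):]
        return lines

    print(stripbom(['\xef\xbb\xbf[ui]\n', 'username = alice\n']))
    # ['[ui]\n', 'username = alice\n']
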
--- a/mercurial/context.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/context.py	Tue Apr 17 17:56:36 2012 -0500
@@ -5,7 +5,7 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-from node import nullid, nullrev, short, hex
+from node import nullid, nullrev, short, hex, bin
 from i18n import _
 import ancestor, mdiff, error, util, scmutil, subrepo, patch, encoding, phases
 import match as matchmod
@@ -21,12 +21,84 @@
         if changeid == '':
             changeid = '.'
         self._repo = repo
-        if isinstance(changeid, (long, int)):
+
+        if isinstance(changeid, int):
             self._rev = changeid
-            self._node = self._repo.changelog.node(changeid)
-        else:
-            self._node = self._repo.lookup(changeid)
-            self._rev = self._repo.changelog.rev(self._node)
+            self._node = repo.changelog.node(changeid)
+            return
+        if changeid == '.':
+            self._node = repo.dirstate.p1()
+            self._rev = repo.changelog.rev(self._node)
+            return
+        if changeid == 'null':
+            self._node = nullid
+            self._rev = nullrev
+            return
+        if changeid == 'tip':
+            self._rev = len(repo.changelog) - 1
+            self._node = repo.changelog.node(self._rev)
+            return
+        if len(changeid) == 20:
+            try:
+                self._node = changeid
+                self._rev = repo.changelog.rev(changeid)
+                return
+            except LookupError:
+                pass
+
+        try:
+            r = int(changeid)
+            if str(r) != changeid:
+                raise ValueError
+            l = len(repo.changelog)
+            if r < 0:
+                r += l
+            if r < 0 or r >= l:
+                raise ValueError
+            self._rev = r
+            self._node = repo.changelog.node(r)
+            return
+        except (ValueError, OverflowError):
+            pass
+
+        if len(changeid) == 40:
+            try:
+                self._node = bin(changeid)
+                self._rev = repo.changelog.rev(self._node)
+                return
+            except (TypeError, LookupError):
+                pass
+
+        if changeid in repo._bookmarks:
+            self._node = repo._bookmarks[changeid]
+            self._rev = repo.changelog.rev(self._node)
+            return
+        if changeid in repo._tagscache.tags:
+            self._node = repo._tagscache.tags[changeid]
+            self._rev = repo.changelog.rev(self._node)
+            return
+        if changeid in repo.branchtags():
+            self._node = repo.branchtags()[changeid]
+            self._rev = repo.changelog.rev(self._node)
+            return
+
+        self._node = repo.changelog._partialmatch(changeid)
+        if self._node is not None:
+            self._rev = repo.changelog.rev(self._node)
+            return
+
+        # lookup failed
+        # check if it might have come from damaged dirstate
+        if changeid in repo.dirstate.parents():
+            raise error.Abort(_("working directory has unknown parent '%s'!")
+                              % short(changeid))
+        try:
+            if len(changeid) == 20:
+                changeid = hex(changeid)
+        except TypeError:
+            pass
+        raise error.RepoLookupError(
+            _("unknown revision '%s'") % changeid)
 
     def __str__(self):
         return short(self.node())
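
The rewritten constructor above resolves a changeid by trying a fixed sequence
of interpretations: integer revision, the symbolic names '.', 'null' and
'tip', a 20-byte binary node, a decimal revision string, a 40-digit hex node,
then bookmarks, tags, branch names and finally a partial node match. A toy
sketch of such a try-in-order resolver chain (not the real lookup code)::

    # Toy resolver chain; each resolver returns None if it does not recognize
    # the identifier, mirroring the structure of the constructor above.
    def resolve(changeid, resolvers):
        for name, fn in resolvers:
            result = fn(changeid)
            if result is not None:
                return name, result
        raise LookupError("unknown revision %r" % changeid)

    resolvers = [
        ('symbolic', lambda s: {'.': 'wdir parent', 'null': -1, 'tip': 'tip'}.get(s)),
        ('decimal rev', lambda s: int(s) if s.isdigit() else None),
        ('hex node', lambda s: s if len(s) == 40 and
                               all(c in '0123456789abcdef' for c in s) else None),
    ]
    print(resolve('tip', resolvers))    # ('symbolic', 'tip')
    print(resolve('42', resolvers))     # ('decimal rev', 42)
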
@@ -57,7 +129,7 @@
 
     @propertycache
     def _changeset(self):
-        return self._repo.changelog.read(self.node())
+        return self._repo.changelog.read(self.rev())
 
     @propertycache
     def _manifest(self):
@@ -611,11 +683,12 @@
 
         return None
 
-    def ancestors(self):
+    def ancestors(self, followfirst=False):
         visit = {}
         c = self
+        cut = followfirst and 1 or None
         while True:
-            for parent in c.parents():
+            for parent in c.parents()[:cut]:
                 visit[(parent.rev(), parent.node())] = parent
             if not visit:
                 break
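
The followfirst flag above relies on the idiom ``cut = followfirst and 1 or
None`` so that ``parents()[:cut]`` keeps only the first parent when following
first parents and all parents otherwise. In isolation::

    parents = ['p1', 'p2']
    for followfirst in (False, True):
        cut = followfirst and 1 or None
        print("%s -> %s" % (followfirst, parents[:cut]))
    # False -> ['p1', 'p2']
    # True -> ['p1']
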
@@ -715,9 +788,6 @@
     def _manifest(self):
         """generate a manifest corresponding to the working directory"""
 
-        if self._unknown is None:
-            self.status(unknown=True)
-
         man = self._parents[0].manifest().copy()
         if len(self._parents) > 1:
             man2 = self.p2().manifest()
@@ -731,8 +801,7 @@
         copied = self._repo.dirstate.copies()
         ff = self._flagfunc
         modified, added, removed, deleted = self._status
-        unknown = self._unknown
-        for i, l in (("a", added), ("m", modified), ("u", unknown)):
+        for i, l in (("a", added), ("m", modified)):
             for f in l:
                 orig = copied.get(f, f)
                 man[f] = getman(orig).get(orig, nullid) + i
--- a/mercurial/copies.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/copies.py	Tue Apr 17 17:56:36 2012 -0500
@@ -18,15 +18,6 @@
         return ""
     return f[:s]
 
-def _dirs(files):
-    d = set()
-    for f in files:
-        f = _dirname(f)
-        while f not in d:
-            d.add(f)
-            f = _dirname(f)
-    return d
-
 def _findlimit(repo, a, b):
     """Find the earliest revision that's an ancestor of a or b but not both,
     None if no such revision exists.
@@ -174,9 +165,18 @@
         return _backwardcopies(x, y)
     return _chain(x, y, _backwardcopies(x, a), _forwardcopies(a, y))
 
-def mergecopies(repo, c1, c2, ca, checkdirs=True):
+def mergecopies(repo, c1, c2, ca):
     """
-    Find moves and copies between context c1 and c2
+    Find moves and copies between context c1 and c2 that are relevant
+    for merging.
+
+    Returns two dicts, "copy" and "diverge".
+
+    "copy" is a mapping from destination name -> source name,
+    where source is in c1 and destination is in c2 or vice-versa.
+
+    "diverge" is a mapping of source name -> list of destination names
+    for divergent renames.
     """
     # avoid silly behavior for update from empty dir
     if not c1 or not c2 or c1 == c2:
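
The new docstring above describes the two dicts returned by mergecopies. Their
shapes, with made-up file names, look roughly like this::

    # Made-up values; only the shapes match the docstring above.
    copy = {'util/helpers.c': 'helpers.c'}                # destination -> source
    diverge = {'old.c': ['old-copy1.c', 'old-copy2.c']}   # source -> destinations

    for dst, src in sorted(copy.items()):
        print("%s was renamed/copied from %s" % (dst, src))
    for src, dsts in sorted(diverge.items()):
        print("%s diverged into %s" % (src, ", ".join(dsts)))
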
@@ -301,14 +301,14 @@
             repo.ui.debug("   %s -> %s %s\n" % (f, fullcopy[f], note))
     del diverge2
 
-    if not fullcopy or not checkdirs:
+    if not fullcopy:
         return copy, diverge
 
     repo.ui.debug("  checking for directory renames\n")
 
     # generate a directory move map
-    d1, d2 = _dirs(m1), _dirs(m2)
-    invalid = set()
+    d1, d2 = c1.dirs(), c2.dirs()
+    invalid = set([""])
     dirmove = {}
 
     # examine each file copy for a potential directory move, which is
--- a/mercurial/dispatch.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/dispatch.py	Tue Apr 17 17:56:36 2012 -0500
@@ -687,6 +687,59 @@
         if repo and repo != req.repo:
             repo.close()
 
+def lsprofile(ui, func, fp):
+    format = ui.config('profiling', 'format', default='text')
+    field = ui.config('profiling', 'sort', default='inlinetime')
+    climit = ui.configint('profiling', 'nested', default=5)
+
+    if not format in ['text', 'kcachegrind']:
+        ui.warn(_("unrecognized profiling format '%s'"
+                    " - Ignored\n") % format)
+        format = 'text'
+
+    try:
+        from mercurial import lsprof
+    except ImportError:
+        raise util.Abort(_(
+            'lsprof not available - install from '
+            'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
+    p = lsprof.Profiler()
+    p.enable(subcalls=True)
+    try:
+        return func()
+    finally:
+        p.disable()
+
+        if format == 'kcachegrind':
+            import lsprofcalltree
+            calltree = lsprofcalltree.KCacheGrind(p)
+            calltree.output(fp)
+        else:
+            # format == 'text'
+            stats = lsprof.Stats(p.getstats())
+            stats.sort(field)
+            stats.pprint(limit=30, file=fp, climit=climit)
+
+def statprofile(ui, func, fp):
+    try:
+        import statprof
+    except ImportError:
+        raise util.Abort(_(
+            'statprof not available - install using "easy_install statprof"'))
+
+    freq = ui.configint('profiling', 'freq', default=1000)
+    if freq > 0:
+        statprof.reset(freq)
+    else:
+        ui.warn(_("invalid sampling frequency '%s' - ignoring\n") % freq)
+
+    statprof.start()
+    try:
+        return func()
+    finally:
+        statprof.stop()
+        statprof.display(fp)
+
 def _runcommand(ui, options, cmd, cmdfunc):
     def checkargs():
         try:
@@ -695,45 +748,28 @@
             raise error.CommandError(cmd, _("invalid arguments"))
 
     if options['profile']:
-        format = ui.config('profiling', 'format', default='text')
-
-        if not format in ['text', 'kcachegrind']:
-            ui.warn(_("unrecognized profiling format '%s'"
-                        " - Ignored\n") % format)
-            format = 'text'
+        profiler = os.getenv('HGPROF')
+        if profiler is None:
+            profiler = ui.config('profiling', 'type', default='ls')
+        if profiler not in ('ls', 'stat'):
+            ui.warn(_("unrecognized profiler '%s' - ignored\n") % profiler)
+            profiler = 'ls'
 
         output = ui.config('profiling', 'output')
 
         if output:
             path = ui.expandpath(output)
-            ostream = open(path, 'wb')
+            fp = open(path, 'wb')
         else:
-            ostream = sys.stderr
+            fp = sys.stderr
 
         try:
-            from mercurial import lsprof
-        except ImportError:
-            raise util.Abort(_(
-                'lsprof not available - install from '
-                'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
-        p = lsprof.Profiler()
-        p.enable(subcalls=True)
-        try:
-            return checkargs()
+            if profiler == 'ls':
+                return lsprofile(ui, checkargs, fp)
+            else:
+                return statprofile(ui, checkargs, fp)
         finally:
-            p.disable()
-
-            if format == 'kcachegrind':
-                import lsprofcalltree
-                calltree = lsprofcalltree.KCacheGrind(p)
-                calltree.output(ostream)
-            else:
-                # format == 'text'
-                stats = lsprof.Stats(p.getstats())
-                stats.sort()
-                stats.pprint(top=10, file=ostream, climit=5)
-
             if output:
-                ostream.close()
+                fp.close()
     else:
         return checkargs()
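
The refactoring above splits profiling into lsprofile and statprofile and
picks between them via the HGPROF environment variable or profiling.type,
falling back to 'ls' for unknown names. A hedged sketch of that select-and-run
shape, using the standard library cProfile in place of lsprof (the helper
names here are made up)::

    import cProfile, os, pstats, sys

    def lsprofile(func, fp):
        # stand-in for the lsprof-based path, using cProfile from the stdlib
        prof = cProfile.Profile()
        try:
            return prof.runcall(func)
        finally:
            pstats.Stats(prof, stream=fp).sort_stats('cumulative').print_stats(5)

    def runprofiled(func, fp=sys.stderr):
        profiler = os.getenv('HGPROF') or 'ls'
        if profiler not in ('ls', 'stat'):
            profiler = 'ls'          # unrecognized profiler: fall back, as above
        # the 'stat' branch (statprof) is omitted in this sketch
        return lsprofile(func, fp)

    runprofiled(lambda: sum(i * i for i in range(100000)))
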
--- a/mercurial/encoding.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/encoding.py	Tue Apr 17 17:56:36 2012 -0500
@@ -92,24 +92,32 @@
     'foo: \\xc3\\xa4'
     """
 
-    for e in ('UTF-8', fallbackencoding):
+    try:
         try:
-            u = s.decode(e) # attempt strict decoding
+            # make sure string is actually stored in UTF-8
+            u = s.decode('UTF-8')
+            if encoding == 'UTF-8':
+                # fast path
+                return s
             r = u.encode(encoding, "replace")
             if u == r.decode(encoding):
                 # r is a safe, non-lossy encoding of s
                 return r
-            elif e == 'UTF-8':
-                return localstr(s, r)
-            else:
+            return localstr(s, r)
+        except UnicodeDecodeError:
+            # we should only get here if we're looking at an ancient changeset
+            try:
+                u = s.decode(fallbackencoding)
+                r = u.encode(encoding, "replace")
+                if u == r.decode(encoding):
+                    # r is a safe, non-lossy encoding of s
+                    return r
                 return localstr(u.encode('UTF-8'), r)
-
-        except LookupError, k:
-            raise error.Abort(k, hint="please check your locale settings")
-        except UnicodeDecodeError:
-            pass
-    u = s.decode("utf-8", "replace") # last ditch
-    return u.encode(encoding, "replace") # can't round-trip
+            except UnicodeDecodeError:
+                u = s.decode("utf-8", "replace") # last ditch
+                return u.encode(encoding, "replace") # can't round-trip
+    except LookupError, k:
+        raise error.Abort(k, hint="please check your locale settings")
 
 def fromlocal(s):
     """
@@ -160,6 +168,10 @@
 def lower(s):
     "best-effort encoding-aware case-folding of local string s"
     try:
+        return s.encode('ascii').lower()
+    except UnicodeDecodeError:
+        pass
+    try:
         if isinstance(s, localstr):
             u = s._utf8.decode("utf-8")
         else:
@@ -190,3 +202,80 @@
         return s.upper() # we don't know how to fold this except in ASCII
     except LookupError, k:
         raise error.Abort(k, hint="please check your locale settings")
+
+def toutf8b(s):
+    '''convert a local, possibly-binary string into UTF-8b
+
+    This is intended as a generic method to preserve data when working
+    with schemes like JSON and XML that have no provision for
+    arbitrary byte strings. As Mercurial often doesn't know
+    what encoding data is in, we use so-called UTF-8b.
+
+    If a string is already valid UTF-8 (or ASCII), it passes unmodified.
+    Otherwise, unsupported bytes are mapped into the UTF-16 surrogate
+    range, U+DC00-U+DCFF.
+
+    Principles of operation:
+
+    - ASCII and UTF-8 data successfully round-trips and is understood
+      by Unicode-oriented clients
+    - filenames and file contents in arbitrary other encodings can
+      be round-tripped or recovered by clueful clients
+    - local strings that have a cached known UTF-8 encoding (aka
+      localstr) get sent as UTF-8 so Unicode-oriented clients get the
+      Unicode data they want
+    - because we must preserve UTF-8 bytestrings in places such as
+      filenames, metadata can't be round-tripped without help
+
+    (Note: "UTF-8b" often refers to decoding a mix of valid UTF-8 and
+    arbitrary bytes into an internal Unicode format that can be
+    re-encoded back into the original. Here we are exposing the
+    internal surrogate encoding as a UTF-8 string.)
+    '''
+
+    if isinstance(s, localstr):
+        return s._utf8
+
+    try:
+        if s.decode('utf-8'):
+            return s
+    except UnicodeDecodeError:
+        # surrogate-encode any characters that don't round-trip
+        s2 = s.decode('utf-8', 'ignore').encode('utf-8')
+        r = ""
+        pos = 0
+        for c in s:
+            if s2[pos:pos + 1] == c:
+                r += c
+                pos += 1
+            else:
+                r += unichr(0xdc00 + ord(c)).encode('utf-8')
+        return r
+
+def fromutf8b(s):
+    '''Given a UTF-8b string, return a local, possibly-binary string.
+
+    This undoes the surrogate encoding and returns the original binary
+    string. It is a round-trip process for strings like filenames, but
+    metadata that was passed through tolocal will remain in UTF-8.
+
+    >>> m = "\\xc3\\xa9\\x99abcd"
+    >>> n = toutf8b(m)
+    >>> n
+    '\\xc3\\xa9\\xed\\xb2\\x99abcd'
+    >>> fromutf8b(n) == m
+    True
+    '''
+
+    # fast path - look for uDxxx prefixes in s
+    if "\xed" not in s:
+        return s
+
+    u = s.decode("utf-8")
+    r = ""
+    for c in u:
+        if ord(c) & 0xff00 == 0xdc00:
+            r += chr(ord(c) & 0xff)
+        else:
+            r += c.encode("utf-8")
+    return r
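
The toutf8b/fromutf8b pair above escapes bytes that are not valid UTF-8 into
the U+DC00-U+DCFF range and maps them back on the way out. A one-byte
illustration of that mapping, assuming Python 2 string semantics as in the
rest of this codebase::

    stray = '\x99'                                   # not valid UTF-8 on its own
    escaped = unichr(0xdc00 + ord(stray)).encode('utf-8')
    print(repr(escaped))                             # '\xed\xb2\x99'
    recovered = chr(ord(escaped.decode('utf-8')) & 0xff)
    print(recovered == stray)                        # True
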
--- a/mercurial/filemerge.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/filemerge.py	Tue Apr 17 17:56:36 2012 -0500
@@ -19,11 +19,21 @@
 def _toollist(ui, tool, part, default=[]):
     return ui.configlist("merge-tools", tool + "." + part, default)
 
-_internal = ['internal:' + s
-             for s in 'fail local other merge prompt dump'.split()]
+internals = {}
+
+def internaltool(name, trymerge, onfailure=None):
+    '''return a decorator for populating internal merge tool table'''
+    def decorator(func):
+        fullname = 'internal:' + name
+        func.__doc__ = "``%s``\n" % fullname + func.__doc__.strip()
+        internals[fullname] = func
+        func.trymerge = trymerge
+        func.onfailure = onfailure
+        return func
+    return decorator
 
 def _findtool(ui, tool):
-    if tool in _internal:
+    if tool in internals:
         return tool
     for kn in ("regkey", "regkeyalt"):
         k = _toolstr(ui, tool, kn)
@@ -126,6 +136,131 @@
             if newdata != data:
                 util.writefile(file, newdata)
 
+@internaltool('prompt', False)
+def _iprompt(repo, mynode, orig, fcd, fco, fca, toolconf):
+    """Asks the user which of the local or the other version to keep as
+    the merged version."""
+    ui = repo.ui
+    fd = fcd.path()
+
+    if ui.promptchoice(_(" no tool found to merge %s\n"
+                         "keep (l)ocal or take (o)ther?") % fd,
+                       (_("&Local"), _("&Other")), 0):
+        return _iother(repo, mynode, orig, fcd, fco, fca, toolconf)
+    else:
+        return _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf)
+
+@internaltool('local', False)
+def _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf):
+    """Uses the local version of files as the merged version."""
+    return 0
+
+@internaltool('other', False)
+def _iother(repo, mynode, orig, fcd, fco, fca, toolconf):
+    """Uses the other version of files as the merged version."""
+    repo.wwrite(fcd.path(), fco.data(), fco.flags())
+    return 0
+
+@internaltool('fail', False)
+def _ifail(repo, mynode, orig, fcd, fco, fca, toolconf):
+    """
+    Rather than attempting to merge files that were modified on both
+    branches, it marks them as unresolved. The resolve command must be
+    used to resolve these conflicts."""
+    return 1
+
+def _premerge(repo, toolconf, files):
+    tool, toolpath, binary, symlink = toolconf
+    a, b, c, back = files
+
+    ui = repo.ui
+
+    # do we attempt to simplemerge first?
+    try:
+        premerge = _toolbool(ui, tool, "premerge", not (binary or symlink))
+    except error.ConfigError:
+        premerge = _toolstr(ui, tool, "premerge").lower()
+        valid = 'keep'.split()
+        if premerge not in valid:
+            _valid = ', '.join(["'" + v + "'" for v in valid])
+            raise error.ConfigError(_("%s.premerge not valid "
+                                      "('%s' is neither boolean nor %s)") %
+                                    (tool, premerge, _valid))
+
+    if premerge:
+        r = simplemerge.simplemerge(ui, a, b, c, quiet=True)
+        if not r:
+            ui.debug(" premerge successful\n")
+            return 0
+        if premerge != 'keep':
+            util.copyfile(back, a) # restore from backup and try again
+    return 1 # continue merging
+
+@internaltool('merge', True,
+              _("merging %s incomplete! "
+                "(edit conflicts, then use 'hg resolve --mark')\n"))
+def _imerge(repo, mynode, orig, fcd, fco, fca, toolconf, files):
+    """
+    Uses the internal non-interactive simple merge algorithm for merging
+    files. It will fail if there are any conflicts and leave markers in
+    the partially merged file."""
+    r = _premerge(repo, toolconf, files)
+    if r:
+        a, b, c, back = files
+
+        ui = repo.ui
+
+        r = simplemerge.simplemerge(ui, a, b, c, label=['local', 'other'])
+        return True, r
+    return False, 0
+
+@internaltool('dump', True)
+def _idump(repo, mynode, orig, fcd, fco, fca, toolconf, files):
+    """
+    Creates three versions of the files to merge, containing the
+    contents of local, other and base. These files can then be used to
+    perform a merge manually. If the file to be merged is named
+    ``a.txt``, these files will accordingly be named ``a.txt.local``,
+    ``a.txt.other`` and ``a.txt.base`` and they will be placed in the
+    same directory as ``a.txt``."""
+    r = _premerge(repo, toolconf, files)
+    if r:
+        a, b, c, back = files
+
+        fd = fcd.path()
+
+        util.copyfile(a, a + ".local")
+        repo.wwrite(fd + ".other", fco.data(), fco.flags())
+        repo.wwrite(fd + ".base", fca.data(), fca.flags())
+    return False, r
+
+def _xmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files):
+    r = _premerge(repo, toolconf, files)
+    if r:
+        tool, toolpath, binary, symlink = toolconf
+        a, b, c, back = files
+        out = ""
+        env = dict(HG_FILE=fcd.path(),
+                   HG_MY_NODE=short(mynode),
+                   HG_OTHER_NODE=str(fco.changectx()),
+                   HG_BASE_NODE=str(fca.changectx()),
+                   HG_MY_ISLINK='l' in fcd.flags(),
+                   HG_OTHER_ISLINK='l' in fco.flags(),
+                   HG_BASE_ISLINK='l' in fca.flags())
+
+        ui = repo.ui
+
+        args = _toolstr(ui, tool, "args", '$local $base $other')
+        if "$output" in args:
+            out, a = a, back # read input from backup, write to original
+        replace = dict(local=a, base=b, other=c, output=out)
+        args = util.interpolate(r'\$', replace, args,
+                                lambda s: '"%s"' % util.localpath(s))
+        r = util.system(toolpath + ' ' + args, cwd=repo.root, environ=env,
+                        out=ui.fout)
+        return True, r
+    return False, 0
+
 def filemerge(repo, mynode, orig, fcd, fco, fca):
     """perform a 3-way merge in the working directory
 
@@ -156,25 +291,23 @@
     ui.debug("picked tool '%s' for %s (binary %s symlink %s)\n" %
                (tool, fd, binary, symlink))
 
-    if not tool or tool == 'internal:prompt':
-        tool = "internal:local"
-        if ui.promptchoice(_(" no tool found to merge %s\n"
-                             "keep (l)ocal or take (o)ther?") % fd,
-                           (_("&Local"), _("&Other")), 0):
-            tool = "internal:other"
-    if tool == "internal:local":
-        return 0
-    if tool == "internal:other":
-        repo.wwrite(fd, fco.data(), fco.flags())
-        return 0
-    if tool == "internal:fail":
-        return 1
+    if tool in internals:
+        func = internals[tool]
+        trymerge = func.trymerge
+        onfailure = func.onfailure
+    else:
+        func = _xmerge
+        trymerge = True
+        onfailure = _("merging %s failed!\n")
 
-    # do the actual merge
+    toolconf = tool, toolpath, binary, symlink
+
+    if not trymerge:
+        return func(repo, mynode, orig, fcd, fco, fca, toolconf)
+
     a = repo.wjoin(fd)
     b = temp("base", fca)
     c = temp("other", fco)
-    out = ""
     back = a + ".orig"
     util.copyfile(a, back)
 
@@ -185,56 +318,18 @@
 
     ui.debug("my %s other %s ancestor %s\n" % (fcd, fco, fca))
 
-    # do we attempt to simplemerge first?
-    try:
-        premerge = _toolbool(ui, tool, "premerge", not (binary or symlink))
-    except error.ConfigError:
-        premerge = _toolstr(ui, tool, "premerge").lower()
-        valid = 'keep'.split()
-        if premerge not in valid:
-            _valid = ', '.join(["'" + v + "'" for v in valid])
-            raise error.ConfigError(_("%s.premerge not valid "
-                                      "('%s' is neither boolean nor %s)") %
-                                    (tool, premerge, _valid))
-
-    if premerge:
-        r = simplemerge.simplemerge(ui, a, b, c, quiet=True)
-        if not r:
-            ui.debug(" premerge successful\n")
+    needcheck, r = func(repo, mynode, orig, fcd, fco, fca, toolconf,
+                        (a, b, c, back))
+    if not needcheck:
+        if r:
+            if onfailure:
+                ui.warn(onfailure % fd)
+        else:
             os.unlink(back)
-            os.unlink(b)
-            os.unlink(c)
-            return 0
-        if premerge != 'keep':
-            util.copyfile(back, a) # restore from backup and try again
 
-    env = dict(HG_FILE=fd,
-               HG_MY_NODE=short(mynode),
-               HG_OTHER_NODE=str(fco.changectx()),
-               HG_BASE_NODE=str(fca.changectx()),
-               HG_MY_ISLINK='l' in fcd.flags(),
-               HG_OTHER_ISLINK='l' in fco.flags(),
-               HG_BASE_ISLINK='l' in fca.flags())
-
-    if tool == "internal:merge":
-        r = simplemerge.simplemerge(ui, a, b, c, label=['local', 'other'])
-    elif tool == 'internal:dump':
-        a = repo.wjoin(fd)
-        util.copyfile(a, a + ".local")
-        repo.wwrite(fd + ".other", fco.data(), fco.flags())
-        repo.wwrite(fd + ".base", fca.data(), fca.flags())
         os.unlink(b)
         os.unlink(c)
-        return 1 # unresolved
-    else:
-        args = _toolstr(ui, tool, "args", '$local $base $other')
-        if "$output" in args:
-            out, a = a, back # read input from backup, write to original
-        replace = dict(local=a, base=b, other=c, output=out)
-        args = util.interpolate(r'\$', replace, args,
-                                lambda s: '"%s"' % util.localpath(s))
-        r = util.system(toolpath + ' ' + args, cwd=repo.root, environ=env,
-                        out=ui.fout)
+        return r
 
     if not r and (_toolbool(ui, tool, "checkconflicts") or
                   'conflicts' in _toollist(ui, tool, "check")):
@@ -251,24 +346,24 @@
 
     if not r and not checked and (_toolbool(ui, tool, "checkchanged") or
                                   'changed' in _toollist(ui, tool, "check")):
-        if filecmp.cmp(repo.wjoin(fd), back):
+        if filecmp.cmp(a, back):
             if ui.promptchoice(_(" output file %s appears unchanged\n"
                                  "was merge successful (yn)?") % fd,
                                (_("&Yes"), _("&No")), 1):
                 r = 1
 
     if _toolbool(ui, tool, "fixeol"):
-        _matcheol(repo.wjoin(fd), back)
+        _matcheol(a, back)
 
     if r:
-        if tool == "internal:merge":
-            ui.warn(_("merging %s incomplete! "
-                      "(edit conflicts, then use 'hg resolve --mark')\n") % fd)
-        else:
-            ui.warn(_("merging %s failed!\n") % fd)
+        if onfailure:
+            ui.warn(onfailure % fd)
     else:
         os.unlink(back)
 
     os.unlink(b)
     os.unlink(c)
     return r
+
+# tell hggettext to extract docstrings from these functions:
+i18nfunctions = internals.values()
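
The internaltool decorator above replaces the hard-coded list of internal
merge tools with a registration table: each tool function is stored under its
``internal:`` name together with its trymerge/onfailure attributes, which also
lets the help system and hggettext pick up the docstrings. A stripped-down
sketch of the registration pattern, without the merge logic::

    # Simplified registry mirroring the decorator above.
    internals = {}

    def internaltool(name, trymerge, onfailure=None):
        def decorator(func):
            fullname = 'internal:' + name
            internals[fullname] = func
            func.trymerge = trymerge
            func.onfailure = onfailure
            return func
        return decorator

    @internaltool('fail', False)
    def _ifail(repo, mynode, orig, fcd, fco, fca, toolconf):
        """Mark conflicting files as unresolved instead of merging them."""
        return 1

    print(sorted(internals))                      # ['internal:fail']
    print(internals['internal:fail'].trymerge)    # False
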
--- a/mercurial/fileset.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/fileset.py	Tue Apr 17 17:56:36 2012 -0500
@@ -358,6 +358,28 @@
             s.append(f)
     return s
 
+def subrepo(mctx, x):
+    """``subrepo([pattern])``
+    Subrepositories whose paths match the given pattern.
+    """
+    # i18n: "subrepo" is a keyword
+    getargs(x, 0, 1, _("subrepo takes at most one argument"))
+    ctx = mctx.ctx
+    sstate = ctx.substate
+    if x:
+        pat = getstring(x, _("subrepo requires a pattern or no arguments"))
+
+        import match as matchmod # avoid circular import issues
+        fast = not matchmod.patkind(pat)
+        if fast:
+            def m(s):
+                return (s == pat)
+        else:
+            m = matchmod.match(ctx._repo.root, '', [pat], ctx=ctx)
+        return [sub for sub in sstate if m(sub)]
+    else:
+        return [sub for sub in sstate]
+
 symbols = {
     'added': added,
     'binary': binary,
@@ -376,6 +398,7 @@
     'symlink': symlink,
     'unknown': unknown,
     'unresolved': unresolved,
+    'subrepo': subrepo,
 }
 
 methods = {
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/formatter.py	Tue Apr 17 17:56:36 2012 -0500
@@ -0,0 +1,71 @@
+# formatter.py - generic output formatting for mercurial
+#
+# Copyright 2012 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+class baseformatter(object):
+    def __init__(self, ui, topic, opts):
+        self._ui = ui
+        self._topic = topic
+        self._style = opts.get("style")
+        self._template = opts.get("template")
+        self._item = None
+    def __bool__(self):
+        '''return False if we're not doing real templating so we can
+        skip extra work'''
+        return True
+    def _showitem(self):
+        '''show a formatted item once all data is collected'''
+        pass
+    def startitem(self):
+        '''begin an item in the format list'''
+        if self._item is not None:
+            self._showitem()
+        self._item = {}
+    def data(self, **data):
+        '''insert data into item that's not shown in default output'''
+    def write(self, fields, deftext, *fielddata, **opts):
+        '''do default text output while assigning data to item'''
+        for k, v in zip(fields.split(), fielddata):
+            self._item[k] = v
+    def plain(self, text, **opts):
+        '''show raw text for non-templated mode'''
+        pass
+    def end(self):
+        '''end output for the formatter'''
+        if self._item is not None:
+            self._showitem()
+
+class plainformatter(baseformatter):
+    '''the default text output scheme'''
+    def __init__(self, ui, topic, opts):
+        baseformatter.__init__(self, ui, topic, opts)
+    def __bool__(self):
+        return False
+    def startitem(self):
+        pass
+    def data(self, **data):
+        pass
+    def write(self, fields, deftext, *fielddata, **opts):
+        self._ui.write(deftext % fielddata, **opts)
+    def plain(self, text, **opts):
+        self._ui.write(text, **opts)
+    def end(self):
+        pass
+
+class debugformatter(baseformatter):
+    def __init__(self, ui, topic, opts):
+        baseformatter.__init__(self, ui, topic, opts)
+        self._ui.write("%s = {\n" % self._topic)
+    def _showitem(self):
+        self._ui.write("    " + repr(self._item) + ",\n")
+    def end(self):
+        baseformatter.end(self)
+        self._ui.write("}\n")
+
+def formatter(ui, topic, opts):
+    if ui.configbool('ui', 'formatdebug'):
+        return debugformatter(ui, topic, opts)
+    return plainformatter(ui, topic, opts)
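
The new formatter module above gives commands such as status a single
startitem()/write()/end() calling convention that can emit either plain text
or structured debug output. A self-contained sketch of how a caller drives
that interface, with a stub ui object standing in for the real one::

    import sys

    class StubUI(object):
        def write(self, text, **opts):
            sys.stdout.write(text)

    class SketchPlainFormatter(object):
        """Minimal stand-in for the plainformatter above."""
        def __init__(self, ui):
            self._ui = ui
        def startitem(self):
            pass
        def write(self, fields, deftext, *fielddata, **opts):
            self._ui.write(deftext % fielddata, **opts)
        def end(self):
            pass

    fm = SketchPlainFormatter(StubUI())
    for char, path in [('M', 'a.txt'), ('A', 'b.txt')]:
        fm.startitem()
        fm.write("status path", "%s %s\n", char, path, label='status.modified')
    fm.end()
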
--- a/mercurial/graphmod.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/graphmod.py	Tue Apr 17 17:56:36 2012 -0500
@@ -18,6 +18,7 @@
 """
 
 from mercurial.node import nullrev
+import util
 
 CHANGESET = 'C'
 
@@ -67,7 +68,7 @@
         parents = set([p.rev() for p in ctx.parents() if p.node() in include])
         yield (ctx.rev(), CHANGESET, ctx, sorted(parents))
 
-def colored(dag):
+def colored(dag, repo):
     """annotates a DAG with colored edge information
 
     For each DAG node this function emits tuples::
@@ -83,6 +84,23 @@
     seen = []
     colors = {}
     newcolor = 1
+    config = {}
+
+    for key, val in repo.ui.configitems('graph'):
+        if '.' in key:
+            branch, setting = key.rsplit('.', 1)
+            # Validation
+            if setting == "width" and val.isdigit():
+                config.setdefault(branch, {})[setting] = int(val)
+            elif setting == "color" and val.isalnum():
+                config.setdefault(branch, {})[setting] = val
+
+    if config:
+        getconf = util.lrucachefunc(
+            lambda rev: config.get(repo[rev].branch(), {}))
+    else:
+        getconf = lambda rev: {}
+
     for (cur, type, data, parents) in dag:
 
         # Compute seen and next
@@ -111,10 +129,18 @@
         edges = []
         for ecol, eid in enumerate(seen):
             if eid in next:
-                edges.append((ecol, next.index(eid), colors[eid]))
+                bconf = getconf(eid)
+                edges.append((
+                    ecol, next.index(eid), colors[eid],
+                    bconf.get('width', -1),
+                    bconf.get('color', '')))
             elif eid == cur:
                 for p in parents:
-                    edges.append((ecol, next.index(p), color))
+                    bconf = getconf(p)
+                    edges.append((
+                        ecol, next.index(p), color,
+                        bconf.get('width', -1),
+                        bconf.get('color', '')))
 
         # Yield and move on
         yield (cur, type, data, (col, color), edges)
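
colored() above now reads per-branch width and color settings from the
[graph] section; keys have the form <branch>.<setting>, and only digit widths
and alphanumeric colors are accepted. The parsing step in isolation, with
configitems simulated by a plain list of pairs::

    def parsegraphconfig(items):
        config = {}
        for key, val in items:
            if '.' not in key:
                continue
            branch, setting = key.rsplit('.', 1)
            if setting == 'width' and val.isdigit():
                config.setdefault(branch, {})[setting] = int(val)
            elif setting == 'color' and val.isalnum():
                config.setdefault(branch, {})[setting] = val
        return config

    print(parsegraphconfig([('default.width', '2'), ('default.color', 'FF0000')]))
    # {'default': {'width': 2, 'color': 'FF0000'}}
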
--- a/mercurial/help.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/help.py	Tue Apr 17 17:56:36 2012 -0500
@@ -7,7 +7,7 @@
 
 from i18n import gettext, _
 import sys, os
-import extensions, revset, fileset, templatekw, templatefilters
+import extensions, revset, fileset, templatekw, templatefilters, filemerge
 import util
 
 def listexts(header, exts, indent=1):
@@ -94,8 +94,13 @@
             continue
         text = gettext(text)
         lines = text.splitlines()
-        lines[1:] = [('  ' + l.strip()) for l in lines[1:]]
-        entries.append('\n'.join(lines))
+        doclines = [(lines[0])]
+        for l in lines[1:]:
+            # Stop once we find some Python doctest
+            if l.strip().startswith('>>>'):
+                break
+            doclines.append('  ' + l.strip())
+        entries.append('\n'.join(doclines))
     entries = '\n\n'.join(entries)
     return doc.replace(marker, entries)
 
@@ -105,6 +110,7 @@
     addtopichook(topic, add)
 
 addtopicsymbols('filesets', '.. predicatesmarker', fileset.symbols)
+addtopicsymbols('merge-tools', '.. internaltoolsmarker', filemerge.internals)
 addtopicsymbols('revsets', '.. predicatesmarker', revset.symbols)
 addtopicsymbols('templates', '.. keywordsmarker', templatekw.keywords)
 addtopicsymbols('templates', '.. filtersmarker', templatefilters.filters)
--- a/mercurial/help/config.txt	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/help/config.txt	Tue Apr 17 17:56:36 2012 -0500
@@ -28,16 +28,17 @@
 paths are given below, settings from earlier paths override later
 ones.
 
-| (Unix, Windows) ``<repo>/.hg/hgrc``
+| (All) ``<repo>/.hg/hgrc``
 
     Per-repository configuration options that only apply in a
     particular repository. This file is not version-controlled, and
     will not get transferred during a "clone" operation. Options in
     this file override options in all other configuration files. On
-    Unix, most of this file will be ignored if it doesn't belong to a
-    trusted user or to a trusted group. See the documentation for the
-    ``[trusted]`` section below for more details.
+    Plan 9 and Unix, most of this file will be ignored if it doesn't
+    belong to a trusted user or to a trusted group. See the documentation
+    for the ``[trusted]`` section below for more details.
 
+| (Plan 9) ``$home/lib/hgrc``
 | (Unix) ``$HOME/.hgrc``
 | (Windows) ``%USERPROFILE%\.hgrc``
 | (Windows) ``%USERPROFILE%\Mercurial.ini``
@@ -50,6 +51,8 @@
     directory. Options in these files override per-system and per-installation
     options.
 
+| (Plan 9) ``/lib/mercurial/hgrc``
+| (Plan 9) ``/lib/mercurial/hgrc.d/*.rc``
 | (Unix) ``/etc/mercurial/hgrc``
 | (Unix) ``/etc/mercurial/hgrc.d/*.rc``
 
@@ -58,6 +61,8 @@
     executed by any user in any directory. Options in these files
     override per-installation options.
 
+| (Plan 9) ``<install-root>/lib/mercurial/hgrc``
+| (Plan 9) ``<install-root>/lib/mercurial/hgrc.d/*.rc``
 | (Unix) ``<install-root>/etc/mercurial/hgrc``
 | (Unix) ``<install-root>/etc/mercurial/hgrc.d/*.rc``
 
@@ -489,24 +494,6 @@
   myfeature = ~/.hgext/myfeature.py
 
 
-``hostfingerprints``
-""""""""""""""""""""
-
-Fingerprints of the certificates of known HTTPS servers.
-A HTTPS connection to a server with a fingerprint configured here will
-only succeed if the servers certificate matches the fingerprint.
-This is very similar to how ssh known hosts works.
-The fingerprint is the SHA-1 hash value of the DER encoded certificate.
-The CA chain and web.cacerts is not used for servers with a fingerprint.
-
-For example::
-
-    [hostfingerprints]
-    hg.intevation.org = 38:76:52:7c:87:26:9a:8f:4a:f8:d3:de:08:45:3b:ea:d6:4b:ee:cc
-
-This feature is only supported when using Python 2.6 or later.
-
-
 ``format``
 """"""""""
 
@@ -534,119 +521,33 @@
     option ensures that the on-disk format of newly created
     repositories will be compatible with Mercurial before version 1.7.
 
-``merge-patterns``
-""""""""""""""""""
-
-This section specifies merge tools to associate with particular file
-patterns. Tools matched here will take precedence over the default
-merge tool. Patterns are globs by default, rooted at the repository
-root.
+``graph``
+"""""""""
 
-Example::
+Web graph view configuration. This section lets you change the display
+properties of graph elements per branch, for instance to make the
+``default`` branch stand out.
 
-  [merge-patterns]
-  **.c = kdiff3
-  **.jpg = myimgmerge
-
-``merge-tools``
-"""""""""""""""
+Each line has the following format::
 
-This section configures external merge tools to use for file-level
-merges.
+    <branch>.<argument> = <value>
 
-Example ``~/.hgrc``::
+where ``<branch>`` is the name of the branch being
+customized. Example::
 
-  [merge-tools]
-  # Override stock tool location
-  kdiff3.executable = ~/bin/kdiff3
-  # Specify command line
-  kdiff3.args = $base $local $other -o $output
-  # Give higher priority
-  kdiff3.priority = 1
-
-  # Define new tool
-  myHtmlTool.args = -m $local $other $base $output
-  myHtmlTool.regkey = Software\FooSoftware\HtmlMerge
-  myHtmlTool.priority = 1
+    [graph]
+    # 2px width
+    default.width = 2
+    # red color
+    default.color = FF0000
 
 Supported arguments:
 
-``priority``
-  The priority in which to evaluate this tool.
-  Default: 0.
-
-``executable``
-  Either just the name of the executable or its pathname.  On Windows,
-  the path can use environment variables with ${ProgramFiles} syntax.
-  Default: the tool name.
-
-``args``
-  The arguments to pass to the tool executable. You can refer to the
-  files being merged as well as the output file through these
-  variables: ``$base``, ``$local``, ``$other``, ``$output``.
-  Default: ``$local $base $other``
-
-``premerge``
-  Attempt to run internal non-interactive 3-way merge tool before
-  launching external tool.  Options are ``true``, ``false``, or ``keep``
-  to leave markers in the file if the premerge fails.
-  Default: True
-
-``binary``
-  This tool can merge binary files. Defaults to False, unless tool
-  was selected by file pattern match.
-
-``symlink``
-  This tool can merge symlinks. Defaults to False, even if tool was
-  selected by file pattern match.
-
-``check``
-  A list of merge success-checking options:
+``width``
+    Set branch edges width in pixels.
 
-  ``changed``
-    Ask whether merge was successful when the merged file shows no changes.
-  ``conflicts``
-    Check whether there are conflicts even though the tool reported success.
-  ``prompt``
-    Always prompt for merge success, regardless of success reported by tool.
-
-``checkchanged``
-  True is equivalent to ``check = changed``.
-  Default: False
-
-``checkconflicts``
-  True is equivalent to ``check = conflicts``.
-  Default: False
-
-``fixeol``
-  Attempt to fix up EOL changes caused by the merge tool.
-  Default: False
-
-``gui``
-  This tool requires a graphical interface to run. Default: False
-
-``regkey``
-  Windows registry key which describes install location of this
-  tool. Mercurial will search for this key first under
-  ``HKEY_CURRENT_USER`` and then under ``HKEY_LOCAL_MACHINE``.
-  Default: None
-
-``regkeyalt``
-  An alternate Windows registry key to try if the first key is not
-  found.  The alternate key uses the same ``regname`` and ``regappend``
-  semantics of the primary key.  The most common use for this key
-  is to search for 32bit applications on 64bit operating systems.
-  Default: None
-
-``regname``
-  Name of value to read from specified registry key. Defaults to the
-  unnamed (default) value.
-
-``regappend``
-  String to append to the value read from the registry, typically
-  the executable name of the tool.
-  Default: None
-
+``color``
+    Set branch edges color in hexadecimal RGB notation.
 
 ``hooks``
 """""""""
@@ -827,6 +728,24 @@
 is treated as a failure.
 
 
+``hostfingerprints``
+""""""""""""""""""""
+
+Fingerprints of the certificates of known HTTPS servers.
+A HTTPS connection to a server with a fingerprint configured here will
+only succeed if the servers certificate matches the fingerprint.
+This is very similar to how ssh known hosts works.
+The fingerprint is the SHA-1 hash value of the DER encoded certificate.
+The CA chain and web.cacerts is not used for servers with a fingerprint.
+
+For example::
+
+    [hostfingerprints]
+    hg.intevation.org = 38:76:52:7c:87:26:9a:8f:4a:f8:d3:de:08:45:3b:ea:d6:4b:ee:cc
+
+This feature is only supported when using Python 2.6 or later.
+
+
 ``http_proxy``
 """"""""""""""
 
@@ -851,33 +770,118 @@
     Optional. Always use the proxy, even for localhost and any entries
     in ``http_proxy.no``. True or False. Default: False.
 
-``smtp``
-""""""""
+``merge-patterns``
+""""""""""""""""""
+
+This section specifies merge tools to associate with particular file
+patterns. Tools matched here will take precedence over the default
+merge tool. Patterns are globs by default, rooted at the repository
+root.
+
+Example::
 
-Configuration for extensions that need to send email messages.
+  [merge-patterns]
+  **.c = kdiff3
+  **.jpg = myimgmerge
+
+``merge-tools``
+"""""""""""""""
+
+This section configures external merge tools to use for file-level
+merges.
+
+Example ``~/.hgrc``::
 
-``host``
-    Host name of mail server, e.g. "mail.example.com".
+  [merge-tools]
+  # Override stock tool location
+  kdiff3.executable = ~/bin/kdiff3
+  # Specify command line
+  kdiff3.args = $base $local $other -o $output
+  # Give higher priority
+  kdiff3.priority = 1
+
+  # Define new tool
+  myHtmlTool.args = -m $local $other $base $output
+  myHtmlTool.regkey = Software\FooSoftware\HtmlMerge
+  myHtmlTool.priority = 1
+
+Supported arguments:
 
-``port``
-    Optional. Port to connect to on mail server. Default: 25.
+``priority``
+  The priority in which to evaluate this tool.
+  Default: 0.
+
+``executable``
+  Either just the name of the executable or its pathname.  On Windows,
+  the path can use environment variables with ${ProgramFiles} syntax.
+  Default: the tool name.
+
+``args``
+  The arguments to pass to the tool executable. You can refer to the
+  files being merged as well as the output file through these
+  variables: ``$base``, ``$local``, ``$other``, ``$output``.
+  Default: ``$local $base $other``
 
-``tls``
-    Optional. Method to enable TLS when connecting to mail server: starttls,
-    smtps or none. Default: none.
+``premerge``
+  Attempt to run internal non-interactive 3-way merge tool before
+  launching external tool.  Options are ``true``, ``false``, or ``keep``
+  to leave markers in the file if the premerge fails.
+  Default: True
+
+``binary``
+  This tool can merge binary files. Defaults to False, unless tool
+  was selected by file pattern match.
+
+``symlink``
+  This tool can merge symlinks. Defaults to False, even if tool was
+  selected by file pattern match.
 
-``username``
-    Optional. User name for authenticating with the SMTP server.
-    Default: none.
+``check``
+  A list of merge success-checking options:
+
+  ``changed``
+    Ask whether merge was successful when the merged file shows no changes.
+  ``conflicts``
+    Check whether there are conflicts even though the tool reported success.
+  ``prompt``
+    Always prompt for merge success, regardless of success reported by tool.
+
+``checkchanged``
+  True is equivalent to ``check = changed``.
+  Default: False
 
-``password``
-    Optional. Password for authenticating with the SMTP server. If not
-    specified, interactive sessions will prompt the user for a
-    password; non-interactive sessions will fail. Default: none.
+``checkconflicts``
+  True is equivalent to ``check = conflicts``.
+  Default: False
+
+``fixeol``
+  Attempt to fix up EOL changes caused by the merge tool.
+  Default: False
+
+``gui``
+  This tool requires a graphical interface to run. Default: False
 
-``local_hostname``
-    Optional. It's the hostname that the sender can use to identify
-    itself to the MTA.
+``regkey``
+  Windows registry key which describes install location of this
+  tool. Mercurial will search for this key first under
+  ``HKEY_CURRENT_USER`` and then under ``HKEY_LOCAL_MACHINE``.
+  Default: None
+
+``regkeyalt``
+  An alternate Windows registry key to try if the first key is not
+  found.  The alternate key uses the same ``regname`` and ``regappend``
+  semantics of the primary key.  The most common use for this key
+  is to search for 32bit applications on 64bit operating systems.
+  Default: None
+
+``regname``
+  Name of value to read from specified registry key. Defaults to the
+  unnamed (default) value.
+
+``regappend``
+  String to append to the value read from the registry, typically
+  the executable name of the tool.
+  Default: None
 
 
 ``patch``
@@ -934,14 +938,31 @@
 ``profiling``
 """""""""""""
 
-Specifies profiling format and file output. In this section
-description, 'profiling data' stands for the raw data collected
-during profiling, while 'profiling report' stands for a statistical
-text report generated from the profiling data. The profiling is done
-using lsprof.
+Specifies profiling type, format, and file output. Two profilers are
+supported: an instrumenting profiler (named ``ls``), and a sampling
+profiler (named ``stat``).
+
+In this section description, 'profiling data' stands for the raw data
+collected during profiling, while 'profiling report' stands for a
+statistical text report generated from the profiling data. Profiling
+with the ``ls`` profiler is done using lsprof.
+
+``type``
+    The type of profiler to use.
+    Default: ls.
+
+    ``ls``
+      Use Python's built-in instrumenting profiler. This profiler
+      works on all platforms, but each line number it reports is the
+      first line of a function. This restriction makes it difficult to
+      identify the expensive parts of a non-trivial function.
+    ``stat``
+      Use a third-party statistical profiler, statprof. This profiler
+      currently runs only on Unix systems, and is most useful for
+      profiling commands that run for longer than about 0.1 seconds.
 
 ``format``
-    Profiling format.
+    Profiling format.  Specific to the ``ls`` instrumenting profiler.
     Default: text.
 
     ``text``
@@ -953,6 +974,10 @@
       file, the generated file can directly be loaded into
       kcachegrind.
 
+``frequency``
+    Sampling frequency.  Specific to the ``stat`` sampling profiler.
+    Default: 1000.
+
 ``output``
     File path where profiling data or report should be saved. If the
     file exists, it is replaced. Default: None, data is printed on
@@ -980,11 +1005,44 @@
     the write lock while determining what data to transfer.
     Default is True.
 
+``preferuncompressed``
+    When set, clients will try to use the uncompressed streaming
+    protocol. Default is False.
+
 ``validate``
     Whether to validate the completeness of pushed changesets by
     checking that all new file revisions specified in manifests are
     present. Default is False.
 
+``smtp``
+""""""""
+
+Configuration for extensions that need to send email messages.
+
+``host``
+    Host name of mail server, e.g. "mail.example.com".
+
+``port``
+    Optional. Port to connect to on mail server. Default: 25.
+
+``tls``
+    Optional. Method to enable TLS when connecting to mail server: starttls,
+    smtps or none. Default: none.
+
+``username``
+    Optional. User name for authenticating with the SMTP server.
+    Default: none.
+
+``password``
+    Optional. Password for authenticating with the SMTP server. If not
+    specified, interactive sessions will prompt the user for a
+    password; non-interactive sessions will fail. Default: none.
+
+``local_hostname``
+    Optional. It's the hostname that the sender can use to identify
+    itself to the MTA.
+
+
 ``subpaths``
 """"""""""""
 
@@ -1098,6 +1156,10 @@
 ``remotecmd``
     remote command to use for clone/push/pull operations. Default is ``hg``.
 
+``reportoldssl``
+    Warn if an SSL certificate is unable to be verified due to using
+    Python 2.5 or earlier. True or False. Default is True.
+
 ``report_untrusted``
     Warn if a ``.hg/hgrc`` file is ignored due to not being owned by a
     trusted user or group. True or False. Default is True.
@@ -1231,7 +1293,20 @@
     authority certificates. Environment variables and ``~user``
     constructs are expanded in the filename. If specified on the
     client, then it will verify the identity of remote HTTPS servers
-    with these certificates. The form must be as follows::
+    with these certificates.
+
+    This feature is only supported when using Python 2.6 or later. If you wish
+    to use it with earlier versions of Python, install the backported
+    version of the ssl library that is available from
+    ``http://pypi.python.org``.
+
+    To disable SSL verification temporarily, specify ``--insecure`` from
+    command line.
+
+    You can use OpenSSL's CA certificate file if your platform has
+    one. On most Linux systems this will be
+    ``/etc/ssl/certs/ca-certificates.crt``. Otherwise you will have to
+    generate this file manually. The form must be as follows::
 
         -----BEGIN CERTIFICATE-----
         ... (certificate in base64 PEM encoding) ...
@@ -1240,18 +1315,6 @@
         ... (certificate in base64 PEM encoding) ...
         -----END CERTIFICATE-----
 
-    This feature is only supported when using Python 2.6 or later. If you wish
-    to use it with earlier versions of Python, install the backported
-    version of the ssl library that is available from
-    ``http://pypi.python.org``.
-
-    You can use OpenSSL's CA certificate file if your platform has one.
-    On most Linux systems this will be ``/etc/ssl/certs/ca-certificates.crt``.
-    Otherwise you will have to generate this file manually.
-
-    To disable SSL verification temporarily, specify ``--insecure`` from
-    command line.
-
 ``cache``
     Whether to support caching in hgweb. Defaults to True.
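
Taken together, the ``[graph]`` and ``[profiling]`` options documented
above can be combined in an hgrc such as the following (a hypothetical
example; option names and defaults are the ones described in this file,
the values are illustrative only):

    [graph]
    # make the default branch stand out in the web graph view
    default.width = 2
    default.color = FF0000

    [profiling]
    # use the sampling profiler instead of the default 'ls' profiler
    type = stat
    frequency = 2000
    output = ~/hg-profile.txt
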
 
--- a/mercurial/help/merge-tools.txt	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/help/merge-tools.txt	Tue Apr 17 17:56:36 2012 -0500
@@ -34,33 +34,7 @@
 There are some internal merge tools which can be used. The internal
 merge tools are:
 
-``internal:merge``
-   Uses the internal non-interactive simple merge algorithm for merging
-   files. It will fail if there are any conflicts and leave markers in
-   the partially merged file.
-
-``internal:fail``
-   Rather than attempting to merge files that were modified on both
-   branches, it marks them as unresolved. The resolve command must be
-   used to resolve these conflicts.
-
-``internal:local``
-   Uses the local version of files as the merged version.
-
-``internal:other``
-   Uses the other version of files as the merged version.
-
-``internal:prompt``
-   Asks the user which of the local or the other version to keep as
-   the merged version.
-
-``internal:dump``
-   Creates three versions of the files to merge, containing the
-   contents of local, other and base. These files can then be used to
-   perform a merge manually. If the file to be merged is named
-   ``a.txt``, these files will accordingly be named ``a.txt.local``,
-   ``a.txt.other`` and ``a.txt.base`` and they will be placed in the
-   same directory as ``a.txt``.
+.. internaltoolsmarker
 
 Internal tools are always available and do not require a GUI but will by default
 not handle symlinks or binary files.
--- a/mercurial/hgweb/hgwebdir_mod.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/hgweb/hgwebdir_mod.py	Tue Apr 17 17:56:36 2012 -0500
@@ -245,12 +245,67 @@
         def rawentries(subdir="", **map):
 
             descend = self.ui.configbool('web', 'descend', True)
+            collapse = self.ui.configbool('web', 'collapse', False)
+            seenrepos = set()
+            seendirs = set()
             for name, path in self.repos:
 
                 if not name.startswith(subdir):
                     continue
                 name = name[len(subdir):]
-                if not descend and '/' in name:
+                directory = False
+
+                if '/' in name:
+                    if not descend:
+                        continue
+
+                    nameparts = name.split('/')
+                    rootname = nameparts[0]
+
+                    if not collapse:
+                        pass
+                    elif rootname in seendirs:
+                        continue
+                    elif rootname in seenrepos:
+                        pass
+                    else:
+                        directory = True
+                        name = rootname
+
+                        # redefine the path to refer to the directory
+                        discarded = '/'.join(nameparts[1:])
+
+                        # remove name parts plus accompanying slash
+                        path = path[:-len(discarded) - 1]
+
+                parts = [name]
+                if 'PATH_INFO' in req.env:
+                    parts.insert(0, req.env['PATH_INFO'].rstrip('/'))
+                if req.env['SCRIPT_NAME']:
+                    parts.insert(0, req.env['SCRIPT_NAME'])
+                url = re.sub(r'/+', '/', '/'.join(parts) + '/')
+
+                # show either a directory entry or a repository
+                if directory:
+                    # get the directory's time information
+                    try:
+                        d = (get_mtime(path), util.makedate()[1])
+                    except OSError:
+                        continue
+
+                    row = dict(contact="",
+                               contact_sort="",
+                               name=name,
+                               name_sort=name,
+                               url=url,
+                               description="",
+                               description_sort="",
+                               lastchange=d,
+                               lastchange_sort=d[1]-d[0],
+                               archives=[])
+
+                    seendirs.add(name)
+                    yield row
                     continue
 
                 u = self.ui.copy()
@@ -268,13 +323,6 @@
                 if not self.read_allowed(u, req):
                     continue
 
-                parts = [name]
-                if 'PATH_INFO' in req.env:
-                    parts.insert(0, req.env['PATH_INFO'].rstrip('/'))
-                if req.env['SCRIPT_NAME']:
-                    parts.insert(0, req.env['SCRIPT_NAME'])
-                url = re.sub(r'/+', '/', '/'.join(parts) + '/')
-
                 # update time with local timezone
                 try:
                     r = hg.repository(self.ui, path)
@@ -302,6 +350,8 @@
                            lastchange=d,
                            lastchange_sort=d[1]-d[0],
                            archives=archivelist(u, "tip", url))
+
+                seenrepos.add(name)
                 yield row
 
         sortdefault = None, False
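
The ``collapse`` handling added above folds repositories that share a
leading path component into a single directory row. Stripped of the
template, mtime and access-control details, the grouping step amounts
to something like this sketch (``collapseentries`` is a hypothetical
helper, and the ``descend``/``collapse`` switches are assumed on):

    def collapseentries(repos):
        seenrepos, seendirs = set(), set()
        for name, path in repos:
            if '/' in name:
                rootname = name.split('/')[0]
                if rootname in seendirs:
                    continue               # directory already emitted
                if rootname not in seenrepos:
                    seendirs.add(rootname)
                    yield rootname, True   # one directory entry for the group
                    continue
            seenrepos.add(name)
            yield name, False              # an ordinary repository entry
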
--- a/mercurial/hgweb/webcommands.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/hgweb/webcommands.py	Tue Apr 17 17:56:36 2012 -0500
@@ -262,10 +262,10 @@
 
     files = []
     parity = paritygen(web.stripecount)
-    for f in ctx.files():
+    for blockno, f in enumerate(ctx.files()):
         template = f in ctx and 'filenodelink' or 'filenolink'
         files.append(tmpl(template,
-                          node=ctx.hex(), file=f,
+                          node=ctx.hex(), file=f, blockno=blockno + 1,
                           parity=parity.next()))
 
     style = web.config('web', 'style', 'paper')
@@ -303,6 +303,14 @@
 
 rev = changeset
 
+def decodepath(path):
+    """Hook for mapping a path in the repository to a path in the
+    working copy.
+
+    Extensions (e.g., largefiles) can override this to remap files in
+    the virtual file system presented by the manifest command below."""
+    return path
+
 def manifest(web, req, tmpl):
     ctx = webutil.changectx(web.repo, req)
     path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
@@ -318,13 +326,17 @@
     l = len(path)
     abspath = "/" + path
 
-    for f, n in mf.iteritems():
+    for full, n in mf.iteritems():
+        # the virtual path (working copy path) used for the full
+        # (repository) path
+        f = decodepath(full)
+
         if f[:l] != path:
             continue
         remain = f[l:]
         elements = remain.split('/')
         if len(elements) == 1:
-            files[remain] = f
+            files[remain] = full
         else:
             h = dirs # need to retain ref to dirs (root)
             for elem in elements[0:-1]:
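
``decodepath`` above is deliberately a no-op hook: the manifest view
now maps every repository path through it before building its tree, so
an extension can remap or hide storage paths. A hypothetical override
might look like the following (the ``.hglf/`` prefix is how largefiles
stores its standins; it is used here purely as an illustration):

    from mercurial.hgweb import webcommands

    def _decodepath(path):
        # present '.hglf/foo' as plain 'foo' in the manifest view
        if path.startswith('.hglf/'):
            return path[len('.hglf/'):]
        return path

    webcommands.decodepath = _decodepath
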
@@ -770,7 +782,7 @@
         startrev = uprev
 
     dag = graphmod.dagwalker(web.repo, range(startrev, downrev - 1, -1))
-    tree = list(graphmod.colored(dag))
+    tree = list(graphmod.colored(dag, web.repo))
     canvasheight = (len(tree) + 1) * bg_height - 27
     data = []
     for (id, type, ctx, vtx, edges) in tree:
--- a/mercurial/hgweb/webutil.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/hgweb/webutil.py	Tue Apr 17 17:56:36 2012 -0500
@@ -173,8 +173,7 @@
             start += 1
 
     blockcount = countgen()
-    def prettyprintlines(diff):
-        blockno = blockcount.next()
+    def prettyprintlines(diff, blockno):
         for lineno, l in enumerate(diff.splitlines(True)):
             lineno = "%d.%d" % (blockno, lineno + 1)
             if l.startswith('+'):
@@ -203,14 +202,16 @@
     block = []
     for chunk in patch.diff(repo, node1, node2, m, opts=diffopts):
         if chunk.startswith('diff') and block:
-            yield tmpl('diffblock', parity=parity.next(),
-                       lines=prettyprintlines(''.join(block)))
+            blockno = blockcount.next()
+            yield tmpl('diffblock', parity=parity.next(), blockno=blockno,
+                       lines=prettyprintlines(''.join(block), blockno))
             block = []
         if chunk.startswith('diff') and style != 'raw':
             chunk = ''.join(chunk.splitlines(True)[1:])
         block.append(chunk)
-    yield tmpl('diffblock', parity=parity.next(),
-               lines=prettyprintlines(''.join(block)))
+    blockno = blockcount.next()
+    yield tmpl('diffblock', parity=parity.next(), blockno=blockno,
+               lines=prettyprintlines(''.join(block), blockno))
 
 def diffstatgen(ctx):
     '''Generator function that provides the diffstat data.'''
--- a/mercurial/localrepo.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/localrepo.py	Tue Apr 17 17:56:36 2012 -0500
@@ -398,7 +398,15 @@
 
     def tags(self):
         '''return a mapping of tag to node'''
-        return self._tagscache.tags
+        t = {}
+        for k, v in self._tagscache.tags.iteritems():
+            try:
+                # ignore tags to unknown nodes
+                self.changelog.rev(v)
+                t[k] = v
+            except error.LookupError:
+                pass
+        return t
 
     def _findtags(self):
         '''Do the hard work of finding tags.  Return a pair of dicts
@@ -427,12 +435,7 @@
         tags = {}
         for (name, (node, hist)) in alltags.iteritems():
             if node != nullid:
-                try:
-                    # ignore tags to unknown nodes
-                    self.changelog.lookup(node)
-                    tags[encoding.tolocal(name)] = node
-                except error.LookupError:
-                    pass
+                tags[encoding.tolocal(name)] = node
         tags['tip'] = self.changelog.tip()
         tagtypes = dict([(encoding.tolocal(name), value)
                          for (name, value) in tagtypes.iteritems()])
@@ -464,7 +467,7 @@
         '''return the tags associated with a node'''
         if not self._tagscache.nodetagscache:
             nodetagscache = {}
-            for t, n in self.tags().iteritems():
+            for t, n in self._tagscache.tags.iteritems():
                 nodetagscache.setdefault(n, []).append(t)
             for tags in nodetagscache.itervalues():
                 tags.sort()
@@ -590,37 +593,7 @@
             partial[branch] = bheads
 
     def lookup(self, key):
-        if isinstance(key, int):
-            return self.changelog.node(key)
-        elif key == '.':
-            return self.dirstate.p1()
-        elif key == 'null':
-            return nullid
-        elif key == 'tip':
-            return self.changelog.tip()
-        n = self.changelog._match(key)
-        if n:
-            return n
-        if key in self._bookmarks:
-            return self._bookmarks[key]
-        if key in self.tags():
-            return self.tags()[key]
-        if key in self.branchtags():
-            return self.branchtags()[key]
-        n = self.changelog._partialmatch(key)
-        if n:
-            return n
-
-        # can't find key, check if it might have come from damaged dirstate
-        if key in self.dirstate.parents():
-            raise error.Abort(_("working directory has unknown parent '%s'!")
-                              % short(key))
-        try:
-            if len(key) == 20:
-                key = hex(key)
-        except TypeError:
-            pass
-        raise error.RepoLookupError(_("unknown revision '%s'") % key)
+        return self[key].node()
 
     def lookupbranch(self, key, remote=None):
         repo = remote or self
@@ -750,8 +723,8 @@
             raise error.RepoError(
                 _("abandoned transaction found - run hg recover"))
 
-        journalfiles = self._writejournal(desc)
-        renames = [(x, undoname(x)) for x in journalfiles]
+        self._writejournal(desc)
+        renames = [(x, undoname(x)) for x in self._journalfiles()]
 
         tr = transaction.transaction(self.ui.warn, self.sopener,
                                      self.sjoin("journal"),
@@ -760,34 +733,26 @@
         self._transref = weakref.ref(tr)
         return tr
 
+    def _journalfiles(self):
+        return (self.sjoin('journal'), self.join('journal.dirstate'),
+                self.join('journal.branch'), self.join('journal.desc'),
+                self.join('journal.bookmarks'),
+                self.sjoin('journal.phaseroots'))
+
+    def undofiles(self):
+        return [undoname(x) for x in self._journalfiles()]
+
     def _writejournal(self, desc):
-        # save dirstate for rollback
-        try:
-            ds = self.opener.read("dirstate")
-        except IOError:
-            ds = ""
-        self.opener.write("journal.dirstate", ds)
+        self.opener.write("journal.dirstate",
+                          self.opener.tryread("dirstate"))
         self.opener.write("journal.branch",
                           encoding.fromlocal(self.dirstate.branch()))
         self.opener.write("journal.desc",
                           "%d\n%s\n" % (len(self), desc))
-
-        try:
-            bk = self.opener.read("bookmarks")
-        except IOError:
-            bk = ""
-        self.opener.write("journal.bookmarks", bk)
-
-        phasesname = self.sjoin('phaseroots')
-        if os.path.exists(phasesname):
-            util.copyfile(phasesname, self.sjoin('journal.phaseroots'))
-        else:
-            self.sopener.write('journal.phaseroots', '')
-
-        return (self.sjoin('journal'), self.join('journal.dirstate'),
-                self.join('journal.branch'), self.join('journal.desc'),
-                self.join('journal.bookmarks'),
-                self.sjoin('journal.phaseroots'))
+        self.opener.write("journal.bookmarks",
+                          self.opener.tryread("bookmarks"))
+        self.sopener.write("journal.phaseroots",
+                           self.sopener.tryread("phaseroots"))
 
     def recover(self):
         lock = self.lock()
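
The rewritten ``_writejournal`` leans on ``opener.tryread``, which the
deleted code spelled out by hand for each journal file. Judging from
the removed ``try``/``except IOError`` blocks, it behaves roughly like
this sketch (a simplification; the real method lives on the opener):

    def tryread(opener, name):
        # return the file's contents, or '' if it cannot be read
        try:
            return opener.read(name)
        except IOError:
            return ""
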
@@ -1106,37 +1071,58 @@
 
             # check subrepos
             subs = []
-            removedsubs = set()
+            commitsubs = set()
+            newstate = wctx.substate.copy()
+            # only manage subrepos and .hgsubstate if .hgsub is present
             if '.hgsub' in wctx:
-                # only manage subrepos and .hgsubstate if .hgsub is present
+                # we'll decide whether to track this ourselves, thanks
+                if '.hgsubstate' in changes[0]:
+                    changes[0].remove('.hgsubstate')
+                if '.hgsubstate' in changes[2]:
+                    changes[2].remove('.hgsubstate')
+
+                # compare current state to last committed state
+                # build new substate based on last committed state
+                oldstate = wctx.p1().substate
+                for s in sorted(newstate.keys()):
+                    if not match(s):
+                        # ignore working copy, use old state if present
+                        if s in oldstate:
+                            newstate[s] = oldstate[s]
+                            continue
+                        if not force:
+                            raise util.Abort(
+                                _("commit with new subrepo %s excluded") % s)
+                    if wctx.sub(s).dirty(True):
+                        if not self.ui.configbool('ui', 'commitsubrepos'):
+                            raise util.Abort(
+                                _("uncommitted changes in subrepo %s") % s,
+                                hint=_("use --subrepos for recursive commit"))
+                        subs.append(s)
+                        commitsubs.add(s)
+                    else:
+                        bs = wctx.sub(s).basestate()
+                        newstate[s] = (newstate[s][0], bs, newstate[s][2])
+                        if oldstate.get(s, (None, None, None))[1] != bs:
+                            subs.append(s)
+
+                # check for removed subrepos
                 for p in wctx.parents():
-                    removedsubs.update(s for s in p.substate if match(s))
-                for s in wctx.substate:
-                    removedsubs.discard(s)
-                    if match(s) and wctx.sub(s).dirty():
-                        subs.append(s)
-                if (subs or removedsubs):
+                    r = [s for s in p.substate if s not in newstate]
+                    subs += [s for s in r if match(s)]
+                if subs:
                     if (not match('.hgsub') and
                         '.hgsub' in (wctx.modified() + wctx.added())):
                         raise util.Abort(
                             _("can't commit subrepos without .hgsub"))
-                    if '.hgsubstate' not in changes[0]:
-                        changes[0].insert(0, '.hgsubstate')
-                        if '.hgsubstate' in changes[2]:
-                            changes[2].remove('.hgsubstate')
+                    changes[0].insert(0, '.hgsubstate')
+
             elif '.hgsub' in changes[2]:
                 # clean up .hgsubstate when .hgsub is removed
                 if ('.hgsubstate' in wctx and
                     '.hgsubstate' not in changes[0] + changes[1] + changes[2]):
                     changes[2].insert(0, '.hgsubstate')
 
-            if subs and not self.ui.configbool('ui', 'commitsubrepos', False):
-                changedsubs = [s for s in subs if wctx.sub(s).dirty(True)]
-                if changedsubs:
-                    raise util.Abort(_("uncommitted changes in subrepo %s")
-                                     % changedsubs[0],
-                                     hint=_("use --subrepos for recursive commit"))
-
             # make sure all explicit patterns are matched
             if not force and match.files():
                 matched = set(changes[0] + changes[1] + changes[2])
@@ -1172,16 +1158,15 @@
                 cctx._text = editor(self, cctx, subs)
             edited = (text != cctx._text)
 
-            # commit subs
-            if subs or removedsubs:
-                state = wctx.substate.copy()
-                for s in sorted(subs):
+            # commit subs and write new state
+            if subs:
+                for s in sorted(commitsubs):
                     sub = wctx.sub(s)
                     self.ui.status(_('committing subrepository %s\n') %
                         subrepo.subrelpath(sub))
                     sr = sub.commit(cctx._text, user, date)
-                    state[s] = (state[s][0], sr)
-                subrepo.writestate(self, state)
+                    newstate[s] = (newstate[s][0], sr)
+                subrepo.writestate(self, newstate)
 
             # Save commit message in case this transaction gets rolled back
             # (e.g. by a pretxncommit hook).  Leave the content alone on
@@ -1823,7 +1808,7 @@
         fnodes = {} # needed file nodes
         changedfiles = set()
         fstate = ['', {}]
-        count = [0]
+        count = [0, 0]
 
         # can we go through the fast path ?
         heads.sort()
@@ -1836,8 +1821,15 @@
 
         # filter any nodes that claim to be part of the known set
         def prune(revlog, missing):
+            rr, rl = revlog.rev, revlog.linkrev
             return [n for n in missing
-                    if revlog.linkrev(revlog.rev(n)) not in commonrevs]
+                    if rl(rr(n)) not in commonrevs]
+
+        progress = self.ui.progress
+        _bundling = _('bundling')
+        _changesets = _('changesets')
+        _manifests = _('manifests')
+        _files = _('files')
 
         def lookup(revlog, x):
             if revlog == cl:
@@ -1845,23 +1837,22 @@
                 changedfiles.update(c[3])
                 mfs.setdefault(c[0], x)
                 count[0] += 1
-                self.ui.progress(_('bundling'), count[0],
-                                 unit=_('changesets'), total=len(csets))
+                progress(_bundling, count[0],
+                         unit=_changesets, total=count[1])
                 return x
             elif revlog == mf:
                 clnode = mfs[x]
                 mdata = mf.readfast(x)
-                for f in changedfiles:
-                    if f in mdata:
-                        fnodes.setdefault(f, {}).setdefault(mdata[f], clnode)
+                for f, n in mdata.iteritems():
+                    if f in changedfiles:
+                        fnodes[f].setdefault(n, clnode)
                 count[0] += 1
-                self.ui.progress(_('bundling'), count[0],
-                                 unit=_('manifests'), total=len(mfs))
-                return mfs[x]
+                progress(_bundling, count[0],
+                         unit=_manifests, total=count[1])
+                return clnode
             else:
-                self.ui.progress(
-                    _('bundling'), count[0], item=fstate[0],
-                    unit=_('files'), total=len(changedfiles))
+                progress(_bundling, count[0], item=fstate[0],
+                         unit=_files, total=count[1])
                 return fstate[1][x]
 
         bundler = changegroup.bundle10(lookup)
@@ -1874,21 +1865,24 @@
         def gengroup():
             # Create a changenode group generator that will call our functions
             # back to lookup the owning changenode and collect information.
+            count[:] = [0, len(csets)]
             for chunk in cl.group(csets, bundler, reorder=reorder):
                 yield chunk
-            self.ui.progress(_('bundling'), None)
+            progress(_bundling, None)
 
             # Create a generator for the manifestnodes that calls our lookup
             # and data collection functions back.
-            count[0] = 0
+            for f in changedfiles:
+                fnodes[f] = {}
+            count[:] = [0, len(mfs)]
             for chunk in mf.group(prune(mf, mfs), bundler, reorder=reorder):
                 yield chunk
-            self.ui.progress(_('bundling'), None)
+            progress(_bundling, None)
 
             mfs.clear()
 
             # Go through all our files in order sorted by name.
-            count[0] = 0
+            count[:] = [0, len(changedfiles)]
             for fname in sorted(changedfiles):
                 filerevlog = self.file(fname)
                 if not len(filerevlog):
@@ -1905,7 +1899,7 @@
 
             # Signal that no more groups are left.
             yield bundler.close()
-            self.ui.progress(_('bundling'), None)
+            progress(_bundling, None)
 
             if csets:
                 self.hook('outgoing', node=hex(csets[0]), source=source)
@@ -1931,7 +1925,7 @@
         mfs = {}
         changedfiles = set()
         fstate = ['']
-        count = [0]
+        count = [0, 0]
 
         self.hook('preoutgoing', throw=True, source=source)
         self.changegroupinfo(nodes, source)
@@ -1939,7 +1933,14 @@
         revset = set([cl.rev(n) for n in nodes])
 
         def gennodelst(log):
-            return [log.node(r) for r in log if log.linkrev(r) in revset]
+            ln, llr = log.node, log.linkrev
+            return [ln(r) for r in log if llr(r) in revset]
+
+        progress = self.ui.progress
+        _bundling = _('bundling')
+        _changesets = _('changesets')
+        _manifests = _('manifests')
+        _files = _('files')
 
         def lookup(revlog, x):
             if revlog == cl:
@@ -1947,18 +1948,17 @@
                 changedfiles.update(c[3])
                 mfs.setdefault(c[0], x)
                 count[0] += 1
-                self.ui.progress(_('bundling'), count[0],
-                                 unit=_('changesets'), total=len(nodes))
+                progress(_bundling, count[0],
+                         unit=_changesets, total=count[1])
                 return x
             elif revlog == mf:
                 count[0] += 1
-                self.ui.progress(_('bundling'), count[0],
-                                 unit=_('manifests'), total=len(mfs))
+                progress(_bundling, count[0],
+                         unit=_manifests, total=count[1])
                 return cl.node(revlog.linkrev(revlog.rev(x)))
             else:
-                self.ui.progress(
-                    _('bundling'), count[0], item=fstate[0],
-                    total=len(changedfiles), unit=_('files'))
+                progress(_bundling, count[0], item=fstate[0],
+                    total=count[1], unit=_files)
                 return cl.node(revlog.linkrev(revlog.rev(x)))
 
         bundler = changegroup.bundle10(lookup)
@@ -1972,16 +1972,17 @@
             '''yield a sequence of changegroup chunks (strings)'''
             # construct a list of all changed files
 
+            count[:] = [0, len(nodes)]
             for chunk in cl.group(nodes, bundler, reorder=reorder):
                 yield chunk
-            self.ui.progress(_('bundling'), None)
+            progress(_bundling, None)
 
-            count[0] = 0
+            count[:] = [0, len(mfs)]
             for chunk in mf.group(gennodelst(mf), bundler, reorder=reorder):
                 yield chunk
-            self.ui.progress(_('bundling'), None)
+            progress(_bundling, None)
 
-            count[0] = 0
+            count[:] = [0, len(changedfiles)]
             for fname in sorted(changedfiles):
                 filerevlog = self.file(fname)
                 if not len(filerevlog):
@@ -1994,7 +1995,7 @@
                     for chunk in filerevlog.group(nodelist, bundler, reorder):
                         yield chunk
             yield bundler.close()
-            self.ui.progress(_('bundling'), None)
+            progress(_bundling, None)
 
             if nodes:
                 self.hook('outgoing', node=hex(nodes[0]), source=source)
@@ -2227,7 +2228,9 @@
                 except (ValueError, TypeError):
                     raise error.ResponseError(
                         _('Unexpected response from remote server:'), l)
-                self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
+                if self.ui.debugflag:
+                    self.ui.debug('adding %s (%s)\n' %
+                                  (name, util.bytecount(size)))
                 # for backwards compat, name was partially encoded
                 ofp = self.sopener(store.decodedir(name), 'w')
                 for chunk in util.filechunkiter(fp, limit=size):
@@ -2266,6 +2269,10 @@
         # and format flags on "stream" capability, and use
         # uncompressed only if compatible.
 
+        if not stream:
+            # if the server explicitly prefers to stream (for fast LANs)
+            stream = remote.capable('stream-preferred')
+
         if stream and not heads:
             # 'stream' means remote revlog format is revlogv1 only
             if remote.capable('stream'):
@@ -2310,7 +2317,10 @@
     renamefiles = [tuple(t) for t in files]
     def a():
         for src, dest in renamefiles:
-            util.rename(src, dest)
+            try:
+                util.rename(src, dest)
+            except OSError: # journal file does not yet exist
+                pass
     return a
 
 def undoname(fn):
--- a/mercurial/lsprof.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/lsprof.py	Tue Apr 17 17:56:36 2012 -0500
@@ -48,7 +48,7 @@
             if limit is not None and count == limit:
                 return
             ccount = 0
-            if e.calls:
+            if climit and e.calls:
                 for se in e.calls:
                     file.write(cols % ("+%s" % se.callcount, se.reccallcount,
                                        se.totaltime, se.inlinetime,
--- a/mercurial/match.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/match.py	Tue Apr 17 17:56:36 2012 -0500
@@ -333,5 +333,5 @@
 
 def _anypats(patterns):
     for kind, name in patterns:
-        if kind in ('glob', 're', 'relglob', 'relre'):
+        if kind in ('glob', 're', 'relglob', 'relre', 'set'):
             return True
--- a/mercurial/merge.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/merge.py	Tue Apr 17 17:56:36 2012 -0500
@@ -81,22 +81,20 @@
             self.mark(dfile, 'r')
         return r
 
-def _checkunknown(wctx, mctx, folding):
+def _checkunknownfile(repo, wctx, mctx, f):
+    return (not repo.dirstate._ignore(f)
+        and os.path.exists(repo.wjoin(f))
+        and repo.dirstate.normalize(f) not in repo.dirstate
+        and mctx[f].cmp(wctx[f]))
+
+def _checkunknown(repo, wctx, mctx):
     "check for collisions between unknown files and files in mctx"
-    if folding:
-        foldf = util.normcase
-    else:
-        foldf = lambda fn: fn
-    folded = {}
-    for fn in mctx:
-        folded[foldf(fn)] = fn
 
     error = False
-    for fn in wctx.unknown():
-        f = foldf(fn)
-        if f in folded and mctx[folded[f]].cmp(wctx[f]):
+    for f in mctx:
+        if f not in wctx and _checkunknownfile(repo, wctx, mctx, f):
             error = True
-            wctx._repo.ui.warn(_("%s: untracked file differs\n") % fn)
+            wctx._repo.ui.warn(_("%s: untracked file differs\n") % f)
     if error:
         raise util.Abort(_("untracked files in working directory differ "
                            "from files in requested revision"))
@@ -192,8 +190,7 @@
     elif pa == p2: # backwards
         pa = p1.p1()
     elif pa and repo.ui.configbool("merge", "followcopies", True):
-        dirs = repo.ui.configbool("merge", "followdirs", True)
-        copy, diverge = copies.mergecopies(repo, p1, p2, pa, dirs)
+        copy, diverge = copies.mergecopies(repo, p1, p2, pa)
         for of, fl in diverge.iteritems():
             act("divergent renames", "dr", of, fl)
 
@@ -249,7 +246,7 @@
                     act("prompt keep", "a", f)
             elif n[20:] == "a": # added, no remote
                 act("remote deleted", "f", f)
-            elif n[20:] != "u":
+            else:
                 act("other deleted", "r", f)
 
     for f, n in m2.iteritems():
@@ -269,7 +266,13 @@
                 act("remote moved to " + f, "m",
                     f2, f, f, fmerge(f2, f, f2), True)
         elif f not in ma:
-            act("remote created", "g", f, m2.flags(f))
+            if (not overwrite
+                and _checkunknownfile(repo, p1, p2, f)):
+                rflags = fmerge(f, f, f)
+                act("remote differs from untracked local",
+                    "m", f, f, f, rflags, False)
+            else:
+                act("remote created", "g", f, m2.flags(f))
         elif n != ma[f]:
             if repo.ui.promptchoice(
                 _("remote changed %s which local deleted\n"
@@ -559,16 +562,15 @@
                                    " --check to force update)"))
             else:
                 # Allow jumping branches if clean and specific rev given
-                overwrite = True
+                pa = p1
 
         ### calculate phase
         action = []
-        wc.status(unknown=True) # prime cache
         folding = not util.checkcase(repo.path)
-        if not force:
-            _checkunknown(wc, p2, folding)
         if folding:
             _checkcollision(p2, branchmerge and p1)
+        if not force:
+            _checkunknown(repo, wc, p2)
         action += _forgetremoved(wc, p2, branchmerge)
         action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
 
--- a/mercurial/mpatch.c	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/mpatch.c	Tue Apr 17 17:56:36 2012 -0500
@@ -26,42 +26,6 @@
 
 #include "util.h"
 
-/* Definitions to get compatibility with python 2.4 and earlier which
-   does not have Py_ssize_t. See also PEP 353.
-   Note: msvc (8 or earlier) does not have ssize_t, so we use Py_ssize_t.
-*/
-#if PY_VERSION_HEX < 0x02050000 && !defined(PY_SSIZE_T_MIN)
-typedef int Py_ssize_t;
-#define PY_SSIZE_T_MAX INT_MAX
-#define PY_SSIZE_T_MIN INT_MIN
-#endif
-
-#ifdef _WIN32
-#ifdef _MSC_VER
-/* msvc 6.0 has problems */
-#define inline __inline
-typedef unsigned long uint32_t;
-#else
-#include <stdint.h>
-#endif
-static uint32_t ntohl(uint32_t x)
-{
-	return ((x & 0x000000ffUL) << 24) |
-		((x & 0x0000ff00UL) <<  8) |
-		((x & 0x00ff0000UL) >>  8) |
-		((x & 0xff000000UL) >> 24);
-}
-#else
-/* not windows */
-#include <sys/types.h>
-#if defined __BEOS__ && !defined __HAIKU__
-#include <ByteOrder.h>
-#else
-#include <arpa/inet.h>
-#endif
-#include <inttypes.h>
-#endif
-
 static char mpatch_doc[] = "Efficient binary patching.";
 static PyObject *mpatch_Error;
 
@@ -238,7 +202,6 @@
 	struct flist *l;
 	struct frag *lt;
 	const char *data = bin + 12, *end = bin + len;
-	uint32_t decode[3]; /* for dealing with alignment issues */
 
 	/* assume worst case size, we won't have many of these lists */
 	l = lalloc(len / 12);
@@ -248,10 +211,9 @@
 	lt = l->tail;
 
 	while (data <= end) {
-		memcpy(decode, bin, 12);
-		lt->start = ntohl(decode[0]);
-		lt->end = ntohl(decode[1]);
-		lt->len = ntohl(decode[2]);
+		lt->start = getbe32(bin);
+		lt->end = getbe32(bin + 4);
+		lt->len = getbe32(bin + 8);
 		if (lt->start > lt->end)
 			break; /* sanity check */
 		bin = data + lt->len;
@@ -397,7 +359,6 @@
 	long orig, start, end, len, outlen = 0, last = 0;
 	int patchlen;
 	char *bin, *binend, *data;
-	uint32_t decode[3]; /* for dealing with alignment issues */
 
 	if (!PyArg_ParseTuple(args, "ls#", &orig, &bin, &patchlen))
 		return NULL;
@@ -406,10 +367,9 @@
 	data = bin + 12;
 
 	while (data <= binend) {
-		memcpy(decode, bin, 12);
-		start = ntohl(decode[0]);
-		end = ntohl(decode[1]);
-		len = ntohl(decode[2]);
+		start = getbe32(bin);
+		end = getbe32(bin + 4);
+		len = getbe32(bin + 8);
 		if (start > end)
 			break; /* sanity check */
 		bin = data + len;
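
This change (and the matching one in parsers.c below) replaces the
``memcpy``/``ntohl`` decoding with ``getbe32`` from util.h, which reads
an unsigned 32-bit big-endian value at an arbitrary, possibly
unaligned, offset. Assuming those semantics, a Python equivalent is:

    import struct

    def getbe32(data, offset=0):
        # unsigned 32-bit big-endian read, like the ntohl+memcpy it replaces
        return struct.unpack_from(">I", data, offset)[0]
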
--- a/mercurial/parsers.c	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/parsers.c	Tue Apr 17 17:56:36 2012 -0500
@@ -135,33 +135,6 @@
 	return NULL;
 }
 
-#ifdef _WIN32
-#ifdef _MSC_VER
-/* msvc 6.0 has problems */
-#define inline __inline
-typedef unsigned long uint32_t;
-typedef unsigned __int64 uint64_t;
-#else
-#include <stdint.h>
-#endif
-static uint32_t ntohl(uint32_t x)
-{
-	return ((x & 0x000000ffUL) << 24) |
-	       ((x & 0x0000ff00UL) <<  8) |
-	       ((x & 0x00ff0000UL) >>  8) |
-	       ((x & 0xff000000UL) >> 24);
-}
-#else
-/* not windows */
-#include <sys/types.h>
-#if defined __BEOS__ && !defined __HAIKU__
-#include <ByteOrder.h>
-#else
-#include <arpa/inet.h>
-#endif
-#include <inttypes.h>
-#endif
-
 static PyObject *parse_dirstate(PyObject *self, PyObject *args)
 {
 	PyObject *dmap, *cmap, *parents = NULL, *ret = NULL;
@@ -170,7 +143,6 @@
 	int state, mode, size, mtime;
 	unsigned int flen;
 	int len;
-	uint32_t decode[4]; /* for alignment */
 
 	if (!PyArg_ParseTuple(args, "O!O!s#:parse_dirstate",
 			      &PyDict_Type, &dmap,
@@ -193,11 +165,10 @@
 	while (cur < end - 17) {
 		/* unpack header */
 		state = *cur;
-		memcpy(decode, cur + 1, 16);
-		mode = ntohl(decode[0]);
-		size = ntohl(decode[1]);
-		mtime = ntohl(decode[2]);
-		flen = ntohl(decode[3]);
+		mode = getbe32(cur + 1);
+		size = getbe32(cur + 5);
+		mtime = getbe32(cur + 9);
+		flen = getbe32(cur + 13);
 		cur += 17;
 		if (cur + flen > end || cur + flen < cur) {
 			PyErr_SetString(PyExc_ValueError, "overflow in dirstate");
@@ -241,10 +212,88 @@
 	return ret;
 }
 
-const char nullid[20];
-const int nullrev = -1;
+/*
+ * A base-16 trie for fast node->rev mapping.
+ *
+ * Positive value is index of the next node in the trie
+ * Negative value is a leaf: -(rev + 1)
+ * Zero is empty
+ */
+typedef struct {
+	int children[16];
+} nodetree;
 
-/* RevlogNG format (all in big endian, data may be inlined):
+/*
+ * This class has two behaviours.
+ *
+ * When used in a list-like way (with integer keys), we decode an
+ * entry in a RevlogNG index file on demand. Our last entry is a
+ * sentinel, always a nullid.  We have limited support for
+ * integer-keyed insert and delete, only at elements right before the
+ * sentinel.
+ *
+ * With string keys, we lazily perform a reverse mapping from node to
+ * rev, using a base-16 trie.
+ */
+typedef struct {
+	PyObject_HEAD
+	/* Type-specific fields go here. */
+	PyObject *data;        /* raw bytes of index */
+	PyObject **cache;      /* cached tuples */
+	const char **offsets;  /* populated on demand */
+	Py_ssize_t raw_length; /* original number of elements */
+	Py_ssize_t length;     /* current number of elements */
+	PyObject *added;       /* populated on demand */
+	nodetree *nt;          /* base-16 trie */
+	int ntlength;          /* # nodes in use */
+	int ntcapacity;        /* # nodes allocated */
+	int ntdepth;           /* maximum depth of tree */
+	int ntsplits;          /* # splits performed */
+	int ntrev;             /* last rev scanned */
+	int ntlookups;         /* # lookups */
+	int ntmisses;          /* # lookups that miss the cache */
+	int inlined;
+} indexObject;
+
+static Py_ssize_t index_length(const indexObject *self)
+{
+	if (self->added == NULL)
+		return self->length;
+	return self->length + PyList_GET_SIZE(self->added);
+}
+
+static PyObject *nullentry;
+static const char nullid[20];
+
+static long inline_scan(indexObject *self, const char **offsets);
+
+#if LONG_MAX == 0x7fffffffL
+static char *tuple_format = "Kiiiiiis#";
+#else
+static char *tuple_format = "kiiiiiis#";
+#endif
+
+/*
+ * Return a pointer to the beginning of a RevlogNG record.
+ */
+static const char *index_deref(indexObject *self, Py_ssize_t pos)
+{
+	if (self->inlined && pos > 0) {
+		if (self->offsets == NULL) {
+			self->offsets = malloc(self->raw_length *
+					       sizeof(*self->offsets));
+			if (self->offsets == NULL)
+				return (const char *)PyErr_NoMemory();
+			inline_scan(self, self->offsets);
+		}
+		return self->offsets[pos];
+	}
+
+	return PyString_AS_STRING(self->data) + pos * 64;
+}
+
+/*
+ * RevlogNG format (all in big endian, data may be inlined):
  *    6 bytes: offset
  *    2 bytes: flags
  *    4 bytes: compressed length
@@ -255,138 +304,850 @@
  *    4 bytes: parent 2 revision
  *   32 bytes: nodeid (only 20 bytes used)
  */
-static int _parse_index_ng(const char *data, int size, int inlined,
-			   PyObject *index)
+static PyObject *index_get(indexObject *self, Py_ssize_t pos)
 {
-	PyObject *entry;
-	int n = 0, err;
 	uint64_t offset_flags;
 	int comp_len, uncomp_len, base_rev, link_rev, parent_1, parent_2;
 	const char *c_node_id;
-	const char *end = data + size;
-	uint32_t decode[8]; /* to enforce alignment with inline data */
+	const char *data;
+	Py_ssize_t length = index_length(self);
+	PyObject *entry;
+
+	if (pos < 0)
+		pos += length;
+
+	if (pos < 0 || pos >= length) {
+		PyErr_SetString(PyExc_IndexError, "revlog index out of range");
+		return NULL;
+	}
 
-	while (data < end) {
-		unsigned int step;
+	if (pos == length - 1) {
+		Py_INCREF(nullentry);
+		return nullentry;
+	}
+
+	if (pos >= self->length - 1) {
+		PyObject *obj;
+		obj = PyList_GET_ITEM(self->added, pos - self->length + 1);
+		Py_INCREF(obj);
+		return obj;
+	}
 
-		memcpy(decode, data, 32);
-		offset_flags = ntohl(decode[1]);
-		if (n == 0) /* mask out version number for the first entry */
-			offset_flags &= 0xFFFF;
-		else {
-			uint32_t offset_high =  ntohl(decode[0]);
-			offset_flags |= ((uint64_t)offset_high) << 32;
+	if (self->cache) {
+		if (self->cache[pos]) {
+			Py_INCREF(self->cache[pos]);
+			return self->cache[pos];
 		}
+	} else {
+		self->cache = calloc(self->raw_length, sizeof(PyObject *));
+		if (self->cache == NULL)
+			return PyErr_NoMemory();
+	}
+
+	data = index_deref(self, pos);
+	if (data == NULL)
+		return NULL;
 
-		comp_len = ntohl(decode[2]);
-		uncomp_len = ntohl(decode[3]);
-		base_rev = ntohl(decode[4]);
-		link_rev = ntohl(decode[5]);
-		parent_1 = ntohl(decode[6]);
-		parent_2 = ntohl(decode[7]);
-		c_node_id = data + 32;
+	offset_flags = getbe32(data + 4);
+	if (pos == 0) /* mask out version number for the first entry */
+		offset_flags &= 0xFFFF;
+	else {
+		uint32_t offset_high = getbe32(data);
+		offset_flags |= ((uint64_t)offset_high) << 32;
+	}
 
-		entry = Py_BuildValue("Liiiiiis#", offset_flags, comp_len,
+	comp_len = getbe32(data + 8);
+	uncomp_len = getbe32(data + 12);
+	base_rev = getbe32(data + 16);
+	link_rev = getbe32(data + 20);
+	parent_1 = getbe32(data + 24);
+	parent_2 = getbe32(data + 28);
+	c_node_id = data + 32;
+
+	entry = Py_BuildValue(tuple_format, offset_flags, comp_len,
 			      uncomp_len, base_rev, link_rev,
 			      parent_1, parent_2, c_node_id, 20);
 
-		if (!entry)
-			return 0;
+	if (entry)
+		PyObject_GC_UnTrack(entry);
+
+	self->cache[pos] = entry;
+	Py_INCREF(entry);
+
+	return entry;
+}
+
+/*
+ * Return the 20-byte SHA of the node corresponding to the given rev.
+ */
+static const char *index_node(indexObject *self, Py_ssize_t pos)
+{
+	Py_ssize_t length = index_length(self);
+	const char *data;
+
+	if (pos == length - 1)
+		return nullid;
+
+	if (pos >= length)
+		return NULL;
+
+	if (pos >= self->length - 1) {
+		PyObject *tuple, *str;
+		tuple = PyList_GET_ITEM(self->added, pos - self->length + 1);
+		str = PyTuple_GetItem(tuple, 7);
+		return str ? PyString_AS_STRING(str) : NULL;
+	}
+
+	data = index_deref(self, pos);
+	return data ? data + 32 : NULL;
+}
+
+static int nt_insert(indexObject *self, const char *node, int rev);
+
+static int node_check(PyObject *obj, char **node, Py_ssize_t *nodelen)
+{
+	if (PyString_AsStringAndSize(obj, node, nodelen) == -1)
+		return -1;
+	if (*nodelen == 20)
+		return 0;
+	PyErr_SetString(PyExc_ValueError, "20-byte hash required");
+	return -1;
+}
+
+static PyObject *index_insert(indexObject *self, PyObject *args)
+{
+	PyObject *obj;
+	char *node;
+	long offset;
+	Py_ssize_t len, nodelen;
+
+	if (!PyArg_ParseTuple(args, "lO", &offset, &obj))
+		return NULL;
 
-		PyObject_GC_UnTrack(entry); /* don't waste time with this */
+	if (!PyTuple_Check(obj) || PyTuple_GET_SIZE(obj) != 8) {
+		PyErr_SetString(PyExc_TypeError, "8-tuple required");
+		return NULL;
+	}
+
+	if (node_check(PyTuple_GET_ITEM(obj, 7), &node, &nodelen) == -1)
+		return NULL;
+
+	len = index_length(self);
+
+	if (offset < 0)
+		offset += len;
+
+	if (offset != len - 1) {
+		PyErr_SetString(PyExc_IndexError,
+				"insert only supported at index -1");
+		return NULL;
+	}
+
+	if (offset > INT_MAX) {
+		PyErr_SetString(PyExc_ValueError,
+				"currently only 2**31 revs supported");
+		return NULL;
+	}
+
+	if (self->added == NULL) {
+		self->added = PyList_New(0);
+		if (self->added == NULL)
+			return NULL;
+	}
+
+	if (PyList_Append(self->added, obj) == -1)
+		return NULL;
+
+	if (self->nt)
+		nt_insert(self, node, (int)offset);
+
+	Py_RETURN_NONE;
+}
+
+static void _index_clearcaches(indexObject *self)
+{
+	if (self->cache) {
+		Py_ssize_t i;
+
+		for (i = 0; i < self->raw_length; i++) {
+			Py_XDECREF(self->cache[i]);
+			self->cache[i] = NULL;
+		}
+		free(self->cache);
+		self->cache = NULL;
+	}
+	if (self->offsets) {
+		free(self->offsets);
+		self->offsets = NULL;
+	}
+	if (self->nt) {
+		free(self->nt);
+		self->nt = NULL;
+	}
+}
 
-		if (inlined) {
-			err = PyList_Append(index, entry);
-			Py_DECREF(entry);
-			if (err)
+static PyObject *index_clearcaches(indexObject *self)
+{
+	_index_clearcaches(self);
+	self->ntlength = self->ntcapacity = 0;
+	self->ntdepth = self->ntsplits = 0;
+	self->ntrev = -1;
+	self->ntlookups = self->ntmisses = 0;
+	Py_RETURN_NONE;
+}
+
+static PyObject *index_stats(indexObject *self)
+{
+	PyObject *obj = PyDict_New();
+
+	if (obj == NULL)
+		return NULL;
+
+#define istat(__n, __d) \
+	if (PyDict_SetItemString(obj, __d, PyInt_FromLong(self->__n)) == -1) \
+		goto bail;
+
+	if (self->added) {
+		Py_ssize_t len = PyList_GET_SIZE(self->added);
+		if (PyDict_SetItemString(obj, "index entries added",
+					 PyInt_FromLong(len)) == -1)
+			goto bail;
+	}
+
+	if (self->raw_length != self->length - 1)
+		istat(raw_length, "revs on disk");
+	istat(length, "revs in memory");
+	istat(ntcapacity, "node trie capacity");
+	istat(ntdepth, "node trie depth");
+	istat(ntlength, "node trie count");
+	istat(ntlookups, "node trie lookups");
+	istat(ntmisses, "node trie misses");
+	istat(ntrev, "node trie last rev scanned");
+	istat(ntsplits, "node trie splits");
+
+#undef istat
+
+	return obj;
+
+bail:
+	Py_XDECREF(obj);
+	return NULL;
+}
+
+static inline int nt_level(const char *node, int level)
+{
+	int v = node[level>>1];
+	if (!(level & 1))
+		v >>= 4;
+	return v & 0xf;
+}
+
+static int nt_find(indexObject *self, const char *node, Py_ssize_t nodelen)
+{
+	int level, off;
+
+	if (nodelen == 20 && node[0] == '\0' && memcmp(node, nullid, 20) == 0)
+		return -1;
+
+	if (self->nt == NULL)
+		return -2;
+
+	for (level = off = 0; level < nodelen; level++) {
+		int k = nt_level(node, level);
+		nodetree *n = &self->nt[off];
+		int v = n->children[k];
+
+		if (v < 0) {
+			const char *n;
+			v = -v - 1;
+			n = index_node(self, v);
+			if (n == NULL)
+				return -2;
+			return memcmp(node, n, nodelen > 20 ? 20 : nodelen)
+				? -2 : v;
+		}
+		if (v == 0)
+			return -2;
+		off = v;
+	}
+	return -2;
+}
+
+static int nt_new(indexObject *self)
+{
+	if (self->ntlength == self->ntcapacity) {
+		self->ntcapacity *= 2;
+		self->nt = realloc(self->nt,
+				   self->ntcapacity * sizeof(nodetree));
+		if (self->nt == NULL) {
+			PyErr_SetString(PyExc_MemoryError, "out of memory");
+			return -1;
+		}
+		memset(&self->nt[self->ntlength], 0,
+		       sizeof(nodetree) * (self->ntcapacity - self->ntlength));
+	}
+	return self->ntlength++;
+}
+
+static int nt_insert(indexObject *self, const char *node, int rev)
+{
+	int level = 0;
+	int off = 0;
+
+	while (level < 20) {
+		int k = nt_level(node, level);
+		nodetree *n;
+		int v;
+
+		n = &self->nt[off];
+		v = n->children[k];
+
+		if (v == 0) {
+			n->children[k] = -rev - 1;
+			return 0;
+		}
+		if (v < 0) {
+			const char *oldnode = index_node(self, -v - 1);
+			int noff;
+
+			if (!oldnode || !memcmp(oldnode, node, 20)) {
+				n->children[k] = -rev - 1;
 				return 0;
-		} else
-			PyList_SET_ITEM(index, n, entry); /* steals reference */
+			}
+			noff = nt_new(self);
+			if (noff == -1)
+				return -1;
+			/* self->nt may have been changed by realloc */
+			self->nt[off].children[k] = noff;
+			off = noff;
+			n = &self->nt[off];
+			n->children[nt_level(oldnode, ++level)] = v;
+			if (level > self->ntdepth)
+				self->ntdepth = level;
+			self->ntsplits += 1;
+		} else {
+			level += 1;
+			off = v;
+		}
+	}
+
+	return -1;
+}
+
+/*
+ * Return values:
+ *
+ *   -3: error (exception set)
+ *   -2: not found (no exception set)
+ * rest: valid rev
+ */
+static int index_find_node(indexObject *self,
+			   const char *node, Py_ssize_t nodelen)
+{
+	int rev;
+
+	self->ntlookups++;
+	rev = nt_find(self, node, nodelen);
+	if (rev >= -1)
+		return rev;
+
+	if (self->nt == NULL) {
+		self->ntcapacity = self->raw_length < 4
+			? 4 : self->raw_length / 2;
+		self->nt = calloc(self->ntcapacity, sizeof(nodetree));
+		if (self->nt == NULL) {
+			PyErr_SetString(PyExc_MemoryError, "out of memory");
+			return -3;
+		}
+		self->ntlength = 1;
+		self->ntrev = (int)index_length(self) - 1;
+		self->ntlookups = 1;
+		self->ntmisses = 0;
+	}
+
+	/*
+	 * For the first handful of lookups, we scan the entire index,
+	 * and cache only the matching nodes. This optimizes for cases
+	 * like "hg tip", where only a few nodes are accessed.
+	 *
+	 * After that, we cache every node we visit, using a single
+	 * scan amortized over multiple lookups.  This gives the best
+	 * bulk performance, e.g. for "hg log".
+	 */
+	if (self->ntmisses++ < 4) {
+		for (rev = self->ntrev - 1; rev >= 0; rev--) {
+			const char *n = index_node(self, rev);
+			if (n == NULL)
+				return -2;
+			if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
+				if (nt_insert(self, n, rev) == -1)
+					return -3;
+				break;
+			}
+		}
+	} else {
+		for (rev = self->ntrev - 1; rev >= 0; rev--) {
+			const char *n = index_node(self, rev);
+			if (n == NULL)
+				return -2;
+			if (nt_insert(self, n, rev) == -1)
+				return -3;
+			if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
+				break;
+			}
+		}
+		self->ntrev = rev;
+	}
+
+	if (rev >= 0)
+		return rev;
+	return -2;
+}
+
+static PyObject *raise_revlog_error(void)
+{
+	static PyObject *errclass;
+	PyObject *mod = NULL, *errobj;
+
+	if (errclass == NULL) {
+		PyObject *dict;
+
+		mod = PyImport_ImportModule("mercurial.error");
+		if (mod == NULL)
+			goto classfail;
+
+		dict = PyModule_GetDict(mod);
+		if (dict == NULL)
+			goto classfail;
+
+		errclass = PyDict_GetItemString(dict, "RevlogError");
+		if (errclass == NULL) {
+			PyErr_SetString(PyExc_SystemError,
+					"could not find RevlogError");
+			goto classfail;
+		}
+		Py_INCREF(errclass);
+	}
+
+	errobj = PyObject_CallFunction(errclass, NULL);
+	if (errobj == NULL)
+		return NULL;
+	PyErr_SetObject(errclass, errobj);
+	return errobj;
+
+classfail:
+	Py_XDECREF(mod);
+	return NULL;
+}
 
-		n++;
-		step = 64 + (inlined ? comp_len : 0);
-		if (data + step > end || data + step < data)
-			break;
-		data += step;
+static PyObject *index_getitem(indexObject *self, PyObject *value)
+{
+	char *node;
+	Py_ssize_t nodelen;
+	int rev;
+
+	if (PyInt_Check(value))
+		return index_get(self, PyInt_AS_LONG(value));
+
+	if (PyString_AsStringAndSize(value, &node, &nodelen) == -1)
+		return NULL;
+	rev = index_find_node(self, node, nodelen);
+	if (rev >= -1)
+		return PyInt_FromLong(rev);
+	if (rev == -2)
+		raise_revlog_error();
+	return NULL;
+}
+
+static PyObject *index_m_get(indexObject *self, PyObject *args)
+{
+	char *node;
+	int nodelen, rev;
+
+	if (!PyArg_ParseTuple(args, "s#", &node, &nodelen))
+		return NULL;
+
+	rev = index_find_node(self, node, nodelen);
+	if (rev ==  -3)
+		return NULL;
+	if (rev == -2)
+		Py_RETURN_NONE;
+	return PyInt_FromLong(rev);
+}
+
+static int index_contains(indexObject *self, PyObject *value)
+{
+	char *node;
+	Py_ssize_t nodelen;
+
+	if (PyInt_Check(value)) {
+		long rev = PyInt_AS_LONG(value);
+		return rev >= -1 && rev < index_length(self);
+	}
+
+	if (!PyString_Check(value))
+		return 0;
+
+	node = PyString_AS_STRING(value);
+	nodelen = PyString_GET_SIZE(value);
+
+	switch (index_find_node(self, node, nodelen)) {
+	case -3:
+		return -1;
+	case -2:
+		return 0;
+	default:
+		return 1;
 	}
-	if (data != end) {
-		if (!PyErr_Occurred())
-			PyErr_SetString(PyExc_ValueError, "corrupt index file");
+}
+
+/*
+ * Invalidate any trie entries introduced by added revs.
+ */
+static void nt_invalidate_added(indexObject *self, Py_ssize_t start)
+{
+	Py_ssize_t i, len = PyList_GET_SIZE(self->added);
+
+	for (i = start; i < len; i++) {
+		PyObject *tuple = PyList_GET_ITEM(self->added, i);
+		PyObject *node = PyTuple_GET_ITEM(tuple, 7);
+
+		nt_insert(self, PyString_AS_STRING(node), -1);
+	}
+
+	if (start == 0) {
+		Py_DECREF(self->added);
+		self->added = NULL;
+	}
+}
+
+/*
+ * Delete a numeric range of revs, which must lie at the end of the
+ * index, excluding the sentinel nullid entry.
+ */
+static int index_slice_del(indexObject *self, PyObject *item)
+{
+	Py_ssize_t start, stop, step, slicelength;
+	Py_ssize_t length = index_length(self);
+
+	if (PySlice_GetIndicesEx((PySliceObject*)item, length,
+				 &start, &stop, &step, &slicelength) < 0)
+		return -1;
+
+	if (slicelength <= 0)
+		return 0;
+
+	if ((step < 0 && start < stop) || (step > 0 && start > stop))
+		stop = start;
+
+	if (step < 0) {
+		stop = start + 1;
+		start = stop + step*(slicelength - 1) - 1;
+		step = -step;
+	}
+
+	if (step != 1) {
+		PyErr_SetString(PyExc_ValueError,
+				"revlog index delete requires step size of 1");
+		return -1;
+	}
+
+	if (stop != length - 1) {
+		PyErr_SetString(PyExc_IndexError,
+				"revlog index deletion indices are invalid");
+		return -1;
+	}
+
+	if (start < self->length - 1) {
+		if (self->nt) {
+			Py_ssize_t i;
+
+			for (i = start + 1; i < self->length - 1; i++) {
+				const char *node = index_node(self, i);
+
+				if (node)
+					nt_insert(self, node, -1);
+			}
+			if (self->added)
+				nt_invalidate_added(self, 0);
+			if (self->ntrev > start)
+				self->ntrev = (int)start;
+		}
+		self->length = start + 1;
 		return 0;
 	}
 
-	/* create the magic nullid entry in the index at [-1] */
-	entry = Py_BuildValue("Liiiiiis#", (uint64_t)0, 0, 0, -1, -1, -1, -1, nullid, 20);
+	if (self->nt) {
+		nt_invalidate_added(self, start - self->length + 1);
+		if (self->ntrev > start)
+			self->ntrev = (int)start;
+	}
+	return self->added
+		? PyList_SetSlice(self->added, start - self->length + 1,
+				  PyList_GET_SIZE(self->added), NULL)
+		: 0;
+}
 
-	if (!entry)
-		return 0;
+/*
+ * Supported ops:
+ *
+ * slice deletion
+ * string assignment (extend node->rev mapping)
+ * string deletion (shrink node->rev mapping)
+ */
+static int index_assign_subscript(indexObject *self, PyObject *item,
+				  PyObject *value)
+{
+	char *node;
+	Py_ssize_t nodelen;
+	long rev;
 
-	PyObject_GC_UnTrack(entry); /* don't waste time with this */
+	if (PySlice_Check(item) && value == NULL)
+		return index_slice_del(self, item);
+
+	if (node_check(item, &node, &nodelen) == -1)
+		return -1;
 
-	if (inlined) {
-		err = PyList_Append(index, entry);
-		Py_DECREF(entry);
-		if (err)
-			return 0;
-	} else
-		PyList_SET_ITEM(index, n, entry); /* steals reference */
+	if (value == NULL)
+		return self->nt ? nt_insert(self, node, -1) : 0;
+	rev = PyInt_AsLong(value);
+	if (rev > INT_MAX || rev < 0) {
+		if (!PyErr_Occurred())
+			PyErr_SetString(PyExc_ValueError, "rev out of range");
+		return -1;
+	}
+	return nt_insert(self, node, (int)rev);
+}
+
+/*
+ * Find all RevlogNG entries in an index that has inline data. Update
+ * the optional "offsets" table with those entries.
+ */
+static long inline_scan(indexObject *self, const char **offsets)
+{
+	const char *data = PyString_AS_STRING(self->data);
+	const char *end = data + PyString_GET_SIZE(self->data);
+	const long hdrsize = 64;
+	long incr = hdrsize;
+	Py_ssize_t len = 0;
 
-	return 1;
+	while (data + hdrsize <= end) {
+		uint32_t comp_len;
+		const char *old_data;
+		/* 3rd element of header is length of compressed inline data */
+		comp_len = getbe32(data + 8);
+		incr = hdrsize + comp_len;
+		if (incr < hdrsize)
+			break;
+		if (offsets)
+			offsets[len] = data;
+		len++;
+		old_data = data;
+		data += incr;
+		if (data <= old_data)
+			break;
+	}
+
+	if (data != end && data + hdrsize != end) {
+		if (!PyErr_Occurred())
+			PyErr_SetString(PyExc_ValueError, "corrupt index file");
+		return -1;
+	}
+
+	return len;
 }
 
-/* This function parses a index file and returns a Python tuple of the
- * following format: (index, cache)
+static int index_real_init(indexObject *self, const char *data, int size,
+			   PyObject *inlined_obj, PyObject *data_obj)
+{
+	self->inlined = inlined_obj && PyObject_IsTrue(inlined_obj);
+	self->data = data_obj;
+	self->cache = NULL;
+
+	self->added = NULL;
+	self->offsets = NULL;
+	self->nt = NULL;
+	self->ntlength = self->ntcapacity = 0;
+	self->ntdepth = self->ntsplits = 0;
+	self->ntlookups = self->ntmisses = 0;
+	self->ntrev = -1;
+	Py_INCREF(self->data);
+
+	if (self->inlined) {
+		long len = inline_scan(self, NULL);
+		if (len == -1)
+			goto bail;
+		self->raw_length = len;
+		self->length = len + 1;
+	} else {
+		if (size % 64) {
+			PyErr_SetString(PyExc_ValueError, "corrupt index file");
+			goto bail;
+		}
+		self->raw_length = size / 64;
+		self->length = self->raw_length + 1;
+	}
+
+	return 0;
+bail:
+	return -1;
+}
+
+static int index_init(indexObject *self, PyObject *args, PyObject *kwds)
+{
+	const char *data;
+	int size;
+	PyObject *inlined_obj;
+
+	if (!PyArg_ParseTuple(args, "s#O", &data, &size, &inlined_obj))
+		return -1;
+
+	return index_real_init(self, data, size, inlined_obj,
+			       PyTuple_GET_ITEM(args, 0));
+}
+
+static PyObject *index_nodemap(indexObject *self)
+{
+	return (PyObject *)self;
+}
+
+static void index_dealloc(indexObject *self)
+{
+	_index_clearcaches(self);
+	Py_DECREF(self->data);
+	Py_XDECREF(self->added);
+	PyObject_Del(self);
+}
+
+static PySequenceMethods index_sequence_methods = {
+	(lenfunc)index_length,   /* sq_length */
+	0,                       /* sq_concat */
+	0,                       /* sq_repeat */
+	(ssizeargfunc)index_get, /* sq_item */
+	0,                       /* sq_slice */
+	0,                       /* sq_ass_item */
+	0,                       /* sq_ass_slice */
+	(objobjproc)index_contains, /* sq_contains */
+};
+
+static PyMappingMethods index_mapping_methods = {
+	(lenfunc)index_length,                 /* mp_length */
+	(binaryfunc)index_getitem,             /* mp_subscript */
+	(objobjargproc)index_assign_subscript, /* mp_ass_subscript */
+};
+
+static PyMethodDef index_methods[] = {
+	{"clearcaches", (PyCFunction)index_clearcaches, METH_NOARGS,
+	 "clear the index caches"},
+	{"get", (PyCFunction)index_m_get, METH_VARARGS,
+	 "get an index entry"},
+	{"insert", (PyCFunction)index_insert, METH_VARARGS,
+	 "insert an index entry"},
+	{"stats", (PyCFunction)index_stats, METH_NOARGS,
+	 "stats for the index"},
+	{NULL} /* Sentinel */
+};
+
+static PyGetSetDef index_getset[] = {
+	{"nodemap", (getter)index_nodemap, NULL, "nodemap", NULL},
+	{NULL} /* Sentinel */
+};
+
+static PyTypeObject indexType = {
+	PyObject_HEAD_INIT(NULL)
+	0,                         /* ob_size */
+	"parsers.index",           /* tp_name */
+	sizeof(indexObject),       /* tp_basicsize */
+	0,                         /* tp_itemsize */
+	(destructor)index_dealloc, /* tp_dealloc */
+	0,                         /* tp_print */
+	0,                         /* tp_getattr */
+	0,                         /* tp_setattr */
+	0,                         /* tp_compare */
+	0,                         /* tp_repr */
+	0,                         /* tp_as_number */
+	&index_sequence_methods,   /* tp_as_sequence */
+	&index_mapping_methods,    /* tp_as_mapping */
+	0,                         /* tp_hash */
+	0,                         /* tp_call */
+	0,                         /* tp_str */
+	0,                         /* tp_getattro */
+	0,                         /* tp_setattro */
+	0,                         /* tp_as_buffer */
+	Py_TPFLAGS_DEFAULT,        /* tp_flags */
+	"revlog index",            /* tp_doc */
+	0,                         /* tp_traverse */
+	0,                         /* tp_clear */
+	0,                         /* tp_richcompare */
+	0,                         /* tp_weaklistoffset */
+	0,                         /* tp_iter */
+	0,                         /* tp_iternext */
+	index_methods,             /* tp_methods */
+	0,                         /* tp_members */
+	index_getset,              /* tp_getset */
+	0,                         /* tp_base */
+	0,                         /* tp_dict */
+	0,                         /* tp_descr_get */
+	0,                         /* tp_descr_set */
+	0,                         /* tp_dictoffset */
+	(initproc)index_init,      /* tp_init */
+	0,                         /* tp_alloc */
+	PyType_GenericNew,         /* tp_new */
+};
+
+/*
+ * returns a tuple of the form (index, cache) with elements as
+ * follows:
  *
- * index: a list of tuples containing the RevlogNG records
- * cache: if data is inlined, a tuple (index_file_content, 0) else None
+ * index: an index object that lazily parses RevlogNG records
+ * cache: if data is inlined, a tuple (index_file_content, 0), else None
+ *
+ * added complications are for backwards compatibility
  */
 static PyObject *parse_index2(PyObject *self, PyObject *args)
 {
 	const char *data;
-	int size, inlined;
-	PyObject *rval = NULL, *index = NULL, *cache = NULL;
-	PyObject *data_obj = NULL, *inlined_obj;
+	int size, ret;
+	PyObject *inlined_obj, *tuple = NULL, *cache = NULL;
+	indexObject *idx;
 
 	if (!PyArg_ParseTuple(args, "s#O", &data, &size, &inlined_obj))
 		return NULL;
-	inlined = inlined_obj && PyObject_IsTrue(inlined_obj);
+
+	idx = PyObject_New(indexObject, &indexType);
+
+	if (idx == NULL)
+		goto bail;
 
-	/* If no data is inlined, we know the size of the index list in
-	 * advance: size divided by the size of one revlog record (64 bytes)
-	 * plus one for nullid */
-	index = inlined ? PyList_New(0) : PyList_New(size / 64 + 1);
-	if (!index)
-		goto quit;
+	ret = index_real_init(idx, data, size, inlined_obj,
+			      PyTuple_GET_ITEM(args, 0));
+	if (ret)
+		goto bail;
 
-	/* set up the cache return value */
-	if (inlined) {
-		/* Note that the reference to data_obj is only borrowed */
-		data_obj = PyTuple_GET_ITEM(args, 0);
-		cache = Py_BuildValue("iO", 0, data_obj);
-		if (!cache)
-			goto quit;
+	if (idx->inlined) {
+		Py_INCREF(idx->data);
+		cache = Py_BuildValue("iO", 0, idx->data);
+		if (cache == NULL)
+			goto bail;
 	} else {
 		cache = Py_None;
-		Py_INCREF(Py_None);
+		Py_INCREF(cache);
 	}
 
-	/* actually populate the index with data */
-	if (!_parse_index_ng(data, size, inlined, index))
-		goto quit;
+	Py_INCREF(idx);
 
-	rval = Py_BuildValue("NN", index, cache);
-	if (!rval)
-		goto quit;
-	return rval;
+	tuple = Py_BuildValue("NN", idx, cache);
+	if (!tuple)
+		goto bail;
+	return tuple;
 
-quit:
-	Py_XDECREF(index);
+bail:
+	Py_XDECREF(idx);
 	Py_XDECREF(cache);
-	Py_XDECREF(rval);
+	Py_XDECREF(tuple);
 	return NULL;
 }
 
-
 static char parsers_doc[] = "Efficient content parsing.";
 
 static PyMethodDef methods[] = {
@@ -396,6 +1157,20 @@
 	{NULL, NULL}
 };
 
+static void module_init(PyObject *mod)
+{
+	if (PyType_Ready(&indexType) < 0)
+		return;
+	Py_INCREF(&indexType);
+
+	PyModule_AddObject(mod, "index", (PyObject *)&indexType);
+
+	nullentry = Py_BuildValue("iiiiiiis#", 0, 0, 0,
+				  -1, -1, -1, -1, nullid, 20);
+	if (nullentry)
+		PyObject_GC_UnTrack(nullentry);
+}
+
 #ifdef IS_PY3K
 static struct PyModuleDef parsers_module = {
 	PyModuleDef_HEAD_INIT,
@@ -407,12 +1182,14 @@
 
 PyMODINIT_FUNC PyInit_parsers(void)
 {
-	return PyModule_Create(&parsers_module);
+	PyObject *mod = PyModule_Create(&parsers_module);
+	module_init(mod);
+	return mod;
 }
 #else
 PyMODINIT_FUNC initparsers(void)
 {
-	Py_InitModule3("parsers", methods, parsers_doc);
+	PyObject *mod = Py_InitModule3("parsers", methods, parsers_doc);
+	module_init(mod);
 }
 #endif
-
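The node trie that nt_insert() and nt_find() maintain above indexes nodes one 4-bit nibble per level, splitting a leaf downward whenever two hashes share a prefix, while index_find_node() falls back to a linear scan that populates the trie as it goes. A rough Python sketch of the nibble-per-level descent, using plain dicts in place of the preallocated nodetree array (helper names here are illustrative, not part of the module):

    # Illustrative sketch of the nibble-indexed radix tree; full 20-byte
    # lookups only, no prefix matching and no nullid special case.
    def _nibble(node, level):
        v = ord(node[level >> 1])
        return (v >> 4) & 0xf if not (level & 1) else v & 0xf

    def nt_insert(root, node, rev):
        d, level = root, 0
        while True:
            k = _nibble(node, level)
            child = d.get(k)
            if child is None:
                d[k] = (node, rev)          # free slot: store a leaf
                return
            if isinstance(child, dict):     # interior node: descend
                d, level = child, level + 1
                continue
            oldnode, oldrev = child
            if oldnode == node:             # same node: update its rev
                d[k] = (node, rev)
                return
            # split: push the existing leaf one level down and retry
            d[k] = {_nibble(oldnode, level + 1): (oldnode, oldrev)}
            d, level = d[k], level + 1

    def nt_find(root, node):
        d, level = root, 0
        while isinstance(d, dict):
            d = d.get(_nibble(node, level))
            level += 1
        if d is None:
            return None
        storednode, rev = d
        return rev if storednode == node else None

The C version keeps the same shape in a flat realloc'd array of nodetree structs, encoding leaves as negative child values (-rev - 1) so no per-node allocation is needed.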
--- a/mercurial/patch.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/patch.py	Tue Apr 17 17:56:36 2012 -0500
@@ -1286,7 +1286,6 @@
                 current_file = None
             afile, bfile, first_hunk, gp = values
             if gp:
-                path = pstrip(gp.path)
                 gp.path = pstrip(gp.path)
                 if gp.oldpath:
                     gp.oldpath = pstrip(gp.oldpath)
--- a/mercurial/posix.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/posix.py	Tue Apr 17 17:56:36 2012 -0500
@@ -270,6 +270,19 @@
 
         return encodingupper(path)
 
+    # Cygwin translates native ACLs to POSIX permissions,
+    # but these translations are not supported by native
+    # tools, so the exec bit tends to be set erroneously.
+    # Therefore, disable executable bit access on Cygwin.
+    def checkexec(path):
+        return False
+
+    # Similarly, Cygwin's symlink emulation is likely to create
+    # problems when Mercurial is used from both Cygwin and native
+    # Windows, with other native tools, or on shared volumes
+    def checklink(path):
+        return False
+
 def shellquote(s):
     if os.sys.platform == 'OpenVMS':
         return '"%s"' % s
@@ -320,6 +333,9 @@
     if os.sep in command:
         return findexisting(command)
 
+    if sys.platform == 'plan9':
+        return findexisting(os.path.join('/bin', command))
+
     for path in os.environ.get('PATH', '').split(os.pathsep):
         executable = findexisting(os.path.join(path, command))
         if executable is not None:
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/pvec.py	Tue Apr 17 17:56:36 2012 -0500
@@ -0,0 +1,210 @@
+# pvec.py - probabilistic vector clocks for Mercurial
+#
+# Copyright 2012 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+'''
+A "pvec" is a changeset property based on the theory of vector clocks
+that can be compared to discover relatedness without consulting a
+graph. This can be useful for tasks like determining how a
+disconnected patch relates to a repository.
+
+Currently a pvec consists of 448 bits, of which 24 are 'depth' and the
+remainder are a bit vector. It is represented as a 70-character base85
+string.
+
+Construction:
+
+- a root changeset has a depth of 0 and a bit vector based on its hash
+- a normal commit increases its parent's depth by one and flips one bit
+  vector bit based on its hash
+- a merge changeset pvec is constructed by copying changes from one parent
+  pvec into the other to balance their depths
+
+Properties:
+
+- for linear changes, difference in depth is always <= hamming distance
+- otherwise, changes are probably divergent
+- when hamming distance is < 200, we can reliably detect when pvecs are near
+
+Issues:
+
+- hamming distance ceases to work over distances of ~ 200
+- detecting divergence is less accurate when the common ancestor is very close
+  to either revision or total distance is high
+- this could probably be improved by modeling the relation between
+  delta and hdist
+
+Uses:
+
+- a patch pvec can be used to locate the nearest available common ancestor for
+  resolving conflicts
+- ordering of patches can be established without a DAG
+- two head pvecs can be compared to determine whether push/pull/merge is needed
+  and approximately how many changesets are involved
+- can be used to find a heuristic divergence measure between changesets on
+  different branches
+'''
+
+import base85, util
+from node import nullrev
+
+_size = 448 # 70 chars b85-encoded
+_bytes = _size / 8
+_depthbits = 24
+_depthbytes = _depthbits / 8
+_vecbytes = _bytes - _depthbytes
+_vecbits = _vecbytes * 8
+_radius = (_vecbits - 30) / 2 # high probability vecs are related
+
+def _bin(bs):
+    '''convert a bytestring to a long'''
+    v = 0
+    for b in bs:
+        v = v * 256 + ord(b)
+    return v
+
+def _str(v, l):
+    bs = ""
+    for p in xrange(l):
+        bs = chr(v & 255) + bs
+        v >>= 8
+    return bs
+
+def _split(b):
+    '''depth and bitvec'''
+    return _bin(b[:_depthbytes]), _bin(b[_depthbytes:])
+
+def _join(depth, bitvec):
+    return _str(depth, _depthbytes) + _str(bitvec, _vecbytes)
+
+def _hweight(x):
+    c = 0
+    while x:
+        if x & 1:
+            c += 1
+        x >>= 1
+    return c
+_htab = [_hweight(x) for x in xrange(256)]
+
+def _hamming(a, b):
+    '''find the hamming distance between two longs'''
+    d = a ^ b
+    c = 0
+    while d:
+        c += _htab[d & 0xff]
+        d >>= 8
+    return c
+
+def _mergevec(x, y, c):
+    # Ideally, this function would be x ^ y ^ ancestor, but finding
+    # ancestors is a nuisance. So instead we find the minimal number
+    # of changes to balance the depth and hamming distance
+
+    d1, v1 = x
+    d2, v2 = y
+    if d1 < d2:
+        d1, d2, v1, v2 = d2, d1, v2, v1
+
+    hdist = _hamming(v1, v2)
+    ddist = d1 - d2
+    v = v1
+    m = v1 ^ v2 # mask of different bits
+    i = 1
+
+    if hdist > ddist:
+        # if delta = 10 and hdist = 100, then we need to go up 55 steps
+        # to the ancestor and down 45
+        changes = (hdist - ddist + 1) / 2
+    else:
+        # must make at least one change
+        changes = 1
+    depth = d1 + changes
+
+    # copy changes from v2
+    if m:
+        while changes:
+            if m & i:
+                v ^= i
+                changes -= 1
+            i <<= 1
+    else:
+        v = _flipbit(v, c)
+
+    return depth, v
+
+def _flipbit(v, node):
+    # converting bit strings to longs is slow
+    bit = (hash(node) & 0xffffffff) % _vecbits
+    return v ^ (1<<bit)
+
+def ctxpvec(ctx):
+    '''construct a pvec for ctx while filling in the cache'''
+    r = ctx._repo
+    if not util.safehasattr(r, "_pveccache"):
+        r._pveccache = {}
+    pvc = r._pveccache
+    if ctx.rev() not in pvc:
+        cl = r.changelog
+        for n in xrange(ctx.rev() + 1):
+            if n not in pvc:
+                node = cl.node(n)
+                p1, p2 = cl.parentrevs(n)
+                if p1 == nullrev:
+                    # start with a 'random' vector at root
+                    pvc[n] = (0, _bin((node * 3)[:_vecbytes]))
+                elif p2 == nullrev:
+                    d, v = pvc[p1]
+                    pvc[n] = (d + 1, _flipbit(v, node))
+                else:
+                    pvc[n] = _mergevec(pvc[p1], pvc[p2], node)
+    bs = _join(*pvc[ctx.rev()])
+    return pvec(base85.b85encode(bs))
+
+class pvec(object):
+    def __init__(self, hashorctx):
+        if isinstance(hashorctx, str):
+            self._bs = hashorctx
+            self._depth, self._vec = _split(base85.b85decode(hashorctx))
+        else:
+            self._vec = ctxpvec(hashorctx)
+
+    def __str__(self):
+        return self._bs
+
+    def __eq__(self, b):
+        return self._vec == b._vec and self._depth == b._depth
+
+    def __lt__(self, b):
+        delta = b._depth - self._depth
+        if delta < 0:
+            return False # always correct
+        if _hamming(self._vec, b._vec) > delta:
+            return False
+        return True
+
+    def __gt__(self, b):
+        return b < self
+
+    def __or__(self, b):
+        delta = abs(b._depth - self._depth)
+        if _hamming(self._vec, b._vec) <= delta:
+            return False
+        return True
+
+    def __sub__(self, b):
+        if self | b:
+            raise ValueError("concurrent pvecs")
+        return self._depth - b._depth
+
+    def distance(self, b):
+        d = abs(b._depth - self._depth)
+        h = _hamming(self._vec, b._vec)
+        return max(d, h)
+
+    def near(self, b):
+        dist = abs(b._depth - self._depth)
+        if dist > _radius or _hamming(self._vec, b._vec) > _radius:
+            return False
+        return True
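The comparisons above all reduce to weighing depth difference against Hamming distance: two pvecs look like ancestor and descendant only if their bit vectors differ in no more bits than the depth gap can explain. A small self-contained sketch of that rule with toy 8-bit vectors (values are made up for illustration):

    def hamming(a, b):
        d, c = a ^ b, 0
        while d:
            c += d & 1
            d >>= 1
        return c

    # (depth, bitvector) pairs for three imaginary changesets
    root   = (0, 0b00000001)
    child  = (1, 0b00000011)   # one step deeper, one bit flipped
    cousin = (1, 0b00011101)   # same depth, but several differing bits

    def related(x, y):
        '''linear ancestor/descendant if depth delta covers the bit flips'''
        return hamming(x[1], y[1]) <= abs(x[0] - y[0])

    print related(root, child)    # True: consistent with a direct line
    print related(child, cousin)  # False: divergent ("concurrent" pvecs)

This mirrors __lt__/__gt__ (orderable when the delta covers the distance) and __or__ (concurrent when it does not), just without the 448-bit vectors and base85 encoding.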
--- a/mercurial/repair.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/repair.py	Tue Apr 17 17:56:36 2012 -0500
@@ -10,6 +10,7 @@
 from mercurial.node import short
 from mercurial.i18n import _
 import os
+import errno
 
 def _bundle(repo, bases, heads, node, suffix, compress=True):
     """create a bundle with the specified revisions as a backup"""
@@ -54,9 +55,9 @@
 
     return s
 
-def strip(ui, repo, nodelist, backup="all"):
+def strip(ui, repo, nodelist, backup="all", topic='backup'):
     cl = repo.changelog
-    # TODO delete the undo files, and handle undo of merge sets
+    # TODO handle undo of merge sets
     if isinstance(nodelist, str):
         nodelist = [nodelist]
     striplist = [cl.rev(node) for node in nodelist]
@@ -105,7 +106,7 @@
     # create a changegroup for all the branches we need to keep
     backupfile = None
     if backup == "all":
-        backupfile = _bundle(repo, stripbases, cl.heads(), node, 'backup')
+        backupfile = _bundle(repo, stripbases, cl.heads(), node, topic)
         repo.ui.status(_("saved backup bundle to %s\n") % backupfile)
     if saveheads or savebases:
         # do not compress partial bundle if we remove it from disk later
@@ -148,6 +149,14 @@
             if not keeppartialbundle:
                 os.unlink(chgrpfile)
 
+        # remove undo files
+        for undofile in repo.undofiles():
+            try:
+                os.unlink(undofile)
+            except OSError, e:
+                if e.errno != errno.ENOENT:
+                    ui.warn(_('error removing %s: %s\n') % (undofile, str(e)))
+
         for m in updatebm:
             bm[m] = repo['.'].node()
         bookmarks.write(repo)
--- a/mercurial/revlog.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/revlog.py	Tue Apr 17 17:56:36 2012 -0500
@@ -174,7 +174,7 @@
     def parseindex(self, data, inline):
         # call the C implementation to parse the index data
         index, cache = parsers.parse_index2(data, inline)
-        return index, None, cache
+        return index, getattr(index, 'nodemap', None), cache
 
     def packentry(self, entry, node, version, rev):
         p = _pack(indexformatng, *entry)
@@ -288,10 +288,28 @@
         self.rev(self.node(0))
         return self._nodecache
 
+    def hasnode(self, node):
+        try:
+            self.rev(node)
+            return True
+        except KeyError:
+            return False
+
+    def clearcaches(self):
+        try:
+            self._nodecache.clearcaches()
+        except AttributeError:
+            self._nodecache = {nullid: nullrev}
+            self._nodepos = None
+
     def rev(self, node):
         try:
             return self._nodecache[node]
+        except RevlogError:
+            # parsers.c radix tree lookup failed
+            raise LookupError(node, self.indexfile, _('no node'))
         except KeyError:
+            # pure python cache lookup failed
             n = self._nodecache
             i = self.index
             p = self._nodepos
@@ -794,13 +812,13 @@
         else:
             df = self.opener(self.datafile)
 
-        readahead = max(65536, length)
+        readahead = max(_chunksize, length)
         df.seek(offset)
         d = df.read(readahead)
         df.close()
         self._addchunk(offset, d)
         if readahead > length:
-            return d[:length]
+            return util.buffer(d, 0, length)
         return d
 
     def _getchunk(self, offset, length):
@@ -813,7 +831,7 @@
         if cachestart >= 0 and cacheend <= l:
             if cachestart == 0 and cacheend == l:
                 return d # avoid a copy
-            return d[cachestart:cacheend]
+            return util.buffer(d, cachestart, cacheend - cachestart)
 
         return self._loadchunk(offset, length)
 
@@ -846,13 +864,22 @@
     def revdiff(self, rev1, rev2):
         """return or calculate a delta between two revisions"""
         if rev1 != nullrev and self.deltaparent(rev2) == rev1:
-            return self._chunk(rev2)
+            return str(self._chunk(rev2))
+
+        return mdiff.textdiff(self.revision(rev1),
+                              self.revision(rev2))
 
-        return mdiff.textdiff(self.revision(self.node(rev1)),
-                              self.revision(self.node(rev2)))
+    def revision(self, nodeorrev):
+        """return an uncompressed revision of a given node or revision
+        number.
+        """
+        if isinstance(nodeorrev, int):
+            rev = nodeorrev
+            node = self.node(rev)
+        else:
+            node = nodeorrev
+            rev = None
 
-    def revision(self, node):
-        """return an uncompressed revision of a given node"""
         cachedrev = None
         if node == nullid:
             return ""
@@ -863,7 +890,8 @@
 
         # look up what we need to read
         text = None
-        rev = self.rev(node)
+        if rev is None:
+            rev = self.rev(node)
 
         # check rev flags
         if self.flags(rev) & ~REVIDX_KNOWN_FLAGS:
@@ -895,7 +923,7 @@
 
         self._chunkraw(base, rev)
         if text is None:
-            text = self._chunkbase(base)
+            text = str(self._chunkbase(base))
 
         bins = [self._chunk(r) for r in chain]
         text = mdiff.patches(text, bins)
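The chunk-cache changes above stop copying slices out of the readahead window: _getchunk() and _loadchunk() now hand back util.buffer views, and callers such as revdiff() and the _chunkbase() user wrap them in str() only when a real string is required. A minimal sketch of the zero-copy idea using Python 2's built-in buffer(), which util.buffer presumably wraps where available (sizes below are arbitrary):

    data = 'x' * (1 << 20)             # stand-in for a cached readahead window

    copy = data[4096:8192]             # plain slicing copies 4 KB out
    view = buffer(data, 4096, 4096)    # a buffer shares the original storage

    assert str(view) == copy           # same bytes, materialized only on demand

Deferring the str() call means a revision reconstructed from many chunks never pays for intermediate copies of the readahead buffer.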
--- a/mercurial/revset.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/revset.py	Tue Apr 17 17:56:36 2012 -0500
@@ -7,12 +7,46 @@
 
 import re
 import parser, util, error, discovery, hbisect, phases
-import node as nodemod
+import node
 import bookmarks as bookmarksmod
 import match as matchmod
 from i18n import _
 import encoding
 
+def _revancestors(repo, revs, followfirst):
+    """Like revlog.ancestors(), but supports followfirst."""
+    cut = followfirst and 1 or None
+    cl = repo.changelog
+    visit = list(revs)
+    seen = set([node.nullrev])
+    while visit:
+        for parent in cl.parentrevs(visit.pop(0))[:cut]:
+            if parent not in seen:
+                visit.append(parent)
+                seen.add(parent)
+                yield parent
+
+def _revdescendants(repo, revs, followfirst):
+    """Like revlog.descendants() but supports followfirst."""
+    cut = followfirst and 1 or None
+    cl = repo.changelog
+    first = min(revs)
+    nullrev = node.nullrev
+    if first == nullrev:
+        # Are there nodes with a null first parent and a non-null
+        # second one? Maybe. Do we care? Probably not.
+        for i in cl:
+            yield i
+        return
+
+    seen = set(revs)
+    for i in xrange(first + 1, len(cl)):
+        for x in cl.parentrevs(i)[:cut]:
+            if x != nullrev and x in seen:
+                seen.add(i)
+                yield i
+                break
+
 elements = {
     "(": (20, ("group", 1, ")"), ("func", 1, ")")),
     "~": (18, None, ("ancestor", 18)),
@@ -112,7 +146,7 @@
 
 def getargs(x, min, max, err):
     l = getlist(x)
-    if len(l) < min or len(l) > max:
+    if len(l) < min or (max >= 0 and len(l) > max):
         raise error.ParseError(err)
     return l
 
@@ -203,15 +237,23 @@
 
     return [r for r in an if r in subset]
 
+def _ancestors(repo, subset, x, followfirst=False):
+    args = getset(repo, range(len(repo)), x)
+    if not args:
+        return []
+    s = set(_revancestors(repo, args, followfirst)) | set(args)
+    return [r for r in subset if r in s]
+
 def ancestors(repo, subset, x):
     """``ancestors(set)``
     Changesets that are ancestors of a changeset in set.
     """
-    args = getset(repo, range(len(repo)), x)
-    if not args:
-        return []
-    s = set(repo.changelog.ancestors(*args)) | set(args)
-    return [r for r in subset if r in s]
+    return _ancestors(repo, subset, x)
+
+def _firstancestors(repo, subset, x):
+    # ``_firstancestors(set)``
+    # Like ``ancestors(set)`` but follows only the first parents.
+    return _ancestors(repo, subset, x, followfirst=True)
 
 def ancestorspec(repo, subset, x, n):
     """``set~n``
@@ -394,15 +436,23 @@
             l.append(r)
     return l
 
+def _descendants(repo, subset, x, followfirst=False):
+    args = getset(repo, range(len(repo)), x)
+    if not args:
+        return []
+    s = set(_revdescendants(repo, args, followfirst)) | set(args)
+    return [r for r in subset if r in s]
+
 def descendants(repo, subset, x):
     """``descendants(set)``
     Changesets which are descendants of changesets in set.
     """
-    args = getset(repo, range(len(repo)), x)
-    if not args:
-        return []
-    s = set(repo.changelog.descendants(*args)) | set(args)
-    return [r for r in subset if r in s]
+    return _descendants(repo, subset, x)
+
+def _firstdescendants(repo, subset, x):
+    # ``_firstdescendants(set)``
+    # Like ``descendants(set)`` but follows only the first parents.
+    return _descendants(repo, subset, x, followfirst=True)
 
 def draft(repo, subset, x):
     """``draft()``
@@ -440,29 +490,36 @@
     """
     return limit(repo, subset, x)
 
+def _follow(repo, subset, x, name, followfirst=False):
+    l = getargs(x, 0, 1, _("%s takes no arguments or a filename") % name)
+    c = repo['.']
+    if l:
+        x = getstring(l[0], _("%s expected a filename") % name)
+        if x in c:
+            cx = c[x]
+            s = set(ctx.rev() for ctx in cx.ancestors(followfirst=followfirst))
+            # include the revision responsible for the most recent version
+            s.add(cx.linkrev())
+        else:
+            return []
+    else:
+        s = set(_revancestors(repo, [c.rev()], followfirst)) | set([c.rev()])
+
+    return [r for r in subset if r in s]
+
 def follow(repo, subset, x):
     """``follow([file])``
     An alias for ``::.`` (ancestors of the working copy's first parent).
     If a filename is specified, the history of the given file is followed,
     including copies.
     """
-    # i18n: "follow" is a keyword
-    l = getargs(x, 0, 1, _("follow takes no arguments or a filename"))
-    c = repo['.']
-    if l:
-        x = getstring(l[0], _("follow expected a filename"))
-        if x in c:
-            cx = c[x]
-            s = set(ctx.rev() for ctx in cx.ancestors())
-            # include the revision responsible for the most recent version
-            s.add(cx.linkrev())
-        else:
-            return []
-    else:
-        s = set(repo.changelog.ancestors(c.rev()))
-        s.add(c.rev())
+    return _follow(repo, subset, x, 'follow')
 
-    return [r for r in subset if r in s]
+def _followfirst(repo, subset, x):
+    # ``followfirst([file])``
+    # Like ``follow([file])`` but follows only the first parent of
+    # every revision or file revision.
+    return _follow(repo, subset, x, '_followfirst', followfirst=True)
 
 def getall(repo, subset, x):
     """``all()``
@@ -492,23 +549,72 @@
                 break
     return l
 
+def _matchfiles(repo, subset, x):
+    # _matchfiles takes a revset list of prefixed arguments:
+    #
+    #   [p:foo, i:bar, x:baz]
+    #
+    # builds a match object from them and filters subset. Allowed
+    # prefixes are 'p:' for regular patterns, 'i:' for include
+    # patterns and 'x:' for exclude patterns. Use 'r:' prefix to pass
+    # a revision identifier, or the empty string to reference the
+    # working directory, from which the match object is
+    # initialized. Use 'd:' to set the default matching mode, which
+    # defaults to 'glob'. At most one 'r:' and one 'd:' argument can
+    # be passed.
+
+    # i18n: "_matchfiles" is a keyword
+    l = getargs(x, 1, -1, _("_matchfiles requires at least one argument"))
+    pats, inc, exc = [], [], []
+    hasset = False
+    rev, default = None, None
+    for arg in l:
+        s = getstring(arg, _("_matchfiles requires string arguments"))
+        prefix, value = s[:2], s[2:]
+        if prefix == 'p:':
+            pats.append(value)
+        elif prefix == 'i:':
+            inc.append(value)
+        elif prefix == 'x:':
+            exc.append(value)
+        elif prefix == 'r:':
+            if rev is not None:
+                raise error.ParseError(_('_matchfiles expected at most one '
+                                         'revision'))
+            rev = value
+        elif prefix == 'd:':
+            if default is not None:
+                raise error.ParseError(_('_matchfiles expected at most one '
+                                         'default mode'))
+            default = value
+        else:
+            raise error.ParseError(_('invalid _matchfiles prefix: %s') % prefix)
+        if not hasset and matchmod.patkind(value) == 'set':
+            hasset = True
+    if not default:
+        default = 'glob'
+    m = None
+    s = []
+    for r in subset:
+        c = repo[r]
+        if not m or (hasset and rev is None):
+            ctx = c
+            if rev is not None:
+                ctx = repo[rev or None]
+            m = matchmod.match(repo.root, repo.getcwd(), pats, include=inc,
+                               exclude=exc, ctx=ctx, default=default)
+        for f in c.files():
+            if m(f):
+                s.append(r)
+                break
+    return s
+
 def hasfile(repo, subset, x):
     """``file(pattern)``
     Changesets affecting files matched by pattern.
     """
     # i18n: "file" is a keyword
     pat = getstring(x, _("file requires a pattern"))
-    m = None
-    s = []
-    for r in subset:
-        c = repo[r]
-        if not m or matchmod.patkind(pat) == 'set':
-            m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=c)
-        for f in c.files():
-            if m(f):
-                s.append(r)
-                break
-    return s
+    return _matchfiles(repo, subset, ('string', 'p:' + pat))
 
 def head(repo, subset, x):
     """``head()``
@@ -619,7 +725,7 @@
     pat = getstring(x, _("modifies requires a pattern"))
     return checkstatus(repo, subset, pat, 0)
 
-def node(repo, subset, x):
+def node_(repo, subset, x):
     """``id(string)``
     Revision non-ambiguously specified by the given hex string prefix.
     """
@@ -800,6 +906,103 @@
         raise error.ParseError(_("rev expects a number"))
     return [r for r in subset if r == l]
 
+def matching(repo, subset, x):
+    """``matching(revision [, field])``
+    Changesets in which a given set of fields match the set of fields in the
+    selected revision or set.
+    To match more than one field pass the list of fields to match separated
+    by spaces (e.g. 'author description').
+    Valid fields are most regular revision fields and some special fields:
+    * regular fields:
+      - description, author, branch, date, files, phase, parents,
+      substate, user.
+      Note that author and user are synonyms.
+    * special fields: summary, metadata.
+      - summary: matches the first line of the description.
+      - metadata: It is equivalent to matching 'description user date'
+        (i.e. it matches the main metadata fields).
+    metadata is the default field which is used when no fields are specified.
+    You can match more than one field at a time.
+    """
+    l = getargs(x, 1, 2, _("matching takes 1 or 2 arguments"))
+
+    revs = getset(repo, xrange(len(repo)), l[0])
+
+    fieldlist = ['metadata']
+    if len(l) > 1:
+        fieldlist = getstring(l[1],
+            _("matching requires a string "
+              "as its second argument")).split()
+
+    # Make sure that there are no repeated fields, and expand the
+    # 'special' 'metadata' field type
+    fields = []
+    for field in fieldlist:
+        if field == 'metadata':
+            fields += ['user', 'description', 'date']
+        else:
+            if field == 'author':
+                field = 'user'
+            fields.append(field)
+    fields = set(fields)
+    if 'summary' in fields and 'description' in fields:
+        # If a revision matches its description it also matches its summary
+        fields.discard('summary')
+
+    # We may want to match more than one field
+    # Not all fields take the same amount of time to be matched
+    # Sort the selected fields in order of increasing matching cost
+    fieldorder = ['phase', 'parents', 'user', 'date', 'branch', 'summary',
+        'files', 'description', 'substate']
+    def fieldkeyfunc(f):
+        try:
+            return fieldorder.index(f)
+        except ValueError:
+            # assume an unknown field is very costly
+            return len(fieldorder)
+    fields = list(fields)
+    fields.sort(key=fieldkeyfunc)
+
+    # Each field will be matched with its own "getfield" function
+    # which will be added to the getfieldfuncs array of functions
+    getfieldfuncs = []
+    _funcs = {
+        'user': lambda r: repo[r].user(),
+        'branch': lambda r: repo[r].branch(),
+        'date': lambda r: repo[r].date(),
+        'description': lambda r: repo[r].description(),
+        'files': lambda r: repo[r].files(),
+        'parents': lambda r: repo[r].parents(),
+        'phase': lambda r: repo[r].phase(),
+        'substate': lambda r: repo[r].substate,
+        'summary': lambda r: repo[r].description().splitlines()[0],
+    }
+    for info in fields:
+        getfield = _funcs.get(info, None)
+        if getfield is None:
+            raise error.ParseError(
+                _("unexpected field name passed to matching: %s") % info)
+        getfieldfuncs.append(getfield)
+    # convert the getfield array of functions into a "getinfo" function
+    # which returns an array of field values (or a single value if there
+    # is only one field to match)
+    getinfo = lambda r: [f(r) for f in getfieldfuncs]
+
+    matches = []
+    for rev in revs:
+        target = getinfo(rev)
+        for r in subset:
+            match = True
+            for n, f in enumerate(getfieldfuncs):
+                if target[n] != f(r):
+                    match = False
+                    break
+            if match:
+                matches.append(r)
+    if len(revs) > 1:
+        matches = sorted(set(matches))
+    return matches
+
 def reverse(repo, subset, x):
     """``reverse(set)``
     Reverse order of set.
@@ -920,6 +1123,7 @@
     "all": getall,
     "ancestor": ancestor,
     "ancestors": ancestors,
+    "_firstancestors": _firstancestors,
     "author": author,
     "bisect": bisect,
     "bisected": bisected,
@@ -931,18 +1135,21 @@
     "date": date,
     "desc": desc,
     "descendants": descendants,
+    "_firstdescendants": _firstdescendants,
     "draft": draft,
     "file": hasfile,
     "filelog": filelog,
     "first": first,
     "follow": follow,
+    "_followfirst": _followfirst,
     "grep": grep,
     "head": head,
     "heads": heads,
-    "id": node,
+    "id": node_,
     "keyword": keyword,
     "last": last,
     "limit": limit,
+    "_matchfiles": _matchfiles,
     "max": maxrev,
     "merge": merge,
     "min": minrev,
@@ -960,6 +1167,7 @@
     "roots": roots,
     "sort": sort,
     "secret": secret,
+    "matching": matching,
     "tag": tag,
     "tagged": tagged,
     "user": user,
@@ -1071,46 +1279,85 @@
         h = heads(default)
         b($1) = ancestors($1) - ancestors(default)
         '''
-        if isinstance(name, tuple): # parameter substitution
-            self.tree = name
-            self.replacement = value
-        else: # alias definition
-            m = self.funcre.search(name)
-            if m:
-                self.tree = ('func', ('symbol', m.group(1)))
-                self.args = [x.strip() for x in m.group(2).split(',')]
-                for arg in self.args:
-                    value = value.replace(arg, repr(arg))
-            else:
-                self.tree = ('symbol', name)
+        m = self.funcre.search(name)
+        if m:
+            self.name = m.group(1)
+            self.tree = ('func', ('symbol', m.group(1)))
+            self.args = [x.strip() for x in m.group(2).split(',')]
+            for arg in self.args:
+                value = value.replace(arg, repr(arg))
+        else:
+            self.name = name
+            self.tree = ('symbol', name)
+
+        self.replacement, pos = parse(value)
+        if pos != len(value):
+            raise error.ParseError(_('invalid token'), pos)
 
-            self.replacement, pos = parse(value)
-            if pos != len(value):
-                raise error.ParseError(_('invalid token'), pos)
+def _getalias(aliases, tree):
+    """If tree looks like an unexpanded alias, return it. Return None
+    otherwise.
+    """
+    if isinstance(tree, tuple) and tree:
+        if tree[0] == 'symbol' and len(tree) == 2:
+            name = tree[1]
+            alias = aliases.get(name)
+            if alias and alias.args is None and alias.tree == tree:
+                return alias
+        if tree[0] == 'func' and len(tree) > 1:
+            if tree[1][0] == 'symbol' and len(tree[1]) == 2:
+                name = tree[1][1]
+                alias = aliases.get(name)
+                if alias and alias.args is not None and alias.tree == tree[:2]:
+                    return alias
+    return None
 
-    def process(self, tree):
-        if isinstance(tree, tuple):
-            if self.args is None:
-                if tree == self.tree:
-                    return self.replacement
-            elif tree[:2] == self.tree:
-                l = getlist(tree[2])
-                if len(l) != len(self.args):
-                    raise error.ParseError(
-                        _('invalid number of arguments: %s') % len(l))
-                result = self.replacement
-                for a, v in zip(self.args, l):
-                    valalias = revsetalias(('string', a), v)
-                    result = valalias.process(result)
-                return result
-            return tuple(map(self.process, tree))
+def _expandargs(tree, args):
+    """Replace all occurences of ('string', name) with the
+    substitution value of the same name in args, recursively.
+    """
+    if not isinstance(tree, tuple):
+        return tree
+    if len(tree) == 2 and tree[0] == 'string':
+        return args.get(tree[1], tree)
+    return tuple(_expandargs(t, args) for t in tree)
+
+def _expandaliases(aliases, tree, expanding):
+    """Expand aliases in tree, recursively.
+
+    'aliases' is a dictionary mapping user defined aliases to
+    revsetalias objects.
+    """
+    if not isinstance(tree, tuple):
+        # Do not expand raw strings
         return tree
+    alias = _getalias(aliases, tree)
+    if alias is not None:
+        if alias in expanding:
+            raise error.ParseError(_('infinite expansion of revset alias "%s" '
+                                     'detected') % alias.name)
+        expanding.append(alias)
+        result = alias.replacement
+        if alias.args is not None:
+            l = getlist(tree[2])
+            if len(l) != len(alias.args):
+                raise error.ParseError(
+                    _('invalid number of arguments: %s') % len(l))
+            result = _expandargs(result, dict(zip(alias.args, l)))
+        # Recurse in place, the base expression may have been rewritten
+        result = _expandaliases(aliases, result, expanding)
+        expanding.pop()
+    else:
+        result = tuple(_expandaliases(aliases, t, expanding)
+                       for t in tree)
+    return result
 
 def findaliases(ui, tree):
+    aliases = {}
     for k, v in ui.configitems('revsetalias'):
         alias = revsetalias(k, v)
-        tree = alias.process(tree)
-    return tree
+        aliases[alias.name] = alias
+    return _expandaliases(aliases, tree, [])
 
 parse = parser.parser(tokenize, elements).parse
 
@@ -1172,7 +1419,7 @@
             parse(arg) # make sure syntax errors are confined
             return '(%s)' % arg
         elif c == 'n':
-            return quote(nodemod.hex(arg))
+            return quote(node.hex(arg))
         elif c == 'b':
             return quote(arg.branch())
 
@@ -1187,7 +1434,7 @@
         elif t == 's':
             return "_list('%s')" % "\0".join(s)
         elif t == 'n':
-            return "_list('%s')" % "\0".join(nodemod.hex(a) for a in s)
+            return "_list('%s')" % "\0".join(node.hex(a) for a in s)
         elif t == 'b':
             return "_list('%s')" % "\0".join(a.branch() for a in s)
 
@@ -1221,5 +1468,20 @@
 
     return ret
 
+def prettyformat(tree):
+    def _prettyformat(tree, level, lines):
+        if not isinstance(tree, tuple) or tree[0] in ('string', 'symbol'):
+            lines.append((level, str(tree)))
+        else:
+            lines.append((level, '(%s' % tree[0]))
+            for s in tree[1:]:
+                _prettyformat(s, level + 1, lines)
+            lines[-1:] = [(lines[-1][0], lines[-1][1] + ')')]
+
+    lines = []
+    _prettyformat(tree, 0, lines)
+    output = '\n'.join(('  '*l + s) for l, s in lines)
+    return output
+
 # tell hggettext to extract docstrings from these functions:
 i18nfunctions = symbols.values()
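The rewritten alias support expands parse trees recursively while carrying an `expanding` stack, so a self-referential alias is reported instead of recursing forever, and argument names are substituted into the replacement tree. A stripped-down sketch of that strategy over plain strings rather than parse trees (alias definitions below are invented for illustration):

    def expand(name, definitions, expanding=None):
        '''expand $name references in an alias body, refusing cycles'''
        expanding = expanding or []
        if name in expanding:
            raise ValueError('infinite expansion of alias %r detected' % name)
        expanding.append(name)
        out = []
        for token in definitions[name].split():
            if token.startswith('$') and token[1:] in definitions:
                out.append(expand(token[1:], definitions, expanding))
            else:
                out.append(token)
        expanding.pop()
        return '(%s)' % ' '.join(out)

    aliases = {
        'reviewed': 'grep("Reviewed-by")',
        'pending':  '$reviewed and not merge()',
    }
    print expand('pending', aliases)
    # ((grep("Reviewed-by")) and not merge())

    aliases['loop'] = 'not $loop'
    # expand('loop', aliases) would now raise ValueError

The real code does the same walk over ('func', ('symbol', name), args) tuples, which is why _getalias() checks the tree shape before treating a node as an alias reference.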
--- a/mercurial/scmutil.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/scmutil.py	Tue Apr 17 17:56:36 2012 -0500
@@ -159,6 +159,15 @@
         '''Prevent instantiation; don't call this from subclasses.'''
         raise NotImplementedError('attempted instantiating ' + str(type(self)))
 
+    def tryread(self, path):
+        'gracefully return an empty string for missing files'
+        try:
+            return self.read(path)
+        except IOError, inst:
+            if inst.errno != errno.ENOENT:
+                raise
+        return ""
+
     def read(self, path):
         fp = self(path, 'rb')
         try:
@@ -436,15 +445,22 @@
 
     def systemrcpath():
         path = []
+        if sys.platform == 'plan9':
+            root = 'lib/mercurial'
+        else:
+            root = 'etc/mercurial'
         # old mod_python does not set sys.argv
         if len(getattr(sys, 'argv', [])) > 0:
             p = os.path.dirname(os.path.dirname(sys.argv[0]))
-            path.extend(rcfiles(os.path.join(p, 'etc/mercurial')))
-        path.extend(rcfiles('/etc/mercurial'))
+            path.extend(rcfiles(os.path.join(p, root)))
+        path.extend(rcfiles('/' + root))
         return path
 
     def userrcpath():
-        return [os.path.expanduser('~/.hgrc')]
+        if sys.platform == 'plan9':
+            return [os.environ['home'] + '/lib/hgrc']
+        else:
+            return [os.path.expanduser('~/.hgrc')]
 
 else:
 
@@ -523,10 +539,12 @@
     def revfix(repo, val, defval):
         if not val and val != 0 and defval is not None:
             return defval
-        return repo.changelog.rev(repo.lookup(val))
+        return repo[val].rev()
 
     seen, l = set(), []
     for spec in revs:
+        if l and not seen:
+            seen = set(l)
         # attempt to parse old-style ranges first to deal with
         # things like old-tag which contain query metacharacters
         try:
@@ -540,11 +558,18 @@
                 start = revfix(repo, start, 0)
                 end = revfix(repo, end, len(repo) - 1)
                 step = start > end and -1 or 1
-                for rev in xrange(start, end + step, step):
-                    if rev in seen:
-                        continue
-                    seen.add(rev)
-                    l.append(rev)
+                if not seen and not l:
+                    # by far the most common case: revs = ["-1:0"]
+                    l = range(start, end + step, step)
+                    # defer syncing seen until next iteration
+                    continue
+                newrevs = set(xrange(start, end + step, step))
+                if seen:
+                    newrevs.difference_update(seen)
+                    seen.update(newrevs)
+                else:
+                    seen = newrevs
+                l.extend(sorted(newrevs, reverse=start > end))
                 continue
             elif spec and spec in repo: # single unquoted rev
                 rev = revfix(repo, spec, None)
@@ -582,7 +607,7 @@
         ret.append(p)
     return ret
 
-def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
+def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
     if pats == ("",):
         pats = []
     if not globbed and default == 'relpath':
@@ -593,7 +618,10 @@
     def badfn(f, msg):
         ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
     m.bad = badfn
-    return m
+    return m, pats
+
+def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
+    return matchandpats(ctx, pats, opts, globbed, default)[0]
 
 def matchall(repo):
     return matchmod.always(repo.root, repo.getcwd())
@@ -610,6 +638,9 @@
     added, unknown, deleted, removed = [], [], [], []
     audit_path = pathauditor(repo.root)
     m = match(repo[None], pats, opts)
+    rejected = []
+    m.bad = lambda x, y: rejected.append(x)
+
     for abs in repo.walk(m):
         target = repo.wjoin(abs)
         good = True
@@ -654,6 +685,11 @@
         finally:
             wlock.release()
 
+    for f in rejected:
+        if f in m.files():
+            return 1
+    return 0
+
 def updatedir(ui, repo, patches, similarity=0):
     '''Update dirstate after patch application according to metadata'''
     if not patches:
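revrange() now has a fast path for the overwhelmingly common single-range call and otherwise deduplicates with set arithmetic instead of testing every rev against `seen` one at a time. A standalone sketch of that strategy over plain integer ranges (names and ranges invented for illustration):

    def mergeranges(ranges):
        '''flatten inclusive (start, stop) pairs into an ordered, unique list'''
        seen, out = set(), []
        for start, stop in ranges:
            step = 1 if start <= stop else -1
            if not seen and not out:
                # common case: a single range needs no dedup bookkeeping
                out = range(start, stop + step, step)
                continue
            if not seen:
                seen = set(out)             # sync lazily, like revrange()
            new = set(xrange(start, stop + step, step)) - seen
            seen.update(new)
            out.extend(sorted(new, reverse=step < 0))
        return out

    print mergeranges([(5, 0)])           # [5, 4, 3, 2, 1, 0]
    print mergeranges([(0, 3), (2, 6)])   # [0, 1, 2, 3, 4, 5, 6]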
--- a/mercurial/sslutil.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/sslutil.py	Tue Apr 17 17:56:36 2012 -0500
@@ -107,8 +107,9 @@
             if hostfingerprint:
                 raise util.Abort(_("host fingerprint for %s can't be "
                                    "verified (Python too old)") % host)
-            self.ui.warn(_("warning: certificate for %s can't be verified "
-                           "(Python too old)\n") % host)
+            if self.ui.configbool('ui', 'reportoldssl', True):
+                self.ui.warn(_("warning: certificate for %s can't be verified "
+                               "(Python too old)\n") % host)
             return
         if not sock.cipher(): # work around http://bugs.python.org/issue13721
             raise util.Abort(_('%s ssl connection error') % host)
--- a/mercurial/store.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/store.py	Tue Apr 17 17:56:36 2012 -0500
@@ -317,36 +317,36 @@
 
     def _load(self):
         '''fill the entries from the fncache file'''
-        self.entries = set()
         self._dirty = False
         try:
             fp = self.opener('fncache', mode='rb')
         except IOError:
             # skip nonexistent file
+            self.entries = set()
             return
-        for n, line in enumerate(fp):
-            if (len(line) < 2) or (line[-1] != '\n'):
-                t = _('invalid entry in fncache, line %s') % (n + 1)
-                raise util.Abort(t)
-            self.entries.add(decodedir(line[:-1]))
+        self.entries = set(map(decodedir, fp.read().splitlines()))
+        if '' in self.entries:
+            fp.seek(0)
+            for n, line in enumerate(fp):
+                if not line.rstrip('\n'):
+                    t = _('invalid entry in fncache, line %s') % (n + 1)
+                    raise util.Abort(t)
         fp.close()
 
+    def _write(self, files, atomictemp):
+        fp = self.opener('fncache', mode='wb', atomictemp=atomictemp)
+        if files:
+            fp.write('\n'.join(map(encodedir, files)) + '\n')
+        fp.close()
+        self._dirty = False
+
     def rewrite(self, files):
-        fp = self.opener('fncache', mode='wb')
-        for p in files:
-            fp.write(encodedir(p) + '\n')
-        fp.close()
+        self._write(files, False)
         self.entries = set(files)
-        self._dirty = False
 
     def write(self):
-        if not self._dirty:
-            return
-        fp = self.opener('fncache', mode='wb', atomictemp=True)
-        for p in self.entries:
-            fp.write(encodedir(p) + '\n')
-        fp.close()
-        self._dirty = False
+        if self._dirty:
+            self._write(self.entries, True)
 
     def add(self, fn):
         if self.entries is None:
--- a/mercurial/subrepo.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/subrepo.py	Tue Apr 17 17:56:36 2012 -0500
@@ -275,6 +275,11 @@
         """
         raise NotImplementedError
 
+    def basestate(self):
+        """current working directory base state, disregarding .hgsubstate
+        state and working directory modifications"""
+        raise NotImplementedError
+
     def checknested(self, path):
         """check if path is a subrepository within this repository"""
         return False
@@ -363,6 +368,9 @@
     def forget(self, ui, match, prefix):
         return []
 
+    def revert(self, ui, substate, *pats, **opts):
+        return []
+
 class hgsubrepo(abstractsubrepo):
     def __init__(self, ctx, path, state):
         self._path = path
@@ -446,6 +454,9 @@
             return True
         return w.dirty() # working directory changed
 
+    def basestate(self):
+        return self._repo['.'].hex()
+
     def checknested(self, path):
         return self._repo._checknested(self._repo.wjoin(path))
 
@@ -565,6 +576,38 @@
         return cmdutil.forget(ui, self._repo, match,
                               os.path.join(prefix, self._path), True)
 
+    def revert(self, ui, substate, *pats, **opts):
+        # reverting a subrepo is a 2 step process:
+        # 1. if the no_backup is not set, revert all modified
+        #    files inside the subrepo
+        # 2. update the subrepo to the revision specified in
+        #    the corresponding substate dictionary
+        ui.status(_('reverting subrepo %s\n') % substate[0])
+        if not opts.get('no_backup'):
+            # Revert all files on the subrepo, creating backups
+            # Note that this will not recursively revert subrepos
+            # We could do it if there was a set:subrepos() predicate
+            opts = opts.copy()
+            opts['date'] = None
+            opts['rev'] = substate[1]
+
+            pats = []
+            if not opts['all']:
+                pats = ['set:modified()']
+            self.filerevert(ui, *pats, **opts)
+
+        # Update the repo to the revision specified in the given substate
+        self.get(substate, overwrite=True)
+
+    def filerevert(self, ui, *pats, **opts):
+        ctx = self._repo[opts['rev']]
+        parents = self._repo.dirstate.parents()
+        if opts['all']:
+            pats = ['set:modified()']
+        else:
+            pats = []
+        cmdutil.revert(ui, self._repo, ctx, parents, *pats, **opts)
+
 class svnsubrepo(abstractsubrepo):
     def __init__(self, ctx, path, state):
         self._path = path
@@ -666,6 +709,9 @@
                 return False
         return True
 
+    def basestate(self):
+        return self._wcrev()
+
     def commit(self, text, user, date):
         # user and date are out of our hands since svn is centralized
         changed, extchanged = self._wcchanged()
@@ -916,6 +962,9 @@
         out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
         return code == 1
 
+    def basestate(self):
+        return self._gitstate()
+
     def get(self, state, overwrite=False):
         source, revision, kind = state
         if not revision:
--- a/mercurial/templatefilters.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/templatefilters.py	Tue Apr 17 17:56:36 2012 -0500
@@ -242,12 +242,29 @@
     return "-rw-r--r--"
 
 def person(author):
-    """:person: Any text. Returns the text before an email address."""
+    """:person: Any text. Returns the name before an email address,
+    interpreting it as per RFC 5322.
+
+    >>> person('foo@bar')
+    'foo'
+    >>> person('Foo Bar <foo@bar>')
+    'Foo Bar'
+    >>> person('"Foo Bar" <foo@bar>')
+    'Foo Bar'
+    >>> person('"Foo \"buz\" Bar" <foo@bar>')
+    'Foo "buz" Bar'
+    >>> # The following are invalid, but do exist in real-life
+    ...
+    >>> person('Foo "buz" Bar <foo@bar>')
+    'Foo "buz" Bar'
+    >>> person('"Foo Bar <foo@bar>')
+    'Foo Bar'
+    """
     if not '@' in author:
         return author
     f = author.find('<')
     if f != -1:
-        return author[:f].rstrip()
+        return author[:f].strip(' "').replace('\\"', '"')
     f = author.find('@')
     return author[:f].replace('.', ' ')
 
@@ -319,9 +336,14 @@
     return urllib.quote(text)
 
 def userfilter(text):
-    """:user: Any text. Returns the user portion of an email address."""
+    """:user: Any text. Returns a short representation of a user name or email
+    address."""
     return util.shortuser(text)
 
+def emailuser(text):
+    """:emailuser: Any text. Returns the user portion of an email address."""
+    return util.emailuser(text)
+
 def xmlescape(text):
     text = (text
             .replace('&', '&amp;')
@@ -365,6 +387,7 @@
     "tabindent": tabindent,
     "urlescape": urlescape,
     "user": userfilter,
+    "emailuser": emailuser,
     "xmlescape": xmlescape,
 }
 
--- a/mercurial/templates/gitweb/graph.tmpl	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/templates/gitweb/graph.tmpl	Tue Apr 17 17:56:36 2012 -0500
@@ -51,16 +51,6 @@
 var graph = new Graph();
 graph.scale({bg_height});
 
-graph.edge = function(x0, y0, x1, y1, color) \{
-	
-	this.setColor(color, 0.0, 0.65);
-	this.ctx.beginPath();
-	this.ctx.moveTo(x0, y0);
-	this.ctx.lineTo(x1, y1);
-	this.ctx.stroke();
-	
-}
-
 var revlink = '<li style="_STYLE"><span class="desc">';
 revlink += '<a class="list" href="{url}rev/_NODEID{sessionvars%urlparameter}" title="_NODEID"><b>_DESC</b></a>';
 revlink += '</span> _TAGS';
--- a/mercurial/templates/monoblue/graph.tmpl	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/templates/monoblue/graph.tmpl	Tue Apr 17 17:56:36 2012 -0500
@@ -49,16 +49,6 @@
     var graph = new Graph();
     graph.scale({bg_height});
 
-    graph.edge = function(x0, y0, x1, y1, color) \{
-
-        this.setColor(color, 0.0, 0.65);
-        this.ctx.beginPath();
-        this.ctx.moveTo(x0, y0);
-        this.ctx.lineTo(x1, y1);
-        this.ctx.stroke();
-
-    }
-
     var revlink = '<li style="_STYLE"><span class="desc">';
     revlink += '<a href="{url}rev/_NODEID{sessionvars%urlparameter}" title="_NODEID">_DESC</a>';
     revlink += '</span>_TAGS<span class="info">_DATE, by _USER</span></li>';
--- a/mercurial/templates/paper/graph.tmpl	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/templates/paper/graph.tmpl	Tue Apr 17 17:56:36 2012 -0500
@@ -62,16 +62,6 @@
 var graph = new Graph();
 graph.scale({bg_height});
 
-graph.edge = function(x0, y0, x1, y1, color) \{
-	
-	this.setColor(color, 0.0, 0.65);
-	this.ctx.beginPath();
-	this.ctx.moveTo(x0, y0);
-	this.ctx.lineTo(x1, y1);
-	this.ctx.stroke();
-	
-}
-
 var revlink = '<li style="_STYLE"><span class="desc">';
 revlink += '<a href="{url}rev/_NODEID{sessionvars%urlparameter}" title="_NODEID">_DESC</a>';
 revlink += '</span>_TAGS<span class="info">_DATE, by _USER</span></li>';
--- a/mercurial/templates/spartan/graph.tmpl	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/templates/spartan/graph.tmpl	Tue Apr 17 17:56:36 2012 -0500
@@ -43,16 +43,6 @@
 var graph = new Graph();
 graph.scale({bg_height});
 
-graph.edge = function(x0, y0, x1, y1, color) \{
-	
-	this.setColor(color, 0.0, 0.65);
-	this.ctx.beginPath();
-	this.ctx.moveTo(x0, y0);
-	this.ctx.lineTo(x1, y1);
-	this.ctx.stroke();
-	
-}
-
 var revlink = '<li style="_STYLE"><span class="desc">';
 revlink += '<a href="{url}rev/_NODEID{sessionvars%urlparameter}" title="_NODEID">_DESC</a>';
 revlink += '</span><span class="info">_DATE, by _USER</span></li>';
--- a/mercurial/templates/static/mercurial.js	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/templates/static/mercurial.js	Tue Apr 17 17:56:36 2012 -0500
@@ -58,25 +58,44 @@
 		
 		// Set the colour.
 		//
-		// Picks a distinct colour based on an internal wheel; the bg
-		// parameter provides the value that should be assigned to the 'zero'
-		// colours and the fg parameter provides the multiplier that should be
-		// applied to the foreground colours.
-		
-		color %= colors.length;
-		var red = (colors[color][0] * fg) || bg;
-		var green = (colors[color][1] * fg) || bg;
-		var blue = (colors[color][2] * fg) || bg;
-		red = Math.round(red * 255);
-		green = Math.round(green * 255);
-		blue = Math.round(blue * 255);
-		var s = 'rgb(' + red + ', ' + green + ', ' + blue + ')';
+		// If color is a string, expect an hexadecimal RGB
+		// value and apply it unchanged. If color is a number,
+		// pick a distinct colour based on an internal wheel;
+		// the bg parameter provides the value that should be
+		// assigned to the 'zero' colours and the fg parameter
+		// provides the multiplier that should be applied to
+		// the foreground colours.
+		var s;
+		if(typeof color == "string") {
+			s = "#" + color;
+		} else { //typeof color == "number"
+			color %= colors.length;
+			var red = (colors[color][0] * fg) || bg;
+			var green = (colors[color][1] * fg) || bg;
+			var blue = (colors[color][2] * fg) || bg;
+			red = Math.round(red * 255);
+			green = Math.round(green * 255);
+			blue = Math.round(blue * 255);
+			s = 'rgb(' + red + ', ' + green + ', ' + blue + ')';
+		}
 		this.ctx.strokeStyle = s;
 		this.ctx.fillStyle = s;
 		return s;
 		
 	}
 
+	this.edge = function(x0, y0, x1, y1, color, width) {
+		
+		this.setColor(color, 0.0, 0.65);
+		if(width >= 0)
+			 this.ctx.lineWidth = width;
+		this.ctx.beginPath();
+		this.ctx.moveTo(x0, y0);
+		this.ctx.lineTo(x1, y1);
+		this.ctx.stroke();
+		
+	}
+
 	this.render = function(data) {
 		
 		var backgrounds = '';
@@ -93,13 +112,20 @@
 			var edges = cur[2];
 			var fold = false;
 			
+			var prevWidth = this.ctx.lineWidth;
 			for (var j in edges) {
 				
 				line = edges[j];
 				start = line[0];
 				end = line[1];
 				color = line[2];
-
+				var width = line[3];
+				if(width < 0)
+					 width = prevWidth;
+				var branchcolor = line[4];
+				if(branchcolor)
+					color = branchcolor;
+				
 				if (end > this.columns || start > this.columns) {
 					this.columns += 1;
 				}
@@ -113,9 +139,10 @@
 				x1 = this.cell[0] + this.box_size * end + this.box_size / 2;
 				y1 = this.bg[1] + this.bg_height / 2;
 				
-				this.edge(x0, y0, x1, y1, color);
+				this.edge(x0, y0, x1, y1, color, width);
 				
 			}
+			this.ctx.lineWidth = prevWidth;
 			
 			// Draw the revision node in the right column
 			
--- a/mercurial/ui.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/ui.py	Tue Apr 17 17:56:36 2012 -0500
@@ -7,7 +7,7 @@
 
 from i18n import _
 import errno, getpass, os, socket, sys, tempfile, traceback
-import config, scmutil, util, error
+import config, scmutil, util, error, formatter
 
 class ui(object):
     def __init__(self, src=None):
@@ -46,6 +46,9 @@
     def copy(self):
         return self.__class__(self)
 
+    def formatter(self, topic, opts):
+        return formatter.formatter(self, topic, opts)
+
     def _trusted(self, fp, f):
         st = util.fstat(fp)
         if util.isowner(st):
@@ -684,10 +687,17 @@
 
     def geteditor(self):
         '''return editor to use'''
+        if sys.platform == 'plan9':
+            # vi is the MIPS instruction simulator on Plan 9. We
+            # instead default to E to plumb commit messages to
+            # avoid confusion.
+            editor = 'E'
+        else:
+            editor = 'vi'
         return (os.environ.get("HGEDITOR") or
                 self.config("ui", "editor") or
                 os.environ.get("VISUAL") or
-                os.environ.get("EDITOR", "vi"))
+                os.environ.get("EDITOR", editor))
 
     def progress(self, topic, pos, item="", unit="", total=None):
         '''show a progress message
--- a/mercurial/util.h	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/util.h	Tue Apr 17 17:56:36 2012 -0500
@@ -101,5 +101,65 @@
 
 #endif /* PY_VERSION_HEX */
 
+#if (PY_VERSION_HEX < 0x02050000)
+/* Definitions to get compatibility with python 2.4 and earlier which
+   does not have Py_ssize_t. See also PEP 353.
+   Note: msvc (8 or earlier) does not have ssize_t, so we use Py_ssize_t.
+*/
+typedef int Py_ssize_t;
+typedef Py_ssize_t (*lenfunc)(PyObject *);
+typedef PyObject *(*ssizeargfunc)(PyObject *, Py_ssize_t);
+
+#if !defined(PY_SSIZE_T_MIN)
+#define PY_SSIZE_T_MAX INT_MAX
+#define PY_SSIZE_T_MIN INT_MIN
+#endif
+#endif
+
+#ifdef _WIN32
+#ifdef _MSC_VER
+/* msvc 6.0 has problems */
+#define inline __inline
+typedef unsigned long uint32_t;
+typedef unsigned __int64 uint64_t;
+#else
+#include <stdint.h>
+#endif
+#else
+/* not windows */
+#include <sys/types.h>
+#if defined __BEOS__ && !defined __HAIKU__
+#include <ByteOrder.h>
+#else
+#include <arpa/inet.h>
+#endif
+#include <inttypes.h>
+#endif
+
+#if defined __hpux || defined __SUNPRO_C || defined _AIX
+#define inline
+#endif
+
+#ifdef __linux
+#define inline __inline
+#endif
+
+static inline uint32_t getbe32(const char *c)
+{
+	const unsigned char *d = (const unsigned char *)c;
+
+	return ((d[0] << 24) |
+		(d[1] << 16) |
+		(d[2] << 8) |
+		(d[3]));
+}
+
+static inline void putbe32(uint32_t x, char *c)
+{
+	c[0] = (x >> 24) & 0xff;
+	c[1] = (x >> 16) & 0xff;
+	c[2] = (x >> 8) & 0xff;
+	c[3] = (x) & 0xff;
+}
+
 #endif /* _HG_UTIL_H_ */
-
--- a/mercurial/util.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/util.py	Tue Apr 17 17:56:36 2012 -0500
@@ -422,22 +422,29 @@
         return str(val)
     origcmd = cmd
     cmd = quotecommand(cmd)
-    env = dict(os.environ)
-    env.update((k, py2shell(v)) for k, v in environ.iteritems())
-    env['HG'] = hgexecutable()
-    if out is None or out == sys.__stdout__:
-        rc = subprocess.call(cmd, shell=True, close_fds=closefds,
-                             env=env, cwd=cwd)
+    if sys.platform == 'plan9':
+        # subprocess kludge to work around issues in half-baked Python
+        # ports, notably bichued/python:
+        if not cwd is None:
+            os.chdir(cwd)
+        rc = os.system(cmd)
     else:
-        proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
-                                env=env, cwd=cwd, stdout=subprocess.PIPE,
-                                stderr=subprocess.STDOUT)
-        for line in proc.stdout:
-            out.write(line)
-        proc.wait()
-        rc = proc.returncode
-    if sys.platform == 'OpenVMS' and rc & 1:
-        rc = 0
+        env = dict(os.environ)
+        env.update((k, py2shell(v)) for k, v in environ.iteritems())
+        env['HG'] = hgexecutable()
+        if out is None or out == sys.__stdout__:
+            rc = subprocess.call(cmd, shell=True, close_fds=closefds,
+                                 env=env, cwd=cwd)
+        else:
+            proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
+                                    env=env, cwd=cwd, stdout=subprocess.PIPE,
+                                    stderr=subprocess.STDOUT)
+            for line in proc.stdout:
+                out.write(line)
+            proc.wait()
+            rc = proc.returncode
+        if sys.platform == 'OpenVMS' and rc & 1:
+            rc = 0
     if rc and onerr:
         errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
                             explainexit(rc)[0])
@@ -1125,6 +1132,16 @@
         user = user[:f]
     return user
 
+def emailuser(user):
+    """Return the user portion of an email address."""
+    f = user.find('@')
+    if f >= 0:
+        user = user[:f]
+    f = user.find('<')
+    if f >= 0:
+        user = user[f + 1:]
+    return user
+
 def email(author):
     '''get email of author.'''
     r = author.find('>')
@@ -1150,23 +1167,23 @@
     except (UnicodeDecodeError, UnicodeEncodeError):
         return _ellipsis(text, maxlength)[0]
 
+_byteunits = (
+    (100, 1 << 30, _('%.0f GB')),
+    (10, 1 << 30, _('%.1f GB')),
+    (1, 1 << 30, _('%.2f GB')),
+    (100, 1 << 20, _('%.0f MB')),
+    (10, 1 << 20, _('%.1f MB')),
+    (1, 1 << 20, _('%.2f MB')),
+    (100, 1 << 10, _('%.0f KB')),
+    (10, 1 << 10, _('%.1f KB')),
+    (1, 1 << 10, _('%.2f KB')),
+    (1, 1, _('%.0f bytes')),
+    )
+
 def bytecount(nbytes):
     '''return byte count formatted as readable string, with units'''
 
-    units = (
-        (100, 1 << 30, _('%.0f GB')),
-        (10, 1 << 30, _('%.1f GB')),
-        (1, 1 << 30, _('%.2f GB')),
-        (100, 1 << 20, _('%.0f MB')),
-        (10, 1 << 20, _('%.1f MB')),
-        (1, 1 << 20, _('%.2f MB')),
-        (100, 1 << 10, _('%.0f KB')),
-        (10, 1 << 10, _('%.1f KB')),
-        (1, 1 << 10, _('%.2f KB')),
-        (1, 1, _('%.0f bytes')),
-        )
-
-    for multiplier, divisor, format in units:
+    for multiplier, divisor, format in _byteunits:
         if nbytes >= divisor * multiplier:
             return format % (nbytes / float(divisor))
-    return units[-1][2] % nbytes
+    return _byteunits[-1][2] % nbytes
--- a/mercurial/windows.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/windows.py	Tue Apr 17 17:56:36 2012 -0500
@@ -123,7 +123,7 @@
         msvcrt.setmode(fno(), os.O_BINARY)
 
 def pconvert(path):
-    return '/'.join(path.split(os.sep))
+    return path.replace(os.sep, '/')
 
 def localpath(path):
     return path.replace('/', '\\')
--- a/mercurial/wireproto.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/mercurial/wireproto.py	Tue Apr 17 17:56:36 2012 -0500
@@ -415,6 +415,8 @@
     caps = ('lookup changegroupsubset branchmap pushkey known getbundle '
             'unbundlehash batch').split()
     if _allowstream(repo.ui):
+        if repo.ui.configbool('server', 'preferuncompressed', False):
+            caps.append('stream-preferred')
         requiredformats = repo.requirements & repo.supportedformats
         # if our local revlogs are just revlogv1, add 'stream' cap
         if not requiredformats - set(('revlogv1',)):
--- a/setup.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/setup.py	Tue Apr 17 17:56:36 2012 -0500
@@ -127,10 +127,16 @@
     py2exeloaded = False
 
 def runcmd(cmd, env):
-    p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
-                         stderr=subprocess.PIPE, env=env)
-    out, err = p.communicate()
-    return out, err
+    if sys.platform == 'plan9':
+        # subprocess kludge to work around issues in half-baked Python
+        # ports, notably bichued/python:
+        _, out, err = os.popen3(cmd)
+        return str(out), str(err)
+    else:
+        p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
+                             stderr=subprocess.PIPE, env=env)
+        out, err = p.communicate()
+        return out, err
 
 def runhg(cmd, env):
     out, err = runcmd(cmd, env)
--- a/tests/bzr-definitions	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/bzr-definitions	Tue Apr 17 17:56:36 2012 -0500
@@ -9,7 +9,7 @@
 
 glog()
 {
-    hg glog --template '{rev} "{desc|firstline}" files: {files}\n' "$@"
+    hg glog --template '{rev}@{branch} "{desc|firstline}" files: {files}\n' "$@"
 }
 
 manifest()
--- a/tests/run-tests.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/run-tests.py	Tue Apr 17 17:56:36 2012 -0500
@@ -75,7 +75,7 @@
         def t():
             start = time.time()
             while time.time() - start < timeout and p.returncode is None:
-                time.sleep(1)
+                time.sleep(.1)
             p.timeout = True
             if p.returncode is None:
                 terminate(p)
@@ -360,7 +360,7 @@
                 os.kill(pid, 0)
                 vlog('# Killing daemon process %d' % pid)
                 os.kill(pid, signal.SIGTERM)
-                time.sleep(0.25)
+                time.sleep(0.1)
                 os.kill(pid, 0)
                 vlog('# Daemon process %d is stuck - really killing it' % pid)
                 os.kill(pid, signal.SIGKILL)
@@ -1275,7 +1275,7 @@
         else:
             runtests(options, tests)
     finally:
-        time.sleep(1)
+        time.sleep(.1)
         cleanup(options)
 
 if __name__ == '__main__':
--- a/tests/test-bundle-type.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-bundle-type.t	Tue Apr 17 17:56:36 2012 -0500
@@ -95,7 +95,6 @@
 
   $ cd t1
   $ hg bundle -a -t garbage ../bgarbage
-  1 changesets found
   abort: unknown bundle type specified with --type
   [255]
   $ cd ..
--- a/tests/test-casefolding.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-casefolding.t	Tue Apr 17 17:56:36 2012 -0500
@@ -65,7 +65,7 @@
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
   $ echo gold > a
   $ hg up
-  a: untracked file differs
+  A: untracked file differs
   abort: untracked files in working directory differ from files in requested revision
   [255]
   $ cat a
--- a/tests/test-check-code-hg.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-check-code-hg.t	Tue Apr 17 17:56:36 2012 -0500
@@ -31,15 +31,6 @@
    warning: naked except clause
    warning: naked except clause
   contrib/shrink-revlog.py:0:
-   >                    '(You can delete those files when you are satisfied that your\n'
-   warning: line over 80 characters
-  contrib/shrink-revlog.py:0:
-   >                 ('', 'sort', 'reversepostorder', 'name of sort algorithm to use'),
-   warning: line over 80 characters
-  contrib/shrink-revlog.py:0:
-   >                [('', 'revlog', '', _('index (.i) file of the revlog to shrink')),
-   warning: line over 80 characters
-  contrib/shrink-revlog.py:0:
    >         except:
    warning: naked except clause
   doc/gendoc.py:0:
@@ -202,12 +193,6 @@
   hgext/keyword.py:0:
    >     ui.note("hg ci -m '%s'\n" % msg)
    warning: unwrapped ui message
-  hgext/largefiles/overrides.py:0:
-   >             # When we call orig below it creates the standins but we don't add them
-   warning: line over 80 characters
-  hgext/largefiles/reposetup.py:0:
-   >                             if os.path.exists(self.wjoin(lfutil.standin(lfile))):
-   warning: line over 80 characters
   hgext/mq.py:0:
    >                     raise util.Abort(_("cannot push --exact with applied patches"))
    warning: line over 80 characters
@@ -474,9 +459,6 @@
    >     except:
    warning: naked except clause
   mercurial/localrepo.py:0:
-   >                                      hint=_("use --subrepos for recursive commit"))
-   warning: line over 80 characters
-  mercurial/localrepo.py:0:
    >                         # we return an integer indicating remote head count change
    warning: line over 80 characters
   mercurial/localrepo.py:0:
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-commit-amend.t	Tue Apr 17 17:56:36 2012 -0500
@@ -0,0 +1,292 @@
+  $ hg init
+
+Setup:
+
+  $ echo a >> a
+  $ hg ci -Am 'base'
+  adding a
+
+Refuse to amend public csets:
+
+  $ hg phase -r . -p
+  $ hg ci --amend
+  abort: cannot amend public changesets
+  [255]
+  $ hg phase -r . -f -d
+
+  $ echo a >> a
+  $ hg ci -Am 'base1'
+
+Nothing to amend:
+
+  $ hg ci --amend
+  nothing changed
+  [1]
+
+Amending changeset with changes in working dir:
+
+  $ echo a >> a
+  $ hg ci --amend -m 'amend base1'
+  saved backup bundle to $TESTTMP/.hg/strip-backup/489edb5b847d-amend-backup.hg
+  $ hg diff -c .
+  diff -r ad120869acf0 -r 9cd25b479c51 a
+  --- a/a	Thu Jan 01 00:00:00 1970 +0000
+  +++ b/a	Thu Jan 01 00:00:00 1970 +0000
+  @@ -1,1 +1,3 @@
+   a
+  +a
+  +a
+  $ hg log
+  changeset:   1:9cd25b479c51
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     amend base1
+  
+  changeset:   0:ad120869acf0
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     base
+  
+
+Add new file:
+
+  $ echo b > b
+  $ hg ci --amend -Am 'amend base1 new file'
+  adding b
+  saved backup bundle to $TESTTMP/.hg/strip-backup/9cd25b479c51-amend-backup.hg
+
+Remove file that was added in amended commit:
+
+  $ hg rm b
+  $ hg ci --amend -m 'amend base1 remove new file'
+  saved backup bundle to $TESTTMP/.hg/strip-backup/e2bb3ecffd2f-amend-backup.hg
+
+  $ hg cat b
+  b: no such file in rev 664a9b2d60cd
+  [1]
+
+No changes, just a different message:
+
+  $ hg ci -v --amend -m 'no changes, new message'
+  amending changeset 664a9b2d60cd
+  copying changeset 664a9b2d60cd to ad120869acf0
+  a
+  stripping amended changeset 664a9b2d60cd
+  1 changesets found
+  saved backup bundle to $TESTTMP/.hg/strip-backup/664a9b2d60cd-amend-backup.hg
+  1 changesets found
+  adding branch
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 1 changes to 1 files
+  committed changeset 1:ea6e356ff2ad
+  $ hg diff -c .
+  diff -r ad120869acf0 -r ea6e356ff2ad a
+  --- a/a	Thu Jan 01 00:00:00 1970 +0000
+  +++ b/a	Thu Jan 01 00:00:00 1970 +0000
+  @@ -1,1 +1,3 @@
+   a
+  +a
+  +a
+  $ hg log
+  changeset:   1:ea6e356ff2ad
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     no changes, new message
+  
+  changeset:   0:ad120869acf0
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     base
+  
+
+Disable default date on commit so when -d isn't given, the old date is preserved:
+
+  $ echo '[defaults]' >> $HGRCPATH
+  $ echo 'commit=' >> $HGRCPATH
+
+Test -u/-d:
+
+  $ hg ci --amend -u foo -d '1 0'
+  saved backup bundle to $TESTTMP/.hg/strip-backup/ea6e356ff2ad-amend-backup.hg
+  $ echo a >> a
+  $ hg ci --amend -u foo -d '1 0'
+  saved backup bundle to $TESTTMP/.hg/strip-backup/377b91ce8b56-amend-backup.hg
+  $ hg log -r .
+  changeset:   1:2c94e4a5756f
+  tag:         tip
+  user:        foo
+  date:        Thu Jan 01 00:00:01 1970 +0000
+  summary:     no changes, new message
+  
+
+Open editor with old commit message if a message isn't given otherwise:
+
+  $ cat > editor << '__EOF__'
+  > #!/bin/sh
+  > cat $1
+  > echo "another precious commit message" > "$1"
+  > __EOF__
+  $ chmod +x editor
+  $ HGEDITOR="'`pwd`'"/editor hg commit --amend -v
+  amending changeset 2c94e4a5756f
+  copying changeset 2c94e4a5756f to ad120869acf0
+  no changes, new message
+  
+  
+  HG: Enter commit message.  Lines beginning with 'HG:' are removed.
+  HG: Leave message empty to abort commit.
+  HG: --
+  HG: user: foo
+  HG: branch 'default'
+  HG: changed a
+  a
+  stripping amended changeset 2c94e4a5756f
+  1 changesets found
+  saved backup bundle to $TESTTMP/.hg/strip-backup/2c94e4a5756f-amend-backup.hg
+  1 changesets found
+  adding branch
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 1 changes to 1 files
+  committed changeset 1:ffb49186f961
+
+Same, but with changes in working dir (different code path):
+
+  $ echo a >> a
+  $ HGEDITOR="'`pwd`'"/editor hg commit --amend -v
+  amending changeset ffb49186f961
+  another precious commit message
+  
+  
+  HG: Enter commit message.  Lines beginning with 'HG:' are removed.
+  HG: Leave message empty to abort commit.
+  HG: --
+  HG: user: foo
+  HG: branch 'default'
+  HG: changed a
+  a
+  copying changeset 27f3aacd3011 to ad120869acf0
+  a
+  stripping intermediate changeset 27f3aacd3011
+  stripping amended changeset ffb49186f961
+  2 changesets found
+  saved backup bundle to $TESTTMP/.hg/strip-backup/ffb49186f961-amend-backup.hg
+  1 changesets found
+  adding branch
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 1 changes to 1 files
+  committed changeset 1:fb6cca43446f
+
+  $ rm editor
+  $ hg log -r .
+  changeset:   1:fb6cca43446f
+  tag:         tip
+  user:        foo
+  date:        Thu Jan 01 00:00:01 1970 +0000
+  summary:     another precious commit message
+  
+
+Moving bookmarks, preserve active bookmark:
+
+  $ hg book book1
+  $ hg book book2
+  $ hg ci --amend -m 'move bookmarks'
+  saved backup bundle to $TESTTMP/.hg/strip-backup/fb6cca43446f-amend-backup.hg
+  $ hg book
+     book1                     1:0cf1c7a51bcf
+   * book2                     1:0cf1c7a51bcf
+  $ echo a >> a
+  $ hg ci --amend -m 'move bookmarks'
+  saved backup bundle to $TESTTMP/.hg/strip-backup/0cf1c7a51bcf-amend-backup.hg
+  $ hg book
+     book1                     1:7344472bd951
+   * book2                     1:7344472bd951
+
+  $ echo '[defaults]' >> $HGRCPATH
+  $ echo "commit=-d '0 0'" >> $HGRCPATH
+
+Moving branches:
+
+  $ hg branch foo
+  marked working directory as branch foo
+  (branches are permanent and global, did you want a bookmark?)
+  $ echo a >> a
+  $ hg ci -m 'branch foo'
+  $ hg branch default -f
+  marked working directory as branch default
+  (branches are permanent and global, did you want a bookmark?)
+  $ hg ci --amend -m 'back to default'
+  saved backup bundle to $TESTTMP/.hg/strip-backup/1661ca36a2db-amend-backup.hg
+  $ hg branches
+  default                        2:f24ee5961967
+
+Close branch:
+
+  $ hg up -q 0
+  $ echo b >> b
+  $ hg branch foo
+  marked working directory as branch foo
+  (branches are permanent and global, did you want a bookmark?)
+  $ hg ci -Am 'fork'
+  adding b
+  $ echo b >> b
+  $ hg ci -mb
+  $ hg ci --amend --close-branch -m 'closing branch foo'
+  saved backup bundle to $TESTTMP/.hg/strip-backup/c962248fa264-amend-backup.hg
+
+Same thing, different code path:
+
+  $ echo b >> b
+  $ hg ci -m 'reopen branch'
+  reopening closed branch head 4
+  $ echo b >> b
+  $ hg ci --amend --close-branch
+  saved backup bundle to $TESTTMP/.hg/strip-backup/5e302dcc12b8-amend-backup.hg
+  $ hg branches
+  default                        2:f24ee5961967
+
+Refuse to amend merges:
+
+  $ hg up -q default
+  $ hg merge foo
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  (branch merge, don't forget to commit)
+  $ hg ci --amend
+  abort: cannot amend while merging
+  [255]
+  $ hg ci -m 'merge'
+  $ hg ci --amend
+  abort: cannot amend merge changesets
+  [255]
+
+Follow copies/renames:
+
+  $ hg mv b c
+  $ hg ci -m 'b -> c'
+  $ hg mv c d
+  $ hg ci --amend -m 'b -> d'
+  saved backup bundle to $TESTTMP/.hg/strip-backup/9c207120aa98-amend-backup.hg
+  $ hg st --rev .^ --copies d
+  A d
+    b
+  $ hg cp d e
+  $ hg ci -m 'e = d'
+  $ hg cp e f
+  $ hg ci --amend -m 'f = d'
+  saved backup bundle to $TESTTMP/.hg/strip-backup/fda2b3b27b22-amend-backup.hg
+  $ hg st --rev .^ --copies f
+  A f
+    d
+
+Can't rollback an amend:
+
+  $ hg rollback
+  no rollback information available
+  [1]
--- a/tests/test-convert-bzr-ghosts.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-convert-bzr-ghosts.t	Tue Apr 17 17:56:36 2012 -0500
@@ -30,7 +30,7 @@
   1 Initial layout setup
   0 Commit with ghost revision
   $ glog -R source-hg
-  o  1 "Commit with ghost revision" files: somefile
+  o  1@source "Commit with ghost revision" files: somefile
   |
-  o  0 "Initial layout setup" files: somefile
+  o  0@source "Initial layout setup" files: somefile
   
--- a/tests/test-convert-bzr-merges.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-convert-bzr-merges.t	Tue Apr 17 17:56:36 2012 -0500
@@ -48,17 +48,17 @@
   1 Added brach2 file
   0 Merged branches
   $ glog -R source-hg
-  o    5 "(octopus merge fixup)" files:
+  o    5@source "(octopus merge fixup)" files:
   |\
-  | o    4 "Merged branches" files: file-branch2
+  | o    4@source "Merged branches" files: file-branch2
   | |\
-  o---+  3 "Added brach2 file" files: file-branch2
+  o---+  3@source-branch2 "Added brach2 file" files: file-branch2
    / /
-  | o  2 "Added parent file" files: file-parent
+  | o  2@source "Added parent file" files: file-parent
   | |
-  o |  1 "Added branch1 file" files: file file-branch1
+  o |  1@source-branch1 "Added branch1 file" files: file file-branch1
   |/
-  o  0 "Initial add" files: file
+  o  0@source "Initial add" files: file
   
   $ manifest source-hg tip
   % manifest of tip
--- a/tests/test-convert-bzr.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-convert-bzr.t	Tue Apr 17 17:56:36 2012 -0500
@@ -7,6 +7,17 @@
   $ mkdir test-createandrename
   $ cd test-createandrename
   $ bzr init -q source
+
+test empty repo conversion (issue3233)
+
+  $ hg convert source source-hg
+  initializing destination source-hg repository
+  scanning source...
+  sorting...
+  converting...
+
+back to the rename stuff
+
   $ cd source
   $ echo a > a
   $ echo c > c
@@ -25,16 +36,15 @@
   $ bzr commit -q -m 'rename a into b, create a, rename c into d'
   $ cd ..
   $ hg convert source source-hg
-  initializing destination source-hg repository
   scanning source...
   sorting...
   converting...
   1 Initial add: a, c, e
   0 rename a into b, create a, rename c into d
   $ glog -R source-hg
-  o  1 "rename a into b, create a, rename c into d" files: a b c d e f
+  o  1@source "rename a into b, create a, rename c into d" files: a b c d e f
   |
-  o  0 "Initial add: a, c, e" files: a c e
+  o  0@source "Initial add: a, c, e" files: a c e
   
 
 manifest
@@ -54,7 +64,7 @@
   converting...
   0 Initial add: a, c, e
   $ glog -R source-1-hg
-  o  0 "Initial add: a, c, e" files: a c e
+  o  0@source "Initial add: a, c, e" files: a c e
   
 
 test with filemap
@@ -77,22 +87,12 @@
 convert from lightweight checkout
 
   $ bzr checkout --lightweight source source-light
-  $ hg convert source-light source-light-hg
+  $ hg convert -s bzr source-light source-light-hg
   initializing destination source-light-hg repository
   warning: lightweight checkouts may cause conversion failures, try with a regular branch instead.
-  scanning source...
-  sorting...
-  converting...
-  1 Initial add: a, c, e
-  0 rename a into b, create a, rename c into d
-
-lightweight manifest
-
-  $ hg manifest -R source-light-hg -r tip
-  a
-  b
-  d
-  f
+  $TESTTMP/test-createandrename/source-light does not look like a Bazaar repository
+  abort: source-light: missing or unsupported repository
+  [255]
 
 extract timestamps that look just like hg's {date|isodate}:
 yyyy-mm-dd HH:MM zzzz (no seconds!)
@@ -147,13 +147,13 @@
   1 Editing b
   0 Merged improve branch
   $ glog -R source-hg
-  o    3 "Merged improve branch" files:
+  o    3@source "Merged improve branch" files:
   |\
-  | o  2 "Editing b" files: b
+  | o  2@source-improve "Editing b" files: b
   | |
-  o |  1 "Editing a" files: a
+  o |  1@source "Editing a" files: a
   |/
-  o  0 "Initial add" files: a b
+  o  0@source "Initial add" files: a b
   
   $ cd ..
 
@@ -208,3 +208,77 @@
   $ hg cat syma; echo
   a
 
+Multiple branches
+
+  $ bzr init-repo -q --no-trees repo
+  $ bzr init -q repo/trunk
+  $ bzr co repo/trunk repo-trunk
+  $ cd repo-trunk
+  $ echo a > a
+  $ bzr add a
+  adding a
+  $ bzr ci -qm adda --commit-time '2012-01-01 00:00:01 +0000'
+  $ bzr tag trunk-tag
+  Created tag trunk-tag.
+  $ bzr switch -b branch
+  Tree is up to date at revision 1.
+  Switched to branch: *repo/branch/ (glob)
+  $ echo b > b
+  $ bzr add b
+  adding b
+  $ bzr ci -qm addb --commit-time '2012-01-01 00:00:02 +0000'
+  $ bzr tag branch-tag
+  Created tag branch-tag.
+  $ bzr switch --force ../repo/trunk
+  Updated to revision 1.
+  Switched to branch: */repo/trunk/ (glob)
+  $ echo a >> a
+  $ bzr ci -qm changea --commit-time '2012-01-01 00:00:03 +0000'
+  $ cd ..
+  $ hg convert --datesort repo repo-bzr
+  initializing destination repo-bzr repository
+  scanning source...
+  sorting...
+  converting...
+  2 adda
+  1 addb
+  0 changea
+  updating tags
+  $ (cd repo-bzr; glog)
+  o  3@default "update tags" files: .hgtags
+  |
+  o  2@default "changea" files: a
+  |
+  | o  1@branch "addb" files: b
+  |/
+  o  0@default "adda" files: a
+  
+
+Test tags (converted identifiers are not stable because bzr ones are
+not and get incorporated in extra fields).
+
+  $ hg -R repo-bzr tags
+  tip                                3:* (glob)
+  branch-tag                         1:* (glob)
+  trunk-tag                          0:* (glob)
+
+Nested repositories (issue3254)
+
+  $ bzr init-repo -q --no-trees repo/inner
+  $ bzr init -q repo/inner/trunk
+  $ bzr co repo/inner/trunk inner-trunk
+  $ cd inner-trunk
+  $ echo b > b
+  $ bzr add b
+  adding b
+  $ bzr ci -qm addb
+  $ cd ..
+  $ hg convert --datesort repo noinner-bzr
+  initializing destination noinner-bzr repository
+  scanning source...
+  sorting...
+  converting...
+  2 adda
+  1 addb
+  0 changea
+  updating tags
--- a/tests/test-debugcomplete.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-debugcomplete.t	Tue Apr 17 17:56:36 2012 -0500
@@ -87,6 +87,7 @@
   debuginstall
   debugknown
   debugpushkey
+  debugpvec
   debugrebuildstate
   debugrename
   debugrevlog
@@ -192,7 +193,7 @@
   add: include, exclude, subrepos, dry-run
   annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, ignore-all-space, ignore-space-change, ignore-blank-lines, include, exclude
   clone: noupdate, updaterev, rev, branch, pull, uncompressed, ssh, remotecmd, insecure
-  commit: addremove, close-branch, include, exclude, message, logfile, date, user, subrepos
+  commit: addremove, close-branch, amend, include, exclude, message, logfile, date, user, subrepos
   diff: rev, change, text, git, nodates, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, unified, stat, include, exclude, subrepos
   export: output, switch-parent, rev, text, git, nodates
   forget: include, exclude
@@ -236,6 +237,7 @@
   debuginstall: 
   debugknown: 
   debugpushkey: 
+  debugpvec: 
   debugrebuildstate: rev
   debugrename: rev
   debugrevlog: changelog, manifest, dump
@@ -245,7 +247,7 @@
   debugsub: rev
   debugwalk: include, exclude
   debugwireargs: three, four, five, ssh, remotecmd, insecure
-  graft: continue, edit, currentdate, currentuser, date, user, tool
+  graft: continue, edit, currentdate, currentuser, date, user, tool, dry-run
   grep: print0, all, text, follow, ignore-case, files-with-matches, line-number, rev, user, date, include, exclude
   heads: rev, topo, active, closed, style, template
   help: extension, command
--- a/tests/test-diff-color.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-diff-color.t	Tue Apr 17 17:56:36 2012 -0500
@@ -85,7 +85,7 @@
   \x1b[0;36;1mold mode 100644\x1b[0m (esc)
   \x1b[0;36;1mnew mode 100755\x1b[0m (esc)
   1 hunks, 1 lines changed
-  \x1b[0;33mexamine changes to 'a'? [Ynsfdaq?]\x1b[0m  (esc)
+  \x1b[0;33mexamine changes to 'a'? [Ynesfdaq?]\x1b[0m  (esc)
   \x1b[0;35m@@ -2,7 +2,7 @@\x1b[0m (esc)
    c
    a
@@ -95,7 +95,7 @@
    a
    a
    c
-  \x1b[0;33mrecord this change to 'a'? [Ynsfdaq?]\x1b[0m  (esc)
+  \x1b[0;33mrecord this change to 'a'? [Ynesfdaq?]\x1b[0m  (esc)
 
   $ echo "[extensions]" >> $HGRCPATH
   $ echo "mq=" >> $HGRCPATH
@@ -113,7 +113,7 @@
   \x1b[0;36;1mold mode 100644\x1b[0m (esc)
   \x1b[0;36;1mnew mode 100755\x1b[0m (esc)
   1 hunks, 1 lines changed
-  \x1b[0;33mexamine changes to 'a'? [Ynsfdaq?]\x1b[0m  (esc)
+  \x1b[0;33mexamine changes to 'a'? [Ynesfdaq?]\x1b[0m  (esc)
   \x1b[0;35m@@ -2,7 +2,7 @@\x1b[0m (esc)
    c
    a
@@ -123,4 +123,4 @@
    a
    a
    c
-  \x1b[0;33mrecord this change to 'a'? [Ynsfdaq?]\x1b[0m  (esc)
+  \x1b[0;33mrecord this change to 'a'? [Ynesfdaq?]\x1b[0m  (esc)
--- a/tests/test-doctest.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-doctest.py	Tue Apr 17 17:56:36 2012 -0500
@@ -39,3 +39,6 @@
 
 import mercurial.minirst
 doctest.testmod(mercurial.minirst)
+
+import mercurial.templatefilters
+doctest.testmod(mercurial.templatefilters)
--- a/tests/test-duplicateoptions.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-duplicateoptions.py	Tue Apr 17 17:56:36 2012 -0500
@@ -1,7 +1,7 @@
 import os
 from mercurial import ui, commands, extensions
 
-ignore = set(['highlight', 'inotify', 'win32text'])
+ignore = set(['highlight', 'inotify', 'win32text', 'factotum'])
 
 if os.name != 'nt':
     ignore.add('win32mbcs')
--- a/tests/test-glog.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-glog.t	Tue Apr 17 17:56:36 2012 -0500
@@ -83,8 +83,30 @@
   >   hg commit -Aqd "$rev 0" -m "($rev) $msg"
   > }
 
+  $ cat > printrevset.py <<EOF
+  > from mercurial import extensions, revset, commands
+  > from hgext import graphlog
+  >  
+  > def uisetup(ui):
+  >     def printrevset(orig, ui, repo, *pats, **opts):
+  >         if opts.get('print_revset'):
+  >             expr = graphlog.getlogrevs(repo, pats, opts)[1]
+  >             if expr:
+  >                 tree = revset.parse(expr)[0]
+  >             else:
+  >                 tree = []
+  >             ui.write('%r\n' % (opts.get('rev', []),))
+  >             ui.write(revset.prettyformat(tree) + '\n')
+  >             return 0
+  >         return orig(ui, repo, *pats, **opts)
+  >     entry = extensions.wrapcommand(commands.table, 'log', printrevset)
+  >     entry[1].append(('', 'print-revset', False,
+  >                      'print generated revset and exit (DEPRECATED)'))
+  > EOF
+
   $ echo "[extensions]" >> $HGRCPATH
   $ echo "graphlog=" >> $HGRCPATH
+  $ echo "printrevset=`pwd`/printrevset.py" >> $HGRCPATH
 
   $ hg init repo
   $ cd repo
@@ -1359,9 +1381,13 @@
 
 Do not crash or produce strange graphs if history is buggy
 
+  $ hg branch branch
+  marked working directory as branch branch
+  (branches are permanent and global, did you want a bookmark?)
   $ commit 36 "buggy merge: identical parents" 35 35
   $ hg glog -l5
-  @  changeset:   36:95fa8febd08a
+  @  changeset:   36:08a19a744424
+  |  branch:      branch
   |  tag:         tip
   |  parent:      35:9159c3644c5e
   |  parent:      35:9159c3644c5e
@@ -1396,76 +1422,640 @@
 
 Test log -G options
 
-  $ hg log -G -u 'something nice'
-  $ hg log -G -b 'something nice'
-  abort: unknown revision 'something nice'!
-  [255]
-  $ hg log -G -k 'something nice'
-  $ hg log -G --only-branch 'something nice'
-  abort: unknown revision 'something nice'!
-  [255]
-  $ hg log -G --include 'some file' --exclude 'another file'
-  $ hg log -G --follow  --template 'nodetag {rev}\n' | grep nodetag | wc -l
-  \s*36 (re)
-  $ hg log -G --removed --template 'nodetag {rev}\n' | grep nodetag | wc -l
-  \s*0 (re)
-  $ hg log -G --only-merges --template 'nodetag {rev}\n' | grep nodetag | wc -l
-  \s*28 (re)
-  $ hg log -G --no-merges --template 'nodetag {rev}\n'
-  o  nodetag 35
-  |
-  o    nodetag 34
-  |\
-  | \
-  | |\
-  | | \
-  | | |\
-  | | | \
-  | | | |\
-  | | | | \
-  | | | | |\
-  +-+-+-+-----o  nodetag 33
-  | | | | | |
-  +---------o  nodetag 29
-  | | | | |
-  +-+-+---o  nodetag 27
-  | | | |/
-  | | | o  nodetag 3
-  | | |/
-  | | o  nodetag 2
-  | |/
-  | o  nodetag 1
-  |/
-  o  nodetag 0
-  
+  $ testlog() {
+  >   hg log -G --print-revset "$@"
+  >   hg log --template 'nodetag {rev}\n' "$@" | grep nodetag \
+  >     | sed 's/.*nodetag/nodetag/' > log.nodes
+  >   hg log -G --template 'nodetag {rev}\n' "$@" | grep nodetag \
+  >     | sed 's/.*nodetag/nodetag/' > glog.nodes
+  >   diff -u log.nodes glog.nodes
+  > }
+
+glog always reorders nodes which explains the difference with log
+
+  $ testlog -r 27 -r 25 -r 21 -r 34 -r 32 -r 31
+  ['27', '25', '21', '34', '32', '31']
+  []
+  --- log.nodes	* (glob)
+  +++ glog.nodes	* (glob)
+  @@ -1,6 +1,6 @@
+  -nodetag 27
+  -nodetag 25
+  -nodetag 21
+   nodetag 34
+   nodetag 32
+   nodetag 31
+  +nodetag 27
+  +nodetag 25
+  +nodetag 21
+  [1]
+  $ testlog -u test -u not-a-user
+  []
+  (group
+    (group
+      (or
+        (func
+          ('symbol', 'user')
+          ('string', 'test'))
+        (func
+          ('symbol', 'user')
+          ('string', 'not-a-user')))))
+  $ testlog -b not-a-branch
+  abort: unknown revision 'not-a-branch'!
+  abort: unknown revision 'not-a-branch'!
+  abort: unknown revision 'not-a-branch'!
+  $ testlog -b 35 -b 36 --only-branch branch
+  []
+  (group
+    (group
+      (or
+        (or
+          (func
+            ('symbol', 'branch')
+            ('string', 'default'))
+          (func
+            ('symbol', 'branch')
+            ('string', 'branch')))
+        (func
+          ('symbol', 'branch')
+          ('string', 'branch')))))
+  $ testlog -k expand -k merge
+  []
+  (group
+    (group
+      (or
+        (func
+          ('symbol', 'keyword')
+          ('string', 'expand'))
+        (func
+          ('symbol', 'keyword')
+          ('string', 'merge')))))
+  $ testlog --only-merges
+  []
+  (group
+    (func
+      ('symbol', 'merge')
+      None))
+  $ testlog --no-merges
+  []
+  (group
+    (not
+      (func
+        ('symbol', 'merge')
+        None)))
+  $ testlog --date '2 0 to 4 0'
+  []
+  (group
+    (func
+      ('symbol', 'date')
+      ('string', '2 0 to 4 0')))
   $ hg log -G -d 'brace ) in a date'
   abort: invalid date: 'brace ) in a date'
   [255]
-  $ hg log -G -P 32 --template '{rev}\n'
-  @  36
+  $ testlog --prune 31 --prune 32
+  []
+  (group
+    (group
+      (and
+        (not
+          (group
+            (or
+              ('string', '31')
+              (func
+                ('symbol', 'ancestors')
+                ('string', '31')))))
+        (not
+          (group
+            (or
+              ('string', '32')
+              (func
+                ('symbol', 'ancestors')
+                ('string', '32'))))))))
+
+Dedicated repo for --follow and paths filtering. The g is crafted to
+have 2 filelog topological heads in a linear changeset graph.
+
+  $ cd ..
+  $ hg init follow
+  $ cd follow
+  $ testlog --follow
+  []
+  []
+  $ echo a > a
+  $ echo aa > aa
+  $ echo f > f
+  $ hg ci -Am "add a" a aa f
+  $ hg cp a b
+  $ hg cp f g
+  $ hg ci -m "copy a b"
+  $ mkdir dir
+  $ hg mv b dir
+  $ echo g >> g
+  $ echo f >> f
+  $ hg ci -m "mv b dir/b"
+  $ hg mv a b
+  $ hg cp -f f g
+  $ echo a > d
+  $ hg add d
+  $ hg ci -m "mv a b; add d"
+  $ hg mv dir/b e
+  $ hg ci -m "mv dir/b e"
+  $ hg glog --template '({rev}) {desc|firstline}\n'
+  @  (4) mv dir/b e
   |
-  o  35
+  o  (3) mv a b; add d
+  |
+  o  (2) mv b dir/b
+  |
+  o  (1) copy a b
   |
-  o  34
-  |
-  | o  33
+  o  (0) add a
+  
+
+  $ testlog a
+  []
+  (group
+    (group
+      (func
+        ('symbol', 'filelog')
+        ('string', 'a'))))
+  $ testlog a b
+  []
+  (group
+    (group
+      (or
+        (func
+          ('symbol', 'filelog')
+          ('string', 'a'))
+        (func
+          ('symbol', 'filelog')
+          ('string', 'b')))))
+
+Test falling back to slow path for non-existing files
+
+  $ testlog a c
+  []
+  (group
+    (func
+      ('symbol', '_matchfiles')
+      (list
+        (list
+          (list
+            ('string', 'r:')
+            ('string', 'd:relpath'))
+          ('string', 'p:a'))
+        ('string', 'p:c'))))
+
+Test multiple --include/--exclude/paths
+
+  $ testlog --include a --include e --exclude b --exclude e a e
+  []
+  (group
+    (func
+      ('symbol', '_matchfiles')
+      (list
+        (list
+          (list
+            (list
+              (list
+                (list
+                  (list
+                    ('string', 'r:')
+                    ('string', 'd:relpath'))
+                  ('string', 'p:a'))
+                ('string', 'p:e'))
+              ('string', 'i:a'))
+            ('string', 'i:e'))
+          ('string', 'x:b'))
+        ('string', 'x:e'))))
+
+Test glob expansion of pats
+
+  $ expandglobs=`python -c "import mercurial.util; \
+  >   print mercurial.util.expandglobs and 'true' or 'false'"`
+  $ if [ $expandglobs = "true" ]; then
+  >    testlog 'a*';
+  > else
+  >    testlog a*;
+  > fi;
+  []
+  (group
+    (group
+      (func
+        ('symbol', 'filelog')
+        ('string', 'aa'))))
+
+Test --follow on a directory
+
+  $ testlog -f dir
+  abort: cannot follow file not in parent revision: "dir"
+  abort: cannot follow file not in parent revision: "dir"
+  abort: cannot follow file not in parent revision: "dir"
+
+Test --follow on file not in parent revision
+
+  $ testlog -f a
+  abort: cannot follow file not in parent revision: "a"
+  abort: cannot follow file not in parent revision: "a"
+  abort: cannot follow file not in parent revision: "a"
+
+Test --follow and patterns
+
+  $ testlog -f 'glob:*'
+  abort: can only follow copies/renames for explicit filenames
+  abort: can only follow copies/renames for explicit filenames
+  abort: can only follow copies/renames for explicit filenames
+
+Test --follow on a single rename
+
+  $ hg up -q 2
+  $ testlog -f a
+  []
+  (group
+    (group
+      (func
+        ('symbol', 'follow')
+        ('string', 'a'))))
+
+Test --follow and multiple renames
+
+  $ hg up -q tip
+  $ testlog -f e
+  []
+  (group
+    (group
+      (func
+        ('symbol', 'follow')
+        ('string', 'e'))))
+
+Test --follow and multiple filelog heads
+
+  $ hg up -q 2
+  $ testlog -f g
+  []
+  (group
+    (group
+      (func
+        ('symbol', 'follow')
+        ('string', 'g'))))
+  $ cat log.nodes
+  nodetag 2
+  nodetag 1
+  nodetag 0
+  $ hg up -q tip
+  $ testlog -f g
+  []
+  (group
+    (group
+      (func
+        ('symbol', 'follow')
+        ('string', 'g'))))
+  $ cat log.nodes
+  nodetag 3
+  nodetag 2
+  nodetag 0
+
+Test --follow and multiple files
+
+  $ testlog -f g e
+  []
+  (group
+    (group
+      (or
+        (func
+          ('symbol', 'follow')
+          ('string', 'g'))
+        (func
+          ('symbol', 'follow')
+          ('string', 'e')))))
+  $ cat log.nodes
+  nodetag 4
+  nodetag 3
+  nodetag 2
+  nodetag 1
+  nodetag 0
+
+Test --follow-first
+
+  $ hg up -q 3
+  $ echo ee > e
+  $ hg ci -Am "add another e" e
+  created new head
+  $ hg merge --tool internal:other 4
+  0 files updated, 1 files merged, 1 files removed, 0 files unresolved
+  (branch merge, don't forget to commit)
+  $ echo merge > e
+  $ hg ci -m "merge 5 and 4"
+  $ testlog --follow-first
+  []
+  (group
+    (func
+      ('symbol', '_firstancestors')
+      ('symbol', '6')))
+
+Cannot compare with log --follow-first FILE as it never worked
+
+  $ hg log -G --print-revset --follow-first e
+  []
+  (group
+    (group
+      (func
+        ('symbol', '_followfirst')
+        ('string', 'e'))))
+  $ hg log -G --follow-first e --template '{rev} {desc|firstline}\n'
+  @    6 merge 5 and 4
+  |\
+  o |  5 add another e
   | |
-  $ hg log -G --follow a
-  abort: -G/--graph option is incompatible with --follow with file argument
-  [255]
+
+Test --copies
 
-Test multiple revision specifications are correctly handled
-
-  $ hg log -G -r 27 -r 25 -r 21 -r 34 -r 32 -r 31 --template '{rev}\n'
-  o  34
-  |
-  o    32
+  $ hg log -G --copies --template "{rev} {desc|firstline} \
+  >   copies: {file_copies_switch}\n"
+  @    6 merge 5 and 4   copies:
   |\
-  | o    31
-  | |\
-  o | |  27
-  |/ /
-  | o  25
+  | o  5 add another e   copies:
+  | |
+  o |  4 mv dir/b e   copies: e (dir/b)
   |/
-  o    21
+  o  3 mv a b; add d   copies: b (a)g (f)
+  |
+  o  2 mv b dir/b   copies: dir/b (b)
+  |
+  o  1 copy a b   copies: b (a)g (f)
+  |
+  o  0 add a   copies:
+  
+Test "set:..." and parent revision
+
+  $ hg up -q 4
+  $ testlog "set:copied()"
+  []
+  (group
+    (func
+      ('symbol', '_matchfiles')
+      (list
+        (list
+          ('string', 'r:')
+          ('string', 'd:relpath'))
+        ('string', 'p:set:copied()'))))
+  $ testlog --include "set:copied()"
+  []
+  (group
+    (func
+      ('symbol', '_matchfiles')
+      (list
+        (list
+          ('string', 'r:')
+          ('string', 'd:relpath'))
+        ('string', 'i:set:copied()'))))
+  $ testlog -r "sort(file('set:copied()'), -rev)"
+  ["sort(file('set:copied()'), -rev)"]
+  []
+
+Test --removed
+
+  $ testlog --removed
+  []
+  []
+  $ testlog --removed a
+  []
+  (group
+    (func
+      ('symbol', '_matchfiles')
+      (list
+        (list
+          ('string', 'r:')
+          ('string', 'd:relpath'))
+        ('string', 'p:a'))))
+  $ testlog --removed --follow a
+  abort: can only follow copies/renames for explicit filenames
+  abort: can only follow copies/renames for explicit filenames
+  abort: can only follow copies/renames for explicit filenames
+
+Test --patch and --stat with --follow and --follow-first
+
+  $ hg up -q 3
+  $ hg log -G --git --patch b
+  o  changeset:   1:216d4c92cf98
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     copy a b
+  |
+  |  diff --git a/a b/b
+  |  copy from a
+  |  copy to b
+  |
+
+  $ hg log -G --git --stat b
+  o  changeset:   1:216d4c92cf98
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     copy a b
+  |
+  |   a |  0
+  |   1 files changed, 0 insertions(+), 0 deletions(-)
+  |
+
+  $ hg log -G --git --patch --follow b
+  o  changeset:   1:216d4c92cf98
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     copy a b
+  |
+  |  diff --git a/a b/b
+  |  copy from a
+  |  copy to b
+  |
+  o  changeset:   0:f8035bb17114
+     user:        test
+     date:        Thu Jan 01 00:00:00 1970 +0000
+     summary:     add a
+  
+     diff --git a/a b/a
+     new file mode 100644
+     --- /dev/null
+     +++ b/a
+     @@ -0,0 +1,1 @@
+     +a
+  
+
+  $ hg log -G --git --stat --follow b
+  o  changeset:   1:216d4c92cf98
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     copy a b
+  |
+  |   a |  0
+  |   1 files changed, 0 insertions(+), 0 deletions(-)
+  |
+  o  changeset:   0:f8035bb17114
+     user:        test
+     date:        Thu Jan 01 00:00:00 1970 +0000
+     summary:     add a
+  
+      a |  1 +
+      1 files changed, 1 insertions(+), 0 deletions(-)
+  
+
+  $ hg up -q 6
+  $ hg log -G --git --patch --follow-first e
+  @    changeset:   6:fc281d8ff18d
+  |\   tag:         tip
+  | |  parent:      5:99b31f1c2782
+  | |  parent:      4:17d952250a9d
+  | |  user:        test
+  | |  date:        Thu Jan 01 00:00:00 1970 +0000
+  | |  summary:     merge 5 and 4
+  | |
+  | |  diff --git a/e b/e
+  | |  --- a/e
+  | |  +++ b/e
+  | |  @@ -1,1 +1,1 @@
+  | |  -ee
+  | |  +merge
+  | |
+  o |  changeset:   5:99b31f1c2782
+  | |  parent:      3:5918b8d165d1
+  | |  user:        test
+  | |  date:        Thu Jan 01 00:00:00 1970 +0000
+  | |  summary:     add another e
+  | |
+  | |  diff --git a/e b/e
+  | |  new file mode 100644
+  | |  --- /dev/null
+  | |  +++ b/e
+  | |  @@ -0,0 +1,1 @@
+  | |  +ee
+  | |
+
+Test old-style --rev
+
+  $ hg tag 'foo-bar'
+  $ testlog -r 'foo-bar'
+  ['foo-bar']
+  []
+
+Test --follow and forward --rev
+
+  $ hg up -q 6
+  $ echo g > g
+  $ hg ci -Am 'add g' g
+  created new head
+  $ hg up -q 2
+  $ hg log -G --template "{rev} {desc|firstline}\n"
+  o  8 add g
+  |
+  | o  7 Added tag foo-bar for changeset fc281d8ff18d
+  |/
+  o    6 merge 5 and 4
   |\
+  | o  5 add another e
+  | |
+  o |  4 mv dir/b e
+  |/
+  o  3 mv a b; add d
+  |
+  @  2 mv b dir/b
+  |
+  o  1 copy a b
+  |
+  o  0 add a
+  
+  $ testlog --follow -r6 -r8 -r5 -r7 -r4
+  ['6', '8', '5', '7', '4']
+  (group
+    (func
+      ('symbol', 'descendants')
+      ('symbol', '6')))
+  --- log.nodes	* (glob)
+  +++ glog.nodes	* (glob)
+  @@ -1,3 +1,3 @@
+  -nodetag 6
+   nodetag 8
+   nodetag 7
+  +nodetag 6
+  [1]
+
+Test --follow-first and forward --rev
+
+  $ testlog --follow-first -r6 -r8 -r5 -r7 -r4
+  ['6', '8', '5', '7', '4']
+  (group
+    (func
+      ('symbol', '_firstdescendants')
+      ('symbol', '6')))
+  --- log.nodes	* (glob)
+  +++ glog.nodes	* (glob)
+  @@ -1,3 +1,3 @@
+  -nodetag 6
+   nodetag 8
+   nodetag 7
+  +nodetag 6
+  [1]
+
+Test --follow and backward --rev
+
+  $ testlog --follow -r6 -r5 -r7 -r8 -r4
+  ['6', '5', '7', '8', '4']
+  (group
+    (func
+      ('symbol', 'ancestors')
+      ('symbol', '6')))
+
+Test --follow-first and backward --rev
+
+  $ testlog --follow-first -r6 -r5 -r7 -r8 -r4
+  ['6', '5', '7', '8', '4']
+  (group
+    (func
+      ('symbol', '_firstancestors')
+      ('symbol', '6')))
+
+Test subdir
+
+  $ hg up -q 3
+  $ cd dir
+  $ testlog .
+  []
+  (group
+    (func
+      ('symbol', '_matchfiles')
+      (list
+        (list
+          ('string', 'r:')
+          ('string', 'd:relpath'))
+        ('string', 'p:.'))))
+  $ testlog ../b
+  []
+  (group
+    (group
+      (func
+        ('symbol', 'filelog')
+        ('string', '../b'))))
+  $ testlog -f ../b
+  []
+  (group
+    (group
+      (func
+        ('symbol', 'follow')
+        ('string', 'b'))))
+  $ cd ..
+
+Test --hidden
+
+  $ cat > $HGTMP/testhidden.py << EOF
+  > def reposetup(ui, repo):
+  >     for line in repo.opener('hidden'):
+  >         ctx = repo[line.strip()]
+  >         repo.changelog.hiddenrevs.add(ctx.rev())
+  > EOF
+  $ echo '[extensions]' >> .hg/hgrc
+  $ echo "hidden=$HGTMP/testhidden.py" >> .hg/hgrc
+  $ hg id --debug -i -r 0 > .hg/hidden
+  $ testlog
+  []
+  []
+  $ testlog --hidden
+  []
+  []
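
The --hidden test above installs a throwaway extension through a heredoc. Purely as an illustration of what that inline snippet does (a commented sketch mirroring the code in the hunk above, not something added by this changeset), the extension boils down to:

    # testhidden.py - hide the revisions whose nodes are listed in .hg/hidden
    def reposetup(ui, repo):
        # .hg/hidden is written by the test with
        # "hg id --debug -i -r 0 > .hg/hidden" (one full node hash per line)
        for line in repo.opener('hidden'):
            ctx = repo[line.strip()]
            # revisions in changelog.hiddenrevs are skipped by log/graphlog
            # unless --hidden is passed on the command line
            repo.changelog.hiddenrevs.add(ctx.rev())
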
--- a/tests/test-graft.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-graft.t	Tue Apr 17 17:56:36 2012 -0500
@@ -107,6 +107,14 @@
 
 Graft out of order, skipping a merge and a duplicate
 
+  $ hg graft 1 5 4 3 'merge()' 2 -n
+  skipping ungraftable merge revision 6
+  skipping already grafted revision 2
+  grafting revision 1
+  grafting revision 5
+  grafting revision 4
+  grafting revision 3
+
   $ hg graft 1 5 4 3 'merge()' 2 --debug
   skipping ungraftable merge revision 6
   scanning for duplicate grafts
@@ -114,7 +122,6 @@
   grafting revision 1
     searching for copies back to rev 1
     unmatched files in local:
-     a.orig
      b
     all copies found (* = to merge, ! = divergent):
      b -> a *
@@ -130,8 +137,6 @@
    b: copy a:b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3
   grafting revision 5
     searching for copies back to rev 1
-    unmatched files in local:
-     a.orig
   resolving manifests
    overwrite: False, partial: False
    ancestor: 4c60f11aa304, local: 6f5ea6ac8b70+, remote: 97f8bfe72746
@@ -141,8 +146,6 @@
   e
   grafting revision 4
     searching for copies back to rev 1
-    unmatched files in local:
-     a.orig
   resolving manifests
    overwrite: False, partial: False
    ancestor: 4c60f11aa304, local: 77eb504366ab+, remote: 9c233e8e184d
--- a/tests/test-hgweb-commands.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-hgweb-commands.t	Tue Apr 17 17:56:36 2012 -0500
@@ -24,6 +24,14 @@
   marked working directory as branch stable
   (branches are permanent and global, did you want a bookmark?)
   $ hg ci -Ambranch
+  $ hg branch unstable
+  marked working directory as branch unstable
+  (branches are permanent and global, did you want a bookmark?)
+  $ hg ci -Ambranch
+  $ echo [graph] >> .hg/hgrc
+  $ echo default.width = 3 >> .hg/hgrc
+  $ echo stable.width = 3 >> .hg/hgrc
+  $ echo stable.color = FF0000 >> .hg/hgrc
   $ hg serve --config server.uncompressed=False -n test -p $HGPORT -d --pid-file=hg.pid -E errors.log
   $ cat hg.pid >> $DAEMON_PIDS
 
@@ -43,6 +51,22 @@
   
    <entry>
     <title>branch</title>
+    <id>http://*:$HGPORT/#changeset-ba87b23d29ca67a305625d81a20ac279c1e3f444</id> (glob)
+    <link href="http://*:$HGPORT/rev/ba87b23d29ca"/> (glob)
+    <author>
+     <name>test</name>
+     <email>&#116;&#101;&#115;&#116;</email>
+    </author>
+    <updated>1970-01-01T00:00:00+00:00</updated>
+    <published>1970-01-01T00:00:00+00:00</published>
+    <content type="xhtml">
+     <div xmlns="http://www.w3.org/1999/xhtml">
+      <pre xml:space="preserve">branch</pre>
+     </div>
+    </content>
+   </entry>
+   <entry>
+    <title>branch</title>
     <id>http://*:$HGPORT/#changeset-1d22e65f027e5a0609357e7d8e7508cd2ba5d2fe</id> (glob)
     <link href="http://*:$HGPORT/rev/1d22e65f027e"/> (glob)
     <author>
@@ -105,6 +129,22 @@
   
    <entry>
     <title>branch</title>
+    <id>http://*:$HGPORT/#changeset-ba87b23d29ca67a305625d81a20ac279c1e3f444</id> (glob)
+    <link href="http://*:$HGPORT/rev/ba87b23d29ca"/> (glob)
+    <author>
+     <name>test</name>
+     <email>&#116;&#101;&#115;&#116;</email>
+    </author>
+    <updated>1970-01-01T00:00:00+00:00</updated>
+    <published>1970-01-01T00:00:00+00:00</published>
+    <content type="xhtml">
+     <div xmlns="http://www.w3.org/1999/xhtml">
+      <pre xml:space="preserve">branch</pre>
+     </div>
+    </content>
+   </entry>
+   <entry>
+    <title>branch</title>
     <id>http://*:$HGPORT/#changeset-1d22e65f027e5a0609357e7d8e7508cd2ba5d2fe</id> (glob)
     <link href="http://*:$HGPORT/rev/1d22e65f027e"/> (glob)
     <author>
@@ -208,14 +248,14 @@
   </div>
   <ul>
   <li class="active">log</li>
-  <li><a href="/graph/1d22e65f027e">graph</a></li>
+  <li><a href="/graph/ba87b23d29ca">graph</a></li>
   <li><a href="/tags">tags</a></li>
   <li><a href="/bookmarks">bookmarks</a></li>
   <li><a href="/branches">branches</a></li>
   </ul>
   <ul>
-  <li><a href="/rev/1d22e65f027e">changeset</a></li>
-  <li><a href="/file/1d22e65f027e">browse</a></li>
+  <li><a href="/rev/ba87b23d29ca">changeset</a></li>
+  <li><a href="/file/ba87b23d29ca">browse</a></li>
   </ul>
   <ul>
   
@@ -237,9 +277,9 @@
   </form>
   
   <div class="navigate">
-  <a href="/shortlog/2?revcount=30">less</a>
-  <a href="/shortlog/2?revcount=120">more</a>
-  | rev 2: <a href="/shortlog/2ef0ac749a14">(0)</a> <a href="/shortlog/tip">tip</a> 
+  <a href="/shortlog/3?revcount=30">less</a>
+  <a href="/shortlog/3?revcount=120">more</a>
+  | rev 3: <a href="/shortlog/2ef0ac749a14">(0)</a> <a href="/shortlog/tip">tip</a> 
   </div>
   
   <table class="bigtable">
@@ -251,14 +291,19 @@
    <tr class="parity0">
     <td class="age">Thu, 01 Jan 1970 00:00:00 +0000</td>
     <td class="author">test</td>
-    <td class="description"><a href="/rev/1d22e65f027e">branch</a><span class="branchhead">stable</span> <span class="tag">tip</span> <span class="tag">something</span> </td>
+    <td class="description"><a href="/rev/ba87b23d29ca">branch</a><span class="branchhead">unstable</span> <span class="tag">tip</span> <span class="tag">something</span> </td>
    </tr>
    <tr class="parity1">
     <td class="age">Thu, 01 Jan 1970 00:00:00 +0000</td>
     <td class="author">test</td>
+    <td class="description"><a href="/rev/1d22e65f027e">branch</a><span class="branchhead">stable</span> </td>
+   </tr>
+   <tr class="parity0">
+    <td class="age">Thu, 01 Jan 1970 00:00:00 +0000</td>
+    <td class="author">test</td>
     <td class="description"><a href="/rev/a4f92ed23982">Added tag 1.0 for changeset 2ef0ac749a14</a><span class="branchhead">default</span> </td>
    </tr>
-   <tr class="parity0">
+   <tr class="parity1">
     <td class="age">Thu, 01 Jan 1970 00:00:00 +0000</td>
     <td class="author">test</td>
     <td class="description"><a href="/rev/2ef0ac749a14">base</a><span class="tag">1.0</span> <span class="tag">anotherthing</span> </td>
@@ -267,9 +312,9 @@
   </table>
   
   <div class="navigate">
-  <a href="/shortlog/2?revcount=30">less</a>
-  <a href="/shortlog/2?revcount=120">more</a>
-  | rev 2: <a href="/shortlog/2ef0ac749a14">(0)</a> <a href="/shortlog/tip">tip</a> 
+  <a href="/shortlog/3?revcount=30">less</a>
+  <a href="/shortlog/3?revcount=120">more</a>
+  | rev 3: <a href="/shortlog/2ef0ac749a14">(0)</a> <a href="/shortlog/tip">tip</a> 
   </div>
   
   </div>
@@ -637,18 +682,19 @@
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/raw-tags'
   200 Script output follows
   
-  tip	1d22e65f027e5a0609357e7d8e7508cd2ba5d2fe
+  tip	ba87b23d29ca67a305625d81a20ac279c1e3f444
   1.0	2ef0ac749a14e4f57a5a822464a0902c6f7f448f
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/raw-branches'
   200 Script output follows
   
-  stable	1d22e65f027e5a0609357e7d8e7508cd2ba5d2fe	open
+  unstable	ba87b23d29ca67a305625d81a20ac279c1e3f444	open
+  stable	1d22e65f027e5a0609357e7d8e7508cd2ba5d2fe	inactive
   default	a4f92ed23982be056b9852de5dfe873eaac7f0de	inactive
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/raw-bookmarks'
   200 Script output follows
   
   anotherthing	2ef0ac749a14e4f57a5a822464a0902c6f7f448f
-  something	1d22e65f027e5a0609357e7d8e7508cd2ba5d2fe
+  something	ba87b23d29ca67a305625d81a20ac279c1e3f444
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/summary/?style=gitweb'
   200 Script output follows
   
@@ -688,7 +734,7 @@
   <a href="/tags?style=gitweb">tags</a> |
   <a href="/bookmarks?style=gitweb">bookmarks</a> |
   <a href="/branches?style=gitweb">branches</a> |
-  <a href="/file/1d22e65f027e?style=gitweb">files</a> |
+  <a href="/file/ba87b23d29ca?style=gitweb">files</a> |
   <a href="/help?style=gitweb">help</a>
   <br/>
   </div>
@@ -707,9 +753,23 @@
   <td class="age"><i class="age">Thu, 01 Jan 1970 00:00:00 +0000</i></td>
   <td><i>test</i></td>
   <td>
+  <a class="list" href="/rev/ba87b23d29ca?style=gitweb">
+  <b>branch</b>
+  <span class="logtags"><span class="branchtag" title="unstable">unstable</span> <span class="tagtag" title="tip">tip</span> <span class="bookmarktag" title="something">something</span> </span>
+  </a>
+  </td>
+  <td class="link" nowrap>
+  <a href="/rev/ba87b23d29ca?style=gitweb">changeset</a> |
+  <a href="/file/ba87b23d29ca?style=gitweb">files</a>
+  </td>
+  </tr>
+  <tr class="parity1">
+  <td class="age"><i class="age">Thu, 01 Jan 1970 00:00:00 +0000</i></td>
+  <td><i>test</i></td>
+  <td>
   <a class="list" href="/rev/1d22e65f027e?style=gitweb">
   <b>branch</b>
-  <span class="logtags"><span class="branchtag" title="stable">stable</span> <span class="tagtag" title="tip">tip</span> <span class="bookmarktag" title="something">something</span> </span>
+  <span class="logtags"><span class="branchtag" title="stable">stable</span> </span>
   </a>
   </td>
   <td class="link" nowrap>
@@ -717,7 +777,7 @@
   <a href="/file/1d22e65f027e?style=gitweb">files</a>
   </td>
   </tr>
-  <tr class="parity1">
+  <tr class="parity0">
   <td class="age"><i class="age">Thu, 01 Jan 1970 00:00:00 +0000</i></td>
   <td><i>test</i></td>
   <td>
@@ -731,7 +791,7 @@
   <a href="/file/a4f92ed23982?style=gitweb">files</a>
   </td>
   </tr>
-  <tr class="parity0">
+  <tr class="parity1">
   <td class="age"><i class="age">Thu, 01 Jan 1970 00:00:00 +0000</i></td>
   <td><i>test</i></td>
   <td>
@@ -777,11 +837,11 @@
   </tr>
   <tr class="parity1">
   <td class="age"><i class="age">Thu, 01 Jan 1970 00:00:00 +0000</i></td>
-  <td><a class="list" href="/rev/1d22e65f027e?style=gitweb"><b>something</b></a></td>
+  <td><a class="list" href="/rev/ba87b23d29ca?style=gitweb"><b>something</b></a></td>
   <td class="link">
-  <a href="/rev/1d22e65f027e?style=gitweb">changeset</a> |
-  <a href="/log/1d22e65f027e?style=gitweb">changelog</a> |
-  <a href="/file/1d22e65f027e?style=gitweb">files</a>
+  <a href="/rev/ba87b23d29ca?style=gitweb">changeset</a> |
+  <a href="/log/ba87b23d29ca?style=gitweb">changelog</a> |
+  <a href="/file/ba87b23d29ca?style=gitweb">files</a>
   </td>
   </tr>
   <tr class="light"><td colspan="3"><a class="list" href="/bookmarks?style=gitweb">...</a></td></tr>
@@ -792,6 +852,16 @@
   
   <tr class="parity0">
   <td class="age"><i class="age">Thu, 01 Jan 1970 00:00:00 +0000</i></td>
+  <td><a class="list" href="/shortlog/ba87b23d29ca?style=gitweb"><b>ba87b23d29ca</b></a></td>
+  <td class="">unstable</td>
+  <td class="link">
+  <a href="/changeset/ba87b23d29ca?style=gitweb">changeset</a> |
+  <a href="/log/ba87b23d29ca?style=gitweb">changelog</a> |
+  <a href="/file/ba87b23d29ca?style=gitweb">files</a>
+  </td>
+  </tr>
+  <tr class="parity1">
+  <td class="age"><i class="age">Thu, 01 Jan 1970 00:00:00 +0000</i></td>
   <td><a class="list" href="/shortlog/1d22e65f027e?style=gitweb"><b>1d22e65f027e</b></a></td>
   <td class="">stable</td>
   <td class="link">
@@ -800,7 +870,7 @@
   <a href="/file/1d22e65f027e?style=gitweb">files</a>
   </td>
   </tr>
-  <tr class="parity1">
+  <tr class="parity0">
   <td class="age"><i class="age">Thu, 01 Jan 1970 00:00:00 +0000</i></td>
   <td><a class="list" href="/shortlog/a4f92ed23982?style=gitweb"><b>a4f92ed23982</b></a></td>
   <td class="">default</td>
@@ -861,17 +931,17 @@
   <div class="page_nav">
   <a href="/summary?style=gitweb">summary</a> |
   <a href="/shortlog?style=gitweb">shortlog</a> |
-  <a href="/log/2?style=gitweb">changelog</a> |
+  <a href="/log/3?style=gitweb">changelog</a> |
   graph |
   <a href="/tags?style=gitweb">tags</a> |
   <a href="/bookmarks?style=gitweb">bookmarks</a> |
   <a href="/branches?style=gitweb">branches</a> |
-  <a href="/file/1d22e65f027e?style=gitweb">files</a> |
+  <a href="/file/ba87b23d29ca?style=gitweb">files</a> |
   <a href="/help?style=gitweb">help</a>
   <br/>
-  <a href="/graph/2?style=gitweb&revcount=30">less</a>
-  <a href="/graph/2?style=gitweb&revcount=120">more</a>
-  | <a href="/graph/2ef0ac749a14?style=gitweb">(0)</a> <a href="/graph/2ef0ac749a14?style=gitweb">-2</a> <a href="/graph/tip?style=gitweb">tip</a> <br/>
+  <a href="/graph/3?style=gitweb&revcount=30">less</a>
+  <a href="/graph/3?style=gitweb&revcount=120">more</a>
+  | <a href="/graph/2ef0ac749a14?style=gitweb">(0)</a> <a href="/graph/2ef0ac749a14?style=gitweb">-3</a> <a href="/graph/tip?style=gitweb">tip</a> <br/>
   </div>
   
   <div class="title">&nbsp;</div>
@@ -880,27 +950,17 @@
   
   <div id="wrapper">
   <ul id="nodebgs"></ul>
-  <canvas id="graph" width="480" height="129"></canvas>
+  <canvas id="graph" width="480" height="168"></canvas>
   <ul id="graphnodes"></ul>
   </div>
   
   <script>
   <!-- hide script content
   
-  var data = [["1d22e65f027e", [0, 1], [[0, 0, 1]], "branch", "test", "1970-01-01", ["stable", true], ["tip"], ["something"]], ["a4f92ed23982", [0, 1], [[0, 0, 1]], "Added tag 1.0 for changeset 2ef0ac749a14", "test", "1970-01-01", ["default", true], [], []], ["2ef0ac749a14", [0, 1], [], "base", "test", "1970-01-01", ["default", false], ["1.0"], ["anotherthing"]]];
+  var data = [["ba87b23d29ca", [0, 1], [[0, 0, 1, 3, "FF0000"]], "branch", "test", "1970-01-01", ["unstable", true], ["tip"], ["something"]], ["1d22e65f027e", [0, 1], [[0, 0, 1, 3, ""]], "branch", "test", "1970-01-01", ["stable", true], [], []], ["a4f92ed23982", [0, 1], [[0, 0, 1, 3, ""]], "Added tag 1.0 for changeset 2ef0ac749a14", "test", "1970-01-01", ["default", true], [], []], ["2ef0ac749a14", [0, 1], [], "base", "test", "1970-01-01", ["default", false], ["1.0"], ["anotherthing"]]];
   var graph = new Graph();
   graph.scale(39);
   
-  graph.edge = function(x0, y0, x1, y1, color) {
-  	
-  	this.setColor(color, 0.0, 0.65);
-  	this.ctx.beginPath();
-  	this.ctx.moveTo(x0, y0);
-  	this.ctx.lineTo(x1, y1);
-  	this.ctx.stroke();
-  	
-  }
-  
   var revlink = '<li style="_STYLE"><span class="desc">';
   revlink += '<a class="list" href="/rev/_NODEID?style=gitweb" title="_NODEID"><b>_DESC</b></a>';
   revlink += '</span> _TAGS';
@@ -960,9 +1020,9 @@
   </script>
   
   <div class="page_nav">
-  <a href="/graph/2?style=gitweb&revcount=30">less</a>
-  <a href="/graph/2?style=gitweb&revcount=120">more</a>
-  | <a href="/graph/2ef0ac749a14?style=gitweb">(0)</a> <a href="/graph/2ef0ac749a14?style=gitweb">-2</a> <a href="/graph/tip?style=gitweb">tip</a> 
+  <a href="/graph/3?style=gitweb&revcount=30">less</a>
+  <a href="/graph/3?style=gitweb&revcount=120">more</a>
+  | <a href="/graph/2ef0ac749a14?style=gitweb">(0)</a> <a href="/graph/2ef0ac749a14?style=gitweb">-3</a> <a href="/graph/tip?style=gitweb">tip</a> 
   </div>
   
   <script type="text/javascript">process_dates()</script>
@@ -991,7 +1051,7 @@
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=heads'
   200 Script output follows
   
-  1d22e65f027e5a0609357e7d8e7508cd2ba5d2fe
+  ba87b23d29ca67a305625d81a20ac279c1e3f444
 
 branches
 
@@ -1005,10 +1065,11 @@
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=changegroup&roots=0000000000000000000000000000000000000000'
   200 Script output follows
   
-  x\x9c\xbdTMHUA\x14\xbe\xa8\xf9\xec\xda&\x10\x11*\xb8\x88\x81\x99\xbef\xe6\xce\xbdw\xc6\xf2a\x16E\x1b\x11[%\x98\xcc\xaf\x8f\x8c\xf7\xc0\xf7\x82 (esc)
-  4\x11KP2m\x95\xad*\xabE\x05AP\xd0\xc22Z\x14\xf9\x03\xb9j\xa3\x9b$\xa4MJ\xb4\x90\xc0\x9a\x9bO0\x10\xdf\x13\xa2\x81\x0f\x869g\xe6|\xe7\x9c\xef\x8ceY\xf7\xa2KO\xd2\xb7K\x16~\\n\xe9\xad\x90w\x86\xab\x93W\x8e\xdf\xb0r\\Y\xee6(\xa2)\xf6\x95\xc6\x01\xe4\x1az\x80R\xe8kN\x98\xe7R\xa4\xa9K@\xe0!A\xb4k\xa7U*m\x03\x07\xd8\x92\x1d\xd2\xc9\xa4\x1d\xc2\xe6,\xa5\xcc+\x1f\xef\xafDgi\xef\xab\x1d\x1d\xb7\x9a\xe7[W\xfbc\x8f\xde-\xcd\xe7\xcaz\xb3\xbb\x19\xd3\x81\x10>c>\x08\x00"X\x11\xc2\x84@\xd2\xe7B*L\x00\x01P\x04R\xc3@\xbaB0\xdb8#\x83:\x83\xa2h\xbc=\xcd\xdaS\xe1Y,L\xd3\xa0\xf2\xa8\x94J:\xe6\xd8\x81Q\xe0\xe8d\xa7#\xe2,\xd1\xaeR*\xed \xa5\x01\x13\x01\xa6\x0cb\xe3;\xbe\xaf\xfcK[^wK\xe1N\xaf\xbbk\xe8B\xd1\xf4\xc1\x07\xb3\xab[\x10\xfdkmvwcB\xa6\xa4\xd4G\xc4D\xc2\x141\xad\x91\x10\x00\x08J\x81\xcb}\xee	\xee+W\xba\x8a\x80\x90|\xd4\xa0\xd6\xa0\xd4T\xde\xe1\x9d,!\xe2\xb5\xa94\xe3\xe7\xd5\x9f\x06\x18\xcba\x03aP\xb8f\xcd\x04\x1a_\\9\xf1\xed\xe4\x9e\xe5\xa6\xd1\xd2\x9f\x03\xa7o\xae\x90H\xf3\xfb\xef\xffH3\xadk (esc)
-  \xb0\x90\x92\x88\xb9\x14"\x068\xc2\x1e@\x00\xbb\x8a)\xd3'\x859 (esc)
-  \xa8\x80\x84S \xa5\xbd-g\x13`\xe4\xdc\xc3H^\xdf\xe2\xc0TM\xc7\xf4BO\xcf\xde\xae\xe5\xae#\x1frM(K\x97`F\x19\x16s\x05GD\xb9\x01\xc1\x00+\x8c|\x9fp\xc11\xf0\x14\x00\x9cJ\x82<\xe0\x12\x9f\xc1\x90\xd0\xf5\xc8\x19>Pr\xaa\xeaW\xf5\xc4\xae\xd1\xfc\x17\xcf'\x13u\xb1\x9e\xcdHnC\x0e\xcc`\xc8\xa0&\xac\x0e\xf1|\x8c\x10$\xc4\x8c\xa2p\x05`\xdc\x08 \x80\xc4\xd7Rr-\x94\x10\x102\xedi;\xf3f\xf1z\x16\x86\xdb\xd8d\xe5\xe7\x8b\xf5\x8d\rzp\xb2\xfe\xac\xf5\xf2\xd3\xfe\xfckws\xedt\x96b\xd5l\x1c\x0b\x85\xb5\x170\x8f\x11\x84\xb0\x8f\x19\xa0\x00	_\x07\x1ac\xa2\xc3\x89Z\xe7\x96\xf9 \xccNFg\xc7F\xaa\x8a+\x9a\x9cc_\x17\x1b\x17\x9e]z38<\x97+\xb5,",\xc8\xc8?\\\x91\xff\x17.~U\x96\x97\xf5%\xdeN<\x8e\xf5\x97%\xe7^\xcfL\xed~\xda\x96k\xdc->\x86\x02\x83"\x96H\xa6\xe3\xaas=-\xeb7\xe5\xda\x8f\xbc (no-eol) (esc)
+  x\x9c\xbdTMHTQ\x14\x1e\xfc\xef\xd9&\x10\x11*x\x88\x81\x9aN\xf7\xddw\xdf{\xf7Y\x0efR\xb4\x11\xb1U\x82\xc5\xfd\x9d!c\x06\x9c'd\xa0\x99X\x82\x92i\xablUZ-*\x08\x84\x82\x02KkQ\xf8\x13\xe4\xaa\x8dn\x94\x906)\xd5B\x02\xeb\xbe\x9c\x01\x85\xc9\x996\x1d\xf8x\x97{\xefy\xe7;\xe7|\xe7\x06\x02\x81\xb1\xe0\xda\x13\xefN\xd1\xca\x8f\xcb-\xbde\xfc\xeepU\xecJ\xc3\xcd@\x86\x96\xc6\xb7^`\xe9"[H\xe4\x18T\x1a\x16p]\xc3\x96\x14\x13\xcbt\xa1tM\x0c\x1c\x0b2,M\xcd\x13qO\x03:\xd089"c1\xcd\x87FI\\\xa8\xbf|\xbc\xbf\x11\\p{_\xe5\xb6\xddn^j\xdd\xec\x0f=z\xb7\xb6\x94)\xebT\xbe\x89\xa3 (esc)
+  \x1f6!6p\x00\xc4H`L\x18\x83\xdc\xa6\x8c\x0b\x84\x01\x06\x06s\xb84\x1cn2F4u\x19*\xd4*\x14\x04#a\x8f\x84\xe3\xfe^\xc8OS\xa1\xfc8\xe7\x82\xebj[7\x82@\x97\xb1v\x9dEH4,\xe2\xc2\xd3\xa1\x90\x800\x07\xb9\xc4@\xea\xee\xe4\xc1\xd2\xcf\xe7\xb3\xba[\xf2\xf6X\xdd]C\x1d\x05\xf3\x87\x1f,l\xeeBt\x87\xa5\xf2\xdd\x9e\x90*\xa9kC\xac"!\x17\x12)!c\x000\xd7\x05&\xb5\xa9\xc5\xa8-Ln (esc)
+  \x0c|\xf2A\x85\x1a\x85bUy\x9d\xb6\x93(\x8b\xd4\xc4=B/\x8a?\rP'G\x15\x98B\xde\xd6\xa9Zy/\xfb'j+f\xc2\xe3\xb9\xb4\xf5\xea\x98\xf6\xa6sz\xf9{\xc3.\xa4vX*\xdf\x04\x0f\xff[\xb4\x8dGG4\xc1$\xe1:\xb9\xbaq\xf2\xeb\xa9\xfd\xebM\xa3\xc5?\x07\xce\xdc\xda\xc0\xf9\xcd\xef\xbf\xa5\xd3g\xd2\xd2\xa8\xa5uKu\x01(8$\xa6k@\x02(D\x16\x80\x00\x99\x82\x08\xa5\r\x81(t\\f`\xea\x02\xce\xb5\x7f\xba\xac\x02\x8c\\x\x98\x9f\xd5\xb7:0W\xdd6\xbf\xd2\xd3s\xa0k\xbd\xeb\xd8L\xa6	\xa5Q\x86\x91Pc\x80\x98\x8cB,L\x07#\x80\x04\x82\xb6\x8d)\xa3\x08X\x02\x00\xear\x0c-`b\x9b\x18>\xa1\x1b\xf9g\xe9@\xd1\xe9\xca_US{G\xb3\x9f?\x9b\x8d\xd6\x86zR\x91LE\xe8/\xdd& (esc)
+  C
+  \xd5~u\xb0e#\x08\r\x8c\xd5\xf83\x93\x01B\x95\xe8\x1c\x03\xdb\x92s*\x99`\xcc0\x88\xb4d\xb2\xbd\x85\xc9,\x14\xb7\xf1\xd9\xf2\xe5Ku\x8d\xf5rp\xb6\xee\\\xe0\xc5\xa7C\xd9\xd7\xefe\xda\xe94\xc5\xaa\xde>\x8a\x02I\xcb!\x16\xc1\x10"\x1b\x11\xe0\x02\xc8l\xe9H\x84\xb0\xf4\xa78\xc9-\xf1(\xa9\x15\x0f.\x8c\x8fT\x16\x965\xe9'\xbe\xac6\xaeLtN\x0f\x0e/fJ-\x8d\x08s\x12#\xe7[\xfe\xff\x0b\x17\xb9\xc6KK\xfa\xa2o\xa7\x1e\x87\xfaKb\x8b\xaf?\xcc\xed{z>\xd3\xb8\xbb\xcc}\x8eB\x01\x89\xc6\xbc\x88hO\xa6\x15\xf8\rr4\xb3\xe5 (no-eol) (esc)
 
 stream_out
 
@@ -1137,10 +1198,10 @@
   	top: -1px;
   }
 
-Stop and restart with HGENCODING=cp932
+Stop and restart with HGENCODING=cp932 and preferuncompressed
 
   $ "$TESTDIR/killdaemons.py"
-  $ HGENCODING=cp932 hg serve --config server.uncompressed=False -n test \
+  $ HGENCODING=cp932 hg serve --config server.preferuncompressed=True -n test \
   >     -p $HGPORT -d --pid-file=hg.pid -E errors.log
   $ cat hg.pid >> $DAEMON_PIDS
 
@@ -1153,7 +1214,16 @@
 
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/graph/' \
   >     | grep '^var data ='
-  var data = [["40b4d6888e92", [0, 1], [[0, 0, 1]], "\u80fd", "test", "1970-01-01", ["stable", true], ["tip"], ["something"]], ["1d22e65f027e", [0, 1], [[0, 0, 1]], "branch", "test", "1970-01-01", ["stable", false], [], []], ["a4f92ed23982", [0, 1], [[0, 0, 1]], "Added tag 1.0 for changeset 2ef0ac749a14", "test", "1970-01-01", ["default", true], [], []], ["2ef0ac749a14", [0, 1], [], "base", "test", "1970-01-01", ["default", false], ["1.0"], ["anotherthing"]]];
+  var data = [["548001d11f45", [0, 1], [[0, 0, 1, -1, ""]], "\u80fd", "test", "1970-01-01", ["unstable", true], ["tip"], ["something"]], ["ba87b23d29ca", [0, 1], [[0, 0, 1, 3, "FF0000"]], "branch", "test", "1970-01-01", ["unstable", false], [], []], ["1d22e65f027e", [0, 1], [[0, 0, 1, 3, ""]], "branch", "test", "1970-01-01", ["stable", true], [], []], ["a4f92ed23982", [0, 1], [[0, 0, 1, 3, ""]], "Added tag 1.0 for changeset 2ef0ac749a14", "test", "1970-01-01", ["default", true], [], []], ["2ef0ac749a14", [0, 1], [], "base", "test", "1970-01-01", ["default", false], ["1.0"], ["anotherthing"]]];
+
+capabilities
+
+  $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=capabilities'; echo
+  200 Script output follows
+  
+  lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch stream-preferred stream unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024
+
+heads
 
 ERRORS ENCOUNTERED
 
--- a/tests/test-hgweb-diffs.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-hgweb-diffs.t	Tue Apr 17 17:56:36 2012 -0500
@@ -552,6 +552,51 @@
   $ cd test1
   $ hg import -q --exact http://localhost:$HGPORT/rev/1
 
+raw revision with diff block numbers
+
+  $ "$TESTDIR/killdaemons.py"
+  $ cat <<EOF > .hg/hgrc
+  > [web]
+  > templates = rawdiff
+  > EOF
+  $ mkdir rawdiff
+  $ cat <<EOF > rawdiff/map
+  > mimetype = 'text/plain; charset={encoding}'
+  > changeset = '{diff}'
+  > difflineplus = '{line}'
+  > difflineminus = '{line}'
+  > difflineat = '{line}'
+  > diffline = '{line}'
+  > filenodelink = ''
+  > filenolink = ''
+  > fileline = '{line}'
+  > diffblock = 'Block: {blockno}\n{lines}\n'
+  > EOF
+  $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
+  $ cat hg.pid >> $DAEMON_PIDS
+  $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT '/raw-rev/0'
+  200 Script output follows
+  
+  Block: 1
+  diff -r 000000000000 -r 0cd96de13884 a
+  --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+  +++ b/a	Thu Jan 01 00:00:00 1970 +0000
+  @@ -0,0 +1,1 @@
+  +a
+  
+  Block: 2
+  diff -r 000000000000 -r 0cd96de13884 b
+  --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+  +++ b/b	Thu Jan 01 00:00:00 1970 +0000
+  @@ -0,0 +1,1 @@
+  +b
+  
+  $ "$TESTDIR/killdaemons.py"
+  $ rm .hg/hgrc rawdiff/map
+  $ rmdir rawdiff
+  $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
+  $ cat hg.pid >> $DAEMON_PIDS
+
 errors
 
   $ cat ../test/errors.log
--- a/tests/test-hgweb-empty.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-hgweb-empty.t	Tue Apr 17 17:56:36 2012 -0500
@@ -250,16 +250,6 @@
   var graph = new Graph();
   graph.scale(39);
   
-  graph.edge = function(x0, y0, x1, y1, color) {
-  	
-  	this.setColor(color, 0.0, 0.65);
-  	this.ctx.beginPath();
-  	this.ctx.moveTo(x0, y0);
-  	this.ctx.lineTo(x1, y1);
-  	this.ctx.stroke();
-  	
-  }
-  
   var revlink = '<li style="_STYLE"><span class="desc">';
   revlink += '<a href="/rev/_NODEID" title="_NODEID">_DESC</a>';
   revlink += '</span>_TAGS<span class="info">_DATE, by _USER</span></li>';
--- a/tests/test-hgwebdir.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-hgwebdir.t	Tue Apr 17 17:56:36 2012 -0500
@@ -31,6 +31,28 @@
   $ hg --cwd c ci -Amc -d'3 0'
   adding c
 
+create a subdirectory containing repositories and subrepositories
+
+  $ mkdir notrepo
+  $ cd notrepo
+  $ hg init e
+  $ echo e > e/e
+  $ hg --cwd e ci -Ame -d'4 0'
+  adding e
+  $ hg init e/e2
+  $ echo e2 > e/e2/e2
+  $ hg --cwd e/e2 ci -Ame2 -d '4 0'
+  adding e2
+  $ hg init f
+  $ echo f > f/f
+  $ hg --cwd f ci -Amf -d'4 0'
+  adding f
+  $ hg init f/f2
+  $ echo f2 > f/f2/f2
+  $ hg --cwd f/f2 ci -Amf2 -d '4 0'
+  adding f2
+  $ cd ..
+
 create repository without .hg/store
 
   $ hg init nostore
@@ -119,20 +141,32 @@
   /coll/a/.hg/patches/
   /coll/b/
   /coll/c/
+  /coll/notrepo/e/
+  /coll/notrepo/f/
   /rcoll/a/
   /rcoll/a/.hg/patches/
   /rcoll/b/
   /rcoll/b/d/
   /rcoll/c/
+  /rcoll/notrepo/e/
+  /rcoll/notrepo/e/e2/
+  /rcoll/notrepo/f/
+  /rcoll/notrepo/f/f2/
   /star/webdir/a/
   /star/webdir/a/.hg/patches/
   /star/webdir/b/
   /star/webdir/c/
+  /star/webdir/notrepo/e/
+  /star/webdir/notrepo/f/
   /starstar/webdir/a/
   /starstar/webdir/a/.hg/patches/
   /starstar/webdir/b/
   /starstar/webdir/b/d/
   /starstar/webdir/c/
+  /starstar/webdir/notrepo/e/
+  /starstar/webdir/notrepo/e/e2/
+  /starstar/webdir/notrepo/f/
+  /starstar/webdir/notrepo/f/f2/
   /astar/
   /astar/.hg/patches/
   
@@ -217,6 +251,22 @@
   </tr>
   
   <tr class="parity0">
+  <td><a href="/coll/notrepo/e/?style=paper">coll/notrepo/e</a></td>
+  <td>unknown</td>
+  <td>&#70;&#111;&#111;&#32;&#66;&#97;&#114;&#32;&#60;&#102;&#111;&#111;&#46;&#98;&#97;&#114;&#64;&#101;&#120;&#97;&#109;&#112;&#108;&#101;&#46;&#99;&#111;&#109;&#62;</td>
+  <td class="age">*</td> (glob)
+  <td class="indexlinks"></td>
+  </tr>
+  
+  <tr class="parity1">
+  <td><a href="/coll/notrepo/f/?style=paper">coll/notrepo/f</a></td>
+  <td>unknown</td>
+  <td>&#70;&#111;&#111;&#32;&#66;&#97;&#114;&#32;&#60;&#102;&#111;&#111;&#46;&#98;&#97;&#114;&#64;&#101;&#120;&#97;&#109;&#112;&#108;&#101;&#46;&#99;&#111;&#109;&#62;</td>
+  <td class="age">*</td> (glob)
+  <td class="indexlinks"></td>
+  </tr>
+  
+  <tr class="parity0">
   <td><a href="/rcoll/a/?style=paper">rcoll/a</a></td>
   <td>unknown</td>
   <td>&#70;&#111;&#111;&#32;&#66;&#97;&#114;&#32;&#60;&#102;&#111;&#111;&#46;&#98;&#97;&#114;&#64;&#101;&#120;&#97;&#109;&#112;&#108;&#101;&#46;&#99;&#111;&#109;&#62;</td>
@@ -257,6 +307,38 @@
   </tr>
   
   <tr class="parity1">
+  <td><a href="/rcoll/notrepo/e/?style=paper">rcoll/notrepo/e</a></td>
+  <td>unknown</td>
+  <td>&#70;&#111;&#111;&#32;&#66;&#97;&#114;&#32;&#60;&#102;&#111;&#111;&#46;&#98;&#97;&#114;&#64;&#101;&#120;&#97;&#109;&#112;&#108;&#101;&#46;&#99;&#111;&#109;&#62;</td>
+  <td class="age">*</td> (glob)
+  <td class="indexlinks"></td>
+  </tr>
+  
+  <tr class="parity0">
+  <td><a href="/rcoll/notrepo/e/e2/?style=paper">rcoll/notrepo/e/e2</a></td>
+  <td>unknown</td>
+  <td>&#70;&#111;&#111;&#32;&#66;&#97;&#114;&#32;&#60;&#102;&#111;&#111;&#46;&#98;&#97;&#114;&#64;&#101;&#120;&#97;&#109;&#112;&#108;&#101;&#46;&#99;&#111;&#109;&#62;</td>
+  <td class="age">*</td> (glob)
+  <td class="indexlinks"></td>
+  </tr>
+  
+  <tr class="parity1">
+  <td><a href="/rcoll/notrepo/f/?style=paper">rcoll/notrepo/f</a></td>
+  <td>unknown</td>
+  <td>&#70;&#111;&#111;&#32;&#66;&#97;&#114;&#32;&#60;&#102;&#111;&#111;&#46;&#98;&#97;&#114;&#64;&#101;&#120;&#97;&#109;&#112;&#108;&#101;&#46;&#99;&#111;&#109;&#62;</td>
+  <td class="age">*</td> (glob)
+  <td class="indexlinks"></td>
+  </tr>
+  
+  <tr class="parity0">
+  <td><a href="/rcoll/notrepo/f/f2/?style=paper">rcoll/notrepo/f/f2</a></td>
+  <td>unknown</td>
+  <td>&#70;&#111;&#111;&#32;&#66;&#97;&#114;&#32;&#60;&#102;&#111;&#111;&#46;&#98;&#97;&#114;&#64;&#101;&#120;&#97;&#109;&#112;&#108;&#101;&#46;&#99;&#111;&#109;&#62;</td>
+  <td class="age">*</td> (glob)
+  <td class="indexlinks"></td>
+  </tr>
+  
+  <tr class="parity1">
   <td><a href="/star/webdir/a/?style=paper">star/webdir/a</a></td>
   <td>unknown</td>
   <td>&#70;&#111;&#111;&#32;&#66;&#97;&#114;&#32;&#60;&#102;&#111;&#111;&#46;&#98;&#97;&#114;&#64;&#101;&#120;&#97;&#109;&#112;&#108;&#101;&#46;&#99;&#111;&#109;&#62;</td>
@@ -289,6 +371,22 @@
   </tr>
   
   <tr class="parity1">
+  <td><a href="/star/webdir/notrepo/e/?style=paper">star/webdir/notrepo/e</a></td>
+  <td>unknown</td>
+  <td>&#70;&#111;&#111;&#32;&#66;&#97;&#114;&#32;&#60;&#102;&#111;&#111;&#46;&#98;&#97;&#114;&#64;&#101;&#120;&#97;&#109;&#112;&#108;&#101;&#46;&#99;&#111;&#109;&#62;</td>
+  <td class="age">*</td> (glob)
+  <td class="indexlinks"></td>
+  </tr>
+  
+  <tr class="parity0">
+  <td><a href="/star/webdir/notrepo/f/?style=paper">star/webdir/notrepo/f</a></td>
+  <td>unknown</td>
+  <td>&#70;&#111;&#111;&#32;&#66;&#97;&#114;&#32;&#60;&#102;&#111;&#111;&#46;&#98;&#97;&#114;&#64;&#101;&#120;&#97;&#109;&#112;&#108;&#101;&#46;&#99;&#111;&#109;&#62;</td>
+  <td class="age">*</td> (glob)
+  <td class="indexlinks"></td>
+  </tr>
+  
+  <tr class="parity1">
   <td><a href="/starstar/webdir/a/?style=paper">starstar/webdir/a</a></td>
   <td>unknown</td>
   <td>&#70;&#111;&#111;&#32;&#66;&#97;&#114;&#32;&#60;&#102;&#111;&#111;&#46;&#98;&#97;&#114;&#64;&#101;&#120;&#97;&#109;&#112;&#108;&#101;&#46;&#99;&#111;&#109;&#62;</td>
@@ -329,6 +427,38 @@
   </tr>
   
   <tr class="parity0">
+  <td><a href="/starstar/webdir/notrepo/e/?style=paper">starstar/webdir/notrepo/e</a></td>
+  <td>unknown</td>
+  <td>&#70;&#111;&#111;&#32;&#66;&#97;&#114;&#32;&#60;&#102;&#111;&#111;&#46;&#98;&#97;&#114;&#64;&#101;&#120;&#97;&#109;&#112;&#108;&#101;&#46;&#99;&#111;&#109;&#62;</td>
+  <td class="age">*</td> (glob)
+  <td class="indexlinks"></td>
+  </tr>
+  
+  <tr class="parity1">
+  <td><a href="/starstar/webdir/notrepo/e/e2/?style=paper">starstar/webdir/notrepo/e/e2</a></td>
+  <td>unknown</td>
+  <td>&#70;&#111;&#111;&#32;&#66;&#97;&#114;&#32;&#60;&#102;&#111;&#111;&#46;&#98;&#97;&#114;&#64;&#101;&#120;&#97;&#109;&#112;&#108;&#101;&#46;&#99;&#111;&#109;&#62;</td>
+  <td class="age">*</td> (glob)
+  <td class="indexlinks"></td>
+  </tr>
+  
+  <tr class="parity0">
+  <td><a href="/starstar/webdir/notrepo/f/?style=paper">starstar/webdir/notrepo/f</a></td>
+  <td>unknown</td>
+  <td>&#70;&#111;&#111;&#32;&#66;&#97;&#114;&#32;&#60;&#102;&#111;&#111;&#46;&#98;&#97;&#114;&#64;&#101;&#120;&#97;&#109;&#112;&#108;&#101;&#46;&#99;&#111;&#109;&#62;</td>
+  <td class="age">*</td> (glob)
+  <td class="indexlinks"></td>
+  </tr>
+  
+  <tr class="parity1">
+  <td><a href="/starstar/webdir/notrepo/f/f2/?style=paper">starstar/webdir/notrepo/f/f2</a></td>
+  <td>unknown</td>
+  <td>&#70;&#111;&#111;&#32;&#66;&#97;&#114;&#32;&#60;&#102;&#111;&#111;&#46;&#98;&#97;&#114;&#64;&#101;&#120;&#97;&#109;&#112;&#108;&#101;&#46;&#99;&#111;&#109;&#62;</td>
+  <td class="age">*</td> (glob)
+  <td class="indexlinks"></td>
+  </tr>
+  
+  <tr class="parity0">
   <td><a href="/astar/?style=paper">astar</a></td>
   <td>unknown</td>
   <td>&#70;&#111;&#111;&#32;&#66;&#97;&#114;&#32;&#60;&#102;&#111;&#111;&#46;&#98;&#97;&#114;&#64;&#101;&#120;&#97;&#109;&#112;&#108;&#101;&#46;&#99;&#111;&#109;&#62;</td>
@@ -489,6 +619,8 @@
   /coll/a/.hg/patches/
   /coll/b/
   /coll/c/
+  /coll/notrepo/e/
+  /coll/notrepo/f/
   
   $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/coll/a/file/tip/a?style=raw'
   200 Script output follows
@@ -506,12 +638,140 @@
   /rcoll/b/
   /rcoll/b/d/
   /rcoll/c/
+  /rcoll/notrepo/e/
+  /rcoll/notrepo/e/e2/
+  /rcoll/notrepo/f/
+  /rcoll/notrepo/f/f2/
+  
+  $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/rcoll/b/d/file/tip/d?style=raw'
+  200 Script output follows
+  
+  d
+
+Test collapse = True
+
+  $ "$TESTDIR/killdaemons.py"
+  $ cat >> paths.conf <<EOF
+  > [web]
+  > collapse=true
+  > EOF
+  $ hg serve -p $HGPORT1 -d --pid-file=hg.pid --webdir-conf paths.conf \
+  >     -A access-paths.log -E error-paths-3.log
+  $ cat hg.pid >> $DAEMON_PIDS
+  $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/coll/?style=raw'
+  200 Script output follows
+  
+  
+  /coll/a/
+  /coll/a/.hg/patches/
+  /coll/b/
+  /coll/c/
+  /coll/notrepo/
+  
+  $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/coll/a/file/tip/a?style=raw'
+  200 Script output follows
+  
+  a
+  $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/rcoll/?style=raw'
+  200 Script output follows
+  
+  
+  /rcoll/a/
+  /rcoll/a/.hg/patches/
+  /rcoll/b/
+  /rcoll/b/d/
+  /rcoll/c/
+  /rcoll/notrepo/
   
   $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/rcoll/b/d/file/tip/d?style=raw'
   200 Script output follows
   
   d
 
+Test intermediate directories
+
+  $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/rcoll/notrepo/?style=raw'
+  200 Script output follows
+  
+  
+  /rcoll/notrepo/e/
+  /rcoll/notrepo/e/e2/
+  /rcoll/notrepo/f/
+  /rcoll/notrepo/f/f2/
+  
+
+Test repositories inside intermediate directories
+
+  $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/rcoll/notrepo/e/file/tip/e?style=raw'
+  200 Script output follows
+  
+  e
+
+Test subrepositories inside intermediate directories
+
+  $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/rcoll/notrepo/f/f2/file/tip/f2?style=raw'
+  200 Script output follows
+  
+  f2
+
+Test descend = False
+
+  $ "$TESTDIR/killdaemons.py"
+  $ cat >> paths.conf <<EOF
+  > descend=false
+  > EOF
+  $ hg serve -p $HGPORT1 -d --pid-file=hg.pid --webdir-conf paths.conf \
+  >     -A access-paths.log -E error-paths-4.log
+  $ cat hg.pid >> $DAEMON_PIDS
+  $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/coll/?style=raw'
+  200 Script output follows
+  
+  
+  /coll/a/
+  /coll/b/
+  /coll/c/
+  
+  $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/coll/a/file/tip/a?style=raw'
+  200 Script output follows
+  
+  a
+  $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/rcoll/?style=raw'
+  200 Script output follows
+  
+  
+  /rcoll/a/
+  /rcoll/b/
+  /rcoll/c/
+  
+  $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/rcoll/b/d/file/tip/d?style=raw'
+  200 Script output follows
+  
+  d
+
+Test intermediate directories
+
+  $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/rcoll/notrepo/?style=raw'
+  200 Script output follows
+  
+  
+  /rcoll/notrepo/e/
+  /rcoll/notrepo/f/
+  
+
+Test repositories inside intermediate directories
+
+  $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/rcoll/notrepo/e/file/tip/e?style=raw'
+  200 Script output follows
+  
+  e
+
+Test subrepositories inside intermediate directories
+
+  $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/rcoll/notrepo/f/f2/file/tip/f2?style=raw'
+  200 Script output follows
+  
+  f2
+
 Test [paths] '*' in a repo root
 
   $ hg id http://localhost:$HGPORT1/astar
@@ -523,15 +783,60 @@
   > t/a = $root/a
   > t/b = $root/b
   > c = $root/c
+  > EOF
+  $ hg serve -p $HGPORT1 -d --pid-file=hg.pid --webdir-conf paths.conf \
+  >     -A access-paths.log -E error-paths-5.log
+  $ cat hg.pid >> $DAEMON_PIDS
+  $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/?style=raw'
+  200 Script output follows
+  
+  
+  /t/a/
+  /t/b/
+  /c/
+  
+  $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/t/?style=raw'
+  200 Script output follows
+  
+  
+  /t/a/
+  /t/b/
+  
+
+Test collapse = True
+
+  $ "$TESTDIR/killdaemons.py"
+  $ cat >> paths.conf <<EOF
   > [web]
+  > collapse=true
+  > EOF
+  $ hg serve -p $HGPORT1 -d --pid-file=hg.pid --webdir-conf paths.conf \
+  >     -A access-paths.log -E error-paths-6.log
+  $ cat hg.pid >> $DAEMON_PIDS
+  $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/?style=raw'
+  200 Script output follows
+  
+  
+  /t/
+  /c/
+  
+  $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/t/?style=raw'
+  200 Script output follows
+  
+  
+  /t/a/
+  /t/b/
+  
+
+test descend = False
+
+  $ "$TESTDIR/killdaemons.py"
+  $ cat >> paths.conf <<EOF
   > descend=false
   > EOF
   $ hg serve -p $HGPORT1 -d --pid-file=hg.pid --webdir-conf paths.conf \
-  >     -A access-paths.log -E error-paths-3.log
+  >     -A access-paths.log -E error-paths-7.log
   $ cat hg.pid >> $DAEMON_PIDS
-
-test descend = False
-
   $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT1 '/?style=raw'
   200 Script output follows
   
@@ -552,7 +857,7 @@
   > inexistent = $root/inexistent
   > EOF
   $ hg serve -p $HGPORT1 -d --pid-file=hg.pid --webdir-conf paths.conf \
-  >     -A access-paths.log -E error-paths-4.log
+  >     -A access-paths.log -E error-paths-8.log
   $ cat hg.pid >> $DAEMON_PIDS
 
 test inexistent and inaccessible repo should be ignored silently
@@ -617,6 +922,8 @@
   /a/.hg/patches/
   /b/
   /c/
+  /notrepo/e/
+  /notrepo/f/
   
   $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT2 '/a/file/tip/a?style=raw'
   200 Script output follows
@@ -672,6 +979,26 @@
 
   $ cat error-paths-3.log
 
+paths errors 4
+
+  $ cat error-paths-4.log
+
+paths errors 5
+
+  $ cat error-paths-5.log
+
+paths errors 6
+
+  $ cat error-paths-6.log
+
+paths errors 7
+
+  $ cat error-paths-7.log
+
+paths errors 8
+
+  $ cat error-paths-8.log
+
 collections errors
 
   $ cat error-collections.log
--- a/tests/test-http.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-http.t	Tue Apr 17 17:56:36 2012 -0500
@@ -112,6 +112,7 @@
   [255]
 
 test http authentication
++ use the same server to test server side streaming preference
 
   $ cd test
   $ cat << EOT > userpass.py
@@ -127,7 +128,8 @@
   > def extsetup():
   >     common.permhooks.insert(0, perform_authentication)
   > EOT
-  $ hg --config extensions.x=userpass.py serve -p $HGPORT2 -d --pid-file=pid
+  $ hg --config extensions.x=userpass.py serve -p $HGPORT2 -d --pid-file=pid \
+  >    --config server.preferuncompressed=True
   $ cat pid >> $DAEMON_PIDS
 
   $ hg id http://localhost:$HGPORT2/  
@@ -149,8 +151,13 @@
   5fed3813f7f5
   $ hg id http://user@localhost:$HGPORT2/ 
   5fed3813f7f5
-  $ hg id http://user:pass@localhost:$HGPORT2/
-  5fed3813f7f5
+  $ hg clone http://user:pass@localhost:$HGPORT2/ dest 2>&1
+  streaming all changes
+  7 files to transfer, 916 bytes of data
+  transferred * bytes in * seconds (*/sec) (glob)
+  updating to branch default
+  5 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
   $ hg id http://user2@localhost:$HGPORT2/ 
   abort: http authorization required
   [255]
--- a/tests/test-issue612.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-issue612.t	Tue Apr 17 17:56:36 2012 -0500
@@ -24,11 +24,11 @@
 
   $ hg merge
   merging src/a.c and source/a.c to source/a.c
-  1 files updated, 1 files merged, 0 files removed, 0 files unresolved
+  0 files updated, 1 files merged, 0 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
 
   $ hg status
   M source/a.c
   R src/a.c
-  ? source/a.o
+  ? src/a.o
 
--- a/tests/test-keyword.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-keyword.t	Tue Apr 17 17:56:36 2012 -0500
@@ -338,18 +338,18 @@
   > EOF
   diff --git a/a b/a
   2 hunks, 2 lines changed
-  examine changes to 'a'? [Ynsfdaq?] 
+  examine changes to 'a'? [Ynesfdaq?] 
   @@ -1,3 +1,4 @@
    expand $Id$
   +foo
    do not process $Id:
    xxx $
-  record change 1/2 to 'a'? [Ynsfdaq?] 
+  record change 1/2 to 'a'? [Ynesfdaq?] 
   @@ -2,2 +3,3 @@
    do not process $Id:
    xxx $
   +bar
-  record change 2/2 to 'a'? [Ynsfdaq?] 
+  record change 2/2 to 'a'? [Ynesfdaq?] 
 
   $ hg identify
   d17e03c92c97+ tip
@@ -395,18 +395,18 @@
   > EOF
   diff --git a/a b/a
   2 hunks, 2 lines changed
-  examine changes to 'a'? [Ynsfdaq?] 
+  examine changes to 'a'? [Ynesfdaq?] 
   @@ -1,3 +1,4 @@
    expand $Id$
   +foo
    do not process $Id:
    xxx $
-  record change 1/2 to 'a'? [Ynsfdaq?] 
+  record change 1/2 to 'a'? [Ynesfdaq?] 
   @@ -2,2 +3,3 @@
    do not process $Id:
    xxx $
   +bar
-  record change 2/2 to 'a'? [Ynsfdaq?] 
+  record change 2/2 to 'a'? [Ynesfdaq?] 
 
 File a should be clean
 
@@ -462,7 +462,7 @@
   > EOF
   diff --git a/r b/r
   new file mode 100644
-  examine changes to 'r'? [Ynsfdaq?] 
+  examine changes to 'r'? [Ynesfdaq?] 
   r
   committed changeset 3:899491280810
   overwriting r expanding keywords
@@ -486,7 +486,7 @@
   > EOF
   diff --git a/i b/i
   new file mode 100644
-  examine changes to 'i'? [Ynsfdaq?] 
+  examine changes to 'i'? [Ynesfdaq?] 
   i
   committed changeset 3:5f40fe93bbdc
   $ cat i
--- a/tests/test-largefiles.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-largefiles.t	Tue Apr 17 17:56:36 2012 -0500
@@ -127,6 +127,29 @@
   $ cat sub/large4
   large22
 
+Test display of largefiles in hgweb
+
+  $ hg serve -d -p $HGPORT --pid-file ../hg.pid
+  $ cat ../hg.pid >> $DAEMON_PIDS
+  $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/file/tip/?style=raw'
+  200 Script output follows
+  
+  
+  drwxr-xr-x sub
+  -rw-r--r-- 41 large3
+  -rw-r--r-- 9 normal3
+  
+  
+  $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '/file/tip/sub/?style=raw'
+  200 Script output follows
+  
+  
+  -rw-r--r-- 41 large4
+  -rw-r--r-- 9 normal4
+  
+  
+  $ "$TESTDIR/killdaemons.py"
+
 Test archiving the various revisions.  These hit corner cases known with
 archiving.
 
@@ -737,6 +760,8 @@
   adding manifests
   adding file changes
   added 1 changesets with 2 changes to 2 files
+  getting changed largefiles
+  1 largefiles updated, 0 removed
   $ hg log --template '{rev}:{node|short}  {desc|firstline}\n'
   9:598410d3eb9a  modify normal file largefile in repo d
   8:a381d2c8c80e  modify normal file and largefile in repo b
@@ -759,6 +784,19 @@
   $ cat sub2/large7
   large7
 
+Cat a largefile
+  $ hg cat normal3
+  normal3-modified
+  $ hg cat sub/large4
+  large4-modified
+  $ rm ${USERCACHE}/*
+  $ hg cat -r a381d2c8c80e -o cat.out sub/large4
+  $ cat cat.out
+  large4-modified
+  $ rm cat.out
+  $ hg cat -r a381d2c8c80e normal3
+  normal3-modified
+
 Test that renaming a largefile results in correct output for status
 
   $ hg rename sub/large4 large4-renamed
--- a/tests/test-mq-qrefresh-interactive.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-mq-qrefresh-interactive.t	Tue Apr 17 17:56:36 2012 -0500
@@ -185,22 +185,22 @@
   > EOF
   diff --git a/1.txt b/1.txt
   2 hunks, 2 lines changed
-  examine changes to '1.txt'? [Ynsfdaq?] 
+  examine changes to '1.txt'? [Ynesfdaq?] 
   @@ -1,3 +1,3 @@
    1
   -2
   +2 2
    3
-  record change 1/4 to '1.txt'? [Ynsfdaq?] 
+  record change 1/4 to '1.txt'? [Ynesfdaq?] 
   @@ -3,3 +3,3 @@
    3
   -4
   +4 4
    5
-  record change 2/4 to '1.txt'? [Ynsfdaq?] 
+  record change 2/4 to '1.txt'? [Ynesfdaq?] 
   diff --git a/2.txt b/2.txt
   1 hunks, 1 lines changed
-  examine changes to '2.txt'? [Ynsfdaq?] 
+  examine changes to '2.txt'? [Ynesfdaq?] 
   @@ -1,5 +1,5 @@
    a
   -b
@@ -208,10 +208,10 @@
    c
    d
    e
-  record change 3/4 to '2.txt'? [Ynsfdaq?] 
+  record change 3/4 to '2.txt'? [Ynesfdaq?] 
   diff --git a/dir/a.txt b/dir/a.txt
   1 hunks, 1 lines changed
-  examine changes to 'dir/a.txt'? [Ynsfdaq?] 
+  examine changes to 'dir/a.txt'? [Ynesfdaq?] 
 
 After partial qrefresh 'tip'
 
@@ -279,7 +279,7 @@
   > EOF
   diff --git a/1.txt b/1.txt
   1 hunks, 1 lines changed
-  examine changes to '1.txt'? [Ynsfdaq?] 
+  examine changes to '1.txt'? [Ynesfdaq?] 
   @@ -1,5 +1,5 @@
    1
    2 2
@@ -287,17 +287,17 @@
   -4
   +4 4
    5
-  record change 1/2 to '1.txt'? [Ynsfdaq?] 
+  record change 1/2 to '1.txt'? [Ynesfdaq?] 
   diff --git a/dir/a.txt b/dir/a.txt
   1 hunks, 1 lines changed
-  examine changes to 'dir/a.txt'? [Ynsfdaq?] 
+  examine changes to 'dir/a.txt'? [Ynesfdaq?] 
   @@ -1,4 +1,4 @@
   -hello world
   +hello world!
    
    someone
    up
-  record change 2/2 to 'dir/a.txt'? [Ynsfdaq?] 
+  record change 2/2 to 'dir/a.txt'? [Ynesfdaq?] 
 
 After final qrefresh 'tip'
 
--- a/tests/test-mq-subrepo-svn.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-mq-subrepo-svn.t	Tue Apr 17 17:56:36 2012 -0500
@@ -37,7 +37,6 @@
   $ hg status -S -X '**/format'
   A .hgsub
   $ hg qnew -m0 0.diff
-  committing subrepository sub
   $ cd sub
   $ echo a > a
   $ svn add a
--- a/tests/test-mq-subrepo.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-mq-subrepo.t	Tue Apr 17 17:56:36 2012 -0500
@@ -105,7 +105,6 @@
   % update substate when adding .hgsub w/clean updated subrepo
   A .hgsub
   % qnew -m0 0.diff
-  committing subrepository sub
   path sub
    source   sub
    revision b2fdb12cd82b021c3b7053d67802e77b6eeaee31
@@ -121,7 +120,6 @@
   % update substate when modifying .hgsub w/clean updated subrepo
   M .hgsub
   % qnew -m1 1.diff
-  committing subrepository sub2
   path sub
    source   sub
    revision b2fdb12cd82b021c3b7053d67802e77b6eeaee31
@@ -166,7 +164,6 @@
   % update substate when adding .hgsub w/clean updated subrepo
   A .hgsub
   % qrefresh
-  committing subrepository sub
   path sub
    source   sub
    revision b2fdb12cd82b021c3b7053d67802e77b6eeaee31
@@ -183,7 +180,6 @@
   % update substate when modifying .hgsub w/clean updated subrepo
   M .hgsub
   % qrefresh
-  committing subrepository sub2
   path sub
    source   sub
    revision b2fdb12cd82b021c3b7053d67802e77b6eeaee31
@@ -225,7 +221,6 @@
   $ echo sub = sub > .hgsub
   $ hg add .hgsub
   $ hg qnew -m0 0.diff
-  committing subrepository sub
   $ hg debugsub
   path sub
    source   sub
@@ -268,7 +263,7 @@
   % qrecord --config ui.interactive=1 -m0 0.diff
   diff --git a/.hgsub b/.hgsub
   new file mode 100644
-  examine changes to '.hgsub'? [Ynsfdaq?] 
+  examine changes to '.hgsub'? [Ynesfdaq?] 
   abort: uncommitted changes in subrepository sub
   [255]
   % update substate when adding .hgsub w/clean updated subrepo
@@ -276,8 +271,7 @@
   % qrecord --config ui.interactive=1 -m0 0.diff
   diff --git a/.hgsub b/.hgsub
   new file mode 100644
-  examine changes to '.hgsub'? [Ynsfdaq?] 
-  committing subrepository sub
+  examine changes to '.hgsub'? [Ynesfdaq?] 
   path sub
    source   sub
    revision b2fdb12cd82b021c3b7053d67802e77b6eeaee31
@@ -293,11 +287,11 @@
   % qrecord --config ui.interactive=1 -m1 1.diff
   diff --git a/.hgsub b/.hgsub
   1 hunks, 1 lines changed
-  examine changes to '.hgsub'? [Ynsfdaq?] 
+  examine changes to '.hgsub'? [Ynesfdaq?] 
   @@ -1,1 +1,2 @@
    sub = sub
   +sub2 = sub2
-  record this change to '.hgsub'? [Ynsfdaq?] 
+  record this change to '.hgsub'? [Ynesfdaq?] 
   abort: uncommitted changes in subrepository sub2
   [255]
   % update substate when modifying .hgsub w/clean updated subrepo
@@ -305,12 +299,11 @@
   % qrecord --config ui.interactive=1 -m1 1.diff
   diff --git a/.hgsub b/.hgsub
   1 hunks, 1 lines changed
-  examine changes to '.hgsub'? [Ynsfdaq?] 
+  examine changes to '.hgsub'? [Ynesfdaq?] 
   @@ -1,1 +1,2 @@
    sub = sub
   +sub2 = sub2
-  record this change to '.hgsub'? [Ynsfdaq?] 
-  committing subrepository sub2
+  record this change to '.hgsub'? [Ynesfdaq?] 
   path sub
    source   sub
    revision b2fdb12cd82b021c3b7053d67802e77b6eeaee31
@@ -331,7 +324,7 @@
   % qrecord --config ui.interactive=1 -m2 2.diff
   diff --git a/.hgsub b/.hgsub
   deleted file mode 100644
-  examine changes to '.hgsub'? [Ynsfdaq?] 
+  examine changes to '.hgsub'? [Ynesfdaq?] 
   % debugsub should be empty
 
   $ hg qpop -qa
@@ -346,7 +339,7 @@
   % qrecord --config ui.interactive=1 -m3 3.diff
   diff --git a/.hgsub b/.hgsub
   deleted file mode 100644
-  examine changes to '.hgsub'? [Ynsfdaq?] 
+  examine changes to '.hgsub'? [Ynesfdaq?] 
   % debugsub should be empty
 
   $ cd ..
@@ -360,4 +353,3 @@
   $ echo sub = sub >> .hgsub
   $ hg add .hgsub
   $ hg qnew 0.diff
-  committing subrepository sub
--- a/tests/test-mq.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-mq.t	Tue Apr 17 17:56:36 2012 -0500
@@ -70,7 +70,7 @@
    qgoto         push or pop patches until named patch is at top of stack
    qguard        set or print guards for a patch
    qheader       print the header of the topmost or specified patch
-   qimport       import a patch
+   qimport       import a patch or existing changeset
    qnew          create a new patch
    qnext         print the name of the next pushable patch
    qpop          pop the current patch off the stack
--- a/tests/test-parseindex2.py	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-parseindex2.py	Tue Apr 17 17:56:36 2012 -0500
@@ -52,7 +52,6 @@
 
     return index, cache
 
-
 data_inlined = '\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x01\x8c' \
     '\x00\x00\x04\x07\x00\x00\x00\x00\x00\x00\x15\x15\xff\xff\xff' \
     '\xff\xff\xff\xff\xff\xebG\x97\xb7\x1fB\x04\xcf\x13V\x81\tw\x1b' \
@@ -94,13 +93,16 @@
     '\xb6\r\x98B\xcb\x07\xbd`\x8f\x92\xd9\xc4\x84\xbdK\x00\x00\x00' \
     '\x00\x00\x00\x00\x00\x00\x00\x00\x00'
 
-def runtest() :
+def parse_index2(data, inline):
+    index, chunkcache = parsers.parse_index2(data, inline)
+    return list(index), chunkcache
 
+def runtest() :
     py_res_1 = py_parseindex(data_inlined, True)
-    c_res_1 = parsers.parse_index2(data_inlined, True)
+    c_res_1 = parse_index2(data_inlined, True)
 
     py_res_2 = py_parseindex(data_non_inlined, False)
-    c_res_2 = parsers.parse_index2(data_non_inlined, False)
+    c_res_2 = parse_index2(data_non_inlined, False)
 
     if py_res_1 != c_res_1:
         print "Parse index result (with inlined data) differs!"
@@ -108,6 +110,13 @@
     if py_res_2 != c_res_2:
         print "Parse index result (no inlined data) differs!"
 
+    ix = parsers.parse_index2(data_inlined, True)[0]
+    for i, r in enumerate(ix):
+        if r[7] == nullid:
+            i = -1
+        if ix[r[7]] != i:
+            print 'Reverse lookup inconsistent for %r' % r[7].encode('hex')
+
     print "done"
 
 runtest()
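
The test-parseindex2.py hunks above wrap parsers.parse_index2 so its result is materialized as a list before being compared with the pure-Python parser, and add a node-to-rev reverse-lookup check. Condensed into a standalone sketch (assumptions: it reuses the data_inlined blob defined earlier in that test, and that the C index supports ix[node] lookups as the new loop exercises):

    from mercurial import parsers
    from mercurial.node import nullid

    def parse_index2(data, inline):
        # materialize the C index as a list so it can be compared
        # field by field with the pure-Python parser's output
        index, chunkcache = parsers.parse_index2(data, inline)
        return list(index), chunkcache

    def check_reverse_lookup(data):
        # each index entry is a tuple whose last field (r[7]) is the node;
        # the index maps node -> rev, and by convention nullid maps to -1
        ix = parsers.parse_index2(data, True)[0]
        for i, r in enumerate(ix):
            if r[7] == nullid:
                i = -1
            if ix[r[7]] != i:
                print 'Reverse lookup inconsistent for %r' % r[7].encode('hex')
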
--- a/tests/test-patchbomb.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-patchbomb.t	Tue Apr 17 17:56:36 2012 -0500
@@ -979,6 +979,62 @@
   
   --===*-- (glob)
 
+test attach and body for single patch:
+  $ hg email --date '1970-1-1 0:1' -n -f quux -t foo -c bar -s test -a --body -r 2
+  This patch series consists of 1 patches.
+  
+  
+  Displaying [PATCH] test ...
+  Content-Type: multipart/mixed; boundary="===*" (glob)
+  MIME-Version: 1.0
+  Subject: [PATCH] test
+  X-Mercurial-Node: ff2c9fa2018b15fa74b33363bda9527323e2a99f
+  Message-Id: <ff2c9fa2018b15fa74b3.60@*> (glob)
+  User-Agent: Mercurial-patchbomb/* (glob)
+  Date: Thu, 01 Jan 1970 00:01:00 +0000
+  From: quux
+  To: foo
+  Cc: bar
+  
+  --===* (glob)
+  Content-Type: text/plain; charset="us-ascii"
+  MIME-Version: 1.0
+  Content-Transfer-Encoding: 7bit
+  
+  # HG changeset patch
+  # User test
+  # Date 3 0
+  # Node ID ff2c9fa2018b15fa74b33363bda9527323e2a99f
+  # Parent  97d72e5f12c7e84f85064aa72e5a297142c36ed9
+  c
+  
+  diff -r 97d72e5f12c7 -r ff2c9fa2018b c
+  --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+  +++ b/c	Thu Jan 01 00:00:03 1970 +0000
+  @@ -0,0 +1,1 @@
+  +c
+  
+  --===* (glob)
+  Content-Type: text/x-patch; charset="us-ascii"
+  MIME-Version: 1.0
+  Content-Transfer-Encoding: 7bit
+  Content-Disposition: attachment; filename=t2.patch
+  
+  # HG changeset patch
+  # User test
+  # Date 3 0
+  # Node ID ff2c9fa2018b15fa74b33363bda9527323e2a99f
+  # Parent  97d72e5f12c7e84f85064aa72e5a297142c36ed9
+  c
+  
+  diff -r 97d72e5f12c7 -r ff2c9fa2018b c
+  --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+  +++ b/c	Thu Jan 01 00:00:03 1970 +0000
+  @@ -0,0 +1,1 @@
+  +c
+  
+  --===*-- (glob)
+
 test attach for multiple patches:
   $ hg email --date '1970-1-1 0:1' -n -f quux -t foo -c bar -s test -a \
   >  -r 0:1 -r 4
@@ -1572,7 +1628,7 @@
   >  -r 0:1
   This patch series consists of 2 patches.
   
-  Subject: [PATCH 0 of 2] 
+  (optional) Subject: [PATCH 0 of 2] 
   
   Displaying [PATCH 1 of 2] a ...
   Content-Type: text/plain; charset="us-ascii"
--- a/tests/test-phases.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-phases.t	Tue Apr 17 17:56:36 2012 -0500
@@ -402,3 +402,34 @@
   |
   o  0 public A
   
+test partial failure
+
+  $ hg phase --public 7
+  $ hg phase --draft '5 or 7'
+  cannot move 1 changesets to a more permissive phase, use --force
+  phase changed for 1 changesets
+  [1]
+  $ hg log -G --template "{rev} {phase} {desc}\n"
+  @    7 public merge B' and E
+  |\
+  | o  6 public B'
+  | |
+  +---o  5 draft H
+  | |
+  o |  4 public E
+  | |
+  o |  3 public D
+  | |
+  o |  2 public C
+  |/
+  o  1 public B
+  |
+  o  0 public A
+  
+
+test complete failure
+
+  $ hg phase --draft 7
+  cannot move 1 changesets to a more permissive phase, use --force
+  no phases changed
+  [1]
--- a/tests/test-qrecord.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-qrecord.t	Tue Apr 17 17:56:36 2012 -0500
@@ -40,6 +40,7 @@
   
         y - record this change
         n - skip this change
+        e - edit this change manually
   
         s - skip remaining changes to this file
         f - record remaining changes to this file
@@ -58,6 +59,7 @@
                             committing
       --close-branch        mark a branch as closed, hiding it from the branch
                             list
+      --amend               amend the parent of the working dir
    -I --include PATTERN [+] include names matching the given patterns
    -X --exclude PATTERN [+] exclude names matching the given patterns
    -m --message TEXT        use text as commit message
@@ -245,22 +247,22 @@
   > EOF
   diff --git a/1.txt b/1.txt
   2 hunks, 2 lines changed
-  examine changes to '1.txt'? [Ynsfdaq?] 
+  examine changes to '1.txt'? [Ynesfdaq?] 
   @@ -1,3 +1,3 @@
    1
   -2
   +2 2
    3
-  record change 1/4 to '1.txt'? [Ynsfdaq?] 
+  record change 1/4 to '1.txt'? [Ynesfdaq?] 
   @@ -3,3 +3,3 @@
    3
   -4
   +4 4
    5
-  record change 2/4 to '1.txt'? [Ynsfdaq?] 
+  record change 2/4 to '1.txt'? [Ynesfdaq?] 
   diff --git a/2.txt b/2.txt
   1 hunks, 1 lines changed
-  examine changes to '2.txt'? [Ynsfdaq?] 
+  examine changes to '2.txt'? [Ynesfdaq?] 
   @@ -1,5 +1,5 @@
    a
   -b
@@ -268,10 +270,10 @@
    c
    d
    e
-  record change 3/4 to '2.txt'? [Ynsfdaq?] 
+  record change 3/4 to '2.txt'? [Ynesfdaq?] 
   diff --git a/dir/a.txt b/dir/a.txt
   1 hunks, 1 lines changed
-  examine changes to 'dir/a.txt'? [Ynsfdaq?] 
+  examine changes to 'dir/a.txt'? [Ynesfdaq?] 
 
 After qrecord a.patch 'tip'"
 
@@ -340,7 +342,7 @@
   > EOF
   diff --git a/1.txt b/1.txt
   1 hunks, 1 lines changed
-  examine changes to '1.txt'? [Ynsfdaq?] 
+  examine changes to '1.txt'? [Ynesfdaq?] 
   @@ -1,5 +1,5 @@
    1
    2 2
@@ -348,17 +350,17 @@
   -4
   +4 4
    5
-  record change 1/2 to '1.txt'? [Ynsfdaq?] 
+  record change 1/2 to '1.txt'? [Ynesfdaq?] 
   diff --git a/dir/a.txt b/dir/a.txt
   1 hunks, 1 lines changed
-  examine changes to 'dir/a.txt'? [Ynsfdaq?] 
+  examine changes to 'dir/a.txt'? [Ynesfdaq?] 
   @@ -1,4 +1,4 @@
   -hello world
   +hello world!
    
    someone
    up
-  record change 2/2 to 'dir/a.txt'? [Ynsfdaq?] 
+  record change 2/2 to 'dir/a.txt'? [Ynesfdaq?] 
 
 After qrecord b.patch 'tip'
 
--- a/tests/test-record.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-record.t	Tue Apr 17 17:56:36 2012 -0500
@@ -20,7 +20,7 @@
   > EOF
   diff --git a/empty-rw b/empty-rw
   new file mode 100644
-  examine changes to 'empty-rw'? [Ynsfdaq?] 
+  examine changes to 'empty-rw'? [Ynesfdaq?] 
   no changes to record
 
   $ hg tip -p
@@ -39,7 +39,7 @@
   > EOF
   diff --git a/empty-rw b/empty-rw
   new file mode 100644
-  examine changes to 'empty-rw'? [Ynsfdaq?] 
+  examine changes to 'empty-rw'? [Ynesfdaq?] 
   abort: empty commit message
   [255]
 
@@ -59,7 +59,7 @@
   > EOF
   diff --git a/empty-rw b/empty-rw
   new file mode 100644
-  examine changes to 'empty-rw'? [Ynsfdaq?] 
+  examine changes to 'empty-rw'? [Ynesfdaq?] 
 
   $ hg tip -p
   changeset:   0:c0708cf4e46e
@@ -88,7 +88,7 @@
   diff --git a/empty-rw b/empty-rename
   rename from empty-rw
   rename to empty-rename
-  examine changes to 'empty-rw' and 'empty-rename'? [Ynsfdaq?] 
+  examine changes to 'empty-rw' and 'empty-rename'? [Ynesfdaq?] 
 
   $ hg tip -p
   changeset:   1:d695e8dcb197
@@ -108,7 +108,7 @@
   diff --git a/empty-rename b/empty-copy
   copy from empty-rename
   copy to empty-copy
-  examine changes to 'empty-rename' and 'empty-copy'? [Ynsfdaq?] 
+  examine changes to 'empty-rename' and 'empty-copy'? [Ynesfdaq?] 
 
   $ hg tip -p
   changeset:   2:1d4b90bea524
@@ -127,7 +127,7 @@
   > EOF
   diff --git a/empty-copy b/empty-copy
   deleted file mode 100644
-  examine changes to 'empty-copy'? [Ynsfdaq?] 
+  examine changes to 'empty-copy'? [Ynesfdaq?] 
 
   $ hg tip -p
   changeset:   3:b39a238f01a1
@@ -149,7 +149,7 @@
   diff --git a/tip.bundle b/tip.bundle
   new file mode 100644
   this is a binary file
-  examine changes to 'tip.bundle'? [Ynsfdaq?] 
+  examine changes to 'tip.bundle'? [Ynesfdaq?] 
 
   $ hg tip -p
   changeset:   4:ad816da3711e
@@ -171,7 +171,7 @@
   > EOF
   diff --git a/tip.bundle b/tip.bundle
   this modifies a binary file (all or nothing)
-  examine changes to 'tip.bundle'? [Ynsfdaq?] 
+  examine changes to 'tip.bundle'? [Ynesfdaq?] 
 
   $ hg tip -p
   changeset:   5:dccd6f3eb485
@@ -196,7 +196,7 @@
   rename from tip.bundle
   rename to top.bundle
   this modifies a binary file (all or nothing)
-  examine changes to 'tip.bundle' and 'top.bundle'? [Ynsfdaq?] 
+  examine changes to 'tip.bundle' and 'top.bundle'? [Ynesfdaq?] 
 
   $ hg tip -p
   changeset:   6:7fa44105f5b3
@@ -224,7 +224,7 @@
   > EOF
   diff --git a/plain b/plain
   new file mode 100644
-  examine changes to 'plain'? [Ynsfdaq?] 
+  examine changes to 'plain'? [Ynesfdaq?] 
 
   $ hg tip -p
   changeset:   7:11fb457c1be4
@@ -258,13 +258,13 @@
   > EOF
   diff --git a/plain b/plain
   1 hunks, 1 lines changed
-  examine changes to 'plain'? [Ynsfdaq?] 
+  examine changes to 'plain'? [Ynesfdaq?] 
   @@ -8,3 +8,4 @@
    8
    9
    10
   +11
-  record this change to 'plain'? [Ynsfdaq?] 
+  record this change to 'plain'? [Ynesfdaq?] 
 
 Modify end of plain file, no EOL
 
@@ -275,14 +275,14 @@
   > EOF
   diff --git a/plain b/plain
   1 hunks, 1 lines changed
-  examine changes to 'plain'? [Ynsfdaq?] 
+  examine changes to 'plain'? [Ynesfdaq?] 
   @@ -9,3 +9,4 @@
    9
    10
    11
   +7264f99c5f5ff3261504828afa4fb4d406c3af54
   \ No newline at end of file
-  record this change to 'plain'? [Ynsfdaq?] 
+  record this change to 'plain'? [Ynesfdaq?] 
 
 Modify end of plain file, add EOL
 
@@ -296,7 +296,7 @@
   > EOF
   diff --git a/plain b/plain
   1 hunks, 1 lines changed
-  examine changes to 'plain'? [Ynsfdaq?] 
+  examine changes to 'plain'? [Ynesfdaq?] 
   @@ -9,4 +9,4 @@
    9
    10
@@ -304,10 +304,10 @@
   -7264f99c5f5ff3261504828afa4fb4d406c3af54
   \ No newline at end of file
   +7264f99c5f5ff3261504828afa4fb4d406c3af54
-  record change 1/2 to 'plain'? [Ynsfdaq?] 
+  record change 1/2 to 'plain'? [Ynesfdaq?] 
   diff --git a/plain2 b/plain2
   new file mode 100644
-  examine changes to 'plain2'? [Ynsfdaq?] 
+  examine changes to 'plain2'? [Ynesfdaq?] 
 
 Modify beginning, trim end, record both, add another file to test
 changes numbering
@@ -327,28 +327,28 @@
   > EOF
   diff --git a/plain b/plain
   2 hunks, 3 lines changed
-  examine changes to 'plain'? [Ynsfdaq?] 
+  examine changes to 'plain'? [Ynesfdaq?] 
   @@ -1,4 +1,4 @@
   -1
   +2
    2
    3
    4
-  record change 1/3 to 'plain'? [Ynsfdaq?] 
+  record change 1/3 to 'plain'? [Ynesfdaq?] 
   @@ -8,5 +8,3 @@
    8
    9
    10
   -11
   -7264f99c5f5ff3261504828afa4fb4d406c3af54
-  record change 2/3 to 'plain'? [Ynsfdaq?] 
+  record change 2/3 to 'plain'? [Ynesfdaq?] 
   diff --git a/plain2 b/plain2
   1 hunks, 1 lines changed
-  examine changes to 'plain2'? [Ynsfdaq?] 
+  examine changes to 'plain2'? [Ynesfdaq?] 
   @@ -1,1 +1,2 @@
    1
   +2
-  record change 3/3 to 'plain2'? [Ynsfdaq?] 
+  record change 3/3 to 'plain2'? [Ynesfdaq?] 
 
   $ hg tip -p
   changeset:   11:21df83db12b8
@@ -396,7 +396,7 @@
   > EOF
   diff --git a/plain b/plain
   2 hunks, 4 lines changed
-  examine changes to 'plain'? [Ynsfdaq?] 
+  examine changes to 'plain'? [Ynesfdaq?] 
   @@ -1,9 +1,6 @@
   -2
   -2
@@ -407,7 +407,7 @@
    7
    8
    9
-  record change 1/2 to 'plain'? [Ynsfdaq?] 
+  record change 1/2 to 'plain'? [Ynesfdaq?] 
   @@ -4,7 +1,7 @@
    4
    5
@@ -417,7 +417,7 @@
    9
   -10
   +10.new
-  record change 2/2 to 'plain'? [Ynsfdaq?] 
+  record change 2/2 to 'plain'? [Ynesfdaq?] 
 
   $ hg tip -p
   changeset:   12:99337501826f
@@ -445,7 +445,7 @@
   > EOF
   diff --git a/plain b/plain
   1 hunks, 3 lines changed
-  examine changes to 'plain'? [Ynsfdaq?] 
+  examine changes to 'plain'? [Ynesfdaq?] 
   @@ -1,6 +1,3 @@
   -2
   -2
@@ -453,7 +453,7 @@
    4
    5
    6
-  record this change to 'plain'? [Ynsfdaq?] 
+  record this change to 'plain'? [Ynesfdaq?] 
 
   $ hg tip -p
   changeset:   13:bbd45465d540
@@ -490,7 +490,7 @@
   > EOF
   diff --git a/plain b/plain
   2 hunks, 4 lines changed
-  examine changes to 'plain'? [Ynsfdaq?] 
+  examine changes to 'plain'? [Ynesfdaq?] 
   @@ -1,6 +1,9 @@
   +1
   +2
@@ -501,7 +501,7 @@
    7
    8
    9
-  record change 1/2 to 'plain'? [Ynsfdaq?] 
+  record change 1/2 to 'plain'? [Ynesfdaq?] 
   @@ -1,7 +4,6 @@
    4
    5
@@ -510,7 +510,7 @@
    8
    9
   -10.new
-  record change 2/2 to 'plain'? [Ynsfdaq?] 
+  record change 2/2 to 'plain'? [Ynesfdaq?] 
 
 Add to beginning, middle, end
 
@@ -529,14 +529,14 @@
   > EOF
   diff --git a/plain b/plain
   3 hunks, 7 lines changed
-  examine changes to 'plain'? [Ynsfdaq?] 
+  examine changes to 'plain'? [Ynesfdaq?] 
   @@ -1,2 +1,5 @@
   +1
   +2
   +3
    4
    5
-  record change 1/3 to 'plain'? [Ynsfdaq?] 
+  record change 1/3 to 'plain'? [Ynesfdaq?] 
   @@ -1,6 +4,8 @@
    4
    5
@@ -546,7 +546,7 @@
    7
    8
    9
-  record change 2/3 to 'plain'? [Ynsfdaq?] 
+  record change 2/3 to 'plain'? [Ynesfdaq?] 
   @@ -3,4 +8,6 @@
    6
    7
@@ -554,7 +554,7 @@
    9
   +10
   +11
-  record change 3/3 to 'plain'? [Ynsfdaq?] 
+  record change 3/3 to 'plain'? [Ynesfdaq?] 
 
   $ hg tip -p
   changeset:   15:f34a7937ec33
@@ -587,14 +587,14 @@
   > EOF
   diff --git a/plain b/plain
   1 hunks, 2 lines changed
-  examine changes to 'plain'? [Ynsfdaq?] 
+  examine changes to 'plain'? [Ynesfdaq?] 
   @@ -9,3 +9,5 @@
    7
    8
    9
   +10
   +11
-  record this change to 'plain'? [Ynsfdaq?] 
+  record this change to 'plain'? [Ynesfdaq?] 
 
   $ hg tip -p
   changeset:   16:f9900b71a04c
@@ -627,11 +627,11 @@
   > EOF
   diff --git a/subdir/a b/subdir/a
   1 hunks, 1 lines changed
-  examine changes to 'subdir/a'? [Ynsfdaq?] 
+  examine changes to 'subdir/a'? [Ynesfdaq?] 
   @@ -1,1 +1,2 @@
    a
   +a
-  record this change to 'subdir/a'? [Ynsfdaq?] 
+  record this change to 'subdir/a'? [Ynesfdaq?] 
 
   $ hg tip -p
   changeset:   18:61be427a9deb
@@ -665,16 +665,17 @@
   > EOF
   diff --git a/subdir/f1 b/subdir/f1
   1 hunks, 1 lines changed
-  examine changes to 'subdir/f1'? [Ynsfdaq?] 
+  examine changes to 'subdir/f1'? [Ynesfdaq?] 
   y - record this change
   n - skip this change
+  e - edit this change manually
   s - skip remaining changes to this file
   f - record remaining changes to this file
   d - done, skip remaining changes and files
   a - record all changes to all remaining files
   q - quit, recording no changes
   ? - display help
-  examine changes to 'subdir/f1'? [Ynsfdaq?] 
+  examine changes to 'subdir/f1'? [Ynesfdaq?] 
   abort: user quit
   [255]
 
@@ -685,10 +686,10 @@
   > EOF
   diff --git a/subdir/f1 b/subdir/f1
   1 hunks, 1 lines changed
-  examine changes to 'subdir/f1'? [Ynsfdaq?] 
+  examine changes to 'subdir/f1'? [Ynesfdaq?] 
   diff --git a/subdir/f2 b/subdir/f2
   1 hunks, 1 lines changed
-  examine changes to 'subdir/f2'? [Ynsfdaq?] abort: response expected
+  examine changes to 'subdir/f2'? [Ynesfdaq?] abort: response expected
   [255]
 
 No
@@ -698,10 +699,10 @@
   > EOF
   diff --git a/subdir/f1 b/subdir/f1
   1 hunks, 1 lines changed
-  examine changes to 'subdir/f1'? [Ynsfdaq?] 
+  examine changes to 'subdir/f1'? [Ynesfdaq?] 
   diff --git a/subdir/f2 b/subdir/f2
   1 hunks, 1 lines changed
-  examine changes to 'subdir/f2'? [Ynsfdaq?] abort: response expected
+  examine changes to 'subdir/f2'? [Ynesfdaq?] abort: response expected
   [255]
 
 f, quit
@@ -712,10 +713,10 @@
   > EOF
   diff --git a/subdir/f1 b/subdir/f1
   1 hunks, 1 lines changed
-  examine changes to 'subdir/f1'? [Ynsfdaq?] 
+  examine changes to 'subdir/f1'? [Ynesfdaq?] 
   diff --git a/subdir/f2 b/subdir/f2
   1 hunks, 1 lines changed
-  examine changes to 'subdir/f2'? [Ynsfdaq?] 
+  examine changes to 'subdir/f2'? [Ynesfdaq?] 
   abort: user quit
   [255]
 
@@ -727,10 +728,10 @@
   > EOF
   diff --git a/subdir/f1 b/subdir/f1
   1 hunks, 1 lines changed
-  examine changes to 'subdir/f1'? [Ynsfdaq?] 
+  examine changes to 'subdir/f1'? [Ynesfdaq?] 
   diff --git a/subdir/f2 b/subdir/f2
   1 hunks, 1 lines changed
-  examine changes to 'subdir/f2'? [Ynsfdaq?] 
+  examine changes to 'subdir/f2'? [Ynesfdaq?] 
 
   $ hg tip -p
   changeset:   20:b3df3dda369a
@@ -754,7 +755,7 @@
   > EOF
   diff --git a/subdir/f1 b/subdir/f1
   1 hunks, 1 lines changed
-  examine changes to 'subdir/f1'? [Ynsfdaq?] 
+  examine changes to 'subdir/f1'? [Ynesfdaq?] 
 
   $ hg tip -p
   changeset:   21:38ec577f126b
@@ -784,12 +785,12 @@
   old mode 100644
   new mode 100755
   1 hunks, 1 lines changed
-  examine changes to 'subdir/f1'? [Ynsfdaq?] 
+  examine changes to 'subdir/f1'? [Ynesfdaq?] 
   @@ -1,2 +1,3 @@
    a
    a
   +a
-  record this change to 'subdir/f1'? [Ynsfdaq?] 
+  record this change to 'subdir/f1'? [Ynesfdaq?] 
 
   $ hg tip --config diff.git=True -p
   changeset:   22:3261adceb075
@@ -819,13 +820,13 @@
   > EOF
   diff --git a/subdir/f1 b/subdir/f1
   1 hunks, 1 lines changed
-  examine changes to 'subdir/f1'? [Ynsfdaq?] 
+  examine changes to 'subdir/f1'? [Ynesfdaq?] 
   @@ -1,3 +1,4 @@
    a
    a
    a
   +b
-  record this change to 'subdir/f1'? [Ynsfdaq?] 
+  record this change to 'subdir/f1'? [Ynesfdaq?] 
 
   $ hg tip --config diff.git=True -p
   changeset:   23:b429867550db
@@ -857,13 +858,13 @@
   old mode 100755
   new mode 100644
   1 hunks, 1 lines changed
-  examine changes to 'subdir/f1'? [Ynsfdaq?] 
+  examine changes to 'subdir/f1'? [Ynesfdaq?] 
   @@ -2,3 +2,4 @@
    a
    a
    b
   +c
-  record this change to 'subdir/f1'? [Ynsfdaq?] 
+  record this change to 'subdir/f1'? [Ynesfdaq?] 
 
   $ hg tip --config diff.git=True -p
   changeset:   24:0b082130c20a
@@ -914,6 +915,155 @@
   $ hg up -C
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
 
+Editing patch
+
+  $ cat > editor << '__EOF__'
+  > #!/bin/sh
+  > sed -e 7d -e '5s/^-/ /' "$1" > tmp
+  > mv tmp "$1"
+  > __EOF__
+  $ chmod +x editor
+  $ cat > editedfile << '__EOF__'
+  > This is the first line
+  > This is the second line
+  > This is the third line
+  > __EOF__
+  $ hg add editedfile
+  $ hg commit -medit-patch-1
+  $ cat > editedfile << '__EOF__'
+  > This line has changed
+  > This change will be committed
+  > This is the third line
+  > __EOF__
+  $ HGEDITOR="'`pwd`'"/editor hg record -d '23 0' -medit-patch-2 <<EOF
+  > y
+  > e
+  > EOF
+  diff --git a/editedfile b/editedfile
+  1 hunks, 2 lines changed
+  examine changes to 'editedfile'? [Ynesfdaq?] 
+  @@ -1,3 +1,3 @@
+  -This is the first line
+  -This is the second line
+  +This line has changed
+  +This change will be committed
+   This is the third line
+  record this change to 'editedfile'? [Ynesfdaq?] 
+  $ cat editedfile
+  This line has changed
+  This change will be committed
+  This is the third line
+  $ hg cat -r tip editedfile
+  This is the first line
+  This change will be committed
+  This is the third line
+  $ hg revert editedfile
+
+Trying to edit patch for whole file
+
+  $ echo "This is the fourth line" >> editedfile
+  $ hg record <<EOF
+  > e
+  > q
+  > EOF
+  diff --git a/editedfile b/editedfile
+  1 hunks, 1 lines changed
+  examine changes to 'editedfile'? [Ynesfdaq?] 
+  cannot edit patch for whole file
+  examine changes to 'editedfile'? [Ynesfdaq?] 
+  abort: user quit
+  [255]
+  $ hg revert editedfile
+
+Removing changes from patch
+
+  $ sed -e '3s/third/second/' -e '2s/will/will not/' -e 1d editedfile > tmp
+  $ mv tmp editedfile
+  $ echo "This line has been added" >> editedfile
+  $ cat > editor << '__EOF__'
+  > #!/bin/sh
+  > sed -e 's/^[-+]/ /' "$1" > tmp
+  > mv tmp "$1"
+  > __EOF__
+  $ chmod +x editor
+  $ HGEDITOR="'`pwd`'"/editor hg record <<EOF
+  > y
+  > e
+  > EOF
+  diff --git a/editedfile b/editedfile
+  1 hunks, 3 lines changed
+  examine changes to 'editedfile'? [Ynesfdaq?] 
+  @@ -1,3 +1,3 @@
+  -This is the first line
+  -This change will be committed
+  -This is the third line
+  +This change will not be committed
+  +This is the second line
+  +This line has been added
+  record this change to 'editedfile'? [Ynesfdaq?] 
+  no changes to record
+  $ cat editedfile
+  This change will not be committed
+  This is the second line
+  This line has been added
+  $ hg cat -r tip editedfile
+  This is the first line
+  This change will be committed
+  This is the third line
+  $ hg revert editedfile
+
+Invalid patch
+
+  $ sed -e '3s/third/second/' -e '2s/will/will not/' -e 1d editedfile > tmp
+  $ mv tmp editedfile
+  $ echo "This line has been added" >> editedfile
+  $ cat > editor << '__EOF__'
+  > #!/bin/sh
+  > sed s/This/That/ "$1" > tmp
+  > mv tmp "$1"
+  > __EOF__
+  $ chmod +x editor
+  $ HGEDITOR="'`pwd`'"/editor hg record <<EOF
+  > y
+  > e
+  > EOF
+  diff --git a/editedfile b/editedfile
+  1 hunks, 3 lines changed
+  examine changes to 'editedfile'? [Ynesfdaq?] 
+  @@ -1,3 +1,3 @@
+  -This is the first line
+  -This change will be committed
+  -This is the third line
+  +This change will not be committed
+  +This is the second line
+  +This line has been added
+  record this change to 'editedfile'? [Ynesfdaq?] 
+  patching file editedfile
+  Hunk #1 FAILED at 0
+  1 out of 1 hunks FAILED -- saving rejects to file editedfile.rej
+  abort: patch failed to apply
+  [255]
+  $ cat editedfile
+  This change will not be committed
+  This is the second line
+  This line has been added
+  $ hg cat -r tip editedfile
+  This is the first line
+  This change will be committed
+  This is the third line
+  $ cat editedfile.rej
+  --- editedfile
+  +++ editedfile
+  @@ -1,3 +1,3 @@
+  -That is the first line
+  -That change will be committed
+  -That is the third line
+  +That change will not be committed
+  +That is the second line
+  +That line has been added
+  $ hg up -C
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
 With win32text
 
   $ echo '[extensions]' >> .hg/hgrc
@@ -931,31 +1081,30 @@
   $ echo 'warn = no' >> .hg/hgrc
 
   $ echo d >> subdir/f1
-  $ hg record -d '23 0' -mw1 <<EOF
+  $ hg record -d '24 0' -mw1 <<EOF
   > y
   > y
   > EOF
   diff --git a/subdir/f1 b/subdir/f1
   1 hunks, 1 lines changed
-  examine changes to 'subdir/f1'? [Ynsfdaq?] 
+  examine changes to 'subdir/f1'? [Ynesfdaq?] 
   @@ -3,3 +3,4 @@
    a
    b
    c
   +d
-  record this change to 'subdir/f1'? [Ynsfdaq?] 
+  record this change to 'subdir/f1'? [Ynesfdaq?] 
 
   $ hg tip -p
-  changeset:   26:b8306e70edc4
+  changeset:   28:287ad1f41a72
   tag:         tip
-  parent:      24:0b082130c20a
   user:        test
-  date:        Thu Jan 01 00:00:23 1970 +0000
+  date:        Thu Jan 01 00:00:24 1970 +0000
   summary:     w1
   
-  diff -r 0b082130c20a -r b8306e70edc4 subdir/f1
-  --- a/subdir/f1	Thu Jan 01 00:00:22 1970 +0000
-  +++ b/subdir/f1	Thu Jan 01 00:00:23 1970 +0000
+  diff -r 65ce23a81197 -r 287ad1f41a72 subdir/f1
+  --- a/subdir/f1	Thu Jan 01 00:00:23 1970 +0000
+  +++ b/subdir/f1	Thu Jan 01 00:00:24 1970 +0000
   @@ -3,3 +3,4 @@
    a
    b
--- a/tests/test-rename-dir-merge.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-rename-dir-merge.t	Tue Apr 17 17:56:36 2012 -0500
@@ -27,7 +27,6 @@
     searching for copies back to rev 1
     unmatched files in local:
      a/c
-     a/d
     unmatched files in other:
      b/a
      b/b
@@ -37,33 +36,29 @@
     checking for directory renames
     dir a/ -> b/
     file a/c -> b/c
-    file a/d -> b/d
   resolving manifests
    overwrite: False, partial: False
    ancestor: f9b20c0d4c51, local: ce36d17b18fb+, remote: 397f8b00a740
-   a/d: remote renamed directory to b/d -> d
    a/c: remote renamed directory to b/c -> d
    a/b: other deleted -> r
    a/a: other deleted -> r
    b/a: remote created -> g
    b/b: remote created -> g
-  updating: a/a 1/6 files (16.67%)
+  updating: a/a 1/5 files (20.00%)
   removing a/a
-  updating: a/b 2/6 files (33.33%)
+  updating: a/b 2/5 files (40.00%)
   removing a/b
-  updating: a/c 3/6 files (50.00%)
+  updating: a/c 3/5 files (60.00%)
   moving a/c to b/c
-  updating: a/d 4/6 files (66.67%)
-  moving a/d to b/d
-  updating: b/a 5/6 files (83.33%)
+  updating: b/a 4/5 files (80.00%)
   getting b/a
-  updating: b/b 6/6 files (100.00%)
+  updating: b/b 5/5 files (100.00%)
   getting b/b
-  4 files updated, 0 files merged, 2 files removed, 0 files unresolved
+  3 files updated, 0 files merged, 2 files removed, 0 files unresolved
   (branch merge, don't forget to commit)
 
   $ echo a/* b/*
-  a/* b/a b/b b/c b/d
+  a/d b/a b/b b/c
   $ hg st -C
   M b/a
   M b/b
@@ -72,7 +67,7 @@
   R a/a
   R a/b
   R a/c
-  ? b/d
+  ? a/d
   $ hg ci -m "3 merge 2+1"
   $ hg debugrename b/c
   b/c renamed from a/c:354ae8da6e890359ef49ade27b68bbc361f3ca88 (glob)
@@ -84,7 +79,6 @@
     unmatched files in local:
      b/a
      b/b
-     b/d
     unmatched files in other:
      a/c
     all copies found (* = to merge, ! = divergent):
@@ -103,11 +97,11 @@
   (branch merge, don't forget to commit)
 
   $ echo a/* b/*
-  a/* b/a b/b b/c b/d
+  a/d b/a b/b b/c
   $ hg st -C
   A b/c
     a/c
-  ? b/d
+  ? a/d
   $ hg ci -m "4 merge 1+2"
   created new head
   $ hg debugrename b/c
--- a/tests/test-revset-dirstate-parents.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-revset-dirstate-parents.t	Tue Apr 17 17:56:36 2012 -0500
@@ -13,11 +13,17 @@
   $ cd repo
 
   $ try 'p1()'
-  ('func', ('symbol', 'p1'), None)
+  (func
+    ('symbol', 'p1')
+    None)
   $ try 'p2()'
-  ('func', ('symbol', 'p2'), None)
+  (func
+    ('symbol', 'p2')
+    None)
   $ try 'parents()'
-  ('func', ('symbol', 'parents'), None)
+  (func
+    ('symbol', 'parents')
+    None)
 
 null revision
   $ log 'p1()'
--- a/tests/test-revset.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-revset.t	Tue Apr 17 17:56:36 2012 -0500
@@ -94,19 +94,25 @@
   ('symbol', 'a')
   0
   $ try b-a
-  ('minus', ('symbol', 'b'), ('symbol', 'a'))
+  (minus
+    ('symbol', 'b')
+    ('symbol', 'a'))
   1
   $ try _a_b_c_
   ('symbol', '_a_b_c_')
   6
   $ try _a_b_c_-a
-  ('minus', ('symbol', '_a_b_c_'), ('symbol', 'a'))
+  (minus
+    ('symbol', '_a_b_c_')
+    ('symbol', 'a'))
   6
   $ try .a.b.c.
   ('symbol', '.a.b.c.')
   7
   $ try .a.b.c.-a
-  ('minus', ('symbol', '.a.b.c.'), ('symbol', 'a'))
+  (minus
+    ('symbol', '.a.b.c.')
+    ('symbol', 'a'))
   7
   $ try -- '-a-b-c-' # complains
   hg: parse error at 7: not a prefix: end
@@ -114,7 +120,15 @@
   $ log -a-b-c- # succeeds with fallback
   4
   $ try -- -a-b-c--a # complains
-  ('minus', ('minus', ('minus', ('negate', ('symbol', 'a')), ('symbol', 'b')), ('symbol', 'c')), ('negate', ('symbol', 'a')))
+  (minus
+    (minus
+      (minus
+        (negate
+          ('symbol', 'a'))
+        ('symbol', 'b'))
+      ('symbol', 'c'))
+    (negate
+      ('symbol', 'a')))
   abort: unknown revision '-a'!
   [255]
   $ try é
@@ -124,7 +138,9 @@
 quoting needed
 
   $ try '"-a-b-c-"-a'
-  ('minus', ('string', '-a-b-c-'), ('symbol', 'a'))
+  (minus
+    ('string', '-a-b-c-')
+    ('symbol', 'a'))
   4
 
   $ log '1 or 2'
@@ -136,15 +152,32 @@
   $ log '1 and 2'
   $ log '1&2'
   $ try '1&2|3' # precedence - and is higher
-  ('or', ('and', ('symbol', '1'), ('symbol', '2')), ('symbol', '3'))
+  (or
+    (and
+      ('symbol', '1')
+      ('symbol', '2'))
+    ('symbol', '3'))
   3
   $ try '1|2&3'
-  ('or', ('symbol', '1'), ('and', ('symbol', '2'), ('symbol', '3')))
+  (or
+    ('symbol', '1')
+    (and
+      ('symbol', '2')
+      ('symbol', '3')))
   1
   $ try '1&2&3' # associativity
-  ('and', ('and', ('symbol', '1'), ('symbol', '2')), ('symbol', '3'))
+  (and
+    (and
+      ('symbol', '1')
+      ('symbol', '2'))
+    ('symbol', '3'))
   $ try '1|(2|3)'
-  ('or', ('symbol', '1'), ('group', ('or', ('symbol', '2'), ('symbol', '3'))))
+  (or
+    ('symbol', '1')
+    (group
+      (or
+        ('symbol', '2')
+        ('symbol', '3'))))
   1
   2
   3
@@ -213,7 +246,7 @@
   7
   8
   9
-  $ log 'file(b)'
+  $ log 'file("b*")'
   1
   4
   $ log 'follow()'
@@ -226,13 +259,19 @@
   $ log 'grep("issue\d+")'
   6
   $ try 'grep("(")' # invalid regular expression
-  ('func', ('symbol', 'grep'), ('string', '('))
+  (func
+    ('symbol', 'grep')
+    ('string', '('))
   hg: parse error: invalid match pattern: unbalanced parenthesis
   [255]
   $ try 'grep("\bissue\d+")'
-  ('func', ('symbol', 'grep'), ('string', '\x08issue\\d+'))
+  (func
+    ('symbol', 'grep')
+    ('string', '\x08issue\\d+'))
   $ try 'grep(r"\bissue\d+")'
-  ('func', ('symbol', 'grep'), ('string', '\\bissue\\d+'))
+  (func
+    ('symbol', 'grep')
+    ('string', '\\bissue\\d+'))
   6
   $ try 'grep(r"\")'
   hg: parse error at 7: unterminated string
@@ -253,6 +292,11 @@
   6
   $ log 'limit(head(), 1)'
   0
+  $ log 'matching(6)'
+  6
+  $ log 'matching(6:7, "phase parents user date branch summary files description substate")'
+  6
+  7
   $ log 'max(contains(a))'
   5
   $ log 'min(contains(a))'
@@ -432,41 +476,158 @@
 
   $ echo '[revsetalias]' >> .hg/hgrc
   $ echo 'm = merge()' >> .hg/hgrc
+  $ echo 'sincem = descendants(m)' >> .hg/hgrc
   $ echo 'd($1) = reverse(sort($1, date))' >> .hg/hgrc
   $ echo 'rs(ARG1, ARG2) = reverse(sort(ARG1, ARG2))' >> .hg/hgrc
   $ echo 'rs4(ARG1, ARGA, ARGB, ARG2) = reverse(sort(ARG1, ARG2))' >> .hg/hgrc
 
   $ try m
   ('symbol', 'm')
-  ('func', ('symbol', 'merge'), None)
+  (func
+    ('symbol', 'merge')
+    None)
+  6
+
+test alias recursion
+
+  $ try sincem
+  ('symbol', 'sincem')
+  (func
+    ('symbol', 'descendants')
+    (func
+      ('symbol', 'merge')
+      None))
   6
+  7
+
+test infinite recursion
+
+  $ echo 'recurse1 = recurse2' >> .hg/hgrc
+  $ echo 'recurse2 = recurse1' >> .hg/hgrc
+  $ try recurse1
+  ('symbol', 'recurse1')
+  hg: parse error: infinite expansion of revset alias "recurse1" detected
+  [255]
+
+test nesting and variable passing
+
+  $ echo 'nested($1) = nested2($1)' >> .hg/hgrc
+  $ echo 'nested2($1) = nested3($1)' >> .hg/hgrc
+  $ echo 'nested3($1) = max($1)' >> .hg/hgrc
+  $ try 'nested(2:5)'
+  (func
+    ('symbol', 'nested')
+    (range
+      ('symbol', '2')
+      ('symbol', '5')))
+  (func
+    ('symbol', 'max')
+    (range
+      ('symbol', '2')
+      ('symbol', '5')))
+  5
+
+test variable isolation: variable placeholders are rewritten as strings,
+then parsed and matched again as strings. Check that they do not leak
+too far away.
+
+  $ echo 'injectparamasstring = max("$1")' >> .hg/hgrc
+  $ echo 'callinjection($1) = descendants(injectparamasstring)' >> .hg/hgrc
+  $ try 'callinjection(2:5)'
+  (func
+    ('symbol', 'callinjection')
+    (range
+      ('symbol', '2')
+      ('symbol', '5')))
+  (func
+    ('symbol', 'descendants')
+    (func
+      ('symbol', 'max')
+      ('string', '$1')))
+  abort: unknown revision '$1'!
+  [255]
+
   $ try 'd(2:5)'
-  ('func', ('symbol', 'd'), ('range', ('symbol', '2'), ('symbol', '5')))
-  ('func', ('symbol', 'reverse'), ('func', ('symbol', 'sort'), ('list', ('range', ('symbol', '2'), ('symbol', '5')), ('symbol', 'date'))))
+  (func
+    ('symbol', 'd')
+    (range
+      ('symbol', '2')
+      ('symbol', '5')))
+  (func
+    ('symbol', 'reverse')
+    (func
+      ('symbol', 'sort')
+      (list
+        (range
+          ('symbol', '2')
+          ('symbol', '5'))
+        ('symbol', 'date'))))
   4
   5
   3
   2
   $ try 'rs(2 or 3, date)'
-  ('func', ('symbol', 'rs'), ('list', ('or', ('symbol', '2'), ('symbol', '3')), ('symbol', 'date')))
-  ('func', ('symbol', 'reverse'), ('func', ('symbol', 'sort'), ('list', ('or', ('symbol', '2'), ('symbol', '3')), ('symbol', 'date'))))
+  (func
+    ('symbol', 'rs')
+    (list
+      (or
+        ('symbol', '2')
+        ('symbol', '3'))
+      ('symbol', 'date')))
+  (func
+    ('symbol', 'reverse')
+    (func
+      ('symbol', 'sort')
+      (list
+        (or
+          ('symbol', '2')
+          ('symbol', '3'))
+        ('symbol', 'date'))))
   3
   2
   $ try 'rs()'
-  ('func', ('symbol', 'rs'), None)
+  (func
+    ('symbol', 'rs')
+    None)
   hg: parse error: invalid number of arguments: 0
   [255]
   $ try 'rs(2)'
-  ('func', ('symbol', 'rs'), ('symbol', '2'))
+  (func
+    ('symbol', 'rs')
+    ('symbol', '2'))
   hg: parse error: invalid number of arguments: 1
   [255]
   $ try 'rs(2, data, 7)'
-  ('func', ('symbol', 'rs'), ('list', ('list', ('symbol', '2'), ('symbol', 'data')), ('symbol', '7')))
+  (func
+    ('symbol', 'rs')
+    (list
+      (list
+        ('symbol', '2')
+        ('symbol', 'data'))
+      ('symbol', '7')))
   hg: parse error: invalid number of arguments: 3
   [255]
   $ try 'rs4(2 or 3, x, x, date)'
-  ('func', ('symbol', 'rs4'), ('list', ('list', ('list', ('or', ('symbol', '2'), ('symbol', '3')), ('symbol', 'x')), ('symbol', 'x')), ('symbol', 'date')))
-  ('func', ('symbol', 'reverse'), ('func', ('symbol', 'sort'), ('list', ('or', ('symbol', '2'), ('symbol', '3')), ('symbol', 'date'))))
+  (func
+    ('symbol', 'rs4')
+    (list
+      (list
+        (list
+          (or
+            ('symbol', '2')
+            ('symbol', '3'))
+          ('symbol', 'x'))
+        ('symbol', 'x'))
+      ('symbol', 'date')))
+  (func
+    ('symbol', 'reverse')
+    (func
+      ('symbol', 'sort')
+      (list
+        (or
+          ('symbol', '2')
+          ('symbol', '3'))
+        ('symbol', 'date'))))
   3
   2
 
--- a/tests/test-subrepo-deep-nested-change.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-subrepo-deep-nested-change.t	Tue Apr 17 17:56:36 2012 -0500
@@ -18,7 +18,6 @@
   adding sub1/.hgsub (glob)
   adding sub1/sub1 (glob)
   $ hg commit -R sub1 -m "sub1 import"
-  committing subrepository sub2
 
 Preparing the 'main' repo which depends on the subrepo 'sub1'
 
@@ -33,7 +32,6 @@
   adding main/.hgsub (glob)
   adding main/main (glob)
   $ hg commit -R main -m "main import"
-  committing subrepository sub1
 
 Cleaning both repositories, just as a clone -U
 
--- a/tests/test-subrepo-git.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-subrepo-git.t	Tue Apr 17 17:56:36 2012 -0500
@@ -34,7 +34,6 @@
   $ git clone -q ../gitroot s
   $ hg add .hgsub
   $ hg commit -m 'new git subrepo'
-  committing subrepository s
   $ hg debugsub
   path s
    source   ../gitroot
@@ -55,7 +54,6 @@
   $ hg status --subrepos
   M s/g
   $ hg commit -m 'update git subrepo'
-  committing subrepository s
   $ hg debugsub
   path s
    source   ../gitroot
@@ -222,7 +220,6 @@
   $ git pull -q >/dev/null 2>/dev/null
   $ cd ..
   $ hg commit -m 'git upstream sync'
-  committing subrepository s
   $ hg debugsub
   path s
    source   ../gitroot
@@ -287,7 +284,6 @@
   $ echo inner = inner > .hgsub
   $ hg add .hgsub
   $ hg commit -m 'nested sub'
-  committing subrepository inner
 
 nested commit
 
@@ -339,27 +335,32 @@
   $ hg update 1 -q
   $ hg rm .hgsubstate
   $ hg commit .hgsubstate -m 'no substate'
-  created new head
+  nothing changed
+  [1]
   $ hg tag -l nosubstate
   $ hg manifest
   .hgsub
+  .hgsubstate
   a
 
   $ hg status -S
+  R .hgsubstate
   $ hg sum | grep commit
-  commit: 1 subrepos
+  commit: 1 removed, 1 subrepos (new branch head)
 
   $ hg commit -m 'restore substate'
-  committing subrepository s
+  nothing changed
+  [1]
   $ hg manifest
   .hgsub
   .hgsubstate
   a
   $ hg sum | grep commit
-  commit: (clean)
+  commit: 1 removed, 1 subrepos (new branch head)
 
   $ hg update -qC nosubstate
   $ ls s
+  g
 
 issue3109: false positives in git diff-index
 
--- a/tests/test-subrepo-missing.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-subrepo-missing.t	Tue Apr 17 17:56:36 2012 -0500
@@ -7,12 +7,10 @@
   $ echo 'subrepo = subrepo' > .hgsub
   $ hg ci -Am addsubrepo
   adding .hgsub
-  committing subrepository subrepo
   $ echo b > subrepo/b
   $ hg -R subrepo ci -Am addb
   adding b
   $ hg ci -m updatedsub
-  committing subrepository subrepo
 
 delete .hgsub and revert it
 
--- a/tests/test-subrepo-recursion.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-subrepo-recursion.t	Tue Apr 17 17:56:36 2012 -0500
@@ -79,11 +79,9 @@
 
   $ cd ..
   $ hg commit -m 0-2-1
-  committing subrepository bar
 
   $ cd ..
   $ hg commit -m 1-2-1
-  committing subrepository foo
 
 Change working directory:
 
--- a/tests/test-subrepo-relative-path.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-subrepo-relative-path.t	Tue Apr 17 17:56:36 2012 -0500
@@ -20,7 +20,6 @@
   adding main/.hgsub (glob)
   adding main/main (glob)
   $ hg commit -R main -m "main import"
-  committing subrepository sub
 
 Cleaning both repositories, just as a clone -U
 
--- a/tests/test-subrepo-svn.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-subrepo-svn.t	Tue Apr 17 17:56:36 2012 -0500
@@ -69,8 +69,6 @@
   $ svn co --quiet "$SVNREPO"/src subdir/s
   $ hg add .hgsub
   $ hg ci -m1
-  committing subrepository s
-  committing subrepository subdir/s
 
 make sure we avoid empty commits (issue2445)
 
@@ -432,7 +430,6 @@
   $ echo "s =        [svn]       $SVNREPO/src" >> .hgsub
   $ hg add .hgsub
   $ hg ci -m addsub
-  committing subrepository s
   $ echo a > a
   $ hg ci -Am adda
   adding a
@@ -440,7 +437,6 @@
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
   $ svn up -qr6 s
   $ hg ci -m updatesub
-  committing subrepository s
   created new head
   $ echo pyc > s/dir/epsilon.pyc
   $ hg up 1
@@ -462,14 +458,12 @@
   $ echo "obstruct =        [svn]       $SVNREPO/externals" >> .hgsub
   $ svn co -r5 --quiet "$SVNREPO"/externals obstruct
   $ hg commit -m 'Start making obstructed working copy'
-  committing subrepository obstruct
   $ hg book other
   $ hg co -r 'p1(tip)'
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ echo "obstruct =        [svn]       $SVNREPO/src" >> .hgsub
   $ svn co -r5 --quiet "$SVNREPO"/src obstruct
   $ hg commit -m 'Other branch which will be obstructed'
-  committing subrepository obstruct
   created new head
 
 Switching back to the head where we have another path mapped to the
@@ -530,12 +524,10 @@
   Checked out revision 10.
   $ echo "recreated =        [svn]       $SVNREPO/branch" >> .hgsub
   $ hg ci -m addsub
-  committing subrepository recreated
   $ cd recreated
   $ svn up -q
   $ cd ..
   $ hg ci -m updatesub
-  committing subrepository recreated
   $ hg up -r-2
   D    *recreated/somethingnew (glob)
   A    *recreated/somethingold (glob)
--- a/tests/test-subrepo.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-subrepo.t	Tue Apr 17 17:56:36 2012 -0500
@@ -37,19 +37,21 @@
   commit: 1 added, 1 subrepos
   update: (current)
   $ hg ci -m1
-  committing subrepository s
 
-Revert can't (yet) revert subrepos:
+Revert subrepo and test subrepo fileset keyword:
 
   $ echo b > s/a
-  $ hg revert s
-  s: reverting subrepos is unsupported
+  $ hg revert "set:subrepo('glob:s*')"
+  reverting subrepo s
+  reverting s/a
+  $ rm s/a.orig
 
-Revert currently ignores subrepos by default
+Revert subrepo with no backup. The "reverting s/a" line is gone since
+we're really running 'hg update' in the subrepo:
 
-  $ hg revert -a
-  $ hg revert -R s -a -C
-  reverting s/a (glob)
+  $ echo b > s/a
+  $ hg revert --no-backup s
+  reverting subrepo s
 
 Issue2022: update -C
 
@@ -105,7 +107,6 @@
   $ echo b > s/a
   $ hg -R s ci -ms1
   $ hg --config ui.commitsubrepos=no ci -m3
-  committing subrepository s
 
 leave sub dirty (and check ui.commitsubrepos=no aborts the commit)
 
@@ -455,7 +456,6 @@
   1 files updated, 0 files merged, 1 files removed, 0 files unresolved
   $ hg ci -Am1
   adding .hgsub
-  committing subrepository s
   $ hg branch br
   marked working directory as branch br
   (branches are permanent and global, did you want a bookmark?)
@@ -464,7 +464,6 @@
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg ci -Am1
   adding b
-  committing subrepository s
   $ hg up default
   1 files updated, 0 files merged, 1 files removed, 0 files unresolved
   $ echo c > c
@@ -483,7 +482,6 @@
   $ echo d > d
   $ hg ci -Am1
   adding d
-  committing subrepository s
   $ hg up 3
   2 files updated, 0 files merged, 1 files removed, 0 files unresolved
   $ hg -R s up 5
@@ -491,7 +489,6 @@
   $ echo e > e
   $ hg ci -Am1
   adding e
-  committing subrepository s
 
   $ hg up 5
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -519,8 +516,6 @@
   $ hg -R testdelete add
   adding testdelete/.hgsub (glob)
   $ hg -R testdelete ci -m "nested 1 & 2 added"
-  committing subrepository nested
-  committing subrepository nested2
   $ echo nested = nested > testdelete/.hgsub
   $ hg -R testdelete ci -m "nested 2 deleted"
   $ cat testdelete/.hgsubstate
@@ -550,8 +545,6 @@
   $ hg -R main add
   adding main/.hgsub (glob)
   $ hg -R main ci -m "add subrepos"
-  committing subrepository nested_absolute
-  committing subrepository nested_relative
   $ cd ..
   $ hg clone mercurial/main mercurial2/main
   updating to branch default
@@ -574,7 +567,6 @@
   $ echo s = s > repo/.hgsub
   $ hg -R repo ci -Am1
   adding .hgsub
-  committing subrepository s
   $ hg clone repo repo2
   updating to branch default
   cloning subrepo s from $TESTTMP/sub/repo/s (glob)
@@ -589,7 +581,6 @@
   $ hg -R repo2/s ci -m3
   created new head
   $ hg -R repo2 ci -m3
-  committing subrepository s
   $ hg -q -R repo2 push
   abort: push creates new remote head 9d66565e64e1!
   (did you forget to merge? use push -f to force)
@@ -699,7 +690,6 @@
   $ echo subrepo-2 = subrepo-2 >> .hgsub
   $ hg add .hgsub
   $ hg ci -m 'Added subrepos'
-  committing subrepository subrepo-1
   committing subrepository subrepo-2
   $ hg st subrepo-2/file
 
@@ -857,17 +847,16 @@
 
   $ hg rm -f .hgsubstate
   $ hg ci -mrm
-  committing subrepository s
-  committing subrepository t
-  created new head
+  nothing changed
+  [1]
   $ hg log -vr tip
-  changeset:   14:3941e0aa5236
+  changeset:   13:925c17564ef8
   tag:         tip
-  parent:      11:365661e5936a
   user:        test
   date:        Thu Jan 01 00:00:00 1970 +0000
+  files:       .hgsubstate
   description:
-  rm
+  13
   
   
 
@@ -875,9 +864,11 @@
 
   $ hg rm .hgsub
   $ hg ci -mrm2
+  created new head
   $ hg log -vr tip
-  changeset:   15:8b31de9d13d1
+  changeset:   14:2400bccd50af
   tag:         tip
+  parent:      11:365661e5936a
   user:        test
   date:        Thu Jan 01 00:00:00 1970 +0000
   files:       .hgsub .hgsubstate
@@ -888,13 +879,13 @@
 Test issue3153: diff -S with deleted subrepos
 
   $ hg diff --nodates -S -c .
-  diff -r 3941e0aa5236 -r 8b31de9d13d1 .hgsub
+  diff -r 365661e5936a -r 2400bccd50af .hgsub
   --- a/.hgsub
   +++ /dev/null
   @@ -1,2 +0,0 @@
   -s = s
   -t = t
-  diff -r 3941e0aa5236 -r 8b31de9d13d1 .hgsubstate
+  diff -r 365661e5936a -r 2400bccd50af .hgsubstate
   --- a/.hgsubstate
   +++ /dev/null
   @@ -1,2 +0,0 @@
@@ -909,7 +900,6 @@
   $ hg add .hgsub
   $ hg init s
   $ hg ci -m0
-  committing subrepository s
 Adding with an explicit path in a subrepo adds the file
   $ echo c1 > f1
   $ echo c2 > s/f2
@@ -923,7 +913,6 @@
   $ hg ci -R s -m0
   $ hg ci -Am1
   adding f1
-  committing subrepository s
 Adding with an explicit path in a subrepo with -S has the same behavior
   $ echo c3 > f3
   $ echo c4 > s/f4
@@ -937,7 +926,6 @@
   $ hg ci -R s -m1
   $ hg ci -Ama2
   adding f3
-  committing subrepository s
 Adding without a path or pattern silently ignores subrepos
   $ echo c5 > f5
   $ echo c6 > s/f6
@@ -956,7 +944,6 @@
   adding f6
   adding f7
   $ hg ci -m3
-  committing subrepository s
 Adding without a path or pattern with -S also adds files in subrepos
   $ echo c8 > f8
   $ echo c9 > s/f9
@@ -975,7 +962,6 @@
   A s/f9
   $ hg ci -R s -m3
   $ hg ci -m4
-  committing subrepository s
 Adding with a pattern silently ignores subrepos
   $ echo c11 > fm11
   $ echo c12 > fn12
@@ -998,7 +984,6 @@
   adding fn14
   $ hg ci -Am5
   adding fn12
-  committing subrepository s
 Adding with a pattern with -S also adds matches in subrepos
   $ echo c15 > fm15
   $ echo c16 > fn16
@@ -1021,7 +1006,6 @@
   adding fn18
   $ hg ci -Am6
   adding fn16
-  committing subrepository s
 
 Test behavior of forget for explicit path in subrepo:
 Forgetting an explicit path in a subrepo untracks the file
--- a/tests/test-transplant.t	Mon Apr 16 11:48:15 2012 +0200
+++ b/tests/test-transplant.t	Tue Apr 17 17:56:36 2012 -0500
@@ -424,3 +424,57 @@
   a\r (esc)
   b\r (esc)
   $ cd ..
+
+test that transplant with a merge changeset is skipped
+
+  $ hg init merge1a
+  $ cd merge1a
+  $ echo a > a
+  $ hg ci -Am a
+  adding a
+  $ hg branch b
+  marked working directory as branch b
+  (branches are permanent and global, did you want a bookmark?)
+  $ hg ci -m branchb
+  $ echo b > b
+  $ hg ci -Am b
+  adding b
+  $ hg update default
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ hg merge b
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  (branch merge, don't forget to commit)
+  $ hg ci -m mergeb
+  $ cd ..
+
+  $ hg init merge1b
+  $ cd merge1b
+  $ hg transplant -s ../merge1a tip
+
+test that transplant with a merge changeset accepts --parent
+
+  $ hg init merge2a
+  $ cd merge2a
+  $ echo a > a
+  $ hg ci -Am a
+  adding a
+  $ hg branch b
+  marked working directory as branch b
+  (branches are permanent and global, did you want a bookmark?)
+  $ hg ci -m branchb
+  $ echo b > b
+  $ hg ci -Am b
+  adding b
+  $ hg update default
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ hg merge b
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  (branch merge, don't forget to commit)
+  $ hg ci -m mergeb
+  $ cd ..
+
+  $ hg init merge2b
+  $ cd merge2b
+  $ hg transplant -s ../merge2a --parent 0 tip
+  applying be9f9b39483f
+  be9f9b39483f transplanted to 9959e51f94d1