--- a/contrib/check-code.py Thu May 23 17:39:33 2013 -0500
+++ b/contrib/check-code.py Thu May 23 17:52:21 2013 -0500
@@ -109,6 +109,16 @@
(r'^ changeset .* references (corrupted|missing) \$TESTTMP/.*[^)]$',
winglobmsg),
(r'^ pulling from \$TESTTMP/.*[^)]$', winglobmsg, '\$TESTTMP/unix-repo$'),
+ (r'^ reverting .*/.*[^)]$', winglobmsg, '\$TESTTMP/unix-repo$'),
+ (r'^ cloning subrepo \S+/.*[^)]$', winglobmsg, '\$TESTTMP/unix-repo$'),
+ (r'^ pushing to \$TESTTMP/.*[^)]$', winglobmsg, '\$TESTTMP/unix-repo$'),
+ (r'^ pushing subrepo \S+/\S+ to.*[^)]$', winglobmsg,
+ '\$TESTTMP/unix-repo$'),
+ (r'^ moving \S+/.*[^)]$', winglobmsg),
+ (r'^ no changes made to subrepo since.*/.*[^)]$',
+ winglobmsg, '\$TESTTMP/unix-repo$'),
+ (r'^ .*: largefile \S+ not available from file:.*/.*[^)]$',
+ winglobmsg, '\$TESTTMP/unix-repo$'),
],
# warnings
[
--- a/contrib/shrink-revlog.py Thu May 23 17:39:33 2013 -0500
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,294 +0,0 @@
-"""reorder a revlog (the manifest by default) to save space
-
-Specifically, this topologically sorts the revisions in the revlog so that
-revisions on the same branch are adjacent as much as possible. This is a
-workaround for the fact that Mercurial computes deltas relative to the
-previous revision rather than relative to a parent revision.
-
-This is *not* safe to run on a changelog.
-"""
-
-# Originally written by Benoit Boissinot <benoit.boissinot at ens-lyon.org>
-# as a patch to rewrite-log. Cleaned up, refactored, documented, and
-# renamed by Greg Ward <greg at gerg.ca>.
-
-# XXX would be nice to have a way to verify the repository after shrinking,
-# e.g. by comparing "before" and "after" states of random changesets
-# (maybe: export before, shrink, export after, diff).
-
-import os, errno
-from mercurial import revlog, transaction, node, util, scmutil
-from mercurial import changegroup
-from mercurial.i18n import _
-
-
-def postorder(start, edges):
- result = []
- visit = list(start)
- finished = set()
-
- while visit:
- cur = visit[-1]
- for p in edges[cur]:
- # defend against node.nullrev because it's occasionally
- # possible for a node to have parents (null, something)
- # rather than (something, null)
- if p not in finished and p != node.nullrev:
- visit.append(p)
- break
- else:
- result.append(cur)
- finished.add(cur)
- visit.pop()
-
- return result
-
-def toposort_reversepostorder(ui, rl):
- # postorder of the reverse directed graph
-
- # map rev to list of parent revs (p2 first)
- parents = {}
- heads = set()
- ui.status(_('reading revs\n'))
- try:
- for rev in rl:
- ui.progress(_('reading'), rev, total=len(rl))
- (p1, p2) = rl.parentrevs(rev)
- if p1 == p2 == node.nullrev:
- parents[rev] = () # root node
- elif p1 == p2 or p2 == node.nullrev:
- parents[rev] = (p1,) # normal node
- else:
- parents[rev] = (p2, p1) # merge node
- heads.add(rev)
- for p in parents[rev]:
- heads.discard(p)
- finally:
- ui.progress(_('reading'), None)
-
- heads = list(heads)
- heads.sort(reverse=True)
-
- ui.status(_('sorting revs\n'))
- return postorder(heads, parents)
-
-def toposort_postorderreverse(ui, rl):
- # reverse-postorder of the reverse directed graph
-
- children = {}
- roots = set()
- ui.status(_('reading revs\n'))
- try:
- for rev in rl:
- ui.progress(_('reading'), rev, total=len(rl))
- (p1, p2) = rl.parentrevs(rev)
- if p1 == p2 == node.nullrev:
- roots.add(rev)
- children[rev] = []
- if p1 != node.nullrev:
- children[p1].append(rev)
- if p2 != node.nullrev:
- children[p2].append(rev)
- finally:
- ui.progress(_('reading'), None)
-
- roots = list(roots)
- roots.sort()
-
- ui.status(_('sorting revs\n'))
- result = postorder(roots, children)
- result.reverse()
- return result
-
-def writerevs(ui, r1, r2, order, tr):
-
- ui.status(_('writing revs\n'))
-
-
- order = [r1.node(r) for r in order]
-
- # this is a bit ugly, but it works
- count = [0]
- def lookup(revl, x):
- count[0] += 1
- ui.progress(_('writing'), count[0], total=len(order))
- return "%020d" % revl.linkrev(revl.rev(x))
-
- unlookup = lambda x: int(x, 10)
-
- try:
- bundler = changegroup.bundle10(lookup)
- group = util.chunkbuffer(r1.group(order, bundler))
- group = changegroup.unbundle10(group, "UN")
- r2.addgroup(group, unlookup, tr)
- finally:
- ui.progress(_('writing'), None)
-
-def report(ui, r1, r2):
- def getsize(r):
- s = 0
- for fn in (r.indexfile, r.datafile):
- try:
- s += os.stat(fn).st_size
- except OSError, inst:
- if inst.errno != errno.ENOENT:
- raise
- return s
-
- oldsize = float(getsize(r1))
- newsize = float(getsize(r2))
-
- # argh: have to pass an int to %d, because a float >= 2^32
- # blows up under Python 2.5 or earlier
- ui.write(_('old file size: %12d bytes (%6.1f MiB)\n')
- % (int(oldsize), oldsize / 1024 / 1024))
- ui.write(_('new file size: %12d bytes (%6.1f MiB)\n')
- % (int(newsize), newsize / 1024 / 1024))
-
- shrink_percent = (oldsize - newsize) / oldsize * 100
- shrink_factor = oldsize / newsize
- ui.write(_('shrinkage: %.1f%% (%.1fx)\n')
- % (shrink_percent, shrink_factor))
-
-def shrink(ui, repo, **opts):
- """shrink a revlog by reordering revisions
-
- Rewrites all the entries in some revlog of the current repository
- (by default, the manifest log) to save space.
-
- Different sort algorithms have different performance
- characteristics. Use ``--sort`` to select a sort algorithm so you
- can determine which works best for your data.
- """
-
- if not repo.local():
- raise util.Abort(_('not a local repository: %s') % repo.root)
-
- fn = opts.get('revlog')
- if not fn:
- indexfn = repo.sjoin('00manifest.i')
- else:
- if not fn.endswith('.i'):
- raise util.Abort(_('--revlog option must specify the revlog index '
- 'file (*.i), not %s') % opts.get('revlog'))
-
- indexfn = os.path.realpath(fn)
- store = repo.sjoin('')
- if not indexfn.startswith(store):
- raise util.Abort(_('--revlog option must specify a revlog in %s, '
- 'not %s') % (store, indexfn))
-
- sortname = opts['sort']
- try:
- toposort = globals()['toposort_' + sortname]
- except KeyError:
- raise util.Abort(_('no such toposort algorithm: %s') % sortname)
-
- if not os.path.exists(indexfn):
- raise util.Abort(_('no such file: %s') % indexfn)
- if '00changelog' in indexfn:
- raise util.Abort(_('shrinking the changelog '
- 'will corrupt your repository'))
-
- ui.write(_('shrinking %s\n') % indexfn)
- tmpindexfn = util.mktempcopy(indexfn, emptyok=True)
-
- r1 = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), indexfn)
- r2 = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), tmpindexfn)
-
- datafn, tmpdatafn = r1.datafile, r2.datafile
-
- oldindexfn = indexfn + '.old'
- olddatafn = datafn + '.old'
- if os.path.exists(oldindexfn) or os.path.exists(olddatafn):
- raise util.Abort(_('one or both of\n'
- ' %s\n'
- ' %s\n'
- 'exists from a previous run; please clean up '
- 'before running again') % (oldindexfn, olddatafn))
-
- # Don't use repo.transaction(), because then things get hairy with
- # paths: some need to be relative to .hg, and some need to be
- # absolute. Doing it this way keeps things simple: everything is an
- # absolute path.
- lock = repo.lock(wait=False)
- tr = transaction.transaction(ui.warn,
- open,
- repo.sjoin('journal'))
-
- def ignoremissing(func):
- def f(*args, **kw):
- try:
- return func(*args, **kw)
- except OSError, inst:
- if inst.errno != errno.ENOENT:
- raise
- return f
-
- try:
- try:
- order = toposort(ui, r1)
-
- suboptimal = 0
- for i in xrange(1, len(order)):
- parents = [p for p in r1.parentrevs(order[i])
- if p != node.nullrev]
- if parents and order[i - 1] not in parents:
- suboptimal += 1
- ui.note(_('%d suboptimal nodes\n') % suboptimal)
-
- writerevs(ui, r1, r2, order, tr)
- report(ui, r1, r2)
- tr.close()
- except: # re-raises
- # Abort transaction first, so we truncate the files before
- # deleting them.
- tr.abort()
- for fn in (tmpindexfn, tmpdatafn):
- ignoremissing(os.unlink)(fn)
- raise
- if not opts.get('dry_run'):
- # racy, both files cannot be renamed atomically
- # copy files
- util.oslink(indexfn, oldindexfn)
- ignoremissing(util.oslink)(datafn, olddatafn)
-
- # rename
- util.rename(tmpindexfn, indexfn)
- try:
- os.chmod(tmpdatafn, os.stat(datafn).st_mode)
- util.rename(tmpdatafn, datafn)
- except OSError, inst:
- if inst.errno != errno.ENOENT:
- raise
- ignoremissing(os.unlink)(datafn)
- else:
- for fn in (tmpindexfn, tmpdatafn):
- ignoremissing(os.unlink)(fn)
- finally:
- lock.release()
-
- if not opts.get('dry_run'):
- ui.write(
- _('note: old revlog saved in:\n'
- ' %s\n'
- ' %s\n'
- '(You can delete those files when you are satisfied that your\n'
- 'repository is still sane. '
- 'Running \'hg verify\' is strongly recommended.)\n')
- % (oldindexfn, olddatafn))
-
-cmdtable = {
- 'shrink': (shrink,
- [('', 'revlog', '',
- _('the revlog to shrink (.i)')),
- ('n', 'dry-run', None,
- _('do not shrink, simulate only')),
- ('', 'sort', 'reversepostorder',
- _('name of sort algorithm to use')),
- ],
- _('hg shrink [--revlog PATH]'))
-}
-
-if __name__ == "__main__":
- print "shrink-revlog.py is now an extension (see hg help extensions)"
--- a/doc/gendoc.py Thu May 23 17:39:33 2013 -0500
+++ b/doc/gendoc.py Thu May 23 17:52:21 2013 -0500
@@ -7,7 +7,7 @@
from mercurial import encoding
from mercurial import minirst
from mercurial.commands import table, globalopts
-from mercurial.i18n import _
+from mercurial.i18n import gettext, _
from mercurial.help import helptable
from mercurial import extensions
from mercurial import util
@@ -51,7 +51,7 @@
d['cmd'] = cmds[0]
d['aliases'] = cmd.split("|")[1:]
- d['desc'] = get_desc(attr[0].__doc__)
+ d['desc'] = get_desc(gettext(attr[0].__doc__))
d['opts'] = list(get_opts(attr[1]))
s = 'hg ' + cmds[0]
@@ -74,20 +74,9 @@
ui.write(minirst.section(_("Commands")))
commandprinter(ui, table, minirst.subsection)
- # print topics
- for names, sec, doc in helptable:
- if names[0] == "config":
- # The config help topic is included in the hgrc.5 man
- # page.
- continue
- for name in names:
- ui.write(".. _%s:\n" % name)
- ui.write("\n")
- ui.write(minirst.section(sec))
- if util.safehasattr(doc, '__call__'):
- doc = doc()
- ui.write(doc)
- ui.write("\n")
+ # print help topics
+ # The config help topic is included in the hgrc.5 man page.
+ helpprinter(ui, helptable, minirst.section, exclude=['config'])
ui.write(minirst.section(_("Extensions")))
ui.write(_("This section contains help for extensions that are "
@@ -102,12 +91,28 @@
for extensionname in sorted(allextensionnames()):
mod = extensions.load(None, extensionname, None)
ui.write(minirst.subsection(extensionname))
- ui.write("%s\n\n" % mod.__doc__)
+ ui.write("%s\n\n" % gettext(mod.__doc__))
cmdtable = getattr(mod, 'cmdtable', None)
if cmdtable:
ui.write(minirst.subsubsection(_('Commands')))
commandprinter(ui, cmdtable, minirst.subsubsubsection)
+def helpprinter(ui, helptable, sectionfunc, include=[], exclude=[]):
+ for names, sec, doc in helptable:
+ if exclude and names[0] in exclude:
+ continue
+ if include and names[0] not in include:
+ continue
+ for name in names:
+ ui.write(".. _%s:\n" % name)
+ ui.write("\n")
+ if sectionfunc:
+ ui.write(sectionfunc(sec))
+ if util.safehasattr(doc, '__call__'):
+ doc = doc()
+ ui.write(doc)
+ ui.write("\n")
+
def commandprinter(ui, cmdtable, sectionfunc):
h = {}
for c, attr in cmdtable.items():
--- a/hgext/color.py Thu May 23 17:39:33 2013 -0500
+++ b/hgext/color.py Thu May 23 17:52:21 2013 -0500
@@ -60,6 +60,11 @@
tags.normal = green
tags.local = black bold
+ rebase.rebased = blue
+ rebase.remaining = red bold
+
+ histedit.remaining = red bold
+
The available effects in terminfo mode are 'blink', 'bold', 'dim',
'inverse', 'invisible', 'italic', 'standout', and 'underline'; in
ECMA-48 mode, the options are 'bold', 'inverse', 'italic', and
@@ -248,8 +253,11 @@
'diff.trailingwhitespace': 'bold red_background',
'diffstat.deleted': 'red',
'diffstat.inserted': 'green',
+ 'histedit.remaining': 'red bold',
'ui.prompt': 'yellow',
'log.changeset': 'yellow',
+ 'rebase.rebased': 'blue',
+ 'rebase.remaining': 'red bold',
'resolve.resolved': 'green bold',
'resolve.unresolved': 'red bold',
'status.added': 'green bold',
--- a/hgext/convert/common.py Thu May 23 17:39:33 2013 -0500
+++ b/hgext/convert/common.py Thu May 23 17:52:21 2013 -0500
@@ -5,7 +5,7 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-import base64, errno, subprocess, os, datetime
+import base64, errno, subprocess, os, datetime, re
import cPickle as pickle
from mercurial import util
from mercurial.i18n import _
@@ -63,6 +63,14 @@
self.encoding = 'utf-8'
+ def checkhexformat(self, revstr):
+ """ fails if revstr is not a 40 byte hex. mercurial and git both uses
+ such format for their revision numbering
+ """
+        if not re.match(r'[0-9a-fA-F]{40}$', revstr):
+ raise util.Abort(_('splicemap entry %s is not a valid revision'
+ ' identifier') % revstr)
+
def before(self):
pass
@@ -164,6 +172,13 @@
"""
return {}
+ def checkrevformat(self, revstr):
+ """revstr is a string that describes a revision in the given
+ source control system. Return true if revstr has correct
+ format.
+ """
+ return True
+
class converter_sink(object):
"""Conversion sink (target) interface"""
@@ -424,34 +439,6 @@
self.fp.close()
self.fp = None
-def parsesplicemap(path):
- """Parse a splicemap, return a child/parents dictionary."""
- if not path:
- return {}
- m = {}
- try:
- fp = open(path, 'r')
- for i, line in enumerate(fp):
- line = line.splitlines()[0].rstrip()
- if not line:
- # Ignore blank lines
- continue
- try:
- child, parents = line.split(' ', 1)
- parents = parents.replace(',', ' ').split()
- except ValueError:
- raise util.Abort(_('syntax error in %s(%d): child parent1'
- '[,parent2] expected') % (path, i + 1))
- pp = []
- for p in parents:
- if p not in pp:
- pp.append(p)
- m[child] = pp
- except IOError, e:
- if e.errno != errno.ENOENT:
- raise
- return m
-
def makedatetimestamp(t):
"""Like util.makedate() but for time t instead of current time"""
delta = (datetime.datetime.utcfromtimestamp(t) -
--- a/hgext/convert/convcmd.py Thu May 23 17:39:33 2013 -0500
+++ b/hgext/convert/convcmd.py Thu May 23 17:52:21 2013 -0500
@@ -15,9 +15,9 @@
from gnuarch import gnuarch_source
from bzr import bzr_source
from p4 import p4_source
-import filemap, common
+import filemap
-import os, shutil
+import os, shutil, shlex
from mercurial import hg, util, encoding
from mercurial.i18n import _
@@ -118,9 +118,53 @@
self.readauthormap(opts.get('authormap'))
self.authorfile = self.dest.authorfile()
- self.splicemap = common.parsesplicemap(opts.get('splicemap'))
+ self.splicemap = self.parsesplicemap(opts.get('splicemap'))
self.branchmap = mapfile(ui, opts.get('branchmap'))
+ def parsesplicemap(self, path):
+ """ check and validate the splicemap format and
+ return a child/parents dictionary.
+ Format checking has two parts.
+ 1. generic format which is same across all source types
+ 2. specific format checking which may be different for
+ different source type. This logic is implemented in
+ checkrevformat function in source files like
+ hg.py, subversion.py etc.
+ """
+
+ if not path:
+ return {}
+ m = {}
+ try:
+ fp = open(path, 'r')
+ for i, line in enumerate(fp):
+ line = line.splitlines()[0].rstrip()
+ if not line:
+ # Ignore blank lines
+ continue
+ # split line
+ lex = shlex.shlex(line, posix=True)
+ lex.whitespace_split = True
+ lex.whitespace += ','
+ line = list(lex)
+ # check number of parents
+ if not (2 <= len(line) <= 3):
+ raise util.Abort(_('syntax error in %s(%d): child parent1'
+ '[,parent2] expected') % (path, i + 1))
+ for part in line:
+ self.source.checkrevformat(part)
+ child, p1, p2 = line[0], line[1:2], line[2:]
+ if p1 == p2:
+ m[child] = p1
+ else:
+ m[child] = p1 + p2
+ # if file does not exist or error reading, exit
+ except IOError:
+ raise util.Abort(_('splicemap file not found or error reading %s:')
+ % path)
+ return m
+
+
def walktree(self, heads):
'''Return a mapping that identifies the uncommitted parents of every
uncommitted changeset.'''
--- a/hgext/convert/git.py Thu May 23 17:39:33 2013 -0500
+++ b/hgext/convert/git.py Thu May 23 17:52:21 2013 -0500
@@ -296,3 +296,8 @@
pass
return bookmarks
+
+ def checkrevformat(self, revstr):
+ """ git revision string is a 40 byte hex """
+ self.checkhexformat(revstr)
+
--- a/hgext/convert/hg.py Thu May 23 17:39:33 2013 -0500
+++ b/hgext/convert/hg.py Thu May 23 17:52:21 2013 -0500
@@ -397,3 +397,7 @@
def getbookmarks(self):
return bookmarks.listbookmarks(self.repo)
+
+ def checkrevformat(self, revstr):
+ """ Mercurial, revision string is a 40 byte hex """
+ self.checkhexformat(revstr)
--- a/hgext/convert/subversion.py Thu May 23 17:39:33 2013 -0500
+++ b/hgext/convert/subversion.py Thu May 23 17:52:21 2013 -0500
@@ -452,6 +452,14 @@
del self.commits[rev]
return commit
+ def checkrevformat(self, revstr):
+ """ fails if revision format does not match the correct format"""
+        if not re.match(r'svn:[0-9a-f]{8}-[0-9a-f]{4}-'
+                        r'[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]'
+                        r'{12}(.*)@[0-9]+$', revstr):
+ raise util.Abort(_('splicemap entry %s is not a valid revision'
+ ' identifier') % revstr)
+
def gettags(self):
tags = {}
if self.tags is None:
--- a/hgext/histedit.py Thu May 23 17:39:33 2013 -0500
+++ b/hgext/histedit.py Thu May 23 17:52:21 2013 -0500
@@ -856,3 +856,16 @@
repair.strip(ui, repo, c)
finally:
lockmod.release(lock)
+
+def summaryhook(ui, repo):
+ if not os.path.exists(repo.join('histedit-state')):
+ return
+ (parentctxnode, rules, keep, topmost, replacements) = readstate(repo)
+ if rules:
+ # i18n: column positioning for "hg summary"
+ ui.write(_('hist: %s (histedit --continue)\n') %
+ (ui.label(_('%d remaining'), 'histedit.remaining') %
+ len(rules)))
+
+def extsetup(ui):
+ cmdutil.summaryhooks.add('histedit', summaryhook)
--- a/hgext/inotify/client.py Thu May 23 17:39:33 2013 -0500
+++ b/hgext/inotify/client.py Thu May 23 17:52:21 2013 -0500
@@ -159,7 +159,8 @@
vdirs = cs.read(nbytes)
if vdirs:
for vdir in vdirs.split('\0'):
- match.dir(vdir)
+ if match.explicitdir:
+ match.explicitdir(vdir)
return results
--- a/hgext/largefiles/overrides.py Thu May 23 17:39:33 2013 -0500
+++ b/hgext/largefiles/overrides.py Thu May 23 17:52:21 2013 -0500
@@ -376,8 +376,6 @@
continue
f, m, args, msg = action
- choices = (_('&Largefile'), _('&Normal file'))
-
splitstandin = lfutil.splitstandin(f)
if (m == "g" and splitstandin is not None and
splitstandin in p1 and f in p2):
@@ -386,8 +384,9 @@
lfile = splitstandin
standin = f
msg = _('%s has been turned into a largefile\n'
- 'use (l)argefile or keep as (n)ormal file?') % lfile
- if repo.ui.promptchoice(msg, choices, 0) == 0:
+ 'use (l)argefile or keep as (n)ormal file?'
+ '$$ &Largefile $$ &Normal file') % lfile
+ if repo.ui.promptchoice(msg, 0) == 0:
processed.append((lfile, "r", None, msg))
processed.append((standin, "g", (p2.flags(standin),), msg))
else:
@@ -398,8 +397,9 @@
standin = lfutil.standin(f)
lfile = f
msg = _('%s has been turned into a normal file\n'
- 'keep as (l)argefile or use (n)ormal file?') % lfile
- if repo.ui.promptchoice(msg, choices, 0) == 0:
+ 'keep as (l)argefile or use (n)ormal file?'
+ '$$ &Largefile $$ &Normal file') % lfile
+ if repo.ui.promptchoice(msg, 0) == 0:
processed.append((lfile, "r", None, msg))
else:
processed.append((standin, "r", None, msg))
@@ -444,9 +444,9 @@
return 0
if repo.ui.promptchoice(_('largefile %s has a merge conflict\n'
- 'keep (l)ocal or take (o)ther?') %
- lfutil.splitstandin(orig),
- (_('&Local'), _('&Other')), 0) == 0:
+ 'keep (l)ocal or take (o)ther?'
+ '$$ &Local $$ &Other') %
+ lfutil.splitstandin(orig), 0) == 0:
return 0
else:
repo.wwrite(fcdest.path(), fcother.data(), fcother.flags())
--- a/hgext/mq.py Thu May 23 17:39:33 2013 -0500
+++ b/hgext/mq.py Thu May 23 17:52:21 2013 -0500
@@ -3533,8 +3533,7 @@
raise util.Abort(_('no queue repository'))
return orig(r.ui, r, *args, **kwargs)
-def summary(orig, ui, repo, *args, **kwargs):
- r = orig(ui, repo, *args, **kwargs)
+def summaryhook(ui, repo):
q = repo.mq
m = []
a, u = len(q.applied), len(q.unapplied(repo))
@@ -3548,7 +3547,6 @@
else:
# i18n: column positioning for "hg summary"
ui.note(_("mq: (empty queue)\n"))
- return r
def revsetmq(repo, subset, x):
"""``mq()``
@@ -3567,7 +3565,7 @@
mqopt = [('', 'mq', None, _("operate on patch repository"))]
extensions.wrapcommand(commands.table, 'import', mqimport)
- extensions.wrapcommand(commands.table, 'summary', summary)
+ cmdutil.summaryhooks.add('mq', summaryhook)
entry = extensions.wrapcommand(commands.table, 'init', mqinit)
entry[1].extend(mqopt)
--- a/hgext/patchbomb.py Thu May 23 17:39:33 2013 -0500
+++ b/hgext/patchbomb.py Thu May 23 17:52:21 2013 -0500
@@ -482,8 +482,8 @@
if ds:
ui.write(ds)
ui.write('\n')
- if ui.promptchoice(_('are you sure you want to send (yn)?'),
- (_('&Yes'), _('&No'))):
+ if ui.promptchoice(_('are you sure you want to send (yn)?'
+ '$$ &Yes $$ &No')):
raise util.Abort(_('patchbomb canceled'))
ui.write('\n')
--- a/hgext/purge.py Thu May 23 17:39:33 2013 -0500
+++ b/hgext/purge.py Thu May 23 17:52:21 2013 -0500
@@ -97,7 +97,7 @@
directories = []
match = scmutil.match(repo[None], dirs, opts)
- match.dir = directories.append
+ match.explicitdir = match.traversedir = directories.append
status = repo.status(match=match, ignored=opts['all'], unknown=True)
for f in sorted(status[4] + status[5]):
--- a/hgext/rebase.py Thu May 23 17:39:33 2013 -0500
+++ b/hgext/rebase.py Thu May 23 17:52:21 2013 -0500
@@ -779,6 +779,17 @@
raise util.Abort(_('--tool can only be used with --rebase'))
orig(ui, repo, *args, **opts)
+def summaryhook(ui, repo):
+ if not os.path.exists(repo.join('rebasestate')):
+ return
+ state = restorestatus(repo)[2]
+ numrebased = len([i for i in state.itervalues() if i != -1])
+ # i18n: column positioning for "hg summary"
+ ui.write(_('rebase: %s, %s (rebase --continue)\n') %
+ (ui.label(_('%d rebased'), 'rebase.rebased') % numrebased,
+ ui.label(_('%d remaining'), 'rebase.remaining') %
+ (len(state) - numrebased)))
+
def uisetup(ui):
'Replace pull with a decorator to provide --rebase option'
entry = extensions.wrapcommand(commands.table, 'pull', pullrebase)
@@ -786,3 +797,4 @@
_("rebase working directory to branch head")))
entry[1].append(('t', 'tool', '',
_("specify merge tool for rebase")))
+ cmdutil.summaryhooks.add('rebase', summaryhook)
--- a/hgext/record.py Thu May 23 17:39:33 2013 -0500
+++ b/hgext/record.py Thu May 23 17:52:21 2013 -0500
@@ -283,17 +283,17 @@
if skipfile is not None:
return skipfile, skipfile, skipall, newpatches
while True:
- resps = _('[Ynesfdaq?]')
- choices = (_('&Yes, record this change'),
- _('&No, skip this change'),
- _('&Edit the change manually'),
- _('&Skip remaining changes to this file'),
- _('Record remaining changes to this &file'),
- _('&Done, skip remaining changes and files'),
- _('Record &all changes to all remaining files'),
- _('&Quit, recording no changes'),
- _('&?'))
- r = ui.promptchoice("%s %s" % (query, resps), choices)
+ resps = _('[Ynesfdaq?]'
+ '$$ &Yes, record this change'
+ '$$ &No, skip this change'
+ '$$ &Edit the change manually'
+ '$$ &Skip remaining changes to this file'
+ '$$ Record remaining changes to this &file'
+ '$$ &Done, skip remaining changes and files'
+ '$$ Record &all changes to all remaining files'
+ '$$ &Quit, recording no changes'
+ '$$ &?')
+ r = ui.promptchoice("%s %s" % (query, resps))
ui.write("\n")
if r == 8: # ?
doc = gettext(record.__doc__)
--- a/mercurial/changegroup.py Thu May 23 17:39:33 2013 -0500
+++ b/mercurial/changegroup.py Thu May 23 17:52:21 2013 -0500
@@ -6,8 +6,8 @@
# GNU General Public License version 2 or any later version.
from i18n import _
-from node import nullrev
-import mdiff, util
+from node import nullrev, hex
+import mdiff, util, dagutil
import struct, os, bz2, zlib, tempfile
_BUNDLE10_DELTA_HEADER = "20s20s20s20s"
@@ -225,13 +225,173 @@
class bundle10(object):
deltaheader = _BUNDLE10_DELTA_HEADER
- def __init__(self, lookup):
- self._lookup = lookup
+ def __init__(self, repo, bundlecaps=None):
+ """Given a source repo, construct a bundler.
+
+ bundlecaps is optional and can be used to specify the set of
+ capabilities which can be used to build the bundle.
+ """
+ # Set of capabilities we can use to build the bundle.
+ if bundlecaps is None:
+ bundlecaps = set()
+ self._bundlecaps = bundlecaps
+ self._changelog = repo.changelog
+ self._manifest = repo.manifest
+ reorder = repo.ui.config('bundle', 'reorder', 'auto')
+ if reorder == 'auto':
+ reorder = None
+ else:
+ reorder = util.parsebool(reorder)
+ self._repo = repo
+ self._reorder = reorder
+ self._progress = repo.ui.progress
def close(self):
return closechunk()
+
def fileheader(self, fname):
return chunkheader(len(fname)) + fname
- def revchunk(self, revlog, rev, prev):
+
+ def group(self, nodelist, revlog, lookup, units=None, reorder=None):
+ """Calculate a delta group, yielding a sequence of changegroup chunks
+ (strings).
+
+ Given a list of changeset revs, return a set of deltas and
+ metadata corresponding to nodes. The first delta is
+ first parent(nodelist[0]) -> nodelist[0], the receiver is
+ guaranteed to have this parent as it has all history before
+ these changesets. In the case firstparent is nullrev the
+ changegroup starts with a full revision.
+
+ If units is not None, progress detail will be generated, units specifies
+ the type of revlog that is touched (changelog, manifest, etc.).
+ """
+ # if we don't have any revisions touched by these changesets, bail
+ if len(nodelist) == 0:
+ yield self.close()
+ return
+
+ # for generaldelta revlogs, we linearize the revs; this will both be
+ # much quicker and generate a much smaller bundle
+ if (revlog._generaldelta and reorder is not False) or reorder:
+ dag = dagutil.revlogdag(revlog)
+ revs = set(revlog.rev(n) for n in nodelist)
+ revs = dag.linearize(revs)
+ else:
+ revs = sorted([revlog.rev(n) for n in nodelist])
+
+ # add the parent of the first rev
+ p = revlog.parentrevs(revs[0])[0]
+ revs.insert(0, p)
+
+ # build deltas
+ total = len(revs) - 1
+ msgbundling = _('bundling')
+ for r in xrange(len(revs) - 1):
+ if units is not None:
+ self._progress(msgbundling, r + 1, unit=units, total=total)
+ prev, curr = revs[r], revs[r + 1]
+ linknode = lookup(revlog.node(curr))
+ for c in self.revchunk(revlog, curr, prev, linknode):
+ yield c
+
+ yield self.close()
+
+ def generate(self, commonrevs, clnodes, fastpathlinkrev, source):
+ '''yield a sequence of changegroup chunks (strings)'''
+ repo = self._repo
+ cl = self._changelog
+ mf = self._manifest
+ reorder = self._reorder
+ progress = self._progress
+
+ # for progress output
+ msgbundling = _('bundling')
+
+ mfs = {} # needed manifests
+ fnodes = {} # needed file nodes
+ changedfiles = set()
+
+ # filter any nodes that claim to be part of the known set
+ def prune(revlog, missing):
+ rr, rl = revlog.rev, revlog.linkrev
+ return [n for n in missing if rl(rr(n)) not in commonrevs]
+
+ # Callback for the changelog, used to collect changed files and manifest
+ # nodes.
+ # Returns the linkrev node (identity in the changelog case).
+ def lookupcl(x):
+ c = cl.read(x)
+ changedfiles.update(c[3])
+ # record the first changeset introducing this manifest version
+ mfs.setdefault(c[0], x)
+ return x
+
+ # Callback for the manifest, used to collect linkrevs for filelog
+ # revisions.
+ # Returns the linkrev node (collected in lookupcl).
+ def lookupmf(x):
+ clnode = mfs[x]
+ if not fastpathlinkrev:
+ mdata = mf.readfast(x)
+ for f, n in mdata.iteritems():
+ if f in changedfiles:
+ # record the first changeset introducing this filelog
+ # version
+ fnodes[f].setdefault(n, clnode)
+ return clnode
+
+ for chunk in self.group(clnodes, cl, lookupcl, units=_('changesets'),
+ reorder=reorder):
+ yield chunk
+ progress(msgbundling, None)
+
+ for f in changedfiles:
+ fnodes[f] = {}
+ mfnodes = prune(mf, mfs)
+ for chunk in self.group(mfnodes, mf, lookupmf, units=_('manifests'),
+ reorder=reorder):
+ yield chunk
+ progress(msgbundling, None)
+
+ mfs.clear()
+ total = len(changedfiles)
+ # for progress output
+ msgfiles = _('files')
+ for i, fname in enumerate(sorted(changedfiles)):
+ filerevlog = repo.file(fname)
+ if not len(filerevlog):
+ raise util.Abort(_("empty or missing revlog for %s") % fname)
+
+ if fastpathlinkrev:
+ ln, llr = filerevlog.node, filerevlog.linkrev
+ def genfilenodes():
+ for r in filerevlog:
+ linkrev = llr(r)
+ if linkrev not in commonrevs:
+ yield filerevlog.node(r), cl.node(linkrev)
+ fnodes[fname] = dict(genfilenodes())
+
+ linkrevnodes = fnodes.pop(fname, {})
+ # Lookup for filenodes, we collected the linkrev nodes above in the
+ # fastpath case and with lookupmf in the slowpath case.
+ def lookupfilelog(x):
+ return linkrevnodes[x]
+
+ filenodes = prune(filerevlog, linkrevnodes)
+ if filenodes:
+ progress(msgbundling, i + 1, item=fname, unit=msgfiles,
+ total=total)
+ yield self.fileheader(fname)
+ for chunk in self.group(filenodes, filerevlog, lookupfilelog,
+ reorder=reorder):
+ yield chunk
+ yield self.close()
+ progress(msgbundling, None)
+
+ if clnodes:
+ repo.hook('outgoing', node=hex(clnodes[0]), source=source)
+
+ def revchunk(self, revlog, rev, prev, linknode):
node = revlog.node(rev)
p1, p2 = revlog.parentrevs(rev)
base = prev
@@ -242,7 +402,6 @@
prefix = mdiff.trivialdiffheader(len(delta))
else:
delta = revlog.revdiff(base, rev)
- linknode = self._lookup(revlog, node)
p1n, p2n = revlog.parents(node)
basenode = revlog.node(base)
meta = self.builddeltaheader(node, p1n, p2n, basenode, linknode)
--- a/mercurial/cmdutil.py Thu May 23 17:39:33 2013 -0500
+++ b/mercurial/cmdutil.py Thu May 23 17:52:21 2013 -0500
@@ -2082,3 +2082,6 @@
return decorator
return cmd
+
+# a list of (ui, repo) functions called by commands.summary
+summaryhooks = util.hooks()
--- a/mercurial/commands.py Thu May 23 17:39:33 2013 -0500
+++ b/mercurial/commands.py Thu May 23 17:52:21 2013 -0500
@@ -767,9 +767,8 @@
('d', 'delete', False, _('delete a given bookmark')),
('m', 'rename', '', _('rename a given bookmark'), _('NAME')),
('i', 'inactive', False, _('mark a bookmark inactive'))],
- _('hg bookmarks [-f] [-d] [-i] [-m NAME] [-r REV] [NAME]'))
-def bookmark(ui, repo, mark=None, rev=None, force=False, delete=False,
- rename=None, inactive=False):
+ _('hg bookmarks [OPTIONS]... [NAME]...'))
+def bookmark(ui, repo, *names, **opts):
'''track a line of development with movable markers
Bookmarks are pointers to certain commits that move when committing.
@@ -796,6 +795,12 @@
active even if -i/--inactive is not given. If no NAME is given, the
current active bookmark will be marked inactive.
'''
+ force = opts.get('force')
+ rev = opts.get('rev')
+ delete = opts.get('delete')
+ rename = opts.get('rename')
+ inactive = opts.get('inactive')
+
hexfn = ui.debugflag and hex or short
marks = repo._bookmarks
cur = repo.changectx('.').node()
@@ -846,21 +851,24 @@
raise util.Abort(_("--rev is incompatible with --delete"))
if rename and rev:
raise util.Abort(_("--rev is incompatible with --rename"))
- if mark is None and (delete or rev):
+ if not names and (delete or rev):
raise util.Abort(_("bookmark name required"))
if delete:
- if mark not in marks:
- raise util.Abort(_("bookmark '%s' does not exist") % mark)
- if mark == repo._bookmarkcurrent:
- bookmarks.setcurrent(repo, None)
- del marks[mark]
+ for mark in names:
+ if mark not in marks:
+ raise util.Abort(_("bookmark '%s' does not exist") % mark)
+ if mark == repo._bookmarkcurrent:
+ bookmarks.setcurrent(repo, None)
+ del marks[mark]
marks.write()
elif rename:
- if mark is None:
+ if not names:
raise util.Abort(_("new bookmark name required"))
- mark = checkformat(mark)
+ elif len(names) > 1:
+ raise util.Abort(_("only one new bookmark name allowed"))
+ mark = checkformat(names[0])
if rename not in marks:
raise util.Abort(_("bookmark '%s' does not exist") % rename)
checkconflict(repo, mark, force)
@@ -870,19 +878,23 @@
del marks[rename]
marks.write()
- elif mark is not None:
- mark = checkformat(mark)
- if inactive and mark == repo._bookmarkcurrent:
- bookmarks.setcurrent(repo, None)
- return
- tgt = cur
- if rev:
- tgt = scmutil.revsingle(repo, rev).node()
- checkconflict(repo, mark, force, tgt)
- marks[mark] = tgt
- if not inactive and cur == marks[mark] and not rev:
- bookmarks.setcurrent(repo, mark)
- elif cur != tgt and mark == repo._bookmarkcurrent:
+ elif names:
+ newact = None
+ for mark in names:
+ mark = checkformat(mark)
+ if newact is None:
+ newact = mark
+ if inactive and mark == repo._bookmarkcurrent:
+ bookmarks.setcurrent(repo, None)
+ return
+ tgt = cur
+ if rev:
+ tgt = scmutil.revsingle(repo, rev).node()
+ checkconflict(repo, mark, force, tgt)
+ marks[mark] = tgt
+ if not inactive and cur == marks[newact] and not rev:
+ bookmarks.setcurrent(repo, newact)
+ elif cur != tgt and newact == repo._bookmarkcurrent:
bookmarks.setcurrent(repo, None)
marks.write()
@@ -1084,13 +1096,16 @@
base = ['null']
else:
base = scmutil.revrange(repo, opts.get('base'))
+ # TODO: get desired bundlecaps from command line.
+ bundlecaps = None
if base:
if dest:
raise util.Abort(_("--base is incompatible with specifying "
"a destination"))
common = [repo.lookup(rev) for rev in base]
heads = revs and map(repo.lookup, revs) or revs
- cg = repo.getbundle('bundle', heads=heads, common=common)
+ cg = repo.getbundle('bundle', heads=heads, common=common,
+ bundlecaps=bundlecaps)
outgoing = None
else:
dest = ui.expandpath(dest or 'default-push', dest or 'default')
@@ -1102,7 +1117,7 @@
onlyheads=heads,
force=opts.get('force'),
portable=True)
- cg = repo.getlocalbundle('bundle', outgoing)
+ cg = repo.getlocalbundle('bundle', outgoing, bundlecaps)
if not cg:
scmutil.nochangesfound(ui, repo, outgoing and outgoing.excluded)
return 1
@@ -1903,6 +1918,8 @@
args['common'] = [bin(s) for s in common]
if head:
args['heads'] = [bin(s) for s in head]
+ # TODO: get desired bundlecaps from command line.
+ args['bundlecaps'] = None
bundle = repo.getbundle('debug', **args)
bundletype = opts.get('type', 'bzip2').lower()
@@ -5474,6 +5491,8 @@
ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
(new, len(bheads)))
+ cmdutil.summaryhooks(ui, repo)
+
if opts.get('remote'):
t = []
source, branches = hg.parseurl(ui.expandpath('default'))
--- a/mercurial/context.py Thu May 23 17:39:33 2013 -0500
+++ b/mercurial/context.py Thu May 23 17:52:21 2013 -0500
@@ -398,7 +398,7 @@
("bad args: changeid=%r, fileid=%r, changectx=%r"
% (changeid, fileid, changectx))
- if filelog:
+ if filelog is not None:
self._filelog = filelog
if changeid is not None:
@@ -437,7 +437,9 @@
@propertycache
def _changeid(self):
- if '_changectx' in self.__dict__:
+ if '_changeid' in self.__dict__:
+ return self._changeid
+ elif '_changectx' in self.__dict__:
return self._changectx.rev()
else:
return self._filelog.linkrev(self._filerev)
@@ -1167,7 +1169,7 @@
self._changeid = None
self._filerev = self._filenode = None
- if filelog:
+ if filelog is not None:
self._filelog = filelog
if workingctx:
self._changectx = workingctx
--- a/mercurial/copies.py Thu May 23 17:39:33 2013 -0500
+++ b/mercurial/copies.py Thu May 23 17:52:21 2013 -0500
@@ -222,65 +222,8 @@
fullcopy = {}
diverge = {}
- def related(f1, f2, limit):
- # Walk back to common ancestor to see if the two files originate
- # from the same file. Since workingfilectx's rev() is None it messes
- # up the integer comparison logic, hence the pre-step check for
- # None (f1 and f2 can only be workingfilectx's initially).
-
- if f1 == f2:
- return f1 # a match
-
- g1, g2 = f1.ancestors(), f2.ancestors()
- try:
- f1r, f2r = f1.rev(), f2.rev()
-
- if f1r is None:
- f1 = g1.next()
- if f2r is None:
- f2 = g2.next()
-
- while True:
- f1r, f2r = f1.rev(), f2.rev()
- if f1r > f2r:
- f1 = g1.next()
- elif f2r > f1r:
- f2 = g2.next()
- elif f1 == f2:
- return f1 # a match
- elif f1r == f2r or f1r < limit or f2r < limit:
- return False # copy no longer relevant
- except StopIteration:
- return False
-
- def checkcopies(f, m1, m2):
- '''check possible copies of f from m1 to m2'''
- of = None
- seen = set([f])
- for oc in ctx(f, m1[f]).ancestors():
- ocr = oc.rev()
- of = oc.path()
- if of in seen:
- # check limit late - grab last rename before
- if ocr < limit:
- break
- continue
- seen.add(of)
-
- fullcopy[f] = of # remember for dir rename detection
- if of not in m2:
- continue # no match, keep looking
- if m2[of] == ma.get(of):
- break # no merge needed, quit early
- c2 = ctx(of, m2[of])
- cr = related(oc, c2, ca.rev())
- if cr and (of == f or of == c2.path()): # non-divergent
- copy[f] = of
- of = None
- break
-
- if of in ma:
- diverge.setdefault(of, []).append(f)
+ def _checkcopies(f, m1, m2):
+ checkcopies(ctx, f, m1, m2, ca, limit, diverge, copy, fullcopy)
repo.ui.debug(" searching for copies back to rev %d\n" % limit)
@@ -295,9 +238,9 @@
% "\n ".join(u2))
for f in u1:
- checkcopies(f, m1, m2)
+ _checkcopies(f, m1, m2)
for f in u2:
- checkcopies(f, m2, m1)
+ _checkcopies(f, m2, m1)
renamedelete = {}
renamedelete2 = set()
@@ -386,3 +329,78 @@
break
return copy, movewithdir, diverge, renamedelete
+
+def checkcopies(ctx, f, m1, m2, ca, limit, diverge, copy, fullcopy):
+ """
+ check possible copies of f from m1 to m2
+
+ ctx = function accepting (filename, node) that returns a filectx.
+ f = the filename to check
+ m1 = the source manifest
+ m2 = the destination manifest
+ ca = the changectx of the common ancestor
+ limit = the rev number to not search beyond
+ diverge = record all diverges in this dict
+ copy = record all non-divergent copies in this dict
+ fullcopy = record all copies in this dict
+ """
+
+ ma = ca.manifest()
+
+ def _related(f1, f2, limit):
+ # Walk back to common ancestor to see if the two files originate
+ # from the same file. Since workingfilectx's rev() is None it messes
+ # up the integer comparison logic, hence the pre-step check for
+ # None (f1 and f2 can only be workingfilectx's initially).
+
+ if f1 == f2:
+ return f1 # a match
+
+ g1, g2 = f1.ancestors(), f2.ancestors()
+ try:
+ f1r, f2r = f1.rev(), f2.rev()
+
+ if f1r is None:
+ f1 = g1.next()
+ if f2r is None:
+ f2 = g2.next()
+
+ while True:
+ f1r, f2r = f1.rev(), f2.rev()
+ if f1r > f2r:
+ f1 = g1.next()
+ elif f2r > f1r:
+ f2 = g2.next()
+ elif f1 == f2:
+ return f1 # a match
+ elif f1r == f2r or f1r < limit or f2r < limit:
+ return False # copy no longer relevant
+ except StopIteration:
+ return False
+
+ of = None
+ seen = set([f])
+ for oc in ctx(f, m1[f]).ancestors():
+ ocr = oc.rev()
+ of = oc.path()
+ if of in seen:
+ # check limit late - grab last rename before
+ if ocr < limit:
+ break
+ continue
+ seen.add(of)
+
+ fullcopy[f] = of # remember for dir rename detection
+ if of not in m2:
+ continue # no match, keep looking
+ if m2[of] == ma.get(of):
+ break # no merge needed, quit early
+ c2 = ctx(of, m2[of])
+ cr = _related(oc, c2, ca.rev())
+ if cr and (of == f or of == c2.path()): # non-divergent
+ copy[f] = of
+ of = None
+ break
+
+ if of in ma:
+ diverge.setdefault(of, []).append(f)
--- a/mercurial/dirstate.py Thu May 23 17:39:33 2013 -0500
+++ b/mercurial/dirstate.py Thu May 23 17:52:21 2013 -0500
@@ -522,18 +522,15 @@
return True
return False
- def walk(self, match, subrepos, unknown, ignored):
- '''
- Walk recursively through the directory tree, finding all files
- matched by match.
+ def _walkexplicit(self, match, subrepos):
+ '''Get stat data about the files explicitly specified by match.
- Return a dict mapping filename to stat-like object (either
- mercurial.osutil.stat instance or return value of os.stat()).
- '''
-
- def fwarn(f, msg):
- self._ui.warn('%s: %s\n' % (self.pathto(f), msg))
- return False
+ Return a triple (results, dirsfound, dirsnotfound).
+ - results is a mapping from filename to stat result. It also contains
+ listings mapping subrepos and .hg to None.
+ - dirsfound is a list of files found to be directories.
+ - dirsnotfound is a list of files that the dirstate thinks are
+ directories and that were not found.'''
def badtype(mode):
kind = _('unknown')
@@ -549,41 +546,23 @@
kind = _('directory')
return _('unsupported file type (type is %s)') % kind
- ignore = self._ignore
- dirignore = self._dirignore
- if ignored:
- ignore = util.never
- dirignore = util.never
- elif not unknown:
- # if unknown and ignored are False, skip step 2
- ignore = util.always
- dirignore = util.always
-
- matchfn = match.matchfn
- matchalways = match.always()
+ matchedir = match.explicitdir
badfn = match.bad
dmap = self._map
normpath = util.normpath
- listdir = osutil.listdir
lstat = os.lstat
getkind = stat.S_IFMT
dirkind = stat.S_IFDIR
regkind = stat.S_IFREG
lnkkind = stat.S_IFLNK
join = self._join
- work = []
- wadd = work.append
+ dirsfound = []
+ foundadd = dirsfound.append
+ dirsnotfound = []
+ notfoundadd = dirsnotfound.append
- exact = skipstep3 = False
- if matchfn == match.exact: # match.exact
- exact = True
- dirignore = util.always # skip step 2
- elif match.files() and not match.anypats(): # match.match, no patterns
- skipstep3 = True
-
- if not exact and self._checkcase:
+ if match.matchfn != match.exact and self._checkcase:
normalize = self._normalize
- skipstep3 = False
else:
normalize = None
@@ -604,7 +583,6 @@
results = dict.fromkeys(subrepos)
results['.hg'] = None
- # step 1: find all explicit files
for ff in files:
if normalize:
nf = normalize(normpath(ff), False, True)
@@ -617,13 +595,12 @@
st = lstat(join(nf))
kind = getkind(st.st_mode)
if kind == dirkind:
- skipstep3 = False
if nf in dmap:
#file deleted on disk but still in dirstate
results[nf] = None
- match.dir(nf)
- if not dirignore(nf):
- wadd(nf)
+ if matchedir:
+ matchedir(nf)
+ foundadd(nf)
elif kind == regkind or kind == lnkkind:
results[nf] = st
else:
@@ -637,12 +614,75 @@
prefix = nf + "/"
for fn in dmap:
if fn.startswith(prefix):
- match.dir(nf)
- skipstep3 = False
+ if matchedir:
+ matchedir(nf)
+ notfoundadd(nf)
break
else:
badfn(ff, inst.strerror)
+ return results, dirsfound, dirsnotfound
+
+ def walk(self, match, subrepos, unknown, ignored, full=True):
+ '''
+ Walk recursively through the directory tree, finding all files
+ matched by match.
+
+ If full is False, maybe skip some known-clean files.
+
+ Return a dict mapping filename to stat-like object (either
+ mercurial.osutil.stat instance or return value of os.stat()).
+
+ '''
+ # full is a flag that extensions that hook into walk can use -- this
+ # implementation doesn't use it at all. This satisfies the contract
+ # because we only guarantee a "maybe".
+
+ def fwarn(f, msg):
+ self._ui.warn('%s: %s\n' % (self.pathto(f), msg))
+ return False
+
+ ignore = self._ignore
+ dirignore = self._dirignore
+ if ignored:
+ ignore = util.never
+ dirignore = util.never
+ elif not unknown:
+ # if unknown and ignored are False, skip step 2
+ ignore = util.always
+ dirignore = util.always
+
+ matchfn = match.matchfn
+ matchalways = match.always()
+ matchtdir = match.traversedir
+ dmap = self._map
+ listdir = osutil.listdir
+ lstat = os.lstat
+ dirkind = stat.S_IFDIR
+ regkind = stat.S_IFREG
+ lnkkind = stat.S_IFLNK
+ join = self._join
+
+ exact = skipstep3 = False
+ if matchfn == match.exact: # match.exact
+ exact = True
+ dirignore = util.always # skip step 2
+ elif match.files() and not match.anypats(): # match.match, no patterns
+ skipstep3 = True
+
+ if not exact and self._checkcase:
+ normalize = self._normalize
+ skipstep3 = False
+ else:
+ normalize = None
+
+ # step 1: find all explicit files
+ results, work, dirsnotfound = self._walkexplicit(match, subrepos)
+
+ skipstep3 = skipstep3 and not (work or dirsnotfound)
+ work = [d for d in work if not dirignore(d)]
+ wadd = work.append
+
# step 2: visit subdirectories
while work:
nd = work.pop()
@@ -666,7 +706,8 @@
if nf not in results:
if kind == dirkind:
if not ignore(nf):
- match.dir(nf)
+ if matchtdir:
+ matchtdir(nf)
wadd(nf)
if nf in dmap and (matchalways or matchfn(nf)):
results[nf] = None
@@ -766,8 +807,13 @@
lnkkind = stat.S_IFLNK
- for fn, st in self.walk(match, subrepos, listunknown,
- listignored).iteritems():
+ # We need to do full walks when either
+ # - we're listing all clean files, or
+ # - match.traversedir does something, because match.traversedir should
+ # be called for every dir in the working dir
+ full = listclean or match.traversedir is not None
+ for fn, st in self.walk(match, subrepos, listunknown, listignored,
+ full=full).iteritems():
if fn not in dmap:
if (listignored or mexact(fn)) and dirignore(fn):
if listignored:
--- a/mercurial/filelog.py Thu May 23 17:39:33 2013 -0500
+++ b/mercurial/filelog.py Thu May 23 17:52:21 2013 -0500
@@ -31,7 +31,7 @@
class filelog(revlog.revlog):
def __init__(self, opener, path):
- revlog.revlog.__init__(self, opener,
+ super(filelog, self).__init__(opener,
"/".join(("data", path + ".i")))
def read(self, node):
@@ -64,7 +64,7 @@
return len(self.read(node))
# XXX if self.read(node).startswith("\1\n"), this returns (size+4)
- return revlog.revlog.size(self, rev)
+ return super(filelog, self).size(rev)
def cmp(self, node, text):
"""compare text with a given file revision
@@ -76,7 +76,7 @@
if text.startswith('\1\n'):
t = '\1\n\1\n' + text
- samehashes = not revlog.revlog.cmp(self, node, t)
+ samehashes = not super(filelog, self).cmp(node, t)
if samehashes:
return False
--- a/mercurial/filemerge.py Thu May 23 17:39:33 2013 -0500
+++ b/mercurial/filemerge.py Thu May 23 17:52:21 2013 -0500
@@ -144,8 +144,8 @@
fd = fcd.path()
if ui.promptchoice(_(" no tool found to merge %s\n"
- "keep (l)ocal or take (o)ther?") % fd,
- (_("&Local"), _("&Other")), 0):
+ "keep (l)ocal or take (o)ther?"
+ "$$ &Local $$ &Other") % fd, 0):
return _iother(repo, mynode, orig, fcd, fco, fca, toolconf)
else:
return _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf)
@@ -348,16 +348,16 @@
checked = False
if 'prompt' in _toollist(ui, tool, "check"):
checked = True
- if ui.promptchoice(_("was merge of '%s' successful (yn)?") % fd,
- (_("&Yes"), _("&No")), 1):
+ if ui.promptchoice(_("was merge of '%s' successful (yn)?"
+ "$$ &Yes $$ &No") % fd, 1):
r = 1
if not r and not checked and (_toolbool(ui, tool, "checkchanged") or
'changed' in _toollist(ui, tool, "check")):
if filecmp.cmp(a, back):
if ui.promptchoice(_(" output file %s appears unchanged\n"
- "was merge successful (yn)?") % fd,
- (_("&Yes"), _("&No")), 1):
+ "was merge successful (yn)?"
+ "$$ &Yes $$ &No") % fd, 1):
r = 1
if _toolbool(ui, tool, "fixeol"):
--- a/mercurial/fileset.py Thu May 23 17:39:33 2013 -0500
+++ b/mercurial/fileset.py Thu May 23 17:52:21 2013 -0500
@@ -263,23 +263,10 @@
raise error.ParseError(_('invalid match pattern: %s') % e)
return [f for f in mctx.existing() if r.search(mctx.ctx[f].data())]
-_units = dict(k=2**10, K=2**10, kB=2**10, KB=2**10,
- M=2**20, MB=2**20, G=2**30, GB=2**30)
-
-def _sizetoint(s):
- try:
- s = s.strip()
- for k, v in _units.items():
- if s.endswith(k):
- return int(float(s[:-len(k)]) * v)
- return int(s)
- except ValueError:
- raise error.ParseError(_("couldn't parse size: %s") % s)
-
def _sizetomax(s):
try:
s = s.strip()
- for k, v in _units.items():
+ for k, v in util._sizeunits:
if s.endswith(k):
# max(4k) = 5k - 1, max(4.5k) = 4.6k - 1
n = s[:-len(k)]
@@ -306,23 +293,23 @@
expr = getstring(x, _("size requires an expression")).strip()
if '-' in expr: # do we have a range?
a, b = expr.split('-', 1)
- a = _sizetoint(a)
- b = _sizetoint(b)
+ a = util.sizetoint(a)
+ b = util.sizetoint(b)
m = lambda x: x >= a and x <= b
elif expr.startswith("<="):
- a = _sizetoint(expr[2:])
+ a = util.sizetoint(expr[2:])
m = lambda x: x <= a
elif expr.startswith("<"):
- a = _sizetoint(expr[1:])
+ a = util.sizetoint(expr[1:])
m = lambda x: x < a
elif expr.startswith(">="):
- a = _sizetoint(expr[2:])
+ a = util.sizetoint(expr[2:])
m = lambda x: x >= a
elif expr.startswith(">"):
- a = _sizetoint(expr[1:])
+ a = util.sizetoint(expr[1:])
m = lambda x: x > a
elif expr[0].isdigit or expr[0] == '.':
- a = _sizetoint(expr)
+ a = util.sizetoint(expr)
b = _sizetomax(expr)
m = lambda x: x >= a and x <= b
else:
--- a/mercurial/help/templates.txt Thu May 23 17:39:33 2013 -0500
+++ b/mercurial/help/templates.txt Thu May 23 17:52:21 2013 -0500
@@ -6,8 +6,8 @@
You can customize output for any "log-like" command: log,
outgoing, incoming, tip, parents, heads and glog.
-Four styles are packaged with Mercurial: default (the style used
-when no explicit preference is passed), compact, changelog,
+Five styles are packaged with Mercurial: default (the style used
+when no explicit preference is passed), compact, changelog, phases
and xml.
Usage::
--- a/mercurial/httpclient/__init__.py Thu May 23 17:39:33 2013 -0500
+++ b/mercurial/httpclient/__init__.py Thu May 23 17:52:21 2013 -0500
@@ -37,6 +37,9 @@
* implements ssl inline instead of in a different class
"""
+# Many functions in this file have too many arguments.
+# pylint: disable=R0913
+
import cStringIO
import errno
import httplib
@@ -117,6 +120,8 @@
def _close(self):
if self._reader is not None:
+ # We're a friend of the reader class here.
+ # pylint: disable=W0212
self._reader._close()
def readline(self):
@@ -137,6 +142,7 @@
return ''.join(blocks)
def read(self, length=None):
+ """Read data from the response body."""
# if length is None, unbounded read
while (not self.complete() # never select on a finished read
and (not length # unbounded, so we wait for complete()
@@ -150,7 +156,8 @@
return r
def _select(self):
- r, _, _ = select.select([self.sock], [], [], self._timeout)
+ r, unused_write, unused_err = select.select(
+ [self.sock], [], [], self._timeout)
if not r:
# socket was not readable. If the response is not
# complete, raise a timeout.
@@ -170,13 +177,16 @@
# raise an exception if this is an invalid situation.
if not data:
if self._reader:
+ # We're a friend of the reader class here.
+ # pylint: disable=W0212
self._reader._close()
return False
else:
self._load_response(data)
return True
- def _load_response(self, data):
+ # This method gets replaced by _load later, which confuses pylint.
+ def _load_response(self, data): # pylint: disable=E0202
# Being here implies we're not at the end of the headers yet,
# since at the end of this method if headers were completely
# loaded we replace this method with the load() method of the
@@ -201,7 +211,7 @@
# handle 100-continue response
hdrs, body = self.raw_response.split(self._end_headers, 1)
- http_ver, status = hdrs.split(' ', 1)
+ unused_http_ver, status = hdrs.split(' ', 1)
if status.startswith('100'):
self.raw_response = body
self.continued = True
@@ -260,9 +270,13 @@
self.will_close = True
if body:
+ # We're a friend of the reader class here.
+ # pylint: disable=W0212
self._reader._load(body)
logger.debug('headers complete')
self.headers = headers
+ # We're a friend of the reader class here.
+ # pylint: disable=W0212
self._load_response = self._reader._load
@@ -335,9 +349,9 @@
self._proxy_port))
if self.ssl:
# TODO proxy header support
- data = self.buildheaders('CONNECT', '%s:%d' % (self.host,
- self.port),
- {}, HTTP_VER_1_0)
+ data = self._buildheaders('CONNECT', '%s:%d' % (self.host,
+ self.port),
+ {}, HTTP_VER_1_0)
sock.send(data)
sock.setblocking(0)
r = self.response_class(sock, self.timeout, 'CONNECT')
@@ -345,6 +359,9 @@
'Timed out waiting for CONNECT response from proxy')
while not r.complete():
try:
+ # We're a friend of the response class, so let
+ # us use the private attribute.
+ # pylint: disable=W0212
if not r._select():
if not r.complete():
raise timeout_exc
@@ -376,7 +393,7 @@
sock.setblocking(0)
self.sock = sock
- def buildheaders(self, method, path, headers, http_ver):
+ def _buildheaders(self, method, path, headers, http_ver):
if self.ssl and self.port == 443 or self.port == 80:
# default port for protocol, so leave it out
hdrhost = self.host
@@ -437,6 +454,11 @@
return True
return False
+ def _reconnect(self, where):
+ logger.info('reconnecting during %s', where)
+ self.close()
+ self._connect()
+
def request(self, method, path, body=None, headers={},
expect_continue=False):
"""Send a request to the server.
@@ -474,16 +496,11 @@
raise BadRequestData('body has no __len__() nor read()')
self._connect()
- outgoing_headers = self.buildheaders(
+ outgoing_headers = self._buildheaders(
method, path, hdrs, self.http_version)
response = None
first = True
- def reconnect(where):
- logger.info('reconnecting during %s', where)
- self.close()
- self._connect()
-
while ((outgoing_headers or body)
and not (response and response.complete())):
select_timeout = self.timeout
@@ -523,14 +540,17 @@
except socket.sslerror, e:
if e.args[0] != socket.SSL_ERROR_WANT_READ:
raise
- logger.debug(
- 'SSL_ERROR_WANT_READ while sending data, retrying...')
+ logger.debug('SSL_ERROR_WANT_READ while sending '
+ 'data, retrying...')
continue
if not data:
logger.info('socket appears closed in read')
self.sock = None
self._current_response = None
if response is not None:
+ # We're a friend of the response class, so let
+ # us use the private attribute.
+ # pylint: disable=W0212
response._close()
# This if/elif ladder is a bit subtle,
# comments in each branch should help.
@@ -550,7 +570,7 @@
logger.info(
'Connection appeared closed in read on first'
' request loop iteration, will retry.')
- reconnect('read')
+ self._reconnect('read')
continue
else:
# We didn't just send the first data hunk,
@@ -563,7 +583,11 @@
'response was missing or incomplete!')
logger.debug('read %d bytes in request()', len(data))
if response is None:
- response = self.response_class(r[0], self.timeout, method)
+ response = self.response_class(
+ r[0], self.timeout, method)
+ # We're a friend of the response class, so let us
+ # use the private attribute.
+ # pylint: disable=W0212
response._load_response(data)
# Jump to the next select() call so we load more
# data if the server is still sending us content.
@@ -576,6 +600,8 @@
if w and out:
try:
if getattr(out, 'read', False):
+ # pylint guesses the type of out incorrectly here
+ # pylint: disable=E1103
data = out.read(OUTGOING_BUFFER_SIZE)
if not data:
continue
@@ -599,14 +625,10 @@
elif (e[0] not in (errno.ECONNRESET, errno.EPIPE)
and not first):
raise
- reconnect('write')
+ self._reconnect('write')
amt = self.sock.send(out)
logger.debug('sent %d', amt)
first = False
- # stash data we think we sent in case the socket breaks
- # when we read from it
- if was_first:
- sent_data = out[:amt]
if out is body:
body = out[amt:]
else:
@@ -616,7 +638,6 @@
# the whole request
if response is None:
response = self.response_class(self.sock, self.timeout, method)
- complete = response.complete()
data_left = bool(outgoing_headers or body)
if data_left:
logger.info('stopped sending request early, '
@@ -629,10 +650,14 @@
self._current_response = response
def getresponse(self):
+ """Returns the response to the most recent request."""
if self._current_response is None:
raise httplib.ResponseNotReady()
r = self._current_response
while r.headers is None:
+ # We're a friend of the response class, so let us use the
+ # private attribute.
+ # pylint: disable=W0212
if not r._select() and not r.complete():
raise _readers.HTTPRemoteClosedError()
if r.will_close:
--- a/mercurial/httpclient/_readers.py Thu May 23 17:39:33 2013 -0500
+++ b/mercurial/httpclient/_readers.py Thu May 23 17:52:21 2013 -0500
@@ -33,7 +33,6 @@
"""
import httplib
-import itertools
import logging
logger = logging.getLogger(__name__)
@@ -59,33 +58,35 @@
self._done_chunks = []
self.available_data = 0
- def addchunk(self, data):
+ def _addchunk(self, data):
self._done_chunks.append(data)
self.available_data += len(data)
- def pushchunk(self, data):
+ def _pushchunk(self, data):
self._done_chunks.insert(0, data)
self.available_data += len(data)
- def popchunk(self):
+ def _popchunk(self):
b = self._done_chunks.pop(0)
self.available_data -= len(b)
return b
def done(self):
+ """Returns true if the response body is entirely read."""
return self._finished
def read(self, amt):
+ """Read amt bytes from the response body."""
if self.available_data < amt and not self._finished:
raise ReadNotReady()
blocks = []
need = amt
while self._done_chunks:
- b = self.popchunk()
+ b = self._popchunk()
if len(b) > need:
nb = b[:need]
- self.pushchunk(b[need:])
+ self._pushchunk(b[need:])
b = nb
blocks.append(b)
need -= len(b)
@@ -107,11 +108,11 @@
blocks = []
while self._done_chunks:
- b = self.popchunk()
+ b = self._popchunk()
i = b.find(delimstr) + len(delimstr)
if i:
if i < len(b):
- self.pushchunk(b[i:])
+ self._pushchunk(b[i:])
blocks.append(b[:i])
break
else:
@@ -154,8 +155,9 @@
if data:
assert not self._finished, (
'tried to add data (%r) to a closed reader!' % data)
- logger.debug('%s read an additional %d data', self.name, len(data))
- self.addchunk(data)
+ logger.debug('%s read an additional %d data',
+ self.name, len(data)) # pylint: disable=E1101
+ self._addchunk(data)
class CloseIsEndReader(AbstractSimpleReader):
@@ -172,7 +174,7 @@
name = 'content-length'
def __init__(self, amount):
- AbstractReader.__init__(self)
+ AbstractSimpleReader.__init__(self)
self._amount = amount
if amount == 0:
self._finished = True
@@ -199,7 +201,8 @@
logger.debug('chunked read an additional %d data', len(data))
position = 0
if self._leftover_data:
- logger.debug('chunked reader trying to finish block from leftover data')
+ logger.debug(
+ 'chunked reader trying to finish block from leftover data')
# TODO: avoid this string concatenation if possible
data = self._leftover_data + data
position = self._leftover_skip_amt
@@ -224,6 +227,6 @@
self._finished = True
logger.debug('closing chunked reader due to chunk of length 0')
return
- self.addchunk(data[block_start:block_start + amt])
+ self._addchunk(data[block_start:block_start + amt])
position = block_start + amt + len(self._eol)
# no-check-code
--- a/mercurial/httpclient/socketutil.py Thu May 23 17:39:33 2013 -0500
+++ b/mercurial/httpclient/socketutil.py Thu May 23 17:52:21 2013 -0500
@@ -39,7 +39,8 @@
try:
import ssl
- ssl.wrap_socket # make demandimporters load the module
+ # make demandimporters load the module
+ ssl.wrap_socket # pylint: disable=W0104
have_ssl = True
except ImportError:
import httplib
@@ -52,12 +53,13 @@
create_connection = socket.create_connection
except AttributeError:
def create_connection(address):
+ """Backport of socket.create_connection from Python 2.6."""
host, port = address
msg = "getaddrinfo returns an empty list"
sock = None
for res in socket.getaddrinfo(host, port, 0,
socket.SOCK_STREAM):
- af, socktype, proto, _canonname, sa = res
+ af, socktype, proto, unused_canonname, sa = res
try:
sock = socket.socket(af, socktype, proto)
logger.info("connect: (%s, %s)", host, port)
@@ -80,8 +82,11 @@
CERT_REQUIRED = ssl.CERT_REQUIRED
else:
class FakeSocket(httplib.FakeSocket):
- """Socket wrapper that supports SSL.
- """
+ """Socket wrapper that supports SSL."""
+
+ # Silence lint about this goofy backport class
+ # pylint: disable=W0232,E1101,R0903,R0913,C0111
+
# backport the behavior from Python 2.6, which is to busy wait
# on the socket instead of anything nice. Sigh.
# See http://bugs.python.org/issue3890 for more info.
@@ -107,11 +112,16 @@
CERT_OPTIONAL = 1
CERT_REQUIRED = 2
+ # Disable unused-argument because we're making a dumb wrapper
+ # that's like an upstream method.
+ #
+ # pylint: disable=W0613,R0913
def wrap_socket(sock, keyfile=None, certfile=None,
server_side=False, cert_reqs=CERT_NONE,
ssl_version=_PROTOCOL_SSLv23, ca_certs=None,
do_handshake_on_connect=True,
suppress_ragged_eofs=True):
+ """Backport of ssl.wrap_socket from Python 2.6."""
if cert_reqs != CERT_NONE and ca_certs:
raise CertificateValidationUnsupported(
'SSL certificate validation requires the ssl module'
@@ -120,6 +130,7 @@
# borrow httplib's workaround for no ssl.wrap_socket
sock = FakeSocket(sock, sslob)
return sock
+ # pylint: enable=W0613,R0913
class CertificateValidationUnsupported(Exception):
--- a/mercurial/localrepo.py Thu May 23 17:39:33 2013 -0500
+++ b/mercurial/localrepo.py Thu May 23 17:52:21 2013 -0500
@@ -99,8 +99,9 @@
def known(self, nodes):
return self._repo.known(nodes)
- def getbundle(self, source, heads=None, common=None):
- return self._repo.getbundle(source, heads=heads, common=common)
+ def getbundle(self, source, heads=None, common=None, bundlecaps=None):
+ return self._repo.getbundle(source, heads=heads, common=common,
+ bundlecaps=None)
# TODO We might want to move the next two calls into legacypeer and add
# unbundle instead.
@@ -1145,7 +1146,7 @@
if not force:
vdirs = []
- match.dir = vdirs.append
+ match.explicitdir = vdirs.append
match.bad = fail
wlock = self.wlock()
@@ -1674,6 +1675,7 @@
heads = rheads
if remote.capable('getbundle'):
+ # TODO: get bundlecaps from remote
cg = remote.getbundle('pull', common=common,
heads=heads or rheads)
elif heads is None:
@@ -1836,13 +1838,19 @@
remoteheads, newbranch,
bool(inc))
+ # TODO: get bundlecaps from remote
+ bundlecaps = None
# create a changegroup from local
if revs is None and not outgoing.excluded:
# push everything,
# use the fast path, no race possible on push
- cg = self._changegroup(outgoing.missing, 'push')
+ bundler = changegroup.bundle10(self, bundlecaps)
+ cg = self._changegroupsubset(outgoing,
+ bundler,
+ 'push',
+ fastpath=True)
else:
- cg = self.getlocalbundle('push', outgoing)
+ cg = self.getlocalbundle('push', outgoing, bundlecaps)
# apply changegroup to remote
if unbundle:
@@ -1983,24 +1991,24 @@
cl = self.changelog
if not bases:
bases = [nullid]
+ # TODO: remove call to nodesbetween.
csets, bases, heads = cl.nodesbetween(bases, heads)
- # We assume that all ancestors of bases are known
- common = cl.ancestors([cl.rev(n) for n in bases])
- return self._changegroupsubset(common, csets, heads, source)
+ bases = [p for n in bases for p in cl.parents(n) if p != nullid]
+ outgoing = discovery.outgoing(cl, bases, heads)
+ bundler = changegroup.bundle10(self)
+ return self._changegroupsubset(outgoing, bundler, source)
- def getlocalbundle(self, source, outgoing):
+ def getlocalbundle(self, source, outgoing, bundlecaps=None):
"""Like getbundle, but taking a discovery.outgoing as an argument.
This is only implemented for local repos and reuses potentially
precomputed sets in outgoing."""
if not outgoing.missing:
return None
- return self._changegroupsubset(outgoing.common,
- outgoing.missing,
- outgoing.missingheads,
- source)
+ bundler = changegroup.bundle10(self, bundlecaps)
+ return self._changegroupsubset(outgoing, bundler, source)
- def getbundle(self, source, heads=None, common=None):
+ def getbundle(self, source, heads=None, common=None, bundlecaps=None):
"""Like changegroupsubset, but returns the set difference between the
ancestors of heads and the ancestors common.
@@ -2018,215 +2026,32 @@
if not heads:
heads = cl.heads()
return self.getlocalbundle(source,
- discovery.outgoing(cl, common, heads))
+ discovery.outgoing(cl, common, heads),
+ bundlecaps=bundlecaps)
@unfilteredmethod
- def _changegroupsubset(self, commonrevs, csets, heads, source):
+ def _changegroupsubset(self, outgoing, bundler, source,
+ fastpath=False):
+ commonrevs = outgoing.common
+ csets = outgoing.missing
+ heads = outgoing.missingheads
+        # We go through the fast path if we get told to, or if all (unfiltered)
+        # heads have been requested (since we then know that all linkrevs will
+        # be pulled by the client).
+ heads.sort()
+ fastpathlinkrev = fastpath or (
+ self.filtername is None and heads == sorted(self.heads()))
- cl = self.changelog
- mf = self.manifest
- mfs = {} # needed manifests
- fnodes = {} # needed file nodes
- changedfiles = set()
- fstate = ['', {}]
- count = [0, 0]
-
- # can we go through the fast path ?
- heads.sort()
- if heads == sorted(self.heads()):
- return self._changegroup(csets, source)
-
- # slow path
self.hook('preoutgoing', throw=True, source=source)
self.changegroupinfo(csets, source)
-
- # filter any nodes that claim to be part of the known set
- def prune(revlog, missing):
- rr, rl = revlog.rev, revlog.linkrev
- return [n for n in missing
- if rl(rr(n)) not in commonrevs]
-
- progress = self.ui.progress
- _bundling = _('bundling')
- _changesets = _('changesets')
- _manifests = _('manifests')
- _files = _('files')
-
- def lookup(revlog, x):
- if revlog == cl:
- c = cl.read(x)
- changedfiles.update(c[3])
- mfs.setdefault(c[0], x)
- count[0] += 1
- progress(_bundling, count[0],
- unit=_changesets, total=count[1])
- return x
- elif revlog == mf:
- clnode = mfs[x]
- mdata = mf.readfast(x)
- for f, n in mdata.iteritems():
- if f in changedfiles:
- fnodes[f].setdefault(n, clnode)
- count[0] += 1
- progress(_bundling, count[0],
- unit=_manifests, total=count[1])
- return clnode
- else:
- progress(_bundling, count[0], item=fstate[0],
- unit=_files, total=count[1])
- return fstate[1][x]
-
- bundler = changegroup.bundle10(lookup)
- reorder = self.ui.config('bundle', 'reorder', 'auto')
- if reorder == 'auto':
- reorder = None
- else:
- reorder = util.parsebool(reorder)
-
- def gengroup():
- # Create a changenode group generator that will call our functions
- # back to lookup the owning changenode and collect information.
- count[:] = [0, len(csets)]
- for chunk in cl.group(csets, bundler, reorder=reorder):
- yield chunk
- progress(_bundling, None)
-
- # Create a generator for the manifestnodes that calls our lookup
- # and data collection functions back.
- for f in changedfiles:
- fnodes[f] = {}
- count[:] = [0, len(mfs)]
- for chunk in mf.group(prune(mf, mfs), bundler, reorder=reorder):
- yield chunk
- progress(_bundling, None)
-
- mfs.clear()
-
- # Go through all our files in order sorted by name.
- count[:] = [0, len(changedfiles)]
- for fname in sorted(changedfiles):
- filerevlog = self.file(fname)
- if not len(filerevlog):
- raise util.Abort(_("empty or missing revlog for %s")
- % fname)
- fstate[0] = fname
- fstate[1] = fnodes.pop(fname, {})
-
- nodelist = prune(filerevlog, fstate[1])
- if nodelist:
- count[0] += 1
- yield bundler.fileheader(fname)
- for chunk in filerevlog.group(nodelist, bundler, reorder):
- yield chunk
-
- # Signal that no more groups are left.
- yield bundler.close()
- progress(_bundling, None)
-
- if csets:
- self.hook('outgoing', node=hex(csets[0]), source=source)
-
- return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
+ gengroup = bundler.generate(commonrevs, csets, fastpathlinkrev, source)
+ return changegroup.unbundle10(util.chunkbuffer(gengroup), 'UN')
def changegroup(self, basenodes, source):
# to avoid a race we use changegroupsubset() (issue1320)
return self.changegroupsubset(basenodes, self.heads(), source)
@unfilteredmethod
- def _changegroup(self, nodes, source):
- """Compute the changegroup of all nodes that we have that a recipient
- doesn't. Return a chunkbuffer object whose read() method will return
- successive changegroup chunks.
-
- This is much easier than the previous function as we can assume that
- the recipient has any changenode we aren't sending them.
-
- nodes is the set of nodes to send"""
-
- cl = self.changelog
- mf = self.manifest
- mfs = {}
- changedfiles = set()
- fstate = ['']
- count = [0, 0]
-
- self.hook('preoutgoing', throw=True, source=source)
- self.changegroupinfo(nodes, source)
-
- revset = set([cl.rev(n) for n in nodes])
-
- def gennodelst(log):
- ln, llr = log.node, log.linkrev
- return [ln(r) for r in log if llr(r) in revset]
-
- progress = self.ui.progress
- _bundling = _('bundling')
- _changesets = _('changesets')
- _manifests = _('manifests')
- _files = _('files')
-
- def lookup(revlog, x):
- if revlog == cl:
- c = cl.read(x)
- changedfiles.update(c[3])
- mfs.setdefault(c[0], x)
- count[0] += 1
- progress(_bundling, count[0],
- unit=_changesets, total=count[1])
- return x
- elif revlog == mf:
- count[0] += 1
- progress(_bundling, count[0],
- unit=_manifests, total=count[1])
- return cl.node(revlog.linkrev(revlog.rev(x)))
- else:
- progress(_bundling, count[0], item=fstate[0],
- total=count[1], unit=_files)
- return cl.node(revlog.linkrev(revlog.rev(x)))
-
- bundler = changegroup.bundle10(lookup)
- reorder = self.ui.config('bundle', 'reorder', 'auto')
- if reorder == 'auto':
- reorder = None
- else:
- reorder = util.parsebool(reorder)
-
- def gengroup():
- '''yield a sequence of changegroup chunks (strings)'''
- # construct a list of all changed files
-
- count[:] = [0, len(nodes)]
- for chunk in cl.group(nodes, bundler, reorder=reorder):
- yield chunk
- progress(_bundling, None)
-
- count[:] = [0, len(mfs)]
- for chunk in mf.group(gennodelst(mf), bundler, reorder=reorder):
- yield chunk
- progress(_bundling, None)
-
- count[:] = [0, len(changedfiles)]
- for fname in sorted(changedfiles):
- filerevlog = self.file(fname)
- if not len(filerevlog):
- raise util.Abort(_("empty or missing revlog for %s")
- % fname)
- fstate[0] = fname
- nodelist = gennodelst(filerevlog)
- if nodelist:
- count[0] += 1
- yield bundler.fileheader(fname)
- for chunk in filerevlog.group(nodelist, bundler, reorder):
- yield chunk
- yield bundler.close()
- progress(_bundling, None)
-
- if nodes:
- self.hook('outgoing', node=hex(nodes[0]), source=source)
-
- return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
-
- @unfilteredmethod
def addchangegroup(self, source, srctype, url, emptyok=False):
"""Add the changegroup returned by source.read() to this repo.
srctype is a string like 'push', 'pull', or 'unbundle'. url is
--- a/mercurial/match.py Thu May 23 17:39:33 2013 -0500
+++ b/mercurial/match.py Thu May 23 17:52:21 2013 -0500
@@ -119,8 +119,12 @@
found/accessed, with an error message
'''
pass
- def dir(self, f):
- pass
+ # If this is set, it will be called when an explicitly listed directory is
+ # visited.
+ explicitdir = None
+ # If this is set, it will be called when a directory discovered by recursive
+ # traversal is visited.
+ traversedir = None
def missing(self, f):
pass
def exact(self, f):
--- a/mercurial/merge.py Thu May 23 17:39:33 2013 -0500
+++ b/mercurial/merge.py Thu May 23 17:52:21 2013 -0500
@@ -95,6 +95,7 @@
def _checkunknownfile(repo, wctx, mctx, f):
return (not repo.dirstate._ignore(f)
and os.path.isfile(repo.wjoin(f))
+ and repo.wopener.audit.check(f)
and repo.dirstate.normalize(f) not in repo.dirstate
and mctx[f].cmp(wctx[f]))
@@ -364,8 +365,8 @@
actions.append((f, "r", None, "remote delete"))
elif repo.ui.promptchoice(
_("local changed %s which remote deleted\n"
- "use (c)hanged version or (d)elete?") % f,
- (_("&Changed"), _("&Delete")), 0):
+ "use (c)hanged version or (d)elete?"
+ "$$ &Changed $$ &Delete") % f, 0):
actions.append((f, "r", None, "prompt delete"))
else:
actions.append((f, "a", None, "prompt keep"))
@@ -374,8 +375,8 @@
actions.append((f, "g", (m2.flags(f),), "remote recreating"))
elif repo.ui.promptchoice(
_("remote changed %s which local deleted\n"
- "use (c)hanged version or leave (d)eleted?") % f,
- (_("&Changed"), _("&Deleted")), 0) == 0:
+ "use (c)hanged version or leave (d)eleted?"
+ "$$ &Changed $$ &Deleted") % f, 0) == 0:
actions.append((f, "g", (m2.flags(f),), "prompt recreating"))
else: assert False, m
return actions
--- a/mercurial/patch.py Thu May 23 17:39:33 2013 -0500
+++ b/mercurial/patch.py Thu May 23 17:52:21 2013 -0500
@@ -481,7 +481,7 @@
def close(self):
wctx = self.repo[None]
- addremoved = set(self.changed)
+ changed = set(self.changed)
for src, dst in self.copied:
scmutil.dirstatecopy(self.ui, self.repo, wctx, src, dst)
if self.removed:
@@ -491,14 +491,10 @@
# File was deleted and no longer belongs to the
# dirstate, it was probably marked added then
# deleted, and should not be considered by
- # addremove().
- addremoved.discard(f)
- if addremoved:
- cwd = self.repo.getcwd()
- if cwd:
- addremoved = [util.pathto(self.repo.root, cwd, f)
- for f in addremoved]
- scmutil.addremove(self.repo, addremoved, similarity=self.similarity)
+ # marktouched().
+ changed.discard(f)
+ if changed:
+ scmutil.marktouched(self.repo, changed, self.similarity)
return sorted(self.changed)
class filestore(object):
@@ -1397,12 +1393,7 @@
ui.warn(line + '\n')
finally:
if files:
- cfiles = list(files)
- cwd = repo.getcwd()
- if cwd:
- cfiles = [util.pathto(repo.root, cwd, f)
- for f in cfiles]
- scmutil.addremove(repo, cfiles, similarity=similarity)
+ scmutil.marktouched(repo, files, similarity)
code = fp.close()
if code:
raise PatchError(_("patch command failed: %s") %
--- a/mercurial/revlog.py Thu May 23 17:39:33 2013 -0500
+++ b/mercurial/revlog.py Thu May 23 17:52:21 2013 -0500
@@ -14,7 +14,7 @@
# import stuff from node for others to import from revlog
from node import bin, hex, nullid, nullrev
from i18n import _
-import ancestor, mdiff, parsers, error, util, dagutil
+import ancestor, mdiff, parsers, error, util
import struct, zlib, errno
_pack = struct.pack
@@ -1143,44 +1143,6 @@
self._basecache = (curr, chainbase)
return node
- def group(self, nodelist, bundler, reorder=None):
- """Calculate a delta group, yielding a sequence of changegroup chunks
- (strings).
-
- Given a list of changeset revs, return a set of deltas and
- metadata corresponding to nodes. The first delta is
- first parent(nodelist[0]) -> nodelist[0], the receiver is
- guaranteed to have this parent as it has all history before
- these changesets. In the case firstparent is nullrev the
- changegroup starts with a full revision.
- """
-
- # if we don't have any revisions touched by these changesets, bail
- if len(nodelist) == 0:
- yield bundler.close()
- return
-
- # for generaldelta revlogs, we linearize the revs; this will both be
- # much quicker and generate a much smaller bundle
- if (self._generaldelta and reorder is not False) or reorder:
- dag = dagutil.revlogdag(self)
- revs = set(self.rev(n) for n in nodelist)
- revs = dag.linearize(revs)
- else:
- revs = sorted([self.rev(n) for n in nodelist])
-
- # add the parent of the first rev
- p = self.parentrevs(revs[0])[0]
- revs.insert(0, p)
-
- # build deltas
- for r in xrange(len(revs) - 1):
- prev, curr = revs[r], revs[r + 1]
- for c in bundler.revchunk(self, curr, prev):
- yield c
-
- yield bundler.close()
-
def addgroup(self, bundle, linkmapper, transaction):
"""
add a delta group
--- a/mercurial/scmutil.py Thu May 23 17:39:33 2013 -0500
+++ b/mercurial/scmutil.py Thu May 23 17:52:21 2013 -0500
@@ -685,26 +685,11 @@
if similarity is None:
similarity = float(opts.get('similarity') or 0)
# we'd use status here, except handling of symlinks and ignore is tricky
- added, unknown, deleted, removed = [], [], [], []
- audit_path = pathauditor(repo.root)
m = match(repo[None], pats, opts)
rejected = []
m.bad = lambda x, y: rejected.append(x)
- ctx = repo[None]
- dirstate = repo.dirstate
- walkresults = dirstate.walk(m, sorted(ctx.substate), True, False)
- for abs, st in walkresults.iteritems():
- dstate = dirstate[abs]
- if dstate == '?' and audit_path.check(abs):
- unknown.append(abs)
- elif dstate != 'r' and not st:
- deleted.append(abs)
- # for finding renames
- elif dstate == 'r':
- removed.append(abs)
- elif dstate == 'a':
- added.append(abs)
+ added, unknown, deleted, removed = _interestingfiles(repo, m)
unknownset = set(unknown)
toprint = unknownset.copy()
@@ -718,32 +703,101 @@
status = _('removing %s\n') % ((pats and rel) or abs)
repo.ui.status(status)
- copies = {}
- if similarity > 0:
- for old, new, score in similar.findrenames(repo,
- added + unknown, removed + deleted, similarity):
- if repo.ui.verbose or not m.exact(old) or not m.exact(new):
- repo.ui.status(_('recording removal of %s as rename to %s '
- '(%d%% similar)\n') %
- (m.rel(old), m.rel(new), score * 100))
- copies[new] = old
+ renames = _findrenames(repo, m, added + unknown, removed + deleted,
+ similarity)
if not dry_run:
- wctx = repo[None]
- wlock = repo.wlock()
- try:
- wctx.forget(deleted)
- wctx.add(unknown)
- for new, old in copies.iteritems():
- wctx.copy(old, new)
- finally:
- wlock.release()
+ _markchanges(repo, unknown, deleted, renames)
+
+ for f in rejected:
+ if f in m.files():
+ return 1
+ return 0
+
+def marktouched(repo, files, similarity=0.0):
+ '''Assert that files have somehow been operated upon. files are relative to
+ the repo root.'''
+ m = matchfiles(repo, files)
+ rejected = []
+ m.bad = lambda x, y: rejected.append(x)
+
+ added, unknown, deleted, removed = _interestingfiles(repo, m)
+
+ if repo.ui.verbose:
+ unknownset = set(unknown)
+ toprint = unknownset.copy()
+ toprint.update(deleted)
+ for abs in sorted(toprint):
+ if abs in unknownset:
+ status = _('adding %s\n') % abs
+ else:
+ status = _('removing %s\n') % abs
+ repo.ui.status(status)
+
+ renames = _findrenames(repo, m, added + unknown, removed + deleted,
+ similarity)
+
+ _markchanges(repo, unknown, deleted, renames)
for f in rejected:
if f in m.files():
return 1
return 0
+def _interestingfiles(repo, matcher):
+ '''Walk dirstate with matcher, looking for files that addremove would care
+ about.
+
+ This is different from dirstate.status because it doesn't care about
+ whether files are modified or clean.'''
+ added, unknown, deleted, removed = [], [], [], []
+ audit_path = pathauditor(repo.root)
+
+ ctx = repo[None]
+ dirstate = repo.dirstate
+ walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False)
+ for abs, st in walkresults.iteritems():
+ dstate = dirstate[abs]
+ if dstate == '?' and audit_path.check(abs):
+ unknown.append(abs)
+ elif dstate != 'r' and not st:
+ deleted.append(abs)
+ # for finding renames
+ elif dstate == 'r':
+ removed.append(abs)
+ elif dstate == 'a':
+ added.append(abs)
+
+ return added, unknown, deleted, removed
+
+def _findrenames(repo, matcher, added, removed, similarity):
+ '''Find renames from removed files to added ones.'''
+ renames = {}
+ if similarity > 0:
+ for old, new, score in similar.findrenames(repo, added, removed,
+ similarity):
+ if (repo.ui.verbose or not matcher.exact(old)
+ or not matcher.exact(new)):
+ repo.ui.status(_('recording removal of %s as rename to %s '
+ '(%d%% similar)\n') %
+ (matcher.rel(old), matcher.rel(new),
+ score * 100))
+ renames[new] = old
+ return renames
+
+def _markchanges(repo, unknown, deleted, renames):
+ '''Marks the files in unknown as added, the files in deleted as removed,
+ and the files in renames as copied.'''
+ wctx = repo[None]
+ wlock = repo.wlock()
+ try:
+ wctx.forget(deleted)
+ wctx.add(unknown)
+ for new, old in renames.iteritems():
+ wctx.copy(old, new)
+ finally:
+ wlock.release()
+
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
"""Update the dirstate to reflect the intent of copying src to dst. For
different reasons it might not end with dst being marked as copied from src.
--- a/mercurial/store.py Thu May 23 17:39:33 2013 -0500
+++ b/mercurial/store.py Thu May 23 17:52:21 2013 -0500
@@ -322,13 +322,16 @@
def datafiles(self):
return self._walk('data', True)
+ def topfiles(self):
+ # yield manifest before changelog
+ return reversed(self._walk('', False))
+
def walk(self):
'''yields (unencoded, encoded, size)'''
# yield data files first
for x in self.datafiles():
yield x
- # yield manifest before changelog
- for x in reversed(self._walk('', False)):
+ for x in self.topfiles():
yield x
def copylist(self):
--- a/mercurial/subrepo.py Thu May 23 17:39:33 2013 -0500
+++ b/mercurial/subrepo.py Thu May 23 17:52:21 2013 -0500
@@ -191,9 +191,8 @@
elif ld[0] != r[0]: # sources differ
if repo.ui.promptchoice(
_(' subrepository sources for %s differ\n'
- 'use (l)ocal source (%s) or (r)emote source (%s)?')
- % (s, l[0], r[0]),
- (_('&Local'), _('&Remote')), 0):
+ 'use (l)ocal source (%s) or (r)emote source (%s)?'
+ '$$ &Local $$ &Remote') % (s, l[0], r[0]), 0):
debug(s, "prompt changed, get", r)
wctx.sub(s).get(r, overwrite)
sm[s] = r
@@ -215,8 +214,8 @@
else:
if repo.ui.promptchoice(
_(' local changed subrepository %s which remote removed\n'
- 'use (c)hanged version or (d)elete?') % s,
- (_('&Changed'), _('&Delete')), 0):
+ 'use (c)hanged version or (d)elete?'
+ '$$ &Changed $$ &Delete') % s, 0):
debug(s, "prompt remove")
wctx.sub(s).remove()
@@ -230,8 +229,8 @@
elif r != sa[s]:
if repo.ui.promptchoice(
_(' remote changed subrepository %s which local removed\n'
- 'use (c)hanged version or (d)elete?') % s,
- (_('&Changed'), _('&Delete')), 0) == 0:
+ 'use (c)hanged version or (d)elete?'
+ '$$ &Changed $$ &Delete') % s, 0) == 0:
debug(s, "prompt recreate", r)
wctx.sub(s).get(r)
sm[s] = r
@@ -242,14 +241,16 @@
def _updateprompt(ui, sub, dirty, local, remote):
if dirty:
msg = (_(' subrepository sources for %s differ\n'
- 'use (l)ocal source (%s) or (r)emote source (%s)?\n')
+ 'use (l)ocal source (%s) or (r)emote source (%s)?\n'
+ '$$ &Local $$ &Remote')
% (subrelpath(sub), local, remote))
else:
msg = (_(' subrepository sources for %s differ (in checked out '
'version)\n'
- 'use (l)ocal source (%s) or (r)emote source (%s)?\n')
+ 'use (l)ocal source (%s) or (r)emote source (%s)?\n'
+ '$$ &Local $$ &Remote')
% (subrelpath(sub), local, remote))
- return ui.promptchoice(msg, (_('&Local'), _('&Remote')), 0)
+ return ui.promptchoice(msg, 0)
def reporelpath(repo):
"""return path to this (sub)repo as seen from outermost repo"""
--- a/mercurial/templatefilters.py Thu May 23 17:39:33 2013 -0500
+++ b/mercurial/templatefilters.py Thu May 23 17:52:21 2013 -0500
@@ -5,9 +5,8 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from i18n import _
import cgi, re, os, time, urllib
-import encoding, node, util, error
+import encoding, node, util
import hbisect
def addbreaks(text):
@@ -100,8 +99,8 @@
para_re = None
space_re = None
-def fill(text, width):
- '''fill many paragraphs.'''
+def fill(text, width, initindent = '', hangindent = ''):
+ '''fill many paragraphs with optional indentation.'''
global para_re, space_re
if para_re is None:
para_re = re.compile('(\n\n|\n\\s*[-*]\\s*)', re.M)
@@ -122,7 +121,8 @@
yield text[start:m.start(0)], m.group(1)
start = m.end(1)
- return "".join([space_re.sub(' ', util.wrap(para, width=width)) + rest
+ return "".join([util.wrap(space_re.sub(' ', util.wrap(para, width)),
+ width, initindent, hangindent) + rest
for para, rest in findparas()])
def fill68(text):
@@ -401,34 +401,5 @@
text = regexp.sub(format, text)
return text
-def fillfunc(context, mapping, args):
- if not (1 <= len(args) <= 2):
- raise error.ParseError(_("fill expects one or two arguments"))
-
- text = stringify(args[0][0](context, mapping, args[0][1]))
- width = 76
- if len(args) == 2:
- try:
- width = int(stringify(args[1][0](context, mapping, args[1][1])))
- except ValueError:
- raise error.ParseError(_("fill expects an integer width"))
-
- return fill(text, width)
-
-def datefunc(context, mapping, args):
- if not (1 <= len(args) <= 2):
- raise error.ParseError(_("date expects one or two arguments"))
-
- date = args[0][0](context, mapping, args[0][1])
- if len(args) == 2:
- fmt = stringify(args[1][0](context, mapping, args[1][1]))
- return util.datestr(date, fmt)
- return util.datestr(date)
-
-funcs = {
- "fill": fillfunc,
- "date": datefunc,
-}
-
# tell hggettext to extract docstrings from these functions:
i18nfunctions = filters.values()
--- a/mercurial/templater.py Thu May 23 17:39:33 2013 -0500
+++ b/mercurial/templater.py Thu May 23 17:52:21 2013 -0500
@@ -199,9 +199,6 @@
if n in funcs:
f = funcs[n]
return (f, args)
- if n in templatefilters.funcs:
- f = templatefilters.funcs[n]
- return (f, args)
if n in context._filters:
if len(args) != 1:
raise error.ParseError(_("filter %s expects one argument") % n)
@@ -301,6 +298,41 @@
return minirst.format(text, style=style, keep=['verbose'])
+def fill(context, mapping, args):
+ if not (1 <= len(args) <= 4):
+ raise error.ParseError(_("fill expects one to four arguments"))
+
+ text = stringify(args[0][0](context, mapping, args[0][1]))
+ width = 76
+ initindent = ''
+ hangindent = ''
+ if 2 <= len(args) <= 4:
+ try:
+ width = int(stringify(args[1][0](context, mapping, args[1][1])))
+ except ValueError:
+ raise error.ParseError(_("fill expects an integer width"))
+ try:
+ initindent = stringify(args[2][0](context, mapping, args[2][1]))
+ initindent = stringify(runtemplate(context, mapping,
+ compiletemplate(initindent, context)))
+ hangindent = stringify(args[3][0](context, mapping, args[3][1]))
+ hangindent = stringify(runtemplate(context, mapping,
+ compiletemplate(hangindent, context)))
+ except IndexError:
+ pass
+
+ return templatefilters.fill(text, width, initindent, hangindent)
+
+def date(context, mapping, args):
+ if not (1 <= len(args) <= 2):
+ raise error.ParseError(_("date expects one or two arguments"))
+
+ date = args[0][0](context, mapping, args[0][1])
+ if len(args) == 2:
+ fmt = stringify(args[1][0](context, mapping, args[1][1]))
+ return util.datestr(date, fmt)
+ return util.datestr(date)
+
methods = {
"string": lambda e, c: (runstring, e[1]),
"symbol": lambda e, c: (runsymbol, e[1]),
@@ -319,6 +351,8 @@
"label": label,
"rstdoc": rstdoc,
"sub": sub,
+ "fill": fill,
+ "date": date,
}
# template engine
@@ -394,6 +428,16 @@
engines = {'default': engine}
+def stylelist():
+ path = templatepath()[0]
+ dirlist = os.listdir(path)
+ stylelist = []
+ for file in dirlist:
+ split = file.split(".")
+ if split[0] == "map-cmdline":
+ stylelist.append(split[1])
+ return ", ".join(sorted(stylelist))
+
class templater(object):
def __init__(self, mapfile, filters={}, defaults={}, cache={},
@@ -415,7 +459,8 @@
if not mapfile:
return
if not os.path.exists(mapfile):
- raise util.Abort(_('style not found: %s') % mapfile)
+ raise util.Abort(_("style '%s' not found") % mapfile,
+ hint=_("available styles: %s") % stylelist())
conf = config.config()
conf.read(mapfile)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/templates/map-cmdline.phases Thu May 23 17:52:21 2013 -0500
@@ -0,0 +1,25 @@
+changeset = 'changeset: {rev}:{node|short}\n{branches}{bookmarks}{tags}phase: {phase}\n{parents}user: {author}\ndate: {date|date}\nsummary: {desc|firstline}\n\n'
+changeset_quiet = '{rev}:{node|short}\n'
+changeset_verbose = 'changeset: {rev}:{node|short}\n{branches}{bookmarks}{tags}{parents}user: {author}\ndate: {date|date}\n{files}{file_copies_switch}description:\n{desc|strip}\n\n\n'
+changeset_debug = 'changeset: {rev}:{node}\n{branches}{bookmarks}{tags}phase: {phase}\n{parents}{manifest}user: {author}\ndate: {date|date}\n{file_mods}{file_adds}{file_dels}{file_copies_switch}{extras}description:\n{desc|strip}\n\n\n'
+start_files = 'files: '
+file = ' {file}'
+end_files = '\n'
+start_file_mods = 'files: '
+file_mod = ' {file_mod}'
+end_file_mods = '\n'
+start_file_adds = 'files+: '
+file_add = ' {file_add}'
+end_file_adds = '\n'
+start_file_dels = 'files-: '
+file_del = ' {file_del}'
+end_file_dels = '\n'
+start_file_copies = 'copies: '
+file_copy = ' {name} ({source})'
+end_file_copies = '\n'
+parent = 'parent: {rev}:{node|formatnode}\n'
+manifest = 'manifest: {rev}:{node}\n'
+branch = 'branch: {branch}\n'
+tag = 'tag: {tag}\n'
+bookmark = 'bookmark: {bookmark}\n'
+extra = 'extra: {key}={value|stringescape}\n'
--- a/mercurial/ui.py Thu May 23 17:39:33 2013 -0500
+++ b/mercurial/ui.py Thu May 23 17:52:21 2013 -0500
@@ -6,7 +6,7 @@
# GNU General Public License version 2 or any later version.
from i18n import _
-import errno, getpass, os, re, socket, sys, tempfile, traceback
+import errno, getpass, os, socket, sys, tempfile, traceback
import config, scmutil, util, error, formatter
class ui(object):
@@ -284,22 +284,16 @@
ConfigError: foo.invalid is not a byte quantity ('somevalue')
"""
- orig = string = self.config(section, name)
- if orig is None:
+ value = self.config(section, name)
+ if value is None:
if not isinstance(default, str):
return default
- orig = string = default
- multiple = 1
- m = re.match(r'([^kmbg]+?)\s*([kmg]?)b?$', string, re.I)
- if m:
- string, key = m.groups()
- key = key.lower()
- multiple = dict(k=1024, m=1048576, g=1073741824).get(key, 1)
+ value = default
try:
- return int(float(string) * multiple)
- except ValueError:
+ return util.sizetoint(value)
+ except error.ParseError:
raise error.ConfigError(_("%s.%s is not a byte quantity ('%s')")
- % (section, name, orig))
+ % (section, name, value))
def configlist(self, section, name, default=None, untrusted=False):
"""parse a configuration element as a list of comma/space separated
@@ -645,13 +639,20 @@
except EOFError:
raise util.Abort(_('response expected'))
- def promptchoice(self, msg, choices, default=0):
- """Prompt user with msg, read response, and ensure it matches
- one of the provided choices. The index of the choice is returned.
- choices is a sequence of acceptable responses with the format:
- ('&None', 'E&xec', 'Sym&link') Responses are case insensitive.
- If ui is not interactive, the default is returned.
+ def promptchoice(self, prompt, default=0):
+ """Prompt user with a message, read response, and ensure it matches
+ one of the provided choices. The prompt is formatted as follows:
+
+ "would you like fries with that (Yn)? $$ &Yes $$ &No"
+
+ The index of the choice is returned. Responses are case
+ insensitive. If ui is not interactive, the default is
+ returned.
"""
+
+ parts = prompt.split('$$')
+ msg = parts[0].rstrip(' ')
+ choices = [p.strip(' ') for p in parts[1:]]
resps = [s[s.index('&') + 1].lower() for s in choices]
while True:
r = self.prompt(msg, resps[default])
--- a/mercurial/util.py Thu May 23 17:39:33 2013 -0500
+++ b/mercurial/util.py Thu May 23 17:52:21 2013 -0500
@@ -1924,3 +1924,41 @@
(' ' * _timenesting[0], func.__name__,
timecount(elapsed)))
return wrapper
+
+_sizeunits = (('m', 2**20), ('k', 2**10), ('g', 2**30),
+ ('kb', 2**10), ('mb', 2**20), ('gb', 2**30), ('b', 1))
+
+def sizetoint(s):
+ '''Convert a space specifier to a byte count.
+
+ >>> sizetoint('30')
+ 30
+ >>> sizetoint('2.2kb')
+ 2252
+ >>> sizetoint('6M')
+ 6291456
+ '''
+ t = s.strip().lower()
+ try:
+ for k, u in _sizeunits:
+ if t.endswith(k):
+ return int(float(t[:-len(k)]) * u)
+ return int(t)
+ except ValueError:
+ raise error.ParseError(_("couldn't parse size: %s") % s)
+
+class hooks(object):
+ '''A collection of hook functions that can be used to extend a
+ function's behaviour. Hooks are called in lexicographic order,
+ based on the names of their sources.'''
+
+ def __init__(self):
+ self._hooks = []
+
+ def add(self, source, hook):
+ self._hooks.append((source, hook))
+
+ def __call__(self, *args):
+ self._hooks.sort(key=lambda x: x[0])
+ for source, hook in self._hooks:
+ hook(*args)
--- a/mercurial/wireproto.py Thu May 23 17:39:33 2013 -0500
+++ b/mercurial/wireproto.py Thu May 23 17:52:21 2013 -0500
@@ -281,13 +281,15 @@
bases=bases, heads=heads)
return changegroupmod.unbundle10(self._decompress(f), 'UN')
- def getbundle(self, source, heads=None, common=None):
+ def getbundle(self, source, heads=None, common=None, bundlecaps=None):
self.requirecap('getbundle', _('look up remote changes'))
opts = {}
if heads is not None:
opts['heads'] = encodelist(heads)
if common is not None:
opts['common'] = encodelist(common)
+ if bundlecaps is not None:
+ opts['bundlecaps'] = ','.join(bundlecaps)
f = self._callstream("getbundle", **opts)
return changegroupmod.unbundle10(self._decompress(f), 'UN')
@@ -449,9 +451,12 @@
return repo.debugwireargs(one, two, **opts)
def getbundle(repo, proto, others):
- opts = options('getbundle', ['heads', 'common'], others)
+ opts = options('getbundle', ['heads', 'common', 'bundlecaps'], others)
for k, v in opts.iteritems():
- opts[k] = decodelist(v)
+ if k in ('heads', 'common'):
+ opts[k] = decodelist(v)
+ elif k == 'bundlecaps':
+ opts[k] = set(v.split(','))
cg = repo.getbundle('serve', **opts)
return streamres(proto.groupchunks(cg))
@@ -523,6 +528,10 @@
def _allowstream(ui):
return ui.configbool('server', 'uncompressed', True, untrusted=True)
+def _walkstreamfiles(repo):
+    # this is its own function so extensions can override it
+ return repo.store.walk()
+
def stream(repo, proto):
'''If the server supports streaming clone, it advertises the "stream"
capability with a value representing the version and flags of the repo
@@ -544,7 +553,7 @@
lock = repo.lock()
try:
repo.ui.debug('scanning\n')
- for name, ename, size in repo.store.walk():
+ for name, ename, size in _walkstreamfiles(repo):
if size:
entries.append((name, size))
total_bytes += size
--- a/tests/run-tests.py Thu May 23 17:39:33 2013 -0500
+++ b/tests/run-tests.py Thu May 23 17:52:21 2013 -0500
@@ -1120,6 +1120,8 @@
childopts = ['--child=%d' % wfd, '--port=%d' % (options.port + j * 3)]
childtmp = os.path.join(HGTMP, 'child%d' % j)
childopts += ['--tmpdir', childtmp]
+ if options.keep_tmpdir:
+ childopts.append('--keep-tmpdir')
cmdline = [PYTHON, sys.argv[0]] + opts + childopts + job
vlog(' '.join(cmdline))
proc = subprocess.Popen(cmdline, executable=cmdline[0])
@@ -1288,7 +1290,8 @@
global TESTDIR, HGTMP, INST, BINDIR, PYTHONDIR, COVERAGE_FILE
TESTDIR = os.environ["TESTDIR"] = os.getcwd()
if options.tmpdir:
- options.keep_tmpdir = True
+ if not options.child:
+ options.keep_tmpdir = True
tmpdir = options.tmpdir
if os.path.exists(tmpdir):
# Meaning of tmpdir has changed since 1.3: we used to create
--- a/tests/test-bisect.t Thu May 23 17:39:33 2013 -0500
+++ b/tests/test-bisect.t Thu May 23 17:52:21 2013 -0500
@@ -184,6 +184,12 @@
$ hg bisect -r
$ hg bisect -b
+ $ hg summary
+ parent: 31:58c80a7c8a40 tip
+ msg 31
+ branch: default
+ commit: (clean)
+ update: (current)
$ hg bisect -g 1
Testing changeset 16:a2e6ea4973e9 (30 changesets remaining, ~4 tests)
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-bookmarks-current.t Thu May 23 17:39:33 2013 -0500
+++ b/tests/test-bookmarks-current.t Thu May 23 17:52:21 2013 -0500
@@ -43,16 +43,19 @@
$ hg bookmarks
* Z -1:000000000000
-new bookmark Y
+new bookmarks X and Y, first one made active
- $ hg bookmark Y
+ $ hg bookmark Y X
list bookmarks
$ hg bookmark
+ X -1:000000000000
* Y -1:000000000000
Z -1:000000000000
+ $ hg bookmark -d X
+
commit
$ echo 'b' > b
--- a/tests/test-bookmarks.t Thu May 23 17:39:33 2013 -0500
+++ b/tests/test-bookmarks.t Thu May 23 17:52:21 2013 -0500
@@ -168,11 +168,14 @@
$ hg bookmark -d REVSET
$ hg bookmark -d TIP
-rename without new name
+rename without new name or multiple names
$ hg bookmark -m Y
abort: new bookmark name required
[255]
+ $ hg bookmark -m Y Y2 Y3
+ abort: only one new bookmark name allowed
+ [255]
delete without name
@@ -417,8 +420,9 @@
a@ 2:db815d6d32e6
x y 2:db815d6d32e6
- $ hg bookmark -d @
- $ hg bookmark -d a@
+delete multiple bookmarks at once
+
+ $ hg bookmark -d @ a@
test clone with a bookmark named "default" (issue3677)
--- a/tests/test-command-template.t Thu May 23 17:39:33 2013 -0500
+++ b/tests/test-command-template.t Thu May 23 17:52:21 2013 -0500
@@ -458,7 +458,8 @@
Error if no style:
$ hg log --style notexist
- abort: style not found: notexist
+ abort: style 'notexist' not found
+ (available styles: bisect, changelog, compact, default, phases, xml)
[255]
Error if style missing key:
--- a/tests/test-commandserver.py Thu May 23 17:39:33 2013 -0500
+++ b/tests/test-commandserver.py Thu May 23 17:52:21 2013 -0500
@@ -25,7 +25,11 @@
else:
return channel, server.stdout.read(length)
-def runcommand(server, args, output=sys.stdout, error=sys.stderr, input=None):
+def sep(text):
+ return text.replace('\\', '/')
+
+def runcommand(server, args, output=sys.stdout, error=sys.stderr, input=None,
+ outfilter=lambda x: x):
print ' runcommand', ' '.join(args)
sys.stdout.flush()
server.stdin.write('runcommand\n')
@@ -37,7 +41,7 @@
while True:
ch, data = readchannel(server)
if ch == 'o':
- output.write(data)
+ output.write(outfilter(data))
output.flush()
elif ch == 'e':
error.write(data)
@@ -249,7 +253,8 @@
# make it public; draft marker moves to 4:7966c8e3734d
runcommand(server, ['phase', '-p', '.'])
- runcommand(server, ['phase', '.']) # load _phasecache.phaseroots
+ # load _phasecache.phaseroots
+ runcommand(server, ['phase', '.'], outfilter=sep)
# strip 1::4 outside server
os.system('hg -q --config extensions.mq= strip 1')
--- a/tests/test-contrib.t Thu May 23 17:39:33 2013 -0500
+++ b/tests/test-contrib.t Thu May 23 17:52:21 2013 -0500
@@ -103,34 +103,6 @@
no changes found
[1]
-
-#if hardlink
-
-Test shrink-revlog:
- $ cd repo-a
- $ hg --config extensions.shrink="$CONTRIBDIR/shrink-revlog.py" shrink
- shrinking $TESTTMP/repo-a/.hg/store/00manifest.i (glob)
- reading revs
- sorting revs
- writing revs
- old file size: 324 bytes ( 0.0 MiB)
- new file size: 324 bytes ( 0.0 MiB)
- shrinkage: 0.0% (1.0x)
- note: old revlog saved in:
- $TESTTMP/repo-a/.hg/store/00manifest.i.old (glob)
- $TESTTMP/repo-a/.hg/store/00manifest.d.old (glob)
- (You can delete those files when you are satisfied that your
- repository is still sane. Running 'hg verify' is strongly recommended.)
- $ hg verify
- checking changesets
- checking manifests
- crosschecking files in changesets and manifests
- checking files
- 1 files, 3 changesets, 3 total revisions
- $ cd ..
-
-#endif
-
Test simplemerge command:
$ cp "$CONTRIBDIR/simplemerge" .
--- a/tests/test-convert-git.t Thu May 23 17:39:33 2013 -0500
+++ b/tests/test-convert-git.t Thu May 23 17:52:21 2013 -0500
@@ -13,6 +13,10 @@
$ GIT_COMMITTER_NAME="$GIT_AUTHOR_NAME"; export GIT_COMMITTER_NAME
$ GIT_COMMITTER_EMAIL="$GIT_AUTHOR_EMAIL"; export GIT_COMMITTER_EMAIL
$ GIT_COMMITTER_DATE="$GIT_AUTHOR_DATE"; export GIT_COMMITTER_DATE
+ $ INVALIDID1=afd12345af
+ $ INVALIDID2=28173x36ddd1e67bf7098d541130558ef5534a86
+ $ VALIDID1=39b3d83f9a69a9ba4ebb111461071a0af0027357
+ $ VALIDID2=8dd6476bd09d9c7776355dc454dafe38efaec5da
$ count=10
$ commit()
> {
@@ -298,6 +302,36 @@
$ commit -a -m 'addsubmodule' >/dev/null 2>/dev/null
$ cd ..
+test invalid splicemap1
+
+ $ cat > splicemap <<EOF
+ > $VALIDID1
+ > EOF
+ $ hg convert --splicemap splicemap git-repo2 git-repo2-splicemap1-hg
+ initializing destination git-repo2-splicemap1-hg repository
+ abort: syntax error in splicemap(1): child parent1[,parent2] expected
+ [255]
+
+test invalid splicemap2
+
+ $ cat > splicemap <<EOF
+ > $VALIDID1 $VALIDID2, $VALIDID2, $VALIDID2
+ > EOF
+ $ hg convert --splicemap splicemap git-repo2 git-repo2-splicemap2-hg
+ initializing destination git-repo2-splicemap2-hg repository
+ abort: syntax error in splicemap(1): child parent1[,parent2] expected
+ [255]
+
+test invalid splicemap3
+
+ $ cat > splicemap <<EOF
+ > $INVALIDID1 $INVALIDID2
+ > EOF
+ $ hg convert --splicemap splicemap git-repo2 git-repo2-splicemap3-hg
+ initializing destination git-repo2-splicemap3-hg repository
+ abort: splicemap entry afd12345af is not a valid revision identifier
+ [255]
+
convert sub modules
$ hg convert git-repo6 git-repo6-hg
initializing destination git-repo6-hg repository
--- a/tests/test-convert-splicemap.t Thu May 23 17:39:33 2013 -0500
+++ b/tests/test-convert-splicemap.t Thu May 23 17:52:21 2013 -0500
@@ -37,6 +37,8 @@
$ hg ci -Am addaandd
adding a
adding d
+ $ INVALIDID1=afd12345af
+ $ INVALIDID2=28173x36ddd1e67bf7098d541130558ef5534a86
$ CHILDID1=`hg id --debug -i`
$ echo d >> d
$ hg ci -Am changed
@@ -53,7 +55,7 @@
o 0:527cdedf31fb "addaandd" files: a d
-test invalid splicemap
+test invalid splicemap1
$ cat > splicemap <<EOF
> $CHILDID2
@@ -62,6 +64,24 @@
abort: syntax error in splicemap(1): child parent1[,parent2] expected
[255]
+test invalid splicemap2
+
+ $ cat > splicemap <<EOF
+ > $CHILDID2 $PARENTID1, $PARENTID2, $PARENTID2
+ > EOF
+ $ hg convert --splicemap splicemap repo2 repo1
+ abort: syntax error in splicemap(1): child parent1[,parent2] expected
+ [255]
+
+test invalid splicemap3
+
+ $ cat > splicemap <<EOF
+ > $INVALIDID1 $INVALIDID2
+ > EOF
+ $ hg convert --splicemap splicemap repo2 repo1
+ abort: splicemap entry afd12345af is not a valid revision identifier
+ [255]
+
splice repo2 on repo1
$ cat > splicemap <<EOF
--- a/tests/test-convert-svn-source.t Thu May 23 17:39:33 2013 -0500
+++ b/tests/test-convert-svn-source.t Thu May 23 17:52:21 2013 -0500
@@ -16,6 +16,8 @@
#else
$ SVNREPOURL=file://`python -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"`
#endif
+ $ INVALIDREVISIONID=svn:x2147622-4a9f-4db4-a8d3-13562ff547b2/proj%20B/mytrunk@1
+ $ VALIDREVISIONID=svn:a2147622-4a9f-4db4-a8d3-13562ff547b2/proj%20B/mytrunk/mytrunk@1
Now test that it works with trunk/tags layout, but no branches yet.
@@ -168,6 +170,15 @@
|
o 0 second letter files: letter2.txt
+test invalid splicemap1
+
+ $ cat > splicemap <<EOF
+ > $INVALIDREVISIONID $VALIDREVISIONID
+ > EOF
+ $ hg convert --splicemap splicemap "$SVNREPOURL/proj%20B/mytrunk" smap
+ initializing destination smap repository
+ abort: splicemap entry svn:x2147622-4a9f-4db4-a8d3-13562ff547b2/proj%20B/mytrunk@1 is not a valid revision identifier
+ [255]
Test stop revision
$ hg convert --rev 1 "$SVNREPOURL/proj%20B/mytrunk" stoprev
--- a/tests/test-histedit-edit.t Thu May 23 17:39:33 2013 -0500
+++ b/tests/test-histedit-edit.t Thu May 23 17:52:21 2013 -0500
@@ -152,6 +152,15 @@
When you are finished, run hg histedit --continue to resume.
$ hg status
A f
+
+ $ hg summary
+ parent: 5:a5e1ba2f7afb
+ foobaz
+ branch: default
+ commit: 1 added (new branch head)
+ update: 1 new changesets (update)
+ hist: 1 remaining (histedit --continue)
+
$ HGEDITOR='true' hg histedit --continue
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
saved backup bundle to $TESTTMP/r/.hg/strip-backup/b5f70786f9b0-backup.hg (glob)
--- a/tests/test-log.t Thu May 23 17:39:33 2013 -0500
+++ b/tests/test-log.t Thu May 23 17:52:21 2013 -0500
@@ -84,6 +84,25 @@
abort: cannot follow file not in parent revision: "dir"
[255]
+-f, a wrong style
+
+ $ hg log -f -l1 --style something
+ abort: style 'something' not found
+ (available styles: bisect, changelog, compact, default, phases, xml)
+ [255]
+
+-f, phases style
+
+
+ $ hg log -f -l1 --style phases
+ changeset: 4:7e4639b4691b
+ tag: tip
+ phase: draft
+ user: test
+ date: Thu Jan 01 00:00:05 1970 +0000
+ summary: e
+
+
-f, but no args
$ hg log -f
--- a/tests/test-nested-repo.t Thu May 23 17:39:33 2013 -0500
+++ b/tests/test-nested-repo.t Thu May 23 17:52:21 2013 -0500
@@ -8,6 +8,9 @@
$ hg add b
$ hg st
+ $ echo y > b/y
+ $ hg st
+
Should fail:
$ hg st b/x
--- a/tests/test-rebase-parameters.t Thu May 23 17:39:33 2013 -0500
+++ b/tests/test-rebase-parameters.t Thu May 23 17:52:21 2013 -0500
@@ -419,6 +419,16 @@
unresolved conflicts (see hg resolve, then hg rebase --continue)
[1]
+ $ hg summary
+ parent: 1:56daeba07f4b
+ c2
+ parent: 2:e4e3f3546619 tip
+ c2b
+ branch: default
+ commit: 1 modified, 1 unresolved (merge)
+ update: (current)
+ rebase: 0 rebased, 1 remaining (rebase --continue)
+
$ hg resolve -l
U c2
--- a/tests/test-symlinks.t Thu May 23 17:39:33 2013 -0500
+++ b/tests/test-symlinks.t Thu May 23 17:52:21 2013 -0500
@@ -160,6 +160,15 @@
adding bar/a
adding foo
removing foo/a
+
+commit and update back
+
+ $ hg ci -mb
+ $ hg up '.^'
+ 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
+ $ hg up tip
+ 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
+
$ cd ..
== root of repository is symlinked ==