--- a/contrib/clang-format-ignorelist Wed Feb 10 23:03:54 2021 +0100
+++ b/contrib/clang-format-ignorelist Thu Feb 11 20:36:46 2021 -0800
@@ -9,3 +9,4 @@
hgext/fsmonitor/pywatchman/**.c
mercurial/thirdparty/**.c
mercurial/thirdparty/**.h
+mercurial/pythoncapi_compat.h
--- a/contrib/heptapod-ci.yml Wed Feb 10 23:03:54 2021 +0100
+++ b/contrib/heptapod-ci.yml Thu Feb 11 20:36:46 2021 -0800
@@ -7,6 +7,7 @@
variables:
PYTHON: python
TEST_HGMODULEPOLICY: "allow"
+ HG_CI_IMAGE_TAG: "latest"
.runtests_template: &runtests
stage: tests
@@ -17,6 +18,8 @@
- hg -R /tmp/mercurial-ci/ update `hg log --rev '.' --template '{node}'`
- cd /tmp/mercurial-ci/
- ls -1 tests/test-check-*.* > /tmp/check-tests.txt
+ - black --version
+ - clang-format --version
script:
- echo "python used, $PYTHON"
- echo "$RUNTEST_ARGS"
@@ -29,7 +32,7 @@
- hg -R /tmp/mercurial-ci/ update `hg log --rev '.' --template '{node}'`
- ls -1 tests/test-check-*.* > /tmp/check-tests.txt
- cd /tmp/mercurial-ci/rust/rhg
- - cargo build
+ - cargo build --release
- cd /tmp/mercurial-ci/
--- a/contrib/perf.py Wed Feb 10 23:03:54 2021 +0100
+++ b/contrib/perf.py Thu Feb 11 20:36:46 2021 -0800
@@ -744,7 +744,7 @@
# perf commands
-@command(b'perfwalk', formatteropts)
+@command(b'perf::walk|perfwalk', formatteropts)
def perfwalk(ui, repo, *pats, **opts):
opts = _byteskwargs(opts)
timer, fm = gettimer(ui, opts)
@@ -759,7 +759,7 @@
fm.end()
-@command(b'perfannotate', formatteropts)
+@command(b'perf::annotate|perfannotate', formatteropts)
def perfannotate(ui, repo, f, **opts):
opts = _byteskwargs(opts)
timer, fm = gettimer(ui, opts)
@@ -769,7 +769,7 @@
@command(
- b'perfstatus',
+ b'perf::status|perfstatus',
[
(b'u', b'unknown', False, b'ask status to look for unknown files'),
(b'', b'dirstate', False, b'benchmark the internal dirstate call'),
@@ -806,7 +806,7 @@
fm.end()
-@command(b'perfaddremove', formatteropts)
+@command(b'perf::addremove|perfaddremove', formatteropts)
def perfaddremove(ui, repo, **opts):
opts = _byteskwargs(opts)
timer, fm = gettimer(ui, opts)
@@ -837,7 +837,7 @@
cl._nodepos = None
-@command(b'perfheads', formatteropts)
+@command(b'perf::heads|perfheads', formatteropts)
def perfheads(ui, repo, **opts):
"""benchmark the computation of a changelog heads"""
opts = _byteskwargs(opts)
@@ -855,7 +855,7 @@
@command(
- b'perftags',
+ b'perf::tags|perftags',
formatteropts
+ [
(b'', b'clear-revlogs', False, b'refresh changelog and manifest'),
@@ -880,7 +880,7 @@
fm.end()
-@command(b'perfancestors', formatteropts)
+@command(b'perf::ancestors|perfancestors', formatteropts)
def perfancestors(ui, repo, **opts):
opts = _byteskwargs(opts)
timer, fm = gettimer(ui, opts)
@@ -894,7 +894,7 @@
fm.end()
-@command(b'perfancestorset', formatteropts)
+@command(b'perf::ancestorset|perfancestorset', formatteropts)
def perfancestorset(ui, repo, revset, **opts):
opts = _byteskwargs(opts)
timer, fm = gettimer(ui, opts)
@@ -910,7 +910,7 @@
fm.end()
-@command(b'perfdiscovery', formatteropts, b'PATH')
+@command(b'perf::discovery|perfdiscovery', formatteropts, b'PATH')
def perfdiscovery(ui, repo, path, **opts):
"""benchmark discovery between local repo and the peer at given path"""
repos = [repo, None]
@@ -928,7 +928,7 @@
@command(
- b'perfbookmarks',
+ b'perf::bookmarks|perfbookmarks',
formatteropts
+ [
(b'', b'clear-revlogs', False, b'refresh changelog and manifest'),
@@ -953,7 +953,7 @@
fm.end()
-@command(b'perfbundleread', formatteropts, b'BUNDLE')
+@command(b'perf::bundleread|perfbundleread', formatteropts, b'BUNDLE')
def perfbundleread(ui, repo, bundlepath, **opts):
"""Benchmark reading of bundle files.
@@ -1080,7 +1080,7 @@
@command(
- b'perfchangegroupchangelog',
+ b'perf::changegroupchangelog|perfchangegroupchangelog',
formatteropts
+ [
(b'', b'cgversion', b'02', b'changegroup version'),
@@ -1116,7 +1116,7 @@
fm.end()
-@command(b'perfdirs', formatteropts)
+@command(b'perf::dirs|perfdirs', formatteropts)
def perfdirs(ui, repo, **opts):
opts = _byteskwargs(opts)
timer, fm = gettimer(ui, opts)
@@ -1132,7 +1132,7 @@
@command(
- b'perfdirstate',
+ b'perf::dirstate|perfdirstate',
[
(
b'',
@@ -1195,7 +1195,7 @@
fm.end()
-@command(b'perfdirstatedirs', formatteropts)
+@command(b'perf::dirstatedirs|perfdirstatedirs', formatteropts)
def perfdirstatedirs(ui, repo, **opts):
"""benchmap a 'dirstate.hasdir' call from an empty `dirs` cache"""
opts = _byteskwargs(opts)
@@ -1212,7 +1212,7 @@
fm.end()
-@command(b'perfdirstatefoldmap', formatteropts)
+@command(b'perf::dirstatefoldmap|perfdirstatefoldmap', formatteropts)
def perfdirstatefoldmap(ui, repo, **opts):
"""benchmap a `dirstate._map.filefoldmap.get()` request
@@ -1233,7 +1233,7 @@
fm.end()
-@command(b'perfdirfoldmap', formatteropts)
+@command(b'perf::dirfoldmap|perfdirfoldmap', formatteropts)
def perfdirfoldmap(ui, repo, **opts):
"""benchmap a `dirstate._map.dirfoldmap.get()` request
@@ -1255,7 +1255,7 @@
fm.end()
-@command(b'perfdirstatewrite', formatteropts)
+@command(b'perf::dirstatewrite|perfdirstatewrite', formatteropts)
def perfdirstatewrite(ui, repo, **opts):
"""benchmap the time it take to write a dirstate on disk"""
opts = _byteskwargs(opts)
@@ -1297,7 +1297,7 @@
@command(
- b'perfmergecalculate',
+ b'perf::mergecalculate|perfmergecalculate',
[
(b'r', b'rev', b'.', b'rev to merge against'),
(b'', b'from', b'', b'rev to merge from'),
@@ -1330,7 +1330,7 @@
@command(
- b'perfmergecopies',
+ b'perf::mergecopies|perfmergecopies',
[
(b'r', b'rev', b'.', b'rev to merge against'),
(b'', b'from', b'', b'rev to merge from'),
@@ -1353,7 +1353,7 @@
fm.end()
-@command(b'perfpathcopies', [], b"REV REV")
+@command(b'perf::pathcopies|perfpathcopies', [], b"REV REV")
def perfpathcopies(ui, repo, rev1, rev2, **opts):
"""benchmark the copy tracing logic"""
opts = _byteskwargs(opts)
@@ -1369,7 +1369,7 @@
@command(
- b'perfphases',
+ b'perf::phases|perfphases',
[
(b'', b'full', False, b'include file reading time too'),
],
@@ -1394,7 +1394,7 @@
fm.end()
-@command(b'perfphasesremote', [], b"[DEST]")
+@command(b'perf::phasesremote|perfphasesremote', [], b"[DEST]")
def perfphasesremote(ui, repo, dest=None, **opts):
"""benchmark time needed to analyse phases of the remote server"""
from mercurial.node import bin
@@ -1455,7 +1455,7 @@
@command(
- b'perfmanifest',
+ b'perf::manifest|perfmanifest',
[
(b'm', b'manifest-rev', False, b'Look up a manifest node revision'),
(b'', b'clear-disk', False, b'clear on-disk caches too'),
@@ -1499,7 +1499,7 @@
fm.end()
-@command(b'perfchangeset', formatteropts)
+@command(b'perf::changeset|perfchangeset', formatteropts)
def perfchangeset(ui, repo, rev, **opts):
opts = _byteskwargs(opts)
timer, fm = gettimer(ui, opts)
@@ -1513,7 +1513,7 @@
fm.end()
-@command(b'perfignore', formatteropts)
+@command(b'perf::ignore|perfignore', formatteropts)
def perfignore(ui, repo, **opts):
"""benchmark operation related to computing ignore"""
opts = _byteskwargs(opts)
@@ -1532,7 +1532,7 @@
@command(
- b'perfindex',
+ b'perf::index|perfindex',
[
(b'', b'rev', [], b'revision to be looked up (default tip)'),
(b'', b'no-lookup', None, b'do not revision lookup post creation'),
@@ -1596,7 +1596,7 @@
@command(
- b'perfnodemap',
+ b'perf::nodemap|perfnodemap',
[
(b'', b'rev', [], b'revision to be looked up (default tip)'),
(b'', b'clear-caches', True, b'clear revlog cache between calls'),
@@ -1667,7 +1667,7 @@
fm.end()
-@command(b'perfstartup', formatteropts)
+@command(b'perf::startup|perfstartup', formatteropts)
def perfstartup(ui, repo, **opts):
opts = _byteskwargs(opts)
timer, fm = gettimer(ui, opts)
@@ -1685,7 +1685,7 @@
fm.end()
-@command(b'perfparents', formatteropts)
+@command(b'perf::parents|perfparents', formatteropts)
def perfparents(ui, repo, **opts):
"""benchmark the time necessary to fetch one changeset's parents.
@@ -1712,7 +1712,7 @@
fm.end()
-@command(b'perfctxfiles', formatteropts)
+@command(b'perf::ctxfiles|perfctxfiles', formatteropts)
def perfctxfiles(ui, repo, x, **opts):
opts = _byteskwargs(opts)
x = int(x)
@@ -1725,7 +1725,7 @@
fm.end()
-@command(b'perfrawfiles', formatteropts)
+@command(b'perf::rawfiles|perfrawfiles', formatteropts)
def perfrawfiles(ui, repo, x, **opts):
opts = _byteskwargs(opts)
x = int(x)
@@ -1739,7 +1739,7 @@
fm.end()
-@command(b'perflookup', formatteropts)
+@command(b'perf::lookup|perflookup', formatteropts)
def perflookup(ui, repo, rev, **opts):
opts = _byteskwargs(opts)
timer, fm = gettimer(ui, opts)
@@ -1748,7 +1748,7 @@
@command(
- b'perflinelogedits',
+ b'perf::linelogedits|perflinelogedits',
[
(b'n', b'edits', 10000, b'number of edits'),
(b'', b'max-hunk-lines', 10, b'max lines in a hunk'),
@@ -1786,7 +1786,7 @@
fm.end()
-@command(b'perfrevrange', formatteropts)
+@command(b'perf::revrange|perfrevrange', formatteropts)
def perfrevrange(ui, repo, *specs, **opts):
opts = _byteskwargs(opts)
timer, fm = gettimer(ui, opts)
@@ -1795,7 +1795,7 @@
fm.end()
-@command(b'perfnodelookup', formatteropts)
+@command(b'perf::nodelookup|perfnodelookup', formatteropts)
def perfnodelookup(ui, repo, rev, **opts):
opts = _byteskwargs(opts)
timer, fm = gettimer(ui, opts)
@@ -1814,7 +1814,7 @@
@command(
- b'perflog',
+ b'perf::log|perflog',
[(b'', b'rename', False, b'ask log to follow renames')] + formatteropts,
)
def perflog(ui, repo, rev=None, **opts):
@@ -1832,7 +1832,7 @@
fm.end()
-@command(b'perfmoonwalk', formatteropts)
+@command(b'perf::moonwalk|perfmoonwalk', formatteropts)
def perfmoonwalk(ui, repo, **opts):
"""benchmark walking the changelog backwards
@@ -1851,7 +1851,7 @@
@command(
- b'perftemplating',
+ b'perf::templating|perftemplating',
[
(b'r', b'rev', [], b'revisions to run the template on'),
]
@@ -1941,7 +1941,7 @@
@command(
- b'perfhelper-mergecopies',
+ b'perf::helper-mergecopies|perfhelper-mergecopies',
formatteropts
+ [
(b'r', b'revs', [], b'restrict search to these revisions'),
@@ -2124,7 +2124,7 @@
@command(
- b'perfhelper-pathcopies',
+ b'perf::helper-pathcopies|perfhelper-pathcopies',
formatteropts
+ [
(b'r', b'revs', [], b'restrict search to these revisions'),
@@ -2263,7 +2263,7 @@
_displaystats(ui, opts, entries, alldata)
-@command(b'perfcca', formatteropts)
+@command(b'perf::cca|perfcca', formatteropts)
def perfcca(ui, repo, **opts):
opts = _byteskwargs(opts)
timer, fm = gettimer(ui, opts)
@@ -2271,7 +2271,7 @@
fm.end()
-@command(b'perffncacheload', formatteropts)
+@command(b'perf::fncacheload|perffncacheload', formatteropts)
def perffncacheload(ui, repo, **opts):
opts = _byteskwargs(opts)
timer, fm = gettimer(ui, opts)
@@ -2284,7 +2284,7 @@
fm.end()
-@command(b'perffncachewrite', formatteropts)
+@command(b'perf::fncachewrite|perffncachewrite', formatteropts)
def perffncachewrite(ui, repo, **opts):
opts = _byteskwargs(opts)
timer, fm = gettimer(ui, opts)
@@ -2304,7 +2304,7 @@
fm.end()
-@command(b'perffncacheencode', formatteropts)
+@command(b'perf::fncacheencode|perffncacheencode', formatteropts)
def perffncacheencode(ui, repo, **opts):
opts = _byteskwargs(opts)
timer, fm = gettimer(ui, opts)
@@ -2348,7 +2348,7 @@
@command(
- b'perfbdiff',
+ b'perf::bdiff|perfbdiff',
revlogopts
+ formatteropts
+ [
@@ -2464,7 +2464,7 @@
@command(
- b'perfunidiff',
+ b'perf::unidiff|perfunidiff',
revlogopts
+ formatteropts
+ [
@@ -2543,7 +2543,7 @@
fm.end()
-@command(b'perfdiffwd', formatteropts)
+@command(b'perf::diffwd|perfdiffwd', formatteropts)
def perfdiffwd(ui, repo, **opts):
"""Profile diff of working directory changes"""
opts = _byteskwargs(opts)
@@ -2568,7 +2568,11 @@
fm.end()
-@command(b'perfrevlogindex', revlogopts + formatteropts, b'-c|-m|FILE')
+@command(
+ b'perf::revlogindex|perfrevlogindex',
+ revlogopts + formatteropts,
+ b'-c|-m|FILE',
+)
def perfrevlogindex(ui, repo, file_=None, **opts):
"""Benchmark operations against a revlog index.
@@ -2704,7 +2708,7 @@
@command(
- b'perfrevlogrevisions',
+ b'perf::revlogrevisions|perfrevlogrevisions',
revlogopts
+ formatteropts
+ [
@@ -2754,7 +2758,7 @@
@command(
- b'perfrevlogwrite',
+ b'perf::revlogwrite|perfrevlogwrite',
revlogopts
+ formatteropts
+ [
@@ -3047,7 +3051,7 @@
@command(
- b'perfrevlogchunks',
+ b'perf::revlogchunks|perfrevlogchunks',
revlogopts
+ formatteropts
+ [
@@ -3176,7 +3180,7 @@
@command(
- b'perfrevlogrevision',
+ b'perf::revlogrevision|perfrevlogrevision',
revlogopts
+ formatteropts
+ [(b'', b'cache', False, b'use caches instead of clearing')],
@@ -3319,7 +3323,7 @@
@command(
- b'perfrevset',
+ b'perf::revset|perfrevset',
[
(b'C', b'clear', False, b'clear volatile cache between each call.'),
(b'', b'contexts', False, b'obtain changectx for each revision'),
@@ -3352,7 +3356,7 @@
@command(
- b'perfvolatilesets',
+ b'perf::volatilesets|perfvolatilesets',
[
(b'', b'clear-obsstore', False, b'drop obsstore between each call.'),
]
@@ -3401,7 +3405,7 @@
@command(
- b'perfbranchmap',
+ b'perf::branchmap|perfbranchmap',
[
(b'f', b'full', False, b'Includes build time of subset'),
(
@@ -3492,7 +3496,7 @@
@command(
- b'perfbranchmapupdate',
+ b'perf::branchmapupdate|perfbranchmapupdate',
[
(b'', b'base', [], b'subset of revision to start from'),
(b'', b'target', [], b'subset of revision to end with'),
@@ -3602,7 +3606,7 @@
@command(
- b'perfbranchmapload',
+ b'perf::branchmapload|perfbranchmapload',
[
(b'f', b'filter', b'', b'Specify repoview filter'),
(b'', b'list', False, b'List brachmap filter caches'),
@@ -3661,7 +3665,7 @@
fm.end()
-@command(b'perfloadmarkers')
+@command(b'perf::loadmarkers|perfloadmarkers')
def perfloadmarkers(ui, repo):
"""benchmark the time to parse the on-disk markers for a repo
@@ -3673,7 +3677,7 @@
@command(
- b'perflrucachedict',
+ b'perf::lrucachedict|perflrucachedict',
formatteropts
+ [
(b'', b'costlimit', 0, b'maximum total cost of items in cache'),
@@ -3829,7 +3833,7 @@
@command(
- b'perfwrite',
+ b'perf::write|perfwrite',
formatteropts
+ [
(b'', b'write-method', b'write', b'ui write method'),
@@ -3892,7 +3896,7 @@
@command(
- b'perfprogress',
+ b'perf::progress|perfprogress',
formatteropts
+ [
(b'', b'topic', b'topic', b'topic for progress messages'),
--- a/contrib/python-zstandard/c-ext/bufferutil.c Wed Feb 10 23:03:54 2021 +0100
+++ b/contrib/python-zstandard/c-ext/bufferutil.c Thu Feb 11 20:36:46 2021 -0800
@@ -758,7 +758,7 @@
};
void bufferutil_module_init(PyObject* mod) {
- Py_TYPE(&ZstdBufferWithSegmentsType) = &PyType_Type;
+ Py_SET_TYPE(&ZstdBufferWithSegmentsType, &PyType_Type);
if (PyType_Ready(&ZstdBufferWithSegmentsType) < 0) {
return;
}
@@ -766,7 +766,7 @@
Py_INCREF(&ZstdBufferWithSegmentsType);
PyModule_AddObject(mod, "BufferWithSegments", (PyObject*)&ZstdBufferWithSegmentsType);
- Py_TYPE(&ZstdBufferSegmentsType) = &PyType_Type;
+ Py_SET_TYPE(&ZstdBufferSegmentsType, &PyType_Type);
if (PyType_Ready(&ZstdBufferSegmentsType) < 0) {
return;
}
@@ -774,7 +774,7 @@
Py_INCREF(&ZstdBufferSegmentsType);
PyModule_AddObject(mod, "BufferSegments", (PyObject*)&ZstdBufferSegmentsType);
- Py_TYPE(&ZstdBufferSegmentType) = &PyType_Type;
+ Py_SET_TYPE(&ZstdBufferSegmentType, &PyType_Type);
if (PyType_Ready(&ZstdBufferSegmentType) < 0) {
return;
}
@@ -782,7 +782,7 @@
Py_INCREF(&ZstdBufferSegmentType);
PyModule_AddObject(mod, "BufferSegment", (PyObject*)&ZstdBufferSegmentType);
- Py_TYPE(&ZstdBufferWithSegmentsCollectionType) = &PyType_Type;
+ Py_SET_TYPE(&ZstdBufferWithSegmentsCollectionType, &PyType_Type);
if (PyType_Ready(&ZstdBufferWithSegmentsCollectionType) < 0) {
return;
}
--- a/contrib/python-zstandard/c-ext/compressionchunker.c Wed Feb 10 23:03:54 2021 +0100
+++ b/contrib/python-zstandard/c-ext/compressionchunker.c Thu Feb 11 20:36:46 2021 -0800
@@ -348,12 +348,12 @@
};
void compressionchunker_module_init(PyObject* module) {
- Py_TYPE(&ZstdCompressionChunkerIteratorType) = &PyType_Type;
+ Py_SET_TYPE(&ZstdCompressionChunkerIteratorType, &PyType_Type);
if (PyType_Ready(&ZstdCompressionChunkerIteratorType) < 0) {
return;
}
- Py_TYPE(&ZstdCompressionChunkerType) = &PyType_Type;
+ Py_SET_TYPE(&ZstdCompressionChunkerType, &PyType_Type);
if (PyType_Ready(&ZstdCompressionChunkerType) < 0) {
return;
}
--- a/contrib/python-zstandard/c-ext/compressiondict.c Wed Feb 10 23:03:54 2021 +0100
+++ b/contrib/python-zstandard/c-ext/compressiondict.c Thu Feb 11 20:36:46 2021 -0800
@@ -400,7 +400,7 @@
};
void compressiondict_module_init(PyObject* mod) {
- Py_TYPE(&ZstdCompressionDictType) = &PyType_Type;
+ Py_SET_TYPE(&ZstdCompressionDictType, &PyType_Type);
if (PyType_Ready(&ZstdCompressionDictType) < 0) {
return;
}
--- a/contrib/python-zstandard/c-ext/compressionparams.c Wed Feb 10 23:03:54 2021 +0100
+++ b/contrib/python-zstandard/c-ext/compressionparams.c Thu Feb 11 20:36:46 2021 -0800
@@ -556,7 +556,7 @@
};
void compressionparams_module_init(PyObject* mod) {
- Py_TYPE(&ZstdCompressionParametersType) = &PyType_Type;
+ Py_SET_TYPE(&ZstdCompressionParametersType, &PyType_Type);
if (PyType_Ready(&ZstdCompressionParametersType) < 0) {
return;
}
--- a/contrib/python-zstandard/c-ext/compressionreader.c Wed Feb 10 23:03:54 2021 +0100
+++ b/contrib/python-zstandard/c-ext/compressionreader.c Thu Feb 11 20:36:46 2021 -0800
@@ -811,7 +811,7 @@
void compressionreader_module_init(PyObject* mod) {
/* TODO make reader a sub-class of io.RawIOBase */
- Py_TYPE(&ZstdCompressionReaderType) = &PyType_Type;
+ Py_SET_TYPE(&ZstdCompressionReaderType, &PyType_Type);
if (PyType_Ready(&ZstdCompressionReaderType) < 0) {
return;
}
--- a/contrib/python-zstandard/c-ext/compressionwriter.c Wed Feb 10 23:03:54 2021 +0100
+++ b/contrib/python-zstandard/c-ext/compressionwriter.c Thu Feb 11 20:36:46 2021 -0800
@@ -365,7 +365,7 @@
};
void compressionwriter_module_init(PyObject* mod) {
- Py_TYPE(&ZstdCompressionWriterType) = &PyType_Type;
+ Py_SET_TYPE(&ZstdCompressionWriterType, &PyType_Type);
if (PyType_Ready(&ZstdCompressionWriterType) < 0) {
return;
}
--- a/contrib/python-zstandard/c-ext/compressobj.c Wed Feb 10 23:03:54 2021 +0100
+++ b/contrib/python-zstandard/c-ext/compressobj.c Thu Feb 11 20:36:46 2021 -0800
@@ -249,7 +249,7 @@
};
void compressobj_module_init(PyObject* module) {
- Py_TYPE(&ZstdCompressionObjType) = &PyType_Type;
+ Py_SET_TYPE(&ZstdCompressionObjType, &PyType_Type);
if (PyType_Ready(&ZstdCompressionObjType) < 0) {
return;
}
--- a/contrib/python-zstandard/c-ext/compressor.c Wed Feb 10 23:03:54 2021 +0100
+++ b/contrib/python-zstandard/c-ext/compressor.c Thu Feb 11 20:36:46 2021 -0800
@@ -619,7 +619,7 @@
goto finally;
}
- Py_SIZE(output) = outBuffer.pos;
+ Py_SET_SIZE(output, outBuffer.pos);
finally:
PyBuffer_Release(&source);
@@ -1659,7 +1659,7 @@
};
void compressor_module_init(PyObject* mod) {
- Py_TYPE(&ZstdCompressorType) = &PyType_Type;
+ Py_SET_TYPE(&ZstdCompressorType, &PyType_Type);
if (PyType_Ready(&ZstdCompressorType) < 0) {
return;
}
--- a/contrib/python-zstandard/c-ext/compressoriterator.c Wed Feb 10 23:03:54 2021 +0100
+++ b/contrib/python-zstandard/c-ext/compressoriterator.c Thu Feb 11 20:36:46 2021 -0800
@@ -228,7 +228,7 @@
};
void compressoriterator_module_init(PyObject* mod) {
- Py_TYPE(&ZstdCompressorIteratorType) = &PyType_Type;
+ Py_SET_TYPE(&ZstdCompressorIteratorType, &PyType_Type);
if (PyType_Ready(&ZstdCompressorIteratorType) < 0) {
return;
}
--- a/contrib/python-zstandard/c-ext/decompressionreader.c Wed Feb 10 23:03:54 2021 +0100
+++ b/contrib/python-zstandard/c-ext/decompressionreader.c Thu Feb 11 20:36:46 2021 -0800
@@ -774,7 +774,7 @@
void decompressionreader_module_init(PyObject* mod) {
/* TODO make reader a sub-class of io.RawIOBase */
- Py_TYPE(&ZstdDecompressionReaderType) = &PyType_Type;
+ Py_SET_TYPE(&ZstdDecompressionReaderType, &PyType_Type);
if (PyType_Ready(&ZstdDecompressionReaderType) < 0) {
return;
}
--- a/contrib/python-zstandard/c-ext/decompressionwriter.c Wed Feb 10 23:03:54 2021 +0100
+++ b/contrib/python-zstandard/c-ext/decompressionwriter.c Thu Feb 11 20:36:46 2021 -0800
@@ -288,7 +288,7 @@
};
void decompressionwriter_module_init(PyObject* mod) {
- Py_TYPE(&ZstdDecompressionWriterType) = &PyType_Type;
+ Py_SET_TYPE(&ZstdDecompressionWriterType, &PyType_Type);
if (PyType_Ready(&ZstdDecompressionWriterType) < 0) {
return;
}
--- a/contrib/python-zstandard/c-ext/decompressobj.c Wed Feb 10 23:03:54 2021 +0100
+++ b/contrib/python-zstandard/c-ext/decompressobj.c Thu Feb 11 20:36:46 2021 -0800
@@ -195,7 +195,7 @@
};
void decompressobj_module_init(PyObject* module) {
- Py_TYPE(&ZstdDecompressionObjType) = &PyType_Type;
+ Py_SET_TYPE(&ZstdDecompressionObjType, &PyType_Type);
if (PyType_Ready(&ZstdDecompressionObjType) < 0) {
return;
}
--- a/contrib/python-zstandard/c-ext/decompressor.c Wed Feb 10 23:03:54 2021 +0100
+++ b/contrib/python-zstandard/c-ext/decompressor.c Thu Feb 11 20:36:46 2021 -0800
@@ -1811,7 +1811,7 @@
};
void decompressor_module_init(PyObject* mod) {
- Py_TYPE(&ZstdDecompressorType) = &PyType_Type;
+ Py_SET_TYPE(&ZstdDecompressorType, &PyType_Type);
if (PyType_Ready(&ZstdDecompressorType) < 0) {
return;
}
--- a/contrib/python-zstandard/c-ext/decompressoriterator.c Wed Feb 10 23:03:54 2021 +0100
+++ b/contrib/python-zstandard/c-ext/decompressoriterator.c Thu Feb 11 20:36:46 2021 -0800
@@ -242,7 +242,7 @@
};
void decompressoriterator_module_init(PyObject* mod) {
- Py_TYPE(&ZstdDecompressorIteratorType) = &PyType_Type;
+ Py_SET_TYPE(&ZstdDecompressorIteratorType, &PyType_Type);
if (PyType_Ready(&ZstdDecompressorIteratorType) < 0) {
return;
}
--- a/contrib/python-zstandard/c-ext/frameparams.c Wed Feb 10 23:03:54 2021 +0100
+++ b/contrib/python-zstandard/c-ext/frameparams.c Thu Feb 11 20:36:46 2021 -0800
@@ -128,7 +128,7 @@
};
void frameparams_module_init(PyObject* mod) {
- Py_TYPE(&FrameParametersType) = &PyType_Type;
+ Py_SET_TYPE(&FrameParametersType, &PyType_Type);
if (PyType_Ready(&FrameParametersType) < 0) {
return;
}
--- a/contrib/python-zstandard/c-ext/python-zstandard.h Wed Feb 10 23:03:54 2021 +0100
+++ b/contrib/python-zstandard/c-ext/python-zstandard.h Thu Feb 11 20:36:46 2021 -0800
@@ -9,6 +9,7 @@
#define PY_SSIZE_T_CLEAN
#include <Python.h>
#include "structmember.h"
+#include <pythoncapi_compat.h>
#define ZSTD_STATIC_LINKING_ONLY
#define ZDICT_STATIC_LINKING_ONLY
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/python-zstandard/zstd/common/pythoncapi_compat.h Thu Feb 11 20:36:46 2021 -0800
@@ -0,0 +1,283 @@
+// Header file providing new functions of the Python C API to old Python
+// versions.
+//
+// File distributed under the MIT license.
+//
+// Homepage:
+// https://github.com/pythoncapi/pythoncapi_compat
+//
+// Latest version:
+// https://raw.githubusercontent.com/pythoncapi/pythoncapi_compat/master/pythoncapi_compat.h
+
+#ifndef PYTHONCAPI_COMPAT
+#define PYTHONCAPI_COMPAT
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <Python.h>
+#include "frameobject.h" // PyFrameObject, PyFrame_GetBack()
+
+
+/* VC 2008 doesn't know about the inline keyword. */
+#if defined(_MSC_VER) && _MSC_VER < 1900
+#define inline __forceinline
+#endif
+
+// Cast argument to PyObject* type.
+#ifndef _PyObject_CAST
+# define _PyObject_CAST(op) ((PyObject*)(op))
+#endif
+
+
+// bpo-42262 added Py_NewRef() to Python 3.10.0a3
+#if PY_VERSION_HEX < 0x030a00A3 && !defined(Py_NewRef)
+static inline PyObject* _Py_NewRef(PyObject *obj)
+{
+ Py_INCREF(obj);
+ return obj;
+}
+#define Py_NewRef(obj) _Py_NewRef(_PyObject_CAST(obj))
+#endif
+
+
+// bpo-42262 added Py_XNewRef() to Python 3.10.0a3
+#if PY_VERSION_HEX < 0x030a00A3 && !defined(Py_XNewRef)
+static inline PyObject* _Py_XNewRef(PyObject *obj)
+{
+ Py_XINCREF(obj);
+ return obj;
+}
+#define Py_XNewRef(obj) _Py_XNewRef(_PyObject_CAST(obj))
+#endif
+
+
+// bpo-39573 added Py_SET_REFCNT() to Python 3.9.0a4
+#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_REFCNT)
+static inline void _Py_SET_REFCNT(PyObject *ob, Py_ssize_t refcnt)
+{
+ ob->ob_refcnt = refcnt;
+}
+#define Py_SET_REFCNT(ob, refcnt) _Py_SET_REFCNT((PyObject*)(ob), refcnt)
+#endif
+
+
+// bpo-39573 added Py_SET_TYPE() to Python 3.9.0a4
+#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_TYPE)
+static inline void
+_Py_SET_TYPE(PyObject *ob, PyTypeObject *type)
+{
+ ob->ob_type = type;
+}
+#define Py_SET_TYPE(ob, type) _Py_SET_TYPE((PyObject*)(ob), type)
+#endif
+
+
+// bpo-39573 added Py_SET_SIZE() to Python 3.9.0a4
+#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_SIZE)
+static inline void
+_Py_SET_SIZE(PyVarObject *ob, Py_ssize_t size)
+{
+ ob->ob_size = size;
+}
+#define Py_SET_SIZE(ob, size) _Py_SET_SIZE((PyVarObject*)(ob), size)
+#endif
+
+
+// bpo-40421 added PyFrame_GetCode() to Python 3.9.0b1
+#if PY_VERSION_HEX < 0x030900B1
+static inline PyCodeObject*
+PyFrame_GetCode(PyFrameObject *frame)
+{
+ PyCodeObject *code;
+ assert(frame != NULL);
+ code = frame->f_code;
+ assert(code != NULL);
+ Py_INCREF(code);
+ return code;
+}
+#endif
+
+static inline PyCodeObject*
+_PyFrame_GetCodeBorrow(PyFrameObject *frame)
+{
+ PyCodeObject *code = PyFrame_GetCode(frame);
+ Py_DECREF(code);
+ return code; // borrowed reference
+}
+
+
+// bpo-40421 added PyFrame_GetBack() to Python 3.9.0b1
+#if PY_VERSION_HEX < 0x030900B1
+static inline PyFrameObject*
+PyFrame_GetBack(PyFrameObject *frame)
+{
+ PyFrameObject *back;
+ assert(frame != NULL);
+ back = frame->f_back;
+ Py_XINCREF(back);
+ return back;
+}
+#endif
+
+static inline PyFrameObject*
+_PyFrame_GetBackBorrow(PyFrameObject *frame)
+{
+ PyFrameObject *back = PyFrame_GetBack(frame);
+ Py_XDECREF(back);
+ return back; // borrowed reference
+}
+
+
+// bpo-39947 added PyThreadState_GetInterpreter() to Python 3.9.0a5
+#if PY_VERSION_HEX < 0x030900A5
+static inline PyInterpreterState *
+PyThreadState_GetInterpreter(PyThreadState *tstate)
+{
+ assert(tstate != NULL);
+ return tstate->interp;
+}
+#endif
+
+
+// bpo-40429 added PyThreadState_GetFrame() to Python 3.9.0b1
+#if PY_VERSION_HEX < 0x030900B1
+static inline PyFrameObject*
+PyThreadState_GetFrame(PyThreadState *tstate)
+{
+ PyFrameObject *frame;
+ assert(tstate != NULL);
+ frame = tstate->frame;
+ Py_XINCREF(frame);
+ return frame;
+}
+#endif
+
+static inline PyFrameObject*
+_PyThreadState_GetFrameBorrow(PyThreadState *tstate)
+{
+ PyFrameObject *frame = PyThreadState_GetFrame(tstate);
+ Py_XDECREF(frame);
+ return frame; // borrowed reference
+}
+
+
+// bpo-39947 added PyInterpreterState_Get() to Python 3.9.0a5
+#if PY_VERSION_HEX < 0x030900A5
+static inline PyInterpreterState *
+PyInterpreterState_Get(void)
+{
+ PyThreadState *tstate;
+ PyInterpreterState *interp;
+
+ tstate = PyThreadState_GET();
+ if (tstate == NULL) {
+ Py_FatalError("GIL released (tstate is NULL)");
+ }
+ interp = tstate->interp;
+ if (interp == NULL) {
+ Py_FatalError("no current interpreter");
+ }
+ return interp;
+}
+#endif
+
+
+// bpo-39947 added PyThreadState_GetID() to Python 3.9.0a6
+#if 0x030700A1 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x030900A6
+static inline uint64_t
+PyThreadState_GetID(PyThreadState *tstate)
+{
+ assert(tstate != NULL);
+ return tstate->id;
+}
+#endif
+
+
+// bpo-37194 added PyObject_CallNoArgs() to Python 3.9.0a1
+#if PY_VERSION_HEX < 0x030900A1
+static inline PyObject*
+PyObject_CallNoArgs(PyObject *func)
+{
+ return PyObject_CallFunctionObjArgs(func, NULL);
+}
+#endif
+
+
+// bpo-39245 made PyObject_CallOneArg() public (previously called
+// _PyObject_CallOneArg) in Python 3.9.0a4
+#if PY_VERSION_HEX < 0x030900A4
+static inline PyObject*
+PyObject_CallOneArg(PyObject *func, PyObject *arg)
+{
+ return PyObject_CallFunctionObjArgs(func, arg, NULL);
+}
+#endif
+
+
+// bpo-40024 added PyModule_AddType() to Python 3.9.0a5
+#if PY_VERSION_HEX < 0x030900A5
+static inline int
+PyModule_AddType(PyObject *module, PyTypeObject *type)
+{
+ const char *name, *dot;
+
+ if (PyType_Ready(type) < 0) {
+ return -1;
+ }
+
+ // inline _PyType_Name()
+ name = type->tp_name;
+ assert(name != NULL);
+ dot = strrchr(name, '.');
+ if (dot != NULL) {
+ name = dot + 1;
+ }
+
+ Py_INCREF(type);
+ if (PyModule_AddObject(module, name, (PyObject *)type) < 0) {
+ Py_DECREF(type);
+ return -1;
+ }
+
+ return 0;
+}
+#endif
+
+
+// bpo-40241 added PyObject_GC_IsTracked() to Python 3.9.0a6.
+// bpo-4688 added _PyObject_GC_IS_TRACKED() to Python 2.7.0a2.
+#if PY_VERSION_HEX < 0x030900A6
+static inline int
+PyObject_GC_IsTracked(PyObject* obj)
+{
+ return (PyObject_IS_GC(obj) && _PyObject_GC_IS_TRACKED(obj));
+}
+#endif
+
+// bpo-40241 added PyObject_GC_IsFinalized() to Python 3.9.0a6.
+// bpo-18112 added _PyGCHead_FINALIZED() to Python 3.4.0 final.
+#if PY_VERSION_HEX < 0x030900A6 && PY_VERSION_HEX >= 0x030400F0
+static inline int
+PyObject_GC_IsFinalized(PyObject *obj)
+{
+ return (PyObject_IS_GC(obj) && _PyGCHead_FINALIZED((PyGC_Head *)(obj)-1));
+}
+#endif
+
+
+// bpo-39573 added Py_IS_TYPE() to Python 3.9.0a4
+#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_IS_TYPE)
+static inline int
+_Py_IS_TYPE(const PyObject *ob, const PyTypeObject *type) {
+ return ob->ob_type == type;
+}
+#define Py_IS_TYPE(ob, type) _Py_IS_TYPE((const PyObject*)(ob), type)
+#endif
+
+
+#ifdef __cplusplus
+}
+#endif
+#endif // PYTHONCAPI_COMPAT
--- a/hgext/churn.py Wed Feb 10 23:03:54 2021 +0100
+++ b/hgext/churn.py Thu Feb 11 20:36:46 2021 -0800
@@ -38,11 +38,16 @@
def changedlines(ui, repo, ctx1, ctx2, fmatch):
added, removed = 0, 0
diff = b''.join(patch.diff(repo, ctx1.node(), ctx2.node(), fmatch))
+ inhunk = False
for l in diff.split(b'\n'):
- if l.startswith(b"+") and not l.startswith(b"+++ "):
+ if inhunk and l.startswith(b"+"):
added += 1
- elif l.startswith(b"-") and not l.startswith(b"--- "):
+ elif inhunk and l.startswith(b"-"):
removed += 1
+ elif l.startswith(b"@"):
+ inhunk = True
+ elif l.startswith(b"d"):
+ inhunk = False
return (added, removed)
--- a/hgext/convert/__init__.py Wed Feb 10 23:03:54 2021 +0100
+++ b/hgext/convert/__init__.py Thu Feb 11 20:36:46 2021 -0800
@@ -491,6 +491,22 @@
:convert.skiptags: does not convert tags from the source repo to the target
repo. The default is False.
+
+ Subversion Destination
+ ######################
+
+ Original commit dates are not preserved by default.
+
+ :convert.svn.dangerous-set-commit-dates: preserve original commit dates,
+ forcefully setting ``svn:date`` revision properties. This option is
+ DANGEROUS and may break some subversion functionality for the resulting
+ repository (e.g. filtering revisions with date ranges in ``svn log``),
+ as original commit dates are not guaranteed to be monotonically
+ increasing.
+
+ For commit dates setting to work destination repository must have
+ ``pre-revprop-change`` hook configured to allow setting of ``svn:date``
+ revision properties. See Subversion documentation for more details.
"""
return convcmd.convert(ui, src, dest, revmapfile, **opts)
--- a/hgext/convert/git.py Wed Feb 10 23:03:54 2021 +0100
+++ b/hgext/convert/git.py Thu Feb 11 20:36:46 2021 -0800
@@ -247,7 +247,8 @@
b'\n'.join(line.strip() for line in content.split(b'\n')),
)
for sec in c.sections():
- s = c[sec]
+ # turn the config object into a real dict
+ s = dict(c.items(sec))
if b'url' in s and b'path' in s:
self.submodules.append(submodule(s[b'path'], b'', s[b'url']))
--- a/hgext/convert/subversion.py Wed Feb 10 23:03:54 2021 +0100
+++ b/hgext/convert/subversion.py Thu Feb 11 20:36:46 2021 -0800
@@ -97,6 +97,17 @@
return s.decode(fsencoding).encode('utf-8')
+def formatsvndate(date):
+ return dateutil.datestr(date, b'%Y-%m-%dT%H:%M:%S.000000Z')
+
+
+def parsesvndate(s):
+ # Example SVN datetime. Includes microseconds.
+ # ISO-8601 conformant
+ # '2007-01-04T17:35:00.902377Z'
+ return dateutil.parsedate(s[:19] + b' UTC', [b'%Y-%m-%dT%H:%M:%S'])
+
+
class SvnPathNotFound(Exception):
pass
@@ -1158,12 +1169,7 @@
continue
paths.append((path, ent))
- # Example SVN datetime. Includes microseconds.
- # ISO-8601 conformant
- # '2007-01-04T17:35:00.902377Z'
- date = dateutil.parsedate(
- date[:19] + b" UTC", [b"%Y-%m-%dT%H:%M:%S"]
- )
+ date = parsesvndate(date)
if self.ui.configbool(b'convert', b'localtimezone'):
date = makedatetimestamp(date[0])
@@ -1380,7 +1386,7 @@
return logstream(stdout)
-pre_revprop_change = b'''#!/bin/sh
+pre_revprop_change_template = b'''#!/bin/sh
REPOS="$1"
REV="$2"
@@ -1388,15 +1394,26 @@
PROPNAME="$4"
ACTION="$5"
-if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
-if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
-if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
+%(rules)s
echo "Changing prohibited revision property" >&2
exit 1
'''
+def gen_pre_revprop_change_hook(prop_actions_allowed):
+ rules = []
+ for action, propname in prop_actions_allowed:
+ rules.append(
+ (
+ b'if [ "$ACTION" = "%s" -a "$PROPNAME" = "%s" ]; '
+ b'then exit 0; fi'
+ )
+ % (action, propname)
+ )
+ return pre_revprop_change_template % {b'rules': b'\n'.join(rules)}
+
+
class svn_sink(converter_sink, commandline):
commit_re = re.compile(br'Committed revision (\d+).', re.M)
uuid_re = re.compile(br'Repository UUID:\s*(\S+)', re.M)
@@ -1470,9 +1487,20 @@
self.is_exec = None
if created:
+ prop_actions_allowed = [
+ (b'M', b'svn:log'),
+ (b'A', b'hg:convert-branch'),
+ (b'A', b'hg:convert-rev'),
+ ]
+
+ if self.ui.configbool(
+ b'convert', b'svn.dangerous-set-commit-dates'
+ ):
+ prop_actions_allowed.append((b'M', b'svn:date'))
+
hook = os.path.join(created, b'hooks', b'pre-revprop-change')
fp = open(hook, b'wb')
- fp.write(pre_revprop_change)
+ fp.write(gen_pre_revprop_change_hook(prop_actions_allowed))
fp.close()
util.setflags(hook, False, True)
@@ -1667,6 +1695,23 @@
revprop=True,
revision=rev,
)
+
+ if self.ui.configbool(
+ b'convert', b'svn.dangerous-set-commit-dates'
+ ):
+                # Subversion always uses UTC to represent date and time
+ date = dateutil.parsedate(commit.date)
+ date = (date[0], 0)
+
+                # The only way to set date and time for an svn commit is to run propset after the commit is done
+ self.run(
+ b'propset',
+ b'svn:date',
+ formatsvndate(date),
+ revprop=True,
+ revision=rev,
+ )
+
for parent in parents:
self.addchild(parent, rev)
return self.revid(rev)
--- a/hgext/fix.py Wed Feb 10 23:03:54 2021 +0100
+++ b/hgext/fix.py Thu Feb 11 20:36:46 2021 -0800
@@ -433,8 +433,9 @@
if not (len(revs) == 1 and wdirrev in revs):
cmdutil.checkunfinished(repo)
rewriteutil.precheck(repo, revs, b'fix')
- if wdirrev in revs and list(
- mergestatemod.mergestate.read(repo).unresolved()
+ if (
+ wdirrev in revs
+ and mergestatemod.mergestate.read(repo).unresolvedcount()
):
raise error.Abort(b'unresolved conflicts', hint=b"use 'hg resolve'")
if not revs:
--- a/hgext/histedit.py Wed Feb 10 23:03:54 2021 +0100
+++ b/hgext/histedit.py Thu Feb 11 20:36:46 2021 -0800
@@ -1581,10 +1581,19 @@
def layout(mode):
maxy, maxx = stdscr.getmaxyx()
helplen = len(helplines(mode))
+ mainlen = maxy - helplen - 12
+ if mainlen < 1:
+ raise error.Abort(
+ _(b"terminal dimensions %d by %d too small for curses histedit")
+ % (maxy, maxx),
+ hint=_(
+ b"enlarge your terminal or use --config ui.interface=text"
+ ),
+ )
return {
b'commit': (12, maxx),
b'help': (helplen, maxx),
- b'main': (maxy - helplen - 12, maxx),
+ b'main': (mainlen, maxx),
}
def drawvertwin(size, y, x):
@@ -1614,63 +1623,60 @@
stdscr.clear()
stdscr.refresh()
while True:
- try:
- oldmode, _ = state[b'mode']
- if oldmode == MODE_INIT:
- changemode(state, MODE_RULES)
- e = event(state, ch)
-
- if e == E_QUIT:
- return False
- if e == E_HISTEDIT:
- return state[b'rules']
+ oldmode, unused = state[b'mode']
+ if oldmode == MODE_INIT:
+ changemode(state, MODE_RULES)
+ e = event(state, ch)
+
+ if e == E_QUIT:
+ return False
+ if e == E_HISTEDIT:
+ return state[b'rules']
+ else:
+ if e == E_RESIZE:
+ size = screen_size()
+ if size != stdscr.getmaxyx():
+ curses.resizeterm(*size)
+
+ curmode, unused = state[b'mode']
+ sizes = layout(curmode)
+ if curmode != oldmode:
+ state[b'page_height'] = sizes[b'main'][0]
+ # Adjust the view to fit the current screen size.
+ movecursor(state, state[b'pos'], state[b'pos'])
+
+ # Pack the windows against the top, each pane spread across the
+ # full width of the screen.
+ y, x = (0, 0)
+ helpwin, y, x = drawvertwin(sizes[b'help'], y, x)
+ mainwin, y, x = drawvertwin(sizes[b'main'], y, x)
+ commitwin, y, x = drawvertwin(sizes[b'commit'], y, x)
+
+ if e in (E_PAGEDOWN, E_PAGEUP, E_LINEDOWN, E_LINEUP):
+ if e == E_PAGEDOWN:
+ changeview(state, +1, b'page')
+ elif e == E_PAGEUP:
+ changeview(state, -1, b'page')
+ elif e == E_LINEDOWN:
+ changeview(state, +1, b'line')
+ elif e == E_LINEUP:
+ changeview(state, -1, b'line')
+
+ # start rendering
+ commitwin.erase()
+ helpwin.erase()
+ mainwin.erase()
+ if curmode == MODE_PATCH:
+ renderpatch(mainwin, state)
+ elif curmode == MODE_HELP:
+ renderstring(mainwin, state, __doc__.strip().splitlines())
else:
- if e == E_RESIZE:
- size = screen_size()
- if size != stdscr.getmaxyx():
- curses.resizeterm(*size)
-
- curmode, _ = state[b'mode']
- sizes = layout(curmode)
- if curmode != oldmode:
- state[b'page_height'] = sizes[b'main'][0]
- # Adjust the view to fit the current screen size.
- movecursor(state, state[b'pos'], state[b'pos'])
-
- # Pack the windows against the top, each pane spread across the
- # full width of the screen.
- y, x = (0, 0)
- helpwin, y, x = drawvertwin(sizes[b'help'], y, x)
- mainwin, y, x = drawvertwin(sizes[b'main'], y, x)
- commitwin, y, x = drawvertwin(sizes[b'commit'], y, x)
-
- if e in (E_PAGEDOWN, E_PAGEUP, E_LINEDOWN, E_LINEUP):
- if e == E_PAGEDOWN:
- changeview(state, +1, b'page')
- elif e == E_PAGEUP:
- changeview(state, -1, b'page')
- elif e == E_LINEDOWN:
- changeview(state, +1, b'line')
- elif e == E_LINEUP:
- changeview(state, -1, b'line')
-
- # start rendering
- commitwin.erase()
- helpwin.erase()
- mainwin.erase()
- if curmode == MODE_PATCH:
- renderpatch(mainwin, state)
- elif curmode == MODE_HELP:
- renderstring(mainwin, state, __doc__.strip().splitlines())
- else:
- renderrules(mainwin, state)
- rendercommit(commitwin, state)
- renderhelp(helpwin, state)
- curses.doupdate()
- # done rendering
- ch = encoding.strtolocal(stdscr.getkey())
- except curses.error:
- pass
+ renderrules(mainwin, state)
+ rendercommit(commitwin, state)
+ renderhelp(helpwin, state)
+ curses.doupdate()
+ # done rendering
+ ch = encoding.strtolocal(stdscr.getkey())
def _chistedit(ui, repo, freeargs, opts):
--- a/hgext/largefiles/overrides.py Wed Feb 10 23:03:54 2021 +0100
+++ b/hgext/largefiles/overrides.py Thu Feb 11 20:36:46 2021 -0800
@@ -1567,7 +1567,7 @@
# Calling purge with --all will cause the largefiles to be deleted.
# Override repo.status to prevent this from happening.
-@eh.wrapcommand(b'purge', extension=b'purge')
+@eh.wrapcommand(b'purge')
def overridepurge(orig, ui, repo, *dirs, **opts):
# XXX Monkey patching a repoview will not work. The assigned attribute will
# be set on the unfiltered repo, but we will only lookup attributes in the
--- a/hgext/narrow/narrowcommands.py Wed Feb 10 23:03:54 2021 +0100
+++ b/hgext/narrow/narrowcommands.py Thu Feb 11 20:36:46 2021 -0800
@@ -214,6 +214,7 @@
newincludes,
newexcludes,
force,
+ backup,
):
oldmatch = narrowspec.match(repo.root, oldincludes, oldexcludes)
newmatch = narrowspec.match(repo.root, newincludes, newexcludes)
@@ -272,7 +273,7 @@
hg.clean(repo, urev)
overrides = {(b'devel', b'strip-obsmarkers'): False}
with ui.configoverride(overrides, b'narrow'):
- repair.strip(ui, unfi, tostrip, topic=b'narrow')
+ repair.strip(ui, unfi, tostrip, topic=b'narrow', backup=backup)
todelete = []
for f, f2, size in repo.store.datafiles():
@@ -442,6 +443,12 @@
),
(
b'',
+ b'backup',
+ True,
+ _(b'back up local changes when narrowing'),
+ ),
+ (
+ b'',
b'update-working-copy',
False,
_(b'update working copy when the store has changed'),
@@ -639,6 +646,7 @@
newincludes,
newexcludes,
opts[b'force_delete_local_changes'],
+ opts[b'backup'],
)
# _narrow() updated the narrowspec and _widen() below needs to
# use the updated values as its base (otherwise removed includes
--- a/hgext/purge.py Wed Feb 10 23:03:54 2021 +0100
+++ b/hgext/purge.py Thu Feb 11 20:36:46 2021 -0800
@@ -22,115 +22,11 @@
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
-'''command to delete untracked files from the working directory'''
-from __future__ import absolute_import
-
-from mercurial.i18n import _
-from mercurial import (
- cmdutil,
- merge as mergemod,
- pycompat,
- registrar,
- scmutil,
-)
-
-cmdtable = {}
-command = registrar.command(cmdtable)
-# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
-# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
-# be specifying the version(s) of Mercurial they are tested with, or
-# leave the attribute unspecified.
-testedwith = b'ships-with-hg-core'
-
-
-@command(
- b'purge|clean',
- [
- (b'a', b'abort-on-err', None, _(b'abort if an error occurs')),
- (b'', b'all', None, _(b'purge ignored files too')),
- (b'i', b'ignored', None, _(b'purge only ignored files')),
- (b'', b'dirs', None, _(b'purge empty directories')),
- (b'', b'files', None, _(b'purge files')),
- (b'p', b'print', None, _(b'print filenames instead of deleting them')),
- (
- b'0',
- b'print0',
- None,
- _(
- b'end filenames with NUL, for use with xargs'
- b' (implies -p/--print)'
- ),
- ),
- ]
- + cmdutil.walkopts,
- _(b'hg purge [OPTION]... [DIR]...'),
- helpcategory=command.CATEGORY_WORKING_DIRECTORY,
-)
-def purge(ui, repo, *dirs, **opts):
- """removes files not tracked by Mercurial
-
- Delete files not known to Mercurial. This is useful to test local
- and uncommitted changes in an otherwise-clean source tree.
-
- This means that purge will delete the following by default:
-
- - Unknown files: files marked with "?" by :hg:`status`
- - Empty directories: in fact Mercurial ignores directories unless
- they contain files under source control management
+'''command to delete untracked files from the working directory (DEPRECATED)
- But it will leave untouched:
-
- - Modified and unmodified tracked files
- - Ignored files (unless -i or --all is specified)
- - New files added to the repository (with :hg:`add`)
-
- The --files and --dirs options can be used to direct purge to delete
- only files, only directories, or both. If neither option is given,
- both will be deleted.
-
- If directories are given on the command line, only files in these
- directories are considered.
-
- Be careful with purge, as you could irreversibly delete some files
- you forgot to add to the repository. If you only want to print the
- list of files that this program would delete, use the --print
- option.
- """
- opts = pycompat.byteskwargs(opts)
- cmdutil.check_at_most_one_arg(opts, b'all', b'ignored')
+The functionality of this extension has been included in core Mercurial since
+version 5.7. Please use :hg:`purge ...` instead. :hg:`purge --confirm` is now the default, unless the extension is enabled for backward compatibility.
+'''
- act = not opts.get(b'print')
- eol = b'\n'
- if opts.get(b'print0'):
- eol = b'\0'
- act = False # --print0 implies --print
- if opts.get(b'all', False):
- ignored = True
- unknown = True
- else:
- ignored = opts.get(b'ignored', False)
- unknown = not ignored
-
- removefiles = opts.get(b'files')
- removedirs = opts.get(b'dirs')
-
- if not removefiles and not removedirs:
- removefiles = True
- removedirs = True
-
- match = scmutil.match(repo[None], dirs, opts)
-
- paths = mergemod.purge(
- repo,
- match,
- unknown=unknown,
- ignored=ignored,
- removeemptydirs=removedirs,
- removefiles=removefiles,
- abortonerror=opts.get(b'abort_on_err'),
- noop=not act,
- )
-
- for path in paths:
- if not act:
- ui.write(b'%s%s' % (path, eol))
+# This empty extension looks pointless, but core Mercurial checks if it's loaded
+# to implement the slightly different behavior documented above.
--- a/hgext/rebase.py Wed Feb 10 23:03:54 2021 +0100
+++ b/hgext/rebase.py Thu Feb 11 20:36:46 2021 -0800
@@ -67,6 +67,14 @@
cmdtable = {}
command = registrar.command(cmdtable)
+
+configtable = {}
+configitem = registrar.configitem(configtable)
+configitem(
+ b'devel',
+ b'rebase.force-in-memory-merge',
+ default=False,
+)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
@@ -1112,6 +1120,8 @@
with ui.configoverride(overrides, b'rebase'):
return _dorebase(ui, repo, action, opts, inmemory=inmemory)
except error.InMemoryMergeConflictsError:
+ if ui.configbool(b'devel', b'rebase.force-in-memory-merge'):
+ raise
ui.warn(
_(
b'hit merge conflicts; re-running rebase without in-memory'
--- a/hgext/uncommit.py Wed Feb 10 23:03:54 2021 +0100
+++ b/hgext/uncommit.py Thu Feb 11 20:36:46 2021 -0800
@@ -175,7 +175,7 @@
old = repo[b'.']
rewriteutil.precheck(repo, [old.rev()], b'uncommit')
if len(old.parents()) > 1:
- raise error.Abort(_(b"cannot uncommit merge changeset"))
+ raise error.InputError(_(b"cannot uncommit merge changeset"))
match = scmutil.match(old, pats, opts)
@@ -202,7 +202,7 @@
else:
hint = _(b"file does not exist")
- raise error.Abort(
+ raise error.InputError(
_(b'cannot uncommit "%s"') % scmutil.getuipathfn(repo)(f),
hint=hint,
)
@@ -280,7 +280,7 @@
markers = list(predecessormarkers(curctx))
if len(markers) != 1:
e = _(b"changeset must have one predecessor, found %i predecessors")
- raise error.Abort(e % len(markers))
+ raise error.InputError(e % len(markers))
prednode = markers[0].prednode()
predctx = unfi[prednode]
--- a/mercurial/branchmap.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/branchmap.py Thu Feb 11 20:36:46 2021 -0800
@@ -566,6 +566,7 @@
# [4 byte hash prefix][4 byte branch name number with sign bit indicating open]
_rbcrecfmt = b'>4sI'
_rbcrecsize = calcsize(_rbcrecfmt)
+_rbcmininc = 64 * _rbcrecsize
_rbcnodelen = 4
_rbcbranchidxmask = 0x7FFFFFFF
_rbccloseflag = 0x80000000
@@ -705,8 +706,10 @@
self._setcachedata(rev, reponode, branchidx)
return b, close
- def setdata(self, branch, rev, node, close):
+ def setdata(self, rev, changelogrevision):
"""add new data information to the cache"""
+ branch, close = changelogrevision.branchinfo
+
if branch in self._namesreverse:
branchidx = self._namesreverse[branch]
else:
@@ -715,7 +718,7 @@
self._namesreverse[branch] = branchidx
if close:
branchidx |= _rbccloseflag
- self._setcachedata(rev, node, branchidx)
+ self._setcachedata(rev, self._repo.changelog.node(rev), branchidx)
# If no cache data were readable (non exists, bad permission, etc)
# the cache was bypassing itself by setting:
#
@@ -730,11 +733,15 @@
if rev == nullrev:
return
rbcrevidx = rev * _rbcrecsize
- if len(self._rbcrevs) < rbcrevidx + _rbcrecsize:
- self._rbcrevs.extend(
- b'\0'
- * (len(self._repo.changelog) * _rbcrecsize - len(self._rbcrevs))
- )
+ requiredsize = rbcrevidx + _rbcrecsize
+ rbccur = len(self._rbcrevs)
+ if rbccur < requiredsize:
+ # bytearray doesn't allocate extra space at least in Python 3.7.
+ # When multiple changesets are added in a row, precise resize would
+ # result in quadratic complexity. Overallocate to compensate by
+ # use the classic doubling technique for dynamic arrays instead.
+ # If there was a gap in the map before, less space will be reserved.
+ self._rbcrevs.extend(b'\0' * max(_rbcmininc, requiredsize))
pack_into(_rbcrecfmt, self._rbcrevs, rbcrevidx, node, branchidx)
self._rbcrevslen = min(self._rbcrevslen, rev)
--- a/mercurial/bundle2.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/bundle2.py Thu Feb 11 20:36:46 2021 -0800
@@ -2478,35 +2478,10 @@
@parthandler(b'cache:rev-branch-cache')
def handlerbc(op, inpart):
- """receive a rev-branch-cache payload and update the local cache
-
- The payload is a series of data related to each branch
-
- 1) branch name length
- 2) number of open heads
- 3) number of closed heads
- 4) open heads nodes
- 5) closed heads nodes
- """
- total = 0
- rawheader = inpart.read(rbcstruct.size)
- cache = op.repo.revbranchcache()
- cl = op.repo.unfiltered().changelog
- while rawheader:
- header = rbcstruct.unpack(rawheader)
- total += header[1] + header[2]
- utf8branch = inpart.read(header[0])
- branch = encoding.tolocal(utf8branch)
- for x in pycompat.xrange(header[1]):
- node = inpart.read(20)
- rev = cl.rev(node)
- cache.setdata(branch, rev, node, False)
- for x in pycompat.xrange(header[2]):
- node = inpart.read(20)
- rev = cl.rev(node)
- cache.setdata(branch, rev, node, True)
- rawheader = inpart.read(rbcstruct.size)
- cache.write()
+ """Legacy part, ignored for compatibility with bundles from or
+ for Mercurial before 5.7. Newer Mercurial computes the cache
+ efficiently enough during unbundling that the additional transfer
+ is unnecessary."""
@parthandler(b'pushvars')
@@ -2561,8 +2536,6 @@
for r in repo.revs(b"::%ln", common):
commonnodes.add(cl.node(r))
if commonnodes:
- # XXX: we should only send the filelogs (and treemanifest). user
- # already has the changelog and manifest
packer = changegroup.getbundler(
cgversion,
repo,
--- a/mercurial/cext/osutil.c Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/cext/osutil.c Thu Feb 11 20:36:46 2021 -0800
@@ -119,7 +119,7 @@
static void listdir_stat_dealloc(PyObject *o)
{
- o->ob_type->tp_free(o);
+ Py_TYPE(o)->tp_free(o);
}
static PyObject *listdir_stat_getitem(PyObject *self, PyObject *key)
--- a/mercurial/cext/pathencode.c Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/cext/pathencode.c Thu Feb 11 20:36:46 2021 -0800
@@ -21,6 +21,7 @@
#include <ctype.h>
#include <stdlib.h>
#include <string.h>
+#include "pythoncapi_compat.h"
#include "util.h"
@@ -678,7 +679,7 @@
}
assert(PyBytes_Check(ret));
- Py_SIZE(ret) = destlen;
+ Py_SET_SIZE(ret, destlen);
return ret;
}
--- a/mercurial/changegroup.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/changegroup.py Thu Feb 11 20:36:46 2021 -0800
@@ -323,7 +323,10 @@
cgnodes.append(node)
def onchangelog(cl, node):
- efilesset.update(cl.readfiles(node))
+ rev = cl.rev(node)
+ ctx = cl.changelogrevision(rev)
+ efilesset.update(ctx.files)
+ repo.register_changeset(rev, ctx)
self.changelogheader()
deltas = self.deltaiter()
@@ -331,6 +334,7 @@
deltas,
csmap,
trp,
+ alwayscache=True,
addrevisioncb=onchangelog,
duplicaterevisioncb=ondupchangelog,
):
--- a/mercurial/changelog.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/changelog.py Thu Feb 11 20:36:46 2021 -0800
@@ -200,6 +200,7 @@
p1copies = attr.ib(default=None)
p2copies = attr.ib(default=None)
description = attr.ib(default=b'')
+ branchinfo = attr.ib(default=(_defaultextra[b'branch'], False))
class changelogrevision(object):
@@ -372,6 +373,11 @@
def description(self):
return encoding.tolocal(self._text[self._offsets[3] + 2 :])
+ @property
+ def branchinfo(self):
+ extra = self.extra
+ return encoding.tolocal(extra.get(b"branch")), b'close' in extra
+
class changelog(revlog.revlog):
def __init__(self, opener, trypending=False):
@@ -601,8 +607,7 @@
This function exists because creating a changectx object
just to access this is costly."""
- extra = self.changelogrevision(rev).extra
- return encoding.tolocal(extra.get(b"branch")), b'close' in extra
+ return self.changelogrevision(rev).branchinfo
def _nodeduplicatecallback(self, transaction, node):
# keep track of revisions that got "re-added", eg: unbunde of know rev.
--- a/mercurial/commands.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/commands.py Thu Feb 11 20:36:46 2021 -0800
@@ -29,6 +29,7 @@
bundlecaches,
changegroup,
cmdutil,
+ context as contextmod,
copies,
debugcommands as debugcommandsmod,
destutil,
@@ -2464,6 +2465,16 @@
(b'', b'from', b'', _(b'revision to diff from'), _(b'REV1')),
(b'', b'to', b'', _(b'revision to diff to'), _(b'REV2')),
(b'c', b'change', b'', _(b'change made by revision'), _(b'REV')),
+ (
+ b'',
+ b'merge',
+ False,
+ _(
+ b'show difference between auto-merge and committed '
+ b'merge for merge commits (EXPERIMENTAL)'
+ ),
+ _(b'REV'),
+ ),
]
+ diffopts
+ diffopts2
@@ -2544,13 +2555,33 @@
to_rev = opts.get(b'to')
stat = opts.get(b'stat')
reverse = opts.get(b'reverse')
+ diffmerge = opts.get(b'merge')
cmdutil.check_incompatible_arguments(opts, b'from', [b'rev', b'change'])
cmdutil.check_incompatible_arguments(opts, b'to', [b'rev', b'change'])
if change:
repo = scmutil.unhidehashlikerevs(repo, [change], b'nowarn')
ctx2 = scmutil.revsingle(repo, change, None)
- ctx1 = ctx2.p1()
+ if diffmerge and ctx2.p2().node() != nullid:
+ pctx1 = ctx2.p1()
+ pctx2 = ctx2.p2()
+ wctx = contextmod.overlayworkingctx(repo)
+ wctx.setbase(pctx1)
+ with ui.configoverride(
+ {
+ (
+ b'ui',
+ b'forcemerge',
+ ): b'internal:merge3-lie-about-conflicts',
+ },
+ b'diff --merge',
+ ):
+ repo.ui.pushbuffer()
+ mergemod.merge(pctx2, wc=wctx)
+ repo.ui.popbuffer()
+ ctx1 = wctx
+ else:
+ ctx1 = ctx2.p1()
elif from_rev or to_rev:
repo = scmutil.unhidehashlikerevs(
repo, [from_rev] + [to_rev], b'nowarn'
@@ -5447,6 +5478,108 @@
@command(
+ b'purge|clean',
+ [
+ (b'a', b'abort-on-err', None, _(b'abort if an error occurs')),
+ (b'', b'all', None, _(b'purge ignored files too')),
+ (b'i', b'ignored', None, _(b'purge only ignored files')),
+ (b'', b'dirs', None, _(b'purge empty directories')),
+ (b'', b'files', None, _(b'purge files')),
+ (b'p', b'print', None, _(b'print filenames instead of deleting them')),
+ (
+ b'0',
+ b'print0',
+ None,
+ _(
+ b'end filenames with NUL, for use with xargs'
+ b' (implies -p/--print)'
+ ),
+ ),
+ (b'', b'confirm', None, _(b'ask before permanently deleting files')),
+ ]
+ + cmdutil.walkopts,
+ _(b'hg purge [OPTION]... [DIR]...'),
+ helpcategory=command.CATEGORY_WORKING_DIRECTORY,
+)
+def purge(ui, repo, *dirs, **opts):
+ """removes files not tracked by Mercurial
+
+ Delete files not known to Mercurial. This is useful to test local
+ and uncommitted changes in an otherwise-clean source tree.
+
+ This means that purge will delete the following by default:
+
+ - Unknown files: files marked with "?" by :hg:`status`
+ - Empty directories: in fact Mercurial ignores directories unless
+ they contain files under source control management
+
+ But it will leave untouched:
+
+ - Modified and unmodified tracked files
+ - Ignored files (unless -i or --all is specified)
+ - New files added to the repository (with :hg:`add`)
+
+ The --files and --dirs options can be used to direct purge to delete
+ only files, only directories, or both. If neither option is given,
+ both will be deleted.
+
+ If directories are given on the command line, only files in these
+ directories are considered.
+
+ Be careful with purge, as you could irreversibly delete some files
+ you forgot to add to the repository. If you only want to print the
+ list of files that this program would delete, use the --print
+ option.
+ """
+ opts = pycompat.byteskwargs(opts)
+ cmdutil.check_at_most_one_arg(opts, b'all', b'ignored')
+
+ act = not opts.get(b'print')
+ eol = b'\n'
+ if opts.get(b'print0'):
+ eol = b'\0'
+ act = False # --print0 implies --print
+ if opts.get(b'all', False):
+ ignored = True
+ unknown = True
+ else:
+ ignored = opts.get(b'ignored', False)
+ unknown = not ignored
+
+ removefiles = opts.get(b'files')
+ removedirs = opts.get(b'dirs')
+ confirm = opts.get(b'confirm')
+ if confirm is None:
+ try:
+ extensions.find(b'purge')
+ confirm = False
+ except KeyError:
+ confirm = True
+
+ if not removefiles and not removedirs:
+ removefiles = True
+ removedirs = True
+
+ match = scmutil.match(repo[None], dirs, opts)
+
+ paths = mergemod.purge(
+ repo,
+ match,
+ unknown=unknown,
+ ignored=ignored,
+ removeemptydirs=removedirs,
+ removefiles=removefiles,
+ abortonerror=opts.get(b'abort_on_err'),
+ noop=not act,
+ confirm=confirm,
+ )
+
+ for path in paths:
+ if not act:
+ ui.write(b'%s%s' % (path, eol))
+
+
+@command(
b'push',
[
(b'f', b'force', None, _(b'force push')),
@@ -6082,7 +6215,7 @@
if hint:
ui.warn(hint)
- unresolvedf = list(ms.unresolved())
+ unresolvedf = ms.unresolvedcount()
if not unresolvedf:
ui.status(_(b'(no more unresolved files)\n'))
cmdutil.checkafterresolved(repo)
--- a/mercurial/commit.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/commit.py Thu Feb 11 20:36:46 2021 -0800
@@ -96,6 +96,10 @@
ctx.date(),
extra,
)
+ rev = repo[n].rev()
+ if oldtip != repo.changelog.tiprev():
+ repo.register_changeset(rev, repo.changelog.changelogrevision(rev))
+
xp1, xp2 = p1.hex(), p2 and p2.hex() or b''
repo.hook(
b'pretxncommit',
@@ -108,7 +112,7 @@
targetphase = subrepoutil.newcommitphase(repo.ui, ctx)
# prevent unmarking changesets as public on recommit
- waspublic = oldtip == repo.changelog.tiprev() and not repo[n].phase()
+ waspublic = oldtip == repo.changelog.tiprev() and not repo[rev].phase()
if targetphase and not waspublic:
# retract boundary do not alter parent changeset.
@@ -116,7 +120,7 @@
# be compliant anyway
#
# if minimal phase was 0 we don't need to retract anything
- phases.registernew(repo, tr, targetphase, [repo[n].rev()])
+ phases.registernew(repo, tr, targetphase, [rev])
return n
--- a/mercurial/configitems.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/configitems.py Thu Feb 11 20:36:46 2021 -0800
@@ -570,6 +570,11 @@
default=0,
)
coreconfigitem(
+ b'convert',
+ b'svn.dangerous-set-commit-dates',
+ default=False,
+)
+coreconfigitem(
b'debug',
b'dirstate.delaywrite',
default=0,
@@ -610,6 +615,12 @@
b'check-relroot',
default=False,
)
+# Track copy information for all files, not just "added" ones (very slow)
+coreconfigitem(
+ b'devel',
+ b'copy-tracing.trace-all-files',
+ default=False,
+)
coreconfigitem(
b'devel',
b'default-date',
@@ -729,6 +740,18 @@
b'discovery.randomize',
default=True,
)
+# Control the initial size of the discovery sample
+coreconfigitem(
+ b'devel',
+ b'discovery.sample-size',
+ default=200,
+)
+# Control the size of the discovery sample used for the initial query
+coreconfigitem(
+ b'devel',
+ b'discovery.sample-size.initial',
+ default=100,
+)
_registerdiffopts(section=b'diff')
coreconfigitem(
b'email',
--- a/mercurial/context.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/context.py Thu Feb 11 20:36:46 2021 -0800
@@ -2597,6 +2597,7 @@
b'flags': flags,
b'copied': copied,
}
+ util.clearcachedproperty(self, b'_manifest')
def filectx(self, path, filelog=None):
return overlayworkingfilectx(
--- a/mercurial/copies.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/copies.py Thu Feb 11 20:36:46 2021 -0800
@@ -59,14 +59,13 @@
# Cases 1, 3, and 5 are then removed by _filter().
for k, v in list(t.items()):
- # remove copies from files that didn't exist
- if v not in src:
+ if k == v: # case 3
del t[k]
- # remove criss-crossed copies
- elif k in src and v in dst:
+ elif v not in src: # case 5
+ # remove copies from files that didn't exist
del t[k]
- # remove copies to files that were then removed
- elif k not in dst:
+ elif k not in dst: # case 1
+ # remove copies to files that were then removed
del t[k]
@@ -153,13 +152,21 @@
if b.p1() == a and b.p2().node() == nullid:
filesmatcher = matchmod.exact(b.files())
forwardmissingmatch = matchmod.intersectmatchers(match, filesmatcher)
- missing = _computeforwardmissing(a, b, match=forwardmissingmatch)
+ if repo.ui.configbool(b'devel', b'copy-tracing.trace-all-files'):
+ missing = list(b.walk(match))
+ # _computeforwardmissing(a, b, match=forwardmissingmatch)
+ if debug:
+ dbg(b'debug.copies: searching all files: %d\n' % len(missing))
+ else:
+ missing = _computeforwardmissing(a, b, match=forwardmissingmatch)
+ if debug:
+ dbg(
+ b'debug.copies: missing files to search: %d\n'
+ % len(missing)
+ )
ancestrycontext = a._repo.changelog.ancestors([b.rev()], inclusive=True)
- if debug:
- dbg(b'debug.copies: missing files to search: %d\n' % len(missing))
-
for f in sorted(missing):
if debug:
dbg(b'debug.copies: tracing file: %s\n' % f)
@@ -1220,6 +1227,15 @@
by merge.update().
"""
new_copies = pathcopies(base, ctx)
- _filter(wctx.p1(), wctx, new_copies)
+ parent = wctx.p1()
+ _filter(parent, wctx, new_copies)
+ # Extra filtering to drop copy information for files that existed before
+ # the graft. This is to handle the case of grafting a rename onto a commit
+ # that already has the rename. Otherwise the presence of copy information
+ # would result in the creation of an empty commit where we would prefer to
+ # not create one.
+ for dest, __ in list(new_copies.items()):
+ if dest in parent:
+ del new_copies[dest]
for dst, src in pycompat.iteritems(new_copies):
wctx[dst].markcopied(src)
--- a/mercurial/debugcommands.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/debugcommands.py Thu Feb 11 20:36:46 2021 -0800
@@ -69,6 +69,7 @@
pycompat,
registrar,
repair,
+ repoview,
revlog,
revset,
revsetlang,
@@ -483,14 +484,31 @@
ui.write(b' %s\n' % v)
-@command(b'debugchangedfiles', [], b'REV')
-def debugchangedfiles(ui, repo, rev):
+@command(
+ b'debugchangedfiles',
+ [
+ (
+ b'',
+ b'compute',
+ False,
+ b"compute information instead of reading it from storage",
+ ),
+ ],
+ b'REV',
+)
+def debugchangedfiles(ui, repo, rev, **opts):
"""list the stored files changes for a revision"""
ctx = scmutil.revsingle(repo, rev, None)
- sd = repo.changelog.sidedata(ctx.rev())
- files_block = sd.get(sidedata.SD_FILES)
- if files_block is not None:
- files = metadata.decode_files_sidedata(sd)
+ files = None
+
+ if opts['compute']:
+ files = metadata.compute_all_files_changes(ctx)
+ else:
+ sd = repo.changelog.sidedata(ctx.rev())
+ files_block = sd.get(sidedata.SD_FILES)
+ if files_block is not None:
+ files = metadata.decode_files_sidedata(sd)
+ if files is not None:
for f in sorted(files.touched):
if f in files.added:
action = b"added"
@@ -964,20 +982,73 @@
),
(b'', b'rev', [], b'restrict discovery to this set of revs'),
(b'', b'seed', b'12323', b'specify the random seed use for discovery'),
+ (
+ b'',
+ b'local-as-revs',
+ "",
+            'treat local as having these revisions only',
+ ),
+ (
+ b'',
+ b'remote-as-revs',
+ "",
+            'use local as remote, with only these revisions',
+ ),
]
+ cmdutil.remoteopts,
_(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
- """runs the changeset discovery protocol in isolation"""
+ """runs the changeset discovery protocol in isolation
+
+    The local peer can be "replaced" by a subset of the local repository by
+    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
+    can be "replaced" by a subset of the local repository using the
+    `--remote-as-revs` flag. This is useful to efficiently debug pathological
+    discovery situations.
+ """
opts = pycompat.byteskwargs(opts)
- remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
- remote = hg.peer(repo, opts, remoteurl)
- ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
+ unfi = repo.unfiltered()
+
+ # setup potential extra filtering
+ local_revs = opts[b"local_as_revs"]
+ remote_revs = opts[b"remote_as_revs"]
# make sure tests are repeatable
random.seed(int(opts[b'seed']))
+ if not remote_revs:
+
+ remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
+ remote = hg.peer(repo, opts, remoteurl)
+ ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
+ else:
+ branches = (None, [])
+ remote_filtered_revs = scmutil.revrange(
+ unfi, [b"not (::(%s))" % remote_revs]
+ )
+ remote_filtered_revs = frozenset(remote_filtered_revs)
+
+ def remote_func(x):
+ return remote_filtered_revs
+
+ repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
+
+ remote = repo.peer()
+ remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
+
+ if local_revs:
+ local_filtered_revs = scmutil.revrange(
+ unfi, [b"not (::(%s))" % local_revs]
+ )
+ local_filtered_revs = frozenset(local_filtered_revs)
+
+ def local_func(x):
+ return local_filtered_revs
+
+ repoview.filtertable[b'debug-discovery-local-filter'] = local_func
+ repo = repo.filtered(b'debug-discovery-local-filter')
+
data = {}
if opts.get(b'old'):
@@ -2212,9 +2283,9 @@
b'',
b'dump-new',
False,
- _(b'write a (new) persistent binary nodemap on stdin'),
+ _(b'write a (new) persistent binary nodemap on stdout'),
),
- (b'', b'dump-disk', False, _(b'dump on-disk data on stdin')),
+ (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
(
b'',
b'check',
@@ -3717,6 +3788,23 @@
ui.writenoi18n(b' revision %s\n' % v[1])
+@command(b'debugshell', optionalrepo=True)
+def debugshell(ui, repo):
+ """run an interactive Python interpreter
+
+ The local namespace is provided with a reference to the ui and
+ the repo instance (if available).
+ """
+ import code
+
+ imported_objects = {
+ 'ui': ui,
+ 'repo': repo,
+ }
+
+ code.interact(local=imported_objects)
+
+
@command(
b'debugsuccessorssets',
[(b'', b'closest', False, _(b'return closest successors sets only'))],
--- a/mercurial/exchange.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/exchange.py Thu Feb 11 20:36:46 2021 -0800
@@ -1135,9 +1135,9 @@
except error.BundleValueError as exc:
raise error.Abort(_(b'missing support for %s') % exc)
except bundle2.AbortFromPart as exc:
- pushop.ui.status(_(b'remote: %s\n') % exc)
+ pushop.ui.error(_(b'remote: %s\n') % exc)
if exc.hint is not None:
- pushop.ui.status(_(b'remote: %s\n') % (b'(%s)' % exc.hint))
+ pushop.ui.error(_(b'remote: %s\n') % (b'(%s)' % exc.hint))
raise error.Abort(_(b'push failed on remote'))
except error.PushkeyFailed as exc:
partid = int(exc.partid)
@@ -1832,7 +1832,7 @@
op.modes[b'bookmarks'] = b'records'
bundle2.processbundle(pullop.repo, bundle, op=op)
except bundle2.AbortFromPart as exc:
- pullop.repo.ui.status(_(b'remote: abort: %s\n') % exc)
+ pullop.repo.ui.error(_(b'remote: abort: %s\n') % exc)
raise error.Abort(_(b'pull failed on remote'), hint=exc.hint)
except error.BundleValueError as exc:
raise error.Abort(_(b'missing support for %s') % exc)
--- a/mercurial/exchangev2.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/exchangev2.py Thu Feb 11 20:36:46 2021 -0800
@@ -364,12 +364,15 @@
def onchangeset(cl, node):
progress.increment()
- revision = cl.changelogrevision(node)
+ rev = cl.rev(node)
+ revision = cl.changelogrevision(rev)
added.append(node)
# We need to preserve the mapping of changelog revision to node
# so we can set the linkrev accordingly when manifests are added.
- manifestnodes[cl.rev(node)] = revision.manifest
+ manifestnodes[rev] = revision.manifest
+
+ repo.register_changeset(rev, revision)
nodesbyphase = {phase: set() for phase in phases.phasenames.values()}
remotebookmarks = {}
@@ -420,6 +423,7 @@
iterrevisions(),
linkrev,
weakref.proxy(tr),
+ alwayscache=True,
addrevisioncb=onchangeset,
duplicaterevisioncb=ondupchangeset,
)
--- a/mercurial/filemerge.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/filemerge.py Thu Feb 11 20:36:46 2021 -0800
@@ -538,6 +538,25 @@
@internaltool(
+ b'merge3-lie-about-conflicts',
+ fullmerge,
+ b'',
+ precheck=_mergecheck,
+)
+def _imerge3alwaysgood(*args, **kwargs):
+ # Like merge3, but record conflicts as resolved with markers in place.
+ #
+ # This is used for `hg diff --merge` to show the differences between
+ # the auto-merge state and the committed merge state. It may be
+ # useful for other things.
+ b1, junk, b2 = _imerge3(*args, **kwargs)
+ # TODO is this right? I'm not sure what these return values mean,
+    # but as far as I can tell this will indicate to callers that the
+ # merge succeeded.
+ return b1, False, b2
+
+
+@internaltool(
b'mergediff',
fullmerge,
_(
@@ -1195,7 +1214,11 @@
def hasconflictmarkers(data):
return bool(
- re.search(b"^(<<<<<<< .*|=======|>>>>>>> .*)$", data, re.MULTILINE)
+ re.search(
+ br"^(<<<<<<<.*|=======.*|------- .*|\+\+\+\+\+\+\+ .*|>>>>>>>.*)$",
+ data,
+ re.MULTILINE,
+ )
)
--- a/mercurial/help.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/help.py Thu Feb 11 20:36:46 2021 -0800
@@ -829,10 +829,11 @@
def appendcmds(cmds):
cmds = sorted(cmds)
for c in cmds:
+ display_cmd = c
if ui.verbose:
- rst.append(b" :%s: %s\n" % (b', '.join(syns[c]), h[c]))
- else:
- rst.append(b' :%s: %s\n' % (c, h[c]))
+ display_cmd = b', '.join(syns[c])
+ display_cmd = display_cmd.replace(b':', br'\:')
+ rst.append(b' :%s: %s\n' % (display_cmd, h[c]))
if name in (b'shortlist', b'debug'):
# List without categories.
--- a/mercurial/httppeer.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/httppeer.py Thu Feb 11 20:36:46 2021 -0800
@@ -171,9 +171,9 @@
# Send arguments via HTTP headers.
if headersize > 0:
# The headers can typically carry more data than the URL.
- encargs = urlreq.urlencode(sorted(args.items()))
+ encoded_args = urlreq.urlencode(sorted(args.items()))
for header, value in encodevalueinheaders(
- encargs, b'X-HgArg', headersize
+ encoded_args, b'X-HgArg', headersize
):
headers[header] = value
# Send arguments via query string (Mercurial <1.9).
--- a/mercurial/interfaces/repository.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/interfaces/repository.py Thu Feb 11 20:36:46 2021 -0800
@@ -769,7 +769,13 @@
``nullid``, in which case the header from the delta can be ignored
and the delta used as the fulltext.
+ ``alwayscache`` instructs the lower layers to cache the content of the
+ newly added revision, even if it needs to be explicitly computed.
+ This used to be the default when ``addrevisioncb`` was provided up to
+ Mercurial 5.8.
+
``addrevisioncb`` should be called for each node as it is committed.
+ ``duplicaterevisioncb`` should be called for each pre-existing node.
``maybemissingparents`` is a bool indicating whether the incoming
data may reference parents/ancestor revisions that aren't present.
@@ -1641,6 +1647,14 @@
def revbranchcache():
pass
+ def register_changeset(rev, changelogrevision):
+ """Extension point for caches for new nodes.
+
+ Multiple consumers are expected to need parts of the changelogrevision,
+        so it is provided as an optimization to avoid duplicate lookups. A simple
+ cache would be fragile when other revisions are accessed, too."""
+ pass
+
def branchtip(branchtip, ignoremissing=False):
"""Return the tip node for a given branch."""
--- a/mercurial/localrepo.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/localrepo.py Thu Feb 11 20:36:46 2021 -0800
@@ -2059,6 +2059,9 @@
self._revbranchcache = branchmap.revbranchcache(self.unfiltered())
return self._revbranchcache
+ def register_changeset(self, rev, changelogrevision):
+ self.revbranchcache().setdata(rev, changelogrevision)
+
def branchtip(self, branch, ignoremissing=False):
"""return the tip node for a given branch
@@ -3633,11 +3636,11 @@
# effectively locks out old clients and prevents them from
# mucking with a repo in an unknown format.
#
- # The revlog header has version 2, which won't be recognized by
+ # The revlog header has version 65535, which won't be recognized by
# such old clients.
hgvfs.append(
b'00changelog.i',
- b'\0\0\0\2 dummy changelog to prevent using the old repo '
+ b'\0\0\xFF\xFF dummy changelog to prevent using the old repo '
b'layout',
)
--- a/mercurial/manifest.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/manifest.py Thu Feb 11 20:36:46 2021 -0800
@@ -1836,6 +1836,7 @@
deltas,
linkmapper,
transaction,
+ alwayscache=False,
addrevisioncb=None,
duplicaterevisioncb=None,
):
@@ -1843,6 +1844,7 @@
deltas,
linkmapper,
transaction,
+ alwayscache=alwayscache,
addrevisioncb=addrevisioncb,
duplicaterevisioncb=duplicaterevisioncb,
)
--- a/mercurial/merge.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/merge.py Thu Feb 11 20:36:46 2021 -0800
@@ -233,7 +233,7 @@
else:
warn(_(b"%s: untracked file differs\n") % f)
if abortconflicts:
- raise error.Abort(
+ raise error.StateError(
_(
b"untracked files in working directory "
b"differ from files in requested revision"
@@ -341,7 +341,7 @@
for f in pmmf:
fold = util.normcase(f)
if fold in foldmap:
- raise error.Abort(
+ raise error.StateError(
_(b"case-folding collision between %s and %s")
% (f, foldmap[fold])
)
@@ -352,7 +352,7 @@
for fold, f in sorted(foldmap.items()):
if fold.startswith(foldprefix) and not f.startswith(unfoldprefix):
# the folded prefix matches but actual casing is different
- raise error.Abort(
+ raise error.StateError(
_(b"case-folding collision between %s and directory of %s")
% (lastfull, f)
)
@@ -504,7 +504,9 @@
if invalidconflicts:
for p in invalidconflicts:
repo.ui.warn(_(b"%s: is both a file and a directory\n") % p)
- raise error.Abort(_(b"destination manifest contains path conflicts"))
+ raise error.StateError(
+ _(b"destination manifest contains path conflicts")
+ )
def _filternarrowactions(narrowmatch, branchmerge, mresult):
@@ -1918,10 +1920,10 @@
### check phase
if not overwrite:
if len(pl) > 1:
- raise error.Abort(_(b"outstanding uncommitted merge"))
+ raise error.StateError(_(b"outstanding uncommitted merge"))
ms = wc.mergestate()
- if list(ms.unresolved()):
- raise error.Abort(
+ if ms.unresolvedcount():
+ raise error.StateError(
_(b"outstanding merge conflicts"),
hint=_(b"use 'hg resolve' to resolve"),
)
@@ -2007,7 +2009,7 @@
if mresult.hasconflicts():
msg = _(b"conflicting changes")
hint = _(b"commit or update --clean to discard changes")
- raise error.Abort(msg, hint=hint)
+ raise error.StateError(msg, hint=hint)
# Prompt and create actions. Most of this is in the resolve phase
# already, but we can't handle .hgsubstate in filemerge or
@@ -2324,6 +2326,7 @@
removefiles=True,
abortonerror=False,
noop=False,
+ confirm=False,
):
"""Purge the working directory of untracked files.
@@ -2344,6 +2347,8 @@
``noop`` controls whether to actually remove files. If not defined, actions
will be taken.
+    ``confirm`` asks for confirmation before actually removing anything.
+
Returns an iterable of relative paths in the working directory that were
or would be removed.
"""
@@ -2371,6 +2376,35 @@
status = repo.status(match=matcher, ignored=ignored, unknown=unknown)
+ if confirm:
+ nb_ignored = len(status.ignored)
+ nb_unkown = len(status.unknown)
+ if nb_unkown and nb_ignored:
+ msg = _(b"permanently delete %d unkown and %d ignored files?")
+ msg %= (nb_unkown, nb_ignored)
+ elif nb_unkown:
+ msg = _(b"permanently delete %d unkown files?")
+ msg %= nb_unkown
+ elif nb_ignored:
+ msg = _(b"permanently delete %d ignored files?")
+ msg %= nb_ignored
+ elif removeemptydirs:
+ dir_count = 0
+ for f in directories:
+ if matcher(f) and not repo.wvfs.listdir(f):
+ dir_count += 1
+ if dir_count:
+ msg = _(
+ b"permanently delete at least %d empty directories?"
+ )
+ msg %= dir_count
+ else:
+            # XXX we might be missing directories there
+ return res
+ msg += b" (yN)$$ &Yes $$ &No"
+ if repo.ui.promptchoice(msg, default=1) == 1:
+ raise error.CanceledError(_(b'removal cancelled'))
+
if removefiles:
for f in sorted(status.unknown + status.ignored):
if not noop:
--- a/mercurial/mergeutil.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/mergeutil.py Thu Feb 11 20:36:46 2021 -0800
@@ -13,7 +13,7 @@
def checkunresolved(ms):
- if list(ms.unresolved()):
+ if ms.unresolvedcount():
raise error.StateError(
_(b"unresolved merge conflicts (see 'hg help resolve')")
)
--- a/mercurial/minirst.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/minirst.py Thu Feb 11 20:36:46 2021 -0800
@@ -158,7 +158,7 @@
_optionre = re.compile(
br'^(-([a-zA-Z0-9]), )?(--[a-z0-9-]+)' br'((.*) +)(.*)$'
)
-_fieldre = re.compile(br':(?![: ])([^:]*)(?<! ):[ ]+(.*)')
+_fieldre = re.compile(br':(?![: ])((?:\:|[^:])*)(?<! ):[ ]+(.*)')
_definitionre = re.compile(br'[^ ]')
_tablere = re.compile(br'(=+\s+)*=+')
@@ -229,7 +229,7 @@
m = _fieldre.match(blocks[j][b'lines'][0])
key, rest = m.groups()
blocks[j][b'lines'][0] = rest
- blocks[j][b'key'] = key
+ blocks[j][b'key'] = key.replace(br'\:', b':')
j += 1
i = j + 1
--- a/mercurial/pure/parsers.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/pure/parsers.py Thu Feb 11 20:36:46 2021 -0800
@@ -33,13 +33,6 @@
return x
-indexformatng = b">Qiiiiii20s12x"
-indexfirst = struct.calcsize(b'Q')
-sizeint = struct.calcsize(b'i')
-indexsize = struct.calcsize(indexformatng)
-nullitem = (0, 0, 0, -1, -1, -1, -1, nullid)
-
-
def gettype(q):
return int(q & 0xFFFF)
@@ -49,6 +42,12 @@
class BaseIndexObject(object):
+ index_format = b">Qiiiiii20s12x"
+ big_int_size = struct.calcsize(b'Q')
+ int_size = struct.calcsize(b'i')
+ index_size = struct.calcsize(index_format)
+ null_item = (0, 0, 0, -1, -1, -1, -1, nullid)
+
@property
def nodemap(self):
msg = b"index.nodemap is deprecated, use index.[has_node|rev|get_rev]"
@@ -94,7 +93,7 @@
def append(self, tup):
if '_nodemap' in vars(self):
self._nodemap[tup[7]] = len(self)
- data = _pack(indexformatng, *tup)
+ data = _pack(self.index_format, *tup)
self._extra.append(data)
def _check_index(self, i):
@@ -105,14 +104,14 @@
def __getitem__(self, i):
if i == -1:
- return nullitem
+ return self.null_item
self._check_index(i)
if i >= self._lgt:
data = self._extra[i - self._lgt]
else:
index = self._calculate_index(i)
- data = self._data[index : index + indexsize]
- r = _unpack(indexformatng, data)
+ data = self._data[index : index + self.index_size]
+ r = _unpack(self.index_format, data)
if self._lgt and i == 0:
r = (offset_type(0, gettype(r[0])),) + r[1:]
return r
@@ -120,13 +119,13 @@
class IndexObject(BaseIndexObject):
def __init__(self, data):
- assert len(data) % indexsize == 0
+ assert len(data) % self.index_size == 0
self._data = data
- self._lgt = len(data) // indexsize
+ self._lgt = len(data) // self.index_size
self._extra = []
def _calculate_index(self, i):
- return i * indexsize
+ return i * self.index_size
def __delitem__(self, i):
if not isinstance(i, slice) or not i.stop == -1 or i.step is not None:
@@ -135,7 +134,7 @@
self._check_index(i)
self._stripnodes(i)
if i < self._lgt:
- self._data = self._data[: i * indexsize]
+ self._data = self._data[: i * self.index_size]
self._lgt = i
self._extra = []
else:
@@ -198,14 +197,16 @@
if lgt is not None:
self._offsets = [0] * lgt
count = 0
- while off <= len(self._data) - indexsize:
+ while off <= len(self._data) - self.index_size:
+ start = off + self.big_int_size
(s,) = struct.unpack(
- b'>i', self._data[off + indexfirst : off + sizeint + indexfirst]
+ b'>i',
+ self._data[start : start + self.int_size],
)
if lgt is not None:
self._offsets[count] = off
count += 1
- off += indexsize + s
+ off += self.index_size + s
if off != len(self._data):
raise ValueError(b"corrupted data")
return count
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/pythoncapi_compat.h Thu Feb 11 20:36:46 2021 -0800
@@ -0,0 +1,283 @@
+// Header file providing new functions of the Python C API to old Python
+// versions.
+//
+// File distributed under the MIT license.
+//
+// Homepage:
+// https://github.com/pythoncapi/pythoncapi_compat
+//
+// Latest version:
+// https://raw.githubusercontent.com/pythoncapi/pythoncapi_compat/master/pythoncapi_compat.h
+
+#ifndef PYTHONCAPI_COMPAT
+#define PYTHONCAPI_COMPAT
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <Python.h>
+#include "frameobject.h" // PyFrameObject, PyFrame_GetBack()
+
+
+/* VC 2008 doesn't know about the inline keyword. */
+#if defined(_MSC_VER) && _MSC_VER < 1900
+#define inline __forceinline
+#endif
+
+// Cast argument to PyObject* type.
+#ifndef _PyObject_CAST
+# define _PyObject_CAST(op) ((PyObject*)(op))
+#endif
+
+
+// bpo-42262 added Py_NewRef() to Python 3.10.0a3
+#if PY_VERSION_HEX < 0x030a00A3 && !defined(Py_NewRef)
+static inline PyObject* _Py_NewRef(PyObject *obj)
+{
+ Py_INCREF(obj);
+ return obj;
+}
+#define Py_NewRef(obj) _Py_NewRef(_PyObject_CAST(obj))
+#endif
+
+
+// bpo-42262 added Py_XNewRef() to Python 3.10.0a3
+#if PY_VERSION_HEX < 0x030a00A3 && !defined(Py_XNewRef)
+static inline PyObject* _Py_XNewRef(PyObject *obj)
+{
+ Py_XINCREF(obj);
+ return obj;
+}
+#define Py_XNewRef(obj) _Py_XNewRef(_PyObject_CAST(obj))
+#endif
+
+
+// bpo-39573 added Py_SET_REFCNT() to Python 3.9.0a4
+#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_REFCNT)
+static inline void _Py_SET_REFCNT(PyObject *ob, Py_ssize_t refcnt)
+{
+ ob->ob_refcnt = refcnt;
+}
+#define Py_SET_REFCNT(ob, refcnt) _Py_SET_REFCNT((PyObject*)(ob), refcnt)
+#endif
+
+
+// bpo-39573 added Py_SET_TYPE() to Python 3.9.0a4
+#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_TYPE)
+static inline void
+_Py_SET_TYPE(PyObject *ob, PyTypeObject *type)
+{
+ ob->ob_type = type;
+}
+#define Py_SET_TYPE(ob, type) _Py_SET_TYPE((PyObject*)(ob), type)
+#endif
+
+
+// bpo-39573 added Py_SET_SIZE() to Python 3.9.0a4
+#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_SIZE)
+static inline void
+_Py_SET_SIZE(PyVarObject *ob, Py_ssize_t size)
+{
+ ob->ob_size = size;
+}
+#define Py_SET_SIZE(ob, size) _Py_SET_SIZE((PyVarObject*)(ob), size)
+#endif
+
+
+// bpo-40421 added PyFrame_GetCode() to Python 3.9.0b1
+#if PY_VERSION_HEX < 0x030900B1
+static inline PyCodeObject*
+PyFrame_GetCode(PyFrameObject *frame)
+{
+ PyCodeObject *code;
+ assert(frame != NULL);
+ code = frame->f_code;
+ assert(code != NULL);
+ Py_INCREF(code);
+ return code;
+}
+#endif
+
+static inline PyCodeObject*
+_PyFrame_GetCodeBorrow(PyFrameObject *frame)
+{
+ PyCodeObject *code = PyFrame_GetCode(frame);
+ Py_DECREF(code);
+ return code; // borrowed reference
+}
+
+
+// bpo-40421 added PyFrame_GetBack() to Python 3.9.0b1
+#if PY_VERSION_HEX < 0x030900B1
+static inline PyFrameObject*
+PyFrame_GetBack(PyFrameObject *frame)
+{
+ PyFrameObject *back;
+ assert(frame != NULL);
+ back = frame->f_back;
+ Py_XINCREF(back);
+ return back;
+}
+#endif
+
+static inline PyFrameObject*
+_PyFrame_GetBackBorrow(PyFrameObject *frame)
+{
+ PyFrameObject *back = PyFrame_GetBack(frame);
+ Py_XDECREF(back);
+ return back; // borrowed reference
+}
+
+
+// bpo-39947 added PyThreadState_GetInterpreter() to Python 3.9.0a5
+#if PY_VERSION_HEX < 0x030900A5
+static inline PyInterpreterState *
+PyThreadState_GetInterpreter(PyThreadState *tstate)
+{
+ assert(tstate != NULL);
+ return tstate->interp;
+}
+#endif
+
+
+// bpo-40429 added PyThreadState_GetFrame() to Python 3.9.0b1
+#if PY_VERSION_HEX < 0x030900B1
+static inline PyFrameObject*
+PyThreadState_GetFrame(PyThreadState *tstate)
+{
+ PyFrameObject *frame;
+ assert(tstate != NULL);
+ frame = tstate->frame;
+ Py_XINCREF(frame);
+ return frame;
+}
+#endif
+
+static inline PyFrameObject*
+_PyThreadState_GetFrameBorrow(PyThreadState *tstate)
+{
+ PyFrameObject *frame = PyThreadState_GetFrame(tstate);
+ Py_XDECREF(frame);
+ return frame; // borrowed reference
+}
+
+
+// bpo-39947 added PyInterpreterState_Get() to Python 3.9.0a5
+#if PY_VERSION_HEX < 0x030900A5
+static inline PyInterpreterState *
+PyInterpreterState_Get(void)
+{
+ PyThreadState *tstate;
+ PyInterpreterState *interp;
+
+ tstate = PyThreadState_GET();
+ if (tstate == NULL) {
+ Py_FatalError("GIL released (tstate is NULL)");
+ }
+ interp = tstate->interp;
+ if (interp == NULL) {
+ Py_FatalError("no current interpreter");
+ }
+ return interp;
+}
+#endif
+
+
+// bpo-39947 added PyInterpreterState_Get() to Python 3.9.0a6
+#if 0x030700A1 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x030900A6
+static inline uint64_t
+PyThreadState_GetID(PyThreadState *tstate)
+{
+ assert(tstate != NULL);
+ return tstate->id;
+}
+#endif
+
+
+// bpo-37194 added PyObject_CallNoArgs() to Python 3.9.0a1
+#if PY_VERSION_HEX < 0x030900A1
+static inline PyObject*
+PyObject_CallNoArgs(PyObject *func)
+{
+ return PyObject_CallFunctionObjArgs(func, NULL);
+}
+#endif
+
+
+// bpo-39245 made PyObject_CallOneArg() public (previously called
+// _PyObject_CallOneArg) in Python 3.9.0a4
+#if PY_VERSION_HEX < 0x030900A4
+static inline PyObject*
+PyObject_CallOneArg(PyObject *func, PyObject *arg)
+{
+ return PyObject_CallFunctionObjArgs(func, arg, NULL);
+}
+#endif
+
+
+// bpo-40024 added PyModule_AddType() to Python 3.9.0a5
+#if PY_VERSION_HEX < 0x030900A5
+static inline int
+PyModule_AddType(PyObject *module, PyTypeObject *type)
+{
+ const char *name, *dot;
+
+ if (PyType_Ready(type) < 0) {
+ return -1;
+ }
+
+ // inline _PyType_Name()
+ name = type->tp_name;
+ assert(name != NULL);
+ dot = strrchr(name, '.');
+ if (dot != NULL) {
+ name = dot + 1;
+ }
+
+ Py_INCREF(type);
+ if (PyModule_AddObject(module, name, (PyObject *)type) < 0) {
+ Py_DECREF(type);
+ return -1;
+ }
+
+ return 0;
+}
+#endif
+
+
+// bpo-40241 added PyObject_GC_IsTracked() to Python 3.9.0a6.
+// bpo-4688 added _PyObject_GC_IS_TRACKED() to Python 2.7.0a2.
+#if PY_VERSION_HEX < 0x030900A6
+static inline int
+PyObject_GC_IsTracked(PyObject* obj)
+{
+ return (PyObject_IS_GC(obj) && _PyObject_GC_IS_TRACKED(obj));
+}
+#endif
+
+// bpo-40241 added PyObject_GC_IsFinalized() to Python 3.9.0a6.
+// bpo-18112 added _PyGCHead_FINALIZED() to Python 3.4.0 final.
+#if PY_VERSION_HEX < 0x030900A6 && PY_VERSION_HEX >= 0x030400F0
+static inline int
+PyObject_GC_IsFinalized(PyObject *obj)
+{
+ return (PyObject_IS_GC(obj) && _PyGCHead_FINALIZED((PyGC_Head *)(obj)-1));
+}
+#endif
+
+
+// bpo-39573 added Py_IS_TYPE() to Python 3.9.0a4
+#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_IS_TYPE)
+static inline int
+_Py_IS_TYPE(const PyObject *ob, const PyTypeObject *type) {
+ return ob->ob_type == type;
+}
+#define Py_IS_TYPE(ob, type) _Py_IS_TYPE((const PyObject*)(ob), type)
+#endif
+
+
+#ifdef __cplusplus
+}
+#endif
+#endif // PYTHONCAPI_COMPAT
--- a/mercurial/revlog.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/revlog.py Thu Feb 11 20:36:46 2021 -0800
@@ -448,14 +448,9 @@
self.datafile = datafile or (indexfile[:-2] + b".d")
self.nodemap_file = None
if persistentnodemap:
- if indexfile.endswith(b'.a'):
- pending_path = indexfile[:-4] + b".n.a"
- if opener.exists(pending_path):
- self.nodemap_file = pending_path
- else:
- self.nodemap_file = indexfile[:-4] + b".n"
- else:
- self.nodemap_file = indexfile[:-2] + b".n"
+ self.nodemap_file = nodemaputil.get_nodemap_file(
+ opener, self.indexfile
+ )
self.opener = opener
# When True, indexfile is opened with checkambig=True at writing, to
@@ -2375,6 +2370,7 @@
deltas,
linkmapper,
transaction,
+ alwayscache=False,
addrevisioncb=None,
duplicaterevisioncb=None,
):
@@ -2475,7 +2471,7 @@
(baserev, delta),
ifh,
dfh,
- alwayscache=bool(addrevisioncb),
+ alwayscache=alwayscache,
deltacomputer=deltacomputer,
)
--- a/mercurial/revlogutils/nodemap.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/revlogutils/nodemap.py Thu Feb 11 20:36:46 2021 -0800
@@ -81,9 +81,9 @@
if tr.hasfinalize(callback_id):
return # no need to register again
tr.addpending(
- callback_id, lambda tr: _persist_nodemap(tr, revlog, pending=True)
+ callback_id, lambda tr: persist_nodemap(tr, revlog, pending=True)
)
- tr.addfinalize(callback_id, lambda tr: _persist_nodemap(tr, revlog))
+ tr.addfinalize(callback_id, lambda tr: persist_nodemap(tr, revlog))
class _NoTransaction(object):
@@ -123,20 +123,25 @@
return # we do not use persistent_nodemap on this revlog
notr = _NoTransaction()
- _persist_nodemap(notr, revlog)
+ persist_nodemap(notr, revlog)
for k in sorted(notr._postclose):
notr._postclose[k](None)
-def _persist_nodemap(tr, revlog, pending=False):
+def persist_nodemap(tr, revlog, pending=False, force=False):
"""Write nodemap data on disk for a given revlog"""
if getattr(revlog, 'filteredrevs', ()):
raise error.ProgrammingError(
"cannot persist nodemap of a filtered changelog"
)
if revlog.nodemap_file is None:
- msg = "calling persist nodemap on a revlog without the feature enableb"
- raise error.ProgrammingError(msg)
+ if force:
+ revlog.nodemap_file = get_nodemap_file(
+ revlog.opener, revlog.indexfile
+ )
+ else:
+ msg = "calling persist nodemap on a revlog without the feature enabled"
+ raise error.ProgrammingError(msg)
can_incremental = util.safehasattr(revlog.index, "nodemap_data_incremental")
ondisk_docket = revlog._nodemap_docket
@@ -634,3 +639,14 @@
if isinstance(entry, dict):
return _find_node(entry, node[1:])
return entry
+
+
+def get_nodemap_file(opener, indexfile):
+ if indexfile.endswith(b'.a'):
+ pending_path = indexfile[:-4] + b".n.a"
+ if opener.exists(pending_path):
+ return pending_path
+ else:
+ return indexfile[:-4] + b".n"
+ else:
+ return indexfile[:-2] + b".n"
--- a/mercurial/scmutil.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/scmutil.py Thu Feb 11 20:36:46 2021 -0800
@@ -229,6 +229,8 @@
detailed_exit_code = 20
elif isinstance(inst, error.ConfigError):
detailed_exit_code = 30
+ elif isinstance(inst, error.HookAbort):
+ detailed_exit_code = 40
elif isinstance(inst, error.SecurityError):
detailed_exit_code = 150
elif isinstance(inst, error.CanceledError):
--- a/mercurial/setdiscovery.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/setdiscovery.py Thu Feb 11 20:36:46 2021 -0800
@@ -286,8 +286,6 @@
ui,
local,
remote,
- initialsamplesize=100,
- fullsamplesize=200,
abortwhenunrelated=True,
ancestorsof=None,
audit=None,
@@ -315,7 +313,8 @@
ownheads = [rev for rev in cl.headrevs() if rev != nullrev]
initial_head_exchange = ui.configbool(b'devel', b'discovery.exchange-heads')
-
+ initialsamplesize = ui.configint(b'devel', b'discovery.sample-size.initial')
+ fullsamplesize = ui.configint(b'devel', b'discovery.sample-size')
# We also ask remote about all the local heads. That set can be arbitrarily
# large, so we used to limit it size to `initialsamplesize`. We no longer
# do as it proved counter productive. The skipped heads could lead to a
--- a/mercurial/shelve.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/shelve.py Thu Feb 11 20:36:46 2021 -0800
@@ -812,7 +812,7 @@
with repo.lock():
checkparents(repo, state)
ms = mergestatemod.mergestate.read(repo)
- if list(ms.unresolved()):
+ if ms.unresolvedcount():
raise error.Abort(
_(b"unresolved conflicts, can't continue"),
hint=_(b"see 'hg resolve', then 'hg unshelve --continue'"),
--- a/mercurial/simplemerge.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/simplemerge.py Thu Feb 11 20:36:46 2021 -0800
@@ -402,31 +402,6 @@
return sl
- def find_unconflicted(self):
- """Return a list of ranges in base that are not conflicted."""
- am = mdiff.get_matching_blocks(self.basetext, self.atext)
- bm = mdiff.get_matching_blocks(self.basetext, self.btext)
-
- unc = []
-
- while am and bm:
- # there is an unconflicted block at i; how long does it
- # extend? until whichever one ends earlier.
- a1 = am[0][0]
- a2 = a1 + am[0][2]
- b1 = bm[0][0]
- b2 = b1 + bm[0][2]
- i = intersect((a1, a2), (b1, b2))
- if i:
- unc.append(i)
-
- if a2 < b2:
- del am[0]
- else:
- del bm[0]
-
- return unc
-
def _verifytext(text, path, ui, opts):
"""verifies that text is non-binary (unless opts[text] is passed,
--- a/mercurial/store.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/store.py Thu Feb 11 20:36:46 2021 -0800
@@ -387,13 +387,13 @@
b'requires',
]
+REVLOG_FILES_EXT = (b'.i', b'.d', b'.n', b'.nd')
+
def isrevlog(f, kind, st):
if kind != stat.S_IFREG:
return False
- if f[-2:] in (b'.i', b'.d', b'.n'):
- return True
- return f[-3:] == b'.nd'
+ return f.endswith(REVLOG_FILES_EXT)
class basicstore(object):
--- a/mercurial/subrepoutil.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/subrepoutil.py Thu Feb 11 20:36:46 2021 -0800
@@ -105,7 +105,7 @@
return src
state = {}
- for path, src in p[b''].items():
+ for path, src in p.items(b''):
kind = b'hg'
if src.startswith(b'['):
if b']' not in src:
--- a/mercurial/templater.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/templater.py Thu Feb 11 20:36:46 2021 -0800
@@ -891,7 +891,7 @@
fp = _open_mapfile(path)
cache, tmap, aliases = _readmapfile(fp, path)
- for key, val in conf[b'templates'].items():
+ for key, val in conf.items(b'templates'):
if not val:
raise error.ParseError(
_(b'missing value'), conf.source(b'templates', key)
@@ -904,7 +904,7 @@
cache[key] = unquotestring(val)
elif key != b'__base__':
tmap[key] = os.path.join(base, val)
- aliases.extend(conf[b'templatealias'].items())
+ aliases.extend(conf.items(b'templatealias'))
return cache, tmap, aliases
--- a/mercurial/unionrepo.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/unionrepo.py Thu Feb 11 20:36:46 2021 -0800
@@ -128,6 +128,7 @@
deltas,
linkmapper,
transaction,
+ alwayscache=False,
addrevisioncb=None,
duplicaterevisioncb=None,
maybemissingparents=False,
--- a/mercurial/upgrade.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/upgrade.py Thu Feb 11 20:36:46 2021 -0800
@@ -118,6 +118,7 @@
up_actions,
removed_actions,
revlogs,
+ backup,
)
if not run:
@@ -215,12 +216,6 @@
backuppath = upgrade_engine.upgrade(
ui, repo, dstrepo, upgrade_op
)
- if not backup:
- ui.status(
- _(b'removing old repository content %s\n') % backuppath
- )
- repo.vfs.rmtree(backuppath, forcibly=True)
- backuppath = None
finally:
ui.status(_(b'removing temporary repository %s\n') % tmppath)
--- a/mercurial/upgrade_utils/actions.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/upgrade_utils/actions.py Thu Feb 11 20:36:46 2021 -0800
@@ -66,6 +66,18 @@
postdowngrademessage
Message intended for humans which will be shown post an upgrade
operation in which this improvement was removed
+
+ touches_filelogs (bool)
+ Whether this improvement touches filelogs
+
+ touches_manifests (bool)
+ Whether this improvement touches manifests
+
+ touches_changelog (bool)
+ Whether this improvement touches changelog
+
+ touches_requirements (bool)
+ Whether this improvement changes repository requirements
"""
def __init__(self, name, type, description, upgrademessage):
@@ -75,6 +87,12 @@
self.upgrademessage = upgrademessage
self.postupgrademessage = None
self.postdowngrademessage = None
+ # By default for now, we assume every improvement touches
+ # all the things
+ self.touches_filelogs = True
+ self.touches_manifests = True
+ self.touches_changelog = True
+ self.touches_requirements = True
def __eq__(self, other):
if not isinstance(other, improvement):
@@ -128,6 +146,12 @@
# operation in which this improvement was removed
postdowngrademessage = None
+ # By default for now, we assume every improvement touches all the things
+ touches_filelogs = True
+ touches_manifests = True
+ touches_changelog = True
+ touches_requirements = True
+
def __init__(self):
raise NotImplementedError()
@@ -267,6 +291,12 @@
b' New shares will be created in safe mode.'
)
+ # upgrade only needs to change the requirements
+ touches_filelogs = False
+ touches_manifests = False
+ touches_changelog = False
+ touches_requirements = True
+
@registerformatvariant
class sparserevlog(requirementformatvariant):
@@ -626,6 +656,7 @@
upgrade_actions,
removed_actions,
revlogs_to_process,
+ backup_store,
):
self.ui = ui
self.new_requirements = new_requirements
@@ -670,6 +701,75 @@
b're-delta-multibase' in self._upgrade_actions_names
)
+ # should this operation create a backup of the store
+ self.backup_store = backup_store
+
+ # whether the operation touches different revlogs at all or not
+ self.touches_filelogs = self._touches_filelogs()
+ self.touches_manifests = self._touches_manifests()
+ self.touches_changelog = self._touches_changelog()
+ # whether the operation touches requirements file or not
+ self.touches_requirements = self._touches_requirements()
+ self.touches_store = (
+ self.touches_filelogs
+ or self.touches_manifests
+ or self.touches_changelog
+ )
+        # whether the operation only touches the repository requirements
+ self.requirements_only = (
+ self.touches_requirements and not self.touches_store
+ )
+
+ def _touches_filelogs(self):
+ for a in self.upgrade_actions:
+ # in optimisations, we re-process the revlogs again
+ if a.type == OPTIMISATION:
+ return True
+ elif a.touches_filelogs:
+ return True
+ for a in self.removed_actions:
+ if a.touches_filelogs:
+ return True
+ return False
+
+ def _touches_manifests(self):
+ for a in self.upgrade_actions:
+ # in optimisations, we re-process the revlogs again
+ if a.type == OPTIMISATION:
+ return True
+ elif a.touches_manifests:
+ return True
+ for a in self.removed_actions:
+ if a.touches_manifests:
+ return True
+ return False
+
+ def _touches_changelog(self):
+ for a in self.upgrade_actions:
+ # in optimisations, we re-process the revlogs again
+ if a.type == OPTIMISATION:
+ return True
+ elif a.touches_changelog:
+ return True
+ for a in self.removed_actions:
+ if a.touches_changelog:
+ return True
+ return False
+
+ def _touches_requirements(self):
+ for a in self.upgrade_actions:
+ # optimisations are used to re-process revlogs and does not result
+ # in a requirement being added or removed
+ if a.type == OPTIMISATION:
+ pass
+ elif a.touches_requirements:
+ return True
+ for a in self.removed_actions:
+ if a.touches_requirements:
+ return True
+
+ return False
+
def _write_labeled(self, l, label):
"""
Utility function to aid writing of a list under one label
--- a/mercurial/upgrade_utils/engine.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/upgrade_utils/engine.py Thu Feb 11 20:36:46 2021 -0800
@@ -24,6 +24,7 @@
util,
vfs as vfsmod,
)
+from ..revlogutils import nodemap
def _revlogfrompath(repo, path):
@@ -412,7 +413,10 @@
"""
# TODO: don't blindly rename everything in store
# There can be upgrades where store is not touched at all
- util.rename(currentrepo.spath, backupvfs.join(b'store'))
+ if upgrade_op.backup_store:
+ util.rename(currentrepo.spath, backupvfs.join(b'store'))
+ else:
+ currentrepo.vfs.rmtree(b'store', forcibly=True)
util.rename(upgradedrepo.spath, currentrepo.spath)
@@ -436,6 +440,8 @@
"""
assert srcrepo.currentwlock()
assert dstrepo.currentwlock()
+ backuppath = None
+ backupvfs = None
ui.status(
_(
@@ -444,78 +450,107 @@
)
)
- with dstrepo.transaction(b'upgrade') as tr:
- _clonerevlogs(
- ui,
- srcrepo,
- dstrepo,
- tr,
- upgrade_op,
+ if upgrade_op.requirements_only:
+ ui.status(_(b'upgrading repository requirements\n'))
+ scmutil.writereporequirements(srcrepo, upgrade_op.new_requirements)
+ # if there is only one action and that is persistent nodemap upgrade
+ # directly write the nodemap file and update requirements instead of going
+ # through the whole cloning process
+ elif (
+ len(upgrade_op.upgrade_actions) == 1
+ and b'persistent-nodemap' in upgrade_op._upgrade_actions_names
+ and not upgrade_op.removed_actions
+ ):
+ ui.status(
+ _(b'upgrading repository to use persistent nodemap feature\n')
+ )
+ with srcrepo.transaction(b'upgrade') as tr:
+ unfi = srcrepo.unfiltered()
+ cl = unfi.changelog
+ nodemap.persist_nodemap(tr, cl, force=True)
+ scmutil.writereporequirements(srcrepo, upgrade_op.new_requirements)
+ else:
+ with dstrepo.transaction(b'upgrade') as tr:
+ _clonerevlogs(
+ ui,
+ srcrepo,
+ dstrepo,
+ tr,
+ upgrade_op,
+ )
+
+ # Now copy other files in the store directory.
+ for p in _files_to_copy_post_revlog_clone(srcrepo):
+ srcrepo.ui.status(_(b'copying %s\n') % p)
+ src = srcrepo.store.rawvfs.join(p)
+ dst = dstrepo.store.rawvfs.join(p)
+ util.copyfile(src, dst, copystat=True)
+
+ finishdatamigration(ui, srcrepo, dstrepo, requirements)
+
+ ui.status(_(b'data fully upgraded in a temporary repository\n'))
+
+ if upgrade_op.backup_store:
+ backuppath = pycompat.mkdtemp(
+ prefix=b'upgradebackup.', dir=srcrepo.path
+ )
+ backupvfs = vfsmod.vfs(backuppath)
+
+ # Make a backup of requires file first, as it is the first to be modified.
+ util.copyfile(
+ srcrepo.vfs.join(b'requires'), backupvfs.join(b'requires')
+ )
+
+ # We install an arbitrary requirement that clients must not support
+ # as a mechanism to lock out new clients during the data swap. This is
+ # better than allowing a client to continue while the repository is in
+ # an inconsistent state.
+ ui.status(
+ _(
+ b'marking source repository as being upgraded; clients will be '
+ b'unable to read from repository\n'
+ )
+ )
+ scmutil.writereporequirements(
+ srcrepo, srcrepo.requirements | {b'upgradeinprogress'}
)
- # Now copy other files in the store directory.
- for p in _files_to_copy_post_revlog_clone(srcrepo):
- srcrepo.ui.status(_(b'copying %s\n') % p)
- src = srcrepo.store.rawvfs.join(p)
- dst = dstrepo.store.rawvfs.join(p)
- util.copyfile(src, dst, copystat=True)
-
- finishdatamigration(ui, srcrepo, dstrepo, requirements)
-
- ui.status(_(b'data fully upgraded in a temporary repository\n'))
-
- backuppath = pycompat.mkdtemp(prefix=b'upgradebackup.', dir=srcrepo.path)
- backupvfs = vfsmod.vfs(backuppath)
-
- # Make a backup of requires file first, as it is the first to be modified.
- util.copyfile(srcrepo.vfs.join(b'requires'), backupvfs.join(b'requires'))
-
- # We install an arbitrary requirement that clients must not support
- # as a mechanism to lock out new clients during the data swap. This is
- # better than allowing a client to continue while the repository is in
- # an inconsistent state.
- ui.status(
- _(
- b'marking source repository as being upgraded; clients will be '
- b'unable to read from repository\n'
- )
- )
- scmutil.writereporequirements(
- srcrepo, srcrepo.requirements | {b'upgradeinprogress'}
- )
+ ui.status(_(b'starting in-place swap of repository data\n'))
+ if upgrade_op.backup_store:
+ ui.status(
+ _(b'replaced files will be backed up at %s\n') % backuppath
+ )
- ui.status(_(b'starting in-place swap of repository data\n'))
- ui.status(_(b'replaced files will be backed up at %s\n') % backuppath)
-
- # Now swap in the new store directory. Doing it as a rename should make
- # the operation nearly instantaneous and atomic (at least in well-behaved
- # environments).
- ui.status(_(b'replacing store...\n'))
- tstart = util.timer()
- _replacestores(srcrepo, dstrepo, backupvfs, upgrade_op)
- elapsed = util.timer() - tstart
- ui.status(
- _(
- b'store replacement complete; repository was inconsistent for '
- b'%0.1fs\n'
+ # Now swap in the new store directory. Doing it as a rename should make
+ # the operation nearly instantaneous and atomic (at least in well-behaved
+ # environments).
+ ui.status(_(b'replacing store...\n'))
+ tstart = util.timer()
+ _replacestores(srcrepo, dstrepo, backupvfs, upgrade_op)
+ elapsed = util.timer() - tstart
+ ui.status(
+ _(
+ b'store replacement complete; repository was inconsistent for '
+ b'%0.1fs\n'
+ )
+ % elapsed
)
- % elapsed
- )
- # We first write the requirements file. Any new requirements will lock
- # out legacy clients.
- ui.status(
- _(
- b'finalizing requirements file and making repository readable '
- b'again\n'
+ # We first write the requirements file. Any new requirements will lock
+ # out legacy clients.
+ ui.status(
+ _(
+ b'finalizing requirements file and making repository readable '
+ b'again\n'
+ )
)
- )
- scmutil.writereporequirements(srcrepo, upgrade_op.new_requirements)
+ scmutil.writereporequirements(srcrepo, upgrade_op.new_requirements)
- # The lock file from the old store won't be removed because nothing has a
- # reference to its new location. So clean it up manually. Alternatively, we
- # could update srcrepo.svfs and other variables to point to the new
- # location. This is simpler.
- backupvfs.unlink(b'store/lock')
+ if upgrade_op.backup_store:
+ # The lock file from the old store won't be removed because nothing has a
+ # reference to its new location. So clean it up manually. Alternatively, we
+ # could update srcrepo.svfs and other variables to point to the new
+ # location. This is simpler.
+ backupvfs.unlink(b'store/lock')
return backuppath
--- a/mercurial/utils/dateutil.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/utils/dateutil.py Thu Feb 11 20:36:46 2021 -0800
@@ -68,7 +68,9 @@
timestamp = time.time()
if timestamp < 0:
hint = _(b"check your clock")
- raise error.Abort(_(b"negative timestamp: %d") % timestamp, hint=hint)
+ raise error.InputError(
+ _(b"negative timestamp: %d") % timestamp, hint=hint
+ )
delta = datetime.datetime.utcfromtimestamp(
timestamp
) - datetime.datetime.fromtimestamp(timestamp)
@@ -328,24 +330,26 @@
date = date.strip()
if not date:
- raise error.Abort(_(b"dates cannot consist entirely of whitespace"))
+ raise error.InputError(
+ _(b"dates cannot consist entirely of whitespace")
+ )
elif date[0:1] == b"<":
if not date[1:]:
- raise error.Abort(_(b"invalid day spec, use '<DATE'"))
+ raise error.InputError(_(b"invalid day spec, use '<DATE'"))
when = upper(date[1:])
return lambda x: x <= when
elif date[0:1] == b">":
if not date[1:]:
- raise error.Abort(_(b"invalid day spec, use '>DATE'"))
+ raise error.InputError(_(b"invalid day spec, use '>DATE'"))
when = lower(date[1:])
return lambda x: x >= when
elif date[0:1] == b"-":
try:
days = int(date[1:])
except ValueError:
- raise error.Abort(_(b"invalid day spec: %s") % date[1:])
+ raise error.InputError(_(b"invalid day spec: %s") % date[1:])
if days < 0:
- raise error.Abort(
+ raise error.InputError(
_(b"%s must be nonnegative (see 'hg help dates')") % date[1:]
)
when = makedate()[0] - days * 3600 * 24
--- a/mercurial/wireprotov1peer.py Wed Feb 10 23:03:54 2021 +0100
+++ b/mercurial/wireprotov1peer.py Thu Feb 11 20:36:46 2021 -0800
@@ -43,14 +43,14 @@
@batchable
def sample(self, one, two=None):
# Build list of encoded arguments suitable for your wire protocol:
- encargs = [('one', encode(one),), ('two', encode(two),)]
+ encoded_args = [('one', encode(one),), ('two', encode(two),)]
# Create future for injection of encoded result:
- encresref = future()
+ encoded_res_future = future()
# Return encoded arguments and future:
- yield encargs, encresref
+ yield encoded_args, encoded_res_future
# Assuming the future to be filled with the result from the batched
# request now. Decode it:
- yield decode(encresref.value)
+ yield decode(encoded_res_future.value)
The decorator returns a function which wraps this coroutine as a plain
method, but adds the original method as an attribute called "batchable",
@@ -60,12 +60,12 @@
def plain(*args, **opts):
batchable = f(*args, **opts)
- encargsorres, encresref = next(batchable)
- if not encresref:
- return encargsorres # a local result in this case
+ encoded_args_or_res, encoded_res_future = next(batchable)
+ if not encoded_res_future:
+ return encoded_args_or_res # a local result in this case
self = args[0]
cmd = pycompat.bytesurl(f.__name__) # ensure cmd is ascii bytestr
- encresref.set(self._submitone(cmd, encargsorres))
+ encoded_res_future.set(self._submitone(cmd, encoded_args_or_res))
return next(batchable)
setattr(plain, 'batchable', f)
@@ -257,15 +257,15 @@
# Encoded arguments and future holding remote result.
try:
- encargsorres, fremote = next(batchable)
+ encoded_args_or_res, fremote = next(batchable)
except Exception:
pycompat.future_set_exception_info(f, sys.exc_info()[1:])
return
if not fremote:
- f.set_result(encargsorres)
+ f.set_result(encoded_args_or_res)
else:
- requests.append((command, encargsorres))
+ requests.append((command, encoded_args_or_res))
states.append((command, f, batchable, fremote))
if not requests:
--- a/relnotes/5.7 Wed Feb 10 23:03:54 2021 +0100
+++ b/relnotes/5.7 Thu Feb 11 20:36:46 2021 -0800
@@ -17,6 +17,8 @@
can be e.g. `rebase`. As part of this effort, the default format
from `hg rebase` was reorganized a bit.
+ * `hg purge` is now a core command using `--confirm` by default.
+
* `hg diff` and `hg extdiff` now support `--from <rev>` and `--to <rev>`
arguments as clearer alternatives to `-r <revs>`. `-r <revs>` has been
deprecated.
@@ -43,6 +45,9 @@
* The `branchmap` cache is updated more intelligently and can be
significantly faster for repositories with many branches and changesets.
+ * The `rev-branch-cache` is now updated incrementally whenever changesets
+ are added.
+
== New Experimental Features ==
@@ -64,4 +69,5 @@
== Internal API Changes ==
-
+ * `changelog.branchinfo` is deprecated and will be removed after 5.8.
+ It is superseded by `changelogrevision.branchinfo`.
--- a/relnotes/next Wed Feb 10 23:03:54 2021 +0100
+++ b/relnotes/next Thu Feb 11 20:36:46 2021 -0800
@@ -1,8 +1,18 @@
== New Features ==
+
+ * `hg purge` is now a core command using `--confirm` by default.
+
+ * The `rev-branch-cache` is now updated incrementally whenever changesets
+ are added.
== New Experimental Features ==
+ * `hg diff` now takes an experimental `--merge` flag which causes `hg
+ diff --change` to show the changes relative to an automerge for
+ merge changesets. This makes it easier to detect and review manual
+ changes performed in merge changesets.
+
== Bug Fixes ==
@@ -13,4 +23,5 @@
== Internal API Changes ==
-
+ * `changelog.branchinfo` is deprecated and will be removed after 5.8.
+ It is superseded by `changelogrevision.branchinfo`.
--- a/rust/Cargo.lock Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/Cargo.lock Thu Feb 11 20:36:46 2021 -0800
@@ -55,6 +55,24 @@
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
+name = "bytes-cast"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "bytes-cast-derive 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "bytes-cast-derive"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "proc-macro2 1.0.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 1.0.54 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
name = "cc"
version = "1.0.66"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -181,6 +199,16 @@
]
[[package]]
+name = "derive_more"
+version = "0.99.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "proc-macro2 1.0.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 1.0.54 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
name = "difference"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -268,20 +296,17 @@
]
[[package]]
-name = "hex"
-version = "0.4.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
name = "hg-core"
version = "0.1.0"
dependencies = [
"byteorder 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bytes-cast 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"clap 2.33.3 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam-channel 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "derive_more 0.99.11 (registry+https://github.com/rust-lang/crates.io-index)",
"flate2 1.0.19 (registry+https://github.com/rust-lang/crates.io-index)",
"format-bytes 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "hex 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "home 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
"im-rc 15.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -313,6 +338,14 @@
]
[[package]]
+name = "home"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
name = "humantime"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -683,6 +716,7 @@
version = "0.1.0"
dependencies = [
"clap 2.33.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "derive_more 0.99.11 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"format-bytes 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"hg-core 0.1.0",
@@ -910,6 +944,8 @@
"checksum bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693"
"checksum bitmaps 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2"
"checksum byteorder 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de"
+"checksum bytes-cast 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3196ba300c7bc9282a4331e878496cb3e9603a898a8f1446601317163e16ca52"
+"checksum bytes-cast-derive 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "cb936af9de38476664d6b58e529aff30d482e4ce1c5e150293d00730b0d81fdb"
"checksum cc 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)" = "4c0496836a84f8d0495758516b8621a622beb77c0fed418570e50764093ced48"
"checksum cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
"checksum cfg-if 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
@@ -924,6 +960,7 @@
"checksum crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8"
"checksum crossbeam-utils 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "02d96d1e189ef58269ebe5b97953da3274d83a93af647c2ddd6f9dab28cedb8d"
"checksum ctor 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "7fbaabec2c953050352311293be5c6aba8e141ba19d6811862b232d6fd020484"
+"checksum derive_more 0.99.11 (registry+https://github.com/rust-lang/crates.io-index)" = "41cb0e6161ad61ed084a36ba71fbba9e3ac5aee3606fb607fe08da6acbcf3d8c"
"checksum difference 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198"
"checksum either 1.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457"
"checksum env_logger 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "44533bbbb3bb3c1fa17d9f2e4e38bbbaf8396ba82193c4cb1b6445d711445d36"
@@ -935,7 +972,7 @@
"checksum getrandom 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "fc587bc0ec293155d5bfa6b9891ec18a1e330c234f896ea47fbada4cadbe47e6"
"checksum glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574"
"checksum hermit-abi 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)" = "5aca5565f760fb5b220e499d72710ed156fdb74e631659e99377d9ebfbd13ae8"
-"checksum hex 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "644f9158b2f133fd50f5fb3242878846d9eb792e445c893805ff0e3824006e35"
+"checksum home 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2456aef2e6b6a9784192ae780c0f15bc57df0e918585282325e8c8ac27737654"
"checksum humantime 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "df004cfca50ef23c36850aaaa59ad52cc70d0e90243c3c7737a4dd32dc7a3c4f"
"checksum im-rc 15.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3ca8957e71f04a205cb162508f9326aea04676c8dfd0711220190d6b83664f3f"
"checksum itertools 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b"
--- a/rust/hg-core/Cargo.toml Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/hg-core/Cargo.toml Thu Feb 11 20:36:46 2021 -0800
@@ -9,8 +9,10 @@
name = "hg"
[dependencies]
+bytes-cast = "0.1"
byteorder = "1.3.4"
-hex = "0.4.2"
+derive_more = "0.99"
+home = "0.5"
im-rc = "15.0.*"
lazy_static = "1.4.0"
memchr = "2.3.3"
--- a/rust/hg-core/examples/nodemap/main.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/hg-core/examples/nodemap/main.rs Thu Feb 11 20:36:46 2021 -0800
@@ -49,7 +49,7 @@
fn query(index: &Index, nm: &NodeTree, prefix: &str) {
let start = Instant::now();
- let res = nm.find_hex(index, prefix);
+ let res = NodePrefix::from_hex(prefix).map(|p| nm.find_bin(index, p));
println!("Result found in {:?}: {:?}", start.elapsed(), res);
}
@@ -66,7 +66,7 @@
.collect();
if queries < 10 {
let nodes_hex: Vec<String> =
- nodes.iter().map(|n| n.encode_hex()).collect();
+ nodes.iter().map(|n| format!("{:x}", n)).collect();
println!("Nodes: {:?}", nodes_hex);
}
let mut last: Option<Revision> = None;
@@ -76,11 +76,11 @@
}
let elapsed = start.elapsed();
println!(
- "Did {} queries in {:?} (mean {:?}), last was {:?} with result {:?}",
+ "Did {} queries in {:?} (mean {:?}), last was {:x} with result {:?}",
queries,
elapsed,
elapsed / (queries as u32),
- nodes.last().unwrap().encode_hex(),
+ nodes.last().unwrap(),
last
);
}
--- a/rust/hg-core/src/config.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/hg-core/src/config.rs Thu Feb 11 20:36:46 2021 -0800
@@ -12,3 +12,4 @@
mod config;
mod layer;
pub use config::Config;
+pub use layer::{ConfigError, ConfigParseError};
--- a/rust/hg-core/src/config/config.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/hg-core/src/config/config.rs Thu Feb 11 20:36:46 2021 -0800
@@ -8,11 +8,14 @@
// GNU General Public License version 2 or any later version.
use super::layer;
-use crate::config::layer::{ConfigError, ConfigLayer, ConfigValue};
-use std::path::PathBuf;
+use crate::config::layer::{
+ ConfigError, ConfigLayer, ConfigParseError, ConfigValue,
+};
+use crate::utils::files::get_bytes_from_path;
+use std::env;
+use std::path::{Path, PathBuf};
-use crate::operations::find_root;
-use crate::utils::files::read_whole_file;
+use crate::errors::{HgResultExt, IoResultExt};
/// Holds the config values for the current repository
/// TODO update this docstring once we support more sources
@@ -49,6 +52,124 @@
}
impl Config {
+ /// Load system and user configuration from various files.
+ ///
+ /// This is also affected by some environment variables.
+ ///
+ /// TODO: add a parameter for `--config` CLI arguments
+ pub fn load() -> Result<Self, ConfigError> {
+ let mut config = Self { layers: Vec::new() };
+ let opt_rc_path = env::var_os("HGRCPATH");
+ // HGRCPATH replaces system config
+ if opt_rc_path.is_none() {
+ config.add_system_config()?
+ }
+ config.add_for_environment_variable("EDITOR", b"ui", b"editor");
+ config.add_for_environment_variable("VISUAL", b"ui", b"editor");
+ config.add_for_environment_variable("PAGER", b"pager", b"pager");
+ // HGRCPATH replaces user config
+ if opt_rc_path.is_none() {
+ config.add_user_config()?
+ }
+ if let Some(rc_path) = &opt_rc_path {
+ for path in env::split_paths(rc_path) {
+ if !path.as_os_str().is_empty() {
+ if path.is_dir() {
+ config.add_trusted_dir(&path)?
+ } else {
+ config.add_trusted_file(&path)?
+ }
+ }
+ }
+ }
+ Ok(config)
+ }
+
+ fn add_trusted_dir(&mut self, path: &Path) -> Result<(), ConfigError> {
+ if let Some(entries) = std::fs::read_dir(path)
+ .for_file(path)
+ .io_not_found_as_none()?
+ {
+ for entry in entries {
+ let file_path = entry.for_file(path)?.path();
+ if file_path.extension() == Some(std::ffi::OsStr::new("rc")) {
+ self.add_trusted_file(&file_path)?
+ }
+ }
+ }
+ Ok(())
+ }
+
+ fn add_trusted_file(&mut self, path: &Path) -> Result<(), ConfigError> {
+ if let Some(data) =
+ std::fs::read(path).for_file(path).io_not_found_as_none()?
+ {
+ self.layers.extend(ConfigLayer::parse(path, &data)?)
+ }
+ Ok(())
+ }
+
+ fn add_for_environment_variable(
+ &mut self,
+ var: &str,
+ section: &[u8],
+ key: &[u8],
+ ) {
+ if let Some(value) = env::var_os(var) {
+ let origin = layer::ConfigOrigin::Environment(var.into());
+ let mut layer = ConfigLayer::new(origin);
+ layer.add(
+ section.to_owned(),
+ key.to_owned(),
+ // `value` is not a path but this works for any `OsStr`:
+ get_bytes_from_path(value),
+ None,
+ );
+ self.layers.push(layer)
+ }
+ }
+
+ #[cfg(unix)] // TODO: other platforms
+ fn add_system_config(&mut self) -> Result<(), ConfigError> {
+ let mut add_for_prefix = |prefix: &Path| -> Result<(), ConfigError> {
+ let etc = prefix.join("etc").join("mercurial");
+ self.add_trusted_file(&etc.join("hgrc"))?;
+ self.add_trusted_dir(&etc.join("hgrc.d"))
+ };
+ let root = Path::new("/");
+ // TODO: use `std::env::args_os().next().unwrap()` a.k.a. argv[0]
+ // instead? TODO: can this be a relative path?
+ let hg = crate::utils::current_exe()?;
+ // TODO: this order (per-installation then per-system) matches
+ // `systemrcpath()` in `mercurial/scmposix.py`, but
+ // `mercurial/helptext/config.txt` suggests it should be reversed
+ if let Some(installation_prefix) = hg.parent().and_then(Path::parent) {
+ if installation_prefix != root {
+ add_for_prefix(&installation_prefix)?
+ }
+ }
+ add_for_prefix(root)?;
+ Ok(())
+ }
+
+    #[cfg(unix)] // TODO: other platforms
+ fn add_user_config(&mut self) -> Result<(), ConfigError> {
+ let opt_home = home::home_dir();
+ if let Some(home) = &opt_home {
+ self.add_trusted_file(&home.join(".hgrc"))?
+ }
+ let darwin = cfg!(any(target_os = "macos", target_os = "ios"));
+ if !darwin {
+ if let Some(config_home) = env::var_os("XDG_CONFIG_HOME")
+ .map(PathBuf::from)
+ .or_else(|| opt_home.map(|home| home.join(".config")))
+ {
+ self.add_trusted_file(&config_home.join("hg").join("hgrc"))?
+ }
+ }
+ Ok(())
+ }
+
/// Loads in order, which means that the precedence is the same
/// as the order of `sources`.
pub fn load_from_explicit_sources(
@@ -62,7 +183,7 @@
ConfigSource::AbsPath(c) => {
// TODO check if it should be trusted
// mercurial/ui.py:427
- let data = match read_whole_file(&c) {
+ let data = match std::fs::read(&c) {
Err(_) => continue, // same as the python code
Ok(data) => data,
};
@@ -74,13 +195,28 @@
Ok(Config { layers })
}
- /// Loads the local config. In a future version, this will also load the
- /// `$HOME/.hgrc` and more to mirror the Python implementation.
- pub fn load() -> Result<Self, ConfigError> {
- let root = find_root().unwrap();
- Ok(Self::load_from_explicit_sources(vec![
- ConfigSource::AbsPath(root.join(".hg/hgrc")),
- ])?)
+ /// Loads the per-repository config into a new `Config` which is combined
+ /// with `self`.
+ pub(crate) fn combine_with_repo(
+ &self,
+ repo_config_files: &[PathBuf],
+ ) -> Result<Self, ConfigError> {
+ let (cli_layers, other_layers) = self
+ .layers
+ .iter()
+ .cloned()
+ .partition(ConfigLayer::is_from_command_line);
+
+ let mut repo_config = Self {
+ layers: other_layers,
+ };
+ for path in repo_config_files {
+ // TODO: check if this file should be trusted:
+ // `mercurial/ui.py:427`
+ repo_config.add_trusted_file(path)?;
+ }
+ repo_config.layers.extend(cli_layers);
+ Ok(repo_config)
}
/// Returns an `Err` if the first value found is not a valid boolean.
@@ -90,11 +226,11 @@
&self,
section: &[u8],
item: &[u8],
- ) -> Result<Option<bool>, ConfigError> {
+ ) -> Result<Option<bool>, ConfigParseError> {
match self.get_inner(§ion, &item) {
Some((layer, v)) => match parse_bool(&v.bytes) {
Some(b) => Ok(Some(b)),
- None => Err(ConfigError::Parse {
+ None => Err(ConfigParseError {
origin: layer.origin.to_owned(),
line: v.line,
bytes: v.bytes.to_owned(),
@@ -176,8 +312,6 @@
let config = Config::load_from_explicit_sources(sources)
.expect("expected valid config");
- dbg!(&config);
-
let (_, value) = config.get_inner(b"section", b"item").unwrap();
assert_eq!(
value,
--- a/rust/hg-core/src/config/layer.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/hg-core/src/config/layer.rs Thu Feb 11 20:36:46 2021 -0800
@@ -7,14 +7,12 @@
// This software may be used and distributed according to the terms of the
// GNU General Public License version 2 or any later version.
-use crate::utils::files::{
- get_bytes_from_path, get_path_from_bytes, read_whole_file,
-};
+use crate::errors::{HgError, IoResultExt};
+use crate::utils::files::{get_bytes_from_path, get_path_from_bytes};
use format_bytes::format_bytes;
use lazy_static::lazy_static;
use regex::bytes::Regex;
use std::collections::HashMap;
-use std::io;
use std::path::{Path, PathBuf};
lazy_static! {
@@ -53,6 +51,15 @@
}
}
+ /// Returns whether this layer comes from `--config` CLI arguments
+ pub(crate) fn is_from_command_line(&self) -> bool {
+ if let ConfigOrigin::CommandLine = self.origin {
+ true
+ } else {
+ false
+ }
+ }
+
/// Add an entry to the config, overwriting the old one if already present.
pub fn add(
&mut self,
@@ -98,21 +105,19 @@
while let Some((index, bytes)) = lines_iter.next() {
if let Some(m) = INCLUDE_RE.captures(&bytes) {
let filename_bytes = &m[1];
- let filename_to_include = get_path_from_bytes(&filename_bytes);
- match read_include(&src, &filename_to_include) {
- (include_src, Ok(data)) => {
- layers.push(current_layer);
- layers.extend(Self::parse(&include_src, &data)?);
- current_layer =
- Self::new(ConfigOrigin::File(src.to_owned()));
- }
- (_, Err(e)) => {
- return Err(ConfigError::IncludeError {
- path: filename_to_include.to_owned(),
- io_error: e,
- })
- }
- }
+ // `Path::parent` only fails for the root directory,
+ // which `src` can’t be since we’ve managed to open it as a
+ // file.
+ let dir = src
+ .parent()
+ .expect("Path::parent fail on a file we’ve read");
+ // `Path::join` with an absolute argument correctly ignores the
+ // base path
+ let filename = dir.join(&get_path_from_bytes(&filename_bytes));
+ let data = std::fs::read(&filename).for_file(&filename)?;
+ layers.push(current_layer);
+ layers.extend(Self::parse(&filename, &data)?);
+ current_layer = Self::new(ConfigOrigin::File(src.to_owned()));
} else if let Some(_) = EMPTY_RE.captures(&bytes) {
} else if let Some(m) = SECTION_RE.captures(&bytes) {
section = m[1].to_vec();
@@ -145,11 +150,12 @@
map.remove(&m[1]);
}
} else {
- return Err(ConfigError::Parse {
+ return Err(ConfigParseError {
origin: ConfigOrigin::File(src.to_owned()),
line: Some(index + 1),
bytes: bytes.to_owned(),
- });
+ }
+ .into());
}
}
if !current_layer.is_empty() {
@@ -205,9 +211,11 @@
#[derive(Clone, Debug)]
pub enum ConfigOrigin {
- /// The value comes from a configuration file
+ /// From a configuration file
File(PathBuf),
- /// The value comes from the environment like `$PAGER` or `$EDITOR`
+ /// From a `--config` CLI argument
+ CommandLine,
+ /// From environment variables like `$PAGER` or `$EDITOR`
Environment(Vec<u8>),
/* TODO cli
* TODO defaults (configitems.py)
@@ -221,48 +229,25 @@
pub fn to_bytes(&self) -> Vec<u8> {
match self {
ConfigOrigin::File(p) => get_bytes_from_path(p),
- ConfigOrigin::Environment(e) => e.to_owned(),
+ ConfigOrigin::CommandLine => b"--config".to_vec(),
+ ConfigOrigin::Environment(e) => format_bytes!(b"${}", e),
}
}
}
#[derive(Debug)]
-pub enum ConfigError {
- Parse {
- origin: ConfigOrigin,
- line: Option<usize>,
- bytes: Vec<u8>,
- },
- /// Failed to include a sub config file
- IncludeError {
- path: PathBuf,
- io_error: std::io::Error,
- },
- /// Any IO error that isn't expected
- IO(std::io::Error),
+pub struct ConfigParseError {
+ pub origin: ConfigOrigin,
+ pub line: Option<usize>,
+ pub bytes: Vec<u8>,
}
-impl From<std::io::Error> for ConfigError {
- fn from(e: std::io::Error) -> Self {
- Self::IO(e)
- }
+#[derive(Debug, derive_more::From)]
+pub enum ConfigError {
+ Parse(ConfigParseError),
+ Other(HgError),
}
fn make_regex(pattern: &'static str) -> Regex {
Regex::new(pattern).expect("expected a valid regex")
}
-
-/// Includes are relative to the file they're defined in, unless they're
-/// absolute.
-fn read_include(
- old_src: &Path,
- new_src: &Path,
-) -> (PathBuf, io::Result<Vec<u8>>) {
- if new_src.is_absolute() {
- (new_src.to_path_buf(), read_whole_file(&new_src))
- } else {
- let dir = old_src.parent().unwrap();
- let new_src = dir.join(&new_src);
- (new_src.to_owned(), read_whole_file(&new_src))
- }
-}
--- a/rust/hg-core/src/dirstate.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/hg-core/src/dirstate.rs Thu Feb 11 20:36:46 2021 -0800
@@ -5,7 +5,8 @@
// This software may be used and distributed according to the terms of the
// GNU General Public License version 2 or any later version.
-use crate::{utils::hg_path::HgPathBuf, DirstateParseError, FastHashMap};
+use crate::errors::HgError;
+use crate::{utils::hg_path::HgPathBuf, FastHashMap};
use std::collections::hash_map;
use std::convert::TryFrom;
@@ -60,7 +61,7 @@
}
impl TryFrom<u8> for EntryState {
- type Error = DirstateParseError;
+ type Error = HgError;
fn try_from(value: u8) -> Result<Self, Self::Error> {
match value {
@@ -69,8 +70,8 @@
b'r' => Ok(EntryState::Removed),
b'm' => Ok(EntryState::Merged),
b'?' => Ok(EntryState::Unknown),
- _ => Err(DirstateParseError::CorruptedEntry(format!(
- "Incorrect entry state {}",
+ _ => Err(HgError::CorruptedRepository(format!(
+ "Incorrect dirstate entry state {}",
value
))),
}
--- a/rust/hg-core/src/dirstate/dirstate_map.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/hg-core/src/dirstate/dirstate_map.rs Thu Feb 11 20:36:46 2021 -0800
@@ -5,6 +5,7 @@
// This software may be used and distributed according to the terms of the
// GNU General Public License version 2 or any later version.
+use crate::errors::HgError;
use crate::revlog::node::NULL_NODE_ID;
use crate::{
dirstate::{parsers::PARENT_SIZE, EntryState, SIZE_FROM_OTHER_PARENT},
@@ -14,7 +15,7 @@
hg_path::{HgPath, HgPathBuf},
},
CopyMap, DirsMultiset, DirstateEntry, DirstateError, DirstateMapError,
- DirstateParents, DirstateParseError, FastHashMap, StateMap,
+ DirstateParents, FastHashMap, StateMap,
};
use micro_timer::timed;
use std::collections::HashSet;
@@ -370,7 +371,9 @@
p2: NULL_NODE_ID,
};
} else {
- return Err(DirstateError::Parse(DirstateParseError::Damaged));
+ return Err(
+ HgError::corrupted("Dirstate appears to be damaged").into()
+ );
}
self.parents = Some(parents);
--- a/rust/hg-core/src/dirstate/parsers.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/hg-core/src/dirstate/parsers.rs Thu Feb 11 20:36:46 2021 -0800
@@ -3,10 +3,11 @@
// This software may be used and distributed according to the terms of the
// GNU General Public License version 2 or any later version.
+use crate::errors::HgError;
use crate::utils::hg_path::HgPath;
use crate::{
dirstate::{CopyMap, EntryState, StateMap},
- DirstateEntry, DirstatePackError, DirstateParents, DirstateParseError,
+ DirstateEntry, DirstateParents,
};
use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};
use micro_timer::timed;
@@ -26,11 +27,9 @@
);
#[timed]
-pub fn parse_dirstate(
- contents: &[u8],
-) -> Result<ParseResult, DirstateParseError> {
+pub fn parse_dirstate(contents: &[u8]) -> Result<ParseResult, HgError> {
if contents.len() < PARENT_SIZE * 2 {
- return Err(DirstateParseError::TooLittleData);
+ return Err(HgError::corrupted("Too little data for dirstate."));
}
let mut copies = vec![];
let mut entries = vec![];
@@ -43,19 +42,21 @@
while curr_pos < contents.len() {
if curr_pos + MIN_ENTRY_SIZE > contents.len() {
- return Err(DirstateParseError::Overflow);
+ return Err(HgError::corrupted("Overflow in dirstate."));
}
let entry_bytes = &contents[curr_pos..];
let mut cursor = Cursor::new(entry_bytes);
- let state = EntryState::try_from(cursor.read_u8()?)?;
- let mode = cursor.read_i32::<BigEndian>()?;
- let size = cursor.read_i32::<BigEndian>()?;
- let mtime = cursor.read_i32::<BigEndian>()?;
- let path_len = cursor.read_i32::<BigEndian>()? as usize;
+        // Unwrapping errors from `byteorder` as we’ve already checked
+ // `MIN_ENTRY_SIZE` so the input should never be too short.
+ let state = EntryState::try_from(cursor.read_u8().unwrap())?;
+ let mode = cursor.read_i32::<BigEndian>().unwrap();
+ let size = cursor.read_i32::<BigEndian>().unwrap();
+ let mtime = cursor.read_i32::<BigEndian>().unwrap();
+ let path_len = cursor.read_i32::<BigEndian>().unwrap() as usize;
if path_len > contents.len() - curr_pos {
- return Err(DirstateParseError::Overflow);
+ return Err(HgError::corrupted("Overflow in dirstate."));
}
// Slice instead of allocating a Vec needed for `read_exact`
@@ -90,7 +91,7 @@
copy_map: &CopyMap,
parents: DirstateParents,
now: Duration,
-) -> Result<Vec<u8>, DirstatePackError> {
+) -> Result<Vec<u8>, HgError> {
// TODO move away from i32 before 2038.
let now: i32 = now.as_secs().try_into().expect("time overflow");
@@ -136,16 +137,23 @@
new_filename.extend(copy.bytes());
}
- packed.write_u8(entry.state.into())?;
- packed.write_i32::<BigEndian>(entry.mode)?;
- packed.write_i32::<BigEndian>(entry.size)?;
- packed.write_i32::<BigEndian>(new_mtime)?;
- packed.write_i32::<BigEndian>(new_filename.len() as i32)?;
+ // Unwrapping because `impl std::io::Write for Vec<u8>` never errors
+ packed.write_u8(entry.state.into()).unwrap();
+ packed.write_i32::<BigEndian>(entry.mode).unwrap();
+ packed.write_i32::<BigEndian>(entry.size).unwrap();
+ packed.write_i32::<BigEndian>(new_mtime).unwrap();
+ packed
+ .write_i32::<BigEndian>(new_filename.len() as i32)
+ .unwrap();
packed.extend(new_filename)
}
if packed.len() != expected_size {
- return Err(DirstatePackError::BadSize(expected_size, packed.len()));
+ return Err(HgError::CorruptedRepository(format!(
+ "bad dirstate size: {} != {}",
+ expected_size,
+ packed.len()
+ )));
}
Ok(packed)
--- a/rust/hg-core/src/dirstate/status.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/hg-core/src/dirstate/status.rs Thu Feb 11 20:36:46 2021 -0800
@@ -33,6 +33,7 @@
use std::{
borrow::Cow,
collections::HashSet,
+ fmt,
fs::{read_dir, DirEntry},
io::ErrorKind,
ops::Deref,
@@ -51,17 +52,16 @@
Unknown,
}
-impl ToString for BadType {
- fn to_string(&self) -> String {
- match self {
+impl fmt::Display for BadType {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.write_str(match self {
BadType::CharacterDevice => "character device",
BadType::BlockDevice => "block device",
BadType::FIFO => "fifo",
BadType::Socket => "socket",
BadType::Directory => "directory",
BadType::Unknown => "unknown",
- }
- .to_string()
+ })
}
}
@@ -265,7 +265,7 @@
pub traversed: Vec<HgPathBuf>,
}
-#[derive(Debug)]
+#[derive(Debug, derive_more::From)]
pub enum StatusError {
/// Generic IO error
IO(std::io::Error),
@@ -277,28 +277,12 @@
pub type StatusResult<T> = Result<T, StatusError>;
-impl From<PatternError> for StatusError {
- fn from(e: PatternError) -> Self {
- StatusError::Pattern(e)
- }
-}
-impl From<HgPathError> for StatusError {
- fn from(e: HgPathError) -> Self {
- StatusError::Path(e)
- }
-}
-impl From<std::io::Error> for StatusError {
- fn from(e: std::io::Error) -> Self {
- StatusError::IO(e)
- }
-}
-
-impl ToString for StatusError {
- fn to_string(&self) -> String {
+impl fmt::Display for StatusError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
- StatusError::IO(e) => e.to_string(),
- StatusError::Path(e) => e.to_string(),
- StatusError::Pattern(e) => e.to_string(),
+ StatusError::IO(error) => error.fmt(f),
+ StatusError::Path(error) => error.fmt(f),
+ StatusError::Pattern(error) => error.fmt(f),
}
}
}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/src/errors.rs Thu Feb 11 20:36:46 2021 -0800
@@ -0,0 +1,133 @@
+use std::fmt;
+
+/// Common error cases that can happen in many different APIs
+#[derive(Debug)]
+pub enum HgError {
+ IoError {
+ error: std::io::Error,
+ context: IoErrorContext,
+ },
+
+ /// A file under `.hg/` normally only written by Mercurial is not in the
+ /// expected format. This indicates a bug in Mercurial, filesystem
+ /// corruption, or hardware failure.
+ ///
+ /// The given string is a short explanation for users, not intended to be
+ /// machine-readable.
+ CorruptedRepository(String),
+
+    /// The repository or requested operation involves a feature not
+ /// supported by the Rust implementation. Falling back to the Python
+ /// implementation may or may not work.
+ ///
+ /// The given string is a short explanation for users, not intended to be
+ /// machine-readable.
+ UnsupportedFeature(String),
+
+ /// Operation cannot proceed for some other reason.
+ ///
+ /// The given string is a short explanation for users, not intended to be
+ /// machine-readable.
+ Abort(String),
+}
+
+/// Details about where an I/O error happened
+#[derive(Debug, derive_more::From)]
+pub enum IoErrorContext {
+ /// A filesystem operation for the given file
+ #[from]
+ File(std::path::PathBuf),
+ /// `std::env::current_dir`
+ CurrentDir,
+ /// `std::env::current_exe`
+ CurrentExe,
+}
+
+impl HgError {
+ pub fn corrupted(explanation: impl Into<String>) -> Self {
+ // TODO: capture a backtrace here and keep it in the error value
+ // to aid debugging?
+ // https://doc.rust-lang.org/std/backtrace/struct.Backtrace.html
+ HgError::CorruptedRepository(explanation.into())
+ }
+
+ pub fn unsupported(explanation: impl Into<String>) -> Self {
+ HgError::UnsupportedFeature(explanation.into())
+ }
+ pub fn abort(explanation: impl Into<String>) -> Self {
+ HgError::Abort(explanation.into())
+ }
+}
+
+// TODO: use `DisplayBytes` instead to show non-Unicode filenames losslessly?
+impl fmt::Display for HgError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+ HgError::IoError { error, context } => {
+ write!(f, "{}: {}", error, context)
+ }
+ HgError::CorruptedRepository(explanation) => {
+ write!(f, "corrupted repository: {}", explanation)
+ }
+ HgError::UnsupportedFeature(explanation) => {
+ write!(f, "unsupported feature: {}", explanation)
+ }
+ HgError::Abort(explanation) => explanation.fmt(f),
+ }
+ }
+}
+
+// TODO: use `DisplayBytes` instead to show non-Unicode filenames losslessly?
+impl fmt::Display for IoErrorContext {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+ IoErrorContext::File(path) => path.display().fmt(f),
+ IoErrorContext::CurrentDir => f.write_str("current directory"),
+ IoErrorContext::CurrentExe => f.write_str("current executable"),
+ }
+ }
+}
+
+pub trait IoResultExt<T> {
+ /// Annotate a possible I/O error as related to a file at the given path.
+ ///
+ /// This allows printing something like “File not found: example.txt”
+ /// instead of just “File not found”.
+ ///
+ /// Converts a `Result` with `std::io::Error` into one with `HgError`.
+ fn for_file(self, path: &std::path::Path) -> Result<T, HgError>;
+}
+
+impl<T> IoResultExt<T> for std::io::Result<T> {
+ fn for_file(self, path: &std::path::Path) -> Result<T, HgError> {
+ self.map_err(|error| HgError::IoError {
+ error,
+ context: IoErrorContext::File(path.to_owned()),
+ })
+ }
+}
+
+pub trait HgResultExt<T> {
+ /// Handle missing files separately from other I/O error cases.
+ ///
+ /// Wraps the `Ok` type in an `Option`:
+ ///
+ /// * `Ok(x)` becomes `Ok(Some(x))`
+ /// * An I/O "not found" error becomes `Ok(None)`
+ /// * Other errors are unchanged
+ fn io_not_found_as_none(self) -> Result<Option<T>, HgError>;
+}
+
+impl<T> HgResultExt<T> for Result<T, HgError> {
+ fn io_not_found_as_none(self) -> Result<Option<T>, HgError> {
+ match self {
+ Ok(x) => Ok(Some(x)),
+ Err(HgError::IoError { error, .. })
+ if error.kind() == std::io::ErrorKind::NotFound =>
+ {
+ Ok(None)
+ }
+ Err(other_error) => Err(other_error),
+ }
+ }
+}
--- a/rust/hg-core/src/lib.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/hg-core/src/lib.rs Thu Feb 11 20:36:46 2021 -0800
@@ -3,8 +3,10 @@
//
// This software may be used and distributed according to the terms of the
// GNU General Public License version 2 or any later version.
+
mod ancestors;
pub mod dagops;
+pub mod errors;
pub use ancestors::{AncestorsIterator, LazyAncestors, MissingAncestors};
mod dirstate;
pub mod discovery;
@@ -28,22 +30,16 @@
pub use revlog::*;
pub mod config;
pub mod operations;
+pub mod revset;
pub mod utils;
-// Remove this to see (potential) non-artificial compile failures. MacOS
-// *should* compile, but fail to compile tests for example as of 2020-03-06
-#[cfg(not(target_os = "linux"))]
-compile_error!(
- "`hg-core` has only been tested on Linux and will most \
- likely not behave correctly on other platforms."
-);
-
use crate::utils::hg_path::{HgPathBuf, HgPathError};
pub use filepatterns::{
parse_pattern_syntax, read_pattern_file, IgnorePattern,
PatternFileWarning, PatternSyntax,
};
use std::collections::HashMap;
+use std::fmt;
use twox_hash::RandomXxHashBuilder64;
/// This is a contract between the `micro-timer` crate and us, to expose
@@ -57,45 +53,6 @@
/// write access to your repository, you have other issues.
pub type FastHashMap<K, V> = HashMap<K, V, RandomXxHashBuilder64>;
-#[derive(Clone, Debug, PartialEq)]
-pub enum DirstateParseError {
- TooLittleData,
- Overflow,
- // TODO refactor to use bytes instead of String
- CorruptedEntry(String),
- Damaged,
-}
-
-impl From<std::io::Error> for DirstateParseError {
- fn from(e: std::io::Error) -> Self {
- DirstateParseError::CorruptedEntry(e.to_string())
- }
-}
-
-impl ToString for DirstateParseError {
- fn to_string(&self) -> String {
- use crate::DirstateParseError::*;
- match self {
- TooLittleData => "Too little data for dirstate.".to_string(),
- Overflow => "Overflow in dirstate.".to_string(),
- CorruptedEntry(e) => format!("Corrupted entry: {:?}.", e),
- Damaged => "Dirstate appears to be damaged.".to_string(),
- }
- }
-}
-
-#[derive(Debug, PartialEq)]
-pub enum DirstatePackError {
- CorruptedEntry(String),
- CorruptedParent,
- BadSize(usize, usize),
-}
-
-impl From<std::io::Error> for DirstatePackError {
- fn from(e: std::io::Error) -> Self {
- DirstatePackError::CorruptedEntry(e.to_string())
- }
-}
#[derive(Debug, PartialEq)]
pub enum DirstateMapError {
PathNotFound(HgPathBuf),
@@ -103,94 +60,61 @@
InvalidPath(HgPathError),
}
-impl ToString for DirstateMapError {
- fn to_string(&self) -> String {
+impl fmt::Display for DirstateMapError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
DirstateMapError::PathNotFound(_) => {
- "expected a value, found none".to_string()
+ f.write_str("expected a value, found none")
}
- DirstateMapError::EmptyPath => "Overflow in dirstate.".to_string(),
- DirstateMapError::InvalidPath(e) => e.to_string(),
+ DirstateMapError::EmptyPath => {
+ f.write_str("Overflow in dirstate.")
+ }
+ DirstateMapError::InvalidPath(path_error) => path_error.fmt(f),
}
}
}
-#[derive(Debug)]
+#[derive(Debug, derive_more::From)]
pub enum DirstateError {
- Parse(DirstateParseError),
- Pack(DirstatePackError),
Map(DirstateMapError),
- IO(std::io::Error),
+ Common(errors::HgError),
}
-impl From<DirstateParseError> for DirstateError {
- fn from(e: DirstateParseError) -> Self {
- DirstateError::Parse(e)
- }
-}
-
-impl From<DirstatePackError> for DirstateError {
- fn from(e: DirstatePackError) -> Self {
- DirstateError::Pack(e)
- }
-}
-
-#[derive(Debug)]
+#[derive(Debug, derive_more::From)]
pub enum PatternError {
+ #[from]
Path(HgPathError),
UnsupportedSyntax(String),
UnsupportedSyntaxInFile(String, String, usize),
TooLong(usize),
+ #[from]
IO(std::io::Error),
/// Needed a pattern that can be turned into a regex but got one that
/// can't. This should only happen through programmer error.
NonRegexPattern(IgnorePattern),
}
-impl ToString for PatternError {
- fn to_string(&self) -> String {
+impl fmt::Display for PatternError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
PatternError::UnsupportedSyntax(syntax) => {
- format!("Unsupported syntax {}", syntax)
+ write!(f, "Unsupported syntax {}", syntax)
}
PatternError::UnsupportedSyntaxInFile(syntax, file_path, line) => {
- format!(
+ write!(
+ f,
"{}:{}: unsupported syntax {}",
file_path, line, syntax
)
}
PatternError::TooLong(size) => {
- format!("matcher pattern is too long ({} bytes)", size)
+ write!(f, "matcher pattern is too long ({} bytes)", size)
}
- PatternError::IO(e) => e.to_string(),
- PatternError::Path(e) => e.to_string(),
+ PatternError::IO(error) => error.fmt(f),
+ PatternError::Path(error) => error.fmt(f),
PatternError::NonRegexPattern(pattern) => {
- format!("'{:?}' cannot be turned into a regex", pattern)
+ write!(f, "'{:?}' cannot be turned into a regex", pattern)
}
}
}
}
-
-impl From<DirstateMapError> for DirstateError {
- fn from(e: DirstateMapError) -> Self {
- DirstateError::Map(e)
- }
-}
-
-impl From<std::io::Error> for DirstateError {
- fn from(e: std::io::Error) -> Self {
- DirstateError::IO(e)
- }
-}
-
-impl From<std::io::Error> for PatternError {
- fn from(e: std::io::Error) -> Self {
- PatternError::IO(e)
- }
-}
-
-impl From<HgPathError> for PatternError {
- fn from(e: HgPathError) -> Self {
- PatternError::Path(e)
- }
-}
--- a/rust/hg-core/src/operations/cat.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/hg-core/src/operations/cat.rs Thu Feb 11 20:36:46 2021 -0800
@@ -5,7 +5,6 @@
// This software may be used and distributed according to the terms of the
// GNU General Public License version 2 or any later version.
-use std::convert::From;
use std::path::PathBuf;
use crate::repo::Repo;
@@ -15,61 +14,11 @@
use crate::revlog::revlog::Revlog;
use crate::revlog::revlog::RevlogError;
use crate::revlog::Node;
-use crate::revlog::NodePrefix;
-use crate::revlog::Revision;
use crate::utils::files::get_path_from_bytes;
use crate::utils::hg_path::{HgPath, HgPathBuf};
const METADATA_DELIMITER: [u8; 2] = [b'\x01', b'\n'];
-/// Kind of error encountered by `CatRev`
-#[derive(Debug)]
-pub enum CatRevErrorKind {
- /// Error when reading a `revlog` file.
- IoError(std::io::Error),
- /// The revision has not been found.
- InvalidRevision,
- /// Found more than one revision whose ID match the requested prefix
- AmbiguousPrefix,
- /// A `revlog` file is corrupted.
- CorruptedRevlog,
- /// The `revlog` format version is not supported.
- UnsuportedRevlogVersion(u16),
- /// The `revlog` data format is not supported.
- UnknowRevlogDataFormat(u8),
-}
-
-/// A `CatRev` error
-#[derive(Debug)]
-pub struct CatRevError {
- /// Kind of error encountered by `CatRev`
- pub kind: CatRevErrorKind,
-}
-
-impl From<CatRevErrorKind> for CatRevError {
- fn from(kind: CatRevErrorKind) -> Self {
- CatRevError { kind }
- }
-}
-
-impl From<RevlogError> for CatRevError {
- fn from(err: RevlogError) -> Self {
- match err {
- RevlogError::IoError(err) => CatRevErrorKind::IoError(err),
- RevlogError::UnsuportedVersion(version) => {
- CatRevErrorKind::UnsuportedRevlogVersion(version)
- }
- RevlogError::InvalidRevision => CatRevErrorKind::InvalidRevision,
- RevlogError::AmbiguousPrefix => CatRevErrorKind::AmbiguousPrefix,
- RevlogError::Corrupted => CatRevErrorKind::CorruptedRevlog,
- RevlogError::UnknowDataFormat(format) => {
- CatRevErrorKind::UnknowRevlogDataFormat(format)
- }
- }
- .into()
- }
-}
-
/// List files under Mercurial control at a given revision.
///
/// * `root`: Repository root
@@ -77,24 +26,16 @@
/// * `files`: The files to output.
pub fn cat(
repo: &Repo,
- rev: &str,
+ revset: &str,
files: &[HgPathBuf],
-) -> Result<Vec<u8>, CatRevError> {
+) -> Result<Vec<u8>, RevlogError> {
+ let rev = crate::revset::resolve_single(revset, repo)?;
let changelog = Changelog::open(repo)?;
let manifest = Manifest::open(repo)?;
-
- let changelog_entry = match rev.parse::<Revision>() {
- Ok(rev) => changelog.get_rev(rev)?,
- _ => {
- let changelog_node = NodePrefix::from_hex(&rev)
- .map_err(|_| CatRevErrorKind::InvalidRevision)?;
- changelog.get_node(changelog_node.borrow())?
- }
- };
- let manifest_node = Node::from_hex(&changelog_entry.manifest_node()?)
- .map_err(|_| CatRevErrorKind::CorruptedRevlog)?;
-
- let manifest_entry = manifest.get_node((&manifest_node).into())?;
+ let changelog_entry = changelog.get_rev(rev)?;
+ let manifest_node =
+ Node::from_hex_for_repo(&changelog_entry.manifest_node()?)?;
+ let manifest_entry = manifest.get_node(manifest_node.into())?;
let mut bytes = vec![];
for (manifest_file, node_bytes) in manifest_entry.files_with_nodes() {
@@ -105,9 +46,8 @@
let file_log =
Revlog::open(repo, &index_path, Some(&data_path))?;
- let file_node = Node::from_hex(node_bytes)
- .map_err(|_| CatRevErrorKind::CorruptedRevlog)?;
- let file_rev = file_log.get_node_rev((&file_node).into())?;
+ let file_node = Node::from_hex_for_repo(node_bytes)?;
+ let file_rev = file_log.get_node_rev(file_node.into())?;
let data = file_log.get_rev_data(file_rev)?;
if data.starts_with(&METADATA_DELIMITER) {
let end_delimiter_position = data
--- a/rust/hg-core/src/operations/debugdata.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/hg-core/src/operations/debugdata.rs Thu Feb 11 20:36:46 2021 -0800
@@ -7,8 +7,6 @@
use crate::repo::Repo;
use crate::revlog::revlog::{Revlog, RevlogError};
-use crate::revlog::NodePrefix;
-use crate::revlog::Revision;
/// Kind of data to debug
#[derive(Debug, Copy, Clone)]
@@ -17,86 +15,19 @@
Manifest,
}
-/// Kind of error encountered by DebugData
-#[derive(Debug)]
-pub enum DebugDataErrorKind {
- /// Error when reading a `revlog` file.
- IoError(std::io::Error),
- /// The revision has not been found.
- InvalidRevision,
- /// Found more than one revision whose ID match the requested prefix
- AmbiguousPrefix,
- /// A `revlog` file is corrupted.
- CorruptedRevlog,
- /// The `revlog` format version is not supported.
- UnsuportedRevlogVersion(u16),
- /// The `revlog` data format is not supported.
- UnknowRevlogDataFormat(u8),
-}
-
-/// A DebugData error
-#[derive(Debug)]
-pub struct DebugDataError {
- /// Kind of error encountered by DebugData
- pub kind: DebugDataErrorKind,
-}
-
-impl From<DebugDataErrorKind> for DebugDataError {
- fn from(kind: DebugDataErrorKind) -> Self {
- DebugDataError { kind }
- }
-}
-
-impl From<std::io::Error> for DebugDataError {
- fn from(err: std::io::Error) -> Self {
- let kind = DebugDataErrorKind::IoError(err);
- DebugDataError { kind }
- }
-}
-
-impl From<RevlogError> for DebugDataError {
- fn from(err: RevlogError) -> Self {
- match err {
- RevlogError::IoError(err) => DebugDataErrorKind::IoError(err),
- RevlogError::UnsuportedVersion(version) => {
- DebugDataErrorKind::UnsuportedRevlogVersion(version)
- }
- RevlogError::InvalidRevision => {
- DebugDataErrorKind::InvalidRevision
- }
- RevlogError::AmbiguousPrefix => {
- DebugDataErrorKind::AmbiguousPrefix
- }
- RevlogError::Corrupted => DebugDataErrorKind::CorruptedRevlog,
- RevlogError::UnknowDataFormat(format) => {
- DebugDataErrorKind::UnknowRevlogDataFormat(format)
- }
- }
- .into()
- }
-}
-
/// Dump the contents data of a revision.
pub fn debug_data(
repo: &Repo,
- rev: &str,
+ revset: &str,
kind: DebugDataKind,
-) -> Result<Vec<u8>, DebugDataError> {
+) -> Result<Vec<u8>, RevlogError> {
let index_file = match kind {
DebugDataKind::Changelog => "00changelog.i",
DebugDataKind::Manifest => "00manifest.i",
};
let revlog = Revlog::open(repo, index_file, None)?;
-
- let data = match rev.parse::<Revision>() {
- Ok(rev) => revlog.get_rev_data(rev)?,
- _ => {
- let node = NodePrefix::from_hex(&rev)
- .map_err(|_| DebugDataErrorKind::InvalidRevision)?;
- let rev = revlog.get_node_rev(node.borrow())?;
- revlog.get_rev_data(rev)?
- }
- };
-
+ let rev =
+ crate::revset::resolve_rev_number_or_hex_prefix(revset, &revlog)?;
+ let data = revlog.get_rev_data(rev)?;
Ok(data)
}
--- a/rust/hg-core/src/operations/find_root.rs Wed Feb 10 23:03:54 2021 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,100 +0,0 @@
-use std::fmt;
-use std::path::{Path, PathBuf};
-
-/// Kind of error encoutered by FindRoot
-#[derive(Debug)]
-pub enum FindRootErrorKind {
- /// Root of the repository has not been found
- /// Contains the current directory used by FindRoot
- RootNotFound(PathBuf),
- /// The current directory does not exists or permissions are insufficient
- /// to get access to it
- GetCurrentDirError(std::io::Error),
-}
-
-/// A FindRoot error
-#[derive(Debug)]
-pub struct FindRootError {
- /// Kind of error encoutered by FindRoot
- pub kind: FindRootErrorKind,
-}
-
-impl std::error::Error for FindRootError {}
-
-impl fmt::Display for FindRootError {
- fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
- unimplemented!()
- }
-}
-
-/// Find the root of the repository
-/// by searching for a .hg directory in the process’ current directory and its
-/// ancestors
-pub fn find_root() -> Result<PathBuf, FindRootError> {
- let current_dir = std::env::current_dir().map_err(|e| FindRootError {
- kind: FindRootErrorKind::GetCurrentDirError(e),
- })?;
- Ok(find_root_from_path(¤t_dir)?.into())
-}
-
-/// Find the root of the repository
-/// by searching for a .hg directory in the given directory and its ancestors
-pub fn find_root_from_path(start: &Path) -> Result<&Path, FindRootError> {
- if start.join(".hg").exists() {
- return Ok(start);
- }
- for ancestor in start.ancestors() {
- if ancestor.join(".hg").exists() {
- return Ok(ancestor);
- }
- }
- Err(FindRootError {
- kind: FindRootErrorKind::RootNotFound(start.into()),
- })
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
- use std::fs;
- use tempfile;
-
- #[test]
- fn dot_hg_not_found() {
- let tmp_dir = tempfile::tempdir().unwrap();
- let path = tmp_dir.path();
-
- let err = find_root_from_path(&path).unwrap_err();
-
- // TODO do something better
- assert!(match err {
- FindRootError { kind } => match kind {
- FindRootErrorKind::RootNotFound(p) => p == path.to_path_buf(),
- _ => false,
- },
- })
- }
-
- #[test]
- fn dot_hg_in_current_path() {
- let tmp_dir = tempfile::tempdir().unwrap();
- let root = tmp_dir.path();
- fs::create_dir_all(root.join(".hg")).unwrap();
-
- let result = find_root_from_path(&root).unwrap();
-
- assert_eq!(result, root)
- }
-
- #[test]
- fn dot_hg_in_parent() {
- let tmp_dir = tempfile::tempdir().unwrap();
- let root = tmp_dir.path();
- fs::create_dir_all(root.join(".hg")).unwrap();
-
- let directory = root.join("some/nested/directory");
- let result = find_root_from_path(&directory).unwrap();
-
- assert_eq!(result, root)
- }
-} /* tests */
--- a/rust/hg-core/src/operations/list_tracked_files.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/hg-core/src/operations/list_tracked_files.rs Thu Feb 11 20:36:46 2021 -0800
@@ -6,47 +6,15 @@
// GNU General Public License version 2 or any later version.
use crate::dirstate::parsers::parse_dirstate;
+use crate::errors::HgError;
use crate::repo::Repo;
use crate::revlog::changelog::Changelog;
use crate::revlog::manifest::{Manifest, ManifestEntry};
-use crate::revlog::node::{Node, NodePrefix};
+use crate::revlog::node::Node;
use crate::revlog::revlog::RevlogError;
-use crate::revlog::Revision;
use crate::utils::hg_path::HgPath;
-use crate::{DirstateParseError, EntryState};
+use crate::EntryState;
use rayon::prelude::*;
-use std::convert::From;
-
-/// Kind of error encountered by `ListDirstateTrackedFiles`
-#[derive(Debug)]
-pub enum ListDirstateTrackedFilesErrorKind {
- /// Error when reading the `dirstate` file
- IoError(std::io::Error),
- /// Error when parsing the `dirstate` file
- ParseError(DirstateParseError),
-}
-
-/// A `ListDirstateTrackedFiles` error
-#[derive(Debug)]
-pub struct ListDirstateTrackedFilesError {
- /// Kind of error encountered by `ListDirstateTrackedFiles`
- pub kind: ListDirstateTrackedFilesErrorKind,
-}
-
-impl From<ListDirstateTrackedFilesErrorKind>
- for ListDirstateTrackedFilesError
-{
- fn from(kind: ListDirstateTrackedFilesErrorKind) -> Self {
- ListDirstateTrackedFilesError { kind }
- }
-}
-
-impl From<std::io::Error> for ListDirstateTrackedFilesError {
- fn from(err: std::io::Error) -> Self {
- let kind = ListDirstateTrackedFilesErrorKind::IoError(err);
- ListDirstateTrackedFilesError { kind }
- }
-}
/// List files under Mercurial control in the working directory
/// by reading the dirstate
@@ -56,16 +24,13 @@
}
impl Dirstate {
- pub fn new(repo: &Repo) -> Result<Self, ListDirstateTrackedFilesError> {
+ pub fn new(repo: &Repo) -> Result<Self, HgError> {
let content = repo.hg_vfs().read("dirstate")?;
Ok(Self { content })
}
- pub fn tracked_files(
- &self,
- ) -> Result<Vec<&HgPath>, ListDirstateTrackedFilesError> {
- let (_, entries, _) = parse_dirstate(&self.content)
- .map_err(ListDirstateTrackedFilesErrorKind::ParseError)?;
+ pub fn tracked_files(&self) -> Result<Vec<&HgPath>, HgError> {
+ let (_, entries, _) = parse_dirstate(&self.content)?;
let mut files: Vec<&HgPath> = entries
.into_iter()
.filter_map(|(path, entry)| match entry.state {
@@ -78,81 +43,18 @@
}
}
-/// Kind of error encountered by `ListRevTrackedFiles`
-#[derive(Debug)]
-pub enum ListRevTrackedFilesErrorKind {
- /// Error when reading a `revlog` file.
- IoError(std::io::Error),
- /// The revision has not been found.
- InvalidRevision,
- /// Found more than one revision whose ID match the requested prefix
- AmbiguousPrefix,
- /// A `revlog` file is corrupted.
- CorruptedRevlog,
- /// The `revlog` format version is not supported.
- UnsuportedRevlogVersion(u16),
- /// The `revlog` data format is not supported.
- UnknowRevlogDataFormat(u8),
-}
-
-/// A `ListRevTrackedFiles` error
-#[derive(Debug)]
-pub struct ListRevTrackedFilesError {
- /// Kind of error encountered by `ListRevTrackedFiles`
- pub kind: ListRevTrackedFilesErrorKind,
-}
-
-impl From<ListRevTrackedFilesErrorKind> for ListRevTrackedFilesError {
- fn from(kind: ListRevTrackedFilesErrorKind) -> Self {
- ListRevTrackedFilesError { kind }
- }
-}
-
-impl From<RevlogError> for ListRevTrackedFilesError {
- fn from(err: RevlogError) -> Self {
- match err {
- RevlogError::IoError(err) => {
- ListRevTrackedFilesErrorKind::IoError(err)
- }
- RevlogError::UnsuportedVersion(version) => {
- ListRevTrackedFilesErrorKind::UnsuportedRevlogVersion(version)
- }
- RevlogError::InvalidRevision => {
- ListRevTrackedFilesErrorKind::InvalidRevision
- }
- RevlogError::AmbiguousPrefix => {
- ListRevTrackedFilesErrorKind::AmbiguousPrefix
- }
- RevlogError::Corrupted => {
- ListRevTrackedFilesErrorKind::CorruptedRevlog
- }
- RevlogError::UnknowDataFormat(format) => {
- ListRevTrackedFilesErrorKind::UnknowRevlogDataFormat(format)
- }
- }
- .into()
- }
-}
-
/// List files under Mercurial control at a given revision.
pub fn list_rev_tracked_files(
repo: &Repo,
- rev: &str,
-) -> Result<FilesForRev, ListRevTrackedFilesError> {
+ revset: &str,
+) -> Result<FilesForRev, RevlogError> {
+ let rev = crate::revset::resolve_single(revset, repo)?;
let changelog = Changelog::open(repo)?;
let manifest = Manifest::open(repo)?;
-
- let changelog_entry = match rev.parse::<Revision>() {
- Ok(rev) => changelog.get_rev(rev)?,
- _ => {
- let changelog_node = NodePrefix::from_hex(&rev)
- .or(Err(ListRevTrackedFilesErrorKind::InvalidRevision))?;
- changelog.get_node(changelog_node.borrow())?
- }
- };
- let manifest_node = Node::from_hex(&changelog_entry.manifest_node()?)
- .or(Err(ListRevTrackedFilesErrorKind::CorruptedRevlog))?;
- let manifest_entry = manifest.get_node((&manifest_node).into())?;
+ let changelog_entry = changelog.get_rev(rev)?;
+ let manifest_node =
+ Node::from_hex_for_repo(&changelog_entry.manifest_node()?)?;
+ let manifest_entry = manifest.get_node(manifest_node.into())?;
Ok(FilesForRev(manifest_entry))
}
--- a/rust/hg-core/src/operations/mod.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/hg-core/src/operations/mod.rs Thu Feb 11 20:36:46 2021 -0800
@@ -5,19 +5,8 @@
mod cat;
mod debugdata;
mod dirstate_status;
-mod find_root;
mod list_tracked_files;
-pub use cat::{cat, CatRevError, CatRevErrorKind};
-pub use debugdata::{
- debug_data, DebugDataError, DebugDataErrorKind, DebugDataKind,
-};
-pub use find_root::{
- find_root, find_root_from_path, FindRootError, FindRootErrorKind,
-};
-pub use list_tracked_files::{
- list_rev_tracked_files, FilesForRev, ListRevTrackedFilesError,
- ListRevTrackedFilesErrorKind,
-};
-pub use list_tracked_files::{
- Dirstate, ListDirstateTrackedFilesError, ListDirstateTrackedFilesErrorKind,
-};
+pub use cat::cat;
+pub use debugdata::{debug_data, DebugDataKind};
+pub use list_tracked_files::Dirstate;
+pub use list_tracked_files::{list_rev_tracked_files, FilesForRev};
--- a/rust/hg-core/src/repo.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/hg-core/src/repo.rs Thu Feb 11 20:36:46 2021 -0800
@@ -1,6 +1,9 @@
-use crate::operations::{find_root, FindRootError};
+use crate::config::{Config, ConfigError, ConfigParseError};
+use crate::errors::{HgError, IoResultExt};
use crate::requirements;
+use crate::utils::files::get_path_from_bytes;
use memmap::{Mmap, MmapOptions};
+use std::collections::HashSet;
use std::path::{Path, PathBuf};
/// A repository on disk
@@ -8,6 +11,28 @@
working_directory: PathBuf,
dot_hg: PathBuf,
store: PathBuf,
+ requirements: HashSet<String>,
+ config: Config,
+}
+
+#[derive(Debug, derive_more::From)]
+pub enum RepoError {
+ NotFound {
+ current_directory: PathBuf,
+ },
+ #[from]
+ ConfigParseError(ConfigParseError),
+ #[from]
+ Other(HgError),
+}
+
+impl From<ConfigError> for RepoError {
+ fn from(error: ConfigError) -> Self {
+ match error {
+ ConfigError::Parse(error) => error.into(),
+ ConfigError::Other(error) => error.into(),
+ }
+ }
}
/// Filesystem access abstraction for the contents of a given "base" diretory
@@ -17,32 +42,132 @@
}
impl Repo {
- /// Returns `None` if the given path doesn’t look like a repository
- /// (doesn’t contain a `.hg` sub-directory).
- pub fn for_path(root: impl Into<PathBuf>) -> Self {
- let working_directory = root.into();
- let dot_hg = working_directory.join(".hg");
- Self {
- store: dot_hg.join("store"),
- dot_hg,
- working_directory,
+ /// Search the current directory and its ancestors for a repository:
+ /// a working directory that contains a `.hg` sub-directory.
+ pub fn find(config: &Config) -> Result<Self, RepoError> {
+ let current_directory = crate::utils::current_dir()?;
+ // ancestors() is inclusive: it first yields `current_directory` as-is.
+ for ancestor in current_directory.ancestors() {
+ if ancestor.join(".hg").is_dir() {
+ return Ok(Self::new_at_path(ancestor.to_owned(), config)?);
+ }
}
+ Err(RepoError::NotFound { current_directory })
}
- pub fn find() -> Result<Self, FindRootError> {
- find_root().map(Self::for_path)
- }
+ /// To be called after checking that `.hg` is a sub-directory
+ fn new_at_path(
+ working_directory: PathBuf,
+ config: &Config,
+ ) -> Result<Self, RepoError> {
+ let dot_hg = working_directory.join(".hg");
+
+ let mut repo_config_files = Vec::new();
+ repo_config_files.push(dot_hg.join("hgrc"));
+ repo_config_files.push(dot_hg.join("hgrc-not-shared"));
+
+ let hg_vfs = Vfs { base: &dot_hg };
+ let mut reqs = requirements::load_if_exists(hg_vfs)?;
+ let relative =
+ reqs.contains(requirements::RELATIVE_SHARED_REQUIREMENT);
+ let shared =
+ reqs.contains(requirements::SHARED_REQUIREMENT) || relative;
+
+ // From `mercurial/localrepo.py`:
+ //
+ // if .hg/requires contains the sharesafe requirement, it means
+ // there exists a `.hg/store/requires` too and we should read it
+ // NOTE: presence of SHARESAFE_REQUIREMENT imply that store requirement
+ // is present. We never write SHARESAFE_REQUIREMENT for a repo if store
+ // is not present, refer checkrequirementscompat() for that
+ //
+ // However, if SHARESAFE_REQUIREMENT is not present, it means that the
+ // repository was shared the old way. We check the share source
+ // .hg/requires for SHARESAFE_REQUIREMENT to detect whether the
+ // current repository needs to be reshared
+ let share_safe = reqs.contains(requirements::SHARESAFE_REQUIREMENT);
- pub fn check_requirements(
- &self,
- ) -> Result<(), requirements::RequirementsError> {
- requirements::check(self)
+ let store_path;
+ if !shared {
+ store_path = dot_hg.join("store");
+ if share_safe {
+ reqs.extend(requirements::load(Vfs { base: &store_path })?);
+ }
+ } else {
+ let bytes = hg_vfs.read("sharedpath")?;
+ let mut shared_path = get_path_from_bytes(&bytes).to_owned();
+ if relative {
+ shared_path = dot_hg.join(shared_path)
+ }
+ if !shared_path.is_dir() {
+ return Err(HgError::corrupted(format!(
+ ".hg/sharedpath points to nonexistent directory {}",
+ shared_path.display()
+ ))
+ .into());
+ }
+
+ store_path = shared_path.join("store");
+
+ let source_is_share_safe =
+ requirements::load(Vfs { base: &shared_path })?
+ .contains(requirements::SHARESAFE_REQUIREMENT);
+
+ if share_safe && !source_is_share_safe {
+ return Err(match config
+ .get(b"safe-mismatch", b"source-not-safe")
+ {
+ Some(b"abort") | None => HgError::abort(
+ "share source does not support share-safe requirement",
+ ),
+ _ => HgError::unsupported("share-safe downgrade"),
+ }
+ .into());
+ } else if source_is_share_safe && !share_safe {
+ return Err(
+ match config.get(b"safe-mismatch", b"source-safe") {
+ Some(b"abort") | None => HgError::abort(
+ "version mismatch: source uses share-safe \
+ functionality while the current share does not",
+ ),
+ _ => HgError::unsupported("share-safe upgrade"),
+ }
+ .into(),
+ );
+ }
+
+ if share_safe {
+ repo_config_files.insert(0, shared_path.join("hgrc"))
+ }
+ }
+
+ let repo_config = config.combine_with_repo(&repo_config_files)?;
+
+ let repo = Self {
+ requirements: reqs,
+ working_directory,
+ store: store_path,
+ dot_hg,
+ config: repo_config,
+ };
+
+ requirements::check(&repo)?;
+
+ Ok(repo)
}
pub fn working_directory_path(&self) -> &Path {
&self.working_directory
}
+ pub fn requirements(&self) -> &HashSet<String> {
+ &self.requirements
+ }
+
+ pub fn config(&self) -> &Config {
+ &self.config
+ }
+
/// For accessing repository files (in `.hg`), except for the store
/// (`.hg/store`).
pub(crate) fn hg_vfs(&self) -> Vfs<'_> {
@@ -66,27 +191,26 @@
}
impl Vfs<'_> {
+ pub(crate) fn join(&self, relative_path: impl AsRef<Path>) -> PathBuf {
+ self.base.join(relative_path)
+ }
+
pub(crate) fn read(
&self,
relative_path: impl AsRef<Path>,
- ) -> std::io::Result<Vec<u8>> {
- std::fs::read(self.base.join(relative_path))
- }
-
- pub(crate) fn open(
- &self,
- relative_path: impl AsRef<Path>,
- ) -> std::io::Result<std::fs::File> {
- std::fs::File::open(self.base.join(relative_path))
+ ) -> Result<Vec<u8>, HgError> {
+ let path = self.join(relative_path);
+ std::fs::read(&path).for_file(&path)
}
pub(crate) fn mmap_open(
&self,
relative_path: impl AsRef<Path>,
- ) -> std::io::Result<Mmap> {
- let file = self.open(relative_path)?;
+ ) -> Result<Mmap, HgError> {
+ let path = self.base.join(relative_path);
+ let file = std::fs::File::open(&path).for_file(&path)?;
// TODO: what are the safety requirements here?
- let mmap = unsafe { MmapOptions::new().map(&file) }?;
+ let mmap = unsafe { MmapOptions::new().map(&file) }.for_file(&path)?;
Ok(mmap)
}
}
--- a/rust/hg-core/src/requirements.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/hg-core/src/requirements.rs Thu Feb 11 20:36:46 2021 -0800
@@ -1,19 +1,8 @@
-use crate::repo::Repo;
-use std::io;
+use crate::errors::{HgError, HgResultExt};
+use crate::repo::{Repo, Vfs};
+use std::collections::HashSet;
-#[derive(Debug)]
-pub enum RequirementsError {
- // TODO: include a path?
- Io(io::Error),
- /// The `requires` file is corrupted
- Corrupted,
- /// The repository requires a feature that we don't support
- Unsupported {
- feature: String,
- },
-}
-
-fn parse(bytes: &[u8]) -> Result<Vec<String>, ()> {
+fn parse(bytes: &[u8]) -> Result<HashSet<String>, HgError> {
// The Python code reading this file uses `str.splitlines`
// which looks for a number of line separators (even including a couple of
// non-ASCII ones), but Python code writing it always uses `\n`.
@@ -27,16 +16,20 @@
if line[0].is_ascii_alphanumeric() && line.is_ascii() {
Ok(String::from_utf8(line.into()).unwrap())
} else {
- Err(())
+ Err(HgError::corrupted("parse error in 'requires' file"))
}
})
.collect()
}
-pub fn load(repo: &Repo) -> Result<Vec<String>, RequirementsError> {
- match repo.hg_vfs().read("requires") {
- Ok(bytes) => parse(&bytes).map_err(|()| RequirementsError::Corrupted),
+pub(crate) fn load(hg_vfs: Vfs) -> Result<HashSet<String>, HgError> {
+ parse(&hg_vfs.read("requires")?)
+}
+pub(crate) fn load_if_exists(hg_vfs: Vfs) -> Result<HashSet<String>, HgError> {
+ if let Some(bytes) = hg_vfs.read("requires").io_not_found_as_none()? {
+ parse(&bytes)
+ } else {
// Treat a missing file the same as an empty file.
// From `mercurial/localrepo.py`:
// > requires file contains a newline-delimited list of
@@ -44,18 +37,19 @@
// > the repository. This file was introduced in Mercurial 0.9.2,
// > which means very old repositories may not have one. We assume
// > a missing file translates to no requirements.
- Err(error) if error.kind() == std::io::ErrorKind::NotFound => {
- Ok(Vec::new())
- }
-
- Err(error) => Err(RequirementsError::Io(error))?,
+ Ok(HashSet::new())
}
}
-pub fn check(repo: &Repo) -> Result<(), RequirementsError> {
- for feature in load(repo)? {
- if !SUPPORTED.contains(&&*feature) {
- return Err(RequirementsError::Unsupported { feature });
+pub(crate) fn check(repo: &Repo) -> Result<(), HgError> {
+ for feature in repo.requirements() {
+ if !SUPPORTED.contains(&feature.as_str()) {
+ // TODO: collect all unknown features and include them in the
+ // error message?
+ return Err(HgError::UnsupportedFeature(format!(
+ "repository requires feature unknown to this Mercurial: {}",
+ feature
+ )));
}
}
Ok(())
@@ -67,10 +61,78 @@
"fncache",
"generaldelta",
"revlogv1",
- "sparserevlog",
+ SHARED_REQUIREMENT,
+ SHARESAFE_REQUIREMENT,
+ SPARSEREVLOG_REQUIREMENT,
+ RELATIVE_SHARED_REQUIREMENT,
"store",
// As of this writing everything rhg does is read-only.
// When it starts writing to the repository, it’ll need to either keep the
// persistent nodemap up to date or remove this entry:
"persistent-nodemap",
];
+
+// Copied from mercurial/requirements.py:
+
+/// When narrowing is finalized and no longer subject to format changes,
+/// we should move this to just "narrow" or similar.
+#[allow(unused)]
+pub(crate) const NARROW_REQUIREMENT: &str = "narrowhg-experimental";
+
+/// Enables sparse working directory usage
+#[allow(unused)]
+pub(crate) const SPARSE_REQUIREMENT: &str = "exp-sparse";
+
+/// Enables the internal phase which is used to hide changesets instead
+/// of stripping them
+#[allow(unused)]
+pub(crate) const INTERNAL_PHASE_REQUIREMENT: &str = "internal-phase";
+
+/// Stores manifest in Tree structure
+#[allow(unused)]
+pub(crate) const TREEMANIFEST_REQUIREMENT: &str = "treemanifest";
+
+/// Increment the sub-version when the revlog v2 format changes to lock out old
+/// clients.
+#[allow(unused)]
+pub(crate) const REVLOGV2_REQUIREMENT: &str = "exp-revlogv2.1";
+
+/// A repository with the sparserevlog feature will have delta chains that
+/// can spread over a larger span. Sparse reading cuts these large spans into
+/// pieces, so that each piece isn't too big.
+/// Without the sparserevlog capability, reading from the repository could use
+/// huge amounts of memory, because the whole span would be read at once,
+/// including all the intermediate revisions that aren't pertinent for the
+/// chain. This is why once a repository has enabled sparse-read, it becomes
+/// required.
+#[allow(unused)]
+pub(crate) const SPARSEREVLOG_REQUIREMENT: &str = "sparserevlog";
+
+/// A repository with the sidedataflag requirement will allow to store extra
+/// information for revision without altering their original hashes.
+#[allow(unused)]
+pub(crate) const SIDEDATA_REQUIREMENT: &str = "exp-sidedata-flag";
+
+/// A repository with the copies-sidedata-changeset requirement will store
+/// copies related information in changeset's sidedata.
+#[allow(unused)]
+pub(crate) const COPIESSDC_REQUIREMENT: &str = "exp-copies-sidedata-changeset";
+
+/// The repository use persistent nodemap for the changelog and the manifest.
+#[allow(unused)]
+pub(crate) const NODEMAP_REQUIREMENT: &str = "persistent-nodemap";
+
+/// Denotes that the current repository is a share
+#[allow(unused)]
+pub(crate) const SHARED_REQUIREMENT: &str = "shared";
+
+/// Denotes that current repository is a share and the shared source path is
+/// relative to the current repository root path
+#[allow(unused)]
+pub(crate) const RELATIVE_SHARED_REQUIREMENT: &str = "relshared";
+
+/// A repository with share implemented safely. The repository has different
+/// store and working copy requirements i.e. both `.hg/requires` and
+/// `.hg/store/requires` are present.
+#[allow(unused)]
+pub(crate) const SHARESAFE_REQUIREMENT: &str = "share-safe";
--- a/rust/hg-core/src/revlog.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/hg-core/src/revlog.rs Thu Feb 11 20:36:46 2021 -0800
@@ -9,7 +9,7 @@
pub mod nodemap;
mod nodemap_docket;
pub mod path_encode;
-pub use node::{Node, NodeError, NodePrefix, NodePrefixRef};
+pub use node::{FromHexError, Node, NodePrefix};
pub mod changelog;
pub mod index;
pub mod manifest;
--- a/rust/hg-core/src/revlog/changelog.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/hg-core/src/revlog/changelog.rs Thu Feb 11 20:36:46 2021 -0800
@@ -1,12 +1,13 @@
+use crate::errors::HgError;
use crate::repo::Repo;
use crate::revlog::revlog::{Revlog, RevlogError};
-use crate::revlog::NodePrefixRef;
+use crate::revlog::NodePrefix;
use crate::revlog::Revision;
/// A specialized `Revlog` to work with `changelog` data format.
pub struct Changelog {
/// The generic `revlog` format.
- revlog: Revlog,
+ pub(crate) revlog: Revlog,
}
impl Changelog {
@@ -19,7 +20,7 @@
/// Return the `ChangelogEntry` a given node id.
pub fn get_node(
&self,
- node: NodePrefixRef,
+ node: NodePrefix,
) -> Result<ChangelogEntry, RevlogError> {
let rev = self.revlog.get_node_rev(node)?;
self.get_rev(rev)
@@ -53,6 +54,8 @@
/// Return the node id of the `manifest` referenced by this `changelog`
/// entry.
pub fn manifest_node(&self) -> Result<&[u8], RevlogError> {
- self.lines().next().ok_or(RevlogError::Corrupted)
+ self.lines()
+ .next()
+ .ok_or_else(|| HgError::corrupted("empty changelog entry").into())
}
}
--- a/rust/hg-core/src/revlog/index.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/hg-core/src/revlog/index.rs Thu Feb 11 20:36:46 2021 -0800
@@ -3,6 +3,7 @@
use byteorder::{BigEndian, ByteOrder};
+use crate::errors::HgError;
use crate::revlog::node::Node;
use crate::revlog::revlog::RevlogError;
use crate::revlog::{Revision, NULL_REVISION};
@@ -44,7 +45,8 @@
offsets: Some(offsets),
})
} else {
- Err(RevlogError::Corrupted)
+ Err(HgError::corrupted("unexpected inline revlog length")
+ .into())
}
} else {
Ok(Self {
--- a/rust/hg-core/src/revlog/manifest.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/hg-core/src/revlog/manifest.rs Thu Feb 11 20:36:46 2021 -0800
@@ -1,6 +1,6 @@
use crate::repo::Repo;
use crate::revlog::revlog::{Revlog, RevlogError};
-use crate::revlog::NodePrefixRef;
+use crate::revlog::NodePrefix;
use crate::revlog::Revision;
use crate::utils::hg_path::HgPath;
@@ -20,7 +20,7 @@
/// Return the `ManifestEntry` of a given node id.
pub fn get_node(
&self,
- node: NodePrefixRef,
+ node: NodePrefix,
) -> Result<ManifestEntry, RevlogError> {
let rev = self.revlog.get_node_rev(node)?;
self.get_rev(rev)
--- a/rust/hg-core/src/revlog/node.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/hg-core/src/revlog/node.rs Thu Feb 11 20:36:46 2021 -0800
@@ -8,8 +8,10 @@
//! In Mercurial code base, it is customary to call "a node" the binary SHA
//! of a revision.
-use hex::{self, FromHex, FromHexError};
+use crate::errors::HgError;
+use bytes_cast::BytesCast;
use std::convert::{TryFrom, TryInto};
+use std::fmt;
/// The length in bytes of a `Node`
///
@@ -45,11 +47,10 @@
/// if they need a loop boundary.
///
/// All methods that create a `Node` either take a type that enforces
-/// the size or fail immediately at runtime with [`ExactLengthRequired`].
+/// the size or return an error at runtime.
///
/// [`nybbles_len`]: #method.nybbles_len
-/// [`ExactLengthRequired`]: struct.NodeError#variant.ExactLengthRequired
-#[derive(Clone, Debug, PartialEq)]
+#[derive(Copy, Clone, Debug, PartialEq, BytesCast, derive_more::From)]
#[repr(transparent)]
pub struct Node {
data: NodeData,
@@ -60,32 +61,42 @@
data: [0; NODE_BYTES_LENGTH],
};
-impl From<NodeData> for Node {
- fn from(data: NodeData) -> Node {
- Node { data }
+/// Return an error if the slice has an unexpected length
+impl<'a> TryFrom<&'a [u8]> for &'a Node {
+ type Error = ();
+
+ #[inline]
+ fn try_from(bytes: &'a [u8]) -> Result<Self, Self::Error> {
+ match Node::from_bytes(bytes) {
+ Ok((node, rest)) if rest.is_empty() => Ok(node),
+ _ => Err(()),
+ }
}
}
/// Return an error if the slice has an unexpected length
-impl<'a> TryFrom<&'a [u8]> for &'a Node {
+impl TryFrom<&'_ [u8]> for Node {
type Error = std::array::TryFromSliceError;
#[inline]
- fn try_from(bytes: &'a [u8]) -> Result<&'a Node, Self::Error> {
+ fn try_from(bytes: &'_ [u8]) -> Result<Self, Self::Error> {
let data = bytes.try_into()?;
- // Safety: `#[repr(transparent)]` makes it ok to "wrap" the target
- // of a reference to the type of the single field.
- Ok(unsafe { std::mem::transmute::<&NodeData, &Node>(data) })
+ Ok(Self { data })
}
}
-#[derive(Debug, PartialEq)]
-pub enum NodeError {
- ExactLengthRequired(usize, String),
- PrefixTooLong(String),
- HexError(FromHexError, String),
+impl fmt::LowerHex for Node {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ for &byte in &self.data {
+ write!(f, "{:02x}", byte)?
+ }
+ Ok(())
+ }
}
+#[derive(Debug)]
+pub struct FromHexError;
+
/// Low level utility function, also for prefixes
fn get_nybble(s: &[u8], i: usize) -> u8 {
if i % 2 == 0 {
@@ -117,18 +128,26 @@
///
/// To be used in FFI and I/O only, in order to facilitate future
/// changes of hash format.
- pub fn from_hex(hex: impl AsRef<[u8]>) -> Result<Node, NodeError> {
- Ok(NodeData::from_hex(hex.as_ref())
- .map_err(|e| NodeError::from((e, hex)))?
- .into())
+ pub fn from_hex(hex: impl AsRef<[u8]>) -> Result<Node, FromHexError> {
+ let prefix = NodePrefix::from_hex(hex)?;
+ if prefix.nybbles_len() == NODE_NYBBLES_LENGTH {
+ Ok(Self { data: prefix.data })
+ } else {
+ Err(FromHexError)
+ }
}
- /// Convert to hexadecimal string representation
+ /// `from_hex`, but for input from an internal file of the repository such
+ /// as a changelog or manifest entry.
///
- /// To be used in FFI and I/O only, in order to facilitate future
- /// changes of hash format.
- pub fn encode_hex(&self) -> String {
- hex::encode(self.data)
+ /// An error is treated as repository corruption.
+ pub fn from_hex_for_repo(hex: impl AsRef<[u8]>) -> Result<Node, HgError> {
+ Self::from_hex(hex.as_ref()).map_err(|FromHexError| {
+ HgError::CorruptedRepository(format!(
+ "Expected a full hexadecimal node ID, found {}",
+ String::from_utf8_lossy(hex.as_ref())
+ ))
+ })
}
/// Provide access to binary data
@@ -140,28 +159,19 @@
}
}
-impl<T: AsRef<[u8]>> From<(FromHexError, T)> for NodeError {
- fn from(err_offender: (FromHexError, T)) -> Self {
- let (err, offender) = err_offender;
- let offender = String::from_utf8_lossy(offender.as_ref()).into_owned();
- match err {
- FromHexError::InvalidStringLength => {
- NodeError::ExactLengthRequired(NODE_NYBBLES_LENGTH, offender)
- }
- _ => NodeError::HexError(err, offender),
- }
- }
-}
-
/// The beginning of a binary revision SHA.
///
/// Since it can potentially come from an hexadecimal representation with
/// odd length, it needs to carry around whether the last 4 bits are relevant
/// or not.
-#[derive(Debug, PartialEq)]
+#[derive(Debug, PartialEq, Copy, Clone)]
pub struct NodePrefix {
- buf: Vec<u8>,
- is_odd: bool,
+ /// In `1..=NODE_NYBBLES_LENGTH`
+ nybbles_len: u8,
+ /// The first `4 * nybbles_len` bits are used (considering bits
+ /// within a byte in big-endian: most significant first), the rest
+ /// are zero.
+ data: NodeData,
}
impl NodePrefix {
@@ -172,72 +182,42 @@
///
/// To be used in FFI and I/O only, in order to facilitate future
/// changes of hash format.
- pub fn from_hex(hex: impl AsRef<[u8]>) -> Result<Self, NodeError> {
+ pub fn from_hex(hex: impl AsRef<[u8]>) -> Result<Self, FromHexError> {
let hex = hex.as_ref();
let len = hex.len();
- if len > NODE_NYBBLES_LENGTH {
- return Err(NodeError::PrefixTooLong(
- String::from_utf8_lossy(hex).to_owned().to_string(),
- ));
+ if len > NODE_NYBBLES_LENGTH || len == 0 {
+ return Err(FromHexError);
}
- let is_odd = len % 2 == 1;
- let even_part = if is_odd { &hex[..len - 1] } else { hex };
- let mut buf: Vec<u8> =
- Vec::from_hex(&even_part).map_err(|e| (e, hex))?;
-
- if is_odd {
- let latest_char = char::from(hex[len - 1]);
- let latest_nybble = latest_char.to_digit(16).ok_or_else(|| {
- (
- FromHexError::InvalidHexCharacter {
- c: latest_char,
- index: len - 1,
- },
- hex,
- )
- })? as u8;
- buf.push(latest_nybble << 4);
+ let mut data = [0; NODE_BYTES_LENGTH];
+ let mut nybbles_len = 0;
+ for &ascii_byte in hex {
+ let nybble = match char::from(ascii_byte).to_digit(16) {
+ Some(digit) => digit as u8,
+ None => return Err(FromHexError),
+ };
+ // Fill in the upper half of a byte first, then the lower half.
+ let shift = if nybbles_len % 2 == 0 { 4 } else { 0 };
+ data[nybbles_len as usize / 2] |= nybble << shift;
+ nybbles_len += 1;
}
- Ok(NodePrefix { buf, is_odd })
+ Ok(Self { data, nybbles_len })
}
- pub fn borrow(&self) -> NodePrefixRef {
- NodePrefixRef {
- buf: &self.buf,
- is_odd: self.is_odd,
- }
- }
-}
-
-#[derive(Clone, Debug, PartialEq)]
-pub struct NodePrefixRef<'a> {
- buf: &'a [u8],
- is_odd: bool,
-}
-
-impl<'a> NodePrefixRef<'a> {
- pub fn len(&self) -> usize {
- if self.is_odd {
- self.buf.len() * 2 - 1
- } else {
- self.buf.len() * 2
- }
- }
-
- pub fn is_empty(&self) -> bool {
- self.len() == 0
+ pub fn nybbles_len(&self) -> usize {
+ self.nybbles_len as _
}
pub fn is_prefix_of(&self, node: &Node) -> bool {
- if self.is_odd {
- let buf = self.buf;
- let last_pos = buf.len() - 1;
- node.data.starts_with(buf.split_at(last_pos).0)
- && node.data[last_pos] >> 4 == buf[last_pos] >> 4
- } else {
- node.data.starts_with(self.buf)
+ let full_bytes = self.nybbles_len() / 2;
+ if self.data[..full_bytes] != node.data[..full_bytes] {
+ return false;
}
+ if self.nybbles_len() % 2 == 0 {
+ return true;
+ }
+ let last = self.nybbles_len() - 1;
+ self.get_nybble(last) == node.get_nybble(last)
}
/// Retrieve the `i`th half-byte from the prefix.
@@ -245,8 +225,12 @@
/// This is also the `i`th hexadecimal digit in numeric form,
/// also called a [nybble](https://en.wikipedia.org/wiki/Nibble).
pub fn get_nybble(&self, i: usize) -> u8 {
- assert!(i < self.len());
- get_nybble(self.buf, i)
+ assert!(i < self.nybbles_len());
+ get_nybble(&self.data, i)
+ }
+
+ fn iter_nybbles(&self) -> impl Iterator<Item = u8> + '_ {
+ (0..self.nybbles_len()).map(move |i| get_nybble(&self.data, i))
}
/// Return the index first nybble that's different from `node`
@@ -257,42 +241,49 @@
///
/// Returned index is as in `get_nybble`, i.e., starting at 0.
pub fn first_different_nybble(&self, node: &Node) -> Option<usize> {
- let buf = self.buf;
- let until = if self.is_odd {
- buf.len() - 1
- } else {
- buf.len()
- };
- for (i, item) in buf.iter().enumerate().take(until) {
- if *item != node.data[i] {
- return if *item & 0xf0 == node.data[i] & 0xf0 {
- Some(2 * i + 1)
- } else {
- Some(2 * i)
- };
- }
+ self.iter_nybbles()
+ .zip(NodePrefix::from(*node).iter_nybbles())
+ .position(|(a, b)| a != b)
+ }
+}
+
+impl fmt::LowerHex for NodePrefix {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let full_bytes = self.nybbles_len() / 2;
+ for &byte in &self.data[..full_bytes] {
+ write!(f, "{:02x}", byte)?
}
- if self.is_odd && buf[until] & 0xf0 != node.data[until] & 0xf0 {
- Some(until * 2)
- } else {
- None
+ if self.nybbles_len() % 2 == 1 {
+ let last = self.nybbles_len() - 1;
+ write!(f, "{:x}", self.get_nybble(last))?
+ }
+ Ok(())
+ }
+}
+
+/// A shortcut for full `Node` references
+impl From<&'_ Node> for NodePrefix {
+ fn from(node: &'_ Node) -> Self {
+ NodePrefix {
+ nybbles_len: node.nybbles_len() as _,
+ data: node.data,
}
}
}
/// A shortcut for full `Node` references
-impl<'a> From<&'a Node> for NodePrefixRef<'a> {
- fn from(node: &'a Node) -> Self {
- NodePrefixRef {
- buf: &node.data,
- is_odd: false,
+impl From<Node> for NodePrefix {
+ fn from(node: Node) -> Self {
+ NodePrefix {
+ nybbles_len: node.nybbles_len() as _,
+ data: node.data,
}
}
}
-impl PartialEq<Node> for NodePrefixRef<'_> {
+impl PartialEq<Node> for NodePrefix {
fn eq(&self, other: &Node) -> bool {
- !self.is_odd && self.buf == other.data
+ Self::from(*other) == *self
}
}
@@ -300,18 +291,16 @@
mod tests {
use super::*;
- fn sample_node() -> Node {
- let mut data = [0; NODE_BYTES_LENGTH];
- data.copy_from_slice(&[
+ const SAMPLE_NODE_HEX: &str = "0123456789abcdeffedcba9876543210deadbeef";
+ const SAMPLE_NODE: Node = Node {
+ data: [
0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef, 0xfe, 0xdc, 0xba,
0x98, 0x76, 0x54, 0x32, 0x10, 0xde, 0xad, 0xbe, 0xef,
- ]);
- data.into()
- }
+ ],
+ };
/// Pad an hexadecimal string to reach `NODE_NYBBLES_LENGTH`
- ///check_hash
- /// The padding is made with zeros
+ /// The padding is made with zeros.
pub fn hex_pad_right(hex: &str) -> String {
let mut res = hex.to_string();
while res.len() < NODE_NYBBLES_LENGTH {
@@ -320,135 +309,88 @@
res
}
- fn sample_node_hex() -> String {
- hex_pad_right("0123456789abcdeffedcba9876543210deadbeef")
- }
-
#[test]
fn test_node_from_hex() {
- assert_eq!(Node::from_hex(&sample_node_hex()), Ok(sample_node()));
-
- let mut short = hex_pad_right("0123");
- short.pop();
- short.pop();
- assert_eq!(
- Node::from_hex(&short),
- Err(NodeError::ExactLengthRequired(NODE_NYBBLES_LENGTH, short)),
- );
-
- let not_hex = hex_pad_right("012... oops");
- assert_eq!(
- Node::from_hex(¬_hex),
- Err(NodeError::HexError(
- FromHexError::InvalidHexCharacter { c: '.', index: 3 },
- not_hex,
- )),
- );
+ let not_hex = "012... oops";
+ let too_short = "0123";
+ let too_long = format!("{}0", SAMPLE_NODE_HEX);
+ assert_eq!(Node::from_hex(SAMPLE_NODE_HEX).unwrap(), SAMPLE_NODE);
+ assert!(Node::from_hex(not_hex).is_err());
+ assert!(Node::from_hex(too_short).is_err());
+ assert!(Node::from_hex(&too_long).is_err());
}
#[test]
fn test_node_encode_hex() {
- assert_eq!(sample_node().encode_hex(), sample_node_hex());
+ assert_eq!(format!("{:x}", SAMPLE_NODE), SAMPLE_NODE_HEX);
}
#[test]
- fn test_prefix_from_hex() -> Result<(), NodeError> {
- assert_eq!(
- NodePrefix::from_hex("0e1")?,
- NodePrefix {
- buf: vec![14, 16],
- is_odd: true
- }
- );
+ fn test_prefix_from_to_hex() -> Result<(), FromHexError> {
+ assert_eq!(format!("{:x}", NodePrefix::from_hex("0e1")?), "0e1");
+ assert_eq!(format!("{:x}", NodePrefix::from_hex("0e1a")?), "0e1a");
assert_eq!(
- NodePrefix::from_hex("0e1a")?,
- NodePrefix {
- buf: vec![14, 26],
- is_odd: false
- }
+ format!("{:x}", NodePrefix::from_hex(SAMPLE_NODE_HEX)?),
+ SAMPLE_NODE_HEX
);
-
- // checking limit case
- let node_as_vec = sample_node().data.iter().cloned().collect();
- assert_eq!(
- NodePrefix::from_hex(sample_node_hex())?,
- NodePrefix {
- buf: node_as_vec,
- is_odd: false
- }
- );
-
Ok(())
}
#[test]
fn test_prefix_from_hex_errors() {
- assert_eq!(
- NodePrefix::from_hex("testgr"),
- Err(NodeError::HexError(
- FromHexError::InvalidHexCharacter { c: 't', index: 0 },
- "testgr".to_string()
- ))
- );
- let mut long = NULL_NODE.encode_hex();
+ assert!(NodePrefix::from_hex("testgr").is_err());
+ let mut long = format!("{:x}", NULL_NODE);
long.push('c');
- match NodePrefix::from_hex(&long)
- .expect_err("should be refused as too long")
- {
- NodeError::PrefixTooLong(s) => assert_eq!(s, long),
- err => panic!(format!("Should have been TooLong, got {:?}", err)),
- }
+ assert!(NodePrefix::from_hex(&long).is_err())
}
#[test]
- fn test_is_prefix_of() -> Result<(), NodeError> {
+ fn test_is_prefix_of() -> Result<(), FromHexError> {
let mut node_data = [0; NODE_BYTES_LENGTH];
node_data[0] = 0x12;
node_data[1] = 0xca;
let node = Node::from(node_data);
- assert!(NodePrefix::from_hex("12")?.borrow().is_prefix_of(&node));
- assert!(!NodePrefix::from_hex("1a")?.borrow().is_prefix_of(&node));
- assert!(NodePrefix::from_hex("12c")?.borrow().is_prefix_of(&node));
- assert!(!NodePrefix::from_hex("12d")?.borrow().is_prefix_of(&node));
+ assert!(NodePrefix::from_hex("12")?.is_prefix_of(&node));
+ assert!(!NodePrefix::from_hex("1a")?.is_prefix_of(&node));
+ assert!(NodePrefix::from_hex("12c")?.is_prefix_of(&node));
+ assert!(!NodePrefix::from_hex("12d")?.is_prefix_of(&node));
Ok(())
}
#[test]
- fn test_get_nybble() -> Result<(), NodeError> {
+ fn test_get_nybble() -> Result<(), FromHexError> {
let prefix = NodePrefix::from_hex("dead6789cafe")?;
- assert_eq!(prefix.borrow().get_nybble(0), 13);
- assert_eq!(prefix.borrow().get_nybble(7), 9);
+ assert_eq!(prefix.get_nybble(0), 13);
+ assert_eq!(prefix.get_nybble(7), 9);
Ok(())
}
#[test]
fn test_first_different_nybble_even_prefix() {
let prefix = NodePrefix::from_hex("12ca").unwrap();
- let prefref = prefix.borrow();
let mut node = Node::from([0; NODE_BYTES_LENGTH]);
- assert_eq!(prefref.first_different_nybble(&node), Some(0));
+ assert_eq!(prefix.first_different_nybble(&node), Some(0));
node.data[0] = 0x13;
- assert_eq!(prefref.first_different_nybble(&node), Some(1));
+ assert_eq!(prefix.first_different_nybble(&node), Some(1));
node.data[0] = 0x12;
- assert_eq!(prefref.first_different_nybble(&node), Some(2));
+ assert_eq!(prefix.first_different_nybble(&node), Some(2));
node.data[1] = 0xca;
// now it is a prefix
- assert_eq!(prefref.first_different_nybble(&node), None);
+ assert_eq!(prefix.first_different_nybble(&node), None);
}
#[test]
fn test_first_different_nybble_odd_prefix() {
let prefix = NodePrefix::from_hex("12c").unwrap();
- let prefref = prefix.borrow();
let mut node = Node::from([0; NODE_BYTES_LENGTH]);
- assert_eq!(prefref.first_different_nybble(&node), Some(0));
+ assert_eq!(prefix.first_different_nybble(&node), Some(0));
node.data[0] = 0x13;
- assert_eq!(prefref.first_different_nybble(&node), Some(1));
+ assert_eq!(prefix.first_different_nybble(&node), Some(1));
node.data[0] = 0x12;
- assert_eq!(prefref.first_different_nybble(&node), Some(2));
+ assert_eq!(prefix.first_different_nybble(&node), Some(2));
node.data[1] = 0xca;
// now it is a prefix
- assert_eq!(prefref.first_different_nybble(&node), None);
+ assert_eq!(prefix.first_different_nybble(&node), None);
}
}
--- a/rust/hg-core/src/revlog/nodemap.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/hg-core/src/revlog/nodemap.rs Thu Feb 11 20:36:46 2021 -0800
@@ -13,31 +13,23 @@
//! is used in a more abstract context.
use super::{
- node::NULL_NODE, Node, NodeError, NodePrefix, NodePrefixRef, Revision,
- RevlogIndex, NULL_REVISION,
+ node::NULL_NODE, Node, NodePrefix, Revision, RevlogIndex, NULL_REVISION,
};
+use bytes_cast::{unaligned, BytesCast};
use std::cmp::max;
use std::fmt;
-use std::mem;
+use std::mem::{self, align_of, size_of};
use std::ops::Deref;
use std::ops::Index;
-use std::slice;
#[derive(Debug, PartialEq)]
pub enum NodeMapError {
MultipleResults,
- InvalidNodePrefix(NodeError),
/// A `Revision` stored in the nodemap could not be found in the index
RevisionNotInIndex(Revision),
}
-impl From<NodeError> for NodeMapError {
- fn from(err: NodeError) -> Self {
- NodeMapError::InvalidNodePrefix(err)
- }
-}
-
/// Mapping system from Mercurial nodes to revision numbers.
///
/// ## `RevlogIndex` and `NodeMap`
@@ -82,24 +74,9 @@
fn find_bin<'a>(
&self,
idx: &impl RevlogIndex,
- prefix: NodePrefixRef<'a>,
+ prefix: NodePrefix,
) -> Result<Option<Revision>, NodeMapError>;
- /// Find the unique Revision whose `Node` hexadecimal string representation
- /// starts with a given prefix
- ///
- /// If no Revision matches the given prefix, `Ok(None)` is returned.
- ///
- /// If several Revisions match the given prefix, a [`MultipleResults`]
- /// error is returned.
- fn find_hex(
- &self,
- idx: &impl RevlogIndex,
- prefix: &str,
- ) -> Result<Option<Revision>, NodeMapError> {
- self.find_bin(idx, NodePrefix::from_hex(prefix)?.borrow())
- }
-
/// Give the size of the shortest node prefix that determines
/// the revision uniquely.
///
@@ -114,19 +91,9 @@
fn unique_prefix_len_bin<'a>(
&self,
idx: &impl RevlogIndex,
- node_prefix: NodePrefixRef<'a>,
+ node_prefix: NodePrefix,
) -> Result<Option<usize>, NodeMapError>;
- /// Same as `unique_prefix_len_bin`, with the hexadecimal representation
- /// of the prefix as input.
- fn unique_prefix_len_hex(
- &self,
- idx: &impl RevlogIndex,
- prefix: &str,
- ) -> Result<Option<usize>, NodeMapError> {
- self.unique_prefix_len_bin(idx, NodePrefix::from_hex(prefix)?.borrow())
- }
-
/// Same as `unique_prefix_len_bin`, with a full `Node` as input
fn unique_prefix_len_node(
&self,
@@ -149,7 +116,7 @@
/// Low level NodeTree [`Blocks`] elements
///
/// These are exactly as for instance on persistent storage.
-type RawElement = i32;
+type RawElement = unaligned::I32Be;
/// High level representation of values in NodeTree
/// [`Blocks`](struct.Block.html)
@@ -168,23 +135,24 @@
///
/// See [`Block`](struct.Block.html) for explanation about the encoding.
fn from(raw: RawElement) -> Element {
- if raw >= 0 {
- Element::Block(raw as usize)
- } else if raw == -1 {
+ let int = raw.get();
+ if int >= 0 {
+ Element::Block(int as usize)
+ } else if int == -1 {
Element::None
} else {
- Element::Rev(-raw - 2)
+ Element::Rev(-int - 2)
}
}
}
impl From<Element> for RawElement {
fn from(element: Element) -> RawElement {
- match element {
+ RawElement::from(match element {
Element::None => 0,
- Element::Block(i) => i as RawElement,
+ Element::Block(i) => i as i32,
Element::Rev(rev) => -rev - 2,
- }
+ })
}
}
@@ -212,42 +180,24 @@
/// represented at all, because we want an immutable empty nodetree
/// to be valid.
-#[derive(Copy, Clone)]
-pub struct Block([u8; BLOCK_SIZE]);
+const ELEMENTS_PER_BLOCK: usize = 16; // number of different values in a nybble
-/// Not derivable for arrays of length >32 until const generics are stable
-impl PartialEq for Block {
- fn eq(&self, other: &Self) -> bool {
- self.0[..] == other.0[..]
- }
-}
-
-pub const BLOCK_SIZE: usize = 64;
+#[derive(Copy, Clone, BytesCast, PartialEq)]
+#[repr(transparent)]
+pub struct Block([RawElement; ELEMENTS_PER_BLOCK]);
impl Block {
fn new() -> Self {
- // -1 in 2's complement to create an absent node
- let byte: u8 = 255;
- Block([byte; BLOCK_SIZE])
+ let absent_node = RawElement::from(-1);
+ Block([absent_node; ELEMENTS_PER_BLOCK])
}
fn get(&self, nybble: u8) -> Element {
- let index = nybble as usize * mem::size_of::<RawElement>();
- Element::from(RawElement::from_be_bytes([
- self.0[index],
- self.0[index + 1],
- self.0[index + 2],
- self.0[index + 3],
- ]))
+ self.0[nybble as usize].into()
}
fn set(&mut self, nybble: u8, element: Element) {
- let values = RawElement::to_be_bytes(element.into());
- let index = nybble as usize * mem::size_of::<RawElement>();
- self.0[index] = values[0];
- self.0[index + 1] = values[1];
- self.0[index + 2] = values[2];
- self.0[index + 3] = values[3];
+ self.0[nybble as usize] = element.into()
}
}
@@ -295,7 +245,7 @@
/// Return `None` unless the `Node` for `rev` has given prefix in `index`.
fn has_prefix_or_none(
idx: &impl RevlogIndex,
- prefix: NodePrefixRef,
+ prefix: NodePrefix,
rev: Revision,
) -> Result<Option<Revision>, NodeMapError> {
idx.node(rev)
@@ -316,7 +266,7 @@
/// revision is the only one for a *subprefix* of the one being looked up.
fn validate_candidate(
idx: &impl RevlogIndex,
- prefix: NodePrefixRef,
+ prefix: NodePrefix,
candidate: (Option<Revision>, usize),
) -> Result<(Option<Revision>, usize), NodeMapError> {
let (rev, steps) = candidate;
@@ -398,16 +348,17 @@
// Transmute the `Vec<Block>` to a `Vec<u8>`. Blocks are contiguous
// bytes, so this is perfectly safe.
let bytes = unsafe {
- // Assert that `Block` hasn't been changed and has no padding
- let _: [u8; 4 * BLOCK_SIZE] =
- std::mem::transmute([Block::new(); 4]);
+ // Check for compatible allocation layout.
+ // (Optimized away by constant-folding + dead code elimination.)
+ assert_eq!(size_of::<Block>(), 64);
+ assert_eq!(align_of::<Block>(), 1);
// /!\ Any use of `vec` after this is use-after-free.
// TODO: use `into_raw_parts` once stabilized
Vec::from_raw_parts(
vec.as_ptr() as *mut u8,
- vec.len() * BLOCK_SIZE,
- vec.capacity() * BLOCK_SIZE,
+ vec.len() * size_of::<Block>(),
+ vec.capacity() * size_of::<Block>(),
)
};
(readonly, bytes)
@@ -442,7 +393,7 @@
/// `NodeTree`).
fn lookup(
&self,
- prefix: NodePrefixRef,
+ prefix: NodePrefix,
) -> Result<(Option<Revision>, usize), NodeMapError> {
for (i, visit_item) in self.visit(prefix).enumerate() {
if let Some(opt) = visit_item.final_revision() {
@@ -452,10 +403,7 @@
Err(NodeMapError::MultipleResults)
}
- fn visit<'n, 'p>(
- &'n self,
- prefix: NodePrefixRef<'p>,
- ) -> NodeTreeVisitor<'n, 'p> {
+ fn visit<'n>(&'n self, prefix: NodePrefix) -> NodeTreeVisitor<'n> {
NodeTreeVisitor {
nt: self,
prefix,
@@ -613,7 +561,7 @@
amount: usize,
) -> Self {
assert!(buffer.len() >= amount);
- let len_in_blocks = amount / BLOCK_SIZE;
+ let len_in_blocks = amount / size_of::<Block>();
NodeTreeBytes {
buffer,
len_in_blocks,
@@ -625,18 +573,17 @@
type Target = [Block];
fn deref(&self) -> &[Block] {
- unsafe {
- slice::from_raw_parts(
- (&self.buffer).as_ptr() as *const Block,
- self.len_in_blocks,
- )
- }
+ Block::slice_from_bytes(&self.buffer, self.len_in_blocks)
+ // `NodeTreeBytes::new` already asserted that `self.buffer` is
+ // large enough.
+ .unwrap()
+ .0
}
}
-struct NodeTreeVisitor<'n, 'p> {
+struct NodeTreeVisitor<'n> {
nt: &'n NodeTree,
- prefix: NodePrefixRef<'p>,
+ prefix: NodePrefix,
visit: usize,
nybble_idx: usize,
done: bool,
@@ -649,11 +596,11 @@
element: Element,
}
-impl<'n, 'p> Iterator for NodeTreeVisitor<'n, 'p> {
+impl<'n> Iterator for NodeTreeVisitor<'n> {
type Item = NodeTreeVisitItem;
fn next(&mut self) -> Option<Self::Item> {
- if self.done || self.nybble_idx >= self.prefix.len() {
+ if self.done || self.nybble_idx >= self.prefix.nybbles_len() {
return None;
}
@@ -718,18 +665,18 @@
fn find_bin<'a>(
&self,
idx: &impl RevlogIndex,
- prefix: NodePrefixRef<'a>,
+ prefix: NodePrefix,
) -> Result<Option<Revision>, NodeMapError> {
- validate_candidate(idx, prefix.clone(), self.lookup(prefix)?)
+ validate_candidate(idx, prefix, self.lookup(prefix)?)
.map(|(opt, _shortest)| opt)
}
fn unique_prefix_len_bin<'a>(
&self,
idx: &impl RevlogIndex,
- prefix: NodePrefixRef<'a>,
+ prefix: NodePrefix,
) -> Result<Option<usize>, NodeMapError> {
- validate_candidate(idx, prefix.clone(), self.lookup(prefix)?)
+ validate_candidate(idx, prefix, self.lookup(prefix)?)
.map(|(opt, shortest)| opt.map(|_rev| shortest))
}
}
@@ -774,13 +721,13 @@
let mut raw = [255u8; 64];
let mut counter = 0;
- for val in [0, 15, -2, -1, -3].iter() {
- for byte in RawElement::to_be_bytes(*val).iter() {
+ for val in [0_i32, 15, -2, -1, -3].iter() {
+ for byte in val.to_be_bytes().iter() {
raw[counter] = *byte;
counter += 1;
}
}
- let block = Block(raw);
+ let (block, _) = Block::from_bytes(&raw).unwrap();
assert_eq!(block.get(0), Element::Block(0));
assert_eq!(block.get(1), Element::Block(15));
assert_eq!(block.get(3), Element::None);
@@ -822,6 +769,10 @@
])
}
+ fn hex(s: &str) -> NodePrefix {
+ NodePrefix::from_hex(s).unwrap()
+ }
+
#[test]
fn test_nt_debug() {
let nt = sample_nodetree();
@@ -840,11 +791,11 @@
pad_insert(&mut idx, 1, "1234deadcafe");
let nt = NodeTree::from(vec![block! {1: Rev(1)}]);
- assert_eq!(nt.find_hex(&idx, "1")?, Some(1));
- assert_eq!(nt.find_hex(&idx, "12")?, Some(1));
- assert_eq!(nt.find_hex(&idx, "1234de")?, Some(1));
- assert_eq!(nt.find_hex(&idx, "1a")?, None);
- assert_eq!(nt.find_hex(&idx, "ab")?, None);
+ assert_eq!(nt.find_bin(&idx, hex("1"))?, Some(1));
+ assert_eq!(nt.find_bin(&idx, hex("12"))?, Some(1));
+ assert_eq!(nt.find_bin(&idx, hex("1234de"))?, Some(1));
+ assert_eq!(nt.find_bin(&idx, hex("1a"))?, None);
+ assert_eq!(nt.find_bin(&idx, hex("ab"))?, None);
// and with full binary Nodes
assert_eq!(nt.find_node(&idx, idx.get(&1).unwrap())?, Some(1));
@@ -861,12 +812,12 @@
let nt = sample_nodetree();
- assert_eq!(nt.find_hex(&idx, "0"), Err(MultipleResults));
- assert_eq!(nt.find_hex(&idx, "01"), Ok(Some(9)));
- assert_eq!(nt.find_hex(&idx, "00"), Err(MultipleResults));
- assert_eq!(nt.find_hex(&idx, "00a"), Ok(Some(0)));
- assert_eq!(nt.unique_prefix_len_hex(&idx, "00a"), Ok(Some(3)));
- assert_eq!(nt.find_hex(&idx, "000"), Ok(Some(NULL_REVISION)));
+ assert_eq!(nt.find_bin(&idx, hex("0")), Err(MultipleResults));
+ assert_eq!(nt.find_bin(&idx, hex("01")), Ok(Some(9)));
+ assert_eq!(nt.find_bin(&idx, hex("00")), Err(MultipleResults));
+ assert_eq!(nt.find_bin(&idx, hex("00a")), Ok(Some(0)));
+ assert_eq!(nt.unique_prefix_len_bin(&idx, hex("00a")), Ok(Some(3)));
+ assert_eq!(nt.find_bin(&idx, hex("000")), Ok(Some(NULL_REVISION)));
}
#[test]
@@ -884,13 +835,13 @@
root: block![0: Block(1), 1:Block(3), 12: Rev(2)],
masked_inner_blocks: 1,
};
- assert_eq!(nt.find_hex(&idx, "10")?, Some(1));
- assert_eq!(nt.find_hex(&idx, "c")?, Some(2));
- assert_eq!(nt.unique_prefix_len_hex(&idx, "c")?, Some(1));
- assert_eq!(nt.find_hex(&idx, "00"), Err(MultipleResults));
- assert_eq!(nt.find_hex(&idx, "000")?, Some(NULL_REVISION));
- assert_eq!(nt.unique_prefix_len_hex(&idx, "000")?, Some(3));
- assert_eq!(nt.find_hex(&idx, "01")?, Some(9));
+ assert_eq!(nt.find_bin(&idx, hex("10"))?, Some(1));
+ assert_eq!(nt.find_bin(&idx, hex("c"))?, Some(2));
+ assert_eq!(nt.unique_prefix_len_bin(&idx, hex("c"))?, Some(1));
+ assert_eq!(nt.find_bin(&idx, hex("00")), Err(MultipleResults));
+ assert_eq!(nt.find_bin(&idx, hex("000"))?, Some(NULL_REVISION));
+ assert_eq!(nt.unique_prefix_len_bin(&idx, hex("000"))?, Some(3));
+ assert_eq!(nt.find_bin(&idx, hex("01"))?, Some(9));
assert_eq!(nt.masked_readonly_blocks(), 2);
Ok(())
}
@@ -923,14 +874,14 @@
&self,
prefix: &str,
) -> Result<Option<Revision>, NodeMapError> {
- self.nt.find_hex(&self.index, prefix)
+ self.nt.find_bin(&self.index, hex(prefix))
}
fn unique_prefix_len_hex(
&self,
prefix: &str,
) -> Result<Option<usize>, NodeMapError> {
- self.nt.unique_prefix_len_hex(&self.index, prefix)
+ self.nt.unique_prefix_len_bin(&self.index, hex(prefix))
}
/// Drain `added` and restart a new one
@@ -1108,7 +1059,7 @@
let (_, bytes) = idx.nt.into_readonly_and_added_bytes();
// only the root block has been changed
- assert_eq!(bytes.len(), BLOCK_SIZE);
+ assert_eq!(bytes.len(), size_of::<Block>());
// big endian for -2
assert_eq!(&bytes[4..2 * 4], [255, 255, 255, 254]);
// big endian for -6
--- a/rust/hg-core/src/revlog/nodemap_docket.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/hg-core/src/revlog/nodemap_docket.rs Thu Feb 11 20:36:46 2021 -0800
@@ -1,5 +1,6 @@
+use crate::errors::{HgError, HgResultExt};
+use bytes_cast::{unaligned, BytesCast};
use memmap::Mmap;
-use std::convert::TryInto;
use std::path::{Path, PathBuf};
use super::revlog::RevlogError;
@@ -13,6 +14,16 @@
// TODO: keep here more of the data from `parse()` when we need it
}
+#[derive(BytesCast)]
+#[repr(C)]
+struct DocketHeader {
+ uid_size: u8,
+ _tip_rev: unaligned::U64Be,
+ data_length: unaligned::U64Be,
+ _data_unused: unaligned::U64Be,
+ tip_node_size: unaligned::U64Be,
+}
+
impl NodeMapDocket {
/// Return `Ok(None)` when the caller should proceed without a persistent
/// nodemap:
@@ -28,82 +39,60 @@
index_path: &Path,
) -> Result<Option<(Self, Mmap)>, RevlogError> {
let docket_path = index_path.with_extension("n");
- let docket_bytes = match repo.store_vfs().read(&docket_path) {
- Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
- return Ok(None)
- }
- Err(e) => return Err(RevlogError::IoError(e)),
- Ok(bytes) => bytes,
+ let docket_bytes = if let Some(bytes) =
+ repo.store_vfs().read(&docket_path).io_not_found_as_none()?
+ {
+ bytes
+ } else {
+ return Ok(None);
};
- let mut input = if let Some((&ONDISK_VERSION, rest)) =
+ let input = if let Some((&ONDISK_VERSION, rest)) =
docket_bytes.split_first()
{
rest
} else {
return Ok(None);
};
- let input = &mut input;
- let uid_size = read_u8(input)? as usize;
- let _tip_rev = read_be_u64(input)?;
+ /// Treat any error as a parse error
+ fn parse<T, E>(result: Result<T, E>) -> Result<T, RevlogError> {
+ result.map_err(|_| {
+ HgError::corrupted("nodemap docket parse error").into()
+ })
+ }
+
+ let (header, rest) = parse(DocketHeader::from_bytes(input))?;
+ let uid_size = header.uid_size as usize;
// TODO: do we care about overflow for 4 GB+ nodemap files on 32-bit
// systems?
- let data_length = read_be_u64(input)? as usize;
- let _data_unused = read_be_u64(input)?;
- let tip_node_size = read_be_u64(input)? as usize;
- let uid = read_bytes(input, uid_size)?;
- let _tip_node = read_bytes(input, tip_node_size)?;
-
- let uid =
- std::str::from_utf8(uid).map_err(|_| RevlogError::Corrupted)?;
+ let tip_node_size = header.tip_node_size.get() as usize;
+ let data_length = header.data_length.get() as usize;
+ let (uid, rest) = parse(u8::slice_from_bytes(rest, uid_size))?;
+ let (_tip_node, _rest) =
+ parse(u8::slice_from_bytes(rest, tip_node_size))?;
+ let uid = parse(std::str::from_utf8(uid))?;
let docket = NodeMapDocket { data_length };
let data_path = rawdata_path(&docket_path, uid);
- // TODO: use `std::fs::read` here when the `persistent-nodemap.mmap`
+ // TODO: use `vfs.read()` here when the `persistent-nodemap.mmap`
// config is false?
- match repo.store_vfs().mmap_open(&data_path) {
- Ok(mmap) => {
- if mmap.len() >= data_length {
- Ok(Some((docket, mmap)))
- } else {
- Err(RevlogError::Corrupted)
- }
+ if let Some(mmap) = repo
+ .store_vfs()
+ .mmap_open(&data_path)
+ .io_not_found_as_none()?
+ {
+ if mmap.len() >= data_length {
+ Ok(Some((docket, mmap)))
+ } else {
+ Err(HgError::corrupted("persistent nodemap too short").into())
}
- Err(error) => {
- if error.kind() == std::io::ErrorKind::NotFound {
- Ok(None)
- } else {
- Err(RevlogError::IoError(error))
- }
- }
+ } else {
+ Ok(None)
}
}
}
-fn read_bytes<'a>(
- input: &mut &'a [u8],
- count: usize,
-) -> Result<&'a [u8], RevlogError> {
- if let Some(start) = input.get(..count) {
- *input = &input[count..];
- Ok(start)
- } else {
- Err(RevlogError::Corrupted)
- }
-}
-
-fn read_u8<'a>(input: &mut &[u8]) -> Result<u8, RevlogError> {
- Ok(read_bytes(input, 1)?[0])
-}
-
-fn read_be_u64<'a>(input: &mut &[u8]) -> Result<u64, RevlogError> {
- let array = read_bytes(input, std::mem::size_of::<u64>())?
- .try_into()
- .unwrap();
- Ok(u64::from_be_bytes(array))
-}
-
fn rawdata_path(docket_path: &Path, uid: &str) -> PathBuf {
let docket_name = docket_path
.file_name()
--- a/rust/hg-core/src/revlog/revlog.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/hg-core/src/revlog/revlog.rs Thu Feb 11 20:36:46 2021 -0800
@@ -11,22 +11,37 @@
use zstd;
use super::index::Index;
-use super::node::{NodePrefixRef, NODE_BYTES_LENGTH, NULL_NODE};
+use super::node::{NodePrefix, NODE_BYTES_LENGTH, NULL_NODE};
use super::nodemap;
-use super::nodemap::NodeMap;
+use super::nodemap::{NodeMap, NodeMapError};
use super::nodemap_docket::NodeMapDocket;
use super::patch;
+use crate::errors::HgError;
use crate::repo::Repo;
use crate::revlog::Revision;
+#[derive(derive_more::From)]
pub enum RevlogError {
- IoError(std::io::Error),
- UnsuportedVersion(u16),
InvalidRevision,
/// Found more than one entry whose ID match the requested prefix
AmbiguousPrefix,
- Corrupted,
- UnknowDataFormat(u8),
+ #[from]
+ Other(HgError),
+}
+
+impl From<NodeMapError> for RevlogError {
+ fn from(error: NodeMapError) -> Self {
+ match error {
+ NodeMapError::MultipleResults => RevlogError::AmbiguousPrefix,
+ NodeMapError::RevisionNotInIndex(_) => RevlogError::corrupted(),
+ }
+ }
+}
+
+impl RevlogError {
+ fn corrupted() -> Self {
+ RevlogError::Other(HgError::corrupted("corrupted revlog"))
+ }
}
/// Read only implementation of revlog.
@@ -53,14 +68,12 @@
data_path: Option<&Path>,
) -> Result<Self, RevlogError> {
let index_path = index_path.as_ref();
- let index_mmap = repo
- .store_vfs()
- .mmap_open(&index_path)
- .map_err(RevlogError::IoError)?;
+ let index_mmap = repo.store_vfs().mmap_open(&index_path)?;
let version = get_version(&index_mmap);
if version != 1 {
- return Err(RevlogError::UnsuportedVersion(version));
+ // A proper new version should have had a repo/store requirement.
+ return Err(RevlogError::corrupted());
}
let index = Index::new(Box::new(index_mmap))?;
@@ -74,10 +87,7 @@
None
} else {
let data_path = data_path.unwrap_or(&default_data_path);
- let data_mmap = repo
- .store_vfs()
- .mmap_open(data_path)
- .map_err(RevlogError::IoError)?;
+ let data_mmap = repo.store_vfs().mmap_open(data_path)?;
Some(Box::new(data_mmap))
};
@@ -111,13 +121,11 @@
#[timed]
pub fn get_node_rev(
&self,
- node: NodePrefixRef,
+ node: NodePrefix,
) -> Result<Revision, RevlogError> {
if let Some(nodemap) = &self.nodemap {
return nodemap
- .find_bin(&self.index, node)
- // TODO: propagate details of this error:
- .map_err(|_| RevlogError::Corrupted)?
+ .find_bin(&self.index, node)?
.ok_or(RevlogError::InvalidRevision);
}
@@ -130,7 +138,9 @@
let mut found_by_prefix = None;
for rev in (0..self.len() as Revision).rev() {
let index_entry =
- self.index.get_entry(rev).ok_or(RevlogError::Corrupted)?;
+ self.index.get_entry(rev).ok_or(HgError::corrupted(
+ "revlog references a revision not in the index",
+ ))?;
if node == *index_entry.hash() {
return Ok(rev);
}
@@ -144,6 +154,11 @@
found_by_prefix.ok_or(RevlogError::InvalidRevision)
}
+ /// Returns whether the given revision exists in this revlog.
+ pub fn has_rev(&self, rev: Revision) -> bool {
+ self.index.get_entry(rev).is_some()
+ }
+
/// Return the full data associated to a revision.
///
/// All entries required to build the final data out of deltas will be
@@ -156,8 +171,9 @@
let mut delta_chain = vec![];
while let Some(base_rev) = entry.base_rev {
delta_chain.push(entry);
- entry =
- self.get_entry(base_rev).or(Err(RevlogError::Corrupted))?;
+ entry = self
+ .get_entry(base_rev)
+ .map_err(|_| RevlogError::corrupted())?;
}
// TODO do not look twice in the index
@@ -180,7 +196,7 @@
) {
Ok(data)
} else {
- Err(RevlogError::Corrupted)
+ Err(RevlogError::corrupted())
}
}
@@ -290,7 +306,8 @@
b'x' => Ok(Cow::Owned(self.uncompressed_zlib_data()?)),
// zstd data.
b'\x28' => Ok(Cow::Owned(self.uncompressed_zstd_data()?)),
- format_type => Err(RevlogError::UnknowDataFormat(format_type)),
+ // A proper new format should have had a repo/store requirement.
+ _format_type => Err(RevlogError::corrupted()),
}
}
@@ -300,13 +317,13 @@
let mut buf = Vec::with_capacity(self.compressed_len);
decoder
.read_to_end(&mut buf)
- .or(Err(RevlogError::Corrupted))?;
+ .map_err(|_| RevlogError::corrupted())?;
Ok(buf)
} else {
let mut buf = vec![0; self.uncompressed_len];
decoder
.read_exact(&mut buf)
- .or(Err(RevlogError::Corrupted))?;
+ .map_err(|_| RevlogError::corrupted())?;
Ok(buf)
}
}
@@ -315,14 +332,14 @@
if self.is_delta() {
let mut buf = Vec::with_capacity(self.compressed_len);
zstd::stream::copy_decode(self.bytes, &mut buf)
- .or(Err(RevlogError::Corrupted))?;
+ .map_err(|_| RevlogError::corrupted())?;
Ok(buf)
} else {
let mut buf = vec![0; self.uncompressed_len];
let len = zstd::block::decompress_to_buffer(self.bytes, &mut buf)
- .or(Err(RevlogError::Corrupted))?;
+ .map_err(|_| RevlogError::corrupted())?;
if len != self.uncompressed_len {
- Err(RevlogError::Corrupted)
+ Err(RevlogError::corrupted())
} else {
Ok(buf)
}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/src/revset.rs Thu Feb 11 20:36:46 2021 -0800
@@ -0,0 +1,53 @@
+//! The revset query language
+//!
+//! <https://www.mercurial-scm.org/repo/hg/help/revsets>
+
+use crate::repo::Repo;
+use crate::revlog::changelog::Changelog;
+use crate::revlog::revlog::{Revlog, RevlogError};
+use crate::revlog::NodePrefix;
+use crate::revlog::{Revision, NULL_REVISION};
+
+/// Resolve a query string into a single revision.
+///
+/// Only some of the revset language is implemented yet.
+pub fn resolve_single(
+ input: &str,
+ repo: &Repo,
+) -> Result<Revision, RevlogError> {
+ let changelog = Changelog::open(repo)?;
+
+ match resolve_rev_number_or_hex_prefix(input, &changelog.revlog) {
+ Err(RevlogError::InvalidRevision) => {} // Try other syntax
+ result => return result,
+ }
+
+ if input == "null" {
+ return Ok(NULL_REVISION);
+ }
+
+ // TODO: support for the rest of the language here.
+
+ Err(RevlogError::InvalidRevision)
+}
+
+/// Resolve the small subset of the language suitable for revlogs other than
+/// the changelog, such as in `hg debugdata --manifest` CLI argument.
+///
+/// * A non-negative decimal integer for a revision number, or
+/// * A hexadecimal string, for the unique node ID that starts with this
+/// prefix
+pub fn resolve_rev_number_or_hex_prefix(
+ input: &str,
+ revlog: &Revlog,
+) -> Result<Revision, RevlogError> {
+ if let Ok(integer) = input.parse::<i32>() {
+ if integer >= 0 && revlog.has_rev(integer) {
+ return Ok(integer);
+ }
+ }
+ if let Ok(prefix) = NodePrefix::from_hex(input) {
+ return revlog.get_node_rev(prefix);
+ }
+ Err(RevlogError::InvalidRevision)
+}
--- a/rust/hg-core/src/utils.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/hg-core/src/utils.rs Thu Feb 11 20:36:46 2021 -0800
@@ -7,6 +7,7 @@
//! Contains useful functions, traits, structs, etc. for use in core.
+use crate::errors::{HgError, IoErrorContext};
use crate::utils::hg_path::HgPath;
use std::{io::Write, ops::Deref};
@@ -176,3 +177,17 @@
None
}
}
+
+pub fn current_dir() -> Result<std::path::PathBuf, HgError> {
+ std::env::current_dir().map_err(|error| HgError::IoError {
+ error,
+ context: IoErrorContext::CurrentDir,
+ })
+}
+
+pub fn current_exe() -> Result<std::path::PathBuf, HgError> {
+ std::env::current_exe().map_err(|error| HgError::IoError {
+ error,
+ context: IoErrorContext::CurrentExe,
+ })
+}
--- a/rust/hg-core/src/utils/files.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/hg-core/src/utils/files.rs Thu Feb 11 20:36:46 2021 -0800
@@ -18,7 +18,6 @@
use same_file::is_same_file;
use std::borrow::{Cow, ToOwned};
use std::fs::Metadata;
-use std::io::Read;
use std::iter::FusedIterator;
use std::ops::Deref;
use std::path::{Path, PathBuf};
@@ -309,17 +308,6 @@
}
}
-/// Reads a file in one big chunk instead of doing multiple reads
-pub fn read_whole_file(filepath: &Path) -> std::io::Result<Vec<u8>> {
- let mut file = std::fs::File::open(filepath)?;
- let size = file.metadata()?.len();
-
- let mut res = vec![0; size as usize];
- file.read_exact(&mut res)?;
-
- Ok(res)
-}
-
#[cfg(test)]
mod tests {
use super::*;
--- a/rust/hg-core/src/utils/hg_path.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/hg-core/src/utils/hg_path.rs Thu Feb 11 20:36:46 2021 -0800
@@ -47,57 +47,68 @@
},
}
-impl ToString for HgPathError {
- fn to_string(&self) -> String {
+impl fmt::Display for HgPathError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
HgPathError::LeadingSlash(bytes) => {
- format!("Invalid HgPath '{:?}': has a leading slash.", bytes)
+ write!(f, "Invalid HgPath '{:?}': has a leading slash.", bytes)
}
HgPathError::ConsecutiveSlashes {
bytes,
second_slash_index: pos,
- } => format!(
+ } => write!(
+ f,
"Invalid HgPath '{:?}': consecutive slashes at pos {}.",
bytes, pos
),
HgPathError::ContainsNullByte {
bytes,
null_byte_index: pos,
- } => format!(
+ } => write!(
+ f,
"Invalid HgPath '{:?}': contains null byte at pos {}.",
bytes, pos
),
- HgPathError::DecodeError(bytes) => {
- format!("Invalid HgPath '{:?}': could not be decoded.", bytes)
- }
+ HgPathError::DecodeError(bytes) => write!(
+ f,
+ "Invalid HgPath '{:?}': could not be decoded.",
+ bytes
+ ),
HgPathError::EndsWithSlash(path) => {
- format!("Audit failed for '{}': ends with a slash.", path)
+ write!(f, "Audit failed for '{}': ends with a slash.", path)
}
- HgPathError::ContainsIllegalComponent(path) => format!(
+ HgPathError::ContainsIllegalComponent(path) => write!(
+ f,
"Audit failed for '{}': contains an illegal component.",
path
),
- HgPathError::InsideDotHg(path) => format!(
+ HgPathError::InsideDotHg(path) => write!(
+ f,
"Audit failed for '{}': is inside the '.hg' folder.",
path
),
HgPathError::IsInsideNestedRepo {
path,
nested_repo: nested,
- } => format!(
+ } => {
+ write!(f,
"Audit failed for '{}': is inside a nested repository '{}'.",
path, nested
- ),
- HgPathError::TraversesSymbolicLink { path, symlink } => format!(
+ )
+ }
+ HgPathError::TraversesSymbolicLink { path, symlink } => write!(
+ f,
"Audit failed for '{}': traverses symbolic link '{}'.",
path, symlink
),
- HgPathError::NotFsCompliant(path) => format!(
+ HgPathError::NotFsCompliant(path) => write!(
+ f,
"Audit failed for '{}': cannot be turned into a \
filesystem path.",
path
),
- HgPathError::NotUnderRoot { path, root } => format!(
+ HgPathError::NotUnderRoot { path, root } => write!(
+ f,
"Audit failed for '{}': not under root {}.",
path.display(),
root.display()
@@ -367,7 +378,9 @@
}
}
-#[derive(Default, Eq, Ord, Clone, PartialEq, PartialOrd, Hash)]
+#[derive(
+ Default, Eq, Ord, Clone, PartialEq, PartialOrd, Hash, derive_more::From,
+)]
pub struct HgPathBuf {
inner: Vec<u8>,
}
@@ -408,12 +421,6 @@
}
}
-impl From<Vec<u8>> for HgPathBuf {
- fn from(vec: Vec<u8>) -> Self {
- Self { inner: vec }
- }
-}
-
impl<T: ?Sized + AsRef<HgPath>> From<&T> for HgPathBuf {
fn from(s: &T) -> HgPathBuf {
s.as_ref().to_owned()
--- a/rust/hg-cpython/src/dirstate.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/hg-cpython/src/dirstate.rs Thu Feb 11 20:36:46 2021 -0800
@@ -24,10 +24,7 @@
exc, PyBytes, PyDict, PyErr, PyList, PyModule, PyObject, PyResult,
PySequence, Python,
};
-use hg::{
- utils::hg_path::HgPathBuf, DirstateEntry, DirstateParseError, EntryState,
- StateMap,
-};
+use hg::{utils::hg_path::HgPathBuf, DirstateEntry, EntryState, StateMap};
use libc::{c_char, c_int};
use std::convert::TryFrom;
@@ -79,11 +76,10 @@
.map(|(filename, stats)| {
let stats = stats.extract::<PySequence>(py)?;
let state = stats.get_item(py, 0)?.extract::<PyBytes>(py)?;
- let state = EntryState::try_from(state.data(py)[0]).map_err(
- |e: DirstateParseError| {
+ let state =
+ EntryState::try_from(state.data(py)[0]).map_err(|e| {
PyErr::new::<exc::ValueError, _>(py, e.to_string())
- },
- )?;
+ })?;
let mode = stats.get_item(py, 1)?.extract(py)?;
let size = stats.get_item(py, 2)?.extract(py)?;
let mtime = stats.get_item(py, 3)?.extract(py)?;
--- a/rust/hg-cpython/src/dirstate/dirs_multiset.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/hg-cpython/src/dirstate/dirs_multiset.rs Thu Feb 11 20:36:46 2021 -0800
@@ -18,9 +18,9 @@
use crate::dirstate::extract_dirstate;
use hg::{
+ errors::HgError,
utils::hg_path::{HgPath, HgPathBuf},
- DirsMultiset, DirsMultisetIter, DirstateMapError, DirstateParseError,
- EntryState,
+ DirsMultiset, DirsMultisetIter, DirstateMapError, EntryState,
};
py_class!(pub class Dirs |py| {
@@ -38,7 +38,7 @@
skip_state = Some(
skip.extract::<PyBytes>(py)?.data(py)[0]
.try_into()
- .map_err(|e: DirstateParseError| {
+ .map_err(|e: HgError| {
PyErr::new::<exc::ValueError, _>(py, e.to_string())
})?,
);
@@ -46,7 +46,7 @@
let inner = if let Ok(map) = map.cast_as::<PyDict>(py) {
let dirstate = extract_dirstate(py, &map)?;
DirsMultiset::from_dirstate(&dirstate, skip_state)
- .map_err(|e| {
+ .map_err(|e: DirstateMapError| {
PyErr::new::<exc::ValueError, _>(py, e.to_string())
})?
} else {
--- a/rust/hg-cpython/src/dirstate/dirstate_map.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/hg-cpython/src/dirstate/dirstate_map.rs Thu Feb 11 20:36:46 2021 -0800
@@ -26,10 +26,10 @@
dirstate::{dirs_multiset::Dirs, make_dirstate_tuple},
};
use hg::{
+ errors::HgError,
utils::hg_path::{HgPath, HgPathBuf},
DirsMultiset, DirstateEntry, DirstateMap as RustDirstateMap,
- DirstateMapError, DirstateParents, DirstateParseError, EntryState,
- StateMapIter, PARENT_SIZE,
+ DirstateMapError, DirstateParents, EntryState, StateMapIter, PARENT_SIZE,
};
// TODO
@@ -84,13 +84,13 @@
HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
oldstate.extract::<PyBytes>(py)?.data(py)[0]
.try_into()
- .map_err(|e: DirstateParseError| {
+ .map_err(|e: HgError| {
PyErr::new::<exc::ValueError, _>(py, e.to_string())
})?,
DirstateEntry {
state: state.extract::<PyBytes>(py)?.data(py)[0]
.try_into()
- .map_err(|e: DirstateParseError| {
+ .map_err(|e: HgError| {
PyErr::new::<exc::ValueError, _>(py, e.to_string())
})?,
mode: mode.extract(py)?,
@@ -113,7 +113,7 @@
HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
oldstate.extract::<PyBytes>(py)?.data(py)[0]
.try_into()
- .map_err(|e: DirstateParseError| {
+ .map_err(|e: HgError| {
PyErr::new::<exc::ValueError, _>(py, e.to_string())
})?,
size.extract(py)?,
@@ -137,7 +137,7 @@
HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
oldstate.extract::<PyBytes>(py)?.data(py)[0]
.try_into()
- .map_err(|e: DirstateParseError| {
+ .map_err(|e: HgError| {
PyErr::new::<exc::ValueError, _>(py, e.to_string())
})?,
)
--- a/rust/hg-cpython/src/parsers.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/hg-cpython/src/parsers.rs Thu Feb 11 20:36:46 2021 -0800
@@ -15,8 +15,7 @@
};
use hg::{
pack_dirstate, parse_dirstate, utils::hg_path::HgPathBuf, DirstateEntry,
- DirstatePackError, DirstateParents, DirstateParseError, FastHashMap,
- PARENT_SIZE,
+ DirstateParents, FastHashMap, PARENT_SIZE,
};
use std::convert::TryInto;
@@ -59,21 +58,7 @@
.to_py_object(py),
)
}
- Err(e) => Err(PyErr::new::<exc::ValueError, _>(
- py,
- match e {
- DirstateParseError::TooLittleData => {
- "too little data for parents".to_string()
- }
- DirstateParseError::Overflow => {
- "overflow in dirstate".to_string()
- }
- DirstateParseError::CorruptedEntry(e) => e,
- DirstateParseError::Damaged => {
- "dirstate appears to be damaged".to_string()
- }
- },
- )),
+ Err(e) => Err(PyErr::new::<exc::ValueError, _>(py, e.to_string())),
}
}
@@ -128,18 +113,9 @@
}
Ok(PyBytes::new(py, &packed))
}
- Err(error) => Err(PyErr::new::<exc::ValueError, _>(
- py,
- match error {
- DirstatePackError::CorruptedParent => {
- "expected a 20-byte hash".to_string()
- }
- DirstatePackError::CorruptedEntry(e) => e,
- DirstatePackError::BadSize(expected, actual) => {
- format!("bad dirstate size: {} != {}", actual, expected)
- }
- },
- )),
+ Err(error) => {
+ Err(PyErr::new::<exc::ValueError, _>(py, error.to_string()))
+ }
}
}
--- a/rust/hg-cpython/src/revlog.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/hg-cpython/src/revlog.rs Thu Feb 11 20:36:46 2021 -0800
@@ -17,8 +17,8 @@
};
use hg::{
nodemap::{Block, NodeMapError, NodeTree},
- revlog::{nodemap::NodeMap, RevlogIndex},
- NodeError, Revision,
+ revlog::{nodemap::NodeMap, NodePrefix, RevlogIndex},
+ Revision,
};
use std::cell::RefCell;
@@ -64,7 +64,7 @@
let nt = opt.as_ref().unwrap();
let idx = &*self.cindex(py).borrow();
let node = node_from_py_bytes(py, &node)?;
- nt.find_bin(idx, (&node).into()).map_err(|e| nodemap_error(py, e))
+ nt.find_bin(idx, node.into()).map_err(|e| nodemap_error(py, e))
}
/// same as `get_rev()` but raises a bare `error.RevlogError` if node
@@ -107,7 +107,9 @@
String::from_utf8_lossy(node.data(py)).to_string()
};
- nt.find_hex(idx, &node_as_string)
+ let prefix = NodePrefix::from_hex(&node_as_string).map_err(|_| PyErr::new::<ValueError, _>(py, "Invalid node or prefix"))?;
+
+ nt.find_bin(idx, prefix)
// TODO make an inner API returning the node directly
.map(|opt| opt.map(
|rev| PyBytes::new(py, idx.node(rev).unwrap().as_bytes())))
@@ -468,17 +470,9 @@
match err {
NodeMapError::MultipleResults => revlog_error(py),
NodeMapError::RevisionNotInIndex(r) => rev_not_in_index(py, r),
- NodeMapError::InvalidNodePrefix(s) => invalid_node_prefix(py, &s),
}
}
-fn invalid_node_prefix(py: Python, ne: &NodeError) -> PyErr {
- PyErr::new::<ValueError, _>(
- py,
- format!("Invalid node or prefix: {:?}", ne),
- )
-}
-
/// Create the module, with __package__ given from parent
pub fn init_module(py: Python, package: &str) -> PyResult<PyModule> {
let dotted_name = &format!("{}.revlog", package);
--- a/rust/rhg/Cargo.toml Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/rhg/Cargo.toml Thu Feb 11 20:36:46 2021 -0800
@@ -10,6 +10,7 @@
[dependencies]
hg-core = { path = "../hg-core"}
clap = "2.33.1"
+derive_more = "0.99"
log = "0.4.11"
micro-timer = "0.3.1"
env_logger = "0.7.1"
--- a/rust/rhg/src/commands.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/rhg/src/commands.rs Thu Feb 11 20:36:46 2021 -0800
@@ -5,10 +5,11 @@
pub mod root;
use crate::error::CommandError;
use crate::ui::Ui;
+use hg::config::Config;
/// The common trait for rhg commands
///
/// Normalize the interface of the commands provided by rhg
pub trait Command {
- fn run(&self, ui: &Ui) -> Result<(), CommandError>;
+ fn run(&self, ui: &Ui, config: &Config) -> Result<(), CommandError>;
}
--- a/rust/rhg/src/commands/cat.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/rhg/src/commands/cat.rs Thu Feb 11 20:36:46 2021 -0800
@@ -1,8 +1,8 @@
use crate::commands::Command;
-use crate::error::{CommandError, CommandErrorKind};
-use crate::ui::utf8_to_local;
+use crate::error::CommandError;
use crate::ui::Ui;
-use hg::operations::{cat, CatRevError, CatRevErrorKind};
+use hg::config::Config;
+use hg::operations::cat;
use hg::repo::Repo;
use hg::utils::hg_path::HgPathBuf;
use micro_timer::timed;
@@ -30,76 +30,29 @@
impl<'a> Command for CatCommand<'a> {
#[timed]
- fn run(&self, ui: &Ui) -> Result<(), CommandError> {
- let repo = Repo::find()?;
- repo.check_requirements()?;
- let cwd = std::env::current_dir()
- .or_else(|e| Err(CommandErrorKind::CurrentDirNotFound(e)))?;
+ fn run(&self, ui: &Ui, config: &Config) -> Result<(), CommandError> {
+ let repo = Repo::find(config)?;
+ let cwd = hg::utils::current_dir()?;
let mut files = vec![];
for file in self.files.iter() {
+ // TODO: actually normalize `..` path segments etc?
let normalized = cwd.join(&file);
let stripped = normalized
.strip_prefix(&repo.working_directory_path())
- .or(Err(CommandErrorKind::Abort(None)))?;
+ // TODO: error message for path arguments outside of the repo
+ .map_err(|_| CommandError::abort(""))?;
let hg_file = HgPathBuf::try_from(stripped.to_path_buf())
- .or(Err(CommandErrorKind::Abort(None)))?;
+ .map_err(|e| CommandError::abort(e.to_string()))?;
files.push(hg_file);
}
match self.rev {
Some(rev) => {
- let data = cat(&repo, rev, &files)
- .map_err(|e| map_rev_error(rev, e))?;
+ let data = cat(&repo, rev, &files).map_err(|e| (e, rev))?;
self.display(ui, &data)
}
- None => Err(CommandErrorKind::Unimplemented.into()),
+ None => Err(CommandError::Unimplemented.into()),
}
}
}
-
-/// Convert `CatRevErrorKind` to `CommandError`
-fn map_rev_error(rev: &str, err: CatRevError) -> CommandError {
- CommandError {
- kind: match err.kind {
- CatRevErrorKind::IoError(err) => CommandErrorKind::Abort(Some(
- utf8_to_local(&format!("abort: {}\n", err)).into(),
- )),
- CatRevErrorKind::InvalidRevision => CommandErrorKind::Abort(Some(
- utf8_to_local(&format!(
- "abort: invalid revision identifier {}\n",
- rev
- ))
- .into(),
- )),
- CatRevErrorKind::AmbiguousPrefix => CommandErrorKind::Abort(Some(
- utf8_to_local(&format!(
- "abort: ambiguous revision identifier {}\n",
- rev
- ))
- .into(),
- )),
- CatRevErrorKind::UnsuportedRevlogVersion(version) => {
- CommandErrorKind::Abort(Some(
- utf8_to_local(&format!(
- "abort: unsupported revlog version {}\n",
- version
- ))
- .into(),
- ))
- }
- CatRevErrorKind::CorruptedRevlog => CommandErrorKind::Abort(Some(
- "abort: corrupted revlog\n".into(),
- )),
- CatRevErrorKind::UnknowRevlogDataFormat(format) => {
- CommandErrorKind::Abort(Some(
- utf8_to_local(&format!(
- "abort: unknow revlog dataformat {:?}\n",
- format
- ))
- .into(),
- ))
- }
- },
- }
-}
--- a/rust/rhg/src/commands/debugdata.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/rhg/src/commands/debugdata.rs Thu Feb 11 20:36:46 2021 -0800
@@ -1,10 +1,8 @@
use crate::commands::Command;
-use crate::error::{CommandError, CommandErrorKind};
-use crate::ui::utf8_to_local;
+use crate::error::CommandError;
use crate::ui::Ui;
-use hg::operations::{
- debug_data, DebugDataError, DebugDataErrorKind, DebugDataKind,
-};
+use hg::config::Config;
+use hg::operations::{debug_data, DebugDataKind};
use hg::repo::Repo;
use micro_timer::timed;
@@ -25,10 +23,10 @@
impl<'a> Command for DebugDataCommand<'a> {
#[timed]
- fn run(&self, ui: &Ui) -> Result<(), CommandError> {
- let repo = Repo::find()?;
+ fn run(&self, ui: &Ui, config: &Config) -> Result<(), CommandError> {
+ let repo = Repo::find(config)?;
let data = debug_data(&repo, self.rev, self.kind)
- .map_err(|e| to_command_error(self.rev, e))?;
+ .map_err(|e| (e, self.rev))?;
let mut stdout = ui.stdout_buffer();
stdout.write_all(&data)?;
@@ -37,55 +35,3 @@
Ok(())
}
}
-
-/// Convert operation errors to command errors
-fn to_command_error(rev: &str, err: DebugDataError) -> CommandError {
- match err.kind {
- DebugDataErrorKind::IoError(err) => CommandError {
- kind: CommandErrorKind::Abort(Some(
- utf8_to_local(&format!("abort: {}\n", err)).into(),
- )),
- },
- DebugDataErrorKind::InvalidRevision => CommandError {
- kind: CommandErrorKind::Abort(Some(
- utf8_to_local(&format!(
- "abort: invalid revision identifier{}\n",
- rev
- ))
- .into(),
- )),
- },
- DebugDataErrorKind::AmbiguousPrefix => CommandError {
- kind: CommandErrorKind::Abort(Some(
- utf8_to_local(&format!(
- "abort: ambiguous revision identifier{}\n",
- rev
- ))
- .into(),
- )),
- },
- DebugDataErrorKind::UnsuportedRevlogVersion(version) => CommandError {
- kind: CommandErrorKind::Abort(Some(
- utf8_to_local(&format!(
- "abort: unsupported revlog version {}\n",
- version
- ))
- .into(),
- )),
- },
- DebugDataErrorKind::CorruptedRevlog => CommandError {
- kind: CommandErrorKind::Abort(Some(
- "abort: corrupted revlog\n".into(),
- )),
- },
- DebugDataErrorKind::UnknowRevlogDataFormat(format) => CommandError {
- kind: CommandErrorKind::Abort(Some(
- utf8_to_local(&format!(
- "abort: unknow revlog dataformat {:?}\n",
- format
- ))
- .into(),
- )),
- },
- }
-}
--- a/rust/rhg/src/commands/debugrequirements.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/rhg/src/commands/debugrequirements.rs Thu Feb 11 20:36:46 2021 -0800
@@ -1,8 +1,8 @@
use crate::commands::Command;
use crate::error::CommandError;
use crate::ui::Ui;
+use hg::config::Config;
use hg::repo::Repo;
-use hg::requirements;
pub const HELP_TEXT: &str = "
Print the current repo requirements.
@@ -17,11 +17,13 @@
}
impl Command for DebugRequirementsCommand {
- fn run(&self, ui: &Ui) -> Result<(), CommandError> {
- let repo = Repo::find()?;
+ fn run(&self, ui: &Ui, config: &Config) -> Result<(), CommandError> {
+ let repo = Repo::find(config)?;
let mut output = String::new();
- for req in requirements::load(&repo)? {
- output.push_str(&req);
+ let mut requirements: Vec<_> = repo.requirements().iter().collect();
+ requirements.sort();
+ for req in requirements {
+ output.push_str(req);
output.push('\n');
}
ui.write_stdout(output.as_bytes())?;
--- a/rust/rhg/src/commands/files.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/rhg/src/commands/files.rs Thu Feb 11 20:36:46 2021 -0800
@@ -1,14 +1,9 @@
use crate::commands::Command;
-use crate::error::{CommandError, CommandErrorKind};
-use crate::ui::utf8_to_local;
+use crate::error::CommandError;
use crate::ui::Ui;
-use hg::operations::{
- list_rev_tracked_files, ListRevTrackedFilesError,
- ListRevTrackedFilesErrorKind,
-};
-use hg::operations::{
- Dirstate, ListDirstateTrackedFilesError, ListDirstateTrackedFilesErrorKind,
-};
+use hg::config::Config;
+use hg::operations::list_rev_tracked_files;
+use hg::operations::Dirstate;
use hg::repo::Repo;
use hg::utils::files::{get_bytes_from_path, relativize_path};
use hg::utils::hg_path::{HgPath, HgPathBuf};
@@ -34,8 +29,7 @@
repo: &Repo,
files: impl IntoIterator<Item = &'a HgPath>,
) -> Result<(), CommandError> {
- let cwd = std::env::current_dir()
- .or_else(|e| Err(CommandErrorKind::CurrentDirNotFound(e)))?;
+ let cwd = hg::utils::current_dir()?;
let rooted_cwd = cwd
.strip_prefix(repo.working_directory_path())
.expect("cwd was already checked within the repository");
@@ -53,90 +47,16 @@
}
impl<'a> Command for FilesCommand<'a> {
- fn run(&self, ui: &Ui) -> Result<(), CommandError> {
- let repo = Repo::find()?;
- repo.check_requirements()?;
+ fn run(&self, ui: &Ui, config: &Config) -> Result<(), CommandError> {
+ let repo = Repo::find(config)?;
if let Some(rev) = self.rev {
- let files = list_rev_tracked_files(&repo, rev)
- .map_err(|e| map_rev_error(rev, e))?;
+ let files =
+ list_rev_tracked_files(&repo, rev).map_err(|e| (e, rev))?;
self.display_files(ui, &repo, files.iter())
} else {
- let distate = Dirstate::new(&repo).map_err(map_dirstate_error)?;
- let files = distate.tracked_files().map_err(map_dirstate_error)?;
+ let distate = Dirstate::new(&repo)?;
+ let files = distate.tracked_files()?;
self.display_files(ui, &repo, files)
}
}
}
-
-/// Convert `ListRevTrackedFilesErrorKind` to `CommandError`
-fn map_rev_error(rev: &str, err: ListRevTrackedFilesError) -> CommandError {
- CommandError {
- kind: match err.kind {
- ListRevTrackedFilesErrorKind::IoError(err) => {
- CommandErrorKind::Abort(Some(
- utf8_to_local(&format!("abort: {}\n", err)).into(),
- ))
- }
- ListRevTrackedFilesErrorKind::InvalidRevision => {
- CommandErrorKind::Abort(Some(
- utf8_to_local(&format!(
- "abort: invalid revision identifier {}\n",
- rev
- ))
- .into(),
- ))
- }
- ListRevTrackedFilesErrorKind::AmbiguousPrefix => {
- CommandErrorKind::Abort(Some(
- utf8_to_local(&format!(
- "abort: ambiguous revision identifier {}\n",
- rev
- ))
- .into(),
- ))
- }
- ListRevTrackedFilesErrorKind::UnsuportedRevlogVersion(version) => {
- CommandErrorKind::Abort(Some(
- utf8_to_local(&format!(
- "abort: unsupported revlog version {}\n",
- version
- ))
- .into(),
- ))
- }
- ListRevTrackedFilesErrorKind::CorruptedRevlog => {
- CommandErrorKind::Abort(Some(
- "abort: corrupted revlog\n".into(),
- ))
- }
- ListRevTrackedFilesErrorKind::UnknowRevlogDataFormat(format) => {
- CommandErrorKind::Abort(Some(
- utf8_to_local(&format!(
- "abort: unknow revlog dataformat {:?}\n",
- format
- ))
- .into(),
- ))
- }
- },
- }
-}
-
-/// Convert `ListDirstateTrackedFilesError` to `CommandError`
-fn map_dirstate_error(err: ListDirstateTrackedFilesError) -> CommandError {
- CommandError {
- kind: match err.kind {
- ListDirstateTrackedFilesErrorKind::IoError(err) => {
- CommandErrorKind::Abort(Some(
- utf8_to_local(&format!("abort: {}\n", err)).into(),
- ))
- }
- ListDirstateTrackedFilesErrorKind::ParseError(_) => {
- CommandErrorKind::Abort(Some(
- // TODO find a better error message
- b"abort: parse error\n".to_vec(),
- ))
- }
- },
- }
-}
--- a/rust/rhg/src/commands/root.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/rhg/src/commands/root.rs Thu Feb 11 20:36:46 2021 -0800
@@ -2,6 +2,7 @@
use crate::error::CommandError;
use crate::ui::Ui;
use format_bytes::format_bytes;
+use hg::config::Config;
use hg::repo::Repo;
use hg::utils::files::get_bytes_from_path;
@@ -20,8 +21,8 @@
}
impl Command for RootCommand {
- fn run(&self, ui: &Ui) -> Result<(), CommandError> {
- let repo = Repo::find()?;
+ fn run(&self, ui: &Ui, config: &Config) -> Result<(), CommandError> {
+ let repo = Repo::find(config)?;
let bytes = get_bytes_from_path(repo.working_directory_path());
ui.write_stdout(&format_bytes!(b"{}\n", bytes.as_slice()))?;
Ok(())
--- a/rust/rhg/src/error.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/rhg/src/error.rs Thu Feb 11 20:36:46 2021 -0800
@@ -1,124 +1,115 @@
-use crate::exitcode;
+use crate::ui::utf8_to_local;
use crate::ui::UiError;
use format_bytes::format_bytes;
-use hg::operations::{FindRootError, FindRootErrorKind};
-use hg::requirements::RequirementsError;
+use hg::config::{ConfigError, ConfigParseError};
+use hg::errors::HgError;
+use hg::repo::RepoError;
+use hg::revlog::revlog::RevlogError;
use hg::utils::files::get_bytes_from_path;
use std::convert::From;
-use std::path::PathBuf;
/// The kind of command error
#[derive(Debug)]
-pub enum CommandErrorKind {
- /// The root of the repository cannot be found
- RootNotFound(PathBuf),
- /// The current directory cannot be found
- CurrentDirNotFound(std::io::Error),
- /// `.hg/requires`
- RequirementsError(RequirementsError),
- /// The standard output stream cannot be written to
- StdoutError,
- /// The standard error stream cannot be written to
- StderrError,
- /// The command aborted
- Abort(Option<Vec<u8>>),
+pub enum CommandError {
+ /// Exit with an error message and "standard" failure exit code.
+ Abort { message: Vec<u8> },
+
/// A mercurial capability as not been implemented.
+ ///
+ /// There is no error message printed in this case.
+ /// Instead, we exit with a specific status code and a wrapper script may
+ /// fallback to Python-based Mercurial.
Unimplemented,
}
-impl CommandErrorKind {
- pub fn get_exit_code(&self) -> exitcode::ExitCode {
- match self {
- CommandErrorKind::RootNotFound(_) => exitcode::ABORT,
- CommandErrorKind::CurrentDirNotFound(_) => exitcode::ABORT,
- CommandErrorKind::RequirementsError(
- RequirementsError::Unsupported { .. },
- ) => exitcode::UNIMPLEMENTED_COMMAND,
- CommandErrorKind::RequirementsError(_) => exitcode::ABORT,
- CommandErrorKind::StdoutError => exitcode::ABORT,
- CommandErrorKind::StderrError => exitcode::ABORT,
- CommandErrorKind::Abort(_) => exitcode::ABORT,
- CommandErrorKind::Unimplemented => exitcode::UNIMPLEMENTED_COMMAND,
+impl CommandError {
+ pub fn abort(message: impl AsRef<str>) -> Self {
+ CommandError::Abort {
+ // TODO: bytes-based (instead of Unicode-based) formatting
+ // of error messages to handle non-UTF-8 filenames etc:
+ // https://www.mercurial-scm.org/wiki/EncodingStrategy#Mixing_output
+ message: utf8_to_local(message.as_ref()).into(),
}
}
+}
- /// Return the message corresponding to the error kind if any
- pub fn get_error_message_bytes(&self) -> Option<Vec<u8>> {
- match self {
- CommandErrorKind::RootNotFound(path) => {
- let bytes = get_bytes_from_path(path);
- Some(format_bytes!(
- b"abort: no repository found in '{}' (.hg not found)!\n",
- bytes.as_slice()
- ))
- }
- CommandErrorKind::CurrentDirNotFound(e) => Some(format_bytes!(
- b"abort: error getting current working directory: {}\n",
- e.to_string().as_bytes(),
- )),
- CommandErrorKind::RequirementsError(
- RequirementsError::Corrupted,
- ) => Some(
- "abort: .hg/requires is corrupted\n".as_bytes().to_owned(),
- ),
- CommandErrorKind::Abort(message) => message.to_owned(),
- _ => None,
+impl From<HgError> for CommandError {
+ fn from(error: HgError) -> Self {
+ match error {
+ HgError::UnsupportedFeature(_) => CommandError::Unimplemented,
+ _ => CommandError::abort(error.to_string()),
}
}
}
-/// The error type for the Command trait
-#[derive(Debug)]
-pub struct CommandError {
- pub kind: CommandErrorKind,
-}
-
-impl CommandError {
- /// Exist the process with the corresponding exit code.
- pub fn exit(&self) {
- std::process::exit(self.kind.get_exit_code())
- }
-
- /// Return the message corresponding to the command error if any
- pub fn get_error_message_bytes(&self) -> Option<Vec<u8>> {
- self.kind.get_error_message_bytes()
+impl From<UiError> for CommandError {
+ fn from(_error: UiError) -> Self {
+ // If we already failed writing to stdout or stderr,
+ // writing an error message to stderr about it would be likely to fail
+ // too.
+ CommandError::abort("")
}
}
-impl From<CommandErrorKind> for CommandError {
- fn from(kind: CommandErrorKind) -> Self {
- CommandError { kind }
+impl From<RepoError> for CommandError {
+ fn from(error: RepoError) -> Self {
+ match error {
+ RepoError::NotFound { current_directory } => CommandError::Abort {
+ message: format_bytes!(
+ b"no repository found in '{}' (.hg not found)!",
+ get_bytes_from_path(current_directory)
+ ),
+ },
+ RepoError::ConfigParseError(error) => error.into(),
+ RepoError::Other(error) => error.into(),
+ }
}
}
-impl From<UiError> for CommandError {
- fn from(error: UiError) -> Self {
- CommandError {
- kind: match error {
- UiError::StdoutError(_) => CommandErrorKind::StdoutError,
- UiError::StderrError(_) => CommandErrorKind::StderrError,
- },
+impl From<ConfigError> for CommandError {
+ fn from(error: ConfigError) -> Self {
+ match error {
+ ConfigError::Parse(error) => error.into(),
+ ConfigError::Other(error) => error.into(),
}
}
}
-impl From<FindRootError> for CommandError {
- fn from(err: FindRootError) -> Self {
- match err.kind {
- FindRootErrorKind::RootNotFound(path) => CommandError {
- kind: CommandErrorKind::RootNotFound(path),
- },
- FindRootErrorKind::GetCurrentDirError(e) => CommandError {
- kind: CommandErrorKind::CurrentDirNotFound(e),
- },
+impl From<ConfigParseError> for CommandError {
+ fn from(error: ConfigParseError) -> Self {
+ let ConfigParseError {
+ origin,
+ line,
+ bytes,
+ } = error;
+ let line_message = if let Some(line_number) = line {
+ format_bytes!(b" at line {}", line_number.to_string().into_bytes())
+ } else {
+ Vec::new()
+ };
+ CommandError::Abort {
+ message: format_bytes!(
+ b"config parse error in {}{}: '{}'",
+ origin.to_bytes(),
+ line_message,
+ bytes
+ ),
}
}
}
-impl From<RequirementsError> for CommandError {
- fn from(err: RequirementsError) -> Self {
- CommandError {
- kind: CommandErrorKind::RequirementsError(err),
+impl From<(RevlogError, &str)> for CommandError {
+ fn from((err, rev): (RevlogError, &str)) -> CommandError {
+ match err {
+ RevlogError::InvalidRevision => CommandError::abort(format!(
+ "invalid revision identifier {}",
+ rev
+ )),
+ RevlogError::AmbiguousPrefix => CommandError::abort(format!(
+ "ambiguous revision identifier {}",
+ rev
+ )),
+ RevlogError::Other(error) => error.into(),
}
}
}
--- a/rust/rhg/src/exitcode.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/rhg/src/exitcode.rs Thu Feb 11 20:36:46 2021 -0800
@@ -6,5 +6,5 @@
/// Generic abort
pub const ABORT: ExitCode = 255;
-/// Command not implemented by rhg
-pub const UNIMPLEMENTED_COMMAND: ExitCode = 252;
+/// Command or feature not implemented by rhg
+pub const UNIMPLEMENTED: ExitCode = 252;
--- a/rust/rhg/src/main.rs Wed Feb 10 23:03:54 2021 +0100
+++ b/rust/rhg/src/main.rs Thu Feb 11 20:36:46 2021 -0800
@@ -5,6 +5,7 @@
use clap::ArgGroup;
use clap::ArgMatches;
use clap::SubCommand;
+use format_bytes::format_bytes;
use hg::operations::DebugDataKind;
use std::convert::TryFrom;
@@ -91,46 +92,54 @@
let matches = app.clone().get_matches_safe().unwrap_or_else(|err| {
let _ = ui::Ui::new().writeln_stderr_str(&err.message);
- std::process::exit(exitcode::UNIMPLEMENTED_COMMAND)
+ std::process::exit(exitcode::UNIMPLEMENTED)
});
let ui = ui::Ui::new();
let command_result = match_subcommand(matches, &ui);
- match command_result {
- Ok(_) => std::process::exit(exitcode::OK),
- Err(e) => {
- let message = e.get_error_message_bytes();
- if let Some(msg) = message {
- match ui.write_stderr(&msg) {
- Ok(_) => (),
- Err(_) => std::process::exit(exitcode::ABORT),
- };
- };
- e.exit()
+ let exit_code = match command_result {
+ Ok(_) => exitcode::OK,
+
+ // Exit with a specific code and no error message to let a potential
+ // wrapper script fallback to Python-based Mercurial.
+ Err(CommandError::Unimplemented) => exitcode::UNIMPLEMENTED,
+
+ Err(CommandError::Abort { message }) => {
+ if !message.is_empty() {
+ // Ignore errors when writing to stderr, we’re already exiting
+ // with failure code so there’s not much more we can do.
+ let _ =
+ ui.write_stderr(&format_bytes!(b"abort: {}\n", message));
+ }
+ exitcode::ABORT
}
- }
+ };
+ std::process::exit(exit_code)
}
fn match_subcommand(
matches: ArgMatches,
ui: &ui::Ui,
) -> Result<(), CommandError> {
+ let config = hg::config::Config::load()?;
+
match matches.subcommand() {
- ("root", _) => commands::root::RootCommand::new().run(&ui),
+ ("root", _) => commands::root::RootCommand::new().run(&ui, &config),
("files", Some(matches)) => {
- commands::files::FilesCommand::try_from(matches)?.run(&ui)
+ commands::files::FilesCommand::try_from(matches)?.run(&ui, &config)
}
("cat", Some(matches)) => {
- commands::cat::CatCommand::try_from(matches)?.run(&ui)
+ commands::cat::CatCommand::try_from(matches)?.run(&ui, &config)
}
("debugdata", Some(matches)) => {
- commands::debugdata::DebugDataCommand::try_from(matches)?.run(&ui)
+ commands::debugdata::DebugDataCommand::try_from(matches)?
+ .run(&ui, &config)
}
("debugrequirements", _) => {
commands::debugrequirements::DebugRequirementsCommand::new()
- .run(&ui)
+ .run(&ui, &config)
}
_ => unreachable!(), // Because of AppSettings::SubcommandRequired,
}
--- a/setup.py Wed Feb 10 23:03:54 2021 +0100
+++ b/setup.py Thu Feb 11 20:36:46 2021 -0800
@@ -609,6 +609,12 @@
# and its build is not explictely disabled (for external build
# as Linux distributions would do)
if self.distribution.rust and self.rust:
+ if not sys.platform.startswith('linux'):
+ self.warn(
+ "rust extensions have only been tested on Linux "
+ "and may not behave correctly on other platforms"
+ )
+
for rustext in ruststandalones:
rustext.build('' if self.inplace else self.build_lib)
--- a/tests/hghave.py Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/hghave.py Thu Feb 11 20:36:46 2021 -0800
@@ -591,7 +591,7 @@
return matchoutput("pylint --help", br"Usage:[ ]+pylint", True)
-@check("clang-format", "clang-format C code formatter")
+@check("clang-format", "clang-format C code formatter (>= 11)")
def has_clang_format():
m = matchoutput('clang-format --version', br'clang-format version (\d+)')
# style changed somewhere between 10.x and 11.x
@@ -1034,7 +1034,7 @@
return matchoutput('sqlite3 -version', br'^3\.\d+')
-@check('vcr', 'vcr http mocking library')
+@check('vcr', 'vcr http mocking library (pytest-vcr)')
def has_vcr():
try:
import vcr
@@ -1054,7 +1054,7 @@
return matchoutput('emacs --version', b'GNU Emacs 2(4.4|4.5|5|6|7|8|9)')
-@check('black', 'the black formatter for python')
+@check('black', 'the black formatter for python (>= 20.8b1)')
def has_black():
blackcmd = 'black --version'
version_regex = b'black, version ([0-9a-b.]+)'
--- a/tests/run-tests.py Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/run-tests.py Thu Feb 11 20:36:46 2021 -0800
@@ -2278,7 +2278,7 @@
if test.path.endswith(b'.t'):
rename(test.errpath, test.path)
else:
- rename(test.errpath, '%s.out' % test.path)
+ rename(test.errpath, b'%s.out' % test.path)
accepted = True
if not accepted:
self.faildata[test.name] = b''.join(lines)
--- a/tests/svnxml.py Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/svnxml.py Thu Feb 11 20:36:46 2021 -0800
@@ -15,6 +15,7 @@
e['revision'] = entry.getAttribute('revision')
e['author'] = xmltext(entry.getElementsByTagName('author')[0])
e['msg'] = xmltext(entry.getElementsByTagName('msg')[0])
+ e['date'] = xmltext(entry.getElementsByTagName('date')[0])
e['paths'] = []
paths = entry.getElementsByTagName('paths')
if paths:
@@ -42,7 +43,7 @@
except AttributeError:
fp = sys.stdout
for e in entries:
- for k in ('revision', 'author', 'msg'):
+ for k in ('revision', 'author', 'date', 'msg'):
fp.write(('%s: %s\n' % (k, e[k])).encode('utf-8'))
for path, action, fpath, frev in sorted(e['paths']):
frominfo = b''
--- a/tests/test-acl.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-acl.t Thu Feb 11 20:36:46 2021 -0800
@@ -204,6 +204,7 @@
bundle2-input-part: "phase-heads" supported
bundle2-input-part: total payload size 24
bundle2-input-bundle: 5 parts total
+ truncating cache/rbc-revs-v1 to 8
updating the branch cache
added 3 changesets with 3 changes to 3 files
bundle2-output-bundle: "HG20", 1 parts total
@@ -283,6 +284,7 @@
bundle2-input-part: "phase-heads" supported
bundle2-input-part: total payload size 24
bundle2-input-bundle: 5 parts total
+ truncating cache/rbc-revs-v1 to 8
updating the branch cache
added 3 changesets with 3 changes to 3 files
bundle2-output-bundle: "HG20", 1 parts total
@@ -806,6 +808,7 @@
acl: acl.deny.bookmarks not enabled
acl: bookmark access granted: "ef1ea85a6374b77d6da9dcda9541f498f2d17df7" on bookmark "moving-bookmark"
bundle2-input-bundle: 7 parts total
+ truncating cache/rbc-revs-v1 to 8
updating the branch cache
invalid branch cache (served.hidden): tip differs
added 1 changesets with 1 changes to 1 files
@@ -982,6 +985,7 @@
bundle2-input-part: "phase-heads" supported
bundle2-input-part: total payload size 24
bundle2-input-bundle: 5 parts total
+ truncating cache/rbc-revs-v1 to 8
updating the branch cache
added 3 changesets with 3 changes to 3 files
bundle2-output-bundle: "HG20", 1 parts total
@@ -1318,6 +1322,7 @@
bundle2-input-part: "phase-heads" supported
bundle2-input-part: total payload size 24
bundle2-input-bundle: 5 parts total
+ truncating cache/rbc-revs-v1 to 8
updating the branch cache
added 3 changesets with 3 changes to 3 files
bundle2-output-bundle: "HG20", 1 parts total
@@ -1408,6 +1413,7 @@
bundle2-input-part: "phase-heads" supported
bundle2-input-part: total payload size 24
bundle2-input-bundle: 5 parts total
+ truncating cache/rbc-revs-v1 to 8
updating the branch cache
added 3 changesets with 3 changes to 3 files
bundle2-output-bundle: "HG20", 1 parts total
@@ -1577,6 +1583,7 @@
bundle2-input-part: "phase-heads" supported
bundle2-input-part: total payload size 24
bundle2-input-bundle: 5 parts total
+ truncating cache/rbc-revs-v1 to 8
updating the branch cache
added 3 changesets with 3 changes to 3 files
bundle2-output-bundle: "HG20", 1 parts total
--- a/tests/test-audit-subrepo.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-audit-subrepo.t Thu Feb 11 20:36:46 2021 -0800
@@ -323,7 +323,7 @@
new changesets 7a2f0e59146f
.hgsubstate: untracked file differs
abort: untracked files in working directory differ from files in requested revision
- [255]
+ [20]
$ cat main5/.hg/hgrc | grep pwned
[1]
@@ -623,7 +623,7 @@
new changesets * (glob)
.hgsubstate: untracked file differs
abort: untracked files in working directory differ from files in requested revision
- [255]
+ [20]
$ ls "$FAKEHOME"
a
$ test -d "$FAKEHOME/.hg"
@@ -652,7 +652,7 @@
new changesets * (glob)
.hgsubstate: untracked file differs
abort: untracked files in working directory differ from files in requested revision
- [255]
+ [20]
$ ls -A "$FAKEHOME"
.hg
a
--- a/tests/test-batching.py Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-batching.py Thu Feb 11 20:36:46 2021 -0800
@@ -204,7 +204,7 @@
@wireprotov1peer.batchable
def foo(self, one, two=None):
- encargs = [
+ encoded_args = [
(
b'one',
mangle(one),
@@ -214,9 +214,9 @@
mangle(two),
),
]
- encresref = wireprotov1peer.future()
- yield encargs, encresref
- yield unmangle(encresref.value)
+ encoded_res_future = wireprotov1peer.future()
+ yield encoded_args, encoded_res_future
+ yield unmangle(encoded_res_future.value)
@wireprotov1peer.batchable
def bar(self, b, a):
--- a/tests/test-bookmarks-pushpull.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-bookmarks-pushpull.t Thu Feb 11 20:36:46 2021 -0800
@@ -1177,7 +1177,7 @@
searching for changes
no changes found
abort: prepushkey hook exited with status 1
- [255]
+ [40]
#endif
--- a/tests/test-bookmarks.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-bookmarks.t Thu Feb 11 20:36:46 2021 -0800
@@ -1125,7 +1125,7 @@
transaction abort!
rollback completed
abort: pretxnclose hook exited with status 1
- [255]
+ [40]
$ cp .hg/bookmarks.pending.saved .hg/bookmarks.pending
(check visible bookmarks while transaction running in repo)
@@ -1158,7 +1158,7 @@
transaction abort!
rollback completed
abort: pretxnclose hook exited with status 1
- [255]
+ [40]
Check pretxnclose-bookmark can abort a transaction
--------------------------------------------------
@@ -1242,7 +1242,7 @@
transaction abort!
rollback completed
abort: pretxnclose-bookmark.force-public hook exited with status 1
- [255]
+ [40]
create on a public changeset
@@ -1254,4 +1254,4 @@
transaction abort!
rollback completed
abort: pretxnclose-bookmark.force-forward hook exited with status 1
- [255]
+ [40]
--- a/tests/test-bundle2-exchange.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-bundle2-exchange.t Thu Feb 11 20:36:46 2021 -0800
@@ -638,7 +638,7 @@
remote: Cleaning up the mess...
remote: rollback completed
abort: pretxnclose.failpush hook exited with status 1
- [255]
+ [40]
$ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
pushing to ssh://user@dummy/other
@@ -699,7 +699,7 @@
remote: Cleaning up the mess...
remote: rollback completed
abort: pretxnchangegroup hook exited with status 1
- [255]
+ [40]
$ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
pushing to ssh://user@dummy/other
searching for changes
@@ -747,7 +747,7 @@
Cleaning up the mess...
rollback completed
abort: pretxnchangegroup hook exited with status 1
- [255]
+ [40]
$ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
pushing to ssh://user@dummy/other
searching for changes
--- a/tests/test-check-code.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-check-code.t Thu Feb 11 20:36:46 2021 -0800
@@ -11,6 +11,7 @@
> -X contrib/python-zstandard \
> -X hgext/fsmonitor/pywatchman \
> -X mercurial/thirdparty \
+ > -X mercurial/pythoncapi_compat.h \
> | sed 's-\\-/-g' | "$check_code" --warnings --per-file=0 - || false
Skipping contrib/automation/hgautomation/__init__.py it has no-che?k-code (glob)
Skipping contrib/automation/hgautomation/aws.py it has no-che?k-code (glob)
--- a/tests/test-churn.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-churn.t Thu Feb 11 20:36:46 2021 -0800
@@ -195,3 +195,22 @@
alltogether 11 *********************************************************
$ cd ..
+
+count lines that look like headings but are not
+
+ $ hg init not-headers
+ $ cd not-headers
+ $ cat > a <<EOF
+ > diff
+ > @@ -195,3 +195,21 @@
+ > -- a/tests/test-churn.t
+ > ++ b/tests/test-churn.t
+ > EOF
+ $ hg ci -Am adda -u user1
+ adding a
+ $ hg churn --diffstat
+ user1 +4/-0 ++++++++++++++++++++++++++++++++++++++++++++++++++++++
+ $ hg rm a
+ $ hg ci -Am removea -u user1
+ $ hg churn --diffstat
+ user1 +4/-4 +++++++++++++++++++++++++++---------------------------
--- a/tests/test-commandserver.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-commandserver.t Thu Feb 11 20:36:46 2021 -0800
@@ -522,7 +522,7 @@
transaction abort!
rollback completed
abort: pretxncommit hook exited with status 1
- [255]
+ [40]
*** runcommand verify
checking changesets
checking manifests
@@ -1013,7 +1013,7 @@
transaction abort!
rollback completed
abort: pretxncommit hook exited with status 1
- [255]
+ [40]
*** runcommand log
*** runcommand verify -q
@@ -1057,7 +1057,7 @@
transaction abort!
rollback completed
abort: pretxncommit hook exited with status 1
- [255]
+ [40]
*** runcommand log
0 bar (bar)
*** runcommand verify -q
--- a/tests/test-commit-amend.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-commit-amend.t Thu Feb 11 20:36:46 2021 -0800
@@ -209,7 +209,7 @@
transaction abort!
rollback completed
abort: pretxncommit.test-saving-last-message hook exited with status 1
- [255]
+ [40]
$ cat .hg/last-message.txt
message given from command line (no-eol)
@@ -234,7 +234,7 @@
transaction abort!
rollback completed
abort: pretxncommit.test-saving-last-message hook exited with status 1
- [255]
+ [40]
$ cat .hg/last-message.txt
another precious commit message
--- a/tests/test-completion.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-completion.t Thu Feb 11 20:36:46 2021 -0800
@@ -38,6 +38,7 @@
paths
phase
pull
+ purge
push
recover
remove
@@ -129,6 +130,7 @@
debugrevspec
debugserve
debugsetparents
+ debugshell
debugsidedata
debugssl
debugstrip
@@ -270,7 +272,7 @@
debugbuilddag: mergeable-file, overwritten-file, new-file
debugbundle: all, part-type, spec
debugcapabilities:
- debugchangedfiles:
+ debugchangedfiles: compute
debugcheckstate:
debugcolor: style
debugcommands:
@@ -281,7 +283,7 @@
debugdate: extended
debugdeltachain: changelog, manifest, dir, template
debugdirstate: nodates, dates, datesort
- debugdiscovery: old, nonheads, rev, seed, ssh, remotecmd, insecure
+ debugdiscovery: old, nonheads, rev, seed, local-as-revs, remote-as-revs, ssh, remotecmd, insecure
debugdownload: output
debugextensions: template
debugfileset: rev, all-files, show-matcher, show-stage
@@ -318,6 +320,7 @@
debugrevspec: optimize, show-revs, show-set, show-stage, no-optimized, verify-optimized
debugserve: sshstdio, logiofd, logiofile
debugsetparents:
+ debugshell:
debugsidedata: changelog, manifest, dir
debugssl:
debugstrip: rev, force, no-backup, nobackup, , keep, bookmark, soft
@@ -333,7 +336,7 @@
debugwhyunstable:
debugwireargs: three, four, five, ssh, remotecmd, insecure
debugwireproto: localssh, peer, noreadstderr, nologhandshake, ssh, remotecmd, insecure
- diff: rev, from, to, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, unified, stat, root, include, exclude, subrepos
+ diff: rev, from, to, change, merge, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, unified, stat, root, include, exclude, subrepos
export: bookmark, output, switch-parent, rev, text, git, binary, nodates, template
files: rev, print0, include, exclude, template, subrepos
forget: interactive, include, exclude, dry-run
@@ -354,6 +357,7 @@
paths: template
phase: public, draft, secret, force, rev
pull: update, force, confirm, rev, bookmark, branch, ssh, remotecmd, insecure
+ purge: abort-on-err, all, ignored, dirs, files, print, print0, confirm, include, exclude
push: force, rev, bookmark, all-bookmarks, branch, new-branch, pushvars, publish, ssh, remotecmd, insecure
recover: verify
remove: after, force, subrepos, include, exclude, dry-run
--- a/tests/test-config.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-config.t Thu Feb 11 20:36:46 2021 -0800
@@ -388,3 +388,52 @@
> done
$ HGRCPATH=configs hg config section.key
99
+
+Configuration priority
+======================
+
+setup necessary file
+
+ $ cat > file-A.rc << EOF
+ > [config-test]
+ > basic = value-A
+ > pre-include= value-A
+ > %include ./included.rc
+ > post-include= value-A
+ > EOF
+
+ $ cat > file-B.rc << EOF
+ > [config-test]
+ > basic = value-B
+ > EOF
+
+
+ $ cat > included.rc << EOF
+ > [config-test]
+ > pre-include= value-included
+ > post-include= value-included
+ > EOF
+
+Simple order checking
+---------------------
+
+If file B is read after file A, values from B overwrite values from A.
+
+ $ HGRCPATH="file-A.rc:file-B.rc" hg config config-test.basic
+ value-B
+
+Ordering from include
+---------------------
+
+a value from an include overwrites a value defined before the include, but not one defined after the include
+
+ $ HGRCPATH="file-A.rc" hg config config-test.pre-include
+ value-included
+ $ HGRCPATH="file-A.rc" hg config config-test.post-include
+ value-A
+
+command line override
+---------------------
+
+ $ HGRCPATH="file-A.rc:file-B.rc" hg config config-test.basic --config config-test.basic=value-CLI
+ value-CLI
--- a/tests/test-contrib-perf.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-contrib-perf.t Thu Feb 11 20:36:46 2021 -0800
@@ -78,111 +78,137 @@
list of commands:
- perfaddremove
+ perf::addremove
+ (no help text available)
+ perf::ancestors
(no help text available)
- perfancestors
+ perf::ancestorset
(no help text available)
- perfancestorset
+ perf::annotate
(no help text available)
- perfannotate (no help text available)
- perfbdiff benchmark a bdiff between revisions
- perfbookmarks
+ perf::bdiff benchmark a bdiff between revisions
+ perf::bookmarks
benchmark parsing bookmarks from disk to memory
- perfbranchmap
+ perf::branchmap
benchmark the update of a branchmap
- perfbranchmapload
+ perf::branchmapload
benchmark reading the branchmap
- perfbranchmapupdate
+ perf::branchmapupdate
benchmark branchmap update from for <base> revs to <target>
revs
- perfbundleread
+ perf::bundleread
Benchmark reading of bundle files.
- perfcca (no help text available)
- perfchangegroupchangelog
+ perf::cca (no help text available)
+ perf::changegroupchangelog
Benchmark producing a changelog group for a changegroup.
- perfchangeset
+ perf::changeset
+ (no help text available)
+ perf::ctxfiles
(no help text available)
- perfctxfiles (no help text available)
- perfdiffwd Profile diff of working directory changes
- perfdirfoldmap
+ perf::diffwd Profile diff of working directory changes
+ perf::dirfoldmap
benchmap a 'dirstate._map.dirfoldmap.get()' request
- perfdirs (no help text available)
- perfdirstate benchmap the time of various distate operations
- perfdirstatedirs
+ perf::dirs (no help text available)
+ perf::dirstate
+ benchmap the time of various distate operations
+ perf::dirstatedirs
benchmap a 'dirstate.hasdir' call from an empty 'dirs' cache
- perfdirstatefoldmap
+ perf::dirstatefoldmap
benchmap a 'dirstate._map.filefoldmap.get()' request
- perfdirstatewrite
+ perf::dirstatewrite
benchmap the time it take to write a dirstate on disk
- perfdiscovery
+ perf::discovery
benchmark discovery between local repo and the peer at given
path
- perffncacheencode
+ perf::fncacheencode
(no help text available)
- perffncacheload
+ perf::fncacheload
(no help text available)
- perffncachewrite
+ perf::fncachewrite
(no help text available)
- perfheads benchmark the computation of a changelog heads
- perfhelper-mergecopies
+ perf::heads benchmark the computation of a changelog heads
+ perf::helper-mergecopies
find statistics about potential parameters for
'perfmergecopies'
- perfhelper-pathcopies
+ perf::helper-pathcopies
find statistic about potential parameters for the
'perftracecopies'
- perfignore benchmark operation related to computing ignore
- perfindex benchmark index creation time followed by a lookup
- perflinelogedits
+ perf::ignore benchmark operation related to computing ignore
+ perf::index benchmark index creation time followed by a lookup
+ perf::linelogedits
(no help text available)
- perfloadmarkers
+ perf::loadmarkers
benchmark the time to parse the on-disk markers for a repo
- perflog (no help text available)
- perflookup (no help text available)
- perflrucachedict
+ perf::log (no help text available)
+ perf::lookup (no help text available)
+ perf::lrucachedict
(no help text available)
- perfmanifest benchmark the time to read a manifest from disk and return a
+ perf::manifest
+ benchmark the time to read a manifest from disk and return a
usable
- perfmergecalculate
+ perf::mergecalculate
(no help text available)
- perfmergecopies
+ perf::mergecopies
measure runtime of 'copies.mergecopies'
- perfmoonwalk benchmark walking the changelog backwards
- perfnodelookup
+ perf::moonwalk
+ benchmark walking the changelog backwards
+ perf::nodelookup
(no help text available)
- perfnodemap benchmark the time necessary to look up revision from a cold
+ perf::nodemap
+ benchmark the time necessary to look up revision from a cold
nodemap
- perfparents benchmark the time necessary to fetch one changeset's parents.
- perfpathcopies
+ perf::parents
+ benchmark the time necessary to fetch one changeset's parents.
+ perf::pathcopies
benchmark the copy tracing logic
- perfphases benchmark phasesets computation
- perfphasesremote
+ perf::phases benchmark phasesets computation
+ perf::phasesremote
benchmark time needed to analyse phases of the remote server
- perfprogress printing of progress bars
- perfrawfiles (no help text available)
- perfrevlogchunks
+ perf::progress
+ printing of progress bars
+ perf::rawfiles
+ (no help text available)
+ perf::revlogchunks
Benchmark operations on revlog chunks.
- perfrevlogindex
+ perf::revlogindex
Benchmark operations against a revlog index.
- perfrevlogrevision
+ perf::revlogrevision
Benchmark obtaining a revlog revision.
- perfrevlogrevisions
+ perf::revlogrevisions
Benchmark reading a series of revisions from a revlog.
- perfrevlogwrite
+ perf::revlogwrite
Benchmark writing a series of revisions to a revlog.
- perfrevrange (no help text available)
- perfrevset benchmark the execution time of a revset
- perfstartup (no help text available)
- perfstatus benchmark the performance of a single status call
- perftags (no help text available)
- perftemplating
+ perf::revrange
+ (no help text available)
+ perf::revset benchmark the execution time of a revset
+ perf::startup
+ (no help text available)
+ perf::status benchmark the performance of a single status call
+ perf::tags (no help text available)
+ perf::templating
test the rendering time of a given template
- perfunidiff benchmark a unified diff between revisions
- perfvolatilesets
+ perf::unidiff
+ benchmark a unified diff between revisions
+ perf::volatilesets
benchmark the computation of various volatile set
- perfwalk (no help text available)
- perfwrite microbenchmark ui.write (and others)
+ perf::walk (no help text available)
+ perf::write microbenchmark ui.write (and others)
(use 'hg help -v perf' to show built-in aliases and global options)
+
+ $ hg help perfaddremove
+ hg perf::addremove
+
+ aliases: perfaddremove
+
+ (no help text available)
+
+ options:
+
+ -T --template TEMPLATE display with template
+
+ (some details hidden, use --verbose to show complete help)
+
$ hg perfaddremove
$ hg perfancestors
$ hg perfancestorset 2
--- a/tests/test-convert-svn-sink.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-convert-svn-sink.t Thu Feb 11 20:36:46 2021 -0800
@@ -54,10 +54,12 @@
2 2 test a
revision: 2
author: test
+ date: * (glob)
msg: modify a file
M /a
revision: 1
author: test
+ date: * (glob)
msg: add a file
A /a
A /d1
@@ -95,6 +97,7 @@
3 3 test b
revision: 3
author: test
+ date: * (glob)
msg: rename a file
D /a
A /b (from /a@2)
@@ -131,6 +134,7 @@
4 4 test c
revision: 4
author: test
+ date: * (glob)
msg: copy a file
A /c (from /b@3)
$ ls a a-hg-wc
@@ -167,6 +171,7 @@
5 5 test .
revision: 5
author: test
+ date: * (glob)
msg: remove a file
D /b
$ ls a a-hg-wc
@@ -209,6 +214,7 @@
6 6 test c
revision: 6
author: test
+ date: * (glob)
msg: make a file executable
M /c
#if execbit
@@ -247,6 +253,7 @@
8 8 test newlink
revision: 8
author: test
+ date: * (glob)
msg: move symlink
D /link
A /newlink (from /link@7)
@@ -278,6 +285,7 @@
7 7 test f
revision: 7
author: test
+ date: * (glob)
msg: f
D /c
A /d
@@ -315,6 +323,7 @@
1 1 test d1/a
revision: 1
author: test
+ date: * (glob)
msg: add executable file in new directory
A /d1
A /d1/a
@@ -343,6 +352,7 @@
2 2 test d2/a
revision: 2
author: test
+ date: * (glob)
msg: copy file to new directory
A /d2
A /d2/a (from /d1/a@1)
@@ -416,21 +426,25 @@
4 4 test right-2
revision: 4
author: test
+ date: * (glob)
msg: merge
A /right-1
A /right-2
revision: 3
author: test
+ date: * (glob)
msg: left-2
M /b
A /left-2
revision: 2
author: test
+ date: * (glob)
msg: left-1
M /b
A /left-1
revision: 1
author: test
+ date: * (glob)
msg: base
A /b
@@ -459,10 +473,12 @@
2 2 test .hgtags
revision: 2
author: test
+ date: * (glob)
msg: Tagged as v1.0
A /.hgtags
revision: 1
author: test
+ date: * (glob)
msg: Add file a
A /a
$ rm -rf a a-hg a-hg-wc
@@ -494,10 +510,12 @@
2 2 test exec
revision: 2
author: test
+ date: * (glob)
msg: remove executable bit
M /exec
revision: 1
author: test
+ date: * (glob)
msg: create executable
A /exec
$ test ! -x a-hg-wc/exec
@@ -540,11 +558,77 @@
2 2 test b
revision: 2
author: test
+ date: * (glob)
msg: Another change
A /b
revision: 1
author: test
+ date: * (glob)
msg: Some change
A /a
$ rm -rf a a-hg a-hg-wc
+
+Commit dates conversion
+
+ $ hg init a
+
+ $ echo a >> a/a
+ $ hg add a
+ adding a/a
+ $ hg --cwd a ci -d '1 0' -A -m 'Change 1'
+
+ $ echo a >> a/a
+ $ hg --cwd a ci -d '2 0' -m 'Change 2'
+
+ $ echo a >> a/a
+ $ hg --cwd a ci -d '2 0' -m 'Change at the same time'
+
+ $ echo a >> a/a
+ $ hg --cwd a ci -d '1 0' -m 'Change in the past'
+
+ $ echo a >> a/a
+ $ hg --cwd a ci -d '3 0' -m 'Change in the future'
+
+ $ hg convert --config convert.svn.dangerous-set-commit-dates=true -d svn a
+ assuming destination a-hg
+ initializing svn repository 'a-hg'
+ initializing svn working copy 'a-hg-wc'
+ scanning source...
+ sorting...
+ converting...
+ 4 Change 1
+ 3 Change 2
+ 2 Change at the same time
+ 1 Change in the past
+ 0 Change in the future
+ $ svnupanddisplay a-hg-wc 0
+ 5 5 test .
+ 5 5 test a
+ revision: 5
+ author: test
+ date: 1970-01-01T00:00:03.000000Z
+ msg: Change in the future
+ M /a
+ revision: 4
+ author: test
+ date: 1970-01-01T00:00:01.000000Z
+ msg: Change in the past
+ M /a
+ revision: 3
+ author: test
+ date: 1970-01-01T00:00:02.000000Z
+ msg: Change at the same time
+ M /a
+ revision: 2
+ author: test
+ date: 1970-01-01T00:00:02.000000Z
+ msg: Change 2
+ M /a
+ revision: 1
+ author: test
+ date: 1970-01-01T00:00:01.000000Z
+ msg: Change 1
+ A /a
+
+ $ rm -rf a a-hg a-hg-wc
--- a/tests/test-convert.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-convert.t Thu Feb 11 20:36:46 2021 -0800
@@ -388,6 +388,23 @@
does not convert tags from the source repo to the target
repo. The default is False.
+ Subversion Destination
+ ######################
+
+ Original commit dates are not preserved by default.
+
+ convert.svn.dangerous-set-commit-dates
+ preserve original commit dates, forcefully setting
+ "svn:date" revision properties. This option is DANGEROUS and
+ may break some subversion functionality for the resulting
+ repository (e.g. filtering revisions with date ranges in
+ "svn log"), as original commit dates are not guaranteed to
+ be monotonically increasing.
+
+ For commit dates setting to work destination repository must have "pre-
+ revprop-change" hook configured to allow setting of "svn:date" revision
+ properties. See Subversion documentation for more details.
+
options ([+] can be repeated):
-s --source-type TYPE source repository type
--- a/tests/test-copies-chain-merge.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-copies-chain-merge.t Thu Feb 11 20:36:46 2021 -0800
@@ -721,6 +721,11 @@
#if no-compatibility no-filelog no-changeset
+ $ hg debugchangedfiles --compute 0
+ added : a, ;
+ added : b, ;
+ added : h, ;
+
$ for rev in `hg log --rev 'all()' -T '{rev}\n'`; do
> echo "##### revision $rev #####"
> hg debugsidedata -c -v -- $rev
--- a/tests/test-copies-in-changeset.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-copies-in-changeset.t Thu Feb 11 20:36:46 2021 -0800
@@ -345,7 +345,10 @@
$ hg co -q 0
$ hg mv a b
$ hg ci -qm 'rename a to b'
- $ hg rebase -d 1 --config rebase.experimental.inmemory=yes
+Not only do we want this to run in-memory, it shouldn't fall back to
+on-disk merge (no conflicts), so we force it to be in-memory
+with no fallback.
+ $ hg rebase -d 1 --config rebase.experimental.inmemory=yes --config devel.rebase.force-in-memory-merge=yes
rebasing 2:* tip "rename a to b" (glob)
merging a and b to b
saved backup bundle to $TESTTMP/rebase-rename/.hg/strip-backup/*-*-rebase.hg (glob)
--- a/tests/test-copies.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-copies.t Thu Feb 11 20:36:46 2021 -0800
@@ -93,8 +93,10 @@
x y
$ hg debugp1copies -r 1
x -> y
-Incorrectly doesn't show the rename
$ hg debugpathcopies 0 1
+ x -> y (no-filelog !)
+ $ hg debugpathcopies 0 1 --config devel.copy-tracing.trace-all-files=yes
+ x -> y
Copy a file onto another file with same content. If metadata is stored in changeset, this does not
produce a new filelog entry. The changeset's "files" entry should still list the file.
@@ -111,8 +113,10 @@
x x2
$ hg debugp1copies -r 1
x -> x2
-Incorrectly doesn't show the rename
$ hg debugpathcopies 0 1
+ x -> x2 (no-filelog !)
+ $ hg debugpathcopies 0 1 --config devel.copy-tracing.trace-all-files=yes
+ x -> x2
Rename file in a loop: x->y->z->x
$ newrepo
--- a/tests/test-copy.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-copy.t Thu Feb 11 20:36:46 2021 -0800
@@ -228,6 +228,17 @@
should show no copies
$ hg st -C
+note: since filelog-based copy tracing only traces copies for new files, the copy information here is not displayed.
+
+ $ hg status --copies --change .
+ M bar
+
+There is a devel option to walk all files and find this information anyway.
+
+ $ hg status --copies --change . --config devel.copy-tracing.trace-all-files=yes
+ M bar
+ foo
+
copy --after on an added file
$ cp bar baz
$ hg add baz
--- a/tests/test-diff-change.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-diff-change.t Thu Feb 11 20:36:46 2021 -0800
@@ -194,4 +194,105 @@
9
10
+merge diff should show only manual edits to a merge:
+
+ $ hg diff --merge -c 6
+(no diff output is expected here)
+
+Construct an "evil merge" that does something other than just the merge.
+
+ $ hg co ".^"
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg merge -r 5
+ merging file.txt
+ 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+ (branch merge, don't forget to commit)
+ $ echo 11 >> file.txt
+ $ hg ci -m 'merge 8 to y with manual edit of 11' # 7
+ created new head
+ $ hg diff -c 7
+ diff -r 273b50f17c6d -r 8ad85e839ba7 file.txt
+ --- a/file.txt Thu Jan 01 00:00:00 1970 +0000
+ +++ b/file.txt Thu Jan 01 00:00:00 1970 +0000
+ @@ -6,6 +6,7 @@
+ 5
+ 6
+ 7
+ -8
+ +y
+ 9
+ 10
+ +11
+Contrast with the `hg diff -c 7` version above: only the manual edit shows
+up, making it easy to identify changes someone is otherwise trying to sneak
+into a merge.
+ $ hg diff --merge -c 7
+ diff -r 8ad85e839ba7 file.txt
+ --- a/file.txt Thu Jan 01 00:00:00 1970 +0000
+ +++ b/file.txt Thu Jan 01 00:00:00 1970 +0000
+ @@ -9,3 +9,4 @@
+ y
+ 9
+ 10
+ +11
+
+Set up a conflict.
+ $ hg co ".^"
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ sed -e 's,^8$,z,' file.txt > file.txt.tmp
+ $ mv file.txt.tmp file.txt
+ $ hg ci -m 'conflicting edit: 8 to z'
+ created new head
+ $ echo "this file is new in p1 of the merge" > new-file-p1.txt
+ $ hg ci -Am 'new file' new-file-p1.txt
+ $ hg log -r . --template 'p1 will be rev {rev}\n'
+ p1 will be rev 9
+ $ hg co 5
+ 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ echo "this file is new in p2 of the merge" > new-file-p2.txt
+ $ hg ci -Am 'new file' new-file-p2.txt
+ created new head
+ $ hg log -r . --template 'p2 will be rev {rev}\n'
+ p2 will be rev 10
+ $ hg co -- 9
+ 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg merge -r 10
+ merging file.txt
+ warning: conflicts while merging file.txt! (edit, then use 'hg resolve --mark')
+ 1 files updated, 0 files merged, 0 files removed, 1 files unresolved
+ use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
+ [1]
+ $ hg revert file.txt -r .
+ $ hg resolve -ma
+ (no more unresolved files)
+ $ hg commit -m 'merge conflicted edit'
+Without --merge, it's a diff against p1
+ $ hg diff --no-merge -c 11
+ diff -r fd1f17c90d7c -r 5010caab09f6 new-file-p2.txt
+ --- /dev/null Thu Jan 01 00:00:00 1970 +0000
+ +++ b/new-file-p2.txt Thu Jan 01 00:00:00 1970 +0000
+ @@ -0,0 +1,1 @@
+ +this file is new in p2 of the merge
+With --merge, it's a diff against the conflicted content.
+ $ hg diff --merge -c 11
+ diff -r 5010caab09f6 file.txt
+ --- a/file.txt Thu Jan 01 00:00:00 1970 +0000
+ +++ b/file.txt Thu Jan 01 00:00:00 1970 +0000
+ @@ -6,12 +6,6 @@
+ 5
+ 6
+ 7
+ -<<<<<<< local: fd1f17c90d7c - test: new file
+ z
+ -||||||| base
+ -8
+ -=======
+ -y
+ ->>>>>>> other: d9e7de69eac3 - test: new file
+ 9
+ 10
+
+There must _NOT_ be a .hg/merge directory leftover.
+ $ test ! -d .hg/merge
+(No output is expected)
$ cd ..
--- a/tests/test-dispatch.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-dispatch.t Thu Feb 11 20:36:46 2021 -0800
@@ -154,7 +154,7 @@
$ HGPLAIN=+strictflags hg --config='hooks.pre-log=false' log -b default
abort: pre-log hook exited with status 1
- [255]
+ [40]
$ HGPLAIN=+strictflags hg --cwd .. -q -Ra log -b default
0:cb9a9f314b8b
$ HGPLAIN=+strictflags hg --cwd .. -q --repository a log -b default
@@ -166,7 +166,7 @@
$ HGPLAIN= hg log --config='hooks.pre-log=false' -b default
abort: pre-log hook exited with status 1
- [255]
+ [40]
$ HGPLAINEXCEPT= hg log --cwd .. -q -Ra -b default
0:cb9a9f314b8b
--- a/tests/test-globalopts.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-globalopts.t Thu Feb 11 20:36:46 2021 -0800
@@ -351,6 +351,7 @@
addremove add all new files, delete all missing files
files list tracked files
forget forget the specified files on the next commit
+ purge removes files not tracked by Mercurial
remove remove the specified files on the next commit
rename rename files; equivalent of copy + remove
resolve redo merges or set/view the merge status of files
@@ -483,6 +484,7 @@
addremove add all new files, delete all missing files
files list tracked files
forget forget the specified files on the next commit
+ purge removes files not tracked by Mercurial
remove remove the specified files on the next commit
rename rename files; equivalent of copy + remove
resolve redo merges or set/view the merge status of files
--- a/tests/test-help-hide.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-help-hide.t Thu Feb 11 20:36:46 2021 -0800
@@ -55,6 +55,7 @@
addremove add all new files, delete all missing files
files list tracked files
forget forget the specified files on the next commit
+ purge removes files not tracked by Mercurial
remove remove the specified files on the next commit
rename rename files; equivalent of copy + remove
resolve redo merges or set/view the merge status of files
@@ -191,6 +192,7 @@
addremove add all new files, delete all missing files
files list tracked files
forget forget the specified files on the next commit
+ purge removes files not tracked by Mercurial
remove remove the specified files on the next commit
rename rename files; equivalent of copy + remove
resolve redo merges or set/view the merge status of files
--- a/tests/test-help.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-help.t Thu Feb 11 20:36:46 2021 -0800
@@ -107,6 +107,7 @@
addremove add all new files, delete all missing files
files list tracked files
forget forget the specified files on the next commit
+ purge removes files not tracked by Mercurial
remove remove the specified files on the next commit
rename rename files; equivalent of copy + remove
resolve redo merges or set/view the merge status of files
@@ -235,6 +236,7 @@
addremove add all new files, delete all missing files
files list tracked files
forget forget the specified files on the next commit
+ purge removes files not tracked by Mercurial
remove remove the specified files on the next commit
rename rename files; equivalent of copy + remove
resolve redo merges or set/view the merge status of files
@@ -375,8 +377,6 @@
mq manage a stack of patches
notify hooks for sending email push notifications
patchbomb command to send changesets as (a series of) patch emails
- purge command to delete untracked files from the working
- directory
relink recreates hardlinks between repository clones
schemes extend schemes with shortcuts to repository swarms
share share a common history between several working directories
@@ -1069,6 +1069,7 @@
debugsetparents
manually set the parents of the current working directory
(DANGEROUS)
+ debugshell run an interactive Python interpreter
debugsidedata
dump the side data for a cl/manifest/file revision
debugssl test a secure connection to a server
@@ -2720,6 +2721,13 @@
set or show the current phase name
</td></tr>
<tr><td>
+ <a href="/help/purge">
+ purge
+ </a>
+ </td><td>
+ removes files not tracked by Mercurial
+ </td></tr>
+ <tr><td>
<a href="/help/recover">
recover
</a>
--- a/tests/test-hgweb-json.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-hgweb-json.t Thu Feb 11 20:36:46 2021 -0800
@@ -2190,6 +2190,10 @@
"topic": "phase"
},
{
+ "summary": "removes files not tracked by Mercurial",
+ "topic": "purge"
+ },
+ {
"summary": "roll back an interrupted transaction",
"topic": "recover"
},
--- a/tests/test-histedit-edit.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-histedit-edit.t Thu Feb 11 20:36:46 2021 -0800
@@ -375,7 +375,7 @@
note: commit message saved in .hg/last-message.txt
note: use 'hg commit --logfile .hg/last-message.txt --edit' to reuse it
abort: pretxncommit.unexpectedabort hook exited with status 1
- [255]
+ [40]
$ cat .hg/last-message.txt
f
@@ -400,7 +400,7 @@
note: commit message saved in .hg/last-message.txt
note: use 'hg commit --logfile .hg/last-message.txt --edit' to reuse it
abort: pretxncommit.unexpectedabort hook exited with status 1
- [255]
+ [40]
$ cat >> .hg/hgrc <<EOF
> [hooks]
--- a/tests/test-histedit-fold.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-histedit-fold.t Thu Feb 11 20:36:46 2021 -0800
@@ -202,7 +202,7 @@
transaction abort!
rollback completed
abort: pretxncommit.abortfolding hook failed
- [255]
+ [40]
$ cat .hg/last-message.txt
f
--- a/tests/test-hook.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-hook.t Thu Feb 11 20:36:46 2021 -0800
@@ -227,7 +227,7 @@
HG_PATS=[]
abort: pre-identify hook exited with status 1
- [255]
+ [40]
$ hg cat b
pre-cat hook: HG_ARGS=cat b
HG_HOOKNAME=pre-cat
@@ -390,7 +390,7 @@
HG_TAG=fa
abort: pretag.forbid hook exited with status 1
- [255]
+ [40]
$ hg tag -l fla
pretag hook: HG_HOOKNAME=pretag
HG_HOOKTYPE=pretag
@@ -405,7 +405,7 @@
HG_TAG=fla
abort: pretag.forbid hook exited with status 1
- [255]
+ [40]
pretxncommit hook can see changeset, can roll back txn, changeset no
more there after
@@ -451,7 +451,7 @@
rollback completed
abort: pretxncommit.forbid1 hook exited with status 1
- [255]
+ [40]
$ hg -q tip
4:539e4b31b6dc
@@ -485,7 +485,7 @@
HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
abort: precommit.forbid hook exited with status 1
- [255]
+ [40]
$ hg -q tip
4:539e4b31b6dc
@@ -644,7 +644,7 @@
HG_URL=file:$TESTTMP/a
abort: prepushkey hook exited with status 1
- [255]
+ [40]
$ cd ../a
test that prelistkeys can prevent listing keys
@@ -679,7 +679,7 @@
HG_NAMESPACE=bookmarks
abort: prelistkeys hook exited with status 1
- [255]
+ [40]
$ cd ../a
$ rm .hg/hgrc
@@ -704,7 +704,7 @@
HG_URL=file:$TESTTMP/a
abort: prechangegroup.forbid hook exited with status 1
- [255]
+ [40]
pretxnchangegroup hook can see incoming changes, can roll back txn,
incoming changes no longer there after
@@ -735,7 +735,7 @@
transaction abort!
rollback completed
abort: pretxnchangegroup.forbid1 hook exited with status 1
- [255]
+ [40]
$ hg -q tip
3:07f3376c1e65
@@ -786,7 +786,7 @@
HG_SOURCE=pull
abort: preoutgoing.forbid hook exited with status 1
- [255]
+ [40]
outgoing hooks work for local clones
@@ -825,7 +825,7 @@
HG_SOURCE=clone
abort: preoutgoing.forbid hook exited with status 1
- [255]
+ [40]
$ cd "$TESTTMP/b"
@@ -915,7 +915,7 @@
hooktype preoutgoing
source pull
abort: preoutgoing.fail hook failed
- [255]
+ [40]
$ echo '[hooks]' > ../a/.hg/hgrc
$ echo 'preoutgoing.uncallable = python:hooktests.uncallable' >> ../a/.hg/hgrc
@@ -1283,7 +1283,7 @@
rollback completed
strip failed, backup bundle stored in * (glob)
abort: pretxnclose.error hook exited with status 1
- [255]
+ [40]
$ hg recover
no interrupted transaction available
[1]
@@ -1306,7 +1306,7 @@
transaction abort!
rollback completed
abort: pretxnclose hook exited with status 1
- [255]
+ [40]
$ cp .hg/store/00changelog.i.a.saved .hg/store/00changelog.i.a
(check (in)visibility of new changeset while transaction running in
@@ -1331,7 +1331,7 @@
transaction abort!
rollback completed
abort: pretxnclose hook exited with status 1
- [255]
+ [40]
Hook from untrusted hgrc are reported as failure
================================================
@@ -1382,7 +1382,7 @@
rollback completed
abort: untrusted hook pretxnclose.testing not executed
(see 'hg help config.trusted')
- [255]
+ [40]
$ hg log
changeset: 0:3903775176ed
tag: tip
--- a/tests/test-inherit-mode.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-inherit-mode.t Thu Feb 11 20:36:46 2021 -0800
@@ -134,6 +134,8 @@
00660 ../push/.hg/00changelog.i
00770 ../push/.hg/cache/
00660 ../push/.hg/cache/branch2-base
+ 00660 ../push/.hg/cache/rbc-names-v1
+ 00660 ../push/.hg/cache/rbc-revs-v1
00660 ../push/.hg/dirstate
00660 ../push/.hg/requires
00770 ../push/.hg/store/
--- a/tests/test-largefiles.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-largefiles.t Thu Feb 11 20:36:46 2021 -0800
@@ -1751,7 +1751,7 @@
$ hg rm sub2/large6
$ hg up -r.
abort: outstanding uncommitted merge
- [255]
+ [20]
- revert should be able to revert files introduced in a pending merge
$ hg revert --all -r .
--- a/tests/test-mactext.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-mactext.t Thu Feb 11 20:36:46 2021 -0800
@@ -27,7 +27,7 @@
transaction abort!
rollback completed
abort: pretxncommit.cr hook failed
- [255]
+ [40]
$ hg cat f | f --hexdump
0000: 68 65 6c 6c 6f 0a |hello.|
--- a/tests/test-merge-remove.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-merge-remove.t Thu Feb 11 20:36:46 2021 -0800
@@ -95,7 +95,7 @@
$ hg merge
bar: untracked file differs
abort: untracked files in working directory differ from files in requested revision
- [255]
+ [20]
$ cat bar
memories of buried pirate treasure
--- a/tests/test-merge-tools.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-merge-tools.t Thu Feb 11 20:36:46 2021 -0800
@@ -377,7 +377,7 @@
merging f
some fail message
abort: $TESTTMP/mybrokenmerge.py hook failed
- [255]
+ [40]
$ aftermerge
# cat f
revision 1
--- a/tests/test-merge1.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-merge1.t Thu Feb 11 20:36:46 2021 -0800
@@ -113,7 +113,7 @@
$ hg merge 1
b: untracked file differs
abort: untracked files in working directory differ from files in requested revision
- [255]
+ [20]
#if symlink
symlinks to directories should be treated as regular files (issue5027)
@@ -122,7 +122,7 @@
$ hg merge 1
b: untracked file differs
abort: untracked files in working directory differ from files in requested revision
- [255]
+ [20]
symlinks shouldn't be followed
$ rm b
$ echo This is file b1 > .hg/b
@@ -130,7 +130,7 @@
$ hg merge 1
b: untracked file differs
abort: untracked files in working directory differ from files in requested revision
- [255]
+ [20]
$ rm b
$ echo This is file b2 > b
@@ -144,7 +144,7 @@
$ hg merge 1 --config merge.checkunknown=abort
b: untracked file differs
abort: untracked files in working directory differ from files in requested revision
- [255]
+ [20]
this merge should warn
$ hg merge 1 --config merge.checkunknown=warn
@@ -188,7 +188,7 @@
$ hg merge 3 --config merge.checkignored=ignore --config merge.checkunknown=abort
remoteignored: untracked file differs
abort: untracked files in working directory differ from files in requested revision
- [255]
+ [20]
$ hg merge 3 --config merge.checkignored=abort --config merge.checkunknown=ignore
merging .hgignore
merging for .hgignore
@@ -210,15 +210,15 @@
b: untracked file differs
localignored: untracked file differs
abort: untracked files in working directory differ from files in requested revision
- [255]
+ [20]
$ hg merge 3 --config merge.checkignored=abort --config merge.checkunknown=ignore
localignored: untracked file differs
abort: untracked files in working directory differ from files in requested revision
- [255]
+ [20]
$ hg merge 3 --config merge.checkignored=warn --config merge.checkunknown=abort
b: untracked file differs
abort: untracked files in working directory differ from files in requested revision
- [255]
+ [20]
$ hg merge 3 --config merge.checkignored=warn --config merge.checkunknown=warn
b: replacing untracked file
localignored: replacing untracked file
--- a/tests/test-minirst.py Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-minirst.py Thu Feb 11 20:36:46 2021 -0800
@@ -159,6 +159,8 @@
:a: First item.
:ab: Second item. Indentation and wrapping
is handled automatically.
+:c\:d: a key with colon
+:efg\:\:hh: a key with many colon
Next list:
--- a/tests/test-minirst.py.out Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-minirst.py.out Thu Feb 11 20:36:46 2021 -0800
@@ -439,6 +439,8 @@
a First item.
ab Second item. Indentation and wrapping is
handled automatically.
+c:d a key with colon
+efg::hh a key with many colon
Next list:
@@ -456,6 +458,9 @@
wrapping is
handled
automatically.
+c:d a key with colon
+efg::hh a key with many
+ colon
Next list:
@@ -476,6 +481,10 @@
<dd>First item.
<dt>ab
<dd>Second item. Indentation and wrapping is handled automatically.
+ <dt>c:d
+ <dd>a key with colon
+ <dt>efg::hh
+ <dd>a key with many colon
</dl>
<p>
Next list:
--- a/tests/test-mq-qfold.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-mq-qfold.t Thu Feb 11 20:36:46 2021 -0800
@@ -235,7 +235,7 @@
rollback completed
qrefresh interrupted while patch was popped! (revert --all, qpush to recover)
abort: pretxncommit.unexpectedabort hook exited with status 1
- [255]
+ [40]
$ cat .hg/last-message.txt
original message
--- a/tests/test-mq-qnew.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-mq-qnew.t Thu Feb 11 20:36:46 2021 -0800
@@ -310,7 +310,7 @@
note: commit message saved in .hg/last-message.txt
note: use 'hg commit --logfile .hg/last-message.txt --edit' to reuse it
abort: pretxncommit.unexpectedabort hook exited with status 1
- [255]
+ [40]
$ cat .hg/last-message.txt
--- a/tests/test-mq-qrefresh-replace-log-message.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-mq-qrefresh-replace-log-message.t Thu Feb 11 20:36:46 2021 -0800
@@ -191,7 +191,7 @@
rollback completed
qrefresh interrupted while patch was popped! (revert --all, qpush to recover)
abort: pretxncommit.unexpectedabort hook exited with status 1
- [255]
+ [40]
$ cat .hg/last-message.txt
Fifth commit message
This is the 5th log message
@@ -235,7 +235,7 @@
rollback completed
qrefresh interrupted while patch was popped! (revert --all, qpush to recover)
abort: pretxncommit.unexpectedabort hook exited with status 1
- [255]
+ [40]
(rebuilding at failure of qrefresh bases on rev #0, and it causes
dropping status of "file2")
@@ -273,7 +273,7 @@
rollback completed
qrefresh interrupted while patch was popped! (revert --all, qpush to recover)
abort: pretxncommit.unexpectedabort hook exited with status 1
- [255]
+ [40]
$ sh "$TESTTMP/checkvisibility.sh"
====
@@ -315,7 +315,7 @@
rollback completed
qrefresh interrupted while patch was popped! (revert --all, qpush to recover)
abort: pretxncommit.unexpectedabort hook exited with status 1
- [255]
+ [40]
$ sh "$TESTTMP/checkvisibility.sh"
====
--- a/tests/test-narrow-pull.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-narrow-pull.t Thu Feb 11 20:36:46 2021 -0800
@@ -78,7 +78,7 @@
transaction abort!
rollback completed
abort: pretxnchangegroup.bad hook exited with status 1
- [255]
+ [40]
$ hg id
223311e70a6f tip
@@ -147,6 +147,7 @@
$ hg clone -q --narrow ssh://user@dummy/master narrow2 --include "f1" -r 0
$ cd narrow2
$ hg pull -q -r 1
+ remote: abort: unexpected error: unable to resolve parent while packing b'00manifest.i' 1 for changeset 0
transaction abort!
rollback completed
abort: pull failed on remote
--- a/tests/test-narrow-trackedcmd.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-narrow-trackedcmd.t Thu Feb 11 20:36:46 2021 -0800
@@ -110,6 +110,8 @@
--clear whether to replace the existing narrowspec
--force-delete-local-changes forces deletion of local changes when
narrowing
+ --[no-]backup back up local changes when narrowing
+ (default: on)
--update-working-copy update working copy when the store has
changed
-e --ssh CMD specify ssh command to use
--- a/tests/test-narrow-widen.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-narrow-widen.t Thu Feb 11 20:36:46 2021 -0800
@@ -431,7 +431,7 @@
transaction abort!
rollback completed
abort: pretxnchangegroup.bad hook exited with status 1
- [255]
+ [40]
$ hg l
$ hg bookmarks
no bookmarks set
--- a/tests/test-narrow.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-narrow.t Thu Feb 11 20:36:46 2021 -0800
@@ -492,3 +492,20 @@
searching for changes
looking for unused includes to remove
found no unused includes
+Test --no-backup
+ $ hg tracked --addinclude d0 --addinclude d2 -q
+ $ hg unbundle .hg/strip-backup/*-narrow.hg -q
+ $ rm .hg/strip-backup/*
+ $ hg tracked --auto-remove-includes --no-backup
+ comparing with ssh://user@dummy/master
+ searching for changes
+ looking for unused includes to remove
+ path:d0
+ path:d2
+ remove these unused includes (yn)? y
+ looking for local changes to affected paths
+ deleting data/d0/f.i
+ deleting data/d2/f.i
+ deleting meta/d0/00manifest.i (tree !)
+ deleting meta/d2/00manifest.i (tree !)
+ $ ls .hg/strip-backup/
--- a/tests/test-parse-date.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-parse-date.t Thu Feb 11 20:36:46 2021 -0800
@@ -103,43 +103,43 @@
$ hg log -d "--2"
abort: -2 must be nonnegative (see 'hg help dates')
- [255]
+ [10]
Whitespace only
$ hg log -d " "
abort: dates cannot consist entirely of whitespace
- [255]
+ [10]
Test date formats with '>' or '<' accompanied by space characters
$ hg log -d '>' --template '{date|date}\n'
abort: invalid day spec, use '>DATE'
- [255]
+ [10]
$ hg log -d '<' --template '{date|date}\n'
abort: invalid day spec, use '<DATE'
- [255]
+ [10]
$ hg log -d ' >' --template '{date|date}\n'
abort: invalid day spec, use '>DATE'
- [255]
+ [10]
$ hg log -d ' <' --template '{date|date}\n'
abort: invalid day spec, use '<DATE'
- [255]
+ [10]
$ hg log -d '> ' --template '{date|date}\n'
abort: invalid day spec, use '>DATE'
- [255]
+ [10]
$ hg log -d '< ' --template '{date|date}\n'
abort: invalid day spec, use '<DATE'
- [255]
+ [10]
$ hg log -d ' > ' --template '{date|date}\n'
abort: invalid day spec, use '>DATE'
- [255]
+ [10]
$ hg log -d ' < ' --template '{date|date}\n'
abort: invalid day spec, use '<DATE'
- [255]
+ [10]
$ hg log -d '>02/01' --template '{date|date}\n'
$ hg log -d '<02/01' --template '{date|date}\n'
--- a/tests/test-pathconflicts-basic.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-pathconflicts-basic.t Thu Feb 11 20:36:46 2021 -0800
@@ -53,7 +53,7 @@
$ hg up file
a: untracked directory conflicts with file
abort: untracked files in working directory differ from files in requested revision
- [255]
+ [20]
$ hg up --clean file
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
(activating bookmark file)
--- a/tests/test-pathconflicts-update.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-pathconflicts-update.t Thu Feb 11 20:36:46 2021 -0800
@@ -49,7 +49,7 @@
$ hg up dir
a/b: untracked file conflicts with directory
abort: untracked files in working directory differ from files in requested revision
- [255]
+ [20]
$ hg up dir --config merge.checkunknown=warn
a/b: replacing untracked file
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -70,7 +70,7 @@
$ hg up dir
a/b: untracked file conflicts with directory
abort: untracked files in working directory differ from files in requested revision
- [255]
+ [20]
$ hg up dir --config merge.checkunknown=warn
a/b: replacing untracked file
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -89,7 +89,7 @@
$ hg up file
a/b: untracked directory conflicts with file
abort: untracked files in working directory differ from files in requested revision
- [255]
+ [20]
$ hg up file --config merge.checkunknown=warn
a/b: replacing untracked files in directory
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -107,7 +107,7 @@
$ hg up link
a/b: untracked directory conflicts with file
abort: untracked files in working directory differ from files in requested revision
- [255]
+ [20]
$ hg up link --config merge.checkunknown=warn
a/b: replacing untracked files in directory
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-persistent-nodemap.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-persistent-nodemap.t Thu Feb 11 20:36:46 2021 -0800
@@ -617,23 +617,30 @@
plain-cl-delta: yes yes yes
compression: zlib zlib zlib
compression-level: default default default
- $ hg debugupgraderepo --run --no-backup --quiet
+ $ hg debugupgraderepo --run --no-backup
upgrade will perform the following actions:
requirements
preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store
added: persistent-nodemap
+ persistent-nodemap
+ Speedup revision lookup by node id.
+
processed revlogs:
- all-filelogs
- changelog
- manifest
+ beginning upgrade...
+ repository locked and read-only
+ creating temporary repository to stage upgraded data: $TESTTMP/test-repo/.hg/upgrade.* (glob)
+ (it is safe to interrupt this process any time before data migration completes)
+ upgrading repository to use persistent nodemap feature
+ removing temporary repository $TESTTMP/test-repo/.hg/upgrade.* (glob)
$ ls -1 .hg/store/ | egrep '00(changelog|manifest)(\.n|-.*\.nd)'
00changelog-*.nd (glob)
00changelog.n
- 00manifest-*.nd (glob)
- 00manifest.n
$ hg debugnodemap --metadata
uid: * (glob)
--- a/tests/test-phases.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-phases.t Thu Feb 11 20:36:46 2021 -0800
@@ -757,7 +757,7 @@
transaction abort!
rollback completed
abort: pretxnclose hook exited with status 1
- [255]
+ [40]
$ cp .hg/store/phaseroots.pending.saved .hg/store/phaseroots.pending
(check (in)visibility of phaseroot while transaction running in repo)
@@ -780,7 +780,7 @@
transaction abort!
rollback completed
abort: pretxnclose hook exited with status 1
- [255]
+ [40]
Check that pretxnclose-phase hook can control phase movement
@@ -854,12 +854,12 @@
transaction abort!
rollback completed
abort: pretxnclose-phase.nopublish_D hook exited with status 1
- [255]
+ [40]
$ hg phase --public a603bfb5a83e
transaction abort!
rollback completed
abort: pretxnclose-phase.nopublish_D hook exited with status 1
- [255]
+ [40]
$ hg phase --draft 17a481b3bccb
test-debug-phase: move rev 3: 2 -> 1
test-debug-phase: move rev 4: 2 -> 1
@@ -871,7 +871,7 @@
transaction abort!
rollback completed
abort: pretxnclose-phase.nopublish_D hook exited with status 1
- [255]
+ [40]
$ cd ..
--- a/tests/test-purge.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-purge.t Thu Feb 11 20:36:46 2021 -0800
@@ -1,8 +1,3 @@
- $ cat <<EOF >> $HGRCPATH
- > [extensions]
- > purge =
- > EOF
-
init
$ hg init t
@@ -18,11 +13,35 @@
$ echo 'ignored' > .hgignore
$ hg ci -qAmr3 -d'2 0'
+purge without the extension
+
+ $ hg st
+ $ touch foo
+ $ hg purge
+ permanently delete 1 unkown files? (yN) n
+ abort: removal cancelled
+ [250]
+ $ hg st
+ ? foo
+ $ hg purge --no-confirm
+ $ hg st
+
+now enabling the extension
+
+ $ cat <<EOF >> $HGRCPATH
+ > [extensions]
+ > purge =
+ > EOF
+
delete an empty directory
$ mkdir empty_dir
$ hg purge -p -v
empty_dir
+ $ hg purge --confirm
+ permanently delete at least 1 empty directories? (yN) n
+ abort: removal cancelled
+ [250]
$ hg purge -v
removing directory empty_dir
$ ls -A
@@ -62,6 +81,10 @@
$ hg purge -p
untracked_file
untracked_file_readonly
+ $ hg purge --confirm
+ permanently delete 2 unkown files? (yN) n
+ abort: removal cancelled
+ [250]
$ hg purge -v
removing file untracked_file
removing file untracked_file_readonly
@@ -121,6 +144,10 @@
$ cd directory
$ hg purge -p ../untracked_directory
untracked_directory/nested_directory
+ $ hg purge --confirm
+ permanently delete 1 unkown files? (yN) n
+ abort: removal cancelled
+ [250]
$ hg purge -v ../untracked_directory
removing directory untracked_directory/nested_directory
removing directory untracked_directory
@@ -138,6 +165,7 @@
$ touch ignored
$ hg purge -p
+ $ hg purge --confirm
$ hg purge -v
$ touch untracked_file
$ ls
@@ -147,6 +175,10 @@
untracked_file
$ hg purge -p -i
ignored
+ $ hg purge --confirm -i
+ permanently delete 1 ignored files? (yN) n
+ abort: removal cancelled
+ [250]
$ hg purge -v -i
removing file ignored
$ ls -A
@@ -159,6 +191,10 @@
$ hg purge -p --all
ignored
untracked_file
+ $ hg purge --confirm --all
+ permanently delete 1 unkown and 1 ignored files? (yN) n
+ abort: removal cancelled
+ [250]
$ hg purge -v --all
removing file ignored
removing file untracked_file
--- a/tests/test-rebase-conflicts.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-rebase-conflicts.t Thu Feb 11 20:36:46 2021 -0800
@@ -318,10 +318,10 @@
bundle2-input-part: total payload size 1686
bundle2-input-part: "cache:rev-branch-cache" (advisory) supported
bundle2-input-part: total payload size 74
- truncating cache/rbc-revs-v1 to 56
bundle2-input-part: "phase-heads" supported
bundle2-input-part: total payload size 24
bundle2-input-bundle: 3 parts total
+ truncating cache/rbc-revs-v1 to 72
added 2 changesets with 2 changes to 1 files
updating the branch cache
invalid branch cache (served): tip differs
--- a/tests/test-rebase-interruptions.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-rebase-interruptions.t Thu Feb 11 20:36:46 2021 -0800
@@ -350,7 +350,7 @@
M A
rebasing 6:a0b2430ebfb8 tip "F"
abort: precommit hook exited with status 1
- [255]
+ [40]
$ hg tglogp
@ 7: 401ccec5e39f secret 'C'
|
@@ -401,7 +401,7 @@
transaction abort!
rollback completed
abort: pretxncommit hook exited with status 1
- [255]
+ [40]
$ hg tglogp
@ 7: 401ccec5e39f secret 'C'
|
@@ -451,7 +451,7 @@
transaction abort!
rollback completed
abort: pretxnclose hook exited with status 1
- [255]
+ [40]
$ hg tglogp
@ 7: 401ccec5e39f secret 'C'
|
--- a/tests/test-remotefilelog-clone-tree.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-remotefilelog-clone-tree.t Thu Feb 11 20:36:46 2021 -0800
@@ -91,7 +91,6 @@
# flakiness here
$ hg clone --noupdate ssh://user@dummy/shallow full 2>/dev/null
streaming all changes
- remote: abort: Cannot clone from a shallow repo to a full repo.
[255]
# getbundle full clone
--- a/tests/test-remotefilelog-clone.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-remotefilelog-clone.t Thu Feb 11 20:36:46 2021 -0800
@@ -85,9 +85,9 @@
$ TEMP_STDERR=full-clone-from-shallow.stderr.tmp
$ hg clone --noupdate ssh://user@dummy/shallow full 2>$TEMP_STDERR
streaming all changes
- remote: abort: Cannot clone from a shallow repo to a full repo.
[255]
$ cat $TEMP_STDERR
+ remote: abort: Cannot clone from a shallow repo to a full repo.
abort: pull failed on remote
$ rm $TEMP_STDERR
--- a/tests/test-remotefilelog-prefetch.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-remotefilelog-prefetch.t Thu Feb 11 20:36:46 2021 -0800
@@ -180,7 +180,7 @@
x: untracked file differs
3 files fetched over 1 fetches - (3 misses, 0.00% hit ratio) over * (glob)
abort: untracked files in working directory differ from files in requested revision
- [255]
+ [20]
$ hg revert --all
# Test batch fetching of lookup files during hg status
--- a/tests/test-rename-dir-merge.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-rename-dir-merge.t Thu Feb 11 20:36:46 2021 -0800
@@ -110,7 +110,7 @@
$ hg merge 2
b/c: untracked file differs
abort: untracked files in working directory differ from files in requested revision
- [255]
+ [20]
$ cat b/c
target
but it should succeed if the content matches
--- a/tests/test-requires.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-requires.t Thu Feb 11 20:36:46 2021 -0800
@@ -5,7 +5,7 @@
$ hg commit -m test
$ rm .hg/requires
$ hg tip
- abort: unknown version (2) in revlog 00changelog.i
+ abort: unknown version (65535) in revlog 00changelog.i
[50]
$ echo indoor-pool > .hg/requires
$ hg tip
--- a/tests/test-resolve.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-resolve.t Thu Feb 11 20:36:46 2021 -0800
@@ -153,15 +153,15 @@
$ hg up 0
abort: outstanding merge conflicts
(use 'hg resolve' to resolve)
- [255]
+ [20]
$ hg merge 2
abort: outstanding merge conflicts
(use 'hg resolve' to resolve)
- [255]
+ [20]
$ hg merge --force 2
abort: outstanding merge conflicts
(use 'hg resolve' to resolve)
- [255]
+ [20]
set up conflict-free merge
@@ -344,6 +344,24 @@
$ hg resolve -l
R file1
R file2
+Test with :mergediff conflict markers
+ $ hg resolve --unmark
+ $ hg resolve --re-merge -t :mergediff file2
+ merging file2
+ warning: conflicts while merging file2! (edit, then use 'hg resolve --mark')
+ [1]
+ $ hg resolve -l
+ U file1
+ U file2
+ $ hg --config commands.resolve.mark-check=abort resolve -m
+ warning: the following files still have conflict markers:
+ file2
+ abort: conflict markers detected
+ (use --all to mark anyway)
+ [20]
+ $ hg resolve -l
+ U file1
+ U file2
Test option value 'warn'
$ hg resolve --unmark
$ hg resolve -l
--- a/tests/test-rhg.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-rhg.t Thu Feb 11 20:36:46 2021 -0800
@@ -38,7 +38,7 @@
Deleted repository
$ rm -rf `pwd`
$ rhg root
- abort: error getting current working directory: $ENOENT$
+ abort: $ENOENT$: current directory
[255]
Listing tracked files
@@ -153,17 +153,11 @@
[252]
$ rhg debugrequirements
- dotencode
- fncache
- generaldelta
- revlogv1
- sparserevlog
- store
- indoor-pool
+ [252]
$ echo -e '\xFF' >> .hg/requires
$ rhg debugrequirements
- abort: .hg/requires is corrupted
+ abort: corrupted repository: parse error in 'requires' file
[255]
Persistent nodemap
@@ -202,3 +196,67 @@
of
$ rhg cat -r c3ae8dec9fad of
r5000
+
+Create a shared repository
+
+ $ echo "[extensions]" >> $HGRCPATH
+ $ echo "share = " >> $HGRCPATH
+
+ $ cd $TESTTMP
+ $ hg init repo1
+ $ cd repo1
+ $ echo a > a
+ $ hg commit -A -m'init'
+ adding a
+
+ $ cd ..
+ $ hg share repo1 repo2
+ updating working directory
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+And check that basic rhg commands work with sharing
+
+ $ cd repo2
+ $ rhg files
+ a
+ $ rhg cat -r 0 a
+ a
+
+Same with relative sharing
+
+ $ cd ..
+ $ hg share repo2 repo3 --relative
+ updating working directory
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+ $ cd repo3
+ $ rhg files
+ a
+ $ rhg cat -r 0 a
+ a
+
+Same with share-safe
+
+ $ echo "[format]" >> $HGRCPATH
+ $ echo "use-share-safe = True" >> $HGRCPATH
+
+ $ cd $TESTTMP
+ $ hg init repo4
+ $ cd repo4
+ $ echo a > a
+ $ hg commit -A -m'init'
+ adding a
+
+ $ cd ..
+ $ hg share repo4 repo5
+ updating working directory
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+And check that basic rhg commands work with sharing
+
+ $ cd repo5
+ $ rhg files
+ a
+ $ rhg cat -r 0 a
+ a
+
--- a/tests/test-rollback.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-rollback.t Thu Feb 11 20:36:46 2021 -0800
@@ -103,7 +103,7 @@
transaction abort!
rollback completed
abort: pretxncommit hook exited with status * (glob)
- [255]
+ [40]
$ cat .hg/last-message.txt ; echo
precious commit message
@@ -118,7 +118,7 @@
note: commit message saved in .hg/last-message.txt
note: use 'hg commit --logfile .hg/last-message.txt --edit' to reuse it
abort: pretxncommit hook exited with status * (glob)
- [255]
+ [40]
$ cat .hg/last-message.txt
another precious commit message
@@ -380,7 +380,7 @@
warn during abort
rollback completed
abort: pretxncommit hook exited with status 1
- [255]
+ [40]
$ hg commit -m 'commit 1'
warn during pretxncommit
@@ -405,7 +405,7 @@
transaction abort!
rollback completed
abort: pretxncommit hook exited with status 1
- [255]
+ [40]
$ hg commit -m 'commit 1'
warn during pretxncommit
@@ -431,7 +431,7 @@
transaction abort!
warn during abort
abort: pretxncommit hook exited with status 1
- [255]
+ [40]
$ hg verify
checking changesets
--- a/tests/test-setdiscovery.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-setdiscovery.t Thu Feb 11 20:36:46 2021 -0800
@@ -1328,25 +1328,25 @@
updating to branch b
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --config devel.discovery.randomize=false
+ $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --config devel.discovery.randomize=false --config devel.discovery.sample-size.initial=50
comparing with b
query 1; heads
searching for changes
taking quick initial sample
searching: 2 queries
- query 2; still undecided: 1080, sample size is: 100
+ query 2; still undecided: 1080, sample size is: 50
sampling from both directions
searching: 3 queries
- query 3; still undecided: 980, sample size is: 200
+ query 3; still undecided: 1030, sample size is: 200
sampling from both directions
searching: 4 queries
- query 4; still undecided: 497, sample size is: 210
+ query 4; still undecided: 547, sample size is: 210
sampling from both directions
searching: 5 queries
- query 5; still undecided: 285, sample size is: 220
+ query 5; still undecided: 336, sample size is: 220
sampling from both directions
searching: 6 queries
- query 6; still undecided: 63, sample size is: 63
+ query 6; still undecided: 114, sample size is: 114
6 total queries in *.????s (glob)
elapsed time: * seconds (glob)
round-trips: 6
@@ -1412,22 +1412,30 @@
missing: 1040
common heads: 3ee37d65064a
- $ hg -R a debugdiscovery b --debug --config devel.discovery.exchange-heads=false --config devel.discovery.randomize=false --config devel.discovery.grow-sample.rate=1.01
+ $ hg -R a debugdiscovery b --debug --config devel.discovery.exchange-heads=false --config devel.discovery.randomize=false --config devel.discovery.grow-sample.rate=1.20 --config devel.discovery.sample-size=50
comparing with b
searching for changes
sampling from both directions
- query 1; still undecided: 1340, sample size is: 200
+ query 1; still undecided: 1340, sample size is: 50
+ sampling from both directions
+ query 2; still undecided: 995, sample size is: 60
sampling from both directions
- query 2; still undecided: 795, sample size is: 202
+ query 3; still undecided: 913, sample size is: 72
sampling from both directions
- query 3; still undecided: 525, sample size is: 204
+ query 4; still undecided: 816, sample size is: 204
+ sampling from both directions
+ query 5; still undecided: 612, sample size is: 153
sampling from both directions
- query 4; still undecided: 252, sample size is: 206
+ query 6; still undecided: 456, sample size is: 123
+ sampling from both directions
+ query 7; still undecided: 332, sample size is: 147
sampling from both directions
- query 5; still undecided: 44, sample size is: 44
- 5 total queries in *s (glob)
- elapsed time: * seconds (glob)
- round-trips: 5
+ query 8; still undecided: 184, sample size is: 176
+ sampling from both directions
+ query 9; still undecided: 8, sample size is: 8
+ 9 total queries in *s (glob)
+ elapsed time: * seconds (glob)
+ round-trips: 9
heads summary:
total common heads: 1
also local heads: 0
@@ -1580,3 +1588,139 @@
common: 0
missing: 1
common heads: 66f7d451a68b
+
+ $ cd ..
+
+
+Test debugging discovery using different subsets of the same repository
+=======================================================================
+
+remote is a local subset
+------------------------
+
+remote will be last 25 heads of the local graph
+
+ $ cd $TESTTMP/manyheads
+ $ hg -R a debugdiscovery \
+ > --debug \
+ > --remote-as-revs 'last(heads(all()), 25)' \
+ > --config devel.discovery.randomize=false
+ query 1; heads
+ searching for changes
+ all remote heads known locally
+ elapsed time: * seconds (glob)
+ round-trips: 1
+ heads summary:
+ total common heads: 25
+ also local heads: 25
+ also remote heads: 25
+ both: 25
+ local heads: 260
+ common: 25
+ missing: 235
+ remote heads: 25
+ common: 25
+ unknown: 0
+ local changesets: 1340
+ common: 400
+ heads: 25
+ roots: 1
+ missing: 940
+ heads: 235
+ roots: 235
+ first undecided set: 940
+ heads: 235
+ roots: 235
+ common: 0
+ missing: 940
+ common heads: 0dfd965d91c6 0fe09b60448d 14a17233ce9d 175c0a3072cf 1c51e2c80832 1e51600e0698 24eb5f9bdbab 25ce09526613 36bd00abde57 426989fdefa0 596d87362679 5dd1039ea5c0 5ef24f022278 5f230dc19419 80b39998accb 88f40688ffb5 9e37ddf8c632 abf4d55b075e b2ce801fddfe b368b6ac3ce3 c959bf2e869c c9fba6ba4e2e d783207cf649 d9a51e256f21 e3717a4e3753
+
+local is a local subset
+------------------------
+
+local will be first 25 heads of the local graph
+
+ $ cd $TESTTMP/manyheads
+ $ hg -R a debugdiscovery b \
+ > --debug \
+ > --local-as-revs 'first(heads(all()), 25)' \
+ > --config devel.discovery.randomize=false
+ comparing with b
+ query 1; heads
+ searching for changes
+ taking quick initial sample
+ query 2; still undecided: 375, sample size is: 81
+ sampling from both directions
+ query 3; still undecided: 3, sample size is: 3
+ 3 total queries *s (glob)
+ elapsed time: * seconds (glob)
+ round-trips: 3
+ heads summary:
+ total common heads: 1
+ also local heads: 0
+ also remote heads: 0
+ both: 0
+ local heads: 25
+ common: 0
+ missing: 25
+ remote heads: 1
+ common: 0
+ unknown: 1
+ local changesets: 400
+ common: 300
+ heads: 1
+ roots: 1
+ missing: 100
+ heads: 25
+ roots: 25
+ first undecided set: 400
+ heads: 25
+ roots: 1
+ common: 300
+ missing: 100
+ common heads: 3ee37d65064a
+
+both local and remote are subsets
+------------------------
+
+local will be first 25 heads and remote will be last 25 heads of the local graph
+
+ $ cd $TESTTMP/manyheads
+ $ hg -R a debugdiscovery \
+ > --debug \
+ > --local-as-revs 'first(heads(all()), 25)' \
+ > --remote-as-revs 'last(heads(all()), 25)' \
+ > --config devel.discovery.randomize=false
+ query 1; heads
+ searching for changes
+ taking quick initial sample
+ query 2; still undecided: 375, sample size is: 81
+ sampling from both directions
+ query 3; still undecided: 3, sample size is: 3
+ 3 total queries in *s (glob)
+ elapsed time: * seconds (glob)
+ round-trips: 3
+ heads summary:
+ total common heads: 1
+ also local heads: 0
+ also remote heads: 0
+ both: 0
+ local heads: 25
+ common: 0
+ missing: 25
+ remote heads: 25
+ common: 0
+ unknown: 25
+ local changesets: 400
+ common: 300
+ heads: 1
+ roots: 1
+ missing: 100
+ heads: 25
+ roots: 25
+ first undecided set: 400
+ heads: 25
+ roots: 1
+ common: 300
+ missing: 100
+ common heads: 3ee37d65064a
--- a/tests/test-share-bookmarks.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-share-bookmarks.t Thu Feb 11 20:36:46 2021 -0800
@@ -102,7 +102,7 @@
transaction abort!
rollback completed
abort: pretxnclose hook exited with status 1
- [255]
+ [40]
$ hg book bm1
FYI, in contrast to above test, bmX is invisible in repo1 (= shared
@@ -127,7 +127,7 @@
transaction abort!
rollback completed
abort: pretxnclose hook exited with status 1
- [255]
+ [40]
$ hg book bm3
clean up bm2 since it's uninteresting (not shared in the vfs case and
@@ -249,7 +249,7 @@
no changes found
adding remote bookmark bm3
abort: forced failure by extension
- [255]
+ [40]
$ hg boo
bm1 3:b87954705719
bm4 5:92793bfc8cad
--- a/tests/test-share-safe.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-share-safe.t Thu Feb 11 20:36:46 2021 -0800
@@ -352,18 +352,27 @@
- changelog
- manifest
- $ hg debugupgraderepo --run -q
+ $ hg debugupgraderepo --run
upgrade will perform the following actions:
requirements
preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store
added: share-safe
+ share-safe
+ Upgrades a repository to share-safe format so that future shares of this repository share its requirements and configs.
+
processed revlogs:
- all-filelogs
- changelog
- manifest
+ beginning upgrade...
+ repository locked and read-only
+ creating temporary repository to stage upgraded data: $TESTTMP/non-share-safe/.hg/upgrade.* (glob)
+ (it is safe to interrupt this process any time before data migration completes)
+ upgrading repository requirements
+ removing temporary repository $TESTTMP/non-share-safe/.hg/upgrade.* (glob)
repository upgraded to share safe mode, existing shares will still work in old non-safe mode. Re-share existing shares to use them in safe mode New shares will be created in safe mode.
$ hg debugrequirements
@@ -433,7 +442,7 @@
- changelog
- manifest
- $ hg debugupgraderepo -q --run
+ $ hg debugupgraderepo --run
upgrade will perform the following actions:
requirements
@@ -445,6 +454,12 @@
- changelog
- manifest
+ beginning upgrade...
+ repository locked and read-only
+ creating temporary repository to stage upgraded data: $TESTTMP/non-share-safe/.hg/upgrade.* (glob)
+ (it is safe to interrupt this process any time before data migration completes)
+ upgrading repository requirements
+ removing temporary repository $TESTTMP/non-share-safe/.hg/upgrade.* (glob)
repository downgraded to not use share safe mode, existing shares will not work and needs to be reshared.
$ hg debugrequirements
--- a/tests/test-simplemerge.py Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-simplemerge.py Thu Feb 11 20:36:46 2021 -0800
@@ -141,8 +141,6 @@
"""No conflicts because nothing changed"""
m3 = Merge3([b'aaa', b'bbb'], [b'aaa', b'bbb'], [b'aaa', b'bbb'])
- self.assertEqual(m3.find_unconflicted(), [(0, 2)])
-
self.assertEqual(
list(m3.find_sync_regions()),
[(0, 2, 0, 2, 0, 2), (2, 2, 2, 2, 2, 2)],
@@ -189,8 +187,6 @@
[b'aaa', b'bbb'], [b'aaa', b'111', b'bbb'], [b'aaa', b'bbb']
)
- self.assertEqual(m3.find_unconflicted(), [(0, 1), (1, 2)])
-
self.assertEqual(
list(m3.find_sync_regions()),
[(0, 1, 0, 1, 0, 1), (1, 2, 2, 3, 1, 2), (2, 2, 3, 3, 2, 2)],
@@ -271,8 +267,6 @@
[b'aaa\n', b'222\n', b'bbb\n'],
)
- self.assertEqual(m3.find_unconflicted(), [(0, 1), (1, 2)])
-
self.assertEqual(
list(m3.find_sync_regions()),
[(0, 1, 0, 1, 0, 1), (1, 2, 2, 3, 2, 3), (2, 2, 3, 3, 3, 3)],
@@ -323,8 +317,6 @@
[b'aaa', b'222', b'bbb'],
)
- self.assertEqual(m3.find_unconflicted(), [(0, 1), (2, 3)])
-
self.assertEqual(
list(m3.find_sync_regions()),
[(0, 1, 0, 1, 0, 1), (2, 3, 2, 3, 2, 3), (3, 3, 3, 3, 3, 3)],
@@ -338,8 +330,6 @@
[b'aaa', b'222', b'222', b'222', b'222', b'bbb'],
)
- self.assertEqual(m3.find_unconflicted(), [(0, 1), (3, 4)])
-
self.assertEqual(
list(m3.find_sync_regions()),
[(0, 1, 0, 1, 0, 1), (3, 4, 4, 5, 5, 6), (4, 4, 5, 5, 6, 6)],
--- a/tests/test-strip.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-strip.t Thu Feb 11 20:36:46 2021 -0800
@@ -427,7 +427,7 @@
strip failed, unrecovered changes stored in '$TESTTMP/test/.hg/strip-backup/*-temp.hg' (glob)
(fix the problem, then recover the changesets with "hg unbundle '$TESTTMP/test/.hg/strip-backup/*-temp.hg'") (glob)
abort: pretxnchangegroup.bad hook exited with status 1
- [255]
+ [40]
$ restore
$ hg log -G
o changeset: 4:443431ffac4f
--- a/tests/test-tag.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-tag.t Thu Feb 11 20:36:46 2021 -0800
@@ -290,7 +290,7 @@
$ rm -f .hg/last-message.txt
$ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg tag custom-tag -e
abort: pretag.test-saving-lastmessage hook exited with status 1
- [255]
+ [40]
$ test -f .hg/last-message.txt
[1]
@@ -325,7 +325,7 @@
note: commit message saved in .hg/last-message.txt
note: use 'hg commit --logfile .hg/last-message.txt --edit' to reuse it
abort: pretxncommit.unexpectedabort hook exited with status 1
- [255]
+ [40]
$ cat .hg/last-message.txt
custom tag message
second line
--- a/tests/test-transplant.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-transplant.t Thu Feb 11 20:36:46 2021 -0800
@@ -1091,7 +1091,7 @@
transaction abort!
rollback completed
abort: pretxncommit.abort hook exited with status 1
- [255]
+ [40]
$ cat >> .hg/hgrc <<EOF
> [hooks]
> pretxncommit.abort = !
--- a/tests/test-unamend.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-unamend.t Thu Feb 11 20:36:46 2021 -0800
@@ -39,7 +39,7 @@
$ hg unamend
abort: changeset must have one predecessor, found 0 predecessors
- [255]
+ [10]
Unamend on clean wdir and tip
--- a/tests/test-uncommit.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-uncommit.t Thu Feb 11 20:36:46 2021 -0800
@@ -114,12 +114,12 @@
$ hg uncommit nothinghere
abort: cannot uncommit "nothinghere"
(file does not exist)
- [255]
+ [10]
$ hg status
$ hg uncommit file-abc
abort: cannot uncommit "file-abc"
(file was not changed in working directory parent)
- [255]
+ [10]
$ hg status
Try partial uncommit, also moves bookmark
@@ -419,7 +419,7 @@
$ hg uncommit
abort: cannot uncommit merge changeset
- [255]
+ [10]
$ hg status
$ hg log -G -T '{rev}:{node} {desc}' --hidden
@@ -585,12 +585,12 @@
$ hg uncommit emptydir
abort: cannot uncommit "emptydir"
(file was untracked in working directory parent)
- [255]
+ [10]
$ cd emptydir
$ hg uncommit .
abort: cannot uncommit "emptydir"
(file was untracked in working directory parent)
- [255]
+ [10]
$ hg status
$ cd ..
--- a/tests/test-up-local-change.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-up-local-change.t Thu Feb 11 20:36:46 2021 -0800
@@ -175,7 +175,7 @@
$ hg up 1
b: untracked file differs
abort: untracked files in working directory differ from files in requested revision
- [255]
+ [20]
$ rm b
test conflicting untracked ignored file
@@ -195,7 +195,7 @@
$ hg up 'desc("add ignored file")'
ignored: untracked file differs
abort: untracked files in working directory differ from files in requested revision
- [255]
+ [20]
test a local add
--- a/tests/test-update-branches.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-update-branches.t Thu Feb 11 20:36:46 2021 -0800
@@ -324,7 +324,7 @@
$ hg up -q 4
abort: conflicting changes
(commit or update --clean to discard changes)
- [255]
+ [20]
$ hg up -m 4
merging a
warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
--- a/tests/test-upgrade-repo.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-upgrade-repo.t Thu Feb 11 20:36:46 2021 -0800
@@ -632,11 +632,9 @@
data fully upgraded in a temporary repository
marking source repository as being upgraded; clients will be unable to read from repository
starting in-place swap of repository data
- replaced files will be backed up at $TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
replacing store...
store replacement complete; repository was inconsistent for * (glob)
finalizing requirements file and making repository readable again
- removing old repository content $TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
removing temporary repository $TESTTMP/upgradegd/.hg/upgrade.* (glob)
$ ls -1 .hg/ | grep upgradebackup
[1]
@@ -679,11 +677,9 @@
data fully upgraded in a temporary repository
marking source repository as being upgraded; clients will be unable to read from repository
starting in-place swap of repository data
- replaced files will be backed up at $TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
replacing store...
store replacement complete; repository was inconsistent for *s (glob)
finalizing requirements file and making repository readable again
- removing old repository content $TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
removing temporary repository $TESTTMP/upgradegd/.hg/upgrade.* (glob)
Check that the repo still works fine
@@ -759,11 +755,9 @@
data fully upgraded in a temporary repository
marking source repository as being upgraded; clients will be unable to read from repository
starting in-place swap of repository data
- replaced files will be backed up at $TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
replacing store...
store replacement complete; repository was inconsistent for *s (glob)
finalizing requirements file and making repository readable again
- removing old repository content $TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
removing temporary repository $TESTTMP/upgradegd/.hg/upgrade.* (glob)
$ hg verify
checking changesets
@@ -810,11 +804,9 @@
data fully upgraded in a temporary repository
marking source repository as being upgraded; clients will be unable to read from repository
starting in-place swap of repository data
- replaced files will be backed up at $TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
replacing store...
store replacement complete; repository was inconsistent for *s (glob)
finalizing requirements file and making repository readable again
- removing old repository content $TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
removing temporary repository $TESTTMP/upgradegd/.hg/upgrade.* (glob)
$ hg verify
checking changesets
@@ -861,11 +853,9 @@
data fully upgraded in a temporary repository
marking source repository as being upgraded; clients will be unable to read from repository
starting in-place swap of repository data
- replaced files will be backed up at $TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
replacing store...
store replacement complete; repository was inconsistent for *s (glob)
finalizing requirements file and making repository readable again
- removing old repository content $TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
removing temporary repository $TESTTMP/upgradegd/.hg/upgrade.* (glob)
$ hg verify
checking changesets
@@ -919,11 +909,9 @@
data fully upgraded in a temporary repository
marking source repository as being upgraded; clients will be unable to read from repository
starting in-place swap of repository data
- replaced files will be backed up at $TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
replacing store...
store replacement complete; repository was inconsistent for *s (glob)
finalizing requirements file and making repository readable again
- removing old repository content $TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
removing temporary repository $TESTTMP/upgradegd/.hg/upgrade.* (glob)
$ hg verify
checking changesets
@@ -978,11 +966,9 @@
data fully upgraded in a temporary repository
marking source repository as being upgraded; clients will be unable to read from repository
starting in-place swap of repository data
- replaced files will be backed up at $TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
replacing store...
store replacement complete; repository was inconsistent for *s (glob)
finalizing requirements file and making repository readable again
- removing old repository content $TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
removing temporary repository $TESTTMP/upgradegd/.hg/upgrade.* (glob)
$ hg verify
checking changesets
--- a/tests/test-url-download.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-url-download.t Thu Feb 11 20:36:46 2021 -0800
@@ -34,6 +34,8 @@
$ hg debugdownload ./null.txt
1 0000000000000000000000000000000000000000
+ $ cat ../error.log
+
Test largefile URL
------------------
@@ -66,3 +68,5 @@
$ hg debugdownload "largefile://a57b57b39ee4dc3da1e03526596007f480ecdbe8"
1 0000000000000000000000000000000000000000
$ cd ..
+
+ $ cat error.log
--- a/tests/test-win32text.t Wed Feb 10 23:03:54 2021 +0100
+++ b/tests/test-win32text.t Thu Feb 11 20:36:46 2021 -0800
@@ -38,7 +38,7 @@
transaction abort!
rollback completed
abort: pretxncommit.crlf hook failed
- [255]
+ [40]
$ mv .hg/hgrc .hg/hgrc.bak
@@ -77,7 +77,7 @@
transaction abort!
rollback completed
abort: pretxnchangegroup.crlf hook failed
- [255]
+ [40]
$ mv .hg/hgrc.bak .hg/hgrc
$ echo hello > f
@@ -109,7 +109,7 @@
transaction abort!
rollback completed
abort: pretxncommit.crlf hook failed
- [255]
+ [40]
$ hg revert -a
forgetting d/f2
$ rm d/f2
@@ -286,7 +286,7 @@
transaction abort!
rollback completed
abort: pretxnchangegroup.crlf hook failed
- [255]
+ [40]
$ hg log -v
changeset: 5:f0b1c8d75fce