hgext/remotefilelog/debugcommands.py
changeset 43076:2372284d9457
parent 42943:5fadf6103790
child 43077:687b865b95ad
--- a/hgext/remotefilelog/debugcommands.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/remotefilelog/debugcommands.py	Sun Oct 06 09:45:02 2019 -0400
@@ -29,29 +29,35 @@
     shallowutil,
 )
 
+
 def debugremotefilelog(ui, path, **opts):
     decompress = opts.get(r'decompress')
 
     size, firstnode, mapping = parsefileblob(path, decompress)
 
-    ui.status(_("size: %d bytes\n") % (size))
-    ui.status(_("path: %s \n") % (path))
+    ui.status(_("size: %d bytes\n") % size)
+    ui.status(_("path: %s \n") % path)
     ui.status(_("key: %s \n") % (short(firstnode)))
     ui.status(_("\n"))
-    ui.status(_("%12s => %12s %13s %13s %12s\n") %
-              ("node", "p1", "p2", "linknode", "copyfrom"))
+    ui.status(
+        _("%12s => %12s %13s %13s %12s\n")
+        % ("node", "p1", "p2", "linknode", "copyfrom")
+    )
 
     queue = [firstnode]
     while queue:
         node = queue.pop(0)
         p1, p2, linknode, copyfrom = mapping[node]
-        ui.status(_("%s => %s  %s  %s  %s\n") %
-            (short(node), short(p1), short(p2), short(linknode), copyfrom))
+        ui.status(
+            _("%s => %s  %s  %s  %s\n")
+            % (short(node), short(p1), short(p2), short(linknode), copyfrom)
+        )
         if p1 != nullid:
             queue.append(p1)
         if p2 != nullid:
             queue.append(p2)
 
+
 def buildtemprevlog(repo, file):
     # get filename key
     filekey = nodemod.hex(hashlib.sha1(file).digest())
@@ -74,6 +80,7 @@
     class faket(object):
         def add(self, a, b, c):
             pass
+
     t = faket()
     for fctx in fctxs:
         if fctx.node() not in repo:
@@ -89,13 +96,16 @@
 
     return r
 
+
 def debugindex(orig, ui, repo, file_=None, **opts):
     """dump the contents of an index file"""
-    if (opts.get(r'changelog') or
-        opts.get(r'manifest') or
-        opts.get(r'dir') or
-        not shallowutil.isenabled(repo) or
-        not repo.shallowmatch(file_)):
+    if (
+        opts.get(r'changelog')
+        or opts.get(r'manifest')
+        or opts.get(r'dir')
+        or not shallowutil.isenabled(repo)
+        or not repo.shallowmatch(file_)
+    ):
         return orig(ui, repo, file_, **opts)
 
     r = buildtemprevlog(repo, file_)
@@ -112,12 +122,20 @@
         basehdr = '  base'
 
     if format == 0:
-        ui.write(("   rev    offset  length " + basehdr + " linkrev"
-                  " nodeid       p1           p2\n"))
+        ui.write(
+            (
+                "   rev    offset  length " + basehdr + " linkrev"
+                " nodeid       p1           p2\n"
+            )
+        )
     elif format == 1:
-        ui.write(("   rev flag   offset   length"
-                  "     size " + basehdr + "   link     p1     p2"
-                  "       nodeid\n"))
+        ui.write(
+            (
+                "   rev flag   offset   length"
+                "     size " + basehdr + "   link     p1     p2"
+                "       nodeid\n"
+            )
+        )
 
     for i in r:
         node = r.node(i)
@@ -130,14 +148,37 @@
                 pp = r.parents(node)
             except Exception:
                 pp = [nullid, nullid]
-            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
-                    i, r.start(i), r.length(i), base, r.linkrev(i),
-                    short(node), short(pp[0]), short(pp[1])))
+            ui.write(
+                "% 6d % 9d % 7d % 6d % 7d %s %s %s\n"
+                % (
+                    i,
+                    r.start(i),
+                    r.length(i),
+                    base,
+                    r.linkrev(i),
+                    short(node),
+                    short(pp[0]),
+                    short(pp[1]),
+                )
+            )
         elif format == 1:
             pr = r.parentrevs(i)
-            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
-                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
-                    base, r.linkrev(i), pr[0], pr[1], short(node)))
+            ui.write(
+                "% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n"
+                % (
+                    i,
+                    r.flags(i),
+                    r.start(i),
+                    r.length(i),
+                    r.rawsize(i),
+                    base,
+                    r.linkrev(i),
+                    pr[0],
+                    pr[1],
+                    short(node),
+                )
+            )
+
 
 def debugindexdot(orig, ui, repo, file_):
     """dump an index DAG as a graphviz dot file"""
@@ -146,7 +187,7 @@
 
     r = buildtemprevlog(repo, os.path.basename(file_)[:-2])
 
-    ui.write(("digraph G {\n"))
+    ui.write("digraph G {\n")
     for i in r:
         node = r.node(i)
         pp = r.parents(node)
@@ -155,6 +196,7 @@
             ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
     ui.write("}\n")
 
+
 def verifyremotefilelog(ui, path, **opts):
     decompress = opts.get(r'decompress')
 
@@ -167,14 +209,18 @@
             for p1, p2, linknode, copyfrom in mapping.itervalues():
                 if linknode == nullid:
                     actualpath = os.path.relpath(root, path)
-                    key = fileserverclient.getcachekey("reponame", actualpath,
-                                                       file)
-                    ui.status("%s %s\n" % (key, os.path.relpath(filepath,
-                                                                path)))
+                    key = fileserverclient.getcachekey(
+                        "reponame", actualpath, file
+                    )
+                    ui.status(
+                        "%s %s\n" % (key, os.path.relpath(filepath, path))
+                    )
+
 
 def _decompressblob(raw):
     return zlib.decompress(raw)
 
+
 def parsefileblob(path, decompress):
     f = open(path, "rb")
     try:
@@ -194,24 +240,25 @@
     while start < len(raw):
         divider = raw.index('\0', start + 80)
 
-        currentnode = raw[start:(start + 20)]
+        currentnode = raw[start : (start + 20)]
         if not firstnode:
             firstnode = currentnode
 
-        p1 = raw[(start + 20):(start + 40)]
-        p2 = raw[(start + 40):(start + 60)]
-        linknode = raw[(start + 60):(start + 80)]
-        copyfrom = raw[(start + 80):divider]
+        p1 = raw[(start + 20) : (start + 40)]
+        p2 = raw[(start + 40) : (start + 60)]
+        linknode = raw[(start + 60) : (start + 80)]
+        copyfrom = raw[(start + 80) : divider]
 
         mapping[currentnode] = (p1, p2, linknode, copyfrom)
         start = divider + 1
 
     return size, firstnode, mapping
 
+
 def debugdatapack(ui, *paths, **opts):
     for path in paths:
         if '.data' in path:
-            path = path[:path.index('.data')]
+            path = path[: path.index('.data')]
         ui.write("%s:\n" % path)
         dpack = datapack.datapack(path)
         node = opts.get(r'node')
@@ -230,6 +277,7 @@
         lastfilename = None
         totaldeltasize = 0
         totalblobsize = 0
+
         def printtotals():
             if lastfilename is not None:
                 ui.write("\n")
@@ -238,14 +286,18 @@
             difference = totalblobsize - totaldeltasize
             deltastr = "%0.1f%% %s" % (
                 (100.0 * abs(difference) / totalblobsize),
-                ("smaller" if difference > 0 else "bigger"))
+                ("smaller" if difference > 0 else "bigger"),
+            )
 
-            ui.write(("Total:%s%s  %s (%s)\n") % (
-                "".ljust(2 * hashlen - len("Total:")),
-                ('%d' % totaldeltasize).ljust(12),
-                ('%d' % totalblobsize).ljust(9),
-                deltastr
-            ))
+            ui.write(
+                "Total:%s%s  %s (%s)\n"
+                % (
+                    "".ljust(2 * hashlen - len("Total:")),
+                    ('%d' % totaldeltasize).ljust(12),
+                    ('%d' % totalblobsize).ljust(9),
+                    deltastr,
+                )
+            )
 
         bases = {}
         nodes = set()
@@ -260,11 +312,15 @@
                 printtotals()
                 name = '(empty name)' if filename == '' else filename
                 ui.write("%s:\n" % name)
-                ui.write("%s%s%s%s\n" % (
-                    "Node".ljust(hashlen),
-                    "Delta Base".ljust(hashlen),
-                    "Delta Length".ljust(14),
-                    "Blob Size".ljust(9)))
+                ui.write(
+                    "%s%s%s%s\n"
+                    % (
+                        "Node".ljust(hashlen),
+                        "Delta Base".ljust(hashlen),
+                        "Delta Length".ljust(14),
+                        "Blob Size".ljust(9),
+                    )
+                )
                 lastfilename = filename
                 totalblobsize = 0
                 totaldeltasize = 0
@@ -277,11 +333,15 @@
                 totalblobsize += blobsize
             else:
                 blobsize = "(missing)"
-            ui.write("%s  %s  %s%s\n" % (
-                hashformatter(node),
-                hashformatter(deltabase),
-                ('%d' % deltalen).ljust(14),
-                pycompat.bytestr(blobsize)))
+            ui.write(
+                "%s  %s  %s%s\n"
+                % (
+                    hashformatter(node),
+                    hashformatter(deltabase),
+                    ('%d' % deltalen).ljust(14),
+                    pycompat.bytestr(blobsize),
+                )
+            )
 
         if filename is not None:
             printtotals()
@@ -291,6 +351,7 @@
             ui.warn(("%d failures\n" % failures))
             return 1
 
+
 def _sanitycheck(ui, nodes, bases):
     """
     Does some basic sanity checking on a packfile with ``nodes`` and ``bases`` (a
@@ -307,14 +368,22 @@
 
         while deltabase != nullid:
             if deltabase not in nodes:
-                ui.warn(("Bad entry: %s has an unknown deltabase (%s)\n" %
-                        (short(node), short(deltabase))))
+                ui.warn(
+                    (
+                        "Bad entry: %s has an unknown deltabase (%s)\n"
+                        % (short(node), short(deltabase))
+                    )
+                )
                 failures += 1
                 break
 
             if deltabase in seen:
-                ui.warn(("Bad entry: %s has a cycle (at %s)\n" %
-                        (short(node), short(deltabase))))
+                ui.warn(
+                    (
+                        "Bad entry: %s has a cycle (at %s)\n"
+                        % (short(node), short(deltabase))
+                    )
+                )
                 failures += 1
                 break
 
@@ -326,6 +395,7 @@
         bases[node] = nullid
     return failures
 
+
 def dumpdeltachain(ui, deltachain, **opts):
     hashformatter = hex
     hashlen = 40
@@ -335,22 +405,30 @@
         if filename != lastfilename:
             ui.write("\n%s\n" % filename)
             lastfilename = filename
-        ui.write("%s  %s  %s  %s\n" % (
-            "Node".ljust(hashlen),
-            "Delta Base".ljust(hashlen),
-            "Delta SHA1".ljust(hashlen),
-            "Delta Length".ljust(6),
-        ))
+        ui.write(
+            "%s  %s  %s  %s\n"
+            % (
+                "Node".ljust(hashlen),
+                "Delta Base".ljust(hashlen),
+                "Delta SHA1".ljust(hashlen),
+                "Delta Length".ljust(6),
+            )
+        )
 
-        ui.write("%s  %s  %s  %d\n" % (
-            hashformatter(node),
-            hashformatter(deltabasenode),
-            nodemod.hex(hashlib.sha1(delta).digest()),
-            len(delta)))
+        ui.write(
+            "%s  %s  %s  %d\n"
+            % (
+                hashformatter(node),
+                hashformatter(deltabasenode),
+                nodemod.hex(hashlib.sha1(delta).digest()),
+                len(delta),
+            )
+        )
+
 
 def debughistorypack(ui, path):
     if '.hist' in path:
-        path = path[:path.index('.hist')]
+        path = path[: path.index('.hist')]
     hpack = historypack.historypack(path)
 
     lastfilename = None
@@ -358,21 +436,41 @@
         filename, node, p1node, p2node, linknode, copyfrom = entry
         if filename != lastfilename:
             ui.write("\n%s\n" % filename)
-            ui.write("%s%s%s%s%s\n" % (
-                "Node".ljust(14),
-                "P1 Node".ljust(14),
-                "P2 Node".ljust(14),
-                "Link Node".ljust(14),
-                "Copy From"))
+            ui.write(
+                "%s%s%s%s%s\n"
+                % (
+                    "Node".ljust(14),
+                    "P1 Node".ljust(14),
+                    "P2 Node".ljust(14),
+                    "Link Node".ljust(14),
+                    "Copy From",
+                )
+            )
             lastfilename = filename
-        ui.write("%s  %s  %s  %s  %s\n" % (short(node), short(p1node),
-            short(p2node), short(linknode), copyfrom))
+        ui.write(
+            "%s  %s  %s  %s  %s\n"
+            % (
+                short(node),
+                short(p1node),
+                short(p2node),
+                short(linknode),
+                copyfrom,
+            )
+        )
+
 
 def debugwaitonrepack(repo):
     with lockmod.lock(repack.repacklockvfs(repo), "repacklock", timeout=-1):
         return
 
+
 def debugwaitonprefetch(repo):
-    with repo._lock(repo.svfs, "prefetchlock", True, None,
-                         None, _('prefetching in %s') % repo.origroot):
+    with repo._lock(
+        repo.svfs,
+        "prefetchlock",
+        True,
+        None,
+        None,
+        _('prefetching in %s') % repo.origroot,
+    ):
         pass