hgext/largefiles/overrides.py
changeset 18704 d69585a5c5c0
parent 18610 46edbc49a9f2
child 18731 c2d079387b2c
--- a/hgext/largefiles/overrides.py	Fri Feb 15 21:20:24 2013 -0600
+++ b/hgext/largefiles/overrides.py	Sat Feb 09 21:07:42 2013 +0000
@@ -733,19 +733,21 @@
         repo.lfpullsource = source
         oldheads = lfutil.getcurrentheads(repo)
         result = orig(ui, repo, source, **opts)
-        # If we do not have the new largefiles for any new heads we pulled, we
-        # will run into a problem later if we try to merge or rebase with one of
-        # these heads, so cache the largefiles now directly into the system
-        # cache.
-        numcached = 0
-        heads = lfutil.getcurrentheads(repo)
-        newheads = set(heads).difference(set(oldheads))
-        if len(newheads) > 0:
-            ui.status(_("caching largefiles for %s heads\n") % len(newheads))
-        for head in newheads:
-            (cached, missing) = lfcommands.cachelfiles(ui, repo, head)
-            numcached += len(cached)
-        ui.status(_("%d largefiles cached\n") % numcached)
+        if opts.get('cache_largefiles'):
+            # If you are pulling from a remote location that is not your
+            # default location, you may want to cache largefiles for any new
+            # heads that have been pulled, so that you can easily merge or
+            # rebase with them later.
+            numcached = 0
+            heads = lfutil.getcurrentheads(repo)
+            newheads = set(heads).difference(set(oldheads))
+            if len(newheads) > 0:
+                ui.status(_("caching largefiles for %s heads\n") %
+                          len(newheads))
+            for head in newheads:
+                (cached, missing) = lfcommands.cachelfiles(ui, repo, head)
+                numcached += len(cached)
+            ui.status(_("%d largefiles cached\n") % numcached)
     if opts.get('all_largefiles'):
         revspostpull = len(repo)
         revs = []
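
Note: the hunk above only reads opts.get('cache_largefiles'); the flag itself
has to be registered on the wrapped pull command elsewhere in the extension.
The sketch below is an assumption for illustration, not the upstream change:
it mirrors the way largefiles adds --all-largefiles to pull from its setup
code, and the option wording and placement here are hypothetical.

    # Hypothetical registration of --cache-largefiles in the extension's
    # uisetup; Mercurial converts the dashed option name to the
    # 'cache_largefiles' key read by overridepull above.
    from mercurial import commands, extensions
    from mercurial.i18n import _
    from hgext.largefiles import overrides

    def uisetup(ui):
        # Wrap the core pull command and extend its option table.
        entry = extensions.wrapcommand(commands.table, 'pull',
                                       overrides.overridepull)
        pullopt = [('', 'cache-largefiles', None,
                    _('caches new largefiles for new heads'))]
        entry[1].extend(pullopt)

With the flag registered, the caching branch above is reached only when the
user asks for it explicitly, e.g. "hg pull --cache-largefiles some-remote",
instead of caching largefiles for every new head on every pull.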