# HG changeset patch
# User Mikael Berthe
# Date 1403972889 -7200
# Node ID 55098d552ae2d4e29cc13b4be636f644ebf663d2
# Parent  a8aa9e54bce4b1749556ede2e6bfa0d23544d67e
s/createSizeHash/initialCleanup/

diff -r a8aa9e54bce4 -r 55098d552ae2 goduf.go
--- a/goduf.go	Sat Jun 28 18:21:33 2014 +0200
+++ b/goduf.go	Sat Jun 28 18:28:09 2014 +0200
@@ -410,7 +410,8 @@
 	return
 }
 
-func (data *dataT) createSizeHash() (hardLinkCount, uniqueSizeCount int) {
+// initialCleanup() removes files with unique size as well as hard links
+func (data *dataT) initialCleanup() (hardLinkCount, uniqueSizeCount int) {
 	for s, sizeGroup := range data.sizeGroups {
 		if len(sizeGroup.files) < 2 {
 			delete(data.sizeGroups, s)
@@ -421,11 +422,11 @@
 	var hardlinksFound bool
 
 	// Check for hardlinks
-	// TODO: what about symlinks?
 	// Remove unique dev/inodes
 	// Instead of this loop, another way would be to use the field
 	// "Unique" of the fileObj to mark them to be discarded
 	// and remove them all at the end.
+	// TODO: what about symlinks?
 	for {
 		if !OSHasInodes() {
 			break
@@ -562,7 +563,7 @@
 
 	// Remove unique sizes
 	myLog.Println(1, "* Removing files with unique size, sorting file lists...")
-	hardLinkCount, uniqueSizeCount := data.createSizeHash()
+	hardLinkCount, uniqueSizeCount := data.initialCleanup()
 	if verbose {
 		myLog.Printf(2, "  Dropped %d files with unique size\n",
 			uniqueSizeCount)
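For reference, a standalone sketch of the pruning pattern the renamed function begins with: Go permits deleting map entries from inside a range loop over the same map, so size groups holding fewer than two files can be dropped in place. The dataT/sizeGroup/fileObj declarations below are simplified stand-ins for illustration, not goduf's real types, and this reduced version omits the hard-link pass and the hardLinkCount result.

package main

import "fmt"

// Simplified stand-ins for goduf's types, for illustration only.
type fileObj struct{ path string }

type sizeGroup struct{ files []*fileObj }

type dataT struct {
	sizeGroups map[int64]*sizeGroup
}

// initialCleanup drops every size group that cannot contain duplicates,
// i.e. groups with fewer than two files. The Go spec allows deleting
// map entries while ranging over the map, so removal happens in place.
// (The real method also handles hard links; that part is omitted here.)
func (data *dataT) initialCleanup() (uniqueSizeCount int) {
	for s, sg := range data.sizeGroups {
		if len(sg.files) < 2 {
			uniqueSizeCount += len(sg.files)
			delete(data.sizeGroups, s)
		}
	}
	return
}

func main() {
	data := &dataT{sizeGroups: map[int64]*sizeGroup{
		42:  {files: []*fileObj{{path: "a"}, {path: "b"}}},
		512: {files: []*fileObj{{path: "c"}}}, // unique size, dropped
	}}
	fmt.Println("dropped:", data.initialCleanup(),
		"groups left:", len(data.sizeGroups))
}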
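Likewise, a minimal sketch of the mark-and-sweep alternative the moved comment block describes: flag each file to discard via a "Unique"-style field during the dev/inode check, then compact every file list in a single pass at the end. The field and method names here are assumptions for illustration only; goduf's actual structs differ.

package main

import "fmt"

type fileObj struct {
	path   string
	unique bool // set during the dev/inode check to mark a discardable file
}

type sizeGroup struct {
	files []*fileObj
}

// sweep compacts the file list in one pass, removing every file previously
// marked unique, and reports how many files were dropped.
func (sg *sizeGroup) sweep() (removed int) {
	kept := sg.files[:0] // reuse the backing array while filtering
	for _, f := range sg.files {
		if f.unique {
			removed++
			continue
		}
		kept = append(kept, f)
	}
	sg.files = kept
	return
}

func main() {
	sg := &sizeGroup{files: []*fileObj{
		{path: "a"},
		{path: "b", unique: true}, // e.g. a hard link already counted once
		{path: "c"},
	}}
	fmt.Println("removed:", sg.sweep(), "left:", len(sg.files))
}

Compared with deleting inside the detection loop, this defers all mutation to a single compaction step, which avoids re-slicing the list while it is still being scanned.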