about summary refs log tree commit diff
path: root/checkem
diff options
context:
space:
mode:
author    Tom Ryder <tom@sanctum.geek.nz>  2017-08-11 19:29:27 +1200
committer Tom Ryder <tom@sanctum.geek.nz>  2017-08-11 19:30:09 +1200
commit    ee27d1a3d10f8f8e054fcc5f67d3ddcb7e304823 (patch)
tree      1b7b41fb7d96a3ed5ef85da0b042d9949bb158a6 /checkem
parent    Sort duplicate blocks by filesize ascending (diff)
download  checkem-ee27d1a3d10f8f8e054fcc5f67d3ddcb7e304823.tar.gz (sig)
          checkem-ee27d1a3d10f8f8e054fcc5f67d3ddcb7e304823.zip
Skip, don't die, on unreadable file (tag: v2.12)
Diffstat (limited to 'checkem')
-rwxr-xr-x  checkem  24
1 file changed, 14 insertions, 10 deletions
diff --git a/checkem b/checkem
index c9fdc32..343743f 100755
--- a/checkem
+++ b/checkem
@@ -25,7 +25,7 @@ use File::Find;
use Digest;
# Version number to make Perl::Critic happy
-our $VERSION = 2.11;
+our $VERSION = 2.12;
# If no arguments, work with the current working directory
if ( !@ARGV ) {
@@ -91,13 +91,13 @@ find {
# If there's more than one filename of any of the sizes, look for hard links,
# checksum them if not linked, and push them into a sums table
my %sums;
-for my $fs ( grep { @{$_} > 1 } values %sizes ) {
+SIZE: for my $fs ( grep { @{$_} > 1 } values %sizes ) {
# Keep a temporary table of inodes to catch hard links
my %inos;
# Iterate through each file in the list
- for my $f ( @{$fs} ) {
+ FILE: for my $f ( @{$fs} ) {
# Catch hard links on compliant systems by keeping a dev/inode hash
my ( $dev, $ino ) = @{$f}{qw(dev ino)};
@@ -108,19 +108,23 @@ for my $fs ( grep { @{$_} > 1 } values %sizes ) {
# Files still the same size and not hard linked, group by digest;
# create the digest object if it isn't already defined
- open my $fh, '<', $f->{name}
- or croak 'Failed to open file';
- binmode $fh;
- $dig->addfile($fh);
- push @{ $sums{ $dig->digest() } }, $f;
- close $fh
- or croak 'Failed to close file';
+ if ( open my $fh, '<', $f->{name} ) {
+ binmode $fh;
+ $dig->addfile($fh);
+ push @{ $sums{ $dig->digest() } }, $f;
+ close $fh
+ or carp 'Failed to close file';
+ }
+ else {
+ carp 'Failed to open file';
+ }
}
}
# Print the groups of matched files (more than one share a checksum in the
# final table); sort the blocks by the filesize, and the files within each
# block by name
+GROUP:
for my $group (
sort { $a->[0]{size} <=> $b->[0]{size} }
grep { @{$_} > 1 } values %sums