From b41a4afa8b0686fbf42f85129be1b5918dbd75d1 Mon Sep 17 00:00:00 2001
From: darko-poljak
Date: Sat, 8 Feb 2014 20:27:30 +0100
Subject: [PATCH] change default hash alg to sha1

---
 sweeper/sweeper.py | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/sweeper/sweeper.py b/sweeper/sweeper.py
index ad843b7..79b279d 100644
--- a/sweeper/sweeper.py
+++ b/sweeper/sweeper.py
@@ -15,7 +15,7 @@ Options:
   -b , --block-size=   size of block used when reading file's
                        content [default: 4096]
   -d , --digest-algs=  secure hash algorithm comma separated
-                       list [default: md5]
+                       list [default: sha1]
                        note that multiple hashes will slow down
                        sweeper
   -a , --action=       action on duplicate files (pprint,
@@ -71,6 +71,7 @@ from collections import defaultdict
 from functools import partial
 
 
+DEF_HASHALGS = ['sha1']
 # some differences in python versions
 # we prefer iter methods
 if sys.version_info[0] == 3:
@@ -169,7 +170,7 @@ def _fbequal(fpath1, fpath2):
     return False
 
 
-def file_dups(topdirs=['./'], hashalgs=['md5'], block_size=4096, verbose=False,
+def file_dups(topdirs=['./'], hashalgs=DEF_HASHALGS, block_size=4096, verbose=False,
               safe_mode=False):
     """Find duplicate files in directory list.
     Return directory with keys equal to file hash value and value as list of
@@ -237,7 +238,7 @@ def file_dups(topdirs=['./'], hashalgs=['md5'], block_size=4096, verbose=False,
     return result
 
 
-def iter_file_dups(topdirs=['./'], hashalgs=['md5'], block_size=4096,
+def iter_file_dups(topdirs=['./'], hashalgs=DEF_HASHALGS, block_size=4096,
                    safe_mode=False):
     """Find duplicate files in directory list.
     Yield tuple of file path, hash tuple and list of duplicate files
@@ -294,7 +295,7 @@ def _extract_files_for_action(topdirs, hashalgs, block_size, keep_prefix,
     yield (files, result)
 
 
-def rm_file_dups(topdirs=['./'], hashalgs=['md5'], block_size=4096,
+def rm_file_dups(topdirs=['./'], hashalgs=DEF_HASHALGS, block_size=4096,
                  simulate=False, keep_prefix=None, verbose=False,
                  safe_mode=False):
     """Remove duplicate files found in specified directory list.
@@ -318,7 +319,7 @@ def rm_file_dups(topdirs=['./'], hashalgs=['md5'], block_size=4096,
         os.remove(f)
 
 
-def mv_file_dups(topdirs=['./'], hashalgs=['md5'], block_size=4096,
+def mv_file_dups(topdirs=['./'], hashalgs=DEF_HASHALGS, block_size=4096,
                  dest_dir='dups', simulate=False, keep_prefix=None,
                  verbose=False, safe_mode=False):
     """Move duplicate files found in specified directory list.
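
A minimal usage sketch of the new default, assuming file_dups is importable
from the sweeper package (the module lives in sweeper/sweeper.py; the exact
import path and the sample directory are assumptions, not from the patch).
After this change, calls that omit hashalgs group files by SHA-1 instead of
MD5, and MD5 stays one explicit argument away:

    from sweeper import file_dups  # assumed import path

    # No hashalgs argument: the module-level DEF_HASHALGS = ['sha1']
    # applies, so duplicates are keyed by their SHA-1 digest.
    dups = file_dups(topdirs=['/tmp/photos'])  # hypothetical directory
    for hashval, paths in dups.items():
        print(hashval, paths)

    # The pre-patch behaviour, selected explicitly:
    dups_md5 = file_dups(topdirs=['/tmp/photos'], hashalgs=['md5'])

One caveat of the pattern the patch introduces: DEF_HASHALGS is a single
mutable list shared as the default for every function, so callers (and the
functions themselves) should not mutate the hashalgs parameter in place.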