change default hash alg to sha1
parent ea547d83c5
commit b41a4afa8b
1 changed file with 6 additions and 5 deletions
@@ -15,7 +15,7 @@ Options:
 -b <blocksize>, --block-size=<blocksize>  size of block used when reading
                                           file's content [default: 4096]
 -d <hashalgs>, --digest-algs=<hashalgs>   secure hash algorithm comma separated
-                                          list [default: md5]
+                                          list [default: sha1]
                                           note that multiple hashes will slow
                                           down sweeper
 -a <action>, --action=<action>            action on duplicate files (pprint,
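Since the usage text above is docopt-style, the [default: ...] annotation is what actually feeds the program its default value, so this one-line edit is all the change needs. A minimal sketch of that mechanism, assuming the third-party docopt package and an abridged usage string:

    """Usage: sweeper.py [options] [<directory>...]

    Options:
      -d <hashalgs>, --digest-algs=<hashalgs>  secure hash algorithm comma separated
                                               list [default: sha1]
    """
    from docopt import docopt  # third-party package, assumed installed

    args = docopt(__doc__)
    # With no -d given on the command line, docopt falls back to the
    # [default: sha1] annotation parsed out of the option description.
    hashalgs = args['--digest-algs'].split(',')
    print(hashalgs)  # ['sha1']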
@@ -71,6 +71,7 @@ from collections import defaultdict
 from functools import partial
 
 
+DEF_HASHALGS = ['sha1']
 # some differences in python versions
 # we prefer iter methods
 if sys.version_info[0] == 3:
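One caveat with the new constant: DEF_HASHALGS is a list, and Python evaluates default argument values once at definition time, so every function defaulting to it shares the same object. The constant therefore has to be treated as read-only. A small illustration (dups_demo is a hypothetical stand-in, not sweeper's code):

    DEF_HASHALGS = ['sha1']

    def dups_demo(hashalgs=DEF_HASHALGS):  # hypothetical stand-in
        return list(hashalgs)

    print(dups_demo())          # ['sha1']
    DEF_HASHALGS.append('md5')  # mutating the shared list...
    print(dups_demo())          # ['sha1', 'md5'] -- the default changed too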
@@ -169,7 +170,7 @@ def _fbequal(fpath1, fpath2):
     return False
 
 
-def file_dups(topdirs=['./'], hashalgs=['md5'], block_size=4096, verbose=False,
+def file_dups(topdirs=['./'], hashalgs=DEF_HASHALGS, block_size=4096, verbose=False,
               safe_mode=False):
     """Find duplicate files in directory list. Return directory
     with keys equal to file hash value and value as list of
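The hashalgs and block_size parameters imply one pass over each file, with every requested digest updated block by block. A sketch of that technique, assuming hashlib.new() accepts each listed algorithm name (hash_file is an illustrative name, not sweeper's API):

    import hashlib

    def hash_file(fpath, hashalgs=('sha1',), block_size=4096):
        # One pass over the file; every digest consumes each block.
        digests = [hashlib.new(alg) for alg in hashalgs]
        with open(fpath, 'rb') as f:
            for block in iter(lambda: f.read(block_size), b''):
                for d in digests:
                    d.update(block)
        # A tuple of hex digests can serve as a dictionary key,
        # matching the "hash tuple" wording in the docstrings below.
        return tuple(d.hexdigest() for d in digests)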
@@ -237,7 +238,7 @@ def file_dups(topdirs=['./'], hashalgs=['md5'], block_size=4096, verbose=False,
     return result
 
 
-def iter_file_dups(topdirs=['./'], hashalgs=['md5'], block_size=4096,
+def iter_file_dups(topdirs=['./'], hashalgs=DEF_HASHALGS, block_size=4096,
                    safe_mode=False):
     """Find duplicate files in directory list.
     Yield tuple of file path, hash tuple and list of duplicate files
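Going by that docstring, a caller consumes the iterator roughly as below; the import path and the exact yield shape are assumptions read off the docstring, not verified API:

    from sweeper import iter_file_dups  # module path assumed

    for fpath, hashes, dups in iter_file_dups(topdirs=['./'],
                                              hashalgs=['sha1', 'md5']):
        print('%s %s duplicates: %s' % (fpath, hashes, dups))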
@@ -294,7 +295,7 @@ def _extract_files_for_action(topdirs, hashalgs, block_size, keep_prefix,
         yield (files, result)
 
 
-def rm_file_dups(topdirs=['./'], hashalgs=['md5'], block_size=4096,
+def rm_file_dups(topdirs=['./'], hashalgs=DEF_HASHALGS, block_size=4096,
                  simulate=False, keep_prefix=None, verbose=False,
                  safe_mode=False):
     """Remove duplicate files found in specified directory list.
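The simulate and keep_prefix parameters suggest a dry-run mode plus a rule for which copy survives. A hedged re-implementation sketch of such a removal loop (rm_dups_demo and its input shape are hypothetical, not sweeper's code):

    import os

    def rm_dups_demo(dup_groups, simulate=False, keep_prefix=None):
        # dup_groups: iterable of lists of paths to identical files (assumed shape).
        for files in dup_groups:
            # Keep the first path matching keep_prefix, else the first path.
            keep = next((f for f in files
                         if keep_prefix and f.startswith(keep_prefix)), files[0])
            for f in files:
                if f == keep:
                    continue
                if simulate:
                    print('would remove %s' % f)
                else:
                    os.remove(f)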
@@ -318,7 +319,7 @@ def rm_file_dups(topdirs=['./'], hashalgs=['md5'], block_size=4096,
             os.remove(f)
 
 
-def mv_file_dups(topdirs=['./'], hashalgs=['md5'], block_size=4096,
+def mv_file_dups(topdirs=['./'], hashalgs=DEF_HASHALGS, block_size=4096,
                  dest_dir='dups', simulate=False, keep_prefix=None,
                  verbose=False, safe_mode=False):
     """Move duplicate files found in specified directory list.