Compare commits
No commits in common. "master" and "v0.1.0" have entirely different histories.
9 changed files with 88 additions and 409 deletions
.gitignore (vendored, 2 changes)

@@ -1,3 +1 @@
 *.pyc
-dist/
-*.egg-info/
AUTHORS (5 changes)

@@ -1,5 +0,0 @@
-Author:
-Darko Poljak <darko.poljak@gmail.com>
-
-Contributors:
-matthewi
README.rst (26 changes)

@@ -10,35 +10,29 @@ Print duplicates

 .. code:: python

-    from sweeper import Sweeper
-    swp = Sweeper(['images1', 'images2'])
-    dups = swp.file_dups()
+    from sweeper import file_dups
+    dups = file_dups(['images1', 'images2'])
     print(dups)

 Remove duplicate files

 .. code:: python

-    from sweeper import Sweeper
-    swp = Sweeper(['images1', 'images2'])
-    swp.rm()
+    from sweeper import file_dups
+    rm_file_dups(['images'])

 Perform custom action

 .. code:: python

-    from sweeper import Sweeper
-    swp = Sweeper(['images'])
-    for f, h, dups in swp:
-        print('encountered {} which duplicates with already found duplicate files {} with hash {}'.format(f, dups, h))
+    from sweeper import file_dups
+    for files in iter_file_dups(['images']):
+        for fname in files:
+            print('found duplicate file with name: %s' % fname)

 As script::

-    python -m sweeper/sweeper --help
+    python sweeper.py --help

-As installed console script::
-
-    sweeper --help
-
 Installation
 ============

@@ -63,5 +57,5 @@ https://github.com/darko-poljak/sweeper
 Tested With
 ===========

-Python2.7, Python3
+Python2.7.6, Python3.3.3
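A side note on the API shift visible in this README diff: master wraps the operations in a Sweeper object, while v0.1.0 exposes plain module-level functions. A minimal sketch of the master-side usage, assuming the same illustrative directory names as above::

    from sweeper import Sweeper

    # collect duplicates under two directory trees
    swp = Sweeper(['images1', 'images2'])
    dups = swp.file_dups()  # dict mapping hash tuple -> list of same-content paths
    print(dups)

    # dry run: print what rm() would delete, keeping the first copy found
    swp.rm(simulate=True)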
TODO (0 changes)
setup.py (13 changes)

@@ -1,17 +1,12 @@
 import os
 from setuptools import setup


 def read(fname):
     return open(os.path.join(os.path.dirname(__file__), fname)).read()


-import sweeper.sweeper as sw
-
-
 setup(
     name='sweeper',
-    version=sw.__version__,
+    version='0.1.0',
     author='Darko Poljak',
     author_email='darko.poljak@gmail.com',
     description='Find duplicate files and perform action.',

@@ -20,11 +15,6 @@ setup(
     url='https://github.com/darko-poljak/sweeper',
     download_url='https://github.com/darko-poljak/sweeper',
     packages=['sweeper'],
-    entry_points={
-        'console_scripts': [
-            'sweeper=sweeper.sweeper:main',
-        ],
-    },
     long_description=read('README.rst'),
     platforms="OS Independent",
     install_requires=["docopt"],

@@ -35,3 +25,4 @@ setup(
     "Topic :: Software Development :: Libraries :: Python Modules",
     ],
 )
+
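The entry_points block that exists only on the master side is what provides an installed sweeper console command (wired to sweeper.sweeper:main); v0.1.0 has no such hook, which matches the README change from an installed script to direct invocation. Roughly::

    # master, after installation
    sweeper --help

    # v0.1.0
    python sweeper.py --help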
sweeper/__init__.py (4 changes)

@@ -1,4 +1,4 @@
 from __future__ import absolute_import
-from .sweeper import Sweeper
+from .sweeper import file_dups

-__all__ = ['Sweeper']
+__all__ = ['file_dups', 'mv_file_dups', 'rm_file_dups', 'iter_file_dups']
sweeper/sweeper.py

@@ -2,96 +2,52 @@
 # Author: Darko Poljak <darko.poljak@gmail.com>
 # License: GPLv3

-"""{0} {1}
+"""Sweeper.

-Usage: {0} [options] [<directory>...]
+Usage: sweeper.py [options] [<directory>...]

 Arguments:
     <directory> directory path to scan for files

 Options:
     -h, --help                                show this screen
-    -v, --version                             show version and exit
-    -b <blocksize>, --block-size=<blocksize>  size of block used when reading
-                                              file's content [default: 4096]
-    -d <hashalgs>, --digest-algs=<hashalgs>   secure hash algorithm comma separated
-                                              list [default: sha1]
-                                              note that multiple hashes will slow
-                                              down sweeper
-    -a <action>, --action=<action>            action on duplicate files (pprint,
-                                              print, remove, move)
-                                              [default: pprint]
-                                              -remove removes duplicate files
-                                              except first or first with specified
-                                              directory prefix found
-                                              -move moves duplicate files to
-                                              duplicates directory, except first
-                                              or first with specified directory
-                                              prefix found
-                                              -print prints result dictionary where
-                                              keys are hash values and values are
-                                              list of duplicate file paths
-                                              -pprint prints sets of duplicate file
-                                              paths each in its line where sets
-                                              are separated by blank newline
+    -b <blocksize>, --block-size=<blocksize>  size of block used when reading file's
+                                              content [default: 4096]
+    -d <hashalg>, --digest-alg=<hashalg>      secure hash algorithm [default: md5]
+    -a <action>, --action=<action>            action on duplicate files
+                                              (print, remove, move) [default: print]
     -m <directory>, --move=<directory>        move duplicate files to directory
-                                              (used with move action)
-                                              [default: ./dups]
-    -k <dirprefix>, --keep=<dirprefix>        directory prefix for remove and move
-                                              actions
-    -s, --simulate                            if action is remove or move just
-                                              simulate action by printing, do not
-                                              actually perform the action
-    -V, --verbose                             print more info
-                                              note that verbosity will slow down
-                                              sweeper due to text printing and
-                                              gathering additional information
-    -S, --safe-mode                           enable safe mode: compare hash
-                                              duplicate files byte by byte too
-                                              note that it will further slow down
-                                              sweeper but will overcome hash
-                                              collisions (although this is
-                                              unlikely)
+                                              (used with move action) [default: ./dups]
 """

-from __future__ import print_function

 __author__ = 'Darko Poljak <darko.poljak@gmail.com>'
-__version__ = '0.9.0'
+__version__ = '0.1.0'
 __license__ = 'GPLv3'

-__all__ = ['Sweeper']
+__all__ = [
+    'file_dups', 'rm_file_dups', 'mv_file_dups', 'iter_file_dups'
+]

 import sys
+import hashlib
 import os
 from collections import defaultdict
-from functools import partial
-import hashlib


 # some differences in python versions
-# we prefer iter methods
 if sys.version_info[0] == 3:
+    def _do_encode(buf):
+        return buf
+
     def _dict_iter_items(d):
         return d.items()

-    def _dict_iter_keys(d):
-        return d.keys()
-
-    def _dict_iter_values(d):
-        return d.values()
 else:
+    def _do_encode(buf):
+        return buf
+
     def _dict_iter_items(d):
         return d.iteritems()

-    def _dict_iter_keys(d):
-        return d.iterkeys()
-
-    def _dict_iter_values(d):
-        return d.itervalues()
-
-    range = xrange


 def _filehash(filepath, hashalg, block_size):
     """Calculate secure hash for given file content using

@@ -101,254 +57,56 @@ def _filehash(filepath, hashalg, block_size):
     md = hashlib.new(hashalg)
     with open(filepath, "rb") as f:
         for buf in iter(lambda: f.read(block_size), b''):
-            md.update(buf)
+            md.update(_do_encode(buf))
     return md.hexdigest()


-def _uniq_list(list_):
-    return list(set(list_))
-
-
-def _fbequal(fpath1, fpath2):
-    '''Compare files byte by byte. If files are equal return True,
-    False otherwise.
-    fpath1 and fpath2 are file paths.
-    '''
-    # nested to work with 2.6
-    with open(fpath1, "rb") as f1:
-        with open(fpath2, "rb") as f2:
-            while True:
-                b1 = f1.read(1)
-                b2 = f2.read(1)
-                if b1 != b2:  # different bytes
-                    return False
-                if not b1 or not b2:  # end in one or both files
-                    break
-            if not b1 and not b2:  # end in both files, files are equal
-                return True
-            # end in one file but not in the other, files aren't equal
-            return False
-
-
-def _remap_keys_to_str(d):
-    '''Iterator that remaps dictionary keys to string in case keys are tuple
-    or list. Leave it unchanged otherwise.
-    Yields string key, value pairs.
-    '''
-    for k in _dict_iter_keys(d):
-        if isinstance(k, tuple) or isinstance(k, list):
-            key = ','.join(k)
-        else:
-            key = k
-        yield (key, d[k])
-
-
-def _gather_file_list(dirs):
-    '''Gather file paths in directory list dirs.
-    Return tuple (count, files) where count is files
-    list length and files is list of file paths in
-    specified directories.
-    '''
-    files = []
-    for dir_ in dirs:
-        for dirpath, dirnames, filenames in os.walk(dir_):
-            # replace fpath with realpath value (eliminate symbolic links)
-            files.extend([os.path.realpath(os.path.join(dirpath, fname))
-                          for fname in filenames])
-    return files
-
-
-class Sweeper(object):
-    DEF_HASHALGS = ['sha1']
-
-    def __init__(self, topdirs=['./'], hashalgs=DEF_HASHALGS,
-                 block_size=4096, verbose=False, safe_mode=False):
-        # replace dir paths with realpath value (eliminate symbolic links)
-        self.topdirs = []
-        for i in range(len(topdirs)):
-            self.topdirs.append(os.path.realpath(topdirs[i]))
-        self.hashalgs = hashalgs
-        self.block_size = block_size
-        self.verbose = verbose
-        self.safe_mode = safe_mode
-
-    # iter through file paths in files list
-    def _files_iter_from_list(self, files):
-        return (fpath for fpath in files)
-
-    # iter through file paths by os.walking
-    def _files_iter_from_disk(self):
-        for topdir in self.topdirs:
-            for dirpath, dirnames, filenames in os.walk(topdir):
-                for fname in filenames:
-                    # replace fpath with realpath value
-                    # (eliminate symbolic links)
-                    fpath = os.path.realpath(os.path.join(dirpath, fname))
-                    yield fpath
-
-    def file_dups(self):
-        """Find duplicate files in directory list. Return dictionary
-        with keys equal to file hash value and value as list of
-        file paths whose content is the same.
-        If safe_mode is true then you want to play safe: do byte
-        by byte comparison for hash duplicate files.
-        """
-        dups = defaultdict(list)
-        if self.verbose:
-            if self.safe_mode:
-                print('safe mode is on')
-            print('gathering and counting files...', end='')
-            sys.stdout.flush()
-            files = _gather_file_list(self.topdirs)
-            count = len(files)
-            current = 1
-            print(count)
-            _files_iter = partial(self._files_iter_from_list, files)
-        else:
-            _files_iter = self._files_iter_from_disk
-
-        for fpath in _files_iter():
-            if self.verbose:
-                print('\rprocessing file {0}/{1}: calc hash'.format(current,
-                                                                    count),
-                      end='')
-                sys.stdout.flush()
-            hexmds = [_filehash(fpath, h, self.block_size)
-                      for h in self.hashalgs]
-            hexmd = tuple(hexmds)
-            dup_files = dups[hexmd]
-            files_equals = False
-            if self.safe_mode:
-                if dup_files:
-                    if self.verbose:
-                        print('\rprocessing file {0}/{1}: byte cmp'.format(
-                            current, count), end='')
-                        sys.stdout.flush()
-                    for f in dup_files:
-                        if _fbequal(f, fpath):
-                            files_equals = True
-                            break
-                    if self.verbose and not files_equals:
-                        print('\nsame hash value {} but not same bytes for'
-                              ' file {} with files {}'.format(
-                                  hexmd, fpath, dup_files))
-                else:  # when list is empty in safe mode
-                    files_equals = True
-            else:
-                files_equals = True  # when safe mode is off
-            if self.verbose:
-                current += 1
-            if files_equals:
-                dups[hexmd].append(fpath)
-
-        if self.verbose:
-            print('')
-        # make result dict with unique file paths list
-        result = {}
-        for k, v in _dict_iter_items(dups):
-            uniq_v = _uniq_list(v)
-            if len(uniq_v) > 1:
-                result[k] = uniq_v
-        return result
-
-    def __iter__(self):
-        """Find duplicate files in directory list.
-        Yield tuple of file path, hash tuple and list of duplicate files
-        as soon as duplicate file is found.
-        Newly found file is not included in the list at the yield time,
-        but is appended later before next yield.
-        This means that not all duplicate files are returned with any
-        return value. Same hash value and sublist could be returned later
-        if file with same content is found.
-        If safe_mode is true then you want to play safe: do byte
-        by byte comparison for hash duplicate files.
-        """
-        # internally, file dups dict is still maintained
-        dups = defaultdict(list)
-        _files_iter = self._files_iter_from_disk
-
-        for fpath in _files_iter():
-            hexmds = [_filehash(fpath, h, self.block_size)
-                      for h in self.hashalgs]
-            hexmd = tuple(hexmds)
-            dup_files = dups[hexmd]
-            # there were dup list elements (used for yield)
-            if self.safe_mode and dup_files:
-                # compare only with first file in dup_files
-                # all files in dup_files list are already content equal
-                files_equals = _fbequal(dup_files[0], fpath)
-            else:  # when list is empty in safe mode or when safe mode is off
-                files_equals = True
-            if files_equals:
-                # yield only if current dup files list isn't empty
-                if dup_files:
-                    yield (fpath, hexmd, dups[hexmd])
-                # finally append newly found file to dup list
-                dups[hexmd].append(fpath)
-
-    def _extract_files_for_action(self, keep_prefix):
-        dups = self.file_dups()
-        for files in _dict_iter_values(dups):
-            found = False
-            if keep_prefix:
-                result = []
-                for f in files:
-                    if f.startswith(keep_prefix) and not found:
-                        found = True
-                    else:
-                        result.append(f)
-            if not found:
-                result = list(files)[1:]
-            yield (files, result)
-
-    def _do_action(self, simulate, keep_prefix, action, action_str):
-        for dups, extracted in self._extract_files_for_action(keep_prefix):
-            if simulate or self.verbose:
-                print('found duplicates: \n{}'.format(dups))
-            for f in extracted:
-                if simulate or self.verbose:
-                    print(action_str.format(f))
-                if not simulate:
-                    action(f)
-
-    def rm(self, simulate=False, keep_prefix=None):
-        """Remove duplicate files found in specified directory list.
-        If keep_prefix is specified then first file with that path
-        prefix found is kept in the original directory.
-        Otherwise first file in list is kept in the original directory.
-        If simulate is True then only print the action, do not actually
-        perform it.
-        If safe_mode is true then do byte by byte comparison for
-        hash duplicate files.
-        """
-        self._do_action(simulate, keep_prefix, os.remove, 'rm {}')
-
-    def mv(self, dest_dir='dups', simulate=False, keep_prefix=None):
-        """Move duplicate files found in specified directory list.
-        If keep_prefix is specified then first file with that path
-        prefix found is kept in the original directory.
-        Otherwise first file in list is kept in the original directory.
-        If simulate is True then only print the action, do not actually
-        perform it.
-        If safe_mode is true then do byte by byte comparison for
-        hash duplicate files.
-        """
-        import shutil
-
-        if not os.path.exists(dest_dir):
-            if simulate:
-                print('mkdir {}'.format(dest_dir))
-            else:
-                os.mkdir(dest_dir)
-        elif not os.path.isdir(dest_dir):
-            errmsg = '{} is not a directory'.format(dest_dir)
-            if simulate:
-                print('would raise:', errmsg)
-            else:
-                raise OSError(errmsg)
-        self._do_action(simulate, keep_prefix,
-                        partial(shutil.move, dst=dest_dir),
-                        'mv {0} to ' + dest_dir)
+def file_dups(topdirs=['./'], hashalg='md5', block_size=4096):
+    """Find duplicate files in directory list. Return dictionary
+    with keys equal to file hash value and value as list of
+    file paths whose content is the same.
+    """
+    dups = defaultdict(list)
+    for topdir in topdirs:
+        for dirpath, dirnames, filenames in os.walk(topdir):
+            for fname in filenames:
+                fpath = os.path.join(dirpath, fname)
+                hexmd = _filehash(fpath, hashalg, block_size)
+                dups[hexmd].append(fpath)
+    result = {k: v for k, v in _dict_iter_items(dups) if len(v) > 1}
+    return result
+
+
+def rm_file_dups(topdirs=['./'], hashalg='md5', block_size=4096):
+    """Remove duplicate files found in specified directory list.
+    First file in list is kept.
+    """
+    for files in do_with_file_dups(topdirs, hashalg, block_size):
+        for f in files:
+            os.remove(f)
+
+
+def mv_file_dups(topdirs=['./'], hashalg='md5', block_size=4096,
+                 dest_dir='dups'):
+    """Move duplicate files found in specified directory list.
+    First file in list is kept in the original directory.
+    """
+    if not os.path.exists(dest_dir):
+        os.mkdir(dest_dir)
+    if not os.path.isdir(dest_dir):
+        raise OSError('%s is not a directory' % dest_dir)
+    import shutil
+    for files in do_with_file_dups(topdirs, hashalg, block_size):
+        for i, f in enumerate(files):
+            if i > 0:
+                shutil.move(f, dest_dir)
+
+
+def iter_file_dups(topdirs=['./'], hashalg='md5', block_size=4096):
+    """Yield list of duplicate files when found in specified directory list.
+    """
+    dups = file_dups(topdirs, hashalg, block_size)
+    for fpaths in dups.itervalues():
+        yield fpaths


 def main():

@@ -357,58 +115,32 @@ def main():
     import json
     from docopt import docopt

-    args = docopt(__doc__.format(sys.argv[0], __version__),
-                  version=" ".join(('sweeper', __version__)))
+    args = docopt(__doc__)

     topdirs = args['<directory>']
     if not topdirs:
         topdirs = ['./']

     action = args['--action']
-    verbose = args['--verbose']

-    # set block size as int
     try:
         bs = int(args['--block-size'])
         args['--block-size'] = bs
     except ValueError:
-        print('Invalid block size "{}"'.format(args['--block-size']))
+        print('Invalid block size "%s"' % args['--block-size'])
         sys.exit(1)
-    hashalgs = args['--digest-algs'].split(',')
-    hashalgs_uniq = _uniq_list(hashalgs)
-    if len(hashalgs) != len(hashalgs_uniq):
-        print('Duplicate hash algorithms specified')
-        sys.exit(1)
-    block_size = args['--block-size']
-    simulate = args['--simulate']
-    keep_prefix = args['--keep']
-    dest_dir = args['--move']
-    safe_mode = args['--safe-mode']

-    sweeper = Sweeper(topdirs=topdirs, hashalgs=hashalgs,
-                      block_size=block_size, verbose=verbose,
-                      safe_mode=safe_mode)
-    if action == 'print' or action == 'pprint':
-        dups = sweeper.file_dups()
-        # defaultdict(list) -> dict
-        spam = dict(dups)
-        if spam:
-            if action == 'pprint':
-                for _, fpaths in _dict_iter_items(spam):
-                    for path in fpaths:
-                        print(path)
-                    if fpaths:
-                        print('')
-            else:
-                print(json.dumps({k: v for k, v in _remap_keys_to_str(spam)},
-                                 indent=4))
+    if action == 'print':
+        dups = file_dups(topdirs, args['--digest-alg'], args['--block-size'])
+        print(json.dumps(dict(dups), indent=4))
     elif action == 'move':
-        sweeper.mv(dest_dir, simulate, keep_prefix)
+        mv_file_dups(topdirs, args['--digest-alg'], args['--block-size'],
+                     args['--move'])
     elif action == 'remove':
-        sweeper.rm(simulate, keep_prefix)
+        rm_file_dups(topdirs, args['--digest-alg'], args['--block-size'])
     else:
-        print('Invalid action "{}"'.format(action))
+        print('Invalid action "%s"' % action)


+# if used as script call main function
 if __name__ == '__main__':
     main()
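To see how the master-side options above fit together, here is a hypothetical invocation (directory names are illustrative) that combines multiple digest algorithms, a keep prefix, verbosity, and a simulated move::

    python -m sweeper.sweeper -d sha1,md5 -a move -m ./dups -k ./photos/keep -s -V ./photos ./backup

With -s the move is only printed, not performed; dropping it moves every duplicate except the kept copy into ./dups.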
BIN  test/__init__.pyc (new file)
Binary file not shown.
test/test_sweeper.py

@@ -3,7 +3,7 @@
 # License: GPLv3

 import unittest
-from sweeper import Sweeper
+from sweeper import file_dups
 import os

 mydir = os.path.dirname(os.path.realpath(__file__))

@@ -11,8 +11,7 @@ mydir = os.path.dirname(os.path.realpath(__file__))

 class TestSweeper(unittest.TestCase):
     def test_file_dups_dups(self):
-        swp = Sweeper(topdirs=[os.path.join(mydir, 'testfiles_dups')])
-        dups = swp.file_dups()
+        dups = file_dups([os.path.join(mydir, 'testfiles_dups')], 'md5')
         dups_exist = False
         for h, flist in dups.items():
             if len(flist) > 1:

@@ -20,40 +19,10 @@ class TestSweeper(unittest.TestCase):
         self.assertTrue(dups_exist)

     def test_file_dups_nodups(self):
-        swp = Sweeper(topdirs=[os.path.join(mydir, 'testfiles_nodups')])
-        dups = swp.file_dups()
+        dups = file_dups([os.path.join(mydir, 'testfiles_nodups')], 'md5')
         for h, flist in dups.items():
             self.assertTrue(len(flist) == 1)

-    # does not actually test safe_mode, we would need to find
-    # hash collision
-    def test_file_dups_safe_mode(self):
-        swp = Sweeper(topdirs=[os.path.join(mydir, 'testfiles_dups')],
-                      safe_mode=True)
-        dups = swp.file_dups()
-        for h, flist in dups.items():
-            if len(flist) > 1:
-                dups_exist = True
-        self.assertTrue(dups_exist)
-
-    def test_iter_file_dups_dups(self):
-        swp = Sweeper(topdirs=[os.path.join(mydir, 'testfiles_dups')])
-        dups_exist = False
-        for x in swp:
-            dups_exist = True
-            filepath, h, dups = x
-            self.assertNotIn(filepath, dups)
-            self.assertTrue(len(dups) > 0)
-        self.assertTrue(dups_exist)
-
-    def test_iter_file_dups_nodups(self):
-        swp = Sweeper([os.path.join(mydir, 'testfiles_nodups')])
-        dups_exist = False
-        for x in swp:
-            dups_exist = True
-            break
-        self.assertFalse(dups_exist)
-

 if __name__ == '__main__':
     unittest.main()
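As a usage note, both sides keep the standard unittest entry point, so the suite can be run directly or via discovery (the test package name is taken from the layout above)::

    python -m unittest discover test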