From ce3de17e70f9e6c0e5dc30c9879a5e8d43b19b9c Mon Sep 17 00:00:00 2001
From: toonn
Date: Mon, 23 Dec 2019 23:23:38 +0100
Subject: Add duplicate filter to filter_stack

---
 ranger/core/filter_stack.py | 41 +++++++++++++++++++++++++++++++++++++++++
 1 file changed, 41 insertions(+)

diff --git a/ranger/core/filter_stack.py b/ranger/core/filter_stack.py
index 9b4d2779..a479b6ed 100644
--- a/ranger/core/filter_stack.py
+++ b/ranger/core/filter_stack.py
@@ -98,6 +98,47 @@ class HashFilter(BaseFilter, FileManagerAware):
         return "<Filter: hash {}>".format(self.filepath)
 
 
+@stack_filter("duplicate")
+class DuplicateFilter(BaseFilter, FileManagerAware):
+    def __init__(self, _):
+        self.duplicates = self.get_duplicates(self.fm.thisdir.files_all)
+
+    def __call__(self, fobj):
+        return fobj in self.duplicates
+
+    def __str__(self):
+        return "<Filter: duplicates>"
+
+    def get_duplicates(self, fsobjects):
+        hashes = {}
+        for fobj in fsobjects:
+            chunks = hash_chunks(fobj.path)
+            chunk = next(chunks)
+            while chunk in hashes:
+                for dup in hashes[chunk]:
+                    _, dup_chunks = dup
+                    try:
+                        hashes[next(dup_chunks)] = [dup]
+                        hashes[chunk].remove(dup)
+                    except StopIteration:
+                        pass
+                try:
+                    chunk = next(chunks)
+                except StopIteration:
+                    hashes[chunk].append((fobj, chunks))
+                    break
+            else:
+                hashes[chunk] = [(fobj, chunks)]
+
+        duplicates = set()
+        for dups in hashes.values():
+            if len(dups) >= 2:
+                for (dup, _) in dups:
+                    duplicates.add(dup)
+
+        return duplicates
+
+
 @stack_filter("type")
 class TypeFilter(BaseFilter):
     type_to_function = {
-- 
cgit 1.4.1-2-gfad0
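
Note on the approach: get_duplicates() walks each file's hash_chunks() generator one chunk at a time and only keeps hashing candidates whose chunks still collide, so files that differ are separated after their first differing chunk. The standalone sketch below illustrates the same chunk-hashing idea outside ranger; the hash_chunks() helper, the 1 MiB chunk size, and find_duplicates() here are illustrative assumptions, not ranger's implementation, and this simplified version hashes every file in full rather than stopping lazily when candidates diverge.

    # Standalone sketch (not ranger code): group files whose chunked
    # SHA-256 digests match, mirroring the idea behind DuplicateFilter.
    import hashlib
    import sys
    from collections import defaultdict

    CHUNK_SIZE = 2 ** 20  # 1 MiB per block; an arbitrary choice for the sketch


    def hash_chunks(path, chunk_size=CHUNK_SIZE):
        """Yield a hex digest for each fixed-size block of the file at path."""
        with open(path, "rb") as fobj:
            while True:
                block = fobj.read(chunk_size)
                if not block:
                    break
                yield hashlib.sha256(block).hexdigest()


    def find_duplicates(paths):
        """Return the set of paths whose complete chunk-hash sequences collide."""
        groups = defaultdict(list)
        for path in paths:
            # Materializing the whole chunk sequence is simpler than the
            # filter's incremental bookkeeping, at the cost of always
            # hashing each file to the end.
            groups[tuple(hash_chunks(path))].append(path)
        return {path
                for group in groups.values() if len(group) >= 2
                for path in group}


    if __name__ == "__main__":
        # Usage sketch: python find_duplicates.py FILE [FILE ...]
        for duplicate in sorted(find_duplicates(sys.argv[1:])):
            print(duplicate)

With the patch applied, the filter should be reachable through ranger's filter stack, presumably via :filter_stack add duplicate, since @stack_filter("duplicate") registers it under that name just like the existing hash and type filters.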