about summary refs log tree commit diff stats
diff options
context:
space:
mode:
authortoonn <toonn@toonn.io>2019-12-28 18:16:50 +0100
committertoonn <toonn@toonn.io>2019-12-28 18:47:24 +0100
commit99870addf7e23cd8bb34463c43dfc7ccd38b9545 (patch)
tree285c66ba07bef4782550323439a7180582050089
parent5581c896a97ee7740dd1b142a053fc24681c1f72 (diff)
downloadranger-99870addf7e23cd8bb34463c43dfc7ccd38b9545.tar.gz
Fix hash_chunks generator
-rw-r--r--ranger/core/filter_stack.py12
-rw-r--r--ranger/ext/hash.py3
2 files changed, 9 insertions, 6 deletions
diff --git a/ranger/core/filter_stack.py b/ranger/core/filter_stack.py
index ca8810e8..59495437 100644
--- a/ranger/core/filter_stack.py
+++ b/ranger/core/filter_stack.py
@@ -9,8 +9,7 @@ import re
 import mimetypes
 # pylint: disable=invalid-name
 try:
-    from itertools import izip_longest
-    zip_longest = izip_longest
+    from itertools import izip_longest as zip_longest
 except ImportError:
     from itertools import zip_longest
 # pylint: enable=invalid-name
@@ -77,9 +76,12 @@ class MimeFilter(BaseFilter):
 
 @stack_filter("hash")
 class HashFilter(BaseFilter, FileManagerAware):
-    def __init__(self, filepath):
-        self.filepath = filepath if filepath else self.fm.thisfile.path
-        if not self.filepath:
+    def __init__(self, filepath = None):
+        if filepath is None:
+            self.filepath = self.fm.thisfile.path
+        else:
+            self.filepath = filepath
+        if self.filepath is None:
             self.fm.notify("Error: No file selected for hashing!", bad=True)
         # TODO: Lazily generated list would be more efficient, a generator
         #       isn't enough because this object is reused for every fsobject
diff --git a/ranger/ext/hash.py b/ranger/ext/hash.py
index 20059dbf..1ed21a71 100644
--- a/ranger/ext/hash.py
+++ b/ranger/ext/hash.py
@@ -17,7 +17,8 @@ def hash_chunks(filepath, h=None):
         h.update(filepath)
         yield h.hexdigest()
         for fp in listdir(filepath):
-            hash_chunks(fp, h=h)
+            for fp_chunk in hash_chunks(fp, h=h):
+                yield fp_chunk
     elif getsize(filepath) == 0:
         yield h.hexdigest()
     else: