From f397e7be59a883ea6a1c227046836c34005aea07 Mon Sep 17 00:00:00 2001
From: Edwin Eefting <edwin@datux.nl>
Date: Mon, 21 Feb 2022 11:01:07 +0100
Subject: [PATCH] Python 2 compatibility

---
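Notes:

BrokenPipeError is a Python 3 built-in with no Python 2 counterpart, so the
"from builtins import BrokenPipeError" import and the handler that used it
are disabled for now rather than ported. pathlib is likewise Python 3 only;
the recursive pathlib.Path('.').glob('**/*') walk is replaced with os.walk(),
which is available on both major versions, and the onerror callback re-raises
so an unreadable directory aborts the walk instead of being skipped silently.
Dropping "import sys" from ZfsCheck.py should be safe because that module does
"from .util import *" and util.py itself imports sys (assuming util defines no
__all__ that would hide it).

For reference, a minimal standalone sketch of the new traversal, runnable on
both Python 2 and 3 (walk_files and the "." argument are illustrative, not
part of the patch):

    import os

    def walk_files(start_path):
        def walkerror(e):
            # os.walk() swallows listing errors by default; re-raise so an
            # unreadable directory aborts the walk instead of being skipped
            raise e

        for (dirpath, dirnames, filenames) in os.walk(start_path, onerror=walkerror):
            for name in filenames:
                file_path = os.path.join(dirpath, name)
                # yield regular files only; skip symlinks so a link cannot
                # pull in files from outside the tree
                if not os.path.islink(file_path) and os.path.isfile(file_path):
                    yield file_path

    for path in walk_files("."):
        print(path)
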
 zfs_autobackup/ZfsCheck.py |  8 ++------
 zfs_autobackup/util.py     | 17 ++++++++++-------
 2 files changed, 12 insertions(+), 13 deletions(-)

diff --git a/zfs_autobackup/ZfsCheck.py b/zfs_autobackup/ZfsCheck.py
index 000426e..bc4d1d2 100644
--- a/zfs_autobackup/ZfsCheck.py
+++ b/zfs_autobackup/ZfsCheck.py
@@ -1,8 +1,4 @@
 from __future__ import print_function
-import hashlib
-import sys
-from builtins import BrokenPipeError
-from signal import signal, SIGPIPE, SIG_DFL
 
 from .ZfsNode import ZfsNode
 from .util import *
@@ -66,8 +62,8 @@ class ZfsCheck(CliBase):
                     print("{}\t{}\t{}".format(file, block, hash))
                     sys.stdout.flush() #important, to generate SIGPIPES on ssh disconnect
 
-        except BrokenPipeError:
-            output_redir()
+        # except BrokenPipeError:
+        #     output_redir()
 
         finally:
             snapshot.unmount()
diff --git a/zfs_autobackup/util.py b/zfs_autobackup/util.py
index 9a48c22..17927e0 100644
--- a/zfs_autobackup/util.py
+++ b/zfs_autobackup/util.py
@@ -20,8 +20,6 @@ import platform
 import sys
 import time
 
-import pathlib as pathlib
-
 
 def block_hash(fname, count=10000, bs=4096):
     """This function was created to checksum huge files and blockdevices (TB's)
@@ -60,12 +58,17 @@ def block_hash_tree(start_path, count=10000, bs=4096):
     cwd=os.getcwd()
     os.chdir(start_path)
 
-    try:
-        for f in pathlib.Path('.').glob('**/*'):
-            if f.is_file() and not f.is_symlink():
-                for (chunk_nr, hash) in block_hash(f, count, bs):
+    def walkerror(e):
+        raise e
 
-                    yield ( f, chunk_nr, hash )
+    try:
+        for (dirpath, dirnames, filenames) in os.walk(".", onerror=walkerror):
+            for f in filenames:
+                file_path=os.path.join(dirpath, f)
+
+                if (not os.path.islink(file_path)) and os.path.isfile(file_path):
+                    for (chunk_nr, hash) in block_hash(file_path, count, bs):
+                        yield ( file_path, chunk_nr, hash )
     finally:
         os.chdir(cwd)
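
Note: if the broken-pipe handling is restored later, a single except clause
can cover both versions, since BrokenPipeError subclasses OSError/IOError on
Python 3 and Python 2 reports a broken pipe as IOError with errno EPIPE. A
sketch (the print/flush lines stand in for the real output loop):

    import errno
    import sys

    try:
        print("file\tblock\thash")
        sys.stdout.flush()  # a closed pipe (e.g. ssh disconnect) raises here
    except IOError as e:
        # Python 2 has no BrokenPipeError; on both major versions a broken
        # pipe is reported as IOError/OSError with errno set to EPIPE
        if e.errno != errno.EPIPE:
            raise
        # ...call output_redir() here, as the disabled handler did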