
cosmetic source cleanup (flake8)

Thomas Waldmann 9 years ago
parent
commit
4b339f5d69

+ 0 - 1
borg/__main__.py

@@ -1,3 +1,2 @@
 from borg.archiver import main
 main()
-

+ 2 - 3
borg/archive.py

@@ -187,7 +187,7 @@ class Archive:
 
     @property
     def duration(self):
-        return format_timedelta(self.end-self.start)
+        return format_timedelta(self.end - self.start)
 
     def __str__(self):
         return '''Archive name: {0.name}
@@ -591,8 +591,7 @@ Number of files: {0.stats.nfiles}'''.format(self)
 # this set must be kept complete, otherwise the RobustUnpacker might malfunction:
 ITEM_KEYS = set([b'path', b'source', b'rdev', b'chunks',
                  b'mode', b'user', b'group', b'uid', b'gid', b'mtime', b'atime', b'ctime',
-                 b'xattrs', b'bsdflags', b'acl_nfs4', b'acl_access', b'acl_default', b'acl_extended',
-            ])
+                 b'xattrs', b'bsdflags', b'acl_nfs4', b'acl_access', b'acl_default', b'acl_extended', ])
 
 
 class RobustUnpacker:

+ 5 - 7
borg/archiver.py

@@ -239,8 +239,7 @@ class Archiver:
         # Ignore if nodump flag is set
         if has_lchflags and (st.st_flags & stat.UF_NODUMP):
             return
-        if (stat.S_ISREG(st.st_mode) or
-            read_special and not stat.S_ISDIR(st.st_mode)):
+        if stat.S_ISREG(st.st_mode) or read_special and not stat.S_ISDIR(st.st_mode):
             if not dry_run:
                 try:
                     status = archive.process_file(path, st, cache)
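
Note on the hunk above (editorial, not part of the commit): dropping the parentheses is safe because `and` binds tighter than `or` in Python, so the collapsed condition keeps its original grouping. A minimal sketch, with `should_process` as a hypothetical helper name:

    import stat

    def should_process(st_mode, read_special):
        # same grouping as the unparenthesized condition in the diff:
        # S_ISREG(...) or (read_special and not S_ISDIR(...))
        return stat.S_ISREG(st_mode) or (read_special and not stat.S_ISDIR(st_mode))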
@@ -576,7 +575,7 @@ class Archiver:
         archive = Archive(repository, key, manifest, args.location.archive)
         for i, item_id in enumerate(archive.metadata[b'items']):
             data = key.decrypt(item_id, repository.get(item_id))
-            filename = '%06d_%s.items' %(i, hexlify(item_id).decode('ascii'))
+            filename = '%06d_%s.items' % (i, hexlify(item_id).decode('ascii'))
             print('Dumping', filename)
             with open(filename, 'wb') as fd:
                 fd.write(data)
@@ -594,7 +593,7 @@ class Archiver:
             print("object id %s is invalid." % hex_id)
         else:
             try:
-                data =repository.get(id)
+                data = repository.get(id)
             except repository.ObjectNotFound:
                 print("object %s not found." % hex_id)
             else:
@@ -756,7 +755,7 @@ class Archiver:
 
     def preprocess_args(self, args):
         deprecations = [
-            #('--old', '--new', 'Warning: "--old" has been deprecated. Use "--new" instead.'),
+            # ('--old', '--new', 'Warning: "--old" has been deprecated. Use "--new" instead.'),
         ]
         for i, arg in enumerate(args[:]):
             for old_name, new_name, warning in deprecations:
@@ -787,8 +786,7 @@ class Archiver:
         parser = argparse.ArgumentParser(prog=prog, description='Borg - Deduplicated Backups')
         parser.add_argument('-V', '--version', action='version', version='%(prog)s ' + __version__,
                                    help='show version number and exit')
-        subparsers = parser.add_subparsers(title='required arguments',
-                                           metavar='<command>')
+        subparsers = parser.add_subparsers(title='required arguments', metavar='<command>')
 
         serve_epilog = textwrap.dedent("""
         This command starts a repository server process. This command is usually not used manually.

+ 2 - 2
borg/cache.py

@@ -130,10 +130,10 @@ Chunk index:    {0.total_unique_chunks:20d} {0.total_chunks:20d}"""
         try:
             cache_version = self.config.getint('cache', 'version')
             wanted_version = 1
-            if  cache_version != wanted_version:
+            if cache_version != wanted_version:
                 raise Exception('%s has unexpected cache version %d (wanted: %d).' % (
                     config_path, cache_version, wanted_version))
-        except configparser.NoSectionError as e:
+        except configparser.NoSectionError:
             raise Exception('%s does not look like a Borg cache.' % config_path) from None
         self.id = self.config.get('cache', 'repository')
         self.manifest_id = unhexlify(self.config.get('cache', 'manifest'))
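
Aside on the hunk above (editorial): the `as e` binding was unused, which is what flake8 flagged; the `from None` that remains suppresses implicit exception chaining, so the original NoSectionError is not attached as `__context__`. A minimal sketch, with `read_cache_version` as a hypothetical name:

    import configparser

    def read_cache_version(config, config_path):
        try:
            return config.getint('cache', 'version')
        except configparser.NoSectionError:
            # re-raise without chaining the NoSectionError
            raise Exception('%s does not look like a Borg cache.' % config_path) from None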

+ 1 - 1
borg/fuse.py

@@ -209,7 +209,7 @@ class FuseOperations(llfuse.Operations):
                 continue
             n = min(size, s - offset)
             chunk = self.key.decrypt(id, self.repository.get(id))
-            parts.append(chunk[offset:offset+n])
+            parts.append(chunk[offset:offset + n])
             offset = 0
             size -= n
             if not size:

+ 5 - 5
borg/helpers.py

@@ -128,7 +128,7 @@ class Manifest:
 
 
 def prune_within(archives, within):
-    multiplier = {'H': 1, 'd': 24, 'w': 24*7, 'm': 24*31, 'y': 24*365}
+    multiplier = {'H': 1, 'd': 24, 'w': 24 * 7, 'm': 24 * 31, 'y': 24 * 365}
     try:
         hours = int(within[:-1]) * multiplier[within[-1]]
     except (KeyError, ValueError):
@@ -136,7 +136,7 @@ def prune_within(archives, within):
         raise argparse.ArgumentTypeError('Unable to parse --within option: "%s"' % within)
     if hours <= 0:
         raise argparse.ArgumentTypeError('Number specified using --within option must be positive')
-    target = datetime.now(timezone.utc) - timedelta(seconds=hours*60*60)
+    target = datetime.now(timezone.utc) - timedelta(seconds=hours * 3600)
     return [a for a in archives if a.ts > target]
 
 
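Worked example for the two hunks above (editorial, with `prune_within_cutoff` as a hypothetical helper): `--within 2d` parses to 2 * 24 = 48 hours, and the cutoff is 48 * 3600 seconds before now:

    from datetime import datetime, timedelta, timezone

    MULTIPLIER = {'H': 1, 'd': 24, 'w': 24 * 7, 'm': 24 * 31, 'y': 24 * 365}

    def prune_within_cutoff(within):
        hours = int(within[:-1]) * MULTIPLIER[within[-1]]  # '2d' -> 2 * 24 = 48
        return datetime.now(timezone.utc) - timedelta(seconds=hours * 3600)

    print(prune_within_cutoff('2d'))  # roughly two days before now, in UTC
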
@@ -200,7 +200,7 @@ class Statistics:
                 path = remove_surrogates(item[b'path']) if item else ''
                 space = columns - len(msg)
                 if space < len('...') + len(path):
-                    path = '%s...%s' % (path[:(space//2)-len('...')], path[-space//2:])
+                    path = '%s...%s' % (path[:(space // 2) - len('...')], path[-space // 2:])
                 msg += "{0:<{space}}".format(path, space=space)
             else:
                 msg = ' ' * columns
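
Worked example for the path truncation above (editorial, stand-in values): the head keeps `space // 2 - 3` characters, the tail roughly the last `space // 2` (floor division on the negative index handles odd widths), so with `space = 20` the result is exactly 20 columns wide:

    path = '/home/user/some/very/long/path/to/file.txt'
    space = 20
    if space < len('...') + len(path):
        path = '%s...%s' % (path[:(space // 2) - len('...')], path[-space // 2:])
    print(path)       # '/home/u...o/file.txt'
    print(len(path))  # 20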
@@ -355,7 +355,7 @@ class FnmatchPattern(PatternBase):
         if pattern.endswith(os.path.sep):
             pattern = os.path.normpath(pattern).rstrip(os.path.sep) + os.path.sep + '*' + os.path.sep
         else:
-            pattern = os.path.normpath(pattern) + os.path.sep+'*'
+            pattern = os.path.normpath(pattern) + os.path.sep + '*'
 
         self.pattern = pattern
 
@@ -831,6 +831,7 @@ FALSISH = ('No', 'NO', 'no', 'N', 'n', '0', )
 TRUISH = ('Yes', 'YES', 'yes', 'Y', 'y', '1', )
 DEFAULTISH = ('Default', 'DEFAULT', 'default', 'D', 'd', '', )
 
+
 def yes(msg=None, false_msg=None, true_msg=None, default_msg=None,
         retry_msg=None, invalid_msg=None, env_msg=None,
         falsish=FALSISH, truish=TRUISH, defaultish=DEFAULTISH,
@@ -951,7 +952,6 @@ class ProgressIndicatorPercent:
             print(" " * len(self.msg % 100.0), file=self.file, end='\r')
 
 
-
 class ProgressIndicatorEndless:
     def __init__(self, step=10, file=sys.stderr):
         """

+ 1 - 1
borg/key.py

@@ -360,7 +360,7 @@ class KeyfileKey(KeyfileKeyBase):
             filename = os.path.join(keys_dir, name)
             with open(filename, 'r') as fd:
                 line = fd.readline().strip()
-                if line.startswith(self.FILE_ID) and line[len(self.FILE_ID)+1:] == id:
+                if line.startswith(self.FILE_ID) and line[len(self.FILE_ID) + 1:] == id:
                     return filename
         raise KeyfileNotFoundError(self.repository._location.canonical_path(), get_keys_dir())
 

+ 1 - 1
borg/locking.py

@@ -111,7 +111,7 @@ class ExclusiveLock:
         self.sleep = sleep
         self.path = os.path.abspath(path)
         self.id = id or get_id()
-        self.unique_name  = os.path.join(self.path, "%s.%d-%x" % self.id)
+        self.unique_name = os.path.join(self.path, "%s.%d-%x" % self.id)
 
     def __enter__(self):
         return self.acquire()

+ 1 - 1
borg/remote.py

@@ -185,7 +185,7 @@ class RemoteRepository:
             else:
                 raise ValueError('log level missing, fix this code')
         if testing:
-            return [sys.executable, '-m', 'borg.archiver', 'serve' ] + opts + self.extra_test_args
+            return [sys.executable, '-m', 'borg.archiver', 'serve'] + opts + self.extra_test_args
         else:  # pragma: no cover
             return [args.remote_path, 'serve'] + opts
 

+ 11 - 9
borg/testsuite/archiver.py

@@ -82,6 +82,7 @@ def cmd(request):
         exe = 'borg.exe'
     else:
         raise ValueError("param must be 'python' or 'binary'")
+
     def exec_fn(*args, **kw):
         return exec_cmd(*args, exe=exe, fork=True, **kw)
     return exec_fn
@@ -121,6 +122,7 @@ if the directory does not exist, the test will be skipped.
 """
 DF_MOUNT = '/tmp/borg-mount'
 
+
 @pytest.mark.skipif(not os.path.exists(DF_MOUNT), reason="needs a 16MB fs mounted on %s" % DF_MOUNT)
 def test_disk_full(cmd):
     def make_files(dir, count, size, rnd=True):
@@ -177,7 +179,7 @@ def test_disk_full(cmd):
                 shutil.rmtree(reserve, ignore_errors=True)
             rc, out = cmd('list', repo)
             if rc != EXIT_SUCCESS:
-               print('list', rc, out)
+                print('list', rc, out)
             rc, out = cmd('check', '--repair', repo)
             if rc != EXIT_SUCCESS:
                 print('check', rc, out)
@@ -301,7 +303,7 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         list_output = self.cmd('list', '--short', self.repository_location)
         self.assert_in('test', list_output)
         self.assert_in('test.2', list_output)
-        expected =  [
+        expected = [
             'input',
             'input/bdev',
             'input/cdev',
@@ -320,7 +322,7 @@ class ArchiverTestCase(ArchiverTestCaseBase):
             expected.remove('input/cdev')
         if has_lchflags:
             # remove the file we did not backup, so input and output become equal
-            expected.remove('input/flagfile') # this file is UF_NODUMP
+            expected.remove('input/flagfile')  # this file is UF_NODUMP
             os.remove(os.path.join('input', 'flagfile'))
         list_output = self.cmd('list', '--short', self.repository_location + '::test')
         for name in expected:
@@ -348,7 +350,7 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         self.assert_equal(filter(info_output), filter(info_output2))
 
     def test_atime(self):
-        have_root = self.create_test_files()
+        self.create_test_files()
         atime, mtime = 123456780, 234567890
         os.utime('input/file1', (atime, mtime))
         self.cmd('init', self.repository_location)
@@ -414,7 +416,7 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         filenames = ['normal', 'with some blanks', '(with_parens)', ]
         for filename in filenames:
             filename = os.path.join(self.input_path, filename)
-            with open(filename, 'wb') as fd:
+            with open(filename, 'wb'):
                 pass
         self.cmd('init', self.repository_location)
         self.cmd('create', self.repository_location + '::test', 'input')
@@ -617,11 +619,11 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         self.create_regular_file('tagged1/file1', size=1024)
         self.create_regular_file('tagged2/.NOBACKUP2')
         self.create_regular_file('tagged2/file2', size=1024)
-        self.create_regular_file('tagged3/CACHEDIR.TAG', contents = b'Signature: 8a477f597d28d172789f06886806bc55 extra stuff')
+        self.create_regular_file('tagged3/CACHEDIR.TAG', contents=b'Signature: 8a477f597d28d172789f06886806bc55 extra stuff')
         self.create_regular_file('tagged3/file3', size=1024)
         self.create_regular_file('taggedall/.NOBACKUP1')
         self.create_regular_file('taggedall/.NOBACKUP2')
-        self.create_regular_file('taggedall/CACHEDIR.TAG', contents = b'Signature: 8a477f597d28d172789f06886806bc55 extra stuff')
+        self.create_regular_file('taggedall/CACHEDIR.TAG', contents=b'Signature: 8a477f597d28d172789f06886806bc55 extra stuff')
         self.create_regular_file('taggedall/file4', size=1024)
         self.cmd('create', '--exclude-if-present', '.NOBACKUP1', '--exclude-if-present', '.NOBACKUP2',
                  '--exclude-caches', '--keep-tag-files', self.repository_location + '::test', 'input')
@@ -785,7 +787,7 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         clearly incomplete: only tests for the weird "unchanged" status for now"""
         now = time.time()
         self.create_regular_file('file1', size=1024 * 80)
-        os.utime('input/file1', (now - 5, now - 5)) # 5 seconds ago
+        os.utime('input/file1', (now - 5, now - 5))  # 5 seconds ago
         self.create_regular_file('file2', size=1024 * 80)
         self.cmd('init', self.repository_location)
         output = self.cmd('create', '-v', '--list', self.repository_location + '::test', 'input')
@@ -822,7 +824,7 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         output = self.cmd('create', '-v', '--list', '--filter=AM', self.repository_location + '::test3', 'input')
         self.assert_in('file1', output)
 
-    #def test_cmdline_compatibility(self):
+    # def test_cmdline_compatibility(self):
     #    self.create_regular_file('file1', size=1024 * 80)
     #    self.cmd('init', self.repository_location)
     #    self.cmd('create', self.repository_location + '::test', 'input')

+ 5 - 4
borg/testsuite/benchmark.py

@@ -38,12 +38,14 @@ def testdata(request, tmpdir_factory):
     data_type = request.param
     if data_type == 'zeros':
         # do not use a binary zero (\0) to avoid sparse detection
-        data = lambda: b'0' * size
+        def data(size):
+            return b'0' * size
     if data_type == 'random':
-        data = lambda: os.urandom(size)
+        def data(size):
+            return os.urandom(size)
     for i in range(count):
         with open(str(p.join(str(i))), "wb") as f:
-            f.write(data())
+            f.write(data(size))
     yield str(p)
     p.remove(rec=1)
 
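Side note on the hunk above (my reading, not stated in the commit): flake8's E731 discourages assigning a lambda to a name, and the `def` form also makes `size` an explicit parameter instead of a closure variable. A minimal standalone sketch:

    import os

    def data(size):
        # replaces `data = lambda: os.urandom(size)` from the old code
        return os.urandom(size)

    chunk = data(1024)  # 1 KiB of random test data
    assert len(chunk) == 1024
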
@@ -95,4 +97,3 @@ def test_check(benchmark, cmd, archive):
 def test_help(benchmark, cmd):
     result, out = benchmark(cmd, 'help')
     assert result == 0
-

+ 0 - 2
borg/testsuite/compress.py

@@ -98,5 +98,3 @@ def test_compressor():
     for params in params_list:
         c = Compressor(**params)
         assert data == c.decompress(c.compress(data))
-
-

+ 2 - 3
borg/testsuite/helpers.py

@@ -165,8 +165,7 @@ class FormatTimedeltaTestCase(BaseTestCase):
 def check_patterns(files, pattern, expected):
     """Utility for testing patterns.
     """
-    assert all([f == os.path.normpath(f) for f in files]), \
-            "Pattern matchers expect normalized input paths"
+    assert all([f == os.path.normpath(f) for f in files]), "Pattern matchers expect normalized input paths"
 
     matched = [f for f in files if pattern.match(f)]
 
@@ -284,7 +283,7 @@ def test_patterns_shell(pattern, expected):
     ("^[^/]", []),
     ("^(?!/srv|/foo|/opt)",
      ["/home", "/home/user/.profile", "/home/user/.bashrc", "/home/user2/.profile",
-      "/home/user2/public_html/index.html", "/home/foo/.thumbnails", "/home/foo/bar/.thumbnails",]),
+      "/home/user2/public_html/index.html", "/home/foo/.thumbnails", "/home/foo/bar/.thumbnails", ]),
     ])
 def test_patterns_regex(pattern, expected):
     files = [

+ 1 - 0
borg/testsuite/locking.py

@@ -9,6 +9,7 @@ from ..locking import get_id, TimeoutTimer, ExclusiveLock, UpgradableLock, LockR
 ID1 = "foo", 1, 1
 ID2 = "bar", 2, 2
 
+
 def test_id():
     hostname, pid, tid = get_id()
     assert isinstance(hostname, str)

+ 1 - 1
borg/testsuite/repository.py

@@ -338,7 +338,7 @@ class RemoteRepositoryTestCase(RepositoryTestCase):
             remote_path = 'borg'
             umask = 0o077
 
-        assert self.repository.borg_cmd(None, testing=True) == [sys.executable, '-m', 'borg.archiver', 'serve' ]
+        assert self.repository.borg_cmd(None, testing=True) == [sys.executable, '-m', 'borg.archiver', 'serve']
         args = MockArgs()
         # note: test logger is on info log level, so --info gets added automagically
         assert self.repository.borg_cmd(args, testing=False) == ['borg', 'serve', '--umask=077', '--info']

+ 1 - 1
borg/upgrader.py

@@ -321,6 +321,6 @@ class Borg0xxKeyfileKey(KeyfileKey):
             filename = os.path.join(keys_dir, name)
             with open(filename, 'r') as fd:
                 line = fd.readline().strip()
-                if line and line.startswith(cls.FILE_ID) and line[len(cls.FILE_ID)+1:] == id:
+                if line and line.startswith(cls.FILE_ID) and line[len(cls.FILE_ID) + 1:] == id:
                     return filename
         raise KeyfileNotFoundError(repository.path, keys_dir)

+ 2 - 2
borg/xattr.py

@@ -231,8 +231,8 @@ elif sys.platform.startswith('freebsd'):  # pragma: freebsd only
         mv = memoryview(namebuf.raw)
         while mv:
             length = mv[0]
-            names.append(os.fsdecode(bytes(mv[1:1+length])))
-            mv = mv[1+length:]
+            names.append(os.fsdecode(bytes(mv[1:1 + length])))
+            mv = mv[1 + length:]
         return names
 
     def getxattr(path, name, *, follow_symlinks=True):
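
For context on the loop above (editorial): the buffer holds length-prefixed names, one length byte followed by that many name bytes, which the memoryview loop walks entry by entry. A standalone sketch on a hand-made buffer:

    import os

    namebuf = b'\x03foo\x06barbaz'  # length-prefixed names: 'foo', 'barbaz'
    names = []
    mv = memoryview(namebuf)
    while mv:
        length = mv[0]
        names.append(os.fsdecode(bytes(mv[1:1 + length])))
        mv = mv[1 + length:]
    print(names)  # ['foo', 'barbaz']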

+ 4 - 1
setup.py

@@ -19,7 +19,7 @@ on_rtd = os.environ.get('READTHEDOCS')
 
 # msgpack pure python data corruption was fixed in 0.4.6.
 # Also, we might use some rather recent API features.
-install_requires=['msgpack-python>=0.4.6', ]
+install_requires = ['msgpack-python>=0.4.6', ]
 
 
 from setuptools import setup, Extension
@@ -120,12 +120,14 @@ elif not on_rtd:
 with open('README.rst', 'r') as fd:
     long_description = fd.read()
 
+
 class build_usage(Command):
     description = "generate usage for each command"
 
     user_options = [
         ('output=', 'O', 'output directory'),
     ]
+
     def initialize_options(self):
         pass
 
@@ -172,6 +174,7 @@ class build_api(Command):
     user_options = [
         ('output=', 'O', 'output directory'),
     ]
+
     def initialize_options(self):
         pass