Browse Source

Merge pull request #8859 from ThomasWaldmann/pyupgrade-py310-plus

pyupgrade --py310-plus ./**/*.py
TW 1 week ago
parent
commit
39b847a138

+ 1 - 1
scripts/errorlist.py

@@ -11,7 +11,7 @@ from borg.helpers import Error, BackupError, BorgWarning
 
 def subclasses(cls):
     direct_subclasses = cls.__subclasses__()
-    return set(direct_subclasses) | set(s for c in direct_subclasses for s in subclasses(c))
+    return set(direct_subclasses) | {s for c in direct_subclasses for s in subclasses(c)}
 
 
 # 0, 1, 2 are used for success, generic warning, generic error

+ 1 - 1
src/borg/archive.py

@@ -12,7 +12,7 @@ from functools import partial
 from getpass import getuser
 from io import BytesIO
 from itertools import groupby, zip_longest
-from typing import Iterator
+from collections.abc import Iterator
 from shutil import get_terminal_size
 
 from .platformflags import is_win32

+ 2 - 3
src/borg/archiver/compact_cmd.py

@@ -1,6 +1,5 @@
 import argparse
 import os
-from typing import Tuple, Set
 
 from ._common import with_repository
 from ..archive import Archive
@@ -101,7 +100,7 @@ class ArchiveGarbageCollector:
             logger.warning(f"Could not access cache directory: {e}")
             return
 
-        used_files_cache_names = set(files_cache_name(series_name) for series_name in existing_series)
+        used_files_cache_names = {files_cache_name(series_name) for series_name in existing_series}
         unused_files_cache_names = files_cache_names - used_files_cache_names
 
         for cache_filename in unused_files_cache_names:
@@ -112,7 +111,7 @@ class ArchiveGarbageCollector:
                 logger.warning(f"Could not access cache file: {e}")
         logger.info(f"Removed {len(unused_files_cache_names)} unused files cache files.")
 
-    def analyze_archives(self) -> Tuple[Set, int, int, int]:
+    def analyze_archives(self) -> tuple[set, int, int, int]:
         """Iterate over all items in all archives, create the dicts id -> size of all used chunks."""
 
         def use_it(id):

+ 3 - 3
src/borg/archiver/diff_cmd.py

@@ -48,11 +48,11 @@ class DiffMixIn:
             )
 
         def print_text_output(diff, formatter):
-            actual_changes = dict(
-                (name, change)
+            actual_changes = {
+                name: change
                 for name, change in diff.changes().items()
                 if actual_change(change) and (not args.content_only or (name not in DiffFormatter.METADATA))
-            )
+            }
             diff._changes = actual_changes
             res: str = formatter.format_item(diff)
             if res.strip():

+ 1 - 1
src/borg/archiver/tag_cmd.py

@@ -18,7 +18,7 @@ class TagMixIn:
 
         def tags_set(tags):
             """return a set of tags, removing empty tags"""
-            return set(tag for tag in tags if tag)
+            return {tag for tag in tags if tag}
 
         if args.name:
             archive_infos = [manifest.archives.get_one([args.name])]

+ 10 - 11
src/borg/conftest.py

@@ -1,5 +1,4 @@
 import os
-from typing import Optional, List
 
 import pytest
 
@@ -65,19 +64,19 @@ def set_env_variables():
 class ArchiverSetup:
     EXE: str = None  # python source based
     FORK_DEFAULT = False
-    BORG_EXES: List[str] = []
+    BORG_EXES: list[str] = []
 
     def __init__(self):
         self.archiver = None
-        self.tmpdir: Optional[str] = None
-        self.repository_path: Optional[str] = None
-        self.repository_location: Optional[str] = None
-        self.input_path: Optional[str] = None
-        self.output_path: Optional[str] = None
-        self.keys_path: Optional[str] = None
-        self.cache_path: Optional[str] = None
-        self.exclude_file_path: Optional[str] = None
-        self.patterns_file_path: Optional[str] = None
+        self.tmpdir: str | None = None
+        self.repository_path: str | None = None
+        self.repository_location: str | None = None
+        self.input_path: str | None = None
+        self.output_path: str | None = None
+        self.keys_path: str | None = None
+        self.cache_path: str | None = None
+        self.exclude_file_path: str | None = None
+        self.patterns_file_path: str | None = None
 
     def get_kind(self) -> str:
         if self.repository_location.startswith("ssh://__testsuite__"):

+ 1 - 1
src/borg/crypto/file_integrity.py

@@ -3,7 +3,7 @@ import io
 import json
 import os
 from hmac import compare_digest
-from typing import Callable
+from collections.abc import Callable
 
 from ..helpers import IntegrityError
 from ..logger import create_logger

+ 3 - 2
src/borg/crypto/key.py

@@ -3,7 +3,8 @@ import hmac
 import os
 import textwrap
 from hashlib import sha256, pbkdf2_hmac
-from typing import Literal, Callable, ClassVar
+from typing import Literal, ClassVar
+from collections.abc import Callable
 
 from ..logger import create_logger
 
@@ -563,7 +564,7 @@ class FlexiKey:
                 raise KeyfileMismatchError(self.repository._location.canonical_path(), filename)
         # we get here if it really looks like a borg key for this repo,
         # do some more checks that are close to how borg reads/parses the key.
-        with open(filename, "r") as fd:
+        with open(filename) as fd:
             lines = fd.readlines()
             if len(lines) < 2:
                logger.warning(f"borg key sanity check: expected 2+ lines total. [{filename}]")

+ 2 - 2
src/borg/helpers/__init__.py

@@ -67,7 +67,7 @@ warning_info = namedtuple("warning_info", "wc,msg,args,wt")
 """
 The global warnings_list variable is used to collect warning_info elements while borg is running.
 """
-_warnings_list: List[warning_info] = []
+_warnings_list: list[warning_info] = []
 
 
 def add_warning(msg, *args, **kwargs):
@@ -159,7 +159,7 @@ def get_ec(ec=None):
         # we do not have any warnings in warnings list, return success exit code
         return _exit_code
     # looks like we have some warning(s)
-    rcs = sorted(set(w_info.wc for w_info in _warnings_list))
+    rcs = sorted({w_info.wc for w_info in _warnings_list})
     logger.debug(f"rcs: {rcs!r}")
     if len(rcs) == 1:
         # easy: there was only one kind of warning, so we can be specific

+ 7 - 7
src/borg/helpers/parseformat.py

@@ -10,7 +10,7 @@ import re
 import shlex
 import stat
 import uuid
-from typing import Dict, Set, Tuple, ClassVar, Any, TYPE_CHECKING, Literal
+from typing import ClassVar, Any, TYPE_CHECKING, Literal
 from collections import OrderedDict
 from datetime import datetime, timezone
 from functools import partial
@@ -646,8 +646,8 @@ def archivename_validator(text):
 
 class BaseFormatter(metaclass=abc.ABCMeta):
     format: str
-    static_data: Dict[str, Any]
-    FIXED_KEYS: ClassVar[Dict[str, str]] = {
+    static_data: dict[str, Any]
+    FIXED_KEYS: ClassVar[dict[str, str]] = {
         # Formatting aids
         "LF": "\n",
         "SPACE": " ",
@@ -657,7 +657,7 @@ class BaseFormatter(metaclass=abc.ABCMeta):
         "NEWLINE": "\n",
         "NL": "\n",  # \n is automatically converted to os.linesep on write
     }
-    KEY_DESCRIPTIONS: ClassVar[Dict[str, str]] = {
+    KEY_DESCRIPTIONS: ClassVar[dict[str, str]] = {
         "NEWLINE": "OS dependent line separator",
         "NL": "alias of NEWLINE",
         "NUL": "NUL character for creating print0 / xargs -0 like output",
@@ -666,9 +666,9 @@ class BaseFormatter(metaclass=abc.ABCMeta):
         "CR": "carriage return character",
         "LF": "line feed character",
     }
-    KEY_GROUPS: ClassVar[Tuple[Tuple[str, ...], ...]] = (("NEWLINE", "NL", "NUL", "SPACE", "TAB", "CR", "LF"),)
+    KEY_GROUPS: ClassVar[tuple[tuple[str, ...], ...]] = (("NEWLINE", "NL", "NUL", "SPACE", "TAB", "CR", "LF"),)
 
-    def __init__(self, format: str, static: Dict[str, Any]) -> None:
+    def __init__(self, format: str, static: dict[str, Any]) -> None:
         self.format = partial_format(format, static)
         self.static_data = static
 
@@ -685,7 +685,7 @@ class BaseFormatter(metaclass=abc.ABCMeta):
     @classmethod
     def keys_help(cls):
         help = []
-        keys: Set[str] = set()
+        keys: set[str] = set()
         keys.update(cls.KEY_DESCRIPTIONS.keys())
         keys.update(key for group in cls.KEY_GROUPS for key in group)
 

+ 2 - 1
src/borg/legacyrepository.py

@@ -9,7 +9,8 @@ from collections import defaultdict
 from configparser import ConfigParser
 from functools import partial
 from itertools import islice
-from typing import Callable, DefaultDict
+from typing import DefaultDict
+from collections.abc import Callable
 
 from .constants import *  # NOQA
 from .hashindex import NSIndex1Entry, NSIndex1

+ 1 - 2
src/borg/logger.py

@@ -57,10 +57,9 @@ import os
 import queue
 import sys
 import time
-from typing import Optional
 import warnings
 
-logging_debugging_path: Optional[str] = None  # if set, write borg.logger debugging log to path/borg-*.log
+logging_debugging_path: str | None = None  # if set, write borg.logger debugging log to path/borg-*.log
 
 configured = False
 borg_serve_log_queue: queue.SimpleQueue = queue.SimpleQueue()

+ 1 - 2
src/borg/repository.py

@@ -29,8 +29,7 @@ def repo_lister(repository, *, limit=None):
         finished = (len(result) < limit) if limit is not None else (len(result) == 0)
         if not finished:
             marker = result[-1][0]
-        for id, stored_size in result:
-            yield id, stored_size
+        yield from result
 
 
 class Repository:

+ 2 - 2
src/borg/testsuite/chunker_pytest_test.py

@@ -164,8 +164,8 @@ def test_buzhash_chunksize_distribution():
     chunks_count = len(chunks)
     min_chunksize_observed = min(chunk_sizes)
     max_chunksize_observed = max(chunk_sizes)
-    min_count = sum((int(size == 2**min_exp) for size in chunk_sizes))
-    max_count = sum((int(size == 2**max_exp) for size in chunk_sizes))
+    min_count = sum(int(size == 2**min_exp) for size in chunk_sizes)
+    max_count = sum(int(size == 2**max_exp) for size in chunk_sizes)
     print(
         f"count: {chunks_count} min: {min_chunksize_observed} max: {max_chunksize_observed} "
         f"min count: {min_count} max count: {max_count}"

+ 1 - 2
src/borg/testsuite/legacyrepository_test.py

@@ -1,7 +1,6 @@
 import logging
 import os
 import sys
-from typing import Optional
 from unittest.mock import patch
 
 import pytest
@@ -45,7 +44,7 @@ def get_repository_from_fixture(repo_fixtures, request):
     return request.getfixturevalue(repo_fixtures)
 
 
-def reopen(repository, exclusive: Optional[bool] = True, create=False):
+def reopen(repository, exclusive: bool | None = True, create=False):
     if isinstance(repository, LegacyRepository):
         if repository.io is not None or repository.lock is not None:
             raise RuntimeError("Repo must be closed before a reopen. Cannot support nested repository contexts.")

+ 1 - 2
src/borg/testsuite/repository_test.py

@@ -1,7 +1,6 @@
 import logging
 import os
 import sys
-from typing import Optional
 
 import pytest
 
@@ -40,7 +39,7 @@ def get_repository_from_fixture(repo_fixtures, request):
     return request.getfixturevalue(repo_fixtures)
 
 
-def reopen(repository, exclusive: Optional[bool] = True, create=False):
+def reopen(repository, exclusive: bool | None = True, create=False):
     if isinstance(repository, Repository):
         if repository.opened:
             raise RuntimeError("Repo must be closed before a reopen. Cannot support nested repository contexts.")

+ 6 - 8
src/borg/testsuite/shell_completions_test.py

@@ -12,12 +12,12 @@ def test_bash_completion_is_valid():
 
     # Check if bash is available
     try:
-        subprocess.run(["bash", "--version"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True)
+        subprocess.run(["bash", "--version"], capture_output=True, check=True)
     except (subprocess.SubprocessError, FileNotFoundError):
         pytest.skip("bash not available")
 
     # Test if the bash completion file can be sourced without errors
-    result = subprocess.run(["bash", "-n", bash_completion_file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    result = subprocess.run(["bash", "-n", bash_completion_file], capture_output=True)
     assert result.returncode == 0, f"Bash completion file has syntax errors: {result.stderr.decode()}"
 
 
@@ -28,14 +28,12 @@ def test_fish_completion_is_valid():
 
     # Check if fish is available
     try:
-        subprocess.run(["fish", "--version"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True)
+        subprocess.run(["fish", "--version"], capture_output=True, check=True)
     except (subprocess.SubprocessError, FileNotFoundError):
         pytest.skip("fish not available")
 
     # Test if the fish completion file can be sourced without errors
-    result = subprocess.run(
-        ["fish", "-c", f"source {fish_completion_file}"], stdout=subprocess.PIPE, stderr=subprocess.PIPE
-    )
+    result = subprocess.run(["fish", "-c", f"source {fish_completion_file}"], capture_output=True)
     assert result.returncode == 0, f"Fish completion file has syntax errors: {result.stderr.decode()}"
 
 
@@ -46,10 +44,10 @@ def test_zsh_completion_is_valid():
 
     # Check if zsh is available
     try:
-        subprocess.run(["zsh", "--version"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True)
+        subprocess.run(["zsh", "--version"], capture_output=True, check=True)
     except (subprocess.SubprocessError, FileNotFoundError):
         pytest.skip("zsh not available")
 
     # Test if the zsh completion file can be sourced without errors
-    result = subprocess.run(["zsh", "-n", zsh_completion_file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    result = subprocess.run(["zsh", "-n", zsh_completion_file], capture_output=True)
     assert result.returncode == 0, f"Zsh completion file has syntax errors: {result.stderr.decode()}"