
Merge pull request #6964 from ThomasWaldmann/timestamps

Timestamps with timezones
TW 2 years ago
parent commit 3715e327a6

+ 0 - 1
requirements.d/development.lock.txt

@@ -10,4 +10,3 @@ pytest-cov==3.0.0
 pytest-benchmark==3.4.1
 Cython==0.29.30
 twine==3.8.0
-python-dateutil==2.8.2

+ 0 - 1
requirements.d/development.txt

@@ -10,4 +10,3 @@ pytest-cov
 pytest-benchmark
 Cython!=0.27
 twine
-python-dateutil

+ 12 - 12
src/borg/archive.py

@@ -6,7 +6,7 @@ import sys
 import time
 from collections import OrderedDict
 from contextlib import contextmanager
-from datetime import datetime, timezone, timedelta
+from datetime import datetime, timedelta
 from functools import partial
 from getpass import getuser
 from io import BytesIO
@@ -479,13 +479,13 @@ class Archive:
             start_monotonic is None
         ), "Logic error: if start is given, start_monotonic must be given as well and vice versa."
         if start is None:
-            start = datetime.utcnow()
+            start = datetime.now().astimezone()  # local time with local timezone
             start_monotonic = time.monotonic()
         self.chunker_params = chunker_params
         self.start = start
         self.start_monotonic = start_monotonic
         if end is None:
-            end = datetime.utcnow()
+            end = datetime.now().astimezone()  # local time with local timezone
         self.end = end
         self.consider_part_files = consider_part_files
         self.pipeline = DownloadPipeline(self.repository, self.key)
@@ -549,8 +549,8 @@ class Archive:
     def info(self):
         if self.create:
             stats = self.stats
-            start = self.start.replace(tzinfo=timezone.utc)
-            end = self.end.replace(tzinfo=timezone.utc)
+            start = self.start
+            end = self.end
         else:
             stats = self.calc_stats(self.cache)
             start = self.ts
@@ -587,8 +587,8 @@ Time (end):   {end}
 Duration: {0.duration}
 """.format(
             self,
-            start=OutputTimestamp(self.start.replace(tzinfo=timezone.utc)),
-            end=OutputTimestamp(self.end.replace(tzinfo=timezone.utc)),
+            start=OutputTimestamp(self.start),
+            end=OutputTimestamp(self.end),
             location=self.repository._location.canonical_path(),
         )

@@ -629,11 +629,11 @@ Duration: {0.duration}
         item_ptrs = archive_put_items(self.items_buffer.chunks, key=self.key, cache=self.cache, stats=self.stats)
         duration = timedelta(seconds=time.monotonic() - self.start_monotonic)
         if timestamp is None:
-            end = datetime.utcnow()
+            end = datetime.now().astimezone()  # local time with local timezone
             start = end - duration
         else:
-            end = timestamp + duration
             start = timestamp
+            end = start + duration
         self.start = start
         self.end = end
         metadata = {
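
Note (editor's sketch, not part of the diff, values made up): with this hunk, save() derives both ends of the archive time span from one aware timestamp plus the monotonic duration; when ``--timestamp`` is given it becomes the start, otherwise the end is an aware local "now" and the start is back-computed::

    from datetime import datetime, timedelta, timezone

    duration = timedelta(seconds=42.5)  # from a time.monotonic() difference
    # --timestamp given (hypothetical value):
    start = datetime(2022, 8, 1, 12, 0, 0, tzinfo=timezone.utc)
    end = start + duration              # 2022-08-01T12:00:42.500000+00:00
    # no --timestamp given:
    end = datetime.now().astimezone()   # aware local "now"
    start = end - duration
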
@@ -644,8 +644,8 @@ Duration: {0.duration}
             "cmdline": sys.argv,
             "hostname": hostname,
             "username": getuser(),
-            "time": start.strftime(ISO_FORMAT),
-            "time_end": end.strftime(ISO_FORMAT),
+            "time": start.isoformat(timespec="microseconds"),
+            "time_end": end.isoformat(timespec="microseconds"),
             "chunker_params": self.chunker_params,
         }
         if stats is not None:
@@ -2314,7 +2314,7 @@ class ArchiveRecreater:
             target.rename(archive.name)
         if self.stats:
             target.start = _start
-            target.end = datetime.utcnow()
+            target.end = datetime.now().astimezone()  # local time with local timezone
             log_multi(str(target), str(target.stats))

     def matcher_add_tagged_dirs(self, archive):
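
Note (editor's illustration, not part of the diff): every datetime.utcnow() in this file becomes an aware local timestamp, so stored and displayed values now carry a UTC offset. A minimal sketch of the difference::

    from datetime import datetime

    naive = datetime.utcnow()                  # old style: naive, no tzinfo attached
    aware_local = datetime.now().astimezone()  # new style: local time with local UTC offset
    assert naive.tzinfo is None and aware_local.utcoffset() is not None
    print(aware_local.isoformat(timespec="microseconds"))  # e.g. 2022-08-06T11:29:14.929805+02:00
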

+ 3 - 3
src/borg/archiver/create.py

@@ -204,7 +204,7 @@ class CreateMixIn:
         self.noxattrs = args.noxattrs
         self.exclude_nodump = args.exclude_nodump
         dry_run = args.dry_run
-        t0 = datetime.utcnow()
+        t0 = datetime.now().astimezone()  # local time with local timezone
         t0_monotonic = time.monotonic()
         logger.info('Creating archive at "%s"' % args.location.processed)
         if not dry_run:
@@ -821,8 +821,8 @@ class CreateMixIn:
             dest="timestamp",
             type=timestamp,
             default=None,
-            help="manually specify the archive creation date/time (UTC, yyyy-mm-ddThh:mm:ss format). "
-            "Alternatively, give a reference file/directory.",
+            help="manually specify the archive creation date/time (yyyy-mm-ddThh:mm:ss[(+|-)HH:MM] format, "
+            "(+|-)HH:MM is the UTC offset, default: local time zone). Alternatively, give a reference file/directory.",
         )
         archive_group.add_argument(
             "-c",

+ 1 - 1
src/borg/archiver/help.py

@@ -291,7 +291,7 @@ class HelpMixIn:
         Examples::

             borg create /path/to/repo::{hostname}-{user}-{utcnow} ...
-            borg create /path/to/repo::{hostname}-{now:%Y-%m-%d_%H:%M:%S} ...
+            borg create /path/to/repo::{hostname}-{now:%Y-%m-%d_%H:%M:%S%z} ...
             borg prune -a '{hostname}-*' ...

         .. note::
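
Note (illustrative, hostname made up): since {now} is now an aware local timestamp, the %z in the updated example appends the UTC offset to the archive name::

    from datetime import datetime

    now = datetime.now().astimezone()
    print("myhost-" + now.strftime("%Y-%m-%d_%H:%M:%S%z"))  # e.g. myhost-2022-08-06_11:29:14+0200
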

+ 2 - 2
src/borg/archiver/recreate.py

@@ -177,8 +177,8 @@ class RecreateMixIn:
             dest="timestamp",
             type=timestamp,
             default=None,
-            help="manually specify the archive creation date/time (UTC, yyyy-mm-ddThh:mm:ss format). "
-            "alternatively, give a reference file/directory.",
+            help="manually specify the archive creation date/time (yyyy-mm-ddThh:mm:ss[(+|-)HH:MM] format, "
+            "(+|-)HH:MM is the UTC offset, default: local time zone). Alternatively, give a reference file/directory.",
         )
         archive_group.add_argument(
             "-C",

+ 3 - 3
src/borg/archiver/tar.py

@@ -238,7 +238,7 @@ class TarMixIn:
         return self.exit_code

     def _import_tar(self, args, repository, manifest, key, cache, tarstream):
-        t0 = datetime.utcnow()
+        t0 = datetime.now().astimezone()  # local time with local timezone
         t0_monotonic = time.monotonic()

         archive = Archive(
@@ -485,8 +485,8 @@ class TarMixIn:
             type=timestamp,
             default=None,
             metavar="TIMESTAMP",
-            help="manually specify the archive creation date/time (UTC, yyyy-mm-ddThh:mm:ss format). "
-            "alternatively, give a reference file/directory.",
+            help="manually specify the archive creation date/time (yyyy-mm-ddThh:mm:ss[(+|-)HH:MM] format, "
+            "(+|-)HH:MM is the UTC offset, default: local time zone). Alternatively, give a reference file/directory.",
         )
         archive_group.add_argument(
             "-c",

+ 0 - 3
src/borg/constants.py

@@ -102,9 +102,6 @@ EXIT_WARNING = 1  # reached normal end of operation, but there were issues
 EXIT_ERROR = 2  # terminated abruptly, did not reach end of operation
 EXIT_SIGNAL_BASE = 128  # terminated due to signal, rc = 128 + sig_no

-# never use datetime.isoformat(), it is evil. always use one of these:
-# datetime.strftime(ISO_FORMAT)  # output always includes .microseconds
-# datetime.strftime(ISO_FORMAT_NO_USECS)  # output never includes microseconds
 ISO_FORMAT_NO_USECS = "%Y-%m-%dT%H:%M:%S"
 ISO_FORMAT = ISO_FORMAT_NO_USECS + ".%f"


+ 1 - 1
src/borg/helpers/__init__.py

@@ -36,7 +36,7 @@ from .process import signal_handler, raising_signal_handler, sig_int, ignore_sig
 from .process import popen_with_error_handling, is_terminal, prepare_subprocess_env, create_filter_process
 from .progress import ProgressIndicatorPercent, ProgressIndicatorEndless, ProgressIndicatorMessage
 from .time import parse_timestamp, timestamp, safe_timestamp, safe_s, safe_ns, MAX_S, SUPPORT_32BIT_PLATFORMS
-from .time import format_time, format_timedelta, isoformat_time, to_localtime, OutputTimestamp
+from .time import format_time, format_timedelta, OutputTimestamp
 from .yes_no import yes, TRUISH, FALSISH, DEFAULTISH

 from .msgpack import is_slow_msgpack, is_supported_msgpack, get_limited_unpacker

+ 8 - 7
src/borg/helpers/manifest.py

@@ -3,7 +3,7 @@ import os
 import os.path
 import re
 from collections import abc, namedtuple
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 from operator import attrgetter
 from typing import Sequence, FrozenSet

@@ -65,7 +65,7 @@ class Archives(abc.MutableMapping):
         id, ts = info
         assert isinstance(id, bytes)
         if isinstance(ts, datetime):
-            ts = ts.replace(tzinfo=None).strftime(ISO_FORMAT)
+            ts = ts.isoformat(timespec="microseconds")
         assert isinstance(ts, str)
         self._archives[name] = {"id": id, "time": ts}

@@ -180,7 +180,7 @@

     @property
     def last_timestamp(self):
-        return parse_timestamp(self.timestamp, tzinfo=None)
+        return parse_timestamp(self.timestamp)

     @classmethod
     def load(cls, repository, operations, key=None, force_tam_not_required=False):
@@ -254,11 +254,12 @@ class Manifest:
             self.config["tam_required"] = True
         # self.timestamp needs to be strictly monotonically increasing. Clocks often are not set correctly
         if self.timestamp is None:
-            self.timestamp = datetime.utcnow().strftime(ISO_FORMAT)
+            self.timestamp = datetime.now(tz=timezone.utc).isoformat(timespec="microseconds")
         else:
-            prev_ts = self.last_timestamp
-            incremented = (prev_ts + timedelta(microseconds=1)).strftime(ISO_FORMAT)
-            self.timestamp = max(incremented, datetime.utcnow().strftime(ISO_FORMAT))
+            incremented_ts = self.last_timestamp + timedelta(microseconds=1)
+            now_ts = datetime.now(tz=timezone.utc)
+            max_ts = max(incremented_ts, now_ts)
+            self.timestamp = max_ts.isoformat(timespec="microseconds")
         # include checks for limits as enforced by limited unpacker (used by load())
         assert len(self.archives) <= MAX_ARCHIVES
         assert all(len(name) <= 255 for name in self.archives)
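
Note (editor's sketch, prev_ts value made up): the monotonicity guarantee is now computed on aware datetime objects rather than on formatted strings; max() of the incremented previous timestamp and the current UTC time keeps the stored manifest timestamp strictly increasing even if the wall clock stepped back::

    from datetime import datetime, timedelta, timezone

    prev_ts = datetime(2022, 8, 6, 10, 0, 0, 500000, tzinfo=timezone.utc)  # hypothetical last_timestamp
    incremented_ts = prev_ts + timedelta(microseconds=1)
    now_ts = datetime.now(tz=timezone.utc)
    new_timestamp = max(incremented_ts, now_ts).isoformat(timespec="microseconds")
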

+ 2 - 2
src/borg/helpers/misc.py

@@ -13,7 +13,6 @@ from ..logger import create_logger

 logger = create_logger()

-from .time import to_localtime
 from . import msgpack
 from .. import __version__ as borg_version
 from .. import chunker
@@ -55,7 +54,8 @@ def prune_split(archives, rule, n, kept_because=None):

     a = None
     for a in sorted(archives, key=attrgetter("ts"), reverse=True):
-        period = to_localtime(a.ts).strftime(pattern)
+        # we compute the pruning in local time zone
+        period = a.ts.astimezone().strftime(pattern)
         if period != last:
             last = period
             if a.id not in kept_because:
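
Note (illustrative, timestamp and pattern made up): astimezone() without an argument converts an aware timestamp to the local timezone, so pruning periods now follow local calendar boundaries::

    from datetime import datetime, timezone

    ts = datetime(2022, 8, 6, 23, 30, 0, tzinfo=timezone.utc)  # hypothetical archive.ts
    period = ts.astimezone().strftime("%Y-%m-%d")              # "2022-08-07" in a UTC+2 timezone
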

+ 9 - 5
src/borg/helpers/parseformat.py

@@ -21,7 +21,7 @@ logger = create_logger()
 from .errors import Error
 from .fs import get_keys_dir
 from .msgpack import Timestamp
-from .time import OutputTimestamp, format_time, to_localtime, safe_timestamp
+from .time import OutputTimestamp, format_time, safe_timestamp
 from .. import __version__ as borg_version
 from .. import __version_tuple__ as borg_version_tuple
 from ..constants import *  # NOQA
@@ -196,7 +196,7 @@ def replace_placeholders(text, overrides={}):
         "fqdn": fqdn,
         "reverse-fqdn": ".".join(reversed(fqdn.split("."))),
         "hostname": hostname,
-        "now": DatetimeWrapper(current_time.astimezone(None)),
+        "now": DatetimeWrapper(current_time.astimezone()),
         "utcnow": DatetimeWrapper(current_time),
         "user": getosusername(),
         "uuid4": str(uuid.uuid4()),
@@ -303,7 +303,7 @@ def sizeof_fmt_decimal(num, suffix="B", sep="", precision=2, sign=False):


 def format_archive(archive):
-    return "%-36s %s [%s]" % (archive.name, format_time(to_localtime(archive.ts)), bin_to_hex(archive.id))
+    return "%-36s %s [%s]" % (archive.name, format_time(archive.ts), bin_to_hex(archive.id))


 def parse_stringified_list(s):
@@ -500,9 +500,13 @@ class Location:
             )

     def with_timestamp(self, timestamp):
+        # note: this only affects the repository URL/path, not the archive name!
         return Location(
             self.raw,
-            overrides={"now": DatetimeWrapper(timestamp.astimezone(None)), "utcnow": DatetimeWrapper(timestamp)},
+            overrides={
+                "now": DatetimeWrapper(timestamp),
+                "utcnow": DatetimeWrapper(timestamp.astimezone(timezone.utc)),
+            },
         )

@@ -973,7 +977,7 @@ def basic_json_data(manifest, *, cache=None, extra=None):
     key = manifest.key
     data = extra or {}
     data.update({"repository": BorgJsonEncoder().default(manifest.repository), "encryption": {"mode": key.ARG_NAME}})
-    data["repository"]["last_modified"] = OutputTimestamp(manifest.last_timestamp.replace(tzinfo=timezone.utc))
+    data["repository"]["last_modified"] = OutputTimestamp(manifest.last_timestamp)
     if key.NAME.startswith("key file"):
         data["encryption"]["keyfile"] = key.find_key()
     if cache:
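
Note (editor's sketch, DatetimeWrapper omitted): with_timestamp() now keeps the aware timestamp as-is for {now} and converts it to UTC for {utcnow}; both describe the same instant::

    from datetime import datetime, timezone

    timestamp = datetime.now().astimezone()            # aware, local timezone
    now_value = timestamp                              # behind the {now} placeholder
    utcnow_value = timestamp.astimezone(timezone.utc)  # behind the {utcnow} placeholder
    assert now_value == utcnow_value                   # equal instants, different offsets
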

+ 21 - 39
src/borg/helpers/time.py

@@ -1,21 +1,27 @@
 import os
-import time
 from datetime import datetime, timezone

-from ..constants import ISO_FORMAT, ISO_FORMAT_NO_USECS

+def parse_timestamp(timestamp, tzinfo=timezone.utc):
+    """Parse a ISO 8601 timestamp string.

-def to_localtime(ts):
-    """Convert datetime object from UTC to local time zone"""
-    return datetime(*time.localtime((ts - datetime(1970, 1, 1, tzinfo=timezone.utc)).total_seconds())[:6])
+    For naive/unaware dt, assume it is in tzinfo timezone (default: UTC).
+    """
+    dt = datetime.fromisoformat(timestamp)
+    if dt.tzinfo is None:
+        dt = dt.replace(tzinfo=tzinfo)
+    return dt


-def parse_timestamp(timestamp, tzinfo=timezone.utc):
-    """Parse a ISO 8601 timestamp string"""
-    fmt = ISO_FORMAT if "." in timestamp else ISO_FORMAT_NO_USECS
-    dt = datetime.strptime(timestamp, fmt)
-    if tzinfo is not None:
-        dt = dt.replace(tzinfo=tzinfo)
+def parse_local_timestamp(timestamp, tzinfo=None):
+    """Parse a ISO 8601 timestamp string.
+
+    For naive/unaware dt, assume it is in local timezone.
+    Convert to tzinfo timezone (the default None means: local timezone).
+    """
+    dt = datetime.fromisoformat(timestamp)
+    if dt.tzinfo is None:
+        dt = dt.astimezone(tz=tzinfo)
     return dt


@@ -26,22 +32,8 @@ def timestamp(s):
         ts = safe_s(os.stat(s).st_mtime)
         return datetime.fromtimestamp(ts, tz=timezone.utc)
     except OSError:
-        # didn't work, try parsing as timestamp. UTC, no TZ, no microsecs support.
-        for format in (
-            "%Y-%m-%dT%H:%M:%SZ",
-            "%Y-%m-%dT%H:%M:%S+00:00",
-            "%Y-%m-%dT%H:%M:%S",
-            "%Y-%m-%d %H:%M:%S",
-            "%Y-%m-%dT%H:%M",
-            "%Y-%m-%d %H:%M",
-            "%Y-%m-%d",
-            "%Y-%j",
-        ):
-            try:
-                return datetime.strptime(s, format).replace(tzinfo=timezone.utc)
-            except ValueError:
-                continue
-        raise ValueError
+        # didn't work, try parsing as a ISO timestamp. if no TZ is given, we assume local timezone.
+        return parse_local_timestamp(s)


 # Not too rarely, we get crappy timestamps from the fs, that overflow some computations.
 # Not too rarely, we get crappy timestamps from the fs, that overflow some computations.
@@ -98,15 +90,7 @@ def format_time(ts: datetime, format_spec=""):
     """
     Convert *ts* to a human-friendly format with textual weekday.
     """
-    return ts.strftime("%a, %Y-%m-%d %H:%M:%S" if format_spec == "" else format_spec)
-
-
-def isoformat_time(ts: datetime):
-    """
-    Format *ts* according to ISO 8601.
-    """
-    # note: first make all datetime objects tz aware before adding %z here.
-    return ts.strftime(ISO_FORMAT)
+    return ts.strftime("%a, %Y-%m-%d %H:%M:%S %z" if format_spec == "" else format_spec)


 def format_timedelta(td):
@@ -127,8 +111,6 @@ def format_timedelta(td):

 class OutputTimestamp:
     def __init__(self, ts: datetime):
-        if ts.tzinfo == timezone.utc:
-            ts = to_localtime(ts)
         self.ts = ts

     def __format__(self, format_spec):
@@ -138,6 +120,6 @@ class OutputTimestamp:
         return f"{self}"

     def isoformat(self):
-        return isoformat_time(self.ts)
+        return self.ts.isoformat(timespec="microseconds")

     to_json = isoformat
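
Note (illustrative, example values made up): the two new parsers differ only in what they assume for a naive input; both accept an explicit offset via datetime.fromisoformat(). The timestamp() helper above falls back to parse_local_timestamp() for --timestamp values that are not files::

    from borg.helpers.time import parse_timestamp, parse_local_timestamp

    parse_timestamp("2022-08-06T12:00:00.000000")    # naive -> assumed UTC
    parse_local_timestamp("2022-08-06T12:00:00")     # naive -> assumed local timezone
    parse_timestamp("2022-08-06T12:00:00+02:00")     # explicit offset -> kept as-is
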

+ 4 - 2
src/borg/repository.py

@@ -8,7 +8,7 @@ import time
 from binascii import hexlify, unhexlify
 from collections import defaultdict
 from configparser import ConfigParser
-from datetime import datetime
+from datetime import datetime, timezone
 from functools import partial
 from itertools import islice

@@ -656,7 +656,9 @@ class Repository:
         if self.append_only:
             with open(os.path.join(self.path, "transactions"), "a") as log:
                 print(
-                    "transaction %d, UTC time %s" % (transaction_id, datetime.utcnow().strftime(ISO_FORMAT)), file=log
+                    "transaction %d, UTC time %s"
+                    % (transaction_id, datetime.now(tz=timezone.utc).isoformat(timespec="microseconds")),
+                    file=log,
                 )

         # Write hints file
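
Note (illustrative, transaction id made up): the transactions log stays in UTC, but the aware timestamp now serializes with an explicit +00:00 offset::

    from datetime import datetime, timezone

    line = "transaction %d, UTC time %s" % (42, datetime.now(tz=timezone.utc).isoformat(timespec="microseconds"))
    # e.g. "transaction 42, UTC time 2022-08-06T09:29:14.929805+00:00"
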

+ 18 - 19
src/borg/testsuite/archiver.py

@@ -1,5 +1,4 @@
 import argparse
-import dateutil.tz
 import errno
 import io
 import json
@@ -18,9 +17,7 @@ import time
 import unittest
 from binascii import unhexlify, b2a_base64, a2b_base64
 from configparser import ConfigParser
-from datetime import datetime
-from datetime import timezone
-from datetime import timedelta
+from datetime import datetime, timezone, timedelta
 from hashlib import sha256
 from io import BytesIO, StringIO
 from unittest.mock import patch
@@ -249,6 +246,11 @@ def test_disk_full(cmd):
             assert rc == EXIT_SUCCESS


+def checkts(ts):
+    # check if the timestamp is in the expected format
+    assert datetime.strptime(ts, ISO_FORMAT + "%z")  # must not raise
+
+
 class ArchiverTestCaseBase(BaseTestCase):
     EXE: str = None  # python source based
     FORK_DEFAULT = False
@@ -1682,7 +1684,7 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         repository = info_repo["repository"]
         assert len(repository["id"]) == 64
         assert "last_modified" in repository
-        assert datetime.strptime(repository["last_modified"], ISO_FORMAT)  # must not raise
+        checkts(repository["last_modified"])
         assert info_repo["encryption"]["mode"] == RK_ENCRYPTION[13:]
         assert "keyfile" not in info_repo["encryption"]
         cache = info_repo["cache"]
@@ -1701,8 +1703,8 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         assert isinstance(archive["duration"], float)
         assert len(archive["id"]) == 64
         assert "stats" in archive
-        assert datetime.strptime(archive["start"], ISO_FORMAT)
-        assert datetime.strptime(archive["end"], ISO_FORMAT)
+        checkts(archive["start"])
+        checkts(archive["end"])

     def test_info_json_of_empty_archive(self):
         """See https://github.com/borgbackup/borg/issues/6120"""
@@ -2298,18 +2300,12 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         # the latest archive must be still there
         self.assert_in("test5", output)

-    # Given a date and time in local tz, create a UTC timestamp string suitable
-    # for create --timestamp command line option
-    def _to_utc_timestamp(self, year, month, day, hour, minute, second):
-        dtime = datetime(year, month, day, hour, minute, second, 0, dateutil.tz.gettz())
-        return dtime.astimezone(dateutil.tz.UTC).strftime("%Y-%m-%dT%H:%M:%S")
-
     def _create_archive_ts(self, name, y, m, d, H=0, M=0, S=0):
         self.cmd(
             f"--repo={self.repository_location}",
             "create",
             "--timestamp",
-            self._to_utc_timestamp(y, m, d, H, M, S),
+            datetime(y, m, d, H, M, S, 0).strftime(ISO_FORMAT_NO_USECS),  # naive == local time / local tz
             name,
             src_dir,
         )
@@ -2579,11 +2575,11 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         list_repo = json.loads(self.cmd(f"--repo={self.repository_location}", "rlist", "--json"))
         repository = list_repo["repository"]
         assert len(repository["id"]) == 64
-        assert datetime.strptime(repository["last_modified"], ISO_FORMAT)  # must not raise
+        checkts(repository["last_modified"])
         assert list_repo["encryption"]["mode"] == RK_ENCRYPTION[13:]
         assert "keyfile" not in list_repo["encryption"]
         archive0 = list_repo["archives"][0]
-        assert datetime.strptime(archive0["time"], ISO_FORMAT)  # must not raise
+        checkts(archive0["time"])

         list_archive = self.cmd(f"--repo={self.repository_location}", "list", "test", "--json-lines")
         items = [json.loads(s) for s in list_archive.splitlines()]
@@ -2591,7 +2587,6 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         file1 = items[1]
         assert file1["path"] == "input/file1"
         assert file1["size"] == 81920
-        assert datetime.strptime(file1["mtime"], ISO_FORMAT)  # must not raise

         list_archive = self.cmd(
             f"--repo={self.repository_location}", "list", "test", "--json-lines", "--format={sha256}"
@@ -4058,7 +4053,9 @@ class ManifestAuthenticationTest(ArchiverTestCaseBase):
                             "version": 1,
                             "archives": {},
                             "config": {},
-                            "timestamp": (datetime.utcnow() + timedelta(days=1)).strftime(ISO_FORMAT),
+                            "timestamp": (datetime.now(tz=timezone.utc) + timedelta(days=1)).isoformat(
+                                timespec="microseconds"
+                            ),
                         }
                     ),
                 ),
@@ -4078,7 +4075,9 @@ class ManifestAuthenticationTest(ArchiverTestCaseBase):
                         {
                             "version": 1,
                             "archives": {},
-                            "timestamp": (datetime.utcnow() + timedelta(days=1)).strftime(ISO_FORMAT),
+                            "timestamp": (datetime.now(tz=timezone.utc) + timedelta(days=1)).isoformat(
+                                timespec="microseconds"
+                            ),
                         }
                     ),
                 ),
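
Note (editor's illustration): checkts() relies on %z, which since Python 3.7 also accepts offsets written with a colon, so the isoformat() output used throughout this PR parses cleanly::

    from datetime import datetime

    ISO_FORMAT = "%Y-%m-%dT%H:%M:%S.%f"  # as defined in src/borg/constants.py
    datetime.strptime("2022-08-06T11:29:14.929805+02:00", ISO_FORMAT + "%z")  # must not raise
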

+ 5 - 10
src/borg/upgrade.py

@@ -130,16 +130,11 @@ class UpgraderFrom12To20:
         new_metadata = {}
         # keep all metadata except archive version and stats. also do not keep
         # recreate_source_id, recreate_args, recreate_partial_chunks which were used only in 1.1.0b1 .. b2.
-        for attr in (
-            "cmdline",
-            "hostname",
-            "username",
-            "time",
-            "time_end",
-            "comment",
-            "chunker_params",
-            "recreate_cmdline",
-        ):
+        for attr in ("cmdline", "hostname", "username", "comment", "chunker_params", "recreate_cmdline"):
             if hasattr(metadata, attr):
                 new_metadata[attr] = getattr(metadata, attr)
+        # old borg used UTC timestamps, but did not have the explicit tz offset in them.
+        for attr in ("time", "time_end"):
+            if hasattr(metadata, attr):
+                new_metadata[attr] = getattr(metadata, attr) + "+00:00"
         return new_metadata
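
Note (editor's sketch, metadata value made up): borg 1.x stored naive UTC strings for "time"/"time_end"; appending "+00:00" turns them into offset-aware ISO 8601 values that the new parsing code accepts unchanged::

    from datetime import datetime

    old = "2019-02-24T23:03:45.868628"    # naive UTC string from a borg 1.x archive
    new = old + "+00:00"
    datetime.fromisoformat(new)           # -> aware datetime with a UTC offset of +00:00
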

+ 0 - 1
tox.ini

@@ -36,5 +36,4 @@ deps =
     pytest
     mypy
     pkgconfig
-    types-python-dateutil
 commands = mypy