helpers.py 22 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667668669670671672673674675676677678679680681682683684685686687688689690691692693694695
  1. import argparse
  2. import binascii
  3. from collections import namedtuple
  4. import grp
  5. import os
  6. import pwd
  7. import re
  8. import sys
  9. import time
  10. import unicodedata
  11. from datetime import datetime, timezone, timedelta
  12. from fnmatch import translate
  13. from operator import attrgetter
  14. import msgpack
  15. from . import hashindex
  16. from . import chunker
  17. from . import crypto
  18. class Error(Exception):
  19. """Error base class"""
  20. exit_code = 1
  21. def get_message(self):
  22. return 'Error: ' + type(self).__doc__.format(*self.args)
class ExtensionModuleError(Error):
    # Raised by check_extension_modules(); the docstring doubles as the
    # user-visible error message (see Error.get_message).
    """The Borg binary extension modules do not seem to be properly installed"""
  25. def check_extension_modules():
  26. from . import platform
  27. if hashindex.API_VERSION != 2:
  28. raise ExtensionModuleError
  29. if chunker.API_VERSION != 2:
  30. raise ExtensionModuleError
  31. if crypto.API_VERSION != 2:
  32. raise ExtensionModuleError
  33. if platform.API_VERSION != 2:
  34. raise ExtensionModuleError
class Manifest:
    """In-repository index of archives plus config, stored encrypted.

    The manifest lives under a well-known all-zero id so it can always be
    located without any other state.
    """
    MANIFEST_ID = b'\0' * 32

    def __init__(self, key, repository):
        """Create an empty manifest bound to *key* and *repository*."""
        self.archives = {}  # archive name (str) -> msgpack-decoded metadata dict
        self.config = {}
        self.key = key
        self.repository = repository

    @classmethod
    def load(cls, repository, key=None):
        """Fetch, decrypt and deserialize the manifest from *repository*.

        If *key* is not given, it is derived from the stored data via
        key_factory. Returns a (manifest, key) tuple.
        Raises ValueError if the stored manifest version is not 1.
        """
        from .key import key_factory
        cdata = repository.get(cls.MANIFEST_ID)
        if not key:
            key = key_factory(repository, cdata)
        manifest = cls(key, repository)
        data = key.decrypt(None, cdata)
        manifest.id = key.id_hash(data)
        m = msgpack.unpackb(data)
        if not m.get(b'version') == 1:
            raise ValueError('Invalid manifest version')
        # msgpack yields bytes keys; archive names are stored UTF-8 encoded.
        manifest.archives = dict((k.decode('utf-8'), v) for k, v in m[b'archives'].items())
        manifest.timestamp = m.get(b'timestamp')
        if manifest.timestamp:
            manifest.timestamp = manifest.timestamp.decode('ascii')
        manifest.config = m[b'config']
        return manifest, key

    def write(self):
        """Serialize, encrypt and store the manifest back into the repository."""
        self.timestamp = datetime.utcnow().isoformat()
        # StableDict sorts items, so identical content always serializes to
        # identical bytes (and therefore an identical id).
        data = msgpack.packb(StableDict({
            'version': 1,
            'archives': self.archives,
            'timestamp': self.timestamp,
            'config': self.config,
        }))
        self.id = self.key.id_hash(data)
        self.repository.put(self.MANIFEST_ID, self.key.encrypt(data))

    def list_archive_infos(self, sort_by=None, reverse=False):
        # inexpensive Archive.list_archives replacement if we just need .name, .id, .ts
        ArchiveInfo = namedtuple('ArchiveInfo', 'name id ts')
        archives = []
        for name, values in self.archives.items():
            ts = parse_timestamp(values[b'time'].decode('utf-8'))
            id = values[b'id']
            archives.append(ArchiveInfo(name=name, id=id, ts=ts))
        if sort_by is not None:
            archives = sorted(archives, key=attrgetter(sort_by), reverse=reverse)
        return archives
  81. def prune_within(archives, within):
  82. multiplier = {'H': 1, 'd': 24, 'w': 24*7, 'm': 24*31, 'y': 24*365}
  83. try:
  84. hours = int(within[:-1]) * multiplier[within[-1]]
  85. except (KeyError, ValueError):
  86. # I don't like how this displays the original exception too:
  87. raise argparse.ArgumentTypeError('Unable to parse --within option: "%s"' % within)
  88. if hours <= 0:
  89. raise argparse.ArgumentTypeError('Number specified using --within option must be positive')
  90. target = datetime.now(timezone.utc) - timedelta(seconds=hours*60*60)
  91. return [a for a in archives if a.ts > target]
  92. def prune_split(archives, pattern, n, skip=[]):
  93. last = None
  94. keep = []
  95. if n == 0:
  96. return keep
  97. for a in sorted(archives, key=attrgetter('ts'), reverse=True):
  98. period = to_localtime(a.ts).strftime(pattern)
  99. if period != last:
  100. last = period
  101. if a not in skip:
  102. keep.append(a)
  103. if len(keep) == n:
  104. break
  105. return keep
  106. class Statistics:
  107. def __init__(self):
  108. self.osize = self.csize = self.usize = self.nfiles = 0
  109. def update(self, size, csize, unique):
  110. self.osize += size
  111. self.csize += csize
  112. if unique:
  113. self.usize += csize
  114. def print_(self, label, cache):
  115. total_size, total_csize, unique_size, unique_csize, total_unique_chunks, total_chunks = cache.chunks.summarize()
  116. print()
  117. print(' Original size Compressed size Deduplicated size')
  118. print('%-15s %20s %20s %20s' % (label, format_file_size(self.osize), format_file_size(self.csize), format_file_size(self.usize)))
  119. print('All archives: %20s %20s %20s' % (format_file_size(total_size), format_file_size(total_csize), format_file_size(unique_csize)))
  120. print()
  121. print(' Unique chunks Total chunks')
  122. print('Chunk index: %20d %20d' % (total_unique_chunks, total_chunks))
  123. def show_progress(self, item=None, final=False):
  124. if not final:
  125. path = remove_surrogates(item[b'path']) if item else ''
  126. if len(path) > 43:
  127. path = '%s...%s' % (path[:20], path[-20:])
  128. msg = '%9s O %9s C %9s D %-43s' % (
  129. format_file_size(self.osize), format_file_size(self.csize), format_file_size(self.usize), path)
  130. else:
  131. msg = ' ' * 79
  132. print(msg, end='\r')
  133. sys.stdout.flush()
def get_keys_dir():
    """Determine where repository keys are stored: BORG_KEYS_DIR if set,
    otherwise ~/.borg/keys."""
    return os.environ.get('BORG_KEYS_DIR',
                          os.path.join(os.path.expanduser('~'), '.borg', 'keys'))
def get_cache_dir():
    """Determine where the cache is stored: BORG_CACHE_DIR if set,
    otherwise ~/.cache/borg."""
    return os.environ.get('BORG_CACHE_DIR',
                          os.path.join(os.path.expanduser('~'), '.cache', 'borg'))
  142. def to_localtime(ts):
  143. """Convert datetime object from UTC to local time zone"""
  144. return datetime(*time.localtime((ts - datetime(1970, 1, 1, tzinfo=timezone.utc)).total_seconds())[:6])
  145. def parse_timestamp(timestamp):
  146. """Parse a ISO 8601 timestamp string"""
  147. if '.' in timestamp: # microseconds might not be pressent
  148. return datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S.%f').replace(tzinfo=timezone.utc)
  149. else:
  150. return datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc)
  151. def update_excludes(args):
  152. """Merge exclude patterns from files with those on command line.
  153. Empty lines and lines starting with '#' are ignored, but whitespace
  154. is not stripped."""
  155. if hasattr(args, 'exclude_files') and args.exclude_files:
  156. if not hasattr(args, 'excludes') or args.excludes is None:
  157. args.excludes = []
  158. for file in args.exclude_files:
  159. patterns = [line.rstrip('\r\n') for line in file if not line.startswith('#')]
  160. args.excludes += [ExcludePattern(pattern) for pattern in patterns if pattern]
  161. file.close()
  162. def adjust_patterns(paths, excludes):
  163. if paths:
  164. return (excludes or []) + [IncludePattern(path) for path in paths] + [ExcludePattern('*')]
  165. else:
  166. return excludes
  167. def exclude_path(path, patterns):
  168. """Used by create and extract sub-commands to determine
  169. whether or not an item should be processed.
  170. """
  171. for pattern in (patterns or []):
  172. if pattern.match(path):
  173. return isinstance(pattern, ExcludePattern)
  174. return False
  175. # For both IncludePattern and ExcludePattern, we require that
  176. # the pattern either match the whole path or an initial segment
  177. # of the path up to but not including a path separator. To
  178. # unify the two cases, we add a path separator to the end of
  179. # the path before matching.
  180. ##### !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
  181. ##### For discussion only, don't merge this code!
  182. ##### !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
  183. class IncludePattern:
  184. """Literal files or directories listed on the command line
  185. for some operations (e.g. extract, but not create).
  186. If a directory is specified, all paths that start with that
  187. path match as well. A trailing slash makes no difference.
  188. """
  189. def __init__(self, pattern):
  190. def match(path):
  191. return (path+os.path.sep).startswith(self.pattern)
  192. # HFS+ converts paths to a canonical form, so users shouldn't be
  193. # required to enter an exact match
  194. if sys.platform in ('darwin',):
  195. # repository paths will be mostly in NFD, as the OSX exception list
  196. # to NFD is small, so normalize to that form for best performance
  197. pattern = unicodedata.normalize("NFD", pattern)
  198. self.match = lambda p: match(unicodedata.normalize("NFD", p))
  199. # Windows and Unix filesystems allow different forms, so users
  200. # always have to enter an exact match
  201. else:
  202. self.match = match
  203. self.pattern = os.path.normpath(pattern).rstrip(os.path.sep)+os.path.sep
  204. def __repr__(self):
  205. return '%s(%s)' % (type(self), self.pattern)
  206. class ExcludePattern(IncludePattern):
  207. """Shell glob patterns to exclude. A trailing slash means to
  208. exclude the contents of a directory, but not the directory itself.
  209. """
  210. def __init__(self, pattern):
  211. def match(path):
  212. return self.regex.match(path+os.path.sep) is not None
  213. if pattern.endswith(os.path.sep):
  214. self.pattern = os.path.normpath(pattern).rstrip(os.path.sep)+os.path.sep+'*'+os.path.sep
  215. else:
  216. self.pattern = os.path.normpath(pattern)+os.path.sep+'*'
  217. # HFS+ converts paths to a canonical form, so users shouldn't be
  218. # required to enter an exact match
  219. if sys.platform in ('darwin',):
  220. # repository paths will be mostly in NFD, as the OSX exception list
  221. # to NFD is small, so normalize to that form for best performance
  222. self.pattern = unicodedata.normalize("NFD", self.pattern)
  223. self.match = lambda p: match(unicodedata.normalize("NFD", p))
  224. # Windows and Unix filesystems allow different forms, so users
  225. # always have to enter an exact match
  226. else:
  227. self.match = match
  228. # fnmatch and re.match both cache compiled regular expressions.
  229. # Nevertheless, this is about 10 times faster.
  230. self.regex = re.compile(translate(self.pattern))
  231. def __repr__(self):
  232. return '%s(%s)' % (type(self), self.pattern)
  233. def timestamp(s):
  234. """Convert a --timestamp=s argument to a datetime object"""
  235. try:
  236. # is it pointing to a file / directory?
  237. ts = os.stat(s).st_mtime
  238. return datetime.utcfromtimestamp(ts)
  239. except OSError:
  240. # didn't work, try parsing as timestamp. UTC, no TZ, no microsecs support.
  241. for format in ('%Y-%m-%dT%H:%M:%SZ', '%Y-%m-%dT%H:%M:%S+00:00',
  242. '%Y-%m-%dT%H:%M:%S', '%Y-%m-%d %H:%M:%S',
  243. '%Y-%m-%dT%H:%M', '%Y-%m-%d %H:%M',
  244. '%Y-%m-%d', '%Y-%j',
  245. ):
  246. try:
  247. return datetime.strptime(s, format)
  248. except ValueError:
  249. continue
  250. raise ValueError
  251. def ChunkerParams(s):
  252. chunk_min, chunk_max, chunk_mask, window_size = s.split(',')
  253. if int(chunk_max) > 23:
  254. # do not go beyond 2**23 (8MB) chunk size now,
  255. # COMPR_BUFFER can only cope with up to this size
  256. raise ValueError('max. chunk size exponent must not be more than 23 (2^23 = 8MiB max. chunk size)')
  257. return int(chunk_min), int(chunk_max), int(chunk_mask), int(window_size)
def CompressionSpec(s):
    """Parse a --compression argument into a dict like {'name': ..., 'level': ...}.

    Accepted forms: a bare integer 0..9 (deprecated shorthand for zlib),
    'none', 'lz4', 'zlib[,level]' or 'lzma[,level]'. Raises ValueError for
    anything else.
    """
    values = s.split(',')
    count = len(values)
    if count < 1:
        # defensive: str.split always yields at least one element
        raise ValueError
    compression = values[0]
    try:
        compression = int(compression)
        if count > 1:
            # an integer spec takes no extra fields (e.g. "5,9" is invalid);
            # this raise is caught below and then fails the name checks
            raise ValueError
        # DEPRECATED: it is just --compression N
        if 0 <= compression <= 9:
            return dict(name='zlib', level=compression)
        raise ValueError
    except ValueError:
        # --compression algo[,...]  (also reached when values[0] is not an int)
        name = compression
        if name in ('none', 'lz4', ):
            return dict(name=name)
        if name in ('zlib', 'lzma', ):
            if count < 2:
                level = 6  # default compression level in py stdlib
            elif count == 2:
                level = int(values[1])
                if not 0 <= level <= 9:
                    # raised inside the handler, so it propagates to the caller
                    raise ValueError
            else:
                raise ValueError
            return dict(name=name, level=level)
        raise ValueError
  288. def is_cachedir(path):
  289. """Determines whether the specified path is a cache directory (and
  290. therefore should potentially be excluded from the backup) according to
  291. the CACHEDIR.TAG protocol
  292. (http://www.brynosaurus.com/cachedir/spec.html).
  293. """
  294. tag_contents = b'Signature: 8a477f597d28d172789f06886806bc55'
  295. tag_path = os.path.join(path, 'CACHEDIR.TAG')
  296. try:
  297. if os.path.exists(tag_path):
  298. with open(tag_path, 'rb') as tag_file:
  299. tag_data = tag_file.read(len(tag_contents))
  300. if tag_data == tag_contents:
  301. return True
  302. except OSError:
  303. pass
  304. return False
  305. def format_time(t):
  306. """Format datetime suitable for fixed length list output
  307. """
  308. if abs((datetime.now() - t).days) < 365:
  309. return t.strftime('%b %d %H:%M')
  310. else:
  311. return t.strftime('%b %d %Y')
  312. def format_timedelta(td):
  313. """Format timedelta in a human friendly format
  314. """
  315. # Since td.total_seconds() requires python 2.7
  316. ts = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6) / float(10 ** 6)
  317. s = ts % 60
  318. m = int(ts / 60) % 60
  319. h = int(ts / 3600) % 24
  320. txt = '%.2f seconds' % s
  321. if m:
  322. txt = '%d minutes %s' % (m, txt)
  323. if h:
  324. txt = '%d hours %s' % (h, txt)
  325. if td.days:
  326. txt = '%d days %s' % (td.days, txt)
  327. return txt
  328. def format_file_mode(mod):
  329. """Format file mode bits for list output
  330. """
  331. def x(v):
  332. return ''.join(v & m and s or '-'
  333. for m, s in ((4, 'r'), (2, 'w'), (1, 'x')))
  334. return '%s%s%s' % (x(mod // 64), x(mod // 8), x(mod))
  335. def format_file_size(v):
  336. """Format file size into a human friendly format
  337. """
  338. if abs(v) > 10**12:
  339. return '%.2f TB' % (v / 10**12)
  340. elif abs(v) > 10**9:
  341. return '%.2f GB' % (v / 10**9)
  342. elif abs(v) > 10**6:
  343. return '%.2f MB' % (v / 10**6)
  344. elif abs(v) > 10**3:
  345. return '%.2f kB' % (v / 10**3)
  346. else:
  347. return '%d B' % v
  348. def format_archive(archive):
  349. return '%-36s %s' % (archive.name, to_localtime(archive.ts).strftime('%c'))
class IntegrityError(Error):
    # The docstring is the user-visible message (see Error.get_message).
    """Data integrity error"""
  352. def memoize(function):
  353. cache = {}
  354. def decorated_function(*args):
  355. try:
  356. return cache[args]
  357. except KeyError:
  358. val = function(*args)
  359. cache[args] = val
  360. return val
  361. return decorated_function
  362. @memoize
  363. def uid2user(uid, default=None):
  364. try:
  365. return pwd.getpwuid(uid).pw_name
  366. except KeyError:
  367. return default
  368. @memoize
  369. def user2uid(user, default=None):
  370. try:
  371. return user and pwd.getpwnam(user).pw_uid
  372. except KeyError:
  373. return default
  374. @memoize
  375. def gid2group(gid, default=None):
  376. try:
  377. return grp.getgrgid(gid).gr_name
  378. except KeyError:
  379. return default
  380. @memoize
  381. def group2gid(group, default=None):
  382. try:
  383. return group and grp.getgrnam(group).gr_gid
  384. except KeyError:
  385. return default
  386. def posix_acl_use_stored_uid_gid(acl):
  387. """Replace the user/group field with the stored uid/gid
  388. """
  389. entries = []
  390. for entry in acl.decode('ascii').split('\n'):
  391. if entry:
  392. fields = entry.split(':')
  393. if len(fields) == 4:
  394. entries.append(':'.join([fields[0], fields[3], fields[2]]))
  395. else:
  396. entries.append(entry)
  397. return ('\n'.join(entries)).encode('ascii')
  398. class Location:
  399. """Object representing a repository / archive location
  400. """
  401. proto = user = host = port = path = archive = None
  402. ssh_re = re.compile(r'(?P<proto>ssh)://(?:(?P<user>[^@]+)@)?'
  403. r'(?P<host>[^:/#]+)(?::(?P<port>\d+))?'
  404. r'(?P<path>[^:]+)(?:::(?P<archive>.+))?$')
  405. file_re = re.compile(r'(?P<proto>file)://'
  406. r'(?P<path>[^:]+)(?:::(?P<archive>.+))?$')
  407. scp_re = re.compile(r'((?:(?P<user>[^@]+)@)?(?P<host>[^:/]+):)?'
  408. r'(?P<path>[^:]+)(?:::(?P<archive>.+))?$')
  409. # get the repo from BORG_RE env and the optional archive from param.
  410. # if the syntax requires giving REPOSITORY (see "borg mount"),
  411. # use "::" to let it use the env var.
  412. # if REPOSITORY argument is optional, it'll automatically use the env.
  413. env_re = re.compile(r'(?:::(?P<archive>.+)?)?$')
  414. def __init__(self, text=''):
  415. self.orig = text
  416. if not self.parse(self.orig):
  417. raise ValueError
  418. def parse(self, text):
  419. valid = self._parse(text)
  420. if valid:
  421. return True
  422. m = self.env_re.match(text)
  423. if not m:
  424. return False
  425. repo = os.environ.get('BORG_REPO')
  426. if repo is None:
  427. return False
  428. valid = self._parse(repo)
  429. if not valid:
  430. return False
  431. self.archive = m.group('archive')
  432. return True
  433. def _parse(self, text):
  434. m = self.ssh_re.match(text)
  435. if m:
  436. self.proto = m.group('proto')
  437. self.user = m.group('user')
  438. self.host = m.group('host')
  439. self.port = m.group('port') and int(m.group('port')) or None
  440. self.path = m.group('path')
  441. self.archive = m.group('archive')
  442. return True
  443. m = self.file_re.match(text)
  444. if m:
  445. self.proto = m.group('proto')
  446. self.path = m.group('path')
  447. self.archive = m.group('archive')
  448. return True
  449. m = self.scp_re.match(text)
  450. if m:
  451. self.user = m.group('user')
  452. self.host = m.group('host')
  453. self.path = m.group('path')
  454. self.archive = m.group('archive')
  455. self.proto = self.host and 'ssh' or 'file'
  456. return True
  457. return False
  458. def __str__(self):
  459. items = []
  460. items.append('proto=%r' % self.proto)
  461. items.append('user=%r' % self.user)
  462. items.append('host=%r' % self.host)
  463. items.append('port=%r' % self.port)
  464. items.append('path=%r' % self.path)
  465. items.append('archive=%r' % self.archive)
  466. return ', '.join(items)
  467. def to_key_filename(self):
  468. name = re.sub('[^\w]', '_', self.path).strip('_')
  469. if self.proto != 'file':
  470. name = self.host + '__' + name
  471. return os.path.join(get_keys_dir(), name)
  472. def __repr__(self):
  473. return "Location(%s)" % self
  474. def canonical_path(self):
  475. if self.proto == 'file':
  476. return self.path
  477. else:
  478. if self.path and self.path.startswith('~'):
  479. path = '/' + self.path
  480. elif self.path and not self.path.startswith('/'):
  481. path = '/~/' + self.path
  482. else:
  483. path = self.path
  484. return 'ssh://{}{}{}{}'.format('{}@'.format(self.user) if self.user else '',
  485. self.host,
  486. ':{}'.format(self.port) if self.port else '',
  487. path)
  488. def location_validator(archive=None):
  489. def validator(text):
  490. try:
  491. loc = Location(text)
  492. except ValueError:
  493. raise argparse.ArgumentTypeError('Invalid location format: "%s"' % text)
  494. if archive is True and not loc.archive:
  495. raise argparse.ArgumentTypeError('"%s": No archive specified' % text)
  496. elif archive is False and loc.archive:
  497. raise argparse.ArgumentTypeError('"%s" No archive can be specified' % text)
  498. return loc
  499. return validator
def read_msgpack(filename):
    """Read one msgpack-serialized object from *filename* and return it."""
    with open(filename, 'rb') as fd:
        return msgpack.unpack(fd)
  503. def write_msgpack(filename, d):
  504. with open(filename + '.tmp', 'wb') as fd:
  505. msgpack.pack(d, fd)
  506. fd.flush()
  507. os.fsync(fd.fileno())
  508. os.rename(filename + '.tmp', filename)
  509. def decode_dict(d, keys, encoding='utf-8', errors='surrogateescape'):
  510. for key in keys:
  511. if isinstance(d.get(key), bytes):
  512. d[key] = d[key].decode(encoding, errors)
  513. return d
  514. def remove_surrogates(s, errors='replace'):
  515. """Replace surrogates generated by fsdecode with '?'
  516. """
  517. return s.encode('utf-8', errors).decode('utf-8')
  518. _safe_re = re.compile(r'^((\.\.)?/+)+')
  519. def make_path_safe(path):
  520. """Make path safe by making it relative and local
  521. """
  522. return _safe_re.sub('', path) or '.'
def daemonize():
    """Detach process from controlling terminal and run in background
    """
    # Classic double fork: the first fork plus setsid() detaches from the
    # controlling terminal; the second fork ensures we cannot reacquire one.
    pid = os.fork()
    if pid:
        os._exit(0)
    os.setsid()
    pid = os.fork()
    if pid:
        os._exit(0)
    os.chdir('/')
    # Redirect stdin/stdout/stderr (fds 0-2) to /dev/null.
    os.close(0)
    os.close(1)
    os.close(2)
    fd = os.open('/dev/null', os.O_RDWR)
    os.dup2(fd, 0)
    os.dup2(fd, 1)
    os.dup2(fd, 2)
  541. class StableDict(dict):
  542. """A dict subclass with stable items() ordering"""
  543. def items(self):
  544. return sorted(super().items())
  545. if sys.version < '3.3':
  546. # st_mtime_ns attribute only available in 3.3+
  547. def st_mtime_ns(st):
  548. return int(st.st_mtime * 1e9)
  549. # unhexlify in < 3.3 incorrectly only accepts bytes input
  550. def unhexlify(data):
  551. if isinstance(data, str):
  552. data = data.encode('ascii')
  553. return binascii.unhexlify(data)
  554. else:
  555. def st_mtime_ns(st):
  556. return st.st_mtime_ns
  557. unhexlify = binascii.unhexlify
  558. def bigint_to_int(mtime):
  559. """Convert bytearray to int
  560. """
  561. if isinstance(mtime, bytes):
  562. return int.from_bytes(mtime, 'little', signed=True)
  563. return mtime
  564. def int_to_bigint(value):
  565. """Convert integers larger than 64 bits to bytearray
  566. Smaller integers are left alone
  567. """
  568. if value.bit_length() > 63:
  569. return value.to_bytes((value.bit_length() + 9) // 8, 'little', signed=True)
  570. return value