
PR #208 - Merge branch 'stdin_stdout_support' of https://github.com/thomaswaldmann/attic into merge

Thomas Waldmann, 10 years ago
parent commit 29449e40bc
2 changed files with 38 additions and 6 deletions
  1. attic/archive.py   +24 -4
  2. attic/archiver.py  +14 -2

attic/archive.py  (+24 -4)

@@ -230,11 +230,14 @@ class Archive:
         cache.rollback()
         return stats

-    def extract_item(self, item, restore_attrs=True, dry_run=False):
-        if dry_run:
+    def extract_item(self, item, restore_attrs=True, dry_run=False, stdout=False):
+        if dry_run or stdout:
             if b'chunks' in item:
-                for _ in self.pipeline.fetch_many([c[0] for c in item[b'chunks']], is_preloaded=True):
-                    pass
+                for data in self.pipeline.fetch_many([c[0] for c in item[b'chunks']], is_preloaded=True):
+                    if stdout:
+                        sys.stdout.buffer.write(data)
+                if stdout:
+                    sys.stdout.buffer.flush()
             return

         dest = self.cwd
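
Note: the new stdout branch reuses the existing dry-run chunk walk, but instead of discarding the fetched data it writes every chunk to the binary stdout buffer and flushes once after the last chunk, so file contents stream out without touching the filesystem. A minimal standalone sketch of that pattern (the chunks iterable stands in for attic's pipeline.fetch_many() result; this is not attic code):

    import sys

    def stream_chunks_to_stdout(chunks):
        # chunks: any iterable of bytes objects, e.g. fetched file chunks
        out = sys.stdout.buffer      # binary buffer, bypasses text encoding
        for data in chunks:
            out.write(data)
        out.flush()                  # single flush after the last chunk

    if __name__ == '__main__':
        stream_chunks_to_stdout([b'hello ', b'world\n'])
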
@@ -381,6 +384,23 @@ class Archive:
         item.update(self.stat_attrs(st, path))
         self.add_item(item)

+    def process_stdin(self, path, cache):
+        uid, gid = 0, 0
+        fd = sys.stdin.buffer  # binary
+        chunks = []
+        for chunk in self.chunker.chunkify(fd):
+            chunks.append(cache.add_chunk(self.key.id_hash(chunk), chunk, self.stats))
+        self.stats.nfiles += 1
+        item = {
+            b'path': path,
+            b'chunks': chunks,
+            b'mode': 0o100660,  # regular file, ug=rw
+            b'uid': uid, b'user': uid2user(uid),
+            b'gid': gid, b'group': gid2group(gid),
+            b'mtime': int_to_bigint(int(time.time()) * 1000000000)
+        }
+        self.add_item(item)
+
     def process_file(self, path, st, cache):
         safe_path = make_path_safe(path)
         # Is it a hard link?
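
Note: process_stdin() reads binary stdin through the chunker, stores each chunk via the cache, and records a synthetic archive item: a regular file (mode 0o100660, i.e. ug=rw) owned by root, stamped with the current time in nanoseconds. A hedged sketch of the read loop using a fixed-size stand-in chunker (attic's real chunker is content-defined; chunkify() here is a hypothetical simplification):

    import hashlib
    import sys

    def chunkify(fd, chunk_size=64 * 1024):
        # Fixed-size stand-in for attic's content-defined chunker.
        while True:
            data = fd.read(chunk_size)
            if not data:
                break
            yield data

    if __name__ == '__main__':
        # process_stdin collects cache.add_chunk() results the same way;
        # here each chunk is just hashed to show the shape of the loop.
        ids = [hashlib.sha256(c).digest() for c in chunkify(sys.stdin.buffer)]
        print('%d chunks' % len(ids))
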

attic/archiver.py  (+14 -2)

@@ -116,6 +116,14 @@ Type "Yes I am sure" if you understand this and want to continue.\n""")
             except IOError:
                 pass
         for path in args.paths:
+            if path == '-':  # stdin
+                path = 'stdin'
+                self.print_verbose(path)
+                try:
+                    archive.process_stdin(path, cache)
+                except IOError as e:
+                    self.print_error('%s: %s', path, e)
+                continue
             path = os.path.normpath(path)
             if args.dontcross:
                 try:
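
Note: on the create side, a literal '-' path follows the usual Unix convention for "read from stdin": the item is stored under the name 'stdin', process_stdin() replaces the normal filesystem walk, and the loop continues with the next path. A tiny standalone illustration of that convention (open_input() is a hypothetical helper, not attic code):

    import sys

    def open_input(path):
        # '-' means "read from stdin", per the common Unix CLI convention.
        if path == '-':
            return sys.stdin.buffer  # binary stdin; caller must not close it
        return open(path, 'rb')
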
@@ -195,6 +203,7 @@ Type "Yes I am sure" if you understand this and want to continue.\n""")
                           numeric_owner=args.numeric_owner)
         patterns = adjust_patterns(args.paths, args.excludes)
         dry_run = args.dry_run
+        stdout = args.stdout
         strip_components = args.strip_components
         dirs = []
         for item in archive.iter_items(lambda item: not exclude_path(item[b'path'], patterns), preload=True):
@@ -205,7 +214,7 @@ Type "Yes I am sure" if you understand this and want to continue.\n""")
                     continue
             if not args.dry_run:
                 while dirs and not item[b'path'].startswith(dirs[-1][b'path']):
-                    archive.extract_item(dirs.pop(-1))
+                    archive.extract_item(dirs.pop(-1), stdout=stdout)
             self.print_verbose(remove_surrogates(orig_path))
             try:
                 if dry_run:
@@ -215,7 +224,7 @@ Type "Yes I am sure" if you understand this and want to continue.\n""")
                         dirs.append(item)
                         archive.extract_item(item, restore_attrs=False)
                     else:
-                        archive.extract_item(item)
+                        archive.extract_item(item, stdout=stdout)
             except IOError as e:
                 self.print_error('%s: %s', remove_surrogates(orig_path), e)

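Note: in the extract loop, directories are pushed onto a stack and only finished (attributes restored) once iteration has left their subtree, so extracting children cannot clobber restored directory metadata; the change above merely threads stdout= through both extract_item() call sites. A simplified sketch of that deferred-directory pattern (plain dicts and a hypothetical finish() callback; the real code also creates each directory immediately with restore_attrs=False):

    def extract_all(items, finish):
        # finish() a directory only after every item inside it was handled.
        dirs = []
        for item in items:  # items arrive in path order, dirs before contents
            while dirs and not item['path'].startswith(dirs[-1]['path']):
                finish(dirs.pop())   # iteration left this directory's subtree
            if item['is_dir']:
                dirs.append(item)
            else:
                finish(item)
        while dirs:
            finish(dirs.pop())       # finish any directories still open

    if __name__ == '__main__':
        log = []
        items = [{'path': 'a', 'is_dir': True},
                 {'path': 'a/x', 'is_dir': False},
                 {'path': 'b', 'is_dir': False}]
        extract_all(items, lambda it: log.append(it['path']))
        print(log)  # ['a/x', 'a', 'b'] -- 'a' finished after its contents
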
@@ -585,6 +594,9 @@ Type "Yes I am sure" if you understand this and want to continue.\n""")
         subparser.add_argument('--strip-components', dest='strip_components',
                                type=int, default=0, metavar='NUMBER',
                                help='Remove the specified number of leading path elements. Pathnames with fewer elements will be silently skipped.')
+        subparser.add_argument('--stdout', dest='stdout',
+                               action='store_true', default=False,
+                               help='write all extracted data to stdout')
         subparser.add_argument('archive', metavar='ARCHIVE',
                                type=location_validator(archive=True),
                                help='archive to extract')
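
Note: the new --stdout flag is a plain boolean (argparse store_true) read once into a local and passed through to extract_item(). Equivalent standalone flag wiring, for reference:

    import argparse

    parser = argparse.ArgumentParser(prog='attic extract')
    parser.add_argument('--stdout', dest='stdout', action='store_true',
                        default=False,
                        help='write all extracted data to stdout')
    assert parser.parse_args(['--stdout']).stdout is True
    assert parser.parse_args([]).stdout is False

Combined with the '-' path on create, this enables pipe round trips along the lines of "mysqldump ... | attic create repo::dump -" followed later by "attic extract --stdout repo::dump > dump.sql" (illustrative command lines, based on the semantics added in this commit).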