[ARVADOS] updated: 083dcfe0946067f32681b4ffc497a7e9a5fdff79

Git user git at public.curoverse.com
Tue Oct 11 19:14:33 EDT 2016


Summary of changes:
 sdk/cli/test/test_arv-keep-get.rb  |  4 ++--
 sdk/python/arvados/commands/put.py | 37 +++++++++++++++++++++++--------------
 2 files changed, 25 insertions(+), 16 deletions(-)

       via  083dcfe0946067f32681b4ffc497a7e9a5fdff79 (commit)
       via  5af68e1098ebd0ab749387667c18585a18b71f04 (commit)
      from  e892c7ee96f28bef7d5b2a9314eb9549ee56634d (commit)

Those revisions listed above that are new to this repository have
not appeared on any other notification email; so we list those
revisions in full, below.


commit 083dcfe0946067f32681b4ffc497a7e9a5fdff79
Author: Lucas Di Pentima <lucas at curoverse.com>
Date:   Tue Oct 11 20:13:06 2016 -0300

    9701: Replaced deprecated use of File.exists?() with File.exist?()

diff --git a/sdk/cli/test/test_arv-keep-get.rb b/sdk/cli/test/test_arv-keep-get.rb
index 0e578b8..d0224ae 100644
--- a/sdk/cli/test/test_arv-keep-get.rb
+++ b/sdk/cli/test/test_arv-keep-get.rb
@@ -180,7 +180,7 @@ class TestArvKeepGet < Minitest::Test
     end
     assert_equal "#{Digest::MD5.hexdigest('foo')}  ./foo\n", err
     assert_equal '', out
-    assert_equal false, File.exists?('tmp/foo')
+    assert_equal false, File.exist?('tmp/foo')
   end
 
   def test_sha1_nowrite
@@ -190,7 +190,7 @@ class TestArvKeepGet < Minitest::Test
     end
     assert_equal "#{Digest::SHA1.hexdigest('foo')}  ./foo\n", err
     assert_equal '', out
-    assert_equal false, File.exists?('tmp/foo')
+    assert_equal false, File.exist?('tmp/foo')
   end
 
   def test_block_to_file

commit 5af68e1098ebd0ab749387667c18585a18b71f04
Author: Lucas Di Pentima <lucas at curoverse.com>
Date:   Tue Oct 11 20:06:37 2016 -0300

    9701: Fixed arv-put to pass a couple of pending sdk/cli tests. refs #9463

diff --git a/sdk/python/arvados/commands/put.py b/sdk/python/arvados/commands/put.py
index 1a27410..89753a2 100644
--- a/sdk/python/arvados/commands/put.py
+++ b/sdk/python/arvados/commands/put.py
@@ -475,10 +475,14 @@ class ArvPutUploadJob(object):
             output.close()
 
     def _write(self, source_fd, output):
+        first_read = True
         while True:
             data = source_fd.read(arvados.config.KEEP_BLOCK_SIZE)
-            if not data:
+            # Allow an empty file to be written
+            if not data and not first_read:
                 break
+            if first_read:
+                first_read = False
             output.write(data)
 
     def _my_collection(self):
@@ -591,11 +595,15 @@ class ArvPutUploadJob(object):
         through subcollections
         """
         if isinstance(item, arvados.arvfile.ArvadosFile):
-            locators = []
-            for segment in item.segments():
-                loc = segment.locator
-                locators.append(loc)
-            return locators
+            if item.size() == 0:
+                # Empty file locator
+                return ["d41d8cd98f00b204e9800998ecf8427e+0"]
+            else:
+                locators = []
+                for segment in item.segments():
+                    loc = segment.locator
+                    locators.append(loc)
+                return locators
         elif isinstance(item, arvados.collection.Collection):
             l = [self._datablocks_on_item(x) for x in item.values()]
             # Fast list flattener method taken from:
@@ -701,14 +709,15 @@ def main(arguments=None, stdout=sys.stdout, stderr=sys.stderr):
     bytes_expected = expected_bytes_for(args.paths)
     try:
         writer = ArvPutUploadJob(paths = args.paths,
-                                resume = args.resume,
-                                reporter = reporter,
-                                bytes_expected = bytes_expected,
-                                num_retries = args.retries,
-                                replication_desired = args.replication,
-                                name = collection_name,
-                                owner_uuid = project_uuid,
-                                ensure_unique_name = True)
+                                 resume = args.resume,
+                                 filename = args.filename,
+                                 reporter = reporter,
+                                 bytes_expected = bytes_expected,
+                                 num_retries = args.retries,
+                                 replication_desired = args.replication,
+                                 name = collection_name,
+                                 owner_uuid = project_uuid,
+                                 ensure_unique_name = True)
     except ResumeCacheConflict:
         print >>stderr, "\n".join([
             "arv-put: Another process is already uploading this data.",

-----------------------------------------------------------------------


hooks/post-receive
-- 




More information about the arvados-commits mailing list