[ARVADOS] updated: c84af5984c87bbcbf345da68243ba01da5e483dc

git at public.curoverse.com
Mon Feb 17 10:56:07 EST 2014


Summary of changes:
 sdk/python/arvados/keep.py |    2 +-
 sdk/python/arvados/util.py |   32 +++++++++++++++++---------------
 2 files changed, 18 insertions(+), 16 deletions(-)

       via  c84af5984c87bbcbf345da68243ba01da5e483dc (commit)
      from  25a646aef93c601334e9d1f676d7ba04e5868b0b (commit)

Those revisions listed above that are new to this repository have
not appeared on any other notification email; so we list those
revisions in full, below.


commit c84af5984c87bbcbf345da68243ba01da5e483dc
Author: Tom Clegg <tom at curoverse.com>
Date:   Mon Feb 17 07:55:24 2014 -0800

    Fix namespace bugs.
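
The hunks below all fix the same class of namespace bug: calls written as
bare api(), current_task(), or current_job() fail with NameError because
only the package name is bound in the calling module, so each call is
qualified as arvados.api() and so on. A minimal standalone sketch of the
pattern, using the standard-library json module as a stand-in (hypothetical
example, not Arvados SDK code):

    import json                      # binds only the name "json"

    def parse(text):
        try:
            return loads(text)       # bug: unqualified call, like bare api()
        except NameError as err:
            print("bug:", err)       # NameError: name 'loads' is not defined
        return json.loads(text)      # fix: qualify it, like arvados.api()

    print(parse('{"ok": true}'))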

diff --git a/sdk/python/arvados/keep.py b/sdk/python/arvados/keep.py
index e1902d1..e8be158 100644
--- a/sdk/python/arvados/keep.py
+++ b/sdk/python/arvados/keep.py
@@ -142,7 +142,7 @@ class KeepClient(object):
     def shuffled_service_roots(self, hash):
         if self.service_roots == None:
             self.lock.acquire()
-            keep_disks = api().keep_disks().list().execute()['items']
+            keep_disks = arvados.api().keep_disks().list().execute()['items']
             roots = (("http%s://%s:%d/" %
                       ('s' if f['service_ssl_flag'] else '',
                        f['service_host'],
diff --git a/sdk/python/arvados/util.py b/sdk/python/arvados/util.py
index 552fbbe..667b44c 100644
--- a/sdk/python/arvados/util.py
+++ b/sdk/python/arvados/util.py
@@ -4,6 +4,8 @@ import os
 import re
 import subprocess
 import errno
+import sys
+import arvados.collection
 
 def clear_tmpdir(path=None):
     """
@@ -11,7 +13,7 @@ def clear_tmpdir(path=None):
     exists and is empty.
     """
     if path == None:
-        path = current_task().tmpdir
+        path = arvados.current_task().tmpdir
     if os.path.exists(path):
         p = subprocess.Popen(['rm', '-rf', path])
         stdout, stderr = p.communicate(None)
@@ -35,12 +37,12 @@ def run_command(execargs, **kwargs):
 
 def git_checkout(url, version, path):
     if not re.search('^/', path):
-        path = os.path.join(current_job().tmpdir, path)
+        path = os.path.join(arvados.current_job().tmpdir, path)
     if not os.path.exists(path):
-        util.run_command(["git", "clone", url, path],
-                         cwd=os.path.dirname(path))
-    util.run_command(["git", "checkout", version],
-                     cwd=path)
+        run_command(["git", "clone", url, path],
+                    cwd=os.path.dirname(path))
+    run_command(["git", "checkout", version],
+                cwd=path)
     return path
 
 def tar_extractor(path, decompress_flag):
@@ -63,7 +65,7 @@ def tarball_extract(tarball, path):
     path -- where to extract the tarball: absolute, or relative to job tmp
     """
     if not re.search('^/', path):
-        path = os.path.join(current_job().tmpdir, path)
+        path = os.path.join(arvados.current_job().tmpdir, path)
     lockfile = open(path + '.lock', 'w')
     fcntl.flock(lockfile, fcntl.LOCK_EX)
     try:
@@ -87,11 +89,11 @@ def tarball_extract(tarball, path):
 
         for f in CollectionReader(tarball).all_files():
             if re.search('\.(tbz|tar.bz2)$', f.name()):
-                p = util.tar_extractor(path, 'j')
+                p = tar_extractor(path, 'j')
             elif re.search('\.(tgz|tar.gz)$', f.name()):
-                p = util.tar_extractor(path, 'z')
+                p = tar_extractor(path, 'z')
             elif re.search('\.tar$', f.name()):
-                p = util.tar_extractor(path, '')
+                p = tar_extractor(path, '')
             else:
                 raise errors.AssertionError(
                     "tarball_extract cannot handle filename %s" % f.name())
@@ -124,7 +126,7 @@ def zipball_extract(zipball, path):
     path -- where to extract the archive: absolute, or relative to job tmp
     """
     if not re.search('^/', path):
-        path = os.path.join(current_job().tmpdir, path)
+        path = os.path.join(arvados.current_job().tmpdir, path)
     lockfile = open(path + '.lock', 'w')
     fcntl.flock(lockfile, fcntl.LOCK_EX)
     try:
@@ -193,7 +195,7 @@ def collection_extract(collection, path, files=[], decompress=True):
     else:
         collection_hash = hashlib.md5(collection).hexdigest()
     if not re.search('^/', path):
-        path = os.path.join(current_job().tmpdir, path)
+        path = os.path.join(arvados.current_job().tmpdir, path)
     lockfile = open(path + '.lock', 'w')
     fcntl.flock(lockfile, fcntl.LOCK_EX)
     try:
@@ -263,7 +265,7 @@ def stream_extract(stream, path, files=[], decompress=True):
     path -- where to extract: absolute, or relative to job tmp
     """
     if not re.search('^/', path):
-        path = os.path.join(current_job().tmpdir, path)
+        path = os.path.join(arvados.current_job().tmpdir, path)
     lockfile = open(path + '.lock', 'w')
     fcntl.flock(lockfile, fcntl.LOCK_EX)
     try:
@@ -281,7 +283,7 @@ def stream_extract(stream, path, files=[], decompress=True):
             files_got += [outname]
             if os.path.exists(os.path.join(path, outname)):
                 os.unlink(os.path.join(path, outname))
-            util.mkdir_dash_p(os.path.dirname(os.path.join(path, outname)))
+            mkdir_dash_p(os.path.dirname(os.path.join(path, outname)))
             outfile = open(os.path.join(path, outname), 'wb')
             for buf in (f.readall_decompressed() if decompress
                         else f.readall()):
@@ -300,7 +302,7 @@ def listdir_recursive(dirname, base=None):
         ent_path = os.path.join(dirname, ent)
         ent_base = os.path.join(base, ent) if base else ent
         if os.path.isdir(ent_path):
-            allfiles += util.listdir_recursive(ent_path, ent_base)
+            allfiles += listdir_recursive(ent_path, ent_base)
         else:
             allfiles += [ent_base]
     return allfiles

-----------------------------------------------------------------------


hooks/post-receive