[ARVADOS] updated: 479bb4c903b8780f017b0b63dfa59354353255ac

git at public.curoverse.com git at public.curoverse.com
Wed Oct 21 16:57:18 EDT 2015


Summary of changes:
 apps/workbench/config/application.default.yml |  4 +-
 sdk/python/arvados/__init__.py                | 28 ++++-----
 sdk/python/tests/arvados_testutil.py          | 24 +-------
 sdk/python/tests/test_init.py                 | 83 ---------------------------
 4 files changed, 17 insertions(+), 122 deletions(-)
 delete mode 100644 sdk/python/tests/test_init.py

       via  479bb4c903b8780f017b0b63dfa59354353255ac (commit)
      from  dbd30cba1ac27fa5a19096b1572860c8cc2fd867 (commit)

Those revisions listed above that are new to this repository have
not appeared on any other notification email; so we list those
revisions in full, below.


commit 479bb4c903b8780f017b0b63dfa59354353255ac
Author: Bryan Cosca <bcosc at curoverse.com>
Date:   Wed Oct 21 16:57:11 2015 -0400

    6600: flailing 4.57 10.15

diff --git a/apps/workbench/config/application.default.yml b/apps/workbench/config/application.default.yml
index 744c0c3..c458a50 100644
--- a/apps/workbench/config/application.default.yml
+++ b/apps/workbench/config/application.default.yml
@@ -11,7 +11,7 @@
 #   template_uuid: is the uuid of the template to be executed
 #   input_paths: an array of inputs for the pipeline. Use either a collection's "uuid"
 #     or a file's "uuid/file_name" path in this array. If the pipeline does not require
-#     any inputs, this can be omitted. 
+#     any inputs, this can be omitted.
 #   max_wait_seconds: max time in seconds to wait for the pipeline run to complete.
 #     Default value of 30 seconds is used when this value is not provided.
 diagnostics:
@@ -151,7 +151,7 @@ common:
 
   # Below is a sample setting of user_profile_form_fields config parameter.
   # This configuration parameter should be set to either false (to disable) or
-  # to an array as shown below. 
+  # to an array as shown below.
   # Configure the list of input fields to be displayed in the profile page
   # using the attribute "key" for each of the input fields.
   # This sample shows configuration with one required and one optional form fields.
diff --git a/sdk/python/arvados/__init__.py b/sdk/python/arvados/__init__.py
index bf1533a..b487b77 100644
--- a/sdk/python/arvados/__init__.py
+++ b/sdk/python/arvados/__init__.py
@@ -39,11 +39,11 @@ logger.setLevel(logging.DEBUG if config.get('ARVADOS_DEBUG')
                 else logging.WARNING)
 
 def task_set_output(self,s,api_client=None,num_retries=5):
-    
+
     if not api_client:
         api_client = api('v1')
 
-    for tries_left in RetryLoop(num_retries=num_retries, backoff_start=2): # change this to 2 after tests are finished
+    for tries_left in RetryLoop(num_retries=num_retries, backoff_start=0):
         try:
             api_client.job_tasks().update(uuid=self['uuid'],
                                                  body={
@@ -52,7 +52,7 @@ def task_set_output(self,s,api_client=None,num_retries=5):
                                                       'progress':1.0
                                                      }).execute()
         except errors.ApiError as error:
-            if retry.check_http_response_success(error.resp.status) is None and tries_left > 0: #status_code for HttpError
+            if retry.check_http_response_success(error.resp.status) is None and tries_left > 0:
                 logger.debug("task_set_output: job_tasks().update() raised {}, retrying with {} tries left".format(repr(error),tries_left))
             else:
                 raise
@@ -69,12 +69,12 @@ def current_task(api_client=None, num_retries=5):
 
     for tries_left in RetryLoop(num_retries=num_retries, backoff_start=2):
         try:
-            result = api_client.job_tasks().get(uuid=os.environ['TASK_UUID']).execute()
-            result = UserDict.UserDict(result)
-            result.set_output = types.MethodType(task_set_output, result)
-            result.tmpdir = os.environ['TASK_WORK']
-            _current_task = result
-            return result
+            task = api_client.job_tasks().get(uuid=os.environ['TASK_UUID']).execute()
+            task = UserDict.UserDict(task)
+            task.set_output = types.MethodType(task_set_output, task)
+            task.tmpdir = os.environ['TASK_WORK']
+            _current_task = task
+            return task
         except errors.ApiError as error:
             if retry.check_http_response_success(error.resp.status) is None and tries_left > 0:
                 logger.debug("current_task: job_tasks().get() raised {}, retrying with {} tries left".format(repr(error),tries_left))
@@ -92,11 +92,11 @@ def current_job(api_client=None, num_retries=5):
 
     for tries_left in RetryLoop(num_retries=num_retries, backoff_start=2):
         try:
-            result = api_client.jobs().get(uuid=os.environ['JOB_UUID']).execute()
-            result = UserDict.UserDict(result)
-            result.tmpdir = os.environ['JOB_WORK']
-            _current_job = result
-            return result
+            job = api_client.jobs().get(uuid=os.environ['JOB_UUID']).execute()
+            job = UserDict.UserDict(job)
+            job.tmpdir = os.environ['JOB_WORK']
+            _current_job = job
+            return job
         except errors.ApiError as error:
             if retry.check_http_response_success(error.resp.status) is None and tries_left > 0:
                 logger.debug("current_job: jobs().get() raised {}, retrying with {} tries left".format(repr(error),tries_left))
diff --git a/sdk/python/tests/arvados_testutil.py b/sdk/python/tests/arvados_testutil.py
index be0d0c0..6e2a078 100644
--- a/sdk/python/tests/arvados_testutil.py
+++ b/sdk/python/tests/arvados_testutil.py
@@ -43,6 +43,7 @@ def mock_responses(body, *codes, **headers):
     return mock.patch('httplib2.Http.request', side_effect=queue_with((
         (fake_httplib2_response(code, **headers), body) for code in codes)))
 
+
 class FakeCurl:
     @classmethod
     def make(cls, code, body='', headers={}):
@@ -116,29 +117,6 @@ def mock_keep_responses(body, *codes, **headers):
     cm.responses = responses
     return mock.patch('pycurl.Curl', cm)
 
-def mock_api_responses(cm=None, body=None, *codes, **headers):
-
-    print cm
-    if cm is None:
-        cm = mock.MagicMock()
-
-    if isinstance(body, tuple):
-        codes = list(codes)
-        codes.insert(0, body)
-        responses = [
-            FakeCurl.make(code=code, body=b, headers=headers)
-            for b, code in codes
-        ]
-    else:
-        responses = [
-            FakeCurl.make(code=code, body=body, headers=headers)
-            for code in codes
-        ]
-    cm.side_effect = queue_with(responses)
-    cm.responses = responses
-    return cm
-
-
 
 class MockStreamReader(object):
     def __init__(self, name='.', *data):
diff --git a/sdk/python/tests/test_init.py b/sdk/python/tests/test_init.py
deleted file mode 100644
index 355a21c..0000000
--- a/sdk/python/tests/test_init.py
+++ /dev/null
@@ -1,83 +0,0 @@
-#!/usr/bin/env python
-
-import mock
-import os
-import unittest
-import hashlib
-import run_test_server
-import json
-import arvados
-import arvados_testutil as tutil
-from apiclient import http as apiclient_http
-
-@tutil.skip_sleep
-class ApiClientRetryTestMixin(object):
-
-    TEST_UUID = 'zzzzz-zzzzz-zzzzzzzzzzzzzzz'
-    TEST_LOCATOR = 'd41d8cd98f00b204e9800998ecf8427e+0'
-
-    @classmethod
-    def setUpClass(cls):
-        run_test_server.run()
-        cls.api = arvados.api('v1')
-
-    def tearDown(cls):
-        run_test_server.reset()
-
-    def run_method(self, *args, **kwargs):
-        raise NotImplementedError("test subclasses must define run_method")        
-
-    def check_success(self, expected=None, *args, **kwargs):
-        try:
-            self.run_method
-        except: # This seems really iffy, because random exceptions can occur.
-            self.assertTrue(False)
-
-    def check_exception(self, error_class=None, *args, **kwargs):
-        if error_class is None:
-            error_class = self.DEFAULT_EXCEPTION
-        self.assertRaises(error_class, self.run_method)
-
-    def test_immediate_success(self):
-        with tutil.mock_responses('', 200):
-            self.check_success()
-
-    def test_immediate_failure(self):
-        with tutil.mock_responses('', 400):
-            self.check_exception()
-
-    def test_retry_then_success(self):
-        with tutil.mock_responses('', 500, 200):
-            self.check_success()
-
-    def test_error_after_default_retries_exhausted(self):
-        with tutil.mock_responses('', 500, 500, 500, 500, 500, 500):
-            self.check_exception()
-
-    def test_no_retry_after_immediate_success(self):
-        with tutil.mock_responses('', 200, 400):
-            self.check_success()
-    
-class TaskSetOutputTestCase(ApiClientRetryTestMixin, unittest.TestCase):
-    DEFAULT_EXCEPTION = arvados.errors.ApiError
-
-    def run_method(self, locator=ApiClientRetryTestMixin.TEST_LOCATOR, *args, **kwargs):
-        arvados.task_set_output({'uuid':self.TEST_UUID},s=locator)
-
-class CurrentJobTestCase(ApiClientRetryTestMixin, unittest.TestCase):
-    DEFAULT_EXCEPTION = arvados.errors.ApiError
-
-    os.environ['JOB_UUID'] = 'zzzzz-zzzzz-zzzzzzzzzzzzzzz'
-    os.environ['JOB_WORK'] = '.'
-
-    def run_method(self, *args, **kwargs):
-        arvados.current_job()
-
-class CurrentTaskTestCase(ApiClientRetryTestMixin, unittest.TestCase):
-    DEFAULT_EXCEPTION = arvados.errors.ApiError
-
-    os.environ['TASK_UUID'] = 'zzzzz-zzzzz-zzzzzzzzzzzzzzz'
-    os.environ['TASK_WORK'] = '.'
-
-    def run_method(self, *args, **kwargs):
-        arvados.current_task()

-----------------------------------------------------------------------


hooks/post-receive
-- 




More information about the arvados-commits mailing list