[ARVADOS] updated: 1.3.0-1463-g9f6bfd78b

Git user git at public.curoverse.com
Thu Aug 8 18:06:54 UTC 2019


Summary of changes:
 .../pipeline_instances_controller_test.rb          |  71 -----
 .../test/helpers/repository_stub_helper.rb         |   1 -
 apps/workbench/test/integration/jobs_test.rb       |  77 -----
 .../test/integration/pipeline_instances_test.rb    | 351 ---------------------
 apps/workbench/test/integration/projects_test.rb   |   1 -
 .../test/integration/repositories_browse_test.rb   |  23 --
 apps/workbench/test/integration/websockets_test.rb |  61 ----
 apps/workbench/test/integration/work_units_test.rb |  30 --
 lib/config/config.default.yml                      |  30 +-
 sdk/cli/arvados-cli.gemspec                        |   6 +-
 sdk/go/arvados/config.go                           |   9 +-
 .../api/app/helpers/commit_ancestors_helper.rb     |   6 -
 services/api/app/models/commit_ancestor.rb         |  44 ---
 services/api/app/models/job.rb                     | 143 ---------
 services/api/config/arvados_config.rb              |   5 -
 ....rb => 20190808145904_drop_commit_ancestors.rb} |   4 +-
 services/api/db/structure.sql                      |  59 +---
 services/api/lib/enable_jobs_api.rb                |  12 +
 services/api/test/unit/job_test.rb                 |  11 -
 19 files changed, 24 insertions(+), 920 deletions(-)
 delete mode 100644 services/api/app/helpers/commit_ancestors_helper.rb
 delete mode 100644 services/api/app/models/commit_ancestor.rb
 copy services/api/db/migrate/{20130315183626_add_log_to_jobs.rb => 20190808145904_drop_commit_ancestors.rb} (57%)

       via  9f6bfd78b88486d4b3384b4be05c1f5e4f546a9c (commit)
      from  30e065c34db0ab9a0e824a77b1ac0a46412598e0 (commit)

Those revisions listed above that are new to this repository have
not appeared in any previous notification email, so we list those
revisions in full, below.


commit 9f6bfd78b88486d4b3384b4be05c1f5e4f546a9c
Author: Peter Amstutz <pamstutz at veritasgenetics.com>
Date:   Thu Aug 8 14:06:17 2019 -0400

    15133: Move removing jobs API code+tests
    
    Arvados-DCO-1.1-Signed-off-by: Peter Amstutz <pamstutz at veritasgenetics.com>

diff --git a/apps/workbench/test/controllers/pipeline_instances_controller_test.rb b/apps/workbench/test/controllers/pipeline_instances_controller_test.rb
index c76244d11..1ff608ac5 100644
--- a/apps/workbench/test/controllers/pipeline_instances_controller_test.rb
+++ b/apps/workbench/test/controllers/pipeline_instances_controller_test.rb
@@ -28,13 +28,6 @@ class PipelineInstancesControllerTest < ActionController::TestCase
     PipelineInstance.where(uuid: pi_uuid).first.destroy
   end
 
-  test "pipeline instance components populated after create" do
-    create_instance_long_enough_to do |new_instance_uuid, template_fixture|
-      assert_equal(template_fixture['components'].to_json,
-                   assigns(:object).components.to_json)
-    end
-  end
-
   test "can render pipeline instance with tagged collections" do
     # Make sure to pass in a tagged collection to test that part of the rendering behavior.
     get(:show,
@@ -101,70 +94,6 @@ class PipelineInstancesControllerTest < ActionController::TestCase
   # when the template has components that do not exist in the
   # instance (ticket #4000).
 
-  test "copy pipeline instance with components=use_latest" do
-    post(:copy,
-         params: {
-           id: api_fixture('pipeline_instances')['pipeline_with_newer_template']['uuid'],
-           components: 'use_latest',
-           script: 'use_latest',
-           pipeline_instance: {
-             state: 'RunningOnServer'
-           }
-         },
-         session: session_for(:active))
-    assert_response 302
-    assert_not_nil assigns(:object)
-
-    # Component 'foo' has script parameters only in the pipeline instance.
-    # Component 'bar' is present only in the pipeline_template.
-    # Test that the copied pipeline instance includes parameters for
-    # component 'foo' from the source instance, and parameters for
-    # component 'bar' from the source template.
-    #
-    assert_not_nil assigns(:object).components[:foo]
-    foo = assigns(:object).components[:foo]
-    assert_not_nil foo[:script_parameters]
-    assert_not_nil foo[:script_parameters][:input]
-    assert_equal 'foo instance input', foo[:script_parameters][:input][:title]
-
-    assert_not_nil assigns(:object).components[:bar]
-    bar = assigns(:object).components[:bar]
-    assert_not_nil bar[:script_parameters]
-    assert_not_nil bar[:script_parameters][:input]
-    assert_equal 'bar template input', bar[:script_parameters][:input][:title]
-  end
-
-  test "copy pipeline instance on newer template works with script=use_same" do
-    post(:copy,
-         params: {
-           id: api_fixture('pipeline_instances')['pipeline_with_newer_template']['uuid'],
-           components: 'use_latest',
-           script: 'use_same',
-           pipeline_instance: {
-             state: 'RunningOnServer'
-           }
-         },
-         session: session_for(:active))
-    assert_response 302
-    assert_not_nil assigns(:object)
-
-    # Test that relevant component parameters were copied from both
-    # the source instance and source template, respectively (see
-    # previous test)
-    #
-    assert_not_nil assigns(:object).components[:foo]
-    foo = assigns(:object).components[:foo]
-    assert_not_nil foo[:script_parameters]
-    assert_not_nil foo[:script_parameters][:input]
-    assert_equal 'foo instance input', foo[:script_parameters][:input][:title]
-
-    assert_not_nil assigns(:object).components[:bar]
-    bar = assigns(:object).components[:bar]
-    assert_not_nil bar[:script_parameters]
-    assert_not_nil bar[:script_parameters][:input]
-    assert_equal 'bar template input', bar[:script_parameters][:input][:title]
-  end
-
   test "generate graph" do
 
     use_token 'admin'
diff --git a/apps/workbench/test/helpers/repository_stub_helper.rb b/apps/workbench/test/helpers/repository_stub_helper.rb
index 419de8c5e..a8e3653a5 100644
--- a/apps/workbench/test/helpers/repository_stub_helper.rb
+++ b/apps/workbench/test/helpers/repository_stub_helper.rb
@@ -26,7 +26,6 @@ module RepositoryStubHelper
       100644 blob 9bef02bbfda670595750fd99a4461005ce5b8f12     695    apps/workbench/.gitignore
       100644 blob b51f674d90f68bfb50d9304068f915e42b04aea4    2249    apps/workbench/Gemfile
       100644 blob b51f674d90f68bfb50d9304068f915e42b04aea4    2249    apps/workbench/Gemfile
-      100755 blob cdd5ebaff27781f93ab85e484410c0ce9e97770f    1012    crunch_scripts/hash
     EOS
     Repository.any_instance.
       stubs(:cat_file).with(fakesha1, fakefilename).returns fakefile
diff --git a/apps/workbench/test/integration/jobs_test.rb b/apps/workbench/test/integration/jobs_test.rb
index a66dfd803..7b4f2cef1 100644
--- a/apps/workbench/test/integration/jobs_test.rb
+++ b/apps/workbench/test/integration/jobs_test.rb
@@ -37,83 +37,6 @@ class JobsTest < ActionDispatch::IntegrationTest
     assert_selector 'a[href="/"]', text: 'Go to dashboard'
   end
 
-  test 'view partial job log' do
-    need_selenium 'to be able to see the CORS response headers (PhantomJS 1.9.8 does not)'
-
-    # This config will be restored during teardown by ../test_helper.rb:
-    Rails.configuration.Workbench.LogViewerMaxBytes = 100
-
-    logdata = fakepipe_with_log_data.read
-    job_uuid = api_fixture('jobs')['running']['uuid']
-    logcollection = upload_data_and_get_collection(logdata, 'active', "#{job_uuid}.log.txt")
-    job = nil
-    use_token 'active' do
-      job = Job.find job_uuid
-      job.update_attributes log: logcollection.portable_data_hash
-    end
-    visit page_with_token 'active', '/jobs/'+job.uuid
-    find('a[href="#Log"]').click
-    wait_for_ajax
-    assert_text 'Showing only 100 bytes of this log'
-  end
-
-  test 'view log via keep-web redirect' do
-    token = api_token('active')
-    logdata = fakepipe_with_log_data.read
-    logblock = `echo -n #{logdata.shellescape} | ARVADOS_API_TOKEN=#{token.shellescape} arv-put --no-progress --raw -`.strip
-    assert $?.success?, $?
-
-    job = nil
-    use_token 'active' do
-      job = Job.find api_fixture('jobs')['running']['uuid']
-      mtxt = ". #{logblock} 0:#{logdata.length}:#{job.uuid}.log.txt\n"
-      logcollection = Collection.create(manifest_text: mtxt)
-      job.update_attributes log: logcollection.portable_data_hash
-    end
-    visit page_with_token 'active', '/jobs/'+job.uuid
-    find('a[href="#Log"]').click
-    assert_text 'log message 1'
-  end
-
-  [
-    ['foobar', false, false],
-    ['job_with_latest_version', true, false],
-    ['job_with_latest_version', true, true],
-  ].each do |job_name, expect_options, use_latest|
-    test "Rerun #{job_name} job, expect options #{expect_options},
-          and use latest version option #{use_latest}" do
-      job = api_fixture('jobs')[job_name]
-      visit page_with_token 'active', '/jobs/'+job['uuid']
-
-      if expect_options
-        assert_text 'supplied_script_version: master'
-      else
-        assert_no_text 'supplied_script_version'
-      end
-
-      assert_triggers_dom_event 'shown.bs.modal' do
-        find('a,button', text: 'Re-run job...').click
-      end
-      within('.modal-dialog') do
-        assert_selector 'a,button', text: 'Cancel'
-        if use_latest
-          page.choose("job_script_version_#{job['supplied_script_version']}")
-        end
-        click_on "Run now"
-      end
-
-      # Re-running jobs doesn't currently work because the test API
-      # server has no git repository to check against.  For now, check
-      # that the error message says something appropriate for that
-      # situation.
-      if expect_options && use_latest
-        assert_text "077ba2ad3ea24a929091a9e6ce545c93199b8e57"
-      else
-        assert_text "Script version #{job['script_version']} does not resolve to a commit"
-      end
-    end
-  end
-
   [
     ['active', true],
     ['job_reader2', false],
diff --git a/apps/workbench/test/integration/pipeline_instances_test.rb b/apps/workbench/test/integration/pipeline_instances_test.rb
index adfd62bd8..8afa0b88e 100644
--- a/apps/workbench/test/integration/pipeline_instances_test.rb
+++ b/apps/workbench/test/integration/pipeline_instances_test.rb
@@ -61,135 +61,6 @@ class PipelineInstancesTest < ActionDispatch::IntegrationTest
     end
   end
 
-  test 'Create and run a pipeline' do
-    visit page_with_token('active_trustedclient', '/pipeline_templates')
-    within('tr', text: 'Two Part Pipeline Template') do
-      find('a,button', text: 'Run').click
-    end
-
-    # project chooser
-    within('.modal-dialog') do #FIXME: source of 1 test error
-      find('.selectable', text: 'A Project').click
-      find('button', text: 'Choose').click
-    end
-
-    # This pipeline needs input. So, Run should be disabled
-    page.assert_selector 'a.disabled,button.disabled', text: 'Run'
-
-    instance_page = current_path
-
-    # Add this collection to the project
-    visit '/projects'
-    find("#projects-menu").click
-    find('.dropdown-menu a,button', text: 'A Project').click
-    find('.btn', text: 'Add data').click
-    find('.dropdown-menu a,button', text: 'Copy data from another project').click
-    within('.modal-dialog') do
-      wait_for_ajax
-      first('span', text: 'foo_tag').click
-      find('.btn', text: 'Copy').click
-    end
-    using_wait_time(Capybara.default_max_wait_time * 3) do
-      wait_for_ajax
-    end
-
-    click_link 'Pipelines and processes'
-    find('tr[data-kind="arvados#pipelineInstance"]', text: '(none)').
-      find('a', text: 'Show').
-      click
-
-    assert find('p', text: 'Provide a value')
-
-    find('div.form-group', text: 'Foo/bar pair').
-      find('.btn', text: 'Choose').
-      click
-
-    within('.modal-dialog') do
-      assert(has_text?("Foo/bar pair"),
-             "pipeline input picker missing name of input")
-      wait_for_ajax
-      first('span', text: 'foo_tag').click
-      find('button', text: 'OK').click
-    end
-    wait_for_ajax
-
-    # The input, after being specified, should still be displayed (#3382)
-    assert find('div.form-group', text: 'Foo/bar pair')
-
-    # The input, after being specified, should still be editable (#3382)
-    find('div.form-group', text: 'Foo/bar pair').
-      find('.btn', text: 'Choose').click
-
-    within('.modal-dialog') do
-      assert(has_text?("Foo/bar pair"),
-             "pipeline input picker missing name of input")
-      wait_for_ajax
-      first('span', text: 'foo_tag').click
-      find('button', text: 'OK').click
-    end
-
-    # For good measure, check one last time that the input, after being specified twice, is still be displayed (#3382)
-    assert find('div.form-group', text: 'Foo/bar pair')
-
-    # Ensure that the collection's portable_data_hash, uuid and name
-    # are saved in the desired places. (#4015)
-
-    # foo_collection_in_aproject is the collection tagged with foo_tag.
-    collection = api_fixture('collections', 'foo_collection_in_aproject')
-    click_link 'Advanced'
-    click_link 'API response'
-    api_response = JSON.parse(find('div#advanced_api_response pre').text)
-    input_params = api_response['components']['part-one']['script_parameters']['input']
-    assert_equal collection['portable_data_hash'], input_params['value']
-    assert_equal collection['name'], input_params['selection_name']
-    assert_equal collection['uuid'], input_params['selection_uuid']
-
-    # "Run" button is now enabled
-    page.assert_no_selector 'a.disabled,button.disabled', text: 'Run'
-
-    first('a,button', text: 'Run').click
-
-    # Pipeline is running. We have a "Pause" button instead now.
-    page.assert_selector 'a,button', text: 'Pause'
-    find('a,button', text: 'Pause').click
-
-    # Pipeline is stopped. It should now be in paused state and Runnable again.
-    assert page.has_text? 'Paused'
-    page.assert_no_selector 'a.disabled,button.disabled', text: 'Resume'
-    page.assert_selector 'a,button', text: 'Re-run with latest'
-    page.assert_selector 'a,button', text: 'Re-run options'
-
-    # Since it is test env, no jobs are created to run. So, graph not visible
-    assert page.has_no_text? 'Graph'
-  end
-
-  # Create a pipeline instance from within a project and run
-  test 'Create pipeline inside a project and run' do
-    visit page_with_token('active_trustedclient', '/projects')
-
-    # Add collection to the project using Add data button
-    find("#projects-menu").click
-    find('.dropdown-menu a,button', text: 'A Project').click
-    find('.btn', text: 'Add data').click
-    find('.dropdown-menu a,button', text: 'Copy data from another project').click
-    within('.modal-dialog') do
-      wait_for_ajax
-      first('span', text: 'foo_tag').click
-      find('.btn', text: 'Copy').click
-    end
-    using_wait_time(Capybara.default_max_wait_time * 3) do
-      wait_for_ajax
-    end
-
-    create_and_run_pipeline_in_aproject true, 'Two Part Pipeline Template', 'foo_collection_in_aproject', false
-  end
-
-  # Create a pipeline instance from outside of a project
-  test 'Run a pipeline from dashboard' do
-    visit page_with_token('active_trustedclient')
-    create_and_run_pipeline_in_aproject false, 'Two Part Pipeline Template', 'foo_collection_in_aproject', false
-  end
-
   test 'view pipeline with job and see graph' do
     visit page_with_token('active_trustedclient', '/pipeline_instances')
     assert page.has_text? 'pipeline_with_job'
@@ -246,228 +117,6 @@ class PipelineInstancesTest < ActionDispatch::IntegrationTest
            "did not land on pipeline instance page")
   end
 
-  PROJECT_WITH_SEARCH_COLLECTION = "A Subproject"
-  def check_parameter_search(proj_name)
-    create_pipeline_from("parameter_with_search", proj_name)
-    search_text = api_fixture("pipeline_templates", "parameter_with_search",
-                              "components", "with-search",
-                              "script_parameters", "input", "search_for")
-    first("a.btn,button", text: "Choose").click
-    within(".modal-body") do
-      if (proj_name != PROJECT_WITH_SEARCH_COLLECTION)
-        # Switch finder modal to Subproject to find the Collection.
-        click_on proj_name
-        click_on PROJECT_WITH_SEARCH_COLLECTION
-      end
-      assert_equal(search_text, first("input").value,
-                   "parameter search not preseeded")
-      assert(has_text?(api_fixture("collections")["baz_collection_name_in_asubproject"]["name"]),
-             "baz Collection not in preseeded search results")
-    end
-  end
-
-  test "Workbench respects search_for parameter in templates" do
-    check_parameter_search(PROJECT_WITH_SEARCH_COLLECTION)
-  end
-
-  test "Workbench preserves search_for parameter after project switch" do
-    check_parameter_search("A Project")
-  end
-
-  test "enter a float for a number pipeline input" do
-    # Poltergeist either does not support the HTML 5 <input
-    # type="number">, or interferes with the associated X-Editable
-    # validation code.  If the input field has type=number (forcing an
-    # integer), this test will yield a false positive under
-    # Poltergeist.  --Brett, 2015-02-05
-    need_selenium "for strict X-Editable input validation"
-    create_pipeline_from("template_with_dataclass_number")
-    INPUT_SELECTOR =
-      ".editable[data-name='[components][work][script_parameters][input][value]']"
-    find(INPUT_SELECTOR).click
-    find(".editable-input input").set("12.34")
-    find("#editable-submit").click
-    assert_no_selector(".editable-popup")
-    assert_selector(INPUT_SELECTOR, text: "12.34")
-  end
-
-  [
-    [true, 'Two Part Pipeline Template', 'foo_collection_in_aproject', false],
-    [false, 'Two Part Pipeline Template', 'foo_collection_in_aproject', false],
-    [true, 'Two Part Template with dataclass File', 'foo_collection_in_aproject', true],
-    [false, 'Two Part Template with dataclass File', 'foo_collection_in_aproject', true],
-    [true, 'Two Part Pipeline Template', 'collection_with_no_name_in_aproject', false],
-  ].each do |in_aproject, template_name, collection, choose_file|
-    test "Run pipeline instance in #{in_aproject} with #{template_name} with #{collection} file #{choose_file}" do
-      if in_aproject
-        visit page_with_token 'active', \
-        '/projects/'+api_fixture('groups')['aproject']['uuid']
-      else
-        visit page_with_token 'active', '/'
-      end
-
-      # need bigger modal size when choosing a file from collection
-      if Capybara.current_driver == :selenium
-        Capybara.current_session.driver.browser.manage.window.resize_to(1200, 800)
-      end
-
-      create_and_run_pipeline_in_aproject in_aproject, template_name, collection, choose_file
-      instance_path = current_path
-
-      # Pause the pipeline
-      find('a,button', text: 'Pause').click
-      assert page.has_text? 'Paused'
-      page.assert_no_selector 'a.disabled,button.disabled', text: 'Resume'
-      page.assert_selector 'a,button', text: 'Re-run with latest'
-      page.assert_selector 'a,button', text: 'Re-run options'
-
-      # Verify that the newly created instance is created in the right project.
-      assert page.has_text? 'Home'
-      if in_aproject
-        assert page.has_text? 'A Project'
-      else
-        assert page.has_no_text? 'A Project'
-      end
-    end
-  end
-
-  [
-    ['active', false, false, false],
-    ['active', false, false, true],
-    ['active', true, false, false],
-    ['active', true, true, false],
-    ['active', true, false, true],
-    ['active', true, true, true],
-    ['project_viewer', false, false, true],
-    ['project_viewer', true, true, true],
-  ].each do |user, with_options, choose_options, in_aproject|
-    test "Rerun pipeline instance as #{user} using options #{with_options} #{choose_options} in #{in_aproject}" do
-      if in_aproject
-        path = '/pipeline_instances/'+api_fixture('pipeline_instances')['pipeline_owned_by_active_in_aproject']['uuid']
-      else
-        path = '/pipeline_instances/'+api_fixture('pipeline_instances')['pipeline_owned_by_active_in_home']['uuid']
-      end
-
-      visit page_with_token(user, path)
-
-      page.assert_selector 'a,button', text: 'Re-run with latest'
-      page.assert_selector 'a,button', text: 'Re-run options'
-
-      if user == 'project_viewer' && in_aproject
-        assert page.has_text? 'A Project'
-      end
-
-      # Now re-run the pipeline
-      if with_options
-        assert_triggers_dom_event 'shown.bs.modal' do
-          find('a,button', text: 'Re-run options').click
-        end
-        within('.modal-dialog') do
-          page.assert_selector 'a,button', text: 'Copy and edit inputs'
-          page.assert_selector 'a,button', text: 'Run now'
-          if choose_options
-            find('button', text: 'Copy and edit inputs').click
-          else
-            find('button', text: 'Run now').click
-          end
-        end
-      else
-        find('a,button', text: 'Re-run with latest').click
-      end
-
-      # Verify that the newly created instance is created in the right
-      # project. In case of project_viewer user, since the user cannot
-      # write to the project, the pipeline should have been created in
-      # the user's Home project.
-      assert_not_equal path, current_path, 'Rerun instance path expected to be different'
-      assert_text 'Home'
-      if in_aproject && (user != 'project_viewer')
-        assert_text 'A Project'
-      else
-        assert_no_text 'A Project'
-      end
-    end
-  end
-
-  # Create and run a pipeline for 'Two Part Pipeline Template' in 'A Project'
-  def create_and_run_pipeline_in_aproject in_aproject, template_name, collection_fixture, choose_file=false
-    # collection in aproject to be used as input
-    collection = api_fixture('collections', collection_fixture)
-    collection['name'] ||= '' # API response is "" even if fixture attr is null
-
-    # create a pipeline instance
-    find('.btn', text: 'Run a process').click
-    within('.modal-dialog') do
-      find('.selectable', text: template_name).click
-      find('.btn', text: 'Next: choose inputs').click
-    end
-
-    assert find('p', text: 'Provide a value')
-
-    find('div.form-group', text: 'Foo/bar pair').
-      find('.btn', text: 'Choose').
-      click
-
-    within('.modal-dialog') do
-      if in_aproject
-        assert_selector 'button.dropdown-toggle', text: 'A Project'
-        wait_for_ajax
-      else
-        assert_selector 'button.dropdown-toggle', text: 'Home'
-        wait_for_ajax
-        click_button "Home"
-        click_link "A Project"
-        wait_for_ajax
-      end
-
-      if collection_fixture == 'foo_collection_in_aproject'
-        first('span', text: 'foo_tag').click
-      elsif collection['name'] != ''
-        first('span', text: "#{collection['name']}").click
-      else
-        collection_uuid = collection['uuid']
-        find("div[data-object-uuid=#{collection_uuid}]").click
-      end
-
-      if choose_file
-        wait_for_ajax
-        find('.preview-selectable', text: 'foo').click
-      end
-      find('button', text: 'OK').click
-    end
-
-    # The input, after being specified, should still be displayed (#3382)
-    assert find('div.form-group', text: 'Foo/bar pair')
-
-    # Ensure that the collection's portable_data_hash, uuid and name
-    # are saved in the desired places. (#4015)
-    click_link 'Advanced'
-    click_link 'API response'
-
-    api_response = JSON.parse(find('div#advanced_api_response pre').text)
-    input_params = api_response['components']['part-one']['script_parameters']['input']
-    assert_equal(collection['uuid'], input_params['selection_uuid'], "Not found expected input param uuid")
-    if choose_file
-      assert_equal(collection['portable_data_hash']+'/foo', input_params['value'], "Not found expected input file param value")
-      assert_equal(collection['name']+'/foo', input_params['selection_name'], "Not found expected input file param name")
-    else
-      assert_equal(collection['portable_data_hash'], input_params['value'], "Not found expected input param value")
-      assert_equal(collection['name'], input_params['selection_name'], "Not found expected input selection name")
-    end
-
-    # "Run" button present and enabled
-    page.assert_no_selector 'a.disabled,button.disabled', text: 'Run'
-    first('a,button', text: 'Run').click
-
-    # Pipeline is running. We have a "Pause" button instead now.
-    page.assert_no_selector 'a,button', text: 'Run'
-    page.assert_no_selector 'a.disabled,button.disabled', text: 'Resume'
-    page.assert_selector 'a,button', text: 'Pause'
-
-    # Since it is test env, no jobs are created to run. So, graph not visible
-    assert page.has_no_text? 'Graph'
-  end
-
   [
     ['user1_with_load', 'zzzzz-d1hrv-10pipelines0001', 0], # run time 0 minutes
     ['user1_with_load', 'zzzzz-d1hrv-10pipelines0010', 17*60*60 + 51*60], # run time 17 hours and 51 minutes
diff --git a/apps/workbench/test/integration/projects_test.rb b/apps/workbench/test/integration/projects_test.rb
index 279d85101..17ab5e466 100644
--- a/apps/workbench/test/integration/projects_test.rb
+++ b/apps/workbench/test/integration/projects_test.rb
@@ -735,7 +735,6 @@ class ProjectsTest < ActionDispatch::IntegrationTest
   end
 
   [
-    ['Two Part Pipeline Template', 'part-one', 'Provide a value for the following'],
     ['Workflow with input specifications', 'this workflow has inputs specified', 'Provide a value for the following'],
   ].each do |template_name, preview_txt, process_txt|
     test "run a process using template #{template_name} in a project" do
diff --git a/apps/workbench/test/integration/repositories_browse_test.rb b/apps/workbench/test/integration/repositories_browse_test.rb
index 056598ef1..1fd9d9008 100644
--- a/apps/workbench/test/integration/repositories_browse_test.rb
+++ b/apps/workbench/test/integration/repositories_browse_test.rb
@@ -16,29 +16,6 @@ class RepositoriesTest < ActionDispatch::IntegrationTest
     need_javascript
   end
 
-  test "browse repository from jobs#show" do
-    sha1 = api_fixture('jobs')['running']['script_version']
-    _, fakecommit, fakefile =
-      stub_repo_content sha1: sha1, filename: 'crunch_scripts/hash'
-    show_object_using 'active', 'jobs', 'running', sha1
-    click_on api_fixture('jobs')['running']['script']
-    assert_text fakefile
-    click_on 'crunch_scripts'
-    assert_selector 'td a', text: 'hash'
-    click_on 'foo'
-    assert_selector 'td a', text: 'crunch_scripts'
-    click_on sha1
-    assert_text fakecommit
-
-    show_object_using 'active', 'jobs', 'running', sha1
-    click_on 'active/foo'
-    assert_selector 'td a', text: 'crunch_scripts'
-
-    show_object_using 'active', 'jobs', 'running', sha1
-    click_on sha1
-    assert_text fakecommit
-  end
-
   test "browse using arv-git-http" do
     repo = api_fixture('repositories')['foo']
     Repository.any_instance.
diff --git a/apps/workbench/test/integration/websockets_test.rb b/apps/workbench/test/integration/websockets_test.rb
index a79220a88..83494173a 100644
--- a/apps/workbench/test/integration/websockets_test.rb
+++ b/apps/workbench/test/integration/websockets_test.rb
@@ -77,67 +77,6 @@ class WebsocketTest < ActionDispatch::IntegrationTest
     end
   end
 
-  test "pipeline instance arv-refresh-on-log-event" do
-    # Do something and check that the pane reloads.
-    p = use_token :active do
-      PipelineInstance.create(state: "RunningOnServer",
-                              components: {
-                                c1: {
-                                  script: "test_hash.py",
-                                  script_version: "1de84a854e2b440dc53bf42f8548afa4c17da332"
-                                }
-                              })
-    end
-    visit(page_with_token("active", "/pipeline_instances/#{p.uuid}"))
-
-    assert_text 'Active'
-    assert page.has_link? 'Pause'
-    assert_no_text 'Complete'
-    assert page.has_no_link? 'Re-run with latest'
-
-    use_token :dispatch1 do
-      p.update_attributes!(state: 'Complete')
-    end
-
-    assert_no_text 'Active'
-    assert page.has_no_link? 'Pause'
-    assert_text 'Complete'
-    assert page.has_link? 'Re-run with latest'
-  end
-
-  test "job arv-refresh-on-log-event" do
-    # Do something and check that the pane reloads.
-    uuid = api_fixture('jobs')['running_will_be_completed']['uuid']
-    visit(page_with_token("active", "/jobs/#{uuid}"))
-
-    assert_no_text 'complete'
-    assert_no_text 'Re-run job'
-
-    use_token :dispatch1 do
-      Job.find(uuid).update_attributes!(state: 'Complete')
-    end
-
-    assert_text 'complete'
-    assert_text 'Re-run job'
-  end
-
-  test "dashboard arv-refresh-on-log-event" do
-    visit(page_with_token("active", "/"))
-
-    assert_no_text 'test dashboard arv-refresh-on-log-event'
-
-    # Do something and check that the pane reloads.
-    use_token :active do
-      p = PipelineInstance.create({state: "RunningOnServer",
-                                    name: "test dashboard arv-refresh-on-log-event",
-                                    components: {
-                                    }
-                                  })
-    end
-
-    assert_text 'test dashboard arv-refresh-on-log-event'
-  end
-
   test 'job graph appears when first data point is already in logs table' do
     job_graph_first_datapoint_test
   end
diff --git a/apps/workbench/test/integration/work_units_test.rb b/apps/workbench/test/integration/work_units_test.rb
index b58d59a82..fe73f2734 100644
--- a/apps/workbench/test/integration/work_units_test.rb
+++ b/apps/workbench/test/integration/work_units_test.rb
@@ -78,8 +78,6 @@ class WorkUnitsTest < ActionDispatch::IntegrationTest
   end
 
   [
-    ['jobs', 'running_job_with_components', true, true],
-    ['pipeline_instances', 'components_is_jobspec', true, true],
     ['containers', 'running', false],
     ['container_requests', 'running', true],
   ].each do |type, fixture, cancelable, confirm_cancellation|
@@ -122,8 +120,6 @@ class WorkUnitsTest < ActionDispatch::IntegrationTest
   end
 
   [
-    ['jobs', 'running_job_with_components'],
-    ['pipeline_instances', 'has_component_with_completed_jobs'],
     ['container_requests', 'running'],
     ['container_requests', 'completed'],
   ].each do |type, fixture|
@@ -145,7 +141,6 @@ class WorkUnitsTest < ActionDispatch::IntegrationTest
   end
 
   [
-    ['Pipeline with default input specifications', 'part-one', 'Provide values for the following'],
     ['Workflow with default input specifications', 'this workflow has inputs specified', 'Provide a value for the following'],
   ].each do |template_name, preview_txt, process_txt|
     test "run a process using template #{template_name} from dashboard" do
@@ -248,31 +243,6 @@ class WorkUnitsTest < ActionDispatch::IntegrationTest
     end
   end
 
-  [
-    ['jobs', 'active', 'running_job_with_components', 'component1', '/jobs/zzzzz-8i9sb-jyq01m7in1jlofj#Log'],
-    ['pipeline_instances', 'active', 'pipeline_in_running_state', 'foo', '/jobs/zzzzz-8i9sb-pshmckwoma9plh7#Log'],
-    ['pipeline_instances', nil, 'pipeline_in_publicly_accessible_project_but_other_objects_elsewhere', 'foo', 'Log unavailable'],
-  ].each do |type, token, fixture, child, log_link|
-    test "link_to_log for #{fixture} for #{token}" do
-      obj = api_fixture(type)[fixture]
-      if token
-        visit page_with_token token, "/#{type}/#{obj['uuid']}"
-      else
-        Rails.configuration.Users.AnonymousUserToken =
-          api_fixture("api_client_authorizations", "anonymous", "api_token")
-        visit "/#{type}/#{obj['uuid']}"
-      end
-
-      click_link(child)
-
-      if token
-        assert_selector "a[href=\"#{log_link}\"]"
-      else
-        assert_text log_link
-      end
-    end
-  end
-
   test 'Run from workflows index page' do
     visit page_with_token('active', '/workflows')
 
diff --git a/lib/config/config.default.yml b/lib/config/config.default.yml
index 9ac4aeeb9..e35671cd7 100644
--- a/lib/config/config.default.yml
+++ b/lib/config/config.default.yml
@@ -570,7 +570,11 @@ Clusters:
           AssignNodeHostname: "compute%<slot_number>d"
 
       JobsAPI:
-        # Enable the legacy Jobs API.  This value must be a string.
+        # Enable the legacy 'jobs' API (crunch v1).  This value must be a string.
+        #
+        # Note: this only enables read-only access; creating new
+        # legacy jobs and pipelines is not supported.
+        #
         # 'auto' -- (default) enable the Jobs API only if it has been used before
         #         (i.e., there are job records in the database)
         # 'true' -- enable the Jobs API despite lack of existing records.
@@ -583,30 +587,6 @@ Clusters:
         # {git_repositories_dir}/arvados/.git
         GitInternalDir: /var/lib/arvados/internal.git
 
-        # Docker image to be used when none found in runtime_constraints of a job
-        DefaultDockerImage: ""
-
-        # none or slurm_immediate
-        CrunchJobWrapper: none
-
-        # username, or false = do not set uid when running jobs.
-        CrunchJobUser: crunch
-
-        # The web service must be able to create/write this file, and
-        # crunch-job must be able to stat() it.
-        CrunchRefreshTrigger: /tmp/crunch_refresh_trigger
-
-        # Control job reuse behavior when two completed jobs match the
-        # search criteria and have different outputs.
-        #
-        # If true, in case of a conflict, reuse the earliest job (this is
-        # similar to container reuse behavior).
-        #
-        # If false, in case of a conflict, do not reuse any completed job,
-        # but do reuse an already-running job if available (this is the
-        # original job reuse behavior, and is still the default).
-        ReuseJobIfOutputsDiffer: false
-
       CloudVMs:
         # Enable the cloud scheduler (experimental).
         Enable: false
diff --git a/sdk/cli/arvados-cli.gemspec b/sdk/cli/arvados-cli.gemspec
index 60aeb1892..bce7ce5f5 100644
--- a/sdk/cli/arvados-cli.gemspec
+++ b/sdk/cli/arvados-cli.gemspec
@@ -22,12 +22,8 @@ Gem::Specification.new do |s|
   s.email       = 'gem-dev at curoverse.com'
   #s.bindir      = '.'
   s.licenses    = ['Apache-2.0']
-  s.files       = ["bin/arv", "bin/arv-run-pipeline-instance",
-                   "bin/arv-crunch-job", "bin/arv-tag", "bin/crunch-job",
-                   "LICENSE-2.0.txt"]
+  s.files       = ["bin/arv", "bin/arv-tag", "LICENSE-2.0.txt"]
   s.executables << "arv"
-  s.executables << "arv-run-pipeline-instance"
-  s.executables << "arv-crunch-job"
   s.executables << "arv-tag"
   s.required_ruby_version = '>= 2.1.0'
   s.add_runtime_dependency 'arvados', '~> 1.3.0', '>= 1.3.0'
diff --git a/sdk/go/arvados/config.go b/sdk/go/arvados/config.go
index f6b736d58..80381aced 100644
--- a/sdk/go/arvados/config.go
+++ b/sdk/go/arvados/config.go
@@ -271,13 +271,8 @@ type ContainersConfig struct {
 	UsePreemptibleInstances     bool
 
 	JobsAPI struct {
-		Enable                  string
-		GitInternalDir          string
-		DefaultDockerImage      string
-		CrunchJobWrapper        string
-		CrunchJobUser           string
-		CrunchRefreshTrigger    string
-		ReuseJobIfOutputsDiffer bool
+		Enable         string
+		GitInternalDir string
 	}
 	Logging struct {
 		MaxAge                       Duration
diff --git a/services/api/app/helpers/commit_ancestors_helper.rb b/services/api/app/helpers/commit_ancestors_helper.rb
deleted file mode 100644
index 6def2c9bb..000000000
--- a/services/api/app/helpers/commit_ancestors_helper.rb
+++ /dev/null
@@ -1,6 +0,0 @@
-# Copyright (C) The Arvados Authors. All rights reserved.
-#
-# SPDX-License-Identifier: AGPL-3.0
-
-module CommitAncestorsHelper
-end
diff --git a/services/api/app/models/commit_ancestor.rb b/services/api/app/models/commit_ancestor.rb
deleted file mode 100644
index 59e8552f3..000000000
--- a/services/api/app/models/commit_ancestor.rb
+++ /dev/null
@@ -1,44 +0,0 @@
-# Copyright (C) The Arvados Authors. All rights reserved.
-#
-# SPDX-License-Identifier: AGPL-3.0
-
-# Usage:
-#
-# x = CommitAncestor.find_or_create_by_descendant_and_ancestor(a, b)
-# "b is an ancestor of a" if x.is
-#
-
-class CommitAncestor < ActiveRecord::Base
-  before_create :ask_git_whether_is
-
-  class CommitNotFoundError < ArgumentError
-  end
-
-  protected
-
-  def ask_git_whether_is
-    @gitdirbase = Rails.configuration.Git.Repositories
-    self.is = nil
-    Dir.foreach @gitdirbase do |repo|
-      next if repo.match(/^\./)
-      git_dir = repo.match(/\.git$/) ? repo : File.join(repo, '.git')
-      repo_name = repo.sub(/\.git$/, '')
-      ENV['GIT_DIR'] = File.join(@gitdirbase, git_dir)
-      IO.foreach("|git rev-list --format=oneline '#{self.descendant.gsub(/[^0-9a-f]/,"")}'") do |line|
-        self.is = false
-        sha1, _ = line.strip.split(" ", 2)
-        if sha1 == self.ancestor
-          self.is = true
-          break
-        end
-      end
-      if !self.is.nil?
-        self.repository_name = repo_name
-        break
-      end
-    end
-    if self.is.nil?
-      raise CommitNotFoundError.new "Specified commit was not found"
-    end
-  end
-end
diff --git a/services/api/app/models/job.rb b/services/api/app/models/job.rb
index 4d63deb99..c13d48bd8 100644
--- a/services/api/app/models/job.rb
+++ b/services/api/app/models/job.rb
@@ -16,7 +16,6 @@ class Job < ArvadosModel
   serialize :runtime_constraints, Hash
   serialize :tasks_summary, Hash
   before_create :ensure_unique_submit_id
-  after_commit :trigger_crunch_dispatch_if_cancelled, :on => :update
   before_validation :set_priority
   before_validation :update_state_from_old_state_attrs
   before_validation :update_script_parameters_digest
@@ -29,7 +28,6 @@ class Job < ArvadosModel
   before_save :tag_version_in_internal_repository
   before_save :update_timestamps_when_state_changes
 
-  has_many :commit_ancestors, :foreign_key => :descendant, :primary_key => :script_version
   has_many(:nodes, foreign_key: :job_uuid, primary_key: :uuid)
 
   class SubmitIdReused < RequestError
@@ -207,139 +205,6 @@ class Job < ArvadosModel
     filters
   end
 
-  def self.find_reusable attrs, params, filters, read_users
-    if filters.empty?  # Translate older creation parameters into filters.
-      filters =
-        [["repository", "=", attrs[:repository]],
-         ["script", "=", attrs[:script]],
-         ["script_version", "not in git", params[:exclude_script_versions]],
-        ].reject { |filter| filter.last.nil? or filter.last.empty? }
-      if !params[:minimum_script_version].blank?
-        filters << ["script_version", "in git",
-                     params[:minimum_script_version]]
-      else
-        filters += default_git_filters("script_version", attrs[:repository],
-                                       attrs[:script_version])
-      end
-      if image_search = attrs[:runtime_constraints].andand["docker_image"]
-        if image_tag = attrs[:runtime_constraints]["docker_image_tag"]
-          image_search += ":#{image_tag}"
-        end
-        image_locator = Collection.
-          for_latest_docker_image(image_search).andand.portable_data_hash
-      else
-        image_locator = nil
-      end
-      filters << ["docker_image_locator", "=", image_locator]
-      if sdk_version = attrs[:runtime_constraints].andand["arvados_sdk_version"]
-        filters += default_git_filters("arvados_sdk_version", "arvados", sdk_version)
-      end
-      filters = load_job_specific_filters(attrs, filters, read_users)
-    end
-
-    # Check specified filters for some reasonableness.
-    filter_names = filters.map { |f| f.first }.uniq
-    ["repository", "script"].each do |req_filter|
-      if not filter_names.include?(req_filter)
-        return send_error("#{req_filter} filter required")
-      end
-    end
-
-    # Search for a reusable Job, and return it if found.
-    candidates = Job.readable_by(current_user)
-    log_reuse_info { "starting with #{candidates.count} jobs readable by current user #{current_user.uuid}" }
-
-    candidates = candidates.where(
-      'state = ? or (owner_uuid = ? and state in (?))',
-      Job::Complete, current_user.uuid, [Job::Queued, Job::Running])
-    log_reuse_info(candidates) { "after filtering on job state ((state=Complete) or (state=Queued/Running and (submitted by current user)))" }
-
-    digest = Job.sorted_hash_digest(attrs[:script_parameters])
-    candidates = candidates.where('script_parameters_digest = ?', digest)
-    log_reuse_info(candidates) { "after filtering on script_parameters_digest #{digest}" }
-
-    candidates = candidates.where('nondeterministic is distinct from ?', true)
-    log_reuse_info(candidates) { "after filtering on !nondeterministic" }
-
-    # prefer Running jobs over Queued
-    candidates = candidates.order('state desc, created_at')
-
-    candidates = apply_filters candidates, filters
-    log_reuse_info(candidates) { "after filtering on repo, script, and custom filters #{filters.inspect}" }
-
-    chosen = nil
-    chosen_output = nil
-    incomplete_job = nil
-    candidates.each do |j|
-      if j.state != Job::Complete
-        if !incomplete_job
-          # We'll use this if we don't find a job that has completed
-          log_reuse_info { "job #{j.uuid} is reusable, but unfinished; continuing search for completed jobs" }
-          incomplete_job = j
-        else
-          log_reuse_info { "job #{j.uuid} is unfinished and we already have #{incomplete_job.uuid}; ignoring" }
-        end
-      elsif chosen == false
-        # Ignore: we have already decided not to reuse any completed
-        # job.
-        log_reuse_info { "job #{j.uuid} with output #{j.output} ignored, see above" }
-      elsif j.output.nil?
-        log_reuse_info { "job #{j.uuid} has nil output" }
-      elsif j.log.nil?
-        log_reuse_info { "job #{j.uuid} has nil log" }
-      elsif Rails.configuration.Containers.JobsAPI.ReuseJobIfOutputsDiffer
-        if !Collection.readable_by(current_user).find_by_portable_data_hash(j.output)
-          # Ignore: keep looking for an incomplete job or one whose
-          # output is readable.
-          log_reuse_info { "job #{j.uuid} output #{j.output} unavailable to user; continuing search" }
-        elsif !Collection.readable_by(current_user).find_by_portable_data_hash(j.log)
-          # Ignore: keep looking for an incomplete job or one whose
-          # log is readable.
-          log_reuse_info { "job #{j.uuid} log #{j.log} unavailable to user; continuing search" }
-        else
-          log_reuse_info { "job #{j.uuid} with output #{j.output} is reusable; decision is final." }
-          return j
-        end
-      elsif chosen_output
-        if chosen_output != j.output
-          # If two matching jobs produced different outputs, run a new
-          # job (or use one that's already running/queued) instead of
-          # choosing one arbitrarily.
-          log_reuse_info { "job #{j.uuid} output #{j.output} disagrees; forgetting about #{chosen.uuid} and ignoring any other finished jobs (see reuse_job_if_outputs_differ in application.default.yml)" }
-          chosen = false
-        else
-          log_reuse_info { "job #{j.uuid} output #{j.output} agrees with chosen #{chosen.uuid}; continuing search in case other candidates have different outputs" }
-        end
-        # ...and that's the only thing we need to do once we've chosen
-        # a job to reuse.
-      elsif !Collection.readable_by(current_user).find_by_portable_data_hash(j.output)
-        # This user cannot read the output of this job. Any other
-        # completed job will have either the same output (making it
-        # unusable) or a different output (making it unusable because
-        # reuse_job_if_outputs_different is turned off). Therefore,
-        # any further investigation of reusable jobs is futile.
-        log_reuse_info { "job #{j.uuid} output #{j.output} is unavailable to user; this means no finished job can be reused (see reuse_job_if_outputs_differ in application.default.yml)" }
-        chosen = false
-      elsif !Collection.readable_by(current_user).find_by_portable_data_hash(j.log)
-        # This user cannot read the log of this job, don't try to reuse the
-        # job but consider if the output is consistent.
-        log_reuse_info { "job #{j.uuid} log #{j.log} is unavailable to user; continuing search" }
-        chosen_output = j.output
-      else
-        log_reuse_info { "job #{j.uuid} with output #{j.output} can be reused; continuing search in case other candidates have different outputs" }
-        chosen = j
-        chosen_output = j.output
-      end
-    end
-    j = chosen || incomplete_job
-    if j
-      log_reuse_info { "done, #{j.uuid} was selected" }
-    else
-      log_reuse_info { "done, nothing suitable" }
-    end
-    return j
-  end
-
   def self.default_git_filters(attr_name, repo_name, refspec)
     # Add a filter to @filters for `attr_name` = the latest commit available
     # in `repo_name` at `refspec`.  No filter is added if refspec can't be
@@ -567,14 +432,6 @@ class Job < ArvadosModel
     super
   end
 
-  def trigger_crunch_dispatch_if_cancelled
-    if @need_crunch_dispatch_trigger
-      File.open(Rails.configuration.Containers.JobsAPI.CrunchRefreshTrigger, 'wb') do
-        # That's all, just create/touch a file for crunch-job to see.
-      end
-    end
-  end
-
   def update_timestamps_when_state_changes
     return if not (state_changed? or new_record?)
 
diff --git a/services/api/config/arvados_config.rb b/services/api/config/arvados_config.rb
index 847bee048..09e54b9d4 100644
--- a/services/api/config/arvados_config.rb
+++ b/services/api/config/arvados_config.rb
@@ -146,12 +146,7 @@ arvcfg.declare_config "Containers.SLURM.Managed.ComputeNodeDomain", String, :com
 arvcfg.declare_config "Containers.SLURM.Managed.ComputeNodeNameservers", Hash, :compute_node_nameservers, ->(cfg, k, v) { arrayToHash cfg, "Containers.SLURM.Managed.ComputeNodeNameservers", v }
 arvcfg.declare_config "Containers.SLURM.Managed.AssignNodeHostname", String, :assign_node_hostname
 arvcfg.declare_config "Containers.JobsAPI.Enable", String, :enable_legacy_jobs_api, ->(cfg, k, v) { ConfigLoader.set_cfg cfg, "Containers.JobsAPI.Enable", v.to_s }
-arvcfg.declare_config "Containers.JobsAPI.CrunchJobWrapper", String, :crunch_job_wrapper
-arvcfg.declare_config "Containers.JobsAPI.CrunchJobUser", String, :crunch_job_user
-arvcfg.declare_config "Containers.JobsAPI.CrunchRefreshTrigger", String, :crunch_refresh_trigger
 arvcfg.declare_config "Containers.JobsAPI.GitInternalDir", String, :git_internal_dir
-arvcfg.declare_config "Containers.JobsAPI.ReuseJobIfOutputsDiffer", Boolean, :reuse_job_if_outputs_differ
-arvcfg.declare_config "Containers.JobsAPI.DefaultDockerImage", String, :default_docker_image_for_jobs
 arvcfg.declare_config "Mail.MailchimpAPIKey", String, :mailchimp_api_key
 arvcfg.declare_config "Mail.MailchimpListID", String, :mailchimp_list_id
 arvcfg.declare_config "Services.Controller.ExternalURL", URI
diff --git a/services/api/db/migrate/20190808145904_drop_commit_ancestors.rb b/services/api/db/migrate/20190808145904_drop_commit_ancestors.rb
new file mode 100644
index 000000000..974f3fee0
--- /dev/null
+++ b/services/api/db/migrate/20190808145904_drop_commit_ancestors.rb
@@ -0,0 +1,9 @@
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+class DropCommitAncestors < ActiveRecord::Migration[5.0]
+  def change
+    drop_table :commit_ancestors
+  end
+end
diff --git a/services/api/db/structure.sql b/services/api/db/structure.sql
index 9bb059c2a..080990a35 100644
--- a/services/api/db/structure.sql
+++ b/services/api/db/structure.sql
@@ -228,40 +228,6 @@ ALTER SEQUENCE public.collections_id_seq OWNED BY public.collections.id;
 
 
 --
--- Name: commit_ancestors; Type: TABLE; Schema: public; Owner: -
---
-
-CREATE TABLE public.commit_ancestors (
-    id integer NOT NULL,
-    repository_name character varying(255),
-    descendant character varying(255) NOT NULL,
-    ancestor character varying(255) NOT NULL,
-    "is" boolean DEFAULT false NOT NULL,
-    created_at timestamp without time zone NOT NULL,
-    updated_at timestamp without time zone NOT NULL
-);
-
-
---
--- Name: commit_ancestors_id_seq; Type: SEQUENCE; Schema: public; Owner: -
---
-
-CREATE SEQUENCE public.commit_ancestors_id_seq
-    START WITH 1
-    INCREMENT BY 1
-    NO MINVALUE
-    NO MAXVALUE
-    CACHE 1;
-
-
---
--- Name: commit_ancestors_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
---
-
-ALTER SEQUENCE public.commit_ancestors_id_seq OWNED BY public.commit_ancestors.id;
-
-
---
 -- Name: commits; Type: TABLE; Schema: public; Owner: -
 --
 
@@ -1268,13 +1234,6 @@ ALTER TABLE ONLY public.collections ALTER COLUMN id SET DEFAULT nextval('public.
 
 
 --
--- Name: commit_ancestors id; Type: DEFAULT; Schema: public; Owner: -
---
-
-ALTER TABLE ONLY public.commit_ancestors ALTER COLUMN id SET DEFAULT nextval('public.commit_ancestors_id_seq'::regclass);
-
-
---
 -- Name: commits id; Type: DEFAULT; Schema: public; Owner: -
 --
 
@@ -1462,14 +1421,6 @@ ALTER TABLE ONLY public.collections
 
 
 --
--- Name: commit_ancestors commit_ancestors_pkey; Type: CONSTRAINT; Schema: public; Owner: -
---
-
-ALTER TABLE ONLY public.commit_ancestors
-    ADD CONSTRAINT commit_ancestors_pkey PRIMARY KEY (id);
-
-
---
 -- Name: commits commits_pkey; Type: CONSTRAINT; Schema: public; Owner: -
 --
 
@@ -1918,13 +1869,6 @@ CREATE UNIQUE INDEX index_collections_on_uuid ON public.collections USING btree
 
 
 --
--- Name: index_commit_ancestors_on_descendant_and_ancestor; Type: INDEX; Schema: public; Owner: -
---
-
-CREATE UNIQUE INDEX index_commit_ancestors_on_descendant_and_ancestor ON public.commit_ancestors USING btree (descendant, ancestor);
-
-
---
 -- Name: index_commits_on_repository_name_and_sha1; Type: INDEX; Schema: public; Owner: -
 --
 
@@ -3125,6 +3069,7 @@ INSERT INTO "schema_migrations" (version) VALUES
 ('20190214214814'),
 ('20190322174136'),
 ('20190422144631'),
-('20190523180148');
+('20190523180148'),
+('20190808145904');
 
 
diff --git a/services/api/lib/enable_jobs_api.rb b/services/api/lib/enable_jobs_api.rb
index a4fdc5a1e..1a96a81ad 100644
--- a/services/api/lib/enable_jobs_api.rb
+++ b/services/api/lib/enable_jobs_api.rb
@@ -2,6 +2,15 @@
 #
 # SPDX-License-Identifier: AGPL-3.0
 
+Disable_update_jobs_api_method_list = {"jobs.create"=>{},
+                                "pipeline_instances.create"=>{},
+                                "pipeline_templates.create"=>{},
+                                "jobs.update"=>{},
+                                "pipeline_instances.update"=>{},
+                                "pipeline_templates.update"=>{},
+                                "job_tasks.create"=>{},
+                                "job_tasks.update"=>{}}
+
 Disable_jobs_api_method_list = {"jobs.create"=>{},
                                 "pipeline_instances.create"=>{},
                                 "pipeline_templates.create"=>{},
@@ -30,6 +39,9 @@ Disable_jobs_api_method_list = {"jobs.create"=>{},
                                 "job_tasks.show"=>{}}
 
 def check_enable_legacy_jobs_api
+  # Create/update is permanently disabled (legacy functionality has been removed)
+  Rails.configuration.API.DisabledAPIs.merge! Disable_update_jobs_api_method_list
+
   if Rails.configuration.Containers.JobsAPI.Enable == "false" ||
      (Rails.configuration.Containers.JobsAPI.Enable == "auto" &&
       Job.count == 0)
diff --git a/services/api/test/unit/job_test.rb b/services/api/test/unit/job_test.rb
index 764aac3e4..c89c64c8c 100644
--- a/services/api/test/unit/job_test.rb
+++ b/services/api/test/unit/job_test.rb
@@ -562,17 +562,6 @@ class JobTest < ActiveSupport::TestCase
     assert_equal Job.deep_sort_hash(a).to_json, Job.deep_sort_hash(b).to_json
   end
 
-  test 'find_reusable without logging' do
-    Rails.logger.expects(:info).never
-    try_find_reusable
-  end
-
-  test 'find_reusable with logging' do
-    Rails.configuration.Containers.LogReuseDecisions = true
-    Rails.logger.expects(:info).at_least(3)
-    try_find_reusable
-  end
-
   def try_find_reusable
     foobar = jobs(:foobar)
     example_attrs = {

-----------------------------------------------------------------------


hooks/post-receive
-- 




More information about the arvados-commits mailing list