[arvados] updated: 2.5.0-140-g4e765b926
git repository hosting git at public.arvados.org
Tue Feb 7 19:34:04 UTC 2023
Summary of changes:
sdk/cwl/arvados_cwl/arvcontainer.py | 1 -
sdk/cwl/arvados_cwl/arvworkflow.py | 127 ++++-----------------
sdk/cwl/arvados_cwl/executor.py | 31 ++---
sdk/cwl/arvados_cwl/runner.py | 6 -
sdk/cwl/tests/arvados-tests.yml | 2 +-
.../dmel_r6.16/WholeGenome/genome.dict | 0
.../dmel_r6.16/WholeGenome/genome.fa.fai | 0
.../Homo_sapiens/GRCh38.p2/WholeGenome/genome.dict | 0
.../GRCh38.p2/WholeGenome/genome.fa.fai | 0
sdk/cwl/tests/test_copy_deps.py | 72 +++++-------
sdk/cwl/tests/test_submit.py | 6 +-
sdk/cwl/tests/wf/runin-reqs-wf.cwl | 3 +-
sdk/cwl/tests/wf/runin-reqs-wf2.cwl | 3 +-
sdk/cwl/tests/wf/runin-reqs-wf3.cwl | 3 +-
sdk/cwl/tests/wf/runin-reqs-wf4.cwl | 3 +-
sdk/cwl/tests/wf/runin-reqs-wf5.cwl | 3 +-
16 files changed, 70 insertions(+), 190 deletions(-)
copy apps/workbench/app/mailers/.gitkeep => sdk/cwl/tests/chipseq/data/Genomes/Drosophila_melanogaster/dmel_r6.16/WholeGenome/genome.dict (100%)
copy apps/workbench/app/mailers/.gitkeep => sdk/cwl/tests/chipseq/data/Genomes/Drosophila_melanogaster/dmel_r6.16/WholeGenome/genome.fa.fai (100%)
copy apps/workbench/app/mailers/.gitkeep => sdk/cwl/tests/chipseq/data/Genomes/Homo_sapiens/GRCh38.p2/WholeGenome/genome.dict (100%)
copy apps/workbench/app/mailers/.gitkeep => sdk/cwl/tests/chipseq/data/Genomes/Homo_sapiens/GRCh38.p2/WholeGenome/genome.fa.fai (100%)
via 4e765b926f58f169bfca38268df9bdda84b55503 (commit)
via 62ede2cf371f51cbe8bac07c36ddf904e428262b (commit)
via 995df5b03c9304bc956b02e50225ad2e1f9dfd8d (commit)
via 9f42cb85807ebad098aaf6e0ab3218f763b712e2 (commit)
from a54c0f72656d883ae8f27d5074e35f60e61dce09 (commit)
Those revisions listed above that are new to this repository have
not appeared on any other notification email; so we list those
revisions in full, below.
commit 4e765b926f58f169bfca38268df9bdda84b55503
Author: Peter Amstutz <peter.amstutz at curii.com>
Date: Tue Feb 7 14:32:58 2023 -0500
19385: Fix tests
Arvados-DCO-1.1-Signed-off-by: Peter Amstutz <peter.amstutz at curii.com>
diff --git a/sdk/cwl/arvados_cwl/arvworkflow.py b/sdk/cwl/arvados_cwl/arvworkflow.py
index cc3a51d80..86eee4051 100644
--- a/sdk/cwl/arvados_cwl/arvworkflow.py
+++ b/sdk/cwl/arvados_cwl/arvworkflow.py
@@ -167,17 +167,14 @@ def is_basetype(tp):
return False
-def update_refs(d, baseuri, urlexpander, merged_map, jobmapper, set_block_style, runtimeContext, prefix, replacePrefix):
- if set_block_style and (isinstance(d, CommentedSeq) or isinstance(d, CommentedMap)):
- d.fa.set_block_style()
-
+def update_refs(d, baseuri, urlexpander, merged_map, jobmapper, runtimeContext, prefix, replacePrefix):
if isinstance(d, MutableSequence):
for i, s in enumerate(d):
if prefix and isinstance(s, str):
if s.startswith(prefix):
d[i] = replacePrefix+s[len(prefix):]
else:
- update_refs(s, baseuri, urlexpander, merged_map, jobmapper, set_block_style, runtimeContext, prefix, replacePrefix)
+ update_refs(s, baseuri, urlexpander, merged_map, jobmapper, runtimeContext, prefix, replacePrefix)
elif isinstance(d, MutableMapping):
for field in ("id", "name"):
if isinstance(d.get(field), str) and d[field].startswith("_:"):
@@ -214,7 +211,7 @@ def update_refs(d, baseuri, urlexpander, merged_map, jobmapper, set_block_style,
if isinstance(d["inputs"][inp], str) and not is_basetype(d["inputs"][inp]):
d["inputs"][inp] = rel_ref(d["inputs"][inp], baseuri, urlexpander, merged_map, jobmapper)
if isinstance(d["inputs"][inp], MutableMapping):
- update_refs(d["inputs"][inp], baseuri, urlexpander, merged_map, jobmapper, set_block_style, runtimeContext, prefix, replacePrefix)
+ update_refs(d["inputs"][inp], baseuri, urlexpander, merged_map, jobmapper, runtimeContext, prefix, replacePrefix)
continue
if field == "$schemas":
@@ -222,7 +219,7 @@ def update_refs(d, baseuri, urlexpander, merged_map, jobmapper, set_block_style,
d["$schemas"][n] = rel_ref(d["$schemas"][n], baseuri, urlexpander, merged_map, jobmapper)
continue
- update_refs(d[field], baseuri, urlexpander, merged_map, jobmapper, set_block_style, runtimeContext, prefix, replacePrefix)
+ update_refs(d[field], baseuri, urlexpander, merged_map, jobmapper, runtimeContext, prefix, replacePrefix)
def fix_schemadef(req, baseuri, urlexpander, merged_map, jobmapper, pdh):
@@ -307,17 +304,18 @@ def upload_workflow(arvRunner, tool, job_order, project_uuid,
yamlloader = schema_salad.utils.yaml_no_ts()
result = yamlloader.load(textIO)
- set_block_style = False
- if result.fa.flow_style():
- set_block_style = True
+ export_as_json = result.fa.flow_style()
# 2. find $import, $include, $schema, run, location
# 3. update field value
- update_refs(result, w, tool.doc_loader.expand_url, merged_map, jobmapper, set_block_style, runtimeContext, "", "")
+ update_refs(result, w, tool.doc_loader.expand_url, merged_map, jobmapper, runtimeContext, "", "")
with col.open(w[n+1:], "wt") as f:
# yamlloader.dump(result, stream=sys.stdout)
- yamlloader.dump(result, stream=f)
+ if export_as_json:
+ json.dump(result, f, indent=4, separators=(',',': '))
+ else:
+ yamlloader.dump(result, stream=f)
with col.open(os.path.join("original", w[n+1:]), "wt") as f:
f.write(text)
@@ -352,7 +350,9 @@ def upload_workflow(arvRunner, tool, job_order, project_uuid,
existing = arvRunner.api.collections().list(filters=[["portable_data_hash", "=", col.portable_data_hash()],
["owner_uuid", "=", arvRunner.project_uuid]]).execute(num_retries=arvRunner.num_retries)
+
if len(existing["items"]) == 0:
+ toolname = toolname.replace("/", " ")
col.save_new(name=toolname, owner_uuid=arvRunner.project_uuid, ensure_unique_name=True, properties=properties)
logger.info("Workflow uploaded to %s", col.manifest_locator())
else:
@@ -460,7 +460,7 @@ def upload_workflow(arvRunner, tool, job_order, project_uuid,
if r["class"] == "SchemaDefRequirement":
wrapper["requirements"][i] = fix_schemadef(r, main["id"], tool.doc_loader.expand_url, merged_map, jobmapper, col.portable_data_hash())
- update_refs(wrapper, main["id"], tool.doc_loader.expand_url, merged_map, jobmapper, False, runtimeContext, main["id"]+"#", "#main/")
+ update_refs(wrapper, main["id"], tool.doc_loader.expand_url, merged_map, jobmapper, runtimeContext, main["id"]+"#", "#main/")
# Remove any lingering file references.
drop_ids(wrapper)
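A note on the change above: ruamel's format attribute (result.fa.flow_style()) reports whether the loaded document was written in JSON-like flow style, and the new code uses that to decide whether to re-serialize the workflow as JSON or as YAML, instead of forcing block style. A minimal sketch of the same decision, assuming a ruamel-based loader from schema_salad.utils.yaml_no_ts() and hypothetical input/output paths:

    import json
    import schema_salad.utils

    yamlloader = schema_salad.utils.yaml_no_ts()     # round-trip ruamel.yaml loader
    with open("workflow.cwl") as src:                # hypothetical input path
        result = yamlloader.load(src)

    # flow_style() is truthy when the source document used JSON/flow style,
    # so the original serialization format is preserved on the way back out.
    export_as_json = result.fa.flow_style()

    with open("rewritten/workflow.cwl", "w") as f:   # hypothetical output path
        if export_as_json:
            json.dump(result, f, indent=4, separators=(',', ': '))
        else:
            yamlloader.dump(result, stream=f)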
diff --git a/sdk/cwl/tests/arvados-tests.yml b/sdk/cwl/tests/arvados-tests.yml
index 4ed4d4ac3..f242e6323 100644
--- a/sdk/cwl/tests/arvados-tests.yml
+++ b/sdk/cwl/tests/arvados-tests.yml
@@ -224,7 +224,7 @@
out: null
tool: wf-defaults/wf4.cwl
doc: default in embedded subworkflow missing 'id' field, v1.0
- should_fail: true
+ should_fail: false
- job: null
output:
diff --git a/sdk/cwl/tests/chipseq/data/Genomes/Drosophila_melanogaster/dmel_r6.16/WholeGenome/genome.dict b/sdk/cwl/tests/chipseq/data/Genomes/Drosophila_melanogaster/dmel_r6.16/WholeGenome/genome.dict
new file mode 100644
index 000000000..e69de29bb
diff --git a/sdk/cwl/tests/chipseq/data/Genomes/Drosophila_melanogaster/dmel_r6.16/WholeGenome/genome.fa.fai b/sdk/cwl/tests/chipseq/data/Genomes/Drosophila_melanogaster/dmel_r6.16/WholeGenome/genome.fa.fai
new file mode 100644
index 000000000..e69de29bb
diff --git a/sdk/cwl/tests/chipseq/data/Genomes/Homo_sapiens/GRCh38.p2/WholeGenome/genome.dict b/sdk/cwl/tests/chipseq/data/Genomes/Homo_sapiens/GRCh38.p2/WholeGenome/genome.dict
new file mode 100644
index 000000000..e69de29bb
diff --git a/sdk/cwl/tests/chipseq/data/Genomes/Homo_sapiens/GRCh38.p2/WholeGenome/genome.fa.fai b/sdk/cwl/tests/chipseq/data/Genomes/Homo_sapiens/GRCh38.p2/WholeGenome/genome.fa.fai
new file mode 100644
index 000000000..e69de29bb
diff --git a/sdk/cwl/tests/wf/runin-reqs-wf.cwl b/sdk/cwl/tests/wf/runin-reqs-wf.cwl
index 22cc82b7f..3e229e665 100644
--- a/sdk/cwl/tests/wf/runin-reqs-wf.cwl
+++ b/sdk/cwl/tests/wf/runin-reqs-wf.cwl
@@ -15,8 +15,7 @@ inputs:
default:
class: File
location: check_mem.py
-outputs:
- out: []
+outputs: []
requirements:
SubworkflowFeatureRequirement: {}
ScatterFeatureRequirement: {}
diff --git a/sdk/cwl/tests/wf/runin-reqs-wf2.cwl b/sdk/cwl/tests/wf/runin-reqs-wf2.cwl
index 4bde6c562..430190459 100644
--- a/sdk/cwl/tests/wf/runin-reqs-wf2.cwl
+++ b/sdk/cwl/tests/wf/runin-reqs-wf2.cwl
@@ -15,8 +15,7 @@ inputs:
default:
class: File
location: check_mem.py
-outputs:
- out: []
+outputs: []
requirements:
SubworkflowFeatureRequirement: {}
ScatterFeatureRequirement: {}
diff --git a/sdk/cwl/tests/wf/runin-reqs-wf3.cwl b/sdk/cwl/tests/wf/runin-reqs-wf3.cwl
index c13b7a0bc..b08af0063 100644
--- a/sdk/cwl/tests/wf/runin-reqs-wf3.cwl
+++ b/sdk/cwl/tests/wf/runin-reqs-wf3.cwl
@@ -15,8 +15,7 @@ inputs:
default:
class: File
location: check_mem.py
-outputs:
- out: []
+outputs: []
requirements:
SubworkflowFeatureRequirement: {}
ScatterFeatureRequirement: {}
diff --git a/sdk/cwl/tests/wf/runin-reqs-wf4.cwl b/sdk/cwl/tests/wf/runin-reqs-wf4.cwl
index d00ee8577..747e3c8d9 100644
--- a/sdk/cwl/tests/wf/runin-reqs-wf4.cwl
+++ b/sdk/cwl/tests/wf/runin-reqs-wf4.cwl
@@ -15,8 +15,7 @@ inputs:
default:
class: File
location: check_mem.py
-outputs:
- out: []
+outputs: []
requirements:
SubworkflowFeatureRequirement: {}
ScatterFeatureRequirement: {}
diff --git a/sdk/cwl/tests/wf/runin-reqs-wf5.cwl b/sdk/cwl/tests/wf/runin-reqs-wf5.cwl
index 647b07edf..bf598b938 100644
--- a/sdk/cwl/tests/wf/runin-reqs-wf5.cwl
+++ b/sdk/cwl/tests/wf/runin-reqs-wf5.cwl
@@ -15,8 +15,7 @@ inputs:
default:
class: File
location: check_mem.py
-outputs:
- out: []
+outputs: []
requirements:
SubworkflowFeatureRequirement: {}
ScatterFeatureRequirement: {}
commit 62ede2cf371f51cbe8bac07c36ddf904e428262b
Author: Peter Amstutz <peter.amstutz at curii.com>
Date: Tue Feb 7 11:15:37 2023 -0500
19385: Fix test_copy_deps
Arvados-DCO-1.1-Signed-off-by: Peter Amstutz <peter.amstutz at curii.com>
diff --git a/sdk/cwl/arvados_cwl/arvworkflow.py b/sdk/cwl/arvados_cwl/arvworkflow.py
index 5274e9cc5..cc3a51d80 100644
--- a/sdk/cwl/arvados_cwl/arvworkflow.py
+++ b/sdk/cwl/arvados_cwl/arvworkflow.py
@@ -350,9 +350,13 @@ def upload_workflow(arvRunner, tool, job_order, project_uuid,
p = g.split("#", 1)[1]
properties["arv:"+p] = git_info[g]
- col.save_new(name=toolname, owner_uuid=arvRunner.project_uuid, ensure_unique_name=True, properties=properties)
-
- logger.info("Workflow uploaded to %s", col.manifest_locator())
+ existing = arvRunner.api.collections().list(filters=[["portable_data_hash", "=", col.portable_data_hash()],
+ ["owner_uuid", "=", arvRunner.project_uuid]]).execute(num_retries=arvRunner.num_retries)
+ if len(existing["items"]) == 0:
+ col.save_new(name=toolname, owner_uuid=arvRunner.project_uuid, ensure_unique_name=True, properties=properties)
+ logger.info("Workflow uploaded to %s", col.manifest_locator())
+ else:
+ logger.info("Workflow uploaded to %s", existing["items"][0]["uuid"])
adjustDirObjs(job_order, trim_listing)
adjustFileObjs(job_order, trim_anonymous_location)
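The change above avoids saving duplicate workflow collections: before calling save_new, it asks the API whether the target project already holds a collection with the same portable data hash, and reuses that collection if so. Because Keep collections are content-addressed, identical content yields the same portable_data_hash regardless of the collection name. A hedged sketch of that look-before-save pattern with the Arvados Python SDK (save_unless_duplicate, project_uuid, name, and properties are placeholders, not names from the patch):

    import arvados
    import arvados.collection

    api = arvados.api()

    def save_unless_duplicate(col, project_uuid, name, properties, num_retries=2):
        # Look for a collection in the project whose content (portable data hash)
        # is identical to what we are about to save.
        existing = api.collections().list(
            filters=[["portable_data_hash", "=", col.portable_data_hash()],
                     ["owner_uuid", "=", project_uuid]]).execute(num_retries=num_retries)
        if len(existing["items"]) == 0:
            col.save_new(name=name, owner_uuid=project_uuid,
                         ensure_unique_name=True, properties=properties)
            return col.manifest_locator()
        # Reuse the collection that is already there.
        return existing["items"][0]["uuid"]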
diff --git a/sdk/cwl/tests/test_copy_deps.py b/sdk/cwl/tests/test_copy_deps.py
index 54d90580f..28a5915b1 100644
--- a/sdk/cwl/tests/test_copy_deps.py
+++ b/sdk/cwl/tests/test_copy_deps.py
@@ -5,57 +5,39 @@
import arvados
import arvados.collection
import subprocess
-import json
api = arvados.api()
-workflow_content = """{
- "$graph": [
- {
- "baseCommand": "echo",
- "class": "CommandLineTool",
- "cwlVersion": "v1.2",
- "hints": [
- {
- "class": "http://arvados.org/cwl#WorkflowRunnerResources"
- }
- ],
- "id": "#main",
- "inputs": [
- {
- "default": {
- "basename": "b",
- "class": "File",
- "location": "keep:d7514270f356df848477718d58308cc4+94/b",
- "nameext": "",
- "nameroot": "b",
- "size": 0
- },
- "id": "#main/message",
- "inputBinding": {
- "position": 1
- },
- "type": "File"
- }
- ],
- "outputs": []
- }
- ],
- "cwlVersion": "v1.2"
-}"""
+workflow_content = """# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: Apache-2.0
+
+cwlVersion: v1.2
+class: CommandLineTool
+baseCommand: echo
+inputs:
+ message:
+ type: File
+ inputBinding:
+ position: 1
+ default:
+ class: File
+ location: keep:d7514270f356df848477718d58308cc4+94/b
+
+outputs: []
+"""
+
+expect_file = "19070-copy-deps.cwl"
def check_workflow_content(uuid):
c = arvados.collection.Collection(uuid)
try:
- j = json.load(c.open("workflow.json"))
- except IOError:
+ with c.open(expect_file) as f:
+ content = f.read()
+ match = (content == workflow_content)
+ return match
+ except:
return False
- # The value of "acrContainerImage" is tied to the specific version
- # of arvados-cwl-runner so we can't just compare PDH of the whole
- # workflow collection, it changes with every version.
- del j["$graph"][0]["hints"][0]["acrContainerImage"]
- print
- return json.dumps(j, sort_keys=True, indent=4, separators=(',',': ')) == workflow_content
def check_contents(group, wf_uuid):
contents = api.groups().contents(uuid=group["uuid"]).execute()
@@ -88,7 +70,7 @@ def check_contents(group, wf_uuid):
if c["kind"] == "arvados#collection" and check_workflow_content(c["portable_data_hash"]):
found = True
if not found:
- raise Exception("Couldn't find collection containing expected workflow.json")
+ raise Exception("Couldn't find collection containing expected "+expect_file)
def test_create():
@@ -137,7 +119,7 @@ def test_update():
if c["kind"] == "arvados#collection" and check_workflow_content(c["portable_data_hash"]):
found = True
if not found:
- raise Exception("Couldn't find collection containing expected workflow.json")
+ raise Exception("Couldn't find collection containing expected "+expect_file)
# Updating by default will copy missing items
cmd = ["arvados-cwl-runner", "--disable-git", "--update-workflow", wf_uuid, "19070-copy-deps.cwl"]
commit 995df5b03c9304bc956b02e50225ad2e1f9dfd8d
Author: Peter Amstutz <peter.amstutz at curii.com>
Date: Mon Feb 6 15:26:38 2023 -0500
19385: Replace upload_workflow code
Arvados-DCO-1.1-Signed-off-by: Peter Amstutz <peter.amstutz at curii.com>
diff --git a/sdk/cwl/arvados_cwl/arvworkflow.py b/sdk/cwl/arvados_cwl/arvworkflow.py
index c7082b04d..5274e9cc5 100644
--- a/sdk/cwl/arvados_cwl/arvworkflow.py
+++ b/sdk/cwl/arvados_cwl/arvworkflow.py
@@ -250,7 +250,7 @@ def drop_ids(d):
drop_ids(d[field])
-def new_upload_workflow(arvRunner, tool, job_order, project_uuid,
+def upload_workflow(arvRunner, tool, job_order, project_uuid,
runtimeContext,
uuid=None,
submit_runner_ram=0, name=None, merged_map=None,
@@ -484,70 +484,6 @@ def make_workflow_record(arvRunner, doc, name, tool, project_uuid, update_uuid):
return call.execute(num_retries=arvRunner.num_retries)["uuid"]
-def upload_workflow(arvRunner, tool, job_order, project_uuid,
- runtimeContext, uuid=None,
- submit_runner_ram=0, name=None, merged_map=None,
- submit_runner_image=None,
- git_info=None):
-
- packed = packed_workflow(arvRunner, tool, merged_map, runtimeContext, git_info)
-
- adjustDirObjs(job_order, trim_listing)
- adjustFileObjs(job_order, trim_anonymous_location)
- adjustDirObjs(job_order, trim_anonymous_location)
-
- main = [p for p in packed["$graph"] if p["id"] == "#main"][0]
- for inp in main["inputs"]:
- sn = shortname(inp["id"])
- if sn in job_order:
- inp["default"] = job_order[sn]
-
- if not name:
- name = tool.tool.get("label", os.path.basename(tool.tool["id"]))
-
- upload_dependencies(arvRunner, name, tool.doc_loader,
- packed, tool.tool["id"],
- runtimeContext)
-
- wf_runner_resources = None
-
- hints = main.get("hints", [])
- found = False
- for h in hints:
- if h["class"] == "http://arvados.org/cwl#WorkflowRunnerResources":
- wf_runner_resources = h
- found = True
- break
- if not found:
- wf_runner_resources = {"class": "http://arvados.org/cwl#WorkflowRunnerResources"}
- hints.append(wf_runner_resources)
-
- wf_runner_resources["acrContainerImage"] = arvados_jobs_image(arvRunner,
- submit_runner_image or "arvados/jobs:"+__version__,
- runtimeContext)
-
- if submit_runner_ram:
- wf_runner_resources["ramMin"] = submit_runner_ram
-
- main["hints"] = hints
-
- wrapper = make_wrapper_workflow(arvRunner, main, packed, project_uuid, name, git_info, tool)
-
- body = {
- "workflow": {
- "name": name,
- "description": tool.tool.get("doc", ""),
- "definition": wrapper
- }}
- if project_uuid:
- body["workflow"]["owner_uuid"] = project_uuid
-
- if uuid:
- call = arvRunner.api.workflows().update(uuid=uuid, body=body)
- else:
- call = arvRunner.api.workflows().create(body=body)
- return call.execute(num_retries=arvRunner.num_retries)["uuid"]
-
def dedup_reqs(reqs):
dedup = {}
for r in reversed(reqs):
diff --git a/sdk/cwl/arvados_cwl/executor.py b/sdk/cwl/arvados_cwl/executor.py
index c42d7bf32..316e8d264 100644
--- a/sdk/cwl/arvados_cwl/executor.py
+++ b/sdk/cwl/arvados_cwl/executor.py
@@ -36,7 +36,7 @@ import arvados_cwl.util
from .arvcontainer import RunnerContainer, cleanup_name_for_collection
from .runner import Runner, upload_docker, upload_job_order, upload_workflow_deps, make_builder, update_from_merged_map
from .arvtool import ArvadosCommandTool, validate_cluster_target, ArvadosExpressionTool
-from .arvworkflow import ArvadosWorkflow, upload_workflow, new_upload_workflow, make_workflow_record
+from .arvworkflow import ArvadosWorkflow, upload_workflow, make_workflow_record
from .fsaccess import CollectionFsAccess, CollectionFetcher, collectionResolver, CollectionCache, pdh_size
from .perf import Perf
from .pathmapper import NoFollowPathMapper
@@ -709,17 +709,17 @@ The 'jobs' API is no longer supported.
if submitting and not self.fast_submit:
# upload workflow and get back the workflow wrapper
- workflow_wrapper = new_upload_workflow(self, tool, job_order,
- runtimeContext.project_uuid,
- runtimeContext,
- uuid=runtimeContext.update_workflow,
- submit_runner_ram=runtimeContext.submit_runner_ram,
- name=runtimeContext.name,
- merged_map=merged_map,
- submit_runner_image=runtimeContext.submit_runner_image,
- git_info=git_info,
- set_defaults=(runtimeContext.update_workflow or runtimeContext.create_workflow),
- jobmapper=jobmapper)
+ workflow_wrapper = upload_workflow(self, tool, job_order,
+ runtimeContext.project_uuid,
+ runtimeContext,
+ uuid=runtimeContext.update_workflow,
+ submit_runner_ram=runtimeContext.submit_runner_ram,
+ name=runtimeContext.name,
+ merged_map=merged_map,
+ submit_runner_image=runtimeContext.submit_runner_image,
+ git_info=git_info,
+ set_defaults=(runtimeContext.update_workflow or runtimeContext.create_workflow),
+ jobmapper=jobmapper)
if runtimeContext.update_workflow or runtimeContext.create_workflow:
# Now create a workflow record and exit.
diff --git a/sdk/cwl/tests/test_submit.py b/sdk/cwl/tests/test_submit.py
index f82834701..d415be885 100644
--- a/sdk/cwl/tests/test_submit.py
+++ b/sdk/cwl/tests/test_submit.py
@@ -71,7 +71,11 @@ def stubs(wfdetails=('submit_wf.cwl', None)):
"df80736f-f14d-4b10-b2e3-03aa27f034b2", "df80736f-f14d-4b10-b2e3-03aa27f034b3",
"df80736f-f14d-4b10-b2e3-03aa27f034b4", "df80736f-f14d-4b10-b2e3-03aa27f034b5",
"df80736f-f14d-4b10-b2e3-03aa27f034b6", "df80736f-f14d-4b10-b2e3-03aa27f034b7",
- "df80736f-f14d-4b10-b2e3-03aa27f034b8", "df80736f-f14d-4b10-b2e3-03aa27f034b9"]
+ "df80736f-f14d-4b10-b2e3-03aa27f034b8", "df80736f-f14d-4b10-b2e3-03aa27f034b9",
+ "df80736f-f14d-4b10-b2e3-03aa27f034c0", "df80736f-f14d-4b10-b2e3-03aa27f034c1",
+ "df80736f-f14d-4b10-b2e3-03aa27f034c2", "df80736f-f14d-4b10-b2e3-03aa27f034c3",
+ "df80736f-f14d-4b10-b2e3-03aa27f034c4", "df80736f-f14d-4b10-b2e3-03aa27f034c5",
+ "df80736f-f14d-4b10-b2e3-03aa27f034c6", "df80736f-f14d-4b10-b2e3-03aa27f034c7"]
determine_image_id.return_value = None
commit 9f42cb85807ebad098aaf6e0ab3218f763b712e2
Author: Peter Amstutz <peter.amstutz at curii.com>
Date: Mon Feb 6 14:50:11 2023 -0500
19385: Clean up debug prints
Arvados-DCO-1.1-Signed-off-by: Peter Amstutz <peter.amstutz at curii.com>
diff --git a/sdk/cwl/arvados_cwl/arvcontainer.py b/sdk/cwl/arvados_cwl/arvcontainer.py
index ceae0dc02..742906c61 100644
--- a/sdk/cwl/arvados_cwl/arvcontainer.py
+++ b/sdk/cwl/arvados_cwl/arvcontainer.py
@@ -546,7 +546,6 @@ class RunnerContainer(Runner):
main = self.loadingContext.loader.idx["_:main"]
if main.get("id") == "_:main":
del main["id"]
- #print(json.dumps(main, indent=2))
workflowpath = "/var/lib/cwl/workflow.json#main"
container_req["mounts"]["/var/lib/cwl/workflow.json"] = {
"kind": "json",
diff --git a/sdk/cwl/arvados_cwl/arvworkflow.py b/sdk/cwl/arvados_cwl/arvworkflow.py
index 2bc823222..c7082b04d 100644
--- a/sdk/cwl/arvados_cwl/arvworkflow.py
+++ b/sdk/cwl/arvados_cwl/arvworkflow.py
@@ -133,21 +133,16 @@ def rel_ref(s, baseuri, urlexpander, merged_map, jobmapper):
if s.startswith("keep:"):
return s
- #print("BBB", s, baseuri)
uri = urlexpander(s, baseuri)
- #print("CCC", uri)
if uri.startswith("keep:"):
return uri
fileuri = urllib.parse.urldefrag(baseuri)[0]
- #print("BBB", s, baseuri, uri)
-
for u in (baseuri, fileuri):
if u in merged_map:
replacements = merged_map[u].resolved
- #print("RRR", u, uri, replacements)
if uri in replacements:
return replacements[uri]
@@ -158,14 +153,10 @@ def rel_ref(s, baseuri, urlexpander, merged_map, jobmapper):
p2 = os.path.dirname(uri_file_path(uri))
p3 = os.path.basename(uri_file_path(uri))
- #print("PPP", p1, p2, p3)
-
r = os.path.relpath(p2, p1)
if r == ".":
r = ""
- #print("RRR", r)
-
return os.path.join(r, p3)
def is_basetype(tp):
@@ -242,9 +233,7 @@ def fix_schemadef(req, baseuri, urlexpander, merged_map, jobmapper, pdh):
path, frag = urllib.parse.urldefrag(r)
rel = rel_ref(r, baseuri, urlexpander, merged_map, jobmapper)
merged_map.setdefault(path, FileUpdates({}, {}))
- #print("PPP", path, r, frag)
rename = "keep:%s/%s" %(pdh, rel)
- #rename = "#%s" % frag
for mm in merged_map:
merged_map[mm].resolved[r] = rename
return req
@@ -306,8 +295,6 @@ def new_upload_workflow(arvRunner, tool, job_order, project_uuid,
col = arvados.collection.Collection(api_client=arvRunner.api)
- #print(merged_map)
-
for w in workflow_files | import_files:
# 1. load YAML
@@ -465,29 +452,15 @@ def new_upload_workflow(arvRunner, tool, job_order, project_uuid,
for g in git_info:
doc[g] = git_info[g]
- #print("MMM", main["id"])
- #print(yamlloader.dump(wrapper, stream=sys.stdout))
-
for i, r in enumerate(wrapper["requirements"]):
if r["class"] == "SchemaDefRequirement":
wrapper["requirements"][i] = fix_schemadef(r, main["id"], tool.doc_loader.expand_url, merged_map, jobmapper, col.portable_data_hash())
- # print()
- # print("merrrrged maaap", merged_map)
- # print()
- #print("update_refs", main["id"], runfile)
-
- #print(yamlloader.dump(wrapper, stream=sys.stdout))
-
update_refs(wrapper, main["id"], tool.doc_loader.expand_url, merged_map, jobmapper, False, runtimeContext, main["id"]+"#", "#main/")
# Remove any lingering file references.
drop_ids(wrapper)
- #print("HHH")
-
- #print(yamlloader.dump(wrapper, stream=sys.stdout))
-
return doc
diff --git a/sdk/cwl/arvados_cwl/executor.py b/sdk/cwl/arvados_cwl/executor.py
index 1134a0a02..c42d7bf32 100644
--- a/sdk/cwl/arvados_cwl/executor.py
+++ b/sdk/cwl/arvados_cwl/executor.py
@@ -734,15 +734,8 @@ The 'jobs' API is no longer supported.
# Reload just the wrapper workflow.
self.fast_submit = True
- #print("bah bah", loadingContext.requirements)
- #workflow_wrapper, _ = loadingContext.loader.resolve_all(cmap(workflow_wrapper), "_:main", checklinks=True)
-
- #tool = load_tool(workflow_wrapper[0], loadingContext)
- #print("AAA", json.dumps(loadingContext.loader.idx["_:main"], indent=2))
tool = load_tool(workflow_wrapper, loadingContext)
-
loadingContext.loader.idx["_:main"] = workflow_wrapper
- #print("BBB", json.dumps(loadingContext.loader.idx["_:main"], indent=2))
if not submitting:
update_from_merged_map(tool, merged_map)
diff --git a/sdk/cwl/arvados_cwl/runner.py b/sdk/cwl/arvados_cwl/runner.py
index 4d569df33..54af2be51 100644
--- a/sdk/cwl/arvados_cwl/runner.py
+++ b/sdk/cwl/arvados_cwl/runner.py
@@ -334,8 +334,6 @@ def upload_dependencies(arvrunner, name, document_loader,
if optional_deps:
sc_result.extend(optional_deps)
- #print("BOING", uri, sc_result)
-
sc = []
uuids = {}
@@ -735,8 +733,6 @@ def upload_job_order(arvrunner, name, tool, job_order, runtimeContext):
update_from_mapper(job_order, jobmapper)
- #print(json.dumps(job_order, indent=2))
-
return job_order, jobmapper
FileUpdates = namedtuple("FileUpdates", ["resolved", "secondaryFiles"])
@@ -781,8 +777,6 @@ def upload_workflow_deps(arvrunner, tool, runtimeContext):
for k,v in pm.items():
toolmap[k] = v.resolved
- #print("visited", deptool["id"], toolmap, discovered_secondaryfiles)
-
merged_map[deptool["id"]] = FileUpdates(toolmap, discovered_secondaryfiles)
return merged_map
-----------------------------------------------------------------------
hooks/post-receive
--