[ARVADOS] created: 2.1.0-104-gd3e5a70e3

Git user git at public.arvados.org
Tue Nov 24 11:36:28 UTC 2020


        at  d3e5a70e3cff631a76a9d3ac0fe492b3d1f64df5 (commit)


commit d3e5a70e3cff631a76a9d3ac0fe492b3d1f64df5
Author: Peter van Heusden <pvh at sanbi.ac.za>
Date:   Wed Nov 18 16:01:32 2020 +0200

    Fix salt-install's crunch-dispatch-local config and tests
    
    * added [@pvanheus fix](https://github.com/arvados/arvados/pull/140)
    * added [a debug parameter to the provision script](https://gitter.im/arvados/community?at=5fb54b4bd37a1a13d6b46a05)
    * added a test script to verify the cluster is able to run a CWL workflow
    * document salt-install's workflow test
    
    refs #17150
    
    Arvados-DCO-1.1-Signed-off-by: Javier BĂ©rtoli <jbertoli at curii.com>

diff --git a/doc/install/salt-single-host.html.textile.liquid b/doc/install/salt-single-host.html.textile.liquid
index 139366179..fb41d59ee 100644
--- a/doc/install/salt-single-host.html.textile.liquid
+++ b/doc/install/salt-single-host.html.textile.liquid
@@ -11,9 +11,9 @@ SPDX-License-Identifier: CC-BY-SA-3.0
 
 # "Install Saltstack":#saltstack
 # "Single host install using the provision.sh script":#single_host
-# "Local testing Arvados in a Vagrant box":#vagrant
 # "DNS configuration":#final_steps
 # "Initial user and login":#initial_user
+# "Test the installed cluster running a simple workflow":#test_install
 
 h2(#saltstack). Install Saltstack
 
@@ -84,3 +84,95 @@ Assuming you didn't change these values in the @provision.sh@ script, the initia
 * User: 'admin'
 * Password: 'password'
 * Email: 'admin at arva2.arv.local'
+
+h2(#test_install). Test the installed cluster running a simple workflow
+
+The @provision.sh@ script saves a simple example test workflow in the @/tmp/cluster_tests@ directory. If you want to run it, just change to that directory and run:
+
+<notextile>
+<pre><code>cd /tmp/cluster_tests
+./run-test.sh
+</code></pre>
+</notextile>
+
+It will create a test user, upload a small workflow and run it. If everything goes OK, the output should be similar to this (some output was shortened for clarity):
+
+<notextile>
+<pre><code>Creating Arvados Standard Docker Images project
+Arvados project uuid is 'arva2-j7d0g-0prd8cjlk6kfl7y'
+{
+ ...
+ "uuid":"arva2-o0j2j-n4zu4cak5iifq2a",
+ "owner_uuid":"arva2-tpzed-000000000000000",
+ ...
+}
+Uploading arvados/jobs' docker image to the project
+2.1.1: Pulling from arvados/jobs
+8559a31e96f4: Pulling fs layer
+...
+Status: Downloaded newer image for arvados/jobs:2.1.1
+docker.io/arvados/jobs:2.1.1
+2020-11-23 21:43:39 arvados.arv_put[32678] INFO: Creating new cache file at /home/vagrant/.cache/arvados/arv-put/c59256eda1829281424c80f588c7cc4d
+2020-11-23 21:43:46 arvados.arv_put[32678] INFO: Collection saved as 'Docker image arvados jobs:2.1.1 sha256:0dd50'
+arva2-4zz18-1u5pvbld7cvxuy2
+Creating initial user ('admin')
+Setting up user ('admin')
+{
+ "items":[
+  {
+   ...
+   "owner_uuid":"arva2-tpzed-000000000000000",
+   ...
+   "uuid":"arva2-o0j2j-1ownrdne0ok9iox"
+  },
+  {
+   ...
+   "owner_uuid":"arva2-tpzed-000000000000000",
+   ...
+   "uuid":"arva2-o0j2j-1zbeyhcwxc1tvb7"
+  },
+  {
+   ...
+   "email":"admin at arva2.arv.local",
+   ...
+   "owner_uuid":"arva2-tpzed-000000000000000",
+   ...
+   "username":"admin",
+   "uuid":"arva2-tpzed-3wrm93zmzpshrq2",
+   ...
+  }
+ ],
+ "kind":"arvados#HashList"
+}
+Activating user 'admin'
+{
+ ...
+ "email":"admin at arva2.arv.local",
+ ...
+ "username":"admin",
+ "uuid":"arva2-tpzed-3wrm93zmzpshrq2",
+ ...
+}
+Running test CWL workflow
+INFO /usr/bin/cwl-runner 2.1.1, arvados-python-client 2.1.1, cwltool 3.0.20200807132242
+INFO Resolved 'hasher-workflow.cwl' to 'file:///tmp/cluster_tests/hasher-workflow.cwl'
+...
+INFO Using cluster arva2 (https://arva2.arv.local:8443/)
+INFO Upload local files: "test.txt"
+INFO Uploaded to ea34d971b71d5536b4f6b7d6c69dc7f6+50 (arva2-4zz18-c8uvwqdry4r8jao)
+INFO Using collection cache size 256 MiB
+INFO [container hasher-workflow.cwl] submitted container_request arva2-xvhdp-v1bkywd58gyocwm
+INFO [container hasher-workflow.cwl] arva2-xvhdp-v1bkywd58gyocwm is Final
+INFO Overall process status is success
+INFO Final output collection d6c69a88147dde9d52a418d50ef788df+123
+{
+    "hasher_out": {
+        "basename": "hasher3.md5sum.txt",
+        "class": "File",
+        "location": "keep:d6c69a88147dde9d52a418d50ef788df+123/hasher3.md5sum.txt",
+        "size": 95
+    }
+}
+INFO Final process status is success
+</code></pre>
+</notextile>
diff --git a/doc/install/salt-vagrant.html.textile.liquid b/doc/install/salt-vagrant.html.textile.liquid
index 41f32e51c..d9aa791f0 100644
--- a/doc/install/salt-vagrant.html.textile.liquid
+++ b/doc/install/salt-vagrant.html.textile.liquid
@@ -12,6 +12,7 @@ SPDX-License-Identifier: CC-BY-SA-3.0
 # "Vagrant":#vagrant
 # "DNS configuration":#final_steps
 # "Initial user and login":#initial_user
+# "Test the installed cluster running a simple workflow":#test_install
 
 h2(#vagrant). Vagrant
 
@@ -71,3 +72,19 @@ Assuming you didn't change the defaults, the initial credentials are:
 * User: 'admin'
 * Password: 'password'
 * Email: 'admin at arva2.arv.local'
+
+h2(#test_install). Test the installed cluster running a simple workflow
+
+As documented in the <a href="{{ site.baseurl }}/install/salt-single-host.html">Single Host installation</a> page, you can run a test workflow to verify the installation finished correctly. To do so, you can follow these steps:
+
+<notextile>
+<pre><code>vagrant ssh</code></pre>
+</notextile>
+
+and once in the instance:
+
+<notextile>
+<pre><code>cd /tmp/cluster_tests
+./run-test.sh
+</code></pre>
+</notextile>
diff --git a/tools/salt-install/Vagrantfile b/tools/salt-install/Vagrantfile
index 93bb77d4f..a448824a9 100644
--- a/tools/salt-install/Vagrantfile
+++ b/tools/salt-install/Vagrantfile
@@ -13,7 +13,13 @@ Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
 
   config.vm.define "arvados" do |arv|
     arv.vm.box = "bento/debian-10"
-    arv.vm.hostname = "arva2.arv.local"
+    arv.vm.hostname = "vagrant.local"
+    # CPU/RAM
+    config.vm.provider :virtualbox do |v|
+      v.memory = 2048
+      v.cpus = 2
+    end
+
     # Networking
     arv.vm.network "forwarded_port", guest: 8443, host: 8443
     arv.vm.network "forwarded_port", guest: 25100, host: 25100
@@ -24,12 +30,10 @@ Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
     arv.vm.network "forwarded_port", guest: 8001, host: 8001
     arv.vm.network "forwarded_port", guest: 8000, host: 8000
     arv.vm.network "forwarded_port", guest: 3001, host: 3001
-    # config.vm.network "private_network", ip: "192.168.33.10"
-    # arv.vm.synced_folder "salt_pillars", "/srv/pillars",
-    #                      create: true
     arv.vm.provision "shell",
                      path: "provision.sh",
                      args: [
+                       # "--test",
                        "--vagrant",
                        "--ssl-port=8443"
                      ].join(" ")
diff --git a/tools/salt-install/provision.sh b/tools/salt-install/provision.sh
index 7e88d7662..57a26308e 100755
--- a/tools/salt-install/provision.sh
+++ b/tools/salt-install/provision.sh
@@ -50,21 +50,26 @@ VERSION="latest"
 
 set -o pipefail
 
+# capture the directory that the script is running from
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
+
 usage() {
   echo >&2
   echo >&2 "Usage: $0 [-h] [-h]"
   echo >&2
   echo >&2 "$0 options:"
-  echo >&2 "  -v, --vagrant           Run in vagrant and use the /vagrant shared dir"
+  echo >&2 "  -d, --debug             Run salt installation in debug mode"
   echo >&2 "  -p <N>, --ssl-port <N>  SSL port to use for the web applications"
+  echo >&2 "  -t, --test              Test installation running a CWL workflow"
   echo >&2 "  -h, --help              Display this help and exit"
+  echo >&2 "  -v, --vagrant           Run in vagrant and use the /vagrant shared dir"
   echo >&2
 }
 
 arguments() {
   # NOTE: This requires GNU getopt (part of the util-linux package on Debian-based distros).
-  TEMP=`getopt -o hvp: \
-    --long help,vagrant,ssl-port: \
+  TEMP=`getopt -o dhp:tv \
+    --long debug,help,ssl-port:,test,vagrant \
     -n "$0" -- "$@"`
 
   if [ $? != 0 ] ; then echo "GNU getopt missing? Use -h for help"; exit 1 ; fi
@@ -73,6 +78,14 @@ arguments() {
 
   while [ $# -ge 1 ]; do
     case $1 in
+      -d | --debug)
+        LOG_LEVEL="debug"
+        shift
+        ;;
+      -t | --test)
+        TEST="yes"
+        shift
+        ;;
       -v | --vagrant)
         VAGRANT="yes"
         shift
@@ -93,7 +106,9 @@ arguments() {
   done
 }
 
+LOG_LEVEL="info"
 HOST_SSL_PORT=443
+TESTS_DIR="tests"
 
 arguments $@
 
@@ -106,7 +121,7 @@ F_DIR="/srv/formulas"
 P_DIR="/srv/pillars"
 
 apt-get update
-apt-get install -y curl git
+apt-get install -y curl git jq
 
 dpkg -l |grep salt-minion
 if [ ${?} -eq 0 ]; then
@@ -139,6 +154,7 @@ mkdir -p ${P_DIR}
 cat > ${S_DIR}/top.sls << EOFTSLS
 base:
   '*':
+    - example_single_host_host_entries
     - example_add_snakeoil_certs
     - locale
     - nginx.passenger
@@ -169,7 +185,7 @@ EOFPSLS
 # Get the formula and dependencies
 cd ${F_DIR} || exit 1
 for f in postgres arvados nginx docker locale; do
-  git clone https://github.com/saltstack-formulas/${f}-formula.git
+  git clone https://github.com/netmanagers/${f}-formula.git
 done
 
 if [ "x${BRANCH}" != "x" ]; then
@@ -183,15 +199,16 @@ fi
 
 if [ "x${VAGRANT}" = "xyes" ]; then
   SOURCE_PILLARS_DIR="/vagrant/${CONFIG_DIR}"
+  TESTS_DIR="/vagrant/${TESTS_DIR}"
 else
-  SOURCE_PILLARS_DIR="./${CONFIG_DIR}"
+  SOURCE_PILLARS_DIR="${SCRIPT_DIR}/${CONFIG_DIR}"
+  TESTS_DIR="${SCRIPT_DIR}/${TESTS_DIR}"
 fi
 
-# Replace cluster and domain name in the example pillars
+# Replace cluster and domain name in the example pillars and test files
 for f in ${SOURCE_PILLARS_DIR}/*; do
-  # sed "s/example.net/${DOMAIN}/g; s/fixme/${CLUSTER}/g" \
-  sed "s/__DOMAIN__/${DOMAIN}/g;
-       s/__CLUSTER__/${CLUSTER}/g;
+  sed "s/__CLUSTER__/${CLUSTER}/g;
+       s/__DOMAIN__/${DOMAIN}/g;
        s/__RELEASE__/${RELEASE}/g;
        s/__HOST_SSL_PORT__/${HOST_SSL_PORT}/g;
        s/__GUEST_SSL_PORT__/${GUEST_SSL_PORT}/g;
@@ -202,9 +219,18 @@ for f in ${SOURCE_PILLARS_DIR}/*; do
   ${f} > ${P_DIR}/$(basename ${f})
 done
 
-# Let's write an /etc/hosts file that points all the hosts to localhost
-
-echo "127.0.0.2 api keep keep0 collections download ws workbench workbench2 ${CLUSTER}.${DOMAIN} api.${CLUSTER}.${DOMAIN} keep.${CLUSTER}.${DOMAIN} keep0.${CLUSTER}.${DOMAIN} collections.${CLUSTER}.${DOMAIN} download.${CLUSTER}.${DOMAIN} ws.${CLUSTER}.${DOMAIN} workbench.${CLUSTER}.${DOMAIN} workbench2.${CLUSTER}.${DOMAIN}" >> /etc/hosts
+mkdir -p /tmp/cluster_tests
+# Replace cluster and domain name in the example pillars and test files
+for f in ${TESTS_DIR}/*; do
+  sed "s/__CLUSTER__/${CLUSTER}/g;
+       s/__DOMAIN__/${DOMAIN}/g;
+       s/__HOST_SSL_PORT__/${HOST_SSL_PORT}/g;
+       s/__INITIAL_USER__/${INITIAL_USER}/g;
+       s/__INITIAL_USER_EMAIL__/${INITIAL_USER_EMAIL}/g;
+       s/__INITIAL_USER_PASSWORD__/${INITIAL_USER_PASSWORD}/g" \
+  ${f} > /tmp/cluster_tests/$(basename ${f})
+done
+chmod 755 /tmp/cluster_tests/run-test.sh
 
 # FIXME! #16992 Temporary fix for psql call in arvados-api-server
 if [ -e /root/.psqlrc ]; then
@@ -220,7 +246,7 @@ echo '\pset pager off' >> /root/.psqlrc
 # END FIXME! #16992 Temporary fix for psql call in arvados-api-server
 
 # Now run the install
-salt-call --local state.apply -l debug
+salt-call --local state.apply -l ${LOG_LEVEL}
 
 # FIXME! #16992 Temporary fix for psql call in arvados-api-server
 if [ "x${DELETE_PSQL}" = "xyes" ]; then
@@ -229,7 +255,18 @@ if [ "x${DELETE_PSQL}" = "xyes" ]; then
 fi
 
 if [ "x${RESTORE_PSQL}" = "xyes" ]; then
-  echo "Restroting .psql file"
+  echo "Restoring .psql file"
   mv -v /root/.psqlrc.provision.backup /root/.psqlrc
 fi
 # END FIXME! #16992 Temporary fix for psql call in arvados-api-server
+
+# If running in a vagrant VM, add default user to docker group
+if [ "x${VAGRANT}" = "xyes" ]; then
+  usermod -a -G docker vagrant 
+fi
+
+# Test that the installation finished correctly
+if [ "x${TEST}" = "xyes" ]; then
+  cd /tmp/cluster_tests
+  ./run-test.sh
+fi
diff --git a/tools/salt-install/single_host/arvados.sls b/tools/salt-install/single_host/arvados.sls
index ad0cbab70..dffd6575e 100644
--- a/tools/salt-install/single_host/arvados.sls
+++ b/tools/salt-install/single_host/arvados.sls
@@ -78,19 +78,19 @@ arvados:
 
     ### TOKENS
     tokens:
-      system_root: changeme_system_root_token
-      management: changeme_management_token
-      rails_secret: changeme_rails_secret_token
-      anonymous_user: changeme_anonymous_user_token
+      system_root: changemesystemroottoken
+      management: changememanagementtoken
+      rails_secret: changemerailssecrettoken
+      anonymous_user: changemeanonymoususertoken
 
     ### KEYS
     secrets:
-      blob_signing_key: changeme_blob_signing_key
-      workbench_secret_key: changeme_workbench_secret_key
-      dispatcher_access_key: changeme_dispatcher_access_key
-      dispatcher_secret_key: changeme_dispatcher_secret_key
-      keep_access_key: changeme_keep_access_key
-      keep_secret_key: changeme_keep_secret_key
+      blob_signing_key: changemeblobsigningkey
+      workbench_secret_key: changemeworkbenchsecretkey
+      dispatcher_access_key: changemedispatcheraccesskey
+      dispatcher_secret_key: changemedispatchersecretkey
+      keep_access_key: changemekeepaccesskey
+      keep_secret_key: changemekeepsecretkey
 
     Login:
       Test:
@@ -124,7 +124,7 @@ arvados:
       Controller:
         ExternalURL: https://__CLUSTER__.__DOMAIN__:__HOST_SSL_PORT__
         InternalURLs:
-          http://127.0.0.2:8003: {}
+          http://controller.internal:8003: {}
       DispatchCloud:
         InternalURLs:
           http://__CLUSTER__.__DOMAIN__:9006: {}
@@ -134,17 +134,17 @@ arvados:
       Keepproxy:
         ExternalURL: https://keep.__CLUSTER__.__DOMAIN__:__HOST_SSL_PORT__
         InternalURLs:
-          http://127.0.0.2:25100: {}
+          http://keep.internal:25100: {}
       Keepstore:
         InternalURLs:
           http://keep0.__CLUSTER__.__DOMAIN__:25107: {}
       RailsAPI:
         InternalURLs:
-          http://127.0.0.2:8004: {}
+          http://api.internal:8004: {}
       WebDAV:
         ExternalURL: https://collections.__CLUSTER__.__DOMAIN__:__HOST_SSL_PORT__
         InternalURLs:
-          http://127.0.0.2:9002: {}
+          http://collections.internal:9002: {}
       WebDAVDownload:
         ExternalURL: https://download.__CLUSTER__.__DOMAIN__:__HOST_SSL_PORT__
       WebShell:
@@ -152,7 +152,7 @@ arvados:
       Websocket:
         ExternalURL: wss://ws.__CLUSTER__.__DOMAIN__/websocket
         InternalURLs:
-          http://127.0.0.2:8005: {}
+          http://ws.internal:8005: {}
       Workbench1:
         ExternalURL: https://workbench.__CLUSTER__.__DOMAIN__:__HOST_SSL_PORT__
       Workbench2:
diff --git a/tools/salt-install/single_host/nginx_api_configuration.sls b/tools/salt-install/single_host/nginx_api_configuration.sls
index db0bea126..b2f12c773 100644
--- a/tools/salt-install/single_host/nginx_api_configuration.sls
+++ b/tools/salt-install/single_host/nginx_api_configuration.sls
@@ -18,7 +18,7 @@ nginx:
         overwrite: true
         config:
           - server:
-            - listen: '127.0.0.2:8004'
+            - listen: 'api.internal:8004'
             - server_name: api
             - root: /var/www/arvados-api/current/public
             - index:  index.html index.htm
diff --git a/tools/salt-install/single_host/nginx_controller_configuration.sls b/tools/salt-install/single_host/nginx_controller_configuration.sls
index 2b2e7d591..7c99d2dea 100644
--- a/tools/salt-install/single_host/nginx_controller_configuration.sls
+++ b/tools/salt-install/single_host/nginx_controller_configuration.sls
@@ -14,7 +14,7 @@ nginx:
           default: 1
           '127.0.0.0/8': 0
         upstream controller_upstream:
-          - server: '127.0.0.2:8003  fail_timeout=10s'
+          - server: 'controller.internal:8003  fail_timeout=10s'
 
   ### SITES
   servers:
diff --git a/tools/salt-install/single_host/nginx_keepproxy_configuration.sls b/tools/salt-install/single_host/nginx_keepproxy_configuration.sls
index 29cd0cb44..fc4854e5a 100644
--- a/tools/salt-install/single_host/nginx_keepproxy_configuration.sls
+++ b/tools/salt-install/single_host/nginx_keepproxy_configuration.sls
@@ -11,7 +11,7 @@ nginx:
       ### STREAMS
       http:
         upstream keepproxy_upstream:
-          - server: '127.0.0.2:25100 fail_timeout=10s'
+          - server: 'keep.internal:25100 fail_timeout=10s'
 
   servers:
     managed:
diff --git a/tools/salt-install/single_host/nginx_keepweb_configuration.sls b/tools/salt-install/single_host/nginx_keepweb_configuration.sls
index bd0a636b0..513c0393e 100644
--- a/tools/salt-install/single_host/nginx_keepweb_configuration.sls
+++ b/tools/salt-install/single_host/nginx_keepweb_configuration.sls
@@ -11,7 +11,7 @@ nginx:
       ### STREAMS
       http:
         upstream collections_downloads_upstream:
-          - server: '127.0.0.2:9002 fail_timeout=10s'
+          - server: 'collections.internal:9002 fail_timeout=10s'
 
   servers:
     managed:
diff --git a/tools/salt-install/single_host/nginx_webshell_configuration.sls b/tools/salt-install/single_host/nginx_webshell_configuration.sls
index e33ddcea7..495de82d2 100644
--- a/tools/salt-install/single_host/nginx_webshell_configuration.sls
+++ b/tools/salt-install/single_host/nginx_webshell_configuration.sls
@@ -12,7 +12,7 @@ nginx:
       ### STREAMS
       http:
         upstream webshell_upstream:
-          - server: '127.0.0.2:4200 fail_timeout=10s'
+          - server: 'shell.internal:4200 fail_timeout=10s'
 
   ### SITES
   servers:
diff --git a/tools/salt-install/single_host/nginx_websocket_configuration.sls b/tools/salt-install/single_host/nginx_websocket_configuration.sls
index 2241d3b8e..1848a8737 100644
--- a/tools/salt-install/single_host/nginx_websocket_configuration.sls
+++ b/tools/salt-install/single_host/nginx_websocket_configuration.sls
@@ -11,7 +11,7 @@ nginx:
       ### STREAMS
       http:
         upstream websocket_upstream:
-          - server: '127.0.0.2:8005 fail_timeout=10s'
+          - server: 'ws.internal:8005 fail_timeout=10s'
 
   servers:
     managed:
diff --git a/tools/salt-install/single_host/nginx_workbench_configuration.sls b/tools/salt-install/single_host/nginx_workbench_configuration.sls
index 76fb13438..9a382e777 100644
--- a/tools/salt-install/single_host/nginx_workbench_configuration.sls
+++ b/tools/salt-install/single_host/nginx_workbench_configuration.sls
@@ -17,7 +17,7 @@ nginx:
       ### STREAMS
       http:
         upstream workbench_upstream:
-          - server: '127.0.0.2:9000 fail_timeout=10s'
+          - server: 'workbench.internal:9000 fail_timeout=10s'
 
   ### SITES
   servers:
@@ -64,7 +64,7 @@ nginx:
         overwrite: true
         config:
           - server:
-            - listen: '127.0.0.2:9000'
+            - listen: 'workbench.internal:9000'
             - server_name: workbench
             - root: /var/www/arvados-workbench/current/public
             - index:  index.html index.htm
diff --git a/tools/salt-install/tests/hasher-workflow-job.yml b/tools/salt-install/tests/hasher-workflow-job.yml
new file mode 100644
index 000000000..8e5f61167
--- /dev/null
+++ b/tools/salt-install/tests/hasher-workflow-job.yml
@@ -0,0 +1,10 @@
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: Apache-2.0
+
+inputfile:
+  class: File
+  path: test.txt
+hasher1_outputname: hasher1.md5sum.txt
+hasher2_outputname: hasher2.md5sum.txt
+hasher3_outputname: hasher3.md5sum.txt
diff --git a/tools/salt-install/tests/hasher-workflow.cwl b/tools/salt-install/tests/hasher-workflow.cwl
new file mode 100644
index 000000000..a23a22f91
--- /dev/null
+++ b/tools/salt-install/tests/hasher-workflow.cwl
@@ -0,0 +1,65 @@
+#!/usr/bin/env cwl-runner
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: Apache-2.0
+
+cwlVersion: v1.0
+class: Workflow
+
+$namespaces:
+  arv: "http://arvados.org/cwl#"
+  cwltool: "http://commonwl.org/cwltool#"
+
+inputs:
+  inputfile: File
+  hasher1_outputname: string
+  hasher2_outputname: string
+  hasher3_outputname: string
+
+outputs:
+  hasher_out:
+    type: File
+    outputSource: hasher3/hasher_out
+
+steps:
+  hasher1:
+    run: hasher.cwl
+    in:
+      inputfile: inputfile
+      outputname: hasher1_outputname
+    out: [hasher_out]
+    hints:
+      ResourceRequirement:
+        coresMin: 1
+      arv:IntermediateOutput:
+        outputTTL: 3600
+      arv:ReuseRequirement:
+        enableReuse: false
+
+  hasher2:
+    run: hasher.cwl
+    in:
+      inputfile: hasher1/hasher_out
+      outputname: hasher2_outputname
+    out: [hasher_out]
+    hints:
+      ResourceRequirement:
+        coresMin: 1
+      arv:IntermediateOutput:
+        outputTTL: 3600
+      arv:ReuseRequirement:
+        enableReuse: false
+
+  hasher3:
+    run: hasher.cwl
+    in:
+      inputfile: hasher2/hasher_out
+      outputname: hasher3_outputname
+    out: [hasher_out]
+    hints:
+      ResourceRequirement:
+        coresMin: 1
+      arv:IntermediateOutput:
+        outputTTL: 3600
+      arv:ReuseRequirement:
+        enableReuse: false
diff --git a/tools/salt-install/tests/hasher.cwl b/tools/salt-install/tests/hasher.cwl
new file mode 100644
index 000000000..0a0f64f05
--- /dev/null
+++ b/tools/salt-install/tests/hasher.cwl
@@ -0,0 +1,24 @@
+#!/usr/bin/env cwl-runner
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: Apache-2.0
+
+cwlVersion: v1.0
+class: CommandLineTool
+
+baseCommand: md5sum
+inputs:
+  inputfile:
+    type: File
+    inputBinding:
+      position: 1
+  outputname:
+    type: string
+
+stdout: $(inputs.outputname)
+
+outputs:
+  hasher_out:
+    type: File
+    outputBinding:
+      glob: $(inputs.outputname)
diff --git a/tools/salt-install/tests/run-test.sh b/tools/salt-install/tests/run-test.sh
new file mode 100755
index 000000000..b91101ee1
--- /dev/null
+++ b/tools/salt-install/tests/run-test.sh
@@ -0,0 +1,42 @@
+#!/usr/bin/env bash
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: Apache-2.0
+
+export ARVADOS_API_TOKEN=changemesystemroottoken
+export ARVADOS_API_HOST=__CLUSTER__.__DOMAIN__:__HOST_SSL_PORT__
+export ARVADOS_API_HOST_INSECURE=true
+
+
+# https://doc.arvados.org/v2.0/install/install-jobs-image.html
+echo "Creating Arvados Standard Docker Images project"
+uuid_prefix=$(arv --format=uuid user current | cut -d- -f1)
+project_uuid=$(arv --format=uuid group create --group "{\"owner_uuid\": \"${uuid_prefix}-tpzed-000000000000000\", \"group_class\":\"project\", \"name\":\"Arvados Standard Docker Images\"}")
+echo "Arvados project uuid is '${project_uuid}'"
+read -rd $'\000' newlink <<EOF; arv link create --link "${newlink}"
+{
+"tail_uuid":"${uuid_prefix}-j7d0g-fffffffffffffff",
+"head_uuid":"${project_uuid}",
+"link_class":"permission",
+"name":"can_read"
+}
+EOF
+
+echo "Uploading arvados/jobs' docker image to the project"
+VERSION="2.1.1"
+arv-keepdocker --pull arvados/jobs ${VERSION} --project-uuid ${project_uuid}
+
+# Create the initial user
+echo "Creating initial user ('__INITIAL_USER__')"
+user=$(arv --format=uuid user create --user '{"email": "__INITIAL_USER_EMAIL__", "username": "__INITIAL_USER__"}')
+echo "Setting up user ('__INITIAL_USER__')"
+arv user setup --uuid ${user}
+echo "Activating user '__INITIAL_USER__'"
+arv user update --uuid ${user} --user '{"is_active": true}'
+
+user_api_token=$(arv api_client_authorization create --api-client-authorization "{\"owner_uuid\": \"${user}\"}" | jq -r .api_token)
+
+echo "Running test CWL workflow"
+# Change to the user's token and run the workflow
+export ARVADOS_API_TOKEN=${user_api_token}
+cwl-runner hasher-workflow.cwl hasher-workflow-job.yml
diff --git a/tools/salt-install/tests/test.txt b/tools/salt-install/tests/test.txt
new file mode 100644
index 000000000..a9c439556
--- /dev/null
+++ b/tools/salt-install/tests/test.txt
@@ -0,0 +1,5 @@
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: Apache-2.0
+
+test

-----------------------------------------------------------------------


hooks/post-receive
-- 




More information about the arvados-commits mailing list