[ARVADOS] updated: 77f5565c40f52c6929472d7d375ad44b09640958
Git user <git@public.curoverse.com>
Fri Mar 11 02:18:06 EST 2016
Summary of changes:
backports/deb-fuse/fpm-info.sh | 5 --
backports/deb-libfuse-dev/fpm-info.sh | 5 --
backports/python-llfuse/fpm-info.sh | 15 ++--
backports/rpm-fuse-devel/fpm-info.sh | 5 --
backports/rpm-fuse/fpm-info.sh | 5 --
build/run-build-packages.sh | 10 +--
build/run-tests.sh | 11 ++-
sdk/cwl/arvados_cwl/__init__.py | 7 +-
sdk/cwl/setup.py | 2 +-
.../arvados/v1/repositories_controller.rb | 4 +-
.../arvados/v1/repositories_controller_test.rb | 4 +-
services/arv-git-httpd/gitolite_test.go | 14 +++-
services/datamanager/datamanager.go | 2 +-
services/datamanager/datamanager_test.go | 26 ++++++-
services/fuse/arvados_fuse/__init__.py | 47 +++++-------
services/fuse/setup.py | 2 +-
services/fuse/tests/test_mount.py | 12 +--
services/nodemanager/arvnodeman/baseactor.py | 85 ++++++++++++++++++++++
services/nodemanager/arvnodeman/clientactor.py | 2 +-
.../arvnodeman/computenode/dispatch/__init__.py | 11 ++-
services/nodemanager/arvnodeman/config.py | 4 +-
services/nodemanager/arvnodeman/daemon.py | 21 +++---
services/nodemanager/arvnodeman/fullstopactor.py | 17 -----
services/nodemanager/arvnodeman/launcher.py | 12 +--
services/nodemanager/arvnodeman/timedcallback.py | 2 +-
services/nodemanager/tests/test_daemon.py | 1 +
services/nodemanager/tests/test_failure.py | 29 +++-----
services/nodemanager/tests/testutil.py | 5 +-
28 files changed, 211 insertions(+), 154 deletions(-)
delete mode 100644 backports/deb-fuse/fpm-info.sh
delete mode 100644 backports/deb-libfuse-dev/fpm-info.sh
delete mode 100644 backports/rpm-fuse-devel/fpm-info.sh
delete mode 100644 backports/rpm-fuse/fpm-info.sh
create mode 100644 services/nodemanager/arvnodeman/baseactor.py
delete mode 100644 services/nodemanager/arvnodeman/fullstopactor.py
discards 9155994563675fc5d9ca937b79b12d135fde7ee6 (commit)
discards 6070c5b247e86573a568a4eae8feeb91b4c1f517 (commit)
discards 660175722709812a28590aa90b23b2e3beef645c (commit)
discards 49906554b512026fb995c94c7c84ff873d8745aa (commit)
discards ed99c3e8084fd3ae4a60652dc0348a5735a04dd4 (commit)
discards b257d006610748cb133c352f9ce95b61d1c66e17 (commit)
discards e48fa087b669f02e57b8f209e1f2e1d56b85ba05 (commit)
discards 0a0011c987cbec72c7e13762dbc99b8e19db47c1 (commit)
discards 0bbcf2efc3327a51101124e4e92961ed5fa0b73f (commit)
discards cadc05b77abddccb7dea2aac38340b7b767eee7c (commit)
discards de8054d65905f213cc573739d50dd4f401aa4f47 (commit)
discards 0e1993d1d429133e181f385ef388b7ebc97c684b (commit)
discards 0910614bda6825369e5516e98f0a2fe8d545b8e5 (commit)
discards f8b4ec3bb51ef6ab076a3a2f26779c9944623899 (commit)
discards a9fa507c505ab702cae850f4ccd5aae26f6218a9 (commit)
via 77f5565c40f52c6929472d7d375ad44b09640958 (commit)
via 5ef755ed9143cd70dcf2d643baa4ca0c3f8e4288 (commit)
via 89e0608f1b82586efd662f2918ff195b0d0c96e4 (commit)
via 3bddce308692548110019765b0074c1876269b9c (commit)
via c025847bd30e5dc2678fb2ebd4e3e39c8352c60e (commit)
via 7166965c7bda6c727586fb10a00056b3086705f4 (commit)
via d1de3281f023bfdbb62a172dec058caf2496224f (commit)
via 071cbfb5b1b9328d0db5d4e4c07fd0c8d604c39e (commit)
via 648aaa04080e11b0a793d6100260770b878b24c0 (commit)
via c6df16d2af30e989bcfb04f6ef730cde658a9dc9 (commit)
via d54cd5298bb6e043205995c6e5d414a841d9c389 (commit)
via 2dbbaaefc6a4a46a7f17b9e7799fc455cd722113 (commit)
via 054f95044461c08fd5fb6cd983d1e8ea1dc62ea8 (commit)
via f6aee8a6a829c60015506d89a4e87eb9dc96a07a (commit)
via e5c99ebf68f31d630f2a35f7e4e79e93143a3607 (commit)
via e5ee153a56578d13a025cde47fd0c07e21fd975f (commit)
via 0e97dd2d9fe31a6c1cf73471e6e5ca1f33500850 (commit)
This update added new revisions after undoing existing revisions. That is
to say, the old revision is not a strict subset of the new revision. This
situation occurs when you --force push a change and generate a repository
containing something like this:
 * -- * -- B -- O -- O -- O (9155994563675fc5d9ca937b79b12d135fde7ee6)
            \
             N -- N -- N (77f5565c40f52c6929472d7d375ad44b09640958)
When this happens we assume that you've already had alert emails for all
of the O revisions, and so we here report only the revisions in the N
branch from the common base, B.
Those revisions listed above that are new to this repository have
not appeared on any other notification email; so we list those
revisions in full, below.
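For illustration, a history like the diagram above typically arises from rewriting already-pushed commits and then forcing the update; the branch and remote names here are placeholders, not taken from this push:

    git checkout feature-branch
    git rebase -i master                     # rewrites the O commits into new N commits
    git push --force origin feature-branch   # the server now holds N..N instead of O..O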
commit 77f5565c40f52c6929472d7d375ad44b09640958
Author: Tom Clegg <tom@curoverse.com>
Date: Fri Mar 11 02:15:10 2016 -0500
8491: Downgrade to llfuse 0.41 until #8345 gets sorted out (but keep the fuse backports).
diff --git a/build/run-build-packages.sh b/build/run-build-packages.sh
index 136b73c..0ada192 100755
--- a/build/run-build-packages.sh
+++ b/build/run-build-packages.sh
@@ -92,7 +92,7 @@ case "$TARGET" in
PYTHON_BACKPORTS=(python-gflags pyvcf google-api-python-client==1.4.2 \
oauth2client==1.5.2 pyasn1==0.1.7 pyasn1-modules==0.0.5 \
rsa uritemplate httplib2 ws4py pykka six pyexecjs jsonschema \
- ciso8601 pycrypto backports.ssl_match_hostname llfuse \
+ ciso8601 pycrypto backports.ssl_match_hostname llfuse==0.41.1 \
'pycurl<7.21.5' contextlib2)
PYTHON3_BACKPORTS=(docker-py six requests websocket-client)
;;
@@ -105,7 +105,7 @@ case "$TARGET" in
PYTHON_BACKPORTS=(python-gflags pyvcf google-api-python-client==1.4.2 \
oauth2client==1.5.2 pyasn1==0.1.7 pyasn1-modules==0.0.5 \
rsa uritemplate httplib2 ws4py pykka six pyexecjs jsonschema \
- ciso8601 pycrypto backports.ssl_match_hostname llfuse \
+ ciso8601 pycrypto backports.ssl_match_hostname llfuse==0.41.1 \
'pycurl<7.21.5')
PYTHON3_BACKPORTS=(docker-py six requests websocket-client)
;;
@@ -118,7 +118,7 @@ case "$TARGET" in
PYTHON_BACKPORTS=(python-gflags pyvcf google-api-python-client==1.4.2 \
oauth2client==1.5.2 pyasn1==0.1.7 pyasn1-modules==0.0.5 \
rsa uritemplate httplib2 ws4py pykka six pyexecjs jsonschema \
- ciso8601 pycrypto backports.ssl_match_hostname llfuse \
+ ciso8601 pycrypto backports.ssl_match_hostname llfuse==0.41.1 \
contextlib2 \
'pycurl<7.21.5')
PYTHON3_BACKPORTS=(docker-py six requests websocket-client)
@@ -129,7 +129,7 @@ case "$TARGET" in
PYTHON2_PKG_PREFIX=python
PYTHON3_PACKAGE=python$PYTHON3_VERSION
PYTHON3_PKG_PREFIX=python3
- PYTHON_BACKPORTS=(pyasn1==0.1.7 pyvcf pyasn1-modules==0.0.5 llfuse ciso8601 \
+ PYTHON_BACKPORTS=(pyasn1==0.1.7 pyvcf pyasn1-modules==0.0.5 llfuse==0.41.1 ciso8601 \
google-api-python-client==1.4.2 six uritemplate oauth2client==1.5.2 httplib2 \
rsa 'pycurl<7.21.5' backports.ssl_match_hostname)
PYTHON3_BACKPORTS=(docker-py requests websocket-client)
@@ -144,7 +144,7 @@ case "$TARGET" in
oauth2client==1.5.2 pyasn1==0.1.7 pyasn1-modules==0.0.5 \
rsa uritemplate httplib2 ws4py pykka six pyexecjs jsonschema \
ciso8601 pycrypto backports.ssl_match_hostname 'pycurl<7.21.5' \
- python-daemon lockfile llfuse 'pbr<1.0')
+ python-daemon lockfile llfuse==0.41.1 'pbr<1.0')
PYTHON3_BACKPORTS=(docker-py six requests websocket-client)
export PYCURL_SSL_LIBRARY=nss
;;
diff --git a/build/run-tests.sh b/build/run-tests.sh
index 3519e55..06ba3cd 100755
--- a/build/run-tests.sh
+++ b/build/run-tests.sh
@@ -425,10 +425,13 @@ pip freeze 2>/dev/null | egrep ^apache-libcloud==$LIBCLOUD_PIN \
|| pip install --pre --ignore-installed https://github.com/curoverse/libcloud/archive/apache-libcloud-$LIBCLOUD_PIN.zip >/dev/null \
|| fatal "pip install apache-libcloud failed"
-# Uninstall old llfuse, because services/fuse "pip install" won't
-# upgrade it by default.
-if pip freeze | egrep '^llfuse==0\.41\.'; then
- yes | pip uninstall 'llfuse<0.42'
+# This will help people who reuse --temp dirs when we upgrade to llfuse 0.42
+if egrep -q 'llfuse.*>= *0\.42' "$WORKSPACE/services/fuse/setup.py"; then
+ # Uninstall old llfuse, because services/fuse "pip install" won't
+ # upgrade it by default.
+ if pip freeze | egrep '^llfuse==0\.41\.'; then
+ yes | pip uninstall 'llfuse<0.42'
+ fi
fi
# Deactivate Python 2 virtualenv
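The pin above can be checked locally much as run-tests.sh does; a minimal sketch, assuming the Python 2 virtualenv that the script sets up:

    pip install 'llfuse==0.41.1'
    pip freeze | egrep '^llfuse=='   # expect: llfuse==0.41.1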
commit 5ef755ed9143cd70dcf2d643baa4ca0c3f8e4288
Author: Tom Clegg <tom@curoverse.com>
Date: Wed Mar 9 16:01:22 2016 -0500
8491: 8222: Prohibit activesupport 4.2.6 so our gem can be installed with ruby <2.2.2
diff --git a/sdk/ruby/arvados.gemspec b/sdk/ruby/arvados.gemspec
index 3adcf4d..3d090f4 100644
--- a/sdk/ruby/arvados.gemspec
+++ b/sdk/ruby/arvados.gemspec
@@ -19,10 +19,11 @@ Gem::Specification.new do |s|
"lib/arvados/collection.rb", "lib/arvados/keep.rb",
"README", "LICENSE-2.0.txt"]
s.required_ruby_version = '>= 2.1.0'
+ # activesupport <4.2.6 only because https://dev.arvados.org/issues/8222
+ s.add_dependency('activesupport', '>= 3.2.13', '< 4.2.6')
+ s.add_dependency('andand', '~> 1.3', '>= 1.3.3')
s.add_dependency('google-api-client', '~> 0.6.3', '>= 0.6.3')
- s.add_dependency('activesupport', '>= 3.2.13')
s.add_dependency('json', '~> 1.7', '>= 1.7.7')
- s.add_dependency('andand', '~> 1.3', '>= 1.3.3')
s.add_runtime_dependency('jwt', '>= 0.1.5', '< 1.0.0')
s.homepage =
'https://arvados.org'
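A rough way to exercise the tightened constraint (illustrative commands only, run with a Ruby older than 2.2.2): dependency resolution should now settle on an activesupport release below 4.2.6, which per the commit message is what lets the gem install on Ruby < 2.2.2.

    ruby -v                        # e.g. ruby 2.1.x
    gem build arvados.gemspec
    gem install ./arvados-*.gem    # resolves activesupport to >= 3.2.13, < 4.2.6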
commit 89e0608f1b82586efd662f2918ff195b0d0c96e4
Author: Tom Clegg <tom@curoverse.com>
Date: Wed Mar 9 13:36:49 2016 -0500
8491: Remove git-commit.version before running api server tests
diff --git a/build/run-tests.sh b/build/run-tests.sh
index 423dd0e..3519e55 100755
--- a/build/run-tests.sh
+++ b/build/run-tests.sh
@@ -768,6 +768,7 @@ do_test doc doclinkchecker
stop_services
test_apiserver() {
+ rm -f "$WORKSPACE/services/api/git-commit.version"
cd "$WORKSPACE/services/api" \
&& RAILS_ENV=test bundle exec rake test TESTOPTS=-v ${testargs[services/api]}
}
commit 3bddce308692548110019765b0074c1876269b9c
Author: Tom Clegg <tom@curoverse.com>
Date: Wed Mar 9 13:36:16 2016 -0500
8491: --retry applies to do_install too
diff --git a/build/run-tests.sh b/build/run-tests.sh
index a17d610..423dd0e 100755
--- a/build/run-tests.sh
+++ b/build/run-tests.sh
@@ -481,8 +481,8 @@ timer() {
echo -n "$(($SECONDS - $t0))s"
}
-do_test() {
- while ! do_test_once ${@} && [[ "$retry" == 1 ]]
+retry() {
+ while ! ${@} && [[ "$retry" == 1 ]]
do
read -p 'Try again? [Y/n] ' x
if [[ "$x" != "y" ]] && [[ "$x" != "" ]]
@@ -492,6 +492,10 @@ do_test() {
done
}
+do_test() {
+ retry do_test_once ${@}
+}
+
do_test_once() {
unset result
if [[ -z "${skip[$1]}" ]] && ( [[ -z "$only" ]] || [[ "$only" == "$1" ]] )
@@ -544,6 +548,10 @@ do_test_once() {
}
do_install() {
+ retry do_install_once ${@}
+}
+
+do_install_once() {
if [[ -z "$skip_install" || (-n "$only_install" && "$only_install" == "$1") ]]
then
title "Running $1 install"
@@ -576,8 +584,10 @@ do_install() {
else
"install_$1"
fi
- checkexit $? "$1 install"
+ result=$?
+ checkexit $result "$1 install"
title "End of $1 install (`timer`)"
+ return $result
else
title "Skipping $1 install"
fi
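Stripped of the run-tests.sh specifics, the wrapper introduced above follows this general pattern (simplified sketch; the $retry flag is omitted and the final command line is only a placeholder):

    retry() {
        while ! "$@"; do
            read -p 'Try again? [Y/n] ' x
            if [[ "$x" != "y" && "$x" != "Y" && "$x" != "" ]]; then
                return 1
            fi
        done
    }
    retry some_install_step --with args    # any command plus its arguments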
commit c025847bd30e5dc2678fb2ebd4e3e39c8352c60e
Author: Tom Clegg <tom@curoverse.com>
Date: Tue Mar 8 16:37:42 2016 -0500
8491: move files from arvados-dev into their new places
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..921eb02
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,7 @@
+export WORKSPACE?=$(shell pwd)
+test:
+ build/run-tests.sh ${TEST_FLAGS}
+packages:
+ build/run-build-packages-all-targets.sh ${PACKAGES_FLAGS}
+test-packages:
+ build/run-build-packages-all-targets.sh --test-packages ${PACKAGES_FLAGS}
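Typical invocations of the new targets, from the top of the source tree (TEST_FLAGS and PACKAGES_FLAGS are passed straight through to the underlying scripts):

    make test
    make packages
    make test-packages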
diff --git a/build/COPYING b/build/COPYING
deleted file mode 100644
index af63e41..0000000
--- a/build/COPYING
+++ /dev/null
@@ -1,2 +0,0 @@
-This code is licenced under the GNU Affero General Public License version 3
-(see agpl-3.0.txt)
diff --git a/build/README b/build/README
deleted file mode 100644
index b076f0b..0000000
--- a/build/README
+++ /dev/null
@@ -1,30 +0,0 @@
-Welcome to Arvados!
-
-This is the arvados-dev source tree. It contains scripts that can be useful
-if you want to hack on Arvados itself.
-
-If you are interested in using Arvados or setting up your own Arvados
-installation, you most likely do not need this source tree.
-
-For the Arvados source code, check out the git repository at
- https://github.com/curoverse/arvados
-
-The main Arvados web site is
- https://arvados.org
-
-The Arvados public wiki is located at
- https://arvados.org/projects/arvados/wiki
-
-The Arvados public bug tracker is located at
- https://arvados.org/projects/arvados/issues
-
-For support see
- http://doc.arvados.org/user/getting_started/community.html
-
-Installation documentation is located at
- http://doc.arvados.org/install
-
-If you wish to build the documentation yourself, follow the instructions in
-doc/README to build the documentation, then consult the "Install Guide".
-
-See COPYING for information about Arvados Free Software licenses.
diff --git a/build/agpl-3.0.txt b/build/agpl-3.0.txt
deleted file mode 100644
index dba13ed..0000000
--- a/build/agpl-3.0.txt
+++ /dev/null
@@ -1,661 +0,0 @@
- GNU AFFERO GENERAL PUBLIC LICENSE
- Version 3, 19 November 2007
-
- Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
- Everyone is permitted to copy and distribute verbatim copies
- of this license document, but changing it is not allowed.
-
- Preamble
-
- The GNU Affero General Public License is a free, copyleft license for
-software and other kinds of works, specifically designed to ensure
-cooperation with the community in the case of network server software.
-
- The licenses for most software and other practical works are designed
-to take away your freedom to share and change the works. By contrast,
-our General Public Licenses are intended to guarantee your freedom to
-share and change all versions of a program--to make sure it remains free
-software for all its users.
-
- When we speak of free software, we are referring to freedom, not
-price. Our General Public Licenses are designed to make sure that you
-have the freedom to distribute copies of free software (and charge for
-them if you wish), that you receive source code or can get it if you
-want it, that you can change the software or use pieces of it in new
-free programs, and that you know you can do these things.
-
- Developers that use our General Public Licenses protect your rights
-with two steps: (1) assert copyright on the software, and (2) offer
-you this License which gives you legal permission to copy, distribute
-and/or modify the software.
-
- A secondary benefit of defending all users' freedom is that
-improvements made in alternate versions of the program, if they
-receive widespread use, become available for other developers to
-incorporate. Many developers of free software are heartened and
-encouraged by the resulting cooperation. However, in the case of
-software used on network servers, this result may fail to come about.
-The GNU General Public License permits making a modified version and
-letting the public access it on a server without ever releasing its
-source code to the public.
-
- The GNU Affero General Public License is designed specifically to
-ensure that, in such cases, the modified source code becomes available
-to the community. It requires the operator of a network server to
-provide the source code of the modified version running there to the
-users of that server. Therefore, public use of a modified version, on
-a publicly accessible server, gives the public access to the source
-code of the modified version.
-
- An older license, called the Affero General Public License and
-published by Affero, was designed to accomplish similar goals. This is
-a different license, not a version of the Affero GPL, but Affero has
-released a new version of the Affero GPL which permits relicensing under
-this license.
-
- The precise terms and conditions for copying, distribution and
-modification follow.
-
- TERMS AND CONDITIONS
-
- 0. Definitions.
-
- "This License" refers to version 3 of the GNU Affero General Public License.
-
- "Copyright" also means copyright-like laws that apply to other kinds of
-works, such as semiconductor masks.
-
- "The Program" refers to any copyrightable work licensed under this
-License. Each licensee is addressed as "you". "Licensees" and
-"recipients" may be individuals or organizations.
-
- To "modify" a work means to copy from or adapt all or part of the work
-in a fashion requiring copyright permission, other than the making of an
-exact copy. The resulting work is called a "modified version" of the
-earlier work or a work "based on" the earlier work.
-
- A "covered work" means either the unmodified Program or a work based
-on the Program.
-
- To "propagate" a work means to do anything with it that, without
-permission, would make you directly or secondarily liable for
-infringement under applicable copyright law, except executing it on a
-computer or modifying a private copy. Propagation includes copying,
-distribution (with or without modification), making available to the
-public, and in some countries other activities as well.
-
- To "convey" a work means any kind of propagation that enables other
-parties to make or receive copies. Mere interaction with a user through
-a computer network, with no transfer of a copy, is not conveying.
-
- An interactive user interface displays "Appropriate Legal Notices"
-to the extent that it includes a convenient and prominently visible
-feature that (1) displays an appropriate copyright notice, and (2)
-tells the user that there is no warranty for the work (except to the
-extent that warranties are provided), that licensees may convey the
-work under this License, and how to view a copy of this License. If
-the interface presents a list of user commands or options, such as a
-menu, a prominent item in the list meets this criterion.
-
- 1. Source Code.
-
- The "source code" for a work means the preferred form of the work
-for making modifications to it. "Object code" means any non-source
-form of a work.
-
- A "Standard Interface" means an interface that either is an official
-standard defined by a recognized standards body, or, in the case of
-interfaces specified for a particular programming language, one that
-is widely used among developers working in that language.
-
- The "System Libraries" of an executable work include anything, other
-than the work as a whole, that (a) is included in the normal form of
-packaging a Major Component, but which is not part of that Major
-Component, and (b) serves only to enable use of the work with that
-Major Component, or to implement a Standard Interface for which an
-implementation is available to the public in source code form. A
-"Major Component", in this context, means a major essential component
-(kernel, window system, and so on) of the specific operating system
-(if any) on which the executable work runs, or a compiler used to
-produce the work, or an object code interpreter used to run it.
-
- The "Corresponding Source" for a work in object code form means all
-the source code needed to generate, install, and (for an executable
-work) run the object code and to modify the work, including scripts to
-control those activities. However, it does not include the work's
-System Libraries, or general-purpose tools or generally available free
-programs which are used unmodified in performing those activities but
-which are not part of the work. For example, Corresponding Source
-includes interface definition files associated with source files for
-the work, and the source code for shared libraries and dynamically
-linked subprograms that the work is specifically designed to require,
-such as by intimate data communication or control flow between those
-subprograms and other parts of the work.
-
- The Corresponding Source need not include anything that users
-can regenerate automatically from other parts of the Corresponding
-Source.
-
- The Corresponding Source for a work in source code form is that
-same work.
-
- 2. Basic Permissions.
-
- All rights granted under this License are granted for the term of
-copyright on the Program, and are irrevocable provided the stated
-conditions are met. This License explicitly affirms your unlimited
-permission to run the unmodified Program. The output from running a
-covered work is covered by this License only if the output, given its
-content, constitutes a covered work. This License acknowledges your
-rights of fair use or other equivalent, as provided by copyright law.
-
- You may make, run and propagate covered works that you do not
-convey, without conditions so long as your license otherwise remains
-in force. You may convey covered works to others for the sole purpose
-of having them make modifications exclusively for you, or provide you
-with facilities for running those works, provided that you comply with
-the terms of this License in conveying all material for which you do
-not control copyright. Those thus making or running the covered works
-for you must do so exclusively on your behalf, under your direction
-and control, on terms that prohibit them from making any copies of
-your copyrighted material outside their relationship with you.
-
- Conveying under any other circumstances is permitted solely under
-the conditions stated below. Sublicensing is not allowed; section 10
-makes it unnecessary.
-
- 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
-
- No covered work shall be deemed part of an effective technological
-measure under any applicable law fulfilling obligations under article
-11 of the WIPO copyright treaty adopted on 20 December 1996, or
-similar laws prohibiting or restricting circumvention of such
-measures.
-
- When you convey a covered work, you waive any legal power to forbid
-circumvention of technological measures to the extent such circumvention
-is effected by exercising rights under this License with respect to
-the covered work, and you disclaim any intention to limit operation or
-modification of the work as a means of enforcing, against the work's
-users, your or third parties' legal rights to forbid circumvention of
-technological measures.
-
- 4. Conveying Verbatim Copies.
-
- You may convey verbatim copies of the Program's source code as you
-receive it, in any medium, provided that you conspicuously and
-appropriately publish on each copy an appropriate copyright notice;
-keep intact all notices stating that this License and any
-non-permissive terms added in accord with section 7 apply to the code;
-keep intact all notices of the absence of any warranty; and give all
-recipients a copy of this License along with the Program.
-
- You may charge any price or no price for each copy that you convey,
-and you may offer support or warranty protection for a fee.
-
- 5. Conveying Modified Source Versions.
-
- You may convey a work based on the Program, or the modifications to
-produce it from the Program, in the form of source code under the
-terms of section 4, provided that you also meet all of these conditions:
-
- a) The work must carry prominent notices stating that you modified
- it, and giving a relevant date.
-
- b) The work must carry prominent notices stating that it is
- released under this License and any conditions added under section
- 7. This requirement modifies the requirement in section 4 to
- "keep intact all notices".
-
- c) You must license the entire work, as a whole, under this
- License to anyone who comes into possession of a copy. This
- License will therefore apply, along with any applicable section 7
- additional terms, to the whole of the work, and all its parts,
- regardless of how they are packaged. This License gives no
- permission to license the work in any other way, but it does not
- invalidate such permission if you have separately received it.
-
- d) If the work has interactive user interfaces, each must display
- Appropriate Legal Notices; however, if the Program has interactive
- interfaces that do not display Appropriate Legal Notices, your
- work need not make them do so.
-
- A compilation of a covered work with other separate and independent
-works, which are not by their nature extensions of the covered work,
-and which are not combined with it such as to form a larger program,
-in or on a volume of a storage or distribution medium, is called an
-"aggregate" if the compilation and its resulting copyright are not
-used to limit the access or legal rights of the compilation's users
-beyond what the individual works permit. Inclusion of a covered work
-in an aggregate does not cause this License to apply to the other
-parts of the aggregate.
-
- 6. Conveying Non-Source Forms.
-
- You may convey a covered work in object code form under the terms
-of sections 4 and 5, provided that you also convey the
-machine-readable Corresponding Source under the terms of this License,
-in one of these ways:
-
- a) Convey the object code in, or embodied in, a physical product
- (including a physical distribution medium), accompanied by the
- Corresponding Source fixed on a durable physical medium
- customarily used for software interchange.
-
- b) Convey the object code in, or embodied in, a physical product
- (including a physical distribution medium), accompanied by a
- written offer, valid for at least three years and valid for as
- long as you offer spare parts or customer support for that product
- model, to give anyone who possesses the object code either (1) a
- copy of the Corresponding Source for all the software in the
- product that is covered by this License, on a durable physical
- medium customarily used for software interchange, for a price no
- more than your reasonable cost of physically performing this
- conveying of source, or (2) access to copy the
- Corresponding Source from a network server at no charge.
-
- c) Convey individual copies of the object code with a copy of the
- written offer to provide the Corresponding Source. This
- alternative is allowed only occasionally and noncommercially, and
- only if you received the object code with such an offer, in accord
- with subsection 6b.
-
- d) Convey the object code by offering access from a designated
- place (gratis or for a charge), and offer equivalent access to the
- Corresponding Source in the same way through the same place at no
- further charge. You need not require recipients to copy the
- Corresponding Source along with the object code. If the place to
- copy the object code is a network server, the Corresponding Source
- may be on a different server (operated by you or a third party)
- that supports equivalent copying facilities, provided you maintain
- clear directions next to the object code saying where to find the
- Corresponding Source. Regardless of what server hosts the
- Corresponding Source, you remain obligated to ensure that it is
- available for as long as needed to satisfy these requirements.
-
- e) Convey the object code using peer-to-peer transmission, provided
- you inform other peers where the object code and Corresponding
- Source of the work are being offered to the general public at no
- charge under subsection 6d.
-
- A separable portion of the object code, whose source code is excluded
-from the Corresponding Source as a System Library, need not be
-included in conveying the object code work.
-
- A "User Product" is either (1) a "consumer product", which means any
-tangible personal property which is normally used for personal, family,
-or household purposes, or (2) anything designed or sold for incorporation
-into a dwelling. In determining whether a product is a consumer product,
-doubtful cases shall be resolved in favor of coverage. For a particular
-product received by a particular user, "normally used" refers to a
-typical or common use of that class of product, regardless of the status
-of the particular user or of the way in which the particular user
-actually uses, or expects or is expected to use, the product. A product
-is a consumer product regardless of whether the product has substantial
-commercial, industrial or non-consumer uses, unless such uses represent
-the only significant mode of use of the product.
-
- "Installation Information" for a User Product means any methods,
-procedures, authorization keys, or other information required to install
-and execute modified versions of a covered work in that User Product from
-a modified version of its Corresponding Source. The information must
-suffice to ensure that the continued functioning of the modified object
-code is in no case prevented or interfered with solely because
-modification has been made.
-
- If you convey an object code work under this section in, or with, or
-specifically for use in, a User Product, and the conveying occurs as
-part of a transaction in which the right of possession and use of the
-User Product is transferred to the recipient in perpetuity or for a
-fixed term (regardless of how the transaction is characterized), the
-Corresponding Source conveyed under this section must be accompanied
-by the Installation Information. But this requirement does not apply
-if neither you nor any third party retains the ability to install
-modified object code on the User Product (for example, the work has
-been installed in ROM).
-
- The requirement to provide Installation Information does not include a
-requirement to continue to provide support service, warranty, or updates
-for a work that has been modified or installed by the recipient, or for
-the User Product in which it has been modified or installed. Access to a
-network may be denied when the modification itself materially and
-adversely affects the operation of the network or violates the rules and
-protocols for communication across the network.
-
- Corresponding Source conveyed, and Installation Information provided,
-in accord with this section must be in a format that is publicly
-documented (and with an implementation available to the public in
-source code form), and must require no special password or key for
-unpacking, reading or copying.
-
- 7. Additional Terms.
-
- "Additional permissions" are terms that supplement the terms of this
-License by making exceptions from one or more of its conditions.
-Additional permissions that are applicable to the entire Program shall
-be treated as though they were included in this License, to the extent
-that they are valid under applicable law. If additional permissions
-apply only to part of the Program, that part may be used separately
-under those permissions, but the entire Program remains governed by
-this License without regard to the additional permissions.
-
- When you convey a copy of a covered work, you may at your option
-remove any additional permissions from that copy, or from any part of
-it. (Additional permissions may be written to require their own
-removal in certain cases when you modify the work.) You may place
-additional permissions on material, added by you to a covered work,
-for which you have or can give appropriate copyright permission.
-
- Notwithstanding any other provision of this License, for material you
-add to a covered work, you may (if authorized by the copyright holders of
-that material) supplement the terms of this License with terms:
-
- a) Disclaiming warranty or limiting liability differently from the
- terms of sections 15 and 16 of this License; or
-
- b) Requiring preservation of specified reasonable legal notices or
- author attributions in that material or in the Appropriate Legal
- Notices displayed by works containing it; or
-
- c) Prohibiting misrepresentation of the origin of that material, or
- requiring that modified versions of such material be marked in
- reasonable ways as different from the original version; or
-
- d) Limiting the use for publicity purposes of names of licensors or
- authors of the material; or
-
- e) Declining to grant rights under trademark law for use of some
- trade names, trademarks, or service marks; or
-
- f) Requiring indemnification of licensors and authors of that
- material by anyone who conveys the material (or modified versions of
- it) with contractual assumptions of liability to the recipient, for
- any liability that these contractual assumptions directly impose on
- those licensors and authors.
-
- All other non-permissive additional terms are considered "further
-restrictions" within the meaning of section 10. If the Program as you
-received it, or any part of it, contains a notice stating that it is
-governed by this License along with a term that is a further
-restriction, you may remove that term. If a license document contains
-a further restriction but permits relicensing or conveying under this
-License, you may add to a covered work material governed by the terms
-of that license document, provided that the further restriction does
-not survive such relicensing or conveying.
-
- If you add terms to a covered work in accord with this section, you
-must place, in the relevant source files, a statement of the
-additional terms that apply to those files, or a notice indicating
-where to find the applicable terms.
-
- Additional terms, permissive or non-permissive, may be stated in the
-form of a separately written license, or stated as exceptions;
-the above requirements apply either way.
-
- 8. Termination.
-
- You may not propagate or modify a covered work except as expressly
-provided under this License. Any attempt otherwise to propagate or
-modify it is void, and will automatically terminate your rights under
-this License (including any patent licenses granted under the third
-paragraph of section 11).
-
- However, if you cease all violation of this License, then your
-license from a particular copyright holder is reinstated (a)
-provisionally, unless and until the copyright holder explicitly and
-finally terminates your license, and (b) permanently, if the copyright
-holder fails to notify you of the violation by some reasonable means
-prior to 60 days after the cessation.
-
- Moreover, your license from a particular copyright holder is
-reinstated permanently if the copyright holder notifies you of the
-violation by some reasonable means, this is the first time you have
-received notice of violation of this License (for any work) from that
-copyright holder, and you cure the violation prior to 30 days after
-your receipt of the notice.
-
- Termination of your rights under this section does not terminate the
-licenses of parties who have received copies or rights from you under
-this License. If your rights have been terminated and not permanently
-reinstated, you do not qualify to receive new licenses for the same
-material under section 10.
-
- 9. Acceptance Not Required for Having Copies.
-
- You are not required to accept this License in order to receive or
-run a copy of the Program. Ancillary propagation of a covered work
-occurring solely as a consequence of using peer-to-peer transmission
-to receive a copy likewise does not require acceptance. However,
-nothing other than this License grants you permission to propagate or
-modify any covered work. These actions infringe copyright if you do
-not accept this License. Therefore, by modifying or propagating a
-covered work, you indicate your acceptance of this License to do so.
-
- 10. Automatic Licensing of Downstream Recipients.
-
- Each time you convey a covered work, the recipient automatically
-receives a license from the original licensors, to run, modify and
-propagate that work, subject to this License. You are not responsible
-for enforcing compliance by third parties with this License.
-
- An "entity transaction" is a transaction transferring control of an
-organization, or substantially all assets of one, or subdividing an
-organization, or merging organizations. If propagation of a covered
-work results from an entity transaction, each party to that
-transaction who receives a copy of the work also receives whatever
-licenses to the work the party's predecessor in interest had or could
-give under the previous paragraph, plus a right to possession of the
-Corresponding Source of the work from the predecessor in interest, if
-the predecessor has it or can get it with reasonable efforts.
-
- You may not impose any further restrictions on the exercise of the
-rights granted or affirmed under this License. For example, you may
-not impose a license fee, royalty, or other charge for exercise of
-rights granted under this License, and you may not initiate litigation
-(including a cross-claim or counterclaim in a lawsuit) alleging that
-any patent claim is infringed by making, using, selling, offering for
-sale, or importing the Program or any portion of it.
-
- 11. Patents.
-
- A "contributor" is a copyright holder who authorizes use under this
-License of the Program or a work on which the Program is based. The
-work thus licensed is called the contributor's "contributor version".
-
- A contributor's "essential patent claims" are all patent claims
-owned or controlled by the contributor, whether already acquired or
-hereafter acquired, that would be infringed by some manner, permitted
-by this License, of making, using, or selling its contributor version,
-but do not include claims that would be infringed only as a
-consequence of further modification of the contributor version. For
-purposes of this definition, "control" includes the right to grant
-patent sublicenses in a manner consistent with the requirements of
-this License.
-
- Each contributor grants you a non-exclusive, worldwide, royalty-free
-patent license under the contributor's essential patent claims, to
-make, use, sell, offer for sale, import and otherwise run, modify and
-propagate the contents of its contributor version.
-
- In the following three paragraphs, a "patent license" is any express
-agreement or commitment, however denominated, not to enforce a patent
-(such as an express permission to practice a patent or covenant not to
-sue for patent infringement). To "grant" such a patent license to a
-party means to make such an agreement or commitment not to enforce a
-patent against the party.
-
- If you convey a covered work, knowingly relying on a patent license,
-and the Corresponding Source of the work is not available for anyone
-to copy, free of charge and under the terms of this License, through a
-publicly available network server or other readily accessible means,
-then you must either (1) cause the Corresponding Source to be so
-available, or (2) arrange to deprive yourself of the benefit of the
-patent license for this particular work, or (3) arrange, in a manner
-consistent with the requirements of this License, to extend the patent
-license to downstream recipients. "Knowingly relying" means you have
-actual knowledge that, but for the patent license, your conveying the
-covered work in a country, or your recipient's use of the covered work
-in a country, would infringe one or more identifiable patents in that
-country that you have reason to believe are valid.
-
- If, pursuant to or in connection with a single transaction or
-arrangement, you convey, or propagate by procuring conveyance of, a
-covered work, and grant a patent license to some of the parties
-receiving the covered work authorizing them to use, propagate, modify
-or convey a specific copy of the covered work, then the patent license
-you grant is automatically extended to all recipients of the covered
-work and works based on it.
-
- A patent license is "discriminatory" if it does not include within
-the scope of its coverage, prohibits the exercise of, or is
-conditioned on the non-exercise of one or more of the rights that are
-specifically granted under this License. You may not convey a covered
-work if you are a party to an arrangement with a third party that is
-in the business of distributing software, under which you make payment
-to the third party based on the extent of your activity of conveying
-the work, and under which the third party grants, to any of the
-parties who would receive the covered work from you, a discriminatory
-patent license (a) in connection with copies of the covered work
-conveyed by you (or copies made from those copies), or (b) primarily
-for and in connection with specific products or compilations that
-contain the covered work, unless you entered into that arrangement,
-or that patent license was granted, prior to 28 March 2007.
-
- Nothing in this License shall be construed as excluding or limiting
-any implied license or other defenses to infringement that may
-otherwise be available to you under applicable patent law.
-
- 12. No Surrender of Others' Freedom.
-
- If conditions are imposed on you (whether by court order, agreement or
-otherwise) that contradict the conditions of this License, they do not
-excuse you from the conditions of this License. If you cannot convey a
-covered work so as to satisfy simultaneously your obligations under this
-License and any other pertinent obligations, then as a consequence you may
-not convey it at all. For example, if you agree to terms that obligate you
-to collect a royalty for further conveying from those to whom you convey
-the Program, the only way you could satisfy both those terms and this
-License would be to refrain entirely from conveying the Program.
-
- 13. Remote Network Interaction; Use with the GNU General Public License.
-
- Notwithstanding any other provision of this License, if you modify the
-Program, your modified version must prominently offer all users
-interacting with it remotely through a computer network (if your version
-supports such interaction) an opportunity to receive the Corresponding
-Source of your version by providing access to the Corresponding Source
-from a network server at no charge, through some standard or customary
-means of facilitating copying of software. This Corresponding Source
-shall include the Corresponding Source for any work covered by version 3
-of the GNU General Public License that is incorporated pursuant to the
-following paragraph.
-
- Notwithstanding any other provision of this License, you have
-permission to link or combine any covered work with a work licensed
-under version 3 of the GNU General Public License into a single
-combined work, and to convey the resulting work. The terms of this
-License will continue to apply to the part which is the covered work,
-but the work with which it is combined will remain governed by version
-3 of the GNU General Public License.
-
- 14. Revised Versions of this License.
-
- The Free Software Foundation may publish revised and/or new versions of
-the GNU Affero General Public License from time to time. Such new versions
-will be similar in spirit to the present version, but may differ in detail to
-address new problems or concerns.
-
- Each version is given a distinguishing version number. If the
-Program specifies that a certain numbered version of the GNU Affero General
-Public License "or any later version" applies to it, you have the
-option of following the terms and conditions either of that numbered
-version or of any later version published by the Free Software
-Foundation. If the Program does not specify a version number of the
-GNU Affero General Public License, you may choose any version ever published
-by the Free Software Foundation.
-
- If the Program specifies that a proxy can decide which future
-versions of the GNU Affero General Public License can be used, that proxy's
-public statement of acceptance of a version permanently authorizes you
-to choose that version for the Program.
-
- Later license versions may give you additional or different
-permissions. However, no additional obligations are imposed on any
-author or copyright holder as a result of your choosing to follow a
-later version.
-
- 15. Disclaimer of Warranty.
-
- THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
-APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
-HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
-OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
-IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
-ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
-
- 16. Limitation of Liability.
-
- IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
-WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
-THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
-GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
-USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
-DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
-PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
-EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
-SUCH DAMAGES.
-
- 17. Interpretation of Sections 15 and 16.
-
- If the disclaimer of warranty and limitation of liability provided
-above cannot be given local legal effect according to their terms,
-reviewing courts shall apply local law that most closely approximates
-an absolute waiver of all civil liability in connection with the
-Program, unless a warranty or assumption of liability accompanies a
-copy of the Program in return for a fee.
-
- END OF TERMS AND CONDITIONS
-
- How to Apply These Terms to Your New Programs
-
- If you develop a new program, and you want it to be of the greatest
-possible use to the public, the best way to achieve this is to make it
-free software which everyone can redistribute and change under these terms.
-
- To do so, attach the following notices to the program. It is safest
-to attach them to the start of each source file to most effectively
-state the exclusion of warranty; and each file should have at least
-the "copyright" line and a pointer to where the full notice is found.
-
- <one line to give the program's name and a brief idea of what it does.>
- Copyright (C) <year> <name of author>
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU Affero General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU Affero General Public License for more details.
-
- You should have received a copy of the GNU Affero General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-Also add information on how to contact you by electronic and paper mail.
-
- If your software can interact with users remotely through a computer
-network, you should also make sure that it provides a way for users to
-get its source. For example, if your program is a web application, its
-interface could display a "Source" link that leads users to an archive
-of the code. There are many ways you could offer source, and different
-solutions will be better for different programs; see section 13 for the
-specific requirements.
-
- You should also get your employer (if you work as a programmer) or school,
-if any, to sign a "copyright disclaimer" for the program, if necessary.
-For more information on this, and how to apply and follow the GNU AGPL, see
-<http://www.gnu.org/licenses/>.
diff --git a/build/jenkins/create-plot-data-from-log.sh b/build/create-plot-data-from-log.sh
similarity index 100%
rename from build/jenkins/create-plot-data-from-log.sh
rename to build/create-plot-data-from-log.sh
diff --git a/build/git/hooks/coding-standards.sh b/build/git/hooks/coding-standards.sh
deleted file mode 100755
index d4e4c71..0000000
--- a/build/git/hooks/coding-standards.sh
+++ /dev/null
@@ -1,128 +0,0 @@
-#!/usr/bin/env ruby
-
-# This script can be installed as a git update hook.
-
-# It can also be installed as a gitolite 'hooklet' in the
-# hooks/common/update.secondary.d/ directory.
-
-# NOTE: this script runs under the same assumptions as the 'update' hook, so
-# the starting directory must be maintained and arguments must be passed on.
-
-$refname = ARGV[0]
-$oldrev = ARGV[1]
-$newrev = ARGV[2]
-$user = ENV['USER']
-
-def blacklist bl
- all_revs = `git rev-list #{$oldrev}..#{$newrev}`.split("\n")
- all_revs.each do |rev|
- bl.each do |b|
- if rev == b
- puts "Revision #{b} is blacklisted, you must remove it from your branch (possibly using git rebase) before you can push."
- exit 1
- end
- end
- end
-end
-
-blacklist ['26d74dc0524c87c5dcc0c76040ce413a4848b57a']
-
-# Only enforce policy on the master branch
-exit 0 if $refname != 'refs/heads/master'
-
-puts "Enforcing Policies... \n(#{$refname}) (#{$oldrev[0,6]}) (#{$newrev[0,6]})"
-
-$regex = /\[ref: (\d+)\]/
-
-$broken_commit_message = /Please enter a commit message to explain why this merge is necessary/
-$wrong_way_merge_master = /Merge( remote-tracking)? branch '([^\/]+\/)?master' into/
-$merge_master = /Merge branch '[^']+'((?! into)| into master)/
-$pull_merge = /Merge branch 'master' of /
-$refs_or_closes_or_no_issue = /(refs #|closes #|fixes #|no issue #)/i
-
-# enforced custom commit message format
-def check_message_format
- all_revs = `git rev-list --first-parent #{$oldrev}..#{$newrev}`.split("\n")
- merge_revs = `git rev-list --first-parent --min-parents=2 #{$oldrev}..#{$newrev}`.split("\n")
- # single_revs = `git rev-list --first-parent --max-parents=1 #{$oldrev}..#{$newrev}`.split("\n")
- broken = false
- no_ff = false
-
- merge_revs.each do |rev|
- message = `git cat-file commit #{rev} | sed '1,/^$/d'`
- if $wrong_way_merge_master.match(message)
- puts "\n[POLICY] Only non-fast-forward merges into master are allowed. Please"
- puts "reset your master branch:"
- puts " git reset --hard origin/master"
- puts "and then merge your branch with the --no-ff option:"
- puts " git merge your-branch --no-ff\n"
- puts "Remember to add a reference to an issue number in the merge commit!\n"
- puts "\n******************************************************************\n"
- puts "\nOffending commit: #{rev}\n"
- puts "\nOffending commit message:\n"
- puts message
- puts "\n******************************************************************\n"
- puts "\n\n"
- broken = true
- no_ff = true
- elsif $pull_merge.match(message)
- puts "\n[POLICY] This appears to be a git pull merge of remote master into local"
- puts "master. In order to maintain a linear first-parent history of master,"
- puts "please reset your branch and remerge or rebase using the latest master.\n"
- puts "\n******************************************************************\n"
- puts "\nOffending commit: #{rev}\n"
- puts "\nOffending commit message:\n\n"
- puts message
- puts "\n******************************************************************\n"
- puts "\n\n"
- broken = true
- elsif not $merge_master.match(message) and not
- puts "\n[POLICY] This does not appear to be a merge of a feature"
- puts "branch into master. Merges must follow the format"
- puts "\"Merge branch 'feature-branch'\".\n"
- puts "\n******************************************************************\n"
- puts "\nOffending commit: #{rev}\n"
- puts "\nOffending commit message:\n\n"
- puts message
- puts "\n******************************************************************\n"
- puts "\n\n"
- broken = true
- end
- end
-
- all_revs.each do |rev|
- message = `git cat-file commit #{rev} | sed '1,/^$/d'`
- if $broken_commit_message.match(message)
- puts "\n[POLICY] Rejected broken commit message for including boilerplate"
- puts "instruction text.\n"
- puts "\n******************************************************************\n"
- puts "\nOffending commit: #{rev}\n"
- puts "\nOffending commit message:\n\n"
- puts message
- puts "\n******************************************************************\n"
- puts "\n\n"
- broken = true
- end
-
- # Do not test when the commit is a no_ff merge (which will be rejected), because
- # this test will complain about *every* commit in the merge otherwise, obscuring
- # the real reason for the rejection (the no_ff merge)
- if not no_ff and not $refs_or_closes_or_no_issue.match(message)
- puts "\n[POLICY] All commits to master must include an issue using \"refs #\" or"
- puts "\"closes #\", or specify \"no issue #\"\n"
- puts "\n******************************************************************\n"
- puts "\nOffending commit: #{rev}\n"
- puts "\nOffending commit message:\n\n"
- puts message
- puts "\n******************************************************************\n"
- puts "\n\n"
- broken = true
- end
- end
-
- if broken
- exit 1
- end
-end
-
-check_message_format
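As its opening comments say, the deleted script was meant to run as a server-side 'update' hook (or a gitolite hooklet); for reference, wiring such a hook into a bare repository looks roughly like this, with an illustrative path:

    cp coding-standards.sh /srv/git/arvados.git/hooks/update
    chmod +x /srv/git/arvados.git/hooks/update
    # git invokes it as: hooks/update <refname> <oldrev> <newrev>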
diff --git a/build/install/easy-docker-install.sh b/build/install/easy-docker-install.sh
deleted file mode 100755
index fe6e186..0000000
--- a/build/install/easy-docker-install.sh
+++ /dev/null
@@ -1,87 +0,0 @@
-#!/usr/bin/env bash
-
-# This script is intended to make Arvados installation easy. It will download the
-# latest copy of the Arvados docker images as well as the arvdock command. It
-# then uses arvdock to spin up Arvados on this computer.
-#
-# The latest version of this script is available at http://get.arvados.org, so that this
-# command does the right thing:
-#
-# $ \curl -sSL http://get.arvados.org | bash
-#
-# Prerequisites: working docker installation. Run this script as a user who is a member
-# of the docker group.
-
-COLUMNS=80
-
-fail () {
- title "$*"
- exit 1
-}
-
-title () {
- printf "\n%*s\n\n" $(((${#title}+$COLUMNS)/2)) "********** $1 **********"
-}
-
-docker_pull () {
- $DOCKER pull $*
-
- ECODE=$?
-
- if [[ "$ECODE" != "0" ]]; then
- title "$DOCKER pull $* failed"
- exit $ECODE
- fi
-}
-
-main () {
-
- \which which >/dev/null 2>&1 || fail "Error: could not find 'which' command."
-
- # find the docker binary
- DOCKER=`which docker.io`
-
- if [[ "$DOCKER" == "" ]]; then
- DOCKER=`which docker`
- fi
-
- if [[ "$DOCKER" == "" ]]; then
- fail "Error: you need to have docker installed. Could not find the docker executable."
- fi
-
- echo
- echo "If necessary, this command will download the latest Arvados docker images."
- echo "The download can take a long time, depending on the speed of your internet connection."
- echo "When the images are downloaded, it will then start an Arvados environment on this computer."
- echo
- docker_pull arvados/workbench
- docker_pull arvados/doc
- docker_pull arvados/keep
- docker_pull arvados/shell
- docker_pull arvados/sso
- docker_pull arvados/compute
- docker_pull arvados/keep
- docker_pull arvados/keepproxy
- docker_pull arvados/api
- docker_pull crosbymichael/skydns
- docker_pull crosbymichael/skydock
-
- # Now download arvdock and start the containers
- echo
- echo Downloading arvdock
- echo
- \curl -sSL https://raw.githubusercontent.com/curoverse/arvados/master/docker/arvdock -o arvdock
- chmod 755 arvdock
-
- echo
- echo Starting the docker containers
- echo
- ./arvdock start
-
- echo To stop the containers, run
- echo
- echo ./arvdock stop
- echo
-}
-
-main
diff --git a/build/jenkins/run-cwl-tests.sh b/build/jenkins/run-cwl-tests.sh
deleted file mode 100755
index 53c0538..0000000
--- a/build/jenkins/run-cwl-tests.sh
+++ /dev/null
@@ -1,218 +0,0 @@
-#!/bin/bash
-
-read -rd "\000" helpmessage <<EOF
-$(basename $0): Test cwl tool and (optionally) upload to PyPi and Docker Hub.
-
-Syntax:
- WORKSPACE=/path/to/common-workflow-language $(basename $0) [options]
-
-Options:
-
---upload-pypi Upload package to pypi (default: false)
---upload-docker Upload packages to docker hub (default: false)
---debug Output debug information (default: false)
-
-WORKSPACE=path Path to the common-workflow-language source tree
-
-EOF
-
-EXITCODE=0
-CALL_FREIGHT=0
-
-DEBUG=0
-UPLOAD_PYPI=0
-UPLOAD_DOCKER=0
-
-VENVDIR=
-
-leave_temp=
-
-declare -A leave_temp
-
-set -e
-
-clear_temp() {
- leaving=""
- for var in VENVDIR
- do
- if [[ -z "${leave_temp[$var]}" ]]
- then
- if [[ -n "${!var}" ]]
- then
- rm -rf "${!var}"
- fi
- else
- leaving+=" $var=\"${!var}\""
- fi
- done
- if [[ -n "$leaving" ]]; then
- echo "Leaving behind temp dirs: $leaving"
- fi
-}
-
-fatal() {
- clear_temp
- echo >&2 "Fatal: $* (encountered in ${FUNCNAME[1]} at ${BASH_SOURCE[1]} line ${BASH_LINENO[0]})"
- exit 1
-}
-
-trap clear_temp INT EXIT
-
-# Set up temporary install dirs (unless existing dirs were supplied)
-for tmpdir in VENVDIR
-do
- if [[ -n "${!tmpdir}" ]]; then
- leave_temp[$tmpdir]=1
- else
- eval $tmpdir=$(mktemp -d)
- fi
-done
-
-
-while [[ -n "$1" ]]
-do
- arg="$1"; shift
- case "$arg" in
- --help)
- echo >&2 "$helpmessage"
- echo >&2
- exit 1
- ;;
- --debug)
- DEBUG=1
- ;;
- --upload-pypi)
- UPLOAD_PYPI=1
- ;;
- --upload-docker)
- UPLOAD_DOCKER=1
- ;;
- --leave-temp)
- leave_temp[VENVDIR]=1
- ;;
- *=*)
- eval export $(echo $arg | cut -d= -f1)=\"$(echo $arg | cut -d= -f2-)\"
- ;;
- *)
- echo >&2 "$0: Unrecognized option: '$arg'. Try: $0 --help"
- exit 1
- ;;
- esac
-done
-
-# Sanity check
-if ! [[ -n "$WORKSPACE" ]]; then
- echo >&2 "$helpmessage"
- echo >&2
- echo >&2 "Error: WORKSPACE environment variable not set"
- echo >&2
- exit 1
-fi
-
-if [[ "$DEBUG" != 0 ]]; then
- echo "Workspace is $WORKSPACE"
-fi
-
-virtualenv --setuptools "$VENVDIR" || fatal "virtualenv $VENVDIR failed"
-. "$VENVDIR/bin/activate"
-
-handle_python_package () {
- # This function assumes the current working directory is the python package directory
- if [[ "$UPLOAD_PYPI" != 0 ]]; then
- # Make sure only to use sdist - that's the only format pip can deal with (sigh)
- if [[ "$DEBUG" != 0 ]]; then
- python setup.py sdist upload
- else
- python setup.py -q sdist upload
- fi
- else
- # Make sure only to use sdist - that's the only format pip can deal with (sigh)
- if [[ "$DEBUG" != 0 ]]; then
- python setup.py sdist
- else
- python setup.py -q sdist
- fi
- fi
-}
-
-# Make all files world-readable -- jenkins runs with umask 027, and has checked
-# out our git tree here
-chmod o+r "$WORKSPACE" -R
-
-# Now fix our umask to something better suited to building and publishing
-# gems and packages
-umask 0022
-
-if [[ "$DEBUG" != 0 ]]; then
- echo "umask is" `umask`
-fi
-
-# Python packages
-if [[ "$DEBUG" != 0 ]]; then
- echo
- echo "Python packages"
- echo
-fi
-
-cd "$WORKSPACE"
-
-if test -d cwltool ; then
- (cd cwltool
- git fetch
- git reset --hard origin/master
- )
-else
- git clone git@github.com:common-workflow-language/cwltool.git
- (cd cwltool
- git config user.email "sysadmin@curoverse.com"
- git config user.name "Curoverse build bot"
- )
-fi
-
-(cd cwltool
- python setup.py install
- python setup.py test
- ./build-node-docker.sh
-)
-
-./run_test.sh RUNNER=cwltool/cwltool/main.py DRAFT=draft-2
-./run_test.sh RUNNER=cwltool/cwltool/main.py DRAFT=draft-3
-
-(cd cwltool
- handle_python_package
-)
-
-(cd cwltool/cwl-runner
- handle_python_package
-)
-
-(cd cwltool
- ./build-cwl-docker.sh
-)
-
-if [[ "$UPLOAD_DOCKER" != 0 ]]; then
- docker push commonworkflowlanguage/cwltool_module
- docker push commonworkflowlanguage/cwltool
- docker push commonworkflowlanguage/nodejs-engine
-fi
-
-if test -d common-workflow-language.github.io ; then
- (cd common-workflow-language.github.io
- git fetch
- git reset --hard origin/master
- )
-else
- git clone git@github.com:common-workflow-language/common-workflow-language.github.io.git
- (cd common-workflow-language.github.io
- git config user.email "sysadmin@curoverse.com"
- git config user.name "Curoverse build bot"
- )
-fi
-
-python -mcwltool --outdir=$PWD/common-workflow-language.github.io site/cwlsite.cwl site/cwlsite-job.json
-
-(cd common-workflow-language.github.io
- git add --all
- git diff-index --quiet HEAD || git commit -m"Build bot"
- git push
-)
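
For reference, run-cwl-tests.sh (deleted above) was driven by the WORKSPACE variable plus a few flags. A minimal sketch of an invocation, assuming a hypothetical Jenkins-style checkout path:

    # Hypothetical paths; --leave-temp keeps the temporary virtualenv for inspection.
    WORKSPACE=/home/jenkins/workspace/common-workflow-language \
        ./run-cwl-tests.sh --debug --leave-temp
    # Add --upload-pypi and/or --upload-docker only on release builds.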
diff --git a/build/jenkins/run-deploy.sh b/build/jenkins/run-deploy.sh
deleted file mode 100755
index 1b06c65..0000000
--- a/build/jenkins/run-deploy.sh
+++ /dev/null
@@ -1,266 +0,0 @@
-#!/bin/bash
-
-DEBUG=0
-SSH_PORT=22
-
-function usage {
- echo >&2
- echo >&2 "usage: $0 [options] <identifier>"
- echo >&2
- echo >&2 " <identifier> Arvados cluster name"
- echo >&2
- echo >&2 "$0 options:"
- echo >&2 " -p, --port <ssh port> SSH port to use (default 22)"
- echo >&2 " -d, --debug Enable debug output"
- echo >&2 " -h, --help Display this help and exit"
- echo >&2
- echo >&2 "Note: this script requires an arvados token created with these permissions:"
- echo >&2 ' arv api_client_authorization create_system_auth \'
- echo >&2 ' --scopes "[\"GET /arvados/v1/virtual_machines\",'
- echo >&2 ' \"GET /arvados/v1/keep_services\",'
- echo >&2 ' \"GET /arvados/v1/keep_services/\",'
- echo >&2 ' \"GET /arvados/v1/groups\",'
- echo >&2 ' \"GET /arvados/v1/groups/\",'
- echo >&2 ' \"GET /arvados/v1/links\",'
- echo >&2 ' \"GET /arvados/v1/collections\",'
- echo >&2 ' \"POST /arvados/v1/collections\",'
- echo >&2 ' \"POST /arvados/v1/links\"]"'
- echo >&2
-}
-
-# NOTE: This requires GNU getopt (part of the util-linux package on Debian-based distros).
-TEMP=`getopt -o hdp: \
- --long help,debug,port: \
- -n "$0" -- "$@"`
-
-if [ $? != 0 ] ; then echo "Use -h for help"; exit 1 ; fi
-# Note the quotes around `$TEMP': they are essential!
-eval set -- "$TEMP"
-
-while [ $# -ge 1 ]
-do
- case $1 in
- -p | --port)
- SSH_PORT="$2"; shift 2
- ;;
- -d | --debug)
- DEBUG=1
- shift
- ;;
- --)
- shift
- break
- ;;
- *)
- usage
- exit 1
- ;;
- esac
-done
-
-IDENTIFIER=$1
-
-if [[ "$IDENTIFIER" == '' ]]; then
- usage
- exit 1
-fi
-
-EXITCODE=0
-
-COLUMNS=80
-
-PUPPET_AGENT='
-now() { date +%s; }
-let endtime="$(now) + 600"
-while [ "$endtime" -gt "$(now)" ]; do
- puppet agent --test --detailed-exitcodes
- agent_exitcode=$?
- if [ 0 = "$agent_exitcode" ] || [ 2 = "$agent_exitcode" ]; then
- break
- else
- sleep 10s
- fi
-done
-exit ${agent_exitcode:-99}
-'
-
-title () {
- date=`date +'%Y-%m-%d %H:%M:%S'`
- printf "$date $1\n"
-}
-
-function run_puppet() {
- node=$1
- return_var=$2
-
- title "Running puppet on $node"
- TMP_FILE=`mktemp`
- if [[ "$DEBUG" != "0" ]]; then
- ssh -t -p$SSH_PORT -o "StrictHostKeyChecking no" -o "ConnectTimeout 5" root@$node -C bash -c "'$PUPPET_AGENT'" | tee $TMP_FILE
- else
- ssh -t -p$SSH_PORT -o "StrictHostKeyChecking no" -o "ConnectTimeout 5" root@$node -C bash -c "'$PUPPET_AGENT'" > $TMP_FILE 2>&1
- fi
-
- ECODE=${PIPESTATUS[0]}
- RESULT=$(cat $TMP_FILE)
-
- if [[ "$ECODE" != "255" && ! ("$RESULT" =~ 'already in progress') && "$ECODE" != "2" && "$ECODE" != "0" ]]; then
- # Ssh exits 255 if the connection timed out. Just ignore that.
- # Puppet exits 2 if there are changes. For real!
- # Puppet prints 'Notice: Run of Puppet configuration client already in progress' if another puppet process
- # was already running
- echo "ERROR running puppet on $node: exit code $ECODE"
- if [[ "$DEBUG" == "0" ]]; then
- title "Command output follows:"
- echo $RESULT
- fi
- fi
- if [[ "$ECODE" == "255" ]]; then
- title "Connection timed out"
- ECODE=0
- fi
- if [[ "$ECODE" == "2" ]]; then
- ECODE=0
- fi
- rm -f $TMP_FILE
- eval "$return_var=$ECODE"
-}
-
-function run_command() {
- node=$1
- return_var=$2
- command=$3
-
- title "Running '$command' on $node"
- TMP_FILE=`mktemp`
- if [[ "$DEBUG" != "0" ]]; then
- ssh -t -p$SSH_PORT -o "StrictHostKeyChecking no" -o "ConnectTimeout 5" root@$node -C "$command" | tee $TMP_FILE
- else
- ssh -t -p$SSH_PORT -o "StrictHostKeyChecking no" -o "ConnectTimeout 5" root@$node -C "$command" > $TMP_FILE 2>&1
- fi
-
- ECODE=${PIPESTATUS[0]}
- RESULT=$(cat $TMP_FILE)
-
- if [[ "$ECODE" != "255" && "$ECODE" != "0" ]]; then
- # Ssh exits 255 if the connection timed out. Just ignore that, it's possible that this node is
- # a shell node that is down.
- title "ERROR running command on $node: exit code $ECODE"
- if [[ "$DEBUG" == "0" ]]; then
- title "Command output follows:"
- echo $RESULT
- fi
- fi
- if [[ "$ECODE" == "255" ]]; then
- title "Connection timed out"
- ECODE=0
- fi
- rm -f $TMP_FILE
- eval "$return_var=$ECODE"
-}
-
-title "Updating API server"
-SUM_ECODE=0
-run_puppet $IDENTIFIER.arvadosapi.com ECODE
-SUM_ECODE=$(($SUM_ECODE + $ECODE))
-if [ ! "$IDENTIFIER" = "c97qk" ]
-then
- run_command $IDENTIFIER.arvadosapi.com ECODE "dpkg -L arvados-mailchimp-plugin 2>/dev/null && apt-get install arvados-mailchimp-plugin --reinstall || echo"
- SUM_ECODE=$(($SUM_ECODE + $ECODE))
-fi
-
-if [[ "$SUM_ECODE" != "0" ]]; then
- title "ERROR: Updating API server FAILED"
- EXITCODE=$(($EXITCODE + $SUM_ECODE))
- exit $EXITCODE
-fi
-
-title "Loading ARVADOS_API_HOST and ARVADOS_API_TOKEN"
-if [[ -f "$HOME/.config/arvados/$IDENTIFIER.arvadosapi.com.conf" ]]; then
- . $HOME/.config/arvados/$IDENTIFIER.arvadosapi.com.conf
-else
- title "WARNING: $HOME/.config/arvados/$IDENTIFIER.arvadosapi.com.conf not found."
-fi
-if [[ "$ARVADOS_API_HOST" == "" ]] || [[ "$ARVADOS_API_TOKEN" == "" ]]; then
- title "ERROR: ARVADOS_API_HOST and/or ARVADOS_API_TOKEN environment variables are not set."
- exit 1
-fi
-
-title "Locating Arvados Standard Docker images project"
-
-JSON_FILTER="[[\"name\", \"=\", \"Arvados Standard Docker Images\"], [\"owner_uuid\", \"=\", \"$IDENTIFIER-tpzed-000000000000000\"]]"
-DOCKER_IMAGES_PROJECT=`ARVADOS_API_HOST=$ARVADOS_API_HOST ARVADOS_API_TOKEN=$ARVADOS_API_TOKEN arv --format=uuid group list --filters="$JSON_FILTER"`
-
-if [[ "$DOCKER_IMAGES_PROJECT" == "" ]]; then
- title "Warning: Arvados Standard Docker Images project not found. Creating it."
-
- DOCKER_IMAGES_PROJECT=`ARVADOS_API_HOST=$ARVADOS_API_HOST ARVADOS_API_TOKEN=$ARVADOS_API_TOKEN arv --format=uuid group create --group "{\"owner_uuid\":\"$IDENTIFIER-tpzed-000000000000000\", \"name\":\"Arvados Standard Docker Images\", \"group_class\":\"project\"}"`
- ARVADOS_API_HOST=$ARVADOS_API_HOST ARVADOS_API_TOKEN=$ARVADOS_API_TOKEN arv link create --link "{\"tail_uuid\":\"$IDENTIFIER-j7d0g-fffffffffffffff\", \"head_uuid\":\"$DOCKER_IMAGES_PROJECT\", \"link_class\":\"permission\", \"name\":\"can_read\" }"
- if [[ "$?" != "0" ]]; then
- title "ERROR: could not create standard Docker images project Please create it, cf. http://doc.arvados.org/install/create-standard-objects.html"
- exit 1
- fi
-fi
-
-title "Found Arvados Standard Docker Images project with uuid $DOCKER_IMAGES_PROJECT"
-GIT_COMMIT=`ssh -o "StrictHostKeyChecking no" $IDENTIFIER cat /usr/local/arvados/src/git-commit.version`
-
-if [[ "$?" != "0" ]] || [[ "$GIT_COMMIT" == "" ]]; then
- title "ERROR: unable to get arvados/jobs Docker image git revision"
- exit 1
-else
- title "Found git commit for arvados/jobs Docker image: $GIT_COMMIT"
-fi
-
-run_command shell.$IDENTIFIER ECODE "ARVADOS_API_HOST=$ARVADOS_API_HOST ARVADOS_API_TOKEN=$ARVADOS_API_TOKEN /usr/local/rvm/bin/rvm-exec default arv keep docker" |grep -q $GIT_COMMIT
-
-if [[ "$?" == "0" ]]; then
- title "Found latest arvados/jobs Docker image, nothing to upload"
-else
- title "Installing latest arvados/jobs Docker image"
- ssh -o "StrictHostKeyChecking no" shell.$IDENTIFIER "ARVADOS_API_HOST=$ARVADOS_API_HOST ARVADOS_API_TOKEN=$ARVADOS_API_TOKEN /usr/local/rvm/bin/rvm-exec default arv keep docker --pull --project-uuid=$DOCKER_IMAGES_PROJECT arvados/jobs $GIT_COMMIT"
- if [[ "$?" -ne 0 ]]; then
- title "'git pull' failed exiting..."
- exit 1
- fi
-fi
-
-title "Gathering list of shell and Keep nodes"
-SHELL_NODES=`ARVADOS_API_HOST=$ARVADOS_API_HOST ARVADOS_API_TOKEN=$ARVADOS_API_TOKEN arv virtual_machine list |jq .items[].hostname -r`
-KEEP_NODES=`ARVADOS_API_HOST=$ARVADOS_API_HOST ARVADOS_API_TOKEN=$ARVADOS_API_TOKEN arv keep_service list |jq .items[].service_host -r`
-
-title "Updating workbench"
-SUM_ECODE=0
-if [[ `host workbench.$ARVADOS_API_HOST |cut -f4 -d' '` != `host $ARVADOS_API_HOST |cut -f4 -d' '` ]]; then
- # Workbench runs on a separate host. We need to run puppet there too.
- run_puppet workbench.$IDENTIFIER ECODE
- SUM_ECODE=$(($SUM_ECODE + $ECODE))
-fi
-
-if [[ "$SUM_ECODE" != "0" ]]; then
- title "ERROR: Updating workbench FAILED"
- EXITCODE=$(($EXITCODE + $SUM_ECODE))
- exit $EXITCODE
-fi
-
-for n in manage switchyard $SHELL_NODES $KEEP_NODES; do
- ECODE=0
- if [[ $n =~ $ARVADOS_API_HOST$ ]]; then
- # e.g. keep.qr1hi.arvadosapi.com
- node=$n
- else
- # e.g. shell
- node=$n.$ARVADOS_API_HOST
- fi
-
- # e.g. keep.qr1hi
- node=${node%.arvadosapi.com}
-
- title "Updating $node"
- run_puppet $node ECODE
- if [[ "$ECODE" != "0" ]]; then
- title "ERROR: Updating $node node FAILED: exit code $ECODE"
- EXITCODE=$(($EXITCODE + $ECODE))
- exit $EXITCODE
- fi
-done
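
For reference, run-deploy.sh (deleted above) expected a cluster identifier and, optionally, an SSH port. A hedged example invocation (identifier and port are illustrative):

    # Illustrative only: update the "qr1hi" cluster over SSH port 2222 with debug output.
    # Requires ~/.config/arvados/qr1hi.arvadosapi.com.conf providing ARVADOS_API_HOST
    # and ARVADOS_API_TOKEN, plus a token scoped as described in the usage text above.
    ./run-deploy.sh --port 2222 --debug qr1hi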
diff --git a/build/jenkins/run-diagnostics-suite.sh b/build/jenkins/run-diagnostics-suite.sh
deleted file mode 100755
index 015a053..0000000
--- a/build/jenkins/run-diagnostics-suite.sh
+++ /dev/null
@@ -1,86 +0,0 @@
-#!/bin/bash
-
-EXITCODE=0
-
-INSTANCE=$1
-REVISION=$2
-
-if [[ "$INSTANCE" == '' ]]; then
- echo "Syntax: $0 <instance> [revision]"
- exit 1
-fi
-
-if [[ "$REVISION" == '' ]]; then
- # See if there's a configuration file with the revision.
- CONFIG_PATH=/home/jenkins/configuration/$INSTANCE.arvadosapi.com-versions.conf
- if [[ -f $CONFIG_PATH ]]; then
- echo "Loading git revision from $CONFIG_PATH"
- . $CONFIG_PATH
- REVISION=$ARVADOS_GIT_REVISION
- fi
-fi
-
-if [[ "$REVISION" != '' ]]; then
- echo "Git revision is $REVISION"
-else
- echo "No valid git revision found, proceeding with what is in place."
-fi
-
-# Sanity check
-if ! [[ -n "$WORKSPACE" ]]; then
- echo "WORKSPACE environment variable not set"
- exit 1
-fi
-
-title () {
- txt="********** $1 **********"
- printf "\n%*s%s\n\n" $((($COLUMNS-${#txt})/2)) "" "$txt"
-}
-
-timer_reset() {
- t0=$SECONDS
-}
-
-timer() {
- echo -n "$(($SECONDS - $t0))s"
-}
-
-source /etc/profile.d/rvm.sh
-echo $WORKSPACE
-
-title "Starting diagnostics"
-timer_reset
-
-cd $WORKSPACE
-
-if [[ "$REVISION" != '' ]]; then
- git checkout $REVISION
-fi
-
-cp -f /home/jenkins/diagnostics/arvados-workbench/$INSTANCE-application.yml $WORKSPACE/apps/workbench/config/application.yml
-
-cd $WORKSPACE/apps/workbench
-
-HOME="$GEMHOME" bundle install --no-deployment
-
-if [[ ! -d tmp ]]; then
- mkdir tmp
-fi
-
-RAILS_ENV=diagnostics bundle exec rake TEST=test/diagnostics/pipeline_test.rb
-
-ECODE=$?
-
-if [[ "$REVISION" != '' ]]; then
- git checkout master
-fi
-
-if [[ "$ECODE" != "0" ]]; then
- title "!!!!!! DIAGNOSTICS FAILED (`timer`) !!!!!!"
- EXITCODE=$(($EXITCODE + $ECODE))
- exit $EXITCODE
-fi
-
-title "Diagnostics complete (`timer`)"
-
-exit $EXITCODE
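
For reference, run-diagnostics-suite.sh (deleted above) took an instance name and an optional git revision. A minimal sketch, assuming a hypothetical Jenkins workspace path:

    # Hypothetical workspace path; the second argument (a git revision) is optional
    # and is otherwise read from /home/jenkins/configuration/<instance>.arvadosapi.com-versions.conf.
    WORKSPACE=/home/jenkins/workspace/arvados \
        ./run-diagnostics-suite.sh qr1hi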
diff --git a/build/jenkins/run-performance-suite.sh b/build/jenkins/run-performance-suite.sh
deleted file mode 100755
index 2944bda..0000000
--- a/build/jenkins/run-performance-suite.sh
+++ /dev/null
@@ -1,88 +0,0 @@
-#!/bin/bash
-
-EXITCODE=0
-
-INSTANCE=$1
-REVISION=$2
-
-if [[ "$INSTANCE" == '' ]]; then
- echo "Syntax: $0 <instance> [revision]"
- exit 1
-fi
-
-if [[ "$REVISION" == '' ]]; then
- # See if there's a configuration file with the revision.
- CONFIG_PATH=/home/jenkins/configuration/$INSTANCE.arvadosapi.com-versions.conf
- if [[ -f $CONFIG_PATH ]]; then
- echo "Loading git revision from $CONFIG_PATH"
- . $CONFIG_PATH
- REVISION=$ARVADOS_GIT_REVISION
- fi
-fi
-
-if [[ "$REVISION" != '' ]]; then
- echo "Git revision is $REVISION"
-else
- echo "No valid git revision found, proceeding with what is in place."
-fi
-
-# Sanity check
-if ! [[ -n "$WORKSPACE" ]]; then
- echo "WORKSPACE environment variable not set"
- exit 1
-fi
-
-title () {
- txt="********** $1 **********"
- printf "\n%*s%s\n\n" $((($COLUMNS-${#txt})/2)) "" "$txt"
-}
-
-timer_reset() {
- t0=$SECONDS
-}
-
-timer() {
- echo -n "$(($SECONDS - $t0))s"
-}
-
-source /etc/profile.d/rvm.sh
-echo $WORKSPACE
-
-title "Starting performance test"
-timer_reset
-
-cd $WORKSPACE
-
-if [[ "$REVISION" != '' ]]; then
- git checkout $REVISION
-fi
-
-cp -f /home/jenkins/diagnostics/arvados-workbench/$INSTANCE-application.yml $WORKSPACE/apps/workbench/config/application.yml
-
-cd $WORKSPACE/apps/workbench
-
-HOME="$GEMHOME" bundle install --no-deployment
-
-if [[ ! -d tmp ]]; then
- mkdir tmp
-fi
-
-mkdir -p tmp/cache
-
-RAILS_ENV=performance bundle exec rake test:benchmark
-
-ECODE=$?
-
-if [[ "$REVISION" != '' ]]; then
- git checkout master
-fi
-
-if [[ "$ECODE" != "0" ]]; then
- title "!!!!!! PERFORMANCE TESTS FAILED (`timer`) !!!!!!"
- EXITCODE=$(($EXITCODE + $ECODE))
- exit $EXITCODE
-fi
-
-title "Performance tests complete (`timer`)"
-
-exit $EXITCODE
diff --git a/build/jenkins/run-tapestry-tests.sh b/build/jenkins/run-tapestry-tests.sh
deleted file mode 100755
index 851a81d..0000000
--- a/build/jenkins/run-tapestry-tests.sh
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/bin/bash
-
-EXITCODE=0
-
-COLUMNS=80
-
-title () {
- txt="********** $1 **********"
- printf "\n%*s\n\n" $(((${#txt}+$COLUMNS)/2)) "$txt"
-}
-
-source /etc/profile.d/rvm.sh
-
-# This shouldn't really be necessary... but the jenkins/rvm integration seems a
-# bit wonky occasionally.
-rvm use ree
-
-echo $WORKSPACE
-
-# Tapestry
-title "Starting tapestry tests"
-cd "$WORKSPACE"
-
-# There are a few submodules
-git submodule init && git submodule update
-
-# Use sqlite for testing
-sed -i'' -e "s:mysql:sqlite3:" Gemfile
-
-# Tapestry is not set up yet to use --deployment
-#bundle install --deployment
-bundle install
-
-rm -f config/database.yml
-rm -f config/environments/test.rb
-cp $HOME/tapestry/test.rb config/environments/
-cp $HOME/tapestry/database.yml config/
-
-export RAILS_ENV=test
-
-bundle exec rake db:drop
-bundle exec rake db:create
-bundle exec rake db:setup
-bundle exec rake test
-
-ECODE=$?
-
-if [[ "$ECODE" != "0" ]]; then
- title "!!!!!! TAPESTRY TESTS FAILED !!!!!!"
- EXITCODE=$(($EXITCODE + $ECODE))
-fi
-
-title "Tapestry tests complete"
-
-exit $EXITCODE
diff --git a/build/jenkins/run_upload_packages.py b/build/jenkins/run_upload_packages.py
deleted file mode 100755
index 04e6c80..0000000
--- a/build/jenkins/run_upload_packages.py
+++ /dev/null
@@ -1,273 +0,0 @@
-#!/usr/bin/env python3
-
-import argparse
-import functools
-import glob
-import logging
-import os
-import pipes
-import shutil
-import subprocess
-import sys
-import time
-
-class TimestampFile:
- def __init__(self, path):
- self.path = path
- self.start_time = time.time()
-
- def last_upload(self):
- try:
- return os.path.getmtime(self.path)
- except EnvironmentError:
- return -1
-
- def update(self):
- os.close(os.open(self.path, os.O_CREAT | os.O_APPEND))
- os.utime(self.path, (time.time(), self.start_time))
-
-
-class PackageSuite:
- NEED_SSH = False
-
- def __init__(self, glob_root, rel_globs):
- logger_part = getattr(self, 'LOGGER_PART', os.path.basename(glob_root))
- self.logger = logging.getLogger('arvados-dev.upload.' + logger_part)
- self.globs = [os.path.join(glob_root, rel_glob)
- for rel_glob in rel_globs]
-
- def files_to_upload(self, since_timestamp):
- for abs_glob in self.globs:
- for path in glob.glob(abs_glob):
- if os.path.getmtime(path) >= since_timestamp:
- yield path
-
- def upload_file(self, path):
- raise NotImplementedError("PackageSuite.upload_file")
-
- def upload_files(self, paths):
- for path in paths:
- self.logger.info("Uploading %s", path)
- self.upload_file(path)
-
- def post_uploads(self, paths):
- pass
-
- def update_packages(self, since_timestamp):
- upload_paths = list(self.files_to_upload(since_timestamp))
- if upload_paths:
- self.upload_files(upload_paths)
- self.post_uploads(upload_paths)
-
-
-class PythonPackageSuite(PackageSuite):
- LOGGER_PART = 'python'
-
- def __init__(self, glob_root, rel_globs):
- super().__init__(glob_root, rel_globs)
- self.seen_packages = set()
-
- def upload_file(self, path):
- src_dir = os.path.dirname(os.path.dirname(path))
- if src_dir in self.seen_packages:
- return
- self.seen_packages.add(src_dir)
- # NOTE: If we ever start uploading Python 3 packages, we'll need to
- # figure out some way to adapt cmd to match. It might be easiest
- # to give all our setup.py files the executable bit, and run that
- # directly.
- # We also must run `sdist` before `upload`: `upload` uploads any
- # distributions previously generated in the command. It doesn't
- # know how to upload distributions already on disk. We write the
- # result to a dedicated directory to avoid interfering with our
- # timestamp tracking.
- cmd = ['python2.7', 'setup.py']
- if not self.logger.isEnabledFor(logging.INFO):
- cmd.append('--quiet')
- cmd.extend(['sdist', '--dist-dir', '.upload_dist', 'upload'])
- subprocess.check_call(cmd, cwd=src_dir)
- shutil.rmtree(os.path.join(src_dir, '.upload_dist'))
-
-
-class GemPackageSuite(PackageSuite):
- LOGGER_PART = 'gems'
-
- def upload_file(self, path):
- cmd = ['gem', 'push', path]
- push_proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
- repushed = any(line == b'Repushing of gem versions is not allowed.\n'
- for line in push_proc.stdout)
- # Read any remaining stdout before closing.
- for line in push_proc.stdout:
- pass
- push_proc.stdout.close()
- if (push_proc.wait() != 0) and not repushed:
- raise subprocess.CalledProcessError(push_proc.returncode, cmd)
-
-
-class DistroPackageSuite(PackageSuite):
- NEED_SSH = True
- REMOTE_DEST_DIR = 'tmp'
-
- def __init__(self, glob_root, rel_globs, target, ssh_host, ssh_opts):
- super().__init__(glob_root, rel_globs)
- self.target = target
- self.ssh_host = ssh_host
- self.ssh_opts = ['-o' + opt for opt in ssh_opts]
- if not self.logger.isEnabledFor(logging.INFO):
- self.ssh_opts.append('-q')
-
- def _build_cmd(self, base_cmd, *args):
- cmd = [base_cmd]
- cmd.extend(self.ssh_opts)
- cmd.extend(args)
- return cmd
-
- def _paths_basenames(self, paths):
- return (os.path.basename(path) for path in paths)
-
- def _run_script(self, script, *args):
- # SSH will use a shell to run our bash command, so we have to
- # quote our arguments.
- # self.__class__.__name__ provides $0 for the script, which makes a
- # nicer message if there's an error.
- subprocess.check_call(self._build_cmd(
- 'ssh', self.ssh_host, 'bash', '-ec', pipes.quote(script),
- self.__class__.__name__, *(pipes.quote(s) for s in args)))
-
- def upload_files(self, paths):
- cmd = self._build_cmd('scp', *paths)
- cmd.append('{self.ssh_host}:{self.REMOTE_DEST_DIR}'.format(self=self))
- subprocess.check_call(cmd)
-
-
-class DebianPackageSuite(DistroPackageSuite):
- FREIGHT_SCRIPT = """
-cd "$1"; shift
-DISTNAME=$1; shift
-freight add "$@" "apt/$DISTNAME"
-freight cache "apt/$DISTNAME"
-rm "$@"
-"""
- TARGET_DISTNAMES = {
- 'debian7': 'wheezy',
- 'debian8': 'jessie',
- 'ubuntu1204': 'precise',
- 'ubuntu1404': 'trusty',
- }
-
- def post_uploads(self, paths):
- self._run_script(self.FREIGHT_SCRIPT, self.REMOTE_DEST_DIR,
- self.TARGET_DISTNAMES[self.target],
- *self._paths_basenames(paths))
-
-
-class RedHatPackageSuite(DistroPackageSuite):
- CREATEREPO_SCRIPT = """
-cd "$1"; shift
-REPODIR=$1; shift
-rpmsign --addsign "$@" </dev/null
-mv "$@" "$REPODIR"
-createrepo "$REPODIR"
-"""
- REPO_ROOT = '/var/www/rpm.arvados.org/'
- TARGET_REPODIRS = {
- 'centos6': 'CentOS/6/os/x86_64/'
- }
-
- def post_uploads(self, paths):
- repo_dir = os.path.join(self.REPO_ROOT,
- self.TARGET_REPODIRS[self.target])
- self._run_script(self.CREATEREPO_SCRIPT, self.REMOTE_DEST_DIR,
- repo_dir, *self._paths_basenames(paths))
-
-
-def _define_suite(suite_class, *rel_globs, **kwargs):
- return functools.partial(suite_class, rel_globs=rel_globs, **kwargs)
-
-PACKAGE_SUITES = {
- 'python': _define_suite(PythonPackageSuite,
- 'sdk/pam/dist/*.tar.gz',
- 'sdk/python/dist/*.tar.gz',
- 'sdk/cwl/dist/*.tar.gz',
- 'services/nodemanager/dist/*.tar.gz',
- 'services/fuse/dist/*.tar.gz',
- ),
- 'gems': _define_suite(GemPackageSuite,
- 'sdk/ruby/*.gem',
- 'sdk/cli/*.gem',
- 'services/login-sync/*.gem',
- ),
- }
-for target in ['debian7', 'debian8', 'ubuntu1204', 'ubuntu1404']:
- PACKAGE_SUITES[target] = _define_suite(
- DebianPackageSuite, os.path.join('packages', target, '*.deb'),
- target=target)
-for target in ['centos6']:
- PACKAGE_SUITES[target] = _define_suite(
- RedHatPackageSuite, os.path.join('packages', target, '*.rpm'),
- target=target)
-
-def parse_arguments(arguments):
- parser = argparse.ArgumentParser(
- prog="run_upload_packages.py",
- description="Upload Arvados packages to various repositories")
- parser.add_argument(
- '--workspace', '-W', default=os.environ.get('WORKSPACE'),
- help="Arvados source directory with built packages to upload")
- parser.add_argument(
- '--ssh-host', '-H',
- help="Host specification for distribution repository server")
- parser.add_argument('-o', action='append', default=[], dest='ssh_opts',
- metavar='OPTION', help="Pass option to `ssh -o`")
- parser.add_argument('--verbose', '-v', action='count', default=0,
- help="Log more information and subcommand output")
- parser.add_argument(
- 'targets', nargs='*', default=['all'], metavar='target',
- help="Upload packages to these targets (default all)\nAvailable targets: " +
- ', '.join(sorted(PACKAGE_SUITES.keys())))
- args = parser.parse_args(arguments)
- if 'all' in args.targets:
- args.targets = list(PACKAGE_SUITES.keys())
-
- if args.workspace is None:
- parser.error("workspace not set from command line or environment")
- for target in args.targets:
- try:
- suite_class = PACKAGE_SUITES[target].func
- except KeyError:
- parser.error("unrecognized target {!r}".format(target))
- if suite_class.NEED_SSH and (args.ssh_host is None):
- parser.error(
- "--ssh-host must be specified to upload distribution packages")
- return args
-
-def setup_logger(stream_dest, args):
- log_handler = logging.StreamHandler(stream_dest)
- log_handler.setFormatter(logging.Formatter(
- '%(asctime)s %(name)s[%(process)d] %(levelname)s: %(message)s',
- '%Y-%m-%d %H:%M:%S'))
- logger = logging.getLogger('arvados-dev.upload')
- logger.addHandler(log_handler)
- logger.setLevel(max(1, logging.WARNING - (10 * args.verbose)))
-
-def build_suite_and_upload(target, since_timestamp, args):
- suite_def = PACKAGE_SUITES[target]
- kwargs = {}
- if suite_def.func.NEED_SSH:
- kwargs.update(ssh_host=args.ssh_host, ssh_opts=args.ssh_opts)
- suite = suite_def(args.workspace, **kwargs)
- suite.update_packages(since_timestamp)
-
-def main(arguments, stdout=sys.stdout, stderr=sys.stderr):
- args = parse_arguments(arguments)
- setup_logger(stderr, args)
- ts_file = TimestampFile(os.path.join(args.workspace, 'packages',
- '.last_upload'))
- last_upload_ts = ts_file.last_upload()
- for target in args.targets:
- build_suite_and_upload(target, last_upload_ts, args)
- ts_file.update()
-
-if __name__ == '__main__':
- main(sys.argv[1:])
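
For reference, run_upload_packages.py (deleted above) read WORKSPACE from the environment and uploaded one or more named targets. A hedged example invocation (repository host and SSH option are placeholders):

    # Placeholder repository host and port: upload freshly built Debian and CentOS
    # packages with verbose logging; -o values are passed straight through to `ssh -o`.
    WORKSPACE=/home/jenkins/workspace/arvados \
        ./run_upload_packages.py -H repo.example.com -o Port=2222 -v debian8 centos6
    # The "python" and "gems" targets upload directly and need no --ssh-host.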
diff --git a/build/jenkins/libcloud-pin b/build/libcloud-pin
similarity index 100%
rename from build/jenkins/libcloud-pin
rename to build/libcloud-pin
diff --git a/build/jenkins/package-build-dockerfiles/.gitignore b/build/package-build-dockerfiles/.gitignore
similarity index 100%
rename from build/jenkins/package-build-dockerfiles/.gitignore
rename to build/package-build-dockerfiles/.gitignore
diff --git a/build/jenkins/package-build-dockerfiles/Makefile b/build/package-build-dockerfiles/Makefile
similarity index 100%
rename from build/jenkins/package-build-dockerfiles/Makefile
rename to build/package-build-dockerfiles/Makefile
diff --git a/build/jenkins/package-build-dockerfiles/README b/build/package-build-dockerfiles/README
similarity index 100%
rename from build/jenkins/package-build-dockerfiles/README
rename to build/package-build-dockerfiles/README
diff --git a/build/jenkins/package-build-dockerfiles/build-all-build-containers.sh b/build/package-build-dockerfiles/build-all-build-containers.sh
similarity index 100%
rename from build/jenkins/package-build-dockerfiles/build-all-build-containers.sh
rename to build/package-build-dockerfiles/build-all-build-containers.sh
diff --git a/build/jenkins/package-build-dockerfiles/centos6/Dockerfile b/build/package-build-dockerfiles/centos6/Dockerfile
similarity index 100%
rename from build/jenkins/package-build-dockerfiles/centos6/Dockerfile
rename to build/package-build-dockerfiles/centos6/Dockerfile
diff --git a/build/jenkins/package-build-dockerfiles/debian7/Dockerfile b/build/package-build-dockerfiles/debian7/Dockerfile
similarity index 100%
rename from build/jenkins/package-build-dockerfiles/debian7/Dockerfile
rename to build/package-build-dockerfiles/debian7/Dockerfile
diff --git a/build/jenkins/package-build-dockerfiles/debian8/Dockerfile b/build/package-build-dockerfiles/debian8/Dockerfile
similarity index 100%
rename from build/jenkins/package-build-dockerfiles/debian8/Dockerfile
rename to build/package-build-dockerfiles/debian8/Dockerfile
diff --git a/build/jenkins/package-build-dockerfiles/ubuntu1204/Dockerfile b/build/package-build-dockerfiles/ubuntu1204/Dockerfile
similarity index 100%
rename from build/jenkins/package-build-dockerfiles/ubuntu1204/Dockerfile
rename to build/package-build-dockerfiles/ubuntu1204/Dockerfile
diff --git a/build/jenkins/package-build-dockerfiles/ubuntu1404/Dockerfile b/build/package-build-dockerfiles/ubuntu1404/Dockerfile
similarity index 100%
rename from build/jenkins/package-build-dockerfiles/ubuntu1404/Dockerfile
rename to build/package-build-dockerfiles/ubuntu1404/Dockerfile
diff --git a/build/jenkins/package-test-dockerfiles/centos6/Dockerfile b/build/package-test-dockerfiles/centos6/Dockerfile
similarity index 100%
rename from build/jenkins/package-test-dockerfiles/centos6/Dockerfile
rename to build/package-test-dockerfiles/centos6/Dockerfile
diff --git a/build/jenkins/package-test-dockerfiles/centos6/localrepo.repo b/build/package-test-dockerfiles/centos6/localrepo.repo
similarity index 100%
rename from build/jenkins/package-test-dockerfiles/centos6/localrepo.repo
rename to build/package-test-dockerfiles/centos6/localrepo.repo
diff --git a/build/jenkins/package-test-dockerfiles/debian7/Dockerfile b/build/package-test-dockerfiles/debian7/Dockerfile
similarity index 100%
rename from build/jenkins/package-test-dockerfiles/debian7/Dockerfile
rename to build/package-test-dockerfiles/debian7/Dockerfile
diff --git a/build/jenkins/package-test-dockerfiles/debian8/Dockerfile b/build/package-test-dockerfiles/debian8/Dockerfile
similarity index 100%
rename from build/jenkins/package-test-dockerfiles/debian8/Dockerfile
rename to build/package-test-dockerfiles/debian8/Dockerfile
diff --git a/build/jenkins/package-test-dockerfiles/ubuntu1204/Dockerfile b/build/package-test-dockerfiles/ubuntu1204/Dockerfile
similarity index 100%
rename from build/jenkins/package-test-dockerfiles/ubuntu1204/Dockerfile
rename to build/package-test-dockerfiles/ubuntu1204/Dockerfile
diff --git a/build/jenkins/package-test-dockerfiles/ubuntu1404/Dockerfile b/build/package-test-dockerfiles/ubuntu1404/Dockerfile
similarity index 100%
rename from build/jenkins/package-test-dockerfiles/ubuntu1404/Dockerfile
rename to build/package-test-dockerfiles/ubuntu1404/Dockerfile
diff --git a/build/jenkins/package-testing/common-test-packages.sh b/build/package-testing/common-test-packages.sh
similarity index 100%
rename from build/jenkins/package-testing/common-test-packages.sh
rename to build/package-testing/common-test-packages.sh
diff --git a/build/jenkins/package-testing/deb-common-test-packages.sh b/build/package-testing/deb-common-test-packages.sh
similarity index 100%
rename from build/jenkins/package-testing/deb-common-test-packages.sh
rename to build/package-testing/deb-common-test-packages.sh
diff --git a/build/jenkins/package-testing/test-package-arvados-api-server.sh b/build/package-testing/test-package-arvados-api-server.sh
similarity index 100%
rename from build/jenkins/package-testing/test-package-arvados-api-server.sh
rename to build/package-testing/test-package-arvados-api-server.sh
diff --git a/build/jenkins/package-testing/test-package-arvados-node-manager.sh b/build/package-testing/test-package-arvados-node-manager.sh
similarity index 100%
rename from build/jenkins/package-testing/test-package-arvados-node-manager.sh
rename to build/package-testing/test-package-arvados-node-manager.sh
diff --git a/build/jenkins/package-testing/test-package-arvados-sso-server.sh b/build/package-testing/test-package-arvados-sso-server.sh
similarity index 100%
rename from build/jenkins/package-testing/test-package-arvados-sso-server.sh
rename to build/package-testing/test-package-arvados-sso-server.sh
diff --git a/build/jenkins/package-testing/test-package-arvados-workbench.sh b/build/package-testing/test-package-arvados-workbench.sh
similarity index 100%
rename from build/jenkins/package-testing/test-package-arvados-workbench.sh
rename to build/package-testing/test-package-arvados-workbench.sh
diff --git a/build/jenkins/package-testing/test-package-python27-python-arvados-fuse.sh b/build/package-testing/test-package-python27-python-arvados-fuse.sh
similarity index 100%
rename from build/jenkins/package-testing/test-package-python27-python-arvados-fuse.sh
rename to build/package-testing/test-package-python27-python-arvados-fuse.sh
diff --git a/build/jenkins/package-testing/test-package-python27-python-arvados-python-client.sh b/build/package-testing/test-package-python27-python-arvados-python-client.sh
similarity index 100%
rename from build/jenkins/package-testing/test-package-python27-python-arvados-python-client.sh
rename to build/package-testing/test-package-python27-python-arvados-python-client.sh
diff --git a/build/jenkins/package-testing/test-packages-centos6.sh b/build/package-testing/test-packages-centos6.sh
similarity index 100%
rename from build/jenkins/package-testing/test-packages-centos6.sh
rename to build/package-testing/test-packages-centos6.sh
diff --git a/build/jenkins/package-testing/test-packages-debian7.sh b/build/package-testing/test-packages-debian7.sh
similarity index 100%
rename from build/jenkins/package-testing/test-packages-debian7.sh
rename to build/package-testing/test-packages-debian7.sh
diff --git a/build/jenkins/package-testing/test-packages-debian8.sh b/build/package-testing/test-packages-debian8.sh
similarity index 100%
rename from build/jenkins/package-testing/test-packages-debian8.sh
rename to build/package-testing/test-packages-debian8.sh
diff --git a/build/jenkins/package-testing/test-packages-ubuntu1204.sh b/build/package-testing/test-packages-ubuntu1204.sh
similarity index 100%
rename from build/jenkins/package-testing/test-packages-ubuntu1204.sh
rename to build/package-testing/test-packages-ubuntu1204.sh
diff --git a/build/jenkins/package-testing/test-packages-ubuntu1404.sh b/build/package-testing/test-packages-ubuntu1404.sh
similarity index 100%
rename from build/jenkins/package-testing/test-packages-ubuntu1404.sh
rename to build/package-testing/test-packages-ubuntu1404.sh
diff --git a/build/jenkins/rails-package-scripts/README.md b/build/rails-package-scripts/README.md
similarity index 100%
rename from build/jenkins/rails-package-scripts/README.md
rename to build/rails-package-scripts/README.md
diff --git a/build/jenkins/rails-package-scripts/arvados-api-server.sh b/build/rails-package-scripts/arvados-api-server.sh
similarity index 100%
rename from build/jenkins/rails-package-scripts/arvados-api-server.sh
rename to build/rails-package-scripts/arvados-api-server.sh
diff --git a/build/jenkins/rails-package-scripts/arvados-sso-server.sh b/build/rails-package-scripts/arvados-sso-server.sh
similarity index 100%
rename from build/jenkins/rails-package-scripts/arvados-sso-server.sh
rename to build/rails-package-scripts/arvados-sso-server.sh
diff --git a/build/jenkins/rails-package-scripts/arvados-workbench.sh b/build/rails-package-scripts/arvados-workbench.sh
similarity index 100%
rename from build/jenkins/rails-package-scripts/arvados-workbench.sh
rename to build/rails-package-scripts/arvados-workbench.sh
diff --git a/build/jenkins/rails-package-scripts/postinst.sh b/build/rails-package-scripts/postinst.sh
similarity index 100%
rename from build/jenkins/rails-package-scripts/postinst.sh
rename to build/rails-package-scripts/postinst.sh
diff --git a/build/jenkins/rails-package-scripts/postrm.sh b/build/rails-package-scripts/postrm.sh
similarity index 100%
rename from build/jenkins/rails-package-scripts/postrm.sh
rename to build/rails-package-scripts/postrm.sh
diff --git a/build/jenkins/rails-package-scripts/prerm.sh b/build/rails-package-scripts/prerm.sh
similarity index 100%
rename from build/jenkins/rails-package-scripts/prerm.sh
rename to build/rails-package-scripts/prerm.sh
diff --git a/build/jenkins/rails-package-scripts/step2.sh b/build/rails-package-scripts/step2.sh
similarity index 100%
rename from build/jenkins/rails-package-scripts/step2.sh
rename to build/rails-package-scripts/step2.sh
diff --git a/build/jenkins/run-build-docker-images.sh b/build/run-build-docker-images.sh
similarity index 100%
rename from build/jenkins/run-build-docker-images.sh
rename to build/run-build-docker-images.sh
diff --git a/build/jenkins/run-build-docker-jobs-image.sh b/build/run-build-docker-jobs-image.sh
similarity index 100%
rename from build/jenkins/run-build-docker-jobs-image.sh
rename to build/run-build-docker-jobs-image.sh
diff --git a/build/jenkins/run-build-packages-all-targets.sh b/build/run-build-packages-all-targets.sh
similarity index 100%
rename from build/jenkins/run-build-packages-all-targets.sh
rename to build/run-build-packages-all-targets.sh
diff --git a/build/jenkins/run-build-packages-one-target.sh b/build/run-build-packages-one-target.sh
similarity index 100%
rename from build/jenkins/run-build-packages-one-target.sh
rename to build/run-build-packages-one-target.sh
diff --git a/build/jenkins/run-build-packages-sso.sh b/build/run-build-packages-sso.sh
similarity index 100%
rename from build/jenkins/run-build-packages-sso.sh
rename to build/run-build-packages-sso.sh
diff --git a/build/jenkins/run-build-packages.sh b/build/run-build-packages.sh
similarity index 100%
rename from build/jenkins/run-build-packages.sh
rename to build/run-build-packages.sh
diff --git a/build/jenkins/run-library.sh b/build/run-library.sh
similarity index 100%
rename from build/jenkins/run-library.sh
rename to build/run-library.sh
diff --git a/build/jenkins/run-tests.sh b/build/run-tests.sh
similarity index 100%
rename from build/jenkins/run-tests.sh
rename to build/run-tests.sh
commit 7166965c7bda6c727586fb10a00056b3086705f4
Author: Tom Clegg <tom at curoverse.com>
Date:   Tue Mar 8 13:31:22 2016 -0500

    Add 'build/' from commit '2b9b7518a60a71315a1504bf96b3182122bec702'

    git-subtree-dir: build
    git-subtree-mainline: 0a0011c987cbec72c7e13762dbc99b8e19db47c1
    git-subtree-split: 2b9b7518a60a71315a1504bf96b3182122bec702
diff --git a/build/COPYING b/build/COPYING
new file mode 100644
index 0000000..af63e41
--- /dev/null
+++ b/build/COPYING
@@ -0,0 +1,2 @@
+This code is licensed under the GNU Affero General Public License version 3
+(see agpl-3.0.txt)
diff --git a/build/README b/build/README
new file mode 100644
index 0000000..b076f0b
--- /dev/null
+++ b/build/README
@@ -0,0 +1,30 @@
+Welcome to Arvados!
+
+This is the arvados-dev source tree. It contains scripts that can be useful
+if you want to hack on Arvados itself.
+
+If you are interested in using Arvados or setting up your own Arvados
+installation, you most likely do not need this source tree.
+
+For the Arvados source code, check out the git repository at
+ https://github.com/curoverse/arvados
+
+The main Arvados web site is
+ https://arvados.org
+
+The Arvados public wiki is located at
+ https://arvados.org/projects/arvados/wiki
+
+The Arvados public bug tracker is located at
+ https://arvados.org/projects/arvados/issues
+
+For support see
+ http://doc.arvados.org/user/getting_started/community.html
+
+Installation documentation is located at
+ http://doc.arvados.org/install
+
+If you wish to build the documentation yourself, follow the instructions in
+doc/README, then consult the "Install Guide".
+
+See COPYING for information about Arvados Free Software licenses.
diff --git a/build/agpl-3.0.txt b/build/agpl-3.0.txt
new file mode 100644
index 0000000..dba13ed
--- /dev/null
+++ b/build/agpl-3.0.txt
@@ -0,0 +1,661 @@
+ GNU AFFERO GENERAL PUBLIC LICENSE
+ Version 3, 19 November 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The GNU Affero General Public License is a free, copyleft license for
+software and other kinds of works, specifically designed to ensure
+cooperation with the community in the case of network server software.
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+our General Public Licenses are intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ Developers that use our General Public Licenses protect your rights
+with two steps: (1) assert copyright on the software, and (2) offer
+you this License which gives you legal permission to copy, distribute
+and/or modify the software.
+
+ A secondary benefit of defending all users' freedom is that
+improvements made in alternate versions of the program, if they
+receive widespread use, become available for other developers to
+incorporate. Many developers of free software are heartened and
+encouraged by the resulting cooperation. However, in the case of
+software used on network servers, this result may fail to come about.
+The GNU General Public License permits making a modified version and
+letting the public access it on a server without ever releasing its
+source code to the public.
+
+ The GNU Affero General Public License is designed specifically to
+ensure that, in such cases, the modified source code becomes available
+to the community. It requires the operator of a network server to
+provide the source code of the modified version running there to the
+users of that server. Therefore, public use of a modified version, on
+a publicly accessible server, gives the public access to the source
+code of the modified version.
+
+ An older license, called the Affero General Public License and
+published by Affero, was designed to accomplish similar goals. This is
+a different license, not a version of the Affero GPL, but Affero has
+released a new version of the Affero GPL which permits relicensing under
+this license.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU Affero General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+ 13. Remote Network Interaction; Use with the GNU General Public License.
+
+ Notwithstanding any other provision of this License, if you modify the
+Program, your modified version must prominently offer all users
+interacting with it remotely through a computer network (if your version
+supports such interaction) an opportunity to receive the Corresponding
+Source of your version by providing access to the Corresponding Source
+from a network server at no charge, through some standard or customary
+means of facilitating copying of software. This Corresponding Source
+shall include the Corresponding Source for any work covered by version 3
+of the GNU General Public License that is incorporated pursuant to the
+following paragraph.
+
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the work with which it is combined will remain governed by version
+3 of the GNU General Public License.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU Affero General Public License from time to time. Such new versions
+will be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU Affero General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU Affero General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU Affero General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+ <one line to give the program's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU Affero General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU Affero General Public License for more details.
+
+ You should have received a copy of the GNU Affero General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+ If your software can interact with users remotely through a computer
+network, you should also make sure that it provides a way for users to
+get its source. For example, if your program is a web application, its
+interface could display a "Source" link that leads users to an archive
+of the code. There are many ways you could offer source, and different
+solutions will be better for different programs; see section 13 for the
+specific requirements.
+
+ You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU AGPL, see
+<http://www.gnu.org/licenses/>.
diff --git a/build/git/hooks/coding-standards.sh b/build/git/hooks/coding-standards.sh
new file mode 100755
index 0000000..d4e4c71
--- /dev/null
+++ b/build/git/hooks/coding-standards.sh
@@ -0,0 +1,128 @@
+#!/usr/bin/env ruby
+
+# This script can be installed as a git update hook.
+
+# It can also be installed as a gitolite 'hooklet' in the
+# hooks/common/update.secondary.d/ directory.
+
+# NOTE: this script runs under the same assumptions as the 'update' hook, so
+# the starting directory must be maintained and arguments must be passed on.
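+#
+# A minimal installation sketch (the paths below are hypothetical, not taken
+# from this repository):
+#
+#   cp build/git/hooks/coding-standards.sh /path/to/repo.git/hooks/update
+#   chmod +x /path/to/repo.git/hooks/update
+#
+# git invokes the update hook as "hooks/update <refname> <oldrev> <newrev>",
+# which is what the ARGV handling below expects.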
+
+$refname = ARGV[0]
+$oldrev = ARGV[1]
+$newrev = ARGV[2]
+$user = ENV['USER']
+
+def blacklist bl
+ all_revs = `git rev-list #{$oldrev}..#{$newrev}`.split("\n")
+ all_revs.each do |rev|
+ bl.each do |b|
+ if rev == b
+        puts "Revision #{b} is blacklisted; you must remove it from your branch (possibly using git rebase) before you can push."
+ exit 1
+ end
+ end
+ end
+end
+
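+# Reject any push whose history (oldrev..newrev) contains one of these commits: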
+blacklist ['26d74dc0524c87c5dcc0c76040ce413a4848b57a']
+
+# Only enforce policy on the master branch
+exit 0 if $refname != 'refs/heads/master'
+
+puts "Enforcing Policies... \n(#{$refname}) (#{$oldrev[0,6]}) (#{$newrev[0,6]})"
+
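+# Commit message patterns recognized by the checks below: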
+$regex = /\[ref: (\d+)\]/
+
+$broken_commit_message = /Please enter a commit message to explain why this merge is necessary/
+$wrong_way_merge_master = /Merge( remote-tracking)? branch '([^\/]+\/)?master' into/
+$merge_master = /Merge branch '[^']+'((?! into)| into master)/
+$pull_merge = /Merge branch 'master' of /
+$refs_or_closes_or_no_issue = /(refs #|closes #|fixes #|no issue #)/i
+
+# Enforce the custom commit message format.
+def check_message_format
+ all_revs = `git rev-list --first-parent #{$oldrev}..#{$newrev}`.split("\n")
+ merge_revs = `git rev-list --first-parent --min-parents=2 #{$oldrev}..#{$newrev}`.split("\n")
+ # single_revs = `git rev-list --first-parent --max-parents=1 #{$oldrev}..#{$newrev}`.split("\n")
+ broken = false
+ no_ff = false
+
+ merge_revs.each do |rev|
+ message = `git cat-file commit #{rev} | sed '1,/^$/d'`
+ if $wrong_way_merge_master.match(message)
+ puts "\n[POLICY] Only non-fast-forward merges into master are allowed. Please"
+ puts "reset your master branch:"
+ puts " git reset --hard origin/master"
+ puts "and then merge your branch with the --no-ff option:"
+ puts " git merge your-branch --no-ff\n"
+ puts "Remember to add a reference to an issue number in the merge commit!\n"
+ puts "\n******************************************************************\n"
+ puts "\nOffending commit: #{rev}\n"
+ puts "\nOffending commit message:\n"
+ puts message
+ puts "\n******************************************************************\n"
+ puts "\n\n"
+ broken = true
+ no_ff = true
+ elsif $pull_merge.match(message)
+ puts "\n[POLICY] This appears to be a git pull merge of remote master into local"
+ puts "master. In order to maintain a linear first-parent history of master,"
+ puts "please reset your branch and remerge or rebase using the latest master.\n"
+ puts "\n******************************************************************\n"
+ puts "\nOffending commit: #{rev}\n"
+ puts "\nOffending commit message:\n\n"
+ puts message
+ puts "\n******************************************************************\n"
+ puts "\n\n"
+ broken = true
+    elsif not $merge_master.match(message)
+ puts "\n[POLICY] This does not appear to be a merge of a feature"
+ puts "branch into master. Merges must follow the format"
+ puts "\"Merge branch 'feature-branch'\".\n"
+ puts "\n******************************************************************\n"
+ puts "\nOffending commit: #{rev}\n"
+ puts "\nOffending commit message:\n\n"
+ puts message
+ puts "\n******************************************************************\n"
+ puts "\n\n"
+ broken = true
+ end
+ end
+
+ all_revs.each do |rev|
+ message = `git cat-file commit #{rev} | sed '1,/^$/d'`
+ if $broken_commit_message.match(message)
+ puts "\n[POLICY] Rejected broken commit message for including boilerplate"
+ puts "instruction text.\n"
+ puts "\n******************************************************************\n"
+ puts "\nOffending commit: #{rev}\n"
+ puts "\nOffending commit message:\n\n"
+ puts message
+ puts "\n******************************************************************\n"
+ puts "\n\n"
+ broken = true
+ end
+
+ # Do not test when the commit is a no_ff merge (which will be rejected), because
+ # this test will complain about *every* commit in the merge otherwise, obscuring
+ # the real reason for the rejection (the no_ff merge)
+ if not no_ff and not $refs_or_closes_or_no_issue.match(message)
+ puts "\n[POLICY] All commits to master must include an issue using \"refs #\" or"
+ puts "\"closes #\", or specify \"no issue #\"\n"
+ puts "\n******************************************************************\n"
+ puts "\nOffending commit: #{rev}\n"
+ puts "\nOffending commit message:\n\n"
+ puts message
+ puts "\n******************************************************************\n"
+ puts "\n\n"
+ broken = true
+ end
+ end
+
+ if broken
+ exit 1
+ end
+end
+
+check_message_format
diff --git a/build/install/easy-docker-install.sh b/build/install/easy-docker-install.sh
new file mode 100755
index 0000000..fe6e186
--- /dev/null
+++ b/build/install/easy-docker-install.sh
@@ -0,0 +1,87 @@
+#!/usr/bin/env bash
+
+# This script is intended to make Arvados installation easy. It will download the
+# latest copy of the Arvados docker images as well as the arvdock command. It
+# then uses arvdock to spin up Arvados on this computer.
+#
+# The latest version of this script is available at http://get.arvados.org, so that this
+# command does the right thing:
+#
+# $ \curl -sSL http://get.arvados.org | bash
+#
+# Prerequisites: a working Docker installation. Run this script as a user who
+# is a member of the docker group.
+
+COLUMNS=80
+
+fail () {
+ title "$*"
+ exit 1
+}
+
+title () {
+  txt="********** $1 **********"
+  printf "\n%*s\n\n" $(((${#txt}+$COLUMNS)/2)) "$txt"
+}
+
+docker_pull () {
+ $DOCKER pull $*
+
+ ECODE=$?
+
+ if [[ "$ECODE" != "0" ]]; then
+ title "$DOCKER pull $* failed"
+ exit $ECODE
+ fi
+}
+
+main () {
+
+ \which which >/dev/null 2>&1 || fail "Error: could not find 'which' command."
+
+ # find the docker binary
+ DOCKER=`which docker.io`
+
+ if [[ "$DOCKER" == "" ]]; then
+ DOCKER=`which docker`
+ fi
+
+ if [[ "$DOCKER" == "" ]]; then
+ fail "Error: you need to have docker installed. Could not find the docker executable."
+ fi
+
+ echo
+ echo "If necessary, this command will download the latest Arvados docker images."
+ echo "The download can take a long time, depending on the speed of your internet connection."
+ echo "When the images are downloaded, it will then start an Arvados environment on this computer."
+ echo
+ docker_pull arvados/workbench
+ docker_pull arvados/doc
+ docker_pull arvados/keep
+ docker_pull arvados/shell
+ docker_pull arvados/sso
+ docker_pull arvados/compute
+ docker_pull arvados/keepproxy
+ docker_pull arvados/api
+ docker_pull crosbymichael/skydns
+ docker_pull crosbymichael/skydock
+
+ # Now download arvdock and start the containers
+ echo
+ echo Downloading arvdock
+ echo
+ \curl -sSL https://raw.githubusercontent.com/curoverse/arvados/master/docker/arvdock -o arvdock
+ chmod 755 arvdock
+
+ echo
+ echo Starting the docker containers
+ echo
+ ./arvdock start
+
+ echo To stop the containers, run
+ echo
+ echo ./arvdock stop
+ echo
+}
+
+main
diff --git a/build/jenkins/create-plot-data-from-log.sh b/build/jenkins/create-plot-data-from-log.sh
new file mode 100755
index 0000000..ce3bfed
--- /dev/null
+++ b/build/jenkins/create-plot-data-from-log.sh
@@ -0,0 +1,59 @@
+#!/bin/bash
+
+build=$1
+file=$2
+outputdir=$3
+
+usage() {
+  echo "usage: $0 build_number file_to_parse output_dir"
+  echo "This script parses the build output to generate *.csv and *.txt files"
+  echo "for the Jenkins plot plugin (https://github.com/jenkinsci/plot-plugin/)"
+}
+
+if [ $# -ne 3 ]
+then
+ usage
+ exit 1
+fi
+
+if [ ! -e $file ]
+then
+ usage
+ echo "$file doesn't exist! exiting"
+ exit 2
+fi
+if [ ! -w $outputdir ]
+then
+ usage
+ echo "$outputdir isn't writeable! exiting"
+ exit 3
+fi
+
+#------------------------------
+## MAXLINES is the number of lines to read after the pattern is matched
+## (the logfile could be hundreds of thousands of lines long).
+## 1000 should be safe enough to capture all the output of an individual test.
+MAXLINES=1000
+
+## TODO: check $build and $file make sense
+
+for test in \
+ test_Create_and_show_large_collection_with_manifest_text_of_20000000 \
+ test_Create,_show,_and_update_description_for_large_collection_with_manifest_text_of_100000 \
+ test_Create_one_large_collection_of_20000000_and_one_small_collection_of_10000_and_combine_them
+do
+ cleaned_test=$(echo $test | tr -d ",.:;/")
+ (zgrep -i -E -A$MAXLINES "^[A-Za-z0-9]+Test: $test" $file && echo "----") | tail -n +1 | tail --lines=+3|grep -B$MAXLINES -E "^-*$" -m1 > $outputdir/$cleaned_test-$build.txt
+ result=$?
+ if [ $result -eq 0 ]
+ then
+ echo processing $outputdir/$cleaned_test-$build.txt creating $outputdir/$cleaned_test.csv
+ echo $(grep ^Completed $outputdir/$cleaned_test-$build.txt | perl -n -e '/^Completed (.*) in [0-9]+ms.*$/;print "".++$line."-$1,";' | perl -p -e 's/,$//g'|tr " " "_" ) > $outputdir/$cleaned_test.csv
+ echo $(grep ^Completed $outputdir/$cleaned_test-$build.txt | perl -n -e '/^Completed.*in ([0-9]+)ms.*$/;print "$1,";' | perl -p -e 's/,$//g' ) >> $outputdir/$cleaned_test.csv
+ #echo URL=https://ci.curoverse.com/view/job/arvados-api-server/ws/apps/workbench/log/$cleaned_test-$build.txt/*view*/ >> $outputdir/$test.properties
+ else
+    echo "$test wasn't found in $file"
+ cleaned_test=$(echo $test | tr -d ",.:;/")
+ > $outputdir/$cleaned_test.csv
+ fi
+done
diff --git a/build/jenkins/libcloud-pin b/build/jenkins/libcloud-pin
new file mode 100644
index 0000000..3fa07e6
--- /dev/null
+++ b/build/jenkins/libcloud-pin
@@ -0,0 +1 @@
+LIBCLOUD_PIN=0.20.2.dev1
\ No newline at end of file
diff --git a/build/jenkins/package-build-dockerfiles/.gitignore b/build/jenkins/package-build-dockerfiles/.gitignore
new file mode 100644
index 0000000..ceee9fa
--- /dev/null
+++ b/build/jenkins/package-build-dockerfiles/.gitignore
@@ -0,0 +1,2 @@
+*/generated
+common-generated/
diff --git a/build/jenkins/package-build-dockerfiles/Makefile b/build/jenkins/package-build-dockerfiles/Makefile
new file mode 100644
index 0000000..70fbf28
--- /dev/null
+++ b/build/jenkins/package-build-dockerfiles/Makefile
@@ -0,0 +1,29 @@
+all: centos6/generated debian7/generated debian8/generated ubuntu1204/generated ubuntu1404/generated
+
+centos6/generated: common-generated-all
+ test -d centos6/generated || mkdir centos6/generated
+ cp -rlt centos6/generated common-generated/*
+
+debian7/generated: common-generated-all
+ test -d debian7/generated || mkdir debian7/generated
+ cp -rlt debian7/generated common-generated/*
+
+debian8/generated: common-generated-all
+ test -d debian8/generated || mkdir debian8/generated
+ cp -rlt debian8/generated common-generated/*
+
+ubuntu1204/generated: common-generated-all
+ test -d ubuntu1204/generated || mkdir ubuntu1204/generated
+ cp -rlt ubuntu1204/generated common-generated/*
+
+ubuntu1404/generated: common-generated-all
+ test -d ubuntu1404/generated || mkdir ubuntu1404/generated
+ cp -rlt ubuntu1404/generated common-generated/*
+
+common-generated-all: common-generated/golang-amd64.tar.gz
+
+common-generated/golang-amd64.tar.gz: common-generated
+ wget -cqO common-generated/golang-amd64.tar.gz https://storage.googleapis.com/golang/go1.4.2.linux-amd64.tar.gz
+
+common-generated:
+ mkdir common-generated
diff --git a/build/jenkins/package-build-dockerfiles/README b/build/jenkins/package-build-dockerfiles/README
new file mode 100644
index 0000000..0dfab94
--- /dev/null
+++ b/build/jenkins/package-build-dockerfiles/README
@@ -0,0 +1,13 @@
+==================
+DOCKER IMAGE BUILD
+==================
+
+1. `make`
+2. `cd DISTRO`
+3. `docker build -t arvados/build:DISTRO .`
+
+==============
+BUILD PACKAGES
+==============
+
+`docker run -v /path/to/your/arvados-dev/jenkins:/jenkins -v /path/to/your/arvados:/arvados arvados/build:DISTRO`
diff --git a/build/jenkins/package-build-dockerfiles/build-all-build-containers.sh b/build/jenkins/package-build-dockerfiles/build-all-build-containers.sh
new file mode 100755
index 0000000..34ffcce
--- /dev/null
+++ b/build/jenkins/package-build-dockerfiles/build-all-build-containers.sh
@@ -0,0 +1,16 @@
+#!/bin/bash
+
+make
+
+for target in `find -maxdepth 1 -type d |grep -v generated`; do
+ if [[ "$target" == "." ]]; then
+ continue
+ fi
+ target=${target#./}
+ echo $target
+ cd $target
+ docker build -t arvados/build:$target .
+ cd ..
+done
+
+
diff --git a/build/jenkins/package-build-dockerfiles/centos6/Dockerfile b/build/jenkins/package-build-dockerfiles/centos6/Dockerfile
new file mode 100644
index 0000000..cfd94c8
--- /dev/null
+++ b/build/jenkins/package-build-dockerfiles/centos6/Dockerfile
@@ -0,0 +1,31 @@
+FROM centos:6
+MAINTAINER Brett Smith <brett at curoverse.com>
+
+# Install build dependencies provided in base distribution
+RUN yum -q -y install make automake gcc gcc-c++ libyaml-devel patch readline-devel zlib-devel libffi-devel openssl-devel bzip2 libtool bison sqlite-devel rpm-build git perl-ExtUtils-MakeMaker libattr-devel nss-devel libcurl-devel which tar scl-utils centos-release-SCL postgresql-devel
+
+# Install golang binary
+ADD generated/golang-amd64.tar.gz /usr/local/
+RUN ln -s /usr/local/go/bin/go /usr/local/bin/
+
+# Install RVM
+RUN gpg --keyserver pool.sks-keyservers.net --recv-keys D39DC0E3 && \
+ curl -L https://get.rvm.io | bash -s stable && \
+ /usr/local/rvm/bin/rvm install 2.1 && \
+ /usr/local/rvm/bin/rvm alias create default ruby-2.1 && \
+ /usr/local/rvm/bin/rvm-exec default gem install bundler fpm
+
+# Need to "touch" the RPM database to work around a bug in the interaction
+# between overlayfs and yum (https://bugzilla.redhat.com/show_bug.cgi?id=1213602)
+RUN touch /var/lib/rpm/* && yum -q -y install python27 python33
+RUN scl enable python33 "easy_install-3.3 pip" && scl enable python27 "easy_install-2.7 pip"
+
+RUN cd /tmp && \
+ curl -OL 'http://pkgs.repoforge.org/rpmforge-release/rpmforge-release-0.5.3-1.el6.rf.x86_64.rpm' && \
+ rpm -ivh rpmforge-release-0.5.3-1.el6.rf.x86_64.rpm && \
+ sed -i 's/enabled = 0/enabled = 1/' /etc/yum.repos.d/rpmforge.repo
+
+RUN touch /var/lib/rpm/* && yum install --assumeyes git
+
+ENV WORKSPACE /arvados
+CMD ["scl", "enable", "python33", "python27", "/usr/local/rvm/bin/rvm-exec default bash /jenkins/run-build-packages.sh --target centos6"]
diff --git a/build/jenkins/package-build-dockerfiles/debian7/Dockerfile b/build/jenkins/package-build-dockerfiles/debian7/Dockerfile
new file mode 100644
index 0000000..0d04590
--- /dev/null
+++ b/build/jenkins/package-build-dockerfiles/debian7/Dockerfile
@@ -0,0 +1,19 @@
+FROM debian:wheezy
+MAINTAINER Ward Vandewege <ward at curoverse.com>
+
+# Install dependencies and set up system.
+RUN /usr/bin/apt-get update && /usr/bin/apt-get install -q -y python2.7-dev python3 python-setuptools python3-setuptools libcurl4-gnutls-dev curl git procps libattr1-dev libfuse-dev libpq-dev python-pip
+
+# Install RVM
+RUN gpg --keyserver pool.sks-keyservers.net --recv-keys D39DC0E3 && \
+ curl -L https://get.rvm.io | bash -s stable && \
+ /usr/local/rvm/bin/rvm install 2.1 && \
+ /usr/local/rvm/bin/rvm alias create default ruby-2.1 && \
+ /usr/local/rvm/bin/rvm-exec default gem install bundler fpm
+
+# Install golang binary
+ADD generated/golang-amd64.tar.gz /usr/local/
+RUN ln -s /usr/local/go/bin/go /usr/local/bin/
+
+ENV WORKSPACE /arvados
+CMD ["/usr/local/rvm/bin/rvm-exec", "default", "bash", "/jenkins/run-build-packages.sh", "--target", "debian7"]
diff --git a/build/jenkins/package-build-dockerfiles/debian8/Dockerfile b/build/jenkins/package-build-dockerfiles/debian8/Dockerfile
new file mode 100644
index 0000000..fcd390f
--- /dev/null
+++ b/build/jenkins/package-build-dockerfiles/debian8/Dockerfile
@@ -0,0 +1,19 @@
+FROM debian:jessie
+MAINTAINER Ward Vandewege <ward at curoverse.com>
+
+# Install dependencies and set up system.
+RUN /usr/bin/apt-get update && /usr/bin/apt-get install -q -y python2.7-dev python3 python-setuptools python3-setuptools libcurl4-gnutls-dev curl git procps libattr1-dev libfuse-dev libgnutls28-dev libpq-dev python-pip
+
+# Install RVM
+RUN gpg --keyserver pool.sks-keyservers.net --recv-keys D39DC0E3 && \
+ curl -L https://get.rvm.io | bash -s stable && \
+ /usr/local/rvm/bin/rvm install 2.1 && \
+ /usr/local/rvm/bin/rvm alias create default ruby-2.1 && \
+ /usr/local/rvm/bin/rvm-exec default gem install bundler fpm
+
+# Install golang binary
+ADD generated/golang-amd64.tar.gz /usr/local/
+RUN ln -s /usr/local/go/bin/go /usr/local/bin/
+
+ENV WORKSPACE /arvados
+CMD ["/usr/local/rvm/bin/rvm-exec", "default", "bash", "/jenkins/run-build-packages.sh", "--target", "debian8"]
diff --git a/build/jenkins/package-build-dockerfiles/ubuntu1204/Dockerfile b/build/jenkins/package-build-dockerfiles/ubuntu1204/Dockerfile
new file mode 100644
index 0000000..158053c
--- /dev/null
+++ b/build/jenkins/package-build-dockerfiles/ubuntu1204/Dockerfile
@@ -0,0 +1,19 @@
+FROM ubuntu:precise
+MAINTAINER Ward Vandewege <ward at curoverse.com>
+
+# Install dependencies and set up system.
+RUN /usr/bin/apt-get update && /usr/bin/apt-get install -q -y python2.7-dev python3 python-setuptools python3-setuptools libcurl4-gnutls-dev curl git libattr1-dev libfuse-dev libpq-dev python-pip build-essential
+
+# Install RVM
+RUN gpg --keyserver pool.sks-keyservers.net --recv-keys D39DC0E3 && \
+ curl -L https://get.rvm.io | bash -s stable && \
+ /usr/local/rvm/bin/rvm install 2.1 && \
+ /usr/local/rvm/bin/rvm alias create default ruby-2.1 && \
+ /usr/local/rvm/bin/rvm-exec default gem install bundler fpm
+
+# Install golang binary
+ADD generated/golang-amd64.tar.gz /usr/local/
+RUN ln -s /usr/local/go/bin/go /usr/local/bin/
+
+ENV WORKSPACE /arvados
+CMD ["/usr/local/rvm/bin/rvm-exec", "default", "bash", "/jenkins/run-build-packages.sh", "--target", "ubuntu1204"]
diff --git a/build/jenkins/package-build-dockerfiles/ubuntu1404/Dockerfile b/build/jenkins/package-build-dockerfiles/ubuntu1404/Dockerfile
new file mode 100644
index 0000000..0b8ee7a
--- /dev/null
+++ b/build/jenkins/package-build-dockerfiles/ubuntu1404/Dockerfile
@@ -0,0 +1,19 @@
+FROM ubuntu:trusty
+MAINTAINER Brett Smith <brett at curoverse.com>
+
+# Install dependencies and set up system.
+RUN /usr/bin/apt-get update && /usr/bin/apt-get install -q -y python2.7-dev python3 python-setuptools python3-setuptools libcurl4-gnutls-dev curl git libattr1-dev libfuse-dev libpq-dev python-pip
+
+# Install RVM
+RUN gpg --keyserver pool.sks-keyservers.net --recv-keys D39DC0E3 && \
+ curl -L https://get.rvm.io | bash -s stable && \
+ /usr/local/rvm/bin/rvm install 2.1 && \
+ /usr/local/rvm/bin/rvm alias create default ruby-2.1 && \
+ /usr/local/rvm/bin/rvm-exec default gem install bundler fpm
+
+# Install golang binary
+ADD generated/golang-amd64.tar.gz /usr/local/
+RUN ln -s /usr/local/go/bin/go /usr/local/bin/
+
+ENV WORKSPACE /arvados
+CMD ["/usr/local/rvm/bin/rvm-exec", "default", "bash", "/jenkins/run-build-packages.sh", "--target", "ubuntu1404"]
diff --git a/build/jenkins/package-test-dockerfiles/centos6/Dockerfile b/build/jenkins/package-test-dockerfiles/centos6/Dockerfile
new file mode 100644
index 0000000..69927a1
--- /dev/null
+++ b/build/jenkins/package-test-dockerfiles/centos6/Dockerfile
@@ -0,0 +1,20 @@
+FROM centos:6
+MAINTAINER Peter Amstutz <peter.amstutz at curoverse.com>
+
+RUN yum -q install --assumeyes scl-utils centos-release-SCL \
+ which tar
+
+# Install RVM
+RUN touch /var/lib/rpm/* && \
+ gpg --keyserver pool.sks-keyservers.net --recv-keys D39DC0E3 && \
+ curl -L https://get.rvm.io | bash -s stable && \
+ /usr/local/rvm/bin/rvm install 2.1 && \
+ /usr/local/rvm/bin/rvm alias create default ruby-2.1 && \
+    /usr/local/rvm/bin/rvm-exec default gem install bundler fpm
+
+RUN cd /tmp && \
+ curl -OL 'http://pkgs.repoforge.org/rpmforge-release/rpmforge-release-0.5.3-1.el6.rf.x86_64.rpm' && \
+ rpm -ivh rpmforge-release-0.5.3-1.el6.rf.x86_64.rpm && \
+ sed -i 's/enabled = 0/enabled = 1/' /etc/yum.repos.d/rpmforge.repo
+
+COPY localrepo.repo /etc/yum.repos.d/localrepo.repo
\ No newline at end of file
diff --git a/build/jenkins/package-test-dockerfiles/centos6/localrepo.repo b/build/jenkins/package-test-dockerfiles/centos6/localrepo.repo
new file mode 100644
index 0000000..ac6b898
--- /dev/null
+++ b/build/jenkins/package-test-dockerfiles/centos6/localrepo.repo
@@ -0,0 +1,5 @@
+[localrepo]
+name=Arvados Test
+baseurl=file:///arvados/packages/centos6
+gpgcheck=0
+enabled=1
diff --git a/build/jenkins/package-test-dockerfiles/debian7/Dockerfile b/build/jenkins/package-test-dockerfiles/debian7/Dockerfile
new file mode 100644
index 0000000..c9a2fdc
--- /dev/null
+++ b/build/jenkins/package-test-dockerfiles/debian7/Dockerfile
@@ -0,0 +1,14 @@
+FROM debian:7
+MAINTAINER Peter Amstutz <peter.amstutz at curoverse.com>
+
+# Install RVM
+RUN apt-get update && apt-get -y install curl procps && \
+ gpg --keyserver pool.sks-keyservers.net --recv-keys D39DC0E3 && \
+ curl -L https://get.rvm.io | bash -s stable && \
+ /usr/local/rvm/bin/rvm install 2.1 && \
+ /usr/local/rvm/bin/rvm alias create default ruby-2.1
+
+# udev daemon can't start in a container, so don't try.
+RUN mkdir -p /etc/udev/disabled
+
+RUN echo "deb file:///arvados/packages/debian7/ /" >>/etc/apt/sources.list
diff --git a/build/jenkins/package-test-dockerfiles/debian8/Dockerfile b/build/jenkins/package-test-dockerfiles/debian8/Dockerfile
new file mode 100644
index 0000000..cde1847
--- /dev/null
+++ b/build/jenkins/package-test-dockerfiles/debian8/Dockerfile
@@ -0,0 +1,14 @@
+FROM debian:8
+MAINTAINER Peter Amstutz <peter.amstutz at curoverse.com>
+
+# Install RVM
+RUN apt-get update && apt-get -y install curl && \
+ gpg --keyserver pool.sks-keyservers.net --recv-keys D39DC0E3 && \
+ curl -L https://get.rvm.io | bash -s stable && \
+ /usr/local/rvm/bin/rvm install 2.1 && \
+ /usr/local/rvm/bin/rvm alias create default ruby-2.1
+
+# udev daemon can't start in a container, so don't try.
+RUN mkdir -p /etc/udev/disabled
+
+RUN echo "deb file:///arvados/packages/debian8/ /" >>/etc/apt/sources.list
diff --git a/build/jenkins/package-test-dockerfiles/ubuntu1204/Dockerfile b/build/jenkins/package-test-dockerfiles/ubuntu1204/Dockerfile
new file mode 100644
index 0000000..0cb77c8
--- /dev/null
+++ b/build/jenkins/package-test-dockerfiles/ubuntu1204/Dockerfile
@@ -0,0 +1,14 @@
+FROM ubuntu:precise
+MAINTAINER Peter Amstutz <peter.amstutz at curoverse.com>
+
+# Install RVM
+RUN apt-get update && apt-get -y install curl && \
+ gpg --keyserver pool.sks-keyservers.net --recv-keys D39DC0E3 && \
+ curl -L https://get.rvm.io | bash -s stable && \
+ /usr/local/rvm/bin/rvm install 2.1 && \
+ /usr/local/rvm/bin/rvm alias create default ruby-2.1
+
+# udev daemon can't start in a container, so don't try.
+RUN mkdir -p /etc/udev/disabled
+
+RUN echo "deb file:///arvados/packages/ubuntu1204/ /" >>/etc/apt/sources.list
\ No newline at end of file
diff --git a/build/jenkins/package-test-dockerfiles/ubuntu1404/Dockerfile b/build/jenkins/package-test-dockerfiles/ubuntu1404/Dockerfile
new file mode 100644
index 0000000..6c4d0e9
--- /dev/null
+++ b/build/jenkins/package-test-dockerfiles/ubuntu1404/Dockerfile
@@ -0,0 +1,14 @@
+FROM ubuntu:trusty
+MAINTAINER Peter Amstutz <peter.amstutz at curoverse.com>
+
+# Install RVM
+RUN apt-get update && apt-get -y install curl && \
+ gpg --keyserver pool.sks-keyservers.net --recv-keys D39DC0E3 && \
+ curl -L https://get.rvm.io | bash -s stable && \
+ /usr/local/rvm/bin/rvm install 2.1 && \
+ /usr/local/rvm/bin/rvm alias create default ruby-2.1
+
+# udev daemon can't start in a container, so don't try.
+RUN mkdir -p /etc/udev/disabled
+
+RUN echo "deb file:///arvados/packages/ubuntu1404/ /" >>/etc/apt/sources.list
\ No newline at end of file
diff --git a/build/jenkins/package-testing/common-test-packages.sh b/build/jenkins/package-testing/common-test-packages.sh
new file mode 100755
index 0000000..2dc67ab
--- /dev/null
+++ b/build/jenkins/package-testing/common-test-packages.sh
@@ -0,0 +1,28 @@
+#!/bin/sh
+
+set -eu
+
+FAIL=0
+
+echo
+
+while read so && [ -n "$so" ]; do
+ if ldd "$so" | grep "not found" ; then
+ echo "^^^ Missing while scanning $so ^^^"
+ FAIL=1
+ fi
+done <<EOF
+$(find -name '*.so')
+EOF
+
+if test -x "/jenkins/package-testing/test-package-$1.sh" ; then
+ if ! "/jenkins/package-testing/test-package-$1.sh" ; then
+ FAIL=1
+ fi
+fi
+
+if test $FAIL = 0 ; then
+ echo "Package $1 passed"
+fi
+
+exit $FAIL
diff --git a/build/jenkins/package-testing/deb-common-test-packages.sh b/build/jenkins/package-testing/deb-common-test-packages.sh
new file mode 100755
index 0000000..5f32a60
--- /dev/null
+++ b/build/jenkins/package-testing/deb-common-test-packages.sh
@@ -0,0 +1,37 @@
+#!/bin/bash
+
+set -eu
+
+# Multiple .deb based distros symlink to this script, so extract the target
+# from the invocation path.
+target=$(echo $0 | sed 's/.*test-packages-\([^.]*\)\.sh.*/\1/')
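+# (e.g. when this script is invoked through the test-packages-debian8.sh
+# symlink, $target resolves to "debian8")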
+
+export ARV_PACKAGES_DIR="/arvados/packages/$target"
+
+dpkg-query --show > "$ARV_PACKAGES_DIR/$1.before"
+
+apt-get -qq update
+apt-get --assume-yes --force-yes install "$1"
+
+dpkg-query --show > "$ARV_PACKAGES_DIR/$1.after"
+
+set +e
+diff "$ARV_PACKAGES_DIR/$1.before" "$ARV_PACKAGES_DIR/$1.after" > "$ARV_PACKAGES_DIR/$1.diff"
+set -e
+
+mkdir -p /tmp/opts
+cd /tmp/opts
+
+export ARV_PACKAGES_DIR="/arvados/packages/$target"
+
+dpkg-deb -x $(ls -t "$ARV_PACKAGES_DIR/$1"_*.deb | head -n1) .
+
+while read so && [ -n "$so" ]; do
+ echo
+  echo "== Package dependencies for $so =="
+ ldd "$so" | awk '($3 ~ /^\//){print $3}' | sort -u | xargs dpkg -S | cut -d: -f1 | sort -u
+done <<EOF
+$(find -name '*.so')
+EOF
+
+exec /jenkins/package-testing/common-test-packages.sh "$1"
diff --git a/build/jenkins/package-testing/test-package-arvados-api-server.sh b/build/jenkins/package-testing/test-package-arvados-api-server.sh
new file mode 100755
index 0000000..e975448
--- /dev/null
+++ b/build/jenkins/package-testing/test-package-arvados-api-server.sh
@@ -0,0 +1,20 @@
+#!/bin/sh
+set -e
+cd /var/www/arvados-api/current/
+
+case "$TARGET" in
+ debian*|ubuntu*)
+ apt-get install -y nginx
+ dpkg-reconfigure arvados-api-server
+ ;;
+ centos6)
+ yum install --assumeyes httpd
+ yum reinstall --assumeyes arvados-api-server
+ ;;
+ *)
+ echo -e "$0: Unknown target '$TARGET'.\n" >&2
+ exit 1
+ ;;
+esac
+
+/usr/local/rvm/bin/rvm-exec default bundle list >"$ARV_PACKAGES_DIR/arvados-api-server.gems"
diff --git a/build/jenkins/package-testing/test-package-arvados-node-manager.sh b/build/jenkins/package-testing/test-package-arvados-node-manager.sh
new file mode 100755
index 0000000..2f416d1
--- /dev/null
+++ b/build/jenkins/package-testing/test-package-arvados-node-manager.sh
@@ -0,0 +1,7 @@
+#!/bin/sh
+exec python <<EOF
+import libcloud.compute.types
+import libcloud.compute.providers
+libcloud.compute.providers.get_driver(libcloud.compute.types.Provider.AZURE_ARM)
+print "Successfully imported compatible libcloud library"
+EOF
diff --git a/build/jenkins/package-testing/test-package-arvados-sso-server.sh b/build/jenkins/package-testing/test-package-arvados-sso-server.sh
new file mode 100755
index 0000000..c1a377e
--- /dev/null
+++ b/build/jenkins/package-testing/test-package-arvados-sso-server.sh
@@ -0,0 +1,172 @@
+#!/bin/bash
+
+set -e
+
+EXITCODE=0
+DEBUG=${ARVADOS_DEBUG:-0}
+
+STDOUT_IF_DEBUG=/dev/null
+STDERR_IF_DEBUG=/dev/null
+DASHQ_UNLESS_DEBUG=-q
+if [[ "$DEBUG" != 0 ]]; then
+ STDOUT_IF_DEBUG=/dev/stdout
+ STDERR_IF_DEBUG=/dev/stderr
+ DASHQ_UNLESS_DEBUG=
+fi
+
+case "$TARGET" in
+ debian*|ubuntu*)
+ FORMAT=deb
+ ;;
+ centos6)
+ FORMAT=rpm
+ ;;
+ *)
+ echo -e "$0: Unknown target '$TARGET'.\n" >&2
+ exit 1
+ ;;
+esac
+
+if ! [[ -n "$WORKSPACE" ]]; then
+ echo >&2 "$helpmessage"
+ echo >&2
+ echo >&2 "Error: WORKSPACE environment variable not set"
+ echo >&2
+ exit 1
+fi
+
+if ! [[ -d "$WORKSPACE" ]]; then
+ echo >&2 "$helpmessage"
+ echo >&2
+ echo >&2 "Error: $WORKSPACE is not a directory"
+ echo >&2
+ exit 1
+fi
+
+title () {
+ txt="********** $1 **********"
+ printf "\n%*s%s\n\n" $((($COLUMNS-${#txt})/2)) "" "$txt"
+}
+
+checkexit() {
+ if [[ "$1" != "0" ]]; then
+ title "!!!!!! $2 FAILED !!!!!!"
+ fi
+}
+
+
+# Find the SSO server package
+
+cd "$WORKSPACE"
+
+if [[ ! -d "/var/www/arvados-sso" ]]; then
+ echo "/var/www/arvados-sso should exist"
+ exit 1
+fi
+
+if [[ ! -e "/etc/arvados/sso/application.yml" ]]; then
+ mkdir -p /etc/arvados/sso/
+ RANDOM_PASSWORD=`date | md5sum |cut -f1 -d' '`
+ cp config/application.yml.example /etc/arvados/sso/application.yml
+ sed -i -e 's/uuid_prefix: ~/uuid_prefix: zzzzz/' /etc/arvados/sso/application.yml
+ sed -i -e "s/secret_token: ~/secret_token: $RANDOM_PASSWORD/" /etc/arvados/sso/application.yml
+fi
+
+if [[ ! -e "/etc/arvados/sso/database.yml" ]]; then
+ # We need to set up our database configuration now.
+ if [[ "$FORMAT" == "rpm" ]]; then
+ # postgres packaging on CentOS6 is kind of primitive, needs an initdb
+ service postgresql initdb
+ if [ "$TARGET" = "centos6" ]; then
+ sed -i -e "s/127.0.0.1\/32 ident/127.0.0.1\/32 md5/" /var/lib/pgsql/data/pg_hba.conf
+ sed -i -e "s/::1\/128 ident/::1\/128 md5/" /var/lib/pgsql/data/pg_hba.conf
+ fi
+ fi
+ service postgresql start
+
+ RANDOM_PASSWORD=`date | md5sum |cut -f1 -d' '`
+ cat >/etc/arvados/sso/database.yml <<EOF
+production:
+ adapter: postgresql
+ encoding: utf8
+ database: sso_provider_production
+ username: sso_provider_user
+ password: $RANDOM_PASSWORD
+ host: localhost
+EOF
+
+ su postgres -c "psql -c \"CREATE USER sso_provider_user WITH PASSWORD '$RANDOM_PASSWORD'\""
+ su postgres -c "createdb sso_provider_production -O sso_provider_user"
+fi
+
+if [[ "$FORMAT" == "deb" ]]; then
+ # Test 2: the package should reconfigure cleanly
+ dpkg-reconfigure arvados-sso-server || EXITCODE=3
+
+ cd /var/www/arvados-sso/current/
+ /usr/local/rvm/bin/rvm-exec default bundle list >"$ARV_PACKAGES_DIR/arvados-sso-server.gems"
+
+ # Test 3: the package should remove cleanly
+ apt-get remove arvados-sso-server --yes || EXITCODE=3
+
+ checkexit $EXITCODE "apt-get remove arvados-sso-server --yes"
+
+ # Test 4: the package configuration should remove cleanly
+ dpkg --purge arvados-sso-server || EXITCODE=4
+
+ checkexit $EXITCODE "dpkg --purge arvados-sso-server"
+
+ if [[ -e "/var/www/arvados-sso" ]]; then
+ EXITCODE=4
+ fi
+
+ checkexit $EXITCODE "leftover items under /var/www/arvados-sso"
+
+ # Test 5: the package should remove cleanly with --purge
+
+ apt-get remove arvados-sso-server --purge --yes || EXITCODE=5
+
+ checkexit $EXITCODE "apt-get remove arvados-sso-server --purge --yes"
+
+ if [[ -e "/var/www/arvados-sso" ]]; then
+ EXITCODE=5
+ fi
+
+ checkexit $EXITCODE "leftover items under /var/www/arvados-sso"
+
+elif [[ "$FORMAT" == "rpm" ]]; then
+
+ # Set up Nginx first
+ # (courtesy of https://www.phusionpassenger.com/library/walkthroughs/deploy/ruby/ownserver/nginx/oss/el6/install_passenger.html)
+ yum install -q -y epel-release pygpgme curl
+ curl --fail -sSLo /etc/yum.repos.d/passenger.repo https://oss-binaries.phusionpassenger.com/yum/definitions/el-passenger.repo
+ yum install -q -y nginx passenger
+ sed -i -e 's/^# passenger/passenger/' /etc/nginx/conf.d/passenger.conf
+ # Done setting up Nginx
+
+ # Test 2: the package should reinstall cleanly
+ yum --assumeyes reinstall arvados-sso-server || EXITCODE=3
+
+ cd /var/www/arvados-sso/current/
+ /usr/local/rvm/bin/rvm-exec default bundle list >$ARV_PACKAGES_DIR/arvados-sso-server.gems
+
+ # Test 3: the package should remove cleanly
+ yum -q -y remove arvados-sso-server || EXITCODE=3
+
+ checkexit $EXITCODE "yum -q -y remove arvados-sso-server"
+
+ if [[ -e "/var/www/arvados-sso" ]]; then
+ EXITCODE=3
+ fi
+
+ checkexit $EXITCODE "leftover items under /var/www/arvados-sso"
+
+fi
+
+if [[ "$EXITCODE" == "0" ]]; then
+ echo "Testing complete, no errors!"
+else
+ echo "Errors while testing!"
+fi
+
+exit $EXITCODE
diff --git a/build/jenkins/package-testing/test-package-arvados-workbench.sh b/build/jenkins/package-testing/test-package-arvados-workbench.sh
new file mode 100755
index 0000000..1be4dea
--- /dev/null
+++ b/build/jenkins/package-testing/test-package-arvados-workbench.sh
@@ -0,0 +1,20 @@
+#!/bin/sh
+set -e
+cd /var/www/arvados-workbench/current/
+
+case "$TARGET" in
+ debian*|ubuntu*)
+ apt-get install -y nginx
+ dpkg-reconfigure arvados-workbench
+ ;;
+ centos6)
+ yum install --assumeyes httpd
+ yum reinstall --assumeyes arvados-workbench
+ ;;
+ *)
+ echo -e "$0: Unknown target '$TARGET'.\n" >&2
+ exit 1
+ ;;
+esac
+
+/usr/local/rvm/bin/rvm-exec default bundle list >"$ARV_PACKAGES_DIR/arvados-workbench.gems"
diff --git a/build/jenkins/package-testing/test-package-python27-python-arvados-fuse.sh b/build/jenkins/package-testing/test-package-python27-python-arvados-fuse.sh
new file mode 100755
index 0000000..1654be9
--- /dev/null
+++ b/build/jenkins/package-testing/test-package-python27-python-arvados-fuse.sh
@@ -0,0 +1,6 @@
+#!/bin/sh
+
+exec python <<EOF
+import arvados_fuse
+print "Successfully imported arvados_fuse"
+EOF
diff --git a/build/jenkins/package-testing/test-package-python27-python-arvados-python-client.sh b/build/jenkins/package-testing/test-package-python27-python-arvados-python-client.sh
new file mode 100755
index 0000000..0772fbf
--- /dev/null
+++ b/build/jenkins/package-testing/test-package-python27-python-arvados-python-client.sh
@@ -0,0 +1,6 @@
+#!/bin/sh
+
+exec python <<EOF
+import arvados
+print "Successfully imported arvados"
+EOF
diff --git a/build/jenkins/package-testing/test-packages-centos6.sh b/build/jenkins/package-testing/test-packages-centos6.sh
new file mode 100755
index 0000000..4e05364
--- /dev/null
+++ b/build/jenkins/package-testing/test-packages-centos6.sh
@@ -0,0 +1,44 @@
+#!/bin/bash
+
+set -eu
+
+yum -q clean all
+touch /var/lib/rpm/*
+
+export ARV_PACKAGES_DIR=/arvados/packages/centos6
+
+rpm -qa | sort > "$ARV_PACKAGES_DIR/$1.before"
+
+yum install --assumeyes $1
+
+rpm -qa | sort > "$ARV_PACKAGES_DIR/$1.after"
+
+set +e
+diff "$ARV_PACKAGES_DIR/$1.before" "$ARV_PACKAGES_DIR/$1.after" >"$ARV_PACKAGES_DIR/$1.diff"
+set -e
+
+SCL=""
+if scl enable python27 true 2>/dev/null ; then
+ SCL="scl enable python27"
+fi
+
+mkdir -p /tmp/opts
+cd /tmp/opts
+
+rpm2cpio $(ls -t "$ARV_PACKAGES_DIR/$1"-*.rpm | head -n1) | cpio -idm 2>/dev/null
+
+shared=$(find -name '*.so')
+if test -n "$shared" ; then
+ for so in $shared ; do
+ echo
+    echo "== Package dependencies for $so =="
+ $SCL ldd "$so" \
+ | awk '($3 ~ /^\//){print $3}' | sort -u | xargs rpm -qf | sort -u
+ done
+fi
+
+if test -n "$SCL" ; then
+ exec $SCL "/jenkins/package-testing/common-test-packages.sh '$1'"
+else
+ exec /jenkins/package-testing/common-test-packages.sh "$1"
+fi
diff --git a/build/jenkins/package-testing/test-packages-debian7.sh b/build/jenkins/package-testing/test-packages-debian7.sh
new file mode 120000
index 0000000..54ce94c
--- /dev/null
+++ b/build/jenkins/package-testing/test-packages-debian7.sh
@@ -0,0 +1 @@
+deb-common-test-packages.sh
\ No newline at end of file
diff --git a/build/jenkins/package-testing/test-packages-debian8.sh b/build/jenkins/package-testing/test-packages-debian8.sh
new file mode 120000
index 0000000..54ce94c
--- /dev/null
+++ b/build/jenkins/package-testing/test-packages-debian8.sh
@@ -0,0 +1 @@
+deb-common-test-packages.sh
\ No newline at end of file
diff --git a/build/jenkins/package-testing/test-packages-ubuntu1204.sh b/build/jenkins/package-testing/test-packages-ubuntu1204.sh
new file mode 120000
index 0000000..54ce94c
--- /dev/null
+++ b/build/jenkins/package-testing/test-packages-ubuntu1204.sh
@@ -0,0 +1 @@
+deb-common-test-packages.sh
\ No newline at end of file
diff --git a/build/jenkins/package-testing/test-packages-ubuntu1404.sh b/build/jenkins/package-testing/test-packages-ubuntu1404.sh
new file mode 120000
index 0000000..54ce94c
--- /dev/null
+++ b/build/jenkins/package-testing/test-packages-ubuntu1404.sh
@@ -0,0 +1 @@
+deb-common-test-packages.sh
\ No newline at end of file
diff --git a/build/jenkins/rails-package-scripts/README.md b/build/jenkins/rails-package-scripts/README.md
new file mode 100644
index 0000000..3a93c31
--- /dev/null
+++ b/build/jenkins/rails-package-scripts/README.md
@@ -0,0 +1,14 @@
+When run-build-packages.sh builds a Rails package, it generates the package's preinst/postinst/prerm/postrm scripts by concatenating:
+
+1. package_name.sh, which defines variables for where the package's files live and human-readable names for them.
+2. step2.sh, which uses those variables to define some utility variables and set defaults for things that aren't set.
+3. stepname.sh (postinst.sh, prerm.sh, etc.), which uses all of this information to do the actual work.
+
+Since our build process is a tower of shell scripts, concatenating files seemed like the least bad option for sharing code between these files and packages. More advanced code generation would have been too much trouble to integrate into our build process at this time, and injecting portions of files into other files seemed error-prone and likely to introduce bugs into the end result.
+
+postinst.sh lets the early parts define a few hooks to control behavior:
+
+* After it installs the core configuration files (database.yml, application.yml, and production.rb) to the package's config directory under /etc/arvados, it calls setup_extra_conffiles. By default this is a noop function (in step2.sh). API server defines this to set up the old omniauth.rb conffile.
+* Before it restarts nginx, it calls setup_before_nginx_restart. By default this is a noop function (in step2.sh). API server defines this to set up the internal git repository, if necessary.
+* $RAILSPKG_DATABASE_LOAD_TASK defines the Rake task to load the database. API server uses db:structure:load. SSO server uses db:schema:load. Workbench doesn't set this, which causes the postinst to skip all database work.
+* If $RAILSPKG_SUPPORTS_CONFIG_CHECK != 1, it won't run the config:check rake task. SSO clears this flag (it doesn't have that task code).
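+
+As an illustrative sketch only (the real assembly happens inside
+run-build-packages.sh and its fpm invocation, which may differ in detail),
+the concatenation described above amounts to roughly:
+
+    # hypothetical assembly of the API server postinst from the pieces above
+    cat arvados-api-server.sh step2.sh postinst.sh > assembled-postinst.sh
+    # fpm is then pointed at the assembled script (e.g. via --after-install)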
diff --git a/build/jenkins/rails-package-scripts/arvados-api-server.sh b/build/jenkins/rails-package-scripts/arvados-api-server.sh
new file mode 100644
index 0000000..c2b99f0
--- /dev/null
+++ b/build/jenkins/rails-package-scripts/arvados-api-server.sh
@@ -0,0 +1,32 @@
+#!/bin/sh
+# This file declares variables common to all scripts for one Rails package.
+
+PACKAGE_NAME=arvados-api-server
+INSTALL_PATH=/var/www/arvados-api
+CONFIG_PATH=/etc/arvados/api
+DOC_URL="http://doc.arvados.org/install/install-api-server.html#configure"
+
+RAILSPKG_DATABASE_LOAD_TASK=db:structure:load
+setup_extra_conffiles() {
+ setup_conffile initializers/omniauth.rb
+}
+
+setup_before_nginx_restart() {
+ # initialize git_internal_dir
+ # usually /var/lib/arvados/internal.git (set in application.default.yml )
+ if [ "$APPLICATION_READY" = "1" ]; then
+ GIT_INTERNAL_DIR=$($COMMAND_PREFIX bundle exec rake config:check 2>&1 | grep git_internal_dir | awk '{ print $2 }')
+ if [ ! -e "$GIT_INTERNAL_DIR" ]; then
+ run_and_report "Creating git_internal_dir '$GIT_INTERNAL_DIR'" \
+ mkdir -p "$GIT_INTERNAL_DIR"
+ run_and_report "Initializing git_internal_dir '$GIT_INTERNAL_DIR'" \
+ git init --quiet --bare $GIT_INTERNAL_DIR
+ else
+ echo "Initializing git_internal_dir $GIT_INTERNAL_DIR: directory exists, skipped."
+ fi
+ run_and_report "Making sure '$GIT_INTERNAL_DIR' has the right permission" \
+ chown -R "$WWW_OWNER:" "$GIT_INTERNAL_DIR"
+ else
+ echo "Initializing git_internal_dir... skipped."
+ fi
+}
diff --git a/build/jenkins/rails-package-scripts/arvados-sso-server.sh b/build/jenkins/rails-package-scripts/arvados-sso-server.sh
new file mode 100644
index 0000000..10b2ee2
--- /dev/null
+++ b/build/jenkins/rails-package-scripts/arvados-sso-server.sh
@@ -0,0 +1,9 @@
+#!/bin/sh
+# This file declares variables common to all scripts for one Rails package.
+
+PACKAGE_NAME=arvados-sso-server
+INSTALL_PATH=/var/www/arvados-sso
+CONFIG_PATH=/etc/arvados/sso
+DOC_URL="http://doc.arvados.org/install/install-sso.html#configure"
+RAILSPKG_DATABASE_LOAD_TASK=db:schema:load
+RAILSPKG_SUPPORTS_CONFIG_CHECK=0
diff --git a/build/jenkins/rails-package-scripts/arvados-workbench.sh b/build/jenkins/rails-package-scripts/arvados-workbench.sh
new file mode 100644
index 0000000..f2b8a56
--- /dev/null
+++ b/build/jenkins/rails-package-scripts/arvados-workbench.sh
@@ -0,0 +1,7 @@
+#!/bin/sh
+# This file declares variables common to all scripts for one Rails package.
+
+PACKAGE_NAME=arvados-workbench
+INSTALL_PATH=/var/www/arvados-workbench
+CONFIG_PATH=/etc/arvados/workbench
+DOC_URL="http://doc.arvados.org/install/install-workbench-app.html#configure"
diff --git a/build/jenkins/rails-package-scripts/postinst.sh b/build/jenkins/rails-package-scripts/postinst.sh
new file mode 100644
index 0000000..6fac26b
--- /dev/null
+++ b/build/jenkins/rails-package-scripts/postinst.sh
@@ -0,0 +1,251 @@
+#!/bin/sh
+# This code runs after package variable definitions and step2.sh.
+
+set -e
+
+DATABASE_READY=1
+APPLICATION_READY=1
+
+if [ -s "$HOME/.rvm/scripts/rvm" ] || [ -s "/usr/local/rvm/scripts/rvm" ]; then
+ COMMAND_PREFIX="/usr/local/rvm/bin/rvm-exec default"
+else
+ COMMAND_PREFIX=
+fi
+
+report_not_ready() {
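+  # Usage: report_not_ready READY_FLAG CONFIG_FILE_PATH
+  # e.g.: report_not_ready "$DATABASE_READY" "$CONFIG_PATH/database.yml"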
+ local ready_flag="$1"; shift
+ local config_file="$1"; shift
+ if [ "1" != "$ready_flag" ]; then cat >&2 <<EOF
+
+PLEASE NOTE:
+
+The $PACKAGE_NAME package was not configured completely because
+$config_file needs some tweaking.
+Please refer to the documentation at
+<$DOC_URL> for more details.
+
+When $(basename "$config_file") has been modified,
+reconfigure or reinstall this package.
+
+EOF
+ fi
+}
+
+report_web_service_warning() {
+ local warning="$1"; shift
+ cat >&2 <<EOF
+
+WARNING: $warning.
+
+To override, set the WEB_SERVICE environment variable to the name of the service
+hosting the Rails server.
+
+Then, on Debian-based systems, reconfigure this package with dpkg-reconfigure.
+
+On RPM-based systems, reinstall this package.
+
+EOF
+}
+
+run_and_report() {
+ # Usage: run_and_report ACTION_MSG CMD
+ # This is the usual wrapper that prints ACTION_MSG, runs CMD, then writes
+ # a message about whether CMD succeeded or failed. Returns the exit code
+ # of CMD.
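+  # e.g.: run_and_report "Running db:migrate" $COMMAND_PREFIX bundle exec rake db:migrate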
+ local action_message="$1"; shift
+ local retcode=0
+ echo -n "$action_message..."
+ if "$@"; then
+ echo " done."
+ else
+ retcode=$?
+ echo " failed."
+ fi
+ return $retcode
+}
+
+setup_confdirs() {
+ for confdir in "$@"; do
+ if [ ! -d "$confdir" ]; then
+ install -d -g "$WWW_OWNER" -m 0750 "$confdir"
+ fi
+ done
+}
+
+setup_conffile() {
+ # Usage: setup_conffile CONFFILE_PATH [SOURCE_PATH]
+ # Both paths are relative to RELEASE_CONFIG_PATH.
+ # This function will try to safely ensure that a symbolic link for
+ # the configuration file points from RELEASE_CONFIG_PATH to CONFIG_PATH.
+ # If SOURCE_PATH is given, this function will try to install that file as
+ # the configuration file in CONFIG_PATH, and return 1 if the file in
+ # CONFIG_PATH is unmodified from the source.
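+  # e.g.: setup_conffile application.yml application.yml.example || APPLICATION_READY=0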
+ local conffile_relpath="$1"; shift
+ local conffile_source="$1"
+ local release_conffile="$RELEASE_CONFIG_PATH/$conffile_relpath"
+ local etc_conffile="$CONFIG_PATH/$(basename "$conffile_relpath")"
+
+ # Note that -h can return true and -e will return false simultaneously
+ # when the target is a dangling symlink. We're okay with that outcome,
+ # so check -h first.
+ if [ ! -h "$release_conffile" ]; then
+ if [ ! -e "$release_conffile" ]; then
+ ln -s "$etc_conffile" "$release_conffile"
+ # If there's a config file in /var/www identical to the one in /etc,
+ # overwrite it with a symlink after porting its permissions.
+ elif cmp --quiet "$release_conffile" "$etc_conffile"; then
+ local ownership="$(stat -c "%u:%g" "$release_conffile")"
+ local owning_group="${ownership#*:}"
+ if [ 0 != "$owning_group" ]; then
+ chgrp "$owning_group" "$CONFIG_PATH" /etc/arvados
+ fi
+ chown "$ownership" "$etc_conffile"
+ chmod --reference="$release_conffile" "$etc_conffile"
+ ln --force -s "$etc_conffile" "$release_conffile"
+ fi
+ fi
+
+ if [ -n "$conffile_source" ]; then
+ if [ ! -e "$etc_conffile" ]; then
+ install -g "$WWW_OWNER" -m 0640 \
+ "$RELEASE_CONFIG_PATH/$conffile_source" "$etc_conffile"
+ return 1
+ # Even if $etc_conffile already existed, it might be unmodified from
+ # the source. This is especially likely when a user installs, updates
+ # database.yml, then reconfigures before they update application.yml.
+ # Use cmp to be sure whether $etc_conffile is modified.
+ elif cmp --quiet "$RELEASE_CONFIG_PATH/$conffile_source" "$etc_conffile"; then
+ return 1
+ fi
+ fi
+}
+
+prepare_database() {
+ DB_MIGRATE_STATUS=`$COMMAND_PREFIX bundle exec rake db:migrate:status 2>&1 || true`
+ if echo $DB_MIGRATE_STATUS | grep -qF 'Schema migrations table does not exist yet.'; then
+ # The database exists, but the migrations table doesn't.
+ run_and_report "Setting up database" $COMMAND_PREFIX bundle exec \
+ rake "$RAILSPKG_DATABASE_LOAD_TASK" db:seed
+ elif echo $DB_MIGRATE_STATUS | grep -q '^database: '; then
+ run_and_report "Running db:migrate" \
+ $COMMAND_PREFIX bundle exec rake db:migrate
+ elif echo $DB_MIGRATE_STATUS | grep -q 'database .* does not exist'; then
+ if ! run_and_report "Running db:setup" \
+ $COMMAND_PREFIX bundle exec rake db:setup 2>/dev/null; then
+ echo "Warning: unable to set up database." >&2
+ DATABASE_READY=0
+ fi
+ else
+ echo "Warning: Database is not ready to set up. Skipping database setup." >&2
+ DATABASE_READY=0
+ fi
+}
+
+configure_version() {
+ WEB_SERVICE=${WEB_SERVICE:-$(service --status-all 2>/dev/null \
+ | grep -Eo '\bnginx|httpd[^[:space:]]*' || true)}
+ if [ -z "$WEB_SERVICE" ]; then
+ report_web_service_warning "Web service (Nginx or Apache) not found"
+ elif [ "$WEB_SERVICE" != "$(echo "$WEB_SERVICE" | head -n 1)" ]; then
+ WEB_SERVICE=$(echo "$WEB_SERVICE" | head -n 1)
+ report_web_service_warning \
+ "Multiple web services found. Choosing the first one ($WEB_SERVICE)"
+ fi
+
+ if [ -e /etc/redhat-release ]; then
+ # Recognize any service that starts with "nginx"; e.g., nginx16.
+ if [ "$WEB_SERVICE" != "${WEB_SERVICE#nginx}" ]; then
+ WWW_OWNER=nginx
+ else
+ WWW_OWNER=apache
+ fi
+ else
+ # Assume we're on a Debian-based system for now.
+ # Both Apache and Nginx run as www-data by default.
+ WWW_OWNER=www-data
+ fi
+
+ echo
+ echo "Assumption: $WEB_SERVICE is configured to serve Rails from"
+ echo " $RELEASE_PATH"
+ echo "Assumption: $WEB_SERVICE and passenger run as $WWW_OWNER"
+ echo
+
+ echo -n "Creating symlinks to configuration in $CONFIG_PATH ..."
+ setup_confdirs /etc/arvados "$CONFIG_PATH"
+ setup_conffile environments/production.rb environments/production.rb.example \
+ || true
+ setup_conffile application.yml application.yml.example || APPLICATION_READY=0
+ if [ -n "$RAILSPKG_DATABASE_LOAD_TASK" ]; then
+ setup_conffile database.yml database.yml.example || DATABASE_READY=0
+ fi
+ setup_extra_conffiles
+ echo "... done."
+
+ # Before we do anything else, make sure some directories and files are in place
+ if [ ! -e $SHARED_PATH/log ]; then mkdir -p $SHARED_PATH/log; fi
+ if [ ! -e $RELEASE_PATH/tmp ]; then mkdir -p $RELEASE_PATH/tmp; fi
+ if [ ! -e $RELEASE_PATH/log ]; then ln -s $SHARED_PATH/log $RELEASE_PATH/log; fi
+ if [ ! -e $SHARED_PATH/log/production.log ]; then touch $SHARED_PATH/log/production.log; fi
+
+ cd "$RELEASE_PATH"
+ export RAILS_ENV=production
+
+ if ! $COMMAND_PREFIX bundle --version >/dev/null; then
+ run_and_report "Installing bundle" $COMMAND_PREFIX gem install bundle
+ fi
+
+ run_and_report "Running bundle install" \
+ $COMMAND_PREFIX bundle install --path $SHARED_PATH/vendor_bundle --local --quiet
+
+ echo -n "Ensuring directory and file permissions ..."
+ # Ensure correct ownership of a few files
+ chown "$WWW_OWNER:" $RELEASE_PATH/config/environment.rb
+ chown "$WWW_OWNER:" $RELEASE_PATH/config.ru
+ chown "$WWW_OWNER:" $RELEASE_PATH/Gemfile.lock
+ chown -R "$WWW_OWNER:" $RELEASE_PATH/tmp
+ chown -R "$WWW_OWNER:" $SHARED_PATH/log
+ case "$RAILSPKG_DATABASE_LOAD_TASK" in
+ db:schema:load) chown "$WWW_OWNER:" $RELEASE_PATH/db/schema.rb ;;
+ db:structure:load) chown "$WWW_OWNER:" $RELEASE_PATH/db/structure.sql ;;
+ esac
+ chmod 644 $SHARED_PATH/log/*
+ chmod -R 2775 $RELEASE_PATH/tmp
+ echo "... done."
+
+ if [ -n "$RAILSPKG_DATABASE_LOAD_TASK" ]; then
+ prepare_database
+ fi
+
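+  # Run config:check only when both flags are 1 (the concatenated string is then "11").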
+ if [ 11 = "$RAILSPKG_SUPPORTS_CONFIG_CHECK$APPLICATION_READY" ]; then
+ run_and_report "Checking application.yml for completeness" \
+ $COMMAND_PREFIX bundle exec rake config:check || APPLICATION_READY=0
+ fi
+
+ # precompile assets; thankfully this does not take long
+ if [ "$APPLICATION_READY" = "1" ]; then
+ run_and_report "Precompiling assets" \
+ $COMMAND_PREFIX bundle exec rake assets:precompile -q -s 2>/dev/null \
+ || APPLICATION_READY=0
+ else
+ echo "Precompiling assets... skipped."
+ fi
+ chown -R "$WWW_OWNER:" $RELEASE_PATH/tmp
+
+ setup_before_nginx_restart
+
+ if [ ! -z "$WEB_SERVICE" ]; then
+ service "$WEB_SERVICE" restart
+ fi
+}
+
+if [ "$1" = configure ]; then
+ # This is a debian-based system
+ configure_version
+elif [ "$1" = "0" ] || [ "$1" = "1" ] || [ "$1" = "2" ]; then
+ # This is an rpm-based system
+ configure_version
+fi
+
+report_not_ready "$DATABASE_READY" "$CONFIG_PATH/database.yml"
+report_not_ready "$APPLICATION_READY" "$CONFIG_PATH/application.yml"
diff --git a/build/jenkins/rails-package-scripts/postrm.sh b/build/jenkins/rails-package-scripts/postrm.sh
new file mode 100644
index 0000000..2d63f0b
--- /dev/null
+++ b/build/jenkins/rails-package-scripts/postrm.sh
@@ -0,0 +1,23 @@
+#!/bin/sh
+# This code runs after package variable definitions and step2.sh.
+
+set -e
+
+purge () {
+ rm -rf $SHARED_PATH/vendor_bundle
+ rm -rf $SHARED_PATH/log
+ rm -rf $CONFIG_PATH
+ rmdir $SHARED_PATH || true
+ rmdir $INSTALL_PATH || true
+}
+
+if [ "$1" = 'purge' ]; then
+ # This is a debian-based system and purge was requested
+ purge
+elif [ "$1" = "0" ]; then
+ # This is an rpm-based system, no guarantees are made, always purge
+ # Apparently yum doesn't actually remember what it installed.
+ # Clean those files up here, then purge.
+ rm -rf $RELEASE_PATH
+ purge
+fi
diff --git a/build/jenkins/rails-package-scripts/prerm.sh b/build/jenkins/rails-package-scripts/prerm.sh
new file mode 100644
index 0000000..4ef5904
--- /dev/null
+++ b/build/jenkins/rails-package-scripts/prerm.sh
@@ -0,0 +1,22 @@
+#!/bin/sh
+# This code runs after package variable definitions and step2.sh.
+
+remove () {
+ rm -f $RELEASE_PATH/config/database.yml
+ rm -f $RELEASE_PATH/config/environments/production.rb
+ rm -f $RELEASE_PATH/config/application.yml
+ # Old API server configuration file.
+ rm -f $RELEASE_PATH/config/initializers/omniauth.rb
+ rm -rf $RELEASE_PATH/public/assets/
+ rm -rf $RELEASE_PATH/tmp
+ rm -rf $RELEASE_PATH/.bundle
+ rm -rf $RELEASE_PATH/log
+}
+
+if [ "$1" = 'remove' ]; then
+ # This is a debian-based system and removal was requested
+ remove
+elif [ "$1" = "0" ] || [ "$1" = "1" ] || [ "$1" = "2" ]; then
+ # This is an rpm-based system
+ remove
+fi
diff --git a/build/jenkins/rails-package-scripts/step2.sh b/build/jenkins/rails-package-scripts/step2.sh
new file mode 100644
index 0000000..816b906
--- /dev/null
+++ b/build/jenkins/rails-package-scripts/step2.sh
@@ -0,0 +1,28 @@
+#!/bin/sh
+# This code runs after package variable definitions, before the actual
+# pre/post package work, to set some variable and function defaults.
+
+if [ -z "$INSTALL_PATH" ]; then
+ cat >&2 <<EOF
+
+PACKAGE BUILD ERROR: $0 is missing package metadata.
+
+This package is buggy. Please mail <support@curoverse.com> to let
+us know the name and version number of the package you tried to
+install, and we'll get it fixed.
+
+EOF
+ exit 3
+fi
+
+RELEASE_PATH=$INSTALL_PATH/current
+RELEASE_CONFIG_PATH=$RELEASE_PATH/config
+SHARED_PATH=$INSTALL_PATH/shared
+
+RAILSPKG_SUPPORTS_CONFIG_CHECK=${RAILSPKG_SUPPORTS_CONFIG_CHECK:-1}
+if ! type setup_extra_conffiles >/dev/null 2>&1; then
+ setup_extra_conffiles() { return; }
+fi
+if ! type setup_before_nginx_restart >/dev/null 2>&1; then
+ setup_before_nginx_restart() { return; }
+fi
diff --git a/build/jenkins/run-build-docker-images.sh b/build/jenkins/run-build-docker-images.sh
new file mode 100755
index 0000000..0a5841d
--- /dev/null
+++ b/build/jenkins/run-build-docker-images.sh
@@ -0,0 +1,167 @@
+#!/bin/bash
+
+function usage {
+ echo >&2
+ echo >&2 "usage: $0 [options]"
+ echo >&2
+ echo >&2 "$0 options:"
+ echo >&2 " -t, --tags [csv_tags] comma separated tags"
+ echo >&2 " -u, --upload Upload the images (docker push)"
+ echo >&2 " -h, --help Display this help and exit"
+ echo >&2
+ echo >&2 " If no options are given, just builds the images."
+}
+
+upload=false
+
+# NOTE: This requires GNU getopt (part of the util-linux package on Debian-based distros).
+TEMP=`getopt -o hut: \
+ --long help,upload,tags: \
+ -n "$0" -- "$@"`
+
+if [ $? != 0 ] ; then echo "Use -h for help"; exit 1 ; fi
+# Note the quotes around `$TEMP': they are essential!
+eval set -- "$TEMP"
+
+while [ $# -ge 1 ]
+do
+ case $1 in
+ -u | --upload)
+ upload=true
+ shift
+ ;;
+ -t | --tags)
+ case "$2" in
+ "")
+ echo "ERROR: --tags needs a parameter";
+ usage;
+ exit 1
+ ;;
+ *)
+ tags=$2;
+ shift 2
+ ;;
+ esac
+ ;;
+ --)
+ shift
+ break
+ ;;
+ *)
+ usage
+ exit 1
+ ;;
+ esac
+done
+
+
+EXITCODE=0
+
+COLUMNS=80
+
+title () {
+ printf "\n%*s\n\n" $(((${#title}+$COLUMNS)/2)) "********** $1 **********"
+}
+
+docker_push () {
+ if [[ ! -z "$tags" ]]
+ then
+ for tag in $( echo $tags|tr "," " " )
+ do
+ $DOCKER tag $1 $1:$tag
+ done
+ fi
+
+ # Sometimes docker push fails; retry it a few times if necessary.
+ for i in `seq 1 5`; do
+ $DOCKER push $*
+ ECODE=$?
+ if [[ "$ECODE" == "0" ]]; then
+ break
+ fi
+ done
+
+ if [[ "$ECODE" != "0" ]]; then
+ title "!!!!!! docker push $* failed !!!!!!"
+ EXITCODE=$(($EXITCODE + $ECODE))
+ fi
+}
+
+timer_reset() {
+ t0=$SECONDS
+}
+
+timer() {
+ echo -n "$(($SECONDS - $t0))s"
+}
+
+# Sanity check
+if ! [[ -n "$WORKSPACE" ]]; then
+ echo >&2
+ echo >&2 "Error: WORKSPACE environment variable not set"
+ echo >&2
+ exit 1
+fi
+
+echo $WORKSPACE
+
+# find the docker binary
+DOCKER=`which docker.io`
+
+if [[ "$DOCKER" == "" ]]; then
+ DOCKER=`which docker`
+fi
+
+if [[ "$DOCKER" == "" ]]; then
+ title "Error: you need to have docker installed. Could not find the docker executable."
+ exit 1
+fi
+
+# DOCKER
+title "Starting docker build"
+
+timer_reset
+
+# clean up the docker build environment
+cd "$WORKSPACE"
+
+tools/arvbox/bin/arvbox build dev
+ECODE=$?
+
+if [[ "$ECODE" != "0" ]]; then
+ title "!!!!!! docker BUILD FAILED !!!!!!"
+ EXITCODE=$(($EXITCODE + $ECODE))
+fi
+
+tools/arvbox/bin/arvbox build localdemo
+
+ECODE=$?
+
+if [[ "$ECODE" != "0" ]]; then
+ title "!!!!!! docker BUILD FAILED !!!!!!"
+ EXITCODE=$(($EXITCODE + $ECODE))
+fi
+
+title "docker build complete (`timer`)"
+
+title "uploading images"
+
+timer_reset
+
+if [[ "$ECODE" != "0" ]]; then
+ title "upload arvados images SKIPPED because build failed"
+else
+ if [[ $upload == true ]]; then
+ ## 20150526 nico -- *sometimes* dockerhub needs re-login
+ ## even though credentials are already in .dockercfg
+ docker login -u arvados
+
+ docker_push arvados/arvbox-dev
+ docker_push arvados/arvbox-demo
+ title "upload arvados images complete (`timer`)"
+ else
+ title "upload arvados images SKIPPED because no --upload option set"
+ fi
+fi
+
+exit $EXITCODE
diff --git a/build/jenkins/run-build-docker-jobs-image.sh b/build/jenkins/run-build-docker-jobs-image.sh
new file mode 100755
index 0000000..fcf849b
--- /dev/null
+++ b/build/jenkins/run-build-docker-jobs-image.sh
@@ -0,0 +1,164 @@
+#!/bin/bash
+
+function usage {
+ echo >&2
+ echo >&2 "usage: $0 [options]"
+ echo >&2
+ echo >&2 "$0 options:"
+ echo >&2 " -t, --tags [csv_tags] comma separated tags"
+ echo >&2 " -u, --upload Upload the images (docker push)"
+ echo >&2 " -h, --help Display this help and exit"
+ echo >&2
+ echo >&2 " If no options are given, just builds the images."
+}
+
+upload=false
+
+# NOTE: This requires GNU getopt (part of the util-linux package on Debian-based distros).
+TEMP=`getopt -o hut: \
+ --long help,upload,tags: \
+ -n "$0" -- "$@"`
+
+if [ $? != 0 ] ; then echo "Use -h for help"; exit 1 ; fi
+# Note the quotes around `$TEMP': they are essential!
+eval set -- "$TEMP"
+
+while [ $# -ge 1 ]
+do
+ case $1 in
+ -u | --upload)
+ upload=true
+ shift
+ ;;
+ -t | --tags)
+ case "$2" in
+ "")
+ echo "ERROR: --tags needs a parameter";
+ usage;
+ exit 1
+ ;;
+ *)
+ tags=$2;
+ shift 2
+ ;;
+ esac
+ ;;
+ --)
+ shift
+ break
+ ;;
+ *)
+ usage
+ exit 1
+ ;;
+ esac
+done
+
+
+EXITCODE=0
+
+COLUMNS=80
+
+title () {
+ printf "\n%*s\n\n" $(((${#title}+$COLUMNS)/2)) "********** $1 **********"
+}
+
+docker_push () {
+ if [[ ! -z "$tags" ]]
+ then
+ for tag in $( echo $tags|tr "," " " )
+ do
+ $DOCKER tag -f $1 $1:$tag
+ done
+ fi
+
+ # Sometimes docker push fails; retry it a few times if necessary.
+ for i in `seq 1 5`; do
+ $DOCKER push $*
+ ECODE=$?
+ if [[ "$ECODE" == "0" ]]; then
+ break
+ fi
+ done
+
+ if [[ "$ECODE" != "0" ]]; then
+ title "!!!!!! docker push $* failed !!!!!!"
+ EXITCODE=$(($EXITCODE + $ECODE))
+ fi
+}
+
+timer_reset() {
+ t0=$SECONDS
+}
+
+timer() {
+ echo -n "$(($SECONDS - $t0))s"
+}
+
+# Sanity check
+if ! [[ -n "$WORKSPACE" ]]; then
+ echo >&2
+ echo >&2 "Error: WORKSPACE environment variable not set"
+ echo >&2
+ exit 1
+fi
+
+echo $WORKSPACE
+
+# find the docker binary
+DOCKER=`which docker.io`
+
+if [[ "$DOCKER" == "" ]]; then
+ DOCKER=`which docker`
+fi
+
+if [[ "$DOCKER" == "" ]]; then
+ title "Error: you need to have docker installed. Could not find the docker executable."
+ exit 1
+fi
+
+# DOCKER
+title "Starting docker build"
+
+timer_reset
+
+# clean up the docker build environment
+cd "$WORKSPACE"
+cd docker
+rm -f jobs-image
+rm -f config.yml
+
+# Get test config.yml file
+cp $HOME/docker/config.yml .
+
+./build.sh jobs-image
+
+ECODE=$?
+
+if [[ "$ECODE" != "0" ]]; then
+ title "!!!!!! docker BUILD FAILED !!!!!!"
+ EXITCODE=$(($EXITCODE + $ECODE))
+fi
+
+title "docker build complete (`timer`)"
+
+title "uploading images"
+
+timer_reset
+
+if [[ "$ECODE" != "0" ]]; then
+ title "upload arvados images SKIPPED because build failed"
+else
+ if [[ $upload == true ]]; then
+ ## 20150526 nico -- *sometimes* dockerhub needs re-login
+ ## even though credentials are already in .dockercfg
+ docker login -u arvados
+
+ docker_push arvados/jobs
+ title "upload arvados images complete (`timer`)"
+ else
+ title "upload arvados images SKIPPED because no --upload option set"
+ fi
+fi
+
+exit $EXITCODE
diff --git a/build/jenkins/run-build-packages-all-targets.sh b/build/jenkins/run-build-packages-all-targets.sh
new file mode 100755
index 0000000..f1a1e1c
--- /dev/null
+++ b/build/jenkins/run-build-packages-all-targets.sh
@@ -0,0 +1,98 @@
+#!/bin/bash
+
+read -rd "\000" helpmessage <<EOF
+$(basename $0): Orchestrate run-build-packages.sh for every target
+
+Syntax:
+ WORKSPACE=/path/to/arvados $(basename $0) [options]
+
+Options:
+
+--command
+ Build command to execute (default: use built-in Docker image command)
+--test-packages
+ Run package install tests
+--debug
+ Output debug information (default: false)
+
+WORKSPACE=path Path to the Arvados source tree to build packages from
+
+EOF
+
+if ! [[ -n "$WORKSPACE" ]]; then
+ echo >&2 "$helpmessage"
+ echo >&2
+ echo >&2 "Error: WORKSPACE environment variable not set"
+ echo >&2
+ exit 1
+fi
+
+if ! [[ -d "$WORKSPACE" ]]; then
+ echo >&2 "$helpmessage"
+ echo >&2
+ echo >&2 "Error: $WORKSPACE is not a directory"
+ echo >&2
+ exit 1
+fi
+
+set -e
+
+PARSEDOPTS=$(getopt --name "$0" --longoptions \
+ help,test-packages,debug,command:,only-test: \
+ -- "" "$@")
+if [ $? -ne 0 ]; then
+ exit 1
+fi
+
+COMMAND=
+DEBUG=
+TEST_PACKAGES=
+ONLY_TEST=
+
+eval set -- "$PARSEDOPTS"
+while [ $# -gt 0 ]; do
+ case "$1" in
+ --help)
+ echo >&2 "$helpmessage"
+ echo >&2
+ exit 1
+ ;;
+ --debug)
+ DEBUG="--debug"
+ ;;
+ --command)
+ COMMAND="$2"; shift
+ ;;
+ --test-packages)
+ TEST_PACKAGES="--test-packages"
+ ;;
+ --only-test)
+ ONLY_TEST="$1 $2"; shift
+ ;;
+ --)
+ if [ $# -gt 1 ]; then
+ echo >&2 "$0: unrecognized argument '$2'. Try: $0 --help"
+ exit 1
+ fi
+ ;;
+ esac
+ shift
+done
+
+cd $(dirname $0)
+
+FINAL_EXITCODE=0
+
+for dockerfile_path in $(find -name Dockerfile); do
+ if ./run-build-packages-one-target.sh --target "$(basename $(dirname "$dockerfile_path"))" --command "$COMMAND" $DEBUG $TEST_PACKAGES $ONLY_TEST ; then
+ true
+ else
+ FINAL_EXITCODE=$?
+ fi
+done
+
+if test $FINAL_EXITCODE != 0 ; then
+ echo "Build packages failed with code $FINAL_EXITCODE" >&2
+fi
+
+exit $FINAL_EXITCODE
diff --git a/build/jenkins/run-build-packages-one-target.sh b/build/jenkins/run-build-packages-one-target.sh
new file mode 100755
index 0000000..c5e0a89
--- /dev/null
+++ b/build/jenkins/run-build-packages-one-target.sh
@@ -0,0 +1,203 @@
+#!/bin/bash
+
+read -rd "\000" helpmessage <<EOF
+$(basename $0): Orchestrate run-build-packages.sh for one target
+
+Syntax:
+ WORKSPACE=/path/to/arvados $(basename $0) [options]
+
+--target <target>
+ Distribution to build packages for (default: debian7)
+--command
+ Build command to execute (default: use built-in Docker image command)
+--test-packages
+ Run package install test script "test-packages-$target.sh"
+--debug
+ Output debug information (default: false)
+--only-test
+ Test only a specific package
+
+WORKSPACE=path Path to the Arvados source tree to build packages from
+
+EOF
+
+set -e
+
+if ! [[ -n "$WORKSPACE" ]]; then
+ echo >&2 "$helpmessage"
+ echo >&2
+ echo >&2 "Error: WORKSPACE environment variable not set"
+ echo >&2
+ exit 1
+fi
+
+if ! [[ -d "$WORKSPACE" ]]; then
+ echo >&2 "$helpmessage"
+ echo >&2
+ echo >&2 "Error: $WORKSPACE is not a directory"
+ echo >&2
+ exit 1
+fi
+
+PARSEDOPTS=$(getopt --name "$0" --longoptions \
+ help,debug,test-packages,target:,command:,only-test: \
+ -- "" "$@")
+if [ $? -ne 0 ]; then
+ exit 1
+fi
+
+TARGET=debian7
+COMMAND=
+DEBUG=
+
+eval set -- "$PARSEDOPTS"
+while [ $# -gt 0 ]; do
+ case "$1" in
+ --help)
+ echo >&2 "$helpmessage"
+ echo >&2
+ exit 1
+ ;;
+ --target)
+ TARGET="$2"; shift
+ ;;
+ --only-test)
+ packages="$2"; shift
+ ;;
+ --debug)
+ DEBUG=" --debug"
+ ;;
+ --command)
+ COMMAND="$2"; shift
+ ;;
+ --test-packages)
+ test_packages=1
+ ;;
+ --)
+ if [ $# -gt 1 ]; then
+ echo >&2 "$0: unrecognized argument '$2'. Try: $0 --help"
+ exit 1
+ fi
+ ;;
+ esac
+ shift
+done
+
+set -e
+
+if [[ -n "$test_packages" ]]; then
+ if [[ -n "$(find $WORKSPACE/packages/$TARGET -name *.rpm)" ]] ; then
+ createrepo $WORKSPACE/packages/$TARGET
+ fi
+
+ if [[ -n "$(find $WORKSPACE/packages/$TARGET -name *.deb)" ]] ; then
+ (cd $WORKSPACE/packages/$TARGET
+ dpkg-scanpackages . 2> >(grep -v 'warning' 1>&2) | gzip -c > Packages.gz
+ )
+ fi
+
+ COMMAND="/jenkins/package-testing/test-packages-$TARGET.sh"
+ IMAGE="arvados/package-test:$TARGET"
+else
+ IMAGE="arvados/build:$TARGET"
+ if [[ "$COMMAND" != "" ]]; then
+ COMMAND="/usr/local/rvm/bin/rvm-exec default bash /jenkins/$COMMAND --target $TARGET$DEBUG"
+ fi
+fi
+
+JENKINS_DIR=$(dirname "$(readlink -e "$0")")
+
+if [[ -n "$test_packages" ]]; then
+ pushd "$JENKINS_DIR/package-test-dockerfiles"
+else
+ pushd "$JENKINS_DIR/package-build-dockerfiles"
+ make "$TARGET/generated"
+fi
+
+echo $TARGET
+cd $TARGET
+time docker build --tag=$IMAGE .
+popd
+
+if test -z "$packages" ; then
+ packages="arvados-api-server
+ arvados-data-manager
+ arvados-docker-cleaner
+ arvados-git-httpd
+ arvados-node-manager
+ arvados-src
+ arvados-workbench
+ crunchstat
+ keepproxy
+ keep-rsync
+ keepstore
+ keep-web
+ libarvados-perl"
+
+ case "$TARGET" in
+ centos6)
+ packages="$packages python27-python-arvados-fuse
+ python27-python-arvados-python-client"
+ ;;
+ *)
+ packages="$packages python-arvados-fuse
+ python-arvados-python-client"
+ ;;
+ esac
+fi
+
+FINAL_EXITCODE=0
+
+package_fails=""
+
+mkdir -p "$WORKSPACE/apps/workbench/vendor/cache-$TARGET"
+mkdir -p "$WORKSPACE/services/api/vendor/cache-$TARGET"
+
+docker_volume_args=(
+ -v "$JENKINS_DIR:/jenkins"
+ -v "$WORKSPACE:/arvados"
+ -v /arvados/services/api/vendor/bundle
+ -v /arvados/apps/workbench/vendor/bundle
+ -v "$WORKSPACE/services/api/vendor/cache-$TARGET:/arvados/services/api/vendor/cache"
+ -v "$WORKSPACE/apps/workbench/vendor/cache-$TARGET:/arvados/apps/workbench/vendor/cache"
+)
+
+if [[ -n "$test_packages" ]]; then
+ for p in $packages ; do
+ echo
+ echo "START: $p test on $IMAGE" >&2
+ if docker run --rm \
+ "${docker_volume_args[@]}" \
+ --env ARVADOS_DEBUG=1 \
+ --env "TARGET=$TARGET" \
+ --env "WORKSPACE=/arvados" \
+ "$IMAGE" $COMMAND $p
+ then
+ echo "OK: $p test on $IMAGE succeeded" >&2
+ else
+ FINAL_EXITCODE=$?
+ package_fails="$package_fails $p"
+ echo "ERROR: $p test on $IMAGE failed with exit status $FINAL_EXITCODE" >&2
+ fi
+ done
+else
+ echo
+ echo "START: build packages on $IMAGE" >&2
+ if docker run --rm \
+ "${docker_volume_args[@]}" \
+ --env ARVADOS_DEBUG=1 \
+ "$IMAGE" $COMMAND
+ then
+ echo
+ echo "OK: build packages on $IMAGE succeeded" >&2
+ else
+ FINAL_EXITCODE=$?
+ echo "ERROR: build packages on $IMAGE failed with exit status $FINAL_EXITCODE" >&2
+ fi
+fi
+
+if test -n "$package_fails" ; then
+ echo "Failed package tests:$package_fails" >&2
+fi
+
+exit $FINAL_EXITCODE
diff --git a/build/jenkins/run-build-packages-sso.sh b/build/jenkins/run-build-packages-sso.sh
new file mode 100755
index 0000000..cc673a6
--- /dev/null
+++ b/build/jenkins/run-build-packages-sso.sh
@@ -0,0 +1,161 @@
+#!/bin/bash
+
+JENKINS_DIR=$(dirname $(readlink -e "$0"))
+. "$JENKINS_DIR/run-library.sh"
+
+read -rd "\000" helpmessage <<EOF
+$(basename $0): Build Arvados SSO server package
+
+Syntax:
+ WORKSPACE=/path/to/arvados-sso $(basename $0) [options]
+
+Options:
+
+--debug
+ Output debug information (default: false)
+--target
+ Distribution to build packages for (default: debian7)
+
+WORKSPACE=path Path to the Arvados SSO source tree to build packages from
+
+EOF
+
+EXITCODE=0
+DEBUG=${ARVADOS_DEBUG:-0}
+TARGET=debian7
+
+PARSEDOPTS=$(getopt --name "$0" --longoptions \
+ help,build-bundle-packages,debug,target: \
+ -- "" "$@")
+if [ $? -ne 0 ]; then
+ exit 1
+fi
+
+eval set -- "$PARSEDOPTS"
+while [ $# -gt 0 ]; do
+ case "$1" in
+ --help)
+ echo >&2 "$helpmessage"
+ echo >&2
+ exit 1
+ ;;
+ --target)
+ TARGET="$2"; shift
+ ;;
+ --debug)
+ DEBUG=1
+ ;;
+ --test-packages)
+ test_packages=1
+ ;;
+ --)
+ if [ $# -gt 1 ]; then
+ echo >&2 "$0: unrecognized argument '$2'. Try: $0 --help"
+ exit 1
+ fi
+ ;;
+ esac
+ shift
+done
+
+STDOUT_IF_DEBUG=/dev/null
+STDERR_IF_DEBUG=/dev/null
+DASHQ_UNLESS_DEBUG=-q
+if [[ "$DEBUG" != 0 ]]; then
+ STDOUT_IF_DEBUG=/dev/stdout
+ STDERR_IF_DEBUG=/dev/stderr
+ DASHQ_UNLESS_DEBUG=
+fi
+
+case "$TARGET" in
+ debian7)
+ FORMAT=deb
+ ;;
+ debian8)
+ FORMAT=deb
+ ;;
+ ubuntu1204)
+ FORMAT=deb
+ ;;
+ ubuntu1404)
+ FORMAT=deb
+ ;;
+ centos6)
+ FORMAT=rpm
+ ;;
+ *)
+ echo -e "$0: Unknown target '$TARGET'.\n" >&2
+ exit 1
+ ;;
+esac
+
+if ! [[ -n "$WORKSPACE" ]]; then
+ echo >&2 "$helpmessage"
+ echo >&2
+ echo >&2 "Error: WORKSPACE environment variable not set"
+ echo >&2
+ exit 1
+fi
+
+if ! [[ -d "$WORKSPACE" ]]; then
+ echo >&2 "$helpmessage"
+ echo >&2
+ echo >&2 "Error: $WORKSPACE is not a directory"
+ echo >&2
+ exit 1
+fi
+
+# Test for fpm
+fpm --version >/dev/null 2>&1
+
+if [[ "$?" != 0 ]]; then
+ echo >&2 "$helpmessage"
+ echo >&2
+ echo >&2 "Error: fpm not found"
+ echo >&2
+ exit 1
+fi
+
+RUN_BUILD_PACKAGES_PATH="`dirname \"$0\"`"
+RUN_BUILD_PACKAGES_PATH="`( cd \"$RUN_BUILD_PACKAGES_PATH\" && pwd )`" # absolutized and normalized
+if [ -z "$RUN_BUILD_PACKAGES_PATH" ] ; then
+ # error; for some reason, the path is not accessible
+ # to the script (e.g. permissions re-evaled after suid)
+ exit 1 # fail
+fi
+
+debug_echo "$0 is running from $RUN_BUILD_PACKAGES_PATH"
+debug_echo "Workspace is $WORKSPACE"
+
+if [[ -f /etc/profile.d/rvm.sh ]]; then
+ source /etc/profile.d/rvm.sh
+ GEM="rvm-exec default gem"
+else
+ GEM=gem
+fi
+
+# Make all files world-readable -- jenkins runs with umask 027, and has checked
+# out our git tree here
+chmod o+r "$WORKSPACE" -R
+
+# More cleanup - make sure all executables that we'll package are 755
+# No executables in the sso server package
+#find -type d -name 'bin' |xargs -I {} find {} -type f |xargs -I {} chmod 755 {}
+
+# Now fix our umask to something better suited to building and publishing
+# gems and packages
+umask 0022
+
+debug_echo "umask is" `umask`
+
+if [[ ! -d "$WORKSPACE/packages/$TARGET" ]]; then
+ mkdir -p "$WORKSPACE/packages/$TARGET"
+fi
+
+# Build the SSO server package
+handle_rails_package arvados-sso-server "$WORKSPACE" \
+ "$WORKSPACE/LICENCE" --url="https://arvados.org" \
+ --description="Arvados SSO server - Arvados is a free and open source platform for big data science." \
+ --license="Expat license"
+
+exit $EXITCODE
diff --git a/build/jenkins/run-build-packages.sh b/build/jenkins/run-build-packages.sh
new file mode 100755
index 0000000..136b73c
--- /dev/null
+++ b/build/jenkins/run-build-packages.sh
@@ -0,0 +1,538 @@
+#!/bin/bash
+
+. `dirname "$(readlink -f "$0")"`/run-library.sh
+. `dirname "$(readlink -f "$0")"`/libcloud-pin
+
+read -rd "\000" helpmessage <<EOF
+$(basename $0): Build Arvados packages
+
+Syntax:
+ WORKSPACE=/path/to/arvados $(basename $0) [options]
+
+Options:
+
+--build-bundle-packages (default: false)
+ Build api server and workbench packages with vendor/bundle included
+--debug
+ Output debug information (default: false)
+--target
+ Distribution to build packages for (default: debian7)
+--command
+ Build command to execute (defaults to the run command defined in the
+ Docker image)
+
+WORKSPACE=path Path to the Arvados source tree to build packages from
+
+EOF
+
+EXITCODE=0
+DEBUG=${ARVADOS_DEBUG:-0}
+TARGET=debian7
+COMMAND=
+
+PARSEDOPTS=$(getopt --name "$0" --longoptions \
+ help,build-bundle-packages,debug,target: \
+ -- "" "$@")
+if [ $? -ne 0 ]; then
+ exit 1
+fi
+
+eval set -- "$PARSEDOPTS"
+while [ $# -gt 0 ]; do
+ case "$1" in
+ --help)
+ echo >&2 "$helpmessage"
+ echo >&2
+ exit 1
+ ;;
+ --target)
+ TARGET="$2"; shift
+ ;;
+ --debug)
+ DEBUG=1
+ ;;
+ --command)
+ COMMAND="$2"; shift
+ ;;
+ --)
+ if [ $# -gt 1 ]; then
+ echo >&2 "$0: unrecognized argument '$2'. Try: $0 --help"
+ exit 1
+ fi
+ ;;
+ esac
+ shift
+done
+
+if [[ "$COMMAND" != "" ]]; then
+ COMMAND="/usr/local/rvm/bin/rvm-exec default bash /jenkins/$COMMAND --target $TARGET"
+fi
+
+STDOUT_IF_DEBUG=/dev/null
+STDERR_IF_DEBUG=/dev/null
+DASHQ_UNLESS_DEBUG=-q
+if [[ "$DEBUG" != 0 ]]; then
+ STDOUT_IF_DEBUG=/dev/stdout
+ STDERR_IF_DEBUG=/dev/stderr
+ DASHQ_UNLESS_DEBUG=
+fi
+
+declare -a PYTHON_BACKPORTS PYTHON3_BACKPORTS
+
+PYTHON2_VERSION=2.7
+PYTHON3_VERSION=$(python3 -c 'import sys; print("{v.major}.{v.minor}".format(v=sys.version_info))')
+
+case "$TARGET" in
+ debian7)
+ FORMAT=deb
+ PYTHON2_PACKAGE=python$PYTHON2_VERSION
+ PYTHON2_PKG_PREFIX=python
+ PYTHON3_PACKAGE=python$PYTHON3_VERSION
+ PYTHON3_PKG_PREFIX=python3
+ PYTHON_BACKPORTS=(python-gflags pyvcf google-api-python-client==1.4.2 \
+ oauth2client==1.5.2 pyasn1==0.1.7 pyasn1-modules==0.0.5 \
+ rsa uritemplate httplib2 ws4py pykka six pyexecjs jsonschema \
+ ciso8601 pycrypto backports.ssl_match_hostname llfuse \
+ 'pycurl<7.21.5' contextlib2)
+ PYTHON3_BACKPORTS=(docker-py six requests websocket-client)
+ ;;
+ debian8)
+ FORMAT=deb
+ PYTHON2_PACKAGE=python$PYTHON2_VERSION
+ PYTHON2_PKG_PREFIX=python
+ PYTHON3_PACKAGE=python$PYTHON3_VERSION
+ PYTHON3_PKG_PREFIX=python3
+ PYTHON_BACKPORTS=(python-gflags pyvcf google-api-python-client==1.4.2 \
+ oauth2client==1.5.2 pyasn1==0.1.7 pyasn1-modules==0.0.5 \
+ rsa uritemplate httplib2 ws4py pykka six pyexecjs jsonschema \
+ ciso8601 pycrypto backports.ssl_match_hostname llfuse \
+ 'pycurl<7.21.5')
+ PYTHON3_BACKPORTS=(docker-py six requests websocket-client)
+ ;;
+ ubuntu1204)
+ FORMAT=deb
+ PYTHON2_PACKAGE=python$PYTHON2_VERSION
+ PYTHON2_PKG_PREFIX=python
+ PYTHON3_PACKAGE=python$PYTHON3_VERSION
+ PYTHON3_PKG_PREFIX=python3
+ PYTHON_BACKPORTS=(python-gflags pyvcf google-api-python-client==1.4.2 \
+ oauth2client==1.5.2 pyasn1==0.1.7 pyasn1-modules==0.0.5 \
+ rsa uritemplate httplib2 ws4py pykka six pyexecjs jsonschema \
+ ciso8601 pycrypto backports.ssl_match_hostname llfuse \
+ contextlib2 \
+ 'pycurl<7.21.5')
+ PYTHON3_BACKPORTS=(docker-py six requests websocket-client)
+ ;;
+ ubuntu1404)
+ FORMAT=deb
+ PYTHON2_PACKAGE=python$PYTHON2_VERSION
+ PYTHON2_PKG_PREFIX=python
+ PYTHON3_PACKAGE=python$PYTHON3_VERSION
+ PYTHON3_PKG_PREFIX=python3
+ PYTHON_BACKPORTS=(pyasn1==0.1.7 pyvcf pyasn1-modules==0.0.5 llfuse ciso8601 \
+ google-api-python-client==1.4.2 six uritemplate oauth2client==1.5.2 httplib2 \
+ rsa 'pycurl<7.21.5' backports.ssl_match_hostname)
+ PYTHON3_BACKPORTS=(docker-py requests websocket-client)
+ ;;
+ centos6)
+ FORMAT=rpm
+ PYTHON2_PACKAGE=$(rpm -qf "$(which python$PYTHON2_VERSION)" --queryformat '%{NAME}\n')
+ PYTHON2_PKG_PREFIX=$PYTHON2_PACKAGE
+ PYTHON3_PACKAGE=$(rpm -qf "$(which python$PYTHON3_VERSION)" --queryformat '%{NAME}\n')
+ PYTHON3_PKG_PREFIX=$PYTHON3_PACKAGE
+ PYTHON_BACKPORTS=(python-gflags pyvcf google-api-python-client==1.4.2 \
+ oauth2client==1.5.2 pyasn1==0.1.7 pyasn1-modules==0.0.5 \
+ rsa uritemplate httplib2 ws4py pykka six pyexecjs jsonschema \
+ ciso8601 pycrypto backports.ssl_match_hostname 'pycurl<7.21.5' \
+ python-daemon lockfile llfuse 'pbr<1.0')
+ PYTHON3_BACKPORTS=(docker-py six requests websocket-client)
+ export PYCURL_SSL_LIBRARY=nss
+ ;;
+ *)
+ echo -e "$0: Unknown target '$TARGET'.\n" >&2
+ exit 1
+ ;;
+esac
+
+
+if ! [[ -n "$WORKSPACE" ]]; then
+ echo >&2 "$helpmessage"
+ echo >&2
+ echo >&2 "Error: WORKSPACE environment variable not set"
+ echo >&2
+ exit 1
+fi
+
+# Test for fpm
+fpm --version >/dev/null 2>&1
+
+if [[ "$?" != 0 ]]; then
+ echo >&2 "$helpmessage"
+ echo >&2
+ echo >&2 "Error: fpm not found"
+ echo >&2
+ exit 1
+fi
+
+EASY_INSTALL2=$(find_easy_install -$PYTHON2_VERSION "")
+EASY_INSTALL3=$(find_easy_install -$PYTHON3_VERSION 3)
+
+RUN_BUILD_PACKAGES_PATH="`dirname \"$0\"`"
+RUN_BUILD_PACKAGES_PATH="`( cd \"$RUN_BUILD_PACKAGES_PATH\" && pwd )`" # absolutized and normalized
+if [ -z "$RUN_BUILD_PACKAGES_PATH" ] ; then
+ # error; for some reason, the path is not accessible
+ # to the script (e.g. permissions re-evaled after suid)
+ exit 1 # fail
+fi
+
+debug_echo "$0 is running from $RUN_BUILD_PACKAGES_PATH"
+debug_echo "Workspace is $WORKSPACE"
+
+if [[ -f /etc/profile.d/rvm.sh ]]; then
+ source /etc/profile.d/rvm.sh
+ GEM="rvm-exec default gem"
+else
+ GEM=gem
+fi
+
+# Make all files world-readable -- jenkins runs with umask 027, and has checked
+# out our git tree here
+chmod o+r "$WORKSPACE" -R
+
+# More cleanup - make sure all executables that we'll package are 755
+find -type d -name 'bin' |xargs -I {} find {} -type f |xargs -I {} chmod 755 {}
+
+# Now fix our umask to something better suited to building and publishing
+# gems and packages
+umask 0022
+
+debug_echo "umask is" `umask`
+
+if [[ ! -d "$WORKSPACE/packages/$TARGET" ]]; then
+ mkdir -p $WORKSPACE/packages/$TARGET
+fi
+
+# Perl packages
+debug_echo -e "\nPerl packages\n"
+
+cd "$WORKSPACE/sdk/perl"
+
+if [[ -e Makefile ]]; then
+ make realclean >"$STDOUT_IF_DEBUG"
+fi
+find -maxdepth 1 \( -name 'MANIFEST*' -or -name "libarvados-perl*.$FORMAT" \) \
+ -delete
+rm -rf install
+
+perl Makefile.PL INSTALL_BASE=install >"$STDOUT_IF_DEBUG" && \
+ make install INSTALLDIRS=perl >"$STDOUT_IF_DEBUG" && \
+ fpm_build install/lib/=/usr/share libarvados-perl \
+ "Curoverse, Inc." dir "$(version_from_git)" install/man/=/usr/share/man \
+ "$WORKSPACE/LICENSE-2.0.txt=/usr/share/doc/libarvados-perl/LICENSE-2.0.txt" && \
+ mv --no-clobber libarvados-perl*.$FORMAT "$WORKSPACE/packages/$TARGET/"
+
+# Ruby gems
+debug_echo -e "\nRuby gems\n"
+
+FPM_GEM_PREFIX=$($GEM environment gemdir)
+
+cd "$WORKSPACE/sdk/ruby"
+handle_ruby_gem arvados
+
+cd "$WORKSPACE/sdk/cli"
+handle_ruby_gem arvados-cli
+
+cd "$WORKSPACE/services/login-sync"
+handle_ruby_gem arvados-login-sync
+
+# Python packages
+debug_echo -e "\nPython packages\n"
+
+cd "$WORKSPACE/sdk/pam"
+handle_python_package
+
+cd "$WORKSPACE/sdk/python"
+handle_python_package
+
+cd "$WORKSPACE/sdk/cwl"
+handle_python_package
+
+cd "$WORKSPACE/services/fuse"
+handle_python_package
+
+cd "$WORKSPACE/services/nodemanager"
+handle_python_package
+
+# arvados-src
+(
+ set -e
+
+ cd "$WORKSPACE"
+ COMMIT_HASH=$(format_last_commit_here "%H")
+
+ SRC_BUILD_DIR=$(mktemp -d)
+ # mktemp creates the directory with 0700 permissions by default
+ chmod 755 $SRC_BUILD_DIR
+ git clone $DASHQ_UNLESS_DEBUG "$WORKSPACE/.git" "$SRC_BUILD_DIR"
+ cd "$SRC_BUILD_DIR"
+
+ # go into detached-head state
+ git checkout $DASHQ_UNLESS_DEBUG "$COMMIT_HASH"
+ echo "$COMMIT_HASH" >git-commit.version
+
+ cd "$SRC_BUILD_DIR"
+ PKG_VERSION=$(version_from_git)
+ cd $WORKSPACE/packages/$TARGET
+ fpm_build $SRC_BUILD_DIR/=/usr/local/arvados/src arvados-src 'Curoverse, Inc.' 'dir' "$PKG_VERSION" "--exclude=usr/local/arvados/src/.git" "--url=https://arvados.org" "--license=GNU Affero General Public License, version 3.0" "--description=The Arvados source code" "--architecture=all"
+
+ rm -rf "$SRC_BUILD_DIR"
+)
+
+# On older platforms we need to publish a backport of libfuse >=2.9.2,
+# and we need to build and install it here in order to even build an
+# llfuse package.
+cd $WORKSPACE/packages/$TARGET
+if [[ $TARGET =~ ubuntu1204 ]]; then
+ # port libfuse 2.9.2 to Ubuntu 12.04
+ LIBFUSE_DIR=$(mktemp -d)
+ (
+ cd $LIBFUSE_DIR
+ # download fuse 2.9.2 ubuntu 14.04 source package
+ file="fuse_2.9.2.orig.tar.xz" && curl -L -o "${file}" "http://archive.ubuntu.com/ubuntu/pool/main/f/fuse/${file}"
+ file="fuse_2.9.2-4ubuntu4.14.04.1.debian.tar.xz" && curl -L -o "${file}" "http://archive.ubuntu.com/ubuntu/pool/main/f/fuse/${file}"
+ file="fuse_2.9.2-4ubuntu4.14.04.1.dsc" && curl -L -o "${file}" "http://archive.ubuntu.com/ubuntu/pool/main/f/fuse/${file}"
+
+ # install dpkg-source and dpkg-buildpackage commands
+ apt-get install -y --no-install-recommends dpkg-dev
+
+ # extract source and apply patches
+ dpkg-source -x fuse_2.9.2-4ubuntu4.14.04.1.dsc
+ rm -f fuse_2.9.2.orig.tar.xz fuse_2.9.2-4ubuntu4.14.04.1.debian.tar.xz fuse_2.9.2-4ubuntu4.14.04.1.dsc
+
+ # add new version to changelog
+ cd fuse-2.9.2
+ (
+ echo "fuse (2.9.2-5) precise; urgency=low"
+ echo
+ echo " * Backported from trusty-security to precise"
+ echo
+    echo " -- Joshua Randall <jcrandall@alum.mit.edu> Thu, 4 Feb 2016 11:31:00 -0000"
+ echo
+ cat debian/changelog
+ ) > debian/changelog.new
+ mv debian/changelog.new debian/changelog
+
+ # install build-deps and build
+ apt-get install -y --no-install-recommends debhelper dh-autoreconf libselinux-dev
+ dpkg-buildpackage -rfakeroot -b
+ )
+ fpm_build "$LIBFUSE_DIR/fuse_2.9.2-5_amd64.deb" fuse "Ubuntu Developers" deb "2.9.2" --iteration 5
+ fpm_build "$LIBFUSE_DIR/libfuse2_2.9.2-5_amd64.deb" libfuse2 "Ubuntu Developers" deb "2.9.2" --iteration 5
+ fpm_build "$LIBFUSE_DIR/libfuse-dev_2.9.2-5_amd64.deb" libfuse-dev "Ubuntu Developers" deb "2.9.2" --iteration 5
+ dpkg -i \
+ "$WORKSPACE/packages/$TARGET/fuse_2.9.2-5_amd64.deb" \
+ "$WORKSPACE/packages/$TARGET/libfuse2_2.9.2-5_amd64.deb" \
+ "$WORKSPACE/packages/$TARGET/libfuse-dev_2.9.2-5_amd64.deb"
+ apt-get -y --no-install-recommends -f install
+ rm -rf $LIBFUSE_DIR
+elif [[ $TARGET =~ centos6 ]]; then
+ # port fuse 2.9.2 to centos 6
+ # install tools to build rpm from source
+ yum install -y rpm-build redhat-rpm-config
+ LIBFUSE_DIR=$(mktemp -d)
+ (
+ cd "$LIBFUSE_DIR"
+ # download fuse 2.9.2 centos 7 source rpm
+ file="fuse-2.9.2-6.el7.src.rpm" && curl -L -o "${file}" "http://vault.centos.org/7.2.1511/os/Source/SPackages/${file}"
+ (
+ # modify source rpm spec to remove conflict on filesystem version
+ mkdir -p /root/rpmbuild/SOURCES
+ cd /root/rpmbuild/SOURCES
+ rpm2cpio ${LIBFUSE_DIR}/fuse-2.9.2-6.el7.src.rpm | cpio -i
+ perl -pi -e 's/Conflicts:\s*filesystem.*//g' fuse.spec
+ )
+ # build rpms from source
+ rpmbuild -bb /root/rpmbuild/SOURCES/fuse.spec
+ rm -f fuse-2.9.2-6.el7.src.rpm
+ # move built RPMs to LIBFUSE_DIR
+ mv "/root/rpmbuild/RPMS/x86_64/fuse-2.9.2-6.el6.x86_64.rpm" ${LIBFUSE_DIR}/
+ mv "/root/rpmbuild/RPMS/x86_64/fuse-libs-2.9.2-6.el6.x86_64.rpm" ${LIBFUSE_DIR}/
+ mv "/root/rpmbuild/RPMS/x86_64/fuse-devel-2.9.2-6.el6.x86_64.rpm" ${LIBFUSE_DIR}/
+ rm -rf /root/rpmbuild
+ )
+ fpm_build "$LIBFUSE_DIR/fuse-libs-2.9.2-6.el6.x86_64.rpm" fuse-libs "Centos Developers" rpm "2.9.2" --iteration 5
+ fpm_build "$LIBFUSE_DIR/fuse-2.9.2-6.el6.x86_64.rpm" fuse "Centos Developers" rpm "2.9.2" --iteration 5 --no-auto-depends
+ fpm_build "$LIBFUSE_DIR/fuse-devel-2.9.2-6.el6.x86_64.rpm" fuse-devel "Centos Developers" rpm "2.9.2" --iteration 5 --no-auto-depends
+ yum install \
+ "$WORKSPACE/packages/$TARGET/fuse-libs-2.9.2-5.x86_64.rpm" \
+ "$WORKSPACE/packages/$TARGET/fuse-2.9.2-5.x86_64.rpm" \
+ "$WORKSPACE/packages/$TARGET/fuse-devel-2.9.2-5.x86_64.rpm"
+fi
+
+# Go binaries
+cd $WORKSPACE/packages/$TARGET
+export GOPATH=$(mktemp -d)
+package_go_binary services/keepstore keepstore \
+ "Keep storage daemon, accessible to clients on the LAN"
+package_go_binary services/keepproxy keepproxy \
+ "Make a Keep cluster accessible to clients that are not on the LAN"
+package_go_binary services/keep-web keep-web \
+ "Static web hosting service for user data stored in Arvados Keep"
+package_go_binary services/datamanager arvados-data-manager \
+ "Ensure block replication levels, report disk usage, and determine which blocks should be deleted when space is needed"
+package_go_binary services/arv-git-httpd arvados-git-httpd \
+ "Provide authenticated http access to Arvados-hosted git repositories"
+package_go_binary services/crunchstat crunchstat \
+ "Gather cpu/memory/network statistics of running Crunch jobs"
+package_go_binary tools/keep-rsync keep-rsync \
+ "Copy all data from one set of Keep servers to another"
+
+# The Python SDK
+# Please resist the temptation to add --no-python-fix-name to the fpm call here
+# (which would remove the python- prefix from the package name), because this
+# package is a dependency of arvados-fuse, and fpm can not omit the python-
+# prefix from only one of the dependencies of a package... Maybe I could
+# whip up a patch and send it upstream, but that will be for another day. Ward,
+# 2014-05-15
+cd $WORKSPACE/packages/$TARGET
+rm -rf "$WORKSPACE/sdk/python/build"
+fpm_build $WORKSPACE/sdk/python "${PYTHON2_PKG_PREFIX}-arvados-python-client" 'Curoverse, Inc.' 'python' "$(awk '($1 == "Version:"){print $2}' $WORKSPACE/sdk/python/arvados_python_client.egg-info/PKG-INFO)" "--url=https://arvados.org" "--description=The Arvados Python SDK" --deb-recommends=git
+
+# The PAM module
+if [[ $TARGET =~ debian|ubuntu ]]; then
+ cd $WORKSPACE/packages/$TARGET
+ rm -rf "$WORKSPACE/sdk/pam/build"
+ fpm_build $WORKSPACE/sdk/pam libpam-arvados 'Curoverse, Inc.' 'python' "$(awk '($1 == "Version:"){print $2}' $WORKSPACE/sdk/pam/arvados_pam.egg-info/PKG-INFO)" "--url=https://arvados.org" "--description=PAM module for authenticating shell logins using Arvados API tokens" --depends libpam-python
+fi
+
+# The FUSE driver
+# Please see comment about --no-python-fix-name above; we stay consistent and do
+# not omit the python- prefix first.
+cd $WORKSPACE/packages/$TARGET
+rm -rf "$WORKSPACE/services/fuse/build"
+fpm_build $WORKSPACE/services/fuse "${PYTHON2_PKG_PREFIX}-arvados-fuse" 'Curoverse, Inc.' 'python' "$(awk '($1 == "Version:"){print $2}' $WORKSPACE/services/fuse/arvados_fuse.egg-info/PKG-INFO)" "--url=https://arvados.org" "--description=The Keep FUSE driver"
+
+# The node manager
+cd $WORKSPACE/packages/$TARGET
+rm -rf "$WORKSPACE/services/nodemanager/build"
+fpm_build $WORKSPACE/services/nodemanager arvados-node-manager 'Curoverse, Inc.' 'python' "$(awk '($1 == "Version:"){print $2}' $WORKSPACE/services/nodemanager/arvados_node_manager.egg-info/PKG-INFO)" "--url=https://arvados.org" "--description=The Arvados node manager"
+
+# The Docker image cleaner
+cd $WORKSPACE/packages/$TARGET
+rm -rf "$WORKSPACE/services/dockercleaner/build"
+fpm_build $WORKSPACE/services/dockercleaner arvados-docker-cleaner 'Curoverse, Inc.' 'python3' "$(awk '($1 == "Version:"){print $2}' $WORKSPACE/services/dockercleaner/arvados_docker_cleaner.egg-info/PKG-INFO)" "--url=https://arvados.org" "--description=The Arvados Docker image cleaner"
+
+# Forked libcloud
+LIBCLOUD_DIR=$(mktemp -d)
+(
+ cd $LIBCLOUD_DIR
+ git clone $DASHQ_UNLESS_DEBUG https://github.com/curoverse/libcloud.git .
+ git checkout apache-libcloud-$LIBCLOUD_PIN
+ # libcloud is absurdly noisy without -q, so force -q here
+ OLD_DASHQ_UNLESS_DEBUG=$DASHQ_UNLESS_DEBUG
+ DASHQ_UNLESS_DEBUG=-q
+ handle_python_package
+ DASHQ_UNLESS_DEBUG=$OLD_DASHQ_UNLESS_DEBUG
+)
+fpm_build $LIBCLOUD_DIR "$PYTHON2_PKG_PREFIX"-apache-libcloud
+rm -rf $LIBCLOUD_DIR
+
+# Python 2 dependencies
+declare -a PIP_DOWNLOAD_SWITCHES=(--no-deps)
+# Add --no-use-wheel if this pip knows it.
+pip wheel --help >/dev/null 2>&1
+case "$?" in
+ 0) PIP_DOWNLOAD_SWITCHES+=(--no-use-wheel) ;;
+ 2) ;;
+ *) echo "WARNING: `pip wheel` test returned unknown exit code $?" ;;
+esac
+
+for deppkg in "${PYTHON_BACKPORTS[@]}"; do
+ outname=$(echo "$deppkg" | sed -e 's/^python-//' -e 's/[<=>].*//' -e 's/_/-/g' -e "s/^/${PYTHON2_PKG_PREFIX}-/")
+ case "$deppkg" in
+ httplib2|google-api-python-client)
+ # Work around 0640 permissions on some package files.
+ # See #7591 and #7991.
+ pyfpm_workdir=$(mktemp --tmpdir -d pyfpm-XXXXXX) && (
+ set -e
+ cd "$pyfpm_workdir"
+ pip install "${PIP_DOWNLOAD_SWITCHES[@]}" --download . "$deppkg"
+ tar -xf "$deppkg"-*.tar*
+ cd "$deppkg"-*/
+ "python$PYTHON2_VERSION" setup.py $DASHQ_UNLESS_DEBUG egg_info build
+ chmod -R go+rX .
+ set +e
+ # --iteration 2 provides an upgrade for previously built
+ # buggy packages.
+ fpm_build . "$outname" "" python "" --iteration 2
+ # The upload step uses the package timestamp to determine
+ # whether it's new. --no-clobber plays nice with that.
+ mv --no-clobber "$outname"*.$FORMAT "$WORKSPACE/packages/$TARGET"
+ )
+ if [ 0 != "$?" ]; then
+ echo "ERROR: $deppkg build process failed"
+ EXITCODE=1
+ fi
+ if [ -n "$pyfpm_workdir" ]; then
+ rm -rf "$pyfpm_workdir"
+ fi
+ ;;
+ *)
+ fpm_build "$deppkg" "$outname"
+ ;;
+ esac
+done
+
+# Python 3 dependencies
+for deppkg in "${PYTHON3_BACKPORTS[@]}"; do
+ outname=$(echo "$deppkg" | sed -e 's/^python-//' -e 's/[<=>].*//' -e 's/_/-/g' -e "s/^/${PYTHON3_PKG_PREFIX}-/")
+ # The empty string is the vendor argument: these aren't Curoverse software.
+ fpm_build "$deppkg" "$outname" "" python3
+done
+
+# Build the API server package
+handle_rails_package arvados-api-server "$WORKSPACE/services/api" \
+ "$WORKSPACE/agpl-3.0.txt" --url="https://arvados.org" \
+ --description="Arvados API server - Arvados is a free and open source platform for big data science." \
+ --license="GNU Affero General Public License, version 3.0"
+
+# Build the workbench server package
+(
+ set -e
+ cd "$WORKSPACE/apps/workbench"
+
+ # We need to bundle to be ready even when we build a package without vendor directory
+ # because asset compilation requires it.
+ bundle install --path vendor/bundle >"$STDOUT_IF_DEBUG"
+
+ # clear the tmp directory; the asset generation step will recreate tmp/cache/assets,
+ # and we want that in the package, so it's easier to not exclude the tmp directory
+ # from the package - empty it instead.
+ rm -rf tmp
+ mkdir tmp
+
+ # Set up application.yml and production.rb so that asset precompilation works
+ \cp config/application.yml.example config/application.yml -f
+ \cp config/environments/production.rb.example config/environments/production.rb -f
+ sed -i 's/secret_token: ~/secret_token: xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx/' config/application.yml
+
+ RAILS_ENV=production RAILS_GROUPS=assets bundle exec rake assets:precompile >/dev/null
+
+ # Remove generated configuration files so they don't go in the package.
+ rm config/application.yml config/environments/production.rb
+)
+
+if [[ "$?" != "0" ]]; then
+ echo "ERROR: Asset precompilation failed"
+ EXITCODE=1
+else
+ handle_rails_package arvados-workbench "$WORKSPACE/apps/workbench" \
+ "$WORKSPACE/agpl-3.0.txt" --url="https://arvados.org" \
+ --description="Arvados Workbench - Arvados is a free and open source platform for big data science." \
+ --license="GNU Affero General Public License, version 3.0"
+fi
+
+# clean up temporary GOPATH
+rm -rf "$GOPATH"
+
+exit $EXITCODE
diff --git a/build/jenkins/run-cwl-tests.sh b/build/jenkins/run-cwl-tests.sh
new file mode 100755
index 0000000..53c0538
--- /dev/null
+++ b/build/jenkins/run-cwl-tests.sh
@@ -0,0 +1,218 @@
+#!/bin/bash
+
+read -rd "\000" helpmessage <<EOF
+$(basename $0): Test cwl tool and (optionally) upload to PyPi and Docker Hub.
+
+Syntax:
+ WORKSPACE=/path/to/common-workflow-language $(basename $0) [options]
+
+Options:
+
+--upload-pypi Upload package to pypi (default: false)
+--upload-docker Upload packages to docker hub (default: false)
+--debug Output debug information (default: false)
+
+WORKSPACE=path Path to the common-workflow-language source tree
+
+EOF
+
+EXITCODE=0
+CALL_FREIGHT=0
+
+DEBUG=0
+UPLOAD_PYPI=0
+UPLOAD_DOCKER=0
+
+VENVDIR=
+
+leave_temp=
+
+declare -A leave_temp
+
+set -e
+
+clear_temp() {
+ leaving=""
+ for var in VENVDIR
+ do
+ if [[ -z "${leave_temp[$var]}" ]]
+ then
+ if [[ -n "${!var}" ]]
+ then
+ rm -rf "${!var}"
+ fi
+ else
+ leaving+=" $var=\"${!var}\""
+ fi
+ done
+ if [[ -n "$leaving" ]]; then
+ echo "Leaving behind temp dirs: $leaving"
+ fi
+}
+
+fatal() {
+ clear_temp
+ echo >&2 "Fatal: $* (encountered in ${FUNCNAME[1]} at ${BASH_SOURCE[1]} line ${BASH_LINENO[0]})"
+ exit 1
+}
+
+trap clear_temp INT EXIT
+
+# Set up temporary install dirs (unless existing dirs were supplied)
+for tmpdir in VENVDIR
+do
+ if [[ -n "${!tmpdir}" ]]; then
+ leave_temp[$tmpdir]=1
+ else
+ eval $tmpdir=$(mktemp -d)
+ fi
+done
+
+
+while [[ -n "$1" ]]
+do
+ arg="$1"; shift
+ case "$arg" in
+ --help)
+ echo >&2 "$helpmessage"
+ echo >&2
+ exit 1
+ ;;
+ --debug)
+ DEBUG=1
+ ;;
+ --upload-pypi)
+ UPLOAD_PYPI=1
+ ;;
+ --upload-docker)
+ UPLOAD_DOCKER=1
+ ;;
+ --leave-temp)
+ leave_temp[VENVDIR]=1
+ ;;
+ *=*)
+ eval export $(echo $arg | cut -d= -f1)=\"$(echo $arg | cut -d= -f2-)\"
+ ;;
+ *)
+ echo >&2 "$0: Unrecognized option: '$arg'. Try: $0 --help"
+ exit 1
+ ;;
+ esac
+done
+
+# Sanity check
+if ! [[ -n "$WORKSPACE" ]]; then
+ echo >&2 "$helpmessage"
+ echo >&2
+ echo >&2 "Error: WORKSPACE environment variable not set"
+ echo >&2
+ exit 1
+fi
+
+if [[ "$DEBUG" != 0 ]]; then
+ echo "Workspace is $WORKSPACE"
+fi
+
+virtualenv --setuptools "$VENVDIR" || fatal "virtualenv $VENVDIR failed"
+. "$VENVDIR/bin/activate"
+
+handle_python_package () {
+ # This function assumes the current working directory is the python package directory
+ if [[ "$UPLOAD_PYPI" != 0 ]]; then
+ # Make sure only to use sdist - that's the only format pip can deal with (sigh)
+ if [[ "$DEBUG" != 0 ]]; then
+ python setup.py sdist upload
+ else
+ python setup.py -q sdist upload
+ fi
+ else
+ # Make sure only to use sdist - that's the only format pip can deal with (sigh)
+ if [[ "$DEBUG" != 0 ]]; then
+ python setup.py sdist
+ else
+ python setup.py -q sdist
+ fi
+ fi
+}
+
+# Make all files world-readable -- jenkins runs with umask 027, and has checked
+# out our git tree here
+chmod o+r "$WORKSPACE" -R
+
+# Now fix our umask to something better suited to building and publishing
+# gems and packages
+umask 0022
+
+if [[ "$DEBUG" != 0 ]]; then
+ echo "umask is" `umask`
+fi
+
+# Python packages
+if [[ "$DEBUG" != 0 ]]; then
+ echo
+ echo "Python packages"
+ echo
+fi
+
+cd "$WORKSPACE"
+
+if test -d cwltool ; then
+ (cd cwltool
+ git fetch
+ git reset --hard origin/master
+ )
+else
+  git clone git@github.com:common-workflow-language/cwltool.git
+ (cd cwltool
+   git config user.email "sysadmin@curoverse.com"
+ git config user.name "Curoverse build bot"
+ )
+fi
+
+(cd cwltool
+ python setup.py install
+ python setup.py test
+ ./build-node-docker.sh
+)
+
+./run_test.sh RUNNER=cwltool/cwltool/main.py DRAFT=draft-2
+./run_test.sh RUNNER=cwltool/cwltool/main.py DRAFT=draft-3
+
+(cd cwltool
+ handle_python_package
+)
+
+(cd cwltool/cwl-runner
+ handle_python_package
+)
+
+(cd cwltool
+ ./build-cwl-docker.sh
+)
+
+if [[ "$UPLOAD_DOCKER" != 0 ]]; then
+ docker push commonworkflowlanguage/cwltool_module
+ docker push commonworkflowlanguage/cwltool
+ docker push commonworkflowlanguage/nodejs-engine
+fi
+
+if test -d common-workflow-language.github.io ; then
+ (cd common-workflow-language.github.io
+ git fetch
+ git reset --hard origin/master
+ )
+else
+  git clone git@github.com:common-workflow-language/common-workflow-language.github.io.git
+ (cd common-workflow-language.github.io
+   git config user.email "sysadmin@curoverse.com"
+ git config user.name "Curoverse build bot"
+ )
+fi
+
+python -mcwltool --outdir=$PWD/common-workflow-language.github.io site/cwlsite.cwl site/cwlsite-job.json
+
+(cd common-workflow-language.github.io
+ git add --all
+ git diff-index --quiet HEAD || git commit -m"Build bot"
+ git push
+)
diff --git a/build/jenkins/run-deploy.sh b/build/jenkins/run-deploy.sh
new file mode 100755
index 0000000..1b06c65
--- /dev/null
+++ b/build/jenkins/run-deploy.sh
@@ -0,0 +1,266 @@
+#!/bin/bash
+
+DEBUG=0
+SSH_PORT=22
+
+function usage {
+ echo >&2
+ echo >&2 "usage: $0 [options] <identifier>"
+ echo >&2
+ echo >&2 " <identifier> Arvados cluster name"
+ echo >&2
+ echo >&2 "$0 options:"
+ echo >&2 " -p, --port <ssh port> SSH port to use (default 22)"
+ echo >&2 " -d, --debug Enable debug output"
+ echo >&2 " -h, --help Display this help and exit"
+ echo >&2
+ echo >&2 "Note: this script requires an arvados token created with these permissions:"
+ echo >&2 ' arv api_client_authorization create_system_auth \'
+ echo >&2 ' --scopes "[\"GET /arvados/v1/virtual_machines\",'
+ echo >&2 ' \"GET /arvados/v1/keep_services\",'
+ echo >&2 ' \"GET /arvados/v1/keep_services/\",'
+ echo >&2 ' \"GET /arvados/v1/groups\",'
+ echo >&2 ' \"GET /arvados/v1/groups/\",'
+ echo >&2 ' \"GET /arvados/v1/links\",'
+ echo >&2 ' \"GET /arvados/v1/collections\",'
+ echo >&2 ' \"POST /arvados/v1/collections\",'
+ echo >&2 ' \"POST /arvados/v1/links\"]"'
+ echo >&2
+}
+
+# NOTE: This requires GNU getopt (part of the util-linux package on Debian-based distros).
+TEMP=`getopt -o hdp: \
+ --long help,debug,port: \
+ -n "$0" -- "$@"`
+
+if [ $? != 0 ] ; then echo "Use -h for help"; exit 1 ; fi
+# Note the quotes around `$TEMP': they are essential!
+eval set -- "$TEMP"
+
+while [ $# -ge 1 ]
+do
+ case $1 in
+ -p | --port)
+ SSH_PORT="$2"; shift 2
+ ;;
+ -d | --debug)
+ DEBUG=1
+ shift
+ ;;
+ --)
+ shift
+ break
+ ;;
+ *)
+ usage
+ exit 1
+ ;;
+ esac
+done
+
+IDENTIFIER=$1
+
+if [[ "$IDENTIFIER" == '' ]]; then
+ usage
+ exit 1
+fi
+
+EXITCODE=0
+
+COLUMNS=80
+
+PUPPET_AGENT='
+now() { date +%s; }
+let endtime="$(now) + 600"
+while [ "$endtime" -gt "$(now)" ]; do
+ puppet agent --test --detailed-exitcodes
+ agent_exitcode=$?
+ if [ 0 = "$agent_exitcode" ] || [ 2 = "$agent_exitcode" ]; then
+ break
+ else
+ sleep 10s
+ fi
+done
+exit ${agent_exitcode:-99}
+'
+
+title () {
+ date=`date +'%Y-%m-%d %H:%M:%S'`
+ printf "$date $1\n"
+}
+
+function run_puppet() {
+ node=$1
+ return_var=$2
+
+ title "Running puppet on $node"
+ TMP_FILE=`mktemp`
+ if [[ "$DEBUG" != "0" ]]; then
+ ssh -t -p$SSH_PORT -o "StrictHostKeyChecking no" -o "ConnectTimeout 5" root@$node -C bash -c "'$PUPPET_AGENT'" | tee $TMP_FILE
+ else
+ ssh -t -p$SSH_PORT -o "StrictHostKeyChecking no" -o "ConnectTimeout 5" root@$node -C bash -c "'$PUPPET_AGENT'" > $TMP_FILE 2>&1
+ fi
+
+ ECODE=${PIPESTATUS[0]}
+ RESULT=$(cat $TMP_FILE)
+
+ if [[ "$ECODE" != "255" && ! ("$RESULT" =~ 'already in progress') && "$ECODE" != "2" && "$ECODE" != "0" ]]; then
+ # Ssh exits 255 if the connection timed out. Just ignore that.
+ # Puppet exits 2 if there are changes. For real!
+ # Puppet prints 'Notice: Run of Puppet configuration client already in progress' if another puppet process
+ # was already running
+ echo "ERROR running puppet on $node: exit code $ECODE"
+ if [[ "$DEBUG" == "0" ]]; then
+ title "Command output follows:"
+ echo $RESULT
+ fi
+ fi
+ if [[ "$ECODE" == "255" ]]; then
+ title "Connection timed out"
+ ECODE=0
+ fi
+ if [[ "$ECODE" == "2" ]]; then
+ ECODE=0
+ fi
+ rm -f $TMP_FILE
+ eval "$return_var=$ECODE"
+}
+
+function run_command() {
+ node=$1
+ return_var=$2
+ command=$3
+
+ title "Running '$command' on $node"
+ TMP_FILE=`mktemp`
+ if [[ "$DEBUG" != "0" ]]; then
+ ssh -t -p$SSH_PORT -o "StrictHostKeyChecking no" -o "ConnectTimeout 5" root@$node -C "$command" | tee $TMP_FILE
+ else
+ ssh -t -p$SSH_PORT -o "StrictHostKeyChecking no" -o "ConnectTimeout 5" root@$node -C "$command" > $TMP_FILE 2>&1
+ fi
+
+ ECODE=$?
+ RESULT=$(cat $TMP_FILE)
+
+ if [[ "$ECODE" != "255" && "$ECODE" != "0" ]]; then
+    # Ssh exits 255 if the connection timed out. Just ignore that; it's possible that this node is
+    # a shell node that is down.
+ title "ERROR running command on $node: exit code $ECODE"
+ if [[ "$DEBUG" == "0" ]]; then
+ title "Command output follows:"
+      echo "$RESULT"
+ fi
+ fi
+ if [[ "$ECODE" == "255" ]]; then
+ title "Connection timed out"
+ ECODE=0
+ fi
+ rm -f $TMP_FILE
+ eval "$return_var=$ECODE"
+}
+
+title "Updating API server"
+SUM_ECODE=0
+run_puppet $IDENTIFIER.arvadosapi.com ECODE
+SUM_ECODE=$(($SUM_ECODE + $ECODE))
+if [ ! "$IDENTIFIER" = "c97qk" ]
+then
+ run_command $IDENTIFIER.arvadosapi.com ECODE "dpkg -L arvados-mailchimp-plugin 2>/dev/null && apt-get install arvados-mailchimp-plugin --reinstall || echo"
+ SUM_ECODE=$(($SUM_ECODE + $ECODE))
+fi
+
+if [[ "$SUM_ECODE" != "0" ]]; then
+ title "ERROR: Updating API server FAILED"
+ EXITCODE=$(($EXITCODE + $SUM_ECODE))
+ exit $EXITCODE
+fi
+
+title "Loading ARVADOS_API_HOST and ARVADOS_API_TOKEN"
+if [[ -f "$HOME/.config/arvados/$IDENTIFIER.arvadosapi.com.conf" ]]; then
+ . $HOME/.config/arvados/$IDENTIFIER.arvadosapi.com.conf
+else
+ title "WARNING: $HOME/.config/arvados/$IDENTIFIER.arvadosapi.com.conf not found."
+fi
+if [[ "$ARVADOS_API_HOST" == "" ]] || [[ "$ARVADOS_API_TOKEN" == "" ]]; then
+ title "ERROR: ARVADOS_API_HOST and/or ARVADOS_API_TOKEN environment variables are not set."
+ exit 1
+fi
+
+title "Locating Arvados Standard Docker images project"
+
+JSON_FILTER="[[\"name\", \"=\", \"Arvados Standard Docker Images\"], [\"owner_uuid\", \"=\", \"$IDENTIFIER-tpzed-000000000000000\"]]"
+DOCKER_IMAGES_PROJECT=`ARVADOS_API_HOST=$ARVADOS_API_HOST ARVADOS_API_TOKEN=$ARVADOS_API_TOKEN arv --format=uuid group list --filters="$JSON_FILTER"`
+
+if [[ "$DOCKER_IMAGES_PROJECT" == "" ]]; then
+ title "Warning: Arvados Standard Docker Images project not found. Creating it."
+
+ DOCKER_IMAGES_PROJECT=`ARVADOS_API_HOST=$ARVADOS_API_HOST ARVADOS_API_TOKEN=$ARVADOS_API_TOKEN arv --format=uuid group create --group "{\"owner_uuid\":\"$IDENTIFIER-tpzed-000000000000000\", \"name\":\"Arvados Standard Docker Images\", \"group_class\":\"project\"}"`
+ ARVADOS_API_HOST=$ARVADOS_API_HOST ARVADOS_API_TOKEN=$ARVADOS_API_TOKEN arv link create --link "{\"tail_uuid\":\"$IDENTIFIER-j7d0g-fffffffffffffff\", \"head_uuid\":\"$DOCKER_IMAGES_PROJECT\", \"link_class\":\"permission\", \"name\":\"can_read\" }"
+ if [[ "$?" != "0" ]]; then
+      title "ERROR: could not create standard Docker images project. Please create it, cf. http://doc.arvados.org/install/create-standard-objects.html"
+ exit 1
+ fi
+fi
+
+title "Found Arvados Standard Docker Images project with uuid $DOCKER_IMAGES_PROJECT"
+GIT_COMMIT=`ssh -o "StrictHostKeyChecking no" $IDENTIFIER cat /usr/local/arvados/src/git-commit.version`
+
+if [[ "$?" != "0" ]] || [[ "$GIT_COMMIT" == "" ]]; then
+ title "ERROR: unable to get arvados/jobs Docker image git revision"
+ exit 1
+else
+ title "Found git commit for arvados/jobs Docker image: $GIT_COMMIT"
+fi
+
+run_command shell.$IDENTIFIER ECODE "ARVADOS_API_HOST=$ARVADOS_API_HOST ARVADOS_API_TOKEN=$ARVADOS_API_TOKEN /usr/local/rvm/bin/rvm-exec default arv keep docker" |grep -q $GIT_COMMIT
+
+if [[ "$?" == "0" ]]; then
+ title "Found latest arvados/jobs Docker image, nothing to upload"
+else
+ title "Installing latest arvados/jobs Docker image"
+ ssh -o "StrictHostKeyChecking no" shell.$IDENTIFIER "ARVADOS_API_HOST=$ARVADOS_API_HOST ARVADOS_API_TOKEN=$ARVADOS_API_TOKEN /usr/local/rvm/bin/rvm-exec default arv keep docker --pull --project-uuid=$DOCKER_IMAGES_PROJECT arvados/jobs $GIT_COMMIT"
+ if [[ "$?" -ne 0 ]]; then
+ title "'git pull' failed exiting..."
+ exit 1
+ fi
+fi
+
+title "Gathering list of shell and Keep nodes"
+SHELL_NODES=`ARVADOS_API_HOST=$ARVADOS_API_HOST ARVADOS_API_TOKEN=$ARVADOS_API_TOKEN arv virtual_machine list |jq .items[].hostname -r`
+KEEP_NODES=`ARVADOS_API_HOST=$ARVADOS_API_HOST ARVADOS_API_TOKEN=$ARVADOS_API_TOKEN arv keep_service list |jq .items[].service_host -r`
+
+title "Updating workbench"
+SUM_ECODE=0
+if [[ `host workbench.$ARVADOS_API_HOST |cut -f4 -d' '` != `host $ARVADOS_API_HOST |cut -f4 -d' '` ]]; then
+ # Workbench runs on a separate host. We need to run puppet there too.
+ run_puppet workbench.$IDENTIFIER ECODE
+ SUM_ECODE=$(($SUM_ECODE + $ECODE))
+fi
+
+if [[ "$SUM_ECODE" != "0" ]]; then
+ title "ERROR: Updating workbench FAILED"
+ EXITCODE=$(($EXITCODE + $SUM_ECODE))
+ exit $EXITCODE
+fi
+
+for n in manage switchyard $SHELL_NODES $KEEP_NODES; do
+ ECODE=0
+ if [[ $n =~ $ARVADOS_API_HOST$ ]]; then
+ # e.g. keep.qr1hi.arvadosapi.com
+ node=$n
+ else
+ # e.g. shell
+ node=$n.$ARVADOS_API_HOST
+ fi
+
+ # e.g. keep.qr1hi
+ node=${node%.arvadosapi.com}
+
+ title "Updating $node"
+ run_puppet $node ECODE
+ if [[ "$ECODE" != "0" ]]; then
+ title "ERROR: Updating $node node FAILED: exit code $ECODE"
+ EXITCODE=$(($EXITCODE + $ECODE))
+ exit $EXITCODE
+ fi
+done
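
For reference, here is a minimal usage sketch of how this script is typically driven. The cluster name "qr1hi" and the SSH port are only examples; the token command and the config-file location are the ones the script itself documents and reads:

    # Create the restricted token the script needs (scopes as listed in the usage text):
    arv api_client_authorization create_system_auth \
        --scopes '["GET /arvados/v1/virtual_machines", "GET /arvados/v1/keep_services",
                   "GET /arvados/v1/keep_services/", "GET /arvados/v1/groups",
                   "GET /arvados/v1/groups/", "GET /arvados/v1/links",
                   "GET /arvados/v1/collections", "POST /arvados/v1/collections",
                   "POST /arvados/v1/links"]'

    # Save ARVADOS_API_HOST and ARVADOS_API_TOKEN in
    # ~/.config/arvados/qr1hi.arvadosapi.com.conf, then run the deploy:
    ./run-deploy.sh --port 2222 --debug qr1hi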
diff --git a/build/jenkins/run-diagnostics-suite.sh b/build/jenkins/run-diagnostics-suite.sh
new file mode 100755
index 0000000..015a053
--- /dev/null
+++ b/build/jenkins/run-diagnostics-suite.sh
@@ -0,0 +1,86 @@
+#!/bin/bash
+
+EXITCODE=0
+
+INSTANCE=$1
+REVISION=$2
+
+if [[ "$INSTANCE" == '' ]]; then
+ echo "Syntax: $0 <instance> [revision]"
+ exit 1
+fi
+
+if [[ "$REVISION" == '' ]]; then
+  # See if there's a configuration file with the revision.
+ CONFIG_PATH=/home/jenkins/configuration/$INSTANCE.arvadosapi.com-versions.conf
+ if [[ -f $CONFIG_PATH ]]; then
+ echo "Loading git revision from $CONFIG_PATH"
+ . $CONFIG_PATH
+ REVISION=$ARVADOS_GIT_REVISION
+ fi
+fi
+
+if [[ "$REVISION" != '' ]]; then
+ echo "Git revision is $REVISION"
+else
+ echo "No valid git revision found, proceeding with what is in place."
+fi
+
+# Sanity check
+if ! [[ -n "$WORKSPACE" ]]; then
+ echo "WORKSPACE environment variable not set"
+ exit 1
+fi
+
+title () {
+ txt="********** $1 **********"
+ printf "\n%*s%s\n\n" $((($COLUMNS-${#txt})/2)) "" "$txt"
+}
+
+timer_reset() {
+ t0=$SECONDS
+}
+
+timer() {
+ echo -n "$(($SECONDS - $t0))s"
+}
+
+source /etc/profile.d/rvm.sh
+echo $WORKSPACE
+
+title "Starting diagnostics"
+timer_reset
+
+cd $WORKSPACE
+
+if [[ "$REVISION" != '' ]]; then
+ git checkout $REVISION
+fi
+
+cp -f /home/jenkins/diagnostics/arvados-workbench/$INSTANCE-application.yml $WORKSPACE/apps/workbench/config/application.yml
+
+cd $WORKSPACE/apps/workbench
+
+HOME="$GEMHOME" bundle install --no-deployment
+
+if [[ ! -d tmp ]]; then
+ mkdir tmp
+fi
+
+RAILS_ENV=diagnostics bundle exec rake TEST=test/diagnostics/pipeline_test.rb
+
+ECODE=$?
+
+if [[ "$REVISION" != '' ]]; then
+ git checkout master
+fi
+
+if [[ "$ECODE" != "0" ]]; then
+ title "!!!!!! DIAGNOSTICS FAILED (`timer`) !!!!!!"
+ EXITCODE=$(($EXITCODE + $ECODE))
+ exit $EXITCODE
+fi
+
+title "Diagnostics complete (`timer`)"
+
+exit $EXITCODE
diff --git a/build/jenkins/run-library.sh b/build/jenkins/run-library.sh
new file mode 100755
index 0000000..c2e9b40
--- /dev/null
+++ b/build/jenkins/run-library.sh
@@ -0,0 +1,348 @@
+#!/bin/bash
+
+# A library of functions shared by the various scripts in this directory.
+
+# This is the timestamp from around when we merged the changes to include licenses
+# with Arvados packages. We use it as a heuristic to add revisions for
+# older packages.
+LICENSE_PACKAGE_TS=20151208015500
+
+debug_echo () {
+ echo "$@" >"$STDOUT_IF_DEBUG"
+}
+
+find_easy_install() {
+ for version_suffix in "$@"; do
+ if "easy_install$version_suffix" --version >/dev/null 2>&1; then
+ echo "easy_install$version_suffix"
+ return 0
+ fi
+ done
+ cat >&2 <<EOF
+$helpmessage
+
+Error: easy_install$1 (from the Python setuptools module) not found
+
+EOF
+ exit 1
+}
+
+format_last_commit_here() {
+ local format="$1"; shift
+ TZ=UTC git log -n1 --first-parent "--format=format:$format" .
+}
+
+version_from_git() {
+ # Generates a version number from the git log for the current working
+ # directory, and writes it to stdout.
+ local git_ts git_hash
+ declare $(format_last_commit_here "git_ts=%ct git_hash=%h")
+ echo "0.1.$(date -ud "@$git_ts" +%Y%m%d%H%M%S).$git_hash"
+}
+
+nohash_version_from_git() {
+ version_from_git | cut -d. -f1-3
+}
+
+timestamp_from_git() {
+ format_last_commit_here "%ct"
+}
+
+handle_python_package () {
+ # This function assumes the current working directory is the python package directory
+ if [ -n "$(find dist -name "*-$(nohash_version_from_git).tar.gz" -print -quit)" ]; then
+ # This package doesn't need rebuilding.
+ return
+ fi
+ # Make sure only to use sdist - that's the only format pip can deal with (sigh)
+ python setup.py $DASHQ_UNLESS_DEBUG sdist
+}
+
+handle_ruby_gem() {
+ local gem_name="$1"; shift
+ local gem_version="$(nohash_version_from_git)"
+ local gem_src_dir="$(pwd)"
+
+ if ! [[ -e "${gem_name}-${gem_version}.gem" ]]; then
+ find -maxdepth 1 -name "${gem_name}-*.gem" -delete
+
+ # -q appears to be broken in gem version 2.2.2
+ $GEM build "$gem_name.gemspec" $DASHQ_UNLESS_DEBUG >"$STDOUT_IF_DEBUG" 2>"$STDERR_IF_DEBUG"
+ fi
+}
+
+# Usage: package_go_binary services/foo arvados-foo "Compute foo to arbitrary precision"
+package_go_binary() {
+ local src_path="$1"; shift
+ local prog="$1"; shift
+ local description="$1"; shift
+ local license_file="${1:-agpl-3.0.txt}"; shift
+
+ debug_echo "package_go_binary $src_path as $prog"
+
+ local basename="${src_path##*/}"
+
+ mkdir -p "$GOPATH/src/git.curoverse.com"
+ ln -sfn "$WORKSPACE" "$GOPATH/src/git.curoverse.com/arvados.git"
+
+ cd "$GOPATH/src/git.curoverse.com/arvados.git/$src_path"
+ local version="$(version_from_git)"
+ local timestamp="$(timestamp_from_git)"
+
+ # If the command imports anything from the Arvados SDK, bump the
+ # version number and build a new package whenever the SDK changes.
+ if grep -qr git.curoverse.com/arvados .; then
+ cd "$GOPATH/src/git.curoverse.com/arvados.git/sdk/go"
+ if [[ $(timestamp_from_git) -gt "$timestamp" ]]; then
+ version=$(version_from_git)
+ fi
+ fi
+
+ cd $WORKSPACE/packages/$TARGET
+ go get "git.curoverse.com/arvados.git/$src_path"
+ fpm_build "$GOPATH/bin/$basename=/usr/bin/$prog" "$prog" 'Curoverse, Inc.' dir "$version" "--url=https://arvados.org" "--license=GNU Affero General Public License, version 3.0" "--description=$description" "$WORKSPACE/$license_file=/usr/share/doc/$prog/$license_file"
+}
+
+default_iteration() {
+ local package_name="$1"; shift
+ local package_version="$1"; shift
+ local iteration=1
+ if [[ $package_version =~ ^0\.1\.([0-9]{14})(\.|$) ]] && \
+ [[ ${BASH_REMATCH[1]} -le $LICENSE_PACKAGE_TS ]]; then
+ iteration=2
+ fi
+ echo $iteration
+}
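+# Illustrative examples only (not part of the build logic): versions whose git
+# timestamp predates LICENSE_PACKAGE_TS get a bumped default iteration, e.g.
+#   default_iteration arvados-foo 0.1.20151207010203.abcdef1   # prints 2
+#   default_iteration arvados-foo 0.1.20160101010203.abcdef1   # prints 1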
+
+_build_rails_package_scripts() {
+ local pkgname="$1"; shift
+ local destdir="$1"; shift
+ local srcdir="$RUN_BUILD_PACKAGES_PATH/rails-package-scripts"
+ for scriptname in postinst prerm postrm; do
+ cat "$srcdir/$pkgname.sh" "$srcdir/step2.sh" "$srcdir/$scriptname.sh" \
+ >"$destdir/$scriptname" || return $?
+ done
+}
+
+handle_rails_package() {
+ local pkgname="$1"; shift
+ local srcdir="$1"; shift
+ local license_path="$1"; shift
+ local scripts_dir="$(mktemp --tmpdir -d "$pkgname-XXXXXXXX.scripts")" && \
+ local version_file="$(mktemp --tmpdir "$pkgname-XXXXXXXX.version")" && (
+ set -e
+ _build_rails_package_scripts "$pkgname" "$scripts_dir"
+ cd "$srcdir"
+ mkdir -p tmp
+ version_from_git >"$version_file"
+ git rev-parse HEAD >git-commit.version
+ bundle package --all
+ )
+ if [[ 0 != "$?" ]] || ! cd "$WORKSPACE/packages/$TARGET"; then
+ echo "ERROR: $pkgname package prep failed" >&2
+ rm -rf "$scripts_dir" "$version_file"
+ EXITCODE=1
+ return 1
+ fi
+ local railsdir="/var/www/${pkgname%-server}/current"
+ local -a pos_args=("$srcdir/=$railsdir" "$pkgname" "Curoverse, Inc." dir
+ "$(cat "$version_file")")
+ local license_arg="$license_path=$railsdir/$(basename "$license_path")"
+ # --iteration=5 accommodates the package script bugfixes #8371 and #8413.
+ local -a switches=(--iteration=5
+ --after-install "$scripts_dir/postinst"
+ --before-remove "$scripts_dir/prerm"
+ --after-remove "$scripts_dir/postrm")
+  # For some reason fpm excludes must not start with a leading /.
+ local exclude_root="${railsdir#/}"
+ # .git and packages are for the SSO server, which is built from its
+ # repository root.
+ local -a exclude_list=(.git packages tmp log coverage Capfile\* \
+ config/deploy\* config/application.yml)
+ # for arvados-workbench, we need to have the (dummy) config/database.yml in the package
+ if [[ "$pkgname" != "arvados-workbench" ]]; then
+ exclude_list+=('config/database.yml')
+ fi
+ for exclude in ${exclude_list[@]}; do
+ switches+=(-x "$exclude_root/$exclude")
+ done
+ fpm_build "${pos_args[@]}" "${switches[@]}" \
+ -x "$exclude_root/vendor/bundle" "$@" "$license_arg"
+ rm -rf "$scripts_dir" "$version_file"
+}
+
+# Build packages for everything
+fpm_build () {
+ # The package source. Depending on the source type, this can be a
+ # path, or the name of the package in an upstream repository (e.g.,
+ # pip).
+ PACKAGE=$1
+ shift
+ # The name of the package to build. Defaults to $PACKAGE.
+ PACKAGE_NAME=${1:-$PACKAGE}
+ shift
+ # Optional: the vendor of the package. Should be "Curoverse, Inc." for
+ # packages of our own software. Passed to fpm --vendor.
+ VENDOR=$1
+ shift
+ # The type of source package. Passed to fpm -s. Default "python".
+ PACKAGE_TYPE=${1:-python}
+ shift
+ # Optional: the package version number. Passed to fpm -v.
+ VERSION=$1
+ shift
+
+ case "$PACKAGE_TYPE" in
+ python)
+ # All Arvados Python2 packages depend on Python 2.7.
+ # Make sure we build with that for consistency.
+ set -- "$@" --python-bin python2.7 \
+ --python-easyinstall "$EASY_INSTALL2" \
+ --python-package-name-prefix "$PYTHON2_PKG_PREFIX" \
+ --depends "$PYTHON2_PACKAGE"
+ ;;
+ python3)
+ # fpm does not actually support a python3 package type. Instead
+ # we recognize it as a convenience shortcut to add several
+ # necessary arguments to fpm's command line later, after we're
+ # done handling positional arguments.
+ PACKAGE_TYPE=python
+ set -- "$@" --python-bin python3 \
+ --python-easyinstall "$EASY_INSTALL3" \
+ --python-package-name-prefix "$PYTHON3_PKG_PREFIX" \
+ --depends "$PYTHON3_PACKAGE"
+ ;;
+ esac
+
+ declare -a COMMAND_ARR=("fpm" "--maintainer=Ward Vandewege <ward at curoverse.com>" "-s" "$PACKAGE_TYPE" "-t" "$FORMAT")
+ if [ python = "$PACKAGE_TYPE" ]; then
+ COMMAND_ARR+=(--exclude=\*/{dist,site}-packages/tests/\*)
+ if [ deb = "$FORMAT" ]; then
+ # Dependencies are built from setup.py. Since setup.py will never
+ # refer to Debian package iterations, it doesn't make sense to
+ # enforce those in the .deb dependencies.
+ COMMAND_ARR+=(--deb-ignore-iteration-in-dependencies)
+ fi
+ fi
+
+ if [[ "${DEBUG:-0}" != "0" ]]; then
+ COMMAND_ARR+=('--verbose' '--log' 'info')
+ fi
+
+ if [[ "$PACKAGE_NAME" != "$PACKAGE" ]]; then
+ COMMAND_ARR+=('-n' "$PACKAGE_NAME")
+ fi
+
+ if [[ "$VENDOR" != "" ]]; then
+ COMMAND_ARR+=('--vendor' "$VENDOR")
+ fi
+
+ if [[ "$VERSION" != "" ]]; then
+ COMMAND_ARR+=('-v' "$VERSION")
+ fi
+ # We can always add an --iteration here. If another one is specified in $@,
+ # that will take precedence, as desired.
+ COMMAND_ARR+=(--iteration "$(default_iteration "$PACKAGE" "$VERSION")")
+
+ # 'dir' type packages are provided in the form /path/to/source=/path/to/dest
+ # so strip off the 2nd part to check for fpm-info below.
+ PACKAGE_DIR=$(echo $PACKAGE | sed 's/\/=.*//')
+
+ # Append --depends X and other arguments specified by fpm-info.sh in
+ # the package source dir. These are added last so they can override
+ # the arguments added by this script.
+ declare -a fpm_args=()
+ declare -a build_depends=()
+ declare -a fpm_depends=()
+ declare -a fpm_exclude=()
+ FPM_INFO=""
+ if [[ -d "$PACKAGE_DIR" ]]; then
+ FPM_INFO="$PACKAGE_DIR/fpm-info.sh"
+ elif [[ -e "${WORKSPACE}/backports/${PACKAGE_TYPE}-${PACKAGE}/fpm-info.sh" ]]; then
+ FPM_INFO="${WORKSPACE}/backports/${PACKAGE_TYPE}-${PACKAGE}/fpm-info.sh"
+ debug_echo "Found fpm-info.sh in backports: $FPM_INFO"
+ elif [[ -e "${WORKSPACE}/backports/${PACKAGE_TYPE}-${PACKAGE_NAME}/fpm-info.sh" ]]; then
+ FPM_INFO="${WORKSPACE}/backports/${PACKAGE_TYPE}-${PACKAGE_NAME}/fpm-info.sh"
+ fi
+ if [[ -e "$FPM_INFO" ]]; then
+ debug_echo "Loading fpm overrides from $FPM_INFO"
+ source "$FPM_INFO"
+ fi
+ for pkg in "${build_depends[@]}"; do
+ if [[ $TARGET =~ debian|ubuntu ]]; then
+      pkg_deb=$(ls "$WORKSPACE/packages/$TARGET/${pkg}_"*.deb | sort -rg | awk 'NR==1')
+ if [[ -e $pkg_deb ]]; then
+ echo "Installing build_dep $pkg from $pkg_deb"
+ dpkg -i "$pkg_deb"
+ else
+ echo "Attemping to install build_dep $pkg using apt-get"
+ apt-get install -y "$pkg"
+ fi
+ apt-get -y -f install
+ else
+ pkg_rpm=$(ls "$WORKSPACE/packages/$TARGET/$pkg"-[0-9]*.rpm | sort -rg | awk 'NR==1')
+ if [[ -e $pkg_rpm ]]; then
+ echo "Installing build_dep $pkg from $pkg_rpm"
+ rpm -i "$pkg_rpm"
+ else
+ echo "Attemping to install build_dep $pkg"
+ rpm -i "$pkg"
+ fi
+ fi
+ done
+ for i in "${fpm_depends[@]}"; do
+ COMMAND_ARR+=('--depends' "$i")
+ done
+ for i in "${fpm_exclude[@]}"; do
+ COMMAND_ARR+=('--exclude' "$i")
+ done
+
+ # Append remaining function arguments directly to fpm's command line.
+ for i; do
+ COMMAND_ARR+=("$i")
+ done
+
+ COMMAND_ARR+=("${fpm_args[@]}")
+
+ COMMAND_ARR+=("$PACKAGE")
+
+ debug_echo -e "\n${COMMAND_ARR[@]}\n"
+
+ FPM_RESULTS=$("${COMMAND_ARR[@]}")
+ FPM_EXIT_CODE=$?
+
+ fpm_verify $FPM_EXIT_CODE $FPM_RESULTS
+}
+
+# verify build results
+fpm_verify () {
+ FPM_EXIT_CODE=$1
+ shift
+ FPM_RESULTS=$@
+
+ FPM_PACKAGE_NAME=''
+ if [[ $FPM_RESULTS =~ ([A-Za-z0-9_\.-]*\.)(deb|rpm) ]]; then
+ FPM_PACKAGE_NAME=${BASH_REMATCH[1]}${BASH_REMATCH[2]}
+ fi
+
+ if [[ "$FPM_PACKAGE_NAME" == "" ]]; then
+ EXITCODE=1
+ echo "Error: $PACKAGE: Unable to figure out package name from fpm results:"
+ echo
+ echo $FPM_RESULTS
+ echo
+ elif [[ "$FPM_RESULTS" =~ "File already exists" ]]; then
+ echo "Package $FPM_PACKAGE_NAME exists, not rebuilding"
+ elif [[ 0 -ne "$FPM_EXIT_CODE" ]]; then
+ echo "Error building package for $1:\n $FPM_RESULTS"
+ fi
+}
+
+install_package() {
+ PACKAGES=$@
+ if [[ "$FORMAT" == "deb" ]]; then
+ $SUDO apt-get install $PACKAGES --yes
+ elif [[ "$FORMAT" == "rpm" ]]; then
+ $SUDO yum -q -y install $PACKAGES
+ fi
+}
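
As an illustration of the fpm-info.sh hook that fpm_build sources above, a package source directory (or a backports/<type>-<name>/ directory) could provide something along these lines; the dependency names below are examples, not a prescription:

    # fpm-info.sh (sketch) -- sourced by fpm_build, may set these arrays:
    #   build_depends: packages installed on the build host before running fpm
    #   fpm_depends:   runtime dependencies, each passed to fpm as --depends
    #   fpm_exclude:   patterns passed to fpm as --exclude
    #   fpm_args:      extra arguments appended to the fpm command line
    case "$TARGET" in
        debian* | ubuntu*)
            build_depends=('libfuse-dev')
            fpm_depends=('libfuse2')
            ;;
        centos*)
            build_depends=('fuse-devel')
            fpm_depends=('fuse-libs')
            ;;
    esac
    fpm_exclude=('*.pyc')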
diff --git a/build/jenkins/run-performance-suite.sh b/build/jenkins/run-performance-suite.sh
new file mode 100755
index 0000000..2944bda
--- /dev/null
+++ b/build/jenkins/run-performance-suite.sh
@@ -0,0 +1,88 @@
+#!/bin/bash
+
+EXITCODE=0
+
+INSTANCE=$1
+REVISION=$2
+
+if [[ "$INSTANCE" == '' ]]; then
+ echo "Syntax: $0 <instance> [revision]"
+ exit 1
+fi
+
+if [[ "$REVISION" == '' ]]; then
+  # See if there's a configuration file with the revision.
+ CONFIG_PATH=/home/jenkins/configuration/$INSTANCE.arvadosapi.com-versions.conf
+ if [[ -f $CONFIG_PATH ]]; then
+ echo "Loading git revision from $CONFIG_PATH"
+ . $CONFIG_PATH
+ REVISION=$ARVADOS_GIT_REVISION
+ fi
+fi
+
+if [[ "$REVISION" != '' ]]; then
+ echo "Git revision is $REVISION"
+else
+ echo "No valid git revision found, proceeding with what is in place."
+fi
+
+# Sanity check
+if ! [[ -n "$WORKSPACE" ]]; then
+ echo "WORKSPACE environment variable not set"
+ exit 1
+fi
+
+title () {
+ txt="********** $1 **********"
+ printf "\n%*s%s\n\n" $((($COLUMNS-${#txt})/2)) "" "$txt"
+}
+
+timer_reset() {
+ t0=$SECONDS
+}
+
+timer() {
+ echo -n "$(($SECONDS - $t0))s"
+}
+
+source /etc/profile.d/rvm.sh
+echo $WORKSPACE
+
+title "Starting performance test"
+timer_reset
+
+cd $WORKSPACE
+
+if [[ "$REVISION" != '' ]]; then
+ git checkout $REVISION
+fi
+
+cp -f /home/jenkins/diagnostics/arvados-workbench/$INSTANCE-application.yml $WORKSPACE/apps/workbench/config/application.yml
+
+cd $WORKSPACE/apps/workbench
+
+HOME="$GEMHOME" bundle install --no-deployment
+
+if [[ ! -d tmp ]]; then
+ mkdir tmp
+fi
+
+mkdir -p tmp/cache
+
+RAILS_ENV=performance bundle exec rake test:benchmark
+
+ECODE=$?
+
+if [[ "$REVISION" != '' ]]; then
+ git checkout master
+fi
+
+if [[ "$ECODE" != "0" ]]; then
+ title "!!!!!! PERFORMANCE TESTS FAILED (`timer`) !!!!!!"
+ EXITCODE=$(($EXITCODE + $ECODE))
+ exit $EXITCODE
+fi
+
+title "Performance tests complete (`timer`)"
+
+exit $EXITCODE
diff --git a/build/jenkins/run-tapestry-tests.sh b/build/jenkins/run-tapestry-tests.sh
new file mode 100755
index 0000000..851a81d
--- /dev/null
+++ b/build/jenkins/run-tapestry-tests.sh
@@ -0,0 +1,54 @@
+#!/bin/bash
+
+EXITCODE=0
+
+COLUMNS=80
+
+title () {
+ printf "\n%*s\n\n" $(((${#title}+$COLUMNS)/2)) "********** $1 **********"
+}
+
+source /etc/profile.d/rvm.sh
+
+# This shouldn't really be necessary... but the jenkins/rvm integration seems a
+# bit wonky occasionally.
+rvm use ree
+
+echo $WORKSPACE
+
+# Tapestry
+title "Starting tapestry tests"
+cd "$WORKSPACE"
+
+# There are a few submodules
+git submodule init && git submodule update
+
+# Use sqlite for testing
+sed -i'' -e "s:mysql:sqlite3:" Gemfile
+
+# Tapestry is not set up yet to use --deployment
+#bundle install --deployment
+bundle install
+
+rm -f config/database.yml
+rm -f config/environments/test.rb
+cp $HOME/tapestry/test.rb config/environments/
+cp $HOME/tapestry/database.yml config/
+
+export RAILS_ENV=test
+
+bundle exec rake db:drop
+bundle exec rake db:create
+bundle exec rake db:setup
+bundle exec rake test
+
+ECODE=$?
+
+if [[ "$ECODE" != "0" ]]; then
+ title "!!!!!! TAPESTRY TESTS FAILED !!!!!!"
+ EXITCODE=$(($EXITCODE + $ECODE))
+fi
+
+title "Tapestry tests complete"
+
+exit $EXITCODE
diff --git a/build/jenkins/run-tests.sh b/build/jenkins/run-tests.sh
new file mode 100755
index 0000000..a17d610
--- /dev/null
+++ b/build/jenkins/run-tests.sh
@@ -0,0 +1,826 @@
+#!/bin/bash
+
+. `dirname "$(readlink -f "$0")"`/libcloud-pin
+
+read -rd "\000" helpmessage <<EOF
+$(basename $0): Install and test Arvados components.
+
+Exit non-zero if any tests fail.
+
+Syntax:
+ $(basename $0) WORKSPACE=/path/to/arvados [options]
+
+Options:
+
+--skip FOO Do not test the FOO component.
+--only FOO Do not test anything except the FOO component.
+--temp DIR Install components and dependencies under DIR instead of
+ making a new temporary directory. Implies --leave-temp.
+--leave-temp Do not remove GOPATH, virtualenv, and other temp dirs at exit.
+ Instead, show the path to give as --temp to reuse them in
+ subsequent invocations.
+--skip-install Do not run any install steps. Just run tests.
+ You should provide GOPATH, GEMHOME, and VENVDIR options
+ from a previous invocation if you use this option.
+--only-install Run specific install step
+WORKSPACE=path Arvados source tree to test.
+CONFIGSRC=path Dir with api server config files to copy into source tree.
+ (If none given, leave config files alone in source tree.)
+services/api_test="TEST=test/functional/arvados/v1/collections_controller_test.rb"
+ Restrict apiserver tests to the given file
+sdk/python_test="--test-suite test.test_keep_locator"
+ Restrict Python SDK tests to the given class
+apps/workbench_test="TEST=test/integration/pipeline_instances_test.rb"
+ Restrict Workbench tests to the given file
+services/arv-git-httpd_test="-check.vv"
+ Show all log messages, even when tests pass (also works
+ with services/keepstore_test etc.)
+ARVADOS_DEBUG=1
+ Print more debug messages
+envvar=value Set \$envvar to value. Primarily useful for WORKSPACE,
+ *_test, and other examples shown above.
+
+Assuming --skip-install is not given, all components are installed
+into \$GOPATH, \$VENVDIR, and \$GEMHOME before running any tests. Many
+test suites depend on other components being installed, and installing
+everything tends to be quicker than debugging dependencies.
+
+As a special concession to the current CI server config, CONFIGSRC
+defaults to $HOME/arvados-api-server if that directory exists.
+
+More information and background:
+
+https://arvados.org/projects/arvados/wiki/Running_tests
+
+Available tests:
+
+apps/workbench
+apps/workbench_benchmark
+apps/workbench_profile
+doc
+services/api
+services/arv-git-httpd
+services/crunchstat
+services/dockercleaner
+services/fuse
+services/keep-web
+services/keepproxy
+services/keepstore
+services/login-sync
+services/nodemanager
+services/crunch-dispatch-local
+sdk/cli
+sdk/pam
+sdk/python
+sdk/ruby
+sdk/go/arvadosclient
+sdk/go/keepclient
+sdk/go/manifest
+sdk/go/blockdigest
+sdk/go/streamer
+sdk/go/crunchrunner
+tools/crunchstat-summary
+tools/keep-rsync
+
+EOF
+
+# First make sure to remove any ARVADOS_ variables from the calling
+# environment that could interfere with the tests.
+unset $(env | cut -d= -f1 | grep \^ARVADOS_)
+
+# Reset other variables that could affect our tests' behavior by
+# accident.
+GITDIR=
+GOPATH=
+VENVDIR=
+VENV3DIR=
+PYTHONPATH=
+GEMHOME=
+PERLINSTALLBASE=
+
+COLUMNS=80
+
+skip_install=
+temp=
+temp_preserve=
+
+clear_temp() {
+ if [[ -z "$temp" ]]; then
+ # we didn't even get as far as making a temp dir
+ :
+ elif [[ -z "$temp_preserve" ]]; then
+ rm -rf "$temp"
+ else
+ echo "Leaving behind temp dirs in $temp"
+ fi
+}
+
+fatal() {
+ clear_temp
+ echo >&2 "Fatal: $* (encountered in ${FUNCNAME[1]} at ${BASH_SOURCE[1]} line ${BASH_LINENO[0]})"
+ exit 1
+}
+
+report_outcomes() {
+ for x in "${successes[@]}"
+ do
+ echo "Pass: $x"
+ done
+
+ if [[ ${#failures[@]} == 0 ]]
+ then
+ echo "All test suites passed."
+ else
+ echo "Failures (${#failures[@]}):"
+ for x in "${failures[@]}"
+ do
+ echo "Fail: $x"
+ done
+ fi
+}
+
+exit_cleanly() {
+ trap - INT
+ create-plot-data-from-log.sh $BUILD_NUMBER "$WORKSPACE/apps/workbench/log/test.log" "$WORKSPACE/apps/workbench/log/"
+ rotate_logfile "$WORKSPACE/apps/workbench/log/" "test.log"
+ stop_services
+ rotate_logfile "$WORKSPACE/services/api/log/" "test.log"
+ report_outcomes
+ clear_temp
+ exit ${#failures}
+}
+
+sanity_checks() {
+ ( [[ -n "$WORKSPACE" ]] && [[ -d "$WORKSPACE/services" ]] ) \
+ || fatal "WORKSPACE environment variable not set to a source directory (see: $0 --help)"
+ echo Checking dependencies:
+ echo -n 'virtualenv: '
+ virtualenv --version \
+ || fatal "No virtualenv. Try: apt-get install virtualenv (on ubuntu: python-virtualenv)"
+ echo -n 'go: '
+ go version \
+ || fatal "No go binary. See http://golang.org/doc/install"
+ echo -n 'gcc: '
+ gcc --version | egrep ^gcc \
+ || fatal "No gcc. Try: apt-get install build-essential"
+ echo -n 'fuse.h: '
+ find /usr/include -wholename '*fuse/fuse.h' \
+ || fatal "No fuse/fuse.h. Try: apt-get install libfuse-dev"
+ echo -n 'pyconfig.h: '
+ find /usr/include -name pyconfig.h | egrep --max-count=1 . \
+ || fatal "No pyconfig.h. Try: apt-get install python-dev"
+ echo -n 'nginx: '
+ PATH="$PATH:/sbin:/usr/sbin:/usr/local/sbin" nginx -v \
+ || fatal "No nginx. Try: apt-get install nginx"
+ echo -n 'perl: '
+ perl -v | grep version \
+ || fatal "No perl. Try: apt-get install perl"
+ for mod in ExtUtils::MakeMaker JSON LWP Net::SSL; do
+ echo -n "perl $mod: "
+ perl -e "use $mod; print \"\$$mod::VERSION\\n\"" \
+ || fatal "No $mod. Try: apt-get install perl-modules libcrypt-ssleay-perl libjson-perl libwww-perl"
+ done
+ echo -n 'gitolite: '
+ which gitolite \
+ || fatal "No gitolite. Try: apt-get install gitolite3"
+}
+
+rotate_logfile() {
+ # i.e. rotate_logfile "$WORKSPACE/apps/workbench/log/" "test.log"
+ # $BUILD_NUMBER is set by Jenkins if this script is being called as part of a Jenkins run
+ if [[ -f "$1/$2" ]]; then
+ THEDATE=`date +%Y%m%d%H%M%S`
+ mv "$1/$2" "$1/$THEDATE-$BUILD_NUMBER-$2"
+ gzip "$1/$THEDATE-$BUILD_NUMBER-$2"
+ fi
+}
+
+declare -a failures
+declare -A skip
+declare -A testargs
+skip[apps/workbench_profile]=1
+
+while [[ -n "$1" ]]
+do
+ arg="$1"; shift
+ case "$arg" in
+ --help)
+ echo >&2 "$helpmessage"
+ echo >&2
+ exit 1
+ ;;
+ --skip)
+ skipwhat="$1"; shift
+ skip[$skipwhat]=1
+ ;;
+ --only)
+ only="$1"; skip[$1]=""; shift
+ ;;
+ --skip-install)
+ skip_install=1
+ ;;
+ --only-install)
+ skip_install=1
+ only_install="$1"; shift
+ ;;
+ --temp)
+ temp="$1"; shift
+ temp_preserve=1
+ ;;
+ --leave-temp)
+ temp_preserve=1
+ ;;
+ --retry)
+ retry=1
+ ;;
+ *_test=*)
+ suite="${arg%%_test=*}"
+ args="${arg#*=}"
+ testargs["$suite"]="$args"
+ ;;
+ *=*)
+ eval export $(echo $arg | cut -d= -f1)=\"$(echo $arg | cut -d= -f2-)\"
+ ;;
+ *)
+ echo >&2 "$0: Unrecognized option: '$arg'. Try: $0 --help"
+ exit 1
+ ;;
+ esac
+done
+
+start_api() {
+ echo 'Starting API server...'
+ cd "$WORKSPACE" \
+ && eval $(python sdk/python/tests/run_test_server.py start --auth admin) \
+ && export ARVADOS_TEST_API_HOST="$ARVADOS_API_HOST" \
+ && export ARVADOS_TEST_API_INSTALLED="$$" \
+ && (env | egrep ^ARVADOS)
+}
+
+start_nginx_proxy_services() {
+ echo 'Starting keepproxy, keep-web, arv-git-httpd, and nginx ssl proxy...'
+ cd "$WORKSPACE" \
+ && python sdk/python/tests/run_test_server.py start_keep_proxy \
+ && python sdk/python/tests/run_test_server.py start_keep-web \
+ && python sdk/python/tests/run_test_server.py start_arv-git-httpd \
+ && python sdk/python/tests/run_test_server.py start_nginx \
+ && export ARVADOS_TEST_PROXY_SERVICES=1
+}
+
+stop_services() {
+ if [[ -n "$ARVADOS_TEST_PROXY_SERVICES" ]]; then
+ unset ARVADOS_TEST_PROXY_SERVICES
+ cd "$WORKSPACE" \
+ && python sdk/python/tests/run_test_server.py stop_nginx \
+ && python sdk/python/tests/run_test_server.py stop_arv-git-httpd \
+ && python sdk/python/tests/run_test_server.py stop_keep-web \
+ && python sdk/python/tests/run_test_server.py stop_keep_proxy
+ fi
+ if [[ -n "$ARVADOS_TEST_API_HOST" ]]; then
+ unset ARVADOS_TEST_API_HOST
+ cd "$WORKSPACE" \
+ && python sdk/python/tests/run_test_server.py stop
+ fi
+}
+
+interrupt() {
+ failures+=("($(basename $0) interrupted)")
+ exit_cleanly
+}
+trap interrupt INT
+
+sanity_checks
+
+echo "WORKSPACE=$WORKSPACE"
+
+if [[ -z "$CONFIGSRC" ]] && [[ -d "$HOME/arvados-api-server" ]]; then
+ # Jenkins expects us to use this by default.
+ CONFIGSRC="$HOME/arvados-api-server"
+fi
+
+# Clean up .pyc files that may exist in the workspace
+cd "$WORKSPACE"
+find -name '*.pyc' -delete
+
+if [[ -z "$temp" ]]; then
+ temp="$(mktemp -d)"
+fi
+
+# Set up temporary install dirs (unless existing dirs were supplied)
+for tmpdir in VENVDIR VENV3DIR GOPATH GEMHOME PERLINSTALLBASE
+do
+ if [[ -z "${!tmpdir}" ]]; then
+ eval "$tmpdir"="$temp/$tmpdir"
+ fi
+ if ! [[ -d "${!tmpdir}" ]]; then
+ mkdir "${!tmpdir}" || fatal "can't create ${!tmpdir} (does $temp exist?)"
+ fi
+done
+
+setup_ruby_environment() {
+ if [[ -s "$HOME/.rvm/scripts/rvm" ]] ; then
+ source "$HOME/.rvm/scripts/rvm"
+ using_rvm=true
+ elif [[ -s "/usr/local/rvm/scripts/rvm" ]] ; then
+ source "/usr/local/rvm/scripts/rvm"
+ using_rvm=true
+ else
+ using_rvm=false
+ fi
+
+ if [[ "$using_rvm" == true ]]; then
+ # If rvm is in use, we can't just put separate "dependencies"
+ # and "gems-under-test" paths to GEM_PATH: passenger resets
+ # the environment to the "current gemset", which would lose
+ # our GEM_PATH and prevent our test suites from running ruby
+ # programs (for example, the Workbench test suite could not
+ # boot an API server or run arv). Instead, we have to make an
+ # rvm gemset and use it for everything.
+
+ [[ `type rvm | head -n1` == "rvm is a function" ]] \
+ || fatal 'rvm check'
+
+ # Put rvm's favorite path back in first place (overriding
+ # virtualenv, which just put itself there). Ignore rvm's
+ # complaint about not being in first place already.
+ rvm use @default 2>/dev/null
+
+ # Create (if needed) and switch to an @arvados-tests
+ # gemset. (Leave the choice of ruby to the caller.)
+ rvm use @arvados-tests --create \
+ || fatal 'rvm gemset setup'
+
+ rvm env
+ else
+ # When our "bundle install"s need to install new gems to
+ # satisfy dependencies, we want them to go where "gem install
+ # --user-install" would put them. (However, if the caller has
+ # already set GEM_HOME, we assume that's where dependencies
+ # should be installed, and we should leave it alone.)
+
+ if [ -z "$GEM_HOME" ]; then
+ user_gempath="$(gem env gempath)"
+ export GEM_HOME="${user_gempath%%:*}"
+ fi
+ PATH="$(gem env gemdir)/bin:$PATH"
+
+ # When we build and install our own gems, we install them in our
+ # $GEMHOME tmpdir, and we want them to be at the front of GEM_PATH and
+ # PATH so integration tests prefer them over other versions that
+ # happen to be installed in $user_gempath, system dirs, etc.
+
+ tmpdir_gem_home="$(env - PATH="$PATH" HOME="$GEMHOME" gem env gempath | cut -f1 -d:)"
+ PATH="$tmpdir_gem_home/bin:$PATH"
+ export GEM_PATH="$tmpdir_gem_home:$(gem env gempath)"
+
+ echo "Will install dependencies to $(gem env gemdir)"
+ echo "Will install arvados gems to $tmpdir_gem_home"
+ echo "Gem search path is GEM_PATH=$GEM_PATH"
+ fi
+}
+
+with_test_gemset() {
+ if [[ "$using_rvm" == true ]]; then
+ "$@"
+ else
+ GEM_HOME="$tmpdir_gem_home" GEM_PATH="$tmpdir_gem_home" "$@"
+ fi
+}
+
+gem_uninstall_if_exists() {
+ if gem list "$1\$" | egrep '^\w'; then
+ gem uninstall --force --all --executables "$1"
+ fi
+}
+
+setup_virtualenv() {
+ local venvdest="$1"; shift
+ if ! [[ -e "$venvdest/bin/activate" ]] || ! [[ -e "$venvdest/bin/pip" ]]; then
+ virtualenv --setuptools "$@" "$venvdest" || fatal "virtualenv $venvdest failed"
+ fi
+ "$venvdest/bin/pip" install 'setuptools>=18' 'pip>=7'
+ # ubuntu1404 can't seem to install mock via tests_require, but it can do this.
+ "$venvdest/bin/pip" install 'mock>=1.0' 'pbr<1.7.0'
+}
+
+export PERLINSTALLBASE
+export PERLLIB="$PERLINSTALLBASE/lib/perl5:${PERLLIB:+$PERLLIB}"
+
+export GOPATH
+mkdir -p "$GOPATH/src/git.curoverse.com"
+ln -sfn "$WORKSPACE" "$GOPATH/src/git.curoverse.com/arvados.git" \
+ || fatal "symlink failed"
+
+setup_virtualenv "$VENVDIR" --python python2.7
+. "$VENVDIR/bin/activate"
+
+# Needed for run_test_server.py which is used by certain (non-Python) tests.
+pip freeze 2>/dev/null | egrep ^PyYAML= \
+ || pip install PyYAML >/dev/null \
+ || fatal "pip install PyYAML failed"
+
+# Preinstall forked version of libcloud, because nodemanager "pip install"
+# won't pick it up by default.
+pip freeze 2>/dev/null | egrep ^apache-libcloud==$LIBCLOUD_PIN \
+ || pip install --pre --ignore-installed https://github.com/curoverse/libcloud/archive/apache-libcloud-$LIBCLOUD_PIN.zip >/dev/null \
+ || fatal "pip install apache-libcloud failed"
+
+# Uninstall old llfuse, because services/fuse "pip install" won't
+# upgrade it by default.
+if pip freeze | egrep '^llfuse==0\.41\.'; then
+ yes | pip uninstall 'llfuse<0.42'
+fi
+
+# Deactivate Python 2 virtualenv
+deactivate
+
+# If Python 3 is available, set up its virtualenv in $VENV3DIR.
+# Otherwise, skip dependent tests.
+PYTHON3=$(which python3)
+if [ "0" = "$?" ]; then
+ setup_virtualenv "$VENV3DIR" --python python3
+else
+ PYTHON3=
+ skip[services/dockercleaner]=1
+ cat >&2 <<EOF
+
+Warning: python3 could not be found
+services/dockercleaner install and tests will be skipped
+
+EOF
+fi
+
+# Reactivate Python 2 virtualenv
+. "$VENVDIR/bin/activate"
+
+# Note: this must be the last time we change PATH, otherwise rvm will
+# whine a lot.
+setup_ruby_environment
+
+echo "PATH is $PATH"
+
+if ! which bundler >/dev/null
+then
+ gem install --user-install bundler || fatal 'Could not install bundler'
+fi
+
+checkexit() {
+ if [[ "$1" != "0" ]]; then
+ title "!!!!!! $2 FAILED !!!!!!"
+ failures+=("$2 (`timer`)")
+ else
+ successes+=("$2 (`timer`)")
+ fi
+}
+
+timer_reset() {
+ t0=$SECONDS
+}
+
+timer() {
+ echo -n "$(($SECONDS - $t0))s"
+}
+
+do_test() {
+ while ! do_test_once ${@} && [[ "$retry" == 1 ]]
+ do
+ read -p 'Try again? [Y/n] ' x
+ if [[ "$x" != "y" ]] && [[ "$x" != "" ]]
+ then
+ break
+ fi
+ done
+}
+
+do_test_once() {
+ unset result
+ if [[ -z "${skip[$1]}" ]] && ( [[ -z "$only" ]] || [[ "$only" == "$1" ]] )
+ then
+ title "Running $1 tests"
+ timer_reset
+ if [[ "$2" == "go" ]]
+ then
+ covername="coverage-$(echo "$1" | sed -e 's/\//_/g')"
+ coverflags=("-covermode=count" "-coverprofile=$WORKSPACE/tmp/.$covername.tmp")
+ # We do "go get -t" here to catch compilation errors
+ # before trying "go test". Otherwise, coverage-reporting
+ # mode makes Go show the wrong line numbers when reporting
+ # compilation errors.
+ if [[ -n "${testargs[$1]}" ]]
+ then
+ # "go test -check.vv giturl" doesn't work, but this
+ # does:
+ cd "$WORKSPACE/$1" && \
+ go get -t "git.curoverse.com/arvados.git/$1" && \
+ go test ${coverflags[@]} ${testargs[$1]}
+ else
+ # The above form gets verbose even when testargs is
+ # empty, so use this form in such cases:
+ go get -t "git.curoverse.com/arvados.git/$1" && \
+ go test ${coverflags[@]} "git.curoverse.com/arvados.git/$1"
+ fi
+ result="$?"
+ go tool cover -html="$WORKSPACE/tmp/.$covername.tmp" -o "$WORKSPACE/tmp/$covername.html"
+ rm "$WORKSPACE/tmp/.$covername.tmp"
+ elif [[ "$2" == "pip" ]]
+ then
+ # $3 can name a path directory for us to use, including trailing
+ # slash; e.g., the bin/ subdirectory of a virtualenv.
+ cd "$WORKSPACE/$1" \
+ && "${3}python" setup.py test ${testargs[$1]}
+ elif [[ "$2" != "" ]]
+ then
+ "test_$2"
+ else
+ "test_$1"
+ fi
+ result=${result:-$?}
+ checkexit $result "$1 tests"
+ title "End of $1 tests (`timer`)"
+ return $result
+ else
+ title "Skipping $1 tests"
+ fi
+}
+
+do_install() {
+ if [[ -z "$skip_install" || (-n "$only_install" && "$only_install" == "$1") ]]
+ then
+ title "Running $1 install"
+ timer_reset
+ if [[ "$2" == "go" ]]
+ then
+ go get -t "git.curoverse.com/arvados.git/$1"
+ elif [[ "$2" == "pip" ]]
+ then
+ # $3 can name a path directory for us to use, including trailing
+ # slash; e.g., the bin/ subdirectory of a virtualenv.
+
+ # Need to change to a different directory after creating
+ # the source dist package to avoid a pip bug.
+ # see https://arvados.org/issues/5766 for details.
+
+ # Also need to install twice, because if it believes the package is
+            # already installed, pip won't install it. So the first "pip
+ # install" ensures that the dependencies are met, the second "pip
+ # install" ensures that we've actually installed the local package
+ # we just built.
+ cd "$WORKSPACE/$1" \
+ && "${3}python" setup.py sdist rotate --keep=1 --match .tar.gz \
+ && cd "$WORKSPACE" \
+ && "${3}pip" install --quiet "$WORKSPACE/$1/dist"/*.tar.gz \
+ && "${3}pip" install --quiet --no-deps --ignore-installed "$WORKSPACE/$1/dist"/*.tar.gz
+ elif [[ "$2" != "" ]]
+ then
+ "install_$2"
+ else
+ "install_$1"
+ fi
+ checkexit $? "$1 install"
+ title "End of $1 install (`timer`)"
+ else
+ title "Skipping $1 install"
+ fi
+}
+
+title () {
+ txt="********** $1 **********"
+ printf "\n%*s%s\n\n" $((($COLUMNS-${#txt})/2)) "" "$txt"
+}
+
+bundle_install_trylocal() {
+ (
+ set -e
+ echo "(Running bundle install --local. 'could not find package' messages are OK.)"
+ if ! bundle install --local --no-deployment; then
+ echo "(Running bundle install again, without --local.)"
+ bundle install --no-deployment
+ fi
+ bundle package --all
+ )
+}
+
+install_doc() {
+ cd "$WORKSPACE/doc" \
+ && bundle_install_trylocal \
+ && rm -rf .site
+}
+do_install doc
+
+install_gem() {
+ gemname=$1
+ srcpath=$2
+ with_test_gemset gem_uninstall_if_exists "$gemname" \
+ && cd "$WORKSPACE/$srcpath" \
+ && bundle_install_trylocal \
+ && gem build "$gemname.gemspec" \
+ && with_test_gemset gem install --no-ri --no-rdoc $(ls -t "$gemname"-*.gem|head -n1)
+}
+
+install_ruby_sdk() {
+ install_gem arvados sdk/ruby
+}
+do_install sdk/ruby ruby_sdk
+
+install_perl_sdk() {
+ cd "$WORKSPACE/sdk/perl" \
+ && perl Makefile.PL INSTALL_BASE="$PERLINSTALLBASE" \
+ && make install INSTALLDIRS=perl
+}
+do_install sdk/perl perl_sdk
+
+install_cli() {
+ install_gem arvados-cli sdk/cli
+}
+do_install sdk/cli cli
+
+install_login-sync() {
+ install_gem arvados-login-sync services/login-sync
+}
+do_install services/login-sync login-sync
+
+# Install the Python SDK early. Various other test suites (like
+# keepproxy) bring up run_test_server.py, which imports the arvados
+# module. We can't actually *test* the Python SDK yet though, because
+# its own test suite brings up some of those other programs (like
+# keepproxy).
+declare -a pythonstuff
+pythonstuff=(
+ sdk/pam
+ sdk/python
+ services/fuse
+ services/nodemanager
+ tools/crunchstat-summary
+ )
+for p in "${pythonstuff[@]}"
+do
+ do_install "$p" pip
+done
+if [ -n "$PYTHON3" ]; then
+ do_install services/dockercleaner pip "$VENV3DIR/bin/"
+fi
+
+install_apiserver() {
+ cd "$WORKSPACE/services/api" \
+ && RAILS_ENV=test bundle_install_trylocal
+
+ rm -f config/environments/test.rb
+ cp config/environments/test.rb.example config/environments/test.rb
+
+ if [ -n "$CONFIGSRC" ]
+ then
+ for f in database.yml application.yml
+ do
+ cp "$CONFIGSRC/$f" config/ || fatal "$f"
+ done
+ fi
+
+ # Fill in a random secret_token and blob_signing_key for testing
+ SECRET_TOKEN=`echo 'puts rand(2**512).to_s(36)' |ruby`
+ BLOB_SIGNING_KEY=`echo 'puts rand(2**512).to_s(36)' |ruby`
+
+ sed -i'' -e "s:SECRET_TOKEN:$SECRET_TOKEN:" config/application.yml
+ sed -i'' -e "s:BLOB_SIGNING_KEY:$BLOB_SIGNING_KEY:" config/application.yml
+
+ # Set up empty git repo (for git tests)
+ GITDIR=$(mktemp -d)
+ sed -i'' -e "s:/var/cache/git:$GITDIR:" config/application.default.yml
+
+ rm -rf $GITDIR
+ mkdir -p $GITDIR/test
+ cd $GITDIR/test \
+ && git init \
+ && git config user.email "jenkins at ci.curoverse.com" \
+ && git config user.name "Jenkins, CI" \
+ && touch tmp \
+ && git add tmp \
+ && git commit -m 'initial commit'
+
+ # Clear out any lingering postgresql connections to the test
+ # database, so that we can drop it. This assumes the current user
+ # is a postgresql superuser.
+ cd "$WORKSPACE/services/api" \
+ && test_database=$(python -c "import yaml; print yaml.load(file('config/database.yml'))['test']['database']") \
+ && psql "$test_database" -c "SELECT pg_terminate_backend (pg_stat_activity.procpid::int) FROM pg_stat_activity WHERE pg_stat_activity.datname = '$test_database';" 2>/dev/null
+
+ cd "$WORKSPACE/services/api" \
+ && RAILS_ENV=test bundle exec rake db:drop \
+ && RAILS_ENV=test bundle exec rake db:setup \
+ && RAILS_ENV=test bundle exec rake db:fixtures:load
+}
+do_install services/api apiserver
+
+declare -a gostuff
+gostuff=(
+ sdk/go/arvadosclient
+ sdk/go/blockdigest
+ sdk/go/manifest
+ sdk/go/streamer
+ sdk/go/crunchrunner
+ services/arv-git-httpd
+ services/crunchstat
+ services/keep-web
+ services/keepstore
+ sdk/go/keepclient
+ services/keepproxy
+ services/datamanager/summary
+ services/datamanager/collection
+ services/datamanager/keep
+ services/datamanager
+ services/crunch-dispatch-local
+ services/crunch-run
+ tools/keep-rsync
+ )
+for g in "${gostuff[@]}"
+do
+ do_install "$g" go
+done
+
+install_workbench() {
+ cd "$WORKSPACE/apps/workbench" \
+ && mkdir -p tmp/cache \
+ && RAILS_ENV=test bundle_install_trylocal
+}
+do_install apps/workbench workbench
+
+test_doclinkchecker() {
+ (
+ set -e
+ cd "$WORKSPACE/doc"
+ ARVADOS_API_HOST=qr1hi.arvadosapi.com
+ # Make sure python-epydoc is installed or the next line won't
+ # do much good!
+ PYTHONPATH=$WORKSPACE/sdk/python/ bundle exec rake linkchecker baseurl=file://$WORKSPACE/doc/.site/ arvados_workbench_host=https://workbench.$ARVADOS_API_HOST arvados_api_host=$ARVADOS_API_HOST
+ )
+}
+do_test doc doclinkchecker
+
+stop_services
+
+test_apiserver() {
+ cd "$WORKSPACE/services/api" \
+ && RAILS_ENV=test bundle exec rake test TESTOPTS=-v ${testargs[services/api]}
+}
+do_test services/api apiserver
+
+# Shortcut for when we're only running apiserver tests. This saves a bit of time,
+# because we don't need to start up the api server for subsequent tests.
+if [ ! -z "$only" ] && [ "$only" == "services/api" ]; then
+ rotate_logfile "$WORKSPACE/services/api/log/" "test.log"
+ exit_cleanly
+fi
+
+start_api
+
+test_ruby_sdk() {
+ cd "$WORKSPACE/sdk/ruby" \
+ && bundle exec rake test TESTOPTS=-v ${testargs[sdk/ruby]}
+}
+do_test sdk/ruby ruby_sdk
+
+test_cli() {
+ cd "$WORKSPACE/sdk/cli" \
+ && mkdir -p /tmp/keep \
+ && KEEP_LOCAL_STORE=/tmp/keep bundle exec rake test TESTOPTS=-v ${testargs[sdk/cli]}
+}
+do_test sdk/cli cli
+
+test_login-sync() {
+ cd "$WORKSPACE/services/login-sync" \
+ && bundle exec rake test TESTOPTS=-v ${testargs[services/login-sync]}
+}
+do_test services/login-sync login-sync
+
+for p in "${pythonstuff[@]}"
+do
+ do_test "$p" pip
+done
+do_test services/dockercleaner pip "$VENV3DIR/bin/"
+
+for g in "${gostuff[@]}"
+do
+ do_test "$g" go
+done
+
+test_workbench() {
+ start_nginx_proxy_services \
+ && cd "$WORKSPACE/apps/workbench" \
+ && RAILS_ENV=test bundle exec rake test TESTOPTS=-v ${testargs[apps/workbench]}
+}
+do_test apps/workbench workbench
+
+test_workbench_benchmark() {
+ start_nginx_proxy_services \
+ && cd "$WORKSPACE/apps/workbench" \
+ && RAILS_ENV=test bundle exec rake test:benchmark ${testargs[apps/workbench_benchmark]}
+}
+do_test apps/workbench_benchmark workbench_benchmark
+
+test_workbench_profile() {
+ start_nginx_proxy_services \
+ && cd "$WORKSPACE/apps/workbench" \
+ && RAILS_ENV=test bundle exec rake test:profile ${testargs[apps/workbench_profile]}
+}
+do_test apps/workbench_profile workbench_profile
+
+exit_cleanly
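
A couple of invocation sketches, using only options documented in the help text above (the WORKSPACE path and --temp directory are examples):

    # Full install-and-test run, keeping the temp dirs for later reuse:
    ./run-tests.sh WORKSPACE=$HOME/arvados --leave-temp

    # Re-run just one apiserver test file, reusing previous temp dirs and
    # skipping the install steps:
    ./run-tests.sh WORKSPACE=$HOME/arvados --temp /tmp/arvados-tests \
        --skip-install --only services/api \
        services/api_test="TEST=test/functional/arvados/v1/collections_controller_test.rb"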
diff --git a/build/jenkins/run_upload_packages.py b/build/jenkins/run_upload_packages.py
new file mode 100755
index 0000000..04e6c80
--- /dev/null
+++ b/build/jenkins/run_upload_packages.py
@@ -0,0 +1,273 @@
+#!/usr/bin/env python3
+
+import argparse
+import functools
+import glob
+import logging
+import os
+import pipes
+import shutil
+import subprocess
+import sys
+import time
+
+class TimestampFile:
+ def __init__(self, path):
+ self.path = path
+ self.start_time = time.time()
+
+ def last_upload(self):
+ try:
+ return os.path.getmtime(self.path)
+ except EnvironmentError:
+ return -1
+
+ def update(self):
+ os.close(os.open(self.path, os.O_CREAT | os.O_APPEND))
+ os.utime(self.path, (time.time(), self.start_time))
+
+
+class PackageSuite:
+ NEED_SSH = False
+
+ def __init__(self, glob_root, rel_globs):
+ logger_part = getattr(self, 'LOGGER_PART', os.path.basename(glob_root))
+ self.logger = logging.getLogger('arvados-dev.upload.' + logger_part)
+ self.globs = [os.path.join(glob_root, rel_glob)
+ for rel_glob in rel_globs]
+
+ def files_to_upload(self, since_timestamp):
+ for abs_glob in self.globs:
+ for path in glob.glob(abs_glob):
+ if os.path.getmtime(path) >= since_timestamp:
+ yield path
+
+ def upload_file(self, path):
+ raise NotImplementedError("PackageSuite.upload_file")
+
+ def upload_files(self, paths):
+ for path in paths:
+ self.logger.info("Uploading %s", path)
+ self.upload_file(path)
+
+ def post_uploads(self, paths):
+ pass
+
+ def update_packages(self, since_timestamp):
+ upload_paths = list(self.files_to_upload(since_timestamp))
+ if upload_paths:
+ self.upload_files(upload_paths)
+ self.post_uploads(upload_paths)
+
+
+class PythonPackageSuite(PackageSuite):
+ LOGGER_PART = 'python'
+
+ def __init__(self, glob_root, rel_globs):
+ super().__init__(glob_root, rel_globs)
+ self.seen_packages = set()
+
+ def upload_file(self, path):
+ src_dir = os.path.dirname(os.path.dirname(path))
+ if src_dir in self.seen_packages:
+ return
+ self.seen_packages.add(src_dir)
+ # NOTE: If we ever start uploading Python 3 packages, we'll need to
+ # figure out some way to adapt cmd to match. It might be easiest
+ # to give all our setup.py files the executable bit, and run that
+ # directly.
+ # We also must run `sdist` before `upload`: `upload` uploads any
+ # distributions previously generated in the command. It doesn't
+ # know how to upload distributions already on disk. We write the
+ # result to a dedicated directory to avoid interfering with our
+ # timestamp tracking.
+ cmd = ['python2.7', 'setup.py']
+ if not self.logger.isEnabledFor(logging.INFO):
+ cmd.append('--quiet')
+ cmd.extend(['sdist', '--dist-dir', '.upload_dist', 'upload'])
+ subprocess.check_call(cmd, cwd=src_dir)
+ shutil.rmtree(os.path.join(src_dir, '.upload_dist'))
+
+
+class GemPackageSuite(PackageSuite):
+ LOGGER_PART = 'gems'
+
+ def upload_file(self, path):
+ cmd = ['gem', 'push', path]
+ push_proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+ repushed = any(line == b'Repushing of gem versions is not allowed.\n'
+ for line in push_proc.stdout)
+ # Read any remaining stdout before closing.
+ for line in push_proc.stdout:
+ pass
+ push_proc.stdout.close()
+ if (push_proc.wait() != 0) and not repushed:
+ raise subprocess.CalledProcessError(push_proc.returncode, cmd)
+
+
+class DistroPackageSuite(PackageSuite):
+ NEED_SSH = True
+ REMOTE_DEST_DIR = 'tmp'
+
+ def __init__(self, glob_root, rel_globs, target, ssh_host, ssh_opts):
+ super().__init__(glob_root, rel_globs)
+ self.target = target
+ self.ssh_host = ssh_host
+ self.ssh_opts = ['-o' + opt for opt in ssh_opts]
+ if not self.logger.isEnabledFor(logging.INFO):
+ self.ssh_opts.append('-q')
+
+ def _build_cmd(self, base_cmd, *args):
+ cmd = [base_cmd]
+ cmd.extend(self.ssh_opts)
+ cmd.extend(args)
+ return cmd
+
+ def _paths_basenames(self, paths):
+ return (os.path.basename(path) for path in paths)
+
+ def _run_script(self, script, *args):
+ # SSH will use a shell to run our bash command, so we have to
+ # quote our arguments.
+ # self.__class__.__name__ provides $0 for the script, which makes a
+ # nicer message if there's an error.
+ subprocess.check_call(self._build_cmd(
+ 'ssh', self.ssh_host, 'bash', '-ec', pipes.quote(script),
+ self.__class__.__name__, *(pipes.quote(s) for s in args)))
+
+ def upload_files(self, paths):
+ cmd = self._build_cmd('scp', *paths)
+ cmd.append('{self.ssh_host}:{self.REMOTE_DEST_DIR}'.format(self=self))
+ subprocess.check_call(cmd)
+
+
+class DebianPackageSuite(DistroPackageSuite):
+ FREIGHT_SCRIPT = """
+cd "$1"; shift
+DISTNAME=$1; shift
+freight add "$@" "apt/$DISTNAME"
+freight cache "apt/$DISTNAME"
+rm "$@"
+"""
+ TARGET_DISTNAMES = {
+ 'debian7': 'wheezy',
+ 'debian8': 'jessie',
+ 'ubuntu1204': 'precise',
+ 'ubuntu1404': 'trusty',
+ }
+
+ def post_uploads(self, paths):
+ self._run_script(self.FREIGHT_SCRIPT, self.REMOTE_DEST_DIR,
+ self.TARGET_DISTNAMES[self.target],
+ *self._paths_basenames(paths))
+
+
+class RedHatPackageSuite(DistroPackageSuite):
+ CREATEREPO_SCRIPT = """
+cd "$1"; shift
+REPODIR=$1; shift
+rpmsign --addsign "$@" </dev/null
+mv "$@" "$REPODIR"
+createrepo "$REPODIR"
+"""
+ REPO_ROOT = '/var/www/rpm.arvados.org/'
+ TARGET_REPODIRS = {
+ 'centos6': 'CentOS/6/os/x86_64/'
+ }
+
+ def post_uploads(self, paths):
+ repo_dir = os.path.join(self.REPO_ROOT,
+ self.TARGET_REPODIRS[self.target])
+ self._run_script(self.CREATEREPO_SCRIPT, self.REMOTE_DEST_DIR,
+ repo_dir, *self._paths_basenames(paths))
+
+
+def _define_suite(suite_class, *rel_globs, **kwargs):
+ return functools.partial(suite_class, rel_globs=rel_globs, **kwargs)
+
+PACKAGE_SUITES = {
+ 'python': _define_suite(PythonPackageSuite,
+ 'sdk/pam/dist/*.tar.gz',
+ 'sdk/python/dist/*.tar.gz',
+ 'sdk/cwl/dist/*.tar.gz',
+ 'services/nodemanager/dist/*.tar.gz',
+ 'services/fuse/dist/*.tar.gz',
+ ),
+ 'gems': _define_suite(GemPackageSuite,
+ 'sdk/ruby/*.gem',
+ 'sdk/cli/*.gem',
+ 'services/login-sync/*.gem',
+ ),
+ }
+for target in ['debian7', 'debian8', 'ubuntu1204', 'ubuntu1404']:
+ PACKAGE_SUITES[target] = _define_suite(
+ DebianPackageSuite, os.path.join('packages', target, '*.deb'),
+ target=target)
+for target in ['centos6']:
+ PACKAGE_SUITES[target] = _define_suite(
+ RedHatPackageSuite, os.path.join('packages', target, '*.rpm'),
+ target=target)
+
+def parse_arguments(arguments):
+ parser = argparse.ArgumentParser(
+ prog="run_upload_packages.py",
+ description="Upload Arvados packages to various repositories")
+ parser.add_argument(
+ '--workspace', '-W', default=os.environ.get('WORKSPACE'),
+ help="Arvados source directory with built packages to upload")
+ parser.add_argument(
+ '--ssh-host', '-H',
+ help="Host specification for distribution repository server")
+ parser.add_argument('-o', action='append', default=[], dest='ssh_opts',
+ metavar='OPTION', help="Pass option to `ssh -o`")
+ parser.add_argument('--verbose', '-v', action='count', default=0,
+ help="Log more information and subcommand output")
+ parser.add_argument(
+ 'targets', nargs='*', default=['all'], metavar='target',
+ help="Upload packages to these targets (default all)\nAvailable targets: " +
+ ', '.join(sorted(PACKAGE_SUITES.keys())))
+ args = parser.parse_args(arguments)
+ if 'all' in args.targets:
+ args.targets = list(PACKAGE_SUITES.keys())
+
+ if args.workspace is None:
+ parser.error("workspace not set from command line or environment")
+ for target in args.targets:
+ try:
+ suite_class = PACKAGE_SUITES[target].func
+ except KeyError:
+ parser.error("unrecognized target {!r}".format(target))
+ if suite_class.NEED_SSH and (args.ssh_host is None):
+ parser.error(
+ "--ssh-host must be specified to upload distribution packages")
+ return args
+
+def setup_logger(stream_dest, args):
+ log_handler = logging.StreamHandler(stream_dest)
+ log_handler.setFormatter(logging.Formatter(
+ '%(asctime)s %(name)s[%(process)d] %(levelname)s: %(message)s',
+ '%Y-%m-%d %H:%M:%S'))
+ logger = logging.getLogger('arvados-dev.upload')
+ logger.addHandler(log_handler)
+ logger.setLevel(max(1, logging.WARNING - (10 * args.verbose)))
+
+def build_suite_and_upload(target, since_timestamp, args):
+ suite_def = PACKAGE_SUITES[target]
+ kwargs = {}
+ if suite_def.func.NEED_SSH:
+ kwargs.update(ssh_host=args.ssh_host, ssh_opts=args.ssh_opts)
+ suite = suite_def(args.workspace, **kwargs)
+ suite.update_packages(since_timestamp)
+
+def main(arguments, stdout=sys.stdout, stderr=sys.stderr):
+ args = parse_arguments(arguments)
+ setup_logger(stderr, args)
+ ts_file = TimestampFile(os.path.join(args.workspace, 'packages',
+ '.last_upload'))
+ last_upload_ts = ts_file.last_upload()
+ for target in args.targets:
+ build_suite_and_upload(target, last_upload_ts, args)
+ ts_file.update()
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
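
A hedged usage sketch for the uploader; the repository host name is an example, while the flags and target names come from the argparse setup above:

    # Upload only the Python sdists and Ruby gems, with verbose logging:
    ./run_upload_packages.py -W "$WORKSPACE" -v python gems

    # Upload distro packages to the repository server over ssh:
    ./run_upload_packages.py -W "$WORKSPACE" \
        -H jenkins@apt.example.com -o StrictHostKeyChecking=no \
        debian7 debian8 ubuntu1204 ubuntu1404 centos6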
-----------------------------------------------------------------------
hooks/post-receive
--
More information about the arvados-commits
mailing list