[ARVADOS] updated: b3be671ae96bd42f5456214479ec9769c0a5c637
Git user
git at public.curoverse.com
Wed Mar 9 15:24:32 EST 2016
Summary of changes:
 build/run-tests.sh                      | 6 ------
 services/arv-git-httpd/gitolite_test.go | 7 +------
 2 files changed, 1 insertion(+), 12 deletions(-)
discards da8714892b6dc603737395547cbcc13fe91c2ee5 (commit)
discards 583e436f2ea18503467a277ec7254f4618b1123c (commit)
discards dfcac6d391c0c94066efea7f395e529106c604c7 (commit)
discards 37e13f3cdd4f74777b7f87f55f20fca949af3482 (commit)
discards 76624e2dfb931459a7251643d59665d1c7d4ce4d (commit)
discards 95a519e20d1900f6f60a66b2e34afa08de1dd335 (commit)
discards cb2b22b2f7ae63a6c1c53d70b0830c25283126c7 (commit)
discards 9309fbb402407af5250bbf6df7fdbece8d16ed5d (commit)
discards f171464e53dffb5b5f06239d21e7cf2426274cd1 (commit)
discards 2b9b7518a60a71315a1504bf96b3182122bec702 (commit)
discards 8eda2e72fc37cdb9f947c19ad994ac094aa0a507 (commit)
discards 1659fd3eca62ac51e7628af3464dd03787e2d84f (commit)
discards 27ac5631c777fefc89839089109d0b06d596019d (commit)
discards a4f8a4e24e7a778194878c200391c4a3dca55961 (commit)
discards cdbcd6d4e3af4d99aeec97263ad12774e1ef81e4 (commit)
discards 2213311cb8647e41604a94d64ddd02b21ae78411 (commit)
discards 659d7361e7a0fc1f6537e6851ffd3d29ce19bb26 (commit)
discards 97424aa89b3f279986621ec5f574670dacb5a836 (commit)
discards d64b434147ba00224c937d192573a46d4543089d (commit)
discards 623af8e46e921f59abeeee9e7ae8ec4aef890b5b (commit)
discards f0a79eeed911157a9095b04e34e7da79c45d4340 (commit)
discards 8a067f3805be65efd9200a8d772cc4301707dcb0 (commit)
discards 3c7848724d18912317d3198192004cc2fceeaa3f (commit)
discards acab2f7c338f245baa7e5d9dca57476d12d863bf (commit)
discards ec60d7fd235835947efd8dba97213015d37b036a (commit)
discards f03b4165fd33b2f8844a4bcda7f650a7023986f1 (commit)
discards 1aa0a5407d6a4e6b861d1fcf52d8268d62b9d6d4 (commit)
discards e7ddc93ee0608266c34efbbfd4a84464eb8e81d2 (commit)
discards d7bda40439a964ab2117933aba3694cb635e23dc (commit)
discards 65a3779e6968835bc0b1fc1b103a408fd4eec4aa (commit)
discards 07c05348740cd932e54aecfa52bf2a1260b3948e (commit)
discards 726ff8da135916c0cd6e3e0e4c9614a91d3ccfca (commit)
discards 4640c72bbd678b70827a6d3ecaf13a8e502dfd2a (commit)
discards 8600760481ba1724cc18bd9b8be7b99bbca29dea (commit)
discards b7e4507cfe19368f0f80dddfde111f2dd0a690fb (commit)
discards abb1db4b4dcb4931c7fa2c03e21b994ddd911a3c (commit)
discards 99e1256022759020b0bba971ea5c4e252dab4611 (commit)
discards e407a1d4db57c41cebebaf8f3d663785df8f5fb4 (commit)
discards 8568240aaa6b9dba2af675dc0d6e09fe1001abfd (commit)
discards aa65f4dc68b750a635c857c0480df3f5a2dca7a4 (commit)
discards 819ffb0ad38eb1f86ab6b6e3c4ae7c860b465636 (commit)
discards 6f19089ca96e485f457932dce44256e46f1e3711 (commit)
discards f46095f12c8470a4eac64997bef622e5f26cafb9 (commit)
discards 11280265c0ce229e5bd617583a06e630901a8617 (commit)
discards bb26448a7551a9165e8674ade83e32f4d17adc9a (commit)
discards 48e6cb38b2e0c023589d4083192190bdc30c4762 (commit)
discards 512193d7a3befd4172b5aa4685c01da84a690430 (commit)
discards 17d02ba1aa5c25c101ae1a887f18e9dc5d2bb139 (commit)
discards a1ca4978b7e4e6d917c50c64c3bf5e527690a7e4 (commit)
discards a86550746f8343128d9bc38eb115d9495decc58f (commit)
discards 42f2e4db0a40db50617d8631a2ff7e5add6539e4 (commit)
discards 5dacdfe33d9d72e2445fff52069c2afd604271d3 (commit)
discards aa204e80a18f8827405fbb5a67de2ef8fa7074fc (commit)
discards baa66db7b5b05032c408a60180fa81b98d079ee4 (commit)
discards 0a18c5fb313d91930f4ff0c5cf05aabfe5e1a748 (commit)
discards 738bac6d373ec54b4bf75b2da71b9c65203ecc76 (commit)
discards 9a0aa9fcfdaec36bfa0cec543d8502f4006df2e8 (commit)
discards ff110d616ea28bffb9677f91702f716992e7d130 (commit)
discards 680bebbec151616f1582e6c1e85b7515907670f8 (commit)
discards 487aeb0d3e2263edc777f3f5451fa05f5f071285 (commit)
discards 18fb92535f505e30b86c50e1c58eaf93c0179412 (commit)
discards 43b16e43f7f2558bc619f3611ee4b96fb74b5e05 (commit)
discards e9043f6d8de8ca8c928b0097e39b1695a26ffb2b (commit)
discards 575d7f1fd77f66448df49c341a59baef6b3b5ef9 (commit)
discards a8c09e00f06c51887497b94b98178b2815e149e2 (commit)
discards 7512b31d3c7e3ae890a37b2222f113c9b351d86a (commit)
discards 7f05c28c4ac0ddffdf9e13bfc104bc60f7195174 (commit)
discards 06b0b75584e41e90aa635b6d833936343075c132 (commit)
discards 7b2fd8f0c85a013df886e881d5d97aebf4890169 (commit)
discards 026d70487631662d1b0ccf8f66fbef71b9be40d6 (commit)
discards 22e538ac7ece05cbfc32b2dfb463f3fb955d7f61 (commit)
discards fe5164555b117d5c9c538905c495192adad0b815 (commit)
discards 5cc977da78b718596faa9ad0352b7c14402f90fa (commit)
discards b408ce71a9caddea144fd7d90aa166c9cb16e715 (commit)
discards 846a9311a328b9cbadc8b70d34134b72b0b6b332 (commit)
discards f44f65a25c1d505e7faa9e26351525c10bb2cbbd (commit)
discards a956a49875fc484c2c237aec228a92f8ce5b0f06 (commit)
discards f84fd5c8f2292a41bbb4cd762dbedab09adbf784 (commit)
discards 2fe4a6a068e720e72b1695de133b4363e9a69a40 (commit)
discards ebdbb0bb23ceb1d1373472e6405fa2293f51616b (commit)
discards 60d3fdf6dedff3cb9eca478caacfeee19a752812 (commit)
discards ffd741be7af1eca4c11884a3e8aa6630be25de2d (commit)
discards c9e5a801bb72c20b6c56e0b88e78ea5626fb6497 (commit)
discards 1e48d4f97d643b3a36f7856284e3a95430bc8f92 (commit)
discards 211b5c1e8a86fe9f3a6704f76120e2fed78a9247 (commit)
discards 81afb79616d482b177d91ee4e198392e750221ab (commit)
discards e6ee12052db27b61f8bcf7af89acc9dc520a1799 (commit)
discards 0814135175ab777abe515e4ba404dd0bf2929b24 (commit)
discards 9f73adf4a13fef44dd133b91c46693aea1af69df (commit)
discards c1c5b145f832bd9a647c6e07e19f148cf26ff910 (commit)
discards 9fb392b2eaa59f20f7186de726531d922408dfa1 (commit)
discards 1c0ce486e11b13ef5cd10959d4eca1fd948d88d1 (commit)
discards 26170766797e80d0c86c25a10331f43c6d0eb283 (commit)
discards 46ae733bd885da0d75882831af822c9578621730 (commit)
discards 2bee50fa0b09c9dd785fe6d6beafe21fa961111d (commit)
discards 33f530c93b5bd397c6c9569385613915acf4cdbb (commit)
discards 4735c45083770bf8344dd80dd84695bb4e0fd169 (commit)
discards 886c3c5c57c6419aceb390aea88c5886f43bbf42 (commit)
discards 5560d70bf29553b1d0719dda060d298de7bfc338 (commit)
discards d42d4368192ec140faf161db6c98a500aa39fdcd (commit)
discards e88afb9fdcdd8b9659ad68021987fd71269d7ff4 (commit)
discards d42db2ef8638690a1a3e0f4e66ec898d529c8736 (commit)
discards fff5527a9d917345367f08f0e2fe2dbb81e00187 (commit)
discards bbed885afb9007c5d81ff55af70e8d7be92f8f37 (commit)
discards 575f75167b48977b3c825e30f944fca70a74f901 (commit)
discards 6db0d02823e4721272a9492c2a03bb16afb56469 (commit)
discards e7a2d57f559f84d9a2181526e3a4d6ca6082f3c4 (commit)
discards b4ca11159ff0d38af3750c3a109e731f295a8460 (commit)
discards dbedfbcff43c82cc5d4685a013d58c6245a5f6ab (commit)
discards 2748e092a403d6d8ff11a01238e55935aba7055a (commit)
discards d3bbd1396af6fb8e6b6c8f50a0406cbc71ee82ca (commit)
discards bebf531a2cb9cf08843d6ac4d4ae2238d58efc2b (commit)
discards 2542d0dfd960962d10bc4daf603fdb9e2c22d146 (commit)
discards 8c2f3c1f36572e6822d9457d13d7129efd330fd3 (commit)
discards 6ca2758f31fb1a4afeb4eb5aca3cd8d6af402169 (commit)
discards 14ef215b72d2d56986ad29f1358cf27f7be75294 (commit)
discards 24129230929833c3bad1e81b6af9b975c1f605a1 (commit)
discards fc413df044d945ac7620b0220980b23698cc1f5d (commit)
discards c6d5c89d0581b6f93793ed9e43d3ec28bc3488e1 (commit)
discards abc952f3017c4bcb68578cd90d130ec08f72aeb2 (commit)
discards e359eafa8b63bd374153ab0bfe81680b3a476dcd (commit)
discards 30fc3285c237081c3bbe20de8a6e3adcfbc29ca0 (commit)
discards 751ca54d959fdf93f042800cb5c4831e11979b78 (commit)
discards 723ab7027a196d66e82d373d58f767503e88f771 (commit)
discards acc982607d60ae73a482a9254b6a34979832efb8 (commit)
discards 835109e6384b40e3c801587849bcf76ea0b1e76f (commit)
discards e2a1b086e41099f52b407e4085576b00eed39b2b (commit)
discards a5bf97676451900886f6dda7521ca7db27cdd452 (commit)
discards 5cac6f5fd28ab38fcc7279d5d7bd5f224d689d8a (commit)
discards d35ac7e38b0eb480a4573a77a4bdff2a84f92f28 (commit)
discards 5cabf9c500789ffa2a32b8c298d0978f3c08c539 (commit)
discards 7c80a8b76a2972ef4e42dcc281f46d9e0c8839ea (commit)
discards 057ea788a6cfc9c0a5fc02376e0557f4d0529b38 (commit)
discards 3adaad524ee4dacb06434956bee3c96599299c98 (commit)
discards 57b2c57db16bed3f712200f603a9e77998706cc6 (commit)
discards 846f42e54e6f2e4b00802c5f1a45cb2ec1b23674 (commit)
discards 69e10b5d9116fed733b192ba902d2dc7ad68a076 (commit)
discards cadd265aa66c1fd41e3a385a3b23e97462094b4b (commit)
discards 40fff998c296ff1fa3ae681a1ae4c06df7f6abe5 (commit)
discards f551c4a120603e54eb6b9119b3e214df3523a8c9 (commit)
discards 426e8062746c72854622a7a676c8f5c51fef45ff (commit)
discards 937bbde50c186d8d28bff1d8980fd701cb9b72ab (commit)
discards d3c6950d8f2945c18448a0b1edf38bf8d15ced3f (commit)
discards d8b5db8865d9d2929e9a3fe708d609d10622817f (commit)
discards d119e26a14b0bc143638ca38926a4a0d911fe8f1 (commit)
discards cffee40d749672091b0969c8ec3168ab3e5254d6 (commit)
discards 9eac45e52bf4a4b37bebfb2fc85ec5584207bfa8 (commit)
discards a23620386db119bf670221368016484a5a2f1f93 (commit)
discards ad5f9554d13754e16def528a24a2669f7cbd4e5d (commit)
discards 72c6e7bcd898caa3a2ed718e66b71568fe66bc7d (commit)
discards d20e33394de2a85f17399e0e4d9d7d2e52c94178 (commit)
discards b67e59900349a05e6ed030f1cb92020a8109e7dd (commit)
discards bc27dd23b329c7d82deebb3065917dceba18da27 (commit)
discards 51601a277992c213e60c6bebbc876631891ce699 (commit)
discards 39520ae3caa8227e58471b8783106a3c2dba1304 (commit)
discards f4d018c979b333d61fed9b1884d7edab540f3c1a (commit)
discards d3f13c19bdc7b2efe160c66116461faf865fed4e (commit)
discards 8f535386bf3cf4bb0e6a929e4a39796fae235f3f (commit)
discards 3af59ee413454b94ad45bb5d8990896186b5d672 (commit)
discards 82b42172688762c4f5fa7727a6974c94e8ccd86a (commit)
discards be68e901353548dd87ccadcd62ae82551b1995d2 (commit)
discards 2d11eaae9b48ebf7998b37d978466d26ddcfc769 (commit)
discards 5f2cf1041d2579bc4c29d5691b9ebaf9250f39b8 (commit)
discards 44b7b4e113d39b34e35d6b2323fee9fbad21611c (commit)
discards da075298c7ddbefa68947783a787b46f659fefdf (commit)
discards 182891aa10947a1238195a8565c051efd1dd5480 (commit)
discards fec33dcaf67cb752c669915bb49dead9b8f11e4f (commit)
discards d99331757367dbbb3607a4644a5a6572667cb262 (commit)
discards 5a9879c969a110494880301464c80d648fcd560b (commit)
discards a9f03b2a636cbc7dc16628a5de8701cc66e04871 (commit)
discards 1c82c1f875ca2681ff36e804fde8f6beb8f36c77 (commit)
discards 9ac3e54bc97741867159976495d723a06f2cf28d (commit)
discards 3789a46123ea8a3c90977d8365d3720f9f6585b1 (commit)
discards f23d3167e9163944b7a52ce9d822b930053bb957 (commit)
discards 170c33818a1ac6a3e35f411d80cf77a008bd6e07 (commit)
discards c21753f5f91a3144395f4b259b09fb16b39b4b2d (commit)
discards 656b5e388467792f437caf1afa032dd4d36b2eb1 (commit)
discards 25ada289bea32adcbaee5a5181eb94809693ec9c (commit)
discards 9819eac0a0ec331264e58864e66682ae9d436e82 (commit)
discards 469c824c8db7dd960350997b3f4d55bcb3245456 (commit)
discards 7da79d9d4ec5509eefb06751173bb9ff5793a4ce (commit)
discards da2a7a1bd6ef8ca7daafbf9b88d0754be3fc9382 (commit)
discards 5ee7a415414d023634ef3e9b9333d49068791e79 (commit)
discards fb189c35101622a1bcd53d4c10b589963afd8747 (commit)
discards c43d72c6d70213350abf70e81710f98afa0a0862 (commit)
discards 302257484801465c1e0371e36dcc0d3a28392af1 (commit)
discards 9afc5f2d9ca0af29c34ff15e58b66387c3c149a6 (commit)
discards f6f099ed8d235f23a70870b2e013afd6a085db00 (commit)
discards f0c491076403ee51e42604e603aacb9873944aba (commit)
discards 716e83202432b70111466738123fadbf38824d87 (commit)
discards 33692d98e44828a7114f1b8faa54c359b769e87b (commit)
discards ee3c9aa1a39e6864a096a6619e237534d524ef69 (commit)
discards 323701f50f452cd96a15d14c278d41bd6e6b6f25 (commit)
discards 761d43de01eb9c197162a14f8cacd3d58d67da77 (commit)
discards d5f9f022f9414ffb8bc1b7b2f04096468574e2e5 (commit)
discards a4e96f869530a93545582f5a21ec0e6caa20bcd9 (commit)
discards c6f9a3fc32babe5c0d26a1b23a89a5c7917af8db (commit)
discards 6dab05d0e5f1337aaa045b785d81536766edd27c (commit)
discards 96952e4e39a617dd5233772ccf31c42195150d5e (commit)
discards 9201f7f9da3043ad5574d3cfb8f841493a76460e (commit)
discards 9dda02a389dd2989e6190e07d2d5e8727dd6842a (commit)
discards ff7b6fbcbca05e47fa226f8bef9f70de74e39024 (commit)
discards bfa2dd53794c53558aecb44577ab1e4752c268a0 (commit)
discards b4006ddaf13af4210e2019adfbc1eba3ae963c60 (commit)
discards 5ea1be908b4b46f029111deeff5216401a9662e2 (commit)
discards 73fbce50c5916c0daa9939cecc2088fb572ee17a (commit)
discards 4d3faccfe98c1f00987d7bc1ab6509ef347cc2ac (commit)
discards e6a5235b8be6eb8e884d6d648b7bc89179829d9f (commit)
discards 379365ca0bf7948269ed705af53874f65668d851 (commit)
discards ad88edf91c59cbf2cfdf59f21f278871889636b3 (commit)
discards d9b63321261cbc98a50166981213342b7c0b91f1 (commit)
discards 823a16ae78acdadd17d6f563dfafe4678c364906 (commit)
discards 21a3d01379891f2670991e4d24804e1dc87a1ab1 (commit)
discards 8713e250470011db40d0cc9f1a02afbf98d2853c (commit)
discards 77f198433d31d492d86c38359940acd7f5012cf8 (commit)
discards b5494642ea862658418d2f9a419b10d1fe25431a (commit)
discards b8568977a1c6f7ff82900291814699617f8547b1 (commit)
discards 8003783f717ecb167a47c11d01f459b43670fd0e (commit)
discards a3ec6d08fcaa76d3b16906f57eb07bbecafce545 (commit)
discards 8379e4d518e9351212004e1d4155d8b869e727f1 (commit)
discards fe19ce4b94f7ae3277e9016bc7886179f46ec472 (commit)
discards aafddea00106bd7e9fd6336623788dbab95b7306 (commit)
discards bab8ca353f89418b0d92ec639e5964cd1d2aada5 (commit)
discards feac22422cfdc33e14633b1a2b724c49eea69d94 (commit)
discards 0be695eef3af4788a0890060a765e6b0d2c32334 (commit)
discards fd45d42570bad689429ac62df86929e6b0c86cdf (commit)
discards fc1a358d5403e87bfdeca2d0c510362576a8c2e4 (commit)
discards 06dd898fe349cbc5a16fb5f6daf2b563fbb3c936 (commit)
discards 81b4b70978fe97e6bd22bcb85cdfa36131f2f012 (commit)
discards 7701a7ad84b0d4fb4642568bd75f63a5c0586658 (commit)
discards b0dd0080e98ffccb2d48f247583f55c8937bda84 (commit)
discards fcc0a767df83518e1b42c78ab7d4805fe6b5192b (commit)
discards bdffa02ff66636f6e3b28d5ab553d9fcad69037b (commit)
discards 3f03b72f0c24ee2c5abfe5654107f6e1c019bc15 (commit)
discards 59c684f9e8a545ffae5036b193d09e9c8ab7e689 (commit)
discards bf4baab1c1e2a422334fc07d77a44036832583a9 (commit)
discards dfc5f78444e593f24ab976a6ad472c2d1dce12b9 (commit)
discards 988b1304d57f960f1006af52dc700026061a280e (commit)
discards 4e76299b9c1d4132a1fb681d332be638fc922425 (commit)
discards 3d595c9b4cda969d75715395a29057e1740568e3 (commit)
discards 69e7600c1d70c485325a3b2c9d818ac2e1e1084d (commit)
discards ebaa21aa60ce8f177091dc347ef47ec7e90af3ae (commit)
discards c18ab4005222f73e5e50684ed1f242d8988baec4 (commit)
discards 61974cf1393c51568293d19685ea3a4aafc4a770 (commit)
discards b8048416d62f1c380f425d0d2dc7ed0fbbd6fe5c (commit)
discards 7034b1b4089ade8f362f592ec209fcfd45b87ec3 (commit)
discards eafd0331da34dd8d61f9f3afd22f7976615b2781 (commit)
discards d44585be5835346f21df58aae29fd0c9a3f3d63e (commit)
discards 310b52541abd4625151df391e1194ec2e7c76448 (commit)
discards cc2a6672fef9bb825b2fa4b8482f4306cae491a1 (commit)
discards ae548f25303fad421a20ebcc34af39a6d0ee0db8 (commit)
discards 310db12be5e8825d11e91a0abc30b63548ec4757 (commit)
discards 4886a88b72e5faef32780bc2fdd60ec5c2d7f16d (commit)
discards c0e1dbb244f0cf8d838ebd9296e797a0a2f23fdf (commit)
discards 34ef97e556eff9a315160f0317a6652154d749f0 (commit)
discards d8d561588362cf4c2e100949b39eb45661471535 (commit)
discards f32deb7586783a24dd419329975f1d9429728d29 (commit)
discards c31cdc09ae6aefbbf6f9e14b3c6fdf910a729b29 (commit)
discards 5cf15331acb6250da715ee9c41a2a0db7986917c (commit)
discards a5c1b7e9798565f07e231c43b3453fe7b69c7368 (commit)
discards d30c7f8b3210ae1e825033e852c369d16b7e48f1 (commit)
discards c9182875d51afe9407271334d0be1fdbd718c4d1 (commit)
discards ef2d37b1b95aa57a2840080995ee0555b7a64f50 (commit)
discards 9cc673cf435ec02b043e2b3729b0ce411cfd5f95 (commit)
discards 86cc2e4b923ab24ef9a6460efa23904a9f8ed742 (commit)
discards 30945c85e1d6b6dd20885683851ff4f1f0575346 (commit)
discards a7d40e5a84aadcbfcbce43876d6f1ad5e7d9d824 (commit)
discards 0a0ca0b129c2061db7c6f70e46334500088b984e (commit)
discards ea0e43b2e928db9e7ef77977adc789cf44bc81c2 (commit)
discards 5b7eaf60e9ae6a239f9ef601ff615913680f054f (commit)
discards 4690e2bf79cfef03c548e08fd5cf67efa709ad12 (commit)
discards bdfbb8c113d8e7f63bb8caa57010437826c59dc9 (commit)
discards 2edb46e696fa3ca24aeb0c820f15630ece41f0fc (commit)
discards c1aed40ede934f264217fa46f9b4bec666d3b600 (commit)
discards aa569f0b4c661ec83fb5be6574250f1c3f713119 (commit)
discards 08c24c27a61b52063146576e9c80773b0e481f37 (commit)
discards 8f04915e13d44ce5028eb07f3eed3879a592f11b (commit)
discards 5dfd1323ad00fa86e008072e8a1b19f9869b586f (commit)
discards 73f6f1bb2bc589b28f5779521de5db4aa0993deb (commit)
discards 510a6b6a1a8e263a7cfb305032ae3ef07d88a400 (commit)
discards 6086e2a55293b8a21883f720788d951c3545cac6 (commit)
discards 0a139ea3b0b4dd9785ce1c847e08d507a9f1bb6d (commit)
discards b3677ac8c09f7e42ee6e7cfe1ec7dfcdd0832009 (commit)
discards 400c39aabb36fa98b8878806bd5210677495527a (commit)
discards 287590ce79a8cf78573e1edf5bb8cd85c8e48210 (commit)
discards 9932d701d2ebe1d8f9d04aeaef701a1e7b616852 (commit)
discards e18701a42562461556c3fac9013abf76f29f44fd (commit)
discards d8c6d08d57863faf78b2b582950dd8997f83a811 (commit)
discards 264344dc26e402ac477c34f90ef1a8f52dc4ebab (commit)
discards a0c5e16c9709e4785c92489bc5347e8fbd8f3b5e (commit)
discards 0e301da832ce612d42488c2ad69addf1f44ee85f (commit)
discards 4044a6363ba1ccebc98369dfa70052961fcf1117 (commit)
discards f5d864c9681464e9984c92ccfd72189b5985d8b2 (commit)
discards 598d191de7ae6033ababaefcfcfc973e13ccdcbe (commit)
discards 5cb61be1c7a900d9d872c8b037f158e3c10f40bc (commit)
discards ca279f2c28d2c7130b3fba8df28fc6c86d559d1b (commit)
discards 6dc673b966cf35cd390b2fa1d139fa31a59788e2 (commit)
discards 3eecec6dc8eea0765a0821526255f682daa3f321 (commit)
discards c6de6999ba87c75f2856953b908ddec4af404c99 (commit)
discards f4fcedfae4edf9c38cf9becb228f3956f2812448 (commit)
discards 0b1b571382be1fbbfb4b2dbb499f0367a798e711 (commit)
discards 0181b121e163c32c932aa4585ae668d50584281f (commit)
discards 76edfd719d2cef94494cc4f50b02cf380f88cf99 (commit)
discards 121e7a814e34136e77191f2eb1951ad27a623453 (commit)
discards 82807388a1f7dd901e619419690253a5fd58663b (commit)
discards 53ad1144ce5b37d4d6e7c9cdbabddf92b483f75a (commit)
discards e5561b32e943e67eb457b8d7669e005a0338395e (commit)
discards 2f56fdaf3a039ee87d264933fe1306c2642f81fe (commit)
discards aac6af892c9bbaeb791d62ed7470a80713e66650 (commit)
discards 0e13d045f78e1c78c9f2be3012c6bf06bf1ea5bd (commit)
discards f0d1f9935eadd303d6b160ae9c349398f6691bd9 (commit)
discards 0f696afd56780250b3f2828b875fb7ea8e4ebe81 (commit)
discards 591ddfb6f369bd37bf8218646219f5f3fcc561bd (commit)
discards c159d1d0032ce82789e0725b9a50c48ef634ecd4 (commit)
discards 38f341f93f21a6ecd8c27620825c57a093677330 (commit)
discards f288c6cc7eebf112c73e909cbf7b74318cbf884c (commit)
discards c5dfc8a4b24c4da0a83e8bd759b361b6d1080955 (commit)
discards 84cfa56ae6bff1649b73a63c8f23d20a5a87f9ff (commit)
discards ef45c8676d34405a2c99ffa51c258b8cb7b6777a (commit)
discards fbce5ceda30fd45c1834d487ed882f394352ef8a (commit)
discards b9bcac3ebad592ae9a620f26b3841986c63614e8 (commit)
discards 6268a275acf7c9daf8561e4b969cfbe4e3f44df8 (commit)
discards 848ccc7efab5dab22f7fedd920daffbd102d09a7 (commit)
discards 855e16bace329e9a449fcfc569a0e8ba6acc2d21 (commit)
discards 530468d1224a7c2251ee54bf5ec8d1f8d875de14 (commit)
discards 1a4ea520c247fd46f544ae83bb9640e9f0652577 (commit)
discards 381b79bf5cfefe790bdcc24dd33296b8518e4c19 (commit)
discards 301f4e3ad12f7ff3c08b352b05dc7886c629900e (commit)
discards 05ee30be7534c9234ccf029076563ccc7e7a8189 (commit)
discards 0dd517477cee1a5a4c247b765e150976f4809e58 (commit)
discards 626ea7dba17faa4106fcd317aa7e277e9ec5227f (commit)
discards 7761d1a6b70e1531c73f91043b9f69a251099701 (commit)
discards 6c26aa2c194f60b2823da49166741203524a0b48 (commit)
discards 44eab398d387d1e72850c6a1fedd74272ca119a9 (commit)
discards a572ef644ee17b0a61e039ce691a39554031aa73 (commit)
discards ac6798733ce11d6a3410bf1da639cb3e5a84047f (commit)
discards 845e3998bb2b4175e947a20eea4ed530629a778b (commit)
discards b6a5513611786d2c0a513bac3ab2db00d84bf289 (commit)
discards 1e7b6aa1f16e06b796e3ed3f66c48f38a9287a5d (commit)
discards bb48082657c79156ad9af20d71c1ca73eafdf0fa (commit)
discards 6a1a627a78a021aa164bfd1136bb5396e1defd95 (commit)
discards 2b9864be33a42c3f9f612fcb3cf51e46c040a354 (commit)
discards cc7fb93641cd735c8bd0ec744b1fa1b84a242626 (commit)
discards 32f2a67ecfd7a5b4169d4e3fbc162306771f0d34 (commit)
discards eae444a782a53e348fd5df5929cfb2713f0d6fa8 (commit)
discards c4d584c524efe8c9c689a263bec54b99b3d89138 (commit)
discards 31e86126eba36e27c824b769ddea304e52ceeb59 (commit)
discards 23b42a86c646eed10830b4b34ec46173728140d0 (commit)
discards 0f0793754990012c75393cd517c85c261579cddd (commit)
discards a43a3fb3b393b125b7c6d4139375b4bd95890cfa (commit)
discards 9a4afad134f563d41f2a0c84b4389698df07a8db (commit)
discards 123d3d778051c90b6be39b0f0b25da24d025d382 (commit)
discards 1553f4842dba02aaca9fd7e5a9ac426d7532c462 (commit)
discards ffa8b415658e6d3ba3836c6e208fe35979de0bf4 (commit)
discards a3465f97627494bde0f148e27559046308cbedda (commit)
discards 5b7e2250d5802b20b73242ae4d0641b32f72cce7 (commit)
discards 4bfcd9f168a3371b19bbd12429a8da37883256c6 (commit)
discards 422349074a47c6cf6394234a3dba975c84b94b09 (commit)
discards 72c3457917a25dede1695c8abda316aa4e5afd5e (commit)
discards bf95f654a1587dceb6d2d27c03d6da84a5a2a92a (commit)
discards 0fbece8ea4acf263da8bc312ba3451c1adb7ea43 (commit)
discards 9fa534460eaa10ac16d1de355e45898517306ec5 (commit)
discards 08d02e15e140740902dfab5895295c08d4ca0d16 (commit)
discards 42e438d97a95d8e46f045acfaf82298d8bb3dcab (commit)
discards 303a6e2ad510ea2a4c36e7bc2b7ce789ce0b0a7a (commit)
discards 858cd93950d0fd7b621e9e4656261856e60856ad (commit)
discards 034677aa057f89be2d5bf923d6956164396d11cb (commit)
discards acdfc466ec52fbbf394f0e0b00544e809e74cec8 (commit)
discards 274c7abf3aee7dee8bfe648f485087a5a0ab59ed (commit)
discards 3002e7a6bc67564e900a5da490ef4e6b3b43224f (commit)
discards 8b51595b8ee2a1a554d365e857dda5cfde440e53 (commit)
discards d64856940e5568d3b8bd83d60f36d13e7bc11fa2 (commit)
discards f9bc13bd47c5404da529d5a71153500d2ee755c7 (commit)
discards decff518125e54dc044170fb4e31a5b86d566bb5 (commit)
discards ccb48a84de0d9adff1809574aac4df77b9b4575c (commit)
discards 672107d387a86334aa70711d071bc3c2bf9c5502 (commit)
discards 40ee991675ff42fe531bb8f2d1bd44311e9fe370 (commit)
discards aa9dc78611ff1339d02edcce329540d32fafd9b9 (commit)
discards 5cdd9c170ff4c5c3ac4c5101ee4a9ada47a4adce (commit)
discards 65ecef658fb337d97db7025bcdd4107276988543 (commit)
discards a00ef8633a3fbed5baecc5c47e9a330943dc8f8e (commit)
discards 3c0d0df8f6d287e83ea959881afedddc5d4b784a (commit)
discards 503254168510f46f55da0fcf6c49436c900920b5 (commit)
discards 6c6ca86bfa400d880d051fd87c7c1fb9b8905114 (commit)
discards 7b264271f10d442953f216d66f439d770e0fadad (commit)
discards 341344e17cc68abe57ed2a2ef97807f5d01d9232 (commit)
discards 8c01f6309392198702e89946f4dd6eb76c3f2772 (commit)
discards c50da8ea8cdc343407b53e41fcaf971294cf2a65 (commit)
discards 91a75ac34ec1b4f78ba2b9b63fe650941ab72dce (commit)
discards 7383891fa3dfa915967e3d15c5e678f9545ffb26 (commit)
discards 02d27b88fe5afeb52e69b387929ef8d59663c09c (commit)
discards be33b03fe8aef6e9e23f82ee7653bc46fabef161 (commit)
discards 03d58fa94878d11cac6d25cdb67180060d1e2fe5 (commit)
discards 2d03ecf4a147db64708991bd3bf2f1d9fc9b358e (commit)
discards e8492641dbf4051c456d355e536126740198892a (commit)
discards 6c63e6c0a80c1f77a9f96be9956aa2a31147a89e (commit)
discards 41d0494a83f08584f7427f6a050517cb1a6dd70d (commit)
discards abcdf588c84675cb4f5ce528ee30e4f40cac99b0 (commit)
discards f3e822f9fe498c6c9ef5fc0602b98695349e6e5e (commit)
discards 11339c91b5e7efdc214897856b103d0c532df45e (commit)
discards d7f1a63c8396715e501be04addefb0a158303e69 (commit)
discards 6772d21bda502b479d29f21030816560f110c04d (commit)
discards f6c53e0e14c29950c6b29f2b3c6a46c13071fbd2 (commit)
discards 501a707f36ac0b2ac13dc074020641b58128e175 (commit)
discards a526319c2a822c1a2b06b09d734fa3da6f84ec31 (commit)
discards e925baa047b64d387c539473746b495972310813 (commit)
discards a6969ab968422f146df9cf9ef301fb5b0b422ded (commit)
discards 3e70317d25333921718191ac3ba9be91e0408449 (commit)
discards 440a06656f8c81cf3d7757dd00991872683fc513 (commit)
discards 4af2bd565b75a9dcbb553975599d5e0b8856e940 (commit)
discards 253673e89ef17f5f19b5c3b7cbe32d8348e1ba4f (commit)
discards 20354a24f9582cc0abb268d45adacf5f386a10ed (commit)
discards 21bcc1b7eb165fb094db43f60b8073250074c3c1 (commit)
discards 960bdb270fb93118e9241ffc28648962f136a37f (commit)
discards e1fc53f3f8db8a6770dce3f11231191734217211 (commit)
discards 82295301ecb7223887e4860ded71ee6373d3bb94 (commit)
discards 04a389ca22184133efe626dc83d8e8951c802551 (commit)
discards 5fb964860b2cacdf87a18b5295cd0c492aace3f6 (commit)
discards 0f143ed9e47ec9d07c0b17758c3bc466e631366a (commit)
discards 0c5278fc1b23cae26dfea01be30a759b4413f35d (commit)
discards d5774ee9d1d5ff2cc1b8fc7edcc9aeac704e7d39 (commit)
discards 80f4c0641ca89efa0813feaa3429de9b471dd816 (commit)
discards c05dc4cdc80d754a284e93478e643fd38dec123f (commit)
discards 7efe1eb66d31585bd6cd4a52ef0d12e0bdd91a88 (commit)
discards 152e3b9509574666dc8e27d750b8bbe2d79b88a6 (commit)
discards 48c6d3e3a7f10c6351a1cbc8fab1ea06cb616348 (commit)
discards 49b655c14efe7bc5d73c099813db22fc09050d83 (commit)
discards fce78b35e3fa0534d4173762b979c916cc463cdd (commit)
discards c479436a110f977879ff278e9a550efe47f26e1d (commit)
discards 7490e64759f56a8da52c8259da8b96baa769a6fe (commit)
discards 2fb06b518ef7c6f402d7d8832577400a0814dab0 (commit)
discards 061a0fa3af58c1c1db584e21cb069547288f7e82 (commit)
discards 8bfa3e75783933f7a9e484b18074336456fbc834 (commit)
discards 511d0877b038c3ce33f3fcfa72def70f7da84692 (commit)
discards 72216082a3e042745707e69692e4b8664fc07a6e (commit)
discards 7b41632216dcdc83aba5bc48b61189f705bd3e97 (commit)
discards 8c7f4b71ffe3976dca29a8337e3d487ad7547096 (commit)
discards f85eb7e10ef091aa9989d811705f04236bbd897a (commit)
discards ebf8ab12ebbbdd4b4b317b1b14f1e65d67e381c3 (commit)
discards 4d4d004399af1dc0a99d67893f818756a8d19a90 (commit)
discards e28c8a92c6ca9b1c6a32d727cf5913a44229ec4c (commit)
discards d0e6362a7388d6e17d43f92ec64d13d5ee206894 (commit)
discards e52b761b16170faf94bad9dd407cbd3238bf371b (commit)
discards bcbbd1e9d64d4527644072364a267eca13af434d (commit)
discards 8a2221bea7f28533a62a1c39e822f8c9d30b0b95 (commit)
discards 358ad8eabdc9eec2e8b690ca0310b213dc4f6098 (commit)
discards 4a96daeef2dd5a209ec685ee3e1d6864987d1f4f (commit)
discards 5ab76d533d0189c66d68220229b7453e33fbf4b7 (commit)
discards ce997a9f7f79d7ee4c5ed9ca9ed072c3626f82a6 (commit)
discards ed9e8d8616e919f81703134a7d5cbac62f151877 (commit)
discards b5c641cae15249d888c7bb41c937338af8f7102d (commit)
discards 27d5f6ed785b6e127cf9a2f1145fac21627a9a89 (commit)
discards cd13ebefd14bc536d069f395bc866e902550ffff (commit)
discards 44c80db735811f803760159dd3f1065ab0822179 (commit)
discards 85ea947a0961e4217be9d53763fb5f40aed6e3db (commit)
discards adb6ea84650d517054fc67c097b456714057d07f (commit)
discards 6268adae9d3812084c7854d62e1be8b5412605f8 (commit)
discards c3db5a62be3c07e4ddaff6a76a8adcaaee1a0319 (commit)
discards 91e919d5ef0dc210dc1e4c9c150acb7b082ce92a (commit)
discards d3f844f115bdcb6b349b9aad3d46e32dd4d18d33 (commit)
discards 0536d6d65bc15cc4286bf26e21b893d94eb9f4a2 (commit)
discards 0528a6d4df9d81622f9813c2a8925dd7322a81bb (commit)
discards 076ba4ed7de93d81211c905551e38768f79a5518 (commit)
discards 7ae95cd88f8de821b56d81a49a6999669d823c76 (commit)
discards 1b9ca54af0f7f70c65382ebe748a22e6763130e6 (commit)
discards e9db1de8aaa0a519f31684561e3c660461ccf22e (commit)
discards 7b3296a9decf3443b47b386a399aecb5b996c3fa (commit)
discards e71be8357f42713487c158be2493cd245963deb2 (commit)
discards a522b5843859343b885850afff05130fa0b7345b (commit)
discards 1c44466e5983383785eaf25f3b5da01936cd2cad (commit)
discards a013dc7e4f0d91499d7349eee553a771551cd13e (commit)
discards 1a655bf26c6bbec82c13d79e15f9fdd43fb18c1d (commit)
discards 8f4aa687e70abdd7329a2ebf928a175eb529744b (commit)
discards 8d7ac59313253c4858bbe2cbd4db746b1d16e0c8 (commit)
discards 14b624d40b6267456bffbbd516410dd802f143df (commit)
discards 8dce81a50c4aa76f2d2bcb3742c9708e26b5dee1 (commit)
discards 23fa740736501e4cf023a3a94a3f9e567e95c2fa (commit)
discards ea45d699313db83eb9fc941bbd65872e1790b1e2 (commit)
discards aa68e1bfb8b5fe86127c4a3157183f423c9179b4 (commit)
discards 284acd7cf652e9ca0fcd57a230fedcc2a155db67 (commit)
discards 5060bf0df9927f1c4c20640db5d956692818f3cc (commit)
discards 27880e36c02ea90649f0badfb8e88894ede6f6ce (commit)
discards 1530c4fdb9c691ac378ebc67ff3c5775f2f9ee9b (commit)
discards eefe18d01ac964d062e32b9bbc7eed306fee9a8c (commit)
discards 7fa64db2c56414cf9db215a1771266a4f9b6161b (commit)
discards c1ab6401ba557f8ca015eb462f8bbba692c7b88a (commit)
discards 0ec1187c3f9b73dec2078ddfd98d3a248c3a1648 (commit)
discards 0d50b5b6183156ba3dbe1d6fe16588dccfcdc962 (commit)
discards 2923ce9f48f072119e8c5d0771ae3480a2679864 (commit)
discards c65e0ee82ed25c9161dfa63e3d1ff298bd0c4c17 (commit)
discards da1182c77e96a58bb6a829bf825d732cd299f807 (commit)
discards cc48478bd40eed336631339af3c272dd1a1d86bf (commit)
discards 7114c08c46f867f9e668d34b3fddeaf662a9b214 (commit)
discards ad3df9216ff20898eb96353e5d6c52b271a87375 (commit)
discards 77dffc7b9308abb3d331c165c3042ec6ff41992a (commit)
discards 30522a0e950b024366b07c8a25f24a66935600a0 (commit)
discards d99b860348c603932ca80c1c5b80c3e6880c1f35 (commit)
discards 0869894ffaad8b028c4775bafb853535d1183689 (commit)
discards 88474d0ad55e159de59e35a74cf9686d3e641f92 (commit)
discards 60dbf9f65de31341aae26c1804e77be6bf268dd0 (commit)
discards ca3061e04ae8556d3b66fb3391dc309df3c80010 (commit)
discards 56508d6b4085d51c640df635ebfa64ca824cdb94 (commit)
discards fef361d38a31b29fdda31e8a5eda13c908c04966 (commit)
discards b25bb303ff9c56f3b4349ba1e1cab399acf404f5 (commit)
discards 8e51498f431f233a38da76c169ea2e897e91f051 (commit)
discards 63517baef2f2e2d2e1ea10d4952189ef377da635 (commit)
discards ee10e6bbf12c994dd6335560acac2327392ba3b0 (commit)
discards 07b0b88d99a07be66b564d7d8cd76fbaf27e5909 (commit)
discards e4f90e4d14a89ff08ba654333d316cd3e5e97225 (commit)
discards 0b16f3e4e4beccf000f04f63129276e0716c141c (commit)
discards 73b46d8b02451bb782f7262253778593181fd02d (commit)
discards 1b30b40e3b83876ef1d96bcbffb9356dce94c5b7 (commit)
discards d9ef4f277d847a62e17f944cd42a892eb00aab67 (commit)
discards 68d7d4f2b6fce5f755f4ccecc978a8b16d73c802 (commit)
discards eb8d11d83e21d6554dd69784254898c7454a2727 (commit)
discards 0f9882d559fc996a260fb6e00254fd9d5f7de218 (commit)
discards 254c4352cf3f0fba7861bd51b42498e996036227 (commit)
discards cf97dfcc0a04149057688f302252d696c2d575fa (commit)
discards 12a671a65b2cc1ec7431f8cd8b024cf70f269f7a (commit)
discards 8c67902ccd67de79c0c86c85116c46b5dcfd82bb (commit)
discards 3052322166d7f44270479f7421275732582542e0 (commit)
discards 9022eaebb83e9ab8c53e0ca61c1b44841f0cc809 (commit)
discards 3c37f8e662a46e78b00e498ab7be26165018f87c (commit)
discards 3de7432a187e32c3ca0dd3fb94b3d52c7563dc04 (commit)
discards 565c31196f3ccd4346c8a1ac1e19b1deda22eea6 (commit)
discards 7cf1194ba3e69a861a7f88740cd6490193339e67 (commit)
discards 61a24458626de4c717cd8999ae57b1c573e3e160 (commit)
discards f7878b7f8780cb2e17078486b2239661be6aeedc (commit)
discards 01f16229133a6f5fda6d92b7f86edda5e08dac8f (commit)
discards ac72156ab86f90457a2907620cb2df5733c57184 (commit)
discards 7c797619a3bf856a5f3cee92fb40722c6e771336 (commit)
discards 38770e0e9fd272061c2bf80ee8bcce25c2611e6a (commit)
discards 043ff05cfca336d99cf40480e93ec4cae06a7fdd (commit)
discards d7e56bd7e8744b54af33675cb875859ca9d053ea (commit)
discards 327681d1eee19c941e33beaef75359ea72b0c2f9 (commit)
discards 9336a8a8a11983f28e61db93b9e2262eff793f39 (commit)
discards 79df4374ee643ae843e93abc14b80145c9154730 (commit)
discards 346ebae28e3a179b51fe79948212006a09d6940e (commit)
discards 6882ae221f4dcecda48162696b8dd756828b37c3 (commit)
discards 05e6f52d0cf418ee7d727b5e034d45a60a246ba3 (commit)
discards 9b75f439dcdfeb850487c59863c680c9edd772a1 (commit)
discards 06f6b31925a51c5100017c4a05bba430d29252a4 (commit)
discards b70e4ce5d5ea65e788a172e757af5c07b016fe4f (commit)
discards 2d816cf50f92cb6d984a8821f6c6ddd621f5140b (commit)
discards 17341ef86b68cab0352535d12c5925814609e4d2 (commit)
discards bae79452d0fb211338eb2a327f3679ebc5210ca2 (commit)
discards 32468a5b95280d671f4648dea6d2ff10d171378d (commit)
discards 35532d9eec8564a63e91af98a08117203e859d97 (commit)
discards 1d1cdff657785ca3c9d930d1cf3ef462d98a1b24 (commit)
discards d9b0e6003e265412de46174ea0e2705a8e872f2b (commit)
discards 45ab2f46c34ab5336768d095c0232f24efb6e04d (commit)
discards 1ae46a07c7f2d1453a82a6dad0c546b224485e64 (commit)
discards 61ed4ec992c01d2bb7c052983de9f917d3407edc (commit)
discards 95620c2efb18c5a89cc88995f5c1a09a054d56a9 (commit)
discards 2da54dc5851ad6bf0837e344b880bda01fc466a0 (commit)
discards 3d678bcc827a44ff27152db3dc5f1b0084f1e101 (commit)
discards aa4d046ce529b28b783c4b8044983d8676a2e7ca (commit)
discards 35f3cb05ac5c3319072b0f7f86b49f244e3cc3e7 (commit)
discards 84dd5e80955f6fd3c5a26c0eb752c765e5011c5c (commit)
discards b67325fa722e4cf3d2e68cffa401ee1304866396 (commit)
discards 2e7d59676bf6098d0c3f277e450ffd38a0bf938d (commit)
discards 9e4360d3c323f6d9b25055b142b87295236bc648 (commit)
discards 8ada64214ea1a4cf63748b32a7edaa2317709f7d (commit)
discards 17a7412040734fa4b906b1e6c5743ddc41bacf2f (commit)
discards 722ec7495f5ad00a404c31d8b72052be41a780da (commit)
discards 874f78e7f17a15da00e4aad0c9bc14620330dded (commit)
discards e688effcdc8513fe5acc24116857c71edb306267 (commit)
discards 2147a538d2b64392a2c86e90a123c47e7efaac24 (commit)
discards 1815b1113c4983d8e2666c76ac23401086440c01 (commit)
discards 760afe4c383fc4136c949c26b0f3b14169e1e2ca (commit)
discards 6ed1a66a9ea26c74a60643c1c99191f2bb43a1c5 (commit)
discards be1ff42eb4380029fa7118b9fb822fa88dfb1594 (commit)
discards 984848632f6d553a26661058c9b3561303e8d747 (commit)
discards e1f0d735e521b0487b2cd8bb179054932ca58201 (commit)
discards fcfdab3ca5bc40939aed45fcabf436ff3c3a67bb (commit)
via b3be671ae96bd42f5456214479ec9769c0a5c637 (commit)
via 7190a6d2a1fd99d56ce4b24c8c1d5c54edbdab29 (commit)
via 7651e3171c71d9abafb67cbc59ff59a6d7df563a (commit)
via ea02acfb2bd196d9fdcc1d7afee2119eab46263a (commit)
via ed99c3e8084fd3ae4a60652dc0348a5735a04dd4 (commit)
via b257d006610748cb133c352f9ce95b61d1c66e17 (commit)
via e48fa087b669f02e57b8f209e1f2e1d56b85ba05 (commit)
This update added new revisions after undoing existing revisions. That is
to say, the old revision is not a strict subset of the new revision. This
situation occurs when you --force push a change and generate a repository
containing something like this:

 * -- * -- B -- O -- O -- O (da8714892b6dc603737395547cbcc13fe91c2ee5)
            \
             N -- N -- N (b3be671ae96bd42f5456214479ec9769c0a5c637)

When this happens we assume that you've already had alert emails for all
of the O revisions, and so we here report only the revisions in the N
branch from the common base, B.

Those revisions listed above that are new to this repository have
not appeared on any other notification email; so we list those
revisions in full, below.
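For readers unfamiliar with how such a history arises, here is a minimal,
hypothetical shell sketch that produces the B / O / N shape drawn above.
The branch name and the "B" placeholder are illustrative, not taken from
this push:

    git checkout master                  # tip is currently the last O commit
    git reset --hard B                   # B = the common base commit (a real SHA in practice)
    git commit --allow-empty -m 'N1'     # create the new N commits...
    git commit --allow-empty -m 'N2'
    git push --force origin master       # old O commits are discarded; only the N commits are reported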
commit b3be671ae96bd42f5456214479ec9769c0a5c637
Author: Tom Clegg <tom at curoverse.com>
Date: Wed Mar 9 13:37:48 2016 -0500

    8491: Prohibit activesupport 5.0.0 so our gem can be installed with ruby <2.2.2
diff --git a/sdk/ruby/arvados.gemspec b/sdk/ruby/arvados.gemspec
index 3adcf4d..ffbd0ec 100644
--- a/sdk/ruby/arvados.gemspec
+++ b/sdk/ruby/arvados.gemspec
@@ -20,7 +20,8 @@ Gem::Specification.new do |s|
"README", "LICENSE-2.0.txt"]
s.required_ruby_version = '>= 2.1.0'
s.add_dependency('google-api-client', '~> 0.6.3', '>= 0.6.3')
- s.add_dependency('activesupport', '>= 3.2.13')
+ # activesupport <5 only because https://dev.arvados.org/issues/8222
+ s.add_dependency('activesupport', '>= 3.2.13', '< 5.0.0')
s.add_dependency('json', '~> 1.7', '>= 1.7.7')
s.add_dependency('andand', '~> 1.3', '>= 1.3.3')
s.add_runtime_dependency('jwt', '>= 0.1.5', '< 1.0.0')
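A quick, hypothetical way to sanity-check the tightened constraint locally
(standard RubyGems commands; nothing below is part of the commit itself, and
the built gem's filename depends on the version the gemspec declares):

    cd sdk/ruby
    gem build arvados.gemspec       # builds arvados-<version>.gem from this spec
    gem install ./arvados-*.gem     # dependency resolution now stays below activesupport 5.0.0,
                                    # so installation can succeed on ruby < 2.2.2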
commit 7190a6d2a1fd99d56ce4b24c8c1d5c54edbdab29
Author: Tom Clegg <tom at curoverse.com>
Date: Wed Mar 9 13:36:49 2016 -0500

    8491: Remove git-commit.version before running api server tests
diff --git a/build/run-tests.sh b/build/run-tests.sh
index db2306c..c77c918 100755
--- a/build/run-tests.sh
+++ b/build/run-tests.sh
@@ -766,6 +766,7 @@ do_test doc doclinkchecker
stop_services
test_apiserver() {
+ rm -f "$WORKSPACE/services/api/git-commit.version"
cd "$WORKSPACE/services/api" \
&& RAILS_ENV=test bundle exec rake test TESTOPTS=-v ${testargs[services/api]}
}
commit 7651e3171c71d9abafb67cbc59ff59a6d7df563a
Author: Tom Clegg <tom at curoverse.com>
Date: Wed Mar 9 13:36:16 2016 -0500

    8491: --retry applies to do_install too
diff --git a/build/run-tests.sh b/build/run-tests.sh
index a17d610..db2306c 100755
--- a/build/run-tests.sh
+++ b/build/run-tests.sh
@@ -481,8 +481,8 @@ timer() {
echo -n "$(($SECONDS - $t0))s"
}
-do_test() {
- while ! do_test_once ${@} && [[ "$retry" == 1 ]]
+retry() {
+ while ! ${@} && [[ "$retry" == 1 ]]
do
read -p 'Try again? [Y/n] ' x
if [[ "$x" != "y" ]] && [[ "$x" != "" ]]
@@ -492,6 +492,10 @@ do_test() {
done
}
+do_test() {
+ retry do_test_once ${@}
+}
+
do_test_once() {
unset result
if [[ -z "${skip[$1]}" ]] && ( [[ -z "$only" ]] || [[ "$only" == "$1" ]] )
@@ -544,6 +548,10 @@ do_test_once() {
}
do_install() {
+ retry do_install_once ${@}
+}
+
+do_install_once() {
if [[ -z "$skip_install" || (-n "$only_install" && "$only_install" == "$1") ]]
then
title "Running $1 install"
commit ea02acfb2bd196d9fdcc1d7afee2119eab46263a
Author: Tom Clegg <tom at curoverse.com>
Date: Tue Mar 8 16:37:42 2016 -0500

    8491: move files from arvados-dev into their new places
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..c92213f
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,7 @@
+export WORKSPACE?=$(shell pwd)
+test:
+ build/run-tests.sh
+packages:
+ build/run-build-packages-all-targets.sh
+test-packages:
+ build/run-build-packages-all-targets.sh --test-packages
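A brief usage sketch for the new Makefile targets, assuming a repository
checkout as the working directory (WORKSPACE defaults to the current
directory via the export line above):

    make test              # runs build/run-tests.sh
    make packages          # runs build/run-build-packages-all-targets.sh
    make test-packages     # runs the same script with --test-packages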
diff --git a/build/COPYING b/build/COPYING
deleted file mode 100644
index af63e41..0000000
--- a/build/COPYING
+++ /dev/null
@@ -1,2 +0,0 @@
-This code is licenced under the GNU Affero General Public License version 3
-(see agpl-3.0.txt)
diff --git a/build/README b/build/README
deleted file mode 100644
index b076f0b..0000000
--- a/build/README
+++ /dev/null
@@ -1,30 +0,0 @@
-Welcome to Arvados!
-
-This is the arvados-dev source tree. It contains scripts that can be useful
-if you want to hack on Arvados itself.
-
-If you are interested in using Arvados or setting up your own Arvados
-installation, you most likely do not need this source tree.
-
-For the Arvados source code, check out the git repository at
- https://github.com/curoverse/arvados
-
-The main Arvados web site is
- https://arvados.org
-
-The Arvados public wiki is located at
- https://arvados.org/projects/arvados/wiki
-
-The Arvados public bug tracker is located at
- https://arvados.org/projects/arvados/issues
-
-For support see
- http://doc.arvados.org/user/getting_started/community.html
-
-Installation documentation is located at
- http://doc.arvados.org/install
-
-If you wish to build the documentation yourself, follow the instructions in
-doc/README to build the documentation, then consult the "Install Guide".
-
-See COPYING for information about Arvados Free Software licenses.
diff --git a/build/agpl-3.0.txt b/build/agpl-3.0.txt
deleted file mode 100644
index dba13ed..0000000
--- a/build/agpl-3.0.txt
+++ /dev/null
@@ -1,661 +0,0 @@
- GNU AFFERO GENERAL PUBLIC LICENSE
- Version 3, 19 November 2007
-
- Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
- Everyone is permitted to copy and distribute verbatim copies
- of this license document, but changing it is not allowed.
-
- Preamble
-
- The GNU Affero General Public License is a free, copyleft license for
-software and other kinds of works, specifically designed to ensure
-cooperation with the community in the case of network server software.
-
- The licenses for most software and other practical works are designed
-to take away your freedom to share and change the works. By contrast,
-our General Public Licenses are intended to guarantee your freedom to
-share and change all versions of a program--to make sure it remains free
-software for all its users.
-
- When we speak of free software, we are referring to freedom, not
-price. Our General Public Licenses are designed to make sure that you
-have the freedom to distribute copies of free software (and charge for
-them if you wish), that you receive source code or can get it if you
-want it, that you can change the software or use pieces of it in new
-free programs, and that you know you can do these things.
-
- Developers that use our General Public Licenses protect your rights
-with two steps: (1) assert copyright on the software, and (2) offer
-you this License which gives you legal permission to copy, distribute
-and/or modify the software.
-
- A secondary benefit of defending all users' freedom is that
-improvements made in alternate versions of the program, if they
-receive widespread use, become available for other developers to
-incorporate. Many developers of free software are heartened and
-encouraged by the resulting cooperation. However, in the case of
-software used on network servers, this result may fail to come about.
-The GNU General Public License permits making a modified version and
-letting the public access it on a server without ever releasing its
-source code to the public.
-
- The GNU Affero General Public License is designed specifically to
-ensure that, in such cases, the modified source code becomes available
-to the community. It requires the operator of a network server to
-provide the source code of the modified version running there to the
-users of that server. Therefore, public use of a modified version, on
-a publicly accessible server, gives the public access to the source
-code of the modified version.
-
- An older license, called the Affero General Public License and
-published by Affero, was designed to accomplish similar goals. This is
-a different license, not a version of the Affero GPL, but Affero has
-released a new version of the Affero GPL which permits relicensing under
-this license.
-
- The precise terms and conditions for copying, distribution and
-modification follow.
-
- TERMS AND CONDITIONS
-
- 0. Definitions.
-
- "This License" refers to version 3 of the GNU Affero General Public License.
-
- "Copyright" also means copyright-like laws that apply to other kinds of
-works, such as semiconductor masks.
-
- "The Program" refers to any copyrightable work licensed under this
-License. Each licensee is addressed as "you". "Licensees" and
-"recipients" may be individuals or organizations.
-
- To "modify" a work means to copy from or adapt all or part of the work
-in a fashion requiring copyright permission, other than the making of an
-exact copy. The resulting work is called a "modified version" of the
-earlier work or a work "based on" the earlier work.
-
- A "covered work" means either the unmodified Program or a work based
-on the Program.
-
- To "propagate" a work means to do anything with it that, without
-permission, would make you directly or secondarily liable for
-infringement under applicable copyright law, except executing it on a
-computer or modifying a private copy. Propagation includes copying,
-distribution (with or without modification), making available to the
-public, and in some countries other activities as well.
-
- To "convey" a work means any kind of propagation that enables other
-parties to make or receive copies. Mere interaction with a user through
-a computer network, with no transfer of a copy, is not conveying.
-
- An interactive user interface displays "Appropriate Legal Notices"
-to the extent that it includes a convenient and prominently visible
-feature that (1) displays an appropriate copyright notice, and (2)
-tells the user that there is no warranty for the work (except to the
-extent that warranties are provided), that licensees may convey the
-work under this License, and how to view a copy of this License. If
-the interface presents a list of user commands or options, such as a
-menu, a prominent item in the list meets this criterion.
-
- 1. Source Code.
-
- The "source code" for a work means the preferred form of the work
-for making modifications to it. "Object code" means any non-source
-form of a work.
-
- A "Standard Interface" means an interface that either is an official
-standard defined by a recognized standards body, or, in the case of
-interfaces specified for a particular programming language, one that
-is widely used among developers working in that language.
-
- The "System Libraries" of an executable work include anything, other
-than the work as a whole, that (a) is included in the normal form of
-packaging a Major Component, but which is not part of that Major
-Component, and (b) serves only to enable use of the work with that
-Major Component, or to implement a Standard Interface for which an
-implementation is available to the public in source code form. A
-"Major Component", in this context, means a major essential component
-(kernel, window system, and so on) of the specific operating system
-(if any) on which the executable work runs, or a compiler used to
-produce the work, or an object code interpreter used to run it.
-
- The "Corresponding Source" for a work in object code form means all
-the source code needed to generate, install, and (for an executable
-work) run the object code and to modify the work, including scripts to
-control those activities. However, it does not include the work's
-System Libraries, or general-purpose tools or generally available free
-programs which are used unmodified in performing those activities but
-which are not part of the work. For example, Corresponding Source
-includes interface definition files associated with source files for
-the work, and the source code for shared libraries and dynamically
-linked subprograms that the work is specifically designed to require,
-such as by intimate data communication or control flow between those
-subprograms and other parts of the work.
-
- The Corresponding Source need not include anything that users
-can regenerate automatically from other parts of the Corresponding
-Source.
-
- The Corresponding Source for a work in source code form is that
-same work.
-
- 2. Basic Permissions.
-
- All rights granted under this License are granted for the term of
-copyright on the Program, and are irrevocable provided the stated
-conditions are met. This License explicitly affirms your unlimited
-permission to run the unmodified Program. The output from running a
-covered work is covered by this License only if the output, given its
-content, constitutes a covered work. This License acknowledges your
-rights of fair use or other equivalent, as provided by copyright law.
-
- You may make, run and propagate covered works that you do not
-convey, without conditions so long as your license otherwise remains
-in force. You may convey covered works to others for the sole purpose
-of having them make modifications exclusively for you, or provide you
-with facilities for running those works, provided that you comply with
-the terms of this License in conveying all material for which you do
-not control copyright. Those thus making or running the covered works
-for you must do so exclusively on your behalf, under your direction
-and control, on terms that prohibit them from making any copies of
-your copyrighted material outside their relationship with you.
-
- Conveying under any other circumstances is permitted solely under
-the conditions stated below. Sublicensing is not allowed; section 10
-makes it unnecessary.
-
- 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
-
- No covered work shall be deemed part of an effective technological
-measure under any applicable law fulfilling obligations under article
-11 of the WIPO copyright treaty adopted on 20 December 1996, or
-similar laws prohibiting or restricting circumvention of such
-measures.
-
- When you convey a covered work, you waive any legal power to forbid
-circumvention of technological measures to the extent such circumvention
-is effected by exercising rights under this License with respect to
-the covered work, and you disclaim any intention to limit operation or
-modification of the work as a means of enforcing, against the work's
-users, your or third parties' legal rights to forbid circumvention of
-technological measures.
-
- 4. Conveying Verbatim Copies.
-
- You may convey verbatim copies of the Program's source code as you
-receive it, in any medium, provided that you conspicuously and
-appropriately publish on each copy an appropriate copyright notice;
-keep intact all notices stating that this License and any
-non-permissive terms added in accord with section 7 apply to the code;
-keep intact all notices of the absence of any warranty; and give all
-recipients a copy of this License along with the Program.
-
- You may charge any price or no price for each copy that you convey,
-and you may offer support or warranty protection for a fee.
-
- 5. Conveying Modified Source Versions.
-
- You may convey a work based on the Program, or the modifications to
-produce it from the Program, in the form of source code under the
-terms of section 4, provided that you also meet all of these conditions:
-
- a) The work must carry prominent notices stating that you modified
- it, and giving a relevant date.
-
- b) The work must carry prominent notices stating that it is
- released under this License and any conditions added under section
- 7. This requirement modifies the requirement in section 4 to
- "keep intact all notices".
-
- c) You must license the entire work, as a whole, under this
- License to anyone who comes into possession of a copy. This
- License will therefore apply, along with any applicable section 7
- additional terms, to the whole of the work, and all its parts,
- regardless of how they are packaged. This License gives no
- permission to license the work in any other way, but it does not
- invalidate such permission if you have separately received it.
-
- d) If the work has interactive user interfaces, each must display
- Appropriate Legal Notices; however, if the Program has interactive
- interfaces that do not display Appropriate Legal Notices, your
- work need not make them do so.
-
- A compilation of a covered work with other separate and independent
-works, which are not by their nature extensions of the covered work,
-and which are not combined with it such as to form a larger program,
-in or on a volume of a storage or distribution medium, is called an
-"aggregate" if the compilation and its resulting copyright are not
-used to limit the access or legal rights of the compilation's users
-beyond what the individual works permit. Inclusion of a covered work
-in an aggregate does not cause this License to apply to the other
-parts of the aggregate.
-
- 6. Conveying Non-Source Forms.
-
- You may convey a covered work in object code form under the terms
-of sections 4 and 5, provided that you also convey the
-machine-readable Corresponding Source under the terms of this License,
-in one of these ways:
-
- a) Convey the object code in, or embodied in, a physical product
- (including a physical distribution medium), accompanied by the
- Corresponding Source fixed on a durable physical medium
- customarily used for software interchange.
-
- b) Convey the object code in, or embodied in, a physical product
- (including a physical distribution medium), accompanied by a
- written offer, valid for at least three years and valid for as
- long as you offer spare parts or customer support for that product
- model, to give anyone who possesses the object code either (1) a
- copy of the Corresponding Source for all the software in the
- product that is covered by this License, on a durable physical
- medium customarily used for software interchange, for a price no
- more than your reasonable cost of physically performing this
- conveying of source, or (2) access to copy the
- Corresponding Source from a network server at no charge.
-
- c) Convey individual copies of the object code with a copy of the
- written offer to provide the Corresponding Source. This
- alternative is allowed only occasionally and noncommercially, and
- only if you received the object code with such an offer, in accord
- with subsection 6b.
-
- d) Convey the object code by offering access from a designated
- place (gratis or for a charge), and offer equivalent access to the
- Corresponding Source in the same way through the same place at no
- further charge. You need not require recipients to copy the
- Corresponding Source along with the object code. If the place to
- copy the object code is a network server, the Corresponding Source
- may be on a different server (operated by you or a third party)
- that supports equivalent copying facilities, provided you maintain
- clear directions next to the object code saying where to find the
- Corresponding Source. Regardless of what server hosts the
- Corresponding Source, you remain obligated to ensure that it is
- available for as long as needed to satisfy these requirements.
-
- e) Convey the object code using peer-to-peer transmission, provided
- you inform other peers where the object code and Corresponding
- Source of the work are being offered to the general public at no
- charge under subsection 6d.
-
- A separable portion of the object code, whose source code is excluded
-from the Corresponding Source as a System Library, need not be
-included in conveying the object code work.
-
- A "User Product" is either (1) a "consumer product", which means any
-tangible personal property which is normally used for personal, family,
-or household purposes, or (2) anything designed or sold for incorporation
-into a dwelling. In determining whether a product is a consumer product,
-doubtful cases shall be resolved in favor of coverage. For a particular
-product received by a particular user, "normally used" refers to a
-typical or common use of that class of product, regardless of the status
-of the particular user or of the way in which the particular user
-actually uses, or expects or is expected to use, the product. A product
-is a consumer product regardless of whether the product has substantial
-commercial, industrial or non-consumer uses, unless such uses represent
-the only significant mode of use of the product.
-
- "Installation Information" for a User Product means any methods,
-procedures, authorization keys, or other information required to install
-and execute modified versions of a covered work in that User Product from
-a modified version of its Corresponding Source. The information must
-suffice to ensure that the continued functioning of the modified object
-code is in no case prevented or interfered with solely because
-modification has been made.
-
- If you convey an object code work under this section in, or with, or
-specifically for use in, a User Product, and the conveying occurs as
-part of a transaction in which the right of possession and use of the
-User Product is transferred to the recipient in perpetuity or for a
-fixed term (regardless of how the transaction is characterized), the
-Corresponding Source conveyed under this section must be accompanied
-by the Installation Information. But this requirement does not apply
-if neither you nor any third party retains the ability to install
-modified object code on the User Product (for example, the work has
-been installed in ROM).
-
- The requirement to provide Installation Information does not include a
-requirement to continue to provide support service, warranty, or updates
-for a work that has been modified or installed by the recipient, or for
-the User Product in which it has been modified or installed. Access to a
-network may be denied when the modification itself materially and
-adversely affects the operation of the network or violates the rules and
-protocols for communication across the network.
-
- Corresponding Source conveyed, and Installation Information provided,
-in accord with this section must be in a format that is publicly
-documented (and with an implementation available to the public in
-source code form), and must require no special password or key for
-unpacking, reading or copying.
-
- 7. Additional Terms.
-
- "Additional permissions" are terms that supplement the terms of this
-License by making exceptions from one or more of its conditions.
-Additional permissions that are applicable to the entire Program shall
-be treated as though they were included in this License, to the extent
-that they are valid under applicable law. If additional permissions
-apply only to part of the Program, that part may be used separately
-under those permissions, but the entire Program remains governed by
-this License without regard to the additional permissions.
-
- When you convey a copy of a covered work, you may at your option
-remove any additional permissions from that copy, or from any part of
-it. (Additional permissions may be written to require their own
-removal in certain cases when you modify the work.) You may place
-additional permissions on material, added by you to a covered work,
-for which you have or can give appropriate copyright permission.
-
- Notwithstanding any other provision of this License, for material you
-add to a covered work, you may (if authorized by the copyright holders of
-that material) supplement the terms of this License with terms:
-
- a) Disclaiming warranty or limiting liability differently from the
- terms of sections 15 and 16 of this License; or
-
- b) Requiring preservation of specified reasonable legal notices or
- author attributions in that material or in the Appropriate Legal
- Notices displayed by works containing it; or
-
- c) Prohibiting misrepresentation of the origin of that material, or
- requiring that modified versions of such material be marked in
- reasonable ways as different from the original version; or
-
- d) Limiting the use for publicity purposes of names of licensors or
- authors of the material; or
-
- e) Declining to grant rights under trademark law for use of some
- trade names, trademarks, or service marks; or
-
- f) Requiring indemnification of licensors and authors of that
- material by anyone who conveys the material (or modified versions of
- it) with contractual assumptions of liability to the recipient, for
- any liability that these contractual assumptions directly impose on
- those licensors and authors.
-
- All other non-permissive additional terms are considered "further
-restrictions" within the meaning of section 10. If the Program as you
-received it, or any part of it, contains a notice stating that it is
-governed by this License along with a term that is a further
-restriction, you may remove that term. If a license document contains
-a further restriction but permits relicensing or conveying under this
-License, you may add to a covered work material governed by the terms
-of that license document, provided that the further restriction does
-not survive such relicensing or conveying.
-
- If you add terms to a covered work in accord with this section, you
-must place, in the relevant source files, a statement of the
-additional terms that apply to those files, or a notice indicating
-where to find the applicable terms.
-
- Additional terms, permissive or non-permissive, may be stated in the
-form of a separately written license, or stated as exceptions;
-the above requirements apply either way.
-
- 8. Termination.
-
- You may not propagate or modify a covered work except as expressly
-provided under this License. Any attempt otherwise to propagate or
-modify it is void, and will automatically terminate your rights under
-this License (including any patent licenses granted under the third
-paragraph of section 11).
-
- However, if you cease all violation of this License, then your
-license from a particular copyright holder is reinstated (a)
-provisionally, unless and until the copyright holder explicitly and
-finally terminates your license, and (b) permanently, if the copyright
-holder fails to notify you of the violation by some reasonable means
-prior to 60 days after the cessation.
-
- Moreover, your license from a particular copyright holder is
-reinstated permanently if the copyright holder notifies you of the
-violation by some reasonable means, this is the first time you have
-received notice of violation of this License (for any work) from that
-copyright holder, and you cure the violation prior to 30 days after
-your receipt of the notice.
-
- Termination of your rights under this section does not terminate the
-licenses of parties who have received copies or rights from you under
-this License. If your rights have been terminated and not permanently
-reinstated, you do not qualify to receive new licenses for the same
-material under section 10.
-
- 9. Acceptance Not Required for Having Copies.
-
- You are not required to accept this License in order to receive or
-run a copy of the Program. Ancillary propagation of a covered work
-occurring solely as a consequence of using peer-to-peer transmission
-to receive a copy likewise does not require acceptance. However,
-nothing other than this License grants you permission to propagate or
-modify any covered work. These actions infringe copyright if you do
-not accept this License. Therefore, by modifying or propagating a
-covered work, you indicate your acceptance of this License to do so.
-
- 10. Automatic Licensing of Downstream Recipients.
-
- Each time you convey a covered work, the recipient automatically
-receives a license from the original licensors, to run, modify and
-propagate that work, subject to this License. You are not responsible
-for enforcing compliance by third parties with this License.
-
- An "entity transaction" is a transaction transferring control of an
-organization, or substantially all assets of one, or subdividing an
-organization, or merging organizations. If propagation of a covered
-work results from an entity transaction, each party to that
-transaction who receives a copy of the work also receives whatever
-licenses to the work the party's predecessor in interest had or could
-give under the previous paragraph, plus a right to possession of the
-Corresponding Source of the work from the predecessor in interest, if
-the predecessor has it or can get it with reasonable efforts.
-
- You may not impose any further restrictions on the exercise of the
-rights granted or affirmed under this License. For example, you may
-not impose a license fee, royalty, or other charge for exercise of
-rights granted under this License, and you may not initiate litigation
-(including a cross-claim or counterclaim in a lawsuit) alleging that
-any patent claim is infringed by making, using, selling, offering for
-sale, or importing the Program or any portion of it.
-
- 11. Patents.
-
- A "contributor" is a copyright holder who authorizes use under this
-License of the Program or a work on which the Program is based. The
-work thus licensed is called the contributor's "contributor version".
-
- A contributor's "essential patent claims" are all patent claims
-owned or controlled by the contributor, whether already acquired or
-hereafter acquired, that would be infringed by some manner, permitted
-by this License, of making, using, or selling its contributor version,
-but do not include claims that would be infringed only as a
-consequence of further modification of the contributor version. For
-purposes of this definition, "control" includes the right to grant
-patent sublicenses in a manner consistent with the requirements of
-this License.
-
- Each contributor grants you a non-exclusive, worldwide, royalty-free
-patent license under the contributor's essential patent claims, to
-make, use, sell, offer for sale, import and otherwise run, modify and
-propagate the contents of its contributor version.
-
- In the following three paragraphs, a "patent license" is any express
-agreement or commitment, however denominated, not to enforce a patent
-(such as an express permission to practice a patent or covenant not to
-sue for patent infringement). To "grant" such a patent license to a
-party means to make such an agreement or commitment not to enforce a
-patent against the party.
-
- If you convey a covered work, knowingly relying on a patent license,
-and the Corresponding Source of the work is not available for anyone
-to copy, free of charge and under the terms of this License, through a
-publicly available network server or other readily accessible means,
-then you must either (1) cause the Corresponding Source to be so
-available, or (2) arrange to deprive yourself of the benefit of the
-patent license for this particular work, or (3) arrange, in a manner
-consistent with the requirements of this License, to extend the patent
-license to downstream recipients. "Knowingly relying" means you have
-actual knowledge that, but for the patent license, your conveying the
-covered work in a country, or your recipient's use of the covered work
-in a country, would infringe one or more identifiable patents in that
-country that you have reason to believe are valid.
-
- If, pursuant to or in connection with a single transaction or
-arrangement, you convey, or propagate by procuring conveyance of, a
-covered work, and grant a patent license to some of the parties
-receiving the covered work authorizing them to use, propagate, modify
-or convey a specific copy of the covered work, then the patent license
-you grant is automatically extended to all recipients of the covered
-work and works based on it.
-
- A patent license is "discriminatory" if it does not include within
-the scope of its coverage, prohibits the exercise of, or is
-conditioned on the non-exercise of one or more of the rights that are
-specifically granted under this License. You may not convey a covered
-work if you are a party to an arrangement with a third party that is
-in the business of distributing software, under which you make payment
-to the third party based on the extent of your activity of conveying
-the work, and under which the third party grants, to any of the
-parties who would receive the covered work from you, a discriminatory
-patent license (a) in connection with copies of the covered work
-conveyed by you (or copies made from those copies), or (b) primarily
-for and in connection with specific products or compilations that
-contain the covered work, unless you entered into that arrangement,
-or that patent license was granted, prior to 28 March 2007.
-
- Nothing in this License shall be construed as excluding or limiting
-any implied license or other defenses to infringement that may
-otherwise be available to you under applicable patent law.
-
- 12. No Surrender of Others' Freedom.
-
- If conditions are imposed on you (whether by court order, agreement or
-otherwise) that contradict the conditions of this License, they do not
-excuse you from the conditions of this License. If you cannot convey a
-covered work so as to satisfy simultaneously your obligations under this
-License and any other pertinent obligations, then as a consequence you may
-not convey it at all. For example, if you agree to terms that obligate you
-to collect a royalty for further conveying from those to whom you convey
-the Program, the only way you could satisfy both those terms and this
-License would be to refrain entirely from conveying the Program.
-
- 13. Remote Network Interaction; Use with the GNU General Public License.
-
- Notwithstanding any other provision of this License, if you modify the
-Program, your modified version must prominently offer all users
-interacting with it remotely through a computer network (if your version
-supports such interaction) an opportunity to receive the Corresponding
-Source of your version by providing access to the Corresponding Source
-from a network server at no charge, through some standard or customary
-means of facilitating copying of software. This Corresponding Source
-shall include the Corresponding Source for any work covered by version 3
-of the GNU General Public License that is incorporated pursuant to the
-following paragraph.
-
- Notwithstanding any other provision of this License, you have
-permission to link or combine any covered work with a work licensed
-under version 3 of the GNU General Public License into a single
-combined work, and to convey the resulting work. The terms of this
-License will continue to apply to the part which is the covered work,
-but the work with which it is combined will remain governed by version
-3 of the GNU General Public License.
-
- 14. Revised Versions of this License.
-
- The Free Software Foundation may publish revised and/or new versions of
-the GNU Affero General Public License from time to time. Such new versions
-will be similar in spirit to the present version, but may differ in detail to
-address new problems or concerns.
-
- Each version is given a distinguishing version number. If the
-Program specifies that a certain numbered version of the GNU Affero General
-Public License "or any later version" applies to it, you have the
-option of following the terms and conditions either of that numbered
-version or of any later version published by the Free Software
-Foundation. If the Program does not specify a version number of the
-GNU Affero General Public License, you may choose any version ever published
-by the Free Software Foundation.
-
- If the Program specifies that a proxy can decide which future
-versions of the GNU Affero General Public License can be used, that proxy's
-public statement of acceptance of a version permanently authorizes you
-to choose that version for the Program.
-
- Later license versions may give you additional or different
-permissions. However, no additional obligations are imposed on any
-author or copyright holder as a result of your choosing to follow a
-later version.
-
- 15. Disclaimer of Warranty.
-
- THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
-APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
-HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
-OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
-IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
-ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
-
- 16. Limitation of Liability.
-
- IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
-WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
-THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
-GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
-USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
-DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
-PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
-EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
-SUCH DAMAGES.
-
- 17. Interpretation of Sections 15 and 16.
-
- If the disclaimer of warranty and limitation of liability provided
-above cannot be given local legal effect according to their terms,
-reviewing courts shall apply local law that most closely approximates
-an absolute waiver of all civil liability in connection with the
-Program, unless a warranty or assumption of liability accompanies a
-copy of the Program in return for a fee.
-
- END OF TERMS AND CONDITIONS
-
- How to Apply These Terms to Your New Programs
-
- If you develop a new program, and you want it to be of the greatest
-possible use to the public, the best way to achieve this is to make it
-free software which everyone can redistribute and change under these terms.
-
- To do so, attach the following notices to the program. It is safest
-to attach them to the start of each source file to most effectively
-state the exclusion of warranty; and each file should have at least
-the "copyright" line and a pointer to where the full notice is found.
-
- <one line to give the program's name and a brief idea of what it does.>
- Copyright (C) <year> <name of author>
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU Affero General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU Affero General Public License for more details.
-
- You should have received a copy of the GNU Affero General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-Also add information on how to contact you by electronic and paper mail.
-
- If your software can interact with users remotely through a computer
-network, you should also make sure that it provides a way for users to
-get its source. For example, if your program is a web application, its
-interface could display a "Source" link that leads users to an archive
-of the code. There are many ways you could offer source, and different
-solutions will be better for different programs; see section 13 for the
-specific requirements.
-
- You should also get your employer (if you work as a programmer) or school,
-if any, to sign a "copyright disclaimer" for the program, if necessary.
-For more information on this, and how to apply and follow the GNU AGPL, see
-<http://www.gnu.org/licenses/>.
diff --git a/build/jenkins/create-plot-data-from-log.sh b/build/create-plot-data-from-log.sh
similarity index 100%
rename from build/jenkins/create-plot-data-from-log.sh
rename to build/create-plot-data-from-log.sh
diff --git a/build/git/hooks/coding-standards.sh b/build/git/hooks/coding-standards.sh
deleted file mode 100755
index d4e4c71..0000000
--- a/build/git/hooks/coding-standards.sh
+++ /dev/null
@@ -1,128 +0,0 @@
-#!/usr/bin/env ruby
-
-# This script can be installed as a git update hook.
-
-# It can also be installed as a gitolite 'hooklet' in the
-# hooks/common/update.secondary.d/ directory.
-
-# NOTE: this script runs under the same assumptions as the 'update' hook, so
-# the starting directory must be maintained and arguments must be passed on.
-
-$refname = ARGV[0]
-$oldrev = ARGV[1]
-$newrev = ARGV[2]
-$user = ENV['USER']
-
-def blacklist bl
- all_revs = `git rev-list #{$oldrev}..#{$newrev}`.split("\n")
- all_revs.each do |rev|
- bl.each do |b|
- if rev == b
- puts "Revision #{b} is blacklisted, you must remove it from your branch (possibly using git rebase) before you can push."
- exit 1
- end
- end
- end
-end
-
-blacklist ['26d74dc0524c87c5dcc0c76040ce413a4848b57a']
-
-# Only enforce policy on the master branch
-exit 0 if $refname != 'refs/heads/master'
-
-puts "Enforcing Policies... \n(#{$refname}) (#{$oldrev[0,6]}) (#{$newrev[0,6]})"
-
-$regex = /\[ref: (\d+)\]/
-
-$broken_commit_message = /Please enter a commit message to explain why this merge is necessary/
-$wrong_way_merge_master = /Merge( remote-tracking)? branch '([^\/]+\/)?master' into/
-$merge_master = /Merge branch '[^']+'((?! into)| into master)/
-$pull_merge = /Merge branch 'master' of /
-$refs_or_closes_or_no_issue = /(refs #|closes #|fixes #|no issue #)/i
-
-# enforced custom commit message format
-def check_message_format
- all_revs = `git rev-list --first-parent #{$oldrev}..#{$newrev}`.split("\n")
- merge_revs = `git rev-list --first-parent --min-parents=2 #{$oldrev}..#{$newrev}`.split("\n")
- # single_revs = `git rev-list --first-parent --max-parents=1 #{$oldrev}..#{$newrev}`.split("\n")
- broken = false
- no_ff = false
-
- merge_revs.each do |rev|
- message = `git cat-file commit #{rev} | sed '1,/^$/d'`
- if $wrong_way_merge_master.match(message)
- puts "\n[POLICY] Only non-fast-forward merges into master are allowed. Please"
- puts "reset your master branch:"
- puts " git reset --hard origin/master"
- puts "and then merge your branch with the --no-ff option:"
- puts " git merge your-branch --no-ff\n"
- puts "Remember to add a reference to an issue number in the merge commit!\n"
- puts "\n******************************************************************\n"
- puts "\nOffending commit: #{rev}\n"
- puts "\nOffending commit message:\n"
- puts message
- puts "\n******************************************************************\n"
- puts "\n\n"
- broken = true
- no_ff = true
- elsif $pull_merge.match(message)
- puts "\n[POLICY] This appears to be a git pull merge of remote master into local"
- puts "master. In order to maintain a linear first-parent history of master,"
- puts "please reset your branch and remerge or rebase using the latest master.\n"
- puts "\n******************************************************************\n"
- puts "\nOffending commit: #{rev}\n"
- puts "\nOffending commit message:\n\n"
- puts message
- puts "\n******************************************************************\n"
- puts "\n\n"
- broken = true
- elsif not $merge_master.match(message)
- puts "\n[POLICY] This does not appear to be a merge of a feature"
- puts "branch into master. Merges must follow the format"
- puts "\"Merge branch 'feature-branch'\".\n"
- puts "\n******************************************************************\n"
- puts "\nOffending commit: #{rev}\n"
- puts "\nOffending commit message:\n\n"
- puts message
- puts "\n******************************************************************\n"
- puts "\n\n"
- broken = true
- end
- end
-
- all_revs.each do |rev|
- message = `git cat-file commit #{rev} | sed '1,/^$/d'`
- if $broken_commit_message.match(message)
- puts "\n[POLICY] Rejected broken commit message for including boilerplate"
- puts "instruction text.\n"
- puts "\n******************************************************************\n"
- puts "\nOffending commit: #{rev}\n"
- puts "\nOffending commit message:\n\n"
- puts message
- puts "\n******************************************************************\n"
- puts "\n\n"
- broken = true
- end
-
- # Do not test when the commit is a no_ff merge (which will be rejected), because
- # this test will complain about *every* commit in the merge otherwise, obscuring
- # the real reason for the rejection (the no_ff merge)
- if not no_ff and not $refs_or_closes_or_no_issue.match(message)
- puts "\n[POLICY] All commits to master must include an issue using \"refs #\" or"
- puts "\"closes #\", or specify \"no issue #\"\n"
- puts "\n******************************************************************\n"
- puts "\nOffending commit: #{rev}\n"
- puts "\nOffending commit message:\n\n"
- puts message
- puts "\n******************************************************************\n"
- puts "\n\n"
- broken = true
- end
- end
-
- if broken
- exit 1
- end
-end
-
-check_message_format
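
The hook deleted above documents two installation modes in its header comments: a plain git "update" hook, or a gitolite hooklet under hooks/common/update.secondary.d/. A minimal installation sketch (the bare-repository path below is an assumption, not taken from this diff):

    # Hypothetical bare repository; adjust the path as needed.
    REPO=/srv/git/arvados.git
    # git calls hooks/update with <refname> <oldrev> <newrev>, matching the
    # ARGV handling at the top of the script.
    install -m 0755 coding-standards.sh "$REPO/hooks/update"
    # Or, for gitolite, install it as a hooklet instead:
    # install -m 0755 coding-standards.sh "$REPO/hooks/common/update.secondary.d/coding-standards"
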
diff --git a/build/install/easy-docker-install.sh b/build/install/easy-docker-install.sh
deleted file mode 100755
index fe6e186..0000000
--- a/build/install/easy-docker-install.sh
+++ /dev/null
@@ -1,87 +0,0 @@
-#!/usr/bin/env bash
-
-# This script is intended to make Arvados installation easy. It will download the
-# latest copy of the Arvados docker images as well as the arvdock command. It
-# then uses arvdock to spin up Arvados on this computer.
-#
-# The latest version of this script is available at http://get.arvados.org, so that this
-# command does the right thing:
-#
-# $ \curl -sSL http://get.arvados.org | bash
-#
-# Prerequisites: working docker installation. Run this script as a user who is a member
-# of the docker group.
-
-COLUMNS=80
-
-fail () {
- title "$*"
- exit 1
-}
-
-title () {
- printf "\n%*s\n\n" $(((${#title}+$COLUMNS)/2)) "********** $1 **********"
-}
-
-docker_pull () {
- $DOCKER pull $*
-
- ECODE=$?
-
- if [[ "$ECODE" != "0" ]]; then
- title "$DOCKER pull $* failed"
- exit $ECODE
- fi
-}
-
-main () {
-
- \which which >/dev/null 2>&1 || fail "Error: could not find 'which' command."
-
- # find the docker binary
- DOCKER=`which docker.io`
-
- if [[ "$DOCKER" == "" ]]; then
- DOCKER=`which docker`
- fi
-
- if [[ "$DOCKER" == "" ]]; then
- fail "Error: you need to have docker installed. Could not find the docker executable."
- fi
-
- echo
- echo "If necessary, this command will download the latest Arvados docker images."
- echo "The download can take a long time, depending on the speed of your internet connection."
- echo "When the images are downloaded, it will then start an Arvados environment on this computer."
- echo
- docker_pull arvados/workbench
- docker_pull arvados/doc
- docker_pull arvados/keep
- docker_pull arvados/shell
- docker_pull arvados/sso
- docker_pull arvados/compute
- docker_pull arvados/keep
- docker_pull arvados/keepproxy
- docker_pull arvados/api
- docker_pull crosbymichael/skydns
- docker_pull crosbymichael/skydock
-
- # Now download arvdock and start the containers
- echo
- echo Downloading arvdock
- echo
- \curl -sSL https://raw.githubusercontent.com/curoverse/arvados/master/docker/arvdock -o arvdock
- chmod 755 arvdock
-
- echo
- echo Starting the docker containers
- echo
- ./arvdock start
-
- echo To stop the containers, run
- echo
- echo ./arvdock stop
- echo
-}
-
-main
diff --git a/build/jenkins/run-cwl-tests.sh b/build/jenkins/run-cwl-tests.sh
deleted file mode 100755
index 53c0538..0000000
--- a/build/jenkins/run-cwl-tests.sh
+++ /dev/null
@@ -1,218 +0,0 @@
-#!/bin/bash
-
-read -rd "\000" helpmessage <<EOF
-$(basename $0): Test cwl tool and (optionally) upload to PyPi and Docker Hub.
-
-Syntax:
- WORKSPACE=/path/to/common-workflow-language $(basename $0) [options]
-
-Options:
-
---upload-pypi Upload package to pypi (default: false)
---upload-docker Upload packages to docker hub (default: false)
---debug Output debug information (default: false)
-
-WORKSPACE=path Path to the common-workflow-language source tree
-
-EOF
-
-EXITCODE=0
-CALL_FREIGHT=0
-
-DEBUG=0
-UPLOAD_PYPI=0
-UPLOAD_DOCKER=0
-
-VENVDIR=
-
-leave_temp=
-
-declare -A leave_temp
-
-set -e
-
-clear_temp() {
- leaving=""
- for var in VENVDIR
- do
- if [[ -z "${leave_temp[$var]}" ]]
- then
- if [[ -n "${!var}" ]]
- then
- rm -rf "${!var}"
- fi
- else
- leaving+=" $var=\"${!var}\""
- fi
- done
- if [[ -n "$leaving" ]]; then
- echo "Leaving behind temp dirs: $leaving"
- fi
-}
-
-fatal() {
- clear_temp
- echo >&2 "Fatal: $* (encountered in ${FUNCNAME[1]} at ${BASH_SOURCE[1]} line ${BASH_LINENO[0]})"
- exit 1
-}
-
-trap clear_temp INT EXIT
-
-# Set up temporary install dirs (unless existing dirs were supplied)
-for tmpdir in VENVDIR
-do
- if [[ -n "${!tmpdir}" ]]; then
- leave_temp[$tmpdir]=1
- else
- eval $tmpdir=$(mktemp -d)
- fi
-done
-
-
-while [[ -n "$1" ]]
-do
- arg="$1"; shift
- case "$arg" in
- --help)
- echo >&2 "$helpmessage"
- echo >&2
- exit 1
- ;;
- --debug)
- DEBUG=1
- ;;
- --upload-pypi)
- UPLOAD_PYPI=1
- ;;
- --upload-docker)
- UPLOAD_DOCKER=1
- ;;
- --leave-temp)
- leave_temp[VENVDIR]=1
- ;;
- *=*)
- eval export $(echo $arg | cut -d= -f1)=\"$(echo $arg | cut -d= -f2-)\"
- ;;
- *)
- echo >&2 "$0: Unrecognized option: '$arg'. Try: $0 --help"
- exit 1
- ;;
- esac
-done
-
-# Sanity check
-if ! [[ -n "$WORKSPACE" ]]; then
- echo >&2 "$helpmessage"
- echo >&2
- echo >&2 "Error: WORKSPACE environment variable not set"
- echo >&2
- exit 1
-fi
-
-if [[ "$DEBUG" != 0 ]]; then
- echo "Workspace is $WORKSPACE"
-fi
-
-virtualenv --setuptools "$VENVDIR" || fatal "virtualenv $VENVDIR failed"
-. "$VENVDIR/bin/activate"
-
-handle_python_package () {
- # This function assumes the current working directory is the python package directory
- if [[ "$UPLOAD_PYPI" != 0 ]]; then
- # Make sure only to use sdist - that's the only format pip can deal with (sigh)
- if [[ "$DEBUG" != 0 ]]; then
- python setup.py sdist upload
- else
- python setup.py -q sdist upload
- fi
- else
- # Make sure only to use sdist - that's the only format pip can deal with (sigh)
- if [[ "$DEBUG" != 0 ]]; then
- python setup.py sdist
- else
- python setup.py -q sdist
- fi
- fi
-}
-
-# Make all files world-readable -- jenkins runs with umask 027, and has checked
-# out our git tree here
-chmod o+r "$WORKSPACE" -R
-
-# Now fix our umask to something better suited to building and publishing
-# gems and packages
-umask 0022
-
-if [[ "$DEBUG" != 0 ]]; then
- echo "umask is" `umask`
-fi
-
-# Python packages
-if [[ "$DEBUG" != 0 ]]; then
- echo
- echo "Python packages"
- echo
-fi
-
-cd "$WORKSPACE"
-
-if test -d cwltool ; then
- (cd cwltool
- git fetch
- git reset --hard origin/master
- )
-else
- git clone git@github.com:common-workflow-language/cwltool.git
- (cd cwltool
- git config user.email "sysadmin at curoverse.com"
- git config user.name "Curoverse build bot"
- )
-fi
-
-(cd cwltool
- python setup.py install
- python setup.py test
- ./build-node-docker.sh
-)
-
-./run_test.sh RUNNER=cwltool/cwltool/main.py DRAFT=draft-2
-./run_test.sh RUNNER=cwltool/cwltool/main.py DRAFT=draft-3
-
-(cd cwltool
- handle_python_package
-)
-
-(cd cwltool/cwl-runner
- handle_python_package
-)
-
-(cd cwltool
- ./build-cwl-docker.sh
-)
-
-if [[ "$UPLOAD_DOCKER" != 0 ]]; then
- docker push commonworkflowlanguage/cwltool_module
- docker push commonworkflowlanguage/cwltool
- docker push commonworkflowlanguage/nodejs-engine
-fi
-
-if test -d common-workflow-language.github.io ; then
- (cd common-workflow-language.github.io
- git fetch
- git reset --hard origin/master
- )
-else
- git clone git@github.com:common-workflow-language/common-workflow-language.github.io.git
- (cd common-workflow-language.github.io
- git config user.email "sysadmin at curoverse.com"
- git config user.name "Curoverse build bot"
- )
-fi
-
-python -mcwltool --outdir=$PWD/common-workflow-language.github.io site/cwlsite.cwl site/cwlsite-job.json
-
-(cd common-workflow-language.github.io
- git add --all
- git diff-index --quiet HEAD || git commit -m"Build bot"
- git push
-)
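
Per the helpmessage at the top of run-cwl-tests.sh above, the script expects WORKSPACE to point at a common-workflow-language checkout. A usage sketch (the path is a placeholder):

    # Run the cwltool tests without publishing anything (uploads default to off).
    WORKSPACE=/path/to/common-workflow-language ./run-cwl-tests.sh --debug
    # Add --upload-pypi and/or --upload-docker to publish to PyPI / Docker Hub.
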
diff --git a/build/jenkins/run-deploy.sh b/build/jenkins/run-deploy.sh
deleted file mode 100755
index 1b06c65..0000000
--- a/build/jenkins/run-deploy.sh
+++ /dev/null
@@ -1,266 +0,0 @@
-#!/bin/bash
-
-DEBUG=0
-SSH_PORT=22
-
-function usage {
- echo >&2
- echo >&2 "usage: $0 [options] <identifier>"
- echo >&2
- echo >&2 " <identifier> Arvados cluster name"
- echo >&2
- echo >&2 "$0 options:"
- echo >&2 " -p, --port <ssh port> SSH port to use (default 22)"
- echo >&2 " -d, --debug Enable debug output"
- echo >&2 " -h, --help Display this help and exit"
- echo >&2
- echo >&2 "Note: this script requires an arvados token created with these permissions:"
- echo >&2 ' arv api_client_authorization create_system_auth \'
- echo >&2 ' --scopes "[\"GET /arvados/v1/virtual_machines\",'
- echo >&2 ' \"GET /arvados/v1/keep_services\",'
- echo >&2 ' \"GET /arvados/v1/keep_services/\",'
- echo >&2 ' \"GET /arvados/v1/groups\",'
- echo >&2 ' \"GET /arvados/v1/groups/\",'
- echo >&2 ' \"GET /arvados/v1/links\",'
- echo >&2 ' \"GET /arvados/v1/collections\",'
- echo >&2 ' \"POST /arvados/v1/collections\",'
- echo >&2 ' \"POST /arvados/v1/links\"]"'
- echo >&2
-}
-
-# NOTE: This requires GNU getopt (part of the util-linux package on Debian-based distros).
-TEMP=`getopt -o hdp: \
- --long help,debug,port: \
- -n "$0" -- "$@"`
-
-if [ $? != 0 ] ; then echo "Use -h for help"; exit 1 ; fi
-# Note the quotes around `$TEMP': they are essential!
-eval set -- "$TEMP"
-
-while [ $# -ge 1 ]
-do
- case $1 in
- -p | --port)
- SSH_PORT="$2"; shift 2
- ;;
- -d | --debug)
- DEBUG=1
- shift
- ;;
- --)
- shift
- break
- ;;
- *)
- usage
- exit 1
- ;;
- esac
-done
-
-IDENTIFIER=$1
-
-if [[ "$IDENTIFIER" == '' ]]; then
- usage
- exit 1
-fi
-
-EXITCODE=0
-
-COLUMNS=80
-
-PUPPET_AGENT='
-now() { date +%s; }
-let endtime="$(now) + 600"
-while [ "$endtime" -gt "$(now)" ]; do
- puppet agent --test --detailed-exitcodes
- agent_exitcode=$?
- if [ 0 = "$agent_exitcode" ] || [ 2 = "$agent_exitcode" ]; then
- break
- else
- sleep 10s
- fi
-done
-exit ${agent_exitcode:-99}
-'
-
-title () {
- date=`date +'%Y-%m-%d %H:%M:%S'`
- printf "$date $1\n"
-}
-
-function run_puppet() {
- node=$1
- return_var=$2
-
- title "Running puppet on $node"
- TMP_FILE=`mktemp`
- if [[ "$DEBUG" != "0" ]]; then
- ssh -t -p$SSH_PORT -o "StrictHostKeyChecking no" -o "ConnectTimeout 5" root@$node -C bash -c "'$PUPPET_AGENT'" | tee $TMP_FILE
- else
- ssh -t -p$SSH_PORT -o "StrictHostKeyChecking no" -o "ConnectTimeout 5" root@$node -C bash -c "'$PUPPET_AGENT'" > $TMP_FILE 2>&1
- fi
-
- ECODE=${PIPESTATUS[0]}
- RESULT=$(cat $TMP_FILE)
-
- if [[ "$ECODE" != "255" && ! ("$RESULT" =~ 'already in progress') && "$ECODE" != "2" && "$ECODE" != "0" ]]; then
- # Ssh exits 255 if the connection timed out. Just ignore that.
- # Puppet exits 2 if there are changes. For real!
- # Puppet prints 'Notice: Run of Puppet configuration client already in progress' if another puppet process
- # was already running
- echo "ERROR running puppet on $node: exit code $ECODE"
- if [[ "$DEBUG" == "0" ]]; then
- title "Command output follows:"
- echo $RESULT
- fi
- fi
- if [[ "$ECODE" == "255" ]]; then
- title "Connection timed out"
- ECODE=0
- fi
- if [[ "$ECODE" == "2" ]]; then
- ECODE=0
- fi
- rm -f $TMP_FILE
- eval "$return_var=$ECODE"
-}
-
-function run_command() {
- node=$1
- return_var=$2
- command=$3
-
- title "Running '$command' on $node"
- TMP_FILE=`mktemp`
- if [[ "$DEBUG" != "0" ]]; then
- ssh -t -p$SSH_PORT -o "StrictHostKeyChecking no" -o "ConnectTimeout 5" root@$node -C "$command" | tee $TMP_FILE
- else
- ssh -t -p$SSH_PORT -o "StrictHostKeyChecking no" -o "ConnectTimeout 5" root@$node -C "$command" > $TMP_FILE 2>&1
- fi
-
- ECODE=$?
- RESULT=$(cat $TMP_FILE)
-
- if [[ "$ECODE" != "255" && "$ECODE" != "0" ]]; then
- # Ssh exits 255 if the connection timed out. Just ignore that, it's possible that this node is
- # a shell node that is down.
- title "ERROR running command on $node: exit code $ECODE"
- if [[ "$DEBUG" == "0" ]]; then
- title "Command output follows:"
- echo $RESULT
- fi
- fi
- if [[ "$ECODE" == "255" ]]; then
- title "Connection timed out"
- ECODE=0
- fi
- rm -f $TMP_FILE
- eval "$return_var=$ECODE"
-}
-
-title "Updating API server"
-SUM_ECODE=0
-run_puppet $IDENTIFIER.arvadosapi.com ECODE
-SUM_ECODE=$(($SUM_ECODE + $ECODE))
-if [ ! "$IDENTIFIER" = "c97qk" ]
-then
- run_command $IDENTIFIER.arvadosapi.com ECODE "dpkg -L arvados-mailchimp-plugin 2>/dev/null && apt-get install arvados-mailchimp-plugin --reinstall || echo"
- SUM_ECODE=$(($SUM_ECODE + $ECODE))
-fi
-
-if [[ "$SUM_ECODE" != "0" ]]; then
- title "ERROR: Updating API server FAILED"
- EXITCODE=$(($EXITCODE + $SUM_ECODE))
- exit $EXITCODE
-fi
-
-title "Loading ARVADOS_API_HOST and ARVADOS_API_TOKEN"
-if [[ -f "$HOME/.config/arvados/$IDENTIFIER.arvadosapi.com.conf" ]]; then
- . $HOME/.config/arvados/$IDENTIFIER.arvadosapi.com.conf
-else
- title "WARNING: $HOME/.config/arvados/$IDENTIFIER.arvadosapi.com.conf not found."
-fi
-if [[ "$ARVADOS_API_HOST" == "" ]] || [[ "$ARVADOS_API_TOKEN" == "" ]]; then
- title "ERROR: ARVADOS_API_HOST and/or ARVADOS_API_TOKEN environment variables are not set."
- exit 1
-fi
-
-title "Locating Arvados Standard Docker images project"
-
-JSON_FILTER="[[\"name\", \"=\", \"Arvados Standard Docker Images\"], [\"owner_uuid\", \"=\", \"$IDENTIFIER-tpzed-000000000000000\"]]"
-DOCKER_IMAGES_PROJECT=`ARVADOS_API_HOST=$ARVADOS_API_HOST ARVADOS_API_TOKEN=$ARVADOS_API_TOKEN arv --format=uuid group list --filters="$JSON_FILTER"`
-
-if [[ "$DOCKER_IMAGES_PROJECT" == "" ]]; then
- title "Warning: Arvados Standard Docker Images project not found. Creating it."
-
- DOCKER_IMAGES_PROJECT=`ARVADOS_API_HOST=$ARVADOS_API_HOST ARVADOS_API_TOKEN=$ARVADOS_API_TOKEN arv --format=uuid group create --group "{\"owner_uuid\":\"$IDENTIFIER-tpzed-000000000000000\", \"name\":\"Arvados Standard Docker Images\", \"group_class\":\"project\"}"`
- ARVADOS_API_HOST=$ARVADOS_API_HOST ARVADOS_API_TOKEN=$ARVADOS_API_TOKEN arv link create --link "{\"tail_uuid\":\"$IDENTIFIER-j7d0g-fffffffffffffff\", \"head_uuid\":\"$DOCKER_IMAGES_PROJECT\", \"link_class\":\"permission\", \"name\":\"can_read\" }"
- if [[ "$?" != "0" ]]; then
- title "ERROR: could not create standard Docker images project Please create it, cf. http://doc.arvados.org/install/create-standard-objects.html"
- exit 1
- fi
-fi
-
-title "Found Arvados Standard Docker Images project with uuid $DOCKER_IMAGES_PROJECT"
-GIT_COMMIT=`ssh -o "StrictHostKeyChecking no" $IDENTIFIER cat /usr/local/arvados/src/git-commit.version`
-
-if [[ "$?" != "0" ]] || [[ "$GIT_COMMIT" == "" ]]; then
- title "ERROR: unable to get arvados/jobs Docker image git revision"
- exit 1
-else
- title "Found git commit for arvados/jobs Docker image: $GIT_COMMIT"
-fi
-
-run_command shell.$IDENTIFIER ECODE "ARVADOS_API_HOST=$ARVADOS_API_HOST ARVADOS_API_TOKEN=$ARVADOS_API_TOKEN /usr/local/rvm/bin/rvm-exec default arv keep docker" |grep -q $GIT_COMMIT
-
-if [[ "$?" == "0" ]]; then
- title "Found latest arvados/jobs Docker image, nothing to upload"
-else
- title "Installing latest arvados/jobs Docker image"
- ssh -o "StrictHostKeyChecking no" shell.$IDENTIFIER "ARVADOS_API_HOST=$ARVADOS_API_HOST ARVADOS_API_TOKEN=$ARVADOS_API_TOKEN /usr/local/rvm/bin/rvm-exec default arv keep docker --pull --project-uuid=$DOCKER_IMAGES_PROJECT arvados/jobs $GIT_COMMIT"
- if [[ "$?" -ne 0 ]]; then
- title "'git pull' failed exiting..."
- exit 1
- fi
-fi
-
-title "Gathering list of shell and Keep nodes"
-SHELL_NODES=`ARVADOS_API_HOST=$ARVADOS_API_HOST ARVADOS_API_TOKEN=$ARVADOS_API_TOKEN arv virtual_machine list |jq .items[].hostname -r`
-KEEP_NODES=`ARVADOS_API_HOST=$ARVADOS_API_HOST ARVADOS_API_TOKEN=$ARVADOS_API_TOKEN arv keep_service list |jq .items[].service_host -r`
-
-title "Updating workbench"
-SUM_ECODE=0
-if [[ `host workbench.$ARVADOS_API_HOST |cut -f4 -d' '` != `host $ARVADOS_API_HOST |cut -f4 -d' '` ]]; then
- # Workbench runs on a separate host. We need to run puppet there too.
- run_puppet workbench.$IDENTIFIER ECODE
- SUM_ECODE=$(($SUM_ECODE + $ECODE))
-fi
-
-if [[ "$SUM_ECODE" != "0" ]]; then
- title "ERROR: Updating workbench FAILED"
- EXITCODE=$(($EXITCODE + $SUM_ECODE))
- exit $EXITCODE
-fi
-
-for n in manage switchyard $SHELL_NODES $KEEP_NODES; do
- ECODE=0
- if [[ $n =~ $ARVADOS_API_HOST$ ]]; then
- # e.g. keep.qr1hi.arvadosapi.com
- node=$n
- else
- # e.g. shell
- node=$n.$ARVADOS_API_HOST
- fi
-
- # e.g. keep.qr1hi
- node=${node%.arvadosapi.com}
-
- title "Updating $node"
- run_puppet $node ECODE
- if [[ "$ECODE" != "0" ]]; then
- title "ERROR: Updating $node node FAILED: exit code $ECODE"
- EXITCODE=$(($EXITCODE + $ECODE))
- exit $EXITCODE
- fi
-done
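
The usage() text above lists the API token scopes run-deploy.sh needs and shows that it takes a cluster identifier plus optional SSH port and debug flags. A usage sketch (the cluster name and port are placeholders):

    # First create a token on the API server with the scopes shown in usage(),
    # e.g. via: arv api_client_authorization create_system_auth --scopes "[...]"
    # Then deploy, optionally over a non-default SSH port and with debug output:
    ./run-deploy.sh --port 2222 --debug qr1hi
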
diff --git a/build/jenkins/run-diagnostics-suite.sh b/build/jenkins/run-diagnostics-suite.sh
deleted file mode 100755
index 015a053..0000000
--- a/build/jenkins/run-diagnostics-suite.sh
+++ /dev/null
@@ -1,86 +0,0 @@
-#!/bin/bash
-
-EXITCODE=0
-
-INSTANCE=$1
-REVISION=$2
-
-if [[ "$INSTANCE" == '' ]]; then
- echo "Syntax: $0 <instance> [revision]"
- exit 1
-fi
-
-if [[ "$REVISION" == '' ]]; then
- # See if there's a configuration file with the revision.
- CONFIG_PATH=/home/jenkins/configuration/$INSTANCE.arvadosapi.com-versions.conf
- if [[ -f $CONFIG_PATH ]]; then
- echo "Loading git revision from $CONFIG_PATH"
- . $CONFIG_PATH
- REVISION=$ARVADOS_GIT_REVISION
- fi
-fi
-
-if [[ "$REVISION" != '' ]]; then
- echo "Git revision is $REVISION"
-else
- echo "No valid git revision found, proceeding with what is in place."
-fi
-
-# Sanity check
-if ! [[ -n "$WORKSPACE" ]]; then
- echo "WORKSPACE environment variable not set"
- exit 1
-fi
-
-title () {
- txt="********** $1 **********"
- printf "\n%*s%s\n\n" $((($COLUMNS-${#txt})/2)) "" "$txt"
-}
-
-timer_reset() {
- t0=$SECONDS
-}
-
-timer() {
- echo -n "$(($SECONDS - $t0))s"
-}
-
-source /etc/profile.d/rvm.sh
-echo $WORKSPACE
-
-title "Starting diagnostics"
-timer_reset
-
-cd $WORKSPACE
-
-if [[ "$REVISION" != '' ]]; then
- git checkout $REVISION
-fi
-
-cp -f /home/jenkins/diagnostics/arvados-workbench/$INSTANCE-application.yml $WORKSPACE/apps/workbench/config/application.yml
-
-cd $WORKSPACE/apps/workbench
-
-HOME="$GEMHOME" bundle install --no-deployment
-
-if [[ ! -d tmp ]]; then
- mkdir tmp
-fi
-
-RAILS_ENV=diagnostics bundle exec rake TEST=test/diagnostics/pipeline_test.rb
-
-ECODE=$?
-
-if [[ "$REVISION" != '' ]]; then
- git checkout master
-fi
-
-if [[ "$ECODE" != "0" ]]; then
- title "!!!!!! DIAGNOSTICS FAILED (`timer`) !!!!!!"
- EXITCODE=$(($EXITCODE + $ECODE))
- exit $EXITCODE
-fi
-
-title "Diagnostics complete (`timer`)"
-
-exit $EXITCODE
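
run-diagnostics-suite.sh above takes an instance name and an optional git revision, requires WORKSPACE to be set, and otherwise reads the revision from /home/jenkins/configuration/<instance>.arvadosapi.com-versions.conf. A usage sketch (paths and instance name are placeholders):

    # Run the workbench diagnostics pipeline test against a cluster.
    WORKSPACE=/path/to/arvados ./run-diagnostics-suite.sh qr1hi
    # Pin a specific revision instead of the one from the configuration file:
    # WORKSPACE=/path/to/arvados ./run-diagnostics-suite.sh qr1hi <revision>
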
diff --git a/build/jenkins/run-performance-suite.sh b/build/jenkins/run-performance-suite.sh
deleted file mode 100755
index 2944bda..0000000
--- a/build/jenkins/run-performance-suite.sh
+++ /dev/null
@@ -1,88 +0,0 @@
-#!/bin/bash
-
-EXITCODE=0
-
-INSTANCE=$1
-REVISION=$2
-
-if [[ "$INSTANCE" == '' ]]; then
- echo "Syntax: $0 <instance> [revision]"
- exit 1
-fi
-
-if [[ "$REVISION" == '' ]]; then
- # See if there's a configuration file with the revision.
- CONFIG_PATH=/home/jenkins/configuration/$INSTANCE.arvadosapi.com-versions.conf
- if [[ -f $CONFIG_PATH ]]; then
- echo "Loading git revision from $CONFIG_PATH"
- . $CONFIG_PATH
- REVISION=$ARVADOS_GIT_REVISION
- fi
-fi
-
-if [[ "$REVISION" != '' ]]; then
- echo "Git revision is $REVISION"
-else
- echo "No valid git revision found, proceeding with what is in place."
-fi
-
-# Sanity check
-if ! [[ -n "$WORKSPACE" ]]; then
- echo "WORKSPACE environment variable not set"
- exit 1
-fi
-
-title () {
- txt="********** $1 **********"
- printf "\n%*s%s\n\n" $((($COLUMNS-${#txt})/2)) "" "$txt"
-}
-
-timer_reset() {
- t0=$SECONDS
-}
-
-timer() {
- echo -n "$(($SECONDS - $t0))s"
-}
-
-source /etc/profile.d/rvm.sh
-echo $WORKSPACE
-
-title "Starting performance test"
-timer_reset
-
-cd $WORKSPACE
-
-if [[ "$REVISION" != '' ]]; then
- git checkout $REVISION
-fi
-
-cp -f /home/jenkins/diagnostics/arvados-workbench/$INSTANCE-application.yml $WORKSPACE/apps/workbench/config/application.yml
-
-cd $WORKSPACE/apps/workbench
-
-HOME="$GEMHOME" bundle install --no-deployment
-
-if [[ ! -d tmp ]]; then
- mkdir tmp
-fi
-
-mkdir -p tmp/cache
-
-RAILS_ENV=performance bundle exec rake test:benchmark
-
-ECODE=$?
-
-if [[ "$REVISION" != '' ]]; then
- git checkout master
-fi
-
-if [[ "$ECODE" != "0" ]]; then
- title "!!!!!! PERFORMANCE TESTS FAILED (`timer`) !!!!!!"
- EXITCODE=$(($EXITCODE + $ECODE))
- exit $EXITCODE
-fi
-
-title "Performance tests complete (`timer`)"
-
-exit $EXITCODE
diff --git a/build/jenkins/run-tapestry-tests.sh b/build/jenkins/run-tapestry-tests.sh
deleted file mode 100755
index 851a81d..0000000
--- a/build/jenkins/run-tapestry-tests.sh
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/bin/bash
-
-EXITCODE=0
-
-COLUMNS=80
-
-title () {
- printf "\n%*s\n\n" $(((${#title}+$COLUMNS)/2)) "********** $1 **********"
-}
-
-source /etc/profile.d/rvm.sh
-
-# This shouldn't really be necessary... but the jenkins/rvm integration seems a
-# bit wonky occasionally.
-rvm use ree
-
-echo $WORKSPACE
-
-# Tapestry
-title "Starting tapestry tests"
-cd "$WORKSPACE"
-
-# There are a few submodules
-git submodule init && git submodule update
-
-# Use sqlite for testing
-sed -i'' -e "s:mysql:sqlite3:" Gemfile
-
-# Tapestry is not set up yet to use --deployment
-#bundle install --deployment
-bundle install
-
-rm -f config/database.yml
-rm -f config/environments/test.rb
-cp $HOME/tapestry/test.rb config/environments/
-cp $HOME/tapestry/database.yml config/
-
-export RAILS_ENV=test
-
-bundle exec rake db:drop
-bundle exec rake db:create
-bundle exec rake db:setup
-bundle exec rake test
-
-ECODE=$?
-
-if [[ "$ECODE" != "0" ]]; then
- title "!!!!!! TAPESTRY TESTS FAILED !!!!!!"
- EXITCODE=$(($EXITCODE + $ECODE))
-fi
-
-title "Tapestry tests complete"
-
-exit $EXITCODE
diff --git a/build/jenkins/libcloud-pin b/build/libcloud-pin
similarity index 100%
rename from build/jenkins/libcloud-pin
rename to build/libcloud-pin
diff --git a/build/jenkins/package-build-dockerfiles/.gitignore b/build/package-build-dockerfiles/.gitignore
similarity index 100%
rename from build/jenkins/package-build-dockerfiles/.gitignore
rename to build/package-build-dockerfiles/.gitignore
diff --git a/build/jenkins/package-build-dockerfiles/Makefile b/build/package-build-dockerfiles/Makefile
similarity index 100%
rename from build/jenkins/package-build-dockerfiles/Makefile
rename to build/package-build-dockerfiles/Makefile
diff --git a/build/jenkins/package-build-dockerfiles/README b/build/package-build-dockerfiles/README
similarity index 100%
rename from build/jenkins/package-build-dockerfiles/README
rename to build/package-build-dockerfiles/README
diff --git a/build/jenkins/package-build-dockerfiles/build-all-build-containers.sh b/build/package-build-dockerfiles/build-all-build-containers.sh
similarity index 100%
rename from build/jenkins/package-build-dockerfiles/build-all-build-containers.sh
rename to build/package-build-dockerfiles/build-all-build-containers.sh
diff --git a/build/jenkins/package-build-dockerfiles/centos6/Dockerfile b/build/package-build-dockerfiles/centos6/Dockerfile
similarity index 100%
rename from build/jenkins/package-build-dockerfiles/centos6/Dockerfile
rename to build/package-build-dockerfiles/centos6/Dockerfile
diff --git a/build/jenkins/package-build-dockerfiles/debian7/Dockerfile b/build/package-build-dockerfiles/debian7/Dockerfile
similarity index 100%
rename from build/jenkins/package-build-dockerfiles/debian7/Dockerfile
rename to build/package-build-dockerfiles/debian7/Dockerfile
diff --git a/build/jenkins/package-build-dockerfiles/debian8/Dockerfile b/build/package-build-dockerfiles/debian8/Dockerfile
similarity index 100%
rename from build/jenkins/package-build-dockerfiles/debian8/Dockerfile
rename to build/package-build-dockerfiles/debian8/Dockerfile
diff --git a/build/jenkins/package-build-dockerfiles/ubuntu1204/Dockerfile b/build/package-build-dockerfiles/ubuntu1204/Dockerfile
similarity index 100%
rename from build/jenkins/package-build-dockerfiles/ubuntu1204/Dockerfile
rename to build/package-build-dockerfiles/ubuntu1204/Dockerfile
diff --git a/build/jenkins/package-build-dockerfiles/ubuntu1404/Dockerfile b/build/package-build-dockerfiles/ubuntu1404/Dockerfile
similarity index 100%
rename from build/jenkins/package-build-dockerfiles/ubuntu1404/Dockerfile
rename to build/package-build-dockerfiles/ubuntu1404/Dockerfile
diff --git a/build/jenkins/package-test-dockerfiles/centos6/Dockerfile b/build/package-test-dockerfiles/centos6/Dockerfile
similarity index 100%
rename from build/jenkins/package-test-dockerfiles/centos6/Dockerfile
rename to build/package-test-dockerfiles/centos6/Dockerfile
diff --git a/build/jenkins/package-test-dockerfiles/centos6/localrepo.repo b/build/package-test-dockerfiles/centos6/localrepo.repo
similarity index 100%
rename from build/jenkins/package-test-dockerfiles/centos6/localrepo.repo
rename to build/package-test-dockerfiles/centos6/localrepo.repo
diff --git a/build/jenkins/package-test-dockerfiles/debian7/Dockerfile b/build/package-test-dockerfiles/debian7/Dockerfile
similarity index 100%
rename from build/jenkins/package-test-dockerfiles/debian7/Dockerfile
rename to build/package-test-dockerfiles/debian7/Dockerfile
diff --git a/build/jenkins/package-test-dockerfiles/debian8/Dockerfile b/build/package-test-dockerfiles/debian8/Dockerfile
similarity index 100%
rename from build/jenkins/package-test-dockerfiles/debian8/Dockerfile
rename to build/package-test-dockerfiles/debian8/Dockerfile
diff --git a/build/jenkins/package-test-dockerfiles/ubuntu1204/Dockerfile b/build/package-test-dockerfiles/ubuntu1204/Dockerfile
similarity index 100%
rename from build/jenkins/package-test-dockerfiles/ubuntu1204/Dockerfile
rename to build/package-test-dockerfiles/ubuntu1204/Dockerfile
diff --git a/build/jenkins/package-test-dockerfiles/ubuntu1404/Dockerfile b/build/package-test-dockerfiles/ubuntu1404/Dockerfile
similarity index 100%
rename from build/jenkins/package-test-dockerfiles/ubuntu1404/Dockerfile
rename to build/package-test-dockerfiles/ubuntu1404/Dockerfile
diff --git a/build/jenkins/package-testing/common-test-packages.sh b/build/package-testing/common-test-packages.sh
similarity index 100%
rename from build/jenkins/package-testing/common-test-packages.sh
rename to build/package-testing/common-test-packages.sh
diff --git a/build/jenkins/package-testing/deb-common-test-packages.sh b/build/package-testing/deb-common-test-packages.sh
similarity index 100%
rename from build/jenkins/package-testing/deb-common-test-packages.sh
rename to build/package-testing/deb-common-test-packages.sh
diff --git a/build/jenkins/package-testing/test-package-arvados-api-server.sh b/build/package-testing/test-package-arvados-api-server.sh
similarity index 100%
rename from build/jenkins/package-testing/test-package-arvados-api-server.sh
rename to build/package-testing/test-package-arvados-api-server.sh
diff --git a/build/jenkins/package-testing/test-package-arvados-node-manager.sh b/build/package-testing/test-package-arvados-node-manager.sh
similarity index 100%
rename from build/jenkins/package-testing/test-package-arvados-node-manager.sh
rename to build/package-testing/test-package-arvados-node-manager.sh
diff --git a/build/jenkins/package-testing/test-package-arvados-sso-server.sh b/build/package-testing/test-package-arvados-sso-server.sh
similarity index 100%
rename from build/jenkins/package-testing/test-package-arvados-sso-server.sh
rename to build/package-testing/test-package-arvados-sso-server.sh
diff --git a/build/jenkins/package-testing/test-package-arvados-workbench.sh b/build/package-testing/test-package-arvados-workbench.sh
similarity index 100%
rename from build/jenkins/package-testing/test-package-arvados-workbench.sh
rename to build/package-testing/test-package-arvados-workbench.sh
diff --git a/build/jenkins/package-testing/test-package-python27-python-arvados-fuse.sh b/build/package-testing/test-package-python27-python-arvados-fuse.sh
similarity index 100%
rename from build/jenkins/package-testing/test-package-python27-python-arvados-fuse.sh
rename to build/package-testing/test-package-python27-python-arvados-fuse.sh
diff --git a/build/jenkins/package-testing/test-package-python27-python-arvados-python-client.sh b/build/package-testing/test-package-python27-python-arvados-python-client.sh
similarity index 100%
rename from build/jenkins/package-testing/test-package-python27-python-arvados-python-client.sh
rename to build/package-testing/test-package-python27-python-arvados-python-client.sh
diff --git a/build/jenkins/package-testing/test-packages-centos6.sh b/build/package-testing/test-packages-centos6.sh
similarity index 100%
rename from build/jenkins/package-testing/test-packages-centos6.sh
rename to build/package-testing/test-packages-centos6.sh
diff --git a/build/jenkins/package-testing/test-packages-debian7.sh b/build/package-testing/test-packages-debian7.sh
similarity index 100%
rename from build/jenkins/package-testing/test-packages-debian7.sh
rename to build/package-testing/test-packages-debian7.sh
diff --git a/build/jenkins/package-testing/test-packages-debian8.sh b/build/package-testing/test-packages-debian8.sh
similarity index 100%
rename from build/jenkins/package-testing/test-packages-debian8.sh
rename to build/package-testing/test-packages-debian8.sh
diff --git a/build/jenkins/package-testing/test-packages-ubuntu1204.sh b/build/package-testing/test-packages-ubuntu1204.sh
similarity index 100%
rename from build/jenkins/package-testing/test-packages-ubuntu1204.sh
rename to build/package-testing/test-packages-ubuntu1204.sh
diff --git a/build/jenkins/package-testing/test-packages-ubuntu1404.sh b/build/package-testing/test-packages-ubuntu1404.sh
similarity index 100%
rename from build/jenkins/package-testing/test-packages-ubuntu1404.sh
rename to build/package-testing/test-packages-ubuntu1404.sh
diff --git a/build/jenkins/rails-package-scripts/README.md b/build/rails-package-scripts/README.md
similarity index 100%
rename from build/jenkins/rails-package-scripts/README.md
rename to build/rails-package-scripts/README.md
diff --git a/build/jenkins/rails-package-scripts/arvados-api-server.sh b/build/rails-package-scripts/arvados-api-server.sh
similarity index 100%
rename from build/jenkins/rails-package-scripts/arvados-api-server.sh
rename to build/rails-package-scripts/arvados-api-server.sh
diff --git a/build/jenkins/rails-package-scripts/arvados-sso-server.sh b/build/rails-package-scripts/arvados-sso-server.sh
similarity index 100%
rename from build/jenkins/rails-package-scripts/arvados-sso-server.sh
rename to build/rails-package-scripts/arvados-sso-server.sh
diff --git a/build/jenkins/rails-package-scripts/arvados-workbench.sh b/build/rails-package-scripts/arvados-workbench.sh
similarity index 100%
rename from build/jenkins/rails-package-scripts/arvados-workbench.sh
rename to build/rails-package-scripts/arvados-workbench.sh
diff --git a/build/jenkins/rails-package-scripts/postinst.sh b/build/rails-package-scripts/postinst.sh
similarity index 100%
rename from build/jenkins/rails-package-scripts/postinst.sh
rename to build/rails-package-scripts/postinst.sh
diff --git a/build/jenkins/rails-package-scripts/postrm.sh b/build/rails-package-scripts/postrm.sh
similarity index 100%
rename from build/jenkins/rails-package-scripts/postrm.sh
rename to build/rails-package-scripts/postrm.sh
diff --git a/build/jenkins/rails-package-scripts/prerm.sh b/build/rails-package-scripts/prerm.sh
similarity index 100%
rename from build/jenkins/rails-package-scripts/prerm.sh
rename to build/rails-package-scripts/prerm.sh
diff --git a/build/jenkins/rails-package-scripts/step2.sh b/build/rails-package-scripts/step2.sh
similarity index 100%
rename from build/jenkins/rails-package-scripts/step2.sh
rename to build/rails-package-scripts/step2.sh
diff --git a/build/jenkins/run-build-docker-images.sh b/build/run-build-docker-images.sh
similarity index 100%
rename from build/jenkins/run-build-docker-images.sh
rename to build/run-build-docker-images.sh
diff --git a/build/jenkins/run-build-docker-jobs-image.sh b/build/run-build-docker-jobs-image.sh
similarity index 100%
rename from build/jenkins/run-build-docker-jobs-image.sh
rename to build/run-build-docker-jobs-image.sh
diff --git a/build/jenkins/run-build-packages-all-targets.sh b/build/run-build-packages-all-targets.sh
similarity index 100%
rename from build/jenkins/run-build-packages-all-targets.sh
rename to build/run-build-packages-all-targets.sh
diff --git a/build/jenkins/run-build-packages-one-target.sh b/build/run-build-packages-one-target.sh
similarity index 100%
rename from build/jenkins/run-build-packages-one-target.sh
rename to build/run-build-packages-one-target.sh
diff --git a/build/jenkins/run-build-packages-sso.sh b/build/run-build-packages-sso.sh
similarity index 100%
rename from build/jenkins/run-build-packages-sso.sh
rename to build/run-build-packages-sso.sh
diff --git a/build/jenkins/run-build-packages.sh b/build/run-build-packages.sh
similarity index 100%
rename from build/jenkins/run-build-packages.sh
rename to build/run-build-packages.sh
diff --git a/build/jenkins/run-library.sh b/build/run-library.sh
similarity index 100%
rename from build/jenkins/run-library.sh
rename to build/run-library.sh
diff --git a/build/jenkins/run-tests.sh b/build/run-tests.sh
similarity index 100%
rename from build/jenkins/run-tests.sh
rename to build/run-tests.sh
diff --git a/build/jenkins/run_upload_packages.py b/build/run_upload_packages.py
similarity index 100%
rename from build/jenkins/run_upload_packages.py
rename to build/run_upload_packages.py
commit ed99c3e8084fd3ae4a60652dc0348a5735a04dd4
Author: Tom Clegg <tom at curoverse.com>
Date: Tue Mar 8 13:31:22 2016 -0500
Add 'build/' from commit '2b9b7518a60a71315a1504bf96b3182122bec702'
git-subtree-dir: build
git-subtree-mainline: 0a0011c987cbec72c7e13762dbc99b8e19db47c1
git-subtree-split: 2b9b7518a60a71315a1504bf96b3182122bec702
diff --git a/build/COPYING b/build/COPYING
new file mode 100644
index 0000000..af63e41
--- /dev/null
+++ b/build/COPYING
@@ -0,0 +1,2 @@
+This code is licenced under the GNU Affero General Public License version 3
+(see agpl-3.0.txt)
diff --git a/build/README b/build/README
new file mode 100644
index 0000000..b076f0b
--- /dev/null
+++ b/build/README
@@ -0,0 +1,30 @@
+Welcome to Arvados!
+
+This is the arvados-dev source tree. It contains scripts that can be useful
+if you want to hack on Arvados itself.
+
+If you are interested in using Arvados or setting up your own Arvados
+installation, you most likely do not need this source tree.
+
+For the Arvados source code, check out the git repository at
+ https://github.com/curoverse/arvados
+
+The main Arvados web site is
+ https://arvados.org
+
+The Arvados public wiki is located at
+ https://arvados.org/projects/arvados/wiki
+
+The Arvados public bug tracker is located at
+ https://arvados.org/projects/arvados/issues
+
+For support see
+ http://doc.arvados.org/user/getting_started/community.html
+
+Installation documentation is located at
+ http://doc.arvados.org/install
+
+If you wish to build the documentation yourself, follow the instructions in
+doc/README to build the documentation, then consult the "Install Guide".
+
+See COPYING for information about Arvados Free Software licenses.
diff --git a/build/agpl-3.0.txt b/build/agpl-3.0.txt
new file mode 100644
index 0000000..dba13ed
--- /dev/null
+++ b/build/agpl-3.0.txt
@@ -0,0 +1,661 @@
+ GNU AFFERO GENERAL PUBLIC LICENSE
+ Version 3, 19 November 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The GNU Affero General Public License is a free, copyleft license for
+software and other kinds of works, specifically designed to ensure
+cooperation with the community in the case of network server software.
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+our General Public Licenses are intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ Developers that use our General Public Licenses protect your rights
+with two steps: (1) assert copyright on the software, and (2) offer
+you this License which gives you legal permission to copy, distribute
+and/or modify the software.
+
+ A secondary benefit of defending all users' freedom is that
+improvements made in alternate versions of the program, if they
+receive widespread use, become available for other developers to
+incorporate. Many developers of free software are heartened and
+encouraged by the resulting cooperation. However, in the case of
+software used on network servers, this result may fail to come about.
+The GNU General Public License permits making a modified version and
+letting the public access it on a server without ever releasing its
+source code to the public.
+
+ The GNU Affero General Public License is designed specifically to
+ensure that, in such cases, the modified source code becomes available
+to the community. It requires the operator of a network server to
+provide the source code of the modified version running there to the
+users of that server. Therefore, public use of a modified version, on
+a publicly accessible server, gives the public access to the source
+code of the modified version.
+
+ An older license, called the Affero General Public License and
+published by Affero, was designed to accomplish similar goals. This is
+a different license, not a version of the Affero GPL, but Affero has
+released a new version of the Affero GPL which permits relicensing under
+this license.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU Affero General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+ 13. Remote Network Interaction; Use with the GNU General Public License.
+
+ Notwithstanding any other provision of this License, if you modify the
+Program, your modified version must prominently offer all users
+interacting with it remotely through a computer network (if your version
+supports such interaction) an opportunity to receive the Corresponding
+Source of your version by providing access to the Corresponding Source
+from a network server at no charge, through some standard or customary
+means of facilitating copying of software. This Corresponding Source
+shall include the Corresponding Source for any work covered by version 3
+of the GNU General Public License that is incorporated pursuant to the
+following paragraph.
+
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the work with which it is combined will remain governed by version
+3 of the GNU General Public License.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU Affero General Public License from time to time. Such new versions
+will be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU Affero General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU Affero General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU Affero General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+ <one line to give the program's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU Affero General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU Affero General Public License for more details.
+
+ You should have received a copy of the GNU Affero General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+ If your software can interact with users remotely through a computer
+network, you should also make sure that it provides a way for users to
+get its source. For example, if your program is a web application, its
+interface could display a "Source" link that leads users to an archive
+of the code. There are many ways you could offer source, and different
+solutions will be better for different programs; see section 13 for the
+specific requirements.
+
+ You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU AGPL, see
+<http://www.gnu.org/licenses/>.
diff --git a/build/git/hooks/coding-standards.sh b/build/git/hooks/coding-standards.sh
new file mode 100755
index 0000000..d4e4c71
--- /dev/null
+++ b/build/git/hooks/coding-standards.sh
@@ -0,0 +1,128 @@
+#!/usr/bin/env ruby
+
+# This script can be installed as a git update hook.
+
+# It can also be installed as a gitolite 'hooklet' in the
+# hooks/common/update.secondary.d/ directory.
+
+# NOTE: this script runs under the same assumptions as the 'update' hook, so
+# the starting directory must be maintained and arguments must be passed on.
+
+$refname = ARGV[0]
+$oldrev = ARGV[1]
+$newrev = ARGV[2]
+$user = ENV['USER']
+
+def blacklist bl
+ all_revs = `git rev-list #{$oldrev}..#{$newrev}`.split("\n")
+ all_revs.each do |rev|
+ bl.each do |b|
+ if rev == b
+ puts "Revision #{b} is blacklisted, you must remove it from your branch (possibly using git rebase) before you can push."
+ exit 1
+ end
+ end
+ end
+end
+
+blacklist ['26d74dc0524c87c5dcc0c76040ce413a4848b57a']
+
+# Only enforce policy on the master branch
+exit 0 if $refname != 'refs/heads/master'
+
+puts "Enforcing Policies... \n(#{$refname}) (#{$oldrev[0,6]}) (#{$newrev[0,6]})"
+
+$regex = /\[ref: (\d+)\]/
+
+$broken_commit_message = /Please enter a commit message to explain why this merge is necessary/
+$wrong_way_merge_master = /Merge( remote-tracking)? branch '([^\/]+\/)?master' into/
+$merge_master = /Merge branch '[^']+'((?! into)| into master)/
+$pull_merge = /Merge branch 'master' of /
+$refs_or_closes_or_no_issue = /(refs #|closes #|fixes #|no issue #)/i
+
+# Enforce the custom commit message format
+def check_message_format
+ all_revs = `git rev-list --first-parent #{$oldrev}..#{$newrev}`.split("\n")
+ merge_revs = `git rev-list --first-parent --min-parents=2 #{$oldrev}..#{$newrev}`.split("\n")
+ # single_revs = `git rev-list --first-parent --max-parents=1 #{$oldrev}..#{$newrev}`.split("\n")
+ broken = false
+ no_ff = false
+
+ merge_revs.each do |rev|
+ message = `git cat-file commit #{rev} | sed '1,/^$/d'`
+ if $wrong_way_merge_master.match(message)
+ puts "\n[POLICY] Only non-fast-forward merges into master are allowed. Please"
+ puts "reset your master branch:"
+ puts " git reset --hard origin/master"
+ puts "and then merge your branch with the --no-ff option:"
+ puts " git merge your-branch --no-ff\n"
+ puts "Remember to add a reference to an issue number in the merge commit!\n"
+ puts "\n******************************************************************\n"
+ puts "\nOffending commit: #{rev}\n"
+ puts "\nOffending commit message:\n"
+ puts message
+ puts "\n******************************************************************\n"
+ puts "\n\n"
+ broken = true
+ no_ff = true
+ elsif $pull_merge.match(message)
+ puts "\n[POLICY] This appears to be a git pull merge of remote master into local"
+ puts "master. In order to maintain a linear first-parent history of master,"
+ puts "please reset your branch and remerge or rebase using the latest master.\n"
+ puts "\n******************************************************************\n"
+ puts "\nOffending commit: #{rev}\n"
+ puts "\nOffending commit message:\n\n"
+ puts message
+ puts "\n******************************************************************\n"
+ puts "\n\n"
+ broken = true
+ elsif not $merge_master.match(message)
+ puts "\n[POLICY] This does not appear to be a merge of a feature"
+ puts "branch into master. Merges must follow the format"
+ puts "\"Merge branch 'feature-branch'\".\n"
+ puts "\n******************************************************************\n"
+ puts "\nOffending commit: #{rev}\n"
+ puts "\nOffending commit message:\n\n"
+ puts message
+ puts "\n******************************************************************\n"
+ puts "\n\n"
+ broken = true
+ end
+ end
+
+ all_revs.each do |rev|
+ message = `git cat-file commit #{rev} | sed '1,/^$/d'`
+ if $broken_commit_message.match(message)
+ puts "\n[POLICY] Rejected broken commit message for including boilerplate"
+ puts "instruction text.\n"
+ puts "\n******************************************************************\n"
+ puts "\nOffending commit: #{rev}\n"
+ puts "\nOffending commit message:\n\n"
+ puts message
+ puts "\n******************************************************************\n"
+ puts "\n\n"
+ broken = true
+ end
+
+ # Do not test when the commit is a no_ff merge (which will be rejected), because
+ # this test will complain about *every* commit in the merge otherwise, obscuring
+ # the real reason for the rejection (the no_ff merge)
+ if not no_ff and not $refs_or_closes_or_no_issue.match(message)
+ puts "\n[POLICY] All commits to master must include an issue using \"refs #\" or"
+ puts "\"closes #\", or specify \"no issue #\"\n"
+ puts "\n******************************************************************\n"
+ puts "\nOffending commit: #{rev}\n"
+ puts "\nOffending commit message:\n\n"
+ puts message
+ puts "\n******************************************************************\n"
+ puts "\n\n"
+ broken = true
+ end
+ end
+
+ if broken
+ exit 1
+ end
+end
+
+check_message_format
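The header comments above describe two installation modes for this hook. A minimal sketch of the plain-git case, assuming a bare repository at /srv/git/arvados.git (the path is an assumption, not part of this patch):

    # Install the policy script as the repository's 'update' hook; git runs it
    # on every push as: hooks/update <refname> <oldrev> <newrev>
    install -m 0755 build/git/hooks/coding-standards.sh /srv/git/arvados.git/hooks/update

For the gitolite case, the same file would instead be dropped into hooks/common/update.secondary.d/, as the comments note.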
diff --git a/build/install/easy-docker-install.sh b/build/install/easy-docker-install.sh
new file mode 100755
index 0000000..fe6e186
--- /dev/null
+++ b/build/install/easy-docker-install.sh
@@ -0,0 +1,87 @@
+#!/usr/bin/env bash
+
+# This script is intended to make Arvados installation easy. It will download the
+# latest copy of the Arvados docker images as well as the arvdock command. It
+# then uses arvdock to spin up Arvados on this computer.
+#
+# The latest version of this script is available at http://get.arvados.org, so that this
+# command does the right thing:
+#
+# $ \curl -sSL http://get.arvados.org | bash
+#
+# Prerequisites: working docker installation. Run this script as a user who is a member
+# of the docker group.
+
+COLUMNS=80
+
+fail () {
+ title "$*"
+ exit 1
+}
+
+title () {
+ msg="********** $1 **********"; printf "\n%*s\n\n" $(((${#msg}+$COLUMNS)/2)) "$msg"
+}
+
+docker_pull () {
+ $DOCKER pull $*
+
+ ECODE=$?
+
+ if [[ "$ECODE" != "0" ]]; then
+ title "$DOCKER pull $* failed"
+ exit $ECODE
+ fi
+}
+
+main () {
+
+ \which which >/dev/null 2>&1 || fail "Error: could not find 'which' command."
+
+ # find the docker binary
+ DOCKER=`which docker.io`
+
+ if [[ "$DOCKER" == "" ]]; then
+ DOCKER=`which docker`
+ fi
+
+ if [[ "$DOCKER" == "" ]]; then
+ fail "Error: you need to have docker installed. Could not find the docker executable."
+ fi
+
+ echo
+ echo "If necessary, this command will download the latest Arvados docker images."
+ echo "The download can take a long time, depending on the speed of your internet connection."
+ echo "When the images are downloaded, it will then start an Arvados environment on this computer."
+ echo
+ docker_pull arvados/workbench
+ docker_pull arvados/doc
+ docker_pull arvados/keep
+ docker_pull arvados/shell
+ docker_pull arvados/sso
+ docker_pull arvados/compute
+ docker_pull arvados/keep
+ docker_pull arvados/keepproxy
+ docker_pull arvados/api
+ docker_pull crosbymichael/skydns
+ docker_pull crosbymichael/skydock
+
+ # Now download arvdock and start the containers
+ echo
+ echo Downloading arvdock
+ echo
+ \curl -sSL https://raw.githubusercontent.com/curoverse/arvados/master/docker/arvdock -o arvdock
+ chmod 755 arvdock
+
+ echo
+ echo Starting the docker containers
+ echo
+ ./arvdock start
+
+ echo To stop the containers, run
+ echo
+ echo ./arvdock stop
+ echo
+}
+
+main
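As its header comments state, the installer is meant to be fetched and piped to bash by a user in the docker group; a usage sketch, with the local-checkout path being an assumption:

    # One-liner from the script's own header
    \curl -sSL http://get.arvados.org | bash

    # Equivalent invocation from a local checkout
    bash build/install/easy-docker-install.sh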
diff --git a/build/jenkins/create-plot-data-from-log.sh b/build/jenkins/create-plot-data-from-log.sh
new file mode 100755
index 0000000..ce3bfed
--- /dev/null
+++ b/build/jenkins/create-plot-data-from-log.sh
@@ -0,0 +1,59 @@
+#!/bin/bash
+
+build=$1
+file=$2
+outputdir=$3
+
+usage() {
+ echo "./$0 build_number file_to_parse output_dir"
+ echo "this script will use the build output to generate *csv and *txt"
+ echo "for jenkins plugin plot https://github.com/jenkinsci/plot-plugin/"
+}
+
+if [ $# -ne 3 ]
+then
+ usage
+ exit 1
+fi
+
+if [ ! -e $file ]
+then
+ usage
+ echo "$file doesn't exist! exiting"
+ exit 2
+fi
+if [ ! -w $outputdir ]
+then
+ usage
+ echo "$outputdir isn't writeable! exiting"
+ exit 3
+fi
+
+#------------------------------
+## MAXLINES is the number of lines that will be read after the pattern
+## is matched (the logfile could be hundreds of thousands of lines long).
+## 1000 should be safe enough to capture all the output of an individual test
+MAXLINES=1000
+
+## TODO: check $build and $file make sense
+
+for test in \
+ test_Create_and_show_large_collection_with_manifest_text_of_20000000 \
+ test_Create,_show,_and_update_description_for_large_collection_with_manifest_text_of_100000 \
+ test_Create_one_large_collection_of_20000000_and_one_small_collection_of_10000_and_combine_them
+do
+ cleaned_test=$(echo $test | tr -d ",.:;/")
+ (zgrep -i -E -A$MAXLINES "^[A-Za-z0-9]+Test: $test" $file && echo "----") | tail -n +1 | tail --lines=+3|grep -B$MAXLINES -E "^-*$" -m1 > $outputdir/$cleaned_test-$build.txt
+ result=$?
+ if [ $result -eq 0 ]
+ then
+ echo processing $outputdir/$cleaned_test-$build.txt creating $outputdir/$cleaned_test.csv
+ echo $(grep ^Completed $outputdir/$cleaned_test-$build.txt | perl -n -e '/^Completed (.*) in [0-9]+ms.*$/;print "".++$line."-$1,";' | perl -p -e 's/,$//g'|tr " " "_" ) > $outputdir/$cleaned_test.csv
+ echo $(grep ^Completed $outputdir/$cleaned_test-$build.txt | perl -n -e '/^Completed.*in ([0-9]+)ms.*$/;print "$1,";' | perl -p -e 's/,$//g' ) >> $outputdir/$cleaned_test.csv
+ #echo URL=https://ci.curoverse.com/view/job/arvados-api-server/ws/apps/workbench/log/$cleaned_test-$build.txt/*view*/ >> $outputdir/$test.properties
+ else
+ echo "$test was't found on $file"
+ cleaned_test=$(echo $test | tr -d ",.:;/")
+ > $outputdir/$cleaned_test.csv
+ fi
+done
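A hypothetical invocation of the script above, using an example build number, a gzipped Jenkins console log, and a writable output directory (all three argument values are assumptions):

    # For each test named in the for loop, writes <test>-42.txt and a two-line
    # <test>.csv (step names, then timings in ms) under the output directory.
    ./create-plot-data-from-log.sh 42 console-42.log.gz /var/lib/jenkins/plots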
diff --git a/build/jenkins/libcloud-pin b/build/jenkins/libcloud-pin
new file mode 100644
index 0000000..3fa07e6
--- /dev/null
+++ b/build/jenkins/libcloud-pin
@@ -0,0 +1 @@
+LIBCLOUD_PIN=0.20.2.dev1
\ No newline at end of file
diff --git a/build/jenkins/package-build-dockerfiles/.gitignore b/build/jenkins/package-build-dockerfiles/.gitignore
new file mode 100644
index 0000000..ceee9fa
--- /dev/null
+++ b/build/jenkins/package-build-dockerfiles/.gitignore
@@ -0,0 +1,2 @@
+*/generated
+common-generated/
diff --git a/build/jenkins/package-build-dockerfiles/Makefile b/build/jenkins/package-build-dockerfiles/Makefile
new file mode 100644
index 0000000..70fbf28
--- /dev/null
+++ b/build/jenkins/package-build-dockerfiles/Makefile
@@ -0,0 +1,29 @@
+all: centos6/generated debian7/generated debian8/generated ubuntu1204/generated ubuntu1404/generated
+
+centos6/generated: common-generated-all
+ test -d centos6/generated || mkdir centos6/generated
+ cp -rlt centos6/generated common-generated/*
+
+debian7/generated: common-generated-all
+ test -d debian7/generated || mkdir debian7/generated
+ cp -rlt debian7/generated common-generated/*
+
+debian8/generated: common-generated-all
+ test -d debian8/generated || mkdir debian8/generated
+ cp -rlt debian8/generated common-generated/*
+
+ubuntu1204/generated: common-generated-all
+ test -d ubuntu1204/generated || mkdir ubuntu1204/generated
+ cp -rlt ubuntu1204/generated common-generated/*
+
+ubuntu1404/generated: common-generated-all
+ test -d ubuntu1404/generated || mkdir ubuntu1404/generated
+ cp -rlt ubuntu1404/generated common-generated/*
+
+common-generated-all: common-generated/golang-amd64.tar.gz
+
+common-generated/golang-amd64.tar.gz: common-generated
+ wget -cqO common-generated/golang-amd64.tar.gz https://storage.googleapis.com/golang/go1.4.2.linux-amd64.tar.gz
+
+common-generated:
+ mkdir common-generated
diff --git a/build/jenkins/package-build-dockerfiles/README b/build/jenkins/package-build-dockerfiles/README
new file mode 100644
index 0000000..0dfab94
--- /dev/null
+++ b/build/jenkins/package-build-dockerfiles/README
@@ -0,0 +1,13 @@
+==================
+DOCKER IMAGE BUILD
+==================
+
+1. `make`
+2. `cd DISTRO`
+3. `docker build -t arvados/build:DISTRO .`
+
+==============
+BUILD PACKAGES
+==============
+
+`docker run -v /path/to/your/arvados-dev/jenkins:/jenkins -v /path/to/your/arvados:/arvados arvados/build:DISTRO`
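A worked pass through the three README steps plus the package build itself, using debian8 as DISTRO (the host-side paths are assumptions):

    make                                     # fetches the golang tarball into each */generated
    cd debian8
    docker build -t arvados/build:debian8 .
    docker run -v "$HOME/arvados-dev/jenkins:/jenkins" \
               -v "$HOME/arvados:/arvados" arvados/build:debian8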
diff --git a/build/jenkins/package-build-dockerfiles/build-all-build-containers.sh b/build/jenkins/package-build-dockerfiles/build-all-build-containers.sh
new file mode 100755
index 0000000..34ffcce
--- /dev/null
+++ b/build/jenkins/package-build-dockerfiles/build-all-build-containers.sh
@@ -0,0 +1,16 @@
+#!/bin/bash
+
+make
+
+for target in `find -maxdepth 1 -type d |grep -v generated`; do
+ if [[ "$target" == "." ]]; then
+ continue
+ fi
+ target=${target#./}
+ echo $target
+ cd $target
+ docker build -t arvados/build:$target .
+ cd ..
+done
+
+
diff --git a/build/jenkins/package-build-dockerfiles/centos6/Dockerfile b/build/jenkins/package-build-dockerfiles/centos6/Dockerfile
new file mode 100644
index 0000000..cfd94c8
--- /dev/null
+++ b/build/jenkins/package-build-dockerfiles/centos6/Dockerfile
@@ -0,0 +1,31 @@
+FROM centos:6
+MAINTAINER Brett Smith <brett at curoverse.com>
+
+# Install build dependencies provided in base distribution
+RUN yum -q -y install make automake gcc gcc-c++ libyaml-devel patch readline-devel zlib-devel libffi-devel openssl-devel bzip2 libtool bison sqlite-devel rpm-build git perl-ExtUtils-MakeMaker libattr-devel nss-devel libcurl-devel which tar scl-utils centos-release-SCL postgresql-devel
+
+# Install golang binary
+ADD generated/golang-amd64.tar.gz /usr/local/
+RUN ln -s /usr/local/go/bin/go /usr/local/bin/
+
+# Install RVM
+RUN gpg --keyserver pool.sks-keyservers.net --recv-keys D39DC0E3 && \
+ curl -L https://get.rvm.io | bash -s stable && \
+ /usr/local/rvm/bin/rvm install 2.1 && \
+ /usr/local/rvm/bin/rvm alias create default ruby-2.1 && \
+ /usr/local/rvm/bin/rvm-exec default gem install bundler fpm
+
+# Need to "touch" RPM database to workaround bug in interaction between
+# overlayfs and yum (https://bugzilla.redhat.com/show_bug.cgi?id=1213602)
+RUN touch /var/lib/rpm/* && yum -q -y install python27 python33
+RUN scl enable python33 "easy_install-3.3 pip" && scl enable python27 "easy_install-2.7 pip"
+
+RUN cd /tmp && \
+ curl -OL 'http://pkgs.repoforge.org/rpmforge-release/rpmforge-release-0.5.3-1.el6.rf.x86_64.rpm' && \
+ rpm -ivh rpmforge-release-0.5.3-1.el6.rf.x86_64.rpm && \
+ sed -i 's/enabled = 0/enabled = 1/' /etc/yum.repos.d/rpmforge.repo
+
+RUN touch /var/lib/rpm/* && yum install --assumeyes git
+
+ENV WORKSPACE /arvados
+CMD ["scl", "enable", "python33", "python27", "/usr/local/rvm/bin/rvm-exec default bash /jenkins/run-build-packages.sh --target centos6"]
diff --git a/build/jenkins/package-build-dockerfiles/debian7/Dockerfile b/build/jenkins/package-build-dockerfiles/debian7/Dockerfile
new file mode 100644
index 0000000..0d04590
--- /dev/null
+++ b/build/jenkins/package-build-dockerfiles/debian7/Dockerfile
@@ -0,0 +1,19 @@
+FROM debian:wheezy
+MAINTAINER Ward Vandewege <ward at curoverse.com>
+
+# Install dependencies and set up system.
+RUN /usr/bin/apt-get update && /usr/bin/apt-get install -q -y python2.7-dev python3 python-setuptools python3-setuptools libcurl4-gnutls-dev curl git procps libattr1-dev libfuse-dev libpq-dev python-pip
+
+# Install RVM
+RUN gpg --keyserver pool.sks-keyservers.net --recv-keys D39DC0E3 && \
+ curl -L https://get.rvm.io | bash -s stable && \
+ /usr/local/rvm/bin/rvm install 2.1 && \
+ /usr/local/rvm/bin/rvm alias create default ruby-2.1 && \
+ /usr/local/rvm/bin/rvm-exec default gem install bundler fpm
+
+# Install golang binary
+ADD generated/golang-amd64.tar.gz /usr/local/
+RUN ln -s /usr/local/go/bin/go /usr/local/bin/
+
+ENV WORKSPACE /arvados
+CMD ["/usr/local/rvm/bin/rvm-exec", "default", "bash", "/jenkins/run-build-packages.sh", "--target", "debian7"]
diff --git a/build/jenkins/package-build-dockerfiles/debian8/Dockerfile b/build/jenkins/package-build-dockerfiles/debian8/Dockerfile
new file mode 100644
index 0000000..fcd390f
--- /dev/null
+++ b/build/jenkins/package-build-dockerfiles/debian8/Dockerfile
@@ -0,0 +1,19 @@
+FROM debian:jessie
+MAINTAINER Ward Vandewege <ward at curoverse.com>
+
+# Install dependencies and set up system.
+RUN /usr/bin/apt-get update && /usr/bin/apt-get install -q -y python2.7-dev python3 python-setuptools python3-setuptools libcurl4-gnutls-dev curl git procps libattr1-dev libfuse-dev libgnutls28-dev libpq-dev python-pip
+
+# Install RVM
+RUN gpg --keyserver pool.sks-keyservers.net --recv-keys D39DC0E3 && \
+ curl -L https://get.rvm.io | bash -s stable && \
+ /usr/local/rvm/bin/rvm install 2.1 && \
+ /usr/local/rvm/bin/rvm alias create default ruby-2.1 && \
+ /usr/local/rvm/bin/rvm-exec default gem install bundler fpm
+
+# Install golang binary
+ADD generated/golang-amd64.tar.gz /usr/local/
+RUN ln -s /usr/local/go/bin/go /usr/local/bin/
+
+ENV WORKSPACE /arvados
+CMD ["/usr/local/rvm/bin/rvm-exec", "default", "bash", "/jenkins/run-build-packages.sh", "--target", "debian8"]
diff --git a/build/jenkins/package-build-dockerfiles/ubuntu1204/Dockerfile b/build/jenkins/package-build-dockerfiles/ubuntu1204/Dockerfile
new file mode 100644
index 0000000..158053c
--- /dev/null
+++ b/build/jenkins/package-build-dockerfiles/ubuntu1204/Dockerfile
@@ -0,0 +1,19 @@
+FROM ubuntu:precise
+MAINTAINER Ward Vandewege <ward at curoverse.com>
+
+# Install dependencies and set up system.
+RUN /usr/bin/apt-get update && /usr/bin/apt-get install -q -y python2.7-dev python3 python-setuptools python3-setuptools libcurl4-gnutls-dev curl git libattr1-dev libfuse-dev libpq-dev python-pip build-essential
+
+# Install RVM
+RUN gpg --keyserver pool.sks-keyservers.net --recv-keys D39DC0E3 && \
+ curl -L https://get.rvm.io | bash -s stable && \
+ /usr/local/rvm/bin/rvm install 2.1 && \
+ /usr/local/rvm/bin/rvm alias create default ruby-2.1 && \
+ /usr/local/rvm/bin/rvm-exec default gem install bundler fpm
+
+# Install golang binary
+ADD generated/golang-amd64.tar.gz /usr/local/
+RUN ln -s /usr/local/go/bin/go /usr/local/bin/
+
+ENV WORKSPACE /arvados
+CMD ["/usr/local/rvm/bin/rvm-exec", "default", "bash", "/jenkins/run-build-packages.sh", "--target", "ubuntu1204"]
diff --git a/build/jenkins/package-build-dockerfiles/ubuntu1404/Dockerfile b/build/jenkins/package-build-dockerfiles/ubuntu1404/Dockerfile
new file mode 100644
index 0000000..0b8ee7a
--- /dev/null
+++ b/build/jenkins/package-build-dockerfiles/ubuntu1404/Dockerfile
@@ -0,0 +1,19 @@
+FROM ubuntu:trusty
+MAINTAINER Brett Smith <brett at curoverse.com>
+
+# Install dependencies and set up system.
+RUN /usr/bin/apt-get update && /usr/bin/apt-get install -q -y python2.7-dev python3 python-setuptools python3-setuptools libcurl4-gnutls-dev curl git libattr1-dev libfuse-dev libpq-dev python-pip
+
+# Install RVM
+RUN gpg --keyserver pool.sks-keyservers.net --recv-keys D39DC0E3 && \
+ curl -L https://get.rvm.io | bash -s stable && \
+ /usr/local/rvm/bin/rvm install 2.1 && \
+ /usr/local/rvm/bin/rvm alias create default ruby-2.1 && \
+ /usr/local/rvm/bin/rvm-exec default gem install bundler fpm
+
+# Install golang binary
+ADD generated/golang-amd64.tar.gz /usr/local/
+RUN ln -s /usr/local/go/bin/go /usr/local/bin/
+
+ENV WORKSPACE /arvados
+CMD ["/usr/local/rvm/bin/rvm-exec", "default", "bash", "/jenkins/run-build-packages.sh", "--target", "ubuntu1404"]
diff --git a/build/jenkins/package-test-dockerfiles/centos6/Dockerfile b/build/jenkins/package-test-dockerfiles/centos6/Dockerfile
new file mode 100644
index 0000000..69927a1
--- /dev/null
+++ b/build/jenkins/package-test-dockerfiles/centos6/Dockerfile
@@ -0,0 +1,20 @@
+FROM centos:6
+MAINTAINER Peter Amstutz <peter.amstutz at curoverse.com>
+
+RUN yum -q install --assumeyes scl-utils centos-release-SCL \
+ which tar
+
+# Install RVM
+RUN touch /var/lib/rpm/* && \
+ gpg --keyserver pool.sks-keyservers.net --recv-keys D39DC0E3 && \
+ curl -L https://get.rvm.io | bash -s stable && \
+ /usr/local/rvm/bin/rvm install 2.1 && \
+ /usr/local/rvm/bin/rvm alias create default ruby-2.1 && \
+ /usr/local/rvm/bin/rvm-exec default gem install bundler fpm
+
+RUN cd /tmp && \
+ curl -OL 'http://pkgs.repoforge.org/rpmforge-release/rpmforge-release-0.5.3-1.el6.rf.x86_64.rpm' && \
+ rpm -ivh rpmforge-release-0.5.3-1.el6.rf.x86_64.rpm && \
+ sed -i 's/enabled = 0/enabled = 1/' /etc/yum.repos.d/rpmforge.repo
+
+COPY localrepo.repo /etc/yum.repos.d/localrepo.repo
\ No newline at end of file
diff --git a/build/jenkins/package-test-dockerfiles/centos6/localrepo.repo b/build/jenkins/package-test-dockerfiles/centos6/localrepo.repo
new file mode 100644
index 0000000..ac6b898
--- /dev/null
+++ b/build/jenkins/package-test-dockerfiles/centos6/localrepo.repo
@@ -0,0 +1,5 @@
+[localrepo]
+name=Arvados Test
+baseurl=file:///arvados/packages/centos6
+gpgcheck=0
+enabled=1
diff --git a/build/jenkins/package-test-dockerfiles/debian7/Dockerfile b/build/jenkins/package-test-dockerfiles/debian7/Dockerfile
new file mode 100644
index 0000000..c9a2fdc
--- /dev/null
+++ b/build/jenkins/package-test-dockerfiles/debian7/Dockerfile
@@ -0,0 +1,14 @@
+FROM debian:7
+MAINTAINER Peter Amstutz <peter.amstutz at curoverse.com>
+
+# Install RVM
+RUN apt-get update && apt-get -y install curl procps && \
+ gpg --keyserver pool.sks-keyservers.net --recv-keys D39DC0E3 && \
+ curl -L https://get.rvm.io | bash -s stable && \
+ /usr/local/rvm/bin/rvm install 2.1 && \
+ /usr/local/rvm/bin/rvm alias create default ruby-2.1
+
+# udev daemon can't start in a container, so don't try.
+RUN mkdir -p /etc/udev/disabled
+
+RUN echo "deb file:///arvados/packages/debian7/ /" >>/etc/apt/sources.list
diff --git a/build/jenkins/package-test-dockerfiles/debian8/Dockerfile b/build/jenkins/package-test-dockerfiles/debian8/Dockerfile
new file mode 100644
index 0000000..cde1847
--- /dev/null
+++ b/build/jenkins/package-test-dockerfiles/debian8/Dockerfile
@@ -0,0 +1,14 @@
+FROM debian:8
+MAINTAINER Peter Amstutz <peter.amstutz@curoverse.com>
+
+# Install RVM
+RUN apt-get update && apt-get -y install curl && \
+ gpg --keyserver pool.sks-keyservers.net --recv-keys D39DC0E3 && \
+ curl -L https://get.rvm.io | bash -s stable && \
+ /usr/local/rvm/bin/rvm install 2.1 && \
+ /usr/local/rvm/bin/rvm alias create default ruby-2.1
+
+# udev daemon can't start in a container, so don't try.
+RUN mkdir -p /etc/udev/disabled
+
+RUN echo "deb file:///arvados/packages/debian8/ /" >>/etc/apt/sources.list
diff --git a/build/jenkins/package-test-dockerfiles/ubuntu1204/Dockerfile b/build/jenkins/package-test-dockerfiles/ubuntu1204/Dockerfile
new file mode 100644
index 0000000..0cb77c8
--- /dev/null
+++ b/build/jenkins/package-test-dockerfiles/ubuntu1204/Dockerfile
@@ -0,0 +1,14 @@
+FROM ubuntu:precise
+MAINTAINER Peter Amstutz <peter.amstutz@curoverse.com>
+
+# Install RVM
+RUN apt-get update && apt-get -y install curl && \
+ gpg --keyserver pool.sks-keyservers.net --recv-keys D39DC0E3 && \
+ curl -L https://get.rvm.io | bash -s stable && \
+ /usr/local/rvm/bin/rvm install 2.1 && \
+ /usr/local/rvm/bin/rvm alias create default ruby-2.1
+
+# udev daemon can't start in a container, so don't try.
+RUN mkdir -p /etc/udev/disabled
+
+RUN echo "deb file:///arvados/packages/ubuntu1204/ /" >>/etc/apt/sources.list
\ No newline at end of file
diff --git a/build/jenkins/package-test-dockerfiles/ubuntu1404/Dockerfile b/build/jenkins/package-test-dockerfiles/ubuntu1404/Dockerfile
new file mode 100644
index 0000000..6c4d0e9
--- /dev/null
+++ b/build/jenkins/package-test-dockerfiles/ubuntu1404/Dockerfile
@@ -0,0 +1,14 @@
+FROM ubuntu:trusty
+MAINTAINER Peter Amstutz <peter.amstutz@curoverse.com>
+
+# Install RVM
+RUN apt-get update && apt-get -y install curl && \
+ gpg --keyserver pool.sks-keyservers.net --recv-keys D39DC0E3 && \
+ curl -L https://get.rvm.io | bash -s stable && \
+ /usr/local/rvm/bin/rvm install 2.1 && \
+ /usr/local/rvm/bin/rvm alias create default ruby-2.1
+
+# udev daemon can't start in a container, so don't try.
+RUN mkdir -p /etc/udev/disabled
+
+RUN echo "deb file:///arvados/packages/ubuntu1404/ /" >>/etc/apt/sources.list
\ No newline at end of file
diff --git a/build/jenkins/package-testing/common-test-packages.sh b/build/jenkins/package-testing/common-test-packages.sh
new file mode 100755
index 0000000..2dc67ab
--- /dev/null
+++ b/build/jenkins/package-testing/common-test-packages.sh
@@ -0,0 +1,28 @@
+#!/bin/sh
+
+set -eu
+
+FAIL=0
+
+echo
+
+while read so && [ -n "$so" ]; do
+ if ldd "$so" | grep "not found" ; then
+ echo "^^^ Missing while scanning $so ^^^"
+ FAIL=1
+ fi
+done <<EOF
+$(find -name '*.so')
+EOF
+
+if test -x "/jenkins/package-testing/test-package-$1.sh" ; then
+ if ! "/jenkins/package-testing/test-package-$1.sh" ; then
+ FAIL=1
+ fi
+fi
+
+if test $FAIL = 0 ; then
+ echo "Package $1 passed"
+fi
+
+exit $FAIL
diff --git a/build/jenkins/package-testing/deb-common-test-packages.sh b/build/jenkins/package-testing/deb-common-test-packages.sh
new file mode 100755
index 0000000..5f32a60
--- /dev/null
+++ b/build/jenkins/package-testing/deb-common-test-packages.sh
@@ -0,0 +1,37 @@
+#!/bin/bash
+
+set -eu
+
+# Multiple .deb based distros symlink to this script, so extract the target
+# from the invocation path.
+target=$(echo $0 | sed 's/.*test-packages-\([^.]*\)\.sh.*/\1/')
+
+export ARV_PACKAGES_DIR="/arvados/packages/$target"
+
+dpkg-query --show > "$ARV_PACKAGES_DIR/$1.before"
+
+apt-get -qq update
+apt-get --assume-yes --force-yes install "$1"
+
+dpkg-query --show > "$ARV_PACKAGES_DIR/$1.after"
+
+set +e
+diff "$ARV_PACKAGES_DIR/$1.before" "$ARV_PACKAGES_DIR/$1.after" > "$ARV_PACKAGES_DIR/$1.diff"
+set -e
+
+mkdir -p /tmp/opts
+cd /tmp/opts
+
+export ARV_PACKAGES_DIR="/arvados/packages/$target"
+
+dpkg-deb -x $(ls -t "$ARV_PACKAGES_DIR/$1"_*.deb | head -n1) .
+
+while read so && [ -n "$so" ]; do
+ echo
+ echo "== Packages dependencies for $so =="
+ ldd "$so" | awk '($3 ~ /^\//){print $3}' | sort -u | xargs dpkg -S | cut -d: -f1 | sort -u
+done <<EOF
+$(find -name '*.so')
+EOF
+
+exec /jenkins/package-testing/common-test-packages.sh "$1"
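Because the per-distro test-packages-<target>.sh entry points are plain symlinks to this script (added further down in this patch), the sed expression above recovers the distro name from whichever name the script was invoked under. A quick sanity check of that extraction, using invocation paths that match the /jenkins mount point these scripts assume:

    $ echo /jenkins/package-testing/test-packages-debian8.sh | sed 's/.*test-packages-\([^.]*\)\.sh.*/\1/'
    debian8
    $ echo ./test-packages-ubuntu1404.sh | sed 's/.*test-packages-\([^.]*\)\.sh.*/\1/'
    ubuntu1404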
diff --git a/build/jenkins/package-testing/test-package-arvados-api-server.sh b/build/jenkins/package-testing/test-package-arvados-api-server.sh
new file mode 100755
index 0000000..e975448
--- /dev/null
+++ b/build/jenkins/package-testing/test-package-arvados-api-server.sh
@@ -0,0 +1,20 @@
+#!/bin/sh
+set -e
+cd /var/www/arvados-api/current/
+
+case "$TARGET" in
+ debian*|ubuntu*)
+ apt-get install -y nginx
+ dpkg-reconfigure arvados-api-server
+ ;;
+ centos6)
+ yum install --assumeyes httpd
+ yum reinstall --assumeyes arvados-api-server
+ ;;
+ *)
+ echo -e "$0: Unknown target '$TARGET'.\n" >&2
+ exit 1
+ ;;
+esac
+
+/usr/local/rvm/bin/rvm-exec default bundle list >"$ARV_PACKAGES_DIR/arvados-api-server.gems"
diff --git a/build/jenkins/package-testing/test-package-arvados-node-manager.sh b/build/jenkins/package-testing/test-package-arvados-node-manager.sh
new file mode 100755
index 0000000..2f416d1
--- /dev/null
+++ b/build/jenkins/package-testing/test-package-arvados-node-manager.sh
@@ -0,0 +1,7 @@
+#!/bin/sh
+exec python <<EOF
+import libcloud.compute.types
+import libcloud.compute.providers
+libcloud.compute.providers.get_driver(libcloud.compute.types.Provider.AZURE_ARM)
+print "Successfully imported compatible libcloud library"
+EOF
diff --git a/build/jenkins/package-testing/test-package-arvados-sso-server.sh b/build/jenkins/package-testing/test-package-arvados-sso-server.sh
new file mode 100755
index 0000000..c1a377e
--- /dev/null
+++ b/build/jenkins/package-testing/test-package-arvados-sso-server.sh
@@ -0,0 +1,172 @@
+#!/bin/bash
+
+set -e
+
+EXITCODE=0
+DEBUG=${ARVADOS_DEBUG:-0}
+
+STDOUT_IF_DEBUG=/dev/null
+STDERR_IF_DEBUG=/dev/null
+DASHQ_UNLESS_DEBUG=-q
+if [[ "$DEBUG" != 0 ]]; then
+ STDOUT_IF_DEBUG=/dev/stdout
+ STDERR_IF_DEBUG=/dev/stderr
+ DASHQ_UNLESS_DEBUG=
+fi
+
+case "$TARGET" in
+ debian*|ubuntu*)
+ FORMAT=deb
+ ;;
+ centos6)
+ FORMAT=rpm
+ ;;
+ *)
+ echo -e "$0: Unknown target '$TARGET'.\n" >&2
+ exit 1
+ ;;
+esac
+
+if ! [[ -n "$WORKSPACE" ]]; then
+ echo >&2 "$helpmessage"
+ echo >&2
+ echo >&2 "Error: WORKSPACE environment variable not set"
+ echo >&2
+ exit 1
+fi
+
+if ! [[ -d "$WORKSPACE" ]]; then
+ echo >&2 "$helpmessage"
+ echo >&2
+ echo >&2 "Error: $WORKSPACE is not a directory"
+ echo >&2
+ exit 1
+fi
+
+title () {
+ txt="********** $1 **********"
+ printf "\n%*s%s\n\n" $((($COLUMNS-${#txt})/2)) "" "$txt"
+}
+
+checkexit() {
+ if [[ "$1" != "0" ]]; then
+ title "!!!!!! $2 FAILED !!!!!!"
+ fi
+}
+
+
+# Find the SSO server package
+
+cd "$WORKSPACE"
+
+if [[ ! -d "/var/www/arvados-sso" ]]; then
+ echo "/var/www/arvados-sso should exist"
+ exit 1
+fi
+
+if [[ ! -e "/etc/arvados/sso/application.yml" ]]; then
+ mkdir -p /etc/arvados/sso/
+ RANDOM_PASSWORD=`date | md5sum |cut -f1 -d' '`
+ cp config/application.yml.example /etc/arvados/sso/application.yml
+ sed -i -e 's/uuid_prefix: ~/uuid_prefix: zzzzz/' /etc/arvados/sso/application.yml
+ sed -i -e "s/secret_token: ~/secret_token: $RANDOM_PASSWORD/" /etc/arvados/sso/application.yml
+fi
+
+if [[ ! -e "/etc/arvados/sso/database.yml" ]]; then
+ # We need to set up our database configuration now.
+ if [[ "$FORMAT" == "rpm" ]]; then
+ # postgres packaging on CentOS6 is kind of primitive, needs an initdb
+ service postgresql initdb
+ if [ "$TARGET" = "centos6" ]; then
+ sed -i -e "s/127.0.0.1\/32 ident/127.0.0.1\/32 md5/" /var/lib/pgsql/data/pg_hba.conf
+ sed -i -e "s/::1\/128 ident/::1\/128 md5/" /var/lib/pgsql/data/pg_hba.conf
+ fi
+ fi
+ service postgresql start
+
+ RANDOM_PASSWORD=`date | md5sum |cut -f1 -d' '`
+ cat >/etc/arvados/sso/database.yml <<EOF
+production:
+ adapter: postgresql
+ encoding: utf8
+ database: sso_provider_production
+ username: sso_provider_user
+ password: $RANDOM_PASSWORD
+ host: localhost
+EOF
+
+ su postgres -c "psql -c \"CREATE USER sso_provider_user WITH PASSWORD '$RANDOM_PASSWORD'\""
+ su postgres -c "createdb sso_provider_production -O sso_provider_user"
+fi
+
+if [[ "$FORMAT" == "deb" ]]; then
+ # Test 2: the package should reconfigure cleanly
+ dpkg-reconfigure arvados-sso-server || EXITCODE=3
+
+ cd /var/www/arvados-sso/current/
+ /usr/local/rvm/bin/rvm-exec default bundle list >"$ARV_PACKAGES_DIR/arvados-sso-server.gems"
+
+ # Test 3: the package should remove cleanly
+ apt-get remove arvados-sso-server --yes || EXITCODE=3
+
+ checkexit $EXITCODE "apt-get remove arvados-sso-server --yes"
+
+ # Test 4: the package configuration should remove cleanly
+ dpkg --purge arvados-sso-server || EXITCODE=4
+
+ checkexit $EXITCODE "dpkg --purge arvados-sso-server"
+
+ if [[ -e "/var/www/arvados-sso" ]]; then
+ EXITCODE=4
+ fi
+
+ checkexit $EXITCODE "leftover items under /var/www/arvados-sso"
+
+ # Test 5: the package should remove cleanly with --purge
+
+ apt-get remove arvados-sso-server --purge --yes || EXITCODE=5
+
+ checkexit $EXITCODE "apt-get remove arvados-sso-server --purge --yes"
+
+ if [[ -e "/var/www/arvados-sso" ]]; then
+ EXITCODE=5
+ fi
+
+ checkexit $EXITCODE "leftover items under /var/www/arvados-sso"
+
+elif [[ "$FORMAT" == "rpm" ]]; then
+
+ # Set up Nginx first
+ # (courtesy of https://www.phusionpassenger.com/library/walkthroughs/deploy/ruby/ownserver/nginx/oss/el6/install_passenger.html)
+ yum install -q -y epel-release pygpgme curl
+ curl --fail -sSLo /etc/yum.repos.d/passenger.repo https://oss-binaries.phusionpassenger.com/yum/definitions/el-passenger.repo
+ yum install -q -y nginx passenger
+ sed -i -e 's/^# passenger/passenger/' /etc/nginx/conf.d/passenger.conf
+ # Done setting up Nginx
+
+ # Test 2: the package should reinstall cleanly
+ yum --assumeyes reinstall arvados-sso-server || EXITCODE=3
+
+ cd /var/www/arvados-sso/current/
+ /usr/local/rvm/bin/rvm-exec default bundle list >$ARV_PACKAGES_DIR/arvados-sso-server.gems
+
+ # Test 3: the package should remove cleanly
+ yum -q -y remove arvados-sso-server || EXITCODE=3
+
+ checkexit $EXITCODE "yum -q -y remove arvados-sso-server"
+
+ if [[ -e "/var/www/arvados-sso" ]]; then
+ EXITCODE=3
+ fi
+
+ checkexit $EXITCODE "leftover items under /var/www/arvados-sso"
+
+fi
+
+if [[ "$EXITCODE" == "0" ]]; then
+ echo "Testing complete, no errors!"
+else
+ echo "Errors while testing!"
+fi
+
+exit $EXITCODE
diff --git a/build/jenkins/package-testing/test-package-arvados-workbench.sh b/build/jenkins/package-testing/test-package-arvados-workbench.sh
new file mode 100755
index 0000000..1be4dea
--- /dev/null
+++ b/build/jenkins/package-testing/test-package-arvados-workbench.sh
@@ -0,0 +1,20 @@
+#!/bin/sh
+set -e
+cd /var/www/arvados-workbench/current/
+
+case "$TARGET" in
+ debian*|ubuntu*)
+ apt-get install -y nginx
+ dpkg-reconfigure arvados-workbench
+ ;;
+ centos6)
+ yum install --assumeyes httpd
+ yum reinstall --assumeyes arvados-workbench
+ ;;
+ *)
+ echo -e "$0: Unknown target '$TARGET'.\n" >&2
+ exit 1
+ ;;
+esac
+
+/usr/local/rvm/bin/rvm-exec default bundle list >"$ARV_PACKAGES_DIR/arvados-workbench.gems"
diff --git a/build/jenkins/package-testing/test-package-python27-python-arvados-fuse.sh b/build/jenkins/package-testing/test-package-python27-python-arvados-fuse.sh
new file mode 100755
index 0000000..1654be9
--- /dev/null
+++ b/build/jenkins/package-testing/test-package-python27-python-arvados-fuse.sh
@@ -0,0 +1,6 @@
+#!/bin/sh
+
+exec python <<EOF
+import arvados_fuse
+print "Successfully imported arvados_fuse"
+EOF
diff --git a/build/jenkins/package-testing/test-package-python27-python-arvados-python-client.sh b/build/jenkins/package-testing/test-package-python27-python-arvados-python-client.sh
new file mode 100755
index 0000000..0772fbf
--- /dev/null
+++ b/build/jenkins/package-testing/test-package-python27-python-arvados-python-client.sh
@@ -0,0 +1,6 @@
+#!/bin/sh
+
+exec python <<EOF
+import arvados
+print "Successfully imported arvados"
+EOF
diff --git a/build/jenkins/package-testing/test-packages-centos6.sh b/build/jenkins/package-testing/test-packages-centos6.sh
new file mode 100755
index 0000000..4e05364
--- /dev/null
+++ b/build/jenkins/package-testing/test-packages-centos6.sh
@@ -0,0 +1,44 @@
+#!/bin/bash
+
+set -eu
+
+yum -q clean all
+touch /var/lib/rpm/*
+
+export ARV_PACKAGES_DIR=/arvados/packages/centos6
+
+rpm -qa | sort > "$ARV_PACKAGES_DIR/$1.before"
+
+yum install --assumeyes $1
+
+rpm -qa | sort > "$ARV_PACKAGES_DIR/$1.after"
+
+set +e
+diff "$ARV_PACKAGES_DIR/$1.before" "$ARV_PACKAGES_DIR/$1.after" >"$ARV_PACKAGES_DIR/$1.diff"
+set -e
+
+SCL=""
+if scl enable python27 true 2>/dev/null ; then
+ SCL="scl enable python27"
+fi
+
+mkdir -p /tmp/opts
+cd /tmp/opts
+
+rpm2cpio $(ls -t "$ARV_PACKAGES_DIR/$1"-*.rpm | head -n1) | cpio -idm 2>/dev/null
+
+shared=$(find -name '*.so')
+if test -n "$shared" ; then
+ for so in $shared ; do
+ echo
+ echo "== Packages dependencies for $so =="
+ $SCL ldd "$so" \
+ | awk '($3 ~ /^\//){print $3}' | sort -u | xargs rpm -qf | sort -u
+ done
+fi
+
+if test -n "$SCL" ; then
+ exec $SCL "/jenkins/package-testing/common-test-packages.sh '$1'"
+else
+ exec /jenkins/package-testing/common-test-packages.sh "$1"
+fi
diff --git a/build/jenkins/package-testing/test-packages-debian7.sh b/build/jenkins/package-testing/test-packages-debian7.sh
new file mode 120000
index 0000000..54ce94c
--- /dev/null
+++ b/build/jenkins/package-testing/test-packages-debian7.sh
@@ -0,0 +1 @@
+deb-common-test-packages.sh
\ No newline at end of file
diff --git a/build/jenkins/package-testing/test-packages-debian8.sh b/build/jenkins/package-testing/test-packages-debian8.sh
new file mode 120000
index 0000000..54ce94c
--- /dev/null
+++ b/build/jenkins/package-testing/test-packages-debian8.sh
@@ -0,0 +1 @@
+deb-common-test-packages.sh
\ No newline at end of file
diff --git a/build/jenkins/package-testing/test-packages-ubuntu1204.sh b/build/jenkins/package-testing/test-packages-ubuntu1204.sh
new file mode 120000
index 0000000..54ce94c
--- /dev/null
+++ b/build/jenkins/package-testing/test-packages-ubuntu1204.sh
@@ -0,0 +1 @@
+deb-common-test-packages.sh
\ No newline at end of file
diff --git a/build/jenkins/package-testing/test-packages-ubuntu1404.sh b/build/jenkins/package-testing/test-packages-ubuntu1404.sh
new file mode 120000
index 0000000..54ce94c
--- /dev/null
+++ b/build/jenkins/package-testing/test-packages-ubuntu1404.sh
@@ -0,0 +1 @@
+deb-common-test-packages.sh
\ No newline at end of file
diff --git a/build/jenkins/rails-package-scripts/README.md b/build/jenkins/rails-package-scripts/README.md
new file mode 100644
index 0000000..3a93c31
--- /dev/null
+++ b/build/jenkins/rails-package-scripts/README.md
@@ -0,0 +1,14 @@
+When run-build-packages.sh builds a Rails package, it generates the package's pre/post-inst/rm scripts by concatenating:
+
+1. package_name.sh, which defines variables describing where the package's files live and human-readable names for them.
+2. step2.sh, which uses those definitions to set some utility variables and fill in defaults for anything left unset.
+3. stepname.sh, such as postinst.sh or prerm.sh, which uses all of this information to do the actual work.
+
+Since our build process is a tower of shell scripts, concatenating files seemed like the least-bad option for sharing code between these scripts and packages. More advanced code generation would have been too much trouble to integrate into our build process at this point, and injecting portions of files into other files seemed error-prone and likely to introduce bugs into the end result.
+
+postinst.sh lets the earlier parts define a few hooks to control its behavior:
+
+* After it installs the core configuration files (database.yml, application.yml, and production.rb) to the package's configuration directory under /etc/arvados, it calls setup_extra_conffiles. By default this is a no-op function (defined in step2.sh). The API server defines it to set up the old omniauth.rb conffile.
+* Before it restarts nginx, it calls setup_before_nginx_restart. By default this is a no-op function (defined in step2.sh). The API server defines it to set up the internal git repository, if necessary.
+* $RAILSPKG_DATABASE_LOAD_TASK names the Rake task used to load the database. The API server uses db:structure:load; the SSO server uses db:schema:load. Workbench doesn't set this, which causes postinst to skip all database work.
+* If $RAILSPKG_SUPPORTS_CONFIG_CHECK is not 1, postinst skips the config:check Rake task. The SSO server clears this flag because it doesn't ship that task.
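For concreteness, the assembly the README describes amounts to something like the following sketch. The real glue lives in handle_rails_package in run-library.sh, which this patch only sources, so the temporary file name and the fpm flags shown here are illustrative rather than the actual build code:

    # Concatenate the three layers in the order the README lists them to
    # produce the API server's post-install script...
    cat rails-package-scripts/arvados-api-server.sh \
        rails-package-scripts/step2.sh \
        rails-package-scripts/postinst.sh \
        > /tmp/arvados-api-server.postinst

    # ...and attach the result when fpm builds the package, along the lines of:
    #   fpm -s dir -t deb -n arvados-api-server \
    #       --after-install /tmp/arvados-api-server.postinst ...

The same pattern, with prerm.sh or postrm.sh as the final layer, yields the pre- and post-removal scripts (fpm's --before-remove and --after-remove).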
diff --git a/build/jenkins/rails-package-scripts/arvados-api-server.sh b/build/jenkins/rails-package-scripts/arvados-api-server.sh
new file mode 100644
index 0000000..c2b99f0
--- /dev/null
+++ b/build/jenkins/rails-package-scripts/arvados-api-server.sh
@@ -0,0 +1,32 @@
+#!/bin/sh
+# This file declares variables common to all scripts for one Rails package.
+
+PACKAGE_NAME=arvados-api-server
+INSTALL_PATH=/var/www/arvados-api
+CONFIG_PATH=/etc/arvados/api
+DOC_URL="http://doc.arvados.org/install/install-api-server.html#configure"
+
+RAILSPKG_DATABASE_LOAD_TASK=db:structure:load
+setup_extra_conffiles() {
+ setup_conffile initializers/omniauth.rb
+}
+
+setup_before_nginx_restart() {
+ # initialize git_internal_dir
+ # usually /var/lib/arvados/internal.git (set in application.default.yml )
+ if [ "$APPLICATION_READY" = "1" ]; then
+ GIT_INTERNAL_DIR=$($COMMAND_PREFIX bundle exec rake config:check 2>&1 | grep git_internal_dir | awk '{ print $2 }')
+ if [ ! -e "$GIT_INTERNAL_DIR" ]; then
+ run_and_report "Creating git_internal_dir '$GIT_INTERNAL_DIR'" \
+ mkdir -p "$GIT_INTERNAL_DIR"
+ run_and_report "Initializing git_internal_dir '$GIT_INTERNAL_DIR'" \
+ git init --quiet --bare $GIT_INTERNAL_DIR
+ else
+ echo "Initializing git_internal_dir $GIT_INTERNAL_DIR: directory exists, skipped."
+ fi
+ run_and_report "Making sure '$GIT_INTERNAL_DIR' has the right permission" \
+ chown -R "$WWW_OWNER:" "$GIT_INTERNAL_DIR"
+ else
+ echo "Initializing git_internal_dir... skipped."
+ fi
+}
diff --git a/build/jenkins/rails-package-scripts/arvados-sso-server.sh b/build/jenkins/rails-package-scripts/arvados-sso-server.sh
new file mode 100644
index 0000000..10b2ee2
--- /dev/null
+++ b/build/jenkins/rails-package-scripts/arvados-sso-server.sh
@@ -0,0 +1,9 @@
+#!/bin/sh
+# This file declares variables common to all scripts for one Rails package.
+
+PACKAGE_NAME=arvados-sso-server
+INSTALL_PATH=/var/www/arvados-sso
+CONFIG_PATH=/etc/arvados/sso
+DOC_URL="http://doc.arvados.org/install/install-sso.html#configure"
+RAILSPKG_DATABASE_LOAD_TASK=db:schema:load
+RAILSPKG_SUPPORTS_CONFIG_CHECK=0
diff --git a/build/jenkins/rails-package-scripts/arvados-workbench.sh b/build/jenkins/rails-package-scripts/arvados-workbench.sh
new file mode 100644
index 0000000..f2b8a56
--- /dev/null
+++ b/build/jenkins/rails-package-scripts/arvados-workbench.sh
@@ -0,0 +1,7 @@
+#!/bin/sh
+# This file declares variables common to all scripts for one Rails package.
+
+PACKAGE_NAME=arvados-workbench
+INSTALL_PATH=/var/www/arvados-workbench
+CONFIG_PATH=/etc/arvados/workbench
+DOC_URL="http://doc.arvados.org/install/install-workbench-app.html#configure"
diff --git a/build/jenkins/rails-package-scripts/postinst.sh b/build/jenkins/rails-package-scripts/postinst.sh
new file mode 100644
index 0000000..6fac26b
--- /dev/null
+++ b/build/jenkins/rails-package-scripts/postinst.sh
@@ -0,0 +1,251 @@
+#!/bin/sh
+# This code runs after package variable definitions and step2.sh.
+
+set -e
+
+DATABASE_READY=1
+APPLICATION_READY=1
+
+if [ -s "$HOME/.rvm/scripts/rvm" ] || [ -s "/usr/local/rvm/scripts/rvm" ]; then
+ COMMAND_PREFIX="/usr/local/rvm/bin/rvm-exec default"
+else
+ COMMAND_PREFIX=
+fi
+
+report_not_ready() {
+ local ready_flag="$1"; shift
+ local config_file="$1"; shift
+ if [ "1" != "$ready_flag" ]; then cat >&2 <<EOF
+
+PLEASE NOTE:
+
+The $PACKAGE_NAME package was not configured completely because
+$config_file needs some tweaking.
+Please refer to the documentation at
+<$DOC_URL> for more details.
+
+When $(basename "$config_file") has been modified,
+reconfigure or reinstall this package.
+
+EOF
+ fi
+}
+
+report_web_service_warning() {
+ local warning="$1"; shift
+ cat >&2 <<EOF
+
+WARNING: $warning.
+
+To override, set the WEB_SERVICE environment variable to the name of the service
+hosting the Rails server.
+
+On Debian-based systems, reconfigure this package with dpkg-reconfigure.
+
+On RPM-based systems, reinstall this package.
+
+EOF
+}
+
+run_and_report() {
+ # Usage: run_and_report ACTION_MSG CMD
+ # This is the usual wrapper that prints ACTION_MSG, runs CMD, then writes
+ # a message about whether CMD succeeded or failed. Returns the exit code
+ # of CMD.
+ local action_message="$1"; shift
+ local retcode=0
+ echo -n "$action_message..."
+ if "$@"; then
+ echo " done."
+ else
+ retcode=$?
+ echo " failed."
+ fi
+ return $retcode
+}
+
+setup_confdirs() {
+ for confdir in "$@"; do
+ if [ ! -d "$confdir" ]; then
+ install -d -g "$WWW_OWNER" -m 0750 "$confdir"
+ fi
+ done
+}
+
+setup_conffile() {
+ # Usage: setup_conffile CONFFILE_PATH [SOURCE_PATH]
+ # Both paths are relative to RELEASE_CONFIG_PATH.
+ # This function will try to safely ensure that a symbolic link for
+ # the configuration file points from RELEASE_CONFIG_PATH to CONFIG_PATH.
+ # If SOURCE_PATH is given, this function will try to install that file as
+ # the configuration file in CONFIG_PATH, and return 1 if the file in
+ # CONFIG_PATH is unmodified from the source.
+ local conffile_relpath="$1"; shift
+ local conffile_source="$1"
+ local release_conffile="$RELEASE_CONFIG_PATH/$conffile_relpath"
+ local etc_conffile="$CONFIG_PATH/$(basename "$conffile_relpath")"
+
+ # Note that -h can return true and -e will return false simultaneously
+ # when the target is a dangling symlink. We're okay with that outcome,
+ # so check -h first.
+ if [ ! -h "$release_conffile" ]; then
+ if [ ! -e "$release_conffile" ]; then
+ ln -s "$etc_conffile" "$release_conffile"
+ # If there's a config file in /var/www identical to the one in /etc,
+ # overwrite it with a symlink after porting its permissions.
+ elif cmp --quiet "$release_conffile" "$etc_conffile"; then
+ local ownership="$(stat -c "%u:%g" "$release_conffile")"
+ local owning_group="${ownership#*:}"
+ if [ 0 != "$owning_group" ]; then
+ chgrp "$owning_group" "$CONFIG_PATH" /etc/arvados
+ fi
+ chown "$ownership" "$etc_conffile"
+ chmod --reference="$release_conffile" "$etc_conffile"
+ ln --force -s "$etc_conffile" "$release_conffile"
+ fi
+ fi
+
+ if [ -n "$conffile_source" ]; then
+ if [ ! -e "$etc_conffile" ]; then
+ install -g "$WWW_OWNER" -m 0640 \
+ "$RELEASE_CONFIG_PATH/$conffile_source" "$etc_conffile"
+ return 1
+ # Even if $etc_conffile already existed, it might be unmodified from
+ # the source. This is especially likely when a user installs, updates
+ # database.yml, then reconfigures before they update application.yml.
+ # Use cmp to be sure whether $etc_conffile is modified.
+ elif cmp --quiet "$RELEASE_CONFIG_PATH/$conffile_source" "$etc_conffile"; then
+ return 1
+ fi
+ fi
+}
+
+prepare_database() {
+ DB_MIGRATE_STATUS=`$COMMAND_PREFIX bundle exec rake db:migrate:status 2>&1 || true`
+ if echo $DB_MIGRATE_STATUS | grep -qF 'Schema migrations table does not exist yet.'; then
+ # The database exists, but the migrations table doesn't.
+ run_and_report "Setting up database" $COMMAND_PREFIX bundle exec \
+ rake "$RAILSPKG_DATABASE_LOAD_TASK" db:seed
+ elif echo $DB_MIGRATE_STATUS | grep -q '^database: '; then
+ run_and_report "Running db:migrate" \
+ $COMMAND_PREFIX bundle exec rake db:migrate
+ elif echo $DB_MIGRATE_STATUS | grep -q 'database .* does not exist'; then
+ if ! run_and_report "Running db:setup" \
+ $COMMAND_PREFIX bundle exec rake db:setup 2>/dev/null; then
+ echo "Warning: unable to set up database." >&2
+ DATABASE_READY=0
+ fi
+ else
+ echo "Warning: Database is not ready to set up. Skipping database setup." >&2
+ DATABASE_READY=0
+ fi
+}
+
+configure_version() {
+ WEB_SERVICE=${WEB_SERVICE:-$(service --status-all 2>/dev/null \
+ | grep -Eo '\bnginx|httpd[^[:space:]]*' || true)}
+ if [ -z "$WEB_SERVICE" ]; then
+ report_web_service_warning "Web service (Nginx or Apache) not found"
+ elif [ "$WEB_SERVICE" != "$(echo "$WEB_SERVICE" | head -n 1)" ]; then
+ WEB_SERVICE=$(echo "$WEB_SERVICE" | head -n 1)
+ report_web_service_warning \
+ "Multiple web services found. Choosing the first one ($WEB_SERVICE)"
+ fi
+
+ if [ -e /etc/redhat-release ]; then
+ # Recognize any service that starts with "nginx"; e.g., nginx16.
+ if [ "$WEB_SERVICE" != "${WEB_SERVICE#nginx}" ]; then
+ WWW_OWNER=nginx
+ else
+ WWW_OWNER=apache
+ fi
+ else
+ # Assume we're on a Debian-based system for now.
+ # Both Apache and Nginx run as www-data by default.
+ WWW_OWNER=www-data
+ fi
+
+ echo
+ echo "Assumption: $WEB_SERVICE is configured to serve Rails from"
+ echo " $RELEASE_PATH"
+ echo "Assumption: $WEB_SERVICE and passenger run as $WWW_OWNER"
+ echo
+
+ echo -n "Creating symlinks to configuration in $CONFIG_PATH ..."
+ setup_confdirs /etc/arvados "$CONFIG_PATH"
+ setup_conffile environments/production.rb environments/production.rb.example \
+ || true
+ setup_conffile application.yml application.yml.example || APPLICATION_READY=0
+ if [ -n "$RAILSPKG_DATABASE_LOAD_TASK" ]; then
+ setup_conffile database.yml database.yml.example || DATABASE_READY=0
+ fi
+ setup_extra_conffiles
+ echo "... done."
+
+ # Before we do anything else, make sure some directories and files are in place
+ if [ ! -e $SHARED_PATH/log ]; then mkdir -p $SHARED_PATH/log; fi
+ if [ ! -e $RELEASE_PATH/tmp ]; then mkdir -p $RELEASE_PATH/tmp; fi
+ if [ ! -e $RELEASE_PATH/log ]; then ln -s $SHARED_PATH/log $RELEASE_PATH/log; fi
+ if [ ! -e $SHARED_PATH/log/production.log ]; then touch $SHARED_PATH/log/production.log; fi
+
+ cd "$RELEASE_PATH"
+ export RAILS_ENV=production
+
+ if ! $COMMAND_PREFIX bundle --version >/dev/null; then
+ run_and_report "Installing bundle" $COMMAND_PREFIX gem install bundle
+ fi
+
+ run_and_report "Running bundle install" \
+ $COMMAND_PREFIX bundle install --path $SHARED_PATH/vendor_bundle --local --quiet
+
+ echo -n "Ensuring directory and file permissions ..."
+ # Ensure correct ownership of a few files
+ chown "$WWW_OWNER:" $RELEASE_PATH/config/environment.rb
+ chown "$WWW_OWNER:" $RELEASE_PATH/config.ru
+ chown "$WWW_OWNER:" $RELEASE_PATH/Gemfile.lock
+ chown -R "$WWW_OWNER:" $RELEASE_PATH/tmp
+ chown -R "$WWW_OWNER:" $SHARED_PATH/log
+ case "$RAILSPKG_DATABASE_LOAD_TASK" in
+ db:schema:load) chown "$WWW_OWNER:" $RELEASE_PATH/db/schema.rb ;;
+ db:structure:load) chown "$WWW_OWNER:" $RELEASE_PATH/db/structure.sql ;;
+ esac
+ chmod 644 $SHARED_PATH/log/*
+ chmod -R 2775 $RELEASE_PATH/tmp
+ echo "... done."
+
+ if [ -n "$RAILSPKG_DATABASE_LOAD_TASK" ]; then
+ prepare_database
+ fi
+
+ if [ 11 = "$RAILSPKG_SUPPORTS_CONFIG_CHECK$APPLICATION_READY" ]; then
+ run_and_report "Checking application.yml for completeness" \
+ $COMMAND_PREFIX bundle exec rake config:check || APPLICATION_READY=0
+ fi
+
+ # precompile assets; thankfully this does not take long
+ if [ "$APPLICATION_READY" = "1" ]; then
+ run_and_report "Precompiling assets" \
+ $COMMAND_PREFIX bundle exec rake assets:precompile -q -s 2>/dev/null \
+ || APPLICATION_READY=0
+ else
+ echo "Precompiling assets... skipped."
+ fi
+ chown -R "$WWW_OWNER:" $RELEASE_PATH/tmp
+
+ setup_before_nginx_restart
+
+ if [ ! -z "$WEB_SERVICE" ]; then
+ service "$WEB_SERVICE" restart
+ fi
+}
+
+if [ "$1" = configure ]; then
+ # This is a debian-based system
+ configure_version
+elif [ "$1" = "0" ] || [ "$1" = "1" ] || [ "$1" = "2" ]; then
+ # This is an rpm-based system
+ configure_version
+fi
+
+report_not_ready "$DATABASE_READY" "$CONFIG_PATH/database.yml"
+report_not_ready "$APPLICATION_READY" "$CONFIG_PATH/application.yml"
diff --git a/build/jenkins/rails-package-scripts/postrm.sh b/build/jenkins/rails-package-scripts/postrm.sh
new file mode 100644
index 0000000..2d63f0b
--- /dev/null
+++ b/build/jenkins/rails-package-scripts/postrm.sh
@@ -0,0 +1,23 @@
+#!/bin/sh
+# This code runs after package variable definitions and step2.sh.
+
+set -e
+
+purge () {
+ rm -rf $SHARED_PATH/vendor_bundle
+ rm -rf $SHARED_PATH/log
+ rm -rf $CONFIG_PATH
+ rmdir $SHARED_PATH || true
+ rmdir $INSTALL_PATH || true
+}
+
+if [ "$1" = 'purge' ]; then
+ # This is a debian-based system and purge was requested
+ purge
+elif [ "$1" = "0" ]; then
+ # This is an rpm-based system, no guarantees are made, always purge
+ # Apparently yum doesn't actually remember what it installed.
+ # Clean those files up here, then purge.
+ rm -rf $RELEASE_PATH
+ purge
+fi
diff --git a/build/jenkins/rails-package-scripts/prerm.sh b/build/jenkins/rails-package-scripts/prerm.sh
new file mode 100644
index 0000000..4ef5904
--- /dev/null
+++ b/build/jenkins/rails-package-scripts/prerm.sh
@@ -0,0 +1,22 @@
+#!/bin/sh
+# This code runs after package variable definitions and step2.sh.
+
+remove () {
+ rm -f $RELEASE_PATH/config/database.yml
+ rm -f $RELEASE_PATH/config/environments/production.rb
+ rm -f $RELEASE_PATH/config/application.yml
+ # Old API server configuration file.
+ rm -f $RELEASE_PATH/config/initializers/omniauth.rb
+ rm -rf $RELEASE_PATH/public/assets/
+ rm -rf $RELEASE_PATH/tmp
+ rm -rf $RELEASE_PATH/.bundle
+ rm -rf $RELEASE_PATH/log
+}
+
+if [ "$1" = 'remove' ]; then
+ # This is a debian-based system and removal was requested
+ remove
+elif [ "$1" = "0" ] || [ "$1" = "1" ] || [ "$1" = "2" ]; then
+ # This is an rpm-based system
+ remove
+fi
diff --git a/build/jenkins/rails-package-scripts/step2.sh b/build/jenkins/rails-package-scripts/step2.sh
new file mode 100644
index 0000000..816b906
--- /dev/null
+++ b/build/jenkins/rails-package-scripts/step2.sh
@@ -0,0 +1,28 @@
+#!/bin/sh
+# This code runs after package variable definitions, before the actual
+# pre/post package work, to set some variable and function defaults.
+
+if [ -z "$INSTALL_PATH" ]; then
+ cat >&2 <<EOF
+
+PACKAGE BUILD ERROR: $0 is missing package metadata.
+
+This package is buggy. Please mail <support@curoverse.com> to let
+us know the name and version number of the package you tried to
+install, and we'll get it fixed.
+
+EOF
+ exit 3
+fi
+
+RELEASE_PATH=$INSTALL_PATH/current
+RELEASE_CONFIG_PATH=$RELEASE_PATH/config
+SHARED_PATH=$INSTALL_PATH/shared
+
+RAILSPKG_SUPPORTS_CONFIG_CHECK=${RAILSPKG_SUPPORTS_CONFIG_CHECK:-1}
+if ! type setup_extra_conffiles >/dev/null 2>&1; then
+ setup_extra_conffiles() { return; }
+fi
+if ! type setup_before_nginx_restart >/dev/null 2>&1; then
+ setup_before_nginx_restart() { return; }
+fi
diff --git a/build/jenkins/run-build-docker-images.sh b/build/jenkins/run-build-docker-images.sh
new file mode 100755
index 0000000..0a5841d
--- /dev/null
+++ b/build/jenkins/run-build-docker-images.sh
@@ -0,0 +1,167 @@
+#!/bin/bash
+
+function usage {
+ echo >&2
+ echo >&2 "usage: $0 [options]"
+ echo >&2
+ echo >&2 "$0 options:"
+ echo >&2 " -t, --tags [csv_tags] comma separated tags"
+ echo >&2 " -u, --upload Upload the images (docker push)"
+ echo >&2 " -h, --help Display this help and exit"
+ echo >&2
+ echo >&2 " If no options are given, just builds the images."
+}
+
+upload=false
+
+# NOTE: This requires GNU getopt (part of the util-linux package on Debian-based distros).
+TEMP=`getopt -o hut: \
+ --long help,upload,tags: \
+ -n "$0" -- "$@"`
+
+if [ $? != 0 ] ; then echo "Use -h for help"; exit 1 ; fi
+# Note the quotes around `$TEMP': they are essential!
+eval set -- "$TEMP"
+
+while [ $# -ge 1 ]
+do
+ case $1 in
+ -u | --upload)
+ upload=true
+ shift
+ ;;
+ -t | --tags)
+ case "$2" in
+ "")
+ echo "ERROR: --tags needs a parameter";
+ usage;
+ exit 1
+ ;;
+ *)
+ tags=$2;
+ shift 2
+ ;;
+ esac
+ ;;
+ --)
+ shift
+ break
+ ;;
+ *)
+ usage
+ exit 1
+ ;;
+ esac
+done
+
+
+EXITCODE=0
+
+COLUMNS=80
+
+title () {
+ printf "\n%*s\n\n" $(((${#title}+$COLUMNS)/2)) "********** $1 **********"
+}
+
+docker_push () {
+ if [[ ! -z "$tags" ]]
+ then
+ for tag in $( echo $tags|tr "," " " )
+ do
+ $DOCKER tag $1 $1:$tag
+ done
+ fi
+
+ # Sometimes docker push fails; retry it a few times if necessary.
+ for i in `seq 1 5`; do
+ $DOCKER push $*
+ ECODE=$?
+ if [[ "$ECODE" == "0" ]]; then
+ break
+ fi
+ done
+
+ if [[ "$ECODE" != "0" ]]; then
+ title "!!!!!! docker push $* failed !!!!!!"
+ EXITCODE=$(($EXITCODE + $ECODE))
+ fi
+}
+
+timer_reset() {
+ t0=$SECONDS
+}
+
+timer() {
+ echo -n "$(($SECONDS - $t0))s"
+}
+
+# Sanity check
+if ! [[ -n "$WORKSPACE" ]]; then
+ echo >&2
+ echo >&2 "Error: WORKSPACE environment variable not set"
+ echo >&2
+ exit 1
+fi
+
+echo $WORKSPACE
+
+# find the docker binary
+DOCKER=`which docker.io`
+
+if [[ "$DOCKER" == "" ]]; then
+ DOCKER=`which docker`
+fi
+
+if [[ "$DOCKER" == "" ]]; then
+ title "Error: you need to have docker installed. Could not find the docker executable."
+ exit 1
+fi
+
+# DOCKER
+title "Starting docker build"
+
+timer_reset
+
+# clean up the docker build environment
+cd "$WORKSPACE"
+
+tools/arvbox/bin/arvbox build dev
+ECODE=$?
+
+if [[ "$ECODE" != "0" ]]; then
+ title "!!!!!! docker BUILD FAILED !!!!!!"
+ EXITCODE=$(($EXITCODE + $ECODE))
+fi
+
+tools/arvbox/bin/arvbox build localdemo
+
+ECODE=$?
+
+if [[ "$ECODE" != "0" ]]; then
+ title "!!!!!! docker BUILD FAILED !!!!!!"
+ EXITCODE=$(($EXITCODE + $ECODE))
+fi
+
+title "docker build complete (`timer`)"
+
+title "uploading images"
+
+timer_reset
+
+if [[ "$ECODE" != "0" ]]; then
+ title "upload arvados images SKIPPED because build failed"
+else
+ if [[ $upload == true ]]; then
+ ## 20150526 nico -- *sometimes* dockerhub needs re-login
+ ## even though credentials are already in .dockercfg
+ docker login -u arvados
+
+ docker_push arvados/arvbox-dev
+ docker_push arvados/arvbox-demo
+ title "upload arvados images complete (`timer`)"
+ else
+ title "upload arvados images SKIPPED because no --upload option set"
+ fi
+fi
+
+exit $EXITCODE
diff --git a/build/jenkins/run-build-docker-jobs-image.sh b/build/jenkins/run-build-docker-jobs-image.sh
new file mode 100755
index 0000000..fcf849b
--- /dev/null
+++ b/build/jenkins/run-build-docker-jobs-image.sh
@@ -0,0 +1,164 @@
+#!/bin/bash
+
+function usage {
+ echo >&2
+ echo >&2 "usage: $0 [options]"
+ echo >&2
+ echo >&2 "$0 options:"
+ echo >&2 " -t, --tags [csv_tags] comma separated tags"
+ echo >&2 " -u, --upload Upload the images (docker push)"
+ echo >&2 " -h, --help Display this help and exit"
+ echo >&2
+ echo >&2 " If no options are given, just builds the images."
+}
+
+upload=false
+
+# NOTE: This requires GNU getopt (part of the util-linux package on Debian-based distros).
+TEMP=`getopt -o hut: \
+ --long help,upload,tags: \
+ -n "$0" -- "$@"`
+
+if [ $? != 0 ] ; then echo "Use -h for help"; exit 1 ; fi
+# Note the quotes around `$TEMP': they are essential!
+eval set -- "$TEMP"
+
+while [ $# -ge 1 ]
+do
+ case $1 in
+ -u | --upload)
+ upload=true
+ shift
+ ;;
+ -t | --tags)
+ case "$2" in
+ "")
+ echo "ERROR: --tags needs a parameter";
+ usage;
+ exit 1
+ ;;
+ *)
+ tags=$2;
+ shift 2
+ ;;
+ esac
+ ;;
+ --)
+ shift
+ break
+ ;;
+ *)
+ usage
+ exit 1
+ ;;
+ esac
+done
+
+
+EXITCODE=0
+
+COLUMNS=80
+
+title () {
+ printf "\n%*s\n\n" $(((${#title}+$COLUMNS)/2)) "********** $1 **********"
+}
+
+docker_push () {
+ if [[ ! -z "$tags" ]]
+ then
+ for tag in $( echo $tags|tr "," " " )
+ do
+ $DOCKER tag -f $1 $1:$tag
+ done
+ fi
+
+ # Sometimes docker push fails; retry it a few times if necessary.
+ for i in `seq 1 5`; do
+ $DOCKER push $*
+ ECODE=$?
+ if [[ "$ECODE" == "0" ]]; then
+ break
+ fi
+ done
+
+ if [[ "$ECODE" != "0" ]]; then
+ title "!!!!!! docker push $* failed !!!!!!"
+ EXITCODE=$(($EXITCODE + $ECODE))
+ fi
+}
+
+timer_reset() {
+ t0=$SECONDS
+}
+
+timer() {
+ echo -n "$(($SECONDS - $t0))s"
+}
+
+# Sanity check
+if ! [[ -n "$WORKSPACE" ]]; then
+ echo >&2
+ echo >&2 "Error: WORKSPACE environment variable not set"
+ echo >&2
+ exit 1
+fi
+
+echo $WORKSPACE
+
+# find the docker binary
+DOCKER=`which docker.io`
+
+if [[ "$DOCKER" == "" ]]; then
+ DOCKER=`which docker`
+fi
+
+if [[ "$DOCKER" == "" ]]; then
+ title "Error: you need to have docker installed. Could not find the docker executable."
+ exit 1
+fi
+
+# DOCKER
+title "Starting docker build"
+
+timer_reset
+
+# clean up the docker build environment
+cd "$WORKSPACE"
+cd docker
+rm -f jobs-image
+rm -f config.yml
+
+# Get test config.yml file
+cp $HOME/docker/config.yml .
+
+./build.sh jobs-image
+
+ECODE=$?
+
+if [[ "$ECODE" != "0" ]]; then
+ title "!!!!!! docker BUILD FAILED !!!!!!"
+ EXITCODE=$(($EXITCODE + $ECODE))
+fi
+
+title "docker build complete (`timer`)"
+
+title "uploading images"
+
+timer_reset
+
+if [[ "$ECODE" != "0" ]]; then
+ title "upload arvados images SKIPPED because build failed"
+else
+ if [[ $upload == true ]]; then
+ ## 20150526 nico -- *sometimes* dockerhub needs re-login
+ ## even though credentials are already in .dockercfg
+ docker login -u arvados
+
+ docker_push arvados/jobs
+ title "upload arvados images complete (`timer`)"
+ else
+ title "upload arvados images SKIPPED because no --upload option set"
+ fi
+fi
+
+exit $EXITCODE
diff --git a/build/jenkins/run-build-packages-all-targets.sh b/build/jenkins/run-build-packages-all-targets.sh
new file mode 100755
index 0000000..f1a1e1c
--- /dev/null
+++ b/build/jenkins/run-build-packages-all-targets.sh
@@ -0,0 +1,98 @@
+#!/bin/bash
+
+read -rd "\000" helpmessage <<EOF
+$(basename $0): Orchestrate run-build-packages.sh for every target
+
+Syntax:
+ WORKSPACE=/path/to/arvados $(basename $0) [options]
+
+Options:
+
+--command
+ Build command to execute (default: use built-in Docker image command)
+--test-packages
+ Run package install tests
+--debug
+ Output debug information (default: false)
+
+WORKSPACE=path Path to the Arvados source tree to build packages from
+
+EOF
+
+if ! [[ -n "$WORKSPACE" ]]; then
+ echo >&2 "$helpmessage"
+ echo >&2
+ echo >&2 "Error: WORKSPACE environment variable not set"
+ echo >&2
+ exit 1
+fi
+
+if ! [[ -d "$WORKSPACE" ]]; then
+ echo >&2 "$helpmessage"
+ echo >&2
+ echo >&2 "Error: $WORKSPACE is not a directory"
+ echo >&2
+ exit 1
+fi
+
+set -e
+
+PARSEDOPTS=$(getopt --name "$0" --longoptions \
+ help,test-packages,debug,command:,only-test: \
+ -- "" "$@")
+if [ $? -ne 0 ]; then
+ exit 1
+fi
+
+COMMAND=
+DEBUG=
+TEST_PACKAGES=
+ONLY_TEST=
+
+eval set -- "$PARSEDOPTS"
+while [ $# -gt 0 ]; do
+ case "$1" in
+ --help)
+ echo >&2 "$helpmessage"
+ echo >&2
+ exit 1
+ ;;
+ --debug)
+ DEBUG="--debug"
+ ;;
+ --command)
+ COMMAND="$2"; shift
+ ;;
+ --test-packages)
+ TEST_PACKAGES="--test-packages"
+ ;;
+ --only-test)
+ ONLY_TEST="$1 $2"; shift
+ ;;
+ --)
+ if [ $# -gt 1 ]; then
+ echo >&2 "$0: unrecognized argument '$2'. Try: $0 --help"
+ exit 1
+ fi
+ ;;
+ esac
+ shift
+done
+
+cd $(dirname $0)
+
+FINAL_EXITCODE=0
+
+for dockerfile_path in $(find -name Dockerfile); do
+ if ./run-build-packages-one-target.sh --target "$(basename $(dirname "$dockerfile_path"))" --command "$COMMAND" $DEBUG $TEST_PACKAGES $ONLY_TEST ; then
+ true
+ else
+ FINAL_EXITCODE=$?
+ fi
+done
+
+if test $FINAL_EXITCODE != 0 ; then
+ echo "Build packages failed with code $FINAL_EXITCODE" >&2
+fi
+
+exit $FINAL_EXITCODE
diff --git a/build/jenkins/run-build-packages-one-target.sh b/build/jenkins/run-build-packages-one-target.sh
new file mode 100755
index 0000000..c5e0a89
--- /dev/null
+++ b/build/jenkins/run-build-packages-one-target.sh
@@ -0,0 +1,203 @@
+#!/bin/bash
+
+read -rd "\000" helpmessage <<EOF
+$(basename $0): Orchestrate run-build-packages.sh for one target
+
+Syntax:
+ WORKSPACE=/path/to/arvados $(basename $0) [options]
+
+--target <target>
+ Distribution to build packages for (default: debian7)
+--command
+ Build command to execute (default: use built-in Docker image command)
+--test-packages
+  Run package install test script "test-packages-\$target.sh"
+--debug
+ Output debug information (default: false)
+--only-test
+ Test only a specific package
+
+WORKSPACE=path Path to the Arvados source tree to build packages from
+
+EOF
+
+set -e
+
+if ! [[ -n "$WORKSPACE" ]]; then
+ echo >&2 "$helpmessage"
+ echo >&2
+ echo >&2 "Error: WORKSPACE environment variable not set"
+ echo >&2
+ exit 1
+fi
+
+if ! [[ -d "$WORKSPACE" ]]; then
+ echo >&2 "$helpmessage"
+ echo >&2
+ echo >&2 "Error: $WORKSPACE is not a directory"
+ echo >&2
+ exit 1
+fi
+
+PARSEDOPTS=$(getopt --name "$0" --longoptions \
+ help,debug,test-packages,target:,command:,only-test: \
+ -- "" "$@")
+if [ $? -ne 0 ]; then
+ exit 1
+fi
+
+TARGET=debian7
+COMMAND=
+DEBUG=
+
+eval set -- "$PARSEDOPTS"
+while [ $# -gt 0 ]; do
+ case "$1" in
+ --help)
+ echo >&2 "$helpmessage"
+ echo >&2
+ exit 1
+ ;;
+ --target)
+ TARGET="$2"; shift
+ ;;
+ --only-test)
+ packages="$2"; shift
+ ;;
+ --debug)
+ DEBUG=" --debug"
+ ;;
+ --command)
+ COMMAND="$2"; shift
+ ;;
+ --test-packages)
+ test_packages=1
+ ;;
+ --)
+ if [ $# -gt 1 ]; then
+ echo >&2 "$0: unrecognized argument '$2'. Try: $0 --help"
+ exit 1
+ fi
+ ;;
+ esac
+ shift
+done
+
+set -e
+
+if [[ -n "$test_packages" ]]; then
+ if [[ -n "$(find $WORKSPACE/packages/$TARGET -name *.rpm)" ]] ; then
+ createrepo $WORKSPACE/packages/$TARGET
+ fi
+
+ if [[ -n "$(find $WORKSPACE/packages/$TARGET -name *.deb)" ]] ; then
+ (cd $WORKSPACE/packages/$TARGET
+ dpkg-scanpackages . 2> >(grep -v 'warning' 1>&2) | gzip -c > Packages.gz
+ )
+ fi
+
+ COMMAND="/jenkins/package-testing/test-packages-$TARGET.sh"
+ IMAGE="arvados/package-test:$TARGET"
+else
+ IMAGE="arvados/build:$TARGET"
+ if [[ "$COMMAND" != "" ]]; then
+ COMMAND="/usr/local/rvm/bin/rvm-exec default bash /jenkins/$COMMAND --target $TARGET$DEBUG"
+ fi
+fi
+
+JENKINS_DIR=$(dirname "$(readlink -e "$0")")
+
+if [[ -n "$test_packages" ]]; then
+ pushd "$JENKINS_DIR/package-test-dockerfiles"
+else
+ pushd "$JENKINS_DIR/package-build-dockerfiles"
+ make "$TARGET/generated"
+fi
+
+echo $TARGET
+cd $TARGET
+time docker build --tag=$IMAGE .
+popd
+
+if test -z "$packages" ; then
+ packages="arvados-api-server
+ arvados-data-manager
+ arvados-docker-cleaner
+ arvados-git-httpd
+ arvados-node-manager
+ arvados-src
+ arvados-workbench
+ crunchstat
+ keepproxy
+ keep-rsync
+ keepstore
+ keep-web
+ libarvados-perl"
+
+ case "$TARGET" in
+ centos6)
+ packages="$packages python27-python-arvados-fuse
+ python27-python-arvados-python-client"
+ ;;
+ *)
+ packages="$packages python-arvados-fuse
+ python-arvados-python-client"
+ ;;
+ esac
+fi
+
+FINAL_EXITCODE=0
+
+package_fails=""
+
+mkdir -p "$WORKSPACE/apps/workbench/vendor/cache-$TARGET"
+mkdir -p "$WORKSPACE/services/api/vendor/cache-$TARGET"
+
+docker_volume_args=(
+ -v "$JENKINS_DIR:/jenkins"
+ -v "$WORKSPACE:/arvados"
+ -v /arvados/services/api/vendor/bundle
+ -v /arvados/apps/workbench/vendor/bundle
+ -v "$WORKSPACE/services/api/vendor/cache-$TARGET:/arvados/services/api/vendor/cache"
+ -v "$WORKSPACE/apps/workbench/vendor/cache-$TARGET:/arvados/apps/workbench/vendor/cache"
+)
+
+if [[ -n "$test_packages" ]]; then
+ for p in $packages ; do
+ echo
+ echo "START: $p test on $IMAGE" >&2
+ if docker run --rm \
+ "${docker_volume_args[@]}" \
+ --env ARVADOS_DEBUG=1 \
+ --env "TARGET=$TARGET" \
+ --env "WORKSPACE=/arvados" \
+ "$IMAGE" $COMMAND $p
+ then
+ echo "OK: $p test on $IMAGE succeeded" >&2
+ else
+ FINAL_EXITCODE=$?
+ package_fails="$package_fails $p"
+ echo "ERROR: $p test on $IMAGE failed with exit status $FINAL_EXITCODE" >&2
+ fi
+ done
+else
+ echo
+ echo "START: build packages on $IMAGE" >&2
+ if docker run --rm \
+ "${docker_volume_args[@]}" \
+ --env ARVADOS_DEBUG=1 \
+ "$IMAGE" $COMMAND
+ then
+ echo
+ echo "OK: build packages on $IMAGE succeeded" >&2
+ else
+ FINAL_EXITCODE=$?
+ echo "ERROR: build packages on $IMAGE failed with exit status $FINAL_EXITCODE" >&2
+ fi
+fi
+
+if test -n "$package_fails" ; then
+ echo "Failed package tests:$package_fails" >&2
+fi
+
+exit $FINAL_EXITCODE
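Typical invocations of this script, with illustrative paths (the options are the ones defined in the help text above): build every package for one distro, re-run with --test-packages to install-test the results, or limit the test pass to a single package with --only-test:

    WORKSPACE=/path/to/arvados ./run-build-packages-one-target.sh --target debian8
    WORKSPACE=/path/to/arvados ./run-build-packages-one-target.sh --target debian8 --test-packages
    WORKSPACE=/path/to/arvados ./run-build-packages-one-target.sh --target centos6 --test-packages --only-test python27-python-arvados-python-client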
diff --git a/build/jenkins/run-build-packages-sso.sh b/build/jenkins/run-build-packages-sso.sh
new file mode 100755
index 0000000..cc673a6
--- /dev/null
+++ b/build/jenkins/run-build-packages-sso.sh
@@ -0,0 +1,161 @@
+#!/bin/bash
+
+JENKINS_DIR=$(dirname $(readlink -e "$0"))
+. "$JENKINS_DIR/run-library.sh"
+
+read -rd "\000" helpmessage <<EOF
+$(basename $0): Build Arvados SSO server package
+
+Syntax:
+ WORKSPACE=/path/to/arvados-sso $(basename $0) [options]
+
+Options:
+
+--debug
+ Output debug information (default: false)
+--target
+ Distribution to build packages for (default: debian7)
+
+WORKSPACE=path Path to the Arvados SSO source tree to build packages from
+
+EOF
+
+EXITCODE=0
+DEBUG=${ARVADOS_DEBUG:-0}
+TARGET=debian7
+
+PARSEDOPTS=$(getopt --name "$0" --longoptions \
+ help,build-bundle-packages,debug,target: \
+ -- "" "$@")
+if [ $? -ne 0 ]; then
+ exit 1
+fi
+
+eval set -- "$PARSEDOPTS"
+while [ $# -gt 0 ]; do
+ case "$1" in
+ --help)
+ echo >&2 "$helpmessage"
+ echo >&2
+ exit 1
+ ;;
+ --target)
+ TARGET="$2"; shift
+ ;;
+ --debug)
+ DEBUG=1
+ ;;
+ --test-packages)
+ test_packages=1
+ ;;
+ --)
+ if [ $# -gt 1 ]; then
+ echo >&2 "$0: unrecognized argument '$2'. Try: $0 --help"
+ exit 1
+ fi
+ ;;
+ esac
+ shift
+done
+
+STDOUT_IF_DEBUG=/dev/null
+STDERR_IF_DEBUG=/dev/null
+DASHQ_UNLESS_DEBUG=-q
+if [[ "$DEBUG" != 0 ]]; then
+ STDOUT_IF_DEBUG=/dev/stdout
+ STDERR_IF_DEBUG=/dev/stderr
+ DASHQ_UNLESS_DEBUG=
+fi
+
+case "$TARGET" in
+ debian7)
+ FORMAT=deb
+ ;;
+ debian8)
+ FORMAT=deb
+ ;;
+ ubuntu1204)
+ FORMAT=deb
+ ;;
+ ubuntu1404)
+ FORMAT=deb
+ ;;
+ centos6)
+ FORMAT=rpm
+ ;;
+ *)
+ echo -e "$0: Unknown target '$TARGET'.\n" >&2
+ exit 1
+ ;;
+esac
+
+if ! [[ -n "$WORKSPACE" ]]; then
+ echo >&2 "$helpmessage"
+ echo >&2
+ echo >&2 "Error: WORKSPACE environment variable not set"
+ echo >&2
+ exit 1
+fi
+
+if ! [[ -d "$WORKSPACE" ]]; then
+ echo >&2 "$helpmessage"
+ echo >&2
+ echo >&2 "Error: $WORKSPACE is not a directory"
+ echo >&2
+ exit 1
+fi
+
+# Test for fpm
+fpm --version >/dev/null 2>&1
+
+if [[ "$?" != 0 ]]; then
+ echo >&2 "$helpmessage"
+ echo >&2
+ echo >&2 "Error: fpm not found"
+ echo >&2
+ exit 1
+fi
+
+RUN_BUILD_PACKAGES_PATH="`dirname \"$0\"`"
+RUN_BUILD_PACKAGES_PATH="`( cd \"$RUN_BUILD_PACKAGES_PATH\" && pwd )`" # absolutized and normalized
+if [ -z "$RUN_BUILD_PACKAGES_PATH" ] ; then
+ # error; for some reason, the path is not accessible
+ # to the script (e.g. permissions re-evaled after suid)
+ exit 1 # fail
+fi
+
+debug_echo "$0 is running from $RUN_BUILD_PACKAGES_PATH"
+debug_echo "Workspace is $WORKSPACE"
+
+if [[ -f /etc/profile.d/rvm.sh ]]; then
+ source /etc/profile.d/rvm.sh
+ GEM="rvm-exec default gem"
+else
+ GEM=gem
+fi
+
+# Make all files world-readable -- jenkins runs with umask 027, and has checked
+# out our git tree here
+chmod o+r "$WORKSPACE" -R
+
+# More cleanup - make sure all executables that we'll package are 755
+# No executables in the sso server package
+#find -type d -name 'bin' |xargs -I {} find {} -type f |xargs -I {} chmod 755 {}
+
+# Now fix our umask to something better suited to building and publishing
+# gems and packages
+umask 0022
+
+debug_echo "umask is" `umask`
+
+if [[ ! -d "$WORKSPACE/packages/$TARGET" ]]; then
+ mkdir -p "$WORKSPACE/packages/$TARGET"
+fi
+
+# Build the SSO server package
+handle_rails_package arvados-sso-server "$WORKSPACE" \
+ "$WORKSPACE/LICENCE" --url="https://arvados.org" \
+ --description="Arvados SSO server - Arvados is a free and open source platform for big data science." \
+ --license="Expat license"
+
+exit $EXITCODE
diff --git a/build/jenkins/run-build-packages.sh b/build/jenkins/run-build-packages.sh
new file mode 100755
index 0000000..136b73c
--- /dev/null
+++ b/build/jenkins/run-build-packages.sh
@@ -0,0 +1,538 @@
+#!/bin/bash
+
+. `dirname "$(readlink -f "$0")"`/run-library.sh
+. `dirname "$(readlink -f "$0")"`/libcloud-pin
+
+read -rd "\000" helpmessage <<EOF
+$(basename $0): Build Arvados packages
+
+Syntax:
+ WORKSPACE=/path/to/arvados $(basename $0) [options]
+
+Options:
+
+--build-bundle-packages (default: false)
+ Build api server and workbench packages with vendor/bundle included
+--debug
+ Output debug information (default: false)
+--target
+ Distribution to build packages for (default: debian7)
+--command
+ Build command to execute (defaults to the run command defined in the
+ Docker image)
+
+WORKSPACE=path Path to the Arvados source tree to build packages from
+
+EOF
+
+EXITCODE=0
+DEBUG=${ARVADOS_DEBUG:-0}
+TARGET=debian7
+COMMAND=
+
+PARSEDOPTS=$(getopt --name "$0" --longoptions \
+ help,build-bundle-packages,debug,target: \
+ -- "" "$@")
+if [ $? -ne 0 ]; then
+ exit 1
+fi
+
+eval set -- "$PARSEDOPTS"
+while [ $# -gt 0 ]; do
+ case "$1" in
+ --help)
+ echo >&2 "$helpmessage"
+ echo >&2
+ exit 1
+ ;;
+ --target)
+ TARGET="$2"; shift
+ ;;
+ --debug)
+ DEBUG=1
+ ;;
+ --command)
+ COMMAND="$2"; shift
+ ;;
+ --)
+ if [ $# -gt 1 ]; then
+ echo >&2 "$0: unrecognized argument '$2'. Try: $0 --help"
+ exit 1
+ fi
+ ;;
+ esac
+ shift
+done
+
+if [[ "$COMMAND" != "" ]]; then
+ COMMAND="/usr/local/rvm/bin/rvm-exec default bash /jenkins/$COMMAND --target $TARGET"
+fi
+
+STDOUT_IF_DEBUG=/dev/null
+STDERR_IF_DEBUG=/dev/null
+DASHQ_UNLESS_DEBUG=-q
+if [[ "$DEBUG" != 0 ]]; then
+ STDOUT_IF_DEBUG=/dev/stdout
+ STDERR_IF_DEBUG=/dev/stderr
+ DASHQ_UNLESS_DEBUG=
+fi
+
+declare -a PYTHON_BACKPORTS PYTHON3_BACKPORTS
+
+PYTHON2_VERSION=2.7
+PYTHON3_VERSION=$(python3 -c 'import sys; print("{v.major}.{v.minor}".format(v=sys.version_info))')
+
+case "$TARGET" in
+ debian7)
+ FORMAT=deb
+ PYTHON2_PACKAGE=python$PYTHON2_VERSION
+ PYTHON2_PKG_PREFIX=python
+ PYTHON3_PACKAGE=python$PYTHON3_VERSION
+ PYTHON3_PKG_PREFIX=python3
+ PYTHON_BACKPORTS=(python-gflags pyvcf google-api-python-client==1.4.2 \
+ oauth2client==1.5.2 pyasn1==0.1.7 pyasn1-modules==0.0.5 \
+ rsa uritemplate httplib2 ws4py pykka six pyexecjs jsonschema \
+ ciso8601 pycrypto backports.ssl_match_hostname llfuse \
+ 'pycurl<7.21.5' contextlib2)
+ PYTHON3_BACKPORTS=(docker-py six requests websocket-client)
+ ;;
+ debian8)
+ FORMAT=deb
+ PYTHON2_PACKAGE=python$PYTHON2_VERSION
+ PYTHON2_PKG_PREFIX=python
+ PYTHON3_PACKAGE=python$PYTHON3_VERSION
+ PYTHON3_PKG_PREFIX=python3
+ PYTHON_BACKPORTS=(python-gflags pyvcf google-api-python-client==1.4.2 \
+ oauth2client==1.5.2 pyasn1==0.1.7 pyasn1-modules==0.0.5 \
+ rsa uritemplate httplib2 ws4py pykka six pyexecjs jsonschema \
+ ciso8601 pycrypto backports.ssl_match_hostname llfuse \
+ 'pycurl<7.21.5')
+ PYTHON3_BACKPORTS=(docker-py six requests websocket-client)
+ ;;
+ ubuntu1204)
+ FORMAT=deb
+ PYTHON2_PACKAGE=python$PYTHON2_VERSION
+ PYTHON2_PKG_PREFIX=python
+ PYTHON3_PACKAGE=python$PYTHON3_VERSION
+ PYTHON3_PKG_PREFIX=python3
+ PYTHON_BACKPORTS=(python-gflags pyvcf google-api-python-client==1.4.2 \
+ oauth2client==1.5.2 pyasn1==0.1.7 pyasn1-modules==0.0.5 \
+ rsa uritemplate httplib2 ws4py pykka six pyexecjs jsonschema \
+ ciso8601 pycrypto backports.ssl_match_hostname llfuse \
+ contextlib2 \
+ 'pycurl<7.21.5')
+ PYTHON3_BACKPORTS=(docker-py six requests websocket-client)
+ ;;
+ ubuntu1404)
+ FORMAT=deb
+ PYTHON2_PACKAGE=python$PYTHON2_VERSION
+ PYTHON2_PKG_PREFIX=python
+ PYTHON3_PACKAGE=python$PYTHON3_VERSION
+ PYTHON3_PKG_PREFIX=python3
+ PYTHON_BACKPORTS=(pyasn1==0.1.7 pyvcf pyasn1-modules==0.0.5 llfuse ciso8601 \
+ google-api-python-client==1.4.2 six uritemplate oauth2client==1.5.2 httplib2 \
+ rsa 'pycurl<7.21.5' backports.ssl_match_hostname)
+ PYTHON3_BACKPORTS=(docker-py requests websocket-client)
+ ;;
+ centos6)
+ FORMAT=rpm
+ PYTHON2_PACKAGE=$(rpm -qf "$(which python$PYTHON2_VERSION)" --queryformat '%{NAME}\n')
+ PYTHON2_PKG_PREFIX=$PYTHON2_PACKAGE
+ PYTHON3_PACKAGE=$(rpm -qf "$(which python$PYTHON3_VERSION)" --queryformat '%{NAME}\n')
+ PYTHON3_PKG_PREFIX=$PYTHON3_PACKAGE
+ PYTHON_BACKPORTS=(python-gflags pyvcf google-api-python-client==1.4.2 \
+ oauth2client==1.5.2 pyasn1==0.1.7 pyasn1-modules==0.0.5 \
+ rsa uritemplate httplib2 ws4py pykka six pyexecjs jsonschema \
+ ciso8601 pycrypto backports.ssl_match_hostname 'pycurl<7.21.5' \
+ python-daemon lockfile llfuse 'pbr<1.0')
+ PYTHON3_BACKPORTS=(docker-py six requests websocket-client)
+ export PYCURL_SSL_LIBRARY=nss
+ ;;
+ *)
+ echo -e "$0: Unknown target '$TARGET'.\n" >&2
+ exit 1
+ ;;
+esac
+
+
+if ! [[ -n "$WORKSPACE" ]]; then
+ echo >&2 "$helpmessage"
+ echo >&2
+ echo >&2 "Error: WORKSPACE environment variable not set"
+ echo >&2
+ exit 1
+fi
+
+# Test for fpm
+fpm --version >/dev/null 2>&1
+
+if [[ "$?" != 0 ]]; then
+ echo >&2 "$helpmessage"
+ echo >&2
+ echo >&2 "Error: fpm not found"
+ echo >&2
+ exit 1
+fi
+
+EASY_INSTALL2=$(find_easy_install -$PYTHON2_VERSION "")
+EASY_INSTALL3=$(find_easy_install -$PYTHON3_VERSION 3)
+
+RUN_BUILD_PACKAGES_PATH="`dirname \"$0\"`"
+RUN_BUILD_PACKAGES_PATH="`( cd \"$RUN_BUILD_PACKAGES_PATH\" && pwd )`" # absolutized and normalized
+if [ -z "$RUN_BUILD_PACKAGES_PATH" ] ; then
+ # error; for some reason, the path is not accessible
+ # to the script (e.g. permissions re-evaled after suid)
+ exit 1 # fail
+fi
+
+debug_echo "$0 is running from $RUN_BUILD_PACKAGES_PATH"
+debug_echo "Workspace is $WORKSPACE"
+
+if [[ -f /etc/profile.d/rvm.sh ]]; then
+ source /etc/profile.d/rvm.sh
+ GEM="rvm-exec default gem"
+else
+ GEM=gem
+fi
+
+# Make all files world-readable -- jenkins runs with umask 027, and has checked
+# out our git tree here
+chmod o+r "$WORKSPACE" -R
+
+# More cleanup - make sure all executables that we'll package are 755
+find -type d -name 'bin' |xargs -I {} find {} -type f |xargs -I {} chmod 755 {}
+
+# Now fix our umask to something better suited to building and publishing
+# gems and packages
+umask 0022
+
+debug_echo "umask is" `umask`
+
+if [[ ! -d "$WORKSPACE/packages/$TARGET" ]]; then
+ mkdir -p $WORKSPACE/packages/$TARGET
+fi
+
+# Perl packages
+debug_echo -e "\nPerl packages\n"
+
+cd "$WORKSPACE/sdk/perl"
+
+if [[ -e Makefile ]]; then
+ make realclean >"$STDOUT_IF_DEBUG"
+fi
+find -maxdepth 1 \( -name 'MANIFEST*' -or -name "libarvados-perl*.$FORMAT" \) \
+ -delete
+rm -rf install
+
+perl Makefile.PL INSTALL_BASE=install >"$STDOUT_IF_DEBUG" && \
+ make install INSTALLDIRS=perl >"$STDOUT_IF_DEBUG" && \
+ fpm_build install/lib/=/usr/share libarvados-perl \
+ "Curoverse, Inc." dir "$(version_from_git)" install/man/=/usr/share/man \
+ "$WORKSPACE/LICENSE-2.0.txt=/usr/share/doc/libarvados-perl/LICENSE-2.0.txt" && \
+ mv --no-clobber libarvados-perl*.$FORMAT "$WORKSPACE/packages/$TARGET/"
+
+# Ruby gems
+debug_echo -e "\nRuby gems\n"
+
+FPM_GEM_PREFIX=$($GEM environment gemdir)
+
+cd "$WORKSPACE/sdk/ruby"
+handle_ruby_gem arvados
+
+cd "$WORKSPACE/sdk/cli"
+handle_ruby_gem arvados-cli
+
+cd "$WORKSPACE/services/login-sync"
+handle_ruby_gem arvados-login-sync
+
+# Python packages
+debug_echo -e "\nPython packages\n"
+
+cd "$WORKSPACE/sdk/pam"
+handle_python_package
+
+cd "$WORKSPACE/sdk/python"
+handle_python_package
+
+cd "$WORKSPACE/sdk/cwl"
+handle_python_package
+
+cd "$WORKSPACE/services/fuse"
+handle_python_package
+
+cd "$WORKSPACE/services/nodemanager"
+handle_python_package
+
+# arvados-src
+(
+ set -e
+
+ cd "$WORKSPACE"
+ COMMIT_HASH=$(format_last_commit_here "%H")
+
+ SRC_BUILD_DIR=$(mktemp -d)
+ # mktemp creates the directory with 0700 permissions by default
+ chmod 755 $SRC_BUILD_DIR
+ git clone $DASHQ_UNLESS_DEBUG "$WORKSPACE/.git" "$SRC_BUILD_DIR"
+ cd "$SRC_BUILD_DIR"
+
+ # go into detached-head state
+ git checkout $DASHQ_UNLESS_DEBUG "$COMMIT_HASH"
+ echo "$COMMIT_HASH" >git-commit.version
+
+ cd "$SRC_BUILD_DIR"
+ PKG_VERSION=$(version_from_git)
+ cd $WORKSPACE/packages/$TARGET
+ fpm_build $SRC_BUILD_DIR/=/usr/local/arvados/src arvados-src 'Curoverse, Inc.' 'dir' "$PKG_VERSION" "--exclude=usr/local/arvados/src/.git" "--url=https://arvados.org" "--license=GNU Affero General Public License, version 3.0" "--description=The Arvados source code" "--architecture=all"
+
+ rm -rf "$SRC_BUILD_DIR"
+)
+
+# On older platforms we need to publish a backport of libfuse >=2.9.2,
+# and we need to build and install it here in order to even build an
+# llfuse package.
+cd $WORKSPACE/packages/$TARGET
+if [[ $TARGET =~ ubuntu1204 ]]; then
+ # port libfuse 2.9.2 to Ubuntu 12.04
+ LIBFUSE_DIR=$(mktemp -d)
+ (
+ cd $LIBFUSE_DIR
+ # download fuse 2.9.2 ubuntu 14.04 source package
+ file="fuse_2.9.2.orig.tar.xz" && curl -L -o "${file}" "http://archive.ubuntu.com/ubuntu/pool/main/f/fuse/${file}"
+ file="fuse_2.9.2-4ubuntu4.14.04.1.debian.tar.xz" && curl -L -o "${file}" "http://archive.ubuntu.com/ubuntu/pool/main/f/fuse/${file}"
+ file="fuse_2.9.2-4ubuntu4.14.04.1.dsc" && curl -L -o "${file}" "http://archive.ubuntu.com/ubuntu/pool/main/f/fuse/${file}"
+
+ # install dpkg-source and dpkg-buildpackage commands
+ apt-get install -y --no-install-recommends dpkg-dev
+
+ # extract source and apply patches
+ dpkg-source -x fuse_2.9.2-4ubuntu4.14.04.1.dsc
+ rm -f fuse_2.9.2.orig.tar.xz fuse_2.9.2-4ubuntu4.14.04.1.debian.tar.xz fuse_2.9.2-4ubuntu4.14.04.1.dsc
+
+ # add new version to changelog
+ cd fuse-2.9.2
+ (
+ echo "fuse (2.9.2-5) precise; urgency=low"
+ echo
+ echo " * Backported from trusty-security to precise"
+ echo
+ echo " -- Joshua Randall <jcrandall at alum.mit.edu> Thu, 4 Feb 2016 11:31:00 -0000"
+ echo
+ cat debian/changelog
+ ) > debian/changelog.new
+ mv debian/changelog.new debian/changelog
+
+ # install build-deps and build
+ apt-get install -y --no-install-recommends debhelper dh-autoreconf libselinux-dev
+ dpkg-buildpackage -rfakeroot -b
+ )
+ fpm_build "$LIBFUSE_DIR/fuse_2.9.2-5_amd64.deb" fuse "Ubuntu Developers" deb "2.9.2" --iteration 5
+ fpm_build "$LIBFUSE_DIR/libfuse2_2.9.2-5_amd64.deb" libfuse2 "Ubuntu Developers" deb "2.9.2" --iteration 5
+ fpm_build "$LIBFUSE_DIR/libfuse-dev_2.9.2-5_amd64.deb" libfuse-dev "Ubuntu Developers" deb "2.9.2" --iteration 5
+ dpkg -i \
+ "$WORKSPACE/packages/$TARGET/fuse_2.9.2-5_amd64.deb" \
+ "$WORKSPACE/packages/$TARGET/libfuse2_2.9.2-5_amd64.deb" \
+ "$WORKSPACE/packages/$TARGET/libfuse-dev_2.9.2-5_amd64.deb"
+ apt-get -y --no-install-recommends -f install
+ rm -rf $LIBFUSE_DIR
+elif [[ $TARGET =~ centos6 ]]; then
+ # port fuse 2.9.2 to centos 6
+ # install tools to build rpm from source
+ yum install -y rpm-build redhat-rpm-config
+ LIBFUSE_DIR=$(mktemp -d)
+ (
+ cd "$LIBFUSE_DIR"
+ # download fuse 2.9.2 centos 7 source rpm
+ file="fuse-2.9.2-6.el7.src.rpm" && curl -L -o "${file}" "http://vault.centos.org/7.2.1511/os/Source/SPackages/${file}"
+ (
+ # modify source rpm spec to remove conflict on filesystem version
+ mkdir -p /root/rpmbuild/SOURCES
+ cd /root/rpmbuild/SOURCES
+ rpm2cpio ${LIBFUSE_DIR}/fuse-2.9.2-6.el7.src.rpm | cpio -i
+ perl -pi -e 's/Conflicts:\s*filesystem.*//g' fuse.spec
+ )
+ # build rpms from source
+ rpmbuild -bb /root/rpmbuild/SOURCES/fuse.spec
+ rm -f fuse-2.9.2-6.el7.src.rpm
+ # move built RPMs to LIBFUSE_DIR
+ mv "/root/rpmbuild/RPMS/x86_64/fuse-2.9.2-6.el6.x86_64.rpm" ${LIBFUSE_DIR}/
+ mv "/root/rpmbuild/RPMS/x86_64/fuse-libs-2.9.2-6.el6.x86_64.rpm" ${LIBFUSE_DIR}/
+ mv "/root/rpmbuild/RPMS/x86_64/fuse-devel-2.9.2-6.el6.x86_64.rpm" ${LIBFUSE_DIR}/
+ rm -rf /root/rpmbuild
+ )
+ fpm_build "$LIBFUSE_DIR/fuse-libs-2.9.2-6.el6.x86_64.rpm" fuse-libs "Centos Developers" rpm "2.9.2" --iteration 5
+ fpm_build "$LIBFUSE_DIR/fuse-2.9.2-6.el6.x86_64.rpm" fuse "Centos Developers" rpm "2.9.2" --iteration 5 --no-auto-depends
+ fpm_build "$LIBFUSE_DIR/fuse-devel-2.9.2-6.el6.x86_64.rpm" fuse-devel "Centos Developers" rpm "2.9.2" --iteration 5 --no-auto-depends
+ yum install -y \
+ "$WORKSPACE/packages/$TARGET/fuse-libs-2.9.2-5.x86_64.rpm" \
+ "$WORKSPACE/packages/$TARGET/fuse-2.9.2-5.x86_64.rpm" \
+ "$WORKSPACE/packages/$TARGET/fuse-devel-2.9.2-5.x86_64.rpm"
+fi
+
+# Go binaries
+cd $WORKSPACE/packages/$TARGET
+export GOPATH=$(mktemp -d)
+package_go_binary services/keepstore keepstore \
+ "Keep storage daemon, accessible to clients on the LAN"
+package_go_binary services/keepproxy keepproxy \
+ "Make a Keep cluster accessible to clients that are not on the LAN"
+package_go_binary services/keep-web keep-web \
+ "Static web hosting service for user data stored in Arvados Keep"
+package_go_binary services/datamanager arvados-data-manager \
+ "Ensure block replication levels, report disk usage, and determine which blocks should be deleted when space is needed"
+package_go_binary services/arv-git-httpd arvados-git-httpd \
+ "Provide authenticated http access to Arvados-hosted git repositories"
+package_go_binary services/crunchstat crunchstat \
+ "Gather cpu/memory/network statistics of running Crunch jobs"
+package_go_binary tools/keep-rsync keep-rsync \
+ "Copy all data from one set of Keep servers to another"
+
+# The Python SDK
+# Please resist the temptation to add --no-python-fix-name to the fpm call here
+# (which would remove the python- prefix from the package name), because this
+# package is a dependency of arvados-fuse, and fpm can not omit the python-
+# prefix from only one of the dependencies of a package... Maybe I could
+# whip up a patch and send it upstream, but that will be for another day. Ward,
+# 2014-05-15
+cd $WORKSPACE/packages/$TARGET
+rm -rf "$WORKSPACE/sdk/python/build"
+fpm_build $WORKSPACE/sdk/python "${PYTHON2_PKG_PREFIX}-arvados-python-client" 'Curoverse, Inc.' 'python' "$(awk '($1 == "Version:"){print $2}' $WORKSPACE/sdk/python/arvados_python_client.egg-info/PKG-INFO)" "--url=https://arvados.org" "--description=The Arvados Python SDK" --deb-recommends=git
+
+# The PAM module
+if [[ $TARGET =~ debian|ubuntu ]]; then
+ cd $WORKSPACE/packages/$TARGET
+ rm -rf "$WORKSPACE/sdk/pam/build"
+ fpm_build $WORKSPACE/sdk/pam libpam-arvados 'Curoverse, Inc.' 'python' "$(awk '($1 == "Version:"){print $2}' $WORKSPACE/sdk/pam/arvados_pam.egg-info/PKG-INFO)" "--url=https://arvados.org" "--description=PAM module for authenticating shell logins using Arvados API tokens" --depends libpam-python
+fi
+
+# The FUSE driver
+# Please see the comment about --no-python-fix-name above; for consistency we do
+# not omit the python- prefix here either.
+cd $WORKSPACE/packages/$TARGET
+rm -rf "$WORKSPACE/services/fuse/build"
+fpm_build $WORKSPACE/services/fuse "${PYTHON2_PKG_PREFIX}-arvados-fuse" 'Curoverse, Inc.' 'python' "$(awk '($1 == "Version:"){print $2}' $WORKSPACE/services/fuse/arvados_fuse.egg-info/PKG-INFO)" "--url=https://arvados.org" "--description=The Keep FUSE driver"
+
+# The node manager
+cd $WORKSPACE/packages/$TARGET
+rm -rf "$WORKSPACE/services/nodemanager/build"
+fpm_build $WORKSPACE/services/nodemanager arvados-node-manager 'Curoverse, Inc.' 'python' "$(awk '($1 == "Version:"){print $2}' $WORKSPACE/services/nodemanager/arvados_node_manager.egg-info/PKG-INFO)" "--url=https://arvados.org" "--description=The Arvados node manager"
+
+# The Docker image cleaner
+cd $WORKSPACE/packages/$TARGET
+rm -rf "$WORKSPACE/services/dockercleaner/build"
+fpm_build $WORKSPACE/services/dockercleaner arvados-docker-cleaner 'Curoverse, Inc.' 'python3' "$(awk '($1 == "Version:"){print $2}' $WORKSPACE/services/dockercleaner/arvados_docker_cleaner.egg-info/PKG-INFO)" "--url=https://arvados.org" "--description=The Arvados Docker image cleaner"
+
+# Forked libcloud
+LIBCLOUD_DIR=$(mktemp -d)
+(
+ cd $LIBCLOUD_DIR
+ git clone $DASHQ_UNLESS_DEBUG https://github.com/curoverse/libcloud.git .
+ git checkout apache-libcloud-$LIBCLOUD_PIN
+ # libcloud is absurdly noisy without -q, so force -q here
+ OLD_DASHQ_UNLESS_DEBUG=$DASHQ_UNLESS_DEBUG
+ DASHQ_UNLESS_DEBUG=-q
+ handle_python_package
+ DASHQ_UNLESS_DEBUG=$OLD_DASHQ_UNLESS_DEBUG
+)
+fpm_build $LIBCLOUD_DIR "$PYTHON2_PKG_PREFIX"-apache-libcloud
+rm -rf $LIBCLOUD_DIR
+
+# Python 2 dependencies
+declare -a PIP_DOWNLOAD_SWITCHES=(--no-deps)
+# Add --no-use-wheel if this pip knows it.
+pip wheel --help >/dev/null 2>&1
+case "$?" in
+ 0) PIP_DOWNLOAD_SWITCHES+=(--no-use-wheel) ;;
+ 2) ;;
+ *) echo "WARNING: `pip wheel` test returned unknown exit code $?" ;;
+esac
+
+for deppkg in "${PYTHON_BACKPORTS[@]}"; do
+ outname=$(echo "$deppkg" | sed -e 's/^python-//' -e 's/[<=>].*//' -e 's/_/-/g' -e "s/^/${PYTHON2_PKG_PREFIX}-/")
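+ # e.g. a (hypothetical) backport entry "python-daemon>=2.0" becomes
+ # "${PYTHON2_PKG_PREFIX}-daemon" here.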
+ case "$deppkg" in
+ httplib2|google-api-python-client)
+ # Work around 0640 permissions on some package files.
+ # See #7591 and #7991.
+ pyfpm_workdir=$(mktemp --tmpdir -d pyfpm-XXXXXX) && (
+ set -e
+ cd "$pyfpm_workdir"
+ pip install "${PIP_DOWNLOAD_SWITCHES[@]}" --download . "$deppkg"
+ tar -xf "$deppkg"-*.tar*
+ cd "$deppkg"-*/
+ "python$PYTHON2_VERSION" setup.py $DASHQ_UNLESS_DEBUG egg_info build
+ chmod -R go+rX .
+ set +e
+ # --iteration 2 provides an upgrade for previously built
+ # buggy packages.
+ fpm_build . "$outname" "" python "" --iteration 2
+ # The upload step uses the package timestamp to determine
+ # whether it's new. --no-clobber plays nice with that.
+ mv --no-clobber "$outname"*.$FORMAT "$WORKSPACE/packages/$TARGET"
+ )
+ if [ 0 != "$?" ]; then
+ echo "ERROR: $deppkg build process failed"
+ EXITCODE=1
+ fi
+ if [ -n "$pyfpm_workdir" ]; then
+ rm -rf "$pyfpm_workdir"
+ fi
+ ;;
+ *)
+ fpm_build "$deppkg" "$outname"
+ ;;
+ esac
+done
+
+# Python 3 dependencies
+for deppkg in "${PYTHON3_BACKPORTS[@]}"; do
+ outname=$(echo "$deppkg" | sed -e 's/^python-//' -e 's/[<=>].*//' -e 's/_/-/g' -e "s/^/${PYTHON3_PKG_PREFIX}-/")
+ # The empty string is the vendor argument: these aren't Curoverse software.
+ fpm_build "$deppkg" "$outname" "" python3
+done
+
+# Build the API server package
+handle_rails_package arvados-api-server "$WORKSPACE/services/api" \
+ "$WORKSPACE/agpl-3.0.txt" --url="https://arvados.org" \
+ --description="Arvados API server - Arvados is a free and open source platform for big data science." \
+ --license="GNU Affero General Public License, version 3.0"
+
+# Build the workbench server package
+(
+ set -e
+ cd "$WORKSPACE/apps/workbench"
+
+ # We need the bundle to be ready even when we build a package without the
+ # vendor directory, because asset compilation requires it.
+ bundle install --path vendor/bundle >"$STDOUT_IF_DEBUG"
+
+ # clear the tmp directory; the asset generation step will recreate tmp/cache/assets,
+ # and we want that in the package, so it's easier to not exclude the tmp directory
+ # from the package - empty it instead.
+ rm -rf tmp
+ mkdir tmp
+
+ # Set up application.yml and production.rb so that asset precompilation works
+ \cp config/application.yml.example config/application.yml -f
+ \cp config/environments/production.rb.example config/environments/production.rb -f
+ sed -i 's/secret_token: ~/secret_token: xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx/' config/application.yml
+
+ RAILS_ENV=production RAILS_GROUPS=assets bundle exec rake assets:precompile >/dev/null
+
+ # Remove generated configuration files so they don't go in the package.
+ rm config/application.yml config/environments/production.rb
+)
+
+if [[ "$?" != "0" ]]; then
+ echo "ERROR: Asset precompilation failed"
+ EXITCODE=1
+else
+ handle_rails_package arvados-workbench "$WORKSPACE/apps/workbench" \
+ "$WORKSPACE/agpl-3.0.txt" --url="https://arvados.org" \
+ --description="Arvados Workbench - Arvados is a free and open source platform for big data science." \
+ --license="GNU Affero General Public License, version 3.0"
+fi
+
+# clean up temporary GOPATH
+rm -rf "$GOPATH"
+
+exit $EXITCODE
diff --git a/build/jenkins/run-cwl-tests.sh b/build/jenkins/run-cwl-tests.sh
new file mode 100755
index 0000000..53c0538
--- /dev/null
+++ b/build/jenkins/run-cwl-tests.sh
@@ -0,0 +1,218 @@
+#!/bin/bash
+
+read -rd "\000" helpmessage <<EOF
+$(basename $0): Test cwl tool and (optionally) upload to PyPI and Docker Hub.
+
+Syntax:
+ WORKSPACE=/path/to/common-workflow-language $(basename $0) [options]
+
+Options:
+
+--upload-pypi Upload package to PyPI (default: false)
+--upload-docker Upload packages to Docker Hub (default: false)
+--debug Output debug information (default: false)
+
+WORKSPACE=path Path to the common-workflow-language source tree
+
+EOF
+
+EXITCODE=0
+CALL_FREIGHT=0
+
+DEBUG=0
+UPLOAD_PYPI=0
+UPLOAD_DOCKER=0
+
+VENVDIR=
+
+# Associative array of temp dirs to preserve at exit (see --leave-temp).
+declare -A leave_temp
+
+set -e
+
+clear_temp() {
+ leaving=""
+ for var in VENVDIR
+ do
+ if [[ -z "${leave_temp[$var]}" ]]
+ then
+ if [[ -n "${!var}" ]]
+ then
+ rm -rf "${!var}"
+ fi
+ else
+ leaving+=" $var=\"${!var}\""
+ fi
+ done
+ if [[ -n "$leaving" ]]; then
+ echo "Leaving behind temp dirs: $leaving"
+ fi
+}
+
+fatal() {
+ clear_temp
+ echo >&2 "Fatal: $* (encountered in ${FUNCNAME[1]} at ${BASH_SOURCE[1]} line ${BASH_LINENO[0]})"
+ exit 1
+}
+
+trap clear_temp INT EXIT
+
+# Set up temporary install dirs (unless existing dirs were supplied)
+for tmpdir in VENVDIR
+do
+ if [[ -n "${!tmpdir}" ]]; then
+ leave_temp[$tmpdir]=1
+ else
+ eval $tmpdir=$(mktemp -d)
+ fi
+done
+
+
+while [[ -n "$1" ]]
+do
+ arg="$1"; shift
+ case "$arg" in
+ --help)
+ echo >&2 "$helpmessage"
+ echo >&2
+ exit 1
+ ;;
+ --debug)
+ DEBUG=1
+ ;;
+ --upload-pypi)
+ UPLOAD_PYPI=1
+ ;;
+ --upload-docker)
+ UPLOAD_DOCKER=1
+ ;;
+ --leave-temp)
+ leave_temp[VENVDIR]=1
+ ;;
+ *=*)
+ eval export $(echo $arg | cut -d= -f1)=\"$(echo $arg | cut -d= -f2-)\"
+ ;;
+ *)
+ echo >&2 "$0: Unrecognized option: '$arg'. Try: $0 --help"
+ exit 1
+ ;;
+ esac
+done
+
+# Sanity check
+if ! [[ -n "$WORKSPACE" ]]; then
+ echo >&2 "$helpmessage"
+ echo >&2
+ echo >&2 "Error: WORKSPACE environment variable not set"
+ echo >&2
+ exit 1
+fi
+
+if [[ "$DEBUG" != 0 ]]; then
+ echo "Workspace is $WORKSPACE"
+fi
+
+virtualenv --setuptools "$VENVDIR" || fatal "virtualenv $VENVDIR failed"
+. "$VENVDIR/bin/activate"
+
+handle_python_package () {
+ # This function assumes the current working directory is the python package directory
+ if [[ "$UPLOAD_PYPI" != 0 ]]; then
+ # Make sure only to use sdist - that's the only format pip can deal with (sigh)
+ if [[ "$DEBUG" != 0 ]]; then
+ python setup.py sdist upload
+ else
+ python setup.py -q sdist upload
+ fi
+ else
+ # Make sure only to use sdist - that's the only format pip can deal with (sigh)
+ if [[ "$DEBUG" != 0 ]]; then
+ python setup.py sdist
+ else
+ python setup.py -q sdist
+ fi
+ fi
+}
+
+# Make all files world-readable -- jenkins runs with umask 027, and has checked
+# out our git tree here
+chmod o+r "$WORKSPACE" -R
+
+# Now fix our umask to something better suited to building and publishing
+# gems and packages
+umask 0022
+
+if [[ "$DEBUG" != 0 ]]; then
+ echo "umask is" `umask`
+fi
+
+# Python packages
+if [[ "$DEBUG" != 0 ]]; then
+ echo
+ echo "Python packages"
+ echo
+fi
+
+cd "$WORKSPACE"
+
+if test -d cwltool ; then
+ (cd cwltool
+ git fetch
+ git reset --hard origin/master
+ )
+else
+ git clone git at github.com:common-workflow-language/cwltool.git
+ (cd cwltool
+ git config user.email "sysadmin at curoverse.com"
+ git config user.name "Curoverse build bot"
+ )
+fi
+
+(cd cwltool
+ python setup.py install
+ python setup.py test
+ ./build-node-docker.sh
+)
+
+./run_test.sh RUNNER=cwltool/cwltool/main.py DRAFT=draft-2
+./run_test.sh RUNNER=cwltool/cwltool/main.py DRAFT=draft-3
+
+(cd cwltool
+ handle_python_package
+)
+
+(cd cwltool/cwl-runner
+ handle_python_package
+)
+
+(cd cwltool
+ ./build-cwl-docker.sh
+)
+
+if [[ "$UPLOAD_DOCKER" != 0 ]]; then
+ docker push commonworkflowlanguage/cwltool_module
+ docker push commonworkflowlanguage/cwltool
+ docker push commonworkflowlanguage/nodejs-engine
+fi
+
+if test -d common-workflow-language.github.io ; then
+ (cd common-workflow-language.github.io
+ git fetch
+ git reset --hard origin/master
+ )
+else
+ git clone git at github.com:common-workflow-language/common-workflow-language.github.io.git
+ (cd common-workflow-language.github.io
+ git config user.email "sysadmin at curoverse.com"
+ git config user.name "Curoverse build bot"
+ )
+fi
+
+python -mcwltool --outdir=$PWD/common-workflow-language.github.io site/cwlsite.cwl site/cwlsite-job.json
+
+(cd common-workflow-language.github.io
+ git add --all
+ git diff-index --quiet HEAD || git commit -m"Build bot"
+ git push
+)
diff --git a/build/jenkins/run-deploy.sh b/build/jenkins/run-deploy.sh
new file mode 100755
index 0000000..1b06c65
--- /dev/null
+++ b/build/jenkins/run-deploy.sh
@@ -0,0 +1,266 @@
+#!/bin/bash
+
+DEBUG=0
+SSH_PORT=22
+
+function usage {
+ echo >&2
+ echo >&2 "usage: $0 [options] <identifier>"
+ echo >&2
+ echo >&2 " <identifier> Arvados cluster name"
+ echo >&2
+ echo >&2 "$0 options:"
+ echo >&2 " -p, --port <ssh port> SSH port to use (default 22)"
+ echo >&2 " -d, --debug Enable debug output"
+ echo >&2 " -h, --help Display this help and exit"
+ echo >&2
+ echo >&2 "Note: this script requires an arvados token created with these permissions:"
+ echo >&2 ' arv api_client_authorization create_system_auth \'
+ echo >&2 ' --scopes "[\"GET /arvados/v1/virtual_machines\",'
+ echo >&2 ' \"GET /arvados/v1/keep_services\",'
+ echo >&2 ' \"GET /arvados/v1/keep_services/\",'
+ echo >&2 ' \"GET /arvados/v1/groups\",'
+ echo >&2 ' \"GET /arvados/v1/groups/\",'
+ echo >&2 ' \"GET /arvados/v1/links\",'
+ echo >&2 ' \"GET /arvados/v1/collections\",'
+ echo >&2 ' \"POST /arvados/v1/collections\",'
+ echo >&2 ' \"POST /arvados/v1/links\"]"'
+ echo >&2
+}
+
+# NOTE: This requires GNU getopt (part of the util-linux package on Debian-based distros).
+TEMP=`getopt -o hdp: \
+ --long help,debug,port: \
+ -n "$0" -- "$@"`
+
+if [ $? != 0 ] ; then echo "Use -h for help"; exit 1 ; fi
+# Note the quotes around `$TEMP': they are essential!
+eval set -- "$TEMP"
+
+while [ $# -ge 1 ]
+do
+ case $1 in
+ -p | --port)
+ SSH_PORT="$2"; shift 2
+ ;;
+ -d | --debug)
+ DEBUG=1
+ shift
+ ;;
+ --)
+ shift
+ break
+ ;;
+ *)
+ usage
+ exit 1
+ ;;
+ esac
+done
+
+IDENTIFIER=$1
+
+if [[ "$IDENTIFIER" == '' ]]; then
+ usage
+ exit 1
+fi
+
+EXITCODE=0
+
+COLUMNS=80
+
+PUPPET_AGENT='
+now() { date +%s; }
+let endtime="$(now) + 600"
+while [ "$endtime" -gt "$(now)" ]; do
+ puppet agent --test --detailed-exitcodes
+ agent_exitcode=$?
+ if [ 0 = "$agent_exitcode" ] || [ 2 = "$agent_exitcode" ]; then
+ break
+ else
+ sleep 10s
+ fi
+done
+exit ${agent_exitcode:-99}
+'
+
+title () {
+ date=`date +'%Y-%m-%d %H:%M:%S'`
+ printf "$date $1\n"
+}
+
+function run_puppet() {
+ node=$1
+ return_var=$2
+
+ title "Running puppet on $node"
+ TMP_FILE=`mktemp`
+ if [[ "$DEBUG" != "0" ]]; then
+ ssh -t -p$SSH_PORT -o "StrictHostKeyChecking no" -o "ConnectTimeout 5" root@$node -C bash -c "'$PUPPET_AGENT'" | tee $TMP_FILE
+ else
+ ssh -t -p$SSH_PORT -o "StrictHostKeyChecking no" -o "ConnectTimeout 5" root@$node -C bash -c "'$PUPPET_AGENT'" > $TMP_FILE 2>&1
+ fi
+
+ ECODE=${PIPESTATUS[0]}
+ RESULT=$(cat $TMP_FILE)
+
+ if [[ "$ECODE" != "255" && ! ("$RESULT" =~ 'already in progress') && "$ECODE" != "2" && "$ECODE" != "0" ]]; then
+ # Ssh exits 255 if the connection timed out. Just ignore that.
+ # Puppet exits 2 if there are changes. For real!
+ # Puppet prints 'Notice: Run of Puppet configuration client already in progress' if another puppet process
+ # was already running
+ echo "ERROR running puppet on $node: exit code $ECODE"
+ if [[ "$DEBUG" == "0" ]]; then
+ title "Command output follows:"
+ echo $RESULT
+ fi
+ fi
+ if [[ "$ECODE" == "255" ]]; then
+ title "Connection timed out"
+ ECODE=0
+ fi
+ if [[ "$ECODE" == "2" ]]; then
+ ECODE=0
+ fi
+ rm -f $TMP_FILE
+ eval "$return_var=$ECODE"
+}
+
+function run_command() {
+ node=$1
+ return_var=$2
+ command=$3
+
+ title "Running '$command' on $node"
+ TMP_FILE=`mktemp`
+ if [[ "$DEBUG" != "0" ]]; then
+ ssh -t -p$SSH_PORT -o "StrictHostKeyChecking no" -o "ConnectTimeout 5" root@$node -C "$command" | tee $TMP_FILE
+ else
+ ssh -t -p$SSH_PORT -o "StrictHostKeyChecking no" -o "ConnectTimeout 5" root@$node -C "$command" > $TMP_FILE 2>&1
+ fi
+
+ ECODE=$?
+ RESULT=$(cat $TMP_FILE)
+
+ if [[ "$ECODE" != "255" && "$ECODE" != "0" ]]; then
+ # Ssh exits 255 if the connection timed out. Just ignore that; it's possible that this node is
+ # a shell node that is down.
+ title "ERROR running command on $node: exit code $ECODE"
+ if [[ "$DEBUG" == "0" ]]; then
+ title "Command output follows:"
+ echo $RESULT
+ fi
+ fi
+ if [[ "$ECODE" == "255" ]]; then
+ title "Connection timed out"
+ ECODE=0
+ fi
+ rm -f $TMP_FILE
+ eval "$return_var=$ECODE"
+}
+
+title "Updating API server"
+SUM_ECODE=0
+run_puppet $IDENTIFIER.arvadosapi.com ECODE
+SUM_ECODE=$(($SUM_ECODE + $ECODE))
+if [ ! "$IDENTIFIER" = "c97qk" ]
+then
+ run_command $IDENTIFIER.arvadosapi.com ECODE "dpkg -L arvados-mailchimp-plugin 2>/dev/null && apt-get install arvados-mailchimp-plugin --reinstall || echo"
+ SUM_ECODE=$(($SUM_ECODE + $ECODE))
+fi
+
+if [[ "$SUM_ECODE" != "0" ]]; then
+ title "ERROR: Updating API server FAILED"
+ EXITCODE=$(($EXITCODE + $SUM_ECODE))
+ exit $EXITCODE
+fi
+
+title "Loading ARVADOS_API_HOST and ARVADOS_API_TOKEN"
+if [[ -f "$HOME/.config/arvados/$IDENTIFIER.arvadosapi.com.conf" ]]; then
+ . $HOME/.config/arvados/$IDENTIFIER.arvadosapi.com.conf
+else
+ title "WARNING: $HOME/.config/arvados/$IDENTIFIER.arvadosapi.com.conf not found."
+fi
+if [[ "$ARVADOS_API_HOST" == "" ]] || [[ "$ARVADOS_API_TOKEN" == "" ]]; then
+ title "ERROR: ARVADOS_API_HOST and/or ARVADOS_API_TOKEN environment variables are not set."
+ exit 1
+fi
+
+title "Locating Arvados Standard Docker images project"
+
+JSON_FILTER="[[\"name\", \"=\", \"Arvados Standard Docker Images\"], [\"owner_uuid\", \"=\", \"$IDENTIFIER-tpzed-000000000000000\"]]"
+DOCKER_IMAGES_PROJECT=`ARVADOS_API_HOST=$ARVADOS_API_HOST ARVADOS_API_TOKEN=$ARVADOS_API_TOKEN arv --format=uuid group list --filters="$JSON_FILTER"`
+
+if [[ "$DOCKER_IMAGES_PROJECT" == "" ]]; then
+ title "Warning: Arvados Standard Docker Images project not found. Creating it."
+
+ DOCKER_IMAGES_PROJECT=`ARVADOS_API_HOST=$ARVADOS_API_HOST ARVADOS_API_TOKEN=$ARVADOS_API_TOKEN arv --format=uuid group create --group "{\"owner_uuid\":\"$IDENTIFIER-tpzed-000000000000000\", \"name\":\"Arvados Standard Docker Images\", \"group_class\":\"project\"}"`
+ ARVADOS_API_HOST=$ARVADOS_API_HOST ARVADOS_API_TOKEN=$ARVADOS_API_TOKEN arv link create --link "{\"tail_uuid\":\"$IDENTIFIER-j7d0g-fffffffffffffff\", \"head_uuid\":\"$DOCKER_IMAGES_PROJECT\", \"link_class\":\"permission\", \"name\":\"can_read\" }"
+ if [[ "$?" != "0" ]]; then
+ title "ERROR: could not create standard Docker images project Please create it, cf. http://doc.arvados.org/install/create-standard-objects.html"
+ exit 1
+ fi
+fi
+
+title "Found Arvados Standard Docker Images project with uuid $DOCKER_IMAGES_PROJECT"
+GIT_COMMIT=`ssh -o "StrictHostKeyChecking no" $IDENTIFIER cat /usr/local/arvados/src/git-commit.version`
+
+if [[ "$?" != "0" ]] || [[ "$GIT_COMMIT" == "" ]]; then
+ title "ERROR: unable to get arvados/jobs Docker image git revision"
+ exit 1
+else
+ title "Found git commit for arvados/jobs Docker image: $GIT_COMMIT"
+fi
+
+run_command shell.$IDENTIFIER ECODE "ARVADOS_API_HOST=$ARVADOS_API_HOST ARVADOS_API_TOKEN=$ARVADOS_API_TOKEN /usr/local/rvm/bin/rvm-exec default arv keep docker" |grep -q $GIT_COMMIT
+
+if [[ "$?" == "0" ]]; then
+ title "Found latest arvados/jobs Docker image, nothing to upload"
+else
+ title "Installing latest arvados/jobs Docker image"
+ ssh -o "StrictHostKeyChecking no" shell.$IDENTIFIER "ARVADOS_API_HOST=$ARVADOS_API_HOST ARVADOS_API_TOKEN=$ARVADOS_API_TOKEN /usr/local/rvm/bin/rvm-exec default arv keep docker --pull --project-uuid=$DOCKER_IMAGES_PROJECT arvados/jobs $GIT_COMMIT"
+ if [[ "$?" -ne 0 ]]; then
+ title "'git pull' failed exiting..."
+ exit 1
+ fi
+fi
+
+title "Gathering list of shell and Keep nodes"
+SHELL_NODES=`ARVADOS_API_HOST=$ARVADOS_API_HOST ARVADOS_API_TOKEN=$ARVADOS_API_TOKEN arv virtual_machine list |jq .items[].hostname -r`
+KEEP_NODES=`ARVADOS_API_HOST=$ARVADOS_API_HOST ARVADOS_API_TOKEN=$ARVADOS_API_TOKEN arv keep_service list |jq .items[].service_host -r`
+
+title "Updating workbench"
+SUM_ECODE=0
+if [[ `host workbench.$ARVADOS_API_HOST |cut -f4 -d' '` != `host $ARVADOS_API_HOST |cut -f4 -d' '` ]]; then
+ # Workbench runs on a separate host. We need to run puppet there too.
+ run_puppet workbench.$IDENTIFIER ECODE
+ SUM_ECODE=$(($SUM_ECODE + $ECODE))
+fi
+
+if [[ "$SUM_ECODE" != "0" ]]; then
+ title "ERROR: Updating workbench FAILED"
+ EXITCODE=$(($EXITCODE + $SUM_ECODE))
+ exit $EXITCODE
+fi
+
+for n in manage switchyard $SHELL_NODES $KEEP_NODES; do
+ ECODE=0
+ if [[ $n =~ $ARVADOS_API_HOST$ ]]; then
+ # e.g. keep.qr1hi.arvadosapi.com
+ node=$n
+ else
+ # e.g. shell
+ node=$n.$ARVADOS_API_HOST
+ fi
+
+ # e.g. keep.qr1hi
+ node=${node%.arvadosapi.com}
+
+ title "Updating $node"
+ run_puppet $node ECODE
+ if [[ "$ECODE" != "0" ]]; then
+ title "ERROR: Updating $node node FAILED: exit code $ECODE"
+ EXITCODE=$(($EXITCODE + $ECODE))
+ exit $EXITCODE
+ fi
+done
diff --git a/build/jenkins/run-diagnostics-suite.sh b/build/jenkins/run-diagnostics-suite.sh
new file mode 100755
index 0000000..015a053
--- /dev/null
+++ b/build/jenkins/run-diagnostics-suite.sh
@@ -0,0 +1,86 @@
+#!/bin/bash
+
+EXITCODE=0
+
+INSTANCE=$1
+REVISION=$2
+
+if [[ "$INSTANCE" == '' ]]; then
+ echo "Syntax: $0 <instance> [revision]"
+ exit 1
+fi
+
+if [[ "$REVISION" == '' ]]; then
+ # See if there's a configuration file with the revision.
+ CONFIG_PATH=/home/jenkins/configuration/$INSTANCE.arvadosapi.com-versions.conf
+ if [[ -f $CONFIG_PATH ]]; then
+ echo "Loading git revision from $CONFIG_PATH"
+ . $CONFIG_PATH
+ REVISION=$ARVADOS_GIT_REVISION
+ fi
+fi
+
+if [[ "$REVISION" != '' ]]; then
+ echo "Git revision is $REVISION"
+else
+ echo "No valid git revision found, proceeding with what is in place."
+fi
+
+# Sanity check
+if ! [[ -n "$WORKSPACE" ]]; then
+ echo "WORKSPACE environment variable not set"
+ exit 1
+fi
+
+title () {
+ txt="********** $1 **********"
+ printf "\n%*s%s\n\n" $((($COLUMNS-${#txt})/2)) "" "$txt"
+}
+
+timer_reset() {
+ t0=$SECONDS
+}
+
+timer() {
+ echo -n "$(($SECONDS - $t0))s"
+}
+
+source /etc/profile.d/rvm.sh
+echo $WORKSPACE
+
+title "Starting diagnostics"
+timer_reset
+
+cd $WORKSPACE
+
+if [[ "$REVISION" != '' ]]; then
+ git checkout $REVISION
+fi
+
+cp -f /home/jenkins/diagnostics/arvados-workbench/$INSTANCE-application.yml $WORKSPACE/apps/workbench/config/application.yml
+
+cd $WORKSPACE/apps/workbench
+
+HOME="$GEMHOME" bundle install --no-deployment
+
+if [[ ! -d tmp ]]; then
+ mkdir tmp
+fi
+
+RAILS_ENV=diagnostics bundle exec rake TEST=test/diagnostics/pipeline_test.rb
+
+ECODE=$?
+
+if [[ "$REVISION" != '' ]]; then
+ git checkout master
+fi
+
+if [[ "$ECODE" != "0" ]]; then
+ title "!!!!!! DIAGNOSTICS FAILED (`timer`) !!!!!!"
+ EXITCODE=$(($EXITCODE + $ECODE))
+ exit $EXITCODE
+fi
+
+title "Diagnostics complete (`timer`)"
+
+exit $EXITCODE
diff --git a/build/jenkins/run-library.sh b/build/jenkins/run-library.sh
new file mode 100755
index 0000000..c2e9b40
--- /dev/null
+++ b/build/jenkins/run-library.sh
@@ -0,0 +1,348 @@
+#!/bin/bash
+
+# A library of functions shared by the various scripts in this directory.
+
+# This is the timestamp of (roughly) when we merged the change to include
+# licenses with Arvados packages. We use it as a heuristic to add revisions for
+# older packages.
+LICENSE_PACKAGE_TS=20151208015500
+
+debug_echo () {
+ echo "$@" >"$STDOUT_IF_DEBUG"
+}
+
+find_easy_install() {
+ for version_suffix in "$@"; do
+ if "easy_install$version_suffix" --version >/dev/null 2>&1; then
+ echo "easy_install$version_suffix"
+ return 0
+ fi
+ done
+ cat >&2 <<EOF
+$helpmessage
+
+Error: easy_install$1 (from Python setuptools module) not found
+
+EOF
+ exit 1
+}
+
+format_last_commit_here() {
+ local format="$1"; shift
+ TZ=UTC git log -n1 --first-parent "--format=format:$format" .
+}
+
+version_from_git() {
+ # Generates a version number from the git log for the current working
+ # directory, and writes it to stdout.
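+ # Example output for a hypothetical commit made at 2016-02-01 12:00:00 UTC
+ # with abbreviated hash abc1234: 0.1.20160201120000.abc1234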
+ local git_ts git_hash
+ declare $(format_last_commit_here "git_ts=%ct git_hash=%h")
+ echo "0.1.$(date -ud "@$git_ts" +%Y%m%d%H%M%S).$git_hash"
+}
+
+nohash_version_from_git() {
+ version_from_git | cut -d. -f1-3
+}
+
+timestamp_from_git() {
+ format_last_commit_here "%ct"
+}
+
+handle_python_package () {
+ # This function assumes the current working directory is the python package directory
+ if [ -n "$(find dist -name "*-$(nohash_version_from_git).tar.gz" -print -quit)" ]; then
+ # This package doesn't need rebuilding.
+ return
+ fi
+ # Make sure only to use sdist - that's the only format pip can deal with (sigh)
+ python setup.py $DASHQ_UNLESS_DEBUG sdist
+}
+
+handle_ruby_gem() {
+ local gem_name="$1"; shift
+ local gem_version="$(nohash_version_from_git)"
+ local gem_src_dir="$(pwd)"
+
+ if ! [[ -e "${gem_name}-${gem_version}.gem" ]]; then
+ find -maxdepth 1 -name "${gem_name}-*.gem" -delete
+
+ # -q appears to be broken in gem version 2.2.2
+ $GEM build "$gem_name.gemspec" $DASHQ_UNLESS_DEBUG >"$STDOUT_IF_DEBUG" 2>"$STDERR_IF_DEBUG"
+ fi
+}
+
+# Usage: package_go_binary services/foo arvados-foo "Compute foo to arbitrary precision"
+package_go_binary() {
+ local src_path="$1"; shift
+ local prog="$1"; shift
+ local description="$1"; shift
+ local license_file="${1:-agpl-3.0.txt}"; shift
+
+ debug_echo "package_go_binary $src_path as $prog"
+
+ local basename="${src_path##*/}"
+
+ mkdir -p "$GOPATH/src/git.curoverse.com"
+ ln -sfn "$WORKSPACE" "$GOPATH/src/git.curoverse.com/arvados.git"
+
+ cd "$GOPATH/src/git.curoverse.com/arvados.git/$src_path"
+ local version="$(version_from_git)"
+ local timestamp="$(timestamp_from_git)"
+
+ # If the command imports anything from the Arvados SDK, bump the
+ # version number and build a new package whenever the SDK changes.
+ if grep -qr git.curoverse.com/arvados .; then
+ cd "$GOPATH/src/git.curoverse.com/arvados.git/sdk/go"
+ if [[ $(timestamp_from_git) -gt "$timestamp" ]]; then
+ version=$(version_from_git)
+ fi
+ fi
+
+ cd $WORKSPACE/packages/$TARGET
+ go get "git.curoverse.com/arvados.git/$src_path"
+ fpm_build "$GOPATH/bin/$basename=/usr/bin/$prog" "$prog" 'Curoverse, Inc.' dir "$version" "--url=https://arvados.org" "--license=GNU Affero General Public License, version 3.0" "--description=$description" "$WORKSPACE/$license_file=/usr/share/doc/$prog/$license_file"
+}
+
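+# default_iteration returns the fpm --iteration value to use for a package.
+# Illustration (hypothetical versions): 0.1.20151101000000.abc1234 predates
+# LICENSE_PACKAGE_TS, so the iteration defaults to 2, while
+# 0.1.20160101000000.abc1234 is newer and defaults to 1.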
+default_iteration() {
+ local package_name="$1"; shift
+ local package_version="$1"; shift
+ local iteration=1
+ if [[ $package_version =~ ^0\.1\.([0-9]{14})(\.|$) ]] && \
+ [[ ${BASH_REMATCH[1]} -le $LICENSE_PACKAGE_TS ]]; then
+ iteration=2
+ fi
+ echo $iteration
+}
+
+_build_rails_package_scripts() {
+ local pkgname="$1"; shift
+ local destdir="$1"; shift
+ local srcdir="$RUN_BUILD_PACKAGES_PATH/rails-package-scripts"
+ for scriptname in postinst prerm postrm; do
+ cat "$srcdir/$pkgname.sh" "$srcdir/step2.sh" "$srcdir/$scriptname.sh" \
+ >"$destdir/$scriptname" || return $?
+ done
+}
+
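+# handle_rails_package builds a Rails application package, e.g.:
+#   handle_rails_package arvados-api-server "$WORKSPACE/services/api" \
+#       "$WORKSPACE/agpl-3.0.txt" --url="https://arvados.org" ...
+# Arguments after the license path are passed through to fpm_build.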
+handle_rails_package() {
+ local pkgname="$1"; shift
+ local srcdir="$1"; shift
+ local license_path="$1"; shift
+ local scripts_dir="$(mktemp --tmpdir -d "$pkgname-XXXXXXXX.scripts")" && \
+ local version_file="$(mktemp --tmpdir "$pkgname-XXXXXXXX.version")" && (
+ set -e
+ _build_rails_package_scripts "$pkgname" "$scripts_dir"
+ cd "$srcdir"
+ mkdir -p tmp
+ version_from_git >"$version_file"
+ git rev-parse HEAD >git-commit.version
+ bundle package --all
+ )
+ if [[ 0 != "$?" ]] || ! cd "$WORKSPACE/packages/$TARGET"; then
+ echo "ERROR: $pkgname package prep failed" >&2
+ rm -rf "$scripts_dir" "$version_file"
+ EXITCODE=1
+ return 1
+ fi
+ local railsdir="/var/www/${pkgname%-server}/current"
+ local -a pos_args=("$srcdir/=$railsdir" "$pkgname" "Curoverse, Inc." dir
+ "$(cat "$version_file")")
+ local license_arg="$license_path=$railsdir/$(basename "$license_path")"
+ # --iteration=5 accommodates the package script bugfixes #8371 and #8413.
+ local -a switches=(--iteration=5
+ --after-install "$scripts_dir/postinst"
+ --before-remove "$scripts_dir/prerm"
+ --after-remove "$scripts_dir/postrm")
+ # For some reason fpm excludes need to not start with /.
+ local exclude_root="${railsdir#/}"
+ # .git and packages are for the SSO server, which is built from its
+ # repository root.
+ local -a exclude_list=(.git packages tmp log coverage Capfile\* \
+ config/deploy\* config/application.yml)
+ # for arvados-workbench, we need to have the (dummy) config/database.yml in the package
+ if [[ "$pkgname" != "arvados-workbench" ]]; then
+ exclude_list+=('config/database.yml')
+ fi
+ for exclude in ${exclude_list[@]}; do
+ switches+=(-x "$exclude_root/$exclude")
+ done
+ fpm_build "${pos_args[@]}" "${switches[@]}" \
+ -x "$exclude_root/vendor/bundle" "$@" "$license_arg"
+ rm -rf "$scripts_dir" "$version_file"
+}
+
+# Build packages for everything
+fpm_build () {
+ # The package source. Depending on the source type, this can be a
+ # path, or the name of the package in an upstream repository (e.g.,
+ # pip).
+ PACKAGE=$1
+ shift
+ # The name of the package to build. Defaults to $PACKAGE.
+ PACKAGE_NAME=${1:-$PACKAGE}
+ shift
+ # Optional: the vendor of the package. Should be "Curoverse, Inc." for
+ # packages of our own software. Passed to fpm --vendor.
+ VENDOR=$1
+ shift
+ # The type of source package. Passed to fpm -s. Default "python".
+ PACKAGE_TYPE=${1:-python}
+ shift
+ # Optional: the package version number. Passed to fpm -v.
+ VERSION=$1
+ shift
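+ # Example invocation (cf. the libarvados-perl build; the version string
+ # shown here is hypothetical):
+ # fpm_build install/lib/=/usr/share libarvados-perl "Curoverse, Inc." dir \
+ # "0.1.20160201120000.abc1234"
+ # Any remaining arguments are passed through to fpm below.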
+
+ case "$PACKAGE_TYPE" in
+ python)
+ # All Arvados Python2 packages depend on Python 2.7.
+ # Make sure we build with that for consistency.
+ set -- "$@" --python-bin python2.7 \
+ --python-easyinstall "$EASY_INSTALL2" \
+ --python-package-name-prefix "$PYTHON2_PKG_PREFIX" \
+ --depends "$PYTHON2_PACKAGE"
+ ;;
+ python3)
+ # fpm does not actually support a python3 package type. Instead
+ # we recognize it as a convenience shortcut to add several
+ # necessary arguments to fpm's command line later, after we're
+ # done handling positional arguments.
+ PACKAGE_TYPE=python
+ set -- "$@" --python-bin python3 \
+ --python-easyinstall "$EASY_INSTALL3" \
+ --python-package-name-prefix "$PYTHON3_PKG_PREFIX" \
+ --depends "$PYTHON3_PACKAGE"
+ ;;
+ esac
+
+ declare -a COMMAND_ARR=("fpm" "--maintainer=Ward Vandewege <ward at curoverse.com>" "-s" "$PACKAGE_TYPE" "-t" "$FORMAT")
+ if [ python = "$PACKAGE_TYPE" ]; then
+ COMMAND_ARR+=(--exclude=\*/{dist,site}-packages/tests/\*)
+ if [ deb = "$FORMAT" ]; then
+ # Dependencies are built from setup.py. Since setup.py will never
+ # refer to Debian package iterations, it doesn't make sense to
+ # enforce those in the .deb dependencies.
+ COMMAND_ARR+=(--deb-ignore-iteration-in-dependencies)
+ fi
+ fi
+
+ if [[ "${DEBUG:-0}" != "0" ]]; then
+ COMMAND_ARR+=('--verbose' '--log' 'info')
+ fi
+
+ if [[ "$PACKAGE_NAME" != "$PACKAGE" ]]; then
+ COMMAND_ARR+=('-n' "$PACKAGE_NAME")
+ fi
+
+ if [[ "$VENDOR" != "" ]]; then
+ COMMAND_ARR+=('--vendor' "$VENDOR")
+ fi
+
+ if [[ "$VERSION" != "" ]]; then
+ COMMAND_ARR+=('-v' "$VERSION")
+ fi
+ # We can always add an --iteration here. If another one is specified in $@,
+ # that will take precedence, as desired.
+ COMMAND_ARR+=(--iteration "$(default_iteration "$PACKAGE" "$VERSION")")
+
+ # 'dir' type packages are provided in the form /path/to/source=/path/to/dest
+ # so strip off the 2nd part to check for fpm-info below.
+ PACKAGE_DIR=$(echo $PACKAGE | sed 's/\/=.*//')
+
+ # Append --depends X and other arguments specified by fpm-info.sh in
+ # the package source dir. These are added last so they can override
+ # the arguments added by this script.
+ declare -a fpm_args=()
+ declare -a build_depends=()
+ declare -a fpm_depends=()
+ declare -a fpm_exclude=()
+ FPM_INFO=""
+ if [[ -d "$PACKAGE_DIR" ]]; then
+ FPM_INFO="$PACKAGE_DIR/fpm-info.sh"
+ elif [[ -e "${WORKSPACE}/backports/${PACKAGE_TYPE}-${PACKAGE}/fpm-info.sh" ]]; then
+ FPM_INFO="${WORKSPACE}/backports/${PACKAGE_TYPE}-${PACKAGE}/fpm-info.sh"
+ debug_echo "Found fpm-info.sh in backports: $FPM_INFO"
+ elif [[ -e "${WORKSPACE}/backports/${PACKAGE_TYPE}-${PACKAGE_NAME}/fpm-info.sh" ]]; then
+ FPM_INFO="${WORKSPACE}/backports/${PACKAGE_TYPE}-${PACKAGE_NAME}/fpm-info.sh"
+ fi
+ if [[ -e "$FPM_INFO" ]]; then
+ debug_echo "Loading fpm overrides from $FPM_INFO"
+ source "$FPM_INFO"
+ fi
+ for pkg in "${build_depends[@]}"; do
+ if [[ $TARGET =~ debian|ubuntu ]]; then
+ pkg_deb=$(ls "$WORKSPACE/packages/$TARGET/${pkg}_"*.deb | sort -rg | awk 'NR==1')
+ if [[ -e $pkg_deb ]]; then
+ echo "Installing build_dep $pkg from $pkg_deb"
+ dpkg -i "$pkg_deb"
+ else
+ echo "Attemping to install build_dep $pkg using apt-get"
+ apt-get install -y "$pkg"
+ fi
+ apt-get -y -f install
+ else
+ pkg_rpm=$(ls "$WORKSPACE/packages/$TARGET/$pkg"-[0-9]*.rpm | sort -rg | awk 'NR==1')
+ if [[ -e $pkg_rpm ]]; then
+ echo "Installing build_dep $pkg from $pkg_rpm"
+ rpm -i "$pkg_rpm"
+ else
+ echo "Attemping to install build_dep $pkg"
+ rpm -i "$pkg"
+ fi
+ fi
+ done
+ for i in "${fpm_depends[@]}"; do
+ COMMAND_ARR+=('--depends' "$i")
+ done
+ for i in "${fpm_exclude[@]}"; do
+ COMMAND_ARR+=('--exclude' "$i")
+ done
+
+ # Append remaining function arguments directly to fpm's command line.
+ for i; do
+ COMMAND_ARR+=("$i")
+ done
+
+ COMMAND_ARR+=("${fpm_args[@]}")
+
+ COMMAND_ARR+=("$PACKAGE")
+
+ debug_echo -e "\n${COMMAND_ARR[@]}\n"
+
+ FPM_RESULTS=$("${COMMAND_ARR[@]}")
+ FPM_EXIT_CODE=$?
+
+ fpm_verify $FPM_EXIT_CODE $FPM_RESULTS
+}
+
+# verify build results
+fpm_verify () {
+ FPM_EXIT_CODE=$1
+ shift
+ FPM_RESULTS=$@
+
+ FPM_PACKAGE_NAME=''
+ if [[ $FPM_RESULTS =~ ([A-Za-z0-9_\.-]*\.)(deb|rpm) ]]; then
+ FPM_PACKAGE_NAME=${BASH_REMATCH[1]}${BASH_REMATCH[2]}
+ fi
+
+ if [[ "$FPM_PACKAGE_NAME" == "" ]]; then
+ EXITCODE=1
+ echo "Error: $PACKAGE: Unable to figure out package name from fpm results:"
+ echo
+ echo $FPM_RESULTS
+ echo
+ elif [[ "$FPM_RESULTS" =~ "File already exists" ]]; then
+ echo "Package $FPM_PACKAGE_NAME exists, not rebuilding"
+ elif [[ 0 -ne "$FPM_EXIT_CODE" ]]; then
+ echo "Error building package for $1:\n $FPM_RESULTS"
+ fi
+}
+
+install_package() {
+ PACKAGES=$@
+ if [[ "$FORMAT" == "deb" ]]; then
+ $SUDO apt-get install $PACKAGES --yes
+ elif [[ "$FORMAT" == "rpm" ]]; then
+ $SUDO yum -q -y install $PACKAGES
+ fi
+}
diff --git a/build/jenkins/run-performance-suite.sh b/build/jenkins/run-performance-suite.sh
new file mode 100755
index 0000000..2944bda
--- /dev/null
+++ b/build/jenkins/run-performance-suite.sh
@@ -0,0 +1,88 @@
+#!/bin/bash
+
+EXITCODE=0
+
+INSTANCE=$1
+REVISION=$2
+
+if [[ "$INSTANCE" == '' ]]; then
+ echo "Syntax: $0 <instance> [revision]"
+ exit 1
+fi
+
+if [[ "$REVISION" == '' ]]; then
+ # See if there's a configuration file with the revision.
+ CONFIG_PATH=/home/jenkins/configuration/$INSTANCE.arvadosapi.com-versions.conf
+ if [[ -f $CONFIG_PATH ]]; then
+ echo "Loading git revision from $CONFIG_PATH"
+ . $CONFIG_PATH
+ REVISION=$ARVADOS_GIT_REVISION
+ fi
+fi
+
+if [[ "$REVISION" != '' ]]; then
+ echo "Git revision is $REVISION"
+else
+ echo "No valid git revision found, proceeding with what is in place."
+fi
+
+# Sanity check
+if ! [[ -n "$WORKSPACE" ]]; then
+ echo "WORKSPACE environment variable not set"
+ exit 1
+fi
+
+title () {
+ txt="********** $1 **********"
+ printf "\n%*s%s\n\n" $((($COLUMNS-${#txt})/2)) "" "$txt"
+}
+
+timer_reset() {
+ t0=$SECONDS
+}
+
+timer() {
+ echo -n "$(($SECONDS - $t0))s"
+}
+
+source /etc/profile.d/rvm.sh
+echo $WORKSPACE
+
+title "Starting performance test"
+timer_reset
+
+cd $WORKSPACE
+
+if [[ "$REVISION" != '' ]]; then
+ git checkout $REVISION
+fi
+
+cp -f /home/jenkins/diagnostics/arvados-workbench/$INSTANCE-application.yml $WORKSPACE/apps/workbench/config/application.yml
+
+cd $WORKSPACE/apps/workbench
+
+HOME="$GEMHOME" bundle install --no-deployment
+
+if [[ ! -d tmp ]]; then
+ mkdir tmp
+fi
+
+mkdir -p tmp/cache
+
+RAILS_ENV=performance bundle exec rake test:benchmark
+
+ECODE=$?
+
+if [[ "$REVISION" != '' ]]; then
+ git checkout master
+fi
+
+if [[ "$ECODE" != "0" ]]; then
+ title "!!!!!! PERFORMANCE TESTS FAILED (`timer`) !!!!!!"
+ EXITCODE=$(($EXITCODE + $ECODE))
+ exit $EXITCODE
+fi
+
+title "Performance tests complete (`timer`)"
+
+exit $EXITCODE
diff --git a/build/jenkins/run-tapestry-tests.sh b/build/jenkins/run-tapestry-tests.sh
new file mode 100755
index 0000000..851a81d
--- /dev/null
+++ b/build/jenkins/run-tapestry-tests.sh
@@ -0,0 +1,54 @@
+#!/bin/bash
+
+EXITCODE=0
+
+COLUMNS=80
+
+title () {
+ txt="********** $1 **********"
+ printf "\n%*s%s\n\n" $((($COLUMNS-${#txt})/2)) "" "$txt"
+}
+
+source /etc/profile.d/rvm.sh
+
+# This shouldn't really be necessary... but the jenkins/rvm integration seems a
+# bit wonky occasionally.
+rvm use ree
+
+echo $WORKSPACE
+
+# Tapestry
+title "Starting tapestry tests"
+cd "$WORKSPACE"
+
+# There are a few submodules
+git submodule init && git submodule update
+
+# Use sqlite for testing
+sed -i'' -e "s:mysql:sqlite3:" Gemfile
+
+# Tapestry is not set up yet to use --deployment
+#bundle install --deployment
+bundle install
+
+rm -f config/database.yml
+rm -f config/environments/test.rb
+cp $HOME/tapestry/test.rb config/environments/
+cp $HOME/tapestry/database.yml config/
+
+export RAILS_ENV=test
+
+bundle exec rake db:drop
+bundle exec rake db:create
+bundle exec rake db:setup
+bundle exec rake test
+
+ECODE=$?
+
+if [[ "$ECODE" != "0" ]]; then
+ title "!!!!!! TAPESTRY TESTS FAILED !!!!!!"
+ EXITCODE=$(($EXITCODE + $ECODE))
+fi
+
+title "Tapestry tests complete"
+
+exit $EXITCODE
diff --git a/build/jenkins/run-tests.sh b/build/jenkins/run-tests.sh
new file mode 100755
index 0000000..a17d610
--- /dev/null
+++ b/build/jenkins/run-tests.sh
@@ -0,0 +1,826 @@
+#!/bin/bash
+
+. `dirname "$(readlink -f "$0")"`/libcloud-pin
+
+read -rd "\000" helpmessage <<EOF
+$(basename $0): Install and test Arvados components.
+
+Exit non-zero if any tests fail.
+
+Syntax:
+ $(basename $0) WORKSPACE=/path/to/arvados [options]
+
+Options:
+
+--skip FOO Do not test the FOO component.
+--only FOO Do not test anything except the FOO component.
+--temp DIR Install components and dependencies under DIR instead of
+ making a new temporary directory. Implies --leave-temp.
+--leave-temp Do not remove GOPATH, virtualenv, and other temp dirs at exit.
+ Instead, show the path to give as --temp to reuse them in
+ subsequent invocations.
+--skip-install Do not run any install steps. Just run tests.
+ You should provide GOPATH, GEMHOME, and VENVDIR options
+ from a previous invocation if you use this option.
+--only-install Run specific install step
+WORKSPACE=path Arvados source tree to test.
+CONFIGSRC=path Dir with api server config files to copy into source tree.
+ (If none given, leave config files alone in source tree.)
+services/api_test="TEST=test/functional/arvados/v1/collections_controller_test.rb"
+ Restrict apiserver tests to the given file
+sdk/python_test="--test-suite test.test_keep_locator"
+ Restrict Python SDK tests to the given class
+apps/workbench_test="TEST=test/integration/pipeline_instances_test.rb"
+ Restrict Workbench tests to the given file
+services/arv-git-httpd_test="-check.vv"
+ Show all log messages, even when tests pass (also works
+ with services/keepstore_test etc.)
+ARVADOS_DEBUG=1
+ Print more debug messages
+envvar=value Set \$envvar to value. Primarily useful for WORKSPACE,
+ *_test, and other examples shown above.
+
+Assuming --skip-install is not given, all components are installed
+into \$GOPATH, \$VENVDIR, and \$GEMHOME before running any tests. Many
+test suites depend on other components being installed, and installing
+everything tends to be quicker than debugging dependencies.
+
+As a special concession to the current CI server config, CONFIGSRC
+defaults to $HOME/arvados-api-server if that directory exists.
+
+More information and background:
+
+https://arvados.org/projects/arvados/wiki/Running_tests
+
+Available tests:
+
+apps/workbench
+apps/workbench_benchmark
+apps/workbench_profile
+doc
+services/api
+services/arv-git-httpd
+services/crunchstat
+services/dockercleaner
+services/fuse
+services/keep-web
+services/keepproxy
+services/keepstore
+services/login-sync
+services/nodemanager
+services/crunch-dispatch-local
+sdk/cli
+sdk/pam
+sdk/python
+sdk/ruby
+sdk/go/arvadosclient
+sdk/go/keepclient
+sdk/go/manifest
+sdk/go/blockdigest
+sdk/go/streamer
+sdk/go/crunchrunner
+tools/crunchstat-summary
+tools/keep-rsync
+
+EOF
+
+# First make sure to remove any ARVADOS_ variables from the calling
+# environment that could interfere with the tests.
+unset $(env | cut -d= -f1 | grep \^ARVADOS_)
+
+# Reset other variables that could affect our tests' behavior by
+# accident.
+GITDIR=
+GOPATH=
+VENVDIR=
+VENV3DIR=
+PYTHONPATH=
+GEMHOME=
+PERLINSTALLBASE=
+
+COLUMNS=80
+
+skip_install=
+temp=
+temp_preserve=
+
+clear_temp() {
+ if [[ -z "$temp" ]]; then
+ # we didn't even get as far as making a temp dir
+ :
+ elif [[ -z "$temp_preserve" ]]; then
+ rm -rf "$temp"
+ else
+ echo "Leaving behind temp dirs in $temp"
+ fi
+}
+
+fatal() {
+ clear_temp
+ echo >&2 "Fatal: $* (encountered in ${FUNCNAME[1]} at ${BASH_SOURCE[1]} line ${BASH_LINENO[0]})"
+ exit 1
+}
+
+report_outcomes() {
+ for x in "${successes[@]}"
+ do
+ echo "Pass: $x"
+ done
+
+ if [[ ${#failures[@]} == 0 ]]
+ then
+ echo "All test suites passed."
+ else
+ echo "Failures (${#failures[@]}):"
+ for x in "${failures[@]}"
+ do
+ echo "Fail: $x"
+ done
+ fi
+}
+
+exit_cleanly() {
+ trap - INT
+ create-plot-data-from-log.sh $BUILD_NUMBER "$WORKSPACE/apps/workbench/log/test.log" "$WORKSPACE/apps/workbench/log/"
+ rotate_logfile "$WORKSPACE/apps/workbench/log/" "test.log"
+ stop_services
+ rotate_logfile "$WORKSPACE/services/api/log/" "test.log"
+ report_outcomes
+ clear_temp
+ exit ${#failures}
+}
+
+sanity_checks() {
+ ( [[ -n "$WORKSPACE" ]] && [[ -d "$WORKSPACE/services" ]] ) \
+ || fatal "WORKSPACE environment variable not set to a source directory (see: $0 --help)"
+ echo Checking dependencies:
+ echo -n 'virtualenv: '
+ virtualenv --version \
+ || fatal "No virtualenv. Try: apt-get install virtualenv (on ubuntu: python-virtualenv)"
+ echo -n 'go: '
+ go version \
+ || fatal "No go binary. See http://golang.org/doc/install"
+ echo -n 'gcc: '
+ gcc --version | egrep ^gcc \
+ || fatal "No gcc. Try: apt-get install build-essential"
+ echo -n 'fuse.h: '
+ find /usr/include -wholename '*fuse/fuse.h' \
+ || fatal "No fuse/fuse.h. Try: apt-get install libfuse-dev"
+ echo -n 'pyconfig.h: '
+ find /usr/include -name pyconfig.h | egrep --max-count=1 . \
+ || fatal "No pyconfig.h. Try: apt-get install python-dev"
+ echo -n 'nginx: '
+ PATH="$PATH:/sbin:/usr/sbin:/usr/local/sbin" nginx -v \
+ || fatal "No nginx. Try: apt-get install nginx"
+ echo -n 'perl: '
+ perl -v | grep version \
+ || fatal "No perl. Try: apt-get install perl"
+ for mod in ExtUtils::MakeMaker JSON LWP Net::SSL; do
+ echo -n "perl $mod: "
+ perl -e "use $mod; print \"\$$mod::VERSION\\n\"" \
+ || fatal "No $mod. Try: apt-get install perl-modules libcrypt-ssleay-perl libjson-perl libwww-perl"
+ done
+ echo -n 'gitolite: '
+ which gitolite \
+ || fatal "No gitolite. Try: apt-get install gitolite3"
+}
+
+rotate_logfile() {
+ # i.e. rotate_logfile "$WORKSPACE/apps/workbench/log/" "test.log"
+ # $BUILD_NUMBER is set by Jenkins if this script is being called as part of a Jenkins run
+ if [[ -f "$1/$2" ]]; then
+ THEDATE=`date +%Y%m%d%H%M%S`
+ mv "$1/$2" "$1/$THEDATE-$BUILD_NUMBER-$2"
+ gzip "$1/$THEDATE-$BUILD_NUMBER-$2"
+ fi
+}
+
+declare -a failures
+declare -A skip
+declare -A testargs
+skip[apps/workbench_profile]=1
+
+while [[ -n "$1" ]]
+do
+ arg="$1"; shift
+ case "$arg" in
+ --help)
+ echo >&2 "$helpmessage"
+ echo >&2
+ exit 1
+ ;;
+ --skip)
+ skipwhat="$1"; shift
+ skip[$skipwhat]=1
+ ;;
+ --only)
+ only="$1"; skip[$1]=""; shift
+ ;;
+ --skip-install)
+ skip_install=1
+ ;;
+ --only-install)
+ skip_install=1
+ only_install="$1"; shift
+ ;;
+ --temp)
+ temp="$1"; shift
+ temp_preserve=1
+ ;;
+ --leave-temp)
+ temp_preserve=1
+ ;;
+ --retry)
+ retry=1
+ ;;
+ *_test=*)
+ suite="${arg%%_test=*}"
+ args="${arg#*=}"
+ testargs["$suite"]="$args"
+ ;;
+ *=*)
+ eval export $(echo $arg | cut -d= -f1)=\"$(echo $arg | cut -d= -f2-)\"
+ ;;
+ *)
+ echo >&2 "$0: Unrecognized option: '$arg'. Try: $0 --help"
+ exit 1
+ ;;
+ esac
+done
+
+start_api() {
+ echo 'Starting API server...'
+ cd "$WORKSPACE" \
+ && eval $(python sdk/python/tests/run_test_server.py start --auth admin) \
+ && export ARVADOS_TEST_API_HOST="$ARVADOS_API_HOST" \
+ && export ARVADOS_TEST_API_INSTALLED="$$" \
+ && (env | egrep ^ARVADOS)
+}
+
+start_nginx_proxy_services() {
+ echo 'Starting keepproxy, keep-web, arv-git-httpd, and nginx ssl proxy...'
+ cd "$WORKSPACE" \
+ && python sdk/python/tests/run_test_server.py start_keep_proxy \
+ && python sdk/python/tests/run_test_server.py start_keep-web \
+ && python sdk/python/tests/run_test_server.py start_arv-git-httpd \
+ && python sdk/python/tests/run_test_server.py start_nginx \
+ && export ARVADOS_TEST_PROXY_SERVICES=1
+}
+
+stop_services() {
+ if [[ -n "$ARVADOS_TEST_PROXY_SERVICES" ]]; then
+ unset ARVADOS_TEST_PROXY_SERVICES
+ cd "$WORKSPACE" \
+ && python sdk/python/tests/run_test_server.py stop_nginx \
+ && python sdk/python/tests/run_test_server.py stop_arv-git-httpd \
+ && python sdk/python/tests/run_test_server.py stop_keep-web \
+ && python sdk/python/tests/run_test_server.py stop_keep_proxy
+ fi
+ if [[ -n "$ARVADOS_TEST_API_HOST" ]]; then
+ unset ARVADOS_TEST_API_HOST
+ cd "$WORKSPACE" \
+ && python sdk/python/tests/run_test_server.py stop
+ fi
+}
+
+interrupt() {
+ failures+=("($(basename $0) interrupted)")
+ exit_cleanly
+}
+trap interrupt INT
+
+sanity_checks
+
+echo "WORKSPACE=$WORKSPACE"
+
+if [[ -z "$CONFIGSRC" ]] && [[ -d "$HOME/arvados-api-server" ]]; then
+ # Jenkins expects us to use this by default.
+ CONFIGSRC="$HOME/arvados-api-server"
+fi
+
+# Clean up .pyc files that may exist in the workspace
+cd "$WORKSPACE"
+find -name '*.pyc' -delete
+
+if [[ -z "$temp" ]]; then
+ temp="$(mktemp -d)"
+fi
+
+# Set up temporary install dirs (unless existing dirs were supplied)
+for tmpdir in VENVDIR VENV3DIR GOPATH GEMHOME PERLINSTALLBASE
+do
+ if [[ -z "${!tmpdir}" ]]; then
+ eval "$tmpdir"="$temp/$tmpdir"
+ fi
+ if ! [[ -d "${!tmpdir}" ]]; then
+ mkdir "${!tmpdir}" || fatal "can't create ${!tmpdir} (does $temp exist?)"
+ fi
+done
+
+setup_ruby_environment() {
+ if [[ -s "$HOME/.rvm/scripts/rvm" ]] ; then
+ source "$HOME/.rvm/scripts/rvm"
+ using_rvm=true
+ elif [[ -s "/usr/local/rvm/scripts/rvm" ]] ; then
+ source "/usr/local/rvm/scripts/rvm"
+ using_rvm=true
+ else
+ using_rvm=false
+ fi
+
+ if [[ "$using_rvm" == true ]]; then
+ # If rvm is in use, we can't just put separate "dependencies"
+ # and "gems-under-test" paths in GEM_PATH: passenger resets
+ # the environment to the "current gemset", which would lose
+ # our GEM_PATH and prevent our test suites from running ruby
+ # programs (for example, the Workbench test suite could not
+ # boot an API server or run arv). Instead, we have to make an
+ # rvm gemset and use it for everything.
+
+ [[ `type rvm | head -n1` == "rvm is a function" ]] \
+ || fatal 'rvm check'
+
+ # Put rvm's favorite path back in first place (overriding
+ # virtualenv, which just put itself there). Ignore rvm's
+ # complaint about not being in first place already.
+ rvm use @default 2>/dev/null
+
+ # Create (if needed) and switch to an @arvados-tests
+ # gemset. (Leave the choice of ruby to the caller.)
+ rvm use @arvados-tests --create \
+ || fatal 'rvm gemset setup'
+
+ rvm env
+ else
+ # When our "bundle install"s need to install new gems to
+ # satisfy dependencies, we want them to go where "gem install
+ # --user-install" would put them. (However, if the caller has
+ # already set GEM_HOME, we assume that's where dependencies
+ # should be installed, and we should leave it alone.)
+
+ if [ -z "$GEM_HOME" ]; then
+ user_gempath="$(gem env gempath)"
+ export GEM_HOME="${user_gempath%%:*}"
+ fi
+ PATH="$(gem env gemdir)/bin:$PATH"
+
+ # When we build and install our own gems, we install them in our
+ # $GEMHOME tmpdir, and we want them to be at the front of GEM_PATH and
+ # PATH so integration tests prefer them over other versions that
+ # happen to be installed in $user_gempath, system dirs, etc.
+
+ tmpdir_gem_home="$(env - PATH="$PATH" HOME="$GEMHOME" gem env gempath | cut -f1 -d:)"
+ PATH="$tmpdir_gem_home/bin:$PATH"
+ export GEM_PATH="$tmpdir_gem_home:$(gem env gempath)"
+
+ echo "Will install dependencies to $(gem env gemdir)"
+ echo "Will install arvados gems to $tmpdir_gem_home"
+ echo "Gem search path is GEM_PATH=$GEM_PATH"
+ fi
+}
+
+with_test_gemset() {
+ if [[ "$using_rvm" == true ]]; then
+ "$@"
+ else
+ GEM_HOME="$tmpdir_gem_home" GEM_PATH="$tmpdir_gem_home" "$@"
+ fi
+}
+
+gem_uninstall_if_exists() {
+ if gem list "$1\$" | egrep '^\w'; then
+ gem uninstall --force --all --executables "$1"
+ fi
+}
+
+setup_virtualenv() {
+ local venvdest="$1"; shift
+ if ! [[ -e "$venvdest/bin/activate" ]] || ! [[ -e "$venvdest/bin/pip" ]]; then
+ virtualenv --setuptools "$@" "$venvdest" || fatal "virtualenv $venvdest failed"
+ fi
+ "$venvdest/bin/pip" install 'setuptools>=18' 'pip>=7'
+ # ubuntu1404 can't seem to install mock via tests_require, but it can do this.
+ "$venvdest/bin/pip" install 'mock>=1.0' 'pbr<1.7.0'
+}
+
+export PERLINSTALLBASE
+export PERLLIB="$PERLINSTALLBASE/lib/perl5:${PERLLIB:+$PERLLIB}"
+
+export GOPATH
+mkdir -p "$GOPATH/src/git.curoverse.com"
+ln -sfn "$WORKSPACE" "$GOPATH/src/git.curoverse.com/arvados.git" \
+ || fatal "symlink failed"
+
+setup_virtualenv "$VENVDIR" --python python2.7
+. "$VENVDIR/bin/activate"
+
+# Needed for run_test_server.py which is used by certain (non-Python) tests.
+pip freeze 2>/dev/null | egrep ^PyYAML= \
+ || pip install PyYAML >/dev/null \
+ || fatal "pip install PyYAML failed"
+
+# Preinstall forked version of libcloud, because nodemanager "pip install"
+# won't pick it up by default.
+pip freeze 2>/dev/null | egrep ^apache-libcloud==$LIBCLOUD_PIN \
+ || pip install --pre --ignore-installed https://github.com/curoverse/libcloud/archive/apache-libcloud-$LIBCLOUD_PIN.zip >/dev/null \
+ || fatal "pip install apache-libcloud failed"
+
+# Uninstall old llfuse, because services/fuse "pip install" won't
+# upgrade it by default.
+if pip freeze | egrep '^llfuse==0\.41\.'; then
+ yes | pip uninstall 'llfuse<0.42'
+fi
+
+# Deactivate Python 2 virtualenv
+deactivate
+
+# If Python 3 is available, set up its virtualenv in $VENV3DIR.
+# Otherwise, skip dependent tests.
+PYTHON3=$(which python3)
+if [ "0" = "$?" ]; then
+ setup_virtualenv "$VENV3DIR" --python python3
+else
+ PYTHON3=
+ skip[services/dockercleaner]=1
+ cat >&2 <<EOF
+
+Warning: python3 could not be found
+services/dockercleaner install and tests will be skipped
+
+EOF
+fi
+
+# Reactivate Python 2 virtualenv
+. "$VENVDIR/bin/activate"
+
+# Note: this must be the last time we change PATH, otherwise rvm will
+# whine a lot.
+setup_ruby_environment
+
+echo "PATH is $PATH"
+
+if ! which bundler >/dev/null
+then
+ gem install --user-install bundler || fatal 'Could not install bundler'
+fi
+
+checkexit() {
+ if [[ "$1" != "0" ]]; then
+ title "!!!!!! $2 FAILED !!!!!!"
+ failures+=("$2 (`timer`)")
+ else
+ successes+=("$2 (`timer`)")
+ fi
+}
+
+timer_reset() {
+ t0=$SECONDS
+}
+
+timer() {
+ echo -n "$(($SECONDS - $t0))s"
+}
+
+do_test() {
+ while ! do_test_once ${@} && [[ "$retry" == 1 ]]
+ do
+ read -p 'Try again? [Y/n] ' x
+ if [[ "$x" != "y" ]] && [[ "$x" != "" ]]
+ then
+ break
+ fi
+ done
+}
+
+do_test_once() {
+ unset result
+ if [[ -z "${skip[$1]}" ]] && ( [[ -z "$only" ]] || [[ "$only" == "$1" ]] )
+ then
+ title "Running $1 tests"
+ timer_reset
+ if [[ "$2" == "go" ]]
+ then
+ covername="coverage-$(echo "$1" | sed -e 's/\//_/g')"
+ coverflags=("-covermode=count" "-coverprofile=$WORKSPACE/tmp/.$covername.tmp")
+ # We do "go get -t" here to catch compilation errors
+ # before trying "go test". Otherwise, coverage-reporting
+ # mode makes Go show the wrong line numbers when reporting
+ # compilation errors.
+ if [[ -n "${testargs[$1]}" ]]
+ then
+ # "go test -check.vv giturl" doesn't work, but this
+ # does:
+ cd "$WORKSPACE/$1" && \
+ go get -t "git.curoverse.com/arvados.git/$1" && \
+ go test ${coverflags[@]} ${testargs[$1]}
+ else
+ # The above form gets verbose even when testargs is
+ # empty, so use this form in such cases:
+ go get -t "git.curoverse.com/arvados.git/$1" && \
+ go test ${coverflags[@]} "git.curoverse.com/arvados.git/$1"
+ fi
+ result="$?"
+ go tool cover -html="$WORKSPACE/tmp/.$covername.tmp" -o "$WORKSPACE/tmp/$covername.html"
+ rm "$WORKSPACE/tmp/.$covername.tmp"
+ elif [[ "$2" == "pip" ]]
+ then
+ # $3 can name a path directory for us to use, including trailing
+ # slash; e.g., the bin/ subdirectory of a virtualenv.
+ cd "$WORKSPACE/$1" \
+ && "${3}python" setup.py test ${testargs[$1]}
+ elif [[ "$2" != "" ]]
+ then
+ "test_$2"
+ else
+ "test_$1"
+ fi
+ result=${result:-$?}
+ checkexit $result "$1 tests"
+ title "End of $1 tests (`timer`)"
+ return $result
+ else
+ title "Skipping $1 tests"
+ fi
+}
+
+do_install() {
+ if [[ -z "$skip_install" || (-n "$only_install" && "$only_install" == "$1") ]]
+ then
+ title "Running $1 install"
+ timer_reset
+ if [[ "$2" == "go" ]]
+ then
+ go get -t "git.curoverse.com/arvados.git/$1"
+ elif [[ "$2" == "pip" ]]
+ then
+ # $3 can name a path directory for us to use, including trailing
+ # slash; e.g., the bin/ subdirectory of a virtualenv.
+
+ # Need to change to a different directory after creating
+ # the source dist package to avoid a pip bug.
+ # see https://arvados.org/issues/5766 for details.
+
+ # Also need to install twice, because if it believes the package is
+ # already installed, pip won't install it. So the first "pip
+ # install" ensures that the dependencies are met, the second "pip
+ # install" ensures that we've actually installed the local package
+ # we just built.
+ cd "$WORKSPACE/$1" \
+ && "${3}python" setup.py sdist rotate --keep=1 --match .tar.gz \
+ && cd "$WORKSPACE" \
+ && "${3}pip" install --quiet "$WORKSPACE/$1/dist"/*.tar.gz \
+ && "${3}pip" install --quiet --no-deps --ignore-installed "$WORKSPACE/$1/dist"/*.tar.gz
+ elif [[ "$2" != "" ]]
+ then
+ "install_$2"
+ else
+ "install_$1"
+ fi
+ checkexit $? "$1 install"
+ title "End of $1 install (`timer`)"
+ else
+ title "Skipping $1 install"
+ fi
+}
+
+title () {
+ txt="********** $1 **********"
+ printf "\n%*s%s\n\n" $((($COLUMNS-${#txt})/2)) "" "$txt"
+}
+
+bundle_install_trylocal() {
+ (
+ set -e
+ echo "(Running bundle install --local. 'could not find package' messages are OK.)"
+ if ! bundle install --local --no-deployment; then
+ echo "(Running bundle install again, without --local.)"
+ bundle install --no-deployment
+ fi
+ bundle package --all
+ )
+}
+
+install_doc() {
+ cd "$WORKSPACE/doc" \
+ && bundle_install_trylocal \
+ && rm -rf .site
+}
+do_install doc
+
+install_gem() {
+ gemname=$1
+ srcpath=$2
+ with_test_gemset gem_uninstall_if_exists "$gemname" \
+ && cd "$WORKSPACE/$srcpath" \
+ && bundle_install_trylocal \
+ && gem build "$gemname.gemspec" \
+ && with_test_gemset gem install --no-ri --no-rdoc $(ls -t "$gemname"-*.gem|head -n1)
+}
+
+install_ruby_sdk() {
+ install_gem arvados sdk/ruby
+}
+do_install sdk/ruby ruby_sdk
+
+install_perl_sdk() {
+ cd "$WORKSPACE/sdk/perl" \
+ && perl Makefile.PL INSTALL_BASE="$PERLINSTALLBASE" \
+ && make install INSTALLDIRS=perl
+}
+do_install sdk/perl perl_sdk
+
+install_cli() {
+ install_gem arvados-cli sdk/cli
+}
+do_install sdk/cli cli
+
+install_login-sync() {
+ install_gem arvados-login-sync services/login-sync
+}
+do_install services/login-sync login-sync
+
+# Install the Python SDK early. Various other test suites (like
+# keepproxy) bring up run_test_server.py, which imports the arvados
+# module. We can't actually *test* the Python SDK yet though, because
+# its own test suite brings up some of those other programs (like
+# keepproxy).
+declare -a pythonstuff
+pythonstuff=(
+ sdk/pam
+ sdk/python
+ services/fuse
+ services/nodemanager
+ tools/crunchstat-summary
+ )
+for p in "${pythonstuff[@]}"
+do
+ do_install "$p" pip
+done
+if [ -n "$PYTHON3" ]; then
+ do_install services/dockercleaner pip "$VENV3DIR/bin/"
+fi
+
+install_apiserver() {
+ cd "$WORKSPACE/services/api" \
+ && RAILS_ENV=test bundle_install_trylocal
+
+ rm -f config/environments/test.rb
+ cp config/environments/test.rb.example config/environments/test.rb
+
+ if [ -n "$CONFIGSRC" ]
+ then
+ for f in database.yml application.yml
+ do
+ cp "$CONFIGSRC/$f" config/ || fatal "$f"
+ done
+ fi
+
+ # Fill in a random secret_token and blob_signing_key for testing
+ SECRET_TOKEN=`echo 'puts rand(2**512).to_s(36)' |ruby`
+ BLOB_SIGNING_KEY=`echo 'puts rand(2**512).to_s(36)' |ruby`
+
+ sed -i'' -e "s:SECRET_TOKEN:$SECRET_TOKEN:" config/application.yml
+ sed -i'' -e "s:BLOB_SIGNING_KEY:$BLOB_SIGNING_KEY:" config/application.yml
+
+ # Set up empty git repo (for git tests)
+ GITDIR=$(mktemp -d)
+ sed -i'' -e "s:/var/cache/git:$GITDIR:" config/application.default.yml
+
+ rm -rf $GITDIR
+ mkdir -p $GITDIR/test
+ cd $GITDIR/test \
+ && git init \
+ && git config user.email "jenkins at ci.curoverse.com" \
+ && git config user.name "Jenkins, CI" \
+ && touch tmp \
+ && git add tmp \
+ && git commit -m 'initial commit'
+
+ # Clear out any lingering postgresql connections to the test
+ # database, so that we can drop it. This assumes the current user
+ # is a postgresql superuser.
+ cd "$WORKSPACE/services/api" \
+ && test_database=$(python -c "import yaml; print yaml.load(file('config/database.yml'))['test']['database']") \
+ && psql "$test_database" -c "SELECT pg_terminate_backend (pg_stat_activity.procpid::int) FROM pg_stat_activity WHERE pg_stat_activity.datname = '$test_database';" 2>/dev/null
+
+ cd "$WORKSPACE/services/api" \
+ && RAILS_ENV=test bundle exec rake db:drop \
+ && RAILS_ENV=test bundle exec rake db:setup \
+ && RAILS_ENV=test bundle exec rake db:fixtures:load
+}
+do_install services/api apiserver
+
+declare -a gostuff
+gostuff=(
+ sdk/go/arvadosclient
+ sdk/go/blockdigest
+ sdk/go/manifest
+ sdk/go/streamer
+ sdk/go/crunchrunner
+ services/arv-git-httpd
+ services/crunchstat
+ services/keep-web
+ services/keepstore
+ sdk/go/keepclient
+ services/keepproxy
+ services/datamanager/summary
+ services/datamanager/collection
+ services/datamanager/keep
+ services/datamanager
+ services/crunch-dispatch-local
+ services/crunch-run
+ tools/keep-rsync
+ )
+for g in "${gostuff[@]}"
+do
+ do_install "$g" go
+done
+
+install_workbench() {
+ cd "$WORKSPACE/apps/workbench" \
+ && mkdir -p tmp/cache \
+ && RAILS_ENV=test bundle_install_trylocal
+}
+do_install apps/workbench workbench
+
+test_doclinkchecker() {
+ (
+ set -e
+ cd "$WORKSPACE/doc"
+ ARVADOS_API_HOST=qr1hi.arvadosapi.com
+ # Make sure python-epydoc is installed or the next line won't
+ # do much good!
+ PYTHONPATH=$WORKSPACE/sdk/python/ bundle exec rake linkchecker baseurl=file://$WORKSPACE/doc/.site/ arvados_workbench_host=https://workbench.$ARVADOS_API_HOST arvados_api_host=$ARVADOS_API_HOST
+ )
+}
+do_test doc doclinkchecker
+
+stop_services
+
+test_apiserver() {
+ cd "$WORKSPACE/services/api" \
+ && RAILS_ENV=test bundle exec rake test TESTOPTS=-v ${testargs[services/api]}
+}
+do_test services/api apiserver
+
+# Shortcut for when we're only running apiserver tests. This saves a bit of time,
+# because we don't need to start up the api server for subsequent tests.
+if [ ! -z "$only" ] && [ "$only" == "services/api" ]; then
+ rotate_logfile "$WORKSPACE/services/api/log/" "test.log"
+ exit_cleanly
+fi
+
+start_api
+
+test_ruby_sdk() {
+ cd "$WORKSPACE/sdk/ruby" \
+ && bundle exec rake test TESTOPTS=-v ${testargs[sdk/ruby]}
+}
+do_test sdk/ruby ruby_sdk
+
+test_cli() {
+ cd "$WORKSPACE/sdk/cli" \
+ && mkdir -p /tmp/keep \
+ && KEEP_LOCAL_STORE=/tmp/keep bundle exec rake test TESTOPTS=-v ${testargs[sdk/cli]}
+}
+do_test sdk/cli cli
+
+test_login-sync() {
+ cd "$WORKSPACE/services/login-sync" \
+ && bundle exec rake test TESTOPTS=-v ${testargs[services/login-sync]}
+}
+do_test services/login-sync login-sync
+
+for p in "${pythonstuff[@]}"
+do
+ do_test "$p" pip
+done
+do_test services/dockercleaner pip "$VENV3DIR/bin/"
+
+for g in "${gostuff[@]}"
+do
+ do_test "$g" go
+done
+
+test_workbench() {
+ start_nginx_proxy_services \
+ && cd "$WORKSPACE/apps/workbench" \
+ && RAILS_ENV=test bundle exec rake test TESTOPTS=-v ${testargs[apps/workbench]}
+}
+do_test apps/workbench workbench
+
+test_workbench_benchmark() {
+ start_nginx_proxy_services \
+ && cd "$WORKSPACE/apps/workbench" \
+ && RAILS_ENV=test bundle exec rake test:benchmark ${testargs[apps/workbench_benchmark]}
+}
+do_test apps/workbench_benchmark workbench_benchmark
+
+test_workbench_profile() {
+ start_nginx_proxy_services \
+ && cd "$WORKSPACE/apps/workbench" \
+ && RAILS_ENV=test bundle exec rake test:profile ${testargs[apps/workbench_profile]}
+}
+do_test apps/workbench_profile workbench_profile
+
+exit_cleanly
diff --git a/build/jenkins/run_upload_packages.py b/build/jenkins/run_upload_packages.py
new file mode 100755
index 0000000..04e6c80
--- /dev/null
+++ b/build/jenkins/run_upload_packages.py
@@ -0,0 +1,273 @@
+#!/usr/bin/env python3
+
+import argparse
+import functools
+import glob
+import logging
+import os
+import pipes
+import shutil
+import subprocess
+import sys
+import time
+
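+# TimestampFile records when the last successful upload happened, using the
+# mtime of a marker file. Intended usage (this mirrors main() below):
+#   ts = TimestampFile('packages/.last_upload')
+#   since = ts.last_upload()     # -1 if no upload has been recorded yet
+#   ...upload everything newer than `since`...
+#   ts.update()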
+class TimestampFile:
+ def __init__(self, path):
+ self.path = path
+ self.start_time = time.time()
+
+ def last_upload(self):
+ try:
+ return os.path.getmtime(self.path)
+ except EnvironmentError:
+ return -1
+
+ def update(self):
+ os.close(os.open(self.path, os.O_CREAT | os.O_APPEND))
+ os.utime(self.path, (time.time(), self.start_time))
+
+
+class PackageSuite:
+ NEED_SSH = False
+
+ def __init__(self, glob_root, rel_globs):
+ logger_part = getattr(self, 'LOGGER_PART', os.path.basename(glob_root))
+ self.logger = logging.getLogger('arvados-dev.upload.' + logger_part)
+ self.globs = [os.path.join(glob_root, rel_glob)
+ for rel_glob in rel_globs]
+
+ def files_to_upload(self, since_timestamp):
+ for abs_glob in self.globs:
+ for path in glob.glob(abs_glob):
+ if os.path.getmtime(path) >= since_timestamp:
+ yield path
+
+ def upload_file(self, path):
+ raise NotImplementedError("PackageSuite.upload_file")
+
+ def upload_files(self, paths):
+ for path in paths:
+ self.logger.info("Uploading %s", path)
+ self.upload_file(path)
+
+ def post_uploads(self, paths):
+ pass
+
+ def update_packages(self, since_timestamp):
+ upload_paths = list(self.files_to_upload(since_timestamp))
+ if upload_paths:
+ self.upload_files(upload_paths)
+ self.post_uploads(upload_paths)
+
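+# Concrete suites override upload_file() (required) and post_uploads()
+# (optional). A minimal hypothetical example -- not one of the suites
+# defined in this file:
+#
+#   class LocalCopySuite(PackageSuite):
+#       def upload_file(self, path):
+#           shutil.copy(path, '/srv/mirror/incoming')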
+
+class PythonPackageSuite(PackageSuite):
+ LOGGER_PART = 'python'
+
+ def __init__(self, glob_root, rel_globs):
+ super().__init__(glob_root, rel_globs)
+ self.seen_packages = set()
+
+ def upload_file(self, path):
+ src_dir = os.path.dirname(os.path.dirname(path))
+ if src_dir in self.seen_packages:
+ return
+ self.seen_packages.add(src_dir)
+ # NOTE: If we ever start uploading Python 3 packages, we'll need to
+ # figure out some way to adapt cmd to match. It might be easiest
+ # to give all our setup.py files the executable bit, and run that
+ # directly.
+ # We also must run `sdist` before `upload`: `upload` only uploads
+ # distributions generated earlier in the same command invocation; it
+ # doesn't know how to upload distributions already on disk. We write
+ # the result to a dedicated directory to avoid interfering with our
+ # timestamp tracking.
+ cmd = ['python2.7', 'setup.py']
+ if not self.logger.isEnabledFor(logging.INFO):
+ cmd.append('--quiet')
+ cmd.extend(['sdist', '--dist-dir', '.upload_dist', 'upload'])
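+ # The resulting command is, e.g.:
+ #   python2.7 setup.py --quiet sdist --dist-dir .upload_dist upload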
+ subprocess.check_call(cmd, cwd=src_dir)
+ shutil.rmtree(os.path.join(src_dir, '.upload_dist'))
+
+
+class GemPackageSuite(PackageSuite):
+ LOGGER_PART = 'gems'
+
+ def upload_file(self, path):
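+ # `gem push` exits nonzero when this exact gem version has already been
+ # pushed; treat that specific failure as success so re-running the
+ # uploader is harmless.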
+ cmd = ['gem', 'push', path]
+ push_proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+ repushed = any(line == b'Repushing of gem versions is not allowed.\n'
+ for line in push_proc.stdout)
+ # Read any remaining stdout before closing.
+ for line in push_proc.stdout:
+ pass
+ push_proc.stdout.close()
+ if (push_proc.wait() != 0) and not repushed:
+ raise subprocess.CalledProcessError(push_proc.returncode, cmd)
+
+
+class DistroPackageSuite(PackageSuite):
+ NEED_SSH = True
+ REMOTE_DEST_DIR = 'tmp'
+
+ def __init__(self, glob_root, rel_globs, target, ssh_host, ssh_opts):
+ super().__init__(glob_root, rel_globs)
+ self.target = target
+ self.ssh_host = ssh_host
+ self.ssh_opts = ['-o' + opt for opt in ssh_opts]
+ if not self.logger.isEnabledFor(logging.INFO):
+ self.ssh_opts.append('-q')
+
+ def _build_cmd(self, base_cmd, *args):
+ cmd = [base_cmd]
+ cmd.extend(self.ssh_opts)
+ cmd.extend(args)
+ return cmd
+
+ def _paths_basenames(self, paths):
+ return (os.path.basename(path) for path in paths)
+
+ def _run_script(self, script, *args):
+ # SSH will use a shell to run our bash command, so we have to
+ # quote our arguments.
+ # self.__class__.__name__ provides $0 for the script, which makes a
+ # nicer message if there's an error.
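+ # For example, DebianPackageSuite.post_uploads() below (debian7 target)
+ # ends up running roughly:
+ #   ssh <SSH_OPTS> <host> bash -ec '<FREIGHT_SCRIPT>' DebianPackageSuite tmp wheezy <package files...>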
+ subprocess.check_call(self._build_cmd(
+ 'ssh', self.ssh_host, 'bash', '-ec', pipes.quote(script),
+ self.__class__.__name__, *(pipes.quote(s) for s in args)))
+
+ def upload_files(self, paths):
+ cmd = self._build_cmd('scp', *paths)
+ cmd.append('{self.ssh_host}:{self.REMOTE_DEST_DIR}'.format(self=self))
+ subprocess.check_call(cmd)
+
+
+class DebianPackageSuite(DistroPackageSuite):
+ FREIGHT_SCRIPT = """
+cd "$1"; shift
+DISTNAME=$1; shift
+freight add "$@" "apt/$DISTNAME"
+freight cache "apt/$DISTNAME"
+rm "$@"
+"""
+ TARGET_DISTNAMES = {
+ 'debian7': 'wheezy',
+ 'debian8': 'jessie',
+ 'ubuntu1204': 'precise',
+ 'ubuntu1404': 'trusty',
+ }
+
+ def post_uploads(self, paths):
+ self._run_script(self.FREIGHT_SCRIPT, self.REMOTE_DEST_DIR,
+ self.TARGET_DISTNAMES[self.target],
+ *self._paths_basenames(paths))
+
+
+class RedHatPackageSuite(DistroPackageSuite):
+ CREATEREPO_SCRIPT = """
+cd "$1"; shift
+REPODIR=$1; shift
+rpmsign --addsign "$@" </dev/null
+mv "$@" "$REPODIR"
+createrepo "$REPODIR"
+"""
+ REPO_ROOT = '/var/www/rpm.arvados.org/'
+ TARGET_REPODIRS = {
+ 'centos6': 'CentOS/6/os/x86_64/'
+ }
+
+ def post_uploads(self, paths):
+ repo_dir = os.path.join(self.REPO_ROOT,
+ self.TARGET_REPODIRS[self.target])
+ self._run_script(self.CREATEREPO_SCRIPT, self.REMOTE_DEST_DIR,
+ repo_dir, *self._paths_basenames(paths))
+
+
+def _define_suite(suite_class, *rel_globs, **kwargs):
+ return functools.partial(suite_class, rel_globs=rel_globs, **kwargs)
+
+PACKAGE_SUITES = {
+ 'python': _define_suite(PythonPackageSuite,
+ 'sdk/pam/dist/*.tar.gz',
+ 'sdk/python/dist/*.tar.gz',
+ 'sdk/cwl/dist/*.tar.gz',
+ 'services/nodemanager/dist/*.tar.gz',
+ 'services/fuse/dist/*.tar.gz',
+ ),
+ 'gems': _define_suite(GemPackageSuite,
+ 'sdk/ruby/*.gem',
+ 'sdk/cli/*.gem',
+ 'services/login-sync/*.gem',
+ ),
+ }
+for target in ['debian7', 'debian8', 'ubuntu1204', 'ubuntu1404']:
+ PACKAGE_SUITES[target] = _define_suite(
+ DebianPackageSuite, os.path.join('packages', target, '*.deb'),
+ target=target)
+for target in ['centos6']:
+ PACKAGE_SUITES[target] = _define_suite(
+ RedHatPackageSuite, os.path.join('packages', target, '*.rpm'),
+ target=target)
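+
+# Each PACKAGE_SUITES entry is a functools.partial; build_suite_and_upload()
+# below finishes the call with glob_root=args.workspace (plus ssh_host and
+# ssh_opts for the distro suites).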
+
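+# Example invocation (hypothetical host and paths):
+#   run_upload_packages.py -H uploader@repo.example.com -o StrictHostKeyChecking=no \
+#       --workspace /path/to/arvados debian8 centos6 gems
+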
+def parse_arguments(arguments):
+ parser = argparse.ArgumentParser(
+ prog="run_upload_packages.py",
+ description="Upload Arvados packages to various repositories")
+ parser.add_argument(
+ '--workspace', '-W', default=os.environ.get('WORKSPACE'),
+ help="Arvados source directory with built packages to upload")
+ parser.add_argument(
+ '--ssh-host', '-H',
+ help="Host specification for distribution repository server")
+ parser.add_argument('-o', action='append', default=[], dest='ssh_opts',
+ metavar='OPTION', help="Pass option to `ssh -o`")
+ parser.add_argument('--verbose', '-v', action='count', default=0,
+ help="Log more information and subcommand output")
+ parser.add_argument(
+ 'targets', nargs='*', default=['all'], metavar='target',
+ help="Upload packages to these targets (default all)\nAvailable targets: " +
+ ', '.join(sorted(PACKAGE_SUITES.keys())))
+ args = parser.parse_args(arguments)
+ if 'all' in args.targets:
+ args.targets = list(PACKAGE_SUITES.keys())
+
+ if args.workspace is None:
+ parser.error("workspace not set from command line or environment")
+ for target in args.targets:
+ try:
+ suite_class = PACKAGE_SUITES[target].func
+ except KeyError:
+ parser.error("unrecognized target {!r}".format(target))
+ if suite_class.NEED_SSH and (args.ssh_host is None):
+ parser.error(
+ "--ssh-host must be specified to upload distribution packages")
+ return args
+
+def setup_logger(stream_dest, args):
+ log_handler = logging.StreamHandler(stream_dest)
+ log_handler.setFormatter(logging.Formatter(
+ '%(asctime)s %(name)s[%(process)d] %(levelname)s: %(message)s',
+ '%Y-%m-%d %H:%M:%S'))
+ logger = logging.getLogger('arvados-dev.upload')
+ logger.addHandler(log_handler)
+ logger.setLevel(max(1, logging.WARNING - (10 * args.verbose)))
+
+def build_suite_and_upload(target, since_timestamp, args):
+ suite_def = PACKAGE_SUITES[target]
+ kwargs = {}
+ if suite_def.func.NEED_SSH:
+ kwargs.update(ssh_host=args.ssh_host, ssh_opts=args.ssh_opts)
+ suite = suite_def(args.workspace, **kwargs)
+ suite.update_packages(since_timestamp)
+
+def main(arguments, stdout=sys.stdout, stderr=sys.stderr):
+ args = parse_arguments(arguments)
+ setup_logger(stderr, args)
+ ts_file = TimestampFile(os.path.join(args.workspace, 'packages',
+ '.last_upload'))
+ last_upload_ts = ts_file.last_upload()
+ for target in args.targets:
+ build_suite_and_upload(target, last_upload_ts, args)
+ ts_file.update()
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
-----------------------------------------------------------------------
hooks/post-receive
--
More information about the arvados-commits
mailing list