[ARVADOS] updated: 607fe087f6167061714a524dd53cbbc21b974973
Git user <git at public.curoverse.com>
Tue May 2 13:02:45 EDT 2017
Summary of changes:
.../controllers/container_requests_controller.rb | 49 +-
.../app/controllers/keep_disks_controller.rb | 1 +
apps/workbench/app/controllers/users_controller.rb | 1 +
apps/workbench/app/helpers/provenance_helper.rb | 48 ++
apps/workbench/app/models/arvados_base.rb | 4 +
apps/workbench/app/models/arvados_resource_list.rb | 8 +
apps/workbench/app/models/job.rb | 2 +-
apps/workbench/app/models/pipeline_instance.rb | 7 +-
apps/workbench/app/models/proxy_work_unit.rb | 1 +
.../_extra_tab_line_buttons.html.erb | 50 +-
.../_show_provenance.html.erb | 4 +-
.../container_requests_controller_test.rb | 80 ++-
.../test/integration/container_requests_test.rb | 12 +
.../test/unit/arvados_resource_list_test.rb | 8 +
apps/workbench/test/unit/link_test.rb | 3 +
apps/workbench/test/unit/pipeline_instance_test.rb | 3 +
apps/workbench/test/unit/work_unit_test.rb | 3 +
...nstall-manual-prerequisites.html.textile.liquid | 10 +-
doc/install/migrate-docker19.html.textile.liquid | 9 +-
sdk/cwl/arvados_cwl/arvcontainer.py | 62 +-
sdk/cwl/arvados_cwl/arvjob.py | 8 +-
sdk/cwl/arvados_cwl/arvworkflow.py | 5 +-
sdk/cwl/arvados_cwl/crunch_script.py | 3 +-
sdk/cwl/arvados_cwl/pathmapper.py | 16 +
sdk/cwl/arvados_cwl/runner.py | 18 +-
sdk/cwl/tests/test_container.py | 84 +++
sdk/go/arvadosclient/arvadosclient_test.go | 7 +-
sdk/go/dispatch/throttle_test.go | 29 +-
sdk/go/keepclient/discover.go | 15 +-
sdk/go/keepclient/keepclient.go | 22 +-
sdk/go/keepclient/keepclient_test.go | 20 +-
sdk/go/keepclient/support.go | 98 +--
sdk/python/arvados/_ranges.py | 2 +-
sdk/python/arvados/arvfile.py | 171 +++--
sdk/python/arvados/commands/put.py | 62 +-
sdk/python/tests/run_test_server.py | 5 +-
sdk/python/tests/test_arv_put.py | 28 +
sdk/python/tests/test_collections.py | 60 +-
sdk/ruby/lib/arvados.rb | 23 +-
sdk/ruby/lib/arvados/keep.rb | 6 +-
services/api/.gitignore | 3 +-
services/api/Gemfile | 53 +-
services/api/Gemfile.lock | 279 ++++----
services/api/Rakefile | 6 -
.../api/app/controllers/application_controller.rb | 14 +-
.../app/controllers/arvados/v1/nodes_controller.rb | 6 +-
.../controllers/arvados/v1/schema_controller.rb | 7 +-
.../app/controllers/arvados/v1/users_controller.rb | 2 +-
.../api/app/controllers/database_controller.rb | 4 +-
.../app/controllers/user_sessions_controller.rb | 3 +-
.../api/app/models/api_client_authorization.rb | 8 +-
services/api/app/models/arvados_model.rb | 99 ++-
services/api/app/models/collection.rb | 2 +-
services/api/app/models/link.rb | 5 -
services/api/app/models/log.rb | 2 +-
services/api/app/models/user.rb | 8 +-
services/api/app/models/virtual_machine.rb | 6 +-
services/api/config/application.default.yml | 36 +-
services/api/config/application.rb | 29 +-
.../api/config/environments/development.rb.example | 3 -
.../api/config/environments/production.rb.example | 2 +-
services/api/config/environments/test.rb.example | 5 +-
services/api/config/initializers/eventbus.rb | 38 +-
services/api/config/initializers/load_config.rb | 1 +
.../api/config/initializers/noop_deep_munge.rb | 1 +
.../config/initializers/permit_all_parameters.rb | 1 +
services/api/config/initializers/time_format.rb | 2 +
services/api/config/routes.rb | 28 +-
...0170319063406_serialized_columns_accept_null.rb | 5 +
services/api/db/structure.sql | 9 +-
services/api/lib/can_be_an_owner.rb | 2 +-
services/api/lib/create_superuser_token.rb | 4 +-
services/api/lib/eventbus.rb | 358 ----------
services/api/lib/has_uuid.rb | 14 +-
services/api/lib/load_param.rb | 2 +-
services/api/lib/serializers.rb | 6 +
.../api/lib/tasks/delete_old_container_logs.rake | 2 +-
services/api/lib/tasks/delete_old_job_logs.rake | 2 +-
services/api/lib/whitelist_update.rb | 15 +-
services/api/log/.gitkeep | 0
services/api/test/fixtures/container_requests.yml | 2 +-
.../arvados/v1/collections_controller_test.rb | 6 +-
.../functional/arvados/v1/users_controller_test.rb | 1 +
.../arvados/v1/virtual_machines_controller_test.rb | 4 +-
.../test/functional/database_controller_test.rb | 2 +-
.../api_client_authorizations_scopes_test.rb | 6 +-
.../api/test/integration/crunch_dispatch_test.rb | 2 +-
.../api/test/integration/database_reset_test.rb | 2 -
services/api/test/integration/errors_test.rb | 2 +-
services/api/test/integration/pipeline_test.rb | 2 +-
.../api/test/integration/reader_tokens_test.rb | 2 +-
services/api/test/integration/websocket_test.rb | 742 ---------------------
services/api/test/test_helper.rb | 26 +-
services/api/test/unit/arvados_model_test.rb | 76 ++-
.../api/test/unit/create_superuser_token_test.rb | 2 +-
services/api/test/unit/job_test.rb | 23 +-
services/api/test/unit/user_test.rb | 12 +-
services/api/test/unit/workflow_test.rb | 8 +-
services/crunch-run/crunchrun.go | 6 +-
services/fuse/tests/test_tmp_collection.py | 13 +
services/keep-web/handler.go | 12 +-
services/keep-web/handler_test.go | 29 +
services/keepproxy/keepproxy.go | 118 ++--
services/keepproxy/keepproxy_test.go | 59 +-
services/keepproxy/proxy_client.go | 19 +
tools/keep-exercise/keep-exercise.go | 3 +-
106 files changed, 1494 insertions(+), 1804 deletions(-)
copy apps/workbench/app/views/{jobs => container_requests}/_show_provenance.html.erb (52%)
create mode 100644 services/api/config/initializers/permit_all_parameters.rb
create mode 100644 services/api/db/migrate/20170319063406_serialized_columns_accept_null.rb
delete mode 100644 services/api/lib/eventbus.rb
delete mode 100644 services/api/log/.gitkeep
delete mode 100644 services/api/test/integration/websocket_test.rb
create mode 100644 services/keepproxy/proxy_client.go
via 607fe087f6167061714a524dd53cbbc21b974973 (commit)
via fca805a18c671ccbb03cef640c15172d1f02ffe3 (commit)
via 613155d587da60dbe04c7635649b1f3694938adc (commit)
via be4852ec32e5eeed1af9a62017cfc39ed66ac186 (commit)
via fe45b1b66c730f2546d78a7899375707c0816518 (commit)
via 18f3b51a1795922a5c5c595b5b3fb5ce12978c64 (commit)
via 4d012b23a4ac88f433986054fc0085ee6714b5b3 (commit)
via c6452ff3cf71462a46c3d7584696e0948dcce4dc (commit)
via ed375727b95e6f1cd56c599a177bbd7979e1cf78 (commit)
via 70166077b0f77780441df216232390f0d09ad31a (commit)
via fb72bf49a923ba3f25defa749efc2d49353414b7 (commit)
via d2ebb8fe6bdf4b33ec2aeea314efb5e4df22eebb (commit)
via fea486f94bb5cc8f51d9563eafc172b6ba2aec57 (commit)
via 43d1a5d0b9272f6e57a3d0afd59b956054ff4d7a (commit)
via d694a717acb2e577afa396aea140e5284b7f763d (commit)
via e56ae6aad06c37d5512537047871d7363dd97620 (commit)
via 59b27bcb7fe510ff351dd9d8f71b1d4b56d131b5 (commit)
via e39a7d5704932cbec606e85cdca50aa38a1ed053 (commit)
via 88a25d414025c64231f977a9383fd6a69cf6246a (commit)
via 72900c01e197d602e79fda8d306b17fd1e32a3ea (commit)
via 6a7edde0c5b906c58a7e739f2d0c612c67f63dc5 (commit)
via 41a7f3914a6ced0fd374bd903470fb4fc91ea5e4 (commit)
via e289b9ad5959a76795681ca95d310bad2288656a (commit)
via 3843210bdf340751795b8ce9903cf712661b94e7 (commit)
via b92089c1674dcc99184a36b7094ac72ffc787922 (commit)
via f5f5de0ae41e12738a380c422417d5a5e5af7f09 (commit)
via e8317521741c3814e824e209f75edf23636e32ff (commit)
via 318c49002aea966128a9d37ab29e601a104d79bb (commit)
via f517c9897428fd3c50a269b494b6b912cae291df (commit)
via 65da23323c06079612db9285e0ab2bd1ac9ea253 (commit)
via 8c948701e887e96f05ffaf0adaec9da9f2533f72 (commit)
via b075d1be1377760f5d8497a29f63c8e416cd5378 (commit)
via 8c5f2973a5c5f042d1d12aef1c470b37519fd416 (commit)
via edfc619e6189c5407d16798c75aaace08a13536d (commit)
via 90209af8fa35bc99c9821db0c815404d1234ef31 (commit)
via 88c241d7c4fbfadb951f370bf2706db687adad75 (commit)
via 137ebf94ff14837c9df773533ea86e821469bda9 (commit)
via fed8ee006cdfc2029f287c656b184f3ce7507847 (commit)
via 260e85a9d9cf2c20313bcf2edb63da57bdd3a69f (commit)
via e7b46691f98bf4e7edcf1ea3b98a677273d70b3b (commit)
via 30146198f24c70941d95af714e036ff3c451626c (commit)
via 95e2bdda5afc3ffc6afb2f08ea6d7cba8f8d62f1 (commit)
via 629557aa041a80f0704b02e7c679b2f01d9c0be2 (commit)
via f2f8340b18430738a9527f05e707dd8f03508cc0 (commit)
via ad7294edfcc59c3e67548328a88c9e689c3ae2cf (commit)
via 3ef580c47029ff0fbf959b044f29c183f41cb609 (commit)
via 4ccbea9ef440a7e4252b0df5e710dcb767831c60 (commit)
via c043e133b2646037ed630d571e91dbf77344f855 (commit)
via 93c92875aaebe5b06f8dbfe2822b59a772895c08 (commit)
via bf5d77baad2071af6eea514c76b4892cec4974a0 (commit)
via 04bd6b08b9ac13d29ac05c9281850d430d71066d (commit)
via 840b855ff0317e66f4176ae0f23e9785f72267b4 (commit)
via 1220e2184449ccab288fa41de4749fb029cd317b (commit)
via 17b80c32a5b177ee8c5f32b81dd0889f3399eee8 (commit)
via 9905d124877e5695053cce388d3680c667815de5 (commit)
via 35c2572761bb060aa1c12f417f97aa9e1ccbe7eb (commit)
via a93d95d85ea15c7afd70657abf60635c29043c89 (commit)
via f4661a02245a35f8d223693a5aecaae87083fb16 (commit)
via 52c6f13db207030bdbe063665c0dd524007db828 (commit)
via 0ecea550fde014578e71004360b700cdfeae4909 (commit)
via 7116da151dc8bfd5ac1a9b016b2ed6e4c35572f7 (commit)
via bef5d901f00648e703bb6a3ad58fa481a610ffd7 (commit)
via de083a9fec0ca08afda5a9369c6cd32dbdcd0965 (commit)
via f5d09a4904e609b5df20edd0194a9f1ade40c28a (commit)
via 0d5be4fa36006459e1579f087b695904e4f32ee3 (commit)
via 77d9c05d89dabc9e9e9a15f46cd12c8ad61ed64e (commit)
via e1e05845b74ce70712e414830f992ce57d7a8453 (commit)
via 78ff2a600b29f05f522f8e8818967dac88394fd6 (commit)
via 7fd4aa96f997a133d31b3df88a8d2f4820c5b881 (commit)
via fb7bb4c8f17a49abab40e42b7a0101cac7478d60 (commit)
via 8d9b12f2a87ffd7183d3a36ca32ee1c7e701a0e2 (commit)
via d42ec212e992a83f8e7fc48b59fb3daf58a62787 (commit)
via c066a2e6d064a270638baea8f8b0d106f5903e0f (commit)
via c36272a5f83ba70980160c4cb205bdfb8a1c660b (commit)
via 045bace65c2395b6efe9f3d8c93bec74196f58e1 (commit)
via 7c6852e1b3675b3c1de7e9792373333cb752d40d (commit)
via dfd8c4bbd6f126b90d436e9d242cd30e15e70d2e (commit)
via de4df7f80c531ab16e59ea36671a8efa9e6ff33d (commit)
via 126dd750f48654cb3b1a4e53c5b7d337003e112f (commit)
via 30dbddd3b311653652fa731dbff950aba0712301 (commit)
via ab314b9ea3618c71556a6a5f6dd7c769beaf2737 (commit)
via 390af6a13f7c8974329aecc2f23fbfa81f8e298b (commit)
via 9090c60b28de593b8bb2ce606a9ab35b62b57608 (commit)
via 84ad215752fde4291070143411a945fa7a94241c (commit)
via c9a361f7fd3b1cf7f4959e9b0292d0f495d82771 (commit)
via 05d453ec38b10a022ea6db77867957e7115b9b35 (commit)
via 09dcf71e59907c2eaf4b94918c63da07193481a4 (commit)
via 099a8c62fcb0905855ddf243a3deddc7398c3c10 (commit)
via fe446e10da189d3d3e0ea5f19061389cc2200a08 (commit)
via 6498c7751cd0305a28494df45a70965ccc6c3737 (commit)
from 3a0d849c08f750dca1d6a40153c0107001769c6d (commit)
Those revisions listed above that are new to this repository have
not appeared in any other notification email, so we list those
revisions in full below.
commit 607fe087f6167061714a524dd53cbbc21b974973
Merge: 3a0d849 fca805a
Author: Tom Clegg <tom at curoverse.com>
Date: Tue May 2 13:02:32 2017 -0400
11308: Merge branch 'master' into 11308-python3
Conflicts:
sdk/python/arvados/arvfile.py
sdk/python/arvados/commands/put.py
sdk/python/tests/test_collections.py
diff --cc sdk/python/arvados/arvfile.py
index 931a3a1,a2ec76a..2fc9c73
--- a/sdk/python/arvados/arvfile.py
+++ b/sdk/python/arvados/arvfile.py
@@@ -597,40 -630,52 +643,53 @@@ class _BlockManager(object)
self._pending_write_size += closed_file_size
# Check if there are enough small blocks for filling up one in full
- if force or (self._pending_write_size >= config.KEEP_BLOCK_SIZE):
+ if not (force or (self._pending_write_size >= config.KEEP_BLOCK_SIZE)):
+ return
- # Search blocks ready for getting packed together before being committed to Keep.
- # A WRITABLE block always has an owner.
- # A WRITABLE block with its owner.closed() implies that it's
- # size is <= KEEP_BLOCK_SIZE/2.
- try:
- small_blocks = [b for b in listvalues(self._bufferblocks) if b.state() == _BufferBlock.WRITABLE and b.owner.closed()]
- except AttributeError:
- # Writable blocks without owner shouldn't exist.
- raise UnownedBlockError()
- # Search blocks ready for getting packed together before being committed to Keep.
++ # Search blocks ready for getting packed together before being
++ # committed to Keep.
+ # A WRITABLE block always has an owner.
- # A WRITABLE block with its owner.closed() implies that it's
++ # A WRITABLE block with its owner.closed() implies that its
+ # size is <= KEEP_BLOCK_SIZE/2.
+ try:
- small_blocks = [b for b in self._bufferblocks.values()
++ small_blocks = [b for b in listvalues(self._bufferblocks)
+ if b.state() == _BufferBlock.WRITABLE and b.owner.closed()]
+ except AttributeError:
+ # Writable blocks without owner shouldn't exist.
+ raise UnownedBlockError()
+
+ if len(small_blocks) <= 1:
+ # Not enough small blocks for repacking
+ return
- if len(small_blocks) <= 1:
- # Not enough small blocks for repacking
- return
+ for bb in small_blocks:
+ bb.repack_writes()
- # Update the pending write size count with its true value, just in case
- # some small file was opened, written and closed several times.
- self._pending_write_size = sum([b.size() for b in small_blocks])
- if self._pending_write_size < config.KEEP_BLOCK_SIZE and not force:
- return
+ # Update the pending write size count with its true value, just in case
+ # some small file was opened, written and closed several times.
+ self._pending_write_size = sum([b.size() for b in small_blocks])
- new_bb = self._alloc_bufferblock()
- while len(small_blocks) > 0 and (new_bb.write_pointer + small_blocks[0].size()) <= config.KEEP_BLOCK_SIZE:
- bb = small_blocks.pop(0)
- arvfile = bb.owner
- self._pending_write_size -= bb.size()
- new_bb.append(bb.buffer_view[0:bb.write_pointer].tobytes())
- arvfile.set_segments([Range(new_bb.blockid,
- 0,
- bb.size(),
- new_bb.write_pointer - bb.size())])
- self._delete_bufferblock(bb.blockid)
- self.commit_bufferblock(new_bb, sync=sync)
+ if self._pending_write_size < config.KEEP_BLOCK_SIZE and not force:
+ return
+
+ new_bb = self._alloc_bufferblock()
+ files = []
+ while len(small_blocks) > 0 and (new_bb.write_pointer + small_blocks[0].size()) <= config.KEEP_BLOCK_SIZE:
+ bb = small_blocks.pop(0)
+ self._pending_write_size -= bb.size()
+ new_bb.append(bb.buffer_view[0:bb.write_pointer].tobytes())
+ files.append((bb, new_bb.write_pointer - bb.size()))
+
+ self.commit_bufferblock(new_bb, sync=sync)
+
+ for bb, new_bb_segment_offset in files:
+ newsegs = bb.owner.segments()
+ for s in newsegs:
+ if s.locator == bb.blockid:
+ s.locator = new_bb.locator()
+ s.segment_offset = new_bb_segment_offset+s.segment_offset
+ bb.owner.set_segments(newsegs)
+ self._delete_bufferblock(bb.blockid)
def commit_bufferblock(self, block, sync):
"""Initiate a background upload of a bufferblock.
@@@ -1010,39 -1055,8 +1069,8 @@@ class ArvadosFile(object)
self.parent._my_block_manager().block_prefetch(lr.locator)
locs.add(lr.locator)
- return ''.join(data)
+ return b''.join(data)
- def _repack_writes(self, num_retries):
- """Optimize buffer block by repacking segments in file sequence.
-
- When the client makes random writes, they appear in the buffer block in
- the sequence they were written rather than the sequence they appear in
- the file. This makes for inefficient, fragmented manifests. Attempt
- to optimize by repacking writes in file sequence.
-
- """
- segs = self._segments
-
- # Collect the segments that reference the buffer block.
- bufferblock_segs = [s for s in segs if s.locator == self._current_bblock.blockid]
-
- # Collect total data referenced by segments (could be smaller than
- # bufferblock size if a portion of the file was written and
- # then overwritten).
- write_total = sum([s.range_size for s in bufferblock_segs])
-
- if write_total < self._current_bblock.size() or len(bufferblock_segs) > 1:
- # If there's more than one segment referencing this block, it is
- # due to out-of-order writes and will produce a fragmented
- # manifest, so try to optimize by re-packing into a new buffer.
- contents = self.parent._my_block_manager().get_block_contents(self._current_bblock.blockid, num_retries)
- new_bb = self.parent._my_block_manager().alloc_bufferblock(self._current_bblock.blockid, starting_capacity=write_total, owner=self)
- for t in bufferblock_segs:
- new_bb.append(contents[t.segment_offset:t.segment_offset+t.range_size])
- t.segment_offset = new_bb.size() - t.range_size
-
- self._current_bblock = new_bb
-
@must_be_writable
@synchronized
def writeto(self, offset, data, num_retries):
diff --cc sdk/python/arvados/commands/put.py
index ed9d55c,6836d80..12f9329
--- a/sdk/python/arvados/commands/put.py
+++ b/sdk/python/arvados/commands/put.py
@@@ -452,12 -471,14 +470,16 @@@ class ArvPutUploadJob(object)
except (SystemExit, Exception) as e:
self._checkpoint_before_quit = False
# Log stack trace only when Ctrl-C isn't pressed (SIGINT)
-- # Note: We're expecting SystemExit instead of KeyboardInterrupt because
-- # we have a custom signal handler in place that raises SystemExit with
-- # the catched signal's code.
- if not isinstance(e, SystemExit) or e.code != -2:
++ # Note: We're expecting SystemExit instead of
++ # KeyboardInterrupt because we have a custom signal
++ # handler in place that raises SystemExit with the catched
++ # signal's code.
+ if isinstance(e, PathDoesNotExistError):
+ # We aren't interested in the traceback for this case
+ pass
+ elif not isinstance(e, SystemExit) or e.code != -2:
- self.logger.warning("Abnormal termination:\n{}".format(traceback.format_exc(e)))
+ self.logger.warning("Abnormal termination:\n{}".format(
+ traceback.format_exc()))
raise
finally:
if not self.dry_run:
diff --cc sdk/python/tests/test_arv_put.py
index 083b8fc,3201891..6d10352
--- a/sdk/python/tests/test_arv_put.py
+++ b/sdk/python/tests/test_arv_put.py
@@@ -19,13 -17,16 +19,14 @@@ import yam
import threading
import hashlib
import random
+ import uuid
-from cStringIO import StringIO
-
import arvados
import arvados.commands.put as arv_put
-import arvados_testutil as tutil
+from . import arvados_testutil as tutil
-from arvados_testutil import ArvadosBaseTestCase, fake_httplib2_response
-import run_test_server
+from .arvados_testutil import ArvadosBaseTestCase, fake_httplib2_response
+from . import run_test_server
class ArvadosPutResumeCacheTest(ArvadosBaseTestCase):
CACHE_ARGSET = [
diff --cc sdk/python/tests/test_collections.py
index 86215f5,fd31664..24f305a
--- a/sdk/python/tests/test_collections.py
+++ b/sdk/python/tests/test_collections.py
@@@ -1173,16 -1111,16 +1173,16 @@@ class NewCollectionTestCaseWithServers(
def test_only_small_blocks_are_packed_together(self):
c = Collection()
- # Write a couple of small files,
+ # Write a couple of small files,
- f = c.open("count.txt", "w")
- f.write("0123456789")
+ f = c.open("count.txt", "wb")
+ f.write(b"0123456789")
f.close(flush=False)
- foo = c.open("foo.txt", "w")
- foo.write("foo")
+ foo = c.open("foo.txt", "wb")
+ foo.write(b"foo")
foo.close(flush=False)
# Then, write a big file, it shouldn't be packed with the ones above
- big = c.open("bigfile.txt", "w")
- big.write("x" * 1024 * 1024 * 33) # 33 MB > KEEP_BLOCK_SIZE/2
+ big = c.open("bigfile.txt", "wb")
+ big.write(b"x" * 1024 * 1024 * 33) # 33 MB > KEEP_BLOCK_SIZE/2
big.close(flush=False)
self.assertEqual(
c.manifest_text("."),
-----------------------------------------------------------------------
hooks/post-receive
--
More information about the arvados-commits
mailing list