[ARVADOS] updated: 3d625ef22b42affa956f48948351bc60dd4298ac
git at public.curoverse.com
Sun Feb 7 19:37:20 EST 2016
Summary of changes:
tools/crunchstat-summary/crunchstat_summary/summarizer.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
discards a875cf4ff32ed95fb7b3780bce50294afe69c68f (commit)
discards 97369f522bfc9018b201f5988cd187af324c6544 (commit)
discards 2f0a0ba4ce7da315f94c2d4a9bfb27a835940618 (commit)
via 3d625ef22b42affa956f48948351bc60dd4298ac (commit)
This update added new revisions after undoing existing revisions. That is
to say, the old revision is not a strict subset of the new revision. This
situation occurs when you --force push a change and generate a repository
containing something like this:
 * -- * -- B -- O -- O -- O (a875cf4ff32ed95fb7b3780bce50294afe69c68f)
            \
             N -- N -- N (3d625ef22b42affa956f48948351bc60dd4298ac)
When this happens we assume that you've already had alert emails for all
of the O revisions, and so we here report only the revisions in the N
branch from the common base, B.
Those revisions listed above that are new to this repository have
not appeared on any other notification email; so we list those
revisions in full, below.
commit 3d625ef22b42affa956f48948351bc60dd4298ac
Author: Tom Clegg <tom at curoverse.com>
Date: Sun Feb 7 17:28:33 2016 -0500
8341: Get job log from logs API if the log has not been written to Keep yet.
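In effect, crunchstat-summary can now produce a report for a job whose log has
not yet been saved to Keep, by paging the job's stderr events out of the logs
API. A minimal usage sketch, not part of the commit, assuming an Arvados client
is configured in the environment; run() and text_report() are assumptions about
the rest of the module, and the job UUID is made up:

    # Sketch only: summarize a job whether or not its log collection exists yet.
    # JobSummarizer and the Keep/logs-API fallback come from the diff below;
    # run() and text_report() are assumed from the surrounding module.
    from crunchstat_summary import summarizer

    s = summarizer.JobSummarizer('zzzzz-8i9sb-xxxxxxxxxxxxxxx')  # made-up job UUID
    s.run()
    print(s.text_report())

With this change, an empty job['log'] selects LiveLogReader instead of raising
ValueError, and the report label gets a ' (partial)' suffix to flag that the
log may still be incomplete.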
diff --git a/tools/crunchstat-summary/crunchstat_summary/summarizer.py b/tools/crunchstat-summary/crunchstat_summary/summarizer.py
index 486f0e7..96fd9cf 100644
--- a/tools/crunchstat-summary/crunchstat_summary/summarizer.py
+++ b/tools/crunchstat-summary/crunchstat_summary/summarizer.py
@@ -90,7 +90,7 @@ class Summarizer(object):
logger.debug('%s: done %s', self.label, uuid)
continue
- m = re.search(r'^(?P<timestamp>\S+) (?P<job_uuid>\S+) \d+ (?P<seq>\d+) stderr crunchstat: (?P<category>\S+) (?P<current>.*?)( -- interval (?P<interval>.*))?\n', line)
+ m = re.search(r'^(?P<timestamp>[^\s.]+)(\.\d+)? (?P<job_uuid>\S+) \d+ (?P<seq>\d+) stderr crunchstat: (?P<category>\S+) (?P<current>.*?)( -- interval (?P<interval>.*))?\n', line)
if not m:
continue
@@ -327,8 +327,8 @@ class Summarizer(object):
return '{}'.format(val)
-class CollectionSummarizer(Summarizer):
- def __init__(self, collection_id, **kwargs):
+class CollectionReader(object):
+ def __init__(self, collection_id):
logger.debug('load collection %s', collection_id)
collection = arvados.collection.CollectionReader(collection_id)
filenames = [filename for filename in collection]
@@ -336,24 +336,73 @@ class CollectionSummarizer(Summarizer):
raise ValueError(
"collection {} has {} files; need exactly one".format(
collection_id, len(filenames)))
+ self._reader = collection.open(filenames[0])
+
+ def __iter__(self):
+ return iter(self._reader)
+
+
+class LiveLogReader(object):
+ def __init__(self, job_uuid):
+ logger.debug('load stderr events for job %s', job_uuid)
+ self._filters = [
+ ['object_uuid', '=', job_uuid],
+ ['event_type', '=', 'stderr']]
+ self._last_id = 0
+ self._buffer = collections.deque()
+ self._got_all = False
+
+ def __iter__(self):
+ return self
+
+ def next(self):
+ if self._buffer is None:
+ raise StopIteration
+ elif len(self._buffer) == 0:
+ if self._got_all:
+ raise StopIteration
+ got = arvados.api().logs().index(
+ limit=1000,
+ order=['id asc'],
+ filters=self._filters + [['id','>',str(self._last_id)]],
+ ).execute()
+ logger.debug('received %d, %d more remain', len(got['items']), got['items_available'] - len(got['items']))
+ if len(got['items']) == 0:
+ self._got_all = True
+ self._buffer = None
+ raise StopIteration
+ elif len(got['items']) == got['items_available']:
+ # Don't try to fetch any more after this page
+ self._got_all = True
+ for i in got['items']:
+ for line in i['properties']['text'].split('\n'):
+ self._buffer.append(line)
+ self._last_id = i['id']
+ return self._buffer.popleft() + '\n'
+
+
+class CollectionSummarizer(Summarizer):
+ def __init__(self, collection_id, **kwargs):
super(CollectionSummarizer, self).__init__(
- collection.open(filenames[0]), **kwargs)
+ CollectionReader(collection_id), **kwargs)
self.label = collection_id
-class JobSummarizer(CollectionSummarizer):
+class JobSummarizer(Summarizer):
def __init__(self, job, **kwargs):
arv = arvados.api('v1')
if isinstance(job, basestring):
self.job = arv.jobs().get(uuid=job).execute()
else:
self.job = job
- if not self.job['log']:
- raise ValueError(
- "job {} has no log; live summary not implemented".format(
- self.job['uuid']))
- super(JobSummarizer, self).__init__(self.job['log'], **kwargs)
- self.label = self.job['uuid']
+ if self.job['log']:
+ rdr = CollectionReader(self.job['log'])
+ label = self.job['uuid']
+ else:
+ rdr = LiveLogReader(self.job['uuid'])
+ label = self.job['uuid'] + ' (partial)'
+ super(JobSummarizer, self).__init__(rdr, **kwargs)
+ self.label = label
self.existing_constraints = self.job.get('runtime_constraints', {})
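The first hunk widens the timestamp pattern so that log lines whose timestamps
carry a fractional-seconds suffix (as lines fetched through the logs API can)
still parse, with the fraction left out of the captured group. A small
self-contained check; the sample log line below is made up for illustration,
while the pattern is copied from the new code:

    # Illustration only: what the relaxed timestamp pattern captures.
    import re

    line = ('2016-02-07_19:30:00.123456 zzzzz-8i9sb-aaaaaaaaaaaaaaa 12345 1 '
            'stderr crunchstat: mem 12345 rss\n')
    m = re.search(r'^(?P<timestamp>[^\s.]+)(\.\d+)? (?P<job_uuid>\S+) \d+ (?P<seq>\d+) '
                  r'stderr crunchstat: (?P<category>\S+) (?P<current>.*?)'
                  r'( -- interval (?P<interval>.*))?\n', line)
    print(m.group('timestamp'))   # -> 2016-02-07_19:30:00 (fraction dropped)
    print(m.group('category'))    # -> mem
    print(m.group('current'))     # -> 12345 rss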
-----------------------------------------------------------------------
hooks/post-receive
--
More information about the arvados-commits mailing list