From patchwork Tue Jul 26 05:19:13 2011
X-Patchwork-Submitter: Michael-Doyle Hudson
X-Patchwork-Id: 3117
X-Launchpad-Project: lava-scheduler
X-Launchpad-Branch: ~linaro-validation/lava-scheduler/trunk
X-Launchpad-Branch-Revision-Number: 43
To: Linaro Patch Tracker
From: noreply@launchpad.net
Subject: [Branch ~linaro-validation/lava-scheduler/trunk] Rev 43: Add the ability to view the logs of jobs, even as they run.
Message-Id: <20110726051913.3080.10656.launchpad@loganberry.canonical.com>
Date: Tue, 26 Jul 2011 05:19:13 -0000

Merge authors:
  Michael Hudson-Doyle (mwhudson)
Related merge proposals:
  https://code.launchpad.net/~mwhudson/lava-scheduler/log-viewing/+merge/69030
  proposed by: Michael Hudson-Doyle (mwhudson)
------------------------------------------------------------
revno: 43 [merge]
committer: Michael-Doyle Hudson
branch nick: trunk
timestamp: Tue 2011-07-26 17:16:47 +1200
message:
  Add the ability to view the logs of jobs, even as they run.
added: lava_scheduler_app/migrations/0006_auto__add_field_testjob_log_file.py modified: fake-dispatcher lava-scheduler lava_scheduler_app/extension.py lava_scheduler_app/models.py lava_scheduler_app/templates/lava_scheduler_app/job.html lava_scheduler_app/tests.py lava_scheduler_app/urls.py lava_scheduler_app/views.py lava_scheduler_daemon/board.py lava_scheduler_daemon/dbjobsource.py lava_scheduler_daemon/jobsource.py lava_scheduler_daemon/tests/test_board.py --- lp:lava-scheduler https://code.launchpad.net/~linaro-validation/lava-scheduler/trunk You are subscribed to branch lp:lava-scheduler. To unsubscribe from this branch go to https://code.launchpad.net/~linaro-validation/lava-scheduler/trunk/+edit-subscription === modified file 'fake-dispatcher' --- fake-dispatcher 2011-06-15 04:57:02 +0000 +++ fake-dispatcher 2011-07-25 05:41:22 +0000 @@ -1,6 +1,10 @@ #!/bin/sh echo starting processing $1 echo error >&2 -sleep 10 +for i in `seq 30`; do +sleep 1 +echo $i cat $1 +echo +done echo ending === modified file 'lava-scheduler' --- lava-scheduler 2011-07-07 09:48:14 +0000 +++ lava-scheduler 2011-07-25 05:41:22 +0000 @@ -11,7 +11,7 @@ from lava_scheduler_daemon.dbjobsource import DatabaseJobSource source = DatabaseJobSource() -service = BoardSet(source, 'lava-dispatch', reactor) +service = BoardSet(source, os.path.join(os.path.dirname(os.path.abspath(__file__)), 'fake-dispatcher'), reactor) reactor.callWhenRunning(service.startService) logger = logging.getLogger('') === modified file 'lava_scheduler_app/extension.py' --- lava_scheduler_app/extension.py 2011-06-12 23:17:32 +0000 +++ lava_scheduler_app/extension.py 2011-07-26 04:36:41 +0000 @@ -52,5 +52,3 @@ @property def version(self): return versiontools.format_version(lava_scheduler_app.__version__) - - === added file 'lava_scheduler_app/migrations/0006_auto__add_field_testjob_log_file.py' --- lava_scheduler_app/migrations/0006_auto__add_field_testjob_log_file.py 1970-01-01 00:00:00 +0000 +++ lava_scheduler_app/migrations/0006_auto__add_field_testjob_log_file.py 2011-07-26 04:33:05 +0000 @@ -0,0 +1,84 @@ +# encoding: utf-8 +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + +class Migration(SchemaMigration): + + def forwards(self, orm): + + # Adding field 'TestJob.log_file' + db.add_column('lava_scheduler_app_testjob', 'log_file', self.gf('django.db.models.fields.files.FileField')(default=None, max_length=100, null=True), keep_default=False) + + + def backwards(self, orm): + + # Deleting field 'TestJob.log_file' + db.delete_column('lava_scheduler_app_testjob', 'log_file') + + + models = { + 'auth.group': { + 'Meta': {'object_name': 'Group'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), + 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) + }, + 'auth.permission': { + 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, + 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': 
'50'}) + }, + 'auth.user': { + 'Meta': {'object_name': 'User'}, + 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), + 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), + 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) + }, + 'contenttypes.contenttype': { + 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, + 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) + }, + 'lava_scheduler_app.device': { + 'Meta': {'object_name': 'Device'}, + 'current_job': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lava_scheduler_app.TestJob']", 'unique': 'True', 'null': 'True', 'blank': 'True'}), + 'device_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lava_scheduler_app.DeviceType']"}), + 'hostname': ('django.db.models.fields.CharField', [], {'max_length': '200', 'primary_key': 'True'}), + 'status': ('django.db.models.fields.IntegerField', [], {'default': '1'}) + }, + 'lava_scheduler_app.devicetype': { + 'Meta': {'object_name': 'DeviceType'}, + 'name': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'primary_key': 'True', 'db_index': 'True'}) + }, + 'lava_scheduler_app.testjob': { + 'Meta': {'object_name': 'TestJob'}, + 'definition': ('django.db.models.fields.TextField', [], {}), + 'device_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lava_scheduler_app.DeviceType']"}), + 'end_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'log_file': ('django.db.models.fields.files.FileField', [], {'default': 'None', 'max_length': '100', 'null': 'True'}), + 'start_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), + 'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}), + 'submit_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), + 'submitter': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}), + 'target': 
('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lava_scheduler_app.Device']", 'null': 'True'}) + } + } + + complete_apps = ['lava_scheduler_app'] === modified file 'lava_scheduler_app/models.py' --- lava_scheduler_app/models.py 2011-07-26 04:09:37 +0000 +++ lava_scheduler_app/models.py 2011-07-26 04:38:13 +0000 @@ -132,6 +132,8 @@ definition = models.TextField( editable = False, ) + log_file = models.FileField( + upload_to='lava-logs', default=None, null=True) def __unicode__(self): r = "%s test job" % self.get_status_display() === modified file 'lava_scheduler_app/templates/lava_scheduler_app/job.html' --- lava_scheduler_app/templates/lava_scheduler_app/job.html 2011-07-26 04:16:53 +0000 +++ lava_scheduler_app/templates/lava_scheduler_app/job.html 2011-07-26 05:10:49 +0000 @@ -9,6 +9,12 @@ padding-right: 2em; padding-bottom: 1em; } +#tab-output pre { + margin: 0; +} +.skip { + color:red; +} {% endblock %} @@ -57,20 +63,90 @@
 {{ job.definition }}
+{% if log_file_present %}
+  [... roughly 70 added lines of HTML/JavaScript for the #tab-output log view were stripped when this diff was rendered as plain text ...]
+{% endif %}
=== modified file 'lava_scheduler_app/tests.py' --- lava_scheduler_app/tests.py 2011-07-26 04:33:39 +0000 +++ lava_scheduler_app/tests.py 2011-07-26 05:00:31 +0000 @@ -195,7 +195,17 @@ requested_device=device, definition=json.dumps(definition)) transaction.commit() self.assertEqual( - definition, DatabaseJobSource().getJobForBoard_impl('panda01')) + definition, DatabaseJobSource().getJobForBoard_impl('panda01')[0]) + + def test_getJobForBoard_returns_writable_file(self): + device = self.factory.make_device(hostname='panda01') + definition = {'foo': 'bar'} + self.factory.make_testjob( + target=device, definition=json.dumps(definition)) + transaction.commit() + log_file = DatabaseJobSource().getJobForBoard_impl('panda01')[1] + log_file.write('a') + log_file.close() def test_getJobForBoard_returns_None_if_no_job(self): self.factory.make_device(hostname='panda01') @@ -212,7 +222,7 @@ definition=json.dumps(definition)) transaction.commit() self.assertEqual( - definition, DatabaseJobSource().getJobForBoard_impl('panda01')) + definition, DatabaseJobSource().getJobForBoard_impl('panda01')[0]) def test_getJobForBoard_prefers_older(self): panda_type = self.factory.ensure_device_type(name='panda') @@ -229,7 +239,7 @@ transaction.commit() self.assertEqual( first_definition, - DatabaseJobSource().getJobForBoard_impl('panda01')) + DatabaseJobSource().getJobForBoard_impl('panda01')[0]) def test_getJobForBoard_prefers_directly_targeted(self): panda_type = self.factory.ensure_device_type(name='panda') @@ -247,7 +257,7 @@ transaction.commit() self.assertEqual( device_definition, - DatabaseJobSource().getJobForBoard_impl('panda01')) + DatabaseJobSource().getJobForBoard_impl('panda01')[0]) def test_getJobForBoard_avoids_targeted_to_other_board_of_same_type(self): panda_type = self.factory.ensure_device_type(name='panda') @@ -306,7 +316,7 @@ def test_jobCompleted_set_statuses(self): device, job = self.get_device_and_running_job() transaction.commit() - DatabaseJobSource().jobCompleted_impl('panda01', None) + DatabaseJobSource().jobCompleted_impl('panda01') job = TestJob.objects.get(pk=job.pk) device = Device.objects.get(pk=device.pk) self.assertEqual( @@ -330,7 +340,7 @@ device, job = self.get_device_and_running_job() before = datetime.datetime.now() transaction.commit() - DatabaseJobSource().jobCompleted_impl('panda01', None) + DatabaseJobSource().jobCompleted_impl('panda01') after = datetime.datetime.now() job = TestJob.objects.get(pk=job.pk) self.assertTrue(before < job.end_time < after) @@ -338,6 +348,6 @@ def test_jobCompleted_clears_current_job(self): device, job = self.get_device_and_running_job() transaction.commit() - DatabaseJobSource().jobCompleted_impl('panda01', None) + DatabaseJobSource().jobCompleted_impl('panda01') device = Device.objects.get(pk=device.pk) self.assertEquals(None, device.current_job) === modified file 'lava_scheduler_app/urls.py' --- lava_scheduler_app/urls.py 2011-07-21 03:53:15 +0000 +++ lava_scheduler_app/urls.py 2011-07-25 03:09:15 +0000 @@ -5,4 +5,5 @@ url(r'^$', 'index'), url(r'^alljobs$', 'alljobs'), url(r'^job/(?P<pk>[0-9]+)$', 'job'), + url(r'^job/(?P<pk>[0-9]+)/output$', 'job_output'), ) === modified file 'lava_scheduler_app/views.py' --- lava_scheduler_app/views.py 2011-07-21 03:53:15 +0000 +++ lava_scheduler_app/views.py 2011-07-26 05:06:11 +0000 @@ -1,3 +1,6 @@ +import os + +from django.http import HttpResponse from django.template import RequestContext from django.shortcuts import render_to_response @@ -24,9 +27,45 @@ def job(request, pk): + job =
TestJob.objects.get(pk=pk) return render_to_response( "lava_scheduler_app/job.html", { + 'log_file_present': bool(job.log_file), 'job': TestJob.objects.get(pk=pk), }, RequestContext(request)) + + +LOG_CHUNK_SIZE = 512*1024 +NEWLINE_SCAN_SIZE = 80 + + +def job_output(request, pk): + start = int(request.GET.get('start', 0)) + count_present = 'count' in request.GET + job = TestJob.objects.get(pk=pk) + log_file = job.log_file + log_file.seek(0, os.SEEK_END) + size = int(request.GET.get('count', log_file.tell())) + if size - start > LOG_CHUNK_SIZE and not count_present: + log_file.seek(-LOG_CHUNK_SIZE, os.SEEK_END) + content = log_file.read(LOG_CHUNK_SIZE) + nl_index = content.find('\n', 0, NEWLINE_SCAN_SIZE) + if nl_index > 0 and not count_present: + content = content[nl_index + 1:] + skipped = size - start - len(content) + else: + skipped = 0 + log_file.seek(start, os.SEEK_SET) + content = log_file.read(size - start) + nl_index = content.rfind('\n', -NEWLINE_SCAN_SIZE) + if nl_index >= 0 and not count_present: + content = content[:nl_index+1] + response = HttpResponse(content) + if skipped: + response['X-Skipped-Bytes'] = str(skipped) + response['X-Current-Size'] = str(start + len(content)) + if job.status != TestJob.RUNNING: + response['X-Is-Finished'] = '1' + return response === modified file 'lava_scheduler_daemon/board.py' --- lava_scheduler_daemon/board.py 2011-07-07 11:32:49 +0000 +++ lava_scheduler_daemon/board.py 2011-07-26 01:01:46 +0000 @@ -12,56 +12,51 @@ logger = logging.getLogger(__name__ + '.DispatcherProcessProtocol') - def __init__(self, deferred): + def __init__(self, deferred, log_file): + print log_file self.deferred = deferred - - def connectionMade(self): - fd, self._logpath = tempfile.mkstemp() - self._output = os.fdopen(fd, 'wb') + self.log_file = log_file def outReceived(self, text): - self._output.write(text) + self.log_file.write(text) + self.log_file.flush() errReceived = outReceived - def _cleanUp(self, result): - os.unlink(self._logpath) - return result - def processEnded(self, reason): # This discards the process exit value. 
- self._output.close() - self.deferred.callback(self._logpath) - self.deferred.addCallback(self._cleanUp) + self.log_file.close() + self.deferred.callback(None) class Job(object): logger = logging.getLogger(__name__ + '.Job') - def __init__(self, json_data, dispatcher, reactor): - self.json_data = json_data + def __init__(self, job_data, dispatcher, reactor): + self.job_data = job_data self.dispatcher = dispatcher self.reactor = reactor self._json_file = None def run(self): d = defer.Deferred() + json_data, log_file = self.job_data fd, self._json_file = tempfile.mkstemp() with os.fdopen(fd, 'wb') as f: - json.dump(self.json_data, f) + json.dump(json_data, f) self.reactor.spawnProcess( - DispatcherProcessProtocol(d), self.dispatcher, + DispatcherProcessProtocol(d, log_file), self.dispatcher, args=[self.dispatcher, self._json_file], childFDs={0:0, 1:'r', 2:'r'}) d.addBoth(self._exited) return d - def _exited(self, log_file_path): - self.logger.info("job finished on %s", self.json_data['target']) + def _exited(self, result): + self.logger.info("job finished on %s", self.job_data[0]['target']) if self._json_file is not None: os.unlink(self._json_file) - return log_file_path + return result class Board(object): @@ -185,9 +180,9 @@ d.callback(None) self._stopping_deferreds = [] - def _maybeStartJob(self, json_data): + def _maybeStartJob(self, job_data): self.checking = False - if json_data is None: + if job_data is None: self.logger.debug("no job found") if self._stopping_deferreds: self._finish_stop() @@ -195,17 +190,16 @@ self._check_call = self.reactor.callLater( 10, self._checkForJob) return - self.logger.debug("starting job %r", json_data) + self.logger.debug("starting job %r", job_data) self.running_job = self.job_cls( - json_data, self.dispatcher, self.reactor) + job_data, self.dispatcher, self.reactor) d = self.running_job.run() d.addCallbacks(self._cbJobFinished, self._ebJobFinished) - def _cbJobFinished(self, log_file_path): + def _cbJobFinished(self, result): self.logger.info("reporting job completed") self.source.jobCompleted( - self.board_name, log_file_path). addCallback( - self._cbJobCompleted) + self.board_name).addCallback(self._cbJobCompleted) def _ebJobFinished(self, result): self.logger.exception(result.value) === modified file 'lava_scheduler_daemon/dbjobsource.py' --- lava_scheduler_daemon/dbjobsource.py 2011-07-26 02:58:20 +0000 +++ lava_scheduler_daemon/dbjobsource.py 2011-07-26 04:31:14 +0000 @@ -2,6 +2,7 @@ import json import logging +from django.core.files.base import ContentFile from django.db import IntegrityError, transaction from django.db.models import Q @@ -58,9 +59,15 @@ transaction.rollback() continue else: + job.log_file.save( + 'job-%s.log' % job.id, ContentFile(''), save=False) job.save() transaction.commit() - return json.loads(job.definition) + json_data = json.loads(job.definition) + log_file = job.log_file + log_file.file.close() + log_file.open('wb') + return json_data, log_file else: # We don't really need to rollback here, as no modifying # operations have been made to the database. 
But Django is @@ -74,7 +81,7 @@ return deferToThread(self.getJobForBoard_impl, board_name) @transaction.commit_on_success() - def jobCompleted_impl(self, board_name, log_stream): + def jobCompleted_impl(self, board_name): self.logger.debug('marking job as complete on %s', board_name) device = Device.objects.get(hostname=board_name) device.status = Device.IDLE @@ -85,6 +92,5 @@ device.save() job.save() - def jobCompleted(self, board_name, log_file_path): - return deferToThread( - self.jobCompleted_impl, board_name, log_file_path) + def jobCompleted(self, board_name): + return deferToThread(self.jobCompleted_impl, board_name) === modified file 'lava_scheduler_daemon/jobsource.py' --- lava_scheduler_daemon/jobsource.py 2011-07-06 21:59:57 +0000 +++ lava_scheduler_daemon/jobsource.py 2011-07-26 00:34:21 +0000 @@ -17,12 +17,12 @@ """Get the list of currently configured board names.""" def getJobForBoard(board_name): - """Return the json data of a job for board_name to run. + """Return the json data of a job for board_name and a log file. The job should be marked as started before it is returned. """ - def jobCompleted(board_name, log_file_path): + def jobCompleted(board_name): """Mark the job currently running on `board_name` as completed.""" @@ -72,14 +72,14 @@ if json_data['target'] == board_name: self.logger.debug('running %s on %s', json_file, board_name) json_file.moveTo(board_dir.child(json_file.basename())) - return json_data + return json_data, open('/dev/null', 'w') else: return None def getJobForBoard(self, board_name): return defer.maybeDeferred(self._getJobForBoard, board_name) - def _jobCompleted(self, board_name, log_file_path): + def _jobCompleted(self, board_name): [json_file] = self._board_dir(board_name).children() completed = self.directory.child('completed') counter = 0 @@ -90,6 +90,5 @@ counter += 1 json_file.moveTo(completed.child(fname)) - def jobCompleted(self, board_name, log_file_path): - return defer.maybeDeferred( - self._jobCompleted, board_name, log_file_path) + def jobCompleted(self, board_name): + return defer.maybeDeferred(self._jobCompleted, board_name) === modified file 'lava_scheduler_daemon/tests/test_board.py' --- lava_scheduler_daemon/tests/test_board.py 2011-06-21 03:10:04 +0000 +++ lava_scheduler_daemon/tests/test_board.py 2011-07-26 00:34:21 +0000 @@ -33,10 +33,11 @@ def _completeCall(self, method_name, board_name, result): self._requests[method_name][board_name].callback(result) + class TestJob(object): - def __init__(self, json_data, dispatcher, reactor): - self.json_data = json_data + def __init__(self, job_data, dispatcher, reactor): + self.json_data = job_data self.dispatcher = dispatcher self.reactor = reactor self.deferred = defer.Deferred() @@ -95,13 +96,13 @@ def test_actual_job_runs(self): b = self.make_board('board') b.start() - self.source._completeCall('getJobForBoard', 'board', {}) + self.source._completeCall('getJobForBoard', 'board', ({}, None)) self.assertEqual('R', b._state_name()) def test_completion_calls_jobCompleted(self): b = self.make_board('board') b.start() - self.source._completeCall('getJobForBoard', 'board', {}) + self.source._completeCall('getJobForBoard', 'board', ({}, None)) b.running_job.deferred.callback('path') self.assertEqual( 1, len(self.source._calls['board']['jobCompleted'])) @@ -109,14 +110,14 @@ def test_still_running_during_jobCompleted(self): b = self.make_board('board') b.start() - self.source._completeCall('getJobForBoard', 'board', {}) + self.source._completeCall('getJobForBoard', 'board', ({}, None)) 
b.running_job.deferred.callback('path') self.assertEqual('R', b._state_name()) def test_check_again_on_completion(self): b = self.make_board('board') b.start() - self.source._completeCall('getJobForBoard', 'board', {}) + self.source._completeCall('getJobForBoard', 'board', ({}, None)) b.running_job.deferred.callback('path') self.source._completeCall('jobCompleted', 'board', None) self.assertEqual('C', b._state_name()) @@ -145,7 +146,7 @@ stop_results = [] s.addCallback(stop_results.append) self.assertEqual(0, len(stop_results)) - self.source._completeCall('getJobForBoard', 'board', {}) + self.source._completeCall('getJobForBoard', 'board', ({}, None)) self.assertEqual(0, len(stop_results)) self.assertEqual('R+S', b._state_name()) @@ -156,7 +157,7 @@ stop_results = [] s.addCallback(stop_results.append) self.assertEqual(0, len(stop_results)) - self.source._completeCall('getJobForBoard', 'board', {}) + self.source._completeCall('getJobForBoard', 'board', ({}, None)) b.running_job.deferred.callback(None) self.source._completeCall('jobCompleted', 'board', None) self.assertEqual(1, len(stop_results)) @@ -165,7 +166,7 @@ def test_stop_while_running_job_stops_on_complete(self): b = self.make_board('board') b.start() - self.source._completeCall('getJobForBoard', 'board', {}) + self.source._completeCall('getJobForBoard', 'board', ({}, None)) self.assertEqual('R', b._state_name()) s = b.stop() stop_results = []
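
The new job_output view above serves the log in byte ranges: the client passes a ?start= offset, the response body carries the new data, and the X-Current-Size, X-Skipped-Bytes and X-Is-Finished headers tell the client where to resume, what was trimmed, and when the job has finished. As an illustration only (not part of this patch), here is a minimal Python sketch of a client that tails a running job's log by polling that endpoint; the base URL, polling interval and the host/job id in the usage comment are assumptions.

# Sketch of a log-tailing client for the job/<pk>/output view (illustrative,
# not part of the patch). Query parameter and header names come from
# job_output() above; everything else is assumed.
import time
import urllib2


def tail_job_log(base_url, job_id, poll_interval=5):
    """Print a job's log as it grows, until the scheduler reports it finished."""
    start = 0
    while True:
        url = '%s/job/%s/output?start=%d' % (base_url, job_id, start)
        response = urllib2.urlopen(url)
        content = response.read()
        headers = response.info()
        skipped = headers.getheader('X-Skipped-Bytes')
        if skipped:
            # The view trims very large responses and reports how much it dropped.
            print '[... %s bytes skipped ...]' % skipped
        if content:
            print content,
        # X-Current-Size is the offset the next request should start from.
        start = int(headers.getheader('X-Current-Size'))
        if headers.getheader('X-Is-Finished'):
            break
        time.sleep(poll_interval)


# Hypothetical usage, assuming the scheduler app is served at this address:
# tail_job_log('http://localhost:8000/scheduler', 42)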