aboutsummaryrefslogtreecommitdiff
path: root/app
diff options
context:
space:
mode:
authorMilo Casagrande <milo.casagrande@linaro.org>2015-01-14 15:42:28 +0100
committerMilo Casagrande <milo.casagrande@linaro.org>2015-01-14 15:42:28 +0100
commitb617a7ff50d14f3d74efd7dbe969b37c9f0135d2 (patch)
tree0bee3d444fd8297e6ee366bcd58a223a476124b8 /app
parent5f5ccfc9c7f2dac96f8748af1558b2ffcf33b4ce (diff)
parentf7d9a594b29904f435cb0f98a053a22282962750 (diff)
Merge branch 'master' into file-upload
Conflicts: app/urls.py Change-Id: I4bb5663e88379f84eac7eb6f9cb4cf25614947c2
Diffstat (limited to 'app')
-rw-r--r--app/handlers/__init__.py2
-rw-r--r--app/handlers/common.py19
-rw-r--r--app/handlers/count.py70
-rw-r--r--app/handlers/defconf.py40
-rw-r--r--app/handlers/job.py18
-rw-r--r--app/handlers/lab.py7
-rw-r--r--app/handlers/response.py25
-rw-r--r--app/handlers/send.py90
-rw-r--r--app/handlers/tests/test_defconf_handler.py16
-rw-r--r--app/handlers/tests/test_job_handler.py33
-rw-r--r--app/handlers/tests/test_send_handler.py149
-rw-r--r--app/handlers/token.py25
-rw-r--r--app/models/__init__.py1
-rwxr-xr-xapp/server.py134
-rw-r--r--app/taskqueue/celery.py5
-rw-r--r--app/tests/__init__.py1
-rw-r--r--app/urls.py37
-rw-r--r--app/utils/batch/batch_op.py88
-rw-r--r--app/utils/batch/common.py63
-rw-r--r--app/utils/db.py9
-rw-r--r--app/utils/report.py15
21 files changed, 561 insertions, 286 deletions
diff --git a/app/handlers/__init__.py b/app/handlers/__init__.py
index 7374003..cdd928b 100644
--- a/app/handlers/__init__.py
+++ b/app/handlers/__init__.py
@@ -1,2 +1,2 @@
-__version__ = "2015.1"
+__version__ = "2015.1.1"
__versionfull__ = __version__
diff --git a/app/handlers/common.py b/app/handlers/common.py
index ed6fdd8..282db22 100644
--- a/app/handlers/common.py
+++ b/app/handlers/common.py
@@ -310,6 +310,25 @@ REPORT_VALID_KEYS = {
]
}
+SEND_VALID_KEYS = {
+ "POST": {
+ models.MANDATORY_KEYS: [
+ models.JOB_KEY,
+ models.KERNEL_KEY
+ ],
+ models.ACCEPTED_KEYS: [
+ models.BOOT_REPORT_SEND_TO_KEY,
+ models.BUILD_REPORT_SEND_TO_KEY,
+ models.DELAY_KEY,
+ models.JOB_KEY,
+ models.KERNEL_KEY,
+ models.REPORT_SEND_TO_KEY,
+ models.SEND_BOOT_REPORT_KEY,
+ models.SEND_BUILD_REPORT_KEY
+ ]
+ }
+}
+
ID_KEYS = [
models.BOOT_ID_KEY,
models.DEFCONFIG_ID_KEY,
diff --git a/app/handlers/count.py b/app/handlers/count.py
index 526af8a..3092ed7 100644
--- a/app/handlers/count.py
+++ b/app/handlers/count.py
@@ -15,30 +15,21 @@
"""Handle the /count URLs used to count objects in the database."""
-from tornado.web import asynchronous
-
-from handlers.base import BaseHandler
-from handlers.common import (
- COLLECTIONS,
- COUNT_VALID_KEYS,
- update_id_fields,
- get_and_add_date_range,
- get_query_spec,
-)
-from handlers.response import HandlerResponse
-from models import ID_KEY
-from utils.db import (
- count,
- find_and_count,
-)
+import tornado.web
+
+import handlers.base as hbase
+import handlers.common as hcommon
+import handlers.response as hresponse
+import models
+import utils.db
# Internally used only. It is used to retrieve just one field for
# the query results since we only need to count the results, we are
# not interested in the values.
-COUNT_FIELDS = {ID_KEY: True}
+COUNT_FIELDS = {models.ID_KEY: True}
-class CountHandler(BaseHandler):
+class CountHandler(hbase.BaseHandler):
"""Handle the /count URLs."""
def __init__(self, application, request, **kwargs):
@@ -46,14 +37,14 @@ class CountHandler(BaseHandler):
@staticmethod
def _valid_keys(method):
- return COUNT_VALID_KEYS.get(method, None)
+ return hcommon.COUNT_VALID_KEYS.get(method, None)
def _get_one(self, collection):
- response = HandlerResponse()
+ response = hresponse.HandlerResponse()
- if collection in COLLECTIONS.keys():
+ if collection in hcommon.COLLECTIONS.keys():
response.result = count_one_collection(
- self.db[COLLECTIONS[collection]],
+ self.db[hcommon.COLLECTIONS[collection]],
collection,
self.get_query_arguments,
self._valid_keys("GET")
@@ -66,7 +57,7 @@ class CountHandler(BaseHandler):
return response
def _get(self, **kwargs):
- response = HandlerResponse()
+ response = hresponse.HandlerResponse()
response.result = count_all_collections(
self.db,
self.get_query_arguments,
@@ -75,12 +66,12 @@ class CountHandler(BaseHandler):
return response
- @asynchronous
+ @tornado.web.asynchronous
def post(self, *args, **kwargs):
"""Not implemented."""
self.write_error(status_code=501)
- @asynchronous
+ @tornado.web.asynchronous
def delete(self, *args, **kwargs):
"""Not implemented."""
self.write_error(status_code=501)
@@ -103,12 +94,12 @@ def count_one_collection(
optionally the `fields` fields.
"""
result = []
- spec = get_query_spec(query_args_func, valid_keys)
- get_and_add_date_range(spec, query_args_func)
- update_id_fields(spec)
+ spec = hcommon.get_query_spec(query_args_func, valid_keys)
+ hcommon.get_and_add_date_range(spec, query_args_func)
+ hcommon.update_id_fields(spec)
if spec:
- _, number = find_and_count(
+ _, number = utils.db.find_and_count(
collection, 0, 0, spec, COUNT_FIELDS
)
if not number:
@@ -121,7 +112,7 @@ def count_one_collection(
result.append(
dict(
collection=collection_name,
- count=count(collection)
+ count=utils.db.count(collection)
)
)
@@ -143,20 +134,25 @@ def count_all_collections(database, query_args_func, valid_keys):
"""
result = []
- spec = get_query_spec(query_args_func, valid_keys)
- get_and_add_date_range(spec, query_args_func)
- update_id_fields(spec)
+ spec = hcommon.get_query_spec(query_args_func, valid_keys)
+ hcommon.get_and_add_date_range(spec, query_args_func)
+ hcommon.update_id_fields(spec)
if spec:
- for key, val in COLLECTIONS.iteritems():
- _, number = find_and_count(
+ for key, val in hcommon.COLLECTIONS.iteritems():
+ _, number = utils.db.find_and_count(
database[val], 0, 0, spec, COUNT_FIELDS
)
if not number:
number = 0
result.append(dict(collection=key, count=number))
else:
- for key, val in COLLECTIONS.iteritems():
- result.append(dict(collection=key, count=count(database[val])))
+ for key, val in hcommon.COLLECTIONS.iteritems():
+ result.append(
+ dict(
+ collection=key,
+ count=utils.db.count(database[val])
+ )
+ )
return result
diff --git a/app/handlers/defconf.py b/app/handlers/defconf.py
index d67edaf..5a8823c 100644
--- a/app/handlers/defconf.py
+++ b/app/handlers/defconf.py
@@ -15,16 +15,14 @@
"""The RequestHandler for /defconfig URLs."""
-from tornado.web import asynchronous
+import handlers.base as hbase
+import handlers.common as hcommon
+import handlers.response as hresponse
+import models
+import utils.db
-from handlers.base import BaseHandler
-from handlers.common import DEFCONFIG_VALID_KEYS
-from handlers.response import HandlerResponse
-from models import DEFCONFIG_COLLECTION
-from utils.db import delete
-
-class DefConfHandler(BaseHandler):
+class DefConfHandler(hbase.BaseHandler):
"""Handle the /defconfig URLs."""
def __init__(self, application, request, **kwargs):
@@ -32,22 +30,32 @@ class DefConfHandler(BaseHandler):
@property
def collection(self):
- return self.db[DEFCONFIG_COLLECTION]
+ return self.db[models.DEFCONFIG_COLLECTION]
@staticmethod
def _valid_keys(method):
- return DEFCONFIG_VALID_KEYS.get(method, None)
+ return hcommon.DEFCONFIG_VALID_KEYS.get(method, None)
+
+ def execute_post(self, *args, **kwargs):
+ """Execute the POST pre-operations.
+
+ Checks that everything is OK to perform a POST.
+ """
+ response = None
- @asynchronous
- def post(self, *args, **kwargs):
- """Not implemented."""
- self.write_error(501)
+ if self.validate_req_token("POST"):
+ response = hresponse.HandlerResponse(501)
+ else:
+ response = hresponse.HandlerResponse(403)
+ response.reason = hcommon.NOT_VALID_TOKEN
+
+ return response
def _delete(self, defconf_id):
- response = HandlerResponse()
+ response = hresponse.HandlerResponse()
response.result = None
- response.status_code = delete(self.collection, defconf_id)
+ response.status_code = utils.db.delete(self.collection, defconf_id)
if response.status_code == 200:
response.reason = "Resource '%s' deleted" % defconf_id
diff --git a/app/handlers/job.py b/app/handlers/job.py
index b13e777..b773653 100644
--- a/app/handlers/job.py
+++ b/app/handlers/job.py
@@ -1,5 +1,3 @@
-# Copyright (C) 2014 Linaro Ltd.
-#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
@@ -21,13 +19,9 @@ import handlers.base as hbase
import handlers.common as hcommon
import handlers.response as hresponse
import models
+import taskqueue.tasks as taskq
import utils.db
-from taskqueue.tasks import (
- import_job,
- schedule_boot_report
-)
-
class JobHandler(hbase.BaseHandler):
"""Handle the /job URLs."""
@@ -53,8 +47,9 @@ class JobHandler(hbase.BaseHandler):
mail_options = self.settings["mailoptions"]
countdown = self.settings["senddelay"]
- import_job.apply_async([json_obj, db_options])
- schedule_boot_report.apply_async(
+ taskq.import_job.apply_async([json_obj, db_options])
+ # TODO: remove email scheduling once deployed and job updated.
+ taskq.schedule_boot_report.apply_async(
[json_obj, db_options, mail_options, countdown])
return response
@@ -90,6 +85,9 @@ class JobHandler(hbase.BaseHandler):
response.status_code = 404
response.reason = self._get_status_message(404)
except bson.errors.InvalidId, ex:
- raise ex
+ self.log.exception(ex)
+ self.log.error("Invalid ID specified: %s", job_id)
+ response.status_code = 400
+ response.reason = "Wrong ID specified"
return response
diff --git a/app/handlers/lab.py b/app/handlers/lab.py
index 6282045..08d201a 100644
--- a/app/handlers/lab.py
+++ b/app/handlers/lab.py
@@ -13,10 +13,9 @@
"""Handler for the /lab URLs."""
-from urlparse import urlunparse
-
-import datetime
import bson
+import datetime
+import urlparse
import handlers.base
import handlers.common
@@ -288,7 +287,7 @@ class LabHandler(handlers.base.BaseHandler):
models.NAME_KEY: lab_doc.name,
models.TOKEN_KEY: token.token
}
- location = urlunparse(
+ location = urlparse.urlunparse(
(
'http',
self.request.headers.get('Host'),
diff --git a/app/handlers/response.py b/app/handlers/response.py
index 653f815..a62cbf2 100644
--- a/app/handlers/response.py
+++ b/app/handlers/response.py
@@ -15,13 +15,8 @@
"""A generic response object that handlers can pass along."""
-from pymongo.cursor import Cursor
-from types import (
- DictionaryType,
- IntType,
- ListType,
- StringTypes,
-)
+import pymongo.cursor
+import types
class HandlerResponse(object):
@@ -45,7 +40,7 @@ class HandlerResponse(object):
By default the status code is set to 200.
"""
- if not isinstance(status_code, IntType):
+ if not isinstance(status_code, types.IntType):
raise ValueError("Value must be an integer")
self._status_code = status_code
@@ -66,7 +61,7 @@ class HandlerResponse(object):
:param value: The status code, must be an int.
"""
- if not isinstance(value, IntType):
+ if not isinstance(value, types.IntType):
raise ValueError("Value must be an integer")
self._status_code = value
@@ -82,7 +77,7 @@ class HandlerResponse(object):
:param value: The reason as string.
"""
- if not isinstance(value, StringTypes):
+ if not isinstance(value, types.StringTypes):
raise ValueError("Value must be a string")
self._reason = value
@@ -98,7 +93,7 @@ class HandlerResponse(object):
:param value: A dictionary with the headers to set.
"""
- if not isinstance(value, DictionaryType):
+ if not isinstance(value, types.DictionaryType):
raise ValueError("Value must be a dictionary")
self._headers = value
@@ -110,7 +105,7 @@ class HandlerResponse(object):
@count.setter
def count(self, value):
- if not isinstance(value, IntType):
+ if not isinstance(value, types.IntType):
raise ValueError("Value must be a integer")
self._count = value
@@ -121,7 +116,7 @@ class HandlerResponse(object):
@limit.setter
def limit(self, value):
- if not isinstance(value, IntType):
+ if not isinstance(value, types.IntType):
raise ValueError("Value must be an integer")
self._limit = value
@@ -147,9 +142,9 @@ class HandlerResponse(object):
self._result = value
else:
# The pymongo cursor is an iterable.
- if not isinstance(value, (ListType, Cursor)):
+ if not isinstance(value, (types.ListType, pymongo.cursor.Cursor)):
value = [value]
- elif isinstance(value, Cursor):
+ elif isinstance(value, pymongo.cursor.Cursor):
value = [r for r in value]
self._result = value
diff --git a/app/handlers/send.py b/app/handlers/send.py
new file mode 100644
index 0000000..51de4b7
--- /dev/null
+++ b/app/handlers/send.py
@@ -0,0 +1,90 @@
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""This module is used to send boot and build report email."""
+
+import bson
+import datetime
+
+import handlers.base as hbase
+import handlers.common as hcommon
+import handlers.response as hresponse
+import models
+import taskqueue.tasks as taskq
+
+
+# pylint: disable=too-many-public-methods
+class SendHandler(hbase.BaseHandler):
+ """Handle the /send URLs."""
+
+ def __init__(self, application, request, **kwargs):
+ super(SendHandler, self).__init__(application, request, **kwargs)
+
+ @staticmethod
+ def _valid_keys(method):
+ return hcommon.SEND_VALID_KEYS.get(method, None)
+
+ def _post(self, *args, **kwargs):
+ response = hresponse.HandlerResponse(202)
+
+ json_obj = kwargs["json_obj"]
+ db_options = kwargs["db_options"]
+ mail_options = self.settings["mailoptions"]
+
+ countdown = json_obj.get(models.DELAY_KEY, self.settings["senddelay"])
+ if countdown is None:
+ countdown = self.settings["senddelay"]
+
+ when = (
+ datetime.datetime.now(tz=bson.tz_util.utc) +
+ datetime.timedelta(seconds=countdown)
+ )
+ response.reason = (
+ "Email report scheduled to be sent at '%s' UTC" %
+ when.isoformat()
+ )
+
+ taskq.schedule_boot_report.apply_async(
+ [json_obj, db_options, mail_options, countdown])
+
+ return response
+
+ def execute_delete(self, *args, **kwargs):
+ """Perform DELETE pre-operations.
+
+ Check that the DELETE request is OK.
+ """
+ response = None
+
+ if self.validate_req_token("DELETE"):
+ response = hresponse.HandlerResponse(501)
+ else:
+ response = hresponse.HandlerResponse(403)
+ response.reason = hcommon.NOT_VALID_TOKEN
+
+ return response
+
+ def execute_get(self, *args, **kwargs):
+ """Execute the GET pre-operations.
+
+ Checks that everything is OK to perform a GET.
+ """
+ response = None
+
+ if self.validate_req_token("GET"):
+ response = hresponse.HandlerResponse(501)
+ else:
+ response = hresponse.HandlerResponse(403)
+ response.reason = hcommon.NOT_VALID_TOKEN
+
+ return response
diff --git a/app/handlers/tests/test_defconf_handler.py b/app/handlers/tests/test_defconf_handler.py
index 75e241c..14ff6b6 100644
--- a/app/handlers/tests/test_defconf_handler.py
+++ b/app/handlers/tests/test_defconf_handler.py
@@ -105,10 +105,22 @@ class TestDefconfHandler(
self.assertEqual(
response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
- def test_post(self):
- # POST is not implemented for the DefConfHandler.
+ def test_post_no_token(self):
response = self.fetch('/defconfig', method='POST', body='')
+ self.assertEqual(response.code, 403)
+ self.assertEqual(
+ response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
+
+ def test_post_token(self):
+ # POST is not implemented for the DefConfHandler.
+ headers = {
+ 'Authorization': 'foo',
+ 'Content-Type': 'application/json'
+ }
+ response = self.fetch(
+ '/defconfig', method='POST', body="{}", headers=headers)
+
self.assertEqual(response.code, 501)
self.assertEqual(
response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
diff --git a/app/handlers/tests/test_job_handler.py b/app/handlers/tests/test_job_handler.py
index 172adad..cb70eea 100644
--- a/app/handlers/tests/test_job_handler.py
+++ b/app/handlers/tests/test_job_handler.py
@@ -193,8 +193,8 @@ class TestJobHandler(
self.assertEqual(
response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
- @mock.patch('handlers.job.import_job')
- @mock.patch('handlers.job.schedule_boot_report')
+ @mock.patch('taskqueue.tasks.import_job')
+ @mock.patch('taskqueue.tasks.schedule_boot_report')
def test_post_correct(self, mock_import_job, mock_schedule):
mock_import_job.apply_async = mock.MagicMock()
mock_schedule.apply_async = mock.MagicMock()
@@ -258,6 +258,35 @@ class TestJobHandler(
self.assertEqual(
response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
+ def test_delete_wrong_id(self):
+ headers = {"Authorization": "foo"}
+
+ response = self.fetch(
+ "/job/foo", method="DELETE", headers=headers,
+ )
+
+ self.assertEqual(response.code, 400)
+ self.assertEqual(
+ response.headers["Content-Type"], DEFAULT_CONTENT_TYPE)
+
+ @mock.patch("utils.db.delete")
+ @mock.patch("utils.db.find_one")
+ @mock.patch("bson.objectid.ObjectId")
+ def test_delete_db_error(self, mock_id, mock_find, mock_delete):
+ mock_id.return_value = "job"
+ mock_find.return_value = "job"
+ mock_delete.return_value = 500
+
+ headers = {"Authorization": "foo"}
+
+ response = self.fetch(
+ "/job/foo", method="DELETE", headers=headers,
+ )
+
+ self.assertEqual(response.code, 500)
+ self.assertEqual(
+ response.headers["Content-Type"], DEFAULT_CONTENT_TYPE)
+
@mock.patch('handlers.job.JobHandler._get_one')
def test_get_wrong_handler_response(self, mock_get_one):
mock_get_one.return_value = ""
diff --git a/app/handlers/tests/test_send_handler.py b/app/handlers/tests/test_send_handler.py
new file mode 100644
index 0000000..d615417
--- /dev/null
+++ b/app/handlers/tests/test_send_handler.py
@@ -0,0 +1,149 @@
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""Test module for the SendHandler handler."""
+
+import json
+import mock
+import mongomock
+
+from concurrent.futures import ThreadPoolExecutor
+from tornado import (
+ ioloop,
+ testing,
+ web,
+)
+
+from handlers.app import AppHandler
+from urls import _SEND_URL
+
+# Default Content-Type header returned by Tornado.
+DEFAULT_CONTENT_TYPE = "application/json; charset=UTF-8"
+
+
+class TestSendHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
+
+ def setUp(self):
+ self.mongodb_client = mongomock.Connection()
+
+ super(TestSendHandler, self).setUp()
+
+ patched_find_token = mock.patch(
+ "handlers.base.BaseHandler._find_token")
+ self.find_token = patched_find_token.start()
+ self.find_token.return_value = "token"
+
+ patched_validate_token = mock.patch("handlers.common.validate_token")
+ self.validate_token = patched_validate_token.start()
+ self.validate_token.return_value = True
+
+ self.addCleanup(patched_find_token.stop)
+ self.addCleanup(patched_validate_token.stop)
+
+ def get_app(self):
+ dboptions = {
+ "dbpassword": "",
+ "dbuser": ""
+ }
+
+ mailoptions = {}
+
+ settings = {
+ "dboptions": dboptions,
+ "client": self.mongodb_client,
+ "executor": ThreadPoolExecutor(max_workers=2),
+ "default_handler_class": AppHandler,
+ "master_key": "bar",
+ "debug": False,
+ "mailoptions": mailoptions,
+ "senddelay": 60*60
+ }
+
+ return web.Application([_SEND_URL], **settings)
+
+ def get_new_ioloop(self):
+ return ioloop.IOLoop.instance()
+
+ def test_get(self):
+ headers = {"Authorization": "foo"}
+ response = self.fetch(
+ "/send", method="GET", headers=headers)
+
+ self.assertEqual(response.code, 501)
+ self.assertEqual(
+ response.headers["Content-Type"], DEFAULT_CONTENT_TYPE)
+
+ def test_get_no_token(self):
+ response = self.fetch("/send", method="GET")
+ self.assertEqual(response.code, 403)
+ self.assertEqual(
+ response.headers["Content-Type"], DEFAULT_CONTENT_TYPE)
+
+ def test_delete(self):
+ headers = {"Authorization": "foo"}
+ response = self.fetch(
+ "/send", method="DELETE", headers=headers)
+
+ self.assertEqual(response.code, 501)
+ self.assertEqual(
+ response.headers["Content-Type"], DEFAULT_CONTENT_TYPE)
+
+ def test_delete_no_token(self):
+ response = self.fetch("/send", method="DELETE")
+ self.assertEqual(response.code, 403)
+ self.assertEqual(
+ response.headers["Content-Type"], DEFAULT_CONTENT_TYPE)
+
+ def test_post_no_token(self):
+ response = self.fetch("/send", method="POST", body="")
+ self.assertEqual(response.code, 403)
+ self.assertEqual(
+ response.headers["Content-Type"], DEFAULT_CONTENT_TYPE)
+
+ def test_post_missing_job_key(self):
+ headers = {
+ "Authorization": "foo",
+ "Content-Type": "application/json",
+ }
+ body = json.dumps(dict(kernel="kernel"))
+ response = self.fetch(
+ "/send", method="POST", headers=headers, body=body)
+ self.assertEqual(response.code, 400)
+ self.assertEqual(
+ response.headers["Content-Type"], DEFAULT_CONTENT_TYPE)
+
+ def test_post_missing_kernel_key(self):
+ headers = {
+ "Authorization": "foo",
+ "Content-Type": "application/json",
+ }
+ body = json.dumps(dict(job="job"))
+ response = self.fetch(
+ "/send", method="POST", headers=headers, body=body)
+ self.assertEqual(response.code, 400)
+ self.assertEqual(
+ response.headers["Content-Type"], DEFAULT_CONTENT_TYPE)
+
+ @mock.patch("taskqueue.tasks.schedule_boot_report")
+ def test_post_correct(self, mock_schedule):
+ mock_schedule.apply_async = mock.MagicMock()
+ headers = {
+ "Authorization": "foo",
+ "Content-Type": "application/json",
+ }
+ body = json.dumps(dict(job="job", kernel="kernel", delay=None))
+ response = self.fetch(
+ "/send", method="POST", headers=headers, body=body)
+ self.assertEqual(response.code, 202)
+ self.assertEqual(
+ response.headers["Content-Type"], DEFAULT_CONTENT_TYPE)
diff --git a/app/handlers/token.py b/app/handlers/token.py
index 79bb8cf..fcb6137 100644
--- a/app/handlers/token.py
+++ b/app/handlers/token.py
@@ -16,8 +16,7 @@
"""The RequestHandler for /token URLs."""
import bson
-
-from urlparse import urlunparse
+import urlparse
import handlers.base as hbase
import handlers.common as hcommon
@@ -87,18 +86,18 @@ class TokenHandler(hbase.BaseHandler):
def _post(self, *args, **kwargs):
response = None
- if kwargs and kwargs.get('id', None):
+ if kwargs and kwargs.get("id", None):
self.log.info(
"Token update from IP address %s",
self.request.remote_ip
)
- response = self._update_data(kwargs['id'], kwargs['json_obj'])
+ response = self._update_data(kwargs["id"], kwargs["json_obj"])
else:
self.log.info(
"New token creation from IP address %s",
self.request.remote_ip
)
- response = self._new_data(kwargs['json_obj'])
+ response = self._new_data(kwargs["json_obj"])
return response
@@ -117,15 +116,15 @@ class TokenHandler(hbase.BaseHandler):
response.status_code, _ = utils.db.save(self.db, new_token)
if response.status_code == 201:
response.result = {models.TOKEN_KEY: new_token.token}
- location = urlunparse(
+ location = urlparse.urlunparse(
(
- 'http',
- self.request.headers.get('Host'),
- self.request.uri + '/' + new_token.token,
- '', '', ''
+ "http",
+ self.request.headers.get("Host"),
+ self.request.uri + "/" + new_token.token,
+ "", "", ""
)
)
- response.headers = {'Location': location}
+ response.headers = {"Location": location}
except KeyError:
response.status_code = 400
response.reason = (
@@ -263,8 +262,8 @@ class TokenHandler(hbase.BaseHandler):
response = hresponse.HandlerResponse(400)
if self.validate_req_token("DELETE"):
- if kwargs and kwargs.get('id', None):
- response.status_code = self._delete(kwargs['id'])
+ if kwargs and kwargs.get("id", None):
+ response.status_code = self._delete(kwargs["id"])
if response.status_code == 200:
response.reason = "Resource deleted"
else:
diff --git a/app/models/__init__.py b/app/models/__init__.py
index c4a4254..ba1041e 100644
--- a/app/models/__init__.py
+++ b/app/models/__init__.py
@@ -124,6 +124,7 @@ SEND_BUILD_REPORT_KEY = "build_report"
BOOT_REPORT_SEND_TO_KEY = "boot_send_to"
BUILD_REPORT_SEND_TO_KEY = "build_send_to"
REPORT_SEND_TO_KEY = "send_to"
+DELAY_KEY = "delay"
# Token special fields.
ADMIN_KEY = "admin"
diff --git a/app/server.py b/app/server.py
index a3c37bb..ef3c7b4 100755
--- a/app/server.py
+++ b/app/server.py
@@ -16,70 +16,70 @@
"""The Tornado application base module."""
+import concurrent.futures
import os
import pymongo
import tornado
+import tornado.httpserver
+import tornado.netutil
+import tornado.options as topt
+import tornado.web
+import uuid
-from concurrent.futures import ThreadPoolExecutor
-from tornado.httpserver import HTTPServer
-from tornado.netutil import bind_unix_socket
-from tornado.options import (
- define,
- options,
-)
-from tornado.web import Application
-from uuid import uuid4
-
-from handlers.app import AppHandler
-from handlers.dbindexes import ensure_indexes
-from urls import APP_URLS
+import handlers.app as happ
+import handlers.dbindexes as hdbindexes
+import urls
-DEFAULT_CONFIG_FILE = '/etc/linaro/kernelci-backend.cfg'
+DEFAULT_CONFIG_FILE = "/etc/linaro/kernelci-backend.cfg"
-define('master_key', default=str(uuid4()), type=str, help="The master key")
-define(
- 'max_workers', default=20, type=int,
+topt.define(
+ "master_key", default=str(uuid.uuid4()), type=str, help="The master key")
+topt.define(
+ "max_workers", default=20, type=int,
help="The number of workers for the thread pool executor"
)
-define('gzip', default=True)
-define('debug', default=True)
-define('autoreload', default=True)
+topt.define("gzip", default=True)
+topt.define("debug", default=True)
+topt.define("autoreload", default=True)
# mongodb connection parameters.
-define(
- 'dbhost', default='localhost', type=str, help="The DB host to connect to"
+topt.define(
+ "dbhost", default="localhost", type=str, help="The DB host to connect to"
)
-define("dbport", default=27017, type=int, help="The DB port to connect to")
-define(
+topt.define("dbport", default=27017, type=int, help="The DB port to connect to")
+topt.define(
"dbuser", default="", type=str,
help="The user name to use for the DB connection"
)
-define(
+topt.define(
"dbpassword", default="", type=str,
help="The password to use for the DB connection"
)
-define("dbpool", default=250, type=int, help="The DB connections pool size")
-define(
+topt.define(
+ "dbpool", default=250, type=int, help="The DB connections pool size")
+topt.define(
"unixsocket", default=False, type=bool,
help="If unix socket should be used"
)
-define(
- "smtp_host", default='', type=str, help="The SMTP host to connect to")
-define("smtp_user", default='', type=str, help="SMTP connection user")
-define("smtp_password", default='', type=str, help="SMTP connection password")
-define(
+topt.define(
+ "smtp_host", default="", type=str, help="The SMTP host to connect to")
+topt.define("smtp_user", default="", type=str, help="SMTP connection user")
+topt.define(
+ "smtp_password", default="", type=str, help="SMTP connection password")
+topt.define(
"smtp_port", default=587, type=int,
help="The SMTP connection port, default to 587")
-define("smtp_sender", default='', type=str, help="The sender email address")
-define(
+topt.define(
+ "smtp_sender", default="", type=str, help="The sender email address")
+topt.define(
"send_delay", default=60*60+5, type=int,
help="The delay in sending the report emails, "
"default to 1 hour and 5 seconds"
)
-class KernelCiBackend(Application):
+class KernelCiBackend(tornado.web.Application):
"""The Kernel CI backend application.
Where everything starts.
@@ -89,62 +89,64 @@ class KernelCiBackend(Application):
def __init__(self):
db_options = {
- 'dbhost': options.dbhost,
- 'dbport': options.dbport,
- 'dbuser': options.dbuser,
- 'dbpassword': options.dbpassword,
- 'dbpool': options.dbpool
+ "dbhost": topt.options.dbhost,
+ "dbport": topt.options.dbport,
+ "dbuser": topt.options.dbuser,
+ "dbpassword": topt.options.dbpassword,
+ "dbpool": topt.options.dbpool
}
mail_options = {
- "host": options.smtp_host,
- "user": options.smtp_user,
- "password": options.smtp_password,
- "port": options.smtp_port,
- "sender": options.smtp_sender
+ "host": topt.options.smtp_host,
+ "user": topt.options.smtp_user,
+ "password": topt.options.smtp_password,
+ "port": topt.options.smtp_port,
+ "sender": topt.options.smtp_sender
}
if self.mongodb_client is None:
self.mongodb_client = pymongo.MongoClient(
- host=options.dbhost,
- port=options.dbport,
- max_pool_size=options.dbpool
+ host=topt.options.dbhost,
+ port=topt.options.dbport,
+ max_pool_size=topt.options.dbpool
)
settings = {
- 'client': self.mongodb_client,
- 'dboptions': db_options,
+ "client": self.mongodb_client,
+ "dboptions": db_options,
"mailoptions": mail_options,
- 'default_handler_class': AppHandler,
- 'executor': ThreadPoolExecutor(max_workers=options.max_workers),
- 'gzip': options.gzip,
- 'debug': options.debug,
- 'master_key': options.master_key,
- 'autoreload': options.autoreload,
- "senddelay": options.send_delay
+ "default_handler_class": happ.AppHandler,
+ "executor": concurrent.futures.ThreadPoolExecutor(
+ max_workers=topt.options.max_workers),
+ "gzip": topt.options.gzip,
+ "debug": topt.options.debug,
+ "master_key": topt.options.master_key,
+ "autoreload": topt.options.autoreload,
+ "senddelay": topt.options.send_delay
}
- ensure_indexes(self.mongodb_client, db_options)
+ hdbindexes.ensure_indexes(self.mongodb_client, db_options)
- super(KernelCiBackend, self).__init__(APP_URLS, **settings)
+ super(KernelCiBackend, self).__init__(urls.APP_URLS, **settings)
-if __name__ == '__main__':
+if __name__ == "__main__":
if os.path.isfile(DEFAULT_CONFIG_FILE):
- options.parse_config_file(DEFAULT_CONFIG_FILE, final=False)
+ topt.options.parse_config_file(DEFAULT_CONFIG_FILE, final=False)
- options.parse_command_line()
+ topt.options.parse_command_line()
# Settings that should be passed also to the HTTPServer.
HTTP_SETTINGS = {
- 'xheaders': True,
+ "xheaders": True,
}
- if options.unixsocket:
+ if topt.options.unixsocket:
application = KernelCiBackend()
- server = HTTPServer(application, **HTTP_SETTINGS)
- unix_socket = bind_unix_socket("/tmp/kernelci-backend.socket")
+ server = tornado.httpserver.HTTPServer(application, **HTTP_SETTINGS)
+ unix_socket = tornado.netutil.bind_unix_socket(
+ "/tmp/kernelci-backend.socket")
server.add_socket(unix_socket)
else:
KernelCiBackend().listen(8888, **HTTP_SETTINGS)
diff --git a/app/taskqueue/celery.py b/app/taskqueue/celery.py
index 8a361f3..b94d037 100644
--- a/app/taskqueue/celery.py
+++ b/app/taskqueue/celery.py
@@ -17,16 +17,15 @@
from __future__ import absolute_import
+import celery
import os
-from celery import Celery
-
import taskqueue.celeryconfig as celeryconfig
TASKS_LIST = ["taskqueue.tasks"]
-app = Celery(
+app = celery.Celery(
"tasks",
include=TASKS_LIST
)
diff --git a/app/tests/__init__.py b/app/tests/__init__.py
index f34fd28..38ba1e2 100644
--- a/app/tests/__init__.py
+++ b/app/tests/__init__.py
@@ -30,6 +30,7 @@ def test_modules():
'handlers.tests.test_job_handler',
'handlers.tests.test_lab_handler',
'handlers.tests.test_report_handler',
+ 'handlers.tests.test_send_handler',
'handlers.tests.test_token_handler',
'handlers.tests.test_version_handler',
'models.tests.test_bisect_model',
diff --git a/app/urls.py b/app/urls.py
index f1e8d1f..d9b3db3 100644
--- a/app/urls.py
+++ b/app/urls.py
@@ -1,5 +1,3 @@
-# Copyright (C) 2014 Linaro Ltd.
-#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
@@ -15,7 +13,7 @@
"""Define URLs and handlers to server them."""
-from tornado.web import url
+import tornado.web
import handlers.batch
import handlers.bisect
@@ -25,58 +23,64 @@ import handlers.defconf
import handlers.job
import handlers.lab
import handlers.report
+import handlers.send
import handlers.subscription
import handlers.token
import handlers.upload
import handlers.version
-_JOB_URL = url(
+_JOB_URL = tornado.web.url(
r'/job[s]?(?P<sl>/)?(?P<id>.*)', handlers.job.JobHandler, name='job'
)
-_DEFCONF_URL = url(
+_DEFCONF_URL = tornado.web.url(
r'/defconfig[s]?(?P<sl>/)?(?P<id>.*)',
handlers.defconf.DefConfHandler,
name='defconf'
)
-_SUBSCRIPTION_URL = url(
+_SUBSCRIPTION_URL = tornado.web.url(
r'/subscription[s]?(?P<sl>/)?(?P<id>.*)',
handlers.subscription.SubscriptionHandler,
name='subscription',
)
-_BOOT_URL = url(
+_BOOT_URL = tornado.web.url(
r'/boot[s]?(?P<sl>/)?(?P<id>.*)', handlers.boot.BootHandler, name='boot'
)
-_COUNT_URL = url(
+_COUNT_URL = tornado.web.url(
r'/count[s]?(?P<sl>/)?(?P<id>.*)', handlers.count.CountHandler, name='count'
)
-_TOKEN_URL = url(
+_TOKEN_URL = tornado.web.url(
r'/token[s]?(?P<sl>/)?(?P<id>.*)', handlers.token.TokenHandler, name='token'
)
-_BATCH_URL = url(
+_BATCH_URL = tornado.web.url(
r'/batch', handlers.batch.BatchHandler, name='batch'
)
-_BISECT_URL = url(
+_BISECT_URL = tornado.web.url(
r"/bisect[s]?/(?P<collection>.*)/(?P<id>.*)",
handlers.bisect.BisectHandler,
name="bisect"
)
-_LAB_URL = url(
+_LAB_URL = tornado.web.url(
r"/lab[s]?(?P<sl>/)?(?P<id>.*)", handlers.lab.LabHandler, name="lab"
)
-_VERSION_URL = url(
+_VERSION_URL = tornado.web.url(
r"/version", handlers.version.VersionHandler, name="version"
)
-_REPORT_URL = url(
+_REPORT_URL = tornado.web.url(
r"/report[s]?(?P<sl>/)?(?P<id>.*)",
handlers.report.ReportHandler,
name="response"
)
-_UPLOAD_URL = url(
+_UPLOAD_URL = tornado.web.url(
r"/upload(?P<sl>/)?(?P<path>.*)",
handlers.upload.UploadHandler,
name="upload"
)
+_SEND_URL = tornado.web.url(
+ r"/send(?P<sl>/)?",
+ handlers.send.SendHandler,
+ name="send"
+)
APP_URLS = [
_BATCH_URL,
@@ -90,5 +94,6 @@ APP_URLS = [
_TOKEN_URL,
_VERSION_URL,
_REPORT_URL,
- _UPLOAD_URL
+ _UPLOAD_URL,
+ _SEND_URL
]
diff --git a/app/utils/batch/batch_op.py b/app/utils/batch/batch_op.py
index 3348ab6..e7515cf 100644
--- a/app/utils/batch/batch_op.py
+++ b/app/utils/batch/batch_op.py
@@ -13,36 +13,18 @@
"""Batch operation classes."""
-from bson.json_util import default
-from json import (
- dumps as j_dump,
- loads as j_load,
-)
-
-from handlers.common import (
- BOOT_VALID_KEYS,
- COUNT_VALID_KEYS,
- DEFCONFIG_VALID_KEYS,
- JOB_VALID_KEYS,
- get_all_query_values,
- get_query_fields,
-)
-from handlers.count import (
- count_all_collections,
- count_one_collection,
-)
-from models import (
- COUNT_KEY,
- LIMIT_KEY,
- OP_ID_KEY,
- RESULT_KEY,
-)
-from utils import LOG
-from utils.db import (
- aggregate,
- find_and_count,
- find_one,
-)
+try:
+ import simplejson as json
+except ImportError:
+ import json
+
+import bson
+
+import handlers.common as hcommon
+import handlers.count as hcount
+import models
+import utils
+import utils.db
class BatchOperation(object):
@@ -134,25 +116,25 @@ class BatchOperation(object):
"""Prepare the necessary parameters for a GET operation."""
if self.document_id:
# Get only one document.
- self.operation = find_one
+ self.operation = utils.db.find_one
self.args = [
self._database[self._collection],
self.document_id,
]
self.kwargs = {
- 'fields': get_query_fields(self.query_args_func)
+ 'fields': hcommon.get_query_fields(self.query_args_func)
}
else:
# Get the spec and perform the query, can perform and aggregation
# as well.
spec, sort, fields, self._skip, self._limit, unique = \
- get_all_query_values(
+ hcommon.get_all_query_values(
self.query_args_func, self.valid_keys.get(self.method)
)
if unique:
# Perform an aggregate
- self.operation = aggregate
+ self.operation = utils.db.aggregate
self.args = [
self._database[self._collection],
unique
@@ -164,7 +146,7 @@ class BatchOperation(object):
'limit': self._limit
}
else:
- self.operation = find_and_count
+ self.operation = utils.db.find_and_count
self.args = [
self._database[self._collection],
self._limit,
@@ -192,7 +174,7 @@ class BatchOperation(object):
"""
response = {}
if self.operation_id:
- response[OP_ID_KEY] = self.operation_id
+ response[models.OP_ID_KEY] = self.operation_id
# find_and_count returns 2 results: the mongodb cursor and the
# results count.
@@ -203,28 +185,34 @@ class BatchOperation(object):
res = [r for r in result[0]]
json_obj = {
- RESULT_KEY: res,
- COUNT_KEY: count
+ models.RESULT_KEY: res,
+ models.COUNT_KEY: count
}
if self._limit is not None:
- json_obj[LIMIT_KEY] = self._limit
+ json_obj[models.LIMIT_KEY] = self._limit
try:
# Doing like this otherwise the result returned will be a
# string and not a real JSON object (since we have to serialize
# it here).
- response[RESULT_KEY] = [
- j_load(j_dump(json_obj, default=default))
+ response[models.RESULT_KEY] = [
+ json.loads(
+ json.dumps(
+ json_obj,
+ default=bson.json_util.default,
+ separators=(",", ":")
+ )
+ )
]
except TypeError:
- response[RESULT_KEY] = []
- LOG.error(
+ response[models.RESULT_KEY] = []
+ utils.LOG.error(
"Error serializing JSON object. Objects to serialize: "
"%s, %s", type(res), type(count)
)
else:
- response[RESULT_KEY] = result
+ response[models.RESULT_KEY] = result
return response
@@ -249,7 +237,7 @@ class BatchBootOperation(BatchOperation):
super(BatchBootOperation, self).__init__(
collection, database, operation_id
)
- self.valid_keys = BOOT_VALID_KEYS
+ self.valid_keys = hcommon.BOOT_VALID_KEYS
class BatchJobOperation(BatchOperation):
@@ -259,7 +247,7 @@ class BatchJobOperation(BatchOperation):
super(BatchJobOperation, self).__init__(
collection, database, operation_id
)
- self.valid_keys = JOB_VALID_KEYS
+ self.valid_keys = hcommon.JOB_VALID_KEYS
class BatchDefconfigOperation(BatchOperation):
@@ -269,7 +257,7 @@ class BatchDefconfigOperation(BatchOperation):
super(BatchDefconfigOperation, self).__init__(
collection, database, operation_id
)
- self.valid_keys = DEFCONFIG_VALID_KEYS
+ self.valid_keys = hcommon.DEFCONFIG_VALID_KEYS
class BatchCountOperation(BatchOperation):
@@ -279,11 +267,11 @@ class BatchCountOperation(BatchOperation):
super(BatchCountOperation, self).__init__(
collection, database, operation_id
)
- self.valid_keys = COUNT_VALID_KEYS
+ self.valid_keys = hcommon.COUNT_VALID_KEYS
def _prepare_get_operation(self):
if self.document_id:
- self.operation = count_one_collection
+ self.operation = hcount.count_one_collection
# We use document_id here with the database since we need to count
# on a different collection.
self.args = [
@@ -293,7 +281,7 @@ class BatchCountOperation(BatchOperation):
self.valid_keys.get(self.method)
]
else:
- self.operation = count_all_collections
+ self.operation = hcount.count_all_collections
self.args = [
self._database,
self.query_args_func,
diff --git a/app/utils/batch/common.py b/app/utils/batch/common.py
index 97945d8..0bb8b3d 100644
--- a/app/utils/batch/common.py
+++ b/app/utils/batch/common.py
@@ -11,30 +11,11 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-from types import (
- ListType,
- StringTypes,
-)
-
-from models import (
- BOOT_COLLECTION,
- COLLECTION_KEY,
- COUNT_COLLECTION,
- DEFCONFIG_COLLECTION,
- DOCUMENT_ID_KEY,
- JOB_COLLECTION,
- METHOD_KEY,
- OP_ID_KEY,
- QUERY_KEY,
-)
-from utils.batch.batch_op import (
- BatchBootOperation,
- BatchCountOperation,
- BatchDefconfigOperation,
- BatchJobOperation,
- BatchOperation,
-)
-from utils.db import get_db_connection
+import models
+import types
+
+import utils.batch.batch_op as batchop
+import utils.db
def create_batch_operation(json_obj, db_options):
@@ -54,37 +35,37 @@ def create_batch_operation(json_obj, db_options):
if json_obj:
get_func = json_obj.get
- collection = get_func(COLLECTION_KEY, None)
+ collection = get_func(models.COLLECTION_KEY, None)
if collection:
- database = get_db_connection(db_options)
- operation_id = get_func(OP_ID_KEY, None)
+ database = utils.db.get_db_connection(db_options)
+ operation_id = get_func(models.OP_ID_KEY, None)
- if collection == COUNT_COLLECTION:
- batch_op = BatchCountOperation(
+ if collection == models.COUNT_COLLECTION:
+ batch_op = batchop.BatchCountOperation(
collection, database, operation_id=operation_id
)
- elif collection == BOOT_COLLECTION:
- batch_op = BatchBootOperation(
+ elif collection == models.BOOT_COLLECTION:
+ batch_op = batchop.BatchBootOperation(
collection, database, operation_id=operation_id
)
- elif collection == JOB_COLLECTION:
- batch_op = BatchJobOperation(
+ elif collection == models.JOB_COLLECTION:
+ batch_op = batchop.BatchJobOperation(
collection, database, operation_id=operation_id
)
- elif collection == DEFCONFIG_COLLECTION:
- batch_op = BatchDefconfigOperation(
+ elif collection == models.DEFCONFIG_COLLECTION:
+ batch_op = batchop.BatchDefconfigOperation(
collection, database, operation_id=operation_id)
else:
- batch_op = BatchOperation(
+ batch_op = batchop.BatchOperation(
collection, database, operation_id=operation_id)
batch_op.query_args = get_batch_query_args(
- get_func(QUERY_KEY, None)
+ get_func(models.QUERY_KEY, None)
)
- batch_op.document_id = get_func(DOCUMENT_ID_KEY, None)
+ batch_op.document_id = get_func(models.DOCUMENT_ID_KEY, None)
batch_op.query_args_func = batch_op.query_args.get
- batch_op.method = get_func(METHOD_KEY, None)
+ batch_op.method = get_func(models.METHOD_KEY, None)
return batch_op
@@ -127,12 +108,12 @@ def get_batch_query_args(query):
"""
args = {}
- if all([query, isinstance(query, StringTypes)]):
+ if all([query, isinstance(query, types.StringTypes)]):
if query.startswith("?"):
query = query[1:]
query = query.split("&")
- if isinstance(query, ListType):
+ if isinstance(query, types.ListType):
for arg in query:
arg = arg.split("=")
# Can't have query with just one element, they have to be
diff --git a/app/utils/db.py b/app/utils/db.py
index de9ba65..f1f51f9 100644
--- a/app/utils/db.py
+++ b/app/utils/db.py
@@ -16,10 +16,9 @@
"""Collection of mongodb database operations."""
import pymongo
+import pymongo.errors
import types
-from pymongo.errors import OperationFailure
-
import models
import models.base as mbase
import utils
@@ -211,7 +210,7 @@ def save(database, document, manipulate=False):
utils.LOG.info(
"Document '%s' saved (%s)", document.name, document.collection
)
- except OperationFailure, ex:
+ except pymongo.errors.OperationFailure, ex:
utils.LOG.error(
"Error saving the following document: %s (%s)",
document.name, document.collection
@@ -294,7 +293,7 @@ def update(collection, spec, document, operation="$set"):
operation: document,
}
)
- except OperationFailure, ex:
+ except pymongo.errors.OperationFailure, ex:
utils.LOG.error(
"Error updating the following document: %s", str(document)
)
@@ -318,7 +317,7 @@ def delete(collection, spec_or_id):
try:
collection.remove(spec_or_id)
- except OperationFailure, ex:
+ except pymongo.errors.OperationFailure, ex:
utils.LOG.error(
"Error removing the following document: %s", str(spec_or_id)
)
diff --git a/app/utils/report.py b/app/utils/report.py
index 7bef4dc..0800fcd 100644
--- a/app/utils/report.py
+++ b/app/utils/report.py
@@ -129,6 +129,9 @@ def create_boot_report(job, kernel, db_options):
fields=BOOT_SEARCH_FIELDS,
sort=BOOT_SEARCH_SORT)
+ failed_data = None
+ conflict_data = None
+
if fail_count > 0:
failed_data, unique_data = _parse_results(
fail_results.clone(), get_unique=True)
@@ -176,11 +179,12 @@ def create_boot_report(job, kernel, db_options):
email_body, subject = _create_boot_email(
job, kernel, failed_data, fail_count, total_count, conflict_data)
- else:
+ elif fail_count == 0 and total_count > 0:
+ email_body, subject = _create_boot_email(
+ job, kernel, failed_data, fail_count, total_count, conflict_data)
+ elif fail_count == 0 and total_count == 0:
utils.LOG.warn(
- "No results found for job '%s' and kernel '%s': "
- "email report not created",
- job, kernel)
+ "Nothing found for '%s-%s': no email report sent", job, kernel)
return email_body, subject
@@ -398,7 +402,8 @@ def _parse_and_write_results(failed_data, conflict_data, args, m_string):
if failed_data:
m_string.write(
- u"\nFailed Boot Results: %(boot_url)s/?%(kernel)s&fail\n" % args
+ u"\nFailed Boot Results: %(base_url)s/boot/?%(kernel)s&fail\n" %
+ args
)
_traverse_data_struct(failed_data, m_string)