Get the frontend unittest to run against SQLite. This required scattered changes:
* change frontend_unittest to tell Django to use an in-memory SQLite database (see the first sketch below). Ideally this would have been the only change necessary, but it wasn't.
* change longs to ints in the doctests, since that's what pysqlite2 returns
* change several imports to go through autotest_lib; that's how all imports should be done, and the old style breaks when running code outside of manage.py
* make readonly_connection better support testing by changing its connection attribute into a method and adding the ability to disable the module completely during testing (see the usage sketch below)
* get rid of GROUP_CONCAT in models.py; SQLite doesn't support it (at least not in our old version), and it's not really necessary anyway (see the grouping sketch below)
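
As a rough sketch, the test setup amounts to pointing Django's old-style single-database settings at an in-memory SQLite database before any other Django code is imported; this mirrors the frontend_unittest change below:

    from django.core import management
    from autotest_lib.frontend import settings as frontend_settings
    management.setup_environ(frontend_settings)
    from django.conf import settings
    # LazySettings must be read from once before attributes can be set on it
    getattr(settings, 'DATABASE_ENGINE')
    settings.DATABASE_ENGINE = 'sqlite3'
    settings.DATABASE_NAME = ':memory:'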
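
With the readonly_connection change, callers go through the connection() accessor (which returns a singleton) rather than a module-level connection object, and tests can swap in a no-op dummy. A minimal usage sketch, matching the frontend_unittest change below:

    from autotest_lib.frontend.afe import readonly_connection

    # in a test's setUp, replace the real read-only connection with a dummy
    readonly_connection.ReadOnlyConnection.set_testing_mode(True)
    # ... exercise code that would normally use the read-only connection ...
    # in tearDown, restore normal behavior
    readonly_connection.ReadOnlyConnection.set_testing_mode(False)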
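
Dropping GROUP_CONCAT just means fetching one (job_id, label_name) row per dependency from the plain JOIN and grouping in Python. A minimal, self-contained sketch (the literals stand in for cursor.fetchall() and the jobs list):

    jobs = [{'id': 1}, {'id': 2}, {'id': 3}]
    rows = [(1, 'labelA'), (1, 'labelB'), (2, 'labelC')]  # stands in for cursor.fetchall()
    job_dependencies = {}
    for job_id, label_name in rows:
        job_dependencies.setdefault(job_id, []).append(label_name)
    for job in jobs:
        # rebuild the comma-separated string GROUP_CONCAT used to produce
        job['dependencies'] = ','.join(job_dependencies.get(job['id'], []))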
git-svn-id: http://test.kernel.org/svn/autotest/trunk@2244 592f7852-d20e-0410-864c-8624ca9c26a4
diff --git a/frontend/afe/doctests/001_rpc_test.txt b/frontend/afe/doctests/001_rpc_test.txt
index 84697b4..27ceeb2 100644
--- a/frontend/afe/doctests/001_rpc_test.txt
+++ b/frontend/afe/doctests/001_rpc_test.txt
@@ -37,7 +37,7 @@
# create a label
>>> rpc_interface.add_label(name='test_label')
-1L
+1
# we can modify the label by referencing its ID...
>>> rpc_interface.modify_label(1, kernel_config='/my/kernel/config')
@@ -47,7 +47,7 @@
# we use get_labels to retrieve object data
>>> data = rpc_interface.get_labels(name='test_label')
->>> data == [{'id': 1L,
+>>> data == [{'id': 1,
... 'name': 'test_label',
... 'platform': 1,
... 'kernel_config': '/my/kernel/config',
@@ -57,15 +57,15 @@
# get_labels return multiple matches as lists of dictionaries
>>> rpc_interface.add_label(name='label1', platform=False)
-2L
+2
>>> rpc_interface.add_label(name='label2', platform=True)
-3L
+3
>>> rpc_interface.add_label(name='label3', platform=False)
-4L
+4
>>> data = rpc_interface.get_labels(platform=False)
->>> data == [{'id': 2L, 'name': 'label1', 'platform': 0, 'kernel_config': '',
+>>> data == [{'id': 2, 'name': 'label1', 'platform': 0, 'kernel_config': '',
... 'only_if_needed': False, 'invalid': 0},
-... {'id': 4L, 'name': 'label3', 'platform': 0, 'kernel_config': '',
+... {'id': 4, 'name': 'label3', 'platform': 0, 'kernel_config': '',
... 'only_if_needed': False, 'invalid': 0}]
True
@@ -82,13 +82,13 @@
# all the add*, modify*, delete*, and get* methods work the same way
# hosts...
>>> rpc_interface.add_host(hostname='ipaj1', locked=True)
-1L
+1
>>> rpc_interface.modify_host('ipaj1', status='Hello')
>>> data = rpc_interface.get_hosts()
# delete the lock_time field, since that can't be reliably checked
>>> del data[0]['lock_time']
->>> data == [{'id': 1L,
+>>> data == [{'id': 1,
... 'hostname': 'ipaj1',
... 'locked': 1,
... 'synch_id': None,
@@ -108,16 +108,16 @@
... description='Sleep Test', test_time=1,
... test_category='Functional',
... test_class='Kernel', path='sleeptest')
-1L
+1
>>> rpc_interface.modify_test('sleeptest', path='/my/path')
>>> data = rpc_interface.get_tests()
->>> data == [{'id': 1L,
+>>> data == [{'id': 1,
... 'name': 'sleeptest',
... 'author': 'Test',
... 'description': 'Sleep Test',
... 'dependencies': '',
... 'experimental': 1,
-... 'sync_count': 1L,
+... 'sync_count': 1,
... 'test_type': 'Client',
... 'test_class': 'Kernel',
... 'test_time': 'SHORT',
@@ -132,10 +132,10 @@
# profilers...
>>> rpc_interface.add_profiler(name='oprofile')
-1L
+1
>>> rpc_interface.modify_profiler('oprofile', description='Oh profile!')
>>> data = rpc_interface.get_profilers()
->>> data == [{'id': 1L,
+>>> data == [{'id': 1,
... 'name': 'oprofile',
... 'description': 'Oh profile!'}]
True
@@ -146,10 +146,10 @@
# users...
>>> rpc_interface.add_user(login='showard')
-2L
+2
>>> rpc_interface.modify_user('showard', access_level=1)
>>> data = rpc_interface.get_users(login='showard')
->>> data == [{'id': 2L,
+>>> data == [{'id': 2,
... 'login': 'showard',
... 'access_level': 1}]
True
@@ -160,10 +160,10 @@
# acl groups...
# 1 ACL group already exists, named "Everyone" (ID 1)
>>> rpc_interface.add_acl_group(name='my_group')
-2L
+2
>>> rpc_interface.modify_acl_group('my_group', description='my new acl group')
>>> data = rpc_interface.get_acl_groups(name='my_group')
->>> data == [{'id': 2L,
+>>> data == [{'id': 2,
... 'name': 'my_group',
... 'description': 'my new acl group',
... 'users': ['debug_user'],
@@ -171,7 +171,7 @@
True
>>> rpc_interface.delete_acl_group('my_group')
>>> data = rpc_interface.get_acl_groups()
->>> data == [{'id': 1L,
+>>> data == [{'id': 1,
... 'name': 'Everyone',
... 'description': '',
... 'users': ['debug_user'],
@@ -184,13 +184,13 @@
# first, create some hosts and labels to play around with
>>> rpc_interface.add_host(hostname='host1')
-2L
+2
>>> rpc_interface.add_host(hostname='host2')
-3L
+3
>>> rpc_interface.add_label(name='label1')
-2L
+2
>>> rpc_interface.add_label(name='label2', platform=True)
-3L
+3
# add hosts to labels
>>> rpc_interface.host_add_labels('host1', ['label1'])
@@ -280,9 +280,9 @@
['Everyone']
>>> rpc_interface.add_user(login='showard', access_level=0)
-3L
+2
>>> rpc_interface.add_acl_group(name='my_group')
-3L
+2
>>> rpc_interface.acl_group_add_users('my_group', ['showard'])
>>> rpc_interface.acl_group_add_hosts('my_group', ['host1'])
@@ -323,21 +323,21 @@
# add some entries to play with
>>> rpc_interface.add_label(name='my_label', kernel_config='my_kernel_config')
-5L
+5
>>> test_control_path = os.path.join(test_path, 'test.control')
>>> rpc_interface.add_test(name='sleeptest', test_type='Client', author='Test',
... test_category='Test',
... test_class='Kernel', path=test_control_path)
-2L
+1
>>> test_control_path = os.path.join(test_path, 'test.control.2')
>>> rpc_interface.add_test(name='my_test', test_type='Client', author='Test',
... test_category='Test',
... test_class='Kernel', path=test_control_path)
-3L
+2
>>> rpc_interface.add_host(hostname='my_label_host1')
-4L
+4
>>> rpc_interface.add_host(hostname='my_label_host2')
-5L
+5
>>> rpc_interface.label_add_hosts(id='my_label', hosts=['my_label_host1', 'my_label_host2'])
# generate a control file
@@ -373,13 +373,13 @@
... control_type='Client',
... hosts=['host1', 'host2'],
... meta_hosts=['my_label', 'my_label'])
-1L
+1
# get job info - this does not include status info for particular hosts
>>> data = rpc_interface.get_jobs()
>>> data = data[0]
>>> data['id'], data['owner'], data['name'], data['priority']
-(1L, 'debug_user', 'my_job', 'Low')
+(1, 'debug_user', 'my_job', 'Low')
>>> data['control_file'] == cf_info['control_file']
True
>>> data['control_type']
@@ -390,7 +390,7 @@
# get_num_jobs - useful when dealing with large numbers of jobs
>>> rpc_interface.get_num_jobs(name='my_job')
-1L
+1
# check host queue entries for a job
>>> data = rpc_interface.get_host_queue_entries(job=1)
@@ -405,7 +405,7 @@
>>> job == {'control_file': cf_info['control_file'], # the control file we used
... 'control_type': 'Client',
... 'created_on': None,
-... 'id': 1L,
+... 'id': 1,
... 'name': 'my_job',
... 'owner': 'debug_user',
... 'priority': 'Low',
@@ -422,7 +422,7 @@
... {'active': 0,
... 'complete': 0,
... 'host': {'hostname': 'host1', # full host info here
-... 'id': 2L,
+... 'id': 2,
... 'invalid': 0,
... 'locked': 0,
... 'status': 'Ready',
@@ -430,7 +430,7 @@
... 'protection': 'No protection',
... 'locked_by': None,
... 'lock_time': None},
-... 'id': 1L,
+... 'id': 1,
... 'job': job, # full job info here
... 'meta_host': None,
... 'priority': 0,
@@ -441,7 +441,7 @@
... {'active': 0,
... 'complete': 0,
... 'host': None,
-... 'id': 3L,
+... 'id': 3,
... 'job': job,
... 'meta_host': 'my_label',
... 'priority': 0,
@@ -449,7 +449,7 @@
... 'deleted': 0})
True
>>> rpc_interface.get_num_host_queue_entries(job=1)
-4L
+4
>>> rpc_interface.get_hqe_percentage_complete(job=1)
0.0
@@ -457,13 +457,13 @@
>>> data = rpc_interface.get_jobs_summary()
>>> counts = data[0]['status_counts']
>>> counts
-{'Queued': 4L}
+{'Queued': 4}
# abort the job
>>> rpc_interface.abort_job(1)
>>> data = rpc_interface.get_jobs_summary(id=1)
>>> data[0]['status_counts']
-{'Aborted': 4L}
+{'Aborted': 4}
# Remove the two hosts in my_label
>>> rpc_interface.delete_host('my_label_host1')
@@ -498,7 +498,7 @@
... control_type='Server',
... is_synchronous=False,
... hosts=['host1'])
-2L
+2
>>> data = rpc_interface.get_jobs()
>>> data[0]['synch_type']
@@ -510,7 +510,7 @@
['host1', 'host2']
>>> rpc_interface.add_acl_group(name='mygroup')
-4L
+3
>>> rpc_interface.acl_group_add_users('mygroup', ['debug_user'])
>>> rpc_interface.acl_group_add_hosts('mygroup', ['host1'])
>>> data = rpc_interface.get_acl_groups(name='Everyone')[0]
diff --git a/frontend/afe/doctests/003_misc_rpc_features.txt b/frontend/afe/doctests/003_misc_rpc_features.txt
index abaac61..c71e987 100644
--- a/frontend/afe/doctests/003_misc_rpc_features.txt
+++ b/frontend/afe/doctests/003_misc_rpc_features.txt
@@ -5,9 +5,9 @@
>>> from frontend.afe import rpc_interface
>>> rpc_interface.add_profiler(name='oprofile')
-2L
+1
>>> rpc_interface.add_profiler(name='iostat')
-3L
+2
# profiler support in control file generation
>>> cf_info = rpc_interface.generate_control_file(
diff --git a/frontend/afe/management.py b/frontend/afe/management.py
index 193947d..ed71a9b 100644
--- a/frontend/afe/management.py
+++ b/frontend/afe/management.py
@@ -2,8 +2,9 @@
from django.dispatch import dispatcher
from django.db.models import signals
-import frontend.afe.models
from django.contrib import auth
+import common
+from autotest_lib.frontend.afe import models
BASIC_ADMIN = 'Basic admin'
@@ -35,5 +36,5 @@
print 'Group "%s" already exists' % BASIC_ADMIN
-dispatcher.connect(create_admin_group, sender=frontend.afe.models,
+dispatcher.connect(create_admin_group, sender=models,
signal=signals.post_syncdb)
diff --git a/frontend/afe/model_logic.py b/frontend/afe/model_logic.py
index c683dca..c30af9e 100644
--- a/frontend/afe/model_logic.py
+++ b/frontend/afe/model_logic.py
@@ -4,7 +4,7 @@
from django.db import models as dbmodels, backend, connection
from django.utils import datastructures
-from frontend.afe import readonly_connection
+from autotest_lib.frontend.afe import readonly_connection
class ValidationError(Exception):
"""\
@@ -15,11 +15,11 @@
def _wrap_with_readonly(method):
def wrapper_method(*args, **kwargs):
- readonly_connection.connection.set_django_connection()
+ readonly_connection.connection().set_django_connection()
try:
return method(*args, **kwargs)
finally:
- readonly_connection.connection.unset_django_connection()
+ readonly_connection.connection().unset_django_connection()
wrapper_method.__name__ = method.__name__
return wrapper_method
@@ -31,11 +31,11 @@
"""
def wrapper_generator(*args, **kwargs):
generator_obj = generator(*args, **kwargs)
- readonly_connection.connection.set_django_connection()
+ readonly_connection.connection().set_django_connection()
try:
first_value = generator_obj.next()
finally:
- readonly_connection.connection.unset_django_connection()
+ readonly_connection.connection().unset_django_connection()
yield first_value
while True:
diff --git a/frontend/afe/models.py b/frontend/afe/models.py
index f62213a..d578fd5 100644
--- a/frontend/afe/models.py
+++ b/frontend/afe/models.py
@@ -485,18 +485,19 @@
job_ids = ','.join(str(job['id']) for job in jobs)
cursor = connection.cursor()
cursor.execute("""
- SELECT jobs.id, GROUP_CONCAT(labels.name)
+ SELECT jobs.id, labels.name
FROM jobs
INNER JOIN jobs_dependency_labels
ON jobs.id = jobs_dependency_labels.job_id
INNER JOIN labels ON jobs_dependency_labels.label_id = labels.id
WHERE jobs.id IN (%s)
- GROUP BY jobs.id
""" % job_ids)
- id_to_dependencies = dict((job_id, dependencies)
- for job_id, dependencies in cursor.fetchall())
+ job_dependencies = {}
+ for job_id, dependency in cursor.fetchall():
+ job_dependencies.setdefault(job_id, []).append(dependency)
for job in jobs:
- job['dependencies'] = id_to_dependencies.get(job['id'], '')
+ dependencies = ','.join(job_dependencies.get(job['id'], []))
+ job['dependencies'] = dependencies
class Job(dbmodels.Model, model_logic.ModelExtensions):
diff --git a/frontend/afe/readonly_connection.py b/frontend/afe/readonly_connection.py
index 81d468f..d4e51bb 100644
--- a/frontend/afe/readonly_connection.py
+++ b/frontend/afe/readonly_connection.py
@@ -10,6 +10,24 @@
django.db.connection which are undocumented as far as I know, but I believe
it works across many, if not all, of the backends.
"""
+ _the_instance = None
+
+ # support singleton
+ @classmethod
+ def get_connection(cls):
+ if cls._the_instance is None:
+ cls._the_instance = ReadOnlyConnection()
+ return cls._the_instance
+
+
+ @classmethod
+ def set_testing_mode(cls, enabled):
+ if enabled:
+ cls._the_instance = DummyReadOnlyConnection()
+ else:
+ cls._the_instance = None
+
+
def __init__(self):
self._connection = None
@@ -73,6 +91,36 @@
self._connection = None
-connection = ReadOnlyConnection()
+class DummyReadOnlyConnection(object):
+ 'A dummy version for testing which does nothing.'
+
+ def __init__(self):
+ self._is_set = False
+
+ def set_django_connection(self):
+ assert not self._is_set
+ self._is_set = True
+
+
+ def unset_django_connection(self):
+ assert self._is_set
+ self._is_set = False
+
+
+ def cursor(self):
+ return django_connection.cursor()
+
+
+ def close(self):
+ pass
+
+
+# convenience
+def connection():
+ return ReadOnlyConnection.get_connection()
+
+
# close any open connection when request finishes
-dispatcher.connect(connection.close, signal=signals.request_finished)
+def _close_connection():
+ connection().close()
+dispatcher.connect(_close_connection, signal=signals.request_finished)
diff --git a/frontend/afe/rpc_interface.py b/frontend/afe/rpc_interface.py
index 6beb3be..17fcf70 100644
--- a/frontend/afe/rpc_interface.py
+++ b/frontend/afe/rpc_interface.py
@@ -31,7 +31,6 @@
from frontend import thread_local
from frontend.afe import models, model_logic, control_file, rpc_utils
-from frontend.afe import readonly_connection
from autotest_lib.client.common_lib import global_config
diff --git a/frontend/frontend_unittest.py b/frontend/frontend_unittest.py
index a45fe84..b36e535 100644
--- a/frontend/frontend_unittest.py
+++ b/frontend/frontend_unittest.py
@@ -4,16 +4,34 @@
This file provides a unittest.TestCase wrapper around the Django unit test
runner.
"""
-import common
import unittest
from django.core import management
-from autotest_lib.frontend import settings
+import common
+def setup_test_environ():
+ from autotest_lib.frontend import settings
+ management.setup_environ(settings)
+ from django.conf import settings
+ # django.conf.settings.LazySettings is buggy and requires us to get
+ # something from it before we set stuff on it
+ getattr(settings, 'DATABASE_ENGINE')
+ settings.DATABASE_ENGINE = 'sqlite3'
+ settings.DATABASE_NAME = ':memory:'
+
+# must call setup_test_environ() before importing any Django code
+setup_test_environ()
+from autotest_lib.frontend.afe import test, readonly_connection
class FrontendTest(unittest.TestCase):
+ def setUp(self):
+ readonly_connection.ReadOnlyConnection.set_testing_mode(True)
+
+
+ def tearDown(self):
+ readonly_connection.ReadOnlyConnection.set_testing_mode(False)
+
+
def test_all(self):
- management.setup_environ(settings)
- from autotest_lib.frontend.afe import test
errors = test.run_tests()
self.assert_(errors == 0, '%s failures in frontend unit tests' % errors)