Prevent duplicate hosts in job creation.
* have rpc_utils.create_new_job() explicitly check for duplicate hosts and raise a ValidationError, and add a test for this
* add a unique key over job_id and host_id on host_queue_entries.  we don't want to rely on this constraint from the frontend (since hitting it would result in half-created jobs), but it's a good safety net.

Signed-off-by: Steve Howard <showard@google.com>


git-svn-id: http://test.kernel.org/svn/autotest/trunk@3959 592f7852-d20e-0410-864c-8624ca9c26a4
diff --git a/frontend/afe/rpc_interface_unittest.py b/frontend/afe/rpc_interface_unittest.py
index f1a6824..dee25c5 100755
--- a/frontend/afe/rpc_interface_unittest.py
+++ b/frontend/afe/rpc_interface_unittest.py
@@ -163,6 +163,11 @@
         self.assertEquals(host.aclgroup_set.count(), 0)
 
 
+    def test_create_job_duplicate_hosts(self):
+        self.assertRaises(model_logic.ValidationError, self._create_job_helper,
+                          hosts=[1, 1])
+
+
     def _setup_special_tasks(self):
         host = self.hosts[0]
 
@@ -243,11 +248,6 @@
         self.assertEquals(entry2['started_on'], '2009-01-03 00:00:00')
 
 
-    def _create_job_helper(self, **kwargs):
-        return rpc_interface.create_job('test', 'Medium', 'control file',
-                                        'Server', **kwargs)
-
-
     def test_view_invalid_host(self):
         # RPCs used by View Host page should work for invalid hosts
         self._create_job_helper(hosts=[1])
diff --git a/frontend/afe/rpc_utils.py b/frontend/afe/rpc_utils.py
index ae966f9..eb86cf4 100644
--- a/frontend/afe/rpc_utils.py
+++ b/frontend/afe/rpc_utils.py
@@ -430,6 +430,20 @@
     return info
 
 
+def check_for_duplicate_hosts(host_objects):
+    host_ids = set()
+    duplicate_hostnames = set()
+    for host in host_objects:
+        if host.id in host_ids:
+            duplicate_hostnames.add(host.hostname)
+        host_ids.add(host.id)
+
+    if duplicate_hostnames:
+        raise model_logic.ValidationError(
+                {'hosts' : 'Duplicate hosts: %s'
+                 % ', '.join(duplicate_hostnames)})
+
+
 def create_new_job(owner, options, host_objects, metahost_objects,
                    atomic_group=None):
     labels_by_name = dict((label.name, label)
@@ -468,6 +482,7 @@
                      'atomic group was specified for this job.' %
                      (', '.join(unusable_host_names),)})
 
+    check_for_duplicate_hosts(host_objects)
 
     check_job_dependencies(host_objects, dependencies)
     options['dependencies'] = [labels_by_name[label_name]