Prevent duplicate hosts in job creation.
* have rpc_utils.create_new_job() explicitly check for duplicate hosts and raise a ValidationError, and add a test for this (a sketch of such a test appears just before the diff below)
* add a unique key over job_id and host_id on host_queue_entries.  We don't want the frontend to depend on this check (failing at the database layer partway through would leave a half-created job), but it's a good safety net (see the migration sketch just below).
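
The migration that adds the unique key is not part of this excerpt. A minimal
sketch of what it might look like, assuming the UP_SQL/DOWN_SQL convention of
autotest's frontend migrations (the index name below is hypothetical):

    UP_SQL = """
    ALTER TABLE host_queue_entries
        ADD UNIQUE INDEX host_queue_entries_job_id_and_host_id
        (job_id, host_id);
    """

    DOWN_SQL = """
    ALTER TABLE host_queue_entries
        DROP INDEX host_queue_entries_job_id_and_host_id;
    """

Since host_id is NULL for metahost entries and MySQL unique indexes permit
multiple NULL values, metahost rows should be unaffected by the new index.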

Signed-off-by: Steve Howard <showard@google.com>


git-svn-id: http://test.kernel.org/svn/autotest/trunk@3959 592f7852-d20e-0410-864c-8624ca9c26a4
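
A test exercising the new check might look like the following sketch. It
assumes the frontend RPC test harness with its populated fixture database;
the class name, hostname, and parameter values are illustrative rather than
taken from the actual test added by this change:

    import unittest
    from frontend.afe import model_logic, rpc_interface

    class DuplicateHostsTest(unittest.TestCase):
        # Sketch only: the real test would run inside the frontend test
        # harness, which sets up a test database containing 'host1'.
        def test_create_job_rejects_duplicate_hosts(self):
            self.assertRaises(
                model_logic.ValidationError, rpc_interface.create_job,
                name='duplicates', priority='Medium',
                control_file='control', control_type='Server',
                hosts=['host1', 'host1'])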
diff --git a/frontend/afe/rpc_utils.py b/frontend/afe/rpc_utils.py
index ae966f9..eb86cf4 100644
--- a/frontend/afe/rpc_utils.py
+++ b/frontend/afe/rpc_utils.py
@@ -430,6 +430,20 @@
     return info
 
 
+def check_for_duplicate_hosts(host_objects):
+    host_ids = set()
+    duplicate_hostnames = set()
+    for host in host_objects:
+        if host.id in host_ids:
+            duplicate_hostnames.add(host.hostname)
+        host_ids.add(host.id)
+
+    if duplicate_hostnames:
+        raise model_logic.ValidationError(
+                {'hosts' : 'Duplicate hosts: %s'
+                 % ', '.join(duplicate_hostnames)})
+
+
 def create_new_job(owner, options, host_objects, metahost_objects,
                    atomic_group=None):
     labels_by_name = dict((label.name, label)
@@ -468,6 +482,7 @@
                      'atomic group was specified for this job.' %
                      (', '.join(unusable_host_names),)})
 
+    check_for_duplicate_hosts(host_objects)
 
     check_job_dependencies(host_objects, dependencies)
     options['dependencies'] = [labels_by_name[label_name]