Make 'job clone --id' an action instead of 'job create --clone'.
Add the backward compatibility method and call.
This is a first step towards allowing different hosts to be specified
when cloning, but it's just a code restructuring: it doesn't fix
anything (and hopefully doesn't break anything either).
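For example, the old invocation is rewritten on the fly into the new
one (these are the exact forms exercised by the new unittests):

    atest job create --clone 42 -r cloned   # old syntax, still accepted
    atest job clone --id 42 -r cloned       # new equivalent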
Signed-off-by: Jean-Marc Eurin <jmeurin@google.com>
git-svn-id: http://test.kernel.org/svn/autotest/trunk@3480 592f7852-d20e-0410-864c-8624ca9c26a4
diff --git a/cli/atest.py b/cli/atest.py
index d47c8c0..a366c62 100755
--- a/cli/atest.py
+++ b/cli/atest.py
@@ -66,8 +66,8 @@
# If we have a syntax error now, it should
# refer to the topic class.
- syntax_class = getattr(topic_module, topic)
- syntax_obj = syntax_class()
+ topic_class = getattr(topic_module, topic)
+ topic_obj = topic_class()
if len(sys.argv) > 1:
action = sys.argv.pop(1)
@@ -76,29 +76,32 @@
action = 'help'
sys.argv.insert(1, '-h')
else:
- syntax_obj.invalid_syntax('No action argument')
+ topic_obj.invalid_syntax('No action argument')
+
+    # Give the topic a chance to rewrite deprecated actions/options.
+ action = topic_obj.backward_compatibility(action, sys.argv)
# Instantiate a topic object
try:
- topic_class = getattr(topic_module, topic + '_' + action)
+ action_class = getattr(topic_module, topic + '_' + action)
except AttributeError:
- syntax_obj.invalid_syntax('Invalid action %s' % action)
+ topic_obj.invalid_syntax('Invalid action %s' % action)
- topic_obj = topic_class()
+ action_obj = action_class()
- topic_obj.parse()
+ action_obj.parse()
try:
try:
- results = topic_obj.execute()
+ results = action_obj.execute()
except topic_common.CliError:
pass
except Exception, err:
traceback.print_exc()
- topic_obj.generic_error("Unexpected exception: %s" % err)
+ action_obj.generic_error("Unexpected exception: %s" % err)
else:
try:
- topic_obj.output(results)
+ action_obj.output(results)
except Exception:
traceback.print_exc()
finally:
- return topic_obj.show_all_failures()
+ return action_obj.show_all_failures()
diff --git a/cli/job.py b/cli/job.py
index 1dfc3e5..bc466b4 100755
--- a/cli/job.py
+++ b/cli/job.py
@@ -22,8 +22,8 @@
class job(topic_common.atest):
"""Job class
- atest job [create|list|stat|abort] <options>"""
- usage_action = '[create|list|stat|abort]'
+ atest job [create|clone|list|stat|abort] <options>"""
+ usage_action = '[create|clone|list|stat|abort]'
topic = msg_topic = 'job'
msg_items = '<job_ids>'
@@ -37,6 +37,16 @@
result['status_counts'] = ', '.join(status)
+ def backward_compatibility(self, action, argv):
+ """ 'job create --clone' became 'job clone --id' """
+ if action == 'create':
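+            # '-l'/'--clone' was the old 'job create' clone option;
+            # rewrite it to the new 'job clone --id' form.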
+ for option in ['-l', '--clone']:
+ if option in argv:
+ argv[argv.index(option)] = '--id'
+ action = 'clone'
+ return action
+
+
class job_help(job):
"""Just here to get the atest logic working.
Usage is set by its parent"""
@@ -203,7 +213,87 @@
super(job_stat, self).output(results, keys)
-class job_create(action_common.atest_create, job):
+class job_create_or_clone(action_common.atest_create, job):
+ """Class containing the code common to the job create and clone actions"""
+ msg_items = 'job_name'
+
+ def __init__(self):
+ super(job_create_or_clone, self).__init__()
+ self.hosts = []
+ self.data_item_key = 'name'
+ self.parser.add_option('-p', '--priority', help='Job priority (low, '
+ 'medium, high, urgent), default=medium',
+ type='choice', choices=('low', 'medium', 'high',
+ 'urgent'), default='medium')
+ self.parser.add_option('-b', '--labels',
+ help='Comma separated list of labels '
+ 'to get machine list from.', default='')
+ self.parser.add_option('-m', '--machine', help='List of machines to '
+ 'run on')
+ self.parser.add_option('-M', '--mlist',
+ help='File listing machines to use',
+ type='string', metavar='MACHINE_FLIST')
+ self.parser.add_option('--one-time-hosts',
+ help='List of one time hosts')
+ self.parser.add_option('-e', '--email',
+                               help='A comma separated list of '
+ 'email addresses to notify of job completion',
+ default='')
+
+
+ def parse(self):
+ host_info = topic_common.item_parse_info(attribute_name='hosts',
+ inline_option='machine',
+ filename_option='mlist')
+ job_info = topic_common.item_parse_info(attribute_name='jobname',
+ use_leftover=True)
+ oth_info = topic_common.item_parse_info(attribute_name='one_time_hosts',
+ inline_option='one_time_hosts')
+
+ options, leftover = super(job_create_or_clone,
+ self).parse([host_info, job_info, oth_info],
+ req_items='jobname')
+ self.data = {}
+ if len(self.jobname) > 1:
+ self.invalid_syntax('Too many arguments specified, only expected '
+ 'to receive job name: %s' % self.jobname)
+ self.jobname = self.jobname[0]
+
+ if options.priority:
+ self.data['priority'] = options.priority.capitalize()
+
+ if self.one_time_hosts:
+ self.data['one_time_hosts'] = self.one_time_hosts
+
+ if options.labels:
+ labels = options.labels.split(',')
+ labels = [label.strip() for label in labels if label.strip()]
+ label_hosts = self.execute_rpc(op='get_hosts',
+ multiple_labels=labels)
+ for host in label_hosts:
+ self.hosts.append(host['hostname'])
+
+ self.data['name'] = self.jobname
+
+ (self.data['hosts'],
+ self.data['meta_hosts']) = self.parse_hosts(self.hosts)
+
+ self.data['email_list'] = options.email
+
+ return options, leftover
+
+
+ def create_job(self):
+ job_id = self.execute_rpc(op='create_job', **self.data)
+ return ['%s (id %s)' % (self.jobname, job_id)]
+
+
+ def get_items(self):
+ return [self.jobname]
+
+
+
+class job_create(job_create_or_clone):
"""atest job create [--priority <Low|Medium|High|Urgent>]
[--synch_count] [--control-file </path/to/cfile>]
[--on-server] [--test <test1,test2>] [--kernel <http://kernel>]
@@ -220,17 +310,10 @@
so it only uses the __init__() and output() from its superclass.
"""
op_action = 'create'
- msg_items = 'job_name'
def __init__(self):
super(job_create, self).__init__()
- self.hosts = []
self.ctrl_file_data = {}
- self.data_item_key = 'name'
- self.parser.add_option('-p', '--priority', help='Job priority (low, '
- 'medium, high, urgent), default=medium',
- type='choice', choices=('low', 'medium', 'high',
- 'urgent'), default='medium')
self.parser.add_option('-y', '--synch_count', type=int,
help='Number of machines to use per autoserv '
'execution')
@@ -243,24 +326,14 @@
help='List of tests to run')
self.parser.add_option('-k', '--kernel', help='Install kernel from this'
' URL before beginning job')
+
self.parser.add_option('-d', '--dependencies', help='Comma separated '
'list of labels this job is dependent on.',
default='')
- self.parser.add_option('-b', '--labels', help='Comma separated list of '
- 'labels to get machine list from.', default='')
self.parser.add_option('-G', '--atomic_group', help='Name of an Atomic '
'Group to schedule this job on.',
default='')
- self.parser.add_option('-m', '--machine', help='List of machines to '
- 'run on')
- self.parser.add_option('-M', '--mlist',
- help='File listing machines to use',
- type='string', metavar='MACHINE_FLIST')
- self.parser.add_option('--one-time-hosts',
- help='List of one time hosts')
- self.parser.add_option('-e', '--email', help='A comma seperated list '
- 'of email addresses to notify of job completion',
- default='')
+
self.parser.add_option('-B', '--reboot_before',
help='Whether or not to reboot the machine '
'before the job (never/if dirty/always)',
@@ -273,18 +346,12 @@
type='choice',
choices=('never', 'if all tests passed',
'always'))
+
self.parser.add_option('--parse-failed-repair',
help='Whether or not to parse failed repair '
'results as part of the job',
type='choice',
choices=('true', 'false'))
- self.parser.add_option('-l', '--clone', help='Clone an existing job. '
- 'This will discard all other options except '
- '--reuse-hosts.', default=False,
- metavar='JOB_ID')
- self.parser.add_option('-r', '--reuse-hosts', help='Use the exact same '
- 'hosts as cloned job. Only for use with '
- '--clone.', action='store_true', default=False)
self.parser.add_option('-n', '--noverify',
help='Do not run verify for job',
default=False, action='store_true')
@@ -295,32 +362,7 @@
def parse(self):
- host_info = topic_common.item_parse_info(attribute_name='hosts',
- inline_option='machine',
- filename_option='mlist')
- job_info = topic_common.item_parse_info(attribute_name='jobname',
- use_leftover=True)
- oth_info = topic_common.item_parse_info(attribute_name='one_time_hosts',
- inline_option='one_time_hosts')
-
- options, leftover = super(job_create,
- self).parse([host_info, job_info, oth_info],
- req_items='jobname')
- self.data = {}
- if len(self.jobname) > 1:
- self.invalid_syntax('Too many arguments specified, only expected '
- 'to receive job name: %s' % self.jobname)
- self.jobname = self.jobname[0]
-
- if options.reuse_hosts and not options.clone:
- self.invalid_syntax('--reuse-hosts only to be used with --clone.')
- # If cloning skip parse, parsing is done in execute
- self.clone_id = options.clone
- if options.clone:
- self.op_action = 'clone'
- self.msg_items = 'jobid'
- self.reuse_hosts = options.reuse_hosts
- return options, leftover
+ options, leftover = super(job_create, self).parse()
if (len(self.hosts) == 0 and not self.one_time_hosts
and not options.labels and not options.atomic_group):
@@ -365,8 +407,6 @@
self.ctrl_file_data['tests'] = tests
- if options.priority:
- self.data['priority'] = options.priority.capitalize()
if options.reboot_before:
self.data['reboot_before'] = options.reboot_before.capitalize()
if options.reboot_after:
@@ -381,21 +421,6 @@
if options.max_runtime:
self.data['max_runtime_hrs'] = options.max_runtime
- if self.one_time_hosts:
- self.data['one_time_hosts'] = self.one_time_hosts
- if options.labels:
- labels = options.labels.split(',')
- labels = [label.strip() for label in labels if label.strip()]
- label_hosts = self.execute_rpc(op='get_hosts',
- multiple_labels=labels)
- for host in label_hosts:
- self.hosts.append(host['hostname'])
-
- self.data['name'] = self.jobname
-
- (self.data['hosts'],
- self.data['meta_hosts']) = self.parse_hosts(self.hosts)
-
if options.atomic_group:
self.data['atomic_group_name'] = options.atomic_group
@@ -403,7 +428,6 @@
deps = [dep.strip() for dep in deps if dep.strip()]
self.data['dependencies'] = deps
- self.data['email_list'] = options.email
if options.synch_count:
self.data['synch_count'] = options.synch_count
if options.server:
@@ -448,48 +472,89 @@
if 'synch_count' not in self.data:
self.data['synch_count'] = 1
- if self.clone_id:
- clone_info = self.execute_rpc(op='get_info_for_clone',
- id=self.clone_id,
- preserve_metahosts=self.reuse_hosts)
- self.data = clone_info['job']
-
- # Remove fields from clone data that cannot be reused
- unused_fields = ('name', 'created_on', 'id', 'owner')
- for field in unused_fields:
- del self.data[field]
-
- # Keyword args cannot be unicode strings
- for key, val in self.data.iteritems():
- del self.data[key]
- self.data[str(key)] = val
-
- # Convert host list from clone info that can be used for job_create
- host_list = []
- if clone_info['meta_host_counts']:
- # Creates a dictionary of meta_hosts, e.g.
- # {u'label1': 3, u'label2': 2, u'label3': 5}
- meta_hosts = clone_info['meta_host_counts']
- # Create a list of formatted metahosts, e.g.
- # [u'3*label1', u'2*label2', u'5*label3']
- meta_host_list = ['%s*%s' % (str(val), key) for key,val in
- meta_hosts.items()]
- host_list.extend(meta_host_list)
- if clone_info['hosts']:
- # Creates a list of hosts, e.g. [u'host1', u'host2']
- hosts = [host['hostname'] for host in clone_info['hosts']]
- host_list.extend(hosts)
-
- (self.data['hosts'],
- self.data['meta_hosts']) = self.parse_hosts(host_list)
- self.data['name'] = self.jobname
-
- job_id = self.execute_rpc(op='create_job', **self.data)
- return ['%s (id %s)' % (self.jobname, job_id)]
+ return self.create_job()
- def get_items(self):
- return [self.jobname]
+class job_clone(job_create_or_clone):
+ """atest job clone [--priority <Low|Medium|High|Urgent>]
+ [--mlist </path/to/machinelist>] [--machine <host1 host2 host3>]
+ [--labels <list of labels of machines to run on>]
+ [--one-time-hosts <hosts>] [--email <email>]
+ job_name
+
+    Cloning a job is rather different from the other create operations:
+    it overrides parse() and execute(), and shares the common option
+    handling and create_job() with job_create through its superclass.
+ """
+ op_action = 'clone'
+ usage_action = 'clone'
+
+ def __init__(self):
+ super(job_clone, self).__init__()
+ self.parser.add_option('-i', '--id', help='Job id to clone',
+ default=False,
+ metavar='JOB_ID')
+ self.parser.add_option('-r', '--reuse-hosts',
+ help='Use the exact same hosts as the '
+ 'cloned job.',
+ action='store_true', default=False)
+
+
+ def parse(self):
+ options, leftover = super(job_clone, self).parse()
+
+ self.clone_id = options.id
+ self.reuse_hosts = options.reuse_hosts
+
+ if ((not self.reuse_hosts) and
+ (len(self.hosts) == 0 and not self.one_time_hosts
+ and not options.labels)):
+ self.invalid_syntax('Must reuse or specify at least one machine '
+ '(-r, -m, -M, -b or --one-time-hosts).')
+
+ return options, leftover
+
+
+ def execute(self):
+ clone_info = self.execute_rpc(op='get_info_for_clone',
+ id=self.clone_id,
+ preserve_metahosts=self.reuse_hosts)
+        # TODO(jmeurin) - Fixing self.data is the actual bug I'll fix
+        # in the next CL, so some of this doesn't entirely make sense
+        # (like calling parse_hosts() again).
+        # We really need to merge self.data and clone_info.
+ self.data = clone_info['job']
+
+ # Remove fields from clone data that cannot be reused
+ unused_fields = ('name', 'created_on', 'id', 'owner')
+ for field in unused_fields:
+ del self.data[field]
+
+ # Keyword args cannot be unicode strings
+ for key, val in self.data.iteritems():
+ del self.data[key]
+ self.data[str(key)] = val
+
+ # Convert host list from clone info that can be used for job_create
+ host_list = []
+ if clone_info['meta_host_counts']:
+ # Creates a dictionary of meta_hosts, e.g.
+ # {u'label1': 3, u'label2': 2, u'label3': 5}
+ meta_hosts = clone_info['meta_host_counts']
+ # Create a list of formatted metahosts, e.g.
+ # [u'3*label1', u'2*label2', u'5*label3']
+ meta_host_list = ['%s*%s' % (str(val), key) for key,val in
+ meta_hosts.items()]
+ host_list.extend(meta_host_list)
+ if clone_info['hosts']:
+ # Creates a list of hosts, e.g. [u'host1', u'host2']
+ hosts = [host['hostname'] for host in clone_info['hosts']]
+ host_list.extend(hosts)
+
+ (self.data['hosts'],
+ self.data['meta_hosts']) = self.parse_hosts(host_list)
+ self.data['name'] = self.jobname
+
+ return self.create_job()
class job_abort(job, action_common.atest_delete):
diff --git a/cli/job_unittest.py b/cli/job_unittest.py
index aa27043..4825316 100755
--- a/cli/job_unittest.py
+++ b/cli/job_unittest.py
@@ -2,7 +2,6 @@
#
# Copyright 2008 Google Inc. All Rights Reserved.
-#TODO(rkubiak): Add unittest for job cloning
"""Tests for job."""
@@ -978,6 +977,102 @@
['meta0']*5 + ['meta1']*2 + ['meta2'])
+class job_clone_unittest(cli_mock.cli_unittest):
+ job_data = {'control_file': u'NAME = \'Server Sleeptest\'\nAUTHOR = \'mbligh@google.com (Martin Bligh)\'\nTIME = \'SHORT\'\nTEST_CLASS = \'Software\'\nTEST_CATEGORY = \'Functional\'\nTEST_TYPE = \'server\'\nEXPERIMENTAL = \'False\'\n\nDOC = """\nruns sleep for one second on the list of machines.\n"""\n\ndef run(machine):\n host = hosts.create_host(machine)\n job.run_test(\'sleeptest\')\n\njob.parallel_simple(run, machines)\n',
+ 'control_type': u'Server',
+ 'dependencies': [],
+ 'email_list': u'',
+ 'max_runtime_hrs': 480,
+ 'parse_failed_repair': True,
+ 'priority': u'Medium',
+ 'reboot_after': u'Always',
+ 'reboot_before': u'If dirty',
+ 'run_verify': True,
+ 'synch_count': 1,
+ 'timeout': 480}
+
+ def setUp(self):
+ super(job_clone_unittest, self).setUp()
+ self.job_data_clone_info = copy.deepcopy(self.job_data)
+ self.job_data_clone_info['created_on'] = '2009-07-23 16:21:29'
+ self.job_data_clone_info['name'] = 'testing_clone'
+ self.job_data_clone_info['id'] = 42
+ self.job_data_clone_info['owner'] = 'user0'
+
+ self.job_data_cloned = copy.deepcopy(self.job_data)
+ self.job_data_cloned['name'] = 'cloned'
+ self.job_data_cloned['hosts'] = [u'host0']
+ self.job_data_cloned['meta_hosts'] = []
+
+
+ def test_backward_compat(self):
+ self.run_cmd(argv=['atest', 'job', 'create', '--clone', '42',
+ '-r', 'cloned'],
+ rpcs=[('get_info_for_clone', {'id': '42',
+ 'preserve_metahosts': True},
+ True,
+ {u'atomic_group_name': None,
+ u'hosts': [{u'acls': [u'acl0'],
+ u'atomic_group': None,
+ u'attributes': {},
+ u'dirty': False,
+ u'hostname': u'host0',
+ u'id': 4378,
+ u'invalid': False,
+ u'labels': [u'label0', u'label1'],
+ u'lock_time': None,
+ u'locked': False,
+ u'locked_by': None,
+ u'other_labels': u'label0, label1',
+ u'platform': u'plat0',
+ u'protection': u'Repair software only',
+ u'status': u'Ready',
+ u'synch_id': None}],
+ u'job': self.job_data_clone_info,
+ u'meta_host_counts': {}}),
+ ('create_job', self.job_data_cloned, True, 43)],
+ out_words_ok=['Created job', '43'])
+
+
+ def test_clone_reuse_hosts(self):
+ self.run_cmd(argv=['atest', 'job', 'clone', '--id', '42',
+ '-r', 'cloned'],
+ rpcs=[('get_info_for_clone', {'id': '42',
+ 'preserve_metahosts': True},
+ True,
+ {u'atomic_group_name': None,
+ u'hosts': [{u'acls': [u'acl0'],
+ u'atomic_group': None,
+ u'attributes': {},
+ u'dirty': False,
+ u'hostname': u'host0',
+ u'id': 4378,
+ u'invalid': False,
+ u'labels': [u'label0', u'label1'],
+ u'lock_time': None,
+ u'locked': False,
+ u'locked_by': None,
+ u'other_labels': u'label0, label1',
+ u'platform': u'plat0',
+ u'protection': u'Repair software only',
+ u'status': u'Ready',
+ u'synch_id': None}],
+ u'job': self.job_data_clone_info,
+ u'meta_host_counts': {}}),
+ ('create_job', self.job_data_cloned, True, 43)],
+ out_words_ok=['Created job', '43'])
+
+
+ def test_clone_no_hosts(self):
+ self.run_cmd(argv=['atest', 'job', 'clone', '--id', '42', 'cloned'],
+ exit_code=1,
+ out_words_ok=['usage'],
+ err_words_ok=['machine'])
+
+
+ # TODO(jmeurin) Add tests with hosts once the code is working
+
+
class job_abort_unittest(cli_mock.cli_unittest):
results = [{u'status_counts': {u'Aborted': 1}, u'control_file':
u"job.run_test('sleeptest')\n", u'name': u'test_job0',
diff --git a/cli/topic_common.py b/cli/topic_common.py
index 0070349..1e1e08c 100755
--- a/cli/topic_common.py
+++ b/cli/topic_common.py
@@ -367,6 +367,11 @@
self.msg_items)
+ def backward_compatibility(self, action, argv):
+        """To be overridden by subclasses whose syntax has changed."""
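+        # Default is a no-op; see job.backward_compatibility() in cli/job.py.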
+ return action
+
+
def parse(self, parse_info=[], req_items=None):
"""parse_info is a list of item_parse_info objects