"""The main job wrapper

This is the core infrastructure.
"""

__author__ = """Copyright Andy Whitcroft, Martin J. Bligh 2006"""

# standard stuff
import os, sys, re, pickle, shutil
# autotest stuff
from autotest_utils import *
from parallel import *
import kernel, test, profilers, barrier, filesystem, fd_stack, boottool
import harness, config

class job:
	"""The actual job against which we do everything.

	Properties:
		autodir
			The top level autotest directory (/usr/local/autotest).
			Comes from os.environ['AUTODIR'].
		bindir
			<autodir>/bin/
		testdir
			<autodir>/tests/
		profdir
			<autodir>/profilers/
		tmpdir
			<autodir>/tmp/
		resultdir
			<autodir>/results/<jobtag>
		stdout
			fd_stack object for stdout
		stderr
			fd_stack object for stderr
		profilers
			the profilers object for this job
		harness
			the server harness object for this job
		config
			the job configuration for this job
	"""
44
apw96da1a42006-11-02 00:23:18 +000045 def __init__(self, control, jobtag, cont):
mblighc86b0b42006-07-28 17:35:28 +000046 """
47 control
48 The control file (pathname of)
49 jobtag
50 The job tag string (eg "default")
apw96da1a42006-11-02 00:23:18 +000051 cont
52 If this is the continuation of this job
mblighc86b0b42006-07-28 17:35:28 +000053 """
		self.autodir = os.environ['AUTODIR']
		self.bindir = self.autodir + '/bin'
		self.testdir = self.autodir + '/tests'
		self.profdir = self.autodir + '/profilers'
		self.tmpdir = self.autodir + '/tmp'
		self.resultdir = self.autodir + '/results/' + jobtag

		if not cont:
			if os.path.exists(self.tmpdir):
				system('rm -rf ' + self.tmpdir)
			os.mkdir(self.tmpdir)

			if os.path.exists(self.resultdir):
				system('rm -rf ' + self.resultdir)
			os.mkdir(self.resultdir)

			os.mkdir(self.resultdir + "/debug")
			os.mkdir(self.resultdir + "/analysis")
			os.mkdir(self.resultdir + "/sysinfo")
			shutil.copyfile(control,
				os.path.join(self.resultdir, 'control'))

		self.control = control
		self.jobtag = jobtag

		self.stdout = fd_stack.fd_stack(1, sys.stdout)
		self.stderr = fd_stack.fd_stack(2, sys.stderr)

		self.harness = harness.select('', self)

		self.config = config.config(self)

		self.profilers = profilers.profilers(self)

		try:
			self.bootloader = boottool.boottool()
		except:
			pass

		pwd = os.getcwd()
		os.chdir(self.resultdir + "/sysinfo")
		system(self.bindir + '/sysinfo.py')
		os.chdir(pwd)


	def harness_select(self, which):
		self.harness = harness.select(which, self)


	def config_set(self, name, value):
		self.config.set(name, value)


	def config_get(self, name):
		return self.config.get(name)


	def kernel(self, base_tree, results_dir = '', tmp_dir = '', leave = False):
		"""Summon a kernel object"""
		if not tmp_dir:
			tmp_dir = self.tmpdir + '/build'
		if not os.path.exists(tmp_dir):
			os.mkdir(tmp_dir)
		if not os.path.isdir(tmp_dir):
			raise "Temp dir (%s) is not a dir - args backwards?" \
								% self.tmpdir

		# We label the first build "build" and then subsequent ones
		# as "build.2", "build.3", etc. Whilst this is a little bit
		# inconsistent, 99.9% of jobs will only have one build
		# (that's not done as kernbench, sparse, or buildtest),
		# so it works out much cleaner. One of life's compromises.
		if not results_dir:
			results_dir = os.path.join(self.resultdir, 'build')
			i = 2
			while os.path.exists(results_dir):
				results_dir = os.path.join(self.resultdir,
							   'build.%d' % i)
				i += 1
		if not os.path.exists(results_dir):
			os.mkdir(results_dir)

		return kernel.kernel(self, base_tree, results_dir, tmp_dir, leave)
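
	# A minimal control-file sketch (hypothetical paths; assumes the
	# kernel object's usual config()/build()/boot() interface):
	#
	#	testkernel = job.kernel('/usr/local/src/linux-2.6.18.tar.bz2')
	#	testkernel.config('/usr/local/config/mymachine.config')
	#	testkernel.build()
	#	testkernel.boot()
	#
	# The first build's results land in <resultdir>/build, a second
	# build's in <resultdir>/build.2, and so on.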


	def barrier(self, *args):
		"""Create a barrier object"""
		return barrier.barrier(*args)


	def setup_dep(self, deps):
		"""Set up the dependencies for this test.

		deps is a list of libraries required for this test.
		"""
		for dep in deps:
			try:
				os.chdir(self.autodir + '/deps/' + dep)
				system('./' + dep + '.py')
			except:
				error = "setting up dependency " + dep + "\n"
				raise UnhandledError(error)
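
	# Example (hypothetical dependency name):
	#
	#	job.setup_dep(['libaio'])
	#
	# runs <autodir>/deps/libaio/libaio.py to build and install that
	# library before the test that needs it.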


	def __runtest(self, tag, url, test_args):
		try:
			test.runtest(self, tag, url, test_args)
		except AutotestError:
			raise
		except:
			raise UnhandledError('running test ' + \
				self.__class__.__name__ + "\n")


	def runtest(self, tag, url, *test_args):
		"""Summon a test object and run it.

		tag
			tag to add to testname
		url
			url of the test to run
		"""

		(group, name) = test.testname(url)
		if tag:
			name += '.' + tag
		try:
			try:
				self.__runtest(tag, url, test_args)
			except Exception, detail:
				self.record("FAIL " + name + " " + \
					detail.__str__() + "\n")

				raise
			else:
				self.record("GOOD " + name + \
					" Completed Successfully\n")
		except TestError:
			return 0
		except:
			raise
		else:
			return 1
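
	# Example (hypothetical): from a control file,
	#
	#	job.runtest('smoke', 'dbench')
	#
	# runs the dbench test tagged "dbench.smoke" and records either a
	# "GOOD dbench.smoke ..." or "FAIL dbench.smoke ..." status line.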


	def filesystem(self, device, mountpoint = None):
		if not mountpoint:
			mountpoint = self.tmpdir
		return filesystem.filesystem(device, mountpoint)


	def reboot(self, tag='autotest'):
		self.harness.run_reboot()
		self.bootloader.boot_once(tag)
		system("reboot")
		self.quit()


	def noop(self, text):
		print "job: noop: " + text


	# Job control primitives.

	def __parallel_execute(self, func, *args):
		func(*args)


	def parallel(self, *tasklist):
		"""Run tasks in parallel"""

		pids = []
		for task in tasklist:
			pids.append(fork_start(self.resultdir,
					lambda: self.__parallel_execute(*task)))
		for pid in pids:
			fork_waitfor(self.resultdir, pid)
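
	# Example (hypothetical): each task is a sequence whose first element
	# is a callable and whose remaining elements are its arguments, e.g.
	#
	#	job.parallel([job.runtest, 'one', 'dbench'],
	#		     [job.runtest, 'two', 'bonnie'])
	#
	# Each task runs in its own forked child, with output collected
	# under <resultdir>.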


	def quit(self):
		# XXX: should have a better name.
		self.harness.run_pause()
		raise JobContinue("more to come")


	def complete(self, status):
		"""Clean up and exit"""
		# We are about to exit 'complete' so clean up the control file.
		try:
			os.unlink(self.control + '.state')
		except:
			pass
		self.harness.run_complete(status)
		sys.exit(status)


	steps = []
	def next_step(self, step):
		"""Define the next step"""
		step[0] = step[0].__name__
		self.steps.append(step)
		pickle.dump(self.steps, open(self.control + '.state', 'w'))


	def step_engine(self):
		"""The stepping engine -- if the control file defines
		step_init we will be using this engine to drive multiple
		runs, executing one step at a time.
		"""
		lcl = dict({'job': self})

		str = """
from error import *
from autotest_utils import *
"""
		exec(str, lcl, lcl)
		execfile(self.control, lcl, lcl)

		# If there is a mid-job state file, load that in and continue
		# where it indicates. Otherwise start stepping at the passed
		# entry.
		try:
			self.steps = pickle.load(open(self.control + '.state',
						      'r'))
		except:
			if lcl.has_key('step_init'):
				self.next_step([lcl['step_init']])

		# Run the step list.
		while len(self.steps) > 0:
			step = self.steps.pop(0)
			pickle.dump(self.steps, open(self.control + '.state',
						     'w'))

			cmd = step.pop(0)
			cmd = lcl[cmd]
			lcl['__cmd'] = cmd
			lcl['__args'] = step
			exec("__cmd(*__args)", lcl, lcl)

		# All done; clean up and exit.
		self.complete(0)
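
	# A control-file sketch (hypothetical) showing the step engine in
	# use; step_init queues the next step, and the pickled
	# <control>.state file lets the job resume at that step after the
	# reboot:
	#
	#	def step_init():
	#		job.next_step([step_test])
	#		testkernel = job.kernel('2.6.18')
	#		testkernel.build()
	#		testkernel.boot()
	#
	#	def step_test():
	#		job.runtest('', 'kernbench')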


	def record(self, msg):
		"""Record job-level status"""

		# Ensure any continuation lines are marked so that we can
		# detect them in the status file and keep it parsable.
		msg = msg.rstrip()
		mfix = re.compile('\n')
		msg = mfix.sub("\n ", msg)

		self.harness.test_status(msg)
		print msg
		status = self.resultdir + "/status"
		fd = file(status, "a")
		fd.write(msg + "\n")
		fd.close()
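
	# For example (hypothetical message), record("FAIL mytest\noops\nmore")
	# appends the following to <resultdir>/status, with continuation
	# lines indented so that each record starts at column zero:
	#
	#	FAIL mytest
	#	 oops
	#	 more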


def runjob(control, cont = False, tag = "default"):
	"""The main interface to this module

	control
		The control file to use for this job.
	cont
		Whether this is the continuation of a previously started job.
	tag
		The job tag string (eg "default").
	"""
	state = control + '.state'

	# Instantiate the job object ready for the control file.
	myjob = None
	try:
		# Check that the control file is valid.
		if not os.path.exists(control):
			raise JobError(control + ": control file not found")

		# When continuing, the job is complete when there is no
		# state file; make sure we don't try to continue.
		if cont and not os.path.exists(state):
			sys.exit(1)
		if cont == False and os.path.exists(state):
			os.unlink(state)

		myjob = job(control, tag, cont)

		# Load in the user's control file, which may do any one of:
		#  1) execute in toto
		#  2) define steps, and select the first via next_step()
		myjob.step_engine()

		# If we get here, then we assume the job is complete and good.
		myjob.complete(0)

	except JobContinue:
		sys.exit(5)

	except JobError, instance:
		print "JOB ERROR: " + instance.args[0]
		if myjob != None:
			myjob.complete(1)

	except:
		# Ensure we cannot continue this job; it is in rictus.
		if os.path.exists(state):
			os.unlink(state)
		raise
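
# Typical invocation sketch (hypothetical path and module name; normally
# this is driven by the autotest wrapper rather than called by hand):
#
#	import job
#	job.runjob('/usr/local/autotest/control', cont = False, tag = 'default')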