1*9c5db199SXin Li# 2*9c5db199SXin Li# Copyright 2008 Google Inc. All Rights Reserved. 3*9c5db199SXin Li 4*9c5db199SXin Li""" 5*9c5db199SXin LiThe job module contains the objects and methods used to 6*9c5db199SXin Limanage jobs in Autotest. 7*9c5db199SXin Li 8*9c5db199SXin LiThe valid actions are: 9*9c5db199SXin Lilist: lists job(s) 10*9c5db199SXin Licreate: create a job 11*9c5db199SXin Liabort: abort job(s) 12*9c5db199SXin Listat: detailed listing of job(s) 13*9c5db199SXin Li 14*9c5db199SXin LiThe common options are: 15*9c5db199SXin Li 16*9c5db199SXin LiSee topic_common.py for a High Level Design and Algorithm. 17*9c5db199SXin Li""" 18*9c5db199SXin Li 19*9c5db199SXin Li# pylint: disable=missing-docstring 20*9c5db199SXin Li 21*9c5db199SXin Lifrom __future__ import print_function 22*9c5db199SXin Li 23*9c5db199SXin Liimport getpass, re 24*9c5db199SXin Lifrom autotest_lib.cli import topic_common, action_common 25*9c5db199SXin Lifrom autotest_lib.client.common_lib import control_data 26*9c5db199SXin Lifrom autotest_lib.client.common_lib import priorities 27*9c5db199SXin Li 28*9c5db199SXin Li 29*9c5db199SXin Liclass job(topic_common.atest): 30*9c5db199SXin Li """Job class 31*9c5db199SXin Li atest job [create|clone|list|stat|abort] <options>""" 32*9c5db199SXin Li usage_action = '[create|clone|list|stat|abort]' 33*9c5db199SXin Li topic = msg_topic = 'job' 34*9c5db199SXin Li msg_items = '<job_ids>' 35*9c5db199SXin Li 36*9c5db199SXin Li 37*9c5db199SXin Li def _convert_status(self, results): 38*9c5db199SXin Li for result in results: 39*9c5db199SXin Li total = sum(result['status_counts'].values()) 40*9c5db199SXin Li status = ['%s=%s(%.1f%%)' % (key, val, 100.0*float(val)/total) 41*9c5db199SXin Li for key, val in result['status_counts'].iteritems()] 42*9c5db199SXin Li status.sort() 43*9c5db199SXin Li result['status_counts'] = ', '.join(status) 44*9c5db199SXin Li 45*9c5db199SXin Li 46*9c5db199SXin Li def backward_compatibility(self, action, argv): 47*9c5db199SXin Li """ 'job 
class job_help(job):
    """Just here to get the atest logic working.
    Usage is set by its parent"""
    pass


class job_list_stat(action_common.atest_list, job):
    """Shared plumbing for the 'list' and 'stat' actions."""

    def __init__(self):
        super(job_list_stat, self).__init__()

        self.topic_parse_info = topic_common.item_parse_info(
                attribute_name='jobs',
                use_leftover=True)


    def __split_jobs_between_ids_names(self):
        """Partition self.jobs into (numeric job IDs, job names)."""
        job_ids = []
        job_names = []

        # Sort between job IDs and names
        for job_id in self.jobs:
            if job_id.isdigit():
                job_ids.append(job_id)
            else:
                job_names.append(job_id)
        return (job_ids, job_names)


    def execute_on_ids_and_names(self, op, filters=None,
                                 check_results=None,
                                 tag_id='id__in', tag_name='name__in'):
        """Run RPC `op` for self.jobs, querying IDs and names separately.

        @param op: RPC operation name.
        @param filters: extra RPC filters; None means no extra filters.
        @param check_results: forwarded to atest_list.execute(); None means
                the default {'id__in': 'id', 'name__in': 'id'}.
        @param tag_id: filter key used for the numeric-ID query.
        @param tag_name: filter key used for the name query.
        @returns the concatenated results of the RPC calls.
        """
        # None-sentinel defaults instead of mutable default arguments
        # ({} / dict literals evaluated once at def time).
        if filters is None:
            filters = {}
        if check_results is None:
            check_results = {'id__in': 'id', 'name__in': 'id'}

        if not self.jobs:
            # Want everything
            return super(job_list_stat, self).execute(op=op, filters=filters)

        all_jobs = []
        (job_ids, job_names) = self.__split_jobs_between_ids_names()

        for items, tag in [(job_ids, tag_id),
                           (job_names, tag_name)]:
            if items:
                new_filters = filters.copy()
                new_filters[tag] = items
                jobs = super(job_list_stat,
                             self).execute(op=op,
                                           filters=new_filters,
                                           check_results=check_results)
                all_jobs.extend(jobs)

        return all_jobs
class job_list(job_list_stat):
    """atest job list [<jobs>] [--all] [--running] [--user <username>]"""

    def __init__(self):
        super(job_list, self).__init__()
        self.parser.add_option('-a', '--all', help='List jobs for all '
                               'users.', action='store_true', default=False)
        self.parser.add_option('-r', '--running', help='List only running '
                               'jobs', action='store_true')
        self.parser.add_option('-u', '--user', help='List jobs for given '
                               'user', type='string')


    def parse(self):
        """Record the listing filters; default to the invoking user's jobs."""
        options, leftover = super(job_list, self).parse()
        self.all = options.all
        self.data['running'] = options.running
        if options.user and options.all:
            self.invalid_syntax('Only specify --all or --user, not both.')
        elif options.user:
            self.data['owner'] = options.user
        elif not options.all and not self.jobs:
            # Nothing selected explicitly: restrict to the current user.
            self.data['owner'] = getpass.getuser()

        return options, leftover


    def execute(self):
        return self.execute_on_ids_and_names(op='get_jobs_summary',
                                             filters=self.data)


    def output(self, results):
        """Print the job summary table; --verbose adds extra columns."""
        keys = ['id', 'owner', 'name', 'status_counts']
        if self.verbose:
            keys += ['priority', 'control_type', 'created_on']
        self._convert_status(results)
        super(job_list, self).output(results, keys)
class job_stat(job_list_stat):
    """atest job stat <job>"""
    usage_action = 'stat'

    def __init__(self):
        super(job_stat, self).__init__()
        self.parser.add_option('-f', '--control-file',
                               help='Display the control file',
                               action='store_true', default=False)
        self.parser.add_option('-N', '--list-hosts',
                               help='Display only a list of hosts',
                               action='store_true')
        self.parser.add_option('-s', '--list-hosts-status',
                               help='Display only the hosts in these statuses '
                               'for a job.', action='store')


    def parse(self):
        """Parse stat options; at least one job is required."""
        status_list = topic_common.item_parse_info(
                attribute_name='status_list',
                inline_option='list_hosts_status')
        options, leftover = super(job_stat, self).parse([status_list],
                                                        req_items='jobs')

        if not self.jobs:
            self.invalid_syntax('Must specify at least one job.')

        self.show_control_file = options.control_file
        self.list_hosts = options.list_hosts

        if self.list_hosts and self.status_list:
            self.invalid_syntax('--list-hosts is implicit when using '
                                '--list-hosts-status.')
        if len(self.jobs) > 1 and (self.list_hosts or self.status_list):
            self.invalid_syntax('--list-hosts and --list-hosts-status should '
                                'only be used on a single job.')

        return options, leftover


    def _merge_results(self, summary, qes):
        """Fold host queue entries into the job summaries.

        Adds 'hosts', 'hosts_status' and (when --list-hosts-status was
        given) 'hosts_selected_status' fields to each job dict in summary.

        @param summary: job dicts from get_jobs_summary.
        @param qes: host queue entry dicts from get_host_queue_entries.
        @returns summary, mutated in place.
        """
        hosts_status = {}
        for qe in qes:
            if qe['host']:
                job_id = qe['job']['id']
                hostname = qe['host']['hostname']
                hosts_status.setdefault(job_id,
                                        {}).setdefault(qe['status'],
                                                       []).append(hostname)

        for job in summary:
            job_id = job['id']
            # 'in' / items() / values() instead of the Python 2-only
            # has_key() / iteritems() / itervalues().
            if job_id in hosts_status:
                this_job = hosts_status[job_id]
                job['hosts'] = ' '.join(' '.join(host) for host in
                                        this_job.values())
                host_per_status = ['%s="%s"' % (status, ' '.join(host))
                                   for status, host in this_job.items()]
                job['hosts_status'] = ', '.join(host_per_status)
                if self.status_list:
                    statuses = set(s.lower() for s in self.status_list)
                    all_hosts = [s for s in host_per_status if s.split('=',
                                 1)[0].lower() in statuses]
                    job['hosts_selected_status'] = '\n'.join(all_hosts)
            else:
                job['hosts_status'] = ''

            if not job.get('hosts'):
                self.generic_error('Job has unassigned meta-hosts, '
                                   'try again shortly.')

        return summary


    def execute(self):
        summary = self.execute_on_ids_and_names(op='get_jobs_summary')

        # Get the real hostnames
        qes = self.execute_on_ids_and_names(op='get_host_queue_entries',
                                            check_results={},
                                            tag_id='job__in',
                                            tag_name='job__name__in')

        self._convert_status(summary)

        return self._merge_results(summary, qes)


    def output(self, results):
        """Pick the output columns based on the listing flags, then print."""
        if self.list_hosts:
            keys = ['hosts']
        elif self.status_list:
            keys = ['hosts_selected_status']
        elif not self.verbose:
            keys = ['id', 'name', 'priority', 'status_counts', 'hosts_status']
        else:
            keys = ['id', 'name', 'priority', 'status_counts', 'hosts_status',
                    'owner', 'control_type', 'synch_count', 'created_on',
                    'run_verify', 'reboot_before', 'reboot_after',
                    'parse_failed_repair']

        if self.show_control_file:
            keys.append('control_file')

        super(job_stat, self).output(results, keys)
class job_create_or_clone(action_common.atest_create, job):
    """Class containing the code common to the job create and clone actions"""
    msg_items = 'job_name'

    def __init__(self):
        super(job_create_or_clone, self).__init__()
        self.hosts = []
        self.data_item_key = 'name'
        self.parser.add_option('-p', '--priority',
                               help='Job priority (int)', type='int',
                               default=priorities.Priority.DEFAULT)
        self.parser.add_option('-b', '--labels',
                               help='Comma separated list of labels '
                               'to get machine list from.', default='')
        self.parser.add_option('-m', '--machine', help='List of machines to '
                               'run on')
        self.parser.add_option('-M', '--mlist',
                               help='File listing machines to use',
                               type='string', metavar='MACHINE_FLIST')
        self.parser.add_option('--one-time-hosts',
                               help='List of one time hosts')
        self.parser.add_option('-e', '--email',
                               help='A comma seperated list of '
                               'email addresses to notify of job completion',
                               default='')


    def _parse_hosts(self, args):
        """ Parses the arguments to generate a list of hosts and meta_hosts
        A host is a regular name, a meta_host is n*label or *label.
        These can be mixed on the CLI, and separated by either commas or
        spaces, e.g.: 5*Machine_Label host0 5*Machine_Label2,host2 """

        hosts = []
        meta_hosts = []

        for arg in args:
            for host in arg.split(','):
                # Raw strings so '\w' stays a regex class ('\w' in a plain
                # string is a DeprecationWarning on Python 3.6+).
                if re.match(r'^[0-9]+[*]', host):
                    # 'n*label': expand to n copies of the label.
                    num, host = host.split('*', 1)
                    meta_hosts += int(num) * [host]
                else:
                    # Match once instead of matching the same regex twice.
                    star_match = re.match(r'^[*](\w*)', host)
                    if star_match:
                        meta_hosts.append(star_match.group(1))
                    elif host != '' and host not in hosts:
                        # Real hostname and not a duplicate
                        hosts.append(host)

        return (hosts, meta_hosts)


    def parse(self, parse_info=None):
        """Parse the options common to create and clone.

        @param parse_info: extra item_parse_info objects from subclasses;
                None means no extras (avoids a mutable default argument).
        @returns (options, leftover) from the parent parse().
        """
        if parse_info is None:
            parse_info = []
        host_info = topic_common.item_parse_info(attribute_name='hosts',
                                                 inline_option='machine',
                                                 filename_option='mlist')
        job_info = topic_common.item_parse_info(attribute_name='jobname',
                                                use_leftover=True)
        oth_info = topic_common.item_parse_info(attribute_name='one_time_hosts',
                                                inline_option='one_time_hosts')
        label_info = topic_common.item_parse_info(attribute_name='labels',
                                                  inline_option='labels')

        options, leftover = super(job_create_or_clone, self).parse(
                [host_info, job_info, oth_info, label_info] + parse_info,
                req_items='jobname')
        self.data = {
                'priority': options.priority,
        }
        jobname = getattr(self, 'jobname')
        if len(jobname) > 1:
            self.invalid_syntax('Too many arguments specified, only expected '
                                'to receive job name: %s' % jobname)
        self.jobname = jobname[0]

        if self.one_time_hosts:
            self.data['one_time_hosts'] = self.one_time_hosts

        if self.labels:
            # Expand each label into its member hosts via RPC.
            label_hosts = self.execute_rpc(op='get_hosts',
                                           multiple_labels=self.labels)
            for host in label_hosts:
                self.hosts.append(host['hostname'])

        self.data['name'] = self.jobname

        (self.data['hosts'],
         self.data['meta_hosts']) = self._parse_hosts(self.hosts)

        self.data['email_list'] = options.email

        return options, leftover


    def create_job(self):
        """Issue the create_job RPC and report '<name> (id <id>)'."""
        job_id = self.execute_rpc(op='create_job', **self.data)
        return ['%s (id %s)' % (self.jobname, job_id)]


    def get_items(self):
        return [self.jobname]
class job_create(job_create_or_clone):
    """atest job create [--priority <int>]
    [--synch_count] [--control-file </path/to/cfile>]
    [--on-server] [--test <test1,test2>]
    [--mlist </path/to/machinelist>] [--machine <host1 host2 host3>]
    [--labels <list of labels of machines to run on>]
    [--reboot_before <option>] [--reboot_after <option>]
    [--noverify] [--timeout <timeout>] [--max_runtime <max runtime>]
    [--one-time-hosts <hosts>] [--email <email>]
    [--dependencies <labels this job is dependent on>]
    [--parse-failed-repair <option>]
    [--image <http://path/to/image>] [--require-ssp]
    job_name

    Creating a job is rather different from the other create operations,
    so it only uses the __init__() and output() from its superclass.
    """
    op_action = 'create'

    def __init__(self):
        super(job_create, self).__init__()
        self.ctrl_file_data = {}
        self.parser.add_option('-y', '--synch_count', type=int,
                               help='Number of machines to use per autoserv '
                               'execution')
        self.parser.add_option('-f', '--control-file',
                               help='use this control file', metavar='FILE')
        self.parser.add_option('-s', '--server',
                               help='This is server-side job',
                               action='store_true', default=False)
        self.parser.add_option('-t', '--test',
                               help='List of tests to run')

        self.parser.add_option('-d', '--dependencies', help='Comma separated '
                               'list of labels this job is dependent on.',
                               default='')

        self.parser.add_option('-B', '--reboot_before',
                               help='Whether or not to reboot the machine '
                               'before the job (never/if dirty/always)',
                               type='choice',
                               choices=('never', 'if dirty', 'always'))
        self.parser.add_option('-a', '--reboot_after',
                               help='Whether or not to reboot the machine '
                               'after the job (never/if all tests passed/'
                               'always)',
                               type='choice',
                               choices=('never', 'if all tests passed',
                                        'always'))

        self.parser.add_option('--parse-failed-repair',
                               help='Whether or not to parse failed repair '
                               'results as part of the job',
                               type='choice',
                               choices=('true', 'false'))
        self.parser.add_option('-n', '--noverify',
                               help='Do not run verify for job',
                               default=False, action='store_true')
        self.parser.add_option('-o', '--timeout_mins',
                               help='Job timeout in minutes.',
                               metavar='TIMEOUT')
        self.parser.add_option('--max_runtime',
                               help='Job maximum runtime in minutes')

        self.parser.add_option('-i', '--image',
                               help='OS image to install before running the '
                               'test.')
        self.parser.add_option('--require-ssp',
                               help='Require server-side packaging',
                               default=False, action='store_true')


    def parse(self):
        """Validate the create options and populate self.data for the RPC."""
        deps_info = topic_common.item_parse_info(attribute_name='dependencies',
                                                 inline_option='dependencies')
        options, leftover = super(job_create, self).parse(
                parse_info=[deps_info])

        if (len(self.hosts) == 0 and not self.one_time_hosts
            and not options.labels):
            self.invalid_syntax('Must specify at least one machine.'
                                '(-m, -M, -b or --one-time-hosts).')
        if not options.control_file and not options.test:
            self.invalid_syntax('Must specify either --test or --control-file'
                                ' to create a job.')
        if options.control_file and options.test:
            self.invalid_syntax('Can only specify one of --control-file or '
                                '--test, not both.')
        if options.control_file:
            try:
                # 'with' guarantees the file is closed; replaces the manual
                # try/finally/close of the original.
                with open(options.control_file) as control_file_f:
                    control_file_data = control_file_f.read()
            except IOError:
                self.generic_error('Unable to read from specified '
                                   'control-file: %s' % options.control_file)
            self.data['control_file'] = control_file_data
        if options.test:
            if options.server:
                self.invalid_syntax('If you specify tests, then the '
                                    'client/server setting is implicit and '
                                    'cannot be overriden.')
            tests = [t.strip() for t in options.test.split(',') if t.strip()]
            self.ctrl_file_data['tests'] = tests

        if options.image:
            self.data['image'] = options.image

        if options.reboot_before:
            self.data['reboot_before'] = options.reboot_before.capitalize()
        if options.reboot_after:
            self.data['reboot_after'] = options.reboot_after.capitalize()
        if options.parse_failed_repair:
            self.data['parse_failed_repair'] = (
                    options.parse_failed_repair == 'true')
        if options.noverify:
            self.data['run_verify'] = False
        if options.timeout_mins:
            self.data['timeout_mins'] = options.timeout_mins
        if options.max_runtime:
            self.data['max_runtime_mins'] = options.max_runtime

        self.data['dependencies'] = self.dependencies

        if options.synch_count:
            self.data['synch_count'] = options.synch_count
        if options.server:
            self.data['control_type'] = control_data.CONTROL_TYPE_NAMES.SERVER
        else:
            self.data['control_type'] = control_data.CONTROL_TYPE_NAMES.CLIENT

        self.data['require_ssp'] = options.require_ssp

        return options, leftover


    def execute(self):
        """Generate the control file if tests were given, then create."""
        if self.ctrl_file_data:
            cf_info = self.execute_rpc(op='generate_control_file',
                                       item=self.jobname,
                                       **self.ctrl_file_data)

            self.data['control_file'] = cf_info['control_file']
            # An explicit --synch_count wins over the generated one.
            if 'synch_count' not in self.data:
                self.data['synch_count'] = cf_info['synch_count']
            if cf_info['is_server']:
                self.data['control_type'] = control_data.CONTROL_TYPE_NAMES.SERVER
            else:
                self.data['control_type'] = control_data.CONTROL_TYPE_NAMES.CLIENT

            # Get the union of the 2 sets of dependencies
            deps = set(self.data['dependencies'])
            deps = sorted(deps.union(cf_info['dependencies']))
            self.data['dependencies'] = list(deps)

        if 'synch_count' not in self.data:
            self.data['synch_count'] = 1

        return self.create_job()
class job_clone(job_create_or_clone):
    """atest job clone [--priority <int>]
    [--mlist </path/to/machinelist>] [--machine <host1 host2 host3>]
    [--labels <list of labels of machines to run on>]
    [--one-time-hosts <hosts>] [--email <email>]
    job_name

    Cloning a job is rather different from the other create operations,
    so it only uses the __init__() and output() from its superclass.
    """
    op_action = 'clone'
    usage_action = 'clone'

    def __init__(self):
        super(job_clone, self).__init__()
        self.parser.add_option('-i', '--id', help='Job id to clone',
                               default=False,
                               metavar='JOB_ID')
        self.parser.add_option('-r', '--reuse-hosts',
                               help='Use the exact same hosts as the '
                               'cloned job.',
                               action='store_true', default=False)


    def parse(self):
        """Ensure hosts are either reused from the clone or given anew."""
        options, leftover = super(job_clone, self).parse()

        self.clone_id = options.id
        self.reuse_hosts = options.reuse_hosts

        host_specified = self.hosts or self.one_time_hosts or options.labels
        if self.reuse_hosts and host_specified:
            self.invalid_syntax('Cannot specify hosts and reuse the same '
                                'ones as the cloned job.')

        if not (self.reuse_hosts or host_specified):
            self.invalid_syntax('Must reuse or specify at least one '
                                'machine (-r, -m, -M, -b or '
                                '--one-time-hosts).')

        return options, leftover


    def execute(self):
        """Fetch the cloned job's info, scrub it, and create the new job."""
        clone_info = self.execute_rpc(op='get_info_for_clone',
                                      id=self.clone_id,
                                      preserve_metahosts=self.reuse_hosts)

        # Remove fields from clone data that cannot be reused
        for field in ('name', 'created_on', 'id', 'owner'):
            del clone_info['job'][field]

        # Also remove parameterized_job field, as the feature still is
        # incomplete, this tool does not attempt to support it for now,
        # it uses a different API function and it breaks create_job().
        # ('in' rather than the Python 2-only has_key().)
        if 'parameterized_job' in clone_info['job']:
            del clone_info['job']['parameterized_job']

        # Keyword args cannot be unicode strings
        # (items() rather than the Python 2-only iteritems()).
        self.data.update((str(key), val)
                         for key, val in clone_info['job'].items())

        if self.reuse_hosts:
            # Convert host list from clone info that can be used for job_create
            for label, qty in clone_info['meta_host_counts'].items():
                self.data['meta_hosts'].extend([label] * qty)

            self.data['hosts'].extend(host['hostname']
                                      for host in clone_info['hosts'])

        return self.create_job()
class job_abort(job, action_common.atest_delete):
    """atest job abort <job(s)>"""
    usage_action = op_action = 'abort'
    msg_done = 'Aborted'

    def parse(self):
        """Collect the job IDs to abort; at least one is required."""
        job_info = topic_common.item_parse_info(attribute_name='jobids',
                                                use_leftover=True)
        options, leftover = super(job_abort, self).parse([job_info],
                                                         req_items='jobids')
        # Bug fix: every other action's parse() returns (options, leftover);
        # the original fell off the end here and returned None.
        return options, leftover


    def execute(self):
        data = {'job__id__in': self.jobids}
        self.execute_rpc(op='abort_host_queue_entries', **data)
        print('Aborting jobs: %s' % ', '.join(self.jobids))


    def get_items(self):
        return self.jobids