#!/usr/bin/python

# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""An interactive shell for crbug_crawler."""
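#
# Example session (illustrative only; the label and query values below are
# made up, and real output depends on the live issue tracker):
#
#     $ ./crbug_shell.py
#     (Cmd) label_filter autofiled
#     Adding label autofiled
#     (Cmd) query_filter flaky
#     Adding query flaky
#     (Cmd) show_filters
#     queries: ['flaky'], labels: ['autofiled']
#     (Cmd) run_filter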

import cmd
import logging
import os
import sys

import common
import crbug_crawler

from autotest_lib.client.common_lib import global_config
from autotest_lib.server.cros.dynamic_suite import reporting

try:
    from oauth2client import file as oauth_file
    from oauth2client import client
    from oauth2client import tools
except ImportError:
    logging.error('You do not have the appropriate oauth2client libraries '
            'required for authorization. Run '
            '<autotest_checkout>/utils/build_externals.py or pip install '
            'oauth2client.')
    sys.exit(1)


def check_auth():
    """Checks if valid oauth credentials exist on the system.

    If valid credentials aren't found on the client they're generated,
    if possible, using the client_id and client_secret from the
    shadow_config.
    """
    shadow_config = os.path.join(common.autotest_dir, 'shadow_config.ini')
    if not os.path.exists(shadow_config):
        logging.error('Cannot authorize without a shadow_config that '
                'contains the appropriate client id for oauth. Contact '
                'chromeos-lab-infrastructure if you think this is a '
                'mistake.')
        sys.exit(1)

    auth_store = oauth_file.Storage(reporting.Reporter.get_creds_abspath())
    creds = auth_store.get()
    if creds is None or creds.invalid:
        client_id = global_config.global_config.get_config_value(
                reporting.BUG_CONFIG_SECTION, 'client_id', default='')
        client_secret = global_config.global_config.get_config_value(
                reporting.BUG_CONFIG_SECTION, 'client_secret', default='')
        scope = global_config.global_config.get_config_value(
                reporting.BUG_CONFIG_SECTION, 'scope', default='')
        if not client_id or not client_secret:
            logging.error('Unable to generate oauth credentials: client_id '
                    'is %r and client_secret is %r. If you do not require '
                    'oauth, run this script with --noauth (which may not be '
                    'implemented yet).', client_id, client_secret)
            sys.exit(1)

        input_flow = client.OAuth2WebServerFlow(client_id=client_id,
                client_secret=client_secret, scope=scope)
        logging.warning('Running oauth flow, make sure you use your chromium '
                'account during authorization.')
        creds = tools.run(input_flow, auth_store)


class CrBugShell(cmd.Cmd):
    def __init__(self, *args, **kwargs):
        # cmd.Cmd.__init__ doesn't understand our custom 'noauth' flag, so
        # pop it out of kwargs before delegating.
        noauth = kwargs.pop('noauth', False)
        cmd.Cmd.__init__(self, *args, **kwargs)
        self.queries = []
        self.labels = []
        if not noauth:
            check_auth()
        self.crawler = crbug_crawler.Crawler()


    def do_reap(self, line):
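        """Dump all auto-filed issues, up to an optional limit.

        @param line: An optional integer limiting how many issues are dumped.
        """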
        self.crawler.filter_issues(queries='',
                labels=self.crawler.all_autofiled_label)
        if line:
            try:
                limit = int(line)
            except ValueError:
                logging.warning('Reap can only take an integer argument.')
                return
        else:
            limit = None
        self.crawler.dump_issues(limit=limit)


    def do_query_filter(self, query):
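        """Add a query to the accumulated filters.

        @param query: The query string to filter issues with.
        """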
        print 'Adding query %s' % query
        self.queries.append(query)


    def do_label_filter(self, label):
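        """Add a label to the accumulated filters.

        @param label: The label string to filter issues with.
        """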
        print 'Adding label %s' % label
        self.labels.append(label)


    def do_show_filters(self, line=''):
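        """Print the accumulated query and label filters."""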
        print ('queries: %s, labels: %s' %
               (self.queries, self.labels))


    def do_reset(self, line):
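        """Clear the cached issues and all accumulated filters."""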
        self.crawler.issues = None
        self.queries = []
        self.labels = []


    def do_run_filter(self, line):
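        """Apply the accumulated filters and dump the matching issues."""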
        print 'Running the following filters:'
        self.do_show_filters()

        # Populate cached issues if this is a first time query. If we have
        # cached issues from an incremental search, filter those instead.
        if self.crawler.issues:
            self.crawler.issues = self.crawler.filter_labels(
                    self.crawler.issues, self.labels)
            self.crawler.issues = self.crawler.filter_queries(
                    self.crawler.issues, self.queries)
        else:
            self.crawler.filter_issues(queries=' '.join(self.queries),
                    labels=' '.join(self.labels))
        self.crawler.dump_issues()


if __name__ == '__main__':
    CrBugShell().cmdloop()
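
# The --noauth flag mentioned in check_auth() is not parsed anywhere yet.
# A minimal sketch of how it could be wired up (untested; the argparse
# wiring below is an assumption, not part of the original script):
#
#     import argparse
#     parser = argparse.ArgumentParser(description='Shell for crbug_crawler.')
#     parser.add_argument('--noauth', action='store_true',
#                         help='Skip the oauth authorization check.')
#     args = parser.parse_args()
#     CrBugShell(noauth=args.noauth).cmdloop()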