Merge tag 'android-11.0.0_r48' into int/11/fp3

Android 11.0.0 Release 48 (RD2A.211001.002)

* tag 'android-11.0.0_r48':
  Updating build-tools package revision number to ship a hot fix for b/169193089 Bug: 169193089 Change-Id: I025c0d96e77b7355baddac935779c6b27f3042e3 Test: N/A

Change-Id: I771e88b515d08ba8294a7a8f7d9a38b37ef52171
diff --git a/.gitignore b/.gitignore
index 42494a2..7195703 100644
--- a/.gitignore
+++ b/.gitignore
@@ -6,3 +6,4 @@
 *.iml
 .idea/
 gen/
+keystone
diff --git a/cmds/monkey/src/com/android/commands/monkey/Monkey.java b/cmds/monkey/src/com/android/commands/monkey/Monkey.java
index 85ce18d..fe5f57d 100644
--- a/cmds/monkey/src/com/android/commands/monkey/Monkey.java
+++ b/cmds/monkey/src/com/android/commands/monkey/Monkey.java
@@ -175,6 +175,9 @@
     /** Generate hprof reports before/after monkey runs */
     private boolean mGenerateHprof;
 
+    /** Disable all logs from monkey */
+    private boolean mDisableLogs = false;
+
     /** If set, only match error if this text appears in the description text. */
     private String mMatchDescription;
 
@@ -478,6 +481,9 @@
      * @param command Command line to execute.
      */
     private void commandLineReport(String reportName, String command) {
+        if (mDisableLogs) {
+            return;
+        }
         Logger.err.println(reportName + ":");
         Runtime rt = Runtime.getRuntime();
         Writer logOutput = null;
@@ -897,6 +903,11 @@
                 } else if (opt.equals("-h")) {
                     showUsage();
                     return false;
+                } else if (opt.equals("--disable-logs")) {
+                    Logger.err.println("** Monkey: all log disbled!");
+                    mDisableLogs = true;
+                    Logger.stdout = false;
+                    Logger.logcat = false;
                 } else {
                     Logger.err.println("** Error: Unknown option: " + opt);
                     showUsage();
diff --git a/vendor_snapshot/update.py b/vendor_snapshot/update.py
index 5697fe2..3057e9b 100644
--- a/vendor_snapshot/update.py
+++ b/vendor_snapshot/update.py
@@ -14,7 +14,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-"""Installs vendor snapshot under prebuilts/vendor/v{version}."""
+"""Unzips and installs the vendor snapshot."""
 
 import argparse
 import glob
@@ -30,22 +30,27 @@
 
 INDENT = ' ' * 4
 
+
 def get_notice_path(module_name):
-    return os.path.join('NOTICE_FILES', module_name+'.txt')
+    return os.path.join('NOTICE_FILES', module_name + '.txt')
+
 
 def get_target_arch(json_rel_path):
     return json_rel_path.split('/')[0]
 
+
 def get_arch(json_rel_path):
     return json_rel_path.split('/')[1].split('-')[1]
 
+
 def get_variation(json_rel_path):
     return json_rel_path.split('/')[2]
 
+
 # convert .bp prop dictionary to .bp prop string
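+# Illustrative example (module name is made up):
+#   gen_bp_prop({'name': 'libfoo', 'vendor': True}, INDENT) returns:
+#       name: "libfoo",
+#       vendor: true,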
 def gen_bp_prop(prop, ind):
     bp = ''
-    for key in prop:
+    for key in sorted(prop):
         val = prop[key]
 
         # Skip empty list or dict, rather than printing empty prop like
@@ -54,9 +59,9 @@
             if len(val) == 0:
                 continue
 
-        bp += ind + key + ": "
+        bp += ind + key + ': '
         if type(val) == bool:
-            bp += "true,\n" if val else "false,\n"
+            bp += 'true,\n' if val else 'false,\n'
         elif type(val) == str:
             bp += '"%s",\n' % val
         elif type(val) == list:
@@ -72,6 +77,7 @@
             raise TypeError('unsupported type %s for gen_bp_prop' % type(val))
     return bp
 
+
 # Remove non-existent dirs from given list. Emits warning for such dirs.
 def remove_invalid_dirs(paths, bp_dir, module_name):
     ret = []
@@ -79,26 +85,40 @@
         if os.path.isdir(os.path.join(bp_dir, path)):
             ret.append(path)
         else:
-            logging.warning(
-                'Dir "%s" of module "%s" does not exist' % (path, module_name))
+            logging.warning('Dir "%s" of module "%s" does not exist', path,
+                            module_name)
     return ret
 
+
 JSON_TO_BP = {
-    'ModuleName':          'name',
+    'ModuleName': 'name',
     'RelativeInstallPath': 'relative_install_path',
-    'ExportedDirs':        'export_include_dirs',
-    'ExportedSystemDirs':  'export_system_include_dirs',
-    'ExportedFlags':       'export_flags',
-    'SanitizeMinimalDep':  'sanitize_minimal_dep',
-    'SanitizeUbsanDep':    'sanitize_ubsan_dep',
-    'Symlinks':            'symlinks',
-    'InitRc':              'init_rc',
-    'VintfFragments':      'vintf_fragments',
-    'SharedLibs':          'shared_libs',
-    'RuntimeLibs':         'runtime_libs',
-    'Required':            'required',
+    'ExportedDirs': 'export_include_dirs',
+    'ExportedSystemDirs': 'export_system_include_dirs',
+    'ExportedFlags': 'export_flags',
+    'Sanitize': 'sanitize',
+    'SanitizeMinimalDep': 'sanitize_minimal_dep',
+    'SanitizeUbsanDep': 'sanitize_ubsan_dep',
+    # TODO(b/181815415) remove is_llndk when possible
+    'IsLlndk': 'is_llndk',
+    'Symlinks': 'symlinks',
+    'InitRc': 'init_rc',
+    'VintfFragments': 'vintf_fragments',
+    'SharedLibs': 'shared_libs',
+    'RuntimeLibs': 'runtime_libs',
+    'Required': 'required',
 }
 
+SANITIZER_VARIANT_PROPS = {
+    'export_include_dirs',
+    'export_system_include_dirs',
+    'export_flags',
+    'sanitize_minimal_dep',
+    'sanitize_ubsan_dep',
+    'src',
+}
+
+
 # Converts parsed json dictionary (which is intermediate) to Android.bp prop
 # dictionary. This validates paths such as include directories and init_rc
 # files while converting.
@@ -121,17 +141,18 @@
         if key in JSON_TO_BP:
             ret[JSON_TO_BP[key]] = prop[key]
         else:
-            logging.warning(
-                'Unknown prop "%s" of module "%s"' % (key, module_name))
+            logging.warning('Unknown prop "%s" of module "%s"', key,
+                            module_name)
 
     return ret
 
-def gen_bp_module(variation, name, version, target_arch, arch_props, bp_dir):
+
+def gen_bp_module(image, variation, name, version, target_arch, arch_props, bp_dir):
     prop = {
         # These three are common for all snapshot modules.
         'version': str(version),
         'target_arch': target_arch,
-        'vendor': True,
+        image: True,
         'arch': {},
     }
 
@@ -144,55 +165,57 @@
                 common_prop[k] = arch_props[arch][k]
             continue
         for k in list(common_prop.keys()):
-            if not k in arch_props[arch] or common_prop[k] != arch_props[arch][k]:
+            if k not in arch_props[arch] or common_prop[k] != arch_props[arch][k]:
                 del common_prop[k]
 
-    # Forcing src to be arch_props prevents 32-bit only modules to be used as
-    # 64-bit modules, and vice versa.
-    if 'src' in common_prop:
-        del common_prop['src']
+    # Some keys have to stay in arch_props to prevent 32-bit-only modules from
+    # being used as 64-bit modules, and vice versa.
+    for arch_prop_key in ['src', 'cfi']:
+        if arch_prop_key in common_prop:
+            del common_prop[arch_prop_key]
     prop.update(common_prop)
 
+    stem32 = stem64 = ''
+
     for arch in arch_props:
         for k in common_prop:
             if k in arch_props[arch]:
                 del arch_props[arch][k]
         prop['arch'][arch] = arch_props[arch]
+        # Record the stem for executable binary snapshots.
+        # We don't check the existence of 'src'; src must exist for executables.
+        if variation == 'binary':
+            if '64' in arch:  # arm64, x86_64
+                stem64 = os.path.basename(arch_props[arch]['src'])
+            else:
+                stem32 = os.path.basename(arch_props[arch]['src'])
 
-    bp = 'vendor_snapshot_%s {\n' % variation
+    # For binary snapshots, compile_multilib must be set to 'both' in order to
+    # install both variants. Prefer the 64-bit one if the stems collide and
+    # installing both is impossible.
+    if variation == 'binary':
+        if stem32 and stem64:
+            if stem32 == stem64:
+                prop['compile_multilib'] = 'first'
+            else:
+                prop['compile_multilib'] = 'both'
+        elif stem32:
+            prop['compile_multilib'] = '32'
+        elif stem64:
+            prop['compile_multilib'] = '64'
+
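+    # e.g. 'vendor_snapshot_shared' or 'recovery_snapshot_binary' (illustrative)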
+    bp = '%s_snapshot_%s {\n' % (image, variation)
     bp += gen_bp_prop(prop, INDENT)
     bp += '}\n\n'
     return bp
 
-def get_args():
-    parser = argparse.ArgumentParser()
-    parser.add_argument(
-        'snapshot_version',
-        type=int,
-        help='Vendor snapshot version to install, e.g. "30".')
-    parser.add_argument(
-        '-v',
-        '--verbose',
-        action='count',
-        default=0,
-        help='Increase output verbosity, e.g. "-v", "-vv".')
-    return parser.parse_args()
 
-def main():
-    """Program entry point."""
-    args = get_args()
-    verbose_map = (logging.WARNING, logging.INFO, logging.DEBUG)
-    verbosity = min(args.verbose, 2)
-    logging.basicConfig(
-        format='%(levelname)-8s [%(filename)s:%(lineno)d] %(message)s',
-        level=verbose_map[verbosity])
-    install_dir = os.path.join('prebuilts', 'vendor', 'v'+str(args.snapshot_version))
-
+def build_props(install_dir):
     # props[target_arch]["static"|"shared"|"binary"|"header"][name][arch] : json
     props = dict()
 
     # {target_arch}/{arch}/{variation}/{module}.json
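+    # e.g. arm64/arch-arm64-armv8-a/shared/libfoo.so.json (illustrative path)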
-    for root, _, files in os.walk(install_dir):
+    for root, _, files in os.walk(install_dir, followlinks=True):
         for file_name in sorted(files):
             if not file_name.endswith('.json'):
                 continue
@@ -216,33 +239,475 @@
 
             if variation != 'header':
                 prop['src'] = os.path.relpath(
-                    rel_path[:-5], # removing .json
+                    rel_path[:-5],  # removing .json
                     target_arch)
 
             module_name = prop['name']
-            notice_path = 'NOTICE_FILES/' + module_name + ".txt"
+
+            # Is this a sanitized variant?
+            if 'sanitize' in prop:
+                sanitizer_type = prop['sanitize']
+                # module_name is {name}.{sanitizer_type}; trim sanitizer_type
+                module_name = module_name[:-len(sanitizer_type) - 1]
+                # Keep only the props relevant to the sanitizer variant
+                for k in list(prop.keys()):
+                    if k not in SANITIZER_VARIANT_PROPS:
+                        del prop[k]
+                prop = {'name': module_name, sanitizer_type: prop}
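+                # Result (illustrative): 'libfoo.cfi' becomes
+                # {'name': 'libfoo', 'cfi': {<sanitizer variant props>}}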
+
+            notice_path = 'NOTICE_FILES/' + module_name + '.txt'
             if os.path.exists(os.path.join(bp_dir, notice_path)):
                 prop['notice'] = notice_path
 
             variation_dict = props[target_arch][variation]
             if not module_name in variation_dict:
                 variation_dict[module_name] = dict()
-            variation_dict[module_name][arch] = prop
+            if arch not in variation_dict[module_name]:
+                variation_dict[module_name][arch] = prop
+            else:
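+                # e.g. merge a sanitizer ('cfi') variant dict into the base arch entry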
+                variation_dict[module_name][arch].update(prop)
 
-    for target_arch in props:
+    return props
+
+
+def gen_bp_files(image, install_dir, snapshot_version):
+    props = build_props(install_dir)
+
+    for target_arch in sorted(props):
         androidbp = ''
         bp_dir = os.path.join(install_dir, target_arch)
-        for variation in props[target_arch]:
-            for name in props[target_arch][variation]:
-                androidbp += gen_bp_module(
+        for variation in sorted(props[target_arch]):
+            for name in sorted(props[target_arch][variation]):
+                androidbp += gen_bp_module(image, variation, name,
+                                           snapshot_version, target_arch,
+                                           props[target_arch][variation][name],
+                                           bp_dir)
+        with open(os.path.join(bp_dir, 'Android.bp'), 'w') as f:
+            logging.info('Generating Android.bp to: {}'.format(f.name))
+            f.write(androidbp)
+
+
+def find_all_installed_files(install_dir):
+    installed_files = dict()
+    for root, _, files in os.walk(install_dir, followlinks=True):
+        for file_name in sorted(files):
+            if file_name.endswith('.json'):
+                continue
+            if file_name.endswith('Android.bp'):
+                continue
+            full_path = os.path.join(root, file_name)
+            size = os.stat(full_path).st_size
+            installed_files[full_path] = size
+
+    logging.debug('')
+    for f in sorted(installed_files.keys()):
+        logging.debug(f)
+    logging.debug('')
+    logging.debug('found {} installed files'.format(len(installed_files)))
+    logging.debug('')
+    return installed_files
+
+
+def find_files_in_props(target_arch, arch_install_dir, variation, name, props, file_to_info):
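+    # Records every file referenced by the given module props into file_to_info,
+    # keyed by file path, as (name, variation, arch, is_cfi, is_header) tuples.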
+    logging.debug('{} {} {} {} {}'.format(
+        target_arch, arch_install_dir, variation, name, props))
+
+    def add_info(file, name, variation, arch, is_cfi, is_header):
+        info = (name, variation, arch, is_cfi, is_header)
+        info_list = file_to_info.get(file)
+        if not info_list:
+            info_list = []
+            file_to_info[file] = info_list
+        info_list.append(info)
+
+    def find_file_in_list(dict, key, is_cfi):
+        list = dict.get(key)
+        logging.debug('    {} {}'.format(key, list))
+        if list:
+            for item in list:
+                item_path = os.path.join(arch_install_dir, item)
+                add_info(item_path, name, variation, arch, is_cfi, False)
+
+    def find_file_in_dirs(dict, key, is_cfi, is_header):
+        dirs = dict.get(key)
+        logging.debug('    {} {}'.format(key, dirs))
+        if dirs:
+            for dir in dirs:
+                dir_path = os.path.join(arch_install_dir, dir)
+                logging.debug('        scanning {}'.format(dir_path))
+                for root, _, files in os.walk(dir_path, followlinks=True):
+                    for file_name in sorted(files):
+                        item_path = os.path.join(root, file_name)
+                        add_info(item_path, name, variation, arch, is_cfi, is_header)
+
+    def find_file_in_dict(dict, is_cfi):
+        logging.debug('    arch {}'.format(arch))
+        logging.debug('    name {}'.format(name))
+        logging.debug('    is_cfi {}'.format(is_cfi))
+
+        src = dict.get('src')
+        logging.debug('    src {}'.format(src))
+        if src:
+            src_path = os.path.join(arch_install_dir, src)
+            add_info(src_path, name, variation, arch, is_cfi, False)
+
+        notice = dict.get('notice')
+        logging.debug('    notice {}'.format(notice))
+        if notice:
+            notice_path = os.path.join(arch_install_dir, notice)
+            add_info(notice_path, name, variation, arch, is_cfi, False)
+
+        find_file_in_list(dict, 'init_rc', is_cfi)
+        find_file_in_list(dict, 'vintf_fragments', is_cfi)
+
+        find_file_in_dirs(dict, 'export_include_dirs', is_cfi, True)
+        find_file_in_dirs(dict, 'export_system_include_dirs', is_cfi, True)
+
+    for arch in sorted(props):
+        name = props[arch]['name']
+        find_file_in_dict(props[arch], False)
+        cfi = props[arch].get('cfi')
+        if cfi:
+            find_file_in_dict(cfi, True)
+
+
+def find_all_props_files(install_dir):
+
+    # This function builds a database of filename to module. This means that we
+    # need to dive into the json to find the files that the vendor snapshot
+    # provides, and link these back to modules that provide them.
+
+    file_to_info = dict()
+
+    props = build_props(install_dir)
+    for target_arch in sorted(props):
+        arch_install_dir = os.path.join(install_dir, target_arch)
+        for variation in sorted(props[target_arch]):
+            for name in sorted(props[target_arch][variation]):
+                find_files_in_props(
+                    target_arch,
+                    arch_install_dir,
                     variation,
                     name,
-                    args.snapshot_version,
-                    target_arch,
                     props[target_arch][variation][name],
-                    bp_dir)
-        with open(os.path.join(bp_dir, 'Android.bp'), 'w') as f:
-            f.write(androidbp)
+                    file_to_info)
+
+    logging.debug('')
+    for f in sorted(file_to_info.keys()):
+        logging.debug(f)
+    logging.debug('')
+    logging.debug('found {} props files'.format(len(file_to_info)))
+    logging.debug('')
+    return file_to_info
+
+
+def get_ninja_inputs(ninja_binary, ninja_build_file, modules):
+    """Returns the set of input file path strings for the given modules.
+
+    Uses the `ninja -t inputs` tool.
+
+    Args:
+        ninja_binary: The path to a ninja binary.
+        ninja_build_file: The path to a .ninja file from a build.
+        modules: The list of modules to scan for inputs.
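+
+    Example (hypothetical ninja file and goal):
+        get_ninja_inputs('prebuilts/build-tools/linux-x86/bin/ninja',
+                         'out/combined-aosp_arm64.ninja', ['droid'])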
+    """
+    inputs = set()
+    cmd = [
+        ninja_binary,
+        "-f",
+        ninja_build_file,
+        "-t",
+        "inputs",
+        "-d",
+    ] + list(modules)
+    logging.debug('invoke ninja {}'.format(cmd))
+    inputs = inputs.union(set(
+        subprocess.check_output(cmd).decode().strip('\n').split('\n')))
+
+    return inputs
+
+
+def check_module_usage(install_dir, ninja_binary, image, ninja_file, goals,
+                       output):
+    all_installed_files = find_all_installed_files(install_dir)
+    all_props_files = find_all_props_files(install_dir)
+
+    ninja_inputs = get_ninja_inputs(ninja_binary, ninja_file, goals)
+    logging.debug('')
+    logging.debug('ninja inputs')
+    for ni in ninja_inputs:
+        logging.debug(ni)
+
+    logging.debug('found {} ninja_inputs for goals {}'.format(
+        len(ninja_inputs), goals))
+
+    # Intersect the file_to_info dict with the ninja_inputs to determine
+    # which items from the vendor snapshot are actually used by the goals.
+
+    total_size = 0
+    used_size = 0
+    used_file_to_info = dict()
+
+    for file, size in all_installed_files.items():
+        total_size += size
+        if file in ninja_inputs:
+            logging.debug('used: {}'.format(file))
+            used_size += size
+            info = all_props_files.get(file)
+
+            if info:
+                used_file_to_info[file] = info
+            else:
+                logging.warning('No info for file {}'.format(file))
+                used_file_to_info[file] = []  # keep iterable so the loop below doesn't crash
+
+    logging.debug('Total size {}'.format(total_size))
+    logging.debug('Used size {}'.format(used_size))
+    logging.debug('')
+    logging.debug('used items')
+
+    used_modules = set()
+
+    for f, i in sorted(used_file_to_info.items()):
+        logging.debug('{} {}'.format(f, i))
+        for m in i:
+            (name, variation, arch, is_cfi, is_header) = m
+            if not is_header:
+                used_modules.add(name)
+
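+    # Output format (module name is illustrative):
+    #   VENDOR_SNAPSHOT_MODULES := \
+    #     libfoo \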
+    with open(output, 'w') as f:
+        f.write('%s_SNAPSHOT_MODULES := \\\n' % image.upper())
+        for m in sorted(used_modules):
+            f.write('  %s \\\n' % m)
+
+
+def check_call(cmd):
+    logging.debug('Running `{}`'.format(' '.join(cmd)))
+    subprocess.check_call(cmd)
+
+
+def fetch_artifact(branch, build, target, pattern, destination):
+    """Fetches build artifacts from Android Build server.
+
+    Args:
+      branch: string, branch to pull build artifacts from
+      build: string, build number to pull build artifacts from
+      target: string, target name to pull build artifacts from
+      pattern: string, pattern of build artifact file name
+      destination: string, destination to pull build artifact to
+    """
+    fetch_artifact_path = '/google/data/ro/projects/android/fetch_artifact'
+    cmd = [
+        fetch_artifact_path, '--branch', branch, '--target', target, '--bid',
+        build, pattern, destination
+    ]
+    check_call(cmd)
+
+
+def install_artifacts(image, branch, build, target, local_dir, symlink,
+                      install_dir):
+    """Installs vendor snapshot build artifacts to {install_dir}/v{version}.
+
+    1) Fetch build artifacts from Android Build server or from local_dir
+    2) Unzip or create symlinks to build artifacts
+
+    Args:
+      image: string, img file for which the snapshot was created (vendor,
+             recovery, etc.)
+      branch: string or None, branch name of build artifacts
+      build: string or None, build number of build artifacts
+      target: string or None, target name of build artifacts
+      local_dir: string or None, local dir to pull artifacts from
+      symlink: boolean, whether to use symlinks instead of unzipping the
+        vendor snapshot zip
+      install_dir: string, directory to install the snapshot to
+    """
+    artifact_pattern = image + '-*.zip'
+
+    def unzip_artifacts(artifact_dir):
+        artifacts = glob.glob(os.path.join(artifact_dir, artifact_pattern))
+        for artifact in artifacts:
+            logging.info('Unzipping snapshot: {}'.format(artifact))
+            check_call(['unzip', '-qn', artifact, '-d', install_dir])
+
+    if branch and build and target:
+        with tempfile.TemporaryDirectory() as tmpdir:
+            logging.info(
+                'Fetching {pattern} from {branch} (bid: {build}, target: {target})'
+                .format(
+                    pattern=artifact_pattern,
+                    branch=branch,
+                    build=build,
+                    target=target))
+            fetch_artifact(branch, build, target, artifact_pattern, tmpdir)
+            unzip_artifacts(tmpdir)
+    elif local_dir:
+        if symlink:
+            # This assumes local_dir is the location of vendor-snapshot in the
+            # build (e.g., out/soong/vendor-snapshot).
+            #
+            # Create the first level as proper directories and the next level
+            # as symlinks.
+            for item1 in os.listdir(local_dir):
+                dest_dir = os.path.join(install_dir, item1)
+                src_dir = os.path.join(local_dir, item1)
+                if os.path.isdir(src_dir):
+                    check_call(['mkdir', '-p', dest_dir])
+                    # Create symlinks.
+                    for item2 in os.listdir(src_dir):
+                        src_item = os.path.join(src_dir, item2)
+                        logging.info('Creating symlink to {} in {}'.format(
+                            src_item, dest_dir))
+                        os.symlink(src_item, os.path.join(dest_dir, item2))
+        else:
+            logging.info('Fetching local snapshot from {}'.format(
+                local_dir))
+            unzip_artifacts(local_dir)
+    else:
+        raise RuntimeError('Neither local nor remote fetch information given.')
+
+
+def get_args():
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        'snapshot_version',
+        type=int,
+        help='Vendor snapshot version to install, e.g. "30".')
+    parser.add_argument(
+        '--image',
+        help=('Image whose snapshot is being updated (e.g., vendor, '
+              'recovery, ramdisk, etc.)'),
+        default='vendor')
+    parser.add_argument('--branch', help='Branch to pull build from.')
+    parser.add_argument('--build', help='Build number to pull.')
+    parser.add_argument('--target', help='Target to pull.')
+    parser.add_argument(
+        '--local',
+        help=('Fetch local vendor snapshot artifacts from specified local '
+              'directory instead of Android Build server. '
+              'Example: --local /path/to/local/dir'))
+    parser.add_argument(
+        '--symlink',
+        action='store_true',
+        help='Use symlinks instead of unzipping vendor snapshot zip')
+    parser.add_argument(
+        '--install-dir',
+        required=True,
+        help=(
+            'Base directory to which vendor snapshot artifacts are installed. '
+            'Example: --install-dir vendor/<company name>/vendor_snapshot/v30'))
+    parser.add_argument(
+        '--overwrite',
+        action='store_true',
+        help=(
+            'If provided, does not ask before overwriting the install-dir.'))
+    parser.add_argument(
+        '--check-module-usage',
+        action='store_true',
+        help='Check which modules are used.')
+    parser.add_argument(
+        '--check-module-usage-goal',
+        action='append',
+        help='Goal(s) for which --check-module-usage is calculated.')
+    parser.add_argument(
+        '--check-module-usage-ninja-file',
+        help='Ninja file for which --check-module-usage is calculated.')
+    parser.add_argument(
+        '--check-module-usage-output',
+        help='File to which to write the check-module-usage results.')
+
+    parser.add_argument(
+        '-v',
+        '--verbose',
+        action='count',
+        default=0,
+        help='Increase output verbosity, e.g. "-v", "-vv".')
+    return parser.parse_args()
+
+
+
+def main():
+    """Program entry point."""
+    args = get_args()
+
+    verbose_map = (logging.WARNING, logging.INFO, logging.DEBUG)
+    verbosity = min(args.verbose, 2)
+    logging.basicConfig(
+        format='%(levelname)-8s [%(filename)s:%(lineno)d] %(message)s',
+        level=verbose_map[verbosity])
+
+    if not args.install_dir:
+        raise ValueError('Please provide --install-dir option.')
+    install_dir = os.path.expanduser(args.install_dir)
+
+    if args.check_module_usage:
+        ninja_binary = './prebuilts/build-tools/linux-x86/bin/ninja'
+
+        if not args.check_module_usage_goal:
+            raise ValueError('Please provide --check-module-usage-goal option.')
+        if not args.check_module_usage_ninja_file:
+            raise ValueError(
+                'Please provide --check-module-usage-ninja-file option.')
+        if not args.check_module_usage_output:
+            raise ValueError(
+                'Please provide --check-module-usage-output option.')
+
+        check_module_usage(install_dir, ninja_binary, args.image,
+                           args.check_module_usage_ninja_file,
+                           args.check_module_usage_goal,
+                           args.check_module_usage_output)
+        return
+
+    local = None
+    if args.local:
+        local = os.path.expanduser(args.local)
+
+    if local:
+        if args.build or args.branch or args.target:
+            raise ValueError(
+                'When --local option is set, --branch, --build or --target cannot be '
+                'specified.')
+        elif not os.path.isdir(local):
+            raise RuntimeError(
+                'The specified local directory, {}, does not exist.'.format(
+                    local))
+    else:
+        if not (args.build and args.branch and args.target):
+            raise ValueError(
+                'Please provide --branch, --build and --target. Or set --local '
+                'option.')
+
+    snapshot_version = args.snapshot_version
+    raw_mode = args.image.strip().lower() == 'raw'
+
+    if os.path.exists(install_dir):
+        def remove_dir():
+            logging.info('Removing {}'.format(install_dir))
+            check_call(['rm', '-rf', install_dir])
+        if args.overwrite:
+            remove_dir()
+        else:
+            if not raw_mode:
+                resp = input('Directory {} already exists. IT WILL BE REMOVED.\n'
+                             'Are you sure? (yes/no): '.format(install_dir))
+                if resp == 'yes':
+                    remove_dir()
+                elif resp == 'no':
+                    logging.info('Cancelled snapshot install.')
+                    return
+                else:
+                    raise ValueError('Did not understand: ' + resp)
+    check_call(['mkdir', '-p', install_dir])
+
+    install_artifacts(
+        image=args.image,
+        branch=args.branch,
+        build=args.build,
+        target=args.target,
+        local_dir=local,
+        symlink=args.symlink,
+        install_dir=install_dir)
+
+    if not raw_mode:
+        gen_bp_files(args.image, install_dir, snapshot_version)
 
 if __name__ == '__main__':
     main()