Merge "Add media_profiles_V1_0.dtd" into oc-dev
am: 8d66d58d1d

Change-Id: I160e09ee10f87c3ba4d0e83a5d75682082d700fb
diff --git a/CleanSpec.mk b/CleanSpec.mk
index 88f9172..b955e25 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -417,6 +417,9 @@
 # Soong module variant change, remove obsolete intermediates
 $(call add-clean-step, rm -rf $(OUT_DIR)/soong/.intermediates)
 
+# Version checking moving to Soong
+$(call add-clean-step, rm -rf $(OUT_DIR)/versions_checked.mk)
+
 # ************************************************
 # NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
 # ************************************************
diff --git a/core/Makefile b/core/Makefile
index b6a704a..f742f29 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -225,11 +225,16 @@
 BUILDINFO_SH := build/tools/buildinfo.sh
 VENDOR_BUILDINFO_SH := build/tools/vendor_buildinfo.sh
 
-# TARGET_BUILD_FLAVOR and ro.build.flavor are used only by the test harness to distinguish builds.
+# TARGET_BUILD_FLAVOR and ro.build.flavor are used only by the test
+# harness to distinguish builds. Only add _asan for a sanitized build
+# if it isn't already a part of the flavor (via a dedicated lunch
+# config for example).
 TARGET_BUILD_FLAVOR := $(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)
 ifdef SANITIZE_TARGET
+ifeq (,$(findstring _asan,$(TARGET_BUILD_FLAVOR)))
 TARGET_BUILD_FLAVOR := $(TARGET_BUILD_FLAVOR)_asan
 endif
+endif
 
 ifdef TARGET_SYSTEM_PROP
 system_prop_file := $(TARGET_SYSTEM_PROP)
@@ -857,11 +862,11 @@
 	$(hide) zip -qjX $@ $<
 	$(remove-timestamps-from-package)
 
-# Carry the public key for update_engine if it's a non-Brillo target that
+# Carry the public key for update_engine if it's a non-IoT target that
 # uses the AB updater. We use the same key as otacerts but in RSA public key
 # format.
 ifeq ($(AB_OTA_UPDATER),true)
-ifeq ($(BRILLO),)
+ifneq ($(PRODUCT_IOT),true)
 ALL_DEFAULT_INSTALLED_MODULES += $(TARGET_OUT_ETC)/update_engine/update-payload-key.pub.pem
 $(TARGET_OUT_ETC)/update_engine/update-payload-key.pub.pem: $(addsuffix .x509.pem,$(DEFAULT_KEY_CERT_PAIR))
 	$(hide) rm -f $@
@@ -961,6 +966,7 @@
 $(if $(BOARD_SYSTEMIMAGE_SQUASHFS_BLOCK_SIZE),$(hide) echo "system_squashfs_block_size=$(BOARD_SYSTEMIMAGE_SQUASHFS_BLOCK_SIZE)" >> $(1))
 $(if $(BOARD_SYSTEMIMAGE_SQUASHFS_DISABLE_4K_ALIGN),$(hide) echo "system_squashfs_disable_4k_align=$(BOARD_SYSTEMIMAGE_SQUASHFS_DISABLE_4K_ALIGN)" >> $(1))
 $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_BASE_FS_PATH),$(hide) echo "system_base_fs_file=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_BASE_FS_PATH)" >> $(1))
+$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_HEADROOM),$(hide) echo "system_headroom=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_HEADROOM)" >> $(1))
 $(if $(BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "userdata_fs_type=$(BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
 $(if $(BOARD_USERDATAIMAGE_PARTITION_SIZE),$(hide) echo "userdata_size=$(BOARD_USERDATAIMAGE_PARTITION_SIZE)" >> $(1))
 $(if $(BOARD_FLASH_LOGICAL_BLOCK_SIZE), $(hide) echo "flash_logical_block_size=$(BOARD_FLASH_LOGICAL_BLOCK_SIZE)" >> $(1))
@@ -1225,6 +1231,9 @@
 ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT))
 $(INSTALLED_BOOTIMAGE_TARGET) : $(VBOOT_SIGNER)
 endif
+ifeq (true,$(BOARD_AVB_ENABLE))
+$(INSTALLED_BOOTIMAGE_TARGET) : $(AVBTOOL)
+endif
 $(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTFS) $(MKBOOTIMG) $(MINIGZIP) \
 		$(INSTALLED_RAMDISK_TARGET) \
 		$(INTERNAL_RECOVERYIMAGE_FILES) \
@@ -1805,6 +1814,26 @@
 endif
 
 # -----------------------------------------------------------------
+# dtbo image
+ifdef BOARD_PREBUILT_DTBOIMAGE
+INSTALLED_DTBOIMAGE_TARGET := $(PRODUCT_OUT)/dtbo.img
+
+ifeq ($(BOARD_AVB_ENABLE),true)
+$(INSTALLED_DTBOIMAGE_TARGET): $(BOARD_PREBUILT_DTBOIMAGE) $(AVBTOOL)
+	cp $(BOARD_PREBUILT_DTBOIMAGE) $@
+	$(AVBTOOL) add_hash_footer \
+		--image $@ \
+		--partition_size $(BOARD_DTBOIMG_PARTITION_SIZE) \
+		--partition_name dtbo $(INTERNAL_AVB_SIGNING_ARGS) \
+		$(BOARD_AVB_DTBO_ADD_HASH_FOOTER_ARGS)
+else
+$(INSTALLED_DTBOIMAGE_TARGET): $(BOARD_PREBUILT_DTBOIMAGE)
+	cp $(BOARD_PREBUILT_DTBOIMAGE) $@
+endif
+
+endif
+
+# -----------------------------------------------------------------
 # vbmeta image
 ifeq ($(BOARD_AVB_ENABLE),true)
 
@@ -1819,6 +1848,11 @@
     --include_descriptors_from_image $(INSTALLED_VENDORIMAGE_TARGET)
 endif
 
+ifdef INSTALLED_DTBOIMAGE_TARGET
+INTERNAL_AVB_MAKE_VBMETA_IMAGE_ARGS += \
+		--include_descriptors_from_image $(INSTALLED_DTBOIMAGE_TARGET)
+endif
+
 ifeq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
 INTERNAL_AVB_MAKE_VBMETA_IMAGE_ARGS += --setup_rootfs_from_kernel $(BUILT_SYSTEMIMAGE)
 endif
@@ -1855,7 +1889,7 @@
 endef
 
 INSTALLED_VBMETAIMAGE_TARGET := $(BUILT_VBMETAIMAGE_TARGET)
-$(INSTALLED_VBMETAIMAGE_TARGET): $(AVBTOOL) $(INSTALLED_BOOTIMAGE_TARGET) $(INSTALLED_SYSTEMIMAGE) $(INSTALLED_VENDORIMAGE_TARGET)
+$(INSTALLED_VBMETAIMAGE_TARGET): $(AVBTOOL) $(INSTALLED_BOOTIMAGE_TARGET) $(INSTALLED_SYSTEMIMAGE) $(INSTALLED_VENDORIMAGE_TARGET) $(INSTALLED_DTBOIMAGE_TARGET)
 	$(build-vbmetaimage-target)
 
 .PHONY: vbmetaimage-nodeps
@@ -1943,12 +1977,6 @@
   $(VBOOT_SIGNER)
 endif
 
-ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT))
-OTATOOLS += \
-  $(FUTILITY) \
-  $(VBOOT_SIGNER)
-endif
-
 # Shared libraries.
 OTATOOLS += \
   $(HOST_LIBRARY_PATH)/libc++$(HOST_SHLIB_SUFFIX) \
@@ -2102,6 +2130,7 @@
 		$(INSTALLED_USERDATAIMAGE_TARGET) \
 		$(INSTALLED_CACHEIMAGE_TARGET) \
 		$(INSTALLED_VENDORIMAGE_TARGET) \
+		$(INSTALLED_DTBOIMAGE_TARGET) \
 		$(INTERNAL_SYSTEMOTHERIMAGE_FILES) \
 		$(INSTALLED_ANDROID_INFO_TXT_TARGET) \
 		$(INSTALLED_KERNEL_TARGET) \
@@ -2322,6 +2351,10 @@
 	$(hide) mkdir -p $(zip_root)/IMAGES
 	$(hide) cp $(INSTALLED_VENDORIMAGE_TARGET) $(zip_root)/IMAGES/
 endif
+ifdef BOARD_PREBUILT_DTBOIMAGE
+	$(hide) mkdir -p $(zip_root)/IMAGES
+	$(hide) cp $(INSTALLED_DTBOIMAGE_TARGET) $(zip_root)/IMAGES/
+endif
 	@# Run fs_config on all the system, vendor, boot ramdisk,
 	@# and recovery ramdisk files in the zip, and save the output
 	$(hide) $(call fs_config,$(zip_root)/SYSTEM,system/) > $(zip_root)/META/filesystem_config.txt
@@ -2386,6 +2419,10 @@
 
 $(INTERNAL_OTA_PACKAGE_TARGET): KEY_CERT_PAIR := $(DEFAULT_KEY_CERT_PAIR)
 
+ifeq ($(AB_OTA_UPDATER),true)
+$(INTERNAL_OTA_PACKAGE_TARGET): $(BRILLO_UPDATE_PAYLOAD)
+endif
+
 $(INTERNAL_OTA_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE) \
 		build/tools/releasetools/ota_from_target_files
 	@echo "Package OTA: $@"
diff --git a/core/base_rules.mk b/core/base_rules.mk
index 92e69bb..c327d2c 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -248,6 +248,8 @@
 ###########################################################
 include $(BUILD_SYSTEM)/configure_module_stem.mk
 
+LOCAL_BUILT_MODULE := $(intermediates)/$(my_built_module_stem)
+
 # OVERRIDE_BUILT_MODULE_PATH is only allowed to be used by the
 # internal SHARED_LIBRARIES build files.
 OVERRIDE_BUILT_MODULE_PATH := $(strip $(OVERRIDE_BUILT_MODULE_PATH))
@@ -255,11 +257,8 @@
   ifneq ($(LOCAL_MODULE_CLASS),SHARED_LIBRARIES)
     $(error $(LOCAL_PATH): Illegal use of OVERRIDE_BUILT_MODULE_PATH)
   endif
-  built_module_path := $(OVERRIDE_BUILT_MODULE_PATH)
-else
-  built_module_path := $(intermediates)
+  $(eval $(call copy-one-file,$(LOCAL_BUILT_MODULE),$(OVERRIDE_BUILT_MODULE_PATH)/$(my_built_module_stem)))
 endif
-LOCAL_BUILT_MODULE := $(built_module_path)/$(my_built_module_stem)
 
 ifneq (true,$(LOCAL_UNINSTALLABLE_MODULE))
   # Apk and its attachments reside in its own subdir.
@@ -295,6 +294,11 @@
 .KATI_RESTAT: $(LOCAL_BUILT_MODULE).toc
 # Build .toc file when using mm, mma, or make $(my_register_name)
 $(my_all_targets): $(LOCAL_BUILT_MODULE).toc
+
+ifdef OVERRIDE_BUILT_MODULE_PATH
+$(eval $(call copy-one-file,$(LOCAL_BUILT_MODULE).toc,$(OVERRIDE_BUILT_MODULE_PATH)/$(my_built_module_stem).toc))
+$(OVERRIDE_BUILT_MODULE_PATH)/$(my_built_module_stem).toc: $(OVERRIDE_BUILT_MODULE_PATH)/$(my_built_module_stem)
+endif
 endif
 
 ###########################################################
@@ -431,9 +435,27 @@
 ###########################################################
 ifdef LOCAL_COMPATIBILITY_SUITE
 
+# If we are building a native test or benchmark and its stem variants are not defined,
+# separate the multiple architectures into subdirectories of the testcase folder.
+arch_dir :=
+is_native :=
+ifeq ($(LOCAL_MODULE_CLASS),NATIVE_TESTS)
+  is_native := true
+endif
+ifeq ($(LOCAL_MODULE_CLASS),NATIVE_BENCHMARK)
+  is_native := true
+endif
+ifdef LOCAL_MULTILIB
+  is_native := true
+endif
+ifdef is_native
+  arch_dir := /$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)
+  is_native :=
+endif
+
 # The module itself.
 $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
-  $(eval my_compat_dist_$(suite) := $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
+  $(eval my_compat_dist_$(suite) := $(foreach dir, $(call compatibility_suite_dirs,$(suite),$(arch_dir)), \
     $(LOCAL_BUILT_MODULE):$(dir)/$(my_installed_module_stem))))
 
 # Make sure we only add the files once for multilib modules.
@@ -461,6 +483,13 @@
   $(eval my_compat_dist_$(suite) += $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
     $(LOCAL_PATH)/DynamicConfig.xml:$(dir)/$(LOCAL_MODULE).dynamic)))
 endif
+
+ifneq (,$(wildcard $(LOCAL_PATH)/$(LOCAL_MODULE)_*.config))
+$(foreach extra_config, $(wildcard $(LOCAL_PATH)/$(LOCAL_MODULE)_*.config), \
+  $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
+    $(eval my_compat_dist_$(suite) += $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
+      $(extra_config):$(dir)/$(notdir $(extra_config))))))
+endif
 endif # $(my_prefix)$(LOCAL_MODULE_CLASS)_$(LOCAL_MODULE)_compat_files
 
 $(call create-suite-dependencies)
diff --git a/core/binary.mk b/core/binary.mk
index 589c462..625d348 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -309,7 +309,7 @@
 ifneq ($(LOCAL_NO_PIC),true)
 ifneq ($($(my_prefix)OS),windows)
 ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
-my_cflags += -fpie
+my_cflags += -fPIE
 else
 my_cflags += -fPIC
 endif
@@ -515,37 +515,6 @@
 my_asflags += -D__ASSEMBLY__
 
 ###########################################################
-## When compiling against the VNDK, use LL-NDK libraries
-###########################################################
-ifneq ($(LOCAL_USE_VNDK),)
-  ####################################################
-  ## Soong modules may be built twice, once for /system
-  ## and once for /vendor. If we're using the VNDK,
-  ## switch all soong libraries over to the /vendor
-  ## variant.
-  ####################################################
-  ifeq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
-    # Soong-built libraries should always use the .vendor variant
-    my_whole_static_libraries := $(addsuffix .vendor,$(my_whole_static_libraries))
-    my_static_libraries := $(addsuffix .vendor,$(my_static_libraries))
-    my_shared_libraries := $(addsuffix .vendor,$(my_shared_libraries))
-    my_system_shared_libraries := $(addsuffix .vendor,$(my_system_shared_libraries))
-    my_header_libraries := $(addsuffix .vendor,$(my_header_libraries))
-  else
-    my_whole_static_libraries := $(foreach l,$(my_whole_static_libraries),\
-      $(if $(SPLIT_VENDOR.STATIC_LIBRARIES.$(l)),$(l).vendor,$(l)))
-    my_static_libraries := $(foreach l,$(my_static_libraries),\
-      $(if $(SPLIT_VENDOR.STATIC_LIBRARIES.$(l)),$(l).vendor,$(l)))
-    my_shared_libraries := $(foreach l,$(my_shared_libraries),\
-      $(if $(SPLIT_VENDOR.SHARED_LIBRARIES.$(l)),$(l).vendor,$(l)))
-    my_system_shared_libraries := $(foreach l,$(my_system_shared_libraries),\
-      $(if $(SPLIT_VENDOR.SHARED_LIBRARIES.$(l)),$(l).vendor,$(l)))
-    my_header_libraries := $(foreach l,$(my_header_libraries),\
-      $(if $(SPLIT_VENDOR.HEADER_LIBRARIES.$(l)),$(l).vendor,$(l)))
-  endif
-endif
-
-###########################################################
 ## Define PRIVATE_ variables from global vars
 ###########################################################
 ifndef LOCAL_IS_HOST_MODULE
@@ -1343,6 +1312,36 @@
 asm_objects += $(asm_objects_asm)
 endif
 
+###########################################################
+## When compiling against the VNDK, use LL-NDK libraries
+###########################################################
+ifneq ($(LOCAL_USE_VNDK),)
+  ####################################################
+  ## Soong modules may be built twice, once for /system
+  ## and once for /vendor. If we're using the VNDK,
+  ## switch all soong libraries over to the /vendor
+  ## variant.
+  ####################################################
+  ifeq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
+    # Soong-built libraries should always use the .vendor variant
+    my_whole_static_libraries := $(addsuffix .vendor,$(my_whole_static_libraries))
+    my_static_libraries := $(addsuffix .vendor,$(my_static_libraries))
+    my_shared_libraries := $(addsuffix .vendor,$(my_shared_libraries))
+    my_system_shared_libraries := $(addsuffix .vendor,$(my_system_shared_libraries))
+    my_header_libraries := $(addsuffix .vendor,$(my_header_libraries))
+  else
+    my_whole_static_libraries := $(foreach l,$(my_whole_static_libraries),\
+      $(if $(SPLIT_VENDOR.STATIC_LIBRARIES.$(l)),$(l).vendor,$(l)))
+    my_static_libraries := $(foreach l,$(my_static_libraries),\
+      $(if $(SPLIT_VENDOR.STATIC_LIBRARIES.$(l)),$(l).vendor,$(l)))
+    my_shared_libraries := $(foreach l,$(my_shared_libraries),\
+      $(if $(SPLIT_VENDOR.SHARED_LIBRARIES.$(l)),$(l).vendor,$(l)))
+    my_system_shared_libraries := $(foreach l,$(my_system_shared_libraries),\
+      $(if $(SPLIT_VENDOR.SHARED_LIBRARIES.$(l)),$(l).vendor,$(l)))
+    my_header_libraries := $(foreach l,$(my_header_libraries),\
+      $(if $(SPLIT_VENDOR.HEADER_LIBRARIES.$(l)),$(l).vendor,$(l)))
+  endif
+endif
 
 ##########################################################
 ## Set up installed module dependency
@@ -1396,39 +1395,28 @@
 ## other NDK-built libraries
 ####################################################
 
-my_link_type := $(intermediates)/link_type
-all_link_types: $(my_link_type)
 ifdef LOCAL_SDK_VERSION
-$(my_link_type): PRIVATE_LINK_TYPE := native:ndk
-$(my_link_type): PRIVATE_WARN_TYPES :=
-$(my_link_type): PRIVATE_ALLOWED_TYPES := native:ndk
+my_link_type := native:ndk
+my_warn_types :=
+my_allowed_types := native:ndk
 else ifdef LOCAL_USE_VNDK
-$(my_link_type): PRIVATE_LINK_TYPE := native:vendor
-$(my_link_type): PRIVATE_WARN_TYPES :=
-$(my_link_type): PRIVATE_ALLOWED_TYPES := native:vendor
+my_link_type := native:vendor
+my_warn_types :=
+my_allowed_types := native:vendor
 else
-$(my_link_type): PRIVATE_LINK_TYPE := native:platform
-$(my_link_type): PRIVATE_WARN_TYPES :=
-$(my_link_type): PRIVATE_ALLOWED_TYPES := native:ndk native:platform
+my_link_type := native:platform
+my_warn_types :=
+my_allowed_types := native:ndk native:platform
 endif
-$(eval $(call link-type-partitions,$(my_link_type)))
-my_link_type_deps := $(strip \
-   $(foreach l,$(my_whole_static_libraries) $(my_static_libraries), \
-     $(call intermediates-dir-for,STATIC_LIBRARIES,$(l),$(my_kind),,$(LOCAL_2ND_ARCH_VAR_PREFIX),$(my_host_cross))/link_type))
-ifneq ($(LOCAL_MODULE_CLASS),STATIC_LIBRARIES)
-ifneq ($(LOCAL_MODULE_CLASS),HEADER_LIBRARIES)
-my_link_type_deps += $(strip \
-   $(foreach l,$(my_shared_libraries), \
-     $(call intermediates-dir-for,SHARED_LIBRARIES,$(l),$(my_kind),,$(LOCAL_2ND_ARCH_VAR_PREFIX),$(my_host_cross))/link_type))
-endif
-endif
-$(my_link_type): PRIVATE_DEPS := $(my_link_type_deps)
-$(my_link_type): PRIVATE_MODULE := $(LOCAL_MODULE)
-$(my_link_type): PRIVATE_MAKEFILE := $(LOCAL_MODULE_MAKEFILE)
-$(my_link_type): $(my_link_type_deps) $(CHECK_LINK_TYPE)
-	@echo Check module type: $@
-	$(check-link-type)
 
+my_link_deps := $(addprefix STATIC_LIBRARIES:,$(my_whole_static_libraries) $(my_static_libraries))
+ifneq ($(filter-out STATIC_LIBRARIES HEADER_LIBRARIES,$(LOCAL_MODULE_CLASS)),)
+my_link_deps += $(addprefix SHARED_LIBRARIES:,$(my_shared_libraries))
+endif
+
+my_2nd_arch_prefix := $(LOCAL_2ND_ARCH_VAR_PREFIX)
+my_common :=
+include $(BUILD_SYSTEM)/link_type.mk
 
 ###########################################################
 ## Common object handling.
@@ -1696,6 +1684,12 @@
     ifeq ($(my_tidy_flags),)
       my_tidy_flags := $(call default_tidy_header_filter,$(LOCAL_PATH))
     endif
+
+    # We might be using the static analyzer through clang-tidy.
+    # https://bugs.llvm.org/show_bug.cgi?id=32914
+    ifneq ($(my_tidy_checks),)
+      my_tidy_flags += "-extra-arg-before=-D__clang_analyzer__"
+    endif
   endif
 endif
 
@@ -1815,7 +1809,7 @@
 .KATI_RESTAT: $(export_includes)
 
 # Make sure export_includes gets generated when you are running mm/mmm
-$(LOCAL_BUILT_MODULE) : | $(export_includes) $(my_link_type)
+$(LOCAL_BUILT_MODULE) : | $(export_includes)
 
 ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
 ifneq (,$(filter-out $(LOCAL_PATH)/%,$(my_export_c_include_dirs)))
@@ -1826,10 +1820,11 @@
     $(SOONG_CONV.$(LOCAL_MODULE).PROBLEMS) $(my_soong_problems)
 SOONG_CONV.$(LOCAL_MODULE).DEPS := \
     $(SOONG_CONV.$(LOCAL_MODULE).DEPS) \
-    $(my_static_libraries) \
-    $(my_whole_static_libraries) \
-    $(my_shared_libraries) \
-    $(my_system_shared_libraries)
+    $(filter-out $($(LOCAL_2ND_ARCH_VAR_PREFIX)UBSAN_RUNTIME_LIBRARY),\
+        $(my_static_libraries) \
+        $(my_whole_static_libraries) \
+        $(my_shared_libraries) \
+        $(my_system_shared_libraries))
 SOONG_CONV := $(SOONG_CONV) $(LOCAL_MODULE)
 endif
 
diff --git a/core/build-system.html b/core/build-system.html
index e72e141..c7938cc 100644
--- a/core/build-system.html
+++ b/core/build-system.html
@@ -592,6 +592,17 @@
 </ul>
 </p>
 
+<h4>LOCAL_ANNOTATION_PROCESSORS</h4>
+<p>Set this to a list of modules built with <code>BUILD_HOST_JAVA_LIBRARY</code>
+to have their jars passed to javac with -processorpath for use as annotation
+processors.</p>
+
+<h4>LOCAL_ANNOTATION_PROCESSOR_CLASSES</h4>
+<p>Set this to a list of classes to be passed to javac as -processor arguments.
+This list would be unnecessary, as javac will autodetect annotation processor
+classes, except that the Grok tool that is used on the Android source code
+does not autodetect them and requires listing them manually.</p>
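+<p>A minimal example (the processor module and class names below are
+placeholders, not modules that exist in the tree):</p>
+<pre>
+LOCAL_ANNOTATION_PROCESSORS := example-annotation-processor
+LOCAL_ANNOTATION_PROCESSOR_CLASSES := com.example.ExampleProcessor
+</pre>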
+
 <h4>LOCAL_ASSET_FILES</h4>
 <p>In Android.mk files that <code>include $(BUILD_PACKAGE)</code> set this
 to the set of files you want built into your app.  Usually:</p>
diff --git a/core/cleanbuild.mk b/core/cleanbuild.mk
index fa89758..b7109f6 100644
--- a/core/cleanbuild.mk
+++ b/core/cleanbuild.mk
@@ -14,7 +14,7 @@
 #
 
 # Don't bother with the cleanspecs if you are running mm/mmm
-ifeq ($(ONE_SHOT_MAKEFILE)$(dont_bother),)
+ifeq ($(ONE_SHOT_MAKEFILE)$(dont_bother)$(NO_ANDROID_CLEANSPEC),)
 
 INTERNAL_CLEAN_STEPS :=
 
@@ -142,53 +142,7 @@
 INTERNAL_CLEAN_STEPS :=
 INTERNAL_CLEAN_BUILD_VERSION :=
 
-endif  # if not ONE_SHOT_MAKEFILE dont_bother
-
-# Since products and build variants (unfortunately) share the same
-# PRODUCT_OUT staging directory, things can get out of sync if different
-# build configurations are built in the same tree.  The following logic
-# will notice when the configuration has changed and remove the files
-# necessary to keep things consistent.
-
-previous_build_config_file := $(PRODUCT_OUT)/previous_build_config.mk
-current_build_config_file := $(PRODUCT_OUT)/current_build_config.mk
-
-current_build_config := \
-    $(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)
-force_installclean := false
-
-# Read the current state from the file, if present.
-# Will set PREVIOUS_BUILD_CONFIG.
-#
-PREVIOUS_BUILD_CONFIG :=
--include $(previous_build_config_file)
-PREVIOUS_BUILD_CONFIG := $(strip $(PREVIOUS_BUILD_CONFIG))
-
-ifdef PREVIOUS_BUILD_CONFIG
-  ifneq ($(current_build_config),$(PREVIOUS_BUILD_CONFIG))
-    $(info *** Build configuration changed: "$(PREVIOUS_BUILD_CONFIG)" -> "$(current_build_config)")
-    ifneq ($(DISABLE_AUTO_INSTALLCLEAN),true)
-      force_installclean := true
-    else
-      $(info DISABLE_AUTO_INSTALLCLEAN is set; skipping auto-clean. Your tree may be in an inconsistent state.)
-    endif
-  endif
-endif  # else, this is the first build, so no need to clean.
-
-# Write the new state to the file.
-#
-$(shell \
-  mkdir -p $(dir $(current_build_config_file)) && \
-  echo "PREVIOUS_BUILD_CONFIG := $(current_build_config)" > \
-      $(current_build_config_file) \
- )
-$(shell cmp $(current_build_config_file) $(previous_build_config_file) > /dev/null 2>&1 || \
-  mv -f $(current_build_config_file) $(previous_build_config_file))
-
-PREVIOUS_BUILD_CONFIG :=
-previous_build_config_file :=
-current_build_config_file :=
-current_build_config :=
+endif  # if not ONE_SHOT_MAKEFILE dont_bother NO_ANDROID_CLEANSPEC
 
 #
 # installclean logic
@@ -272,14 +226,6 @@
 	$(hide) rm -rf $(FILES)
 	@echo "Deleted images and staging directories."
 
-ifeq ($(force_installclean),true)
-  $(info *** Forcing "make installclean"...)
-  $(info *** rm -rf $(dataclean_files) $(installclean_files))
-  $(shell rm -rf $(dataclean_files) $(installclean_files))
-  $(info *** Done with the cleaning, now starting the real build.)
-endif
-force_installclean :=
-
 ###########################################################
 
 .PHONY: clean-jack-files
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index 91243c7..93da54c 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -12,6 +12,8 @@
 LOCAL_ADDITIONAL_JAVA_DIR:=
 LOCAL_AIDL_INCLUDES:=
 LOCAL_ALLOW_UNDEFINED_SYMBOLS:=
+LOCAL_ANNOTATION_PROCESSORS:=
+LOCAL_ANNOTATION_PROCESSOR_CLASSES:=
 LOCAL_APK_LIBRARIES:=
 LOCAL_ARM_MODE:=
 LOCAL_ASFLAGS:=
diff --git a/core/config.mk b/core/config.mk
index cd20354..34f2c44 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -492,13 +492,24 @@
 BCC_COMPAT := $(HOST_OUT_EXECUTABLES)/bcc_compat
 DEPMOD := $(HOST_OUT_EXECUTABLES)/depmod
 
+#TODO: use a smaller -Xmx value for most libraries;
+#      only core.jar and framework.jar need a heap this big.
+ifndef DX_ALT_JAR
 DX := $(HOST_OUT_EXECUTABLES)/dx
+DX_COMMAND := $(DX) -JXms16M -JXmx2048M
+else
+DX := $(DX_ALT_JAR)
+DX_COMMAND := java -Xms16M -Xmx2048M -jar $(DX)
+endif
+
 MAINDEXCLASSES := $(HOST_OUT_EXECUTABLES)/mainDexClasses
 
 SOONG_ZIP := $(SOONG_HOST_OUT_EXECUTABLES)/soong_zip
 ZIP2ZIP := $(SOONG_HOST_OUT_EXECUTABLES)/zip2zip
 FILESLIST := $(SOONG_HOST_OUT_EXECUTABLES)/fileslist
 
+SOONG_JAVAC_WRAPPER := $(SOONG_HOST_OUT_EXECUTABLES)/soong_javac_wrapper
+
 # Always use prebuilts for ckati and makeparallel
 prebuilt_build_tools := prebuilts/build-tools
 ifeq ($(filter address,$(SANITIZE_HOST)),)
@@ -634,6 +645,7 @@
 FUTILITY := $(HOST_OUT_EXECUTABLES)/futility-host
 VBOOT_SIGNER := prebuilts/misc/scripts/vboot_signer/vboot_signer.sh
 FEC := $(HOST_OUT_EXECUTABLES)/fec
+BRILLO_UPDATE_PAYLOAD := $(HOST_OUT_EXECUTABLES)/brillo_update_payload
 
 DEXDUMP := $(HOST_OUT_EXECUTABLES)/dexdump2$(BUILD_EXECUTABLE_SUFFIX)
 PROFMAN := $(HOST_OUT_EXECUTABLES)/profman
@@ -851,7 +863,7 @@
 
 # These goals don't need to collect and include Android.mks/CleanSpec.mks
 # in the source tree.
-dont_bother_goals := clean clobber dataclean installclean \
+dont_bother_goals := dataclean installclean \
     help out \
     snod systemimage-nodeps \
     stnod systemtarball-nodeps \
diff --git a/core/config_sanitizers.mk b/core/config_sanitizers.mk
index 729ef48..04aedf4 100644
--- a/core/config_sanitizers.mk
+++ b/core/config_sanitizers.mk
@@ -63,7 +63,7 @@
 endif
 
 # If CFI is disabled globally, remove it from my_sanitize.
-ifeq ($(strip $(ENABLE_CFI)),)
+ifeq ($(strip $(ENABLE_CFI)),false)
   my_sanitize := $(filter-out cfi,$(my_sanitize))
   my_sanitize_diag := $(filter-out cfi,$(my_sanitize_diag))
 endif
@@ -74,6 +74,12 @@
   my_sanitize_diag := $(filter-out cfi,$(my_sanitize_diag))
 endif
 
+# Also disable CFI if ASAN is enabled.
+ifneq ($(filter address,$(my_sanitize)),)
+  my_sanitize := $(filter-out cfi,$(my_sanitize))
+  my_sanitize_diag := $(filter-out cfi,$(my_sanitize_diag))
+endif
+
 # CFI needs gold linker, and mips toolchain does not have one.
 ifneq ($(filter mips mips64,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)),)
   my_sanitize := $(filter-out cfi,$(my_sanitize))
diff --git a/core/definitions.mk b/core/definitions.mk
index 4bc1a08..5c39eca 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -1017,12 +1017,15 @@
 $(hide) echo >> $2
 endef
 
+# b/37755219
+RS_CC_ASAN_OPTIONS := ASAN_OPTIONS=detect_leaks=0:detect_container_overflow=0
+
 define transform-renderscripts-to-java-and-bc
 @echo "RenderScript: $(PRIVATE_MODULE) <= $(PRIVATE_RS_SOURCE_FILES)"
 $(hide) rm -rf $(PRIVATE_RS_OUTPUT_DIR)
 $(hide) mkdir -p $(PRIVATE_RS_OUTPUT_DIR)/res/raw
 $(hide) mkdir -p $(PRIVATE_RS_OUTPUT_DIR)/src
-$(hide) $(PRIVATE_RS_CC) \
+$(hide) $(RS_CC_ASAN_OPTIONS) $(PRIVATE_RS_CC) \
   -o $(PRIVATE_RS_OUTPUT_DIR)/res/raw \
   -p $(PRIVATE_RS_OUTPUT_DIR)/src \
   -d $(PRIVATE_RS_OUTPUT_DIR) \
@@ -1058,7 +1061,7 @@
 @echo "RenderScript: $(PRIVATE_MODULE) <= $(PRIVATE_RS_SOURCE_FILES)"
 $(hide) rm -rf $(PRIVATE_RS_OUTPUT_DIR)
 $(hide) mkdir -p $(PRIVATE_RS_OUTPUT_DIR)/
-$(hide) $(PRIVATE_RS_CC) \
+$(hide) $(RS_CC_ASAN_OPTIONS) $(PRIVATE_RS_CC) \
   -o $(PRIVATE_RS_OUTPUT_DIR)/ \
   -d $(PRIVATE_RS_OUTPUT_DIR) \
   -a $@ -MD \
@@ -2006,6 +2009,9 @@
 APPS_DEFAULT_VERSION_NAME := $(PLATFORM_VERSION)
 endif
 
+# b/37750224
+AAPT_ASAN_OPTIONS := ASAN_OPTIONS=detect_leaks=0
+
 # TODO: Right now we generate the asset resources twice, first as part
 # of generating the Java classes, then at the end when packaging the final
 # assets.  This should be changed to do one of two things: (1) Don't generate
@@ -2020,7 +2026,7 @@
 define create-resource-java-files
 @mkdir -p $(PRIVATE_SOURCE_INTERMEDIATES_DIR)
 @mkdir -p $(dir $(PRIVATE_RESOURCE_PUBLICS_OUTPUT))
-$(hide) $(AAPT) package $(PRIVATE_AAPT_FLAGS) -m \
+$(hide) $(AAPT_ASAN_OPTIONS) $(AAPT) package $(PRIVATE_AAPT_FLAGS) -m \
     $(eval # PRIVATE_PRODUCT_AAPT_CONFIG is intentionally missing-- see comment.) \
     $(addprefix -J , $(PRIVATE_SOURCE_INTERMEDIATES_DIR)) \
     $(addprefix -M , $(PRIVATE_ANDROID_MANIFEST)) \
@@ -2199,9 +2205,9 @@
 # $(2): bootclasspath
 define compile-java
 $(hide) rm -f $@
-$(hide) rm -rf $(PRIVATE_CLASS_INTERMEDIATES_DIR)
+$(hide) rm -rf $(PRIVATE_CLASS_INTERMEDIATES_DIR) $(PRIVATE_ANNO_INTERMEDIATES_DIR)
 $(hide) mkdir -p $(dir $@)
-$(hide) mkdir -p $(PRIVATE_CLASS_INTERMEDIATES_DIR)
+$(hide) mkdir -p $(PRIVATE_CLASS_INTERMEDIATES_DIR) $(PRIVATE_ANNO_INTERMEDIATES_DIR)
 $(call unzip-jar-files,$(PRIVATE_STATIC_JAVA_LIBRARIES),$(PRIVATE_CLASS_INTERMEDIATES_DIR))
 $(call dump-words-to-file,$(PRIVATE_JAVA_SOURCES),$(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list)
 $(hide) if [ -d "$(PRIVATE_SOURCE_INTERMEDIATES_DIR)" ]; then \
@@ -2214,13 +2220,13 @@
 $(hide) tr ' ' '\n' < $(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list \
     | $(NORMALIZE_PATH) | sort -u > $(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list-uniq
 $(hide) if [ -s $(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list-uniq ] ; then \
-    $(1) -encoding UTF-8 \
+    $(SOONG_JAVAC_WRAPPER) $(1) -encoding UTF-8 \
     $(if $(findstring true,$(PRIVATE_WARNINGS_ENABLE)),$(xlint_unchecked),) \
     $(2) \
     $(addprefix -classpath ,$(strip \
         $(call normalize-path-list,$(PRIVATE_ALL_JAVA_LIBRARIES)))) \
     $(if $(findstring true,$(PRIVATE_WARNINGS_ENABLE)),$(xlint_unchecked),) \
-    -extdirs "" -d $(PRIVATE_CLASS_INTERMEDIATES_DIR) \
+    -extdirs "" -d $(PRIVATE_CLASS_INTERMEDIATES_DIR) -s $(PRIVATE_ANNO_INTERMEDIATES_DIR) \
     $(PRIVATE_JAVACFLAGS) \
     \@$(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list-uniq \
     || ( rm -rf $(PRIVATE_CLASS_INTERMEDIATES_DIR) ; exit 41 ) \
@@ -2402,13 +2408,16 @@
 fi
 endef
 
+# b/37756495
+IJAR_ASAN_OPTIONS := ASAN_OPTIONS=detect_leaks=0
+
 ## Rule to create a table of contents from a .jar file.
 ## Must be called with $(eval).
 # $(1): A .jar file
 define _transform-jar-to-toc
 $1.toc: $1 | $(IJAR)
 	@echo Generating TOC: $$@
-	$(hide) $(IJAR) $$< $$@.tmp
+	$(hide) $(IJAR_ASAN_OPTIONS) $(IJAR) $$< $$@.tmp
 	$$(call commit-change-for-toc,$$@)
 endef
 
@@ -2545,14 +2554,11 @@
 endef
 
 
-#TODO: use a smaller -Xmx value for most libraries;
-#      only core.jar and framework.jar need a heap this big.
 define transform-classes.jar-to-dex
 @echo "target Dex: $(PRIVATE_MODULE)"
 @mkdir -p $(dir $@)
 $(hide) rm -f $(dir $@)classes*.dex
-$(hide) $(DX) \
-    -JXms16M -JXmx2048M \
+$(hide) $(DX_COMMAND) \
     --dex --output=$(dir $@) \
     --min-sdk-version=$(call codename-or-sdk-to-sdk,$(PRIVATE_DEFAULT_APP_TARGET_SDK)) \
     $(if $(NO_OPTIMIZE_DX), \
@@ -2605,7 +2611,7 @@
 #values; applications can override these by explicitly stating
 #them in their manifest.
 define add-assets-to-package
-$(hide) $(AAPT) package -u $(PRIVATE_AAPT_FLAGS) \
+$(hide) $(AAPT_ASAN_OPTIONS) $(AAPT) package -u $(PRIVATE_AAPT_FLAGS) \
     $(addprefix -c , $(PRIVATE_PRODUCT_AAPT_CONFIG)) \
     $(addprefix --preferred-density , $(PRIVATE_PRODUCT_AAPT_PREF_CONFIG)) \
     $(addprefix -M , $(PRIVATE_ANDROID_MANIFEST)) \
@@ -3150,44 +3156,6 @@
 endef
 
 ###########################################################
-# Link type checking
-###########################################################
-define check-link-type
-$(hide) mkdir -p $(dir $@) && rm -f $@
-$(hide) $(CHECK_LINK_TYPE) --makefile $(PRIVATE_MAKEFILE) --module $(PRIVATE_MODULE) \
-  --type "$(PRIVATE_LINK_TYPE)" $(addprefix --allowed ,$(PRIVATE_ALLOWED_TYPES)) \
-  $(addprefix --warn ,$(PRIVATE_WARN_TYPES)) $(PRIVATE_DEPS)
-$(hide) echo "$(PRIVATE_LINK_TYPE)" >$@
-endef
-
-define link-type-partitions
-ifndef LOCAL_IS_HOST_MODULE
-ifneq (true,$(LOCAL_UNINSTALLABLE_MODULE))
-ifneq ($(filter $(TARGET_OUT_VENDOR)/%,$(my_module_path)),)
-$(1): PRIVATE_LINK_TYPE += partition:vendor
-$(1): PRIVATE_WARN_TYPES += partition:data
-$(1): PRIVATE_ALLOWED_TYPES += partition:vendor partition:oem partition:odm
-else ifneq ($(filter $(TARGET_OUT_OEM)/%,$(my_module_path)),)
-$(1): PRIVATE_LINK_TYPE += partition:oem
-$(1): PRIVATE_WARN_TYPES += partition:data
-$(1): PRIVATE_ALLOWED_TYPES += partition:vendor partition:oem partition:odm
-else ifneq ($(filter $(TARGET_OUT_ODM)/%,$(my_module_path)),)
-$(1): PRIVATE_LINK_TYPE += partition:odm
-$(1): PRIVATE_WARN_TYPES += partition:data
-$(1): PRIVATE_ALLOWED_TYPES += partition:vendor partition:oem partition:odm
-else ifneq ($(filter $(TARGET_OUT_DATA)/%,$(my_module_path)),)
-$(1): PRIVATE_LINK_TYPE += partition:data
-$(1): PRIVATE_ALLOWED_TYPES += partition:data partition:vendor partition:oem partition:odm
-else
-$(1): PRIVATE_WARN_TYPES += partition:vendor partition:oem partition:odm partition:data
-endif
-else # uninstallable module
-$(1): PRIVATE_ALLOWED_TYPES += partition:vendor partition:oem partition:odm partition:data
-endif
-endif
-endef
-
-###########################################################
 # Basic math functions for positive integers <= 100
 #
 # (SDK versions for example)
@@ -3253,11 +3221,12 @@
 ## Compatibility suite tools
 ###########################################################
 
-# Return a list of output directories for a given suite and the current LOCAL_MODULE
+# Return a list of output directories for a given suite and the current LOCAL_MODULE.
+# Can be passed a subdirectory to use for the common testcase directory.
 define compatibility_suite_dirs
   $(strip \
     $(COMPATIBILITY_TESTCASES_OUT_$(1)) \
-    $($(my_prefix)OUT_TESTCASES)/$(LOCAL_MODULE))
+    $($(my_prefix)OUT_TESTCASES)/$(LOCAL_MODULE)$(2))
 endef
 
 # For each suite:
@@ -3274,6 +3243,141 @@
 endef
 
 ###########################################################
+## Path Cleaning
+###########################################################
+
+# Remove "dir .." combinations (but keep ".. ..")
+#
+# $(1): The expanded path, where / is converted to ' ' to work with $(word)
+define _clean-path-strip-dotdot
+$(strip \
+  $(if $(word 2,$(1)),
+    $(if $(call streq,$(word 2,$(1)),..),
+      $(if $(call streq,$(word 1,$(1)),..),
+        $(word 1,$(1)) $(call _clean-path-strip-dotdot,$(wordlist 2,$(words $(1)),$(1)))
+      ,
+        $(call _clean-path-strip-dotdot,$(wordlist 3,$(words $(1)),$(1)))
+      )
+    ,
+      $(word 1,$(1)) $(call _clean-path-strip-dotdot,$(wordlist 2,$(words $(1)),$(1)))
+    )
+  ,
+    $(1)
+  )
+)
+endef
+
+# Remove any leading .. from the path (in case of /..)
+#
+# Should only be called if the original path started with /
+# $(1): The expanded path, where / is converted to ' ' to work with $(word)
+define _clean-path-strip-root-dotdots
+$(strip $(if $(call streq,$(firstword $(1)),..),
+  $(call _clean-path-strip-root-dotdots,$(wordlist 2,$(words $(1)),$(1))),
+  $(1)))
+endef
+
+# Call _clean-path-strip-dotdot until the path stops changing
+# $(1): Non-empty if this path started with a /
+# $(2): The expanded path, where / is converted to ' ' to work with $(word)
+define _clean-path-expanded
+$(strip \
+  $(eval _ep := $(call _clean-path-strip-dotdot,$(2)))
+  $(if $(1),$(eval _ep := $(call _clean-path-strip-root-dotdots,$(_ep))))
+  $(if $(call streq,$(2),$(_ep)),
+    $(_ep),
+    $(call _clean-path-expanded,$(1),$(_ep))))
+endef
+
+# Clean the file path -- remove //, dir/.., extra .
+#
+# This should be the same semantics as golang's filepath.Clean
+#
+# $(1): The file path to clean
+define clean-path
+$(strip \
+  $(if $(call streq,$(words $(1)),1),
+    $(eval _rooted := $(filter /%,$(1)))
+    $(eval _expanded_path := $(filter-out .,$(subst /,$(space),$(1))))
+    $(eval _path := $(if $(_rooted),/)$(subst $(space),/,$(call _clean-path-expanded,$(_rooted),$(_expanded_path))))
+    $(if $(_path),
+      $(_path),
+      .
+     )
+  ,
+    $(if $(call streq,$(words $(1)),0),
+      .,
+      $(error Call clean-path with only one path (without spaces))
+    )
+  )
+)
+endef
+
+ifeq ($(TEST_MAKE_clean_path),true)
+  define my_test
+    $(if $(call streq,$(call clean-path,$(1)),$(2)),,
+      $(eval my_failed := true)
+      $(warning clean-path test '$(1)': expected '$(2)', got '$(call clean-path,$(1))'))
+  endef
+  my_failed :=
+
+  # Already clean
+  $(call my_test,abc,abc)
+  $(call my_test,abc/def,abc/def)
+  $(call my_test,a/b/c,a/b/c)
+  $(call my_test,.,.)
+  $(call my_test,..,..)
+  $(call my_test,../..,../..)
+  $(call my_test,../../abc,../../abc)
+  $(call my_test,/abc,/abc)
+  $(call my_test,/,/)
+
+  # Empty is current dir
+  $(call my_test,,.)
+
+  # Remove trailing slash
+  $(call my_test,abc/,abc)
+  $(call my_test,abc/def/,abc/def)
+  $(call my_test,a/b/c/,a/b/c)
+  $(call my_test,./,.)
+  $(call my_test,../,..)
+  $(call my_test,../../,../..)
+  $(call my_test,/abc/,/abc)
+
+  # Remove doubled slash
+  $(call my_test,abc//def//ghi,abc/def/ghi)
+  $(call my_test,//abc,/abc)
+  $(call my_test,///abc,/abc)
+  $(call my_test,//abc//,/abc)
+  $(call my_test,abc//,abc)
+
+  # Remove . elements
+  $(call my_test,abc/./def,abc/def)
+  $(call my_test,/./abc/def,/abc/def)
+  $(call my_test,abc/.,abc)
+
+  # Remove .. elements
+  $(call my_test,abc/def/ghi/../jkl,abc/def/jkl)
+  $(call my_test,abc/def/../ghi/../jkl,abc/jkl)
+  $(call my_test,abc/def/..,abc)
+  $(call my_test,abc/def/../..,.)
+  $(call my_test,/abc/def/../..,/)
+  $(call my_test,abc/def/../../..,..)
+  $(call my_test,/abc/def/../../..,/)
+  $(call my_test,abc/def/../../../ghi/jkl/../../../mno,../../mno)
+  $(call my_test,/../abc,/abc)
+
+  # Combinations
+  $(call my_test,abc/./../def,def)
+  $(call my_test,abc//./../def,def)
+  $(call my_test,abc/../../././../def,../../def)
+
+  ifdef my_failed
+    $(error failed clean-path test)
+  endif
+endif
+
+###########################################################
 ## Other includes
 ###########################################################
 
@@ -3352,4 +3456,4 @@
   $(eval include $(BUILD_SYSTEM)/generate_enforce_rro.mk) \
   $(eval ALL_MODULES.$(enforce_rro_source_module).REQUIRED += $(enforce_rro_module)) \
 )
-endef
\ No newline at end of file
+endef
diff --git a/core/envsetup.mk b/core/envsetup.mk
index 67ac751..a734cc7 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -79,6 +79,20 @@
 CORRECT_BUILD_ENV_SEQUENCE_NUMBER := 13
 
 # ---------------------------------------------------------------
+# Whether we can expect a full build graph
+ALLOW_MISSING_DEPENDENCIES := $(filter true,$(ALLOW_MISSING_DEPENDENCIES))
+ifneq ($(TARGET_BUILD_APPS),)
+ALLOW_MISSING_DEPENDENCIES := true
+endif
+ifneq ($(filter true,$(SOONG_ALLOW_MISSING_DEPENDENCIES)),)
+ALLOW_MISSING_DEPENDENCIES := true
+endif
+ifneq ($(ONE_SHOT_MAKEFILE),)
+ALLOW_MISSING_DEPENDENCIES := true
+endif
+.KATI_READONLY := ALLOW_MISSING_DEPENDENCIES
+
+# ---------------------------------------------------------------
 # The product defaults to generic on hardware
 # NOTE: This will be overridden in product_config.mk if make
 # was invoked with a PRODUCT-xxx-yyy goal.
@@ -173,6 +187,12 @@
 TARGET_COPY_OUT_ODM := odm
 TARGET_COPY_OUT_ROOT := root
 TARGET_COPY_OUT_RECOVERY := recovery
+
+# Returns the non-sanitized version of the path provided in $1.
+define get_non_asan_path
+$(patsubst $(PRODUCT_OUT)/$(TARGET_COPY_OUT_ASAN)/%,$(PRODUCT_OUT)/%,$1)
+endef
+
 ###########################################
 # Define TARGET_COPY_OUT_VENDOR to a placeholder, for at this point
 # we don't know if the device wants to build a separate vendor.img
diff --git a/core/fuzz_test.mk b/core/fuzz_test.mk
index f6d6e9a..4d41871 100644
--- a/core/fuzz_test.mk
+++ b/core/fuzz_test.mk
@@ -8,8 +8,8 @@
     $(error $(LOCAL_PATH): $(LOCAL_MODULE): NDK fuzz tests are not supported.)
 endif
 
-LOCAL_CFLAGS += -fsanitize-coverage=edge,indirect-calls,8bit-counters,trace-cmp
-LOCAL_STATIC_LIBRARIES += libLLVMFuzzer
+LOCAL_CFLAGS += -fsanitize-coverage=trace-pc-guard
+LOCAL_STATIC_LIBRARIES += libFuzzer
 
 ifdef LOCAL_MODULE_PATH
 $(error $(LOCAL_PATH): Do not set LOCAL_MODULE_PATH when building test $(LOCAL_MODULE))
diff --git a/core/host_dalvik_java_library.mk b/core/host_dalvik_java_library.mk
index 34e88ce..7101229 100644
--- a/core/host_dalvik_java_library.mk
+++ b/core/host_dalvik_java_library.mk
@@ -86,7 +86,7 @@
 ifndef LOCAL_JACK_ENABLED
 
 $(full_classes_compiled_jar): PRIVATE_JAVA_LAYERS_FILE := $(layers_file)
-$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(GLOBAL_JAVAC_DEBUG_FLAGS) $(LOCAL_JAVACFLAGS)
+$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(GLOBAL_JAVAC_DEBUG_FLAGS) $(LOCAL_JAVACFLAGS) $(annotation_processor_flags)
 $(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES :=
 $(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES :=
 $(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_PACKAGES :=
@@ -96,7 +96,10 @@
         $(full_java_lib_deps) \
         $(jar_manifest_file) \
         $(proto_java_sources_file_stamp) \
-        $(LOCAL_ADDITIONAL_DEPENDENCIES)
+        $(annotation_processor_deps) \
+        $(NORMALIZE_PATH) \
+        $(LOCAL_ADDITIONAL_DEPENDENCIES) \
+        | $(SOONG_JAVAC_WRAPPER)
 	$(transform-host-java-to-package)
 
 my_desugaring :=
diff --git a/core/host_java_library.mk b/core/host_java_library.mk
index f1da553..d30c90d 100644
--- a/core/host_java_library.mk
+++ b/core/host_java_library.mk
@@ -64,7 +64,7 @@
 endif
 
 $(full_classes_compiled_jar): PRIVATE_JAVA_LAYERS_FILE := $(layers_file)
-$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(GLOBAL_JAVAC_DEBUG_FLAGS) $(LOCAL_JAVACFLAGS)
+$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(GLOBAL_JAVAC_DEBUG_FLAGS) $(LOCAL_JAVACFLAGS) $(annotation_processor_flags)
 $(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES :=
 $(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES :=
 $(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_PACKAGES :=
@@ -74,9 +74,13 @@
         $(full_java_lib_deps) \
         $(jar_manifest_file) \
         $(proto_java_sources_file_stamp) \
+        $(annotation_processor_deps) \
         $(NORMALIZE_PATH) \
-        $(LOCAL_ADDITIONAL_DEPENDENCIES)
+        $(ZIPTIME) \
+        $(LOCAL_ADDITIONAL_DEPENDENCIES) \
+        | $(SOONG_JAVAC_WRAPPER)
 	$(transform-host-java-to-package)
+	$(remove-timestamps-from-package)
 
 javac-check : $(full_classes_compiled_jar)
 javac-check-$(LOCAL_MODULE) : $(full_classes_compiled_jar)
diff --git a/core/install_jni_libs_internal.mk b/core/install_jni_libs_internal.mk
index c5804a4..0e92153 100644
--- a/core/install_jni_libs_internal.mk
+++ b/core/install_jni_libs_internal.mk
@@ -54,7 +54,8 @@
 # The jni libraries will be installed to the system.img.
 my_jni_filenames := $(notdir $(my_jni_shared_libraries))
 # Make sure the JNI libraries get installed
-my_shared_library_path := $($(my_2nd_arch_prefix)TARGET_OUT$(partition_tag)_SHARED_LIBRARIES)
+my_shared_library_path := $(call get_non_asan_path,\
+  $($(my_2nd_arch_prefix)TARGET_OUT$(partition_tag)_SHARED_LIBRARIES))
 # Do not use order-only dependency, because we want to rebuild the image if a jni is updated.
 $(LOCAL_INSTALLED_MODULE) : $(addprefix $(my_shared_library_path)/, $(my_jni_filenames))
 
@@ -108,30 +109,18 @@
 
 # Verify that all included libraries are built against the NDK
 ifneq ($(strip $(LOCAL_JNI_SHARED_LIBRARIES)),)
-my_link_type := $(call intermediates-dir-for,APPS,$(LOCAL_MODULE))/$(my_2nd_arch_prefix)jni_link_type
-all_link_types: $(my_link_type)
-my_link_type_deps := $(strip \
-  $(foreach l,$(LOCAL_JNI_SHARED_LIBRARIES),\
-    $(call intermediates-dir-for,SHARED_LIBRARIES,$(l),,,$(my_2nd_arch_prefix))/link_type))
 ifneq ($(LOCAL_SDK_VERSION),)
-$(my_link_type): PRIVATE_LINK_TYPE := app:sdk
-$(my_link_type): PRIVATE_WARN_TYPES := native:platform
-$(my_link_type): PRIVATE_ALLOWED_TYPES := native:ndk
+my_link_type := app:sdk
+my_warn_types := native:platform
+my_allowed_types := native:ndk
 else
-$(my_link_type): PRIVATE_LINK_TYPE := app:platform
-$(my_link_type): PRIVATE_WARN_TYPES :=
-$(my_link_type): PRIVATE_ALLOWED_TYPES := native:ndk native:platform
+my_link_type := app:platform
+my_warn_types :=
+my_allowed_types := native:ndk native:platform
 endif
-$(eval $(call link-type-partitions,$(my_link_type)))
-$(my_link_type): PRIVATE_DEPS := $(my_link_type_deps)
-$(my_link_type): PRIVATE_MODULE := $(LOCAL_MODULE)
-$(my_link_type): PRIVATE_MAKEFILE := $(LOCAL_MODULE_MAKEFILE)
-$(my_link_type): $(my_link_type_deps) $(CHECK_LINK_TYPE)
-	@echo Check JNI module types: $@
-	$(check-link-type)
 
-$(LOCAL_BUILT_MODULE): | $(my_link_type)
+my_link_deps := $(addprefix SHARED_LIBRARIES:,$(LOCAL_JNI_SHARED_LIBRARIES))
 
-my_link_type :=
-my_link_type_deps :=
+my_common :=
+include $(BUILD_SYSTEM)/link_type.mk
 endif
diff --git a/core/jack-default.args b/core/jack-default.args
index 0232301..433bc53 100644
--- a/core/jack-default.args
+++ b/core/jack-default.args
@@ -5,3 +5,5 @@
 -D jack.reporter.level.file=error=--,warning=-
 --verbose error
 -D jack.jayce.cache=false
+-D jack.lambda.grouping-scope=package
+-D jack.lambda.simplify-stateless=true
diff --git a/core/java.mk b/core/java.mk
index b31e316..95be4f9 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -428,7 +428,7 @@
 LOCAL_JAVACFLAGS += $(LOCAL_ERROR_PRONE_FLAGS)
 endif
 
-$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(GLOBAL_JAVAC_DEBUG_FLAGS) $(LOCAL_JAVACFLAGS)
+$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(GLOBAL_JAVAC_DEBUG_FLAGS) $(LOCAL_JAVACFLAGS) $(annotation_processor_flags)
 $(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES := $(LOCAL_JAR_EXCLUDE_FILES)
 $(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES := $(LOCAL_JAR_PACKAGES)
 $(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_PACKAGES := $(LOCAL_JAR_EXCLUDE_PACKAGES)
@@ -441,8 +441,10 @@
         $(layers_file) \
         $(RenderScript_file_stamp) \
         $(proto_java_sources_file_stamp) \
+        $(annotation_processor_deps) \
         $(NORMALIZE_PATH) \
-        $(LOCAL_ADDITIONAL_DEPENDENCIES)
+        $(LOCAL_ADDITIONAL_DEPENDENCIES) \
+        | $(SOONG_JAVAC_WRAPPER)
 	$(transform-java-to-classes.jar)
 
 javac-check : $(full_classes_compiled_jar)
@@ -611,7 +613,7 @@
 endif
 
 # If not using jack and building against the current SDK version then filter
-# out junit and android.test classes from the application that are to be
+# out the junit, android.test and c.a.i.u.Predicate classes that are to be
 # removed from the Android API as part of b/30188076 but which are still
 # present in the Android API. This is to allow changes to be made to the
 # build to statically include those classes into the application without
@@ -620,7 +622,7 @@
 ifndef LOCAL_JACK_ENABLED
 ifdef LOCAL_SDK_VERSION
 ifeq (,$(filter-out current system_current test_current, $(LOCAL_SDK_VERSION)))
-proguard_injar_filters := (!junit/framework/**,!junit/runner/**,!android/test/**)
+proguard_injar_filters := (!junit/framework/**,!junit/runner/**,!android/test/**,!com/android/internal/util/*)
 endif
 endif
 endif
@@ -736,13 +738,14 @@
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_PROGUARD_FLAGS :=
 endif # LOCAL_PROGUARD_ENABLED defined
 
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_FLAGS := $(GLOBAL_JAVAC_DEBUG_FLAGS) $(LOCAL_JACK_FLAGS)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_FLAGS := $(GLOBAL_JAVAC_DEBUG_FLAGS) $(LOCAL_JACK_FLAGS) $(annotation_processor_flags)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_VERSION := $(LOCAL_JACK_VERSION)
 
 jack_all_deps := $(java_sources) $(java_resource_sources) $(full_jack_deps) \
         $(jar_manifest_file) $(layers_file) $(RenderScript_file_stamp) \
         $(common_proguard_flag_files) $(proguard_flag_files) \
-        $(proto_java_sources_file_stamp) $(LOCAL_ADDITIONAL_DEPENDENCIES) $(LOCAL_JARJAR_RULES) \
+        $(proto_java_sources_file_stamp) $(annotation_processor_deps) \
+        $(LOCAL_ADDITIONAL_DEPENDENCIES) $(LOCAL_JARJAR_RULES) \
         $(NORMALIZE_PATH) $(JACK_DEFAULT_ARGS) $(JACK)
 
 $(jack_check_timestamp): $(jack_all_deps) | setup-jack-server
diff --git a/core/java_common.mk b/core/java_common.mk
index 03856ac..555712c 100644
--- a/core/java_common.mk
+++ b/core/java_common.mk
@@ -153,6 +153,20 @@
 need_compile_java := $(strip $(all_java_sources)$(all_res_assets)$(java_resource_sources))$(LOCAL_STATIC_JAVA_LIBRARIES)$(filter true,$(LOCAL_SOURCE_FILES_ALL_GENERATED))
 ifdef need_compile_java
 
+annotation_processor_flags :=
+annotation_processor_deps :=
+
+ifdef LOCAL_ANNOTATION_PROCESSORS
+  annotation_processor_jars := $(call java-lib-deps,$(LOCAL_ANNOTATION_PROCESSORS),true)
+  annotation_processor_flags += -processorpath $(call normalize-path-list,$(annotation_processor_jars))
+  annotation_processor_deps += $(annotation_processor_jars)
+
+  # b/25860419: annotation processors must be explicitly specified for grok
+  annotation_processor_flags += $(foreach class,$(LOCAL_ANNOTATION_PROCESSOR_CLASSES),-processor $(class))
+
+  annotation_processor_jars :=
+endif
+
 full_static_java_libs := \
     $(foreach lib,$(LOCAL_STATIC_JAVA_LIBRARIES), \
       $(call intermediates-dir-for, \
@@ -164,6 +178,7 @@
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_ASSET_DIR := $(LOCAL_ASSET_DIR)
 
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_CLASS_INTERMEDIATES_DIR := $(intermediates.COMMON)/classes
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_ANNO_INTERMEDIATES_DIR := $(intermediates.COMMON)/anno
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_SOURCE_INTERMEDIATES_DIR := $(intermediates.COMMON)/src
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_HAS_PROTO_SOURCES := $(if $(proto_sources),true)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_PROTO_SOURCE_INTERMEDIATES_DIR := $(intermediates.COMMON)/proto
@@ -386,35 +401,24 @@
 # Verify that all libraries are safe to use
 ###########################################################
 ifndef LOCAL_IS_HOST_MODULE
-my_link_type := $(intermediates.COMMON)/link_type
-all_link_types: $(my_link_type)
-my_link_type_deps := $(strip \
-  $(foreach lib,$(LOCAL_STATIC_JAVA_LIBRARIES),\
-    $(call intermediates-dir-for, \
-      JAVA_LIBRARIES,$(lib),,COMMON)/link_type) \
-  $(foreach lib,$(apk_libraries), \
-    $(call intermediates-dir-for, \
-      APPS,$(lib),,COMMON)/link_type))
 ifeq ($(LOCAL_SDK_VERSION),system_current)
-$(my_link_type): PRIVATE_LINK_TYPE := java:system
-$(my_link_type): PRIVATE_WARN_TYPES := java:platform
-$(my_link_type): PRIVATE_ALLOWED_TYPES := java:sdk java:system
+my_link_type := java:system
+my_warn_types := java:platform
+my_allowed_types := java:sdk java:system
 else ifneq ($(LOCAL_SDK_VERSION),)
-$(my_link_type): PRIVATE_LINK_TYPE := java:sdk
-$(my_link_type): PRIVATE_WARN_TYPES := java:system java:platform
-$(my_link_type): PRIVATE_ALLOWED_TYPES := java:sdk
+my_link_type := java:sdk
+my_warn_types := java:system java:platform
+my_allowed_types := java:sdk
 else
-$(my_link_type): PRIVATE_LINK_TYPE := java:platform
-$(my_link_type): PRIVATE_WARN_TYPES :=
-$(my_link_type): PRIVATE_ALLOWED_TYPES := java:sdk java:system java:platform
+my_link_type := java:platform
+my_warn_types :=
+my_allowed_types := java:sdk java:system java:platform
 endif
-$(eval $(call link-type-partitions,$(my_link_type)))
-$(my_link_type): PRIVATE_DEPS := $(my_link_type_deps)
-$(my_link_type): PRIVATE_MODULE := $(LOCAL_MODULE)
-$(my_link_type): PRIVATE_MAKEFILE := $(LOCAL_MODULE_MAKEFILE)
-$(my_link_type): $(my_link_type_deps) $(CHECK_LINK_TYPE)
-	@echo Check Java library module types: $@
-	$(check-link-type)
 
-$(LOCAL_BUILT_MODULE): $(my_link_type)
+my_link_deps := $(addprefix JAVA_LIBRARIES:,$(LOCAL_STATIC_JAVA_LIBRARIES))
+my_link_deps += $(addprefix APPS:,$(apk_libraries))
+
+my_2nd_arch_prefix := $(LOCAL_2ND_ARCH_VAR_PREFIX)
+my_common := COMMON
+include $(BUILD_SYSTEM)/link_type.mk
 endif  # !LOCAL_IS_HOST_MODULE
diff --git a/core/link_type.mk b/core/link_type.mk
new file mode 100644
index 0000000..ff525cb
--- /dev/null
+++ b/core/link_type.mk
@@ -0,0 +1,27 @@
+# Inputs:
+#   LOCAL_MODULE_CLASS, LOCAL_MODULE, LOCAL_MODULE_MAKEFILE, LOCAL_BUILT_MODULE
+#   from base_rules.mk: my_kind, my_host_cross
+#   my_common: empty or COMMON, like the argument to intermediates-dir-for
+#   my_2nd_arch_prefix: usually LOCAL_2ND_ARCH_VAR_PREFIX, separate for JNI installation
+#
+#   my_link_type: the tags to apply to this module
+#   my_warn_types: the tags to warn about in our dependencies
+#   my_allowed_types: the tags to allow in our dependencies
+#   my_link_deps: the dependencies, in the form of <MODULE_CLASS>:<name>
+#
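+# A typical caller (see binary.mk or java_common.mk in this change) sets,
+# for example:
+#   my_link_type := native:platform
+#   my_warn_types :=
+#   my_allowed_types := native:ndk native:platform
+#   my_link_deps := $(addprefix STATIC_LIBRARIES:,$(my_static_libraries))
+#   my_2nd_arch_prefix := $(LOCAL_2ND_ARCH_VAR_PREFIX)
+#   my_common :=
+#   include $(BUILD_SYSTEM)/link_type.mk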
+
+my_link_prefix := LINK_TYPE:$(call find-idf-prefix,$(my_kind),$(my_host_cross)):$(if $(my_common),$(my_common):_,_:$(if $(my_2nd_arch_prefix),$(my_2nd_arch_prefix),_))
+link_type := $(my_link_prefix):$(LOCAL_MODULE_CLASS):$(LOCAL_MODULE)
+ALL_LINK_TYPES := $(ALL_LINK_TYPES) $(link_type)
+$(link_type).TYPE := $(my_link_type)
+$(link_type).MAKEFILE := $(LOCAL_MODULE_MAKEFILE)
+$(link_type).WARN := $(my_warn_types)
+$(link_type).ALLOWED := $(my_allowed_types)
+$(link_type).DEPS := $(addprefix $(my_link_prefix):,$(my_link_deps))
+$(link_type).BUILT := $(LOCAL_BUILT_MODULE)
+
+link_type :=
+my_allowed_types :=
+my_link_prefix :=
+my_link_type :=
+my_warn_types :=
diff --git a/core/main.mk b/core/main.mk
index 54ab832..3bd95bc 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -88,146 +88,6 @@
 # Include the google-specific config
 -include vendor/google/build/config.mk
 
-VERSION_CHECK_SEQUENCE_NUMBER := 6
-JAVA_NOT_REQUIRED_CHECKED :=
--include $(OUT_DIR)/versions_checked.mk
-ifneq ($(VERSION_CHECK_SEQUENCE_NUMBER)$(JAVA_NOT_REQUIRED),$(VERSIONS_CHECKED)$(JAVA_NOT_REQUIRED_CHECKED))
-
-$(info Checking build tools versions...)
-
-# check for a case sensitive file system
-ifneq (a,$(shell mkdir -p $(OUT_DIR) ; \
-                echo a > $(OUT_DIR)/casecheck.txt; \
-                    echo B > $(OUT_DIR)/CaseCheck.txt; \
-                cat $(OUT_DIR)/casecheck.txt))
-$(warning ************************************************************)
-$(warning You are building on a case-insensitive filesystem.)
-$(warning Please move your source tree to a case-sensitive filesystem.)
-$(warning ************************************************************)
-$(error Case-insensitive filesystems not supported)
-endif
-
-# Make sure that there are no spaces in the absolute path; the
-# build system can't deal with them.
-ifneq ($(words $(shell pwd)),1)
-$(warning ************************************************************)
-$(warning You are building in a directory whose absolute path contains)
-$(warning a space character:)
-$(warning $(space))
-$(warning "$(shell pwd)")
-$(warning $(space))
-$(warning Please move your source tree to a path that does not contain)
-$(warning any spaces.)
-$(warning ************************************************************)
-$(error Directory names containing spaces not supported)
-endif
-
-ifneq ($(JAVA_NOT_REQUIRED),true)
-java_version_str := $(shell unset _JAVA_OPTIONS && java -version 2>&1)
-javac_version_str := $(shell unset _JAVA_OPTIONS && javac -version 2>&1)
-
-# Check for the correct version of java, should be 1.8 by
-# default and only 1.7 if LEGACY_USE_JAVA7 is set.
-ifeq ($(LEGACY_USE_JAVA7),) # if LEGACY_USE_JAVA7 == ''
-required_version := "1.8.x"
-required_javac_version := "1.8"
-java_version := $(shell echo '$(java_version_str)' | grep '[ "]1\.8[\. "$$]')
-javac_version := $(shell echo '$(javac_version_str)' | grep '[ "]1\.8[\. "$$]')
-else
-required_version := "1.7.x"
-required_javac_version := "1.7"
-java_version := $(shell echo '$(java_version_str)' | grep '^java .*[ "]1\.7[\. "$$]')
-javac_version := $(shell echo '$(javac_version_str)' | grep '[ "]1\.7[\. "$$]')
-endif # if LEGACY_USE_JAVA7 == ''
-
-ifeq ($(strip $(java_version)),)
-$(info ************************************************************)
-$(info You are attempting to build with the incorrect version)
-$(info of java.)
-$(info $(space))
-$(info Your version is: $(java_version_str).)
-$(info The required version is: $(required_version))
-$(info $(space))
-$(info Please follow the machine setup instructions at)
-$(info $(space)$(space)$(space)$(space)https://source.android.com/source/initializing.html)
-$(info ************************************************************)
-$(error stop)
-endif
-
-# Check for the current JDK.
-#
-# For Java 1.7/1.8, we require OpenJDK on linux and Oracle JDK on Mac OS.
-requires_openjdk := false
-ifeq ($(BUILD_OS),linux)
-requires_openjdk := true
-endif
-
-
-# Check for the current jdk
-ifeq ($(requires_openjdk), true)
-# The user asked for openjdk, so check that the host
-# java version is really openjdk and not some other JDK.
-ifeq ($(shell echo '$(java_version_str)' | grep -i openjdk),)
-$(info ************************************************************)
-$(info You asked for an OpenJDK based build but your version is)
-$(info $(java_version_str).)
-$(info ************************************************************)
-$(error stop)
-endif # java version is not OpenJdk
-else # if requires_openjdk
-ifneq ($(shell echo '$(java_version_str)' | grep -i openjdk),)
-$(info ************************************************************)
-$(info You are attempting to build with an unsupported JDK.)
-$(info $(space))
-$(info You use OpenJDK but only Sun/Oracle JDK is supported.)
-$(info Please follow the machine setup instructions at)
-$(info $(space)$(space)$(space)$(space)https://source.android.com/source/download.html)
-$(info ************************************************************)
-$(error stop)
-endif # java version is not Sun Oracle JDK
-endif # if requires_openjdk
-
-KNOWN_INCOMPATIBLE_JAVAC_VERSIONS := google
-incompat_javac := $(foreach v,$(KNOWN_INCOMPATIBLE_JAVAC_VERSIONS),$(findstring $(v),$(javac_version_str)))
-ifneq ($(incompat_javac),)
-javac_version :=
-endif
-
-# Check for the correct version of javac
-ifeq ($(strip $(javac_version)),)
-$(info ************************************************************)
-$(info You are attempting to build with the incorrect version)
-$(info of javac.)
-$(info $(space))
-$(info Your version is: $(javac_version_str).)
-ifneq ($(incompat_javac),)
-$(info This '$(incompat_javac)' version is not supported for Android platform builds.)
-$(info Use a publicly available JDK and make sure you have run envsetup.sh / lunch.)
-else
-$(info The required version is: $(required_javac_version))
-endif
-$(info $(space))
-$(info Please follow the machine setup instructions at)
-$(info $(space)$(space)$(space)$(space)https://source.android.com/source/download.html)
-$(info ************************************************************)
-$(error stop)
-endif
-
-endif # if JAVA_NOT_REQUIRED
-
-ifndef BUILD_EMULATOR
-  # Emulator binaries are now provided under prebuilts/android-emulator/
-  BUILD_EMULATOR := false
-endif
-
-$(shell echo 'VERSIONS_CHECKED := $(VERSION_CHECK_SEQUENCE_NUMBER)' \
-        > $(OUT_DIR)/versions_checked.mk)
-$(shell echo 'BUILD_EMULATOR ?= $(BUILD_EMULATOR)' \
-        >> $(OUT_DIR)/versions_checked.mk)
-$(shell echo 'JAVA_NOT_REQUIRED_CHECKED := $(JAVA_NOT_REQUIRED)' \
-        >> $(OUT_DIR)/versions_checked.mk)
-endif
-
 # These are the modifier targets that don't do anything themselves, but
 # change the behavior of the build.
 # (must be defined before including definitions.make)
@@ -335,8 +195,8 @@
 
 # Boolean variable determining if Treble is fully enabled
 PRODUCT_FULL_TREBLE := false
-ifeq ($(PRODUCT_FULL_TREBLE_OVERRIDE),true)
-  PRODUCT_FULL_TREBLE := true
+ifneq ($(PRODUCT_FULL_TREBLE_OVERRIDE),)
+  PRODUCT_FULL_TREBLE := $(PRODUCT_FULL_TREBLE_OVERRIDE)
 else ifeq ($(PRODUCT_SHIPPING_API_LEVEL),)
   #$(warning no product shipping level defined)
 else ifneq ($(call math_gt_or_eq,$(PRODUCT_SHIPPING_API_LEVEL),26),)
@@ -459,8 +319,8 @@
 
 ADDITIONAL_BUILD_PROPERTIES += net.bt.name=Android
 
-# enable vm tracing in files for now to help track
-# the cause of ANRs in the content process
+# Sets the location that the runtime dumps stack traces to when signalled
+# with SIGQUIT. Stack trace dumping is turned on for all android builds.
 ADDITIONAL_BUILD_PROPERTIES += dalvik.vm.stack-trace-file=/data/anr/traces.txt
 
 # ------------------------------------------------------------
@@ -732,6 +592,168 @@
 deps :=
 add-required-deps :=
 
+################################################################################
+# Link type checking
+#
+# ALL_LINK_TYPES contains a list of all link type prefixes (generally one per
+# module, but APKs can "link" to both java and native code). The link type
+# prefix consists of all the information needed by intermediates-dir-for:
+#
+#  LINK_TYPE:TARGET:_:2ND:STATIC_LIBRARIES:libfoo
+#
+#   1: LINK_TYPE literal
+#   2: prefix
+#     - TARGET
+#     - HOST
+#     - HOST_CROSS
+#     - AUX
+#   3: Whether to use the common intermediates directory or not
+#     - _
+#     - COMMON
+#   4: Whether it's the second arch or not
+#     - _
+#     - 2ND_
+#   5: Module Class
+#     - STATIC_LIBRARIES
+#     - SHARED_LIBRARIES
+#     - ...
+#   6: Module Name
+#
+# Then fields under that are separated by a period and the field name:
+#   - TYPE: the link types for this module
+#   - MAKEFILE: Where this module was defined
+#   - BUILT: The built module location
+#   - DEPS: the link type prefixes for the module's dependencies
+#   - ALLOWED: the link types to allow in this module's dependencies
+#   - WARN: the link types to warn about in this module's dependencies
+#
+# All of the dependency link types not listed in ALLOWED or WARN will become
+# errors.
+################################################################################
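For illustration only, the prefix layout described above decomposes roughly as in this hypothetical Python sketch (not part of the build; the real logic is the link-type-* macros below):

    # Sketch: split a link type prefix into its six fields. The example prefix
    # and field values mirror the comment above; this is not used by the build.
    def parse_link_type_prefix(prefix):
        literal, host_target, common, arch2nd, mod_class, name = prefix.split(':')
        return {
            'prefix': host_target,                          # TARGET / HOST / HOST_CROSS / AUX
            'common': '' if common == '_' else common,      # COMMON or empty
            '2nd_arch': '' if arch2nd == '_' else arch2nd,  # 2ND or empty
            'class': mod_class,                             # e.g. STATIC_LIBRARIES
            'name': name,                                   # e.g. libfoo
        }

    parse_link_type_prefix('LINK_TYPE:TARGET:_:2ND:STATIC_LIBRARIES:libfoo')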
+
+link_type_error :=
+
+define link-type-prefix
+$(word 2,$(subst :,$(space),$(1)))
+endef
+define link-type-common
+$(patsubst _,,$(word 3,$(subst :,$(space),$(1))))
+endef
+define link-type-2ndarchprefix
+$(patsubst _,,$(word 4,$(subst :,$(space),$(1))))
+endef
+define link-type-class
+$(word 5,$(subst :,$(space),$(1)))
+endef
+define link-type-name
+$(word 6,$(subst :,$(space),$(1)))
+endef
+define link-type-os
+$(strip $(eval _p := $(link-type-prefix))\
+  $(if $(filter HOST HOST_CROSS,$(_p)),\
+    $($(_p)_OS),\
+    $(if $(filter AUX,$(_p)),AUX,android)))
+endef
+define link-type-arch
+$($(link-type-prefix)_$(link-type-2ndarchprefix)ARCH)
+endef
+define link-type-name-variant
+$(link-type-name) ($(link-type-class) $(link-type-os)-$(link-type-arch))
+endef
+
+# $(1): the prefix of the module doing the linking
+# $(2): the prefix of the linked module
+define link-type-warning
+$(shell $(call echo-warning,$($(1).MAKEFILE),"$(call link-type-name,$(1)) ($($(1).TYPE)) should not link against $(call link-type-name,$(2)) ($(3))"))
+endef
+
+# $(1): the prefix of the module doing the linking
+# $(2): the prefix of the linked module
+define link-type-error
+$(shell $(call echo-error,$($(1).MAKEFILE),"$(call link-type-name,$(1)) ($($(1).TYPE)) can not link against $(call link-type-name,$(2)) ($(3))"))\
+$(eval link_type_error := true)
+endef
+
+link-type-missing :=
+ifneq ($(ALLOW_MISSING_DEPENDENCIES),true)
+  # Print an error message if the linked-to module is missing
+  # $(1): the prefix of the module doing the linking
+  # $(2): the prefix of the missing module
+  define link-type-missing
+    $(shell $(call echo-error,$($(1).MAKEFILE),"$(call link-type-name-variant,$(1)) missing $(call link-type-name-variant,$(2))"))\
+    $(eval available_variants := $(filter %:$(call link-type-name,$(2)),$(ALL_LINK_TYPES)))\
+    $(if $(available_variants),\
+      $(info Available variants:)\
+      $(foreach v,$(available_variants),$(info $(space)$(space)$(call link-type-name-variant,$(v)))))\
+    $(info You can set ALLOW_MISSING_DEPENDENCIES=true in your environment if this is intentional, but that may defer real problems until later in the build.)\
+    $(eval link_type_error := true)
+  endef
+else
+  define link-type-missing
+    $(eval $$(1).MISSING := true)
+  endef
+endif
+
+# Verify that $(1) can link against $(2)
+# Both $(1) and $(2) are the link type prefix defined above
+define verify-link-type
+$(foreach t,$($(2).TYPE),\
+  $(if $(filter-out $($(1).ALLOWED),$(t)),\
+    $(if $(filter $(t),$($(1).WARN)),\
+      $(call link-type-warning,$(1),$(2),$(t)),\
+      $(call link-type-error,$(1),$(2),$(t)))))
+endef
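In rough pseudocode (a hypothetical Python sketch, not the actual checker), the policy implemented by verify-link-type is:

    # Sketch of the verify-link-type policy: a dependency type listed in ALLOWED
    # passes, a type listed in WARN only warns, anything else is an error.
    # Type names here are illustrative.
    def verify_link_type(allowed, warn, dep_types):
        for t in dep_types:
            if t in allowed:
                continue
            if t in warn:
                print('warning: should not link against type %s' % t)
            else:
                print('error: can not link against type %s' % t)

    verify_link_type({'java:platform'}, {'java:sdk'}, ['java:system'])  # -> error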
+
+# TODO: Verify all branches/configs have reasonable warnings/errors, and remove
+# these overrides
+link-type-missing = $(eval $$(1).MISSING := true)
+verify-link-type = $(eval $$(1).MISSING := true)
+
+$(foreach lt,$(ALL_LINK_TYPES),\
+  $(foreach d,$($(lt).DEPS),\
+    $(if $($(d).TYPE),\
+      $(call verify-link-type,$(lt),$(d)),\
+      $(call link-type-missing,$(lt),$(d)))))
+
+ifdef link_type_error
+  $(error exiting from previous errors)
+endif
+
+# The intermediate filename for link type rules
+#
+# APPS are special -- they have up to three different rules:
+#  1. The COMMON rule for Java libraries
+#  2. The jni_link_type rule for embedded native code
+#  3. The 2ND_jni_link_type for the second architecture native code
+define link-type-file
+$(call intermediates-dir-for,$(link-type-class),$(link-type-name),$(filter AUX HOST HOST_CROSS,$(link-type-prefix)),$(link-type-common),$(link-type-2ndarchprefix),$(filter HOST_CROSS,$(link-type-prefix)))/$(if $(filter APPS,$(link-type-class)),$(if $(link-type-common),,$(link-type-2ndarchprefix)jni_))link_type
+endef
+
+# Write out the file-based link_type rules for the ALLOW_MISSING_DEPENDENCIES
+# case. We always need to write the file for mm to work, but only need to
+# check it if we weren't able to check it when reading the Android.mk files.
+define link-type-file-rule
+my_link_type_deps := $(foreach l,$($(1).DEPS),$(call link-type-file,$(l)))
+my_link_type_file := $(call link-type-file,$(1))
+$($(1).BUILT): | $$(my_link_type_file)
+$$(my_link_type_file): PRIVATE_DEPS := $$(my_link_type_deps)
+ifeq ($($(1).MISSING),true)
+$$(my_link_type_file): $(CHECK_LINK_TYPE)
+endif
+$$(my_link_type_file): $$(my_link_type_deps)
+	@echo Check module type: $$@
+	$$(hide) mkdir -p $$(dir $$@) && rm -f $$@
+ifeq ($($(1).MISSING),true)
+	$$(hide) $(CHECK_LINK_TYPE) --makefile $($(1).MAKEFILE) --module $(link-type-name) \
+	  --type "$($(1).TYPE)" $(addprefix --allowed ,$($(1).ALLOWED)) \
+	  $(addprefix --warn ,$($(1).WARN)) $$(PRIVATE_DEPS)
+endif
+	$$(hide) echo "$($(1).TYPE)" >$$@
+endef
+
+$(foreach lt,$(ALL_LINK_TYPES),\
+  $(eval $(call link-type-file-rule,$(lt))))
+
 # -------------------------------------------------------------------
 # Figure out our module sets.
 #
@@ -1114,14 +1136,6 @@
 .PHONY: findbugs
 findbugs: $(INTERNAL_FINDBUGS_HTML_TARGET) $(INTERNAL_FINDBUGS_XML_TARGET)
 
-.PHONY: clean
-clean:
-	@rm -rf $(OUT_DIR)/*
-	@echo "Entire build directory removed."
-
-.PHONY: clobber
-clobber: clean
-
 # The rules for dataclean and installclean are defined in cleanbuild.mk.
 
 #xxx scrape this from ALL_MODULE_NAME_TAGS
@@ -1146,7 +1160,4 @@
 ndk: $(SOONG_OUT_DIR)/ndk.timestamp
 .PHONY: ndk
 
-.PHONY: all_link_types
-all_link_types:
-
 endif # KATI
diff --git a/core/package.mk b/core/package.mk
index 4fe058d..f3713fc 100644
--- a/core/package.mk
+++ b/core/package.mk
@@ -4,13 +4,15 @@
 
 $(call record-module-type,PACKAGE)
 
-ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
-LOCAL_MULTILIB := first
-endif
-
 my_prefix := TARGET_
 include $(BUILD_SYSTEM)/multilib.mk
 
+ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
+  ifneq ($(TARGET_SUPPORTS_64_BIT_APPS)|$(my_module_multilib),|64)
+    my_module_multilib := first
+  endif
+endif
+
 ifeq ($(TARGET_SUPPORTS_32_BIT_APPS)|$(TARGET_SUPPORTS_64_BIT_APPS),true|true)
   # packages default to building for either architecture,
   # the preferred if its supported, otherwise the non-preferred.
diff --git a/core/package_internal.mk b/core/package_internal.mk
index 242203b..4003aaf 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -88,6 +88,7 @@
   LOCAL_RESOURCE_DIR := $(LOCAL_PATH)/res
 else
   need_compile_res := true
+  LOCAL_RESOURCE_DIR := $(foreach d,$(LOCAL_RESOURCE_DIR),$(call clean-path,$(d)))
 endif
 
 package_resource_overlays := $(strip \
@@ -374,7 +375,7 @@
 ifdef LOCAL_PACKAGE_SPLITS
 my_apk_split_configs := $(LOCAL_PACKAGE_SPLITS)
 my_split_suffixes := $(subst $(comma),_,$(my_apk_split_configs))
-built_apk_splits := $(foreach s,$(my_split_suffixes),$(built_module_path)/package_$(s).apk)
+built_apk_splits := $(foreach s,$(my_split_suffixes),$(intermediates)/package_$(s).apk)
 installed_apk_splits := $(foreach s,$(my_split_suffixes),$(my_module_path)/$(LOCAL_MODULE)_$(s).apk)
 endif
 
@@ -654,7 +655,7 @@
 # That way the build system will rerun the aapt after the user changes the splitting parameters.
 $(built_apk_splits): PRIVATE_PRIVATE_KEY := $(private_key)
 $(built_apk_splits): PRIVATE_CERTIFICATE := $(certificate)
-$(built_apk_splits) : $(built_module_path)/%.apk : $(LOCAL_BUILT_MODULE)
+$(built_apk_splits) : $(intermediates)/%.apk : $(LOCAL_BUILT_MODULE)
 	$(hide) if [ ! -f $@ ]; then \
 	  echo 'No $@ generated, check your apk splitting parameters.' 1>&2; \
 	  rm $<; exit 1; \
@@ -662,14 +663,14 @@
 	$(sign-package)
 
 # Rules to install the splits
-$(installed_apk_splits) : $(my_module_path)/$(LOCAL_MODULE)_%.apk : $(built_module_path)/package_%.apk
+$(installed_apk_splits) : $(my_module_path)/$(LOCAL_MODULE)_%.apk : $(intermediates)/package_%.apk
 	@echo "Install: $@"
 	$(copy-file-to-new-target)
 
 # Register the additional built and installed files.
 ALL_MODULES.$(my_register_name).INSTALLED += $(installed_apk_splits)
 ALL_MODULES.$(my_register_name).BUILT_INSTALLED += \
-  $(foreach s,$(my_split_suffixes),$(built_module_path)/package_$(s).apk:$(my_module_path)/$(LOCAL_MODULE)_$(s).apk)
+  $(foreach s,$(my_split_suffixes),$(intermediates)/package_$(s).apk:$(my_module_path)/$(LOCAL_MODULE)_$(s).apk)
 
 # Make sure to install the splits when you run "make <module_name>".
 $(my_all_targets): $(installed_apk_splits)
@@ -679,7 +680,7 @@
 $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
   $(eval my_compat_dist_$(suite) := $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
     $(foreach s,$(my_split_suffixes),\
-      $(built_module_path)/package_$(s).apk:$(dir)/$(LOCAL_MODULE)_$(s).apk))))
+      $(intermediates)/package_$(s).apk:$(dir)/$(LOCAL_MODULE)_$(s).apk))))
 
 $(call create-suite-dependencies)
 
diff --git a/core/prebuilt_internal.mk b/core/prebuilt_internal.mk
index 8a5470e..5c9d822 100644
--- a/core/prebuilt_internal.mk
+++ b/core/prebuilt_internal.mk
@@ -148,21 +148,20 @@
 endif
 export_cflags :=
 
-my_link_type := $(intermediates)/link_type
 ifdef LOCAL_SDK_VERSION
-$(my_link_type): PRIVATE_LINK_TYPE := native:ndk
+my_link_type := native:ndk
 else ifdef LOCAL_USE_VNDK
-$(my_link_type): PRIVATE_LINK_TYPE := native:vendor
+my_link_type := native:vendor
 else
-$(my_link_type): PRIVATE_LINK_TYPE := native:platform
+my_link_type := native:platform
 endif
-$(eval $(call link-type-partitions,$(my_link_type)))
-$(my_link_type):
-	@echo Check module type: $@
-	$(hide) mkdir -p $(dir $@) && rm -f $@
-	$(hide) echo "$(PRIVATE_LINK_TYPE)" >$@
 
-$(LOCAL_BUILT_MODULE) : | $(export_includes) $(my_link_type)
+# TODO: check dependencies of prebuilt files
+my_link_deps :=
+
+my_2nd_arch_prefix := $(LOCAL_2ND_ARCH_VAR_PREFIX)
+my_common :=
+include $(BUILD_SYSTEM)/link_type.mk
 endif  # prebuilt_module_is_a_library
 
 # The real dependency will be added after all Android.mks are loaded and the install paths
@@ -371,7 +370,7 @@
 ## Install split apks.
 ifdef LOCAL_PACKAGE_SPLITS
 # LOCAL_PACKAGE_SPLITS is a list of apks to be installed.
-built_apk_splits := $(addprefix $(built_module_path)/,$(notdir $(LOCAL_PACKAGE_SPLITS)))
+built_apk_splits := $(addprefix $(intermediates)/,$(notdir $(LOCAL_PACKAGE_SPLITS)))
 installed_apk_splits := $(addprefix $(my_module_path)/,$(notdir $(LOCAL_PACKAGE_SPLITS)))
 
 # Rules to sign the split apks.
@@ -384,19 +383,19 @@
 $(built_apk_splits) : $(LOCAL_CERTIFICATE).pk8 $(LOCAL_CERTIFICATE).x509.pem
 $(built_apk_splits) : PRIVATE_PRIVATE_KEY := $(LOCAL_CERTIFICATE).pk8
 $(built_apk_splits) : PRIVATE_CERTIFICATE := $(LOCAL_CERTIFICATE).x509.pem
-$(built_apk_splits) : $(built_module_path)/%.apk : $(my_src_dir)/%.apk
+$(built_apk_splits) : $(intermediates)/%.apk : $(my_src_dir)/%.apk
 	$(copy-file-to-new-target)
 	$(sign-package)
 
 # Rules to install the split apks.
-$(installed_apk_splits) : $(my_module_path)/%.apk : $(built_module_path)/%.apk
+$(installed_apk_splits) : $(my_module_path)/%.apk : $(intermediates)/%.apk
 	@echo "Install: $@"
 	$(copy-file-to-new-target)
 
 # Register the additional built and installed files.
 ALL_MODULES.$(my_register_name).INSTALLED += $(installed_apk_splits)
 ALL_MODULES.$(my_register_name).BUILT_INSTALLED += \
-  $(foreach s,$(LOCAL_PACKAGE_SPLITS),$(built_module_path)/$(notdir $(s)):$(my_module_path)/$(notdir $(s)))
+  $(foreach s,$(LOCAL_PACKAGE_SPLITS),$(intermediates)/$(notdir $(s)):$(my_module_path)/$(notdir $(s)))
 
 # Make sure to install the splits when you run "make <module_name>".
 $(my_all_targets): $(installed_apk_splits)
@@ -471,20 +470,20 @@
 $(common_classes_jar) $(common_classes_pre_proguard_jar) $(common_javalib_jar): PRIVATE_MODULE := $(LOCAL_MODULE)
 $(common_classes_jar) $(common_classes_pre_proguard_jar) $(common_javalib_jar): PRIVATE_PREFIX := $(my_prefix)
 
-my_link_type := $(intermediates.COMMON)/link_type
 ifeq ($(LOCAL_SDK_VERSION),system_current)
-$(my_link_type): PRIVATE_LINK_TYPE := java:system
+my_link_type := java:system
 else ifneq ($(LOCAL_SDK_VERSION),)
-$(my_link_type): PRIVATE_LINK_TYPE := java:sdk
+my_link_type := java:sdk
 else
-$(my_link_type): PRIVATE_LINK_TYPE := java:platform
+my_link_type := java:platform
 endif
-$(eval $(call link-type-partitions,$(my_link_type)))
-$(my_link_type):
-	@echo Check module type: $@
-	$(hide) mkdir -p $(dir $@) && rm -f $@
-	$(hide) echo "$(PRIVATE_LINK_TYPE)" >$@
-$(LOCAL_BUILT_MODULE): $(my_link_type)
+
+# TODO: check dependencies of prebuilt files
+my_link_deps :=
+
+my_2nd_arch_prefix := $(LOCAL_2ND_ARCH_VAR_PREFIX)
+my_common := COMMON
+include $(BUILD_SYSTEM)/link_type.mk
 
 ifeq ($(prebuilt_module_is_dex_javalib),true)
 # For prebuilt shared Java library we don't have classes.jar.
diff --git a/core/product.mk b/core/product.mk
index 1e5a30e..7742cc3 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -131,6 +131,7 @@
     VENDOR_EXCEPTION_PATHS \
     PRODUCT_ART_USE_READ_BARRIER \
     PRODUCT_IOT \
+    PRODUCT_SYSTEM_HEADROOM \
 
 
 
diff --git a/core/product_config.mk b/core/product_config.mk
index 57b7669..e069ff1 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -429,3 +429,7 @@
 # Package list to apply enforcing RRO.
 PRODUCT_ENFORCE_RRO_TARGETS := \
     $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_ENFORCE_RRO_TARGETS))
+
+# Add reserved headroom to a system image.
+PRODUCT_SYSTEM_HEADROOM := \
+    $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_HEADROOM))
diff --git a/core/soong_config.mk b/core/soong_config.mk
index 576c8ab..0a2208b 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -13,6 +13,17 @@
 endif
 endif
 
+# Converts a list to a JSON list.
+# $1: List separator.
+# $2: List.
+_json_list = [$(if $(2),"$(subst $(1),"$(comma)",$(2))")]
+
+# Converts a space-separated list to a JSON list.
+json_list = $(call _json_list,$(space),$(1))
+
+# Converts a comma-separated list to a JSON list.
+csv_to_json_list = $(call _json_list,$(comma),$(1))
+
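For reference, the three helpers above behave roughly like this Python sketch (illustrative only; the Make output differs slightly in whitespace):

    # Sketch: split on the given separator and emit a JSON array of strings.
    def _json_list(items, sep=' '):
        elems = [x for x in items.split(sep) if x]
        return '[%s]' % ','.join('"%s"' % x for x in elems)

    _json_list('address thread')     # -> ["address","thread"]
    _json_list('O,OMR1', sep=',')    # -> ["O","OMR1"]  (codename values are hypothetical)
    _json_list('')                   # -> []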
 # Create soong.variables with copies of makefile settings.  Runs every build,
 # but only updates soong.variables if it changes
 SOONG_VARIABLES_TMP := $(SOONG_VARIABLES).$$$$
@@ -23,13 +34,14 @@
 	echo '    "Make_suffix": "-$(TARGET_PRODUCT)",'; \
 	echo ''; \
 	echo '    "Platform_sdk_version": $(PLATFORM_SDK_VERSION),'; \
+	echo '    "Platform_version_all_codenames": $(call csv_to_json_list,$(PLATFORM_VERSION_ALL_CODENAMES)),'; \
 	echo '    "Unbundled_build": $(if $(TARGET_BUILD_APPS),true,false),'; \
 	echo '    "Brillo": $(if $(BRILLO),true,false),'; \
 	echo '    "Malloc_not_svelte": $(if $(filter true,$(MALLOC_SVELTE)),false,true),'; \
-	echo '    "Allow_missing_dependencies": $(if $(TARGET_BUILD_APPS)$(filter true,$(SOONG_ALLOW_MISSING_DEPENDENCIES)),true,false),'; \
-	echo '    "SanitizeHost": [$(if $(SANITIZE_HOST),"$(subst $(space),"$(comma)",$(SANITIZE_HOST))")],'; \
-	echo '    "SanitizeDevice": [$(if $(SANITIZE_TARGET),"$(subst $(space),"$(comma)",$(SANITIZE_TARGET))")],'; \
-	echo '    "SanitizeDeviceArch": [$(if $(SANITIZE_TARGET_ARCH),"$(subst $(space),"$(comma)",$(SANITIZE_TARGET_ARCH))")],'; \
+	echo '    "Allow_missing_dependencies": $(if $(ALLOW_MISSING_DEPENDENCIES),true,false),'; \
+	echo '    "SanitizeHost": $(call json_list,$(SANITIZE_HOST)),'; \
+	echo '    "SanitizeDevice": $(call json_list,$(SANITIZE_TARGET)),'; \
+	echo '    "SanitizeDeviceArch": $(call json_list,$(SANITIZE_TARGET_ARCH)),'; \
 	echo '    "HostStaticBinaries": $(if $(strip $(BUILD_HOST_static)),true,false),'; \
 	echo '    "Binder32bit": $(if $(BINDER32BIT),true,false),'; \
 	echo '    "DevicePrefer32BitExecutables": $(if $(filter true,$(TARGET_PREFER_32_BIT_EXECUTABLES)),true,false),'; \
@@ -42,8 +54,8 @@
 	echo '    "TidyChecks": "$(WITH_TIDY_CHECKS)",'; \
 	echo ''; \
 	echo '    "NativeCoverage": $(if $(filter true,$(NATIVE_COVERAGE)),true,false),'; \
-	echo '    "CoveragePaths": [$(if $(COVERAGE_PATHS),"$(subst $(space),"$(comma)",$(subst $(comma),$(space),$(COVERAGE_PATHS)))")],'; \
-	echo '    "CoverageExcludePaths": [$(if $(COVERAGE_EXCLUDE_PATHS),"$(subst $(space),"$(comma)",$(subst $(comma),$(space),$(COVERAGE_EXCLUDE_PATHS)))")],'; \
+	echo '    "CoveragePaths": $(call csv_to_json_list,$(COVERAGE_PATHS)),'; \
+	echo '    "CoverageExcludePaths": $(call csv_to_json_list,$(COVERAGE_EXCLUDE_PATHS)),'; \
 	echo ''; \
 	echo '    "DeviceName": "$(TARGET_DEVICE)",'; \
 	echo '    "DeviceArch": "$(TARGET_ARCH)",'; \
@@ -65,7 +77,9 @@
 	echo '    "CrossHostArch": "$(HOST_CROSS_ARCH)",'; \
 	echo '    "CrossHostSecondaryArch": "$(HOST_CROSS_2ND_ARCH)",'; \
 	echo '    "Safestack": $(if $(filter true,$(USE_SAFESTACK)),true,false),'; \
-	echo '    "EnableCFI": $(if $(filter true,$(ENABLE_CFI)),true,false),'; \
+	echo '    "EnableCFI": $(if $(filter false,$(ENABLE_CFI)),false,true),'; \
+	echo '    "Device_uses_hwc2": $(if $(filter true,$(TARGET_USES_HWC2)),true,false),'; \
+	echo '    "Override_rs_driver": "$(OVERRIDE_RS_DRIVER)",'; \
 	echo ''; \
 	echo '    "ArtUseReadBarrier": $(if $(filter false,$(PRODUCT_ART_USE_READ_BARRIER)),false,true),'; \
 	echo ''; \
diff --git a/core/static_java_library.mk b/core/static_java_library.mk
index 69196f4..6452fa8 100644
--- a/core/static_java_library.mk
+++ b/core/static_java_library.mk
@@ -39,6 +39,7 @@
 # A static Java library needs to explicitly set LOCAL_RESOURCE_DIR.
 ifdef LOCAL_RESOURCE_DIR
 need_compile_res := true
+LOCAL_RESOURCE_DIR := $(foreach d,$(LOCAL_RESOURCE_DIR),$(call clean-path,$(d)))
 endif
 ifdef LOCAL_USE_AAPT2
 ifneq ($(LOCAL_STATIC_ANDROID_LIBRARIES),)
@@ -186,9 +187,9 @@
 
 # if we have custom proguarding done use the proguarded classes jar instead of the normal classes jar
 ifeq ($(filter custom,$(LOCAL_PROGUARD_ENABLED)),custom)
-aar_classes_jar = $(full_classes_jar)
+aar_classes_jar = $(full_classes_proguard_jar)
 else
-aar_classes_jar = $(full_classes_pre_proguard_jar)
+aar_classes_jar = $(full_classes_jar)
 endif
 
 # Rule to build AAR, archive including classes.jar, resource, etc.
diff --git a/core/tasks/build_custom_images.mk b/core/tasks/build_custom_images.mk
index 8ebf89b..c5f2a96 100644
--- a/core/tasks/build_custom_images.mk
+++ b/core/tasks/build_custom_images.mk
@@ -54,6 +54,8 @@
   CUSTOM_IMAGE_SELINUX \
   CUSTOM_IMAGE_SUPPORT_VERITY \
   CUSTOM_IMAGE_VERITY_BLOCK_DEVICE \
+  CUSTOM_IMAGE_AVB_ENABLE \
+  CUSTOM_IMAGE_AVB_ADD_HASHTREE_FOOTER_ARGS \
 
 # We don't expect product makefile to inherit/override PRODUCT_CUSTOM_IMAGE_MAKEFILES,
 # so we don't put it in the _product_var_list.
diff --git a/core/tasks/device-tests.mk b/core/tasks/device-tests.mk
index 731937f..b1b936a 100644
--- a/core/tasks/device-tests.mk
+++ b/core/tasks/device-tests.mk
@@ -17,11 +17,13 @@
 
 device-tests-zip := $(PRODUCT_OUT)/device-tests.zip
 $(device-tests-zip): $(COMPATIBILITY.device-tests.FILES) $(SOONG_ZIP)
-	echo $(COMPATIBILITY.device-tests.FILES) > $@.list
+	echo $(sort $(COMPATIBILITY.device-tests.FILES)) > $@.list
 	sed -i -e 's/\s\+/\n/g' $@.list
 	grep $(HOST_OUT_TESTCASES) $@.list > $@-host.list || true
 	grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true
-	$(hide) $(SOONG_ZIP) -d -o $@ -C $(HOST_OUT) -l $@-host.list -C $(PRODUCT_OUT) -l $@-target.list
+	$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list -P target -C $(PRODUCT_OUT) -l $@-target.list
 
 device-tests: $(device-tests-zip)
 $(call dist-for-goals, device-tests, $(device-tests-zip))
+
+tests: device-tests
diff --git a/core/tasks/general-tests.mk b/core/tasks/general-tests.mk
index e02faa7..763dd51 100644
--- a/core/tasks/general-tests.mk
+++ b/core/tasks/general-tests.mk
@@ -16,11 +16,11 @@
 
 general-tests-zip := $(PRODUCT_OUT)/general-tests.zip
 $(general-tests-zip): $(COMPATIBILITY.general-tests.FILES) $(SOONG_ZIP)
-	echo $(COMPATIBILITY.general-tests.FILES) > $@.list
+	echo $(sort $(COMPATIBILITY.general-tests.FILES)) > $@.list
 	sed -i -e 's/\s\+/\n/g' $@.list
 	grep $(HOST_OUT_TESTCASES) $@.list > $@-host.list || true
 	grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true
-	$(hide) $(SOONG_ZIP) -d -o $@ -C $(HOST_OUT) -l $@-host.list -C $(PRODUCT_OUT) -l $@-target.list
+	$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list -P target -C $(PRODUCT_OUT) -l $@-target.list
 
 general-tests: $(general-tests-zip)
 $(call dist-for-goals, general-tests, $(general-tests-zip))
diff --git a/core/tasks/tools/build_custom_image.mk b/core/tasks/tools/build_custom_image.mk
index f0db476..f916e86 100644
--- a/core/tasks/tools/build_custom_image.mk
+++ b/core/tasks/tools/build_custom_image.mk
@@ -74,6 +74,11 @@
 $(my_built_custom_image): PRIVATE_VERITY_KEY := $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY)
 $(my_built_custom_image): PRIVATE_VERITY_BLOCK_DEVICE := $(CUSTOM_IMAGE_VERITY_BLOCK_DEVICE)
 $(my_built_custom_image): PRIVATE_DICT_FILE := $(CUSTOM_IMAGE_DICT_FILE)
+$(my_built_custom_image): PRIVATE_AVB_ENABLE := $(CUSTOM_IMAGE_AVB_ENABLE)
+$(my_built_custom_image): PRIVATE_AVB_ADD_HASHTREE_FOOTER_ARGS := $(CUSTOM_IMAGE_AVB_ADD_HASHTREE_FOOTER_ARGS)
+ifeq (true,$(CUSTOM_IMAGE_AVB_ENABLE))
+  $(my_built_custom_image): $(AVBTOOL)
+endif
 $(my_built_custom_image): $(INTERNAL_USERIMAGES_DEPS) $(my_built_modules) $(my_image_copy_files) \
   $(CUSTOM_IMAGE_DICT_FILE)
 	@echo "Build image $@"
@@ -97,6 +102,11 @@
 	    echo "verity_key=$(PRIVATE_VERITY_KEY)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt;\
 	    echo "verity_signer_cmd=$(VERITY_SIGNER)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt;\
 	    echo "verity_block_device=$(PRIVATE_VERITY_BLOCK_DEVICE)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt)
+	$(if $(PRIVATE_AVB_ENABLE),\
+	  $(hide) echo "avb_enable=$(PRIVATE_AVB_ENABLE)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt;\
+	    echo "avb_avbtool=$(AVBTOOL)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt;\
+	    echo "avb_signing_args=$(INTERNAL_AVB_SIGNING_ARGS)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt;\
+	    echo "avb_add_hashtree_footer_args=$(PRIVATE_AVB_ADD_HASHTREE_FOOTER_ARGS)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt)
 	$(if $(PRIVATE_DICT_FILE),\
 	  $(hide) echo "# Properties from $(PRIVATE_DICT_FILE)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt;\
 	    cat $(PRIVATE_DICT_FILE) >> $(PRIVATE_INTERMEDIATES)/image_info.txt)
diff --git a/core/tasks/tools/package-modules.mk b/core/tasks/tools/package-modules.mk
index 4dde9fd..63fab63 100644
--- a/core/tasks/tools/package-modules.mk
+++ b/core/tasks/tools/package-modules.mk
@@ -25,6 +25,12 @@
   $(eval my_modules_and_deps += $(_explicitly_required))\
 )
 
+# Ignore unknown installed files on partial builds
+my_missing_files :=
+ifneq ($(ALLOW_MISSING_DEPENDENCIES),true)
+my_missing_files = $(shell $(call echo-warning,$(my_makefile),$(my_package_name): Unknown installed file for module '$(1)'))
+endif
+
 # Iterate over modules' built files and installed files;
 # Calculate the dest files in the output zip file.
 
@@ -34,7 +40,7 @@
   $(eval _built_files := $(strip $(ALL_MODULES.$(m).BUILT_INSTALLED)\
     $(ALL_MODULES.$(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX).BUILT_INSTALLED)))\
   $(if $(_pickup_files)$(_built_files),,\
-    $(shell $(call echo-warning,$(my_makefile),$(my_package_name): Unknown installed file for module '$(m)')))\
+    $(call my_missing_files,$(m)))\
   $(eval my_pickup_files += $(_pickup_files))\
   $(foreach i, $(_built_files),\
     $(eval bui_ins := $(subst :,$(space),$(i)))\
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index 5066522..656d57e 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -49,6 +49,12 @@
 
 ifndef TARGET_PLATFORM_VERSION
   TARGET_PLATFORM_VERSION := $(DEFAULT_PLATFORM_VERSION)
+else ifeq ($(TARGET_PLATFORM_VERSION),OPR1)
+  # HACK: lunch currently sets TARGET_PLATFORM_VERSION to
+  # DEFAULT_PLATFORM_VERSION, which causes unnecessary pain
+  # when the old DEFAULT_PLATFORM_VERSION becomes invalid.
+  # For now, silently upgrade OPR1 to the current default.
+  TARGET_PLATFORM_VERSION := $(DEFAULT_PLATFORM_VERSION)
 endif
 
 ifeq (,$(filter $(ALLOWED_VERSIONS), $(TARGET_PLATFORM_VERSION)))
@@ -126,7 +132,22 @@
   # This is all of the development codenames that are active.  Should be either
   # the same as PLATFORM_VERSION_CODENAME or a comma-separated list of additional
   # codenames after PLATFORM_VERSION_CODENAME.
-  PLATFORM_VERSION_ALL_CODENAMES := $(PLATFORM_VERSION_CODENAME)
+  PLATFORM_VERSION_ALL_CODENAMES :=
+
+  # Build a list of all possible code names. Avoid duplicates, and stop when we
+  # reach a codename that matches PLATFORM_VERSION_CODENAME (anything beyond
+  # that is not included in our build).
+  _versions_in_target := \
+    $(call find_and_earlier,$(ALL_VERSIONS),$(TARGET_PLATFORM_VERSION))
+  $(foreach version,$(_versions_in_target),\
+    $(eval _codename := $(PLATFORM_VERSION_CODENAME.$(version)))\
+    $(if $(filter $(_codename),$(PLATFORM_VERSION_ALL_CODENAMES)),,\
+      $(eval PLATFORM_VERSION_ALL_CODENAMES += $(_codename))))
+
+  # And convert from space separated to comma separated.
+  PLATFORM_VERSION_ALL_CODENAMES := \
+    $(subst $(space),$(comma),$(strip $(PLATFORM_VERSION_ALL_CODENAMES)))
+
 endif
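The loop above amounts to the following, roughly, in a hypothetical Python sketch (the version list, codename table and example values are assumptions for illustration):

    # Sketch: take every version up to and including the target, collect each
    # version's codename once, then join the result with commas.
    def all_codenames(all_versions, codename_of, target):
        in_target = all_versions[:all_versions.index(target) + 1]
        seen = []
        for v in in_target:
            c = codename_of.get(v, '')
            if c and c not in seen:
                seen.append(c)
        return ','.join(seen)

    all_codenames(['OPR1', 'OPD1'], {'OPR1': 'O', 'OPD1': 'O'}, 'OPD1')  # -> 'O'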
 
 ifeq (REL,$(PLATFORM_VERSION_CODENAME))
diff --git a/envsetup.sh b/envsetup.sh
index ec6c960..6aaa8c9 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -610,7 +610,11 @@
 
     export TARGET_PRODUCT=$(get_build_var TARGET_PRODUCT)
     export TARGET_BUILD_VARIANT=$(get_build_var TARGET_BUILD_VARIANT)
-    export TARGET_PLATFORM_VERSION=$(get_build_var TARGET_PLATFORM_VERSION)
+    if [ -n "$version" ]; then
+      export TARGET_PLATFORM_VERSION=$(get_build_var TARGET_PLATFORM_VERSION)
+    else
+      unset TARGET_PLATFORM_VERSION
+    fi
     export TARGET_BUILD_TYPE=release
 
     echo
diff --git a/target/product/aosp_arm64_ab.mk b/target/product/aosp_arm64_ab.mk
index d885aa7..9a9107a 100644
--- a/target/product/aosp_arm64_ab.mk
+++ b/target/product/aosp_arm64_ab.mk
@@ -74,8 +74,6 @@
     android.hardware.wifi@1.0 \
     android.hardware.wifi.supplicant@1.0 \
     android.hidl.allocator@1.0 \
-    android.hidl.base@1.0 \
-    android.hidl.manager@1.0 \
     android.hidl.memory@1.0 \
 
 PRODUCT_PACKAGES += \
diff --git a/target/product/base.mk b/target/product/base.mk
index 89a2aaa..ad4c133 100644
--- a/target/product/base.mk
+++ b/target/product/base.mk
@@ -147,7 +147,8 @@
 
 # Packages included only for eng or userdebug builds, previously debug tagged
 PRODUCT_PACKAGES_DEBUG := \
-    perfprofd
+    perfprofd \
+    sqlite3
 
 PRODUCT_COPY_FILES := $(call add-to-product-copy-files-if-exists,\
     frameworks/base/preloaded-classes:system/etc/preloaded-classes)
diff --git a/target/product/embedded.mk b/target/product/embedded.mk
index 06c9c13..6217883 100644
--- a/target/product/embedded.mk
+++ b/target/product/embedded.mk
@@ -34,8 +34,6 @@
     dumpsys \
     fastboot \
     gralloc.default \
-    grep \
-    gzip \
     healthd \
     hwservicemanager \
     init \
@@ -72,17 +70,13 @@
     logcat \
     logwrapper \
     lshal \
-    mkshrc \
-    reboot \
     recovery \
     service \
     servicemanager \
-    sh \
+    shell_and_utilities \
     storaged \
     surfaceflinger \
     tombstoned \
-    toolbox \
-    toybox \
     tzdatacheck \
     vndservice \
     vndservicemanager \
diff --git a/target/product/product_launched_with_n_mr1.mk b/target/product/product_launched_with_n_mr1.mk
new file mode 100644
index 0000000..65d4d3f
--- /dev/null
+++ b/target/product/product_launched_with_n_mr1.mk
@@ -0,0 +1,2 @@
+# PRODUCT_SHIPPING_API_LEVEL indicates the first API level the device was commercially launched on.
+PRODUCT_SHIPPING_API_LEVEL := 25
diff --git a/tests/envsetup_tests.sh b/tests/envsetup_tests.sh
index 4aae255..abdcd56 100755
--- a/tests/envsetup_tests.sh
+++ b/tests/envsetup_tests.sh
@@ -19,8 +19,9 @@
 valid_version=PPR1
 
 # lunch tests
-check_lunch "aosp_arm64"                                "aosp_arm64" "eng"       "$default_version"
-check_lunch "aosp_arm64-userdebug"                      "aosp_arm64" "userdebug" "$default_version"
+check_lunch "aosp_arm64"                                "aosp_arm64" "eng"       ""
+check_lunch "aosp_arm64-userdebug"                      "aosp_arm64" "userdebug" ""
+check_lunch "aosp_arm64-userdebug-$default_version"     "aosp_arm64" "userdebug" "$default_version"
 check_lunch "aosp_arm64-userdebug-$valid_version"       "aosp_arm64" "userdebug" "$valid_version"
 check_lunch "abc"                                       "" "" ""
 check_lunch "aosp_arm64-abc"                            "" "" ""
diff --git a/tools/checkowners.py b/tools/checkowners.py
index 8f450e7..b874955 100755
--- a/tools/checkowners.py
+++ b/tools/checkowners.py
@@ -5,6 +5,7 @@
 import argparse
 import re
 import sys
+import urllib
 import urllib2
 
 parser = argparse.ArgumentParser(description='Check OWNERS file syntax')
@@ -29,7 +30,8 @@
 
 def find_address(address):
   if address not in checked_addresses:
-    request = gerrit_server + '/accounts/?suggest&q=' + address
+    request = (gerrit_server + '/accounts/?n=1&o=ALL_EMAILS&q=email:'
+               + urllib.quote(address))
     echo('Checking email address: ' + address)
     result = urllib2.urlopen(request).read()
     expected = '"email": "' + address + '"'
diff --git a/tools/fs_config/Android.mk b/tools/fs_config/Android.mk
index 65f8a08..dcd41aa 100644
--- a/tools/fs_config/Android.mk
+++ b/tools/fs_config/Android.mk
@@ -113,6 +113,11 @@
 
 include $(BUILD_HOST_EXECUTABLE)
 fs_config_generate_bin := $(LOCAL_INSTALLED_MODULE)
+# List of all supported vendor, oem and odm partitions
+fs_config_generate_extra_partition_list := $(strip \
+  $(if $(BOARD_USES_VENDORIMAGE)$(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE),vendor) \
+  $(if $(BOARD_USES_OEMIMAGE)$(BOARD_OEMIMAGE_FILE_SYSTEM_TYPE),oem) \
+  $(if $(BOARD_USES_ODMIMAGE)$(BOARD_ODMIMAGE_FILE_SYSTEM_TYPE),odm))
 
 ##################################
 # Generate the system/etc/fs_config_dirs binary file for the target
@@ -121,10 +126,13 @@
 
 LOCAL_MODULE := fs_config_dirs
 LOCAL_MODULE_CLASS := ETC
+LOCAL_REQUIRED_MODULES := $(foreach t,$(fs_config_generate_extra_partition_list),$(LOCAL_MODULE)_$(t))
 include $(BUILD_SYSTEM)/base_rules.mk
 $(LOCAL_BUILT_MODULE): $(fs_config_generate_bin)
 	@mkdir -p $(dir $@)
-	$< -D -o $@
+	$< -D $(if $(fs_config_generate_extra_partition_list), \
+	   -P '$(subst $(space),$(comma),$(addprefix -,$(fs_config_generate_extra_partition_list)))') \
+	   -o $@
 
 ##################################
 # Generate the system/etc/fs_config_files binary file for the target
@@ -133,10 +141,112 @@
 
 LOCAL_MODULE := fs_config_files
 LOCAL_MODULE_CLASS := ETC
+LOCAL_REQUIRED_MODULES := $(foreach t,$(fs_config_generate_extra_partition_list),$(LOCAL_MODULE)_$(t))
 include $(BUILD_SYSTEM)/base_rules.mk
 $(LOCAL_BUILT_MODULE): $(fs_config_generate_bin)
 	@mkdir -p $(dir $@)
-	$< -F -o $@
+	$< -F $(if $(fs_config_generate_extra_partition_list), \
+	   -P '$(subst $(space),$(comma),$(addprefix -,$(fs_config_generate_extra_partition_list)))') \
+	   -o $@
+
+ifneq ($(filter vendor,$(fs_config_generate_extra_partition_list)),)
+##################################
+# Generate the vendor/etc/fs_config_dirs binary file for the target
+# Add fs_config_dirs or fs_config_dirs_vendor to PRODUCT_PACKAGES in
+# the device make file to enable.
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := fs_config_dirs_vendor
+LOCAL_MODULE_CLASS := ETC
+LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
+LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/etc
+include $(BUILD_SYSTEM)/base_rules.mk
+$(LOCAL_BUILT_MODULE): $(fs_config_generate_bin)
+	@mkdir -p $(dir $@)
+	$< -D -P vendor -o $@
+
+##################################
+# Generate the vendor/etc/fs_config_files binary file for the target
+# Add fs_config_files or fs_config_files_vendor to PRODUCT_PACKAGES in
+# the device make file to enable
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := fs_config_files_vendor
+LOCAL_MODULE_CLASS := ETC
+LOCAL_INSTALLED_MODULE_STEM := fs_config_files
+LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/etc
+include $(BUILD_SYSTEM)/base_rules.mk
+$(LOCAL_BUILT_MODULE): $(fs_config_generate_bin)
+	@mkdir -p $(dir $@)
+	$< -F -P vendor -o $@
+
+endif
+
+ifneq ($(filter oem,$(fs_config_generate_extra_partition_list)),)
+##################################
+# Generate the oem/etc/fs_config_dirs binary file for the target
+# Add fs_config_dirs or fs_config_dirs_oem to PRODUCT_PACKAGES in
+# the device make file to enable
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := fs_config_dirs_oem
+LOCAL_MODULE_CLASS := ETC
+LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
+LOCAL_MODULE_PATH := $(TARGET_OUT_OEM)/etc
+include $(BUILD_SYSTEM)/base_rules.mk
+$(LOCAL_BUILT_MODULE): $(fs_config_generate_bin)
+	@mkdir -p $(dir $@)
+	$< -D -P oem -o $@
+
+##################################
+# Generate the oem/etc/fs_config_files binary file for the target
+# Add fs_config_files or fs_config_files_oem to PRODUCT_PACKAGES in
+# the device make file to enable
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := fs_config_files_oem
+LOCAL_MODULE_CLASS := ETC
+LOCAL_INSTALLED_MODULE_STEM := fs_config_files
+LOCAL_MODULE_PATH := $(TARGET_OUT_OEM)/etc
+include $(BUILD_SYSTEM)/base_rules.mk
+$(LOCAL_BUILT_MODULE): $(fs_config_generate_bin)
+	@mkdir -p $(dir $@)
+	$< -F -P oem -o $@
+
+endif
+
+ifneq ($(filter odm,$(fs_config_generate_extra_partition_list)),)
+##################################
+# Generate the odm/etc/fs_config_dirs binary file for the target
+# Add fs_config_dirs or fs_config_dirs_odm to PRODUCT_PACKAGES in
+# the device make file to enable
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := fs_config_dirs_odm
+LOCAL_MODULE_CLASS := ETC
+LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
+LOCAL_MODULE_PATH := $(TARGET_OUT_ODM)/etc
+include $(BUILD_SYSTEM)/base_rules.mk
+$(LOCAL_BUILT_MODULE): $(fs_config_generate_bin)
+	@mkdir -p $(dir $@)
+	$< -D -P odm -o $@
+
+##################################
+# Generate the odm/etc/fs_config_files binary file for the target
+# Add fs_config_files or fs_config_files_odm to PRODUCT_PACKAGES in
+# the device make file to enable
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := fs_config_files_odm
+LOCAL_MODULE_CLASS := ETC
+LOCAL_INSTALLED_MODULE_STEM := fs_config_files
+LOCAL_MODULE_PATH := $(TARGET_OUT_ODM)/etc
+include $(BUILD_SYSTEM)/base_rules.mk
+$(LOCAL_BUILT_MODULE): $(fs_config_generate_bin)
+	@mkdir -p $(dir $@)
+	$< -F -P odm -o $@
+
+endif
 
 # The newer passwd/group targets are only generated if you
 # use the new TARGET_FS_CONFIG_GEN method.
@@ -195,3 +305,36 @@
 my_fs_config_h :=
 fs_config_generate_bin :=
 my_gen_oem_aid :=
+
+# -----------------------------------------------------------------------------
+# Unit tests.
+# -----------------------------------------------------------------------------
+
+test_c_flags := \
+    -fstack-protector-all \
+    -g \
+    -Wall \
+    -Wextra \
+    -Werror \
+    -fno-builtin \
+    -DANDROID_FILESYSTEM_CONFIG='"android_filesystem_config_test_data.h"'
+
+##################################
+# test executable
+include $(CLEAR_VARS)
+LOCAL_MODULE := fs_config_generate_test
+LOCAL_SRC_FILES := fs_config_generate.c
+LOCAL_SHARED_LIBRARIES := libcutils
+LOCAL_CFLAGS := $(test_c_flags)
+LOCAL_MODULE_RELATIVE_PATH := fs_config-unit-tests
+LOCAL_GTEST := false
+include $(BUILD_HOST_NATIVE_TEST)
+
+##################################
+# gTest tool
+include $(CLEAR_VARS)
+LOCAL_MODULE := fs_config-unit-tests
+LOCAL_CFLAGS += $(test_c_flags) -DHOST
+LOCAL_SHARED_LIBRARIES := liblog libcutils libbase
+LOCAL_SRC_FILES := fs_config_test.cpp
+include $(BUILD_HOST_NATIVE_TEST)
diff --git a/tools/fs_config/README b/tools/fs_config/README
index 9919131..5af407f 100644
--- a/tools/fs_config/README
+++ b/tools/fs_config/README
@@ -156,9 +156,28 @@
 ${OUT} directory are used in the final stages when building the filesystem
 images to set the file and directory properties.
 
+For systems with separate partition images, such as vendor or oem,
+fs_config_generate can be instructed to filter the specific file references
+to land in each partition's etc/fs_config_dirs or etc/fs_config_files
+locations. The filter blacklists a partition's data when given a
+comma-separated list of partition names each prefixed with a minus sign,
+and whitelists a partition's data when given the bare partition name.
+
+For example:
+- For system.img, but not vendor, oem or odm file references:
+      -P -vendor,-oem,-odm
+  This makes sure the results only contain content associated with the
+  system, and not vendor, oem or odm, blacklisting their content.
+- For vendor.img file references: -P vendor
+- For oem.img file references: -P oem
+- For odm.img file references: -P odm
+
 fs_config_generate --help reports:
 
 Generate binary content for fs_config_dirs (-D) and fs_config_files (-F)
-from device-specific android_filesystem_config.h override
+from device-specific android_filesystem_config.h override. Filter based
+on a comma separated partition list (-P) whitelist or prefixed by a
+minus blacklist. Partitions are identified as path references to
+<partition>/ or system/<partition>/
 
-Usage: fs_config_generate -D|-F [-o output-file]
+Usage: fs_config_generate -D|-F [-P list] [-o output-file]
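As a rough model of the -P filter described above (a hypothetical Python sketch; the authoritative logic lives in fs_config_generate.c):

    # Sketch: decide whether an entry with the given path prefix is emitted for
    # a partition list such as "vendor" or "-vendor,-oem,-odm".
    def keep_entry(path, partitions):
        names = [p for p in partitions.replace(',', ' ').split() if p]
        all_blacklist_no_match = True
        for name in names:
            blacklist = name.startswith('-')
            if blacklist:
                name = name[1:]
            else:
                all_blacklist_no_match = False
            if path.startswith(name + '/') or path.startswith('system/' + name + '/'):
                return not blacklist
        return all_blacklist_no_match

    keep_entry('vendor/etc/fs_config_files', 'vendor')             # True
    keep_entry('system/etc/fs_config_files', '-vendor,-oem,-odm')  # True
    keep_entry('vendor/etc/fs_config_files', '-vendor,-oem,-odm')  # False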
diff --git a/tools/fs_config/android_filesystem_config_test_data.h b/tools/fs_config/android_filesystem_config_test_data.h
new file mode 100644
index 0000000..07bc8e5
--- /dev/null
+++ b/tools/fs_config/android_filesystem_config_test_data.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <private/android_filesystem_config.h>
+
+/* Test Data */
+
+#undef NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_DIRS
+#undef NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_FILES
+
+static const struct fs_path_config android_device_dirs[] = {
+    {00555, AID_ROOT, AID_SYSTEM, 0, "system/etc"},
+    {00555, AID_ROOT, AID_SYSTEM, 0, "vendor/etc"},
+    {00555, AID_ROOT, AID_SYSTEM, 0, "oem/etc"},
+    {00555, AID_ROOT, AID_SYSTEM, 0, "odm/etc"},
+    {00755, AID_SYSTEM, AID_ROOT, 0, "system/oem/etc"},
+    {00755, AID_SYSTEM, AID_ROOT, 0, "system/odm/etc"},
+    {00755, AID_SYSTEM, AID_ROOT, 0, "system/vendor/etc"},
+    {00755, AID_SYSTEM, AID_ROOT, 0, "data/misc"},
+    {00755, AID_SYSTEM, AID_ROOT, 0, "oem/data/misc"},
+    {00755, AID_SYSTEM, AID_ROOT, 0, "odm/data/misc"},
+    {00755, AID_SYSTEM, AID_ROOT, 0, "vendor/data/misc"},
+    {00555, AID_SYSTEM, AID_ROOT, 0, "etc"},
+};
+
+static const struct fs_path_config android_device_files[] = {
+    {00444, AID_ROOT, AID_SYSTEM, 0, "system/etc/fs_config_dirs"},
+    {00444, AID_ROOT, AID_SYSTEM, 0, "vendor/etc/fs_config_dirs"},
+    {00444, AID_ROOT, AID_SYSTEM, 0, "oem/etc/fs_config_dirs"},
+    {00444, AID_ROOT, AID_SYSTEM, 0, "odm/etc/fs_config_dirs"},
+    {00444, AID_ROOT, AID_SYSTEM, 0, "system/etc/fs_config_files"},
+    {00444, AID_ROOT, AID_SYSTEM, 0, "vendor/etc/fs_config_files"},
+    {00444, AID_ROOT, AID_SYSTEM, 0, "oem/etc/fs_config_files"},
+    {00444, AID_ROOT, AID_SYSTEM, 0, "odm/etc/fs_config_files"},
+    {00644, AID_SYSTEM, AID_ROOT, 0, "system/vendor/etc/fs_config_dirs"},
+    {00644, AID_SYSTEM, AID_ROOT, 0, "system/oem/etc/fs_config_dirs"},
+    {00644, AID_SYSTEM, AID_ROOT, 0, "system/odm/etc/fs_config_dirs"},
+    {00644, AID_SYSTEM, AID_ROOT, 0, "system/vendor/etc/fs_config_files"},
+    {00644, AID_SYSTEM, AID_ROOT, 0, "system/oem/etc/fs_config_files"},
+    {00644, AID_SYSTEM, AID_ROOT, 0, "system/odm/etc/fs_config_files"},
+    {00644, AID_SYSTEM, AID_ROOT, 0, "etc/fs_config_files"},
+    {00666, AID_ROOT, AID_SYSTEM, 0, "data/misc/oem"},
+};
diff --git a/tools/fs_config/fs_config_generate.c b/tools/fs_config/fs_config_generate.c
index c06213f..cb7ff9d 100644
--- a/tools/fs_config/fs_config_generate.c
+++ b/tools/fs_config/fs_config_generate.c
@@ -14,9 +14,11 @@
  * limitations under the License.
  */
 
+#include <ctype.h>
 #include <stdbool.h>
 #include <stdio.h>
 #include <stdlib.h>
+#include <string.h>
 #include <unistd.h>
 
 #include <private/android_filesystem_config.h>
@@ -28,38 +30,57 @@
  * the binary format used in the /system/etc/fs_config_dirs and
  * the /system/etc/fs_config_files to be used by the runtimes.
  */
+#ifdef ANDROID_FILESYSTEM_CONFIG
+#include ANDROID_FILESYSTEM_CONFIG
+#else
 #include "android_filesystem_config.h"
+#endif
 
 #ifdef NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_DIRS
-  static const struct fs_path_config android_device_dirs[] = {
-};
+static const struct fs_path_config android_device_dirs[] = { };
 #endif
 
 #ifdef NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_FILES
 static const struct fs_path_config android_device_files[] = {
 #ifdef NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_DIRS
-    { 0, AID_ROOT, AID_ROOT, 0, "system/etc/fs_config_dirs" },
+    {0000, AID_ROOT, AID_ROOT, 0, "system/etc/fs_config_dirs"},
+    {0000, AID_ROOT, AID_ROOT, 0, "vendor/etc/fs_config_dirs"},
+    {0000, AID_ROOT, AID_ROOT, 0, "oem/etc/fs_config_dirs"},
+    {0000, AID_ROOT, AID_ROOT, 0, "odm/etc/fs_config_dirs"},
 #endif
-    { 0, AID_ROOT, AID_ROOT, 0, "system/etc/fs_config_files" },
+    {0000, AID_ROOT, AID_ROOT, 0, "system/etc/fs_config_files"},
+    {0000, AID_ROOT, AID_ROOT, 0, "vendor/etc/fs_config_files"},
+    {0000, AID_ROOT, AID_ROOT, 0, "oem/etc/fs_config_files"},
+    {0000, AID_ROOT, AID_ROOT, 0, "odm/etc/fs_config_files"},
 };
 #endif
 
 static void usage() {
   fprintf(stderr,
     "Generate binary content for fs_config_dirs (-D) and fs_config_files (-F)\n"
-    "from device-specific android_filesystem_config.h override\n\n"
-    "Usage: fs_config_generate -D|-F [-o output-file]\n");
+    "from device-specific android_filesystem_config.h override.  Filter based\n"
+    "on a comma separated partition list (-P) whitelist or prefixed by a\n"
+    "minus blacklist.  Partitions are identified as path references to\n"
+    "<partition>/ or system/<partition>/\n\n"
+    "Usage: fs_config_generate -D|-F [-P list] [-o output-file]\n");
 }
 
-int main(int argc, char** argv) {
-  const struct fs_path_config *pc;
-  const struct fs_path_config *end;
-  bool dir = false, file = false;
-  FILE *fp = stdout;
-  int opt;
+/* If tool switches to C++, use android-base/macros.h array_size() */
+#ifndef ARRAY_SIZE /* popular macro */
+#define ARRAY_SIZE(x) (sizeof(x) / sizeof((x)[0]))
+#endif
 
-  while((opt = getopt(argc, argv, "DFho:")) != -1) {
-    switch(opt) {
+int main(int argc, char** argv) {
+  const struct fs_path_config* pc;
+  const struct fs_path_config* end;
+  bool dir = false, file = false;
+  const char* partitions = NULL;
+  FILE* fp = stdout;
+  int opt;
+  static const char optstring[] = "DFP:ho:";
+
+  while ((opt = getopt(argc, argv, optstring)) != -1) {
+    switch (opt) {
     case 'D':
       if (file) {
         fprintf(stderr, "Must specify only -D or -F\n");
@@ -76,6 +97,30 @@
       }
       file = true;
       break;
+    case 'P':
+      if (partitions) {
+        fprintf(stderr, "Specify only one partition list\n");
+        usage();
+        exit(EXIT_FAILURE);
+      }
+      while (*optarg && isspace(*optarg)) ++optarg;
+      if (!optarg[0]) {
+        fprintf(stderr, "Partition list empty\n");
+        usage();
+        exit(EXIT_FAILURE);
+      }
+      if (!optarg[1]) {
+        fprintf(stderr, "Partition list too short \"%s\"\n", optarg);
+        usage();
+        exit(EXIT_FAILURE);
+      }
+      if ((optarg[0] == '-') && strchr(optstring, optarg[1]) && !optarg[2]) {
+        fprintf(stderr, "Partition list is a flag \"%s\"\n", optarg);
+        usage();
+        exit(EXIT_FAILURE);
+      }
+      partitions = optarg;
+      break;
     case 'o':
       if (fp != stdout) {
         fprintf(stderr, "Specify only one output file\n");
@@ -97,6 +142,12 @@
     }
   }
 
+  if (optind < argc) {
+    fprintf(stderr, "Unknown non-argument \"%s\"\n", argv[optind]);
+    usage();
+    exit(EXIT_FAILURE);
+  }
+
   if (!file && !dir) {
     fprintf(stderr, "Must specify either -F or -D\n");
     usage();
@@ -105,19 +156,64 @@
 
   if (dir) {
     pc = android_device_dirs;
-    end = &android_device_dirs[sizeof(android_device_dirs) / sizeof(android_device_dirs[0])];
+    end = &android_device_dirs[ARRAY_SIZE(android_device_dirs)];
   } else {
     pc = android_device_files;
-    end = &android_device_files[sizeof(android_device_files) / sizeof(android_device_files[0])];
+    end = &android_device_files[ARRAY_SIZE(android_device_files)];
   }
-  for(; (pc < end) && pc->prefix; pc++) {
+  for (; (pc < end) && pc->prefix; pc++) {
+    bool submit;
     char buffer[512];
     ssize_t len = fs_config_generate(buffer, sizeof(buffer), pc);
     if (len < 0) {
       fprintf(stderr, "Entry too large\n");
       exit(EXIT_FAILURE);
     }
-    if (fwrite(buffer, 1, len, fp) != (size_t)len) {
+    submit = true;
+    if (partitions) {
+      char* partitions_copy = strdup(partitions);
+      char* arg = partitions_copy;
+      char* sv = NULL; /* Do not leave uninitialized, NULL is known safe. */
+      /* Deal with case all iterated partitions are blacklists with no match */
+      bool all_blacklist_but_no_match = true;
+      submit = false;
+
+      if (!partitions_copy) {
+        fprintf(stderr, "Failed to allocate a copy of %s\n", partitions);
+        exit(EXIT_FAILURE);
+      }
+      /* iterate through (officially) comma separated list of partitions */
+      while (!!(arg = strtok_r(arg, ",:; \t\n\r\f", &sv))) {
+        static const char system[] = "system/";
+        size_t plen;
+        bool blacklist = false;
+        if (*arg == '-') {
+          blacklist = true;
+          ++arg;
+        } else {
+          all_blacklist_but_no_match = false;
+        }
+        plen = strlen(arg);
+        /* deal with evil callers */
+        while (arg[plen - 1] == '/') {
+          --plen;
+        }
+        /* check if we have <partition>/ or /system/<partition>/ */
+        if ((!strncmp(pc->prefix, arg, plen) && (pc->prefix[plen] == '/')) ||
+            (!strncmp(pc->prefix, system, strlen(system)) &&
+             !strncmp(pc->prefix + strlen(system), arg, plen) &&
+             (pc->prefix[strlen(system) + plen] == '/'))) {
+          all_blacklist_but_no_match = false;
+          /* we have a match !!! */
+          if (!blacklist) submit = true;
+          break;
+        }
+        arg = NULL;
+      }
+      free(partitions_copy);
+      if (all_blacklist_but_no_match) submit = true;
+    }
+    if (submit && (fwrite(buffer, 1, len, fp) != (size_t)len)) {
       fprintf(stderr, "Write failure\n");
       exit(EXIT_FAILURE);
     }
diff --git a/tools/fs_config/fs_config_generator.py b/tools/fs_config/fs_config_generator.py
index 2cf2fd8..c8d1dd3 100755
--- a/tools/fs_config/fs_config_generator.py
+++ b/tools/fs_config/fs_config_generator.py
@@ -709,7 +709,7 @@
                 int(cap, 0)
                 tmp.append('(' + cap + ')')
             except ValueError:
-                tmp.append('(1ULL << CAP_' + cap.upper() + ')')
+                tmp.append('CAP_MASK_LONG(CAP_' + cap.upper() + ')')
 
         caps = tmp
 
diff --git a/tools/fs_config/fs_config_test.cpp b/tools/fs_config/fs_config_test.cpp
new file mode 100644
index 0000000..f95a4ca
--- /dev/null
+++ b/tools/fs_config/fs_config_test.cpp
@@ -0,0 +1,223 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdio.h>
+#include <sys/cdefs.h>
+
+#include <string>
+#include <vector>
+
+#include <android-base/file.h>
+#include <android-base/macros.h>
+#include <android-base/strings.h>
+#include <android-base/stringprintf.h>
+#include <gtest/gtest.h>
+#include <private/android_filesystem_config.h>
+#include <private/fs_config.h>
+
+#include "android_filesystem_config_test_data.h"
+
+// must run test in the test directory
+const static char fs_config_generate_command[] = "./fs_config_generate_test";
+
+static std::string popenToString(std::string command) {
+  std::string ret;
+
+  FILE* fp = popen(command.c_str(), "r");
+  if (fp) {
+    if (!android::base::ReadFdToString(fileno(fp), &ret)) ret = "";
+    pclose(fp);
+  }
+  return ret;
+}
+
+static void confirm(std::string&& data, const fs_path_config* config,
+                    ssize_t num_config) {
+  const struct fs_path_config_from_file* pc =
+      reinterpret_cast<const fs_path_config_from_file*>(data.c_str());
+  size_t len = data.size();
+
+  ASSERT_TRUE(config != NULL);
+  ASSERT_LT(0, num_config);
+
+  while (len > 0) {
+    uint16_t host_len = pc->len;
+    if (host_len > len) break;
+
+    EXPECT_EQ(config->mode, pc->mode);
+    EXPECT_EQ(config->uid, pc->uid);
+    EXPECT_EQ(config->gid, pc->gid);
+    EXPECT_EQ(config->capabilities, pc->capabilities);
+    EXPECT_STREQ(config->prefix, pc->prefix);
+
+    EXPECT_LT(0, num_config);
+    --num_config;
+    if (num_config >= 0) ++config;
+    pc = reinterpret_cast<const fs_path_config_from_file*>(
+        reinterpret_cast<const char*>(pc) + host_len);
+    len -= host_len;
+  }
+  EXPECT_EQ(0, num_config);
+}
+
+/* See local android_filesystem_config.h for test data */
+
+TEST(fs_conf_test, dirs) {
+  confirm(popenToString(
+              android::base::StringPrintf("%s -D", fs_config_generate_command)),
+          android_device_dirs, arraysize(android_device_dirs));
+}
+
+TEST(fs_conf_test, files) {
+  confirm(popenToString(
+              android::base::StringPrintf("%s -F", fs_config_generate_command)),
+          android_device_files, arraysize(android_device_files));
+}
+
+static const char vendor_str[] = "vendor/";
+static const char vendor_alt_str[] = "system/vendor/";
+static const char oem_str[] = "oem/";
+static const char oem_alt_str[] = "system/oem/";
+static const char odm_str[] = "odm/";
+static const char odm_alt_str[] = "system/odm/";
+
+TEST(fs_conf_test, system_dirs) {
+  std::vector<fs_path_config> dirs;
+  const fs_path_config* config = android_device_dirs;
+  for (size_t num = arraysize(android_device_dirs); num; --num) {
+    if (!android::base::StartsWith(config->prefix, vendor_str) &&
+        !android::base::StartsWith(config->prefix, vendor_alt_str) &&
+        !android::base::StartsWith(config->prefix, oem_str) &&
+        !android::base::StartsWith(config->prefix, oem_alt_str) &&
+        !android::base::StartsWith(config->prefix, odm_str) &&
+        !android::base::StartsWith(config->prefix, odm_alt_str)) {
+      dirs.emplace_back(*config);
+    }
+    ++config;
+  }
+  confirm(popenToString(android::base::StringPrintf(
+              "%s -D -P -vendor,-oem,-odm", fs_config_generate_command)),
+          &dirs[0], dirs.size());
+}
+
+TEST(fs_conf_test, vendor_dirs) {
+  std::vector<fs_path_config> dirs;
+  const fs_path_config* config = android_device_dirs;
+  for (size_t num = arraysize(android_device_dirs); num; --num) {
+    if (android::base::StartsWith(config->prefix, vendor_str) ||
+        android::base::StartsWith(config->prefix, vendor_alt_str)) {
+      dirs.emplace_back(*config);
+    }
+    ++config;
+  }
+  confirm(popenToString(android::base::StringPrintf(
+              "%s -D -P vendor", fs_config_generate_command)),
+          &dirs[0], dirs.size());
+}
+
+TEST(fs_conf_test, oem_dirs) {
+  std::vector<fs_path_config> dirs;
+  const fs_path_config* config = android_device_dirs;
+  for (size_t num = arraysize(android_device_dirs); num; --num) {
+    if (android::base::StartsWith(config->prefix, oem_str) ||
+        android::base::StartsWith(config->prefix, oem_alt_str)) {
+      dirs.emplace_back(*config);
+    }
+    ++config;
+  }
+  confirm(popenToString(android::base::StringPrintf(
+              "%s -D -P oem", fs_config_generate_command)),
+          &dirs[0], dirs.size());
+}
+
+TEST(fs_conf_test, odm_dirs) {
+  std::vector<fs_path_config> dirs;
+  const fs_path_config* config = android_device_dirs;
+  for (size_t num = arraysize(android_device_dirs); num; --num) {
+    if (android::base::StartsWith(config->prefix, odm_str) ||
+        android::base::StartsWith(config->prefix, odm_alt_str)) {
+      dirs.emplace_back(*config);
+    }
+    ++config;
+  }
+  confirm(popenToString(android::base::StringPrintf(
+              "%s -D -P odm", fs_config_generate_command)),
+          &dirs[0], dirs.size());
+}
+
+TEST(fs_conf_test, system_files) {
+  std::vector<fs_path_config> files;
+  const fs_path_config* config = android_device_files;
+  for (size_t num = arraysize(android_device_files); num; --num) {
+    if (!android::base::StartsWith(config->prefix, vendor_str) &&
+        !android::base::StartsWith(config->prefix, vendor_alt_str) &&
+        !android::base::StartsWith(config->prefix, oem_str) &&
+        !android::base::StartsWith(config->prefix, oem_alt_str) &&
+        !android::base::StartsWith(config->prefix, odm_str) &&
+        !android::base::StartsWith(config->prefix, odm_alt_str)) {
+      files.emplace_back(*config);
+    }
+    ++config;
+  }
+  confirm(popenToString(android::base::StringPrintf(
+              "%s -F -P -vendor,-oem,-odm", fs_config_generate_command)),
+          &files[0], files.size());
+}
+
+TEST(fs_conf_test, vendor_files) {
+  std::vector<fs_path_config> files;
+  const fs_path_config* config = android_device_files;
+  for (size_t num = arraysize(android_device_files); num; --num) {
+    if (android::base::StartsWith(config->prefix, vendor_str) ||
+        android::base::StartsWith(config->prefix, vendor_alt_str)) {
+      files.emplace_back(*config);
+    }
+    ++config;
+  }
+  confirm(popenToString(android::base::StringPrintf(
+              "%s -F -P vendor", fs_config_generate_command)),
+          &files[0], files.size());
+}
+
+TEST(fs_conf_test, oem_files) {
+  std::vector<fs_path_config> files;
+  const fs_path_config* config = android_device_files;
+  for (size_t num = arraysize(android_device_files); num; --num) {
+    if (android::base::StartsWith(config->prefix, oem_str) ||
+        android::base::StartsWith(config->prefix, oem_alt_str)) {
+      files.emplace_back(*config);
+    }
+    ++config;
+  }
+  confirm(popenToString(android::base::StringPrintf(
+              "%s -F -P oem", fs_config_generate_command)),
+          &files[0], files.size());
+}
+
+TEST(fs_conf_test, odm_files) {
+  std::vector<fs_path_config> files;
+  const fs_path_config* config = android_device_files;
+  for (size_t num = arraysize(android_device_files); num; --num) {
+    if (android::base::StartsWith(config->prefix, odm_str) ||
+        android::base::StartsWith(config->prefix, odm_alt_str)) {
+      files.emplace_back(*config);
+    }
+    ++config;
+  }
+  confirm(popenToString(android::base::StringPrintf(
+              "%s -F -P odm", fs_config_generate_command)),
+          &files[0], files.size());
+}
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index 7c3679c..82394ca 100755
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -166,6 +166,13 @@
               block_list=block_list)
   return img.name
 
+def FindDtboPrebuilt(prefix="IMAGES/"):
+  """Find the prebuilt image of DTBO partition."""
+
+  prebuilt_path = os.path.join(OPTIONS.input_tmp, prefix, "dtbo.img")
+  if os.path.exists(prebuilt_path):
+    return prebuilt_path
+  return None
 
 def CreateImage(input_dir, info_dict, what, output_file, block_list=None):
   print("creating " + what + ".img...")
@@ -286,7 +293,7 @@
 
 
 def AddVBMeta(output_zip, boot_img_path, system_img_path, vendor_img_path,
-              prefix="IMAGES/"):
+              dtbo_img_path, prefix="IMAGES/"):
   """Create a VBMeta image and store it in output_zip."""
   img = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "vbmeta.img")
   avbtool = os.getenv('AVBTOOL') or "avbtool"
@@ -296,6 +303,8 @@
          "--include_descriptors_from_image", system_img_path]
   if vendor_img_path is not None:
     cmd.extend(["--include_descriptors_from_image", vendor_img_path])
+  if dtbo_img_path is not None:
+    cmd.extend(["--include_descriptors_from_image", dtbo_img_path])
   if OPTIONS.info_dict.get("system_root_image", None) == "true":
     cmd.extend(["--setup_rootfs_from_kernel", system_img_path])
   common.AppendAVBSigningArgs(cmd)
@@ -481,7 +490,9 @@
   if OPTIONS.info_dict.get("board_avb_enable", None) == "true":
     banner("vbmeta")
     boot_contents = boot_image.WriteToTemp()
-    AddVBMeta(output_zip, boot_contents.name, system_img_path, vendor_img_path)
+    dtbo_img_path = FindDtboPrebuilt()
+    AddVBMeta(output_zip, boot_contents.name, system_img_path,
+              vendor_img_path, dtbo_img_path)
 
   # For devices using A/B update, copy over images from RADIO/ and/or
   # VENDOR_IMAGES/ to IMAGES/ and make sure we have all the needed
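
Note: once a prebuilt IMAGES/dtbo.img is found, the avbtool invocation that AddVBMeta assembles ends up roughly like the sketch below. The temp paths are hypothetical and the make_vbmeta_image subcommand is assumed from avbtool's usual interface; the descriptor flags mirror the ones added above.

    # Sketch only -- hypothetical paths, assumed avbtool subcommand.
    cmd = ["avbtool", "make_vbmeta_image", "--output", "/tmp/IMAGES/vbmeta.img",
           "--include_descriptors_from_image", "/tmp/boot.img",
           "--include_descriptors_from_image", "/tmp/IMAGES/system.img",
           "--include_descriptors_from_image", "/tmp/IMAGES/vendor.img",
           "--include_descriptors_from_image", "/tmp/IMAGES/dtbo.img"]
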
diff --git a/tools/releasetools/blockimgdiff.py b/tools/releasetools/blockimgdiff.py
index e385866..b8123c0 100644
--- a/tools/releasetools/blockimgdiff.py
+++ b/tools/releasetools/blockimgdiff.py
@@ -41,10 +41,10 @@
   cmd = ['imgdiff', '-z'] if imgdiff else ['bsdiff']
   cmd.extend([srcfile, tgtfile, patchfile])
 
-  # Not using common.Run(), which would otherwise dump all the bsdiff/imgdiff
-  # commands when OPTIONS.verbose is True - not useful for the case here, since
-  # they contain temp filenames only.
-  p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+  # Don't dump the bsdiff/imgdiff commands, which are not useful for the case
+  # here, since they contain temp filenames only.
+  p = common.Run(cmd, verbose=False, stdout=subprocess.PIPE,
+                 stderr=subprocess.STDOUT)
   output, _ = p.communicate()
 
   if p.returncode != 0:
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index 16c8018..3094dca 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -493,11 +493,11 @@
     shutil.rmtree(staging_system, ignore_errors=True)
     shutil.copytree(origin_in, staging_system, symlinks=True)
 
-  reserved_blocks = prop_dict.get("has_ext4_reserved_blocks") == "true"
+  has_reserved_blocks = prop_dict.get("has_ext4_reserved_blocks") == "true"
   ext4fs_output = None
 
   try:
-    if reserved_blocks and fs_type.startswith("ext4"):
+    if fs_type.startswith("ext4"):
       (ext4fs_output, exit_code) = RunCommand(build_command)
     else:
       (_, exit_code) = RunCommand(build_command)
@@ -518,7 +518,9 @@
   # not writable even with root privilege. It only affects devices using
   # file-based OTA and a kernel version of 3.10 or greater (currently just
   # sprout).
-  if reserved_blocks and fs_type.startswith("ext4"):
+  # Separately, check if there's enough headroom space available. This is
+  # useful for devices with low disk space whose system image size varies
+  # between builds.
+  if (has_reserved_blocks or "partition_headroom" in prop_dict) and fs_type.startswith("ext4"):
     assert ext4fs_output is not None
     ext4fs_stats = re.compile(
         r'Created filesystem with .* (?P<used_blocks>[0-9]+)/'
@@ -526,14 +528,21 @@
     m = ext4fs_stats.match(ext4fs_output.strip().split('\n')[-1])
     used_blocks = int(m.groupdict().get('used_blocks'))
     total_blocks = int(m.groupdict().get('total_blocks'))
-    reserved_blocks = min(4096, int(total_blocks * 0.02))
-    adjusted_blocks = total_blocks - reserved_blocks
+    reserved_blocks = 0
+    headroom_blocks = 0
+    adjusted_blocks = total_blocks
+    if has_reserved_blocks:
+      reserved_blocks = min(4096, int(total_blocks * 0.02))
+      adjusted_blocks -= reserved_blocks
+    if "partition_headroom" in prop_dict:
+      headroom_blocks = int(prop_dict.get('partition_headroom')) / BLOCK_SIZE
+      adjusted_blocks -= headroom_blocks
     if used_blocks > adjusted_blocks:
       mount_point = prop_dict.get("mount_point")
       print("Error: Not enough room on %s (total: %d blocks, used: %d blocks, "
-            "reserved: %d blocks, available: %d blocks)" % (
+            "reserved: %d blocks, headroom: %d blocks, available: %d blocks)" % (
                 mount_point, total_blocks, used_blocks, reserved_blocks,
-                adjusted_blocks))
+                headroom_blocks, adjusted_blocks))
       return False
 
   if not fs_spans_partition:
@@ -614,9 +623,10 @@
   d["mount_point"] = mount_point
   if mount_point == "system":
     copy_prop("fs_type", "fs_type")
-    # Copy the generic sysetem fs type first, override with specific one if
+    # Copy the generic system fs type first, override with specific one if
     # available.
     copy_prop("system_fs_type", "fs_type")
+    copy_prop("system_headroom", "partition_headroom")
     copy_prop("system_size", "partition_size")
     copy_prop("system_journal_size", "journal_size")
     copy_prop("system_verity_block_device", "verity_block_device")
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index e200f9f..925a523 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -107,10 +107,15 @@
   pass
 
 
-def Run(args, **kwargs):
-  """Create and return a subprocess.Popen object, printing the command
-  line on the terminal if -v was specified."""
-  if OPTIONS.verbose:
+def Run(args, verbose=None, **kwargs):
+  """Create and return a subprocess.Popen object.
+
+  Caller can specify if the command line should be printed. The global
+  OPTIONS.verbose will be used if not specified.
+  """
+  if verbose is None:
+    verbose = OPTIONS.verbose
+  if verbose:
     print("  running: ", " ".join(args))
   return subprocess.Popen(args, **kwargs)
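
A short usage sketch of the new verbose parameter (the command list below is hypothetical; it mirrors the blockimgdiff.py call above). The command runs as usual, but its command line is not echoed even when -v was passed.

    p = common.Run(["bsdiff", "/tmp/src", "/tmp/tgt", "/tmp/patch"],
                   verbose=False,
                   stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    output, _ = p.communicate()
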
 
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 1a7e10e..2090400 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -181,14 +181,14 @@
 OPTIONS.payload_signer = None
 OPTIONS.payload_signer_args = []
 OPTIONS.extracted_input = None
+OPTIONS.key_passwords = []
 
 METADATA_NAME = 'META-INF/com/android/metadata'
 UNZIP_PATTERN = ['IMAGES/*', 'META/*']
 
 
 def SignOutput(temp_zip_name, output_zip_name):
-  key_passwords = common.GetKeyPasswords([OPTIONS.package_key])
-  pw = key_passwords[OPTIONS.package_key]
+  pw = OPTIONS.key_passwords[OPTIONS.package_key]
 
   common.SignFile(temp_zip_name, output_zip_name, OPTIONS.package_key, pw,
                   whole_file=True)
@@ -1021,21 +1021,17 @@
   # The place where the output from the subprocess should go.
   log_file = sys.stdout if OPTIONS.verbose else subprocess.PIPE
 
-  # Setup signing keys.
-  if OPTIONS.package_key is None:
-    OPTIONS.package_key = OPTIONS.info_dict.get(
-        "default_system_dev_certificate",
-        "build/target/product/security/testkey")
-
   # A/B updater expects a signing key in RSA format. Gets the key ready for
   # later use in step 3, unless a payload_signer has been specified.
   if OPTIONS.payload_signer is None:
     cmd = ["openssl", "pkcs8",
            "-in", OPTIONS.package_key + OPTIONS.private_key_suffix,
-           "-inform", "DER", "-nocrypt"]
+           "-inform", "DER"]
+    pw = OPTIONS.key_passwords[OPTIONS.package_key]
+    cmd.extend(["-passin", "pass:" + pw] if pw else ["-nocrypt"])
     rsa_key = common.MakeTempFile(prefix="key-", suffix=".key")
     cmd.extend(["-out", rsa_key])
-    p1 = common.Run(cmd, stdout=log_file, stderr=subprocess.STDOUT)
+    p1 = common.Run(cmd, verbose=False, stdout=log_file, stderr=subprocess.STDOUT)
     p1.communicate()
     assert p1.returncode == 0, "openssl pkcs8 failed"
 
@@ -1383,6 +1379,17 @@
 
   ab_update = OPTIONS.info_dict.get("ab_update") == "true"
 
+  # Use the default key to sign the package if not specified with package_key.
+  # A package key is also needed for A/B updates, so always define one when
+  # an A/B package is being created.
+  if not OPTIONS.no_signing or ab_update:
+    if OPTIONS.package_key is None:
+      OPTIONS.package_key = OPTIONS.info_dict.get(
+          "default_system_dev_certificate",
+          "build/target/product/security/testkey")
+    # Get signing keys
+    OPTIONS.key_passwords = common.GetKeyPasswords([OPTIONS.package_key])
+
   if ab_update:
     if OPTIONS.incremental_source is not None:
       OPTIONS.target_info_dict = OPTIONS.info_dict
@@ -1448,13 +1455,6 @@
     raise common.ExternalError(
         "--- target build has specified no recovery ---")
 
-  # Use the default key to sign the package if not specified with package_key.
-  if not OPTIONS.no_signing:
-    if OPTIONS.package_key is None:
-      OPTIONS.package_key = OPTIONS.info_dict.get(
-          "default_system_dev_certificate",
-          "build/target/product/security/testkey")
-
   # Set up the output zip. Create a temporary zip file if signing is needed.
   if OPTIONS.no_signing:
     if os.path.exists(args[1]):
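
For reference, a minimal sketch of the openssl pkcs8 command that the payload-signing step now builds; the key path, output path, and password are hypothetical, and the flags follow the branches shown above.

    cmd = ["openssl", "pkcs8", "-in", "releasekey.pk8", "-inform", "DER"]
    pw = OPTIONS.key_passwords[OPTIONS.package_key]      # may be empty
    cmd.extend(["-passin", "pass:" + pw] if pw else ["-nocrypt"])
    cmd.extend(["-out", "payload.key"])
    # Unprotected key: openssl pkcs8 -in ... -inform DER -nocrypt -out ...
    # Protected key:   openssl pkcs8 -in ... -inform DER -passin pass:<pw> -out ...
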
diff --git a/tools/releasetools/ota_package_parser.py b/tools/releasetools/ota_package_parser.py
new file mode 100755
index 0000000..331122b
--- /dev/null
+++ b/tools/releasetools/ota_package_parser.py
@@ -0,0 +1,228 @@
+#!/usr/bin/env python
+# Copyright (C) 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import logging
+import sys
+import traceback
+import zipfile
+
+from rangelib import RangeSet
+
+class Stash(object):
+  """Build a map to track stashed blocks during update simulation."""
+
+  def __init__(self):
+    self.blocks_stashed = 0
+    self.overlap_blocks_stashed = 0
+    self.max_stash_needed = 0
+    self.current_stash_size = 0
+    self.stash_map = {}
+
+  def StashBlocks(self, SHA1, blocks):
+    if SHA1 in self.stash_map:
+      logging.info("already stashed {}: {}".format(SHA1, blocks))
+      return
+    self.blocks_stashed += blocks.size()
+    self.current_stash_size += blocks.size()
+    self.max_stash_needed = max(self.current_stash_size, self.max_stash_needed)
+    self.stash_map[SHA1] = blocks
+
+  def FreeBlocks(self, SHA1):
+    assert SHA1 in self.stash_map, "stash {} not found".format(SHA1)
+    self.current_stash_size -= self.stash_map[SHA1].size()
+    del self.stash_map[SHA1]
+
+  def HandleOverlapBlocks(self, SHA1, blocks):
+    self.StashBlocks(SHA1, blocks)
+    self.overlap_blocks_stashed += blocks.size()
+    self.FreeBlocks(SHA1)
+
+
+class OtaPackageParser(object):
+  """Parse a block-based OTA package."""
+
+  def __init__(self, package):
+    self.package = package
+    self.new_data_size = 0
+    self.patch_data_size = 0
+    self.block_written = 0
+    self.block_stashed = 0
+
+  @staticmethod
+  def GetSizeString(size):
+    assert size >= 0
+    base = 1024.0
+    if size <= base:
+      return "{} bytes".format(size)
+    for units in ['K', 'M', 'G']:
+      if size <= base * 1024 or units == 'G':
+        return "{:.1f}{}".format(size / base, units)
+      base *= 1024
+
+  def ParseTransferList(self, name):
+    """Simulate the transfer commands and calculate the amout of I/O."""
+
+    logging.info("\nSimulating commands in '{}':".format(name))
+    lines = self.package.read(name).strip().splitlines()
+    assert len(lines) >= 4, "{} is too short; a transfer list has at least " \
+        "4 lines, but this one has {}".format(name, len(lines))
+    assert int(lines[0]) >= 3
+    logging.info("(version: {})".format(lines[0]))
+
+    blocks_written = 0
+    my_stash = Stash()
+    for line in lines[4:]:
+      cmd_list = line.strip().split(" ")
+      cmd_name = cmd_list[0]
+      try:
+        if cmd_name == "new" or cmd_name == "zero":
+          assert len(cmd_list) == 2, "command format error: {}".format(line)
+          target_range = RangeSet.parse_raw(cmd_list[1])
+          blocks_written += target_range.size()
+        elif cmd_name == "move":
+          # Example:  move <onehash> <tgt_range> <src_blk_count> <src_range>
+          # [<loc_range> <stashed_blocks>]
+          assert len(cmd_list) >= 5, "command format error: {}".format(line)
+          target_range = RangeSet.parse_raw(cmd_list[2])
+          blocks_written += target_range.size()
+          if cmd_list[4] == '-':
+            continue
+          SHA1 = cmd_list[1]
+          source_range = RangeSet.parse_raw(cmd_list[4])
+          if target_range.overlaps(source_range):
+            my_stash.HandleOverlapBlocks(SHA1, source_range)
+        elif cmd_name == "bsdiff" or cmd_name == "imgdiff":
+          # Example:  bsdiff <offset> <len> <src_hash> <tgt_hash> <tgt_range>
+          # <src_blk_count> <src_range> [<loc_range> <stashed_blocks>]
+          assert len(cmd_list) >= 8, "command format error: {}".format(line)
+          target_range = RangeSet.parse_raw(cmd_list[5])
+          blocks_written += target_range.size()
+          if cmd_list[7] == '-':
+            continue
+          source_SHA1 = cmd_list[3]
+          source_range = RangeSet.parse_raw(cmd_list[7])
+          if target_range.overlaps(source_range):
+            my_stash.HandleOverlapBlocks(source_SHA1, source_range)
+        elif cmd_name == "stash":
+          assert len(cmd_list) == 3, "command format error: {}".format(line)
+          SHA1 = cmd_list[1]
+          source_range = RangeSet.parse_raw(cmd_list[2])
+          my_stash.StashBlocks(SHA1, source_range)
+        elif cmd_name == "free":
+          assert len(cmd_list) == 2, "command format error: {}".format(line)
+          SHA1 = cmd_list[1]
+          my_stash.FreeBlocks(SHA1)
+      except:
+        logging.error("failed to parse command in: " + line)
+        raise
+
+    self.block_written += blocks_written
+    self.block_stashed += my_stash.blocks_stashed
+
+    logging.info("blocks written: {}  (expected: {})".format(
+        blocks_written, lines[1]))
+    logging.info("max blocks stashed simultaneously: {}  (expected: {})".
+        format(my_stash.max_stash_needed, lines[3]))
+    logging.info("total blocks stashed: {}".format(my_stash.blocks_stashed))
+    logging.info("blocks stashed implicitly: {}".format(
+        my_stash.overlap_blocks_stashed))
+
+  def PrintDataInfo(self, partition):
+    logging.info("\nReading data info for {} partition:".format(partition))
+    new_data = self.package.getinfo(partition + ".new.dat")
+    patch_data = self.package.getinfo(partition + ".patch.dat")
+    logging.info("{:<40}{:<40}".format(new_data.filename, patch_data.filename))
+    logging.info("{:<40}{:<40}".format(
+          "compress_type: " + str(new_data.compress_type),
+          "compress_type: " + str(patch_data.compress_type)))
+    logging.info("{:<40}{:<40}".format(
+          "compressed_size: " + OtaPackageParser.GetSizeString(
+              new_data.compress_size),
+          "compressed_size: " + OtaPackageParser.GetSizeString(
+              patch_data.compress_size)))
+    logging.info("{:<40}{:<40}".format(
+        "file_size: " + OtaPackageParser.GetSizeString(new_data.file_size),
+        "file_size: " + OtaPackageParser.GetSizeString(patch_data.file_size)))
+
+    self.new_data_size += new_data.file_size
+    self.patch_data_size += patch_data.file_size
+
+  def AnalyzePartition(self, partition):
+    assert partition in ("system", "vendor")
+    assert partition + ".new.dat" in self.package.namelist()
+    assert partition + ".patch.dat" in self.package.namelist()
+    assert partition + ".transfer.list" in self.package.namelist()
+
+    self.PrintDataInfo(partition)
+    self.ParseTransferList(partition + ".transfer.list")
+
+  def PrintMetadata(self):
+    metadata_path = "META-INF/com/android/metadata"
+    logging.info("\nMetadata info:")
+    metadata_info = {}
+    for line in self.package.read(metadata_path).strip().splitlines():
+      index = line.find("=")
+      metadata_info[line[0 : index].strip()] = line[index + 1:].strip()
+    assert metadata_info.get("ota-type") == "BLOCK"
+    assert "pre-device" in metadata_info
+    logging.info("device: {}".format(metadata_info["pre-device"]))
+    if "pre-build" in metadata_info:
+      logging.info("pre-build: {}".format(metadata_info["pre-build"]))
+    assert "post-build" in metadata_info
+    logging.info("post-build: {}".format(metadata_info["post-build"]))
+
+  def Analyze(self):
+    logging.info("Analyzing ota package: " + self.package.filename)
+    self.PrintMetadata()
+    assert "system.new.dat" in self.package.namelist()
+    self.AnalyzePartition("system")
+    if "vendor.new.dat" in self.package.namelist():
+      self.AnalyzePartition("vendor")
+
+    # TODO: Add analysis of other partitions (e.g. bootloader, boot, radio).
+
+    BLOCK_SIZE = 4096
+    logging.info("\nOTA package analyzed:")
+    logging.info("new data size (uncompressed): " +
+        OtaPackageParser.GetSizeString(self.new_data_size))
+    logging.info("patch data size (uncompressed): " +
+        OtaPackageParser.GetSizeString(self.patch_data_size))
+    logging.info("total data written: " +
+        OtaPackageParser.GetSizeString(self.block_written * BLOCK_SIZE))
+    logging.info("total data stashed: " +
+        OtaPackageParser.GetSizeString(self.block_stashed * BLOCK_SIZE))
+
+
+def main(argv):
+  parser = argparse.ArgumentParser(description='Analyze an OTA package.')
+  parser.add_argument("ota_package", help='Path of the OTA package.')
+  args = parser.parse_args(argv)
+
+  logging_format = '%(message)s'
+  logging.basicConfig(level=logging.INFO, format=logging_format)
+
+  try:
+    with zipfile.ZipFile(args.ota_package, 'r') as package:
+      package_parser = OtaPackageParser(package)
+      package_parser.Analyze()
+  except:
+    logging.error("Failed to read " + args.ota_package)
+    traceback.print_exc()
+    sys.exit(1)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
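
To illustrate the input ParseTransferList expects, here is a hypothetical version-4 transfer list (hashes, ranges, and header values are made up); ranges are in the raw RangeSet form <count>,<start>,<end>,... consumed by RangeSet.parse_raw.

    transfer_list = "\n".join([
        "4",                              # version
        "12",                             # expected total blocks written
        "1",                              # stash entries needed simultaneously
        "2",                              # expected max blocks stashed at once
        "stash 1b2c3d 2,40,42",           # stash blocks [40,42)
        "new 2,0,10",                     # write 10 new blocks into [0,10)
        "move a4f5e6 2,10,12 2 2,40,42",  # move [40,42) into [10,12)
        "free 1b2c3d",
    ])
    # For this input the parser would report 12 blocks written and a maximum
    # of 2 blocks stashed simultaneously, matching the header lines.
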
diff --git a/tools/releasetools/validate_target_files.py b/tools/releasetools/validate_target_files.py
new file mode 100755
index 0000000..1dd3159
--- /dev/null
+++ b/tools/releasetools/validate_target_files.py
@@ -0,0 +1,127 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Validate a given (signed) target_files.zip.
+
+It performs checks to ensure the integrity of the input zip.
+ - It verifies the file consistency between the ones in IMAGES/system.img
+   (read via IMAGES/system.map) and the ones under the unpacked folder of
+   SYSTEM/. The same check also applies to the vendor image if present.
+"""
+
+import common
+import logging
+import os.path
+import sparse_img
+import sys
+
+
+def _GetImage(which, tmpdir):
+  assert which in ('system', 'vendor')
+
+  path = os.path.join(tmpdir, 'IMAGES', which + '.img')
+  mappath = os.path.join(tmpdir, 'IMAGES', which + '.map')
+
+  # Map file must exist (allowed to be empty).
+  assert os.path.exists(path) and os.path.exists(mappath)
+
+  clobbered_blocks = '0'
+  return sparse_img.SparseImage(path, mappath, clobbered_blocks)
+
+
+def ValidateFileConsistency(input_zip, input_tmp):
+  """Compare the files from image files and unpacked folders."""
+
+  def RoundUpTo4K(value):
+    rounded_up = value + 4095
+    return rounded_up - (rounded_up % 4096)
+
+  def CheckAllFiles(which):
+    logging.info('Checking %s image.', which)
+    image = _GetImage(which, input_tmp)
+    prefix = '/' + which
+    for entry in image.file_map:
+      if not entry.startswith(prefix):
+        continue
+
+      # Read the blocks that the file resides in. Note that the read data may
+      # run past the file length and is expected to be padded with '\0's.
+      ranges = image.file_map[entry]
+      blocks_sha1 = image.RangeSha1(ranges)
+
+      # The filename under unpacked directory, such as SYSTEM/bin/sh.
+      unpacked_name = os.path.join(
+          input_tmp, which.upper(), entry[(len(prefix) + 1):])
+      with open(unpacked_name) as f:
+        file_data = f.read()
+      file_size = len(file_data)
+      file_size_rounded_up = RoundUpTo4K(file_size)
+      file_data += '\0' * (file_size_rounded_up - file_size)
+      file_sha1 = common.File(entry, file_data).sha1
+
+      assert blocks_sha1 == file_sha1, \
+          'file: %s, range: %s, blocks_sha1: %s, file_sha1: %s' % (
+              entry, ranges, blocks_sha1, file_sha1)
+
+  logging.info('Validating file consistency.')
+
+  # Verify IMAGES/system.img.
+  CheckAllFiles('system')
+
+  # Verify IMAGES/vendor.img if applicable.
+  if 'VENDOR/' in input_zip.namelist():
+    CheckAllFiles('vendor')
+
+  # Not checking IMAGES/system_other.img since it doesn't have the map file.
+
+
+def main(argv):
+  def option_handler():
+    return True
+
+  args = common.ParseOptions(
+      argv, __doc__, extra_opts="",
+      extra_long_opts=[],
+      extra_option_handler=option_handler)
+
+  if len(args) != 1:
+    common.Usage(__doc__)
+    sys.exit(1)
+
+  logging_format = '%(asctime)s - %(filename)s - %(levelname)-8s: %(message)s'
+  date_format = '%Y/%m/%d %H:%M:%S'
+  logging.basicConfig(level=logging.INFO, format=logging_format,
+                      datefmt=date_format)
+
+  logging.info("Unzipping the input target_files.zip: %s", args[0])
+  input_tmp, input_zip = common.UnzipTemp(args[0])
+
+  ValidateFileConsistency(input_zip, input_tmp)
+
+  # TODO: Check if the OTA keys have been properly updated (the ones on /system,
+  # in recovery image).
+
+  # TODO(b/35411009): Verify the contents in /system/bin/install-recovery.sh.
+
+  logging.info("Done.")
+
+
+if __name__ == '__main__':
+  try:
+    main(sys.argv[1:])
+  finally:
+    common.Cleanup()
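
A quick illustration of the rounding that the consistency check relies on: a file's unpacked bytes are padded with '\0' up to the next 4096-byte boundary before being hashed against the SHA-1 of the blocks it occupies. The sample values below are arbitrary.

    def RoundUpTo4K(value):
      rounded_up = value + 4095
      return rounded_up - (rounded_up % 4096)

    RoundUpTo4K(5)      # -> 4096  (a 5-byte file still occupies one full block)
    RoundUpTo4K(4096)   # -> 4096
    RoundUpTo4K(4097)   # -> 8192
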
diff --git a/tools/signapk/Android.mk b/tools/signapk/Android.mk
index 4506e2f..051a51d 100644
--- a/tools/signapk/Android.mk
+++ b/tools/signapk/Android.mk
@@ -30,7 +30,6 @@
 include $(BUILD_HOST_JAVA_LIBRARY)
 
 ifeq ($(TARGET_BUILD_APPS),)
-ifeq ($(BRILLO),)
 # The post-build signing tools need signapk.jar and its shared libraries,
 # but we don't need this if we're just doing unbundled apps.
 my_dist_files := $(LOCAL_INSTALLED_MODULE) \
@@ -39,4 +38,3 @@
 $(call dist-for-goals,droidcore,$(my_dist_files))
 my_dist_files :=
 endif
-endif
diff --git a/tools/warn.py b/tools/warn.py
index 5be6d9d..8aed830 100755
--- a/tools/warn.py
+++ b/tools/warn.py
@@ -73,14 +73,9 @@
 # New dynamic HTML related function to emit data:
 #   escape_string, strip_escape_string, emit_warning_arrays
 #   emit_js_data():
-#
-# To emit csv files of warning message counts:
-#   flag --gencsv
-#   description_for_csv, string_for_csv:
-#   count_severity(sev, kind):
-#   dump_csv():
 
 import argparse
+import csv
 import multiprocessing
 import os
 import re
@@ -88,6 +83,9 @@
 import sys
 
 parser = argparse.ArgumentParser(description='Convert a build log into HTML')
+parser.add_argument('--csvpath',
+                    help='Save CSV warning file to the passed absolute path',
+                    default=None)
 parser.add_argument('--gencsv',
                     help='Generate a CSV file with number of various warnings',
                     action='store_true',
@@ -2387,7 +2385,8 @@
 
 
 def parse_input_file(infile):
-  """Parse input file, match warning lines."""
+  """Parse input file, collect parameters and warning lines."""
+  global android_root
   global platform_version
   global target_product
   global target_variant
@@ -2414,6 +2413,9 @@
       m = re.search('(?<=^TARGET_BUILD_VARIANT=).*', line)
       if m is not None:
         target_variant = m.group(0)
+      m = re.search('.* TOP=([^ ]*) .*', line)
+      if m is not None:
+        android_root = m.group(1)
   return warning_lines
 
 
@@ -2465,10 +2467,11 @@
     if (FlagURL == "") return line;
     if (FlagSeparator == "") {
       return line.replace(ParseLinePattern,
-        "<a href='" + FlagURL + "/$1'>$1</a>:$2:$3");
+        "<a target='_blank' href='" + FlagURL + "/$1'>$1</a>:$2:$3");
     }
     return line.replace(ParseLinePattern,
-      "<a href='" + FlagURL + "/$1" + FlagSeparator + "$2'>$1:$2</a>:$3");
+      "<a target='_blank' href='" + FlagURL + "/$1" + FlagSeparator +
+        "$2'>$1:$2</a>:$3");
   }
   function createArrayOfDictionaries(n) {
     var result = [];
@@ -2672,48 +2675,46 @@
   return category['description']
 
 
-def string_for_csv(s):
-  # Only some Java warning desciptions have used quotation marks.
-  # TODO(chh): if s has double quote character, s should be quoted.
-  if ',' in s:
-    # TODO(chh): replace a double quote with two double quotes in s.
-    return '"{}"'.format(s)
-  return s
-
-
-def count_severity(sev, kind):
+def count_severity(writer, sev, kind):
   """Count warnings of given severity."""
   total = 0
   for i in warn_patterns:
     if i['severity'] == sev and i['members']:
       n = len(i['members'])
       total += n
-      warning = string_for_csv(kind + ': ' + description_for_csv(i))
-      print '{},,{}'.format(n, warning)
+      warning = kind + ': ' + description_for_csv(i)
+      writer.writerow([n, '', warning])
       # print number of warnings for each project, ordered by project name.
       projects = i['projects'].keys()
       projects.sort()
       for p in projects:
-        print '{},{},{}'.format(i['projects'][p], p, warning)
-  print '{},,{}'.format(total, kind + ' warnings')
+        writer.writerow([i['projects'][p], p, warning])
+  writer.writerow([total, '', kind + ' warnings'])
+
   return total
 
 
 # dump number of warnings in csv format to stdout
-def dump_csv():
+def dump_csv(writer):
   """Dump number of warnings in csv format to stdout."""
   sort_warnings()
   total = 0
   for s in Severity.range:
-    total += count_severity(s, Severity.column_headers[s])
-  print '{},,{}'.format(total, 'All warnings')
+    total += count_severity(writer, s, Severity.column_headers[s])
+  writer.writerow([total, '', 'All warnings'])
 
 
 def main():
   warning_lines = parse_input_file(open(args.buildlog, 'r'))
   parallel_classify_warnings(warning_lines)
+  # If a user passes a csv path, save the csv output to that path.
+  # If the user also passed --gencsv, write the csv output to stdout too.
+  # If the user did not pass the --gencsv flag, dump the html report to stdout.
+  if args.csvpath:
+    with open(args.csvpath, 'w') as f:
+      dump_csv(csv.writer(f, lineterminator='\n'))
   if args.gencsv:
-    dump_csv()
+    dump_csv(csv.writer(sys.stdout, lineterminator='\n'))
   else:
     dump_html()