Merge "Add LOCAL_OVERRIDES_MODULES"
diff --git a/CleanSpec.mk b/CleanSpec.mk
index e0c826c..b955e25 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -405,6 +405,21 @@
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/vendor/lib*)
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/vendor/lib*)
 
+# Revert that move
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/vendor/lib*)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/vendor/lib*)
+
+# Sanitized libraries now live in a different location.
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/data/lib*)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/data/vendor/lib*)
+
+# Soong module variant change, remove obsolete intermediates
+$(call add-clean-step, rm -rf $(OUT_DIR)/soong/.intermediates)
+
+# Version checking moving to Soong
+$(call add-clean-step, rm -rf $(OUT_DIR)/versions_checked.mk)
+
 # ************************************************
 # NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
 # ************************************************
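New steps follow the same pattern; a hypothetical future addition (libfoo is invented) would be appended at the end of the list, just before that marker block:

    $(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/SHARED_LIBRARIES/libfoo_intermediates)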
diff --git a/OWNERS b/OWNERS
new file mode 100644
index 0000000..89b446a
--- /dev/null
+++ b/OWNERS
@@ -0,0 +1,2 @@
+ccross@android.com
+dwillemsen@google.com
diff --git a/buildspec.mk.default b/buildspec.mk.default
index a7ac7ec..b31578a 100644
--- a/buildspec.mk.default
+++ b/buildspec.mk.default
@@ -36,6 +36,12 @@
 #TARGET_BUILD_VARIANT:=eng
 endif
 
+# Choose a targeted release.  If you don't pick one, the default is the
+# soonest future release.
+ifndef TARGET_PLATFORM_RELEASE
+#TARGET_PLATFORM_RELEASE:=OPR1
+endif
+
 # Choose additional targets to always install, even when building
 # minimal targets like "make droid".  This takes simple target names
 # like "Browser" or "MyApp", the names used by LOCAL_MODULE or
@@ -105,4 +111,4 @@
 # variable will be changed.  After you have modified this file with the new
 # changes (see buildspec.mk.default), update this to the new value from
 # buildspec.mk.default.
-BUILD_ENV_SEQUENCE_NUMBER := 12
+BUILD_ENV_SEQUENCE_NUMBER := 13
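Because the sequence number changed, a local buildspec.mk has to be updated to match; a minimal sketch of the relevant lines after copying the new block (OPR1 is just the example release named in the default file):

    ifndef TARGET_PLATFORM_RELEASE
    TARGET_PLATFORM_RELEASE:=OPR1
    endif

    BUILD_ENV_SEQUENCE_NUMBER := 13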
diff --git a/core/Makefile b/core/Makefile
index cf0aa4a..b6a9780 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -191,11 +191,16 @@
 
 BUILDINFO_SH := build/tools/buildinfo.sh
 
-# TARGET_BUILD_FLAVOR and ro.build.flavor are used only by the test harness to distinguish builds.
+# TARGET_BUILD_FLAVOR and ro.build.flavor are used only by the test
+# harness to distinguish builds. Only add _asan for a sanitized build
+# if it isn't already a part of the flavor (via a dedicated lunch
+# config for example).
 TARGET_BUILD_FLAVOR := $(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)
 ifdef SANITIZE_TARGET
+ifeq (,$(findstring _asan,$(TARGET_BUILD_FLAVOR)))
 TARGET_BUILD_FLAVOR := $(TARGET_BUILD_FLAVOR)_asan
 endif
+endif
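Illustration of the new guard (product names hypothetical):

    # SANITIZE_TARGET build of a regular product:
    #   TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug
    #   -> TARGET_BUILD_FLAVOR = aosp_arm64-userdebug_asan
    # dedicated sanitized lunch config, _asan already present:
    #   TARGET_PRODUCT=aosp_arm64_asan TARGET_BUILD_VARIANT=userdebug
    #   -> TARGET_BUILD_FLAVOR = aosp_arm64_asan-userdebug   (nothing appended)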
 
 ifdef TARGET_SYSTEM_PROP
 system_prop_file := $(TARGET_SYSTEM_PROP)
@@ -562,25 +567,25 @@
 
 else ifeq (true,$(BOARD_AVB_ENABLE)) # TARGET_BOOTIMAGE_USE_EXT2 != true
 
-$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(AVBTOOL) $(INTERNAL_BOOTIMAGE_FILES)
+$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(AVBTOOL) $(INTERNAL_BOOTIMAGE_FILES) $(BOARD_AVB_BOOT_KEY_PATH)
 	$(call pretty,"Target boot image: $@")
 	$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@
 	$(hide) $(call assert-max-image-size,$@,$(BOARD_BOOTIMAGE_PARTITION_SIZE))
 	$(hide) $(AVBTOOL) add_hash_footer \
 	  --image $@ \
 	  --partition_size $(BOARD_BOOTIMAGE_PARTITION_SIZE) \
-	  --partition_name boot $(INTERNAL_AVB_SIGNING_ARGS) \
+	  --partition_name boot $(INTERNAL_AVB_BOOT_SIGNING_ARGS) \
 	  $(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS)
 
 .PHONY: bootimage-nodeps
-bootimage-nodeps: $(MKBOOTIMG) $(AVBTOOL)
+bootimage-nodeps: $(MKBOOTIMG) $(AVBTOOL) $(BOARD_AVB_BOOT_KEY_PATH)
 	@echo "make $@: ignoring dependencies"
 	$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(INSTALLED_BOOTIMAGE_TARGET)
 	$(hide) $(call assert-max-image-size,$(INSTALLED_BOOTIMAGE_TARGET),$(BOARD_BOOTIMAGE_PARTITION_SIZE))
 	$(hide) $(AVBTOOL) add_hash_footer \
 	  --image $@ \
 	  --partition_size $(BOARD_BOOTIMAGE_PARTITION_SIZE) \
-	  --partition_name boot $(INTERNAL_AVB_SIGNING_ARGS) \
+	  --partition_name boot $(INTERNAL_AVB_BOOT_SIGNING_ARGS) \
 	  $(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS)
 
 else ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_BOOT_SIGNER)) # BOARD_AVB_ENABLE != true
@@ -653,13 +658,14 @@
 
 .PHONY: notice_files
 
-# Create the rule to combine the files into text and html forms
-# $(1) - Plain text output file
-# $(2) - HTML output file
-# $(3) - File title
-# $(4) - Directory to use.  Notice files are all $(4)/src.  Other
+# Create the rule to combine the files into text and html/xml forms
+# $(1) - xml_excluded_vendor|xml_vendor|html
+# $(2) - Plain text output file
+# $(3) - HTML/XML output file
+# $(4) - File title
+# $(5) - Directory to use.  Notice files are all $(5)/src.  Other
 #		 directories in there will be used for scratch
-# $(5) - Dependencies for the output files
+# $(6) - Dependencies for the output files
 #
 # The algorithm here is that we go collect a hash for each of the notice
 # files and write the names of the files that match that hash.  Then
@@ -673,12 +679,16 @@
 # original notice files instead of making rules to copy them somewhere.
 # Then we could traverse that without quite as much bash drama.
 define combine-notice-files
-$(1) $(2): PRIVATE_MESSAGE := $(3)
-$(1) $(2): PRIVATE_DIR := $(4)
-$(1) : $(2)
-$(2) : $(5) $(BUILD_SYSTEM)/Makefile build/tools/generate-notice-files.py
-	build/tools/generate-notice-files.py $(1) $(2) $$(PRIVATE_MESSAGE) $$(PRIVATE_DIR)/src
-notice_files: $(1) $(2)
+$(2) $(3): PRIVATE_MESSAGE := $(4)
+$(2) $(3): PRIVATE_DIR := $(5)
+$(2) : $(3)
+$(3) : $(6) $(BUILD_SYSTEM)/Makefile build/tools/generate-notice-files.py
+	build/tools/generate-notice-files.py --text-output $(2) \
+		$(if $(filter $(1),xml_excluded_vendor),-e vendor --xml-output, \
+		  $(if $(filter $(1),xml_vendor),-i vendor --xml-output, \
+		    --html-output)) $(3) \
+		-t $$(PRIVATE_MESSAGE) -s $$(PRIVATE_DIR)/src
+notice_files: $(2) $(3)
 endef
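As a concrete reading of the new first argument: with PRODUCT_FULL_TREBLE set (see the invocations below), the xml_excluded_vendor call roughly expands to

    build/tools/generate-notice-files.py --text-output $(TARGET_OUT_INTERMEDIATES)/NOTICE.txt \
        -e vendor --xml-output $(TARGET_OUT_INTERMEDIATES)/NOTICE.xml \
        -t "Notices for files contained in the filesystem images in this directory:" \
        -s $(TARGET_OUT_NOTICE_FILES)/src

while the html case keeps the old --html-output behaviour.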
 
 # TODO These intermediate NOTICE.txt/NOTICE.html files should go into
@@ -686,24 +696,51 @@
 # the src subdirectory.
 
 target_notice_file_txt := $(TARGET_OUT_INTERMEDIATES)/NOTICE.txt
-target_notice_file_html := $(TARGET_OUT_INTERMEDIATES)/NOTICE.html
-target_notice_file_html_gz := $(TARGET_OUT_INTERMEDIATES)/NOTICE.html.gz
+target_notice_file_html_or_xml := $(TARGET_OUT_INTERMEDIATES)/NOTICE.html
+target_notice_file_html_or_xml_gz := $(TARGET_OUT_INTERMEDIATES)/NOTICE.html.gz
+installed_notice_html_or_xml_gz := $(TARGET_OUT)/etc/NOTICE.html.gz
 tools_notice_file_txt := $(HOST_OUT_INTERMEDIATES)/NOTICE.txt
 tools_notice_file_html := $(HOST_OUT_INTERMEDIATES)/NOTICE.html
 
+ifeq ($(PRODUCT_FULL_TREBLE),true)
+target_notice_file_html_or_xml := $(TARGET_OUT_INTERMEDIATES)/NOTICE.xml
+target_notice_file_html_or_xml_gz := $(TARGET_OUT_INTERMEDIATES)/NOTICE.xml.gz
+installed_notice_html_or_xml_gz := $(TARGET_OUT)/etc/NOTICE.xml.gz
+
+target_vendor_notice_file_txt := $(TARGET_OUT_INTERMEDIATES)/NOTICE_VENDOR.txt
+target_vendor_notice_file_xml := $(TARGET_OUT_INTERMEDIATES)/NOTICE_VENDOR.xml
+target_vendor_notice_file_xml_gz := $(TARGET_OUT_INTERMEDIATES)/NOTICE_VENDOR.xml.gz
+installed_vendor_notice_xml_gz := $(TARGET_OUT_VENDOR)/etc/NOTICE.xml.gz
+endif
+
 ifndef TARGET_BUILD_APPS
 kernel_notice_file := $(TARGET_OUT_NOTICE_FILES)/src/kernel.txt
 winpthreads_notice_file := $(TARGET_OUT_NOTICE_FILES)/src/winpthreads.txt
 pdk_fusion_notice_files := $(filter $(TARGET_OUT_NOTICE_FILES)/%, $(ALL_PDK_FUSION_FILES))
 
-$(eval $(call combine-notice-files, \
+ifdef target_vendor_notice_file_xml_gz
+$(eval $(call combine-notice-files, xml_excluded_vendor, \
 			$(target_notice_file_txt), \
-			$(target_notice_file_html), \
+			$(target_notice_file_html_or_xml), \
 			"Notices for files contained in the filesystem images in this directory:", \
 			$(TARGET_OUT_NOTICE_FILES), \
 			$(ALL_DEFAULT_INSTALLED_MODULES) $(kernel_notice_file) $(pdk_fusion_notice_files)))
+$(eval $(call combine-notice-files, xml_vendor, \
+			$(target_vendor_notice_file_txt), \
+			$(target_vendor_notice_file_xml), \
+			"Notices for files contained in the vendor filesystem image in this directory:", \
+			$(TARGET_OUT_NOTICE_FILES), \
+			$(target_notice_file_html_or_xml)))
+else
+$(eval $(call combine-notice-files, html, \
+			$(target_notice_file_txt), \
+			$(target_notice_file_html_or_xml), \
+			"Notices for files contained in the filesystem images in this directory:", \
+			$(TARGET_OUT_NOTICE_FILES), \
+			$(ALL_DEFAULT_INSTALLED_MODULES) $(kernel_notice_file) $(pdk_fusion_notice_files)))
+endif
 
-$(eval $(call combine-notice-files, \
+$(eval $(call combine-notice-files, html, \
 			$(tools_notice_file_txt), \
 			$(tools_notice_file_html), \
 			"Notices for files contained in the tools directory:", \
@@ -716,15 +753,25 @@
 # the module processing has already been done -- in fact, we used the
 # fact that all that has been done to get the list of modules that we
 # need notice files for.
-$(target_notice_file_html_gz): $(target_notice_file_html) | $(MINIGZIP)
+$(target_notice_file_html_or_xml_gz): $(target_notice_file_html_or_xml) | $(MINIGZIP)
 	$(hide) $(MINIGZIP) -9 < $< > $@
-installed_notice_html_gz := $(TARGET_OUT)/etc/NOTICE.html.gz
-$(installed_notice_html_gz): $(target_notice_file_html_gz)
+$(installed_notice_html_or_xml_gz): $(target_notice_file_html_or_xml_gz)
 	$(copy-file-to-target)
 
+ifdef target_vendor_notice_file_xml_gz
+# Install the vendor xml file at /vendor/etc/NOTICE.xml.gz.
+$(target_vendor_notice_file_xml_gz): $(target_vendor_notice_file_xml) | $(MINIGZIP)
+	$(hide) $(MINIGZIP) -9 < $< > $@
+$(installed_vendor_notice_xml_gz): $(target_vendor_notice_file_xml_gz)
+	$(copy-file-to-target)
+endif
+
 # if we've been run by mm, mmm, etc., don't reinstall this every time
 ifeq ($(ONE_SHOT_MAKEFILE),)
-ALL_DEFAULT_INSTALLED_MODULES += $(installed_notice_html_gz)
+  ALL_DEFAULT_INSTALLED_MODULES += $(installed_notice_html_or_xml_gz)
+  ifdef target_vendor_notice_file_xml_gz
+    ALL_DEFAULT_INSTALLED_MODULES += $(installed_vendor_notice_xml_gz)
+  endif
 endif
 endif  # TARGET_BUILD_APPS
 
@@ -759,11 +806,11 @@
 	$(hide) zip -qjX $@ $<
 	$(remove-timestamps-from-package)
 
-# Carry the public key for update_engine if it's a non-Brillo target that
+# Carry the public key for update_engine if it's a non-IoT target that
 # uses the AB updater. We use the same key as otacerts but in RSA public key
 # format.
 ifeq ($(AB_OTA_UPDATER),true)
-ifeq ($(BRILLO),)
+ifneq ($(PRODUCT_IOT),true)
 ALL_DEFAULT_INSTALLED_MODULES += $(TARGET_OUT_ETC)/update_engine/update-payload-key.pub.pem
 $(TARGET_OUT_ETC)/update_engine/update-payload-key.pub.pem: $(addsuffix .x509.pem,$(DEFAULT_KEY_CERT_PAIR))
 	$(hide) rm -f $@
@@ -819,6 +866,10 @@
 endif
 endif
 
+ifeq ($(BOARD_AVB_ENABLE),true)
+INTERNAL_USERIMAGES_DEPS += $(AVBTOOL)
+endif
+
 ifneq (true,$(TARGET_USERIMAGES_SPARSE_SQUASHFS_DISABLED))
   INTERNAL_USERIMAGES_SPARSE_SQUASHFS_FLAG := -s
 endif
@@ -840,6 +891,10 @@
 
 INTERNAL_USERIMAGES_DEPS += $(BLK_ALLOC_TO_BASE_FS)
 
+ifeq ($(INTERNAL_USERIMAGES_USE_EXT),true)
+INTERNAL_USERIMAGES_DEPS += $(MKE2FS_CONF)
+endif
+
 # $(1): the path of the output dictionary file
 # $(2): additional "key=value" pairs to append to the dictionary file.
 define generate-userimage-prop-dictionary
@@ -855,6 +910,7 @@
 $(if $(BOARD_SYSTEMIMAGE_SQUASHFS_BLOCK_SIZE),$(hide) echo "system_squashfs_block_size=$(BOARD_SYSTEMIMAGE_SQUASHFS_BLOCK_SIZE)" >> $(1))
 $(if $(BOARD_SYSTEMIMAGE_SQUASHFS_DISABLE_4K_ALIGN),$(hide) echo "system_squashfs_disable_4k_align=$(BOARD_SYSTEMIMAGE_SQUASHFS_DISABLE_4K_ALIGN)" >> $(1))
 $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_BASE_FS_PATH),$(hide) echo "system_base_fs_file=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_BASE_FS_PATH)" >> $(1))
+$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_HEADROOM),$(hide) echo "system_headroom=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_HEADROOM)" >> $(1))
 $(if $(BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "userdata_fs_type=$(BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
 $(if $(BOARD_USERDATAIMAGE_PARTITION_SIZE),$(hide) echo "userdata_size=$(BOARD_USERDATAIMAGE_PARTITION_SIZE)" >> $(1))
 $(if $(BOARD_FLASH_LOGICAL_BLOCK_SIZE), $(hide) echo "flash_logical_block_size=$(BOARD_FLASH_LOGICAL_BLOCK_SIZE)" >> $(1))
@@ -888,10 +944,21 @@
 $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot_subkey=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_SUBKEY)" >> $(1))
 $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT),$(hide) echo "futility=$(notdir $(FUTILITY))" >> $(1))
 $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot_signer_cmd=$(VBOOT_SIGNER)" >> $(1))
-$(if $(BOARD_AVB_ENABLE),$(hide) echo "avb_signing_args=$(INTERNAL_AVB_SIGNING_ARGS)" >> $(1))
-$(if $(BOARD_AVB_ENABLE),$(hide) echo "avb_avbtool=$(AVBTOOL)" >> $(1))
-$(if $(BOARD_AVB_ENABLE),$(hide) echo "system_avb_enable=$(BOARD_AVB_ENABLE)" >> $(1))
-$(if $(BOARD_AVB_ENABLE),$(hide) echo "system_avb_add_hashtree_footer_args=$(BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS)" >> $(1))
+$(if $(BOARD_AVB_ENABLE),$(hide) echo "avb_avbtool=$(notdir $(AVBTOOL))" >> $(1))
+$(if $(BOARD_AVB_ENABLE),$(hide) echo "avb_system_hashtree_enable=$(BOARD_AVB_ENABLE)" >> $(1))
+$(if $(BOARD_AVB_ENABLE),$(hide) echo "avb_system_add_hashtree_footer_args=$(BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS)" >> $(1))
+$(if $(BOARD_AVB_ENABLE),\
+    $(if $(BOARD_AVB_SYSTEM_KEY_PATH),\
+        $(hide) echo "avb_system_key_path=$(BOARD_AVB_SYSTEM_KEY_PATH)" >> $(1)
+        $(hide) echo "avb_system_algorithm=$(BOARD_AVB_SYSTEM_ALGORITHM)" >> $(1)
+        $(hide) echo "avb_system_rollback_index_location=$(BOARD_AVB_SYSTEM_ROLLBACK_INDEX_LOCATION)" >> $(1)))
+$(if $(BOARD_AVB_ENABLE),$(hide) echo "avb_vendor_hashtree_enable=$(BOARD_AVB_ENABLE)" >> $(1))
+$(if $(BOARD_AVB_ENABLE),$(hide) echo "avb_vendor_add_hashtree_footer_args=$(BOARD_AVB_VENDOR_ADD_HASHTREE_FOOTER_ARGS)" >> $(1))
+$(if $(BOARD_AVB_ENABLE),\
+    $(if $(BOARD_AVB_VENDOR_KEY_PATH),\
+        $(hide) echo "avb_vendor_key_path=$(BOARD_AVB_VENDOR_KEY_PATH)" >> $(1)
+        $(hide) echo "avb_vendor_algorithm=$(BOARD_AVB_VENDOR_ALGORITHM)" >> $(1)
+        $(hide) echo "avb_vendor_rollback_index_location=$(BOARD_AVB_VENDOR_ROLLBACK_INDEX_LOCATION)" >> $(1)))
 $(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)),\
     $(hide) echo "recovery_as_boot=true" >> $(1))
 $(if $(filter true,$(BOARD_BUILD_SYSTEM_ROOT_IMAGE)),\
@@ -1043,7 +1110,7 @@
     $(hide) mkdir -p $(4)/lib/modules/0.0/$(3)lib/modules
     $(hide) cp $(1) $(4)/lib/modules/0.0/$(3)lib/modules
     $(hide) $(DEPMOD) -b $(4) 0.0
-    $(hide) sed -e 's/\(.*modules.*\):/\/\1:/g' -e 's/: \(.*modules.*\)/: \/\1/g' -i $(4)/lib/modules/0.0/modules.dep
+    $(hide) sed -e 's/\(.*modules.*\):/\/\1:/g' -e 's/ \([^ ]*modules[^ ]*\)/ \/\1/g' -i $(4)/lib/modules/0.0/modules.dep
     $(hide) cp $(4)/lib/modules/0.0/modules.dep $(2)/lib/modules
 endef
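Illustration of the sed fix (module names hypothetical): given a modules.dep line

    lib/modules/wlan.ko: lib/modules/cfg80211.ko lib/modules/compat.ko

the old expression only made the first dependency absolute,

    /lib/modules/wlan.ko: /lib/modules/cfg80211.ko lib/modules/compat.ko

whereas the new per-token expression prefixes every dependency:

    /lib/modules/wlan.ko: /lib/modules/cfg80211.ko /lib/modules/compat.ko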
 
@@ -1095,7 +1162,7 @@
       $(hide) $(AVBTOOL) add_hash_footer \
         --image $(1) \
         --partition_size $(BOARD_BOOTIMAGE_PARTITION_SIZE) \
-        --partition_name boot $(INTERNAL_AVB_SIGNING_ARGS) \
+        --partition_name boot $(INTERNAL_AVB_BOOT_SIGNING_ARGS) \
         $(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS))
   $(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)), \
     $(hide) $(call assert-max-image-size,$(1),$(BOARD_BOOTIMAGE_PARTITION_SIZE)), \
@@ -1110,6 +1177,9 @@
 ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT))
 $(INSTALLED_BOOTIMAGE_TARGET) : $(VBOOT_SIGNER)
 endif
+ifeq (true,$(BOARD_AVB_ENABLE))
+$(INSTALLED_BOOTIMAGE_TARGET) : $(AVBTOOL) $(BOARD_AVB_BOOT_KEY_PATH)
+endif
 $(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTFS) $(MKBOOTIMG) $(MINIGZIP) \
 		$(INSTALLED_RAMDISK_TARGET) \
 		$(INTERNAL_RECOVERYIMAGE_FILES) \
@@ -1178,8 +1248,16 @@
     $(PDK_FUSION_SYSIMG_FILES) \
     $(RECOVERY_RESOURCE_ZIP))
 
-
 FULL_SYSTEMIMAGE_DEPS := $(INTERNAL_SYSTEMIMAGE_FILES) $(INTERNAL_USERIMAGES_DEPS)
+
+# ASAN libraries in the system image - add dependency.
+ASAN_IN_SYSTEM_INSTALLED := $(TARGET_OUT)/asan.tar.bz2
+ifneq (,$(SANITIZE_TARGET))
+  ifeq (true,$(SANITIZE_TARGET_SYSTEM))
+    FULL_SYSTEMIMAGE_DEPS += $(ASAN_IN_SYSTEM_INSTALLED)
+  endif
+endif
+
 # -----------------------------------------------------------------
 # installed file list
 # Depending on anything that $(BUILT_SYSTEMIMAGE) depends on.
@@ -1187,11 +1265,11 @@
 # so that we can get the size stat even if the build fails due to too large
 # system image.
 INSTALLED_FILES_FILE := $(PRODUCT_OUT)/installed-files.txt
-$(INSTALLED_FILES_FILE): $(FULL_SYSTEMIMAGE_DEPS)
+$(INSTALLED_FILES_FILE): $(FULL_SYSTEMIMAGE_DEPS) $(FILESLIST)
 	@echo Installed file list: $@
 	@mkdir -p $(dir $@)
 	@rm -f $@
-	$(hide) build/tools/fileslist.py $(TARGET_OUT) > $(@:.txt=.json)
+	$(hide) $(FILESLIST) $(TARGET_OUT) > $(@:.txt=.json)
 	$(hide) build/tools/fileslist_util.py -c $(@:.txt=.json) > $@
 
 .PHONY: installed-file-list
@@ -1291,6 +1369,9 @@
 endif
 endif
 
+.PHONY: sync
+sync: $(INTERNAL_SYSTEMIMAGE_FILES)
+
 #######
 ## system tarball
 define build-systemtarball-target
@@ -1355,7 +1436,7 @@
 	  $(hide) echo "PDK.DEXPREOPT.$(m).MULTILIB:=$(DEXPREOPT.$(m).MULTILIB)" >> $@$(newline)\
 	  $(hide) echo "PDK.DEXPREOPT.$(m).DEX_PREOPT_FLAGS:=$(DEXPREOPT.$(m).DEX_PREOPT_FLAGS)" >> $@$(newline)\
 	  $(hide) echo "PDK.DEXPREOPT.$(m).PRIVILEGED_MODULE:=$(DEXPREOPT.$(m).PRIVILEGED_MODULE)" >> $@$(newline)\
-	  $(hide) echo "PDK.DEXPREOPT.$(m).PROPRIETARY_MODULE:=$(DEXPREOPT.$(m).PROPRIETARY_MODULE)" >> $@$(newline)\
+	  $(hide) echo "PDK.DEXPREOPT.$(m).VENDOR_MODULE:=$(DEXPREOPT.$(m).VENDOR_MODULE)" >> $@$(newline)\
 	  $(hide) echo "PDK.DEXPREOPT.$(m).TARGET_ARCH:=$(DEXPREOPT.$(m).TARGET_ARCH)" >> $@$(newline)\
 	  $(hide) echo "PDK.DEXPREOPT.$(m).STRIPPED_SRC:=$(patsubst $(PRODUCT_OUT)/%,%,$(DEXPREOPT.$(m).INSTALLED_STRIPPED))" >> $@$(newline)\
 	  )
@@ -1456,9 +1537,11 @@
 
 # We just build this directly to the install location.
 INSTALLED_USERDATAIMAGE_TARGET := $(BUILT_USERDATAIMAGE_TARGET)
-$(INSTALLED_USERDATAIMAGE_TARGET): $(INTERNAL_USERIMAGES_DEPS) \
-                                   $(INTERNAL_USERDATAIMAGE_FILES) \
-                                   $(BUILD_IMAGE_SRCS)
+INSTALLED_USERDATAIMAGE_TARGET_DEPS := \
+    $(INTERNAL_USERIMAGES_DEPS) \
+    $(INTERNAL_USERDATAIMAGE_FILES) \
+    $(BUILD_IMAGE_SRCS)
+$(INSTALLED_USERDATAIMAGE_TARGET): $(INSTALLED_USERDATAIMAGE_TARGET_DEPS)
 	$(build-userdataimage-target)
 
 .PHONY: userdataimage-nodeps
@@ -1468,6 +1551,19 @@
 endif # not skip_userdata.img
 skip_userdata.img :=
 
+# ASAN libraries in the system image - build rule.
+ASAN_OUT_DIRS_FOR_SYSTEM_INSTALL := $(sort $(patsubst $(PRODUCT_OUT)/%,%,\
+  $(TARGET_OUT_SHARED_LIBRARIES) \
+  $(2ND_TARGET_OUT_SHARED_LIBRARIES) \
+  $(TARGET_OUT_VENDOR_SHARED_LIBRARIES) \
+  $(2ND_TARGET_OUT_VENDOR_SHARED_LIBRARIES)))
+# Extra options: Enforce the system user for the files to avoid having to change ownership.
+ASAN_SYSTEM_INSTALL_OPTIONS := --owner=1000 --group=1000
+# Note: experimentally, it seems not worth it to try to get "best" compression. We don't save
+#       enough space.
+$(ASAN_IN_SYSTEM_INSTALLED): $(INSTALLED_USERDATAIMAGE_TARGET_DEPS)
+	tar cfj $(ASAN_IN_SYSTEM_INSTALLED) $(ASAN_SYSTEM_INSTALL_OPTIONS) -C $(TARGET_OUT_DATA)/.. $(ASAN_OUT_DIRS_FOR_SYSTEM_INSTALL) >/dev/null
+
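For a hypothetical 64-bit target with a 32-bit secondary arch, the rule roughly expands to

    tar cfj $(PRODUCT_OUT)/system/asan.tar.bz2 --owner=1000 --group=1000 \
        -C $(PRODUCT_OUT) system/lib system/lib64 vendor/lib vendor/lib64 >/dev/null

i.e. the sanitized library directories are archived relative to $(PRODUCT_OUT) so they unpack onto the matching paths.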
 #######
 ## data partition tarball
 define build-userdatatarball-target
@@ -1561,61 +1657,6 @@
 endif # BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE
 
 # -----------------------------------------------------------------
-# vbmeta image
-ifeq ($(BOARD_AVB_ENABLE),true)
-
-BUILT_VBMETAIMAGE_TARGET := $(PRODUCT_OUT)/vbmeta.img
-
-INTERNAL_AVB_MAKE_VBMETA_IMAGE_ARGS := \
-    --include_descriptors_from_image $(INSTALLED_BOOTIMAGE_TARGET) \
-    --include_descriptors_from_image $(INSTALLED_SYSTEMIMAGE) \
-    --generate_dm_verity_cmdline_from_hashtree $(INSTALLED_SYSTEMIMAGE)
-
-ifdef BOARD_AVB_ROLLBACK_INDEX
-INTERNAL_AVB_MAKE_VBMETA_IMAGE_ARGS += --rollback_index $(BOARD_AVB_ROLLBACK_INDEX)
-endif
-
-ifndef BOARD_AVB_KEY_PATH
-# If key path isn't specified, use the 4096-bit test key.
-INTERNAL_AVB_SIGNING_ARGS := \
-    --algorithm SHA256_RSA4096 \
-    --key external/avb/test/data/testkey_rsa4096.pem
-else
-INTERNAL_AVB_SIGNING_ARGS := \
-    --algorithm $(BOARD_AVB_ALGORITHM) --key $(BOARD_AVB_KEY_PATH)
-endif
-
-ifndef BOARD_BOOTIMAGE_PARTITION_SIZE
-  $(error BOARD_BOOTIMAGE_PARTITION_SIZE must be set for BOARD_AVB_ENABLE)
-endif
-
-ifndef BOARD_SYSTEMIMAGE_PARTITION_SIZE
-  $(error BOARD_SYSTEMIMAGE_PARTITION_SIZE must be set for BOARD_AVB_ENABLE)
-endif
-
-define build-vbmetaimage-target
-  $(call pretty,"Target vbmeta image: $(INSTALLED_VBMETAIMAGE_TARGET)")
-  $(hide) $(AVBTOOL) make_vbmeta_image \
-    $(INTERNAL_AVB_MAKE_VBMETA_IMAGE_ARGS) \
-    $(INTERNAL_AVB_SIGNING_ARGS) \
-    $(BOARD_AVB_MAKE_VBMETA_IMAGE_ARGS) \
-    --output $@
-endef
-
-INSTALLED_VBMETAIMAGE_TARGET := $(BUILT_VBMETAIMAGE_TARGET)
-$(INSTALLED_VBMETAIMAGE_TARGET): $(AVBTOOL) $(INSTALLED_BOOTIMAGE_TARGET) $(INSTALLED_SYSTEMIMAGE)
-	$(build-vbmetaimage-target)
-
-.PHONY: vbmetaimage-nodeps
-vbmetaimage-nodeps:
-	$(build-vbmetaimage-target)
-
-# We need $(AVBTOOL) for system.img generation.
-FULL_SYSTEMIMAGE_DEPS += $(AVBTOOL)
-
-endif # BOARD_AVB_ENABLE
-
-# -----------------------------------------------------------------
 # system_other partition image
 ifeq ($(BOARD_USES_SYSTEM_OTHER_ODEX),true)
 BOARD_USES_SYSTEM_OTHER := true
@@ -1634,11 +1675,12 @@
       $(ALL_PDK_FUSION_FILES))
 
 INSTALLED_FILES_FILE_SYSTEMOTHER := $(PRODUCT_OUT)/installed-files-system-other.txt
-$(INSTALLED_FILES_FILE_SYSTEMOTHER) : $(INTERNAL_SYSTEMOTHERIMAGE_FILES)
+$(INSTALLED_FILES_FILE_SYSTEMOTHER) : $(INTERNAL_SYSTEMOTHERIMAGE_FILES) $(FILESLIST)
 	@echo Installed file list: $@
 	@mkdir -p $(dir $@)
 	@rm -f $@
-	$(hide) build/tools/fileslist.py $(TARGET_OUT_SYSTEM_OTHER) > $@
+	$(hide) $(FILESLIST) $(TARGET_OUT_SYSTEM_OTHER) > $(@:.txt=.json)
+	$(hide) build/tools/fileslist_util.py -c $(@:.txt=.json) > $@
 
 systemotherimage_intermediates := \
     $(call intermediates-dir-for,PACKAGING,system_other)
@@ -1680,11 +1722,11 @@
 $(INSTALLED_PLATFORM_ZIP) : $(INTERNAL_VENDORIMAGE_FILES)
 
 INSTALLED_FILES_FILE_VENDOR := $(PRODUCT_OUT)/installed-files-vendor.txt
-$(INSTALLED_FILES_FILE_VENDOR) : $(INTERNAL_VENDORIMAGE_FILES)
+$(INSTALLED_FILES_FILE_VENDOR) : $(INTERNAL_VENDORIMAGE_FILES) $(FILESLIST)
 	@echo Installed file list: $@
 	@mkdir -p $(dir $@)
 	@rm -f $@
-	$(hide) build/tools/fileslist.py $(TARGET_OUT_VENDOR) > $(@:.txt=.json)
+	$(hide) $(FILESLIST) $(TARGET_OUT_VENDOR) > $(@:.txt=.json)
 	$(hide) build/tools/fileslist_util.py -c $(@:.txt=.json) > $@
 
 vendorimage_intermediates := \
@@ -1712,12 +1754,185 @@
 vendorimage-nodeps vnod: | $(INTERNAL_USERIMAGES_DEPS) $(DEPMOD)
 	$(build-vendorimage-target)
 
+sync: $(INTERNAL_VENDORIMAGE_FILES)
+
 else ifdef BOARD_PREBUILT_VENDORIMAGE
 INSTALLED_VENDORIMAGE_TARGET := $(PRODUCT_OUT)/vendor.img
 $(eval $(call copy-one-file,$(BOARD_PREBUILT_VENDORIMAGE),$(INSTALLED_VENDORIMAGE_TARGET)))
 endif
 
 # -----------------------------------------------------------------
+# dtbo image
+ifdef BOARD_PREBUILT_DTBOIMAGE
+INSTALLED_DTBOIMAGE_TARGET := $(PRODUCT_OUT)/dtbo.img
+
+ifeq ($(BOARD_AVB_ENABLE),true)
+$(INSTALLED_DTBOIMAGE_TARGET): $(BOARD_PREBUILT_DTBOIMAGE) $(AVBTOOL) $(BOARD_AVB_DTBO_KEY_PATH)
+	cp $(BOARD_PREBUILT_DTBOIMAGE) $@
+	$(AVBTOOL) add_hash_footer \
+		--image $@ \
+		--partition_size $(BOARD_DTBOIMG_PARTITION_SIZE) \
+		--partition_name dtbo $(INTERNAL_AVB_DTBO_SIGNING_ARGS) \
+		$(BOARD_AVB_DTBO_ADD_HASH_FOOTER_ARGS)
+else
+$(INSTALLED_DTBOIMAGE_TARGET): $(BOARD_PREBUILT_DTBOIMAGE)
+	cp $(BOARD_PREBUILT_DTBOIMAGE) $@
+endif
+
+endif
+
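A minimal BoardConfig.mk sketch to exercise this (path and size are invented):

    BOARD_PREBUILT_DTBOIMAGE := device/acme/rocket/prebuilt/dtbo.img
    BOARD_DTBOIMG_PARTITION_SIZE := 8388608

Without BOARD_AVB_ENABLE the prebuilt image is simply copied; with it, the hash footer above is appended.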
+# Convert to lower case without requiring a shell, which isn't cacheable.
+to-lower = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,\
+$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,\
+$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,\
+$(subst X,x,$(subst Y,y,$(subst Z,z,$1))))))))))))))))))))))))))
+
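For example:

    $(call to-lower,SYSTEM)   # expands to "system"

which is how the chain-partition helper below derives partition names.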
+# -----------------------------------------------------------------
+# vbmeta image
+ifeq ($(BOARD_AVB_ENABLE),true)
+
+BUILT_VBMETAIMAGE_TARGET := $(PRODUCT_OUT)/vbmeta.img
+AVB_CHAIN_KEY_DIR := $(TARGET_OUT_INTERMEDIATES)/avb_chain_keys
+
+ifdef BOARD_AVB_KEY_PATH
+$(if $(BOARD_AVB_ALGORITHM),,$(error BOARD_AVB_ALGORITHM is not defined))
+else
+# If key path isn't specified, use the 4096-bit test key.
+BOARD_AVB_ALGORITHM := SHA256_RSA4096
+BOARD_AVB_KEY_PATH := external/avb/test/data/testkey_rsa4096.pem
+endif
+
+INTERNAL_AVB_SIGNING_ARGS := \
+    --algorithm $(BOARD_AVB_ALGORITHM) --key $(BOARD_AVB_KEY_PATH)
+
+BOOT_FOOTER_ARGS := BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS
+DTBO_FOOTER_ARGS := BOARD_AVB_DTBO_ADD_HASH_FOOTER_ARGS
+SYSTEM_FOOTER_ARGS := BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS
+VENDOR_FOOTER_ARGS := BOARD_AVB_VENDOR_ADD_HASHTREE_FOOTER_ARGS
+
+# Check and set the required build variables for a chain partition.
+# $(1): the partition to enable AVB chaining for, e.g., BOOT or SYSTEM.
+define check-and-set-avb-chain-args
+$(eval PART := $(1))
+$(eval part=$(call to-lower,$(PART)))
+
+$(eval _key_path := BOARD_AVB_$(PART)_KEY_PATH)
+$(eval _signing_algorithm := BOARD_AVB_$(PART)_ALGORITHM)
+$(eval _rollback_index := BOARD_AVB_$(PART)_ROLLBACK_INDEX)
+$(eval _rollback_index_location := BOARD_AVB_$(PART)_ROLLBACK_INDEX_LOCATION)
+$(if $($(_key_path)),,$(error $(_key_path) is not defined))
+$(if $($(_signing_algorithm)),,$(error $(_signing_algorithm) is not defined))
+$(if $($(_rollback_index)),,$(error $(_rollback_index) is not defined))
+$(if $($(_rollback_index_location)),,$(error $(_rollback_index_location) is not defined))
+
+# Set INTERNAL_AVB_(PART)_SIGNING_ARGS
+$(eval _signing_args := INTERNAL_AVB_$(PART)_SIGNING_ARGS)
+$(eval $(_signing_args) := \
+    --algorithm $($(_signing_algorithm)) --key $($(_key_path)))
+
+$(eval INTERNAL_AVB_MAKE_VBMETA_IMAGE_ARGS += \
+    --chain_partition $(part):$($(_rollback_index_location)):$(AVB_CHAIN_KEY_DIR)/$(part).avbpubkey)
+
+# Set rollback_index via footer args
+$(eval _footer_args := $(PART)_FOOTER_ARGS)
+$(eval $($(_footer_args)) += --rollback_index $($(_rollback_index)))
+endef
+
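A minimal BoardConfig.mk sketch that satisfies these checks for the boot partition (the key comes from the AVB test data; the index values are illustrative):

    BOARD_AVB_BOOT_KEY_PATH := external/avb/test/data/testkey_rsa4096.pem
    BOARD_AVB_BOOT_ALGORITHM := SHA256_RSA4096
    BOARD_AVB_BOOT_ROLLBACK_INDEX := 1
    BOARD_AVB_BOOT_ROLLBACK_INDEX_LOCATION := 2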
+ifdef BOARD_AVB_BOOT_KEY_PATH
+$(eval $(call check-and-set-avb-chain-args,BOOT))
+else
+INTERNAL_AVB_MAKE_VBMETA_IMAGE_ARGS += \
+    --include_descriptors_from_image $(INSTALLED_BOOTIMAGE_TARGET)
+endif
+
+ifdef BOARD_AVB_SYSTEM_KEY_PATH
+$(eval $(call check-and-set-avb-chain-args,SYSTEM))
+else
+INTERNAL_AVB_MAKE_VBMETA_IMAGE_ARGS += \
+    --include_descriptors_from_image $(INSTALLED_SYSTEMIMAGE)
+endif
+
+ifdef INSTALLED_VENDORIMAGE_TARGET
+ifdef BOARD_AVB_VENDOR_KEY_PATH
+$(eval $(call check-and-set-avb-chain-args,VENDOR))
+else
+INTERNAL_AVB_MAKE_VBMETA_IMAGE_ARGS += \
+    --include_descriptors_from_image $(INSTALLED_VENDORIMAGE_TARGET)
+endif
+endif
+
+ifdef INSTALLED_DTBOIMAGE_TARGET
+ifdef BOARD_AVB_DTBO_KEY_PATH
+$(eval $(call check-and-set-avb-chain-args,DTBO))
+else
+INTERNAL_AVB_MAKE_VBMETA_IMAGE_ARGS += \
+    --include_descriptors_from_image $(INSTALLED_DTBOIMAGE_TARGET)
+endif
+endif
+
+INTERNAL_AVB_MAKE_VBMETA_IMAGE_ARGS += --padding_size 4096
+
+# Add a kernel cmdline descriptor so the kernel mounts system.img as root with
+# dm-verity. This works whether system.img is chained or not chained:
+# - chained: the --setup_as_rootfs_from_kernel option adds the dm-verity kernel
+#   cmdline descriptor to system.img
+# - not chained: the --include_descriptors_from_image option of make_vbmeta_image
+#   copies the kernel cmdline descriptor from system.img into vbmeta.img
+ifeq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
+BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS += --setup_as_rootfs_from_kernel
+endif
+
+ifdef BOARD_AVB_ROLLBACK_INDEX
+BOARD_AVB_MAKE_VBMETA_IMAGE_ARGS += --rollback_index $(BOARD_AVB_ROLLBACK_INDEX)
+endif
+
+ifndef BOARD_BOOTIMAGE_PARTITION_SIZE
+  $(error BOARD_BOOTIMAGE_PARTITION_SIZE must be set for BOARD_AVB_ENABLE)
+endif
+
+ifndef BOARD_SYSTEMIMAGE_PARTITION_SIZE
+  $(error BOARD_SYSTEMIMAGE_PARTITION_SIZE must be set for BOARD_AVB_ENABLE)
+endif
+
+# $(1): the directory to extract public keys to
+define extract-avb-chain-public-keys
+  $(if $(BOARD_AVB_BOOT_KEY_PATH),\
+    $(hide) $(AVBTOOL) extract_public_key --key $(BOARD_AVB_BOOT_KEY_PATH) \
+      --output $(1)/boot.avbpubkey)
+  $(if $(BOARD_AVB_SYSTEM_KEY_PATH),\
+    $(hide) $(AVBTOOL) extract_public_key --key $(BOARD_AVB_SYSTEM_KEY_PATH) \
+      --output $(1)/system.avbpubkey)
+  $(if $(BOARD_AVB_VENDOR_KEY_PATH),\
+    $(hide) $(AVBTOOL) extract_public_key --key $(BOARD_AVB_VENDOR_KEY_PATH) \
+      --output $(1)/vendor.avbpubkey)
+  $(if $(BOARD_AVB_DTBO_KEY_PATH),\
+    $(hide) $(AVBTOOL) extract_public_key --key $(BOARD_AVB_DTBO_KEY_PATH) \
+      --output $(1)/dtbo.avbpubkey)
+endef
+
+define build-vbmetaimage-target
+  $(call pretty,"Target vbmeta image: $(INSTALLED_VBMETAIMAGE_TARGET)")
+  $(hide) mkdir -p $(AVB_CHAIN_KEY_DIR)
+  $(call extract-avb-chain-public-keys, $(AVB_CHAIN_KEY_DIR))
+  $(hide) $(AVBTOOL) make_vbmeta_image \
+    $(INTERNAL_AVB_MAKE_VBMETA_IMAGE_ARGS) \
+    $(INTERNAL_AVB_SIGNING_ARGS) \
+    $(BOARD_AVB_MAKE_VBMETA_IMAGE_ARGS) \
+    --output $@
+  $(hide) rm -rf $(AVB_CHAIN_KEY_DIR)
+endef
+
+INSTALLED_VBMETAIMAGE_TARGET := $(BUILT_VBMETAIMAGE_TARGET)
+$(INSTALLED_VBMETAIMAGE_TARGET): $(AVBTOOL) $(INSTALLED_BOOTIMAGE_TARGET) $(INSTALLED_SYSTEMIMAGE) $(INSTALLED_VENDORIMAGE_TARGET) $(INSTALLED_DTBOIMAGE_TARGET) $(BOARD_AVB_KEY_PATH)
+	$(build-vbmetaimage-target)
+
+.PHONY: vbmetaimage-nodeps
+vbmetaimage-nodeps:
+	$(build-vbmetaimage-target)
+
+endif # BOARD_AVB_ENABLE
+
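Putting it together: with only boot chained as in the sketch above, a vendor image built, and the default vbmeta test key, build-vbmetaimage-target would run roughly

    avbtool extract_public_key --key external/avb/test/data/testkey_rsa4096.pem \
        --output $(AVB_CHAIN_KEY_DIR)/boot.avbpubkey
    avbtool make_vbmeta_image \
        --chain_partition boot:2:$(AVB_CHAIN_KEY_DIR)/boot.avbpubkey \
        --include_descriptors_from_image $(PRODUCT_OUT)/system.img \
        --include_descriptors_from_image $(PRODUCT_OUT)/vendor.img \
        --padding_size 4096 \
        --algorithm SHA256_RSA4096 --key external/avb/test/data/testkey_rsa4096.pem \
        --output $(PRODUCT_OUT)/vbmeta.img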
+# -----------------------------------------------------------------
 # bring in the installer image generation defines if necessary
 ifeq ($(TARGET_USE_DISKINSTALLER),true)
 include bootable/diskinstaller/config.mk
@@ -1764,6 +1979,7 @@
   $(HOST_OUT_JAVA_LIBRARIES)/dumpkey.jar \
   $(HOST_OUT_JAVA_LIBRARIES)/signapk.jar \
   $(HOST_OUT_JAVA_LIBRARIES)/BootSignature.jar \
+  $(HOST_OUT_JAVA_LIBRARIES)/VeritySigner.jar \
   $(HOST_OUT_EXECUTABLES)/make_ext4fs \
   $(HOST_OUT_EXECUTABLES)/mkuserimg.sh \
   $(HOST_OUT_EXECUTABLES)/mke2fs \
@@ -1785,6 +2001,7 @@
   $(HOST_OUT_EXECUTABLES)/brillo_update_payload \
   $(HOST_OUT_EXECUTABLES)/lib/shflags/shflags \
   $(HOST_OUT_EXECUTABLES)/delta_generator \
+  $(AVBTOOL) \
   $(BLK_ALLOC_TO_BASE_FS)
 
 ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT))
@@ -1827,22 +2044,32 @@
 BUILT_OTATOOLS_PACKAGE := $(PRODUCT_OUT)/otatools.zip
 $(BUILT_OTATOOLS_PACKAGE): zip_root := $(call intermediates-dir-for,PACKAGING,otatools)/otatools
 
-$(BUILT_OTATOOLS_PACKAGE): $(OTATOOLS) | $(ACP)
+OTATOOLS_DEPS := \
+  system/extras/verity/build_verity_metadata.py \
+  system/extras/ext4_utils/mke2fs.conf \
+  external/avb/test/data/testkey_rsa4096.pem \
+  $(shell find build/target/product/security -type f -name \*.x509.pem -o -name \*.pk8 -o \
+      -name verity_key | sort) \
+  $(shell find device vendor -type f -name \*.pk8 -o -name verifiedboot\* -o \
+      -name \*.x509.pem -o -name oem\*.prop | sort)
+
+OTATOOLS_RELEASETOOLS := \
+  $(shell find build/tools/releasetools -name \*.pyc -prune -o -type f | sort)
+
+ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT))
+OTATOOLS_DEPS += \
+  $(shell find external/vboot_reference/tests/devkeys -type f | sort)
+endif
+
+$(BUILT_OTATOOLS_PACKAGE): $(OTATOOLS) $(OTATOOLS_DEPS) $(OTATOOLS_RELEASETOOLS) | $(ACP)
 	@echo "Package OTA tools: $@"
 	$(hide) rm -rf $@ $(zip_root)
-	$(hide) mkdir -p $(dir $@) $(zip_root)/bin $(zip_root)/framework $(zip_root)/releasetools $(zip_root)/system/extras/verity
+	$(hide) mkdir -p $(dir $@) $(zip_root)/bin $(zip_root)/framework $(zip_root)/releasetools
 	$(call copy-files-with-structure,$(OTATOOLS),$(HOST_OUT)/,$(zip_root))
-	$(hide) $(ACP) $(HOST_OUT_JAVA_LIBRARIES)/VeritySigner.jar $(zip_root)/framework/
-	$(hide) $(ACP) -p system/extras/verity/build_verity_metadata.py $(zip_root)/system/extras/verity/
 	$(hide) $(ACP) -r -d -p build/tools/releasetools/* $(zip_root)/releasetools
-ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT))
-	$(hide) mkdir -p $(zip_root)/external/vboot_reference/tests/devkeys
-	$(hide) $(ACP) -r -d -p external/vboot_reference/tests/devkeys/* $(zip_root)/external/vboot_reference/tests/devkeys
-endif
 	$(hide) rm -rf $@ $(zip_root)/releasetools/*.pyc
 	$(hide) (cd $(zip_root) && zip -qryX $(abspath $@) *)
-	$(hide) zip -qryX $(abspath $@) build/target/product/security/
-	$(hide) find device vendor -name \*.pk8 -o -name verifiedboot\* -o -name \*.x509.pem -o -name oem\*.prop | xargs zip -qryX $(abspath $@)>/dev/null || true
+	$(hide) echo $(OTATOOLS_DEPS) | xargs zip -qryX $(abspath $@)>/dev/null || true
 
 .PHONY: otatools-package
 otatools-package: $(BUILT_OTATOOLS_PACKAGE)
@@ -1891,14 +2118,18 @@
 
 ifeq ($(TARGET_RELEASETOOLS_EXTENSIONS),)
 # default to common dir for device vendor
-$(BUILT_TARGET_FILES_PACKAGE): tool_extensions := $(TARGET_DEVICE_DIR)/../common
+tool_extensions := $(TARGET_DEVICE_DIR)/../common
 else
-$(BUILT_TARGET_FILES_PACKAGE): tool_extensions := $(TARGET_RELEASETOOLS_EXTENSIONS)
+tool_extensions := $(TARGET_RELEASETOOLS_EXTENSIONS)
 endif
+tool_extension := $(wildcard $(tool_extensions)/releasetools.py)
+$(BUILT_TARGET_FILES_PACKAGE): PRIVATE_TOOL_EXTENSIONS := $(tool_extensions)
+$(BUILT_TARGET_FILES_PACKAGE): PRIVATE_TOOL_EXTENSION := $(tool_extension)
 
 ifeq ($(AB_OTA_UPDATER),true)
 # Build zlib fingerprint if using the AB Updater.
 updater_dep := $(TARGET_OUT_COMMON_GEN)/zlib_fingerprint
+updater_dep += system/update_engine/update_engine.conf
 else
 # Build OTA tools if not using the AB Updater.
 updater_dep := $(built_ota_tools)
@@ -1912,6 +2143,18 @@
 $(BUILT_TARGET_FILES_PACKAGE): PRIVATE_RECOVERY_OUT := RECOVERY
 endif
 
+ifeq ($(AB_OTA_UPDATER),true)
+  ifdef BRILLO_VENDOR_PARTITIONS
+    $(BUILT_TARGET_FILES_PACKAGE): $(foreach p,$(BRILLO_VENDOR_PARTITIONS),\
+                                     $(call word-colon,1,$(p))/$(call word-colon,2,$(p)))
+  endif
+  ifdef OSRELEASED_DIRECTORY
+    $(BUILT_TARGET_FILES_PACKAGE): $(TARGET_OUT_OEM)/$(OSRELEASED_DIRECTORY)/product_id
+    $(BUILT_TARGET_FILES_PACKAGE): $(TARGET_OUT_OEM)/$(OSRELEASED_DIRECTORY)/product_version
+    $(BUILT_TARGET_FILES_PACKAGE): $(TARGET_OUT_ETC)/$(OSRELEASED_DIRECTORY)/system_version
+  endif
+endif
+
 # Run fs_config while creating the target files package
 # $1: root directory
 # $2: add prefix
@@ -1925,19 +2168,27 @@
 		$(INSTALLED_BOOTIMAGE_TARGET) \
 		$(INSTALLED_RADIOIMAGE_TARGET) \
 		$(INSTALLED_RECOVERYIMAGE_TARGET) \
-		$(INSTALLED_SYSTEMIMAGE) \
+		$(FULL_SYSTEMIMAGE_DEPS) \
 		$(INSTALLED_USERDATAIMAGE_TARGET) \
 		$(INSTALLED_CACHEIMAGE_TARGET) \
 		$(INSTALLED_VENDORIMAGE_TARGET) \
-		$(INSTALLED_SYSTEMOTHERIMAGE_TARGET) \
+		$(INSTALLED_DTBOIMAGE_TARGET) \
+		$(INTERNAL_SYSTEMOTHERIMAGE_FILES) \
 		$(INSTALLED_ANDROID_INFO_TXT_TARGET) \
+		$(INSTALLED_KERNEL_TARGET) \
+		$(INSTALLED_2NDBOOTLOADER_TARGET) \
+		$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_BASE_FS_PATH) \
+		$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_BASE_FS_PATH) \
 		$(SELINUX_FC) \
 		$(APKCERTS_FILE) \
 		$(SOONG_ZIP) \
 		$(HOST_OUT_EXECUTABLES)/fs_config \
-		build/tools/releasetools/add_img_to_target_files \
+		$(HOST_OUT_EXECUTABLES)/imgdiff \
+		$(HOST_OUT_EXECUTABLES)/bsdiff \
+		$(BUILD_IMAGE_SRCS) \
 		| $(ACP)
 	@echo "Package target files: $@"
+	$(call create-system-vendor-symlink)
 	$(hide) rm -rf $@ $@.list $(zip_root)
 	$(hide) mkdir -p $(dir $@) $(zip_root)
 ifneq (,$(INSTALLED_RECOVERYIMAGE_TARGET)$(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)))
@@ -1946,11 +2197,10 @@
 	$(hide) $(call package_files-copy-root, \
 		$(TARGET_RECOVERY_ROOT_OUT),$(zip_root)/$(PRIVATE_RECOVERY_OUT)/RAMDISK)
 ifdef INSTALLED_KERNEL_TARGET
-	$(hide) $(ACP) $(INSTALLED_KERNEL_TARGET) $(zip_root)/$(PRIVATE_RECOVERY_OUT)/kernel
+	$(hide) cp $(INSTALLED_KERNEL_TARGET) $(zip_root)/$(PRIVATE_RECOVERY_OUT)/kernel
 endif
 ifdef INSTALLED_2NDBOOTLOADER_TARGET
-	$(hide) $(ACP) \
-		$(INSTALLED_2NDBOOTLOADER_TARGET) $(zip_root)/$(PRIVATE_RECOVERY_OUT)/second
+	$(hide) cp $(INSTALLED_2NDBOOTLOADER_TARGET) $(zip_root)/$(PRIVATE_RECOVERY_OUT)/second
 endif
 ifdef INTERNAL_KERNEL_CMDLINE
 	$(hide) echo "$(INTERNAL_KERNEL_CMDLINE)" > $(zip_root)/$(PRIVATE_RECOVERY_OUT)/cmdline
@@ -1975,11 +2225,10 @@
 	@# If we are using recovery as boot, this is already done when processing recovery.
 ifneq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
 ifdef INSTALLED_KERNEL_TARGET
-	$(hide) $(ACP) $(INSTALLED_KERNEL_TARGET) $(zip_root)/BOOT/kernel
+	$(hide) cp $(INSTALLED_KERNEL_TARGET) $(zip_root)/BOOT/kernel
 endif
 ifdef INSTALLED_2NDBOOTLOADER_TARGET
-	$(hide) $(ACP) \
-		$(INSTALLED_2NDBOOTLOADER_TARGET) $(zip_root)/BOOT/second
+	$(hide) cp $(INSTALLED_2NDBOOTLOADER_TARGET) $(zip_root)/BOOT/second
 endif
 ifdef INTERNAL_KERNEL_CMDLINE
 	$(hide) echo "$(INTERNAL_KERNEL_CMDLINE)" > $(zip_root)/BOOT/cmdline
@@ -1993,7 +2242,7 @@
 endif # BOARD_USES_RECOVERY_AS_BOOT
 	$(hide) $(foreach t,$(INSTALLED_RADIOIMAGE_TARGET),\
 	            mkdir -p $(zip_root)/RADIO; \
-	            $(ACP) $(t) $(zip_root)/RADIO/$(notdir $(t));)
+	            cp $(t) $(zip_root)/RADIO/$(notdir $(t));)
 	@# Contents of the system image
 	$(hide) $(call package_files-copy-root, \
 		$(SYSTEMIMAGE_SOURCE_DIR),$(zip_root)/SYSTEM)
@@ -2012,20 +2261,22 @@
 endif
 	@# Extra contents of the OTA package
 	$(hide) mkdir -p $(zip_root)/OTA
-	$(hide) $(ACP) $(INSTALLED_ANDROID_INFO_TXT_TARGET) $(zip_root)/OTA/
+	$(hide) cp $(INSTALLED_ANDROID_INFO_TXT_TARGET) $(zip_root)/OTA/
 ifneq ($(AB_OTA_UPDATER),true)
 ifneq ($(built_ota_tools),)
 	$(hide) mkdir -p $(zip_root)/OTA/bin
-	$(hide) $(ACP) $(PRIVATE_OTA_TOOLS) $(zip_root)/OTA/bin/
+	$(hide) cp $(PRIVATE_OTA_TOOLS) $(zip_root)/OTA/bin/
 endif
 endif
 	@# Files that do not end up in any images, but are necessary to
 	@# build them.
 	$(hide) mkdir -p $(zip_root)/META
-	$(hide) $(ACP) $(APKCERTS_FILE) $(zip_root)/META/apkcerts.txt
-	$(hide) if test -e $(tool_extensions)/releasetools.py; then $(ACP) $(tool_extensions)/releasetools.py $(zip_root)/META/; fi
+	$(hide) cp $(APKCERTS_FILE) $(zip_root)/META/apkcerts.txt
+ifneq ($(tool_extension),)
+	$(hide) cp $(PRIVATE_TOOL_EXTENSION) $(zip_root)/META/
+endif
 	$(hide) echo "$(PRODUCT_OTA_PUBLIC_KEYS)" > $(zip_root)/META/otakeys.txt
-	$(hide) $(ACP) $(SELINUX_FC) $(zip_root)/META/file_contexts.bin
+	$(hide) cp $(SELINUX_FC) $(zip_root)/META/file_contexts.bin
 	$(hide) echo "recovery_api_version=$(PRIVATE_RECOVERY_API_VERSION)" > $(zip_root)/META/misc_info.txt
 	$(hide) echo "fstab_version=$(PRIVATE_RECOVERY_FSTAB_VERSION)" >> $(zip_root)/META/misc_info.txt
 ifdef BOARD_FLASH_BLOCK_SIZE
@@ -2052,7 +2303,7 @@
 else
 	$(hide) echo "recovery_mount_options=$(DEFAULT_TARGET_RECOVERY_FSTYPE_MOUNT_OPTIONS)" >> $(zip_root)/META/misc_info.txt
 endif
-	$(hide) echo "tool_extensions=$(tool_extensions)" >> $(zip_root)/META/misc_info.txt
+	$(hide) echo "tool_extensions=$(PRIVATE_TOOL_EXTENSIONS)" >> $(zip_root)/META/misc_info.txt
 	$(hide) echo "default_system_dev_certificate=$(DEFAULT_SYSTEM_DEV_CERTIFICATE)" >> $(zip_root)/META/misc_info.txt
 ifdef PRODUCT_EXTRA_RECOVERY_KEYS
 	$(hide) echo "extra_recovery_keys=$(PRODUCT_EXTRA_RECOVERY_KEYS)" >> $(zip_root)/META/misc_info.txt
@@ -2066,11 +2317,11 @@
 	$(hide) echo "oem_fingerprint_properties=$(OEM_THUMBPRINT_PROPERTIES)" >> $(zip_root)/META/misc_info.txt
 endif
 ifneq ($(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_BASE_FS_PATH),)
-	$(hide) $(ACP) $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_BASE_FS_PATH) \
+	$(hide) cp $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_BASE_FS_PATH) \
 	  $(zip_root)/META/$(notdir $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_BASE_FS_PATH))
 endif
 ifneq ($(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_BASE_FS_PATH),)
-	$(hide) $(ACP) $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_BASE_FS_PATH) \
+	$(hide) cp $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_BASE_FS_PATH) \
 	  $(zip_root)/META/$(notdir $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_BASE_FS_PATH))
 endif
 ifneq ($(strip $(SANITIZE_TARGET)),)
@@ -2081,14 +2332,17 @@
 	$(hide) echo "full_recovery_image=true" >> $(zip_root)/META/misc_info.txt
 endif
 ifeq ($(BOARD_AVB_ENABLE),true)
-	$(hide) echo "board_avb_enable=true" >> $(zip_root)/META/misc_info.txt
-	$(hide) echo "board_avb_rollback_index=$(BOARD_AVB_ROLLBACK_INDEX)" >> $(zip_root)/META/misc_info.txt
-	$(hide) echo "board_avb_key_path=$(BOARD_AVB_KEY_PATH)" >> $(zip_root)/META/misc_info.txt
-	$(hide) echo "board_avb_algorithm=$(BOARD_AVB_ALGORITHM)" >> $(zip_root)/META/misc_info.txt
-	$(hide) echo "board_avb_boot_add_hash_footer_args=$(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS)" >> $(zip_root)/META/misc_info.txt
-	$(hide) echo "board_avb_system_add_hashtree_footer_args=$(BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS)" >> $(zip_root)/META/misc_info.txt
-	$(hide) echo "board_avb_make_vbmeta_image_args=$(BOARD_AVB_MAKE_VBMETA_IMAGE_ARGS)" >> $(zip_root)/META/misc_info.txt
-endif
+	$(hide) echo "avb_enable=true" >> $(zip_root)/META/misc_info.txt
+	$(hide) echo "avb_vbmeta_key_path=$(BOARD_AVB_KEY_PATH)" >> $(zip_root)/META/misc_info.txt
+	$(hide) echo "avb_vbmeta_algorithm=$(BOARD_AVB_ALGORITHM)" >> $(zip_root)/META/misc_info.txt
+	$(hide) echo "avb_vbmeta_args=$(BOARD_AVB_MAKE_VBMETA_IMAGE_ARGS)" >> $(zip_root)/META/misc_info.txt
+	$(hide) echo "avb_boot_add_hash_footer_args=$(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS)" >> $(zip_root)/META/misc_info.txt
+ifdef BOARD_AVB_BOOT_KEY_PATH
+	$(hide) echo "avb_boot_key_path=$(BOARD_AVB_BOOT_KEY_PATH)" >> $(zip_root)/META/misc_info.txt
+	$(hide) echo "avb_boot_algorithm=$(BOARD_AVB_BOOT_ALGORITHM)" >> $(zip_root)/META/misc_info.txt
+	$(hide) echo "avb_boot_rollback_index_location=$(BOARD_AVB_BOOT_ROLLBACK_INDEX_LOCATION)" >> $(zip_root)/META/misc_info.txt
+endif # BOARD_AVB_BOOT_KEY_PATH
+endif # BOARD_AVB_ENABLE
 ifdef BOARD_BPT_INPUT_FILES
 	$(hide) echo "board_bpt_enable=true" >> $(zip_root)/META/misc_info.txt
 	$(hide) echo "board_bpt_make_table_args=$(BOARD_BPT_MAKE_TABLE_ARGS)" >> $(zip_root)/META/misc_info.txt
@@ -2104,8 +2358,8 @@
 endif
 ifeq ($(AB_OTA_UPDATER),true)
 	@# When using the A/B updater, include the updater config files in the zip.
-	$(hide) $(ACP) $(TOPDIR)system/update_engine/update_engine.conf $(zip_root)/META/update_engine_config.txt
-	$(hide) $(ACP) $(TARGET_OUT_COMMON_GEN)/zlib_fingerprint $(zip_root)/META/zlib_fingerprint.txt
+	$(hide) cp $(TOPDIR)system/update_engine/update_engine.conf $(zip_root)/META/update_engine_config.txt
+	$(hide) cp $(TARGET_OUT_COMMON_GEN)/zlib_fingerprint $(zip_root)/META/zlib_fingerprint.txt
 	$(hide) for part in $(AB_OTA_PARTITIONS); do \
 	  echo "$${part}" >> $(zip_root)/META/ab_partitions.txt; \
 	done
@@ -2123,13 +2377,13 @@
 	  src=$${pair1}/$${pair2}; \
 	  dest=$(zip_root)/VENDOR_IMAGES/$${pair2}; \
 	  mkdir -p $$(dirname "$${dest}"); \
-	  $(ACP) $${src} $${dest}; \
+	  cp $${src} $${dest}; \
 	done;
 endif
 ifdef OSRELEASED_DIRECTORY
-	$(hide) $(ACP) $(TARGET_OUT_OEM)/$(OSRELEASED_DIRECTORY)/product_id $(zip_root)/META/product_id.txt
-	$(hide) $(ACP) $(TARGET_OUT_OEM)/$(OSRELEASED_DIRECTORY)/product_version $(zip_root)/META/product_version.txt
-	$(hide) $(ACP) $(TARGET_OUT_ETC)/$(OSRELEASED_DIRECTORY)/system_version $(zip_root)/META/system_version.txt
+	$(hide) cp $(TARGET_OUT_OEM)/$(OSRELEASED_DIRECTORY)/product_id $(zip_root)/META/product_id.txt
+	$(hide) cp $(TARGET_OUT_OEM)/$(OSRELEASED_DIRECTORY)/product_version $(zip_root)/META/product_version.txt
+	$(hide) cp $(TARGET_OUT_ETC)/$(OSRELEASED_DIRECTORY)/system_version $(zip_root)/META/system_version.txt
 endif
 endif
 ifeq ($(BREAKPAD_GENERATE_SYMBOLS),true)
@@ -2140,6 +2394,21 @@
 	$(hide) mkdir -p $(zip_root)/IMAGES
 	$(hide) cp $(INSTALLED_VENDORIMAGE_TARGET) $(zip_root)/IMAGES/
 endif
+ifdef BOARD_PREBUILT_DTBOIMAGE
+	$(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
+	$(hide) cp $(INSTALLED_DTBOIMAGE_TARGET) $(zip_root)/PREBUILT_IMAGES/
+	$(hide) echo "has_dtbo=true" >> $(zip_root)/META/misc_info.txt
+ifeq ($(BOARD_AVB_ENABLE),true)
+	$(hide) echo "dtbo_size=$(BOARD_DTBOIMG_PARTITION_SIZE)" >> $(zip_root)/META/misc_info.txt
+	$(hide) echo "avb_dtbo_add_hash_footer_args=$(BOARD_AVB_DTBO_ADD_HASH_FOOTER_ARGS)" >> $(zip_root)/META/misc_info.txt
+ifdef BOARD_AVB_DTBO_KEY_PATH
+	$(hide) echo "avb_dtbo_key_path=$(BOARD_AVB_DTBO_KEY_PATH)" >> $(zip_root)/META/misc_info.txt
+	$(hide) echo "avb_dtbo_algorithm=$(BOARD_AVB_DTBO_ALGORITHM)" >> $(zip_root)/META/misc_info.txt
+	$(hide) echo "avb_dtbo_rollback_index_location=$(BOARD_AVB_DTBO_ROLLBACK_INDEX_LOCATION)" \
+	    >> $(zip_root)/META/misc_info.txt
+endif # BOARD_AVB_DTBO_KEY_PATH
+endif # BOARD_AVB_ENABLE
+endif # BOARD_PREBUILT_DTBOIMAGE
 	@# Run fs_config on all the system, vendor, boot ramdisk,
 	@# and recovery ramdisk files in the zip, and save the output
 	$(hide) $(call fs_config,$(zip_root)/SYSTEM,system/) > $(zip_root)/META/filesystem_config.txt
@@ -2161,7 +2430,7 @@
 	@# Zip everything up, preserving symlinks and placing META/ files first to
 	@# help early validation of the .zip file while uploading it.
 	$(hide) find $(zip_root)/META | sort >$@.list
-	$(hide) find $(zip_root) | grep -v "^$(zip_root)/META/" | sort >>$@.list
+	$(hide) find $(zip_root) -path $(zip_root)/META -prune -o -print | sort >>$@.list
 	$(hide) $(SOONG_ZIP) -d -o $@ -C $(zip_root) -l $@.list
 
 .PHONY: target-files-package
@@ -2194,12 +2463,17 @@
 
 $(INTERNAL_OTA_PACKAGE_TARGET): KEY_CERT_PAIR := $(DEFAULT_KEY_CERT_PAIR)
 
+ifeq ($(AB_OTA_UPDATER),true)
+$(INTERNAL_OTA_PACKAGE_TARGET): $(BRILLO_UPDATE_PAYLOAD)
+endif
+
 $(INTERNAL_OTA_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE) \
 		build/tools/releasetools/ota_from_target_files
 	@echo "Package OTA: $@"
 	$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH MKBOOTIMG=$(MKBOOTIMG) \
 	   ./build/tools/releasetools/ota_from_target_files -v \
 	   --block \
+	   --extracted_input_target_files $(patsubst %.zip,%,$(BUILT_TARGET_FILES_PACKAGE)) \
 	   -p $(HOST_OUT) \
 	   -k $(KEY_CERT_PAIR) \
 	   $(if $(OEM_OTA_CONFIG), -o $(OEM_OTA_CONFIG)) \
diff --git a/core/aapt2.mk b/core/aapt2.mk
index ccc4535..e34b09b 100644
--- a/core/aapt2.mk
+++ b/core/aapt2.mk
@@ -33,7 +33,7 @@
 ifneq ($(my_generated_res_dirs),)
 my_generated_resources_flata := $(my_compiled_res_base_dir)/gen_res.flata
 $(my_generated_resources_flata): PRIVATE_SOURCE_RES_DIRS := $(my_generated_res_dirs)
-$(my_generated_resources_flata) : $(my_generated_res_dirs_deps)
+$(my_generated_resources_flata) : $(my_generated_res_dirs_deps) $(AAPT2)
 	@echo "AAPT2 compile $@ <- $(PRIVATE_SOURCE_RES_DIRS)"
 	$(call aapt2-compile-resource-dirs)
 
diff --git a/core/base_rules.mk b/core/base_rules.mk
index b4d17b5..07d1cd9 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -65,6 +65,16 @@
   my_host_cross :=
 endif
 
+ifndef LOCAL_PROPRIETARY_MODULE
+  LOCAL_PROPRIETARY_MODULE := $(LOCAL_VENDOR_MODULE)
+endif
+ifndef LOCAL_VENDOR_MODULE
+  LOCAL_VENDOR_MODULE := $(LOCAL_PROPRIETARY_MODULE)
+endif
+ifneq ($(filter-out $(LOCAL_PROPRIETARY_MODULE),$(LOCAL_VENDOR_MODULE))$(filter-out $(LOCAL_VENDOR_MODULE),$(LOCAL_PROPRIETARY_MODULE)),)
+$(call pretty-error,Only one of LOCAL_PROPRIETARY_MODULE[$(LOCAL_PROPRIETARY_MODULE)] and LOCAL_VENDOR_MODULE[$(LOCAL_VENDOR_MODULE)] may be set, or they must be equal)
+endif
+
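A minimal Android.mk sketch (module and source names invented) showing that the two flags are now interchangeable aliases:

    LOCAL_PATH := $(call my-dir)
    include $(CLEAR_VARS)
    LOCAL_MODULE := libacme_hal
    LOCAL_SRC_FILES := acme_hal.c
    LOCAL_VENDOR_MODULE := true    # now equivalent to LOCAL_PROPRIETARY_MODULE := true
    include $(BUILD_SHARED_LIBRARY)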
 include $(BUILD_SYSTEM)/local_vndk.mk
 
 my_module_tags := $(LOCAL_MODULE_TAGS)
@@ -78,8 +88,6 @@
 endif
 endif
 
-my_module_is_soong := $(if $(filter $(OUT_DIR)/soong/%,$(LOCAL_MODULE_MAKEFILE)),true,false)
-
 # Ninja has an implicit dependency on the command being run, and kati will
 # regenerate the ninja manifest if any read makefile changes, so there is no
 # need to have dependencies on makefiles.
@@ -165,18 +173,10 @@
 endif
 my_module_path := $(patsubst %/,%,$(my_module_path))
 my_module_relative_path := $(strip $(LOCAL_MODULE_RELATIVE_PATH))
-
-# my_module_default_path is the path that is automatically chosen according to the attributes of
-# a module. It is used when the module does not explicitly specify install path using LOCAL_MODULE_PATH.
-# If LOCAL_MODULE_PATH is specified, it is always respected and my_module_default_path is
-# ignored. However, for shared libraries, such conflict generates warning so that module owner
-# can place the library in the correct location (, stop using LOCAL_MODULE_PATH, or migrate to Soong to
-# be better).
-my_module_default_path :=
 ifdef LOCAL_IS_HOST_MODULE
   partition_tag :=
 else
-ifeq (true,$(LOCAL_PROPRIETARY_MODULE))
+ifeq (true,$(LOCAL_VENDOR_MODULE))
   partition_tag := _VENDOR
 else ifeq (true,$(LOCAL_OEM_MODULE))
   partition_tag := _OEM
@@ -190,137 +190,20 @@
   partition_tag := $(if $(call should-install-to-system,$(my_module_tags)),,_DATA)
 endif
 endif
-install_path_var := $(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)OUT$(partition_tag)_$(LOCAL_MODULE_CLASS)
-ifeq (true,$(LOCAL_PRIVILEGED_MODULE))
-  install_path_var := $(install_path_var)_PRIVILEGED
-endif
-
-my_module_default_path := $($(install_path_var))
-ifeq ($(strip $(my_module_path)$(my_module_default_path)),)
-  $(call pretty-error,internal error in base_rules.mk; $(install_path_var) is not defined.)
-endif
-
-# Determine lib_type and do some sanity checks.
-ifeq ($(LOCAL_IS_HOST_MODULE)$(LOCAL_MODULE_CLASS),SHARED_LIBRARIES)
-  ifneq ($(filter $(LOCAL_MODULE),$(addprefix lib,$(NDK_PREBUILT_SHARED_LIBRARIES))),)
-    ifneq ($(partition_tag),)
-      $(call pretty-error,"NDK library must be installed at system partition, where other libraries will look for it. It cannot be moved.")
-    endif
-    lib_type := ndk
-  else ifneq ($(filter $(LOCAL_MODULE),$(VNDK_LIBRARIES) $(VNDK_INDIRECT_LIBRARIES)),)
-    ifneq ($(partition_tag),)
-      $(call pretty-error,"VNDK library must be installed at system partition. DO NOT modify VNDK_LIBRARIES or VNDK_LIBRARIES. \
-If your library needs to be shared between system.img and vendor.img then define it as a VNDK-ext library. Use vndk_ext_library {...} \
-in Android.bp to do so.")
-    endif
-    lib_type := vndk
-  else ifneq ($(filter $(LOCAL_MODULE),$(BOARD_SAME_PROCESS_HAL_DEPS)),)
-    # List of libraries implementing same-process HALs (and their internal sub-libraries) is
-    # defined by vendors.
-    ifeq ($(partition_tag),)
-      $(call pretty-error,Sameprocess HAL must not be installed at system partition)
-    endif
-    lib_type := sameprocess_hal
-  else ifeq ($(LOCAL_IS_HOST_MODULE)$(partition_tag),)
-    lib_type := framework
-  else ifneq ($(partition_tag),_DATA)
-    # Here, vendor means vendor/oem/odm
-    lib_type := vendor_provided
-  else
-    # Test, samples lib falls into this. No lib_type required for them.
-    ifeq ($(filter tests samples,$(LOCAL_MODULE_TAGS)),)
-      $(call pretty-warning,Cannot determine the type of this library)
-    endif
-    lib_type :=
-  endif
-else
-  lib_type :=
-endif
-
-# This is the default path until N. From O, the default path is changed.
-# Let's save the old default path in case we need a symlink to it later.
-my_module_pre_o_default_path := $(my_module_default_path)
-
-# Special case for pre_o_default_path of Soong defined modules.
-# For those modules, we guess their pre_o_default_path by removing /ndk, /vndk, etc.
-# from their LOCAL_MODULE_PATH. This is because relative_install_path is already
-# embedded to my_module_path.
-ifeq ($(my_module_is_soong),true)
-ifndef LOCAL_IS_HOST_MODULE
-ifeq ($(LOCAL_MODULE_CLASS),SHARED_LIBRARIES)
-  my_module_pre_o_default_path := $(my_module_path)
-  my_module_pre_o_default_path := $(subst /vndk-ext,,$(my_module_pre_o_default_path))
-  my_module_pre_o_default_path := $(subst /vndk,,$(my_module_pre_o_default_path))
-  my_module_pre_o_default_path := $(subst /ndk,,$(my_module_pre_o_default_path))
-  my_module_pre_o_default_path := $(subst /sameprocess,,$(my_module_pre_o_default_path))
-endif
-endif
-endif
-
-# Amend the default_path once again depending on lib_type. This is new from O.
-ifeq ($(lib_type),vndk)
-  my_module_default_path := $(my_module_default_path)/vndk
-  # TODO(b/35020246): before P, we should support installing two snapshots of VNDK
-  # libraries. One for framework libs and execs and the other for vendor libs and execs.
-else ifeq ($(lib_type),ndk)
-  my_module_default_path := $(my_module_default_path)/ndk
-else ifeq ($(lib_type),sameprocess_hal)
-  my_module_default_path := $(my_module_default_path)/sameprocess
-endif
-
-# Relative path is appended to path resolved so far
-ifneq ($(my_module_relative_path),)
-  my_module_default_path := $(my_module_default_path)/$(my_module_relative_path)
-  my_module_pre_o_default_path := $(my_module_pre_o_default_path)/$(my_module_relative_path)
-  ifneq ($(my_module_path),)
-    my_module_path := $(my_module_path)/$(my_module_relative_path)
-  endif
-endif
-
-_lib_moved :=
 ifeq ($(my_module_path),)
-  # If LOCAL_MODULE_PATH is not specified, use the automatically determined path.
-  my_module_path := $(my_module_default_path)
+  install_path_var := $(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)OUT$(partition_tag)_$(LOCAL_MODULE_CLASS)
+  ifeq (true,$(LOCAL_PRIVILEGED_MODULE))
+    install_path_var := $(install_path_var)_PRIVILEGED
+  endif
 
-  # Mark if the lib is installed to a different path than before. With this hint,
-  # a symlink is created if BOARD_SYMLINK_FOR_LIBS is true.
-  ifneq ($(my_module_path),$(my_module_pre_o_default_path))
-    _lib_moved := true
-  endif
-else
-  # If LOCAL_MODULE_PATH is specified, we respect it.
-  ifndef LOCAL_IS_HOST_MODULE
-  ifeq ($(LOCAL_MODULE_CLASS),SHARED_LIBRARIES)
-  ifeq ($(filter $(TARGET_OUT_DATA)%,$(my_module_path)),)
-    # However, we are kind enough to warn if it seems to be wrong.
-    # Warn only for Android.mk defined shared libraries that will be installed
-    # to system or vendor partition. For other types of files - especially
-    # Soong-defined libs -, we don't warn because Soong always gives us correct
-    # paths.
-    ifeq ($(my_module_is_soong),false)
-      ifneq ($(my_module_path),$(my_module_default_path))
-        ifeq ($(SHOW_MODULE_PATH_WARNINGS),true)
-        # TODO(b/35020635): s/warning/error/
-        $(call pretty-warning,$(lib_type) library must be installed to \
-$(subst $(PRODUCT_OUT)/,,$(my_module_default_path)) but requested to be installed at \
-$(subst $(PRODUCT_OUT)/,,$(my_module_path)). Please fix.)
-        endif
-      endif
-    else
-      # For Soong-defined module, symlink is provided if the path has been amended
-      # ...except for vndk-ext libraries because there already is a symlink for the
-      # vndk (unmodified) version of the vndk-ext library.
-      ifneq ($(my_module_path),$(my_module_pre_o_default_path))
-        ifeq ($(filter vndk-ext,$(subst /,$(space),$(my_module_path))),)
-          _lib_moved := true
-        endif
-      endif
-    endif
-  endif
-  endif
+  my_module_path := $($(install_path_var))
+  ifeq ($(strip $(my_module_path)),)
+    $(error $(LOCAL_PATH): unhandled install path "$(install_path_var)" for $(LOCAL_MODULE))
   endif
 endif
-
+ifneq ($(my_module_relative_path),)
+  my_module_path := $(my_module_path)/$(my_module_relative_path)
+endif
 endif # not LOCAL_UNINSTALLABLE_MODULE
 
 ifneq ($(strip $(LOCAL_BUILT_MODULE)$(LOCAL_INSTALLED_MODULE)),)
@@ -377,6 +260,8 @@
 ###########################################################
 include $(BUILD_SYSTEM)/configure_module_stem.mk
 
+LOCAL_BUILT_MODULE := $(intermediates)/$(my_built_module_stem)
+
 # OVERRIDE_BUILT_MODULE_PATH is only allowed to be used by the
 # internal SHARED_LIBRARIES build files.
 OVERRIDE_BUILT_MODULE_PATH := $(strip $(OVERRIDE_BUILT_MODULE_PATH))
@@ -384,11 +269,8 @@
   ifneq ($(LOCAL_MODULE_CLASS),SHARED_LIBRARIES)
     $(error $(LOCAL_PATH): Illegal use of OVERRIDE_BUILT_MODULE_PATH)
   endif
-  built_module_path := $(OVERRIDE_BUILT_MODULE_PATH)
-else
-  built_module_path := $(intermediates)
+  $(eval $(call copy-one-file,$(LOCAL_BUILT_MODULE),$(OVERRIDE_BUILT_MODULE_PATH)/$(my_built_module_stem)))
 endif
-LOCAL_BUILT_MODULE := $(built_module_path)/$(my_built_module_stem)
 
 ifneq (true,$(LOCAL_UNINSTALLABLE_MODULE))
   # Apk and its attachments reside in its own subdir.
@@ -424,6 +306,11 @@
 .KATI_RESTAT: $(LOCAL_BUILT_MODULE).toc
 # Build .toc file when using mm, mma, or make $(my_register_name)
 $(my_all_targets): $(LOCAL_BUILT_MODULE).toc
+
+ifdef OVERRIDE_BUILT_MODULE_PATH
+$(eval $(call copy-one-file,$(LOCAL_BUILT_MODULE).toc,$(OVERRIDE_BUILT_MODULE_PATH)/$(my_built_module_stem).toc))
+$(OVERRIDE_BUILT_MODULE_PATH)/$(my_built_module_stem).toc: $(OVERRIDE_BUILT_MODULE_PATH)/$(my_built_module_stem)
+endif
 endif
 
 ###########################################################
@@ -523,30 +410,8 @@
 
 # Rule to install the module's companion symlinks
 my_installed_symlinks := $(addprefix $(my_module_path)/,$(LOCAL_MODULE_SYMLINKS) $(LOCAL_MODULE_SYMLINKS_$(my_32_64_bit_suffix)))
-
-# If this lib is installed to the different directory than before,
-# make a symlink from the old path to the new path.
-# This symlink is required because there are so many plances that expect the old
-# path (e.g. systemproperty rild.libpath). Until that places are all fixed,
-# we keep this symlink.
-# TODO(b/34917183): remove symlinks after everything migrations to the new paths;
-# this should be done before O launch unless it will be a security hole that
-# we can't restrict access to a certain set of libraries by using the directory
-# path.
-ifneq ($(BOARD_SYMLINK_FOR_LIBS),false)
-ifeq ($(_lib_moved),true)
-  my_installed_symlinks += $(my_module_pre_o_default_path)/$(my_installed_module_stem)
-endif
-else
-# Symlinks for ndk libs are permanent.
-ifeq ($(lib_type)$(_lib_moved),ndktrue)
-  my_installed_symlinks += $(my_module_pre_o_default_path)/$(my_installed_module_stem)
-endif
-endif
-
-# Make a symlink $(symlink) -> $(LOCAL_INSTALLED_MODULE)
 $(foreach symlink,$(my_installed_symlinks),\
-    $(call symlink-file,$(LOCAL_INSTALLED_MODULE),$(LOCAL_INSTALLED_MODULE),$(symlink),true))
+    $(call symlink-file,$(LOCAL_INSTALLED_MODULE),$(my_installed_module_stem),$(symlink)))
 
 $(my_all_targets) : | $(my_installed_symlinks)
 
@@ -582,9 +447,27 @@
 ###########################################################
 ifdef LOCAL_COMPATIBILITY_SUITE
 
+# If we are building a native test or benchmark and its stem variants are not defined,
+# separate the multiple architectures into subdirectories of the testcase folder.
+arch_dir :=
+is_native :=
+ifeq ($(LOCAL_MODULE_CLASS),NATIVE_TESTS)
+  is_native := true
+endif
+ifeq ($(LOCAL_MODULE_CLASS),NATIVE_BENCHMARK)
+  is_native := true
+endif
+ifdef LOCAL_MULTILIB
+  is_native := true
+endif
+ifdef is_native
+  arch_dir := /$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)
+  is_native :=
+endif
+
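For illustration (architecture values assumed): a NATIVE_TESTS module built for the primary arch of an arm64 device picks up that arch name, so each ABI's binaries land in their own testcase subdirectory.
  # $($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH) -> $(TARGET_ARCH) -> arm64
  # arch_dir -> /arm64   (the secondary-arch build would use /arm)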
 # The module itself.
 $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
-  $(eval my_compat_dist_$(suite) := $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
+  $(eval my_compat_dist_$(suite) := $(foreach dir, $(call compatibility_suite_dirs,$(suite),$(arch_dir)), \
     $(LOCAL_BUILT_MODULE):$(dir)/$(my_installed_module_stem))))
 
 # Make sure we only add the files once for multilib modules.
@@ -612,6 +495,13 @@
   $(eval my_compat_dist_$(suite) += $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
     $(LOCAL_PATH)/DynamicConfig.xml:$(dir)/$(LOCAL_MODULE).dynamic)))
 endif
+
+ifneq (,$(wildcard $(LOCAL_PATH)/$(LOCAL_MODULE)_*.config))
+$(foreach extra_config, $(wildcard $(LOCAL_PATH)/$(LOCAL_MODULE)_*.config), \
+  $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
+    $(eval my_compat_dist_$(suite) += $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
+      $(extra_config):$(dir)/$(notdir $(extra_config))))))
+endif
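As a hypothetical example (module and file names are not from this change): a test module MyTest that keeps MyTest_functional.config next to its Android.mk has that file copied into every suite directory beside the test binary.
  # $(LOCAL_PATH)/MyTest_functional.config -> $(dir)/MyTest_functional.config
  #   for each directory returned by compatibility_suite_dirs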
 endif # $(my_prefix)$(LOCAL_MODULE_CLASS)_$(LOCAL_MODULE)_compat_files
 
 $(call create-suite-dependencies)
diff --git a/core/binary.mk b/core/binary.mk
index faae547..b2b8186 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -80,9 +80,8 @@
 my_ndk_sysroot :=
 my_ndk_sysroot_include :=
 my_ndk_sysroot_lib :=
-ifneq ($(LOCAL_SDK_VERSION)$(LOCAL_USE_VNDK),)
+ifneq ($(LOCAL_SDK_VERSION),)
   ifdef LOCAL_IS_HOST_MODULE
-    # LOCAL_USE_VNDK is checked in local_vndk.mk
     $(error $(LOCAL_PATH): LOCAL_SDK_VERSION cannot be used in host module)
   endif
 
@@ -111,13 +110,9 @@
   # missing API levels to existing ones where necessary, but we're not doing
   # that for the generated libraries. Clip the API level to the minimum where
   # appropriate.
-  ifdef LOCAL_USE_VNDK
-    my_ndk_api := $(BOARD_VNDK_VERSION)
-  else
-    my_ndk_api := $(LOCAL_SDK_VERSION)
-  endif
+  my_ndk_api := $(LOCAL_SDK_VERSION)
   ifneq ($(my_ndk_api),current)
-      my_ndk_api := $(call math_max,$(my_ndk_api),$(my_min_sdk_version))
+    my_ndk_api := $(call math_max,$(my_ndk_api),$(my_min_sdk_version))
   endif
 
   my_ndk_api_def := $(my_ndk_api)
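A quick sketch of the clipping (numbers illustrative; my_min_sdk_version stands for the lowest API level the generated NDK libraries cover):
  # my_ndk_api := $(call math_max,9,14)   -> 14  (clipped up to the minimum)
  # my_ndk_api := $(call math_max,24,14)  -> 24  (already high enough, unchanged)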
@@ -165,28 +160,25 @@
   my_built_ndk_libs := $(my_ndk_platform_dir)/usr/$(my_ndk_libdir_name)
   my_ndk_sysroot_lib := $(my_ndk_sysroot)/usr/$(my_ndk_libdir_name)
 
-  ifndef LOCAL_USE_VNDK
-    # The bionic linker now has support for packed relocations and gnu style
-    # hashes (which are much faster!), but shipping to older devices requires
-    # the old style hash. Fortunately, we can build with both and it'll work
-    # anywhere.
-    #
-    # This is not currently supported on MIPS architectures.
-    ifeq (,$(filter mips mips64,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)))
-      my_ldflags += -Wl,--hash-style=both
-    endif
-
-    # We don't want to expose the relocation packer to the NDK just yet.
-    LOCAL_PACK_MODULE_RELOCATIONS := false
+  # The bionic linker now has support for packed relocations and gnu style
+  # hashes (which are much faster!), but shipping to older devices requires
+  # the old style hash. Fortunately, we can build with both and it'll work
+  # anywhere.
+  #
+  # This is not currently supported on MIPS architectures.
+  ifeq (,$(filter mips mips64,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)))
+    my_ldflags += -Wl,--hash-style=both
   endif
 
+  # We don't want to expose the relocation packer to the NDK just yet.
+  LOCAL_PACK_MODULE_RELOCATIONS := false
+
   # Set up the NDK stl variant. Starting from NDK-r5 the c++ stl resides in a separate location.
   # See ndk/docs/CPLUSPLUS-SUPPORT.html
   my_ndk_stl_include_path :=
   my_ndk_stl_shared_lib_fullpath :=
   my_ndk_stl_static_lib :=
   my_ndk_cpp_std_version :=
-  ifndef LOCAL_USE_VNDK
   my_cpu_variant := $(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)CPU_ABI)
   ifeq (mips32r6,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH_VARIANT))
     my_cpu_variant := mips32r6
@@ -274,7 +266,10 @@
   endif
   endif
   endif
-  endif
+endif
+
+ifneq ($(LOCAL_USE_VNDK),)
+  my_cflags += -D__ANDROID_API__=__ANDROID_API_FUTURE__
 endif
 
 ifndef LOCAL_IS_HOST_MODULE
@@ -295,7 +290,7 @@
 my_ldlibs := $(filter $(my_allowed_ldlibs),$(my_ldlibs))
 endif
 
-ifneq ($(LOCAL_SDK_VERSION)$(LOCAL_USE_VNDK),)
+ifneq ($(LOCAL_SDK_VERSION),)
   my_all_ndk_libraries := \
       $(NDK_MIGRATED_LIBS) $(addprefix lib,$(NDK_PREBUILT_SHARED_LIBRARIES))
   my_ndk_shared_libraries := \
@@ -314,7 +309,7 @@
 ifneq ($(LOCAL_NO_PIC),true)
 ifneq ($($(my_prefix)OS),windows)
 ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
-my_cflags += -fpie
+my_cflags += -fPIE
 else
 my_cflags += -fPIC
 endif
@@ -519,7 +514,6 @@
 ###########################################################
 my_asflags += -D__ASSEMBLY__
 
-
 ###########################################################
 ## Define PRIVATE_ variables from global vars
 ###########################################################
@@ -528,8 +522,7 @@
 my_target_global_c_includes := \
     $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)PROJECT_INCLUDES)
 my_target_global_c_system_includes := \
-    $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)PROJECT_SYSTEM_INCLUDES) \
-    $(my_ndk_sysroot_include)
+    $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)PROJECT_SYSTEM_INCLUDES)
 else ifdef LOCAL_SDK_VERSION
 my_target_global_c_includes :=
 my_target_global_c_system_includes := $(my_ndk_stl_include_path) $(my_ndk_sysroot_include)
@@ -1319,6 +1312,36 @@
 asm_objects += $(asm_objects_asm)
 endif
 
+###########################################################
+## When compiling against the VNDK, use LL-NDK libraries
+###########################################################
+ifneq ($(LOCAL_USE_VNDK),)
+  ####################################################
+  ## Soong modules may be built twice, once for /system
+  ## and once for /vendor. If we're using the VNDK,
+  ## switch all soong libraries over to the /vendor
+  ## variant.
+  ####################################################
+  ifeq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
+    # Soong-built libraries should always use the .vendor variant
+    my_whole_static_libraries := $(addsuffix .vendor,$(my_whole_static_libraries))
+    my_static_libraries := $(addsuffix .vendor,$(my_static_libraries))
+    my_shared_libraries := $(addsuffix .vendor,$(my_shared_libraries))
+    my_system_shared_libraries := $(addsuffix .vendor,$(my_system_shared_libraries))
+    my_header_libraries := $(addsuffix .vendor,$(my_header_libraries))
+  else
+    my_whole_static_libraries := $(foreach l,$(my_whole_static_libraries),\
+      $(if $(SPLIT_VENDOR.STATIC_LIBRARIES.$(l)),$(l).vendor,$(l)))
+    my_static_libraries := $(foreach l,$(my_static_libraries),\
+      $(if $(SPLIT_VENDOR.STATIC_LIBRARIES.$(l)),$(l).vendor,$(l)))
+    my_shared_libraries := $(foreach l,$(my_shared_libraries),\
+      $(if $(SPLIT_VENDOR.SHARED_LIBRARIES.$(l)),$(l).vendor,$(l)))
+    my_system_shared_libraries := $(foreach l,$(my_system_shared_libraries),\
+      $(if $(SPLIT_VENDOR.SHARED_LIBRARIES.$(l)),$(l).vendor,$(l)))
+    my_header_libraries := $(foreach l,$(my_header_libraries),\
+      $(if $(SPLIT_VENDOR.HEADER_LIBRARIES.$(l)),$(l).vendor,$(l)))
+  endif
+endif
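A hedged illustration of the renaming (library names hypothetical): a Soong-built vendor variant rewrites every dependency to its .vendor flavor, while an Android.mk module only rewrites the ones recorded as having a vendor split.
  # Soong module:      libfoo libbar -> libfoo.vendor libbar.vendor
  # Android.mk module: libfoo libbar -> libfoo.vendor libbar
  #   (assuming SPLIT_VENDOR.SHARED_LIBRARIES.libfoo is set and libbar has no entry)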
 
 ##########################################################
 ## Set up installed module dependency
@@ -1326,7 +1349,7 @@
 ## they may customize their install path with LOCAL_MODULE_PATH
 ##########################################################
 # Get the list of INSTALLED libraries as module names.
-ifneq ($(LOCAL_SDK_VERSION)$(LOCAL_USE_VNDK),)
+ifneq ($(LOCAL_SDK_VERSION),)
   installed_shared_library_module_names := \
       $(my_shared_libraries)
 else
@@ -1372,35 +1395,28 @@
 ## other NDK-built libraries
 ####################################################
 
-my_link_type := $(intermediates)/link_type
-all_link_types: $(my_link_type)
 ifdef LOCAL_SDK_VERSION
-$(my_link_type): PRIVATE_LINK_TYPE := native:ndk
-$(my_link_type): PRIVATE_WARN_TYPES :=
-$(my_link_type): PRIVATE_ALLOWED_TYPES := native:ndk
+my_link_type := native:ndk
+my_warn_types :=
+my_allowed_types := native:ndk
+else ifdef LOCAL_USE_VNDK
+my_link_type := native:vendor
+my_warn_types :=
+my_allowed_types := native:vendor
 else
-$(my_link_type): PRIVATE_LINK_TYPE := native:platform
-$(my_link_type): PRIVATE_WARN_TYPES :=
-$(my_link_type): PRIVATE_ALLOWED_TYPES := native:ndk native:platform
+my_link_type := native:platform
+my_warn_types :=
+my_allowed_types := native:ndk native:platform
 endif
-$(eval $(call link-type-partitions,$(my_link_type)))
-my_link_type_deps := $(strip \
-   $(foreach l,$(my_whole_static_libraries) $(my_static_libraries), \
-     $(call intermediates-dir-for,STATIC_LIBRARIES,$(l),$(my_kind),,$(LOCAL_2ND_ARCH_VAR_PREFIX),$(my_host_cross))/link_type))
-ifneq ($(LOCAL_MODULE_CLASS),STATIC_LIBRARIES)
-ifneq ($(LOCAL_MODULE_CLASS),HEADER_LIBRARIES)
-my_link_type_deps += $(strip \
-   $(foreach l,$(my_shared_libraries), \
-     $(call intermediates-dir-for,SHARED_LIBRARIES,$(l),$(my_kind),,$(LOCAL_2ND_ARCH_VAR_PREFIX),$(my_host_cross))/link_type))
-endif
-endif
-$(my_link_type): PRIVATE_DEPS := $(my_link_type_deps)
-$(my_link_type): PRIVATE_MODULE := $(LOCAL_MODULE)
-$(my_link_type): PRIVATE_MAKEFILE := $(LOCAL_MODULE_MAKEFILE)
-$(my_link_type): $(my_link_type_deps) $(CHECK_LINK_TYPE)
-	@echo Check module type: $@
-	$(check-link-type)
 
+my_link_deps := $(addprefix STATIC_LIBRARIES:,$(my_whole_static_libraries) $(my_static_libraries))
+ifneq ($(filter-out STATIC_LIBRARIES HEADER_LIBRARIES,$(LOCAL_MODULE_CLASS)),)
+my_link_deps += $(addprefix SHARED_LIBRARIES:,$(my_shared_libraries))
+endif
+
+my_2nd_arch_prefix := $(LOCAL_2ND_ARCH_VAR_PREFIX)
+my_common :=
+include $(BUILD_SYSTEM)/link_type.mk
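A small sketch of what link_type.mk is handed (module names hypothetical): each dependency is encoded as CLASS:name so the included makefile can locate that dependency's own link-type information.
  # my_link_type := native:platform
  # my_link_deps := STATIC_LIBRARIES:libutils STATIC_LIBRARIES:liblog SHARED_LIBRARIES:libcutils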
 
 ###########################################################
 ## Common object handling.
@@ -1499,7 +1515,7 @@
 so_suffix := $($(my_prefix)SHLIB_SUFFIX)
 a_suffix := $($(my_prefix)STATIC_LIB_SUFFIX)
 
-ifneq ($(LOCAL_SDK_VERSION)$(LOCAL_USE_VNDK),)
+ifneq ($(LOCAL_SDK_VERSION),)
 built_shared_libraries := \
     $(addprefix $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)OUT_INTERMEDIATE_LIBRARIES)/, \
       $(addsuffix $(so_suffix), \
@@ -1668,6 +1684,12 @@
     ifeq ($(my_tidy_flags),)
       my_tidy_flags := $(call default_tidy_header_filter,$(LOCAL_PATH))
     endif
+
+    # We might be using the static analyzer through clang-tidy.
+    # https://bugs.llvm.org/show_bug.cgi?id=32914
+    ifneq ($(my_tidy_checks),)
+      my_tidy_flags += "-extra-arg-before=-D__clang_analyzer__"
+    endif
   endif
 endif
 
@@ -1681,7 +1703,7 @@
 # One last verification check for ldlibs
 ifndef LOCAL_IS_HOST_MODULE
 my_allowed_ldlibs :=
-ifneq ($(LOCAL_SDK_VERSION)$(LOCAL_USE_VNDK),)
+ifneq ($(LOCAL_SDK_VERSION),)
   my_allowed_ldlibs := $(addprefix -l,$(NDK_PREBUILT_SHARED_LIBRARIES))
 endif
 
@@ -1738,7 +1760,14 @@
 # Export includes
 ###########################################################
 export_includes := $(intermediates)/export_includes
-$(export_includes): PRIVATE_EXPORT_C_INCLUDE_DIRS := $(my_export_c_include_dirs)
+export_cflags := $(foreach d,$(my_export_c_include_dirs),-I $(d))
+# Soong exports cflags instead of include dirs, so that -isystem can be included.
+ifeq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
+export_cflags += $(LOCAL_EXPORT_CFLAGS)
+else ifdef LOCAL_EXPORT_CFLAGS
+$(call pretty-error,LOCAL_EXPORT_CFLAGS can only be used by Soong, use LOCAL_EXPORT_C_INCLUDE_DIRS instead)
+endif
+$(export_includes): PRIVATE_EXPORT_CFLAGS := $(export_cflags)
 # Headers exported by whole static libraries are also exported by this library.
 export_include_deps := $(strip \
    $(foreach l,$(my_whole_static_libraries), \
@@ -1761,10 +1790,8 @@
 $(export_includes) : $(my_export_c_include_deps) $(my_generated_sources) $(export_include_deps) $(LOCAL_EXPORT_C_INCLUDE_DEPS)
 	@echo Export includes file: $< -- $@
 	$(hide) mkdir -p $(dir $@) && rm -f $@.tmp && touch $@.tmp
-ifdef my_export_c_include_dirs
-	$(hide) for d in $(PRIVATE_EXPORT_C_INCLUDE_DIRS); do \
-	        echo "-I $$d" >> $@.tmp; \
-	        done
+ifdef export_cflags
+	$(hide) echo "$(PRIVATE_EXPORT_CFLAGS)" >>$@.tmp
 endif
 ifdef export_include_deps
 	$(hide) for f in $(PRIVATE_REEXPORTED_INCLUDES); do \
@@ -1776,12 +1803,13 @@
 	else \
 	  mv $@.tmp $@ ; \
 	fi
+export_cflags :=
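For example (paths assumed): a non-Soong library exporting a single include directory ends up with one -I flag in its export_includes file, and only Soong modules may add extra flags such as -isystem via LOCAL_EXPORT_CFLAGS.
  # LOCAL_EXPORT_C_INCLUDE_DIRS := external/mylib/include
  # export_cflags -> -I external/mylib/include
  # (Soong only) LOCAL_EXPORT_CFLAGS := -isystem external/mylib/include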
 
 # Kati adds restat=1 to ninja. GNU make does nothing for this.
 .KATI_RESTAT: $(export_includes)
 
 # Make sure export_includes gets generated when you are running mm/mmm
-$(LOCAL_BUILT_MODULE) : | $(export_includes) $(my_link_type)
+$(LOCAL_BUILT_MODULE) : | $(export_includes)
 
 ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
 ifneq (,$(filter-out $(LOCAL_PATH)/%,$(my_export_c_include_dirs)))
@@ -1792,10 +1820,11 @@
     $(SOONG_CONV.$(LOCAL_MODULE).PROBLEMS) $(my_soong_problems)
 SOONG_CONV.$(LOCAL_MODULE).DEPS := \
     $(SOONG_CONV.$(LOCAL_MODULE).DEPS) \
-    $(my_static_libraries) \
-    $(my_whole_static_libraries) \
-    $(my_shared_libraries) \
-    $(my_system_shared_libraries)
+    $(filter-out $($(LOCAL_2ND_ARCH_VAR_PREFIX)UBSAN_RUNTIME_LIBRARY),\
+        $(my_static_libraries) \
+        $(my_whole_static_libraries) \
+        $(my_shared_libraries) \
+        $(my_system_shared_libraries))
 SOONG_CONV := $(SOONG_CONV) $(LOCAL_MODULE)
 endif
 
diff --git a/core/build-system.html b/core/build-system.html
index 95f35ce..c7938cc 100644
--- a/core/build-system.html
+++ b/core/build-system.html
@@ -592,6 +592,17 @@
 </ul>
 </p>
 
+<h4>LOCAL_ANNOTATION_PROCESSORS</h4>
+<p>Set this to a list of modules built with <code>BUILD_HOST_JAVA_LIBRARY</code>
+to have their jars passed to javac with -processorpath for use as annotation
+processors.</p>
+
+<h4>LOCAL_ANNOTATION_PROCESSOR_CLASSES</h4>
+<p>Set this to a list of classes to be passed to javac as -processor arguments.
+This list would be unnecessary, as javac will autodetect annotation processor
+classes, except that the Grok tool that is used on the Android source code
+does not autodetect them and requires listing them manually.</p>
+
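A hedged usage sketch (the module and class names below are hypothetical, not taken from this change):
<p><code>LOCAL_ANNOTATION_PROCESSORS := my-annotation-processor</code><br/>
<code>LOCAL_ANNOTATION_PROCESSOR_CLASSES := com.example.processor.MyProcessor</code></p>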
 <h4>LOCAL_ASSET_FILES</h4>
 <p>In Android.mk files that <code>include $(BUILD_PACKAGE)</code> set this
 to the set of files you want built into your app.  Usually:</p>
@@ -707,6 +718,11 @@
 them here.  For example:</p>
 <p><code>LOCAL_JAVACFLAGS += -Xlint:deprecation</code></p>
 
+<h4>LOCAL_ERROR_PRONE_FLAGS</h4>
+<p>If you have additional flags to pass into the error prone compiler, add
+them here.  For example:</p>
+<p><code>LOCAL_ERROR_PRONE_FLAGS += -Xep:ClassCanBeStatic:ERROR</code></p>
+
 <h4>LOCAL_JAVA_LIBRARIES</h4>
 <p>When linking Java apps and libraries, <code>LOCAL_JAVA_LIBRARIES</code>
 specifies which sets of java classes to include.  Currently there are
diff --git a/core/build_rro_package.mk b/core/build_rro_package.mk
index 24cd9a3..9865b33 100644
--- a/core/build_rro_package.mk
+++ b/core/build_rro_package.mk
@@ -1,10 +1,13 @@
-#########################################################################
+#############################################################################
 ## Standard rules for installing runtime resource overlay APKs.
 ##
-## Set LOCAL_RRO_SKU to the SKU name if the package should apply only to
-## a particular SKU as set by ro.boot.vendor.overlay.sku system property.
+## Set LOCAL_RRO_THEME to the theme name if the package should apply only to
+## a particular theme as set by ro.boot.vendor.overlay.theme system property.
 ##
-#########################################################################
+## If LOCAL_RRO_THEME is not set, the package will apply always, independent
+## of themes.
+##
+#############################################################################
 
 LOCAL_IS_RUNTIME_RESOURCE_OVERLAY := true
 
@@ -12,10 +15,10 @@
   $(error runtime resource overlay package should not contain sources)
 endif
 
-ifeq (S(LOCAL_RRO_SKU),)
+ifeq ($(LOCAL_RRO_THEME),)
   LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/overlay
 else
-  LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/overlay/$(LOCAL_RRO_SKU)
+  LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/overlay/$(LOCAL_RRO_THEME)
 endif
 
 include $(BUILD_SYSTEM)/package.mk
diff --git a/core/clang/OWNERS b/core/clang/OWNERS
new file mode 100644
index 0000000..d41d3fc
--- /dev/null
+++ b/core/clang/OWNERS
@@ -0,0 +1,4 @@
+chh@google.com
+pirama@google.com
+srhines@google.com
+yikong@google.com
diff --git a/core/clang/versions.mk b/core/clang/versions.mk
index abed69b..c2473cd 100644
--- a/core/clang/versions.mk
+++ b/core/clang/versions.mk
@@ -1,4 +1,4 @@
 ## Clang/LLVM release versions.
 
-LLVM_PREBUILTS_VERSION ?= clang-3688880
+LLVM_PREBUILTS_VERSION ?= clang-4053586
 LLVM_PREBUILTS_BASE ?= prebuilts/clang/host
diff --git a/core/cleanbuild.mk b/core/cleanbuild.mk
index fa89758..0e1c88d 100644
--- a/core/cleanbuild.mk
+++ b/core/cleanbuild.mk
@@ -14,7 +14,7 @@
 #
 
 # Don't bother with the cleanspecs if you are running mm/mmm
-ifeq ($(ONE_SHOT_MAKEFILE)$(dont_bother),)
+ifeq ($(ONE_SHOT_MAKEFILE)$(dont_bother)$(NO_ANDROID_CLEANSPEC),)
 
 INTERNAL_CLEAN_STEPS :=
 
@@ -142,143 +142,7 @@
 INTERNAL_CLEAN_STEPS :=
 INTERNAL_CLEAN_BUILD_VERSION :=
 
-endif  # if not ONE_SHOT_MAKEFILE dont_bother
-
-# Since products and build variants (unfortunately) share the same
-# PRODUCT_OUT staging directory, things can get out of sync if different
-# build configurations are built in the same tree.  The following logic
-# will notice when the configuration has changed and remove the files
-# necessary to keep things consistent.
-
-previous_build_config_file := $(PRODUCT_OUT)/previous_build_config.mk
-current_build_config_file := $(PRODUCT_OUT)/current_build_config.mk
-
-current_build_config := \
-    $(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)
-force_installclean := false
-
-# Read the current state from the file, if present.
-# Will set PREVIOUS_BUILD_CONFIG.
-#
-PREVIOUS_BUILD_CONFIG :=
--include $(previous_build_config_file)
-PREVIOUS_BUILD_CONFIG := $(strip $(PREVIOUS_BUILD_CONFIG))
-
-ifdef PREVIOUS_BUILD_CONFIG
-  ifneq ($(current_build_config),$(PREVIOUS_BUILD_CONFIG))
-    $(info *** Build configuration changed: "$(PREVIOUS_BUILD_CONFIG)" -> "$(current_build_config)")
-    ifneq ($(DISABLE_AUTO_INSTALLCLEAN),true)
-      force_installclean := true
-    else
-      $(info DISABLE_AUTO_INSTALLCLEAN is set; skipping auto-clean. Your tree may be in an inconsistent state.)
-    endif
-  endif
-endif  # else, this is the first build, so no need to clean.
-
-# Write the new state to the file.
-#
-$(shell \
-  mkdir -p $(dir $(current_build_config_file)) && \
-  echo "PREVIOUS_BUILD_CONFIG := $(current_build_config)" > \
-      $(current_build_config_file) \
- )
-$(shell cmp $(current_build_config_file) $(previous_build_config_file) > /dev/null 2>&1 || \
-  mv -f $(current_build_config_file) $(previous_build_config_file))
-
-PREVIOUS_BUILD_CONFIG :=
-previous_build_config_file :=
-current_build_config_file :=
-current_build_config :=
-
-#
-# installclean logic
-#
-
-# The files/dirs to delete during an installclean.
-#
-# Deletes all of the installed files -- the intent is to remove files
-# that may no longer be installed, either because the user previously
-# installed them, or they were previously installed by default but no
-# longer are.
-#
-# This is faster than a full clean, since we're not deleting the
-# intermediates. Instead of recompiling, we can just copy the results.
-#
-# Host bin, frameworks, and lib* are intentionally omitted, since
-# otherwise we'd have to rebuild any generated files created with those
-# tools.
-installclean_files := \
-	$(HOST_OUT)/obj/NOTICE_FILES \
-	$(HOST_OUT)/obj/PACKAGING \
-	$(HOST_OUT)/coverage \
-	$(HOST_OUT)/cts \
-	$(HOST_OUT)/nativetest* \
-	$(HOST_OUT)/sdk \
-	$(HOST_OUT)/sdk_addon \
-	$(HOST_OUT)/testcases \
-	$(HOST_OUT)/vts \
-	$(HOST_CROSS_OUT)/bin \
-	$(HOST_CROSS_OUT)/coverage \
-	$(HOST_CROSS_OUT)/lib* \
-	$(HOST_CROSS_OUT)/nativetest* \
-	$(PRODUCT_OUT)/*.img \
-	$(PRODUCT_OUT)/*.ini \
-	$(PRODUCT_OUT)/*.txt \
-	$(PRODUCT_OUT)/*.xlb \
-	$(PRODUCT_OUT)/*.zip \
-	$(PRODUCT_OUT)/kernel \
-	$(PRODUCT_OUT)/data \
-	$(PRODUCT_OUT)/skin \
-	$(PRODUCT_OUT)/obj/NOTICE_FILES \
-	$(PRODUCT_OUT)/obj/PACKAGING \
-	$(PRODUCT_OUT)/recovery \
-	$(PRODUCT_OUT)/root \
-	$(PRODUCT_OUT)/system \
-	$(PRODUCT_OUT)/system_other \
-	$(PRODUCT_OUT)/vendor \
-	$(PRODUCT_OUT)/oem \
-	$(PRODUCT_OUT)/obj/FAKE \
-	$(PRODUCT_OUT)/breakpad \
-	$(PRODUCT_OUT)/cache \
-	$(PRODUCT_OUT)/coverage \
-	$(PRODUCT_OUT)/installer \
-	$(PRODUCT_OUT)/odm \
-	$(PRODUCT_OUT)/sysloader \
-	$(PRODUCT_OUT)/testcases \
-
-# The files/dirs to delete during a dataclean, which removes any files
-# in the staging and emulator data partitions.
-dataclean_files := \
-	$(PRODUCT_OUT)/data/* \
-	$(PRODUCT_OUT)/data-qemu/* \
-	$(PRODUCT_OUT)/userdata-qemu.img
-
-# make sure *_OUT is set so that we won't result in deleting random parts
-# of the filesystem.
-ifneq (2,$(words $(HOST_OUT) $(PRODUCT_OUT)))
-  $(error both HOST_OUT and PRODUCT_OUT should be set at this point.)
-endif
-
-# Define the rules for commandline invocation.
-.PHONY: dataclean
-dataclean: FILES := $(dataclean_files)
-dataclean:
-	$(hide) rm -rf $(FILES)
-	@echo "Deleted emulator userdata images."
-
-.PHONY: installclean
-installclean: FILES := $(installclean_files)
-installclean: dataclean
-	$(hide) rm -rf $(FILES)
-	@echo "Deleted images and staging directories."
-
-ifeq ($(force_installclean),true)
-  $(info *** Forcing "make installclean"...)
-  $(info *** rm -rf $(dataclean_files) $(installclean_files))
-  $(shell rm -rf $(dataclean_files) $(installclean_files))
-  $(info *** Done with the cleaning, now starting the real build.)
-endif
-force_installclean :=
+endif  # if not ONE_SHOT_MAKEFILE dont_bother NO_ANDROID_CLEANSPEC
 
 ###########################################################
 
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index 3f8d822..4556fde 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -2,7 +2,8 @@
 ## Clear out values of all variables used by rule templates.
 ###########################################################
 
-LOCAL_32_BIT_ONLY:= # '',true
+# '',true
+LOCAL_32_BIT_ONLY:=
 LOCAL_AAPT_FLAGS:=
 LOCAL_AAPT_INCLUDE_ALL_RESOURCES:=
 LOCAL_ADDITIONAL_CERTIFICATES:=
@@ -11,6 +12,8 @@
 LOCAL_ADDITIONAL_JAVA_DIR:=
 LOCAL_AIDL_INCLUDES:=
 LOCAL_ALLOW_UNDEFINED_SYMBOLS:=
+LOCAL_ANNOTATION_PROCESSORS:=
+LOCAL_ANNOTATION_PROCESSOR_CLASSES:=
 LOCAL_APK_LIBRARIES:=
 LOCAL_ARM_MODE:=
 LOCAL_ASFLAGS:=
@@ -67,6 +70,8 @@
 LOCAL_DX_FLAGS:=
 LOCAL_EMMA_COVERAGE_FILTER:=
 LOCAL_EMMA_INSTRUMENT:=
+LOCAL_ERROR_PRONE_FLAGS:=
+LOCAL_EXPORT_CFLAGS:=
 LOCAL_EXPORT_C_INCLUDE_DEPS:=
 LOCAL_EXPORT_C_INCLUDE_DIRS:=
 LOCAL_EXPORT_HEADER_LIBRARY_HEADERS:=
@@ -99,7 +104,8 @@
 LOCAL_JACK_CLASSPATH:=
 LOCAL_JACK_COVERAGE_EXCLUDE_FILTER:=
 LOCAL_JACK_COVERAGE_INCLUDE_FILTER:=
-LOCAL_JACK_ENABLED:=$(DEFAULT_JACK_ENABLED) # '' (ie disabled), disabled, full, incremental
+# '' (ie disabled), disabled, full, incremental, javac_frontend
+LOCAL_JACK_ENABLED:=$(DEFAULT_JACK_ENABLED)
 LOCAL_JACK_FLAGS:=
 LOCAL_JACK_PLUGIN:=
 LOCAL_JACK_PLUGIN_PATH:=
@@ -109,6 +115,8 @@
 LOCAL_JARJAR_RULES:=
 LOCAL_JAR_MANIFEST:=
 LOCAL_JAR_PACKAGES:=
+LOCAL_JAR_PROCESSOR:=
+LOCAL_JAR_PROCESSOR_ARGS:=
 LOCAL_JAVACFLAGS:=
 LOCAL_JAVA_LANGUAGE_VERSION:=
 LOCAL_JAVA_LAYERS_FILE:=
@@ -154,6 +162,7 @@
 LOCAL_NO_FPIE :=
 LOCAL_NO_LIBCOMPILER_RT:=
 LOCAL_NO_LIBGCC:=
+LOCAL_NO_NOTICE_FILE:=
 LOCAL_NO_PIC:=
 LOCAL_NOSANITIZE:=
 LOCAL_NO_STANDARD_LIBRARIES:=
@@ -179,12 +188,14 @@
 LOCAL_PREBUILT_STATIC_JAVA_LIBRARIES:=
 LOCAL_PREBUILT_STRIP_COMMENTS:=
 LOCAL_PRIVILEGED_MODULE:=
-LOCAL_PROGUARD_ENABLED:= # '',full,custom,nosystem,disabled,obfuscation,optimization
+# '',full,custom,nosystem,disabled,obfuscation,optimization
+LOCAL_PROGUARD_ENABLED:=
 LOCAL_PROGUARD_FLAG_FILES:=
 LOCAL_PROGUARD_FLAGS:=
 LOCAL_PROPRIETARY_MODULE:=
 LOCAL_PROTOC_FLAGS:=
-LOCAL_PROTOC_OPTIMIZE_TYPE:= # lite(default),micro,nano,full,nanopb-c,nanopb-c-enable_malloc
+# lite(default),micro,nano,full,nanopb-c,nanopb-c-enable_malloc
+LOCAL_PROTOC_OPTIMIZE_TYPE:=
 LOCAL_PROTO_JAVA_OUTPUT_PARAMS:=
 LOCAL_RECORDED_MODULE_TYPE:=
 LOCAL_RENDERSCRIPT_CC:=
@@ -200,7 +211,7 @@
 LOCAL_RES_LIBRARIES:=
 LOCAL_RESOURCE_DIR:=
 LOCAL_RMTYPEDEFS:=
-LOCAL_RRO_SKU:=
+LOCAL_RRO_THEME:=
 LOCAL_RTTI_FLAG:=
 LOCAL_SANITIZE:=
 LOCAL_SANITIZE_DIAG:=
@@ -209,7 +220,8 @@
 LOCAL_SDK_VERSION:=
 LOCAL_SHARED_ANDROID_LIBRARIES:=
 LOCAL_SHARED_LIBRARIES:=
-LOCAL_SOURCE_FILES_ALL_GENERATED:= # '',true
+# '',true
+LOCAL_SOURCE_FILES_ALL_GENERATED:=
 LOCAL_SRC_FILES:=
 LOCAL_SRC_FILES_EXCLUDE:=
 LOCAL_STATIC_ANDROID_LIBRARIES:=
@@ -227,6 +239,7 @@
 LOCAL_UNSTRIPPED_PATH:=
 LOCAL_USE_AAPT2:=$(USE_AAPT2)
 LOCAL_USE_VNDK:=
+LOCAL_VENDOR_MODULE:=
 LOCAL_VTSC_FLAGS:=
 LOCAL_VTS_INCLUDES:=
 LOCAL_WARNINGS_ENABLE:=
diff --git a/core/config.mk b/core/config.mk
index 319a069..5895128 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -79,6 +79,10 @@
 # Some specific paths to tools
 SRC_DROIDDOC_DIR := $(TOPDIR)build/tools/droiddoc
 
+# Set up efficient math functions which are used in make.
+# Here since this file is included by envsetup as well as during build.
+include $(BUILD_SYSTEM)/math.mk
+
 # Various mappings to avoid hard-coding paths all over the place
 include $(BUILD_SYSTEM)/pathmap.mk
 
@@ -129,11 +133,7 @@
 # Parse out any modifier targets.
 # ###############################################################
 
-# The 'showcommands' goal says to show the full command
-# lines being executed, instead of a short message about
-# the kind of operation being done.
-SHOW_COMMANDS:= $(filter showcommands,$(MAKECMDGOALS))
-hide := $(if $(SHOW_COMMANDS),,@)
+hide := @
 
 ################################################################
 # Tools needed in product configuration makefiles.
@@ -308,6 +308,7 @@
 
 export CC_WRAPPER
 export CXX_WRAPPER
+export JAVAC_WRAPPER
 endif
 
 ifdef TARGET_PREFER_32_BIT
@@ -476,6 +477,23 @@
 
 BUILD_PLATFORM_ZIP := $(filter platform platform-java,$(MAKECMDGOALS))
 
+# ---------------------------------------------------------------
+# Whether we can expect a full build graph
+ALLOW_MISSING_DEPENDENCIES := $(filter true,$(ALLOW_MISSING_DEPENDENCIES))
+ifneq ($(TARGET_BUILD_APPS),)
+ALLOW_MISSING_DEPENDENCIES := true
+endif
+ifeq ($(TARGET_BUILD_PDK),true)
+ALLOW_MISSING_DEPENDENCIES := true
+endif
+ifneq ($(filter true,$(SOONG_ALLOW_MISSING_DEPENDENCIES)),)
+ALLOW_MISSING_DEPENDENCIES := true
+endif
+ifneq ($(ONE_SHOT_MAKEFILE),)
+ALLOW_MISSING_DEPENDENCIES := true
+endif
+.KATI_READONLY := ALLOW_MISSING_DEPENDENCIES
+
 #
 # Tools that are prebuilts for TARGET_BUILD_APPS
 #
@@ -492,13 +510,23 @@
 BCC_COMPAT := $(HOST_OUT_EXECUTABLES)/bcc_compat
 DEPMOD := $(HOST_OUT_EXECUTABLES)/depmod
 
+#TODO: use a smaller -Xmx value for most libraries;
+#      only core.jar and framework.jar need a heap this big.
+ifndef DX_ALT_JAR
 DX := $(HOST_OUT_EXECUTABLES)/dx
+DX_COMMAND := $(DX) -JXms16M -JXmx2048M
+else
+DX := $(DX_ALT_JAR)
+DX_COMMAND := java -Xms16M -Xmx2048M -jar $(DX)
+endif
+
 MAINDEXCLASSES := $(HOST_OUT_EXECUTABLES)/mainDexClasses
 
 SOONG_ZIP := $(SOONG_HOST_OUT_EXECUTABLES)/soong_zip
 ZIP2ZIP := $(SOONG_HOST_OUT_EXECUTABLES)/zip2zip
+FILESLIST := $(SOONG_HOST_OUT_EXECUTABLES)/fileslist
 
-JAVAC_FILTER := $(SOONG_HOST_OUT_EXECUTABLES)/soong_javac_filter
+SOONG_JAVAC_WRAPPER := $(SOONG_HOST_OUT_EXECUTABLES)/soong_javac_wrapper
 
 # Always use prebuilts for ckati and makeparallel
 prebuilt_build_tools := prebuilts/build-tools
@@ -589,9 +617,11 @@
 ifeq ($(TARGET_USES_MKE2FS),true)
 MAKE_EXT4FS := $(HOST_OUT_EXECUTABLES)/mke2fs$(HOST_EXECUTABLE_SUFFIX)
 MKEXTUSERIMG := $(HOST_OUT_EXECUTABLES)/mkuserimg_mke2fs.sh
+MKE2FS_CONF := system/extras/ext4_utils/mke2fs.conf
 else
 MAKE_EXT4FS := $(HOST_OUT_EXECUTABLES)/make_ext4fs$(HOST_EXECUTABLE_SUFFIX)
 MKEXTUSERIMG := $(HOST_OUT_EXECUTABLES)/mkuserimg.sh
+MKE2FS_CONF :=
 endif
 BLK_ALLOC_TO_BASE_FS := $(HOST_OUT_EXECUTABLES)/blk_alloc_to_base_fs$(HOST_EXECUTABLE_SUFFIX)
 MAKE_SQUASHFS := $(HOST_OUT_EXECUTABLES)/mksquashfs$(HOST_EXECUTABLE_SUFFIX)
@@ -633,6 +663,7 @@
 FUTILITY := $(HOST_OUT_EXECUTABLES)/futility-host
 VBOOT_SIGNER := prebuilts/misc/scripts/vboot_signer/vboot_signer.sh
 FEC := $(HOST_OUT_EXECUTABLES)/fec
+BRILLO_UPDATE_PAYLOAD := $(HOST_OUT_EXECUTABLES)/brillo_update_payload
 
 DEXDUMP := $(HOST_OUT_EXECUTABLES)/dexdump2$(BUILD_EXECUTABLE_SUFFIX)
 PROFMAN := $(HOST_OUT_EXECUTABLES)/profman
@@ -651,13 +682,21 @@
 
 # We may not have the right JAVA_HOME/PATH set up yet when this is run from envsetup.sh.
 ifneq ($(CALLED_FROM_SETUP),true)
-HOST_JDK_TOOLS_JAR:= $(shell $(BUILD_SYSTEM)/find-jdk-tools-jar.sh)
+
+# Path to tools.jar, or empty if EXPERIMENTAL_USE_OPENJDK9 is set
+HOST_JDK_TOOLS_JAR :=
+# TODO: Remove HOST_JDK_TOOLS_JAR and all references to it once OpenJDK 8
+# toolchains are no longer supported (i.e. when what is now
+# EXPERIMENTAL_USE_OPENJDK9 becomes the standard). http://b/38418220
+ifeq ($(EXPERIMENTAL_USE_OPENJDK9),)
+HOST_JDK_TOOLS_JAR := $(shell $(BUILD_SYSTEM)/find-jdk-tools-jar.sh)
 
 ifneq ($(HOST_JDK_TOOLS_JAR),)
 ifeq ($(wildcard $(HOST_JDK_TOOLS_JAR)),)
 $(error Error: could not find jdk tools.jar at $(HOST_JDK_TOOLS_JAR), please check if your JDK was installed correctly)
 endif
 endif
+endif # ifeq ($(EXPERIMENTAL_USE_OPENJDK9),)
 
 # Is the host JDK 64-bit version?
 HOST_JDK_IS_64BIT_VERSION :=
@@ -673,11 +712,25 @@
 MD5SUM:=md5sum
 endif
 
-APICHECK_CLASSPATH := $(HOST_JDK_TOOLS_JAR)
-APICHECK_CLASSPATH := $(APICHECK_CLASSPATH):$(HOST_OUT_JAVA_LIBRARIES)/doclava$(COMMON_JAVA_PACKAGE_SUFFIX)
-APICHECK_CLASSPATH := $(APICHECK_CLASSPATH):$(HOST_OUT_JAVA_LIBRARIES)/jsilver$(COMMON_JAVA_PACKAGE_SUFFIX)
+APICHECK_CLASSPATH_ENTRIES := \
+    $(HOST_OUT_JAVA_LIBRARIES)/doclava$(COMMON_JAVA_PACKAGE_SUFFIX) \
+    $(HOST_OUT_JAVA_LIBRARIES)/jsilver$(COMMON_JAVA_PACKAGE_SUFFIX) \
+    $(HOST_JDK_TOOLS_JAR) \
+    )
+APICHECK_CLASSPATH := $(subst $(space),:,$(strip $(APICHECK_CLASSPATH_ENTRIES)))
+
 APICHECK_COMMAND := $(APICHECK) -JXmx1024m -J"classpath $(APICHECK_CLASSPATH)"
 
+# Boolean variable determining if Treble is fully enabled
+PRODUCT_FULL_TREBLE := false
+ifneq ($(PRODUCT_FULL_TREBLE_OVERRIDE),)
+  PRODUCT_FULL_TREBLE := $(PRODUCT_FULL_TREBLE_OVERRIDE)
+else ifeq ($(PRODUCT_SHIPPING_API_LEVEL),)
+  #$(warning no product shipping level defined)
+else ifneq ($(call math_gt_or_eq,$(PRODUCT_SHIPPING_API_LEVEL),26),)
+  PRODUCT_FULL_TREBLE := true
+endif
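For example (values illustrative): a device that declares a PRODUCT_SHIPPING_API_LEVEL of 26 or higher is treated as full Treble, and the override variable wins either way.
# PRODUCT_SHIPPING_API_LEVEL := 25     -> PRODUCT_FULL_TREBLE = false
# PRODUCT_SHIPPING_API_LEVEL := 26     -> PRODUCT_FULL_TREBLE = true   ($(call math_gt_or_eq,26,26) is true)
# PRODUCT_FULL_TREBLE_OVERRIDE := true -> PRODUCT_FULL_TREBLE = true regardless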
+
 # The default key if not set as LOCAL_CERTIFICATE
 ifdef PRODUCT_DEFAULT_DEV_CERTIFICATE
   DEFAULT_SYSTEM_DEV_CERTIFICATE := $(PRODUCT_DEFAULT_DEV_CERTIFICATE)
@@ -789,10 +842,10 @@
 RS_PREBUILT_CLCORE := prebuilts/sdk/renderscript/lib/$(TARGET_ARCH)/librsrt_$(TARGET_ARCH).bc
 RS_PREBUILT_COMPILER_RT := prebuilts/sdk/renderscript/lib/$(TARGET_ARCH)/libcompiler_rt.a
 ifeq (true,$(TARGET_IS_64_BIT))
-RS_PREBUILT_LIBPATH := -L prebuilts/ndk/current/platforms/android-21/arch-$(TARGET_ARCH)/usr/lib64 \
-                       -L prebuilts/ndk/current/platforms/android-21/arch-$(TARGET_ARCH)/usr/lib
+RS_PREBUILT_LIBPATH := -L prebuilts/ndk/r10/platforms/android-21/arch-$(TARGET_ARCH)/usr/lib64 \
+                       -L prebuilts/ndk/r10/platforms/android-21/arch-$(TARGET_ARCH)/usr/lib
 else
-RS_PREBUILT_LIBPATH := -L prebuilts/ndk/current/platforms/android-9/arch-$(TARGET_ARCH)/usr/lib
+RS_PREBUILT_LIBPATH := -L prebuilts/ndk/r10/platforms/android-9/arch-$(TARGET_ARCH)/usr/lib
 endif
 
 # API Level lists for Renderscript Compat lib.
@@ -845,8 +898,7 @@
 
 # These goals don't need to collect and include Android.mks/CleanSpec.mks
 # in the source tree.
-dont_bother_goals := clean clobber dataclean installclean \
-    help out \
+dont_bother_goals := out \
     snod systemimage-nodeps \
     stnod systemtarball-nodeps \
     userdataimage-nodeps userdatatarball-nodeps \
diff --git a/core/config_sanitizers.mk b/core/config_sanitizers.mk
index 729ef48..04aedf4 100644
--- a/core/config_sanitizers.mk
+++ b/core/config_sanitizers.mk
@@ -63,7 +63,7 @@
 endif
 
 # If CFI is disabled globally, remove it from my_sanitize.
-ifeq ($(strip $(ENABLE_CFI)),)
+ifeq ($(strip $(ENABLE_CFI)),false)
   my_sanitize := $(filter-out cfi,$(my_sanitize))
   my_sanitize_diag := $(filter-out cfi,$(my_sanitize_diag))
 endif
@@ -74,6 +74,12 @@
   my_sanitize_diag := $(filter-out cfi,$(my_sanitize_diag))
 endif
 
+# Also disable CFI if ASAN is enabled.
+ifneq ($(filter address,$(my_sanitize)),)
+  my_sanitize := $(filter-out cfi,$(my_sanitize))
+  my_sanitize_diag := $(filter-out cfi,$(my_sanitize_diag))
+endif
+
 # CFI needs gold linker, and mips toolchain does not have one.
 ifneq ($(filter mips mips64,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)),)
   my_sanitize := $(filter-out cfi,$(my_sanitize))
diff --git a/core/configure_local_jack.mk b/core/configure_local_jack.mk
index 2270c88..f8049a3 100644
--- a/core/configure_local_jack.mk
+++ b/core/configure_local_jack.mk
@@ -18,18 +18,23 @@
 LOCAL_JACK_ENABLED := $(ANDROID_FORCE_JACK_ENABLED)
 endif
 
+ifneq ($(ANDROID_COMPILE_WITH_JACK),true)
+LOCAL_JACK_ENABLED :=
+endif
+
 LOCAL_JACK_ENABLED := $(strip $(LOCAL_JACK_ENABLED))
 LOCAL_MODULE := $(strip $(LOCAL_MODULE))
 
-ifneq ($(LOCAL_JACK_ENABLED),full)
-ifneq ($(LOCAL_JACK_ENABLED),incremental)
+valid_jack_enabled_values := full incremental javac_frontend disabled
+
 ifdef LOCAL_JACK_ENABLED
-ifneq ($(LOCAL_JACK_ENABLED),disabled)
-$(error $(LOCAL_PATH): invalid LOCAL_JACK_ENABLED "$(LOCAL_JACK_ENABLED)" for $(LOCAL_MODULE))
-endif
-endif
-LOCAL_JACK_ENABLED :=
-endif
+  ifneq ($(LOCAL_JACK_ENABLED),$(filter $(firstword $(LOCAL_JACK_ENABLED)),$(valid_jack_enabled_values)))
+    $(error $(LOCAL_PATH): invalid LOCAL_JACK_ENABLED "$(LOCAL_JACK_ENABLED)" for $(LOCAL_MODULE))
+  endif
+
+  ifeq ($(LOCAL_JACK_ENABLED),disabled)
+    LOCAL_JACK_ENABLED :=
+  endif
 endif
 
 ifdef $(LOCAL_MODULE).JACK_VERSION
diff --git a/core/definitions.mk b/core/definitions.mk
index aae269b..15ef6f4 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -681,7 +681,7 @@
 # $(1): library name
 # $(2): Non-empty if IS_HOST_MODULE
 define _java-lib-full-classes.jar
-$(call _java-lib-dir,$(1),$(2))/$(if $(2),javalib,classes)$(COMMON_JAVA_PACKAGE_SUFFIX)
+$(call _java-lib-dir,$(1),$(2))/classes.jar
 endef
 
 # Get the jar files (you can pass to "javac -classpath") of static or shared
@@ -704,14 +704,6 @@
 $(call java-lib-files,$(1),$(2))
 endef
 
-# Get the jar files (you can pass to "javac -classpath") of host dalvik Java libraries.
-# You can also use them as dependency files.
-# A host dalvik Java library is different from a host Java library in that
-# the java lib file is classes.jar, not javalib.jar.
-# $(1): library name list
-define host-dex-java-lib-files
-$(foreach lib,$(1),$(call _java-lib-dir,$(lib),true)/classes.jar)
-endef
 
 ###########################################################
 ## Convert "core ext framework" to "out/.../classes.jack ..."
@@ -854,10 +846,10 @@
 ## Use echo-(warning|error) in a build rule
 ## Use pretty-(warning|error) instead of $(warning)/$(error)
 ###########################################################
-ESC_BOLD := \e[1m
-ESC_WARNING := \e[35m
-ESC_ERROR := \e[31m
-ESC_RESET := \e[0m
+ESC_BOLD := \033[1m
+ESC_WARNING := \033[35m
+ESC_ERROR := \033[31m
+ESC_RESET := \033[0m
 
 # $(1): path (and optionally line) information
 # $(2): message to print
@@ -1025,12 +1017,15 @@
 $(hide) echo >> $2
 endef
 
+# b/37755219
+RS_CC_ASAN_OPTIONS := ASAN_OPTIONS=detect_leaks=0:detect_container_overflow=0
+
 define transform-renderscripts-to-java-and-bc
 @echo "RenderScript: $(PRIVATE_MODULE) <= $(PRIVATE_RS_SOURCE_FILES)"
 $(hide) rm -rf $(PRIVATE_RS_OUTPUT_DIR)
 $(hide) mkdir -p $(PRIVATE_RS_OUTPUT_DIR)/res/raw
 $(hide) mkdir -p $(PRIVATE_RS_OUTPUT_DIR)/src
-$(hide) $(PRIVATE_RS_CC) \
+$(hide) $(RS_CC_ASAN_OPTIONS) $(PRIVATE_RS_CC) \
   -o $(PRIVATE_RS_OUTPUT_DIR)/res/raw \
   -p $(PRIVATE_RS_OUTPUT_DIR)/src \
   -d $(PRIVATE_RS_OUTPUT_DIR) \
@@ -1054,8 +1049,9 @@
 	$(dir $@)/$(notdir $(<:.bc=.o)) \
 	$(RS_PREBUILT_COMPILER_RT) \
 	-o $@ $(TARGET_GLOBAL_LDFLAGS) -Wl,--hash-style=sysv -L prebuilts/gcc/ \
-	$(RS_PREBUILT_LIBPATH) -L $(TARGET_OUT_INTERMEDIATE_LIBRARIES) \
-	-lRSSupport -lm -lc
+	$(RS_PREBUILT_LIBPATH) \
+	$(call intermediates-dir-for,SHARED_LIBRARIES,libRSSupport)/libRSSupport.so \
+	-lm -lc
 endef
 
 ###########################################################
@@ -1066,7 +1062,7 @@
 @echo "RenderScript: $(PRIVATE_MODULE) <= $(PRIVATE_RS_SOURCE_FILES)"
 $(hide) rm -rf $(PRIVATE_RS_OUTPUT_DIR)
 $(hide) mkdir -p $(PRIVATE_RS_OUTPUT_DIR)/
-$(hide) $(PRIVATE_RS_CC) \
+$(hide) $(RS_CC_ASAN_OPTIONS) $(PRIVATE_RS_CC) \
   -o $(PRIVATE_RS_OUTPUT_DIR)/ \
   -d $(PRIVATE_RS_OUTPUT_DIR) \
   -a $@ -MD \
@@ -1834,18 +1830,21 @@
 define transform-to-stripped-keep-mini-debug-info
 @echo "$($(PRIVATE_PREFIX)DISPLAY) Strip (mini debug info): $(PRIVATE_MODULE) ($@)"
 @mkdir -p $(dir $@)
-$(hide) $(PRIVATE_NM) -D $< --format=posix --defined-only | awk '{ print $$1 }' | sort >$@.dynsyms
-$(hide) $(PRIVATE_NM) $< --format=posix --defined-only | awk '{ if ($$2 == "T" || $$2 == "t" || $$2 == "D") print $$1 }' | sort >$@.funcsyms
-$(hide) comm -13 $@.dynsyms $@.funcsyms >$@.keep_symbols
-$(hide) $(PRIVATE_OBJCOPY) --only-keep-debug $< $@.debug
-$(hide) $(PRIVATE_OBJCOPY) --rename-section .debug_frame=saved_debug_frame $@.debug $@.mini_debuginfo
-$(hide) $(PRIVATE_OBJCOPY) -S --remove-section .gdb_index --remove-section .comment --keep-symbols=$@.keep_symbols $@.mini_debuginfo
-$(hide) $(PRIVATE_OBJCOPY) --rename-section saved_debug_frame=.debug_frame $@.mini_debuginfo
-$(hide) $(PRIVATE_STRIP) --strip-all -R .comment $< -o $@
-$(hide) rm -f $@.mini_debuginfo.xz
-$(hide) xz $@.mini_debuginfo
-$(hide) $(PRIVATE_OBJCOPY) --add-section .gnu_debugdata=$@.mini_debuginfo.xz $@
-$(hide) rm -f $@.dynsyms $@.funcsyms $@.keep_symbols $@.debug $@.mini_debuginfo.xz
+$(hide) rm -f $@ $@.dynsyms $@.funcsyms $@.keep_symbols $@.debug $@.mini_debuginfo.xz
+if $(PRIVATE_STRIP) --strip-all -R .comment $< -o $@; then \
+  $(PRIVATE_OBJCOPY) --only-keep-debug $< $@.debug && \
+  $(PRIVATE_NM) -D $< --format=posix --defined-only | awk '{ print $$1 }' | sort >$@.dynsyms && \
+  $(PRIVATE_NM) $< --format=posix --defined-only | awk '{ if ($$2 == "T" || $$2 == "t" || $$2 == "D") print $$1 }' | sort >$@.funcsyms && \
+  comm -13 $@.dynsyms $@.funcsyms >$@.keep_symbols && \
+  $(PRIVATE_OBJCOPY) --rename-section .debug_frame=saved_debug_frame $@.debug $@.mini_debuginfo && \
+  $(PRIVATE_OBJCOPY) -S --remove-section .gdb_index --remove-section .comment --keep-symbols=$@.keep_symbols $@.mini_debuginfo && \
+  $(PRIVATE_OBJCOPY) --rename-section saved_debug_frame=.debug_frame $@.mini_debuginfo && \
+  rm -f $@.mini_debuginfo.xz && \
+  xz $@.mini_debuginfo && \
+  $(PRIVATE_OBJCOPY) --add-section .gnu_debugdata=$@.mini_debuginfo.xz $@; \
+else \
+  cp -f $< $@; \
+fi
 endef
 
 define transform-to-stripped-keep-symbols
@@ -2005,6 +2004,9 @@
 APPS_DEFAULT_VERSION_NAME := $(PLATFORM_VERSION)
 endif
 
+# b/37750224
+AAPT_ASAN_OPTIONS := ASAN_OPTIONS=detect_leaks=0
+
 # TODO: Right now we generate the asset resources twice, first as part
 # of generating the Java classes, then at the end when packaging the final
 # assets.  This should be changed to do one of two things: (1) Don't generate
@@ -2019,7 +2021,7 @@
 define create-resource-java-files
 @mkdir -p $(PRIVATE_SOURCE_INTERMEDIATES_DIR)
 @mkdir -p $(dir $(PRIVATE_RESOURCE_PUBLICS_OUTPUT))
-$(hide) $(AAPT) package $(PRIVATE_AAPT_FLAGS) -m \
+$(hide) $(AAPT_ASAN_OPTIONS) $(AAPT) package $(PRIVATE_AAPT_FLAGS) -m \
     $(eval # PRIVATE_PRODUCT_AAPT_CONFIG is intentionally missing-- see comment.) \
     $(addprefix -J , $(PRIVATE_SOURCE_INTERMEDIATES_DIR)) \
     $(addprefix -M , $(PRIVATE_ANDROID_MANIFEST)) \
@@ -2196,9 +2198,9 @@
 # $(2): bootclasspath
 define compile-java
 $(hide) rm -f $@
-$(hide) rm -rf $(PRIVATE_CLASS_INTERMEDIATES_DIR)
+$(hide) rm -rf $(PRIVATE_CLASS_INTERMEDIATES_DIR) $(PRIVATE_ANNO_INTERMEDIATES_DIR)
 $(hide) mkdir -p $(dir $@)
-$(hide) mkdir -p $(PRIVATE_CLASS_INTERMEDIATES_DIR)
+$(hide) mkdir -p $(PRIVATE_CLASS_INTERMEDIATES_DIR) $(PRIVATE_ANNO_INTERMEDIATES_DIR)
 $(call unzip-jar-files,$(PRIVATE_STATIC_JAVA_LIBRARIES),$(PRIVATE_CLASS_INTERMEDIATES_DIR))
 $(call dump-words-to-file,$(PRIVATE_JAVA_SOURCES),$(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list)
 $(hide) if [ -d "$(PRIVATE_SOURCE_INTERMEDIATES_DIR)" ]; then \
@@ -2211,16 +2213,16 @@
 $(hide) tr ' ' '\n' < $(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list \
     | $(NORMALIZE_PATH) | sort -u > $(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list-uniq
 $(hide) if [ -s $(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list-uniq ] ; then \
-    ( $(1) -encoding UTF-8 \
+    $(SOONG_JAVAC_WRAPPER) $(1) -encoding UTF-8 \
     $(if $(findstring true,$(PRIVATE_WARNINGS_ENABLE)),$(xlint_unchecked),) \
     $(2) \
     $(addprefix -classpath ,$(strip \
         $(call normalize-path-list,$(PRIVATE_ALL_JAVA_LIBRARIES)))) \
     $(if $(findstring true,$(PRIVATE_WARNINGS_ENABLE)),$(xlint_unchecked),) \
-    -extdirs "" -d $(PRIVATE_CLASS_INTERMEDIATES_DIR) \
+    -extdirs "" -d $(PRIVATE_CLASS_INTERMEDIATES_DIR) -s $(PRIVATE_ANNO_INTERMEDIATES_DIR) \
     $(PRIVATE_JAVACFLAGS) \
     \@$(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list-uniq \
-    || ( rm -rf $(PRIVATE_CLASS_INTERMEDIATES_DIR) ; exit 41 ) ) 2>&1 | $(JAVAC_FILTER); \
+    || ( rm -rf $(PRIVATE_CLASS_INTERMEDIATES_DIR) ; exit 41 ) \
 fi
 $(if $(PRIVATE_JAVA_LAYERS_FILE), $(hide) build/tools/java-layers.py \
     $(PRIVATE_JAVA_LAYERS_FILE) \@$(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list-uniq,)
@@ -2267,9 +2269,10 @@
 $(hide) mkdir -p $(PRIVATE_JACK_INTERMEDIATES_DIR)
 $(if $(PRIVATE_JACK_INCREMENTAL_DIR),$(hide) mkdir -p $(PRIVATE_JACK_INCREMENTAL_DIR))
 $(call dump-words-to-file,$(PRIVATE_JAVA_SOURCES),$(PRIVATE_JACK_INTERMEDIATES_DIR)/java-source-list)
-$(hide) if [ -d "$(PRIVATE_SOURCE_INTERMEDIATES_DIR)" ]; then \
-          find $(PRIVATE_SOURCE_INTERMEDIATES_DIR) -name '*.java' >> $(PRIVATE_JACK_INTERMEDIATES_DIR)/java-source-list; \
-fi
+$(if $(PRIVATE_SOURCE_INTERMEDIATES_DIR), \
+    $(hide) if [ -d "$(PRIVATE_SOURCE_INTERMEDIATES_DIR)" ]; then \
+            find $(PRIVATE_SOURCE_INTERMEDIATES_DIR) -name '*.java' >> $(PRIVATE_JACK_INTERMEDIATES_DIR)/java-source-list; \
+    fi)
 $(if $(PRIVATE_HAS_PROTO_SOURCES), \
     $(hide) find $(PRIVATE_PROTO_SOURCE_INTERMEDIATES_DIR) -name '*.java' >> $(PRIVATE_JACK_INTERMEDIATES_DIR)/java-source-list )
 $(if $(PRIVATE_HAS_RS_SOURCES), \
@@ -2286,6 +2289,10 @@
     $(hide) $(call add-java-resources-to,$@.res.tmp.zip)
     $(hide) unzip -qo $@.res.tmp.zip -d $@.res.tmp
     $(hide) rm $@.res.tmp.zip)
+$(if $(PRIVATE_JACK_IMPORT_JAR),
+    $(hide) mkdir -p $@.tmpjill.res
+    $(hide) unzip -qo $(PRIVATE_JACK_IMPORT_JAR) -d $@.tmpjill.res
+    $(hide) find $@.tmpjill.res -iname "*.class" -delete)
 $(hide) if [ -s $(PRIVATE_JACK_INTERMEDIATES_DIR)/java-source-list-uniq ] ; then \
     export tmpEcjArg="@$(PRIVATE_JACK_INTERMEDIATES_DIR)/java-source-list-uniq"; \
 else \
@@ -2298,6 +2305,8 @@
         -D jack.dex.optimize="false") \
     $(if $(PRIVATE_RMTYPEDEFS), \
         -D jack.android.remove-typedef="true") \
+    $(if $(PRIVATE_JACK_IMPORT_JAR), \
+        --import $(PRIVATE_JACK_IMPORT_JAR) --import-resource $@.tmpjill.res) \
     $(addprefix --classpath ,$(strip \
         $(call normalize-path-list,$(PRIVATE_JACK_SHARED_LIBRARIES)))) \
     $(addprefix --import ,$(call reverse-list,$(PRIVATE_STATIC_JACK_LIBRARIES))) \
@@ -2392,13 +2401,16 @@
 fi
 endef
 
+# b/37756495
+IJAR_ASAN_OPTIONS := ASAN_OPTIONS=detect_leaks=0
+
 ## Rule to create a table of contents from a .jar file.
 ## Must be called with $(eval).
 # $(1): A .jar file
 define _transform-jar-to-toc
 $1.toc: $1 | $(IJAR)
 	@echo Generating TOC: $$@
-	$(hide) $(IJAR) $$< $$@.tmp
+	$(hide) $(IJAR_ASAN_OPTIONS) $(IJAR) $$< $$@.tmp
 	$$(call commit-change-for-toc,$$@)
 endef
 
@@ -2513,14 +2525,27 @@
 $(filter-out -classpath -bootclasspath "",$(subst :,$(space),$(1)))
 endef
 
+# Takes an sdk version that might be PLATFORM_VERSION_CODENAME (for example P),
+# returns a number greater than the highest existing sdk version if it is, or
+# the input if it is not.
+define codename-or-sdk-to-sdk
+$(if $(filter $(1),$(PLATFORM_VERSION_CODENAME)),10000,$(1))
+endef
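A usage sketch (values illustrative):
# $(call codename-or-sdk-to-sdk,26) -> 26
# $(call codename-or-sdk-to-sdk,P)  -> 10000   (when PLATFORM_VERSION_CODENAME is P)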
+
+# --add-opens is required because desugar reflects via java.lang.invoke.MethodHandles.Lookup
 define desugar-classes-jar
 @echo Desugar: $@
 @mkdir -p $(dir $@)
 $(hide) rm -f $@ $@.tmp
-$(hide) java -jar $(DESUGAR) \
+@rm -rf $(dir $@)/desugar_dumped_classes
+@mkdir $(dir $@)/desugar_dumped_classes
+$(hide) java \
+    $(if $(EXPERIMENTAL_USE_OPENJDK9),--add-opens java.base/java.lang.invoke=ALL-UNNAMED,) \
+    -Djdk.internal.lambda.dumpProxyClasses=$(abspath $(dir $@))/desugar_dumped_classes \
+    -jar $(DESUGAR) \
     $(addprefix --bootclasspath_entry ,$(call desugar-bootclasspath,$(PRIVATE_BOOTCLASSPATH))) \
     $(addprefix --classpath_entry ,$(PRIVATE_ALL_JAVA_LIBRARIES)) \
-    --min_sdk_version $(PRIVATE_SDK_VERSION) \
+    --min_sdk_version $(call codename-or-sdk-to-sdk,$(PRIVATE_DEFAULT_APP_TARGET_SDK)) \
     --allow_empty_bootclasspath \
     $(if $(filter --core-library,$(PRIVATE_DX_FLAGS)),--core_library) \
     -i $< -o $@.tmp
@@ -2528,16 +2553,13 @@
 endef
 
 
-#TODO: use a smaller -Xmx value for most libraries;
-#      only core.jar and framework.jar need a heap this big.
 define transform-classes.jar-to-dex
 @echo "target Dex: $(PRIVATE_MODULE)"
 @mkdir -p $(dir $@)
 $(hide) rm -f $(dir $@)classes*.dex
-$(hide) $(DX) \
-    -JXms16M -JXmx2048M \
+$(hide) $(DX_COMMAND) \
     --dex --output=$(dir $@) \
-    --min-sdk-version=$(PRIVATE_SDK_VERSION) \
+    --min-sdk-version=$(call codename-or-sdk-to-sdk,$(PRIVATE_DEFAULT_APP_TARGET_SDK)) \
     $(if $(NO_OPTIMIZE_DX), \
         --no-optimize) \
     $(if $(GENERATE_DEX_DEBUG), \
@@ -2588,7 +2610,7 @@
 #values; applications can override these by explicitly stating
 #them in their manifest.
 define add-assets-to-package
-$(hide) $(AAPT) package -u $(PRIVATE_AAPT_FLAGS) \
+$(hide) $(AAPT_ASAN_OPTIONS) $(AAPT) package -u $(PRIVATE_AAPT_FLAGS) \
     $(addprefix -c , $(PRIVATE_PRODUCT_AAPT_CONFIG)) \
     $(addprefix --preferred-density , $(PRIVATE_PRODUCT_AAPT_PREF_CONFIG)) \
     $(addprefix -M , $(PRIVATE_ANDROID_MANIFEST)) \
@@ -2672,10 +2694,14 @@
 $(call sign-package-arg,$@)
 endef
 
+# signapk uses internal APIs from sun.security.{pkcs,x509}; see http://b/37137869
 # $(1): the package file we are signing.
 define sign-package-arg
 $(hide) mv $(1) $(1).unsigned
-$(hide) java -Djava.library.path=$(SIGNAPK_JNI_LIBRARY_PATH) -jar $(SIGNAPK_JAR) \
+$(hide) java -Djava.library.path=$(SIGNAPK_JNI_LIBRARY_PATH) \
+    $(if $(EXPERIMENTAL_USE_OPENJDK9),--add-exports java.base/sun.security.pkcs=ALL-UNNAMED,) \
+    $(if $(EXPERIMENTAL_USE_OPENJDK9),--add-exports java.base/sun.security.x509=ALL-UNNAMED,) \
+    -jar $(SIGNAPK_JAR) \
     $(PRIVATE_CERTIFICATE) $(PRIVATE_PRIVATE_KEY) \
     $(PRIVATE_ADDITIONAL_CERTIFICATES) $(1).unsigned $(1).signed
 $(hide) mv $(1).signed $(1)
@@ -2845,10 +2871,8 @@
 
 # Define a rule to create a symlink to a file.
 # $(1): full path to source
-# $(2): target of the link
-# $(3): full path of the symlink
-# $(4): (optional) when set to true, $(2) is recognized as a path from the build root and
-#       thus -r option is used to link $(3) to $(2). Off by default.
+# $(2): source (may be relative)
+# $(3): full path to destination
 define symlink-file
 $(eval $(_symlink-file))
 endef
@@ -2860,9 +2884,7 @@
 	@echo "Symlink: $$@ -> $(2)"
 	@mkdir -p $(dir $$@)
 	@rm -rf $$@
-	$(if $(filter true,$(4)),\
-            $(hide) python -c "import os.path; import os; os.symlink(os.path.relpath('$(2)','$(dir $(3))'), '$$@')",\
-            $(hide) ln -sf $(2) $$@)
+	$(hide) ln -sf $(2) $$@
 endef
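A usage sketch of the simplified three-argument form (paths hypothetical): the rule depends on the full source path, while the link itself points at the second, possibly relative, argument, matching how base_rules.mk now creates installed-module symlinks.
# $(call symlink-file,$(TARGET_OUT)/lib64/libfoo.so,libfoo.so,$(TARGET_OUT)/lib64/libfoo-compat.so)
#   creates <system>/lib64/libfoo-compat.so -> libfoo.so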
 
 ###########################################################
@@ -3137,114 +3159,15 @@
 endef
 
 ###########################################################
-# Link type checking
-###########################################################
-define check-link-type
-$(hide) mkdir -p $(dir $@) && rm -f $@
-$(hide) $(CHECK_LINK_TYPE) --makefile $(PRIVATE_MAKEFILE) --module $(PRIVATE_MODULE) \
-  --type "$(PRIVATE_LINK_TYPE)" $(addprefix --allowed ,$(PRIVATE_ALLOWED_TYPES)) \
-  $(addprefix --warn ,$(PRIVATE_WARN_TYPES)) $(PRIVATE_DEPS)
-$(hide) echo "$(PRIVATE_LINK_TYPE)" >$@
-endef
-
-define link-type-partitions
-ifndef LOCAL_IS_HOST_MODULE
-ifneq (true,$(LOCAL_UNINSTALLABLE_MODULE))
-ifneq ($(filter $(TARGET_OUT_VENDOR)/%,$(my_module_path)),)
-$(1): PRIVATE_LINK_TYPE += partition:vendor
-$(1): PRIVATE_WARN_TYPES += partition:data
-$(1): PRIVATE_ALLOWED_TYPES += partition:vendor partition:oem partition:odm
-else ifneq ($(filter $(TARGET_OUT_OEM)/%,$(my_module_path)),)
-$(1): PRIVATE_LINK_TYPE += partition:oem
-$(1): PRIVATE_WARN_TYPES += partition:data
-$(1): PRIVATE_ALLOWED_TYPES += partition:vendor partition:oem partition:odm
-else ifneq ($(filter $(TARGET_OUT_ODM)/%,$(my_module_path)),)
-$(1): PRIVATE_LINK_TYPE += partition:odm
-$(1): PRIVATE_WARN_TYPES += partition:data
-$(1): PRIVATE_ALLOWED_TYPES += partition:vendor partition:oem partition:odm
-else ifneq ($(filter $(TARGET_OUT_DATA)/%,$(my_module_path)),)
-$(1): PRIVATE_LINK_TYPE += partition:data
-$(1): PRIVATE_ALLOWED_TYPES += partition:data partition:vendor partition:oem partition:odm
-else
-$(1): PRIVATE_WARN_TYPES += partition:vendor partition:oem partition:odm partition:data
-endif
-else # uninstallable module
-$(1): PRIVATE_ALLOWED_TYPES += partition:vendor partition:oem partition:odm partition:data
-endif
-endif
-endef
-
-###########################################################
-# Basic math functions for positive integers <= 100
-#
-# (SDK versions for example)
-###########################################################
-__MATH_NUMBERS :=  1  2  3  4  5  6  7  8  9 10 11 12 13 14 15 16 17 18 19 20 \
-                  21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 \
-                  41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 \
-                  61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 \
-                  81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100
-
-# Returns true if $(1) is a positive integer <= 100, otherwise returns nothing.
-define math_is_number
-$(strip \
-  $(if $(1),,$(error Argument missing)) \
-  $(if $(word 2,$(1)),$(error Multiple words in a single argument: $(1))) \
-  $(if $(filter $(1),$(__MATH_NUMBERS)),true))
-endef
-
-#$(warning true == $(call math_is_number,2))
-#$(warning == $(call math_is_number,foo))
-#$(call math_is_number,1 2)
-#$(call math_is_number,no 2)
-
-define _math_check_valid
-$(if $(call math_is_number,$(1)),,$(error Only positive integers <= 100 are supported (not $(1))))
-endef
-
-#$(call _math_check_valid,0)
-#$(call _math_check_valid,1)
-#$(call _math_check_valid,100)
-#$(call _math_check_valid,101)
-#$(call _math_check_valid,)
-#$(call _math_check_valid,1 2)
-
-# Returns the greater of $1 or $2.
-# If $1 or $2 is not a positive integer <= 100, then an error is generated.
-define math_max
-$(strip $(call _math_check_valid,$(1)) $(call _math_check_valid,$(2)) \
-  $(lastword $(filter $(1) $(2),$(__MATH_NUMBERS))))
-endef
-
-#$(call math_max)
-#$(call math_max,1)
-#$(call math_max,1 2,3)
-#$(warning 1 == $(call math_max,1,1))
-#$(warning 42 == $(call math_max,5,42))
-#$(warning 42 == $(call math_max,42,5))
-
-define math_gt_or_eq
-$(if $(filter $(1),$(call math_max,$(1),$(2))),true)
-endef
-
-#$(warning $(call math_gt_or_eq, 2, 1))
-#$(warning $(call math_gt_or_eq, 1, 1))
-#$(warning $(if $(call math_gt_or_eq, 1, 2),false,true))
-
-# $1 is the variable name to increment
-define inc_and_print
-$(strip $(eval $(1) := $($(1)) .)$(words $($(1))))
-endef
-
-###########################################################
 ## Compatibility suite tools
 ###########################################################
 
-# Return a list of output directories for a given suite and the current LOCAL_MODULE
+# Return a list of output directories for a given suite and the current LOCAL_MODULE.
+# Can be passed a subdirectory to use for the common testcase directory.
 define compatibility_suite_dirs
   $(strip \
     $(COMPATIBILITY_TESTCASES_OUT_$(1)) \
-    $($(my_prefix)OUT_TESTCASES)/$(LOCAL_MODULE))
+    $($(my_prefix)OUT_TESTCASES)/$(LOCAL_MODULE)$(2))
 endef
 
 # For each suite:
@@ -3254,13 +3177,148 @@
 # Requires for each suite: my_compat_dist_$(suite) to be defined.
 define create-suite-dependencies
 $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
-  $(eval my_compat_files_$(suite) := $(call copy-many-files, $(my_compat_dist_$(suite)))) \
   $(eval COMPATIBILITY.$(suite).FILES := \
-    $(COMPATIBILITY.$(suite).FILES) $(my_compat_files_$(suite))) \
-  $(eval $(my_all_targets) : $(my_compat_files_$(suite))))
+    $(COMPATIBILITY.$(suite).FILES) $(foreach f,$(my_compat_dist_$(suite)),$(call word-colon,2,$(f))))) \
+$(eval $(my_all_targets) : $(call copy-many-files, \
+  $(sort $(foreach suite,$(LOCAL_COMPATIBILITY_SUITE),$(my_compat_dist_$(suite))))))
 endef
 
 ###########################################################
+## Path Cleaning
+###########################################################
+
+# Remove "dir .." combinations (but keep ".. ..")
+#
+# $(1): The expanded path, where / is converted to ' ' to work with $(word)
+define _clean-path-strip-dotdot
+$(strip \
+  $(if $(word 2,$(1)),
+    $(if $(call streq,$(word 2,$(1)),..),
+      $(if $(call streq,$(word 1,$(1)),..),
+        $(word 1,$(1)) $(call _clean-path-strip-dotdot,$(wordlist 2,$(words $(1)),$(1)))
+      ,
+        $(call _clean-path-strip-dotdot,$(wordlist 3,$(words $(1)),$(1)))
+      )
+    ,
+      $(word 1,$(1)) $(call _clean-path-strip-dotdot,$(wordlist 2,$(words $(1)),$(1)))
+    )
+  ,
+    $(1)
+  )
+)
+endef
+
+# Remove any leading .. from the path (in case of /..)
+#
+# Should only be called if the original path started with /
+# $(1): The expanded path, where / is converted to ' ' to work with $(word)
+define _clean-path-strip-root-dotdots
+$(strip $(if $(call streq,$(firstword $(1)),..),
+  $(call _clean-path-strip-root-dotdots,$(wordlist 2,$(words $(1)),$(1))),
+  $(1)))
+endef
+
+# Call _clean-path-strip-dotdot until the path stops changing
+# $(1): Non-empty if this path started with a /
+# $(2): The expanded path, where / is converted to ' ' to work with $(word)
+define _clean-path-expanded
+$(strip \
+  $(eval _ep := $(call _clean-path-strip-dotdot,$(2)))
+  $(if $(1),$(eval _ep := $(call _clean-path-strip-root-dotdots,$(_ep))))
+  $(if $(call streq,$(2),$(_ep)),
+    $(_ep),
+    $(call _clean-path-expanded,$(1),$(_ep))))
+endef
+
+# Clean the file path -- remove //, dir/.., extra .
+#
+# This should have the same semantics as golang's filepath.Clean
+#
+# $(1): The file path to clean
+define clean-path
+$(strip \
+  $(if $(call streq,$(words $(1)),1),
+    $(eval _rooted := $(filter /%,$(1)))
+    $(eval _expanded_path := $(filter-out .,$(subst /,$(space),$(1))))
+    $(eval _path := $(if $(_rooted),/)$(subst $(space),/,$(call _clean-path-expanded,$(_rooted),$(_expanded_path))))
+    $(if $(_path),
+      $(_path),
+      .
+     )
+  ,
+    $(if $(call streq,$(words $(1)),0),
+      .,
+      $(error Call clean-path with only one path (without spaces))
+    )
+  )
+)
+endef
+
+ifeq ($(TEST_MAKE_clean_path),true)
+  define my_test
+    $(if $(call streq,$(call clean-path,$(1)),$(2)),,
+      $(eval my_failed := true)
+      $(warning clean-path test '$(1)': expected '$(2)', got '$(call clean-path,$(1))'))
+  endef
+  my_failed :=
+
+  # Already clean
+  $(call my_test,abc,abc)
+  $(call my_test,abc/def,abc/def)
+  $(call my_test,a/b/c,a/b/c)
+  $(call my_test,.,.)
+  $(call my_test,..,..)
+  $(call my_test,../..,../..)
+  $(call my_test,../../abc,../../abc)
+  $(call my_test,/abc,/abc)
+  $(call my_test,/,/)
+
+  # Empty is current dir
+  $(call my_test,,.)
+
+  # Remove trailing slash
+  $(call my_test,abc/,abc)
+  $(call my_test,abc/def/,abc/def)
+  $(call my_test,a/b/c/,a/b/c)
+  $(call my_test,./,.)
+  $(call my_test,../,..)
+  $(call my_test,../../,../..)
+  $(call my_test,/abc/,/abc)
+
+  # Remove doubled slash
+  $(call my_test,abc//def//ghi,abc/def/ghi)
+  $(call my_test,//abc,/abc)
+  $(call my_test,///abc,/abc)
+  $(call my_test,//abc//,/abc)
+  $(call my_test,abc//,abc)
+
+  # Remove . elements
+  $(call my_test,abc/./def,abc/def)
+  $(call my_test,/./abc/def,/abc/def)
+  $(call my_test,abc/.,abc)
+
+  # Remove .. elements
+  $(call my_test,abc/def/ghi/../jkl,abc/def/jkl)
+  $(call my_test,abc/def/../ghi/../jkl,abc/jkl)
+  $(call my_test,abc/def/..,abc)
+  $(call my_test,abc/def/../..,.)
+  $(call my_test,/abc/def/../..,/)
+  $(call my_test,abc/def/../../..,..)
+  $(call my_test,/abc/def/../../..,/)
+  $(call my_test,abc/def/../../../ghi/jkl/../../../mno,../../mno)
+  $(call my_test,/../abc,/abc)
+
+  # Combinations
+  $(call my_test,abc/./../def,def)
+  $(call my_test,abc//./../def,def)
+  $(call my_test,abc/../../././../def,../../def)
+
+  ifdef my_failed
+    $(error failed clean-path test)
+  endif
+endif
+
+###########################################################
 ## Other includes
 ###########################################################
 
@@ -3303,3 +3361,40 @@
 #	  sed -e 's/#.*//' -e 's/^[^:]*: *//' -e 's/ *\\$$//' \
 #	      -e '/^$$/ d' -e 's/$$/ :/' < $*.d >> $*.P; \
 #	  rm -f $*.d
+
+
+###########################################################
+# Append the information to generate a RRO package for the
+# source module.
+#
+#  $(1): Source module name.
+#  $(2): Whether $(3) is a manifest package name or not.
+#  $(3): Manifest package name if $(2) is true.
+#        Otherwise, the path to the source module's
+#        AndroidManifest.xml file.
+#  $(4): Whether LOCAL_EXPORT_PACKAGE_RESOURCES is set or
+#        not for the source module.
+#  $(5): Resource overlay list.
+###########################################################
+define append_enforce_rro_sources
+  $(eval ENFORCE_RRO_SOURCES += \
+      $(strip $(1))||$(strip $(2))||$(strip $(3))||$(strip $(4))||$(call normalize-path-list, $(strip $(5))))
+endef
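+
+# Hedged usage sketch (module and overlay paths are hypothetical):
+#$(call append_enforce_rro_sources,Settings,false,packages/apps/Settings/AndroidManifest.xml,true,device/acme/overlay/packages/apps/Settings/res)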
+
+###########################################################
+# Generate all RRO packages for source modules stored in
+# ENFORCE_RRO_SOURCES
+###########################################################
+define generate_all_enforce_rro_packages
+$(foreach source,$(ENFORCE_RRO_SOURCES), \
+  $(eval _o := $(subst ||,$(space),$(source))) \
+  $(eval enforce_rro_source_module := $(word 1,$(_o))) \
+  $(eval enforce_rro_source_is_manifest_package_name := $(word 2,$(_o))) \
+  $(eval enforce_rro_source_manifest_package_info := $(word 3,$(_o))) \
+  $(eval enforce_rro_use_res_lib := $(word 4,$(_o))) \
+  $(eval enforce_rro_source_overlays := $(subst :, ,$(word 5,$(_o)))) \
+  $(eval enforce_rro_module := $(enforce_rro_source_module)__auto_generated_rro) \
+  $(eval include $(BUILD_SYSTEM)/generate_enforce_rro.mk) \
+  $(eval ALL_MODULES.$(enforce_rro_source_module).REQUIRED += $(enforce_rro_module)) \
+)
+endef
diff --git a/core/dex_preopt.mk b/core/dex_preopt.mk
index b107ded..0606c83 100644
--- a/core/dex_preopt.mk
+++ b/core/dex_preopt.mk
@@ -23,6 +23,9 @@
 # being used). To bundle everything one should set this to '%'
 SYSTEM_OTHER_ODEX_FILTER ?= app/% priv-app/%
 
+# Returns a non-empty value if the install path $(1) should be on system_other.
+install-on-system-other = $(filter-out $(PRODUCT_DEXPREOPT_SPEED_APPS) $(PRODUCT_SYSTEM_SERVER_APPS),$(basename $(notdir $(filter $(foreach f,$(SYSTEM_OTHER_ODEX_FILTER),$(TARGET_OUT)/$(f)),$(1)))))
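+# Rough illustration (app name is hypothetical): with the default filter above,
+# a module installed under $(TARGET_OUT)/app that is not listed in
+# PRODUCT_DEXPREOPT_SPEED_APPS or PRODUCT_SYSTEM_SERVER_APPS yields a non-empty
+# result, so its odex is placed on system_other.
+#$(warning $(call install-on-system-other,$(TARGET_OUT)/app/SomeApp/SomeApp.apk))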
+
 # The default values for pre-opting: always preopt PIC.
 # Conditional to building on linux, as dex2oat currently does not work on darwin.
 ifeq ($(HOST_OS),linux)
diff --git a/core/dex_preopt_libart.mk b/core/dex_preopt_libart.mk
index ffb888c..9db5dbf 100644
--- a/core/dex_preopt_libart.mk
+++ b/core/dex_preopt_libart.mk
@@ -61,7 +61,7 @@
 # $(2): the full install path (including file name) of the corresponding .apk.
 ifeq ($(BOARD_USES_SYSTEM_OTHER_ODEX),true)
 define get-odex-installed-file-path
-$(if $(filter $(foreach f,$(SYSTEM_OTHER_ODEX_FILTER),$(TARGET_OUT)/$(f)),$(2)),
+$(if $(call install-on-system-other, $(2)),
   $(call get-odex-file-path,$(1),$(patsubst $(TARGET_OUT)/%,$(TARGET_OUT_SYSTEM_OTHER)/%,$(2))),
   $(call get-odex-file-path,$(1),$(2)))
 endef
@@ -122,6 +122,7 @@
 	--runtime-arg -Xnorelocate --compile-pic \
 	--no-generate-debug-info --generate-build-id \
 	--abort-on-hard-verifier-error \
+	--force-determinism \
 	--no-inline-from=core-oj.jar \
 	$(PRIVATE_DEX_PREOPT_FLAGS) \
 	$(PRIVATE_ART_FILE_PREOPT_FLAGS) \
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index 4ffd6f1..b9c0fc6 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -42,7 +42,7 @@
 # If installing into system, and the odex files are being installed into system_other, don't strip.
 ifeq ($(BOARD_USES_SYSTEM_OTHER_ODEX),true)
 ifeq ($(LOCAL_DEX_PREOPT),true)
-ifneq ($(filter $(foreach f,$(SYSTEM_OTHER_ODEX_FILTER),$(TARGET_OUT)/$(f)),$(my_module_path)),)
+ifneq ($(call install-on-system-other, $(my_module_path)),)
 LOCAL_DEX_PREOPT := nostripping
 endif
 endif
@@ -128,17 +128,17 @@
 my_dex_location := $(patsubst $(PRODUCT_OUT)%,%,$(LOCAL_INSTALLED_MODULE))
 $(built_odex): $(my_built_profile)
 $(built_odex): PRIVATE_PROFILE_PREOPT_FLAGS := --profile-file=$(my_built_profile)
-$(my_built_profile): PRIVATE_INSTALLED_MODULE := $(LOCAL_INSTALLED_MODULE)
+$(my_built_profile): PRIVATE_BUILT_MODULE := $(LOCAL_BUILT_MODULE)
 $(my_built_profile): PRIVATE_DEX_LOCATION := $(my_dex_location)
 $(my_built_profile): PRIVATE_SOURCE_CLASSES := $(LOCAL_DEX_PREOPT_PROFILE_CLASS_LISTING)
 $(my_built_profile): $(LOCAL_DEX_PREOPT_PROFILE_CLASS_LISTING)
 $(my_built_profile): $(PROFMAN)
-$(my_built_profile): $(LOCAL_INSTALLED_MODULE)
+$(my_built_profile): $(LOCAL_BUILT_MODULE)
 $(my_built_profile):
 	$(hide) mkdir -p $(dir $@)
 	ANDROID_LOG_TAGS="*:e" $(PROFMAN) \
 		--create-profile-from=$(PRIVATE_SOURCE_CLASSES) \
-		--apk=$(PRIVATE_INSTALLED_MODULE) \
+		--apk=$(PRIVATE_BUILT_MODULE) \
 		--dex-location=$(PRIVATE_DEX_LOCATION) \
 		--reference-profile-file=$@
 else
@@ -151,6 +151,18 @@
 LOCAL_DEX_PREOPT_FLAGS := $(PRODUCT_DEX_PREOPT_DEFAULT_FLAGS)
 endif
 endif
+
+ifneq (,$(filter $(PRODUCT_SYSTEM_SERVER_JARS) $(PRODUCT_DEXPREOPT_SPEED_APPS) $(PRODUCT_SYSTEM_SERVER_APPS),$(LOCAL_MODULE)))
+  # Jars of system server, apps loaded into system server, and apps the product wants to be
+  # compiled with the 'speed' compiler filter.
+  LOCAL_DEX_PREOPT_FLAGS += --compiler-filter=speed
+else
+  # If no compiler filter is specified, default to 'quicken' to save on storage.
+  ifeq (,$(filter --compiler-filter=%, $(LOCAL_DEX_PREOPT_FLAGS)))
+    LOCAL_DEX_PREOPT_FLAGS += --compiler-filter=quicken
+  endif
+endif
+
 $(built_odex): PRIVATE_DEX_PREOPT_FLAGS := $(LOCAL_DEX_PREOPT_FLAGS)
 $(built_vdex): $(built_odex)
 $(built_art): $(built_odex)
@@ -169,7 +181,7 @@
 DEXPREOPT.$(LOCAL_MODULE).MULTILIB := $(LOCAL_MULTILIB)
 DEXPREOPT.$(LOCAL_MODULE).DEX_PREOPT_FLAGS := $(LOCAL_DEX_PREOPT_FLAGS)
 DEXPREOPT.$(LOCAL_MODULE).PRIVILEGED_MODULE := $(LOCAL_PRIVILEGED_MODULE)
-DEXPREOPT.$(LOCAL_MODULE).PROPRIETARY_MODULE := $(LOCAL_PROPRIETARY_MODULE)
+DEXPREOPT.$(LOCAL_MODULE).VENDOR_MODULE := $(LOCAL_VENDOR_MODULE)
 DEXPREOPT.$(LOCAL_MODULE).TARGET_ARCH := $(LOCAL_MODULE_TARGET_ARCH)
 DEXPREOPT.$(LOCAL_MODULE).INSTALLED := $(installed_odex)
 DEXPREOPT.$(LOCAL_MODULE).INSTALLED_STRIPPED := $(LOCAL_INSTALLED_MODULE)
diff --git a/core/distdir.mk b/core/distdir.mk
index 51ec46e..89c5966 100644
--- a/core/distdir.mk
+++ b/core/distdir.mk
@@ -17,16 +17,8 @@
 # When specifying "dist", the user has asked that we copy the important
 # files from this build into DIST_DIR.
 
-.PHONY: dist
-dist: ;
-
 dist_goal := $(strip $(filter dist,$(MAKECMDGOALS)))
 MAKECMDGOALS := $(strip $(filter-out dist,$(MAKECMDGOALS)))
-ifeq (,$(strip $(filter-out $(INTERNAL_MODIFIER_TARGETS),$(MAKECMDGOALS))))
-# The commandline was something like "make dist" or "make dist showcommands".
-# Add a dependency on a real target.
-dist: $(DEFAULT_GOAL)
-endif
 
 ifdef dist_goal
 
diff --git a/core/dpi_specific_apk.mk b/core/dpi_specific_apk.mk
index 1ca01d5..1b0be07 100644
--- a/core/dpi_specific_apk.mk
+++ b/core/dpi_specific_apk.mk
@@ -36,7 +36,7 @@
 $(built_dpi_apk): PRIVATE_DEX_FILE := $(built_dex)
 ifndef LOCAL_JACK_ENABLED
 # Use the jarjar processed archive as the initial package file.
-$(built_dpi_apk): PRIVATE_SOURCE_ARCHIVE := $(full_classes_jarjar_jar)
+$(built_dpi_apk): PRIVATE_SOURCE_ARCHIVE := $(full_classes_pre_proguard_jar)
 else
 $(built_dpi_apk): PRIVATE_JACK_INTERMEDIATES_DIR := $(intermediates.COMMON)/jack-rsc
 endif # LOCAL_JACK_ENABLED
diff --git a/core/droiddoc.mk b/core/droiddoc.mk
index a70ab03..eb12630 100644
--- a/core/droiddoc.mk
+++ b/core/droiddoc.mk
@@ -130,7 +130,13 @@
 ##
 
 droiddoc_templates := \
-    $(sort $(shell find $(LOCAL_DROIDDOC_CUSTOM_TEMPLATE_DIR) -type f))
+    $(sort $(shell find $(LOCAL_DROIDDOC_CUSTOM_TEMPLATE_DIR) -type f $(if $(ALLOW_MISSING_DEPENDENCIES),2>/dev/null)))
+
+ifdef ALLOW_MISSING_DEPENDENCIES
+  ifndef droiddoc_templates
+    droiddoc_templates := $(LOCAL_DROIDDOC_CUSTOM_TEMPLATE_DIR)
+  endif
+endif
 
 droiddoc := \
 	$(HOST_JDK_TOOLS_JAR) \
@@ -175,6 +181,7 @@
 	$(hide) ( \
 		javadoc \
                 -encoding UTF-8 \
+                -source 1.8 \
                 \@$(PRIVATE_SRC_LIST_FILE) \
                 -J-Xmx1600m \
                 -XDignore.symbol.file \
@@ -215,7 +222,7 @@
                 \@$(PRIVATE_SRC_LIST_FILE) \
                 -J-Xmx1024m \
                 -XDignore.symbol.file \
-                $(if $(LEGACY_USE_JAVA7),,-Xdoclint:none) \
+                -Xdoclint:none \
                 $(PRIVATE_PROFILING_OPTIONS) \
                 $(addprefix -classpath ,$(PRIVATE_CLASSPATH)) \
                 $(addprefix -bootclasspath ,$(PRIVATE_BOOTCLASSPATH)) \
diff --git a/core/dumpvar.mk b/core/dumpvar.mk
index 74ea3ff..acae48e 100644
--- a/core/dumpvar.mk
+++ b/core/dumpvar.mk
@@ -6,6 +6,7 @@
   TARGET_PRODUCT \
   TARGET_BUILD_VARIANT \
   TARGET_BUILD_TYPE \
+  TARGET_PLATFORM_VERSION \
   TARGET_BUILD_APPS \
   TARGET_ARCH \
   TARGET_ARCH_VARIANT \
diff --git a/core/dynamic_binary.mk b/core/dynamic_binary.mk
index 579338e..949793b 100644
--- a/core/dynamic_binary.mk
+++ b/core/dynamic_binary.mk
@@ -23,7 +23,7 @@
 # The basename of this target must be the same as the final output
 # binary name, because it's used to set the "soname" in the binary.
 # The includer of this file will define a rule to build this target.
-linked_module := $(intermediates)/LINKED/$(my_built_module_stem)
+linked_module := $(intermediates)/LINKED/$(notdir $(my_installed_module_stem))
 
 ALL_ORIGINAL_DYNAMIC_BINARIES += $(linked_module)
 
diff --git a/core/envsetup.mk b/core/envsetup.mk
index 3430a49..27cfd7f 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -11,16 +11,72 @@
 #         This can be useful if you set OUT_DIR to be a different directory
 #         than other outputs of your build system.
 
+# Returns all words in $1 up to and including $2
+define find_and_earlier
+  $(strip $(if $(1),
+    $(firstword $(1))
+    $(if $(filter $(firstword $(1)),$(2)),,
+      $(call find_and_earlier,$(wordlist 2,$(words $(1)),$(1)),$(2)))))
+endef
+
+#$(warning $(call find_and_earlier,A B C,A))
+#$(warning $(call find_and_earlier,A B C,B))
+#$(warning $(call find_and_earlier,A B C,C))
+#$(warning $(call find_and_earlier,A B C,D))
+
+define version-list
+$(1)PR1 $(1)PD1 $(1)PD2 $(1)PM1 $(1)PM2
+endef
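+
+# For example (sketch):
+#$(warning OPR1 OPD1 OPD2 OPM1 OPM2 == $(call version-list,O))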
+
+ALL_VERSIONS := O P Q R S T U V W X Y Z
+ALL_VERSIONS := $(foreach v,$(ALL_VERSIONS),$(call version-list,$(v)))
+
+# Filters ALL_VERSIONS down to the range [$1, $2], and errors if $1 > $2 or $3 is
+# not in [$1, $2]
+# $(1): min platform version
+# $(2): max platform version
+# $(3): default platform version
+define allowed-platform-versions
+$(strip \
+  $(if $(filter $(ALL_VERSIONS),$(1)),,
+    $(error Invalid MIN_PLATFORM_VERSION '$(1)'))
+  $(if $(filter $(ALL_VERSIONS),$(2)),,
+    $(error Invalid MAX_PLATFORM_VERSION '$(2)'))
+  $(if $(filter $(ALL_VERSIONS),$(3)),,
+    $(error Invalid DEFAULT_PLATFORM_VERSION '$(3)'))
+
+  $(eval allowed_versions_ := $(call find_and_earlier,$(ALL_VERSIONS),$(2)))
+
+  $(if $(filter $(allowed_versions_),$(1)),,
+    $(error MIN_PLATFORM_VERSION '$(1)' must be before MAX_PLATFORM_VERSION '$(2)'))
+
+  $(eval allowed_versions_ := $(1) \
+    $(filter-out $(call find_and_earlier,$(allowed_versions_),$(1)),$(allowed_versions_)))
+
+  $(if $(filter $(allowed_versions_),$(3)),,
+    $(error DEFAULT_PLATFORM_VERSION '$(3)' must be between MIN_PLATFORM_VERSION '$(1)' and MAX_PLATFORM_VERSION '$(2)'))
+
+  $(allowed_versions_))
+endef
+
+#$(warning $(call allowed-platform-versions,OPR1,PPR1,OPR1))
+#$(warning $(call allowed-platform-versions,OPM1,PPR1,OPR1))
+
 # Set up version information.
 include $(BUILD_SYSTEM)/version_defaults.mk
 
+ENABLED_VERSIONS := $(call find_and_earlier,$(ALL_VERSIONS),$(TARGET_PLATFORM_VERSION))
+
+$(foreach v,$(ENABLED_VERSIONS), \
+  $(eval IS_AT_LEAST_$(v) := true))
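+
+# Downstream makefiles can then guard version-dependent logic, e.g. (sketch;
+# the guarded body is hypothetical):
+#   ifdef IS_AT_LEAST_OPR1
+#     # logic that requires the O release or newer
+#   endif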
+
 # ---------------------------------------------------------------
 # If you update the build system such that the environment setup
 # or buildspec.mk need to be updated, increment this number, and
 # people who haven't re-run those will have to do so before they
 # can build.  Make sure to also update the corresponding value in
 # buildspec.mk.default and envsetup.sh.
-CORRECT_BUILD_ENV_SEQUENCE_NUMBER := 12
+CORRECT_BUILD_ENV_SEQUENCE_NUMBER := 13
 
 # ---------------------------------------------------------------
 # The product defaults to generic on hardware
@@ -112,10 +168,17 @@
 TARGET_COPY_OUT_SYSTEM := system
 TARGET_COPY_OUT_SYSTEM_OTHER := system_other
 TARGET_COPY_OUT_DATA := data
+TARGET_COPY_OUT_ASAN := $(TARGET_COPY_OUT_DATA)/asan
 TARGET_COPY_OUT_OEM := oem
 TARGET_COPY_OUT_ODM := odm
 TARGET_COPY_OUT_ROOT := root
 TARGET_COPY_OUT_RECOVERY := recovery
+
+# Returns the non-sanitized version of the path provided in $1.
+define get_non_asan_path
+$(patsubst $(PRODUCT_OUT)/$(TARGET_COPY_OUT_ASAN)/%,$(PRODUCT_OUT)/%,$1)
+endef
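+
+# Sketch (library name is hypothetical): in a SANITIZE_TARGET=address build,
+# $(PRODUCT_OUT)/data/asan/system/lib/libfoo.so maps back to
+# $(PRODUCT_OUT)/system/lib/libfoo.so.
+#$(warning $(call get_non_asan_path,$(PRODUCT_OUT)/$(TARGET_COPY_OUT_ASAN)/system/lib/libfoo.so))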
+
 ###########################################
 # Define TARGET_COPY_OUT_VENDOR to a placeholder, for at this point
 # we don't know if the device wants to build a separate vendor.img
@@ -127,6 +190,13 @@
 TARGET_COPY_OUT_VENDOR := $(_vendor_path_placeholder)
 ###########################################
 
+#################################################################
+# Set up the minimal BOOTCLASSPATH list of jars needed to build and
+# execute Java code with dalvikvm/ART.
+TARGET_CORE_JARS := core-oj core-libart conscrypt okhttp bouncycastle apache-xml
+HOST_CORE_JARS := $(addsuffix -hostdex,$(TARGET_CORE_JARS))
+#################################################################
+
 # Read the product specs so we can get TARGET_DEVICE and other
 # variables that we need in order to locate the output files.
 include $(BUILD_SYSTEM)/product_config.mk
@@ -198,6 +268,13 @@
   endif
 endif
 
+# Check BOARD_VNDK_VERSION
+ifdef BOARD_VNDK_VERSION
+  ifneq ($(BOARD_VNDK_VERSION),current)
+    $(error BOARD_VNDK_VERSION: Only "current" is implemented)
+  endif
+endif
+
 # ---------------------------------------------------------------
 # Set up configuration for target machine.
 # The following must be set:
@@ -320,17 +397,26 @@
 $(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_OUT_EXECUTABLES := $(HOST_CROSS_OUT_EXECUTABLES)
 $(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_OUT_NATIVE_TESTS := $(HOST_CROSS_OUT)/nativetest64
 
-TARGET_OUT_INTERMEDIATES := $(PRODUCT_OUT)/obj
+ifneq ($(filter address,$(SANITIZE_TARGET)),)
+  TARGET_OUT_INTERMEDIATES := $(PRODUCT_OUT)/obj_asan
+else
+  TARGET_OUT_INTERMEDIATES := $(PRODUCT_OUT)/obj
+endif
 TARGET_OUT_HEADERS := $(TARGET_OUT_INTERMEDIATES)/include
 TARGET_OUT_INTERMEDIATE_LIBRARIES := $(TARGET_OUT_INTERMEDIATES)/lib
-TARGET_OUT_COMMON_INTERMEDIATES := $(TARGET_COMMON_OUT_ROOT)/obj
+
+ifneq ($(filter address,$(SANITIZE_TARGET)),)
+  TARGET_OUT_COMMON_INTERMEDIATES := $(TARGET_COMMON_OUT_ROOT)/obj_asan
+else
+  TARGET_OUT_COMMON_INTERMEDIATES := $(TARGET_COMMON_OUT_ROOT)/obj
+endif
 
 TARGET_OUT_GEN := $(PRODUCT_OUT)/gen
 TARGET_OUT_COMMON_GEN := $(TARGET_COMMON_OUT_ROOT)/gen
 
 TARGET_OUT := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_SYSTEM)
 ifneq ($(filter address,$(SANITIZE_TARGET)),)
-target_out_shared_libraries_base := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_DATA)
+target_out_shared_libraries_base := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_ASAN)/system
 else
 target_out_shared_libraries_base := $(TARGET_OUT)
 endif
@@ -365,7 +451,12 @@
 else
 TARGET_2ND_ARCH_MODULE_SUFFIX := $(HOST_2ND_ARCH_MODULE_SUFFIX)
 endif
-$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATES := $(PRODUCT_OUT)/obj_$(TARGET_2ND_ARCH)
+
+ifneq ($(filter address,$(SANITIZE_TARGET)),)
+  $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATES := $(PRODUCT_OUT)/obj_$(TARGET_2ND_ARCH)_asan
+else
+  $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATES := $(PRODUCT_OUT)/obj_$(TARGET_2ND_ARCH)
+endif
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES := $($(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATES)/lib
 ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_SHARED_LIBRARIES := $(target_out_shared_libraries_base)/lib/$(TARGET_2ND_ARCH)
@@ -410,7 +501,7 @@
 
 TARGET_OUT_VENDOR := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_VENDOR)
 ifneq ($(filter address,$(SANITIZE_TARGET)),)
-target_out_vendor_shared_libraries_base := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_DATA)/vendor
+target_out_vendor_shared_libraries_base := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_ASAN)/vendor
 else
 target_out_vendor_shared_libraries_base := $(TARGET_OUT_VENDOR)
 endif
diff --git a/core/executable_internal.mk b/core/executable_internal.mk
index f51ddb8..3509bd2 100644
--- a/core/executable_internal.mk
+++ b/core/executable_internal.mk
@@ -44,12 +44,16 @@
 my_target_crtbegin_dynamic_o :=
 my_target_crtbegin_static_o :=
 my_target_crtend_o :=
+else ifdef LOCAL_USE_VNDK
+my_target_crtbegin_dynamic_o := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtbegin_dynamic.vendor.o
+my_target_crtbegin_static_o := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtbegin_static.vendor.o
+my_target_crtend_o := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtend_android.vendor.o
 else
 my_target_crtbegin_dynamic_o := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtbegin_dynamic.o
 my_target_crtbegin_static_o := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtbegin_static.o
 my_target_crtend_o := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtend_android.o
 endif
-ifneq ($(LOCAL_SDK_VERSION)$(LOCAL_USE_VNDK),)
+ifneq ($(LOCAL_SDK_VERSION),)
 my_target_crtbegin_dynamic_o := $(wildcard $(my_ndk_sysroot_lib)/crtbegin_dynamic.o)
 my_target_crtbegin_static_o := $(wildcard $(my_ndk_sysroot_lib)/crtbegin_static.o)
 my_target_crtend_o := $(wildcard $(my_ndk_sysroot_lib)/crtend_android.o)
diff --git a/core/fuzz_test.mk b/core/fuzz_test.mk
index f6d6e9a..4d41871 100644
--- a/core/fuzz_test.mk
+++ b/core/fuzz_test.mk
@@ -8,8 +8,8 @@
     $(error $(LOCAL_PATH): $(LOCAL_MODULE): NDK fuzz tests are not supported.)
 endif
 
-LOCAL_CFLAGS += -fsanitize-coverage=edge,indirect-calls,8bit-counters,trace-cmp
-LOCAL_STATIC_LIBRARIES += libLLVMFuzzer
+LOCAL_CFLAGS += -fsanitize-coverage=trace-pc-guard
+LOCAL_STATIC_LIBRARIES += libFuzzer
 
 ifdef LOCAL_MODULE_PATH
 $(error $(LOCAL_PATH): Do not set LOCAL_MODULE_PATH when building test $(LOCAL_MODULE))
diff --git a/core/generate_enforce_rro.mk b/core/generate_enforce_rro.mk
new file mode 100644
index 0000000..579089c
--- /dev/null
+++ b/core/generate_enforce_rro.mk
@@ -0,0 +1,30 @@
+include $(CLEAR_VARS)
+
+LOCAL_PACKAGE_NAME := $(enforce_rro_module)
+
+intermediates := $(call intermediates-dir-for,APPS,$(LOCAL_PACKAGE_NAME),,COMMON)
+rro_android_manifest_file := $(intermediates)/AndroidManifest.xml
+
+ifeq (true,$(enforce_rro_source_is_manifest_package_name))
+$(rro_android_manifest_file): PRIVATE_PACKAGE_NAME := $(enforce_rro_source_manifest_package_info)
+$(rro_android_manifest_file): build/tools/generate-enforce-rro-android-manifest.py
+	$(hide) build/tools/generate-enforce-rro-android-manifest.py -u -p $(PRIVATE_PACKAGE_NAME) -o $@
+else
+$(rro_android_manifest_file): PRIVATE_SOURCE_MANIFEST_FILE := $(enforce_rro_source_manifest_package_info)
+$(rro_android_manifest_file): $(enforce_rro_source_manifest_package_info) build/tools/generate-enforce-rro-android-manifest.py
+	$(hide) build/tools/generate-enforce-rro-android-manifest.py -p $(PRIVATE_SOURCE_MANIFEST_FILE) -o $@
+endif
+
+LOCAL_PATH:= $(intermediates)
+
+ifeq ($(enforce_rro_use_res_lib),true)
+LOCAL_RES_LIBRARIES := $(enforce_rro_source_module)
+endif
+
+LOCAL_FULL_MANIFEST_FILE := $(rro_android_manifest_file)
+LOCAL_CERTIFICATE := platform
+
+LOCAL_AAPT_FLAGS += --auto-add-overlay
+LOCAL_RESOURCE_DIR := $(enforce_rro_source_overlays)
+
+include $(BUILD_RRO_PACKAGE)
diff --git a/core/goma.mk b/core/goma.mk
index 4e8318a..2fb37a7 100644
--- a/core/goma.mk
+++ b/core/goma.mk
@@ -45,6 +45,7 @@
   # use both ccache and gomacc.
   CC_WRAPPER := $(strip $(CC_WRAPPER) $(GOMA_CC))
   CXX_WRAPPER := $(strip $(CXX_WRAPPER) $(GOMA_CC))
+  JAVAC_WRAPPER := $(strip $(JAVAC_WRAPPER) $(GOMA_CC))
 
   # gomacc can start goma client's daemon process automatically, but
   # it is safer and faster to start up it beforehand. We run this as a
diff --git a/core/host_dalvik_java_library.mk b/core/host_dalvik_java_library.mk
index ca45cef..7101229 100644
--- a/core/host_dalvik_java_library.mk
+++ b/core/host_dalvik_java_library.mk
@@ -86,7 +86,7 @@
 ifndef LOCAL_JACK_ENABLED
 
 $(full_classes_compiled_jar): PRIVATE_JAVA_LAYERS_FILE := $(layers_file)
-$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(GLOBAL_JAVAC_DEBUG_FLAGS) $(LOCAL_JAVACFLAGS)
+$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(GLOBAL_JAVAC_DEBUG_FLAGS) $(LOCAL_JAVACFLAGS) $(annotation_processor_flags)
 $(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES :=
 $(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES :=
 $(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_PACKAGES :=
@@ -96,9 +96,10 @@
         $(full_java_lib_deps) \
         $(jar_manifest_file) \
         $(proto_java_sources_file_stamp) \
+        $(annotation_processor_deps) \
         $(NORMALIZE_PATH) \
-        $(JAVAC_FILTER) \
-        $(LOCAL_ADDITIONAL_DEPENDENCIES)
+        $(LOCAL_ADDITIONAL_DEPENDENCIES) \
+        | $(SOONG_JAVAC_WRAPPER)
 	$(transform-host-java-to-package)
 
 my_desugaring :=
@@ -120,14 +121,10 @@
 	@echo JarJar: $@
 	$(hide) java -jar $(JARJAR) process $(PRIVATE_JARJAR_RULES) $< $@
 else
-$(full_classes_jarjar_jar): $(full_classes_desugar_jar) | $(ACP)
-	@echo Copying: $@
-	$(hide) $(ACP) -fp $< $@
+full_classes_jarjar_jar := $(full_classes_desugar_jar)
 endif
 
-$(full_classes_jar): $(full_classes_jarjar_jar) | $(ACP)
-	@echo Copying: $@
-	$(hide) $(ACP) -fp $< $@
+$(eval $(call copy-one-file,$(full_classes_jarjar_jar),$(full_classes_jar)))
 
 ifeq ($(LOCAL_IS_STATIC_JAVA_LIBRARY),true)
 # No dex; all we want are the .class files with resources.
diff --git a/core/host_java_library.mk b/core/host_java_library.mk
index ba2999f..d30c90d 100644
--- a/core/host_java_library.mk
+++ b/core/host_java_library.mk
@@ -37,6 +37,7 @@
 # emma is hardcoded to use the leaf name of its input for the output file --
 # only the output directory can be changed
 full_classes_emma_jar := $(emma_intermediates_dir)/lib/$(notdir $(full_classes_jarjar_jar))
+full_classes_jar := $(intermediates.COMMON)/classes.jar
 
 LOCAL_INTERMEDIATE_TARGETS += \
     $(full_classes_compiled_jar) \
@@ -57,8 +58,13 @@
 # Run build/tools/java-layers.py for more details.
 layers_file := $(addprefix $(LOCAL_PATH)/, $(LOCAL_JAVA_LAYERS_FILE))
 
+# If error prone is enabled then add LOCAL_ERROR_PRONE_FLAGS to LOCAL_JAVACFLAGS
+ifeq ($(RUN_ERROR_PRONE),true)
+LOCAL_JAVACFLAGS += $(LOCAL_ERROR_PRONE_FLAGS)
+endif
+
 $(full_classes_compiled_jar): PRIVATE_JAVA_LAYERS_FILE := $(layers_file)
-$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(GLOBAL_JAVAC_DEBUG_FLAGS) $(LOCAL_JAVACFLAGS)
+$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(GLOBAL_JAVAC_DEBUG_FLAGS) $(LOCAL_JAVACFLAGS) $(annotation_processor_flags)
 $(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES :=
 $(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES :=
 $(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_PACKAGES :=
@@ -68,10 +74,13 @@
         $(full_java_lib_deps) \
         $(jar_manifest_file) \
         $(proto_java_sources_file_stamp) \
+        $(annotation_processor_deps) \
         $(NORMALIZE_PATH) \
-        $(JAVAC_FILTER) \
-        $(LOCAL_ADDITIONAL_DEPENDENCIES)
+        $(ZIPTIME) \
+        $(LOCAL_ADDITIONAL_DEPENDENCIES) \
+        | $(SOONG_JAVAC_WRAPPER)
 	$(transform-host-java-to-package)
+	$(remove-timestamps-from-package)
 
 javac-check : $(full_classes_compiled_jar)
 javac-check-$(LOCAL_MODULE) : $(full_classes_compiled_jar)
@@ -83,9 +92,7 @@
 	@echo JarJar: $@
 	$(hide) java -jar $(JARJAR) process $(PRIVATE_JARJAR_RULES) $< $@
 else
-$(full_classes_jarjar_jar): $(full_classes_compiled_jar) | $(ACP)
-	@echo Copying: $@
-	$(hide) $(ACP) -fp $< $@
+full_classes_jarjar_jar := $(full_classes_compiled_jar)
 endif
 
 ifeq (true,$(LOCAL_EMMA_INSTRUMENT))
@@ -102,13 +109,9 @@
 # $(full_classes_emma_jar)
 $(full_classes_emma_jar) : $(full_classes_jarjar_jar) | $(EMMA_JAR)
 	$(transform-classes.jar-to-emma)
-
-$(LOCAL_BUILT_MODULE) : $(full_classes_emma_jar)
-	@echo Copying: $@
-	$(hide) $(ACP) -fp $< $@
-
 else # LOCAL_EMMA_INSTRUMENT
-$(LOCAL_BUILT_MODULE) : $(full_classes_jarjar_jar) | $(ACP)
-	@echo Copying: $@
-	$(hide) $(ACP) -fp $< $@
+full_classes_emma_jar := $(full_classes_jarjar_jar)
 endif # LOCAL_EMMA_INSTRUMENT
+
+$(eval $(call copy-one-file,$(full_classes_emma_jar),$(LOCAL_BUILT_MODULE)))
+$(eval $(call copy-one-file,$(full_classes_emma_jar),$(full_classes_jar)))
diff --git a/core/install_jni_libs.mk b/core/install_jni_libs.mk
index 625a8a2..35f4f55 100644
--- a/core/install_jni_libs.mk
+++ b/core/install_jni_libs.mk
@@ -18,9 +18,19 @@
 ifneq ($(filter tests samples, $(LOCAL_MODULE_TAGS)),)
 my_embed_jni := true
 endif
-ifeq ($(filter $(TARGET_OUT)/% $(TARGET_OUT_VENDOR)/% $(TARGET_OUT_OEM)/%, $(my_module_path)),)
-# If this app isn't to be installed to system partitions.
-my_embed_jni := true
+ifneq ($(BOARD_VNDK_VERSION),)
+  ifeq ($(filter $(TARGET_OUT)/%, $(my_module_path)),)
+    # If this app isn't to be installed to the system partition, and the device
+    # is fully Treble-ized, then the JNI libs are embedded. Otherwise, access
+    # to the directory where the lib is installed (usually /vendor/lib) would
+    # need to be allowed for system processes, which is a Treble violation.
+    my_embed_jni := true
+  endif
+else
+  ifeq ($(filter $(TARGET_OUT)/% $(TARGET_OUT_VENDOR)/% $(TARGET_OUT_OEM)/%, $(my_module_path)),)
+    # If this app isn't to be installed to system, vendor, or oem partitions.
+    my_embed_jni := true
+  endif
 endif
 
 jni_shared_libraries :=
diff --git a/core/install_jni_libs_internal.mk b/core/install_jni_libs_internal.mk
index c5804a4..0e92153 100644
--- a/core/install_jni_libs_internal.mk
+++ b/core/install_jni_libs_internal.mk
@@ -54,7 +54,8 @@
 # The JNI libraries will be installed to system.img.
 my_jni_filenames := $(notdir $(my_jni_shared_libraries))
 # Make sure the JNI libraries get installed
-my_shared_library_path := $($(my_2nd_arch_prefix)TARGET_OUT$(partition_tag)_SHARED_LIBRARIES)
+my_shared_library_path := $(call get_non_asan_path,\
+  $($(my_2nd_arch_prefix)TARGET_OUT$(partition_tag)_SHARED_LIBRARIES))
 # Do not use an order-only dependency, because we want to rebuild the image if a JNI lib is updated.
 $(LOCAL_INSTALLED_MODULE) : $(addprefix $(my_shared_library_path)/, $(my_jni_filenames))
 
@@ -108,30 +109,18 @@
 
 # Verify that all included libraries are built against the NDK
 ifneq ($(strip $(LOCAL_JNI_SHARED_LIBRARIES)),)
-my_link_type := $(call intermediates-dir-for,APPS,$(LOCAL_MODULE))/$(my_2nd_arch_prefix)jni_link_type
-all_link_types: $(my_link_type)
-my_link_type_deps := $(strip \
-  $(foreach l,$(LOCAL_JNI_SHARED_LIBRARIES),\
-    $(call intermediates-dir-for,SHARED_LIBRARIES,$(l),,,$(my_2nd_arch_prefix))/link_type))
 ifneq ($(LOCAL_SDK_VERSION),)
-$(my_link_type): PRIVATE_LINK_TYPE := app:sdk
-$(my_link_type): PRIVATE_WARN_TYPES := native:platform
-$(my_link_type): PRIVATE_ALLOWED_TYPES := native:ndk
+my_link_type := app:sdk
+my_warn_types := native:platform
+my_allowed_types := native:ndk
 else
-$(my_link_type): PRIVATE_LINK_TYPE := app:platform
-$(my_link_type): PRIVATE_WARN_TYPES :=
-$(my_link_type): PRIVATE_ALLOWED_TYPES := native:ndk native:platform
+my_link_type := app:platform
+my_warn_types :=
+my_allowed_types := native:ndk native:platform
 endif
-$(eval $(call link-type-partitions,$(my_link_type)))
-$(my_link_type): PRIVATE_DEPS := $(my_link_type_deps)
-$(my_link_type): PRIVATE_MODULE := $(LOCAL_MODULE)
-$(my_link_type): PRIVATE_MAKEFILE := $(LOCAL_MODULE_MAKEFILE)
-$(my_link_type): $(my_link_type_deps) $(CHECK_LINK_TYPE)
-	@echo Check JNI module types: $@
-	$(check-link-type)
 
-$(LOCAL_BUILT_MODULE): | $(my_link_type)
+my_link_deps := $(addprefix SHARED_LIBRARIES:,$(LOCAL_JNI_SHARED_LIBRARIES))
 
-my_link_type :=
-my_link_type_deps :=
+my_common :=
+include $(BUILD_SYSTEM)/link_type.mk
 endif
diff --git a/core/jack-default.args b/core/jack-default.args
index 0232301..433bc53 100644
--- a/core/jack-default.args
+++ b/core/jack-default.args
@@ -5,3 +5,5 @@
 -D jack.reporter.level.file=error=--,warning=-
 --verbose error
 -D jack.jayce.cache=false
+-D jack.lambda.grouping-scope=package
+-D jack.lambda.simplify-stateless=true
diff --git a/core/java.mk b/core/java.mk
index 1835983..8dc4b22 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -109,21 +109,16 @@
 LOCAL_PROGUARD_ENABLED :=
 endif
 
-ifdef LOCAL_PROGUARD_ENABLED
-proguard_jar_leaf := proguard.classes.jar
-else
-proguard_jar_leaf := noproguard.classes.jar
-endif
-
 full_classes_compiled_jar := $(intermediates.COMMON)/$(full_classes_compiled_jar_leaf)
-full_classes_desugar_jar := $(intermediates.COMMON)/desugar.classes.jar
+full_classes_processed_jar := $(intermediates.COMMON)/classes-processed.jar
+full_classes_desugar_jar := $(intermediates.COMMON)/classes-desugar.jar
 jarjar_leaf := classes-jarjar.jar
 full_classes_jarjar_jar := $(intermediates.COMMON)/$(jarjar_leaf)
 emma_intermediates_dir := $(intermediates.COMMON)/emma_out
 # emma is hardcoded to use the leaf name of its input for the output file --
 # only the output directory can be changed
 full_classes_emma_jar := $(emma_intermediates_dir)/lib/$(jarjar_leaf)
-full_classes_proguard_jar := $(intermediates.COMMON)/$(proguard_jar_leaf)
+full_classes_proguard_jar := $(intermediates.COMMON)/classes-proguard.jar
 built_dex_intermediate := $(intermediates.COMMON)/$(built_dex_intermediate_leaf)/classes.dex
 full_classes_stubs_jar := $(intermediates.COMMON)/stubs.jar
 
@@ -346,13 +341,13 @@
 ifndef LOCAL_CHECKED_MODULE
 ifdef full_classes_jar
 ifdef LOCAL_JACK_ENABLED
-LOCAL_CHECKED_MODULE := $(jack_check_timestamp)
-else
-ifeq ($(LOCAL_IS_STATIC_JAVA_LIBRARY),true)
+ifeq ($(LOCAL_JACK_ENABLED),javac_frontend)
 LOCAL_CHECKED_MODULE := $(full_classes_compiled_jar)
 else
-LOCAL_CHECKED_MODULE := $(built_dex)
+LOCAL_CHECKED_MODULE := $(jack_check_timestamp)
 endif
+else
+LOCAL_CHECKED_MODULE := $(full_classes_compiled_jar)
 endif
 endif
 endif
@@ -411,15 +406,7 @@
 # Droiddoc isn't currently able to generate stubs for modules, so we're just
 # allowing it to use the classes.jar as the "stubs" that would be used to link
 # against, for the cases where someone needs the jar to link against.
-# - Use the classes.jar instead of the handful of other intermediates that
-#   we have, because it's the most processed, but still hasn't had dex run on
-#   it, so it's closest to what's on the device.
-# - This extra copy, with the dependency on LOCAL_BUILT_MODULE allows the
-#   PRIVATE_ vars to be preserved.
-$(full_classes_stubs_jar): PRIVATE_SOURCE_FILE := $(full_classes_jar)
-$(full_classes_stubs_jar) : $(full_classes_jar) | $(ACP)
-	@echo Copying $(PRIVATE_SOURCE_FILE)
-	$(hide) $(ACP) -fp $(PRIVATE_SOURCE_FILE) $@
+$(eval $(call copy-one-file,$(full_classes_jar),$(full_classes_stubs_jar)))
 ALL_MODULES.$(LOCAL_MODULE).STUBS := $(full_classes_stubs_jar)
 
 # The layers file allows you to enforce a layering between java packages.
@@ -432,7 +419,13 @@
 # This intentionally depends on java_sources, not all_java_sources.
 # Deps for generated source files must be handled separately,
 # via deps on the target that generates the sources.
-$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(GLOBAL_JAVAC_DEBUG_FLAGS) $(LOCAL_JAVACFLAGS)
+
+# If error prone is enabled then add LOCAL_ERROR_PRONE_FLAGS to LOCAL_JAVACFLAGS
+ifeq ($(RUN_ERROR_PRONE),true)
+LOCAL_JAVACFLAGS += $(LOCAL_ERROR_PRONE_FLAGS)
+endif
+
+$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(GLOBAL_JAVAC_DEBUG_FLAGS) $(LOCAL_JAVACFLAGS) $(annotation_processor_flags)
 $(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES := $(LOCAL_JAR_EXCLUDE_FILES)
 $(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES := $(LOCAL_JAR_PACKAGES)
 $(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_PACKAGES := $(LOCAL_JAR_EXCLUDE_PACKAGES)
@@ -445,38 +438,60 @@
         $(layers_file) \
         $(RenderScript_file_stamp) \
         $(proto_java_sources_file_stamp) \
+        $(annotation_processor_deps) \
         $(NORMALIZE_PATH) \
-        $(JAVAC_FILTER) \
-        $(LOCAL_ADDITIONAL_DEPENDENCIES)
+        $(LOCAL_ADDITIONAL_DEPENDENCIES) \
+        | $(SOONG_JAVAC_WRAPPER)
 	$(transform-java-to-classes.jar)
 
 javac-check : $(full_classes_compiled_jar)
 javac-check-$(LOCAL_MODULE) : $(full_classes_compiled_jar)
 
+ifdef LOCAL_JAR_PROCESSOR
+# LOCAL_JAR_PROCESSOR_ARGS must be evaluated here to set up the rule-local
+# PRIVATE_JAR_PROCESSOR_ARGS variable, but $< and $@ are not available yet.
+# Set ${in} and ${out} so they can be referenced by LOCAL_JAR_PROCESSOR_ARGS
+# using deferred evaluation (LOCAL_JAR_PROCESSOR_ARGS = instead of :=).
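+#
+# A hedged example from a module's Android.mk (processor name and flag names
+# are hypothetical); the deferred "=" lets $(in) and $(out) be substituted here:
+#   LOCAL_JAR_PROCESSOR := my-jar-processor
+#   LOCAL_JAR_PROCESSOR_ARGS = --input $(in) --output $(out)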
+in := $(full_classes_compiled_jar)
+out := $(full_classes_processed_jar).tmp
+$(full_classes_processed_jar): PRIVATE_JAR_PROCESSOR_ARGS := $(LOCAL_JAR_PROCESSOR_ARGS)
+$(full_classes_processed_jar): PRIVATE_JAR_PROCESSOR := $(HOST_OUT_JAVA_LIBRARIES)/$(LOCAL_JAR_PROCESSOR).jar
+$(full_classes_processed_jar): PRIVATE_TMP_OUT := $(out)
+in :=
+out :=
+
+$(full_classes_processed_jar): $(full_classes_compiled_jar) $(LOCAL_JAR_PROCESSOR)
+	@echo Processing $@ with $(PRIVATE_JAR_PROCESSOR)
+	$(hide) rm -f $@ $(PRIVATE_TMP_OUT)
+	$(hide) java -jar $(PRIVATE_JAR_PROCESSOR) $(PRIVATE_JAR_PROCESSOR_ARGS)
+	$(hide) mv $(PRIVATE_TMP_OUT) $@
+
+else
+full_classes_processed_jar := $(full_classes_compiled_jar)
+endif
+
 my_desugaring :=
 ifndef LOCAL_JACK_ENABLED
-ifeq ($(LOCAL_JAVA_LANGUAGE_VERSION),1.8)
+ifndef LOCAL_IS_STATIC_JAVA_LIBRARY
 my_desugaring := true
 $(full_classes_desugar_jar): PRIVATE_DX_FLAGS := $(LOCAL_DX_FLAGS)
-$(full_classes_desugar_jar): $(full_classes_compiled_jar) $(DESUGAR)
+$(full_classes_desugar_jar): $(full_classes_processed_jar) $(DESUGAR)
 	$(desugar-classes-jar)
 endif
 endif
 
 ifndef my_desugaring
-full_classes_desugar_jar := $(full_classes_compiled_jar)
+full_classes_desugar_jar := $(full_classes_processed_jar)
 endif
 
-# Run jarjar if necessary, otherwise just copy the file.
+# Run jarjar if necessary
 ifneq ($(strip $(LOCAL_JARJAR_RULES)),)
 $(full_classes_jarjar_jar): PRIVATE_JARJAR_RULES := $(LOCAL_JARJAR_RULES)
 $(full_classes_jarjar_jar): $(full_classes_desugar_jar) $(LOCAL_JARJAR_RULES) | $(JARJAR)
 	@echo JarJar: $@
 	$(hide) java -jar $(JARJAR) process $(PRIVATE_JARJAR_RULES) $< $@
 else
-$(full_classes_jarjar_jar): $(full_classes_desugar_jar) | $(ACP)
-	@echo Copying: $@
-	$(hide) $(ACP) -fp $< $@
+full_classes_jarjar_jar := $(full_classes_desugar_jar)
 endif
 
 ifeq ($(LOCAL_EMMA_INSTRUMENT),true)
@@ -497,20 +512,16 @@
 	$(transform-classes.jar-to-emma)
 
 else
-$(full_classes_emma_jar): $(full_classes_jarjar_jar)
-	@echo Copying: $@
-	$(copy-file-to-target)
+full_classes_emma_jar := $(full_classes_jarjar_jar)
 endif
 
-# Keep a copy of the jar just before proguard processing.
 # TODO: this should depend on full_classes_emma_jar once coverage works again
-$(full_classes_jar): $(full_classes_jarjar_jar) | $(ACP)
-	@echo Copying: $@
-	$(hide) $(ACP) -fp $< $@
+full_classes_pre_proguard_jar := $(full_classes_jarjar_jar)
 
-$(call define-jar-to-toc-rule, $(full_classes_jar))
+# Keep a copy of the jar just before proguard processing.
+$(eval $(call copy-one-file,$(full_classes_pre_proguard_jar),$(intermediates.COMMON)/classes-pre-proguard.jar))
 
-# Run proguard if necessary, otherwise just copy the file.
+# Run proguard if necessary
 ifdef LOCAL_PROGUARD_ENABLED
 ifneq ($(filter-out full custom nosystem obfuscation optimization shrinktests,$(LOCAL_PROGUARD_ENABLED)),)
     $(warning while processing: $(LOCAL_MODULE))
@@ -622,7 +633,7 @@
 endif
 
 # If not using jack and building against the current SDK version then filter
-# out junit and android.test classes from the application that are to be
+# out the junit, android.test and c.a.i.u.Predicate classes that are to be
 # removed from the Android API as part of b/30188076 but which are still
 # present in the Android API. This is to allow changes to be made to the
 # build to statically include those classes into the application without
@@ -631,7 +642,7 @@
 ifndef LOCAL_JACK_ENABLED
 ifdef LOCAL_SDK_VERSION
 ifeq (,$(filter-out current system_current test_current, $(LOCAL_SDK_VERSION)))
-proguard_injar_filters := (!junit/framework/**,!junit/runner/**,!android/test/**)
+proguard_injar_filters := (!junit/framework/**,!junit/runner/**,!junit/textui/**,!android/test/**,!com/android/internal/util/*)
 endif
 endif
 endif
@@ -639,16 +650,18 @@
 $(full_classes_proguard_jar): PRIVATE_PROGUARD_INJAR_FILTERS := $(proguard_injar_filters)
 $(full_classes_proguard_jar): PRIVATE_EXTRA_INPUT_JAR := $(extra_input_jar)
 $(full_classes_proguard_jar): PRIVATE_PROGUARD_FLAGS := $(legacy_proguard_flags) $(common_proguard_flags) $(LOCAL_PROGUARD_FLAGS)
-$(full_classes_proguard_jar) : $(full_classes_jar) $(extra_input_jar) $(my_support_library_sdk_raise) $(common_proguard_flag_files) $(proguard_flag_files) | $(PROGUARD)
+$(full_classes_proguard_jar) : $(full_classes_pre_proguard_jar) $(extra_input_jar) $(my_support_library_sdk_raise) $(common_proguard_flag_files) $(proguard_flag_files) | $(PROGUARD)
 	$(call transform-jar-to-proguard)
 
 else  # LOCAL_PROGUARD_ENABLED not defined
-$(full_classes_proguard_jar) : $(full_classes_jar) | $(ACP)
-	@echo Copying: $@
-	$(hide) $(ACP) -fp $< $@
-
+full_classes_proguard_jar := $(full_classes_pre_proguard_jar)
 endif # LOCAL_PROGUARD_ENABLED defined
 
+$(eval $(call copy-one-file,$(full_classes_proguard_jar),$(full_classes_jar)))
+
+$(call define-jar-to-toc-rule, $(full_classes_jar))
+
+ifneq ($(LOCAL_IS_STATIC_JAVA_LIBRARY),true)
 ifndef LOCAL_JACK_ENABLED
 $(built_dex_intermediate): PRIVATE_DX_FLAGS := $(LOCAL_DX_FLAGS)
 # If you instrument class files that have local variable debug information in
@@ -660,21 +673,25 @@
 ifeq ($(LOCAL_EMMA_INSTRUMENT),true)
 $(built_dex_intermediate): PRIVATE_DX_FLAGS += --no-locals
 endif
-$(built_dex_intermediate): $(full_classes_proguard_jar) $(DX)
+$(built_dex_intermediate): $(full_classes_jar) $(DX)
 	$(transform-classes.jar-to-dex)
 endif # LOCAL_JACK_ENABLED is disabled
 
-$(built_dex): $(built_dex_intermediate) | $(ACP)
+$(built_dex): $(built_dex_intermediate)
 	@echo Copying: $@
 	$(hide) mkdir -p $(dir $@)
 	$(hide) rm -f $(dir $@)/classes*.dex
-	$(hide) $(ACP) -fp $(dir $<)/classes*.dex $(dir $@)
+	$(hide) cp -fp $(dir $<)/classes*.dex $(dir $@)
+
+java-dex: $(built_dex)
+
+endif # !LOCAL_IS_STATIC_JAVA_LIBRARY
 
 findbugs_xml := $(intermediates.COMMON)/findbugs.xml
 $(findbugs_xml): PRIVATE_AUXCLASSPATH := $(addprefix -auxclasspath ,$(strip \
     $(call normalize-path-list,$(filter %.jar,$(full_java_libs)))))
 $(findbugs_xml): PRIVATE_FINDBUGS_FLAGS := $(LOCAL_FINDBUGS_FLAGS)
-$(findbugs_xml) : $(full_classes_jar) $(filter %.xml, $(LOCAL_FINDBUGS_FLAGS))
+$(findbugs_xml) : $(full_classes_pre_proguard_jar) $(filter %.xml, $(LOCAL_FINDBUGS_FLAGS))
 	@echo Findbugs: $@
 	$(hide) $(FINDBUGS) -textui -effort:min -xml:withMessages \
 		$(PRIVATE_AUXCLASSPATH) $(PRIVATE_FINDBUGS_FLAGS) \
@@ -741,13 +758,14 @@
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_PROGUARD_FLAGS :=
 endif # LOCAL_PROGUARD_ENABLED defined
 
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_FLAGS := $(GLOBAL_JAVAC_DEBUG_FLAGS) $(LOCAL_JACK_FLAGS)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_FLAGS := $(GLOBAL_JAVAC_DEBUG_FLAGS) $(LOCAL_JACK_FLAGS) $(annotation_processor_flags)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_VERSION := $(LOCAL_JACK_VERSION)
 
 jack_all_deps := $(java_sources) $(java_resource_sources) $(full_jack_deps) \
         $(jar_manifest_file) $(layers_file) $(RenderScript_file_stamp) \
         $(common_proguard_flag_files) $(proguard_flag_files) \
-        $(proto_java_sources_file_stamp) $(LOCAL_ADDITIONAL_DEPENDENCIES) $(LOCAL_JARJAR_RULES) \
+        $(proto_java_sources_file_stamp) $(annotation_processor_deps) \
+        $(LOCAL_ADDITIONAL_DEPENDENCIES) $(LOCAL_JARJAR_RULES) \
         $(NORMALIZE_PATH) $(JACK_DEFAULT_ARGS) $(JACK)
 
 $(jack_check_timestamp): $(jack_all_deps) | setup-jack-server
@@ -782,6 +800,26 @@
 $(built_dex_intermediate): PRIVATE_JACK_COVERAGE_OPTIONS :=
 endif
 
+# Compiling with javac to jar, then converting jar to dex with jack
+ifeq ($(LOCAL_JACK_ENABLED),javac_frontend)
+
+# PRIVATE_EXTRA_JAR_ARGS and source files were already handled during javac
+$(built_dex_intermediate): PRIVATE_EXTRA_JAR_ARGS :=
+$(built_dex_intermediate): PRIVATE_JAVA_SOURCES :=
+$(built_dex_intermediate): PRIVATE_SOURCE_INTERMEDIATES_DIR :=
+$(built_dex_intermediate): PRIVATE_HAS_PROTO_SOURCES :=
+$(built_dex_intermediate): PRIVATE_HAS_RS_SOURCES :=
+
+# Incremental compilation is not supported when mixing javac and jack
+$(built_dex_intermediate): PRIVATE_JACK_INCREMENTAL_DIR :=
+
+# Pass output of javac to jack
+$(built_dex_intermediate): PRIVATE_JACK_IMPORT_JAR := $(full_classes_compiled_jar)
+$(built_dex_intermediate): $(full_classes_compiled_jar)
+else # LOCAL_JACK_ENABLED != javac_frontend
+$(built_dex_intermediate): PRIVATE_JACK_IMPORT_JAR :=
+endif # LOCAL_JACK_ENABLED != javac_frontend
+
 $(built_dex_intermediate): PRIVATE_JACK_PLUGIN_PATH := $(LOCAL_JACK_PLUGIN_PATH)
 $(built_dex_intermediate): PRIVATE_JACK_PLUGIN := $(LOCAL_JACK_PLUGIN)
 $(built_dex_intermediate): $(jack_all_deps) $(LOCAL_JACK_PLUGIN_PATH) | setup-jack-server
diff --git a/core/java_common.mk b/core/java_common.mk
index 8fc4fb6..3c9b18f 100644
--- a/core/java_common.mk
+++ b/core/java_common.mk
@@ -19,12 +19,7 @@
   ifneq (,$(filter $(LOCAL_SDK_VERSION), $(private_sdk_versions_without_any_java_18_support)))
     LOCAL_JAVA_LANGUAGE_VERSION := 1.7
   else
-    # This retains 1.7 for ART build bots only. http://b/27583810
-    ifeq (,$(LEGACY_USE_JAVA7))
-      LOCAL_JAVA_LANGUAGE_VERSION := 1.8
-    else
-      LOCAL_JAVA_LANGUAGE_VERSION := 1.7
-    endif
+    LOCAL_JAVA_LANGUAGE_VERSION := 1.8
   endif
 endif
 LOCAL_JAVACFLAGS += -source $(LOCAL_JAVA_LANGUAGE_VERSION) -target $(LOCAL_JAVA_LANGUAGE_VERSION)
@@ -148,10 +143,24 @@
 need_compile_java := $(strip $(all_java_sources)$(all_res_assets)$(java_resource_sources))$(LOCAL_STATIC_JAVA_LIBRARIES)$(filter true,$(LOCAL_SOURCE_FILES_ALL_GENERATED))
 ifdef need_compile_java
 
+annotation_processor_flags :=
+annotation_processor_deps :=
+
+ifdef LOCAL_ANNOTATION_PROCESSORS
+  annotation_processor_jars := $(call java-lib-deps,$(LOCAL_ANNOTATION_PROCESSORS),true)
+  annotation_processor_flags += -processorpath $(call normalize-path-list,$(annotation_processor_jars))
+  annotation_processor_deps += $(annotation_processor_jars)
+
+  # b/25860419: annotation processors must be explicitly specified for grok
+  annotation_processor_flags += $(foreach class,$(LOCAL_ANNOTATION_PROCESSOR_CLASSES),-processor $(class))
+
+  annotation_processor_jars :=
+endif
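+
+# Hedged usage sketch from a module's Android.mk (library and class names are
+# hypothetical):
+#   LOCAL_ANNOTATION_PROCESSORS := my-annotation-processor
+#   LOCAL_ANNOTATION_PROCESSOR_CLASSES := com.example.MyProcessor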
+
 full_static_java_libs := \
     $(foreach lib,$(LOCAL_STATIC_JAVA_LIBRARIES), \
       $(call intermediates-dir-for, \
-        JAVA_LIBRARIES,$(lib),$(LOCAL_IS_HOST_MODULE),COMMON)/javalib.jar)
+        JAVA_LIBRARIES,$(lib),$(LOCAL_IS_HOST_MODULE),COMMON)/classes.jar)
 
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_STATIC_JAVA_LIBRARIES := $(full_static_java_libs)
 
@@ -159,6 +168,7 @@
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_ASSET_DIR := $(LOCAL_ASSET_DIR)
 
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_CLASS_INTERMEDIATES_DIR := $(intermediates.COMMON)/classes
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_ANNO_INTERMEDIATES_DIR := $(intermediates.COMMON)/anno
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_SOURCE_INTERMEDIATES_DIR := $(intermediates.COMMON)/src
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_HAS_PROTO_SOURCES := $(if $(proto_sources),true)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_PROTO_SOURCE_INTERMEDIATES_DIR := $(intermediates.COMMON)/proto
@@ -216,11 +226,11 @@
 ifeq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
 my_bootclasspath := ""
 else
-my_bootclasspath := $(call normalize-path-list,$(call host-dex-java-lib-files,core-oj-hostdex core-libart-hostdex))
+my_bootclasspath := $(call normalize-path-list,$(call java-lib-files,core-oj-hostdex core-libart-hostdex,true))
 endif
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH := -bootclasspath $(my_bootclasspath)
 
-full_shared_java_libs := $(call host-dex-java-lib-files,$(LOCAL_JAVA_LIBRARIES))
+full_shared_java_libs := $(call java-lib-files,$(LOCAL_JAVA_LIBRARIES),true)
 full_java_lib_deps := $(full_shared_java_libs)
 else # !USE_CORE_LIB_BOOTCLASSPATH
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH :=
@@ -242,7 +252,7 @@
   link_apk_libraries := \
       $(foreach lib,$(apk_libraries), \
         $(call intermediates-dir-for, \
-              APPS,$(lib),,COMMON)/classes.jar)
+              APPS,$(lib),,COMMON)/classes-pre-proguard.jar)
 
   # link against the jar with full original names (before proguard processing).
   full_shared_java_libs += $(link_apk_libraries)
@@ -263,7 +273,7 @@
   link_instr_intermediates_dir.COMMON := $(call intermediates-dir-for, \
       APPS,$(LOCAL_INSTRUMENTATION_FOR),,COMMON)
   # link against the jar with full original names (before proguard processing).
-  link_instr_classes_jar := $(link_instr_intermediates_dir.COMMON)/classes.jar
+  link_instr_classes_jar := $(link_instr_intermediates_dir.COMMON)/classes-pre-proguard.jar
   full_java_libs += $(link_instr_classes_jar)
   full_java_lib_deps += $(link_instr_classes_jar)
 endif  # LOCAL_INSTRUMENTATION_FOR
@@ -381,35 +391,24 @@
 # Verify that all libraries are safe to use
 ###########################################################
 ifndef LOCAL_IS_HOST_MODULE
-my_link_type := $(intermediates.COMMON)/link_type
-all_link_types: $(my_link_type)
-my_link_type_deps := $(strip \
-  $(foreach lib,$(LOCAL_STATIC_JAVA_LIBRARIES),\
-    $(call intermediates-dir-for, \
-      JAVA_LIBRARIES,$(lib),,COMMON)/link_type) \
-  $(foreach lib,$(apk_libraries), \
-    $(call intermediates-dir-for, \
-      APPS,$(lib),,COMMON)/link_type))
 ifeq ($(LOCAL_SDK_VERSION),system_current)
-$(my_link_type): PRIVATE_LINK_TYPE := java:system
-$(my_link_type): PRIVATE_WARN_TYPES := java:platform
-$(my_link_type): PRIVATE_ALLOWED_TYPES := java:sdk java:system
+my_link_type := java:system
+my_warn_types := java:platform
+my_allowed_types := java:sdk java:system
 else ifneq ($(LOCAL_SDK_VERSION),)
-$(my_link_type): PRIVATE_LINK_TYPE := java:sdk
-$(my_link_type): PRIVATE_WARN_TYPES := java:system java:platform
-$(my_link_type): PRIVATE_ALLOWED_TYPES := java:sdk
+my_link_type := java:sdk
+my_warn_types := java:system java:platform
+my_allowed_types := java:sdk
 else
-$(my_link_type): PRIVATE_LINK_TYPE := java:platform
-$(my_link_type): PRIVATE_WARN_TYPES :=
-$(my_link_type): PRIVATE_ALLOWED_TYPES := java:sdk java:system java:platform
+my_link_type := java:platform
+my_warn_types :=
+my_allowed_types := java:sdk java:system java:platform
 endif
-$(eval $(call link-type-partitions,$(my_link_type)))
-$(my_link_type): PRIVATE_DEPS := $(my_link_type_deps)
-$(my_link_type): PRIVATE_MODULE := $(LOCAL_MODULE)
-$(my_link_type): PRIVATE_MAKEFILE := $(LOCAL_MODULE_MAKEFILE)
-$(my_link_type): $(my_link_type_deps) $(CHECK_LINK_TYPE)
-	@echo Check Java library module types: $@
-	$(check-link-type)
 
-$(LOCAL_BUILT_MODULE): $(my_link_type)
+my_link_deps := $(addprefix JAVA_LIBRARIES:,$(LOCAL_STATIC_JAVA_LIBRARIES))
+my_link_deps += $(addprefix APPS:,$(apk_libraries))
+
+my_2nd_arch_prefix := $(LOCAL_2ND_ARCH_VAR_PREFIX)
+my_common := COMMON
+include $(BUILD_SYSTEM)/link_type.mk
 endif  # !LOCAL_IS_HOST_MODULE
diff --git a/core/java_library.mk b/core/java_library.mk
index f770a54..9db587d 100644
--- a/core/java_library.mk
+++ b/core/java_library.mk
@@ -35,9 +35,14 @@
 endif
 endif
 
+# For non-static java libraries, other modules should depend on
+# out/target/common/obj/JAVA_LIBRARIES/.../javalib.jar (for jack)
+# or out/target/common/obj/JAVA_LIBRARIES/.../classes.jar (for javac).
+# For static java libraries, other modules should depend on
+# out/target/common/obj/JAVA_LIBRARIES/.../classes.jar
+# There are some dependencies outside the build system that assume static
+# java libraries produce javalib.jar, so we will copy classes.jar there too.
 intermediates.COMMON := $(call local-intermediates-dir,COMMON)
-
-# This file will be the one that other modules should depend on.
 common_javalib.jar := $(intermediates.COMMON)/javalib.jar
 LOCAL_INTERMEDIATE_TARGETS += $(common_javalib.jar)
 
@@ -65,27 +70,20 @@
 #################################
 
 ifeq ($(LOCAL_IS_STATIC_JAVA_LIBRARY),true)
-# No dex; all we want are the .class files with resources.
-$(common_javalib.jar) : $(java_resource_sources)
-ifdef LOCAL_PROGUARD_ENABLED
-$(common_javalib.jar) : $(full_classes_proguard_jar)
-else
-$(common_javalib.jar) : $(full_classes_jar)
-endif
-	@echo "target Static Jar: $(PRIVATE_MODULE) ($@)"
-	$(copy-file-to-target)
+# There are some dependencies outside the build system that assume classes.jar
+# is available as javalib.jar so copy it there too.
+$(eval $(call copy-one-file,$(full_classes_jar),$(common_javalib.jar)))
 
 ifdef LOCAL_JACK_ENABLED
-$(LOCAL_BUILT_MODULE) : $(full_classes_jack)
+$(eval $(call copy-one-file,$(full_classes_jack),$(LOCAL_BUILT_MODULE)))
 else
-$(LOCAL_BUILT_MODULE) : $(common_javalib.jar)
+$(eval $(call copy-one-file,$(full_classes_jar),$(LOCAL_BUILT_MODULE)))
 endif
-	$(copy-file-to-target)
 
 else # !LOCAL_IS_STATIC_JAVA_LIBRARY
 
 $(common_javalib.jar): PRIVATE_DEX_FILE := $(built_dex)
-$(common_javalib.jar): PRIVATE_SOURCE_ARCHIVE := $(full_classes_jarjar_jar)
+$(common_javalib.jar): PRIVATE_SOURCE_ARCHIVE := $(full_classes_pre_proguard_jar)
 $(common_javalib.jar): PRIVATE_DONT_DELETE_JAR_DIRS := $(LOCAL_DONT_DELETE_JAR_DIRS)
 $(common_javalib.jar) : $(built_dex) $(java_resource_sources) | $(ZIPTIME)
 	@echo "target Jar: $(PRIVATE_MODULE) ($@)"
@@ -104,8 +102,7 @@
 ifneq ($(dexpreopt_boot_jar_module),) # boot jar
 # boot jar's rules are defined in dex_preopt.mk
 dexpreopted_boot_jar := $(DEXPREOPT_BOOT_JAR_DIR_FULL_PATH)/$(dexpreopt_boot_jar_module)_nodex.jar
-$(LOCAL_BUILT_MODULE) : $(dexpreopted_boot_jar)
-	$(call copy-file-to-target)
+$(eval $(call copy-one-file,$(dexpreopted_boot_jar),$(LOCAL_BUILT_MODULE)))
 
 # For libart boot jars, we don't have .odex files.
 else # ! boot jar
@@ -115,8 +112,7 @@
 	@echo "Dexpreopt Jar: $(PRIVATE_MODULE) ($@)"
 	$(call dexpreopt-one-file,$<,$@)
 
-$(LOCAL_BUILT_MODULE) : $(common_javalib.jar)
-	$(call copy-file-to-target)
+$(eval $(call copy-one-file,$(common_javalib.jar),$(LOCAL_BUILT_MODULE)))
 ifneq (nostripping,$(LOCAL_DEX_PREOPT))
 	$(call dexpreopt-remove-classes.dex,$@)
 endif
@@ -124,8 +120,7 @@
 endif # ! boot jar
 
 else # LOCAL_DEX_PREOPT
-$(LOCAL_BUILT_MODULE) : $(common_javalib.jar)
-	$(call copy-file-to-target)
+$(eval $(call copy-one-file,$(common_javalib.jar),$(LOCAL_BUILT_MODULE)))
 
 endif # LOCAL_DEX_PREOPT
 endif # !LOCAL_IS_STATIC_JAVA_LIBRARY
diff --git a/core/link_type.mk b/core/link_type.mk
new file mode 100644
index 0000000..ff525cb
--- /dev/null
+++ b/core/link_type.mk
@@ -0,0 +1,27 @@
+# Inputs:
+#   LOCAL_MODULE_CLASS, LOCAL_MODULE, LOCAL_MODULE_MAKEFILE, LOCAL_BUILT_MODULE
+#   from base_rules.mk: my_kind, my_host_cross
+#   my_common: empty or COMMON, like the argument to intermediates-dir-for
+#   my_2nd_arch_prefix: usually LOCAL_2ND_ARCH_VAR_PREFIX, separate for JNI installation
+#
+#   my_link_type: the tags to apply to this module
+#   my_warn_types: the tags to warn about in our dependencies
+#   my_allowed_types: the tags to allow in our dependencies
+#   my_link_deps: the dependencies, in the form of <MODULE_CLASS>:<name>
+#
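+# For example (sketch mirroring install_jni_libs_internal.mk): an app linking
+# platform JNI libraries sets
+#   my_link_type := app:platform
+#   my_warn_types :=
+#   my_allowed_types := native:ndk native:platform
+#   my_link_deps := $(addprefix SHARED_LIBRARIES:,$(LOCAL_JNI_SHARED_LIBRARIES))
+# before including this file.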
+
+my_link_prefix := LINK_TYPE:$(call find-idf-prefix,$(my_kind),$(my_host_cross)):$(if $(my_common),$(my_common):_,_:$(if $(my_2nd_arch_prefix),$(my_2nd_arch_prefix),_))
+link_type := $(my_link_prefix):$(LOCAL_MODULE_CLASS):$(LOCAL_MODULE)
+ALL_LINK_TYPES := $(ALL_LINK_TYPES) $(link_type)
+$(link_type).TYPE := $(my_link_type)
+$(link_type).MAKEFILE := $(LOCAL_MODULE_MAKEFILE)
+$(link_type).WARN := $(my_warn_types)
+$(link_type).ALLOWED := $(my_allowed_types)
+$(link_type).DEPS := $(addprefix $(my_link_prefix):,$(my_link_deps))
+$(link_type).BUILT := $(LOCAL_BUILT_MODULE)
+
+link_type :=
+my_allowed_types :=
+my_link_prefix :=
+my_link_type :=
+my_warn_types :=
diff --git a/core/local_vndk.mk b/core/local_vndk.mk
index f7970f0..640aac7 100644
--- a/core/local_vndk.mk
+++ b/core/local_vndk.mk
@@ -3,7 +3,7 @@
 # If LOCAL_SDK_VERSION is set, that's a more restrictive set, so they don't need LOCAL_USE_VNDK.
 ifndef LOCAL_IS_HOST_MODULE
 ifndef LOCAL_SDK_VERSION
-  ifneq (,$(filter true,$(LOCAL_PROPRIETARY_MODULE) $(LOCAL_ODM_MODULE) $(LOCAL_OEM_MODULE)))
+  ifneq (,$(filter true,$(LOCAL_VENDOR_MODULE) $(LOCAL_ODM_MODULE) $(LOCAL_OEM_MODULE) $(LOCAL_PROPRIETARY_MODULE)))
     LOCAL_USE_VNDK:=true
   else
     ifneq (,$(filter $(TARGET_OUT_VENDOR)%,$(LOCAL_MODULE_PATH) $(LOCAL_MODULE_PATH_32) $(LOCAL_MODULE_PATH_64)))
diff --git a/core/main.mk b/core/main.mk
index 68a6daf..ea72a9a 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -10,9 +10,6 @@
 endif
 
 ifndef KATI
-USE_SOONG_UI ?= true
-endif
-ifeq ($(USE_SOONG_UI),true)
 
 host_prebuilts := linux-x86
 ifeq ($(shell uname),Darwin)
@@ -27,7 +24,7 @@
 $(sort $(MAKECMDGOALS)) : run_soong_ui
 	@#empty
 
-else # USE_SOONG_UI
+else # KATI
 
 # Absolute path of the present working directory.
 # This overrides the shell variable $PWD, which does not necessarily point to
@@ -47,23 +44,10 @@
 .PHONY: droid_targets
 droid_targets:
 
-# Targets that provide quick help on the build system.
-include $(BUILD_SYSTEM)/help.mk
-
 # Set up various standard variables based on configuration
 # and host information.
 include $(BUILD_SYSTEM)/config.mk
 
-ifndef KATI
-ifdef USE_NINJA
-$(warning USE_NINJA is ignored. Ninja is always used.)
-endif
-
-# Mark this is a ninja build.
-$(shell mkdir -p $(OUT_DIR) && touch $(OUT_DIR)/ninja_build)
-include build/core/ninja.mk
-else # KATI
-
 ifneq ($(filter $(dont_bother_goals), $(MAKECMDGOALS)),)
 dont_bother := true
 endif
@@ -104,150 +88,10 @@
 # Include the google-specific config
 -include vendor/google/build/config.mk
 
-VERSION_CHECK_SEQUENCE_NUMBER := 6
-JAVA_NOT_REQUIRED_CHECKED :=
--include $(OUT_DIR)/versions_checked.mk
-ifneq ($(VERSION_CHECK_SEQUENCE_NUMBER)$(JAVA_NOT_REQUIRED),$(VERSIONS_CHECKED)$(JAVA_NOT_REQUIRED_CHECKED))
-
-$(info Checking build tools versions...)
-
-# check for a case sensitive file system
-ifneq (a,$(shell mkdir -p $(OUT_DIR) ; \
-                echo a > $(OUT_DIR)/casecheck.txt; \
-                    echo B > $(OUT_DIR)/CaseCheck.txt; \
-                cat $(OUT_DIR)/casecheck.txt))
-$(warning ************************************************************)
-$(warning You are building on a case-insensitive filesystem.)
-$(warning Please move your source tree to a case-sensitive filesystem.)
-$(warning ************************************************************)
-$(error Case-insensitive filesystems not supported)
-endif
-
-# Make sure that there are no spaces in the absolute path; the
-# build system can't deal with them.
-ifneq ($(words $(shell pwd)),1)
-$(warning ************************************************************)
-$(warning You are building in a directory whose absolute path contains)
-$(warning a space character:)
-$(warning $(space))
-$(warning "$(shell pwd)")
-$(warning $(space))
-$(warning Please move your source tree to a path that does not contain)
-$(warning any spaces.)
-$(warning ************************************************************)
-$(error Directory names containing spaces not supported)
-endif
-
-ifneq ($(JAVA_NOT_REQUIRED),true)
-java_version_str := $(shell unset _JAVA_OPTIONS && java -version 2>&1)
-javac_version_str := $(shell unset _JAVA_OPTIONS && javac -version 2>&1)
-
-# Check for the correct version of java, should be 1.8 by
-# default and only 1.7 if LEGACY_USE_JAVA7 is set.
-ifeq ($(LEGACY_USE_JAVA7),) # if LEGACY_USE_JAVA7 == ''
-required_version := "1.8.x"
-required_javac_version := "1.8"
-java_version := $(shell echo '$(java_version_str)' | grep '[ "]1\.8[\. "$$]')
-javac_version := $(shell echo '$(javac_version_str)' | grep '[ "]1\.8[\. "$$]')
-else
-required_version := "1.7.x"
-required_javac_version := "1.7"
-java_version := $(shell echo '$(java_version_str)' | grep '^java .*[ "]1\.7[\. "$$]')
-javac_version := $(shell echo '$(javac_version_str)' | grep '[ "]1\.7[\. "$$]')
-endif # if LEGACY_USE_JAVA7 == ''
-
-ifeq ($(strip $(java_version)),)
-$(info ************************************************************)
-$(info You are attempting to build with the incorrect version)
-$(info of java.)
-$(info $(space))
-$(info Your version is: $(java_version_str).)
-$(info The required version is: $(required_version))
-$(info $(space))
-$(info Please follow the machine setup instructions at)
-$(info $(space)$(space)$(space)$(space)https://source.android.com/source/initializing.html)
-$(info ************************************************************)
-$(error stop)
-endif
-
-# Check for the current JDK.
-#
-# For Java 1.7/1.8, we require OpenJDK on linux and Oracle JDK on Mac OS.
-requires_openjdk := false
-ifeq ($(BUILD_OS),linux)
-requires_openjdk := true
-endif
-
-
-# Check for the current jdk
-ifeq ($(requires_openjdk), true)
-# The user asked for openjdk, so check that the host
-# java version is really openjdk and not some other JDK.
-ifeq ($(shell echo '$(java_version_str)' | grep -i openjdk),)
-$(info ************************************************************)
-$(info You asked for an OpenJDK based build but your version is)
-$(info $(java_version_str).)
-$(info ************************************************************)
-$(error stop)
-endif # java version is not OpenJdk
-else # if requires_openjdk
-ifneq ($(shell echo '$(java_version_str)' | grep -i openjdk),)
-$(info ************************************************************)
-$(info You are attempting to build with an unsupported JDK.)
-$(info $(space))
-$(info You use OpenJDK but only Sun/Oracle JDK is supported.)
-$(info Please follow the machine setup instructions at)
-$(info $(space)$(space)$(space)$(space)https://source.android.com/source/download.html)
-$(info ************************************************************)
-$(error stop)
-endif # java version is not Sun Oracle JDK
-endif # if requires_openjdk
-
-KNOWN_INCOMPATIBLE_JAVAC_VERSIONS := google
-incompat_javac := $(foreach v,$(KNOWN_INCOMPATIBLE_JAVAC_VERSIONS),$(findstring $(v),$(javac_version_str)))
-ifneq ($(incompat_javac),)
-javac_version :=
-endif
-
-# Check for the correct version of javac
-ifeq ($(strip $(javac_version)),)
-$(info ************************************************************)
-$(info You are attempting to build with the incorrect version)
-$(info of javac.)
-$(info $(space))
-$(info Your version is: $(javac_version_str).)
-ifneq ($(incompat_javac),)
-$(info This '$(incompat_javac)' version is not supported for Android platform builds.)
-$(info Use a publicly available JDK and make sure you have run envsetup.sh / lunch.)
-else
-$(info The required version is: $(required_javac_version))
-endif
-$(info $(space))
-$(info Please follow the machine setup instructions at)
-$(info $(space)$(space)$(space)$(space)https://source.android.com/source/download.html)
-$(info ************************************************************)
-$(error stop)
-endif
-
-endif # if JAVA_NOT_REQUIRED
-
-ifndef BUILD_EMULATOR
-  # Emulator binaries are now provided under prebuilts/android-emulator/
-  BUILD_EMULATOR := false
-endif
-
-$(shell echo 'VERSIONS_CHECKED := $(VERSION_CHECK_SEQUENCE_NUMBER)' \
-        > $(OUT_DIR)/versions_checked.mk)
-$(shell echo 'BUILD_EMULATOR ?= $(BUILD_EMULATOR)' \
-        >> $(OUT_DIR)/versions_checked.mk)
-$(shell echo 'JAVA_NOT_REQUIRED_CHECKED := $(JAVA_NOT_REQUIRED)' \
-        >> $(OUT_DIR)/versions_checked.mk)
-endif
-
 # These are the modifier targets that don't do anything themselves, but
 # change the behavior of the build.
 # (must be defined before including definitions.make)
-INTERNAL_MODIFIER_TARGETS := showcommands all
+INTERNAL_MODIFIER_TARGETS := all
 
 # EMMA_INSTRUMENT_STATIC merges the static emma library to each emma-enabled module.
 ifeq (true,$(EMMA_INSTRUMENT_STATIC))
@@ -343,16 +187,6 @@
   ADDITIONAL_BUILD_PROPERTIES += ro.bionic.ld.warning=1
 endif
 
-# Boolean variable determining if Treble is fully enabled
-PRODUCT_FULL_TREBLE := false
-ifeq ($(PRODUCT_FULL_TREBLE_OVERRIDE),true)
-  PRODUCT_FULL_TREBLE := true
-else ifeq ($(PRODUCT_SHIPPING_API_LEVEL),)
-  #$(warning no product shipping level defined)
-else ifneq ($(call math_gt_or_eq,$(PRODUCT_SHIPPING_API_LEVEL),26),)
-  PRODUCT_FULL_TREBLE := true
-endif
-
 # -----------------------------------------------------------------
 ###
 ### In this section we set up the things that are different
@@ -469,8 +303,8 @@
 
 ADDITIONAL_BUILD_PROPERTIES += net.bt.name=Android
 
-# enable vm tracing in files for now to help track
-# the cause of ANRs in the content process
+# Sets the location that the runtime dumps stack traces to when signalled
+# with SIGQUIT. Stack trace dumping is turned on for all Android builds.
 ADDITIONAL_BUILD_PROPERTIES += dalvik.vm.stack-trace-file=/data/anr/traces.txt
 
 # ------------------------------------------------------------
@@ -492,7 +326,7 @@
 endif
 
 
-# If they only used the modifier goals (showcommands, etc), we'll actually
+# If they only used the modifier goals (all, etc), we'll actually
 # build the default target.
 ifeq ($(filter-out $(INTERNAL_MODIFIER_TARGETS),$(MAKECMDGOALS)),)
 .PHONY: $(INTERNAL_MODIFIER_TARGETS)
@@ -513,6 +347,10 @@
 ADDITIONAL_BUILD_PROPERTIES := $(strip $(ADDITIONAL_BUILD_PROPERTIES))
 .KATI_READONLY := ADDITIONAL_BUILD_PROPERTIES
 
+ifneq ($(PRODUCT_ENFORCE_RRO_TARGETS),)
+ENFORCE_RRO_SOURCES :=
+endif
+
 ifneq ($(ONE_SHOT_MAKEFILE),)
 # We've probably been invoked by the "mm" shell function
 # with a subdirectory's makefile.
@@ -571,6 +409,8 @@
 include $(BUILD_SYSTEM)/pdk_fusion_modules.mk
 endif # PDK_FUSION_PLATFORM_ZIP
 
+droid_targets : blueprint_tools
+
 endif # dont_bother
 
 endif # ONE_SHOT_MAKEFILE
@@ -579,6 +419,13 @@
 # All module makefiles have been included at this point.
 # -------------------------------------------------------------------
 
+# -------------------------------------------------------------------
+# Enforce generation of RRO packages for all modules that have resource
+# overlays.
+# -------------------------------------------------------------------
+ifneq ($(PRODUCT_ENFORCE_RRO_TARGETS),)
+$(call generate_all_enforce_rro_packages)
+endif
 
 # -------------------------------------------------------------------
 # Fix up CUSTOM_MODULES to refer to installed files rather than
@@ -731,6 +578,168 @@
 deps :=
 add-required-deps :=
 
+################################################################################
+# Link type checking
+#
+# ALL_LINK_TYPES contains a list of all link type prefixes (generally one per
+# module, but APKs can "link" to both java and native code). The link type
+# prefix consists of all the information needed by intermediates-dir-for:
+#
+#  LINK_TYPE:TARGET:_:2ND:STATIC_LIBRARIES:libfoo
+#
+#   1: LINK_TYPE literal
+#   2: prefix
+#     - TARGET
+#     - HOST
+#     - HOST_CROSS
+#     - AUX
+#   3: Whether to use the common intermediates directory or not
+#     - _
+#     - COMMON
+#   4: Whether it's the second arch or not
+#     - _
+#     - 2ND_
+#   5: Module Class
+#     - STATIC_LIBRARIES
+#     - SHARED_LIBRARIES
+#     - ...
+#   6: Module Name
+#
+# Then fields under that are separated by a period and the field name:
+#   - TYPE: the link types for this module
+#   - MAKEFILE: Where this module was defined
+#   - BUILT: The built module location
+#   - DEPS: the link type prefixes for the module's dependencies
+#   - ALLOWED: the link types to allow in this module's dependencies
+#   - WARN: the link types to warn about in this module's dependencies
+#
+# All of the dependency link types not listed in ALLOWED or WARN will become
+# errors.
+################################################################################
+
+link_type_error :=
+
+define link-type-prefix
+$(word 2,$(subst :,$(space),$(1)))
+endef
+define link-type-common
+$(patsubst _,,$(word 3,$(subst :,$(space),$(1))))
+endef
+define link-type-2ndarchprefix
+$(patsubst _,,$(word 4,$(subst :,$(space),$(1))))
+endef
+define link-type-class
+$(word 5,$(subst :,$(space),$(1)))
+endef
+define link-type-name
+$(word 6,$(subst :,$(space),$(1)))
+endef
+define link-type-os
+$(strip $(eval _p := $(link-type-prefix))\
+  $(if $(filter HOST HOST_CROSS,$(_p)),\
+    $($(_p)_OS),\
+    $(if $(filter AUX,$(_p)),AUX,android)))
+endef
+define link-type-arch
+$($(link-type-prefix)_$(link-type-2ndarchprefix)ARCH)
+endef
+define link-type-name-variant
+$(link-type-name) ($(link-type-class) $(link-type-os)-$(link-type-arch))
+endef
+
+# $(1): the prefix of the module doing the linking
+# $(2): the prefix of the linked module
+define link-type-warning
+$(shell $(call echo-warning,$($(1).MAKEFILE),"$(call link-type-name,$(1)) ($($(1).TYPE)) should not link against $(call link-type-name,$(2)) ($(3))"))
+endef
+
+# $(1): the prefix of the module doing the linking
+# $(2): the prefix of the linked module
+define link-type-error
+$(shell $(call echo-error,$($(1).MAKEFILE),"$(call link-type-name,$(1)) ($($(1).TYPE)) cannot link against $(call link-type-name,$(2)) ($(3))"))\
+$(eval link_type_error := true)
+endef
+
+link-type-missing :=
+ifneq ($(ALLOW_MISSING_DEPENDENCIES),true)
+  # Print an error message if the linked-to module is missing
+  # $(1): the prefix of the module doing the linking
+  # $(2): the prefix of the missing module
+  define link-type-missing
+    $(shell $(call echo-error,$($(1).MAKEFILE),"$(call link-type-name-variant,$(1)) missing $(call link-type-name-variant,$(2))"))\
+    $(eval available_variants := $(filter %:$(call link-type-name,$(2)),$(ALL_LINK_TYPES)))\
+    $(if $(available_variants),\
+      $(info Available variants:)\
+      $(foreach v,$(available_variants),$(info $(space)$(space)$(call link-type-name-variant,$(v)))))\
+    $(info You can set ALLOW_MISSING_DEPENDENCIES=true in your environment if this is intentional, but that may defer real problems until later in the build.)\
+    $(eval link_type_error := true)
+  endef
+else
+  define link-type-missing
+    $(eval $$(1).MISSING := true)
+  endef
+endif
+
+# Verify that $(1) can link against $(2)
+# Both $(1) and $(2) are the link type prefix defined above
+define verify-link-type
+$(foreach t,$($(2).TYPE),\
+  $(if $(filter-out $($(1).ALLOWED),$(t)),\
+    $(if $(filter $(t),$($(1).WARN)),\
+      $(call link-type-warning,$(1),$(2),$(t)),\
+      $(call link-type-error,$(1),$(2),$(t)))))
+endef
+
+# TODO: Verify all branches/configs have reasonable warnings/errors, and remove
+# these overrides
+link-type-missing = $(eval $$(1).MISSING := true)
+verify-link-type = $(eval $$(1).MISSING := true)
+
+$(foreach lt,$(ALL_LINK_TYPES),\
+  $(foreach d,$($(lt).DEPS),\
+    $(if $($(d).TYPE),\
+      $(call verify-link-type,$(lt),$(d)),\
+      $(call link-type-missing,$(lt),$(d)))))
+
+ifdef link_type_error
+  $(error exiting from previous errors)
+endif
+
+# The intermediate filename for link type rules
+#
+# APPS are special -- they have up to three different rules:
+#  1. The COMMON rule for Java libraries
+#  2. The jni_link_type rule for embedded native code
+#  3. The 2ND_jni_link_type for the second architecture native code
+define link-type-file
+$(call intermediates-dir-for,$(link-type-class),$(link-type-name),$(filter AUX HOST HOST_CROSS,$(link-type-prefix)),$(link-type-common),$(link-type-2ndarchprefix),$(filter HOST_CROSS,$(link-type-prefix)))/$(if $(filter APPS,$(link-type-class)),$(if $(link-type-common),,$(link-type-2ndarchprefix)jni_))link_type
+endef
+
+# Write out the file-based link_type rules for the ALLOW_MISSING_DEPENDENCIES
+# case. We always need to write the file for mm to work, but only need to
+# check it if we weren't able to check it when reading the Android.mk files.
+define link-type-file-rule
+my_link_type_deps := $(foreach l,$($(1).DEPS),$(call link-type-file,$(l)))
+my_link_type_file := $(call link-type-file,$(1))
+$($(1).BUILT): | $$(my_link_type_file)
+$$(my_link_type_file): PRIVATE_DEPS := $$(my_link_type_deps)
+ifeq ($($(1).MISSING),true)
+$$(my_link_type_file): $(CHECK_LINK_TYPE)
+endif
+$$(my_link_type_file): $$(my_link_type_deps)
+	@echo Check module type: $$@
+	$$(hide) mkdir -p $$(dir $$@) && rm -f $$@
+ifeq ($($(1).MISSING),true)
+	$$(hide) $(CHECK_LINK_TYPE) --makefile $($(1).MAKEFILE) --module $(link-type-name) \
+	  --type "$($(1).TYPE)" $(addprefix --allowed ,$($(1).ALLOWED)) \
+	  $(addprefix --warn ,$($(1).WARN)) $$(PRIVATE_DEPS)
+endif
+	$$(hide) echo "$($(1).TYPE)" >$$@
+endef
+
+$(foreach lt,$(ALL_LINK_TYPES),\
+  $(eval $(call link-type-file-rule,$(lt))))
+
 # -------------------------------------------------------------------
 # Figure out our module sets.
 #
@@ -819,7 +828,7 @@
 ifdef overridden_packages
 #  old_modules_to_install := $(modules_to_install)
   modules_to_install := \
-      $(filter-out $(foreach p,$(overridden_packages),$(p) %/$(p).apk %/$(p).odex), \
+      $(filter-out $(foreach p,$(overridden_packages),$(p) %/$(p).apk %/$(p).odex %/$(p).vdex), \
           $(modules_to_install))
 endif
 #$(error filtered out
@@ -1017,9 +1026,9 @@
 droid_targets: apps_only
 
 # Combine the NOTICE files for a apps_only build
-$(eval $(call combine-notice-files, \
+$(eval $(call combine-notice-files, html, \
     $(target_notice_file_txt), \
-    $(target_notice_file_html), \
+    $(target_notice_file_html_or_xml), \
     "Notices for files for apps:", \
     $(TARGET_OUT_NOTICE_FILES), \
     $(apps_only_installed_files)))
@@ -1082,7 +1091,7 @@
 # umbrella targets to assist engineers in verifying builds
 .PHONY: java native target host java-host java-target native-host native-target \
         java-host-tests java-target-tests native-host-tests native-target-tests \
-        java-tests native-tests host-tests target-tests tests
+        java-tests native-tests host-tests target-tests tests java-dex
 # some synonyms
 .PHONY: host-java target-java host-native target-native \
         target-java-tests target-native-tests
@@ -1116,16 +1125,6 @@
 .PHONY: findbugs
 findbugs: $(INTERNAL_FINDBUGS_HTML_TARGET) $(INTERNAL_FINDBUGS_XML_TARGET)
 
-.PHONY: clean
-clean:
-	@rm -rf $(OUT_DIR)/*
-	@echo "Entire build directory removed."
-
-.PHONY: clobber
-clobber: clean
-
-# The rules for dataclean and installclean are defined in cleanbuild.mk.
-
 #xxx scrape this from ALL_MODULE_NAME_TAGS
 .PHONY: modules
 modules:
@@ -1133,10 +1132,6 @@
 	@echo "$(call module-names-for-tag-list,$(ALL_MODULE_TAGS))" | \
 	      tr -s ' ' '\n' | sort -u | $(COLUMN)
 
-.PHONY: showcommands
-showcommands:
-	@echo >/dev/null
-
 .PHONY: nothing
 nothing:
 	@echo Successfully read the makefiles.
@@ -1148,8 +1143,4 @@
 ndk: $(SOONG_OUT_DIR)/ndk.timestamp
 .PHONY: ndk
 
-.PHONY: all_link_types
-all_link_types:
-
 endif # KATI
-endif # USE_SOONG_UI
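
Side note (illustration only, not part of the patch): given the example prefix from the link-type comment block above, the new accessor functions decompose it as follows, assuming they are evaluated inside the build where $(space) is defined:

    example_lt := LINK_TYPE:TARGET:_:2ND:STATIC_LIBRARIES:libfoo
    $(info $(call link-type-prefix,$(example_lt)))   # -> TARGET
    $(info $(call link-type-class,$(example_lt)))    # -> STATIC_LIBRARIES
    $(info $(call link-type-name,$(example_lt)))     # -> libfoo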
diff --git a/core/math.mk b/core/math.mk
new file mode 100644
index 0000000..047d046
--- /dev/null
+++ b/core/math.mk
@@ -0,0 +1,77 @@
+#
+# Copyright (C) 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+###########################################################
+# Basic math functions for positive integers <= 100
+#
+# (SDK versions for example)
+###########################################################
+__MATH_NUMBERS :=  1  2  3  4  5  6  7  8  9 10 11 12 13 14 15 16 17 18 19 20 \
+                  21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 \
+                  41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 \
+                  61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 \
+                  81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100
+
+# Returns true if $(1) is a positive integer <= 100, otherwise returns nothing.
+define math_is_number
+$(strip \
+  $(if $(1),,$(error Argument missing)) \
+  $(if $(word 2,$(1)),$(error Multiple words in a single argument: $(1))) \
+  $(if $(filter $(1),$(__MATH_NUMBERS)),true))
+endef
+
+#$(warning true == $(call math_is_number,2))
+#$(warning == $(call math_is_number,foo))
+#$(call math_is_number,1 2)
+#$(call math_is_number,no 2)
+
+define _math_check_valid
+$(if $(call math_is_number,$(1)),,$(error Only positive integers <= 100 are supported (not $(1))))
+endef
+
+#$(call _math_check_valid,0)
+#$(call _math_check_valid,1)
+#$(call _math_check_valid,100)
+#$(call _math_check_valid,101)
+#$(call _math_check_valid,)
+#$(call _math_check_valid,1 2)
+
+# Returns the greater of $1 or $2.
+# If $1 or $2 is not a positive integer <= 100, then an error is generated.
+define math_max
+$(strip $(call _math_check_valid,$(1)) $(call _math_check_valid,$(2)) \
+  $(lastword $(filter $(1) $(2),$(__MATH_NUMBERS))))
+endef
+
+#$(call math_max)
+#$(call math_max,1)
+#$(call math_max,1 2,3)
+#$(warning 1 == $(call math_max,1,1))
+#$(warning 42 == $(call math_max,5,42))
+#$(warning 42 == $(call math_max,42,5))
+
+define math_gt_or_eq
+$(if $(filter $(1),$(call math_max,$(1),$(2))),true)
+endef
+
+#$(warning $(call math_gt_or_eq, 2, 1))
+#$(warning $(call math_gt_or_eq, 1, 1))
+#$(warning $(if $(call math_gt_or_eq, 1, 2),false,true))
+
+# $1 is the variable name to increment
+define inc_and_print
+$(strip $(eval $(1) := $($(1)) .)$(words $($(1))))
+endef
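
A quick usage sketch of the new math helpers, echoing the commented self-tests above (illustration only, not part of the patch):

    $(info $(call math_max,5,42))                    # prints 42
    $(info $(call math_gt_or_eq,26,21))              # prints true
    $(info $(if $(call math_gt_or_eq,8,26),yes,no))  # prints no
    counter :=
    $(info count is now $(call inc_and_print,counter))  # prints "count is now 1"
    $(info count is now $(call inc_and_print,counter))  # prints "count is now 2"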
diff --git a/core/ninja.mk b/core/ninja.mk
deleted file mode 100644
index af2ede0..0000000
--- a/core/ninja.mk
+++ /dev/null
@@ -1,103 +0,0 @@
-include $(BUILD_SYSTEM)/soong.mk
-
-# Modifier goals we don't need to pass to Ninja.
-.PHONY : $(NINJA_EXCLUDE_GOALS)
-
-define replace_space_and_slash
-$(subst /,_,$(subst $(space),_,$(sort $1)))
-endef
-
-KATI_NINJA_SUFFIX := -$(TARGET_PRODUCT)
-ifneq ($(KATI_GOALS),)
-KATI_NINJA_SUFFIX := $(KATI_NINJA_SUFFIX)-$(call replace_space_and_slash,$(KATI_GOALS))
-endif
-ifneq ($(ONE_SHOT_MAKEFILE),)
-KATI_NINJA_SUFFIX := $(KATI_NINJA_SUFFIX)-mmm-$(call replace_space_and_slash,$(ONE_SHOT_MAKEFILE))
-endif
-
-my_checksum_suffix :=
-my_ninja_suffix_too_long := $(filter 1, $(shell v='$(KATI_NINJA_SUFFIX)' && echo $$(($${$(pound)v} > 64))))
-ifneq ($(my_ninja_suffix_too_long),)
-# Replace the suffix with a checksum if it gets too long.
-my_checksum_suffix := $(KATI_NINJA_SUFFIX)
-KATI_NINJA_SUFFIX := -$(word 1, $(shell echo $(my_checksum_suffix) | $(MD5SUM)))
-endif
-
-KATI_BUILD_NINJA := $(OUT_DIR)/build$(KATI_NINJA_SUFFIX).ninja
-KATI_ENV_SH := $(OUT_DIR)/env$(KATI_NINJA_SUFFIX).sh
-
-# Write out a file mapping checksum to the real suffix.
-ifneq ($(my_checksum_suffix),)
-my_ninja_suffix_file := $(basename $(KATI_BUILD_NINJA)).suf
-$(shell mkdir -p $(dir $(my_ninja_suffix_file)) && \
-    echo $(my_checksum_suffix) > $(my_ninja_suffix_file))
-endif
-
-ifeq (,$(NINJA_STATUS))
-NINJA_STATUS := [%p %f/%t]$(space)
-endif
-
-NINJA_EXTRA_ARGS :=
-
-ifneq (,$(filter showcommands,$(ORIGINAL_MAKECMDGOALS)))
-NINJA_EXTRA_ARGS += "-v"
-endif
-
-# Make multiple rules to generate the same target an error instead of
-# proceeding with undefined behavior.
-NINJA_EXTRA_ARGS += -w dupbuild=err
-
-ifneq ($(filter-out false,$(USE_GOMA)),)
-KATI_MAKEPARALLEL := $(MAKEPARALLEL)
-# Ninja runs remote jobs (i.e., commands which contain gomacc) with
-# this parallelism. Note the parallelism of all other jobs is still
-# limited by the -j flag passed to GNU make.
-NINJA_REMOTE_NUM_JOBS ?= 500
-NINJA_EXTRA_ARGS += -j$(NINJA_REMOTE_NUM_JOBS)
-else
-NINJA_MAKEPARALLEL := $(MAKEPARALLEL) --ninja
-
-# We never want Kati to see MAKEFLAGS, as forcefully overriding variables is
-# terrible. The variables in MAKEFLAGS are still available in the environment,
-# so if part of the build wants input from the user, it should be explicitly
-# checking for an environment variable or using ?=
-#
-# makeparallel already clears MAKEFLAGS, so it's not necessary in the GOMA case
-KATI_MAKEPARALLEL := MAKEFLAGS=
-endif
-
-NINJA_ARGS += $(NINJA_EXTRA_ARGS)
-
-COMBINED_BUILD_NINJA := $(OUT_DIR)/combined$(KATI_NINJA_SUFFIX).ninja
-
-$(COMBINED_BUILD_NINJA): $(KATI_BUILD_NINJA) FORCE
-	$(hide) echo "builddir = $(OUT_DIR)" > $(COMBINED_BUILD_NINJA)
-	$(hide) echo "include $(KATI_BUILD_NINJA)" >> $(COMBINED_BUILD_NINJA)
-	$(hide) echo "include $(SOONG_BUILD_NINJA)" >> $(COMBINED_BUILD_NINJA)
-	$(hide) echo "build $(COMBINED_BUILD_NINJA): phony $(SOONG_BUILD_NINJA)" >> $(COMBINED_BUILD_NINJA)
-
-$(sort $(DEFAULT_GOAL) $(ANDROID_GOALS)) : ninja_wrapper
-	@#empty
-
-.PHONY: ninja_wrapper
-ninja_wrapper: $(COMBINED_BUILD_NINJA) $(MAKEPARALLEL)
-	@echo Starting build with ninja
-	+$(hide) export NINJA_STATUS="$(NINJA_STATUS)" && source $(KATI_ENV_SH) && exec $(NINJA_MAKEPARALLEL) $(NINJA) -d keepdepfile $(NINJA_GOALS) -C $(TOP) -f $(COMBINED_BUILD_NINJA) $(NINJA_ARGS)
-
-# Dummy Android.mk and CleanSpec.mk files so that kati won't recurse into the
-# out directory
-DUMMY_OUT_MKS := $(OUT_DIR)/Android.mk $(OUT_DIR)/CleanSpec.mk
-$(DUMMY_OUT_MKS):
-	@mkdir -p $(dir $@)
-	$(hide) echo '# This file prevents findleaves.py from traversing this directory further' >$@
-
-KATI_FIND_EMULATOR := --use_find_emulator
-ifeq ($(KATI_EMULATE_FIND),false)
-  KATI_FIND_EMULATOR :=
-endif
-$(KATI_BUILD_NINJA): $(CKATI) $(MAKEPARALLEL) $(DUMMY_OUT_MKS) run_soong FORCE
-	@echo Running kati to generate build$(KATI_NINJA_SUFFIX).ninja...
-	+$(hide) $(KATI_MAKEPARALLEL) $(CKATI) --ninja --ninja_dir=$(OUT_DIR) --ninja_suffix=$(KATI_NINJA_SUFFIX) --regen --ignore_optional_include=$(OUT_DIR)/%.P --detect_android_echo $(KATI_FIND_EMULATOR) -f build/core/main.mk $(KATI_GOALS) --gen_all_targets BUILDING_WITH_NINJA=true SOONG_ANDROID_MK=$(SOONG_ANDROID_MK) SOONG_MAKEVARS_MK=$(SOONG_MAKEVARS_MK)
-
-.PHONY: FORCE
-FORCE:
diff --git a/core/ninja_config.mk b/core/ninja_config.mk
index f456b8b..ca2dcee 100644
--- a/core/ninja_config.mk
+++ b/core/ninja_config.mk
@@ -7,18 +7,14 @@
 KATI_OUTPUT_PATTERNS := $(OUT_DIR)/build%.ninja $(OUT_DIR)/ninja%.sh
 
 # Modifier goals we don't need to pass to Ninja.
-NINJA_EXCLUDE_GOALS := showcommands all dist
-.PHONY : $(NINJA_EXCLUDE_GOALS)
+NINJA_EXCLUDE_GOALS := all dist APP-% PRODUCT-%
 
 # A list of goals which affect parsing of makefiles and we need to pass to Kati.
 PARSE_TIME_MAKE_GOALS := \
 	$(PARSE_TIME_MAKE_GOALS) \
 	$(dont_bother_goals) \
 	all \
-	APP-% \
-	DUMP_% \
 	ECLIPSE-% \
-	PRODUCT-% \
 	AUX-% \
 	boottarball-nodeps \
 	brillo_tests \
@@ -67,5 +63,8 @@
     $(sort $(ORIGINAL_MAKECMDGOALS) $(MAKECMDGOALS)))
 # Goals we need to pass to Ninja.
 NINJA_GOALS := $(filter-out $(NINJA_EXCLUDE_GOALS), $(ANDROID_GOALS))
+ifndef NINJA_GOALS
+  NINJA_GOALS := droid
+endif
 # Goals we need to pass to Kati.
-KATI_GOALS := $(filter $(PARSE_TIME_MAKE_GOALS),  $(ANDROID_GOALS))
+KATI_GOALS := $(filter $(PARSE_TIME_MAKE_GOALS), $(ANDROID_GOALS))
diff --git a/core/notice_files.mk b/core/notice_files.mk
index 447e59b..f850fff 100644
--- a/core/notice_files.mk
+++ b/core/notice_files.mk
@@ -14,6 +14,14 @@
   notice_file :=
 endif
 
+# Soong generates stub libraries that don't need NOTICE files
+ifdef LOCAL_NO_NOTICE_FILE
+  ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
+    $(call pretty-error,LOCAL_NO_NOTICE_FILE should not be used by Android.mk files)
+  endif
+  notice_file :=
+endif
+
 ifeq ($(LOCAL_MODULE_CLASS),NOTICE_FILES)
 # If this is a NOTICE-only module, we don't include base_rule.mk,
 # so my_prefix is not set at this point.
diff --git a/core/package.mk b/core/package.mk
index 4fe058d..f3713fc 100644
--- a/core/package.mk
+++ b/core/package.mk
@@ -4,13 +4,15 @@
 
 $(call record-module-type,PACKAGE)
 
-ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
-LOCAL_MULTILIB := first
-endif
-
 my_prefix := TARGET_
 include $(BUILD_SYSTEM)/multilib.mk
 
+ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
+  ifneq ($(TARGET_SUPPORTS_64_BIT_APPS)|$(my_module_multilib),|64)
+    my_module_multilib := first
+  endif
+endif
+
 ifeq ($(TARGET_SUPPORTS_32_BIT_APPS)|$(TARGET_SUPPORTS_64_BIT_APPS),true|true)
   # packages default to building for either architecture,
   # the preferred one if it's supported, otherwise the non-preferred.
diff --git a/core/package_internal.mk b/core/package_internal.mk
index ca12437..6255ef1 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -92,6 +92,7 @@
   LOCAL_RESOURCE_DIR := $(LOCAL_PATH)/res
 else
   need_compile_res := true
+  LOCAL_RESOURCE_DIR := $(foreach d,$(LOCAL_RESOURCE_DIR),$(call clean-path,$(d)))
 endif
 
 package_resource_overlays := $(strip \
@@ -100,7 +101,34 @@
     $(wildcard $(foreach dir, $(DEVICE_PACKAGE_OVERLAYS), \
       $(addprefix $(dir)/, $(LOCAL_RESOURCE_DIR)))))
 
+enforce_rro_enabled :=
+ifneq ($(PRODUCT_ENFORCE_RRO_TARGETS),)
+  ifneq ($(package_resource_overlays),)
+    ifeq ($(PRODUCT_ENFORCE_RRO_TARGETS),*)
+      enforce_rro_enabled := true
+    else ifneq (,$(filter $(LOCAL_PACKAGE_NAME), $(PRODUCT_ENFORCE_RRO_TARGETS)))
+      enforce_rro_enabled := true
+    endif
+  endif
+
+  ifdef enforce_rro_enabled
+    ifeq (,$(LOCAL_MODULE_PATH))
+      ifeq (true,$(LOCAL_PROPRIETARY_MODULE))
+        enforce_rro_enabled :=
+      else ifeq (true,$(LOCAL_OEM_MODULE))
+        enforce_rro_enabled :=
+      else ifeq (true,$(LOCAL_ODM_MODULE))
+        enforce_rro_enabled :=
+      endif
+    else ifeq ($(filter $(TARGET_OUT)/%,$(LOCAL_MODULE_PATH)),)
+      enforce_rro_enabled :=
+    endif
+  endif
+endif
+
+ifndef enforce_rro_enabled
 LOCAL_RESOURCE_DIR := $(package_resource_overlays) $(LOCAL_RESOURCE_DIR)
+endif
 
 all_assets := $(strip \
     $(foreach dir, $(LOCAL_ASSET_DIR), \
@@ -340,8 +368,23 @@
 ifeq ($(need_compile_res),true)
 ifdef LOCAL_USE_AAPT2
 my_compiled_res_base_dir := $(intermediates)/flat-res
+renderscript_target_api :=
+ifneq (,$(LOCAL_RENDERSCRIPT_TARGET_API))
+renderscript_target_api := $(LOCAL_RENDERSCRIPT_TARGET_API)
+else
+ifneq (,$(LOCAL_SDK_VERSION))
+# Set target-api for LOCAL_SDK_VERSIONs other than current.
+ifneq (,$(filter-out current system_current test_current, $(LOCAL_SDK_VERSION)))
+renderscript_target_api := $(LOCAL_SDK_VERSION)
+endif
+endif  # LOCAL_SDK_VERSION is set
+endif  # LOCAL_RENDERSCRIPT_TARGET_API is set
+ifneq (,$(renderscript_target_api))
+ifneq ($(call math_gt_or_eq,$(renderscript_target_api),21),true)
 my_generated_res_dirs := $(rs_generated_res_dir)
 my_generated_res_dirs_deps := $(RenderScript_file_stamp)
+endif  # renderscript_target_api < 21
+endif  # renderscript_target_api is set
 # Add AAPT2 link specific flags.
 $(my_res_package): PRIVATE_AAPT_FLAGS := $(LOCAL_AAPT_FLAGS) --no-static-lib-packages
 include $(BUILD_SYSTEM)/aapt2.mk
@@ -457,7 +500,7 @@
 ifneq ($(full_classes_jar),)
 $(LOCAL_BUILT_MODULE): PRIVATE_DEX_FILE := $(built_dex)
 # Use the jarjar processed archive as the initial package file.
-$(LOCAL_BUILT_MODULE): PRIVATE_SOURCE_ARCHIVE := $(full_classes_jarjar_jar)
+$(LOCAL_BUILT_MODULE): PRIVATE_SOURCE_ARCHIVE := $(full_classes_pre_proguard_jar)
 $(LOCAL_BUILT_MODULE): $(built_dex)
 else
 $(LOCAL_BUILT_MODULE): PRIVATE_DEX_FILE :=
@@ -593,7 +636,7 @@
 # LOCAL_PACKAGE_SPLITS is a list of resource labels.
 # aapt will convert commas inside resource labels to underscores in the file names.
 my_split_suffixes := $(subst $(comma),_,$(LOCAL_PACKAGE_SPLITS))
-built_apk_splits := $(foreach s,$(my_split_suffixes),$(built_module_path)/package_$(s).apk)
+built_apk_splits := $(foreach s,$(my_split_suffixes),$(intermediates)/package_$(s).apk)
 installed_apk_splits := $(foreach s,$(my_split_suffixes),$(my_module_path)/$(LOCAL_MODULE)_$(s).apk)
 
 # The splits should have been built in the same command building the base apk.
@@ -603,7 +646,7 @@
 # That way the build system will rerun the aapt after the user changes the splitting parameters.
 $(built_apk_splits): PRIVATE_PRIVATE_KEY := $(private_key)
 $(built_apk_splits): PRIVATE_CERTIFICATE := $(certificate)
-$(built_apk_splits) : $(built_module_path)/%.apk : $(LOCAL_BUILT_MODULE)
+$(built_apk_splits) : $(intermediates)/%.apk : $(LOCAL_BUILT_MODULE)
 	$(hide) if [ ! -f $@ ]; then \
 	  echo 'No $@ generated, check your apk splitting parameters.' 1>&2; \
 	  rm $<; exit 1; \
@@ -611,14 +654,14 @@
 	$(sign-package)
 
 # Rules to install the splits
-$(installed_apk_splits) : $(my_module_path)/$(LOCAL_MODULE)_%.apk : $(built_module_path)/package_%.apk
+$(installed_apk_splits) : $(my_module_path)/$(LOCAL_MODULE)_%.apk : $(intermediates)/package_%.apk
 	@echo "Install: $@"
 	$(copy-file-to-new-target)
 
 # Register the additional built and installed files.
 ALL_MODULES.$(my_register_name).INSTALLED += $(installed_apk_splits)
 ALL_MODULES.$(my_register_name).BUILT_INSTALLED += \
-  $(foreach s,$(my_split_suffixes),$(built_module_path)/package_$(s).apk:$(my_module_path)/$(LOCAL_MODULE)_$(s).apk)
+  $(foreach s,$(my_split_suffixes),$(intermediates)/package_$(s).apk:$(my_module_path)/$(LOCAL_MODULE)_$(s).apk)
 
 # Make sure to install the splits when you run "make <module_name>".
 $(my_all_targets): $(installed_apk_splits)
@@ -628,7 +671,7 @@
 $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
   $(eval my_compat_dist_$(suite) := $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
     $(foreach s,$(my_split_suffixes),\
-      $(built_module_path)/package_$(s).apk:$(dir)/$(LOCAL_MODULE)_$(s).apk))))
+      $(intermediates)/package_$(s).apk:$(dir)/$(LOCAL_MODULE)_$(s).apk))))
 
 $(call create-suite-dependencies)
 
@@ -645,3 +688,27 @@
 
 # Reset internal variables.
 all_res_assets :=
+
+ifdef enforce_rro_enabled
+  ifdef LOCAL_EXPORT_PACKAGE_RESOURCES
+    enforce_rro_use_res_lib := true
+  else
+    enforce_rro_use_res_lib := false
+  endif
+
+  ifdef LOCAL_MANIFEST_PACKAGE_NAME
+    enforce_rro_is_manifest_package_name := true
+    enforce_rro_manifest_package_info := $(LOCAL_MANIFEST_PACKAGE_NAME)
+  else
+    enforce_rro_is_manifest_package_name := false
+    enforce_rro_manifest_package_info := $(full_android_manifest)
+  endif
+
+$(call append_enforce_rro_sources, \
+    $(my_register_name), \
+    $(enforce_rro_is_manifest_package_name), \
+    $(enforce_rro_manifest_package_info), \
+    $(enforce_rro_use_res_lib), \
+    $(package_resource_overlays) \
+    )
+endif  # enforce_rro_enabled
diff --git a/core/pdk_fusion_modules.mk b/core/pdk_fusion_modules.mk
index 49b30dc..0c03f37 100644
--- a/core/pdk_fusion_modules.mk
+++ b/core/pdk_fusion_modules.mk
@@ -23,7 +23,7 @@
 LOCAL_BUILT_MODULE_STEM:=$(7)
 LOCAL_MODULE_SUFFIX:=$(suffix $(7))
 LOCAL_PRIVILEGED_MODULE:=$(8)
-LOCAL_PROPRIETARY_MODULE:=$(9)
+LOCAL_VENDOR_MODULE:=$(9)
 LOCAL_MODULE_TARGET_ARCH:=$(10)
 LOCAL_REPLACE_PREBUILT_APK_INSTALLED:=$(11)
 LOCAL_CERTIFICATE:=PRESIGNED
@@ -72,7 +72,7 @@
     $(PDK.DEXPREOPT.$(a).DEX_PREOPT_FLAGS),\
     package.apk,\
     $(PDK.DEXPREOPT.$(a).PRIVILEGED_MODULE),\
-    $(PDK.DEXPREOPT.$(a).PROPRIETARY_MODULE),\
+    $(PDK.DEXPREOPT.$(a).VENDOR_MODULE),\
     $(PDK.DEXPREOPT.$(a).TARGET_ARCH),\
     $(_pdk_fusion_intermediates)/$(PDK.DEXPREOPT.$(a).STRIPPED_SRC),\
     )))
diff --git a/core/prebuilt_internal.mk b/core/prebuilt_internal.mk
index 997f971..9f4c903 100644
--- a/core/prebuilt_internal.mk
+++ b/core/prebuilt_internal.mk
@@ -79,9 +79,13 @@
 endif
 
 ifeq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
-ifeq ($(prebuilt_module_is_a_library),true)
-SOONG_ALREADY_CONV := $(SOONG_ALREADY_CONV) $(LOCAL_MODULE)
-endif
+  ifeq ($(prebuilt_module_is_a_library),true)
+    SOONG_ALREADY_CONV := $(SOONG_ALREADY_CONV) $(LOCAL_MODULE)
+  endif
+
+  ifdef LOCAL_USE_VNDK
+    SPLIT_VENDOR.$(LOCAL_MODULE_CLASS).$(patsubst %.vendor,%,$(LOCAL_MODULE)) := 1
+  endif
 endif
 
 # Don't install static libraries by default.
@@ -126,27 +130,38 @@
 
 ifdef prebuilt_module_is_a_library
 export_includes := $(intermediates)/export_includes
-$(export_includes): PRIVATE_EXPORT_C_INCLUDE_DIRS := $(LOCAL_EXPORT_C_INCLUDE_DIRS)
+export_cflags := $(foreach d,$(LOCAL_EXPORT_C_INCLUDE_DIRS),-I $(d))
+# Soong exports cflags instead of include dirs, so that -isystem can be included.
+ifeq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
+export_cflags += $(LOCAL_EXPORT_CFLAGS)
+else ifdef LOCAL_EXPORT_CFLAGS
+$(call pretty-error,LOCAL_EXPORT_CFLAGS can only be used by Soong, use LOCAL_EXPORT_C_INCLUDE_DIRS instead)
+endif
+$(export_includes): PRIVATE_EXPORT_CFLAGS := $(export_cflags)
 $(export_includes): $(LOCAL_EXPORT_C_INCLUDE_DEPS)
 	@echo Export includes file: $< -- $@
 	$(hide) mkdir -p $(dir $@) && rm -f $@
-ifdef LOCAL_EXPORT_C_INCLUDE_DIRS
-	$(hide) for d in $(PRIVATE_EXPORT_C_INCLUDE_DIRS); do \
-	        echo "-I $$d" >> $@; \
-	        done
+ifdef export_cflags
+	$(hide) echo "$(PRIVATE_EXPORT_CFLAGS)" >$@
 else
 	$(hide) touch $@
 endif
+export_cflags :=
 
-my_link_type := $(intermediates)/link_type
-$(my_link_type): PRIVATE_LINK_TYPE := native:$(if $(LOCAL_SDK_VERSION),ndk,platform)
-$(eval $(call link-type-partitions,$(my_link_type)))
-$(my_link_type):
-	@echo Check module type: $@
-	$(hide) mkdir -p $(dir $@) && rm -f $@
-	$(hide) echo "$(PRIVATE_LINK_TYPE)" >$@
+ifdef LOCAL_SDK_VERSION
+my_link_type := native:ndk
+else ifdef LOCAL_USE_VNDK
+my_link_type := native:vendor
+else
+my_link_type := native:platform
+endif
 
-$(LOCAL_BUILT_MODULE) : | $(export_includes) $(my_link_type)
+# TODO: check dependencies of prebuilt files
+my_link_deps :=
+
+my_2nd_arch_prefix := $(LOCAL_2ND_ARCH_VAR_PREFIX)
+my_common :=
+include $(BUILD_SYSTEM)/link_type.mk
 endif  # prebuilt_module_is_a_library
 
 # The real dependency will be added after all Android.mks are loaded and the install paths
@@ -156,6 +171,14 @@
 my_shared_libraries := $(LOCAL_SHARED_LIBRARIES)
 # Extra shared libraries introduced by LOCAL_CXX_STL.
 include $(BUILD_SYSTEM)/cxx_stl_setup.mk
+ifdef LOCAL_USE_VNDK
+  ifeq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
+    my_shared_libraries := $(addsuffix .vendor,$(my_shared_libraries))
+  else
+    my_shared_libraries := $(foreach l,$(my_shared_libraries),\
+      $(if $(SPLIT_VENDOR.SHARED_LIBRARIES.$(l)),$(l).vendor,$(l)))
+  endif
+endif
 $(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)DEPENDENCIES_ON_SHARED_LIBRARIES += \
   $(my_register_name):$(LOCAL_INSTALLED_MODULE):$(subst $(space),$(comma),$(my_shared_libraries))
 
@@ -347,7 +370,7 @@
 ## Install split apks.
 ifdef LOCAL_PACKAGE_SPLITS
 # LOCAL_PACKAGE_SPLITS is a list of apks to be installed.
-built_apk_splits := $(addprefix $(built_module_path)/,$(notdir $(LOCAL_PACKAGE_SPLITS)))
+built_apk_splits := $(addprefix $(intermediates)/,$(notdir $(LOCAL_PACKAGE_SPLITS)))
 installed_apk_splits := $(addprefix $(my_module_path)/,$(notdir $(LOCAL_PACKAGE_SPLITS)))
 
 # Rules to sign the split apks.
@@ -360,19 +383,19 @@
 $(built_apk_splits) : $(LOCAL_CERTIFICATE).pk8 $(LOCAL_CERTIFICATE).x509.pem
 $(built_apk_splits) : PRIVATE_PRIVATE_KEY := $(LOCAL_CERTIFICATE).pk8
 $(built_apk_splits) : PRIVATE_CERTIFICATE := $(LOCAL_CERTIFICATE).x509.pem
-$(built_apk_splits) : $(built_module_path)/%.apk : $(my_src_dir)/%.apk
+$(built_apk_splits) : $(intermediates)/%.apk : $(my_src_dir)/%.apk
 	$(copy-file-to-new-target)
 	$(sign-package)
 
 # Rules to install the split apks.
-$(installed_apk_splits) : $(my_module_path)/%.apk : $(built_module_path)/%.apk
+$(installed_apk_splits) : $(my_module_path)/%.apk : $(intermediates)/%.apk
 	@echo "Install: $@"
 	$(copy-file-to-new-target)
 
 # Register the additional built and installed files.
 ALL_MODULES.$(my_register_name).INSTALLED += $(installed_apk_splits)
 ALL_MODULES.$(my_register_name).BUILT_INSTALLED += \
-  $(foreach s,$(LOCAL_PACKAGE_SPLITS),$(built_module_path)/$(notdir $(s)):$(my_module_path)/$(notdir $(s)))
+  $(foreach s,$(LOCAL_PACKAGE_SPLITS),$(intermediates)/$(notdir $(s)):$(my_module_path)/$(notdir $(s)))
 
 # Make sure to install the splits when you run "make <module_name>".
 $(my_all_targets): $(installed_apk_splits)
@@ -427,28 +450,42 @@
 
 ifeq ($(LOCAL_MODULE_CLASS),JAVA_LIBRARIES)
 my_src_jar := $(my_prebuilt_src_file)
-ifeq ($(LOCAL_IS_HOST_MODULE),)
+
+ifdef LOCAL_IS_HOST_MODULE
+# for host java libraries deps should be in the common dir, so we make a copy in
+# the common dir.
+common_classes_jar := $(intermediates.COMMON)/classes.jar
+
+$(common_classes_jar): PRIVATE_MODULE := $(LOCAL_MODULE)
+$(common_classes_jar): PRIVATE_PREFIX := $(my_prefix)
+
+$(common_classes_jar) : $(my_src_jar)
+	$(transform-prebuilt-to-target)
+
+else # !LOCAL_IS_HOST_MODULE
 # for target java libraries, the LOCAL_BUILT_MODULE is in a product-specific dir,
 # while the deps should be in the common dir, so we make a copy in the common dir.
 common_classes_jar := $(intermediates.COMMON)/classes.jar
+common_classes_pre_proguard_jar := $(intermediates.COMMON)/classes-pre-proguard.jar
 common_javalib_jar := $(intermediates.COMMON)/javalib.jar
 
-$(common_classes_jar) $(common_javalib_jar): PRIVATE_MODULE := $(LOCAL_MODULE)
+$(common_classes_jar) $(common_classes_pre_proguard_jar) $(common_javalib_jar): PRIVATE_MODULE := $(LOCAL_MODULE)
+$(common_classes_jar) $(common_classes_pre_proguard_jar) $(common_javalib_jar): PRIVATE_PREFIX := $(my_prefix)
 
-my_link_type := $(intermediates.COMMON)/link_type
 ifeq ($(LOCAL_SDK_VERSION),system_current)
-$(my_link_type): PRIVATE_LINK_TYPE := java:system
+my_link_type := java:system
 else ifneq ($(LOCAL_SDK_VERSION),)
-$(my_link_type): PRIVATE_LINK_TYPE := java:sdk
+my_link_type := java:sdk
 else
-$(my_link_type): PRIVATE_LINK_TYPE := java:platform
+my_link_type := java:platform
 endif
-$(eval $(call link-type-partitions,$(my_link_type)))
-$(my_link_type):
-	@echo Check module type: $@
-	$(hide) mkdir -p $(dir $@) && rm -f $@
-	$(hide) echo "$(PRIVATE_LINK_TYPE)" >$@
-$(LOCAL_BUILT_MODULE): $(my_link_type)
+
+# TODO: check dependencies of prebuilt files
+my_link_deps :=
+
+my_2nd_arch_prefix := $(LOCAL_2ND_ARCH_VAR_PREFIX)
+my_common := COMMON
+include $(BUILD_SYSTEM)/link_type.mk
 
 ifeq ($(prebuilt_module_is_dex_javalib),true)
 # For prebuilt shared Java library we don't have classes.jar.
@@ -463,7 +500,7 @@
 my_src_jar := $(intermediates.COMMON)/aar/classes.jar
 
 $(my_src_jar) : $(my_src_aar)
-	$(hide) rm -rf $(dir $@) && mkdir -p $(dir $@)
+	$(hide) rm -rf $(dir $@) && mkdir -p $(dir $@) $(dir $@)/res
 	$(hide) unzip -qo -d $(dir $@) $<
 	# Make sure the extracted classes.jar has a new timestamp.
 	$(hide) touch $@
@@ -473,6 +510,9 @@
 $(common_classes_jar) : $(my_src_jar)
 	$(transform-prebuilt-to-target)
 
+$(common_classes_pre_proguard_jar) : $(my_src_jar)
+	$(transform-prebuilt-to-target)
+
 $(common_javalib_jar) : $(common_classes_jar)
 	$(transform-prebuilt-to-target)
 
@@ -480,12 +520,34 @@
 
 ifdef LOCAL_USE_AAPT2
 ifneq ($(my_src_aar),)
+LOCAL_SDK_RES_VERSION:=$(strip $(LOCAL_SDK_RES_VERSION))
+ifeq ($(LOCAL_SDK_RES_VERSION),)
+  LOCAL_SDK_RES_VERSION:=$(LOCAL_SDK_VERSION)
+endif
+
+framework_res_package_export :=
+framework_res_package_export_deps :=
+# Please refer to package.mk
+ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
+ifneq ($(filter-out current system_current test_current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_APPS),$(filter current system_current test_current,$(LOCAL_SDK_RES_VERSION))),)
+framework_res_package_export := \
+    $(HISTORICAL_SDK_VERSIONS_ROOT)/$(LOCAL_SDK_RES_VERSION)/android.jar
+framework_res_package_export_deps := $(framework_res_package_export)
+else
+framework_res_package_export := \
+    $(call intermediates-dir-for,APPS,framework-res,,COMMON)/package-export.apk
+framework_res_package_export_deps := \
+    $(dir $(framework_res_package_export))src/R.stamp
+endif
+endif
+
 my_res_package := $(intermediates.COMMON)/package-res.apk
 
 # We need only a few of the PRIVATE variables and aapt2.mk input variables. Reset the unnecessary ones.
 $(my_res_package): PRIVATE_AAPT2_CFLAGS :=
+$(my_res_package): PRIVATE_AAPT_FLAGS := --static-lib --no-static-lib-packages
 $(my_res_package): PRIVATE_ANDROID_MANIFEST := $(intermediates.COMMON)/aar/AndroidManifest.xml
-$(my_res_package): PRIVATE_AAPT_INCLUDES :=
+$(my_res_package): PRIVATE_AAPT_INCLUDES := $(framework_res_package_export)
 $(my_res_package): PRIVATE_SOURCE_INTERMEDIATES_DIR :=
 $(my_res_package): PRIVATE_PROGUARD_OPTIONS_FILE :=
 $(my_res_package): PRIVATE_DEFAULT_APP_TARGET_SDK :=
@@ -493,11 +555,12 @@
 $(my_res_package): PRIVATE_PRODUCT_AAPT_CONFIG :=
 $(my_res_package): PRIVATE_PRODUCT_AAPT_PREF_CONFIG :=
 $(my_res_package): PRIVATE_TARGET_AAPT_CHARACTERISTICS :=
+$(my_res_package) : $(framework_res_package_export_deps)
 
 full_android_manifest :=
 my_res_resources :=
 my_overlay_resources :=
-my_compiled_res_base_dir :=
+my_compiled_res_base_dir := $(intermediates.COMMON)/flat-res
 R_file_stamp :=
 proguard_options_file :=
 my_generated_res_dirs := $(intermediates.COMMON)/aar/res
@@ -516,6 +579,7 @@
 
 ifneq ($(prebuilt_module_is_dex_javalib),true)
 
+ifdef LOCAL_JACK_ENABLED
 # We may be building classes.jack from a host jar for host dalvik Java library.
 $(intermediates.COMMON)/classes.jack : PRIVATE_JACK_FLAGS:=$(LOCAL_JACK_FLAGS)
 $(intermediates.COMMON)/classes.jack : PRIVATE_JACK_MIN_SDK_VERSION := $(if $(strip $(LOCAL_MIN_SDK_VERSION)),$(LOCAL_MIN_SDK_VERSION),1)
@@ -530,7 +594,7 @@
 # always rebuilt.
 $(intermediates.COMMON)/classes.dex.toc: $(intermediates.COMMON)/classes.jack
 	touch $@
-
+endif # LOCAL_JACK_ENABLED
 endif # ! prebuilt_module_is_dex_javalib
 endif # JAVA_LIBRARIES
 
diff --git a/core/product.mk b/core/product.mk
index fffd7c8..34cd21c 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -94,6 +94,7 @@
     PRODUCT_EXTRA_RECOVERY_KEYS \
     PRODUCT_PACKAGE_OVERLAYS \
     DEVICE_PACKAGE_OVERLAYS \
+    PRODUCT_ENFORCE_RRO_TARGETS \
     PRODUCT_SDK_ATREE_FILES \
     PRODUCT_SDK_ADDON_NAME \
     PRODUCT_SDK_ADDON_COPY_FILES \
@@ -111,7 +112,9 @@
     PRODUCT_SUPPORTS_VERITY_FEC \
     PRODUCT_OEM_PROPERTIES \
     PRODUCT_SYSTEM_PROPERTY_BLACKLIST \
+    PRODUCT_SYSTEM_SERVER_APPS \
     PRODUCT_SYSTEM_SERVER_JARS \
+    PRODUCT_DEXPREOPT_SPEED_APPS \
     PRODUCT_VBOOT_SIGNING_KEY \
     PRODUCT_VBOOT_SIGNING_SUBKEY \
     PRODUCT_VERITY_SIGNING_KEY \
@@ -129,6 +132,7 @@
     VENDOR_EXCEPTION_PATHS \
     PRODUCT_ART_USE_READ_BARRIER \
     PRODUCT_IOT \
+    PRODUCT_SYSTEM_HEADROOM \
 
 
 
diff --git a/core/product_config.mk b/core/product_config.mk
index 7a2d9cb..2b2d7b3 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -88,6 +88,7 @@
 # Provide "PRODUCT-<prodname>-<goal>" targets, which lets you build
 # a particular configuration without needing to set up the environment.
 #
+ifndef KATI
 product_goals := $(strip $(filter PRODUCT-%,$(MAKECMDGOALS)))
 ifdef product_goals
   # Scrape the product and build names out of the goal,
@@ -113,54 +114,42 @@
     $(error "tests" has been deprecated as a build variant. Use it as a build goal instead.)
   endif
 
-  # The build server wants to do make PRODUCT-dream-installclean
-  # which really means TARGET_PRODUCT=dream make installclean.
+  # The build server wants to do make PRODUCT-dream-sdk
+  # which really means TARGET_PRODUCT=dream make sdk.
   ifneq ($(filter-out $(INTERNAL_VALID_VARIANTS),$(TARGET_BUILD_VARIANT)),)
-    MAKECMDGOALS := $(MAKECMDGOALS) $(TARGET_BUILD_VARIANT)
+    override MAKECMDGOALS := $(MAKECMDGOALS) $(TARGET_BUILD_VARIANT)
     TARGET_BUILD_VARIANT := userdebug
     default_goal_substitution :=
   else
-    default_goal_substitution := $(DEFAULT_GOAL)
+    default_goal_substitution := droid
   endif
 
   # Replace the PRODUCT-* goal with the build goal that it refers to.
   # Note that this will ensure that it appears in the same relative
   # position, in case it matters.
-  #
-  # Note that modifying this will not affect the goals that make will
-  # attempt to build, but it's important because we inspect this value
-  # in certain situations (like for "make sdk").
-  #
-  MAKECMDGOALS := $(patsubst $(goal_name),$(default_goal_substitution),$(MAKECMDGOALS))
-
-  # Define a rule for the PRODUCT-* goal, and make it depend on the
-  # patched-up command-line goals as well as any other goals that we
-  # want to force.
-  #
-.PHONY: $(goal_name)
-$(goal_name): $(MAKECMDGOALS)
+  override MAKECMDGOALS := $(patsubst $(goal_name),$(default_goal_substitution),$(MAKECMDGOALS))
 endif
+endif # !KATI
 # else: Use the value set in the environment or buildspec.mk.
 
 # ---------------------------------------------------------------
 # Provide "APP-<appname>" targets, which lets you build
 # an unbundled app.
 #
+ifndef KATI
 unbundled_goals := $(strip $(filter APP-%,$(MAKECMDGOALS)))
 ifdef unbundled_goals
   ifneq ($(words $(unbundled_goals)),1)
     $(error Only one APP-* goal may be specified; saw "$(unbundled_goals)")
   endif
   TARGET_BUILD_APPS := $(strip $(subst -, ,$(patsubst APP-%,%,$(unbundled_goals))))
-  ifneq ($(filter $(DEFAULT_GOAL),$(MAKECMDGOALS)),)
-    MAKECMDGOALS := $(patsubst $(unbundled_goals),,$(MAKECMDGOALS))
+  ifneq ($(filter droid,$(MAKECMDGOALS)),)
+    override MAKECMDGOALS := $(patsubst $(unbundled_goals),,$(MAKECMDGOALS))
   else
-    MAKECMDGOALS := $(patsubst $(unbundled_goals),$(DEFAULT_GOAL),$(MAKECMDGOALS))
+    override MAKECMDGOALS := $(patsubst $(unbundled_goals),droid,$(MAKECMDGOALS))
   endif
-
-.PHONY: $(unbundled_goals)
-$(unbundled_goals): $(MAKECMDGOALS)
 endif # unbundled_goals
+endif
 
 # Default to building dalvikvm on hosts that support it...
 ifeq ($(HOST_OS),linux)
@@ -268,6 +257,8 @@
 # A list of module names of BOOTCLASSPATH (jar files)
 PRODUCT_BOOT_JARS := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_BOOT_JARS))
 PRODUCT_SYSTEM_SERVER_JARS := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_SERVER_JARS))
+PRODUCT_SYSTEM_SERVER_APPS := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_SERVER_APPS))
+PRODUCT_DEXPREOPT_SPEED_APPS := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_DEXPREOPT_SPEED_APPS))
 
 # Find the device that this product maps to.
 TARGET_DEVICE := $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_DEVICE)
@@ -424,3 +415,11 @@
 # Whether the product is an Android Things variant.
 PRODUCT_IOT := \
     $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_IOT))
+
+# List of packages for which RRO is enforced.
+PRODUCT_ENFORCE_RRO_TARGETS := \
+    $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_ENFORCE_RRO_TARGETS))
+
+# Add reserved headroom to a system image.
+PRODUCT_SYSTEM_HEADROOM := \
+    $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_HEADROOM))
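
For illustration, a product makefile would opt in to the two new variables roughly like this (a sketch, not part of the patch; the wildcard value is the one special-cased in package_internal.mk above, and the headroom figure is an assumed placeholder, taken to be bytes):

    PRODUCT_ENFORCE_RRO_TARGETS := *
    PRODUCT_SYSTEM_HEADROOM := 33554432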
diff --git a/core/shared_library_internal.mk b/core/shared_library_internal.mk
index 1ea4cb8..687536b 100644
--- a/core/shared_library_internal.mk
+++ b/core/shared_library_internal.mk
@@ -48,11 +48,14 @@
 ifeq ($(LOCAL_NO_CRT),true)
 my_target_crtbegin_so_o :=
 my_target_crtend_so_o :=
+else ifdef LOCAL_USE_VNDK
+my_target_crtbegin_so_o := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtbegin_so.vendor.o
+my_target_crtend_so_o := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtend_so.vendor.o
 else
 my_target_crtbegin_so_o := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtbegin_so.o
 my_target_crtend_so_o := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtend_so.o
 endif
-ifneq ($(LOCAL_SDK_VERSION)$(LOCAL_USE_VNDK),)
+ifneq ($(LOCAL_SDK_VERSION),)
 my_target_crtbegin_so_o := $(wildcard $(my_ndk_sysroot_lib)/crtbegin_so.o)
 my_target_crtend_so_o := $(wildcard $(my_ndk_sysroot_lib)/crtend_so.o)
 endif
diff --git a/core/soong.mk b/core/soong.mk
deleted file mode 100644
index 4a74f2e..0000000
--- a/core/soong.mk
+++ /dev/null
@@ -1,29 +0,0 @@
-# We need to rebootstrap soong if SOONG_OUT_DIR or the reverse path from
-# SOONG_OUT_DIR to TOP changes
-SOONG_NEEDS_REBOOTSTRAP :=
-ifneq ($(wildcard $(SOONG_BOOTSTRAP)),)
-  ifneq ($(SOONG_OUT_DIR),$(strip $(shell source $(SOONG_BOOTSTRAP); echo $$BUILDDIR)))
-    SOONG_NEEDS_REBOOTSTRAP := FORCE
-    $(warning soong_out_dir changed)
-  endif
-  ifneq ($(strip $(shell build/soong/scripts/reverse_path.py $(SOONG_OUT_DIR))),$(strip $(shell source $(SOONG_BOOTSTRAP); echo $$SRCDIR_FROM_BUILDDIR)))
-    SOONG_NEEDS_REBOOTSTRAP := FORCE
-    $(warning reverse path changed)
-  endif
-endif
-
-# Bootstrap soong.
-$(SOONG_BOOTSTRAP): bootstrap.bash $(SOONG_NEEDS_REBOOTSTRAP)
-	$(hide) mkdir -p $(dir $@)
-	$(hide) BUILDDIR=$(SOONG_OUT_DIR) ./bootstrap.bash
-
-# Tell soong that it is embedded in make
-$(SOONG_IN_MAKE):
-	$(hide) mkdir -p $(dir $@)
-	$(hide) touch $@
-
-# Run Soong, this implicitly create an Android.mk listing all soong outputs as
-# prebuilts.
-.PHONY: run_soong
-run_soong: $(SOONG_BOOTSTRAP) $(SOONG_VARIABLES) $(SOONG_IN_MAKE) FORCE
-	$(hide) SKIP_NINJA=true $(SOONG)
diff --git a/core/soong_config.mk b/core/soong_config.mk
index ff8a51d..e21083d 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -13,6 +13,17 @@
 endif
 endif
 
+# Converts a list to a JSON list.
+# $1: List separator.
+# $2: List.
+_json_list = [$(if $(2),"$(subst $(1),"$(comma)",$(2))")]
+
+# Converts a space-separated list to a JSON list.
+json_list = $(call _json_list,$(space),$(1))
+
+# Converts a comma-separated list to a JSON list.
+csv_to_json_list = $(call _json_list,$(comma),$(1))
+
 # Create soong.variables with copies of makefile settings.  Runs every build,
 # but only updates soong.variables if it changes
 SOONG_VARIABLES_TMP := $(SOONG_VARIABLES).$$$$
@@ -23,13 +34,14 @@
 	echo '    "Make_suffix": "-$(TARGET_PRODUCT)",'; \
 	echo ''; \
 	echo '    "Platform_sdk_version": $(PLATFORM_SDK_VERSION),'; \
+	echo '    "Platform_version_all_codenames": $(call csv_to_json_list,$(PLATFORM_VERSION_ALL_CODENAMES)),'; \
 	echo '    "Unbundled_build": $(if $(TARGET_BUILD_APPS),true,false),'; \
 	echo '    "Brillo": $(if $(BRILLO),true,false),'; \
 	echo '    "Malloc_not_svelte": $(if $(filter true,$(MALLOC_SVELTE)),false,true),'; \
-	echo '    "Allow_missing_dependencies": $(if $(TARGET_BUILD_APPS)$(filter true,$(SOONG_ALLOW_MISSING_DEPENDENCIES)),true,false),'; \
-	echo '    "SanitizeHost": [$(if $(SANITIZE_HOST),"$(subst $(space),"$(comma)",$(SANITIZE_HOST))")],'; \
-	echo '    "SanitizeDevice": [$(if $(SANITIZE_TARGET),"$(subst $(space),"$(comma)",$(SANITIZE_TARGET))")],'; \
-	echo '    "SanitizeDeviceArch": [$(if $(SANITIZE_TARGET_ARCH),"$(subst $(space),"$(comma)",$(SANITIZE_TARGET_ARCH))")],'; \
+	echo '    "Allow_missing_dependencies": $(if $(ALLOW_MISSING_DEPENDENCIES),true,false),'; \
+	echo '    "SanitizeHost": $(call json_list,$(SANITIZE_HOST)),'; \
+	echo '    "SanitizeDevice": $(call json_list,$(SANITIZE_TARGET)),'; \
+	echo '    "SanitizeDeviceArch": $(call json_list,$(SANITIZE_TARGET_ARCH)),'; \
 	echo '    "HostStaticBinaries": $(if $(strip $(BUILD_HOST_static)),true,false),'; \
 	echo '    "Binder32bit": $(if $(BINDER32BIT),true,false),'; \
 	echo '    "DevicePrefer32BitExecutables": $(if $(filter true,$(TARGET_PREFER_32_BIT_EXECUTABLES)),true,false),'; \
@@ -42,8 +54,8 @@
 	echo '    "TidyChecks": "$(WITH_TIDY_CHECKS)",'; \
 	echo ''; \
 	echo '    "NativeCoverage": $(if $(filter true,$(NATIVE_COVERAGE)),true,false),'; \
-	echo '    "CoveragePaths": [$(if $(COVERAGE_PATHS),"$(subst $(space),"$(comma)",$(subst $(comma),$(space),$(COVERAGE_PATHS)))")],'; \
-	echo '    "CoverageExcludePaths": [$(if $(COVERAGE_EXCLUDE_PATHS),"$(subst $(space),"$(comma)",$(subst $(comma),$(space),$(COVERAGE_EXCLUDE_PATHS)))")],'; \
+	echo '    "CoveragePaths": $(call csv_to_json_list,$(COVERAGE_PATHS)),'; \
+	echo '    "CoverageExcludePaths": $(call csv_to_json_list,$(COVERAGE_EXCLUDE_PATHS)),'; \
 	echo ''; \
 	echo '    "DeviceName": "$(TARGET_DEVICE)",'; \
 	echo '    "DeviceArch": "$(TARGET_ARCH)",'; \
@@ -65,13 +77,15 @@
 	echo '    "CrossHostArch": "$(HOST_CROSS_ARCH)",'; \
 	echo '    "CrossHostSecondaryArch": "$(HOST_CROSS_2ND_ARCH)",'; \
 	echo '    "Safestack": $(if $(filter true,$(USE_SAFESTACK)),true,false),'; \
-	echo '    "EnableCFI": $(if $(filter true,$(ENABLE_CFI)),true,false),'; \
+	echo '    "EnableCFI": $(if $(filter false,$(ENABLE_CFI)),false,true),'; \
+	echo '    "Device_uses_hwc2": $(if $(filter true,$(TARGET_USES_HWC2)),true,false),'; \
+	echo '    "Override_rs_driver": "$(OVERRIDE_RS_DRIVER)",'; \
+	echo '    "Treble": $(if $(filter true,$(PRODUCT_FULL_TREBLE)),true,false),'; \
+	echo '    "Pdk": $(if $(filter true,$(TARGET_BUILD_PDK)),true,false),'; \
 	echo ''; \
 	echo '    "ArtUseReadBarrier": $(if $(filter false,$(PRODUCT_ART_USE_READ_BARRIER)),false,true),'; \
 	echo ''; \
-	echo '    "BtConfigIncludeDir": "$(BOARD_BLUETOOTH_BDROID_BUILDCFG_INCLUDE_DIR)",'; \
-	echo ''; \
-	echo '    "SameProcessHalDeps": [$(if $(BOARD_SAME_PROCESS_HAL_DEPS),"$(subst $(space),"$(comma)",$(BOARD_SAME_PROCESS_HAL_DEPS))")]'; \
+	echo '    "BtConfigIncludeDir": "$(BOARD_BLUETOOTH_BDROID_BUILDCFG_INCLUDE_DIR)"'; \
 	echo '}') > $(SOONG_VARIABLES_TMP); \
 	if ! cmp -s $(SOONG_VARIABLES_TMP) $(SOONG_VARIABLES); then \
 	  mv $(SOONG_VARIABLES_TMP) $(SOONG_VARIABLES); \
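
For reference, the expected expansions of the new JSON helpers defined at the top of this file (illustrative values only; assumes evaluation inside this makefile, where $(space) and $(comma) are defined):

    sanitize_example := address thread
    $(info $(call json_list,$(sanitize_example)))          # -> ["address","thread"]
    $(info $(call json_list,))                             # -> []
    coverage_example := foo,bar
    $(info $(call csv_to_json_list,$(coverage_example)))   # -> ["foo","bar"]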
diff --git a/core/static_java_library.mk b/core/static_java_library.mk
index bf72a82..343b949 100644
--- a/core/static_java_library.mk
+++ b/core/static_java_library.mk
@@ -39,6 +39,7 @@
 # A static Java library needs to explicitly set LOCAL_RESOURCE_DIR.
 ifdef LOCAL_RESOURCE_DIR
 need_compile_res := true
+LOCAL_RESOURCE_DIR := $(foreach d,$(LOCAL_RESOURCE_DIR),$(call clean-path,$(d)))
 endif
 ifdef LOCAL_USE_AAPT2
 ifneq ($(LOCAL_STATIC_ANDROID_LIBRARIES),)
@@ -149,8 +150,23 @@
 ifdef LOCAL_USE_AAPT2
 # One more level, named res, so we can zip up the flat resources that can be linked by apps.
 my_compiled_res_base_dir := $(intermediates.COMMON)/flat-res/res
+renderscript_target_api :=
+ifneq (,$(LOCAL_RENDERSCRIPT_TARGET_API))
+renderscript_target_api := $(LOCAL_RENDERSCRIPT_TARGET_API)
+else
+ifneq (,$(LOCAL_SDK_VERSION))
+# Set target-api for LOCAL_SDK_VERSIONs other than current.
+ifneq (,$(filter-out current system_current test_current, $(LOCAL_SDK_VERSION)))
+renderscript_target_api := $(LOCAL_SDK_VERSION)
+endif
+endif  # LOCAL_SDK_VERSION is set
+endif  # LOCAL_RENDERSCRIPT_TARGET_API is set
+ifneq (,$(renderscript_target_api))
+ifneq ($(call math_gt_or_eq,$(renderscript_target_api),21),true)
 my_generated_res_dirs := $(rs_generated_res_dir)
 my_generated_res_dirs_deps := $(RenderScript_file_stamp)
+endif  # renderscript_target_api < 21
+endif  # renderscript_target_api is set
 include $(BUILD_SYSTEM)/aapt2.mk
 $(my_res_package) : $(framework_res_package_export_deps)
 else
diff --git a/core/tasks/build_custom_images.mk b/core/tasks/build_custom_images.mk
index 8ebf89b..0750217 100644
--- a/core/tasks/build_custom_images.mk
+++ b/core/tasks/build_custom_images.mk
@@ -37,6 +37,14 @@
 #   - CUSTOM_IMAGE_SELINUX, set to "true" if the image supports selinux.
 #   - CUSTOM_IMAGE_SUPPORT_VERITY, set to "true" if the product supports verity.
 #   - CUSTOM_IMAGE_VERITY_BLOCK_DEVICE
+#   - CUSTOM_IMAGE_AVB_HASH_ENABLE, set to "true" to add AVB HASH footer.
+#   - CUSTOM_IMAGE_AVB_ADD_HASH_FOOTER_ARGS, additional args for the AVB HASH footer.
+#   - CUSTOM_IMAGE_AVB_HASHTREE_ENABLE, set to "true" to add AVB HASHTREE
+#     footer.
+#   - CUSTOM_IMAGE_AVB_ADD_HASHTREE_FOOTER_ARGS, additional args for the AVB
+#     HASHTREE footer.
+#   - CUSTOM_IMAGE_AVB_KEY_PATH, custom AVB signing key.
+#   - CUSTOM_IMAGE_AVB_ALGORITHM, custom AVB signing algorithm.
 #
 # To build all those images, run "make custom_images".
 
@@ -54,6 +62,12 @@
   CUSTOM_IMAGE_SELINUX \
   CUSTOM_IMAGE_SUPPORT_VERITY \
   CUSTOM_IMAGE_VERITY_BLOCK_DEVICE \
+  CUSTOM_IMAGE_AVB_HASH_ENABLE \
+  CUSTOM_IMAGE_AVB_ADD_HASH_FOOTER_ARGS \
+  CUSTOM_IMAGE_AVB_HASHTREE_ENABLE \
+  CUSTOM_IMAGE_AVB_ADD_HASHTREE_FOOTER_ARGS \
+  CUSTOM_IMAGE_AVB_KEY_PATH \
+  CUSTOM_IMAGE_AVB_ALGORITHM \
 
 # We don't expect product makefile to inherit/override PRODUCT_CUSTOM_IMAGE_MAKEFILES,
 # so we don't put it in the _product_var_list.
diff --git a/core/tasks/device-tests.mk b/core/tasks/device-tests.mk
index a0662ca..b1b936a 100644
--- a/core/tasks/device-tests.mk
+++ b/core/tasks/device-tests.mk
@@ -14,4 +14,16 @@
 
 
 .PHONY: device-tests
-device-tests: $(COMPATIBILITY.device-tests.FILES)
+
+device-tests-zip := $(PRODUCT_OUT)/device-tests.zip
+$(device-tests-zip): $(COMPATIBILITY.device-tests.FILES) $(SOONG_ZIP)
+	echo $(sort $(COMPATIBILITY.device-tests.FILES)) > $@.list
+	sed -i -e 's/\s\+/\n/g' $@.list
+	grep $(HOST_OUT_TESTCASES) $@.list > $@-host.list || true
+	grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true
+	$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list -P target -C $(PRODUCT_OUT) -l $@-target.list
+
+device-tests: $(device-tests-zip)
+$(call dist-for-goals, device-tests, $(device-tests-zip))
+
+tests: device-tests
diff --git a/core/tasks/general-tests.mk b/core/tasks/general-tests.mk
index ddaede4..763dd51 100644
--- a/core/tasks/general-tests.mk
+++ b/core/tasks/general-tests.mk
@@ -13,4 +13,14 @@
 # limitations under the License.
 
 .PHONY: general-tests
-device-tests: $(COMPATIBILITY.general-tests.FILES)
+
+general-tests-zip := $(PRODUCT_OUT)/general-tests.zip
+$(general-tests-zip): $(COMPATIBILITY.general-tests.FILES) $(SOONG_ZIP)
+	echo $(sort $(COMPATIBILITY.general-tests.FILES)) > $@.list
+	sed -i -e 's/\s\+/\n/g' $@.list
+	grep $(HOST_OUT_TESTCASES) $@.list > $@-host.list || true
+	grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true
+	$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list -P target -C $(PRODUCT_OUT) -l $@-target.list
+
+general-tests: $(general-tests-zip)
+$(call dist-for-goals, general-tests, $(general-tests-zip))
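Both device-tests and general-tests now produce a dist-able zip with host/ and target/ prefixes rather than just depending on the loose test files. A usage sketch, assuming an already configured lunch target:

    # Build the packaged tests and peek at the zip layout (sketch; paths vary per product).
    . build/envsetup.sh && lunch aosp_arm64-userdebug
    m device-tests
    unzip -l "$ANDROID_PRODUCT_OUT/device-tests.zip" | head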
diff --git a/core/tasks/tools/build_custom_image.mk b/core/tasks/tools/build_custom_image.mk
index f0db476..702d8b3 100644
--- a/core/tasks/tools/build_custom_image.mk
+++ b/core/tasks/tools/build_custom_image.mk
@@ -62,6 +62,14 @@
   $(eval my_image_copy_files += $(src))\
   $(eval my_copy_pairs += $(src):$(my_staging_dir)/$(word 2,$(pair))))
 
+ifndef CUSTOM_IMAGE_AVB_KEY_PATH
+# If key path isn't specified, use the default signing args.
+my_avb_signing_args := $(INTERNAL_AVB_SIGNING_ARGS)
+else
+my_avb_signing_args := \
+  --algorithm $(CUSTOM_IMAGE_AVB_ALGORITHM) --key $(CUSTOM_IMAGE_AVB_KEY_PATH)
+endif
+
 $(my_built_custom_image): PRIVATE_INTERMEDIATES := $(intermediates)
 $(my_built_custom_image): PRIVATE_MOUNT_POINT := $(CUSTOM_IMAGE_MOUNT_POINT)
 $(my_built_custom_image): PRIVATE_PARTITION_SIZE := $(CUSTOM_IMAGE_PARTITION_SIZE)
@@ -74,6 +82,17 @@
 $(my_built_custom_image): PRIVATE_VERITY_KEY := $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY)
 $(my_built_custom_image): PRIVATE_VERITY_BLOCK_DEVICE := $(CUSTOM_IMAGE_VERITY_BLOCK_DEVICE)
 $(my_built_custom_image): PRIVATE_DICT_FILE := $(CUSTOM_IMAGE_DICT_FILE)
+$(my_built_custom_image): PRIVATE_AVB_AVBTOOL := $(AVBTOOL)
+$(my_built_custom_image): PRIVATE_AVB_SIGNING_ARGS := $(my_avb_signing_args)
+$(my_built_custom_image): PRIVATE_AVB_HASH_ENABLE := $(CUSTOM_IMAGE_AVB_HASH_ENABLE)
+$(my_built_custom_image): PRIVATE_AVB_ADD_HASH_FOOTER_ARGS := $(CUSTOM_IMAGE_AVB_ADD_HASH_FOOTER_ARGS)
+$(my_built_custom_image): PRIVATE_AVB_HASHTREE_ENABLE := $(CUSTOM_IMAGE_AVB_HASHTREE_ENABLE)
+$(my_built_custom_image): PRIVATE_AVB_ADD_HASHTREE_FOOTER_ARGS := $(CUSTOM_IMAGE_AVB_ADD_HASHTREE_FOOTER_ARGS)
+ifeq (true,$(filter true, $(CUSTOM_IMAGE_AVB_HASH_ENABLE) $(CUSTOM_IMAGE_AVB_HASHTREE_ENABLE)))
+  $(my_built_custom_image): $(AVBTOOL)
+else ifneq (,$(filter true, $(CUSTOM_IMAGE_AVB_HASH_ENABLE) $(CUSTOM_IMAGE_AVB_HASHTREE_ENABLE)))
+  $(error Cannot set both CUSTOM_IMAGE_AVB_HASH_ENABLE and CUSTOM_IMAGE_AVB_HASHTREE_ENABLE to true)
+endif
 $(my_built_custom_image): $(INTERNAL_USERIMAGES_DEPS) $(my_built_modules) $(my_image_copy_files) \
   $(CUSTOM_IMAGE_DICT_FILE)
 	@echo "Build image $@"
@@ -88,6 +107,7 @@
 	# Generate the dict.
 	$(hide) echo "# For all accepted properties, see BuildImage() in tools/releasetools/build_image.py" > $(PRIVATE_INTERMEDIATES)/image_info.txt
 	$(hide) echo "mount_point=$(PRIVATE_MOUNT_POINT)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt
+	$(hide) echo "partition_name=$(PRIVATE_MOUNT_POINT)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt
 	$(hide) echo "fs_type=$(PRIVATE_FILE_SYSTEM_TYPE)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt
 	$(hide) echo "partition_size=$(PRIVATE_PARTITION_SIZE)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt
 	$(hide) echo "ext_mkuserimg=$(notdir $(MKEXTUSERIMG))" >> $(PRIVATE_INTERMEDIATES)/image_info.txt
@@ -97,6 +117,14 @@
 	    echo "verity_key=$(PRIVATE_VERITY_KEY)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt;\
 	    echo "verity_signer_cmd=$(VERITY_SIGNER)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt;\
 	    echo "verity_block_device=$(PRIVATE_VERITY_BLOCK_DEVICE)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt)
+	$(hide) echo "avb_avbtool=$(PRIVATE_AVB_AVBTOOL)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt
+	$(hide) echo "avb_signing_args=$(PRIVATE_AVB_SIGNING_ARGS)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt
+	$(if $(PRIVATE_AVB_HASH_ENABLE),\
+	  $(hide) echo "avb_hash_enable=$(PRIVATE_AVB_HASH_ENABLE)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt;\
+	    echo "avb_add_hash_footer_args=$(PRIVATE_AVB_ADD_HASH_FOOTER_ARGS)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt)
+	$(if $(PRIVATE_AVB_HASHTREE_ENABLE),\
+	  $(hide) echo "avb_hashtree_enable=$(PRIVATE_AVB_HASHTREE_ENABLE)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt;\
+	    echo "avb_add_hashtree_footer_args=$(PRIVATE_AVB_ADD_HASHTREE_FOOTER_ARGS)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt)
 	$(if $(PRIVATE_DICT_FILE),\
 	  $(hide) echo "# Properties from $(PRIVATE_DICT_FILE)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt;\
 	    cat $(PRIVATE_DICT_FILE) >> $(PRIVATE_INTERMEDIATES)/image_info.txt)
@@ -116,3 +144,9 @@
 
 # Archive the built image.
 $(call dist-for-goals, $(my_custom_image_name) custom_images,$(my_installed_custom_image))
+
+my_staging_dir :=
+my_built_modules :=
+my_copy_dest :=
+my_copy_pairs :=
+my_pickup_files :=
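The avb_* properties written into image_info.txt are consumed by the image build tooling; the HASH/HASHTREE flags decide which footer gets added and my_avb_signing_args supplies the key and algorithm. Purely as an illustration (not this makefile's code path), the hashtree case corresponds to an avbtool call along these lines, with placeholder paths and sizes:

    # Sketch: what an AVB HASHTREE footer step roughly amounts to.
    avbtool add_hashtree_footer \
      --image out/custom.img \
      --partition_size 268435456 \
      --partition_name oem \
      --algorithm SHA256_RSA4096 \
      --key path/to/custom_avb_key.pem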
diff --git a/core/tasks/tools/package-modules.mk b/core/tasks/tools/package-modules.mk
index 4dde9fd..629a9b2 100644
--- a/core/tasks/tools/package-modules.mk
+++ b/core/tasks/tools/package-modules.mk
@@ -4,6 +4,7 @@
 # Input variables:
 #   my_modules: a list of module names
 #   my_package_name: the name of the output zip file.
+#   my_copy_pairs: a list of extra files to install (in src:dest format)
 # Output variables:
 #   my_package_zip: the path to the output zip file.
 #
@@ -11,8 +12,8 @@
 
 my_makefile := $(lastword $(filter-out $(lastword $(MAKEFILE_LIST)),$(MAKEFILE_LIST)))
 my_staging_dir := $(call intermediates-dir-for,PACKAGING,$(my_package_name))
-my_built_modules :=
-my_copy_pairs :=
+my_built_modules := $(foreach p,$(my_copy_pairs),$(call word-colon,1,$(p)))
+my_copy_pairs := $(foreach p,$(my_copy_pairs),$(call word-colon,1,$(p)):$(my_staging_dir)/$(call word-colon,2,$(p)))
 my_pickup_files :=
 
 # Iterate over the modules and include their direct dependencies stated in the
@@ -25,6 +26,12 @@
   $(eval my_modules_and_deps += $(_explicitly_required))\
 )
 
+# Ignore unknown installed files on partial builds
+my_missing_files :=
+ifneq ($(ALLOW_MISSING_DEPENDENCIES),true)
+my_missing_files = $(shell $(call echo-warning,$(my_makefile),$(my_package_name): Unknown installed file for module '$(1)'))
+endif
+
 # Iterate over modules' built files and installed files;
 # Calculate the dest files in the output zip file.
 
@@ -34,7 +41,7 @@
   $(eval _built_files := $(strip $(ALL_MODULES.$(m).BUILT_INSTALLED)\
     $(ALL_MODULES.$(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX).BUILT_INSTALLED)))\
   $(if $(_pickup_files)$(_built_files),,\
-    $(shell $(call echo-warning,$(my_makefile),$(my_package_name): Unknown installed file for module '$(m)')))\
+    $(call my_missing_files,$(m)))\
   $(eval my_pickup_files += $(_pickup_files))\
   $(foreach i, $(_built_files),\
     $(eval bui_ins := $(subst :,$(space),$(i)))\
@@ -61,3 +68,12 @@
 	$(hide) $(foreach f, $(PRIVATE_PICKUP_FILES),\
 	  cp -RfL $(f) $(dir $@) && ) true
 	$(hide) cd $(dir $@) && zip -rqX $(notdir $@) *
+
+my_makefile :=
+my_staging_dir :=
+my_built_modules :=
+my_copy_dest :=
+my_copy_pairs :=
+my_pickup_files :=
+my_missing_files :=
+my_modules_and_deps :=
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index 3332fc7..5a89fc9 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -38,55 +38,52 @@
   include $(INTERNAL_BUILD_ID_MAKEFILE)
 endif
 
-# Returns all words in $1 up to and including $2
-define find_and_earlier
-  $(strip $(if $(1),
-    $(firstword $(1))
-    $(if $(filter $(firstword $(1)),$(2)),,
-      $(call find_and_earlier,$(wordlist 2,$(words $(1)),$(1)),$(2)))))
-endef
+DEFAULT_PLATFORM_VERSION := OPR1
+MIN_PLATFORM_VERSION := OPR1
+MAX_PLATFORM_VERSION := PPR1
 
-#$(warning $(call find_and_earlier,A B C,A))
-#$(warning $(call find_and_earlier,A B C,B))
-#$(warning $(call find_and_earlier,A B C,C))
-#$(warning $(call find_and_earlier,A B C,D))
+ALLOWED_VERSIONS := $(call allowed-platform-versions,\
+  $(MIN_PLATFORM_VERSION),\
+  $(MAX_PLATFORM_VERSION),\
+  $(DEFAULT_PLATFORM_VERSION))
 
-define version-list
-$(1) $(1)DR1 $(1)DR2 $(1)MR1 $(1)MR2
-endef
-
-ALL_VERSIONS := O P
-ALL_VERSIONS := $(foreach v,$(ALL_VERSIONS),$(call version-list,$(v)))
-
-ifeq (,$(TARGET_PLATFORM_VERSION))
-  # Default targeted platform version
-  # TODO: PLATFORM_VERSION, PLATFORM_SDK_VERSION, etc. should be conditional
-  # on this
-  TARGET_PLATFORM_VERSION := O
+ifndef TARGET_PLATFORM_VERSION
+  TARGET_PLATFORM_VERSION := $(DEFAULT_PLATFORM_VERSION)
+else ifeq ($(TARGET_PLATFORM_VERSION),OPR1)
+  # HACK: lunch currently sets TARGET_PLATFORM_VERSION to
+  # DEFAULT_PLATFORM_VERSION, which causes unnecessary pain
+  # when the old DEFAULT_PLATFORM_VERSION becomes invalid.
+  # For now, silently upgrade OPR1 to the current default.
+  TARGET_PLATFORM_VERSION := $(DEFAULT_PLATFORM_VERSION)
 endif
 
-ifeq (,$(filter $(ALL_VERSIONS), $(TARGET_PLATFORM_VERSION)))
-$(warning Invalid TARGET_PLATFORM_VERSION '$(TARGET_PLATFORM_VERSION)', must be one of)
-$(warning $(ALL_VERSIONS))
-$(error Stopping...)
+ifeq (,$(filter $(ALLOWED_VERSIONS), $(TARGET_PLATFORM_VERSION)))
+  $(warning Invalid TARGET_PLATFORM_VERSION '$(TARGET_PLATFORM_VERSION)', must be one of)
+  $(error $(ALLOWED_VERSIONS))
 endif
 
-ENABLED_VERSIONS := $(call find_and_earlier,$(ALL_VERSIONS),$(TARGET_PLATFORM_VERSION))
-
-$(foreach v,$(ENABLED_VERSIONS), \
-  $(eval IS_AT_LEAST_$(v) := true))
-
 # Default versions for each TARGET_PLATFORM_VERSION
+# TODO: PLATFORM_VERSION, PLATFORM_SDK_VERSION, etc. should be conditional
+# on this
 
 # This is the canonical definition of the platform version,
 # which is the version that we reveal to the end user.
 # Update this value when the platform version changes (rather
 # than overriding it somewhere else).  Can be an arbitrary string.
-PLATFORM_VERSION.O := O
 
-# This is the current development code-name, if the build is not a final
-# release build.  If this is a final release build, it is simply "REL".
-PLATFORM_VERSION_CODENAME.O := O
+# When you add a new PLATFORM_VERSION which will result in a new
+# PLATFORM_SDK_VERSION please ensure you add a corresponding isAtLeast*
+# method in the following java file:
+# frameworks/support/compat/gingerbread/android/support/v4/os/BuildCompat.java
+
+# When you change PLATFORM_VERSION for a given PLATFORM_SDK_VERSION
+# please add that PLATFORM_VERSION to the following text file:
+# cts/tests/tests/os/assets/platform_versions.txt
+PLATFORM_VERSION.OPR1 := O
+
+# These are the current development codenames.
+PLATFORM_VERSION_CODENAME.OPR1 := O
+PLATFORM_VERSION_CODENAME.PPR1 := P
 
 ifndef PLATFORM_VERSION
   PLATFORM_VERSION := $(PLATFORM_VERSION.$(TARGET_PLATFORM_VERSION))
@@ -105,6 +102,14 @@
   # intermediate builds).  During development, this number remains at the
   # SDK version the branch is based on and PLATFORM_VERSION_CODENAME holds
   # the code-name of the new development work.
+
+  # When you change PLATFORM_SDK_VERSION please ensure you also update the
+  # corresponding methods for isAtLeast* in the following java file:
+  # frameworks/support/compat/gingerbread/android/support/v4/os/BuildCompat.java
+
+  # When you increment the PLATFORM_SDK_VERSION please ensure you also
+  # clear out the following text file of all older PLATFORM_VERSIONs:
+  # cts/tests/tests/os/assets/platform_versions.txt
   PLATFORM_SDK_VERSION := 25
 endif
 
@@ -127,7 +132,22 @@
   # This is all of the development codenames that are active.  Should be either
   # the same as PLATFORM_VERSION_CODENAME or a comma-separated list of additional
   # codenames after PLATFORM_VERSION_CODENAME.
-  PLATFORM_VERSION_ALL_CODENAMES := $(PLATFORM_VERSION_CODENAME)
+  PLATFORM_VERSION_ALL_CODENAMES :=
+
+  # Build a list of all possible codenames. Avoid duplicates, and stop when we
+  # reach a codename that matches PLATFORM_VERSION_CODENAME (anything beyond
+  # that is not included in our build).
+  _versions_in_target := \
+    $(call find_and_earlier,$(ALL_VERSIONS),$(TARGET_PLATFORM_VERSION))
+  $(foreach version,$(_versions_in_target),\
+    $(eval _codename := $(PLATFORM_VERSION_CODENAME.$(version)))\
+    $(if $(filter $(_codename),$(PLATFORM_VERSION_ALL_CODENAMES)),,\
+      $(eval PLATFORM_VERSION_ALL_CODENAMES += $(_codename))))
+
+  # And convert from space separated to comma separated.
+  PLATFORM_VERSION_ALL_CODENAMES := \
+    $(subst $(space),$(comma),$(strip $(PLATFORM_VERSION_ALL_CODENAMES)))
+
 endif
 
 ifeq (REL,$(PLATFORM_VERSION_CODENAME))
@@ -166,7 +186,7 @@
     #  It must be of the form "YYYY-MM-DD" on production devices.
     #  It must match one of the Android Security Patch Level strings of the Public Security Bulletins.
     #  If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
-      PLATFORM_SECURITY_PATCH := 2016-11-05
+      PLATFORM_SECURITY_PATCH := 2017-04-05
 endif
 
 ifndef PLATFORM_BASE_OS
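With the allowed-version machinery above, the targeted release can be appended to the lunch combo and is validated against ALLOWED_VERSIONS. A quick sketch using the versions defined in this change:

    lunch aosp_arm64-userdebug-OPR1   # default platform version, accepted
    lunch aosp_arm64-userdebug-PPR1   # maximum allowed version, accepted
    lunch aosp_arm64-userdebug-ABC1   # not in ALLOWED_VERSIONS, build setup fails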
diff --git a/envsetup.sh b/envsetup.sh
index 9680780..b71a825 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -28,9 +28,9 @@
 
 Look at the source to view more functions. The complete list is:
 EOF
-    T=$(gettop)
-    local A
-    A=""
+    local T=$(gettop)
+    local A=""
+    local i
     for i in `cat $T/build/envsetup.sh | sed -n "/^[[:blank:]]*function /s/function \([a-z_]*\).*/\1/p" | sort | uniq`; do
       A="$A $i"
     done
@@ -40,7 +40,7 @@
 # Get all the build variables needed by this script in a single call to the build system.
 function build_build_var_cache()
 {
-    T=$(gettop)
+    local T=$(gettop)
     # Grep out the variable names from the script.
     cached_vars=`cat $T/build/envsetup.sh | tr '()' '  ' | awk '{for(i=1;i<=NF;i++) if($i~/get_build_var/) print $(i+1)}' | sort -u | tr '\n' ' '`
     cached_abs_vars=`cat $T/build/envsetup.sh | tr '()' '  ' | awk '{for(i=1;i<=NF;i++) if($i~/get_abs_build_var/) print $(i+1)}' | sort -u | tr '\n' ' '`
@@ -74,6 +74,7 @@
 function destroy_build_var_cache()
 {
     unset BUILD_VAR_CACHE_READY
+    local v
     for v in $cached_vars; do
       unset var_cache_$v
     done
@@ -93,7 +94,7 @@
     return
     fi
 
-    T=$(gettop)
+    local T=$(gettop)
     if [ ! "$T" ]; then
         echo "Couldn't locate the top of the tree.  Try setting TOP." >&2
         return
@@ -111,7 +112,7 @@
     return
     fi
 
-    T=$(gettop)
+    local T=$(gettop)
     if [ ! "$T" ]; then
         echo "Couldn't locate the top of the tree.  Try setting TOP." >&2
         return
@@ -123,7 +124,7 @@
 # check to see if the supplied product is one we can build
 function check_product()
 {
-    T=$(gettop)
+    local T=$(gettop)
     if [ ! "$T" ]; then
         echo "Couldn't locate the top of the tree.  Try setting TOP." >&2
         return
@@ -141,6 +142,7 @@
 # check to see if the supplied variant is valid
 function check_variant()
 {
+    local v
     for v in ${VARIANT_CHOICES[@]}
     do
         if [ "$v" = "$1" ]
@@ -153,7 +155,7 @@
 
 function setpaths()
 {
-    T=$(gettop)
+    local T=$(gettop)
     if [ ! "$T" ]; then
         echo "Couldn't locate the top of the tree.  Try setting TOP."
         return
@@ -184,18 +186,19 @@
     fi
 
     # and in with the new
-    prebuiltdir=$(getprebuilt)
-    gccprebuiltdir=$(get_abs_build_var ANDROID_GCC_PREBUILTS)
+    local prebuiltdir=$(getprebuilt)
+    local gccprebuiltdir=$(get_abs_build_var ANDROID_GCC_PREBUILTS)
 
     # defined in core/config.mk
-    targetgccversion=$(get_build_var TARGET_GCC_VERSION)
-    targetgccversion2=$(get_build_var 2ND_TARGET_GCC_VERSION)
+    local targetgccversion=$(get_build_var TARGET_GCC_VERSION)
+    local targetgccversion2=$(get_build_var 2ND_TARGET_GCC_VERSION)
     export TARGET_GCC_VERSION=$targetgccversion
 
     # The gcc toolchain does not exists for windows/cygwin. In this case, do not reference it.
     export ANDROID_TOOLCHAIN=
     export ANDROID_TOOLCHAIN_2ND_ARCH=
     local ARCH=$(get_build_var TARGET_ARCH)
+    local toolchaindir toolchaindir2=
     case $ARCH in
         x86) toolchaindir=x86/x86_64-linux-android-$targetgccversion/bin
             ;;
@@ -217,7 +220,7 @@
         export ANDROID_TOOLCHAIN=$gccprebuiltdir/$toolchaindir
     fi
 
-    if [ -d "$gccprebuiltdir/$toolchaindir2" ]; then
+    if [ "$toolchaindir2" -a -d "$gccprebuiltdir/$toolchaindir2" ]; then
         export ANDROID_TOOLCHAIN_2ND_ARCH=$gccprebuiltdir/$toolchaindir2
     fi
 
@@ -273,7 +276,7 @@
 
 function printconfig()
 {
-    T=$(gettop)
+    local T=$(gettop)
     if [ ! "$T" ]; then
         echo "Couldn't locate the top of the tree.  Try setting TOP." >&2
         return
@@ -296,7 +299,7 @@
 
 function set_sequence_number()
 {
-    export BUILD_ENV_SEQUENCE_NUMBER=12
+    export BUILD_ENV_SEQUENCE_NUMBER=13
 }
 
 function settitle()
@@ -399,6 +402,7 @@
 #
 function chooseproduct()
 {
+    local default_value
     if [ "x$TARGET_PRODUCT" != x ] ; then
         default_value=$TARGET_PRODUCT
     else
@@ -569,50 +573,46 @@
         then
             selection=${LUNCH_MENU_CHOICES[$(($answer-1))]}
         fi
-    elif (echo -n $answer | grep -q -e "^[^\-][^\-]*-[^\-][^\-]*$")
-    then
+    else
         selection=$answer
     fi
 
-    if [ -z "$selection" ]
-    then
-        echo
-        echo "Invalid lunch combo: $answer"
-        return 1
-    fi
-
     export TARGET_BUILD_APPS=
 
-    local variant=$(echo -n $selection | sed -e "s/^[^\-]*-//")
-    check_variant $variant
-    if [ $? -ne 0 ]
-    then
-        echo
-        echo "** Invalid variant: '$variant'"
-        echo "** Must be one of ${VARIANT_CHOICES[@]}"
-        variant=
+    local product variant_and_version variant version
+
+    product=${selection%%-*} # Trim everything after first dash
+    variant_and_version=${selection#*-} # Trim everything up to first dash
+    if [ "$variant_and_version" != "$selection" ]; then
+        variant=${variant_and_version%%-*}
+        if [ "$variant" != "$variant_and_version" ]; then
+            version=${variant_and_version#*-}
+        fi
     fi
 
-    local product=$(echo -n $selection | sed -e "s/-.*$//")
-    TARGET_PRODUCT=$product \
-    TARGET_BUILD_VARIANT=$variant \
-    build_build_var_cache
-    if [ $? -ne 0 ]
+    if [ -z "$product" ]
     then
         echo
-        echo "** Don't have a product spec for: '$product'"
-        echo "** Do you have the right repo manifest?"
-        product=
-    fi
-
-    if [ -z "$product" -o -z "$variant" ]
-    then
-        echo
+        echo "Invalid lunch combo: $selection"
         return 1
     fi
 
-    export TARGET_PRODUCT=$product
-    export TARGET_BUILD_VARIANT=$variant
+    TARGET_PRODUCT=$product \
+    TARGET_BUILD_VARIANT=$variant \
+    TARGET_PLATFORM_VERSION=$version \
+    build_build_var_cache
+    if [ $? -ne 0 ]
+    then
+        return 1
+    fi
+
+    export TARGET_PRODUCT=$(get_build_var TARGET_PRODUCT)
+    export TARGET_BUILD_VARIANT=$(get_build_var TARGET_BUILD_VARIANT)
+    if [ -n "$version" ]; then
+      export TARGET_PLATFORM_VERSION=$(get_build_var TARGET_PLATFORM_VERSION)
+    else
+      unset TARGET_PLATFORM_VERSION
+    fi
     export TARGET_BUILD_TYPE=release
 
     echo
@@ -702,7 +702,7 @@
             PWD= /bin/pwd
         else
             local HERE=$PWD
-            T=
+            local T=
             while [ \( ! \( -f $TOPFILE \) \) -a \( $PWD != "/" \) ]; do
                 \cd ..
                 T=`PWD= /bin/pwd -P`
@@ -750,9 +750,9 @@
 
 function findmakefile()
 {
-    TOPFILE=build/core/envsetup.mk
+    local TOPFILE=build/core/envsetup.mk
     local HERE=$PWD
-    T=
+    local T=
     while [ \( ! \( -f $TOPFILE \) \) -a \( $PWD != "/" \) ]; do
         T=`PWD= /bin/pwd`
         if [ -f "$T/Android.mk" -o -f "$T/Android.bp" ]; then
@@ -788,6 +788,7 @@
             echo "Couldn't locate a makefile from the current directory."
             return 1
         else
+            local ARG
             for ARG in $@; do
                 case $ARG in
                   GET-INSTALL-PATH) GET_INSTALL_PATH=$ARG;;
@@ -945,7 +946,7 @@
 
 function croot()
 {
-    T=$(gettop)
+    local T=$(gettop)
     if [ "$T" ]; then
         if [ "$1" ]; then
             \cd $(gettop)/$1
@@ -959,9 +960,9 @@
 
 function cproj()
 {
-    TOPFILE=build/core/envsetup.mk
+    local TOPFILE=build/core/envsetup.mk
     local HERE=$PWD
-    T=
+    local T=
     while [ \( ! \( -f $TOPFILE \) \) -a \( $PWD != "/" \) ]; do
         T=$PWD
         if [ -f "$T/Android.mk" ]; then
@@ -1212,6 +1213,7 @@
 
 function resgrep()
 {
+    local dir
     for dir in `find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -name res -type d`; do
         find $dir -type f -name '*\.xml' -exec grep --color -n "$@" {} +
     done
@@ -1239,7 +1241,7 @@
     Darwin)
         function mgrep()
         {
-            find -E . -name .repo -prune -o -name .git -prune -o -path ./out -prune -o -type f -iregex '.*/(Makefile|Makefile\..*|.*\.make|.*\.mak|.*\.mk|.*\.bp)' \
+            find -E . -name .repo -prune -o -name .git -prune -o -path ./out -prune -o \( -iregex '.*/(Makefile|Makefile\..*|.*\.make|.*\.mak|.*\.mk|.*\.bp)' -o -regex '(.*/)?soong/[^/]*.go' \) -type f \
                 -exec grep --color -n "$@" {} +
         }
 
@@ -1253,7 +1255,7 @@
     *)
         function mgrep()
         {
-            find . -name .repo -prune -o -name .git -prune -o -path ./out -prune -o -regextype posix-egrep -iregex '(.*\/Makefile|.*\/Makefile\..*|.*\.make|.*\.mak|.*\.mk|.*\.bp)' -type f \
+            find . -name .repo -prune -o -name .git -prune -o -path ./out -prune -o \( -regextype posix-egrep -iregex '(.*\/Makefile|.*\/Makefile\..*|.*\.make|.*\.mak|.*\.mk|.*\.bp)' -o -regextype posix-extended -regex '(.*/)?soong/[^/]*.go' \) -type f \
                 -exec grep --color -n "$@" {} +
         }
 
@@ -1273,7 +1275,7 @@
 
 function tracedmdump()
 {
-    T=$(gettop)
+    local T=$(gettop)
     if [ ! "$T" ]; then
         echo "Couldn't locate the top of the tree.  Try setting TOP."
         return
@@ -1450,7 +1452,7 @@
         echo "Couldn't locate output files.  Try running 'lunch' first." >&2
         return
     fi
-    T=$(gettop)
+    local T=$(gettop)
     if [ ! "$T" ]; then
         echo "Couldn't locate the top of the tree.  Try setting TOP." >&2
         return
@@ -1467,7 +1469,7 @@
 # simple shortcut to the runtest command
 function runtest()
 {
-    T=$(gettop)
+    local T=$(gettop)
     if [ ! "$T" ]; then
         echo "Couldn't locate the top of the tree.  Try setting TOP." >&2
         return
@@ -1480,7 +1482,8 @@
         echo "Usage: godir <regex>"
         return
     fi
-    T=$(gettop)
+    local T=$(gettop)
+    local FILELIST
     if [ ! "$OUT_DIR" = "" ]; then
         mkdir -p $OUT_DIR
         FILELIST=$OUT_DIR/filelist
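The rewritten lunch parsing splits the combo with plain parameter expansion instead of sed. The same splitting logic in isolation (variable names as in the function above):

    # Sketch of the product-variant-version split performed by lunch.
    selection=aosp_arm64-userdebug-OPR1
    product=${selection%%-*}                  # aosp_arm64
    variant_and_version=${selection#*-}       # userdebug-OPR1
    if [ "$variant_and_version" != "$selection" ]; then
        variant=${variant_and_version%%-*}    # userdebug
        if [ "$variant" != "$variant_and_version" ]; then
            version=${variant_and_version#*-} # OPR1
        fi
    fi
    echo "$product / $variant / $version"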
diff --git a/target/board/generic/sepolicy/file_contexts b/target/board/generic/sepolicy/file_contexts
index e8d32f7..e9502d9 100644
--- a/target/board/generic/sepolicy/file_contexts
+++ b/target/board/generic/sepolicy/file_contexts
@@ -9,6 +9,7 @@
 /dev/block/vdc               u:object_r:userdata_block_device:s0
 
 /dev/goldfish_pipe           u:object_r:qemu_device:s0
+/dev/goldfish_sync           u:object_r:qemu_device:s0
 /dev/qemu_.*                 u:object_r:qemu_device:s0
 /dev/socket/qemud            u:object_r:qemud_socket:s0
 /dev/ttyGF[0-9]*             u:object_r:serial_device:s0
diff --git a/target/board/generic/sepolicy/logpersist.te b/target/board/generic/sepolicy/logpersist.te
index 0c52986..3fc0250 100644
--- a/target/board/generic/sepolicy/logpersist.te
+++ b/target/board/generic/sepolicy/logpersist.te
@@ -10,3 +10,4 @@
 
 # Write to /dev/ttyS2 and /dev/ttyGF2.
 allow logpersist serial_device:chr_file { write open };
+get_prop(logpersist, qemu_cmdline)
diff --git a/target/board/generic/sepolicy/property.te b/target/board/generic/sepolicy/property.te
index 22d580a..a486702 100644
--- a/target/board/generic/sepolicy/property.te
+++ b/target/board/generic/sepolicy/property.te
@@ -1,3 +1,4 @@
 type qemu_prop, property_type;
+type qemu_cmdline, property_type;
 type radio_noril_prop, property_type;
 type opengles_prop, property_type;
diff --git a/target/board/generic/sepolicy/property_contexts b/target/board/generic/sepolicy/property_contexts
index 142b062..c66a85f 100644
--- a/target/board/generic/sepolicy/property_contexts
+++ b/target/board/generic/sepolicy/property_contexts
@@ -1,4 +1,5 @@
 qemu.                   u:object_r:qemu_prop:s0
+qemu.cmdline            u:object_r:qemu_cmdline:s0
 ro.emu.                 u:object_r:qemu_prop:s0
 ro.emulator.            u:object_r:qemu_prop:s0
 ro.radio.noril          u:object_r:radio_noril_prop:s0
diff --git a/target/board/generic/sepolicy/qemu_props.te b/target/board/generic/sepolicy/qemu_props.te
index 6768ce7..95174d6 100644
--- a/target/board/generic/sepolicy/qemu_props.te
+++ b/target/board/generic/sepolicy/qemu_props.te
@@ -9,3 +9,4 @@
 set_prop(qemu_props, dalvik_prop)
 set_prop(qemu_props, config_prop)
 set_prop(qemu_props, opengles_prop)
+set_prop(qemu_props, qemu_cmdline)
diff --git a/target/product/base.mk b/target/product/base.mk
index 2eb20aa..89027bf 100644
--- a/target/product/base.mk
+++ b/target/product/base.mk
@@ -23,6 +23,7 @@
     appops \
     am \
     android.policy \
+    android.test.mock \
     android.test.runner \
     app_process \
     applypatch \
@@ -131,7 +132,8 @@
 
 # Packages included only for eng or userdebug builds, previously debug tagged
 PRODUCT_PACKAGES_DEBUG := \
-    perfprofd
+    perfprofd \
+    sqlite3
 
 PRODUCT_COPY_FILES := $(call add-to-product-copy-files-if-exists,\
     frameworks/base/preloaded-classes:system/etc/preloaded-classes)
diff --git a/target/product/core.mk b/target/product/core.mk
index c4c7cab..5a0195e 100644
--- a/target/product/core.mk
+++ b/target/product/core.mk
@@ -61,4 +61,14 @@
     VpnDialogs \
     MmsService
 
+# The set of packages whose code can be loaded by the system server.
+PRODUCT_SYSTEM_SERVER_APPS += \
+    FusedLocation \
+    InputDevices \
+    KeyChain \
+    Telecom \
+
+# The set of packages we want to force 'speed' compilation on.
+PRODUCT_DEXPREOPT_SPEED_APPS += \
+
 $(call inherit-product, $(SRC_TARGET_DIR)/product/core_base.mk)
diff --git a/target/product/core_minimal.mk b/target/product/core_minimal.mk
index fe1a382..5140237 100644
--- a/target/product/core_minimal.mk
+++ b/target/product/core_minimal.mk
@@ -88,20 +88,19 @@
 PRODUCT_COPY_FILES += \
     frameworks/native/data/etc/android.software.webview.xml:system/etc/permissions/android.software.webview.xml
 
+ifeq ($(TARGET_CORE_JARS),)
+$(error TARGET_CORE_JARS is empty; cannot initialize PRODUCT_BOOT_JARS variable)
+endif
+
 # The order of PRODUCT_BOOT_JARS matters.
 PRODUCT_BOOT_JARS := \
-    core-oj \
-    core-libart \
-    conscrypt \
-    okhttp \
+    $(TARGET_CORE_JARS) \
     legacy-test \
-    bouncycastle \
     ext \
     framework \
     telephony-common \
     voip-common \
     ims-common \
-    apache-xml \
     org.apache.http.legacy.boot
 
 # The order of PRODUCT_SYSTEM_SERVER_JARS matters.
@@ -110,6 +109,11 @@
     ethernet-service \
     wifi-service
 
+# The set of packages whose code can be loaded by the system server.
+PRODUCT_SYSTEM_SERVER_APPS += \
+    SettingsProvider \
+    WallpaperBackup
+
 # Adoptable external storage supports both ext4 and f2fs
 PRODUCT_PACKAGES += \
     e2fsck \
@@ -125,26 +129,8 @@
 PRODUCT_COPY_FILES += \
     system/core/rootdir/etc/public.libraries.android.txt:system/etc/public.libraries.txt
 
-# Different dexopt types for different package update/install times.
-# On eng builds, make "boot" reasons do pure JIT for faster turnaround.
-ifeq (eng,$(TARGET_BUILD_VARIANT))
-    PRODUCT_DEFAULT_PROPERTY_OVERRIDES += \
-        pm.dexopt.first-boot=verify-at-runtime \
-        pm.dexopt.boot=verify-at-runtime
-else
-    PRODUCT_DEFAULT_PROPERTY_OVERRIDES += \
-        pm.dexopt.first-boot=interpret-only \
-        pm.dexopt.boot=verify-profile
-endif
-PRODUCT_DEFAULT_PROPERTY_OVERRIDES += \
-    pm.dexopt.install=interpret-only \
-    pm.dexopt.bg-dexopt=speed-profile \
-    pm.dexopt.ab-ota=speed-profile \
-    pm.dexopt.nsys-library=speed \
-    pm.dexopt.shared-apk=speed \
-    pm.dexopt.forced-dexopt=speed \
-    pm.dexopt.core-app=speed
-
+PRODUCT_COPY_FILES += \
+    system/core/rootdir/etc/ld.config.txt:system/etc/ld.config.txt
 
 # Enable boot.oat filtering of compiled classes to reduce boot.oat size. b/28026683
 PRODUCT_COPY_FILES += $(call add-to-product-copy-files-if-exists,\
diff --git a/target/product/core_tiny.mk b/target/product/core_tiny.mk
index ef8794d..9a7fe8e 100644
--- a/target/product/core_tiny.mk
+++ b/target/product/core_tiny.mk
@@ -83,20 +83,19 @@
     logd \
     wifi-service
 
+ifeq ($(TARGET_CORE_JARS),)
+$(error TARGET_CORE_JARS is empty; cannot initialize PRODUCT_BOOT_JARS variable)
+endif
+
 # The order matters
 PRODUCT_BOOT_JARS := \
-    core-oj \
-    core-libart \
-    conscrypt \
-    okhttp \
+    $(TARGET_CORE_JARS) \
     legacy-test \
-    bouncycastle \
     ext \
     framework \
     telephony-common \
     voip-common \
     ims-common \
-    apache-xml \
     nullwebview \
     org.apache.http.legacy.boot
 
@@ -105,6 +104,16 @@
     services \
     wifi-service
 
+# The set of packages whose code can be loaded by the system server.
+PRODUCT_SYSTEM_SERVER_APPS += \
+    FusedLocation \
+    InputDevices \
+    SettingsProvider \
+    WallpaperBackup \
+
+# The set of packages we want to force 'speed' compilation on.
+PRODUCT_DEXPREOPT_SPEED_APPS := \
+
 PRODUCT_DEFAULT_PROPERTY_OVERRIDES += \
     ro.zygote=zygote32
 PRODUCT_COPY_FILES += \
@@ -113,26 +122,6 @@
 PRODUCT_PROPERTY_OVERRIDES += \
     ro.carrier=unknown
 
-# Different dexopt types for different package update/install times.
-# On eng builds, make "boot" reasons do pure JIT for faster turnaround.
-ifeq (eng,$(TARGET_BUILD_VARIANT))
-    PRODUCT_DEFAULT_PROPERTY_OVERRIDES += \
-        pm.dexopt.first-boot=verify-at-runtime \
-        pm.dexopt.boot=verify-at-runtime
-else
-    PRODUCT_DEFAULT_PROPERTY_OVERRIDES += \
-        pm.dexopt.first-boot=interpret-only \
-        pm.dexopt.boot=verify-profile
-endif
-PRODUCT_DEFAULT_PROPERTY_OVERRIDES += \
-    pm.dexopt.install=interpret-only \
-    pm.dexopt.bg-dexopt=speed-profile \
-    pm.dexopt.ab-ota=speed-profile \
-    pm.dexopt.nsys-library=speed \
-    pm.dexopt.shared-apk=speed \
-    pm.dexopt.forced-dexopt=speed \
-    pm.dexopt.core-app=speed
-
 $(call inherit-product, $(SRC_TARGET_DIR)/product/runtime_libart.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/base.mk)
 $(call inherit-product-if-exists, frameworks/base/data/fonts/fonts.mk)
diff --git a/target/product/embedded.mk b/target/product/embedded.mk
index c77bce2..e04731c 100644
--- a/target/product/embedded.mk
+++ b/target/product/embedded.mk
@@ -20,7 +20,6 @@
 PRODUCT_PACKAGES += \
     adb \
     adbd \
-    android.hardware.configstore@1.0-impl \
     android.hardware.configstore@1.0-service \
     android.hidl.allocator@1.0-service \
     android.hidl.memory@1.0-impl \
@@ -34,8 +33,6 @@
     dumpsys \
     fastboot \
     gralloc.default \
-    grep \
-    gzip \
     healthd \
     hwservicemanager \
     init \
@@ -72,16 +69,12 @@
     logcat \
     logwrapper \
     lshal \
-    mkshrc \
-    reboot \
     recovery \
     service \
     servicemanager \
-    sh \
+    shell_and_utilities \
     surfaceflinger \
     tombstoned \
-    toolbox \
-    toybox \
     tzdatacheck \
 
 # SELinux packages
@@ -93,7 +86,8 @@
     plat_mac_permissions.xml \
     plat_property_contexts \
     plat_seapp_contexts \
-    plat_service_contexts
+    plat_service_contexts \
+    selinux_policy
 
 # AID Generation for
 # <pwd.h> and <grp.h>
diff --git a/target/product/emulator.mk b/target/product/emulator.mk
index b08a28a..afa8389 100644
--- a/target/product/emulator.mk
+++ b/target/product/emulator.mk
@@ -61,6 +61,7 @@
     device/generic/goldfish/init.ranchu.rc:root/init.ranchu.rc \
     device/generic/goldfish/fstab.ranchu:root/fstab.ranchu \
     device/generic/goldfish/ueventd.ranchu.rc:root/ueventd.ranchu.rc \
+    device/generic/goldfish/input/goldfish_rotary.idc:system/usr/idc/goldfish_rotary.idc \
     frameworks/native/data/etc/android.hardware.usb.accessory.xml:system/etc/permissions/android.hardware.usb.accessory.xml
 
 PRODUCT_PACKAGE_OVERLAYS := device/generic/goldfish/overlay
diff --git a/target/product/product_launched_with_n_mr1.mk b/target/product/product_launched_with_n_mr1.mk
new file mode 100644
index 0000000..65d4d3f
--- /dev/null
+++ b/target/product/product_launched_with_n_mr1.mk
@@ -0,0 +1,2 @@
+# PRODUCT_SHIPPING_API_LEVEL indicates the first API level the device was commercially launched on.
+PRODUCT_SHIPPING_API_LEVEL := 25
diff --git a/target/product/runtime_libart.mk b/target/product/runtime_libart.mk
index fb52d67..b120298 100644
--- a/target/product/runtime_libart.mk
+++ b/target/product/runtime_libart.mk
@@ -16,15 +16,14 @@
 
 # Provides a functioning ART environment without Android frameworks
 
+ifeq ($(TARGET_CORE_JARS),)
+$(error TARGET_CORE_JARS is empty; cannot update PRODUCT_PACKAGES variable)
+endif
+
 # Minimal boot classpath. This should be a subset of PRODUCT_BOOT_JARS, and equivalent to
 # TARGET_CORE_JARS.
 PRODUCT_PACKAGES += \
-    apache-xml \
-    bouncycastle \
-    core-oj \
-    core-libart \
-    conscrypt \
-    okhttp \
+    $(TARGET_CORE_JARS)
 
 # Additional mixins to the boot classpath.
 PRODUCT_PACKAGES += \
@@ -63,6 +62,7 @@
 # ART/dex helpers.
 PRODUCT_PACKAGES += \
     ahat \
+    dexdiag \
     dexdump \
     dexlist \
     hprof-conv \
@@ -82,3 +82,24 @@
     dalvik.vm.usejitprofiles=true \
     dalvik.vm.dexopt.secondary=true \
     dalvik.vm.appimageformat=lz4
+
+# Different dexopt types for different package update/install times.
+# On eng builds, make "boot" reasons only extract for faster turnaround.
+ifeq (eng,$(TARGET_BUILD_VARIANT))
+    PRODUCT_DEFAULT_PROPERTY_OVERRIDES += \
+        pm.dexopt.first-boot=extract \
+        pm.dexopt.boot=extract
+else
+    PRODUCT_DEFAULT_PROPERTY_OVERRIDES += \
+        pm.dexopt.first-boot=quicken \
+        pm.dexopt.boot=verify
+endif
+
+PRODUCT_DEFAULT_PROPERTY_OVERRIDES += \
+    pm.dexopt.install=quicken \
+    pm.dexopt.bg-dexopt=speed-profile \
+    pm.dexopt.ab-ota=speed-profile \
+    pm.dexopt.nsys-library=speed \
+    pm.dexopt.shared-apk=speed \
+    pm.dexopt.forced-dexopt=speed \
+    pm.dexopt.core-app=speed
diff --git a/tests/envsetup_tests.sh b/tests/envsetup_tests.sh
new file mode 100755
index 0000000..abdcd56
--- /dev/null
+++ b/tests/envsetup_tests.sh
@@ -0,0 +1,37 @@
+#!/bin/bash -e
+
+source $(dirname $0)/../envsetup.sh
+
+unset TARGET_PRODUCT TARGET_BUILD_VARIANT TARGET_PLATFORM_VERSION
+
+function check_lunch
+(
+    echo lunch $1
+    set +e
+    lunch $1 > /dev/null 2> /dev/null
+    set -e
+    [ "$TARGET_PRODUCT" = "$2" ] || ( echo "lunch $1: expected TARGET_PRODUCT='$2', got '$TARGET_PRODUCT'" && exit 1 )
+    [ "$TARGET_BUILD_VARIANT" = "$3" ] || ( echo "lunch $1: expected TARGET_BUILD_VARIANT='$3', got '$TARGET_BUILD_VARIANT'" && exit 1 )
+    [ "$TARGET_PLATFORM_VERSION" = "$4" ] || ( echo "lunch $1: expected TARGET_PLATFORM_VERSION='$4', got '$TARGET_PLATFORM_VERSION'" && exit 1 )
+)
+
+default_version=$(get_build_var DEFAULT_PLATFORM_VERSION)
+valid_version=PPR1
+
+# lunch tests
+check_lunch "aosp_arm64"                                "aosp_arm64" "eng"       ""
+check_lunch "aosp_arm64-userdebug"                      "aosp_arm64" "userdebug" ""
+check_lunch "aosp_arm64-userdebug-$default_version"     "aosp_arm64" "userdebug" "$default_version"
+check_lunch "aosp_arm64-userdebug-$valid_version"       "aosp_arm64" "userdebug" "$valid_version"
+check_lunch "abc"                                       "" "" ""
+check_lunch "aosp_arm64-abc"                            "" "" ""
+check_lunch "aosp_arm64-userdebug-abc"                  "" "" ""
+check_lunch "aosp_arm64-abc-$valid_version"             "" "" ""
+check_lunch "abc-userdebug-$valid_version"              "" "" ""
+check_lunch "-"                                         "" "" ""
+check_lunch "--"                                        "" "" ""
+check_lunch "-userdebug"                                "" "" ""
+check_lunch "-userdebug-"                               "" "" ""
+check_lunch "-userdebug-$valid_version"                 "" "" ""
+check_lunch "aosp_arm64-userdebug-$valid_version-"      "" "" ""
+check_lunch "aosp_arm64-userdebug-$valid_version-abc"   "" "" ""
diff --git a/tools/OWNERS b/tools/OWNERS
new file mode 100644
index 0000000..7a23adc
--- /dev/null
+++ b/tools/OWNERS
@@ -0,0 +1,2 @@
+per-file warn.py = chh@google.com
+per-file checkowners.py = chh@google.com
diff --git a/tools/checkowners.py b/tools/checkowners.py
index 8f450e7..b874955 100755
--- a/tools/checkowners.py
+++ b/tools/checkowners.py
@@ -5,6 +5,7 @@
 import argparse
 import re
 import sys
+import urllib
 import urllib2
 
 parser = argparse.ArgumentParser(description='Check OWNERS file syntax')
@@ -29,7 +30,8 @@
 
 def find_address(address):
   if address not in checked_addresses:
-    request = gerrit_server + '/accounts/?suggest&q=' + address
+    request = (gerrit_server + '/accounts/?n=1&o=ALL_EMAILS&q=email:'
+               + urllib.quote(address))
     echo('Checking email address: ' + address)
     result = urllib2.urlopen(request).read()
     expected = '"email": "' + address + '"'
diff --git a/tools/fileslist.py b/tools/fileslist.py
deleted file mode 100755
index b9e7350..0000000
--- a/tools/fileslist.py
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (C) 2009 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the 'License');
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an 'AS IS' BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import json, hashlib, operator, os, sys
-
-def get_file_size(path):
-  st = os.lstat(path)
-  return st.st_size;
-
-def get_file_digest(path):
-  if os.path.isfile(path) == False:
-    return "----------------------------------------------------------------"
-  digest = hashlib.sha256()
-  with open(path, 'rb') as f:
-    while True:
-      buf = f.read(1024*1024)
-      if not buf:
-        break
-      digest.update(buf)
-  return digest.hexdigest();
-
-def main(argv):
-  output = []
-  roots = argv[1:]
-  for root in roots:
-    base = len(root[:root.rfind(os.path.sep)])
-    for dir, dirs, files in os.walk(root):
-      relative = dir[base:]
-      for f in files:
-        try:
-          path = os.path.sep.join((dir, f))
-          row = {
-              "Size": get_file_size(path),
-              "Name": os.path.sep.join((relative, f)),
-              "SHA256": get_file_digest(path),
-            }
-          output.append(row)
-        except os.error:
-          pass
-  output.sort(key=operator.itemgetter("Size", "Name"), reverse=True)
-  print json.dumps(output, indent=2, separators=(',',': '))
-
-if __name__ == '__main__':
-  main(sys.argv)
diff --git a/tools/fs_config/Android.mk b/tools/fs_config/Android.mk
index 65f8a08..ab7f92d 100644
--- a/tools/fs_config/Android.mk
+++ b/tools/fs_config/Android.mk
@@ -113,6 +113,11 @@
 
 include $(BUILD_HOST_EXECUTABLE)
 fs_config_generate_bin := $(LOCAL_INSTALLED_MODULE)
+# List of all supported vendor, oem and odm partitions
+fs_config_generate_extra_partition_list := $(strip \
+  $(if $(BOARD_USES_VENDORIMAGE)$(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE),vendor) \
+  $(if $(BOARD_USES_OEMIMAGE)$(BOARD_OEMIMAGE_FILE_SYSTEM_TYPE),oem) \
+  $(if $(BOARD_USES_ODMIMAGE)$(BOARD_ODMIMAGE_FILE_SYSTEM_TYPE),odm))
 
 ##################################
 # Generate the system/etc/fs_config_dirs binary file for the target
@@ -121,10 +126,13 @@
 
 LOCAL_MODULE := fs_config_dirs
 LOCAL_MODULE_CLASS := ETC
+LOCAL_REQUIRED_MODULES := $(foreach t,$(fs_config_generate_extra_partition_list),$(LOCAL_MODULE)_$(t))
 include $(BUILD_SYSTEM)/base_rules.mk
 $(LOCAL_BUILT_MODULE): $(fs_config_generate_bin)
 	@mkdir -p $(dir $@)
-	$< -D -o $@
+	$< -D $(if $(fs_config_generate_extra_partition_list), \
+	   -P '$(subst $(space),$(comma),$(addprefix -,$(fs_config_generate_extra_partition_list)))') \
+	   -o $@
 
 ##################################
 # Generate the system/etc/fs_config_files binary file for the target
@@ -133,10 +141,112 @@
 
 LOCAL_MODULE := fs_config_files
 LOCAL_MODULE_CLASS := ETC
+LOCAL_REQUIRED_MODULES := $(foreach t,$(fs_config_generate_extra_partition_list),$(LOCAL_MODULE)_$(t))
 include $(BUILD_SYSTEM)/base_rules.mk
 $(LOCAL_BUILT_MODULE): $(fs_config_generate_bin)
 	@mkdir -p $(dir $@)
-	$< -F -o $@
+	$< -F $(if $(fs_config_generate_extra_partition_list), \
+	   -P '$(subst $(space),$(comma),$(addprefix -,$(fs_config_generate_extra_partition_list)))') \
+	   -o $@
+
+ifneq ($(filter vendor,$(fs_config_generate_extra_partition_list)),)
+##################################
+# Generate the vendor/etc/fs_config_dirs binary file for the target
+# Add fs_config_dirs or fs_config_dirs_vendor to PRODUCT_PACKAGES in
+# the device make file to enable.
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := fs_config_dirs_vendor
+LOCAL_MODULE_CLASS := ETC
+LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
+LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/etc
+include $(BUILD_SYSTEM)/base_rules.mk
+$(LOCAL_BUILT_MODULE): $(fs_config_generate_bin)
+	@mkdir -p $(dir $@)
+	$< -D -P vendor -o $@
+
+##################################
+# Generate the vendor/etc/fs_config_files binary file for the target
+# Add fs_config_files or fs_config_files_vendor to PRODUCT_PACKAGES in
+# the device make file to enable
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := fs_config_files_vendor
+LOCAL_MODULE_CLASS := ETC
+LOCAL_INSTALLED_MODULE_STEM := fs_config_files
+LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/etc
+include $(BUILD_SYSTEM)/base_rules.mk
+$(LOCAL_BUILT_MODULE): $(fs_config_generate_bin)
+	@mkdir -p $(dir $@)
+	$< -F -P vendor -o $@
+
+endif
+
+ifneq ($(filter oem,$(fs_config_generate_extra_partition_list)),)
+##################################
+# Generate the oem/etc/fs_config_dirs binary file for the target
+# Add fs_config_dirs or fs_config_dirs_oem to PRODUCT_PACKAGES in
+# the device make file to enable
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := fs_config_dirs_oem
+LOCAL_MODULE_CLASS := ETC
+LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
+LOCAL_MODULE_PATH := $(TARGET_OUT_OEM)/etc
+include $(BUILD_SYSTEM)/base_rules.mk
+$(LOCAL_BUILT_MODULE): $(fs_config_generate_bin)
+	@mkdir -p $(dir $@)
+	$< -D -P oem -o $@
+
+##################################
+# Generate the oem/etc/fs_config_files binary file for the target
+# Add fs_config_files or fs_config_files_oem to PRODUCT_PACKAGES in
+# the device make file to enable
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := fs_config_files_oem
+LOCAL_MODULE_CLASS := ETC
+LOCAL_INSTALLED_MODULE_STEM := fs_config_files
+LOCAL_MODULE_PATH := $(TARGET_OUT_OEM)/etc
+include $(BUILD_SYSTEM)/base_rules.mk
+$(LOCAL_BUILT_MODULE): $(fs_config_generate_bin)
+	@mkdir -p $(dir $@)
+	$< -F -P oem -o $@
+
+endif
+
+ifneq ($(filter odm,$(fs_config_generate_extra_partition_list)),)
+##################################
+# Generate the odm/etc/fs_config_dirs binary file for the target
+# Add fs_config_dirs or fs_config_dirs_odm to PRODUCT_PACKAGES in
+# the device make file to enable
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := fs_config_dirs_odm
+LOCAL_MODULE_CLASS := ETC
+LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
+LOCAL_MODULE_PATH := $(TARGET_OUT_ODM)/etc
+include $(BUILD_SYSTEM)/base_rules.mk
+$(LOCAL_BUILT_MODULE): $(fs_config_generate_bin)
+	@mkdir -p $(dir $@)
+	$< -D -P odm -o $@
+
+##################################
+# Generate the odm/etc/fs_config_files binary file for the target
+# Add fs_config_files or fs_config_files_odm to PRODUCT_PACKAGES in
+# the device make file to enable
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := fs_config_files_odm
+LOCAL_MODULE_CLASS := ETC
+LOCAL_INSTALLED_MODULE_STEM := fs_config_files
+LOCAL_MODULE_PATH := $(TARGET_OUT_ODM)/etc
+include $(BUILD_SYSTEM)/base_rules.mk
+$(LOCAL_BUILT_MODULE): $(fs_config_generate_bin)
+	@mkdir -p $(dir $@)
+	$< -F -P odm -o $@
+
+endif
 
 # The newer passwd/group targets are only generated if you
 # use the new TARGET_FS_CONFIG_GEN method.
@@ -195,3 +305,37 @@
 my_fs_config_h :=
 fs_config_generate_bin :=
 my_gen_oem_aid :=
+fs_config_generate_extra_partition_list :=
+
+# -----------------------------------------------------------------------------
+# Unit tests.
+# -----------------------------------------------------------------------------
+
+test_c_flags := \
+    -fstack-protector-all \
+    -g \
+    -Wall \
+    -Wextra \
+    -Werror \
+    -fno-builtin \
+    -DANDROID_FILESYSTEM_CONFIG='"android_filesystem_config_test_data.h"'
+
+##################################
+# test executable
+include $(CLEAR_VARS)
+LOCAL_MODULE := fs_config_generate_test
+LOCAL_SRC_FILES := fs_config_generate.c
+LOCAL_SHARED_LIBRARIES := libcutils
+LOCAL_CFLAGS := $(test_c_flags)
+LOCAL_MODULE_RELATIVE_PATH := fs_config-unit-tests
+LOCAL_GTEST := false
+include $(BUILD_HOST_NATIVE_TEST)
+
+##################################
+# gTest tool
+include $(CLEAR_VARS)
+LOCAL_MODULE := fs_config-unit-tests
+LOCAL_CFLAGS += $(test_c_flags) -DHOST
+LOCAL_SHARED_LIBRARIES := liblog libcutils libbase
+LOCAL_SRC_FILES := fs_config_test.cpp
+include $(BUILD_HOST_NATIVE_TEST)
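The two targets above add a host gtest plus a companion fs_config_generate binary built against the test data header. A sketch of building and running them; the exact host output path depends on the host arch and out layout:

    m fs_config-unit-tests fs_config_generate_test
    "$ANDROID_HOST_OUT"/nativetest64/fs_config-unit-tests/fs_config-unit-tests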
diff --git a/tools/fs_config/README b/tools/fs_config/README
index d884e32..5af407f 100644
--- a/tools/fs_config/README
+++ b/tools/fs_config/README
@@ -3,8 +3,7 @@
 |  _  <|   __||  _  ||  |  ||  \/  ||   __|
 \__|\_/\_____/\__|__/|_____/\__ \__/\_____/
 
-
-Generating the android_filesystem_config.h
+Generating the android_filesystem_config.h:
 
 To generate the android_filesystem_config.h file, one can choose from
 one of two methods. The first method is to declare
@@ -140,3 +139,45 @@
 
 To add new tests, simply add a test_<xxx> method to the test class. It will automatically
 get picked up and added to the test suite.
+
+Using the android_filesystem_config.h:
+
+The tool fs_config_generate is built as a dependency of the fs_config_dirs and
+fs_config_files host targets. It #includes the supplied or generated
+android_filesystem_config.h file described above and can be instructed to
+generate the binary data that lands in the device target locations
+/system/etc/fs_config_dirs and /system/etc/fs_config_files, and in the host's
+${OUT} locations ${OUT}/target/product/<device>/system/etc/fs_config_dirs and
+${OUT}/target/product/<device>/system/etc/fs_config_files. The binary files
+are interpreted by the libcutils fs_conf() function, along with the built-in
+defaults, to serve as overrides to complete the results. The target files are
+used by filesystem and adb tools to ensure that file and directory properties
+are preserved during runtime operations. The host files in the ${OUT}
+directory are used in the final stages of building the filesystem images to
+set the file and directory properties.
+
+For systems with separate partition images, such as vendor or oem,
+fs_config_generate can be instructed to filter the specific file references
+to land in each partition's etc/fs_config_dirs or etc/fs_config_files
+locations. The filter can be instructed to blacklist a partition's data by
+providing a comma-separated list of partition names, each prefixed with a
+minus sign. The filter can be instructed to whitelist a partition's data by
+providing the partition name.
+
+For example:
+- For system.img, but not vendor, oem or odm file references:
+      -P -vendor,-oem,-odm
+  This makes sure the results only contain content associated with the
+  system, and not vendor, oem or odm, blacklisting their content.
+- For vendor.img file references: -P vendor
+- For oem.img file references: -P oem
+- For odm.img file references: -P odm
+
+fs_config_generate --help reports:
+
+Generate binary content for fs_config_dirs (-D) and fs_config_files (-F)
+from device-specific android_filesystem_config.h override. Filter based
+on a comma separated partition list (-P) whitelist or prefixed by a
+minus blacklist. Partitions are identified as path references to
+<partition>/ or system/<partition>/
+
+Usage: fs_config_generate -D|-F [-P list] [-o output-file]
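For instance, the system-only blacklist case from the examples above corresponds to an invocation like:

    fs_config_generate -F -P -vendor,-oem,-odm -o fs_config_files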
diff --git a/tools/fs_config/android_filesystem_config_test_data.h b/tools/fs_config/android_filesystem_config_test_data.h
new file mode 100644
index 0000000..07bc8e5
--- /dev/null
+++ b/tools/fs_config/android_filesystem_config_test_data.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <private/android_filesystem_config.h>
+
+/* Test Data */
+
+#undef NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_DIRS
+#undef NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_FILES
+
+static const struct fs_path_config android_device_dirs[] = {
+    {00555, AID_ROOT, AID_SYSTEM, 0, "system/etc"},
+    {00555, AID_ROOT, AID_SYSTEM, 0, "vendor/etc"},
+    {00555, AID_ROOT, AID_SYSTEM, 0, "oem/etc"},
+    {00555, AID_ROOT, AID_SYSTEM, 0, "odm/etc"},
+    {00755, AID_SYSTEM, AID_ROOT, 0, "system/oem/etc"},
+    {00755, AID_SYSTEM, AID_ROOT, 0, "system/odm/etc"},
+    {00755, AID_SYSTEM, AID_ROOT, 0, "system/vendor/etc"},
+    {00755, AID_SYSTEM, AID_ROOT, 0, "data/misc"},
+    {00755, AID_SYSTEM, AID_ROOT, 0, "oem/data/misc"},
+    {00755, AID_SYSTEM, AID_ROOT, 0, "odm/data/misc"},
+    {00755, AID_SYSTEM, AID_ROOT, 0, "vendor/data/misc"},
+    {00555, AID_SYSTEM, AID_ROOT, 0, "etc"},
+};
+
+static const struct fs_path_config android_device_files[] = {
+    {00444, AID_ROOT, AID_SYSTEM, 0, "system/etc/fs_config_dirs"},
+    {00444, AID_ROOT, AID_SYSTEM, 0, "vendor/etc/fs_config_dirs"},
+    {00444, AID_ROOT, AID_SYSTEM, 0, "oem/etc/fs_config_dirs"},
+    {00444, AID_ROOT, AID_SYSTEM, 0, "odm/etc/fs_config_dirs"},
+    {00444, AID_ROOT, AID_SYSTEM, 0, "system/etc/fs_config_files"},
+    {00444, AID_ROOT, AID_SYSTEM, 0, "vendor/etc/fs_config_files"},
+    {00444, AID_ROOT, AID_SYSTEM, 0, "oem/etc/fs_config_files"},
+    {00444, AID_ROOT, AID_SYSTEM, 0, "odm/etc/fs_config_files"},
+    {00644, AID_SYSTEM, AID_ROOT, 0, "system/vendor/etc/fs_config_dirs"},
+    {00644, AID_SYSTEM, AID_ROOT, 0, "system/oem/etc/fs_config_dirs"},
+    {00644, AID_SYSTEM, AID_ROOT, 0, "system/odm/etc/fs_config_dirs"},
+    {00644, AID_SYSTEM, AID_ROOT, 0, "system/vendor/etc/fs_config_files"},
+    {00644, AID_SYSTEM, AID_ROOT, 0, "system/oem/etc/fs_config_files"},
+    {00644, AID_SYSTEM, AID_ROOT, 0, "system/odm/etc/fs_config_files"},
+    {00644, AID_SYSTEM, AID_ROOT, 0, "etc/fs_config_files"},
+    {00666, AID_ROOT, AID_SYSTEM, 0, "data/misc/oem"},
+};
diff --git a/tools/fs_config/fs_config_generate.c b/tools/fs_config/fs_config_generate.c
index c06213f..cb7ff9d 100644
--- a/tools/fs_config/fs_config_generate.c
+++ b/tools/fs_config/fs_config_generate.c
@@ -14,9 +14,11 @@
  * limitations under the License.
  */
 
+#include <ctype.h>
 #include <stdbool.h>
 #include <stdio.h>
 #include <stdlib.h>
+#include <string.h>
 #include <unistd.h>
 
 #include <private/android_filesystem_config.h>
@@ -28,38 +30,57 @@
  * the binary format used in the /system/etc/fs_config_dirs and
  * the /system/etc/fs_config_files to be used by the runtimes.
  */
+#ifdef ANDROID_FILESYSTEM_CONFIG
+#include ANDROID_FILESYSTEM_CONFIG
+#else
 #include "android_filesystem_config.h"
+#endif
 
 #ifdef NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_DIRS
-  static const struct fs_path_config android_device_dirs[] = {
-};
+static const struct fs_path_config android_device_dirs[] = { };
 #endif
 
 #ifdef NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_FILES
 static const struct fs_path_config android_device_files[] = {
 #ifdef NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_DIRS
-    { 0, AID_ROOT, AID_ROOT, 0, "system/etc/fs_config_dirs" },
+    {0000, AID_ROOT, AID_ROOT, 0, "system/etc/fs_config_dirs"},
+    {0000, AID_ROOT, AID_ROOT, 0, "vendor/etc/fs_config_dirs"},
+    {0000, AID_ROOT, AID_ROOT, 0, "oem/etc/fs_config_dirs"},
+    {0000, AID_ROOT, AID_ROOT, 0, "odm/etc/fs_config_dirs"},
 #endif
-    { 0, AID_ROOT, AID_ROOT, 0, "system/etc/fs_config_files" },
+    {0000, AID_ROOT, AID_ROOT, 0, "system/etc/fs_config_files"},
+    {0000, AID_ROOT, AID_ROOT, 0, "vendor/etc/fs_config_files"},
+    {0000, AID_ROOT, AID_ROOT, 0, "oem/etc/fs_config_files"},
+    {0000, AID_ROOT, AID_ROOT, 0, "odm/etc/fs_config_files"},
 };
 #endif
 
 static void usage() {
   fprintf(stderr,
     "Generate binary content for fs_config_dirs (-D) and fs_config_files (-F)\n"
-    "from device-specific android_filesystem_config.h override\n\n"
-    "Usage: fs_config_generate -D|-F [-o output-file]\n");
+    "from device-specific android_filesystem_config.h override.  Filter based\n"
+    "on a comma separated partition list (-P) whitelist or prefixed by a\n"
+    "minus blacklist.  Partitions are identified as path references to\n"
+    "<partition>/ or system/<partition>/\n\n"
+    "Usage: fs_config_generate -D|-F [-P list] [-o output-file]\n");
 }
 
-int main(int argc, char** argv) {
-  const struct fs_path_config *pc;
-  const struct fs_path_config *end;
-  bool dir = false, file = false;
-  FILE *fp = stdout;
-  int opt;
+/* If tool switches to C++, use android-base/macros.h array_size() */
+#ifndef ARRAY_SIZE /* popular macro */
+#define ARRAY_SIZE(x) (sizeof(x) / sizeof((x)[0]))
+#endif
 
-  while((opt = getopt(argc, argv, "DFho:")) != -1) {
-    switch(opt) {
+int main(int argc, char** argv) {
+  const struct fs_path_config* pc;
+  const struct fs_path_config* end;
+  bool dir = false, file = false;
+  const char* partitions = NULL;
+  FILE* fp = stdout;
+  int opt;
+  static const char optstring[] = "DFP:ho:";
+
+  while ((opt = getopt(argc, argv, optstring)) != -1) {
+    switch (opt) {
     case 'D':
       if (file) {
         fprintf(stderr, "Must specify only -D or -F\n");
@@ -76,6 +97,30 @@
       }
       file = true;
       break;
+    case 'P':
+      if (partitions) {
+        fprintf(stderr, "Specify only one partition list\n");
+        usage();
+        exit(EXIT_FAILURE);
+      }
+      while (*optarg && isspace(*optarg)) ++optarg;
+      if (!optarg[0]) {
+        fprintf(stderr, "Partition list empty\n");
+        usage();
+        exit(EXIT_FAILURE);
+      }
+      if (!optarg[1]) {
+        fprintf(stderr, "Partition list too short \"%s\"\n", optarg);
+        usage();
+        exit(EXIT_FAILURE);
+      }
+      if ((optarg[0] == '-') && strchr(optstring, optarg[1]) && !optarg[2]) {
+        fprintf(stderr, "Partition list is a flag \"%s\"\n", optarg);
+        usage();
+        exit(EXIT_FAILURE);
+      }
+      partitions = optarg;
+      break;
     case 'o':
       if (fp != stdout) {
         fprintf(stderr, "Specify only one output file\n");
@@ -97,6 +142,12 @@
     }
   }
 
+  if (optind < argc) {
+    fprintf(stderr, "Unknown non-argument \"%s\"\n", argv[optind]);
+    usage();
+    exit(EXIT_FAILURE);
+  }
+
   if (!file && !dir) {
     fprintf(stderr, "Must specify either -F or -D\n");
     usage();
@@ -105,19 +156,64 @@
 
   if (dir) {
     pc = android_device_dirs;
-    end = &android_device_dirs[sizeof(android_device_dirs) / sizeof(android_device_dirs[0])];
+    end = &android_device_dirs[ARRAY_SIZE(android_device_dirs)];
   } else {
     pc = android_device_files;
-    end = &android_device_files[sizeof(android_device_files) / sizeof(android_device_files[0])];
+    end = &android_device_files[ARRAY_SIZE(android_device_files)];
   }
-  for(; (pc < end) && pc->prefix; pc++) {
+  for (; (pc < end) && pc->prefix; pc++) {
+    bool submit;
     char buffer[512];
     ssize_t len = fs_config_generate(buffer, sizeof(buffer), pc);
     if (len < 0) {
       fprintf(stderr, "Entry too large\n");
       exit(EXIT_FAILURE);
     }
-    if (fwrite(buffer, 1, len, fp) != (size_t)len) {
+    submit = true;
+    if (partitions) {
+      char* partitions_copy = strdup(partitions);
+      char* arg = partitions_copy;
+      char* sv = NULL; /* Do not leave uninitialized, NULL is known safe. */
+      /* Deal with case all iterated partitions are blacklists with no match */
+      bool all_blacklist_but_no_match = true;
+      submit = false;
+
+      if (!partitions_copy) {
+        fprintf(stderr, "Failed to allocate a copy of %s\n", partitions);
+        exit(EXIT_FAILURE);
+      }
+      /* iterate through (officially) comma separated list of partitions */
+      while (!!(arg = strtok_r(arg, ",:; \t\n\r\f", &sv))) {
+        static const char system[] = "system/";
+        size_t plen;
+        bool blacklist = false;
+        if (*arg == '-') {
+          blacklist = true;
+          ++arg;
+        } else {
+          all_blacklist_but_no_match = false;
+        }
+        plen = strlen(arg);
+        /* deal with evil callers */
+        while (plen && (arg[plen - 1] == '/')) {
+          --plen;
+        }
+        /* check if we have <partition>/ or /system/<partition>/ */
+        if ((!strncmp(pc->prefix, arg, plen) && (pc->prefix[plen] == '/')) ||
+            (!strncmp(pc->prefix, system, strlen(system)) &&
+             !strncmp(pc->prefix + strlen(system), arg, plen) &&
+             (pc->prefix[strlen(system) + plen] == '/'))) {
+          all_blacklist_but_no_match = false;
+          /* we have a match !!! */
+          if (!blacklist) submit = true;
+          break;
+        }
+        arg = NULL;
+      }
+      free(partitions_copy);
+      if (all_blacklist_but_no_match) submit = true;
+    }
+    if (submit && (fwrite(buffer, 1, len, fp) != (size_t)len)) {
       fprintf(stderr, "Write failure\n");
       exit(EXIT_FAILURE);
     }
diff --git a/tools/fs_config/fs_config_generator.py b/tools/fs_config/fs_config_generator.py
index 2cf2fd8..c8d1dd3 100755
--- a/tools/fs_config/fs_config_generator.py
+++ b/tools/fs_config/fs_config_generator.py
@@ -709,7 +709,7 @@
                 int(cap, 0)
                 tmp.append('(' + cap + ')')
             except ValueError:
-                tmp.append('(1ULL << CAP_' + cap.upper() + ')')
+                tmp.append('CAP_MASK_LONG(CAP_' + cap.upper() + ')')
 
         caps = tmp
 
diff --git a/tools/fs_config/fs_config_test.cpp b/tools/fs_config/fs_config_test.cpp
new file mode 100644
index 0000000..f95a4ca
--- /dev/null
+++ b/tools/fs_config/fs_config_test.cpp
@@ -0,0 +1,223 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdio.h>
+#include <sys/cdefs.h>
+
+#include <string>
+#include <vector>
+
+#include <android-base/file.h>
+#include <android-base/macros.h>
+#include <android-base/strings.h>
+#include <android-base/stringprintf.h>
+#include <gtest/gtest.h>
+#include <private/android_filesystem_config.h>
+#include <private/fs_config.h>
+
+#include "android_filesystem_config_test_data.h"
+
+// must run test in the test directory
+const static char fs_config_generate_command[] = "./fs_config_generate_test";
+
+static std::string popenToString(std::string command) {
+  std::string ret;
+
+  FILE* fp = popen(command.c_str(), "r");
+  if (fp) {
+    if (!android::base::ReadFdToString(fileno(fp), &ret)) ret = "";
+    pclose(fp);
+  }
+  return ret;
+}
+
+static void confirm(std::string&& data, const fs_path_config* config,
+                    ssize_t num_config) {
+  const struct fs_path_config_from_file* pc =
+      reinterpret_cast<const fs_path_config_from_file*>(data.c_str());
+  size_t len = data.size();
+
+  ASSERT_TRUE(config != NULL);
+  ASSERT_LT(0, num_config);
+
+  while (len > 0) {
+    uint16_t host_len = pc->len;
+    if (host_len > len) break;
+
+    EXPECT_EQ(config->mode, pc->mode);
+    EXPECT_EQ(config->uid, pc->uid);
+    EXPECT_EQ(config->gid, pc->gid);
+    EXPECT_EQ(config->capabilities, pc->capabilities);
+    EXPECT_STREQ(config->prefix, pc->prefix);
+
+    EXPECT_LT(0, num_config);
+    --num_config;
+    if (num_config >= 0) ++config;
+    pc = reinterpret_cast<const fs_path_config_from_file*>(
+        reinterpret_cast<const char*>(pc) + host_len);
+    len -= host_len;
+  }
+  EXPECT_EQ(0, num_config);
+}
+
+/* See local android_filesystem_config.h for test data */
+
+TEST(fs_conf_test, dirs) {
+  confirm(popenToString(
+              android::base::StringPrintf("%s -D", fs_config_generate_command)),
+          android_device_dirs, arraysize(android_device_dirs));
+}
+
+TEST(fs_conf_test, files) {
+  confirm(popenToString(
+              android::base::StringPrintf("%s -F", fs_config_generate_command)),
+          android_device_files, arraysize(android_device_files));
+}
+
+static const char vendor_str[] = "vendor/";
+static const char vendor_alt_str[] = "system/vendor/";
+static const char oem_str[] = "oem/";
+static const char oem_alt_str[] = "system/oem/";
+static const char odm_str[] = "odm/";
+static const char odm_alt_str[] = "system/odm/";
+
+TEST(fs_conf_test, system_dirs) {
+  std::vector<fs_path_config> dirs;
+  const fs_path_config* config = android_device_dirs;
+  for (size_t num = arraysize(android_device_dirs); num; --num) {
+    if (!android::base::StartsWith(config->prefix, vendor_str) &&
+        !android::base::StartsWith(config->prefix, vendor_alt_str) &&
+        !android::base::StartsWith(config->prefix, oem_str) &&
+        !android::base::StartsWith(config->prefix, oem_alt_str) &&
+        !android::base::StartsWith(config->prefix, odm_str) &&
+        !android::base::StartsWith(config->prefix, odm_alt_str)) {
+      dirs.emplace_back(*config);
+    }
+    ++config;
+  }
+  confirm(popenToString(android::base::StringPrintf(
+              "%s -D -P -vendor,-oem,-odm", fs_config_generate_command)),
+          &dirs[0], dirs.size());
+}
+
+TEST(fs_conf_test, vendor_dirs) {
+  std::vector<fs_path_config> dirs;
+  const fs_path_config* config = android_device_dirs;
+  for (size_t num = arraysize(android_device_dirs); num; --num) {
+    if (android::base::StartsWith(config->prefix, vendor_str) ||
+        android::base::StartsWith(config->prefix, vendor_alt_str)) {
+      dirs.emplace_back(*config);
+    }
+    ++config;
+  }
+  confirm(popenToString(android::base::StringPrintf(
+              "%s -D -P vendor", fs_config_generate_command)),
+          &dirs[0], dirs.size());
+}
+
+TEST(fs_conf_test, oem_dirs) {
+  std::vector<fs_path_config> dirs;
+  const fs_path_config* config = android_device_dirs;
+  for (size_t num = arraysize(android_device_dirs); num; --num) {
+    if (android::base::StartsWith(config->prefix, oem_str) ||
+        android::base::StartsWith(config->prefix, oem_alt_str)) {
+      dirs.emplace_back(*config);
+    }
+    ++config;
+  }
+  confirm(popenToString(android::base::StringPrintf(
+              "%s -D -P oem", fs_config_generate_command)),
+          &dirs[0], dirs.size());
+}
+
+TEST(fs_conf_test, odm_dirs) {
+  std::vector<fs_path_config> dirs;
+  const fs_path_config* config = android_device_dirs;
+  for (size_t num = arraysize(android_device_dirs); num; --num) {
+    if (android::base::StartsWith(config->prefix, odm_str) ||
+        android::base::StartsWith(config->prefix, odm_alt_str)) {
+      dirs.emplace_back(*config);
+    }
+    ++config;
+  }
+  confirm(popenToString(android::base::StringPrintf(
+              "%s -D -P odm", fs_config_generate_command)),
+          &dirs[0], dirs.size());
+}
+
+TEST(fs_conf_test, system_files) {
+  std::vector<fs_path_config> files;
+  const fs_path_config* config = android_device_files;
+  for (size_t num = arraysize(android_device_files); num; --num) {
+    if (!android::base::StartsWith(config->prefix, vendor_str) &&
+        !android::base::StartsWith(config->prefix, vendor_alt_str) &&
+        !android::base::StartsWith(config->prefix, oem_str) &&
+        !android::base::StartsWith(config->prefix, oem_alt_str) &&
+        !android::base::StartsWith(config->prefix, odm_str) &&
+        !android::base::StartsWith(config->prefix, odm_alt_str)) {
+      files.emplace_back(*config);
+    }
+    ++config;
+  }
+  confirm(popenToString(android::base::StringPrintf(
+              "%s -F -P -vendor,-oem,-odm", fs_config_generate_command)),
+          &files[0], files.size());
+}
+
+TEST(fs_conf_test, vendor_files) {
+  std::vector<fs_path_config> files;
+  const fs_path_config* config = android_device_files;
+  for (size_t num = arraysize(android_device_files); num; --num) {
+    if (android::base::StartsWith(config->prefix, vendor_str) ||
+        android::base::StartsWith(config->prefix, vendor_alt_str)) {
+      files.emplace_back(*config);
+    }
+    ++config;
+  }
+  confirm(popenToString(android::base::StringPrintf(
+              "%s -F -P vendor", fs_config_generate_command)),
+          &files[0], files.size());
+}
+
+TEST(fs_conf_test, oem_files) {
+  std::vector<fs_path_config> files;
+  const fs_path_config* config = android_device_files;
+  for (size_t num = arraysize(android_device_files); num; --num) {
+    if (android::base::StartsWith(config->prefix, oem_str) ||
+        android::base::StartsWith(config->prefix, oem_alt_str)) {
+      files.emplace_back(*config);
+    }
+    ++config;
+  }
+  confirm(popenToString(android::base::StringPrintf(
+              "%s -F -P oem", fs_config_generate_command)),
+          &files[0], files.size());
+}
+
+TEST(fs_conf_test, odm_files) {
+  std::vector<fs_path_config> files;
+  const fs_path_config* config = android_device_files;
+  for (size_t num = arraysize(android_device_files); num; --num) {
+    if (android::base::StartsWith(config->prefix, odm_str) ||
+        android::base::StartsWith(config->prefix, odm_alt_str)) {
+      files.emplace_back(*config);
+    }
+    ++config;
+  }
+  confirm(popenToString(android::base::StringPrintf(
+              "%s -F -P odm", fs_config_generate_command)),
+          &files[0], files.size());
+}
diff --git a/tools/generate-enforce-rro-android-manifest.py b/tools/generate-enforce-rro-android-manifest.py
new file mode 100755
index 0000000..68331cf
--- /dev/null
+++ b/tools/generate-enforce-rro-android-manifest.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Utility to generate the Android manifest file of runtime resource overlay
+package for source module.
+"""
+from xml.dom.minidom import parseString
+import argparse
+import os
+import sys
+
+ANDROID_MANIFEST_TEMPLATE = """<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="%s.auto_generated_rro__"
+    android:versionCode="1"
+    android:versionName="1.0">
+    <overlay android:targetPackage="%s" android:priority="0" android:isStatic="true"/>
+</manifest>
+"""
+
+
+def get_args():
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        '-u', '--use-package-name', action='store_true',
+        help='Indicate that --package-info is a package name.')
+    parser.add_argument(
+        '-p', '--package-info', required=True,
+        help='Manifest package name or manifest file path of source module.')
+    parser.add_argument(
+        '-o', '--output', required=True,
+        help='Output manifest file path.')
+    return parser.parse_args()
+
+
+def main(argv):
+  args = get_args()
+
+  package_name = args.package_info
+  if not args.use_package_name:
+    with open(args.package_info) as f:
+      data = f.read()
+      dom = parseString(data)
+      package_name = dom.documentElement.getAttribute('package')
+
+  with open(args.output, 'w+') as f:
+    f.write(ANDROID_MANIFEST_TEMPLATE % (package_name, package_name))
+
+
+if __name__ == "__main__":
+  main(sys.argv)
diff --git a/tools/generate-notice-files.py b/tools/generate-notice-files.py
index 5b13bf5..adbf7c2 100755
--- a/tools/generate-notice-files.py
+++ b/tools/generate-notice-files.py
@@ -14,14 +14,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """
-Usage: generate-notice-files [plain text output file] [html output file] [file title] [directory of notices]
+Usage: generate-notice-files --text-output [plain text output file] \
+               --html-output [html output file] \
+               --xml-output [xml output file] \
+               -t [file title] -s [directory of notices]
 
 Generate the Android notice files, including both text and html files.
 
 -h to display this usage message and exit.
 """
 from collections import defaultdict
-import getopt
+import argparse
 import hashlib
 import itertools
 import os
@@ -38,26 +41,6 @@
     "<": "&lt;",
     }
 
-try:
-  opts, args = getopt.getopt(sys.argv[1:], "h")
-except getopt.GetoptError, err:
-    print str(err)
-    print __doc__
-    sys.exit(2)
-
-for o, a in opts:
-  if o == "-h":
-    print __doc__
-    sys.exit(2)
-  else:
-    print >> sys.stderr, "unhandled option %s" % (o,)
-
-if len(args) != 4:
-    print """need exactly four arguments, the two output files, the file title
-             and the directory containing notices, not %d""" % (len(args),)
-    print __doc__
-    sys.exit(1)
-
 def hexify(s):
     return ("%02x"*len(s)) % tuple(map(ord, s))
 
@@ -163,27 +146,123 @@
       print >> output_file, open(value[0]).read()
     output_file.close()
 
-def main(args):
-    txt_output_file = args[0]
-    html_output_file = args[1]
-    file_title = args[2]
+def combine_notice_files_xml(files_with_same_hash, input_dir, output_filename):
+    """Combine notice files in FILE_HASH and output a XML version to OUTPUT_FILENAME."""
+
+    SRC_DIR_STRIP_RE = re.compile(input_dir + "(/.*).txt")
+
+    # Set up a filename to content id table; the ids tie each <file-name>
+    # entry to its shared <file-content> block.
+    id_table = {}
+    for file_key in files_with_same_hash.keys():
+        for filename in files_with_same_hash[file_key]:
+             id_table[filename] = file_key
+
+    # Open the output file, and output the header pieces
+    output_file = open(output_filename, "wb")
+
+    print >> output_file, '<?xml version="1.0" encoding="utf-8"?>'
+    print >> output_file, "<licenses>"
+
+    # Flatten the list of lists into a single list of filenames
+    sorted_filenames = sorted(id_table.keys())
+
+    # Print out a nice table of contents
+    for filename in sorted_filenames:
+        stripped_filename = SRC_DIR_STRIP_RE.sub(r"\1", filename)
+        print >> output_file, '<file-name contentId="%s">%s</file-name>' % (id_table.get(filename), stripped_filename)
+
+    print >> output_file
+    print >> output_file
+
+    processed_file_keys = []
+    # Output the individual notice file lists
+    for filename in sorted_filenames:
+        file_key = id_table.get(filename)
+        if file_key in processed_file_keys:
+            continue
+        processed_file_keys.append(file_key)
+
+        print >> output_file, '<file-content contentId="%s"><![CDATA[%s]]></file-content>' % (file_key, html_escape(open(filename).read()))
+        print >> output_file
+
+    # Finish off the file output
+    print >> output_file, "</licenses>"
+    output_file.close()
+
+def get_args():
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        '--text-output', required=True,
+        help='The text output file path.')
+    parser.add_argument(
+        '--html-output',
+        help='The html output file path.')
+    parser.add_argument(
+        '--xml-output',
+        help='The xml output file path.')
+    parser.add_argument(
+        '-t', '--title', required=True,
+        help='The file title.')
+    parser.add_argument(
+        '-s', '--source-dir', required=True,
+        help='The directory containing notices.')
+    parser.add_argument(
+        '-i', '--included-subdirs', action='append',
+        help='The sub directories which should be included.')
+    parser.add_argument(
+        '-e', '--excluded-subdirs', action='append',
+        help='The sub directories which should be excluded.')
+    return parser.parse_args()
+
+def main(argv):
+    args = get_args()
+
+    txt_output_file = args.text_output
+    html_output_file = args.html_output
+    xml_output_file = args.xml_output
+    file_title = args.title
+    included_subdirs = []
+    excluded_subdirs = []
+    if args.included_subdirs is not None:
+        included_subdirs = args.included_subdirs
+    if args.excluded_subdirs is not None:
+        excluded_subdirs = args.excluded_subdirs
 
     # Find all the notice files and md5 them
-    input_dir = os.path.normpath(args[3])
+    input_dir = os.path.normpath(args.source_dir)
     files_with_same_hash = defaultdict(list)
     for root, dir, files in os.walk(input_dir):
         for file in files:
-            if file.endswith(".txt"):
+            matched = True
+            if len(included_subdirs) > 0:
+                matched = False
+                for subdir in included_subdirs:
+                    if root.startswith(input_dir + '/' + subdir):
+                        matched = True
+                        break
+            elif len(excluded_subdirs) > 0:
+                for subdir in excluded_subdirs:
+                    if root.startswith(input_dir + '/' + subdir):
+                        matched = False
+                        break
+            if matched and file.endswith(".txt"):
                 filename = os.path.join(root, file)
                 file_md5sum = md5sum(filename)
                 files_with_same_hash[file_md5sum].append(filename)
 
     filesets = [sorted(files_with_same_hash[md5]) for md5 in sorted(files_with_same_hash.keys())]
 
-    print "Combining NOTICE files into HTML"
-    combine_notice_files_html(filesets, input_dir, html_output_file)
     print "Combining NOTICE files into text"
     combine_notice_files_text(filesets, input_dir, txt_output_file, file_title)
 
+    if html_output_file is not None:
+        print "Combining NOTICE files into HTML"
+        combine_notice_files_html(filesets, input_dir, html_output_file)
+
+    if xml_output_file is not None:
+        print "Combining NOTICE files into XML"
+        combine_notice_files_xml(files_with_same_hash, input_dir, xml_output_file)
+
 if __name__ == "__main__":
-    main(args)
+    main(sys.argv)
diff --git a/tools/releasetools/OWNERS b/tools/releasetools/OWNERS
new file mode 100644
index 0000000..39448cf
--- /dev/null
+++ b/tools/releasetools/OWNERS
@@ -0,0 +1 @@
+tbao@google.com
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index 2b7aee4..ff7109a 100755
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -69,6 +69,7 @@
 
 OPTIONS.add_missing = False
 OPTIONS.rebuild_recovery = False
+OPTIONS.replace_updated_files_list = []
 OPTIONS.replace_verity_public_key = False
 OPTIONS.replace_verity_private_key = False
 OPTIONS.is_signing = False
@@ -98,8 +99,7 @@
   assert which in ("system", "vendor")
 
   simg = sparse_img.SparseImage(imgname)
-  care_map_list = []
-  care_map_list.append(which)
+  care_map_list = [which]
 
   care_map_ranges = simg.care_map
   key = which + "_adjusted_partition_size"
@@ -127,6 +127,12 @@
     ofile.write(data)
     ofile.close()
 
+    arc_name = "SYSTEM/" + fn
+    if arc_name in output_zip.namelist():
+      OPTIONS.replace_updated_files_list.append(arc_name)
+    else:
+      common.ZipWrite(output_zip, ofile.name, arc_name)
+
   if OPTIONS.rebuild_recovery:
     print("Building new recovery patch")
     common.MakeRecoveryPatch(OPTIONS.input_tmp, output_sink, recovery_img,
@@ -167,6 +173,43 @@
   return img.name
 
 
+def AddDtbo(output_zip, prefix="IMAGES/"):
+  """Adds the DTBO image.
+
+  Uses the image under prefix if it already exists. Otherwise looks for the
+  image under PREBUILT_IMAGES/, signs it as needed, and returns the image name.
+  """
+
+  img = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "dtbo.img")
+  if os.path.exists(img.input_name):
+    print("dtbo.img already exists in %s, no need to rebuild..." % (prefix,))
+    return img.input_name
+
+  dtbo_prebuilt_path = os.path.join(
+      OPTIONS.input_tmp, "PREBUILT_IMAGES", "dtbo.img")
+  assert os.path.exists(dtbo_prebuilt_path)
+  shutil.copy(dtbo_prebuilt_path, img.name)
+
+  # AVB-sign the image as needed.
+  if OPTIONS.info_dict.get("avb_enable") == "true":
+    avbtool = os.getenv('AVBTOOL') or OPTIONS.info_dict["avb_avbtool"]
+    part_size = OPTIONS.info_dict["dtbo_size"]
+    # The AVB hash footer will be replaced if already present.
+    cmd = [avbtool, "add_hash_footer", "--image", img.name,
+           "--partition_size", str(part_size), "--partition_name", "dtbo"]
+    common.AppendAVBSigningArgs(cmd, "dtbo")
+    args = OPTIONS.info_dict.get("avb_dtbo_add_hash_footer_args")
+    if args and args.strip():
+      cmd.extend(shlex.split(args))
+    p = common.Run(cmd, stdout=subprocess.PIPE)
+    p.communicate()
+    assert p.returncode == 0, \
+        "avbtool add_hash_footer of %s failed" % (img.name,)
+
+  img.Write()
+  return img.name
+
+
 def CreateImage(input_dir, info_dict, what, output_file, block_list=None):
   print("creating " + what + ".img...")
 
@@ -222,9 +265,14 @@
   if block_list:
     block_list.Write()
 
+  # Set the 'adjusted_partition_size' that excludes the verity blocks of the
+  # given image. When avb is enabled, this size is the max image size returned
+  # by the avb tool.
   is_verity_partition = "verity_block_device" in image_props
-  verity_supported = image_props.get("verity") == "true"
-  if is_verity_partition and verity_supported:
+  verity_supported = (image_props.get("verity") == "true" or
+                      image_props.get("avb_enable") == "true")
+  is_avb_enable = image_props.get("avb_hashtree_enable") == "true"
+  if verity_supported and (is_verity_partition or is_avb_enable):
     adjusted_blocks_value = image_props.get("partition_size")
     if adjusted_blocks_value:
       adjusted_blocks_key = what + "_adjusted_partition_size"
@@ -285,19 +333,51 @@
   img.Write()
 
 
-def AddVBMeta(output_zip, boot_img_path, system_img_path, prefix="IMAGES/"):
+def AppendVBMetaArgsForPartition(cmd, partition, img_path, public_key_dir):
+  if not img_path:
+    return
+
+  # Check if chain partition is used.
+  key_path = OPTIONS.info_dict.get("avb_" + partition + "_key_path")
+  if key_path:
+    # extract public key in AVB format to be included in vbmeta.img
+    avbtool = os.getenv('AVBTOOL') or OPTIONS.info_dict["avb_avbtool"]
+    public_key_path = os.path.join(public_key_dir, "%s.avbpubkey" % partition)
+    p = common.Run([avbtool, "extract_public_key", "--key", key_path,
+                    "--output", public_key_path],
+                   stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    p.communicate()
+    assert p.returncode == 0, \
+        "avbtool extract_public_key fail for partition: %r" % partition
+
+    rollback_index_location = OPTIONS.info_dict[
+        "avb_" + partition + "_rollback_index_location"]
+    cmd.extend(["--chain_partition", "%s:%s:%s" % (
+        partition, rollback_index_location, public_key_path)])
+  else:
+    cmd.extend(["--include_descriptors_from_image", img_path])
+
+
+def AddVBMeta(output_zip, boot_img_path, system_img_path, vendor_img_path,
+              dtbo_img_path, prefix="IMAGES/"):
   """Create a VBMeta image and store it in output_zip."""
   img = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "vbmeta.img")
-  avbtool = os.getenv('AVBTOOL') or "avbtool"
-  cmd = [avbtool, "make_vbmeta_image",
-         "--output", img.name,
-         "--include_descriptors_from_image", boot_img_path,
-         "--include_descriptors_from_image", system_img_path,
-         "--generate_dm_verity_cmdline_from_hashtree", system_img_path]
-  common.AppendAVBSigningArgs(cmd)
-  args = OPTIONS.info_dict.get("board_avb_make_vbmeta_image_args", None)
+  avbtool = os.getenv('AVBTOOL') or OPTIONS.info_dict["avb_avbtool"]
+  cmd = [avbtool, "make_vbmeta_image", "--output", img.name]
+  common.AppendAVBSigningArgs(cmd, "vbmeta")
+
+  public_key_dir = tempfile.mkdtemp(prefix="avbpubkey-")
+  OPTIONS.tempfiles.append(public_key_dir)
+
+  AppendVBMetaArgsForPartition(cmd, "boot", boot_img_path, public_key_dir)
+  AppendVBMetaArgsForPartition(cmd, "system", system_img_path, public_key_dir)
+  AppendVBMetaArgsForPartition(cmd, "vendor", vendor_img_path, public_key_dir)
+  AppendVBMetaArgsForPartition(cmd, "dtbo", dtbo_img_path, public_key_dir)
+
+  args = OPTIONS.info_dict.get("avb_vbmeta_args")
   if args and args.strip():
     cmd.extend(shlex.split(args))
+
   p = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
   p.communicate()
   assert p.returncode == 0, "avbtool make_vbmeta_image failed"
@@ -372,6 +452,27 @@
   img.Write()
 
 
+def ReplaceUpdatedFiles(zip_filename, files_list):
+  """Update all the zip entries listed in the files_list.
+
+  For now the list includes META/care_map.txt, and the related files under
+  SYSTEM/ after rebuilding recovery.
+  """
+
+  cmd = ["zip", "-d", zip_filename] + files_list
+  p = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+  p.communicate()
+
+  output_zip = zipfile.ZipFile(zip_filename, "a",
+                               compression=zipfile.ZIP_DEFLATED,
+                               allowZip64=True)
+  for item in files_list:
+    file_path = os.path.join(OPTIONS.input_tmp, item)
+    assert os.path.exists(file_path)
+    common.ZipWrite(output_zip, file_path, arcname=item)
+  common.ZipClose(output_zip)
+
+
 def AddImagesToTargetFiles(filename):
   if os.path.isdir(filename):
     OPTIONS.input_tmp = os.path.abspath(filename)
@@ -458,7 +559,7 @@
 
   banner("system")
   system_img_path = AddSystem(
-    output_zip, recovery_img=recovery_image, boot_img=boot_image)
+      output_zip, recovery_img=recovery_image, boot_img=boot_image)
   vendor_img_path = None
   if has_vendor:
     banner("vendor")
@@ -471,13 +572,21 @@
     AddUserdata(output_zip)
     banner("cache")
     AddCache(output_zip)
-  if OPTIONS.info_dict.get("board_bpt_enable", None) == "true":
+
+  if OPTIONS.info_dict.get("board_bpt_enable") == "true":
     banner("partition-table")
     AddPartitionTable(output_zip)
-  if OPTIONS.info_dict.get("board_avb_enable", None) == "true":
+
+  dtbo_img_path = None
+  if OPTIONS.info_dict.get("has_dtbo") == "true":
+    banner("dtbo")
+    dtbo_img_path = AddDtbo(output_zip)
+
+  if OPTIONS.info_dict.get("avb_enable") == "true":
     banner("vbmeta")
     boot_contents = boot_image.WriteToTemp()
-    AddVBMeta(output_zip, boot_contents.name, system_img_path)
+    AddVBMeta(output_zip, boot_contents.name, system_img_path,
+              vendor_img_path, dtbo_img_path)
 
   # For devices using A/B update, copy over images from RADIO/ and/or
   # VENDOR_IMAGES/ to IMAGES/ and make sure we have all the needed
@@ -491,12 +600,14 @@
     # partitions (if present), then write this file to target_files package.
     care_map_list = []
     for line in lines:
-      if line.strip() == "system" and OPTIONS.info_dict.get(
-          "system_verity_block_device", None) is not None:
+      if line.strip() == "system" and (
+          "system_verity_block_device" in OPTIONS.info_dict or
+          OPTIONS.info_dict.get("system_avb_hashtree_enable") == "true"):
         assert os.path.exists(system_img_path)
         care_map_list += GetCareMap("system", system_img_path)
-      if line.strip() == "vendor" and OPTIONS.info_dict.get(
-          "vendor_verity_block_device", None) is not None:
+      if line.strip() == "vendor" and (
+          "vendor_verity_block_device" in OPTIONS.info_dict or
+          OPTIONS.info_dict.get("vendor_avb_hashtree_enable") == "true"):
         assert os.path.exists(vendor_img_path)
         care_map_list += GetCareMap("vendor", vendor_img_path)
 
@@ -534,15 +645,21 @@
         assert os.path.exists(img_path), "cannot find " + img_name
 
     if care_map_list:
-      file_path = "META/care_map.txt"
-      if output_zip:
-        common.ZipWriteStr(output_zip, file_path, '\n'.join(care_map_list))
+      care_map_path = "META/care_map.txt"
+      if output_zip and care_map_path not in output_zip.namelist():
+        common.ZipWriteStr(output_zip, care_map_path, '\n'.join(care_map_list))
       else:
-        with open(os.path.join(OPTIONS.input_tmp, file_path), 'w') as fp:
+        with open(os.path.join(OPTIONS.input_tmp, care_map_path), 'w') as fp:
           fp.write('\n'.join(care_map_list))
+        if output_zip:
+          OPTIONS.replace_updated_files_list.append(care_map_path)
 
   if output_zip:
     common.ZipClose(output_zip)
+    if OPTIONS.replace_updated_files_list:
+      ReplaceUpdatedFiles(output_zip.filename,
+                          OPTIONS.replace_updated_files_list)
+
 
 def main(argv):
   def option_handler(o, a):
diff --git a/tools/releasetools/blockimgdiff.py b/tools/releasetools/blockimgdiff.py
index e385866..b8123c0 100644
--- a/tools/releasetools/blockimgdiff.py
+++ b/tools/releasetools/blockimgdiff.py
@@ -41,10 +41,10 @@
   cmd = ['imgdiff', '-z'] if imgdiff else ['bsdiff']
   cmd.extend([srcfile, tgtfile, patchfile])
 
-  # Not using common.Run(), which would otherwise dump all the bsdiff/imgdiff
-  # commands when OPTIONS.verbose is True - not useful for the case here, since
-  # they contain temp filenames only.
-  p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+  # Don't dump the bsdiff/imgdiff commands, which are not useful for the case
+  # here, since they contain temp filenames only.
+  p = common.Run(cmd, verbose=False, stdout=subprocess.PIPE,
+                 stderr=subprocess.STDOUT)
   output, _ = p.communicate()
 
   if p.returncode != 0:
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index 16c8018..6de9763 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -97,49 +97,57 @@
   simg = sparse_img.SparseImage(image_file, mode="r+b", build_map=False)
   simg.AppendFillChunk(0, blocks)
 
-def AVBCalcMaxImageSize(avbtool, partition_size, additional_args):
+def AVBCalcMaxImageSize(avbtool, footer_type, partition_size, additional_args):
   """Calculates max image size for a given partition size.
 
   Args:
     avbtool: String with path to avbtool.
+    footer_type: 'hash' or 'hashtree' for generating footer.
     partition_size: The size of the partition in question.
     additional_args: Additional arguments to pass to 'avbtool
       add_hashtree_image'.
   Returns:
     The maximum image size or 0 if an error occurred.
   """
-  cmdline = "%s add_hashtree_footer " % avbtool
-  cmdline += "--partition_size %d " % partition_size
-  cmdline += "--calc_max_image_size "
-  cmdline += additional_args
-  (output, exit_code) = RunCommand(shlex.split(cmdline))
+  cmd = [avbtool, "add_%s_footer" % footer_type,
+         "--partition_size", partition_size, "--calc_max_image_size"]
+  cmd.extend(shlex.split(additional_args))
+
+  (output, exit_code) = RunCommand(cmd)
   if exit_code != 0:
     return 0
   else:
     return int(output)
 
-def AVBAddHashtree(image_path, avbtool, partition_size, partition_name,
-                   signing_args, additional_args):
+def AVBAddFooter(image_path, avbtool, footer_type, partition_size,
+                 partition_name, key_path, algorithm,
+                 additional_args):
   """Adds dm-verity hashtree and AVB metadata to an image.
 
   Args:
     image_path: Path to image to modify.
     avbtool: String with path to avbtool.
+    footer_type: 'hash' or 'hashtree' for generating footer.
     partition_size: The size of the partition in question.
     partition_name: The name of the partition - will be embedded in metadata.
-    signing_args: Arguments for signing the image.
+    key_path: Path to key to use or None.
+    algorithm: Name of algorithm to use or None.
     additional_args: Additional arguments to pass to 'avbtool
       add_hashtree_image'.
   Returns:
     True if the operation succeeded.
   """
-  cmdline = "%s add_hashtree_footer " % avbtool
-  cmdline += "--partition_size %d " % partition_size
-  cmdline += "--partition_name %s " % partition_name
-  cmdline += "--image %s " % image_path
-  cmdline += signing_args + " "
-  cmdline += additional_args
-  (_, exit_code) = RunCommand(shlex.split(cmdline))
+  cmd = [avbtool, "add_%s_footer" % footer_type,
+         "--partition_size", partition_size,
+         "--partition_name", partition_name,
+         "--image", image_path]
+
+  if key_path and algorithm:
+    cmd.extend(["--key", key_path, "--algorithm", algorithm])
+
+  cmd.extend(shlex.split(additional_args))
+
+  (_, exit_code) = RunCommand(cmd)
   return exit_code == 0
 
 def AdjustPartitionSizeForVerity(partition_size, fec_supported):
@@ -410,17 +418,24 @@
     prop_dict["original_partition_size"] = str(partition_size)
     prop_dict["verity_size"] = str(verity_size)
 
-  # Adjust partition size for AVB.
-  if prop_dict.get("avb_enable") == "true":
-    avbtool = prop_dict.get("avb_avbtool")
-    partition_size = int(prop_dict.get("partition_size"))
-    additional_args = prop_dict["avb_add_hashtree_footer_args"]
-    max_image_size = AVBCalcMaxImageSize(avbtool, partition_size,
+  # Adjust partition size for AVB hash footer or AVB hashtree footer.
+  avb_footer_type = ''
+  if prop_dict.get("avb_hash_enable") == "true":
+    avb_footer_type = 'hash'
+  elif prop_dict.get("avb_hashtree_enable") == "true":
+    avb_footer_type = 'hashtree'
+
+  if avb_footer_type:
+    avbtool = prop_dict["avb_avbtool"]
+    partition_size = prop_dict["partition_size"]
+    # avb_add_hash_footer_args or avb_add_hashtree_footer_args.
+    additional_args = prop_dict["avb_add_" + avb_footer_type + "_footer_args"]
+    max_image_size = AVBCalcMaxImageSize(avbtool, avb_footer_type, partition_size,
                                          additional_args)
     if max_image_size == 0:
       return False
     prop_dict["partition_size"] = str(max_image_size)
-    prop_dict["original_partition_size"] = str(partition_size)
+    prop_dict["original_partition_size"] = partition_size
 
   if fs_type.startswith("ext"):
     build_command = [prop_dict["ext_mkuserimg"]]
@@ -493,11 +508,11 @@
     shutil.rmtree(staging_system, ignore_errors=True)
     shutil.copytree(origin_in, staging_system, symlinks=True)
 
-  reserved_blocks = prop_dict.get("has_ext4_reserved_blocks") == "true"
+  has_reserved_blocks = prop_dict.get("has_ext4_reserved_blocks") == "true"
   ext4fs_output = None
 
   try:
-    if reserved_blocks and fs_type.startswith("ext4"):
+    if fs_type.startswith("ext4"):
       (ext4fs_output, exit_code) = RunCommand(build_command)
     else:
       (_, exit_code) = RunCommand(build_command)
@@ -518,7 +533,9 @@
   # not writable even with root privilege. It only affects devices using
   # file-based OTA and a kernel version of 3.10 or greater (currently just
   # sprout).
-  if reserved_blocks and fs_type.startswith("ext4"):
+  # Separately, check if there's enough headroom space available. This is
+  # useful for devices with low disk space whose system image size varies
+  # between builds.
+  if ((has_reserved_blocks or "partition_headroom" in prop_dict) and
+      fs_type.startswith("ext4")):
     assert ext4fs_output is not None
     ext4fs_stats = re.compile(
         r'Created filesystem with .* (?P<used_blocks>[0-9]+)/'
@@ -526,14 +543,21 @@
     m = ext4fs_stats.match(ext4fs_output.strip().split('\n')[-1])
     used_blocks = int(m.groupdict().get('used_blocks'))
     total_blocks = int(m.groupdict().get('total_blocks'))
-    reserved_blocks = min(4096, int(total_blocks * 0.02))
-    adjusted_blocks = total_blocks - reserved_blocks
+    reserved_blocks = 0
+    headroom_blocks = 0
+    adjusted_blocks = total_blocks
+    if has_reserved_blocks:
+      reserved_blocks = min(4096, int(total_blocks * 0.02))
+      adjusted_blocks -= reserved_blocks
+    if "partition_headroom" in prop_dict:
+      headroom_blocks = int(prop_dict.get('partition_headroom')) / BLOCK_SIZE
+      adjusted_blocks -= headroom_blocks
     if used_blocks > adjusted_blocks:
       mount_point = prop_dict.get("mount_point")
       print("Error: Not enough room on %s (total: %d blocks, used: %d blocks, "
-            "reserved: %d blocks, available: %d blocks)" % (
+            "reserved: %d blocks, headroom: %d blocks, available: %d blocks)" % (
                 mount_point, total_blocks, used_blocks, reserved_blocks,
-                adjusted_blocks))
+                headroom_blocks, adjusted_blocks))
       return False
 
   if not fs_spans_partition:
@@ -552,15 +576,18 @@
     if not MakeVerityEnabledImage(out_file, verity_fec_supported, prop_dict):
       return False
 
-  # Add AVB hashtree and metadata.
-  if "avb_enable" in prop_dict:
-    avbtool = prop_dict.get("avb_avbtool")
-    original_partition_size = int(prop_dict.get("original_partition_size"))
+  # Add AVB HASH or HASHTREE footer (metadata).
+  if avb_footer_type:
+    avbtool = prop_dict["avb_avbtool"]
+    original_partition_size = prop_dict["original_partition_size"]
     partition_name = prop_dict["partition_name"]
-    signing_args = prop_dict["avb_signing_args"]
-    additional_args = prop_dict["avb_add_hashtree_footer_args"]
-    if not AVBAddHashtree(out_file, avbtool, original_partition_size,
-                          partition_name, signing_args, additional_args):
+    # key_path and algorithm are only available when chain partition is used.
+    key_path = prop_dict.get("avb_key_path")
+    algorithm = prop_dict.get("avb_algorithm")
+    # avb_add_hash_footer_args or avb_add_hashtree_footer_args
+    additional_args = prop_dict["avb_add_" + avb_footer_type + "_footer_args"]
+    if not AVBAddFooter(out_file, avbtool, avb_footer_type,
+                        original_partition_size, partition_name, key_path,
+                        algorithm, additional_args):
       return False
 
   if run_fsck and prop_dict.get("skip_fsck") != "true":
@@ -605,7 +632,7 @@
       "verity_key",
       "verity_signer_cmd",
       "verity_fec",
-      "avb_signing_args",
+      "avb_enable",
       "avb_avbtool"
       )
   for p in common_props:
@@ -613,10 +640,16 @@
 
   d["mount_point"] = mount_point
   if mount_point == "system":
+    copy_prop("avb_system_hashtree_enable", "avb_hashtree_enable")
+    copy_prop("avb_system_add_hashtree_footer_args",
+              "avb_add_hashtree_footer_args")
+    copy_prop("avb_system_key_path", "avb_key_path")
+    copy_prop("avb_system_algorithm", "avb_algorithm")
     copy_prop("fs_type", "fs_type")
-    # Copy the generic sysetem fs type first, override with specific one if
+    # Copy the generic system fs type first, override with specific one if
     # available.
     copy_prop("system_fs_type", "fs_type")
+    copy_prop("system_headroom", "partition_headroom")
     copy_prop("system_size", "partition_size")
     copy_prop("system_journal_size", "journal_size")
     copy_prop("system_verity_block_device", "verity_block_device")
@@ -629,13 +662,15 @@
     copy_prop("system_squashfs_block_size", "squashfs_block_size")
     copy_prop("system_squashfs_disable_4k_align", "squashfs_disable_4k_align")
     copy_prop("system_base_fs_file", "base_fs_file")
-    copy_prop("system_avb_enable", "avb_enable")
-    copy_prop("system_avb_add_hashtree_footer_args",
-              "avb_add_hashtree_footer_args")
     copy_prop("system_extfs_inode_count", "extfs_inode_count")
   elif mount_point == "system_other":
     # We inherit the selinux policies of /system since we contain some of its files.
     d["mount_point"] = "system"
+    copy_prop("avb_system_hashtree_enable", "avb_hashtree_enable")
+    copy_prop("avb_system_add_hashtree_footer_args",
+              "avb_add_hashtree_footer_args")
+    copy_prop("avb_system_key_path", "avb_key_path")
+    copy_prop("avb_system_algorithm", "avb_algorithm")
     copy_prop("fs_type", "fs_type")
     copy_prop("system_fs_type", "fs_type")
     copy_prop("system_size", "partition_size")
@@ -646,9 +681,6 @@
     copy_prop("system_squashfs_compressor_opt", "squashfs_compressor_opt")
     copy_prop("system_squashfs_block_size", "squashfs_block_size")
     copy_prop("system_base_fs_file", "base_fs_file")
-    copy_prop("system_avb_enable", "avb_enable")
-    copy_prop("system_avb_add_hashtree_footer_args",
-              "avb_add_hashtree_footer_args")
     copy_prop("system_extfs_inode_count", "extfs_inode_count")
   elif mount_point == "data":
     # Copy the generic fs type first, override with specific one if available.
@@ -661,6 +693,11 @@
     copy_prop("cache_fs_type", "fs_type")
     copy_prop("cache_size", "partition_size")
   elif mount_point == "vendor":
+    copy_prop("avb_vendor_hashtree_enable", "avb_hashtree_enable")
+    copy_prop("avb_vendor_add_hashtree_footer_args",
+              "avb_add_hashtree_footer_args")
+    copy_prop("avb_vendor_key_path", "avb_key_path")
+    copy_prop("avb_vendor_algorithm", "avb_algorithm")
     copy_prop("vendor_fs_type", "fs_type")
     copy_prop("vendor_size", "partition_size")
     copy_prop("vendor_journal_size", "journal_size")
@@ -671,9 +708,6 @@
     copy_prop("vendor_squashfs_block_size", "squashfs_block_size")
     copy_prop("vendor_squashfs_disable_4k_align", "squashfs_disable_4k_align")
     copy_prop("vendor_base_fs_file", "base_fs_file")
-    copy_prop("vendor_avb_enable", "avb_enable")
-    copy_prop("vendor_avb_add_hashtree_footer_args",
-              "avb_add_hashtree_footer_args")
     copy_prop("vendor_extfs_inode_count", "extfs_inode_count")
   elif mount_point == "oem":
     copy_prop("fs_type", "fs_type")
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index e200f9f..2ad628b 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -107,10 +107,15 @@
   pass
 
 
-def Run(args, **kwargs):
-  """Create and return a subprocess.Popen object, printing the command
-  line on the terminal if -v was specified."""
-  if OPTIONS.verbose:
+def Run(args, verbose=None, **kwargs):
+  """Create and return a subprocess.Popen object.
+
+  Caller can specify if the command line should be printed. The global
+  OPTIONS.verbose will be used if not specified.
+  """
+  if verbose is None:
+    verbose = OPTIONS.verbose
+  if verbose:
     print("  running: ", " ".join(args))
   return subprocess.Popen(args, **kwargs)
 
@@ -340,14 +345,13 @@
     print("%-25s = (%s) %s" % (k, type(v).__name__, v))
 
 
-def AppendAVBSigningArgs(cmd):
+def AppendAVBSigningArgs(cmd, partition):
   """Append signing arguments for avbtool."""
-  keypath = OPTIONS.info_dict.get("board_avb_key_path", None)
-  algorithm = OPTIONS.info_dict.get("board_avb_algorithm", None)
-  if not keypath or not algorithm:
-    algorithm = "SHA256_RSA4096"
-    keypath = "external/avb/test/data/testkey_rsa4096.pem"
-  cmd.extend(["--key", keypath, "--algorithm", algorithm])
+  # e.g., "--key path/to/signing_key --algorithm SHA256_RSA4096"
+  key_path = OPTIONS.info_dict.get("avb_" + partition + "_key_path")
+  algorithm = OPTIONS.info_dict.get("avb_" + partition + "_algorithm")
+  if key_path and algorithm:
+    cmd.extend(["--key", key_path, "--algorithm", algorithm])
 
 
 def _BuildBootableImage(sourcedir, fs_config_file, info_dict=None,
@@ -485,13 +489,13 @@
     img_keyblock.close()
 
   # AVB: if enabled, calculate and add hash to boot.img.
-  if info_dict.get("board_avb_enable", None) == "true":
-    avbtool = os.getenv('AVBTOOL') or "avbtool"
-    part_size = info_dict.get("boot_size", None)
+  if info_dict.get("avb_enable") == "true":
+    avbtool = os.getenv('AVBTOOL') or info_dict["avb_avbtool"]
+    part_size = info_dict["boot_size"]
     cmd = [avbtool, "add_hash_footer", "--image", img.name,
            "--partition_size", str(part_size), "--partition_name", "boot"]
-    AppendAVBSigningArgs(cmd)
-    args = info_dict.get("board_avb_boot_add_hash_footer_args", None)
+    AppendAVBSigningArgs(cmd, "boot")
+    args = info_dict.get("avb_boot_add_hash_footer_args")
     if args and args.strip():
       cmd.extend(shlex.split(args))
     p = Run(cmd, stdout=subprocess.PIPE)
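
With AppendAVBSigningArgs now taking a partition name, the key and algorithm come from per-partition info_dict entries, and signing arguments are appended only when both are present. A hedged usage sketch with a hypothetical info_dict (the key path below is the test key the old default pointed at):

    info_dict = {
        "avb_boot_key_path": "external/avb/test/data/testkey_rsa4096.pem",
        "avb_boot_algorithm": "SHA256_RSA4096",
    }

    def append_avb_signing_args(cmd, partition, info_dict):
        # Same shape as common.AppendAVBSigningArgs: sign only if both exist.
        key_path = info_dict.get("avb_" + partition + "_key_path")
        algorithm = info_dict.get("avb_" + partition + "_algorithm")
        if key_path and algorithm:
            cmd.extend(["--key", key_path, "--algorithm", algorithm])

    cmd = ["avbtool", "add_hash_footer", "--partition_name", "boot"]
    append_avb_signing_args(cmd, "boot", info_dict)  # appends --key/--algorithm
    append_avb_signing_args(cmd, "dtbo", info_dict)  # no dtbo keys: cmd unchanged
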
diff --git a/tools/releasetools/edify_generator.py b/tools/releasetools/edify_generator.py
index 2a9a417..0c44faf 100644
--- a/tools/releasetools/edify_generator.py
+++ b/tools/releasetools/edify_generator.py
@@ -130,7 +130,7 @@
            '    getprop("ro.build.thumbprint") == "{tp}" ||\n'
            '    abort("Package expects build fingerprint of {fp} or '
            'thumbprint of {tp}; this device has a fingerprint of " '
-           '+ getprop("ro.build.fingerprint") and a thumbprint of " '
+           '+ getprop("ro.build.fingerprint") + " and a thumbprint of " '
            '+ getprop("ro.build.thumbprint") + ".");').format(fp=fp, tp=tp)
     self.script.append(cmd)
 
@@ -230,11 +230,6 @@
           p.mount_point, mount_flags))
       self.mounts.add(p.mount_point)
 
-  def UnpackPackageDir(self, src, dst):
-    """Unpack a given directory from the OTA package into the given
-    destination directory."""
-    self.script.append('package_extract_dir("%s", "%s");' % (src, dst))
-
   def Comment(self, comment):
     """Write a comment into the update script."""
     self.script.append("")
diff --git a/tools/releasetools/img_from_target_files.py b/tools/releasetools/img_from_target_files.py
index fd98ad2..4422b53 100755
--- a/tools/releasetools/img_from_target_files.py
+++ b/tools/releasetools/img_from_target_files.py
@@ -71,63 +71,23 @@
     common.Usage(__doc__)
     sys.exit(1)
 
-  OPTIONS.input_tmp, input_zip = common.UnzipTemp(args[0])
+  OPTIONS.input_tmp, input_zip = common.UnzipTemp(
+      args[0], ["IMAGES/*", "OTA/*"])
   output_zip = zipfile.ZipFile(args[1], "w", compression=zipfile.ZIP_DEFLATED)
   CopyInfo(output_zip)
 
   try:
-    done = False
     images_path = os.path.join(OPTIONS.input_tmp, "IMAGES")
-    if os.path.exists(images_path):
-      # If this is a new target-files, it already contains the images,
-      # and all we have to do is copy them to the output zip.
-      images = os.listdir(images_path)
-      if images:
-        for image in images:
-          if bootable_only and image not in ("boot.img", "recovery.img"):
-            continue
-          if not image.endswith(".img"):
-            continue
-          if image == "recovery-two-step.img":
-            continue
-          common.ZipWrite(
-              output_zip, os.path.join(images_path, image), image)
-        done = True
-
-    if not done:
-      # We have an old target-files that doesn't already contain the
-      # images, so build them.
-      import add_img_to_target_files
-
-      OPTIONS.info_dict = common.LoadInfoDict(input_zip, OPTIONS.input_tmp)
-
-      boot_image = common.GetBootableImage(
-          "boot.img", "boot.img", OPTIONS.input_tmp, "BOOT")
-      if boot_image:
-        boot_image.AddToZip(output_zip)
-
-      if OPTIONS.info_dict.get("no_recovery") != "true":
-        recovery_image = common.GetBootableImage(
-            "recovery.img", "recovery.img", OPTIONS.input_tmp, "RECOVERY")
-        if recovery_image:
-          recovery_image.AddToZip(output_zip)
-
-      def banner(s):
-        print("\n\n++++ " + s + " ++++\n\n")
-
-      if not bootable_only:
-        banner("AddSystem")
-        add_img_to_target_files.AddSystem(output_zip, prefix="")
-        try:
-          input_zip.getinfo("VENDOR/")
-          banner("AddVendor")
-          add_img_to_target_files.AddVendor(output_zip, prefix="")
-        except KeyError:
-          pass   # no vendor partition for this device
-        banner("AddUserdata")
-        add_img_to_target_files.AddUserdata(output_zip, prefix="")
-        banner("AddCache")
-        add_img_to_target_files.AddCache(output_zip, prefix="")
+    # A target-files zip must contain the images since Lollipop.
+    assert os.path.exists(images_path)
+    for image in sorted(os.listdir(images_path)):
+      if bootable_only and image not in ("boot.img", "recovery.img"):
+        continue
+      if not image.endswith(".img"):
+        continue
+      if image == "recovery-two-step.img":
+        continue
+      common.ZipWrite(output_zip, os.path.join(images_path, image), image)
 
   finally:
     print("cleaning up...")
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 8222377..1b0f68b 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -180,14 +180,15 @@
 OPTIONS.log_diff = None
 OPTIONS.payload_signer = None
 OPTIONS.payload_signer_args = []
+OPTIONS.extracted_input = None
+OPTIONS.key_passwords = []
 
 METADATA_NAME = 'META-INF/com/android/metadata'
 UNZIP_PATTERN = ['IMAGES/*', 'META/*']
 
 
 def SignOutput(temp_zip_name, output_zip_name):
-  key_passwords = common.GetKeyPasswords([OPTIONS.package_key])
-  pw = key_passwords[OPTIONS.package_key]
+  pw = OPTIONS.key_passwords[OPTIONS.package_key]
 
   common.SignFile(temp_zip_name, output_zip_name, OPTIONS.package_key, pw,
                   whole_file=True)
@@ -315,6 +316,59 @@
   return sparse_img.SparseImage(path, mappath, clobbered_blocks)
 
 
+def AddCompatibilityArchive(target_zip, output_zip, system_included=True,
+                            vendor_included=True):
+  """Adds compatibility info from target files into the output zip.
+
+  Metadata used for on-device compatibility verification is retrieved from
+  target_zip then added to compatibility.zip which is added to the output_zip
+  archive.
+
+  The compatibility archive should only be included for devices with a vendor
+  partition, since the check only provides value when system and vendor are
+  versioned independently.
+
+  Args:
+    target_zip: Zip file containing the source files to be included for OTA.
+    output_zip: Zip file that will be sent for OTA.
+    system_included: If True, the system image will be updated and therefore
+        its metadata should be included.
+    vendor_included: If True, the vendor image will be updated and therefore
+        its metadata should be included.
+  """
+
+  # Determine what metadata we need. File names are relative to META/.
+  compatibility_files = []
+  vendor_metadata = ("vendor_manifest.xml", "vendor_matrix.xml")
+  system_metadata = ("system_manifest.xml", "system_matrix.xml")
+  if vendor_included:
+    compatibility_files += vendor_metadata
+  if system_included:
+    compatibility_files += system_metadata
+
+  # Create new archive.
+  compatibility_archive = tempfile.NamedTemporaryFile()
+  compatibility_archive_zip = zipfile.ZipFile(compatibility_archive, "w",
+      compression=zipfile.ZIP_DEFLATED)
+
+  # Add metadata.
+  for file_name in compatibility_files:
+    target_file_name = "META/" + file_name
+
+    if target_file_name in target_zip.namelist():
+      data = target_zip.read(target_file_name)
+      common.ZipWriteStr(compatibility_archive_zip, file_name, data)
+
+  # Ensure files are written before we copy into output_zip.
+  compatibility_archive_zip.close()
+
+  # Only add the archive if we have any compatibility info.
+  if compatibility_archive_zip.namelist():
+    common.ZipWrite(output_zip, compatibility_archive.name,
+                    arcname="compatibility.zip",
+                    compress_type=zipfile.ZIP_STORED)
+
+
 def WriteFullOTAPackage(input_zip, output_zip):
   # TODO: how to determine this?  We don't know what version it will
   # be installed on top of. For now, we expect the API just won't
@@ -942,6 +996,9 @@
     if 'care_map.txt' in zip_file.namelist():
       offsets.append(ComputeEntryOffsetSize('care_map.txt'))
 
+    if 'compatibility.zip' in zip_file.namelist():
+      offsets.append(ComputeEntryOffsetSize('compatibility.zip'))
+
     # 'META-INF/com/android/metadata' is required. We don't know its actual
     # offset and length (as well as the values for other entries). So we
     # reserve 10-byte as a placeholder, which is to cover the space for metadata
@@ -964,21 +1021,17 @@
   # The place where the output from the subprocess should go.
   log_file = sys.stdout if OPTIONS.verbose else subprocess.PIPE
 
-  # Setup signing keys.
-  if OPTIONS.package_key is None:
-    OPTIONS.package_key = OPTIONS.info_dict.get(
-        "default_system_dev_certificate",
-        "build/target/product/security/testkey")
-
   # A/B updater expects a signing key in RSA format. Gets the key ready for
   # later use in step 3, unless a payload_signer has been specified.
   if OPTIONS.payload_signer is None:
     cmd = ["openssl", "pkcs8",
            "-in", OPTIONS.package_key + OPTIONS.private_key_suffix,
-           "-inform", "DER", "-nocrypt"]
+           "-inform", "DER"]
+    pw = OPTIONS.key_passwords[OPTIONS.package_key]
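+    # Pass the key password via -passin when the key is protected; otherwise
+    # fall back to -nocrypt as before.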
+    cmd.extend(["-passin", "pass:" + pw] if pw else ["-nocrypt"])
     rsa_key = common.MakeTempFile(prefix="key-", suffix=".key")
     cmd.extend(["-out", rsa_key])
-    p1 = common.Run(cmd, stdout=log_file, stderr=subprocess.STDOUT)
+    p1 = common.Run(cmd, verbose=False, stdout=log_file, stderr=subprocess.STDOUT)
     p1.communicate()
     assert p1.returncode == 0, "openssl pkcs8 failed"
 
@@ -1113,8 +1166,9 @@
 
   # If dm-verity is supported for the device, copy contents of care_map
   # into A/B OTA package.
-  if OPTIONS.info_dict.get("verity") == "true":
-    target_zip = zipfile.ZipFile(target_file, "r")
+  target_zip = zipfile.ZipFile(target_file, "r")
+  if (OPTIONS.info_dict.get("verity") == "true" or
+      OPTIONS.info_dict.get("avb_enable") == "true"):
     care_map_path = "META/care_map.txt"
     namelist = target_zip.namelist()
     if care_map_path in namelist:
@@ -1123,7 +1177,34 @@
           compress_type=zipfile.ZIP_STORED)
     else:
       print("Warning: cannot find care map file in target_file package")
-    common.ZipClose(target_zip)
+
+  if HasVendorPartition(target_zip):
+    update_vendor = True
+    update_system = True
+
+    # For an incremental OTA, figure out which images are being updated so
+    # that metadata is only included for those images.
+    if source_file is not None:
+      input_tmp, input_zip = common.UnzipTemp(
+          target_file, UNZIP_PATTERN)
+      source_tmp, source_zip = common.UnzipTemp(
+          source_file, UNZIP_PATTERN)
+
+      vendor_src = GetImage("vendor", source_tmp)
+      vendor_tgt = GetImage("vendor", input_tmp)
+      system_src = GetImage("system", source_tmp)
+      system_tgt = GetImage("system", input_tmp)
+
+      update_system = system_src.TotalSha1() != system_tgt.TotalSha1()
+      update_vendor = vendor_src.TotalSha1() != vendor_tgt.TotalSha1()
+
+      input_zip.close()
+      source_zip.close()
+
+    target_zip = zipfile.ZipFile(target_file, "r")
+    AddCompatibilityArchive(target_zip, output_zip, update_system,
+                            update_vendor)
+  common.ZipClose(target_zip)
 
   # Write the current metadata entry with placeholders.
   metadata['ota-streaming-property-files'] = ComputeStreamingMetadata(
@@ -1235,6 +1316,8 @@
       OPTIONS.payload_signer = a
     elif o == "--payload_signer_args":
       OPTIONS.payload_signer_args = shlex.split(a)
+    elif o == "--extracted_input_target_files":
+      OPTIONS.extracted_input = a
     else:
       return False
     return True
@@ -1265,6 +1348,7 @@
                                  "log_diff=",
                                  "payload_signer=",
                                  "payload_signer_args=",
+                                 "extracted_input_target_files=",
                              ], extra_option_handler=option_handler)
 
   if len(args) != 2:
@@ -1287,12 +1371,26 @@
 
   # Load the dict file from the zip directly to have a peek at the OTA type.
   # For packages using A/B update, unzipping is not needed.
-  input_zip = zipfile.ZipFile(args[0], "r")
-  OPTIONS.info_dict = common.LoadInfoDict(input_zip)
-  common.ZipClose(input_zip)
+  if OPTIONS.extracted_input is not None:
+    OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.extracted_input, OPTIONS.extracted_input)
+  else:
+    input_zip = zipfile.ZipFile(args[0], "r")
+    OPTIONS.info_dict = common.LoadInfoDict(input_zip)
+    common.ZipClose(input_zip)
 
   ab_update = OPTIONS.info_dict.get("ab_update") == "true"
 
+  # Use the default key to sign the package if not specified with package_key.
+  # package_key is needed for A/B updates, so always define it if an A/B
+  # update is being created.
+  if not OPTIONS.no_signing or ab_update:
+    if OPTIONS.package_key is None:
+      OPTIONS.package_key = OPTIONS.info_dict.get(
+          "default_system_dev_certificate",
+          "build/target/product/security/testkey")
+    # Get signing keys
+    OPTIONS.key_passwords = common.GetKeyPasswords([OPTIONS.package_key])
+
   if ab_update:
     if OPTIONS.incremental_source is not None:
       OPTIONS.target_info_dict = OPTIONS.info_dict
@@ -1319,12 +1417,18 @@
   if OPTIONS.extra_script is not None:
     OPTIONS.extra_script = open(OPTIONS.extra_script).read()
 
-  print("unzipping target target-files...")
-  OPTIONS.input_tmp, input_zip = common.UnzipTemp(
-      args[0], UNZIP_PATTERN)
+  if OPTIONS.extracted_input is not None:
+    OPTIONS.input_tmp = OPTIONS.extracted_input
+    OPTIONS.target_tmp = OPTIONS.input_tmp
+    OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.input_tmp, OPTIONS.input_tmp)
+    input_zip = zipfile.ZipFile(args[0], "r")
+  else:
+    print("unzipping target target-files...")
+    OPTIONS.input_tmp, input_zip = common.UnzipTemp(
+        args[0], UNZIP_PATTERN)
 
-  OPTIONS.target_tmp = OPTIONS.input_tmp
-  OPTIONS.info_dict = common.LoadInfoDict(input_zip, OPTIONS.target_tmp)
+    OPTIONS.target_tmp = OPTIONS.input_tmp
+    OPTIONS.info_dict = common.LoadInfoDict(input_zip, OPTIONS.target_tmp)
 
   if OPTIONS.verbose:
     print("--- target info ---")
@@ -1352,13 +1456,6 @@
     raise common.ExternalError(
         "--- target build has specified no recovery ---")
 
-  # Use the default key to sign the package if not specified with package_key.
-  if not OPTIONS.no_signing:
-    if OPTIONS.package_key is None:
-      OPTIONS.package_key = OPTIONS.info_dict.get(
-          "default_system_dev_certificate",
-          "build/target/product/security/testkey")
-
   # Set up the output zip. Create a temporary zip file if signing is needed.
   if OPTIONS.no_signing:
     if os.path.exists(args[1]):
diff --git a/tools/releasetools/ota_package_parser.py b/tools/releasetools/ota_package_parser.py
new file mode 100755
index 0000000..331122b
--- /dev/null
+++ b/tools/releasetools/ota_package_parser.py
@@ -0,0 +1,228 @@
+#!/usr/bin/env python
+# Copyright (C) 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import logging
+import sys
+import traceback
+import zipfile
+
+from rangelib import RangeSet
+
+class Stash(object):
+  """Build a map to track stashed blocks during update simulation."""
+
+  def __init__(self):
+    self.blocks_stashed = 0
+    self.overlap_blocks_stashed = 0
+    self.max_stash_needed = 0
+    self.current_stash_size = 0
+    self.stash_map = {}
+
+  def StashBlocks(self, SHA1, blocks):
+    if SHA1 in self.stash_map:
+      logging.info("already stashed {}: {}".format(SHA1, blocks))
+      return
+    self.blocks_stashed += blocks.size()
+    self.current_stash_size += blocks.size()
+    self.max_stash_needed = max(self.current_stash_size, self.max_stash_needed)
+    self.stash_map[SHA1] = blocks
+
+  def FreeBlocks(self, SHA1):
+    assert SHA1 in self.stash_map, "stash {} not found".format(SHA1)
+    self.current_stash_size -= self.stash_map[SHA1].size()
+    del self.stash_map[SHA1]
+
+  def HandleOverlapBlocks(self, SHA1, blocks):
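+    # When a command's target range overlaps its source range, the updater has
+    # to stash the source blocks implicitly before writing; simulate that as a
+    # stash followed by an immediate free, and count it separately.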
+    self.StashBlocks(SHA1, blocks)
+    self.overlap_blocks_stashed += blocks.size()
+    self.FreeBlocks(SHA1)
+
+
+class OtaPackageParser(object):
+  """Parse a block-based OTA package."""
+
+  def __init__(self, package):
+    self.package = package
+    self.new_data_size = 0
+    self.patch_data_size = 0
+    self.block_written = 0
+    self.block_stashed = 0
+
+  @staticmethod
+  def GetSizeString(size):
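+    # Render a byte count as a human-readable string (e.g. "520 bytes", "1.2M").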
+    assert size >= 0
+    base = 1024.0
+    if size <= base:
+      return "{} bytes".format(size)
+    for units in ['K', 'M', 'G']:
+      if size <= base * 1024 or units == 'G':
+        return "{:.1f}{}".format(size / base, units)
+      base *= 1024
+
+  def ParseTransferList(self, name):
+    """Simulate the transfer commands and calculate the amout of I/O."""
+
+    logging.info("\nSimulating commands in '{}':".format(name))
+    lines = self.package.read(name).strip().splitlines()
+    assert len(lines) >= 4, "{} is too short; Transfer list expects at " \
+        "least 4 lines, it has {}".format(name, len(lines))
+    assert int(lines[0]) >= 3
+    logging.info("(version: {})".format(lines[0]))
+
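+    # Transfer list header: lines[0] is the version, lines[1] the expected
+    # total of blocks written, and lines[3] the expected maximum of
+    # simultaneously stashed blocks (both checked against the simulation
+    # below). The transfer commands start at lines[4].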
+    blocks_written = 0
+    my_stash = Stash()
+    for line in lines[4:]:
+      cmd_list = line.strip().split(" ")
+      cmd_name = cmd_list[0]
+      try:
+        if cmd_name == "new" or cmd_name == "zero":
+          assert len(cmd_list) == 2, "command format error: {}".format(line)
+          target_range = RangeSet.parse_raw(cmd_list[1])
+          blocks_written += target_range.size()
+        elif cmd_name == "move":
+          # Example:  move <onehash> <tgt_range> <src_blk_count> <src_range>
+          # [<loc_range> <stashed_blocks>]
+          assert len(cmd_list) >= 5, "command format error: {}".format(line)
+          target_range = RangeSet.parse_raw(cmd_list[2])
+          blocks_written += target_range.size()
+          if cmd_list[4] == '-':
+            continue
+          SHA1 = cmd_list[1]
+          source_range = RangeSet.parse_raw(cmd_list[4])
+          if target_range.overlaps(source_range):
+            my_stash.HandleOverlapBlocks(SHA1, source_range)
+        elif cmd_name == "bsdiff" or cmd_name == "imgdiff":
+          # Example:  bsdiff <offset> <len> <src_hash> <tgt_hash> <tgt_range>
+          # <src_blk_count> <src_range> [<loc_range> <stashed_blocks>]
+          assert len(cmd_list) >= 8, "command format error: {}".format(line)
+          target_range = RangeSet.parse_raw(cmd_list[5])
+          blocks_written += target_range.size()
+          if cmd_list[7] == '-':
+            continue
+          source_SHA1 = cmd_list[3]
+          source_range = RangeSet.parse_raw(cmd_list[7])
+          if target_range.overlaps(source_range):
+            my_stash.HandleOverlapBlocks(source_SHA1, source_range)
+        elif cmd_name == "stash":
+          assert len(cmd_list) == 3, "command format error: {}".format(line)
+          SHA1 = cmd_list[1]
+          source_range = RangeSet.parse_raw(cmd_list[2])
+          my_stash.StashBlocks(SHA1, source_range)
+        elif cmd_name == "free":
+          assert len(cmd_list) == 2, "command format error: {}".format(line)
+          SHA1 = cmd_list[1]
+          my_stash.FreeBlocks(SHA1)
+      except:
+        logging.error("failed to parse command in: " + line)
+        raise
+
+    self.block_written += blocks_written
+    self.block_stashed += my_stash.blocks_stashed
+
+    logging.info("blocks written: {}  (expected: {})".format(
+        blocks_written, lines[1]))
+    logging.info("max blocks stashed simultaneously: {}  (expected: {})".
+        format(my_stash.max_stash_needed, lines[3]))
+    logging.info("total blocks stashed: {}".format(my_stash.blocks_stashed))
+    logging.info("blocks stashed implicitly: {}".format(
+        my_stash.overlap_blocks_stashed))
+
+  def PrintDataInfo(self, partition):
+    logging.info("\nReading data info for {} partition:".format(partition))
+    new_data = self.package.getinfo(partition + ".new.dat")
+    patch_data = self.package.getinfo(partition + ".patch.dat")
+    logging.info("{:<40}{:<40}".format(new_data.filename, patch_data.filename))
+    logging.info("{:<40}{:<40}".format(
+          "compress_type: " + str(new_data.compress_type),
+          "compress_type: " + str(patch_data.compress_type)))
+    logging.info("{:<40}{:<40}".format(
+          "compressed_size: " + OtaPackageParser.GetSizeString(
+              new_data.compress_size),
+          "compressed_size: " + OtaPackageParser.GetSizeString(
+              patch_data.compress_size)))
+    logging.info("{:<40}{:<40}".format(
+        "file_size: " + OtaPackageParser.GetSizeString(new_data.file_size),
+        "file_size: " + OtaPackageParser.GetSizeString(patch_data.file_size)))
+
+    self.new_data_size += new_data.file_size
+    self.patch_data_size += patch_data.file_size
+
+  def AnalyzePartition(self, partition):
+    assert partition in ("system", "vendor")
+    assert partition + ".new.dat" in self.package.namelist()
+    assert partition + ".patch.dat" in self.package.namelist()
+    assert partition + ".transfer.list" in self.package.namelist()
+
+    self.PrintDataInfo(partition)
+    self.ParseTransferList(partition + ".transfer.list")
+
+  def PrintMetadata(self):
+    metadata_path = "META-INF/com/android/metadata"
+    logging.info("\nMetadata info:")
+    metadata_info = {}
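+    # Each metadata line is a key=value pair, e.g. "ota-type=BLOCK" or
+    # "pre-device=<device>".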
+    for line in self.package.read(metadata_path).strip().splitlines():
+      index = line.find("=")
+      metadata_info[line[0 : index].strip()] = line[index + 1:].strip()
+    assert metadata_info.get("ota-type") == "BLOCK"
+    assert "pre-device" in metadata_info
+    logging.info("device: {}".format(metadata_info["pre-device"]))
+    if "pre-build" in metadata_info:
+      logging.info("pre-build: {}".format(metadata_info["pre-build"]))
+    assert "post-build" in metadata_info
+    logging.info("post-build: {}".format(metadata_info["post-build"]))
+
+  def Analyze(self):
+    logging.info("Analyzing ota package: " + self.package.filename)
+    self.PrintMetadata()
+    assert "system.new.dat" in self.package.namelist()
+    self.AnalyzePartition("system")
+    if "vendor.new.dat" in self.package.namelist():
+      self.AnalyzePartition("vendor")
+
+    # TODO: Add analysis of other partitions (e.g. bootloader, boot, radio).
+
+    BLOCK_SIZE = 4096
+    logging.info("\nOTA package analyzed:")
+    logging.info("new data size (uncompressed): " +
+        OtaPackageParser.GetSizeString(self.new_data_size))
+    logging.info("patch data size (uncompressed): " +
+        OtaPackageParser.GetSizeString(self.patch_data_size))
+    logging.info("total data written: " +
+        OtaPackageParser.GetSizeString(self.block_written * BLOCK_SIZE))
+    logging.info("total data stashed: " +
+        OtaPackageParser.GetSizeString(self.block_stashed * BLOCK_SIZE))
+
+
+def main(argv):
+  parser = argparse.ArgumentParser(description='Analyze an OTA package.')
+  parser.add_argument("ota_package", help='Path of the OTA package.')
+  args = parser.parse_args(argv)
+
+  logging_format = '%(message)s'
+  logging.basicConfig(level=logging.INFO, format=logging_format)
+
+  try:
+    with zipfile.ZipFile(args.ota_package, 'r') as package:
+      package_parser = OtaPackageParser(package)
+      package_parser.Analyze()
+  except:
+    logging.error("Failed to read " + args.ota_package)
+    traceback.print_exc()
+    sys.exit(1)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index 394ad0a..c661333 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -92,7 +92,6 @@
 import errno
 import os
 import re
-import shutil
 import subprocess
 import tempfile
 import zipfile
@@ -104,6 +103,7 @@
 
 OPTIONS.extra_apks = {}
 OPTIONS.key_map = {}
+OPTIONS.rebuild_recovery = False
 OPTIONS.replace_ota_keys = False
 OPTIONS.replace_verity_public_key = False
 OPTIONS.replace_verity_private_key = False
@@ -186,27 +186,8 @@
   maxsize = max([len(os.path.basename(i.filename))
                  for i in input_tf_zip.infolist()
                  if i.filename.endswith('.apk')])
-  rebuild_recovery = False
   system_root_image = misc_info.get("system_root_image") == "true"
 
-  # tmpdir will only be used to regenerate the recovery-from-boot patch.
-  tmpdir = tempfile.mkdtemp()
-  def write_to_temp(fn, attr, data):
-    fn = os.path.join(tmpdir, fn)
-    if fn.endswith("/"):
-      fn = os.path.join(tmpdir, fn)
-      os.mkdir(fn)
-    else:
-      d = os.path.dirname(fn)
-      if d and not os.path.exists(d):
-        os.makedirs(d)
-
-      if attr >> 16 == 0xa1ff:
-        os.symlink(data, fn)
-      else:
-        with open(fn, "wb") as f:
-          f.write(data)
-
   for info in input_tf_zip.infolist():
     if info.filename.startswith("IMAGES/"):
       continue
@@ -237,21 +218,17 @@
       print "rewriting %s:" % (info.filename,)
       new_data = RewriteProps(data, misc_info)
       common.ZipWriteStr(output_tf_zip, out_info, new_data)
-      if info.filename in ("BOOT/RAMDISK/default.prop",
-                           "ROOT/default.prop",
-                           "RECOVERY/RAMDISK/default.prop"):
-        write_to_temp(info.filename, info.external_attr, new_data)
 
     elif info.filename.endswith("mac_permissions.xml"):
       print "rewriting %s with new keys." % (info.filename,)
       new_data = ReplaceCerts(data)
       common.ZipWriteStr(output_tf_zip, out_info, new_data)
 
-    # Trigger a rebuild of the recovery patch if needed.
+    # Ask add_img_to_target_files to rebuild the recovery patch if needed.
     elif info.filename in ("SYSTEM/recovery-from-boot.p",
                            "SYSTEM/etc/recovery.img",
                            "SYSTEM/bin/install-recovery.sh"):
-      rebuild_recovery = True
+      OPTIONS.rebuild_recovery = True
 
     # Don't copy OTA keys if we're replacing them.
     elif (OPTIONS.replace_ota_keys and
@@ -263,9 +240,8 @@
               "SYSTEM/etc/update_engine/update-payload-key.pub.pem")):
       pass
 
-    # Skip META/misc_info.txt if we will replace the verity private key later.
-    elif (OPTIONS.replace_verity_private_key and
-          info.filename == "META/misc_info.txt"):
+    # Skip META/misc_info.txt since we will write back the new values later.
+    elif info.filename == "META/misc_info.txt":
       pass
 
     # Skip verity public key if we will replace it.
@@ -283,36 +259,16 @@
     elif info.filename == "META/care_map.txt":
       pass
 
-    # Copy BOOT/, RECOVERY/, META/, ROOT/ to rebuild recovery patch. This case
-    # must come AFTER other matching rules.
-    elif (info.filename.startswith("BOOT/") or
-          info.filename.startswith("RECOVERY/") or
-          info.filename.startswith("META/") or
-          info.filename.startswith("ROOT/") or
-          info.filename == "SYSTEM/etc/recovery-resource.dat"):
-      write_to_temp(info.filename, info.external_attr, data)
-      common.ZipWriteStr(output_tf_zip, out_info, data)
-
     # A non-APK file; copy it verbatim.
     else:
       common.ZipWriteStr(output_tf_zip, out_info, data)
 
   if OPTIONS.replace_ota_keys:
-    new_recovery_keys = ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info)
-    if new_recovery_keys:
-      if system_root_image:
-        recovery_keys_location = "BOOT/RAMDISK/res/keys"
-      else:
-        recovery_keys_location = "RECOVERY/RAMDISK/res/keys"
-      # The "new_recovery_keys" has been already written into the output_tf_zip
-      # while calling ReplaceOtaKeys(). We're just putting the same copy to
-      # tmpdir in case we need to regenerate the recovery-from-boot patch.
-      write_to_temp(recovery_keys_location, 0o755 << 16, new_recovery_keys)
+    ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info)
 
-  # Replace the keyid string in META/misc_info.txt.
+  # Replace the keyid string in misc_info dict.
   if OPTIONS.replace_verity_private_key:
-    ReplaceVerityPrivateKey(input_tf_zip, output_tf_zip, misc_info,
-                            OPTIONS.replace_verity_private_key[1])
+    ReplaceVerityPrivateKey(misc_info, OPTIONS.replace_verity_private_key[1])
 
   if OPTIONS.replace_verity_public_key:
     if system_root_image:
@@ -321,33 +277,16 @@
       dest = "BOOT/RAMDISK/verity_key"
     # We are replacing the one in boot image only, since the one under
     # recovery won't ever be needed.
-    new_data = ReplaceVerityPublicKey(
+    ReplaceVerityPublicKey(
         output_tf_zip, dest, OPTIONS.replace_verity_public_key[1])
-    write_to_temp(dest, 0o755 << 16, new_data)
 
   # Replace the keyid string in BOOT/cmdline.
   if OPTIONS.replace_verity_keyid:
-    new_cmdline = ReplaceVerityKeyId(input_tf_zip, output_tf_zip,
-      OPTIONS.replace_verity_keyid[1])
-    # Writing the new cmdline to tmpdir is redundant as the bootimage
-    # gets build in the add_image_to_target_files and rebuild_recovery
-    # is not exercised while building the boot image for the A/B
-    # path
-    write_to_temp("BOOT/cmdline", 0o755 << 16, new_cmdline)
+    ReplaceVerityKeyId(input_tf_zip, output_tf_zip,
+                       OPTIONS.replace_verity_keyid[1])
 
-  if rebuild_recovery:
-    recovery_img = common.GetBootableImage(
-        "recovery.img", "recovery.img", tmpdir, "RECOVERY", info_dict=misc_info)
-    boot_img = common.GetBootableImage(
-        "boot.img", "boot.img", tmpdir, "BOOT", info_dict=misc_info)
-
-    def output_sink(fn, data):
-      common.ZipWriteStr(output_tf_zip, "SYSTEM/" + fn, data)
-
-    common.MakeRecoveryPatch(tmpdir, output_sink, recovery_img, boot_img,
-                             info_dict=misc_info)
-
-  shutil.rmtree(tmpdir)
+  # Write back misc_info with the latest values.
+  ReplaceMiscInfoTxt(input_tf_zip, output_tf_zip, misc_info)
 
 
 def ReplaceCerts(data):
@@ -526,20 +465,12 @@
 
 
 def ReplaceVerityPublicKey(targetfile_zip, filename, key_path):
-  print "Replacing verity public key with %s" % key_path
-  with open(key_path) as f:
-    data = f.read()
-  common.ZipWriteStr(targetfile_zip, filename, data)
-  return data
+  print "Replacing verity public key with %s" % (key_path,)
+  common.ZipWrite(targetfile_zip, key_path, arcname=filename)
 
 
-def ReplaceVerityPrivateKey(targetfile_input_zip, targetfile_output_zip,
-                            misc_info, key_path):
-  print "Replacing verity private key with %s" % key_path
-  current_key = misc_info["verity_key"]
-  original_misc_info = targetfile_input_zip.read("META/misc_info.txt")
-  new_misc_info = original_misc_info.replace(current_key, key_path)
-  common.ZipWriteStr(targetfile_output_zip, "META/misc_info.txt", new_misc_info)
+def ReplaceVerityPrivateKey(misc_info, key_path):
+  print "Replacing verity private key with %s" % (key_path,)
   misc_info["verity_key"] = key_path
 
 
@@ -568,7 +499,21 @@
   out_cmdline = out_cmdline.strip()
   print "out_cmdline %s" % (out_cmdline)
   common.ZipWriteStr(targetfile_output_zip, "BOOT/cmdline", out_cmdline)
-  return out_cmdline
+
+
+def ReplaceMiscInfoTxt(input_zip, output_zip, misc_info):
+  """Replaces META/misc_info.txt.
+
+  Only writes back the keys that are present in the original
+  META/misc_info.txt, because the current in-memory dict contains additional
+  items computed at runtime.
+  """
+  misc_info_old = common.LoadDictionaryFromLines(
+      input_zip.read('META/misc_info.txt').split('\n'))
+  items = []
+  for key in sorted(misc_info):
+    if key in misc_info_old:
+      items.append('%s=%s' % (key, misc_info[key]))
+  common.ZipWriteStr(output_zip, "META/misc_info.txt", '\n'.join(items))
 
 
 def BuildKeyMap(misc_info, key_mapping_options):
@@ -689,7 +634,9 @@
     sys.exit(1)
 
   input_zip = zipfile.ZipFile(args[0], "r")
-  output_zip = zipfile.ZipFile(args[1], "w")
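+  # Use deflate compression and allow zip64 extensions so that large
+  # target_files archives (over 2 GiB) can be written.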
+  output_zip = zipfile.ZipFile(args[1], "w",
+                               compression=zipfile.ZIP_DEFLATED,
+                               allowZip64=True)
 
   misc_info = common.LoadInfoDict(input_zip)
 
@@ -711,7 +658,12 @@
   common.ZipClose(output_zip)
 
   # Skip building userdata.img and cache.img when signing the target files.
-  new_args = ["--is_signing", args[1]]
+  new_args = ["--is_signing"]
+  # add_img_to_target_files builds the system image from scratch, so the
+  # recovery patch is guaranteed to be regenerated there.
+  if OPTIONS.rebuild_recovery:
+    new_args.append("--rebuild_recovery")
+  new_args.append(args[1])
   add_img_to_target_files.main(new_args)
 
   print "done."
diff --git a/tools/releasetools/test_blockimgdiff.py b/tools/releasetools/test_blockimgdiff.py
index cc1fa23..e5a3694 100644
--- a/tools/releasetools/test_blockimgdiff.py
+++ b/tools/releasetools/test_blockimgdiff.py
@@ -41,14 +41,14 @@
     block_image_diff = BlockImageDiff(tgt, src)
 
     transfers = block_image_diff.transfers
-    t0 = Transfer(
-        "t1", "t1", RangeSet("10-15"), RangeSet("0-5"), "move", transfers)
-    t1 = Transfer(
-        "t2", "t2", RangeSet("20-25"), RangeSet("0-7"), "move", transfers)
-    t2 = Transfer(
-        "t3", "t3", RangeSet("30-35"), RangeSet("0-4"), "move", transfers)
-    t3 = Transfer(
-        "t4", "t4", RangeSet("0-10"), RangeSet("40-50"), "move", transfers)
+    t0 = Transfer("t1", "t1", RangeSet("10-15"), RangeSet("0-5"), "t1hash",
+                  "t1hash", "move", transfers)
+    t1 = Transfer("t2", "t2", RangeSet("20-25"), RangeSet("0-7"), "t2hash",
+                  "t2hash", "move", transfers)
+    t2 = Transfer("t3", "t3", RangeSet("30-35"), RangeSet("0-4"), "t3hash",
+                  "t3hash", "move", transfers)
+    t3 = Transfer("t4", "t4", RangeSet("0-10"), RangeSet("40-50"), "t4hash",
+                  "t4hash", "move", transfers)
 
     block_image_diff.GenerateDigraph()
     t3_goes_after_copy = t3.goes_after.copy()
@@ -87,10 +87,10 @@
     block_image_diff = BlockImageDiff(tgt, src, version=3)
 
     transfers = block_image_diff.transfers
-    Transfer("t1", "t1", RangeSet("11-15"), RangeSet("20-29"), "diff",
-             transfers)
-    Transfer("t2", "t2", RangeSet("20-29"), RangeSet("11-15"), "diff",
-             transfers)
+    Transfer("t1", "t1", RangeSet("11-15"), RangeSet("20-29"), "t1hash",
+             "t1hash", "diff", transfers)
+    Transfer("t2", "t2", RangeSet("20-29"), RangeSet("11-15"), "t2hash",
+             "t2hash", "diff", transfers)
 
     block_image_diff.GenerateDigraph()
     block_image_diff.FindVertexSequence()
@@ -121,12 +121,12 @@
     block_image_diff = BlockImageDiff(tgt, src, version=3)
 
     transfers = block_image_diff.transfers
-    t1 = Transfer("t1", "t1", RangeSet("11-15"), RangeSet("1-5"), "diff",
-                  transfers)
-    t2 = Transfer("t2", "t2", RangeSet("21-25"), RangeSet("11-15"), "diff",
-                  transfers)
+    t1 = Transfer("t1", "t1", RangeSet("11-15"), RangeSet("1-5"), "t1hash",
+                  "t1hash", "diff", transfers)
+    t2 = Transfer("t2", "t2", RangeSet("21-25"), RangeSet("11-15"), "t2hash",
+                  "t2hash", "diff", transfers)
     t3 = Transfer("t3", "t3", RangeSet("1-5 30-39"), RangeSet("11-15 30-39"),
-                  "diff", transfers)
+                  "t3hash", "t3hash", "diff", transfers)
 
     block_image_diff.GenerateDigraph()
 
diff --git a/tools/releasetools/validate_target_files.py b/tools/releasetools/validate_target_files.py
new file mode 100755
index 0000000..1dd3159
--- /dev/null
+++ b/tools/releasetools/validate_target_files.py
@@ -0,0 +1,127 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Validate a given (signed) target_files.zip.
+
+It performs checks to ensure the integrity of the input zip.
+ - It verifies the consistency between the files in IMAGES/system.img (read
+   via IMAGES/system.map) and the ones under the unpacked SYSTEM/ folder. The
+   same check also applies to the vendor image if present.
+"""
+
+import common
+import logging
+import os.path
+import sparse_img
+import sys
+
+
+def _GetImage(which, tmpdir):
+  assert which in ('system', 'vendor')
+
+  path = os.path.join(tmpdir, 'IMAGES', which + '.img')
+  mappath = os.path.join(tmpdir, 'IMAGES', which + '.map')
+
+  # Map file must exist (allowed to be empty).
+  assert os.path.exists(path) and os.path.exists(mappath)
+
+  clobbered_blocks = '0'
+  return sparse_img.SparseImage(path, mappath, clobbered_blocks)
+
+
+def ValidateFileConsistency(input_zip, input_tmp):
+  """Compare the files from image files and unpacked folders."""
+
+  def RoundUpTo4K(value):
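+    # Files in the image are stored in 4096-byte blocks, so the on-disk data
+    # is padded up to a 4K boundary.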
+    rounded_up = value + 4095
+    return rounded_up - (rounded_up % 4096)
+
+  def CheckAllFiles(which):
+    logging.info('Checking %s image.', which)
+    image = _GetImage(which, input_tmp)
+    prefix = '/' + which
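+    # Entries in the block map that don't start with the partition prefix
+    # (such as the special __ZERO / __NONZERO-* ranges) are not real files;
+    # skip them.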
+    for entry in image.file_map:
+      if not entry.startswith(prefix):
+        continue
+
+      # Read the blocks that the file resides in. Note that they will contain
+      # the bytes past the file length, which are expected to be padded with
+      # '\0's.
+      ranges = image.file_map[entry]
+      blocks_sha1 = image.RangeSha1(ranges)
+
+      # The filename under unpacked directory, such as SYSTEM/bin/sh.
+      unpacked_name = os.path.join(
+          input_tmp, which.upper(), entry[(len(prefix) + 1):])
+      with open(unpacked_name) as f:
+        file_data = f.read()
+      file_size = len(file_data)
+      file_size_rounded_up = RoundUpTo4K(file_size)
+      file_data += '\0' * (file_size_rounded_up - file_size)
+      file_sha1 = common.File(entry, file_data).sha1
+
+      assert blocks_sha1 == file_sha1, \
+          'file: %s, range: %s, blocks_sha1: %s, file_sha1: %s' % (
+              entry, ranges, blocks_sha1, file_sha1)
+
+  logging.info('Validating file consistency.')
+
+  # Verify IMAGES/system.img.
+  CheckAllFiles('system')
+
+  # Verify IMAGES/vendor.img if applicable.
+  if 'VENDOR/' in input_zip.namelist():
+    CheckAllFiles('vendor')
+
+  # Not checking IMAGES/system_other.img since it doesn't have the map file.
+
+
+def main(argv):
+  def option_handler():
+    return True
+
+  args = common.ParseOptions(
+      argv, __doc__, extra_opts="",
+      extra_long_opts=[],
+      extra_option_handler=option_handler)
+
+  if len(args) != 1:
+    common.Usage(__doc__)
+    sys.exit(1)
+
+  logging_format = '%(asctime)s - %(filename)s - %(levelname)-8s: %(message)s'
+  date_format = '%Y/%m/%d %H:%M:%S'
+  logging.basicConfig(level=logging.INFO, format=logging_format,
+                      datefmt=date_format)
+
+  logging.info("Unzipping the input target_files.zip: %s", args[0])
+  input_tmp, input_zip = common.UnzipTemp(args[0])
+
+  ValidateFileConsistency(input_zip, input_tmp)
+
+  # TODO: Check if the OTA keys have been properly updated (the ones on /system,
+  # in recovery image).
+
+  # TODO(b/35411009): Verify the contents in /system/bin/install-recovery.sh.
+
+  logging.info("Done.")
+
+
+if __name__ == '__main__':
+  try:
+    main(sys.argv[1:])
+  finally:
+    common.Cleanup()
diff --git a/tools/signapk/Android.mk b/tools/signapk/Android.mk
index 4506e2f..051a51d 100644
--- a/tools/signapk/Android.mk
+++ b/tools/signapk/Android.mk
@@ -30,7 +30,6 @@
 include $(BUILD_HOST_JAVA_LIBRARY)
 
 ifeq ($(TARGET_BUILD_APPS),)
-ifeq ($(BRILLO),)
 # The post-build signing tools need signapk.jar and its shared libraries,
 # but we don't need this if we're just doing unbundled apps.
 my_dist_files := $(LOCAL_INSTALLED_MODULE) \
@@ -39,4 +38,3 @@
 $(call dist-for-goals,droidcore,$(my_dist_files))
 my_dist_files :=
 endif
-endif
diff --git a/tools/warn.py b/tools/warn.py
index 5be6d9d..44ad368 100755
--- a/tools/warn.py
+++ b/tools/warn.py
@@ -73,14 +73,9 @@
 # New dynamic HTML related function to emit data:
 #   escape_string, strip_escape_string, emit_warning_arrays
 #   emit_js_data():
-#
-# To emit csv files of warning message counts:
-#   flag --gencsv
-#   description_for_csv, string_for_csv:
-#   count_severity(sev, kind):
-#   dump_csv():
 
 import argparse
+import csv
 import multiprocessing
 import os
 import re
@@ -88,6 +83,9 @@
 import sys
 
 parser = argparse.ArgumentParser(description='Convert a build log into HTML')
+parser.add_argument('--csvpath',
+                    help='Save CSV warning file to the passed absolute path',
+                    default=None)
 parser.add_argument('--gencsv',
                     help='Generate a CSV file with number of various warnings',
                     action='store_true',
@@ -140,6 +138,24 @@
   column_headers = [a[1] for a in attributes]
   headers = [a[2] for a in attributes]
 
+
+def tidy_warn_pattern(description, pattern):
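+  # Build a warn_patterns entry that matches clang-tidy output lines ending
+  # with "[<check-name>]".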
+  return {
+      'category': 'C/C++',
+      'severity': Severity.TIDY,
+      'description': 'clang-tidy ' + description,
+      'patterns': [r'.*: .+\[' + pattern + r'\]$']
+  }
+
+
+def simple_tidy_warn_pattern(description):
+  return tidy_warn_pattern(description, description)
+
+
+def group_tidy_warn_pattern(description):
+  return tidy_warn_pattern(description, description + r'-.+')
+
+
 warn_patterns = [
     # pylint:disable=line-too-long,g-inconsistent-quotes
     {'category': 'C/C++', 'severity': Severity.ANALYZER,
@@ -225,6 +241,7 @@
      'description': 'Unused function, variable or label',
      'patterns': [r".*: warning: '.+' defined but not used",
                   r".*: warning: unused function '.+'",
+                  r".*: warning: lambda capture .* is not used",
                   r".*: warning: private field '.+' is not used",
                   r".*: warning: unused variable '.+'"]},
     {'category': 'C/C++', 'severity': Severity.MEDIUM, 'option': '-Wunused-value',
@@ -259,6 +276,9 @@
     {'category': 'C/C++', 'severity': Severity.MEDIUM, 'option': '-Wformat-extra-args',
      'description': 'Too many arguments for format string',
      'patterns': [r".*: warning: too many arguments for format"]},
+    {'category': 'C/C++', 'severity': Severity.MEDIUM,
+     'description': 'Too many arguments in call',
+     'patterns': [r".*: warning: too many arguments in call to "]},
     {'category': 'C/C++', 'severity': Severity.MEDIUM, 'option': '-Wformat-invalid-specifier',
      'description': 'Invalid format specifier',
      'patterns': [r".*: warning: invalid .+ specifier '.+'.+format-invalid-specifier"]},
@@ -438,6 +458,9 @@
     {'category': 'C/C++', 'severity': Severity.MEDIUM, 'option': '-Wswitch-enum',
      'description': 'Enum value not handled in switch',
      'patterns': [r".*: warning: .*enumeration value.* not handled in switch.+Wswitch"]},
+    {'category': 'C/C++', 'severity': Severity.MEDIUM, 'option': '-Wuser-defined-warnings',
+     'description': 'User defined warnings',
+     'patterns': [r".*: warning: .* \[-Wuser-defined-warnings\]$"]},
     {'category': 'java', 'severity': Severity.MEDIUM, 'option': '-encoding',
      'description': 'Java: Non-ascii characters used, but ascii encoding specified',
      'patterns': [r".*: warning: unmappable character for encoding ascii"]},
@@ -453,6 +476,9 @@
     {'category': 'java', 'severity': Severity.MEDIUM,
      'description': '_ used as an identifier',
      'patterns': [r".*: warning: '_' used as an identifier"]},
+    {'category': 'java', 'severity': Severity.HIGH,
+     'description': 'Use of internal proprietary API',
+     'patterns': [r".*: warning: .* is internal proprietary API and may be removed"]},
 
     # Warnings from Javac
     {'category': 'java',
@@ -1384,6 +1410,9 @@
      'description': 'Taking address of temporary',
      'patterns': [r".*: warning: taking address of temporary"]},
     {'category': 'C/C++', 'severity': Severity.MEDIUM,
+     'description': 'Taking address of packed member',
+     'patterns': [r".*: warning: taking address of packed member"]},
+    {'category': 'C/C++', 'severity': Severity.MEDIUM,
      'description': 'Possible broken line continuation',
      'patterns': [r".*: warning: backslash and newline separated by space"]},
     {'category': 'C/C++', 'severity': Severity.MEDIUM, 'option': '-Wundefined-var-template',
@@ -1527,6 +1556,9 @@
     {'category': 'C/C++', 'severity': Severity.MEDIUM, 'option': '-Wconversion-null',
      'description': 'Converting to non-pointer type from NULL',
      'patterns': [r".*: warning: converting to non-pointer type '.+' from NULL"]},
+    {'category': 'C/C++', 'severity': Severity.MEDIUM, 'option': '-Wsign-conversion',
+     'description': 'Implicit sign conversion',
+     'patterns': [r".*: warning: implicit conversion changes signedness"]},
     {'category': 'C/C++', 'severity': Severity.MEDIUM, 'option': '-Wnull-conversion',
      'description': 'Converting NULL to non-pointer type',
      'patterns': [r".*: warning: implicit conversion of NULL constant to '.+'"]},
@@ -1656,6 +1688,9 @@
     {'category': 'C/C++', 'severity': Severity.LOW, 'option': '-Winvalid-pp-token',
      'description': 'Invalid pp token',
      'patterns': [r".*: warning: missing .+Winvalid-pp-token"]},
+    {'category': 'link', 'severity': Severity.LOW,
+     'description': 'need glibc to link',
+     'patterns': [r".*: warning: .* requires at runtime .* glibc .* for linking"]},
 
     {'category': 'C/C++', 'severity': Severity.MEDIUM,
      'description': 'Operator new returns NULL',
@@ -1750,6 +1785,15 @@
      'description': 'Mismatched class vs struct tags',
      'patterns': [r".*: warning: '.+' defined as a .+ here but previously declared as a .+mismatched-tags",
                   r".*: warning: .+ was previously declared as a .+mismatched-tags"]},
+    {'category': 'FindEmulator', 'severity': Severity.HARMLESS,
+     'description': 'FindEmulator: No such file or directory',
+     'patterns': [r".*: warning: FindEmulator: .* No such file or directory"]},
+    {'category': 'google_tests', 'severity': Severity.HARMLESS,
+     'description': 'google_tests: unknown installed file',
+     'patterns': [r".*: warning: .*_tests: Unknown installed file for module"]},
+    {'category': 'make', 'severity': Severity.HARMLESS,
+     'description': 'unusual tags debug eng',
+     'patterns': [r".*: warning: .*: unusual tags debug eng"]},
 
     # these next ones are to deal with formatting problems resulting from the log being mixed up by 'make -j'
     {'category': 'C/C++', 'severity': Severity.SKIP,
@@ -1763,60 +1807,46 @@
      'patterns': [r".*: warning: In file included from .+,"]},
 
     # warnings from clang-tidy
-    {'category': 'C/C++', 'severity': Severity.TIDY,
-     'description': 'clang-tidy readability',
-     'patterns': [r".*: .+\[readability-.+\]$"]},
-    {'category': 'C/C++', 'severity': Severity.TIDY,
-     'description': 'clang-tidy c++ core guidelines',
-     'patterns': [r".*: .+\[cppcoreguidelines-.+\]$"]},
-    {'category': 'C/C++', 'severity': Severity.TIDY,
-     'description': 'clang-tidy google-default-arguments',
-     'patterns': [r".*: .+\[google-default-arguments\]$"]},
-    {'category': 'C/C++', 'severity': Severity.TIDY,
-     'description': 'clang-tidy google-runtime-int',
-     'patterns': [r".*: .+\[google-runtime-int\]$"]},
-    {'category': 'C/C++', 'severity': Severity.TIDY,
-     'description': 'clang-tidy google-runtime-operator',
-     'patterns': [r".*: .+\[google-runtime-operator\]$"]},
-    {'category': 'C/C++', 'severity': Severity.TIDY,
-     'description': 'clang-tidy google-runtime-references',
-     'patterns': [r".*: .+\[google-runtime-references\]$"]},
-    {'category': 'C/C++', 'severity': Severity.TIDY,
-     'description': 'clang-tidy google-build',
-     'patterns': [r".*: .+\[google-build-.+\]$"]},
-    {'category': 'C/C++', 'severity': Severity.TIDY,
-     'description': 'clang-tidy google-explicit',
-     'patterns': [r".*: .+\[google-explicit-.+\]$"]},
-    {'category': 'C/C++', 'severity': Severity.TIDY,
-     'description': 'clang-tidy google-readability',
-     'patterns': [r".*: .+\[google-readability-.+\]$"]},
-    {'category': 'C/C++', 'severity': Severity.TIDY,
-     'description': 'clang-tidy google-global',
-     'patterns': [r".*: .+\[google-global-.+\]$"]},
-    {'category': 'C/C++', 'severity': Severity.TIDY,
-     'description': 'clang-tidy google- other',
-     'patterns': [r".*: .+\[google-.+\]$"]},
-    {'category': 'C/C++', 'severity': Severity.TIDY,
-     'description': 'clang-tidy modernize',
-     'patterns': [r".*: .+\[modernize-.+\]$"]},
-    {'category': 'C/C++', 'severity': Severity.TIDY,
-     'description': 'clang-tidy misc',
-     'patterns': [r".*: .+\[misc-.+\]$"]},
-    {'category': 'C/C++', 'severity': Severity.TIDY,
-     'description': 'clang-tidy performance-faster-string-find',
-     'patterns': [r".*: .+\[performance-faster-string-find\]$"]},
-    {'category': 'C/C++', 'severity': Severity.TIDY,
-     'description': 'clang-tidy performance-for-range-copy',
-     'patterns': [r".*: .+\[performance-for-range-copy\]$"]},
-    {'category': 'C/C++', 'severity': Severity.TIDY,
-     'description': 'clang-tidy performance-implicit-cast-in-loop',
-     'patterns': [r".*: .+\[performance-implicit-cast-in-loop\]$"]},
-    {'category': 'C/C++', 'severity': Severity.TIDY,
-     'description': 'clang-tidy performance-unnecessary-copy-initialization',
-     'patterns': [r".*: .+\[performance-unnecessary-copy-initialization\]$"]},
-    {'category': 'C/C++', 'severity': Severity.TIDY,
-     'description': 'clang-tidy performance-unnecessary-value-param',
-     'patterns': [r".*: .+\[performance-unnecessary-value-param\]$"]},
+    group_tidy_warn_pattern('cert'),
+    group_tidy_warn_pattern('clang-diagnostic'),
+    group_tidy_warn_pattern('cppcoreguidelines'),
+    group_tidy_warn_pattern('llvm'),
+    simple_tidy_warn_pattern('google-default-arguments'),
+    simple_tidy_warn_pattern('google-runtime-int'),
+    simple_tidy_warn_pattern('google-runtime-operator'),
+    simple_tidy_warn_pattern('google-runtime-references'),
+    group_tidy_warn_pattern('google-build'),
+    group_tidy_warn_pattern('google-explicit'),
+    group_tidy_warn_pattern('google-readability'),
+    group_tidy_warn_pattern('google-global'),
+    group_tidy_warn_pattern('google'),
+    simple_tidy_warn_pattern('hicpp-explicit-conversions'),
+    simple_tidy_warn_pattern('hicpp-function-size'),
+    simple_tidy_warn_pattern('hicpp-invalid-access-moved'),
+    simple_tidy_warn_pattern('hicpp-member-init'),
+    simple_tidy_warn_pattern('hicpp-delete-operators'),
+    simple_tidy_warn_pattern('hicpp-special-member-functions'),
+    simple_tidy_warn_pattern('hicpp-use-equals-default'),
+    simple_tidy_warn_pattern('hicpp-use-equals-delete'),
+    simple_tidy_warn_pattern('hicpp-no-assembler'),
+    simple_tidy_warn_pattern('hicpp-noexcept-move'),
+    simple_tidy_warn_pattern('hicpp-use-override'),
+    group_tidy_warn_pattern('hicpp'),
+    group_tidy_warn_pattern('modernize'),
+    group_tidy_warn_pattern('misc'),
+    simple_tidy_warn_pattern('performance-faster-string-find'),
+    simple_tidy_warn_pattern('performance-for-range-copy'),
+    simple_tidy_warn_pattern('performance-implicit-cast-in-loop'),
+    simple_tidy_warn_pattern('performance-inefficient-string-concatenation'),
+    simple_tidy_warn_pattern('performance-type-promotion-in-math-fn'),
+    simple_tidy_warn_pattern('performance-unnecessary-copy-initialization'),
+    simple_tidy_warn_pattern('performance-unnecessary-value-param'),
+    group_tidy_warn_pattern('performance'),
+    group_tidy_warn_pattern('readability'),
+
+    # warnings from clang-tidy's clang-analyzer checks
     {'category': 'C/C++', 'severity': Severity.ANALYZER,
      'description': 'clang-analyzer Unreachable code',
      'patterns': [r".*: warning: This statement is never executed.*UnreachableCode"]},
@@ -1857,18 +1887,12 @@
      'description': 'clang-analyzer call path problems',
      'patterns': [r".*: warning: Call Path : .+"]},
     {'category': 'C/C++', 'severity': Severity.ANALYZER,
+     'description': 'clang-analyzer excessive padding',
+     'patterns': [r".*: warning: Excessive padding in '.*'"]},
+    {'category': 'C/C++', 'severity': Severity.ANALYZER,
      'description': 'clang-analyzer other',
      'patterns': [r".*: .+\[clang-analyzer-.+\]$",
                   r".*: Call Path : .+$"]},
-    {'category': 'C/C++', 'severity': Severity.TIDY,
-     'description': 'clang-tidy CERT',
-     'patterns': [r".*: .+\[cert-.+\]$"]},
-    {'category': 'C/C++', 'severity': Severity.TIDY,
-     'description': 'clang-tidy llvm',
-     'patterns': [r".*: .+\[llvm-.+\]$"]},
-    {'category': 'C/C++', 'severity': Severity.TIDY,
-     'description': 'clang-diagnostic',
-     'patterns': [r".*: .+\[clang-diagnostic-.+\]$"]},
 
     # catch-all for warnings this script doesn't know about yet
     {'category': 'C/C++', 'severity': Severity.UNKNOWN,
@@ -2387,7 +2411,8 @@
 
 
 def parse_input_file(infile):
-  """Parse input file, match warning lines."""
+  """Parse input file, collect parameters and warning lines."""
+  global android_root
   global platform_version
   global target_product
   global target_variant
@@ -2402,7 +2427,7 @@
     if warning_pattern.match(line):
       line = normalize_warning_line(line)
       warning_lines.add(line)
-    elif line_counter < 50:
+    elif line_counter < 100:
       # save a little bit of time by only doing this for the first few lines
       line_counter += 1
       m = re.search('(?<=^PLATFORM_VERSION=).*', line)
@@ -2414,6 +2439,9 @@
       m = re.search('(?<=^TARGET_BUILD_VARIANT=).*', line)
       if m is not None:
         target_variant = m.group(0)
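+      # Remember the source tree root from lines containing "TOP=<path>", so
+      # that warning paths can later be normalized against it.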
+      m = re.search('.* TOP=([^ ]*) .*', line)
+      if m is not None:
+        android_root = m.group(1)
   return warning_lines
 
 
@@ -2465,10 +2493,11 @@
     if (FlagURL == "") return line;
     if (FlagSeparator == "") {
       return line.replace(ParseLinePattern,
-        "<a href='" + FlagURL + "/$1'>$1</a>:$2:$3");
+        "<a target='_blank' href='" + FlagURL + "/$1'>$1</a>:$2:$3");
     }
     return line.replace(ParseLinePattern,
-      "<a href='" + FlagURL + "/$1" + FlagSeparator + "$2'>$1:$2</a>:$3");
+      "<a target='_blank' href='" + FlagURL + "/$1" + FlagSeparator +
+        "$2'>$1:$2</a>:$3");
   }
   function createArrayOfDictionaries(n) {
     var result = [];
@@ -2672,48 +2701,46 @@
   return category['description']
 
 
-def string_for_csv(s):
-  # Only some Java warning desciptions have used quotation marks.
-  # TODO(chh): if s has double quote character, s should be quoted.
-  if ',' in s:
-    # TODO(chh): replace a double quote with two double quotes in s.
-    return '"{}"'.format(s)
-  return s
-
-
-def count_severity(sev, kind):
+def count_severity(writer, sev, kind):
   """Count warnings of given severity."""
   total = 0
   for i in warn_patterns:
     if i['severity'] == sev and i['members']:
       n = len(i['members'])
       total += n
-      warning = string_for_csv(kind + ': ' + description_for_csv(i))
-      print '{},,{}'.format(n, warning)
+      warning = kind + ': ' + description_for_csv(i)
+      writer.writerow([n, '', warning])
       # print number of warnings for each project, ordered by project name.
       projects = i['projects'].keys()
       projects.sort()
       for p in projects:
-        print '{},{},{}'.format(i['projects'][p], p, warning)
-  print '{},,{}'.format(total, kind + ' warnings')
+        writer.writerow([i['projects'][p], p, warning])
+  writer.writerow([total, '', kind + ' warnings'])
+
   return total
 
 
 # dump number of warnings in csv format to stdout
-def dump_csv():
+def dump_csv(writer):
   """Dump number of warnings in csv format to stdout."""
   sort_warnings()
   total = 0
   for s in Severity.range:
-    total += count_severity(s, Severity.column_headers[s])
-  print '{},,{}'.format(total, 'All warnings')
+    total += count_severity(writer, s, Severity.column_headers[s])
+  writer.writerow([total, '', 'All warnings'])
 
 
 def main():
   warning_lines = parse_input_file(open(args.buildlog, 'r'))
   parallel_classify_warnings(warning_lines)
+  # If the user passed --csvpath, save the CSV output to that path.
+  # If the user also passed --gencsv, write the CSV output to stdout as well.
+  # Without --gencsv, dump the HTML report to stdout instead.
+  if args.csvpath:
+    with open(args.csvpath, 'w') as f:
+      dump_csv(csv.writer(f, lineterminator='\n'))
   if args.gencsv:
-    dump_csv()
+    dump_csv(csv.writer(sys.stdout, lineterminator='\n'))
   else:
     dump_html()