Merge "Includes odm SELinux contexts files for recovery"
diff --git a/Changes.md b/Changes.md
index 05f54b8..37bbad0 100644
--- a/Changes.md
+++ b/Changes.md
@@ -1,6 +1,6 @@
 # Build System Changes for Android.mk Writers
 
-## Deprecating envsetup.sh variables in Makefiles
+## Deprecating / obsoleting envsetup.sh variables in Makefiles
 
 It is not required to source envsetup.sh before running a build. Many scripts,
 including a majority of our automated build systems, do not do so. Make will
@@ -14,8 +14,9 @@
 
 To fix this, we're marking the variables that are set in envsetup.sh as
 deprecated in the makefiles. This will trigger a warning every time one is read
-(or written) inside Kati. Once all the warnings have been removed, we'll switch
-this to obsolete, and any references will become errors.
+(or written) inside Kati. Once all the warnings have been removed for a
+particular variable, we'll switch it to obsolete, and any references will become
+errors.
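
For context, a minimal sketch of how such a variable can be marked, assuming Kati's
`KATI_deprecated_var` and `KATI_obsolete_var` built-ins (these take effect only when the
makefile is parsed by Kati, not by plain GNU make):

```make
# Mark a variable as deprecated: any read or write triggers a warning.
$(KATI_deprecated_var OUT,Use OUT_DIR instead)
# Later, flip it to obsolete: any read or write becomes a hard error.
$(KATI_obsolete_var OUT,Use OUT_DIR instead)
```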
 
 ### envsetup.sh variables with make equivalents
 
@@ -85,6 +86,16 @@
 $(TARGET): myscript.py $(sort $(shell find my/python/lib -name '*.py'))
 	PYTHONPATH=my/python/lib:$$PYTHONPATH myscript.py -o $@
 ```
+### Stop using PRODUCT_COMPATIBILITY_MATRIX_LEVEL_OVERRIDE directly {#PRODUCT_COMPATIBILITY_MATRIX_LEVEL_OVERRIDE}
+
+Specify the Framework Compatibility Matrix version in the device manifest by adding a
+`target-level` attribute to its root element, `<manifest>`. If
+`PRODUCT_COMPATIBILITY_MATRIX_LEVEL_OVERRIDE` is 26 or 27, you can add `target-level="1"`
+to your device manifest instead.
+
+### Stop using USE_CLANG_PLATFORM_BUILD {#USE_CLANG_PLATFORM_BUILD}
+
+Clang is the default and only supported Android compiler, so there is no reason
+for this option to exist.
 
 ### Other envsetup.sh variables  {#other_envsetup_variables}
 
diff --git a/CleanSpec.mk b/CleanSpec.mk
index 3ba1f45..5ab64b3 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -444,6 +444,25 @@
 
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/APPS/*/flat-res)
 
+# Remove old VNDK directories without version
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib/vndk)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib/vndk-sp)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib64/vndk)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib64/vndk-sp)
+
+# Remove old dex output directories
+$(call add-clean-step, rm -rf $(TARGET_OUT_COMMON_INTERMEDIATES)/*/*_intermediates/with-local/)
+$(call add-clean-step, rm -rf $(TARGET_OUT_COMMON_INTERMEDIATES)/*/*_intermediates/no-local/)
+$(call add-clean-step, rm -rf $(HOST_OUT_COMMON_INTERMEDIATES)/*/*_intermediates/with-local/)
+$(call add-clean-step, rm -rf $(HOST_OUT_COMMON_INTERMEDIATES)/*/*_intermediates/no-local/)
+
+# Remove legacy VINTF metadata files
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/manifest.xml)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/vendor/manifest.xml)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/vendor/manifest.xml)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/vendor/compatibility_matrix.xml)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/vendor/compatibility_matrix.xml)
+
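Each step appended here runs at most once per output directory: `add-clean-step` (defined in
`build/make/core/cleanbuild.mk`) records the step list, and incremental builds execute only the
steps they have not seen before, which is why new entries must go at the end. A sketch of adding
one (hypothetical path):

```make
# Appended at the end of the list, per the banner below.
$(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/ETC/example_intermediates)
```
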
 # ************************************************
 # NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
 # ************************************************
diff --git a/OWNERS b/OWNERS
index 89b446a..7a59f70 100644
--- a/OWNERS
+++ b/OWNERS
@@ -1,2 +1,3 @@
 ccross@android.com
 dwillemsen@google.com
+nanzhang@google.com
diff --git a/core/Makefile b/core/Makefile
index 2834825..9f164d8 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -4,15 +4,6 @@
 # intermediates-dir-for
 LOCAL_PATH := $(BUILD_SYSTEM)
 
-# Pick a reasonable string to use to identify files.
-ifneq (,$(filter eng.%,$(BUILD_NUMBER)))
-  # BUILD_NUMBER has a timestamp in it, which means that
-  # it will change every time.  Pick a stable value.
-  FILE_NAME_TAG := eng.$(USER)
-else
-  FILE_NAME_TAG := $(BUILD_NUMBER)
-endif
-
 # -----------------------------------------------------------------
 # Define rules to copy PRODUCT_COPY_FILES defined by the product.
 # PRODUCT_COPY_FILES contains words like <source file>:<dest file>[:<owner>].
@@ -123,9 +114,30 @@
 endif
 
 # -----------------------------------------------------------------
+# FINAL_VENDOR_DEFAULT_PROPERTIES will be installed in vendor/default.prop if
+# property_overrides_split_enabled is true. Otherwise it will be installed in
+# ROOT/default.prop.
+ifdef BOARD_VNDK_VERSION
+  ifeq ($(BOARD_VNDK_VERSION),current)
+    FINAL_VENDOR_DEFAULT_PROPERTIES := ro.vndk.version=$(PLATFORM_VNDK_VERSION)
+  else
+    FINAL_VENDOR_DEFAULT_PROPERTIES := ro.vndk.version=$(BOARD_VNDK_VERSION)
+  endif
+else
+  FINAL_VENDOR_DEFAULT_PROPERTIES :=
+endif
+FINAL_VENDOR_DEFAULT_PROPERTIES += \
+    $(call collapse-pairs, $(PRODUCT_DEFAULT_PROPERTY_OVERRIDES))
+FINAL_VENDOR_DEFAULT_PROPERTIES := $(call uniq-pairs-by-first-component, \
+    $(FINAL_VENDOR_DEFAULT_PROPERTIES),=)
+
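As a standalone illustration of the version-selection logic above (hypothetical values; the
dedup step and the install location are handled by the surrounding build system):

```make
# Minimal sketch, runnable with plain GNU make.
BOARD_VNDK_VERSION := current
PLATFORM_VNDK_VERSION := 28

ifeq ($(BOARD_VNDK_VERSION),current)
  vndk_prop := ro.vndk.version=$(PLATFORM_VNDK_VERSION)
else
  vndk_prop := ro.vndk.version=$(BOARD_VNDK_VERSION)
endif

$(info $(vndk_prop))  # prints "ro.vndk.version=28"
all: ;
```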
+# -----------------------------------------------------------------
 # prop.default
 ifdef property_overrides_split_enabled
 INSTALLED_DEFAULT_PROP_TARGET := $(TARGET_OUT)/etc/prop.default
+INSTALLED_DEFAULT_PROP_OLD_TARGET := $(TARGET_ROOT_OUT)/default.prop
+ALL_DEFAULT_INSTALLED_MODULES += $(INSTALLED_DEFAULT_PROP_OLD_TARGET)
+$(INSTALLED_DEFAULT_PROP_OLD_TARGET): $(INSTALLED_DEFAULT_PROP_TARGET)
 else
 # legacy path
 INSTALLED_DEFAULT_PROP_TARGET := $(TARGET_ROOT_OUT)/default.prop
@@ -136,7 +148,7 @@
     $(call collapse-pairs, $(PRODUCT_SYSTEM_DEFAULT_PROPERTIES))
 ifndef property_overrides_split_enabled
   FINAL_DEFAULT_PROPERTIES += \
-      $(call collapse-pairs, $(PRODUCT_DEFAULT_PROPERTY_OVERRIDES))
+      $(call collapse-pairs, $(FINAL_VENDOR_DEFAULT_PROPERTIES))
 endif
 FINAL_DEFAULT_PROPERTIES := $(call uniq-pairs-by-first-component, \
     $(FINAL_DEFAULT_PROPERTIES),=)
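
`uniq-pairs-by-first-component` comes from `build/make/core/definitions.mk`; a rough
re-implementation sketch of its assumed first-one-wins behavior (hypothetical helper, not the
real definition):

```make
# Keeps the first key<sep>value pair for each key; later duplicates are dropped.
# $(1): list of pairs, $(2): separator (e.g. =).
define uniq-pairs-sketch
$(strip $(if $(1),$(firstword $(1)) \
  $(call uniq-pairs-sketch,$(filter-out \
    $(firstword $(subst $(2),$(2) ,$(firstword $(1))))%,\
    $(wordlist 2,$(words $(1)),$(1))),$(2))))
endef

$(info $(call uniq-pairs-sketch,a=1 b=2 a=3,=))  # prints "a=1 b=2"
all: ;
```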
@@ -158,10 +170,10 @@
 	$(hide) echo ro.bootimage.build.date=`$(DATE_FROM_FILE)`>>$@
 	$(hide) echo ro.bootimage.build.date.utc=`$(DATE_FROM_FILE) +%s`>>$@
 	$(hide) echo ro.bootimage.build.fingerprint="$(BUILD_FINGERPRINT_FROM_FILE)">>$@
-	$(hide) build/tools/post_process_props.py $@
+	$(hide) build/make/tools/post_process_props.py $@
 ifdef property_overrides_split_enabled
 	$(hide) mkdir -p $(TARGET_ROOT_OUT)
-	$(hide) ln -sf system/etc/prop.default $(TARGET_ROOT_OUT)/default.prop
+	$(hide) ln -sf system/etc/prop.default $(INSTALLED_DEFAULT_PROP_OLD_TARGET)
 endif
 
 # -----------------------------------------------------------------
@@ -171,16 +183,6 @@
 INSTALLED_VENDOR_DEFAULT_PROP_TARGET := $(TARGET_OUT_VENDOR)/default.prop
 ALL_DEFAULT_INSTALLED_MODULES += $(INSTALLED_VENDOR_DEFAULT_PROP_TARGET)
 
-ifdef BOARD_VNDK_VERSION
-FINAL_VENDOR_DEFAULT_PROPERTIES := ro.vndk.version=${BOARD_VNDK_VERSION}
-else
-FINAL_VENDOR_DEFAULT_PROPERTIES :=
-endif
-FINAL_VENDOR_DEFAULT_PROPERTIES += \
-    $(call collapse-pairs, $(PRODUCT_DEFAULT_PROPERTY_OVERRIDES))
-FINAL_VENDOR_DEFAULT_PROPERTIES := $(call uniq-pairs-by-first-component, \
-    $(FINAL_VENDOR_DEFAULT_PROPERTIES),=)
-
 $(INSTALLED_VENDOR_DEFAULT_PROP_TARGET): $(INSTALLED_DEFAULT_PROP_TARGET)
 	@echo Target buildinfo: $@
 	@mkdir -p $(dir $@)
@@ -189,7 +191,7 @@
 	        echo "#" >> $@;
 	$(hide) $(foreach line,$(FINAL_VENDOR_DEFAULT_PROPERTIES), \
 		echo "$(line)" >> $@;)
-	$(hide) build/tools/post_process_props.py $@
+	$(hide) build/make/tools/post_process_props.py $@
 
 endif  # property_overrides_split_enabled
 
@@ -227,28 +229,37 @@
 
 # The string used to uniquely identify the combined build and product; used by the OTA server.
 ifeq (,$(strip $(BUILD_FINGERPRINT)))
-  ifneq ($(filter eng.%,$(BUILD_NUMBER)),)
-    BF_BUILD_NUMBER := $(USER)$(shell $(DATE) +%m%d%H%M)
+  ifeq ($(strip $(HAS_BUILD_NUMBER)),false)
+    BF_BUILD_NUMBER := $(USER)$$($(DATE_FROM_FILE) +%m%d%H%M)
   else
-    BF_BUILD_NUMBER := $(BUILD_NUMBER)
+    BF_BUILD_NUMBER := $(file <$(BUILD_NUMBER_FILE))
   endif
   BUILD_FINGERPRINT := $(PRODUCT_BRAND)/$(TARGET_PRODUCT)/$(TARGET_DEVICE):$(PLATFORM_VERSION)/$(BUILD_ID)/$(BF_BUILD_NUMBER):$(TARGET_BUILD_VARIANT)/$(BUILD_VERSION_TAGS)
 endif
-ifneq ($(words $(BUILD_FINGERPRINT)),1)
-  $(error BUILD_FINGERPRINT cannot contain spaces: "$(BUILD_FINGERPRINT)")
-endif
+# unset it for safety.
+BF_BUILD_NUMBER :=
 
-$(shell mkdir -p $(PRODUCT_OUT) && echo $(BUILD_FINGERPRINT) > $(PRODUCT_OUT)/build_fingerprint.txt)
-BUILD_FINGERPRINT_FROM_FILE := $$(cat $(PRODUCT_OUT)/build_fingerprint.txt)
+BUILD_FINGERPRINT_FILE := $(PRODUCT_OUT)/build_fingerprint.txt
+ifneq (,$(shell mkdir -p $(PRODUCT_OUT) && echo $(BUILD_FINGERPRINT) >$(BUILD_FINGERPRINT_FILE) && grep " " $(BUILD_FINGERPRINT_FILE)))
+  $(error BUILD_FINGERPRINT cannot contain spaces: "$(file <$(BUILD_FINGERPRINT_FILE))")
+endif
+BUILD_FINGERPRINT_FROM_FILE := $$(cat $(BUILD_FINGERPRINT_FILE))
+# unset it for safety.
+BUILD_FINGERPRINT :=
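
The new check writes the value to a file and greps it for spaces, so the old make-level
`$(words ...)` assertion is no longer needed and `$(file <...)` can echo the offending value
back. A standalone sketch of the same trick (hypothetical variable and path; `$(file ...)`
requires GNU make 4.x):

```make
FINGERPRINT := brand/product/device:9/PQ1A/123456:user/release-keys
out_file := /tmp/fingerprint.txt

# Write the value, then fail if grep finds a space in what was written.
ifneq (,$(shell mkdir -p $(dir $(out_file)) && echo $(FINGERPRINT) >$(out_file) && grep " " $(out_file)))
  $(error FINGERPRINT cannot contain spaces: "$(file <$(out_file))")
endif

all: ;
```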
 
 # The string used to uniquely identify the system build; used by the OTA server.
 # This purposefully excludes any product-specific variables.
 ifeq (,$(strip $(BUILD_THUMBPRINT)))
-  BUILD_THUMBPRINT := $(PLATFORM_VERSION)/$(BUILD_ID)/$(BUILD_NUMBER):$(TARGET_BUILD_VARIANT)/$(BUILD_VERSION_TAGS)
+  BUILD_THUMBPRINT := $(PLATFORM_VERSION)/$(BUILD_ID)/$(BUILD_NUMBER_FROM_FILE):$(TARGET_BUILD_VARIANT)/$(BUILD_VERSION_TAGS)
 endif
-ifneq ($(words $(BUILD_THUMBPRINT)),1)
-  $(error BUILD_THUMBPRINT cannot contain spaces: "$(BUILD_THUMBPRINT)")
+
+BUILD_THUMBPRINT_FILE := $(PRODUCT_OUT)/build_thumbprint.txt
+ifneq (,$(shell mkdir -p $(PRODUCT_OUT) && echo $(BUILD_THUMBPRINT) >$(BUILD_THUMBPRINT_FILE) && grep " " $(BUILD_THUMBPRINT_FILE)))
+  $(error BUILD_THUMBPRINT cannot contain spaces: "$(file <$(BUILD_THUMBPRINT_FILE))")
 endif
+BUILD_THUMBPRINT_FROM_FILE := $$(cat $(BUILD_THUMBPRINT_FILE))
+# unset it for safety.
+BUILD_THUMBPRINT :=
 
 KNOWN_OEM_THUMBPRINT_PROPERTIES := \
     ro.product.brand \
@@ -281,8 +292,8 @@
 $(strip $(subst _,-, $(firstword $(1))))
 endef
 
-BUILDINFO_SH := build/tools/buildinfo.sh
-VENDOR_BUILDINFO_SH := build/tools/vendor_buildinfo.sh
+BUILDINFO_SH := build/make/tools/buildinfo.sh
+VENDOR_BUILDINFO_SH := build/make/tools/vendor_buildinfo.sh
 
 # TARGET_BUILD_FLAVOR and ro.build.flavor are used only by the test
 # harness to distinguish builds. Only add _asan for a sanitized build
@@ -336,7 +347,7 @@
 			PLATFORM_VERSION_ALL_CODENAMES="$(PLATFORM_VERSION_ALL_CODENAMES)" \
 			BUILD_VERSION_TAGS="$(BUILD_VERSION_TAGS)" \
 			BUILD_FINGERPRINT="$(BUILD_FINGERPRINT_FROM_FILE)" \
-			$(if $(OEM_THUMBPRINT_PROPERTIES),BUILD_THUMBPRINT="$(BUILD_THUMBPRINT)") \
+			$(if $(OEM_THUMBPRINT_PROPERTIES),BUILD_THUMBPRINT="$(BUILD_THUMBPRINT_FROM_FILE)") \
 			TARGET_CPU_ABI_LIST="$(TARGET_CPU_ABI_LIST)" \
 			TARGET_CPU_ABI_LIST_32_BIT="$(TARGET_CPU_ABI_LIST_32_BIT)" \
 			TARGET_CPU_ABI_LIST_64_BIT="$(TARGET_CPU_ABI_LIST_64_BIT)" \
@@ -360,7 +371,7 @@
 	$(hide) $(foreach line,$(FINAL_BUILD_PROPERTIES), \
 		echo "$(line)" >> $@;)
 	$(hide) cat $(INSTALLED_ANDROID_INFO_TXT_TARGET) | grep 'require version-' | sed -e 's/require version-/ro.build.expect./g' >> $@
-	$(hide) build/tools/post_process_props.py $@ $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_PROPERTY_BLACKLIST)
+	$(hide) build/make/tools/post_process_props.py $@ $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_PROPERTY_BLACKLIST)
 
 build_desc :=
 
@@ -399,6 +410,9 @@
 	$(hide) echo ro.vendor.build.date=`$(DATE_FROM_FILE)`>>$@
 	$(hide) echo ro.vendor.build.date.utc=`$(DATE_FROM_FILE) +%s`>>$@
 	$(hide) echo ro.vendor.build.fingerprint="$(BUILD_FINGERPRINT_FROM_FILE)">>$@
+	$(hide) echo ro.vendor.product.cpu.abilist="$(TARGET_CPU_ABI_LIST)">>$@
+	$(hide) echo ro.vendor.product.cpu.abilist32="$(TARGET_CPU_ABI_LIST_32_BIT)">>$@
+	$(hide) echo ro.vendor.product.cpu.abilist64="$(TARGET_CPU_ABI_LIST_64_BIT)">>$@
 	$(hide) TARGET_DEVICE="$(TARGET_DEVICE)" \
 			PRODUCT_NAME="$(TARGET_PRODUCT)" \
 			PRODUCT_BRAND="$(PRODUCT_BRAND)" \
@@ -413,9 +427,35 @@
 	        echo "#" >> $@;
 	$(hide) $(foreach line,$(FINAL_VENDOR_BUILD_PROPERTIES), \
 		echo "$(line)" >> $@;)
-	$(hide) build/tools/post_process_props.py $@
+	$(hide) build/make/tools/post_process_props.py $@
 endif  # property_overrides_split_enabled
 
+# -----------------------------------------------------------------
+# product build.prop
+INSTALLED_PRODUCT_BUILD_PROP_TARGET := $(TARGET_OUT_PRODUCT)/build.prop
+ALL_DEFAULT_INSTALLED_MODULES += $(INSTALLED_PRODUCT_BUILD_PROP_TARGET)
+
+FINAL_PRODUCT_PROPERTIES += \
+    $(call collapse-pairs, $(PRODUCT_PRODUCT_PROPERTIES))
+FINAL_PRODUCT_PROPERTIES := $(call uniq-pairs-by-first-component, \
+    $(FINAL_PRODUCT_PROPERTIES),=)
+
+$(INSTALLED_PRODUCT_BUILD_PROP_TARGET):
+	@echo Target product buildinfo: $@
+	@mkdir -p $(dir $@)
+	$(hide) echo > $@
+ifdef BOARD_USES_PRODUCTIMAGE
+	$(hide) echo ro.product.build.date=`$(DATE_FROM_FILE)`>>$@
+	$(hide) echo ro.product.build.date.utc=`$(DATE_FROM_FILE) +%s`>>$@
+	$(hide) echo ro.product.build.fingerprint="$(BUILD_FINGERPRINT_FROM_FILE)">>$@
+endif  # BOARD_USES_PRODUCTIMAGE
+	$(hide) echo "#" >> $@; \
+	        echo "# ADDITIONAL PRODUCT PROPERTIES" >> $@; \
+	        echo "#" >> $@;
+	$(hide) $(foreach line,$(FINAL_PRODUCT_PROPERTIES), \
+		echo "$(line)" >> $@;)
+	$(hide) build/make/tools/post_process_props.py $@
+
 # ----------------------------------------------------------------
 
 # -----------------------------------------------------------------
@@ -458,7 +498,7 @@
 # Create empty package stats file if target builds no jar(s) or apk(s).
 	$(hide) touch $@
 else
-	$(hide) build/tools/dump-package-stats $^ > $@
+	$(hide) build/make/tools/dump-package-stats $^ > $@
 endif
 
 .PHONY: package-stats
@@ -467,8 +507,10 @@
 # -----------------------------------------------------------------
 # Cert-to-package mapping.  Used by the post-build signing tools.
 # Use a macro to add newline to each echo command
-define _apkcerts_echo_with_newline
-$(hide) echo $(1)
+define _apkcerts_write_line
+$(hide) echo -n 'name="$(1).apk" certificate="$(2)" private_key="$(3)"' >> $(5)
+$(if $(4), $(hide) echo -n ' compressed="$(4)"' >> $(5))
+$(hide) echo '' >> $(5)
 
 endef
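
For reference, a line the rewritten macro emits into `apkcerts.txt` looks roughly like this
(hypothetical package and key paths; the `compressed=` field is emitted only when argument 4 is
non-empty):

```
name="Settings.apk" certificate="build/target/product/security/platform.x509.pem" private_key="build/target/product/security/platform.pk8" compressed="gz"
```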
 
@@ -488,12 +530,8 @@
 	@rm -f $@
 	$(foreach p,$(PACKAGES),\
 	  $(if $(PACKAGES.$(p).EXTERNAL_KEY),\
-	    $(call _apkcerts_echo_with_newline,\
-	      'name="$(p).apk" certificate="EXTERNAL" \
-	      private_key=""' >> $@),\
-	    $(call _apkcerts_echo_with_newline,\
-	      'name="$(p).apk" certificate="$(PACKAGES.$(p).CERTIFICATE)" \
-	      private_key="$(PACKAGES.$(p).PRIVATE_KEY)"' >> $@)))
+	    $(call _apkcerts_write_line,$(p),"EXTERNAL","",$(PACKAGES.$(p).COMPRESSED),$@),\
+	    $(call _apkcerts_write_line,$(p),$(PACKAGES.$(p).CERTIFICATE),$(PACKAGES.$(p).PRIVATE_KEY),$(PACKAGES.$(p).COMPRESSED),$@)))
 	# In case value of PACKAGES is empty.
 	$(hide) touch $@
 
@@ -520,9 +558,9 @@
 SOONG_CONV_DATA := $(call intermediates-dir-for,PACKAGING,soong_conversion)/soong_conv_data
 $(SOONG_CONV_DATA):
 	@rm -f $@
-	@$(foreach s,$(SOONG_CONV),echo "$(s),$(sort $(SOONG_CONV.$(s).PROBLEMS)),$(sort $(filter-out $(SOONG_ALREADY_CONV),$(SOONG_CONV.$(s).DEPS)))" >>$@;)
+	@$(foreach s,$(SOONG_CONV),echo "$(s),$(SOONG_CONV.$(s).TYPE),$(sort $(SOONG_CONV.$(s).PROBLEMS)),$(sort $(filter-out $(SOONG_ALREADY_CONV),$(SOONG_CONV.$(s).DEPS)))" >>$@;)
 
-SOONG_TO_CONVERT_SCRIPT := build/tools/soong_to_convert.py
+SOONG_TO_CONVERT_SCRIPT := build/make/tools/soong_to_convert.py
 SOONG_TO_CONVERT := $(PRODUCT_OUT)/soong_to_convert.txt
 $(SOONG_TO_CONVERT): $(SOONG_CONV_DATA) $(SOONG_TO_CONVERT_SCRIPT)
 	@rm -f $@
@@ -536,14 +574,22 @@
 	@rm -f $@
 	echo "# Modules using -Wno-error" >> $@
 	for m in $(sort $(SOONG_MODULES_USING_WNO_ERROR) $(MODULES_USING_WNO_ERROR)); do echo $$m >> $@; done
-	echo "# Modules added default -Wall -Werror" >> $@
-	for m in $(sort $(SOONG_MODULES_ADDED_WERROR) $(MODULES_ADDED_WERROR)); do echo $$m >> $@; done
 	echo "# Modules added default -Wall" >> $@
 	for m in $(sort $(SOONG_MODULES_ADDED_WALL) $(MODULES_ADDED_WALL)); do echo $$m >> $@; done
 
 $(call dist-for-goals,droidcore,$(WALL_WERROR))
 
 # -----------------------------------------------------------------
+# Modules missing profile files
+PGO_PROFILE_MISSING := $(PRODUCT_OUT)/pgo_profile_file_missing.txt
+$(PGO_PROFILE_MISSING):
+	@rm -f $@
+	echo "# Modules missing PGO profile files" >> $@
+	for m in $(SOONG_MODULES_MISSING_PGO_PROFILE_FILE); do echo $$m >> $@; done
+
+$(call dist-for-goals,droidcore,$(PGO_PROFILE_MISSING))
+
+# -----------------------------------------------------------------
 # The dev key is used to sign this package, and as the key required
 # for future OTA packages installed by this system.  Actual product
 # deliverables will be re-signed by hand.  We expect this file to
@@ -577,7 +623,7 @@
 pdk_fusion_log_tags_file := $(patsubst $(PRODUCT_OUT)/%,$(_pdk_fusion_intermediates)/%,$(filter $(event_log_tags_file),$(ALL_PDK_FUSION_FILES)))
 
 $(all_event_log_tags_file): PRIVATE_SRC_FILES := $(all_event_log_tags_src) $(pdk_fusion_log_tags_file)
-$(all_event_log_tags_file): $(all_event_log_tags_src) $(pdk_fusion_log_tags_file) $(MERGETAGS) build/tools/event_log_tags.py
+$(all_event_log_tags_file): $(all_event_log_tags_src) $(pdk_fusion_log_tags_file) $(MERGETAGS) build/make/tools/event_log_tags.py
 	$(hide) mkdir -p $(dir $@)
 	$(hide) $(MERGETAGS) -o $@ $(PRIVATE_SRC_FILES)
 
@@ -593,7 +639,7 @@
 
 $(event_log_tags_file): PRIVATE_SRC_FILES := $(event_log_tags_src) $(pdk_fusion_log_tags_file)
 $(event_log_tags_file): PRIVATE_MERGED_FILE := $(all_event_log_tags_file)
-$(event_log_tags_file): $(event_log_tags_src) $(all_event_log_tags_file) $(pdk_fusion_log_tags_file) $(MERGETAGS) build/tools/event_log_tags.py
+$(event_log_tags_file): $(event_log_tags_src) $(all_event_log_tags_file) $(pdk_fusion_log_tags_file) $(MERGETAGS) build/make/tools/event_log_tags.py
 	$(hide) mkdir -p $(dir $@)
 	$(hide) $(MERGETAGS) -o $@ -m $(PRIVATE_MERGED_FILE) $(PRIVATE_SRC_FILES)
 
@@ -818,8 +864,8 @@
 $(2) $(3): PRIVATE_MESSAGE := $(4)
 $(2) $(3): PRIVATE_DIR := $(5)
 $(2) : $(3)
-$(3) : $(6) $(BUILD_SYSTEM)/Makefile build/tools/generate-notice-files.py
-	build/tools/generate-notice-files.py --text-output $(2) \
+$(3) : $(6) $(BUILD_SYSTEM)/Makefile build/make/tools/generate-notice-files.py
+	build/make/tools/generate-notice-files.py --text-output $(2) \
 		$(if $(filter $(1),xml_excluded_vendor),-e vendor --xml-output, \
 		  $(if $(filter $(1),xml_vendor),-i vendor --xml-output, \
 		    --html-output)) $(3) \
@@ -838,7 +884,8 @@
 tools_notice_file_txt := $(HOST_OUT_INTERMEDIATES)/NOTICE.txt
 tools_notice_file_html := $(HOST_OUT_INTERMEDIATES)/NOTICE.html
 
-ifeq ($(PRODUCT_FULL_TREBLE),true)
+# TODO(b/69865032): Make PRODUCT_NOTICE_SPLIT the default behavior.
+ifeq ($(PRODUCT_NOTICE_SPLIT),true)
 target_notice_file_html_or_xml := $(TARGET_OUT_INTERMEDIATES)/NOTICE.xml
 target_notice_file_html_or_xml_gz := $(TARGET_OUT_INTERMEDIATES)/NOTICE.xml.gz
 installed_notice_html_or_xml_gz := $(TARGET_OUT)/etc/NOTICE.xml.gz
@@ -1009,7 +1056,7 @@
 ifneq (true,$(TARGET_USERIMAGES_SPARSE_SQUASHFS_DISABLED))
   INTERNAL_USERIMAGES_SPARSE_SQUASHFS_FLAG := -s
 endif
-ifneq ($(filter $(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE) $(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE),squashfs),)
+ifneq ($(filter $(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE) $(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE) $(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE),squashfs),)
 INTERNAL_USERIMAGES_DEPS += $(MAKE_SQUASHFS) $(MKSQUASHFSUSERIMG) $(IMG2SIMG)
 endif
 
@@ -1040,7 +1087,7 @@
 $(if $(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "system_fs_type=$(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
 $(if $(BOARD_SYSTEMIMAGE_EXTFS_INODE_COUNT),$(hide) echo "system_extfs_inode_count=$(BOARD_SYSTEMIMAGE_EXTFS_INODE_COUNT)" >> $(1))
 $(if $(BOARD_SYSTEMIMAGE_JOURNAL_SIZE),$(hide) echo "system_journal_size=$(BOARD_SYSTEMIMAGE_JOURNAL_SIZE)" >> $(1))
-$(if $(BOARD_HAS_EXT4_RESERVED_BLOCKS),$(hide) echo "has_ext4_reserved_blocks=$(BOARD_HAS_EXT4_RESERVED_BLOCKS)" >> $(1))
+$(if $(BOARD_EXT4_SHARE_DUP_BLOCKS),$(hide) echo "ext4_share_dup_blocks=$(BOARD_EXT4_SHARE_DUP_BLOCKS)" >> $(1))
 $(if $(BOARD_SYSTEMIMAGE_SQUASHFS_COMPRESSOR),$(hide) echo "system_squashfs_compressor=$(BOARD_SYSTEMIMAGE_SQUASHFS_COMPRESSOR)" >> $(1))
 $(if $(BOARD_SYSTEMIMAGE_SQUASHFS_COMPRESSOR_OPT),$(hide) echo "system_squashfs_compressor_opt=$(BOARD_SYSTEMIMAGE_SQUASHFS_COMPRESSOR_OPT)" >> $(1))
 $(if $(BOARD_SYSTEMIMAGE_SQUASHFS_BLOCK_SIZE),$(hide) echo "system_squashfs_block_size=$(BOARD_SYSTEMIMAGE_SQUASHFS_BLOCK_SIZE)" >> $(1))
@@ -1062,6 +1109,15 @@
 $(if $(BOARD_VENDORIMAGE_SQUASHFS_BLOCK_SIZE),$(hide) echo "vendor_squashfs_block_size=$(BOARD_VENDORIMAGE_SQUASHFS_BLOCK_SIZE)" >> $(1))
 $(if $(BOARD_VENDORIMAGE_SQUASHFS_DISABLE_4K_ALIGN),$(hide) echo "vendor_squashfs_disable_4k_align=$(BOARD_VENDORIMAGE_SQUASHFS_DISABLE_4K_ALIGN)" >> $(1))
 $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_BASE_FS_PATH),$(hide) echo "vendor_base_fs_file=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_BASE_FS_PATH)" >> $(1))
+$(if $(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "product_fs_type=$(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
+$(if $(BOARD_PRODUCTIMAGE_EXTFS_INODE_COUNT),$(hide) echo "product_extfs_inode_count=$(BOARD_PRODUCTIMAGE_EXTFS_INODE_COUNT)" >> $(1))
+$(if $(BOARD_PRODUCTIMAGE_PARTITION_SIZE),$(hide) echo "product_size=$(BOARD_PRODUCTIMAGE_PARTITION_SIZE)" >> $(1))
+$(if $(BOARD_PRODUCTIMAGE_JOURNAL_SIZE),$(hide) echo "product_journal_size=$(BOARD_PRODUCTIMAGE_JOURNAL_SIZE)" >> $(1))
+$(if $(BOARD_PRODUCTIMAGE_SQUASHFS_COMPRESSOR),$(hide) echo "product_squashfs_compressor=$(BOARD_PRODUCTIMAGE_SQUASHFS_COMPRESSOR)" >> $(1))
+$(if $(BOARD_PRODUCTIMAGE_SQUASHFS_COMPRESSOR_OPT),$(hide) echo "product_squashfs_compressor_opt=$(BOARD_PRODUCTIMAGE_SQUASHFS_COMPRESSOR_OPT)" >> $(1))
+$(if $(BOARD_PRODUCTIMAGE_SQUASHFS_BLOCK_SIZE),$(hide) echo "product_squashfs_block_size=$(BOARD_PRODUCTIMAGE_SQUASHFS_BLOCK_SIZE)" >> $(1))
+$(if $(BOARD_PRODUCTIMAGE_SQUASHFS_DISABLE_4K_ALIGN),$(hide) echo "product_squashfs_disable_4k_align=$(BOARD_PRODUCTIMAGE_SQUASHFS_DISABLE_4K_ALIGN)" >> $(1))
+$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_BASE_FS_PATH),$(hide) echo "product_base_fs_file=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_BASE_FS_PATH)" >> $(1))
 $(if $(BOARD_OEMIMAGE_PARTITION_SIZE),$(hide) echo "oem_size=$(BOARD_OEMIMAGE_PARTITION_SIZE)" >> $(1))
 $(if $(BOARD_OEMIMAGE_JOURNAL_SIZE),$(hide) echo "oem_journal_size=$(BOARD_OEMIMAGE_JOURNAL_SIZE)" >> $(1))
 $(if $(BOARD_OEMIMAGE_EXTFS_INODE_COUNT),$(hide) echo "oem_extfs_inode_count=$(BOARD_OEMIMAGE_EXTFS_INODE_COUNT)" >> $(1))
@@ -1076,6 +1132,7 @@
 $(if $(filter eng, $(TARGET_BUILD_VARIANT)),$(hide) echo "verity_disable=true" >> $(1))
 $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_VERITY_PARTITION),$(hide) echo "system_verity_block_device=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_VERITY_PARTITION)" >> $(1))
 $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_VERITY_PARTITION),$(hide) echo "vendor_verity_block_device=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_VERITY_PARTITION)" >> $(1))
+$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_VERITY_PARTITION),$(hide) echo "product_verity_block_device=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_VERITY_PARTITION)" >> $(1))
 $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT)" >> $(1))
 $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot_key=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_KEY)" >> $(1))
 $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot_subkey=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_SUBKEY)" >> $(1))
@@ -1096,6 +1153,13 @@
         $(hide) echo "avb_vendor_key_path=$(BOARD_AVB_VENDOR_KEY_PATH)" >> $(1)
         $(hide) echo "avb_vendor_algorithm=$(BOARD_AVB_VENDOR_ALGORITHM)" >> $(1)
         $(hide) echo "avb_vendor_rollback_index_location=$(BOARD_AVB_VENDOR_ROLLBACK_INDEX_LOCATION)" >> $(1)))
+$(if $(BOARD_AVB_ENABLE),$(hide) echo "avb_product_hashtree_enable=$(BOARD_AVB_ENABLE)" >> $(1))
+$(if $(BOARD_AVB_ENABLE),$(hide) echo "avb_product_add_hashtree_footer_args=$(BOARD_AVB_PRODUCT_ADD_HASHTREE_FOOTER_ARGS)" >> $(1))
+$(if $(BOARD_AVB_ENABLE),\
+    $(if $(BOARD_AVB_PRODUCT_KEY_PATH),\
+        $(hide) echo "avb_product_key_path=$(BOARD_AVB_PRODUCT_KEY_PATH)" >> $(1)
+        $(hide) echo "avb_product_algorithm=$(BOARD_AVB_PRODUCT_ALGORITHM)" >> $(1)
+        $(hide) echo "avb_product_rollback_index_location=$(BOARD_AVB_PRODUCT_ROLLBACK_INDEX_LOCATION)" >> $(1)))
 $(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)),\
     $(hide) echo "recovery_as_boot=true" >> $(1))
 $(if $(filter true,$(BOARD_BUILD_SYSTEM_ROOT_IMAGE)),\
@@ -1154,6 +1218,9 @@
 ifdef property_overrides_split_enabled
 recovery_build_props += $(INSTALLED_VENDOR_BUILD_PROP_TARGET)
 endif
+ifdef BOARD_USES_PRODUCTIMAGE
+recovery_build_props += $(INSTALLED_PRODUCT_BUILD_PROP_TARGET)
+endif
 recovery_resources_common := $(call include-path-for, recovery)/res
 
 # Set recovery_density to the density bucket of the device.
@@ -1199,19 +1266,23 @@
 recovery_wipe :=
 endif
 
-# Prior to A/B update, we used to have:
+# Traditionally with non-A/B OTA we have:
 #   boot.img + recovery-from-boot.p + recovery-resource.dat = recovery.img.
-# recovery-resource.dat is needed only if we carry a patch of the boot and
-# recovery images and invoke install-recovery.sh on the first boot post an
-# OTA update.
+# recovery-resource.dat is needed only if we carry an imgdiff patch of the boot and recovery images
+# and invoke install-recovery.sh on the first boot after an OTA update.
 #
 # We no longer need that if one of the following conditions holds:
-#   a) We carry a full copy of the recovery image
+#   a) We carry a full copy of the recovery image - no patching needed
 #      (BOARD_USES_FULL_RECOVERY_IMAGE = true);
-#   b) We build a single image that contains boot and recovery both
-#      (BOARD_USES_RECOVERY_AS_BOOT = true).
+#   b) We build a single image that contains boot and recovery both - no recovery image to install
+#      (BOARD_USES_RECOVERY_AS_BOOT = true);
+#   c) We build the root directory into the system image - no resource file is needed, as we
+#      use bsdiff (BOARD_BUILD_SYSTEM_ROOT_IMAGE = true).
+# Note that condition b) implies condition c), because of the earlier check in this file:
+# "BOARD_USES_RECOVERY_AS_BOOT = true must have BOARD_BUILD_SYSTEM_ROOT_IMAGE = true" (not vice
+# versa though).
 
-ifeq (,$(filter true, $(BOARD_USES_FULL_RECOVERY_IMAGE) $(BOARD_USES_RECOVERY_AS_BOOT)))
+ifeq (,$(filter true, $(BOARD_USES_FULL_RECOVERY_IMAGE) $(BOARD_BUILD_SYSTEM_ROOT_IMAGE)))
 # Named '.dat' so we don't attempt to use imgdiff for patching it.
 RECOVERY_RESOURCE_ZIP := $(TARGET_OUT)/etc/recovery-resource.dat
 else
@@ -1310,12 +1381,10 @@
   $(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)), \
     $(hide) $(call assert-max-image-size,$(1),$(call get-hash-image-max-size,$(BOARD_BOOTIMAGE_PARTITION_SIZE))), \
     $(hide) $(call assert-max-image-size,$(1),$(call get-hash-image-max-size,$(BOARD_RECOVERYIMAGE_PARTITION_SIZE))))
-  $(if $(and $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)),$(filter true,$(BOARD_AVB_ENABLE))), \
-      $(hide) $(AVBTOOL) add_hash_footer \
-        --image $(1) \
-        --partition_size $(BOARD_BOOTIMAGE_PARTITION_SIZE) \
-        --partition_name boot $(INTERNAL_AVB_BOOT_SIGNING_ARGS) \
-        $(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS))
+  $(if $(filter true,$(BOARD_AVB_ENABLE)), \
+    $(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)), \
+      $(hide) $(AVBTOOL) add_hash_footer --image $(1) --partition_size $(BOARD_BOOTIMAGE_PARTITION_SIZE) --partition_name boot $(INTERNAL_AVB_BOOT_SIGNING_ARGS) $(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS),\
+      $(hide) $(AVBTOOL) add_hash_footer --image $(1) --partition_size $(BOARD_RECOVERYIMAGE_PARTITION_SIZE) --partition_name recovery $(INTERNAL_AVB_RECOVERY_SIGNING_ARGS) $(BOARD_AVB_RECOVERY_ADD_HASH_FOOTER_ARGS)))
 endef
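
Expanded, the new non-recovery-as-boot branch amounts to an `avbtool add_hash_footer` invocation
along these lines (illustrative partition size and signing args; the real values come from
`BOARD_RECOVERYIMAGE_PARTITION_SIZE` and `INTERNAL_AVB_RECOVERY_SIGNING_ARGS`):

```
avbtool add_hash_footer --image recovery.img \
    --partition_size 33554432 --partition_name recovery \
    --algorithm SHA256_RSA4096 --key external/avb/test/data/testkey_rsa4096.pem
```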
 
 ADBD := $(TARGET_OUT_EXECUTABLES)/adbd
@@ -1398,7 +1467,8 @@
     $(ALL_GENERATED_SOURCES) \
     $(ALL_DEFAULT_INSTALLED_MODULES) \
     $(PDK_FUSION_SYSIMG_FILES) \
-    $(RECOVERY_RESOURCE_ZIP))
+    $(RECOVERY_RESOURCE_ZIP)) \
+    $(PDK_FUSION_SYMLINK_STAMP)
 
 FULL_SYSTEMIMAGE_DEPS := $(INTERNAL_SYSTEMIMAGE_FILES) $(INTERNAL_USERIMAGES_DEPS)
 
@@ -1422,7 +1492,7 @@
 	@mkdir -p $(dir $@)
 	@rm -f $@
 	$(hide) $(FILESLIST) $(TARGET_OUT) > $(@:.txt=.json)
-	$(hide) build/tools/fileslist_util.py -c $(@:.txt=.json) > $@
+	$(hide) build/make/tools/fileslist_util.py -c $(@:.txt=.json) > $@
 
 .PHONY: installed-file-list
 installed-file-list: $(INSTALLED_FILES_FILE)
@@ -1448,23 +1518,36 @@
 endef
 endif
 
+# Create symlink /system/product to /product if necessary.
+ifdef BOARD_USES_PRODUCTIMAGE
+define create-system-product-symlink
+$(hide) if [ -d $(TARGET_OUT)/product ] && [ ! -h $(TARGET_OUT)/product ]; then \
+  echo 'Non-symlink $(TARGET_OUT)/product detected!' 1>&2; \
+  echo 'You cannot install files to $(TARGET_OUT)/product while building a separate product.img!' 1>&2; \
+  exit 1; \
+fi
+$(hide) ln -sf /product $(TARGET_OUT)/product
+endef
+else
+define create-system-product-symlink
+endef
+endif
+
 # $(1): output file
 define build-systemimage-target
   @echo "Target system fs image: $(1)"
   $(call create-system-vendor-symlink)
+  $(call create-system-product-symlink)
   @mkdir -p $(dir $(1)) $(systemimage_intermediates) && rm -rf $(systemimage_intermediates)/system_image_info.txt
   $(call generate-userimage-prop-dictionary, $(systemimage_intermediates)/system_image_info.txt, \
       skip_fsck=true)
   $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
-      ./build/tools/releasetools/build_image.py \
+      build/make/tools/releasetools/build_image.py \
       $(TARGET_OUT) $(systemimage_intermediates)/system_image_info.txt $(1) $(TARGET_OUT) \
       || ( echo "Out of space? the tree size of $(TARGET_OUT) is (MB): " 1>&2 ;\
            du -sm $(TARGET_OUT) 1>&2;\
            if [ "$(INTERNAL_USERIMAGES_EXT_VARIANT)" == "ext4" ]; then \
                maxsize=$(BOARD_SYSTEMIMAGE_PARTITION_SIZE); \
-               if [ "$(BOARD_HAS_EXT4_RESERVED_BLOCKS)" == "true" ]; then \
-                   maxsize=$$((maxsize - 4096 * 4096)); \
-               fi; \
                echo "The max is $$(( maxsize / 1048576 )) MB." 1>&2 ;\
            else \
                echo "The max is $$(( $(BOARD_SYSTEMIMAGE_PARTITION_SIZE) / 1048576 )) MB." 1>&2 ;\
@@ -1486,15 +1569,21 @@
 # image size check calculation.
 ifneq ($(INSTALLED_RECOVERYIMAGE_TARGET),)
 ifneq ($(BOARD_USES_FULL_RECOVERY_IMAGE),true)
+ifeq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
+diff_tool := $(HOST_OUT_EXECUTABLES)/bsdiff
+else
+diff_tool := $(HOST_OUT_EXECUTABLES)/imgdiff
+endif
 intermediates := $(call intermediates-dir-for,PACKAGING,recovery_patch)
 RECOVERY_FROM_BOOT_PATCH := $(intermediates)/recovery_from_boot.p
-$(RECOVERY_FROM_BOOT_PATCH): $(INSTALLED_RECOVERYIMAGE_TARGET) \
-                             $(INSTALLED_BOOTIMAGE_TARGET) \
-			     $(HOST_OUT_EXECUTABLES)/imgdiff \
-	                     $(HOST_OUT_EXECUTABLES)/bsdiff
+$(RECOVERY_FROM_BOOT_PATCH): PRIVATE_DIFF_TOOL := $(diff_tool)
+$(RECOVERY_FROM_BOOT_PATCH): \
+		$(INSTALLED_RECOVERYIMAGE_TARGET) \
+		$(INSTALLED_BOOTIMAGE_TARGET) \
+		$(diff_tool)
 	@echo "Construct recovery from boot"
 	mkdir -p $(dir $@)
-	PATH=$(HOST_OUT_EXECUTABLES):$$PATH $(HOST_OUT_EXECUTABLES)/imgdiff $(INSTALLED_BOOTIMAGE_TARGET) $(INSTALLED_RECOVERYIMAGE_TARGET) $@
+	$(PRIVATE_DIFF_TOOL) $(INSTALLED_BOOTIMAGE_TARGET) $(INSTALLED_RECOVERYIMAGE_TARGET) $@
 else # $(BOARD_USES_FULL_RECOVERY_IMAGE) == true
 RECOVERY_FROM_BOOT_PATCH := $(INSTALLED_RECOVERYIMAGE_TARGET)
 endif
@@ -1529,6 +1618,7 @@
 define build-systemtarball-target
   $(call pretty,"Target system fs tarball: $(INSTALLED_SYSTEMTARBALL_TARGET)")
   $(call create-system-vendor-symlink)
+  $(call create-system-product-symlink)
   $(MKTARBALL) $(FS_GET_STATS) \
     $(PRODUCT_OUT) system $(PRIVATE_SYSTEM_TAR) \
     $(INSTALLED_SYSTEMTARBALL_TARGET) $(TARGET_OUT)
@@ -1609,6 +1699,10 @@
 	$(hide) cd $(dir $@) && zip -qryX $(notdir $@) \
 		$(TARGET_COPY_OUT_VENDOR)
 endif
+ifdef BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE
+	$(hide) cd $(dir $@) && zip -qryX $(notdir $@) \
+		$(TARGET_COPY_OUT_PRODUCT)
+endif
 ifneq ($(PDK_PLATFORM_JAVA_ZIP_CONTENTS),)
 	$(hide) cd $(OUT_DIR) && zip -qryX $(patsubst $(OUT_DIR)/%,%,$@) $(PDK_PLATFORM_JAVA_ZIP_CONTENTS)
 endif
@@ -1617,7 +1711,8 @@
 endif
 	@# Add dex-preopt files and config.
 	$(if $(PRIVATE_DEX_FILES),$(hide) cd $(OUT_DIR) && zip -qryX $(patsubst $(OUT_DIR)/%,%,$@ $(PRIVATE_DEX_FILES)))
-	$(hide) zip -qryXj $@ $(PRIVATE_ODEX_CONFIG)
+	$(hide) touch $(PRODUCT_OUT)/pdk.mk
+	$(hide) zip -qryXj $@ $(PRIVATE_ODEX_CONFIG) $(PRODUCT_OUT)/pdk.mk
 
 .PHONY: platform
 platform: $(INSTALLED_PLATFORM_ZIP)
@@ -1682,7 +1777,7 @@
   @mkdir -p $(userdataimage_intermediates) && rm -rf $(userdataimage_intermediates)/userdata_image_info.txt
   $(call generate-userimage-prop-dictionary, $(userdataimage_intermediates)/userdata_image_info.txt, skip_fsck=true)
   $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
-      ./build/tools/releasetools/build_image.py \
+      build/make/tools/releasetools/build_image.py \
       $(TARGET_OUT_DATA) $(userdataimage_intermediates)/userdata_image_info.txt $(INSTALLED_USERDATAIMAGE_TARGET) $(TARGET_OUT)
   $(hide) $(call assert-max-image-size,$(INSTALLED_USERDATAIMAGE_TARGET),$(BOARD_USERDATAIMAGE_PARTITION_SIZE))
 endef
@@ -1789,7 +1884,7 @@
   @mkdir -p $(cacheimage_intermediates) && rm -rf $(cacheimage_intermediates)/cache_image_info.txt
   $(call generate-userimage-prop-dictionary, $(cacheimage_intermediates)/cache_image_info.txt, skip_fsck=true)
   $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
-      ./build/tools/releasetools/build_image.py \
+      build/make/tools/releasetools/build_image.py \
       $(TARGET_OUT_CACHE) $(cacheimage_intermediates)/cache_image_info.txt $(INSTALLED_CACHEIMAGE_TARGET) $(TARGET_OUT)
   $(hide) $(call assert-max-image-size,$(INSTALLED_CACHEIMAGE_TARGET),$(BOARD_CACHEIMAGE_PARTITION_SIZE))
 endef
@@ -1824,7 +1919,8 @@
 INTERNAL_SYSTEMOTHERIMAGE_FILES := \
     $(filter $(TARGET_OUT_SYSTEM_OTHER)/%,\
       $(ALL_DEFAULT_INSTALLED_MODULES)\
-      $(ALL_PDK_FUSION_FILES))
+      $(ALL_PDK_FUSION_FILES)) \
+    $(PDK_FUSION_SYMLINK_STAMP)
 
 INSTALLED_FILES_FILE_SYSTEMOTHER := $(PRODUCT_OUT)/installed-files-system-other.txt
 $(INSTALLED_FILES_FILE_SYSTEMOTHER) : $(INTERNAL_SYSTEMOTHERIMAGE_FILES) $(FILESLIST)
@@ -1832,7 +1928,7 @@
 	@mkdir -p $(dir $@)
 	@rm -f $@
 	$(hide) $(FILESLIST) $(TARGET_OUT_SYSTEM_OTHER) > $(@:.txt=.json)
-	$(hide) build/tools/fileslist_util.py -c $(@:.txt=.json) > $@
+	$(hide) build/make/tools/fileslist_util.py -c $(@:.txt=.json) > $@
 
 systemotherimage_intermediates := \
     $(call intermediates-dir-for,PACKAGING,system_other)
@@ -1845,7 +1941,7 @@
   @mkdir -p $(systemotherimage_intermediates) && rm -rf $(systemotherimage_intermediates)/system_other_image_info.txt
   $(call generate-userimage-prop-dictionary, $(systemotherimage_intermediates)/system_other_image_info.txt, skip_fsck=true)
   $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
-      ./build/tools/releasetools/build_image.py \
+      build/make/tools/releasetools/build_image.py \
       $(TARGET_OUT_SYSTEM_OTHER) $(systemotherimage_intermediates)/system_other_image_info.txt $(INSTALLED_SYSTEMOTHERIMAGE_TARGET) $(TARGET_OUT)
   $(hide) $(call assert-max-image-size,$(INSTALLED_SYSTEMOTHERIMAGE_TARGET),$(BOARD_SYSTEMIMAGE_PARTITION_SIZE))
 endef
@@ -1871,7 +1967,8 @@
 INTERNAL_VENDORIMAGE_FILES := \
     $(filter $(TARGET_OUT_VENDOR)/%,\
       $(ALL_DEFAULT_INSTALLED_MODULES)\
-      $(ALL_PDK_FUSION_FILES))
+      $(ALL_PDK_FUSION_FILES)) \
+    $(PDK_FUSION_SYMLINK_STAMP)
 
 # platform.zip depends on $(INTERNAL_VENDORIMAGE_FILES).
 $(INSTALLED_PLATFORM_ZIP) : $(INTERNAL_VENDORIMAGE_FILES)
@@ -1882,7 +1979,7 @@
 	@mkdir -p $(dir $@)
 	@rm -f $@
 	$(hide) $(FILESLIST) $(TARGET_OUT_VENDOR) > $(@:.txt=.json)
-	$(hide) build/tools/fileslist_util.py -c $(@:.txt=.json) > $@
+	$(hide) build/make/tools/fileslist_util.py -c $(@:.txt=.json) > $@
 
 vendorimage_intermediates := \
     $(call intermediates-dir-for,PACKAGING,vendor)
@@ -1895,7 +1992,7 @@
   $(if $(BOARD_VENDOR_KERNEL_MODULES), \
     $(call build-image-kernel-modules,$(BOARD_VENDOR_KERNEL_MODULES),$(TARGET_OUT_VENDOR),vendor/,$(call intermediates-dir-for,PACKAGING,depmod_vendor)))
   $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
-      ./build/tools/releasetools/build_image.py \
+      build/make/tools/releasetools/build_image.py \
       $(TARGET_OUT_VENDOR) $(vendorimage_intermediates)/vendor_image_info.txt $(INSTALLED_VENDORIMAGE_TARGET) $(TARGET_OUT)
   $(hide) $(call assert-max-image-size,$(INSTALLED_VENDORIMAGE_TARGET),$(BOARD_VENDORIMAGE_PARTITION_SIZE))
 endef
@@ -1917,6 +2014,56 @@
 endif
 
 # -----------------------------------------------------------------
+# product partition image
+ifdef BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE
+INTERNAL_PRODUCTIMAGE_FILES := \
+    $(filter $(TARGET_OUT_PRODUCT)/%,\
+      $(ALL_DEFAULT_INSTALLED_MODULES)\
+      $(ALL_PDK_FUSION_FILES)) \
+    $(PDK_FUSION_SYMLINK_STAMP)
+
+# platform.zip depends on $(INTERNAL_PRODUCTIMAGE_FILES).
+$(INSTALLED_PLATFORM_ZIP) : $(INTERNAL_PRODUCTIMAGE_FILES)
+
+INSTALLED_FILES_FILE_PRODUCT := $(PRODUCT_OUT)/installed-files-product.txt
+$(INSTALLED_FILES_FILE_PRODUCT) : $(INTERNAL_PRODUCTIMAGE_FILES) $(FILESLIST)
+	@echo Installed file list: $@
+	@mkdir -p $(dir $@)
+	@rm -f $@
+	$(hide) $(FILESLIST) $(TARGET_OUT_PRODUCT) > $(@:.txt=.json)
+	$(hide) build/make/tools/fileslist_util.py -c $(@:.txt=.json) > $@
+
+productimage_intermediates := \
+    $(call intermediates-dir-for,PACKAGING,product)
+BUILT_PRODUCTIMAGE_TARGET := $(PRODUCT_OUT)/product.img
+define build-productimage-target
+  $(call pretty,"Target product fs image: $(INSTALLED_PRODUCTIMAGE_TARGET)")
+  @mkdir -p $(TARGET_OUT_PRODUCT)
+  @mkdir -p $(productimage_intermediates) && rm -rf $(productimage_intermediates)/product_image_info.txt
+  $(call generate-userimage-prop-dictionary, $(productimage_intermediates)/product_image_info.txt, skip_fsck=true)
+  $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
+      build/make/tools/releasetools/build_image.py \
+      $(TARGET_OUT_PRODUCT) $(productimage_intermediates)/product_image_info.txt $(INSTALLED_PRODUCTIMAGE_TARGET) $(TARGET_OUT)
+  $(hide) $(call assert-max-image-size,$(INSTALLED_PRODUCTIMAGE_TARGET),$(BOARD_PRODUCTIMAGE_PARTITION_SIZE))
+endef
+
+# We just build this directly to the install location.
+INSTALLED_PRODUCTIMAGE_TARGET := $(BUILT_PRODUCTIMAGE_TARGET)
+$(INSTALLED_PRODUCTIMAGE_TARGET): $(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_PRODUCTIMAGE_FILES) $(INSTALLED_FILES_FILE_PRODUCT) $(BUILD_IMAGE_SRCS)
+	$(build-productimage-target)
+
+.PHONY: productimage-nodeps pnod
+productimage-nodeps pnod: | $(INTERNAL_USERIMAGES_DEPS)
+	$(build-productimage-target)
+
+sync: $(INTERNAL_PRODUCTIMAGE_FILES)
+
+else ifdef BOARD_PREBUILT_PRODUCTIMAGE
+INSTALLED_PRODUCTIMAGE_TARGET := $(PRODUCT_OUT)/product.img
+$(eval $(call copy-one-file,$(BOARD_PREBUILT_PRODUCTIMAGE),$(INSTALLED_PRODUCTIMAGE_TARGET)))
+endif
+
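With these rules in place, the image can be rebuilt without dependency re-checking, mirroring
the existing `vendorimage-nodeps`/`vnod` convention (typical invocation after `lunch`):

```
m productimage-nodeps   # or the short alias: m pnod
```
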
+# -----------------------------------------------------------------
 # dtbo image
 ifdef BOARD_PREBUILT_DTBOIMAGE
 INSTALLED_DTBOIMAGE_TARGET := $(PRODUCT_OUT)/dtbo.img
@@ -1936,17 +2083,6 @@
 
 endif
 
-# Convert to lower case without requiring a shell, which isn't cacheable.
-to-lower = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,\
-$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,\
-$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,\
-$(subst X,x,$(subst Y,y,$(subst Z,z,$1))))))))))))))))))))))))))
-# Convert to upper case without requiring a shell, which isn't cacheable.
-to-upper=$(subst a,A,$(subst b,B,$(subst c,C,$(subst d,D,$(subst e,E,$(subst f,F,$(subst g,G,\
-$(subst h,H,$(subst i,I,$(subst j,J,$(subst k,K,$(subst l,L,$(subst m,M,$(subst n,N,$(subst o,O,\
-$(subst p,P,$(subst q,Q,$(subst r,R,$(subst s,S,$(subst t,T,$(subst u,U,$(subst v,V,$(subst w,W,\
-$(subst x,X,$(subst y,Y,$(subst z,Z,$1))))))))))))))))))))))))))
-
 # -----------------------------------------------------------------
 # vbmeta image
 ifeq ($(BOARD_AVB_ENABLE),true)
@@ -1969,6 +2105,8 @@
 DTBO_FOOTER_ARGS := BOARD_AVB_DTBO_ADD_HASH_FOOTER_ARGS
 SYSTEM_FOOTER_ARGS := BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS
 VENDOR_FOOTER_ARGS := BOARD_AVB_VENDOR_ADD_HASHTREE_FOOTER_ARGS
+RECOVERY_FOOTER_ARGS := BOARD_AVB_RECOVERY_ADD_HASH_FOOTER_ARGS
+PRODUCT_FOOTER_ARGS := BOARD_AVB_PRODUCT_ADD_HASHTREE_FOOTER_ARGS
 
 # Check and set required build variables for a chain partition.
 # $(1): the partition to enable AVB chain, e.g., BOOT or SYSTEM.
@@ -2021,6 +2159,15 @@
 endif
 endif
 
+ifdef INSTALLED_PRODUCTIMAGE_TARGET
+ifdef BOARD_AVB_PRODUCT_KEY_PATH
+$(eval $(call check-and-set-avb-chain-args,PRODUCT))
+else
+INTERNAL_AVB_MAKE_VBMETA_IMAGE_ARGS += \
+    --include_descriptors_from_image $(INSTALLED_PRODUCTIMAGE_TARGET)
+endif
+endif
+
 ifdef INSTALLED_DTBOIMAGE_TARGET
 ifdef BOARD_AVB_DTBO_KEY_PATH
 $(eval $(call check-and-set-avb-chain-args,DTBO))
@@ -2030,6 +2177,15 @@
 endif
 endif
 
+ifdef INSTALLED_RECOVERYIMAGE_TARGET
+ifdef BOARD_AVB_RECOVERY_KEY_PATH
+$(eval $(call check-and-set-avb-chain-args,RECOVERY))
+else
+INTERNAL_AVB_MAKE_VBMETA_IMAGE_ARGS += \
+    --include_descriptors_from_image $(INSTALLED_RECOVERYIMAGE_TARGET)
+endif
+endif
+
 BOARD_AVB_MAKE_VBMETA_IMAGE_ARGS += --padding_size 4096
 
 # Add kernel cmdline descriptor for kernel to mount system.img as root with
@@ -2065,9 +2221,15 @@
   $(if $(BOARD_AVB_VENDOR_KEY_PATH),\
     $(hide) $(AVBTOOL) extract_public_key --key $(BOARD_AVB_VENDOR_KEY_PATH) \
       --output $(1)/vendor.avbpubkey)
+  $(if $(BOARD_AVB_PRODUCT_KEY_PATH),\
+    $(hide) $(AVBTOOL) extract_public_key --key $(BOARD_AVB_PRODUCT_KEY_PATH) \
+      --output $(1)/product.avbpubkey)
   $(if $(BOARD_AVB_DTBO_KEY_PATH),\
     $(hide) $(AVBTOOL) extract_public_key --key $(BOARD_AVB_DTBO_KEY_PATH) \
       --output $(1)/dtbo.avbpubkey)
+  $(if $(BOARD_AVB_RECOVERY_KEY_PATH),\
+    $(hide) $(AVBTOOL) extract_public_key --key $(BOARD_AVB_RECOVERY_KEY_PATH) \
+      --output $(1)/recovery.avbpubkey)
 endef
 
 define build-vbmetaimage-target
@@ -2083,13 +2245,29 @@
 endef
 
 INSTALLED_VBMETAIMAGE_TARGET := $(BUILT_VBMETAIMAGE_TARGET)
-$(INSTALLED_VBMETAIMAGE_TARGET): $(AVBTOOL) $(INSTALLED_BOOTIMAGE_TARGET) $(INSTALLED_SYSTEMIMAGE) $(INSTALLED_VENDORIMAGE_TARGET) $(INSTALLED_DTBOIMAGE_TARGET) $(BOARD_AVB_KEY_PATH)
+$(INSTALLED_VBMETAIMAGE_TARGET): \
+		$(AVBTOOL) \
+		$(INSTALLED_BOOTIMAGE_TARGET) \
+		$(INSTALLED_SYSTEMIMAGE) \
+		$(INSTALLED_VENDORIMAGE_TARGET) \
+		$(INSTALLED_PRODUCTIMAGE_TARGET) \
+		$(INSTALLED_DTBOIMAGE_TARGET) \
+		$(INSTALLED_RECOVERYIMAGE_TARGET) \
+		$(BOARD_AVB_KEY_PATH)
 	$(build-vbmetaimage-target)
 
 .PHONY: vbmetaimage-nodeps
 vbmetaimage-nodeps:
 	$(build-vbmetaimage-target)
 
+else ifeq (true,$(BOARD_BUILD_DISABLED_VBMETAIMAGE))
+BUILT_DISABLED_VBMETAIMAGE := $(PRODUCT_OUT)/vbmeta.img
+
+INSTALLED_VBMETAIMAGE_TARGET := $(BUILT_DISABLED_VBMETAIMAGE)
+$(INSTALLED_VBMETAIMAGE_TARGET): $(AVBTOOL)
+	$(hide) $(AVBTOOL) make_vbmeta_image \
+	  --flag 2 --padding_size 4096 --output $@
+
 endif # BOARD_AVB_ENABLE
 
 # -----------------------------------------------------------------
@@ -2148,9 +2326,11 @@
   $(HOST_OUT_EXECUTABLES)/mksquashfs \
   $(HOST_OUT_EXECUTABLES)/mkf2fsuserimg.sh \
   $(HOST_OUT_EXECUTABLES)/make_f2fs \
+  $(HOST_OUT_EXECUTABLES)/sload_f2fs \
   $(HOST_OUT_EXECUTABLES)/simg2img \
   $(HOST_OUT_EXECUTABLES)/e2fsck \
   $(HOST_OUT_EXECUTABLES)/build_verity_tree \
+  $(HOST_OUT_EXECUTABLES)/generate_verity_key \
   $(HOST_OUT_EXECUTABLES)/verity_signer \
   $(HOST_OUT_EXECUTABLES)/verity_verifier \
   $(HOST_OUT_EXECUTABLES)/append2simg \
@@ -2176,6 +2356,7 @@
   $(HOST_LIBRARY_PATH)/liblog$(HOST_SHLIB_SUFFIX) \
   $(HOST_LIBRARY_PATH)/libcutils$(HOST_SHLIB_SUFFIX) \
   $(HOST_LIBRARY_PATH)/libselinux$(HOST_SHLIB_SUFFIX) \
+  $(HOST_LIBRARY_PATH)/libcrypto_utils$(HOST_SHLIB_SUFFIX) \
   $(HOST_LIBRARY_PATH)/libcrypto-host$(HOST_SHLIB_SUFFIX) \
   $(HOST_LIBRARY_PATH)/libext2fs-host$(HOST_SHLIB_SUFFIX) \
   $(HOST_LIBRARY_PATH)/libext2_blkid-host$(HOST_SHLIB_SUFFIX) \
@@ -2210,18 +2391,18 @@
   system/extras/verity/build_verity_metadata.py \
   system/extras/ext4_utils/mke2fs.conf \
   external/avb/test/data/testkey_rsa4096.pem \
-  $(shell find system/update_engine/scripts -name \*.pyc -prune -o -type f -print | sort) \
-  $(shell find build/target/product/security -type f -name \*.x509.pem -o -name \*.pk8 -o \
-      -name verity_key | sort) \
-  $(shell find device $(wildcard vendor) -type f -name \*.pk8 -o -name verifiedboot\* -o \
-      -name \*.x509.pem -o -name oem\*.prop | sort)
+  $(sort $(shell find system/update_engine/scripts -name \*.pyc -prune -o -type f -print)) \
+  $(sort $(shell find build/target/product/security -type f -name \*.x509.pem -o -name \*.pk8 -o \
+      -name verity_key)) \
+  $(sort $(shell find device $(wildcard vendor) -type f -name \*.pk8 -o -name verifiedboot\* -o \
+      -name \*.x509.pem -o -name oem\*.prop))
 
 OTATOOLS_RELEASETOOLS := \
-  $(shell find build/tools/releasetools -name \*.pyc -prune -o -type f | sort)
+  $(sort $(shell find build/make/tools/releasetools -name \*.pyc -prune -o -type f))
 
 ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT))
 OTATOOLS_DEPS += \
-  $(shell find external/vboot_reference/tests/devkeys -type f | sort)
+  $(sort $(shell find external/vboot_reference/tests/devkeys -type f))
 endif
 
 $(BUILT_OTATOOLS_PACKAGE): $(OTATOOLS) $(OTATOOLS_DEPS) $(OTATOOLS_RELEASETOOLS) | $(ACP)
@@ -2229,7 +2410,7 @@
 	$(hide) rm -rf $@ $(zip_root)
 	$(hide) mkdir -p $(dir $@) $(zip_root)/bin $(zip_root)/framework $(zip_root)/releasetools
 	$(call copy-files-with-structure,$(OTATOOLS),$(HOST_OUT)/,$(zip_root))
-	$(hide) $(ACP) -r -d -p build/tools/releasetools/* $(zip_root)/releasetools
+	$(hide) $(ACP) -r -d -p build/make/tools/releasetools/* $(zip_root)/releasetools
 	$(hide) rm -rf $@ $(zip_root)/releasetools/*.pyc
 	$(hide) (cd $(zip_root) && zip -qryX $(abspath $@) *)
 	$(hide) echo $(OTATOOLS_DEPS) | xargs zip -qryX $(abspath $@)>/dev/null || true
@@ -2290,9 +2471,7 @@
 $(BUILT_TARGET_FILES_PACKAGE): PRIVATE_TOOL_EXTENSION := $(tool_extension)
 
 ifeq ($(AB_OTA_UPDATER),true)
-# Build zlib fingerprint if using the AB Updater.
-updater_dep := $(TARGET_OUT_COMMON_GEN)/zlib_fingerprint
-updater_dep += system/update_engine/update_engine.conf
+updater_dep := system/update_engine/update_engine.conf
 else
 # Build OTA tools if not using the AB Updater.
 updater_dep := $(built_ota_tools)
@@ -2322,7 +2501,7 @@
 # $1: root directory
 # $2: add prefix
 define fs_config
-(cd $(1); find . -type d | sed 's,$$,/,'; find . \! -type d) | cut -c 3- | sort | sed 's,^,$(2),' | $(HOST_OUT_EXECUTABLES)/fs_config -C -D $(TARGET_OUT) -S $(SELINUX_FC)
+(cd $(1); find . -type d | sed 's,$$,/,'; find . \! -type d) | cut -c 3- | sort | sed 's,^,$(2),' | $(HOST_OUT_EXECUTABLES)/fs_config -C -D $(TARGET_OUT) -S $(SELINUX_FC) -R "$(2)"
 endef
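
The pipeline feeds relative paths (directories suffixed with `/`) to the `fs_config` host tool,
which appends ownership, mode, and SELinux columns; a line in the resulting
`*_filesystem_config.txt` looks roughly like the following (illustrative):

```
system/bin/sh 0 2000 0755 selabel=u:object_r:shell_exec:s0 capabilities=0x0
```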
 
 # Depending on the various images guarantees that the underlying
@@ -2335,6 +2514,7 @@
 		$(INSTALLED_USERDATAIMAGE_TARGET) \
 		$(INSTALLED_CACHEIMAGE_TARGET) \
 		$(INSTALLED_VENDORIMAGE_TARGET) \
+		$(INSTALLED_PRODUCTIMAGE_TARGET) \
 		$(INSTALLED_VBMETAIMAGE_TARGET) \
 		$(INSTALLED_DTBOIMAGE_TARGET) \
 		$(INTERNAL_SYSTEMOTHERIMAGE_FILES) \
@@ -2343,6 +2523,7 @@
 		$(INSTALLED_2NDBOOTLOADER_TARGET) \
 		$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_BASE_FS_PATH) \
 		$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_BASE_FS_PATH) \
+		$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_BASE_FS_PATH) \
 		$(SELINUX_FC) \
 		$(APKCERTS_FILE) \
 		$(SOONG_ZIP) \
@@ -2350,11 +2531,12 @@
 		$(HOST_OUT_EXECUTABLES)/imgdiff \
 		$(HOST_OUT_EXECUTABLES)/bsdiff \
 		$(BUILD_IMAGE_SRCS) \
-		$(INSTALLED_VENDOR_MANIFEST) \
-		$(INSTALLED_VENDOR_MATRIX) \
+		$(BUILT_VENDOR_MANIFEST) \
+		$(BUILT_VENDOR_MATRIX) \
 		| $(ACP)
 	@echo "Package target files: $@"
 	$(call create-system-vendor-symlink)
+	$(call create-system-product-symlink)
 	$(hide) rm -rf $@ $@.list $(zip_root)
 	$(hide) mkdir -p $(dir $@) $(zip_root)
 ifneq (,$(INSTALLED_RECOVERYIMAGE_TARGET)$(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)))
@@ -2420,6 +2602,11 @@
 	$(hide) $(call package_files-copy-root, \
 		$(TARGET_OUT_VENDOR),$(zip_root)/VENDOR)
 endif
+ifdef BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE
+	@# Contents of the product image
+	$(hide) $(call package_files-copy-root, \
+		$(TARGET_OUT_PRODUCT),$(zip_root)/PRODUCT)
+endif
 ifdef INSTALLED_SYSTEMOTHERIMAGE_TARGET
 	@# Contents of the system_other image
 	$(hide) $(call package_files-copy-root, \
@@ -2451,18 +2638,12 @@
 ifdef BOARD_BOOTIMAGE_PARTITION_SIZE
 	$(hide) echo "boot_size=$(BOARD_BOOTIMAGE_PARTITION_SIZE)" >> $(zip_root)/META/misc_info.txt
 endif
-ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),)
-	$(hide) echo "recovery_as_boot=$(BOARD_USES_RECOVERY_AS_BOOT)" >> $(zip_root)/META/misc_info.txt
-endif
 ifeq ($(INSTALLED_RECOVERYIMAGE_TARGET),)
 	$(hide) echo "no_recovery=true" >> $(zip_root)/META/misc_info.txt
 endif
 ifdef BOARD_RECOVERYIMAGE_PARTITION_SIZE
 	$(hide) echo "recovery_size=$(BOARD_RECOVERYIMAGE_PARTITION_SIZE)" >> $(zip_root)/META/misc_info.txt
 endif
-ifdef BOARD_HAS_EXT4_RESERVED_BLOCKS
-	$(hide) echo "has_ext4_reserved_blocks=$(BOARD_HAS_EXT4_RESERVED_BLOCKS)" >> $(zip_root)/META/misc_info.txt
-endif
 ifdef TARGET_RECOVERY_FSTYPE_MOUNT_OPTIONS
 	@# TARGET_RECOVERY_FSTYPE_MOUNT_OPTIONS can be empty to indicate that nothing but defaults should be used.
 	$(hide) echo "recovery_mount_options=$(TARGET_RECOVERY_FSTYPE_MOUNT_OPTIONS)" >> $(zip_root)/META/misc_info.txt
@@ -2490,6 +2671,10 @@
 	$(hide) cp $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_BASE_FS_PATH) \
 	  $(zip_root)/META/$(notdir $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_BASE_FS_PATH))
 endif
+ifneq ($(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_BASE_FS_PATH),)
+	$(hide) cp $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_BASE_FS_PATH) \
+	  $(zip_root)/META/$(notdir $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_BASE_FS_PATH))
+endif
 ifneq ($(strip $(SANITIZE_TARGET)),)
 	# We need to create userdata.img with real data because the instrumented libraries are in userdata.img.
 	$(hide) echo "userdata_img_with_data=true" >> $(zip_root)/META/misc_info.txt
@@ -2508,6 +2693,12 @@
 	$(hide) echo "avb_boot_algorithm=$(BOARD_AVB_BOOT_ALGORITHM)" >> $(zip_root)/META/misc_info.txt
 	$(hide) echo "avb_boot_rollback_index_location=$(BOARD_AVB_BOOT_ROLLBACK_INDEX_LOCATION)" >> $(zip_root)/META/misc_info.txt
 endif # BOARD_AVB_BOOT_KEY_PATH
+	$(hide) echo "avb_recovery_add_hash_footer_args=$(BOARD_AVB_RECOVERY_ADD_HASH_FOOTER_ARGS)" >> $(zip_root)/META/misc_info.txt
+ifdef BOARD_AVB_RECOVERY_KEY_PATH
+	$(hide) echo "avb_recovery_key_path=$(BOARD_AVB_RECOVERY_KEY_PATH)" >> $(zip_root)/META/misc_info.txt
+	$(hide) echo "avb_recovery_algorithm=$(BOARD_AVB_RECOVERY_ALGORITHM)" >> $(zip_root)/META/misc_info.txt
+	$(hide) echo "avb_recovery_rollback_index_location=$(BOARD_AVB_RECOVERY_ROLLBACK_INDEX_LOCATION)" >> $(zip_root)/META/misc_info.txt
+endif # BOARD_AVB_RECOVERY_KEY_PATH
 endif # BOARD_AVB_ENABLE
 ifdef BOARD_BPT_INPUT_FILES
 	$(hide) echo "board_bpt_enable=true" >> $(zip_root)/META/misc_info.txt
@@ -2520,12 +2711,11 @@
 	$(call generate-userimage-prop-dictionary, $(zip_root)/META/misc_info.txt)
 ifneq ($(INSTALLED_RECOVERYIMAGE_TARGET),)
 	$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH MKBOOTIMG=$(MKBOOTIMG) \
-	    ./build/tools/releasetools/make_recovery_patch $(zip_root) $(zip_root)
+	    build/make/tools/releasetools/make_recovery_patch $(zip_root) $(zip_root)
 endif
 ifeq ($(AB_OTA_UPDATER),true)
 	@# When using the A/B updater, include the updater config files in the zip.
 	$(hide) cp $(TOPDIR)system/update_engine/update_engine.conf $(zip_root)/META/update_engine_config.txt
-	$(hide) cp $(TARGET_OUT_COMMON_GEN)/zlib_fingerprint $(zip_root)/META/zlib_fingerprint.txt
 	$(hide) for part in $(AB_OTA_PARTITIONS); do \
 	  echo "$${part}" >> $(zip_root)/META/ab_partitions.txt; \
 	done
@@ -2568,6 +2758,10 @@
 	$(hide) mkdir -p $(zip_root)/IMAGES
 	$(hide) cp $(INSTALLED_VENDORIMAGE_TARGET) $(zip_root)/IMAGES/
 endif
+ifdef BOARD_PREBUILT_PRODUCTIMAGE
+	$(hide) mkdir -p $(zip_root)/IMAGES
+	$(hide) cp $(INSTALLED_PRODUCTIMAGE_TARGET) $(zip_root)/IMAGES/
+endif
 ifdef BOARD_PREBUILT_BOOTIMAGE
 	$(hide) mkdir -p $(zip_root)/IMAGES
 	$(hide) cp $(INSTALLED_BOOTIMAGE_TARGET) $(zip_root)/IMAGES/
@@ -2598,18 +2792,25 @@
 ifdef BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE
 	$(hide) $(call fs_config,$(zip_root)/VENDOR,vendor/) > $(zip_root)/META/vendor_filesystem_config.txt
 endif
-ifeq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
-	$(hide) $(call fs_config,$(zip_root)/ROOT,) > $(zip_root)/META/root_filesystem_config.txt
+ifdef BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE
+	$(hide) $(call fs_config,$(zip_root)/PRODUCT,product/) > $(zip_root)/META/product_filesystem_config.txt
 endif
+ifeq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
+	@# When using BOARD_BUILD_SYSTEM_ROOT_IMAGE, ROOT always contains the files for the root under
+	@# normal boot. BOOT/RAMDISK exists only if additionally using BOARD_USES_RECOVERY_AS_BOOT.
+	$(hide) $(call fs_config,$(zip_root)/ROOT,) > $(zip_root)/META/root_filesystem_config.txt
+ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
 	$(hide) $(call fs_config,$(zip_root)/BOOT/RAMDISK,) > $(zip_root)/META/boot_filesystem_config.txt
+endif
+else # BOARD_BUILD_SYSTEM_ROOT_IMAGE != true
+	$(hide) $(call fs_config,$(zip_root)/BOOT/RAMDISK,) > $(zip_root)/META/boot_filesystem_config.txt
+endif
 ifneq ($(INSTALLED_RECOVERYIMAGE_TARGET),)
 	$(hide) $(call fs_config,$(zip_root)/RECOVERY/RAMDISK,) > $(zip_root)/META/recovery_filesystem_config.txt
 endif
 ifdef INSTALLED_SYSTEMOTHERIMAGE_TARGET
 	$(hide) $(call fs_config,$(zip_root)/SYSTEM_OTHER,system/) > $(zip_root)/META/system_other_filesystem_config.txt
 endif
-
-ifeq ($(PRODUCT_FULL_TREBLE),true)
 	@# Metadata for compatibility verification.
 	$(hide) cp $(BUILT_SYSTEM_MANIFEST) $(zip_root)/META/system_manifest.xml
 	$(hide) cp $(BUILT_SYSTEM_COMPATIBILITY_MATRIX) $(zip_root)/META/system_matrix.xml
@@ -2619,10 +2820,9 @@
 ifdef BUILT_VENDOR_MATRIX
 	$(hide) cp $(BUILT_VENDOR_MATRIX) $(zip_root)/META/vendor_matrix.xml
 endif
-endif
 
 	$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH MKBOOTIMG=$(MKBOOTIMG) \
-	    ./build/tools/releasetools/add_img_to_target_files -a -v -p $(HOST_OUT) $(zip_root)
+	    build/make/tools/releasetools/add_img_to_target_files -a -v -p $(HOST_OUT) $(zip_root)
 	@# Zip everything up, preserving symlinks and placing META/ files first to
 	@# help early validation of the .zip file while uploading it.
 	$(hide) find $(zip_root)/META | sort >$@.list
@@ -2666,10 +2866,10 @@
 endif
 
 $(INTERNAL_OTA_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE) \
-		build/tools/releasetools/ota_from_target_files
+		build/make/tools/releasetools/ota_from_target_files
 	@echo "Package OTA: $@"
 	$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH MKBOOTIMG=$(MKBOOTIMG) \
-	   ./build/tools/releasetools/ota_from_target_files -v \
+	   build/make/tools/releasetools/ota_from_target_files -v \
 	   --block \
 	   --extracted_input_target_files $(patsubst %.zip,%,$(BUILT_TARGET_FILES_PACKAGE)) \
 	   -p $(HOST_OUT) \
@@ -2718,6 +2918,7 @@
 		$(INSTALLED_BOOTIMAGE_TARGET) \
 		$(INSTALLED_USERDATAIMAGE_TARGET) \
 		$(INSTALLED_VENDORIMAGE_TARGET) \
+		$(INSTALLED_PRODUCTIMAGE_TARGET) \
 		$(updater_dep)
 endif
 $(SYMBOLS_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,symbols)/filelist
@@ -2740,7 +2941,8 @@
 $(COVERAGE_ZIP): $(INSTALLED_SYSTEMIMAGE) \
 		$(INSTALLED_BOOTIMAGE_TARGET) \
 		$(INSTALLED_USERDATAIMAGE_TARGET) \
-		$(INSTALLED_VENDORIMAGE_TARGET)
+		$(INSTALLED_VENDORIMAGE_TARGET) \
+		$(INSTALLED_PRODUCTIMAGE_TARGET)
 endif
 $(COVERAGE_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,coverage)/filelist
 $(COVERAGE_ZIP): $(SOONG_ZIP)
@@ -2840,6 +3042,15 @@
 vendorimage: $(INSTALLED_QEMU_VENDORIMAGE)
 droidcore: $(INSTALLED_QEMU_VENDORIMAGE)
 endif
+ifeq ($(BOARD_USES_PRODUCTIMAGE),true)
+INSTALLED_QEMU_PRODUCTIMAGE := $(PRODUCT_OUT)/product-qemu.img
+$(INSTALLED_QEMU_PRODUCTIMAGE): $(INSTALLED_PRODUCTIMAGE_TARGET) $(MK_QEMU_IMAGE_SH) $(SGDISK_HOST)
+	@echo "Create product-qemu.img"
+	(export SGDISK=$(SGDISK_HOST); $(MK_QEMU_IMAGE_SH) $(PRODUCT_OUT)/product.img)
+
+productimage: $(INSTALLED_QEMU_PRODUCTIMAGE)
+droidcore: $(INSTALLED_QEMU_PRODUCTIMAGE)
+endif
 endif
 # -----------------------------------------------------------------
 # The emulator package
@@ -3046,3 +3257,9 @@
 ifneq ($(sdk_repo_goal),)
 include $(TOPDIR)development/build/tools/sdk_repo.mk
 endif
+
+#------------------------------------------------------------------
+# Find lsdump paths
+FIND_LSDUMPS_FILE := $(PRODUCT_OUT)/lsdump_paths.txt
+$(FIND_LSDUMPS_FILE) : $(LSDUMP_PATHS)
+	$(hide) rm -rf $@ && echo "$^" > $@
diff --git a/core/aapt2.mk b/core/aapt2.mk
index c582e30..7d9d8ce 100644
--- a/core/aapt2.mk
+++ b/core/aapt2.mk
@@ -12,6 +12,7 @@
 # - proguard_options_file
 # - my_generated_res_dirs: Resources generated during the build process, which must be compiled in a single run of aapt2.
 # - my_generated_res_dirs_deps: the dependency to use for my_generated_res_dirs.
+# - my_generated_res_zips: Zip files containing resources
 # - my_apk_split_configs: The configurations for which to generate splits.
 # - built_apk_splits: The paths where AAPT should generate the splits.
 #
@@ -35,7 +36,7 @@
     $(eval $(call aapt2-compile-one-resource-file-rule,$(r),$(o)))\
     $(o))
 
-my_generated_resources_flata :=
+my_resources_flata :=
 # Compile generated resources
 ifneq ($(my_generated_res_dirs),)
 my_generated_resources_flata := $(my_compiled_res_base_dir)/gen_res.flata
@@ -44,12 +45,23 @@
 	@echo "AAPT2 compile $@ <- $(PRIVATE_SOURCE_RES_DIRS)"
 	$(call aapt2-compile-resource-dirs)
 
-my_generated_resources_flata += $(my_generated_resources_flata)
+my_resources_flata += $(my_generated_resources_flata)
+endif
+
+# Compile zipped resources
+ifneq ($(my_generated_res_zips),)
+my_zipped_resources_flata := $(my_compiled_res_base_dir)/zip_res.flata
+$(my_zipped_resources_flata): PRIVATE_SOURCE_RES_ZIPS := $(my_generated_res_zips)
+$(my_zipped_resources_flata) : $(my_generated_res_deps) $(AAPT2) $(ZIPSYNC)
+	@echo "AAPT2 compile $@ <- $(PRIVATE_SOURCE_RES_ZIPS)"
+	$(call aapt2-compile-resource-zips)
+
+my_resources_flata += $(my_zipped_resources_flata)
 endif
 
 # Always set --pseudo-localize, it will be stripped out later for release
 # builds that don't want it.
-$(my_res_resources_flat) $(my_overlay_resources_flat) $(my_generated_resources_flata): \
+$(my_res_resources_flat) $(my_overlay_resources_flat) $(my_resources_flata): \
   PRIVATE_AAPT2_CFLAGS := --pseudo-localize
 
 my_static_library_resources := $(foreach l, $(call reverse-list,$(LOCAL_STATIC_ANDROID_LIBRARIES)),\
@@ -67,7 +79,7 @@
 endif
 
 $(my_res_package): PRIVATE_RES_FLAT := $(my_res_resources_flat)
-$(my_res_package): PRIVATE_OVERLAY_FLAT := $(my_static_library_resources) $(my_generated_resources_flata) $(my_overlay_resources_flat)
+$(my_res_package): PRIVATE_OVERLAY_FLAT := $(my_static_library_resources) $(my_resources_flata) $(my_overlay_resources_flat)
 $(my_res_package): PRIVATE_SHARED_ANDROID_LIBRARIES := $(my_shared_library_resources)
 $(my_res_package): PRIVATE_PROGUARD_OPTIONS_FILE := $(proguard_options_file)
 $(my_res_package): PRIVATE_ASSET_DIRS := $(my_asset_dirs)
@@ -95,7 +107,7 @@
 $(my_res_package): $(full_android_manifest) $(my_static_library_resources) $(my_shared_library_resources)
 $(my_res_package): $(my_full_asset_paths)
 $(my_res_package): $(my_res_resources_flat) $(my_overlay_resources_flat) \
-  $(my_generated_resources_flata) $(my_static_library_resources) \
+  $(my_resources_flata) $(my_static_library_resources) \
   $(AAPT2)
 	@echo "AAPT2 link $@"
 	$(call aapt2-link)
@@ -118,3 +130,4 @@
 my_apk_split_configs :=
 my_generated_res_dirs :=
 my_generated_res_dirs_deps :=
+my_generated_res_zips :=
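
As a usage sketch for the new hook (the zip path below is hypothetical), a caller would point `my_generated_res_zips` at a zip of generated resources before including aapt2.mk; ZIPSYNC unpacks it and the `zip_res.flata` rule above compiles the contents:

```make
# Hypothetical caller setup: a codegen step packs resources into res.zip,
# which the zip_res.flata rule above unpacks and compiles.
my_generated_res_zips := $(intermediates.COMMON)/codegen/res.zip
```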
diff --git a/core/aapt_flags.mk b/core/aapt_flags.mk
new file mode 100644
index 0000000..4e3493a
--- /dev/null
+++ b/core/aapt_flags.mk
@@ -0,0 +1,20 @@
+## AAPT Flags
+# aapt doesn't accept multiple --extra-packages flags.
+# We have to collapse them into a single --extra-packages flag here.
+LOCAL_AAPT_FLAGS := $(strip $(LOCAL_AAPT_FLAGS))
+ifdef LOCAL_AAPT_FLAGS
+  ifeq ($(filter 0 1,$(words $(filter --extra-packages,$(LOCAL_AAPT_FLAGS)))),)
+    aapt_flags := $(subst --extra-packages$(space),--extra-packages@,$(LOCAL_AAPT_FLAGS))
+    aapt_flags_extra_packages := $(patsubst --extra-packages@%,%,$(filter --extra-packages@%,$(aapt_flags)))
+    aapt_flags_extra_packages := $(sort $(subst :,$(space),$(aapt_flags_extra_packages)))
+    LOCAL_AAPT_FLAGS := $(filter-out --extra-packages@%,$(aapt_flags)) \
+        --extra-packages $(subst $(space),:,$(aapt_flags_extra_packages))
+    aapt_flags_extra_packages :=
+    aapt_flags :=
+  endif
+endif
+
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_AAPT_FLAGS := $(LOCAL_AAPT_FLAGS)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_TARGET_AAPT_CHARACTERISTICS := $(TARGET_AAPT_CHARACTERISTICS)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_MANIFEST_PACKAGE_NAME := $(LOCAL_MANIFEST_PACKAGE_NAME)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_MANIFEST_INSTRUMENTATION_FOR := $(LOCAL_MANIFEST_INSTRUMENTATION_FOR)
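
To see the collapse in action, here is a self-contained sketch with hypothetical package names; running make on this file prints the rewritten flags:

```make
# Standalone demo of collapsing repeated --extra-packages flags
# (package names are hypothetical). Run with: make -f collapse_demo.mk
empty :=
space := $(empty) $(empty)
LOCAL_AAPT_FLAGS := --extra-packages com.a --extra-packages com.b -v
flags := $(subst --extra-packages$(space),--extra-packages@,$(LOCAL_AAPT_FLAGS))
pkgs := $(sort $(subst :,$(space),$(patsubst --extra-packages@%,%,$(filter --extra-packages@%,$(flags)))))
LOCAL_AAPT_FLAGS := $(filter-out --extra-packages@%,$(flags)) \
    --extra-packages $(subst $(space),:,$(pkgs))
$(info $(LOCAL_AAPT_FLAGS))  # prints: -v --extra-packages com.a:com.b
all: ;
```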
diff --git a/core/allowed_ndk_types.mk b/core/allowed_ndk_types.mk
new file mode 100644
index 0000000..b88b9e8
--- /dev/null
+++ b/core/allowed_ndk_types.mk
@@ -0,0 +1,84 @@
+# Determines the types of NDK modules the current module is allowed to link to.
+# Input variables:
+#   LOCAL_MODULE
+#   LOCAL_MODULE_CLASS
+#   LOCAL_NDK_STL_VARIANT
+#   LOCAL_SDK_VERSION
+# Output variables:
+#   my_ndk_stl_family: Family of the NDK STL.
+#   my_ndk_stl_link_type: STL link type, static or shared.
+#   my_allowed_ndk_types: Types of NDK modules that may be linked.
+#   my_warn_ndk_types: Types of NDK modules that shouldn't be linked, but are.
+
+my_allowed_ndk_types :=
+my_warn_ndk_types :=
+my_ndk_stl_family :=
+my_ndk_stl_link_type :=
+
+ifdef LOCAL_SDK_VERSION
+    ifeq ($(LOCAL_NDK_STL_VARIANT),)
+        my_ndk_stl_family := system
+        my_ndk_stl_link_type := shared
+    else ifeq ($(LOCAL_NDK_STL_VARIANT),system)
+        my_ndk_stl_family := system
+        my_ndk_stl_link_type := shared
+    else ifeq ($(LOCAL_NDK_STL_VARIANT),c++_shared)
+        my_ndk_stl_family := libc++
+        my_ndk_stl_link_type := shared
+    else ifeq ($(LOCAL_NDK_STL_VARIANT),c++_static)
+        my_ndk_stl_family := libc++
+        my_ndk_stl_link_type := static
+    else ifeq ($(LOCAL_NDK_STL_VARIANT),none)
+        my_ndk_stl_family := none
+        my_ndk_stl_link_type := none
+    else
+        $(call pretty-error,invalid LOCAL_NDK_STL_VARIANT: $(LOCAL_NDK_STL_VARIANT))
+    endif
+
+    ifeq ($(LOCAL_MODULE_CLASS),STATIC_LIBRARIES)
+        # The "none" link type indicates that nothing is actually linked. Since
+        # this is a static library, it's still up to the final use of the
+        # library whether a static or shared STL should be used.
+        my_ndk_stl_link_type := none
+    endif
+
+    # The system STL is only the C++ ABI layer, so it's compatible with any STL.
+    my_allowed_ndk_types += native:ndk:system:shared
+    my_allowed_ndk_types += native:ndk:system:none
+
+    # Libraries that don't use the STL can be linked to anything.
+    my_allowed_ndk_types += native:ndk:none:none
+
+    # And it's always okay to link a static library that uses your own STL type,
+    # since nothing was actually linked for the static library: the first linked
+    # library in the dependency chain determines which STL actually gets used.
+    my_allowed_ndk_types += native:ndk:$(my_ndk_stl_family):none
+
+    ifeq ($(LOCAL_MODULE_CLASS),APPS)
+        # For an app package, it's actually okay to depend on any set of STLs.
+        # If any of the individual libraries depend on each other they've
+        # already been checked for consistency, and if they don't they'll be
+        # kept isolated by RTLD_LOCAL anyway.
+        my_allowed_ndk_types += \
+            native:ndk:libc++:shared native:ndk:libc++:static
+
+        # The "none" link type that used by static libraries is intentionally
+        # omitted here. We should only be dealing with shared libraries in
+        # LOCAL_JNI_SHARED_LIBRARIES.
+    else ifeq ($(my_ndk_stl_link_type),shared)
+        # Modules linked to a shared STL can only use another shared STL.
+        my_allowed_ndk_types += native:ndk:$(my_ndk_stl_family):shared
+    endif
+    # Else we are a non-static library that uses a static STL, and are
+    # incompatible with all other shared libraries that use an STL.
+else
+    my_allowed_ndk_types := \
+        native:ndk:none:none \
+        native:ndk:system:none \
+        native:ndk:system:shared \
+
+    ifeq ($(LOCAL_MODULE_CLASS),APPS)
+        # CTS is bad and it should feel bad: http://b/13249737
+        my_warn_ndk_types += native:ndk:libc++:static
+    endif
+endif
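
A hedged sketch of how these `native:ndk:<family>:<link type>` tuples are consumed when a dependency is vetted (variable values are hypothetical; the real enforcement goes through check_link_type.py):

```make
# Hypothetical values: a c++_shared NDK module checking one dependency's
# link type against its allowed set.
dep_link_type := native:ndk:libc++:shared
my_allowed_ndk_types := native:ndk:system:shared native:ndk:system:none \
    native:ndk:none:none native:ndk:libc++:none native:ndk:libc++:shared
ifeq (,$(filter $(dep_link_type),$(my_allowed_ndk_types)))
  $(error $(dep_link_type) is not allowed for this module)
endif
all: ;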
diff --git a/core/android_manifest.mk b/core/android_manifest.mk
index 0093e02..7d573d3 100644
--- a/core/android_manifest.mk
+++ b/core/android_manifest.mk
@@ -34,10 +34,12 @@
 main_android_manifest := $(full_android_manifest)
 full_android_manifest := $(intermediates.COMMON)/AndroidManifest.xml
 $(full_android_manifest): PRIVATE_LIBS_MANIFESTS := $(my_full_libs_manifest_files)
+$(full_android_manifest): $(ANDROID_MANIFEST_MERGER_CLASSPATH)
 $(full_android_manifest) : $(main_android_manifest) $(my_full_libs_manifest_deps)
 	@echo "Merge android manifest files: $@ <-- $< $(PRIVATE_LIBS_MANIFESTS)"
 	@mkdir -p $(dir $@)
-	$(hide) $(ANDROID_MANIFEST_MERGER) --main $< --libs $(PRIVATE_LIBS_MANIFESTS) \
+	$(hide) $(ANDROID_MANIFEST_MERGER) --main $< \
+	    --libs $(call normalize-path-list,$(PRIVATE_LIBS_MANIFESTS)) \
 	    --out $@
 
 endif
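
The fix above matters because the new merger takes a single `:`-separated `--libs` argument. A minimal re-implementation of the colon join that `normalize-path-list` performs (manifest paths hypothetical):

```make
# Standalone demo of the colon join done by normalize-path-list.
empty :=
space := $(empty) $(empty)
libs := libA/AndroidManifest.xml libB/AndroidManifest.xml
$(info --libs $(subst $(space),:,$(strip $(libs))))
# prints: --libs libA/AndroidManifest.xml:libB/AndroidManifest.xml
all: ;
```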
diff --git a/core/autogen_test_config.mk b/core/autogen_test_config.mk
new file mode 100644
index 0000000..c359bac
--- /dev/null
+++ b/core/autogen_test_config.mk
@@ -0,0 +1,64 @@
+#
+# Copyright (C) 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This build rule allows a TradeFed test config file to be created based on
+# the following inputs:
+#   is_native: If the test is a native test.
+#   LOCAL_MANIFEST_FILE: Name of the AndroidManifest file for the test. If it's
+#       not set, the default value `AndroidManifest.xml` will be used.
+# Output:
+#   autogen_test_config_file: Path to the test config file generated.
+
+autogen_test_config_file := $(dir $(LOCAL_BUILT_MODULE))$(LOCAL_MODULE).config
+ifeq (true,$(is_native))
+# Auto generating test config file for native test
+$(autogen_test_config_file) : $(NATIVE_TEST_CONFIG_TEMPLATE)
+	@echo "Auto generating test config $(notdir $@)"
+	$(hide) sed 's&{MODULE}&$(PRIVATE_MODULE)&g' $^ > $@
+my_auto_generate_config := true
+else
+# Auto generating test config file for instrumentation test
+ifeq ($(strip $(LOCAL_MANIFEST_FILE)),)
+  LOCAL_MANIFEST_FILE := AndroidManifest.xml
+endif
+ifdef LOCAL_FULL_MANIFEST_FILE
+  my_android_manifest := $(LOCAL_FULL_MANIFEST_FILE)
+else
+  my_android_manifest := $(LOCAL_PATH)/$(LOCAL_MANIFEST_FILE)
+endif
+ifneq (,$(wildcard $(my_android_manifest)))
+$(autogen_test_config_file): PRIVATE_AUTOGEN_TEST_CONFIG_SCRIPT := $(AUTOGEN_TEST_CONFIG_SCRIPT)
+$(autogen_test_config_file): PRIVATE_TEST_CONFIG_ANDROID_MANIFEST := $(my_android_manifest)
+$(autogen_test_config_file): PRIVATE_EMPTY_TEST_CONFIG := $(EMPTY_TEST_CONFIG)
+$(autogen_test_config_file): PRIVATE_TEMPLATE := $(INSTRUMENTATION_TEST_CONFIG_TEMPLATE)
+$(autogen_test_config_file) : $(my_android_manifest) $(EMPTY_TEST_CONFIG) $(INSTRUMENTATION_TEST_CONFIG_TEMPLATE) $(AUTOGEN_TEST_CONFIG_SCRIPT)
+	@echo "Auto generating test config $(notdir $@)"
+	@rm -f $@
+	$(hide) $(PRIVATE_AUTOGEN_TEST_CONFIG_SCRIPT) $@ $(PRIVATE_TEST_CONFIG_ANDROID_MANIFEST) $(PRIVATE_EMPTY_TEST_CONFIG) $(PRIVATE_TEMPLATE)
+my_auto_generate_config := true
+endif # ifneq (,$(wildcard $(my_android_manifest)))
+endif # ifeq (true,$(is_native))
+
+ifeq (true,$(my_auto_generate_config))
+  LOCAL_INTERMEDIATE_TARGETS += $(autogen_test_config_file)
+  $(LOCAL_BUILT_MODULE): $(autogen_test_config_file)
+  ALL_MODULES.$(my_register_name).auto_test_config := true
+else
+  autogen_test_config_file :=
+endif
+
+my_android_manifest :=
+my_auto_generate_config :=
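
A standalone pattern-rule version of the native-test branch above (template path hypothetical); `sed` replaces every `{MODULE}` token in the template with the stem:

```make
# Hypothetical pattern rule mirroring the sed substitution above.
out/%.config: native_test_config_template.xml
	sed 's&{MODULE}&$*&g' $< > $@
```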
diff --git a/core/aux_config.mk b/core/aux_config.mk
index c40b8cc..6a5cd63 100644
--- a/core/aux_config.mk
+++ b/core/aux_config.mk
@@ -32,7 +32,7 @@
 
 # setup AUX globals
 AUX_SHLIB_SUFFIX := .so
-AUX_GLOBAL_ARFLAGS := crsPD
+AUX_GLOBAL_ARFLAGS := cqsD
 AUX_STATIC_LIB_SUFFIX := .a
 
 # Load ever-lasting "indexed" version of AUX variant environment; it is treated as READ-ONLY from this
@@ -102,10 +102,10 @@
 $(eval AUX_OS_VARIANT_LIST_$(AUX_OS_$(1)):=) \
 $(call aux-variant-setup-paths,$(_name)) \
 $(eval AUX_ALL_VARIANTS += $(_name)) \
-$(eval AUX_ALL_OSES := $(filterout $(AUX_OS_$(_name)),$(AUX_ALL_OSES)) $(AUX_OS_$(_name))) \
-$(eval AUX_ALL_CPUS := $(filterout $(AUX_CPU_$(_name)),$(AUX_ALL_CPUS)) $(AUX_CPU_$(_name))) \
-$(eval AUX_ALL_ARCHS := $(filterout $(AUX_ARCH_$(_name)),$(AUX_ALL_ARCHS)) $(AUX_ARCH_$(_name))) \
-$(eval AUX_ALL_SUBARCHS := $(filterout $(AUX_SUBARCH_$(_name)),$(AUX_ALL_SUBARCHS)) $(AUX_SUBARCH_$(_name)))
+$(eval AUX_ALL_OSES := $(filter-out $(AUX_OS_$(_name)),$(AUX_ALL_OSES)) $(AUX_OS_$(_name))) \
+$(eval AUX_ALL_CPUS := $(filter-out $(AUX_CPU_$(_name)),$(AUX_ALL_CPUS)) $(AUX_CPU_$(_name))) \
+$(eval AUX_ALL_ARCHS := $(filter-out $(AUX_ARCH_$(_name)),$(AUX_ALL_ARCHS)) $(AUX_ARCH_$(_name))) \
+$(eval AUX_ALL_SUBARCHS := $(filter-out $(AUX_SUBARCH_$(_name)),$(AUX_ALL_SUBARCHS)) $(AUX_SUBARCH_$(_name)))
 endef
 
 # Load system configuration referenced by AUX variant config;
@@ -138,7 +138,7 @@
 $(eval _all:=) \
 $(eval _req:=$(addsuffix _$(1),$(aux_env))) \
 $(foreach var,$(_req),$(eval _all += $(var))) \
-$(eval _missing := $(filterout $(_all),$(_req))) \
+$(eval _missing := $(filter-out $(_all),$(_req))) \
 $(if $(_missing),$(error AUX variant $(1) must define vars: $(_missing)))
 endef
 
@@ -154,7 +154,7 @@
 config_roots := $(wildcard device vendor)
 all_configs :=
 ifdef config_roots
-all_configs := $(shell find $(config_roots) -maxdepth 4 -name '*$(variant_sfx)' -o -name '*$(os_sfx)' | sort)
+all_configs := $(sort $(shell find $(config_roots) -maxdepth 4 -name '*$(variant_sfx)' -o -name '*$(os_sfx)'))
 endif
 all_os_configs := $(filter %$(os_sfx),$(all_configs))
 all_variant_configs := $(filter %$(variant_sfx),$(all_configs))
diff --git a/core/base_rules.mk b/core/base_rules.mk
index 592650d..22e7aef 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -72,6 +72,8 @@
 LOCAL_OEM_MODULE := true
 else ifneq ($(filter $(TARGET_OUT_ODM)/%,$(_path)),)
 LOCAL_ODM_MODULE := true
+else ifneq ($(filter $(TARGET_OUT_PRODUCT)/%,$(_path)),)
+LOCAL_PRODUCT_MODULE := true
 endif
 _path :=
 
@@ -86,6 +88,7 @@
 endif
 
 include $(BUILD_SYSTEM)/local_vndk.mk
+include $(BUILD_SYSTEM)/local_systemsdk.mk
 
 my_module_tags := $(LOCAL_MODULE_TAGS)
 ifeq ($(my_host_cross),true)
@@ -199,6 +202,8 @@
   partition_tag := _OEM
 else ifeq (true,$(LOCAL_ODM_MODULE))
   partition_tag := _ODM
+else ifeq (true,$(LOCAL_PRODUCT_MODULE))
+  partition_tag := _PRODUCT
 else ifeq (NATIVE_TESTS,$(LOCAL_MODULE_CLASS))
   partition_tag := _DATA
 else
@@ -460,6 +465,50 @@
 endif
 
 ###########################################################
+## Test Data
+###########################################################
+my_test_data_pairs :=
+my_installed_test_data :=
+# Source-to-relative-destination file pairs, reused below for LOCAL_COMPATIBILITY_SUITE.
+my_test_data_file_pairs :=
+
+ifneq ($(filter NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
+ifneq ($(strip $(LOCAL_TEST_DATA)),)
+ifneq (true,$(LOCAL_UNINSTALLABLE_MODULE))
+
+my_test_data_pairs := $(strip $(foreach td,$(LOCAL_TEST_DATA), \
+    $(eval _file := $(call word-colon,2,$(td))) \
+    $(if $(_file), \
+      $(eval _src_base := $(call word-colon,1,$(td))), \
+      $(eval _src_base := $(LOCAL_PATH)) \
+        $(eval _file := $(call word-colon,1,$(td)))) \
+    $(if $(findstring ..,$(_file)),$(error $(LOCAL_MODULE_MAKEFILE): LOCAL_TEST_DATA may not include '..': $(_file))) \
+    $(if $(filter /%,$(_src_base) $(_file)),$(error $(LOCAL_MODULE_MAKEFILE): LOCAL_TEST_DATA may not include absolute paths: $(_src_base) $(_file))) \
+    $(eval my_test_data_file_pairs := $(my_test_data_file_pairs) $(call append-path,$(_src_base),$(_file)):$(_file)) \
+    $(call append-path,$(_src_base),$(_file)):$(call append-path,$(my_module_path),$(_file))))
+
+my_installed_test_data := $(call copy-many-files,$(my_test_data_pairs))
+$(LOCAL_INSTALLED_MODULE): $(my_installed_test_data)
+
+endif
+endif
+endif
+
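
For reference, a hypothetical module-side use of the pair syntax that the foreach above parses; each entry is either `file` (relative to `LOCAL_PATH`) or `src_base:file`:

```make
LOCAL_TEST_DATA := \
    testdata/input.txt \
    external/example:configs/sample.cfg
# The first entry installs $(LOCAL_PATH)/testdata/input.txt to
# $(my_module_path)/testdata/input.txt; the second installs
# external/example/configs/sample.cfg to $(my_module_path)/configs/sample.cfg.
```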
+# For test modules that lack a suite tag, set null-suite as the default.
+# We only support adding a default suite to native tests, native benchmarks, and instrumentation tests.
+# This is because they are the only tests we currently auto-generate test configs for.
+ifndef LOCAL_COMPATIBILITY_SUITE
+ifneq ($(filter NATIVE_TESTS NATIVE_BENCHMARK, $(LOCAL_MODULE_CLASS)),)
+LOCAL_COMPATIBILITY_SUITE := null-suite
+endif
+ifneq ($(filter APPS, $(LOCAL_MODULE_CLASS)),)
+ifneq ($(filter $(my_module_tags),tests),)
+LOCAL_COMPATIBILITY_SUITE := null-suite
+endif
+endif
+endif
+
+###########################################################
 ## Compatibility suite files.
 ###########################################################
 ifdef LOCAL_COMPATIBILITY_SUITE
@@ -468,19 +517,22 @@
 # separate the multiple architectures into subdirectories of the testcase folder.
 arch_dir :=
 is_native :=
+multi_arch :=
 ifeq ($(LOCAL_MODULE_CLASS),NATIVE_TESTS)
   is_native := true
+  multi_arch := true
 endif
 ifeq ($(LOCAL_MODULE_CLASS),NATIVE_BENCHMARK)
   is_native := true
+  multi_arch := true
 endif
 ifdef LOCAL_MULTILIB
-  is_native := true
+  multi_arch := true
 endif
-ifdef is_native
+ifdef multi_arch
   arch_dir := /$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)
-  is_native :=
 endif
+multi_arch :=
 
 # The module itself.
 $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
@@ -500,13 +552,44 @@
     $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
       $(s):$(dir)/$(n)))))
 
+test_config := $(wildcard $(LOCAL_PATH)/AndroidTest.xml)
+ifeq (,$(test_config))
+  ifneq (true,$(is_native))
+    is_instrumentation_test := true
+    ifeq (true, $(LOCAL_IS_HOST_MODULE))
+      is_instrumentation_test := false
+    endif
+    # If LOCAL_MODULE_CLASS is not APPS, it's certainly not an instrumentation
+    # test. However, some packages for test data also have LOCAL_MODULE_CLASS
+    # set to APPS. These require the flag LOCAL_DISABLE_AUTO_GENERATE_TEST_CONFIG
+    # to disable auto-generating the test config file.
+    ifneq (APPS, $(LOCAL_MODULE_CLASS))
+      is_instrumentation_test := false
+    endif
+  endif
+  # CTS modules can be used for test data, so test config files must be
+  # explicitly created using AndroidTest.xml.
+  ifeq (,$(filter cts, $(LOCAL_COMPATIBILITY_SUITE)))
+    ifneq (true, $(LOCAL_DISABLE_AUTO_GENERATE_TEST_CONFIG))
+      ifeq (true, $(filter true,$(is_native) $(is_instrumentation_test)))
+        include $(BUILD_SYSTEM)/autogen_test_config.mk
+        test_config := $(autogen_test_config_file)
+        autogen_test_config_file :=
+      endif
+    endif
+  endif
+endif
 
-ifneq (,$(wildcard $(LOCAL_PATH)/AndroidTest.xml))
+is_instrumentation_test :=
+
+ifneq (,$(test_config))
 $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
   $(eval my_compat_dist_$(suite) += $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
-    $(LOCAL_PATH)/AndroidTest.xml:$(dir)/$(LOCAL_MODULE).config)))
+    $(test_config):$(dir)/$(LOCAL_MODULE).config)))
 endif
 
+test_config :=
+
 ifneq (,$(wildcard $(LOCAL_PATH)/DynamicConfig.xml))
 $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
   $(eval my_compat_dist_$(suite) += $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
@@ -521,38 +604,24 @@
 endif
 endif # $(my_prefix)$(LOCAL_MODULE_CLASS)_$(LOCAL_MODULE)_compat_files
 
+ifneq ($(my_test_data_file_pairs),)
+$(foreach pair, $(my_test_data_file_pairs), \
+  $(eval parts := $(subst :,$(space),$(pair))) \
+  $(eval src_path := $(word 1,$(parts))) \
+  $(eval file := $(word 2,$(parts))) \
+  $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
+    $(eval my_compat_dist_$(suite) += $(foreach dir, $(call compatibility_suite_dirs,$(suite),$(arch_dir)), \
+      $(src_path):$(call append-path,$(dir),$(file))))))
+endif
+
+arch_dir :=
+is_native :=
+
 $(call create-suite-dependencies)
 
 endif  # LOCAL_COMPATIBILITY_SUITE
 
 ###########################################################
-## Test Data
-###########################################################
-my_test_data_pairs :=
-my_installed_test_data :=
-
-ifneq ($(filter NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
-ifneq ($(strip $(LOCAL_TEST_DATA)),)
-ifneq (true,$(LOCAL_UNINSTALLABLE_MODULE))
-
-my_test_data_pairs := $(strip $(foreach td,$(LOCAL_TEST_DATA), \
-    $(eval _file := $(call word-colon,2,$(td))) \
-    $(if $(_file), \
-      $(eval _base := $(call word-colon,1,$(td))), \
-      $(eval _base := $(LOCAL_PATH)) \
-        $(eval _file := $(call word-colon,1,$(td)))) \
-    $(if $(findstring ..,$(_file)),$(error $(LOCAL_MODULE_MAKEFILE): LOCAL_TEST_DATA may not include '..': $(_file))) \
-    $(if $(filter /%,$(_base) $(_file)),$(error $(LOCAL_MODULE_MAKEFILE): LOCAL_TEST_DATA may not include absolute paths: $(_base) $(_file))) \
-    $(call append-path,$(_base),$(_file)):$(call append-path,$(my_module_path),$(_file))))
-
-my_installed_test_data := $(call copy-many-files,$(my_test_data_pairs))
-$(LOCAL_INSTALLED_MODULE): $(my_installed_test_data)
-
-endif
-endif
-endif
-
-###########################################################
 ## Register with ALL_MODULES
 ###########################################################
 
@@ -599,6 +668,9 @@
 ALL_MODULES.$(my_register_name).TARGET_REQUIRED := \
     $(strip $(ALL_MODULES.$(my_register_name).TARGET_REQUIRED)\
         $(LOCAL_TARGET_REQUIRED_MODULES))
+ALL_MODULES.$(my_register_name).HOST_REQUIRED := \
+    $(strip $(ALL_MODULES.$(my_register_name).HOST_REQUIRED)\
+        $(LOCAL_HOST_REQUIRED_MODULES))
 ALL_MODULES.$(my_register_name).EVENT_LOG_TAGS := \
     $(ALL_MODULES.$(my_register_name).EVENT_LOG_TAGS) $(event_log_tags)
 ALL_MODULES.$(my_register_name).MAKEFILE := \
@@ -611,6 +683,7 @@
 ALL_MODULES.$(my_register_name).FOR_2ND_ARCH := true
 endif
 ALL_MODULES.$(my_register_name).FOR_HOST_CROSS := $(my_host_cross)
+ALL_MODULES.$(my_register_name).COMPATIBILITY_SUITES := $(LOCAL_COMPATIBILITY_SUITE)
 
 INSTALLABLE_FILES.$(LOCAL_INSTALLED_MODULE).MODULE := $(my_register_name)
 
@@ -666,7 +739,7 @@
 
 
 ifdef j_or_n
-$(j_or_n) $(h_or_t) $(j_or_n)-$(h_or_t) : $(my_checked_module)
+$(j_or_n) $(h_or_t) $(j_or_n)-$(h_or_hc_or_t) : $(my_checked_module)
 ifneq (,$(filter $(my_module_tags),tests))
 $(j_or_n)-$(h_or_t)-tests $(j_or_n)-tests $(h_or_t)-tests : $(my_checked_module)
 endif
diff --git a/core/binary.mk b/core/binary.mk
index e54edbe..c2fa27c 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -88,7 +88,7 @@
   endif
 
   # Make sure we've built the NDK.
-  my_additional_dependencies += $(SOONG_OUT_DIR)/ndk.timestamp
+  my_additional_dependencies += $(SOONG_OUT_DIR)/ndk_base.timestamp
 
   # mips32r6 is not supported by the NDK. No released NDK contains these
   # libraries, but the r10 in prebuilts/ndk had a local hack to add them :(
@@ -180,7 +180,6 @@
   my_ndk_stl_include_path :=
   my_ndk_stl_shared_lib_fullpath :=
   my_ndk_stl_static_lib :=
-  my_ndk_cpp_std_version :=
   my_cpu_variant := $(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)CPU_ABI)
   ifeq (mips32r6,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH_VARIANT))
     my_cpu_variant := mips32r6
@@ -189,85 +188,40 @@
   ifeq (,$(LOCAL_NDK_STL_VARIANT))
     LOCAL_NDK_STL_VARIANT := system
   endif
-  ifneq (1,$(words $(filter none system stlport_static stlport_shared c++_static c++_shared gnustl_static, $(LOCAL_NDK_STL_VARIANT))))
+  ifneq (1,$(words $(filter none system c++_static c++_shared, $(LOCAL_NDK_STL_VARIANT))))
     $(error $(LOCAL_PATH): Unknown LOCAL_NDK_STL_VARIANT $(LOCAL_NDK_STL_VARIANT))
   endif
+
   ifeq (system,$(LOCAL_NDK_STL_VARIANT))
     my_ndk_stl_include_path := $(my_ndk_source_root)/cxx-stl/system/include
     my_system_shared_libraries += libstdc++
-  else # LOCAL_NDK_STL_VARIANT is not system
-  ifneq (,$(filter stlport_%, $(LOCAL_NDK_STL_VARIANT)))
-    my_ndk_stl_include_path := $(my_ndk_source_root)/cxx-stl/stlport/stlport
-    my_system_shared_libraries += libstdc++
-    ifeq (stlport_static,$(LOCAL_NDK_STL_VARIANT))
-      my_ndk_stl_static_lib := $(my_ndk_source_root)/cxx-stl/stlport/libs/$(my_cpu_variant)/libstlport_static.a
-      my_ldlibs += -ldl
-    else
-      my_ndk_stl_shared_lib_fullpath := $(my_ndk_source_root)/cxx-stl/stlport/libs/$(my_cpu_variant)/libstlport_shared.so
-    endif
-  else # LOCAL_NDK_STL_VARIANT is not stlport_* either
-  ifneq (,$(filter c++_%, $(LOCAL_NDK_STL_VARIANT)))
-    # Pre-r11 NDKs used libgabi++ for libc++'s C++ ABI, but r11 and later use
-    # libc++abi.
-    #
-    # r13 no longer has the inner directory as a side effect of just using
-    # external/libcxx.
-    ifeq (r10,$(LOCAL_NDK_VERSION))
-      my_ndk_stl_include_path := \
-        $(my_ndk_source_root)/cxx-stl/llvm-libc++/libcxx/include
-      my_ndk_stl_include_path += \
-        $(my_ndk_source_root)/cxx-stl/llvm-libc++/gabi++/include
-    else ifeq (r11,$(LOCAL_NDK_VERSION))
-      my_ndk_stl_include_path := \
-        $(my_ndk_source_root)/cxx-stl/llvm-libc++/libcxx/include
-      my_ndk_stl_include_path += \
-        $(my_ndk_source_root)/cxx-stl/llvm-libc++abi/libcxxabi/include
-    else
-      my_ndk_stl_include_path := \
-        $(my_ndk_source_root)/cxx-stl/llvm-libc++/include
-      my_ndk_stl_include_path += \
-        $(my_ndk_source_root)/cxx-stl/llvm-libc++abi/include
-    endif
+  else ifneq (,$(filter c++_%, $(LOCAL_NDK_STL_VARIANT)))
+    my_ndk_stl_include_path := \
+      $(my_ndk_source_root)/cxx-stl/llvm-libc++/include
+    my_ndk_stl_include_path += \
+      $(my_ndk_source_root)/cxx-stl/llvm-libc++abi/include
     my_ndk_stl_include_path += $(my_ndk_source_root)/android/support/include
 
     my_libcxx_libdir := \
       $(my_ndk_source_root)/cxx-stl/llvm-libc++/libs/$(my_cpu_variant)
 
-    ifneq (,$(filter r10 r11,$(LOCAL_NDK_VERSION)))
-      ifeq (c++_static,$(LOCAL_NDK_STL_VARIANT))
-        my_ndk_stl_static_lib := $(my_libcxx_libdir)/libc++_static.a
-      else
-        my_ndk_stl_shared_lib_fullpath := $(my_libcxx_libdir)/libc++_shared.so
-      endif
+    ifeq (c++_static,$(LOCAL_NDK_STL_VARIANT))
+      my_ndk_stl_static_lib := \
+        $(my_libcxx_libdir)/libc++_static.a \
+        $(my_libcxx_libdir)/libc++abi.a
     else
-      ifeq (c++_static,$(LOCAL_NDK_STL_VARIANT))
-        my_ndk_stl_static_lib := \
-          $(my_libcxx_libdir)/libc++_static.a \
-          $(my_libcxx_libdir)/libc++abi.a
-      else
-        my_ndk_stl_shared_lib_fullpath := $(my_libcxx_libdir)/libc++_shared.so
-      endif
+      my_ndk_stl_shared_lib_fullpath := $(my_libcxx_libdir)/libc++_shared.so
+    endif
 
-      my_ndk_stl_static_lib += $(my_libcxx_libdir)/libandroid_support.a
-      ifneq (,$(filter armeabi armeabi-v7a,$(my_cpu_variant)))
-        my_ndk_stl_static_lib += $(my_libcxx_libdir)/libunwind.a
-      endif
+    my_ndk_stl_static_lib += $(my_libcxx_libdir)/libandroid_support.a
+    ifneq (,$(filter armeabi armeabi-v7a,$(my_cpu_variant)))
+      my_ndk_stl_static_lib += $(my_libcxx_libdir)/libunwind.a
     endif
 
     my_ldlibs += -ldl
-
-    my_ndk_cpp_std_version := c++11
-  else # LOCAL_NDK_STL_VARIANT is not c++_* either
-  ifneq (,$(filter gnustl_%, $(LOCAL_NDK_STL_VARIANT)))
-    my_ndk_stl_include_path := $(my_ndk_source_root)/cxx-stl/gnu-libstdc++/$($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_NDK_GCC_VERSION)/libs/$(my_cpu_variant)/include \
-                               $(my_ndk_source_root)/cxx-stl/gnu-libstdc++/$($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_NDK_GCC_VERSION)/include
-    my_ndk_stl_static_lib := $(my_ndk_source_root)/cxx-stl/gnu-libstdc++/$($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_NDK_GCC_VERSION)/libs/$(my_cpu_variant)/libgnustl_static.a
   else # LOCAL_NDK_STL_VARIANT must be none
     # Do nothing.
   endif
-  endif
-  endif
-  endif
 endif
 
 ifneq ($(LOCAL_USE_VNDK),)
@@ -387,9 +341,9 @@
 # clang is enabled by default for host builds
 # enable it unless we've specifically disabled clang above
 ifdef LOCAL_IS_HOST_MODULE
-    ifeq ($($(my_prefix)OS),windows)
+    ifneq ($($(my_prefix)CLANG_SUPPORTED),true)
         ifeq ($(my_clang),true)
-            $(error $(LOCAL_MODULE_MAKEFILE): $(LOCAL_MODULE): Clang is not yet supported for windows binaries)
+            $(call pretty-error,Clang is not yet supported for $($(my_prefix)OS) binaries)
         endif
         my_clang := false
     else
@@ -397,11 +351,6 @@
             my_clang := true
         endif
     endif
-# Add option to make gcc the default for device build
-else ifeq ($(USE_CLANG_PLATFORM_BUILD),false)
-    ifeq ($(my_clang),)
-        my_clang := false
-    endif
 else ifeq ($(my_clang),)
     my_clang := true
 endif
@@ -429,11 +378,6 @@
     my_cpp_std_version := $(DEFAULT_GCC_CPP_STD_VERSION)
 endif
 
-ifdef LOCAL_SDK_VERSION
-    # The NDK handles this itself.
-    my_cpp_std_version := $(my_ndk_cpp_std_version)
-endif
-
 ifdef LOCAL_IS_HOST_MODULE
     ifneq ($(my_clang),true)
         # The host GCC doesn't support C++14 (and is deprecated, so likely
@@ -665,6 +609,9 @@
   my_cc := $(my_cc_wrapper) $(my_cc)
 endif
 
+SYNTAX_TOOLS_PREFIX := \
+    $(LLVM_PREBUILTS_BASE)/$(BUILD_OS)-x86/$(LLVM_PREBUILTS_VERSION)/libexec
+
 ifneq ($(LOCAL_NO_STATIC_ANALYZER),true)
   my_cc := CCC_CC=$(CLANG) CLANG=$(CLANG) \
            $(SYNTAX_TOOLS_PREFIX)/ccc-analyzer
@@ -806,7 +753,7 @@
 ifneq (,$(LOCAL_SDK_VERSION))
 # Set target-api for LOCAL_SDK_VERSIONs other than current.
 ifneq (,$(filter-out current system_current test_current, $(LOCAL_SDK_VERSION)))
-renderscript_target_api := $(LOCAL_SDK_VERSION)
+renderscript_target_api := $(call get-numeric-sdk-version,$(LOCAL_SDK_VERSION))
 endif
 endif  # LOCAL_SDK_VERSION is set
 endif  # LOCAL_RENDERSCRIPT_TARGET_API is set
@@ -870,6 +817,9 @@
 ###########################################################
 ## Compile the .proto files to .cc (or .c) and then to .o
 ###########################################################
+ifeq ($(strip $(LOCAL_PROTOC_OPTIMIZE_TYPE)),)
+  LOCAL_PROTOC_OPTIMIZE_TYPE := lite
+endif
 proto_sources := $(filter %.proto,$(my_src_files))
 ifneq ($(proto_sources),)
 proto_gen_dir := $(generated_sources_dir)/proto
@@ -891,7 +841,7 @@
 endif
 my_proto_c_includes := external/protobuf/src
 my_cflags += -DGOOGLE_PROTOBUF_NO_RTTI
-my_protoc_flags := --cpp_out=$(proto_gen_dir)
+my_protoc_flags := --cpp_out=$(if $(filter lite lite-static,$(LOCAL_PROTOC_OPTIMIZE_TYPE)),lite:,)$(proto_gen_dir)
 my_protoc_deps :=
 endif
 my_proto_c_includes += $(proto_gen_dir)
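
With the default now `lite`, the generated flag becomes `--cpp_out=lite:$(proto_gen_dir)`, roughly equivalent to declaring `option optimize_for = LITE_RUNTIME;` in the .proto file. A minimal standalone rule of the same shape (paths hypothetical, protoc assumed on PATH):

```make
# Hypothetical standalone equivalent of the lite flag above.
gen/%.pb.cc: %.proto
	mkdir -p gen && protoc --cpp_out=lite:gen $<
```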
@@ -1426,10 +1376,12 @@
 ## other NDK-built libraries
 ####################################################
 
+include $(BUILD_SYSTEM)/allowed_ndk_types.mk
+
 ifdef LOCAL_SDK_VERSION
-my_link_type := native:ndk
-my_warn_types :=
-my_allowed_types := native:ndk
+my_link_type := native:ndk:$(my_ndk_stl_family):$(my_ndk_stl_link_type)
+my_warn_types := $(my_warn_ndk_types)
+my_allowed_types := $(my_allowed_ndk_types)
 else ifdef LOCAL_USE_VNDK
     _name := $(patsubst %.vendor,%,$(LOCAL_MODULE))
     ifneq ($(filter $(_name),$(VNDK_CORE_LIBRARIES) $(VNDK_SAMEPROCESS_LIBRARIES) $(LLNDK_LIBRARIES)),)
@@ -1449,8 +1401,8 @@
     endif
 else
 my_link_type := native:platform
-my_warn_types :=
-my_allowed_types := native:ndk native:platform
+my_warn_types := $(my_warn_ndk_types)
+my_allowed_types := $(my_allowed_ndk_types) native:platform
 endif
 
 my_link_deps := $(addprefix STATIC_LIBRARIES:,$(my_whole_static_libraries) $(my_static_libraries))
@@ -1696,7 +1648,6 @@
         ifeq (,$(filter -Werror,$(my_all_cflags)))
           # Add -Wall -Werror unless the project is in the WARNING_ALLOWED project list.
           ifeq (,$(strip $(call find_warning_allowed_projects,$(LOCAL_PATH))))
-            $(eval MODULES_ADDED_WERROR := $(MODULES_ADDED_WERROR) $(LOCAL_MODULE_MAKEFILE):$(LOCAL_MODULE))
             my_cflags := -Wall -Werror $(my_cflags)
           else
             $(eval MODULES_ADDED_WALL := $(MODULES_ADDED_WALL) $(LOCAL_MODULE_MAKEFILE):$(LOCAL_MODULE))
@@ -1737,21 +1688,20 @@
     ifneq ($(LOCAL_TIDY_CHECKS),)
       my_tidy_checks := $(my_tidy_checks),$(LOCAL_TIDY_CHECKS)
     endif
-    # Set up global default clang-tidy flags, which is none.
-    my_tidy_flags := $(WITH_TIDY_FLAGS)
-    # Use local clang-tidy flags if specified.
-    ifneq ($(LOCAL_TIDY_FLAGS),)
-      my_tidy_flags := $(LOCAL_TIDY_FLAGS)
-    endif
+    my_tidy_flags += $(WITH_TIDY_FLAGS) $(LOCAL_TIDY_FLAGS)
     # If tidy flags are not specified, default to check all header files.
     ifeq ($(my_tidy_flags),)
       my_tidy_flags := $(call default_tidy_header_filter,$(LOCAL_PATH))
     endif
+    # If clang-tidy is not enabled globally, add the -quiet flag.
+    ifeq (,$(filter 1 true,$(WITH_TIDY)))
+      my_tidy_flags += -quiet -extra-arg-before=-fno-caret-diagnostics
+    endif
 
     # We might be using the static analyzer through clang-tidy.
     # https://bugs.llvm.org/show_bug.cgi?id=32914
     ifneq ($(my_tidy_checks),)
-      my_tidy_flags += "-extra-arg-before=-D__clang_analyzer__"
+      my_tidy_flags += -extra-arg-before=-D__clang_analyzer__
     endif
   endif
 endif
@@ -1890,6 +1840,7 @@
         $(my_whole_static_libraries) \
         $(my_shared_libraries) \
         $(my_system_shared_libraries))
+SOONG_CONV.$(LOCAL_MODULE).TYPE := native
 SOONG_CONV := $(SOONG_CONV) $(LOCAL_MODULE)
 endif
 
diff --git a/core/build-system.html b/core/build-system.html
index c7938cc..3d86e24 100644
--- a/core/build-system.html
+++ b/core/build-system.html
@@ -762,6 +762,19 @@
 Dialer, Contacts, etc.  This will probably change or go away when we switch
 to an ant-based build system for the apps.</p>
 
+<h4>LOCAL_PATCH_MODULE (experimental option)</h4>
+<p>As of January 2018, you almost certainly don't need this option, so please
+ask and only use it if you understand what you're doing. This feature is
+experimental and may go away in the future.</p>
+<p>
+When compiling language level 9+ .java code in packages that are part of
+a system module, <code>LOCAL_PATCH_MODULE</code> names the module that your
+sources and dependencies should be patched into. The Android runtime currently
+(Jan 2018) doesn't implement the JEP 261 module system, so this option is only
+supported at compile time. It should only be needed to compile tests in packages
+that exist in libcore and are inconvenient to move elsewhere.
+</p>
+
 <h4>LOCAL_PATH</h4>
 <p>The directory your Android.mk file is in. You can set it by putting the
 following as the first line in your Android.mk:</p>
diff --git a/core/build_id.mk b/core/build_id.mk
index 00272aa..ff4d013 100644
--- a/core/build_id.mk
+++ b/core/build_id.mk
@@ -18,4 +18,4 @@
 # (like "CRB01").  It must be a single word, and is
 # capitalized by convention.
 
-export BUILD_ID=OC-DR1
+export BUILD_ID=OC-MR1
diff --git a/core/clang/HOST_CROSS_x86.mk b/core/clang/HOST_CROSS_x86.mk
index bf48f95..ffd7811 100644
--- a/core/clang/HOST_CROSS_x86.mk
+++ b/core/clang/HOST_CROSS_x86.mk
@@ -1 +1 @@
-$(clang_2nd_arch_prefix)HOST_CROSS_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-i686.a
+$(clang_2nd_arch_prefix)HOST_CROSS_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-i386.a
diff --git a/core/clang/HOST_x86.mk b/core/clang/HOST_x86.mk
index 0722b2a..2803517 100644
--- a/core/clang/HOST_x86.mk
+++ b/core/clang/HOST_x86.mk
@@ -1 +1 @@
-$(clang_2nd_arch_prefix)HOST_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-i686.a
+$(clang_2nd_arch_prefix)HOST_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-i386.a
diff --git a/core/clang/versions.mk b/core/clang/versions.mk
deleted file mode 100644
index f3a206a..0000000
--- a/core/clang/versions.mk
+++ /dev/null
@@ -1,4 +0,0 @@
-## Clang/LLVM release versions.
-
-LLVM_PREBUILTS_VERSION ?= clang-4393122
-LLVM_PREBUILTS_BASE ?= prebuilts/clang/host
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index 99bd691..d1ba354 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -7,6 +7,7 @@
 LOCAL_AAPT2_ONLY:=
 LOCAL_AAPT_FLAGS:=
 LOCAL_AAPT_INCLUDE_ALL_RESOURCES:=
+LOCAL_AAPT_NAMESPACES:=
 LOCAL_ADDITIONAL_CERTIFICATES:=
 LOCAL_ADDITIONAL_DEPENDENCIES:=
 LOCAL_ADDITIONAL_HTML_DIR:=
@@ -35,6 +36,7 @@
 LOCAL_CLASSPATH:=
 LOCAL_COMPATIBILITY_SUITE:=
 LOCAL_COMPATIBILITY_SUPPORT_FILES:=
+LOCAL_COMPRESSED_MODULE:=
 LOCAL_CONLYFLAGS:=
 LOCAL_COPY_HEADERS:=
 LOCAL_COPY_HEADERS_TO:=
@@ -54,6 +56,7 @@
 LOCAL_DEX_PREOPT_IMAGE_LOCATION:=
 LOCAL_DEX_PREOPT_PROFILE_CLASS_LISTING:=
 LOCAL_DEX_PREOPT:= # '',true,false,nostripping
+LOCAL_DISABLE_AUTO_GENERATE_TEST_CONFIG:=
 LOCAL_DONT_CHECK_MODULE:=
 # Don't delete the META_INF dir when merging static Java libraries.
 LOCAL_DONT_DELETE_JAR_META_INF:=
@@ -77,6 +80,7 @@
 LOCAL_EXPORT_C_INCLUDE_DIRS:=
 LOCAL_EXPORT_HEADER_LIBRARY_HEADERS:=
 LOCAL_EXPORT_PACKAGE_RESOURCES:=
+LOCAL_EXPORT_PROGUARD_FLAG_FILES:=
 LOCAL_EXPORT_SHARED_LIBRARY_HEADERS:=
 LOCAL_EXPORT_STATIC_LIBRARY_HEADERS:=
 LOCAL_EXTRACT_APK:=
@@ -95,6 +99,7 @@
 LOCAL_GTEST:=true
 LOCAL_HAL_STATIC_LIBRARIES:=
 LOCAL_HEADER_LIBRARIES:=
+LOCAL_HOST_REQUIRED_MODULES:=
 LOCAL_INIT_RC:=
 LOCAL_INSTALLED_MODULE:=
 LOCAL_INSTALLED_MODULE_STEM:=
@@ -121,7 +126,6 @@
 LOCAL_JAR_PROCESSOR:=
 LOCAL_JAR_PROCESSOR_ARGS:=
 LOCAL_JAVACFLAGS:=
-LOCAL_JAVAC_SHARD_SIZE:=
 LOCAL_JAVA_LANGUAGE_VERSION:=
 LOCAL_JAVA_LAYERS_FILE:=
 LOCAL_JAVA_LIBRARIES:=
@@ -179,6 +183,7 @@
 LOCAL_PACKAGE_NAME:=
 LOCAL_PACKAGE_SPLITS:=
 LOCAL_PACK_MODULE_RELOCATIONS:=
+LOCAL_PATCH_MODULE:=
 LOCAL_PICKUP_FILES:=
 LOCAL_POST_INSTALL_CMD:=
 LOCAL_POST_LINK_CMD:=
@@ -191,8 +196,10 @@
 LOCAL_PREBUILT_OBJ_FILES:=
 LOCAL_PREBUILT_STATIC_JAVA_LIBRARIES:=
 LOCAL_PREBUILT_STRIP_COMMENTS:=
+LOCAL_PRIVATE_PLATFORM_APIS:=
 LOCAL_PRIVILEGED_MODULE:=
-# '',full,custom,nosystem,disabled,obfuscation,optimization
+LOCAL_PRODUCT_MODULE:=
+# '',full,custom,disabled,obfuscation,optimization
 LOCAL_PROGUARD_ENABLED:=
 LOCAL_PROGUARD_FLAG_FILES:=
 LOCAL_PROGUARD_FLAGS:=
@@ -226,9 +233,16 @@
 LOCAL_SDK_VERSION:=
 LOCAL_SHARED_ANDROID_LIBRARIES:=
 LOCAL_SHARED_LIBRARIES:=
-LOCAL_SOONG_HEADER_JAR :=
+LOCAL_SOONG_CLASSES_JAR :=
 LOCAL_SOONG_DEX_JAR :=
+LOCAL_SOONG_EXPORT_PROGUARD_FLAGS :=
+LOCAL_SOONG_HEADER_JAR :=
 LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR :=
+LOCAL_SOONG_PROGUARD_DICT :=
+LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE :=
+LOCAL_SOONG_RRO_DIRS :=
+LOCAL_DROIDDOC_STUBS_SRCJAR :=
+LOCAL_DROIDDOC_DOC_ZIP :=
 # '',true
 LOCAL_SOURCE_FILES_ALL_GENERATED:=
 LOCAL_SRC_FILES:=
@@ -241,6 +255,7 @@
 LOCAL_STRIP_MODULE:=
 LOCAL_SYSTEM_SHARED_LIBRARIES:=none
 LOCAL_TARGET_REQUIRED_MODULES:=
+LOCAL_TEST_CONFIG_OPTIONS:=
 LOCAL_TEST_DATA:=
 LOCAL_TEST_MODULE_TO_PROGUARD_WITH:=
 LOCAL_TIDY:=
@@ -249,6 +264,7 @@
 LOCAL_UNINSTALLABLE_MODULE:=
 LOCAL_UNSTRIPPED_PATH:=
 LOCAL_USE_AAPT2:=$(USE_AAPT2)
+LOCAL_USE_R8:=
 LOCAL_USE_VNDK:=
 LOCAL_VENDOR_MODULE:=
 LOCAL_VTSC_FLAGS:=
@@ -425,6 +441,13 @@
 LOCAL_WHOLE_STATIC_LIBRARIES_32:=
 LOCAL_WHOLE_STATIC_LIBRARIES_64:=
 
+# Robolectric variables
+LOCAL_INSTRUMENT_SOURCE_DIRS :=
+LOCAL_ROBOTEST_FAILURE_FATAL :=
+LOCAL_ROBOTEST_FILES :=
+LOCAL_ROBOTEST_TIMEOUT :=
+LOCAL_TEST_PACKAGE :=
+
 # Aux specific variables
 LOCAL_AUX_ARCH :=
 LOCAL_AUX_CPU :=
diff --git a/core/combo/TARGET_linux-arm.mk b/core/combo/TARGET_linux-arm.mk
index 73b1c04..01cf3f5 100644
--- a/core/combo/TARGET_linux-arm.mk
+++ b/core/combo/TARGET_linux-arm.mk
@@ -50,7 +50,7 @@
 endif
 
 ifeq ($(strip $(TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT)),)
-TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT := armv5te
+$(error TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT must be set)
 endif
 
 TARGET_ARCH_SPECIFIC_MAKEFILE := $(BUILD_COMBOS)/arch/$(TARGET_$(combo_2nd_arch_prefix)ARCH)/$(TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT).mk
diff --git a/core/combo/arch/arm/armv5te-vfp.mk b/core/combo/arch/arm/armv5te-vfp.mk
deleted file mode 100644
index 75299ac..0000000
--- a/core/combo/arch/arm/armv5te-vfp.mk
+++ /dev/null
@@ -1,7 +0,0 @@
-# At the moment, use the same settings than the one
-# for armv5te, since TARGET_ARCH_VARIANT := armv5te-vfp
-# will only be used to select an optimized VFP-capable assembly
-# interpreter loop for Dalvik.
-#
-include $(BUILD_COMBOS)/arch/arm/armv5te.mk
-
diff --git a/core/combo/arch/arm/armv5te.mk b/core/combo/arch/arm/armv5te.mk
deleted file mode 100644
index bd75695..0000000
--- a/core/combo/arch/arm/armv5te.mk
+++ /dev/null
@@ -1,4 +0,0 @@
-# Configuration for Linux on ARM.
-# Generating binaries for the ARMv5TE architecture and higher
-#
-
diff --git a/core/combo/select.mk b/core/combo/select.mk
index 5e181b9..eab4c72 100644
--- a/core/combo/select.mk
+++ b/core/combo/select.mk
@@ -28,7 +28,7 @@
 
 # Set reasonable defaults for the various variables
 
-$(combo_var_prefix)GLOBAL_ARFLAGS := crsPD
+$(combo_var_prefix)GLOBAL_ARFLAGS := cqsD -format=gnu
 
 $(combo_var_prefix)STATIC_LIB_SUFFIX := .a
 
diff --git a/core/config.mk b/core/config.mk
index 4bae6b3..e72b55a 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -60,21 +60,23 @@
 
 # Mark variables deprecated/obsolete
 CHANGES_URL := https://android.googlesource.com/platform/build/+/master/Changes.md
-$(KATI_deprecated_var PATH,Do not use PATH directly. See $(CHANGES_URL)#PATH)
-$(KATI_deprecated_var PYTHONPATH,Do not use PYTHONPATH directly. See $(CHANGES_URL)#PYTHONPATH)
-$(KATI_deprecated_var OUT,Use OUT_DIR instead. See $(CHANGES_URL)#OUT)
-$(KATI_deprecated_var ANDROID_HOST_OUT,Use HOST_OUT instead. See $(CHANGES_URL)#ANDROID_HOST_OUT)
-$(KATI_deprecated_var ANDROID_PRODUCT_OUT,Use PRODUCT_OUT instead. See $(CHANGES_URL)#ANDROID_PRODUCT_OUT)
-$(KATI_deprecated_var ANDROID_HOST_OUT_TESTCASES,Use HOST_OUT_TESTCASES instead. See $(CHANGES_URL)#ANDROID_HOST_OUT_TESTCASES)
-$(KATI_deprecated_var ANDROID_TARGET_OUT_TESTCASES,Use TARGET_OUT_TESTCASES instead. See $(CHANGES_URL)#ANDROID_TARGET_OUT_TESTCASES)
-$(KATI_deprecated_var ANDROID_BUILD_TOP,Use '.' instead. See $(CHANGES_URL)#ANDROID_BUILD_TOP)
-$(KATI_deprecated_var \
+$(KATI_obsolete_var PATH,Do not use PATH directly. See $(CHANGES_URL)#PATH)
+$(KATI_obsolete_var PYTHONPATH,Do not use PYTHONPATH directly. See $(CHANGES_URL)#PYTHONPATH)
+$(KATI_obsolete_var OUT,Use OUT_DIR instead. See $(CHANGES_URL)#OUT)
+$(KATI_obsolete_var ANDROID_HOST_OUT,Use HOST_OUT instead. See $(CHANGES_URL)#ANDROID_HOST_OUT)
+$(KATI_obsolete_var ANDROID_PRODUCT_OUT,Use PRODUCT_OUT instead. See $(CHANGES_URL)#ANDROID_PRODUCT_OUT)
+$(KATI_obsolete_var ANDROID_HOST_OUT_TESTCASES,Use HOST_OUT_TESTCASES instead. See $(CHANGES_URL)#ANDROID_HOST_OUT_TESTCASES)
+$(KATI_obsolete_var ANDROID_TARGET_OUT_TESTCASES,Use TARGET_OUT_TESTCASES instead. See $(CHANGES_URL)#ANDROID_TARGET_OUT_TESTCASES)
+$(KATI_obsolete_var ANDROID_BUILD_TOP,Use '.' instead. See $(CHANGES_URL)#ANDROID_BUILD_TOP)
+$(KATI_obsolete_var \
   ANDROID_TOOLCHAIN \
   ANDROID_TOOLCHAIN_2ND_ARCH \
   ANDROID_DEV_SCRIPTS \
   ANDROID_EMULATOR_PREBUILTS \
   ANDROID_PRE_BUILD_PATHS \
   ,See $(CHANGES_URL)#other_envsetup_variables)
+$(KATI_obsolete_var PRODUCT_COMPATIBILITY_MATRIX_LEVEL_OVERRIDE,Set FCM Version in device manifest instead. See $(CHANGES_URL)#PRODUCT_COMPATIBILITY_MATRIX_LEVEL_OVERRIDE)
+$(KATI_obsolete_var USE_CLANG_PLATFORM_BUILD,Clang is the only supported Android compiler. See $(CHANGES_URL)#USE_CLANG_PLATFORM_BUILD)
 
 CHANGES_URL :=
 
@@ -87,15 +89,6 @@
 dist_goal := $(strip $(filter dist,$(MAKECMDGOALS)))
 MAKECMDGOALS := $(strip $(filter-out dist,$(MAKECMDGOALS)))
 
-# Tell python not to spam the source tree with .pyc files.  This
-# only has an effect on python 2.6 and above.
-export PYTHONDONTWRITEBYTECODE := 1
-
-ifneq ($(filter --color=always, $(GREP_OPTIONS)),)
-$(warning The build system needs unmodified output of grep.)
-$(error Please remove --color=always from your  $$GREP_OPTIONS)
-endif
-
 UNAME := $(shell uname -sm)
 
 SRC_TARGET_DIR := $(TOPDIR)build/target
@@ -104,7 +97,7 @@
 SRC_TEST_API_DIR := $(TOPDIR)prebuilts/sdk/test-api
 
 # Some specific paths to tools
-SRC_DROIDDOC_DIR := $(TOPDIR)build/tools/droiddoc
+SRC_DROIDDOC_DIR := $(TOPDIR)build/make/tools/droiddoc
 
 # Set up efficient math functions which are used in make.
 # Here since this file is included by envsetup as well as during build.
@@ -155,6 +148,15 @@
 BUILD_HOST_DALVIK_JAVA_LIBRARY := $(BUILD_SYSTEM)/host_dalvik_java_library.mk
 BUILD_HOST_DALVIK_STATIC_JAVA_LIBRARY := $(BUILD_SYSTEM)/host_dalvik_static_java_library.mk
 
+BUILD_HOST_TEST_CONFIG := $(BUILD_SYSTEM)/host_test_config.mk
+BUILD_TARGET_TEST_CONFIG := $(BUILD_SYSTEM)/target_test_config.mk
+
+INSTRUMENTATION_TEST_CONFIG_TEMPLATE := $(BUILD_SYSTEM)/instrumentation_test_config_template.xml
+NATIVE_TEST_CONFIG_TEMPLATE := $(BUILD_SYSTEM)/native_test_config_template.xml
+EMPTY_TEST_CONFIG := $(BUILD_SYSTEM)/empty_test_config.xml
+
+# Tool to generate TradeFed test config file automatically.
+AUTOGEN_TEST_CONFIG_SCRIPT := build/make/tools/auto_gen_test_config.py
 
 # ###############################################################
 # Parse out any modifier targets.
@@ -165,7 +167,7 @@
 ################################################################
 # Tools needed in product configuration makefiles.
 ################################################################
-NORMALIZE_PATH := build/tools/normalize_path.py
+NORMALIZE_PATH := build/make/tools/normalize_path.py
 
 # $(1): the paths to be normalized
 define normalize-paths
@@ -402,33 +404,11 @@
   WITH_STATIC_ANALYZER :=
 endif
 
-# define clang/llvm versions and base directory.
-include $(BUILD_SYSTEM)/clang/versions.mk
-
 # Unset WITH_TIDY_ONLY if global WITH_TIDY_ONLY is not true nor 1.
 ifeq (,$(filter 1 true,$(WITH_TIDY_ONLY)))
   WITH_TIDY_ONLY :=
 endif
 
-PATH_TO_CLANG_TIDY := \
-    $(LLVM_PREBUILTS_BASE)/$(BUILD_OS)-x86/$(LLVM_PREBUILTS_VERSION)/bin/clang-tidy
-ifeq ($(wildcard $(PATH_TO_CLANG_TIDY)),)
-  ifneq (,$(filter 1 true,$(WITH_TIDY)))
-    $(warning *** Disable WITH_TIDY because $(PATH_TO_CLANG_TIDY) does not exist)
-  endif
-  PATH_TO_CLANG_TIDY :=
-endif
-
-# Disable WITH_STATIC_ANALYZER if tool can't be found
-SYNTAX_TOOLS_PREFIX := \
-    $(LLVM_PREBUILTS_BASE)/$(BUILD_OS)-x86/$(LLVM_PREBUILTS_VERSION)/tools/scan-build/libexec
-ifneq ($(strip $(WITH_STATIC_ANALYZER)),)
-  ifeq ($(wildcard $(SYNTAX_TOOLS_PREFIX)/ccc-analyzer),)
-    $(warning *** Disable WITH_STATIC_ANALYZER because $(SYNTAX_TOOLS_PREFIX)/ccc-analyzer does not exist)
-    WITH_STATIC_ANALYZER :=
-  endif
-endif
-
 # Pick a Java compiler.
 include $(BUILD_SYSTEM)/combo/javac.mk
 
@@ -459,9 +439,9 @@
 
 # What to build:
 # pdk fusion if:
-# 1) PDK_FUSION_PLATFORM_ZIP is passed in from the environment
+# 1) PDK_FUSION_PLATFORM_ZIP / PDK_FUSION_PLATFORM_DIR is passed in from the environment
 # or
-# 2) the platform.zip exists in the default location
+# 2) the platform.zip / pdk.mk exists in the default location
 # or
 # 3) fusion is a command line build goal,
 #    PDK_FUSION_PLATFORM_ZIP is needed anyway, then do we need the 'fusion' goal?
@@ -470,27 +450,44 @@
 # or
 # 2) TARGET_BUILD_PDK is passed in from the environment
 
-# if PDK_FUSION_PLATFORM_ZIP is specified, do not override.
-ifndef PDK_FUSION_PLATFORM_ZIP
-# Most PDK project paths should be using vendor/pdk/TARGET_DEVICE
-# but some legacy ones (e.g. mini_armv7a_neon generic PDK) were setup
-# with vendor/pdk/TARGET_PRODUCT.
-_pdk_fusion_default_platform_zip = $(strip \
-  $(wildcard vendor/pdk/$(TARGET_DEVICE)/$(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)/platform/platform.zip) \
-  $(wildcard vendor/pdk/$(TARGET_DEVICE)/$(patsubst aosp_%,full_%,$(TARGET_PRODUCT))-$(TARGET_BUILD_VARIANT)/platform/platform.zip) \
-  $(wildcard vendor/pdk/$(TARGET_PRODUCT)/$(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)/platform/platform.zip) \
-  $(wildcard vendor/pdk/$(TARGET_PRODUCT)/$(patsubst aosp_%,full_%,$(TARGET_PRODUCT))-$(TARGET_BUILD_VARIANT)/platform/platform.zip))
-ifneq (,$(_pdk_fusion_default_platform_zip))
-PDK_FUSION_PLATFORM_ZIP := $(word 1, $(_pdk_fusion_default_platform_zip))
-TARGET_BUILD_PDK := true
-endif # _pdk_fusion_default_platform_zip
-endif # !PDK_FUSION_PLATFORM_ZIP
+# if PDK_FUSION_PLATFORM_ZIP or PDK_FUSION_PLATFORM_DIR is specified, do not override.
+ifeq (,$(strip $(PDK_FUSION_PLATFORM_ZIP)$(PDK_FUSION_PLATFORM_DIR)))
+  # Most PDK project paths should be using vendor/pdk/TARGET_DEVICE
+  # but some legacy ones (e.g. mini_armv7a_neon generic PDK) were setup
+  # with vendor/pdk/TARGET_PRODUCT.
+  # Others are set up with vendor/pdk/TARGET_DEVICE/TARGET_DEVICE-userdebug
+  _pdk_fusion_search_paths := \
+    vendor/pdk/$(TARGET_DEVICE)/$(TARGET_DEVICE)-$(TARGET_BUILD_VARIANT)/platform \
+    vendor/pdk/$(TARGET_DEVICE)/$(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)/platform \
+    vendor/pdk/$(TARGET_DEVICE)/$(patsubst aosp_%,full_%,$(TARGET_PRODUCT))-$(TARGET_BUILD_VARIANT)/platform \
+    vendor/pdk/$(TARGET_PRODUCT)/$(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)/platform \
+    vendor/pdk/$(TARGET_PRODUCT)/$(patsubst aosp_%,full_%,$(TARGET_PRODUCT))-$(TARGET_BUILD_VARIANT)/platform
+
+  _pdk_fusion_default_platform_zip := $(strip $(foreach p,$(_pdk_fusion_search_paths),$(wildcard $(p)/platform.zip)))
+  ifneq (,$(_pdk_fusion_default_platform_zip))
+    PDK_FUSION_PLATFORM_ZIP := $(word 1, $(_pdk_fusion_default_platform_zip))
+    _pdk_fusion_default_platform_zip :=
+  else
+    _pdk_fusion_default_platform_mk := $(strip $(foreach p,$(_pdk_fusion_search_paths),$(wildcard $(p)/pdk.mk)))
+    ifneq (,$(_pdk_fusion_default_platform_mk))
+      PDK_FUSION_PLATFORM_DIR := $(dir $(word 1,$(_pdk_fusion_default_platform_mk)))
+      _pdk_fusion_default_platform_mk :=
+    endif
+  endif # _pdk_fusion_default_platform_zip
+  _pdk_fusion_search_paths :=
+endif # !PDK_FUSION_PLATFORM_ZIP && !PDK_FUSION_PLATFORM_DIR
+
+ifneq (,$(PDK_FUSION_PLATFORM_ZIP))
+  ifneq (,$(PDK_FUSION_PLATFORM_DIR))
+    $(error Only one of PDK_FUSION_PLATFORM_ZIP or PDK_FUSION_PLATFORM_DIR may be specified)
+  endif
+endif
 
 ifneq (,$(filter pdk fusion, $(MAKECMDGOALS)))
 TARGET_BUILD_PDK := true
 ifneq (,$(filter fusion, $(MAKECMDGOALS)))
-ifndef PDK_FUSION_PLATFORM_ZIP
-  $(error Specify PDK_FUSION_PLATFORM_ZIP to do a PDK fusion.)
+ifeq (,$(strip $(PDK_FUSION_PLATFORM_ZIP)$(PDK_FUSION_PLATFORM_DIR)))
+  $(error Specify PDK_FUSION_PLATFORM_ZIP or PDK_FUSION_PLATFORM_DIR to do a PDK fusion.)
 endif
 endif  # fusion
 endif  # pdk or fusion
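
The search-path rewrite above boils down to a first-existing-file scan. A minimal restatement with hypothetical paths:

```make
# First-existing-file sketch of the PDK fusion search above.
first-existing = $(firstword $(wildcard $(1)))
_paths := vendor/pdk/dev1/dev1-userdebug/platform \
          vendor/pdk/dev1/prod1-userdebug/platform
PDK_FUSION_PLATFORM_ZIP := $(call first-existing,$(addsuffix /platform.zip,$(_paths)))
ifeq (,$(PDK_FUSION_PLATFORM_ZIP))
  _mk := $(call first-existing,$(addsuffix /pdk.mk,$(_paths)))
  ifneq (,$(_mk))
    PDK_FUSION_PLATFORM_DIR := $(dir $(_mk))
  endif
endif
```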
@@ -498,7 +495,19 @@
 ifdef PDK_FUSION_PLATFORM_ZIP
 TARGET_BUILD_PDK := true
 ifeq (,$(wildcard $(PDK_FUSION_PLATFORM_ZIP)))
-  $(error Cannot find file $(PDK_FUSION_PLATFORM_ZIP).)
+  ifneq (,$(wildcard $(dir $(PDK_FUSION_PLATFORM_ZIP))/pdk.mk))
+    PDK_FUSION_PLATFORM_DIR := $(dir $(PDK_FUSION_PLATFORM_ZIP))
+    PDK_FUSION_PLATFORM_ZIP :=
+  else
+    $(error Cannot find file $(PDK_FUSION_PLATFORM_ZIP).)
+  endif
+endif
+endif
+
+ifdef PDK_FUSION_PLATFORM_DIR
+TARGET_BUILD_PDK := true
+ifeq (,$(wildcard $(PDK_FUSION_PLATFORM_DIR)/pdk.mk))
+  $(error Cannot find file $(PDK_FUSION_PLATFORM_DIR)/pdk.mk.)
 endif
 endif
 
@@ -542,6 +551,11 @@
   USE_D8 := true
 endif
 
+# Default R8 behavior when USE_R8 is not specified.
+ifndef USE_R8
+  USE_R8 := true
+endif
+
 #
 # Tools that are prebuilts for TARGET_BUILD_APPS
 #
@@ -566,8 +580,6 @@
   ZIPALIGN := $(prebuilt_build_tools_bin)/zipalign
 endif # TARGET_BUILD_APPS || TARGET_BUILD_PDK
 
-R8_COMPAT_PROGUARD := $(HOST_OUT_EXECUTABLES)/r8-compat-proguard
-
 ifeq (,$(TARGET_BUILD_APPS))
   # Use RenderScript prebuilts for unbundled builds but not PDK builds
   LLVM_RS_CC := $(HOST_OUT_EXECUTABLES)/llvm-rs-cc
@@ -588,6 +600,8 @@
 SOONG_JAVAC_WRAPPER := $(SOONG_HOST_OUT_EXECUTABLES)/soong_javac_wrapper
 SOONG_ZIP := $(SOONG_HOST_OUT_EXECUTABLES)/soong_zip
 MERGE_ZIPS := $(SOONG_HOST_OUT_EXECUTABLES)/merge_zips
+XMLLINT := $(SOONG_HOST_OUT_EXECUTABLES)/xmllint
+XZ := $(prebuilt_build_tools)/$(HOST_PREBUILT_TAG)/bin/xz
 ZIP2ZIP := $(SOONG_HOST_OUT_EXECUTABLES)/zip2zip
 ZIPTIME := $(prebuilt_build_tools_bin)/ziptime
 
@@ -596,13 +610,13 @@
 
 LEX := prebuilts/misc/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)/flex/flex-2.5.39
 # The default PKGDATADIR built into the prebuilt bison is the relative path
-# external/bison/data.
+# prebuilts/build-tools/common/bison.
 # To run bison from elsewhere you need to set the environment variable
 # BISON_PKGDATADIR.
-BISON_PKGDATADIR := $(PWD)/external/bison/data
-BISON := prebuilts/misc/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)/bison/bison
+BISON_PKGDATADIR := $(PWD)/prebuilts/build-tools/common/bison
+BISON := prebuilts/build-tools/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)/bin/bison
 YACC := $(BISON) -d
-BISON_DATA := $(wildcard external/bison/data/* external/bison/data/*/*)
+BISON_DATA := $(wildcard $(BISON_PKGDATADIR)/* $(BISON_PKGDATADIR)/*/*)
 
 YASM := prebuilts/misc/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)/yasm/yasm
 
@@ -650,17 +664,17 @@
 SIMG2IMG := $(HOST_OUT_EXECUTABLES)/simg2img$(HOST_EXECUTABLE_SUFFIX)
 IMG2SIMG := $(HOST_OUT_EXECUTABLES)/img2simg$(HOST_EXECUTABLE_SUFFIX)
 E2FSCK := $(HOST_OUT_EXECUTABLES)/e2fsck$(HOST_EXECUTABLE_SUFFIX)
-MKTARBALL := build/tools/mktarball.sh
+MKTARBALL := build/make/tools/mktarball.sh
 TUNE2FS := $(HOST_OUT_EXECUTABLES)/tune2fs$(HOST_EXECUTABLE_SUFFIX)
 JARJAR := $(HOST_OUT_JAVA_LIBRARIES)/jarjar.jar
 DATA_BINDING_COMPILER := $(HOST_OUT_JAVA_LIBRARIES)/databinding-compiler.jar
-FAT16COPY := build/tools/fat16copy.py
-CHECK_LINK_TYPE := build/tools/check_link_type.py
+FAT16COPY := build/make/tools/fat16copy.py
+CHECK_LINK_TYPE := build/make/tools/check_link_type.py
 
 PROGUARD := external/proguard/bin/proguard.sh
-JAVATAGS := build/tools/java-event-log-tags.py
-MERGETAGS := build/tools/merge-event-log-tags.py
-BUILD_IMAGE_SRCS := $(wildcard build/tools/releasetools/*.py)
+JAVATAGS := build/make/tools/java-event-log-tags.py
+MERGETAGS := build/make/tools/merge-event-log-tags.py
+BUILD_IMAGE_SRCS := $(wildcard build/make/tools/releasetools/*.py)
 APPEND2SIMG := $(HOST_OUT_EXECUTABLES)/append2simg
 VERITY_SIGNER := $(HOST_OUT_EXECUTABLES)/verity_signer
 BUILD_VERITY_TREE := $(HOST_OUT_EXECUTABLES)/build_verity_tree
@@ -672,6 +686,7 @@
 
 DEXDUMP := $(HOST_OUT_EXECUTABLES)/dexdump2$(BUILD_EXECUTABLE_SUFFIX)
 PROFMAN := $(HOST_OUT_EXECUTABLES)/profman
+HIDDENAPI := $(HOST_OUT_EXECUTABLES)/hiddenapi
 
 # relocation packer
 RELOCATION_PACKER := prebuilts/misc/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)/relocation_packer/relocation_packer
@@ -680,12 +695,23 @@
 FINDBUGS := $(FINDBUGS_DIR)/findbugs
 
 # Tool to merge AndroidManifest.xmls
-ANDROID_MANIFEST_MERGER := $(JAVA) -classpath prebuilts/devtools/tools/lib/manifest-merger.jar com.android.manifmerger.Main merge
+ANDROID_MANIFEST_MERGER_CLASSPATH := \
+    prebuilts/gradle-plugin/com/android/tools/build/manifest-merger/26.0.0-beta2/manifest-merger-26.0.0-beta2.jar \
+    prebuilts/gradle-plugin/com/android/tools/sdk-common/26.0.0-beta2/sdk-common-26.0.0-beta2.jar \
+    prebuilts/gradle-plugin/com/android/tools/common/26.0.0-beta2/common-26.0.0-beta2.jar \
+    prebuilts/misc/common/guava/guava-21.0.jar
+ANDROID_MANIFEST_MERGER := $(JAVA) \
+    -classpath $(subst $(space),:,$(strip $(ANDROID_MANIFEST_MERGER_CLASSPATH))) \
+    com.android.manifmerger.Merger
 
 COLUMN:= column
 
 ifeq ($(EXPERIMENTAL_USE_OPENJDK9),)
+ifeq ($(RUN_ERROR_PRONE),true)
 USE_OPENJDK9 :=
+else
+USE_OPENJDK9 := true
+endif
 TARGET_OPENJDK9 :=
 else ifeq ($(EXPERIMENTAL_USE_OPENJDK9),false)
 USE_OPENJDK9 :=
@@ -698,14 +724,8 @@
 TARGET_OPENJDK9 := true
 endif
 
-# Path to tools.jar, or empty if USE_OPENJDK9 is unset
-HOST_JDK_TOOLS_JAR :=
-# TODO: Remove HOST_JDK_TOOLS_JAR and all references to it once OpenJDK 8
-# toolchains are no longer supported (i.e. when USE_OPENJDK9 is enforced).
-# http://b/38418220
-ifndef USE_OPENJDK9
-HOST_JDK_TOOLS_JAR := $(ANDROID_JAVA_TOOLCHAIN)/../lib/tools.jar
-endif # ifndef USE_OPENJDK9
+# Path to tools.jar
+HOST_JDK_TOOLS_JAR := $(ANDROID_JAVA8_HOME)/lib/tools.jar
 
 # It's called md5 on Mac OS and md5sum on Linux
 ifeq ($(HOST_OS),darwin)
@@ -723,6 +743,19 @@
 
 APICHECK_COMMAND := $(APICHECK) -JXmx1024m -J"classpath $(APICHECK_CLASSPATH)"
 
+# Boolean variable determining if the whitelist for compatible properties is enabled
+PRODUCT_COMPATIBLE_PROPERTY := false
+ifneq ($(PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE),)
+  PRODUCT_COMPATIBLE_PROPERTY := $(PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE)
+else ifeq ($(PRODUCT_SHIPPING_API_LEVEL),)
+  #$(warning no product shipping level defined)
+else ifneq ($(call math_lt,27,$(PRODUCT_SHIPPING_API_LEVEL)),)
+  PRODUCT_COMPATIBLE_PROPERTY := true
+endif
+
+.KATI_READONLY := \
+    PRODUCT_COMPATIBLE_PROPERTY
+
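# Editor's note: illustrative sketch, not part of the patch. The
# override-then-default cascade above, in isolation (hypothetical MY_*
# variables; assumes the math_lt helper from build/make/core/math.mk):
my_example_flag := false
ifneq ($(MY_FLAG_OVERRIDE),)
  my_example_flag := $(MY_FLAG_OVERRIDE)   # an explicit override always wins
else ifeq ($(MY_SHIPPING_API_LEVEL),)
  # no shipping API level defined: keep the conservative default
else ifneq ($(call math_lt,27,$(MY_SHIPPING_API_LEVEL)),)
  my_example_flag := true                  # devices shipping after API 27 default to on
endif
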
 # Boolean variable determining if Treble is fully enabled
 PRODUCT_FULL_TREBLE := false
 ifneq ($(PRODUCT_FULL_TREBLE_OVERRIDE),)
@@ -733,15 +766,25 @@
   PRODUCT_FULL_TREBLE := true
 endif
 
+# TODO(b/69865032): Make PRODUCT_NOTICE_SPLIT the default behavior and remove
+#    references to it here and below.
+ifdef PRODUCT_NOTICE_SPLIT_OVERRIDE
+   $(error PRODUCT_NOTICE_SPLIT_OVERRIDE cannot be set.)
+endif
+
 requirements := \
     PRODUCT_TREBLE_LINKER_NAMESPACES \
     PRODUCT_SEPOLICY_SPLIT \
     PRODUCT_ENFORCE_VINTF_MANIFEST \
+    PRODUCT_NOTICE_SPLIT
 
 # If it is overridden, then the requirement override is taken; otherwise it's
 # PRODUCT_FULL_TREBLE
 $(foreach req,$(requirements),$(eval \
     $(req) := $(if $($(req)_OVERRIDE),$($(req)_OVERRIDE),$(PRODUCT_FULL_TREBLE))))
+# If the requirement is false for any reason, then it's not PRODUCT_FULL_TREBLE
+$(foreach req,$(requirements),$(eval \
+    PRODUCT_FULL_TREBLE := $(if $(filter false,$($(req))),false,$(PRODUCT_FULL_TREBLE))))
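
# Editor's note: illustrative sketch, not part of the patch. The two
# foreach/eval passes above, in isolation (hypothetical names): each
# listed variable defaults to its _OVERRIDE value when one is set, else
# to the master value, and any 'false' then folds back into the master.
example_master := true
example_vars := FEATURE_A FEATURE_B
FEATURE_B_OVERRIDE := false
$(foreach v,$(example_vars),$(eval \
    $(v) := $(if $($(v)_OVERRIDE),$($(v)_OVERRIDE),$(example_master))))
$(foreach v,$(example_vars),$(eval \
    example_master := $(if $(filter false,$($(v))),false,$(example_master))))
$(info FEATURE_A=$(FEATURE_A) FEATURE_B=$(FEATURE_B) master=$(example_master))
# prints: FEATURE_A=true FEATURE_B=false master=false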
 
 PRODUCT_FULL_TREBLE_OVERRIDE ?=
 $(foreach req,$(requirements),$(eval $(req)_OVERRIDE ?=))
@@ -754,6 +797,47 @@
 
 requirements :=
 
+# BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED can be true only if early mounting of
+# partitions is supported. Early mounting must be supported on full-Treble
+# products, so BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED defaults to true for them.
+ifeq ($(PRODUCT_FULL_TREBLE),true)
+  BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED ?= true
+endif
+
+# If PRODUCT_USE_VNDK is true and BOARD_VNDK_VERSION is not defined yet,
+# BOARD_VNDK_VERSION defaults to "current".
+# PRODUCT_USE_VNDK will be true for devices launching with Android P or later.
+PRODUCT_USE_VNDK := false
+ifneq ($(PRODUCT_USE_VNDK_OVERRIDE),)
+  PRODUCT_USE_VNDK := $(PRODUCT_USE_VNDK_OVERRIDE)
+else ifeq ($(PRODUCT_SHIPPING_API_LEVEL),)
+  # No shipping level defined
+else ifeq ($(call math_gt_or_eq,27,$(PRODUCT_SHIPPING_API_LEVEL)),)
+  PRODUCT_USE_VNDK := $(PRODUCT_FULL_TREBLE)
+endif
+
+ifeq ($(PRODUCT_USE_VNDK),true)
+  ifndef BOARD_VNDK_VERSION
+    BOARD_VNDK_VERSION := current
+  endif
+endif
+
+$(KATI_obsolete_var PRODUCT_USE_VNDK_OVERRIDE,Use PRODUCT_USE_VNDK instead)
+.KATI_READONLY := \
+    PRODUCT_USE_VNDK
+
+ifdef PRODUCT_SHIPPING_API_LEVEL
+  ifneq ($(call math_gt_or_eq,$(PRODUCT_SHIPPING_API_LEVEL),27),)
+    ifneq ($(TARGET_USES_MKE2FS),true)
+      $(error When PRODUCT_SHIPPING_API_LEVEL >= 27, TARGET_USES_MKE2FS must be true)
+    endif
+  endif
+  ifneq ($(call numbers_less_than,$(PRODUCT_SHIPPING_API_LEVEL),$(BOARD_SYSTEMSDK_VERSIONS)),)
+    $(error BOARD_SYSTEMSDK_VERSIONS ($(BOARD_SYSTEMSDK_VERSIONS)) must all be greater than or equal to PRODUCT_SHIPPING_API_LEVEL ($(PRODUCT_SHIPPING_API_LEVEL)))
+  endif
+endif
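
# Editor's note: illustrative sketch, not part of the patch, assuming
# the math helpers from build/make/core/math.mk. The two guards above
# behave like:
#   $(call math_gt_or_eq,28,27)           -> non-empty: shipping API 28 requires mke2fs
#   $(call numbers_less_than,28,26 27 28) -> "26 27": non-empty, so versions 26 and 27
#                                            in BOARD_SYSTEMSDK_VERSIONS would error out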
+
 # The default key if not set as LOCAL_CERTIFICATE
 ifdef PRODUCT_DEFAULT_DEV_CERTIFICATE
   DEFAULT_SYSTEM_DEV_CERTIFICATE := $(PRODUCT_DEFAULT_DEV_CERTIFICATE)
@@ -761,42 +845,39 @@
   DEFAULT_SYSTEM_DEV_CERTIFICATE := build/target/product/security/testkey
 endif
 
-FRAMEWORK_MANIFEST_FILE := system/libhidl/manifest.xml
-
-# Compatibility matrix versioning:
-# MATRIX_LEVEL_OVERRIDE defined: MATRIX_LEVEL = MATRIX_LEVEL_OVERRIDE
-# MATRIX_LEVEL_OVERRIDE undefined:
-#   FULL_TREBLE != true: MATRIX_LEVEL = legacy
-#   FULL_TREBLE == true:
-#     SHIPPING_API_LEVEL defined: MATRIX_LEVEL = SHIPPING_API_LEVEL
-#     SHIPPING_API_LEVEL undefined: MATRIX_LEVEL = PLATFORM_SDK_VERSION
-# MATRIX_LEVEL == legacy => legacy.xml
-# MATRIX_LEVEL <= 26 => 26.xml
-# MATRIX_LEVEL == 27 => 27.xml # define when 27 releases
-# MATRIX_LEVEL == 28 => 28.xml # define when 28 releases
-# ...
-# otherwise => current.xml
-
-ifneq ($(PRODUCT_COMPATIBILITY_MATRIX_LEVEL_OVERRIDE),)
-  PRODUCT_COMPATIBILITY_MATRIX_LEVEL := $(PRODUCT_COMPATIBILITY_MATRIX_LEVEL_OVERRIDE)
-else ifneq ($(PRODUCT_FULL_TREBLE),true)
-  PRODUCT_COMPATIBILITY_MATRIX_LEVEL := legacy
-else ifneq ($(PRODUCT_SHIPPING_API_LEVEL),)
-  PRODUCT_COMPATIBILITY_MATRIX_LEVEL := $(PRODUCT_SHIPPING_API_LEVEL)
-else
-  PRODUCT_COMPATIBILITY_MATRIX_LEVEL := $(PLATFORM_SDK_VERSION)
-endif
-
-ifeq ($(strip $(PRODUCT_COMPATIBILITY_MATRIX_LEVEL)),legacy)
-  FRAMEWORK_COMPATIBILITY_MATRIX_FILE := hardware/interfaces/compatibility_matrix.legacy.xml
-else ifeq ($(call math_gt_or_eq,$(PRODUCT_COMPATIBILITY_MATRIX_LEVEL),27),)
-  FRAMEWORK_COMPATIBILITY_MATRIX_FILE := hardware/interfaces/compatibility_matrix.26.xml
-else
-  FRAMEWORK_COMPATIBILITY_MATRIX_FILE := hardware/interfaces/compatibility_matrix.current.xml
-endif
-
 BUILD_NUMBER_FROM_FILE := $$(cat $(OUT_DIR)/build_number.txt)
-BUILD_DATETIME_FROM_FILE := $$(cat $(OUT_DIR)/build_date.txt)
+BUILD_DATETIME_FROM_FILE := $$(cat $(BUILD_DATETIME_FILE))
+
+# SEPolicy versions
+
+# PLATFORM_SEPOLICY_VERSION is a number of the form "NN.m" with "NN" mapping to
+# PLATFORM_SDK_VERSION and "m" as a minor number which allows for SELinux
+# changes independent of PLATFORM_SDK_VERSION.  This value will be set to
+# 10000.0 to represent tip-of-tree development that is inherently unstable and
+# thus designed not to work with any shipping vendor policy.  This is similar in
+# spirit to how DEFAULT_APP_TARGET_SDK is set.
+# The minor version ('m' component) must be updated every time a platform release
+# is made which breaks compatibility with the previous platform sepolicy version,
+# not just on every increase in PLATFORM_SDK_VERSION.  The minor version should
+# be reset to 0 on every bump of the PLATFORM_SDK_VERSION.
+sepolicy_major_vers := 27
+sepolicy_minor_vers := 0
+
+ifneq ($(sepolicy_major_vers), $(PLATFORM_SDK_VERSION))
+$(error sepolicy_major_vers does not match PLATFORM_SDK_VERSION, please update.)
+endif
+ifneq (REL,$(PLATFORM_VERSION_CODENAME))
+    sepolicy_major_vers := 10000
+    sepolicy_minor_vers := 0
+endif
+PLATFORM_SEPOLICY_VERSION := $(join $(addsuffix .,$(sepolicy_major_vers)), $(sepolicy_minor_vers))
+sepolicy_major_vers :=
+sepolicy_minor_vers :=
+
+# A list of SEPolicy versions, besides PLATFORM_SEPOLICY_VERSION, that the framework supports.
+PLATFORM_SEPOLICY_COMPAT_VERSIONS := \
+    26.0 \
+    27.0
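
# Editor's note: illustrative sketch, not part of the patch. The
# join/addsuffix pairing above is what composes the "NN.m" version
# string from its two components:
example_major := 27
example_minor := 0
example_version := $(join $(addsuffix .,$(example_major)),$(example_minor))
$(info $(example_version))   # prints 27.0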
 
 # ###############################################################
 # Set up final options.
@@ -834,12 +915,6 @@
 $(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES := default
 endif
 
-# These will come from Soong, drop the environment versions
-unexport CLANG
-unexport CLANG_CXX
-unexport CCC_CC
-unexport CCC_CXX
-
 # ###############################################################
 # Collect a list of the SDK versions that we could compile against
 # For use with the LOCAL_SDK_VERSION variable for include $(BUILD_PACKAGE)
@@ -875,20 +950,20 @@
     $(patsubst $(HISTORICAL_SDK_VERSIONS_ROOT)/%/android.jar,%, \
     $(wildcard $(HISTORICAL_SDK_VERSIONS_ROOT)/*/android.jar)))
 
-# We don't have prebuilt test_current SDK yet.
-TARGET_AVAILABLE_SDK_VERSIONS := test_current $(TARGET_AVAILABLE_SDK_VERSIONS)
+TARGET_AVAILABLE_SDK_VERSIONS := $(addprefix system_,$(call numerically_sort,\
+    $(patsubst $(HISTORICAL_SDK_VERSIONS_ROOT)/%/android_system.jar,%, \
+    $(wildcard $(HISTORICAL_SDK_VERSIONS_ROOT)/*/android_system.jar)))) \
+    $(TARGET_AVAILABLE_SDK_VERSIONS)
+
+# We don't have prebuilt test_current or core_current SDKs yet.
+TARGET_AVAILABLE_SDK_VERSIONS := test_current core_current $(TARGET_AVAILABLE_SDK_VERSIONS)
 
 TARGET_SDK_VERSIONS_WITHOUT_JAVA_18_SUPPORT := $(call numbers_less_than,24,$(TARGET_AVAILABLE_SDK_VERSIONS))
 TARGET_SDK_VERSIONS_WITHOUT_JAVA_19_SUPPORT := $(call numbers_less_than,27,$(TARGET_AVAILABLE_SDK_VERSIONS))
 
-INTERNAL_PLATFORM_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/public_api.txt
-INTERNAL_PLATFORM_REMOVED_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/removed.txt
-INTERNAL_PLATFORM_SYSTEM_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/system-api.txt
-INTERNAL_PLATFORM_SYSTEM_REMOVED_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/system-removed.txt
-INTERNAL_PLATFORM_SYSTEM_EXACT_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/system-exact.txt
-INTERNAL_PLATFORM_TEST_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/test-api.txt
-INTERNAL_PLATFORM_TEST_REMOVED_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/test-removed.txt
-INTERNAL_PLATFORM_TEST_EXACT_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/test-exact.txt
+INTERNAL_PLATFORM_HIDDENAPI_LIGHT_GREYLIST := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/hiddenapi-light-greylist.txt
+INTERNAL_PLATFORM_HIDDENAPI_DARK_GREYLIST := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/hiddenapi-dark-greylist.txt
+INTERNAL_PLATFORM_HIDDENAPI_BLACKLIST := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/hiddenapi-blacklist.txt
 
 # This is the standard way to name a directory containing prebuilt target
 # objects. E.g., prebuilt/$(TARGET_PREBUILT_TAG)/libc.so
@@ -931,6 +1006,7 @@
     cacheimage-nodeps \
     bptimage-nodeps \
     vnod vendorimage-nodeps \
+    pnod productimage-nodeps \
     systemotherimage-nodeps \
     ramdisk-nodeps \
     bootimage-nodeps \
diff --git a/core/config_sanitizers.mk b/core/config_sanitizers.mk
index 9415143..35d077c 100644
--- a/core/config_sanitizers.mk
+++ b/core/config_sanitizers.mk
@@ -217,19 +217,17 @@
 
       # Respect LOCAL_NOSANITIZE for integer-overflow flags.
       ifeq ($(filter signed-integer-overflow, $(strip $(LOCAL_NOSANITIZE))),)
-        my_cflags += -fsanitize=signed-integer-overflow
+        my_sanitize += signed-integer-overflow
       endif
       ifeq ($(filter unsigned-integer-overflow, $(strip $(LOCAL_NOSANITIZE))),)
-        my_cflags += -fsanitize=unsigned-integer-overflow
+        my_sanitize += unsigned-integer-overflow
       endif
-      my_cflags += -fsanitize-trap=all
-      my_cflags += -ftrap-function=abort
       my_cflags += $(INTEGER_OVERFLOW_EXTRA_CFLAGS)
 
       # Check for diagnostics mode (on by default).
       ifneq ($(filter integer_overflow,$(my_sanitize_diag)),)
-        my_cflags += -fno-sanitize-trap=signed-integer-overflow,unsigned-integer-overflow
-        my_shared_libraries := $($(LOCAL_2ND_ARCH_VAR_PREFIX)UBSAN_RUNTIME_LIBRARY) $(my_shared_libraries)
+        my_sanitize_diag += signed-integer-overflow
+        my_sanitize_diag += unsigned-integer-overflow
       endif
     endif
   endif
@@ -326,6 +324,27 @@
   endif
 endif
 
+# Use minimal diagnostics when integer overflow is enabled
+ifndef LOCAL_IS_HOST_MODULE
+  # Pre-emptively add the UBSAN minimal runtime in case a static library dependency requires it
+  ifeq ($(filter STATIC_LIBRARIES,$(LOCAL_MODULE_CLASS)),)
+    ifndef LOCAL_SDK_VERSION
+      my_static_libraries += $($(LOCAL_2ND_ARCH_VAR_PREFIX)UBSAN_MINIMAL_RUNTIME_LIBRARY)
+    endif
+  endif
+  ifneq ($(filter unsigned-integer-overflow signed-integer-overflow integer,$(my_sanitize)),)
+    ifeq ($(filter unsigned-integer-overflow signed-integer-overflow integer,$(my_sanitize_diag)),)
+      ifeq ($(filter cfi,$(my_sanitize_diag)),)
+        ifeq ($(filter address,$(my_sanitize)),)
+          my_cflags += -fsanitize-minimal-runtime
+          my_cflags += -fno-sanitize-trap=integer
+          my_cflags += -fno-sanitize-recover=integer
+        endif
+      endif
+    endif
+  endif
+endif
+
 ifneq ($(strip $(LOCAL_SANITIZE_RECOVER)),)
   recover_arg := $(subst $(space),$(comma),$(LOCAL_SANITIZE_RECOVER)),
   my_cflags += -fsanitize-recover=$(recover_arg)
diff --git a/core/cxx_stl_setup.mk b/core/cxx_stl_setup.mk
index f07659d..5171b8a 100644
--- a/core/cxx_stl_setup.mk
+++ b/core/cxx_stl_setup.mk
@@ -74,6 +74,16 @@
 ifneq ($(filter $(my_cxx_stl),libc++ libc++_static),)
     my_cflags += -D_USING_LIBCXX
 
+    ifeq ($($(my_prefix)OS),darwin)
+        # libc++'s headers are annotated with availability macros that indicate
+        # which version of Mac OS was the first to ship with a libc++ feature
+        # available in its *system's* libc++.dylib. We do not use the system's
+        # library, but rather ship our own. As such, these availability
+        # attributes are meaningless for us but cause build breaks when we try
+        # to use code that would not be available in the system's dylib.
+        my_cppflags += -D_LIBCPP_DISABLE_AVAILABILITY
+    endif
+
     # Note that the structure of this means that LOCAL_CXX_STL := libc++ will
     # use the static libc++ for static executables.
     ifeq ($(my_link_type),dynamic)
diff --git a/core/definitions.mk b/core/definitions.mk
index c8368b4..b2b3bbb 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -167,7 +167,7 @@
 # $(1): directory to search under
 # Ignores $(1)/Android.mk
 define first-makefiles-under
-$(shell build/tools/findleaves.py $(FIND_LEAVES_EXCLUDES) \
+$(shell build/make/tools/findleaves.py $(FIND_LEAVES_EXCLUDES) \
         --mindepth=2 $(addprefix --dir=,$(1)) Android.mk)
 endef
 
@@ -1723,6 +1723,10 @@
 $(if $(PRIVATE_ALL_OBJECTS),,$(hide) $($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)AR) d $(1) $(dir $(1))dummy.o \
   && rm -f $(dir $(1))dummy.o)
 endef
+else
+create-dummy.o-if-no-objs =
+get-dummy.o-if-no-objs =
+delete-dummy.o-if-no-objs =
 endif  # HOST_OS is darwin
 
 # Explicitly delete the archive first so that ar doesn't
@@ -1849,7 +1853,7 @@
   $(PRIVATE_OBJCOPY) -S --remove-section .gdb_index --remove-section .comment --keep-symbols=$@.keep_symbols $@.mini_debuginfo && \
   $(PRIVATE_OBJCOPY) --rename-section saved_debug_frame=.debug_frame $@.mini_debuginfo && \
   rm -f $@.mini_debuginfo.xz && \
-  xz $@.mini_debuginfo && \
+  $(XZ) $@.mini_debuginfo && \
   $(PRIVATE_OBJCOPY) --add-section .gnu_debugdata=$@.mini_debuginfo.xz $@; \
 else \
   cp -f $< $@; \
@@ -2075,6 +2079,13 @@
   $(PRIVATE_AAPT2_CFLAGS) --legacy
 endef
 
+# TODO(b/74574557): use aapt2 compile --zip if it gets implemented
+define aapt2-compile-resource-zips
+@mkdir -p $(dir $@)
+$(ZIPSYNC) -d $@.contents -l $@.list $(PRIVATE_SOURCE_RES_ZIPS)
+$(hide) $(AAPT2) compile -o $@ --dir $@.contents $(PRIVATE_AAPT2_CFLAGS) --legacy
+endef
+
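# Editor's note: illustrative sketch, not part of the patch. ZIPSYNC
# extracts the input res zips into $@.contents (recording the extracted
# files in $@.list), and aapt2 then compiles that same directory, so the
# two paths must agree. A rule would drive it roughly like (hypothetical
# paths):
#   $(my_out)/resources.flata: PRIVATE_SOURCE_RES_ZIPS := $(my_res_zips)
#   $(my_out)/resources.flata: $(my_res_zips) $(ZIPSYNC) $(AAPT2)
#           $(call aapt2-compile-resource-zips)
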
 # Set up rule to compile one resource file with aapt2.
 # Must be called with $(eval).
 # $(1): the source file
@@ -2218,16 +2229,6 @@
 $(hide) tr ' ' '\n' < $@.tmp | $(NORMALIZE_PATH) | sort -u > $@
 endef
 
-# $(1): sharding number.
-# $(2): Java source files paths.
-define save-sharded-java-source-list
-$(java_source_list_file).shard.$(1): $(2) $$(NORMALIZE_PATH)
-	@echo "shard java source list: $$@"
-	rm -f $$@
-	$$(call dump-words-to-file,$(2),$$@.tmp)
-	$(hide) tr ' ' '\n' < $$@.tmp | $$(NORMALIZE_PATH) | sort -u > $$@
-endef
-
 # Common definition to invoke javac on the host and target.
 #
 # $(1): javac
@@ -2237,23 +2238,31 @@
 $(hide) rm -rf $(PRIVATE_CLASS_INTERMEDIATES_DIR) $(PRIVATE_ANNO_INTERMEDIATES_DIR)
 $(hide) mkdir -p $(dir $@)
 $(hide) mkdir -p $(PRIVATE_CLASS_INTERMEDIATES_DIR) $(PRIVATE_ANNO_INTERMEDIATES_DIR)
-$(hide) if [ -s $(PRIVATE_JAVA_SOURCE_LIST) ] ; then \
+$(if $(PRIVATE_SRCJARS),\
+    $(ZIPSYNC) -d $(PRIVATE_SRCJAR_INTERMEDIATES_DIR) -l $(PRIVATE_SRCJAR_LIST_FILE) -f "*.java" $(PRIVATE_SRCJARS))
+$(hide) if [ -s $(PRIVATE_JAVA_SOURCE_LIST) $(if $(PRIVATE_SRCJARS),-o -s $(PRIVATE_SRCJAR_LIST_FILE) )] ; then \
     $(SOONG_JAVAC_WRAPPER) $(JAVAC_WRAPPER) $(1) -encoding UTF-8 \
     $(if $(findstring true,$(PRIVATE_WARNINGS_ENABLE)),$(xlint_unchecked),) \
     $(if $(PRIVATE_USE_SYSTEM_MODULES), \
-      $(addprefix --system=,$(PRIVATE_SYSTEM_MODULES)), \
+      $(addprefix --system=,$(PRIVATE_SYSTEM_MODULES_DIR)), \
       $(addprefix -bootclasspath ,$(strip \
           $(call normalize-path-list,$(PRIVATE_BOOTCLASSPATH)) \
           $(PRIVATE_EMPTY_BOOTCLASSPATH)))) \
-    $(addprefix -classpath ,$(strip \
-        $(call normalize-path-list,$(2)))) \
+    $(if $(PRIVATE_USE_SYSTEM_MODULES), \
+      $(if $(PRIVATE_PATCH_MODULE), \
+        --patch-module=$(PRIVATE_PATCH_MODULE)=$(call normalize-path-list,. $(2)))) \
+    $(addprefix -classpath ,$(call normalize-path-list,$(strip \
+      $(if $(PRIVATE_USE_SYSTEM_MODULES), \
+        $(filter-out $(PRIVATE_SYSTEM_MODULES_LIBS),$(PRIVATE_BOOTCLASSPATH))) \
+      $(2)))) \
     $(if $(findstring true,$(PRIVATE_WARNINGS_ENABLE)),$(xlint_unchecked),) \
     -d $(PRIVATE_CLASS_INTERMEDIATES_DIR) -s $(PRIVATE_ANNO_INTERMEDIATES_DIR) \
     $(PRIVATE_JAVACFLAGS) \
     \@$(PRIVATE_JAVA_SOURCE_LIST) \
+    $(if $(PRIVATE_SRCJARS),\@$(PRIVATE_SRCJAR_LIST_FILE)) \
     || ( rm -rf $(PRIVATE_CLASS_INTERMEDIATES_DIR) ; exit 41 ) \
 fi
-$(if $(PRIVATE_JAVA_LAYERS_FILE), $(hide) build/tools/java-layers.py \
+$(if $(PRIVATE_JAVA_LAYERS_FILE), $(hide) build/make/tools/java-layers.py \
     $(PRIVATE_JAVA_LAYERS_FILE) @$(PRIVATE_JAVA_SOURCE_LIST),)
 $(if $(PRIVATE_JAR_EXCLUDE_FILES), $(hide) find $(PRIVATE_CLASS_INTERMEDIATES_DIR) \
     -name $(word 1, $(PRIVATE_JAR_EXCLUDE_FILES)) \
@@ -2271,55 +2280,22 @@
 $(if $(PRIVATE_EXTRA_JAR_ARGS),$(call add-java-resources-to,$@))
 endef
 
-# $(1): Javac output jar name.
-# $(2): Java source list file.
-# $(3): Java header libs.
-# $(4): Javac sharding number.
-# $(5): Javac sources deps (the arg may neeed $$ in case of containing '#')
-define create-classes-full-debug.jar
-$(1): PRIVATE_JAVACFLAGS := $$(LOCAL_JAVACFLAGS) $$(annotation_processor_flags)
-$(1): PRIVATE_JAR_EXCLUDE_FILES := $$(LOCAL_JAR_EXCLUDE_FILES)
-$(1): PRIVATE_JAR_PACKAGES := $$(LOCAL_JAR_PACKAGES)
-$(1): PRIVATE_JAR_EXCLUDE_PACKAGES := $$(LOCAL_JAR_EXCLUDE_PACKAGES)
-$(1): PRIVATE_DONT_DELETE_JAR_META_INF := $$(LOCAL_DONT_DELETE_JAR_META_INF)
-$(1): PRIVATE_JAVA_SOURCE_LIST := $(2)
-$(1): PRIVATE_ALL_JAVA_HEADER_LIBRARIES := $(3)
-$(1): PRIVATE_CLASS_INTERMEDIATES_DIR := $(intermediates.COMMON)/classes$(4)
-$(1): PRIVATE_ANNO_INTERMEDIATES_DIR := $(intermediates.COMMON)/anno$(4)
-$(1): \
-    $(2) \
-    $(3) \
-    $(5) \
-    $$(full_java_bootclasspath_libs) \
-    $$(full_java_system_modules_deps) \
-    $$(layers_file) \
-    $$(annotation_processor_deps) \
-    $$(NORMALIZE_PATH) \
-    $$(JAR_ARGS) \
-    | $$(SOONG_JAVAC_WRAPPER)
-	@echo "Target Java: $$@ ($$(PRIVATE_CLASS_INTERMEDIATES_DIR))"
-	$$(call compile-java,$$(TARGET_JAVAC),$$(PRIVATE_ALL_JAVA_HEADER_LIBRARIES))
-endef
-
 define transform-java-to-header.jar
 @echo "$($(PRIVATE_PREFIX)DISPLAY) Turbine: $(PRIVATE_MODULE)"
 @mkdir -p $(dir $@)
 @rm -rf $(dir $@)/classes-turbine
 @mkdir $(dir $@)/classes-turbine
-$(hide) if [ -s $(PRIVATE_JAVA_SOURCE_LIST) ] ; then \
+$(hide) if [ -s $(PRIVATE_JAVA_SOURCE_LIST) -o -n "$(PRIVATE_SRCJARS)" ] ; then \
     $(JAVA) -jar $(TURBINE) \
     --output $@.premerged --temp_dir $(dir $@)/classes-turbine \
-    --sources \@$(PRIVATE_JAVA_SOURCE_LIST) \
-    --javacopts $(PRIVATE_JAVACFLAGS) $(COMMON_JDK_FLAGS) \
-    $(addprefix --bootclasspath ,$(strip \
-         $(call normalize-path-list,$(PRIVATE_BOOTCLASSPATH)) \
-         $(PRIVATE_EMPTY_BOOTCLASSPATH))) \
-    $(addprefix --classpath ,$(strip \
-        $(call normalize-path-list,$(PRIVATE_ALL_JAVA_HEADER_LIBRARIES)))) \
+    --sources \@$(PRIVATE_JAVA_SOURCE_LIST) --source_jars $(PRIVATE_SRCJARS) \
+    --javacopts $(PRIVATE_JAVACFLAGS) $(COMMON_JDK_FLAGS) -- \
+    $(addprefix --bootclasspath ,$(strip $(PRIVATE_BOOTCLASSPATH))) \
+    $(addprefix --classpath ,$(strip $(PRIVATE_ALL_JAVA_HEADER_LIBRARIES))) \
     || ( rm -rf $(dir $@)/classes-turbine ; exit 41 ) && \
-    $(MERGE_ZIPS) -j -stripDir META-INF $@.tmp $@.premerged $(call reverse-list,$(PRIVATE_STATIC_JAVA_HEADER_LIBRARIES)) ; \
+    $(MERGE_ZIPS) -j --ignore-duplicates -stripDir META-INF $@.tmp $@.premerged $(call reverse-list,$(PRIVATE_STATIC_JAVA_HEADER_LIBRARIES)) ; \
 else \
-    $(MERGE_ZIPS) -j -stripDir META-INF $@.tmp $(call reverse-list,$(PRIVATE_STATIC_JAVA_HEADER_LIBRARIES)) ; \
+    $(MERGE_ZIPS) -j --ignore-duplicates -stripDir META-INF $@.tmp $(call reverse-list,$(PRIVATE_STATIC_JAVA_HEADER_LIBRARIES)) ; \
 fi
 $(hide) $(ZIPTIME) $@.tmp
 $(hide) $(call commit-change-for-toc,$@)
@@ -2411,6 +2387,21 @@
     $<
 endef
 
+
+define transform-classes-d8.jar-to-dex
+@echo "target Dex: $(PRIVATE_MODULE)"
+@mkdir -p $(dir $@)
+$(hide) rm -f $(dir $@)classes*.dex $(dir $@)d8_input.jar
+$(hide) $(ZIP2ZIP) -j -i $< -o $(dir $@)d8_input.jar "**/*.class"
+$(hide) $(DX_COMMAND) \
+    --output $(dir $@) \
+    --min-api $(PRIVATE_MIN_SDK_VERSION) \
+    $(subst --main-dex-list=, --main-dex-list , \
+        $(filter-out --core-library --multi-dex --minimal-main-dex,$(PRIVATE_DX_FLAGS))) \
+    $(dir $@)d8_input.jar
+$(hide) rm -f $(dir $@)d8_input.jar
+endef
+
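# Editor's note: illustrative sketch, not part of the patch. The
# subst/filter-out pair above adapts legacy dx flags for d8: dx-only
# flags are dropped, and "--main-dex-list=FILE" becomes the two-token
# "--main-dex-list FILE" form that d8 expects. In isolation:
example_dx_flags := --core-library --main-dex-list=out/main.list
example_d8_flags := $(subst --main-dex-list=, --main-dex-list , \
    $(filter-out --core-library --multi-dex --minimal-main-dex,$(example_dx_flags)))
$(info $(example_d8_flags))   # -> --main-dex-list out/main.list
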
 # Create a mostly-empty .jar file that we'll add to later.
 # The MacOS jar tool doesn't like creating empty jar files,
 # so we need to give it something.
@@ -2558,6 +2549,15 @@
   fi
 endef
 
+# Compress a package using the standard gzip algorithm.
+define compress-package
+$(hide) \
+  mv $@ $@.uncompressed; \
+  $(MINIGZIP) -c $@.uncompressed > $@.compressed; \
+  rm -f $@.uncompressed; \
+  mv $@.compressed $@;
+endef
+
 # Remove dynamic timestamps from packages
 #
 define remove-timestamps-from-package
@@ -2568,11 +2568,12 @@
 #
 define uncompress-dexs
 $(hide) if (zipinfo $@ '*.dex' 2>/dev/null | grep -v ' stor ' >/dev/null) ; then \
-  rm -rf $(dir $@)uncompresseddexs && mkdir $(dir $@)uncompresseddexs; \
-  unzip -q $@ '*.dex' -d $(dir $@)uncompresseddexs && \
+  tmpdir=$@.tmpdir; \
+  rm -rf $$tmpdir && mkdir $$tmpdir; \
+  unzip -q $@ '*.dex' -d $$tmpdir && \
   zip -qd $@ '*.dex' && \
-  ( cd $(dir $@)uncompresseddexs && find . -type f | sort | zip -qD -X -0 ../$(notdir $@) -@ ) && \
-  rm -rf $(dir $@)uncompresseddexs; \
+  ( cd $$tmpdir && find . -type f | sort | zip -qD -X -0 ../$(notdir $@) -@ ) && \
+  rm -rf $$tmpdir; \
   fi
 endef
 
@@ -2652,9 +2653,9 @@
 # $(1): source file
 # $(2): destination file, must end with .xml.
 define copy-xml-file-checked
-$(2): $(1)
+$(2): $(1) $(XMLLINT)
 	@echo "Copy xml: $$@"
-	$(hide) xmllint $$< >/dev/null  # Don't print the xml file to stdout.
+	$(hide) $(XMLLINT) $$< >/dev/null  # Don't print the xml file to stdout.
 	$$(copy-file-to-target)
 endef
 
@@ -2757,18 +2758,53 @@
 # $(3): LOCAL_DEX_PREOPT, if nostripping then leave classes*.dex
 define dexpreopt-copy-jar
 $(2): $(1)
-	@echo $(if $(filter nostripping,$(3)),"Copy: $$@","Copy without dex: $$@")
+	@echo "Copy: $$@"
 	$$(copy-file-to-target)
 	$(if $(filter nostripping,$(3)),,$$(call dexpreopt-remove-classes.dex,$$@))
 endef
 
-# $(1): the .jar or .apk to remove classes.dex
+# $(1): the .jar or .apk from which to remove classes.dex. Note that if all
+# dex files in the archive are uncompressed, dexopt will not copy the dex
+# files, so we should not strip.
 define dexpreopt-remove-classes.dex
-$(hide) zip --quiet --delete $(1) classes.dex; \
+$(hide) if (zipinfo $1 '*.dex' 2>/dev/null | grep -v ' stor ' >/dev/null) ; then \
+zip --quiet --delete $(1) classes.dex; \
 dex_index=2; \
 while zip --quiet --delete $(1) classes$${dex_index}.dex > /dev/null; do \
   let dex_index=dex_index+1; \
-done
+done \
+fi
+endef
+
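# Editor's note: illustrative sketch, not part of the patch. The
# zipinfo guard above only strips when at least one .dex entry is
# compressed: zipinfo marks stored (uncompressed) entries with 'stor',
# so 'grep -v " stor "' succeeds exactly when some entry is deflated.
# For a hypothetical app.apk:
#   zipinfo app.apk '*.dex' | grep -v ' stor '   # exit 0 => stripping is safe
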
+define hiddenapi-copy-dex-files
+$(2): $(1) $(HIDDENAPI) $(INTERNAL_PLATFORM_HIDDENAPI_LIGHT_GREYLIST) \
+      $(INTERNAL_PLATFORM_HIDDENAPI_DARK_GREYLIST) $(INTERNAL_PLATFORM_HIDDENAPI_BLACKLIST)
+	@rm -rf $(dir $(2))
+	@mkdir -p $(dir $(2))
+	find $(dir $(1)) -maxdepth 1 -name "classes*.dex" | sort | \
+		xargs -I{} cp -f {} $(dir $(2))
+	find $(dir $(2)) -name "classes*.dex" | sort | sed 's/^/--dex=/' | \
+		xargs $(HIDDENAPI) --light-greylist=$(INTERNAL_PLATFORM_HIDDENAPI_LIGHT_GREYLIST) \
+		                   --dark-greylist=$(INTERNAL_PLATFORM_HIDDENAPI_DARK_GREYLIST) \
+		                   --blacklist=$(INTERNAL_PLATFORM_HIDDENAPI_BLACKLIST)
+endef
+
+define hiddenapi-copy-soong-jar
+$(2): PRIVATE_FOLDER := $(dir $(2))dex-hiddenapi
+$(2): $(1) $(HIDDENAPI) $(SOONG_ZIP) $(MERGE_ZIPS) $(INTERNAL_PLATFORM_HIDDENAPI_LIGHT_GREYLIST) \
+      $(INTERNAL_PLATFORM_HIDDENAPI_DARK_GREYLIST) $(INTERNAL_PLATFORM_HIDDENAPI_BLACKLIST)
+	@echo "Hidden API: $$@"
+	$$(copy-file-to-target)
+	@rm -rf $${PRIVATE_FOLDER}
+	@mkdir -p $${PRIVATE_FOLDER}
+	unzip -q $(2) 'classes*.dex' -d $${PRIVATE_FOLDER}
+	find $${PRIVATE_FOLDER} -name "classes*.dex" | sort | sed 's/^/--dex=/' | \
+		xargs $(HIDDENAPI) --light-greylist=$(INTERNAL_PLATFORM_HIDDENAPI_LIGHT_GREYLIST) \
+		                   --dark-greylist=$(INTERNAL_PLATFORM_HIDDENAPI_DARK_GREYLIST) \
+		                   --blacklist=$(INTERNAL_PLATFORM_HIDDENAPI_BLACKLIST)
+	$(SOONG_ZIP) -o $${PRIVATE_FOLDER}/classes.dex.jar -C $${PRIVATE_FOLDER} -D $${PRIVATE_FOLDER}
+	$(MERGE_ZIPS) -D -zipToNotStrip $${PRIVATE_FOLDER}/classes.dex.jar -stripFile "classes*.dex" \
+		$(2) $${PRIVATE_FOLDER}/classes.dex.jar $(1)
 endef
 
 ###########################################################
@@ -3018,7 +3054,9 @@
   HOST_DALVIK_JAVA_LIBRARY \
   HOST_DALVIK_STATIC_JAVA_LIBRARY \
   base_rules \
-  HEADER_LIBRARY
+  HEADER_LIBRARY \
+  HOST_TEST_CONFIG \
+  TARGET_TEST_CONFIG
 
 $(foreach s,$(STATS.MODULE_TYPE),$(eval STATS.MODULE_TYPE.$(s) :=))
 define record-module-type
@@ -3386,3 +3424,43 @@
   $(eval ALL_MODULES.$(enforce_rro_source_module).REQUIRED += $(enforce_rro_module)) \
 )
 endef
+
+###########################################################
+## Find system_$(VER) in LOCAL_SDK_VERSION
+##
+## $(1): LOCAL_SDK_VERSION
+###########################################################
+define has-system-sdk-version
+$(filter system_%,$(1))
+endef
+
+###########################################################
+## Get numerical version in LOCAL_SDK_VERSION
+##
+## $(1): LOCAL_SDK_VERSION
+###########################################################
+define get-numeric-sdk-version
+$(filter-out current,\
+  $(if $(call has-system-sdk-version,$(1)),$(patsubst system_%,%,$(1)),$(1)))
+endef
+
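# Editor's note: illustrative sketch, not part of the patch. Expected
# behaviour of the two helpers above:
#   $(call has-system-sdk-version,system_27)    -> system_27 (non-empty)
#   $(call get-numeric-sdk-version,system_27)   -> 27
#   $(call get-numeric-sdk-version,27)          -> 27
#   $(call get-numeric-sdk-version,current)     -> (empty)
$(info $(call get-numeric-sdk-version,system_27))   # prints 27
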
+# Convert to lower case without requiring a shell, since $(shell) results aren't cacheable.
+to-lower=$(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$1))))))))))))))))))))))))))
+
+# Convert to upper case without requiring a shell, since $(shell) results aren't cacheable.
+to-upper=$(subst a,A,$(subst b,B,$(subst c,C,$(subst d,D,$(subst e,E,$(subst f,F,$(subst g,G,$(subst h,H,$(subst i,I,$(subst j,J,$(subst k,K,$(subst l,L,$(subst m,M,$(subst n,N,$(subst o,O,$(subst p,P,$(subst q,Q,$(subst r,R,$(subst s,S,$(subst t,T,$(subst u,U,$(subst v,V,$(subst w,W,$(subst x,X,$(subst y,Y,$(subst z,Z,$1))))))))))))))))))))))))))
+
+# Sanity-check to-lower and to-upper
+lower := abcdefghijklmnopqrstuvwxyz-_
+upper := ABCDEFGHIJKLMNOPQRSTUVWXYZ-_
+
+ifneq ($(lower),$(call to-lower,$(upper)))
+  $(error to-lower sanity check failure)
+endif
+
+ifneq ($(upper),$(call to-upper,$(lower)))
+  $(error to-upper sanity check failure)
+endif
+
+lower :=
+upper :=
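
# Editor's note: illustrative sketch, not part of the patch. Typical
# uses of the shell-free case converters defined above:
$(info $(call to-lower,Hello-World))   # prints hello-world
$(info $(call to-upper,libfoo))        # prints LIBFOO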
diff --git a/core/dex_preopt.mk b/core/dex_preopt.mk
index 0dcb07f..270e5f4 100644
--- a/core/dex_preopt.mk
+++ b/core/dex_preopt.mk
@@ -35,15 +35,27 @@
 # Conditional to building on linux, as dex2oat currently does not work on darwin.
 ifeq ($(HOST_OS),linux)
   WITH_DEXPREOPT ?= true
-# For an eng build only pre-opt the boot image and system server. This gives reasonable performance
-# and still allows a simple workflow: building in frameworks/base and syncing.
   ifeq (eng,$(TARGET_BUILD_VARIANT))
+    # Don't strip for quick development turnarounds.
+    DEX_PREOPT_DEFAULT := nostripping
+    # For an eng build only pre-opt the boot image and system server. This gives reasonable performance
+    # and still allows a simple workflow: building in frameworks/base and syncing.
     WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY ?= true
   endif
-# Add mini-debug-info to the boot classpath unless explicitly asked not to.
+  # Add mini-debug-info to the boot classpath unless explicitly asked not to.
   ifneq (false,$(WITH_DEXPREOPT_DEBUG_INFO))
     PRODUCT_DEX_PREOPT_BOOT_FLAGS += --generate-mini-debug-info
   endif
+
+  # Non-eng linux builds must have dexpreopt enabled so that the system server
+  # doesn't run interpreter-only. b/74209329
+  ifeq (,$(filter eng, $(TARGET_BUILD_VARIANT)))
+    ifneq (true,$(WITH_DEXPREOPT))
+      ifneq (true,$(WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY))
+        $(call pretty-error, DEXPREOPT must be enabled for user and userdebug builds)
+      endif
+    endif
+  endif
 endif
 
 GLOBAL_DEXPREOPT_FLAGS :=
diff --git a/core/dex_preopt_libart.mk b/core/dex_preopt_libart.mk
index 79e72c1..f289c22 100644
--- a/core/dex_preopt_libart.mk
+++ b/core/dex_preopt_libart.mk
@@ -7,11 +7,14 @@
 # Set USE_DEX2OAT_DEBUG to false for only building non-debug versions.
 ifeq ($(USE_DEX2OAT_DEBUG),false)
 DEX2OAT := $(HOST_OUT_EXECUTABLES)/dex2oat$(HOST_EXECUTABLE_SUFFIX)
+PATCHOAT := $(HOST_OUT_EXECUTABLES)/patchoat$(HOST_EXECUTABLE_SUFFIX)
 else
 DEX2OAT := $(HOST_OUT_EXECUTABLES)/dex2oatd$(HOST_EXECUTABLE_SUFFIX)
+PATCHOAT := $(HOST_OUT_EXECUTABLES)/patchoatd$(HOST_EXECUTABLE_SUFFIX)
 endif
 
 DEX2OAT_DEPENDENCY += $(DEX2OAT)
+PATCHOAT_DEPENDENCY += $(PATCHOAT)
 
 # Use the first preloaded-classes file in PRODUCT_COPY_FILES.
 PRELOADED_CLASSES := $(call word-colon,1,$(firstword \
@@ -87,14 +90,19 @@
 # is converted into to boot.art (to match the legacy assumption that boot.art
 # exists), and the rest are converted to boot-<name>.art.
 # In addition, each .art file has an associated .oat file.
-LIBART_TARGET_BOOT_ART_EXTRA_FILES := $(foreach jar,$(wordlist 2,999,$(LIBART_TARGET_BOOT_JARS)),boot-$(jar).art boot-$(jar).oat boot-$(jar).vdex)
-LIBART_TARGET_BOOT_ART_EXTRA_FILES += boot.oat boot.vdex
+LIBART_TARGET_BOOT_ART_EXTRA_FILES := $(foreach jar,$(wordlist 2,999,$(LIBART_TARGET_BOOT_JARS)),boot-$(jar).art boot-$(jar).art.rel boot-$(jar).oat)
+LIBART_TARGET_BOOT_ART_EXTRA_FILES += boot.art.rel boot.oat
+LIBART_TARGET_BOOT_ART_VDEX_FILES := $(foreach jar,$(wordlist 2,999,$(LIBART_TARGET_BOOT_JARS)),boot-$(jar).vdex)
+LIBART_TARGET_BOOT_ART_VDEX_FILES += boot.vdex
 
 # If we use a boot image profile.
 my_use_profile_for_boot_image := $(PRODUCT_USE_PROFILE_FOR_BOOT_IMAGE)
 ifeq (,$(my_use_profile_for_boot_image))
-# If not set, use the default.
-my_use_profile_for_boot_image := false
+# If not set, set the default to true if we are not a PDK build. PDK builds
+# can't build the profile since they don't have frameworks/base.
+ifneq (true,$(TARGET_BUILD_PDK))
+my_use_profile_for_boot_image := true
+endif
 endif
 
 ifeq (true,$(my_use_profile_for_boot_image))
@@ -114,6 +122,7 @@
 	@mkdir -p $(dir $@)
 	ANDROID_LOG_TAGS="*:e" $(PROFMAN) \
 		--create-profile-from=$(PRIVATE_PROFILE_INPUT_LOCATION) \
+		--skip-apk-verification \
 		$(addprefix --apk=,$(LIBART_TARGET_BOOT_DEX_FILES)) \
 		$(addprefix --dex-location=,$(LIBART_TARGET_BOOT_DEX_LOCATIONS)) \
 		--reference-profile-file=$@
@@ -126,6 +135,8 @@
 
 endif
 
+LIBART_TARGET_BOOT_ART_VDEX_INSTALLED_SHARED_FILES := $(addprefix $(PRODUCT_OUT)/$(DEXPREOPT_BOOT_JAR_DIR)/,$(LIBART_TARGET_BOOT_ART_VDEX_FILES))
+
 my_2nd_arch_prefix :=
 include $(BUILD_SYSTEM)/dex_preopt_libart_boot.mk
 
@@ -133,10 +144,24 @@
 ifdef TARGET_2ND_ARCH
 my_2nd_arch_prefix := $(TARGET_2ND_ARCH_VAR_PREFIX)
 include $(BUILD_SYSTEM)/dex_preopt_libart_boot.mk
-my_2nd_arch_prefix :=
 endif
 endif
 
+# Copy the shared vdex files into the boot jar directory and create corresponding symlinks for the primary and secondary archs.
+$(LIBART_TARGET_BOOT_ART_VDEX_INSTALLED_SHARED_FILES) : PRIMARY_ARCH_DIR := $(dir $(DEFAULT_DEX_PREOPT_INSTALLED_IMAGE))
+$(LIBART_TARGET_BOOT_ART_VDEX_INSTALLED_SHARED_FILES) : SECOND_ARCH_DIR := $(dir $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE))
+$(LIBART_TARGET_BOOT_ART_VDEX_INSTALLED_SHARED_FILES) : $(DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME)
+	@echo "Install: $@"
+	@mkdir -p $(dir $@)
+	@rm -f $@
+	$(hide) cp "$(dir $<)$(notdir $@)" "$@"
+	# Make symlinks for both archs. In the single-arch case the symlink just gets overwritten.
+	@mkdir -p $(PRIMARY_ARCH_DIR)
+	$(hide) ln -sf /$(DEXPREOPT_BOOT_JAR_DIR)/$(notdir $@) $(PRIMARY_ARCH_DIR)$(notdir $@)
+	@mkdir -p $(SECOND_ARCH_DIR)
+	$(hide) ln -sf /$(DEXPREOPT_BOOT_JAR_DIR)/$(notdir $@) $(SECOND_ARCH_DIR)$(notdir $@)
+
+my_2nd_arch_prefix :=
 
 ########################################################################
 # For a single jar or APK
diff --git a/core/dex_preopt_libart_boot.mk b/core/dex_preopt_libart_boot.mk
index 8b71198..a5e7e88 100644
--- a/core/dex_preopt_libart_boot.mk
+++ b/core/dex_preopt_libart_boot.mk
@@ -30,6 +30,8 @@
 $(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE := $(PRODUCT_OUT)$($(my_2nd_arch_prefix)LIBART_BOOT_IMAGE_FILENAME)
 $(my_2nd_arch_prefix)LIBART_TARGET_BOOT_ART_EXTRA_INSTALLED_FILES := $(addprefix $(dir $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE)),\
     $(LIBART_TARGET_BOOT_ART_EXTRA_FILES))
+$(my_2nd_arch_prefix)LIBART_TARGET_BOOT_ART_VDEX_INSTALLED_FILES := $(addprefix $(dir $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE)),\
+    $(LIBART_TARGET_BOOT_ART_VDEX_FILES))
 
 # If we have a compiled-classes file, create a parameter.
 COMPILED_CLASSES_FLAGS :=
@@ -45,7 +47,7 @@
 
 # The rule to install boot.art
 # Depends on installed boot.oat, boot-*.art, boot-*.oat
-$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE) : $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME) | $(ACP) $($(my_2nd_arch_prefix)LIBART_TARGET_BOOT_ART_EXTRA_INSTALLED_FILES)
+$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE) : $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME) | $(ACP) $($(my_2nd_arch_prefix)LIBART_TARGET_BOOT_ART_EXTRA_INSTALLED_FILES) $($(my_2nd_arch_prefix)LIBART_TARGET_BOOT_ART_VDEX_INSTALLED_SHARED_FILES)
 	@echo "Install: $@"
 	$(copy-file-to-target)
 
@@ -71,17 +73,36 @@
 # Note: this is technically incorrect. Compiled code contains stack checks which may depend
 #       on ASAN settings.
 
+# Use ANDROID_LOG_TAGS to suppress most logging by default...
+ifeq (,$(ART_BOOT_IMAGE_EXTRA_ARGS))
+DEX2OAT_BOOT_IMAGE_LOG_TAGS := ANDROID_LOG_TAGS="*:e"
+else
+# ...unless the boot image is generated specifically for testing, then allow all logging.
+DEX2OAT_BOOT_IMAGE_LOG_TAGS := ANDROID_LOG_TAGS="*:v"
+endif
+
+# An additional message to print on dex2oat failure.
+DEX2OAT_FAILURE_MESSAGE := ERROR: Dex2oat failed to compile a boot image.
+DEX2OAT_FAILURE_MESSAGE += It is likely that the boot classpath is inconsistent.
+ifeq ($(ONE_SHOT_MAKEFILE),)
+  DEX2OAT_FAILURE_MESSAGE += Rebuild with ART_BOOT_IMAGE_EXTRA_ARGS="--runtime-arg -verbose:verifier" to see verification errors.
+else
+  DEX2OAT_FAILURE_MESSAGE += Build with m, mma, or mmma instead of mm or mmm to remedy the situation.
+endif
+
 $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME): PRIVATE_BOOT_IMAGE_FLAGS := $(my_boot_image_flags)
 $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME): PRIVATE_2ND_ARCH_VAR_PREFIX := $(my_2nd_arch_prefix)
+$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME): PRIVATE_IMAGE_LOCATION := $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_LOCATION)
 # Use dex2oat debug version for better error reporting
-$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME) : $(LIBART_TARGET_BOOT_DEX_FILES) $(PRELOADED_CLASSES) $(COMPILED_CLASSES) $(DIRTY_IMAGE_OBJECTS) $(DEX2OAT_DEPENDENCY) $(my_out_boot_image_profile_location)
+$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME) : $(LIBART_TARGET_BOOT_DEX_FILES) $(PRELOADED_CLASSES) $(COMPILED_CLASSES) $(DIRTY_IMAGE_OBJECTS) $(DEX2OAT_DEPENDENCY) $(PATCHOAT_DEPENDENCY) $(my_out_boot_image_profile_location)
 	@echo "target dex2oat: $@"
 	@mkdir -p $(dir $@)
 	@mkdir -p $(dir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)LIBART_TARGET_BOOT_OAT_UNSTRIPPED))
-	@rm -f $(dir $@)/*.art $(dir $@)/*.oat
+	@rm -f $(dir $@)/*.art $(dir $@)/*.oat $(dir $@)/*.art.rel
 	@rm -f $(dir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)LIBART_TARGET_BOOT_OAT_UNSTRIPPED))/*.art
 	@rm -f $(dir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)LIBART_TARGET_BOOT_OAT_UNSTRIPPED))/*.oat
-	$(hide) ANDROID_LOG_TAGS="*:e" $(DEX2OAT) --runtime-arg -Xms$(DEX2OAT_IMAGE_XMS) \
+	@rm -f $(dir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)LIBART_TARGET_BOOT_OAT_UNSTRIPPED))/*.art.rel
+	$(hide) $(DEX2OAT_BOOT_IMAGE_LOG_TAGS) $(DEX2OAT) --runtime-arg -Xms$(DEX2OAT_IMAGE_XMS) \
 		--runtime-arg -Xmx$(DEX2OAT_IMAGE_XMX) \
 		$(PRIVATE_BOOT_IMAGE_FLAGS) \
 		$(addprefix --dex-file=,$(LIBART_TARGET_BOOT_DEX_FILES)) \
@@ -99,6 +120,12 @@
 		--multi-image --no-inline-from=core-oj.jar \
 		--abort-on-hard-verifier-error \
 		--abort-on-soft-verifier-error \
-		$(PRODUCT_DEX_PREOPT_BOOT_FLAGS) $(GLOBAL_DEXPREOPT_FLAGS) $(ART_BOOT_IMAGE_EXTRA_ARGS)
+		$(PRODUCT_DEX_PREOPT_BOOT_FLAGS) $(GLOBAL_DEXPREOPT_FLAGS) $(ART_BOOT_IMAGE_EXTRA_ARGS) \
+		|| ( echo "$(DEX2OAT_FAILURE_MESSAGE)" ; false ) && \
+	$(DEX2OAT_BOOT_IMAGE_LOG_TAGS) ANDROID_ROOT=$(PRODUCT_OUT)/system ANDROID_DATA=$(dir $@) $(PATCHOAT) \
+		--input-image-location=$(PRIVATE_IMAGE_LOCATION) \
+		--output-image-relocation-directory=$(dir $@) \
+		--instruction-set=$($(PRIVATE_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_ARCH) \
+		--base-offset-delta=0x10000000
 
 endif
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index f420b18..2b2800b 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -1,6 +1,20 @@
 # dexpreopt_odex_install.mk is used to define odex creation rules for JARs and APKs
 # This file depends on variables set in base_rules.mk
-# Output variables: LOCAL_DEX_PREOPT, built_odex, dexpreopt_boot_jar_module
+# Output variables: LOCAL_DEX_PREOPT, LOCAL_UNCOMPRESS_DEX, built_odex,
+#                   dexpreopt_boot_jar_module
+
+# We explicitly uncompress APKs of privileged apps, and of apps used by
+# privileged apps.
+LOCAL_UNCOMPRESS_DEX := false
+ifneq (true,$(DONT_UNCOMPRESS_PRIV_APPS_DEXS))
+ifeq (true,$(LOCAL_PRIVILEGED_MODULE))
+  LOCAL_UNCOMPRESS_DEX := true
+else
+  ifneq (,$(filter $(PRODUCT_LOADED_BY_PRIVILEGED_MODULES), $(LOCAL_MODULE)))
+    LOCAL_UNCOMPRESS_DEX := true
+  endif  # PRODUCT_LOADED_BY_PRIVILEGED_MODULES
+endif  # LOCAL_PRIVILEGED_MODULE
+endif  # DONT_UNCOMPRESS_PRIV_APPS_DEXS
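
# Editor's note: illustrative sketch, not part of the patch. The
# membership test above uses the product list as the filter patterns,
# which works because module names contain no '%' wildcards. In
# isolation (hypothetical names):
example_privileged_users := libfoo libbar
example_module := libbar
ifneq (,$(filter $(example_privileged_users),$(example_module)))
  $(info $(example_module) is loaded by a privileged app)
endif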
 
 # Setting LOCAL_DEX_PREOPT based on WITH_DEXPREOPT, LOCAL_DEX_PREOPT, etc
 LOCAL_DEX_PREOPT := $(strip $(LOCAL_DEX_PREOPT))
@@ -46,14 +60,27 @@
 endif
 endif
 
-# if installing into system, and odex are being installed into system_other, don't strip
-ifeq ($(BOARD_USES_SYSTEM_OTHER_ODEX),true)
 ifeq ($(LOCAL_DEX_PREOPT),true)
+
+# Don't strip with dexes we explicitly uncompress (dexopt will not store the dex code).
+ifeq ($(LOCAL_UNCOMPRESS_DEX),true)
+LOCAL_DEX_PREOPT := nostripping
+endif  # LOCAL_UNCOMPRESS_DEX
+
+# system_other isn't there for an OTA, so don't strip
+# if module is on system, and odex is on system_other.
+ifeq ($(BOARD_USES_SYSTEM_OTHER_ODEX),true)
 ifneq ($(call install-on-system-other, $(my_module_path)),)
 LOCAL_DEX_PREOPT := nostripping
-endif
-endif
-endif
+endif  # install-on-system-other
+endif  # BOARD_USES_SYSTEM_OTHER_ODEX
+
+# We also don't strip if all dexes are uncompressed (dexopt will not store the dex code),
+# but that would require inspecting the source file, which is too early at this point
+# (we don't yet know whether the source file will actually be used).
+# See dexpreopt-remove-classes.dex.
+
+endif  # LOCAL_DEX_PREOPT
 
 built_odex :=
 built_vdex :=
@@ -65,17 +92,10 @@
 built_installed_vdex :=
 built_installed_art :=
 
-ifdef LOCAL_DEX_PREOPT
-
 ifeq (false,$(WITH_DEX_PREOPT_GENERATE_PROFILE))
 LOCAL_DEX_PREOPT_GENERATE_PROFILE := false
 endif
 
-ifdef LOCAL_VENDOR_MODULE
-ifeq (true,$(LOCAL_DEX_PREOPT_GENERATE_PROFILE))
-$(error profiles are not supported for vendor modules)
-endif
-else
 ifndef LOCAL_DEX_PREOPT_GENERATE_PROFILE
 # If LOCAL_DEX_PREOPT_GENERATE_PROFILE is not defined, default it based on the existence of the
 # profile class listing. TODO: Use product specific directory here.
@@ -83,14 +103,56 @@
 LOCAL_DEX_PREOPT_PROFILE_CLASS_LISTING := $(my_classes_directory)/$(LOCAL_MODULE).prof.txt
 ifneq (,$(wildcard $(LOCAL_DEX_PREOPT_PROFILE_CLASS_LISTING)))
 # Profile listing exists, use it to generate the profile.
-ifeq ($(LOCAL_DEX_PREOPT_APP_IMAGE),)
-LOCAL_DEX_PREOPT_APP_IMAGE := true
-endif
 LOCAL_DEX_PREOPT_GENERATE_PROFILE := true
 endif
 endif
+
+ifeq (true,$(LOCAL_DEX_PREOPT_GENERATE_PROFILE))
+
+ifdef LOCAL_VENDOR_MODULE
+$(call pretty-error, Internal error: profiles are not supported for vendor modules)
+else
+LOCAL_DEX_PREOPT_APP_IMAGE := true
 endif
 
+ifndef LOCAL_DEX_PREOPT_PROFILE_CLASS_LISTING
+$(call pretty-error,Must have specified class listing (LOCAL_DEX_PREOPT_PROFILE_CLASS_LISTING))
+endif
+ifeq (,$(dex_preopt_profile_src_file))
+$(call pretty-error, Internal error: dex_preopt_profile_src_file must be set)
+endif
+my_built_profile := $(dir $(LOCAL_BUILT_MODULE))/profile.prof
+my_dex_location := $(patsubst $(PRODUCT_OUT)%,%,$(LOCAL_INSTALLED_MODULE))
+# Remove compressed APK extension.
+my_dex_location := $(patsubst %.gz,%,$(my_dex_location))
+$(my_built_profile): PRIVATE_BUILT_MODULE := $(dex_preopt_profile_src_file)
+$(my_built_profile): PRIVATE_DEX_LOCATION := $(my_dex_location)
+$(my_built_profile): PRIVATE_SOURCE_CLASSES := $(LOCAL_DEX_PREOPT_PROFILE_CLASS_LISTING)
+$(my_built_profile): $(LOCAL_DEX_PREOPT_PROFILE_CLASS_LISTING)
+$(my_built_profile): $(PROFMAN)
+$(my_built_profile): $(dex_preopt_profile_src_file)
+$(my_built_profile):
+	$(hide) mkdir -p $(dir $@)
+	ANDROID_LOG_TAGS="*:e" $(PROFMAN) \
+		--create-profile-from=$(PRIVATE_SOURCE_CLASSES) \
+		--apk=$(PRIVATE_BUILT_MODULE) \
+		--dex-location=$(PRIVATE_DEX_LOCATION) \
+		--reference-profile-file=$@
+dex_preopt_profile_src_file:=
+
+# Remove compressed APK extension.
+my_installed_profile := $(patsubst %.gz,%,$(LOCAL_INSTALLED_MODULE)).prof
+
+$(eval $(call copy-one-file,$(my_built_profile),$(my_installed_profile)))
+build_installed_profile:=$(my_built_profile):$(my_installed_profile)
+else
+build_installed_profile:=
+my_installed_profile :=
+endif
+
+ifdef LOCAL_DEX_PREOPT
+
 dexpreopt_boot_jar_module := $(filter $(DEXPREOPT_BOOT_JARS_MODULES),$(LOCAL_MODULE))
 ifdef dexpreopt_boot_jar_module
 # For libart, the boot jars' odex files are replaced by $(DEFAULT_DEX_PREOPT_INSTALLED_IMAGE).
@@ -146,32 +208,9 @@
 
 ifdef built_odex
 ifeq (true,$(LOCAL_DEX_PREOPT_GENERATE_PROFILE))
-ifndef LOCAL_DEX_PREOPT_PROFILE_CLASS_LISTING
-$(call pretty-error,Must have specified class listing (LOCAL_DEX_PREOPT_PROFILE_CLASS_LISTING))
-endif
-my_built_profile := $(dir $(LOCAL_BUILT_MODULE))/profile.prof
-my_dex_location := $(patsubst $(PRODUCT_OUT)%,%,$(LOCAL_INSTALLED_MODULE))
 $(built_odex): $(my_built_profile)
 $(built_odex): PRIVATE_PROFILE_PREOPT_FLAGS := --profile-file=$(my_built_profile)
-$(my_built_profile): PRIVATE_BUILT_MODULE := $(LOCAL_BUILT_MODULE)
-$(my_built_profile): PRIVATE_DEX_LOCATION := $(my_dex_location)
-$(my_built_profile): PRIVATE_SOURCE_CLASSES := $(LOCAL_DEX_PREOPT_PROFILE_CLASS_LISTING)
-$(my_built_profile): $(LOCAL_DEX_PREOPT_PROFILE_CLASS_LISTING)
-$(my_built_profile): $(PROFMAN)
-$(my_built_profile): $(LOCAL_BUILT_MODULE)
-$(my_built_profile):
-	$(hide) mkdir -p $(dir $@)
-	ANDROID_LOG_TAGS="*:e" $(PROFMAN) \
-		--create-profile-from=$(PRIVATE_SOURCE_CLASSES) \
-		--apk=$(PRIVATE_BUILT_MODULE) \
-		--dex-location=$(PRIVATE_DEX_LOCATION) \
-		--reference-profile-file=$@
-my_installed_profile := $(LOCAL_INSTALLED_MODULE).prof
-$(eval $(call copy-one-file,$(my_built_profile),$(my_installed_profile)))
-build_installed_profile:=$(my_built_profile):$(my_installed_profile)
 else
-build_installed_profile:=
-my_installed_profile :=
 $(built_odex): PRIVATE_PROFILE_PREOPT_FLAGS :=
 endif
 
@@ -187,6 +226,12 @@
 my_system_server_compiler_filter := speed
 endif
 
+my_default_compiler_filter := $(PRODUCT_DEX_PREOPT_DEFAULT_COMPILER_FILTER)
+ifeq (,$(my_default_compiler_filter))
+# If no default compiler filter is specified, default to 'quicken' to save on storage.
+my_default_compiler_filter := quicken
+endif
+
 ifeq (,$(filter --compiler-filter=%, $(LOCAL_DEX_PREOPT_FLAGS)))
   ifneq (,$(filter $(PRODUCT_SYSTEM_SERVER_JARS),$(LOCAL_MODULE)))
     # Jars of system server, use the product option if it is set, speed otherwise.
@@ -201,13 +246,39 @@
         # For non system server jars, use speed-profile when we have a profile.
         LOCAL_DEX_PREOPT_FLAGS += --compiler-filter=speed-profile
       else
-        # If no compiler filter is specified, default to 'quicken' to save on storage.
-        LOCAL_DEX_PREOPT_FLAGS += --compiler-filter=quicken
+        LOCAL_DEX_PREOPT_FLAGS += --compiler-filter=$(my_default_compiler_filter)
       endif
     endif
   endif
 endif
 
+my_generate_dm := $(PRODUCT_DEX_PREOPT_GENERATE_DM_FILES)
+ifeq (,$(filter $(LOCAL_DEX_PREOPT_FLAGS),--compiler-filter=verify))
+# Generating DM files only makes sense for the 'verify' compiler filter; skip it for APKs built with any other filter.
+my_generate_dm := false
+endif
+
+# No reason to use a dm file if the dex is already uncompressed.
+ifeq ($(LOCAL_UNCOMPRESS_DEX),true)
+my_generate_dm := false
+endif
+
+ifeq (true,$(my_generate_dm))
+LOCAL_DEX_PREOPT_FLAGS += --copy-dex-files=false
+LOCAL_DEX_PREOPT := nostripping
+my_built_dm := $(dir $(LOCAL_BUILT_MODULE))generated.dm
+my_installed_dm := $(patsubst %.apk,%,$(LOCAL_INSTALLED_MODULE)).dm
+my_copied_vdex := $(dir $(LOCAL_BUILT_MODULE))primary.vdex
+$(eval $(call copy-one-file,$(built_vdex),$(my_copied_vdex)))
+$(my_built_dm): PRIVATE_INPUT_VDEX := $(my_copied_vdex)
+$(my_built_dm): $(my_copied_vdex) $(ZIPTIME)
+	$(hide) mkdir -p $(dir $@)
+	$(hide) rm -f $@
+	$(hide) zip -qD -j -X -9 $@ $(PRIVATE_INPUT_VDEX)
+	$(ZIPTIME) $@
+$(eval $(call copy-one-file,$(my_built_dm),$(my_installed_dm)))
+endif
+
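# Editor's note: illustrative sketch, not part of the patch. A .dm
# ("dex metadata") file as built above is just a deterministic zip
# holding the vdex, installed next to the APK instead of .odex/.vdex
# files. For a hypothetical app.apk, the recipe amounts to:
#   zip -qD -j -X -9 app.dm primary.vdex
#   ziptime app.dm
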
 # PRODUCT_SYSTEM_SERVER_DEBUG_INFO overrides WITH_DEXPREOPT_DEBUG_INFO.
 my_system_server_debug_info := $(PRODUCT_SYSTEM_SERVER_DEBUG_INFO)
 ifeq (,$(filter eng, $(TARGET_BUILD_VARIANT)))
@@ -223,20 +294,35 @@
   endif
 endif
 
+# Set the compiler reason to 'prebuilt' to identify the oat files produced
+# during the build, as opposed to compiled on the device.
+LOCAL_DEX_PREOPT_FLAGS += --compilation-reason=prebuilt
+
 $(built_odex): PRIVATE_DEX_PREOPT_FLAGS := $(LOCAL_DEX_PREOPT_FLAGS)
 $(built_vdex): $(built_odex)
 $(built_art): $(built_odex)
 endif
 
-# Add the installed_odex to the list of installed files for this module.
-ALL_MODULES.$(my_register_name).INSTALLED += $(installed_odex)
-ALL_MODULES.$(my_register_name).INSTALLED += $(installed_vdex)
-ALL_MODULES.$(my_register_name).INSTALLED += $(installed_art)
-ALL_MODULES.$(my_register_name).INSTALLED += $(my_installed_profile)
-ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(built_installed_odex)
-ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(built_installed_vdex)
-ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(built_installed_art)
-ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(build_installed_profile)
+ifneq (true,$(my_generate_dm))
+  # Add the installed_odex to the list of installed files for this module if we aren't generating a
+  # dm file.
+  ALL_MODULES.$(my_register_name).INSTALLED += $(installed_odex)
+  ALL_MODULES.$(my_register_name).INSTALLED += $(installed_vdex)
+  ALL_MODULES.$(my_register_name).INSTALLED += $(installed_art)
+
+  ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(built_installed_odex)
+  ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(built_installed_vdex)
+  ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(built_installed_art)
+
+  # Make sure to install the .odex and .vdex when you run "make <module_name>"
+  $(my_all_targets): $(installed_odex) $(installed_vdex) $(installed_art)
+else
+  ALL_MODULES.$(my_register_name).INSTALLED += $(my_installed_dm)
+  ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(my_built_dm) $(my_installed_dm)
+
+  # Make sure to install the .dm when you run "make <module_name>"
+  $(my_all_targets): $(my_installed_dm)
+endif
 
 # Record dex-preopt config.
 DEXPREOPT.$(LOCAL_MODULE).DEX_PREOPT := $(LOCAL_DEX_PREOPT)
@@ -250,8 +336,10 @@
 DEXPREOPT.MODULES.$(LOCAL_MODULE_CLASS) := $(sort \
   $(DEXPREOPT.MODULES.$(LOCAL_MODULE_CLASS)) $(LOCAL_MODULE))
 
-
-# Make sure to install the .odex and .vdex when you run "make <module_name>"
-$(my_all_targets): $(installed_odex) $(installed_vdex) $(installed_art) $(my_installed_profile)
-
 endif # LOCAL_DEX_PREOPT
+
+# Profile doesn't depend on LOCAL_DEX_PREOPT.
+ALL_MODULES.$(my_register_name).INSTALLED += $(my_installed_profile)
+ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(build_installed_profile)
+
+$(my_all_targets): $(my_installed_profile)
diff --git a/core/dpi_specific_apk.mk b/core/dpi_specific_apk.mk
index ac5c4a9..f32daf5 100644
--- a/core/dpi_specific_apk.mk
+++ b/core/dpi_specific_apk.mk
@@ -18,8 +18,8 @@
 $(built_dpi_apk): PRIVATE_ASSET_DIR := $(LOCAL_ASSET_DIR)
 $(built_dpi_apk): PRIVATE_AAPT_INCLUDES := $(all_library_res_package_exports)
 $(built_dpi_apk): PRIVATE_RESOURCE_LIST := $(all_res_assets)
-ifneq (,$(filter-out current system_current test_current, $(LOCAL_SDK_VERSION)))
-$(built_dpi_apk): PRIVATE_DEFAULT_APP_TARGET_SDK := $(LOCAL_SDK_VERSION)
+ifneq (,$(filter-out current system_current test_current core_current, $(LOCAL_SDK_VERSION)))
+$(built_dpi_apk): PRIVATE_DEFAULT_APP_TARGET_SDK := $(call get-numeric-sdk-version,$(LOCAL_SDK_VERSION))
 else
 $(built_dpi_apk): PRIVATE_DEFAULT_APP_TARGET_SDK := $(DEFAULT_APP_TARGET_SDK)
 endif
diff --git a/core/droiddoc.mk b/core/droiddoc.mk
index 2bac984..b174f31 100644
--- a/core/droiddoc.mk
+++ b/core/droiddoc.mk
@@ -71,12 +71,23 @@
   else ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),test_current)
     LOCAL_JAVA_LIBRARIES := android_test_stubs_current $(LOCAL_JAVA_LIBRARIES)
     $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, android_test_stubs_current)
+  else ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),core_current)
+    LOCAL_JAVA_LIBRARIES := core.current.stubs $(LOCAL_JAVA_LIBRARIES)
+    $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, core.current.stubs)
   else
-    LOCAL_JAVA_LIBRARIES := sdk_v$(LOCAL_SDK_VERSION) $(LOCAL_JAVA_LIBRARIES)
-    $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, sdk_v$(LOCAL_SDK_VERSION))
+    # core_<ver> is subset of <ver>. Instead of defining a prebuilt lib for core_<ver>,
+    # use the stub for <ver> when building for apps.
+    _version := $(patsubst core_%,%,$(LOCAL_SDK_VERSION))
+    LOCAL_JAVA_LIBRARIES := sdk_v$(_version) $(LOCAL_JAVA_LIBRARIES)
+    $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, sdk_v$(_version))
+    _version :=
   endif
 else
-  LOCAL_JAVA_LIBRARIES := core-oj core-libart ext framework $(LOCAL_JAVA_LIBRARIES)
+  ifeq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
+    LOCAL_JAVA_LIBRARIES := core-oj core-libart
+  else
+    LOCAL_JAVA_LIBRARIES := core-oj core-libart ext framework $(LOCAL_JAVA_LIBRARIES)
+  endif
   $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, core-oj):$(call java-lib-files, core-libart)
 endif  # LOCAL_SDK_VERSION
 LOCAL_JAVA_LIBRARIES := $(sort $(LOCAL_JAVA_LIBRARIES))
@@ -91,6 +102,7 @@
 $(full_target): PRIVATE_SOURCE_PATH := $(call normalize-path-list,$(LOCAL_DROIDDOC_SOURCE_PATH))
 $(full_target): PRIVATE_JAVA_FILES := $(filter %.java,$(full_src_files))
 $(full_target): PRIVATE_JAVA_FILES += $(addprefix $($(my_prefix)OUT_COMMON_INTERMEDIATES)/, $(filter %.java,$(LOCAL_INTERMEDIATE_SOURCES)))
+$(full_target): PRIVATE_JAVA_FILES += $(filter %.java,$(LOCAL_GENERATED_SOURCES))
 $(full_target): PRIVATE_SRCJARS := $(LOCAL_SRCJARS)
 $(full_target): PRIVATE_SOURCE_INTERMEDIATES_DIR := $(intermediates.COMMON)/src
 $(full_target): PRIVATE_SRCJAR_INTERMEDIATES_DIR := $(intermediates.COMMON)/srcjars
@@ -172,19 +184,20 @@
 # keep -bootclasspath here since it works in combination with -source 1.8.
 $(full_target): \
         $(full_src_files) \
+        $(LOCAL_GENERATED_SOURCES) \
         $(droiddoc_templates) \
         $(droiddoc) \
         $(html_dir_files) \
         $(full_java_libs) \
-        $(EXTRACT_SRCJARS) \
+        $(ZIPSYNC) \
         $(LOCAL_SRCJARS) \
         $(LOCAL_ADDITIONAL_DEPENDENCIES)
 	@echo Docs droiddoc: $(PRIVATE_OUT_DIR)
 	$(hide) mkdir -p $(dir $@)
-	$(hide) rm -rf $(PRIVATE_STUB_OUT_DIR) $(PRIVATE_SRCJAR_INTERMEDIATES_DIR)
+	$(hide) rm -rf $(PRIVATE_STUB_OUT_DIR)
 	$(call prepare-doc-source-list,$(PRIVATE_SRC_LIST_FILE),$(PRIVATE_JAVA_FILES), \
 			$(PRIVATE_SOURCE_INTERMEDIATES_DIR) $(PRIVATE_ADDITIONAL_JAVA_DIR))
-	$(EXTRACT_SRCJARS) $(PRIVATE_SRCJAR_INTERMEDIATES_DIR) $(PRIVATE_SRCJAR_LIST_FILE) $(PRIVATE_SRCJARS)
+	$(ZIPSYNC) -d $(PRIVATE_SRCJAR_INTERMEDIATES_DIR) -l $(PRIVATE_SRCJAR_LIST_FILE) -f "*.java" $(PRIVATE_SRCJARS)
 	$(hide) ( \
 		$(JAVADOC) \
                 -encoding UTF-8 \
@@ -230,13 +243,12 @@
 # For OpenJDK 8 we can use -bootclasspath to define the core libraries code.
 $(full_target): PRIVATE_BOOTCLASSPATH_ARG := $(addprefix -bootclasspath ,$(PRIVATE_BOOTCLASSPATH))
 endif
-$(full_target): $(full_src_files) $(full_java_libs) $(EXTRACT_SRCJARS) $(LOCAL_SRCJARS) $(LOCAL_ADDITIONAL_DEPENDENCIES)
+$(full_target): $(full_src_files) $(LOCAL_GENERATED_SOURCES) $(full_java_libs) $(ZIPSYNC) $(LOCAL_SRCJARS) $(LOCAL_ADDITIONAL_DEPENDENCIES)
 	@echo Docs javadoc: $(PRIVATE_OUT_DIR)
 	@mkdir -p $(dir $@)
-	rm -rf $(PRIVATE_SRCJAR_INTERMEDIATES_DIR)
 	$(call prepare-doc-source-list,$(PRIVATE_SRC_LIST_FILE),$(PRIVATE_JAVA_FILES), \
 			$(PRIVATE_SOURCE_INTERMEDIATES_DIR) $(PRIVATE_ADDITIONAL_JAVA_DIR))
-	$(EXTRACT_SRCJARS) $(PRIVATE_SRCJAR_INTERMEDIATES_DIR) $(PRIVATE_SRCJAR_LIST_FILE) $(PRIVATE_SRCJARS)
+	$(ZIPSYNC) -d $(PRIVATE_SRCJAR_INTERMEDIATES_DIR) -l $(PRIVATE_SRCJAR_LIST_FILE) -f "*.java" $(PRIVATE_SRCJARS)
 	$(hide) ( \
 		$(JAVADOC) \
                 -encoding UTF-8 \
diff --git a/core/dynamic_binary.mk b/core/dynamic_binary.mk
index f44b8a8..ebbe71c 100644
--- a/core/dynamic_binary.mk
+++ b/core/dynamic_binary.mk
@@ -145,15 +145,15 @@
 
 ifeq ($(my_strip_module),mini-debug-info)
 # Strip the binary, but keep debug frames and symbol table in a compressed .gnu_debugdata section.
-$(strip_output): $(strip_input) | $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_STRIP) $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OBJCOPY) $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_NM)
+$(strip_output): $(strip_input) $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_STRIP) $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OBJCOPY) $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_NM) $(XZ)
 	$(transform-to-stripped-keep-mini-debug-info)
 else ifneq ($(filter true no_debuglink,$(my_strip_module)),)
 # Strip the binary
-$(strip_output): $(strip_input) | $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_STRIP)
+$(strip_output): $(strip_input) $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_STRIP)
 	$(transform-to-stripped)
 else ifeq ($(my_strip_module),keep_symbols)
 # Strip only the debug frames, but leave the symbol table.
-$(strip_output): $(strip_input) | $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_STRIP)
+$(strip_output): $(strip_input) $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_STRIP)
 	$(transform-to-stripped-keep-symbols)
 
 # A product may be configured to strip everything in some build variants.
diff --git a/core/empty_test_config.xml b/core/empty_test_config.xml
new file mode 100644
index 0000000..7c9daff
--- /dev/null
+++ b/core/empty_test_config.xml
@@ -0,0 +1,19 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2017 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<!-- No AndroidTest.xml was provided and the manifest does not include
+     instrumentation, hence this apk is not instrumentable.
+-->
+<configuration description="Empty Configuration" />
diff --git a/core/envsetup.mk b/core/envsetup.mk
index ddf903c..12b5869 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -179,6 +179,7 @@
 TARGET_COPY_OUT_ASAN := $(TARGET_COPY_OUT_DATA)/asan
 TARGET_COPY_OUT_OEM := oem
 TARGET_COPY_OUT_ODM := odm
+TARGET_COPY_OUT_PRODUCT := product
 TARGET_COPY_OUT_ROOT := root
 TARGET_COPY_OUT_RECOVERY := recovery
 
@@ -198,6 +199,17 @@
 TARGET_COPY_OUT_VENDOR := $(_vendor_path_placeholder)
 ###########################################
 
+###########################################
+# Define TARGET_COPY_OUT_PRODUCT to a placeholder, because at this point
+# we don't know whether the device wants to build a separate product.img
+# or to build the product files into system.img.
+# A device can set TARGET_COPY_OUT_PRODUCT to "product" in its
+# BoardConfig.mk.
+# We'll substitute with the real value after loading BoardConfig.mk.
+_product_path_placeholder := ||PRODUCT-PATH-PH||
+TARGET_COPY_OUT_PRODUCT := $(_product_path_placeholder)
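+# For example, a hypothetical device that wants a separate product.img
+# could set, in its BoardConfig.mk:
+#
+#   TARGET_COPY_OUT_PRODUCT := product
+#   BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE := ext4
+#
+# while leaving it untouched keeps product files under system/product.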
+###########################################
+
 #################################################################
 # Set up minimal BOOTCLASSPATH list of jars to build/execute
 # java code with dalvikvm/art.
@@ -273,6 +285,29 @@
 else ifdef BOARD_USES_VENDORIMAGE
 $(error TARGET_COPY_OUT_VENDOR must be set to 'vendor' to use a vendor image)
 endif
+
+###########################################
+# Now we can substitute with the real value of TARGET_COPY_OUT_PRODUCT
+ifeq ($(TARGET_COPY_OUT_PRODUCT),$(_product_path_placeholder))
+TARGET_COPY_OUT_PRODUCT := system/product
+else ifeq ($(filter product system/product,$(TARGET_COPY_OUT_PRODUCT)),)
+$(error TARGET_COPY_OUT_PRODUCT must be either 'product' or 'system/product', seeing '$(TARGET_COPY_OUT_PRODUCT)'.)
+endif
+PRODUCT_COPY_FILES := $(subst $(_product_path_placeholder),$(TARGET_COPY_OUT_PRODUCT),$(PRODUCT_COPY_FILES))
+
+BOARD_USES_PRODUCTIMAGE :=
+ifdef BOARD_PREBUILT_PRODUCTIMAGE
+BOARD_USES_PRODUCTIMAGE := true
+endif
+ifdef BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE
+BOARD_USES_PRODUCTIMAGE := true
+endif
+ifeq ($(TARGET_COPY_OUT_PRODUCT),product)
+BOARD_USES_PRODUCTIMAGE := true
+else ifdef BOARD_USES_PRODUCTIMAGE
+$(error TARGET_COPY_OUT_PRODUCT must be set to 'product' to use a product image)
+endif
+
 ###########################################
 # Ensure that only TARGET_RECOVERY_UPDATER_LIBS *or* AB_OTA_UPDATER is set.
 TARGET_RECOVERY_UPDATER_LIBS ?=
@@ -285,6 +320,11 @@
 endif
 
 # Check BOARD_VNDK_VERSION
+define check_vndk_version
+  $(eval vndk_path := prebuilts/vndk/v$(1)) \
+  $(if $(wildcard $(vndk_path)/*/Android.bp),,$(error VNDK version $(1) not found))
+endef
+
 ifdef BOARD_VNDK_VERSION
   ifneq ($(BOARD_VNDK_VERSION),current)
     $(error BOARD_VNDK_VERSION: Only "current" is implemented)
@@ -295,6 +335,17 @@
   TARGET_VENDOR_TEST_SUFFIX :=
 endif
 
+ifdef PRODUCT_EXTRA_VNDK_VERSIONS
+  $(foreach v,$(PRODUCT_EXTRA_VNDK_VERSIONS),$(call check_vndk_version,$(v)))
+endif
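+# For example, a product that also ships VNDK snapshots for versions 27
+# and 28 (hypothetical values) would set
+#
+#   PRODUCT_EXTRA_VNDK_VERSIONS := 27 28
+#
+# and each listed version must exist as prebuilts/vndk/v<version>.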
+
+# Ensure that BOARD_SYSTEMSDK_VERSIONS are all within PLATFORM_SYSTEMSDK_VERSIONS
+_unsupported_systemsdk_versions := $(filter-out $(PLATFORM_SYSTEMSDK_VERSIONS),$(BOARD_SYSTEMSDK_VERSIONS))
+ifneq (,$(_unsupported_systemsdk_versions))
+  $(error System SDK versions '$(_unsupported_systemsdk_versions)' in BOARD_SYSTEMSDK_VERSIONS are not supported.\
+          Supported versions are $(PLATFORM_SYSTEMSDK_VERSIONS))
+endif
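+# Illustration of the check above, with hypothetical values: given
+# PLATFORM_SYSTEMSDK_VERSIONS := 28 29 and BOARD_SYSTEMSDK_VERSIONS := 28 30,
+# the $(filter-out ...) leaves "30" and the build stops with the error above.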
+
 # ---------------------------------------------------------------
 # Set up configuration for target machine.
 # The following must be set:
@@ -357,12 +408,13 @@
 HOST_OUT_SDK_ADDON := $(HOST_OUT)/sdk_addon
 HOST_OUT_NATIVE_TESTS := $(HOST_OUT)/nativetest64
 HOST_OUT_COVERAGE := $(HOST_OUT)/coverage
+HOST_OUT_TESTCASES := $(HOST_OUT)/testcases
 
 HOST_CROSS_OUT_EXECUTABLES := $(HOST_CROSS_OUT)/bin
 HOST_CROSS_OUT_SHARED_LIBRARIES := $(HOST_CROSS_OUT)/lib
 HOST_CROSS_OUT_NATIVE_TESTS := $(HOST_CROSS_OUT)/nativetest
 HOST_CROSS_OUT_COVERAGE := $(HOST_CROSS_OUT)/coverage
-HOST_OUT_TESTCASES := $(HOST_OUT)/testcases
+HOST_CROSS_OUT_TESTCASES := $(HOST_CROSS_OUT)/testcases
 
 HOST_OUT_INTERMEDIATES := $(HOST_OUT)/obj
 HOST_OUT_INTERMEDIATE_LIBRARIES := $(HOST_OUT_INTERMEDIATES)/lib
@@ -382,6 +434,8 @@
 
 HOST_CROSS_OUT_GEN := $(HOST_CROSS_OUT)/gen
 
+HOST_OUT_TEST_CONFIG := $(HOST_OUT)/test_config
+
 # Out for HOST_2ND_ARCH
 HOST_2ND_ARCH_VAR_PREFIX := 2ND_
 HOST_2ND_ARCH_MODULE_SUFFIX := _32
@@ -457,6 +511,7 @@
 TARGET_OUT_NOTICE_FILES := $(TARGET_OUT_INTERMEDIATES)/NOTICE_FILES
 TARGET_OUT_FAKE := $(PRODUCT_OUT)/fake_packages
 TARGET_OUT_TESTCASES := $(PRODUCT_OUT)/testcases
+TARGET_OUT_TEST_CONFIG := $(PRODUCT_OUT)/test_config
 
 ifeq ($(SANITIZE_LITE),true)
 # When using SANITIZE_LITE, APKs must not be packaged with sanitized libraries, as they will not
@@ -606,6 +661,39 @@
 endif
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_ODM_APPS := $(TARGET_OUT_ODM_APPS)
 
+TARGET_OUT_PRODUCT := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_PRODUCT)
+ifneq ($(filter address,$(SANITIZE_TARGET)),)
+target_out_product_shared_libraries_base := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_ASAN)/system
+ifeq ($(SANITIZE_LITE),true)
+# When using SANITIZE_LITE, APKs must not be packaged with sanitized libraries, as they will not
+# work with unsanitized app_process. For simplicity, generate APKs into /data/asan/.
+target_out_product_app_base := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_ASAN)/product
+else
+target_out_product_app_base := $(TARGET_OUT_PRODUCT)
+endif
+else
+target_out_product_shared_libraries_base := $(TARGET_OUT)
+target_out_product_app_base := $(TARGET_OUT_PRODUCT)
+endif
+
+ifeq ($(TARGET_IS_64_BIT),true)
+TARGET_OUT_PRODUCT_SHARED_LIBRARIES := $(target_out_product_shared_libraries_base)/lib64
+else
+TARGET_OUT_PRODUCT_SHARED_LIBRARIES := $(target_out_product_shared_libraries_base)/lib
+endif
+TARGET_OUT_PRODUCT_JAVA_LIBRARIES:= $(TARGET_OUT_PRODUCT)/framework
+TARGET_OUT_PRODUCT_APPS := $(target_out_product_app_base)/app
+TARGET_OUT_PRODUCT_APPS_PRIVILEGED := $(target_out_product_app_base)/priv-app
+TARGET_OUT_PRODUCT_ETC := $(TARGET_OUT_PRODUCT)/etc
+
+ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_PRODUCT_SHARED_LIBRARIES := $(target_out_product_shared_libraries_base)/lib/$(TARGET_2ND_ARCH)
+else
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_PRODUCT_SHARED_LIBRARIES := $(target_out_product_shared_libraries_base)/lib
+endif
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_PRODUCT_APPS := $(TARGET_OUT_PRODUCT_APPS)
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_PRODUCT_APPS_PRIVILEGED := $(TARGET_OUT_PRODUCT_APPS_PRIVILEGED)
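+# Illustration: with TARGET_COPY_OUT_PRODUCT := product (and no sanitizer),
+# apps resolve to $(PRODUCT_OUT)/product/app and $(PRODUCT_OUT)/product/priv-app;
+# with the system/product default they resolve under $(PRODUCT_OUT)/system/product.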
+
 TARGET_OUT_BREAKPAD := $(PRODUCT_OUT)/breakpad
 
 TARGET_OUT_UNSTRIPPED := $(PRODUCT_OUT)/symbols
@@ -645,13 +733,3 @@
 ifeq ($(CALLED_FROM_SETUP),true)
 PRINT_BUILD_CONFIG ?= true
 endif
-
-ifeq ($(USE_CLANG_PLATFORM_BUILD),)
-USE_CLANG_PLATFORM_BUILD := true
-endif
-
-ifneq ($(USE_CLANG_PLATFORM_BUILD),true)
-ifneq ($(USE_CLANG_PLATFORM_BUILD),false)
-$(error USE_CLANG_PLATFORM_BUILD must be true or false)
-endif
-endif
diff --git a/core/executable_internal.mk b/core/executable_internal.mk
index 0aec275..4a62fbf 100644
--- a/core/executable_internal.mk
+++ b/core/executable_internal.mk
@@ -12,7 +12,9 @@
 LOCAL_MODULE_SUFFIX := $(TARGET_EXECUTABLE_SUFFIX)
 endif
 
+ifdef target-executable-hook
 $(call target-executable-hook)
+endif
 
 skip_build_from_source :=
 ifdef LOCAL_PREBUILT_MODULE_FILE
diff --git a/core/generate_enforce_rro.mk b/core/generate_enforce_rro.mk
index 579089c..62a8c8d 100644
--- a/core/generate_enforce_rro.mk
+++ b/core/generate_enforce_rro.mk
@@ -7,12 +7,12 @@
 
 ifeq (true,$(enforce_rro_source_is_manifest_package_name))
 $(rro_android_manifest_file): PRIVATE_PACKAGE_NAME := $(enforce_rro_source_manifest_package_info)
-$(rro_android_manifest_file): build/tools/generate-enforce-rro-android-manifest.py
-	$(hide) build/tools/generate-enforce-rro-android-manifest.py -u -p $(PRIVATE_PACKAGE_NAME) -o $@
+$(rro_android_manifest_file): build/make/tools/generate-enforce-rro-android-manifest.py
+	$(hide) build/make/tools/generate-enforce-rro-android-manifest.py -u -p $(PRIVATE_PACKAGE_NAME) -o $@
 else
 $(rro_android_manifest_file): PRIVATE_SOURCE_MANIFEST_FILE := $(enforce_rro_source_manifest_package_info)
-$(rro_android_manifest_file): $(enforce_rro_source_manifest_package_info) build/tools/generate-enforce-rro-android-manifest.py
-	$(hide) build/tools/generate-enforce-rro-android-manifest.py -p $(PRIVATE_SOURCE_MANIFEST_FILE) -o $@
+$(rro_android_manifest_file): $(enforce_rro_source_manifest_package_info) build/make/tools/generate-enforce-rro-android-manifest.py
+	$(hide) build/make/tools/generate-enforce-rro-android-manifest.py -p $(PRIVATE_SOURCE_MANIFEST_FILE) -o $@
 endif
 
 LOCAL_PATH:= $(intermediates)
diff --git a/core/host_dalvik_java_library.mk b/core/host_dalvik_java_library.mk
index a522f0b..1ef0ccb 100644
--- a/core/host_dalvik_java_library.mk
+++ b/core/host_dalvik_java_library.mk
@@ -67,6 +67,8 @@
 
 include $(BUILD_SYSTEM)/java_common.mk
 
+include $(BUILD_SYSTEM)/sdk_check.mk
+
 $(cleantarget): PRIVATE_CLEAN_FILES += $(intermediates.COMMON)
 
 # List of dependencies for anything that needs all java sources in place
@@ -74,6 +76,7 @@
     $(java_sources) \
     $(java_resource_sources) \
     $(proto_java_sources_file_stamp) \
+    $(LOCAL_SRCJARS) \
     $(LOCAL_ADDITIONAL_DEPENDENCIES)
 
 $(java_source_list_file): $(java_sources_deps)
@@ -84,6 +87,9 @@
 $(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES :=
 $(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES :=
 $(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_PACKAGES :=
+$(full_classes_compiled_jar): PRIVATE_SRCJARS := $(LOCAL_SRCJARS)
+$(full_classes_compiled_jar): PRIVATE_SRCJAR_LIST_FILE := $(intermediates.COMMON)/srcjar-list
+$(full_classes_compiled_jar): PRIVATE_SRCJAR_INTERMEDIATES_DIR := $(intermediates.COMMON)/srcjars
 $(full_classes_compiled_jar): \
     $(java_source_list_file) \
     $(java_sources_deps) \
@@ -93,6 +99,7 @@
     $(annotation_processor_deps) \
     $(NORMALIZE_PATH) \
     $(JAR_ARGS) \
+    $(ZIPSYNC) \
     | $(SOONG_JAVAC_WRAPPER)
 	$(transform-host-java-to-dalvik-package)
 
@@ -100,6 +107,7 @@
 
 $(full_classes_turbine_jar): PRIVATE_JAVACFLAGS := $(LOCAL_JAVACFLAGS) $(annotation_processor_flags)
 $(full_classes_turbine_jar): PRIVATE_DONT_DELETE_JAR_META_INF := $(LOCAL_DONT_DELETE_JAR_META_INF)
+$(full_classes_turbine_jar): PRIVATE_SRCJARS := $(LOCAL_SRCJARS)
 $(full_classes_turbine_jar): \
     $(java_source_list_file) \
     $(java_sources_deps) \
@@ -134,7 +142,7 @@
                               $(full_static_java_libs)  | $(MERGE_ZIPS)
 	$(if $(PRIVATE_JAR_MANIFEST), $(hide) sed -e "s/%BUILD_NUMBER%/$(BUILD_NUMBER_FROM_FILE)/" \
             $(PRIVATE_JAR_MANIFEST) > $(dir $@)/manifest.mf)
-	$(MERGE_ZIPS) -j $(if $(PRIVATE_JAR_MANIFEST),-m $(dir $@)/manifest.mf) \
+	$(MERGE_ZIPS) -j --ignore-duplicates $(if $(PRIVATE_JAR_MANIFEST),-m $(dir $@)/manifest.mf) \
             $(if $(PRIVATE_DONT_DELETE_JAR_META_INF),,-stripDir META-INF -zipToNotStrip $<) \
             $@ $< $(call reverse-list,$(PRIVATE_STATIC_JAVA_LIBRARIES))
 
@@ -150,6 +158,7 @@
 
 $(eval $(call copy-one-file,$(full_classes_jarjar_jar),$(full_classes_jar)))
 
+ifneq ($(USE_D8_DESUGAR),true)
 my_desugaring :=
 ifeq ($(LOCAL_JAVA_LANGUAGE_VERSION),1.8)
 my_desugaring := true
@@ -157,6 +166,9 @@
 $(full_classes_desugar_jar): $(full_classes_jar) $(full_java_header_libs) $(DESUGAR)
 	$(desugar-classes-jar)
 endif
+else
+my_desugaring :=
+endif
 
 ifndef my_desugaring
 full_classes_desugar_jar := $(full_classes_jar)
@@ -172,8 +184,12 @@
 else # !LOCAL_IS_STATIC_JAVA_LIBRARY
 $(built_dex): PRIVATE_INTERMEDIATES_DIR := $(intermediates.COMMON)
 $(built_dex): PRIVATE_DX_FLAGS := $(LOCAL_DX_FLAGS)
-$(built_dex): $(full_classes_desugar_jar) $(DX)
+$(built_dex): $(full_classes_desugar_jar) $(DX) $(ZIP2ZIP)
+ifneq ($(USE_D8_DESUGAR),true)
 	$(transform-classes.jar-to-dex)
+else
+	$(transform-classes-d8.jar-to-dex)
+endif
 
 $(LOCAL_BUILT_MODULE): PRIVATE_DEX_FILE := $(built_dex)
 $(LOCAL_BUILT_MODULE): PRIVATE_SOURCE_ARCHIVE := $(full_classes_jarjar_jar)
@@ -185,9 +201,9 @@
 
 endif # !LOCAL_IS_STATIC_JAVA_LIBRARY
 
-ifneq (,$(filter-out current system_current test_current, $(LOCAL_SDK_VERSION)))
-  my_default_app_target_sdk := $(LOCAL_SDK_VERSION)
-  my_sdk_version := $(LOCAL_SDK_VERSION)
+ifneq (,$(filter-out current system_current test_current core_current, $(LOCAL_SDK_VERSION)))
+  my_default_app_target_sdk := $(call get-numeric-sdk-version,$(LOCAL_SDK_VERSION))
+  my_sdk_version := $(call get-numeric-sdk-version,$(LOCAL_SDK_VERSION))
 else
   my_default_app_target_sdk := $(DEFAULT_APP_TARGET_SDK)
   my_sdk_version := $(PLATFORM_SDK_VERSION)
diff --git a/core/host_executable_internal.mk b/core/host_executable_internal.mk
index 19200fd..c4f9f66 100644
--- a/core/host_executable_internal.mk
+++ b/core/host_executable_internal.mk
@@ -12,7 +12,9 @@
 LOCAL_MODULE_SUFFIX := $($(my_prefix)EXECUTABLE_SUFFIX)
 endif
 
+ifdef host-executable-hook
 $(call host-executable-hook)
+endif
 
 skip_build_from_source :=
 ifdef LOCAL_PREBUILT_MODULE_FILE
diff --git a/core/host_java_library.mk b/core/host_java_library.mk
index 9bead3f..db24542 100644
--- a/core/host_java_library.mk
+++ b/core/host_java_library.mk
@@ -55,19 +55,15 @@
 include $(BUILD_SYSTEM)/java_common.mk
 
 # The layers file allows you to enforce a layering between java packages.
-# Run build/tools/java-layers.py for more details.
+# Run build/make/tools/java-layers.py for more details.
 layers_file := $(addprefix $(LOCAL_PATH)/, $(LOCAL_JAVA_LAYERS_FILE))
 
-# If error prone is enabled then add LOCAL_ERROR_PRONE_FLAGS to LOCAL_JAVACFLAGS
-ifeq ($(RUN_ERROR_PRONE),true)
-LOCAL_JAVACFLAGS += $(LOCAL_ERROR_PRONE_FLAGS)
-endif
-
 # List of dependencies for anything that needs all java sources in place
 java_sources_deps := \
     $(java_sources) \
     $(java_resource_sources) \
     $(proto_java_sources_file_stamp) \
+    $(LOCAL_SRCJARS) \
     $(LOCAL_ADDITIONAL_DEPENDENCIES)
 
 $(java_source_list_file): $(java_sources_deps)
@@ -78,6 +74,9 @@
 $(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES :=
 $(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES :=
 $(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_PACKAGES :=
+$(full_classes_compiled_jar): PRIVATE_SRCJARS := $(LOCAL_SRCJARS)
+$(full_classes_compiled_jar): PRIVATE_SRCJAR_LIST_FILE := $(intermediates.COMMON)/srcjar-list
+$(full_classes_compiled_jar): PRIVATE_SRCJAR_INTERMEDIATES_DIR := $(intermediates.COMMON)/srcjars
 $(full_classes_compiled_jar): \
     $(java_source_list_file) \
     $(java_sources_deps) \
@@ -87,6 +86,7 @@
     $(NORMALIZE_PATH) \
     $(ZIPTIME) \
     $(JAR_ARGS) \
+    $(ZIPSYNC) \
     | $(SOONG_JAVAC_WRAPPER)
 	$(transform-host-java-to-package)
 	$(remove-timestamps-from-package)
@@ -99,7 +99,7 @@
                               $(full_static_java_libs) | $(MERGE_ZIPS)
 	$(if $(PRIVATE_JAR_MANIFEST), $(hide) sed -e "s/%BUILD_NUMBER%/$(BUILD_NUMBER_FROM_FILE)/" \
             $(PRIVATE_JAR_MANIFEST) > $(dir $@)/manifest.mf)
-	$(MERGE_ZIPS) -j $(if $(PRIVATE_JAR_MANIFEST),-m $(dir $@)/manifest.mf) \
+	$(MERGE_ZIPS) -j --ignore-duplicates $(if $(PRIVATE_JAR_MANIFEST),-m $(dir $@)/manifest.mf) \
             -stripDir META-INF -zipToNotStrip $< $@ $< $(call reverse-list,$(PRIVATE_STATIC_JAVA_LIBRARIES))
 
 # Run jarjar if necessary, otherwise just copy the file.
diff --git a/core/host_java_library_common.mk b/core/host_java_library_common.mk
index 8df4b37..51e2d94 100644
--- a/core/host_java_library_common.mk
+++ b/core/host_java_library_common.mk
@@ -48,3 +48,8 @@
 
 LOCAL_INTERMEDIATE_SOURCE_DIR := $(intermediates.COMMON)/src
 LOCAL_JAVA_LIBRARIES := $(sort $(LOCAL_JAVA_LIBRARIES))
+
+# If error prone is enabled then add LOCAL_ERROR_PRONE_FLAGS to LOCAL_JAVACFLAGS
+ifeq ($(RUN_ERROR_PRONE),true)
+LOCAL_JAVACFLAGS += $(LOCAL_ERROR_PRONE_FLAGS)
+endif
diff --git a/core/host_shared_library_internal.mk b/core/host_shared_library_internal.mk
index 5e199cc..0a3b317 100644
--- a/core/host_shared_library_internal.mk
+++ b/core/host_shared_library_internal.mk
@@ -20,7 +20,9 @@
 $(error $(LOCAL_PATH): Cannot set module stem for a library)
 endif
 
+ifdef host-shared-library-hook
 $(call host-shared-library-hook)
+endif
 
 skip_build_from_source :=
 ifdef LOCAL_PREBUILT_MODULE_FILE
diff --git a/target/board/generic_armv5/BoardConfig.mk b/core/host_test_config.mk
similarity index 68%
rename from target/board/generic_armv5/BoardConfig.mk
rename to core/host_test_config.mk
index 016937a..b9975e5 100644
--- a/target/board/generic_armv5/BoardConfig.mk
+++ b/core/host_test_config.mk
@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2011 The Android Open Source Project
+# Copyright (C) 2017 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,10 +14,12 @@
 # limitations under the License.
 #
 
-include build/target/board/generic/BoardConfig.mk
+#
+# Common rules for building a TradeFed test XML file for host-side tests.
+#
 
-TARGET_ARCH_VARIANT := armv5te
-TARGET_CPU_ABI := armeabi
-TARGET_CPU_ABI2 :=
+$(call record-module-type,HOST_TEST_CONFIG)
 
-WITH_DEXPREOPT := false
+LOCAL_IS_HOST_MODULE := true
+
+include $(BUILD_SYSTEM)/test_config_common.mk
diff --git a/core/install_jni_libs.mk b/core/install_jni_libs.mk
index 6b550c1..b7d83dc 100644
--- a/core/install_jni_libs.mk
+++ b/core/install_jni_libs.mk
@@ -18,19 +18,14 @@
 ifneq ($(filter tests samples, $(LOCAL_MODULE_TAGS)),)
 my_embed_jni := true
 endif
-ifeq ($(PRODUCT_FULL_TREBLE),true)
-  ifeq ($(filter $(TARGET_OUT)/%, $(my_module_path)),)
-    # If this app isn't to be installed to the system partition, and the device
-    # is fully treble-ized then jni libs are embedded, Otherwise, access to the
-    # directory where the lib is installed to (usually /vendor/lib) needs to be
-    # allowed for system processes, which is a Treble violation.
-    my_embed_jni := true
-  endif
-else
-  ifeq ($(filter $(TARGET_OUT)/% $(TARGET_OUT_VENDOR)/% $(TARGET_OUT_OEM)/%, $(my_module_path)),)
-    # If this app isn't to be installed to system, vendor, or oem partitions.
-    my_embed_jni := true
-  endif
+ifeq ($(filter $(TARGET_OUT)/% $(TARGET_OUT_VENDOR)/% $(TARGET_OUT_OEM)/%, $(my_module_path)),)
+# If this app isn't to be installed to one of the system, vendor, or oem partitions.
+my_embed_jni := true
+endif
+# If we're installing this app as a compressed module, we include all JNI libraries
+# in the compressed artifact, rather than as separate files on the partition in question.
+ifdef LOCAL_COMPRESSED_MODULE
+my_embed_jni := true
 endif
 
 jni_shared_libraries :=
diff --git a/core/install_jni_libs_internal.mk b/core/install_jni_libs_internal.mk
index 265d482..ab5fd2c 100644
--- a/core/install_jni_libs_internal.mk
+++ b/core/install_jni_libs_internal.mk
@@ -24,16 +24,11 @@
 ifdef my_embed_jni
 # App explicitly requires the prebuilt NDK STL shared libraries.
 # The NDK stl shared libraries should never go to the system image.
-ifneq ($(filter $(LOCAL_NDK_STL_VARIANT), stlport_shared c++_shared),)
+ifeq ($(LOCAL_NDK_STL_VARIANT),c++_shared)
 ifndef LOCAL_SDK_VERSION
 $(error LOCAL_SDK_VERSION must be defined with LOCAL_NDK_STL_VARIANT, \
     LOCAL_PACKAGE_NAME=$(LOCAL_PACKAGE_NAME))
 endif
-endif
-ifeq (stlport_shared,$(LOCAL_NDK_STL_VARIANT))
-my_jni_shared_libraries += \
-    $(HISTORICAL_NDK_VERSIONS_ROOT)/$(LOCAL_NDK_VERSION)/sources/cxx-stl/stlport/libs/$(TARGET_$(my_2nd_arch_prefix)CPU_ABI)/libstlport_shared.so
-else ifeq (c++_shared,$(LOCAL_NDK_STL_VARIANT))
 my_jni_shared_libraries += \
     $(HISTORICAL_NDK_VERSIONS_ROOT)/$(LOCAL_NDK_VERSION)/sources/cxx-stl/llvm-libc++/libs/$(TARGET_$(my_2nd_arch_prefix)CPU_ABI)/libc++_shared.so
 endif
@@ -108,15 +103,16 @@
 endif  # outer my_prebuilt_jni_libs
 
 # Verify that all included libraries are built against the NDK
+include $(BUILD_SYSTEM)/allowed_ndk_types.mk
 ifneq ($(strip $(LOCAL_JNI_SHARED_LIBRARIES)),)
 ifneq ($(LOCAL_SDK_VERSION),)
 my_link_type := app:sdk
-my_warn_types := native:platform
-my_allowed_types := native:ndk
+my_warn_types := native:platform $(my_warn_ndk_types)
+my_allowed_types := $(my_allowed_ndk_types)
 else
 my_link_type := app:platform
-my_warn_types :=
-my_allowed_types := native:ndk native:platform native:vendor native:vndk native:vndk_private
+my_warn_types := $(my_warn_ndk_types)
+my_allowed_types := $(my_allowed_ndk_types) native:platform native:vendor native:vndk native:vndk_private
 endif
 
 my_link_deps := $(addprefix SHARED_LIBRARIES:,$(LOCAL_JNI_SHARED_LIBRARIES))
diff --git a/core/instrumentation_test_config_template.xml b/core/instrumentation_test_config_template.xml
new file mode 100644
index 0000000..18ea676
--- /dev/null
+++ b/core/instrumentation_test_config_template.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2017 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<!-- This test config file is auto-generated. -->
+<configuration description="Runs {LABEL}.">
+    <option name="test-suite-tag" value="apct" />
+    <option name="test-suite-tag" value="apct-instrumentation" />
+    <target_preparer class="com.android.tradefed.targetprep.suite.SuiteApkInstaller">
+        <option name="cleanup-apks" value="true" />
+        <option name="test-file-name" value="{MODULE}.apk" />
+    </target_preparer>
+
+    <test class="com.android.tradefed.testtype.{TEST_TYPE}" >
+        <option name="package" value="{PACKAGE}" />
+        <option name="runner" value="{RUNNER}" />
+    </test>
+</configuration>
diff --git a/core/jacoco.mk b/core/jacoco.mk
index f51790d..6406df4 100644
--- a/core/jacoco.mk
+++ b/core/jacoco.mk
@@ -84,8 +84,8 @@
 	mkdir -p $(PRIVATE_INSTRUMENTED_PATH)
 	java -jar $(JACOCO_CLI_JAR) \
 	  instrument \
-	  -quiet \
-	  -dest '$(PRIVATE_INSTRUMENTED_PATH)' \
+	  --quiet \
+	  --dest '$(PRIVATE_INSTRUMENTED_PATH)' \
 	  $(PRIVATE_UNZIPPED_PATH)
 	touch $(PRIVATE_INSTRUMENTED_TIMESTAMP_PATH)
 
diff --git a/core/java.mk b/core/java.mk
index 9143bc1..e263906 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -15,6 +15,10 @@
 endif # !PDK_JAVA
 endif #PDK
 
+ifndef LOCAL_USE_R8
+LOCAL_USE_R8 := $(USE_R8)
+endif
+
 LOCAL_NO_STANDARD_LIBRARIES:=$(strip $(LOCAL_NO_STANDARD_LIBRARIES))
 LOCAL_SDK_VERSION:=$(strip $(LOCAL_SDK_VERSION))
 
@@ -59,15 +63,6 @@
 intermediates := $(call local-intermediates-dir)
 intermediates.COMMON := $(call local-intermediates-dir,COMMON)
 
-# Choose leaf name for the compiled jar file.
-ifeq ($(LOCAL_EMMA_INSTRUMENT),true)
-full_classes_compiled_jar_leaf := classes-no-debug-var.jar
-built_dex_intermediate_leaf := no-local
-else
-full_classes_compiled_jar_leaf := classes-full-debug.jar
-built_dex_intermediate_leaf := with-local
-endif
-
 ifeq ($(LOCAL_PROGUARD_ENABLED),disabled)
 LOCAL_PROGUARD_ENABLED :=
 endif
@@ -75,14 +70,14 @@
 full_classes_turbine_jar := $(intermediates.COMMON)/classes-turbine.jar
 full_classes_header_jarjar := $(intermediates.COMMON)/classes-header-jarjar.jar
 full_classes_header_jar := $(intermediates.COMMON)/classes-header.jar
-full_classes_compiled_jar := $(intermediates.COMMON)/$(full_classes_compiled_jar_leaf)
+full_classes_compiled_jar := $(intermediates.COMMON)/classes-full-debug.jar
 full_classes_processed_jar := $(intermediates.COMMON)/classes-processed.jar
 full_classes_desugar_jar := $(intermediates.COMMON)/classes-desugar.jar
-jarjar_leaf := classes-jarjar.jar
-full_classes_jarjar_jar := $(intermediates.COMMON)/$(jarjar_leaf)
+full_classes_jarjar_jar := $(intermediates.COMMON)/classes-jarjar.jar
 full_classes_proguard_jar := $(intermediates.COMMON)/classes-proguard.jar
 full_classes_combined_jar := $(intermediates.COMMON)/classes-combined.jar
-built_dex_intermediate := $(intermediates.COMMON)/$(built_dex_intermediate_leaf)/classes.dex
+built_dex_intermediate := $(intermediates.COMMON)/dex/classes.dex
+built_dex_hiddenapi := $(intermediates.COMMON)/dex-hiddenapi/classes.dex
 full_classes_stubs_jar := $(intermediates.COMMON)/stubs.jar
 java_source_list_file := $(intermediates.COMMON)/java-source-list
 
@@ -113,9 +108,8 @@
 
 ###############################################################
 ## .rs files: RenderScript sources to .java files and .bc files
-## .fs files: Filterscript sources to .java files and .bc files
 ###############################################################
-renderscript_sources := $(filter %.rs %.fs,$(LOCAL_SRC_FILES))
+renderscript_sources := $(filter %.rs,$(LOCAL_SRC_FILES))
 # Because names of the java files from RenderScript are unknown until the
 # .rs file(s) are compiled, we have to depend on a timestamp file.
 RenderScript_file_stamp :=
@@ -134,8 +128,8 @@
 else
   ifneq (,$(LOCAL_SDK_VERSION))
     # Set target-api for LOCAL_SDK_VERSIONs other than current.
-    ifneq (,$(filter-out current system_current test_current, $(LOCAL_SDK_VERSION)))
-      renderscript_target_api := $(LOCAL_SDK_VERSION)
+    ifneq (,$(filter-out current system_current test_current core_current, $(LOCAL_SDK_VERSION)))
+      renderscript_target_api := $(call get-numeric-sdk-version,$(LOCAL_SDK_VERSION))
     endif
   endif  # LOCAL_SDK_VERSION is set
 endif  # LOCAL_RENDERSCRIPT_TARGET_API is set
@@ -159,7 +153,7 @@
 renderscript_flags += $(LOCAL_RENDERSCRIPT_FLAGS)
 
 # prepend the RenderScript system include path
-ifneq ($(filter-out current system_current test_current,$(LOCAL_SDK_VERSION))$(if $(TARGET_BUILD_APPS),$(filter current system_current test_current,$(LOCAL_SDK_VERSION))),)
+ifneq ($(filter-out current system_current test_current core_current,$(LOCAL_SDK_VERSION))$(if $(TARGET_BUILD_APPS),$(filter current system_current test_current,$(LOCAL_SDK_VERSION))),)
 # if a numeric LOCAL_SDK_VERSION, or current LOCAL_SDK_VERSION with TARGET_BUILD_APPS
 LOCAL_RENDERSCRIPT_INCLUDES := \
     $(HISTORICAL_SDK_VERSIONS_ROOT)/renderscript/clang-include \
@@ -176,7 +170,7 @@
 LOCAL_RENDERSCRIPT_INCLUDES := $(LOCAL_RENDERSCRIPT_INCLUDES_OVERRIDE)
 endif
 
-bc_files := $(patsubst %.fs,%.bc, $(patsubst %.rs,%.bc, $(notdir $(renderscript_sources))))
+bc_files := $(patsubst %.rs,%.bc, $(notdir $(renderscript_sources)))
 bc_dep_files := $(addprefix $(renderscript_intermediate.COMMON)/,$(patsubst %.bc,%.d,$(bc_files)))
 
 $(RenderScript_file_stamp): PRIVATE_RS_INCLUDES := $(LOCAL_RENDERSCRIPT_INCLUDES)
@@ -275,7 +269,7 @@
 
 aidl_preprocess_import :=
 ifdef LOCAL_SDK_VERSION
-ifneq ($(filter current system_current test_current, $(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS)),)
+ifneq ($(filter current system_current test_current core_current, $(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS)),)
   # LOCAL_SDK_VERSION is current and no TARGET_BUILD_APPS
   aidl_preprocess_import := $(TARGET_OUT_COMMON_INTERMEDIATES)/framework.aidl
 else
@@ -323,7 +317,7 @@
 logtags_sources := $(addprefix $(LOCAL_PATH)/, $(logtags_sources))
 
 $(logtags_java_sources): PRIVATE_MERGED_TAG := $(TARGET_OUT_COMMON_INTERMEDIATES)/all-event-log-tags.txt
-$(logtags_java_sources): $(intermediates.COMMON)/logtags/%.java: $(LOCAL_PATH)/%.logtags $(TARGET_OUT_COMMON_INTERMEDIATES)/all-event-log-tags.txt $(JAVATAGS) build/tools/event_log_tags.py
+$(logtags_java_sources): $(intermediates.COMMON)/logtags/%.java: $(LOCAL_PATH)/%.logtags $(TARGET_OUT_COMMON_INTERMEDIATES)/all-event-log-tags.txt $(JAVATAGS) build/make/tools/event_log_tags.py
 	$(transform-logtags-to-java)
 
 else
@@ -336,39 +330,26 @@
 java_intermediate_sources := $(addprefix $(TARGET_OUT_COMMON_INTERMEDIATES)/, $(filter %.java,$(LOCAL_INTERMEDIATE_SOURCES)))
 all_java_sources := $(java_sources) $(java_intermediate_sources)
 
-enable_sharding :=
-ifneq ($(TURBINE_ENABLED),false)
-ifneq ($(LOCAL_JAVAC_SHARD_SIZE),)
-ifneq ($(LOCAL_JAR_PROCESSOR),)
-$(call pretty-error,Cannot set both LOCAL_JAVAC_SHARD_SIZE and LOCAL_JAR_PROCESSOR!)
-endif # LOCAL_JAR_PROCESSOR is not empty
-enable_sharding := true
-
-num_shards := $(call int_divide,$(words $(java_sources)),$(LOCAL_JAVAC_SHARD_SIZE))
-ifneq ($(words $(java_sources)),$(call int_multiply,$(LOCAL_JAVAC_SHARD_SIZE),$(num_shards)))
-# increment number of shards by 1.
-num_shards := $(call int_plus,$(num_shards),1)
-endif
-
-shard_idx_list := $(call int_range_list,1,$(num_shards))
-sharded_java_source_list_files += $(foreach x,$(shard_idx_list),$(java_source_list_file).shard.$(x))
-sharded_jar_list += $(foreach x,$(shard_idx_list),$(full_classes_compiled_jar).shard.$(x))
-
-# always put dynamically-located .java files (generated by Proto/resource, etc) in a new final shard.
-# increment number of shards by 1.
-num_shards := $(call int_plus,$(num_shards),1)
-sharded_java_source_list_files += $(java_source_list_file).shard.$(num_shards)
-sharded_jar_list += $(full_classes_compiled_jar).shard.$(num_shards)
-LOCAL_INTERMEDIATE_TARGETS += $(sharded_java_source_list_files)
-LOCAL_INTERMEDIATE_TARGETS += $(sharded_jar_list)
-endif # LOCAL_JAVAC_SHARD_SIZE is not empty
-endif # TURBINE_ENABLED != false
-
 include $(BUILD_SYSTEM)/java_common.mk
 
+include $(BUILD_SYSTEM)/sdk_check.mk
+
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_HAS_RS_SOURCES := $(if $(renderscript_sources),true)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_RS_SOURCE_INTERMEDIATES_DIR := $(intermediates.COMMON)/renderscript
 
+# Set the profile source so that the odex / profile code included from java.mk
+# can find it.
+#
+# TODO: b/64896089, this is broken when called from package_internal.mk, since the file
+# we preopt from is a temporary file. This will be addressed in a follow up, possibly
+# by disabling stripping for profile guided preopt (which may be desirable for other
+# reasons anyway).
+#
+# Note that we set this only when called from package_internal.mk and not in other cases.
+ifneq (,$(called_from_package_internal))
+dex_preopt_profile_src_file := $(LOCAL_BUILT_MODULE)
+endif
+
 #######################################
 # defines built_odex along with rule to install odex
 include $(BUILD_SYSTEM)/dex_preopt_odex_install.mk
@@ -377,7 +358,7 @@
 # Make sure there's something to build.
 ifdef full_classes_jar
 ifndef need_compile_java
-$(error $(LOCAL_PATH): Target java module does not define any source or resource files)
+$(call pretty-error,Target java module does not define any source or resource files)
 endif
 endif
 
@@ -394,7 +375,7 @@
 ALL_MODULES.$(LOCAL_MODULE).STUBS := $(full_classes_stubs_jar)
 
 # The layers file allows you to enforce a layering between java packages.
-# Run build/tools/java-layers.py for more details.
+# Run build/make/tools/java-layers.py for more details.
 layers_file := $(addprefix $(LOCAL_PATH)/, $(LOCAL_JAVA_LAYERS_FILE))
 $(full_classes_compiled_jar): PRIVATE_JAVA_LAYERS_FILE := $(layers_file)
 $(full_classes_compiled_jar): PRIVATE_WARNINGS_ENABLE := $(LOCAL_WARNINGS_ENABLE)
@@ -424,56 +405,16 @@
     $(java_resource_sources) \
     $(RenderScript_file_stamp) \
     $(proto_java_sources_file_stamp) \
+    $(LOCAL_SRCJARS) \
     $(LOCAL_ADDITIONAL_DEPENDENCIES)
 
 $(java_source_list_file): $(java_sources_deps)
 	$(write-java-source-list)
 
-ifdef enable_sharding
-$(foreach x,$(shard_idx_list),\
-  $(eval $(call save-sharded-java-source-list,$(x),\
-    $(wordlist $(call int_plus,1,$(call int_multiply,$(LOCAL_JAVAC_SHARD_SIZE),$(call int_subtract,$(x),1))),\
-      $(call int_multiply,$(LOCAL_JAVAC_SHARD_SIZE),$(x)),$(sort $(java_sources))))))
-
-# always put dynamically-located .java files (generated by Proto/resource, etc) in a new final shard.
-$(java_source_list_file).shard.$(num_shards): PRIVATE_JAVA_INTERMEDIATE_SOURCES := $(java_intermediate_sources)
-$(java_source_list_file).shard.$(num_shards): $(java_resource_sources) \
-    $(RenderScript_file_stamp) \
-    $(proto_java_sources_file_stamp) \
-    $(LOCAL_ADDITIONAL_DEPENDENCIES) \
-    $(NORMALIZE_PATH)
-	$(hide) rm -f $@
-	$(call dump-words-to-file,$(PRIVATE_JAVA_INTERMEDIATE_SOURCES),$@.tmp)
-	$(call fetch-additional-java-source,$@.tmp)
-	$(hide) tr ' ' '\n' < $@.tmp | $(NORMALIZE_PATH) | sort -u > $@
-
-# Javac sharding with header libs including its own header jar as one of dependency.
-$(foreach x,$(shard_idx_list),\
-  $(eval $(call create-classes-full-debug.jar,$(full_classes_compiled_jar).shard.$(x),\
-    $(java_source_list_file).shard.$(x),\
-      $(full_java_header_libs) $(full_classes_header_jar),$(x),\
-        $(wordlist $(call int_plus,1,$(call int_multiply,$(LOCAL_JAVAC_SHARD_SIZE),$(call int_subtract,$(x),1))),\
-          $(call int_multiply,$(LOCAL_JAVAC_SHARD_SIZE),$(x)),$(sort $(java_sources))))))
-
-# Javac sharding for last shard with additional Java dependencies.
-$(eval $(call create-classes-full-debug.jar,$(full_classes_compiled_jar).shard.$(num_shards),\
-  $(java_source_list_file).shard.$(num_shards),$(full_java_header_libs) $(full_classes_header_jar),$(strip \
-    $(num_shards)),$$(java_resource_sources) $$(RenderScript_file_stamp) \
-      $$(proto_java_sources_file_stamp) $$(LOCAL_ADDITIONAL_DEPENDENCIES)))
-
-$(full_classes_compiled_jar): PRIVATE_SHARDED_JAR_LIST := $(sharded_jar_list)
-$(full_classes_compiled_jar): $(sharded_jar_list) | $(MERGE_ZIPS)
-	$(MERGE_ZIPS) -j $@ $(PRIVATE_SHARDED_JAR_LIST)
-else
-# we can't use single $ for java_sources_deps since it may contain hash '#' sign.
-$(eval $(call create-classes-full-debug.jar,$(full_classes_compiled_jar),\
-  $(java_source_list_file),$(full_java_header_libs),,$$(java_sources_deps)))
-
-endif # ifdef enable_sharding
-
 ifneq ($(TURBINE_ENABLED),false)
 
 $(full_classes_turbine_jar): PRIVATE_JAVACFLAGS := $(LOCAL_JAVACFLAGS) $(annotation_processor_flags)
+$(full_classes_turbine_jar): PRIVATE_SRCJARS := $(LOCAL_SRCJARS)
 $(full_classes_turbine_jar): PRIVATE_DONT_DELETE_JAR_META_INF := $(LOCAL_DONT_DELETE_JAR_META_INF)
 $(full_classes_turbine_jar): \
     $(java_source_list_file) \
@@ -504,6 +445,31 @@
 
 endif # TURBINE_ENABLED != false
 
+$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(LOCAL_JAVACFLAGS) $(annotation_processor_flags)
+$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES := $(LOCAL_JAR_EXCLUDE_FILES)
+$(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES := $(LOCAL_JAR_PACKAGES)
+$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_PACKAGES := $(LOCAL_JAR_EXCLUDE_PACKAGES)
+$(full_classes_compiled_jar): PRIVATE_DONT_DELETE_JAR_META_INF := $(LOCAL_DONT_DELETE_JAR_META_INF)
+$(full_classes_compiled_jar): PRIVATE_JAVA_SOURCE_LIST := $(java_source_list_file)
+$(full_classes_compiled_jar): PRIVATE_ALL_JAVA_HEADER_LIBRARIES := $(full_java_header_libs)
+$(full_classes_compiled_jar): PRIVATE_SRCJARS := $(LOCAL_SRCJARS)
+$(full_classes_compiled_jar): PRIVATE_SRCJAR_LIST_FILE := $(intermediates.COMMON)/srcjar-list
+$(full_classes_compiled_jar): PRIVATE_SRCJAR_INTERMEDIATES_DIR := $(intermediates.COMMON)/srcjars
+$(full_classes_compiled_jar): \
+    $(java_source_list_file) \
+    $(full_java_header_libs) \
+    $(java_sources_deps) \
+    $(full_java_bootclasspath_libs) \
+    $(full_java_system_modules_deps) \
+    $(layers_file) \
+    $(annotation_processor_deps) \
+    $(NORMALIZE_PATH) \
+    $(JAR_ARGS) \
+    $(ZIPSYNC) \
+    | $(SOONG_JAVAC_WRAPPER)
+	@echo "Target Java: $@"
+	$(call compile-java,$(TARGET_JAVAC),$(PRIVATE_ALL_JAVA_HEADER_LIBRARIES))
+
 javac-check : $(full_classes_compiled_jar)
 javac-check-$(LOCAL_MODULE) : $(full_classes_compiled_jar)
 
@@ -513,7 +479,7 @@
                               $(full_static_java_libs) | $(MERGE_ZIPS)
 	$(if $(PRIVATE_JAR_MANIFEST), $(hide) sed -e "s/%BUILD_NUMBER%/$(BUILD_NUMBER_FROM_FILE)/" \
             $(PRIVATE_JAR_MANIFEST) > $(dir $@)/manifest.mf)
-	$(MERGE_ZIPS) -j $(if $(PRIVATE_JAR_MANIFEST),-m $(dir $@)/manifest.mf) \
+	$(MERGE_ZIPS) -j --ignore-duplicates $(if $(PRIVATE_JAR_MANIFEST),-m $(dir $@)/manifest.mf) \
             $(if $(PRIVATE_DONT_DELETE_JAR_META_INF),,-stripDir META-INF -zipToNotStrip $<) \
             $@ $< $(call reverse-list,$(PRIVATE_STATIC_JAVA_LIBRARIES))
 
@@ -555,38 +521,42 @@
 
 $(eval $(call copy-one-file,$(full_classes_jarjar_jar),$(full_classes_jar)))
 
+LOCAL_FULL_CLASSES_PRE_JACOCO_JAR := $(full_classes_jar)
+
+#######################################
+include $(BUILD_SYSTEM)/jacoco.mk
+#######################################
+
 # Temporarily enable --multi-dex until proguard supports v53 class files
 # ( http://b/67673860 ) or we move away from proguard altogether.
 ifdef TARGET_OPENJDK9
 LOCAL_DX_FLAGS := $(filter-out --multi-dex,$(LOCAL_DX_FLAGS)) --multi-dex
 endif
 
+ifneq ($(USE_D8_DESUGAR),true)
 my_desugaring :=
 ifndef LOCAL_IS_STATIC_JAVA_LIBRARY
 my_desugaring := true
 $(full_classes_desugar_jar): PRIVATE_DX_FLAGS := $(LOCAL_DX_FLAGS)
-$(full_classes_desugar_jar): $(full_classes_jar) $(full_java_header_libs) $(DESUGAR)
+$(full_classes_desugar_jar): $(LOCAL_FULL_CLASSES_JACOCO_JAR) $(full_java_header_libs) $(DESUGAR)
 	$(desugar-classes-jar)
 endif
+else
+my_desugaring :=
+endif
 
 ifndef my_desugaring
-full_classes_desugar_jar := $(full_classes_jar)
+full_classes_desugar_jar := $(LOCAL_FULL_CLASSES_JACOCO_JAR)
 endif
 
-LOCAL_FULL_CLASSES_PRE_JACOCO_JAR := $(full_classes_desugar_jar)
-
-#######################################
-include $(BUILD_SYSTEM)/jacoco.mk
-#######################################
-
-full_classes_pre_proguard_jar := $(LOCAL_FULL_CLASSES_JACOCO_JAR)
+full_classes_pre_proguard_jar := $(full_classes_desugar_jar)
 
 # Keep a copy of the jar just before proguard processing.
 $(eval $(call copy-one-file,$(full_classes_pre_proguard_jar),$(intermediates.COMMON)/classes-pre-proguard.jar))
 
 # Run proguard if necessary
 ifdef LOCAL_PROGUARD_ENABLED
-ifneq ($(filter-out full custom nosystem obfuscation optimization shrinktests,$(LOCAL_PROGUARD_ENABLED)),)
+ifneq ($(filter-out full custom obfuscation optimization,$(LOCAL_PROGUARD_ENABLED)),)
     $(warning while processing: $(LOCAL_MODULE))
     $(error invalid value for LOCAL_PROGUARD_ENABLED: $(LOCAL_PROGUARD_ENABLED))
 endif
@@ -602,7 +572,7 @@
 my_proguard_sdk_raise :=
 ifdef LOCAL_SDK_VERSION
 ifdef TARGET_BUILD_APPS
-ifeq (,$(filter current system_current test_current, $(LOCAL_SDK_VERSION)))
+ifeq (,$(filter current system_current test_current core_current, $(LOCAL_SDK_VERSION)))
   my_proguard_sdk_raise := $(call java-lib-header-files, sdk_vcurrent)
 endif
 else
@@ -624,19 +594,16 @@
 
 common_proguard_flags := -forceprocessing
 
-common_proguard_flag_files :=
-ifeq ($(filter nosystem,$(LOCAL_PROGUARD_ENABLED)),)
-common_proguard_flag_files += $(BUILD_SYSTEM)/proguard.flags
-ifeq ($(LOCAL_EMMA_INSTRUMENT),true)
-common_proguard_flags += -include $(BUILD_SYSTEM)/proguard.emma.flags
-endif
-# If this is a test package, add proguard keep flags for tests.
+common_proguard_flag_files := $(BUILD_SYSTEM)/proguard.flags
 ifneq ($(LOCAL_INSTRUMENTATION_FOR)$(filter tests,$(LOCAL_MODULE_TAGS)),)
-common_proguard_flag_files += $(BUILD_SYSTEM)/proguard_tests.flags
-ifeq ($(filter shrinktests,$(LOCAL_PROGUARD_ENABLED)),)
 common_proguard_flags += -dontshrink # don't shrink tests by default
-endif # shrinktests
 endif # test package
+ifneq ($(LOCAL_PROGUARD_ENABLED),custom)
+  ifdef LOCAL_USE_AAPT2
+    common_proguard_flag_files += $(foreach l,$(LOCAL_STATIC_ANDROID_LIBRARIES),\
+        $(call intermediates-dir-for,JAVA_LIBRARIES,$(l),,COMMON)/export_proguard_flags)
+  endif
+endif
 ifneq ($(common_proguard_flag_files),)
 common_proguard_flags += $(addprefix -include , $(common_proguard_flag_files))
 # This is included from $(BUILD_SYSTEM)/proguard.flags
@@ -678,12 +645,11 @@
 
 endif # no obfuscation
 endif # LOCAL_INSTRUMENTATION_FOR
-endif  # LOCAL_PROGUARD_ENABLED is not nosystem
 
 proguard_flag_files := $(addprefix $(LOCAL_PATH)/, $(LOCAL_PROGUARD_FLAG_FILES))
-ifeq ($(USE_R8),true)
+ifeq ($(LOCAL_USE_R8),true)
 proguard_flag_files += $(addprefix $(LOCAL_PATH)/, $(LOCAL_R8_FLAG_FILES))
-endif # USE_R8
+endif # LOCAL_USE_R8
 LOCAL_PROGUARD_FLAGS += $(addprefix -include , $(proguard_flag_files))
 
 ifdef LOCAL_TEST_MODULE_TO_PROGUARD_WITH
@@ -706,7 +672,7 @@
 endif
 
 ifneq ($(filter obfuscation,$(LOCAL_PROGUARD_ENABLED)),)
-ifneq ($(USE_R8),true)
+ifneq ($(LOCAL_USE_R8),true)
   $(full_classes_proguard_jar): .KATI_IMPLICIT_OUTPUTS := $(proguard_dictionary)
 else
   $(built_dex_intermediate): .KATI_IMPLICIT_OUTPUTS := $(proguard_dictionary)
@@ -714,7 +680,7 @@
 endif
 
 # If R8 is not enabled run Proguard.
-ifneq ($(USE_R8),true)
+ifneq ($(LOCAL_USE_R8),true)
 # Changes to these dependencies need to be replicated below when using R8
 # instead of Proguard + dx.
 $(full_classes_proguard_jar): PRIVATE_PROGUARD_INJAR_FILTERS := $(proguard_injar_filters)
@@ -722,10 +688,10 @@
 $(full_classes_proguard_jar): PRIVATE_PROGUARD_FLAGS := $(legacy_proguard_flags) $(common_proguard_flags) $(LOCAL_PROGUARD_FLAGS)
 $(full_classes_proguard_jar) : $(full_classes_pre_proguard_jar) $(extra_input_jar) $(my_proguard_sdk_raise) $(common_proguard_flag_files) $(proguard_flag_files) $(legacy_proguard_lib_deps) | $(PROGUARD)
 	$(call transform-jar-to-proguard)
-else # !USE_R8
+else # !LOCAL_USE_R8
 # Running R8 instead of Proguard, proguarded jar is actually the pre-Proguarded jar.
 full_classes_proguard_jar := $(full_classes_pre_proguard_jar)
-endif # !USE_R8
+endif # !LOCAL_USE_R8
 
 else  # LOCAL_PROGUARD_ENABLED not defined
 proguard_flag_files :=
@@ -734,19 +700,10 @@
 
 ifneq ($(LOCAL_IS_STATIC_JAVA_LIBRARY),true)
 $(built_dex_intermediate): PRIVATE_DX_FLAGS := $(LOCAL_DX_FLAGS)
-# If you instrument class files that have local variable debug information in
-# them emma does not correctly maintain the local variable table.
-# This will cause an error when you try to convert the class files for Android.
-# The workaround here is to build different dex file here based on emma switch
-# then later copy into classes.dex. When emma is on, dx is run with --no-locals
-# option to remove local variable information
-ifeq ($(LOCAL_EMMA_INSTRUMENT),true)
-$(built_dex_intermediate): PRIVATE_DX_FLAGS += --no-locals
-endif
 
 my_r8 :=
 ifdef LOCAL_PROGUARD_ENABLED
-ifeq ($(USE_R8),true)
+ifeq ($(LOCAL_USE_R8),true)
 # These are the dependencies for the proguarded jar when running
 # Proguard + dx. They are used for the generated dex when using R8, as
 # R8 does Proguard + dx
@@ -756,15 +713,26 @@
 $(built_dex_intermediate): PRIVATE_PROGUARD_FLAGS := $(legacy_proguard_flags) $(common_proguard_flags) $(LOCAL_PROGUARD_FLAGS)
 $(built_dex_intermediate) : $(full_classes_proguard_jar) $(extra_input_jar) $(my_support_library_sdk_raise) $(common_proguard_flag_files) $(proguard_flag_files) $(legacy_proguard_lib_deps) $(R8_COMPAT_PROGUARD)
 	$(transform-jar-to-dex-r8)
-endif # USE_R8
+endif # LOCAL_USE_R8
 endif # LOCAL_PROGUARD_ENABLED
 
 ifndef my_r8
-$(built_dex_intermediate): $(full_classes_proguard_jar) $(DX)
+$(built_dex_intermediate): $(full_classes_proguard_jar) $(DX) $(ZIP2ZIP)
+ifneq ($(USE_D8_DESUGAR),true)
 	$(transform-classes.jar-to-dex)
+else
+	$(transform-classes-d8.jar-to-dex)
+endif
 endif
 
-$(built_dex): $(built_dex_intermediate)
+ifneq ($(filter $(LOCAL_MODULE),$(PRODUCT_BOOT_JARS)),) # is_boot_jar
+  $(eval $(call hiddenapi-copy-dex-files,$(built_dex_intermediate),$(built_dex_hiddenapi)))
+  built_dex_copy_from := $(built_dex_hiddenapi)
+else # !is_boot_jar
+  built_dex_copy_from := $(built_dex_intermediate)
+endif # is_boot_jar
+
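+# Illustration: a module listed in PRODUCT_BOOT_JARS (say, a hypothetical
+# "core-foo") has its dex rewritten into the dex-hiddenapi/ intermediate
+# before the copy below; every other module copies straight from dex/classes.dex.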
+$(built_dex): $(built_dex_copy_from)
 	@echo Copying: $@
 	$(hide) mkdir -p $(dir $@)
 	$(hide) rm -f $(dir $@)/classes*.dex
@@ -800,9 +768,9 @@
 
 endif  # full_classes_jar is defined
 
-ifneq (,$(filter-out current system_current test_current, $(LOCAL_SDK_VERSION)))
-  my_default_app_target_sdk := $(LOCAL_SDK_VERSION)
-  my_sdk_version := $(LOCAL_SDK_VERSION)
+ifneq (,$(filter-out current system_current test_current core_current, $(LOCAL_SDK_VERSION)))
+  my_default_app_target_sdk := $(call get-numeric-sdk-version,$(LOCAL_SDK_VERSION))
+  my_sdk_version := $(call get-numeric-sdk-version,$(LOCAL_SDK_VERSION))
 else
   my_default_app_target_sdk := $(DEFAULT_APP_TARGET_SDK)
   my_sdk_version := $(PLATFORM_SDK_VERSION)
diff --git a/core/java_common.mk b/core/java_common.mk
index 4dd0de6..dea0435 100644
--- a/core/java_common.mk
+++ b/core/java_common.mk
@@ -1,5 +1,11 @@
 # Common to host and target Java modules.
 
+my_soong_problems :=
+
+ifneq ($(filter ../%,$(LOCAL_SRC_FILES)),)
+my_soong_problems += dotdot_srcs
+endif
+
 ###########################################################
 ## Java version
 ###########################################################
@@ -30,8 +36,32 @@
 LOCAL_JAVACFLAGS += -source $(LOCAL_JAVA_LANGUAGE_VERSION) -target $(LOCAL_JAVA_LANGUAGE_VERSION)
 
 ###########################################################
+
+# OpenJDK versions up to 8 shipped with bootstrap and tools jars
+# (rt.jar, jce.jar, tools.jar etc.). These are no longer part of
+# OpenJDK 9, but we still make them available for host tools that
+# are targeting older versions.
+USE_HOST_BOOTSTRAP_JARS := true
+ifeq (,$(filter $(LOCAL_JAVA_LANGUAGE_VERSION), 1.6 1.7 1.8))
+USE_HOST_BOOTSTRAP_JARS := false
+endif
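+# For example, LOCAL_JAVA_LANGUAGE_VERSION := 1.8 keeps
+# USE_HOST_BOOTSTRAP_JARS at true, while any other value (e.g. 1.9)
+# flips it to false.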
+
+###########################################################
+
+# Drop HOST_JDK_TOOLS_JAR from the classpath when targeting versions >= 9 (which don't have it).
+# TODO: Remove HOST_JDK_TOOLS_JAR and all references to it once host
+# bootstrap jars are no longer supported (ie. when USE_HOST_BOOTSTRAP_JARS
+# is always false). http://b/38418220
+ifneq ($(USE_HOST_BOOTSTRAP_JARS),true)
+LOCAL_CLASSPATH := $(filter-out $(HOST_JDK_TOOLS_JAR),$(LOCAL_CLASSPATH))
+endif
+
+###########################################################
 ## .proto files: Compile proto files to .java
 ###########################################################
+ifeq ($(strip $(LOCAL_PROTOC_OPTIMIZE_TYPE)),)
+  LOCAL_PROTOC_OPTIMIZE_TYPE := lite
+endif
 proto_sources := $(filter %.proto,$(LOCAL_SRC_FILES))
 # Because names of the .java files compiled from .proto files are unknown until the
 # .proto files are compiled, we use a timestamp file as a dependency.
@@ -61,7 +91,7 @@
   endif
 endif
 $(proto_java_sources_file_stamp): PRIVATE_PROTOC_FLAGS := $(LOCAL_PROTOC_FLAGS)
-$(proto_java_sources_file_stamp): PRIVATE_PROTO_JAVA_OUTPUT_PARAMS := $(LOCAL_PROTO_JAVA_OUTPUT_PARAMS)
+$(proto_java_sources_file_stamp): PRIVATE_PROTO_JAVA_OUTPUT_PARAMS := $(if $(filter lite,$(LOCAL_PROTOC_OPTIMIZE_TYPE)),lite$(if $(LOCAL_PROTO_JAVA_OUTPUT_PARAMS),:,),)$(LOCAL_PROTO_JAVA_OUTPUT_PARAMS)
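+# Illustrative expansion of the $(if ...) above: with the default (lite)
+# optimize type and LOCAL_PROTO_JAVA_OUTPUT_PARAMS unset, the value is
+# "lite"; with a hypothetical LOCAL_PROTO_JAVA_OUTPUT_PARAMS := foo=bar,
+# it becomes "lite:foo=bar".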
 $(proto_java_sources_file_stamp) : $(proto_sources_fullpath) $(PROTOC)
 	$(call transform-proto-to-java)
 
@@ -147,7 +177,7 @@
 #####################################
 ## Warn if there is unrecognized file in LOCAL_SRC_FILES.
 my_unknown_src_files := $(filter-out \
-  %.java %.aidl %.proto %.logtags %.fs %.rs, \
+  %.java %.aidl %.proto %.logtags %.rs, \
   $(LOCAL_SRC_FILES) $(LOCAL_INTERMEDIATE_SOURCES) $(LOCAL_GENERATED_SOURCES))
 ifneq ($(my_unknown_src_files),)
 $(warning $(LOCAL_MODULE_MAKEFILE): $(LOCAL_MODULE): Unused source files: $(my_unknown_src_files))
@@ -158,7 +188,7 @@
 # LOCAL_SOURCE_FILES_ALL_GENERATED is set only if the module does not have static source files,
 # but generated source files in its LOCAL_INTERMEDIATE_SOURCE_DIR.
 # You have to set up the dependency in some other way.
-need_compile_java := $(strip $(all_java_sources)$(all_res_assets)$(java_resource_sources))$(LOCAL_STATIC_JAVA_LIBRARIES)$(filter true,$(LOCAL_SOURCE_FILES_ALL_GENERATED))
+need_compile_java := $(strip $(all_java_sources)$(LOCAL_SRCJARS)$(all_res_assets)$(java_resource_sources))$(LOCAL_STATIC_JAVA_LIBRARIES)$(filter true,$(LOCAL_SOURCE_FILES_ALL_GENERATED))
 ifdef need_compile_java
 
 annotation_processor_flags :=
@@ -228,9 +258,15 @@
       full_java_bootclasspath_libs := $(call java-lib-header-files,android_system_stubs_current)
     else ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),test_current)
       full_java_bootclasspath_libs := $(call java-lib-header-files,android_test_stubs_current)
+    else ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),core_current)
+      full_java_bootclasspath_libs := $(call java-lib-header-files,core.current.stubs)
     else
-      full_java_bootclasspath_libs := $(call java-lib-header-files,sdk_v$(LOCAL_SDK_VERSION))
-    endif # current, system_current, or test_current
+      # core_<ver> is a subset of <ver>. Instead of defining a prebuilt lib for core_<ver>,
+      # use the stub for <ver> when building for apps.
+      _version := $(patsubst core_%,%,$(LOCAL_SDK_VERSION))
+      full_java_bootclasspath_libs := $(call java-lib-header-files,sdk_v$(_version))
+      _version :=
+    endif # current, system_current, system_${VER}, test_current or core_current
   endif # LOCAL_SDK_VERSION
 
   ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
@@ -273,7 +309,23 @@
     full_shared_java_libs := $(call java-lib-files,$(LOCAL_JAVA_LIBRARIES),true)
     full_shared_java_header_libs := $(call java-lib-header-files,$(LOCAL_JAVA_LIBRARIES),true)
   else # !USE_CORE_LIB_BOOTCLASSPATH
-
+    # Give host-side tools a version of OpenJDK's standard libraries
+    # close to what they're targeting. As of Dec 2017, AOSP is only
+    # bundling OpenJDK 8 and 9, so nothing < 8 is available.
+    #
+    # When building with OpenJDK 8, the following should have no
+    # effect since those jars would be available by default.
+    #
+    # When building with OpenJDK 9 but targeting a version < 1.8,
+    # putting them on the bootclasspath means that:
+    # a) code can't (accidentally) refer to OpenJDK 9 specific APIs
+    # b) references to existing APIs are not reinterpreted in an
+    #    OpenJDK 9-specific way, e.g. calls to subclasses of
+    #    java.nio.Buffer as in http://b/70862583
+    ifeq ($(USE_HOST_BOOTSTRAP_JARS),true)
+      full_java_bootclasspath_libs += $(ANDROID_JAVA8_HOME)/jre/lib/jce.jar
+      full_java_bootclasspath_libs += $(ANDROID_JAVA8_HOME)/jre/lib/rt.jar
+    endif
     full_shared_java_libs := $(addprefix $(HOST_OUT_JAVA_LIBRARIES)/,\
       $(addsuffix $(COMMON_JAVA_PACKAGE_SUFFIX),$(LOCAL_JAVA_LIBRARIES)))
     full_shared_java_header_libs := $(full_shared_java_libs)
@@ -304,7 +356,10 @@
 
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH := $(full_java_bootclasspath_libs)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_EMPTY_BOOTCLASSPATH := $(empty_bootclasspath)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_SYSTEM_MODULES := $(my_system_modules_dir)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_SYSTEM_MODULES := $(my_system_modules)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_SYSTEM_MODULES_DIR := $(my_system_modules_dir)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_SYSTEM_MODULES_LIBS := $(call java-lib-files,$(SOONG_SYSTEM_MODULES_LIBS_$(my_system_modules)))
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_PATCH_MODULE := $(LOCAL_PATCH_MODULE)
 
 ifndef LOCAL_IS_HOST_MODULE
 # This is set by packages that are linking to other packages that export
@@ -356,35 +411,6 @@
 endif
 
 ##########################################################
-ifndef LOCAL_IS_HOST_MODULE
-## AAPT Flags
-# aapt doesn't accept multiple --extra-packages flags.
-# We have to collapse them into a single --extra-packages flag here.
-LOCAL_AAPT_FLAGS := $(strip $(LOCAL_AAPT_FLAGS))
-ifdef LOCAL_AAPT_FLAGS
-ifeq ($(filter 0 1,$(words $(filter --extra-packages,$(LOCAL_AAPT_FLAGS)))),)
-aapt_flags := $(subst --extra-packages$(space),--extra-packages@,$(LOCAL_AAPT_FLAGS))
-aapt_flags_extra_packages := $(patsubst --extra-packages@%,%,$(filter --extra-packages@%,$(aapt_flags)))
-aapt_flags_extra_packages := $(sort $(subst :,$(space),$(aapt_flags_extra_packages)))
-LOCAL_AAPT_FLAGS := $(filter-out --extra-packages@%,$(aapt_flags)) \
-    --extra-packages $(subst $(space),:,$(aapt_flags_extra_packages))
-aapt_flags_extra_packages :=
-aapt_flags :=
-endif
-endif
-
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_AAPT_FLAGS := $(LOCAL_AAPT_FLAGS) $(PRODUCT_AAPT_FLAGS)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_TARGET_AAPT_CHARACTERISTICS := $(TARGET_AAPT_CHARACTERISTICS)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_MANIFEST_PACKAGE_NAME := $(LOCAL_MANIFEST_PACKAGE_NAME)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_MANIFEST_INSTRUMENTATION_FOR := $(LOCAL_MANIFEST_INSTRUMENTATION_FOR)
-
-ifdef aidl_sources
-ALL_MODULES.$(my_register_name).AIDL_FILES := $(aidl_sources)
-endif
-ifdef renderscript_sources
-ALL_MODULES.$(my_register_name).RS_FILES := $(renderscript_sources_fullpath)
-endif
-endif  # !LOCAL_IS_HOST_MODULE
 
 full_java_libs := $(full_shared_java_libs) $(full_static_java_libs) $(LOCAL_CLASSPATH)
 full_java_header_libs := $(full_shared_java_header_libs) $(full_static_java_header_libs)
@@ -403,15 +429,23 @@
 ifeq ($(LOCAL_SDK_VERSION),system_current)
 my_link_type := java:system
 my_warn_types := java:platform
-my_allowed_types := java:sdk java:system
+my_allowed_types := java:sdk java:system java:core
+else ifneq (,$(call has-system-sdk-version,$(LOCAL_SDK_VERSION)))
+my_link_type := java:system
+my_warn_types := java:platform
+my_allowed_types := java:sdk java:system java:core
+else ifeq ($(LOCAL_SDK_VERSION),core_current)
+my_link_type := java:core
+my_warn_types :=
+my_allowed_types := java:core
 else ifneq ($(LOCAL_SDK_VERSION),)
 my_link_type := java:sdk
 my_warn_types := java:system java:platform
-my_allowed_types := java:sdk
+my_allowed_types := java:sdk java:core
 else
 my_link_type := java:platform
 my_warn_types :=
-my_allowed_types := java:sdk java:system java:platform
+my_allowed_types := java:sdk java:system java:platform java:core
 endif
 
 ifdef LOCAL_AAPT2_ONLY
@@ -428,3 +462,17 @@
 my_common := COMMON
 include $(BUILD_SYSTEM)/link_type.mk
 endif  # !LOCAL_IS_HOST_MODULE
+
+ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
+
+SOONG_CONV.$(LOCAL_MODULE).PROBLEMS := \
+    $(SOONG_CONV.$(LOCAL_MODULE).PROBLEMS) $(my_soong_problems)
+SOONG_CONV.$(LOCAL_MODULE).DEPS := \
+    $(SOONG_CONV.$(LOCAL_MODULE).DEPS) \
+    $(LOCAL_STATIC_JAVA_LIBRARIES) \
+    $(LOCAL_JAVA_LIBRARIES) \
+    $(LOCAL_JNI_SHARED_LIBRARIES)
+SOONG_CONV.$(LOCAL_MODULE).TYPE := java
+SOONG_CONV := $(SOONG_CONV) $(LOCAL_MODULE)
+
+endif
diff --git a/core/java_library.mk b/core/java_library.mk
index d7c08cc..1b914f5 100644
--- a/core/java_library.mk
+++ b/core/java_library.mk
@@ -31,6 +31,7 @@
 # java libraries produce javalib.jar, so we will copy classes.jar there too.
 intermediates.COMMON := $(call local-intermediates-dir,COMMON)
 common_javalib.jar := $(intermediates.COMMON)/javalib.jar
+dex_preopt_profile_src_file := $(common_javalib.jar)
 LOCAL_INTERMEDIATE_TARGETS += $(common_javalib.jar)
 
 ifeq ($(LOCAL_PROGUARD_ENABLED),disabled)
@@ -41,6 +42,8 @@
 ifeq (true,$(LOCAL_EMMA_INSTRUMENT))
 ifeq (true,$(EMMA_INSTRUMENT_STATIC))
 LOCAL_STATIC_JAVA_LIBRARIES += jacocoagent
+# Exclude jacoco classes from proguard
+LOCAL_PROGUARD_FLAGS += -include $(BUILD_SYSTEM)/proguard.jacoco.flags
 endif # LOCAL_EMMA_INSTRUMENT
 endif # EMMA_INSTRUMENT_STATIC
 else
@@ -69,10 +72,10 @@
 	$(call add-dex-to-package-arg,$@.tmp)
 	$(hide) $(ZIPTIME) $@.tmp
 	$(call commit-change-for-toc,$@)
-ifneq (,$(filter $(PRODUCT_LOADED_BY_PRIVILEGED_MODULES), $(LOCAL_MODULE)))
+ifeq (true, $(LOCAL_UNCOMPRESS_DEX))
 	$(uncompress-dexs)
 	$(align-package)
-endif  # PRODUCT_LOADED_BY_PRIVILEGED_MODULES
+endif  # LOCAL_UNCOMPRESS_DEX
 
 .KATI_RESTAT: $(common_javalib.jar)
 
diff --git a/core/local_systemsdk.mk b/core/local_systemsdk.mk
new file mode 100644
index 0000000..6dab346
--- /dev/null
+++ b/core/local_systemsdk.mk
@@ -0,0 +1,56 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+ifdef BOARD_SYSTEMSDK_VERSIONS
+  # Apps and jars in the vendor or odm partitions are forced to build against the System SDK.
+  _is_vendor_app :=
+  ifneq (,$(filter true,$(LOCAL_VENDOR_MODULE) $(LOCAL_ODM_MODULE) $(LOCAL_PROPRIETARY_MODULE)))
+    # Note: no need to check LOCAL_MODULE_PATH* since LOCAL_[VENDOR|ODM|OEM]_MODULE is already
+    # set correctly before this is included.
+    _is_vendor_app := true
+  endif
+  ifneq (,$(filter JAVA_LIBRARIES APPS,$(LOCAL_MODULE_CLASS)))
+    ifndef LOCAL_SDK_VERSION
+      ifeq ($(_is_vendor_app),true)
+        LOCAL_SDK_VERSION := system_current
+      endif
+    endif
+  endif
+endif
+
+# Ensure that the selected System SDK version is one of the supported versions.
+# The range of supported versions becomes narrower when BOARD_SYSTEMSDK_VERSIONS
+# is set, since it must be a subset of PLATFORM_SYSTEMSDK_VERSIONS.
+ifneq (,$(call has-system-sdk-version,$(LOCAL_SDK_VERSION)))
+  ifneq ($(_is_vendor_app),true)
+    # Apps bundled in the system partition can use all System SDK versions provided by the platform.
+    _supported_systemsdk_versions := $(PLATFORM_SYSTEMSDK_VERSIONS)
+  else ifdef BOARD_SYSTEMSDK_VERSIONS
+    # When BOARD_SYSTEMSDK_VERSIONS is set, vendor apps are restricted to those
+    # versions, which must be a subset of PLATFORM_SYSTEMSDK_VERSIONS.
+    _supported_systemsdk_versions := $(BOARD_SYSTEMSDK_VERSIONS)
+  else
+    # Otherwise, vendor apks are treated the same as system apps.
+    _supported_systemsdk_versions := $(PLATFORM_SYSTEMSDK_VERSIONS)
+  endif
+  _system_sdk_version := $(call get-numeric-sdk-version,$(LOCAL_SDK_VERSION))
+  ifneq ($(_system_sdk_version),$(filter $(_system_sdk_version),$(_supported_systemsdk_versions)))
+    $(call pretty-error,Incompatible LOCAL_SDK_VERSION '$(LOCAL_SDK_VERSION)'. \
+           System SDK version '$(_system_sdk_version)' is not supported. Supported versions are: $(_supported_systemsdk_versions))
+  endif
+  _system_sdk_version :=
+  _supported_systemsdk_versions :=
+endif
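
To illustrate the checks in local_systemsdk.mk, here is a hedged sketch of a board config and a vendor app; the names, paths, and version are hypothetical:

```
# BoardConfig.mk (hypothetical): vendor modules may only use System SDK 28.
BOARD_SYSTEMSDK_VERSIONS := 28

# Android.mk (hypothetical): a vendor app pinned to System SDK 28. A
# version outside BOARD_SYSTEMSDK_VERSIONS hits the pretty-error above;
# omitting LOCAL_SDK_VERSION entirely defaults it to system_current.
include $(CLEAR_VARS)
LOCAL_PACKAGE_NAME := ExampleVendorApp
LOCAL_VENDOR_MODULE := true
LOCAL_SDK_VERSION := system_28
include $(BUILD_PACKAGE)
```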
diff --git a/core/main.mk b/core/main.mk
index 6f7366d..da88613 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -60,14 +60,24 @@
 # without changing the command line every time.  Avoids rebuilds
 # when using ninja.
 $(shell mkdir -p $(OUT_DIR) && \
-    echo -n $(BUILD_NUMBER) > $(OUT_DIR)/build_number.txt && \
-    echo -n $(BUILD_DATETIME) > $(OUT_DIR)/build_date.txt)
+    echo -n $(BUILD_NUMBER) > $(OUT_DIR)/build_number.txt)
+BUILD_NUMBER_FILE := $(OUT_DIR)/build_number.txt
+
 ifeq ($(HOST_OS),darwin)
 DATE_FROM_FILE := date -r $(BUILD_DATETIME_FROM_FILE)
 else
 DATE_FROM_FILE := date -d @$(BUILD_DATETIME_FROM_FILE)
 endif
 
+# Pick a reasonable string to use to identify files.
+ifeq ($(strip $(HAS_BUILD_NUMBER)),false)
+  # BUILD_NUMBER has a timestamp in it, which means that
+  # it will change every time.  Pick a stable value.
+  FILE_NAME_TAG := eng.$(USER)
+else
+  FILE_NAME_TAG := $(file <$(BUILD_NUMBER_FILE))
+endif
+
 # Make an empty directory, which can be used to make empty jars
 EMPTY_DIRECTORY := $(OUT_DIR)/empty
 $(shell mkdir -p $(EMPTY_DIRECTORY) && rm -rf $(EMPTY_DIRECTORY)/*)
@@ -97,11 +107,21 @@
 # (must be defined before including definitions.make)
 INTERNAL_MODIFIER_TARGETS := all
 
-# EMMA_INSTRUMENT_STATIC merges the static emma library to each emma-enabled module.
+# EMMA_INSTRUMENT_STATIC merges the static jacoco library into each
+# jacoco-enabled module.
 ifeq (true,$(EMMA_INSTRUMENT_STATIC))
 EMMA_INSTRUMENT := true
 endif
 
+ifeq (true,$(EMMA_INSTRUMENT))
+# Adding the jacoco library can cause the inclusion of some typically
+# banned classes, so if the user didn't specify SKIP_BOOT_JARS_CHECK,
+# enable it here.
+ifndef SKIP_BOOT_JARS_CHECK
+SKIP_BOOT_JARS_CHECK := true
+endif
+endif
+
 #
 # -----------------------------------------------------------------
 # Validate ADDITIONAL_DEFAULT_PROPERTIES.
@@ -116,6 +136,8 @@
 $(error ADDITIONAL_BUILD_PROPERTIES must not be set before here: $(ADDITIONAL_BUILD_PROPERTIES))
 endif
 
+ADDITIONAL_BUILD_PROPERTIES :=
+
 #
 # -----------------------------------------------------------------
 # Add the product-defined properties to the build properties.
@@ -175,19 +197,34 @@
 
 #
 # -----------------------------------------------------------------
-# Enable dynamic linker developer warnings for userdebug, eng
-# and non-REL builds
+# Enable dynamic linker and hidden API developer warnings for
+# userdebug, eng and non-REL builds
 ifneq ($(TARGET_BUILD_VARIANT),user)
-  ADDITIONAL_BUILD_PROPERTIES += ro.bionic.ld.warning=1
+  ADDITIONAL_BUILD_PROPERTIES += ro.bionic.ld.warning=1 \
+                                 ro.art.hiddenapi.warning=1
 else
 # Enable it for user builds as long as they are not final.
 ifneq ($(PLATFORM_VERSION_CODENAME),REL)
-  ADDITIONAL_BUILD_PROPERTIES += ro.bionic.ld.warning=1
+  ADDITIONAL_BUILD_PROPERTIES += ro.bionic.ld.warning=1 \
+                                 ro.art.hiddenapi.warning=1
 endif
 endif
 
 ADDITIONAL_BUILD_PROPERTIES += ro.treble.enabled=${PRODUCT_FULL_TREBLE}
 
+$(KATI_obsolete_var PRODUCT_FULL_TREBLE,\
+	Code should be written to work regardless of whether a device is Treble; \
+	variables like PRODUCT_SEPOLICY_SPLIT should be used until that is \
+	possible.)
+
+# Sets ro.actionable_compatible_property.enabled so that it is known at runtime
+# whether the whitelist of actionable compatible properties is enabled.
+ifeq ($(PRODUCT_ACTIONABLE_COMPATIBLE_PROPERTY_DISABLE),true)
+ADDITIONAL_DEFAULT_PROPERTIES += ro.actionable_compatible_property.enabled=false
+else
+ADDITIONAL_DEFAULT_PROPERTIES += ro.actionable_compatible_property.enabled=${PRODUCT_COMPATIBLE_PROPERTY}
+endif
+
 # -----------------------------------------------------------------
 ###
 ### In this section we set up the things that are different
@@ -311,7 +348,7 @@
 
 # Sets the location that the runtime dumps stack traces to when signalled
 # with SIGQUIT. Stack trace dumping is turned on for all android builds.
-ADDITIONAL_BUILD_PROPERTIES += dalvik.vm.stack-trace-file=/data/anr/traces.txt
+ADDITIONAL_BUILD_PROPERTIES += dalvik.vm.stack-trace-dir=/data/anr
 
 # ------------------------------------------------------------
 # Define a function that, given a list of module tags, returns
@@ -410,10 +447,10 @@
 
 $(foreach mk,$(subdir_makefiles),$(info [$(call inc_and_print,subdir_makefiles_inc)/$(subdir_makefiles_total)] including $(mk) ...)$(eval include $(mk)))
 
-ifdef PDK_FUSION_PLATFORM_ZIP
+ifneq (,$(PDK_FUSION_PLATFORM_ZIP)$(PDK_FUSION_PLATFORM_DIR))
 # Bring in the PDK platform.zip modules.
 include $(BUILD_SYSTEM)/pdk_fusion_modules.mk
-endif # PDK_FUSION_PLATFORM_ZIP
+endif # PDK_FUSION_PLATFORM_ZIP || PDK_FUSION_PLATFORM_DIR
 
 droid_targets : blueprint_tools
 
@@ -583,6 +620,31 @@
 endef
 $(call add-all-host-to-target-required-modules-deps)
 
+# Sets up dependencies such that whenever a target module is installed,
+# any host modules listed in $(ALL_MODULES.$(m).HOST_REQUIRED) will also be installed
+define add-all-target-to-host-required-modules-deps
+$(foreach m,$(ALL_MODULES), \
+  $(eval req_mods := $(ALL_MODULES.$(m).HOST_REQUIRED))\
+  $(if $(req_mods), \
+    $(eval req_files := )\
+    $(foreach req_mod,$(req_mods), \
+      $(eval req_file := $(filter $(HOST_OUT)/%, $(call module-installed-files,$(req_mod)))) \
+      $(if $(strip $(req_file)),\
+        ,\
+        $(error $(m).LOCAL_HOST_REQUIRED_MODULES : illegal value $(req_mod) : not a host module. If you want target modules to be installed along with your target module, add their names to LOCAL_REQUIRED_MODULES instead)\
+      )\
+      $(eval req_files := $(req_files)$(space)$(req_file))\
+    )\
+    $(eval req_files := $(strip $(req_files)))\
+    $(eval mod_files := $(filter $(TARGET_OUT_ROOT)/%, $(call module-installed-files,$(m))))\
+    $(eval mod_files := $(filter-out $(req_files),$(mod_files)))\
+    $(if $(mod_files),\
+      $(eval $(call add-required-deps, $(mod_files),$(req_files))) \
+    )\
+  )\
+)
+endef
+$(call add-all-target-to-host-required-modules-deps)
 
 t_m :=
 h_m :=
@@ -1016,6 +1078,9 @@
 .PHONY: vendorimage
 vendorimage: $(INSTALLED_VENDORIMAGE_TARGET)
 
+.PHONY: productimage
+productimage: $(INSTALLED_PRODUCTIMAGE_TARGET)
+
 .PHONY: systemotherimage
 systemotherimage: $(INSTALLED_SYSTEMOTHERIMAGE_TARGET)
 
@@ -1039,10 +1104,13 @@
 	$(INSTALLED_CACHEIMAGE_TARGET) \
 	$(INSTALLED_BPTIMAGE_TARGET) \
 	$(INSTALLED_VENDORIMAGE_TARGET) \
+	$(INSTALLED_PRODUCTIMAGE_TARGET) \
 	$(INSTALLED_SYSTEMOTHERIMAGE_TARGET) \
 	$(INSTALLED_FILES_FILE) \
 	$(INSTALLED_FILES_FILE_VENDOR) \
-	$(INSTALLED_FILES_FILE_SYSTEMOTHER)
+	$(INSTALLED_FILES_FILE_PRODUCT) \
+	$(INSTALLED_FILES_FILE_SYSTEMOTHER) \
+	soong_docs
 
 # dist_files only for putting your library into the dist directory with a full build.
 .PHONY: dist_files
@@ -1106,6 +1174,7 @@
     $(COVERAGE_ZIP) \
     $(INSTALLED_FILES_FILE) \
     $(INSTALLED_FILES_FILE_VENDOR) \
+    $(INSTALLED_FILES_FILE_PRODUCT) \
     $(INSTALLED_FILES_FILE_SYSTEMOTHER) \
     $(INSTALLED_BUILD_PROP_TARGET) \
     $(BUILT_TARGET_FILES_PACKAGE) \
@@ -1187,6 +1256,9 @@
 .PHONY: findbugs
 findbugs: $(INTERNAL_FINDBUGS_HTML_TARGET) $(INTERNAL_FINDBUGS_XML_TARGET)
 
+.PHONY: findlsdumps
+findlsdumps: $(FIND_LSDUMPS_FILE)
+
 #xxx scrape this from ALL_MODULE_NAME_TAGS
 .PHONY: modules
 modules:
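
The `add-all-target-to-host-required-modules-deps` macro added above mirrors the existing host-to-target direction. A minimal sketch of the Android.mk usage it enables, with hypothetical module names:

```
include $(CLEAR_VARS)
LOCAL_MODULE := example_daemon
LOCAL_SRC_FILES := daemon.cpp
# Installing example_daemon now also installs this host tool; naming a
# non-host module here triggers the explicit error added above.
LOCAL_HOST_REQUIRED_MODULES := example_host_tool
include $(BUILD_EXECUTABLE)
```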
diff --git a/core/native_test_config_template.xml b/core/native_test_config_template.xml
new file mode 100644
index 0000000..a960529
--- /dev/null
+++ b/core/native_test_config_template.xml
@@ -0,0 +1,27 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2017 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<!-- This test config file is auto-generated. -->
+<configuration description="Runs {MODULE}.">
+    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+        <option name="cleanup" value="true" />
+        <option name="push" value="{MODULE}->/data/local/tmp/{MODULE}" />
+    </target_preparer>
+
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="native-test-device-path" value="/data/local/tmp" />
+        <option name="module-name" value="{MODULE}" />
+    </test>
+</configuration>
diff --git a/core/package_internal.mk b/core/package_internal.mk
index 9f99c7a..858a1bf 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -111,6 +111,8 @@
         enforce_rro_enabled :=
       else ifeq (true,$(LOCAL_ODM_MODULE))
         enforce_rro_enabled :=
+      else ifeq (true,$(LOCAL_PRODUCT_MODULE))
+        enforce_rro_enabled :=
       endif
     else ifeq ($(filter $(TARGET_OUT)/%,$(LOCAL_MODULE_PATH)),)
       enforce_rro_enabled :=
@@ -226,8 +228,20 @@
 LOCAL_INTERMEDIATE_TARGETS += $(R_file_stamp)
 endif
 
+ifdef LOCAL_COMPRESSED_MODULE
+ifneq (true,$(LOCAL_COMPRESSED_MODULE))
+$(call pretty-error, Unknown value for LOCAL_COMPRESSED_MODULE $(LOCAL_COMPRESSED_MODULE))
+endif
+endif
+
+ifdef LOCAL_COMPRESSED_MODULE
+PACKAGES.$(LOCAL_PACKAGE_NAME).COMPRESSED := gz
+LOCAL_BUILT_MODULE_STEM := package.apk.gz
+LOCAL_INSTALLED_MODULE_STEM := $(LOCAL_MODULE).apk.gz
+else  # !LOCAL_COMPRESSED_MODULE
 LOCAL_BUILT_MODULE_STEM := package.apk
 LOCAL_INSTALLED_MODULE_STEM := $(LOCAL_MODULE).apk
+endif
 
 LOCAL_PROGUARD_ENABLED:=$(strip $(LOCAL_PROGUARD_ENABLED))
 ifndef LOCAL_PROGUARD_ENABLED
@@ -260,9 +274,11 @@
 
 ifeq (true,$(LOCAL_EMMA_INSTRUMENT))
 ifeq (true,$(EMMA_INSTRUMENT_STATIC))
-ifneq ($(LOCAL_SRC_FILES)$(LOCAL_STATIC_JAVA_LIBRARIES)$(LOCAL_SOURCE_FILES_ALL_GENERATED),)
+ifneq ($(LOCAL_SRC_FILES)$(LOCAL_SRCJARS)$(LOCAL_STATIC_JAVA_LIBRARIES)$(LOCAL_SOURCE_FILES_ALL_GENERATED),)
 # Only add jacocoagent if the package contains some java code
 LOCAL_STATIC_JAVA_LIBRARIES += jacocoagent
+# Exclude jacoco classes from proguard
+LOCAL_PROGUARD_FLAGS += -include $(BUILD_SYSTEM)/proguard.jacoco.flags
 endif # Contains java code
 else
 ifdef LOCAL_SDK_VERSION
@@ -298,11 +314,19 @@
 LOCAL_AAPT_FLAGS += --auto-add-overlay --extra-packages com.android.databinding.library
 endif  # LOCAL_DATA_BINDING
 
+# If the module is a compressed module, we don't pre-opt it because its final
+# installation location will be the data partition.
+ifdef LOCAL_COMPRESSED_MODULE
+LOCAL_DEX_PREOPT := false
+endif
+
 include $(BUILD_SYSTEM)/android_manifest.mk
 
+called_from_package_internal := true
 #################################
 include $(BUILD_SYSTEM)/java.mk
 #################################
+called_from_package_internal :=
 
 LOCAL_SDK_RES_VERSION:=$(strip $(LOCAL_SDK_RES_VERSION))
 ifeq ($(LOCAL_SDK_RES_VERSION),)
@@ -337,10 +361,13 @@
 # Make sure the data-binding process happens before javac and generation of R.java.
 $(R_file_stamp): $(data_binding_stamp)
 $(java_source_list_file): $(data_binding_stamp)
-$(foreach x,$(sharded_java_source_list_files),$(eval $(x): $(data_binding_stamp)))
 $(full_classes_compiled_jar): $(data_binding_stamp)
 endif  # LOCAL_DATA_BINDING
 
+resource_export_package :=
+
+include $(BUILD_SYSTEM)/aapt_flags.mk
+
 ifeq ($(need_compile_res),true)
 
 ###############################
@@ -350,6 +377,10 @@
 my_apk_split_configs :=
 
 ifdef LOCAL_PACKAGE_SPLITS
+ifdef LOCAL_COMPRESSED_MODULE
+$(error $(LOCAL_MODULE): LOCAL_COMPRESSED_MODULE is not currently supported for split installs)
+endif  # LOCAL_COMPRESSED_MODULE
+
 my_apk_split_configs := $(LOCAL_PACKAGE_SPLITS)
 my_split_suffixes := $(subst $(comma),_,$(my_apk_split_configs))
 built_apk_splits := $(foreach s,$(my_split_suffixes),$(intermediates)/package_$(s).apk)
@@ -364,8 +395,8 @@
 else
 ifneq (,$(LOCAL_SDK_VERSION))
 # Set target-api for LOCAL_SDK_VERSIONs other than current.
-ifneq (,$(filter-out current system_current test_current, $(LOCAL_SDK_VERSION)))
-renderscript_target_api := $(LOCAL_SDK_VERSION)
+ifneq (,$(filter-out current system_current test_current core_current, $(LOCAL_SDK_VERSION)))
+renderscript_target_api := $(call get-numeric-sdk-version,$(LOCAL_SDK_VERSION))
 endif
 endif  # LOCAL_SDK_VERSION is set
 endif  # LOCAL_RENDERSCRIPT_TARGET_API is set
@@ -377,8 +408,13 @@
 endif  # renderscript_target_api is set
 my_asset_dirs := $(LOCAL_ASSET_DIR)
 my_full_asset_paths := $(all_assets)
+
 # Add AAPT2 link specific flags.
-$(my_res_package): PRIVATE_AAPT_FLAGS := $(LOCAL_AAPT_FLAGS) --no-static-lib-packages
+$(my_res_package): PRIVATE_AAPT_FLAGS := $(LOCAL_AAPT_FLAGS)
+ifndef LOCAL_AAPT_NAMESPACES
+  $(my_res_package): PRIVATE_AAPT_FLAGS += --no-static-lib-packages
+endif
+
 include $(BUILD_SYSTEM)/aapt2.mk
 else  # LOCAL_USE_AAPT2
 
@@ -403,7 +439,6 @@
 
 $(proguard_options_file): $(R_file_stamp)
 
-resource_export_package :=
 ifdef LOCAL_EXPORT_PACKAGE_RESOURCES
 # Put this module's resources into a PRODUCT-agnostic package that
 # other packages can use to build their own PRODUCT-agnostic R.java (etc.)
@@ -431,39 +466,34 @@
 # The R.java file must exist by the time the java source
 # list is generated
 $(java_source_list_file): $(R_file_stamp)
-$(foreach x,$(sharded_java_source_list_files),$(eval $(x): $(R_file_stamp)))
 
 endif  # need_compile_res
 
-ifeq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
-# We need to explicitly clear this var so that we don't
-# inherit the value from whomever caused us to be built.
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_AAPT_INCLUDES :=
-else
+framework_res_package_export :=
+
+ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
 # Most packages should link against the resources defined by framework-res.
 # Even if they don't have their own resources, they may use framework
 # resources.
-ifneq ($(filter-out current system_current test_current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_APPS),$(filter current system_current test_current,$(LOCAL_SDK_RES_VERSION))),)
+ifeq ($(LOCAL_SDK_RES_VERSION),core_current)
+# core_current doesn't contain any framework resources.
+else ifneq ($(filter-out current system_current test_current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_APPS),$(filter current system_current test_current,$(LOCAL_SDK_RES_VERSION))),)
 # for released sdk versions, the platform resources were built into android.jar.
 framework_res_package_export := \
     $(HISTORICAL_SDK_VERSIONS_ROOT)/$(LOCAL_SDK_RES_VERSION)/android.jar
-framework_res_package_export_deps := $(framework_res_package_export)
 else # LOCAL_SDK_RES_VERSION
 framework_res_package_export := \
     $(call intermediates-dir-for,APPS,framework-res,,COMMON)/package-export.apk
-# We can't depend directly on the export.apk file; it won't get its
-# PRIVATE_ vars set up correctly if we do.  Instead, depend on the
-# corresponding R.stamp file, which lists the export.apk as a dependency.
-framework_res_package_export_deps := \
-    $(dir $(framework_res_package_export))src/R.stamp
 endif # LOCAL_SDK_RES_VERSION
+endif # LOCAL_NO_STANDARD_LIBRARIES
+
 all_library_res_package_exports := \
     $(framework_res_package_export) \
     $(foreach lib,$(LOCAL_RES_LIBRARIES),\
         $(call intermediates-dir-for,APPS,$(lib),,COMMON)/package-export.apk)
 
 all_library_res_package_export_deps := \
-    $(framework_res_package_export_deps) \
+    $(framework_res_package_export) \
     $(foreach lib,$(LOCAL_RES_LIBRARIES),\
         $(call intermediates-dir-for,APPS,$(lib),,COMMON)/src/R.stamp)
 $(resource_export_package) $(R_file_stamp) $(LOCAL_BUILT_MODULE): $(all_library_res_package_export_deps)
@@ -473,7 +503,6 @@
 ifdef LOCAL_USE_AAPT2
 $(my_res_package) : $(all_library_res_package_export_deps)
 endif
-endif # LOCAL_NO_STANDARD_LIBRARIES
 
 ifneq ($(full_classes_jar),)
 $(LOCAL_BUILT_MODULE): PRIVATE_DEX_FILE := $(built_dex)
@@ -553,6 +582,9 @@
 else
 $(LOCAL_BUILT_MODULE): PRIVATE_RESOURCE_LIST := $(all_res_assets)
 $(LOCAL_BUILT_MODULE) : $(all_res_assets) $(full_android_manifest) $(AAPT) $(ZIPALIGN)
+endif  # LOCAL_USE_AAPT2
+ifdef LOCAL_COMPRESSED_MODULE
+$(LOCAL_BUILT_MODULE) : $(MINIGZIP)
 endif
 	@echo "target Package: $(PRIVATE_MODULE) ($@)"
 ifdef LOCAL_USE_AAPT2
@@ -575,25 +607,23 @@
 	$(call add-jar-resources-to-package,$@,$(PRIVATE_FULL_CLASSES_JAR),$(PRIVATE_RESOURCE_INTERMEDIATES_DIR))
 endif
 endif  # full_classes_jar
+ifeq (true, $(LOCAL_UNCOMPRESS_DEX))
+	@# No need to align, sign-package below will do it.
+	$(uncompress-dexs)
+endif
 ifdef LOCAL_DEX_PREOPT
 ifneq ($(BUILD_PLATFORM_ZIP),)
 	@# Keep a copy of apk with classes.dex unstripped
 	$(hide) cp -f $@ $(dir $@)package.dex.apk
 endif  # BUILD_PLATFORM_ZIP
-ifneq (true,$(DONT_UNCOMPRESS_PRIV_APPS_DEXS))
-ifeq (true,$(LOCAL_PRIVILEGED_MODULE))
-	@# No need to align, sign-package below will do it.
-	$(uncompress-dexs)
-endif  # LOCAL_PRIVILEGED_MODULE
-endif  # DONT_UNCOMPRESS_PRIV_APPS_DEXS
 ifneq (nostripping,$(LOCAL_DEX_PREOPT))
 	$(call dexpreopt-remove-classes.dex,$@)
 endif
-endif
-ifneq (,$(filter $(PRODUCT_LOADED_BY_PRIVILEGED_MODULES), $(LOCAL_MODULE)))
-	$(uncompress-dexs)
-endif  # PRODUCT_LOADED_BY_PRIVILEGED_MODULES
+endif  # LOCAL_DEX_PREOPT
 	$(sign-package)
+ifdef LOCAL_COMPRESSED_MODULE
+	$(compress-package)
+endif  # LOCAL_COMPRESSED_MODULE
 
 ###############################
 ## Build dpi-specific apks, if it's apps_only build.
@@ -613,6 +643,10 @@
 $(built_odex) : $(dir $(LOCAL_BUILT_MODULE))% : $(built_dex)
 	$(hide) mkdir -p $(dir $@) && rm -f $@
 	$(add-dex-to-package)
+ifeq (true, $(LOCAL_UNCOMPRESS_DEX))
+	$(uncompress-dexs)
+	$(align-package)
+endif
 	$(hide) mv $@ $@.input
 	$(call dexpreopt-one-file,$@.input,$@)
 	$(hide) rm $@.input
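
Taken together, the package_internal.mk changes make a compressed app build as `package.apk.gz`, install as `<module>.apk.gz`, and skip dex-preopt. A hedged sketch of the opt-in; the module name is hypothetical, and only the value `true` is accepted:

```
include $(CLEAR_VARS)
# Hypothetical stub app, built as package.apk.gz and installed as
# ExampleStubApp.apk.gz; never pre-opted, and incompatible with
# LOCAL_PACKAGE_SPLITS.
LOCAL_PACKAGE_NAME := ExampleStubApp
LOCAL_SRC_FILES := $(call all-java-files-under, src)
LOCAL_COMPRESSED_MODULE := true
include $(BUILD_PACKAGE)
```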
diff --git a/core/pathmap.mk b/core/pathmap.mk
index a1c20c9..af33f5d 100644
--- a/core/pathmap.mk
+++ b/core/pathmap.mk
@@ -80,6 +80,7 @@
 	    telecomm \
 	    telephony \
 	    wifi \
+	    lowpan \
 	    keystore \
 	    rs \
 	 )
diff --git a/core/pdk_config.mk b/core/pdk_config.mk
index dba748c..9fe6d47 100644
--- a/core/pdk_config.mk
+++ b/core/pdk_config.mk
@@ -1,10 +1,13 @@
 # This file defines the rule to fuse the platform.zip into the current PDK build.
 PDK_PLATFORM_JAVA_ZIP_JAVA_TARGET_LIB_DIR :=
 PDK_PLATFORM_JAVA_ZIP_JAVA_HOST_LIB_DIR := \
-	host/common/obj/JAVA_LIBRARIES/bouncycastle-host_intermediates
+	host/common/obj/JAVA_LIBRARIES/bouncycastle-host_intermediates \
+	host/common/obj/JAVA_LIBRARIES/compatibility-host-util_intermediates \
+	host/common/obj/JAVA_LIBRARIES/cts-tradefed-harness_intermediates \
+	host/common/obj/JAVA_LIBRARIES/hosttestlib_intermediates
 PDK_PLATFORM_JAVA_ZIP_CONTENTS :=
 
-ifneq (,$(filter platform-java, $(MAKECMDGOALS))$(PDK_FUSION_PLATFORM_ZIP))
+ifneq (,$(filter platform-java, $(MAKECMDGOALS))$(PDK_FUSION_PLATFORM_ZIP)$(PDK_FUSION_PLATFORM_DIR))
 # additional items to add to platform.zip for platform-java build
 # For these dirs, add classes.jar and javalib.jar from the dir to platform.zip
 # all paths under out dir
@@ -21,6 +24,7 @@
   target/common/obj/JAVA_LIBRARIES/core-oj_intermediates \
   target/common/obj/JAVA_LIBRARIES/core-libart_intermediates \
   target/common/obj/JAVA_LIBRARIES/legacy-test_intermediates \
+  target/common/obj/JAVA_LIBRARIES/legacy-android-test_intermediates \
   target/common/obj/JAVA_LIBRARIES/ext_intermediates \
   target/common/obj/JAVA_LIBRARIES/framework_intermediates \
   target/common/obj/JAVA_LIBRARIES/hwbinder_intermediates \
@@ -40,39 +44,60 @@
 	$(PDK_PLATFORM_JAVA_ZIP_JAVA_HOST_LIB_DIR)
 
 PDK_PLATFORM_JAVA_ZIP_CONTENTS += $(foreach lib_dir,$(PDK_PLATFORM_JAVA_ZIP_JAVA_LIB_DIR),\
-    $(lib_dir)/classes.jar $(lib_dir)/classes.jar.toc \
+    $(lib_dir)/classes.jar $(lib_dir)/classes-header.jar \
     $(lib_dir)/javalib.jar  $(lib_dir)/classes*.dex \
     $(lib_dir)/classes.dex.toc )
 
 # check and override java support level
-ifneq ($(TARGET_BUILD_PDK)$(PDK_FUSION_PLATFORM_ZIP),)
-ifneq ($(wildcard external/proguard),)
-TARGET_BUILD_JAVA_SUPPORT_LEVEL := sdk
-else # no proguard
-TARGET_BUILD_JAVA_SUPPORT_LEVEL :=
-endif
-# platform supprot is set after checking platform.zip
+ifneq ($(TARGET_BUILD_PDK)$(PDK_FUSION_PLATFORM_ZIP)$(PDK_FUSION_PLATFORM_DIR),)
+  ifneq ($(wildcard external/proguard),)
+    TARGET_BUILD_JAVA_SUPPORT_LEVEL := sdk
+  else # no proguard
+    TARGET_BUILD_JAVA_SUPPORT_LEVEL :=
+  endif
+  # platform support is set after checking platform.zip
 endif # PDK
 
+ifneq (,$(PDK_FUSION_PLATFORM_DIR)$(PDK_FUSION_PLATFORM_ZIP))
+
+_pdk_fusion_intermediates :=
+_pdk_fusion_stamp :=
+_pdk_fusion_file_list :=
+_pdk_fusion_java_file_list :=
+PDK_FUSION_SYMLINK_STAMP :=
+
+ifdef PDK_FUSION_PLATFORM_DIR
+  _pdk_fusion_intermediates := $(PDK_FUSION_PLATFORM_DIR)
+  _pdk_fusion_file_list := $(sort \
+    $(shell cd $(PDK_FUSION_PLATFORM_DIR); find * -type f))
+  _pdk_fusion_java_file_list := $(filter target/common/%,$(_pdk_fusion_file_list))
+  _pdk_fusion_file_list := $(filter-out target/common/%,$(_pdk_fusion_file_list))
+
+  PDK_FUSION_SYMLINK_STAMP := $(call intermediates-dir-for, PACKAGING, pdk_fusion)/pdk_symlinks.stamp
+
+  symlink_list := $(sort \
+    $(shell cd $(PDK_FUSION_PLATFORM_DIR); find * -type l))
+$(PDK_FUSION_SYMLINK_STAMP): PRIVATE_SYMLINKS := $(foreach s,$(symlink_list),\
+    $(s):$(shell readlink $(PDK_FUSION_PLATFORM_DIR)/$(s)))
+$(PDK_FUSION_SYMLINK_STAMP):
+	$(foreach s,$(PRIVATE_SYMLINKS),\
+	  mkdir -p $(PRODUCT_OUT)/$(dir $(call word-colon,1,$(s))) && \
+	  ln -sf $(call word-colon,2,$(s)) $(PRODUCT_OUT)/$(call word-colon,1,$(s)) &&) true
+	touch $@
+
+  symlink_list :=
+endif # PDK_FUSION_PLATFORM_DIR
+
 ifdef PDK_FUSION_PLATFORM_ZIP
-_pdk_fusion_intermediates := $(call intermediates-dir-for, PACKAGING, pdk_fusion)
-_pdk_fusion_stamp := $(_pdk_fusion_intermediates)/pdk_fusion.stamp
+  _pdk_fusion_intermediates := $(call intermediates-dir-for, PACKAGING, pdk_fusion)
+  _pdk_fusion_stamp := $(_pdk_fusion_intermediates)/pdk_fusion.stamp
 
-_pdk_fusion_file_list := $(shell unzip -Z -1 $(PDK_FUSION_PLATFORM_ZIP) \
-    '*[^/]' -x 'target/common/*' 2>/dev/null)
-_pdk_fusion_java_file_list := \
-	$(shell unzip -Z -1 $(PDK_FUSION_PLATFORM_ZIP) 'target/common/*' 2>/dev/null)
-_pdk_fusion_files := $(addprefix $(_pdk_fusion_intermediates)/,\
-    $(_pdk_fusion_file_list) $(_pdk_fusion_java_file_list))
-
-ifneq ($(_pdk_fusion_java_file_list),)
-# This represents whether java build can use platform API or not
-# This should not be used in Android.mk
-TARGET_BUILD_PDK_JAVA_PLATFORM := true
-ifneq ($(TARGET_BUILD_JAVA_SUPPORT_LEVEL),)
-TARGET_BUILD_JAVA_SUPPORT_LEVEL := platform
-endif
-endif
+  _pdk_fusion_file_list := $(shell unzip -Z -1 $(PDK_FUSION_PLATFORM_ZIP) \
+      '*[^/]' -x 'target/common/*' 2>/dev/null)
+  _pdk_fusion_java_file_list := \
+      $(shell unzip -Z -1 $(PDK_FUSION_PLATFORM_ZIP) 'target/common/*' 2>/dev/null)
+  _pdk_fusion_files := $(addprefix $(_pdk_fusion_intermediates)/,\
+      $(_pdk_fusion_file_list) $(_pdk_fusion_java_file_list))
 
 $(_pdk_fusion_stamp) : $(PDK_FUSION_PLATFORM_ZIP)
 	@echo "Unzip $(dir $@) <- $<"
@@ -81,9 +106,17 @@
 	$(call split-long-arguments,-touch,$(_pdk_fusion_files))
 	$(hide) touch $@
 
-
 $(_pdk_fusion_files) : $(_pdk_fusion_stamp)
+endif # PDK_FUSION_PLATFORM_ZIP
 
+ifneq ($(_pdk_fusion_java_file_list),)
+  # This represents whether the java build can use the platform API or not.
+  # This should not be used in Android.mk.
+  TARGET_BUILD_PDK_JAVA_PLATFORM := true
+  ifneq ($(TARGET_BUILD_JAVA_SUPPORT_LEVEL),)
+    TARGET_BUILD_JAVA_SUPPORT_LEVEL := platform
+  endif
+endif
 
 # Implicit pattern rules to copy the fusion files to the system image directory.
 # Note that if there is already explicit rule in the build system to generate a file,
@@ -101,63 +134,57 @@
 	$(hide) cp -fpPR $< $@
 
 ifeq (true,$(TARGET_BUILD_PDK_JAVA_PLATFORM))
+  PDK_FUSION_OUT_DIR := $(OUT_DIR)
 
-PDK_FUSION_OUT_DIR := $(OUT_DIR)
+  define JAVA_dependency_template
+  $(call add-dependency,$(PDK_FUSION_OUT_DIR)/$(strip $(1)),\
+    $(foreach d,$(filter $(2),$(_pdk_fusion_java_file_list)),$(PDK_FUSION_OUT_DIR)/$(d)))
+  endef
 
-define JAVA_dependency_template
-$(call add-dependency,$(PDK_FUSION_OUT_DIR)/$(strip $(1)),\
-  $(foreach d,$(filter $(2),$(_pdk_fusion_java_file_list)),$(PDK_FUSION_OUT_DIR)/$(d)))
-endef
+  # needs explicit dependency as package-export.apk is not explicitly pulled
+  $(eval $(call JAVA_dependency_template,\
+  target/common/obj/APPS/framework-res_intermediates/src/R.stamp,\
+  target/common/obj/APPS/framework-res_intermediates/package-export.apk))
 
-# needs explicit dependency as package-export.apk is not explicitly pulled
-$(eval $(call JAVA_dependency_template,\
-target/common/obj/APPS/framework-res_intermediates/src/R.stamp,\
-target/common/obj/APPS/framework-res_intermediates/package-export.apk))
-
-# javalib.jar should pull classes.jar as classes.jar is not explicitly pulled.
-$(foreach lib_dir,$(PDK_PLATFORM_JAVA_ZIP_JAVA_TARGET_LIB_DIR),\
-$(eval $(call JAVA_dependency_template,$(lib_dir)/javalib.jar,\
-$(lib_dir)/classes.jar)))
+  # javalib.jar should pull classes.jar as classes.jar is not explicitly pulled.
+  $(foreach lib_dir,$(PDK_PLATFORM_JAVA_ZIP_JAVA_TARGET_LIB_DIR),\
+  $(eval $(call JAVA_dependency_template,$(lib_dir)/javalib.jar,\
+  $(lib_dir)/classes.jar)))
 
 # implicit rules for all other target files
 $(TARGET_COMMON_OUT_ROOT)/% : $(_pdk_fusion_intermediates)/target/common/% $(_pdk_fusion_stamp)
 	@mkdir -p $(dir $@)
 	$(hide) cp -fpPR $< $@
-endif
+endif # TARGET_BUILD_PDK_JAVA_PLATFORM
 
 ALL_PDK_FUSION_FILES := $(addprefix $(PRODUCT_OUT)/, $(_pdk_fusion_file_list))
 
-endif # PDK_FUSION_PLATFORM_ZIP
+endif # PDK_FUSION_PLATFORM_ZIP || PDK_FUSION_PLATFORM_DIR
 
 ifeq ($(TARGET_BUILD_PDK),true)
-$(info PDK TARGET_BUILD_JAVA_SUPPORT_LEVEL $(TARGET_BUILD_JAVA_SUPPORT_LEVEL))
-ifeq ($(TARGET_BUILD_PDK_JAVA_PLATFORM),)
-
-# SDK used for Java build under PDK
-PDK_BUILD_SDK_VERSION := $(lastword $(TARGET_AVAILABLE_SDK_VERSIONS))
-$(info PDK Build uses SDK $(PDK_BUILD_SDK_VERSION))
-
-else # PDK_JAVA
-
-$(info PDK Build uses the current platform API)
-
-endif # PDK_JAVA
-
+  $(info PDK TARGET_BUILD_JAVA_SUPPORT_LEVEL $(TARGET_BUILD_JAVA_SUPPORT_LEVEL))
+  ifeq ($(TARGET_BUILD_PDK_JAVA_PLATFORM),)
+    # SDK used for Java build under PDK
+    PDK_BUILD_SDK_VERSION := $(lastword $(TARGET_AVAILABLE_SDK_VERSIONS))
+    $(info PDK Build uses SDK $(PDK_BUILD_SDK_VERSION))
+  else # PDK_JAVA
+    $(info PDK Build uses the current platform API)
+  endif # PDK_JAVA
 endif # BUILD_PDK
 
 ifneq (,$(filter platform platform-java, $(MAKECMDGOALS))$(filter true,$(TARGET_BUILD_PDK)))
-# files under $(PRODUCT_OUT)/symbols to help debugging.
-# Source not included to PDK due to dependency issue, so provide symbols instead.
+  # files under $(PRODUCT_OUT)/symbols to help debugging.
+  # Source is not included in the PDK due to dependency issues, so provide symbols instead.
 
-# We may not be building all of them.
-# The platform.zip just silently ignores the nonexistent ones.
-PDK_SYMBOL_FILES_LIST := \
-    system/bin/app_process32 \
-    system/bin/app_process64
+  # We may not be building all of them.
+  # The platform.zip just silently ignores the nonexistent ones.
+  PDK_SYMBOL_FILES_LIST := \
+      system/bin/app_process32 \
+      system/bin/app_process64
 
-ifdef PDK_FUSION_PLATFORM_ZIP
-# symbols should be explicitly pulled for fusion build
-$(foreach f,$(filter $(PDK_SYMBOL_FILES_LIST), $(_pdk_fusion_file_list)),\
-  $(eval $(call add-dependency,$(PRODUCT_OUT)/$(f),$(PRODUCT_OUT)/symbols/$(f))))
-endif # PLATFORM_ZIP
-endif # platform.zip build or PDK
+  ifneq (,$(PDK_FUSION_PLATFORM_ZIP)$(PDK_FUSION_PLATFORM_DIR))
+    # symbols should be explicitly pulled for fusion build
+    $(foreach f,$(filter $(PDK_SYMBOL_FILES_LIST), $(_pdk_fusion_file_list)),\
+      $(eval $(call add-dependency,$(PRODUCT_OUT)/$(f),$(PRODUCT_OUT)/symbols/$(f))))
+  endif # PLATFORM_ZIP || PLATFORM_DIR
+endif # platform.zip/dir build or PDK
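
With the changes above, a PDK fusion build can consume an unpacked platform tree instead of a platform.zip. A sketch of the two mutually exclusive entry points, with hypothetical paths:

```
# Either point at an unpacked platform tree ...
PDK_FUSION_PLATFORM_DIR := /path/to/unpacked/platform
# ... or keep the pre-existing zip-based flow (set only one of the two):
# PDK_FUSION_PLATFORM_ZIP := /path/to/platform.zip
```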
diff --git a/core/pdk_fusion_modules.mk b/core/pdk_fusion_modules.mk
index 0c03f37..9aabd0f 100644
--- a/core/pdk_fusion_modules.mk
+++ b/core/pdk_fusion_modules.mk
@@ -2,10 +2,16 @@
 # We use these rules to rebuild .odex files of the .jar/.apk inside the platform.zip.
 #
 
+ifdef PDK_FUSION_PLATFORM_ZIP
 pdk_dexpreopt_config_mk := $(TARGET_OUT_INTERMEDIATES)/pdk_dexpreopt_config.mk
 
 $(shell rm -f $(pdk_dexpreopt_config_mk) && mkdir -p $(dir $(pdk_dexpreopt_config_mk)) && \
         unzip -qo $(PDK_FUSION_PLATFORM_ZIP) -d $(dir $(pdk_dexpreopt_config_mk)) pdk_dexpreopt_config.mk 2>/dev/null)
+endif
+
+ifdef PDK_FUSION_PLATFORM_DIR
+pdk_dexpreopt_config_mk := $(PDK_FUSION_PLATFORM_DIR)/pdk_dexpreopt_config.mk
+endif
 
 -include $(pdk_dexpreopt_config_mk)
 
diff --git a/core/prebuilt_internal.mk b/core/prebuilt_internal.mk
index 47bd1b2..96e2613 100644
--- a/core/prebuilt_internal.mk
+++ b/core/prebuilt_internal.mk
@@ -20,19 +20,19 @@
 
 ifdef LOCAL_PREBUILT_MODULE_FILE
   my_prebuilt_src_file := $(LOCAL_PREBUILT_MODULE_FILE)
+else ifdef LOCAL_SRC_FILES_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)
+  my_prebuilt_src_file := $(LOCAL_PATH)/$(LOCAL_SRC_FILES_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH))
+  LOCAL_SRC_FILES_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH) :=
+else ifdef LOCAL_SRC_FILES_$(my_32_64_bit_suffix)
+  my_prebuilt_src_file := $(LOCAL_PATH)/$(LOCAL_SRC_FILES_$(my_32_64_bit_suffix))
+  LOCAL_SRC_FILES_$(my_32_64_bit_suffix) :=
+else ifdef LOCAL_SRC_FILES
+  my_prebuilt_src_file := $(LOCAL_PATH)/$(LOCAL_SRC_FILES)
+  LOCAL_SRC_FILES :=
+else ifdef LOCAL_REPLACE_PREBUILT_APK_INSTALLED
+  # This is handled specially below
 else
-  ifdef LOCAL_SRC_FILES_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)
-    my_prebuilt_src_file := $(LOCAL_PATH)/$(LOCAL_SRC_FILES_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH))
-    LOCAL_SRC_FILES_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH) :=
-  else
-    ifdef LOCAL_SRC_FILES_$(my_32_64_bit_suffix)
-      my_prebuilt_src_file := $(LOCAL_PATH)/$(LOCAL_SRC_FILES_$(my_32_64_bit_suffix))
-      LOCAL_SRC_FILES_$(my_32_64_bit_suffix) :=
-    else
-      my_prebuilt_src_file := $(LOCAL_PATH)/$(LOCAL_SRC_FILES)
-      LOCAL_SRC_FILES :=
-    endif
-  endif
+  $(call pretty-error,No source files specified)
 endif
 
 LOCAL_CHECKED_MODULE := $(my_prebuilt_src_file)
@@ -105,13 +105,34 @@
   prebuilt_module_is_dex_javalib :=
 endif
 
-ifeq ($(LOCAL_MODULE_CLASS),APPS)
-LOCAL_BUILT_MODULE_STEM := package.apk
-ifndef LOCAL_INSTALLED_MODULE_STEM
-LOCAL_INSTALLED_MODULE_STEM := $(LOCAL_MODULE).apk
+ifdef LOCAL_COMPRESSED_MODULE
+ifneq (true,$(LOCAL_COMPRESSED_MODULE))
+$(call pretty-error, Unknown value for LOCAL_COMPRESSED_MODULE $(LOCAL_COMPRESSED_MODULE))
 endif
 endif
 
+ifeq ($(LOCAL_MODULE_CLASS),APPS)
+ifdef LOCAL_COMPRESSED_MODULE
+LOCAL_BUILT_MODULE_STEM := package.apk.gz
+else
+LOCAL_BUILT_MODULE_STEM := package.apk
+endif  # LOCAL_COMPRESSED_MODULE
+
+ifndef LOCAL_INSTALLED_MODULE_STEM
+ifdef LOCAL_COMPRESSED_MODULE
+PACKAGES.$(LOCAL_MODULE).COMPRESSED := gz
+LOCAL_INSTALLED_MODULE_STEM := $(LOCAL_MODULE).apk.gz
+else
+LOCAL_INSTALLED_MODULE_STEM := $(LOCAL_MODULE).apk
+endif  # LOCAL_COMPRESSED_MODULE
+endif  # LOCAL_INSTALLED_MODULE_STEM
+
+else  # $(LOCAL_MODULE_CLASS) != APPS)
+ifdef LOCAL_COMPRESSED_MODULE
+$(error $(LOCAL_MODULE) : LOCAL_COMPRESSED_MODULE can only be defined for module class APPS)
+endif  # LOCAL_COMPRESSED_MODULE
+endif
+
 ifneq ($(filter true keep_symbols no_debuglink mini-debug-info,$(my_strip_module) $(my_pack_module_relocations)),)
   ifdef LOCAL_IS_HOST_MODULE
     $(error Cannot strip/pack host module LOCAL_PATH=$(LOCAL_PATH))
@@ -152,8 +173,10 @@
 endif
 export_cflags :=
 
+include $(BUILD_SYSTEM)/allowed_ndk_types.mk
+
 ifdef LOCAL_SDK_VERSION
-my_link_type := native:ndk
+my_link_type := native:ndk:$(my_ndk_stl_family):$(my_ndk_stl_link_type)
 else ifdef LOCAL_USE_VNDK
     _name := $(patsubst %.vendor,%,$(LOCAL_MODULE))
     ifneq ($(filter $(_name),$(VNDK_CORE_LIBRARIES) $(VNDK_SAMEPROCESS_LIBRARIES) $(LLNDK_LIBRARIES)),)
@@ -276,6 +299,8 @@
 endif
 endif
 
+dex_preopt_profile_src_file := $(my_prebuilt_src_file)
+
 rs_compatibility_jni_libs :=
 include $(BUILD_SYSTEM)/install_jni_libs.mk
 
@@ -328,6 +353,12 @@
 endif
 endif
 
+# If the module is a compressed module, we don't pre-opt it because its final
+# installation location will be the data partition.
+ifdef LOCAL_COMPRESSED_MODULE
+LOCAL_DEX_PREOPT := false
+endif
+
 #######################################
 # defines built_odex along with rule to install odex
 include $(BUILD_SYSTEM)/dex_preopt_odex_install.mk
@@ -352,18 +383,16 @@
 endif
 $(built_module): PRIVATE_EMBEDDED_JNI_LIBS := $(embedded_prebuilt_jni_libs)
 
+ifdef LOCAL_COMPRESSED_MODULE
+$(built_module) : $(MINIGZIP)
+endif
+
 $(built_module) : $(my_prebuilt_src_file) | $(ZIPALIGN) $(SIGNAPK_JAR)
 	$(transform-prebuilt-to-target)
 	$(uncompress-shared-libs)
-ifneq (true,$(DONT_UNCOMPRESS_PRIV_APPS_DEXS))
-ifeq (true,$(LOCAL_PRIVILEGED_MODULE))
+ifeq (true, $(LOCAL_UNCOMPRESS_DEX))
 	$(uncompress-dexs)
-else
-  ifneq (,$(filter $(PRODUCT_LOADED_BY_PRIVILEGED_MODULES), $(LOCAL_MODULE)))
-	  $(uncompress-dexs)
-  endif  # PRODUCT_LOADED_BY_PRIVILEGED_MODULES
-endif  # LOCAL_PRIVILEGED_MODULE
-endif  # DONT_UNCOMPRESS_PRIV_APPS_DEXS
+endif  # LOCAL_UNCOMPRESS_DEX
 ifdef LOCAL_DEX_PREOPT
 ifneq ($(BUILD_PLATFORM_ZIP),)
 	@# Keep a copy of apk with classes.dex unstripped
@@ -382,18 +411,33 @@
 else  # LOCAL_CERTIFICATE == PRESIGNED
 	$(align-package)
 endif  # LOCAL_CERTIFICATE
+ifdef LOCAL_COMPRESSED_MODULE
+	$(compress-package)
+endif  # LOCAL_COMPRESSED_MODULE
 endif  # ! LOCAL_REPLACE_PREBUILT_APK_INSTALLED
 
 ###############################
-## Rule to build the odex file
+## Rule to build the odex file.
+# If we don't strip the built module, use it as the dexpreopt input, since
+# dexpreopt can optimize based on whether the module contains only
+# uncompressed dex code.
 ifdef LOCAL_DEX_PREOPT
+ifeq (nostripping,$(LOCAL_DEX_PREOPT))
+$(built_odex) : $(built_module)
+	$(call dexpreopt-one-file,$<,$@)
+else
 $(built_odex) : $(my_prebuilt_src_file)
 	$(call dexpreopt-one-file,$<,$@)
 endif
+endif
 
 ###############################
 ## Install split apks.
 ifdef LOCAL_PACKAGE_SPLITS
+ifdef LOCAL_COMPRESSED_MODULE
+$(error $(LOCAL_MODULE): LOCAL_COMPRESSED_MODULE is not currently supported for split installs)
+endif  # LOCAL_COMPRESSED_MODULE
+
 # LOCAL_PACKAGE_SPLITS is a list of apks to be installed.
 built_apk_splits := $(addprefix $(intermediates)/,$(notdir $(LOCAL_PACKAGE_SPLITS)))
 installed_apk_splits := $(addprefix $(my_module_path)/,$(notdir $(LOCAL_PACKAGE_SPLITS)))
@@ -500,6 +544,10 @@
 
 ifeq ($(LOCAL_SDK_VERSION),system_current)
 my_link_type := java:system
+else ifneq (,$(call has-system-sdk-version,$(LOCAL_SDK_VERSION)))
+my_link_type := java:system
+else ifeq ($(LOCAL_SDK_VERSION),core_current)
+my_link_type := java:core
 else ifneq ($(LOCAL_SDK_VERSION),)
 my_link_type := java:sdk
 else
@@ -524,12 +572,16 @@
 ifneq ($(my_src_aar),)
 # This is an .aar file, an archive of classes.jar and Android resources.
 my_src_jar := $(intermediates.COMMON)/aar/classes.jar
+my_src_proguard_options := $(intermediates.COMMON)/aar/proguard.txt
 
+$(my_src_jar) : .KATI_IMPLICIT_OUTPUTS := $(my_src_proguard_options)
 $(my_src_jar) : $(my_src_aar)
 	$(hide) rm -rf $(dir $@) && mkdir -p $(dir $@) $(dir $@)/res
 	$(hide) unzip -qo -d $(dir $@) $<
 	# Make sure the extracted classes.jar has a new timestamp.
 	$(hide) touch $@
+	# Make sure the proguard file exists and has a new timestamp.
+	$(hide) touch $(dir $@)/proguard.txt
 
 endif
 
@@ -553,24 +605,24 @@
 
 ifdef LOCAL_USE_AAPT2
 ifneq ($(my_src_aar),)
+
+$(intermediates.COMMON)/export_proguard_flags : $(my_src_proguard_options)
+	$(transform-prebuilt-to-target)
+
 LOCAL_SDK_RES_VERSION:=$(strip $(LOCAL_SDK_RES_VERSION))
 ifeq ($(LOCAL_SDK_RES_VERSION),)
   LOCAL_SDK_RES_VERSION:=$(LOCAL_SDK_VERSION)
 endif
 
 framework_res_package_export :=
-framework_res_package_export_deps :=
 # Please refer to package.mk
 ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
 ifneq ($(filter-out current system_current test_current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_APPS),$(filter current system_current test_current,$(LOCAL_SDK_RES_VERSION))),)
 framework_res_package_export := \
     $(HISTORICAL_SDK_VERSIONS_ROOT)/$(LOCAL_SDK_RES_VERSION)/android.jar
-framework_res_package_export_deps := $(framework_res_package_export)
 else
 framework_res_package_export := \
     $(call intermediates-dir-for,APPS,framework-res,,COMMON)/package-export.apk
-framework_res_package_export_deps := \
-    $(dir $(framework_res_package_export))src/R.stamp
 endif
 endif
 
@@ -588,7 +640,7 @@
 $(my_res_package): PRIVATE_PRODUCT_AAPT_CONFIG :=
 $(my_res_package): PRIVATE_PRODUCT_AAPT_PREF_CONFIG :=
 $(my_res_package): PRIVATE_TARGET_AAPT_CHARACTERISTICS :=
-$(my_res_package) : $(framework_res_package_export_deps)
+$(my_res_package) : $(framework_res_package_export)
 
 full_android_manifest :=
 my_res_resources :=
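
The rewritten head of prebuilt_internal.mk makes the source-selection order explicit: `LOCAL_PREBUILT_MODULE_FILE`, then the per-arch variant, then the per-bitness variant, then plain `LOCAL_SRC_FILES`, and otherwise a hard error. A hedged sketch of a per-arch prebuilt APK, with hypothetical names and paths:

```
include $(CLEAR_VARS)
LOCAL_MODULE := ExamplePrebuiltApp
LOCAL_MODULE_CLASS := APPS
LOCAL_CERTIFICATE := PRESIGNED
# Per-arch sources take precedence over plain LOCAL_SRC_FILES; leaving
# all LOCAL_SRC_FILES* unset now fails with "No source files specified".
LOCAL_SRC_FILES_arm64 := prebuilt/arm64/Example.apk
LOCAL_SRC_FILES_arm := prebuilt/arm/Example.apk
include $(BUILD_PREBUILT)
```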
diff --git a/core/product-graph.mk b/core/product-graph.mk
index 268688a..576d14d 100644
--- a/core/product-graph.mk
+++ b/core/product-graph.mk
@@ -77,7 +77,7 @@
 	  $(foreach d,$(PRODUCTS.$(strip $(p)).INHERITS_FROM), echo \"$(d)\" -\> \"$(p)\" >> $@.in;))
 	$(foreach p,$(PRIVATE_PRODUCTS),$(call emit-product-node-props,$(p),$@.in))
 	$(hide) echo '}' >> $@.in
-	$(hide) ./build/tools/filter-product-graph.py $(PRIVATE_PRODUCTS_FILTER) < $@.in > $@
+	$(hide) build/make/tools/filter-product-graph.py $(PRIVATE_PRODUCTS_FILTER) < $@.in > $@
 
 # Evaluates to the name of the product file
 # $(1) product file
@@ -104,6 +104,7 @@
 	$(hide) echo 'PRODUCT_PROPERTY_OVERRIDES=$$(PRODUCTS.$(strip $(1)).PRODUCT_PROPERTY_OVERRIDES)' >> $$@
 	$(hide) echo 'PRODUCT_DEFAULT_PROPERTY_OVERRIDES=$$(PRODUCTS.$(strip $(1)).PRODUCT_DEFAULT_PROPERTY_OVERRIDES)' >> $$@
 	$(hide) echo 'PRODUCT_SYSTEM_DEFAULT_PROPERTIES=$$(PRODUCTS.$(strip $(1)).PRODUCT_SYSTEM_DEFAULT_PROPERTIES)' >> $$@
+	$(hide) echo 'PRODUCT_PRODUCT_PROPERTIES=$$(PRODUCTS.$(strip $(1)).PRODUCT_PRODUCT_PROPERTIES)' >> $$@
 	$(hide) echo 'PRODUCT_CHARACTERISTICS=$$(PRODUCTS.$(strip $(1)).PRODUCT_CHARACTERISTICS)' >> $$@
 	$(hide) echo 'PRODUCT_COPY_FILES=$$(PRODUCTS.$(strip $(1)).PRODUCT_COPY_FILES)' >> $$@
 	$(hide) echo 'PRODUCT_OTA_PUBLIC_KEYS=$$(PRODUCTS.$(strip $(1)).PRODUCT_OTA_PUBLIC_KEYS)' >> $$@
@@ -121,11 +122,11 @@
 
 $(call product-debug-filename, $(p)): \
 			$(OUT_DIR)/products/$(strip $(1)).txt \
-			build/tools/product_debug.py \
+			build/make/tools/product_debug.py \
 			$(this_makefile)
 	@echo Product debug html file: $$@
 	$(hide) mkdir -p $$(dir $$@)
-	$(hide) cat $$< | build/tools/product_debug.py > $$@
+	$(hide) cat $$< | build/make/tools/product_debug.py > $$@
 endef
 
 product_debug_files:=
diff --git a/core/product.mk b/core/product.mk
index c01a856..19ede82 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -88,6 +88,7 @@
     PRODUCT_BRAND \
     PRODUCT_PROPERTY_OVERRIDES \
     PRODUCT_DEFAULT_PROPERTY_OVERRIDES \
+    PRODUCT_PRODUCT_PROPERTIES \
     PRODUCT_CHARACTERISTICS \
     PRODUCT_COPY_FILES \
     PRODUCT_OTA_PUBLIC_KEYS \
@@ -102,6 +103,7 @@
     PRODUCT_SDK_ADDON_COPY_MODULES \
     PRODUCT_SDK_ADDON_DOC_MODULES \
     PRODUCT_SDK_ADDON_SYS_IMG_SOURCE_PROP \
+    PRODUCT_SOONG_NAMESPACES \
     PRODUCT_DEFAULT_WIFI_CHANNELS \
     PRODUCT_DEFAULT_DEV_CERTIFICATE \
     PRODUCT_RESTRICT_VENDOR_FILES \
@@ -124,17 +126,21 @@
     PRODUCT_VERITY_SIGNING_KEY \
     PRODUCT_SYSTEM_VERITY_PARTITION \
     PRODUCT_VENDOR_VERITY_PARTITION \
+    PRODUCT_PRODUCT_VERITY_PARTITION \
     PRODUCT_SYSTEM_SERVER_DEBUG_INFO \
     PRODUCT_DEX_PREOPT_MODULE_CONFIGS \
+    PRODUCT_DEX_PREOPT_DEFAULT_COMPILER_FILTER \
     PRODUCT_DEX_PREOPT_DEFAULT_FLAGS \
     PRODUCT_DEX_PREOPT_BOOT_FLAGS \
     PRODUCT_DEX_PREOPT_PROFILE_DIR \
     PRODUCT_DEX_PREOPT_BOOT_IMAGE_PROFILE_LOCATION \
+    PRODUCT_DEX_PREOPT_GENERATE_DM_FILES \
     PRODUCT_USE_PROFILE_FOR_BOOT_IMAGE \
     PRODUCT_SYSTEM_SERVER_COMPILER_FILTER \
     PRODUCT_SANITIZER_MODULE_CONFIGS \
     PRODUCT_SYSTEM_BASE_FS_PATH \
     PRODUCT_VENDOR_BASE_FS_PATH \
+    PRODUCT_PRODUCT_BASE_FS_PATH \
     PRODUCT_SHIPPING_API_LEVEL \
     VENDOR_PRODUCT_RESTRICT_VENDOR_FILES \
     VENDOR_EXCEPTION_MODULES \
@@ -148,6 +154,8 @@
     PRODUCT_ADB_KEYS \
     PRODUCT_CFI_INCLUDE_PATHS \
     PRODUCT_CFI_EXCLUDE_PATHS \
+    PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE \
+    PRODUCT_ACTIONABLE_COMPATIBLE_PROPERTY_DISABLE \
 
 define dump-product
 $(info ==== $(1) ====)\
@@ -301,14 +309,15 @@
 	BOARD_FLASH_BLOCK_SIZE \
 	BOARD_VENDORIMAGE_PARTITION_SIZE \
 	BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE \
+	BOARD_PRODUCTIMAGE_PARTITION_SIZE \
+	BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE \
 	BOARD_INSTALLER_CMDLINE \
 
 
 _product_stash_var_list += \
 	DEFAULT_SYSTEM_DEV_CERTIFICATE \
 	WITH_DEXPREOPT \
-	WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY \
-	WITH_DEXPREOPT_APP_IMAGE
+	WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY
 
 #
 # Mark the variables in _product_stash_var_list as readonly
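
Since `PRODUCT_PRODUCT_PROPERTIES` now inherits like the other product variables listed above, a product makefile can route properties to the product partition. A minimal sketch with hypothetical property names:

```
# device/example/product.mk (hypothetical): these land in the product
# partition's build.prop rather than the system one.
PRODUCT_PRODUCT_PROPERTIES += \
    ro.example.feature.enabled=true \
    ro.example.tier=premium
```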
diff --git a/core/product_config.mk b/core/product_config.mk
index 4e2d5ae..2620adb 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -365,6 +365,13 @@
     $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_DEFAULT_PROPERTIES))
 .KATI_READONLY := PRODUCT_SYSTEM_DEFAULT_PROPERTIES
 
+# A list of property assignments, like "key = value", with zero or more
+# whitespace characters on either side of the '='.
+# Used for adding properties to the build.prop of the product partition.
+PRODUCT_PRODUCT_PROPERTIES := \
+    $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_PROPERTIES))
+.KATI_READONLY := PRODUCT_PRODUCT_PROPERTIES
+
 # Should we use the default resources or add any product specific overlays
 PRODUCT_PACKAGE_OVERLAYS := \
     $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PACKAGE_OVERLAYS))
@@ -385,8 +392,12 @@
 PRODUCT_EXTRA_RECOVERY_KEYS := $(sort \
     $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_EXTRA_RECOVERY_KEYS))
 
+PRODUCT_DEX_PREOPT_DEFAULT_COMPILER_FILTER := \
+    $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_DEX_PREOPT_DEFAULT_COMPILER_FILTER))
 PRODUCT_DEX_PREOPT_DEFAULT_FLAGS := \
     $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_DEX_PREOPT_DEFAULT_FLAGS))
+PRODUCT_DEX_PREOPT_GENERATE_DM_FILES := \
+    $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_DEX_PREOPT_GENERATE_DM_FILES))
 PRODUCT_DEX_PREOPT_BOOT_FLAGS := \
     $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_DEX_PREOPT_BOOT_FLAGS))
 PRODUCT_DEX_PREOPT_PROFILE_DIR := \
@@ -480,3 +491,15 @@
 # Whether any paths should have CFI enabled for components
 PRODUCT_CFI_INCLUDE_PATHS := \
     $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_CFI_INCLUDE_PATHS))
+
+# Which Soong namespaces to export to Make
+PRODUCT_SOONG_NAMESPACES := \
+    $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SOONG_NAMESPACES))
+
+# A flag to override PRODUCT_COMPATIBLE_PROPERTY
+PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE := \
+    $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE))
+
+# Whether the whitelist of actionable compatible properties should be disabled or not
+PRODUCT_ACTIONABLE_COMPATIBLE_PROPERTY_DISABLE := \
+    $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_ACTIONABLE_COMPATIBLE_PROPERTY_DISABLE))
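
`PRODUCT_SOONG_NAMESPACES` defined above is handed to Soong as the list of namespaces to export. A device makefile might use it as follows; the paths are hypothetical:

```
# device.mk (hypothetical): make modules in these Soong namespaces
# visible to the rest of the build.
PRODUCT_SOONG_NAMESPACES += \
    device/example/widgets \
    vendor/example/camera
```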
diff --git a/core/proguard.emma.flags b/core/proguard.emma.flags
deleted file mode 100644
index bf94086..0000000
--- a/core/proguard.emma.flags
+++ /dev/null
@@ -1,4 +0,0 @@
-# Keep everything for the emma classes
--keep class com.vladium.** {
-  *;
-}
diff --git a/core/proguard_tests.flags b/core/proguard_tests.flags
deleted file mode 100644
index 1f840bc..0000000
--- a/core/proguard_tests.flags
+++ /dev/null
@@ -1,26 +0,0 @@
-# Keep everything for tests
-# This flag has been moved to the makefiles and is set for tests by default.
-#-dontshrink
-
-# But we may want to obfuscate if the main app gets obfuscated.
-# This flag has been moved to the makefiles.
-#-dontobfuscate
-
-#-keep class * extends junit.framework.TestCase {
-#  public void test*();
-#}
-
-#-keepclasseswithmembers class * {
-#  public static void run();
-#  public static junit.framework.Test suite();
-#}
-
-# some AllTests don't include run().
-#-keepclasseswithmembers class * {
-#  public static junit.framework.Test suite();
-#}
-
-#-keep class * extends junit.framework.TestSuite
-#-keep class * extends android.app.Instrumentation
-#-keep class * extends android.test.TestSuiteProvider
-
diff --git a/core/sdk_check.mk b/core/sdk_check.mk
new file mode 100644
index 0000000..c09fc7c
--- /dev/null
+++ b/core/sdk_check.mk
@@ -0,0 +1,37 @@
+
+# Enforces that LOCAL_SDK_VERSION and LOCAL_PRIVATE_PLATFORM_APIS are
+# set correctly.
+# Should be included by java targets that allow specifying LOCAL_SDK_VERSION.
+# The JAVA_SDK_ENFORCEMENT_WARNING and JAVA_SDK_ENFORCEMENT_ERROR variables may
+# be set to a particular module class to enable warnings and errors for that
+# subtype.
+
+whitelisted_modules := framework-res__auto_generated_rro
+
+
+ifeq (,$(JAVA_SDK_ENFORCEMENT_ERROR))
+  JAVA_SDK_ENFORCEMENT_ERROR := APPS
+endif
+
+ifeq ($(LOCAL_SDK_VERSION)$(LOCAL_PRIVATE_PLATFORM_APIS),)
+  ifeq (,$(filter $(LOCAL_MODULE),$(whitelisted_modules)))
+    ifneq ($(JAVA_SDK_ENFORCEMENT_WARNING)$(JAVA_SDK_ENFORCEMENT_ERROR),)
+      my_message := Must specify LOCAL_SDK_VERSION or LOCAL_PRIVATE_PLATFORM_APIS
+      ifeq ($(LOCAL_MODULE_CLASS),$(JAVA_SDK_ENFORCEMENT_ERROR))
+        $(call pretty-error,$(my_message))
+      endif
+      ifeq ($(LOCAL_MODULE_CLASS),$(JAVA_SDK_ENFORCEMENT_WARNING))
+        $(call pretty-warning,$(my_message))
+      endif
+      my_message :=
+    endif
+  endif
+else ifneq ($(LOCAL_SDK_VERSION),)
+  ifneq ($(LOCAL_PRIVATE_PLATFORM_APIS),)
+    my_message := Specifies both LOCAL_SDK_VERSION ($(LOCAL_SDK_VERSION)) and
+    my_message += LOCAL_PRIVATE_PLATFORM_APIS ($(LOCAL_PRIVATE_PLATFORM_APIS))
+    my_message += but should specify only one
+    $(call pretty-error,$(my_message))
+    my_message :=
+  endif
+endif
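
Once sdk_check.mk enforcement is active for a module class, each module must declare exactly one of the two variables. A sketch of the two valid shapes, with hypothetical module names:

```
# An app compiled against the public SDK:
include $(CLEAR_VARS)
LOCAL_PACKAGE_NAME := ExampleSdkApp
LOCAL_SDK_VERSION := current
include $(BUILD_PACKAGE)

# An app that deliberately uses private platform APIs; setting this
# together with LOCAL_SDK_VERSION is now a pretty-error.
include $(CLEAR_VARS)
LOCAL_PACKAGE_NAME := ExamplePlatformApp
LOCAL_PRIVATE_PLATFORM_APIS := true
include $(BUILD_PACKAGE)
```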
diff --git a/core/shared_library_internal.mk b/core/shared_library_internal.mk
index 687536b..ab887e0 100644
--- a/core/shared_library_internal.mk
+++ b/core/shared_library_internal.mk
@@ -20,7 +20,9 @@
 $(error $(LOCAL_PATH): Cannot set module stem for a library)
 endif
 
+ifdef target-shared-library-hook
 $(call target-shared-library-hook)
+endif
 
 skip_build_from_source :=
 ifdef LOCAL_PREBUILT_MODULE_FILE
diff --git a/core/soong_app_prebuilt.mk b/core/soong_app_prebuilt.mk
new file mode 100644
index 0000000..ae0d196
--- /dev/null
+++ b/core/soong_app_prebuilt.mk
@@ -0,0 +1,104 @@
+# App prebuilt coming from Soong.
+# Extra inputs:
+# LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE
+
+ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
+  $(call pretty-error,soong_app_prebuilt.mk may only be used from Soong)
+endif
+
+LOCAL_MODULE_SUFFIX := .apk
+LOCAL_BUILT_MODULE_STEM := package.apk
+
+#######################################
+include $(BUILD_SYSTEM)/base_rules.mk
+#######################################
+
+full_classes_jar := $(intermediates.COMMON)/classes.jar
+full_classes_pre_proguard_jar := $(intermediates.COMMON)/classes-pre-proguard.jar
+full_classes_header_jar := $(intermediates.COMMON)/classes-header.jar
+
+$(eval $(call copy-one-file,$(LOCAL_SOONG_CLASSES_JAR),$(full_classes_jar)))
+$(eval $(call copy-one-file,$(LOCAL_SOONG_CLASSES_JAR),$(full_classes_pre_proguard_jar)))
+
+ifdef LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR
+  $(eval $(call copy-one-file,$(LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR),\
+    $(intermediates.COMMON)/jacoco-report-classes.jar))
+  $(call add-dependency,$(LOCAL_BUILT_MODULE),\
+    $(intermediates.COMMON)/jacoco-report-classes.jar)
+endif
+
+ifdef LOCAL_SOONG_PROGUARD_DICT
+  $(eval $(call copy-one-file,$(LOCAL_SOONG_PROGUARD_DICT),\
+    $(intermediates.COMMON)/proguard_dictionary))
+  $(call add-dependency,$(LOCAL_BUILT_MODULE),\
+    $(intermediates.COMMON)/proguard_dictionary)
+endif
+
+ifneq ($(TURBINE_ENABLED),false)
+ifdef LOCAL_SOONG_HEADER_JAR
+$(eval $(call copy-one-file,$(LOCAL_SOONG_HEADER_JAR),$(full_classes_header_jar)))
+else
+$(eval $(call copy-one-file,$(full_classes_jar),$(full_classes_header_jar)))
+endif
+endif # TURBINE_ENABLED != false
+
+
+$(eval $(call copy-one-file,$(LOCAL_PREBUILT_MODULE_FILE),$(LOCAL_BUILT_MODULE)))
+
+ifdef LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE
+resource_export_package := $(intermediates.COMMON)/package-export.apk
+resource_export_stamp := $(intermediates.COMMON)/src/R.stamp
+
+$(resource_export_package): PRIVATE_STAMP := $(resource_export_stamp)
+$(resource_export_package): .KATI_IMPLICIT_OUTPUTS := $(resource_export_stamp)
+$(resource_export_package): $(LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE)
+	@echo "Copy: $$@"
+	$(copy-file-to-target)
+	touch $(PRIVATE_STAMP)
+$(call add-dependency,$(LOCAL_BUILT_MODULE),$(resource_export_package))
+
+endif # LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE
+
+java-dex: $(LOCAL_SOONG_DEX_JAR)
+
+ifdef LOCAL_DEX_PREOPT
+# defines built_odex along with rule to install odex
+include $(BUILD_SYSTEM)/dex_preopt_odex_install.mk
+
+$(built_odex): $(LOCAL_SOONG_DEX_JAR)
+	$(call dexpreopt-one-file,$<,$@)
+endif
+
+PACKAGES := $(PACKAGES) $(LOCAL_MODULE)
+ifdef LOCAL_CERTIFICATE
+  PACKAGES.$(LOCAL_MODULE).CERTIFICATE := $(LOCAL_CERTIFICATE)
+  PACKAGES.$(LOCAL_MODULE).PRIVATE_KEY := $(patsubst %.x509.pem,%.pk8,$(LOCAL_CERTIFICATE))
+endif
+
+ifndef LOCAL_IS_HOST_MODULE
+ifeq ($(LOCAL_SDK_VERSION),system_current)
+my_link_type := java:system
+else ifneq ($(LOCAL_SDK_VERSION),)
+my_link_type := java:sdk
+else
+my_link_type := java:platform
+endif
+# warn/allowed types are both empty because Soong modules can't depend on
+# make-defined modules.
+my_warn_types :=
+my_allowed_types :=
+
+my_link_deps :=
+my_2nd_arch_prefix := $(LOCAL_2ND_ARCH_VAR_PREFIX)
+my_common := COMMON
+include $(BUILD_SYSTEM)/link_type.mk
+endif # !LOCAL_IS_HOST_MODULE
+
+ifdef LOCAL_SOONG_RRO_DIRS
+  $(call append_enforce_rro_sources, \
+      $(my_register_name), \
+      false, \
+      $(LOCAL_FULL_MANIFEST_FILE), \
+      $(LOCAL_EXPORT_PACKAGE_RESOURCES), \
+      $(LOCAL_SOONG_RRO_DIRS))
+endif
diff --git a/core/soong_config.mk b/core/soong_config.mk
index a90e5af..bf40fe3 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -42,6 +42,9 @@
 
 $(call add_json_str,  Make_suffix, -$(TARGET_PRODUCT))
 
+$(call add_json_str,  BuildId,                           $(BUILD_ID))
+$(call add_json_str,  BuildNumberFromFile,               $$$(BUILD_NUMBER_FROM_FILE))
+
 $(call add_json_val,  Platform_sdk_version,              $(PLATFORM_SDK_VERSION))
 $(call add_json_csv,  Platform_version_active_codenames, $(PLATFORM_VERSION_ALL_CODENAMES))
 $(call add_json_csv,  Platform_version_future_codenames, $(PLATFORM_VERSION_FUTURE_CODENAMES))
@@ -81,6 +84,8 @@
 $(call add_json_str,  AAPTPreferredConfig,               $(PRODUCT_AAPT_PREF_CONFIG))
 $(call add_json_list, AAPTPrebuiltDPI,                   $(PRODUCT_AAPT_PREBUILT_DPI))
 
+$(call add_json_str,  DefaultAppCertificate,             $(PRODUCT_DEFAULT_DEV_CERTIFICATE))
+
 $(call add_json_str,  AppsDefaultVersionName,            $(APPS_DEFAULT_VERSION_NAME))
 
 $(call add_json_list, SanitizeHost,                      $(SANITIZE_HOST))
@@ -103,24 +108,35 @@
 
 $(call add_json_bool, ArtUseReadBarrier,                 $(call invert_bool,$(filter false,$(PRODUCT_ART_USE_READ_BARRIER))))
 $(call add_json_bool, Binder32bit,                       $(BINDER32BIT))
-$(call add_json_bool, Brillo,                            $(BRILLO))
 $(call add_json_str,  BtConfigIncludeDir,                $(BOARD_BLUETOOTH_BDROID_BUILDCFG_INCLUDE_DIR))
 $(call add_json_bool, Device_uses_hwc2,                  $(filter true,$(TARGET_USES_HWC2)))
 $(call add_json_list, DeviceKernelHeaders,               $(TARGET_PROJECT_SYSTEM_INCLUDES))
 $(call add_json_bool, DevicePrefer32BitExecutables,      $(filter true,$(TARGET_PREFER_32_BIT_EXECUTABLES)))
-$(call add_json_val,  DeviceUsesClang,                   $(if $(USE_CLANG_PLATFORM_BUILD),$(USE_CLANG_PLATFORM_BUILD),false))
 $(call add_json_str,  DeviceVndkVersion,                 $(BOARD_VNDK_VERSION))
+$(call add_json_str,  Platform_vndk_version,             $(PLATFORM_VNDK_VERSION))
+$(call add_json_list, ExtraVndkVersions,                 $(PRODUCT_EXTRA_VNDK_VERSIONS))
+$(call add_json_list, DeviceSystemSdkVersions,           $(BOARD_SYSTEMSDK_VERSIONS))
+$(call add_json_list, Platform_systemsdk_versions,       $(PLATFORM_SYSTEMSDK_VERSIONS))
 $(call add_json_bool, Malloc_not_svelte,                 $(call invert_bool,$(filter true,$(MALLOC_SVELTE))))
 $(call add_json_str,  Override_rs_driver,                $(OVERRIDE_RS_DRIVER))
-$(call add_json_bool, Treble,                            $(filter true,$(PRODUCT_FULL_TREBLE)))
+
+$(call add_json_bool, Treble_linker_namespaces,          $(filter true,$(PRODUCT_TREBLE_LINKER_NAMESPACES)))
+$(call add_json_bool, Enforce_vintf_manifest,            $(filter true,$(PRODUCT_ENFORCE_VINTF_MANIFEST)))
+
 $(call add_json_bool, Uml,                               $(filter true,$(TARGET_USER_MODE_LINUX)))
 $(call add_json_str,  VendorPath,                        $(TARGET_COPY_OUT_VENDOR))
+$(call add_json_str,  OdmPath,                           $(TARGET_COPY_OUT_ODM))
+$(call add_json_str,  ProductPath,                       $(TARGET_COPY_OUT_PRODUCT))
 $(call add_json_bool, MinimizeJavaDebugInfo,             $(filter true,$(PRODUCT_MINIMIZE_JAVA_DEBUG_INFO)))
 
 $(call add_json_bool, UseGoma,                           $(filter-out false,$(USE_GOMA)))
 
 $(call add_json_str,  DistDir,                           $(if $(dist_goal), $(DIST_DIR)))
 
+$(call add_json_list, NamespacesToExport,                $(PRODUCT_SOONG_NAMESPACES))
+
+$(call add_json_list, PgoAdditionalProfileDirs,          $(PGO_ADDITIONAL_PROFILE_DIRS))
+
 _contents := $(subst $(comma)$(newline)__SV_END,$(newline)}$(newline),$(_contents)__SV_END)
 
 $(file >$(SOONG_VARIABLES).tmp,$(_contents))
diff --git a/core/soong_java_prebuilt.mk b/core/soong_java_prebuilt.mk
index ccbe745..63a1e2e 100644
--- a/core/soong_java_prebuilt.mk
+++ b/core/soong_java_prebuilt.mk
@@ -16,32 +16,72 @@
 #######################################
 
 full_classes_jar := $(intermediates.COMMON)/classes.jar
+full_classes_pre_proguard_jar := $(intermediates.COMMON)/classes-pre-proguard.jar
 full_classes_header_jar := $(intermediates.COMMON)/classes-header.jar
 common_javalib.jar := $(intermediates.COMMON)/javalib.jar
 
 $(eval $(call copy-one-file,$(LOCAL_PREBUILT_MODULE_FILE),$(full_classes_jar)))
+$(eval $(call copy-one-file,$(LOCAL_PREBUILT_MODULE_FILE),$(full_classes_pre_proguard_jar)))
+
+ifdef LOCAL_DROIDDOC_STUBS_SRCJAR
+$(eval $(call copy-one-file,$(LOCAL_DROIDDOC_STUBS_SRCJAR),$(OUT_DOCS)/$(LOCAL_MODULE)-stubs.srcjar))
+ALL_DOCS += $(OUT_DOCS)/$(LOCAL_MODULE)-stubs.srcjar
+endif
+
+ifdef LOCAL_DROIDDOC_DOC_ZIP
+$(eval $(call copy-one-file,$(LOCAL_DROIDDOC_DOC_ZIP),$(OUT_DOCS)/$(LOCAL_MODULE)-docs.zip))
+$(call dist-for-goals,docs,$(OUT_DOCS)/$(LOCAL_MODULE)-docs.zip)
+endif
 
 ifdef LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR
   $(eval $(call copy-one-file,$(LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR),\
     $(intermediates.COMMON)/jacoco-report-classes.jar))
+  $(call add-dependency,$(common_javalib.jar),\
+    $(intermediates.COMMON)/jacoco-report-classes.jar)
 endif
 
-ifneq ($(TURBINE_DISABLED),false)
+ifdef LOCAL_SOONG_EXPORT_PROGUARD_FLAGS
+  $(eval $(call copy-one-file,$(LOCAL_SOONG_EXPORT_PROGUARD_FLAGS),\
+    $(intermediates.COMMON)/export_proguard_flags))
+  $(call add-dependency,$(LOCAL_BUILT_MODULE),\
+    $(intermediates.COMMON)/export_proguard_flags)
+endif
+
+ifdef LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE
+my_res_package := $(intermediates.COMMON)/package-res.apk
+
+$(my_res_package): $(LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE)
+	@echo "Copy: $$@"
+	$(copy-file-to-target)
+
+$(call add-dependency,$(LOCAL_BUILT_MODULE),$(my_res_package))
+
+endif # LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE
+
+ifneq ($(TURBINE_ENABLED),false)
 ifdef LOCAL_SOONG_HEADER_JAR
 $(eval $(call copy-one-file,$(LOCAL_SOONG_HEADER_JAR),$(full_classes_header_jar)))
 else
 $(eval $(call copy-one-file,$(full_classes_jar),$(full_classes_header_jar)))
 endif
-endif # TURBINE_DISABLED != false
+endif # TURBINE_ENABLED != false
 
 ifdef LOCAL_SOONG_DEX_JAR
   ifndef LOCAL_IS_HOST_MODULE
-    $(eval $(call copy-one-file,$(LOCAL_SOONG_DEX_JAR),$(common_javalib.jar)))
+    ifneq ($(filter $(LOCAL_MODULE),$(PRODUCT_BOOT_JARS)),)  # is_boot_jar
+      $(eval $(call hiddenapi-copy-soong-jar,$(LOCAL_SOONG_DEX_JAR),$(common_javalib.jar)))
+    else # !is_boot_jar
+      $(eval $(call copy-one-file,$(LOCAL_SOONG_DEX_JAR),$(common_javalib.jar)))
+    endif # is_boot_jar
     $(eval $(call add-dependency,$(common_javalib.jar),$(full_classes_jar) $(full_classes_header_jar)))
 
+    dex_preopt_profile_src_file := $(common_javalib.jar)
+
     # defines built_odex along with rule to install odex
     include $(BUILD_SYSTEM)/dex_preopt_odex_install.mk
 
+    dex_preopt_profile_src_file :=
+
     ifdef LOCAL_DEX_PREOPT
       ifneq ($(dexpreopt_boot_jar_module),) # boot jar
         # boot jar's rules are defined in dex_preopt.mk
@@ -77,17 +117,19 @@
 ifndef LOCAL_IS_HOST_MODULE
 ifeq ($(LOCAL_SDK_VERSION),system_current)
 my_link_type := java:system
-my_warn_types := java:platform
-my_allowed_types := java:sdk java:system
+else ifneq (,$(call has-system-sdk-version,$(LOCAL_SDK_VERSION)))
+my_link_type := java:system
+else ifeq ($(LOCAL_SDK_VERSION),core_current)
+my_link_type := java:core
 else ifneq ($(LOCAL_SDK_VERSION),)
 my_link_type := java:sdk
-my_warn_types := java:system java:platform
-my_allowed_types := java:sdk
 else
 my_link_type := java:platform
-my_warn_types :=
-my_allowed_types := java:sdk java:system java:platform
 endif
+# warn/allowed types are both empty because Soong modules can't depend on
+# make-defined modules.
+my_warn_types :=
+my_allowed_types :=
 
 my_link_deps :=
 my_2nd_arch_prefix := $(LOCAL_2ND_ARCH_VAR_PREFIX)
diff --git a/core/static_java_library.mk b/core/static_java_library.mk
index 5ffb88d..64e16c2 100644
--- a/core/static_java_library.mk
+++ b/core/static_java_library.mk
@@ -88,7 +88,7 @@
 
 LOCAL_INTERMEDIATE_TARGETS += $(my_res_package)
 endif  # LOCAL_USE_AAPT2
-endif  # LOCAL_RESOURCE_DIR
+endif  # need_compile_res
 
 all_res_assets := $(all_resources)
 
@@ -103,25 +103,40 @@
 endif
 
 framework_res_package_export :=
-framework_res_package_export_deps :=
 # Please refer to package.mk
 ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
 ifneq ($(filter-out current system_current test_current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_APPS),$(filter current system_current test_current,$(LOCAL_SDK_RES_VERSION))),)
 framework_res_package_export := \
     $(HISTORICAL_SDK_VERSIONS_ROOT)/$(LOCAL_SDK_RES_VERSION)/android.jar
-framework_res_package_export_deps := $(framework_res_package_export)
 else
 framework_res_package_export := \
     $(call intermediates-dir-for,APPS,framework-res,,COMMON)/package-export.apk
-framework_res_package_export_deps := \
-    $(dir $(framework_res_package_export))src/R.stamp
 endif
 endif
 
+ifdef LOCAL_USE_AAPT2
+import_proguard_flag_files := $(strip $(foreach l,$(LOCAL_STATIC_ANDROID_LIBRARIES),\
+    $(call intermediates-dir-for,JAVA_LIBRARIES,$(l),,COMMON)/export_proguard_flags))
+$(intermediates.COMMON)/export_proguard_flags: $(import_proguard_flag_files) $(addprefix $(LOCAL_PATH)/,$(LOCAL_EXPORT_PROGUARD_FLAG_FILES))
+	@echo "Export proguard flags: $@"
+	rm -f $@
+	touch $@
+	for f in $+; do \
+		echo -e "\n# including $$f" >>$@; \
+		cat $$f >>$@; \
+	done
+import_proguard_flag_files :=
+endif
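Note: the loop above merges each dependency's flags under a provenance header, so the merged export_proguard_flags file looks roughly like this (intermediate path and keep rule invented):

    # including out/target/common/obj/JAVA_LIBRARIES/my-android-lib_intermediates/export_proguard_flags
    -keep class com.example.widget.** { *; }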
+
+include $(BUILD_SYSTEM)/aapt_flags.mk
+
 # add --non-constant-id to prevent inlining constants.
 # AAR needs text symbol file R.txt.
 ifdef LOCAL_USE_AAPT2
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_AAPT_FLAGS := $(LOCAL_AAPT_FLAGS) --static-lib --no-static-lib-packages
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_AAPT_FLAGS := $(LOCAL_AAPT_FLAGS) --static-lib
+ifndef LOCAL_AAPT_NAMESPACES
+  $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_AAPT_FLAGS += --no-static-lib-packages
+endif
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_PRODUCT_AAPT_CONFIG :=
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_PRODUCT_AAPT_PREF_CONFIG :=
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_TARGET_AAPT_CHARACTERISTICS :=
@@ -149,8 +164,8 @@
 else
 ifneq (,$(LOCAL_SDK_VERSION))
 # Set target-api for LOCAL_SDK_VERSIONs other than current.
-ifneq (,$(filter-out current system_current test_current, $(LOCAL_SDK_VERSION)))
-renderscript_target_api := $(LOCAL_SDK_VERSION)
+ifneq (,$(filter-out current system_current test_current core_current, $(LOCAL_SDK_VERSION)))
+renderscript_target_api := $(call get-numeric-sdk-version,$(LOCAL_SDK_VERSION))
 endif
 endif  # LOCAL_SDK_VERSION is set
 endif  # LOCAL_RENDERSCRIPT_TARGET_API is set
@@ -161,10 +176,10 @@
 endif  # renderscript_target_api < 21
 endif  # renderscript_target_api is set
 include $(BUILD_SYSTEM)/aapt2.mk
-$(my_res_package) : $(framework_res_package_export_deps)
+$(my_res_package) : $(framework_res_package_export)
 else
 $(R_file_stamp): PRIVATE_RESOURCE_LIST := $(all_resources)
-$(R_file_stamp) : $(all_resources) $(full_android_manifest) $(AAPT) $(framework_res_package_export_deps)
+$(R_file_stamp) : $(all_resources) $(full_android_manifest) $(AAPT) $(framework_res_package_export)
 	@echo "target R.java/Manifest.java: $(PRIVATE_MODULE) ($@)"
 	$(create-resource-java-files)
 	$(hide) find $(PRIVATE_SOURCE_INTERMEDIATES_DIR) -name R.java | xargs cat > $@
@@ -172,7 +187,6 @@
 
 $(LOCAL_BUILT_MODULE): $(R_file_stamp)
 $(java_source_list_file): $(R_file_stamp)
-$(foreach x,$(sharded_java_source_list_files),$(eval $(x): $(R_file_stamp)))
 $(full_classes_compiled_jar): $(R_file_stamp)
 $(full_classes_turbine_jar): $(R_file_stamp)
 
diff --git a/target/board/generic_armv5/device.mk b/core/target_test_config.mk
similarity index 70%
copy from target/board/generic_armv5/device.mk
copy to core/target_test_config.mk
index 7c4aaf2..61f5d2b 100644
--- a/target/board/generic_armv5/device.mk
+++ b/core/target_test_config.mk
@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2011 The Android Open Source Project
+# Copyright (C) 2017 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,4 +14,10 @@
 # limitations under the License.
 #
 
-include build/target/board/generic/device.mk
+#
+# Common rules for building a TradeFed test XML file for target side tests.
+#
+
+$(call record-module-type,TARGET_TEST_CONFIG)
+
+include $(BUILD_SYSTEM)/test_config_common.mk
diff --git a/core/target_test_internal.mk b/core/target_test_internal.mk
index 59a3a9e..b5c3a7c 100644
--- a/core/target_test_internal.mk
+++ b/core/target_test_internal.mk
@@ -8,15 +8,9 @@
   ifndef LOCAL_SDK_VERSION
     LOCAL_STATIC_LIBRARIES += libgtest_main libgtest
   else
-    ifneq (,$(filter c++_%,$(LOCAL_NDK_STL_VARIANT)))
-        my_ndk_gtest_suffix := _c++
-    else ifneq ($(filter stlport_,$(LOCAL_NDK_STL_VARIANT)),)
-        my_ndk_gtest_suffix := _stlport
-    else ifneq ($(filter gnustl_,$(LOCAL_NDK_STL_VARIANT)),)
-        my_ndk_gtest_suffix := _gnustl
-    else # system STL, use stlport
-        my_ndk_gtest_suffix := _stlport
-    endif
+    # TODO(danalbert): Remove the suffix from the module since we only need the
+    # one variant now.
+    my_ndk_gtest_suffix := _c++
     LOCAL_STATIC_LIBRARIES += \
         libgtest_main_ndk$(my_ndk_gtest_suffix) \
         libgtest_ndk$(my_ndk_gtest_suffix)
diff --git a/core/tasks/apicheck.mk b/core/tasks/apicheck.mk
index 3975d20..1d867d1 100644
--- a/core/tasks/apicheck.mk
+++ b/core/tasks/apicheck.mk
@@ -54,7 +54,7 @@
     -error 16 -error 17 -error 18 , \
     cat $(BUILD_SYSTEM)/apicheck_msg_last.txt, \
     check-public-api, \
-    $(call doc-timestamp-for,api-stubs) \
+    $(OUT_DOCS)/api-stubs-docs-stubs.srcjar \
     ))
 
 # Check that the API we're building hasn't changed from the not-yet-released
@@ -71,7 +71,7 @@
     -error 25 -error 26 -error 27, \
     cat $(BUILD_SYSTEM)/apicheck_msg_current.txt, \
     check-public-api, \
-    $(call doc-timestamp-for,api-stubs) \
+    $(OUT_DOCS)/api-stubs-docs-stubs.srcjar \
     ))
 
 .PHONY: update-public-api
@@ -100,7 +100,7 @@
     -error 16 -error 17 -error 18 , \
     cat $(BUILD_SYSTEM)/apicheck_msg_last.txt, \
     check-system-api, \
-    $(call doc-timestamp-for,system-api-stubs) \
+    $(OUT_DOCS)/system-api-stubs-docs-stubs.srcjar \
     ))
 
 # Check that the System API we're building hasn't changed from the not-yet-released
@@ -117,7 +117,7 @@
     -error 25 -error 26 -error 27, \
     cat $(BUILD_SYSTEM)/apicheck_msg_current.txt, \
     check-system-api, \
-    $(call doc-timestamp-for,system-api-stubs) \
+    $(OUT_DOCS)/system-api-stubs-docs-stubs.srcjar \
     ))
 
 .PHONY: update-system-api
@@ -149,7 +149,7 @@
     -error 25 -error 26 -error 27, \
     cat $(BUILD_SYSTEM)/apicheck_msg_current.txt, \
     check-test-api, \
-    $(call doc-timestamp-for,test-api-stubs) \
+    $(OUT_DOCS)/test-api-stubs-docs-stubs.srcjar \
     ))
 
 .PHONY: update-test-api
diff --git a/core/tasks/check_boot_jars/check_boot_jars.py b/core/tasks/check_boot_jars/check_boot_jars.py
index 1b4540c..9d71553 100755
--- a/core/tasks/check_boot_jars/check_boot_jars.py
+++ b/core/tasks/check_boot_jars/check_boot_jars.py
@@ -39,7 +39,7 @@
   return True
 
 
-def CheckJar(jar):
+def CheckJar(whitelist_path, jar):
   """Check a jar file.
   """
   # Get the list of files inside the jar file.
@@ -55,8 +55,9 @@
       package_name = package_name.replace('/', '.')
       # Skip class without a package name
       if package_name and not whitelist_re.match(package_name):
-        print >> sys.stderr, ('Error: %s contains class file %s, which is not in the whitelist'
-                              % (jar, f))
+        print >> sys.stderr, ('Error: %s contains class file %s, whose package name %s is not '
+                              'in the whitelist %s of packages allowed on the bootclasspath.'
+                              % (jar, f, package_name, whitelist_path))
         return False
   return True
 
@@ -65,13 +66,14 @@
   if len(argv) < 2:
     print __doc__
     return 1
+  whitelist_path = argv[0]
 
-  if not LoadWhitelist(argv[0]):
+  if not LoadWhitelist(whitelist_path):
     return 1
 
   passed = True
   for jar in argv[1:]:
-    if not CheckJar(jar):
+    if not CheckJar(whitelist_path, jar):
       passed = False
   if not passed:
     return 1
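Note: with the new argument order (whitelist path first, then jars), a hypothetical invocation and failure report look like this (all file and package names invented):

    $ python check_boot_jars.py package_whitelist.txt framework.jar telephony-common.jar
    Error: framework.jar contains class file com/acme/Foo.class, whose package name com.acme is not in the whitelist package_whitelist.txt of packages allowed on the bootclasspath.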
diff --git a/core/tasks/check_emu_boot.mk b/core/tasks/check_emu_boot.mk
new file mode 100644
index 0000000..4870677
--- /dev/null
+++ b/core/tasks/check_emu_boot.mk
@@ -0,0 +1,23 @@
+check_emu_boot0 := $(DIST_DIR)/$(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)-emulator-boot-test-result.txt
+$(check_emu_boot0) : PRIVATE_PREFIX := $(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)
+$(check_emu_boot0) : PRIVATE_EMULATOR_BOOT_TEST_SH := device/generic/goldfish/tools/emulator_boot_test.sh
+$(check_emu_boot0) : PRIVATE_BOOT_COMPLETE_STRING := "emulator: INFO: boot completed"
+$(check_emu_boot0) : PRIVATE_BOOT_FAIL_STRING := "emulator: ERROR: fail to boot after"
+$(check_emu_boot0) : PRIVATE_SUCCESS_FILE := $(DIST_DIR)/$(PRIVATE_PREFIX)-BOOT-SUCCESS.txt
+$(check_emu_boot0) : PRIVATE_FAIL_FILE := $(DIST_DIR)/$(PRIVATE_PREFIX)-BOOT-FAIL.txt
+$(check_emu_boot0) : $(INSTALLED_QEMU_SYSTEMIMAGE)  $(INSTALLED_QEMU_VENDORIMAGE) \
+                 $(if $(BOARD_USERDATAIMAGE_PARTITION_SIZE),$(PRODUCT_OUT)/userdata.img) \
+                 $(PRODUCT_OUT)/ramdisk.img device/generic/goldfish/tools/emulator_boot_test.sh
+	@mkdir -p $(dir $(check_emu_boot0))
+	$(hide) rm -f $(check_emu_boot0)
+	$(hide) rm -f $(PRIVATE_SUCCESS_FILE)
+	$(hide) rm -f $(PRIVATE_FAIL_FILE)
+	(export ANDROID_PRODUCT_OUT=$$(cd $(PRODUCT_OUT);pwd);\
+		export ANDROID_BUILD_TOP=$$(pwd);\
+		$(PRIVATE_EMULATOR_BOOT_TEST_SH) > $(check_emu_boot0))
+	(if grep -q $(PRIVATE_BOOT_COMPLETE_STRING) $(check_emu_boot0);\
+	then echo boot_succeeded > $(PRIVATE_SUCCESS_FILE); fi)
+	(if grep -q $(PRIVATE_BOOT_FAIL_STRING) $(check_emu_boot0);\
+	then echo boot_failed > $(PRIVATE_FAIL_FILE); fi)
+.PHONY: check_emu_boot
+check_emu_boot: $(check_emu_boot0)
diff --git a/core/tasks/device-tests.mk b/core/tasks/device-tests.mk
index d679c59..a2b626e 100644
--- a/core/tasks/device-tests.mk
+++ b/core/tasks/device-tests.mk
@@ -6,7 +6,7 @@
 #
 #      http://www.apache.org/licenses/LICENSE-2.0
 #
-# Unless required by applicable law or agrls eed to in writing, software
+# Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
@@ -16,14 +16,23 @@
 .PHONY: device-tests
 
 device-tests-zip := $(PRODUCT_OUT)/device-tests.zip
-$(device-tests-zip): $(COMPATIBILITY.device-tests.FILES) $(SOONG_ZIP)
+# Create an artifact to include a list of test config files in device-tests.
+device-tests-list-zip := $(PRODUCT_OUT)/device-tests_list.zip
+$(device-tests-zip) : .KATI_IMPLICIT_OUTPUTS := $(device-tests-list-zip)
+$(device-tests-zip) : PRIVATE_device_tests_list := $(PRODUCT_OUT)/device-tests_list
+
+$(device-tests-zip) : $(COMPATIBILITY.device-tests.FILES) $(SOONG_ZIP)
 	echo $(sort $(COMPATIBILITY.device-tests.FILES)) | tr " " "\n" > $@.list
 	grep $(HOST_OUT_TESTCASES) $@.list > $@-host.list || true
 	grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true
 	$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list -P target -C $(PRODUCT_OUT) -l $@-target.list
-	rm -f $@.list $@-host.list $@-target.list
+	rm -f $(PRIVATE_device_tests_list)
+	$(hide) grep -e .*.config$$ $@-host.list | sed s%$(HOST_OUT)%host%g > $(PRIVATE_device_tests_list)
+	$(hide) grep -e .*.config$$ $@-target.list | sed s%$(PRODUCT_OUT)%target%g >> $(PRIVATE_device_tests_list)
+	$(hide) $(SOONG_ZIP) -d -o $(device-tests-list-zip) -C $(dir $@) -f $(PRIVATE_device_tests_list)
+	rm -f $@.list $@-host.list $@-target.list $(PRIVATE_device_tests_list)
 
 device-tests: $(device-tests-zip)
-$(call dist-for-goals, device-tests, $(device-tests-zip))
+$(call dist-for-goals, device-tests, $(device-tests-zip) $(device-tests-list-zip))
 
 tests: device-tests
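Note: as a worked example (paths illustrative, assuming HOST_OUT=out/host/linux-x86 and PRODUCT_OUT=out/target/product/generic), the grep/sed pipeline rewrites config entries for device-tests_list as:

    out/host/linux-x86/testcases/FooTest/FooTest.config         -> host/testcases/FooTest/FooTest.config
    out/target/product/generic/testcases/BarTest/BarTest.config -> target/testcases/BarTest/BarTest.config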
diff --git a/core/tasks/general-tests.mk b/core/tasks/general-tests.mk
index bf4eb1d..c7f1dc9 100644
--- a/core/tasks/general-tests.mk
+++ b/core/tasks/general-tests.mk
@@ -6,7 +6,7 @@
 #
 #      http://www.apache.org/licenses/LICENSE-2.0
 #
-# Unless required by applicable law or agrls eed to in writing, software
+# Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
@@ -15,12 +15,21 @@
 .PHONY: general-tests
 
 general-tests-zip := $(PRODUCT_OUT)/general-tests.zip
-$(general-tests-zip): $(COMPATIBILITY.general-tests.FILES) $(SOONG_ZIP)
+# Create an artifact to include a list of test config files in general-tests.
+general-tests-list-zip := $(PRODUCT_OUT)/general-tests_list.zip
+$(general-tests-zip) : .KATI_IMPLICIT_OUTPUTS := $(general-tests-list-zip)
+$(general-tests-zip) : PRIVATE_general_tests_list := $(PRODUCT_OUT)/general-tests_list
+
+$(general-tests-zip) : $(COMPATIBILITY.general-tests.FILES) $(SOONG_ZIP)
 	echo $(sort $(COMPATIBILITY.general-tests.FILES)) | tr " " "\n" > $@.list
 	grep $(HOST_OUT_TESTCASES) $@.list > $@-host.list || true
 	grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true
 	$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list -P target -C $(PRODUCT_OUT) -l $@-target.list
-	rm -f $@.list $@-host.list $@-target.list
+	rm -f $(PRIVATE_general_tests_list)
+	$(hide) grep -e .*.config$$ $@-host.list | sed s%$(HOST_OUT)%host%g > $(PRIVATE_general_tests_list)
+	$(hide) grep -e .*.config$$ $@-target.list | sed s%$(PRODUCT_OUT)%target%g >> $(PRIVATE_general_tests_list)
+	$(hide) $(SOONG_ZIP) -d -o $(general-tests-list-zip) -C $(dir $@) -f $(PRIVATE_general_tests_list)
+	rm -f $@.list $@-host.list $@-target.list $(PRIVATE_general_tests_list)
 
 general-tests: $(general-tests-zip)
-$(call dist-for-goals, general-tests, $(general-tests-zip))
+$(call dist-for-goals, general-tests, $(general-tests-zip) $(general-tests-list-zip))
diff --git a/core/tasks/module-info.mk b/core/tasks/module-info.mk
index e9b2ac7..b45526f 100644
--- a/core/tasks/module-info.mk
+++ b/core/tasks/module-info.mk
@@ -11,6 +11,8 @@
 			'"path": [$(foreach w,$(sort $(ALL_MODULES.$(m).PATH)),"$(w)", )], ' \
 			'"tags": [$(foreach w,$(sort $(ALL_MODULES.$(m).TAGS)),"$(w)", )], ' \
 			'"installed": [$(foreach w,$(sort $(ALL_MODULES.$(m).INSTALLED)),"$(w)", )], ' \
+			'"compatibility_suites": [$(foreach w,$(sort $(ALL_MODULES.$(m).COMPATIBILITY_SUITES)),"$(w)", )], ' \
+			'"auto_test_config": [$(ALL_MODULES.$(m).auto_test_config)], ' \
 			'},\n' \
 	 ) | sed -e 's/, *\]/]/g' -e 's/, *\}/ }/g' -e '$$s/,$$//' >> $@
 	$(hide) echo '}' >> $@
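Note: with the two new fields, an (abridged) module-info.json entry would look roughly like this — module name and values invented:

    "FooTest": { "path": ["packages/apps/FooTest"], "tags": ["tests"], "installed": ["out/target/product/generic/testcases/FooTest/FooTest.apk"], "compatibility_suites": ["device-tests"], "auto_test_config": [true] },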
diff --git a/core/tasks/oem_image.mk b/core/tasks/oem_image.mk
index 32d56a7..66eec22 100644
--- a/core/tasks/oem_image.mk
+++ b/core/tasks/oem_image.mk
@@ -35,7 +35,7 @@
 	@mkdir -p $(oemimage_intermediates) && rm -rf $(oemimage_intermediates)/oem_image_info.txt
 	$(call generate-userimage-prop-dictionary, $(oemimage_intermediates)/oem_image_info.txt, skip_fsck=true)
 	$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
-	  ./build/tools/releasetools/build_image.py \
+	  build/make/tools/releasetools/build_image.py \
 	  $(TARGET_OUT_OEM) $(oemimage_intermediates)/oem_image_info.txt $@ $(TARGET_OUT)
 	$(hide) $(call assert-max-image-size,$@,$(BOARD_OEMIMAGE_PARTITION_SIZE))
 
diff --git a/core/tasks/sdk-addon.mk b/core/tasks/sdk-addon.mk
index 593c3cd..197d41a 100644
--- a/core/tasks/sdk-addon.mk
+++ b/core/tasks/sdk-addon.mk
@@ -115,7 +115,7 @@
 	$(hide) $(SOONG_ZIP) -o $@ -C $(dir $(PRIVATE_STAGING_DIR)) -D $(PRIVATE_STAGING_DIR)
 
 $(full_target_img): PRIVATE_STAGING_DIR := $(call append-path,$(staging),$(addon_dir_img))/images/$(TARGET_CPU_ABI)
-$(full_target_img): $(full_target) $(addon_img_source_prop) | $(SOONG_ZIP)
+$(full_target_img): $(full_target) $(addon_img_source_prop) | $(ACP) $(SOONG_ZIP)
 	@echo Packaging SDK Addon System-Image: $@
 	$(hide) mkdir -p $(dir $@)
 	$(ACP) -r $(PRODUCT_OUT)/data $(PRIVATE_STAGING_DIR)/data
diff --git a/core/tasks/test_mapping.mk b/core/tasks/test_mapping.mk
new file mode 100644
index 0000000..36275b0
--- /dev/null
+++ b/core/tasks/test_mapping.mk
@@ -0,0 +1,34 @@
+# Copyright (C) 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Create an artifact to include TEST_MAPPING files in source tree.
+
+.PHONY: test_mapping
+
+intermediates := $(call intermediates-dir-for,PACKAGING,test_mapping)
+test_mappings_zip := $(intermediates)/test_mappings.zip
+test_mapping_list := $(OUT_DIR)/.module_paths/TEST_MAPPING.list
+test_mappings := $(file <$(test_mapping_list))
+$(test_mappings_zip) : PRIVATE_test_mappings := $(subst $(newline),\n,$(test_mappings))
+
+$(test_mappings_zip) : $(test_mappings) $(SOONG_ZIP)
+	@echo "Building artifact to include TEST_MAPPING files."
+	rm -rf $@
+	echo -e "$(PRIVATE_test_mappings)" > $@.list
+	$(SOONG_ZIP) -o $@ -C . -l $@.list
+	rm -f $@.list
+
+test_mapping : $(test_mappings_zip)
+
+$(call dist-for-goals, dist_files test_mapping,$(test_mappings_zip))
diff --git a/core/tasks/tools/build_custom_image.mk b/core/tasks/tools/build_custom_image.mk
index 7c38546..a1151e9 100644
--- a/core/tasks/tools/build_custom_image.mk
+++ b/core/tasks/tools/build_custom_image.mk
@@ -151,9 +151,9 @@
 	    cat $(PRIVATE_DICT_FILE) >> $(PRIVATE_INTERMEDIATES)/image_info.txt)
 	# Generate the image.
 	$(if $(filter oem,$(PRIVATE_MOUNT_POINT)), \
-	  $(hide) echo "oem.buildnumber=$(BUILD_NUMBER)" >> $(PRIVATE_STAGING_DIR)/oem.prop)
+	  $(hide) echo "oem.buildnumber=$(BUILD_NUMBER_FROM_FILE)" >> $(PRIVATE_STAGING_DIR)/oem.prop)
 	$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
-	  ./build/tools/releasetools/build_image.py \
+	  build/make/tools/releasetools/build_image.py \
 	  $(PRIVATE_STAGING_DIR) $(PRIVATE_INTERMEDIATES)/image_info.txt $@ $(TARGET_OUT)
 
 my_installed_custom_image := $(PRODUCT_OUT)/$(notdir $(my_built_custom_image))
diff --git a/core/tasks/tools/compatibility.mk b/core/tasks/tools/compatibility.mk
index 20b8314..6117414 100644
--- a/core/tasks/tools/compatibility.mk
+++ b/core/tasks/tools/compatibility.mk
@@ -19,6 +19,8 @@
 #   test_suite_tradefed: the name of this test suite's tradefed wrapper
 #   test_suite_dynamic_config: the path to this test suite's dynamic configuration file
 #   test_suite_readme: the path to a README file for this test suite
+#   test_suite_prebuilt_tools: the set of prebuilt tools to be included directly
+#                         in the 'tools' subdirectory of the test suite.
 # Output variables:
 #   compatibility_zip: the path to the output zip file.
 
@@ -26,6 +28,7 @@
 test_artifacts := $(COMPATIBILITY.$(test_suite_name).FILES)
 test_tools := $(HOST_OUT_JAVA_LIBRARIES)/hosttestlib.jar \
   $(HOST_OUT_JAVA_LIBRARIES)/tradefed.jar \
+  $(HOST_OUT_JAVA_LIBRARIES)/loganalysis.jar \
   $(HOST_OUT_JAVA_LIBRARIES)/compatibility-host-util.jar \
   $(HOST_OUT_JAVA_LIBRARIES)/compatibility-host-util-tests.jar \
   $(HOST_OUT_JAVA_LIBRARIES)/compatibility-common-util-tests.jar \
@@ -39,10 +42,10 @@
 compatibility_zip := $(out_dir).zip
 $(compatibility_zip): PRIVATE_NAME := android-$(test_suite_name)
 $(compatibility_zip): PRIVATE_OUT_DIR := $(out_dir)
-$(compatibility_zip): PRIVATE_TOOLS := $(test_tools)
+$(compatibility_zip): PRIVATE_TOOLS := $(test_tools) $(test_suite_prebuilt_tools)
 $(compatibility_zip): PRIVATE_SUITE_NAME := $(test_suite_name)
 $(compatibility_zip): PRIVATE_DYNAMIC_CONFIG := $(test_suite_dynamic_config)
-$(compatibility_zip): $(test_artifacts) $(test_tools) $(test_suite_dynamic_config) $(SOONG_ZIP) | $(ADB) $(ACP)
+$(compatibility_zip): $(test_artifacts) $(test_tools) $(test_suite_prebuilt_tools) $(test_suite_dynamic_config) $(SOONG_ZIP) | $(ADB) $(ACP)
 # Make dir structure
 	$(hide) mkdir -p $(PRIVATE_OUT_DIR)/tools $(PRIVATE_OUT_DIR)/testcases
 # Copy tools
@@ -56,3 +59,4 @@
 test_suite_tradefed :=
 test_suite_dynamic_config :=
 test_suite_readme :=
+test_suite_prebuilt_tools :=
diff --git a/core/tasks/tradefed-tests-list.mk b/core/tasks/tradefed-tests-list.mk
new file mode 100644
index 0000000..bcbdfcf
--- /dev/null
+++ b/core/tasks/tradefed-tests-list.mk
@@ -0,0 +1,38 @@
+# Copyright (C) 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# List all TradeFed tests from COMPATIBILITY.tradefed_tests_dir
+.PHONY: tradefed-tests-list
+
+tradefed_tests :=
+$(foreach dir, $(COMPATIBILITY.tradefed_tests_dir), \
+  $(eval tradefed_tests += $(shell find $(dir) -type f -name "*.xml")))
+tradefed_tests_list_intermediates := $(call intermediates-dir-for,PACKAGING,tradefed_tests_list,HOST,COMMON)
+tradefed_tests_list_zip := $(tradefed_tests_list_intermediates)/tradefed-tests_list.zip
+all_tests :=
+$(foreach test, $(tradefed_tests), \
+  $(eval all_tests += $(word 2,$(subst /res/config/,$(space),$(test)))))
+$(tradefed_tests_list_zip) : PRIVATE_tradefed_tests := $(subst .xml,,$(subst $(space),\n,$(sort $(all_tests))))
+$(tradefed_tests_list_zip) : PRIVATE_tradefed_tests_list := $(tradefed_tests_list_intermediates)/tradefed-tests_list
+
+$(tradefed_tests_list_zip) : $(tradefed_tests) $(SOONG_ZIP)
+	@echo "Package: $@"
+	$(hide) rm -rf $(dir $@) && mkdir -p $(dir $@)
+	$(hide) echo -e "$(PRIVATE_tradefed_tests)" > $(PRIVATE_tradefed_tests_list)
+	$(hide) $(SOONG_ZIP) -d -o $@ -C $(dir $@) -f $(PRIVATE_tradefed_tests_list)
+
+tradefed-tests-list : $(tradefed_tests_list_zip)
+$(call dist-for-goals, tradefed-tests-list, $(tradefed_tests_list_zip))
+
+tests: tradefed-tests-list
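Note: a sketch of the name extraction for one config file (path invented):

    tools/tradefederation/core/res/config/suite/stub1.xml
      -> "tools/tradefederation/core suite/stub1.xml"   # $(subst /res/config/,$(space),...)
      -> suite/stub1.xml                                # $(word 2,...)
      -> suite/stub1                                    # $(subst .xml,,...) on the sorted list

so suite/stub1 is the line written into tradefed-tests_list.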
diff --git a/core/tasks/vndk.mk b/core/tasks/vndk.mk
index d824a41..3604aed 100644
--- a/core/tasks/vndk.mk
+++ b/core/tasks/vndk.mk
@@ -17,6 +17,12 @@
 # BOARD_VNDK_VERSION must be set to 'current' in order to generate a VNDK snapshot.
 ifeq ($(BOARD_VNDK_VERSION),current)
 
+# PLATFORM_VNDK_VERSION must be set.
+ifneq (,$(PLATFORM_VNDK_VERSION))
+
+# BOARD_VNDK_RUNTIME_DISABLE must not be set to 'true'.
+ifneq ($(BOARD_VNDK_RUNTIME_DISABLE),true)
+
 # Returns arch-specific libclang_rt.ubsan* library name.
 # Because VNDK_CORE_LIBRARIES includes all arch variants for libclang_rt.ubsan*
 # libs, the arch-specific libs are selected separately.
@@ -24,72 +30,169 @@
 # Args:
 #   $(1): if not empty, evaluates for TARGET_2ND_ARCH
 define clang-ubsan-vndk-core
-  $(eval prefix := $(if $(1),2ND_,))
-  $(addsuffix .vendor,$($(addprefix $(prefix),UBSAN_RUNTIME_LIBRARY)))
+$(strip \
+  $(eval prefix := $(if $(1),2ND_,)) \
+  $(addsuffix .vendor,$($(addprefix $(prefix),UBSAN_RUNTIME_LIBRARY))) \
+)
 endef
 
+# Returns list of file paths of the intermediate objs
+#
 # Args:
-#   $(1): list of lib names without '.so' suffix (e.g., libX.vendor)
-#   $(2): if not empty, evaluates for TARGET_2ND_ARCH
+#   $(1): list of module and filename pairs (e.g., ld.config.txt:ld.config.27.txt ...)
+#   $(2): target class (e.g., SHARED_LIBRARIES, STATIC_LIBRARIES, ETC)
+#   $(3): if not empty, evaluates for TARGET_2ND_ARCH
 define paths-of-intermediates
-  $(strip \
-    $(foreach lib,$(1), \
-      $(call append-path,$(call intermediates-dir-for,SHARED_LIBRARIES,$(lib),,,$(2)),$(lib).so)))
+$(strip \
+  $(foreach pair,$(1), \
+    $(eval split_pair := $(subst :,$(space),$(pair))) \
+    $(eval module := $(word 1,$(split_pair))) \
+    $(eval filename := $(word 2,$(split_pair))) \
+    $(eval dir := $(call intermediates-dir-for,$(2),$(module),,,$(3))) \
+    $(call append-path,$(dir),$(filename)) \
+  ) \
+)
 endef
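Note: a hypothetical expansion, assuming a typical out/ layout (module, filename, and device name invented):

    $(call paths-of-intermediates,ld.config.txt:ld.config.P.txt,ETC)
      -> out/target/product/generic/obj/ETC/ld.config.txt_intermediates/ld.config.P.txt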
 
-vndk_core_libs := $(addsuffix .vendor,$(filter-out libclang_rt.ubsan%,$(VNDK_CORE_LIBRARIES)))
-vndk_sp_libs := $(addsuffix .vendor,$(VNDK_SAMEPROCESS_LIBRARIES))
-vndk_snapshot_dependencies := \
-  $(vndk_core_libs) \
-  $(vndk_sp_libs)
+# Returns paths of notice files under $(TARGET_OUT_NOTICE_FILES)
+#
+# Args:
+#   $(1): list of lib names (e.g., libfoo.vendor)
+#   $(2): vndk lib type, one of 'vndk' or 'vndk-sp'
+define paths-of-notice-files
+$(strip \
+  $(eval lib_dir := lib$(if $(TARGET_IS_64BIT),64,)) \
+  $(eval vndk_dir := $(2)-$(PLATFORM_VNDK_VERSION)) \
+  $(foreach lib,$(1), \
+    $(eval notice_file_name := $(patsubst %.vendor,%.so.txt,$(lib))) \
+    $(TARGET_OUT_NOTICE_FILES)/src/system/$(lib_dir)/$(vndk_dir)/$(notice_file_name) \
+  ) \
+)
+endef
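Note: for example, on a 64-bit target with PLATFORM_VNDK_VERSION := P (library name invented):

    $(call paths-of-notice-files,libfoo.vendor,vndk-sp)
      -> $(TARGET_OUT_NOTICE_FILES)/src/system/lib64/vndk-sp-P/libfoo.so.txt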
 
 # If in the future libclang_rt.ubsan* is removed from the VNDK-core list,
 # need to update the related logic in this file.
 ifeq (,$(filter libclang_rt.ubsan%,$(VNDK_CORE_LIBRARIES)))
-  $(error libclang_rt.ubsan* is no longer a VNDK-core library.)
+  $(warning libclang_rt.ubsan* is no longer a VNDK-core library. Please update this file.)
+  vndk_core_libs := $(addsuffix .vendor,$(VNDK_CORE_LIBRARIES))
+else
+  vndk_core_libs := $(addsuffix .vendor,$(filter-out libclang_rt.ubsan%,$(VNDK_CORE_LIBRARIES)))
+
+  vndk_core_libs += $(call clang-ubsan-vndk-core)
+  ifdef TARGET_2ND_ARCH
+    vndk_core_libs += $(call clang-ubsan-vndk-core,true)
+  endif
 endif
 
-# for TARGET_ARCH
-clang_ubsan_vndk_core_$(TARGET_ARCH) := $(call clang-ubsan-vndk-core)
-vndk_snapshot_dependencies += \
-  $(clang_ubsan_vndk_core_$(TARGET_ARCH))
+vndk_sp_libs := $(addsuffix .vendor,$(VNDK_SAMEPROCESS_LIBRARIES))
+vndk_private_libs := $(addsuffix .vendor,$(VNDK_PRIVATE_LIBRARIES))
 
-ifdef TARGET_2ND_ARCH
-clang_ubsan_vndk_core_$(TARGET_2ND_ARCH) := $(call clang-ubsan-vndk-core,true)
-vndk_snapshot_dependencies += \
-  $(clang_ubsan_vndk_core_$(TARGET_2ND_ARCH))
-endif
+vndk_snapshot_libs := \
+  $(vndk_core_libs) \
+  $(vndk_sp_libs)
 
+vndk_prebuilt_txts := \
+  ld.config.txt \
+  vndksp.libraries.txt \
+  llndk.libraries.txt
+
+vndk_snapshot_top := $(call intermediates-dir-for,PACKAGING,vndk-snapshot)
+vndk_snapshot_out := $(vndk_snapshot_top)/vndk-snapshot
+vndk_snapshot_configs_out := $(vndk_snapshot_top)/configs
+
+#######################################
+# vndkcore.libraries.txt
+vndkcore.libraries.txt := $(vndk_snapshot_configs_out)/vndkcore.libraries.txt
+$(vndkcore.libraries.txt): $(vndk_core_libs)
+	@echo 'Generating: $@'
+	@rm -f $@
+	@mkdir -p $(dir $@)
+	$(hide) echo -n > $@
+	$(hide) $(foreach lib,$^,echo $(patsubst %.vendor,%,$(lib)).so >> $@;)
+
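Note: illustrative output — if vndk_core_libs contained libbinder.vendor and libutils.vendor, the generated vndkcore.libraries.txt would read:

    libbinder.so
    libutils.so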
+
+#######################################
+# vndkprivate.libraries.txt
+vndkprivate.libraries.txt := $(vndk_snapshot_configs_out)/vndkprivate.libraries.txt
+$(vndkprivate.libraries.txt): $(vndk_private_libs)
+	@echo 'Generating: $@'
+	@rm -f $@
+	@mkdir -p $(dir $@)
+	$(hide) echo -n > $@
+	$(hide) $(foreach lib,$^,echo $(patsubst %.vendor,%,$(lib)).so >> $@;)
+
+
+#######################################
+# module_paths.txt
+module_paths.txt := $(vndk_snapshot_configs_out)/module_paths.txt
+$(module_paths.txt): $(vndk_snapshot_libs)
+	@echo 'Generating: $@'
+	@rm -f $@
+	@mkdir -p $(dir $@)
+	$(hide) echo -n > $@
+	$(hide) $(foreach lib,$^,echo $(patsubst %.vendor,%,$(lib)).so $(ALL_MODULES.$(lib).PATH) >> $@;)
+
+
+vndk_snapshot_configs := \
+  $(vndkcore.libraries.txt) \
+  $(vndkprivate.libraries.txt) \
+  $(module_paths.txt)
+
+#######################################
+# vndk_snapshot_zip
+vndk_snapshot_variant := $(vndk_snapshot_out)/$(TARGET_ARCH)
+vndk_lib_dir := $(vndk_snapshot_variant)/arch-$(TARGET_ARCH)-$(TARGET_ARCH_VARIANT)
+vndk_lib_dir_2nd := $(vndk_snapshot_variant)/arch-$(TARGET_2ND_ARCH)-$(TARGET_2ND_ARCH_VARIANT)
 vndk_snapshot_zip := $(PRODUCT_OUT)/android-vndk-$(TARGET_ARCH).zip
-vndk_snapshot_out := $(call intermediates-dir-for,PACKAGING,vndk-snapshot)
+
 $(vndk_snapshot_zip): PRIVATE_VNDK_SNAPSHOT_OUT := $(vndk_snapshot_out)
 
-$(vndk_snapshot_zip): PRIVATE_VNDK_CORE_OUT_$(TARGET_ARCH) := \
-  $(vndk_snapshot_out)/arch-$(TARGET_ARCH)/shared/vndk-core
-$(vndk_snapshot_zip): PRIVATE_VNDK_CORE_INTERMEDIATES_$(TARGET_ARCH) := \
-  $(call paths-of-intermediates,$(vndk_core_libs) $(clang_ubsan_vndk_core_$(TARGET_ARCH)))
-$(vndk_snapshot_zip): PRIVATE_VNDK_SP_OUT_$(TARGET_ARCH) := \
-  $(vndk_snapshot_out)/arch-$(TARGET_ARCH)/shared/vndk-sp
-$(vndk_snapshot_zip): PRIVATE_VNDK_SP_INTERMEDIATES_$(TARGET_ARCH) := \
-  $(call paths-of-intermediates,$(vndk_sp_libs))
+$(vndk_snapshot_zip): PRIVATE_VNDK_CORE_OUT := $(vndk_lib_dir)/shared/vndk-core
+$(vndk_snapshot_zip): PRIVATE_VNDK_CORE_INTERMEDIATES := \
+  $(call paths-of-intermediates,$(foreach lib,$(vndk_core_libs),$(lib):$(lib).so),SHARED_LIBRARIES)
+
+$(vndk_snapshot_zip): PRIVATE_VNDK_SP_OUT := $(vndk_lib_dir)/shared/vndk-sp
+$(vndk_snapshot_zip): PRIVATE_VNDK_SP_INTERMEDIATES := \
+  $(call paths-of-intermediates,$(foreach lib,$(vndk_sp_libs),$(lib):$(lib).so),SHARED_LIBRARIES)
+
+$(vndk_snapshot_zip): PRIVATE_CONFIGS_OUT := $(vndk_snapshot_variant)/configs
+$(vndk_snapshot_zip): PRIVATE_CONFIGS_INTERMEDIATES := \
+  $(call paths-of-intermediates,$(foreach txt,$(vndk_prebuilt_txts), \
+    $(txt):$(patsubst %.txt,%.$(PLATFORM_VNDK_VERSION).txt,$(txt))),ETC) \
+  $(vndk_snapshot_configs)
+
+$(vndk_snapshot_zip): PRIVATE_NOTICE_FILES_OUT := $(vndk_snapshot_variant)/NOTICE_FILES
+$(vndk_snapshot_zip): PRIVATE_NOTICE_FILES_INTERMEDIATES := \
+  $(call paths-of-notice-files,$(vndk_core_libs),vndk) \
+  $(call paths-of-notice-files,$(vndk_sp_libs),vndk-sp)
 
 ifdef TARGET_2ND_ARCH
-$(vndk_snapshot_zip): PRIVATE_VNDK_CORE_OUT_$(TARGET_2ND_ARCH) := \
-  $(vndk_snapshot_out)/arch-$(TARGET_2ND_ARCH)/shared/vndk-core
-$(vndk_snapshot_zip): PRIVATE_VNDK_CORE_INTERMEDIATES_$(TARGET_2ND_ARCH) := \
-  $(call paths-of-intermediates,$(vndk_core_libs) $(clang_ubsan_vndk_core_$(TARGET_2ND_ARCH)),true)
-$(vndk_snapshot_zip): PRIVATE_VNDK_SP_OUT_$(TARGET_2ND_ARCH) := \
-  $(vndk_snapshot_out)/arch-$(TARGET_2ND_ARCH)/shared/vndk-sp
-$(vndk_snapshot_zip): PRIVATE_VNDK_SP_INTERMEDIATES_$(TARGET_2ND_ARCH) := \
-  $(call paths-of-intermediates,$(vndk_sp_libs),true)
+$(vndk_snapshot_zip): PRIVATE_VNDK_CORE_OUT_2ND := $(vndk_lib_dir_2nd)/shared/vndk-core
+$(vndk_snapshot_zip): PRIVATE_VNDK_CORE_INTERMEDIATES_2ND := \
+  $(call paths-of-intermediates,$(foreach lib,$(vndk_core_libs),$(lib):$(lib).so),SHARED_LIBRARIES,true)
+
+$(vndk_snapshot_zip): PRIVATE_VNDK_SP_OUT_2ND := $(vndk_lib_dir_2nd)/shared/vndk-sp
+$(vndk_snapshot_zip): PRIVATE_VNDK_SP_INTERMEDIATES_2ND := \
+  $(call paths-of-intermediates,$(foreach lib,$(vndk_sp_libs),$(lib):$(lib).so),SHARED_LIBRARIES,true)
 endif
 
 # Args
 #   $(1): destination directory
-#   $(2): list of libs to copy
+#   $(2): list of files to copy
 $(vndk_snapshot_zip): private-copy-vndk-intermediates = \
-	@mkdir -p $(1); \
-	$(foreach lib,$(2),cp -p $(lib) $(call append-path,$(1),$(subst .vendor,,$(notdir $(lib))));)
+  $(if $(2),$(strip \
+    @mkdir -p $(1); \
+    $(foreach file,$(2), \
+      if [ -e $(file) ]; then \
+        cp -p $(file) $(call append-path,$(1),$(subst .vendor,,$(notdir $(file)))); \
+      fi; \
+    ) \
+  ))
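Note: a sketch of one iteration of the copy loop (paths invented); the .vendor infix is dropped from the installed name:

    if [ -e out/target/product/generic/obj/SHARED_LIBRARIES/libfoo.vendor_intermediates/libfoo.vendor.so ]; then
      cp -p out/target/product/generic/obj/SHARED_LIBRARIES/libfoo.vendor_intermediates/libfoo.vendor.so \
        $(PRIVATE_VNDK_CORE_OUT)/libfoo.so;
    fi;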
+
+vndk_snapshot_dependencies := \
+  $(vndk_snapshot_libs) \
+  $(vndk_prebuilt_txts) \
+  $(vndk_snapshot_configs)
 
 $(vndk_snapshot_zip): $(vndk_snapshot_dependencies) $(SOONG_ZIP)
 	@echo 'Generating VNDK snapshot: $@'
@@ -97,28 +200,60 @@
 	@rm -rf $(PRIVATE_VNDK_SNAPSHOT_OUT)
 	@mkdir -p $(PRIVATE_VNDK_SNAPSHOT_OUT)
 	$(call private-copy-vndk-intermediates, \
-		$(PRIVATE_VNDK_CORE_OUT_$(TARGET_ARCH)),$(PRIVATE_VNDK_CORE_INTERMEDIATES_$(TARGET_ARCH)))
+		$(PRIVATE_VNDK_CORE_OUT),$(PRIVATE_VNDK_CORE_INTERMEDIATES))
 	$(call private-copy-vndk-intermediates, \
-		$(PRIVATE_VNDK_SP_OUT_$(TARGET_ARCH)),$(PRIVATE_VNDK_SP_INTERMEDIATES_$(TARGET_ARCH)))
+		$(PRIVATE_VNDK_SP_OUT),$(PRIVATE_VNDK_SP_INTERMEDIATES))
+	$(call private-copy-vndk-intermediates, \
+		$(PRIVATE_CONFIGS_OUT),$(PRIVATE_CONFIGS_INTERMEDIATES))
+	$(call private-copy-vndk-intermediates, \
+		$(PRIVATE_NOTICE_FILES_OUT),$(PRIVATE_NOTICE_FILES_INTERMEDIATES))
 ifdef TARGET_2ND_ARCH
 	$(call private-copy-vndk-intermediates, \
-		$(PRIVATE_VNDK_CORE_OUT_$(TARGET_2ND_ARCH)),$(PRIVATE_VNDK_CORE_INTERMEDIATES_$(TARGET_2ND_ARCH)))
+		$(PRIVATE_VNDK_CORE_OUT_2ND),$(PRIVATE_VNDK_CORE_INTERMEDIATES_2ND))
 	$(call private-copy-vndk-intermediates, \
-		$(PRIVATE_VNDK_SP_OUT_$(TARGET_2ND_ARCH)),$(PRIVATE_VNDK_SP_INTERMEDIATES_$(TARGET_2ND_ARCH)))
+		$(PRIVATE_VNDK_SP_OUT_2ND),$(PRIVATE_VNDK_SP_INTERMEDIATES_2ND))
 endif
-	$(hide) $(SOONG_ZIP) -o $@ -P vndk-snapshot -C $(PRIVATE_VNDK_SNAPSHOT_OUT) \
-	-D $(PRIVATE_VNDK_SNAPSHOT_OUT)
+	$(hide) $(SOONG_ZIP) -o $@ -C $(PRIVATE_VNDK_SNAPSHOT_OUT) -D $(PRIVATE_VNDK_SNAPSHOT_OUT)
 
 .PHONY: vndk
 vndk: $(vndk_snapshot_zip)
 
 $(call dist-for-goals, vndk, $(vndk_snapshot_zip))
 
+# clear global vars
+clang-ubsan-vndk-core :=
+paths-of-intermediates :=
+paths-of-notice-files :=
+vndk_core_libs :=
+vndk_sp_libs :=
+vndk_snapshot_libs :=
+vndk_prebuilt_txts :=
+vndk_snapshot_configs :=
+vndk_snapshot_top :=
+vndk_snapshot_out :=
+vndk_snapshot_configs_out :=
+vndk_snapshot_variant :=
+vndk_lib_dir :=
+vndk_lib_dir_2nd :=
+vndk_snapshot_dependencies :=
+
+else # BOARD_VNDK_RUNTIME_DISABLE is set to 'true'
+error_msg := "CANNOT generate VNDK snapshot. BOARD_VNDK_RUNTIME_DISABLE must not be set to 'true'."
+endif # BOARD_VNDK_RUNTIME_DISABLE
+
+else # PLATFORM_VNDK_VERSION is NOT set
+error_msg := "CANNOT generate VNDK snapshot. PLATFORM_VNDK_VERSION must be set."
+endif # PLATFORM_VNDK_VERSION
+
 else # BOARD_VNDK_VERSION is NOT set to 'current'
+error_msg := "CANNOT generate VNDK snapshot. BOARD_VNDK_VERSION must be set to 'current'."
+endif # BOARD_VNDK_VERSION
+
+ifneq (,$(error_msg))
 
 .PHONY: vndk
 vndk:
-	$(call echo-error,$(current_makefile),CANNOT generate VNDK snapshot. BOARD_VNDK_VERSION must be set to 'current'.)
+	$(call echo-error,$(current_makefile),$(error_msg))
 	exit 1
 
-endif # BOARD_VNDK_VERSION
+endif
diff --git a/core/test_config_common.mk b/core/test_config_common.mk
new file mode 100644
index 0000000..487f9f2
--- /dev/null
+++ b/core/test_config_common.mk
@@ -0,0 +1,56 @@
+#
+# Copyright (C) 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+LOCAL_MODULE_CLASS := TEST_CONFIG
+
+# Output test config files to testcases directory.
+ifeq (,$(filter general-tests, $(LOCAL_COMPATIBILITY_SUITE)))
+  LOCAL_COMPATIBILITY_SUITE += general-tests
+endif
+
+LOCAL_MODULE_SUFFIX := .config
+
+my_test_config_file := $(wildcard $(LOCAL_PATH)/$(LOCAL_MODULE).xml)
+LOCAL_SRC_FILES :=
+
+include $(BUILD_SYSTEM)/base_rules.mk
+
+# The test config is not in a standalone XML file.
+ifndef my_test_config_file
+
+ifndef LOCAL_TEST_CONFIG_OPTIONS
+  $(call pretty-error,LOCAL_TEST_CONFIG_OPTIONS must be set if the test XML file is not provided.)
+endif
+
+my_base_test_config_file := $(LOCAL_PATH)/AndroidTest.xml
+my_test_config_file := $(dir $(LOCAL_BUILT_MODULE))AndroidTest.xml
+
+$(my_test_config_file) : PRIVATE_test_config_options := $(LOCAL_TEST_CONFIG_OPTIONS)
+$(my_test_config_file) : $(my_base_test_config_file)
+	@echo "Create $(notdir $@) with options: $(PRIVATE_test_config_options)."
+	$(eval _option_xml := \
+		$(foreach option,$(PRIVATE_test_config_options), \
+			$(eval p := $(subst :,$(space),$(option))) \
+			<option name="$(word 1,$(p))" value="$(word 2,$(p))" \/>\n))
+	$(hide) sed 's&</configuration>&$(_option_xml)</configuration>&' $< > $@
+
+endif # my_test_config_file
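Note: as a sketch, a hypothetical LOCAL_TEST_CONFIG_OPTIONS := native-test-timeout:60000 makes the sed command rewrite the closing tag of the copied AndroidTest.xml as:

    <option name="native-test-timeout" value="60000" />
    </configuration>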
+
+$(LOCAL_BUILT_MODULE) : $(my_test_config_file)
+	$(copy-file-to-target)
+
+my_base_test_config_file :=
+my_test_config_file :=
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index abf3067..e83d6fa 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -24,8 +24,9 @@
 #     DEFAULT_APP_TARGET_SDK
 #     BUILD_ID
 #     BUILD_NUMBER
-#     BUILD_DATETIME
 #     PLATFORM_SECURITY_PATCH
+#     PLATFORM_VNDK_VERSION
+#     PLATFORM_SYSTEMSDK_VERSIONS
 #
 
 # Look for an optional file containing overrides of the defaults,
@@ -49,12 +50,6 @@
 
 ifndef TARGET_PLATFORM_VERSION
   TARGET_PLATFORM_VERSION := $(DEFAULT_PLATFORM_VERSION)
-else ifeq ($(TARGET_PLATFORM_VERSION),OPR1)
-  # HACK: lunch currently sets TARGET_PLATFORM_VERSION to
-  # DEFAULT_PLATFORM_VERSION, which causes unnecessary pain
-  # when the old DEFAULT_PLATFORM_VERSION becomes invalid.
-  # For now, silently upgrade OPR1 to the current default.
-  TARGET_PLATFORM_VERSION := $(DEFAULT_PLATFORM_VERSION)
 endif
 
 ifeq (,$(filter $(ALLOWED_VERSIONS), $(TARGET_PLATFORM_VERSION)))
@@ -77,7 +72,8 @@
 # frameworks/support/compat/gingerbread/android/support/v4/os/BuildCompat.java
 
 # When you change PLATFORM_VERSION for a given PLATFORM_SDK_VERSION
-# please add that PLATFORM_VERSION to the following text file:
+# please add that PLATFORM_VERSION and clean up obsolete PLATFORM_VERSION's
+# in the following text file:
 # cts/tests/tests/os/assets/platform_versions.txt
 PLATFORM_VERSION.PPR1 := P
 
@@ -110,7 +106,7 @@
   # When you increment the PLATFORM_SDK_VERSION please ensure you also
   # clear out the following text file of all older PLATFORM_VERSION's:
   # cts/tests/tests/os/assets/platform_versions.txt
-  PLATFORM_SDK_VERSION := 26
+  PLATFORM_SDK_VERSION := 27
 endif
 
 ifndef PLATFORM_VERSION_CODENAME
@@ -171,7 +167,7 @@
     # assuming the device can only support APIs as of the previous official
     # public release.
     # This value will always be 0 for release builds.
-    PLATFORM_PREVIEW_SDK_VERSION := 1
+    PLATFORM_PREVIEW_SDK_VERSION := 0
   endif
 endif
 
@@ -188,13 +184,57 @@
   endif
 endif
 
+ifndef PLATFORM_VNDK_VERSION
+  # This is the definition of the VNDK version for the current VNDK libraries.
+  # The version is only available when PLATFORM_VERSION_CODENAME == REL.
+  # Otherwise, it will be set to a CODENAME version. The ABI is allowed to be
+  # changed only before the Android version is released. Once
+  # PLATFORM_VNDK_VERSION is set to the actual version, the ABI for this version
+  # will be frozen, and the build will emit errors if the ABI of any of the VNDK
+  # libs is changed.
+  # After that, the VNDK snapshot for this version will be generated.
+  #
+  # The VNDK version follows PLATFORM_SDK_VERSION.
+  ifeq (REL,$(PLATFORM_VERSION_CODENAME))
+    PLATFORM_VNDK_VERSION := $(PLATFORM_SDK_VERSION)
+  else
+    PLATFORM_VNDK_VERSION := $(PLATFORM_VERSION_CODENAME)
+  endif
+endif
+
+ifndef PLATFORM_SYSTEMSDK_MIN_VERSION
+  # This is the oldest version of the system SDK that the platform supports.
+  # Contrary to the public SDK, where the platform essentially supports all
+  # previous SDK versions, the platform supports only a small number of recent
+  # system SDK versions, as some old system APIs are gradually deprecated,
+  # removed, and then deleted.
+  # However, currently in P we support only the single latest version, since
+  # there are no old system SDK versions yet. Therefore, this is set to empty
+  # for now; it should later (post P) be set to a number, like 28.
+  PLATFORM_SYSTEMSDK_MIN_VERSION :=
+endif
+
+# This is the list of system SDK versions that the current platform supports.
+PLATFORM_SYSTEMSDK_VERSIONS :=
+ifneq (,$(PLATFORM_SYSTEMSDK_MIN_VERSION))
+  $(if $(call math_is_number,$(PLATFORM_SYSTEMSDK_MIN_VERSION)),,\
+    $(error PLATFORM_SYSTEMSDK_MIN_VERSION must be a number, but was $(PLATFORM_SYSTEMSDK_MIN_VERSION)))
+  PLATFORM_SYSTEMSDK_VERSIONS := $(call int_range_list,$(PLATFORM_SYSTEMSDK_MIN_VERSION),$(PLATFORM_SDK_VERSION))
+endif
+# Platform always supports the current version
+ifeq (REL,$(PLATFORM_VERSION_CODENAME))
+  PLATFORM_SYSTEMSDK_VERSIONS += $(PLATFORM_SDK_VERSION)
+else
+  PLATFORM_SYSTEMSDK_VERSIONS += $(PLATFORM_VERSION_CODENAME)
+endif
+PLATFORM_SYSTEMSDK_VERSIONS := $(strip $(sort $(PLATFORM_SYSTEMSDK_VERSIONS)))
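Note: a worked example with hypothetical future values — PLATFORM_SYSTEMSDK_MIN_VERSION := 28 and PLATFORM_SDK_VERSION := 30 on a REL build give:

    PLATFORM_SYSTEMSDK_VERSIONS := 28 29 30   # int_range_list(28,30); the current version is already in the range

On a non-REL build the codename (e.g. P) is appended as well, standing in for the not-yet-assigned current SDK level.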
+
 ifndef PLATFORM_SECURITY_PATCH
     #  Used to indicate the security patch that has been applied to the device.
     #  It must signify that the build includes all security patches issued up through the designated Android Public Security Bulletin.
     #  It must be of the form "YYYY-MM-DD" on production devices.
     #  It must match one of the Android Security Patch Level strings of the Public Security Bulletins.
     #  If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
-      PLATFORM_SECURITY_PATCH := 2017-10-05
+      PLATFORM_SECURITY_PATCH := 2017-12-01
 endif
 
 ifndef PLATFORM_BASE_OS
@@ -226,6 +266,12 @@
 DATE := date -d @$(BUILD_DATETIME)
 endif
 
+# Everything should be using BUILD_DATETIME_FROM_FILE instead.
+# BUILD_DATETIME and DATE can be removed once BUILD_NUMBER moves
+# to soong_ui.
+BUILD_DATETIME :=
+
+HAS_BUILD_NUMBER := true
 ifndef BUILD_NUMBER
   # BUILD_NUMBER should be set to the source control value that
   # represents the current state of the source code.  E.g., a
@@ -237,4 +283,5 @@
   # from this date/time" value.  Make it start with a non-digit so that
   # anyone trying to parse it as an integer will probably get "0".
   BUILD_NUMBER := eng.$(shell echo $${USER:0:6}).$(shell $(DATE) +%Y%m%d.%H%M%S)
+  HAS_BUILD_NUMBER := false
 endif
diff --git a/envsetup.sh b/envsetup.sh
index 9a616ef..cf61950 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -8,7 +8,7 @@
              Selects <product_name> as the product to build, and <build_variant> as the variant to
              build, and stores those selections in the environment to be read by subsequent
              invocations of 'm' etc.
-- tapas:     tapas [<App1> <App2> ...] [arm|x86|mips|armv5|arm64|x86_64|mips64] [eng|userdebug|user]
+- tapas:     tapas [<App1> <App2> ...] [arm|x86|mips|arm64|x86_64|mips64] [eng|userdebug|user]
 - croot:     Changes directory to the top of the tree.
 - m:         Makes from the top of the tree.
 - mm:        Builds all of the modules in the current directory, but not their dependencies.
@@ -51,7 +51,7 @@
     cached_vars=`cat $T/build/envsetup.sh | tr '()' '  ' | awk '{for(i=1;i<=NF;i++) if($i~/get_build_var/) print $(i+1)}' | sort -u | tr '\n' ' '`
     cached_abs_vars=`cat $T/build/envsetup.sh | tr '()' '  ' | awk '{for(i=1;i<=NF;i++) if($i~/get_abs_build_var/) print $(i+1)}' | sort -u | tr '\n' ' '`
     # Call the build system to dump the "<val>=<value>" pairs as a shell script.
-    build_dicts_script=`\cd $T; build/soong/soong_ui.bash --dumpvars-mode \
+    build_dicts_script=`\builtin cd $T; build/soong/soong_ui.bash --dumpvars-mode \
                         --vars="$cached_vars" \
                         --abs-vars="$cached_abs_vars" \
                         --var-prefix=var_cache_ \
@@ -318,7 +318,12 @@
 
 function settitle()
 {
-    if [ "$STAY_OFF_MY_LAWN" = "" ]; then
+    # This used to be opt-out with STAY_OFF_MY_LAWN, but this breaks folks
+    # actually using PROMPT_COMMAND (https://issuetracker.google.com/38402256),
+    # and the attempt to set the title doesn't do anything for the default
+    # window manager in debian right now, so switch it to opt-in for anyone
+    # who actually wants this.
+    if [ "$ANDROID_BUILD_SET_WINDOW_TITLE" = "true" ]; then
         local arch=$(gettargetarch)
         local product=$TARGET_PRODUCT
         local variant=$TARGET_BUILD_VARIANT
@@ -656,10 +661,10 @@
 function tapas()
 {
     local showHelp="$(echo $* | xargs -n 1 echo | \grep -E '^(help)$' | xargs)"
-    local arch="$(echo $* | xargs -n 1 echo | \grep -E '^(arm|x86|mips|armv5|arm64|x86_64|mips64)$' | xargs)"
+    local arch="$(echo $* | xargs -n 1 echo | \grep -E '^(arm|x86|mips|arm64|x86_64|mips64)$' | xargs)"
     local variant="$(echo $* | xargs -n 1 echo | \grep -E '^(user|userdebug|eng)$' | xargs)"
     local density="$(echo $* | xargs -n 1 echo | \grep -E '^(ldpi|mdpi|tvdpi|hdpi|xhdpi|xxhdpi|xxxhdpi|alldpi)$' | xargs)"
-    local apps="$(echo $* | xargs -n 1 echo | \grep -E -v '^(user|userdebug|eng|arm|x86|mips|armv5|arm64|x86_64|mips64|ldpi|mdpi|tvdpi|hdpi|xhdpi|xxhdpi|xxxhdpi|alldpi)$' | xargs)"
+    local apps="$(echo $* | xargs -n 1 echo | \grep -E -v '^(user|userdebug|eng|arm|x86|mips|arm64|x86_64|mips64|ldpi|mdpi|tvdpi|hdpi|xhdpi|xxhdpi|xxxhdpi|alldpi)$' | xargs)"
 
     if [ "$showHelp" != "" ]; then
       $(gettop)/build/make/tapasHelp.sh
@@ -683,7 +688,6 @@
     case $arch in
       x86)    product=aosp_x86;;
       mips)   product=aosp_mips;;
-      armv5)  product=generic_armv5;;
       arm64)  product=aosp_arm64;;
       x86_64) product=aosp_x86_64;;
       mips64)  product=aosp_mips64;;
@@ -737,33 +741,11 @@
     fi
 }
 
-# Return driver for "make", if any (eg. static analyzer)
-function getdriver()
-{
-    local T="$1"
-    test "$WITH_STATIC_ANALYZER" = "0" && unset WITH_STATIC_ANALYZER
-    if [ -n "$WITH_STATIC_ANALYZER" ]; then
-        # Use scan-build to collect all static analyzer reports into directory
-        # /tmp/scan-build-yyyy-mm-dd-hhmmss-*
-        # The clang compiler passed by --use-analyzer here is not important.
-        # build/make/core/binary.mk will set CLANG_CXX and CLANG before calling
-        # c++-analyzer and ccc-analyzer.
-        local CLANG_VERSION=$(get_build_var LLVM_PREBUILTS_VERSION)
-        local BUILD_OS=$(get_build_var BUILD_OS)
-        local CLANG_DIR="$T/prebuilts/clang/host/${BUILD_OS}-x86/${CLANG_VERSION}"
-        echo "\
-${CLANG_DIR}/tools/scan-build/bin/scan-build \
---use-analyzer ${CLANG_DIR}/bin/clang \
---status-bugs"
-    fi
-}
-
 function m()
 {
     local T=$(gettop)
-    local DRV=$(getdriver $T)
     if [ "$T" ]; then
-        _wrap_build $DRV $T/build/soong/soong_ui.bash --make-mode $@
+        _wrap_build $T/build/soong/soong_ui.bash --make-mode $@
     else
         echo "Couldn't locate the top of the tree.  Try setting TOP."
         return 1
@@ -790,11 +772,10 @@
 function mm()
 {
     local T=$(gettop)
-    local DRV=$(getdriver $T)
     # If we're sitting in the root of the build tree, just do a
     # normal build.
     if [ -f build/soong/soong_ui.bash ]; then
-        _wrap_build $DRV $T/build/soong/soong_ui.bash --make-mode $@
+        _wrap_build $T/build/soong/soong_ui.bash --make-mode $@
     else
         # Find the closest Android.mk file.
         local M=$(findmakefile)
@@ -829,7 +810,7 @@
             if [ "1" = "${WITH_TIDY_ONLY}" -o "true" = "${WITH_TIDY_ONLY}" ]; then
               MODULES=tidy_only
             fi
-            ONE_SHOT_MAKEFILE=$M _wrap_build $DRV $T/build/soong/soong_ui.bash --make-mode $MODULES $ARGS
+            ONE_SHOT_MAKEFILE=$M _wrap_build $T/build/soong/soong_ui.bash --make-mode $MODULES $ARGS
         fi
     fi
 }
@@ -837,7 +818,6 @@
 function mmm()
 {
     local T=$(gettop)
-    local DRV=$(getdriver $T)
     if [ "$T" ]; then
         local MAKEFILE=
         local MODULES=
@@ -897,7 +877,7 @@
         fi
         # Convert "/" to "-".
         MODULES_IN_PATHS=${MODULES_IN_PATHS//\//-}
-        ONE_SHOT_MAKEFILE="$MAKEFILE" _wrap_build $DRV $T/build/soong/soong_ui.bash --make-mode $DASH_ARGS $MODULES $MODULES_IN_PATHS $ARGS
+        ONE_SHOT_MAKEFILE="$MAKEFILE" _wrap_build $T/build/soong/soong_ui.bash --make-mode $DASH_ARGS $MODULES $MODULES_IN_PATHS $ARGS
     else
         echo "Couldn't locate the top of the tree.  Try setting TOP."
         return 1
@@ -907,9 +887,8 @@
 function mma()
 {
   local T=$(gettop)
-  local DRV=$(getdriver $T)
   if [ -f build/soong/soong_ui.bash ]; then
-    _wrap_build $DRV $T/build/soong/soong_ui.bash --make-mode $@
+    _wrap_build $T/build/soong/soong_ui.bash --make-mode $@
   else
     if [ ! "$T" ]; then
       echo "Couldn't locate the top of the tree.  Try setting TOP."
@@ -921,14 +900,13 @@
     local MODULES_IN_PATHS=MODULES-IN-$(dirname ${M})
     # Convert "/" to "-".
     MODULES_IN_PATHS=${MODULES_IN_PATHS//\//-}
-    _wrap_build $DRV $T/build/soong/soong_ui.bash --make-mode $@ $MODULES_IN_PATHS
+    _wrap_build $T/build/soong/soong_ui.bash --make-mode $@ $MODULES_IN_PATHS
   fi
 }
 
 function mmma()
 {
   local T=$(gettop)
-  local DRV=$(getdriver $T)
   if [ "$T" ]; then
     local DASH_ARGS=$(echo "$@" | awk -v RS=" " -v ORS=" " '/^-.*$/')
     local DIRS=$(echo "$@" | awk -v RS=" " -v ORS=" " '/^[^-].*$/')
@@ -959,7 +937,7 @@
     done
     # Convert "/" to "-".
     MODULES_IN_PATHS=${MODULES_IN_PATHS//\//-}
-    _wrap_build $DRV $T/build/soong/soong_ui.bash --make-mode $DASH_ARGS $ARGS $MODULES_IN_PATHS
+    _wrap_build $T/build/soong/soong_ui.bash --make-mode $DASH_ARGS $ARGS $MODULES_IN_PATHS
   else
     echo "Couldn't locate the top of the tree.  Try setting TOP."
     return 1
diff --git a/help.sh b/help.sh
index 3f39c77..c143542 100755
--- a/help.sh
+++ b/help.sh
@@ -38,6 +38,8 @@
                             Stands for "System, NO Dependencies"
     vnod                    Quickly rebuild the vendor image from built packages
                             Stands for "Vendor, NO Dependencies"
+    pnod                    Quickly rebuild the product image from built packages
+                            Stands for "Product, NO Dependencies"
 
 
 So, for example, you could run:
diff --git a/tapasHelp.sh b/tapasHelp.sh
index 058ac1d..38b3e34 100755
--- a/tapasHelp.sh
+++ b/tapasHelp.sh
@@ -6,7 +6,7 @@
 cd ../..
 TOP="${PWD}"
 
-message='usage: tapas [<App1> <App2> ...] [arm|x86|mips|armv5|arm64|x86_64|mips64] [eng|userdebug|user]
+message='usage: tapas [<App1> <App2> ...] [arm|x86|mips|arm64|x86_64|mips64] [eng|userdebug|user]
 
 tapas selects individual apps to be built by the Android build system. Unlike
 "lunch", "tapas" does not request the building of images for a device.
diff --git a/target/board/Android.mk b/target/board/Android.mk
index 4b5af3c..7fe45eb 100644
--- a/target/board/Android.mk
+++ b/target/board/Android.mk
@@ -20,7 +20,7 @@
 board_info_txt := $(wildcard $(TARGET_DEVICE_DIR)/board-info.txt)
 endif
 $(INSTALLED_ANDROID_INFO_TXT_TARGET): $(board_info_txt)
-	$(hide) build/tools/check_radio_versions.py $< $(BOARD_INFO_CHECK)
+	$(hide) build/make/tools/check_radio_versions.py $< $(BOARD_INFO_CHECK)
 	$(call pretty,"Generated: ($@)")
 ifdef board_info_txt
 	$(hide) grep -v '#' $< > $@
@@ -34,14 +34,17 @@
 ifdef DEVICE_MANIFEST_FILE
 # $(DEVICE_MANIFEST_FILE) can be a list of files
 include $(CLEAR_VARS)
-LOCAL_MODULE        := manifest.xml
+LOCAL_MODULE        := device_manifest.xml
+LOCAL_MODULE_STEM   := manifest.xml
 LOCAL_MODULE_CLASS  := ETC
-LOCAL_MODULE_PATH   := $(TARGET_OUT_VENDOR)
+LOCAL_MODULE_PATH   := $(TARGET_OUT_VENDOR)/etc/vintf
 
 GEN := $(local-generated-sources-dir)/manifest.xml
 $(GEN): PRIVATE_DEVICE_MANIFEST_FILE := $(DEVICE_MANIFEST_FILE)
 $(GEN): $(DEVICE_MANIFEST_FILE) $(HOST_OUT_EXECUTABLES)/assemble_vintf
 	BOARD_SEPOLICY_VERS=$(BOARD_SEPOLICY_VERS) \
+	PRODUCT_ENFORCE_VINTF_MANIFEST=$(PRODUCT_ENFORCE_VINTF_MANIFEST) \
+	PRODUCT_SHIPPING_API_LEVEL=$(PRODUCT_SHIPPING_API_LEVEL) \
 	$(HOST_OUT_EXECUTABLES)/assemble_vintf -o $@ \
 		-i $(call normalize-path-list,$(PRIVATE_DEVICE_MANIFEST_FILE))
 
@@ -49,102 +52,3 @@
 include $(BUILD_PREBUILT)
 BUILT_VENDOR_MANIFEST := $(LOCAL_BUILT_MODULE)
 endif
-
-# Device Compatibility Matrix
-ifdef DEVICE_MATRIX_FILE
-include $(CLEAR_VARS)
-LOCAL_MODULE        := compatibility_matrix.xml
-LOCAL_MODULE_CLASS  := ETC
-LOCAL_MODULE_PATH   := $(TARGET_OUT_VENDOR)
-
-GEN := $(local-generated-sources-dir)/compatibility_matrix.xml
-$(GEN): $(DEVICE_MATRIX_FILE) $(HOST_OUT_EXECUTABLES)/assemble_vintf
-	# TODO(b/37342627): put BOARD_VNDK_VERSION & BOARD_VNDK_LIBRARIES into device matrix.
-	$(HOST_OUT_EXECUTABLES)/assemble_vintf -i $< -o $@
-
-LOCAL_PREBUILT_MODULE_FILE := $(GEN)
-include $(BUILD_PREBUILT)
-BUILT_VENDOR_MATRIX := $(LOCAL_BUILT_MODULE)
-endif
-
-# Framework Manifest
-include $(CLEAR_VARS)
-LOCAL_MODULE        := system_manifest.xml
-LOCAL_MODULE_STEM   := manifest.xml
-LOCAL_MODULE_CLASS  := ETC
-LOCAL_MODULE_PATH   := $(TARGET_OUT)
-
-GEN := $(local-generated-sources-dir)/manifest.xml
-
-$(GEN): PRIVATE_FLAGS :=
-
-ifeq ($(PRODUCT_ENFORCE_VINTF_MANIFEST),true)
-ifdef BUILT_VENDOR_MATRIX
-$(GEN): $(BUILT_VENDOR_MATRIX)
-$(GEN): PRIVATE_FLAGS += -c "$(BUILT_VENDOR_MATRIX)"
-endif
-endif
-
-$(GEN): $(FRAMEWORK_MANIFEST_FILE) $(HOST_OUT_EXECUTABLES)/assemble_vintf
-	BOARD_SEPOLICY_VERS=$(BOARD_SEPOLICY_VERS) $(HOST_OUT_EXECUTABLES)/assemble_vintf -i $< -o $@ $(PRIVATE_FLAGS)
-
-LOCAL_PREBUILT_MODULE_FILE := $(GEN)
-include $(BUILD_PREBUILT)
-BUILT_SYSTEM_MANIFEST := $(LOCAL_BUILT_MODULE)
-
-# Framework Compatibility Matrix
-include $(CLEAR_VARS)
-LOCAL_MODULE        := system_compatibility_matrix.xml
-LOCAL_MODULE_STEM   := compatibility_matrix.xml
-LOCAL_MODULE_CLASS  := ETC
-LOCAL_MODULE_PATH   := $(TARGET_OUT)
-
-GEN := $(local-generated-sources-dir)/compatibility_matrix.xml
-
-$(GEN): PRIVATE_FLAGS :=
-
-ifeq ($(PRODUCT_ENFORCE_VINTF_MANIFEST),true)
-ifdef BUILT_VENDOR_MANIFEST
-$(GEN): $(BUILT_VENDOR_MANIFEST)
-$(GEN): PRIVATE_FLAGS += -c "$(BUILT_VENDOR_MANIFEST)"
-endif
-endif
-
-ifeq (true,$(BOARD_AVB_ENABLE))
-$(GEN): $(AVBTOOL)
-# INTERNAL_AVB_SYSTEM_SIGNING_ARGS consists of BOARD_AVB_SYSTEM_KEY_PATH and
-# BOARD_AVB_SYSTEM_ALGORITHM. We should add the dependency of key path, which
-# is a file, here.
-$(GEN): $(BOARD_AVB_SYSTEM_KEY_PATH)
-# Use deferred assignment (=) instead of immediate assignment (:=).
-# Otherwise, cannot get INTERNAL_AVB_SYSTEM_SIGNING_ARGS.
-FRAMEWORK_VBMETA_VERSION = $$("$(AVBTOOL)" add_hashtree_footer \
-                              --print_required_libavb_version \
-                              $(INTERNAL_AVB_SYSTEM_SIGNING_ARGS) \
-                              $(BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS))
-else
-FRAMEWORK_VBMETA_VERSION := 0.0
-endif
-
-# All kernel versions that the system image works with.
-KERNEL_VERSIONS := 3.18 4.4 4.9
-KERNEL_CONFIG_DATA := kernel/configs
-
-$(GEN): $(foreach version,$(KERNEL_VERSIONS),\
-	$(wildcard $(KERNEL_CONFIG_DATA)/android-$(version)/android-base*.cfg))
-$(GEN): PRIVATE_FLAGS += $(foreach version,$(KERNEL_VERSIONS),\
-	--kernel=$(version):$(call normalize-path-list,\
-		$(wildcard $(KERNEL_CONFIG_DATA)/android-$(version)/android-base*.cfg)))
-
-KERNEL_VERSIONS :=
-KERNEL_CONFIG_DATA :=
-
-$(GEN): $(FRAMEWORK_COMPATIBILITY_MATRIX_FILE) $(HOST_OUT_EXECUTABLES)/assemble_vintf
-	# TODO(b/37405869) (b/37715375) inject avb versions as well for devices that have avb enabled.
-	POLICYVERS=$(POLICYVERS) \
-		BOARD_SEPOLICY_VERS=$(BOARD_SEPOLICY_VERS) \
-		FRAMEWORK_VBMETA_VERSION=$(FRAMEWORK_VBMETA_VERSION) \
-		$(HOST_OUT_EXECUTABLES)/assemble_vintf -i $< -o $@ $(PRIVATE_FLAGS)
-LOCAL_PREBUILT_MODULE_FILE := $(GEN)
-include $(BUILD_PREBUILT)
-BUILT_SYSTEM_COMPATIBILITY_MATRIX := $(LOCAL_BUILT_MODULE)
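The hunk above renames the device manifest module so several VINTF files can coexist as distinct modules while keeping their traditional on-device names. A minimal sketch of the prebuilt-ETC pattern it relies on, with all values taken from the hunk itself:

```make
include $(CLEAR_VARS)
LOCAL_MODULE        := device_manifest.xml      # unique module name
LOCAL_MODULE_STEM   := manifest.xml             # installed file name
LOCAL_MODULE_CLASS  := ETC
LOCAL_MODULE_PATH   := $(TARGET_OUT_VENDOR)/etc/vintf
LOCAL_PREBUILT_MODULE_FILE := $(GEN)            # install the generated file as-is
include $(BUILD_PREBUILT)
```

`LOCAL_MODULE_STEM` decouples the module name from the installed file name, which is what lets `device_manifest.xml` still ship as `manifest.xml` under the new `etc/vintf` directory.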
diff --git a/target/board/generic/BoardConfig.mk b/target/board/generic/BoardConfig.mk
index 331f082..70c78a8 100644
--- a/target/board/generic/BoardConfig.mk
+++ b/target/board/generic/BoardConfig.mk
@@ -25,6 +25,7 @@
 TARGET_CPU_ABI2 := armeabi
 HAVE_HTC_AUDIO_DRIVER := true
 BOARD_USES_GENERIC_AUDIO := true
+TARGET_BOOTLOADER_BOARD_NAME := goldfish_$(TARGET_ARCH)
 
 # no hardware camera
 USE_CAMERA_STUB := true
@@ -57,11 +58,15 @@
 # when finalizing them.
 BOARD_VENDORIMAGE_PARTITION_SIZE := 100000000
 BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE := ext4
-BOARD_CACHEIMAGE_PARTITION_SIZE := 69206016
-BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE := ext4
 BOARD_FLASH_BLOCK_SIZE := 512
 TARGET_USERIMAGES_SPARSE_EXT_DISABLED := true
 DEVICE_MATRIX_FILE   := device/generic/goldfish/compatibility_matrix.xml
 
 BOARD_SEPOLICY_DIRS += build/target/board/generic/sepolicy
 BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED := true
+
+# Enable A/B update
+TARGET_NO_RECOVERY := true
+BOARD_BUILD_SYSTEM_ROOT_IMAGE := true
+
+BOARD_VNDK_VERSION := current
\ No newline at end of file
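The A/B stanza added to this BoardConfig recurs across the board configs in this patch; the product side of the same feature appears in the new `aosp_*_ab.mk` files further down. Taken together, the pattern is roughly:

```make
# Board side (BoardConfig.mk): no separate recovery partition, and
# system.img carries the root directory.
TARGET_NO_RECOVERY := true
BOARD_BUILD_SYSTEM_ROOT_IMAGE := true

# Product side (aosp_*_ab.mk): turn on the A/B OTA client for system.
AB_OTA_UPDATER := true
AB_OTA_PARTITIONS := system
PRODUCT_PACKAGES += \
    update_engine \
    update_verifier
```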
diff --git a/target/board/generic/sepolicy/OWNERS b/target/board/generic/sepolicy/OWNERS
index 4bd7e34..3828988 100644
--- a/target/board/generic/sepolicy/OWNERS
+++ b/target/board/generic/sepolicy/OWNERS
@@ -1,6 +1,4 @@
-nnk@google.com
 jeffv@google.com
-klyubin@google.com
 dcashman@google.com
 jbires@google.com
 sspatil@google.com
diff --git a/target/board/generic/sepolicy/bootanim.te b/target/board/generic/sepolicy/bootanim.te
index 4be1c8a..e4f7c73 100644
--- a/target/board/generic/sepolicy/bootanim.te
+++ b/target/board/generic/sepolicy/bootanim.te
@@ -2,4 +2,7 @@
 allow bootanim ashmem_device:chr_file execute;
 #TODO: This can safely be ignored until b/62954877 is fixed
 dontaudit bootanim system_data_file:dir read;
+
+allow bootanim graphics_device:chr_file { read ioctl open };
+
 set_prop(bootanim, qemu_prop)
diff --git a/target/board/generic/sepolicy/file.te b/target/board/generic/sepolicy/file.te
deleted file mode 100644
index f4ae9e4..0000000
--- a/target/board/generic/sepolicy/file.te
+++ /dev/null
@@ -1 +0,0 @@
-type sysfs_writable, fs_type, sysfs_type, mlstrustedobject;
diff --git a/target/board/generic/sepolicy/file_contexts b/target/board/generic/sepolicy/file_contexts
index f550f4d..521c65e 100644
--- a/target/board/generic/sepolicy/file_contexts
+++ b/target/board/generic/sepolicy/file_contexts
@@ -15,7 +15,6 @@
 /dev/qemu_.*                 u:object_r:qemu_device:s0
 /dev/ttyGF[0-9]*             u:object_r:serial_device:s0
 /dev/ttyS2                   u:object_r:console_device:s0
-/sys/qemu_trace(/.*)?        u:object_r:sysfs_writable:s0
 /vendor/bin/init\.ranchu-core\.sh u:object_r:goldfish_setup_exec:s0
 /vendor/bin/init\.ranchu-net\.sh u:object_r:goldfish_setup_exec:s0
 /vendor/bin/qemu-props       u:object_r:qemu_props_exec:s0
@@ -23,6 +22,7 @@
 /vendor/bin/hw/android\.hardware\.drm@1\.0-service\.widevine          u:object_r:hal_drm_widevine_exec:s0
 
 /vendor/lib(64)?/hw/gralloc\.ranchu\.so   u:object_r:same_process_hal_file:s0
+/vendor/lib(64)?/hw/gralloc\.goldfish\.default\.so   u:object_r:same_process_hal_file:s0
 /vendor/lib(64)?/libEGL_emulation\.so          u:object_r:same_process_hal_file:s0
 /vendor/lib(64)?/libGLESv1_CM_emulation\.so    u:object_r:same_process_hal_file:s0
 /vendor/lib(64)?/libGLESv2_emulation\.so       u:object_r:same_process_hal_file:s0
diff --git a/target/board/generic/sepolicy/genfs_contexts b/target/board/generic/sepolicy/genfs_contexts
index bdcead1..91cedf1 100644
--- a/target/board/generic/sepolicy/genfs_contexts
+++ b/target/board/generic/sepolicy/genfs_contexts
@@ -2,3 +2,16 @@
 # /sys/bus/platform/devices/ANDR0001:00/properties/android/ which is a symlink to
 # /sys/devices/platform/ANDR0001:00/properties/android/
 genfscon sysfs /devices/platform/ANDR0001:00/properties/android u:object_r:sysfs_dt_firmware_android:s0
+
+# We expect /sys/class/power_supply/* and everything it links to to be labeled
+# as sysfs_batteryinfo.
+genfscon sysfs /devices/platform/GFSH0001:00/power_supply u:object_r:sysfs_batteryinfo:s0
+
+# /sys/class/rtc
+genfscon sysfs /devices/pnp0/00:00/rtc u:object_r:sysfs_rtc:s0
+genfscon sysfs /devices/platform/GFSH0007:00/rtc u:object_r:sysfs_rtc:s0
+
+# /sys/class/net
+genfscon sysfs /devices/pci0000:00/0000:00:08.0/virtio5/net u:object_r:sysfs_net:s0
+genfscon sysfs /devices/virtual/mac80211_hwsim/hwsim0/net u:object_r:sysfs_net:s0
+genfscon sysfs /devices/virtual/mac80211_hwsim/hwsim1/net u:object_r:sysfs_net:s0
diff --git a/target/board/generic/sepolicy/goldfish_setup.te b/target/board/generic/sepolicy/goldfish_setup.te
index bcd49bd..eb913e9 100644
--- a/target/board/generic/sepolicy/goldfish_setup.te
+++ b/target/board/generic/sepolicy/goldfish_setup.te
@@ -4,6 +4,7 @@
 
 init_daemon_domain(goldfish_setup)
 
+set_prop(goldfish_setup, debug_prop);
 allow goldfish_setup self:capability { net_admin net_raw };
 allow goldfish_setup self:udp_socket { create ioctl };
 allow goldfish_setup vendor_toolbox_exec:file execute_no_trans;
diff --git a/target/board/generic/sepolicy/hal_cas_default.te b/target/board/generic/sepolicy/hal_cas_default.te
new file mode 100644
index 0000000..3ed3bee
--- /dev/null
+++ b/target/board/generic/sepolicy/hal_cas_default.te
@@ -0,0 +1 @@
+vndbinder_use(hal_cas_default);
diff --git a/target/board/generic/sepolicy/hal_fingerprint_default.te b/target/board/generic/sepolicy/hal_fingerprint_default.te
new file mode 100644
index 0000000..e5b06f1
--- /dev/null
+++ b/target/board/generic/sepolicy/hal_fingerprint_default.te
@@ -0,0 +1,5 @@
+# TODO(b/36644492): Remove data_between_core_and_vendor_violators once
+# hal_fingerprint no longer directly accesses fingerprintd_data_file.
+typeattribute hal_fingerprint_default data_between_core_and_vendor_violators;
+allow hal_fingerprint_default fingerprintd_data_file:file create_file_perms;
+allow hal_fingerprint_default fingerprintd_data_file:dir rw_dir_perms;
diff --git a/target/board/generic/sepolicy/hal_graphics_allocator_default.te b/target/board/generic/sepolicy/hal_graphics_allocator_default.te
new file mode 100644
index 0000000..0c8e27d
--- /dev/null
+++ b/target/board/generic/sepolicy/hal_graphics_allocator_default.te
@@ -0,0 +1,2 @@
+allow hal_graphics_allocator_default graphics_device:dir search;
+allow hal_graphics_allocator_default graphics_device:chr_file { ioctl open read write };
diff --git a/target/board/generic/sepolicy/healthd.te b/target/board/generic/sepolicy/healthd.te
new file mode 100644
index 0000000..ced6704
--- /dev/null
+++ b/target/board/generic/sepolicy/healthd.te
@@ -0,0 +1,2 @@
+# Allow healthd to read the /sys/class/power_supply directory
+allow healthd sysfs:dir r_dir_perms;
diff --git a/target/board/generic/sepolicy/property.te b/target/board/generic/sepolicy/property.te
index a486702..56e02ef 100644
--- a/target/board/generic/sepolicy/property.te
+++ b/target/board/generic/sepolicy/property.te
@@ -1,4 +1,3 @@
 type qemu_prop, property_type;
 type qemu_cmdline, property_type;
 type radio_noril_prop, property_type;
-type opengles_prop, property_type;
diff --git a/target/board/generic/sepolicy/property_contexts b/target/board/generic/sepolicy/property_contexts
index c66a85f..3a61b6b 100644
--- a/target/board/generic/sepolicy/property_contexts
+++ b/target/board/generic/sepolicy/property_contexts
@@ -3,4 +3,3 @@
 ro.emu.                 u:object_r:qemu_prop:s0
 ro.emulator.            u:object_r:qemu_prop:s0
 ro.radio.noril          u:object_r:radio_noril_prop:s0
-ro.opengles.            u:object_r:opengles_prop:s0
diff --git a/target/board/generic/sepolicy/system_server.te b/target/board/generic/sepolicy/system_server.te
index 9063095..dd70b12 100644
--- a/target/board/generic/sepolicy/system_server.te
+++ b/target/board/generic/sepolicy/system_server.te
@@ -1,2 +1 @@
-get_prop(system_server, opengles_prop)
 get_prop(system_server, radio_noril_prop)
diff --git a/target/board/generic_arm64/BoardConfig.mk b/target/board/generic_arm64/BoardConfig.mk
index 9beb9a3..be8ea39 100644
--- a/target/board/generic_arm64/BoardConfig.mk
+++ b/target/board/generic_arm64/BoardConfig.mk
@@ -20,6 +20,7 @@
 TARGET_ARCH_VARIANT := armv8-a
 TARGET_CPU_VARIANT := generic
 TARGET_CPU_ABI := arm64-v8a
+TARGET_BOOTLOADER_BOARD_NAME := goldfish_$(TARGET_ARCH)
 
 TARGET_2ND_ARCH := arm
 TARGET_2ND_CPU_ABI := armeabi-v7a
@@ -88,11 +89,15 @@
 # when finalizing them.
 BOARD_VENDORIMAGE_PARTITION_SIZE := 100000000
 BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE := ext4
-BOARD_CACHEIMAGE_PARTITION_SIZE := 69206016
-BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE := ext4
 BOARD_FLASH_BLOCK_SIZE := 512
 TARGET_USERIMAGES_SPARSE_EXT_DISABLED := true
 DEVICE_MATRIX_FILE   := device/generic/goldfish/compatibility_matrix.xml
 
 BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED := true
 BOARD_SEPOLICY_DIRS += build/target/board/generic/sepolicy
+
+# Enable A/B update
+TARGET_NO_RECOVERY := true
+BOARD_BUILD_SYSTEM_ROOT_IMAGE := true
+
+BOARD_VNDK_VERSION := current
diff --git a/target/board/generic_arm64_a/BoardConfig.mk b/target/board/generic_arm64_a/BoardConfig.mk
index 88bd87c..34a8ac0 100644
--- a/target/board/generic_arm64_a/BoardConfig.mk
+++ b/target/board/generic_arm64_a/BoardConfig.mk
@@ -14,38 +14,7 @@
 # limitations under the License.
 #
 
-# Common boardconfig settings for generic AOSP products targetting mobile
-# (phone/table) devices.
-
-# Bootloader is not part of generic AOSP image
-TARGET_NO_BOOTLOADER := true
-
-# Kernel is also not part of generic AOSP image
-TARGET_NO_KERNEL := true
-
-# system.img is always ext4 with sparse option
-TARGET_USERIMAGES_USE_EXT4 := true
-TARGET_USERIMAGES_SPARSE_EXT_DISABLED := false
-TARGET_USES_MKE2FS := true
-
-# Enable dex pre-opt to speed up initial boot
-ifeq ($(HOST_OS),linux)
-  ifeq ($(WITH_DEXPREOPT),)
-    WITH_DEXPREOPT := true
-    WITH_DEXPREOPT_PIC := true
-    ifneq ($(TARGET_BUILD_VARIANT),user)
-      # Retain classes.dex in APK's for non-user builds
-      DEX_PREOPT_DEFAULT := nostripping
-    endif
-  endif
-endif
-
-# Generic AOSP image always requires separate vendor.img
-BOARD_USES_VENDORIMAGE := true
-TARGET_COPY_OUT_VENDOR := vendor
-
-# Generic AOSP image does NOT support HWC1
-TARGET_USES_HWC2 := true
+include build/make/target/board/treble_common_64.mk
 
 TARGET_ARCH := arm64
 TARGET_ARCH_VARIANT := armv8-a
@@ -54,16 +23,7 @@
 TARGET_CPU_VARIANT := generic
 
 TARGET_2ND_ARCH := arm
-TARGET_2ND_ARCH_VARIANT := armv7-a-neon
+TARGET_2ND_ARCH_VARIANT := armv8-a
 TARGET_2ND_CPU_ABI := armeabi-v7a
 TARGET_2ND_CPU_ABI2 := armeabi
 TARGET_2ND_CPU_VARIANT := generic
-
-TARGET_USES_64_BIT_BINDER := true
-
-BOARD_SYSTEMIMAGE_PARTITION_SIZE := 1610612736	# 1.5 GB
-
-BOARD_FLASH_BLOCK_SIZE := 512
-
-BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED := true
-
diff --git a/target/board/generic_arm64_a/system.prop b/target/board/generic_arm64_a/system.prop
deleted file mode 100644
index e69de29..0000000
--- a/target/board/generic_arm64_a/system.prop
+++ /dev/null
diff --git a/target/board/generic_arm64_ab/BoardConfig.mk b/target/board/generic_arm64_ab/BoardConfig.mk
index 0c07566..00afee6 100644
--- a/target/board/generic_arm64_ab/BoardConfig.mk
+++ b/target/board/generic_arm64_ab/BoardConfig.mk
@@ -14,39 +14,7 @@
 # limitations under the License.
 #
 
-# Common boardconfig settings for generic AOSP products targetting mobile
-# (phone/table) devices.
-
-# Bootloader is not part of generic AOSP image
-TARGET_NO_BOOTLOADER := true
-
-# Kernel is also not part of generic AOSP image
-TARGET_NO_KERNEL := true
-
-# system.img is always ext4 with sparse option
-TARGET_USERIMAGES_USE_EXT4 := true
-TARGET_USERIMAGES_SPARSE_EXT_DISABLED := false
-TARGET_USES_MKE2FS := true
-
-# Enable dex pre-opt to speed up initial boot
-ifeq ($(HOST_OS),linux)
-  ifeq ($(WITH_DEXPREOPT),)
-    WITH_DEXPREOPT := true
-    WITH_DEXPREOPT_PIC := true
-    ifneq ($(TARGET_BUILD_VARIANT),user)
-      # Retain classes.dex in APK's for non-user builds
-      DEX_PREOPT_DEFAULT := nostripping
-    endif
-  endif
-endif
-
-# Generic AOSP image always requires separate vendor.img
-BOARD_USES_VENDORIMAGE := true
-TARGET_COPY_OUT_VENDOR := vendor
-
-# Generic AOSP image does NOT support HWC1
-TARGET_USES_HWC2 := true
-NUM_FRAMEBUFFER_SURFACE_BUFFERS := 3
+include build/make/target/board/treble_common_64.mk
 
 TARGET_ARCH := arm64
 TARGET_ARCH_VARIANT := armv8-a
@@ -55,30 +23,19 @@
 TARGET_CPU_VARIANT := generic
 
 TARGET_2ND_ARCH := arm
-TARGET_2ND_ARCH_VARIANT := armv7-a-neon
+TARGET_2ND_ARCH_VARIANT := armv8-a
 TARGET_2ND_CPU_ABI := armeabi-v7a
 TARGET_2ND_CPU_ABI2 := armeabi
-# TODO(jiyong) can we set krait here?
-TARGET_2ND_CPU_VARIANT := cortex-a15
-
-TARGET_USES_64_BIT_BINDER := true
+TARGET_2ND_CPU_VARIANT := generic
 
 # Enable A/B update
 TARGET_NO_RECOVERY := true
 BOARD_BUILD_SYSTEM_ROOT_IMAGE := true
 
 # TODO(jiyong) These might be SoC specific.
-BOARD_ROOT_EXTRA_FOLDERS := bt_firmware firmware firmware/radio persist
+BOARD_ROOT_EXTRA_FOLDERS += firmware firmware/radio persist
 BOARD_ROOT_EXTRA_SYMLINKS := /vendor/lib/dsp:/dsp
 
-# TODO(b/35603549): this is currently set to 2.5GB to support sailfish/marlin
-# Fix this!
-BOARD_SYSTEMIMAGE_PARTITION_SIZE := 2147483648
-
-BOARD_FLASH_BLOCK_SIZE := 512
-
-BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED := true
-
 # TODO(b/36764215): remove this setting when the generic system image
 # no longer has QCOM-specific directories under /.
 BOARD_SEPOLICY_DIRS += build/target/board/generic_arm64_ab/sepolicy
diff --git a/target/board/generic_arm64_ab/sepolicy/OWNERS b/target/board/generic_arm64_ab/sepolicy/OWNERS
index 4bd7e34..3828988 100644
--- a/target/board/generic_arm64_ab/sepolicy/OWNERS
+++ b/target/board/generic_arm64_ab/sepolicy/OWNERS
@@ -1,6 +1,4 @@
-nnk@google.com
 jeffv@google.com
-klyubin@google.com
 dcashman@google.com
 jbires@google.com
 sspatil@google.com
diff --git a/target/board/generic_arm64_ab/sepolicy/file.te b/target/board/generic_arm64_ab/sepolicy/file.te
index 4645533..7adfdfa 100644
--- a/target/board/generic_arm64_ab/sepolicy/file.te
+++ b/target/board/generic_arm64_ab/sepolicy/file.te
@@ -1,6 +1,5 @@
 # TODO(b/36764215): remove this file when the generic system image
 # no longer has these directories
-type bt_firmware_file, file_type;
 type persist_file, file_type;
 
 # Default type for anything under /firmware.
diff --git a/target/board/generic_arm64_ab/sepolicy/file_contexts b/target/board/generic_arm64_ab/sepolicy/file_contexts
index 92a4ff8..0a80559 100644
--- a/target/board/generic_arm64_ab/sepolicy/file_contexts
+++ b/target/board/generic_arm64_ab/sepolicy/file_contexts
@@ -2,7 +2,6 @@
 # no longer has these directories. They are specific to QCOM.
 
 # /
-/bt_firmware(/.*)?      u:object_r:bt_firmware_file:s0
 /tombstones             u:object_r:rootfs:s0
 /dsp                    u:object_r:rootfs:s0
 
diff --git a/target/board/generic_arm64_ab/system.prop b/target/board/generic_arm64_ab/system.prop
deleted file mode 100644
index e69de29..0000000
--- a/target/board/generic_arm64_ab/system.prop
+++ /dev/null
diff --git a/target/board/generic_arm_a/BoardConfig.mk b/target/board/generic_arm_a/BoardConfig.mk
index 6530e6e..d930749 100644
--- a/target/board/generic_arm_a/BoardConfig.mk
+++ b/target/board/generic_arm_a/BoardConfig.mk
@@ -14,48 +14,13 @@
 # limitations under the License.
 #
 
-# Common boardconfig settings for generic AOSP products targetting mobile
-# (phone/table) devices.
+include build/make/target/board/treble_common_32.mk
 
-# Bootloader is not part of generic AOSP image
-TARGET_NO_BOOTLOADER := true
-
-# Kernel is also not part of generic AOSP image
-TARGET_NO_KERNEL := true
-
-# system.img is always ext4 with sparse option
-TARGET_USERIMAGES_USE_EXT4 := true
-TARGET_USERIMAGES_SPARSE_EXT_DISABLED := false
-TARGET_USES_MKE2FS := true
-
-# Enable dex pre-opt to speed up initial boot
-ifeq ($(HOST_OS),linux)
-  ifeq ($(WITH_DEXPREOPT),)
-    WITH_DEXPREOPT := true
-    WITH_DEXPREOPT_PIC := true
-    ifneq ($(TARGET_BUILD_VARIANT),user)
-      # Retain classes.dex in APK's for non-user builds
-      DEX_PREOPT_DEFAULT := nostripping
-    endif
-  endif
-endif
-
-# Generic AOSP image always requires separate vendor.img
-BOARD_USES_VENDORIMAGE := true
-TARGET_COPY_OUT_VENDOR := vendor
-
-# Generic AOSP image does NOT support HWC1
-TARGET_USES_HWC2 := true
+# Override the setting from treble_common_32.mk for the non-A/B arm GSI
+BOARD_SYSTEMIMAGE_PARTITION_SIZE := 805306368 # 768MB
 
 TARGET_ARCH := arm
 TARGET_ARCH_VARIANT := armv7-a-neon
 TARGET_CPU_ABI := armeabi-v7a
 TARGET_CPU_ABI2 := armeabi
 TARGET_CPU_VARIANT := generic
-
-BOARD_SYSTEMIMAGE_PARTITION_SIZE := 1610612736
-
-BOARD_FLASH_BLOCK_SIZE := 512
-
-BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED := true
-
diff --git a/target/board/generic_arm_a/system.prop b/target/board/generic_arm_a/system.prop
deleted file mode 100644
index e69de29..0000000
--- a/target/board/generic_arm_a/system.prop
+++ /dev/null
diff --git a/target/board/generic_arm_ab/BoardConfig.mk b/target/board/generic_arm_ab/BoardConfig.mk
new file mode 100644
index 0000000..b21e907
--- /dev/null
+++ b/target/board/generic_arm_ab/BoardConfig.mk
@@ -0,0 +1,38 @@
+#
+# Copyright (C) 2017 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+include build/make/target/board/treble_common_32.mk
+
+# Override the setting from treble_common_32.mk for the 32-bit arm GSI
+BOARD_SYSTEMIMAGE_PARTITION_SIZE := 805306368 # 768MB
+
+TARGET_ARCH := arm
+TARGET_ARCH_VARIANT := armv7-a-neon
+TARGET_CPU_ABI := armeabi-v7a
+TARGET_CPU_ABI2 := armeabi
+TARGET_CPU_VARIANT := generic
+
+# Enable A/B update
+TARGET_NO_RECOVERY := true
+BOARD_BUILD_SYSTEM_ROOT_IMAGE := true
+
+# TODO(jiyong) These might be SoC specific.
+BOARD_ROOT_EXTRA_FOLDERS += firmware firmware/radio persist
+BOARD_ROOT_EXTRA_SYMLINKS := /vendor/lib/dsp:/dsp
+
+# TODO(b/36764215): remove this setting when the generic system image
+# no longer has QCOM-specific directories under /.
+BOARD_SEPOLICY_DIRS += build/target/board/generic_arm64_ab/sepolicy
diff --git a/target/board/generic_armv5/AndroidBoard.mk b/target/board/generic_armv5/AndroidBoard.mk
deleted file mode 100644
index 7daff27..0000000
--- a/target/board/generic_armv5/AndroidBoard.mk
+++ /dev/null
@@ -1,17 +0,0 @@
-#
-# Copyright (C) 2011 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
--include build/target/board/generic/AndroidBoard.mk
diff --git a/target/board/generic_armv5/README.txt b/target/board/generic_armv5/README.txt
deleted file mode 100644
index 25d590a..0000000
--- a/target/board/generic_armv5/README.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-The "generic_armv5" product defines a non-hardware-specific target
-without a kernel or bootloader.
-
-It is not a product "base class"; no other products inherit
-from it or use it in any way.
diff --git a/target/board/generic_armv5/system.prop b/target/board/generic_armv5/system.prop
deleted file mode 100644
index 137a0f9..0000000
--- a/target/board/generic_armv5/system.prop
+++ /dev/null
@@ -1,6 +0,0 @@
-#
-# system.prop for generic sdk
-#
-
-rild.libpath=/system/lib/libreference-ril.so
-rild.libargs=-d /dev/ttyS0
diff --git a/target/board/generic_mips/BoardConfig.mk b/target/board/generic_mips/BoardConfig.mk
index fb66d21..523408b 100644
--- a/target/board/generic_mips/BoardConfig.mk
+++ b/target/board/generic_mips/BoardConfig.mk
@@ -65,10 +65,12 @@
 # when finalizing them.
 BOARD_VENDORIMAGE_PARTITION_SIZE := 100000000
 BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE := ext4
-BOARD_CACHEIMAGE_PARTITION_SIZE := 69206016
-BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE := ext4
 BOARD_FLASH_BLOCK_SIZE := 512
 TARGET_USERIMAGES_SPARSE_EXT_DISABLED := true
 DEVICE_MATRIX_FILE   := device/generic/goldfish/compatibility_matrix.xml
 
 BOARD_SEPOLICY_DIRS += build/target/board/generic/sepolicy
+
+# Enable A/B update
+TARGET_NO_RECOVERY := true
+BOARD_BUILD_SYSTEM_ROOT_IMAGE := true
diff --git a/target/board/generic_mips/system.prop b/target/board/generic_mips/system.prop
index 137a0f9..973db2c 100644
--- a/target/board/generic_mips/system.prop
+++ b/target/board/generic_mips/system.prop
@@ -2,5 +2,5 @@
 # system.prop for generic sdk
 #
 
-rild.libpath=/system/lib/libreference-ril.so
+rild.libpath=/vendor/lib/libreference-ril.so
 rild.libargs=-d /dev/ttyS0
diff --git a/target/board/generic_mips64/BoardConfig.mk b/target/board/generic_mips64/BoardConfig.mk
index 67bb51f..2052d7b 100644
--- a/target/board/generic_mips64/BoardConfig.mk
+++ b/target/board/generic_mips64/BoardConfig.mk
@@ -80,8 +80,6 @@
 # when finalizing them.
 BOARD_VENDORIMAGE_PARTITION_SIZE := 100000000
 BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE := ext4
-BOARD_CACHEIMAGE_PARTITION_SIZE := 69206016
-BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE := ext4
 BOARD_FLASH_BLOCK_SIZE := 512
 TARGET_USERIMAGES_SPARSE_EXT_DISABLED := true
 DEVICE_MATRIX_FILE   := device/generic/goldfish/compatibility_matrix.xml
@@ -89,3 +87,7 @@
 BOARD_SEPOLICY_DIRS += build/target/board/generic/sepolicy
 
 DEX_PREOPT_DEFAULT := nostripping
+
+# Enable A/B update
+TARGET_NO_RECOVERY := true
+BOARD_BUILD_SYSTEM_ROOT_IMAGE := true
diff --git a/target/board/generic_x86/BoardConfig.mk b/target/board/generic_x86/BoardConfig.mk
index 000a9a3..f71e72b 100644
--- a/target/board/generic_x86/BoardConfig.mk
+++ b/target/board/generic_x86/BoardConfig.mk
@@ -10,6 +10,11 @@
 TARGET_ARCH := x86
 TARGET_ARCH_VARIANT := x86
 TARGET_PRELINK_MODULE := false
+TARGET_BOOTLOADER_BOARD_NAME := goldfish_$(TARGET_ARCH)
+
+# The emulator now uses a 64-bit kernel to run the 32-bit x86 image,
+# so the 64-bit binder interface is required.
+TARGET_USES_64_BIT_BINDER := true
 
 # The IA emulator (qemu) uses the Goldfish devices
 HAVE_HTC_AUDIO_DRIVER := true
@@ -48,8 +53,6 @@
 # when finalizing them.
 BOARD_VENDORIMAGE_PARTITION_SIZE := 100000000
 BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE := ext4
-BOARD_CACHEIMAGE_PARTITION_SIZE := 69206016
-BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE := ext4
 BOARD_FLASH_BLOCK_SIZE := 512
 TARGET_USERIMAGES_SPARSE_EXT_DISABLED := true
 DEVICE_MATRIX_FILE   := device/generic/goldfish/compatibility_matrix.xml
@@ -57,3 +60,9 @@
 BOARD_SEPOLICY_DIRS += \
         build/target/board/generic/sepolicy \
         build/target/board/generic_x86/sepolicy
+
+BOARD_VNDK_VERSION := current
+
+# Enable A/B update
+TARGET_NO_RECOVERY := true
+BOARD_BUILD_SYSTEM_ROOT_IMAGE := true
diff --git a/target/board/generic_x86/sepolicy/OWNERS b/target/board/generic_x86/sepolicy/OWNERS
index 4bd7e34..3828988 100644
--- a/target/board/generic_x86/sepolicy/OWNERS
+++ b/target/board/generic_x86/sepolicy/OWNERS
@@ -1,6 +1,4 @@
-nnk@google.com
 jeffv@google.com
-klyubin@google.com
 dcashman@google.com
 jbires@google.com
 sspatil@google.com
diff --git a/target/board/generic_x86_64/BoardConfig.mk b/target/board/generic_x86_64/BoardConfig.mk
index 883dd2e..a9c5142 100755
--- a/target/board/generic_x86_64/BoardConfig.mk
+++ b/target/board/generic_x86_64/BoardConfig.mk
@@ -10,6 +10,7 @@
 TARGET_ARCH := x86_64
 TARGET_ARCH_VARIANT := x86_64
 TARGET_PRELINK_MODULE := false
+TARGET_BOOTLOADER_BOARD_NAME := goldfish_$(TARGET_ARCH)
 
 TARGET_2ND_CPU_ABI := x86
 TARGET_2ND_ARCH := x86
@@ -50,8 +51,6 @@
 # when finalizing them.
 BOARD_VENDORIMAGE_PARTITION_SIZE := 100000000
 BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE := ext4
-BOARD_CACHEIMAGE_PARTITION_SIZE := 69206016
-BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE := ext4
 BOARD_FLASH_BLOCK_SIZE := 512
 TARGET_USERIMAGES_SPARSE_EXT_DISABLED := true
 BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED := true
@@ -60,3 +59,9 @@
 BOARD_SEPOLICY_DIRS += \
         build/target/board/generic/sepolicy \
         build/target/board/generic_x86/sepolicy
+
+BOARD_VNDK_VERSION := current
+
+# Enable A/B update
+TARGET_NO_RECOVERY := true
+BOARD_BUILD_SYSTEM_ROOT_IMAGE := true
diff --git a/target/board/generic_armv5/BoardConfig.mk b/target/board/generic_x86_64_a/BoardConfig.mk
similarity index 67%
copy from target/board/generic_armv5/BoardConfig.mk
copy to target/board/generic_x86_64_a/BoardConfig.mk
index 016937a..2c02604 100644
--- a/target/board/generic_armv5/BoardConfig.mk
+++ b/target/board/generic_x86_64_a/BoardConfig.mk
@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2011 The Android Open Source Project
+# Copyright (C) 2017 The Android Open-Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,10 +14,12 @@
 # limitations under the License.
 #
 
-include build/target/board/generic/BoardConfig.mk
+include build/make/target/board/treble_common_64.mk
 
-TARGET_ARCH_VARIANT := armv5te
-TARGET_CPU_ABI := armeabi
-TARGET_CPU_ABI2 :=
+TARGET_CPU_ABI := x86_64
+TARGET_ARCH := x86_64
+TARGET_ARCH_VARIANT := x86_64
 
-WITH_DEXPREOPT := false
+TARGET_2ND_CPU_ABI := x86
+TARGET_2ND_ARCH := x86
+TARGET_2ND_ARCH_VARIANT := x86_64
diff --git a/target/board/generic_armv5/BoardConfig.mk b/target/board/generic_x86_64_ab/BoardConfig.mk
similarity index 61%
copy from target/board/generic_armv5/BoardConfig.mk
copy to target/board/generic_x86_64_ab/BoardConfig.mk
index 016937a..e49863a 100644
--- a/target/board/generic_armv5/BoardConfig.mk
+++ b/target/board/generic_x86_64_ab/BoardConfig.mk
@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2011 The Android Open Source Project
+# Copyright (C) 2017 The Android Open-Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,10 +14,16 @@
 # limitations under the License.
 #
 
-include build/target/board/generic/BoardConfig.mk
+include build/make/target/board/treble_common_64.mk
 
-TARGET_ARCH_VARIANT := armv5te
-TARGET_CPU_ABI := armeabi
-TARGET_CPU_ABI2 :=
+TARGET_CPU_ABI := x86_64
+TARGET_ARCH := x86_64
+TARGET_ARCH_VARIANT := x86_64
 
-WITH_DEXPREOPT := false
+TARGET_2ND_CPU_ABI := x86
+TARGET_2ND_ARCH := x86
+TARGET_2ND_ARCH_VARIANT := x86_64
+
+# Enable A/B update
+TARGET_NO_RECOVERY := true
+BOARD_BUILD_SYSTEM_ROOT_IMAGE := true
diff --git a/target/board/generic_armv5/device.mk b/target/board/generic_x86_a/BoardConfig.mk
similarity index 75%
copy from target/board/generic_armv5/device.mk
copy to target/board/generic_x86_a/BoardConfig.mk
index 7c4aaf2..67cb07d 100644
--- a/target/board/generic_armv5/device.mk
+++ b/target/board/generic_x86_a/BoardConfig.mk
@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2011 The Android Open Source Project
+# Copyright (C) 2017 The Android Open-Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,4 +14,8 @@
 # limitations under the License.
 #
 
-include build/target/board/generic/device.mk
+include build/make/target/board/treble_common_32.mk
+
+TARGET_CPU_ABI := x86
+TARGET_ARCH := x86
+TARGET_ARCH_VARIANT := x86
diff --git a/target/board/generic_armv5/BoardConfig.mk b/target/board/generic_x86_ab/BoardConfig.mk
similarity index 68%
copy from target/board/generic_armv5/BoardConfig.mk
copy to target/board/generic_x86_ab/BoardConfig.mk
index 016937a..6e51102 100644
--- a/target/board/generic_armv5/BoardConfig.mk
+++ b/target/board/generic_x86_ab/BoardConfig.mk
@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2011 The Android Open Source Project
+# Copyright (C) 2017 The Android Open-Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,10 +14,12 @@
 # limitations under the License.
 #
 
-include build/target/board/generic/BoardConfig.mk
+include build/make/target/board/treble_common_32.mk
 
-TARGET_ARCH_VARIANT := armv5te
-TARGET_CPU_ABI := armeabi
-TARGET_CPU_ABI2 :=
+TARGET_CPU_ABI := x86
+TARGET_ARCH := x86
+TARGET_ARCH_VARIANT := x86
 
-WITH_DEXPREOPT := false
+# Enable A/B update
+TARGET_NO_RECOVERY := true
+BOARD_BUILD_SYSTEM_ROOT_IMAGE := true
diff --git a/target/board/treble_common.mk b/target/board/treble_common.mk
new file mode 100644
index 0000000..a8c9bc5
--- /dev/null
+++ b/target/board/treble_common.mk
@@ -0,0 +1,60 @@
+#
+# Copyright (C) 2017 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Common BoardConfig settings for generic AOSP products targeting mobile
+# (phone/tablet) devices.
+
+# VNDK
+BOARD_VNDK_VERSION := current
+
+# Properties
+TARGET_SYSTEM_PROP := build/make/target/board/treble_system.prop
+BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED := true
+
+# Bootloader, kernel and recovery are not part of generic AOSP image
+TARGET_NO_BOOTLOADER := true
+TARGET_NO_KERNEL := true
+
+# system.img is always ext4 with sparse option
+TARGET_USERIMAGES_USE_EXT4 := true
+TARGET_USERIMAGES_SPARSE_EXT_DISABLED := false
+TARGET_USES_MKE2FS := true
+
+# Generic AOSP image always requires separate vendor.img
+TARGET_COPY_OUT_VENDOR := vendor
+
+# Generic AOSP image does NOT support HWC1
+TARGET_USES_HWC2 := true
+# Set emulator framebuffer display device buffer count to 3
+NUM_FRAMEBUFFER_SURFACE_BUFFERS := 3
+
+# Audio
+USE_XML_AUDIO_POLICY_CONF := 1
+
+# b/64700195: add minimal support for odm.img
+# Currently odm.img can only be built by `make custom_images`.
+# Add an /odm mount point under the root directory.
+BOARD_ROOT_EXTRA_FOLDERS += odm
+
+# Android Verified Boot (AVB):
+#   Builds a special vbmeta.img that disables AVB verification.
+#   Otherwise, AVB will prevent the device from booting the generic system.img.
+#   Also checks that BOARD_AVB_ENABLE is not set, to prevent adding verity
+#   metadata into system.img.
+ifeq ($(BOARD_AVB_ENABLE),true)
+$(error BOARD_AVB_ENABLE cannot be set for Treble GSI)
+endif
+BOARD_BUILD_DISABLED_VBMETAIMAGE := true
diff --git a/target/board/generic_armv5/device.mk b/target/board/treble_common_32.mk
similarity index 65%
copy from target/board/generic_armv5/device.mk
copy to target/board/treble_common_32.mk
index 7c4aaf2..dbe0899 100644
--- a/target/board/generic_armv5/device.mk
+++ b/target/board/treble_common_32.mk
@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2011 The Android Open Source Project
+# Copyright (C) 2017 The Android Open-Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,4 +14,8 @@
 # limitations under the License.
 #
 
-include build/target/board/generic/device.mk
+include build/make/target/board/treble_common.mk
+
+# Partition size defaults to 1 GB (1024 MB) for 32-bit products. It can
+# be overridden in a specific BoardConfig.mk, if so desired.
+BOARD_SYSTEMIMAGE_PARTITION_SIZE := 1073741824
diff --git a/target/board/generic_armv5/device.mk b/target/board/treble_common_64.mk
similarity index 66%
copy from target/board/generic_armv5/device.mk
copy to target/board/treble_common_64.mk
index 7c4aaf2..0a6eb17 100644
--- a/target/board/generic_armv5/device.mk
+++ b/target/board/treble_common_64.mk
@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2011 The Android Open Source Project
+# Copyright (C) 2017 The Android Open-Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,4 +14,10 @@
 # limitations under the License.
 #
 
-include build/target/board/generic/device.mk
+include build/make/target/board/treble_common.mk
+
+# Enable 64-bit binder
+TARGET_USES_64_BIT_BINDER := true
+
+# Partition size defaults to 1.5 GB (1536 MB) for 64-bit products.
+BOARD_SYSTEMIMAGE_PARTITION_SIZE := 1610612736
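With `treble_common.mk` and its 32/64-bit wrappers in place, each GSI BoardConfig in this patch shrinks to an include plus per-architecture settings. A sketch of the resulting include chain, using the arm64 values from this patch:

```make
# generic_arm64_a/BoardConfig.mk (abridged)
include build/make/target/board/treble_common_64.mk
# treble_common_64.mk in turn includes treble_common.mk, which carries the
# shared settings: VNDK, Treble sysprops, ext4 system.img, separate
# vendor.img, and the AVB guard.

TARGET_ARCH := arm64
TARGET_ARCH_VARIANT := armv8-a
TARGET_CPU_ABI := arm64-v8a
```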
diff --git a/target/board/treble_system.prop b/target/board/treble_system.prop
new file mode 100644
index 0000000..4b54aaf
--- /dev/null
+++ b/target/board/treble_system.prop
@@ -0,0 +1,5 @@
+# GSI always generates dex pre-opt in the system image
+ro.cp_system_other_odex=0
+
+# GSI always disables adb authentication
+ro.adb.secure=0
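`treble_common.mk` (earlier in this patch) wires this file in via `TARGET_SYSTEM_PROP`, so these properties should end up in the generated system build props for the GSI:

```make
# From treble_common.mk above:
TARGET_SYSTEM_PROP := build/make/target/board/treble_system.prop
```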
diff --git a/target/product/AndroidProducts.mk b/target/product/AndroidProducts.mk
index 285fc39..85330b3 100644
--- a/target/product/AndroidProducts.mk
+++ b/target/product/AndroidProducts.mk
@@ -36,7 +36,6 @@
 PRODUCT_MAKEFILES := \
     $(LOCAL_DIR)/aosp_arm.mk \
     $(LOCAL_DIR)/full.mk \
-    $(LOCAL_DIR)/generic_armv5.mk \
     $(LOCAL_DIR)/aosp_x86.mk \
     $(LOCAL_DIR)/full_x86.mk \
     $(LOCAL_DIR)/aosp_mips.mk \
@@ -52,8 +51,11 @@
     $(LOCAL_DIR)/generic_mips.mk \
     $(LOCAL_DIR)/aosp_arm.mk \
     $(LOCAL_DIR)/aosp_arm_a.mk \
+    $(LOCAL_DIR)/aosp_arm_ab.mk \
     $(LOCAL_DIR)/full.mk \
     $(LOCAL_DIR)/aosp_x86.mk \
+    $(LOCAL_DIR)/aosp_x86_a.mk \
+    $(LOCAL_DIR)/aosp_x86_ab.mk \
     $(LOCAL_DIR)/aosp_x86_arm.mk \
     $(LOCAL_DIR)/full_x86.mk \
     $(LOCAL_DIR)/aosp_mips.mk \
@@ -63,6 +65,8 @@
     $(LOCAL_DIR)/aosp_arm64_ab.mk \
     $(LOCAL_DIR)/aosp_mips64.mk \
     $(LOCAL_DIR)/aosp_x86_64.mk \
+    $(LOCAL_DIR)/aosp_x86_64_a.mk \
+    $(LOCAL_DIR)/aosp_x86_64_ab.mk \
     $(LOCAL_DIR)/sdk_phone_armv7.mk \
     $(LOCAL_DIR)/sdk_phone_x86.mk \
     $(LOCAL_DIR)/sdk_phone_mips.mk \
diff --git a/target/product/aosp_arm64_a.mk b/target/product/aosp_arm64_a.mk
index 4258f04..3c7af33 100644
--- a/target/product/aosp_arm64_a.mk
+++ b/target/product/aosp_arm64_a.mk
@@ -16,13 +16,10 @@
 
 # PRODUCT_PROPERTY_OVERRIDES cannot be used here because sysprops will be at
 # /vendor/[build|default].prop when build split is on. In order to have sysprops
-# on the generic system image, place them in build/make/target/board/generic_arm64_a/
-# system.prop.
+# on the generic system image, place them in build/make/target/board/
+# treble_system.prop.
 
-include build/make/target/product/treble_common.mk
-
-$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base_telephony.mk)
+include build/make/target/product/treble_common_64.mk
 
 PRODUCT_NAME := aosp_arm64_a
 PRODUCT_DEVICE := generic_arm64_a
diff --git a/target/product/aosp_arm64_ab.mk b/target/product/aosp_arm64_ab.mk
index ae50c42..c96cb91 100644
--- a/target/product/aosp_arm64_ab.mk
+++ b/target/product/aosp_arm64_ab.mk
@@ -16,10 +16,10 @@
 
 # PRODUCT_PROPERTY_OVERRIDES cannot be used here because sysprops will be at
 # /vendor/[build|default].prop when build split is on. In order to have sysprops
-# on the generic system image, place them in build/make/target/board/generic_arm64_a/
-# system.prop.
+# on the generic system image, place them in build/make/target/board/
+# treble_system.prop.
 
-include build/make/target/product/treble_common.mk
+include build/make/target/product/treble_common_64.mk
 
 AB_OTA_UPDATER := true
 AB_OTA_PARTITIONS := system
@@ -27,9 +27,6 @@
     update_engine \
     update_verifier
 
-$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base_telephony.mk)
-
 PRODUCT_NAME := aosp_arm64_ab
 PRODUCT_DEVICE := generic_arm64_ab
 PRODUCT_BRAND := Android
diff --git a/target/product/aosp_arm_a.mk b/target/product/aosp_arm_a.mk
index 6be8e82..3060fa9 100644
--- a/target/product/aosp_arm_a.mk
+++ b/target/product/aosp_arm_a.mk
@@ -16,12 +16,10 @@
 
 # PRODUCT_PROPERTY_OVERRIDES cannot be used here because sysprops will be at
 # /vendor/[build|default].prop when build split is on. In order to have sysprops
-# on the generic system image, place them in build/make/target/board/generic_arm_a/
-# system.prop.
+# on the generic system image, place them in build/make/target/board/
+# treble_system.prop.
 
-include build/make/target/product/treble_common.mk
-
-$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base_telephony.mk)
+include build/make/target/product/treble_common_32.mk
 
 PRODUCT_NAME := aosp_arm_a
 PRODUCT_DEVICE := generic_arm_a
diff --git a/target/product/aosp_arm_ab.mk b/target/product/aosp_arm_ab.mk
new file mode 100644
index 0000000..98b2f99
--- /dev/null
+++ b/target/product/aosp_arm_ab.mk
@@ -0,0 +1,33 @@
+#
+# Copyright (C) 2017 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# PRODUCT_PROPERTY_OVERRIDES cannot be used here because sysprops will be at
+# /vendor/[build|default].prop when build split is on. In order to have sysprops
+# on the generic system image, place them in build/make/target/board/
+# treble_system.prop.
+
+include build/make/target/product/treble_common_32.mk
+
+AB_OTA_UPDATER := true
+AB_OTA_PARTITIONS := system
+PRODUCT_PACKAGES += \
+    update_engine \
+    update_verifier
+
+PRODUCT_NAME := aosp_arm_ab
+PRODUCT_DEVICE := generic_arm_ab
+PRODUCT_BRAND := Android
+PRODUCT_MODEL := AOSP on ARM32
diff --git a/target/product/aosp_x86.mk b/target/product/aosp_x86.mk
index 03203ce..96c9e33 100644
--- a/target/product/aosp_x86.mk
+++ b/target/product/aosp_x86.mk
@@ -24,7 +24,7 @@
 PRODUCT_COPY_FILES += \
     development/sys-img/advancedFeatures.ini:advancedFeatures.ini \
     device/generic/goldfish/data/etc/encryptionkey.img:encryptionkey.img \
-    prebuilts/qemu-kernel/x86/3.18/kernel-qemu2:kernel-ranchu
+    prebuilts/qemu-kernel/x86_64/4.4/kernel-qemu2:kernel-ranchu-64
 
 include $(SRC_TARGET_DIR)/product/full_x86.mk
 
diff --git a/target/product/aosp_x86_64.mk b/target/product/aosp_x86_64.mk
index 693bdaf..086a76f 100644
--- a/target/product/aosp_x86_64.mk
+++ b/target/product/aosp_x86_64.mk
@@ -25,7 +25,7 @@
 PRODUCT_COPY_FILES += \
     development/sys-img/advancedFeatures.ini:advancedFeatures.ini \
     device/generic/goldfish/data/etc/encryptionkey.img:encryptionkey.img \
-    prebuilts/qemu-kernel/x86_64/3.18/kernel-qemu2:kernel-ranchu
+    prebuilts/qemu-kernel/x86_64/4.4/kernel-qemu2:kernel-ranchu
 
 $(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base_telephony.mk)
diff --git a/target/product/aosp_x86_64_a.mk b/target/product/aosp_x86_64_a.mk
new file mode 100644
index 0000000..a7fb740
--- /dev/null
+++ b/target/product/aosp_x86_64_a.mk
@@ -0,0 +1,27 @@
+#
+# Copyright (C) 2017 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# PRODUCT_PROPERTY_OVERRIDES cannot be used here because sysprops will be at
+# /vendor/[build|default].prop when build split is on. In order to have sysprops
+# on the generic system image, place them in build/make/target/board/
+# treble_system.prop.
+
+include build/make/target/product/treble_common_64.mk
+
+PRODUCT_NAME := aosp_x86_64_a
+PRODUCT_DEVICE := generic_x86_64_a
+PRODUCT_BRAND := Android
+PRODUCT_MODEL := AOSP on x86_64
diff --git a/target/product/aosp_x86_64_ab.mk b/target/product/aosp_x86_64_ab.mk
new file mode 100644
index 0000000..4590dc5
--- /dev/null
+++ b/target/product/aosp_x86_64_ab.mk
@@ -0,0 +1,33 @@
+#
+# Copyright (C) 2017 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# PRODUCT_PROPERTY_OVERRIDES cannot be used here because sysprops will be at
+# /vendor/[build|default].prop when build split is on. In order to have sysprops
+# on the generic system image, place them in build/make/target/board/
+# treble_system.prop.
+
+include build/make/target/product/treble_common_64.mk
+
+AB_OTA_UPDATER := true
+AB_OTA_PARTITIONS := system
+PRODUCT_PACKAGES += \
+    update_engine \
+    update_verifier
+
+PRODUCT_NAME := aosp_x86_64_ab
+PRODUCT_DEVICE := generic_x86_64_ab
+PRODUCT_BRAND := Android
+PRODUCT_MODEL := AOSP on x86_64
diff --git a/target/product/aosp_x86_a.mk b/target/product/aosp_x86_a.mk
new file mode 100644
index 0000000..9ed2995
--- /dev/null
+++ b/target/product/aosp_x86_a.mk
@@ -0,0 +1,27 @@
+#
+# Copyright (C) 2017 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# PRODUCT_PROPERTY_OVERRIDES cannot be used here because sysprops will be at
+# /vendor/[build|default].prop when build split is on. In order to have sysprops
+# on the generic system image, place them in build/make/target/board/
+# treble_system.prop.
+
+include build/make/target/product/treble_common_32.mk
+
+PRODUCT_NAME := aosp_x86_a
+PRODUCT_DEVICE := generic_x86_a
+PRODUCT_BRAND := Android
+PRODUCT_MODEL := AOSP on x86
diff --git a/target/product/aosp_x86_ab.mk b/target/product/aosp_x86_ab.mk
new file mode 100644
index 0000000..404a4da
--- /dev/null
+++ b/target/product/aosp_x86_ab.mk
@@ -0,0 +1,33 @@
+#
+# Copyright (C) 2017 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# PRODUCT_PROPERTY_OVERRIDES cannot be used here because sysprops will be at
+# /vendor/[build|default].prop when build split is on. In order to have sysprops
+# on the generic system image, place them in build/make/target/board/
+# treble_system.prop.
+
+include build/make/target/product/treble_common_32.mk
+
+AB_OTA_UPDATER := true
+AB_OTA_PARTITIONS := system
+PRODUCT_PACKAGES += \
+    update_engine \
+    update_verifier
+
+PRODUCT_NAME := aosp_x86_ab
+PRODUCT_DEVICE := generic_x86_ab
+PRODUCT_BRAND := Android
+PRODUCT_MODEL := AOSP on x86
diff --git a/target/product/base.mk b/target/product/base.mk
index a182f18..14ff1c2 100644
--- a/target/product/base.mk
+++ b/target/product/base.mk
@@ -31,6 +31,7 @@
     bit \
     blkid \
     bmgr \
+    bpfloader \
     bugreport \
     bugreportz \
     cameraserver \
@@ -76,8 +77,6 @@
     libnetlink \
     libnetutils \
     libpdfium \
-    libradio \
-    libradioservice \
     libradio_metadata \
     libreference-ril \
     libreverbwrapper \
@@ -116,6 +115,7 @@
     mtpd \
     ndc \
     netd \
+    perfetto \
     ping \
     ping6 \
     platform.xml \
@@ -134,12 +134,15 @@
     svc \
     tc \
     telecom \
+    traced \
+    traced_probes \
     vdc \
     vold \
     wm
 
 # Essential HAL modules
 PRODUCT_PACKAGES += \
+    android.hardware.cas@1.0-service \
     android.hardware.media.omx@1.0-service
 
 # XML schema files
diff --git a/target/product/core.mk b/target/product/core.mk
index cab8d97..bbc2b75 100644
--- a/target/product/core.mk
+++ b/target/product/core.mk
@@ -56,6 +56,7 @@
     StorageManager \
     Telecom \
     TeleService \
+    Traceur \
     VpnDialogs \
     vr \
     MmsService
diff --git a/target/product/core_64_bit_only.mk b/target/product/core_64_bit_only.mk
new file mode 100644
index 0000000..72d30f5
--- /dev/null
+++ b/target/product/core_64_bit_only.mk
@@ -0,0 +1,30 @@
+#
+# Copyright (C) 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Inherit from this product for devices that support only 64-bit apps using:
+# $(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit_only.mk)
+# The inheritance for this must come before the inheritance chain that leads
+# to core_minimal.mk.
+
+# Copy the 64-bit zygote startup script
+PRODUCT_COPY_FILES += system/core/rootdir/init.zygote64.rc:root/init.zygote64.rc
+
+# Set the zygote property to select the 64-bit script.
+# This line must be parsed before the one in core_minimal.mk
+PRODUCT_DEFAULT_PROPERTY_OVERRIDES += ro.zygote=zygote64
+
+TARGET_SUPPORTS_32_BIT_APPS := false
+TARGET_SUPPORTS_64_BIT_APPS := true
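Per the comment at the top of this file, a 64-bit-only device opts in by inheriting this product before anything that reaches `core_minimal.mk`, so the `ro.zygote` default set here takes effect. A sketch for a hypothetical device makefile (path and surrounding inheritance are illustrative):

```make
# device/<vendor>/<device>/device.mk (hypothetical)
# core_64_bit_only.mk must be parsed first so its ro.zygote=zygote64
# default is set before core_minimal.mk is reached.
$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit_only.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/product/core_minimal.mk)
```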
diff --git a/target/product/core_base.mk b/target/product/core_base.mk
index 151e8de..7dc0010 100644
--- a/target/product/core_base.mk
+++ b/target/product/core_base.mk
@@ -62,9 +62,4 @@
     mdnsd \
     requestsync \
 
-# Wifi modules
-PRODUCT_PACKAGES += \
-    wifi-service \
-    wificond \
-
 $(call inherit-product, $(SRC_TARGET_DIR)/product/core_minimal.mk)
diff --git a/target/product/core_minimal.mk b/target/product/core_minimal.mk
index 05e3b45..16599cb 100644
--- a/target/product/core_minimal.mk
+++ b/target/product/core_minimal.mk
@@ -71,6 +71,7 @@
     libfilterfw \
     libkeystore \
     libgatekeeper \
+    libneuralnetworks \
     libwebviewchromium_loader \
     libwebviewchromium_plat_support \
     libwilhelm \
@@ -84,6 +85,7 @@
     telephony-common \
     uiautomator \
     uncrypt \
+    vndk_snapshot_package \
     voip-common \
     webview \
     webview_zygote \
diff --git a/target/product/core_tiny.mk b/target/product/core_tiny.mk
index c86b862..122f5c7 100644
--- a/target/product/core_tiny.mk
+++ b/target/product/core_tiny.mk
@@ -59,6 +59,7 @@
     iptables \
     gatekeeperd \
     keystore \
+    ld.config.txt \
     ld.mc \
     libaaudio \
     libOpenMAXAL \
diff --git a/target/product/embedded.mk b/target/product/embedded.mk
index 9e684f7..3f1d6df 100644
--- a/target/product/embedded.mk
+++ b/target/product/embedded.mk
@@ -20,10 +20,13 @@
 PRODUCT_PACKAGES += \
     adb \
     adbd \
-    android.hardware.configstore@1.1-service \
+    usbd \
+    android.hardware.configstore@1.0-service \
     android.hidl.allocator@1.0-service \
     android.hidl.memory@1.0-impl \
+    android.hidl.memory@1.0-impl.vendor \
     atrace \
+    blank_screen \
     bootanimation \
     bootstat \
     charger \
@@ -48,6 +51,7 @@
     libbinder \
     libc \
     libc_malloc_debug \
+    libc_malloc_hooks \
     libcutils \
     libdl \
     libgui \
@@ -76,14 +80,18 @@
     shell_and_utilities \
     storaged \
     surfaceflinger \
+    thermalserviced \
     tombstoned \
     tzdatacheck \
     vndservice \
     vndservicemanager \
-    compatibility_matrix.xml \
-    manifest.xml \
-    system_manifest.xml \
-    system_compatibility_matrix.xml \
+
+# VINTF data
+PRODUCT_PACKAGES += \
+    device_compatibility_matrix.xml \
+    device_manifest.xml \
+    framework_manifest.xml \
+    framework_compatibility_matrix.xml \
 
 # SELinux packages are added as dependencies of the selinux_policy
 # phony package.
diff --git a/target/product/emulator.mk b/target/product/emulator.mk
index 58245cb..0f33f38 100644
--- a/target/product/emulator.mk
+++ b/target/product/emulator.mk
@@ -26,6 +26,7 @@
 PRODUCT_PACKAGES += \
     egl.cfg \
     gralloc.goldfish \
+    gralloc.goldfish.default \
     gralloc.ranchu \
     libGLESv1_CM_emulation \
     lib_renderControl_enc \
@@ -35,6 +36,9 @@
     libOpenglSystemCommon \
     libGLESv2_emulation \
     libGLESv1_enc \
+    libEGL_swiftshader \
+    libGLESv1_CM_swiftshader \
+    libGLESv2_swiftshader \
     qemu-props \
     camera.goldfish \
     camera.goldfish.jpeg \
@@ -127,3 +131,9 @@
 PRODUCT_CHARACTERISTICS := emulator
 
 PRODUCT_FULL_TREBLE_OVERRIDE := true
+
+
+# The watchdog triggers a reboot because the location service is not
+# responding; disable the location service for now.
+PRODUCT_DEFAULT_PROPERTY_OVERRIDES += \
+config.disable_location=true
diff --git a/target/product/full_base_telephony.mk b/target/product/full_base_telephony.mk
index 375c679..af4097d 100644
--- a/target/product/full_base_telephony.mk
+++ b/target/product/full_base_telephony.mk
@@ -24,7 +24,7 @@
     ro.com.android.dataroaming=true
 
 PRODUCT_COPY_FILES := \
-    device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml \
+    device/sample/etc/apns-full-conf.xml:system/etc/apns-conf.xml \
     frameworks/native/data/etc/handheld_core_hardware.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/handheld_core_hardware.xml
 
 $(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base.mk)
diff --git a/target/board/generic_armv5/device.mk b/target/product/go_defaults.mk
similarity index 76%
rename from target/board/generic_armv5/device.mk
rename to target/product/go_defaults.mk
index 7c4aaf2..faa1852 100644
--- a/target/board/generic_armv5/device.mk
+++ b/target/product/go_defaults.mk
@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2011 The Android Open Source Project
+# Copyright (C) 2017 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,4 +14,6 @@
 # limitations under the License.
 #
 
-include build/target/board/generic/device.mk
+# Inherit common Android Go defaults.
+$(call inherit-product, build/target/product/go_defaults_common.mk)
+
diff --git a/target/product/go_defaults_512.mk b/target/product/go_defaults_512.mk
new file mode 100644
index 0000000..56ab29b
--- /dev/null
+++ b/target/product/go_defaults_512.mk
@@ -0,0 +1,28 @@
+#
+# Copyright (C) 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Inherit common Android Go defaults.
+$(call inherit-product, build/target/product/go_defaults_common.mk)
+
+# 512MB specific properties.
+
+# lmkd can kill more aggressively now.
+PRODUCT_PROPERTY_OVERRIDES += \
+     ro.lmk.medium=700 \
+
+# madvise random in ART to reduce page cache thrashing.
+PRODUCT_PROPERTY_OVERRIDES += \
+     dalvik.vm.madvise-random=true
diff --git a/target/product/go_defaults_common.mk b/target/product/go_defaults_common.mk
new file mode 100644
index 0000000..fbb9567
--- /dev/null
+++ b/target/product/go_defaults_common.mk
@@ -0,0 +1,57 @@
+#
+# Copyright (C) 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Sets Android Go recommended default values for properties.
+
+# Set lowram options
+PRODUCT_PROPERTY_OVERRIDES += \
+     ro.config.low_ram=true \
+     ro.lmk.critical_upgrade=true \
+     ro.lmk.upgrade_pressure=40
+
+# Set the threshold used to filter unused apps
+PRODUCT_PROPERTY_OVERRIDES += \
+     pm.dexopt.downgrade_after_inactive_days=10
+
+
+# Speed profile services and wifi-service to reduce RAM and storage.
+PRODUCT_SYSTEM_SERVER_COMPILER_FILTER := speed-profile
+
+# Always preopt extracted APKs to prevent extracting out of the APK for gms
+# modules.
+PRODUCT_ALWAYS_PREOPT_EXTRACTED_APK := true
+
+# Use a profile based boot image for this device. Note that this is currently a
+# generic profile and not Android Go optimized.
+PRODUCT_USE_PROFILE_FOR_BOOT_IMAGE := true
+PRODUCT_DEX_PREOPT_BOOT_IMAGE_PROFILE_LOCATION := frameworks/base/config/boot-image-profile.txt
+
+# Set the compiler filter for shared APKs to quicken.
+# Rationale: speed has a lot of dex code expansion, it uses more ram and space
+# compared to quicken. Using quicken for shared APKs on Go devices may save RAM.
+# Note that this is a trade-off: here we trade clean pages for dirty pages,
+# extra cpu and battery. That's because the quicken files will be jit-ed in all
+# the processes that load the shared APK, and the code cache is not shared.
+# Some notable apps that will be affected by this are gms and chrome.
+# b/65591595.
+PRODUCT_PROPERTY_OVERRIDES += \
+     pm.dexopt.shared=quicken
+
+# Default heap sizes. Allow up to 256m for large heaps to make sure a single app
+# doesn't take all of the RAM.
+PRODUCT_PROPERTY_OVERRIDES += dalvik.vm.heapgrowthlimit=128m
+PRODUCT_PROPERTY_OVERRIDES += dalvik.vm.heapsize=256m
+
diff --git a/target/product/languages_full.mk b/target/product/languages_full.mk
index 36c3de8..846cd70 100644
--- a/target/product/languages_full.mk
+++ b/target/product/languages_full.mk
@@ -15,10 +15,91 @@
 #
 
 # This is a build configuration that just contains a list of languages.
-# It helps in situations where languages must come first in the list,
-# mostly because screen densities interfere with the list of locales and
-# the system misbehaves when a density is the first locale.
-
-# These are all the locales that have translations and are displayable
-# by TextView in this branch.
-PRODUCT_LOCALES := en_US en_AU en_IN fr_FR it_IT es_ES et_EE de_DE nl_NL cs_CZ pl_PL ja_JP zh_TW zh_CN zh_HK ru_RU ko_KR nb_NO es_US da_DK el_GR tr_TR pt_PT pt_BR sv_SE bg_BG ca_ES en_GB fi_FI hi_IN hr_HR hu_HU in_ID iw_IL lt_LT lv_LV ro_RO sk_SK sl_SI sr_RS uk_UA vi_VN tl_PH ar_EG fa_IR th_TH sw_TZ ms_MY af_ZA zu_ZA am_ET en_XA ar_XB fr_CA km_KH lo_LA ne_NP si_LK mn_MN hy_AM az_AZ ka_GE my_MM mr_IN ml_IN is_IS mk_MK ky_KG eu_ES gl_ES bn_BD ta_IN kn_IN te_IN uz_UZ ur_PK kk_KZ sq_AL gu_IN pa_IN be_BY bs_BA sr_Latn_RS
+#
+# These are all the locales that have translations.
+PRODUCT_LOCALES := \
+        en_US \
+        af_ZA \
+        am_ET \
+        ar_EG \
+        ar_XB \
+        az_AZ \
+        be_BY \
+        bg_BG \
+        bn_BD \
+        bs_BA \
+        ca_ES \
+        cs_CZ \
+        da_DK \
+        de_DE \
+        el_GR \
+        en_AU \
+        en_CA \
+        en_GB \
+        en_IN \
+        en_XA \
+        en_XC \
+        es_ES \
+        es_US \
+        et_EE \
+        eu_ES \
+        fa_IR \
+        fi_FI \
+        fr_CA \
+        fr_FR \
+        gl_ES \
+        gu_IN \
+        hi_IN \
+        hr_HR \
+        hu_HU \
+        hy_AM \
+        in_ID \
+        is_IS \
+        it_IT \
+        iw_IL \
+        ja_JP \
+        ka_GE \
+        kk_KZ \
+        km_KH \
+        kn_IN \
+        ko_KR \
+        ky_KG \
+        lo_LA \
+        lt_LT \
+        lv_LV \
+        mk_MK \
+        ml_IN \
+        mn_MN \
+        mr_IN \
+        ms_MY \
+        my_MM \
+        nb_NO \
+        ne_NP \
+        nl_NL \
+        pa_IN \
+        pl_PL \
+        pt_BR \
+        pt_PT \
+        ro_RO \
+        ru_RU \
+        si_LK \
+        sk_SK \
+        sl_SI \
+        sq_AL \
+        sr_Latn_RS \
+        sr_RS \
+        sv_SE \
+        sw_TZ \
+        ta_IN \
+        te_IN \
+        th_TH \
+        tl_PH \
+        tr_TR \
+        uk_UA \
+        ur_PK \
+        uz_UZ \
+        vi_VN \
+        zh_CN \
+        zh_HK \
+        zh_TW \
+        zu_ZA
diff --git a/target/product/product_launched_with_o_mr1.mk b/target/product/product_launched_with_o_mr1.mk
new file mode 100644
index 0000000..25620aa
--- /dev/null
+++ b/target/product/product_launched_with_o_mr1.mk
@@ -0,0 +1,2 @@
+# PRODUCT_SHIPPING_API_LEVEL indicates the first API level that the device was commercially launched with.
+PRODUCT_SHIPPING_API_LEVEL := 27
diff --git a/target/product/runtime_libart.mk b/target/product/runtime_libart.mk
index 6e7038e..f9030cf 100644
--- a/target/product/runtime_libart.mk
+++ b/target/product/runtime_libart.mk
@@ -56,6 +56,9 @@
 PRODUCT_PACKAGES += \
     cacerts \
 
+PRODUCT_PACKAGES += \
+    hiddenapi-package-whitelist.xml \
+
 PRODUCT_SYSTEM_DEFAULT_PROPERTIES += \
     dalvik.vm.image-dex2oat-Xms=64m \
     dalvik.vm.image-dex2oat-Xmx=64m \
diff --git a/target/product/sdk_base.mk b/target/product/sdk_base.mk
index 75a63e6..b79b8c6 100644
--- a/target/product/sdk_base.mk
+++ b/target/product/sdk_base.mk
@@ -17,7 +17,6 @@
 PRODUCT_PROPERTY_OVERRIDES :=
 
 PRODUCT_PACKAGES := \
-	ApiDemos \
 	CellBroadcastReceiver \
 	CubeLiveWallpapers \
 	CustomLocale \
@@ -25,7 +24,6 @@
 	Dialer \
 	EmulatorSmokeTests \
 	Gallery2 \
-	GestureBuilder \
 	Launcher3 \
 	Camera2 \
 	librs_jni \
@@ -40,8 +38,6 @@
 	rild \
 	screenrecord \
 	SdkSetup \
-	SmokeTest \
-	SmokeTestApp \
 	SoftKeyboard \
 	sqlite3 \
 	SystemUI \
@@ -92,7 +88,7 @@
 	frameworks/base/data/sounds/effects/VideoRecord.ogg:system/media/audio/ui/VideoRecord.ogg \
 	frameworks/base/data/sounds/effects/VideoStop.ogg:system/media/audio/ui/VideoStop.ogg \
 	device/generic/goldfish/data/etc/handheld_core_hardware.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/handheld_core_hardware.xml \
-	device/generic/goldfish/camera/media_profiles.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_profiles_V1_0.xml \
+	device/generic/goldfish/camera/media_profiles.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_profiles.xml \
 	frameworks/av/media/libstagefright/data/media_codecs_google_audio.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_codecs_google_audio.xml \
 	frameworks/av/media/libstagefright/data/media_codecs_google_telephony.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_codecs_google_telephony.xml \
 	device/generic/goldfish/camera/media_codecs_google_video.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_codecs_google_video.xml \
diff --git a/target/product/sdk_phone_x86.mk b/target/product/sdk_phone_x86.mk
index 1e82773..32d71eb 100644
--- a/target/product/sdk_phone_x86.mk
+++ b/target/product/sdk_phone_x86.mk
@@ -24,7 +24,7 @@
 PRODUCT_COPY_FILES += \
     development/sys-img/advancedFeatures.ini:advancedFeatures.ini \
     device/generic/goldfish/data/etc/encryptionkey.img:encryptionkey.img \
-    prebuilts/qemu-kernel/x86/3.18/kernel-qemu2:kernel-ranchu
+    prebuilts/qemu-kernel/x86_64/4.4/kernel-qemu2:kernel-ranchu-64
 
 $(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_base.mk)
 
diff --git a/target/product/sdk_phone_x86_64.mk b/target/product/sdk_phone_x86_64.mk
index a18c4f8..e40ebb5 100644
--- a/target/product/sdk_phone_x86_64.mk
+++ b/target/product/sdk_phone_x86_64.mk
@@ -25,7 +25,7 @@
 PRODUCT_COPY_FILES += \
     development/sys-img/advancedFeatures.ini:advancedFeatures.ini \
     device/generic/goldfish/data/etc/encryptionkey.img:encryptionkey.img \
-    prebuilts/qemu-kernel/x86_64/3.18/kernel-qemu2:kernel-ranchu
+    prebuilts/qemu-kernel/x86_64/4.4/kernel-qemu2:kernel-ranchu
 
 $(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_base.mk)
diff --git a/target/product/treble_common.mk b/target/product/treble_common.mk
index 829d5f1..5352e77 100644
--- a/target/product/treble_common.mk
+++ b/target/product/treble_common.mk
@@ -14,9 +14,27 @@
 # limitations under the License.
 #
 
+# PRODUCT_PROPERTY_OVERRIDES cannot be used here because sysprops will be at
+# /vendor/[build|default].prop when build split is on. In order to have sysprops
+# on the generic system image, place them in build/make/target/board/
+# treble_system.prop.
+
+# Generic system image inherits from AOSP with telephony
+$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony.mk)
+
 # Split selinux policy
 PRODUCT_FULL_TREBLE_OVERRIDE := true
 
+# vintf utility:
+PRODUCT_PACKAGES += \
+    vintf
+
+# The Messaging app:
+#   Needed for android.telecom.cts.ExtendedInCallServiceTest#testOnCannedTextResponsesLoaded
+PRODUCT_PACKAGES += \
+    messaging
+
 # All VNDK libraries (HAL interfaces, VNDK, VNDK-SP, LL-NDK)
 PRODUCT_PACKAGES += vndk_package
 
@@ -24,8 +42,6 @@
 PRODUCT_PACKAGES += \
     libvulkan \
 
-# Audio:
-USE_XML_AUDIO_POLICY_CONF := 1
 # The following policy XML files are used as fallback for
 # vendors/devices not using XML to configure audio policy.
 PRODUCT_COPY_FILES += \
@@ -40,3 +56,35 @@
 #   audio.a2dp.default to support A2DP if board has the capability.
 PRODUCT_PACKAGES += \
     audio.a2dp.default
+
+# Net:
+#   Vendors can use the platform-provided network configuration utilities (ip,
+#   iptables, etc.) to configure the Linux networking stack, but these
+#   utilities do not yet include a HIDL interface wrapper. This is an interim
+#   solution for Android O.
+PRODUCT_PACKAGES += \
+    netutils-wrapper-1.0
+
+# Telephony:
+#   Provide a default APN configuration
+PRODUCT_COPY_FILES += \
+    device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml
+
+# NFC:
+#   Provide a default libnfc-nci.conf file for devices that do not have one in
+#   vendor/etc
+PRODUCT_COPY_FILES += \
+    device/generic/common/nfc/libnfc-nci.conf:system/etc/libnfc-nci.conf
+
+# Support for the devices with no VNDK enforcing
+PRODUCT_COPY_FILES += \
+    build/make/target/product/vndk/init.gsi.rc:system/etc/init/init.gsi.rc \
+    build/make/target/product/vndk/init.noenforce.rc:system/etc/init/gsi/init.noenforce.rc
+
+# Namespace configuration file for non-enforcing VNDK
+PRODUCT_PACKAGES += \
+    ld.config.noenforce.txt
+
+# Set current VNDK version for GSI
+PRODUCT_SYSTEM_DEFAULT_PROPERTIES += \
+    ro.gsi.vndk.version=$(PLATFORM_VNDK_VERSION)
diff --git a/target/product/generic_armv5.mk b/target/product/treble_common_32.mk
similarity index 60%
rename from target/product/generic_armv5.mk
rename to target/product/treble_common_32.mk
index daa321a..0023c3b 100644
--- a/target/product/generic_armv5.mk
+++ b/target/product/treble_common_32.mk
@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2011 The Android Open Source Project
+# Copyright (C) 2017 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,12 +14,9 @@
 # limitations under the License.
 #
 
-# This is a generic product that isn't specialized for a specific device.
-# It includes the base Android platform.
+# PRODUCT_PROPERTY_OVERRIDES cannot be used here because sysprops will be at
+# /vendor/[build|default].prop when build split is on. In order to have sysprops
+# on the generic system image, place them in build/make/target/board/
+# treble_system.prop.
 
-$(call inherit-product, $(SRC_TARGET_DIR)/product/generic.mk)
-
-# Overrides
-PRODUCT_BRAND := generic_armv5
-PRODUCT_DEVICE := generic_armv5
-PRODUCT_NAME := generic_armv5
+include build/make/target/product/treble_common.mk
diff --git a/target/product/treble_common_64.mk b/target/product/treble_common_64.mk
new file mode 100644
index 0000000..fc3c16f
--- /dev/null
+++ b/target/product/treble_common_64.mk
@@ -0,0 +1,37 @@
+#
+# Copyright (C) 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# PRODUCT_PROPERTY_OVERRIDES cannot be used here because sysprops will be at
+# /vendor/[build|default].prop when build split is on. In order to have sysprops
+# on the generic system image, place them in build/make/target/board/
+# treble_system.prop.
+
+include build/make/target/product/treble_common.mk
+
+# For now this will allow 64-bit apps, but apps with JNI will still be
+# compiled for 32-bit only.
+
+# Copy both zygote settings so that vendor.img can select between them by
+# setting ro.zygote=zygote64_32 or ro.zygote=zygote32_64:
+#   1. 64-bit primary, 32-bit secondary OR
+#   2. 32-bit primary, 64-bit secondary
+# 64-bit only is currently forbidden (b/64280459#comment6).
+PRODUCT_COPY_FILES += \
+    system/core/rootdir/init.zygote64_32.rc:root/init.zygote64_32.rc \
+    system/core/rootdir/init.zygote32_64.rc:root/init.zygote32_64.rc
+
+TARGET_SUPPORTS_32_BIT_APPS := true
+TARGET_SUPPORTS_64_BIT_APPS := true
diff --git a/target/product/vndk/Android.mk b/target/product/vndk/Android.mk
index 6e8a85f..93aaf37 100644
--- a/target/product/vndk/Android.mk
+++ b/target/product/vndk/Android.mk
@@ -2,25 +2,6 @@
 LOCAL_PATH:= $(call my-dir)
 
 #####################################################################
-# Setting the VNDK version. Version is 10000.0 for not-yet-published
-# platform and xx.y for released platform.
-vndk_major_ver := 10000
-vndk_minor_ver := 0
-
-#TODO(b/68027291): Revive this check when we have stable VNDK in P or later.
-#ifneq ($(vndk_major_ver), $(PLATFORM_SDK_VERSION))
-#$(error vndk_major_version does not match PLATFORM_SDK_VERSION, please update.)
-#endif
-
-ifneq (REL,$(PLATFORM_VERSION_CODENAME))
-    vndk_major_ver := 10000
-    vndk_minor_ver := 0
-endif
-PLATFORM_VNDK_VERSION := $(vndk_major_ver).$(vndk_minor_ver)
-vndk_major_ver :=
-vndk_minor_ver :=
-
-#####################################################################
 # Create the list of vndk libraries from the source code.
 INTERNAL_VNDK_LIB_LIST := $(call intermediates-dir-for,PACKAGING,vndk)/libs.txt
 $(INTERNAL_VNDK_LIB_LIST):
@@ -103,6 +84,12 @@
     $(LLNDK_LIBRARIES) \
     llndk.libraries.txt \
     vndksp.libraries.txt
-
 include $(BUILD_PHONY_PACKAGE)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := vndk_snapshot_package
+LOCAL_REQUIRED_MODULES := \
+    $(foreach vndk_ver,$(PRODUCT_EXTRA_VNDK_VERSIONS),vndk_v$(vndk_ver)_$(TARGET_ARCH))
+include $(BUILD_PHONY_PACKAGE)
+
 endif # BOARD_VNDK_VERSION is set
diff --git a/target/product/vndk/init.gsi.rc b/target/product/vndk/init.gsi.rc
new file mode 100644
index 0000000..3e6b1fb
--- /dev/null
+++ b/target/product/vndk/init.gsi.rc
@@ -0,0 +1,2 @@
+# If ro.vndk.version is not defined, import init.noenforce.rc
+import /system/etc/init/gsi/init.${ro.vndk.version:-noenforce}.rc
diff --git a/target/product/vndk/init.noenforce.rc b/target/product/vndk/init.noenforce.rc
new file mode 100644
index 0000000..6cf1df7
--- /dev/null
+++ b/target/product/vndk/init.noenforce.rc
@@ -0,0 +1,5 @@
+on early-init
+    # If ro.vndk.version is not set, use ld.config.noenforce.txt
+    export LD_CONFIG_FILE /system/etc/ld.config.noenforce.txt
+    # To use current VNDK libs, set ro.vndk.version to system vndk version
+    setprop ro.vndk.version ${ro.gsi.vndk.version}
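Taken together, the two rc files above make a GSI fall back to the non-enforcing linker configuration when ro.vndk.version is undefined: the ${prop:-default} form in init.gsi.rc expands to the property's value when set, and to the literal default otherwise. A minimal Python sketch of that substitution, assuming only the semantics the comments describe:

```
def expand_prop(props, name, default):
    # Sketch of init's ${name:-default} substitution (assumed semantics).
    value = props.get(name, '')
    return value if value else default

# On a device with no VNDK version defined, the import resolves to the
# non-enforcing rc file:
props = {}  # ro.vndk.version unset
print('import /system/etc/init/gsi/init.%s.rc'
      % expand_prop(props, 'ro.vndk.version', 'noenforce'))
# -> import /system/etc/init/gsi/init.noenforce.rc
```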
diff --git a/tools/adbs b/tools/adbs
deleted file mode 100755
index a8f06c0..0000000
--- a/tools/adbs
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2009 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the 'License');
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an 'AS IS' BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import os.path
-import re
-import string
-import sys
-
-sys.path.insert(0, os.path.dirname(__file__) + "/../../development/scripts")
-import stack_core
-import symbol
-
-if __name__ == '__main__':
-  # pass the options to adb
-  adb_cmd  = "adb " + ' '.join(sys.argv[1:])
-
-  # create tracer for line parsing
-  tracer = stack_core.TraceConverter()
-
-  # invoke the adb command and filter its output
-  stream = os.popen(adb_cmd)
-  while (True):
-    line = stream.readline()
-    if (line == ''):
-      break
-    if(tracer.ProcessLine(line) == False):
-      print(line.strip())
-      sys.stdout.flush()
-
-  # adb itself aborts
-  stream.close()
diff --git a/tools/auto_gen_test_config.py b/tools/auto_gen_test_config.py
new file mode 100755
index 0000000..c7c5bdc
--- /dev/null
+++ b/tools/auto_gen_test_config.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A tool to generate TradeFed test config file.
+"""
+
+import os
+import shutil
+import sys
+from xml.dom.minidom import parse
+
+ATTRIBUTE_LABEL = 'android:label'
+ATTRIBUTE_RUNNER = 'android:name'
+ATTRIBUTE_PACKAGE = 'package'
+
+PLACEHOLDER_LABEL = '{LABEL}'
+PLACEHOLDER_MODULE = '{MODULE}'
+PLACEHOLDER_PACKAGE = '{PACKAGE}'
+PLACEHOLDER_RUNNER = '{RUNNER}'
+PLACEHOLDER_TEST_TYPE = '{TEST_TYPE}'
+
+
+def main(argv):
+  """Entry point of auto_gen_test_config.
+
+  Args:
+    argv: A list of arguments.
+  Returns:
+    0 if no error, otherwise 1.
+  """
+  if len(argv) != 4:
+    sys.stderr.write(
+        'Invalid arguments. The script requires 4 arguments for file paths: '
+        'target_config android_manifest empty_config '
+        'instrumentation_test_config_template.\n')
+    return 1
+  target_config = argv[0]
+  android_manifest = argv[1]
+  empty_config = argv[2]
+  instrumentation_test_config_template = argv[3]
+
+  manifest = parse(android_manifest)
+  instrumentation_elements = manifest.getElementsByTagName('instrumentation')
+  manifest_elements = manifest.getElementsByTagName('manifest')
+  if len(instrumentation_elements) != 1 or len(manifest_elements) != 1:
+    # Failed to locate the instrumentation or manifest element in the
+    # AndroidManifest.xml file. An empty test config file will be created.
+    shutil.copyfile(empty_config, target_config)
+    return 0
+
+  module = os.path.splitext(os.path.basename(target_config))[0]
+  instrumentation = instrumentation_elements[0]
+  manifest = manifest_elements[0]
+  if instrumentation.attributes.has_key(ATTRIBUTE_LABEL):
+    label = instrumentation.attributes[ATTRIBUTE_LABEL].value
+  else:
+    label = module
+  runner = instrumentation.attributes[ATTRIBUTE_RUNNER].value
+  package = manifest.attributes[ATTRIBUTE_PACKAGE].value
+  test_type = ('InstrumentationTest'
+               if runner.endswith('.InstrumentationTestRunner')
+               else 'AndroidJUnitTest')
+
+  with open(instrumentation_test_config_template) as template:
+    config = template.read()
+    config = config.replace(PLACEHOLDER_LABEL, label)
+    config = config.replace(PLACEHOLDER_MODULE, module)
+    config = config.replace(PLACEHOLDER_PACKAGE, package)
+    config = config.replace(PLACEHOLDER_TEST_TYPE, test_type)
+    config = config.replace(PLACEHOLDER_RUNNER, runner)
+    with open(target_config, 'w') as config_file:
+      config_file.write(config)
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
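For reference, a hypothetical invocation of the script above. The first argument is the output config and the remaining three are inputs; the module and manifest paths here are made up, while the empty config and template live under build/make/core/ (as the companion test below assumes):

```
import auto_gen_test_config

# Illustrative paths only -- the build system supplies the real ones.
ret = auto_gen_test_config.main([
    'out/MyTests.config',                  # target_config (output)
    'MyTests/AndroidManifest.xml',         # android_manifest (input)
    'build/make/core/empty_test_config.xml',
    'build/make/core/instrumentation_test_config_template.xml',
])
print('exit code: %d' % ret)
```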
diff --git a/tools/auto_gen_test_config_test.py b/tools/auto_gen_test_config_test.py
new file mode 100644
index 0000000..e68c27f
--- /dev/null
+++ b/tools/auto_gen_test_config_test.py
@@ -0,0 +1,173 @@
+#!/usr/bin/env python
+#
+# Copyright 2017, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Unittests for auto_gen_test_config."""
+
+import os
+import shutil
+import tempfile
+import unittest
+
+import auto_gen_test_config
+
+TEST_MODULE = 'TestModule'
+
+MANIFEST_INVALID = """<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android">
+</manifest>
+"""
+
+MANIFEST_JUNIT_TEST = """<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+  package="com.android.my.tests.x">
+    <instrumentation
+        android:name="android.support.test.runner.AndroidJUnitRunner"
+        android:targetPackage="com.android.my.tests" />
+</manifest>
+"""
+
+MANIFEST_INSTRUMENTATION_TEST = """<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+  package="com.android.my.tests.x">
+    <instrumentation
+        android:name="android.test.InstrumentationTestRunner"
+        android:targetPackage="com.android.my.tests"
+        android:label="My Tests" />
+</manifest>
+"""
+
+EXPECTED_JUNIT_TEST_CONFIG = """<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2017 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<!-- This test config file is auto-generated. -->
+<configuration description="Runs TestModule.">
+    <target_preparer class="com.android.tradefed.targetprep.suite.SuiteApkInstaller">
+        <option name="test-file-name" value="TestModule.apk" />
+    </target_preparer>
+
+    <test class="com.android.tradefed.testtype.AndroidJUnitTest" >
+        <option name="package" value="com.android.my.tests.x" />
+        <option name="runner" value="android.support.test.runner.AndroidJUnitRunner" />
+    </test>
+</configuration>
+"""
+
+EXPECTED_INSTRUMENTATION_TEST_CONFIG = """<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2017 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<!-- This test config file is auto-generated. -->
+<configuration description="Runs My Tests.">
+    <target_preparer class="com.android.tradefed.targetprep.suite.SuiteApkInstaller">
+        <option name="test-file-name" value="TestModule.apk" />
+    </target_preparer>
+
+    <test class="com.android.tradefed.testtype.InstrumentationTest" >
+        <option name="package" value="com.android.my.tests.x" />
+        <option name="runner" value="android.test.InstrumentationTestRunner" />
+    </test>
+</configuration>
+"""
+
+TOOLS_DIR = os.path.dirname(os.path.dirname(__file__))
+EMPTY_TEST_CONFIG = os.path.join(
+    TOOLS_DIR, '..', 'core', 'empty_test_config.xml')
+INSTRUMENTATION_TEST_CONFIG_TEMPLATE = os.path.join(
+    TOOLS_DIR, '..', 'core', 'instrumentation_test_config_template.xml')
+
+
+class AutoGenTestConfigUnittests(unittest.TestCase):
+  """Unittests for auto_gen_test_config."""
+
+  def setUp(self):
+    """Setup directory for test."""
+    self.test_dir = tempfile.mkdtemp()
+    self.config_file = os.path.join(self.test_dir, TEST_MODULE + '.config')
+    self.manifest_file = os.path.join(self.test_dir, 'AndroidManifest.xml')
+
+  def tearDown(self):
+    """Cleanup the test directory."""
+    shutil.rmtree(self.test_dir, ignore_errors=True)
+
+  def testInvalidManifest(self):
+    """An empty test config should be generated if AndroidManifest is invalid.
+    """
+    with open(self.manifest_file, 'w') as f:
+      f.write(MANIFEST_INVALID)
+
+    argv = [self.config_file,
+            self.manifest_file,
+            EMPTY_TEST_CONFIG,
+            INSTRUMENTATION_TEST_CONFIG_TEMPLATE]
+    auto_gen_test_config.main(argv)
+    with open(self.config_file) as config_file:
+      with open(EMPTY_TEST_CONFIG) as empty_config:
+        self.assertEqual(config_file.read(), empty_config.read())
+
+  def testCreateJUnitTestConfig(self):
+    """Test creating test config for AndroidJUnitTest.
+    """
+    with open(self.manifest_file, 'w') as f:
+      f.write(MANIFEST_JUNIT_TEST)
+
+    argv = [self.config_file,
+            self.manifest_file,
+            EMPTY_TEST_CONFIG,
+            INSTRUMENTATION_TEST_CONFIG_TEMPLATE]
+    auto_gen_test_config.main(argv)
+    with open(self.config_file) as config_file:
+      self.assertEqual(config_file.read(), EXPECTED_JUNIT_TEST_CONFIG)
+
+  def testCreateInstrumentationTestConfig(self):
+    """Test creating test config for InstrumentationTest.
+    """
+    with open(self.manifest_file, 'w') as f:
+      f.write(MANIFEST_INSTRUMENTATION_TEST)
+
+    argv = [self.config_file,
+            self.manifest_file,
+            EMPTY_TEST_CONFIG,
+            INSTRUMENTATION_TEST_CONFIG_TEMPLATE]
+    auto_gen_test_config.main(argv)
+    with open(self.config_file) as config_file:
+      self.assertEqual(
+          config_file.read(), EXPECTED_INSTRUMENTATION_TEST_CONFIG)
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/tools/check_builds.sh b/tools/check_builds.sh
index c255bf0..7e4ea7c 100644
--- a/tools/check_builds.sh
+++ b/tools/check_builds.sh
@@ -65,7 +65,7 @@
         inputs="$inputs $TEST_BUILD_DIR/dist-$1/installed-files.txt"
         shift
     done
-    build/tools/compare_fileslist.py $inputs > $TEST_BUILD_DIR/sizes.html
+    build/make/tools/compare_fileslist.py $inputs > $TEST_BUILD_DIR/sizes.html
 }
 
 function check_builds
@@ -87,6 +87,6 @@
         fi
         shift
     done
-    build/tools/compare_fileslist.py $inputs > $TEST_BUILD_DIR/sizes.html
+    build/make/tools/compare_fileslist.py $inputs > $TEST_BUILD_DIR/sizes.html
 }
 
diff --git a/tools/docker/.gitignore b/tools/docker/.gitignore
new file mode 100644
index 0000000..df0b367
--- /dev/null
+++ b/tools/docker/.gitignore
@@ -0,0 +1 @@
+gitconfig
diff --git a/tools/docker/Dockerfile b/tools/docker/Dockerfile
new file mode 100644
index 0000000..ec65aaf
--- /dev/null
+++ b/tools/docker/Dockerfile
@@ -0,0 +1,25 @@
+FROM ubuntu:14.04
+ARG userid
+ARG groupid
+ARG username
+
+RUN apt-get update && apt-get install -y git-core gnupg flex bison gperf build-essential zip curl zlib1g-dev gcc-multilib g++-multilib libc6-dev-i386 lib32ncurses5-dev x11proto-core-dev libx11-dev lib32z-dev ccache libgl1-mesa-dev libxml2-utils xsltproc unzip python openjdk-7-jdk
+
+RUN curl -o jdk8.tgz https://android.googlesource.com/platform/prebuilts/jdk/jdk8/+archive/master.tar.gz \
+ && tar -zxf jdk8.tgz linux-x86 \
+ && mv linux-x86 /usr/lib/jvm/java-8-openjdk-amd64 \
+ && rm -rf jdk8.tgz
+
+RUN curl -o /usr/local/bin/repo https://storage.googleapis.com/git-repo-downloads/repo \
+ && echo "e147f0392686c40cfd7d5e6f332c6ee74c4eab4d24e2694b3b0a0c037bf51dc5  /usr/local/bin/repo" | sha256sum --strict -c - \
+ && chmod a+x /usr/local/bin/repo
+
+RUN groupadd -g $groupid $username \
+ && useradd -m -u $userid -g $groupid $username \
+ && echo $username >/root/username \
+ && echo "export USER="$username >>/home/$username/.gitconfig
+COPY gitconfig /home/$username/.gitconfig
+RUN chown $userid:$groupid /home/$username/.gitconfig
+ENV HOME=/home/$username
+
+ENTRYPOINT chroot --userspec=$(cat /root/username):$(cat /root/username) / /bin/bash -i
diff --git a/tools/docker/README.md b/tools/docker/README.md
new file mode 100644
index 0000000..304fd18
--- /dev/null
+++ b/tools/docker/README.md
@@ -0,0 +1,18 @@
+The Dockerfile in this directory sets up an Ubuntu Trusty image ready to build
+a variety of Android branches (>= Lollipop). It's particularly useful to build
+older branches that required 14.04 if you've upgraded to something newer.
+
+First, build the image:
+```
+# Copy your host gitconfig, or create a stripped down version
+$ cp ~/.gitconfig gitconfig
+$ docker build --build-arg userid=$(id -u) --build-arg groupid=$(id -g) --build-arg username=$(id -un) -t android-build-trusty .
+```
+
+Then you can start up new instances with:
+```
+$ docker run -it --rm -v $ANDROID_BUILD_TOP:/src android-build-trusty
+> cd /src; source build/envsetup.sh
+> lunch aosp_arm-eng
+> m -j50
+```
diff --git a/tools/droiddoc/test/stubs/run.sh b/tools/droiddoc/test/stubs/run.sh
index 2ea15a6..7c8a894 100755
--- a/tools/droiddoc/test/stubs/run.sh
+++ b/tools/droiddoc/test/stubs/run.sh
@@ -14,7 +14,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-DIR=build/tools/droiddoc/test/stubs
+DIR=build/make/tools/droiddoc/test/stubs
 
 pushd $TOP
 
diff --git a/tools/fs_config/Android.mk b/tools/fs_config/Android.mk
index 3773d38..1247896 100644
--- a/tools/fs_config/Android.mk
+++ b/tools/fs_config/Android.mk
@@ -261,6 +261,7 @@
 
 LOCAL_MODULE := passwd
 LOCAL_MODULE_CLASS := ETC
+LOCAL_VENDOR_MODULE := true
 
 include $(BUILD_SYSTEM)/base_rules.mk
 
@@ -279,6 +280,7 @@
 
 LOCAL_MODULE := group
 LOCAL_MODULE_CLASS := ETC
+LOCAL_VENDOR_MODULE := true
 
 include $(BUILD_SYSTEM)/base_rules.mk
 
diff --git a/tools/fs_config/fs_config.c b/tools/fs_config/fs_config.c
index 48f300b..2952875 100644
--- a/tools/fs_config/fs_config.c
+++ b/tools/fs_config/fs_config.c
@@ -67,17 +67,18 @@
 }
 
 static void usage() {
-  fprintf(stderr, "Usage: fs_config [-D product_out_path] [-S context_file] [-C]\n");
+  fprintf(stderr, "Usage: fs_config [-D product_out_path] [-S context_file] [-R root] [-C]\n");
 }
 
 int main(int argc, char** argv) {
   char buffer[1024];
   const char* context_file = NULL;
   const char* product_out_path = NULL;
+  char* root_path = NULL;
   struct selabel_handle* sehnd = NULL;
   int print_capabilities = 0;
   int opt;
-  while((opt = getopt(argc, argv, "CS:D:")) != -1) {
+  while((opt = getopt(argc, argv, "CS:R:D:")) != -1) {
     switch(opt) {
     case 'C':
       print_capabilities = 1;
@@ -85,6 +86,9 @@
     case 'S':
       context_file = optarg;
       break;
+    case 'R':
+      root_path = optarg;
+      break;
     case 'D':
       product_out_path = optarg;
       break;
@@ -98,6 +102,14 @@
     sehnd = get_sehnd(context_file);
   }
 
+  if (root_path != NULL) {
+    size_t root_len = strlen(root_path);
+    /* Trim any trailing slashes from the root path. */
+    while (root_len && root_path[--root_len] == '/') {
+      root_path[root_len] = '\0';
+    }
+  }
+
   while (fgets(buffer, 1023, stdin) != NULL) {
     int is_dir = 0;
     int i;
@@ -122,6 +134,10 @@
     unsigned uid = 0, gid = 0, mode = 0;
     uint64_t capabilities;
     fs_config(buffer, is_dir, product_out_path, &uid, &gid, &mode, &capabilities);
+    if (root_path != NULL && strcmp(buffer, root_path) == 0) {
+      /* The root of the filesystem needs to be an empty string. */
+      strcpy(buffer, "");
+    }
     printf("%s %d %d %o", buffer, uid, gid, mode);
 
     if (sehnd != NULL) {
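The new -R flag trims trailing slashes from the supplied root and then reports the root entry itself as an empty path. A rough Python equivalent of that logic, for illustration only:

```
def apply_root(path, root_path=None):
    # Sketch of fs_config's -R handling: trim trailing '/' from the root,
    # then emit the root of the filesystem as an empty string.
    if root_path is not None:
        root_path = root_path.rstrip('/')
        if path == root_path:
            return ''
    return path

assert apply_root('system/bin/sh') == 'system/bin/sh'
assert apply_root('root', root_path='root///') == ''
```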
diff --git a/tools/fs_config/fs_config_generator.py b/tools/fs_config/fs_config_generator.py
index c8d1dd3..d51d075 100755
--- a/tools/fs_config/fs_config_generator.py
+++ b/tools/fs_config/fs_config_generator.py
@@ -146,18 +146,27 @@
             found (str): The file found in, not required to be specified.
 
         Raises:
+            ValueError: if the friendly name is longer than 31 characters,
+                since that is the limit of bionic's internal name buffer.
             ValueError: if value is not a valid string number as processed by
                 int(x, 0)
         """
         self.identifier = identifier
         self.value = value
         self.found = found
-        self.normalized_value = str(int(value, 0))
+        try:
+            self.normalized_value = str(int(value, 0))
+        except ValueError:
+            raise ValueError('Invalid "value", not aid number, got: \"%s\"' % value)
 
         # Where we calculate the friendly name
         friendly = identifier[len(AID.PREFIX):].lower()
         self.friendly = AID._fixup_friendly(friendly)
 
+        if len(self.friendly) > 31:
+            raise ValueError('AID names must be under 32 characters "%s"' % self.friendly)
+
+
     def __eq__(self, other):
 
         return self.identifier == other.identifier \
@@ -639,10 +648,8 @@
 
         try:
             aid = AID(section_name, value, file_name)
-        except ValueError:
-            sys.exit(
-                error_message('Invalid "value", not aid number, got: \"%s\"' %
-                              value))
+        except ValueError as exception:
+            sys.exit(error_message(exception))
 
         # Values must be within OEM range
         if not Utils.in_any_range(int(aid.value, 0), self._oem_ranges):
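The two checks added to AID.__init__ above boil down to a value parse and a name-length limit. A simplified, standalone sketch of that validation (the AID_ prefix handling is reduced to its essentials; the real class also normalizes the friendly name further):

```
def validate_aid(identifier, value):
    # Simplified sketch of the checks AID.__init__ now performs.
    try:
        normalized_value = str(int(value, 0))  # accepts decimal, hex, octal
    except ValueError:
        raise ValueError('Invalid "value", not aid number, got: "%s"' % value)

    friendly = identifier[len('AID_'):].lower()
    if len(friendly) > 31:
        # bionic's name buffer holds 31 characters plus a terminator.
        raise ValueError('AID names must be under 32 characters "%s"' % friendly)

    return friendly, normalized_value

print(validate_aid('AID_VENDOR_FOO', '0x2900'))  # ('vendor_foo', '10496')
```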
diff --git a/tools/libhost/Android.bp b/tools/libhost/Android.bp
index e5a5ecf..4c9100f 100644
--- a/tools/libhost/Android.bp
+++ b/tools/libhost/Android.bp
@@ -10,6 +10,7 @@
     name: "libhost",
     target: {
         windows: {
+            cflags: ["-Wno-unused-parameter"],
             enabled: true,
         },
     },
diff --git a/tools/libhost/CopyFile.c b/tools/libhost/CopyFile.c
index bd65f1e..f9bda86 100644
--- a/tools/libhost/CopyFile.c
+++ b/tools/libhost/CopyFile.c
@@ -352,7 +352,12 @@
  * need to trash it so we can create one.
  */
 #if defined(_WIN32)
-extern int copySymlink(const char* src, const char* dst, const struct stat* pSrcStat, unsigned int options) __attribute__((error("no symlinks on Windows")));
+extern int copySymlink(const char* src, const char* dst, const struct stat* pSrcStat, unsigned int options)
+#ifdef __clang__
+  __attribute__((unavailable("no symlinks on Windows")));
+#else
+  __attribute__((error("no symlinks on Windows")));
+#endif
 #else
 static int copySymlink(const char* src, const char* dst, const struct stat* pSrcStat, unsigned int options)
 {
@@ -574,8 +579,10 @@
         } else {
             retVal = copyDirectory(src, dst, &srcStat, options);
         }
+#if !defined(_WIN32)
     } else if (S_ISLNK(srcStat.st_mode)) {
         retVal = copySymlink(src, dst, &srcStat, options);
+#endif
     } else if (S_ISREG(srcStat.st_mode)) {
         retVal = copyRegular(src, dst, &srcStat, options);
     } else {
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index a882685..f68976e 100755
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -45,19 +45,12 @@
 
 from __future__ import print_function
 
-import sys
-
-if sys.hexversion < 0x02070000:
-  print("Python 2.7 or newer is required.", file=sys.stderr)
-  sys.exit(1)
-
 import datetime
-import hashlib
 import os
 import shlex
 import shutil
 import subprocess
-import tempfile
+import sys
 import uuid
 import zipfile
 
@@ -66,6 +59,10 @@
 import rangelib
 import sparse_img
 
+if sys.hexversion < 0x02070000:
+  print("Python 2.7 or newer is required.", file=sys.stderr)
+  sys.exit(1)
+
 OPTIONS = common.OPTIONS
 
 OPTIONS.add_missing = False
@@ -76,6 +73,10 @@
 OPTIONS.is_signing = False
 
 
+# Partitions that should have their care_map added to META/care_map.txt.
+PARTITIONS_WITH_CARE_MAP = ('system', 'vendor', 'product')
+
+
 class OutputFile(object):
   def __init__(self, output_zip, input_dir, prefix, name):
     self._output_zip = output_zip
@@ -95,13 +96,19 @@
 
 
 def GetCareMap(which, imgname):
-  """Generate care_map of system (or vendor) partition"""
+  """Returns the care_map string for the given partition.
 
-  assert which in ("system", "vendor")
+  Args:
+    which: The partition name, must be listed in PARTITIONS_WITH_CARE_MAP.
+    imgname: The filename of the image.
+
+  Returns:
+    (which, care_map_ranges): care_map_ranges is the raw string of the care_map
+    RangeSet.
+  """
+  assert which in PARTITIONS_WITH_CARE_MAP
 
   simg = sparse_img.SparseImage(imgname)
-  care_map_list = [which]
-
   care_map_ranges = simg.care_map
   key = which + "_adjusted_partition_size"
   adjusted_blocks = OPTIONS.info_dict.get(key)
@@ -110,17 +117,16 @@
     care_map_ranges = care_map_ranges.intersect(rangelib.RangeSet(
         "0-%d" % (adjusted_blocks,)))
 
-  care_map_list.append(care_map_ranges.to_string_raw())
-  return care_map_list
+  return [which, care_map_ranges.to_string_raw()]
 
 
-def AddSystem(output_zip, prefix="IMAGES/", recovery_img=None, boot_img=None):
+def AddSystem(output_zip, recovery_img=None, boot_img=None):
   """Turn the contents of SYSTEM into a system image and store it in
   output_zip. Returns the name of the system image file."""
 
-  img = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "system.img")
+  img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "system.img")
   if os.path.exists(img.input_name):
-    print("system.img already exists in %s, no need to rebuild..." % (prefix,))
+    print("system.img already exists; no need to rebuild...")
     return img.input_name
 
   def output_sink(fn, data):
@@ -139,51 +145,66 @@
     common.MakeRecoveryPatch(OPTIONS.input_tmp, output_sink, recovery_img,
                              boot_img, info_dict=OPTIONS.info_dict)
 
-  block_list = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "system.map")
+  block_list = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "system.map")
   CreateImage(OPTIONS.input_tmp, OPTIONS.info_dict, "system", img,
               block_list=block_list)
 
   return img.name
 
 
-def AddSystemOther(output_zip, prefix="IMAGES/"):
+def AddSystemOther(output_zip):
   """Turn the contents of SYSTEM_OTHER into a system_other image
   and store it in output_zip."""
 
-  img = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "system_other.img")
+  img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "system_other.img")
   if os.path.exists(img.input_name):
-    print("system_other.img already exists in %s, no need to rebuild..." % (
-        prefix,))
+    print("system_other.img already exists; no need to rebuild...")
     return
 
   CreateImage(OPTIONS.input_tmp, OPTIONS.info_dict, "system_other", img)
 
 
-def AddVendor(output_zip, prefix="IMAGES/"):
+def AddVendor(output_zip):
   """Turn the contents of VENDOR into a vendor image and store in it
   output_zip."""
 
-  img = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "vendor.img")
+  img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "vendor.img")
   if os.path.exists(img.input_name):
-    print("vendor.img already exists in %s, no need to rebuild..." % (prefix,))
+    print("vendor.img already exists; no need to rebuild...")
     return img.input_name
 
-  block_list = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "vendor.map")
+  block_list = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "vendor.map")
   CreateImage(OPTIONS.input_tmp, OPTIONS.info_dict, "vendor", img,
               block_list=block_list)
   return img.name
 
 
-def AddDtbo(output_zip, prefix="IMAGES/"):
+def AddProduct(output_zip):
+  """Turn the contents of PRODUCT into a product image and store it in
+  output_zip."""
+
+  img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "product.img")
+  if os.path.exists(img.input_name):
+    print("product.img already exists; no need to rebuild...")
+    return img.input_name
+
+  block_list = OutputFile(
+      output_zip, OPTIONS.input_tmp, "IMAGES", "product.map")
+  CreateImage(
+      OPTIONS.input_tmp, OPTIONS.info_dict, "product", img,
+      block_list=block_list)
+  return img.name
+
+
+def AddDtbo(output_zip):
   """Adds the DTBO image.
 
-  Uses the image under prefix if it already exists. Otherwise looks for the
+  Uses the image under IMAGES/ if it already exists. Otherwise looks for the
   image under PREBUILT_IMAGES/, signs it as needed, and returns the image name.
   """
-
-  img = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "dtbo.img")
+  img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "dtbo.img")
   if os.path.exists(img.input_name):
-    print("dtbo.img already exists in %s, no need to rebuild..." % (prefix,))
+    print("dtbo.img already exists; no need to rebuild...")
     return img.input_name
 
   dtbo_prebuilt_path = os.path.join(
@@ -277,7 +298,7 @@
       info_dict[adjusted_blocks_key] = int(adjusted_blocks_value)/4096 - 1
 
 
-def AddUserdata(output_zip, prefix="IMAGES/"):
+def AddUserdata(output_zip):
   """Create a userdata image and store it in output_zip.
 
   In most case we just create and store an empty userdata.img;
@@ -286,10 +307,9 @@
   in OPTIONS.info_dict.
   """
 
-  img = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "userdata.img")
+  img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "userdata.img")
   if os.path.exists(img.input_name):
-    print("userdata.img already exists in %s, no need to rebuild..." % (
-        prefix,))
+    print("userdata.img already exists; no need to rebuild...")
     return
 
   # Skip userdata.img if no size.
@@ -308,8 +328,7 @@
   if OPTIONS.info_dict.get("userdata_img_with_data") == "true":
     user_dir = os.path.join(OPTIONS.input_tmp, "DATA")
   else:
-    user_dir = tempfile.mkdtemp()
-    OPTIONS.tempfiles.append(user_dir)
+    user_dir = common.MakeTempDir()
 
   fstab = OPTIONS.info_dict["fstab"]
   if fstab:
@@ -346,7 +365,7 @@
     cmd.extend(["--include_descriptors_from_image", img_path])
 
 
-def AddVBMeta(output_zip, partitions, prefix="IMAGES/"):
+def AddVBMeta(output_zip, partitions):
   """Creates a VBMeta image and store it in output_zip.
 
   Args:
@@ -355,18 +374,16 @@
         values. Only valid partition names are accepted, which include 'boot',
         'recovery', 'system', 'vendor', 'dtbo'.
   """
-  img = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "vbmeta.img")
+  img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "vbmeta.img")
   if os.path.exists(img.input_name):
-    print("vbmeta.img already exists in %s; not rebuilding..." % (prefix,))
+    print("vbmeta.img already exists; not rebuilding...")
     return img.input_name
 
   avbtool = os.getenv('AVBTOOL') or OPTIONS.info_dict["avb_avbtool"]
   cmd = [avbtool, "make_vbmeta_image", "--output", img.name]
   common.AppendAVBSigningArgs(cmd, "vbmeta")
 
-  public_key_dir = tempfile.mkdtemp(prefix="avbpubkey-")
-  OPTIONS.tempfiles.append(public_key_dir)
-
+  public_key_dir = common.MakeTempDir(prefix="avbpubkey-")
   for partition, path in partitions.items():
     assert partition in common.AVB_PARTITIONS, 'Unknown partition: %s' % (
         partition,)
@@ -388,9 +405,9 @@
         if os.path.exists(image_path):
           continue
         found = False
-        for dir in ['IMAGES', 'RADIO', 'VENDOR_IMAGES', 'PREBUILT_IMAGES']:
+        for dir_name in ['IMAGES', 'RADIO', 'VENDOR_IMAGES', 'PREBUILT_IMAGES']:
           alt_path = os.path.join(
-              OPTIONS.input_tmp, dir, os.path.basename(image_path))
+              OPTIONS.input_tmp, dir_name, os.path.basename(image_path))
           if os.path.exists(alt_path):
             split_args[index + 1] = alt_path
             found = True
@@ -404,11 +421,13 @@
   img.Write()
 
 
-def AddPartitionTable(output_zip, prefix="IMAGES/"):
+def AddPartitionTable(output_zip):
   """Create a partition table image and store it in output_zip."""
 
-  img = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "partition-table.img")
-  bpt = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "partition-table.bpt")
+  img = OutputFile(
+      output_zip, OPTIONS.input_tmp, "IMAGES", "partition-table.img")
+  bpt = OutputFile(
+      output_zip, OPTIONS.input_tmp, "IMAGES", "partition-table.bpt")
 
   # use BPTTOOL from environ, or "bpttool" if empty or not set.
   bpttool = os.getenv("BPTTOOL") or "bpttool"
@@ -433,12 +452,12 @@
   bpt.Write()
 
 
-def AddCache(output_zip, prefix="IMAGES/"):
+def AddCache(output_zip):
   """Create an empty cache image and store it in output_zip."""
 
-  img = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "cache.img")
+  img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "cache.img")
   if os.path.exists(img.input_name):
-    print("cache.img already exists in %s, no need to rebuild..." % (prefix,))
+    print("cache.img already exists; no need to rebuild...")
     return
 
   image_props = build_image.ImagePropFromGlobalDict(OPTIONS.info_dict, "cache")
@@ -454,8 +473,7 @@
   timestamp = (datetime.datetime(2009, 1, 1) - epoch).total_seconds()
   image_props["timestamp"] = int(timestamp)
 
-  user_dir = tempfile.mkdtemp()
-  OPTIONS.tempfiles.append(user_dir)
+  user_dir = common.MakeTempDir()
 
   fstab = OPTIONS.info_dict["fstab"]
   if fstab:
@@ -467,17 +485,133 @@
   img.Write()
 
 
+def AddRadioImagesForAbOta(output_zip, ab_partitions):
+  """Adds the radio images needed for A/B OTA to the output file.
+
+  It parses the list of A/B partitions, looks for the missing ones in RADIO/
+  or VENDOR_IMAGES/ dirs, and copies them to IMAGES/ of the output file (or
+  dir).
+
+  It also ensures that, on returning from the function, all the listed A/B
+  partitions have their images available under IMAGES/.
+
+  Args:
+    output_zip: The output zip file (needs to be already open), or None to
+        write images to OPTIONS.input_tmp/.
+    ab_partitions: The list of A/B partitions.
+
+  Raises:
+    AssertionError: If it can't find an image.
+  """
+  for partition in ab_partitions:
+    img_name = partition.strip() + ".img"
+    prebuilt_path = os.path.join(OPTIONS.input_tmp, "IMAGES", img_name)
+    if os.path.exists(prebuilt_path):
+      print("%s already exists, no need to overwrite..." % (img_name,))
+      continue
+
+    img_radio_path = os.path.join(OPTIONS.input_tmp, "RADIO", img_name)
+    if os.path.exists(img_radio_path):
+      if output_zip:
+        common.ZipWrite(output_zip, img_radio_path, "IMAGES/" + img_name)
+      else:
+        shutil.copy(img_radio_path, prebuilt_path)
+      continue
+
+    # Walk through VENDOR_IMAGES/ since files could be under subdirs.
+    img_vendor_dir = os.path.join(OPTIONS.input_tmp, "VENDOR_IMAGES")
+    for root, _, files in os.walk(img_vendor_dir):
+      if img_name in files:
+        if output_zip:
+          common.ZipWrite(output_zip, os.path.join(root, img_name),
+                          "IMAGES/" + img_name)
+        else:
+          shutil.copy(os.path.join(root, img_name), prebuilt_path)
+        break
+
+    # Assert that the image is present under IMAGES/ now.
+    if output_zip:
+      # Zip spec says: All slashes MUST be forward slashes.
+      img_path = 'IMAGES/' + img_name
+      assert img_path in output_zip.namelist(), "cannot find " + img_name
+    else:
+      img_path = os.path.join(OPTIONS.input_tmp, "IMAGES", img_name)
+      assert os.path.exists(img_path), "cannot find " + img_name
+
+
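As a standalone sketch (not build code), the search order the docstring above describes reduces to the following; input_tmp is the extracted target_files directory:

```
import os

def find_ab_image(input_tmp, partition):
    # Sketch of the lookup order: IMAGES/, then RADIO/, then VENDOR_IMAGES/.
    img_name = partition.strip() + '.img'
    for subdir in ('IMAGES', 'RADIO'):
        candidate = os.path.join(input_tmp, subdir, img_name)
        if os.path.exists(candidate):
            return candidate
    # VENDOR_IMAGES/ may nest images under subdirectories.
    for root, _, files in os.walk(os.path.join(input_tmp, 'VENDOR_IMAGES')):
        if img_name in files:
            return os.path.join(root, img_name)
    raise AssertionError('cannot find ' + img_name)
```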
+def AddCareMapTxtForAbOta(output_zip, ab_partitions, image_paths):
+  """Generates and adds care_map.txt for system and vendor partitions.
+
+  Args:
+    output_zip: The output zip file (needs to be already open), or None to
+        write images to OPTIONS.input_tmp/.
+    ab_partitions: The list of A/B partitions.
+    image_paths: A map from the partition name to the image path.
+  """
+  care_map_list = []
+  for partition in ab_partitions:
+    partition = partition.strip()
+    if partition not in PARTITIONS_WITH_CARE_MAP:
+      continue
+
+    verity_block_device = "{}_verity_block_device".format(partition)
+    avb_hashtree_enable = "avb_{}_hashtree_enable".format(partition)
+    if (verity_block_device in OPTIONS.info_dict or
+        OPTIONS.info_dict.get(avb_hashtree_enable) == "true"):
+      image_path = image_paths[partition]
+      assert os.path.exists(image_path)
+      care_map_list += GetCareMap(partition, image_path)
+
+  if care_map_list:
+    care_map_path = "META/care_map.txt"
+    if output_zip and care_map_path not in output_zip.namelist():
+      common.ZipWriteStr(output_zip, care_map_path, '\n'.join(care_map_list))
+    else:
+      with open(os.path.join(OPTIONS.input_tmp, care_map_path), 'w') as fp:
+        fp.write('\n'.join(care_map_list))
+      if output_zip:
+        OPTIONS.replace_updated_files_list.append(care_map_path)
+
+
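The care_map.txt this produces is a flat, newline-joined list of alternating partition names and raw RangeSet strings, as returned by GetCareMap. For illustration (the ranges below are made up):

```
care_map_list = ['system', '0-5,10-20', 'vendor', '0-3']  # hypothetical ranges
print('\n'.join(care_map_list))
# system
# 0-5,10-20
# vendor
# 0-3
```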
+def AddPackRadioImages(output_zip, images):
+  """Copies images listed in META/pack_radioimages.txt from RADIO/ to IMAGES/.
+
+  Args:
+    output_zip: The output zip file (needs to be already open), or None to
+        write images to OPTIONS.input_tmp/.
+    images: A list of image names.
+
+  Raises:
+    AssertionError: If a listed image can't be found.
+  """
+  for image in images:
+    img_name = image.strip()
+    _, ext = os.path.splitext(img_name)
+    if not ext:
+      img_name += ".img"
+
+    prebuilt_path = os.path.join(OPTIONS.input_tmp, "IMAGES", img_name)
+    if os.path.exists(prebuilt_path):
+      print("%s already exists, no need to overwrite..." % (img_name,))
+      continue
+
+    img_radio_path = os.path.join(OPTIONS.input_tmp, "RADIO", img_name)
+    assert os.path.exists(img_radio_path), \
+        "Failed to find %s at %s" % (img_name, img_radio_path)
+
+    if output_zip:
+      common.ZipWrite(output_zip, img_radio_path, "IMAGES/" + img_name)
+    else:
+      shutil.copy(img_radio_path, prebuilt_path)
+
+
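Entries in META/pack_radioimages.txt may omit the extension, in which case the function defaults it to .img. A small sketch of that normalization (the entry names are hypothetical):

```
import os

def normalize_image_name(image):
    # Append '.img' when the listed name has no extension.
    img_name = image.strip()
    if not os.path.splitext(img_name)[1]:
        img_name += '.img'
    return img_name

assert normalize_image_name('modem\n') == 'modem.img'
assert normalize_image_name('bootloader.bin') == 'bootloader.bin'
```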
 def ReplaceUpdatedFiles(zip_filename, files_list):
-  """Update all the zip entries listed in the files_list.
+  """Updates all the ZIP entries listed in files_list.
 
   For now the list includes META/care_map.txt, and the related files under
   SYSTEM/ after rebuilding recovery.
   """
-
-  cmd = ["zip", "-d", zip_filename] + files_list
-  p = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-  p.communicate()
-
+  common.ZipDelete(zip_filename, files_list)
   output_zip = zipfile.ZipFile(zip_filename, "a",
                                compression=zipfile.ZIP_DEFLATED,
                                allowZip64=True)
@@ -498,39 +632,43 @@
   The images will be created under IMAGES/ in the input target_files.zip.
 
   Args:
-      filename: the target_files.zip, or the zip root directory.
+    filename: the target_files.zip, or the zip root directory.
   """
   if os.path.isdir(filename):
     OPTIONS.input_tmp = os.path.abspath(filename)
-    input_zip = None
   else:
-    OPTIONS.input_tmp, input_zip = common.UnzipTemp(filename)
+    OPTIONS.input_tmp = common.UnzipTemp(filename)
 
   if not OPTIONS.add_missing:
     if os.path.isdir(os.path.join(OPTIONS.input_tmp, "IMAGES")):
       print("target_files appears to already contain images.")
       sys.exit(1)
 
-  # vendor.img is unlike system.img or system_other.img. Because it could be
-  # built from source, or dropped into target_files.zip as a prebuilt blob. We
-  # consider either of them as vendor.img being available, which could be used
-  # when generating vbmeta.img for AVB.
+  OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.input_tmp, OPTIONS.input_tmp)
+
+  has_recovery = OPTIONS.info_dict.get("no_recovery") != "true"
+
+  # {vendor,product}.img is unlike system.img or system_other.img. Because it
+  # could be built from source, or dropped into target_files.zip as a prebuilt
+  # blob. We consider either of them as {vendor,product}.img being available,
+  # which could be used when generating vbmeta.img for AVB.
   has_vendor = (os.path.isdir(os.path.join(OPTIONS.input_tmp, "VENDOR")) or
                 os.path.exists(os.path.join(OPTIONS.input_tmp, "IMAGES",
                                             "vendor.img")))
+  has_product = (os.path.isdir(os.path.join(OPTIONS.input_tmp, "PRODUCT")) or
+                 os.path.exists(os.path.join(OPTIONS.input_tmp, "IMAGES",
+                                             "product.img")))
   has_system_other = os.path.isdir(os.path.join(OPTIONS.input_tmp,
                                                 "SYSTEM_OTHER"))
 
-  if input_zip:
-    OPTIONS.info_dict = common.LoadInfoDict(input_zip, OPTIONS.input_tmp)
-
-    common.ZipClose(input_zip)
+  # Set up the output destination. It writes to the given directory for dir
+  # mode; otherwise appends to the given ZIP.
+  if os.path.isdir(filename):
+    output_zip = None
+  else:
     output_zip = zipfile.ZipFile(filename, "a",
                                  compression=zipfile.ZIP_DEFLATED,
                                  allowZip64=True)
-  else:
-    OPTIONS.info_dict = common.LoadInfoDict(filename, filename)
-    output_zip = None
 
   # Always make input_tmp/IMAGES available, since we may stage boot / recovery
   # images there even under zip mode. The directory will be cleaned up as part
@@ -539,19 +677,6 @@
   if not os.path.isdir(images_dir):
     os.makedirs(images_dir)
 
-  has_recovery = (OPTIONS.info_dict.get("no_recovery") != "true")
-
-  if OPTIONS.info_dict.get("avb_enable") == "true":
-    fp = None
-    if "build.prop" in OPTIONS.info_dict:
-      build_prop = OPTIONS.info_dict["build.prop"]
-      if "ro.build.fingerprint" in build_prop:
-        fp = build_prop["ro.build.fingerprint"]
-      elif "ro.build.thumbprint" in build_prop:
-        fp = build_prop["ro.build.thumbprint"]
-    if fp:
-      OPTIONS.info_dict["avb_salt"] = hashlib.sha256(fp).hexdigest()
-
   # A map between partition names and their paths, which could be used when
   # generating AVB vbmeta image.
   partitions = dict()
@@ -598,12 +723,16 @@
           recovery_two_step_image.AddToZip(output_zip)
 
   banner("system")
-  partitions['system'] = system_img_path = AddSystem(
+  partitions['system'] = AddSystem(
       output_zip, recovery_img=recovery_image, boot_img=boot_image)
 
   if has_vendor:
     banner("vendor")
-    partitions['vendor'] = vendor_img_path = AddVendor(output_zip)
+    partitions['vendor'] = AddVendor(output_zip)
+
+  if has_product:
+    banner("product")
+    partitions['product'] = AddProduct(output_zip)
 
   if has_system_other:
     banner("system_other")
@@ -627,96 +756,28 @@
     banner("vbmeta")
     AddVBMeta(output_zip, partitions)
 
-  # For devices using A/B update, copy over images from RADIO/ and/or
-  # VENDOR_IMAGES/ to IMAGES/ and make sure we have all the needed
-  # images ready under IMAGES/. All images should have '.img' as extension.
   banner("radio")
-  ab_partitions = os.path.join(OPTIONS.input_tmp, "META", "ab_partitions.txt")
-  if os.path.exists(ab_partitions):
-    with open(ab_partitions, 'r') as f:
-      lines = f.readlines()
-    # For devices using A/B update, generate care_map for system and vendor
-    # partitions (if present), then write this file to target_files package.
-    care_map_list = []
-    for line in lines:
-      if line.strip() == "system" and (
-          "system_verity_block_device" in OPTIONS.info_dict or
-          OPTIONS.info_dict.get("avb_system_hashtree_enable") == "true"):
-        assert os.path.exists(system_img_path)
-        care_map_list += GetCareMap("system", system_img_path)
-      if line.strip() == "vendor" and (
-          "vendor_verity_block_device" in OPTIONS.info_dict or
-          OPTIONS.info_dict.get("avb_vendor_hashtree_enable") == "true"):
-        assert os.path.exists(vendor_img_path)
-        care_map_list += GetCareMap("vendor", vendor_img_path)
+  ab_partitions_txt = os.path.join(OPTIONS.input_tmp, "META",
+                                   "ab_partitions.txt")
+  if os.path.exists(ab_partitions_txt):
+    with open(ab_partitions_txt, 'r') as f:
+      ab_partitions = f.readlines()
 
-      img_name = line.strip() + ".img"
-      prebuilt_path = os.path.join(OPTIONS.input_tmp, "IMAGES", img_name)
-      if os.path.exists(prebuilt_path):
-        print("%s already exists, no need to overwrite..." % (img_name,))
-        continue
+    # For devices using A/B update, copy over images from RADIO/ and/or
+    # VENDOR_IMAGES/ to IMAGES/ and make sure we have all the needed
+    # images ready under IMAGES/. All images should have '.img' as extension.
+    AddRadioImagesForAbOta(output_zip, ab_partitions)
 
-      img_radio_path = os.path.join(OPTIONS.input_tmp, "RADIO", img_name)
-      img_vendor_dir = os.path.join(
-        OPTIONS.input_tmp, "VENDOR_IMAGES")
-      if os.path.exists(img_radio_path):
-        if output_zip:
-          common.ZipWrite(output_zip, img_radio_path,
-                          os.path.join("IMAGES", img_name))
-        else:
-          shutil.copy(img_radio_path, prebuilt_path)
-      else:
-        for root, _, files in os.walk(img_vendor_dir):
-          if img_name in files:
-            if output_zip:
-              common.ZipWrite(output_zip, os.path.join(root, img_name),
-                os.path.join("IMAGES", img_name))
-            else:
-              shutil.copy(os.path.join(root, img_name), prebuilt_path)
-            break
-
-      if output_zip:
-        # Zip spec says: All slashes MUST be forward slashes.
-        img_path = 'IMAGES/' + img_name
-        assert img_path in output_zip.namelist(), "cannot find " + img_name
-      else:
-        img_path = os.path.join(OPTIONS.input_tmp, "IMAGES", img_name)
-        assert os.path.exists(img_path), "cannot find " + img_name
-
-    if care_map_list:
-      care_map_path = "META/care_map.txt"
-      if output_zip and care_map_path not in output_zip.namelist():
-        common.ZipWriteStr(output_zip, care_map_path, '\n'.join(care_map_list))
-      else:
-        with open(os.path.join(OPTIONS.input_tmp, care_map_path), 'w') as fp:
-          fp.write('\n'.join(care_map_list))
-        if output_zip:
-          OPTIONS.replace_updated_files_list.append(care_map_path)
+    # Generate care_map.txt for system and vendor partitions (if present),
+    # then write this file to the target_files package.

+    AddCareMapTxtForAbOta(output_zip, ab_partitions, partitions)
 
   # Radio images that need to be packed into IMAGES/, and product-img.zip.
-  pack_radioimages = os.path.join(
+  pack_radioimages_txt = os.path.join(
       OPTIONS.input_tmp, "META", "pack_radioimages.txt")
-  if os.path.exists(pack_radioimages):
-    with open(pack_radioimages, 'r') as f:
-      lines = f.readlines()
-    for line in lines:
-      img_name = line.strip()
-      _, ext = os.path.splitext(img_name)
-      if not ext:
-        img_name += ".img"
-      prebuilt_path = os.path.join(OPTIONS.input_tmp, "IMAGES", img_name)
-      if os.path.exists(prebuilt_path):
-        print("%s already exists, no need to overwrite..." % (img_name,))
-        continue
-
-      img_radio_path = os.path.join(OPTIONS.input_tmp, "RADIO", img_name)
-      assert os.path.exists(img_radio_path), \
-          "Failed to find %s at %s" % (img_name, img_radio_path)
-      if output_zip:
-        common.ZipWrite(output_zip, img_radio_path,
-                        os.path.join("IMAGES", img_name))
-      else:
-        shutil.copy(img_radio_path, prebuilt_path)
+  if os.path.exists(pack_radioimages_txt):
+    with open(pack_radioimages_txt, 'r') as f:
+      AddPackRadioImages(output_zip, f.readlines())
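For reference, META/pack_radioimages.txt is a plain list of image names, one per line, with '.img' appended when an extension is missing. A hedged sketch of the producer side, with hypothetical contents:

```
# Hypothetical contents written at build time; AddPackRadioImages() reads
# these names back and copies the matching images from RADIO/ into IMAGES/.
with open("META/pack_radioimages.txt", "w") as f:
  f.write("bootloader.img\n")
  f.write("radio\n")   # no extension: ".img" gets appended on the read side
```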
 
   if output_zip:
     common.ZipClose(output_zip)
diff --git a/tools/releasetools/blockimgdiff.py b/tools/releasetools/blockimgdiff.py
index 1ef55ff..24c5b2d 100644
--- a/tools/releasetools/blockimgdiff.py
+++ b/tools/releasetools/blockimgdiff.py
@@ -15,7 +15,6 @@
 from __future__ import print_function
 
 import array
-import common
 import copy
 import functools
 import heapq
@@ -27,9 +26,10 @@
 import subprocess
 import sys
 import threading
-
 from collections import deque, OrderedDict
 from hashlib import sha1
+
+import common
 from rangelib import RangeSet
 
 
@@ -191,8 +191,6 @@
     self.tgt_sha1 = tgt_sha1
     self.src_sha1 = src_sha1
     self.style = style
-    self.intact = (getattr(tgt_ranges, "monotonic", False) and
-                   getattr(src_ranges, "monotonic", False))
 
     # We use OrderedDict rather than dict so that the output is repeatable;
     # otherwise it would depend on the hash values of the Transfer objects.
@@ -237,19 +235,93 @@
 class HeapItem(object):
   def __init__(self, item):
     self.item = item
-    # Negate the score since python's heap is a min-heap and we want
-    # the maximum score.
+    # Negate the score since python's heap is a min-heap and we want the
+    # maximum score.
     self.score = -item.score
+
   def clear(self):
     self.item = None
+
   def __bool__(self):
-    return self.item is None
+    return self.item is not None
+
+  # Python 2 uses __nonzero__, while Python 3 uses __bool__.
+  __nonzero__ = __bool__
+
+  # The remaining comparison operations are generated by the
+  # functools.total_ordering decorator.
   def __eq__(self, other):
     return self.score == other.score
+
   def __le__(self, other):
     return self.score <= other.score
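The corrected `__bool__` (mirrored as `__nonzero__` for Python 2) makes a cleared item falsy, which the heap-walking code relies on. A small self-contained sketch:

```
class FakeTransfer(object):   # stand-in for a Transfer with a score
  score = 42

item = HeapItem(FakeTransfer())
assert bool(item)        # a live item is truthy
item.clear()
assert not bool(item)    # a cleared item is falsy (the test was inverted before)
```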
 
 
+class ImgdiffStats(object):
+  """A class that collects imgdiff stats.
+
+  It keeps track of the files to which imgdiff will be applied while
+  generating BlockImageDiff, and logs the ones that cannot use imgdiff, with
+  specific reasons. The stats are only meaningful when imgdiff hasn't been
+  disabled by the caller of BlockImageDiff. In addition, only files with
+  supported types (BlockImageDiff.FileTypeSupportedByImgdiff()) are allowed
+  to be logged.
+  """
+
+  USED_IMGDIFF = "APK files diff'd with imgdiff"
+  USED_IMGDIFF_LARGE_APK = "Large APK files split and diff'd with imgdiff"
+
+  # Reasons for not applying imgdiff on APKs.
+  SKIPPED_TRIMMED = "Not used imgdiff due to trimmed RangeSet"
+  SKIPPED_NONMONOTONIC = "Not used imgdiff due to having non-monotonic ranges"
+  SKIPPED_SHARED_BLOCKS = "Not used imgdiff due to using shared blocks"
+  SKIPPED_INCOMPLETE = "Not used imgdiff due to incomplete RangeSet"
+
+  # The list of valid reasons, which will also be the dumped order in a report.
+  REASONS = (
+      USED_IMGDIFF,
+      USED_IMGDIFF_LARGE_APK,
+      SKIPPED_TRIMMED,
+      SKIPPED_NONMONOTONIC,
+      SKIPPED_SHARED_BLOCKS,
+      SKIPPED_INCOMPLETE,
+  )
+
+  def __init__(self):
+    self.stats = {}
+
+  def Log(self, filename, reason):
+    """Logs why imgdiff can or cannot be applied to the given filename.
+
+    Args:
+      filename: The filename string.
+      reason: One of the reason constants listed in REASONS.
+
+    Raises:
+      AssertionError: On unsupported filetypes or invalid reason.
+    """
+    assert BlockImageDiff.FileTypeSupportedByImgdiff(filename)
+    assert reason in self.REASONS
+
+    if reason not in self.stats:
+      self.stats[reason] = set()
+    self.stats[reason].add(filename)
+
+  def Report(self):
+    """Prints a report of the collected imgdiff stats."""
+
+    def print_header(header, separator):
+      print(header)
+      print(separator * len(header) + '\n')
+
+    print_header('  Imgdiff Stats Report  ', '=')
+    for key in self.REASONS:
+      if key not in self.stats:
+        continue
+      values = self.stats[key]
+      section_header = ' {} (count: {}) '.format(key, len(values))
+      print_header(section_header, '-')
+      print(''.join(['  {}\n'.format(name) for name in values]))
+
+
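A minimal usage sketch of the new class, with hypothetical filenames:

```
stats = ImgdiffStats()
stats.Log("/system/app/Chrome/Chrome.apk", ImgdiffStats.USED_IMGDIFF_LARGE_APK)
stats.Log("/system/framework/foo.jar", ImgdiffStats.SKIPPED_TRIMMED)
stats.Report()   # prints counts and filenames, grouped in REASONS order
```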
 # BlockImageDiff works on two image objects.  An image object is
 # anything that provides the following attributes:
 #
@@ -305,6 +377,7 @@
     self.touched_src_ranges = RangeSet()
     self.touched_src_sha1 = None
     self.disable_imgdiff = disable_imgdiff
+    self.imgdiff_stats = ImgdiffStats() if not disable_imgdiff else None
 
     assert version in (3, 4)
 
@@ -326,6 +399,65 @@
   def max_stashed_size(self):
     return self._max_stashed_size
 
+  @staticmethod
+  def FileTypeSupportedByImgdiff(filename):
+    """Returns whether the file type is supported by imgdiff."""
+    return filename.lower().endswith(('.apk', '.jar', '.zip'))
+
+  def CanUseImgdiff(self, name, tgt_ranges, src_ranges, large_apk=False):
+    """Checks whether we can apply imgdiff for the given RangeSets.
+
+    For files in ZIP format (e.g., APKs, JARs, etc.), we would like to use
+    'imgdiff -z' if possible, because it usually produces significantly
+    smaller patches than bsdiff.
+
+    This is permissible if all of the following conditions hold.
+      - The imgdiff hasn't been disabled by the caller (e.g. squashfs);
+      - The file type is supported by imgdiff;
+      - The source and target blocks are monotonic (i.e. the data is stored with
+        blocks in increasing order);
+      - Both files don't contain shared blocks;
+      - Both files have complete lists of blocks;
+      - We haven't removed any blocks from the source set.
+
+    If all these conditions are satisfied, concatenating all the blocks in the
+    RangeSet in order will produce a valid ZIP file (plus possibly extra zeros
+    in the last block). imgdiff is fine with extra zeros at the end of the file.
+
+    Args:
+      name: The filename to be diff'd.
+      tgt_ranges: The target RangeSet.
+      src_ranges: The source RangeSet.
+      large_apk: Whether this is to split a large APK.
+
+    Returns:
+      True if imgdiff can be applied; False otherwise.
+    """
+    if self.disable_imgdiff or not self.FileTypeSupportedByImgdiff(name):
+      return False
+
+    if not tgt_ranges.monotonic or not src_ranges.monotonic:
+      self.imgdiff_stats.Log(name, ImgdiffStats.SKIPPED_NONMONOTONIC)
+      return False
+
+    if (tgt_ranges.extra.get('uses_shared_blocks') or
+        src_ranges.extra.get('uses_shared_blocks')):
+      self.imgdiff_stats.Log(name, ImgdiffStats.SKIPPED_SHARED_BLOCKS)
+      return False
+
+    if tgt_ranges.extra.get('incomplete') or src_ranges.extra.get('incomplete'):
+      self.imgdiff_stats.Log(name, ImgdiffStats.SKIPPED_INCOMPLETE)
+      return False
+
+    if tgt_ranges.extra.get('trimmed') or src_ranges.extra.get('trimmed'):
+      self.imgdiff_stats.Log(name, ImgdiffStats.SKIPPED_TRIMMED)
+      return False
+
+    reason = (ImgdiffStats.USED_IMGDIFF_LARGE_APK if large_apk
+              else ImgdiffStats.USED_IMGDIFF)
+    self.imgdiff_stats.Log(name, reason)
+    return True
+
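The monotonicity condition can be illustrated with rangelib directly, assuming RangeSet preserves the textual order of its ranges (which the checks above rely on):

```
from rangelib import RangeSet

print(RangeSet("10-19 30-39").monotonic)   # True: blocks in increasing order
print(RangeSet("30-39 10-19").monotonic)   # False: data laid out out of order
```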
   def Compute(self, prefix):
     # When looking for a source file to use as the diff input for a
     # target file, we try:
@@ -354,10 +486,15 @@
 
     # Double-check our work.
     self.AssertSequenceGood()
+    self.AssertSha1Good()
 
     self.ComputePatches(prefix)
     self.WriteTransfers(prefix)
 
+    # Report the imgdiff stats.
+    if common.OPTIONS.verbose and not self.disable_imgdiff:
+      self.imgdiff_stats.Report()
+
   def WriteTransfers(self, prefix):
     def WriteSplitTransfers(out, style, target_blocks):
       """Limit the size of operand in command 'new' and 'zero' to 1024 blocks.
@@ -410,7 +547,7 @@
       #   <# blocks> - <stash refs...>
 
       size = xf.src_ranges.size()
-      src_str = [str(size)]
+      src_str_buffer = [str(size)]
 
       unstashed_src_ranges = xf.src_ranges
       mapped_stashes = []
@@ -420,7 +557,7 @@
         sr = xf.src_ranges.map_within(sr)
         mapped_stashes.append(sr)
         assert sh in stashes
-        src_str.append("%s:%s" % (sh, sr.to_string_raw()))
+        src_str_buffer.append("%s:%s" % (sh, sr.to_string_raw()))
         stashes[sh] -= 1
         if stashes[sh] == 0:
           free_string.append("free %s\n" % (sh,))
@@ -428,17 +565,17 @@
           stashes.pop(sh)
 
       if unstashed_src_ranges:
-        src_str.insert(1, unstashed_src_ranges.to_string_raw())
+        src_str_buffer.insert(1, unstashed_src_ranges.to_string_raw())
         if xf.use_stash:
           mapped_unstashed = xf.src_ranges.map_within(unstashed_src_ranges)
-          src_str.insert(2, mapped_unstashed.to_string_raw())
+          src_str_buffer.insert(2, mapped_unstashed.to_string_raw())
           mapped_stashes.append(mapped_unstashed)
           self.AssertPartition(RangeSet(data=(0, size)), mapped_stashes)
       else:
-        src_str.insert(1, "-")
+        src_str_buffer.insert(1, "-")
         self.AssertPartition(RangeSet(data=(0, size)), mapped_stashes)
 
-      src_str = " ".join(src_str)
+      src_str = " ".join(src_str_buffer)
 
       # version 3+:
       #   zero <rangeset>
@@ -559,11 +696,11 @@
       max_allowed = OPTIONS.cache_size * OPTIONS.stash_threshold
       print("max stashed blocks: %d  (%d bytes), "
             "limit: %d bytes (%.2f%%)\n" % (
-            max_stashed_blocks, self._max_stashed_size, max_allowed,
-            self._max_stashed_size * 100.0 / max_allowed))
+                max_stashed_blocks, self._max_stashed_size, max_allowed,
+                self._max_stashed_size * 100.0 / max_allowed))
     else:
       print("max stashed blocks: %d  (%d bytes), limit: <unknown>\n" % (
-            max_stashed_blocks, self._max_stashed_size))
+          max_stashed_blocks, self._max_stashed_size))
 
   def ReviseStashSize(self):
     print("Revising stash size...")
@@ -703,28 +840,13 @@
               # transfer is intact.
               assert not self.disable_imgdiff
               imgdiff = True
-              if not xf.intact:
+              if (xf.src_ranges.extra.get('trimmed') or
+                  xf.tgt_ranges.extra.get('trimmed')):
                 imgdiff = False
                 xf.patch = None
             else:
-              # For files in zip format (eg, APKs, JARs, etc.) we would
-              # like to use imgdiff -z if possible (because it usually
-              # produces significantly smaller patches than bsdiff).
-              # This is permissible if:
-              #
-              #  - imgdiff is not disabled, and
-              #  - the source and target files are monotonic (ie, the
-              #    data is stored with blocks in increasing order), and
-              #  - we haven't removed any blocks from the source set.
-              #
-              # If these conditions are satisfied then appending all the
-              # blocks in the set together in order will produce a valid
-              # zip file (plus possibly extra zeros in the last block),
-              # which is what imgdiff needs to operate.  (imgdiff is
-              # fine with extra zeros at the end of the file.)
-              imgdiff = (not self.disable_imgdiff and xf.intact and
-                         xf.tgt_name.split(".")[-1].lower()
-                         in ("apk", "jar", "zip"))
+              imgdiff = self.CanUseImgdiff(
+                  xf.tgt_name, xf.tgt_ranges, xf.src_ranges)
             xf.style = "imgdiff" if imgdiff else "bsdiff"
             diff_queue.append((index, imgdiff, patch_num))
             patch_num += 1
@@ -741,10 +863,6 @@
       diff_total = len(diff_queue)
       patches = [None] * diff_total
       error_messages = []
-      warning_messages = []
-      if sys.stdout.isatty():
-        global diff_done
-        diff_done = 0
 
       # Using multiprocessing doesn't give additional benefits, due to the
       # pattern of the code. The diffing work is done by subprocess.call, which
@@ -760,22 +878,27 @@
             if not diff_queue:
               return
             xf_index, imgdiff, patch_index = diff_queue.pop()
+            xf = self.transfers[xf_index]
 
-          xf = self.transfers[xf_index]
+            if sys.stdout.isatty():
+              diff_left = len(diff_queue)
+              progress = (diff_total - diff_left) * 100 / diff_total
+              # '\033[K' is to clear to EOL.
+              print(' [%3d%%] %s\033[K' % (progress, xf.tgt_name), end='\r')
+              sys.stdout.flush()
+
           patch = xf.patch
           if not patch:
             src_ranges = xf.src_ranges
             tgt_ranges = xf.tgt_ranges
 
-            # Needs lock since WriteRangeDataToFd() is stateful (calling seek).
-            with lock:
-              src_file = common.MakeTempFile(prefix="src-")
-              with open(src_file, "wb") as fd:
-                self.src.WriteRangeDataToFd(src_ranges, fd)
+            src_file = common.MakeTempFile(prefix="src-")
+            with open(src_file, "wb") as fd:
+              self.src.WriteRangeDataToFd(src_ranges, fd)
 
-              tgt_file = common.MakeTempFile(prefix="tgt-")
-              with open(tgt_file, "wb") as fd:
-                self.tgt.WriteRangeDataToFd(tgt_ranges, fd)
+            tgt_file = common.MakeTempFile(prefix="tgt-")
+            with open(tgt_file, "wb") as fd:
+              self.tgt.WriteRangeDataToFd(tgt_ranges, fd)
 
             message = []
             try:
@@ -783,40 +906,16 @@
             except ValueError as e:
               message.append(
                   "Failed to generate %s for %s: tgt=%s, src=%s:\n%s" % (
-                  "imgdiff" if imgdiff else "bsdiff",
-                  xf.tgt_name if xf.tgt_name == xf.src_name else
+                      "imgdiff" if imgdiff else "bsdiff",
+                      xf.tgt_name if xf.tgt_name == xf.src_name else
                       xf.tgt_name + " (from " + xf.src_name + ")",
-                  xf.tgt_ranges, xf.src_ranges, e.message))
-              # TODO(b/68016761): Better handle the holes in mke2fs created
-              # images.
-              if imgdiff:
-                try:
-                  patch = compute_patch(src_file, tgt_file, imgdiff=False)
-                  message.append(
-                      "Fell back and generated with bsdiff instead for %s" % (
-                      xf.tgt_name,))
-                  xf.style = "bsdiff"
-                  with lock:
-                    warning_messages.extend(message)
-                  del message[:]
-                except ValueError as e:
-                  message.append(
-                      "Also failed to generate with bsdiff for %s:\n%s" % (
-                      xf.tgt_name, e.message))
-
+                      xf.tgt_ranges, xf.src_ranges, e.message))
             if message:
               with lock:
                 error_messages.extend(message)
 
           with lock:
             patches[patch_index] = (xf_index, patch)
-            if sys.stdout.isatty():
-              global diff_done
-              diff_done += 1
-              progress = diff_done * 100 / diff_total
-              # '\033[K' is to clear to EOL.
-              print(' [%d%%] %s\033[K' % (progress, xf.tgt_name), end='\r')
-              sys.stdout.flush()
 
       threads = [threading.Thread(target=diff_worker)
                  for _ in range(self.threads)]
@@ -828,11 +927,6 @@
       if sys.stdout.isatty():
         print('\n')
 
-      if warning_messages:
-        print('WARNING:')
-        print('\n'.join(warning_messages))
-        print('\n\n\n')
-
       if error_messages:
         print('ERROR:')
         print('\n'.join(error_messages))
@@ -853,11 +947,26 @@
         if common.OPTIONS.verbose:
           tgt_size = xf.tgt_ranges.size() * self.tgt.blocksize
           print("%10d %10d (%6.2f%%) %7s %s %s %s" % (
-                xf.patch_len, tgt_size, xf.patch_len * 100.0 / tgt_size,
-                xf.style,
-                xf.tgt_name if xf.tgt_name == xf.src_name else (
-                    xf.tgt_name + " (from " + xf.src_name + ")"),
-                xf.tgt_ranges, xf.src_ranges))
+              xf.patch_len, tgt_size, xf.patch_len * 100.0 / tgt_size,
+              xf.style,
+              xf.tgt_name if xf.tgt_name == xf.src_name else (
+                  xf.tgt_name + " (from " + xf.src_name + ")"),
+              xf.tgt_ranges, xf.src_ranges))
+
+  def AssertSha1Good(self):
+    """Check the SHA-1 of the src & tgt blocks in the transfer list.
+
+    Double check the SHA-1 value to avoid the issue in b/71908713, where
+    SparseImage.RangeSha1() messed up with the hash calculation in multi-thread
+    environment. That specific problem has been fixed by protecting the
+    underlying generator function 'SparseImage._GetRangeData()' with lock.
+    """
+    for xf in self.transfers:
+      tgt_sha1 = self.tgt.RangeSha1(xf.tgt_ranges)
+      assert xf.tgt_sha1 == tgt_sha1
+      if xf.style == "diff":
+        src_sha1 = self.src.RangeSha1(xf.src_ranges)
+        assert xf.src_sha1 == src_sha1
 
   def AssertSequenceGood(self):
     # Simulate the sequences of transfers we will output, and check that:
@@ -956,7 +1065,7 @@
           out_of_order += 1
           assert xf.src_ranges.overlaps(u.tgt_ranges)
           xf.src_ranges = xf.src_ranges.subtract(u.tgt_ranges)
-          xf.intact = False
+          xf.src_ranges.extra['trimmed'] = True
 
       if xf.style == "diff" and not xf.src_ranges:
         # nothing left to diff from; treat as new data
@@ -1075,7 +1184,8 @@
       while sinks:
         new_sinks = OrderedDict()
         for u in sinks:
-          if u not in G: continue
+          if u not in G:
+            continue
           s2.appendleft(u)
           del G[u]
           for iu in u.incoming:
@@ -1088,7 +1198,8 @@
       while sources:
         new_sources = OrderedDict()
         for u in sources:
-          if u not in G: continue
+          if u not in G:
+            continue
           s1.append(u)
           del G[u]
           for iu in u.outgoing:
@@ -1097,7 +1208,8 @@
               new_sources[iu] = None
         sources = new_sources
 
-      if not G: break
+      if not G:
+        break
 
       # Find the "best" vertex to put next.  "Best" is the one that
       # maximizes the net difference in source blocks saved we get by
@@ -1154,14 +1266,16 @@
       intersections = OrderedDict()
       for s, e in a.tgt_ranges:
         for i in range(s, e):
-          if i >= len(source_ranges): break
+          if i >= len(source_ranges):
+            break
           # Add all the Transfers in source_ranges[i] to the (ordered) set.
           if source_ranges[i] is not None:
             for j in source_ranges[i]:
               intersections[j] = None
 
       for b in intersections:
-        if a is b: continue
+        if a is b:
+          continue
 
         # If the blocks written by A are read by B, then B needs to go before A.
         i = a.tgt_ranges.intersect(b.src_ranges)
@@ -1178,9 +1292,22 @@
   def FindTransfers(self):
     """Parse the file_map to generate all the transfers."""
 
-    def AddSplitTransfers(tgt_name, src_name, tgt_ranges, src_ranges, style,
-                          by_id):
-      """Add one or multiple Transfer()s by splitting large files."""
+    def AddSplitTransfersWithFixedSizeChunks(tgt_name, src_name, tgt_ranges,
+                                             src_ranges, style, by_id):
+      """Add one or multiple Transfer()s by splitting large files.
+
+      For BBOTA v3, we need to stash source blocks for the resumable update
+      feature. However, with the growth of file sizes and the shrinking of
+      the cache partition, source blocks may be too large to be stashed. If a
+      file occupies too many blocks, we split it into smaller pieces by
+      creating multiple Transfer()s.
+
+      The downside is that after splitting, we may increase the package size
+      since the split pieces don't align well. According to our experiments,
+      1/8 of the cache size as the per-piece limit appears to be optimal.
+      Compared to the fixed 1024-block limit, it reduces the overall package
+      size by 30% for volantis, and 20% for angler and bullhead.
+      """
+
       pieces = 0
       while (tgt_ranges.size() > max_blocks_per_transfer and
              src_ranges.size() > max_blocks_per_transfer):
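To make the 1/8-of-cache per-piece limit concrete, a worked example with hypothetical device numbers:

```
# With a 512 MiB cache partition and 4096-byte blocks, each split piece is
# capped at 512 MiB * 0.125 / 4096 = 16384 blocks, i.e. 64 MiB per Transfer().
cache_size = 512 * 1024 * 1024
split_threshold = 0.125
blocksize = 4096
print(int(cache_size * split_threshold / blocksize))   # 16384
```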
@@ -1207,21 +1334,15 @@
                  self.tgt.RangeSha1(tgt_ranges), self.src.RangeSha1(src_ranges),
                  style, by_id)
 
-    def FindZipsAndAddSplitTransfers(tgt_name, src_name, tgt_ranges,
-                                     src_ranges, style, by_id):
-      """Find all the zip archives and add split transfers for the other files.
+    def AddSplitTransfers(tgt_name, src_name, tgt_ranges, src_ranges, style,
+                          by_id):
+      """Find all the zip files and split the others with a fixed chunk size.
 
-      For BBOTA v3, we need to stash source blocks for resumable feature.
-      However, with the growth of file size and the shrink of the cache
-      partition source blocks are too large to be stashed. If a file occupies
-      too many blocks, we split it into smaller pieces by getting multiple
-      Transfer()s.
-
-      The downside is that after splitting, we may increase the package size
-      since the split pieces don't align well. According to our experiments,
-      1/8 of the cache size as the per-piece limit appears to be optimal.
-      Compared to the fixed 1024-block limit, it reduces the overall package
-      size by 30% for volantis, and 20% for angler and bullhead."""
+      This function constructs a list of zip archives, which will later be
+      split by imgdiff to reduce the final patch size. The other files are
+      plainly split based on a fixed chunk size, at the cost of a potential
+      patch-size penalty.
+      """
 
       assert style == "diff"
 
@@ -1233,16 +1354,17 @@
                  style, by_id)
         return
 
-      if tgt_name.split(".")[-1].lower() in ("apk", "jar", "zip"):
-        split_enable = (not self.disable_imgdiff and src_ranges.monotonic and
-                        tgt_ranges.monotonic)
-        if split_enable and (self.tgt.RangeSha1(tgt_ranges) !=
-                             self.src.RangeSha1(src_ranges)):
+      # Split large APKs with imgdiff, if possible. We're intentionally checking
+      # file types one more time (CanUseImgdiff() checks that as well), before
+      # calling the costly RangeSha1()s.
+      if (self.FileTypeSupportedByImgdiff(tgt_name) and
+          self.tgt.RangeSha1(tgt_ranges) != self.src.RangeSha1(src_ranges)):
+        if self.CanUseImgdiff(tgt_name, tgt_ranges, src_ranges, True):
           large_apks.append((tgt_name, src_name, tgt_ranges, src_ranges))
           return
 
-      AddSplitTransfers(tgt_name, src_name, tgt_ranges, src_ranges,
-                        style, by_id)
+      AddSplitTransfersWithFixedSizeChunks(tgt_name, src_name, tgt_ranges,
+                                           src_ranges, style, by_id)
 
     def AddTransfer(tgt_name, src_name, tgt_ranges, src_ranges, style, by_id,
                     split=False):
@@ -1290,9 +1412,10 @@
 
         if tgt_changed < tgt_size * crop_threshold:
           assert tgt_changed + tgt_skipped.size() == tgt_size
-          print('%10d %10d (%6.2f%%) %s' % (tgt_skipped.size(), tgt_size,
-                tgt_skipped.size() * 100.0 / tgt_size, tgt_name))
-          FindZipsAndAddSplitTransfers(
+          print('%10d %10d (%6.2f%%) %s' % (
+              tgt_skipped.size(), tgt_size,
+              tgt_skipped.size() * 100.0 / tgt_size, tgt_name))
+          AddSplitTransfers(
               "%s-skipped" % (tgt_name,),
               "%s-skipped" % (src_name,),
               tgt_skipped, src_skipped, style, by_id)
@@ -1309,7 +1432,7 @@
             return
 
       # Add the transfer(s).
-      FindZipsAndAddSplitTransfers(
+      AddSplitTransfers(
           tgt_name, src_name, tgt_ranges, src_ranges, style, by_id)
 
     def ParseAndValidateSplitInfo(patch_size, tgt_ranges, src_ranges,
@@ -1370,8 +1493,8 @@
       assert patch_start == patch_size
       return split_info_list
 
-    def AddSplitTransferForLargeApks():
-      """Create split transfers for large apk files.
+    def SplitLargeApks():
+      """Split the large apks files.
 
       Example: Chrome.apk will be split into
         src-0: Chrome.apk-0, tgt-0: Chrome.apk-0
@@ -1394,11 +1517,10 @@
 
         src_file = common.MakeTempFile(prefix="src-")
         tgt_file = common.MakeTempFile(prefix="tgt-")
-        with transfer_lock:
-          with open(src_file, "wb") as src_fd:
-            self.src.WriteRangeDataToFd(src_ranges, src_fd)
-          with open(tgt_file, "wb") as tgt_fd:
-            self.tgt.WriteRangeDataToFd(tgt_ranges, tgt_fd)
+        with open(src_file, "wb") as src_fd:
+          self.src.WriteRangeDataToFd(src_ranges, src_fd)
+        with open(tgt_file, "wb") as tgt_fd:
+          self.tgt.WriteRangeDataToFd(tgt_ranges, tgt_fd)
 
         patch_file = common.MakeTempFile(prefix="patch-")
         patch_info_file = common.MakeTempFile(prefix="split_info-")
@@ -1406,15 +1528,11 @@
                "--block-limit={}".format(max_blocks_per_transfer),
                "--split-info=" + patch_info_file,
                src_file, tgt_file, patch_file]
-        p = common.Run(cmd, stdout=subprocess.PIPE)
-        p.communicate()
-        if p.returncode != 0:
-          print("Failed to create patch between {} and {},"
-                " falling back to bsdiff".format(src_name, tgt_name))
-          with transfer_lock:
-            AddSplitTransfers(tgt_name, src_name, tgt_ranges, src_ranges,
-                              "diff", self.transfers)
-          continue
+        p = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+        imgdiff_output, _ = p.communicate()
+        assert p.returncode == 0, \
+            "Failed to create imgdiff patch between {} and {}:\n{}".format(
+                src_name, tgt_name, imgdiff_output)
 
         with open(patch_info_file) as patch_info:
           lines = patch_info.readlines()
@@ -1424,29 +1542,29 @@
                                                     tgt_ranges, src_ranges,
                                                     lines)
         for index, (patch_start, patch_length, split_tgt_ranges,
-            split_src_ranges) in enumerate(split_info_list):
+                    split_src_ranges) in enumerate(split_info_list):
           with open(patch_file) as f:
             f.seek(patch_start)
             patch_content = f.read(patch_length)
 
           split_src_name = "{}-{}".format(src_name, index)
           split_tgt_name = "{}-{}".format(tgt_name, index)
-          transfer_split = Transfer(split_tgt_name, split_src_name,
-                                    split_tgt_ranges, split_src_ranges,
-                                    self.tgt.RangeSha1(split_tgt_ranges),
-                                    self.src.RangeSha1(split_src_ranges),
-                                    "diff", self.transfers)
-          transfer_split.patch = patch_content
+          split_large_apks.append((split_tgt_name,
+                                   split_src_name,
+                                   split_tgt_ranges,
+                                   split_src_ranges,
+                                   patch_content))
 
     print("Finding transfers...")
 
     large_apks = []
+    split_large_apks = []
     cache_size = common.OPTIONS.cache_size
     split_threshold = 0.125
     max_blocks_per_transfer = int(cache_size * split_threshold /
                                   self.tgt.blocksize)
     empty = RangeSet()
-    for tgt_fn, tgt_ranges in self.tgt.file_map.items():
+    for tgt_fn, tgt_ranges in sorted(self.tgt.file_map.items()):
       if tgt_fn == "__ZERO":
         # the special "__ZERO" domain is all the blocks not contained
         # in any file and that are filled with zeros.  We have a
@@ -1490,13 +1608,23 @@
       AddTransfer(tgt_fn, None, tgt_ranges, empty, "new", self.transfers)
 
     transfer_lock = threading.Lock()
-    threads = [threading.Thread(target=AddSplitTransferForLargeApks)
+    threads = [threading.Thread(target=SplitLargeApks)
                for _ in range(self.threads)]
     for th in threads:
       th.start()
     while threads:
       threads.pop().join()
 
+    # Sort the split transfers for large apks to generate a deterministic
+    # package.
+    split_large_apks.sort()
+    for (tgt_name, src_name, tgt_ranges, src_ranges,
+         patch) in split_large_apks:
+      transfer_split = Transfer(tgt_name, src_name, tgt_ranges, src_ranges,
+                                self.tgt.RangeSha1(tgt_ranges),
+                                self.src.RangeSha1(src_ranges),
+                                "diff", self.transfers)
+      transfer_split.patch = patch
+
   def AbbreviateSourceNames(self):
     for k in self.src.file_map.keys():
       b = os.path.basename(k)
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index e11f5a6..123ec7c 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -15,27 +15,33 @@
 # limitations under the License.
 
 """
-Build image output_image_file from input_directory, properties_file, and target_out_dir
+Builds output_image from the given input_directory and properties_file, and
+writes the image to target_output_directory.
 
-Usage:  build_image input_directory properties_file output_image_file target_out_dir
-
+Usage:  build_image.py input_directory properties_file output_image \\
+            target_output_directory
 """
+
+from __future__ import print_function
+
 import os
 import os.path
 import re
-import subprocess
-import sys
-import common
 import shlex
 import shutil
+import subprocess
+import sys
+
+import common
 import sparse_img
-import tempfile
+
 
 OPTIONS = common.OPTIONS
 
 FIXED_SALT = "aee087a5be3b982978c923f566a94613496b417f2af592639bc80d141e34dfe7"
 BLOCK_SIZE = 4096
 
+
 def RunCommand(cmd, verbose=None):
   """Echo and run the given command.
 
@@ -56,6 +62,7 @@
     print(output.rstrip())
   return (output, p.returncode)
 
+
 def GetVerityFECSize(partition_size):
   cmd = ["fec", "-s", str(partition_size)]
   output, exit_code = RunCommand(cmd, False)
@@ -63,6 +70,7 @@
     return False, 0
   return True, int(output)
 
+
 def GetVerityTreeSize(partition_size):
   cmd = ["build_verity_tree", "-s", str(partition_size)]
   output, exit_code = RunCommand(cmd, False)
@@ -70,6 +78,7 @@
     return False, 0
   return True, int(output)
 
+
 def GetVerityMetadataSize(partition_size):
   cmd = ["system/extras/verity/build_verity_metadata.py", "size",
          str(partition_size)]
@@ -78,6 +87,7 @@
     return False, 0
   return True, int(output)
 
+
 def GetVeritySize(partition_size, fec_supported):
   success, verity_tree_size = GetVerityTreeSize(partition_size)
   if not success:
@@ -93,16 +103,19 @@
     return verity_size + fec_size
   return verity_size
 
+
 def GetSimgSize(image_file):
   simg = sparse_img.SparseImage(image_file, build_map=False)
   return simg.blocksize * simg.total_blocks
 
+
 def ZeroPadSimg(image_file, pad_size):
   blocks = pad_size // BLOCK_SIZE
   print("Padding %d blocks (%d bytes)" % (blocks, pad_size))
   simg = sparse_img.SparseImage(image_file, mode="r+b", build_map=False)
   simg.AppendFillChunk(0, blocks)
 
+
 def AVBCalcMaxImageSize(avbtool, footer_type, partition_size, additional_args):
   """Calculates max image size for a given partition size.
 
@@ -115,8 +128,8 @@
   Returns:
     The maximum image size or 0 if an error occurred.
   """
-  cmd =[avbtool, "add_%s_footer" % footer_type,
-        "--partition_size", partition_size, "--calc_max_image_size"]
+  cmd = [avbtool, "add_%s_footer" % footer_type,
+         "--partition_size", partition_size, "--calc_max_image_size"]
   cmd.extend(shlex.split(additional_args))
 
   (output, exit_code) = RunCommand(cmd)
@@ -125,6 +138,7 @@
   else:
     return int(output)
 
+
 def AVBAddFooter(image_path, avbtool, footer_type, partition_size,
                  partition_name, key_path, algorithm, salt,
                  additional_args):
@@ -140,14 +154,15 @@
     algorithm: Name of algorithm to use or None.
     salt: The salt to use (a hexadecimal string) or None.
     additional_args: Additional arguments to pass to 'avbtool
-      add_hashtree_image'.
+        add_hashtree_image'.
+
   Returns:
     True if the operation succeeded.
   """
-  cmd =[avbtool, "add_%s_footer" % footer_type,
-        "--partition_size", partition_size,
-        "--partition_name", partition_name,
-        "--image", image_path]
+  cmd = [avbtool, "add_%s_footer" % footer_type,
+         "--partition_size", partition_size,
+         "--partition_name", partition_name,
+         "--image", image_path]
 
   if key_path and algorithm:
     cmd.extend(["--key", key_path, "--algorithm", algorithm])
@@ -159,12 +174,15 @@
   (_, exit_code) = RunCommand(cmd)
   return exit_code == 0
 
+
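For illustration, the avbtool command list assembled by AVBAddFooter() for a hashtree footer might look like the following (every value here is hypothetical):

```
cmd = ["avbtool", "add_hashtree_footer",
       "--partition_size", "1073741824",
       "--partition_name", "system",
       "--image", "system.img",
       # Appended only when the corresponding inputs are provided:
       "--key", "testkey_rsa4096.pem", "--algorithm", "SHA256_RSA4096",
       "--salt", "aee087a5"]
```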
 def AdjustPartitionSizeForVerity(partition_size, fec_supported):
   """Modifies the provided partition size to account for the verity metadata.
 
   This information is used to size the created image appropriately.
+
   Args:
     partition_size: the size of the partition to be verified.
+
   Returns:
     A tuple of the size of the partition adjusted for verity metadata, and
     the size of verity metadata.
@@ -201,30 +219,34 @@
   AdjustPartitionSizeForVerity.results[key] = (result, verity_size)
   return (result, verity_size)
 
+
 AdjustPartitionSizeForVerity.results = {}
 
+
 def BuildVerityFEC(sparse_image_path, verity_path, verity_fec_path,
                    padding_size):
   cmd = ["fec", "-e", "-p", str(padding_size), sparse_image_path,
          verity_path, verity_fec_path]
   output, exit_code = RunCommand(cmd)
   if exit_code != 0:
-    print "Could not build FEC data! Error: %s" % output
+    print("Could not build FEC data! Error: %s" % output)
     return False
   return True
 
+
 def BuildVerityTree(sparse_image_path, verity_image_path, prop_dict):
   cmd = ["build_verity_tree", "-A", FIXED_SALT, sparse_image_path,
          verity_image_path]
   output, exit_code = RunCommand(cmd)
   if exit_code != 0:
-    print "Could not build verity tree! Error: %s" % output
+    print("Could not build verity tree! Error: %s" % output)
     return False
   root, salt = output.split()
   prop_dict["verity_root_hash"] = root
   prop_dict["verity_salt"] = salt
   return True
 
+
 def BuildVerityMetadata(image_size, verity_metadata_path, root_hash, salt,
                         block_device, signer_path, key, signer_args,
                         verity_disable):
@@ -237,10 +259,11 @@
     cmd.append("--verity_disable")
   output, exit_code = RunCommand(cmd)
   if exit_code != 0:
-    print "Could not build verity metadata! Error: %s" % output
+    print("Could not build verity metadata! Error: %s" % output)
     return False
   return True
 
+
 def Append2Simg(sparse_image_path, unsparse_image_path, error_message):
   """Appends the unsparse image to the given sparse image.
 
@@ -253,18 +276,23 @@
   cmd = ["append2simg", sparse_image_path, unsparse_image_path]
   output, exit_code = RunCommand(cmd)
   if exit_code != 0:
-    print "%s: %s" % (error_message, output)
+    print("%s: %s" % (error_message, output))
     return False
   return True
 
+
 def Append(target, file_to_append, error_message):
-  # appending file_to_append to target
-  with open(target, "a") as out_file:
-    with open(file_to_append, "r") as input_file:
+  """Appends file_to_append to target."""
+  try:
+    with open(target, "a") as out_file, open(file_to_append, "r") as input_file:
       for line in input_file:
         out_file.write(line)
+  except IOError:
+    print(error_message)
+    return False
   return True
 
+
 def BuildVerifiedImage(data_image_path, verity_image_path,
                        verity_metadata_path, verity_fec_path,
                        padding_size, fec_supported):
@@ -286,6 +314,7 @@
     return False
   return True
 
+
 def UnsparseImage(sparse_image_path, replace=True):
   img_dir = os.path.dirname(sparse_image_path)
   unsparse_image_path = "unsparse_" + os.path.basename(sparse_image_path)
@@ -296,12 +325,15 @@
     else:
       return True, unsparse_image_path
   inflate_command = ["simg2img", sparse_image_path, unsparse_image_path]
-  (_, exit_code) = RunCommand(inflate_command)
+  (inflate_output, exit_code) = RunCommand(inflate_command)
   if exit_code != 0:
+    print("Error: '%s' failed with exit code %d:\n%s" % (
+        inflate_command, exit_code, inflate_output))
     os.remove(unsparse_image_path)
     return False, None
   return True, unsparse_image_path
 
+
 def MakeVerityEnabledImage(out_file, fec_supported, prop_dict):
   """Creates an image that is verifiable using dm-verity.
 
@@ -323,7 +355,7 @@
   signer_args = OPTIONS.verity_signer_args
 
   # make a tempdir
-  tempdir_name = tempfile.mkdtemp(suffix="_verity_images")
+  tempdir_name = common.MakeTempDir(suffix="_verity_images")
 
   # get partial image paths
   verity_image_path = os.path.join(tempdir_name, "verity.img")
@@ -332,7 +364,6 @@
 
   # build the verity tree and get the root hash and salt
   if not BuildVerityTree(out_file, verity_image_path, prop_dict):
-    shutil.rmtree(tempdir_name, ignore_errors=True)
     return False
 
   # build the metadata blocks
@@ -342,7 +373,6 @@
   if not BuildVerityMetadata(image_size, verity_metadata_path, root_hash, salt,
                              block_dev, signer_path, signer_key, signer_args,
                              verity_disable):
-    shutil.rmtree(tempdir_name, ignore_errors=True)
     return False
 
   # build the full verified image
@@ -358,23 +388,59 @@
                             verity_fec_path,
                             padding_size,
                             fec_supported):
-    shutil.rmtree(tempdir_name, ignore_errors=True)
     return False
 
-  shutil.rmtree(tempdir_name, ignore_errors=True)
   return True
 
-def ConvertBlockMapToBaseFs(block_map_file):
-  fd, base_fs_file = tempfile.mkstemp(prefix="script_gen_",
-                                      suffix=".base_fs")
-  os.close(fd)
 
+def ConvertBlockMapToBaseFs(block_map_file):
+  base_fs_file = common.MakeTempFile(prefix="script_gen_", suffix=".base_fs")
   convert_command = ["blk_alloc_to_base_fs", block_map_file, base_fs_file]
   (_, exit_code) = RunCommand(convert_command)
-  if exit_code != 0:
-    os.remove(base_fs_file)
-    return None
-  return base_fs_file
+  return base_fs_file if exit_code == 0 else None
+
+
+def CheckHeadroom(ext4fs_output, prop_dict):
+  """Checks if there's enough headroom space available.
+
+  Headroom is the reserved space on the system image (set via
+  PRODUCT_SYSTEM_HEADROOM), which is useful for devices with low disk space
+  that have system image variation between builds. The 'partition_headroom'
+  in prop_dict is the size in bytes, while the numbers in 'ext4fs_output' are
+  in 4K-blocks.
+
+  Args:
+    ext4fs_output: The output string from the mke2fs command.
+    prop_dict: The property dict.
+
+  Returns:
+    True if there's enough headroom space; False otherwise.
+
+  Raises:
+    AssertionError: On invalid input.
+  """
+  assert ext4fs_output is not None
+  assert prop_dict.get('fs_type', '').startswith('ext4')
+  assert 'partition_headroom' in prop_dict
+  assert 'mount_point' in prop_dict
+
+  ext4fs_stats = re.compile(
+      r'Created filesystem with .* (?P<used_blocks>[0-9]+)/'
+      r'(?P<total_blocks>[0-9]+) blocks')
+  last_line = ext4fs_output.strip().split('\n')[-1]
+  m = ext4fs_stats.match(last_line)
+  used_blocks = int(m.groupdict().get('used_blocks'))
+  total_blocks = int(m.groupdict().get('total_blocks'))
+  headroom_blocks = int(prop_dict['partition_headroom']) / BLOCK_SIZE
+  adjusted_blocks = total_blocks - headroom_blocks
+  if used_blocks > adjusted_blocks:
+    mount_point = prop_dict["mount_point"]
+    print("Error: Not enough room on %s (total: %d blocks, used: %d blocks, "
+          "headroom: %d blocks, available: %d blocks)" % (
+              mount_point, total_blocks, used_blocks, headroom_blocks,
+              adjusted_blocks))
+    return False
+  return True
+
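A hedged sketch of the headroom check against hypothetical mke2fs output:

```
ext4fs_output = ("Created filesystem with 2777/129024 inodes and "
                 "508140/516099 blocks")
prop_dict = {
    'fs_type': 'ext4',
    'partition_headroom': '4194304',   # 4 MiB of headroom = 1024 4K-blocks
    'mount_point': 'system',
}
# used (508140) <= total (516099) - headroom (1024), so the check passes.
print(CheckHeadroom(ext4fs_output, prop_dict))   # True
```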
 
 def BuildImage(in_dir, prop_dict, out_file, target_out=None):
   """Build an image to out_file from in_dir with property prop_dict.
@@ -383,7 +449,8 @@
     in_dir: path of input directory.
     prop_dict: property dictionary.
     out_file: path of the output image file.
-    target_out: path of the product out directory to read device specific FS config files.
+    target_out: path of the product out directory to read device specific FS
+        config files.
 
   Returns:
     True iff the image is built successfully.
@@ -392,17 +459,15 @@
   # /system and the ramdisk, and can be mounted at the root of the file system.
   origin_in = in_dir
   fs_config = prop_dict.get("fs_config")
-  base_fs_file = None
-  if (prop_dict.get("system_root_image") == "true"
-      and prop_dict["mount_point"] == "system"):
-    in_dir = tempfile.mkdtemp()
-    # Change the mount point to "/"
+  if (prop_dict.get("system_root_image") == "true" and
+      prop_dict["mount_point"] == "system"):
+    in_dir = common.MakeTempDir()
+    # Change the mount point to "/".
     prop_dict["mount_point"] = "/"
     if fs_config:
       # We need to merge the fs_config files of system and ramdisk.
-      fd, merged_fs_config = tempfile.mkstemp(prefix="root_fs_config",
-                                              suffix=".txt")
-      os.close(fd)
+      merged_fs_config = common.MakeTempFile(prefix="root_fs_config",
+                                             suffix=".txt")
       with open(merged_fs_config, "w") as fw:
         if "ramdisk_fs_config" in prop_dict:
           with open(prop_dict["ramdisk_fs_config"]) as fr:
@@ -413,7 +478,7 @@
 
   build_command = []
   fs_type = prop_dict.get("fs_type", "")
-  run_fsck = False
+  run_e2fsck = False
 
   fs_spans_partition = True
   if fs_type.startswith("squash"):
@@ -427,8 +492,8 @@
   # verified.
   if verity_supported and is_verity_partition:
     partition_size = int(prop_dict.get("partition_size"))
-    (adjusted_size, verity_size) = AdjustPartitionSizeForVerity(partition_size,
-                                                                verity_fec_supported)
+    (adjusted_size, verity_size) = AdjustPartitionSizeForVerity(
+        partition_size, verity_fec_supported)
     if not adjusted_size:
       return False
     prop_dict["partition_size"] = str(adjusted_size)
@@ -447,8 +512,8 @@
     partition_size = prop_dict["partition_size"]
     # avb_add_hash_footer_args or avb_add_hashtree_footer_args.
     additional_args = prop_dict["avb_add_" + avb_footer_type + "_footer_args"]
-    max_image_size = AVBCalcMaxImageSize(avbtool, avb_footer_type, partition_size,
-                                         additional_args)
+    max_image_size = AVBCalcMaxImageSize(avbtool, avb_footer_type,
+                                         partition_size, additional_args)
     if max_image_size == 0:
       return False
     prop_dict["partition_size"] = str(max_image_size)
@@ -458,7 +523,7 @@
     build_command = [prop_dict["ext_mkuserimg"]]
     if "extfs_sparse_flag" in prop_dict:
       build_command.append(prop_dict["extfs_sparse_flag"])
-      run_fsck = True
+      run_e2fsck = True
     build_command.extend([in_dir, out_file, fs_type,
                           prop_dict["mount_point"]])
     build_command.append(prop_dict["partition_size"])
@@ -490,6 +555,8 @@
         build_command.extend(["-U", prop_dict["uuid"]])
       if "hash_seed" in prop_dict:
         build_command.extend(["-S", prop_dict["hash_seed"]])
+    if "ext4_share_dup_blocks" in prop_dict:
+      build_command.append("-c")
     if "selinux_fc" in prop_dict:
       build_command.append(prop_dict["selinux_fc"])
   elif fs_type.startswith("squash"):
@@ -506,17 +573,28 @@
       build_command.extend(["-c", prop_dict["selinux_fc"]])
     if "block_list" in prop_dict:
       build_command.extend(["-B", prop_dict["block_list"]])
+    if "squashfs_block_size" in prop_dict:
+      build_command.extend(["-b", prop_dict["squashfs_block_size"]])
     if "squashfs_compressor" in prop_dict:
       build_command.extend(["-z", prop_dict["squashfs_compressor"]])
     if "squashfs_compressor_opt" in prop_dict:
       build_command.extend(["-zo", prop_dict["squashfs_compressor_opt"]])
-    if "squashfs_block_size" in prop_dict:
-      build_command.extend(["-b", prop_dict["squashfs_block_size"]])
-    if "squashfs_disable_4k_align" in prop_dict and prop_dict.get("squashfs_disable_4k_align") == "true":
+    if prop_dict.get("squashfs_disable_4k_align") == "true":
       build_command.extend(["-a"])
   elif fs_type.startswith("f2fs"):
     build_command = ["mkf2fsuserimg.sh"]
     build_command.extend([out_file, prop_dict["partition_size"]])
+    if fs_config:
+      build_command.extend(["-C", fs_config])
+    build_command.extend(["-f", in_dir])
+    if target_out:
+      build_command.extend(["-D", target_out])
+    if "selinux_fc" in prop_dict:
+      build_command.extend(["-s", prop_dict["selinux_fc"]])
+    build_command.extend(["-t", prop_dict["mount_point"]])
+    if "timestamp" in prop_dict:
+      build_command.extend(["-T", str(prop_dict["timestamp"])])
+    build_command.extend(["-L", prop_dict["mount_point"]])
   else:
     print("Error: unknown filesystem type '%s'" % (fs_type))
     return False
@@ -531,56 +609,15 @@
     shutil.rmtree(staging_system, ignore_errors=True)
     shutil.copytree(origin_in, staging_system, symlinks=True)
 
-  has_reserved_blocks = prop_dict.get("has_ext4_reserved_blocks") == "true"
-  ext4fs_output = None
-
-  try:
-    if fs_type.startswith("ext4"):
-      (ext4fs_output, exit_code) = RunCommand(build_command)
-    else:
-      (_, exit_code) = RunCommand(build_command)
-  finally:
-    if in_dir != origin_in:
-      # Clean up temporary directories and files.
-      shutil.rmtree(in_dir, ignore_errors=True)
-      if fs_config:
-        os.remove(fs_config)
-    if base_fs_file is not None:
-      os.remove(base_fs_file)
+  (mkfs_output, exit_code) = RunCommand(build_command)
   if exit_code != 0:
+    print("Error: '%s' failed with exit code %d:\n%s" % (
+        build_command, exit_code, mkfs_output))
     return False
 
-  # Bug: 21522719, 22023465
-  # There are some reserved blocks on ext4 FS (lesser of 4096 blocks and 2%).
-  # We need to deduct those blocks from the available space, since they are
-  # not writable even with root privilege. It only affects devices using
-  # file-based OTA and a kernel version of 3.10 or greater (currently just
-  # sprout).
-  # Separately, check if there's enough headroom space available. This is useful for
-  # devices with low disk space that have system image variation between builds.
-  if (has_reserved_blocks or "partition_headroom" in prop_dict) and fs_type.startswith("ext4"):
-    assert ext4fs_output is not None
-    ext4fs_stats = re.compile(
-        r'Created filesystem with .* (?P<used_blocks>[0-9]+)/'
-        r'(?P<total_blocks>[0-9]+) blocks')
-    m = ext4fs_stats.match(ext4fs_output.strip().split('\n')[-1])
-    used_blocks = int(m.groupdict().get('used_blocks'))
-    total_blocks = int(m.groupdict().get('total_blocks'))
-    reserved_blocks = 0
-    headroom_blocks = 0
-    adjusted_blocks = total_blocks
-    if has_reserved_blocks:
-      reserved_blocks = min(4096, int(total_blocks * 0.02))
-      adjusted_blocks -= reserved_blocks
-    if "partition_headroom" in prop_dict:
-      headroom_blocks = int(prop_dict.get('partition_headroom')) / BLOCK_SIZE
-      adjusted_blocks -= headroom_blocks
-    if used_blocks > adjusted_blocks:
-      mount_point = prop_dict.get("mount_point")
-      print("Error: Not enough room on %s (total: %d blocks, used: %d blocks, "
-            "reserved: %d blocks, headroom: %d blocks, available: %d blocks)" % (
-                mount_point, total_blocks, used_blocks, reserved_blocks,
-                headroom_blocks, adjusted_blocks))
+  # Check if there's enough headroom space available for ext4 image.
+  if "partition_headroom" in prop_dict and fs_type.startswith("ext4"):
+    if not CheckHeadroom(mkfs_output, prop_dict):
       return False
 
   if not fs_spans_partition:
@@ -594,7 +631,7 @@
     if verity_supported and is_verity_partition:
       ZeroPadSimg(out_file, partition_size - image_size)
 
-  # create the verified image if this is to be verified
+  # Create the verified image if this is to be verified.
   if verity_supported and is_verity_partition:
     if not MakeVerityEnabledImage(out_file, verity_fec_supported, prop_dict):
       return False
@@ -610,22 +647,28 @@
     salt = prop_dict.get("avb_salt")
     # avb_add_hash_footer_args or avb_add_hashtree_footer_args
     additional_args = prop_dict["avb_add_" + avb_footer_type + "_footer_args"]
-    if not AVBAddFooter(out_file, avbtool, avb_footer_type, original_partition_size,
-                        partition_name, key_path, algorithm, salt, additional_args):
+    if not AVBAddFooter(out_file, avbtool, avb_footer_type,
+                        original_partition_size, partition_name, key_path,
+                        algorithm, salt, additional_args):
       return False
 
-  if run_fsck and prop_dict.get("skip_fsck") != "true":
+  if run_e2fsck and prop_dict.get("skip_fsck") != "true":
     success, unsparse_image = UnsparseImage(out_file, replace=False)
     if not success:
       return False
 
     # Run e2fsck on the inflated image file
     e2fsck_command = ["e2fsck", "-f", "-n", unsparse_image]
-    (_, exit_code) = RunCommand(e2fsck_command)
+    (e2fsck_output, exit_code) = RunCommand(e2fsck_command)
 
     os.remove(unsparse_image)
 
-  return exit_code == 0
+    if exit_code != 0:
+      print("Error: '%s' failed with exit code %d:\n%s" % (
+          e2fsck_command, exit_code, e2fsck_output))
+      return False
+
+  return True
 
 
 def ImagePropFromGlobalDict(glob_dict, mount_point):
@@ -682,7 +725,7 @@
     copy_prop("system_root_image", "system_root_image")
     copy_prop("ramdisk_dir", "ramdisk_dir")
     copy_prop("ramdisk_fs_config", "ramdisk_fs_config")
-    copy_prop("has_ext4_reserved_blocks", "has_ext4_reserved_blocks")
+    copy_prop("ext4_share_dup_blocks", "ext4_share_dup_blocks")
     copy_prop("system_squashfs_compressor", "squashfs_compressor")
     copy_prop("system_squashfs_compressor_opt", "squashfs_compressor_opt")
     copy_prop("system_squashfs_block_size", "squashfs_block_size")
@@ -690,7 +733,8 @@
     copy_prop("system_base_fs_file", "base_fs_file")
     copy_prop("system_extfs_inode_count", "extfs_inode_count")
   elif mount_point == "system_other":
-    # We inherit the selinux policies of /system since we contain some of its files.
+    # We inherit the selinux policies of /system since we contain some of its
+    # files.
     d["mount_point"] = "system"
     copy_prop("avb_system_hashtree_enable", "avb_hashtree_enable")
     copy_prop("avb_system_add_hashtree_footer_args",
@@ -702,7 +746,6 @@
     copy_prop("system_size", "partition_size")
     copy_prop("system_journal_size", "journal_size")
     copy_prop("system_verity_block_device", "verity_block_device")
-    copy_prop("has_ext4_reserved_blocks", "has_ext4_reserved_blocks")
     copy_prop("system_squashfs_compressor", "squashfs_compressor")
     copy_prop("system_squashfs_compressor_opt", "squashfs_compressor_opt")
     copy_prop("system_squashfs_block_size", "squashfs_block_size")
@@ -713,7 +756,7 @@
     copy_prop("fs_type", "fs_type")
     copy_prop("userdata_fs_type", "fs_type")
     copy_prop("userdata_size", "partition_size")
-    copy_prop("flash_logical_block_size","flash_logical_block_size")
+    copy_prop("flash_logical_block_size", "flash_logical_block_size")
     copy_prop("flash_erase_block_size", "flash_erase_block_size")
   elif mount_point == "cache":
     copy_prop("cache_fs_type", "fs_type")
@@ -728,18 +771,33 @@
     copy_prop("vendor_size", "partition_size")
     copy_prop("vendor_journal_size", "journal_size")
     copy_prop("vendor_verity_block_device", "verity_block_device")
-    copy_prop("has_ext4_reserved_blocks", "has_ext4_reserved_blocks")
+    copy_prop("ext4_share_dup_blocks", "ext4_share_dup_blocks")
     copy_prop("vendor_squashfs_compressor", "squashfs_compressor")
     copy_prop("vendor_squashfs_compressor_opt", "squashfs_compressor_opt")
     copy_prop("vendor_squashfs_block_size", "squashfs_block_size")
     copy_prop("vendor_squashfs_disable_4k_align", "squashfs_disable_4k_align")
     copy_prop("vendor_base_fs_file", "base_fs_file")
     copy_prop("vendor_extfs_inode_count", "extfs_inode_count")
+  elif mount_point == "product":
+    copy_prop("avb_product_hashtree_enable", "avb_hashtree_enable")
+    copy_prop("avb_product_add_hashtree_footer_args",
+              "avb_add_hashtree_footer_args")
+    copy_prop("avb_product_key_path", "avb_key_path")
+    copy_prop("avb_product_algorithm", "avb_algorithm")
+    copy_prop("product_fs_type", "fs_type")
+    copy_prop("product_size", "partition_size")
+    copy_prop("product_journal_size", "journal_size")
+    copy_prop("product_verity_block_device", "verity_block_device")
+    copy_prop("product_squashfs_compressor", "squashfs_compressor")
+    copy_prop("product_squashfs_compressor_opt", "squashfs_compressor_opt")
+    copy_prop("product_squashfs_block_size", "squashfs_block_size")
+    copy_prop("product_squashfs_disable_4k_align", "squashfs_disable_4k_align")
+    copy_prop("product_base_fs_file", "base_fs_file")
+    copy_prop("product_extfs_inode_count", "extfs_inode_count")
   elif mount_point == "oem":
     copy_prop("fs_type", "fs_type")
     copy_prop("oem_size", "partition_size")
     copy_prop("oem_journal_size", "journal_size")
-    copy_prop("has_ext4_reserved_blocks", "has_ext4_reserved_blocks")
     copy_prop("oem_extfs_inode_count", "extfs_inode_count")
   d["partition_name"] = mount_point
   return d
@@ -761,7 +819,7 @@
 
 def main(argv):
   if len(argv) != 4:
-    print __doc__
+    print(__doc__)
     sys.exit(1)
 
   in_dir = argv[0]
@@ -789,17 +847,22 @@
       mount_point = "vendor"
     elif image_filename == "oem.img":
       mount_point = "oem"
+    elif image_filename == "product.img":
+      mount_point = "product"
     else:
-      print >> sys.stderr, "error: unknown image file name ", image_filename
-      exit(1)
+      print("error: unknown image file name ", image_filename, file=sys.stderr)
+      sys.exit(1)
 
     image_properties = ImagePropFromGlobalDict(glob_dict, mount_point)
 
   if not BuildImage(in_dir, image_properties, out_file, target_out):
-    print >> sys.stderr, "error: failed to build %s from %s" % (out_file,
-                                                                in_dir)
-    exit(1)
+    print("error: failed to build %s from %s" % (out_file, in_dir),
+          file=sys.stderr)
+    sys.exit(1)
 
 
 if __name__ == '__main__':
-  main(sys.argv[1:])
+  try:
+    main(sys.argv[1:])
+  finally:
+    common.Cleanup()
diff --git a/tools/releasetools/check_ota_package_signature.py b/tools/releasetools/check_ota_package_signature.py
index 8106d06..3cac90a 100755
--- a/tools/releasetools/check_ota_package_signature.py
+++ b/tools/releasetools/check_ota_package_signature.py
@@ -21,11 +21,7 @@
 from __future__ import print_function
 
 import argparse
-import common
-import os
-import os.path
 import re
-import site
 import subprocess
 import sys
 import tempfile
@@ -34,15 +30,7 @@
 from hashlib import sha1
 from hashlib import sha256
 
-# 'update_payload' package is under 'system/update_engine/scripts/', which
-# should be included in PYTHONPATH. Try to set it up automatically if
-# if ANDROID_BUILD_TOP is available.
-top = os.getenv('ANDROID_BUILD_TOP')
-if top:
-  site.addsitedir(os.path.join(top, 'system', 'update_engine', 'scripts'))
-
-from update_payload.payload import Payload
-from update_payload.update_metadata_pb2 import Signatures
+import common
 
 
 def CertUsesSha256(cert):
@@ -108,10 +96,7 @@
   use_sha256 = CertUsesSha256(cert)
   print('Use SHA-256: %s' % (use_sha256,))
 
-  if use_sha256:
-    h = sha256()
-  else:
-    h = sha1()
+  h = sha256() if use_sha256 else sha1()
   h.update(package_bytes[:signed_len])
   package_digest = h.hexdigest().lower()
 
@@ -161,40 +146,6 @@
 
 def VerifyAbOtaPayload(cert, package):
   """Verifies the payload and metadata signatures in an A/B OTA payload."""
-
-  def VerifySignatureBlob(hash_file, blob):
-    """Verifies the input hash_file against the signature blob."""
-    signatures = Signatures()
-    signatures.ParseFromString(blob)
-
-    extracted_sig_file = common.MakeTempFile(
-        prefix='extracted-sig-', suffix='.bin')
-    # In Android, we only expect one signature.
-    assert len(signatures.signatures) == 1, \
-        'Invalid number of signatures: %d' % len(signatures.signatures)
-    signature = signatures.signatures[0]
-    length = len(signature.data)
-    assert length == 256, 'Invalid signature length %d' % (length,)
-    with open(extracted_sig_file, 'w') as f:
-      f.write(signature.data)
-
-    # Verify the signature file extracted from the payload, by reversing the
-    # signing operation. Alternatively, this can be done by calling 'openssl
-    # rsautl -verify -certin -inkey <cert.pem> -in <extracted_sig_file> -out
-    # <output>', then to assert that
-    # <output> == SHA-256 DigestInfo prefix || <hash_file>.
-    cmd = ['openssl', 'pkeyutl', '-verify', '-certin', '-inkey', cert,
-           '-pkeyopt', 'digest:sha256', '-in', hash_file,
-           '-sigfile', extracted_sig_file]
-    p = common.Run(cmd, stdout=subprocess.PIPE)
-    result, _ = p.communicate()
-
-    # https://github.com/openssl/openssl/pull/3213
-    # 'openssl pkeyutl -verify' (prior to 1.1.0) returns non-zero return code,
-    # even on successful verification. To avoid the false alarm with older
-    # openssl, check the output directly.
-    assert result.strip() == 'Signature Verified Successfully', result.strip()
-
   package_zip = zipfile.ZipFile(package, 'r')
   if 'payload.bin' not in package_zip.namelist():
     common.ZipClose(package_zip)
@@ -202,37 +153,23 @@
 
   print('Verifying A/B OTA payload signatures...')
 
-  package_dir = tempfile.mkdtemp(prefix='package-')
-  common.OPTIONS.tempfiles.append(package_dir)
+  # Dump pubkey from the certificate.
+  pubkey = common.MakeTempFile(prefix="key-", suffix=".pem")
+  with open(pubkey, 'wb') as pubkey_fp:
+    pubkey_fp.write(common.ExtractPublicKey(cert))
 
+  package_dir = common.MakeTempDir(prefix='package-')
+
+  # Signature verification with delta_generator.
   payload_file = package_zip.extract('payload.bin', package_dir)
-  payload = Payload(open(payload_file, 'rb'))
-  payload.Init()
-
-  # Extract the payload hash and metadata hash from the payload.bin.
-  payload_hash_file = common.MakeTempFile(prefix='hash-', suffix='.bin')
-  metadata_hash_file = common.MakeTempFile(prefix='hash-', suffix='.bin')
-  cmd = ['brillo_update_payload', 'hash',
-         '--unsigned_payload', payload_file,
-         '--signature_size', '256',
-         '--metadata_hash_file', metadata_hash_file,
-         '--payload_hash_file', payload_hash_file]
-  p = common.Run(cmd, stdout=subprocess.PIPE)
-  p.communicate()
-  assert p.returncode == 0, 'brillo_update_payload hash failed'
-
-  # Payload signature verification.
-  assert payload.manifest.HasField('signatures_offset')
-  payload_signature = payload.ReadDataBlob(
-      payload.manifest.signatures_offset, payload.manifest.signatures_size)
-  VerifySignatureBlob(payload_hash_file, payload_signature)
-
-  # Metadata signature verification.
-  metadata_signature = payload.ReadDataBlob(
-      -payload.header.metadata_signature_len,
-      payload.header.metadata_signature_len)
-  VerifySignatureBlob(metadata_hash_file, metadata_signature)
-
+  cmd = ['delta_generator',
+         '--in_file=' + payload_file,
+         '--public_key=' + pubkey]
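+  # delta_generator checks both the payload and the metadata signatures
+  # against the extracted public key, replacing the manual openssl-based
+  # verification that used to live here.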
+  proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+  stdoutdata, _ = proc.communicate()
+  assert proc.returncode == 0, \
+      'Failed to verify payload with delta_generator: %s\n%s' % (package,
+                                                                 stdoutdata)
   common.ZipClose(package_zip)
 
   # Verified successfully upon reaching here.
diff --git a/tools/releasetools/check_target_files_signatures.py b/tools/releasetools/check_target_files_signatures.py
index f9aa4fa..b9f39a6 100755
--- a/tools/releasetools/check_target_files_signatures.py
+++ b/tools/releasetools/check_target_files_signatures.py
@@ -39,25 +39,26 @@
 
 """
 
+import os
+import re
+import subprocess
 import sys
+import zipfile
+
+import common
 
 if sys.hexversion < 0x02070000:
   print >> sys.stderr, "Python 2.7 or newer is required."
   sys.exit(1)
 
-import os
-import re
-import shutil
-import subprocess
-import zipfile
 
-import common
-
-# Work around a bug in python's zipfile module that prevents opening
-# of zipfiles if any entry has an extra field of between 1 and 3 bytes
-# (which is common with zipaligned APKs).  This overrides the
-# ZipInfo._decodeExtra() method (which contains the bug) with an empty
-# version (since we don't need to decode the extra field anyway).
+# Work around a bug in Python's zipfile module that prevents opening of zipfiles
+# if any entry has an extra field of between 1 and 3 bytes (which is common with
+# zipaligned APKs). This overrides the ZipInfo._decodeExtra() method (which
+# contains the bug) with an empty version (since we don't need to decode the
+# extra field anyway).
+# Issue #14315: https://bugs.python.org/issue14315, fixed in Python 2.7.8 and
+# Python 3.5.0 alpha 1.
 class MyZipInfo(zipfile.ZipInfo):
   def _decodeExtra(self):
     pass
@@ -235,26 +236,48 @@
     self.certmap = None
 
   def LoadZipFile(self, filename):
-    d, z = common.UnzipTemp(filename, ['*.apk'])
-    try:
-      self.apks = {}
-      self.apks_by_basename = {}
-      for dirpath, _, filenames in os.walk(d):
-        for fn in filenames:
-          if fn.endswith(".apk"):
-            fullname = os.path.join(dirpath, fn)
-            displayname = fullname[len(d)+1:]
-            apk = APK(fullname, displayname)
-            self.apks[apk.filename] = apk
-            self.apks_by_basename[os.path.basename(apk.filename)] = apk
+    # First read the APK certs file to figure out whether there are compressed
+    # APKs in the archive. If we do have compressed APKs in the archive, then we
+    # must decompress them individually before we perform any analysis.
 
-            self.max_pkg_len = max(self.max_pkg_len, len(apk.package))
-            self.max_fn_len = max(self.max_fn_len, len(apk.filename))
-    finally:
-      shutil.rmtree(d)
+    # This is the list of wildcards of files we extract from |filename|.
+    apk_extensions = ['*.apk']
 
-    self.certmap = common.ReadApkCerts(z)
-    z.close()
+    self.certmap, compressed_extension = common.ReadApkCerts(
+        zipfile.ZipFile(filename, "r"))
+    if compressed_extension:
+      apk_extensions.append("*.apk" + compressed_extension)
+
+    d = common.UnzipTemp(filename, apk_extensions)
+    self.apks = {}
+    self.apks_by_basename = {}
+    for dirpath, _, filenames in os.walk(d):
+      for fn in filenames:
+        # Decompress compressed APKs before we begin processing them.
+        if compressed_extension and fn.endswith(compressed_extension):
+          # First strip the compressed extension from the file.
+          uncompressed_fn = fn[:-len(compressed_extension)]
+
+          # Decompress the compressed file to the output file.
+          common.Gunzip(os.path.join(dirpath, fn),
+                        os.path.join(dirpath, uncompressed_fn))
+
+          # Finally, delete the compressed file and use the uncompressed file
+          # for further processing. Note that the deletion is not strictly
+          # required, but is done here to ensure that we're not using too much
+          # space in the temporary directory.
+          os.remove(os.path.join(dirpath, fn))
+          fn = uncompressed_fn
+
+        if fn.endswith(".apk"):
+          fullname = os.path.join(dirpath, fn)
+          displayname = fullname[len(d)+1:]
+          apk = APK(fullname, displayname)
+          self.apks[apk.filename] = apk
+          self.apks_by_basename[os.path.basename(apk.filename)] = apk
+
+          self.max_pkg_len = max(self.max_pkg_len, len(apk.package))
+          self.max_fn_len = max(self.max_fn_len, len(apk.filename))
 
   def CheckSharedUids(self):
     """Look for any instances where packages signed with different
@@ -264,7 +287,7 @@
       if apk.shared_uid:
         apks_by_uid.setdefault(apk.shared_uid, []).append(apk)
 
-    for uid in sorted(apks_by_uid.keys()):
+    for uid in sorted(apks_by_uid):
       apks = apks_by_uid[uid]
       for apk in apks[1:]:
         if apk.certs != apks[0].certs:
@@ -439,3 +462,5 @@
     print "   ERROR: %s" % (e,)
     print
     sys.exit(1)
+  finally:
+    common.Cleanup()
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 75c86cc..787de98 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -18,23 +18,24 @@
 import errno
 import getopt
 import getpass
+import gzip
 import imp
 import os
 import platform
 import re
 import shlex
 import shutil
+import string
 import subprocess
 import sys
 import tempfile
 import threading
 import time
 import zipfile
+from hashlib import sha1, sha256
 
 import blockimgdiff
-
-from hashlib import sha1 as sha1
-
+import sparse_img
 
 class Options(object):
   def __init__(self):
@@ -77,7 +78,7 @@
 
 
 # The partitions allowed to be signed by AVB (Android verified boot 2.0).
-AVB_PARTITIONS = ('boot', 'recovery', 'system', 'vendor', 'dtbo')
+AVB_PARTITIONS = ('boot', 'recovery', 'system', 'vendor', 'product', 'dtbo')
 
 
 class ErrorCode(object):
@@ -125,6 +126,11 @@
   return subprocess.Popen(args, **kwargs)
 
 
+def RoundUpTo4K(value):
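+  # Rounds up to the next multiple of 4096; e.g. RoundUpTo4K(1) == 4096 and
+  # RoundUpTo4K(4097) == 8192, while exact multiples are returned unchanged.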
+  rounded_up = value + 4095
+  return rounded_up - (rounded_up % 4096)
+
+
 def CloseInheritedPipes():
   """ Gmake in MAC OS has file descriptor (PIPE) leak. We close those fds
   before doing other work."""
@@ -215,21 +221,6 @@
             vendor_base_fs_file,))
         del d["vendor_base_fs_file"]
 
-  try:
-    data = read_helper("META/imagesizes.txt")
-    for line in data.split("\n"):
-      if not line:
-        continue
-      name, value = line.split(" ", 1)
-      if not value:
-        continue
-      if name == "blocksize":
-        d[name] = value
-      else:
-        d[name + "_size"] = value
-  except KeyError:
-    pass
-
   def makeint(key):
     if key in d:
       d[key] = int(d[key], 0)
@@ -258,6 +249,20 @@
 
   d["build.prop"] = LoadBuildProp(read_helper, 'SYSTEM/build.prop')
   d["vendor.build.prop"] = LoadBuildProp(read_helper, 'VENDOR/build.prop')
+
+  # Set up the salt (based on fingerprint or thumbprint) that will be used when
+  # adding the AVB footer.
+  if d.get("avb_enable") == "true":
+    fp = None
+    if "build.prop" in d:
+      build_prop = d["build.prop"]
+      if "ro.build.fingerprint" in build_prop:
+        fp = build_prop["ro.build.fingerprint"]
+      elif "ro.build.thumbprint" in build_prop:
+        fp = build_prop["ro.build.thumbprint"]
+    if fp:
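+      # A fingerprint-derived salt keeps the AVB footer deterministic across
+      # rebuilds of the same fingerprint, rather than varying with a random
+      # salt on every run.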
+      d["avb_salt"] = sha256(fp).hexdigest()
+
   return d
 
 
@@ -500,15 +505,15 @@
     img_unsigned.close()
     img_keyblock.close()
 
-  # AVB: if enabled, calculate and add hash to boot.img.
+  # AVB: if enabled, calculate and add hash to boot.img or recovery.img.
   if info_dict.get("avb_enable") == "true":
     avbtool = os.getenv('AVBTOOL') or info_dict["avb_avbtool"]
-    part_size = info_dict["boot_size"]
+    part_size = info_dict[partition_name + "_size"]
     cmd = [avbtool, "add_hash_footer", "--image", img.name,
            "--partition_size", str(part_size), "--partition_name",
            partition_name]
     AppendAVBSigningArgs(cmd, partition_name)
-    args = info_dict.get("avb_boot_add_hash_footer_args")
+    args = info_dict.get("avb_" + partition_name + "_add_hash_footer_args")
     if args and args.strip():
       cmd.extend(shlex.split(args))
     p = Run(cmd, stdout=subprocess.PIPE)
@@ -565,18 +570,22 @@
   return None
 
 
-def UnzipTemp(filename, pattern=None):
-  """Unzip the given archive into a temporary directory and return the name.
-
-  If filename is of the form "foo.zip+bar.zip", unzip foo.zip into a
-  temp dir, then unzip bar.zip into that_dir/BOOTABLE_IMAGES.
-
-  Returns (tempdir, zipobj) where zipobj is a zipfile.ZipFile (of the
-  main file), open for reading.
+def Gunzip(in_filename, out_filename):
+  """Gunzip the given gzip compressed file to a given output file.
   """
+  with gzip.open(in_filename, "rb") as in_file, open(out_filename, "wb") as out_file:
+    shutil.copyfileobj(in_file, out_file)
 
-  tmp = tempfile.mkdtemp(prefix="targetfiles-")
-  OPTIONS.tempfiles.append(tmp)
+
+def UnzipTemp(filename, pattern=None):
+  """Unzips the given archive into a temporary directory and returns the name.
+
+  If filename is of the form "foo.zip+bar.zip", unzip foo.zip into a temp dir,
+  then unzip bar.zip into that_dir/BOOTABLE_IMAGES.
+
+  Returns:
+    The name of the temporary directory.
+  """
 
   def unzip_to_dir(filename, dirname):
     cmd = ["unzip", "-o", "-q", filename, "-d", dirname]
@@ -588,6 +597,7 @@
       raise ExternalError("failed to unzip input target-files \"%s\"" %
                           (filename,))
 
+  tmp = MakeTempDir(prefix="targetfiles-")
   m = re.match(r"^(.*[.]zip)\+(.*[.]zip)$", filename, re.IGNORECASE)
   if m:
     unzip_to_dir(m.group(1), tmp)
@@ -596,7 +606,66 @@
   else:
     unzip_to_dir(filename, tmp)
 
-  return tmp, zipfile.ZipFile(filename, "r")
+  return tmp
+
+
+def GetSparseImage(which, tmpdir, input_zip, allow_shared_blocks):
+  """Returns a SparseImage object suitable for passing to BlockImageDiff.
+
+  This function loads the specified sparse image from the given path, and
+  performs additional processing for OTA purposes. For example, it always adds
+  block 0 to the clobbered blocks list. It also detects files that cannot be
+  reconstructed from the block list, for which we should avoid applying
+  imgdiff.
+
+  Args:
+    which: The partition name, which must be "system" or "vendor".
+    tmpdir: The directory that contains the prebuilt image and block map file.
+    input_zip: The target-files ZIP archive.
+    allow_shared_blocks: Whether having shared blocks is allowed.
+
+  Returns:
+    A SparseImage object, with file_map info loaded.
+  """
+  assert which in ("system", "vendor")
+
+  path = os.path.join(tmpdir, "IMAGES", which + ".img")
+  mappath = os.path.join(tmpdir, "IMAGES", which + ".map")
+
+  # The image and map files must have been created prior to calling
+  # ota_from_target_files.py (since LMP).
+  assert os.path.exists(path) and os.path.exists(mappath)
+
+  # In ext4 filesystems, block 0 might be changed even when mounted R/O. We add
+  # it to clobbered_blocks so that it will be written to the target
+  # unconditionally. Note that clobbered blocks are still part of care_map.
+  # (Bug: 20939131)
+  clobbered_blocks = "0"
+
+  image = sparse_img.SparseImage(path, mappath, clobbered_blocks,
+                                 allow_shared_blocks=allow_shared_blocks)
+
+  # block.map may contain fewer blocks, because mke2fs may skip allocating
+  # blocks if they contain all zeros. We can't reconstruct such a file from its
+  # block list. Tag such entries accordingly. (Bug: 65213616)
+  for entry in image.file_map:
+    # "/system/framework/am.jar" => "SYSTEM/framework/am.jar".
+    arcname = string.replace(entry, which, which.upper(), 1)[1:]
+    # Skip artificial names, such as "__ZERO", "__NONZERO-1".
+    if arcname not in input_zip.namelist():
+      continue
+
+    info = input_zip.getinfo(arcname)
+    ranges = image.file_map[entry]
+
+    # If a RangeSet has been tagged as using shared blocks while loading the
+    # image, its block list must already be incomplete for that reason. Don't
+    # give it the 'incomplete' tag, to avoid messing up the imgdiff stats.
+    if ranges.extra.get('uses_shared_blocks'):
+      continue
+
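+    # For example, a 6000-byte file rounds up to 8192 bytes (two 4K blocks);
+    # if its block list covers only one block, mke2fs skipped the rest and the
+    # file cannot be fully reconstructed from the list.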
+    if RoundUpTo4K(info.file_size) > ranges.size() * 4096:
+      ranges.extra['incomplete'] = True
+
+  return image
 
 
 def GetKeyPasswords(keylist):
@@ -785,28 +854,82 @@
 
 
 def ReadApkCerts(tf_zip):
-  """Given a target_files ZipFile, parse the META/apkcerts.txt file
-  and return a {package: cert} dict."""
+  """Parses the APK certs info from a given target-files zip.
+
+  Given a target-files ZipFile, parses the META/apkcerts.txt entry and returns a
+  tuple with the following elements: (1) a dictionary that maps packages to
+  certs (based on the "certificate" and "private_key" attributes in the file;
+  (2) a string representing the extension of compressed APKs in the target files
+  (e.g ".gz", ".bro").
+
+  Args:
+    tf_zip: The input target_files ZipFile (already open).
+
+  Returns:
+    (certmap, ext): certmap is a dictionary that maps packages to certs; ext is
+        the extension string of compressed APKs (e.g. ".gz"), or None if there
+        are no compressed APKs.
+  """
   certmap = {}
+  compressed_extension = None
+
+  # META/apkcerts.txt contains the info for _all_ the packages known at build
+  # time. Filter out the ones that are not installed.
+  installed_files = set()
+  for name in tf_zip.namelist():
+    basename = os.path.basename(name)
+    if basename:
+      installed_files.add(basename)
+
   for line in tf_zip.read("META/apkcerts.txt").split("\n"):
     line = line.strip()
     if not line:
       continue
-    m = re.match(r'^name="(.*)"\s+certificate="(.*)"\s+'
-                 r'private_key="(.*)"$', line)
-    if m:
-      name, cert, privkey = m.groups()
-      public_key_suffix_len = len(OPTIONS.public_key_suffix)
-      private_key_suffix_len = len(OPTIONS.private_key_suffix)
-      if cert in SPECIAL_CERT_STRINGS and not privkey:
-        certmap[name] = cert
-      elif (cert.endswith(OPTIONS.public_key_suffix) and
-            privkey.endswith(OPTIONS.private_key_suffix) and
-            cert[:-public_key_suffix_len] == privkey[:-private_key_suffix_len]):
-        certmap[name] = cert[:-public_key_suffix_len]
-      else:
-        raise ValueError("failed to parse line from apkcerts.txt:\n" + line)
-  return certmap
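+    # A typical apkcerts.txt line looks like the following (paths are
+    # illustrative):
+    #   name="Foo.apk" certificate="certs/app.x509.pem"
+    #       private_key="certs/app.pk8" compressed="gz"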
+    m = re.match(
+        r'^name="(?P<NAME>.*)"\s+certificate="(?P<CERT>.*)"\s+'
+        r'private_key="(?P<PRIVKEY>.*?)"(\s+compressed="(?P<COMPRESSED>.*)")?$',
+        line)
+    if not m:
+      continue
+
+    matches = m.groupdict()
+    cert = matches["CERT"]
+    privkey = matches["PRIVKEY"]
+    name = matches["NAME"]
+    this_compressed_extension = matches["COMPRESSED"]
+
+    public_key_suffix_len = len(OPTIONS.public_key_suffix)
+    private_key_suffix_len = len(OPTIONS.private_key_suffix)
+    if cert in SPECIAL_CERT_STRINGS and not privkey:
+      certmap[name] = cert
+    elif (cert.endswith(OPTIONS.public_key_suffix) and
+          privkey.endswith(OPTIONS.private_key_suffix) and
+          cert[:-public_key_suffix_len] == privkey[:-private_key_suffix_len]):
+      certmap[name] = cert[:-public_key_suffix_len]
+    else:
+      raise ValueError("Failed to parse line from apkcerts.txt:\n" + line)
+
+    if not this_compressed_extension:
+      continue
+
+    # Only count the installed files.
+    filename = name + '.' + this_compressed_extension
+    if filename not in installed_files:
+      continue
+
+    # Make sure that all the values in the compression map have the same
+    # extension. We don't support multiple compression methods in the same
+    # system image.
+    if compressed_extension:
+      if this_compressed_extension != compressed_extension:
+        raise ValueError(
+            "Multiple compressed extensions: {} vs {}".format(
+                compressed_extension, this_compressed_extension))
+    else:
+      compressed_extension = this_compressed_extension
+
+  return (certmap,
+          ("." + compressed_extension) if compressed_extension else None)
 
 
 COMMON_DOCSTRING = """
@@ -914,12 +1037,24 @@
   return fn
 
 
+def MakeTempDir(prefix='tmp', suffix=''):
+  """Makes a temporary dir that will be cleaned up with a call to Cleanup().
+
+  Returns:
+    The absolute pathname of the new directory.
+  """
+  dir_name = tempfile.mkdtemp(suffix=suffix, prefix=prefix)
+  OPTIONS.tempfiles.append(dir_name)
+  return dir_name
+
+
 def Cleanup():
   for i in OPTIONS.tempfiles:
     if os.path.isdir(i):
-      shutil.rmtree(i)
+      shutil.rmtree(i, ignore_errors=True)
     else:
       os.remove(i)
+  del OPTIONS.tempfiles[:]
 
 
 class PasswordManager(object):
@@ -1106,6 +1241,28 @@
   zipfile.ZIP64_LIMIT = saved_zip64_limit
 
 
+def ZipDelete(zip_filename, entries):
+  """Deletes entries from a ZIP file.
+
+  Since deleting entries from a ZIP file is not supported by Python's zipfile
+  module, it shells out to 'zip -d'.
+
+  Args:
+    zip_filename: The name of the ZIP file.
+    entries: The name of the entry, or the list of names to be deleted.
+
+  Raises:
+    AssertionError: In case of non-zero return from 'zip'.
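+
+  Example:
+    ZipDelete('ota.zip', 'META-INF/com/android/metadata')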
+  """
+  if isinstance(entries, basestring):
+    entries = [entries]
+  cmd = ["zip", "-d", zip_filename] + entries
+  proc = Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+  stdoutdata, _ = proc.communicate()
+  assert proc.returncode == 0, "Failed to delete %s:\n%s" % (entries,
+                                                             stdoutdata)
+
+
 def ZipClose(zip_file):
   # http://b/18015246
   # zipfile also refers to ZIP64_LIMIT during close() when it writes out the
@@ -1290,7 +1447,7 @@
           p.kill()
           th.join()
 
-      if err or p.returncode != 0:
+      if p.returncode != 0:
         print("WARNING: failure running %s:\n%s\n" % (
             diff_program, "".join(err)))
         self.patch = None
@@ -1368,20 +1525,16 @@
     self.disable_imgdiff = disable_imgdiff
 
     if version is None:
-      version = 1
-      if OPTIONS.info_dict:
-        version = max(
-            int(i) for i in
-            OPTIONS.info_dict.get("blockimgdiff_versions", "1").split(","))
+      version = max(
+          int(i) for i in
+          OPTIONS.info_dict.get("blockimgdiff_versions", "1").split(","))
     assert version >= 3
     self.version = version
 
     b = blockimgdiff.BlockImageDiff(tgt, src, threads=OPTIONS.worker_threads,
                                     version=self.version,
                                     disable_imgdiff=self.disable_imgdiff)
-    tmpdir = tempfile.mkdtemp()
-    OPTIONS.tempfiles.append(tmpdir)
-    self.path = os.path.join(tmpdir, partition)
+    self.path = os.path.join(MakeTempDir(), partition)
     b.Compute(self.path)
     self._required_cache = b.max_stashed_size
     self.touched_src_ranges = b.touched_src_ranges
@@ -1629,50 +1782,93 @@
 
 
 def ParseCertificate(data):
-  """Parse a PEM-format certificate."""
-  cert = []
+  """Parses and converts a PEM-encoded certificate into DER-encoded.
+
+  This gives the same result as `openssl x509 -in <filename> -outform DER`.
+
+  Returns:
+    The decoded certificate string.
+  """
+  cert_buffer = []
   save = False
   for line in data.split("\n"):
     if "--END CERTIFICATE--" in line:
       break
     if save:
-      cert.append(line)
+      cert_buffer.append(line)
     if "--BEGIN CERTIFICATE--" in line:
       save = True
-  cert = "".join(cert).decode('base64')
+  cert = "".join(cert_buffer).decode('base64')
   return cert
 
+
+def ExtractPublicKey(cert):
+  """Extracts the public key (PEM-encoded) from the given certificate file.
+
+  Args:
+    cert: The certificate filename.
+
+  Returns:
+    The public key string.
+
+  Raises:
+    AssertionError: On non-zero return from 'openssl'.
+  """
+  # The behavior with '-out' is different between openssl 1.1 and openssl 1.0.
+  # While openssl 1.1 writes the key into the filename following '-out',
+  # openssl 1.0 (both 1.0.1 and 1.0.2) doesn't. So we collect the output from
+  # stdout instead.
+  cmd = ['openssl', 'x509', '-pubkey', '-noout', '-in', cert]
+  proc = Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+  pubkey, stderrdata = proc.communicate()
+  assert proc.returncode == 0, \
+      'Failed to dump public key from certificate: %s\n%s' % (cert, stderrdata)
+  return pubkey
+
+
 def MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img,
                       info_dict=None):
-  """Generate a binary patch that creates the recovery image starting
-  with the boot image.  (Most of the space in these images is just the
-  kernel, which is identical for the two, so the resulting patch
-  should be efficient.)  Add it to the output zip, along with a shell
-  script that is run from init.rc on first boot to actually do the
-  patching and install the new recovery image.
+  """Generates the recovery-from-boot patch and writes the script to output.
 
-  recovery_img and boot_img should be File objects for the
-  corresponding images.  info should be the dictionary returned by
-  common.LoadInfoDict() on the input target_files.
+  Most of the space in the boot and recovery images is just the kernel, which is
+  identical for the two, so the resulting patch should be efficient. Add it to
+  the output zip, along with a shell script that is run from init.rc on first
+  boot to actually do the patching and install the new recovery image.
+
+  Args:
+    input_dir: The top-level input directory of the target-files.zip.
+    output_sink: The callback function that writes the result.
+    recovery_img: File object for the recovery image.
+    boot_img: File object for the boot image.
+    info_dict: A dict returned by common.LoadInfoDict() on the input
+        target_files. Will use OPTIONS.info_dict if None has been given.
   """
-
   if info_dict is None:
     info_dict = OPTIONS.info_dict
 
-  full_recovery_image = info_dict.get("full_recovery_image", None) == "true"
+  full_recovery_image = info_dict.get("full_recovery_image") == "true"
 
   if full_recovery_image:
     output_sink("etc/recovery.img", recovery_img.data)
 
   else:
-    diff_program = ["imgdiff"]
+    system_root_image = info_dict.get("system_root_image") == "true"
     path = os.path.join(input_dir, "SYSTEM", "etc", "recovery-resource.dat")
-    if os.path.exists(path):
-      diff_program.append("-b")
-      diff_program.append(path)
-      bonus_args = "-b /system/etc/recovery-resource.dat"
-    else:
+    # With system-root-image, boot and recovery images have mismatching entries
+    # (only recovery has the ramdisk entry; Bug: 72731506). Use bsdiff to
+    # handle such a case.
+    if system_root_image:
+      diff_program = ["bsdiff"]
       bonus_args = ""
+      assert not os.path.exists(path)
+    else:
+      diff_program = ["imgdiff"]
+      if os.path.exists(path):
+        diff_program.append("-b")
+        diff_program.append(path)
+        bonus_args = "-b /system/etc/recovery-resource.dat"
+      else:
+        bonus_args = ""
 
     d = Difference(recovery_img, boot_img, diff_program=diff_program)
     _, _, patch = d.ComputePatch()
diff --git a/tools/releasetools/edify_generator.py b/tools/releasetools/edify_generator.py
index 0c44faf..7a81928 100644
--- a/tools/releasetools/edify_generator.py
+++ b/tools/releasetools/edify_generator.py
@@ -77,14 +77,14 @@
     with temporary=True) to this one."""
     self.script.extend(other.script)
 
-  def AssertOemProperty(self, name, values):
+  def AssertOemProperty(self, name, values, oem_no_mount):
     """Assert that a property on the OEM paritition matches allowed values."""
     if not name:
       raise ValueError("must specify an OEM property")
     if not values:
       raise ValueError("must specify the OEM value")
-    get_prop_command = None
-    if common.OPTIONS.oem_no_mount:
+
+    if oem_no_mount:
       get_prop_command = 'getprop("%s")' % name
     else:
       get_prop_command = 'file_getprop("/oem/oem.prop", "%s")' % name
diff --git a/tools/releasetools/img_from_target_files.py b/tools/releasetools/img_from_target_files.py
index 4422b53..e6e8c9f 100755
--- a/tools/releasetools/img_from_target_files.py
+++ b/tools/releasetools/img_from_target_files.py
@@ -71,8 +71,7 @@
     common.Usage(__doc__)
     sys.exit(1)
 
-  OPTIONS.input_tmp, input_zip = common.UnzipTemp(
-      args[0], ["IMAGES/*", "OTA/*"])
+  OPTIONS.input_tmp = common.UnzipTemp(args[0], ["IMAGES/*", "OTA/*"])
   output_zip = zipfile.ZipFile(args[1], "w", compression=zipfile.ZIP_DEFLATED)
   CopyInfo(output_zip)
 
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 60aa84e..aba43e2 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -49,8 +49,10 @@
 
   -o  (--oem_settings)  <main_file[,additional_files...]>
       Comma-separated list of files used to specify the expected OEM-specific
-      properties on the OEM partition of the intended device.
-      Multiple expected values can be used by providing multiple files.
+      properties on the OEM partition of the intended device. Multiple expected
+      values can be used by providing multiple files. Only the first dict will
+      be used to compute the fingerprint, while the rest will be used to
+      assert OEM-specific properties.
 
   --oem_no_mount
       For devices with OEM-specific properties but without an OEM partition,
@@ -58,7 +60,7 @@
       very rarely used, since it's expected to have a dedicated OEM partition
       for OEM-specific properties. Only meaningful when -o is specified.
 
-  -w  (--wipe_user_data)
+  --wipe_user_data
       Generate an OTA package that will wipe the user data partition
       when installed.
 
@@ -90,6 +92,24 @@
       first, so that any changes made to the system partition are done
       using the new recovery (new kernel, etc.).
 
+  --include_secondary
+      Additionally include the payload for secondary slot images (default:
+      False). Only meaningful when generating A/B OTAs.
+
+      By default, an A/B OTA package doesn't contain the images for the
+      secondary slot (e.g. system_other.img). Specifying this flag allows
+      generating a separate payload that will install secondary slot images.
+
+      Such a package needs to be applied in a two-stage manner, with a reboot
+      in-between. During the first stage, the updater applies the primary
+      payload only. Upon finishing, it reboots the device into the newly updated
+      slot. It then continues to install the secondary payload to the inactive
+      slot, but without switching the active slot at the end (needs the matching
+      support in update_engine, i.e. SWITCH_SLOT_ON_REBOOT flag).
+
+      Due to the special install procedure, the secondary payload will always
+      be generated as a full payload.
+
   --block
       Generate a block-based OTA for non-A/B device. We have deprecated the
       support for file-based OTA since O. Block-based OTA will be used by
@@ -109,9 +129,6 @@
       Specifies the threshold that will be used to compute the maximum
       allowed stash size (defaults to 0.8).
 
-  --gen_verify
-      Generate an OTA package that verifies the partitions.
-
   --log_diff <file>
       Generate a log file that shows the differences in the source and target
       builds for an incremental package. This option is only meaningful when
@@ -127,27 +144,34 @@
 
   --payload_signer_args <args>
       Specify the arguments needed for payload signer.
+
+  --skip_postinstall
+      Skip the postinstall hooks when generating an A/B OTA package (default:
+      False). Note that this discards ALL the hooks, including non-optional
+      ones. Should only be used if the caller knows it's safe to do so (e.g.
+      all the postinstall work is to dexopt apps and a data wipe will happen
+      immediately after). Only meaningful when generating A/B OTAs.
 """
 
 from __future__ import print_function
 
-import sys
-
-if sys.hexversion < 0x02070000:
-  print("Python 2.7 or newer is required.", file=sys.stderr)
-  sys.exit(1)
-
-import copy
 import multiprocessing
 import os.path
-import subprocess
 import shlex
+import shutil
+import struct
+import subprocess
+import sys
 import tempfile
 import zipfile
 
 import common
 import edify_generator
-import sparse_img
+
+if sys.hexversion < 0x02070000:
+  print("Python 2.7 or newer is required.", file=sys.stderr)
+  sys.exit(1)
+
 
 OPTIONS = common.OPTIONS
 OPTIONS.package_key = None
@@ -162,28 +186,345 @@
 if OPTIONS.worker_threads == 0:
   OPTIONS.worker_threads = 1
 OPTIONS.two_step = False
+OPTIONS.include_secondary = False
 OPTIONS.no_signing = False
 OPTIONS.block_based = True
 OPTIONS.updater_binary = None
 OPTIONS.oem_source = None
 OPTIONS.oem_no_mount = False
-OPTIONS.fallback_to_full = True
 OPTIONS.full_radio = False
 OPTIONS.full_bootloader = False
 # Stash size cannot exceed cache_size * threshold.
 OPTIONS.cache_size = None
 OPTIONS.stash_threshold = 0.8
-OPTIONS.gen_verify = False
 OPTIONS.log_diff = None
 OPTIONS.payload_signer = None
 OPTIONS.payload_signer_args = []
 OPTIONS.extracted_input = None
 OPTIONS.key_passwords = []
+OPTIONS.skip_postinstall = False
+
 
 METADATA_NAME = 'META-INF/com/android/metadata'
+POSTINSTALL_CONFIG = 'META/postinstall_config.txt'
 UNZIP_PATTERN = ['IMAGES/*', 'META/*']
 
 
+class BuildInfo(object):
+  """A class that holds the information for a given build.
+
+  This class wraps up the property querying for a given source or target build.
+  It abstracts away the logic of handling OEM-specific properties, and caches
+  the commonly used properties such as fingerprint.
+
+  There are two types of info dicts: a) build-time info dict, which is generated
+  at build time (i.e. included in a target_files zip); b) OEM info dict that is
+  specified at package generation time (via command line argument
+  '--oem_settings'). If a build doesn't use OEM-specific properties (i.e. not
+  having "oem_fingerprint_properties" in build-time info dict), all the queries
+  would be answered based on the build-time info dict only. Otherwise, if using
+  OEM-specific properties, some of them will be calculated from the two info
+  dicts.
+
+  Users can query properties as they would with a dict() (e.g. info['fstab']),
+  or query build properties via GetBuildProp() or GetVendorBuildProp().
+
+  Attributes:
+    info_dict: The build-time info dict.
+    is_ab: Whether it's a build that uses A/B OTA.
+    oem_dicts: A list of OEM dicts.
+    oem_props: A list of OEM properties that should be read from OEM dicts; None
+        if the build doesn't use any OEM-specific property.
+    fingerprint: The fingerprint of the build, which would be calculated based
+        on OEM properties if applicable.
+    device: The device name, which could come from OEM dicts if applicable.
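+
+  Example:
+    info = BuildInfo(OPTIONS.info_dict, OPTIONS.oem_dicts)
+    fstab = info["fstab"]
+    device = info.GetBuildProp("ro.product.device")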
+  """
+
+  def __init__(self, info_dict, oem_dicts):
+    """Initializes a BuildInfo instance with the given dicts.
+
+    Arguments:
+      info_dict: The build-time info dict.
+      oem_dicts: A list of OEM dicts (which is parsed from --oem_settings). Note
+          that it always uses the first dict to calculate the fingerprint or the
+          device name. The rest would be used for asserting OEM properties only
+          (e.g. one package can be installed on one of these devices).
+    """
+    self.info_dict = info_dict
+    self.oem_dicts = oem_dicts
+
+    self._is_ab = info_dict.get("ab_update") == "true"
+    self._oem_props = info_dict.get("oem_fingerprint_properties")
+
+    if self._oem_props:
+      assert oem_dicts, "OEM source required for this build"
+
+    # These two should be computed only after setting self._oem_props.
+    self._device = self.GetOemProperty("ro.product.device")
+    self._fingerprint = self.CalculateFingerprint()
+
+  @property
+  def is_ab(self):
+    return self._is_ab
+
+  @property
+  def device(self):
+    return self._device
+
+  @property
+  def fingerprint(self):
+    return self._fingerprint
+
+  @property
+  def oem_props(self):
+    return self._oem_props
+
+  def __getitem__(self, key):
+    return self.info_dict[key]
+
+  def get(self, key, default=None):
+    return self.info_dict.get(key, default)
+
+  def GetBuildProp(self, prop):
+    """Returns the inquired build property."""
+    try:
+      return self.info_dict.get("build.prop", {})[prop]
+    except KeyError:
+      raise common.ExternalError("couldn't find %s in build.prop" % (prop,))
+
+  def GetVendorBuildProp(self, prop):
+    """Returns the inquired vendor build property."""
+    try:
+      return self.info_dict.get("vendor.build.prop", {})[prop]
+    except KeyError:
+      raise common.ExternalError(
+          "couldn't find %s in vendor.build.prop" % (prop,))
+
+  def GetOemProperty(self, key):
+    if self.oem_props is not None and key in self.oem_props:
+      return self.oem_dicts[0][key]
+    return self.GetBuildProp(key)
+
+  def CalculateFingerprint(self):
+    if self.oem_props is None:
+      return self.GetBuildProp("ro.build.fingerprint")
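+    # For OEM builds, compose "brand/name/device:thumbprint" from the OEM
+    # properties instead of reading ro.build.fingerprint directly.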
+    return "%s/%s/%s:%s" % (
+        self.GetOemProperty("ro.product.brand"),
+        self.GetOemProperty("ro.product.name"),
+        self.GetOemProperty("ro.product.device"),
+        self.GetBuildProp("ro.build.thumbprint"))
+
+  def WriteMountOemScript(self, script):
+    assert self.oem_props is not None
+    recovery_mount_options = self.info_dict.get("recovery_mount_options")
+    script.Mount("/oem", recovery_mount_options)
+
+  def WriteDeviceAssertions(self, script, oem_no_mount):
+    # Read the property directly if not using OEM properties.
+    if not self.oem_props:
+      script.AssertDevice(self.device)
+      return
+
+    # Otherwise assert OEM properties.
+    if not self.oem_dicts:
+      raise common.ExternalError(
+          "No OEM file provided to answer expected assertions")
+
+    for prop in self.oem_props.split():
+      values = []
+      for oem_dict in self.oem_dicts:
+        if prop in oem_dict:
+          values.append(oem_dict[prop])
+      if not values:
+        raise common.ExternalError(
+            "The OEM file is missing the property %s" % (prop,))
+      script.AssertOemProperty(prop, values, oem_no_mount)
+
+
+class PayloadSigner(object):
+  """A class that wraps the payload signing works.
+
+  When generating a Payload, hashes of the payload and metadata files will be
+  signed with the device key, either by calling an external payload signer or
+  by calling openssl with the package key. This class provides a unified
+  interface, so that callers can just call PayloadSigner.Sign().
+
+  If an external payload signer has been specified (OPTIONS.payload_signer), it
+  calls the signer with the provided args (OPTIONS.payload_signer_args). Note
+  that the signing key should be provided as part of the payload_signer_args.
+  Otherwise without an external signer, it uses the package key
+  (OPTIONS.package_key) and calls openssl for the signing works.
+  """
+
+  def __init__(self):
+    if OPTIONS.payload_signer is None:
+      # Prepare the payload signing key.
+      private_key = OPTIONS.package_key + OPTIONS.private_key_suffix
+      pw = OPTIONS.key_passwords[OPTIONS.package_key]
+
+      cmd = ["openssl", "pkcs8", "-in", private_key, "-inform", "DER"]
+      cmd.extend(["-passin", "pass:" + pw] if pw else ["-nocrypt"])
+      signing_key = common.MakeTempFile(prefix="key-", suffix=".key")
+      cmd.extend(["-out", signing_key])
+
+      get_signing_key = common.Run(cmd, verbose=False, stdout=subprocess.PIPE,
+                                   stderr=subprocess.STDOUT)
+      stdoutdata, _ = get_signing_key.communicate()
+      assert get_signing_key.returncode == 0, \
+          "Failed to get signing key: {}".format(stdoutdata)
+
+      self.signer = "openssl"
+      self.signer_args = ["pkeyutl", "-sign", "-inkey", signing_key,
+                          "-pkeyopt", "digest:sha256"]
+    else:
+      self.signer = OPTIONS.payload_signer
+      self.signer_args = OPTIONS.payload_signer_args
+
+  def Sign(self, in_file):
+    """Signs the given input file. Returns the output filename."""
+    out_file = common.MakeTempFile(prefix="signed-", suffix=".bin")
+    cmd = [self.signer] + self.signer_args + ['-in', in_file, '-out', out_file]
+    signing = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    stdoutdata, _ = signing.communicate()
+    assert signing.returncode == 0, \
+        "Failed to sign the input file: {}".format(stdoutdata)
+    return out_file
+
+
+class Payload(object):
+  """Manages the creation and the signing of an A/B OTA Payload."""
+
+  PAYLOAD_BIN = 'payload.bin'
+  PAYLOAD_PROPERTIES_TXT = 'payload_properties.txt'
+  SECONDARY_PAYLOAD_BIN = 'secondary/payload.bin'
+  SECONDARY_PAYLOAD_PROPERTIES_TXT = 'secondary/payload_properties.txt'
+
+  def __init__(self, secondary=False):
+    """Initializes a Payload instance.
+
+    Args:
+      secondary: Whether it's generating a secondary payload (default: False).
+    """
+    # The place where the output from the subprocess should go.
+    self._log_file = sys.stdout if OPTIONS.verbose else subprocess.PIPE
+    self.payload_file = None
+    self.payload_properties = None
+    self.secondary = secondary
+
+  def Generate(self, target_file, source_file=None, additional_args=None):
+    """Generates a payload from the given target-files zip(s).
+
+    Args:
+      target_file: The filename of the target build target-files zip.
+      source_file: The filename of the source build target-files zip; or None if
+          generating a full OTA.
+      additional_args: A list of additional args that should be passed to
+          brillo_update_payload script; or None.
+    """
+    if additional_args is None:
+      additional_args = []
+
+    payload_file = common.MakeTempFile(prefix="payload-", suffix=".bin")
+    cmd = ["brillo_update_payload", "generate",
+           "--payload", payload_file,
+           "--target_image", target_file]
+    if source_file is not None:
+      cmd.extend(["--source_image", source_file])
+    cmd.extend(additional_args)
+    p = common.Run(cmd, stdout=self._log_file, stderr=subprocess.STDOUT)
+    stdoutdata, _ = p.communicate()
+    assert p.returncode == 0, \
+        "brillo_update_payload generate failed: {}".format(stdoutdata)
+
+    self.payload_file = payload_file
+    self.payload_properties = None
+
+  def Sign(self, payload_signer):
+    """Generates and signs the hashes of the payload and metadata.
+
+    Args:
+      payload_signer: A PayloadSigner() instance that does the signing work.
+
+    Raises:
+      AssertionError: On any failure when calling brillo_update_payload script.
+    """
+    assert isinstance(payload_signer, PayloadSigner)
+
+    # 1. Generate hashes of the payload and metadata files.
+    payload_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
+    metadata_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
+    cmd = ["brillo_update_payload", "hash",
+           "--unsigned_payload", self.payload_file,
+           "--signature_size", "256",
+           "--metadata_hash_file", metadata_sig_file,
+           "--payload_hash_file", payload_sig_file]
+    p1 = common.Run(cmd, stdout=self._log_file, stderr=subprocess.STDOUT)
+    p1.communicate()
+    assert p1.returncode == 0, "brillo_update_payload hash failed"
+
+    # 2. Sign the hashes.
+    signed_payload_sig_file = payload_signer.Sign(payload_sig_file)
+    signed_metadata_sig_file = payload_signer.Sign(metadata_sig_file)
+
+    # 3. Insert the signatures back into the payload file.
+    signed_payload_file = common.MakeTempFile(prefix="signed-payload-",
+                                              suffix=".bin")
+    cmd = ["brillo_update_payload", "sign",
+           "--unsigned_payload", self.payload_file,
+           "--payload", signed_payload_file,
+           "--signature_size", "256",
+           "--metadata_signature_file", signed_metadata_sig_file,
+           "--payload_signature_file", signed_payload_sig_file]
+    p1 = common.Run(cmd, stdout=self._log_file, stderr=subprocess.STDOUT)
+    p1.communicate()
+    assert p1.returncode == 0, "brillo_update_payload sign failed"
+
+    # 4. Dump the signed payload properties.
+    properties_file = common.MakeTempFile(prefix="payload-properties-",
+                                          suffix=".txt")
+    cmd = ["brillo_update_payload", "properties",
+           "--payload", signed_payload_file,
+           "--properties_file", properties_file]
+    p1 = common.Run(cmd, stdout=self._log_file, stderr=subprocess.STDOUT)
+    p1.communicate()
+    assert p1.returncode == 0, "brillo_update_payload properties failed"
+
+    if self.secondary:
+      with open(properties_file, "a") as f:
+        f.write("SWITCH_SLOT_ON_REBOOT=0\n")
+
+    if OPTIONS.wipe_user_data:
+      with open(properties_file, "a") as f:
+        f.write("POWERWASH=1\n")
+
+    self.payload_file = signed_payload_file
+    self.payload_properties = properties_file
+
+  def WriteToZip(self, output_zip):
+    """Writes the payload to the given zip.
+
+    Args:
+      output_zip: The output ZipFile instance.
+    """
+    assert self.payload_file is not None
+    assert self.payload_properties is not None
+
+    if self.secondary:
+      payload_arcname = Payload.SECONDARY_PAYLOAD_BIN
+      payload_properties_arcname = Payload.SECONDARY_PAYLOAD_PROPERTIES_TXT
+    else:
+      payload_arcname = Payload.PAYLOAD_BIN
+      payload_properties_arcname = Payload.PAYLOAD_PROPERTIES_TXT
+
+    # Add the signed payload file and properties into the zip. In order to
+    # support streaming, we pack them as ZIP_STORED, so that these entries can
+    # be read directly with the offset and length pairs.
+    common.ZipWrite(output_zip, self.payload_file, arcname=payload_arcname,
+                    compress_type=zipfile.ZIP_STORED)
+    common.ZipWrite(output_zip, self.payload_properties,
+                    arcname=payload_properties_arcname,
+                    compress_type=zipfile.ZIP_STORED)
+
+
 def SignOutput(temp_zip_name, output_zip_name):
   pw = OPTIONS.key_passwords[OPTIONS.package_key]
 
@@ -191,37 +532,15 @@
                   whole_file=True)
 
 
-def AppendAssertions(script, info_dict, oem_dicts=None):
-  oem_props = info_dict.get("oem_fingerprint_properties")
-  if not oem_props:
-    device = GetBuildProp("ro.product.device", info_dict)
-    script.AssertDevice(device)
-  else:
-    if not oem_dicts:
-      raise common.ExternalError(
-          "No OEM file provided to answer expected assertions")
-    for prop in oem_props.split():
-      values = []
-      for oem_dict in oem_dicts:
-        if oem_dict.get(prop):
-          values.append(oem_dict[prop])
-      if not values:
-        raise common.ExternalError(
-            "The OEM file is missing the property %s" % prop)
-      script.AssertOemProperty(prop, values)
-
-
-def _LoadOemDicts(script, recovery_mount_options=None):
+def _LoadOemDicts(oem_source):
   """Returns the list of loaded OEM properties dict."""
-  oem_dicts = None
-  if OPTIONS.oem_source is None:
-    raise common.ExternalError("OEM source required for this build")
-  if not OPTIONS.oem_no_mount and script:
-    script.Mount("/oem", recovery_mount_options)
+  if not oem_source:
+    return None
+
   oem_dicts = []
-  for oem_file in OPTIONS.oem_source:
-    oem_dicts.append(common.LoadDictionaryFromLines(
-        open(oem_file).readlines()))
+  for oem_file in oem_source:
+    with open(oem_file) as fp:
+      oem_dicts.append(common.LoadDictionaryFromLines(fp.readlines()))
   return oem_dicts
 
 
@@ -272,55 +591,34 @@
     return False
 
 
-def HasTrebleEnabled(target_files_zip, info_dict):
+def HasTrebleEnabled(target_files_zip, target_info):
   return (HasVendorPartition(target_files_zip) and
-          GetBuildProp("ro.treble.enabled", info_dict) == "true")
+          target_info.GetBuildProp("ro.treble.enabled") == "true")
 
 
-def GetOemProperty(name, oem_props, oem_dict, info_dict):
-  if oem_props is not None and name in oem_props:
-    return oem_dict[name]
-  return GetBuildProp(name, info_dict)
+def WriteFingerprintAssertion(script, target_info, source_info):
+  source_oem_props = source_info.oem_props
+  target_oem_props = target_info.oem_props
+
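+  # Four cases: assert fingerprints when neither build uses OEM properties,
+  # thumbprints when both do, and fingerprint-or-thumbprint for the mixed
+  # cases.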
+  if source_oem_props is None and target_oem_props is None:
+    script.AssertSomeFingerprint(
+        source_info.fingerprint, target_info.fingerprint)
+  elif source_oem_props is not None and target_oem_props is not None:
+    script.AssertSomeThumbprint(
+        target_info.GetBuildProp("ro.build.thumbprint"),
+        source_info.GetBuildProp("ro.build.thumbprint"))
+  elif source_oem_props is None and target_oem_props is not None:
+    script.AssertFingerprintOrThumbprint(
+        source_info.fingerprint,
+        target_info.GetBuildProp("ro.build.thumbprint"))
+  else:
+    script.AssertFingerprintOrThumbprint(
+        target_info.fingerprint,
+        source_info.GetBuildProp("ro.build.thumbprint"))
 
 
-def CalculateFingerprint(oem_props, oem_dict, info_dict):
-  if oem_props is None:
-    return GetBuildProp("ro.build.fingerprint", info_dict)
-  return "%s/%s/%s:%s" % (
-      GetOemProperty("ro.product.brand", oem_props, oem_dict, info_dict),
-      GetOemProperty("ro.product.name", oem_props, oem_dict, info_dict),
-      GetOemProperty("ro.product.device", oem_props, oem_dict, info_dict),
-      GetBuildProp("ro.build.thumbprint", info_dict))
-
-
-def GetImage(which, tmpdir):
-  """Returns an image object suitable for passing to BlockImageDiff.
-
-  'which' partition must be "system" or "vendor". A prebuilt image and file
-  map must already exist in tmpdir.
-  """
-
-  assert which in ("system", "vendor")
-
-  path = os.path.join(tmpdir, "IMAGES", which + ".img")
-  mappath = os.path.join(tmpdir, "IMAGES", which + ".map")
-
-  # The image and map files must have been created prior to calling
-  # ota_from_target_files.py (since LMP).
-  assert os.path.exists(path) and os.path.exists(mappath)
-
-  # Bug: http://b/20939131
-  # In ext4 filesystems, block 0 might be changed even being mounted
-  # R/O. We add it to clobbered_blocks so that it will be written to the
-  # target unconditionally. Note that they are still part of care_map.
-  clobbered_blocks = "0"
-
-  return sparse_img.SparseImage(path, mappath, clobbered_blocks)
-
-
-def AddCompatibilityArchiveIfTrebleEnabled(target_zip, output_zip,
-                                           target_info_dict,
-                                           source_info_dict=None):
+def AddCompatibilityArchiveIfTrebleEnabled(target_zip, output_zip, target_info,
+                                           source_info=None):
   """Adds compatibility info into the output zip if it's Treble-enabled target.
 
   Metadata used for on-device compatibility verification is retrieved from
@@ -333,9 +631,9 @@
   Args:
     target_zip: Zip file containing the source files to be included for OTA.
     output_zip: Zip file that will be sent for OTA.
-    target_info_dict: The dict that holds the target build info.
-    source_info_dict: The dict that holds the source build info, if generating
-        an incremental OTA; None otherwise.
+    target_info: The BuildInfo instance that holds the target build info.
+    source_info: The BuildInfo instance that holds the source build info, if
+        generating an incremental OTA; None otherwise.
   """
 
   def AddCompatibilityArchive(system_updated, vendor_updated):
@@ -358,8 +656,8 @@
 
     # Create new archive.
     compatibility_archive = tempfile.NamedTemporaryFile()
-    compatibility_archive_zip = zipfile.ZipFile(compatibility_archive, "w",
-        compression=zipfile.ZIP_DEFLATED)
+    compatibility_archive_zip = zipfile.ZipFile(
+        compatibility_archive, "w", compression=zipfile.ZIP_DEFLATED)
 
     # Add metadata.
     for file_name in compatibility_files:
@@ -380,59 +678,58 @@
 
   # Will only proceed if the target has enabled the Treble support (as well as
   # having a /vendor partition).
-  if not HasTrebleEnabled(target_zip, target_info_dict):
+  if not HasTrebleEnabled(target_zip, target_info):
     return
 
   # We don't support OEM thumbprint in Treble world (which calculates
   # fingerprints in a different way as shown in CalculateFingerprint()).
-  assert not target_info_dict.get("oem_fingerprint_properties")
+  assert not target_info.oem_props
 
   # Full OTA carries the info for system/vendor both.
-  if source_info_dict is None:
+  if source_info is None:
     AddCompatibilityArchive(True, True)
     return
 
-  assert not source_info_dict.get("oem_fingerprint_properties")
+  assert not source_info.oem_props
 
-  source_fp = GetBuildProp("ro.build.fingerprint", source_info_dict)
-  target_fp = GetBuildProp("ro.build.fingerprint", target_info_dict)
+  source_fp = source_info.fingerprint
+  target_fp = target_info.fingerprint
   system_updated = source_fp != target_fp
 
-  source_fp_vendor = GetVendorBuildProp("ro.vendor.build.fingerprint",
-                                        source_info_dict)
-  target_fp_vendor = GetVendorBuildProp("ro.vendor.build.fingerprint",
-                                        target_info_dict)
+  source_fp_vendor = source_info.GetVendorBuildProp(
+      "ro.vendor.build.fingerprint")
+  target_fp_vendor = target_info.GetVendorBuildProp(
+      "ro.vendor.build.fingerprint")
   vendor_updated = source_fp_vendor != target_fp_vendor
 
   AddCompatibilityArchive(system_updated, vendor_updated)
 
 
-def WriteFullOTAPackage(input_zip, output_zip):
-  # TODO: how to determine this?  We don't know what version it will
-  # be installed on top of. For now, we expect the API just won't
-  # change very often. Similarly for fstab, it might have changed
-  # in the target build.
-  script = edify_generator.EdifyGenerator(3, OPTIONS.info_dict)
+def WriteFullOTAPackage(input_zip, output_file):
+  target_info = BuildInfo(OPTIONS.info_dict, OPTIONS.oem_dicts)
 
-  recovery_mount_options = OPTIONS.info_dict.get("recovery_mount_options")
-  oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties")
-  oem_dicts = None
-  if oem_props:
-    oem_dicts = _LoadOemDicts(script, recovery_mount_options)
+  # We don't know what version it will be installed on top of. We expect the API
+  # just won't change very often. Similarly for fstab, it might have changed in
+  # the target build.
+  target_api_version = target_info["recovery_api_version"]
+  script = edify_generator.EdifyGenerator(target_api_version, target_info)
 
-  target_fp = CalculateFingerprint(oem_props, oem_dicts and oem_dicts[0],
-                                   OPTIONS.info_dict)
-  metadata = {
-      "post-build": target_fp,
-      "pre-device": GetOemProperty("ro.product.device", oem_props,
-                                   oem_dicts and oem_dicts[0],
-                                   OPTIONS.info_dict),
-      "post-timestamp": GetBuildProp("ro.build.date.utc", OPTIONS.info_dict),
-  }
+  if target_info.oem_props and not OPTIONS.oem_no_mount:
+    target_info.WriteMountOemScript(script)
+
+  metadata = GetPackageMetadata(target_info)
+
+  if not OPTIONS.no_signing:
+    staging_file = common.MakeTempFile(suffix='.zip')
+  else:
+    staging_file = output_file
+
+  output_zip = zipfile.ZipFile(
+      staging_file, "w", compression=zipfile.ZIP_DEFLATED)
 
   device_specific = common.DeviceSpecificParams(
       input_zip=input_zip,
-      input_version=OPTIONS.info_dict["recovery_api_version"],
+      input_version=target_api_version,
       output_zip=output_zip,
       script=script,
       input_tmp=OPTIONS.input_tmp,
@@ -441,13 +738,12 @@
 
   assert HasRecoveryPatch(input_zip)
 
-  metadata["ota-type"] = "BLOCK"
-
-  ts = GetBuildProp("ro.build.date.utc", OPTIONS.info_dict)
-  ts_text = GetBuildProp("ro.build.date", OPTIONS.info_dict)
+  # Assertions (e.g. downgrade check, device properties check).
+  ts = target_info.GetBuildProp("ro.build.date.utc")
+  ts_text = target_info.GetBuildProp("ro.build.date")
   script.AssertOlderBuild(ts, ts_text)
 
-  AppendAssertions(script, OPTIONS.info_dict, oem_dicts)
+  target_info.WriteDeviceAssertions(script, OPTIONS.oem_no_mount)
   device_specific.FullOTA_Assertions()
 
   # Two-step package strategy (in chronological order, which is *not*
@@ -473,9 +769,9 @@
   recovery_img = common.GetBootableImage("recovery.img", "recovery.img",
                                          OPTIONS.input_tmp, "RECOVERY")
   if OPTIONS.two_step:
-    if not OPTIONS.info_dict.get("multistage_support", None):
+    if not target_info.get("multistage_support"):
       assert False, "two-step packages not supported by this build"
-    fs = OPTIONS.info_dict["fstab"]["/misc"]
+    fs = target_info["fstab"]["/misc"]
     assert fs.fs_type.upper() == "EMMC", \
         "two-step packages only supported on devices with EMMC /misc partitions"
     bcb_dev = {"bcb_dev": fs.device}
@@ -497,7 +793,7 @@
     script.Comment("Stage 3/3")
 
   # Dump fingerprints
-  script.Print("Target: %s" % target_fp)
+  script.Print("Target: {}".format(target_info.fingerprint))
 
   device_specific.FullOTA_InstallBegin()
 
@@ -508,15 +804,17 @@
   if HasVendorPartition(input_zip):
     system_progress -= 0.1
 
-  recovery_mount_options = OPTIONS.info_dict.get("recovery_mount_options")
-
   script.ShowProgress(system_progress, 0)
 
+  # See the notes in WriteBlockIncrementalOTAPackage().
+  allow_shared_blocks = target_info.get('ext4_share_dup_blocks') == "true"
+
   # Full OTA is done as an "incremental" against an empty source image. This
   # has the effect of writing new data from the package to the entire
   # partition, but lets us reuse the updater code that writes incrementals to
   # do it.
-  system_tgt = GetImage("system", OPTIONS.input_tmp)
+  system_tgt = common.GetSparseImage("system", OPTIONS.input_tmp, input_zip,
+                                     allow_shared_blocks)
   system_tgt.ResetFileMap()
   system_diff = common.BlockDifference("system", system_tgt, src=None)
   system_diff.WriteScript(script, output_zip)
@@ -527,15 +825,15 @@
   if HasVendorPartition(input_zip):
     script.ShowProgress(0.1, 0)
 
-    vendor_tgt = GetImage("vendor", OPTIONS.input_tmp)
+    vendor_tgt = common.GetSparseImage("vendor", OPTIONS.input_tmp, input_zip,
+                                       allow_shared_blocks)
     vendor_tgt.ResetFileMap()
     vendor_diff = common.BlockDifference("vendor", vendor_tgt)
     vendor_diff.WriteScript(script, output_zip)
 
-  AddCompatibilityArchiveIfTrebleEnabled(input_zip, output_zip,
-                                         OPTIONS.info_dict)
+  AddCompatibilityArchiveIfTrebleEnabled(input_zip, output_zip, target_info)
 
-  common.CheckSize(boot_img.data, "boot.img", OPTIONS.info_dict)
+  common.CheckSize(boot_img.data, "boot.img", target_info)
   common.ZipWriteStr(output_zip, "boot.img", boot_img.data)
 
   script.ShowProgress(0.05, 5)
@@ -573,7 +871,15 @@
   script.SetProgress(1)
   script.AddToZip(input_zip, output_zip, input_path=OPTIONS.updater_binary)
   metadata["ota-required-cache"] = str(script.required_cache)
-  WriteMetadata(metadata, output_zip)
+
+  # We haven't written the metadata entry yet, which will be handled in
+  # FinalizeMetadata().
+  common.ZipClose(output_zip)
+
+  needed_property_files = (
+      NonAbOtaPropertyFiles(),
+  )
+  FinalizeMetadata(metadata, staging_file, output_file, needed_property_files)
 
 
 def WriteMetadata(metadata, output_zip):
@@ -582,29 +888,12 @@
                      compress_type=zipfile.ZIP_STORED)
 
 
-def GetBuildProp(prop, info_dict):
-  """Returns the inquired build property from a given info_dict."""
-  try:
-    return info_dict.get("build.prop", {})[prop]
-  except KeyError:
-    raise common.ExternalError("couldn't find %s in build.prop" % (prop,))
-
-
-def GetVendorBuildProp(prop, info_dict):
-  """Returns the inquired vendor build property from a given info_dict."""
-  try:
-    return info_dict.get("vendor.build.prop", {})[prop]
-  except KeyError:
-    raise common.ExternalError(
-        "couldn't find %s in vendor.build.prop" % (prop,))
-
-
-def HandleDowngradeMetadata(metadata):
+def HandleDowngradeMetadata(metadata, target_info, source_info):
   # Only incremental OTAs are allowed to reach here.
   assert OPTIONS.incremental_source is not None
 
-  post_timestamp = GetBuildProp("ro.build.date.utc", OPTIONS.target_info_dict)
-  pre_timestamp = GetBuildProp("ro.build.date.utc", OPTIONS.source_info_dict)
+  post_timestamp = target_info.GetBuildProp("ro.build.date.utc")
+  pre_timestamp = source_info.GetBuildProp("ro.build.date.utc")
   is_downgrade = long(post_timestamp) < long(pre_timestamp)
 
   if OPTIONS.downgrade:
@@ -614,96 +903,471 @@
     metadata["ota-downgrade"] = "yes"
   elif OPTIONS.timestamp:
     if not is_downgrade:
-      raise RuntimeError("--timestamp specified but no timestamp hack needed: "
-                         "pre: %s, post: %s" % (pre_timestamp, post_timestamp))
+      raise RuntimeError("--override_timestamp specified but no timestamp hack "
+                         "needed: pre: %s, post: %s" % (pre_timestamp,
+                                                        post_timestamp))
     metadata["post-timestamp"] = str(long(pre_timestamp) + 1)
   else:
     if is_downgrade:
       raise RuntimeError("Downgrade detected based on timestamp check: "
-                         "pre: %s, post: %s. Need to specify --timestamp OR "
-                         "--downgrade to allow building the incremental." % (
-                             pre_timestamp, post_timestamp))
+                         "pre: %s, post: %s. Need to specify "
+                         "--override_timestamp OR --downgrade to allow "
+                         "building the incremental." % (pre_timestamp,
+                                                        post_timestamp))
     metadata["post-timestamp"] = post_timestamp
 
 
-def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_zip):
-  source_version = OPTIONS.source_info_dict["recovery_api_version"]
-  target_version = OPTIONS.target_info_dict["recovery_api_version"]
+def GetPackageMetadata(target_info, source_info=None):
+  """Generates and returns the metadata dict.
 
-  if source_version == 0:
-    print("WARNING: generating edify script for a source that "
-          "can't install it.")
-  script = edify_generator.EdifyGenerator(
-      source_version, OPTIONS.target_info_dict,
-      fstab=OPTIONS.source_info_dict["fstab"])
+  It generates a dict() that contains the info to be written into an OTA
+  package (META-INF/com/android/metadata). It also handles the detection of
+  downgrade / timestamp override / data wipe based on the global options.
 
-  recovery_mount_options = OPTIONS.source_info_dict.get(
-      "recovery_mount_options")
-  source_oem_props = OPTIONS.source_info_dict.get("oem_fingerprint_properties")
-  target_oem_props = OPTIONS.target_info_dict.get("oem_fingerprint_properties")
-  oem_dicts = None
-  if source_oem_props and target_oem_props:
-    oem_dicts = _LoadOemDicts(script, recovery_mount_options)
+  Args:
+    target_info: The BuildInfo instance that holds the target build info.
+    source_info: The BuildInfo instance that holds the source build info, or
+        None if generating full OTA.
+
+  Returns:
+    A dict to be written into package metadata entry.
+  """
+  assert isinstance(target_info, BuildInfo)
+  assert source_info is None or isinstance(source_info, BuildInfo)
 
   metadata = {
-      "pre-device": GetOemProperty("ro.product.device", source_oem_props,
-                                   oem_dicts and oem_dicts[0],
-                                   OPTIONS.source_info_dict),
-      "ota-type": "BLOCK",
+      'post-build': target_info.fingerprint,
+      'post-build-incremental': target_info.GetBuildProp(
+          'ro.build.version.incremental'),
+      'post-sdk-level': target_info.GetBuildProp(
+          'ro.build.version.sdk'),
+      'post-security-patch-level': target_info.GetBuildProp(
+          'ro.build.version.security_patch'),
   }
 
-  HandleDowngradeMetadata(metadata)
+  if target_info.is_ab:
+    metadata['ota-type'] = 'AB'
+    metadata['ota-required-cache'] = '0'
+  else:
+    metadata['ota-type'] = 'BLOCK'
+
+  if OPTIONS.wipe_user_data:
+    metadata['ota-wipe'] = 'yes'
+
+  is_incremental = source_info is not None
+  if is_incremental:
+    metadata['pre-build'] = source_info.fingerprint
+    metadata['pre-build-incremental'] = source_info.GetBuildProp(
+        'ro.build.version.incremental')
+    metadata['pre-device'] = source_info.device
+  else:
+    metadata['pre-device'] = target_info.device
+
+  # Detect downgrades, or fill in the post-timestamp.
+  if is_incremental:
+    HandleDowngradeMetadata(metadata, target_info, source_info)
+  else:
+    metadata['post-timestamp'] = target_info.GetBuildProp('ro.build.date.utc')
+
+  return metadata
+
+
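+# For reference, the metadata dict generated above for a full OTA looks
+# roughly like the following (all values are made up for illustration):
+#
+#   {
+#       'post-build': 'brand/product/device:8.1.0/OPM1/12345:user/release-keys',
+#       'post-build-incremental': '12345',
+#       'post-sdk-level': '27',
+#       'post-security-patch-level': '2018-01-05',
+#       'ota-type': 'BLOCK',
+#       'pre-device': 'device',
+#       'post-timestamp': '1500000000',
+#   }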
+class PropertyFiles(object):
+  """A class that computes the property-files string for an OTA package.
+
+  A property-files string is a comma-separated string that contains the
+  offset/size info for an OTA package. The entries, which must be ZIP_STORED,
+  can be fetched directly with the package URL along with the offset/size info.
+  These strings can be used for streaming A/B OTAs, or for allowing an updater
+  to download the package metadata entry directly, without paying the cost of
+  downloading the entire package.
+
+  Computing the final property-files string requires two passes, because
+  signing the whole package (with signapk.jar) may reorder the ZIP entries,
+  which would in turn invalidate the previously computed ZIP entry offset/size
+  values.
+
+  This class provides functions to be called for each pass. The general flow is
+  as follows.
+
+    property_files = PropertyFiles()
+    # The first pass, which writes placeholders before doing initial signing.
+    property_files.Compute()
+    SignOutput()
+
+    # The second pass, by replacing the placeholders with actual data.
+    property_files.Finalize()
+    SignOutput()
+
+  And the caller can additionally verify the final result.
+
+    property_files.Verify()
+  """
+
+  def __init__(self):
+    self.name = None
+    self.required = ()
+    self.optional = ()
+
+  def Compute(self, input_zip):
+    """Computes and returns a property-files string with placeholders.
+
+    We reserve extra space for the offset and size of the metadata entry itself,
+    although we don't know the final values until the package gets signed.
+
+    Args:
+      input_zip: The input ZIP file.
+
+    Returns:
+      A string with placeholders for the metadata offset/size info, e.g.
+      "payload.bin:679:343,payload_properties.txt:378:45,metadata:        ".
+    """
+    return self._GetPropertyFilesString(input_zip, reserve_space=True)
+
+  def Finalize(self, input_zip, reserved_length):
+    """Finalizes a property-files string with actual METADATA offset/size info.
+
+    The input ZIP file has been signed, with the ZIP entries in their final
+    places (signapk.jar may have reordered them during signing). Now we compute
+    the ZIP entry offsets and construct the property-files string with actual
+    data. Note that during this process, we must pad the property-files string
+    to the reserved length, so that the METADATA entry size remains the same.
+    Otherwise the entries' offsets and sizes may change again.
+
+    Args:
+      input_zip: The input ZIP file.
+      reserved_length: The reserved length of the property-files string during
+          the call to Compute(). The final string must be no more than this
+          size.
+
+    Returns:
+      A property-files string including the metadata offset/size info, e.g.
+      "payload.bin:679:343,payload_properties.txt:378:45,metadata:69:379  ".
+
+    Raises:
+      AssertionError: If the reserved length is insufficient to hold the final
+          string.
+    """
+    result = self._GetPropertyFilesString(input_zip, reserve_space=False)
+    assert len(result) <= reserved_length, \
+        'Insufficient reserved space: reserved={}, actual={}'.format(
+            reserved_length, len(result))
+    result += ' ' * (reserved_length - len(result))
+    return result
+
+  def Verify(self, input_zip, expected):
+    """Verifies the input ZIP file contains the expected property-files string.
+
+    Args:
+      input_zip: The input ZIP file.
+      expected: The property-files string that's computed from Finalize().
+
+    Raises:
+      AssertionError: On finding a mismatch.
+    """
+    actual = self._GetPropertyFilesString(input_zip)
+    assert actual == expected, \
+        "Mismatching streaming metadata: {} vs {}.".format(actual, expected)
+
+  def _GetPropertyFilesString(self, zip_file, reserve_space=False):
+    """Constructs the property-files string per request."""
+
+    def ComputeEntryOffsetSize(name):
+      """Computes the zip entry offset and size."""
+      info = zip_file.getinfo(name)
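+      # info.header_offset is where the entry's local file header starts;
+      # FileHeader() returns those serialized header bytes, so their sum is
+      # the offset at which the entry's data actually begins.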
+      offset = info.header_offset + len(info.FileHeader())
+      size = info.file_size
+      return '%s:%d:%d' % (os.path.basename(name), offset, size)
+
+    tokens = []
+    tokens.extend(self._GetPrecomputed(zip_file))
+    for entry in self.required:
+      tokens.append(ComputeEntryOffsetSize(entry))
+    for entry in self.optional:
+      if entry in zip_file.namelist():
+        tokens.append(ComputeEntryOffsetSize(entry))
+
+    # 'META-INF/com/android/metadata' is required. We don't know its actual
+    # offset and length (nor the final values for other entries). So we reserve
+    # 10 bytes as a placeholder, which covers the space for the metadata entry
+    # ('xx:xxx', since it's ZIP_STORED and should appear at the beginning of
+    # the zip), as well as possible value changes in other entries.
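+    # For example, the placeholder 'metadata:' plus 10 spaces reserves 19
+    # characters; a final token such as 'metadata:69:379' needs only 15, and
+    # Finalize() pads the whole string back to its reserved length so that the
+    # METADATA entry size (and hence every other offset) stays fixed.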
+    if reserve_space:
+      tokens.append('metadata:' + ' ' * 10)
+    else:
+      tokens.append(ComputeEntryOffsetSize(METADATA_NAME))
+
+    return ','.join(tokens)
+
+  def _GetPrecomputed(self, input_zip):
+    """Computes the additional tokens to be included into the property-files.
+
+    This applies to tokens without actual ZIP entries, such as
+    payload_metadadata.bin. We want to expose the offset/size to updaters, so
+    that they can download the payload metadata directly with the info.
+
+    Args:
+      input_zip: The input zip file.
+
+    Returns:
+      A list of strings (tokens) to be added to the property-files string.
+    """
+    # pylint: disable=no-self-use
+    # pylint: disable=unused-argument
+    return []
+
+
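+# A minimal consumer-side sketch (illustrative only; nothing in this script
+# calls it): parsing a final property-files value back into per-entry
+# (offset, size) pairs.
+def _ParsePropertyFilesExample(property_files_string):
+  """Parses 'name:offset:size,...' into a dict keyed by entry name."""
+  entries = {}
+  for token in property_files_string.strip().split(','):
+    name, offset, size = token.split(':')
+    entries[name] = (int(offset), int(size))
+  return entries
+
+# For example, _ParsePropertyFilesExample(
+#     'payload.bin:679:343,metadata:69:379')['metadata'] == (69, 379).
+
+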
+class StreamingPropertyFiles(PropertyFiles):
+  """A subclass for computing the property-files for streaming A/B OTAs."""
+
+  def __init__(self):
+    super(StreamingPropertyFiles, self).__init__()
+    self.name = 'ota-streaming-property-files'
+    self.required = (
+        # payload.bin and payload_properties.txt must exist.
+        'payload.bin',
+        'payload_properties.txt',
+    )
+    self.optional = (
+        # care_map.txt is available only if dm-verity is enabled.
+        'care_map.txt',
+        # compatibility.zip is available only if target supports Treble.
+        'compatibility.zip',
+    )
+
+
+class AbOtaPropertyFiles(StreamingPropertyFiles):
+  """The property-files for A/B OTA that includes payload_metadata.bin info.
+
+  Since P, we expose one more token (aka property-file), in addition to the ones
+  for streaming A/B OTA, for a virtual entry of 'payload_metadata.bin'.
+  'payload_metadata.bin' is the header part of a payload ('payload.bin'), which
+  doesn't exist as a separate ZIP entry, but can be used to verify whether the
+  payload can be applied to the given device.
+
+  For backward compatibility, we keep both 'ota-streaming-property-files' and
+  the newly added 'ota-property-files' in P. The new token will only be
+  available in 'ota-property-files'.
+  """
+
+  def __init__(self):
+    super(AbOtaPropertyFiles, self).__init__()
+    self.name = 'ota-property-files'
+
+  def _GetPrecomputed(self, input_zip):
+    offset, size = self._GetPayloadMetadataOffsetAndSize(input_zip)
+    return ['payload_metadata.bin:{}:{}'.format(offset, size)]
+
+  @staticmethod
+  def _GetPayloadMetadataOffsetAndSize(input_zip):
+    """Computes the offset and size of the payload metadata for a given package.
+
+    (From system/update_engine/update_metadata.proto)
+    A delta update file contains all the deltas needed to update a system from
+    one specific version to another specific version. The update format is
+    represented by this struct pseudocode:
+
+    struct delta_update_file {
+      char magic[4] = "CrAU";
+      uint64 file_format_version;
+      uint64 manifest_size;  // Size of protobuf DeltaArchiveManifest
+
+      // Only present if format_version > 1:
+      uint32 metadata_signature_size;
+
+      // The Bzip2 compressed DeltaArchiveManifest
+      char manifest[manifest_size];
+
+      // The signature of the metadata (from the beginning of the payload up to
+      // this location, not including the signature itself). This is a
+      // serialized Signatures message.
+      char metadata_signature_message[metadata_signature_size];
+
+      // Data blobs for files, no specific format. The specific offset
+      // and length of each data blob is recorded in the DeltaArchiveManifest.
+      struct {
+        char data[];
+      } blobs[];
+
+      // These two are not signed:
+      uint64 payload_signatures_message_size;
+      char payload_signatures_message[];
+    };
+
+    'payload_metadata.bin' contains all the bytes from the beginning of the
+    payload through the end of 'metadata_signature_message'.
+    """
+    payload_info = input_zip.getinfo('payload.bin')
+    payload_offset = payload_info.header_offset + len(payload_info.FileHeader())
+    payload_size = payload_info.file_size
+
+    with input_zip.open('payload.bin', 'r') as payload_fp:
+      header_bin = payload_fp.read(24)
+
+    # network byte order (big-endian)
+    header = struct.unpack("!IQQL", header_bin)
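+    # header[0]: magic ('CrAU'); header[1]: file_format_version;
+    # header[2]: manifest_size; header[3]: metadata_signature_size.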
+
+    # 'CrAU'
+    magic = header[0]
+    assert magic == 0x43724155, "Invalid magic: {:x}".format(magic)
+
+    manifest_size = header[2]
+    metadata_signature_size = header[3]
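+    # 24 == 4 (magic) + 8 (file_format_version) + 8 (manifest_size) +
+    # 4 (metadata_signature_size), i.e. the fixed fields unpacked above.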
+    metadata_total = 24 + manifest_size + metadata_signature_size
+    assert metadata_total < payload_size
+
+    return (payload_offset, metadata_total)
+
+
+class NonAbOtaPropertyFiles(PropertyFiles):
+  """The property-files for non-A/B OTA.
+
+  For non-A/B OTAs, the property-files string contains the info for the
+  METADATA entry, with which a system updater can fetch the package metadata
+  prior to downloading the entire package.
+  """
+
+  def __init__(self):
+    super(NonAbOtaPropertyFiles, self).__init__()
+    self.name = 'ota-property-files'
+
+
+def FinalizeMetadata(metadata, input_file, output_file, needed_property_files):
+  """Finalizes the metadata and signs an A/B OTA package.
+
+  In order to stream an A/B OTA package, we need 'ota-streaming-property-files'
+  that contains the offsets and sizes for the ZIP entries. An example
+  property-files string is as follows.
+
+    "payload.bin:679:343,payload_properties.txt:378:45,metadata:69:379"
+
+  The OTA server can pass this string down to the system update client, in
+  addition to the package URL. The system update client can then fetch
+  individual ZIP entries (ZIP_STORED) directly at the given offsets of the URL.
+
+  Args:
+    metadata: The metadata dict for the package.
+    input_file: The input ZIP filename that doesn't contain the package METADATA
+        entry yet.
+    output_file: The final output ZIP filename.
+    needed_property_files: The list of PropertyFiles objects to be generated.
+  """
+  output_zip = zipfile.ZipFile(
+      input_file, 'a', compression=zipfile.ZIP_DEFLATED)
+
+  # Write the current metadata entry with placeholders.
+  for property_files in needed_property_files:
+    metadata[property_files.name] = property_files.Compute(output_zip)
+  WriteMetadata(metadata, output_zip)
+  common.ZipClose(output_zip)
+
+  # SignOutput(), which in turn calls signapk.jar, will possibly reorder the
+  # ZIP entries, as well as padding the entry headers. We do a preliminary
+  # signing (with an incomplete metadata entry) to allow that to happen. Then
+  # compute the ZIP entry offsets, write back the final metadata and do the
+  # final signing.
+  if OPTIONS.no_signing:
+    prelim_signing = input_file
+  else:
+    prelim_signing = common.MakeTempFile(suffix='.zip')
+    SignOutput(input_file, prelim_signing)
+
+  # Open the signed zip. Compute the final metadata that's needed for streaming.
+  with zipfile.ZipFile(prelim_signing, 'r') as prelim_signing_zip:
+    for property_files in needed_property_files:
+      metadata[property_files.name] = property_files.Finalize(
+          prelim_signing_zip, len(metadata[property_files.name]))
+
+  # Replace the METADATA entry.
+  common.ZipDelete(prelim_signing, METADATA_NAME)
+  output_zip = zipfile.ZipFile(
+      prelim_signing, 'a', compression=zipfile.ZIP_DEFLATED)
+  WriteMetadata(metadata, output_zip)
+  common.ZipClose(output_zip)
+
+  # Re-sign the package after updating the metadata entry.
+  if OPTIONS.no_signing:
+    output_file = prelim_signing
+  else:
+    SignOutput(prelim_signing, output_file)
+
+  # Reopen the final signed zip to double check the streaming metadata.
+  with zipfile.ZipFile(output_file, 'r') as output_zip:
+    for property_files in needed_property_files:
+      property_files.Verify(output_zip, metadata[property_files.name].strip())
+
+
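+# A minimal client-side sketch (illustrative only; nothing in this script
+# calls it), assuming a hypothetical package_url and the Python 2 environment
+# that the rest of this script targets. Once an entry's offset/size is known
+# from the property-files string, the ZIP_STORED entry can be fetched directly
+# with an HTTP Range request, without downloading the whole package.
+def _FetchZipEntryExample(package_url, offset, size):
+  """Fetches size bytes at offset from package_url via an HTTP Range request."""
+  import urllib2
+  request = urllib2.Request(
+      package_url,
+      headers={'Range': 'bytes=%d-%d' % (offset, offset + size - 1)})
+  return urllib2.urlopen(request).read()
+
+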
+def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_file):
+  target_info = BuildInfo(OPTIONS.target_info_dict, OPTIONS.oem_dicts)
+  source_info = BuildInfo(OPTIONS.source_info_dict, OPTIONS.oem_dicts)
+
+  target_api_version = target_info["recovery_api_version"]
+  source_api_version = source_info["recovery_api_version"]
+  if source_api_version == 0:
+    print("WARNING: generating edify script for a source that "
+          "can't install it.")
+
+  script = edify_generator.EdifyGenerator(
+      source_api_version, target_info, fstab=source_info["fstab"])
+
+  if target_info.oem_props or source_info.oem_props:
+    if not OPTIONS.oem_no_mount:
+      source_info.WriteMountOemScript(script)
+
+  metadata = GetPackageMetadata(target_info, source_info)
+
+  if not OPTIONS.no_signing:
+    staging_file = common.MakeTempFile(suffix='.zip')
+  else:
+    staging_file = output_file
+
+  output_zip = zipfile.ZipFile(
+      staging_file, "w", compression=zipfile.ZIP_DEFLATED)
 
   device_specific = common.DeviceSpecificParams(
       source_zip=source_zip,
-      source_version=source_version,
+      source_version=source_api_version,
       target_zip=target_zip,
-      target_version=target_version,
+      target_version=target_api_version,
       output_zip=output_zip,
       script=script,
       metadata=metadata,
-      info_dict=OPTIONS.source_info_dict)
-
-  source_fp = CalculateFingerprint(source_oem_props, oem_dicts and oem_dicts[0],
-                                   OPTIONS.source_info_dict)
-  target_fp = CalculateFingerprint(target_oem_props, oem_dicts and oem_dicts[0],
-                                   OPTIONS.target_info_dict)
-  metadata["pre-build"] = source_fp
-  metadata["post-build"] = target_fp
-  metadata["pre-build-incremental"] = GetBuildProp(
-      "ro.build.version.incremental", OPTIONS.source_info_dict)
-  metadata["post-build-incremental"] = GetBuildProp(
-      "ro.build.version.incremental", OPTIONS.target_info_dict)
+      info_dict=source_info)
 
   source_boot = common.GetBootableImage(
-      "/tmp/boot.img", "boot.img", OPTIONS.source_tmp, "BOOT",
-      OPTIONS.source_info_dict)
+      "/tmp/boot.img", "boot.img", OPTIONS.source_tmp, "BOOT", source_info)
   target_boot = common.GetBootableImage(
-      "/tmp/boot.img", "boot.img", OPTIONS.target_tmp, "BOOT")
+      "/tmp/boot.img", "boot.img", OPTIONS.target_tmp, "BOOT", target_info)
   updating_boot = (not OPTIONS.two_step and
                    (source_boot.data != target_boot.data))
 
   target_recovery = common.GetBootableImage(
       "/tmp/recovery.img", "recovery.img", OPTIONS.target_tmp, "RECOVERY")
 
-  system_src = GetImage("system", OPTIONS.source_tmp)
-  system_tgt = GetImage("system", OPTIONS.target_tmp)
+  # When the target uses 'BOARD_EXT4_SHARE_DUP_BLOCKS := true', images may
+  # contain shared blocks (i.e. some blocks will show up in multiple files'
+  # block lists). We can only allocate such shared blocks to the first "owner",
+  # and disable imgdiff for all later occurrences.
+  allow_shared_blocks = (source_info.get('ext4_share_dup_blocks') == "true" or
+                         target_info.get('ext4_share_dup_blocks') == "true")
+  system_src = common.GetSparseImage("system", OPTIONS.source_tmp, source_zip,
+                                     allow_shared_blocks)
+  system_tgt = common.GetSparseImage("system", OPTIONS.target_tmp, target_zip,
+                                     allow_shared_blocks)
 
-  blockimgdiff_version = 1
-  if OPTIONS.info_dict:
-    blockimgdiff_version = max(
-        int(i) for i in
-        OPTIONS.info_dict.get("blockimgdiff_versions", "1").split(","))
+  blockimgdiff_version = max(
+      int(i) for i in target_info.get("blockimgdiff_versions", "1").split(","))
+  assert blockimgdiff_version >= 3
 
   # Check the first block of the source system partition for remount R/W only
   # if the filesystem is ext4.
-  system_src_partition = OPTIONS.source_info_dict["fstab"]["/system"]
+  system_src_partition = source_info["fstab"]["/system"]
   check_first_block = system_src_partition.fs_type == "ext4"
   # Disable using imgdiff for squashfs. 'imgdiff -z' expects input files to be
   # in zip formats. However with squashfs, a) all files are compressed in LZ4;
   # b) the blocks listed in block map may not contain all the bytes for a given
   # file (because they're rounded to be 4K-aligned).
-  system_tgt_partition = OPTIONS.target_info_dict["fstab"]["/system"]
+  system_tgt_partition = target_info["fstab"]["/system"]
   disable_imgdiff = (system_src_partition.fs_type == "squashfs" or
                      system_tgt_partition.fs_type == "squashfs")
   system_diff = common.BlockDifference("system", system_tgt, system_src,
@@ -714,12 +1378,14 @@
   if HasVendorPartition(target_zip):
     if not HasVendorPartition(source_zip):
       raise RuntimeError("can't generate incremental that adds /vendor")
-    vendor_src = GetImage("vendor", OPTIONS.source_tmp)
-    vendor_tgt = GetImage("vendor", OPTIONS.target_tmp)
+    vendor_src = common.GetSparseImage("vendor", OPTIONS.source_tmp, source_zip,
+                                       allow_shared_blocks)
+    vendor_tgt = common.GetSparseImage("vendor", OPTIONS.target_tmp, target_zip,
+                                       allow_shared_blocks)
 
     # Check first block of vendor partition for remount R/W only if
     # disk type is ext4
-    vendor_partition = OPTIONS.source_info_dict["fstab"]["/vendor"]
+    vendor_partition = source_info["fstab"]["/vendor"]
     check_first_block = vendor_partition.fs_type == "ext4"
     disable_imgdiff = vendor_partition.fs_type == "squashfs"
     vendor_diff = common.BlockDifference("vendor", vendor_tgt, vendor_src,
@@ -730,10 +1396,10 @@
     vendor_diff = None
 
   AddCompatibilityArchiveIfTrebleEnabled(
-      target_zip, output_zip, OPTIONS.target_info_dict,
-      OPTIONS.source_info_dict)
+      target_zip, output_zip, target_info, source_info)
 
-  AppendAssertions(script, OPTIONS.target_info_dict, oem_dicts)
+  # Assertions (e.g. device properties check).
+  target_info.WriteDeviceAssertions(script, OPTIONS.oem_no_mount)
   device_specific.IncrementalOTA_Assertions()
 
   # Two-step incremental package strategy (in chronological order,
@@ -759,12 +1425,12 @@
   #    (allow recovery to mark itself finished and reboot)
 
   if OPTIONS.two_step:
-    if not OPTIONS.source_info_dict.get("multistage_support", None):
+    if not source_info.get("multistage_support"):
       assert False, "two-step packages not supported by this build"
-    fs = OPTIONS.source_info_dict["fstab"]["/misc"]
+    fs = source_info["fstab"]["/misc"]
     assert fs.fs_type.upper() == "EMMC", \
         "two-step packages only supported on devices with EMMC /misc partitions"
     bcb_dev = {"bcb_dev": fs.device}
     common.ZipWriteStr(output_zip, "recovery.img", target_recovery.data)
     script.AppendExtra("""
 if get_stage("%(bcb_dev)s") == "2/3" then
@@ -784,39 +1450,14 @@
     script.Comment("Stage 1/3")
 
   # Dump fingerprints
-  script.Print("Source: %s" % (source_fp,))
-  script.Print("Target: %s" % (target_fp,))
+  script.Print("Source: {}".format(source_info.fingerprint))
+  script.Print("Target: {}".format(target_info.fingerprint))
 
   script.Print("Verifying current system...")
 
   device_specific.IncrementalOTA_VerifyBegin()
 
-  # When blockimgdiff version is less than 3 (non-resumable block-based OTA),
-  # patching on a device that's already on the target build will damage the
-  # system. Because operations like move don't check the block state, they
-  # always apply the changes unconditionally.
-  if blockimgdiff_version <= 2:
-    if source_oem_props is None:
-      script.AssertSomeFingerprint(source_fp)
-    else:
-      script.AssertSomeThumbprint(
-          GetBuildProp("ro.build.thumbprint", OPTIONS.source_info_dict))
-
-  else: # blockimgdiff_version > 2
-    if source_oem_props is None and target_oem_props is None:
-      script.AssertSomeFingerprint(source_fp, target_fp)
-    elif source_oem_props is not None and target_oem_props is not None:
-      script.AssertSomeThumbprint(
-          GetBuildProp("ro.build.thumbprint", OPTIONS.target_info_dict),
-          GetBuildProp("ro.build.thumbprint", OPTIONS.source_info_dict))
-    elif source_oem_props is None and target_oem_props is not None:
-      script.AssertFingerprintOrThumbprint(
-          source_fp,
-          GetBuildProp("ro.build.thumbprint", OPTIONS.target_info_dict))
-    else:
-      script.AssertFingerprintOrThumbprint(
-          target_fp,
-          GetBuildProp("ro.build.thumbprint", OPTIONS.source_info_dict))
+  WriteFingerprintAssertion(script, target_info, source_info)
 
   # Check the required cache size (i.e. stashed blocks).
   size = []
@@ -826,8 +1467,7 @@
     size.append(vendor_diff.required_cache)
 
   if updating_boot:
-    boot_type, boot_device = common.GetTypeAndDevice(
-        "/boot", OPTIONS.source_info_dict)
+    boot_type, boot_device = common.GetTypeAndDevice("/boot", source_info)
     d = common.Difference(target_boot, source_boot)
     _, _, d = d.ComputePatch()
     if d is None:
@@ -917,7 +1557,6 @@
   if OPTIONS.wipe_user_data:
     script.Print("Erasing user data...")
     script.FormatPartition("/data")
-    metadata["ota-wipe"] = "yes"
 
   if OPTIONS.two_step:
     script.AppendExtra("""
@@ -934,354 +1573,172 @@
   else:
     script.AddToZip(target_zip, output_zip, input_path=OPTIONS.updater_binary)
   metadata["ota-required-cache"] = str(script.required_cache)
-  WriteMetadata(metadata, output_zip)
+
+  # We haven't written the metadata entry yet, which will be handled in
+  # FinalizeMetadata().
+  common.ZipClose(output_zip)
+
+  # Sign the generated zip package unless no_signing is specified.
+  needed_property_files = (
+      NonAbOtaPropertyFiles(),
+  )
+  FinalizeMetadata(metadata, staging_file, output_file, needed_property_files)
 
 
-def WriteVerifyPackage(input_zip, output_zip):
-  script = edify_generator.EdifyGenerator(3, OPTIONS.info_dict)
+def GetTargetFilesZipForSecondaryImages(input_file, skip_postinstall=False):
+  """Returns a target-files.zip file for generating secondary payload.
 
-  oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties")
-  recovery_mount_options = OPTIONS.info_dict.get(
-      "recovery_mount_options")
-  oem_dicts = None
-  if oem_props:
-    oem_dicts = _LoadOemDicts(script, recovery_mount_options)
+  Although the original target-files.zip already contains secondary slot
+  images (e.g. IMAGES/system_other.img), we need to rename the files to the
+  ones without the _other suffix. Note that we cannot instead modify the names
+  in META/ab_partitions.txt, because there are no matching partitions on the
+  device.
 
-  target_fp = CalculateFingerprint(oem_props, oem_dicts and oem_dicts[0],
-                                   OPTIONS.info_dict)
-  metadata = {
-      "post-build": target_fp,
-      "pre-device": GetOemProperty("ro.product.device", oem_props,
-                                   oem_dicts and oem_dicts[0],
-                                   OPTIONS.info_dict),
-      "post-timestamp": GetBuildProp("ro.build.date.utc", OPTIONS.info_dict),
-  }
+  For the partitions that don't have secondary images, the ones for the
+  primary slot will be used. This ensures that we always have valid boot,
+  vbmeta, and bootloader images in the inactive slot.
 
-  device_specific = common.DeviceSpecificParams(
-      input_zip=input_zip,
-      input_version=OPTIONS.info_dict["recovery_api_version"],
-      output_zip=output_zip,
-      script=script,
-      input_tmp=OPTIONS.input_tmp,
-      metadata=metadata,
-      info_dict=OPTIONS.info_dict)
+  Args:
+    input_file: The input target-files.zip file.
+    skip_postinstall: Whether to skip copying the postinstall config file.
 
-  AppendAssertions(script, OPTIONS.info_dict, oem_dicts)
+  Returns:
+    The filename of the target-files.zip for generating secondary payload.
+  """
+  target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
+  target_zip = zipfile.ZipFile(target_file, 'w', allowZip64=True)
 
-  script.Print("Verifying device images against %s..." % target_fp)
-  script.AppendExtra("")
+  input_tmp = common.UnzipTemp(input_file, UNZIP_PATTERN)
+  with zipfile.ZipFile(input_file, 'r') as input_zip:
+    infolist = input_zip.infolist()
 
-  script.Print("Verifying boot...")
-  boot_img = common.GetBootableImage(
-      "boot.img", "boot.img", OPTIONS.input_tmp, "BOOT")
-  boot_type, boot_device = common.GetTypeAndDevice(
-      "/boot", OPTIONS.info_dict)
-  script.Verify("%s:%s:%d:%s" % (
-      boot_type, boot_device, boot_img.size, boot_img.sha1))
-  script.AppendExtra("")
+  for info in infolist:
+    unzipped_file = os.path.join(input_tmp, *info.filename.split('/'))
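+    # e.g. 'IMAGES/system_other.img' is written back as 'IMAGES/system.img',
+    # while the primary slot's 'IMAGES/system.img' and 'IMAGES/system.map'
+    # are dropped via the explicit skips below.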
+    if info.filename == 'IMAGES/system_other.img':
+      common.ZipWrite(target_zip, unzipped_file, arcname='IMAGES/system.img')
 
-  script.Print("Verifying recovery...")
-  recovery_img = common.GetBootableImage(
-      "recovery.img", "recovery.img", OPTIONS.input_tmp, "RECOVERY")
-  recovery_type, recovery_device = common.GetTypeAndDevice(
-      "/recovery", OPTIONS.info_dict)
-  script.Verify("%s:%s:%d:%s" % (
-      recovery_type, recovery_device, recovery_img.size, recovery_img.sha1))
-  script.AppendExtra("")
+    # Primary images and friends need to be skipped explicitly.
+    elif info.filename in ('IMAGES/system.img',
+                           'IMAGES/system.map'):
+      pass
 
-  system_tgt = GetImage("system", OPTIONS.input_tmp)
-  system_tgt.ResetFileMap()
-  system_diff = common.BlockDifference("system", system_tgt, src=None)
-  system_diff.WriteStrictVerifyScript(script)
+    # Skip copying the postinstall config if requested.
+    elif skip_postinstall and info.filename == POSTINSTALL_CONFIG:
+      pass
 
-  if HasVendorPartition(input_zip):
-    vendor_tgt = GetImage("vendor", OPTIONS.input_tmp)
-    vendor_tgt.ResetFileMap()
-    vendor_diff = common.BlockDifference("vendor", vendor_tgt, src=None)
-    vendor_diff.WriteStrictVerifyScript(script)
+    elif info.filename.startswith(('META/', 'IMAGES/')):
+      common.ZipWrite(target_zip, unzipped_file, arcname=info.filename)
 
-  # Device specific partitions, such as radio, bootloader and etc.
-  device_specific.VerifyOTA_Assertions()
+  common.ZipClose(target_zip)
 
-  script.SetProgress(1.0)
-  script.AddToZip(input_zip, output_zip, input_path=OPTIONS.updater_binary)
-  metadata["ota-required-cache"] = str(script.required_cache)
-  WriteMetadata(metadata, output_zip)
+  return target_file
+
+
+def GetTargetFilesZipWithoutPostinstallConfig(input_file):
+  """Returns a target-files.zip that's not containing postinstall_config.txt.
+
+  This allows brillo_update_payload script to skip writing all the postinstall
+  hooks in the generated payload. The input target-files.zip file will be
+  duplicated, with 'META/postinstall_config.txt' skipped. If input_file doesn't
+  contain the postinstall_config.txt entry, the input file will be returned.
+
+  Args:
+    input_file: The input target-files.zip filename.
+
+  Returns:
+    The filename of target-files.zip that doesn't contain postinstall config.
+  """
+  # We should only make a copy if the postinstall_config entry exists.
+  with zipfile.ZipFile(input_file, 'r') as input_zip:
+    if POSTINSTALL_CONFIG not in input_zip.namelist():
+      return input_file
+
+  target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
+  shutil.copyfile(input_file, target_file)
+  common.ZipDelete(target_file, POSTINSTALL_CONFIG)
+  return target_file
 
 
 def WriteABOTAPackageWithBrilloScript(target_file, output_file,
                                       source_file=None):
-  """Generate an Android OTA package that has A/B update payload."""
-
-  def ComputeStreamingMetadata(zip_file, reserve_space=False,
-                               expected_length=None):
-    """Compute the streaming metadata for a given zip.
-
-    When 'reserve_space' is True, we reserve extra space for the offset and
-    length of the metadata entry itself, although we don't know the final
-    values until the package gets signed. This function will be called again
-    after signing. We then write the actual values and pad the string to the
-    length we set earlier. Note that we can't use the actual length of the
-    metadata entry in the second run. Otherwise the offsets for other entries
-    will be changing again.
-    """
-
-    def ComputeEntryOffsetSize(name):
-      """Compute the zip entry offset and size."""
-      info = zip_file.getinfo(name)
-      offset = info.header_offset + len(info.FileHeader())
-      size = info.file_size
-      return '%s:%d:%d' % (os.path.basename(name), offset, size)
-
-    # payload.bin and payload_properties.txt must exist.
-    offsets = [ComputeEntryOffsetSize('payload.bin'),
-               ComputeEntryOffsetSize('payload_properties.txt')]
-
-    # care_map.txt is available only if dm-verity is enabled.
-    if 'care_map.txt' in zip_file.namelist():
-      offsets.append(ComputeEntryOffsetSize('care_map.txt'))
-
-    if 'compatibility.zip' in zip_file.namelist():
-      offsets.append(ComputeEntryOffsetSize('compatibility.zip'))
-
-    # 'META-INF/com/android/metadata' is required. We don't know its actual
-    # offset and length (as well as the values for other entries). So we
-    # reserve 10-byte as a placeholder, which is to cover the space for metadata
-    # entry ('xx:xxx', since it's ZIP_STORED which should appear at the
-    # beginning of the zip), as well as the possible value changes in other
-    # entries.
-    if reserve_space:
-      offsets.append('metadata:' + ' ' * 10)
-    else:
-      offsets.append(ComputeEntryOffsetSize(METADATA_NAME))
-
-    value = ','.join(offsets)
-    if expected_length is not None:
-      assert len(value) <= expected_length, \
-          'Insufficient reserved space: reserved=%d, actual=%d' % (
-              expected_length, len(value))
-      value += ' ' * (expected_length - len(value))
-    return value
-
-  # The place where the output from the subprocess should go.
-  log_file = sys.stdout if OPTIONS.verbose else subprocess.PIPE
-
-  # A/B updater expects a signing key in RSA format. Gets the key ready for
-  # later use in step 3, unless a payload_signer has been specified.
-  if OPTIONS.payload_signer is None:
-    cmd = ["openssl", "pkcs8",
-           "-in", OPTIONS.package_key + OPTIONS.private_key_suffix,
-           "-inform", "DER"]
-    pw = OPTIONS.key_passwords[OPTIONS.package_key]
-    cmd.extend(["-passin", "pass:" + pw] if pw else ["-nocrypt"])
-    rsa_key = common.MakeTempFile(prefix="key-", suffix=".key")
-    cmd.extend(["-out", rsa_key])
-    p1 = common.Run(cmd, verbose=False, stdout=log_file, stderr=subprocess.STDOUT)
-    p1.communicate()
-    assert p1.returncode == 0, "openssl pkcs8 failed"
-
+  """Generates an Android OTA package that has A/B update payload."""
   # Stage the output zip package for package signing.
-  temp_zip_file = tempfile.NamedTemporaryFile()
-  output_zip = zipfile.ZipFile(temp_zip_file, "w",
+  if not OPTIONS.no_signing:
+    staging_file = common.MakeTempFile(suffix='.zip')
+  else:
+    staging_file = output_file
+  output_zip = zipfile.ZipFile(staging_file, "w",
                                compression=zipfile.ZIP_DEFLATED)
 
+  if source_file is not None:
+    target_info = BuildInfo(OPTIONS.target_info_dict, OPTIONS.oem_dicts)
+    source_info = BuildInfo(OPTIONS.source_info_dict, OPTIONS.oem_dicts)
+  else:
+    target_info = BuildInfo(OPTIONS.info_dict, OPTIONS.oem_dicts)
+    source_info = None
+
   # Metadata to comply with Android OTA package format.
-  oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties", None)
-  oem_dicts = None
-  if oem_props:
-    oem_dicts = _LoadOemDicts(None)
+  metadata = GetPackageMetadata(target_info, source_info)
 
-  metadata = {
-      "post-build": CalculateFingerprint(oem_props, oem_dicts and oem_dicts[0],
-                                         OPTIONS.info_dict),
-      "post-build-incremental" : GetBuildProp("ro.build.version.incremental",
-                                              OPTIONS.info_dict),
-      "pre-device": GetOemProperty("ro.product.device", oem_props,
-                                   oem_dicts and oem_dicts[0],
-                                   OPTIONS.info_dict),
-      "ota-required-cache": "0",
-      "ota-type": "AB",
-  }
+  if OPTIONS.skip_postinstall:
+    target_file = GetTargetFilesZipWithoutPostinstallConfig(target_file)
 
-  if source_file is not None:
-    metadata["pre-build"] = CalculateFingerprint(oem_props,
-                                                 oem_dicts and oem_dicts[0],
-                                                 OPTIONS.source_info_dict)
-    metadata["pre-build-incremental"] = GetBuildProp(
-        "ro.build.version.incremental", OPTIONS.source_info_dict)
+  # Generate payload.
+  payload = Payload()
+  payload.Generate(target_file, source_file)
 
-    HandleDowngradeMetadata(metadata)
-  else:
-    metadata["post-timestamp"] = GetBuildProp(
-        "ro.build.date.utc", OPTIONS.info_dict)
+  # Sign the payload.
+  payload_signer = PayloadSigner()
+  payload.Sign(payload_signer)
 
-  # 1. Generate payload.
-  payload_file = common.MakeTempFile(prefix="payload-", suffix=".bin")
-  cmd = ["brillo_update_payload", "generate",
-         "--payload", payload_file,
-         "--target_image", target_file]
-  if source_file is not None:
-    cmd.extend(["--source_image", source_file])
-  p1 = common.Run(cmd, stdout=log_file, stderr=subprocess.STDOUT)
-  p1.communicate()
-  assert p1.returncode == 0, "brillo_update_payload generate failed"
+  # Write the payload into output zip.
+  payload.WriteToZip(output_zip)
 
-  # 2. Generate hashes of the payload and metadata files.
-  payload_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
-  metadata_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
-  cmd = ["brillo_update_payload", "hash",
-         "--unsigned_payload", payload_file,
-         "--signature_size", "256",
-         "--metadata_hash_file", metadata_sig_file,
-         "--payload_hash_file", payload_sig_file]
-  p1 = common.Run(cmd, stdout=log_file, stderr=subprocess.STDOUT)
-  p1.communicate()
-  assert p1.returncode == 0, "brillo_update_payload hash failed"
-
-  # 3. Sign the hashes and insert them back into the payload file.
-  signed_payload_sig_file = common.MakeTempFile(prefix="signed-sig-",
-                                                suffix=".bin")
-  signed_metadata_sig_file = common.MakeTempFile(prefix="signed-sig-",
-                                                 suffix=".bin")
-  # 3a. Sign the payload hash.
-  if OPTIONS.payload_signer is not None:
-    cmd = [OPTIONS.payload_signer]
-    cmd.extend(OPTIONS.payload_signer_args)
-  else:
-    cmd = ["openssl", "pkeyutl", "-sign",
-           "-inkey", rsa_key,
-           "-pkeyopt", "digest:sha256"]
-  cmd.extend(["-in", payload_sig_file,
-              "-out", signed_payload_sig_file])
-  p1 = common.Run(cmd, stdout=log_file, stderr=subprocess.STDOUT)
-  p1.communicate()
-  assert p1.returncode == 0, "openssl sign payload failed"
-
-  # 3b. Sign the metadata hash.
-  if OPTIONS.payload_signer is not None:
-    cmd = [OPTIONS.payload_signer]
-    cmd.extend(OPTIONS.payload_signer_args)
-  else:
-    cmd = ["openssl", "pkeyutl", "-sign",
-           "-inkey", rsa_key,
-           "-pkeyopt", "digest:sha256"]
-  cmd.extend(["-in", metadata_sig_file,
-              "-out", signed_metadata_sig_file])
-  p1 = common.Run(cmd, stdout=log_file, stderr=subprocess.STDOUT)
-  p1.communicate()
-  assert p1.returncode == 0, "openssl sign metadata failed"
-
-  # 3c. Insert the signatures back into the payload file.
-  signed_payload_file = common.MakeTempFile(prefix="signed-payload-",
-                                            suffix=".bin")
-  cmd = ["brillo_update_payload", "sign",
-         "--unsigned_payload", payload_file,
-         "--payload", signed_payload_file,
-         "--signature_size", "256",
-         "--metadata_signature_file", signed_metadata_sig_file,
-         "--payload_signature_file", signed_payload_sig_file]
-  p1 = common.Run(cmd, stdout=log_file, stderr=subprocess.STDOUT)
-  p1.communicate()
-  assert p1.returncode == 0, "brillo_update_payload sign failed"
-
-  # 4. Dump the signed payload properties.
-  properties_file = common.MakeTempFile(prefix="payload-properties-",
-                                        suffix=".txt")
-  cmd = ["brillo_update_payload", "properties",
-         "--payload", signed_payload_file,
-         "--properties_file", properties_file]
-  p1 = common.Run(cmd, stdout=log_file, stderr=subprocess.STDOUT)
-  p1.communicate()
-  assert p1.returncode == 0, "brillo_update_payload properties failed"
-
-  if OPTIONS.wipe_user_data:
-    with open(properties_file, "a") as f:
-      f.write("POWERWASH=1\n")
-    metadata["ota-wipe"] = "yes"
-
-  # Add the signed payload file and properties into the zip. In order to
-  # support streaming, we pack payload.bin, payload_properties.txt and
-  # care_map.txt as ZIP_STORED. So these entries can be read directly with
-  # the offset and length pairs.
-  common.ZipWrite(output_zip, signed_payload_file, arcname="payload.bin",
-                  compress_type=zipfile.ZIP_STORED)
-  common.ZipWrite(output_zip, properties_file,
-                  arcname="payload_properties.txt",
-                  compress_type=zipfile.ZIP_STORED)
+  # Generate and include the secondary payload that installs secondary images
+  # (e.g. system_other.img).
+  if OPTIONS.include_secondary:
+    # We always include a full payload for the secondary slot, even when
+    # building an incremental OTA. See the comments for "--include_secondary".
+    secondary_target_file = GetTargetFilesZipForSecondaryImages(
+        target_file, OPTIONS.skip_postinstall)
+    secondary_payload = Payload(secondary=True)
+    secondary_payload.Generate(secondary_target_file)
+    secondary_payload.Sign(payload_signer)
+    secondary_payload.WriteToZip(output_zip)
 
   # If dm-verity is supported for the device, copy contents of care_map
   # into A/B OTA package.
   target_zip = zipfile.ZipFile(target_file, "r")
-  if (OPTIONS.info_dict.get("verity") == "true" or
-      OPTIONS.info_dict.get("avb_enable") == "true"):
+  if (target_info.get("verity") == "true" or
+      target_info.get("avb_enable") == "true"):
     care_map_path = "META/care_map.txt"
     namelist = target_zip.namelist()
     if care_map_path in namelist:
       care_map_data = target_zip.read(care_map_path)
+      # In order to support streaming, care_map.txt needs to be packed as
+      # ZIP_STORED.
       common.ZipWriteStr(output_zip, "care_map.txt", care_map_data,
-          compress_type=zipfile.ZIP_STORED)
+                         compress_type=zipfile.ZIP_STORED)
     else:
       print("Warning: cannot find care map file in target_file package")
 
-  # OPTIONS.source_info_dict must be None for incrementals.
-  if source_file is None:
-    assert OPTIONS.source_info_dict is None
-
   AddCompatibilityArchiveIfTrebleEnabled(
-      target_zip, output_zip, OPTIONS.info_dict, OPTIONS.source_info_dict)
+      target_zip, output_zip, target_info, source_info)
 
   common.ZipClose(target_zip)
 
-  # Write the current metadata entry with placeholders.
-  metadata['ota-streaming-property-files'] = ComputeStreamingMetadata(
-      output_zip, reserve_space=True)
-  WriteMetadata(metadata, output_zip)
+  # We haven't written the metadata entry yet, which will be handled in
+  # FinalizeMetadata().
   common.ZipClose(output_zip)
 
-  # SignOutput(), which in turn calls signapk.jar, will possibly reorder the
-  # zip entries, as well as padding the entry headers. We do a preliminary
-  # signing (with an incomplete metadata entry) to allow that to happen. Then
-  # compute the zip entry offsets, write back the final metadata and do the
-  # final signing.
-  prelim_signing = tempfile.NamedTemporaryFile()
-  SignOutput(temp_zip_file.name, prelim_signing.name)
-  common.ZipClose(temp_zip_file)
-
-  # Open the signed zip. Compute the final metadata that's needed for streaming.
-  prelim_zip = zipfile.ZipFile(prelim_signing, "r",
-                               compression=zipfile.ZIP_DEFLATED)
-  expected_length = len(metadata['ota-streaming-property-files'])
-  metadata['ota-streaming-property-files'] = ComputeStreamingMetadata(
-      prelim_zip, reserve_space=False, expected_length=expected_length)
-
-  # Copy the zip entries, as we cannot update / delete entries with zipfile.
-  final_signing = tempfile.NamedTemporaryFile()
-  output_zip = zipfile.ZipFile(final_signing, "w",
-                               compression=zipfile.ZIP_DEFLATED)
-  for item in prelim_zip.infolist():
-    if item.filename == METADATA_NAME:
-      continue
-
-    data = prelim_zip.read(item.filename)
-    out_info = copy.copy(item)
-    common.ZipWriteStr(output_zip, out_info, data)
-
-  # Now write the final metadata entry.
-  WriteMetadata(metadata, output_zip)
-  common.ZipClose(prelim_zip)
-  common.ZipClose(output_zip)
-
-  # Re-sign the package after updating the metadata entry.
-  SignOutput(final_signing.name, output_file)
-  final_signing.close()
-
-  # Reopen the final signed zip to double check the streaming metadata.
-  output_zip = zipfile.ZipFile(output_file, "r")
-  actual = metadata['ota-streaming-property-files'].strip()
-  expected = ComputeStreamingMetadata(output_zip)
-  assert actual == expected, \
-      "Mismatching streaming metadata: %s vs %s." % (actual, expected)
-  common.ZipClose(output_zip)
+  # AbOtaPropertyFiles intends to replace StreamingPropertyFiles, as it covers
+  # all the info of the latter. However, system updaters and OTA servers need
+  # time to switch to the new flag. We keep both flags for the P timeframe,
+  # and will remove StreamingPropertyFiles in a later release.
+  needed_property_files = (
+      AbOtaPropertyFiles(),
+      StreamingPropertyFiles(),
+  )
+  FinalizeMetadata(metadata, staging_file, output_file, needed_property_files)
 
 
 def main(argv):
@@ -1295,7 +1752,7 @@
       OPTIONS.full_radio = True
     elif o == "--full_bootloader":
       OPTIONS.full_bootloader = True
-    elif o in ("-w", "--wipe_user_data"):
+    elif o == "--wipe_user_data":
       OPTIONS.wipe_user_data = True
     elif o == "--downgrade":
       OPTIONS.downgrade = True
@@ -1316,6 +1773,8 @@
                          "integers are allowed." % (a, o))
     elif o in ("-2", "--two_step"):
       OPTIONS.two_step = True
+    elif o == "--include_secondary":
+      OPTIONS.include_secondary = True
     elif o == "--no_signing":
       OPTIONS.no_signing = True
     elif o == "--verify":
@@ -1324,16 +1783,12 @@
       OPTIONS.block_based = True
     elif o in ("-b", "--binary"):
       OPTIONS.updater_binary = a
-    elif o in ("--no_fallback_to_full",):
-      OPTIONS.fallback_to_full = False
     elif o == "--stash_threshold":
       try:
         OPTIONS.stash_threshold = float(a)
       except ValueError:
         raise ValueError("Cannot parse value %r for option %r - expecting "
                          "a float" % (a, o))
-    elif o == "--gen_verify":
-      OPTIONS.gen_verify = True
     elif o == "--log_diff":
       OPTIONS.log_diff = a
     elif o == "--payload_signer":
@@ -1342,12 +1797,14 @@
       OPTIONS.payload_signer_args = shlex.split(a)
     elif o == "--extracted_input_target_files":
       OPTIONS.extracted_input = a
+    elif o == "--skip_postinstall":
+      OPTIONS.skip_postinstall = True
     else:
       return False
     return True
 
   args = common.ParseOptions(argv, __doc__,
-                             extra_opts="b:k:i:d:we:t:2o:",
+                             extra_opts="b:k:i:d:e:t:2o:",
                              extra_long_opts=[
                                  "package_key=",
                                  "incremental_from=",
@@ -1359,19 +1816,19 @@
                                  "extra_script=",
                                  "worker_threads=",
                                  "two_step",
+                                 "include_secondary",
                                  "no_signing",
                                  "block",
                                  "binary=",
                                  "oem_settings=",
                                  "oem_no_mount",
                                  "verify",
-                                 "no_fallback_to_full",
                                  "stash_threshold=",
-                                 "gen_verify",
                                  "log_diff=",
                                  "payload_signer=",
                                  "payload_signer_args=",
                                  "extracted_input_target_files=",
+                                 "skip_postinstall",
                              ], extra_option_handler=option_handler)
 
   if len(args) != 2:
@@ -1392,14 +1849,35 @@
   assert not (OPTIONS.downgrade and OPTIONS.timestamp), \
       "Cannot have --downgrade AND --override_timestamp both"
 
-  # Load the dict file from the zip directly to have a peek at the OTA type.
-  # For packages using A/B update, unzipping is not needed.
+  # Load the build info dicts from the zip directly or the extracted input
+  # directory. We don't need to unzip the entire target-files zips, because they
+  # won't be needed for A/B OTAs (brillo_update_payload does that on its own).
+  # When loading the info dicts, we don't need to provide the second parameter
+  # to common.LoadInfoDict(). Specifying the second parameter allows replacing
+  # some properties with their actual paths, such as 'selinux_fc' and
+  # 'ramdisk_dir', which won't be used during OTA generation.
   if OPTIONS.extracted_input is not None:
-    OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.extracted_input, OPTIONS.extracted_input)
+    OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.extracted_input)
   else:
-    input_zip = zipfile.ZipFile(args[0], "r")
-    OPTIONS.info_dict = common.LoadInfoDict(input_zip)
-    common.ZipClose(input_zip)
+    with zipfile.ZipFile(args[0], 'r') as input_zip:
+      OPTIONS.info_dict = common.LoadInfoDict(input_zip)
+
+  if OPTIONS.verbose:
+    print("--- target info ---")
+    common.DumpInfoDict(OPTIONS.info_dict)
+
+  # Load the source build dict if applicable.
+  if OPTIONS.incremental_source is not None:
+    OPTIONS.target_info_dict = OPTIONS.info_dict
+    with zipfile.ZipFile(OPTIONS.incremental_source, 'r') as source_zip:
+      OPTIONS.source_info_dict = common.LoadInfoDict(source_zip)
+
+    if OPTIONS.verbose:
+      print("--- source info ---")
+      common.DumpInfoDict(OPTIONS.source_info_dict)
+
+  # Load OEM dicts if provided.
+  OPTIONS.oem_dicts = _LoadOemDicts(OPTIONS.oem_source)
 
   ab_update = OPTIONS.info_dict.get("ab_update") == "true"
 
@@ -1415,20 +1893,6 @@
     OPTIONS.key_passwords = common.GetKeyPasswords([OPTIONS.package_key])
 
   if ab_update:
-    if OPTIONS.incremental_source is not None:
-      OPTIONS.target_info_dict = OPTIONS.info_dict
-      source_zip = zipfile.ZipFile(OPTIONS.incremental_source, "r")
-      OPTIONS.source_info_dict = common.LoadInfoDict(source_zip)
-      common.ZipClose(source_zip)
-
-    if OPTIONS.verbose:
-      print("--- target info ---")
-      common.DumpInfoDict(OPTIONS.info_dict)
-
-      if OPTIONS.incremental_source is not None:
-        print("--- source info ---")
-        common.DumpInfoDict(OPTIONS.source_info_dict)
-
     WriteABOTAPackageWithBrilloScript(
         target_file=args[0],
         output_file=args[1],
@@ -1437,109 +1901,67 @@
     print("done.")
     return
 
+  # Sanity check the loaded info dicts first.
+  if OPTIONS.info_dict.get("no_recovery") == "true":
+    raise common.ExternalError(
+        "--- target build has specified no recovery ---")
+
+  # Non-A/B OTAs rely on /cache partition to store temporary files.
+  cache_size = OPTIONS.info_dict.get("cache_size")
+  if cache_size is None:
+    print("--- can't determine the cache partition size ---")
+  OPTIONS.cache_size = cache_size
+
   if OPTIONS.extra_script is not None:
     OPTIONS.extra_script = open(OPTIONS.extra_script).read()
 
   if OPTIONS.extracted_input is not None:
     OPTIONS.input_tmp = OPTIONS.extracted_input
-    OPTIONS.target_tmp = OPTIONS.input_tmp
-    OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.input_tmp, OPTIONS.input_tmp)
-    input_zip = zipfile.ZipFile(args[0], "r")
   else:
     print("unzipping target target-files...")
-    OPTIONS.input_tmp, input_zip = common.UnzipTemp(
-        args[0], UNZIP_PATTERN)
+    OPTIONS.input_tmp = common.UnzipTemp(args[0], UNZIP_PATTERN)
+  OPTIONS.target_tmp = OPTIONS.input_tmp
 
-    OPTIONS.target_tmp = OPTIONS.input_tmp
-    OPTIONS.info_dict = common.LoadInfoDict(input_zip, OPTIONS.target_tmp)
-
-  if OPTIONS.verbose:
-    print("--- target info ---")
-    common.DumpInfoDict(OPTIONS.info_dict)
-
-  # If the caller explicitly specified the device-specific extensions
-  # path via -s/--device_specific, use that.  Otherwise, use
-  # META/releasetools.py if it is present in the target target_files.
-  # Otherwise, take the path of the file from 'tool_extensions' in the
-  # info dict and look for that in the local filesystem, relative to
-  # the current directory.
-
+  # If the caller explicitly specified the device-specific extensions path via
+  # -s / --device_specific, use that. Otherwise, use META/releasetools.py if it
+  # is present in the target target_files. Otherwise, take the path of the file
+  # from 'tool_extensions' in the info dict and look for that in the local
+  # filesystem, relative to the current directory.
   if OPTIONS.device_specific is None:
     from_input = os.path.join(OPTIONS.input_tmp, "META", "releasetools.py")
     if os.path.exists(from_input):
       print("(using device-specific extensions from target_files)")
       OPTIONS.device_specific = from_input
     else:
-      OPTIONS.device_specific = OPTIONS.info_dict.get("tool_extensions", None)
+      OPTIONS.device_specific = OPTIONS.info_dict.get("tool_extensions")
 
   if OPTIONS.device_specific is not None:
     OPTIONS.device_specific = os.path.abspath(OPTIONS.device_specific)
 
-  if OPTIONS.info_dict.get("no_recovery") == "true":
-    raise common.ExternalError(
-        "--- target build has specified no recovery ---")
-
-  # Set up the output zip. Create a temporary zip file if signing is needed.
-  if OPTIONS.no_signing:
-    if os.path.exists(args[1]):
-      os.unlink(args[1])
-    output_zip = zipfile.ZipFile(args[1], "w",
-                                 compression=zipfile.ZIP_DEFLATED)
-  else:
-    temp_zip_file = tempfile.NamedTemporaryFile()
-    output_zip = zipfile.ZipFile(temp_zip_file, "w",
-                                 compression=zipfile.ZIP_DEFLATED)
-
-  # Non A/B OTAs rely on /cache partition to store temporary files.
-  cache_size = OPTIONS.info_dict.get("cache_size", None)
-  if cache_size is None:
-    print("--- can't determine the cache partition size ---")
-  OPTIONS.cache_size = cache_size
-
-  # Generate a verify package.
-  if OPTIONS.gen_verify:
-    WriteVerifyPackage(input_zip, output_zip)
-
   # Generate a full OTA.
-  elif OPTIONS.incremental_source is None:
-    WriteFullOTAPackage(input_zip, output_zip)
+  if OPTIONS.incremental_source is None:
+    with zipfile.ZipFile(args[0], 'r') as input_zip:
+      WriteFullOTAPackage(
+          input_zip,
+          output_file=args[1])
 
-  # Generate an incremental OTA. It will fall back to generate a full OTA on
-  # failure unless no_fallback_to_full is specified.
+  # Generate an incremental OTA.
   else:
     print("unzipping source target-files...")
-    OPTIONS.source_tmp, source_zip = common.UnzipTemp(
-        OPTIONS.incremental_source,
-        UNZIP_PATTERN)
-    OPTIONS.target_info_dict = OPTIONS.info_dict
-    OPTIONS.source_info_dict = common.LoadInfoDict(source_zip,
-                                                   OPTIONS.source_tmp)
-    if OPTIONS.verbose:
-      print("--- source info ---")
-      common.DumpInfoDict(OPTIONS.source_info_dict)
-    try:
-      WriteBlockIncrementalOTAPackage(input_zip, source_zip, output_zip)
-      if OPTIONS.log_diff:
-        out_file = open(OPTIONS.log_diff, 'w')
+    OPTIONS.source_tmp = common.UnzipTemp(
+        OPTIONS.incremental_source, UNZIP_PATTERN)
+    with zipfile.ZipFile(args[0], 'r') as input_zip, \
+        zipfile.ZipFile(OPTIONS.incremental_source, 'r') as source_zip:
+      WriteBlockIncrementalOTAPackage(
+          input_zip,
+          source_zip,
+          output_file=args[1])
+
+    if OPTIONS.log_diff:
+      with open(OPTIONS.log_diff, 'w') as out_file:
         import target_files_diff
-        target_files_diff.recursiveDiff('',
-                                        OPTIONS.source_tmp,
-                                        OPTIONS.input_tmp,
-                                        out_file)
-        out_file.close()
-    except ValueError:
-      if not OPTIONS.fallback_to_full:
-        raise
-      print("--- failed to build incremental; falling back to full ---")
-      OPTIONS.incremental_source = None
-      WriteFullOTAPackage(input_zip, output_zip)
-
-  common.ZipClose(output_zip)
-
-  # Sign the generated zip package unless no_signing is specified.
-  if not OPTIONS.no_signing:
-    SignOutput(temp_zip_file.name, args[1])
-    temp_zip_file.close()
+        target_files_diff.recursiveDiff(
+            '', OPTIONS.source_tmp, OPTIONS.input_tmp, out_file)
 
   print("done.")
 
diff --git a/tools/releasetools/pylintrc b/tools/releasetools/pylintrc
index 90de1af..7b3405c 100644
--- a/tools/releasetools/pylintrc
+++ b/tools/releasetools/pylintrc
@@ -144,9 +144,6 @@
 
 [BASIC]
 
-# Required attributes for module, separated by a comma
-required-attributes=
-
 # List of builtins function names that should not be used, separated by a comma
 bad-functions=map,filter,input
 
@@ -357,10 +354,6 @@
 
 [CLASSES]
 
-# List of interface methods to ignore, separated by a comma. This is used for
-# instance to not check methods defines in Zope's Interface base class.
-ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by
-
 # List of method names used to declare (i.e. assign) instance attributes.
 defining-attr-methods=__init__,__new__,setUp
 
diff --git a/tools/releasetools/rangelib.py b/tools/releasetools/rangelib.py
index 87380a5..36becf4 100644
--- a/tools/releasetools/rangelib.py
+++ b/tools/releasetools/rangelib.py
@@ -13,18 +13,26 @@
 # limitations under the License.
 
 from __future__ import print_function
+
 import heapq
 import itertools
 
+
 __all__ = ["RangeSet"]
 
+
 class RangeSet(object):
-  """A RangeSet represents a set of nonoverlapping ranges on the
-  integers (ie, a set of integers, but efficient when the set contains
-  lots of runs."""
+  """A RangeSet represents a set of non-overlapping ranges on integers.
+
+  Attributes:
+    monotonic: Whether the input has all its integers in increasing order.
+    extra: A dict that can be used by the caller, e.g. to store info that's
+        only meaningful to the caller.
+  """
 
   def __init__(self, data=None):
     self.monotonic = False
+    self._extra = {}
     if isinstance(data, str):
       self._parse_internal(data)
     elif data:
@@ -56,18 +64,24 @@
   def __repr__(self):
     return '<RangeSet("' + self.to_string() + '")>'
 
+  @property
+  def extra(self):
+    return self._extra
+
   @classmethod
   def parse(cls, text):
-    """Parse a text string consisting of a space-separated list of
-    blocks and ranges, eg "10-20 30 35-40".  Ranges are interpreted to
-    include both their ends (so the above example represents 18
-    individual blocks.  Returns a RangeSet object.
+    """Parses a text string into a RangeSet.
 
-    If the input has all its blocks in increasing order, then returned
-    RangeSet will have an extra attribute 'monotonic' that is set to
-    True.  For example the input "10-20 30" is monotonic, but the input
-    "15-20 30 10-14" is not, even though they represent the same set
-    of blocks (and the two RangeSets will compare equal with ==).
+    The input text string consists of a space-separated list of blocks and
+    ranges, e.g. "10-20 30 35-40". Ranges are interpreted to include both their
+    ends (so the above example represents 18 individual blocks). Returns a
+    RangeSet object.
+
+    If the input has all its blocks in increasing order, then the 'monotonic'
+    attribute of the returned RangeSet will be set to True. For example the
+    input "10-20 30" is monotonic, but the input "15-20 30 10-14" is not, even
+    though they represent the same set of blocks (and the two RangeSets will
+    compare equal with ==).
     """
     return cls(text)
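A short usage example of the parsing semantics described above (a sketch that relies only on behavior stated in the docstring):

```
from rangelib import RangeSet

# "10-20 30" covers blocks 10..20 plus block 30 (ranges include both ends).
a = RangeSet.parse("10-20 30")
b = RangeSet.parse("15-20 30 10-14")

assert a == b            # same set of blocks
assert a.monotonic       # input blocks were given in increasing order
assert not b.monotonic   # same set, but given out of order
```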
 
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index e4ef2c1..fa62c8f 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -90,26 +90,31 @@
       the existing ones in info dict.
 """
 
-import sys
-
-if sys.hexversion < 0x02070000:
-  print >> sys.stderr, "Python 2.7 or newer is required."
-  sys.exit(1)
+from __future__ import print_function
 
 import base64
-import cStringIO
 import copy
 import errno
+import gzip
 import os
 import re
+import shutil
 import stat
 import subprocess
+import sys
 import tempfile
 import zipfile
+from xml.etree import ElementTree
 
 import add_img_to_target_files
 import common
 
+
+if sys.hexversion < 0x02070000:
+  print("Python 2.7 or newer is required.", file=sys.stderr)
+  sys.exit(1)
+
+
 OPTIONS = common.OPTIONS
 
 OPTIONS.extra_apks = {}
@@ -124,9 +129,8 @@
 OPTIONS.avb_algorithms = {}
 OPTIONS.avb_extra_args = {}
 
-def GetApkCerts(tf_zip):
-  certmap = common.ReadApkCerts(tf_zip)
 
+def GetApkCerts(certmap):
   # apply the key remapping to the contents of the file
   for apk, cert in certmap.iteritems():
     certmap[apk] = OPTIONS.key_map.get(cert, cert)
@@ -140,28 +144,50 @@
   return certmap
 
 
-def CheckAllApksSigned(input_tf_zip, apk_key_map):
+def CheckAllApksSigned(input_tf_zip, apk_key_map, compressed_extension):
   """Check that all the APKs we want to sign have keys specified, and
   error out if they don't."""
   unknown_apks = []
+  compressed_apk_extension = None
+  if compressed_extension:
+    compressed_apk_extension = ".apk" + compressed_extension
   for info in input_tf_zip.infolist():
-    if info.filename.endswith(".apk"):
+    if (info.filename.endswith(".apk") or
+        (compressed_apk_extension and
+         info.filename.endswith(compressed_apk_extension))):
       name = os.path.basename(info.filename)
+      if compressed_apk_extension and name.endswith(compressed_apk_extension):
+        name = name[:-len(compressed_extension)]
       if name not in apk_key_map:
         unknown_apks.append(name)
   if unknown_apks:
-    print "ERROR: no key specified for:\n\n ",
-    print "\n  ".join(unknown_apks)
-    print "\nUse '-e <apkname>=' to specify a key (which may be an"
-    print "empty string to not sign this apk)."
+    print("ERROR: no key specified for:\n")
+    print("  " + "\n  ".join(unknown_apks))
+    print("\nUse '-e <apkname>=' to specify a key (which may be an empty "
+          "string to not sign this apk).")
     sys.exit(1)
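The suffix handling above strips only the compression extension, keeping ".apk" so the key-map lookup still matches; a hedged illustration, assuming compressed_extension is ".gz":

```
compressed_extension = ".gz"                  # assumed value, for illustration
compressed_apk_extension = ".apk" + compressed_extension

name = "Settings.apk.gz"
if name.endswith(compressed_apk_extension):
  name = name[:-len(compressed_extension)]    # strips ".gz" only
assert name == "Settings.apk"                 # key lookups use this name
```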
 
 
-def SignApk(data, keyname, pw, platform_api_level, codename_to_api_level_map):
+def SignApk(data, keyname, pw, platform_api_level, codename_to_api_level_map,
+            is_compressed):
   unsigned = tempfile.NamedTemporaryFile()
   unsigned.write(data)
   unsigned.flush()
 
+  if is_compressed:
+    uncompressed = tempfile.NamedTemporaryFile()
+    with gzip.open(unsigned.name, "rb") as in_file, \
+         open(uncompressed.name, "wb") as out_file:
+      shutil.copyfileobj(in_file, out_file)
+
+    # Finally, close the "unsigned" file (which is gzip compressed), and then
+    # replace it with the uncompressed version.
+    #
+    # TODO(narayan): All this nastiness can be avoided if Python 3.2+ is in
+    # use, since we could just gzip / gunzip in-memory buffers instead.
+    unsigned.close()
+    unsigned = uncompressed
+
   signed = tempfile.NamedTemporaryFile()
 
   # For pre-N builds, don't upgrade to SHA-256 JAR signatures based on the APK's
@@ -183,10 +209,22 @@
     min_api_level = 1
 
   common.SignFile(unsigned.name, signed.name, keyname, pw,
-      min_api_level=min_api_level,
-      codename_to_api_level_map=codename_to_api_level_map)
+                  min_api_level=min_api_level,
+                  codename_to_api_level_map=codename_to_api_level_map)
 
-  data = signed.read()
+  data = None
+  if is_compressed:
+    # Recompress the file after it has been signed.
+    compressed = tempfile.NamedTemporaryFile()
+    with open(signed.name, "rb") as in_file, \
+         gzip.open(compressed.name, "wb") as out_file:
+      shutil.copyfileobj(in_file, out_file)
+
+    data = compressed.read()
+    compressed.close()
+  else:
+    data = signed.read()
+
   unsigned.close()
   signed.close()
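The TODO above suggests an in-memory alternative; a hedged sketch of what that could look like with io.BytesIO buffers (Python 3 style, not what this change does):

```
import gzip
import io

def gunzip_bytes(data):
  """Decompresses gzip'd bytes entirely in memory."""
  with gzip.GzipFile(fileobj=io.BytesIO(data), mode="rb") as f:
    return f.read()

def gzip_bytes(data):
  """Compresses bytes entirely in memory."""
  out = io.BytesIO()
  with gzip.GzipFile(fileobj=out, mode="wb") as f:
    f.write(data)
  return out.getvalue()

assert gunzip_bytes(gzip_bytes(b"signed apk bytes")) == b"signed apk bytes"
```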
 
@@ -195,11 +233,18 @@
 
 def ProcessTargetFiles(input_tf_zip, output_tf_zip, misc_info,
                        apk_key_map, key_passwords, platform_api_level,
-                       codename_to_api_level_map):
+                       codename_to_api_level_map,
+                       compressed_extension):
 
-  maxsize = max([len(os.path.basename(i.filename))
-                 for i in input_tf_zip.infolist()
-                 if i.filename.endswith('.apk')])
+  compressed_apk_extension = None
+  if compressed_extension:
+    compressed_apk_extension = ".apk" + compressed_extension
+
+  maxsize = max(
+      [len(os.path.basename(i.filename)) for i in input_tf_zip.infolist()
+       if (i.filename.endswith('.apk') or
+           (compressed_apk_extension and
+            i.filename.endswith(compressed_apk_extension)))])
   system_root_image = misc_info.get("system_root_image") == "true"
 
   for info in input_tf_zip.infolist():
@@ -210,17 +255,24 @@
     out_info = copy.copy(info)
 
     # Sign APKs.
-    if info.filename.endswith(".apk"):
+    if (info.filename.endswith(".apk") or
+        (compressed_apk_extension and
+         info.filename.endswith(compressed_apk_extension))):
+      is_compressed = (compressed_extension and
+                       info.filename.endswith(compressed_apk_extension))
       name = os.path.basename(info.filename)
+      if is_compressed:
+        name = name[:-len(compressed_extension)]
+
       key = apk_key_map[name]
       if key not in common.SPECIAL_CERT_STRINGS:
-        print "    signing: %-*s (%s)" % (maxsize, name, key)
+        print("    signing: %-*s (%s)" % (maxsize, name, key))
         signed_data = SignApk(data, key, key_passwords[key], platform_api_level,
-            codename_to_api_level_map)
+                              codename_to_api_level_map, is_compressed)
         common.ZipWriteStr(output_tf_zip, out_info, signed_data)
       else:
         # an APK we're not supposed to sign.
-        print "NOT signing: %s" % (name,)
+        print("NOT signing: %s" % (name,))
         common.ZipWriteStr(output_tf_zip, out_info, data)
 
     # System properties.
@@ -232,15 +284,17 @@
                            "ROOT/default.prop",  # legacy
                            "RECOVERY/RAMDISK/prop.default",
                            "RECOVERY/RAMDISK/default.prop"):  # legacy
-      print "rewriting %s:" % (info.filename,)
+      print("Rewriting %s:" % (info.filename,))
       if stat.S_ISLNK(info.external_attr >> 16):
         new_data = data
       else:
-        new_data = RewriteProps(data, misc_info)
+        new_data = RewriteProps(data)
       common.ZipWriteStr(output_tf_zip, out_info, new_data)
 
+    # Replace the certs in *mac_permissions.xml (there could be multiple, such
+    # as {system,vendor}/etc/selinux/{plat,nonplat}_mac_permissions.xml).
     elif info.filename.endswith("mac_permissions.xml"):
-      print "rewriting %s with new keys." % (info.filename,)
+      print("Rewriting %s with new keys." % (info.filename,))
       new_data = ReplaceCerts(data)
       common.ZipWriteStr(output_tf_zip, out_info, new_data)
 
@@ -291,10 +345,7 @@
     ReplaceVerityPrivateKey(misc_info, OPTIONS.replace_verity_private_key[1])
 
   if OPTIONS.replace_verity_public_key:
-    if system_root_image:
-      dest = "ROOT/verity_key"
-    else:
-      dest = "BOOT/RAMDISK/verity_key"
+    dest = "ROOT/verity_key" if system_root_image else "BOOT/RAMDISK/verity_key"
     # We are replacing the one in boot image only, since the one under
     # recovery won't ever be needed.
     ReplaceVerityPublicKey(
@@ -313,38 +364,67 @@
 
 
 def ReplaceCerts(data):
-  """Given a string of data, replace all occurences of a set
-  of X509 certs with a newer set of X509 certs and return
-  the updated data string."""
-  for old, new in OPTIONS.key_map.iteritems():
-    try:
-      if OPTIONS.verbose:
-        print "    Replacing %s.x509.pem with %s.x509.pem" % (old, new)
-      f = open(old + ".x509.pem")
-      old_cert16 = base64.b16encode(common.ParseCertificate(f.read())).lower()
-      f.close()
-      f = open(new + ".x509.pem")
-      new_cert16 = base64.b16encode(common.ParseCertificate(f.read())).lower()
-      f.close()
-      # Only match entire certs.
-      pattern = "\\b"+old_cert16+"\\b"
-      (data, num) = re.subn(pattern, new_cert16, data, flags=re.IGNORECASE)
-      if OPTIONS.verbose:
-        print "    Replaced %d occurence(s) of %s.x509.pem with " \
-            "%s.x509.pem" % (num, old, new)
-    except IOError as e:
-      if e.errno == errno.ENOENT and not OPTIONS.verbose:
-        continue
+  """Replaces all the occurences of X.509 certs with the new ones.
 
-      print "    Error accessing %s. %s. Skip replacing %s.x509.pem " \
-          "with %s.x509.pem." % (e.filename, e.strerror, old, new)
+  The mapping info is read from OPTIONS.key_map. Non-existent certificates will
+  be skipped. After the replacement, it additionally checks for duplicate
+  entries, which would otherwise fail the policy loading code in
+  frameworks/base/services/core/java/com/android/server/pm/SELinuxMMAC.java.
+
+  Args:
+    data: Input string that contains a set of X.509 certs.
+
+  Returns:
+    A string after the replacement.
+
+  Raises:
+    AssertionError: On finding duplicate entries.
+  """
+  for old, new in OPTIONS.key_map.iteritems():
+    if OPTIONS.verbose:
+      print("    Replacing %s.x509.pem with %s.x509.pem" % (old, new))
+
+    try:
+      with open(old + ".x509.pem") as old_fp:
+        old_cert16 = base64.b16encode(
+            common.ParseCertificate(old_fp.read())).lower()
+      with open(new + ".x509.pem") as new_fp:
+        new_cert16 = base64.b16encode(
+            common.ParseCertificate(new_fp.read())).lower()
+    except IOError as e:
+      if OPTIONS.verbose or e.errno != errno.ENOENT:
+        print("    Error accessing %s: %s.\nSkip replacing %s.x509.pem with "
+              "%s.x509.pem." % (e.filename, e.strerror, old, new))
+      continue
+
+    # Only match entire certs.
+    pattern = "\\b" + old_cert16 + "\\b"
+    (data, num) = re.subn(pattern, new_cert16, data, flags=re.IGNORECASE)
+
+    if OPTIONS.verbose:
+      print("    Replaced %d occurence(s) of %s.x509.pem with %s.x509.pem" % (
+          num, old, new))
+
+  # Verify that there are no duplicate entries after the replacement. Note that
+  # it's only checking entries with global seinfo at the moment (i.e. ignoring
+  # the ones with inner packages). (Bug: 69479366)
+  root = ElementTree.fromstring(data)
+  signatures = [signer.attrib['signature'] for signer in root.findall('signer')]
+  assert len(signatures) == len(set(signatures)), \
+      "Found duplicate entries after cert replacement: {}".format(data)
 
   return data
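To make the new duplicate check concrete, here is a hedged, self-contained run of the same ElementTree pass on a minimal, fabricated mac_permissions.xml-style document:

```
from xml.etree import ElementTree

data = """<policy>
  <signer signature="308201aa"><seinfo value="platform"/></signer>
  <signer signature="308201bb"><seinfo value="media"/></signer>
</policy>"""

root = ElementTree.fromstring(data)
signatures = [signer.attrib['signature'] for signer in root.findall('signer')]
# Two distinct signer certs -> no duplicates, so the assertion passes.
assert len(signatures) == len(set(signatures))
```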
 
 
 def EditTags(tags):
-  """Given a string containing comma-separated tags, apply the edits
-  specified in OPTIONS.tag_changes and return the updated string."""
+  """Applies the edits to the tag string as specified in OPTIONS.tag_changes.
+
+  Args:
+    tags: The input string that contains comma-separated tags.
+
+  Returns:
+    The updated tags (comma-separated and sorted).
+  """
   tags = set(tags.split(","))
   for ch in OPTIONS.tag_changes:
     if ch[0] == "-":
@@ -354,20 +434,27 @@
   return ",".join(sorted(tags))
 
 
-def RewriteProps(data, misc_info):
+def RewriteProps(data):
+  """Rewrites the system properties in the given string.
+
+  Each property is expected in 'key=value' format. The properties that contain
+  build tags (i.e. test-keys, dev-keys) will be updated accordingly by calling
+  EditTags().
+
+  Args:
+    data: Input string, separated by newlines.
+
+  Returns:
+    The string with modified properties.
+  """
   output = []
   for line in data.split("\n"):
     line = line.strip()
     original_line = line
     if line and line[0] != '#' and "=" in line:
       key, value = line.split("=", 1)
-      if (key in ("ro.build.fingerprint", "ro.vendor.build.fingerprint")
-          and misc_info.get("oem_fingerprint_properties") is None):
-        pieces = value.split("/")
-        pieces[-1] = EditTags(pieces[-1])
-        value = "/".join(pieces)
-      elif (key in ("ro.build.thumbprint", "ro.vendor.build.thumbprint")
-            and misc_info.get("oem_fingerprint_properties") is not None):
+      if key in ("ro.build.fingerprint", "ro.build.thumbprint",
+                 "ro.vendor.build.fingerprint", "ro.vendor.build.thumbprint"):
         pieces = value.split("/")
         pieces[-1] = EditTags(pieces[-1])
         value = "/".join(pieces)
@@ -390,8 +477,8 @@
         value = " ".join(value)
       line = key + "=" + value
     if line != original_line:
-      print "  replace: ", original_line
-      print "     with: ", line
+      print("  replace: ", original_line)
+      print("     with: ", line)
     output.append(line)
   return "\n".join(output) + "\n"
 
@@ -407,7 +494,7 @@
     extra_recovery_keys = [OPTIONS.key_map.get(k, k) + ".x509.pem"
                            for k in extra_recovery_keys.split()]
     if extra_recovery_keys:
-      print "extra recovery-only key(s): " + ", ".join(extra_recovery_keys)
+      print("extra recovery-only key(s): " + ", ".join(extra_recovery_keys))
   else:
     extra_recovery_keys = []
 
@@ -421,8 +508,8 @@
     mapped_keys.append(OPTIONS.key_map.get(k, k) + ".x509.pem")
 
   if mapped_keys:
-    print "using:\n   ", "\n   ".join(mapped_keys)
-    print "for OTA package verification"
+    print("using:\n   ", "\n   ".join(mapped_keys))
+    print("for OTA package verification")
   else:
     devkey = misc_info.get("default_system_dev_certificate",
                            "build/target/product/security/testkey")
@@ -456,7 +543,11 @@
   # put into a zipfile system/etc/security/otacerts.zip.
   # We DO NOT include the extra_recovery_keys (if any) here.
 
-  temp_file = cStringIO.StringIO()
+  try:
+    from StringIO import StringIO
+  except ImportError:
+    from io import StringIO
+  temp_file = StringIO()
   certs_zip = zipfile.ZipFile(temp_file, "w")
   for k in mapped_keys:
     common.ZipWrite(certs_zip, k)
@@ -472,11 +563,8 @@
       print("\n  WARNING: Found more than one OTA keys; Using the first one"
             " as payload verification key.\n\n")
 
-    print "Using %s for payload verification." % (mapped_keys[0],)
-    cmd = common.Run(
-        ["openssl", "x509", "-pubkey", "-noout", "-in", mapped_keys[0]],
-        stdout=subprocess.PIPE)
-    pubkey, _ = cmd.communicate()
+    print("Using %s for payload verification." % (mapped_keys[0],))
+    pubkey = common.ExtractPublicKey(mapped_keys[0])
     common.ZipWriteStr(
         output_tf_zip,
         "SYSTEM/etc/update_engine/update-payload-key.pub.pem",
@@ -489,41 +577,62 @@
   return new_recovery_keys
 
 
-def ReplaceVerityPublicKey(targetfile_zip, filename, key_path):
-  print "Replacing verity public key with %s" % (key_path,)
-  common.ZipWrite(targetfile_zip, key_path, arcname=filename)
+def ReplaceVerityPublicKey(output_zip, filename, key_path):
+  """Replaces the verity public key at the given path in the given zip.
+
+  Args:
+    output_zip: The output target_files zip.
+    filename: The archive name in the output zip.
+    key_path: The path to the public key.
+  """
+  print("Replacing verity public key with %s" % (key_path,))
+  common.ZipWrite(output_zip, key_path, arcname=filename)
 
 
 def ReplaceVerityPrivateKey(misc_info, key_path):
-  print "Replacing verity private key with %s" % (key_path,)
+  """Replaces the verity private key in misc_info dict.
+
+  Args:
+    misc_info: The info dict.
+    key_path: The path to the private key in PKCS#8 format.
+  """
+  print("Replacing verity private key with %s" % (key_path,))
   misc_info["verity_key"] = key_path
 
 
-def ReplaceVerityKeyId(targetfile_input_zip, targetfile_output_zip, keypath):
-  in_cmdline = targetfile_input_zip.read("BOOT/cmdline")
-  # copy in_cmdline to output_zip if veritykeyid is not present in in_cmdline
-  if "veritykeyid" not in in_cmdline:
-    common.ZipWriteStr(targetfile_output_zip, "BOOT/cmdline", in_cmdline)
-    return in_cmdline
-  out_cmdline = []
-  for param in in_cmdline.split():
-    if "veritykeyid" in param:
-      # extract keyid using openssl command
-      p = common.Run(
-          ["openssl", "x509", "-in", keypath, "-text"],
-          stdout=subprocess.PIPE)
-      keyid, stderr = p.communicate()
-      keyid = re.search(
-          r'keyid:([0-9a-fA-F:]*)', keyid).group(1).replace(':', '').lower()
-      print "Replacing verity keyid with %s error=%s" % (keyid, stderr)
-      out_cmdline.append("veritykeyid=id:%s" % (keyid,))
-    else:
-      out_cmdline.append(param)
+def ReplaceVerityKeyId(input_zip, output_zip, key_path):
+  """Replaces the veritykeyid parameter in BOOT/cmdline.
 
-  out_cmdline = ' '.join(out_cmdline)
-  out_cmdline = out_cmdline.strip()
-  print "out_cmdline %s" % (out_cmdline)
-  common.ZipWriteStr(targetfile_output_zip, "BOOT/cmdline", out_cmdline)
+  Args:
+    input_zip: The input target_files zip, which should be already open.
+    output_zip: The output target_files zip, which should be already open and
+        writable.
+    key_path: The path to the PEM encoded X.509 certificate.
+  """
+  in_cmdline = input_zip.read("BOOT/cmdline")
+  # Copy in_cmdline to output_zip if veritykeyid is not present.
+  if "veritykeyid" not in in_cmdline:
+    common.ZipWriteStr(output_zip, "BOOT/cmdline", in_cmdline)
+    return
+
+  out_buffer = []
+  for param in in_cmdline.split():
+    if "veritykeyid" not in param:
+      out_buffer.append(param)
+      continue
+
+    # Extract keyid using openssl command.
+    p = common.Run(["openssl", "x509", "-in", key_path, "-text"],
+                   stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    keyid, stderr = p.communicate()
+    assert p.returncode == 0, "Failed to dump certificate: {}".format(stderr)
+    keyid = re.search(
+        r'keyid:([0-9a-fA-F:]*)', keyid).group(1).replace(':', '').lower()
+    print("Replacing verity keyid with {}".format(keyid))
+    out_buffer.append("veritykeyid=id:%s" % (keyid,))
+
+  out_cmdline = ' '.join(out_buffer).strip() + '\n'
+  common.ZipWriteStr(output_zip, "BOOT/cmdline", out_cmdline)
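For reference, the keyid extraction in isolation, run on a fabricated snippet of `openssl x509 -text` output:

```
import re

# Fabricated snippet of what `openssl x509 -text` prints for a certificate:
text = """
        X509v3 Authority Key Identifier:
            keyid:AB:CD:12:34:EF:56
"""

keyid = re.search(r'keyid:([0-9a-fA-F:]*)', text).group(1)
keyid = keyid.replace(':', '').lower()
assert keyid == 'abcd1234ef56'
# The kernel cmdline parameter then becomes: veritykeyid=id:abcd1234ef56
```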
 
 
 def ReplaceMiscInfoTxt(input_zip, output_zip, misc_info):
@@ -545,11 +654,12 @@
   """Replaces the AVB signing keys."""
 
   AVB_FOOTER_ARGS_BY_PARTITION = {
-    'boot' : 'avb_boot_add_hash_footer_args',
-    'dtbo' : 'avb_dtbo_add_hash_footer_args',
-    'system' : 'avb_system_add_hashtree_footer_args',
-    'vendor' : 'avb_vendor_add_hashtree_footer_args',
-    'vbmeta' : 'avb_vbmeta_args',
+      'boot' : 'avb_boot_add_hash_footer_args',
+      'dtbo' : 'avb_dtbo_add_hash_footer_args',
+      'recovery' : 'avb_recovery_add_hash_footer_args',
+      'system' : 'avb_system_add_hashtree_footer_args',
+      'vendor' : 'avb_vendor_add_hashtree_footer_args',
+      'vbmeta' : 'avb_vbmeta_args',
   }
 
   def ReplaceAvbPartitionSigningKey(partition):
@@ -560,15 +670,15 @@
     algorithm = OPTIONS.avb_algorithms.get(partition)
     assert algorithm, 'Missing AVB signing algorithm for %s' % (partition,)
 
-    print 'Replacing AVB signing key for %s with "%s" (%s)' % (
-        partition, key, algorithm)
+    print('Replacing AVB signing key for %s with "%s" (%s)' % (
+        partition, key, algorithm))
     misc_info['avb_' + partition + '_algorithm'] = algorithm
     misc_info['avb_' + partition + '_key_path'] = key
 
     extra_args = OPTIONS.avb_extra_args.get(partition)
     if extra_args:
-      print 'Setting extra AVB signing args for %s to "%s"' % (
-          partition, extra_args)
+      print('Setting extra AVB signing args for %s to "%s"' % (
+          partition, extra_args))
       args_key = AVB_FOOTER_ARGS_BY_PARTITION[partition]
       misc_info[args_key] = (misc_info.get(args_key, '') + ' ' + extra_args)
 
@@ -711,29 +821,29 @@
       argv, __doc__,
       extra_opts="e:d:k:ot:",
       extra_long_opts=[
-        "extra_apks=",
-        "default_key_mappings=",
-        "key_mapping=",
-        "replace_ota_keys",
-        "tag_changes=",
-        "replace_verity_public_key=",
-        "replace_verity_private_key=",
-        "replace_verity_keyid=",
-        "avb_vbmeta_algorithm=",
-        "avb_vbmeta_key=",
-        "avb_vbmeta_extra_args=",
-        "avb_boot_algorithm=",
-        "avb_boot_key=",
-        "avb_boot_extra_args=",
-        "avb_dtbo_algorithm=",
-        "avb_dtbo_key=",
-        "avb_dtbo_extra_args=",
-        "avb_system_algorithm=",
-        "avb_system_key=",
-        "avb_system_extra_args=",
-        "avb_vendor_algorithm=",
-        "avb_vendor_key=",
-        "avb_vendor_extra_args=",
+          "extra_apks=",
+          "default_key_mappings=",
+          "key_mapping=",
+          "replace_ota_keys",
+          "tag_changes=",
+          "replace_verity_public_key=",
+          "replace_verity_private_key=",
+          "replace_verity_keyid=",
+          "avb_vbmeta_algorithm=",
+          "avb_vbmeta_key=",
+          "avb_vbmeta_extra_args=",
+          "avb_boot_algorithm=",
+          "avb_boot_key=",
+          "avb_boot_extra_args=",
+          "avb_dtbo_algorithm=",
+          "avb_dtbo_key=",
+          "avb_dtbo_extra_args=",
+          "avb_system_algorithm=",
+          "avb_system_key=",
+          "avb_system_extra_args=",
+          "avb_vendor_algorithm=",
+          "avb_vendor_key=",
+          "avb_vendor_extra_args=",
       ],
       extra_option_handler=option_handler)
 
@@ -750,8 +860,9 @@
 
   BuildKeyMap(misc_info, key_mapping_options)
 
-  apk_key_map = GetApkCerts(input_zip)
-  CheckAllApksSigned(input_zip, apk_key_map)
+  certmap, compressed_extension = common.ReadApkCerts(input_zip)
+  apk_key_map = GetApkCerts(certmap)
+  CheckAllApksSigned(input_zip, apk_key_map, compressed_extension)
 
   key_passwords = common.GetKeyPasswords(set(apk_key_map.values()))
   platform_api_level, _ = GetApiLevelAndCodename(input_zip)
@@ -760,7 +871,8 @@
   ProcessTargetFiles(input_zip, output_zip, misc_info,
                      apk_key_map, key_passwords,
                      platform_api_level,
-                     codename_to_api_level_map)
+                     codename_to_api_level_map,
+                     compressed_extension)
 
   common.ZipClose(input_zip)
   common.ZipClose(output_zip)
@@ -774,16 +886,14 @@
   new_args.append(args[1])
   add_img_to_target_files.main(new_args)
 
-  print "done."
+  print("done.")
 
 
 if __name__ == '__main__':
   try:
     main(sys.argv[1:])
-  except common.ExternalError, e:
-    print
-    print "   ERROR: %s" % (e,)
-    print
+  except common.ExternalError as e:
+    print("\n   ERROR: %s\n" % (e,))
     sys.exit(1)
   finally:
     common.Cleanup()
diff --git a/tools/releasetools/sparse_img.py b/tools/releasetools/sparse_img.py
index 7eb60d9..083da7a 100644
--- a/tools/releasetools/sparse_img.py
+++ b/tools/releasetools/sparse_img.py
@@ -15,6 +15,7 @@
 import bisect
 import os
 import struct
+import threading
 from hashlib import sha1
 
 import rangelib
@@ -32,7 +33,7 @@
   """
 
   def __init__(self, simg_fn, file_map_fn=None, clobbered_blocks=None,
-               mode="rb", build_map=True):
+               mode="rb", build_map=True, allow_shared_blocks=False):
     self.simg_f = f = open(simg_fn, mode)
 
     header_bin = f.read(28)
@@ -111,6 +112,8 @@
         raise ValueError("Unknown chunk type 0x%04X not supported" %
                          (chunk_type,))
 
+    self.generator_lock = threading.Lock()
+
     self.care_map = rangelib.RangeSet(care_data)
     self.offset_index = [i[0] for i in offset_map]
 
@@ -126,7 +129,8 @@
     self.extended = extended
 
     if file_map_fn:
-      self.LoadFileBlockMap(file_map_fn, self.clobbered_blocks)
+      self.LoadFileBlockMap(file_map_fn, self.clobbered_blocks,
+                            allow_shared_blocks)
     else:
       self.file_map = {"__DATA": self.care_map}
 
@@ -173,40 +177,47 @@
     particular is not necessarily equal to the number of ranges in
     'ranges'.
 
-    This generator is stateful -- it depends on the open file object
-    contained in this SparseImage, so you should not try to run two
+    Use a lock to protect the generator so that we will not run two
     instances of this generator on the same object simultaneously."""
 
     f = self.simg_f
-    for s, e in ranges:
-      to_read = e-s
-      idx = bisect.bisect_right(self.offset_index, s) - 1
-      chunk_start, chunk_len, filepos, fill_data = self.offset_map[idx]
-
-      # for the first chunk we may be starting partway through it.
-      remain = chunk_len - (s - chunk_start)
-      this_read = min(remain, to_read)
-      if filepos is not None:
-        p = filepos + ((s - chunk_start) * self.blocksize)
-        f.seek(p, os.SEEK_SET)
-        yield f.read(this_read * self.blocksize)
-      else:
-        yield fill_data * (this_read * (self.blocksize >> 2))
-      to_read -= this_read
-
-      while to_read > 0:
-        # continue with following chunks if this range spans multiple chunks.
-        idx += 1
+    with self.generator_lock:
+      for s, e in ranges:
+        to_read = e-s
+        idx = bisect.bisect_right(self.offset_index, s) - 1
         chunk_start, chunk_len, filepos, fill_data = self.offset_map[idx]
-        this_read = min(chunk_len, to_read)
+
+        # for the first chunk we may be starting partway through it.
+        remain = chunk_len - (s - chunk_start)
+        this_read = min(remain, to_read)
         if filepos is not None:
-          f.seek(filepos, os.SEEK_SET)
+          p = filepos + ((s - chunk_start) * self.blocksize)
+          f.seek(p, os.SEEK_SET)
           yield f.read(this_read * self.blocksize)
         else:
           yield fill_data * (this_read * (self.blocksize >> 2))
         to_read -= this_read
 
-  def LoadFileBlockMap(self, fn, clobbered_blocks):
+        while to_read > 0:
+          # continue with following chunks if this range spans multiple chunks.
+          idx += 1
+          chunk_start, chunk_len, filepos, fill_data = self.offset_map[idx]
+          this_read = min(chunk_len, to_read)
+          if filepos is not None:
+            f.seek(filepos, os.SEEK_SET)
+            yield f.read(this_read * self.blocksize)
+          else:
+            yield fill_data * (this_read * (self.blocksize >> 2))
+          to_read -= this_read
+
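A hedged sketch of the concurrent calling pattern the new lock enables, assuming the public ReadRangeSet() helper drives this generator; the file name and ranges are placeholders:

```
import threading

import rangelib
import sparse_img

# Sketch only: 'system.img' is a placeholder path to a sparse image.
simg = sparse_img.SparseImage('system.img')

def read(ranges, out):
  # Iterating the generator seeks the shared file object; generator_lock
  # serializes those seeks so concurrent readers don't corrupt each other.
  out.extend(simg.ReadRangeSet(rangelib.RangeSet(ranges)))

a, b = [], []
t1 = threading.Thread(target=read, args=('0-5', a))
t2 = threading.Thread(target=read, args=('10-15', b))
t1.start(); t2.start()
t1.join(); t2.join()
```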
+  def LoadFileBlockMap(self, fn, clobbered_blocks, allow_shared_blocks):
+    """Loads the given block map file.
+
+    Args:
+      fn: The filename of the block map file.
+      clobbered_blocks: A RangeSet instance for the clobbered blocks.
+      allow_shared_blocks: Whether having shared blocks is allowed.
+    """
     remaining = self.care_map
     self.file_map = out = {}
 
@@ -214,6 +225,18 @@
       for line in f:
         fn, ranges = line.split(None, 1)
         ranges = rangelib.RangeSet.parse(ranges)
+
+        if allow_shared_blocks:
+          # Find the shared blocks that have been claimed by others.
+          shared_blocks = ranges.subtract(remaining)
+          if shared_blocks:
+            ranges = ranges.subtract(shared_blocks)
+            if not ranges:
+              continue
+
+            # Tag the entry so that we can skip applying imgdiff on this file.
+            ranges.extra['uses_shared_blocks'] = True
+
         out[fn] = ranges
         assert ranges.size() == ranges.intersect(remaining).size()
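A self-contained illustration of the shared-block handling, using only the RangeSet operations shown in this change (the block numbers are made up):

```
from rangelib import RangeSet

remaining = RangeSet("0-9")          # blocks not yet claimed by any file
ranges = RangeSet("5-14")            # this file's blocks per the map

shared = ranges.subtract(remaining)  # 10-14: already claimed elsewhere
if shared:
  ranges = ranges.subtract(shared)   # keep only the exclusively-owned 5-9
  # Tagged so imgdiff is skipped for this file, as in the code above.
  ranges.extra['uses_shared_blocks'] = True

assert ranges.to_string() == "5-9"
```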
 
diff --git a/tools/releasetools/test_add_img_to_target_files.py b/tools/releasetools/test_add_img_to_target_files.py
new file mode 100644
index 0000000..9a0f78e
--- /dev/null
+++ b/tools/releasetools/test_add_img_to_target_files.py
@@ -0,0 +1,339 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os
+import os.path
+import unittest
+import zipfile
+
+import common
+import test_utils
+from add_img_to_target_files import (
+    AddCareMapTxtForAbOta, AddPackRadioImages, AddRadioImagesForAbOta,
+    GetCareMap)
+from rangelib import RangeSet
+
+
+OPTIONS = common.OPTIONS
+
+
+class AddImagesToTargetFilesTest(unittest.TestCase):
+
+  def setUp(self):
+    OPTIONS.input_tmp = common.MakeTempDir()
+
+  def tearDown(self):
+    common.Cleanup()
+
+  @staticmethod
+  def _create_images(images, prefix):
+    """Creates images under OPTIONS.input_tmp/prefix."""
+    path = os.path.join(OPTIONS.input_tmp, prefix)
+    if not os.path.exists(path):
+      os.mkdir(path)
+
+    for image in images:
+      image_path = os.path.join(path, image + '.img')
+      with open(image_path, 'wb') as image_fp:
+        image_fp.write(image.encode())
+
+    images_path = os.path.join(OPTIONS.input_tmp, 'IMAGES')
+    if not os.path.exists(images_path):
+      os.mkdir(images_path)
+    return images, images_path
+
+  def test_AddRadioImagesForAbOta_imageExists(self):
+    """Tests the case with existing images under IMAGES/."""
+    images, images_path = self._create_images(['aboot', 'xbl'], 'IMAGES')
+    AddRadioImagesForAbOta(None, images)
+
+    for image in images:
+      self.assertTrue(
+          os.path.exists(os.path.join(images_path, image + '.img')))
+
+  def test_AddRadioImagesForAbOta_copyFromRadio(self):
+    """Tests the case that copies images from RADIO/."""
+    images, images_path = self._create_images(['aboot', 'xbl'], 'RADIO')
+    AddRadioImagesForAbOta(None, images)
+
+    for image in images:
+      self.assertTrue(
+          os.path.exists(os.path.join(images_path, image + '.img')))
+
+  def test_AddRadioImagesForAbOta_copyFromRadio_zipOutput(self):
+    images, _ = self._create_images(['aboot', 'xbl'], 'RADIO')
+
+    # Set up the output zip.
+    output_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(output_file, 'w') as output_zip:
+      AddRadioImagesForAbOta(output_zip, images)
+
+    with zipfile.ZipFile(output_file, 'r') as verify_zip:
+      for image in images:
+        self.assertIn('IMAGES/' + image + '.img', verify_zip.namelist())
+
+  def test_AddRadioImagesForAbOta_copyFromVendorImages(self):
+    """Tests the case that copies images from VENDOR_IMAGES/."""
+    vendor_images_path = os.path.join(OPTIONS.input_tmp, 'VENDOR_IMAGES')
+    os.mkdir(vendor_images_path)
+
+    partitions = ['aboot', 'xbl']
+    for index, partition in enumerate(partitions):
+      subdir = os.path.join(vendor_images_path, 'subdir-{}'.format(index))
+      os.mkdir(subdir)
+
+      partition_image_path = os.path.join(subdir, partition + '.img')
+      with open(partition_image_path, 'wb') as partition_fp:
+        partition_fp.write(partition.encode())
+
+    # Set up the output dir.
+    images_path = os.path.join(OPTIONS.input_tmp, 'IMAGES')
+    os.mkdir(images_path)
+
+    AddRadioImagesForAbOta(None, partitions)
+
+    for partition in partitions:
+      self.assertTrue(
+          os.path.exists(os.path.join(images_path, partition + '.img')))
+
+  def test_AddRadioImagesForAbOta_missingImages(self):
+    images, _ = self._create_images(['aboot', 'xbl'], 'RADIO')
+    self.assertRaises(AssertionError, AddRadioImagesForAbOta, None,
+                      images + ['baz'])
+
+  def test_AddRadioImagesForAbOta_missingImages_zipOutput(self):
+    images, _ = self._create_images(['aboot', 'xbl'], 'RADIO')
+
+    # Set up the output zip.
+    output_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(output_file, 'w') as output_zip:
+      self.assertRaises(AssertionError, AddRadioImagesForAbOta, output_zip,
+                        images + ['baz'])
+
+  def test_AddPackRadioImages(self):
+    images, images_path = self._create_images(['foo', 'bar'], 'RADIO')
+    AddPackRadioImages(None, images)
+
+    for image in images:
+      self.assertTrue(
+          os.path.exists(os.path.join(images_path, image + '.img')))
+
+  def test_AddPackRadioImages_with_suffix(self):
+    images, images_path = self._create_images(['foo', 'bar'], 'RADIO')
+    images_with_suffix = [image + '.img' for image in images]
+    AddPackRadioImages(None, images_with_suffix)
+
+    for image in images:
+      self.assertTrue(
+          os.path.exists(os.path.join(images_path, image + '.img')))
+
+  def test_AddPackRadioImages_zipOutput(self):
+    images, _ = self._create_images(['foo', 'bar'], 'RADIO')
+
+    # Set up the output zip.
+    output_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(output_file, 'w') as output_zip:
+      AddPackRadioImages(output_zip, images)
+
+    with zipfile.ZipFile(output_file, 'r') as verify_zip:
+      for image in images:
+        self.assertIn('IMAGES/' + image + '.img', verify_zip.namelist())
+
+  def test_AddPackRadioImages_imageExists(self):
+    images, images_path = self._create_images(['foo', 'bar'], 'RADIO')
+
+    # Additionally create images under IMAGES/ so that they should be skipped.
+    images, images_path = self._create_images(['foo', 'bar'], 'IMAGES')
+
+    AddPackRadioImages(None, images)
+
+    for image in images:
+      self.assertTrue(
+          os.path.exists(os.path.join(images_path, image + '.img')))
+
+  def test_AddPackRadioImages_missingImages(self):
+    images, _ = self._create_images(['foo', 'bar'], 'RADIO')
+    AddPackRadioImages(None, images)
+
+    self.assertRaises(AssertionError, AddPackRadioImages, None,
+                      images + ['baz'])
+
+  @staticmethod
+  def _test_AddCareMapTxtForAbOta():
+    """Helper function to set up the test for test_AddCareMapTxtForAbOta()."""
+    OPTIONS.info_dict = {
+        'system_verity_block_device' : '/dev/block/system',
+        'vendor_verity_block_device' : '/dev/block/vendor',
+    }
+
+    # Prepare the META/ folder.
+    meta_path = os.path.join(OPTIONS.input_tmp, 'META')
+    if not os.path.exists(meta_path):
+      os.mkdir(meta_path)
+
+    system_image = test_utils.construct_sparse_image([
+        (0xCAC1, 6),
+        (0xCAC3, 4),
+        (0xCAC1, 6)])
+    vendor_image = test_utils.construct_sparse_image([
+        (0xCAC2, 10)])
+
+    image_paths = {
+        'system' : system_image,
+        'vendor' : vendor_image,
+    }
+    return image_paths
+
+  def test_AddCareMapTxtForAbOta(self):
+    image_paths = self._test_AddCareMapTxtForAbOta()
+
+    AddCareMapTxtForAbOta(None, ['system', 'vendor'], image_paths)
+
+    care_map_file = os.path.join(OPTIONS.input_tmp, 'META', 'care_map.txt')
+    with open(care_map_file, 'r') as verify_fp:
+      care_map = verify_fp.read()
+
+    lines = care_map.split('\n')
+    self.assertEqual(4, len(lines))
+    self.assertEqual('system', lines[0])
+    self.assertEqual(RangeSet("0-5 10-15").to_string_raw(), lines[1])
+    self.assertEqual('vendor', lines[2])
+    self.assertEqual(RangeSet("0-9").to_string_raw(), lines[3])
+
+  def test_AddCareMapTxtForAbOta_withNonCareMapPartitions(self):
+    """Partitions without care_map should be ignored."""
+    image_paths = self._test_AddCareMapTxtForAbOta()
+
+    AddCareMapTxtForAbOta(
+        None, ['boot', 'system', 'vendor', 'vbmeta'], image_paths)
+
+    care_map_file = os.path.join(OPTIONS.input_tmp, 'META', 'care_map.txt')
+    with open(care_map_file, 'r') as verify_fp:
+      care_map = verify_fp.read()
+
+    lines = care_map.split('\n')
+    self.assertEqual(4, len(lines))
+    self.assertEqual('system', lines[0])
+    self.assertEqual(RangeSet("0-5 10-15").to_string_raw(), lines[1])
+    self.assertEqual('vendor', lines[2])
+    self.assertEqual(RangeSet("0-9").to_string_raw(), lines[3])
+
+  def test_AddCareMapTxtForAbOta_withAvb(self):
+    """Tests the case for device using AVB."""
+    image_paths = self._test_AddCareMapTxtForAbOta()
+    OPTIONS.info_dict = {
+        'avb_system_hashtree_enable' : 'true',
+        'avb_vendor_hashtree_enable' : 'true',
+    }
+
+    AddCareMapTxtForAbOta(None, ['system', 'vendor'], image_paths)
+
+    care_map_file = os.path.join(OPTIONS.input_tmp, 'META', 'care_map.txt')
+    with open(care_map_file, 'r') as verify_fp:
+      care_map = verify_fp.read()
+
+    lines = care_map.split('\n')
+    self.assertEqual(4, len(lines))
+    self.assertEqual('system', lines[0])
+    self.assertEqual(RangeSet("0-5 10-15").to_string_raw(), lines[1])
+    self.assertEqual('vendor', lines[2])
+    self.assertEqual(RangeSet("0-9").to_string_raw(), lines[3])
+
+  def test_AddCareMapTxtForAbOta_verityNotEnabled(self):
+    """No care_map.txt should be generated if verity not enabled."""
+    image_paths = self._test_AddCareMapTxtForAbOta()
+    OPTIONS.info_dict = {}
+    AddCareMapTxtForAbOta(None, ['system', 'vendor'], image_paths)
+
+    care_map_file = os.path.join(OPTIONS.input_tmp, 'META', 'care_map.txt')
+    self.assertFalse(os.path.exists(care_map_file))
+
+  def test_AddCareMapTxtForAbOta_missingImageFile(self):
+    """Missing image file should be considered fatal."""
+    image_paths = self._test_AddCareMapTxtForAbOta()
+    image_paths['vendor'] = ''
+    self.assertRaises(AssertionError, AddCareMapTxtForAbOta, None,
+                      ['system', 'vendor'], image_paths)
+
+  def test_AddCareMapTxtForAbOta_zipOutput(self):
+    """Tests the case with ZIP output."""
+    image_paths = self._test_AddCareMapTxtForAbOta()
+
+    output_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(output_file, 'w') as output_zip:
+      AddCareMapTxtForAbOta(output_zip, ['system', 'vendor'], image_paths)
+
+    with zipfile.ZipFile(output_file, 'r') as verify_zip:
+      care_map = verify_zip.read('META/care_map.txt').decode('ascii')
+
+    lines = care_map.split('\n')
+    self.assertEqual(4, len(lines))
+    self.assertEqual('system', lines[0])
+    self.assertEqual(RangeSet("0-5 10-15").to_string_raw(), lines[1])
+    self.assertEqual('vendor', lines[2])
+    self.assertEqual(RangeSet("0-9").to_string_raw(), lines[3])
+
+  def test_AddCareMapTxtForAbOta_zipOutput_careMapEntryExists(self):
+    """Tests the case with ZIP output which already has care_map entry."""
+    image_paths = self._test_AddCareMapTxtForAbOta()
+
+    output_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(output_file, 'w') as output_zip:
+      # Create an existing META/care_map.txt entry.
+      common.ZipWriteStr(output_zip, 'META/care_map.txt', 'dummy care_map.txt')
+
+      # Request to add META/care_map.txt again.
+      AddCareMapTxtForAbOta(output_zip, ['system', 'vendor'], image_paths)
+
+    # The one under OPTIONS.input_tmp must have been replaced.
+    care_map_file = os.path.join(OPTIONS.input_tmp, 'META', 'care_map.txt')
+    with open(care_map_file, 'r') as verify_fp:
+      care_map = verify_fp.read()
+
+    lines = care_map.split('\n')
+    self.assertEqual(4, len(lines))
+    self.assertEqual('system', lines[0])
+    self.assertEqual(RangeSet("0-5 10-15").to_string_raw(), lines[1])
+    self.assertEqual('vendor', lines[2])
+    self.assertEqual(RangeSet("0-9").to_string_raw(), lines[3])
+
+    # The existing entry should be scheduled to be replaced.
+    self.assertIn('META/care_map.txt', OPTIONS.replace_updated_files_list)
+
+  def test_GetCareMap(self):
+    sparse_image = test_utils.construct_sparse_image([
+        (0xCAC1, 6),
+        (0xCAC3, 4),
+        (0xCAC1, 6)])
+    OPTIONS.info_dict = {
+        'system_adjusted_partition_size' : 12,
+    }
+    name, care_map = GetCareMap('system', sparse_image)
+    self.assertEqual('system', name)
+    self.assertEqual(RangeSet("0-5 10-12").to_string_raw(), care_map)
+
+  def test_GetCareMap_invalidPartition(self):
+    self.assertRaises(AssertionError, GetCareMap, 'oem', None)
+
+  def test_GetCareMap_invalidAdjustedPartitionSize(self):
+    sparse_image = test_utils.construct_sparse_image([
+        (0xCAC1, 6),
+        (0xCAC3, 4),
+        (0xCAC1, 6)])
+    OPTIONS.info_dict = {
+        'system_adjusted_partition_size' : -12,
+    }
+    self.assertRaises(AssertionError, GetCareMap, 'system', sparse_image)
diff --git a/tools/releasetools/test_blockimgdiff.py b/tools/releasetools/test_blockimgdiff.py
index e5a3694..ceada18 100644
--- a/tools/releasetools/test_blockimgdiff.py
+++ b/tools/releasetools/test_blockimgdiff.py
@@ -16,12 +16,44 @@
 
 from __future__ import print_function
 
-import common
 import unittest
 
-from blockimgdiff import BlockImageDiff, EmptyImage, Transfer
+import common
+from blockimgdiff import (BlockImageDiff, EmptyImage, HeapItem, ImgdiffStats,
+                          Transfer)
 from rangelib import RangeSet
 
+
+class HeapItemTest(unittest.TestCase):
+
+  class Item(object):
+    def __init__(self, score):
+      self.score = score
+
+  def test_init(self):
+    item1 = HeapItem(self.Item(15))
+    item2 = HeapItem(self.Item(20))
+    item3 = HeapItem(self.Item(15))
+    self.assertTrue(item1)
+    self.assertTrue(item2)
+    self.assertTrue(item3)
+
+    self.assertNotEqual(item1, item2)
+    self.assertEqual(item1, item3)
+    # HeapItem uses negated scores.
+    self.assertGreater(item1, item2)
+    self.assertLessEqual(item1, item3)
+    self.assertTrue(item1 <= item3)
+    self.assertFalse(item2 >= item1)
+
+  def test_clear(self):
+    item = HeapItem(self.Item(15))
+    self.assertTrue(item)
+
+    item.clear()
+    self.assertFalse(item)
+
+
 class BlockImageDiffTest(unittest.TestCase):
 
   def test_GenerateDigraphOrder(self):
@@ -141,3 +173,102 @@
     # Insufficient cache to stash 15 blocks (size * 0.8 < 15).
     common.OPTIONS.cache_size = 15 * 4096
     self.assertEqual(15, block_image_diff.ReviseStashSize())
+
+  def test_FileTypeSupportedByImgdiff(self):
+    self.assertTrue(
+        BlockImageDiff.FileTypeSupportedByImgdiff(
+            "/system/priv-app/Settings/Settings.apk"))
+    self.assertTrue(
+        BlockImageDiff.FileTypeSupportedByImgdiff(
+            "/system/framework/am.jar"))
+    self.assertTrue(
+        BlockImageDiff.FileTypeSupportedByImgdiff(
+            "/system/etc/security/otacerts.zip"))
+
+    self.assertFalse(
+        BlockImageDiff.FileTypeSupportedByImgdiff(
+            "/system/framework/arm/boot.oat"))
+    self.assertFalse(
+        BlockImageDiff.FileTypeSupportedByImgdiff(
+            "/system/priv-app/notanapk"))
+
+  def test_CanUseImgdiff(self):
+    block_image_diff = BlockImageDiff(EmptyImage(), EmptyImage())
+    self.assertTrue(
+        block_image_diff.CanUseImgdiff(
+            "/system/app/app1.apk", RangeSet("10-15"), RangeSet("0-5")))
+    self.assertTrue(
+        block_image_diff.CanUseImgdiff(
+            "/vendor/app/app2.apk", RangeSet("20 25"), RangeSet("30-31"), True))
+
+    self.assertDictEqual(
+        {
+            ImgdiffStats.USED_IMGDIFF : {"/system/app/app1.apk"},
+            ImgdiffStats.USED_IMGDIFF_LARGE_APK : {"/vendor/app/app2.apk"},
+        },
+        block_image_diff.imgdiff_stats.stats)
+
+  def test_CanUseImgdiff_ineligible(self):
+    # Disabled by caller.
+    block_image_diff = BlockImageDiff(EmptyImage(), EmptyImage(),
+                                      disable_imgdiff=True)
+    self.assertFalse(
+        block_image_diff.CanUseImgdiff(
+            "/system/app/app1.apk", RangeSet("10-15"), RangeSet("0-5")))
+
+    # Unsupported file type.
+    block_image_diff = BlockImageDiff(EmptyImage(), EmptyImage())
+    self.assertFalse(
+        block_image_diff.CanUseImgdiff(
+            "/system/bin/gzip", RangeSet("10-15"), RangeSet("0-5")))
+
+    # At least one of the ranges is in non-monotonic order.
+    self.assertFalse(
+        block_image_diff.CanUseImgdiff(
+            "/system/app/app2.apk", RangeSet("10-15"),
+            RangeSet("15-20 30 10-14")))
+
+    # At least one of the ranges has been modified.
+    src_ranges = RangeSet("0-5")
+    src_ranges.extra['trimmed'] = True
+    self.assertFalse(
+        block_image_diff.CanUseImgdiff(
+            "/vendor/app/app3.apk", RangeSet("10-15"), src_ranges))
+
+    # At least one of the ranges is incomplete.
+    src_ranges = RangeSet("0-5")
+    src_ranges.extra['incomplete'] = True
+    self.assertFalse(
+        block_image_diff.CanUseImgdiff(
+            "/vendor/app/app4.apk", RangeSet("10-15"), src_ranges))
+
+    # The stats are correctly logged.
+    self.assertDictEqual(
+        {
+            ImgdiffStats.SKIPPED_NONMONOTONIC : {'/system/app/app2.apk'},
+            ImgdiffStats.SKIPPED_TRIMMED : {'/vendor/app/app3.apk'},
+            ImgdiffStats.SKIPPED_INCOMPLETE : {'/vendor/app/app4.apk'},
+        },
+        block_image_diff.imgdiff_stats.stats)
+
+
+class ImgdiffStatsTest(unittest.TestCase):
+
+  def test_Log(self):
+    imgdiff_stats = ImgdiffStats()
+    imgdiff_stats.Log("/system/app/app2.apk", ImgdiffStats.USED_IMGDIFF)
+    self.assertDictEqual(
+        {
+            ImgdiffStats.USED_IMGDIFF: {'/system/app/app2.apk'},
+        },
+        imgdiff_stats.stats)
+
+  def test_Log_invalidInputs(self):
+    imgdiff_stats = ImgdiffStats()
+
+    self.assertRaises(AssertionError, imgdiff_stats.Log, "/system/bin/gzip",
+                      ImgdiffStats.USED_IMGDIFF)
+
+    self.assertRaises(AssertionError, imgdiff_stats.Log, "/system/app/app1.apk",
+                      "invalid reason")
diff --git a/tools/releasetools/test_build_image.py b/tools/releasetools/test_build_image.py
new file mode 100644
index 0000000..161faff
--- /dev/null
+++ b/tools/releasetools/test_build_image.py
@@ -0,0 +1,94 @@
+#
+# Copyright (C) 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import unittest
+
+import common
+from build_image import CheckHeadroom, RunCommand
+
+
+class BuildImageTest(unittest.TestCase):
+
+  # Available: 1000 blocks.
+  EXT4FS_OUTPUT = (
+      "Created filesystem with 2777/129024 inodes and 515099/516099 blocks")
+
+  def test_CheckHeadroom_SizeUnderLimit(self):
+    # Required headroom: 1000 blocks.
+    prop_dict = {
+        'fs_type' : 'ext4',
+        'partition_headroom' : '4096000',
+        'mount_point' : 'system',
+    }
+    self.assertTrue(CheckHeadroom(self.EXT4FS_OUTPUT, prop_dict))
+
+  def test_CheckHeadroom_InsufficientHeadroom(self):
+    # Required headroom: 1001 blocks.
+    prop_dict = {
+        'fs_type' : 'ext4',
+        'partition_headroom' : '4100096',
+        'mount_point' : 'system',
+    }
+    self.assertFalse(CheckHeadroom(self.EXT4FS_OUTPUT, prop_dict))
+
+  def test_CheckHeadroom_WrongFsType(self):
+    prop_dict = {
+        'fs_type' : 'f2fs',
+        'partition_headroom' : '4100096',
+        'mount_point' : 'system',
+    }
+    self.assertRaises(
+        AssertionError, CheckHeadroom, self.EXT4FS_OUTPUT, prop_dict)
+
+  def test_CheckHeadroom_MissingProperties(self):
+    prop_dict = {
+        'fs_type' : 'ext4',
+        'partition_headroom' : '4100096',
+    }
+    self.assertRaises(
+        AssertionError, CheckHeadroom, self.EXT4FS_OUTPUT, prop_dict)
+
+    prop_dict = {
+        'fs_type' : 'ext4',
+        'mount_point' : 'system',
+    }
+    self.assertRaises(
+        AssertionError, CheckHeadroom, self.EXT4FS_OUTPUT, prop_dict)
+
+  def test_CheckHeadroom_WithMke2fsOutput(self):
+    """Tests the result parsing from actual call to mke2fs."""
+    input_dir = common.MakeTempDir()
+    output_image = common.MakeTempFile(suffix='.img')
+    command = ['mkuserimg_mke2fs.sh', input_dir, output_image, 'ext4',
+               '/system', '409600', '-j', '0']
+    ext4fs_output, exit_code = RunCommand(command)
+    self.assertEqual(0, exit_code)
+
+    prop_dict = {
+        'fs_type' : 'ext4',
+        'partition_headroom' : '40960',
+        'mount_point' : 'system',
+    }
+    self.assertTrue(CheckHeadroom(ext4fs_output, prop_dict))
+
+    prop_dict = {
+        'fs_type' : 'ext4',
+        'partition_headroom' : '413696',
+        'mount_point' : 'system',
+    }
+    self.assertFalse(CheckHeadroom(ext4fs_output, prop_dict))
+
+    common.Cleanup()
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index 10ec0d3..fb26b66 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -13,22 +13,26 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+
 import os
-import shutil
+import subprocess
 import tempfile
 import time
 import unittest
 import zipfile
-
 from hashlib import sha1
 
 import common
+import test_utils
 import validate_target_files
+from rangelib import RangeSet
+
 
 KiB = 1024
 MiB = 1024 * KiB
 GiB = 1024 * MiB
 
+
 def get_2gb_string():
   size = int(2 * GiB + 1)
   block_size = 4 * KiB
@@ -309,18 +313,352 @@
     finally:
       os.remove(zip_file_name)
 
-class InstallRecoveryScriptFormatTest(unittest.TestCase):
-  """Check the format of install-recovery.sh
+  def test_ZipDelete(self):
+    zip_file = tempfile.NamedTemporaryFile(delete=False, suffix='.zip')
+    output_zip = zipfile.ZipFile(zip_file.name, 'w',
+                                 compression=zipfile.ZIP_DEFLATED)
+    with tempfile.NamedTemporaryFile() as entry_file:
+      entry_file.write(os.urandom(1024))
+      common.ZipWrite(output_zip, entry_file.name, arcname='Test1')
+      common.ZipWrite(output_zip, entry_file.name, arcname='Test2')
+      common.ZipWrite(output_zip, entry_file.name, arcname='Test3')
+      common.ZipClose(output_zip)
+    zip_file.close()
 
-  Its format should match between common.py and validate_target_files.py."""
+    try:
+      common.ZipDelete(zip_file.name, 'Test2')
+      with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
+        entries = check_zip.namelist()
+        self.assertTrue('Test1' in entries)
+        self.assertFalse('Test2' in entries)
+        self.assertTrue('Test3' in entries)
+
+      self.assertRaises(AssertionError, common.ZipDelete, zip_file.name,
+                        'Test2')
+      with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
+        entries = check_zip.namelist()
+        self.assertTrue('Test1' in entries)
+        self.assertFalse('Test2' in entries)
+        self.assertTrue('Test3' in entries)
+
+      common.ZipDelete(zip_file.name, ['Test3'])
+      with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
+        entries = check_zip.namelist()
+        self.assertTrue('Test1' in entries)
+        self.assertFalse('Test2' in entries)
+        self.assertFalse('Test3' in entries)
+
+      common.ZipDelete(zip_file.name, ['Test1', 'Test2'])
+      with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
+        entries = check_zip.namelist()
+        self.assertFalse('Test1' in entries)
+        self.assertFalse('Test2' in entries)
+        self.assertFalse('Test3' in entries)
+    finally:
+      os.remove(zip_file.name)
+
+
+class CommonApkUtilsTest(unittest.TestCase):
+  """Tests the APK utils related functions."""
+
+  APKCERTS_TXT1 = (
+      'name="RecoveryLocalizer.apk" certificate="certs/devkey.x509.pem"'
+      ' private_key="certs/devkey.pk8"\n'
+      'name="Settings.apk"'
+      ' certificate="build/target/product/security/platform.x509.pem"'
+      ' private_key="build/target/product/security/platform.pk8"\n'
+      'name="TV.apk" certificate="PRESIGNED" private_key=""\n'
+  )
+
+  APKCERTS_CERTMAP1 = {
+      'RecoveryLocalizer.apk' : 'certs/devkey',
+      'Settings.apk' : 'build/target/product/security/platform',
+      'TV.apk' : 'PRESIGNED',
+  }
+
+  APKCERTS_TXT2 = (
+      'name="Compressed1.apk" certificate="certs/compressed1.x509.pem"'
+      ' private_key="certs/compressed1.pk8" compressed="gz"\n'
+      'name="Compressed2a.apk" certificate="certs/compressed2.x509.pem"'
+      ' private_key="certs/compressed2.pk8" compressed="gz"\n'
+      'name="Compressed2b.apk" certificate="certs/compressed2.x509.pem"'
+      ' private_key="certs/compressed2.pk8" compressed="gz"\n'
+      'name="Compressed3.apk" certificate="certs/compressed3.x509.pem"'
+      ' private_key="certs/compressed3.pk8" compressed="gz"\n'
+  )
+
+  APKCERTS_CERTMAP2 = {
+      'Compressed1.apk' : 'certs/compressed1',
+      'Compressed2a.apk' : 'certs/compressed2',
+      'Compressed2b.apk' : 'certs/compressed2',
+      'Compressed3.apk' : 'certs/compressed3',
+  }
+
+  APKCERTS_TXT3 = (
+      'name="Compressed4.apk" certificate="certs/compressed4.x509.pem"'
+      ' private_key="certs/compressed4.pk8" compressed="xz"\n'
+  )
+
+  APKCERTS_CERTMAP3 = {
+      'Compressed4.apk' : 'certs/compressed4',
+  }
 
   def setUp(self):
-    self._tempdir = tempfile.mkdtemp()
+    self.testdata_dir = test_utils.get_testdata_dir()
+
+  def tearDown(self):
+    common.Cleanup()
+
+  @staticmethod
+  def _write_apkcerts_txt(apkcerts_txt, additional=None):
+    if additional is None:
+      additional = []
+    target_files = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+      target_files_zip.writestr('META/apkcerts.txt', apkcerts_txt)
+      for entry in additional:
+        target_files_zip.writestr(entry, '')
+    return target_files
+
+  def test_ReadApkCerts_NoncompressedApks(self):
+    target_files = self._write_apkcerts_txt(self.APKCERTS_TXT1)
+    with zipfile.ZipFile(target_files, 'r') as input_zip:
+      certmap, ext = common.ReadApkCerts(input_zip)
+
+    self.assertDictEqual(self.APKCERTS_CERTMAP1, certmap)
+    self.assertIsNone(ext)
+
+  def test_ReadApkCerts_CompressedApks(self):
+    # We have "installed" Compressed1.apk.gz only. Note that Compressed3.apk is
+    # not stored in '.gz' format, so it shouldn't be considered as installed.
+    target_files = self._write_apkcerts_txt(
+        self.APKCERTS_TXT2,
+        ['Compressed1.apk.gz', 'Compressed3.apk'])
+
+    with zipfile.ZipFile(target_files, 'r') as input_zip:
+      certmap, ext = common.ReadApkCerts(input_zip)
+
+    self.assertDictEqual(self.APKCERTS_CERTMAP2, certmap)
+    self.assertEqual('.gz', ext)
+
+    # Alternative case with '.xz'.
+    target_files = self._write_apkcerts_txt(
+        self.APKCERTS_TXT3, ['Compressed4.apk.xz'])
+
+    with zipfile.ZipFile(target_files, 'r') as input_zip:
+      certmap, ext = common.ReadApkCerts(input_zip)
+
+    self.assertDictEqual(self.APKCERTS_CERTMAP3, certmap)
+    self.assertEqual('.xz', ext)
+
+  def test_ReadApkCerts_CompressedAndNoncompressedApks(self):
+    target_files = self._write_apkcerts_txt(
+        self.APKCERTS_TXT1 + self.APKCERTS_TXT2,
+        ['Compressed1.apk.gz', 'Compressed3.apk'])
+
+    with zipfile.ZipFile(target_files, 'r') as input_zip:
+      certmap, ext = common.ReadApkCerts(input_zip)
+
+    certmap_merged = self.APKCERTS_CERTMAP1.copy()
+    certmap_merged.update(self.APKCERTS_CERTMAP2)
+    self.assertDictEqual(certmap_merged, certmap)
+    self.assertEqual('.gz', ext)
+
+  def test_ReadApkCerts_MultipleCompressionMethods(self):
+    target_files = self._write_apkcerts_txt(
+        self.APKCERTS_TXT2 + self.APKCERTS_TXT3,
+        ['Compressed1.apk.gz', 'Compressed4.apk.xz'])
+
+    with zipfile.ZipFile(target_files, 'r') as input_zip:
+      self.assertRaises(ValueError, common.ReadApkCerts, input_zip)
+
+  def test_ReadApkCerts_MismatchingKeys(self):
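+    # The certificate basename (cert1) doesn't match the private key
+    # basename (cert2), which ReadApkCerts should reject.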
+    malformed_apkcerts_txt = (
+        'name="App1.apk" certificate="certs/cert1.x509.pem"'
+        ' private_key="certs/cert2.pk8"\n'
+    )
+    target_files = self._write_apkcerts_txt(malformed_apkcerts_txt)
+
+    with zipfile.ZipFile(target_files, 'r') as input_zip:
+      self.assertRaises(ValueError, common.ReadApkCerts, input_zip)
+
+  def test_ExtractPublicKey(self):
+    cert = os.path.join(self.testdata_dir, 'testkey.x509.pem')
+    pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
+    with open(pubkey, 'rb') as pubkey_fp:
+      self.assertEqual(pubkey_fp.read(), common.ExtractPublicKey(cert))
+
+  def test_ExtractPublicKey_invalidInput(self):
+    wrong_input = os.path.join(self.testdata_dir, 'testkey.pk8')
+    self.assertRaises(AssertionError, common.ExtractPublicKey, wrong_input)
+
+  def test_ParseCertificate(self):
+    cert = os.path.join(self.testdata_dir, 'testkey.x509.pem')
+
+    cmd = ['openssl', 'x509', '-in', cert, '-outform', 'DER']
+    proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    expected, _ = proc.communicate()
+    self.assertEqual(0, proc.returncode)
+
+    with open(cert) as cert_fp:
+      actual = common.ParseCertificate(cert_fp.read())
+    self.assertEqual(expected, actual)
+
+
+class CommonUtilsTest(unittest.TestCase):
+
+  def tearDown(self):
+    common.Cleanup()
+
+  def test_GetSparseImage_emptyBlockMapFile(self):
+    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
+    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+      target_files_zip.write(
+          test_utils.construct_sparse_image([
+              (0xCAC1, 6),
+              (0xCAC3, 3),
+              (0xCAC1, 4)]),
+          arcname='IMAGES/system.img')
+      target_files_zip.writestr('IMAGES/system.map', '')
+      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 8))
+      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
+
+    tempdir = common.UnzipTemp(target_files)
+    with zipfile.ZipFile(target_files, 'r') as input_zip:
+      sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)
+
+    self.assertDictEqual(
+        {
+            '__COPY': RangeSet("0"),
+            '__NONZERO-0': RangeSet("1-5 9-12"),
+        },
+        sparse_image.file_map)
+
+  def test_GetSparseImage_invalidImageName(self):
+    self.assertRaises(
+        AssertionError, common.GetSparseImage, 'system2', None, None, False)
+    self.assertRaises(
+        AssertionError, common.GetSparseImage, 'unknown', None, None, False)
+
+  def test_GetSparseImage_missingBlockMapFile(self):
+    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
+    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+      target_files_zip.write(
+          test_utils.construct_sparse_image([
+              (0xCAC1, 6),
+              (0xCAC3, 3),
+              (0xCAC1, 4)]),
+          arcname='IMAGES/system.img')
+      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 8))
+      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
+
+    tempdir = common.UnzipTemp(target_files)
+    with zipfile.ZipFile(target_files, 'r') as input_zip:
+      self.assertRaises(
+          AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
+          False)
+
+  def test_GetSparseImage_sharedBlocks_notAllowed(self):
+    """Tests the case of having overlapping blocks but disallowed."""
+    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
+    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+      target_files_zip.write(
+          test_utils.construct_sparse_image([(0xCAC2, 16)]),
+          arcname='IMAGES/system.img')
+      # Block 10 is shared between two files.
+      target_files_zip.writestr(
+          'IMAGES/system.map',
+          '\n'.join([
+              '/system/file1 1-5 9-10',
+              '/system/file2 10-12']))
+      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
+      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
+
+    tempdir = common.UnzipTemp(target_files)
+    with zipfile.ZipFile(target_files, 'r') as input_zip:
+      self.assertRaises(
+          AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
+          False)
+
+  def test_GetSparseImage_sharedBlocks_allowed(self):
+    """Tests the case for target using BOARD_EXT4_SHARE_DUP_BLOCKS := true."""
+    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
+    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+      # Construct an image with a care_map of "0-5 9-12".
+      target_files_zip.write(
+          test_utils.construct_sparse_image([(0xCAC2, 16)]),
+          arcname='IMAGES/system.img')
+      # Block 10 is shared between two files.
+      target_files_zip.writestr(
+          'IMAGES/system.map',
+          '\n'.join([
+              '/system/file1 1-5 9-10',
+              '/system/file2 10-12']))
+      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
+      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
+
+    tempdir = common.UnzipTemp(target_files)
+    with zipfile.ZipFile(target_files, 'r') as input_zip:
+      sparse_image = common.GetSparseImage('system', tempdir, input_zip, True)
+
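+    # The shared block 10 stays with /system/file1, so /system/file2 keeps
+    # only its non-shared blocks (11-12).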
+    self.assertDictEqual(
+        {
+            '__COPY': RangeSet("0"),
+            '__NONZERO-0': RangeSet("6-8 13-15"),
+            '/system/file1': RangeSet("1-5 9-10"),
+            '/system/file2': RangeSet("11-12"),
+        },
+        sparse_image.file_map)
+
+    # '/system/file2' should be marked with 'uses_shared_blocks', but not with
+    # 'incomplete'.
+    self.assertTrue(
+        sparse_image.file_map['/system/file2'].extra['uses_shared_blocks'])
+    self.assertNotIn(
+        'incomplete', sparse_image.file_map['/system/file2'].extra)
+
+    # All other entries should look normal without any tags.
+    self.assertFalse(sparse_image.file_map['__COPY'].extra)
+    self.assertFalse(sparse_image.file_map['__NONZERO-0'].extra)
+    self.assertFalse(sparse_image.file_map['/system/file1'].extra)
+
+  def test_GetSparseImage_incompleteRanges(self):
+    """Tests the case of ext4 images with holes."""
+    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
+    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+      target_files_zip.write(
+          test_utils.construct_sparse_image([(0xCAC2, 16)]),
+          arcname='IMAGES/system.img')
+      target_files_zip.writestr(
+          'IMAGES/system.map',
+          '\n'.join([
+              '/system/file1 1-5 9-10',
+              '/system/file2 11-12']))
+      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
+      # '/system/file2' lists fewer blocks (2) than it actually occupies (3).
+      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
+
+    tempdir = common.UnzipTemp(target_files)
+    with zipfile.ZipFile(target_files, 'r') as input_zip:
+      sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)
+
+    self.assertFalse(sparse_image.file_map['/system/file1'].extra)
+    self.assertTrue(sparse_image.file_map['/system/file2'].extra['incomplete'])
+
+
+class InstallRecoveryScriptFormatTest(unittest.TestCase):
+  """Checks the format of install-recovery.sh.
+
+  Its format should match between common.py and validate_target_files.py.
+  """
+
+  def setUp(self):
+    self._tempdir = common.MakeTempDir()
     # Create a dummy dict that contains the fstab info for boot&recovery.
     self._info = {"fstab" : {}}
-    dummy_fstab = \
-        ["/dev/soc.0/by-name/boot /boot emmc defaults defaults",
-         "/dev/soc.0/by-name/recovery /recovery emmc defaults defaults"]
+    dummy_fstab = [
+        "/dev/soc.0/by-name/boot /boot emmc defaults defaults",
+        "/dev/soc.0/by-name/recovery /recovery emmc defaults defaults"]
     self._info["fstab"] = common.LoadRecoveryFSTab("\n".join, 2, dummy_fstab)
     # Construct the gzipped recovery.img and boot.img
     self.recovery_data = bytearray([
@@ -369,4 +707,4 @@
                                                         self._info)
 
   def tearDown(self):
-    shutil.rmtree(self._tempdir)
+    common.Cleanup()
diff --git a/tools/releasetools/test_ota_from_target_files.py b/tools/releasetools/test_ota_from_target_files.py
new file mode 100644
index 0000000..97687e7
--- /dev/null
+++ b/tools/releasetools/test_ota_from_target_files.py
@@ -0,0 +1,1284 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import copy
+import os
+import os.path
+import subprocess
+import unittest
+import zipfile
+
+import common
+import test_utils
+from ota_from_target_files import (
+    _LoadOemDicts, AbOtaPropertyFiles, BuildInfo, GetPackageMetadata,
+    GetTargetFilesZipForSecondaryImages,
+    GetTargetFilesZipWithoutPostinstallConfig, NonAbOtaPropertyFiles,
+    Payload, PayloadSigner, POSTINSTALL_CONFIG, PropertyFiles,
+    StreamingPropertyFiles, WriteFingerprintAssertion)
+
+
+def construct_target_files(secondary=False):
+  """Returns a target-files.zip file for generating OTA packages."""
+  target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
+  with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+    # META/update_engine_config.txt
+    target_files_zip.writestr(
+        'META/update_engine_config.txt',
+        "PAYLOAD_MAJOR_VERSION=2\nPAYLOAD_MINOR_VERSION=4\n")
+
+    # META/postinstall_config.txt
+    target_files_zip.writestr(
+        POSTINSTALL_CONFIG,
+        '\n'.join([
+            "RUN_POSTINSTALL_system=true",
+            "POSTINSTALL_PATH_system=system/bin/otapreopt_script",
+            "FILESYSTEM_TYPE_system=ext4",
+            "POSTINSTALL_OPTIONAL_system=true",
+        ]))
+
+    # META/ab_partitions.txt
+    ab_partitions = ['boot', 'system', 'vendor']
+    target_files_zip.writestr(
+        'META/ab_partitions.txt',
+        '\n'.join(ab_partitions))
+
+    # Create dummy images for each of them.
+    for partition in ab_partitions:
+      target_files_zip.writestr('IMAGES/' + partition + '.img',
+                                os.urandom(len(partition)))
+
+    if secondary:
+      target_files_zip.writestr('IMAGES/system_other.img',
+                                os.urandom(len("system_other")))
+
+  return target_files
+
+
+class MockScriptWriter(object):
+  """A class that mocks edify_generator.EdifyGenerator.
+
+  It simply records each incoming call and its arguments on a script stack,
+  so that tests can assert the calls made to EdifyGenerator functions.
+  """
+
+  def __init__(self):
+    self.script = []
+
+  def Mount(self, *args):
+    self.script.append(('Mount',) + args)
+
+  def AssertDevice(self, *args):
+    self.script.append(('AssertDevice',) + args)
+
+  def AssertOemProperty(self, *args):
+    self.script.append(('AssertOemProperty',) + args)
+
+  def AssertFingerprintOrThumbprint(self, *args):
+    self.script.append(('AssertFingerprintOrThumbprint',) + args)
+
+  def AssertSomeFingerprint(self, *args):
+    self.script.append(('AssertSomeFingerprint',) + args)
+
+  def AssertSomeThumbprint(self, *args):
+    self.script.append(('AssertSomeThumbprint',) + args)
+
+
+class BuildInfoTest(unittest.TestCase):
+
+  TEST_INFO_DICT = {
+      'build.prop' : {
+          'ro.product.device' : 'product-device',
+          'ro.product.name' : 'product-name',
+          'ro.build.fingerprint' : 'build-fingerprint',
+          'ro.build.foo' : 'build-foo',
+      },
+      'vendor.build.prop' : {
+          'ro.vendor.build.fingerprint' : 'vendor-build-fingerprint',
+      },
+      'property1' : 'value1',
+      'property2' : 4096,
+  }
+
+  TEST_INFO_DICT_USES_OEM_PROPS = {
+      'build.prop' : {
+          'ro.product.name' : 'product-name',
+          'ro.build.thumbprint' : 'build-thumbprint',
+          'ro.build.bar' : 'build-bar',
+      },
+      'vendor.build.prop' : {
+          'ro.vendor.build.fingerprint' : 'vendor-build-fingerprint',
+      },
+      'property1' : 'value1',
+      'property2' : 4096,
+      'oem_fingerprint_properties' : 'ro.product.device ro.product.brand',
+  }
+
+  TEST_OEM_DICTS = [
+      {
+          'ro.product.brand' : 'brand1',
+          'ro.product.device' : 'device1',
+      },
+      {
+          'ro.product.brand' : 'brand2',
+          'ro.product.device' : 'device2',
+      },
+      {
+          'ro.product.brand' : 'brand3',
+          'ro.product.device' : 'device3',
+      },
+  ]
+
+  def test_init(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT, None)
+    self.assertEqual('product-device', target_info.device)
+    self.assertEqual('build-fingerprint', target_info.fingerprint)
+    self.assertFalse(target_info.is_ab)
+    self.assertIsNone(target_info.oem_props)
+
+  def test_init_with_oem_props(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+                            self.TEST_OEM_DICTS)
+    self.assertEqual('device1', target_info.device)
+    self.assertEqual('brand1/product-name/device1:build-thumbprint',
+                     target_info.fingerprint)
+
+    # Swap the order in oem_dicts, which would lead to a different BuildInfo.
+    oem_dicts = copy.copy(self.TEST_OEM_DICTS)
+    oem_dicts[0], oem_dicts[2] = oem_dicts[2], oem_dicts[0]
+    target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS, oem_dicts)
+    self.assertEqual('device3', target_info.device)
+    self.assertEqual('brand3/product-name/device3:build-thumbprint',
+                     target_info.fingerprint)
+
+    # Missing oem_dict should be rejected.
+    self.assertRaises(AssertionError, BuildInfo,
+                      self.TEST_INFO_DICT_USES_OEM_PROPS, None)
+
+  def test___getitem__(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT, None)
+    self.assertEqual('value1', target_info['property1'])
+    self.assertEqual(4096, target_info['property2'])
+    self.assertEqual('build-foo', target_info['build.prop']['ro.build.foo'])
+
+  def test___getitem__with_oem_props(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+                            self.TEST_OEM_DICTS)
+    self.assertEqual('value1', target_info['property1'])
+    self.assertEqual(4096, target_info['property2'])
+    self.assertRaises(KeyError,
+                      lambda: target_info['build.prop']['ro.build.foo'])
+
+  def test_get(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT, None)
+    self.assertEqual('value1', target_info.get('property1'))
+    self.assertEqual(4096, target_info.get('property2'))
+    self.assertEqual(4096, target_info.get('property2', 1024))
+    self.assertEqual(1024, target_info.get('property-nonexistent', 1024))
+    self.assertEqual('build-foo', target_info.get('build.prop')['ro.build.foo'])
+
+  def test_get_with_oem_props(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+                            self.TEST_OEM_DICTS)
+    self.assertEqual('value1', target_info.get('property1'))
+    self.assertEqual(4096, target_info.get('property2'))
+    self.assertEqual(4096, target_info.get('property2', 1024))
+    self.assertEqual(1024, target_info.get('property-nonexistent', 1024))
+    self.assertIsNone(target_info.get('build.prop').get('ro.build.foo'))
+    self.assertRaises(KeyError,
+                      lambda: target_info.get('build.prop')['ro.build.foo'])
+
+  def test_GetBuildProp(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT, None)
+    self.assertEqual('build-foo', target_info.GetBuildProp('ro.build.foo'))
+    self.assertRaises(common.ExternalError, target_info.GetBuildProp,
+                      'ro.build.nonexistent')
+
+  def test_GetBuildProp_with_oem_props(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+                            self.TEST_OEM_DICTS)
+    self.assertEqual('build-bar', target_info.GetBuildProp('ro.build.bar'))
+    self.assertRaises(common.ExternalError, target_info.GetBuildProp,
+                      'ro.build.nonexistent')
+
+  def test_GetVendorBuildProp(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT, None)
+    self.assertEqual('vendor-build-fingerprint',
+                     target_info.GetVendorBuildProp(
+                         'ro.vendor.build.fingerprint'))
+    self.assertRaises(common.ExternalError, target_info.GetVendorBuildProp,
+                      'ro.build.nonexistent')
+
+  def test_GetVendorBuildProp_with_oem_props(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+                            self.TEST_OEM_DICTS)
+    self.assertEqual('vendor-build-fingerprint',
+                     target_info.GetVendorBuildProp(
+                         'ro.vendor.build.fingerprint'))
+    self.assertRaises(common.ExternalError, target_info.GetVendorBuildProp,
+                      'ro.build.nonexistent')
+
+  def test_WriteMountOemScript(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+                            self.TEST_OEM_DICTS)
+    script_writer = MockScriptWriter()
+    target_info.WriteMountOemScript(script_writer)
+    self.assertEqual([('Mount', '/oem', None)], script_writer.script)
+
+  def test_WriteDeviceAssertions(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT, None)
+    script_writer = MockScriptWriter()
+    target_info.WriteDeviceAssertions(script_writer, False)
+    self.assertEqual([('AssertDevice', 'product-device')], script_writer.script)
+
+  def test_WriteDeviceAssertions_with_oem_props(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+                            self.TEST_OEM_DICTS)
+    script_writer = MockScriptWriter()
+    target_info.WriteDeviceAssertions(script_writer, False)
+    self.assertEqual(
+        [
+            ('AssertOemProperty', 'ro.product.device',
+             ['device1', 'device2', 'device3'], False),
+            ('AssertOemProperty', 'ro.product.brand',
+             ['brand1', 'brand2', 'brand3'], False),
+        ],
+        script_writer.script)
+
+  def test_WriteFingerprintAssertion_without_oem_props(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT, None)
+    source_info_dict = copy.deepcopy(self.TEST_INFO_DICT)
+    source_info_dict['build.prop']['ro.build.fingerprint'] = (
+        'source-build-fingerprint')
+    source_info = BuildInfo(source_info_dict, None)
+
+    script_writer = MockScriptWriter()
+    WriteFingerprintAssertion(script_writer, target_info, source_info)
+    self.assertEqual(
+        [('AssertSomeFingerprint', 'source-build-fingerprint',
+          'build-fingerprint')],
+        script_writer.script)
+
+  def test_WriteFingerprintAssertion_with_source_oem_props(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT, None)
+    source_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+                            self.TEST_OEM_DICTS)
+
+    script_writer = MockScriptWriter()
+    WriteFingerprintAssertion(script_writer, target_info, source_info)
+    self.assertEqual(
+        [('AssertFingerprintOrThumbprint', 'build-fingerprint',
+          'build-thumbprint')],
+        script_writer.script)
+
+  def test_WriteFingerprintAssertion_with_target_oem_props(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+                            self.TEST_OEM_DICTS)
+    source_info = BuildInfo(self.TEST_INFO_DICT, None)
+
+    script_writer = MockScriptWriter()
+    WriteFingerprintAssertion(script_writer, target_info, source_info)
+    self.assertEqual(
+        [('AssertFingerprintOrThumbprint', 'build-fingerprint',
+          'build-thumbprint')],
+        script_writer.script)
+
+  def test_WriteFingerprintAssertion_with_both_oem_props(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+                            self.TEST_OEM_DICTS)
+    source_info_dict = copy.deepcopy(self.TEST_INFO_DICT_USES_OEM_PROPS)
+    source_info_dict['build.prop']['ro.build.thumbprint'] = (
+        'source-build-thumbprint')
+    source_info = BuildInfo(source_info_dict, self.TEST_OEM_DICTS)
+
+    script_writer = MockScriptWriter()
+    WriteFingerprintAssertion(script_writer, target_info, source_info)
+    self.assertEqual(
+        [('AssertSomeThumbprint', 'build-thumbprint',
+          'source-build-thumbprint')],
+        script_writer.script)
+
+
+class LoadOemDictsTest(unittest.TestCase):
+
+  def tearDown(self):
+    common.Cleanup()
+
+  def test_NoneDict(self):
+    self.assertIsNone(_LoadOemDicts(None))
+
+  def test_SingleDict(self):
+    dict_file = common.MakeTempFile()
+    with open(dict_file, 'w') as dict_fp:
+      dict_fp.write('abc=1\ndef=2\nxyz=foo\na.b.c=bar\n')
+
+    oem_dicts = _LoadOemDicts([dict_file])
+    self.assertEqual(1, len(oem_dicts))
+    self.assertEqual('foo', oem_dicts[0]['xyz'])
+    self.assertEqual('bar', oem_dicts[0]['a.b.c'])
+
+  def test_MultipleDicts(self):
+    oem_source = []
+    for i in range(3):
+      dict_file = common.MakeTempFile()
+      with open(dict_file, 'w') as dict_fp:
+        dict_fp.write(
+            'ro.build.index={}\ndef=2\nxyz=foo\na.b.c=bar\n'.format(i))
+      oem_source.append(dict_file)
+
+    oem_dicts = _LoadOemDicts(oem_source)
+    self.assertEqual(3, len(oem_dicts))
+    for i, oem_dict in enumerate(oem_dicts):
+      self.assertEqual('2', oem_dict['def'])
+      self.assertEqual('foo', oem_dict['xyz'])
+      self.assertEqual('bar', oem_dict['a.b.c'])
+      self.assertEqual('{}'.format(i), oem_dict['ro.build.index'])
+
+
+class OtaFromTargetFilesTest(unittest.TestCase):
+
+  TEST_TARGET_INFO_DICT = {
+      'build.prop' : {
+          'ro.product.device' : 'product-device',
+          'ro.build.fingerprint' : 'build-fingerprint-target',
+          'ro.build.version.incremental' : 'build-version-incremental-target',
+          'ro.build.version.sdk' : '27',
+          'ro.build.version.security_patch' : '2017-12-01',
+          'ro.build.date.utc' : '1500000000',
+      },
+  }
+
+  TEST_SOURCE_INFO_DICT = {
+      'build.prop' : {
+          'ro.product.device' : 'product-device',
+          'ro.build.fingerprint' : 'build-fingerprint-source',
+          'ro.build.version.incremental' : 'build-version-incremental-source',
+          'ro.build.version.sdk' : '25',
+          'ro.build.version.security_patch' : '2016-12-01',
+          'ro.build.date.utc' : '1400000000',
+      },
+  }
+
+  def setUp(self):
+    # Reset the global options as in ota_from_target_files.py.
+    common.OPTIONS.incremental_source = None
+    common.OPTIONS.downgrade = False
+    common.OPTIONS.timestamp = False
+    common.OPTIONS.wipe_user_data = False
+
+  def tearDown(self):
+    common.Cleanup()
+
+  def test_GetPackageMetadata_abOta_full(self):
+    target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
+    target_info_dict['ab_update'] = 'true'
+    target_info = BuildInfo(target_info_dict, None)
+    metadata = GetPackageMetadata(target_info)
+    self.assertDictEqual(
+        {
+            'ota-type' : 'AB',
+            'ota-required-cache' : '0',
+            'post-build' : 'build-fingerprint-target',
+            'post-build-incremental' : 'build-version-incremental-target',
+            'post-sdk-level' : '27',
+            'post-security-patch-level' : '2017-12-01',
+            'post-timestamp' : '1500000000',
+            'pre-device' : 'product-device',
+        },
+        metadata)
+
+  def test_GetPackageMetadata_abOta_incremental(self):
+    target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
+    target_info_dict['ab_update'] = 'true'
+    target_info = BuildInfo(target_info_dict, None)
+    source_info = BuildInfo(self.TEST_SOURCE_INFO_DICT, None)
+    common.OPTIONS.incremental_source = ''
+    metadata = GetPackageMetadata(target_info, source_info)
+    self.assertDictEqual(
+        {
+            'ota-type' : 'AB',
+            'ota-required-cache' : '0',
+            'post-build' : 'build-fingerprint-target',
+            'post-build-incremental' : 'build-version-incremental-target',
+            'post-sdk-level' : '27',
+            'post-security-patch-level' : '2017-12-01',
+            'post-timestamp' : '1500000000',
+            'pre-device' : 'product-device',
+            'pre-build' : 'build-fingerprint-source',
+            'pre-build-incremental' : 'build-version-incremental-source',
+        },
+        metadata)
+
+  def test_GetPackageMetadata_nonAbOta_full(self):
+    target_info = BuildInfo(self.TEST_TARGET_INFO_DICT, None)
+    metadata = GetPackageMetadata(target_info)
+    self.assertDictEqual(
+        {
+            'ota-type' : 'BLOCK',
+            'post-build' : 'build-fingerprint-target',
+            'post-build-incremental' : 'build-version-incremental-target',
+            'post-sdk-level' : '27',
+            'post-security-patch-level' : '2017-12-01',
+            'post-timestamp' : '1500000000',
+            'pre-device' : 'product-device',
+        },
+        metadata)
+
+  def test_GetPackageMetadata_nonAbOta_incremental(self):
+    target_info = BuildInfo(self.TEST_TARGET_INFO_DICT, None)
+    source_info = BuildInfo(self.TEST_SOURCE_INFO_DICT, None)
+    common.OPTIONS.incremental_source = ''
+    metadata = GetPackageMetadata(target_info, source_info)
+    self.assertDictEqual(
+        {
+            'ota-type' : 'BLOCK',
+            'post-build' : 'build-fingerprint-target',
+            'post-build-incremental' : 'build-version-incremental-target',
+            'post-sdk-level' : '27',
+            'post-security-patch-level' : '2017-12-01',
+            'post-timestamp' : '1500000000',
+            'pre-device' : 'product-device',
+            'pre-build' : 'build-fingerprint-source',
+            'pre-build-incremental' : 'build-version-incremental-source',
+        },
+        metadata)
+
+  def test_GetPackageMetadata_wipe(self):
+    target_info = BuildInfo(self.TEST_TARGET_INFO_DICT, None)
+    common.OPTIONS.wipe_user_data = True
+    metadata = GetPackageMetadata(target_info)
+    self.assertDictEqual(
+        {
+            'ota-type' : 'BLOCK',
+            'ota-wipe' : 'yes',
+            'post-build' : 'build-fingerprint-target',
+            'post-build-incremental' : 'build-version-incremental-target',
+            'post-sdk-level' : '27',
+            'post-security-patch-level' : '2017-12-01',
+            'post-timestamp' : '1500000000',
+            'pre-device' : 'product-device',
+        },
+        metadata)
+
+  @staticmethod
+  def _test_GetPackageMetadata_swapBuildTimestamps(target_info, source_info):
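+    # Swap the two build timestamps so that the "target" build appears older
+    # than the "source" build, simulating a downgrade.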
+    (target_info['build.prop']['ro.build.date.utc'],
+     source_info['build.prop']['ro.build.date.utc']) = (
+         source_info['build.prop']['ro.build.date.utc'],
+         target_info['build.prop']['ro.build.date.utc'])
+
+  def test_GetPackageMetadata_unintentionalDowngradeDetected(self):
+    target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
+    source_info_dict = copy.deepcopy(self.TEST_SOURCE_INFO_DICT)
+    self._test_GetPackageMetadata_swapBuildTimestamps(
+        target_info_dict, source_info_dict)
+
+    target_info = BuildInfo(target_info_dict, None)
+    source_info = BuildInfo(source_info_dict, None)
+    common.OPTIONS.incremental_source = ''
+    self.assertRaises(RuntimeError, GetPackageMetadata, target_info,
+                      source_info)
+
+  def test_GetPackageMetadata_downgrade(self):
+    target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
+    source_info_dict = copy.deepcopy(self.TEST_SOURCE_INFO_DICT)
+    self._test_GetPackageMetadata_swapBuildTimestamps(
+        target_info_dict, source_info_dict)
+
+    target_info = BuildInfo(target_info_dict, None)
+    source_info = BuildInfo(source_info_dict, None)
+    common.OPTIONS.incremental_source = ''
+    common.OPTIONS.downgrade = True
+    common.OPTIONS.wipe_user_data = True
+    metadata = GetPackageMetadata(target_info, source_info)
+    self.assertDictEqual(
+        {
+            'ota-downgrade' : 'yes',
+            'ota-type' : 'BLOCK',
+            'ota-wipe' : 'yes',
+            'post-build' : 'build-fingerprint-target',
+            'post-build-incremental' : 'build-version-incremental-target',
+            'post-sdk-level' : '27',
+            'post-security-patch-level' : '2017-12-01',
+            'pre-device' : 'product-device',
+            'pre-build' : 'build-fingerprint-source',
+            'pre-build-incremental' : 'build-version-incremental-source',
+        },
+        metadata)
+
+  def test_GetPackageMetadata_overrideTimestamp(self):
+    target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
+    source_info_dict = copy.deepcopy(self.TEST_SOURCE_INFO_DICT)
+    self._test_GetPackageMetadata_swapBuildTimestamps(
+        target_info_dict, source_info_dict)
+
+    target_info = BuildInfo(target_info_dict, None)
+    source_info = BuildInfo(source_info_dict, None)
+    common.OPTIONS.incremental_source = ''
+    common.OPTIONS.timestamp = True
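+    # With the timestamp override, the post-timestamp is faked as the source
+    # build's timestamp plus one (hence '1500000001' below).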
+    metadata = GetPackageMetadata(target_info, source_info)
+    self.assertDictEqual(
+        {
+            'ota-type' : 'BLOCK',
+            'post-build' : 'build-fingerprint-target',
+            'post-build-incremental' : 'build-version-incremental-target',
+            'post-sdk-level' : '27',
+            'post-security-patch-level' : '2017-12-01',
+            'post-timestamp' : '1500000001',
+            'pre-device' : 'product-device',
+            'pre-build' : 'build-fingerprint-source',
+            'pre-build-incremental' : 'build-version-incremental-source',
+        },
+        metadata)
+
+  def test_GetTargetFilesZipForSecondaryImages(self):
+    input_file = construct_target_files(secondary=True)
+    target_file = GetTargetFilesZipForSecondaryImages(input_file)
+
+    with zipfile.ZipFile(target_file) as verify_zip:
+      namelist = verify_zip.namelist()
+
+    self.assertIn('META/ab_partitions.txt', namelist)
+    self.assertIn('IMAGES/boot.img', namelist)
+    self.assertIn('IMAGES/system.img', namelist)
+    self.assertIn('IMAGES/vendor.img', namelist)
+    self.assertIn(POSTINSTALL_CONFIG, namelist)
+
+    self.assertNotIn('IMAGES/system_other.img', namelist)
+    self.assertNotIn('IMAGES/system.map', namelist)
+
+  def test_GetTargetFilesZipForSecondaryImages_skipPostinstall(self):
+    input_file = construct_target_files(secondary=True)
+    target_file = GetTargetFilesZipForSecondaryImages(
+        input_file, skip_postinstall=True)
+
+    with zipfile.ZipFile(target_file) as verify_zip:
+      namelist = verify_zip.namelist()
+
+    self.assertIn('META/ab_partitions.txt', namelist)
+    self.assertIn('IMAGES/boot.img', namelist)
+    self.assertIn('IMAGES/system.img', namelist)
+    self.assertIn('IMAGES/vendor.img', namelist)
+
+    self.assertNotIn('IMAGES/system_other.img', namelist)
+    self.assertNotIn('IMAGES/system.map', namelist)
+    self.assertNotIn(POSTINSTALL_CONFIG, namelist)
+
+  def test_GetTargetFilesZipWithoutPostinstallConfig(self):
+    input_file = construct_target_files()
+    target_file = GetTargetFilesZipWithoutPostinstallConfig(input_file)
+    with zipfile.ZipFile(target_file) as verify_zip:
+      self.assertNotIn(POSTINSTALL_CONFIG, verify_zip.namelist())
+
+  def test_GetTargetFilesZipWithoutPostinstallConfig_missingEntry(self):
+    input_file = construct_target_files()
+    common.ZipDelete(input_file, POSTINSTALL_CONFIG)
+    target_file = GetTargetFilesZipWithoutPostinstallConfig(input_file)
+    with zipfile.ZipFile(target_file) as verify_zip:
+      self.assertNotIn(POSTINSTALL_CONFIG, verify_zip.namelist())
+
+
+class TestPropertyFiles(PropertyFiles):
+  """A class that extends PropertyFiles for testing purpose."""
+
+  def __init__(self):
+    super(TestPropertyFiles, self).__init__()
+    self.name = 'ota-test-property-files'
+    self.required = (
+        'required-entry1',
+        'required-entry2',
+    )
+    self.optional = (
+        'optional-entry1',
+        'optional-entry2',
+    )
+
+
+class PropertyFilesTest(unittest.TestCase):
+
+  def tearDown(self):
+    common.Cleanup()
+
+  @staticmethod
+  def _construct_zip_package(entries):
+    zip_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(zip_file, 'w') as zip_fp:
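+      # Store the entries uncompressed (ZIP_STORED) so that each offset/size
+      # range maps directly onto the entry's contents.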
+      for entry in entries:
+        zip_fp.writestr(
+            entry,
+            entry.replace('.', '-').upper(),
+            zipfile.ZIP_STORED)
+    return zip_file
+
+  @staticmethod
+  def _parse_property_files_string(data):
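+    # A property-files string is a comma-separated list of
+    # 'name:offset:size' tokens; keep 'offset:size' as the value.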
+    result = {}
+    for token in data.split(','):
+      name, info = token.split(':', 1)
+      result[name] = info
+    return result
+
+  def _verify_entries(self, input_file, tokens, entries):
+    for entry in entries:
+      offset, size = map(int, tokens[entry].split(':'))
+      with open(input_file, 'rb') as input_fp:
+        input_fp.seek(offset)
+        if entry == 'metadata':
+          expected = b'META-INF/COM/ANDROID/METADATA'
+        else:
+          expected = entry.replace('.', '-').upper().encode()
+        self.assertEqual(expected, input_fp.read(size))
+
+  def test_Compute(self):
+    entries = (
+        'required-entry1',
+        'required-entry2',
+    )
+    zip_file = self._construct_zip_package(entries)
+    property_files = TestPropertyFiles()
+    with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+      property_files_string = property_files.Compute(zip_fp)
+
+    tokens = self._parse_property_files_string(property_files_string)
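+    # The two required entries plus a reserved 'metadata' token account for
+    # the three tokens.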
+    self.assertEqual(3, len(tokens))
+    self._verify_entries(zip_file, tokens, entries)
+
+  def test_Compute_withOptionalEntries(self):
+    entries = (
+        'required-entry1',
+        'required-entry2',
+        'optional-entry1',
+        'optional-entry2',
+    )
+    zip_file = self._construct_zip_package(entries)
+    property_files = TestPropertyFiles()
+    with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+      property_files_string = property_files.Compute(zip_fp)
+
+    tokens = self._parse_property_files_string(property_files_string)
+    self.assertEqual(5, len(tokens))
+    self._verify_entries(zip_file, tokens, entries)
+
+  def test_Compute_missingRequiredEntry(self):
+    entries = (
+        'required-entry2',
+    )
+    zip_file = self._construct_zip_package(entries)
+    property_files = TestPropertyFiles()
+    with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+      self.assertRaises(KeyError, property_files.Compute, zip_fp)
+
+  def test_Finalize(self):
+    entries = [
+        'required-entry1',
+        'required-entry2',
+        'META-INF/com/android/metadata',
+    ]
+    zip_file = self._construct_zip_package(entries)
+    property_files = TestPropertyFiles()
+    with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+      # pylint: disable=protected-access
+      raw_metadata = property_files._GetPropertyFilesString(
+          zip_fp, reserve_space=False)
+      streaming_metadata = property_files.Finalize(zip_fp, len(raw_metadata))
+    tokens = self._parse_property_files_string(streaming_metadata)
+
+    self.assertEqual(3, len(tokens))
+    # 'META-INF/com/android/metadata' will be keyed as 'metadata' in the
+    # streaming metadata.
+    entries[2] = 'metadata'
+    self._verify_entries(zip_file, tokens, entries)
+
+  def test_Finalize_assertReservedLength(self):
+    entries = (
+        'required-entry1',
+        'required-entry2',
+        'optional-entry1',
+        'optional-entry2',
+        'META-INF/com/android/metadata',
+    )
+    zip_file = self._construct_zip_package(entries)
+    property_files = TestPropertyFiles()
+    with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+      # First get the raw metadata string (i.e. without padding space).
+      # pylint: disable=protected-access
+      raw_metadata = property_files._GetPropertyFilesString(
+          zip_fp, reserve_space=False)
+      raw_length = len(raw_metadata)
+
+      # Now pass in the exact expected length.
+      streaming_metadata = property_files.Finalize(zip_fp, raw_length)
+      self.assertEqual(raw_length, len(streaming_metadata))
+
+      # Or pass in insufficient length.
+      self.assertRaises(
+          AssertionError,
+          property_files.Finalize,
+          zip_fp,
+          raw_length - 1)
+
+      # Or pass in a much larger size.
+      streaming_metadata = property_files.Finalize(
+          zip_fp,
+          raw_length + 20)
+      self.assertEqual(raw_length + 20, len(streaming_metadata))
+      self.assertEqual(' ' * 20, streaming_metadata[raw_length:])
+
+  def test_Verify(self):
+    entries = (
+        'required-entry1',
+        'required-entry2',
+        'optional-entry1',
+        'optional-entry2',
+        'META-INF/com/android/metadata',
+    )
+    zip_file = self._construct_zip_package(entries)
+    property_files = TestPropertyFiles()
+    with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+      # First get the raw metadata string (i.e. without padding space).
+      # pylint: disable=protected-access
+      raw_metadata = property_files._GetPropertyFilesString(
+          zip_fp, reserve_space=False)
+
+      # Should pass the test if verification passes.
+      property_files.Verify(zip_fp, raw_metadata)
+
+      # Or raise on verification failure.
+      self.assertRaises(
+          AssertionError, property_files.Verify, zip_fp, raw_metadata + 'x')
+
+
+class StreamingPropertyFilesTest(PropertyFilesTest):
+  """Additional sanity checks specialized for StreamingPropertyFiles."""
+
+  def test_init(self):
+    property_files = StreamingPropertyFiles()
+    self.assertEqual('ota-streaming-property-files', property_files.name)
+    self.assertEqual(
+        (
+            'payload.bin',
+            'payload_properties.txt',
+        ),
+        property_files.required)
+    self.assertEqual(
+        (
+            'care_map.txt',
+            'compatibility.zip',
+        ),
+        property_files.optional)
+
+  def test_Compute(self):
+    entries = (
+        'payload.bin',
+        'payload_properties.txt',
+        'care_map.txt',
+        'compatibility.zip',
+    )
+    zip_file = self._construct_zip_package(entries)
+    property_files = StreamingPropertyFiles()
+    with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+      property_files_string = property_files.Compute(zip_fp)
+
+    tokens = self._parse_property_files_string(property_files_string)
+    self.assertEqual(5, len(tokens))
+    self._verify_entries(zip_file, tokens, entries)
+
+  def test_Finalize(self):
+    entries = [
+        'payload.bin',
+        'payload_properties.txt',
+        'care_map.txt',
+        'compatibility.zip',
+        'META-INF/com/android/metadata',
+    ]
+    zip_file = self._construct_zip_package(entries)
+    property_files = StreamingPropertyFiles()
+    with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+      # pylint: disable=protected-access
+      raw_metadata = property_files._GetPropertyFilesString(
+          zip_fp, reserve_space=False)
+      streaming_metadata = property_files.Finalize(zip_fp, len(raw_metadata))
+    tokens = self._parse_property_files_string(streaming_metadata)
+
+    self.assertEqual(5, len(tokens))
+    # 'META-INF/com/android/metadata' will be keyed as 'metadata' in the
+    # streaming metadata.
+    entries[4] = 'metadata'
+    self._verify_entries(zip_file, tokens, entries)
+
+  def test_Verify(self):
+    entries = (
+        'payload.bin',
+        'payload_properties.txt',
+        'care_map.txt',
+        'compatibility.zip',
+        'META-INF/com/android/metadata',
+    )
+    zip_file = self._construct_zip_package(entries)
+    property_files = StreamingPropertyFiles()
+    with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+      # First get the raw metadata string (i.e. without padding space).
+      # pylint: disable=protected-access
+      raw_metadata = property_files._GetPropertyFilesString(
+          zip_fp, reserve_space=False)
+
+      # Should pass the test if verification passes.
+      property_files.Verify(zip_fp, raw_metadata)
+
+      # Or raise on verification failure.
+      self.assertRaises(
+          AssertionError, property_files.Verify, zip_fp, raw_metadata + 'x')
+
+
+class AbOtaPropertyFilesTest(PropertyFilesTest):
+  """Additional sanity checks specialized for AbOtaPropertyFiles."""
+
+  # The size, in bytes, of the payload and metadata signatures (a 2048-bit
+  # RSA signature is 256 bytes).
+  SIGNATURE_SIZE = 256
+
+  def setUp(self):
+    self.testdata_dir = test_utils.get_testdata_dir()
+    self.assertTrue(os.path.exists(self.testdata_dir))
+
+    common.OPTIONS.wipe_user_data = False
+    common.OPTIONS.payload_signer = None
+    common.OPTIONS.payload_signer_args = None
+    common.OPTIONS.package_key = os.path.join(self.testdata_dir, 'testkey')
+    common.OPTIONS.key_passwords = {
+        common.OPTIONS.package_key : None,
+    }
+
+  def test_init(self):
+    property_files = AbOtaPropertyFiles()
+    self.assertEqual('ota-property-files', property_files.name)
+    self.assertEqual(
+        (
+            'payload.bin',
+            'payload_properties.txt',
+        ),
+        property_files.required)
+    self.assertEqual(
+        (
+            'care_map.txt',
+            'compatibility.zip',
+        ),
+        property_files.optional)
+
+  def test_GetPayloadMetadataOffsetAndSize(self):
+    target_file = construct_target_files()
+    payload = Payload()
+    payload.Generate(target_file)
+
+    payload_signer = PayloadSigner()
+    payload.Sign(payload_signer)
+
+    output_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(output_file, 'w') as output_zip:
+      payload.WriteToZip(output_zip)
+
+    # Find out the payload metadata offset and size.
+    property_files = AbOtaPropertyFiles()
+    with zipfile.ZipFile(output_file) as input_zip:
+      # pylint: disable=protected-access
+      payload_offset, metadata_total = (
+          property_files._GetPayloadMetadataOffsetAndSize(input_zip))
+
+    # Read in the metadata signature directly.
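+    # (By construction, the signature occupies the last SIGNATURE_SIZE bytes
+    # of the metadata blob within the payload.)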
+    with open(output_file, 'rb') as verify_fp:
+      verify_fp.seek(payload_offset + metadata_total - self.SIGNATURE_SIZE)
+      metadata_signature = verify_fp.read(self.SIGNATURE_SIZE)
+
+    # Now we extract the metadata hash via the brillo_update_payload script,
+    # which will serve as the oracle result.
+    payload_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
+    metadata_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
+    cmd = ['brillo_update_payload', 'hash',
+           '--unsigned_payload', payload.payload_file,
+           '--signature_size', str(self.SIGNATURE_SIZE),
+           '--metadata_hash_file', metadata_sig_file,
+           '--payload_hash_file', payload_sig_file]
+    proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    stdoutdata, _ = proc.communicate()
+    self.assertEqual(
+        0, proc.returncode,
+        'Failed to run brillo_update_payload: {}'.format(stdoutdata))
+
+    signed_metadata_sig_file = payload_signer.Sign(metadata_sig_file)
+
+    # Finally we can compare the two signatures.
+    with open(signed_metadata_sig_file, 'rb') as verify_fp:
+      self.assertEqual(verify_fp.read(), metadata_signature)
+
+  @staticmethod
+  def _construct_zip_package_withValidPayload(with_metadata=False):
+    # Cannot use _construct_zip_package() since we need a "valid" payload.bin.
+    target_file = construct_target_files()
+    payload = Payload()
+    payload.Generate(target_file)
+
+    payload_signer = PayloadSigner()
+    payload.Sign(payload_signer)
+
+    zip_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(zip_file, 'w') as zip_fp:
+      # Write 'payload.bin' (and its 'payload_properties.txt').
+      payload.WriteToZip(zip_fp)
+
+      # Other entries.
+      entries = ['care_map.txt', 'compatibility.zip']
+
+      # Put META-INF/com/android/metadata if needed.
+      if with_metadata:
+        entries.append('META-INF/com/android/metadata')
+
+      for entry in entries:
+        zip_fp.writestr(
+            entry, entry.replace('.', '-').upper(), zipfile.ZIP_STORED)
+
+    return zip_file
+
+  def test_Compute(self):
+    zip_file = self._construct_zip_package_withValidPayload()
+    property_files = AbOtaPropertyFiles()
+    with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+      property_files_string = property_files.Compute(zip_fp)
+
+    tokens = self._parse_property_files_string(property_files_string)
+    # "6" indcludes the four entries above, one metadata entry, and one entry
+    # for payload-metadata.bin.
+    self.assertEqual(6, len(tokens))
+    self._verify_entries(
+        zip_file, tokens, ('care_map.txt', 'compatibility.zip'))
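+    # (Each token is expected to take the 'name:offset:size' form; see
+    # _parse_property_files_string in the base class.)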
+
+  def test_Finalize(self):
+    zip_file = self._construct_zip_package_withValidPayload(with_metadata=True)
+    property_files = AbOtaPropertyFiles()
+    with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+      # pylint: disable=protected-access
+      raw_metadata = property_files._GetPropertyFilesString(
+          zip_fp, reserve_space=False)
+      property_files_string = property_files.Finalize(zip_fp, len(raw_metadata))
+
+    tokens = self._parse_property_files_string(property_files_string)
+    # "6" indcludes the four entries above, one metadata entry, and one entry
+    # for payload-metadata.bin.
+    self.assertEqual(6, len(tokens))
+    self._verify_entries(
+        zip_file, tokens, ('care_map.txt', 'compatibility.zip'))
+
+  def test_Verify(self):
+    zip_file = self._construct_zip_package_withValidPayload(with_metadata=True)
+    property_files = AbOtaPropertyFiles()
+    with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+      # pylint: disable=protected-access
+      raw_metadata = property_files._GetPropertyFilesString(
+          zip_fp, reserve_space=False)
+
+      property_files.Verify(zip_fp, raw_metadata)
+
+
+class NonAbOtaPropertyFilesTest(PropertyFilesTest):
+  """Additional sanity checks specialized for NonAbOtaPropertyFiles."""
+
+  def test_init(self):
+    property_files = NonAbOtaPropertyFiles()
+    self.assertEqual('ota-property-files', property_files.name)
+    self.assertEqual((), property_files.required)
+    self.assertEqual((), property_files.optional)
+
+  def test_Compute(self):
+    entries = ()
+    zip_file = self._construct_zip_package(entries)
+    property_files = NonAbOtaPropertyFiles()
+    with zipfile.ZipFile(zip_file) as zip_fp:
+      property_files_string = property_files.Compute(zip_fp)
+
+    tokens = self._parse_property_files_string(property_files_string)
+    self.assertEqual(1, len(tokens))
+    self._verify_entries(zip_file, tokens, entries)
+
+  def test_Finalize(self):
+    entries = [
+        'META-INF/com/android/metadata',
+    ]
+    zip_file = self._construct_zip_package(entries)
+    property_files = NonAbOtaPropertyFiles()
+    with zipfile.ZipFile(zip_file) as zip_fp:
+      # pylint: disable=protected-access
+      raw_metadata = property_files._GetPropertyFilesString(
+          zip_fp, reserve_space=False)
+      property_files_string = property_files.Finalize(zip_fp, len(raw_metadata))
+    tokens = self._parse_property_files_string(property_files_string)
+
+    self.assertEqual(1, len(tokens))
+    # 'META-INF/com/android/metadata' will be keyed as 'metadata'.
+    entries[0] = 'metadata'
+    self._verify_entries(zip_file, tokens, entries)
+
+  def test_Verify(self):
+    entries = (
+        'META-INF/com/android/metadata',
+    )
+    zip_file = self._construct_zip_package(entries)
+    property_files = NonAbOtaPropertyFiles()
+    with zipfile.ZipFile(zip_file) as zip_fp:
+      # pylint: disable=protected-access
+      raw_metadata = property_files._GetPropertyFilesString(
+          zip_fp, reserve_space=False)
+
+      property_files.Verify(zip_fp, raw_metadata)
+
+
+class PayloadSignerTest(unittest.TestCase):
+
+  SIGFILE = 'sigfile.bin'
+  SIGNED_SIGFILE = 'signed-sigfile.bin'
+
+  def setUp(self):
+    self.testdata_dir = test_utils.get_testdata_dir()
+    self.assertTrue(os.path.exists(self.testdata_dir))
+
+    common.OPTIONS.payload_signer = None
+    common.OPTIONS.payload_signer_args = []
+    common.OPTIONS.package_key = os.path.join(self.testdata_dir, 'testkey')
+    common.OPTIONS.key_passwords = {
+        common.OPTIONS.package_key : None,
+    }
+
+  def tearDown(self):
+    common.Cleanup()
+
+  def _assertFilesEqual(self, file1, file2):
+    with open(file1, 'rb') as fp1, open(file2, 'rb') as fp2:
+      self.assertEqual(fp1.read(), fp2.read())
+
+  def test_init(self):
+    payload_signer = PayloadSigner()
+    self.assertEqual('openssl', payload_signer.signer)
+
+  def test_init_withPassword(self):
+    common.OPTIONS.package_key = os.path.join(
+        self.testdata_dir, 'testkey_with_passwd')
+    common.OPTIONS.key_passwords = {
+        common.OPTIONS.package_key : 'foo',
+    }
+    payload_signer = PayloadSigner()
+    self.assertEqual('openssl', payload_signer.signer)
+
+  def test_init_withExternalSigner(self):
+    common.OPTIONS.payload_signer = 'abc'
+    common.OPTIONS.payload_signer_args = ['arg1', 'arg2']
+    payload_signer = PayloadSigner()
+    self.assertEqual('abc', payload_signer.signer)
+    self.assertEqual(['arg1', 'arg2'], payload_signer.signer_args)
+
+  def test_Sign(self):
+    payload_signer = PayloadSigner()
+    input_file = os.path.join(self.testdata_dir, self.SIGFILE)
+    signed_file = payload_signer.Sign(input_file)
+
+    verify_file = os.path.join(self.testdata_dir, self.SIGNED_SIGFILE)
+    self._assertFilesEqual(verify_file, signed_file)
+
+  def test_Sign_withExternalSigner_openssl(self):
+    """Uses openssl as the external payload signer."""
+    common.OPTIONS.payload_signer = 'openssl'
+    common.OPTIONS.payload_signer_args = [
+        'pkeyutl', '-sign', '-keyform', 'DER', '-inkey',
+        os.path.join(self.testdata_dir, 'testkey.pk8'),
+        '-pkeyopt', 'digest:sha256']
+    payload_signer = PayloadSigner()
+    input_file = os.path.join(self.testdata_dir, self.SIGFILE)
+    signed_file = payload_signer.Sign(input_file)
+
+    verify_file = os.path.join(self.testdata_dir, self.SIGNED_SIGFILE)
+    self._assertFilesEqual(verify_file, signed_file)
+
+  def test_Sign_withExternalSigner_script(self):
+    """Uses testdata/payload_signer.sh as the external payload signer."""
+    common.OPTIONS.payload_signer = os.path.join(
+        self.testdata_dir, 'payload_signer.sh')
+    common.OPTIONS.payload_signer_args = [
+        os.path.join(self.testdata_dir, 'testkey.pk8')]
+    payload_signer = PayloadSigner()
+    input_file = os.path.join(self.testdata_dir, self.SIGFILE)
+    signed_file = payload_signer.Sign(input_file)
+
+    verify_file = os.path.join(self.testdata_dir, self.SIGNED_SIGFILE)
+    self._assertFilesEqual(verify_file, signed_file)
+
+
+class PayloadTest(unittest.TestCase):
+
+  def setUp(self):
+    self.testdata_dir = test_utils.get_testdata_dir()
+    self.assertTrue(os.path.exists(self.testdata_dir))
+
+    common.OPTIONS.wipe_user_data = False
+    common.OPTIONS.payload_signer = None
+    common.OPTIONS.payload_signer_args = None
+    common.OPTIONS.package_key = os.path.join(self.testdata_dir, 'testkey')
+    common.OPTIONS.key_passwords = {
+        common.OPTIONS.package_key : None,
+    }
+
+  def tearDown(self):
+    common.Cleanup()
+
+  @staticmethod
+  def _create_payload_full(secondary=False):
+    target_file = construct_target_files(secondary)
+    payload = Payload(secondary)
+    payload.Generate(target_file)
+    return payload
+
+  @staticmethod
+  def _create_payload_incremental():
+    target_file = construct_target_files()
+    source_file = construct_target_files()
+    payload = Payload()
+    payload.Generate(target_file, source_file)
+    return payload
+
+  def test_Generate_full(self):
+    payload = self._create_payload_full()
+    self.assertTrue(os.path.exists(payload.payload_file))
+
+  def test_Generate_incremental(self):
+    payload = self._create_payload_incremental()
+    self.assertTrue(os.path.exists(payload.payload_file))
+
+  def test_Generate_additionalArgs(self):
+    target_file = construct_target_files()
+    source_file = construct_target_files()
+    payload = Payload()
+    # This should work the same as calling payload.Generate(target_file,
+    # source_file).
+    payload.Generate(
+        target_file, additional_args=["--source_image", source_file])
+    self.assertTrue(os.path.exists(payload.payload_file))
+
+  def test_Generate_invalidInput(self):
+    target_file = construct_target_files()
+    common.ZipDelete(target_file, 'IMAGES/vendor.img')
+    payload = Payload()
+    self.assertRaises(AssertionError, payload.Generate, target_file)
+
+  def test_Sign_full(self):
+    payload = self._create_payload_full()
+    payload.Sign(PayloadSigner())
+
+    output_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(output_file, 'w') as output_zip:
+      payload.WriteToZip(output_zip)
+
+    import check_ota_package_signature
+    check_ota_package_signature.VerifyAbOtaPayload(
+        os.path.join(self.testdata_dir, 'testkey.x509.pem'),
+        output_file)
+
+  def test_Sign_incremental(self):
+    payload = self._create_payload_incremental()
+    payload.Sign(PayloadSigner())
+
+    output_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(output_file, 'w') as output_zip:
+      payload.WriteToZip(output_zip)
+
+    import check_ota_package_signature
+    check_ota_package_signature.VerifyAbOtaPayload(
+        os.path.join(self.testdata_dir, 'testkey.x509.pem'),
+        output_file)
+
+  def test_Sign_withDataWipe(self):
+    common.OPTIONS.wipe_user_data = True
+    payload = self._create_payload_full()
+    payload.Sign(PayloadSigner())
+
+    with open(payload.payload_properties) as properties_fp:
+      self.assertIn("POWERWASH=1", properties_fp.read())
+
+  def test_Sign_secondary(self):
+    payload = self._create_payload_full(secondary=True)
+    payload.Sign(PayloadSigner())
+
+    with open(payload.payload_properties) as properties_fp:
+      self.assertIn("SWITCH_SLOT_ON_REBOOT=0", properties_fp.read())
+
+  def test_Sign_badSigner(self):
+    """Tests that signing failure can be captured."""
+    payload = self._create_payload_full()
+    payload_signer = PayloadSigner()
+    payload_signer.signer_args.append('bad-option')
+    self.assertRaises(AssertionError, payload.Sign, payload_signer)
+
+  def test_WriteToZip(self):
+    payload = self._create_payload_full()
+    payload.Sign(PayloadSigner())
+
+    output_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(output_file, 'w') as output_zip:
+      payload.WriteToZip(output_zip)
+
+    with zipfile.ZipFile(output_file) as verify_zip:
+      # First make sure we have the essential entries.
+      namelist = verify_zip.namelist()
+      self.assertIn(Payload.PAYLOAD_BIN, namelist)
+      self.assertIn(Payload.PAYLOAD_PROPERTIES_TXT, namelist)
+
+      # Then assert these entries are stored.
+      for entry_info in verify_zip.infolist():
+        if entry_info.filename not in (Payload.PAYLOAD_BIN,
+                                       Payload.PAYLOAD_PROPERTIES_TXT):
+          continue
+        self.assertEqual(zipfile.ZIP_STORED, entry_info.compress_type)
+
+  def test_WriteToZip_unsignedPayload(self):
+    """Unsigned payloads should not be allowed to be written to zip."""
+    payload = self._create_payload_full()
+
+    output_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(output_file, 'w') as output_zip:
+      self.assertRaises(AssertionError, payload.WriteToZip, output_zip)
+
+    # Also test with incremental payload.
+    payload = self._create_payload_incremental()
+
+    output_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(output_file, 'w') as output_zip:
+      self.assertRaises(AssertionError, payload.WriteToZip, output_zip)
+
+  def test_WriteToZip_secondary(self):
+    payload = self._create_payload_full(secondary=True)
+    payload.Sign(PayloadSigner())
+
+    output_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(output_file, 'w') as output_zip:
+      payload.WriteToZip(output_zip)
+
+    with zipfile.ZipFile(output_file) as verify_zip:
+      # First make sure we have the essential entries.
+      namelist = verify_zip.namelist()
+      self.assertIn(Payload.SECONDARY_PAYLOAD_BIN, namelist)
+      self.assertIn(Payload.SECONDARY_PAYLOAD_PROPERTIES_TXT, namelist)
+
+      # Then assert these entries are stored.
+      for entry_info in verify_zip.infolist():
+        if entry_info.filename not in (
+            Payload.SECONDARY_PAYLOAD_BIN,
+            Payload.SECONDARY_PAYLOAD_PROPERTIES_TXT):
+          continue
+        self.assertEqual(zipfile.ZIP_STORED, entry_info.compress_type)
diff --git a/tools/releasetools/test_sign_target_files_apks.py b/tools/releasetools/test_sign_target_files_apks.py
new file mode 100644
index 0000000..26f9e10
--- /dev/null
+++ b/tools/releasetools/test_sign_target_files_apks.py
@@ -0,0 +1,213 @@
+#
+# Copyright (C) 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from __future__ import print_function
+
+import base64
+import os.path
+import unittest
+import zipfile
+
+import common
+import test_utils
+from sign_target_files_apks import (
+    EditTags, ReplaceCerts, ReplaceVerityKeyId, RewriteProps)
+
+
+class SignTargetFilesApksTest(unittest.TestCase):
+
+  MAC_PERMISSIONS_XML = """<?xml version="1.0" encoding="iso-8859-1"?>
+<policy>
+  <signer signature="{}"><seinfo value="platform"/></signer>
+  <signer signature="{}"><seinfo value="media"/></signer>
+</policy>"""
+
+  def setUp(self):
+    self.testdata_dir = test_utils.get_testdata_dir()
+
+  def tearDown(self):
+    common.Cleanup()
+
+  def test_EditTags(self):
+    self.assertEqual(EditTags('dev-keys'), 'release-keys')
+    self.assertEqual(EditTags('test-keys'), 'release-keys')
+
+    # Multiple tags.
+    self.assertEqual(EditTags('abc,dev-keys,xyz'), 'abc,release-keys,xyz')
+
+    # Tags are sorted and deduplicated.
+    self.assertEqual(EditTags('xyz,abc,dev-keys,xyz'), 'abc,release-keys,xyz')
+
+  def test_RewriteProps(self):
+    props = (
+        ('', '\n'),
+        ('ro.build.fingerprint=foo/bar/dev-keys',
+         'ro.build.fingerprint=foo/bar/release-keys\n'),
+        ('ro.build.thumbprint=foo/bar/dev-keys',
+         'ro.build.thumbprint=foo/bar/release-keys\n'),
+        ('ro.vendor.build.fingerprint=foo/bar/dev-keys',
+         'ro.vendor.build.fingerprint=foo/bar/release-keys\n'),
+        ('ro.vendor.build.thumbprint=foo/bar/dev-keys',
+         'ro.vendor.build.thumbprint=foo/bar/release-keys\n'),
+        ('# comment line 1', '# comment line 1\n'),
+        ('ro.bootimage.build.fingerprint=foo/bar/dev-keys',
+         'ro.bootimage.build.fingerprint=foo/bar/release-keys\n'),
+        ('ro.build.description='
+         'sailfish-user 8.0.0 OPR6.170623.012 4283428 dev-keys',
+         'ro.build.description='
+         'sailfish-user 8.0.0 OPR6.170623.012 4283428 release-keys\n'),
+        ('ro.build.tags=dev-keys', 'ro.build.tags=release-keys\n'),
+        ('# comment line 2', '# comment line 2\n'),
+        ('ro.build.display.id=OPR6.170623.012 dev-keys',
+         'ro.build.display.id=OPR6.170623.012\n'),
+        ('# comment line 3', '# comment line 3\n'),
+    )
+
+    # Assert the case for each individual line.
+    for prop, output in props:
+      self.assertEqual(RewriteProps(prop), output)
+
+    # Concatenate all the input lines.
+    self.assertEqual(RewriteProps('\n'.join([prop[0] for prop in props])),
+                     ''.join([prop[1] for prop in props]))
+
+  def test_ReplaceVerityKeyId(self):
+    BOOT_CMDLINE1 = (
+        "console=ttyHSL0,115200,n8 androidboot.console=ttyHSL0 "
+        "androidboot.hardware=marlin user_debug=31 ehci-hcd.park=3 "
+        "lpm_levels.sleep_disabled=1 cma=32M@0-0xffffffff loop.max_part=7 "
+        "buildvariant=userdebug "
+        "veritykeyid=id:7e4333f9bba00adfe0ede979e28ed1920492b40f\n")
+
+    BOOT_CMDLINE2 = (
+        "console=ttyHSL0,115200,n8 androidboot.console=ttyHSL0 "
+        "androidboot.hardware=marlin user_debug=31 ehci-hcd.park=3 "
+        "lpm_levels.sleep_disabled=1 cma=32M@0-0xffffffff loop.max_part=7 "
+        "buildvariant=userdebug "
+        "veritykeyid=id:d24f2590e9abab5cff5f59da4c4f0366e3f43e94\n")
+
+    input_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(input_file, 'w') as input_zip:
+      input_zip.writestr('BOOT/cmdline', BOOT_CMDLINE1)
+
+    # Test with the first certificate.
+    cert_file = os.path.join(self.testdata_dir, 'verity.x509.pem')
+
+    output_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(input_file, 'r') as input_zip, \
+         zipfile.ZipFile(output_file, 'w') as output_zip:
+      ReplaceVerityKeyId(input_zip, output_zip, cert_file)
+
+    with zipfile.ZipFile(output_file) as output_zip:
+      self.assertEqual(BOOT_CMDLINE1, output_zip.read('BOOT/cmdline'))
+
+    # Test with the second certificate.
+    cert_file = os.path.join(self.testdata_dir, 'testkey.x509.pem')
+
+    with zipfile.ZipFile(input_file, 'r') as input_zip, \
+         zipfile.ZipFile(output_file, 'w') as output_zip:
+      ReplaceVerityKeyId(input_zip, output_zip, cert_file)
+
+    with zipfile.ZipFile(output_file) as output_zip:
+      self.assertEqual(BOOT_CMDLINE2, output_zip.read('BOOT/cmdline'))
+
+  def test_ReplaceVerityKeyId_no_veritykeyid(self):
+    BOOT_CMDLINE = (
+        "console=ttyHSL0,115200,n8 androidboot.hardware=bullhead boot_cpus=0-5 "
+        "lpm_levels.sleep_disabled=1 msm_poweroff.download_mode=0 "
+        "loop.max_part=7\n")
+
+    input_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(input_file, 'w') as input_zip:
+      input_zip.writestr('BOOT/cmdline', BOOT_CMDLINE)
+
+    output_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(input_file, 'r') as input_zip, \
+         zipfile.ZipFile(output_file, 'w') as output_zip:
+      ReplaceVerityKeyId(input_zip, output_zip, None)
+
+    with zipfile.ZipFile(output_file) as output_zip:
+      self.assertEqual(BOOT_CMDLINE, output_zip.read('BOOT/cmdline'))
+
+  def test_ReplaceCerts(self):
+    cert1_path = os.path.join(self.testdata_dir, 'platform.x509.pem')
+    with open(cert1_path) as cert1_fp:
+      cert1 = cert1_fp.read()
+    cert2_path = os.path.join(self.testdata_dir, 'media.x509.pem')
+    with open(cert2_path) as cert2_fp:
+      cert2 = cert2_fp.read()
+    cert3_path = os.path.join(self.testdata_dir, 'testkey.x509.pem')
+    with open(cert3_path) as cert3_fp:
+      cert3 = cert3_fp.read()
+
+    # Replace cert1 with cert3.
+    input_xml = self.MAC_PERMISSIONS_XML.format(
+        base64.b16encode(common.ParseCertificate(cert1)).lower(),
+        base64.b16encode(common.ParseCertificate(cert2)).lower())
+
+    output_xml = self.MAC_PERMISSIONS_XML.format(
+        base64.b16encode(common.ParseCertificate(cert3)).lower(),
+        base64.b16encode(common.ParseCertificate(cert2)).lower())
+
+    common.OPTIONS.key_map = {
+        cert1_path[:-9] : cert3_path[:-9],
+    }
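+    # ('[:-9]' strips the 9-character '.x509.pem' suffix: OPTIONS.key_map maps
+    # key base names rather than certificate file names.)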
+
+    self.assertEqual(output_xml, ReplaceCerts(input_xml))
+
+  def test_ReplaceCerts_duplicateEntries(self):
+    cert1_path = os.path.join(self.testdata_dir, 'platform.x509.pem')
+    with open(cert1_path) as cert1_fp:
+      cert1 = cert1_fp.read()
+    cert2_path = os.path.join(self.testdata_dir, 'media.x509.pem')
+    with open(cert2_path) as cert2_fp:
+      cert2 = cert2_fp.read()
+
+    # Replace cert1 with cert2, which leads to duplicate entries.
+    input_xml = self.MAC_PERMISSIONS_XML.format(
+        base64.b16encode(common.ParseCertificate(cert1)).lower(),
+        base64.b16encode(common.ParseCertificate(cert2)).lower())
+
+    common.OPTIONS.key_map = {
+        cert1_path[:-9] : cert2_path[:-9],
+    }
+    self.assertRaises(AssertionError, ReplaceCerts, input_xml)
+
+  def test_ReplaceCerts_skipNonExistentCerts(self):
+    cert1_path = os.path.join(self.testdata_dir, 'platform.x509.pem')
+    with open(cert1_path) as cert1_fp:
+      cert1 = cert1_fp.read()
+    cert2_path = os.path.join(self.testdata_dir, 'media.x509.pem')
+    with open(cert2_path) as cert2_fp:
+      cert2 = cert2_fp.read()
+    cert3_path = os.path.join(self.testdata_dir, 'testkey.x509.pem')
+    with open(cert3_path) as cert3_fp:
+      cert3 = cert3_fp.read()
+
+    input_xml = self.MAC_PERMISSIONS_XML.format(
+        base64.b16encode(common.ParseCertificate(cert1)).lower(),
+        base64.b16encode(common.ParseCertificate(cert2)).lower())
+
+    output_xml = self.MAC_PERMISSIONS_XML.format(
+        base64.b16encode(common.ParseCertificate(cert3)).lower(),
+        base64.b16encode(common.ParseCertificate(cert2)).lower())
+
+    common.OPTIONS.key_map = {
+        cert1_path[:-9] : cert3_path[:-9],
+        'non-existent' : cert3_path[:-9],
+        cert2_path[:-9] : 'non-existent',
+    }
+    self.assertEqual(output_xml, ReplaceCerts(input_xml))
diff --git a/tools/releasetools/test_utils.py b/tools/releasetools/test_utils.py
new file mode 100644
index 0000000..e64355b
--- /dev/null
+++ b/tools/releasetools/test_utils.py
@@ -0,0 +1,96 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""
+Utils for running unittests.
+"""
+
+import os
+import os.path
+import struct
+
+import common
+
+
+def get_testdata_dir():
+  """Returns the testdata dir, in relative to the script dir."""
+  # The script dir is the one we want, which could be different from pwd.
+  current_dir = os.path.dirname(os.path.realpath(__file__))
+  return os.path.join(current_dir, 'testdata')
+
+
+def construct_sparse_image(chunks):
+  """Returns a sparse image file constructed from the given chunks.
+
+  From system/core/libsparse/sparse_format.h.
+  typedef struct sparse_header {
+    __le32 magic;  // 0xed26ff3a
+    __le16 major_version;  // (0x1) - reject images with higher major versions
+    __le16 minor_version;  // (0x0) - allow images with higher minor versions
+    __le16 file_hdr_sz;  // 28 bytes for first revision of the file format
+    __le16 chunk_hdr_sz;  // 12 bytes for first revision of the file format
+    __le32 blk_sz;  // block size in bytes, must be a multiple of 4 (4096)
+    __le32 total_blks;  // total blocks in the non-sparse output image
+    __le32 total_chunks;  // total chunks in the sparse input image
+    __le32 image_checksum;  // CRC32 checksum of the original data, counting
+                            // "don't care" as 0. Standard 802.3 polynomial,
+                            // use a Public Domain table implementation
+  } sparse_header_t;
+
+  typedef struct chunk_header {
+    __le16 chunk_type;  // 0xCAC1 -> raw; 0xCAC2 -> fill;
+                        // 0xCAC3 -> don't care
+    __le16 reserved1;
+    __le32 chunk_sz;  // in blocks in output image
+    __le32 total_sz;  // in bytes of chunk input file including chunk header
+                      // and data
+  } chunk_header_t;
+
+  Args:
+    chunks: A list of chunks to be written. Each entry should be a tuple of
+        (chunk_type, number_of_blocks).
+
+  Returns:
+    Filename of the created sparse image.
+  """
+  SPARSE_HEADER_MAGIC = 0xED26FF3A
+  SPARSE_HEADER_FORMAT = "<I4H4I"
+  CHUNK_HEADER_FORMAT = "<2H2I"
+
+  sparse_image = common.MakeTempFile(prefix='sparse-', suffix='.img')
+  with open(sparse_image, 'wb') as fp:
+    fp.write(struct.pack(
+        SPARSE_HEADER_FORMAT, SPARSE_HEADER_MAGIC, 1, 0, 28, 12, 4096,
+        sum(chunk[1] for chunk in chunks),
+        len(chunks), 0))
+
+    for chunk in chunks:
+      data_size = 0
+      if chunk[0] == 0xCAC1:
+        data_size = 4096 * chunk[1]
+      elif chunk[0] == 0xCAC2:
+        data_size = 4
+      elif chunk[0] == 0xCAC3:
+        pass
+      else:
+        assert False, "Unsupported chunk type: {}".format(chunk[0])
+
+      fp.write(struct.pack(
+          CHUNK_HEADER_FORMAT, chunk[0], 0, chunk[1], data_size + 12))
+      if data_size != 0:
+        fp.write(os.urandom(data_size))
+
+  return sparse_image
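+
+
+# A minimal usage sketch of construct_sparse_image() (illustration only; not
+# referenced by the tests): build a tiny image with one chunk of each type,
+# then sanity-check the sparse header fields.
+def _example_construct_sparse_image():
+  image = construct_sparse_image([
+      (0xCAC1, 2),  # raw chunk: 2 blocks of random data
+      (0xCAC2, 3),  # fill chunk: 3 blocks sharing a 4-byte fill value
+      (0xCAC3, 5),  # don't-care chunk: 5 blocks with no data
+  ])
+  with open(image, 'rb') as fp:
+    header = struct.unpack('<I4H4I', fp.read(28))
+  assert header[0] == 0xED26FF3A, 'bad sparse header magic'
+  assert header[6] == 10, 'total_blks should be 2 + 3 + 5'
+  assert header[7] == 3, 'total_chunks should match len(chunks)'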
diff --git a/tools/releasetools/test_validate_target_files.py b/tools/releasetools/test_validate_target_files.py
new file mode 100644
index 0000000..bae648f
--- /dev/null
+++ b/tools/releasetools/test_validate_target_files.py
@@ -0,0 +1,180 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Unittests for validate_target_files.py.
+
+Note: This file calls functions in build_image.py that hard-code paths
+relative to ANDROID_BUILD_TOP (e.g.
+system/extras/verity/build_verity_metadata.py), so the tests need to be run
+from ANDROID_BUILD_TOP or the top-level OTA tools directory (i.e. the one you
+get after unzipping otatools.zip).
+
+  (from ANDROID_BUILD_TOP)
+  $ PYTHONPATH=build/make/tools/releasetools python -m unittest \\
+      test_validate_target_files
+
+  (from OTA tools directory)
+  $ PYTHONPATH=releasetools python -m unittest test_validate_target_files
+"""
+
+from __future__ import print_function
+
+import os
+import os.path
+import shutil
+import subprocess
+import unittest
+
+import build_image
+import common
+import test_utils
+from validate_target_files import ValidateVerifiedBootImages
+
+
+class ValidateTargetFilesTest(unittest.TestCase):
+
+  def setUp(self):
+    self.testdata_dir = test_utils.get_testdata_dir()
+
+  def tearDown(self):
+    common.Cleanup()
+
+  def _generate_boot_image(self, output_file):
+    kernel = common.MakeTempFile(prefix='kernel-')
+    with open(kernel, 'wb') as kernel_fp:
+      kernel_fp.write(os.urandom(10))
+
+    cmd = ['mkbootimg', '--kernel', kernel, '-o', output_file]
+    proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    stdoutdata, _ = proc.communicate()
+    self.assertEqual(
+        0, proc.returncode,
+        "Failed to run mkbootimg: {}".format(stdoutdata))
+
+    cmd = ['boot_signer', '/boot', output_file,
+           os.path.join(self.testdata_dir, 'testkey.pk8'),
+           os.path.join(self.testdata_dir, 'testkey.x509.pem'), output_file]
+    proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    stdoutdata, _ = proc.communicate()
+    self.assertEqual(
+        0, proc.returncode,
+        "Failed to sign boot image with boot_signer: {}".format(stdoutdata))
+
+  def test_ValidateVerifiedBootImages_bootImage(self):
+    input_tmp = common.MakeTempDir()
+    os.mkdir(os.path.join(input_tmp, 'IMAGES'))
+    boot_image = os.path.join(input_tmp, 'IMAGES', 'boot.img')
+    self._generate_boot_image(boot_image)
+
+    info_dict = {
+        'boot_signer' : 'true',
+    }
+    options = {
+        'verity_key' : os.path.join(self.testdata_dir, 'testkey.x509.pem'),
+    }
+    ValidateVerifiedBootImages(input_tmp, info_dict, options)
+
+  def test_ValidateVerifiedBootImages_bootImage_wrongKey(self):
+    input_tmp = common.MakeTempDir()
+    os.mkdir(os.path.join(input_tmp, 'IMAGES'))
+    boot_image = os.path.join(input_tmp, 'IMAGES', 'boot.img')
+    self._generate_boot_image(boot_image)
+
+    info_dict = {
+        'boot_signer' : 'true',
+    }
+    options = {
+        'verity_key' : os.path.join(self.testdata_dir, 'verity.x509.pem'),
+    }
+    self.assertRaises(
+        AssertionError, ValidateVerifiedBootImages, input_tmp, info_dict,
+        options)
+
+  def test_ValidateVerifiedBootImages_bootImage_corrupted(self):
+    input_tmp = common.MakeTempDir()
+    os.mkdir(os.path.join(input_tmp, 'IMAGES'))
+    boot_image = os.path.join(input_tmp, 'IMAGES', 'boot.img')
+    self._generate_boot_image(boot_image)
+
+    # Corrupt the last byte of the image.
+    with open(boot_image, 'r+b') as boot_fp:
+      boot_fp.seek(-1, os.SEEK_END)
+      last_byte = boot_fp.read(1)
+      last_byte = chr(255 - ord(last_byte))
+      boot_fp.seek(-1, os.SEEK_END)
+      boot_fp.write(last_byte)
+
+    info_dict = {
+        'boot_signer' : 'true',
+    }
+    options = {
+        'verity_key' : os.path.join(self.testdata_dir, 'testkey.x509.pem'),
+    }
+    self.assertRaises(
+        AssertionError, ValidateVerifiedBootImages, input_tmp, info_dict,
+        options)
+
+  def _generate_system_image(self, output_file):
+    verity_fec = True
+    partition_size = 1024 * 1024
+    adjusted_size, verity_size = build_image.AdjustPartitionSizeForVerity(
+        partition_size, verity_fec)
+
+    # Use an empty root directory.
+    system_root = common.MakeTempDir()
+    cmd = ['mkuserimg_mke2fs.sh', '-s', system_root, output_file, 'ext4',
+           '/system', str(adjusted_size), '-j', '0']
+    proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    stdoutdata, _ = proc.communicate()
+    self.assertEqual(
+        0, proc.returncode,
+        "Failed to create system image with mkuserimg_mke2fs.sh: {}".format(
+            stdoutdata))
+
+    # Append the verity metadata.
+    prop_dict = {
+        'original_partition_size' : str(partition_size),
+        'partition_size' : str(adjusted_size),
+        'verity_block_device' : '/dev/block/system',
+        'verity_key' : os.path.join(self.testdata_dir, 'testkey'),
+        'verity_signer_cmd' : 'verity_signer',
+        'verity_size' : str(verity_size),
+    }
+    self.assertTrue(
+        build_image.MakeVerityEnabledImage(output_file, verity_fec, prop_dict))
+
+  def test_ValidateVerifiedBootImages_systemImage(self):
+    input_tmp = common.MakeTempDir()
+    os.mkdir(os.path.join(input_tmp, 'IMAGES'))
+    system_image = os.path.join(input_tmp, 'IMAGES', 'system.img')
+    self._generate_system_image(system_image)
+
+    # Pack the verity key.
+    verity_key_mincrypt = os.path.join(
+        input_tmp, 'BOOT', 'RAMDISK', 'verity_key')
+    os.makedirs(os.path.dirname(verity_key_mincrypt))
+    shutil.copyfile(
+        os.path.join(self.testdata_dir, 'testkey_mincrypt'),
+        verity_key_mincrypt)
+
+    info_dict = {
+        'verity' : 'true',
+    }
+    options = {
+        'verity_key' : os.path.join(self.testdata_dir, 'testkey.x509.pem'),
+        'verity_key_mincrypt' : verity_key_mincrypt,
+    }
+    ValidateVerifiedBootImages(input_tmp, info_dict, options)
diff --git a/tools/releasetools/testdata/media.x509.pem b/tools/releasetools/testdata/media.x509.pem
new file mode 100644
index 0000000..98cd443
--- /dev/null
+++ b/tools/releasetools/testdata/media.x509.pem
@@ -0,0 +1,27 @@
+-----BEGIN CERTIFICATE-----
+MIIEqDCCA5CgAwIBAgIJAPK5jmEjVyxOMA0GCSqGSIb3DQEBBAUAMIGUMQswCQYD
+VQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4g
+VmlldzEQMA4GA1UEChMHQW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UE
+AxMHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAe
+Fw0wODA0MTUyMzQwNTdaFw0zNTA5MDEyMzQwNTdaMIGUMQswCQYDVQQGEwJVUzET
+MBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4G
+A1UEChMHQW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9p
+ZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASAwDQYJKoZI
+hvcNAQEBBQADggENADCCAQgCggEBAK4lDFoW75f8KGmsZRsyF8w2ug6GlkFo1YoE
+n0DOhYZxI6P/tPbZScM88to6BcI+rKpX2AOImxdZvPWefG8hiQriUIW37VaqYmwJ
+ie+czTY2LKDo0blgP9TYModnkmzMCQxot3Wuf/MJNMw2nvKFWiZn3wxmf9DHz12O
+umVYBnNzA7tiRybquu37cvB+16dqs8uaOBxLfc2AmxQNiR8AITvkAfWNagamHq3D
+qcLxxlZyhbCa4JNCpm+kIer5Ot91c6AowzHXBgGrOvfMhAM+znx3KjpbhrDb6dd3
+w6SKqYAe3O4ngVifRNnkETl5YAV2qZQQuoEJElna2YxsaP94S48CAQOjgfwwgfkw
+HQYDVR0OBBYEFMopPKqLwO0+VC7vQgWiv/K1fk11MIHJBgNVHSMEgcEwgb6AFMop
+PKqLwO0+VC7vQgWiv/K1fk11oYGapIGXMIGUMQswCQYDVQQGEwJVUzETMBEGA1UE
+CBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4GA1UEChMH
+QW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9pZDEiMCAG
+CSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbYIJAPK5jmEjVyxOMAwGA1Ud
+EwQFMAMBAf8wDQYJKoZIhvcNAQEEBQADggEBAITelRbV5KhyF6c9qEhwSPUzc6X3
+M/OQ1hvfPMnlJRYlv8qnwxWcriddFyqa4eh21UWBJ6xUL2gpDdUQwAKdj1Hg7hVr
+e3tazbOUJBuOx4t05cQsXK+uFWyvW9GZojonUk2gct6743hGSlM2MLDk0P+34I7L
+cB+ttjecdEZ/bgDG7YiFlTgHkgOHVgB4csjjAHr0I6V6LKs6KChptkxLe9X8GH0K
+fiQVll1ark4Hpt91G0p16Xk8kYphK4HNC2KK7gFo3ETkexDTWTJghJ1q321yfcJE
+RMIh0/nsw2jK0HmZ8rgQW8HyDTjUEGbMFBHCV6lupDSfV0ZWVQfk6AIKGoE=
+-----END CERTIFICATE-----
diff --git a/tools/releasetools/testdata/payload_signer.sh b/tools/releasetools/testdata/payload_signer.sh
new file mode 100755
index 0000000..a44ef34
--- /dev/null
+++ b/tools/releasetools/testdata/payload_signer.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+# The script will be called with 'payload_signer.sh <key> -in <input> -out <output>'.
+openssl pkeyutl -sign -keyform DER -inkey "$1" -pkeyopt digest:sha256 -in "$3" -out "$5"
diff --git a/tools/releasetools/testdata/platform.x509.pem b/tools/releasetools/testdata/platform.x509.pem
new file mode 100644
index 0000000..087f02e
--- /dev/null
+++ b/tools/releasetools/testdata/platform.x509.pem
@@ -0,0 +1,27 @@
+-----BEGIN CERTIFICATE-----
+MIIEqDCCA5CgAwIBAgIJALOZgIbQVs/6MA0GCSqGSIb3DQEBBAUAMIGUMQswCQYD
+VQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4g
+VmlldzEQMA4GA1UEChMHQW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UE
+AxMHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAe
+Fw0wODA0MTUyMjQwNTBaFw0zNTA5MDEyMjQwNTBaMIGUMQswCQYDVQQGEwJVUzET
+MBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4G
+A1UEChMHQW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9p
+ZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASAwDQYJKoZI
+hvcNAQEBBQADggENADCCAQgCggEBAJx4BZKsDV04HN6qZezIpgBuNkgMbXIHsSAR
+vlCGOqvitV0Amt9xRtbyICKAx81Ne9smJDuKgGwms0sTdSOkkmgiSQTcAUk+fArP
+GgXIdPabA3tgMJ2QdNJCgOFrrSqHNDYZUer3KkgtCbIEsYdeEqyYwap3PWgAuer9
+5W1Yvtjo2hb5o2AJnDeoNKbf7be2tEoEngeiafzPLFSW8s821k35CjuNjzSjuqtM
+9TNxqydxmzulh1StDFP8FOHbRdUeI0+76TybpO35zlQmE1DsU1YHv2mi/0qgfbX3
+6iANCabBtJ4hQC+J7RGQiTqrWpGA8VLoL4WkV1PPX8GQccXuyCcCAQOjgfwwgfkw
+HQYDVR0OBBYEFE/koLPdnLop9x1yh8Tnw48ghsKZMIHJBgNVHSMEgcEwgb6AFE/k
+oLPdnLop9x1yh8Tnw48ghsKZoYGapIGXMIGUMQswCQYDVQQGEwJVUzETMBEGA1UE
+CBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4GA1UEChMH
+QW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9pZDEiMCAG
+CSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbYIJALOZgIbQVs/6MAwGA1Ud
+EwQFMAMBAf8wDQYJKoZIhvcNAQEEBQADggEBAFclUbjZOh9z3g9tRp+G2tZwFAAp
+PIigzXzXeLc9r8wZf6t25iEuVsHHYc/EL9cz3lLFCuCIFM78CjtaGkNGBU2Cnx2C
+tCsgSL+ItdFJKe+F9g7dEtctVWV+IuPoXQTIMdYT0Zk4u4mCJH+jISVroS0dao+S
+6h2xw3Mxe6DAN/DRr/ZFrvIkl5+6bnoUvAJccbmBOM7z3fwFlhfPJIRc97QNY4L3
+J17XOElatuWTG5QhdlxJG3L7aOCA29tYwgKdNHyLMozkPvaosVUz7fvpib1qSN1L
+IC7alMarjdW4OZID2q4u1EYjLk/pvZYTlMYwDlE448/Shebk5INTjLixs1c=
+-----END CERTIFICATE-----
diff --git a/tools/releasetools/testdata/sigfile.bin b/tools/releasetools/testdata/sigfile.bin
new file mode 100644
index 0000000..8682216
--- /dev/null
+++ b/tools/releasetools/testdata/sigfile.bin
Binary files differ
diff --git a/tools/releasetools/testdata/signed-sigfile.bin b/tools/releasetools/testdata/signed-sigfile.bin
new file mode 100644
index 0000000..86d2f9e
--- /dev/null
+++ b/tools/releasetools/testdata/signed-sigfile.bin
Binary files differ
diff --git a/tools/releasetools/testdata/testkey.pk8 b/tools/releasetools/testdata/testkey.pk8
new file mode 100644
index 0000000..99be291
--- /dev/null
+++ b/tools/releasetools/testdata/testkey.pk8
Binary files differ
diff --git a/tools/releasetools/testdata/testkey.pubkey.pem b/tools/releasetools/testdata/testkey.pubkey.pem
new file mode 100644
index 0000000..418ae60
--- /dev/null
+++ b/tools/releasetools/testdata/testkey.pubkey.pem
@@ -0,0 +1,9 @@
+-----BEGIN PUBLIC KEY-----
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvjvyO2LwWgmQNyq7z+xK
+04eg0t3AL4y2NhpAAOzVnFyCArFcFjLTGQDDvkbZP6N12O6+dwJoPLntnm9A+VnP
+IFFRHg0HUWSbHM+Qk8Jgv2/2AVkAUj5J1r9t4X+2WI0eRzJP15Zjn68pQKGmcyci
+ry0gbvmYvXL2ZUmTm56DmEfCUCRIY2IGJ/CcMnFeItVU0LxKsV5Mlt5BO0Vv/CV4
+EaiOLwyCnoZuUhYto7dHlO/47v/H9zhkJC54OA1dkD38EPgO5GnfhGFSNXQRmJDT
+XrFgd6O+QO4yUNX8lYP10MzimUpItZa05t68NADqwYl3T7nWzvuC9r4IqZDyPf21
+TQIDAQAB
+-----END PUBLIC KEY-----
diff --git a/tools/releasetools/testdata/testkey.x509.pem b/tools/releasetools/testdata/testkey.x509.pem
new file mode 100644
index 0000000..65c8085
--- /dev/null
+++ b/tools/releasetools/testdata/testkey.x509.pem
@@ -0,0 +1,24 @@
+-----BEGIN CERTIFICATE-----
+MIIEADCCAuigAwIBAgIJAN/FvjYzGNOKMA0GCSqGSIb3DQEBCwUAMIGUMQswCQYD
+VQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4g
+VmlldzEQMA4GA1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UE
+AwwHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAe
+Fw0xODAxMTgwMDM0NTFaFw00NTA2MDUwMDM0NTFaMIGUMQswCQYDVQQGEwJVUzET
+MBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4gVmlldzEQMA4G
+A1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UEAwwHQW5kcm9p
+ZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAL478jti8FoJkDcqu8/sStOHoNLdwC+MtjYa
+QADs1ZxcggKxXBYy0xkAw75G2T+jddjuvncCaDy57Z5vQPlZzyBRUR4NB1FkmxzP
+kJPCYL9v9gFZAFI+Sda/beF/tliNHkcyT9eWY5+vKUChpnMnIq8tIG75mL1y9mVJ
+k5ueg5hHwlAkSGNiBifwnDJxXiLVVNC8SrFeTJbeQTtFb/wleBGoji8Mgp6GblIW
+LaO3R5Tv+O7/x/c4ZCQueDgNXZA9/BD4DuRp34RhUjV0EZiQ016xYHejvkDuMlDV
+/JWD9dDM4plKSLWWtObevDQA6sGJd0+51s77gva+CKmQ8j39tU0CAwEAAaNTMFEw
+HQYDVR0OBBYEFNJPJZDpq6tc/19Z2kxPA2bj9D6UMB8GA1UdIwQYMBaAFNJPJZDp
+q6tc/19Z2kxPA2bj9D6UMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQAD
+ggEBABSUG9qrwV3WcClDJwqkNLN4yeVVYzkRMGA8/XqOiYrW4zh0mKDLfr6OeU1C
+AKwZBLhhql59Po25r4gcwPiTN2DkoCfb3T59XG8J54PAgTQjIAZ3J+mGZplnmuD3
+wj+UGUpPe0qTr33ZPoJfwxVo4RVnOt/UCsIGXch0HS/BIdpechqP0w4rOHUbq6EA
+8UEi5irKSDOU9b/5rD/tX2f4nGwJlKQEHWrsj9LLKlaL7fX36ghoSxN/pBJOhedg
+/VjT6xbaEwfyhC6Zj9av5Xl7UdpYt+rBMroAGenz0OSxKhIphdcx4ZMhvfkBoYG9
+Crupdqe+kUsfg2RlPb5grQ3klMo=
+-----END CERTIFICATE-----
diff --git a/tools/releasetools/testdata/testkey_mincrypt b/tools/releasetools/testdata/testkey_mincrypt
new file mode 100644
index 0000000..7f5d31b
--- /dev/null
+++ b/tools/releasetools/testdata/testkey_mincrypt
Binary files differ
diff --git a/tools/releasetools/testdata/testkey_with_passwd.pk8 b/tools/releasetools/testdata/testkey_with_passwd.pk8
new file mode 100644
index 0000000..3d567de
--- /dev/null
+++ b/tools/releasetools/testdata/testkey_with_passwd.pk8
Binary files differ
diff --git a/tools/releasetools/testdata/testkey_with_passwd.x509.pem b/tools/releasetools/testdata/testkey_with_passwd.x509.pem
new file mode 100644
index 0000000..449396e
--- /dev/null
+++ b/tools/releasetools/testdata/testkey_with_passwd.x509.pem
@@ -0,0 +1,24 @@
+-----BEGIN CERTIFICATE-----
+MIIEADCCAuigAwIBAgIJANefUd3Piu0yMA0GCSqGSIb3DQEBCwUAMIGUMQswCQYD
+VQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4g
+VmlldzEQMA4GA1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UE
+AwwHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAe
+Fw0xODAxMTgwMDI3NDRaFw00NTA2MDUwMDI3NDRaMIGUMQswCQYDVQQGEwJVUzET
+MBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4gVmlldzEQMA4G
+A1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UEAwwHQW5kcm9p
+ZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBALBoA4c+qCQKapQAVGclbousC5J/L0TNZJEd
+KSW2nzXUHIwgTQ3r82227xkIvjnqXMCsc0q3/N2gGKR4sHqA30JO9Dyfgsx1ISaR
+GXe5cG048m5U5snplQgvPovtah9ZyvwNPzWPYC3uceJaDxKQKwVdsV+mOWM6WmpQ
+bdLO37jxfytyAbzaz3sG5HA3FSB8rX/xDM6If18NsxSHpcjaOjZXC4Fg6wlp0klY
+5/qhFEdmieu2zQVelXjoJfKSku8tPa7kZeDU/F3uLUq/U/xvFk7NVsRV+QvYOdQK
+1QECc/3yv1TKNAN3huWTgzCX6bMHmi09Npw3MQaGY0oS34cH9x0CAwEAAaNTMFEw
+HQYDVR0OBBYEFNsJZ0n9Opeea0rVAzL+1jwkDKzPMB8GA1UdIwQYMBaAFNsJZ0n9
+Opeea0rVAzL+1jwkDKzPMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQAD
+ggEBAJ/bzIzA+NrYwPEv56XKf6Vuj81+M1rTHAsH9PqbOvJT7iM7aU7wAl6vmXAo
+DQtvKoOBMdIXprapwe0quHCQm7PGxg+RRegr+dcTSVJFv1plnODOBOEAVlEfFwuW
+Cz0USF2jrNq+4ciH5zPL1a31ONb1rMkxJXQ/tAi0x8m6tZz+jsbE0wO6qB80UmkA
+4WY2Tu/gnAvFpD8plkiU0EKwedBHAcaFFZkQp23MKsVZ3UBqsqzzfXDYV1Oa6rIy
+XIZpI2Gx75pvAb57T2ap/yl0DBEAu7Nmpll0GCsgeJVdy7tS4LNj96Quya3CHWQw
+WNTVuan0KZqwDIm4Xn1oHUFQ9vc=
+-----END CERTIFICATE-----
diff --git a/tools/releasetools/testdata/verity.x509.pem b/tools/releasetools/testdata/verity.x509.pem
new file mode 100644
index 0000000..86399c3
--- /dev/null
+++ b/tools/releasetools/testdata/verity.x509.pem
@@ -0,0 +1,24 @@
+-----BEGIN CERTIFICATE-----
+MIID/TCCAuWgAwIBAgIJAJcPmDkJqolJMA0GCSqGSIb3DQEBBQUAMIGUMQswCQYD
+VQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4g
+VmlldzEQMA4GA1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UE
+AwwHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAe
+Fw0xNDExMDYxOTA3NDBaFw00MjAzMjQxOTA3NDBaMIGUMQswCQYDVQQGEwJVUzET
+MBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4gVmlldzEQMA4G
+A1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UEAwwHQW5kcm9p
+ZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAOjreE0vTVSRenuzO9vnaWfk0eQzYab0gqpi
+6xAzi6dmD+ugoEKJmbPiuE5Dwf21isZ9uhUUu0dQM46dK4ocKxMRrcnmGxydFn6o
+fs3ODJMXOkv2gKXL/FdbEPdDbxzdu8z3yk+W67udM/fW7WbaQ3DO0knu+izKak/3
+T41c5uoXmQ81UNtAzRGzGchNVXMmWuTGOkg6U+0I2Td7K8yvUMWhAWPPpKLtVH9r
+AL5TzjYNR92izdKcz3AjRsI3CTjtpiVABGeX0TcjRSuZB7K9EK56HV+OFNS6I1NP
+jdD7FIShyGlqqZdUOkAUZYanbpgeT5N7QL6uuqcGpoTOkalu6kkCAwEAAaNQME4w
+HQYDVR0OBBYEFH5DM/m7oArf4O3peeKO0ZIEkrQPMB8GA1UdIwQYMBaAFH5DM/m7
+oArf4O3peeKO0ZIEkrQPMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB
+AHO3NSvDE5jFvMehGGtS8BnFYdFKRIglDMc4niWSzhzOVYRH4WajxdtBWc5fx0ix
+NF/+hVKVhP6AIOQa+++sk+HIi7RvioPPbhjcsVlZe7cUEGrLSSveGouQyc+j0+m6
+JF84kszIl5GGNMTnx0XRPO+g8t6h5LWfnVydgZfpGRRg+WHewk1U2HlvTjIceb0N
+dcoJ8WKJAFWdcuE7VIm4w+vF/DYX/A2Oyzr2+QRhmYSv1cusgAeC1tvH4ap+J1Lg
+UnOu5Kh/FqPLLSwNVQp4Bu7b9QFfqK8Moj84bj88NqRGZgDyqzuTrFxn6FW7dmyA
+yttuAJAEAymk1mipd9+zp38=
+-----END CERTIFICATE-----
diff --git a/tools/releasetools/validate_target_files.py b/tools/releasetools/validate_target_files.py
index 4b34820..e8cea29 100755
--- a/tools/releasetools/validate_target_files.py
+++ b/tools/releasetools/validate_target_files.py
@@ -17,46 +17,39 @@
 """
 Validate a given (signed) target_files.zip.
 
-It performs checks to ensure the integrity of the input zip.
+It performs the following checks to assert the integrity of the input zip.
+
  - It verifies the file consistency between the ones in IMAGES/system.img (read
    via IMAGES/system.map) and the ones under unpacked folder of SYSTEM/. The
    same check also applies to the vendor image if present.
+
+ - It verifies the install-recovery script's consistency, by comparing the
+   checksums in the script against those of IMAGES/{boot,recovery}.img.
+
+ - It verifies the signed Verified Boot related images, for both Verified
+   Boot 1.0 and 2.0 (aka AVB).
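+
+Usage (see the flag descriptions in main() below):
+  validate_target_files.py [--verity_key KEY] [--verity_key_mincrypt KEY]
+      target_files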
 """
 
-import common
+import argparse
+import filecmp
 import logging
 import os.path
 import re
-import sparse_img
-import sys
+import subprocess
+import zipfile
 
-
-def _GetImage(which, tmpdir):
-  assert which in ('system', 'vendor')
-
-  path = os.path.join(tmpdir, 'IMAGES', which + '.img')
-  mappath = os.path.join(tmpdir, 'IMAGES', which + '.map')
-
-  # Map file must exist (allowed to be empty).
-  assert os.path.exists(path) and os.path.exists(mappath)
-
-  clobbered_blocks = '0'
-  return sparse_img.SparseImage(path, mappath, clobbered_blocks)
+import common
 
 
 def _ReadFile(file_name, unpacked_name, round_up=False):
   """Constructs and returns a File object. Rounds up its size if needed."""
 
-  def RoundUpTo4K(value):
-    rounded_up = value + 4095
-    return rounded_up - (rounded_up % 4096)
-
   assert os.path.exists(unpacked_name)
   with open(unpacked_name, 'r') as f:
     file_data = f.read()
   file_size = len(file_data)
   if round_up:
-    file_size_rounded_up = RoundUpTo4K(file_size)
+    file_size_rounded_up = common.RoundUpTo4K(file_size)
     file_data += '\0' * (file_size_rounded_up - file_size)
   return common.File(file_name, file_data)
 
@@ -64,13 +57,13 @@
 def ValidateFileAgainstSha1(input_tmp, file_name, file_path, expected_sha1):
   """Check if the file has the expected SHA-1."""
 
-  logging.info('Validating the SHA-1 of {}'.format(file_name))
+  logging.info('Validating the SHA-1 of %s', file_name)
   unpacked_name = os.path.join(input_tmp, file_path)
   assert os.path.exists(unpacked_name)
   actual_sha1 = _ReadFile(file_name, unpacked_name, False).sha1
   assert actual_sha1 == expected_sha1, \
       'SHA-1 mismatches for {}. actual {}, expected {}'.format(
-      file_name, actual_sha1, expected_sha1)
+          file_name, actual_sha1, expected_sha1)
 
 
 def ValidateFileConsistency(input_zip, input_tmp):
@@ -78,33 +71,31 @@
 
   def CheckAllFiles(which):
     logging.info('Checking %s image.', which)
-    image = _GetImage(which, input_tmp)
+    # Allow shared blocks when loading the sparse image, since that doesn't
+    # affect the checks below (all of a file's blocks are present, unless the
+    # file is skipped due to holes).
+    image = common.GetSparseImage(which, input_tmp, input_zip, True)
     prefix = '/' + which
     for entry in image.file_map:
+      # Skip entries like '__NONZERO-0'.
       if not entry.startswith(prefix):
         continue
 
       # Read the blocks that the file resides. Note that it will contain the
       # bytes past the file length, which is expected to be padded with '\0's.
       ranges = image.file_map[entry]
+
+      incomplete = ranges.extra.get('incomplete', False)
+      if incomplete:
+        logging.warning('Skipping %s that has incomplete block list', entry)
+        continue
+
       blocks_sha1 = image.RangeSha1(ranges)
 
       # The filename under unpacked directory, such as SYSTEM/bin/sh.
       unpacked_name = os.path.join(
           input_tmp, which.upper(), entry[(len(prefix) + 1):])
       unpacked_file = _ReadFile(entry, unpacked_name, True)
-      file_size = unpacked_file.size
-
-      # block.map may contain less blocks, because mke2fs may skip allocating
-      # blocks if they contain all zeros. We can't reconstruct such a file from
-      # its block list. (Bug: 65213616)
-      if file_size > ranges.size() * 4096:
-        logging.warning(
-            'Skipping %s that has less blocks: file size %d-byte,'
-            ' ranges %s (%d-byte)', entry, file_size, ranges,
-            ranges.size() * 4096)
-        continue
-
       file_sha1 = unpacked_file.sha1
       assert blocks_sha1 == file_sha1, \
           'file: %s, range: %s, blocks_sha1: %s, file_sha1: %s' % (
@@ -147,10 +138,10 @@
 
   script_path = 'SYSTEM/bin/install-recovery.sh'
   if not os.path.exists(os.path.join(input_tmp, script_path)):
-    logging.info('{} does not exist in input_tmp'.format(script_path))
+    logging.info('%s does not exist in input_tmp', script_path)
     return
 
-  logging.info('Checking {}'.format(script_path))
+  logging.info('Checking %s', script_path)
   with open(os.path.join(input_tmp, script_path), 'r') as script:
     lines = script.read().strip().split('\n')
   assert len(lines) >= 6
@@ -168,7 +159,7 @@
     expected_recovery_sha1 = applypatch_argv[3].strip()
     assert expected_recovery_check_sha1 == expected_recovery_sha1
     ValidateFileAgainstSha1(input_tmp, 'recovery.img',
-        'SYSTEM/etc/recovery.img', expected_recovery_sha1)
+                            'SYSTEM/etc/recovery.img', expected_recovery_sha1)
   else:
     # We're patching boot.img to get recovery.img where bonus_args is optional
     if applypatch_argv[1] == "-b":
@@ -182,44 +173,165 @@
     boot_info = applypatch_argv[boot_info_index].strip().split(':')
     assert len(boot_info) == 4
     ValidateFileAgainstSha1(input_tmp, file_name='boot.img',
-        file_path='IMAGES/boot.img', expected_sha1=boot_info[3])
+                            file_path='IMAGES/boot.img',
+                            expected_sha1=boot_info[3])
 
     recovery_sha1_index = boot_info_index + 2
     expected_recovery_sha1 = applypatch_argv[recovery_sha1_index]
     assert expected_recovery_check_sha1 == expected_recovery_sha1
     ValidateFileAgainstSha1(input_tmp, file_name='recovery.img',
-        file_path='IMAGES/recovery.img',
-        expected_sha1=expected_recovery_sha1)
+                            file_path='IMAGES/recovery.img',
+                            expected_sha1=expected_recovery_sha1)
 
-  logging.info('Done checking {}'.format(script_path))
+  logging.info('Done checking %s', script_path)
 
 
-def main(argv):
-  def option_handler():
-    return True
+def ValidateVerifiedBootImages(input_tmp, info_dict, options):
+  """Validates the Verified Boot related images.
 
-  args = common.ParseOptions(
-      argv, __doc__, extra_opts="",
-      extra_long_opts=[],
-      extra_option_handler=option_handler)
+  For Verified Boot 1.0, it verifies the signatures of the bootable images
+  (boot/recovery etc), as well as the dm-verity metadata in system images
+  (system/vendor/product). For Verified Boot 2.0, it calls avbtool to verify
+  vbmeta.img, which in turn verifies all the descriptors listed in vbmeta.
 
-  if len(args) != 1:
-    common.Usage(__doc__)
-    sys.exit(1)
+  Args:
+    input_tmp: The top-level directory of unpacked target-files.zip.
+    info_dict: The loaded info dict.
+    options: A dict that contains the user-supplied public keys to be used for
+        image verification. In particular, 'verity_key' is used to verify the
+        bootable images in VB 1.0, and the vbmeta image in VB 2.0, where
+        applicable. 'verity_key_mincrypt' will be used to verify the system
+        images in VB 1.0.
+
+  Raises:
+    AssertionError: On any verification failure.
+  """
+  # Verified boot 1.0 (images signed with boot_signer and verity_signer).
+  if info_dict.get('boot_signer') == 'true':
+    logging.info('Verifying Verified Boot images...')
+
+    # Verify the boot/recovery images (signed with boot_signer), against the
+    # given X.509 encoded pubkey (or falling back to the one in the info_dict if
+    # none given).
+    verity_key = options['verity_key']
+    if verity_key is None:
+      verity_key = info_dict['verity_key'] + '.x509.pem'
+    for image in ('boot.img', 'recovery.img', 'recovery-two-step.img'):
+      image_path = os.path.join(input_tmp, 'IMAGES', image)
+      if not os.path.exists(image_path):
+        continue
+
+      cmd = ['boot_signer', '-verify', image_path, '-certificate', verity_key]
+      proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+      stdoutdata, _ = proc.communicate()
+      assert proc.returncode == 0, \
+          'Failed to verify {} with boot_signer:\n{}'.format(image, stdoutdata)
+      logging.info(
+          'Verified %s with boot_signer (key: %s):\n%s', image, verity_key,
+          stdoutdata.rstrip())
+
+  # Verify the verity signed system images in Verified Boot 1.0. Note that we
+  # don't use 'elif' here, since 'boot_signer' and 'verity' are independent
+  # options in VB 1.0.
+  if info_dict.get('verity') == 'true':
+    # First verify that the verity key that's built into the root image (as
+    # /verity_key) matches the one given via command line, if any.
+    if info_dict.get("system_root_image") == "true":
+      verity_key_mincrypt = os.path.join(input_tmp, 'ROOT', 'verity_key')
+    else:
+      verity_key_mincrypt = os.path.join(
+          input_tmp, 'BOOT', 'RAMDISK', 'verity_key')
+    assert os.path.exists(verity_key_mincrypt), 'Missing verity_key'
+
+    if options['verity_key_mincrypt'] is None:
+      logging.warning(
+          'Skipped checking the content of /verity_key, as the key file is '
+          'not provided. Use --verity_key_mincrypt to specify.')
+    else:
+      expected_key = options['verity_key_mincrypt']
+      assert filecmp.cmp(expected_key, verity_key_mincrypt, shallow=False), \
+          "Mismatching mincrypt verity key files"
+      logging.info('Verified the content of /verity_key')
+
+    # Then verify the verity signed system/vendor/product images, against the
+    # verity pubkey in mincrypt format.
+    for image in ('system.img', 'vendor.img', 'product.img'):
+      image_path = os.path.join(input_tmp, 'IMAGES', image)
+
+      # We are not checking if the image is actually enabled via info_dict (e.g.
+      # 'system_verity_block_device=...'). Because it's most likely a bug that
+      # skips signing some of the images in signed target-files.zip, while
+      # having the top-level verity flag enabled.
+      if not os.path.exists(image_path):
+        continue
+
+      cmd = ['verity_verifier', image_path, '-mincrypt', verity_key_mincrypt]
+      proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+      stdoutdata, _ = proc.communicate()
+      assert proc.returncode == 0, \
+          'Failed to verify {} with verity_verifier (key: {}):\n{}'.format(
+              image, verity_key_mincrypt, stdoutdata)
+      logging.info(
+          'Verified %s with verity_verifier (key: %s):\n%s', image,
+          verity_key_mincrypt, stdoutdata.rstrip())
+
+  # Handle the case of Verified Boot 2.0 (AVB).
+  if info_dict.get("avb_enable") == "true":
+    logging.info('Verifying Verified Boot 2.0 (AVB) images...')
+
+    key = options['verity_key']
+    if key is None:
+      key = info_dict['avb_vbmeta_key_path']
+    # avbtool verifies all the images that have descriptors listed in vbmeta.
+    image = os.path.join(input_tmp, 'IMAGES', 'vbmeta.img')
+    cmd = ['avbtool', 'verify_image', '--image', image, '--key', key]
+    proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    stdoutdata, _ = proc.communicate()
+    assert proc.returncode == 0, \
+        'Failed to verify {} with avbtool (key: {}):\n{}'.format(
+            image, key, stdoutdata)
+
+    logging.info(
+        'Verified %s with avbtool (key: %s):\n%s', image, key,
+        stdoutdata.rstrip())
+
+
+def main():
+  parser = argparse.ArgumentParser(
+      description=__doc__,
+      formatter_class=argparse.RawDescriptionHelpFormatter)
+  parser.add_argument(
+      'target_files',
+      help='the input target_files.zip to be validated')
+  parser.add_argument(
+      '--verity_key',
+      help='the verity public key to verify the bootable images (Verified '
+           'Boot 1.0), or the vbmeta image (Verified Boot 2.0), where '
+           'applicable')
+  parser.add_argument(
+      '--verity_key_mincrypt',
+      help='the verity public key in mincrypt format to verify the system '
+           'images, if the target uses Verified Boot 1.0')
+  args = parser.parse_args()
+
+  # Args that were not provided will have 'None' as the value.
+  options = vars(args)
 
   logging_format = '%(asctime)s - %(filename)s - %(levelname)-8s: %(message)s'
   date_format = '%Y/%m/%d %H:%M:%S'
   logging.basicConfig(level=logging.INFO, format=logging_format,
                       datefmt=date_format)
 
-  logging.info("Unzipping the input target_files.zip: %s", args[0])
-  input_tmp, input_zip = common.UnzipTemp(args[0])
+  logging.info("Unzipping the input target_files.zip: %s", args.target_files)
+  input_tmp = common.UnzipTemp(args.target_files)
 
-  ValidateFileConsistency(input_zip, input_tmp)
+  with zipfile.ZipFile(args.target_files, 'r') as input_zip:
+    ValidateFileConsistency(input_zip, input_tmp)
 
   info_dict = common.LoadInfoDict(input_tmp)
   ValidateInstallRecoveryScript(input_tmp, info_dict)
 
+  ValidateVerifiedBootImages(input_tmp, info_dict, options)
+
   # TODO: Check if the OTA keys have been properly updated (the ones on /system,
   # in recovery image).
 
@@ -228,6 +340,6 @@
 
 if __name__ == '__main__':
   try:
-    main(sys.argv[1:])
+    main()
   finally:
     common.Cleanup()
diff --git a/tools/soong_to_convert.py b/tools/soong_to_convert.py
index 3d62d43..083f6f7 100755
--- a/tools/soong_to_convert.py
+++ b/tools/soong_to_convert.py
@@ -76,8 +76,10 @@
     problems = dict()
     deps = dict()
     reverse_deps = dict()
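+    # Maps each module name to its type from the input CSV (e.g. "native",
+    # "java"), so the results can be split per type below.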
+    module_types = dict()
 
-    for (module, problem, dependencies) in reader:
+    for (module, module_type, problem, dependencies) in reader:
+        module_types[module] = module_type
         problems[module] = problem
         deps[module] = [d for d in dependencies.strip().split(' ') if d != ""]
         for dep in deps[module]:
@@ -94,16 +96,19 @@
         extra = ""
         if len(problems[module]) > 0:
             extra = " ({})".format(problems[module])
-        results.append((count_deps(reverse_deps, module, []), module + extra))
+        results.append((count_deps(reverse_deps, module, []), module + extra, module_types[module]))
 
     return sorted(results, key=lambda result: (-result[0], result[1]))
 
+def filter_results(results, module_type):
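+    """Returns only the results whose module type matches module_type."""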
+    return [x for x in results if x[2] == module_type]
+
 def display(results):
     """Displays the results"""
     count_header = "# Blocked on"
     count_width = len(count_header)
     print("{} Module (potential problems)".format(count_header))
-    for (count, module) in results:
+    for (count, module, _module_type) in results:
         print("{:>{}} {}".format(count, count_width, module))
 
 def main(filename):
@@ -111,7 +116,15 @@
     with open(filename, 'rb') as csvfile:
         results = process(csv.reader(csvfile))
 
-    display(results)
+    native_results = filter_results(results, "native")
+    java_results = filter_results(results, "java")
+
+    print("native modules ready to convert")
+    display(native_results)
+
+    print("")
+    print("java modules ready to convert")
+    display(java_results)
 
 if __name__ == "__main__":
     if len(sys.argv) != 2:
diff --git a/tools/warn.py b/tools/warn.py
index cc63de4..01398be 100755
--- a/tools/warn.py
+++ b/tools/warn.py
@@ -509,6 +509,26 @@
     {'category': 'java',
      'severity': Severity.LOW,
      'description':
+         'Java: Fields that can be null should be annotated @Nullable',
+     'patterns': [r".*: warning: \[FieldMissingNullable\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
+         'Java: Method parameters that aren\'t checked for null shouldn\'t be annotated @Nullable',
+     'patterns': [r".*: warning: \[ParameterNotNullable\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
+         'Java: Methods that can return null should be annotated @Nullable',
+     'patterns': [r".*: warning: \[ReturnMissingNullable\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
+         'Java: Use parameter comments to document ambiguous literals',
+     'patterns': [r".*: warning: \[BooleanParameter\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
          'Java: Field name is CONSTANT CASE, but field is not static and final',
      'patterns': [r".*: warning: \[ConstantField\] .+"]},
     {'category': 'java',
@@ -519,11 +539,21 @@
     {'category': 'java',
      'severity': Severity.LOW,
      'description':
+         'Java: Use Java\'s utility functional interfaces instead of Function\u003cA, B> for primitive types.',
+     'patterns': [r".*: warning: \[LambdaFunctionalInterface\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
          'Java: Prefer \'L\' to \'l\' for the suffix to long literals',
      'patterns': [r".*: warning: \[LongLiteralLowerCaseSuffix\] .+"]},
     {'category': 'java',
      'severity': Severity.LOW,
      'description':
+         'Java: A private method that does not reference the enclosing instance can be static',
+     'patterns': [r".*: warning: \[MethodCanBeStatic\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
          'Java: C-style array declarations should not be used',
      'patterns': [r".*: warning: \[MixedArrayDimensions\] .+"]},
     {'category': 'java',
@@ -539,11 +569,21 @@
     {'category': 'java',
      'severity': Severity.LOW,
      'description':
+         'Java: Avoid having multiple unary operators acting on the same variable in a method call',
+     'patterns': [r".*: warning: \[MultipleUnaryOperatorsInMethodCall\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
          'Java: Package names should match the directory they are declared in',
      'patterns': [r".*: warning: \[PackageLocation\] .+"]},
     {'category': 'java',
      'severity': Severity.LOW,
      'description':
+         'Java: Non-standard parameter comment; prefer `/*paramName=*/ arg`',
+     'patterns': [r".*: warning: \[ParameterComment\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
          'Java: Utility classes (only static members) are not designed to be instantiated and should be made noninstantiable with a default constructor.',
      'patterns': [r".*: warning: \[PrivateConstructorForUtilityClass\] .+"]},
     {'category': 'java',
@@ -554,11 +594,31 @@
     {'category': 'java',
      'severity': Severity.LOW,
      'description':
+         'Java: The default case of a switch should appear at the end of the last statement group',
+     'patterns': [r".*: warning: \[SwitchDefault\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
          'Java: Unchecked exceptions do not need to be declared in the method signature.',
      'patterns': [r".*: warning: \[ThrowsUncheckedException\] .+"]},
     {'category': 'java',
      'severity': Severity.LOW,
      'description':
+         'Java: Type parameters must be a single letter with an optional numeric suffix, or an UpperCamelCase name followed by the letter \'T\'.',
+     'patterns': [r".*: warning: \[TypeParameterNaming\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
+         'Java: Constructors and methods with the same name should appear sequentially with no other code in between',
+     'patterns': [r".*: warning: \[UngroupedOverloads\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
+         'Java: Unnecessary call to NullPointerTester#setDefault',
+     'patterns': [r".*: warning: \[UnnecessarySetDefault\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
          'Java: Using static imports for types is unnecessary',
      'patterns': [r".*: warning: \[UnnecessaryStaticImport\] .+"]},
     {'category': 'java',
@@ -567,6 +627,61 @@
          'Java: Wildcard imports, static or otherwise, should not be used',
      'patterns': [r".*: warning: \[WildcardImport\] .+"]},
     {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
+         'Java: ',
+     'patterns': [r".*: warning: \[RemoveFieldPrefixes\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
+         'Java: Prefer assertThrows to ExpectedException',
+     'patterns': [r".*: warning: \[ExpectedExceptionMigration\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
+         'Java: Logger instances are not constants -- they are mutable and have side effects -- and should not be named using CONSTANT CASE',
+     'patterns': [r".*: warning: \[LoggerVariableCase\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
+         'Java: Prefer assertThrows to @Test(expected=...)',
+     'patterns': [r".*: warning: \[TestExceptionMigration\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Public fields must be final.',
+     'patterns': [r".*: warning: \[NonFinalPublicFields\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Private fields that are only assigned in the initializer should be made final.',
+     'patterns': [r".*: warning: \[PrivateFieldsNotAssigned\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Lists returned by methods should be immutable.',
+     'patterns': [r".*: warning: \[ReturnedListNotImmutable\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Parameters to log methods should not be generated by a call to String.format() or MessageFormat.format().',
+     'patterns': [r".*: warning: \[SaferLoggerFormat\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Parameters to log methods should not be generated by a call to toString(); see b/22986665.',
+     'patterns': [r".*: warning: \[SaferLoggerToString\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: A call to Binder.clearCallingIdentity() should be followed by Binder.restoreCallingIdentity() in a finally block. Otherwise the wrong Binder identity may be used by subsequent code.',
+     'patterns': [r".*: warning: \[BinderIdentityRestoredDangerously\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Classes extending PreferenceActivity must implement isValidFragment such that it does not unconditionally return true to prevent vulnerability to fragment injection attacks.',
+     'patterns': [r".*: warning: \[FragmentInjection\] .+"]},
+    {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
          'Java: Subclasses of Fragment must be instantiable via Class#newInstance(): the class must be public, static and have a public nullary constructor',
@@ -579,6 +694,26 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: A wakelock acquired with a timeout may be released by the system before calling `release`, even after checking `isHeld()`. If so, it will throw a RuntimeException. Please wrap in a try/catch block.',
+     'patterns': [r".*: warning: \[WakelockReleasedDangerously\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Arguments are in the wrong order or could be commented for clarity.',
+     'patterns': [r".*: warning: \[ArgumentSelectionDefectChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Arguments are swapped in assertEquals-like call',
+     'patterns': [r".*: warning: \[AssertEqualsArgumentOrderChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: An equality test between objects with incompatible types always returns false',
+     'patterns': [r".*: warning: \[EqualsIncompatibleType\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: @AssistedInject and @Inject should not be used on different constructors in the same class.',
      'patterns': [r".*: warning: \[AssistedInjectAndInjectOnConstructors\] .+"]},
     {'category': 'java',
@@ -604,11 +739,21 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: The ordering of parameters in overloaded methods should be as consistent as possible (when viewed from left to right)',
+     'patterns': [r".*: warning: \[InconsistentOverloads\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Double-checked locking on non-volatile fields is unsafe',
      'patterns': [r".*: warning: \[DoubleCheckedLocking\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Annotations should always be immutable',
+     'patterns': [r".*: warning: \[ImmutableAnnotationChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Enums should always be immutable',
      'patterns': [r".*: warning: \[ImmutableEnumChecker\] .+"]},
     {'category': 'java',
@@ -629,13 +774,13 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
-         'Java: A different potential argument is more similar to the name of the parameter than the existing argument; this may be an error',
-     'patterns': [r".*: warning: \[ArgumentParameterMismatch\] .+"]},
+         'Java: Assertions may be disabled at runtime and do not guarantee that execution will halt here; consider throwing an exception instead',
+     'patterns': [r".*: warning: \[AssertFalse\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
-         'Java: Assertions may be disabled at runtime and do not guarantee that execution will halt here; consider throwing an exception instead',
-     'patterns': [r".*: warning: \[AssertFalse\] .+"]},
+         'Java: This assertion throws an AssertionError if it fails, which will be caught by an enclosing try block.',
+     'patterns': [r".*: warning: \[AssertionFailureIgnored\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
@@ -664,23 +809,63 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Duration can be expressed more clearly with different units',
+     'patterns': [r".*: warning: \[CanonicalDuration\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Logging or rethrowing exceptions should usually be preferred to catching and calling printStackTrace',
+     'patterns': [r".*: warning: \[CatchAndPrintStackTrace\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Ignoring exceptions and calling fail() is unnecessary, and makes test output less useful',
+     'patterns': [r".*: warning: \[CatchFail\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Inner class is non-static but does not reference enclosing class',
      'patterns': [r".*: warning: \[ClassCanBeStatic\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
-         'Java: Class.newInstance() bypasses exception checking; prefer getConstructor().newInstance()',
+         'Java: Class.newInstance() bypasses exception checking; prefer getDeclaredConstructor().newInstance()',
      'patterns': [r".*: warning: \[ClassNewInstance\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
-         'Java: Implicit use of the platform default charset, which can result in e.g. non-ASCII characters being silently replaced with \'?\' in many environments',
-     'patterns': [r".*: warning: \[DefaultCharset\] .+"]},
+         'Java: The type of the array parameter of Collection.toArray needs to be compatible with the array type',
+     'patterns': [r".*: warning: \[CollectionToArraySafeParameter\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
-         'Java: This code, which counts elements using a loop, can be replaced by a simpler library method',
-     'patterns': [r".*: warning: \[ElementsCountedInLoop\] .+"]},
+         'Java: Collector.of() should not use state',
+     'patterns': [r".*: warning: \[CollectorShouldNotUseState\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Class should not implement both `Comparable` and `Comparator`',
+     'patterns': [r".*: warning: \[ComparableAndComparator\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Constructors should not invoke overridable methods.',
+     'patterns': [r".*: warning: \[ConstructorInvokesOverridable\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Constructors should not pass the \'this\' reference out in method invocations, since the object may not be fully constructed.',
+     'patterns': [r".*: warning: \[ConstructorLeaksThis\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: DateFormat is not thread-safe, and should not be used as a constant field.',
+     'patterns': [r".*: warning: \[DateFormatConstant\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Implicit use of the platform default charset, which can result in differing behavior between JVM executions or incorrect behavior if the encoding of the data source doesn\'t match expectations.',
+     'patterns': [r".*: warning: \[DefaultCharset\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
@@ -694,8 +879,13 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
-         'Java: An equality test between objects with incompatible types always returns false',
-     'patterns': [r".*: warning: \[EqualsIncompatibleType\] .+"]},
+         'Java: Calls to ExpectedException#expect should always be followed by exactly one statement.',
+     'patterns': [r".*: warning: \[ExpectedExceptionChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Switch case may fall through',
+     'patterns': [r".*: warning: \[FallThrough\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
@@ -704,26 +894,61 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Use parentheses to make the precedence explicit',
+     'patterns': [r".*: warning: \[FloatCast\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Floating point literal loses precision',
+     'patterns': [r".*: warning: \[FloatingPointLiteralPrecision\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Overloads will be ambiguous when passing lambda arguments',
      'patterns': [r".*: warning: \[FunctionalInterfaceClash\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Return value of methods returning Future must be checked. Ignoring returned Futures suppresses exceptions thrown from the code that completes the Future.',
+     'patterns': [r".*: warning: \[FutureReturnValueIgnored\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Calling getClass() on an enum may return a subclass of the enum type',
      'patterns': [r".*: warning: \[GetClassOnEnum\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Hiding fields of superclasses may cause confusion and errors',
+     'patterns': [r".*: warning: \[HidingField\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: This annotation has incompatible modifiers as specified by its @IncompatibleModifiers annotation',
      'patterns': [r".*: warning: \[IncompatibleModifiers\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: This for loop increments the same variable in the header and in the body',
+     'patterns': [r".*: warning: \[IncrementInForLoopAndHeader\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Please also override int read(byte[], int, int), otherwise multi-byte reads from this input stream are likely to be slow.',
      'patterns': [r".*: warning: \[InputStreamSlowMultibyteRead\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Casting inside an if block should be plausibly consistent with the instanceof type',
+     'patterns': [r".*: warning: \[InstanceOfAndCastMatchWrongType\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Expression of type int may overflow before being assigned to a long',
+     'patterns': [r".*: warning: \[IntLongMath\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Class should not implement both `Iterable` and `Iterator`',
      'patterns': [r".*: warning: \[IterableAndIterator\] .+"]},
     {'category': 'java',
@@ -734,16 +959,41 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Some JUnit4 construct cannot be used in a JUnit3 context. Convert your class to JUnit4 style to use them.',
+     'patterns': [r".*: warning: \[JUnit4ClassUsedInJUnit3\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Test class inherits from JUnit 3\'s TestCase but has JUnit 4 @Test annotations.',
      'patterns': [r".*: warning: \[JUnitAmbiguousTestClass\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
-         'Java: The Google Java Style Guide requires switch statements to have an explicit default',
+         'Java: Never reuse class names from java.lang',
+     'patterns': [r".*: warning: \[JavaLangClash\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Suggests alternatives to obsolete JDK classes.',
+     'patterns': [r".*: warning: \[JdkObsolete\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Assignment where a boolean expression was expected; use == if this assignment wasn\'t expected or add parentheses for clarity.',
+     'patterns': [r".*: warning: \[LogicalAssignment\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Switches on enum types should either handle all values, or have a default case.',
      'patterns': [r".*: warning: \[MissingCasesInEnumSwitch\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: The Google Java Style Guide requires that each switch statement includes a default statement group, even if it contains no code. (This requirement is lifted for any switch statement that covers all values of an enum.)',
+     'patterns': [r".*: warning: \[MissingDefault\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Not calling fail() when expecting an exception masks bugs',
      'patterns': [r".*: warning: \[MissingFail\] .+"]},
     {'category': 'java',
@@ -754,11 +1004,36 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
-         'Java: Compound assignments to bytes, shorts, chars, and floats hide dangerous casts',
+         'Java: Modifying a collection while iterating over it in a loop may cause a ConcurrentModificationException to be thrown.',
+     'patterns': [r".*: warning: \[ModifyCollectionInEnhancedForLoop\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Multiple calls to either parallel or sequential are unnecessary and cause confusion.',
+     'patterns': [r".*: warning: \[MultipleParallelOrSequentialCalls\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Constant field declarations should use the immutable type (such as ImmutableList) instead of the general collection interface type (such as List)',
+     'patterns': [r".*: warning: \[MutableConstantField\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Method return type should use the immutable type (such as ImmutableList) instead of the general collection interface type (such as List)',
+     'patterns': [r".*: warning: \[MutableMethodReturnType\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Compound assignments may hide dangerous casts',
      'patterns': [r".*: warning: \[NarrowingCompoundAssignment\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Nested instanceOf conditions of disjoint types create blocks of code that never execute',
+     'patterns': [r".*: warning: \[NestedInstanceOfConditions\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: This update of a volatile variable is non-atomic',
      'patterns': [r".*: warning: \[NonAtomicVolatileUpdate\] .+"]},
     {'category': 'java',
@@ -794,6 +1069,31 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: One should not call optional.get() inside an if statement that checks !optional.isPresent',
+     'patterns': [r".*: warning: \[OptionalNotPresent\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: String literal contains format specifiers, but is not passed to a format method',
+     'patterns': [r".*: warning: \[OrphanedFormatString\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: To return a custom message with a Throwable class, one should override getMessage() instead of toString() for Throwable.',
+     'patterns': [r".*: warning: \[OverrideThrowableToString\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Varargs doesn\'t agree for overridden method',
+     'patterns': [r".*: warning: \[Overrides\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Detects `/* name= */`-style comments on actual parameters where the name doesn\'t match the formal parameter',
+     'patterns': [r".*: warning: \[ParameterName\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Preconditions only accepts the %s placeholder in error message strings',
      'patterns': [r".*: warning: \[PreconditionsInvalidPlaceholder\] .+"]},
     {'category': 'java',
@@ -809,6 +1109,16 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: BugChecker has incorrect ProvidesFix tag, please update',
+     'patterns': [r".*: warning: \[ProvidesFix\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: reachabilityFence should always be called inside a finally block',
+     'patterns': [r".*: warning: \[ReachabilityFenceUsage\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Thrown exception is a subtype of another',
      'patterns': [r".*: warning: \[RedundantThrows\] .+"]},
     {'category': 'java',
@@ -824,8 +1134,18 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
-         'Java: A static variable or method should not be accessed from an object instance',
-     'patterns': [r".*: warning: \[StaticAccessedFromInstance\] .+"]},
+         'Java: Prefer the short-circuiting boolean operators \u0026\u0026 and || to \u0026 and |.',
+     'patterns': [r".*: warning: \[ShortCircuitBoolean\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: A static variable or method should be qualified with a class name, not expression',
+     'patterns': [r".*: warning: \[StaticQualifiedUsingExpression\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Streams that encapsulate a closeable resource should be closed using try-with-resources',
+     'patterns': [r".*: warning: \[StreamResourceLeak\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
@@ -834,13 +1154,43 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: String.split should never take only a single argument; it has surprising behavior',
+     'patterns': [r".*: warning: \[StringSplit\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Prefer Splitter to String.split',
+     'patterns': [r".*: warning: \[StringSplitter\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Using @Test(expected=...) is discouraged, since the test will pass if *any* statement in the test method throws the expected exception',
+     'patterns': [r".*: warning: \[TestExceptionChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Thread.join needs to be surrounded by a loop until it succeeds, as in Uninterruptibles.joinUninterruptibly.',
+     'patterns': [r".*: warning: \[ThreadJoinLoop\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: ThreadLocals should be stored in static fields',
+     'patterns': [r".*: warning: \[ThreadLocalUsage\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Three-letter time zone identifiers are deprecated, may be ambiguous, and might not do what you intend; the full IANA time zone ID should be used instead.',
+     'patterns': [r".*: warning: \[ThreeLetterTimeZoneID\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Truth Library assert is called on a constant.',
      'patterns': [r".*: warning: \[TruthConstantAsserts\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
-         'Java: An object is tested for equality to itself using Truth Libraries.',
-     'patterns': [r".*: warning: \[TruthSelfEquals\] .+"]},
+         'Java: Type parameter declaration overrides another type parameter already declared',
+     'patterns': [r".*: warning: \[TypeParameterShadowing\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
@@ -849,11 +1199,31 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Creation of a Set/HashSet/HashMap of java.net.URL. equals() and hashCode() of java.net.URL class make blocking internet connections.',
+     'patterns': [r".*: warning: \[URLEqualsHashCode\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Switch handles all enum values; an explicit default case is unnecessary and defeats error checking for non-exhaustive switches.',
+     'patterns': [r".*: warning: \[UnnecessaryDefaultInEnumSwitch\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Finalizer may run before native code finishes execution',
+     'patterns': [r".*: warning: \[UnsafeFinalization\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Unsynchronized method overrides a synchronized method.',
      'patterns': [r".*: warning: \[UnsynchronizedOverridesSynchronized\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Java assert is used in test. For testing purposes Assert.* matchers should be used.',
+     'patterns': [r".*: warning: \[UseCorrectAssertInTests\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Non-constant variable missing @Var annotation',
      'patterns': [r".*: warning: \[Var\] .+"]},
     {'category': 'java',
@@ -862,6 +1232,151 @@
          'Java: Because of spurious wakeups, Object.wait() and Condition.await() must always be called in a loop',
      'patterns': [r".*: warning: \[WaitNotInLoop\] .+"]},
     {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Pluggable Type checker internal error',
+     'patterns': [r".*: warning: \[PluggableTypeChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Invalid message format-style format specifier ({0}), expected printf-style (%s)',
+     'patterns': [r".*: warning: \[FloggerMessageFormat\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Logger level check is already implied in the log() call. An explicit at[Level]().isEnabled() check is redundant.',
+     'patterns': [r".*: warning: \[FloggerRedundantIsEnabled\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Calling withCause(Throwable) with an inline allocated Throwable is discouraged. Consider using withStackTrace(StackSize) instead, and specifying a reduced stack size (e.g. SMALL, MEDIUM or LARGE) instead of FULL, to improve performance.',
+     'patterns': [r".*: warning: \[FloggerWithCause\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Use withCause to associate Exceptions with log statements',
+     'patterns': [r".*: warning: \[FloggerWithoutCause\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: No bug exists to track an ignored test',
+     'patterns': [r".*: warning: \[IgnoredTestWithoutBug\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: @Ignore is preferred to @Suppress for JUnit4 tests. @Suppress may silently fail in JUnit4 (that is, tests may run anyway.)',
+     'patterns': [r".*: warning: \[JUnit4SuppressWithoutIgnore\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Medium and large test classes should document why they are medium or large',
+     'patterns': [r".*: warning: \[JUnit4TestAttributeMissing\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: java.net.IDN implements the older IDNA2003 standard. Prefer com.google.i18n.Idn, which implements the newer UTS #46 standard',
+     'patterns': [r".*: warning: \[JavaNetIdn\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Consider requiring strict parsing on JodaDurationFlag instances. Before adjusting existing flags, check the documentation and your existing configuration to avoid crashes!',
+     'patterns': [r".*: warning: \[JodaDurationFlagStrictParsing\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Logging an exception and throwing it (or a new exception) for the same exceptional situation is an anti-pattern.',
+     'patterns': [r".*: warning: \[LogAndThrow\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: FormattingLogger uses wrong or mismatched format string',
+     'patterns': [r".*: warning: \[MisusedFormattingLogger\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Flags should be final',
+     'patterns': [r".*: warning: \[NonFinalFlag\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Reading a flag from a static field or initializer block will cause it to always receive the default value and will cause an IllegalFlagStateException if the flag is ever set.',
+     'patterns': [r".*: warning: \[StaticFlagUsage\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Apps must use BuildCompat.isAtLeastO to check whether they\'re running on Android O',
+     'patterns': [r".*: warning: \[UnsafeSdkVersionCheck\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Logging tag cannot be longer than 23 characters.',
+     'patterns': [r".*: warning: \[LogTagLength\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Relative class name passed to ComponentName constructor',
+     'patterns': [r".*: warning: \[RelativeComponentName\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Explicitly enumerate all cases in switch statements for certain enum types.',
+     'patterns': [r".*: warning: \[EnumerateAllCasesInEnumSwitch\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Do not call assumeTrue(tester.getExperimentValueFor(...)). Use @RequireEndToEndTestExperiment instead.',
+     'patterns': [r".*: warning: \[JUnitAssumeExperiment\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: The accessed field or method is not visible here. Note that the default production visibility for @VisibleForTesting is Visibility.PRIVATE.',
+     'patterns': [r".*: warning: \[VisibleForTestingChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Detects errors encountered building Error Prone plugins',
+     'patterns': [r".*: warning: \[ErrorPronePluginCorrectness\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Parcelable CREATOR fields should be Creator\u003cT>',
+     'patterns': [r".*: warning: \[ParcelableCreatorType\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Enforce reflected Parcelables are kept by Proguard',
+     'patterns': [r".*: warning: \[ReflectedParcelable\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Any class that extends IntentService should have @Nullable notation on method onHandleIntent(@Nullable Intent intent) and handle the case if intent is null.',
+     'patterns': [r".*: warning: \[OnHandleIntentNullableChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: In many cases, randomUUID is not necessary, and it slows the performance, which can be quite severe especially when this operation happens at start up time. Consider replacing it with cheaper alternatives, like object.hashCode() or IdGenerator.INSTANCE.getRandomId()',
+     'patterns': [r".*: warning: \[UUIDChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: DynamicActivity.findViewById(int) is slow and should not be used inside View.onDraw(Canvas)!',
+     'patterns': [r".*: warning: \[NoFindViewByIdInOnDrawChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Passing Throwable/Exception argument to the message format L.x(). Calling L.w(tag, message, ex) instead of L.w(tag, ex, message)',
+     'patterns': [r".*: warning: \[WrongThrowableArgumentInLogChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: New splicers are disallowed on paths that are being Libsearched',
+     'patterns': [r".*: warning: \[BlacklistedSplicerPathChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Object serialized in Bundle may have been flattened to base type.',
+     'patterns': [r".*: warning: \[BundleDeserializationCast\] .+"]},
+    {'category': 'java',
      'severity': Severity.HIGH,
      'description':
          'Java: Log tag too long, cannot exceed 23 characters.',
@@ -879,11 +1394,6 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         'Java: Static and default methods in interfaces are not allowed in android builds.',
-     'patterns': [r".*: warning: \[StaticOrDefaultInterfaceMethod\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
          'Java: Incompatible type as argument to Object-accepting Java collections method',
      'patterns': [r".*: warning: \[CollectionIncompatibleType\] .+"]},
     {'category': 'java',
@@ -909,113 +1419,8 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         'Java: @AssistedInject and @Inject cannot be used on the same constructor.',
-     'patterns': [r".*: warning: \[AssistedInjectAndInjectOnSameConstructor\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: @AutoFactory and @Inject should not be used in the same type.',
-     'patterns': [r".*: warning: \[AutoFactoryAtInject\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: Injected constructors cannot be optional nor have binding annotations',
-     'patterns': [r".*: warning: \[InjectedConstructorAnnotations\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: A scoping annotation\'s Target should include TYPE and METHOD.',
-     'patterns': [r".*: warning: \[InjectInvalidTargetingOnScopingAnnotation\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: Abstract and default methods are not injectable with javax.inject.Inject',
-     'patterns': [r".*: warning: \[JavaxInjectOnAbstractMethod\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: @javax.inject.Inject cannot be put on a final field.',
-     'patterns': [r".*: warning: \[JavaxInjectOnFinalField\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: This class has more than one @Inject-annotated constructor. Please remove the @Inject annotation from all but one of them.',
-     'patterns': [r".*: warning: \[MoreThanOneInjectableConstructor\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: Using more than one qualifier annotation on the same element is not allowed.',
-     'patterns': [r".*: warning: \[InjectMoreThanOneQualifier\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: A class can be annotated with at most one scope annotation.',
-     'patterns': [r".*: warning: \[InjectMoreThanOneScopeAnnotationOnClass\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: Annotations cannot be both Scope annotations and Qualifier annotations: this causes confusion when trying to use them.',
-     'patterns': [r".*: warning: \[OverlappingQualifierAndScopeAnnotation\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: Qualifier applied to a method that isn\'t a @Provides method. This method won\'t be used for dependency injection',
-     'patterns': [r".*: warning: \[QualifierOnMethodWithoutProvides\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: Scope annotation on an interface or abstact class is not allowed',
-     'patterns': [r".*: warning: \[InjectScopeAnnotationOnInterfaceOrAbstractClass\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: Scoping and qualifier annotations must have runtime retention.',
-     'patterns': [r".*: warning: \[InjectScopeOrQualifierAnnotationRetention\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: `@Multibinds` is the new way to declare multibindings.',
-     'patterns': [r".*: warning: \[MultibindsInsteadOfMultibindings\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: Dagger @Provides methods may not return null unless annotated with @Nullable',
-     'patterns': [r".*: warning: \[DaggerProvidesNull\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: Scope annotation on implementation class of AssistedInject factory is not allowed',
-     'patterns': [r".*: warning: \[GuiceAssistedInjectScoping\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: A constructor cannot have two @Assisted parameters of the same type unless they are disambiguated with named @Assisted annotations.',
-     'patterns': [r".*: warning: \[GuiceAssistedParameters\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: Although Guice allows injecting final fields, doing so is disallowed because the injected value may not be visible to other threads.',
-     'patterns': [r".*: warning: \[GuiceInjectOnFinalField\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: This method is not annotated with @Inject, but it overrides a method that is  annotated with @javax.inject.Inject. The method will not be Injected.',
-     'patterns': [r".*: warning: \[OverridesJavaxInjectableMethod\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: @Provides methods need to be declared in a Module to have any effect.',
-     'patterns': [r".*: warning: \[ProvidesMethodOutsideOfModule\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
          'Java: Checks for unguarded accesses to fields and methods with @GuardedBy annotations',
-     'patterns': [r".*: warning: \[GuardedByChecker\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: Invalid @GuardedBy expression',
-     'patterns': [r".*: warning: \[GuardedByValidator\] .+"]},
+     'patterns': [r".*: warning: \[GuardedBy\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
@@ -1034,13 +1439,13 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         'Java: An argument is more similar to a different parameter; the arguments may have been swapped.',
-     'patterns': [r".*: warning: \[ArgumentParameterSwap\] .+"]},
+         'Java: Reference equality used to compare arrays',
+     'patterns': [r".*: warning: \[ArrayEquals\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         'Java: Reference equality used to compare arrays',
-     'patterns': [r".*: warning: \[ArrayEquals\] .+"]},
+         'Java: Arrays.fill(Object[], Object) called with incompatible types.',
+     'patterns': [r".*: warning: \[ArrayFillIncompatibleType\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
@@ -1089,6 +1494,11 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java:  Implementing \'Comparable\u003cT>\' where T is not compatible with the implementing class.',
+     'patterns': [r".*: warning: \[ComparableType\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: This comparison method violates the contract',
      'patterns': [r".*: warning: \[ComparisonContractViolated\] .+"]},
     {'category': 'java',
@@ -1104,6 +1514,16 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: Non-trivial compile time constant boolean expressions shouldn\'t be used.',
+     'patterns': [r".*: warning: \[ComplexBooleanConstant\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: A conditional expression with numeric operands of differing types will perform binary numeric promotion of the operands; when these operands are of reference types, the expression\'s result may not be of the expected type.',
+     'patterns': [r".*: warning: \[ConditionalExpressionNumericPromotion\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: Compile-time constant expression overflows',
      'patterns': [r".*: warning: \[ConstantOverflow\] .+"]},
     {'category': 'java',
@@ -1114,11 +1534,21 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: Thread created but not started',
+     'patterns': [r".*: warning: \[DeadThread\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: Division by integer literal zero',
      'patterns': [r".*: warning: \[DivZero\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: This method should not be called.',
+     'patterns': [r".*: warning: \[DoNotCall\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: Empty statement after if',
      'patterns': [r".*: warning: \[EmptyIf\] .+"]},
     {'category': 'java',
@@ -1129,7 +1559,12 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         'Java: Method annotated @ForOverride must be protected or package-private and only invoked from declaring class',
+         'Java: == must be used in equals method to check equality to itself or an infinite loop will occur.',
+     'patterns': [r".*: warning: \[EqualsReference\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Method annotated @ForOverride must be protected or package-private and only invoked from declaring class, or from an override of the method',
      'patterns': [r".*: warning: \[ForOverride\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
@@ -1144,6 +1579,11 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: DoubleMath.fuzzyEquals should never be used in an Object.equals() method',
+     'patterns': [r".*: warning: \[FuzzyEqualsShouldNotBeUsedInEqualsMethod\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: Calling getClass() on an annotation may return a proxy class',
      'patterns': [r".*: warning: \[GetClassOnAnnotation\] .+"]},
     {'category': 'java',
@@ -1154,17 +1594,12 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         'Java: An object is tested for equality to itself using Guava Libraries',
-     'patterns': [r".*: warning: \[GuavaSelfEquals\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
          'Java: contains() is a legacy method that is equivalent to containsValue()',
      'patterns': [r".*: warning: \[HashtableContains\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         'Java: Writing "a && a", "a || a", "a & a", or "a | a" is equivalent to "a".',
+         'Java: A binary expression where both operands are the same is usually incorrect.',
      'patterns': [r".*: warning: \[IdentityBinaryExpression\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
@@ -1174,6 +1609,16 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: The first argument to indexOf is a Unicode code point, and the second is the index to start the search from',
+     'patterns': [r".*: warning: \[IndexOfChar\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Conditional expression in varargs call contains array and non-array arguments',
+     'patterns': [r".*: warning: \[InexactVarargsConditional\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: This method always recurses, and will cause a StackOverflowError',
      'patterns': [r".*: warning: \[InfiniteRecursion\] .+"]},
     {'category': 'java',
@@ -1189,36 +1634,71 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: Invalid time zone identifier. TimeZone.getTimeZone(String) will silently return GMT instead of the time zone you intended.',
+     'patterns': [r".*: warning: \[InvalidTimeZoneID\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: The argument to Class#isInstance(Object) should not be a Class',
      'patterns': [r".*: warning: \[IsInstanceOfClass\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: Path implements Iterable\u003cPath>; prefer Collection\u003cPath> for clarity',
+     'patterns': [r".*: warning: \[IterablePathParameter\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: jMock tests must have a @RunWith(JMock.class) annotation, or the Mockery field must have a @Rule JUnit annotation',
      'patterns': [r".*: warning: \[JMockTestWithoutRunWithOrRuleAnnotation\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         'Java: Test method will not be run; please prefix name with "test"',
+         'Java: Test method will not be run; please correct method signature (Should be public, non-static, and method name should begin with "test").',
      'patterns': [r".*: warning: \[JUnit3TestNotRun\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         'Java: setUp() method will not be run; Please add a @Before annotation',
+         'Java: This method should be static',
+     'patterns': [r".*: warning: \[JUnit4ClassAnnotationNonStatic\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: setUp() method will not be run; please add JUnit\'s @Before annotation',
      'patterns': [r".*: warning: \[JUnit4SetUpNotRun\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         'Java: tearDown() method will not be run; Please add an @After annotation',
+         'Java: tearDown() method will not be run; please add JUnit\'s @After annotation',
      'patterns': [r".*: warning: \[JUnit4TearDownNotRun\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         'Java: Test method will not be run; please add @Test annotation',
+         'Java: This looks like a test method but is not run; please add @Test or @Ignore, or, if this is a helper method, reduce its visibility.',
      'patterns': [r".*: warning: \[JUnit4TestNotRun\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: An object is tested for reference equality to itself using JUnit library.',
+     'patterns': [r".*: warning: \[JUnitAssertSameCheck\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: This pattern will silently corrupt certain byte sequences from the serialized protocol message. Use ByteString or byte[] directly',
+     'patterns': [r".*: warning: \[LiteByteStringUtf8\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Loop condition is never modified in loop body.',
+     'patterns': [r".*: warning: \[LoopConditionChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Overriding method is missing a call to overridden super method',
+     'patterns': [r".*: warning: \[MissingSuperCall\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: Use of "YYYY" (week year) in a date pattern without "ww" (week in year). You probably meant to use "yyyy" (year) instead.',
      'patterns': [r".*: warning: \[MisusedWeekYear\] .+"]},
     {'category': 'java',
@@ -1239,6 +1719,16 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: The result of this method must be closed.',
+     'patterns': [r".*: warning: \[MustBeClosedChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: The first argument to nCopies is the number of copies, and the second is the item to copy',
+     'patterns': [r".*: warning: \[NCopiesOfChar\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: @NoAllocation was specified on this method, but something was found that would trigger an allocation',
      'patterns': [r".*: warning: \[NoAllocation\] .+"]},
     {'category': 'java',
@@ -1259,6 +1749,11 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: This conditional expression may evaluate to null, which will result in an NPE when the result is unboxed.',
+     'patterns': [r".*: warning: \[NullTernary\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: Numeric comparison using reference equality instead of value equality',
      'patterns': [r".*: warning: \[NumericEquality\] .+"]},
     {'category': 'java',
@@ -1269,11 +1764,6 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         'Java: Varargs doesn\'t agree for overridden method',
-     'patterns': [r".*: warning: \[Overrides\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
          'Java: Declaring types inside package-info.java files is very bad form',
      'patterns': [r".*: warning: \[PackageInfo\] .+"]},
     {'category': 'java',
@@ -1289,6 +1779,16 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: Using ::equals as an incompatible Predicate; the predicate will always return false',
+     'patterns': [r".*: warning: \[PredicateIncompatibleType\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Access to a private protocol buffer field is forbidden. This protocol buffer carries a security contract, and can only be created using an approved library. Direct access to the fields is forbidden.',
+     'patterns': [r".*: warning: \[PrivateSecurityContractProtoAccess\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: Protobuf fields cannot be null',
      'patterns': [r".*: warning: \[ProtoFieldNullComparison\] .+"]},
     {'category': 'java',
@@ -1299,6 +1799,16 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: To get the tag number of a protocol buffer enum, use getNumber() instead.',
+     'patterns': [r".*: warning: \[ProtocolBufferOrdinal\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Casting a random number in the range [0.0, 1.0) to an integer or long always results in 0.',
+     'patterns': [r".*: warning: \[RandomCast\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: Use Random.nextInt(int).  Random.nextInt() % n can have negative results',
      'patterns': [r".*: warning: \[RandomModInteger\] .+"]},
     {'category': 'java',
@@ -1324,13 +1834,13 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         'Java: Variable compared to itself',
-     'patterns': [r".*: warning: \[SelfEquality\] .+"]},
+         'Java: Testing an object for equality with itself will always be true.',
+     'patterns': [r".*: warning: \[SelfEquals\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         'Java: An object is tested for equality to itself',
-     'patterns': [r".*: warning: \[SelfEquals\] .+"]},
+         'Java: This method must be called with an even number of arguments.',
+     'patterns': [r".*: warning: \[ShouldHaveEvenArgs\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
@@ -1359,6 +1869,16 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: Throwing \'null\' always results in a NullPointerException being thrown.',
+     'patterns': [r".*: warning: \[ThrowNull\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: isEqualTo should not be used to test an object for equality with itself; the assertion will never fail.',
+     'patterns': [r".*: warning: \[TruthSelfEquals\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: Catching Throwable/Error masks failures from fail() or assert*() in the try block',
      'patterns': [r".*: warning: \[TryFailThrowable\] .+"]},
     {'category': 'java',
@@ -1384,8 +1904,193 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: `var` should not be used as a type name.',
+     'patterns': [r".*: warning: \[VarTypeName\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: Method parameter has wrong package',
      'patterns': [r".*: warning: \[ParameterPackage\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Type declaration annotated with @ThreadSafe is not thread safe',
+     'patterns': [r".*: warning: \[ThreadSafe\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Use of class, field, or method that is not compatible with legacy Android devices',
+     'patterns': [r".*: warning: \[AndroidApiChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Invalid use of Flogger format string',
+     'patterns': [r".*: warning: \[AndroidFloggerFormatString\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Use TunnelException.getCauseAs(Class) instead of casting the result of TunnelException.getCause().',
+     'patterns': [r".*: warning: \[DoNotCastTunnelExceptionCause\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Identifies undesirable mocks.',
+     'patterns': [r".*: warning: \[DoNotMock_ForJavaBuilder\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Duration Flag should NOT have units in the variable name or the @FlagSpec\'s name or altName field.',
+     'patterns': [r".*: warning: \[DurationFlagWithUnits\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Duration.get() only works with SECONDS or NANOS.',
+     'patterns': [r".*: warning: \[DurationGetTemporalUnit\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Invalid printf-style format string',
+     'patterns': [r".*: warning: \[FloggerFormatString\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Test class may not be run because it is missing a @RunWith annotation',
+     'patterns': [r".*: warning: \[JUnit4RunWithMissing\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Use of class, field, or method that is not compatible with JDK 7',
+     'patterns': [r".*: warning: \[Java7ApiChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Use of java.time.Duration.withNanos(int) is not allowed.',
+     'patterns': [r".*: warning: \[JavaDurationWithNanos\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Use of java.time.Duration.withSeconds(long) is not allowed.',
+     'patterns': [r".*: warning: \[JavaDurationWithSeconds\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: java.time APIs that silently use the default system time-zone are not allowed.',
+     'patterns': [r".*: warning: \[JavaTimeDefaultTimeZone\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Use of new Duration(long) is not allowed. Please use Duration.millis(long) instead.',
+     'patterns': [r".*: warning: \[JodaDurationConstructor\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Use of duration.withMillis(long) is not allowed. Please use Duration.millis(long) instead.',
+     'patterns': [r".*: warning: \[JodaDurationWithMillis\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Use of instant.withMillis(long) is not allowed. Please use new Instant(long) instead.',
+     'patterns': [r".*: warning: \[JodaInstantWithMillis\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Use of JodaTime\'s type.plus(long) or type.minus(long) is not allowed (where <type> = {Duration,Instant,DateTime,DateMidnight}). Please use type.plus(Duration.millis(long)) or type.minus(Duration.millis(long)) instead.',
+     'patterns': [r".*: warning: \[JodaPlusMinusLong\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Changing JodaTime\'s current time is not allowed in non-testonly code.',
+     'patterns': [r".*: warning: \[JodaSetCurrentMillis\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Use of Joda-Time\'s DateTime.toDateTime(), Duration.toDuration(), Instant.toInstant(), Interval.toInterval(), and Period.toPeriod() are not allowed.',
+     'patterns': [r".*: warning: \[JodaToSelf\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Use of JodaTime\'s type.withDurationAdded(long, int) is not allowed (where <type> = {Duration,Instant,DateTime}). Please use type.withDurationAdded(Duration.millis(long), int) instead.',
+     'patterns': [r".*: warning: \[JodaWithDurationAddedLong\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: LanguageCode comparison using reference equality instead of value equality',
+     'patterns': [r".*: warning: \[LanguageCodeEquality\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: The zero argument toString is not part of the Localizable interface and likely is just the java Object toString.  You probably want to call toString(Locale).',
+     'patterns': [r".*: warning: \[LocalizableWrongToString\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Period.get() only works with YEARS, MONTHS, or DAYS.',
+     'patterns': [r".*: warning: \[PeriodGetTemporalUnit\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Return value of methods returning Promise must be checked. Ignoring returned Promises suppresses exceptions thrown from the code that completes the Promises.',
+     'patterns': [r".*: warning: \[PromiseReturnValueIgnored\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: When returning a Promise, use thenChain() instead of then()',
+     'patterns': [r".*: warning: \[PromiseThenReturningPromise\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Streams.iterating() is unsafe for use except in the header of a for-each loop; please see its Javadoc for details.',
+     'patterns': [r".*: warning: \[StreamsIteratingNotInLoop\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: TemporalAccessor.get() only works for certain values of ChronoField.',
+     'patterns': [r".*: warning: \[TemporalAccessorGetChronoField\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Try-with-resources is not supported in this code, use try/finally instead',
+     'patterns': [r".*: warning: \[TryWithResources\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Adds checkOrThrow calls where needed',
+     'patterns': [r".*: warning: \[AddCheckOrThrow\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Equality on Nano protos (== or .equals) might not be the same in Lite',
+     'patterns': [r".*: warning: \[ForbidNanoEquality\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Submessages of a proto cannot be mutated',
+     'patterns': [r".*: warning: \[ForbidSubmessageMutation\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Repeated fields on proto messages cannot be directly referenced',
+     'patterns': [r".*: warning: \[NanoUnsafeRepeatedFieldUsage\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Requires that non-@enum int assignments to @enum ints are wrapped in a checkOrThrow',
+     'patterns': [r".*: warning: \[RequireCheckOrThrow\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Assignments into repeated field elements must be sequential',
+     'patterns': [r".*: warning: \[RequireSequentialRepeatedFields\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Future.get in Google Now Producers code',
+     'patterns': [r".*: warning: \[FutureGetInNowProducers\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: @SimpleEnum applied to non-enum type',
+     'patterns': [r".*: warning: \[SimpleEnumUsage\] .+"]},
 
     # End warnings generated by Error Prone
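
For context, each entry in the table above is a plain Python dict consumed by warn.py's classifier: every regex in 'patterns' is tried against raw build-log lines, and the first match tags the line with that entry's category, severity, and description. A minimal sketch of that matching loop, assuming a stand-in Severity class and a hypothetical classify_warning() helper rather than warn.py's actual internals:

import re

class Severity:          # stand-in for warn.py's Severity constants
    HIGH = 1

warn_patterns = [
    {'category': 'java',
     'severity': Severity.HIGH,
     'description': 'Java: Loop condition is never modified in loop body.',
     'patterns': [r".*: warning: \[LoopConditionChecker\] .+"]},
]

def classify_warning(line):
    # Return the first table entry whose regex matches the log line.
    for entry in warn_patterns:
        for pattern in entry['patterns']:
            if re.match(pattern, line):
                return entry
    return None

hit = classify_warning('Foo.java:7: warning: [LoopConditionChecker] i is never modified')
if hit:
    print(hit['severity'], hit['description'])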
 
@@ -2371,7 +3076,7 @@
   for idx in reversed(range(2, len(parts))):
     root_path = '/'.join(parts[:idx])
     # Android root directory should contain this script.
-    if os.path.exists(root_path + '/build/tools/warn.py'):
+    if os.path.exists(root_path + '/build/make/tools/warn.py'):
       android_root = root_path
       return root_path
   return ''
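
The final hunk retargets the source-root heuristic: warn.py moved from build/tools/ to build/make/tools/ when the build repo was split, so the walk-up check must probe the new path. A self-contained sketch of the same technique, using a hypothetical find_android_root() wrapper (the real function derives its starting path differently and stores the result in a global):

import os

def find_android_root(start_path):
    # Split an absolute path into components and walk from the deepest
    # ancestor toward '/', returning the first directory that contains
    # the relocated marker file.
    parts = os.path.abspath(start_path).split('/')
    for idx in reversed(range(2, len(parts))):
        root_path = '/'.join(parts[:idx])
        # Android root directory should contain this script.
        if os.path.exists(root_path + '/build/make/tools/warn.py'):
            return root_path
    return ''

# e.g. find_android_root('/work/aosp/out/target/product/generic/obj/foo.o')
# returns '/work/aosp' if that tree contains build/make/tools/warn.py.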